forked from frc971/971-Robot-Code
-
Notifications
You must be signed in to change notification settings - Fork 0
/
base_test.cc
288 lines (252 loc) · 10.7 KB
/
base_test.cc
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
#include "aos/flatbuffers/base.h"

#include <stddef.h>
#include <stdint.h>

#include <algorithm>

#include "gtest/gtest.h"
namespace aos::fbs::testing {
// Tests that AlignOffset() behaves as expected: already-aligned offsets are
// returned unchanged, and everything else is rounded up to the next multiple
// of the alignment.
TEST(BaseTest, AlignOffset) {
  const struct {
    int expected;
    int offset;
  } kCases[] = {{0, 0}, {4, 4}, {8, 5}, {8, 6}, {8, 7}};
  for (const auto &testcase : kCases) {
    EXPECT_EQ(testcase.expected, AlignOffset(testcase.offset, 4));
  }
}
// Tests that AlignOffset handles the alignment point being nonzero. This shows
// up when you want 8 byte alignment 4 bytes into the start of the buffer, and
// don't want to pad out the front of the buffer.
TEST(BaseTest, AlignOffsetWithOffset) {
  // An offset of 4 is already at the alignment point itself.
  EXPECT_EQ(4, AlignOffset(4, 4, 4));
  // Offsets 0 through 4 all land on 4 when aligning to 8 bytes relative to an
  // alignment point of 4.
  for (int offset = 0; offset <= 4; ++offset) {
    EXPECT_EQ(4, AlignOffset(offset, 8, 4));
  }
  // Just past the alignment point we jump a full 8 bytes, to 12.
  EXPECT_EQ(12, AlignOffset(5, 8, 4));
}
// Buffer size shared by the allocator tests below: twice the
// AlignedVectorAllocator's alignment, so tests can grow an allocation by a
// full alignment unit.
inline constexpr size_t kDefaultSize = AlignedVectorAllocator::kAlignment * 2;

// Typed test fixture that runs the same suite of tests against every
// Allocator implementation listed in AllocatorTypes.
template <typename T>
class AllocatorTest : public ::testing::Test {
 protected:
  AllocatorTest() : allocator_(std::make_unique<T>()) {}
  // Backing storage used by the SpanAllocator specialization below;
  // over-aligned to 64 so alignment-sensitive tests have headroom.
  alignas(64) std::array<uint8_t, kDefaultSize> buffer_;
  // unique_ptr so that we can destroy the allocator at will.
  std::unique_ptr<T> allocator_;
};

// SpanAllocator is the one allocator that cannot be default-constructed; wrap
// it around the fixture's buffer_ instead.
template <>
AllocatorTest<SpanAllocator>::AllocatorTest()
    : allocator_(std::make_unique<SpanAllocator>(
          std::span<uint8_t>{buffer_.data(), buffer_.size()})) {}

using AllocatorTypes = ::testing::Types<SpanAllocator, AlignedVectorAllocator,
                                        FixedStackAllocator<kDefaultSize>>;
TYPED_TEST_SUITE(AllocatorTest, AllocatorTypes);
// Tests that we can create and then never use an allocator — construction
// followed immediately by destruction must be valid for every allocator type.
TYPED_TEST(AllocatorTest, UnusedAllocator) {}
// Tests that a simple allocate works.
TYPED_TEST(AllocatorTest, BasicAllocate) {
  const std::span<uint8_t> allocation =
      this->allocator_->Allocate(kDefaultSize, 4, SetZero::kYes).value();
  ASSERT_EQ(kDefaultSize, allocation.size());
  // SetZero::kYes was requested, so every one of the kDefaultSize bytes must
  // be zero-initialized.
  EXPECT_EQ(kDefaultSize,
            std::count(allocation.begin(), allocation.end(), 0));
  this->allocator_->Deallocate(allocation);
}
// Tests that we can insert bytes into an arbitrary spot in the buffer.
TYPED_TEST(AllocatorTest, InsertBytes) {
  const size_t kHalfSize = kDefaultSize / 2;
  std::span<uint8_t> buffer =
      this->allocator_->Allocate(kHalfSize, 4, SetZero::kYes).value();
  ASSERT_EQ(kHalfSize, buffer.size());
  // Fill the buffer with sentinel values 1, 2, 3, ... so that we can detect
  // exactly which bytes moved where after the insertion.
  uint8_t sentinel = 1;
  for (uint8_t &byte : buffer) {
    byte = sentinel++;
  }
  // Insert kHalfSize zeroed bytes one byte past the start, so exactly one old
  // byte remains in front of the inserted region.
  buffer = this->allocator_
               ->InsertBytes(buffer.data() + 1u, kHalfSize, 0, SetZero::kYes)
               .value();
  ASSERT_EQ(kDefaultSize, buffer.size());
  // First byte: the untouched sentinel.
  EXPECT_EQ(1u, buffer[0]);
  // Next kHalfSize bytes: the newly inserted, zeroed region.
  for (size_t index = 1; index < kHalfSize + 1u; ++index) {
    EXPECT_EQ(0u, buffer[index]);
  }
  // Remainder: the rest of the original sentinels, shifted up.
  for (size_t index = kHalfSize + 1u; index < buffer.size(); ++index) {
    EXPECT_EQ(index - kHalfSize + 1, buffer[index]);
  }
  this->allocator_->Deallocate(buffer);
}
// Tests that all allocators return data aligned to the requested alignment.
TYPED_TEST(AllocatorTest, Alignment) {
  for (size_t alignment : {4, 8, 16, 32, 64}) {
    std::span<uint8_t> span =
        this->allocator_->Allocate(kDefaultSize, alignment, SetZero::kYes)
            .value();
    // uintptr_t (not size_t) is the integer type guaranteed to be able to
    // hold a pointer value, so use it for the alignment check.
    EXPECT_EQ(reinterpret_cast<uintptr_t>(span.data()) % alignment, 0u);
    this->allocator_->Deallocate(span);
  }
}
// Tests that we can remove bytes from an arbitrary spot in the buffer.
TYPED_TEST(AllocatorTest, RemoveBytes) {
  // Deletion doesn't require resizing, so we don't need to worry about it being
  // larger than the alignment to test everything. The test requires the size
  // to be < 255 to store the sentinel values.
  // Named kBufferSize rather than shadowing the file-level kDefaultSize.
  const size_t kBufferSize = 128;
  const size_t half_size = kBufferSize / 2;
  std::span<uint8_t> span =
      this->allocator_->Allocate(kBufferSize, 4, SetZero::kYes).value();
  ASSERT_EQ(kBufferSize, span.size());
  // Set the span with some sentinel values so that we can detect that the
  // removal occurred correctly.
  for (size_t ii = 0; ii < span.size(); ++ii) {
    span[ii] = ii + 1;
  }
  // Remove bytes such that one old byte will remain at the start, and a chunk
  // of half_size bytes will be cut out after that.
  span = this->allocator_->RemoveBytes(span.subspan(1, half_size));
  ASSERT_EQ(half_size, span.size());
  // First byte is the untouched sentinel; everything after it is the tail of
  // the original buffer, shifted down by half_size.
  size_t index = 0;
  EXPECT_EQ(1u, span[index]);
  index++;
  for (; index < span.size(); ++index) {
    EXPECT_EQ(index + half_size + 1, span[index]);
  }
  this->allocator_->Deallocate(span);
}
// Tests that if we fail to deallocate that we fail during destruction.
TYPED_TEST(AllocatorTest, NoDeallocate) {
  EXPECT_DEATH(
      {
        // Allocate 4 bytes and deliberately never deallocate them; destroying
        // the allocator with the allocation still live should abort with a
        // message matching "Must deallocate".
        EXPECT_EQ(
            4, this->allocator_->Allocate(4, 4, SetZero::kYes).value().size());
        this->allocator_.reset();
      },
      "Must deallocate");
}
// Tests that if we never allocate that we cannot deallocate.
TYPED_TEST(AllocatorTest, NoAllocateThenDeallocate) {
  // Deallocating without a matching Allocate should abort with a message
  // matching "prior allocation".
  EXPECT_DEATH(this->allocator_->Deallocate(std::span<uint8_t>()),
               "prior allocation");
}
// Tests that if we attempt to allocate more than the backing span allows that
// we correctly return an empty span.
TEST(SpanAllocatorTest, OverAllocate) {
  std::vector<uint8_t> backing(kDefaultSize);
  SpanAllocator allocator({backing.data(), backing.size()});
  // Requesting even a single byte more than the backing span holds must
  // report failure rather than returning a span.
  EXPECT_FALSE(
      allocator.Allocate(kDefaultSize + 1u, 0, SetZero::kYes).has_value());
}
// Tests that if we attempt to insert more than the backing span allows that
// we correctly return an empty span.
TEST(SpanAllocatorTest, OverInsert) {
  std::vector<uint8_t> backing(kDefaultSize);
  SpanAllocator allocator({backing.data(), backing.size()});
  // Consume the entire backing span up front...
  std::span<uint8_t> span =
      allocator.Allocate(kDefaultSize, 1, SetZero::kYes).value();
  EXPECT_EQ(kDefaultSize, span.size());
  // ...so that inserting even one more byte must fail.
  EXPECT_FALSE(
      allocator.InsertBytes(span.data(), 1u, 0, SetZero::kYes).has_value());
  allocator.Deallocate(span);
}
// Because we really aren't meant to instantiate ResizeableObject's directly (if
// nothing else it has virtual member functions), define a testing
// implementation.
class TestResizeableObject : public ResizeableObject {
 public:
  TestResizeableObject(std::span<uint8_t> buffer, ResizeableObject *parent)
      : ResizeableObject(buffer, parent) {}
  TestResizeableObject(std::span<uint8_t> buffer, Allocator *allocator)
      : ResizeableObject(buffer, allocator) {}
  virtual ~TestResizeableObject() {}
  using ResizeableObject::SubObject;
  // Thin wrapper exposing the protected InsertBytes; returns true on success.
  bool InsertBytes(void *insertion_point, size_t bytes) {
    return ResizeableObject::InsertBytes(insertion_point, bytes, SetZero::kYes)
        .has_value();
  }
  TestResizeableObject(TestResizeableObject &&) = default;
  // Bookkeeping for one sub-object tracked by this object.
  struct TestObject {
    // Offset within buffer_ of the inline (relative) offset entry.
    uoffset_t inline_entry_offset;
    // The sub-object itself; null when the entry is unpopulated.
    std::unique_ptr<TestResizeableObject> object;
    // Offset of the sub-object's storage from the start of buffer_.
    size_t absolute_offset;
  };
  // Adds a new object of the requested size.
  // Writes the inline entry (relative offset from the entry to the object, or
  // 0 when set_object is false) and, when set_object is true, constructs the
  // sub-object over the corresponding slice of our buffer.
  void AddEntry(uoffset_t inline_entry_offset, size_t absolute_offset,
                size_t buffer_size, bool set_object) {
    *reinterpret_cast<uoffset_t *>(buffer_.data() + inline_entry_offset) =
        set_object ? (absolute_offset - inline_entry_offset) : 0;
    objects_.emplace_back(
        TestObject{inline_entry_offset, nullptr, absolute_offset});
    if (set_object) {
      objects_.back().object = std::make_unique<TestResizeableObject>(
          buffer().subspan(absolute_offset, buffer_size), this);
    }
  }
  size_t NumberOfSubObjects() const override { return objects_.size(); }
  SubObject GetSubObject(size_t index) override {
    TestObject &subobject = objects_.at(index);
    return {reinterpret_cast<uoffset_t *>(buffer_.data() +
                                          subobject.inline_entry_offset),
            subobject.object.get(), &subobject.absolute_offset};
  }
  TestObject &GetObject(size_t index) { return objects_.at(index); }
  // Force a large alignment so resize tests can observe alignment rounding.
  size_t Alignment() const override { return 64; }

 private:
  std::vector<TestObject> objects_;
};
// Fixture that owns an allocator plus a TestResizeableObject backed by a
// kInitialSize-byte zeroed allocation.
class ResizeableObjectTest : public ::testing::Test {
 protected:
  static constexpr size_t kInitialSize = 128;
  ResizeableObjectTest()
      : object_(allocator_.Allocate(kInitialSize, 4, SetZero::kYes).value(),
                &allocator_) {}
  // The allocator requires an explicit Deallocate before destruction.
  ~ResizeableObjectTest() { allocator_.Deallocate(object_.buffer()); }
  AlignedVectorAllocator allocator_;
  TestResizeableObject object_;
};
// Tests that if we created an object and then do nothing with it that nothing
// untoward happens (construction + destruction of the fixture alone is valid).
TEST_F(ResizeableObjectTest, DoNothing) {}
// Test that when we move the ResizeableObject we clear the reference to the old
// buffer.
TEST_F(ResizeableObjectTest, Move) {
  TestResizeableObject target_object = std::move(object_);
  // Deliberately inspect the moved-from object: its buffer must now be empty,
  // with ownership transferred to target_object.
  ASSERT_EQ(0u, object_.buffer().size());
  ASSERT_EQ(kInitialSize, target_object.buffer().size());
}
// Tests the pathways for resizing a nested ResizeableObject works.
TEST_F(ResizeableObjectTest, ResizeNested) {
  constexpr size_t kAbsoluteOffset = 64;
  // Entry 0: a populated 64-byte sub-object at offset 64, with its inline
  // entry 4 bytes in. Entry 1: an unpopulated entry (inline offset 0) at the
  // same absolute offset.
  object_.AddEntry(4, kAbsoluteOffset, 64, true);
  TestResizeableObject *subobject = object_.GetObject(0).object.get();
  object_.AddEntry(0, kAbsoluteOffset, 64, false);
  // Inline entry stores the offset from the entry to the object: 64 - 4 = 60.
  EXPECT_EQ(60, *object_.GetSubObject(0).inline_entry);
  EXPECT_EQ(0, *object_.GetSubObject(1).inline_entry);
  EXPECT_EQ(64, object_.GetObject(0).object->buffer().data() -
                    object_.buffer().data());
  constexpr size_t kInsertBytes = 5;
  // The insert should succeed.
  ASSERT_TRUE(
      subobject->InsertBytes(subobject->buffer().data() + 1u, kInsertBytes));
  // We should now observe the size of the buffers increasing, but the start
  // _not_ moving.
  // We should've rounded the insert up to the alignment we are using (64
  // bytes).
  EXPECT_EQ(kInitialSize + 64, object_.buffer().size());
  EXPECT_EQ(128, subobject->buffer().size());
  // Inserting inside the sub-object must not shift the entries themselves.
  EXPECT_EQ(60, *object_.GetSubObject(0).inline_entry);
  EXPECT_EQ(0, *object_.GetSubObject(1).inline_entry);
  EXPECT_EQ(kAbsoluteOffset, object_.GetObject(0).absolute_offset);
  EXPECT_EQ(kAbsoluteOffset, object_.GetObject(1).absolute_offset);
  // And next we insert before the subobjects, so that we can see their offsets
  // shift. The insert should succeed.
  ASSERT_TRUE(object_.InsertBytes(subobject->buffer().data(), kInsertBytes));
  EXPECT_EQ(kInitialSize + 2 * 64, object_.buffer().size());
  EXPECT_EQ(128, subobject->buffer().size());
  // The populated entry's inline offset grows by the aligned insert (64).
  EXPECT_EQ(60 + 64, *object_.GetSubObject(0).inline_entry);
  // The unpopulated object's inline entry should not have changed since
  // it was zero.
  EXPECT_EQ(0, *object_.GetSubObject(1).inline_entry);
  EXPECT_EQ(kAbsoluteOffset + 64, object_.GetObject(0).absolute_offset);
  EXPECT_EQ(kAbsoluteOffset + 64, object_.GetObject(1).absolute_offset);
}
} // namespace aos::fbs::testing