#include "aos/flatbuffers/base.h"

#include "gtest/gtest.h"

namespace aos::fbs::testing {
// Tests that PaddedSize() behaves as expected.
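// As the expectations below demonstrate, PaddedSize(size, alignment) rounds
// size up to the next multiple of alignment, leaving already-aligned sizes
// (including zero) unchanged.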
TEST(BaseTest, PaddedSize) {
  EXPECT_EQ(0, PaddedSize(0, 4));
  EXPECT_EQ(4, PaddedSize(4, 4));
  EXPECT_EQ(8, PaddedSize(5, 4));
  EXPECT_EQ(8, PaddedSize(6, 4));
  EXPECT_EQ(8, PaddedSize(7, 4));
}

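// Default allocation size (and, for the SpanAllocator, backing buffer size)
// used by the allocator tests below.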
inline constexpr size_t kDefaultSize = 16;
template <typename T>
class AllocatorTest : public ::testing::Test {
 protected:
  AllocatorTest() : allocator_(std::make_unique<T>()) {}
  std::vector<uint8_t> buffer_;
  // unique_ptr so that we can destroy the allocator at will.
  std::unique_ptr<T> allocator_;
};

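// The SpanAllocator is backed by a caller-provided span rather than owning its
// own memory, so its fixture constructor is specialized to construct a buffer
// and hand the allocator a span over it.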
template <>
AllocatorTest<SpanAllocator>::AllocatorTest()
    : buffer_(kDefaultSize),
      allocator_(std::make_unique<SpanAllocator>(
          std::span<uint8_t>{buffer_.data(), buffer_.size()})) {}

using AllocatorTypes = ::testing::Types<SpanAllocator, VectorAllocator>;
TYPED_TEST_SUITE(AllocatorTest, AllocatorTypes);

// Tests that we can create an allocator and never use it.
TYPED_TEST(AllocatorTest, UnusedAllocator) {}

// Tests that a simple allocate works.
TYPED_TEST(AllocatorTest, BasicAllocate) {
  std::span<uint8_t> span =
      this->allocator_->Allocate(kDefaultSize, 4, SetZero::kYes).value();
  ASSERT_EQ(kDefaultSize, span.size());
  // We set SetZero::kYes; it should be zero-initialized.
  EXPECT_EQ(kDefaultSize, std::count(span.begin(), span.end(), 0));
  this->allocator_->Deallocate(span);
}

// Tests that we can insert bytes into an arbitrary spot in the buffer.
TYPED_TEST(AllocatorTest, InsertBytes) {
  const size_t half_size = kDefaultSize / 2;
  std::span<uint8_t> span =
      this->allocator_->Allocate(half_size, 4, SetZero::kYes).value();
  ASSERT_EQ(half_size, span.size());
  // Set the span with some sentinel values so that we can detect that the
  // insertion occurred correctly.
  for (size_t ii = 0; ii < span.size(); ++ii) {
    span[ii] = ii + 1;
  }

  // Insert new bytes such that one old byte will still be at the start.
  span = this->allocator_
             ->InsertBytes(span.data() + 1u, half_size, 0, SetZero::kYes)
             .value();
  ASSERT_EQ(kDefaultSize, span.size());
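  // Expected layout: the original first byte, then half_size zeroed bytes from
  // the insertion, then the remaining original sentinel bytes (2 through
  // half_size).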
  size_t index = 0;
  EXPECT_EQ(1u, span[index]);
  index++;
  for (; index < half_size + 1u; ++index) {
    EXPECT_EQ(0u, span[index]);
  }
  for (; index < span.size(); ++index) {
    EXPECT_EQ(index - half_size + 1, span[index]);
  }
  this->allocator_->Deallocate(span);
}

// Tests that we can remove bytes from an arbitrary spot in the buffer.
TYPED_TEST(AllocatorTest, RemoveBytes) {
  const size_t half_size = kDefaultSize / 2;
  std::span<uint8_t> span =
      this->allocator_->Allocate(kDefaultSize, 4, SetZero::kYes).value();
  ASSERT_EQ(kDefaultSize, span.size());
  // Set the span with some sentinel values so that we can detect that the
  // removal occurred correctly.
  for (size_t ii = 0; ii < span.size(); ++ii) {
    span[ii] = ii + 1;
  }

  // Remove bytes such that one old byte will remain at the start, and a chunk
  // of half_size (8) bytes will be cut out after that.
  span = this->allocator_->RemoveBytes(span.subspan(1, half_size));
  ASSERT_EQ(half_size, span.size());
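  // Expected layout: the original first byte, followed immediately by the
  // trailing original sentinel bytes (half_size + 2 through kDefaultSize).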
  size_t index = 0;
  EXPECT_EQ(1u, span[index]);
  index++;
  for (; index < span.size(); ++index) {
    EXPECT_EQ(index + half_size + 1, span[index]);
  }
  this->allocator_->Deallocate(span);
}

// Tests that if we fail to deallocate, we fail during destruction.
TYPED_TEST(AllocatorTest, NoDeallocate) {
  EXPECT_DEATH(
      {
        EXPECT_EQ(
            4, this->allocator_->Allocate(4, 4, SetZero::kYes).value().size());
        this->allocator_.reset();
      },
      "Must deallocate");
}

// Tests that if we never allocate, we cannot deallocate.
TYPED_TEST(AllocatorTest, NoAllocateThenDeallocate) {
  EXPECT_DEATH(this->allocator_->Deallocate(std::span<uint8_t>()),
               "prior allocation");
}

// Tests that if we attempt to allocate more than the backing span allows, we
// correctly return nullopt.
TEST(SpanAllocatorTest, OverAllocate) {
  std::vector<uint8_t> buffer(kDefaultSize);
  SpanAllocator allocator({buffer.data(), buffer.size()});
  EXPECT_FALSE(
      allocator.Allocate(kDefaultSize + 1u, 0, SetZero::kYes).has_value());
}

// Tests that if we attempt to insert more than the backing span allows, we
// correctly return nullopt.
TEST(SpanAllocatorTest, OverInsert) {
  std::vector<uint8_t> buffer(kDefaultSize);
  SpanAllocator allocator({buffer.data(), buffer.size()});
  std::span<uint8_t> span =
      allocator.Allocate(kDefaultSize, 0, SetZero::kYes).value();
  EXPECT_EQ(kDefaultSize, span.size());
  EXPECT_FALSE(
      allocator.InsertBytes(span.data(), 1u, 0, SetZero::kYes).has_value());
  allocator.Deallocate(span);
}

// Because we really aren't meant to instantiate ResizeableObjects directly (if
// nothing else, it has virtual member functions), define a testing
// implementation.

class TestResizeableObject : public ResizeableObject {
 public:
  TestResizeableObject(std::span<uint8_t> buffer, ResizeableObject *parent)
      : ResizeableObject(buffer, parent) {}
  TestResizeableObject(std::span<uint8_t> buffer, Allocator *allocator)
      : ResizeableObject(buffer, allocator) {}
  virtual ~TestResizeableObject() {}
  using ResizeableObject::SubObject;
  bool InsertBytes(void *insertion_point, size_t bytes) {
    return ResizeableObject::InsertBytes(insertion_point, bytes, SetZero::kYes);
  }
  TestResizeableObject(TestResizeableObject &&) = default;

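  // Bookkeeping for a single subobject: where its inline offset entry lives
  // within this object's buffer, the subobject itself (if one was created),
  // and the subobject's absolute offset within this object's buffer.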
  struct TestObject {
    uoffset_t inline_entry_offset;
    std::unique_ptr<TestResizeableObject> object;
    size_t absolute_offset;
  };

  // Adds a new object of the requested size.
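  // The inline entry at inline_entry_offset is populated with the offset from
  // that entry to the new object (or zero when set_object is false), and the
  // entry is recorded in objects_.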
  void AddEntry(uoffset_t inline_entry_offset, size_t absolute_offset,
                size_t buffer_size, bool set_object) {
    *reinterpret_cast<uoffset_t *>(buffer_.data() + inline_entry_offset) =
        set_object ? (absolute_offset - inline_entry_offset) : 0;
    objects_.emplace_back(
        TestObject{inline_entry_offset, nullptr, absolute_offset});
    if (set_object) {
      objects_.back().object = std::make_unique<TestResizeableObject>(
          buffer().subspan(absolute_offset, buffer_size), this);
    }
  }

  size_t NumberOfSubObjects() const override { return objects_.size(); }
  SubObject GetSubObject(size_t index) override {
    TestObject &subobject = objects_.at(index);
    return {reinterpret_cast<uoffset_t *>(buffer_.data() +
                                          subobject.inline_entry_offset),
            subobject.object.get(), &subobject.absolute_offset};
  }

  TestObject &GetObject(size_t index) { return objects_.at(index); }

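  // Use a deliberately large alignment; the ResizeNested test below relies on
  // inserts being rounded up to this 64-byte alignment.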
  size_t Alignment() const override { return 64; }
  size_t AbsoluteOffsetOffset() const override { return 0; }

 private:
  std::vector<TestObject> objects_;
};

class ResizeableObjectTest : public ::testing::Test {
 protected:
  static constexpr size_t kInitialSize = 128;
  ResizeableObjectTest()
      : object_(allocator_.Allocate(kInitialSize, 4, SetZero::kYes).value(),
                &allocator_) {}
  ~ResizeableObjectTest() { allocator_.Deallocate(object_.buffer()); }
  VectorAllocator allocator_;
  TestResizeableObject object_;
};

// Tests that if we create an object and then do nothing with it, nothing
// untoward happens.
TEST_F(ResizeableObjectTest, DoNothing) {}

// Tests that when we move the ResizeableObject, we clear the reference to the
// old buffer.
TEST_F(ResizeableObjectTest, Move) {
  TestResizeableObject target_object = std::move(object_);
  ASSERT_EQ(0u, object_.buffer().size());
  ASSERT_EQ(kInitialSize, target_object.buffer().size());
}

// Tests that the pathways for resizing a nested ResizeableObject work.
TEST_F(ResizeableObjectTest, ResizeNested) {
  constexpr size_t kAbsoluteOffset = 64;
  object_.AddEntry(4, kAbsoluteOffset, 64, true);
  TestResizeableObject *subobject = object_.GetObject(0).object.get();
  object_.AddEntry(0, kAbsoluteOffset, 64, false);
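  // The first inline entry (written at offset 4) should hold the offset from
  // that entry to the subobject at kAbsoluteOffset, i.e. 64 - 4 = 60; the
  // second entry was added without an object, so it stays zero.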
  EXPECT_EQ(60, *object_.GetSubObject(0).inline_entry);
  EXPECT_EQ(0, *object_.GetSubObject(1).inline_entry);
  EXPECT_EQ(64, object_.GetObject(0).object->buffer().data() -
                    object_.buffer().data());

  constexpr size_t kInsertBytes = 5;
  // The insert should succeed.
  ASSERT_TRUE(
      subobject->InsertBytes(subobject->buffer().data() + 1u, kInsertBytes));
  // We should now observe the size of the buffers increasing, but the start
  // _not_ moving.
  // We should've rounded the insert up to the alignment we are using
  // (64 bytes).
  EXPECT_EQ(kInitialSize + 64, object_.buffer().size());
  EXPECT_EQ(128, subobject->buffer().size());
  EXPECT_EQ(60, *object_.GetSubObject(0).inline_entry);
  EXPECT_EQ(0, *object_.GetSubObject(1).inline_entry);
  EXPECT_EQ(kAbsoluteOffset, object_.GetObject(0).absolute_offset);
  EXPECT_EQ(kAbsoluteOffset, object_.GetObject(1).absolute_offset);

  // And next we insert before the subobjects, so that we can see their offsets
  // shift. The insert should succeed.
  ASSERT_TRUE(object_.InsertBytes(subobject->buffer().data(), kInsertBytes));
  EXPECT_EQ(kInitialSize + 2 * 64, object_.buffer().size());
  EXPECT_EQ(128, subobject->buffer().size());
  EXPECT_EQ(60 + 64, *object_.GetSubObject(0).inline_entry);
  // The unpopulated object's inline entry should not have changed since
  // it was zero.
  EXPECT_EQ(0, *object_.GetSubObject(1).inline_entry);
  EXPECT_EQ(kAbsoluteOffset + 64, object_.GetObject(0).absolute_offset);
  EXPECT_EQ(kAbsoluteOffset + 64, object_.GetObject(1).absolute_offset);
}

}  // namespace aos::fbs::testing