#include "aos/flatbuffers/base.h"

#include <cstring>
#include <iomanip>

namespace aos::fbs {

namespace {
// Offsets in a flatbuffer are stored relative to the location of the offset
// value itself; this returns the address that the provided offset points at.
void *DereferenceOffset(uoffset_t *offset) {
  return reinterpret_cast<uint8_t *>(offset) + *offset;
}
}  // namespace

ResizeableObject::ResizeableObject(ResizeableObject &&other)
    : buffer_(other.buffer_),
      parent_(other.parent_),
      owned_allocator_(std::move(other.owned_allocator_)),
      allocator_(other.allocator_) {
  // At this stage in the move the move constructors of the inherited types have
  // not yet been called, so we edit the state of the other object now so that
  // when everything is moved over into the new objects they will have the
  // correct pointers.
  for (size_t index = 0; index < other.NumberOfSubObjects(); ++index) {
    SubObject object = other.GetSubObject(index);
    if (object.object != nullptr) {
      object.object->parent_ = this;
    }
  }
  other.buffer_ = {};
  other.allocator_ = nullptr;
  other.parent_ = nullptr;
  // Sanity check that the std::unique_ptr move didn't reallocate/move memory
  // around.
  if (owned_allocator_.get() != nullptr) {
    CHECK_EQ(owned_allocator_.get(), allocator_);
  }
}

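// Note: insertion requests are forwarded up the parent chain so that only the
// top-level object talks to the allocator; once the allocator has grown the
// buffer, UpdateBuffer()/FixObjects() patch up the offsets of every subobject.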
bool ResizeableObject::InsertBytes(void *insertion_point, size_t bytes,
                                   SetZero set_zero) {
  // See comments on InsertBytes() declaration and in FixObjects()
  // implementation below.
  CHECK_LT(buffer_.data(), reinterpret_cast<const uint8_t *>(insertion_point))
      << ": Insertion may not be prior to the start of the buffer.";
  // Check that we started off with a properly aligned size.
  // Doing this CHECK earlier is tricky because if done in the constructor then
  // it executes prior to the Alignment() implementation being available.
  CHECK_EQ(0u, buffer_.size() % Alignment());
  // Note that we will round up the size to the current alignment, so that we
  // ultimately end up only adjusting the buffer size by a multiple of its
  // alignment, to avoid having to do any more complicated bookkeeping.
  const size_t aligned_bytes = PaddedSize(bytes, Alignment());
  if (parent_ != nullptr) {
    return parent_->InsertBytes(insertion_point, aligned_bytes, set_zero);
  } else {
    std::optional<std::span<uint8_t>> new_buffer =
        CHECK_NOTNULL(allocator_)
            ->InsertBytes(insertion_point, aligned_bytes, Alignment(),
                          set_zero);
    if (!new_buffer.has_value()) {
      return false;
    }
    UpdateBuffer(new_buffer.value(),
                 new_buffer.value().data() +
                     (reinterpret_cast<const uint8_t *>(insertion_point) -
                      buffer_.data()),
                 aligned_bytes);
    return true;
  }
}

void ResizeableObject::UpdateBuffer(std::span<uint8_t> new_buffer,
                                    void *modification_point,
                                    ssize_t bytes_inserted) {
  buffer_ = new_buffer;
  FixObjects(modification_point, bytes_inserted);
  ObserveBufferModification();
}

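// Returns the sub-span of buffer_ holding the object at absolute_offset with
// the given size, and zeroes out any padding bytes between the end of the
// object and the next terminal_alignment boundary.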
std::span<uint8_t> ResizeableObject::BufferForObject(
    size_t absolute_offset, size_t size, size_t terminal_alignment) {
  const size_t padded_size = PaddedSize(size, terminal_alignment);
  std::span<uint8_t> padded_buffer =
      internal::GetSubSpan(buffer_, absolute_offset, padded_size);
  std::span<uint8_t> object_buffer =
      internal::GetSubSpan(padded_buffer, 0, size);
  std::span<uint8_t> padding = internal::GetSubSpan(padded_buffer, size);
  internal::ClearSpan(padding);
  return object_buffer;
}

void ResizeableObject::FixObjects(void *modification_point,
                                  ssize_t bytes_inserted) {
  CHECK_EQ(bytes_inserted % Alignment(), 0u)
      << ": We only support inserting N * Alignment() bytes at a time. This "
         "may change in the future.";
  for (size_t index = 0; index < NumberOfSubObjects(); ++index) {
    SubObject object = GetSubObject(index);
    const void *const absolute_offset =
        PointerForAbsoluteOffset(*object.absolute_offset);
    if (absolute_offset >= modification_point &&
        object.inline_entry < modification_point) {
      if (*object.inline_entry != 0) {
        CHECK_EQ(static_cast<const void *>(
                     static_cast<const uint8_t *>(absolute_offset) +
                     CHECK_NOTNULL(object.object)->AbsoluteOffsetOffset()),
                 DereferenceOffset(object.inline_entry));
        *object.inline_entry += bytes_inserted;
        CHECK_GE(DereferenceOffset(object.inline_entry), modification_point)
            << ": Encountered an offset which points to a now-deleted section "
               "of memory. The offset should have been nulled out prior to "
               "deleting the memory.";
      } else {
        CHECK_EQ(nullptr, object.object);
      }
      *object.absolute_offset += bytes_inserted;
    }
    // We only need to update the object's buffer if it currently exists.
    if (object.object != nullptr) {
      std::span<uint8_t> subbuffer = BufferForObject(
          *object.absolute_offset, object.object->buffer_.size(),
          object.object->Alignment());
      // By convention (enforced in InsertBytes()), the modification_point
      // shall not be at the start of the subobject's data buffer; it may be
      // the byte just past the end of the buffer. This makes it unambiguous
      // which subobject(s) should get the extra space when a buffer size
      // increase is requested on the edge of a buffer.
      if (subbuffer.data() < modification_point &&
          (subbuffer.data() + subbuffer.size()) >= modification_point) {
        subbuffer = {subbuffer.data(), subbuffer.size() + bytes_inserted};
      }
      object.object->UpdateBuffer(subbuffer, modification_point,
                                  bytes_inserted);
    }
  }
}

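// VectorAllocator backs the buffer with a std::vector<uint8_t>, so allocations
// and insertions can grow without a fixed size limit; the alignment argument
// is unused by this allocator.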
std::optional<std::span<uint8_t>> VectorAllocator::Allocate(
    size_t size, size_t /*alignment*/, SetZero set_zero) {
  CHECK(buffer_.empty()) << ": Must deallocate before calling Allocate().";
  buffer_.resize(size);
  if (set_zero == SetZero::kYes) {
    memset(buffer_.data(), 0, buffer_.size());
  }
  return std::span<uint8_t>{buffer_.data(), buffer_.size()};
}

std::optional<std::span<uint8_t>> VectorAllocator::InsertBytes(
    void *insertion_point, size_t bytes, size_t /*alignment*/, SetZero) {
  const ssize_t insertion_index =
      reinterpret_cast<uint8_t *>(insertion_point) - buffer_.data();
  CHECK_LE(0, insertion_index);
  CHECK_LE(insertion_index, static_cast<ssize_t>(buffer_.size()));
  buffer_.insert(buffer_.begin() + insertion_index, bytes, 0);
  return std::span<uint8_t>{buffer_.data(), buffer_.size()};
}

std::span<uint8_t> VectorAllocator::RemoveBytes(
    std::span<uint8_t> remove_bytes) {
  const ssize_t removal_index = remove_bytes.data() - buffer_.data();
  CHECK_LE(0, removal_index);
  CHECK_LE(removal_index, static_cast<ssize_t>(buffer_.size()));
  CHECK_LE(removal_index + remove_bytes.size(), buffer_.size());
  buffer_.erase(buffer_.begin() + removal_index,
                buffer_.begin() + removal_index + remove_bytes.size());
  return {buffer_.data(), buffer_.size()};
}

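// SpanAllocator hands out memory from the end of the fixed-size span that it
// wraps: the allocated region always ends at buffer_.data() + buffer_.size()
// and grows downwards towards the start of the span as bytes are inserted.
// For example, an 8-byte allocation inside a 16-byte span looks like:
//
//   [ unused (8 bytes) | allocated (8 bytes) ]
//   ^ buffer_.data()                         ^ buffer_.data() + buffer_.size()
//
// InsertBytes() then makes room by shifting everything before the insertion
// point down towards the start of the span, rather than moving the tail.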
std::optional<std::span<uint8_t>> SpanAllocator::Allocate(size_t size,
                                                          size_t /*alignment*/,
                                                          SetZero set_zero) {
  CHECK(!allocated_);
  if (size > buffer_.size()) {
    return std::nullopt;
  }
  if (set_zero == SetZero::kYes) {
    memset(buffer_.data(), 0, buffer_.size());
  }
  allocated_size_ = size;
  allocated_ = true;
  return internal::GetSubSpan(buffer_, buffer_.size() - size);
}

std::optional<std::span<uint8_t>> SpanAllocator::InsertBytes(
    void *insertion_point, size_t bytes, size_t /*alignment*/,
    SetZero set_zero) {
  uint8_t *insertion_point_typed = reinterpret_cast<uint8_t *>(insertion_point);
  const ssize_t insertion_index = insertion_point_typed - buffer_.data();
  CHECK_LE(0, insertion_index);
  CHECK_LE(insertion_index, static_cast<ssize_t>(buffer_.size()));
  const size_t new_size = allocated_size_ + bytes;
  if (new_size > buffer_.size()) {
    VLOG(1) << ": Insufficient space to grow by " << bytes << " bytes.";
    return std::nullopt;
  }
  const size_t old_start_index = buffer_.size() - allocated_size_;
  const size_t new_start_index = buffer_.size() - new_size;
  memmove(buffer_.data() + new_start_index, buffer_.data() + old_start_index,
          insertion_index - old_start_index);
  if (set_zero == SetZero::kYes) {
    memset(insertion_point_typed - bytes, 0, bytes);
  }
  allocated_size_ = new_size;
  return internal::GetSubSpan(buffer_, buffer_.size() - allocated_size_);
}

std::span<uint8_t> SpanAllocator::RemoveBytes(std::span<uint8_t> remove_bytes) {
  const ssize_t removal_index = remove_bytes.data() - buffer_.data();
  const size_t old_start_index = buffer_.size() - allocated_size_;
  CHECK_LE(static_cast<ssize_t>(old_start_index), removal_index);
  CHECK_LE(removal_index, static_cast<ssize_t>(buffer_.size()));
  CHECK_LE(removal_index + remove_bytes.size(), buffer_.size());
  uint8_t *old_buffer_start = buffer_.data() + old_start_index;
  memmove(old_buffer_start + remove_bytes.size(), old_buffer_start,
          removal_index - old_start_index);
  allocated_size_ -= remove_bytes.size();
  return internal::GetSubSpan(buffer_, buffer_.size() - allocated_size_);
}

void SpanAllocator::Deallocate(std::span<uint8_t>) {
  CHECK(allocated_) << ": Called Deallocate() without a prior allocation.";
  allocated_ = false;
}

namespace internal {
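// Prints `span` as rows of up to 8 hex bytes, each row prefixed with the hex
// offset of its first byte, e.g.:
//   0000: DE AD BE EF 00 00 00 00
//   0008: 01 02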
std::ostream &DebugBytes(std::span<const uint8_t> span, std::ostream &os) {
  constexpr size_t kRowSize = 8u;
  for (size_t index = 0; index < span.size(); index += kRowSize) {
    os << std::hex << std::setw(4) << std::setfill('0') << std::uppercase
       << index << ": ";
    for (size_t subindex = 0;
         subindex < kRowSize && (index + subindex) < span.size(); ++subindex) {
      os << std::setw(2) << static_cast<int>(span[index + subindex]) << " ";
    }
    os << "\n";
  }
  os << std::resetiosflags(std::ios_base::basefield | std::ios_base::uppercase);
  return os;
}
}  // namespace internal
}  // namespace aos::fbs