#include "aos/flatbuffers/base.h"
namespace aos::fbs {

4namespace {
5void *DereferenceOffset(uoffset_t *offset) {
6 return reinterpret_cast<uint8_t *>(offset) + *offset;
7}
8} // namespace
9
10bool ResizeableObject::InsertBytes(void *insertion_point, size_t bytes,
11 SetZero set_zero) {
12 // See comments on InsertBytes() declaration and in FixObjects()
13 // implementation below.
14 CHECK_LT(buffer_.data(), reinterpret_cast<const uint8_t *>(insertion_point))
15 << ": Insertion may not be prior to the start of the buffer.";
16 // Check that we started off with a properly aligned size.
17 // Doing this CHECK earlier is tricky because if done in the constructor then
18 // it executes prior to the Alignment() implementation being available.
19 CHECK_EQ(0u, buffer_.size() % Alignment());
20 // Note that we will round up the size to the current alignment, so that we
21 // ultimately end up only adjusting the buffer size by a multiple of its
22 // alignment, to avoid having to do any more complicated bookkeeping.
23 const size_t aligned_bytes = PaddedSize(bytes, Alignment());
24 if (parent_ != nullptr) {
25 return parent_->InsertBytes(insertion_point, aligned_bytes, set_zero);
26 } else {
27 std::optional<std::span<uint8_t>> new_buffer =
28 CHECK_NOTNULL(allocator_)
29 ->InsertBytes(insertion_point, aligned_bytes, Alignment(),
30 set_zero);
31 if (!new_buffer.has_value()) {
32 return false;
33 }
34 UpdateBuffer(new_buffer.value(),
35 new_buffer.value().data() +
36 (reinterpret_cast<const uint8_t *>(insertion_point) -
37 buffer_.data()),
38 aligned_bytes);
39 return true;
40 }
41}
42
// Points this object at its (possibly relocated) backing buffer, repairs all
// sub-object offsets/pointers to account for the modification, and then runs
// the subclass notification hook.
void ResizeableObject::UpdateBuffer(std::span<uint8_t> new_buffer,
                                    void *modification_point,
                                    ssize_t bytes_inserted) {
  // buffer_ must be updated before FixObjects() so the fixups operate on the
  // new buffer.
  buffer_ = new_buffer;
  // bytes_inserted is signed (ssize_t); negative values presumably correspond
  // to byte removal — confirm against RemoveBytes() callers.
  FixObjects(modification_point, bytes_inserted);
  ObserveBufferModification();
}

51std::span<uint8_t> ResizeableObject::BufferForObject(
52 size_t absolute_offset, size_t size, size_t terminal_alignment) {
53 const size_t padded_size = PaddedSize(size, terminal_alignment);
54 std::span<uint8_t> padded_buffer =
55 internal::GetSubSpan(buffer_, absolute_offset, padded_size);
56 std::span<uint8_t> object_buffer =
57 internal::GetSubSpan(padded_buffer, 0, size);
58 std::span<uint8_t> padding = internal::GetSubSpan(padded_buffer, size);
59 internal::ClearSpan(padding);
60 return object_buffer;
61}
62
// Walks every sub-object and repairs (a) inline flatbuffer offsets that span
// the modification point, (b) the recorded absolute offsets of moved
// sub-objects, and (c) each sub-object's view of the (resized) buffer.
void ResizeableObject::FixObjects(void *modification_point,
                                  ssize_t bytes_inserted) {
  CHECK_EQ(bytes_inserted % Alignment(), 0u)
      << ": We only support inserting N * Alignment() bytes at a time. This "
         "may change in the future.";
  for (size_t index = 0; index < NumberOfSubObjects(); ++index) {
    SubObject object = GetSubObject(index);
    const void *const absolute_offset =
        PointerForAbsoluteOffset(*object.absolute_offset);
    // Only sub-objects located at or after the modification point were moved;
    // their inline offset entries need adjusting only when the entry itself
    // did NOT move (i.e. it lies before the modification point).
    if (absolute_offset >= modification_point &&
        object.inline_entry < modification_point) {
      if (*object.inline_entry != 0) {
        // Sanity-check that the inline offset currently points where we
        // believe the sub-object lives before adjusting it.
        CHECK_EQ(static_cast<const void *>(
                     static_cast<const uint8_t *>(absolute_offset) +
                     CHECK_NOTNULL(object.object)->AbsoluteOffsetOffset()),
                 DereferenceOffset(object.inline_entry));
        *object.inline_entry += bytes_inserted;
        CHECK_GE(DereferenceOffset(object.inline_entry), modification_point)
            << ": Encountered offset which points to a now-deleted section "
               "of memory. The offset should have been null'd out prior to "
               "deleting the memory.";
      } else {
        // A null inline offset must not have a live sub-object attached.
        CHECK_EQ(nullptr, object.object);
      }
      *object.absolute_offset += bytes_inserted;
    }
    // We only need to update the object's buffer if it currently exists.
    if (object.object != nullptr) {
      std::span<uint8_t> subbuffer = BufferForObject(
          *object.absolute_offset, object.object->buffer_.size(),
          object.object->Alignment());
      // By convention (enforced in InsertBytes()), the modification_point shall
      // not be at the start of the subobjects data buffer; it may be the byte
      // just past the end of the buffer. This makes it so that is unambiguous
      // which subobject(s) should get the extra space when a buffer size
      // increase is requested on the edge of a buffer.
      if (subbuffer.data() < modification_point &&
          (subbuffer.data() + subbuffer.size()) >= modification_point) {
        // The modification landed inside (or at the exclusive end of) this
        // sub-object's buffer, so the size delta belongs to it.
        subbuffer = {subbuffer.data(), subbuffer.size() + bytes_inserted};
      }
      object.object->UpdateBuffer(subbuffer, modification_point,
                                  bytes_inserted);
    }
  }
}

109std::optional<std::span<uint8_t>> VectorAllocator::Allocate(
110 size_t size, size_t /*alignment*/, SetZero set_zero) {
111 CHECK(buffer_.empty()) << ": Must deallocate before calling Allocate().";
112 buffer_.resize(size);
113 if (set_zero == SetZero::kYes) {
114 memset(buffer_.data(), 0, buffer_.size());
115 }
116 return std::span<uint8_t>{buffer_.data(), buffer_.size()};
117}
118
119std::optional<std::span<uint8_t>> VectorAllocator::InsertBytes(
120 void *insertion_point, size_t bytes, size_t /*alignment*/, SetZero) {
121 const ssize_t insertion_index =
122 reinterpret_cast<uint8_t *>(insertion_point) - buffer_.data();
123 CHECK_LE(0, insertion_index);
124 CHECK_LE(insertion_index, static_cast<ssize_t>(buffer_.size()));
125 buffer_.insert(buffer_.begin() + insertion_index, bytes, 0);
126 return std::span<uint8_t>{buffer_.data(), buffer_.size()};
127}
128
129std::span<uint8_t> VectorAllocator::RemoveBytes(
130 std::span<uint8_t> remove_bytes) {
131 const ssize_t removal_index = remove_bytes.data() - buffer_.data();
132 CHECK_LE(0, removal_index);
133 CHECK_LE(removal_index, static_cast<ssize_t>(buffer_.size()));
134 CHECK_LE(removal_index + remove_bytes.size(), buffer_.size());
135 buffer_.erase(buffer_.begin() + removal_index,
136 buffer_.begin() + removal_index + remove_bytes.size());
137 return {buffer_.data(), buffer_.size()};
138}
139
140std::optional<std::span<uint8_t>> SpanAllocator::Allocate(size_t size,
141 size_t /*alignment*/,
142 SetZero set_zero) {
143 CHECK(!allocated_);
144 if (size > buffer_.size()) {
145 return std::nullopt;
146 }
147 if (set_zero == SetZero::kYes) {
148 memset(buffer_.data(), 0, buffer_.size());
149 }
150 allocated_size_ = size;
151 allocated_ = true;
152 return internal::GetSubSpan(buffer_, buffer_.size() - size);
153}
154
// Grows the allocation (which occupies the tail of the fixed backing span) by
// `bytes`, opening a gap that ends at insertion_point. Data before the
// insertion point slides towards the front of the span; data at and after it
// stays put. Returns nullopt when the span cannot fit the larger allocation.
std::optional<std::span<uint8_t>> SpanAllocator::InsertBytes(
    void *insertion_point, size_t bytes, size_t /*alignment*/,
    SetZero set_zero) {
  uint8_t *insertion_point_typed = reinterpret_cast<uint8_t *>(insertion_point);
  const ssize_t insertion_index = insertion_point_typed - buffer_.data();
  CHECK_LE(0, insertion_index);
  CHECK_LE(insertion_index, static_cast<ssize_t>(buffer_.size()));
  const size_t new_size = allocated_size_ + bytes;
  if (new_size > buffer_.size()) {
    VLOG(1) << ": Insufficient space to grow by " << bytes << " bytes.";
    return std::nullopt;
  }
  // The allocation's start moves towards the front of the span as it grows.
  const size_t old_start_index = buffer_.size() - allocated_size_;
  const size_t new_start_index = buffer_.size() - new_size;
  // Shift [old_start, insertion_point) down by `bytes`, leaving a
  // `bytes`-sized gap immediately before the (unmoved) insertion point.
  memmove(buffer_.data() + new_start_index, buffer_.data() + old_start_index,
          insertion_index - old_start_index);
  if (set_zero == SetZero::kYes) {
    // The freshly-opened gap ends at insertion_point.
    memset(insertion_point_typed - bytes, 0, bytes);
  }
  allocated_size_ = new_size;
  return internal::GetSubSpan(buffer_, buffer_.size() - allocated_size_);
}

// Shrinks the allocation (which occupies the tail of the fixed backing span)
// by removing remove_bytes: the data that precedes the removed region slides
// towards the end of the span, overwriting it.
std::span<uint8_t> SpanAllocator::RemoveBytes(std::span<uint8_t> remove_bytes) {
  const ssize_t removal_index = remove_bytes.data() - buffer_.data();
  const size_t old_start_index = buffer_.size() - allocated_size_;
  // remove_bytes must lie entirely within the currently allocated tail.
  CHECK_LE(static_cast<ssize_t>(old_start_index), removal_index);
  CHECK_LE(removal_index, static_cast<ssize_t>(buffer_.size()));
  CHECK_LE(removal_index + remove_bytes.size(), buffer_.size());
  uint8_t *old_buffer_start = buffer_.data() + old_start_index;
  // Slide [old_start, removal_index) up by remove_bytes.size(); the removed
  // region is absorbed and the allocation's start moves towards the tail.
  memmove(old_buffer_start + remove_bytes.size(), old_buffer_start,
          removal_index - old_start_index);
  allocated_size_ -= remove_bytes.size();
  return internal::GetSubSpan(buffer_, buffer_.size() - allocated_size_);
}

// Releases the single outstanding allocation. The span argument is unused:
// this allocator tracks at most one allocation at a time (see the
// CHECK(!allocated_) in Allocate()).
void SpanAllocator::Deallocate(std::span<uint8_t>) {
  CHECK(allocated_) << ": Called Deallocate() without a prior allocation.";
  allocated_ = false;
}

196namespace internal {
197std::ostream &DebugBytes(std::span<const uint8_t> span, std::ostream &os) {
198 constexpr size_t kRowSize = 8u;
199 for (size_t index = 0; index < span.size(); index += kRowSize) {
200 os << std::hex << std::setw(4) << std::setfill('0') << std::uppercase
201 << index << ": ";
202 for (size_t subindex = 0;
203 subindex < kRowSize && (index + subindex) < span.size(); ++subindex) {
204 os << std::setw(2) << static_cast<int>(span[index + subindex]) << " ";
205 }
206 os << "\n";
207 }
208 os << std::resetiosflags(std::ios_base::basefield | std::ios_base::uppercase);
209 return os;
210}
211} // namespace internal
}  // namespace aos::fbs