#ifndef AOS_FLATBUFFERS_STATIC_VECTOR_H_
#define AOS_FLATBUFFERS_STATIC_VECTOR_H_
#include <span>

#include "flatbuffers/base.h"
#include "flatbuffers/vector.h"
#include "glog/logging.h"

#include "aos/containers/inlined_vector.h"
#include "aos/containers/sized_array.h"
#include "aos/flatbuffers/base.h"

namespace aos::fbs {

namespace internal {
// Helper class for managing how we specialize the Vector object for different
// contained types.
// Users of the Vector class should never need to care about this.
// Template arguments:
//   T: The type that the vector stores.
//   kInline: Whether the type in question is stored inline or not.
//   Enable: Used for SFINAE around struct values; can be ignored.
// The struct provides the following types and constants:
//   Type: The type of the data that will be stored inline in the vector.
//   ObjectType: The type of the actual data (only used for non-inline
//     objects).
//   FlatbufferType: The type used by flatbuffers::Vector to store this type.
//   ConstFlatbufferType: The type used by a const flatbuffers::Vector to store
//     this type.
//   kDataElementAlign: Alignment required by the stored type.
//   kDataElementSize: Nominal size required by each non-inline data member.
//     This is what will be initially allocated; once created, individual
//     members may grow to accommodate dynamically sized vectors.
//   kDataElementAlignOffset: Alignment offset required by the stored type.
template <typename T, bool kInline, class Enable = void>
struct InlineWrapper;
}  // namespace internal

// This Vector class provides a mutable, resizeable, flatbuffer vector.
//
// Upon creation, the Vector will start with enough space allocated for
// kStaticLength elements, and must be provided with a memory buffer that
// is large enough to serialize all the kStaticLength members (kStaticLength
// may be zero).
//
// Once created, the Vector may be grown using calls to reserve().
// This will result in the Vector attempting to allocate memory via its
// parent object; such calls may fail if there is no space available in the
// allocator.
//
// Note that if you are using the Vector class in a realtime context (and thus
// must avoid dynamic memory allocations) you must only be using a Vector of
// inline data (i.e., scalars, enums, or structs). Flatbuffer tables and
// strings require overhead to manage and so require some form of dynamic
// memory allocation. If we discover a strong use-case for such things, then we
// may provide some interface that allows managing said metadata on the stack
// or in another realtime-safe manner.
//
// Template arguments:
//   T: Type contained by the vector; either a scalar/struct/enum type or a
//     static flatbuffer type of some sort (a String or an implementation of
//     aos::fbs::Table).
//   kStaticLength: Number of elements to statically allocate memory for.
//     May be zero.
//   kInline: Whether the type T will be stored inline in the vector.
//   kForceAlign: Alignment to force for the start of the vector (e.g., for
//     byte arrays it may be desirable to have the entire array aligned).
//   kNullTerminate: Whether to reserve an extra byte past the end of
//     the inline data for null termination. Not included in kStaticLength,
//     so if e.g. you want to store the string "abc" then kStaticLength can
//     be 3 and kNullTerminate can be true and the vector data will take
//     up 4 bytes of memory.
//
// Vector buffer memory layout:
// * Requirements:
//   * Minimum alignment of 4 bytes (for the element count).
//   * The start of the vector data must be aligned to either
//     alignof(InlineType) or a user-specified number.
//   * The element count for the vector must immediately precede the vector
//     data (and so may itself not be aligned to alignof(InlineType)).
//   * For non-inlined types, the individual types must be aligned to
//     their own alignment.
// * In order to accommodate this, the vector buffer as a whole must
//   generally be aligned to the greatest of the above alignments. There
//   are two reasonable ways one could do this:
//   * Require that the 4th byte of the buffer provided be aligned to
//     the maximum alignment of its contents.
//   * Require that the buffer itself be aligned, and provide padding
//     ourselves. The Vector would then have to expose its own offset
//     because it would not start at the start of the buffer.
//   The former requires that the wrapping code understand the internals
//   of how vectors work; the latter generates extra padding and adds
//   extra logic around handling non-zero offsets.
//   To maintain general simplicity, we will use the second option and eat
//   the cost of the potential extra few bytes of padding.
// * The layout of the buffer will thus be:
//   [element_count; inline_data; padding; offset_data]
//   The element_count is of size 4.
//   The inline_data is of size sizeof(InlineType) * kStaticLength.
//   The padding is sized such that the size of inline_data plus the padding
//   brings us to the alignment required by the offset_data (it is only needed
//   when offset_data is present).
//   The offset_data is only present if kInline is false.
//   The offset_data is of size T::kSize * kStaticLength. T::kSize is rounded
//   up to a multiple of T::kAlign.
//   Note that no padding is required on the end because T::kAlign will always
//   end up being equal to the alignment (this can only be violated if
//   kForceAlign is used, but we do not allow that).
// The Vector class leaves any padding uninitialized. Until and unless we
// determine that it is a performance issue, it is the responsibility of the
// parent of this object to zero-initialize the memory.
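//
// Example usage (sketch): the calls below exercise only the interface declared
// in this header; how the Vector pointer is obtained (e.g., from a generated
// static flatbuffer table) is outside the scope of this file and is assumed.
//
//   Vector<int32_t, 4, /*kInline=*/true> *vector = /* obtained from parent */;
//   for (int32_t value : {1, 2, 3, 4}) {
//     CHECK(vector->emplace_back(value));  // Uses statically allocated space.
//   }
//   // Growing past kStaticLength requires the allocator to have spare space.
//   if (vector->reserve(8)) {
//     CHECK(vector->emplace_back(5));
//   }
//   for (const int32_t value : *vector) {  // Iterable with range-based for.
//     LOG(INFO) << value;
//   }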
template <typename T, size_t kStaticLength, bool kInline,
          size_t kForceAlign = 0, bool kNullTerminate = false>
class Vector : public ResizeableObject {
  template <typename VectorType, typename ValueType>
  class generic_iterator {
   public:
    using iterator_category = std::random_access_iterator_tag;
    using value_type = ValueType;
    using difference_type = std::ptrdiff_t;
    using pointer = value_type *;
    using reference = value_type &;

    explicit generic_iterator(VectorType *vector, size_t index)
        : vector_(vector), index_(index) {}
    generic_iterator(const generic_iterator &) = default;
    generic_iterator() : vector_(nullptr), index_(0) {}
    generic_iterator &operator=(const generic_iterator &) = default;

    generic_iterator &operator++() {
      ++index_;
      return *this;
    }
    generic_iterator operator++(int) {
      generic_iterator retval = *this;
      ++(*this);
      return retval;
    }
    generic_iterator &operator--() {
      --index_;
      return *this;
    }
    generic_iterator operator--(int) {
      generic_iterator retval = *this;
      --(*this);
      return retval;
    }
    bool operator==(const generic_iterator &other) const {
      CHECK_EQ(other.vector_, vector_);
      return index_ == other.index_;
    }
    std::strong_ordering operator<=>(const generic_iterator &other) const {
      CHECK_EQ(other.vector_, vector_);
      return index_ <=> other.index_;
    }
    reference operator*() const { return vector_->at(index_); }
    difference_type operator-(const generic_iterator &other) const {
      CHECK_EQ(other.vector_, vector_);
      return index_ - other.index_;
    }
    generic_iterator operator-(difference_type decrement) const {
      return generic_iterator(vector_, index_ - decrement);
    }
    friend generic_iterator operator-(difference_type decrement,
                                      const generic_iterator &rhs) {
      return rhs - decrement;
    }
    generic_iterator operator+(difference_type increment) const {
      return generic_iterator(vector_, index_ + increment);
    }
    friend generic_iterator operator+(difference_type increment,
                                      const generic_iterator &rhs) {
      return rhs + increment;
    }
    generic_iterator &operator+=(difference_type increment) {
      index_ += increment;
      return *this;
    }
    generic_iterator &operator-=(difference_type increment) {
      index_ -= increment;
      return *this;
    }
    reference operator[](difference_type index) const {
      return *(*this + index);
    }

   private:
    VectorType *vector_;
    size_t index_;
  };

 public:
  using iterator = generic_iterator<Vector, T>;
  using const_iterator = generic_iterator<const Vector, const T>;

  static_assert(kInline || !kNullTerminate,
                "It does not make sense to null-terminate vectors of objects.");
  // Type stored inline in the serialized vector (offsets for tables/strings; T
  // otherwise).
  using InlineType = typename internal::InlineWrapper<T, kInline>::Type;
  // Out-of-line type for out-of-line T.
  using ObjectType = typename internal::InlineWrapper<T, kInline>::ObjectType;
  // Type used as the template parameter to flatbuffers::Vector<>.
  using FlatbufferType =
      typename internal::InlineWrapper<T, kInline>::FlatbufferType;
  using ConstFlatbufferType =
      typename internal::InlineWrapper<T, kInline>::ConstFlatbufferType;
  // FlatbufferObjectType corresponds to the type used by the flatbuffer
  // "object" API (i.e. the FlatbufferT types).
  // This type will be something unintelligible for inline types.
  using FlatbufferObjectType =
      typename internal::InlineWrapper<T, kInline>::FlatbufferObjectType;
  // flatbuffers::Vector type that corresponds to this Vector.
  typedef flatbuffers::Vector<FlatbufferType> Flatbuffer;
  typedef const flatbuffers::Vector<ConstFlatbufferType> ConstFlatbuffer;
  // Alignment of the inline data.
  static constexpr size_t kInlineAlign =
      std::max(kForceAlign, alignof(InlineType));
  // Type used for serializing the length of the vector.
  typedef uint32_t LengthType;
  static constexpr size_t kDataElementAlign =
      internal::InlineWrapper<T, kInline>::kDataElementAlign;
  static constexpr size_t kDataElementAlignOffset =
      internal::InlineWrapper<T, kInline>::kDataElementAlignOffset;
  // Per-element size of any out-of-line data.
  static constexpr size_t kDataElementSize =
      internal::InlineWrapper<T, kInline>::kDataElementSize;
  // Overall alignment of this type, and required alignment of the buffer that
  // must be provided to the Vector.
  static constexpr size_t kAlign =
      std::max({alignof(LengthType), kInlineAlign, kDataElementAlign});
  // Offset into the buffer of where things must be aligned to the specified
  // alignment.
  static constexpr size_t kAlignOffset = sizeof(LengthType);

  // Size of the vector length field.
  static constexpr size_t kLengthSize = sizeof(LengthType);
  // Size of all the inline vector data, including null termination (prior to
  // any dynamic increases in size).
  static constexpr size_t kInlineSize =
      sizeof(InlineType) * (kStaticLength + (kNullTerminate ? 1 : 0));

  // Padding between the inline data and any out-of-line data, to manage
  // mismatches in alignment between the two.
  //
  // For inline vectors, we don't want to add any extra padding. The allocator
  // will add extra padding if needed and communicate it to our constructor.
  //
  // For non-inline vectors, we need to pad out the offsets so that their end
  // ends up kDataElementAlignOffset before the aligned start of the elements.
  //
  // This pads kInlineSize out to satisfy that requirement.
  static constexpr size_t kPadding1 =
      kInline
          ? 0
          : ((kAlign - ((kInlineSize + kAlign /* Add kAlign to guarantee we
                                                 don't mod a negative number */
                         - kDataElementAlignOffset) %
                        kAlign)) %
             kAlign);
  // Total statically allocated space for any out-of-line data ("offset data")
  // (prior to any dynamic increases in size).
  static constexpr size_t kOffsetOffsetDataSize =
      kInline ? 0 : (kStaticLength * kDataElementSize);
  // Total nominal size of the Vector.
  static constexpr size_t kSize =
      kLengthSize + kInlineSize + kPadding1 + kOffsetOffsetDataSize;
  // If this is 0, then the parent object will not plan to statically
  // reserve any memory for this object and will only reserve memory when the
  // user requests creation of this object. This makes it so that zero-length
  // vectors (which would require dynamic allocation *anyways* to actually be
  // helpful) do not use up memory when unpopulated.
  static constexpr size_t kPreallocatedSize = (kStaticLength > 0) ? kSize : 0;
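  // Worked example of the constants above (values follow directly from the
  // formulas and are illustrative only):
  //   Vector<double, 3, /*kInline=*/true>:
  //     InlineType = double, kInlineAlign = 8, kAlign = max(4, 8, 8) = 8,
  //     kLengthSize = 4, kInlineSize = 8 * 3 = 24, kPadding1 = 0 (inline),
  //     kOffsetOffsetDataSize = 0, kSize = 4 + 24 = 28 bytes.
  //   String<13> (char, kNullTerminate = true):
  //     kInlineSize = 1 * (13 + 1) = 14, kSize = 4 + 14 = 18 bytes.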

  // Returns the buffer size (in bytes) needed to hold the largest number of
  // elements that can fit fully in the provided length (in bytes). This lets
  // us compute how much of the padding we can fill with elements.
  static constexpr size_t RoundedLength(size_t length) {
    constexpr size_t overall_element_size =
        sizeof(InlineType) + (kInline ? 0 : kDataElementSize);
    return ((length - kLengthSize) / overall_element_size) *
               overall_element_size +
           kLengthSize;
  }

  // Constructors; the provided buffer must be at least kSize bytes long and
  // aligned such that buffer.data() + kAlignOffset is aligned to kAlign.
  // parent must be non-null.
  Vector(std::span<uint8_t> buffer, ResizeableObject *parent)
      : ResizeableObject(buffer, parent) {
    CHECK_EQ(0u,
             reinterpret_cast<size_t>(buffer.data() + kAlignOffset) % kAlign);
    CHECK_LE(kSize, buffer.size());
    if constexpr (kInline) {
      // If everything is inline, it costs us nothing to consume the padding
      // and use it for holding elements. For something like a short string in
      // 8 byte aligned space, this saves a second 8 byte allocation for the
      // data.
      allocated_length_ = (buffer.size() - kLengthSize) / sizeof(InlineType) -
                          (kNullTerminate ? 1 : 0);
    }
    SetLength(0u);
    if (!kInline) {
      // Initialize the offsets for any sub-tables. These are used to track
      // where each table will get serialized in memory as memory gets
      // resized/moved around.
      //
      // We don't want to expand allocated_length_ here because that would then
      // imply we have more memory for elements too, which we don't.
      for (size_t index = 0; index < kStaticLength; ++index) {
        object_absolute_offsets_.emplace_back(
            kLengthSize + kInlineSize + kPadding1 + index * kDataElementSize);
      }
    }
  }
  Vector(const Vector &) = delete;
  Vector &operator=(const Vector &) = delete;
  virtual ~Vector() {}
  // Current allocated length of this vector.
  // Does not include null termination.
  size_t capacity() const { return allocated_length_; }
  // Current length of the vector.
  // Does not include null termination.
  size_t size() const { return length_; }

  // Appends an element to the Vector. Used when kInline is false. Returns
  // nullptr if the append failed due to insufficient capacity. If you need to
  // increase the capacity() of the vector, call reserve().
  [[nodiscard]] T *emplace_back();
  // Appends an element to the Vector. Used when kInline is true. Returns false
  // if there is insufficient capacity for a new element.
  [[nodiscard]] bool emplace_back(T element) {
    static_assert(kInline);
    return AddInlineElement(element);
  }

  // Adjusts the allocated size of the vector (does not affect the actual
  // current length as returned by size()). Returns true on success, and false
  // if the allocation failed for some reason.
  // Note that reductions in size will not currently result in the allocated
  // size actually changing.
  // For vectors of non-inline types (e.g., vectors of strings or vectors of
  // tables), reserve() will allocate memory in an internal vector that we use
  // for storing some metadata.
  [[nodiscard]] bool reserve(size_t new_length) {
    if (new_length > allocated_length_) {
      const size_t new_elements = new_length - allocated_length_;
      // First, we must add space for our new inline elements.
      std::optional<std::span<uint8_t>> inserted_bytes;

      if (allocated_length_ == 0) {
        // If we have padding and the padding is enough to hold the new
        // elements, use it. This only consumes the padding in the case where
        // we have a non-inline object, but are allocating small enough data
        // that the padding is big enough.
        //
        // TODO(austin): Use the padding when we are adding large numbers of
        // elements too.
        if (new_elements * sizeof(InlineType) <= kPadding1) {
          inserted_bytes = internal::GetSubSpan(vector_buffer(), kLengthSize,
                                                kPadding1 / sizeof(InlineType));
        }
      }

      if (!inserted_bytes.has_value()) {
        inserted_bytes = InsertBytes(
            inline_data() + allocated_length_ + (kNullTerminate ? 1 : 0),
            new_elements * sizeof(InlineType), SetZero::kYes);
      }
      if (!inserted_bytes.has_value()) {
        return false;
      }
      if (!kInline) {
        // For non-inline objects, create the space required for all the new
        // object data.
        const size_t insertion_point = buffer_.size();
        if (!InsertBytes(buffer_.data() + insertion_point,
                         new_elements * kDataElementSize, SetZero::kYes)) {
          return false;
        }
        for (size_t index = 0; index < new_elements; ++index) {
          // Note that the already-allocated data may be arbitrarily-sized, so
          // we cannot use the same static calculation that we do in the
          // constructor.
          object_absolute_offsets_.emplace_back(insertion_point +
                                                index * kDataElementSize);
        }
        objects_.reserve(new_length);
      } else {
        // If we allocated memory, and the elements are inline (so we don't
        // have to deal with allocating elements too), consume any extra space
        // allocated as extra elements.
        if (new_elements * sizeof(InlineType) < inserted_bytes->size()) {
          new_length +=
              inserted_bytes->size() / sizeof(InlineType) - new_elements;
        }
      }
      allocated_length_ = new_length;
    }
    return true;
  }
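
  // Example (sketch; assumes `vector` points at a Vector<String<8>, 0, false>
  // whose parent has allocator space available):
  //
  //   if (vector->reserve(3)) {
  //     // reserve() allocated inline offset space plus 3 * kDataElementSize
  //     // bytes of out-of-line storage; emplace_back() then claims one slot.
  //     String<8> *str = vector->emplace_back();
  //     CHECK(str != nullptr);
  //     str->SetString("abc");
  //   }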

  // Accessors for using the Vector as a flatbuffers::Vector.
  // Note that these pointers will be unstable if any memory allocations occur
  // that cause memory to get shifted around.
  ConstFlatbuffer *AsFlatbufferVector() const {
    return reinterpret_cast<const Flatbuffer *>(vector_buffer().data());
  }

  // Copies the contents of the provided vector into this; returns false on
  // failure (e.g., if the provided vector is too long for the amount of space
  // we can allocate through reserve()).
  // This is a deep copy, and will call FromFlatbuffer on any constituent
  // objects.
  [[nodiscard]] bool FromFlatbuffer(ConstFlatbuffer *vector) {
    CHECK(vector != nullptr);
    return FromFlatbuffer(*vector);
  }
  [[nodiscard]] bool FromFlatbuffer(ConstFlatbuffer &vector);
  // The remaining FromFlatbuffer() overloads are for when using the flatbuffer
  // "object" API, which uses std::vectors for representing vectors.
  [[nodiscard]] bool FromFlatbuffer(const std::vector<InlineType> &vector) {
    static_assert(kInline);
    return FromData(vector.data(), vector.size());
  }
  // Overload for vectors of bools, since the standard library may not use a
  // full byte per vector element.
  [[nodiscard]] bool FromFlatbuffer(const std::vector<bool> &vector) {
    static_assert(kInline);
    // We won't be able to do a clean memcpy because std::vector<bool> may be
    // implemented using bit-packing.
    return FromIterator(vector.cbegin(), vector.cend());
  }
  // Overload for non-inline types. Note that to avoid having this overload get
  // resolved with inline types, we make FlatbufferObjectType != InlineType.
  [[nodiscard]] bool FromFlatbuffer(
      const std::vector<FlatbufferObjectType> &vector) {
    static_assert(!kInline);
    return FromNotInlineIterable(vector);
  }

  // Copies values from the provided data pointer into the vector, resizing the
  // vector as needed to match. Returns false on failure (e.g., if the
  // underlying allocator has insufficient space to perform the copy). Only
  // works for inline data types.
  [[nodiscard]] bool FromData(const InlineType *input_data, size_t input_size) {
    static_assert(kInline);
    if (!reserve(input_size)) {
      return false;
    }

    // We will be overwriting the whole vector very shortly; there is no need
    // to clear the buffer to zero.
    resize_inline(input_size, SetZero::kNo);

    if (input_size > 0) {
      CHECK(input_data != nullptr);
      memcpy(inline_data(), input_data, size() * sizeof(InlineType));
    }
    return true;
  }

  // Copies values from the provided iterators into the vector, resizing the
  // vector as needed to match. Returns false on failure (e.g., if the
  // underlying allocator has insufficient space to perform the copy). Only
  // works for inline data types.
  // Does not attempt any optimizations if the iterators meet the
  // std::contiguous_iterator concept; instead, it simply copies each element
  // out one-by-one.
  template <typename Iterator>
  [[nodiscard]] bool FromIterator(Iterator begin, Iterator end) {
    static_assert(kInline);
    resize(0);
    for (Iterator it = begin; it != end; ++it) {
      if (!reserve(size() + 1)) {
        return false;
      }
      // Should never fail, due to the reserve() above.
      CHECK(emplace_back(*it));
    }
    return true;
  }
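
  // Example (sketch): deep-copying from the flatbuffer object API into an
  // inline vector (`vector` is assumed to point at a Vector<int32_t, 3, true>
  // with sufficient space):
  //
  //   std::vector<int32_t> source = {1, 2, 3};
  //   CHECK(vector->FromFlatbuffer(source));
  //   CHECK_EQ(vector->size(), 3u);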

  // Returns the element at the provided index. index must be less than size().
  const T &at(size_t index) const {
    CHECK_LT(index, length_);
    return unsafe_at(index);
  }

  // Same as at(), except that bounds checks are only performed in
  // non-optimized builds.
  // TODO(james): The GetInlineElement() call itself does some bounds-checking;
  // consider down-grading that.
  const T &unsafe_at(size_t index) const {
    DCHECK_LT(index, length_);
    if (kInline) {
      // This reinterpret_cast is extremely wrong if T != InlineType (this is
      // fine because we only do this if kInline is true).
      // TODO(james): Get the templating improved so that we can get away with
      // specializing at() instead of using if statements. Resolving this will
      // also allow deduplicating the Resize() calls.
      // This specialization is difficult because you cannot partially
      // specialize a templated class method (online things seem to suggest
      // e.g. using a struct as the template parameter rather than having
      // separate parameters).
      return reinterpret_cast<const T &>(GetInlineElement(index));
    } else {
      return objects_[index].t;
    }
  }

  // Returns a mutable reference to the element at the provided index. index
  // must be less than size().
  T &at(size_t index) {
    CHECK_LT(index, length_);
    return unsafe_at(index);
  }

  // Same as at(), except that bounds checks are only performed in
  // non-optimized builds.
  // TODO(james): The GetInlineElement() call itself does some bounds-checking;
  // consider down-grading that.
  T &unsafe_at(size_t index) {
    DCHECK_LT(index, length_);
    if (kInline) {
      // This reinterpret_cast is extremely wrong if T != InlineType (this is
      // fine because we only do this if kInline is true).
      // TODO(james): Get the templating improved so that we can get away with
      // specializing at() instead of using if statements. Resolving this will
      // also allow deduplicating the Resize() calls.
      // This specialization is difficult because you cannot partially
      // specialize a templated class method (online things seem to suggest
      // e.g. using a struct as the template parameter rather than having
      // separate parameters).
      return reinterpret_cast<T &>(GetInlineElement(index));
    } else {
      return objects_[index].t;
    }
  }

  const T &operator[](size_t index) const { return at(index); }
  T &operator[](size_t index) { return at(index); }

  // Resizes the vector to the requested size.
  // size must be less than or equal to the current capacity() of the vector.
  // Does not allocate additional memory (call reserve() to allocate additional
  // memory).
  // Zero-initializes all inline elements; initializes all subtable/string
  // elements to extant but empty objects.
  void resize(size_t size);

  // Resizes an inline vector to the requested size.
  // When changing the size of the vector, the removed/inserted elements will
  // be set to zero if requested. Otherwise, they will be left uninitialized.
  void resize_inline(size_t size, SetZero set_zero) {
    CHECK_LE(size, allocated_length_);
    static_assert(
        kInline,
        "Vector::resize_inline() only works for inline vector types (scalars, "
        "enums, structs).");
    if (size == length_) {
      return;
    }
    if (set_zero == SetZero::kYes) {
      memset(
          reinterpret_cast<void *>(inline_data() + std::min(size, length_)), 0,
          std::abs(static_cast<ssize_t>(length_) - static_cast<ssize_t>(size)) *
              sizeof(InlineType));
    }
    length_ = size;
    SetLength(length_);
  }
  // Resizes a vector of offsets to the requested size.
  // If the size is increased, the new elements will be initialized
  // to empty but extant objects for non-inlined types (so, zero-length
  // vectors/strings; objects that exist but have no fields populated).
  // Note that this is always equivalent to resize().
  void resize_not_inline(size_t size) {
    CHECK_LE(size, allocated_length_);
    static_assert(!kInline,
                  "Vector::resize_not_inline() only works for offset vector "
                  "types (objects, strings).");
    if (size == length_) {
      return;
    } else if (length_ > size) {
      // TODO: Remove any excess allocated memory.
      length_ = size;
      SetLength(length_);
      return;
    } else {
      while (length_ < size) {
        CHECK(emplace_back() != nullptr);
      }
    }
  }
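
  // Example (sketch): for `vector` pointing at a Vector<String<8>, 2, false>,
  // resize() creates empty-but-extant strings that can then be set in place:
  //
  //   vector->resize(2);
  //   vector->at(0).SetString("ab");
  //   vector->at(1).SetString("cd");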

  // Accessors directly to the inline data of a vector.
  const T *data() const {
    static_assert(kInline,
                  "If you have a use-case for directly accessing the "
                  "flatbuffer data pointer for vectors of "
                  "objects/strings, please start a discussion.");
    return inline_data();
  }

  T *data() {
    static_assert(kInline,
                  "If you have a use-case for directly accessing the "
                  "flatbuffer data pointer for vectors of "
                  "objects/strings, please start a discussion.");
    return inline_data();
  }

  // Iterators to allow easy use with standard C++ features.
  iterator begin() { return iterator(this, 0); }
  iterator end() { return iterator(this, size()); }
  const_iterator begin() const { return const_iterator(this, 0); }
  const_iterator end() const { return const_iterator(this, size()); }

  std::string SerializationDebugString() const {
    std::stringstream str;
    str << "Raw Size: " << kSize << " alignment: " << kAlign
        << " allocated length: " << allocated_length_ << " inline alignment "
        << kInlineAlign << " \n";
    str << "Observed length " << GetLength() << " (expected " << length_
        << ")\n";
    str << "Inline Size " << kInlineSize << " Inline bytes/value:\n";
    // TODO(james): Get pretty-printing for structs so we can provide better
    // debug.
    internal::DebugBytes(
        internal::GetSubSpan(vector_buffer(), kLengthSize,
                             sizeof(InlineType) * allocated_length_),
        str);
    str << "kPadding1 " << kPadding1 << " offset data size "
        << kOffsetOffsetDataSize << "\n";
    return str.str();
  }

 protected:
  friend struct internal::TableMover<
      Vector<T, kStaticLength, kInline, kForceAlign, kNullTerminate>>;
  // protected so that the String class can access the move constructor.
  Vector(Vector &&) = default;

 private:
  // See kAlign.
  size_t Alignment() const final { return kAlign; }
  // Returns a buffer that starts at the start of the vector itself (past any
  // padding).
  std::span<uint8_t> vector_buffer() { return buffer(); }
  std::span<const uint8_t> vector_buffer() const { return buffer(); }

  bool AddInlineElement(InlineType e) {
    if (length_ == allocated_length_) {
      return false;
    }
    SetInlineElement(length_, e);
    ++length_;
    SetLength(length_);
    return true;
  }

  void SetInlineElement(size_t index, InlineType value) {
    CHECK_LT(index, allocated_length_);
    inline_data()[index] = value;
  }

  InlineType &GetInlineElement(size_t index) {
    CHECK_LT(index, allocated_length_);
    return inline_data()[index];
  }

  const InlineType &GetInlineElement(size_t index) const {
    CHECK_LT(index, allocated_length_);
    return inline_data()[index];
  }

  // Returns a pointer to the start of the inline data itself.
  InlineType *inline_data() {
    return reinterpret_cast<InlineType *>(vector_buffer().data() + kLengthSize);
  }
  const InlineType *inline_data() const {
    return reinterpret_cast<const InlineType *>(vector_buffer().data() +
                                                kLengthSize);
  }

  // Updates the length of the vector to match the provided length. Does not
  // set the length_ member.
  void SetLength(LengthType length) {
    *reinterpret_cast<LengthType *>(vector_buffer().data()) = length;
    if (kNullTerminate) {
      memset(reinterpret_cast<void *>(inline_data() + length), 0,
             sizeof(InlineType));
    }
  }
  LengthType GetLength() const {
    return *reinterpret_cast<const LengthType *>(vector_buffer().data());
  }

  // Overrides to allow ResizeableObject to manage memory adjustments.
  size_t NumberOfSubObjects() const final {
    return kInline ? 0 : allocated_length_;
  }
  using ResizeableObject::SubObject;
  SubObject GetSubObject(size_t index) final {
    return SubObject{
        reinterpret_cast<uoffset_t *>(&GetInlineElement(index)),
        // In order to let this compile regardless of whether type T is an
        // object type or not, we just use a reinterpret_cast.
        (index < length_)
            ? reinterpret_cast<ResizeableObject *>(&objects_[index].t)
            : nullptr,
        &object_absolute_offsets_[index]};
  }
  // Implementation that handles copying from a flatbuffers::Vector of an
  // inline data type.
  [[nodiscard]] bool FromInlineFlatbuffer(ConstFlatbuffer &vector) {
    return FromData(reinterpret_cast<const InlineType *>(vector.Data()),
                    vector.size());
  }

  // Implementation that handles copying from a flatbuffers::Vector of a
  // not-inline data type.
  template <typename Iterable>
  [[nodiscard]] bool FromNotInlineIterable(const Iterable &vector) {
    if (!reserve(vector.size())) {
      return false;
    }
    // "Clear" the vector.
    resize_not_inline(0);

    for (const auto &entry : vector) {
      T *emplaced_entry = emplace_back();
      CHECK(emplaced_entry != nullptr);
      if (!emplaced_entry->FromFlatbuffer(entry)) {
        return false;
      }
    }
    return true;
  }

  [[nodiscard]] bool FromNotInlineFlatbuffer(const Flatbuffer &vector) {
    return FromNotInlineIterable(vector);
  }

  // In order to allow for easy partial template specialization, we use a
  // non-member class to call FromInline/FromNotInlineFlatbuffer and
  // resize_inline/resize_not_inline. There are not actually any great ways to
  // do this with just our own class member functions, so instead we make these
  // methods members of a friend of the Vector class; we then partially
  // specialize the entire InlineWrapper class and use it to isolate anything
  // that needs to have a common user interface while still having separate
  // actual logic.
  template <typename T_, bool kInline_, class Enable_>
  friend struct internal::InlineWrapper;

  // Note: The objects here really want to be owned by this object (as opposed
  // to e.g. returning a stack-allocated object from the emplace_back() methods
  // that the user then owns). There are two main challenges with having the
  // user own the object in question:
  // 1. We can't have >1 reference floating around, or else one object's state
  //    can become out of date. This forces us to do ref-counting and could
  //    make certain types of code obnoxious to write.
  // 2. Once the user-created object goes out of scope, we lose all of its
  //    internal state. In _theory_ it should be possible to reconstruct most
  //    of the relevant state by examining the contents of the buffer, but
  //    doing so would be cumbersome.
  aos::InlinedVector<internal::TableMover<ObjectType>,
                     kInline ? 0 : kStaticLength>
      objects_;
  aos::InlinedVector<size_t, kInline ? 0 : kStaticLength>
      object_absolute_offsets_;
  // Current actual length of the vector.
  size_t length_ = 0;
  // Current length that we have allocated space available for.
  size_t allocated_length_ = kStaticLength;
};

template <typename T, size_t kStaticLength, bool kInline, size_t kForceAlign,
          bool kNullTerminate>
T *Vector<T, kStaticLength, kInline, kForceAlign,
          kNullTerminate>::emplace_back() {
  static_assert(!kInline);
  if (length_ >= allocated_length_) {
    return nullptr;
  }
  const size_t object_start = object_absolute_offsets_[length_];
  std::span<uint8_t> object_buffer =
      internal::GetSubSpan(buffer(), object_start, T::kSize);
  objects_.emplace_back(object_buffer, this);
  const uoffset_t offset =
      object_start - (reinterpret_cast<size_t>(&GetInlineElement(length_)) -
                      reinterpret_cast<size_t>(buffer().data()));
  CHECK(AddInlineElement(offset));
  return &objects_[objects_.size() - 1].t;
}

// The String class is a special version of the Vector that is always
// null-terminated, always contains 1-byte character elements, and which has a
// few extra methods for convenient string access.
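//
// Example usage (sketch; assumes `str` points at a String<16> owned by a
// parent static flatbuffer table):
//
//   str->SetString("hello");
//   CHECK_EQ(str->string_view(), "hello");
//   CHECK_EQ(str->str(), "hello");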
template <size_t kStaticLength>
class String : public Vector<char, kStaticLength, true, 0, true> {
 public:
  typedef Vector<char, kStaticLength, true, 0, true> VectorType;
  typedef flatbuffers::String Flatbuffer;
  typedef std::string FlatbufferObjectType;
  String(std::span<uint8_t> buffer, ResizeableObject *parent)
      : VectorType(buffer, parent) {}
  virtual ~String() {}
  void SetString(std::string_view string) {
    CHECK_LE(string.size(), VectorType::capacity());
    VectorType::resize_inline(string.size(), SetZero::kNo);
    memcpy(VectorType::data(), string.data(), string.size());
  }
  using VectorType::FromFlatbuffer;
  [[nodiscard]] bool FromFlatbuffer(const std::string &string) {
    return VectorType::FromData(string.data(), string.size());
  }
  std::string_view string_view() const {
    return std::string_view(VectorType::data(), VectorType::size());
  }
  std::string str() const {
    return std::string(VectorType::data(), VectorType::size());
  }
  const char *c_str() const { return VectorType::data(); }

 private:
  friend struct internal::TableMover<String<kStaticLength>>;
  String(String &&) = default;
};

namespace internal {
// Specialization for all non-inline vector types. All of these types will just
// use offsets for their inline data and have appropriate member
// types/constants for the remaining fields.
template <typename T>
struct InlineWrapper<T, false, void> {
  typedef uoffset_t Type;
  typedef T ObjectType;
  typedef flatbuffers::Offset<typename T::Flatbuffer> FlatbufferType;
  typedef flatbuffers::Offset<typename T::Flatbuffer> ConstFlatbufferType;
  typedef T::FlatbufferObjectType FlatbufferObjectType;
  static constexpr size_t kDataElementAlign = T::kAlign;
  static constexpr size_t kDataElementAlignOffset = T::kAlignOffset;
  static constexpr size_t kDataElementSize =
      ((T::kSize + T::kAlign - 1) / T::kAlign) * T::kAlign;
  static_assert((kDataElementSize % kDataElementAlign) == 0);
  template <typename StaticVector>
  static bool FromFlatbuffer(
      StaticVector *to, const typename StaticVector::ConstFlatbuffer &from) {
    return to->FromNotInlineFlatbuffer(from);
  }
  template <typename StaticVector>
  static void ResizeVector(StaticVector *target, size_t size) {
    target->resize_not_inline(size);
  }
};
// Specialization for "normal" scalar inline data (ints, floats, doubles,
// enums).
template <typename T>
struct InlineWrapper<T, true,
                     typename std::enable_if_t<!std::is_class<T>::value>> {
  typedef T Type;
  typedef T ObjectType;
  typedef T FlatbufferType;
  typedef T ConstFlatbufferType;
  typedef T *FlatbufferObjectType;
  static constexpr size_t kDataElementAlign = alignof(T);
  static constexpr size_t kDataElementAlignOffset = 0;
  static constexpr size_t kDataElementSize = sizeof(T);
  template <typename StaticVector>
  static bool FromFlatbuffer(
      StaticVector *to, const typename StaticVector::ConstFlatbuffer &from) {
    return to->FromInlineFlatbuffer(from);
  }
  template <typename StaticVector>
  static void ResizeVector(StaticVector *target, size_t size) {
    target->resize_inline(size, SetZero::kYes);
  }
};
// Specialization for booleans, given that flatbuffers uses uint8_t's for
// bools.
template <>
struct InlineWrapper<bool, true, void> {
  typedef uint8_t Type;
  typedef uint8_t ObjectType;
  typedef uint8_t FlatbufferType;
  typedef uint8_t ConstFlatbufferType;
  typedef uint8_t *FlatbufferObjectType;
  static constexpr size_t kDataElementAlign = 1u;
  static constexpr size_t kDataElementAlignOffset = 0;
  static constexpr size_t kDataElementSize = 1u;
  template <typename StaticVector>
  static bool FromFlatbuffer(
      StaticVector *to, const typename StaticVector::ConstFlatbuffer &from) {
    return to->FromInlineFlatbuffer(from);
  }
  template <typename StaticVector>
  static void ResizeVector(StaticVector *target, size_t size) {
    target->resize_inline(size, SetZero::kYes);
  }
};
// Specialization for flatbuffer structs.
// The flatbuffers codegen uses struct pointers rather than references or the
// like, so structs need to be treated specially.
template <typename T>
struct InlineWrapper<T, true,
                     typename std::enable_if_t<std::is_class<T>::value>> {
  typedef T Type;
  typedef T ObjectType;
  typedef T *FlatbufferType;
  typedef const T *ConstFlatbufferType;
  typedef T *FlatbufferObjectType;
  static constexpr size_t kDataElementAlign = alignof(T);
  static constexpr size_t kDataElementAlignOffset = 0;
  static constexpr size_t kDataElementSize = sizeof(T);
  template <typename StaticVector>
  static bool FromFlatbuffer(
      StaticVector *to, const typename StaticVector::ConstFlatbuffer &from) {
    return to->FromInlineFlatbuffer(from);
  }
  template <typename StaticVector>
  static void ResizeVector(StaticVector *target, size_t size) {
    target->resize_inline(size, SetZero::kYes);
  }
};
}  // namespace internal
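
// Illustration (informal; mirrors the specializations above): which
// InlineWrapper specialization applies for a few representative element types.
//
//   // Scalars and enums store themselves inline:
//   static_assert(
//       std::is_same_v<internal::InlineWrapper<float, true>::Type, float>);
//   // Bools are stored as uint8_t, matching the flatbuffer wire format:
//   static_assert(
//       std::is_same_v<internal::InlineWrapper<bool, true>::Type, uint8_t>);
//   // Tables and strings (kInline == false) store a uoffset_t inline and put
//   // the object itself out of line:
//   static_assert(
//       std::is_same_v<internal::InlineWrapper<String<8>, false>::Type,
//                      uoffset_t>);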

template <typename T, size_t kStaticLength, bool kInline, size_t kForceAlign,
          bool kNullTerminate>
bool Vector<T, kStaticLength, kInline, kForceAlign,
            kNullTerminate>::FromFlatbuffer(ConstFlatbuffer &vector) {
  return internal::InlineWrapper<T, kInline>::FromFlatbuffer(this, vector);
}

template <typename T, size_t kStaticLength, bool kInline, size_t kForceAlign,
          bool kNullTerminate>
void Vector<T, kStaticLength, kInline, kForceAlign, kNullTerminate>::resize(
    size_t size) {
  internal::InlineWrapper<T, kInline>::ResizeVector(this, size);
}

}  // namespace aos::fbs
#endif  // AOS_FLATBUFFERS_STATIC_VECTOR_H_