#ifndef AOS_FLATBUFFERS_STATIC_VECTOR_H_
#define AOS_FLATBUFFERS_STATIC_VECTOR_H_
#include <span>

#include "flatbuffers/base.h"
#include "flatbuffers/vector.h"
#include "glog/logging.h"

#include "aos/containers/inlined_vector.h"
#include "aos/containers/sized_array.h"
#include "aos/flatbuffers/base.h"

namespace aos::fbs {

namespace internal {
// Helper class for managing how we specialize the Vector object for different
// contained types.
// Users of the Vector class should never need to care about this.
// Template arguments:
//   T: The type that the vector stores.
//   kInline: Whether the type in question is stored inline or not.
//   Enable: Used for SFINAE around struct values; can be ignored.
// The struct provides the following types:
//   Type: The type of the data that will be stored inline in the vector.
//   ObjectType: The type of the actual data (only used for non-inline objects).
//   FlatbufferType: The type used by flatbuffers::Vector to store this type.
//   ConstFlatbufferType: The type used by a const flatbuffers::Vector to store
//     this type.
//   kDataAlign: Alignment required by the stored type.
//   kDataSize: Nominal size required by each non-inline data member. This is
//     what will be initially allocated; once created, individual members may
//     grow to accommodate dynamically sized vectors.
template <typename T, bool kInline, class Enable = void>
struct InlineWrapper;
}  // namespace internal

// This Vector class provides a mutable, resizeable, flatbuffer vector.
//
// Upon creation, the Vector will start with enough space allocated for
// kStaticLength elements, and must be provided with a memory buffer that
// is large enough to serialize all the kStaticLength members (kStaticLength may
// be zero).
//
// Once created, the Vector may be grown using calls to reserve().
// This will result in the Vector attempting to allocate memory via its
// parent object; such calls may fail if there is no space available in the
// allocator.
//
// Note that if you are using the Vector class in a realtime context (and thus
// must avoid dynamic memory allocations) you must only be using a Vector of
// inline data (i.e., scalars, enums, or structs). Flatbuffer tables and strings
// require overhead to manage and so require some form of dynamic memory
// allocation. If we discover a strong use-case for such things, then we may
// provide some interface that allows managing said metadata on the stack or
// in another realtime-safe manner.
//
// Template arguments:
//   T: Type contained by the vector; either a scalar/struct/enum type or a
//     static flatbuffer type of some sort (a String or an implementation of
//     aos::fbs::Table).
//   kStaticLength: Number of elements to statically allocate memory for.
//     May be zero.
//   kInline: Whether the type T will be stored inline in the vector.
//   kForceAlign: Alignment to force for the start of the vector (e.g., for
//     byte arrays it may be desirable to have the entire array aligned).
//   kNullTerminate: Whether to reserve an extra byte past the end of
//     the inline data for null termination. Not included in kStaticLength,
//     so if e.g. you want to store the string "abc" then kStaticLength can
//     be 3 and kNullTerminate can be true and the vector data will take
//     up 4 bytes of memory.
//
// Vector buffer memory layout:
// * Requirements:
//   * Minimum alignment of 4 bytes (for the element count).
//   * The start of the vector data must be aligned to either
//     alignof(InlineType) or a user-specified number.
//   * The element count for the vector must immediately precede the vector
//     data (and so may itself not be aligned to alignof(InlineType)).
//   * For non-inlined types, the individual types must be aligned to
//     their own alignment.
// * In order to accommodate this, the vector buffer as a whole must
//   generally be aligned to the greatest of the above alignments. There
//   are two reasonable ways one could do this:
//   * Require that the 4th byte of the buffer provided be aligned to
//     the maximum alignment of its contents.
//   * Require that the buffer itself be aligned, and provide padding
//     ourselves. The Vector would then have to expose its own offset
//     because it would not start at the start of the buffer.
//   The former requires that the wrapping code understand the internals
//   of how vectors work; the latter generates extra padding and adds
//   extra logic around handling non-zero offsets.
//   To maintain general simplicity, we will use the second option and eat
//   the cost of the potential extra few bytes of padding.
// * The layout of the buffer will thus be:
//   [padding; element_count; inline_data; padding; offset_data]
//   The first padding will be of size max(0, kAlign - 4).
//   The element_count is of size 4.
//   The inline_data is of size sizeof(InlineType) * kStaticLength.
//   The second padding is of size
//   (kAlign - ((sizeof(InlineType) * kStaticLength) % kAlign)).
//   The remaining data is only present if kInline is false.
//   The offset data is of size T::kSize * kStaticLength. T::kSize % T::kAlign
//   must be zero.
//   Note that no padding is required on the end because T::kAlign will always
//   end up being equal to the alignment (this can only be violated if
//   kForceAlign is used, but we do not allow that).
// The Vector class leaves any padding uninitialized. Until and unless we
// determine that it is a performance issue, it is the responsibility of the
// parent of this object to zero-initialize the memory.
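//
// For illustration only (these are not definitions from this header, just
// example instantiations consistent with the rules above):
//   Vector<int32_t, 4, true>        - space for up to 4 int32s, stored inline.
//   Vector<String<16>, 3, false>    - 3 inline offsets, with the String
//                                     objects themselves stored out-of-line.
//   Vector<char, 3, true, 0, true>  - what String<3> aliases: 3 inline chars
//                                     plus a null-termination byte.
// In practice these instantiations are expected to be spelled out by the
// static flatbuffer code generator rather than written by hand.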
template <typename T, size_t kStaticLength, bool kInline,
          size_t kForceAlign = 0, bool kNullTerminate = false>
class Vector : public ResizeableObject {
  template <typename VectorType, typename ValueType>
  class generic_iterator {
   public:
    using iterator_category = std::random_access_iterator_tag;
    using value_type = ValueType;
    using difference_type = std::ptrdiff_t;
    using pointer = value_type *;
    using reference = value_type &;

    explicit generic_iterator(VectorType *vector, size_t index)
        : vector_(vector), index_(index) {}
    generic_iterator(const generic_iterator &) = default;
    generic_iterator() : vector_(nullptr), index_(0) {}
    generic_iterator &operator=(const generic_iterator &) = default;

    generic_iterator &operator++() {
      ++index_;
      return *this;
    }
    generic_iterator operator++(int) {
      generic_iterator retval = *this;
      ++(*this);
      return retval;
    }
    generic_iterator &operator--() {
      --index_;
      return *this;
    }
    generic_iterator operator--(int) {
      generic_iterator retval = *this;
      --(*this);
      return retval;
    }
    bool operator==(const generic_iterator &other) const {
      CHECK_EQ(other.vector_, vector_);
      return index_ == other.index_;
    }
    std::strong_ordering operator<=>(const generic_iterator &other) const {
      CHECK_EQ(other.vector_, vector_);
      return index_ <=> other.index_;
    }
    reference operator*() const { return vector_->at(index_); }
    difference_type operator-(const generic_iterator &other) const {
      CHECK_EQ(other.vector_, vector_);
      return index_ - other.index_;
    }
    generic_iterator operator-(difference_type decrement) const {
      return generic_iterator(vector_, index_ - decrement);
    }
    friend generic_iterator operator-(difference_type decrement,
                                      const generic_iterator &rhs) {
      return rhs - decrement;
    }
    generic_iterator operator+(difference_type increment) const {
      return generic_iterator(vector_, index_ + increment);
    }
    friend generic_iterator operator+(difference_type increment,
                                      const generic_iterator &rhs) {
      return rhs + increment;
    }
    generic_iterator &operator+=(difference_type increment) {
      index_ += increment;
      return *this;
    }
    generic_iterator &operator-=(difference_type increment) {
      index_ -= increment;
      return *this;
    }
    reference operator[](difference_type index) const {
      return *(*this + index);
    }

   private:
    VectorType *vector_;
    size_t index_;
  };

 public:
  using iterator = generic_iterator<Vector, T>;
  using const_iterator = generic_iterator<const Vector, const T>;

  static_assert(kInline || !kNullTerminate,
                "It does not make sense to null-terminate vectors of objects.");
  // Type stored inline in the serialized vector (offsets for tables/strings; T
  // otherwise).
  using InlineType = typename internal::InlineWrapper<T, kInline>::Type;
  // Out-of-line type for out-of-line T.
  using ObjectType = typename internal::InlineWrapper<T, kInline>::ObjectType;
  // Type used as the template parameter to flatbuffers::Vector<>.
  using FlatbufferType =
      typename internal::InlineWrapper<T, kInline>::FlatbufferType;
  using ConstFlatbufferType =
      typename internal::InlineWrapper<T, kInline>::ConstFlatbufferType;
  // FlatbufferObjectType corresponds to the type used by the flatbuffer
  // "object" API (i.e. the FlatbufferT types).
  // This type will be something unintelligible for inline types.
  using FlatbufferObjectType =
      typename internal::InlineWrapper<T, kInline>::FlatbufferObjectType;
  // flatbuffers::Vector type that corresponds to this Vector.
  typedef flatbuffers::Vector<FlatbufferType> Flatbuffer;
  typedef const flatbuffers::Vector<ConstFlatbufferType> ConstFlatbuffer;
  // Alignment of the inline data.
  static constexpr size_t kInlineAlign =
      std::max(kForceAlign, alignof(InlineType));
  // Type used for serializing the length of the vector.
  typedef uint32_t LengthType;
  // Overall alignment of this type, and required alignment of the buffer that
  // must be provided to the Vector.
  static constexpr size_t kAlign =
      std::max({alignof(LengthType), kInlineAlign,
                internal::InlineWrapper<T, kInline>::kDataAlign});
  // Padding inserted prior to the length element of the vector (to manage
  // alignment of the data properly; see the class comment).
  static constexpr size_t kPadding1 =
      std::max<size_t>(0, kAlign - sizeof(LengthType));
  // Size of the vector length field.
  static constexpr size_t kLengthSize = sizeof(LengthType);
  // Size of all the inline vector data, including null termination (prior to
  // any dynamic increases in size).
  static constexpr size_t kInlineSize =
      sizeof(InlineType) * (kStaticLength + (kNullTerminate ? 1 : 0));
  // Per-element size of any out-of-line data.
  static constexpr size_t kDataElementSize =
      internal::InlineWrapper<T, kInline>::kDataSize;
  // Padding between the inline data and any out-of-line data, to manage
  // mismatches in alignment between the two.
  static constexpr size_t kPadding2 = kAlign - (kInlineSize % kAlign);
  // Total statically allocated space for any out-of-line data ("offset data")
  // (prior to any dynamic increases in size).
  static constexpr size_t kOffsetOffsetDataSize =
      kInline ? 0 : (kStaticLength * kDataElementSize);
  // Total nominal size of the Vector.
  static constexpr size_t kSize =
      kPadding1 + kLengthSize + kInlineSize + kPadding2 + kOffsetOffsetDataSize;
  // Offset from the start of the provided buffer to where the actual start of
  // the vector is.
  static constexpr size_t kOffset = kPadding1;
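  // As a worked example of the constants above (purely illustrative): for
  // Vector<uint8_t, 5, true, /*kForceAlign=*/8>, InlineType is uint8_t, so
  //   kInlineAlign = max(8, 1) = 8
  //   kAlign       = max(4, 8, 1) = 8
  //   kPadding1    = 8 - 4 = 4
  //   kInlineSize  = 1 * 5 = 5
  //   kPadding2    = 8 - (5 % 8) = 3
  //   kSize        = 4 + 4 + 5 + 3 + 0 = 16
  //   kOffset      = 4
  // i.e., the buffer is [4 bytes padding][4 byte length][5 bytes inline data]
  // [3 bytes padding], and the inline data starts 8-aligned at byte 8.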
  // Constructors; the provided buffer must be aligned to kAlign and be kSize in
  // length. parent must be non-null.
  Vector(std::span<uint8_t> buffer, ResizeableObject *parent)
      : ResizeableObject(buffer, parent) {
    CHECK_EQ(0u, reinterpret_cast<size_t>(buffer.data()) % kAlign);
    CHECK_EQ(kSize, buffer.size());
    SetLength(0u);
    if (!kInline) {
      // Initialize the offsets for any sub-tables. These are used to track
      // where each table will get serialized in memory as memory gets
      // resized/moved around.
      for (size_t index = 0; index < kStaticLength; ++index) {
        object_absolute_offsets_.emplace_back(kPadding1 + kLengthSize +
                                              kInlineSize + kPadding2 +
                                              index * kDataElementSize);
      }
    }
  }
  Vector(const Vector &) = delete;
  Vector &operator=(const Vector &) = delete;
  virtual ~Vector() {}
  // Current allocated length of this vector.
  // Does not include null termination.
  size_t capacity() const { return allocated_length_; }
  // Current length of the vector.
  // Does not include null termination.
  size_t size() const { return length_; }

  // Appends an element to the Vector. Used when kInline is false. Returns
  // nullptr if the append failed due to insufficient capacity. If you need to
  // increase the capacity() of the vector, call reserve().
  [[nodiscard]] T *emplace_back();
  // Appends an element to the Vector. Used when kInline is true. Returns false
  // if there is insufficient capacity for a new element.
  [[nodiscard]] bool emplace_back(T element) {
    static_assert(kInline);
    return AddInlineElement(element);
  }

  // Adjusts the allocated size of the vector (does not affect the actual
  // current length as returned by size()). Returns true on success, and false
  // if the allocation failed for some reason.
  // Note that reductions in size will not currently result in the allocated
  // size actually changing.
  // For vectors of non-inline types (e.g., vectors of strings or vectors of
  // tables), reserve() will allocate memory in an internal vector that we use
  // for storing some metadata.
  [[nodiscard]] bool reserve(size_t new_length) {
    if (new_length > allocated_length_) {
      const size_t new_elements = new_length - allocated_length_;
      // First, we must add space for our new inline elements.
      if (!InsertBytes(
              inline_data() + allocated_length_ + (kNullTerminate ? 1 : 0),
              new_elements * sizeof(InlineType), SetZero::kYes)) {
        return false;
      }
      if (!kInline) {
        // For non-inline objects, create the space required for all the new
        // object data.
        const size_t insertion_point = buffer_.size();
        if (!InsertBytes(buffer_.data() + insertion_point,
                         new_elements * kDataElementSize, SetZero::kYes)) {
          return false;
        }
        for (size_t index = 0; index < new_elements; ++index) {
          // Note that the already-allocated data may be arbitrarily-sized, so
          // we cannot use the same static calculation that we do in the
          // constructor.
          object_absolute_offsets_.emplace_back(insertion_point +
                                                index * kDataElementSize);
        }
        objects_.reserve(new_length);
      }
      allocated_length_ = new_length;
    }
    return true;
  }

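  // A minimal usage sketch (illustrative only; assumes "vector" points at a
  // Vector<int32_t, 2, true> owned by a parent object whose allocator has
  // spare space for the reserve() call):
  //   CHECK(vector->emplace_back(1));    // Uses the static capacity.
  //   CHECK(vector->emplace_back(2));
  //   if (vector->reserve(4)) {          // Attempt to grow past kStaticLength.
  //     CHECK(vector->emplace_back(3));  // Cannot fail: capacity() is now 4.
  //     CHECK(vector->emplace_back(4));
  //   }
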
  // Accessors for using the Vector as a flatbuffers::Vector.
  // Note that these pointers will be unstable if any memory allocations occur
  // that cause memory to get shifted around.
  ConstFlatbuffer *AsFlatbufferVector() const {
    return reinterpret_cast<const Flatbuffer *>(vector_buffer().data());
  }

  // Copies the contents of the provided vector into this; returns false on
  // failure (e.g., if the provided vector is too long for the amount of space
  // we can allocate through reserve()).
  // This is a deep copy, and will call FromFlatbuffer on any constituent
  // objects.
  [[nodiscard]] bool FromFlatbuffer(ConstFlatbuffer *vector) {
    return FromFlatbuffer(*CHECK_NOTNULL(vector));
  }
  [[nodiscard]] bool FromFlatbuffer(ConstFlatbuffer &vector);
  // The remaining FromFlatbuffer() overloads are for when using the flatbuffer
  // "object" API, which uses std::vectors for representing vectors.
  [[nodiscard]] bool FromFlatbuffer(const std::vector<InlineType> &vector) {
    static_assert(kInline);
    return FromData(vector.data(), vector.size());
  }
  // Overload for vectors of bools, since the standard library may not use a
  // full byte per vector element.
  [[nodiscard]] bool FromFlatbuffer(const std::vector<bool> &vector) {
    static_assert(kInline);
    // We won't be able to do a clean memcpy because std::vector<bool> may be
    // implemented using bit-packing.
    return FromIterator(vector.cbegin(), vector.cend());
  }
  // Overload for non-inline types. Note that to avoid having this overload get
  // resolved with inline types, we make FlatbufferObjectType != InlineType.
  [[nodiscard]] bool FromFlatbuffer(
      const std::vector<FlatbufferObjectType> &vector) {
    static_assert(!kInline);
    return FromNotInlineIterable(vector);
  }

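  // For example (illustrative only; assumes "vector" points at a
  // Vector<double, 3, true> with a working allocator):
  //   std::vector<double> source = {1.0, 2.0, 3.0};
  //   CHECK(vector->FromFlatbuffer(source));  // Copies the three values.
  //   CHECK_EQ(3u, vector->size());
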
  // Copies values from the provided data pointer into the vector, resizing the
  // vector as needed to match. Returns false on failure (e.g., if the
  // underlying allocator has insufficient space to perform the copy). Only
  // works for inline data types.
  [[nodiscard]] bool FromData(const InlineType *input_data, size_t input_size) {
    static_assert(kInline);
    if (!reserve(input_size)) {
      return false;
    }

    // We will be overwriting the whole vector very shortly; there is no need to
    // clear the buffer to zero.
    resize_inline(input_size, SetZero::kNo);

    if (input_size > 0) {
      memcpy(inline_data(), CHECK_NOTNULL(input_data),
             size() * sizeof(InlineType));
    }
    return true;
  }

  // Copies values from the provided iterators into the vector, resizing the
  // vector as needed to match. Returns false on failure (e.g., if the
  // underlying allocator has insufficient space to perform the copy). Only
  // works for inline data types.
  // Does not attempt any optimizations if the iterators meet the
  // std::contiguous_iterator concept; instead, it simply copies each element
  // out one-by-one.
  template <typename Iterator>
  [[nodiscard]] bool FromIterator(Iterator begin, Iterator end) {
    static_assert(kInline);
    resize(0);
    for (Iterator it = begin; it != end; ++it) {
      if (!reserve(size() + 1)) {
        return false;
      }
      // Should never fail, due to the reserve() above.
      CHECK(emplace_back(*it));
    }
    return true;
  }

  // Returns the element at the provided index. index must be less than size().
  const T &at(size_t index) const {
    CHECK_LT(index, length_);
    return unsafe_at(index);
  }

  // Same as at(), except that bounds checks are only performed in non-optimized
  // builds.
  // TODO(james): The GetInlineElement() call itself does some bounds-checking;
  // consider down-grading that.
  const T &unsafe_at(size_t index) const {
    DCHECK_LT(index, length_);
    if (kInline) {
      // This reinterpret_cast is extremely wrong if T != InlineType (this is
      // fine because we only do this if kInline is true).
      // TODO(james): Get the templating improved so that we can get away with
      // specializing at() instead of using if statements. Resolving this will
      // also allow deduplicating the Resize() calls.
      // This specialization is difficult because you cannot partially
      // specialize a templated class method (online things seem to suggest e.g.
      // using a struct as the template parameter rather than having separate
      // parameters).
      return reinterpret_cast<const T &>(GetInlineElement(index));
    } else {
      return objects_[index].t;
    }
  }

  // Returns a mutable reference to the element at the provided index. index
  // must be less than size().
  T &at(size_t index) {
    CHECK_LT(index, length_);
    return unsafe_at(index);
  }

  // Same as at(), except that bounds checks are only performed in non-optimized
  // builds.
  // TODO(james): The GetInlineElement() call itself does some bounds-checking;
  // consider down-grading that.
  T &unsafe_at(size_t index) {
    DCHECK_LT(index, length_);
    if (kInline) {
      // This reinterpret_cast is extremely wrong if T != InlineType (this is
      // fine because we only do this if kInline is true).
      // TODO(james): Get the templating improved so that we can get away with
      // specializing at() instead of using if statements. Resolving this will
      // also allow deduplicating the Resize() calls.
      // This specialization is difficult because you cannot partially
      // specialize a templated class method (online things seem to suggest e.g.
      // using a struct as the template parameter rather than having separate
      // parameters).
      return reinterpret_cast<T &>(GetInlineElement(index));
    } else {
      return objects_[index].t;
    }
  }

  const T &operator[](size_t index) const { return at(index); }
  T &operator[](size_t index) { return at(index); }

  // Resizes the vector to the requested size.
  // size must be less than or equal to the current capacity() of the vector.
  // Does not allocate additional memory (call reserve() to allocate additional
  // memory).
  // Zero-initializes all inline elements; initializes all subtable/string
  // elements to extant but empty objects.
  void resize(size_t size);

  // Resizes an inline vector to the requested size.
  // When changing the size of the vector, the removed/inserted elements will be
  // set to zero if requested. Otherwise, they will be left uninitialized.
  void resize_inline(size_t size, SetZero set_zero) {
    CHECK_LE(size, allocated_length_);
    static_assert(
        kInline,
        "Vector::resize_inline() only works for inline vector types (scalars, "
        "enums, structs).");
    if (size == length_) {
      return;
    }
    if (set_zero == SetZero::kYes) {
      memset(
          reinterpret_cast<void *>(inline_data() + std::min(size, length_)), 0,
          std::abs(static_cast<ssize_t>(length_) - static_cast<ssize_t>(size)) *
              sizeof(InlineType));
    }
    length_ = size;
    SetLength(length_);
  }
  // Resizes a vector of offsets to the requested size.
  // If the size is increased, the new elements will be initialized
  // to empty but extant objects for non-inlined types (so, zero-length
  // vectors/strings; objects that exist but have no fields populated).
  // Note that this is always equivalent to resize().
  void resize_not_inline(size_t size) {
    CHECK_LE(size, allocated_length_);
    static_assert(!kInline,
                  "Vector::resize_not_inline() only works for offset vector "
                  "types (objects, strings).");
    if (size == length_) {
      return;
    } else if (length_ > size) {
      // TODO: Remove any excess allocated memory.
      length_ = size;
      SetLength(length_);
      return;
    } else {
      while (length_ < size) {
        CHECK_NOTNULL(emplace_back());
      }
    }
  }

  // Accessors directly to the inline data of a vector.
  const T *data() const {
    static_assert(kInline,
                  "If you have a use-case for directly accessing the "
                  "flatbuffer data pointer for vectors of "
                  "objects/strings, please start a discussion.");
    return inline_data();
  }

  T *data() {
    static_assert(kInline,
                  "If you have a use-case for directly accessing the "
                  "flatbuffer data pointer for vectors of "
                  "objects/strings, please start a discussion.");
    return inline_data();
  }

  // Iterators to allow easy use with standard C++ features.
  iterator begin() { return iterator(this, 0); }
  iterator end() { return iterator(this, size()); }
  const_iterator begin() const { return const_iterator(this, 0); }
  const_iterator end() const { return const_iterator(this, size()); }

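  // These make, e.g., range-based for loops work (illustrative only; assumes
  // "vector" points at a Vector<int32_t, 4, true>):
  //   for (const int32_t element : *vector) { LOG(INFO) << element; }
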
  std::string SerializationDebugString() const {
    std::stringstream str;
    str << "Raw Size: " << kSize << " alignment: " << kAlign
        << " allocated length: " << allocated_length_ << " inline alignment "
        << kInlineAlign << " kPadding1 " << kPadding1 << "\n";
    str << "Observed length " << GetLength() << " (expected " << length_
        << ")\n";
    str << "Inline Size " << kInlineSize << " Inline bytes/value:\n";
    // TODO(james): Get pretty-printing for structs so we can provide better
    // debugging output.
    internal::DebugBytes(
        internal::GetSubSpan(vector_buffer(), kLengthSize,
                             sizeof(InlineType) * allocated_length_),
        str);
    str << "kPadding2 " << kPadding2 << " offset data size "
        << kOffsetOffsetDataSize << "\n";
    return str.str();
  }

 protected:
  friend struct internal::TableMover<
      Vector<T, kStaticLength, kInline, kForceAlign, kNullTerminate>>;
  // protected so that the String class can access the move constructor.
  Vector(Vector &&) = default;

 private:
  // See kAlign and kOffset.
  size_t Alignment() const final { return kAlign; }
  size_t AbsoluteOffsetOffset() const override { return kOffset; }
  // Returns a buffer that starts at the start of the vector itself (past any
  // padding).
  std::span<uint8_t> vector_buffer() {
    return internal::GetSubSpan(buffer(), kPadding1);
  }
  std::span<const uint8_t> vector_buffer() const {
    return internal::GetSubSpan(buffer(), kPadding1);
  }

  bool AddInlineElement(InlineType e) {
    if (length_ == allocated_length_) {
      return false;
    }
    SetInlineElement(length_, e);
    ++length_;
    SetLength(length_);
    return true;
  }

  void SetInlineElement(size_t index, InlineType value) {
    CHECK_LT(index, allocated_length_);
    inline_data()[index] = value;
  }

  InlineType &GetInlineElement(size_t index) {
    CHECK_LT(index, allocated_length_);
    return inline_data()[index];
  }

  const InlineType &GetInlineElement(size_t index) const {
    CHECK_LT(index, allocated_length_);
    return inline_data()[index];
  }

  // Returns a pointer to the start of the inline data itself.
  InlineType *inline_data() {
    return reinterpret_cast<InlineType *>(vector_buffer().data() + kLengthSize);
  }
  const InlineType *inline_data() const {
    return reinterpret_cast<const InlineType *>(vector_buffer().data() +
                                                kLengthSize);
  }

  // Updates the length of the vector to match the provided length. Does not set
  // the length_ member.
  void SetLength(LengthType length) {
    *reinterpret_cast<LengthType *>(vector_buffer().data()) = length;
    if (kNullTerminate) {
      memset(reinterpret_cast<void *>(inline_data() + length), 0,
             sizeof(InlineType));
    }
  }
  LengthType GetLength() const {
    return *reinterpret_cast<const LengthType *>(vector_buffer().data());
  }

  // Overrides to allow ResizeableObject to manage memory adjustments.
  size_t NumberOfSubObjects() const final {
    return kInline ? 0 : allocated_length_;
  }
  using ResizeableObject::SubObject;
  SubObject GetSubObject(size_t index) final {
    return SubObject{
        reinterpret_cast<uoffset_t *>(&GetInlineElement(index)),
        // In order to let this compile regardless of whether type T is an
        // object type or not, we just use a reinterpret_cast.
        (index < length_)
            ? reinterpret_cast<ResizeableObject *>(&objects_[index].t)
            : nullptr,
        &object_absolute_offsets_[index]};
  }
  // Implementation that handles copying from a flatbuffers::Vector of an inline
  // data type.
  [[nodiscard]] bool FromInlineFlatbuffer(ConstFlatbuffer &vector) {
    return FromData(reinterpret_cast<const InlineType *>(vector.Data()),
                    vector.size());
  }

  // Implementation that handles copying from a flatbuffers::Vector of a
  // not-inline data type.
  template <typename Iterable>
  [[nodiscard]] bool FromNotInlineIterable(const Iterable &vector) {
    if (!reserve(vector.size())) {
      return false;
    }
    // "Clear" the vector.
    resize_not_inline(0);

    for (const auto &entry : vector) {
      if (!CHECK_NOTNULL(emplace_back())->FromFlatbuffer(entry)) {
        return false;
      }
    }
    return true;
  }

  [[nodiscard]] bool FromNotInlineFlatbuffer(const Flatbuffer &vector) {
    return FromNotInlineIterable(vector);
  }

  // In order to allow for easy partial template specialization, we use a
  // non-member class to call FromInline/FromNotInlineFlatbuffer and
  // resize_inline/resize_not_inline. There are not actually any great ways to
  // do this with just our own class member functions, so instead we make these
  // methods members of a friend of the Vector class; we then partially
  // specialize the entire InlineWrapper class and use it to isolate anything
  // that needs to have a common user interface while still having separate
  // actual logic.
  template <typename T_, bool kInline_, class Enable_>
  friend struct internal::InlineWrapper;

  // Note: The objects here really want to be owned by this object (as opposed
  // to e.g. returning a stack-allocated object from the emplace_back() methods
  // that the user then owns). There are two main challenges with having the
  // user own the object in question:
  // 1. We can't have >1 reference floating around, or else one object's state
  //    can become out of date. This forces us to do ref-counting and could
  //    make certain types of code obnoxious to write.
  // 2. Once the user-created object goes out of scope, we lose all of its
  //    internal state. In _theory_ it should be possible to reconstruct most
  //    of the relevant state by examining the contents of the buffer, but
  //    doing so would be cumbersome.
  aos::InlinedVector<internal::TableMover<ObjectType>,
                     kInline ? 0 : kStaticLength>
      objects_;
  aos::InlinedVector<size_t, kInline ? 0 : kStaticLength>
      object_absolute_offsets_;
  // Current actual length of the vector.
  size_t length_ = 0;
  // Current length that we have allocated space available for.
  size_t allocated_length_ = kStaticLength;
};

template <typename T, size_t kStaticLength, bool kInline, size_t kForceAlign,
          bool kNullTerminate>
T *Vector<T, kStaticLength, kInline, kForceAlign,
          kNullTerminate>::emplace_back() {
  static_assert(!kInline);
  if (length_ >= allocated_length_) {
    return nullptr;
  }
  const size_t object_start = object_absolute_offsets_[length_];
  std::span<uint8_t> object_buffer =
      internal::GetSubSpan(buffer(), object_start, T::kSize);
  objects_.emplace_back(object_buffer, this);
  const uoffset_t offset =
      object_start - (reinterpret_cast<size_t>(&GetInlineElement(length_)) -
                      reinterpret_cast<size_t>(buffer().data()));
  CHECK(AddInlineElement(offset));
  return &objects_[objects_.size() - 1].t;
}

// The String class is a special version of the Vector that is always
// null-terminated, always contains 1-byte character elements, and which has a
// few extra methods for convenient string access.
template <size_t kStaticLength>
class String : public Vector<char, kStaticLength, true, 0, true> {
 public:
  typedef Vector<char, kStaticLength, true, 0, true> VectorType;
  typedef flatbuffers::String Flatbuffer;
  typedef std::string FlatbufferObjectType;
  String(std::span<uint8_t> buffer, ResizeableObject *parent)
      : VectorType(buffer, parent) {}
  virtual ~String() {}
  void SetString(std::string_view string) {
    CHECK_LT(string.size(), VectorType::capacity());
    VectorType::resize_inline(string.size(), SetZero::kNo);
    memcpy(VectorType::data(), string.data(), string.size());
  }
  using VectorType::FromFlatbuffer;
  [[nodiscard]] bool FromFlatbuffer(const std::string &string) {
    return VectorType::FromData(string.data(), string.size());
  }
  std::string_view string_view() const {
    return std::string_view(VectorType::data(), VectorType::size());
  }
  std::string str() const {
    return std::string(VectorType::data(), VectorType::size());
  }
  const char *c_str() const { return VectorType::data(); }

 private:
  friend struct internal::TableMover<String<kStaticLength>>;
  String(String &&) = default;
};

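// A brief usage sketch for String (illustrative only; the buffer and parent
// are normally provided by the owning static flatbuffer object):
//   aos::fbs::String<16> *name = ...;  // Obtained from the owning object.
//   name->SetString("abc");            // Requires string.size() < capacity().
//   std::string_view view = name->string_view();  // "abc"
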
namespace internal {
// Specialization for all non-inline vector types. All of these types will just
// use offsets for their inline data and have appropriate member types/constants
// for the remaining fields.
template <typename T>
struct InlineWrapper<T, false, void> {
  typedef uoffset_t Type;
  typedef T ObjectType;
  typedef flatbuffers::Offset<typename T::Flatbuffer> FlatbufferType;
  typedef flatbuffers::Offset<typename T::Flatbuffer> ConstFlatbufferType;
  typedef T::FlatbufferObjectType FlatbufferObjectType;
  static_assert((T::kSize % T::kAlign) == 0);
  static constexpr size_t kDataAlign = T::kAlign;
  static constexpr size_t kDataSize = T::kSize;
  template <typename StaticVector>
  static bool FromFlatbuffer(
      StaticVector *to, const typename StaticVector::ConstFlatbuffer &from) {
    return to->FromNotInlineFlatbuffer(from);
  }
  template <typename StaticVector>
  static void ResizeVector(StaticVector *target, size_t size) {
    target->resize_not_inline(size);
  }
};
// Specialization for "normal" scalar inline data (ints, floats, doubles,
// enums).
template <typename T>
struct InlineWrapper<T, true,
                     typename std::enable_if_t<!std::is_class<T>::value>> {
  typedef T Type;
  typedef T ObjectType;
  typedef T FlatbufferType;
  typedef T ConstFlatbufferType;
  typedef T *FlatbufferObjectType;
  static constexpr size_t kDataAlign = alignof(T);
  static constexpr size_t kDataSize = sizeof(T);
  template <typename StaticVector>
  static bool FromFlatbuffer(
      StaticVector *to, const typename StaticVector::ConstFlatbuffer &from) {
    return to->FromInlineFlatbuffer(from);
  }
  template <typename StaticVector>
  static void ResizeVector(StaticVector *target, size_t size) {
    target->resize_inline(size, SetZero::kYes);
  }
};
// Specialization for booleans, given that flatbuffers uses uint8_t's for bools.
template <>
struct InlineWrapper<bool, true, void> {
  typedef uint8_t Type;
  typedef uint8_t ObjectType;
  typedef uint8_t FlatbufferType;
  typedef uint8_t ConstFlatbufferType;
  typedef uint8_t *FlatbufferObjectType;
  static constexpr size_t kDataAlign = 1u;
  static constexpr size_t kDataSize = 1u;
  template <typename StaticVector>
  static bool FromFlatbuffer(
      StaticVector *to, const typename StaticVector::ConstFlatbuffer &from) {
    return to->FromInlineFlatbuffer(from);
  }
  template <typename StaticVector>
  static void ResizeVector(StaticVector *target, size_t size) {
    target->resize_inline(size, SetZero::kYes);
  }
};
// Specialization for flatbuffer structs.
// The flatbuffers codegen uses struct pointers rather than references or the
// like, so this case needs to be treated specially.
template <typename T>
struct InlineWrapper<T, true,
                     typename std::enable_if_t<std::is_class<T>::value>> {
  typedef T Type;
  typedef T ObjectType;
  typedef T *FlatbufferType;
  typedef const T *ConstFlatbufferType;
  typedef T *FlatbufferObjectType;
  static constexpr size_t kDataAlign = alignof(T);
  static constexpr size_t kDataSize = sizeof(T);
  template <typename StaticVector>
  static bool FromFlatbuffer(
      StaticVector *to, const typename StaticVector::ConstFlatbuffer &from) {
    return to->FromInlineFlatbuffer(from);
  }
  template <typename StaticVector>
  static void ResizeVector(StaticVector *target, size_t size) {
    target->resize_inline(size, SetZero::kYes);
  }
};
}  // namespace internal

template <typename T, size_t kStaticLength, bool kInline, size_t kForceAlign,
          bool kNullTerminate>
bool Vector<T, kStaticLength, kInline, kForceAlign,
            kNullTerminate>::FromFlatbuffer(ConstFlatbuffer &vector) {
  return internal::InlineWrapper<T, kInline>::FromFlatbuffer(this, vector);
}

template <typename T, size_t kStaticLength, bool kInline, size_t kForceAlign,
          bool kNullTerminate>
void Vector<T, kStaticLength, kInline, kForceAlign, kNullTerminate>::resize(
    size_t size) {
  internal::InlineWrapper<T, kInline>::ResizeVector(this, size);
}

}  // namespace aos::fbs
#endif  // AOS_FLATBUFFERS_STATIC_VECTOR_H_