Austin Schuh | 09d7ffa | 2019-10-03 23:43:34 -0700 | [diff] [blame] | 1 | #ifndef AOS_FLATBUFFER_MERGE_H_ |
| 2 | #define AOS_FLATBUFFER_MERGE_H_ |
| 3 | |
#include <cstddef>
#include <cstdint>
#include <string>
#include <vector>

#include "absl/types/span.h"
#include "aos/flatbuffers.h"
#include "flatbuffers/flatbuffers.h"
| 9 | |
| 10 | namespace aos { |
| 11 | |
// Merges 2 flatbuffers with the provided type table into the builder.  Returns
// the offset to the merged flatbuffer.
// One or both of t1 and t2 must be non-null.  If one is null, this method
// copies instead of merging.
flatbuffers::Offset<flatbuffers::Table> MergeFlatBuffers(
    const flatbuffers::TypeTable *typetable, const flatbuffers::Table *t1,
    const flatbuffers::Table *t2, flatbuffers::FlatBufferBuilder *fbb);
Austin Schuh | 09d7ffa | 2019-10-03 23:43:34 -0700 | [diff] [blame] | 19 | |
| 20 | template <class T> |
Austin Schuh | e93d864 | 2019-10-13 15:27:07 -0700 | [diff] [blame] | 21 | inline flatbuffers::Offset<T> MergeFlatBuffers( |
Austin Schuh | add6eb3 | 2020-11-09 21:24:26 -0800 | [diff] [blame] | 22 | const flatbuffers::Table *t1, const flatbuffers::Table *t2, |
| 23 | flatbuffers::FlatBufferBuilder *fbb) { |
Austin Schuh | e93d864 | 2019-10-13 15:27:07 -0700 | [diff] [blame] | 24 | return MergeFlatBuffers(T::MiniReflectTypeTable(), t1, t2, fbb).o; |
| 25 | } |
| 26 | |
| 27 | template <class T> |
Austin Schuh | 40485ed | 2019-10-26 21:51:44 -0700 | [diff] [blame] | 28 | inline aos::FlatbufferDetachedBuffer<T> MergeFlatBuffers(const T *fb1, |
| 29 | const T *fb2) { |
James Kuszmaul | f3a3be2 | 2020-01-04 12:12:00 -0800 | [diff] [blame] | 30 | flatbuffers::FlatBufferBuilder fbb; |
Austin Schuh | d7b15da | 2020-02-17 15:06:11 -0800 | [diff] [blame] | 31 | fbb.ForceDefaults(true); |
James Kuszmaul | f3a3be2 | 2020-01-04 12:12:00 -0800 | [diff] [blame] | 32 | fbb.Finish(MergeFlatBuffers<T>( |
| 33 | reinterpret_cast<const flatbuffers::Table *>(fb1), |
| 34 | reinterpret_cast<const flatbuffers::Table *>(fb2), &fbb)); |
| 35 | return aos::FlatbufferDetachedBuffer<T>(fbb.Release()); |
Austin Schuh | e93d864 | 2019-10-13 15:27:07 -0700 | [diff] [blame] | 36 | } |
| 37 | |
| 38 | template <class T> |
Austin Schuh | e695821 | 2020-10-19 11:48:14 -0700 | [diff] [blame] | 39 | inline flatbuffers::Offset<T> MergeFlatBuffers( |
| 40 | const T *fb1, const T *fb2, flatbuffers::FlatBufferBuilder *fbb) { |
| 41 | return MergeFlatBuffers<T>(reinterpret_cast<const flatbuffers::Table *>(fb1), |
| 42 | reinterpret_cast<const flatbuffers::Table *>(fb2), |
| 43 | fbb); |
| 44 | } |
| 45 | |
Austin Schuh | add6eb3 | 2020-11-09 21:24:26 -0800 | [diff] [blame] | 46 | template <class T> |
| 47 | inline aos::FlatbufferDetachedBuffer<T> MergeFlatBuffers( |
| 48 | const aos::Flatbuffer<T> &fb1, const aos::Flatbuffer<T> &fb2) { |
| 49 | return aos::FlatbufferDetachedBuffer<T>( |
| 50 | MergeFlatBuffers<T>(&fb1.message(), &fb2.message())); |
| 51 | } |
| 52 | |
Austin Schuh | a4fc60f | 2020-11-01 23:06:47 -0800 | [diff] [blame] | 53 | // Copies a flatbuffer by walking the tree and copying all the pieces. This |
| 54 | // converts DAGs to trees. |
Austin Schuh | e695821 | 2020-10-19 11:48:14 -0700 | [diff] [blame] | 55 | template <class T> |
Austin Schuh | a4fc60f | 2020-11-01 23:06:47 -0800 | [diff] [blame] | 56 | inline flatbuffers::Offset<T> RecursiveCopyFlatBuffer( |
Austin Schuh | e93d864 | 2019-10-13 15:27:07 -0700 | [diff] [blame] | 57 | const T *t1, flatbuffers::FlatBufferBuilder *fbb) { |
| 58 | return MergeFlatBuffers<T>(reinterpret_cast<const flatbuffers::Table *>(t1), |
| 59 | nullptr, fbb); |
| 60 | } |
| 61 | |
// Copies a flatbuffer by finding the extents of the memory using the typetable
// and copying the containing memory.  This doesn't allocate memory, and
// preserves DAGs.
// t1 is the root table to copy, typetable describes its schema, and the copy
// is appended to fbb.  Returns the offset of the copy within fbb.
flatbuffers::Offset<flatbuffers::Table> CopyFlatBuffer(
    const flatbuffers::Table *t1, const flatbuffers::TypeTable *typetable,
    flatbuffers::FlatBufferBuilder *fbb);
| 68 | |
| 69 | template <class T> |
| 70 | inline flatbuffers::Offset<T> CopyFlatBuffer( |
| 71 | const T *t1, flatbuffers::FlatBufferBuilder *fbb) { |
| 72 | return flatbuffers::Offset<T>( |
| 73 | CopyFlatBuffer(reinterpret_cast<const flatbuffers::Table *>(t1), |
| 74 | T::MiniReflectTypeTable(), fbb) |
| 75 | .o); |
| 76 | } |
| 77 | |
| 78 | template <class T> |
| 79 | inline flatbuffers::Offset<T> CopyFlatBuffer( |
| 80 | const Flatbuffer<T> &t1, flatbuffers::FlatBufferBuilder *fbb) { |
| 81 | return flatbuffers::Offset<T>( |
| 82 | CopyFlatBuffer( |
| 83 | reinterpret_cast<const flatbuffers::Table *>(&t1.message()), |
| 84 | T::MiniReflectTypeTable(), fbb) |
| 85 | .o); |
| 86 | } |
| 87 | |
namespace flatbuffer_merge_internal {

// Implementation detail for BlindCopyFlatBuffer below: copies the entire
// backing span into fbb without looking inside it, then returns the offset
// (measured from the back of the builder, as flatbuffer offsets are) of the
// root message within that copy.  `message` must point inside `span`.
inline flatbuffers::uoffset_t DoBlindCopyFlatBuffer(
    const void *message, absl::Span<const uint8_t> span,
    flatbuffers::FlatBufferBuilder *fbb) {
  // Enforce 8 byte alignment so anything inside the flatbuffer can be read.
  fbb->Align(sizeof(flatbuffers::largest_scalar_t));

  // We don't know how much of the start of the flatbuffer is padding. The
  // safest thing to do from an alignment point of view (without looking inside)
  // is to copy the initial offset and leave it as dead space.
  fbb->PushBytes(span.data(), span.size());
  // Then, compute the offset from the back by computing the distance from the
  // front to the start of the message.
  return fbb->GetSize() -
         static_cast<flatbuffers::uoffset_t>(
             reinterpret_cast<const uint8_t *>(message) - span.data());
}

}  // namespace flatbuffer_merge_internal
| 108 | |
| 109 | // Copies a flatbuffer by copying all the data without looking inside and |
| 110 | // pointing inside it. |
| 111 | template <class T> |
| 112 | inline flatbuffers::Offset<T> BlindCopyFlatBuffer( |
| 113 | const NonSizePrefixedFlatbuffer<T> &t, |
| 114 | flatbuffers::FlatBufferBuilder *fbb) { |
| 115 | return flatbuffer_merge_internal::DoBlindCopyFlatBuffer(&t.message(), |
| 116 | t.span(), fbb); |
| 117 | } |
| 118 | |
| 119 | // Copies a flatbuffer by copying all the data without looking inside and |
| 120 | // pointing inside it. |
| 121 | template <class T> |
| 122 | inline flatbuffers::Offset<T> BlindCopyFlatBuffer( |
| 123 | const SizePrefixedFlatbuffer<T> &t, flatbuffers::FlatBufferBuilder *fbb) { |
| 124 | return flatbuffer_merge_internal::DoBlindCopyFlatBuffer(&t.message(), |
| 125 | t.span(), fbb); |
Austin Schuh | a4fc60f | 2020-11-01 23:06:47 -0800 | [diff] [blame] | 126 | } |
| 127 | |
Austin Schuh | e93d864 | 2019-10-13 15:27:07 -0700 | [diff] [blame] | 128 | template <class T> |
Austin Schuh | cbe9d5a | 2020-11-01 23:25:23 -0800 | [diff] [blame] | 129 | inline flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<T>>> |
Austin Schuh | 5c255aa | 2020-11-05 18:32:46 -0800 | [diff] [blame] | 130 | RecursiveCopyVectorTable(const flatbuffers::Vector<flatbuffers::Offset<T>> *t1, |
| 131 | flatbuffers::FlatBufferBuilder *fbb) { |
Austin Schuh | cbe9d5a | 2020-11-01 23:25:23 -0800 | [diff] [blame] | 132 | if (t1 == nullptr) { |
| 133 | return 0; |
| 134 | } |
| 135 | std::vector<flatbuffers::Offset<T>> v; |
| 136 | for (const T *t : *t1) { |
Austin Schuh | 5c255aa | 2020-11-05 18:32:46 -0800 | [diff] [blame] | 137 | v.emplace_back(RecursiveCopyFlatBuffer(t, fbb)); |
Austin Schuh | cbe9d5a | 2020-11-01 23:25:23 -0800 | [diff] [blame] | 138 | } |
| 139 | return fbb->CreateVector(v); |
| 140 | } |
| 141 | |
| 142 | inline flatbuffers::Offset< |
| 143 | flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>>> |
| 144 | CopyVectorSharedString( |
| 145 | const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>> *t1, |
| 146 | flatbuffers::FlatBufferBuilder *fbb) { |
| 147 | if (t1 == nullptr) { |
| 148 | return 0; |
| 149 | } |
| 150 | std::vector<flatbuffers::Offset<flatbuffers::String>> v; |
| 151 | for (const flatbuffers::String *t : *t1) { |
| 152 | v.emplace_back(fbb->CreateSharedString(t)); |
| 153 | } |
| 154 | return fbb->CreateVector(v); |
| 155 | } |
| 156 | |
| 157 | template <class T> |
Austin Schuh | 40485ed | 2019-10-26 21:51:44 -0700 | [diff] [blame] | 158 | inline FlatbufferDetachedBuffer<T> CopyFlatBuffer(const T *t) { |
Austin Schuh | e93d864 | 2019-10-13 15:27:07 -0700 | [diff] [blame] | 159 | flatbuffers::FlatBufferBuilder fbb; |
Austin Schuh | d7b15da | 2020-02-17 15:06:11 -0800 | [diff] [blame] | 160 | fbb.ForceDefaults(true); |
Austin Schuh | e93d864 | 2019-10-13 15:27:07 -0700 | [diff] [blame] | 161 | fbb.Finish(CopyFlatBuffer<T>(t, &fbb)); |
Austin Schuh | 40485ed | 2019-10-26 21:51:44 -0700 | [diff] [blame] | 162 | return FlatbufferDetachedBuffer<T>(fbb.Release()); |
Austin Schuh | e93d864 | 2019-10-13 15:27:07 -0700 | [diff] [blame] | 163 | } |
| 164 | |
Austin Schuh | a4fc60f | 2020-11-01 23:06:47 -0800 | [diff] [blame] | 165 | template <class T> |
| 166 | inline FlatbufferDetachedBuffer<T> RecursiveCopyFlatBuffer(const T *t) { |
| 167 | flatbuffers::FlatBufferBuilder fbb; |
| 168 | fbb.ForceDefaults(true); |
| 169 | fbb.Finish(RecursiveCopyFlatBuffer<T>(t, &fbb)); |
| 170 | return FlatbufferDetachedBuffer<T>(fbb.Release()); |
| 171 | } |
| 172 | |
// Compares 2 flatbuffers. Returns true if they match, false otherwise.
// typetable describes the schema of both t1 and t2.
bool CompareFlatBuffer(const flatbuffers::TypeTable *typetable,
                       const flatbuffers::Table *t1,
                       const flatbuffers::Table *t2);
| 177 | |
| 178 | template <class T> |
| 179 | inline bool CompareFlatBuffer(const T *t1, const T *t2) { |
| 180 | return CompareFlatBuffer(T::MiniReflectTypeTable(), |
| 181 | reinterpret_cast<const flatbuffers::Table *>(t1), |
| 182 | reinterpret_cast<const flatbuffers::Table *>(t2)); |
| 183 | } |
| 184 | |
Austin Schuh | 97789fc | 2020-08-01 14:42:45 -0700 | [diff] [blame] | 185 | template <class T> |
Austin Schuh | add6eb3 | 2020-11-09 21:24:26 -0800 | [diff] [blame] | 186 | inline bool CompareFlatBuffer(const aos::NonSizePrefixedFlatbuffer<T> &t1, |
| 187 | const aos::NonSizePrefixedFlatbuffer<T> &t2) { |
| 188 | return t1.span() == t2.span(); |
| 189 | } |
| 190 | |
| 191 | template <class T> |
| 192 | inline bool CompareFlatBuffer(const aos::SizePrefixedFlatbuffer<T> &t1, |
| 193 | const aos::SizePrefixedFlatbuffer<T> &t2) { |
Austin Schuh | 97789fc | 2020-08-01 14:42:45 -0700 | [diff] [blame] | 194 | return t1.span() == t2.span(); |
| 195 | } |
| 196 | |
Austin Schuh | 09d7ffa | 2019-10-03 23:43:34 -0700 | [diff] [blame] | 197 | } // namespace aos |
| 198 | |
| 199 | #endif // AOS_FLATBUFFER_MERGE_H_ |