#ifndef AOS_FLATBUFFER_MERGE_H_
#define AOS_FLATBUFFER_MERGE_H_

#include <cstddef>
#include <cstdint>
#include <string>
#include <vector>

#include "absl/types/span.h"
#include "aos/flatbuffers.h"
#include "flatbuffers/flatbuffers.h"

namespace aos {

// Merges 2 flatbuffers with the provided type table into the builder. Returns
// the offset to the merged flatbuffer.
// One or both of t1 and t2 must be non-null. If one is null, this method
// copies instead of merging.
flatbuffers::Offset<flatbuffers::Table> MergeFlatBuffers(
    const flatbuffers::TypeTable *typetable, const flatbuffers::Table *t1,
    const flatbuffers::Table *t2, flatbuffers::FlatBufferBuilder *fbb);

template <class T>
inline flatbuffers::Offset<T> MergeFlatBuffers(
    const flatbuffers::Table *t1, const flatbuffers::Table *t2,
    flatbuffers::FlatBufferBuilder *fbb) {
  return MergeFlatBuffers(T::MiniReflectTypeTable(), t1, t2, fbb).o;
}

template <class T>
inline aos::FlatbufferDetachedBuffer<T> MergeFlatBuffers(const T *fb1,
                                                         const T *fb2) {
  flatbuffers::FlatBufferBuilder fbb;
  fbb.ForceDefaults(true);
  fbb.Finish(MergeFlatBuffers<T>(
      reinterpret_cast<const flatbuffers::Table *>(fb1),
      reinterpret_cast<const flatbuffers::Table *>(fb2), &fbb));
  return aos::FlatbufferDetachedBuffer<T>(fbb.Release());
}
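
// Example usage (an illustrative sketch, not part of this header's API;
// `Config` is a placeholder for any generated table that provides
// MiniReflectTypeTable()):
//
//   aos::FlatbufferDetachedBuffer<Config> base = /* ... */;
//   aos::FlatbufferDetachedBuffer<Config> overlay = /* ... */;
//   aos::FlatbufferDetachedBuffer<Config> merged =
//       aos::MergeFlatBuffers<Config>(&base.message(), &overlay.message());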

template <class T>
inline flatbuffers::Offset<T> MergeFlatBuffers(
    const T *fb1, const T *fb2, flatbuffers::FlatBufferBuilder *fbb) {
  return MergeFlatBuffers<T>(reinterpret_cast<const flatbuffers::Table *>(fb1),
                             reinterpret_cast<const flatbuffers::Table *>(fb2),
                             fbb);
}

template <class T>
inline aos::FlatbufferDetachedBuffer<T> MergeFlatBuffers(
    const aos::Flatbuffer<T> &fb1, const aos::Flatbuffer<T> &fb2) {
  return aos::FlatbufferDetachedBuffer<T>(
      MergeFlatBuffers<T>(&fb1.message(), &fb2.message()));
}

template <class T>
inline flatbuffers::Offset<T> MergeFlatBuffers(
    const aos::Flatbuffer<T> &fb1, const aos::Flatbuffer<T> &fb2,
    flatbuffers::FlatBufferBuilder *fbb) {
  return MergeFlatBuffers<T>(
      reinterpret_cast<const flatbuffers::Table *>(&fb1.message()),
      reinterpret_cast<const flatbuffers::Table *>(&fb2.message()), fbb);
}

// Copies a flatbuffer by walking the tree and copying all the pieces. This
// converts DAGs to trees.
template <class T>
inline flatbuffers::Offset<T> RecursiveCopyFlatBuffer(
    const T *t1, flatbuffers::FlatBufferBuilder *fbb) {
  return MergeFlatBuffers<T>(reinterpret_cast<const flatbuffers::Table *>(t1),
                             nullptr, fbb);
}
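
// Illustrative sketch (not part of the API): RecursiveCopyFlatBuffer is useful
// for embedding a copy of an existing sub-table while assembling a new message
// in a builder. `Node`, `node()`, and `existing_config` below are hypothetical
// generated types, accessors, and variables.
//
//   flatbuffers::FlatBufferBuilder fbb;
//   flatbuffers::Offset<Node> copied_node =
//       aos::RecursiveCopyFlatBuffer(existing_config->node(), &fbb);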

// Copies a flatbuffer by finding the extents of the memory using the typetable
// and copying the containing memory. This doesn't allocate memory, and
// preserves DAGs.
flatbuffers::Offset<flatbuffers::Table> CopyFlatBuffer(
    const flatbuffers::Table *t1, const flatbuffers::TypeTable *typetable,
    flatbuffers::FlatBufferBuilder *fbb);

template <class T>
inline flatbuffers::Offset<T> CopyFlatBuffer(
    const T *t1, flatbuffers::FlatBufferBuilder *fbb) {
  return flatbuffers::Offset<T>(
      CopyFlatBuffer(reinterpret_cast<const flatbuffers::Table *>(t1),
                     T::MiniReflectTypeTable(), fbb)
          .o);
}

template <class T>
inline flatbuffers::Offset<T> CopyFlatBuffer(
    const Flatbuffer<T> &t1, flatbuffers::FlatBufferBuilder *fbb) {
  return flatbuffers::Offset<T>(
      CopyFlatBuffer(
          reinterpret_cast<const flatbuffers::Table *>(&t1.message()),
          T::MiniReflectTypeTable(), fbb)
          .o);
}
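
// Illustrative sketch (not part of the API): unlike RecursiveCopyFlatBuffer,
// CopyFlatBuffer copies the source message's memory extents wholesale into the
// builder. `Config` and `existing` are placeholders.
//
//   flatbuffers::FlatBufferBuilder fbb;
//   flatbuffers::Offset<Config> copied =
//       aos::CopyFlatBuffer<Config>(&existing.message(), &fbb);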

namespace flatbuffer_merge_internal {

inline flatbuffers::uoffset_t DoBlindCopyFlatBuffer(
    const void *message, absl::Span<const uint8_t> span,
    flatbuffers::FlatBufferBuilder *fbb) {
  // Enforce 8 byte alignment so anything inside the flatbuffer can be read.
  fbb->Align(sizeof(flatbuffers::largest_scalar_t));

  // We don't know how much of the start of the flatbuffer is padding. The
  // safest thing to do from an alignment point of view (without looking inside)
  // is to copy the initial offset and leave it as dead space.
  fbb->PushBytes(span.data(), span.size());
  // Then, compute the offset from the back by computing the distance from the
  // front to the start of the message.
  return fbb->GetSize() -
         static_cast<flatbuffers::uoffset_t>(
             reinterpret_cast<const uint8_t *>(message) - span.data());
}

}  // namespace flatbuffer_merge_internal

// Copies a flatbuffer by copying all of the underlying data without inspecting
// it. The returned offset points into the copied data.
template <class T>
inline flatbuffers::Offset<T> BlindCopyFlatBuffer(
    const NonSizePrefixedFlatbuffer<T> &t,
    flatbuffers::FlatBufferBuilder *fbb) {
  return flatbuffer_merge_internal::DoBlindCopyFlatBuffer(&t.message(),
                                                          t.span(), fbb);
}

// Copies a flatbuffer by copying all of the underlying data without inspecting
// it. The returned offset points into the copied data.
template <class T>
inline flatbuffers::Offset<T> BlindCopyFlatBuffer(
    const SizePrefixedFlatbuffer<T> &t, flatbuffers::FlatBufferBuilder *fbb) {
  return flatbuffer_merge_internal::DoBlindCopyFlatBuffer(&t.message(),
                                                          t.span(), fbb);
}
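
// Illustrative sketch (not part of the API): BlindCopyFlatBuffer pushes the
// entire backing span into the builder and returns an offset into that copy.
// `Config` and `source` are placeholders; `source` is assumed to be a
// NonSizePrefixedFlatbuffer<Config> (or SizePrefixedFlatbuffer<Config>).
//
//   flatbuffers::FlatBufferBuilder fbb;
//   flatbuffers::Offset<Config> copied =
//       aos::BlindCopyFlatBuffer(source, &fbb);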

// Copies a vector of tables into the builder by copying each element with
// RecursiveCopyFlatBuffer. Returns 0 for a null input vector.
template <class T>
inline flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<T>>>
RecursiveCopyVectorTable(const flatbuffers::Vector<flatbuffers::Offset<T>> *t1,
                         flatbuffers::FlatBufferBuilder *fbb) {
  if (t1 == nullptr) {
    return 0;
  }
  std::vector<flatbuffers::Offset<T>> v;
  for (const T *t : *t1) {
    v.emplace_back(RecursiveCopyFlatBuffer(t, fbb));
  }
  return fbb->CreateVector(v);
}

// Copies a vector of strings into the builder, deduplicating identical strings
// via CreateSharedString. Returns 0 for a null input vector.
inline flatbuffers::Offset<
    flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>>>
CopyVectorSharedString(
    const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>> *t1,
    flatbuffers::FlatBufferBuilder *fbb) {
  if (t1 == nullptr) {
    return 0;
  }
  std::vector<flatbuffers::Offset<flatbuffers::String>> v;
  for (const flatbuffers::String *t : *t1) {
    v.emplace_back(fbb->CreateSharedString(t));
  }
  return fbb->CreateVector(v);
}
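
// Illustrative sketch (not part of the API): the two vector helpers above are
// intended for rebuilding vector fields while constructing a new message.
// `config`, `nodes()`, and `names()` are hypothetical accessors returning a
// vector of tables and a vector of strings respectively.
//
//   flatbuffers::FlatBufferBuilder fbb;
//   auto nodes_offset = aos::RecursiveCopyVectorTable(config->nodes(), &fbb);
//   auto names_offset = aos::CopyVectorSharedString(config->names(), &fbb);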

template <class T>
inline FlatbufferDetachedBuffer<T> CopyFlatBuffer(const T *t) {
  flatbuffers::FlatBufferBuilder fbb;
  fbb.ForceDefaults(true);
  fbb.Finish(CopyFlatBuffer<T>(t, &fbb));
  return FlatbufferDetachedBuffer<T>(fbb.Release());
}

template <class T>
inline FlatbufferDetachedBuffer<T> RecursiveCopyFlatBuffer(const T *t) {
  flatbuffers::FlatBufferBuilder fbb;
  fbb.ForceDefaults(true);
  fbb.Finish(RecursiveCopyFlatBuffer<T>(t, &fbb));
  return FlatbufferDetachedBuffer<T>(fbb.Release());
}

// Compares 2 flatbuffers. Returns true if they match, false otherwise.
bool CompareFlatBuffer(const flatbuffers::TypeTable *typetable,
                       const flatbuffers::Table *t1,
                       const flatbuffers::Table *t2);

template <class T>
inline bool CompareFlatBuffer(const T *t1, const T *t2) {
  return CompareFlatBuffer(T::MiniReflectTypeTable(),
                           reinterpret_cast<const flatbuffers::Table *>(t1),
                           reinterpret_cast<const flatbuffers::Table *>(t2));
}
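
// Illustrative sketch (not part of the API): comparing two messages of the
// same generated type via its MiniReflectTypeTable. `Config`, `a`, and `b`
// are placeholders.
//
//   const bool equal = aos::CompareFlatBuffer(&a.message(), &b.message());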

// Compares two flatbuffers by comparing their backing spans byte-for-byte.
template <class T>
inline bool CompareFlatBuffer(const aos::NonSizePrefixedFlatbuffer<T> &t1,
                              const aos::NonSizePrefixedFlatbuffer<T> &t2) {
  return t1.span() == t2.span();
}

// Compares two flatbuffers by comparing their backing spans byte-for-byte.
template <class T>
inline bool CompareFlatBuffer(const aos::SizePrefixedFlatbuffer<T> &t1,
                              const aos::SizePrefixedFlatbuffer<T> &t2) {
  return t1.span() == t2.span();
}

}  // namespace aos

#endif  // AOS_FLATBUFFER_MERGE_H_