blob: 8e31596101b6ac12785e3295467053ac4192b7a7 [file] [log] [blame]
Austin Schuh09d7ffa2019-10-03 23:43:34 -07001#include "aos/flatbuffer_merge.h"
2
Tyler Chatowbf0609c2021-07-31 16:13:27 -07003#include <cstdio>
Austin Schuh09d7ffa2019-10-03 23:43:34 -07004
Austin Schuh09d7ffa2019-10-03 23:43:34 -07005#include "flatbuffers/flatbuffers.h"
6#include "flatbuffers/minireflect.h"
7
Philipp Schrader790cb542023-07-05 21:06:52 -07008#include "aos/flatbuffer_utils.h"
9
Austin Schuh09d7ffa2019-10-03 23:43:34 -070010namespace aos {
11
12namespace {
13
14// Simple structure to hold both field_offsets and elements.
15struct OffsetAndFieldOffset {
16 OffsetAndFieldOffset(flatbuffers::voffset_t new_field_offset,
17 flatbuffers::Offset<flatbuffers::String> new_element)
18 : field_offset(new_field_offset), element(new_element) {}
Austin Schuhe93d8642019-10-13 15:27:07 -070019 OffsetAndFieldOffset(flatbuffers::voffset_t new_field_offset,
20 flatbuffers::Offset<flatbuffers::Table> new_element)
21 : field_offset(new_field_offset), element(new_element.o) {}
Austin Schuh09d7ffa2019-10-03 23:43:34 -070022
23 flatbuffers::voffset_t field_offset;
24 flatbuffers::Offset<flatbuffers::String> element;
25};
26
Austin Schuh09d7ffa2019-10-03 23:43:34 -070027// Merges a single element to a builder for the provided field.
28// One or both of t1 and t2 must be non-null. If one is null, this method
29// copies instead of merging.
30template <typename T>
31void MergeElement(flatbuffers::voffset_t field_offset,
32 const flatbuffers::Table *t1, const flatbuffers::Table *t2,
33 flatbuffers::FlatBufferBuilder *fbb) {
34 const uint8_t *val1 =
35 t1 != nullptr ? t1->GetAddressOf(field_offset) : nullptr;
36 const uint8_t *val2 =
37 t2 != nullptr ? t2->GetAddressOf(field_offset) : nullptr;
38 const bool t1_has = val1 != nullptr;
39 const bool t2_has = val2 != nullptr;
40
41 if (t2_has) {
Austin Schuh4a5f5d22021-10-12 15:09:35 -070042 fbb->AddElement<T>(field_offset, flatbuffers::ReadScalar<T>(val2));
Austin Schuh09d7ffa2019-10-03 23:43:34 -070043 } else if (t1_has) {
Austin Schuh4a5f5d22021-10-12 15:09:35 -070044 fbb->AddElement<T>(field_offset, flatbuffers::ReadScalar<T>(val1));
Austin Schuh09d7ffa2019-10-03 23:43:34 -070045 }
46}
47
48// Merges a single string to a builder for the provided field.
49// One or both of t1 and t2 must be non-null. If one is null, this method
50// copies instead of merging.
51void MergeString(flatbuffers::voffset_t field_offset,
52 const flatbuffers::Table *t1, const flatbuffers::Table *t2,
53 flatbuffers::FlatBufferBuilder *fbb,
54 ::std::vector<OffsetAndFieldOffset> *elements) {
55 const uint8_t *val1 =
56 t1 != nullptr ? t1->GetAddressOf(field_offset) : nullptr;
57 const uint8_t *val2 =
58 t2 != nullptr ? t2->GetAddressOf(field_offset) : nullptr;
59 const bool t1_has = val1 != nullptr;
60 const bool t2_has = val2 != nullptr;
61
62 if (t2_has) {
63 val2 += flatbuffers::ReadScalar<flatbuffers::uoffset_t>(val2);
64 const flatbuffers::String *string2 =
65 reinterpret_cast<const flatbuffers::String *>(val2);
66 elements->emplace_back(field_offset,
67 fbb->CreateString(string2->data(), string2->size()));
68 } else if (t1_has) {
69 val1 += flatbuffers::ReadScalar<flatbuffers::uoffset_t>(val1);
70 const flatbuffers::String *string1 =
71 reinterpret_cast<const flatbuffers::String *>(val1);
72 elements->emplace_back(field_offset,
73 fbb->CreateString(string1->data(), string1->size()));
74 }
75}
76
77// Merges an object to a builder for the provided field.
78// One or both of t1 and t2 must be non-null. If one is null, this method
79// copies instead of merging.
80void MergeTables(flatbuffers::voffset_t field_offset,
81 const flatbuffers::Table *t1, const flatbuffers::Table *t2,
82 const flatbuffers::TypeTable *sub_typetable,
83 flatbuffers::FlatBufferBuilder *fbb,
84 ::std::vector<OffsetAndFieldOffset> *elements) {
85 const uint8_t *val1 =
86 t1 != nullptr ? t1->GetAddressOf(field_offset) : nullptr;
87 const uint8_t *val2 =
88 t2 != nullptr ? t2->GetAddressOf(field_offset) : nullptr;
89 const bool t1_has = val1 != nullptr;
90 const bool t2_has = val2 != nullptr;
91 if (t1_has || t2_has) {
92 if (val1 != nullptr) {
93 val1 += flatbuffers::ReadScalar<flatbuffers::uoffset_t>(val1);
94 }
95 if (val2 != nullptr) {
96 val2 += flatbuffers::ReadScalar<flatbuffers::uoffset_t>(val2);
97 }
98
99 const flatbuffers::Table *sub_t1 =
100 reinterpret_cast<const flatbuffers::Table *>(val1);
101 const flatbuffers::Table *sub_t2 =
102 reinterpret_cast<const flatbuffers::Table *>(val2);
103
Austin Schuha4fc60f2020-11-01 23:06:47 -0800104 elements->emplace_back(
105 field_offset, MergeFlatBuffers(sub_typetable, sub_t1, sub_t2, fbb));
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700106 }
107}
108
109// Adds a vector of strings to the elements vector so it can be added later.
110// One or both of t1 and t2 must be non-null. If one is null, this method
111// copies instead of merging.
112void AddVectorOfStrings(flatbuffers::ElementaryType elementary_type,
113 flatbuffers::voffset_t field_offset,
114 const flatbuffers::Table *t1,
115 const flatbuffers::Table *t2,
116 flatbuffers::FlatBufferBuilder *fbb,
117 ::std::vector<OffsetAndFieldOffset> *elements) {
118 const uint8_t *val1 =
119 t1 != nullptr ? t1->GetAddressOf(field_offset) : nullptr;
120 const uint8_t *val2 =
121 t2 != nullptr ? t2->GetAddressOf(field_offset) : nullptr;
122 const bool t1_has = val1 != nullptr;
123 const bool t2_has = val2 != nullptr;
124
125 // Compute end size of the vector.
126 size_t size = 0;
127 if (t1_has) {
128 val1 += flatbuffers::ReadScalar<flatbuffers::uoffset_t>(val1);
129 auto vec1 = reinterpret_cast<
130 const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>> *>(
131 val1);
132 size += vec1->size();
133 }
134 if (t2_has) {
135 val2 += flatbuffers::ReadScalar<flatbuffers::uoffset_t>(val2);
136 auto vec2 = reinterpret_cast<
137 const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::String>> *>(
138 val2);
139 size += vec2->size();
140 }
141
142 // Only add the vector if there is something to add.
143 if (t1_has || t2_has) {
144 const size_t inline_size =
145 flatbuffers::InlineSize(elementary_type, nullptr);
146
147 ::std::vector<flatbuffers::Offset<flatbuffers::String>> string_elements;
148
149 // Pack the contents in in reverse order.
150 if (t2_has) {
151 auto vec2 = reinterpret_cast<const flatbuffers::Vector<
152 flatbuffers::Offset<flatbuffers::String>> *>(val2);
153 for (auto i = vec2->rbegin(); i != vec2->rend(); ++i) {
154 const flatbuffers::String *s = *i;
155 string_elements.emplace_back(fbb->CreateString(s->data(), s->size()));
156 }
157 }
158 if (t1_has) {
159 auto vec1 = reinterpret_cast<const flatbuffers::Vector<
160 flatbuffers::Offset<flatbuffers::String>> *>(val1);
161 for (auto i = vec1->rbegin(); i != vec1->rend(); ++i) {
162 const flatbuffers::String *s = *i;
163 string_elements.emplace_back(fbb->CreateString(s->data(), s->size()));
164 }
165 }
166
167 // Start the vector.
James Kuszmaul65541cb2022-11-08 14:53:47 -0800168 fbb->StartVector(size, inline_size, /*align=*/inline_size);
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700169
170 for (const flatbuffers::Offset<flatbuffers::String> &element :
171 string_elements) {
172 fbb->PushElement(element);
173 }
174
175 // And then finish the vector and put it in the list of offsets to add to
176 // the message when it finishes.
177 elements->emplace_back(
178 field_offset,
179 flatbuffers::Offset<flatbuffers::String>(fbb->EndVector(size)));
180 }
181}
182
183// Adds a vector of values to the elements vector so it can be added later.
184// One or both of t1 and t2 must be non-null. If one is null, this method
185// copies instead of merging.
186template <typename T>
187void AddVector(flatbuffers::ElementaryType elementary_type,
188 flatbuffers::voffset_t field_offset,
189 const flatbuffers::Table *t1, const flatbuffers::Table *t2,
190 flatbuffers::FlatBufferBuilder *fbb,
191 ::std::vector<OffsetAndFieldOffset> *elements) {
192 const uint8_t *val1 =
193 t1 != nullptr ? t1->GetAddressOf(field_offset) : nullptr;
194 const uint8_t *val2 =
195 t2 != nullptr ? t2->GetAddressOf(field_offset) : nullptr;
196 const bool t1_has = val1 != nullptr;
197 const bool t2_has = val2 != nullptr;
198
199 // Compute end size of the vector.
200 size_t size = 0;
201 if (t1_has) {
202 val1 += flatbuffers::ReadScalar<flatbuffers::uoffset_t>(val1);
203 auto vec1 = reinterpret_cast<const flatbuffers::Vector<T> *>(val1);
204 size += vec1->size();
205 }
206 if (t2_has) {
207 val2 += flatbuffers::ReadScalar<flatbuffers::uoffset_t>(val2);
208 auto vec2 = reinterpret_cast<const flatbuffers::Vector<T> *>(val2);
209 size += vec2->size();
210 }
211
212 // Only add the vector if there is something to add.
213 if (t1_has || t2_has) {
214 const size_t inline_size =
215 flatbuffers::InlineSize(elementary_type, nullptr);
216
217 // Start the vector.
James Kuszmaul65541cb2022-11-08 14:53:47 -0800218 fbb->StartVector(size, inline_size, /*align=*/inline_size);
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700219
220 // Pack the contents in in reverse order.
221 if (t2_has) {
222 auto vec2 = reinterpret_cast<const flatbuffers::Vector<T> *>(val2);
223 // Iterate backwards.
224 for (auto i = vec2->rbegin(); i != vec2->rend(); ++i) {
225 fbb->PushElement<T>(*i);
226 }
227 }
228 if (t1_has) {
229 auto vec1 = reinterpret_cast<const flatbuffers::Vector<T> *>(val1);
230 // Iterate backwards.
231 for (auto i = vec1->rbegin(); i != vec1->rend(); ++i) {
232 fbb->PushElement<T>(*i);
233 }
234 }
235 // And then finish the vector and put it in the list of offsets to add to
236 // the message when it finishes.
237 elements->emplace_back(
238 field_offset,
239 flatbuffers::Offset<flatbuffers::String>(fbb->EndVector(size)));
240 }
241}
242
243void AddVectorOfObjects(flatbuffers::FlatBufferBuilder *fbb,
244 ::std::vector<OffsetAndFieldOffset> *elements,
245 flatbuffers::ElementaryType elementary_type,
246 const flatbuffers::TypeTable *sub_typetable,
247 flatbuffers::voffset_t field_offset,
248 const flatbuffers::Table *t1,
249 const flatbuffers::Table *t2) {
250 const uint8_t *val1 =
251 t1 != nullptr ? t1->GetAddressOf(field_offset) : nullptr;
252 const uint8_t *val2 =
253 t2 != nullptr ? t2->GetAddressOf(field_offset) : nullptr;
254 const bool t1_has = val1 != nullptr;
255 const bool t2_has = val2 != nullptr;
256
257 // Compute end size of the vector.
258 size_t size = 0;
259 if (t1_has) {
260 val1 += flatbuffers::ReadScalar<flatbuffers::uoffset_t>(val1);
261 auto vec1 = reinterpret_cast<
262 const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::Table>> *>(
263 val1);
264 size += vec1->size();
265 }
266 if (t2_has) {
267 val2 += flatbuffers::ReadScalar<flatbuffers::uoffset_t>(val2);
268 auto vec2 = reinterpret_cast<
269 const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::Table>> *>(
270 val2);
271 size += vec2->size();
272 }
273
274 // Only add the vector if there is something to add.
275 if (t1_has || t2_has) {
276 const size_t inline_size =
277 flatbuffers::InlineSize(elementary_type, sub_typetable);
278
279 ::std::vector<flatbuffers::Offset<flatbuffers::Table>> object_elements;
280
281 // Pack the contents in in reverse order.
282 if (t2_has) {
283 auto vec2 = reinterpret_cast<
284 const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::Table>> *>(
285 val2);
286 for (auto i = vec2->rbegin(); i != vec2->rend(); ++i) {
287 const flatbuffers::Table *t = *i;
288
Austin Schuhe93d8642019-10-13 15:27:07 -0700289 flatbuffers::Offset<flatbuffers::Table> end =
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700290 MergeFlatBuffers(sub_typetable, t, nullptr, fbb);
291
Austin Schuhe93d8642019-10-13 15:27:07 -0700292 object_elements.emplace_back(end);
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700293 }
294 }
295 if (t1_has) {
296 auto vec1 = reinterpret_cast<
297 const flatbuffers::Vector<flatbuffers::Offset<flatbuffers::Table>> *>(
298 val1);
299 for (auto i = vec1->rbegin(); i != vec1->rend(); ++i) {
300 const flatbuffers::Table *t = *i;
301
Austin Schuhe93d8642019-10-13 15:27:07 -0700302 flatbuffers::Offset<flatbuffers::Table> end =
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700303 MergeFlatBuffers(sub_typetable, t, nullptr, fbb);
304
Austin Schuhe93d8642019-10-13 15:27:07 -0700305 object_elements.emplace_back(end);
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700306 }
307 }
308
309 // Start the vector.
James Kuszmaul65541cb2022-11-08 14:53:47 -0800310 fbb->StartVector(size, inline_size, /*align=*/inline_size);
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700311
312 for (const flatbuffers::Offset<flatbuffers::Table> &element :
313 object_elements) {
314 fbb->PushElement(element);
315 }
316
317 // And then finish the vector and put it in the list of offsets to add to
318 // the message when it finishes.
319 elements->emplace_back(
320 field_offset,
321 flatbuffers::Offset<flatbuffers::String>(fbb->EndVector(size)));
322 }
323}
324
Austin Schuhe93d8642019-10-13 15:27:07 -0700325} // namespace
326
327flatbuffers::Offset<flatbuffers::Table> MergeFlatBuffers(
328 const flatbuffers::TypeTable *typetable, const flatbuffers::Table *t1,
329 const flatbuffers::Table *t2, flatbuffers::FlatBufferBuilder *fbb) {
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700330 ::std::vector<OffsetAndFieldOffset> elements;
331
332 // We need to do this in 2 passes
333 // The first pass builds up all the sub-objects which are encoded in the
334 // message as offsets.
335 // The second pass builds up the actual table by adding all the values to the
336 // messages, and encoding the offsets in the table.
337 for (size_t field_index = 0; field_index < typetable->num_elems;
338 ++field_index) {
339 const flatbuffers::TypeCode type_code = typetable->type_codes[field_index];
340 const flatbuffers::ElementaryType elementary_type =
341 static_cast<flatbuffers::ElementaryType>(type_code.base_type);
342
343 const flatbuffers::voffset_t field_offset = flatbuffers::FieldIndexToOffset(
344 static_cast<flatbuffers::voffset_t>(field_index));
345
346 switch (elementary_type) {
347 case flatbuffers::ElementaryType::ET_UTYPE:
Austin Schuh7c75e582020-11-14 16:41:18 -0800348 if (!type_code.is_repeating) continue;
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700349 printf("ET_UTYPE, %s\n", typetable->names[field_index]);
350 break;
351 case flatbuffers::ElementaryType::ET_BOOL:
Austin Schuh7c75e582020-11-14 16:41:18 -0800352 if (!type_code.is_repeating) continue;
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700353 AddVector<uint8_t>(elementary_type, field_offset, t1, t2, fbb,
354 &elements);
355 break;
356 case flatbuffers::ElementaryType::ET_CHAR:
Austin Schuh7c75e582020-11-14 16:41:18 -0800357 if (!type_code.is_repeating) continue;
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700358 AddVector<int8_t>(elementary_type, field_offset, t1, t2, fbb,
359 &elements);
360 break;
361 case flatbuffers::ElementaryType::ET_UCHAR:
Austin Schuh7c75e582020-11-14 16:41:18 -0800362 if (!type_code.is_repeating) continue;
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700363 AddVector<uint8_t>(elementary_type, field_offset, t1, t2, fbb,
364 &elements);
365 break;
366 case flatbuffers::ElementaryType::ET_SHORT:
Austin Schuh7c75e582020-11-14 16:41:18 -0800367 if (!type_code.is_repeating) continue;
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700368 AddVector<int16_t>(elementary_type, field_offset, t1, t2, fbb,
369 &elements);
370 break;
371 case flatbuffers::ElementaryType::ET_USHORT:
Austin Schuh7c75e582020-11-14 16:41:18 -0800372 if (!type_code.is_repeating) continue;
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700373 AddVector<uint16_t>(elementary_type, field_offset, t1, t2, fbb,
374 &elements);
375 break;
376 case flatbuffers::ElementaryType::ET_INT:
Austin Schuh7c75e582020-11-14 16:41:18 -0800377 if (!type_code.is_repeating) continue;
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700378 AddVector<int32_t>(elementary_type, field_offset, t1, t2, fbb,
379 &elements);
380 break;
381 case flatbuffers::ElementaryType::ET_UINT:
Austin Schuh7c75e582020-11-14 16:41:18 -0800382 if (!type_code.is_repeating) continue;
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700383 AddVector<uint32_t>(elementary_type, field_offset, t1, t2, fbb,
384 &elements);
385 break;
386 case flatbuffers::ElementaryType::ET_LONG:
Austin Schuh7c75e582020-11-14 16:41:18 -0800387 if (!type_code.is_repeating) continue;
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700388 AddVector<int64_t>(elementary_type, field_offset, t1, t2, fbb,
389 &elements);
390 break;
391 case flatbuffers::ElementaryType::ET_ULONG:
Austin Schuh7c75e582020-11-14 16:41:18 -0800392 if (!type_code.is_repeating) continue;
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700393 AddVector<uint64_t>(elementary_type, field_offset, t1, t2, fbb,
394 &elements);
395 break;
396 case flatbuffers::ElementaryType::ET_FLOAT:
Austin Schuh7c75e582020-11-14 16:41:18 -0800397 if (!type_code.is_repeating) continue;
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700398 AddVector<float>(elementary_type, field_offset, t1, t2, fbb, &elements);
399 break;
400 case flatbuffers::ElementaryType::ET_DOUBLE:
Austin Schuh7c75e582020-11-14 16:41:18 -0800401 if (!type_code.is_repeating) continue;
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700402 AddVector<double>(elementary_type, field_offset, t1, t2, fbb,
403 &elements);
404 break;
405 case flatbuffers::ElementaryType::ET_STRING:
Austin Schuh7c75e582020-11-14 16:41:18 -0800406 if (!type_code.is_repeating) {
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700407 MergeString(field_offset, t1, t2, fbb, &elements);
408 } else {
409 AddVectorOfStrings(elementary_type, field_offset, t1, t2, fbb,
410 &elements);
411 }
412 break;
413 case flatbuffers::ElementaryType::ET_SEQUENCE: {
414 const flatbuffers::TypeTable *sub_typetable =
415 typetable->type_refs[type_code.sequence_ref]();
Austin Schuh7c75e582020-11-14 16:41:18 -0800416 if (!type_code.is_repeating) {
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700417 MergeTables(field_offset, t1, t2, sub_typetable, fbb, &elements);
418 } else {
419 const flatbuffers::TypeTable *sub_typetable =
420 typetable->type_refs[type_code.sequence_ref]();
421
422 AddVectorOfObjects(fbb, &elements, elementary_type, sub_typetable,
423 field_offset, t1, t2);
424 }
425 } break;
426 }
427 }
428
429 const flatbuffers::uoffset_t start = fbb->StartTable();
430
431 // We want to do this the same way as the json library. Rip through the
432 // fields and generate a list of things to add. Then add them.
433 // Also need recursion for subtypes.
434 for (size_t field_index = 0; field_index < typetable->num_elems;
435 ++field_index) {
436 const flatbuffers::TypeCode type_code = typetable->type_codes[field_index];
Austin Schuh7c75e582020-11-14 16:41:18 -0800437 if (type_code.is_repeating) {
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700438 continue;
439 }
440 const flatbuffers::ElementaryType elementary_type =
441 static_cast<flatbuffers::ElementaryType>(type_code.base_type);
442
443 const flatbuffers::voffset_t field_offset = flatbuffers::FieldIndexToOffset(
444 static_cast<flatbuffers::voffset_t>(field_index));
445
446 switch (elementary_type) {
447 case flatbuffers::ElementaryType::ET_UTYPE:
448 // TODO(austin): Need to see one and try it.
449 printf("ET_UTYPE, %s\n", typetable->names[field_index]);
450 break;
451 case flatbuffers::ElementaryType::ET_BOOL: {
452 MergeElement<uint8_t>(field_offset, t1, t2, fbb);
453 } break;
454 case flatbuffers::ElementaryType::ET_CHAR:
455 MergeElement<int8_t>(field_offset, t1, t2, fbb);
456 break;
457 case flatbuffers::ElementaryType::ET_UCHAR:
458 MergeElement<uint8_t>(field_offset, t1, t2, fbb);
459 break;
460 case flatbuffers::ElementaryType::ET_SHORT:
461 MergeElement<int16_t>(field_offset, t1, t2, fbb);
462 break;
463 case flatbuffers::ElementaryType::ET_USHORT:
464 MergeElement<uint16_t>(field_offset, t1, t2, fbb);
465 break;
466 case flatbuffers::ElementaryType::ET_INT:
467 MergeElement<int32_t>(field_offset, t1, t2, fbb);
468 break;
469 case flatbuffers::ElementaryType::ET_UINT:
470 MergeElement<uint32_t>(field_offset, t1, t2, fbb);
471 break;
472 case flatbuffers::ElementaryType::ET_LONG:
473 MergeElement<int64_t>(field_offset, t1, t2, fbb);
474 break;
475 case flatbuffers::ElementaryType::ET_ULONG:
476 MergeElement<uint64_t>(field_offset, t1, t2, fbb);
477 break;
478 case flatbuffers::ElementaryType::ET_FLOAT:
479 MergeElement<float>(field_offset, t1, t2, fbb);
480 break;
481 case flatbuffers::ElementaryType::ET_DOUBLE:
482 MergeElement<double>(field_offset, t1, t2, fbb);
483 break;
484 case flatbuffers::ElementaryType::ET_STRING:
485 case flatbuffers::ElementaryType::ET_SEQUENCE:
486 // Already handled above since this is an uoffset.
487 break;
488 }
489 }
490
491 // And there is no need to check for duplicates since we are creating this
492 // list very carefully from the type table.
493 for (const OffsetAndFieldOffset &element : elements) {
494 fbb->AddOffset(element.field_offset, element.element);
495 }
496
497 return fbb->EndTable(start);
498}
499
Austin Schuh30d7db92020-01-26 16:45:47 -0800500bool CompareFlatBuffer(const flatbuffers::TypeTable *typetable,
501 const flatbuffers::Table *t1,
502 const flatbuffers::Table *t2) {
503 // Copying flatbuffers is deterministic for the same typetable. So, copy both
504 // to guarantee that they are sorted the same, then check that the memory
505 // matches.
506 //
507 // There has to be a better way to do this, but the efficiency hit of this
508 // implementation is fine for the usages that we have now. We are better off
509 // abstracting this into a library call where we can fix it later easily.
510 flatbuffers::FlatBufferBuilder fbb1;
Austin Schuhd7b15da2020-02-17 15:06:11 -0800511 fbb1.ForceDefaults(true);
Austin Schuh30d7db92020-01-26 16:45:47 -0800512 fbb1.Finish(MergeFlatBuffers(typetable, t1, nullptr, &fbb1));
513 flatbuffers::FlatBufferBuilder fbb2;
Austin Schuhd7b15da2020-02-17 15:06:11 -0800514 fbb2.ForceDefaults(true);
Austin Schuh30d7db92020-01-26 16:45:47 -0800515 fbb2.Finish(MergeFlatBuffers(typetable, t2, nullptr, &fbb2));
516
517 if (fbb1.GetSize() != fbb2.GetSize()) return false;
518
519 return memcmp(fbb1.GetBufferPointer(), fbb2.GetBufferPointer(),
520 fbb1.GetSize()) == 0;
521}
522
Austin Schuha4fc60f2020-11-01 23:06:47 -0800523// Struct to track a range of memory.
524struct Bounds {
525 const uint8_t *min;
526 const uint8_t *max;
527
528 absl::Span<const uint8_t> span() {
529 return {min, static_cast<size_t>(max - min)};
530 }
531};
532
533// Grows the range of memory to contain the pointer.
534void Extend(Bounds *b, const uint8_t *ptr) {
535 b->min = std::min(ptr, b->min);
536 b->max = std::max(ptr, b->max);
537}
538
539// Grows the range of memory to contain the span.
540void Extend(Bounds *b, absl::Span<const uint8_t> data) {
541 b->min = std::min(data.data(), b->min);
542 b->max = std::max(data.data() + data.size(), b->max);
543}
544
545// Finds the extents of the provided string. Returns the containing span and
546// required alignment.
547std::pair<absl::Span<const uint8_t>, size_t> ExtentsString(
548 const flatbuffers::String *s) {
549 const uint8_t *s_uint8 = reinterpret_cast<const uint8_t *>(s);
550 // Strings are null terminated.
551 Bounds b{.min = s_uint8,
552 .max = s_uint8 + sizeof(flatbuffers::uoffset_t) + s->size() + 1};
553 return std::make_pair(b.span(), sizeof(flatbuffers::uoffset_t));
554}
555
// Finds the extents of the provided table.  Returns the containing span and
// the required alignment.  Walks the table's vtable, every populated field,
// and recursively every reachable string and sub-table, growing a Bounds
// range to cover them all.  The returned span is widened at both ends until
// it is aligned to the computed alignment.
std::pair<absl::Span<const uint8_t>, size_t> ExtentsTable(
    const flatbuffers::TypeTable *type_table, const flatbuffers::Table *t1) {
  const uint8_t *t1_uint8 = reinterpret_cast<const uint8_t *>(t1);
  // Count the offset to the vtable.
  Bounds b{.min = t1_uint8, .max = t1_uint8 + sizeof(flatbuffers::soffset_t)};
  // Find the limits of the vtable and start of table.
  const uint8_t *vt = t1->GetVTable();
  Extend(&b, vt);
  // The first voffset_t in a vtable is the vtable's own size in bytes.
  Extend(&b, vt + flatbuffers::ReadScalar<flatbuffers::voffset_t>(vt));
  // We need to be at least as aligned as the vtable pointer.  Start there.
  size_t alignment = sizeof(flatbuffers::uoffset_t);

  // Now do all our fields.
  for (size_t field_index = 0; field_index < type_table->num_elems;
       ++field_index) {
    const flatbuffers::TypeCode type_code = type_table->type_codes[field_index];
    const flatbuffers::ElementaryType elementary_type =
        static_cast<flatbuffers::ElementaryType>(type_code.base_type);
    // sequence_ref indexes into type_refs for table-typed fields; negative
    // means the field is not a sequence type.
    const flatbuffers::TypeTable *field_type_table =
        type_code.sequence_ref >= 0
            ? type_table->type_refs[type_code.sequence_ref]()
            : nullptr;

    // Note: we don't yet support enums, structs, or unions.  That is mostly
    // because we haven't had a use case yet.

    // Compute the pointer to our field.
    const uint8_t *val = nullptr;
    if (type_table->st == flatbuffers::ST_TABLE) {
      val = t1->GetAddressOf(flatbuffers::FieldIndexToOffset(
          static_cast<flatbuffers::voffset_t>(field_index)));
      // Bail on non-populated fields.
      if (val == nullptr) continue;
    } else {
      // Structs have fixed layout: field positions come from the values
      // array instead of the vtable.
      val = t1_uint8 + type_table->values[field_index];
    }

    // Now make sure the field is aligned properly.  Alignment is capped at
    // the largest scalar size, matching flatbuffer serialization rules.
    const size_t field_size =
        flatbuffers::InlineSize(elementary_type, field_type_table);
    alignment = std::max(
        alignment, std::min(sizeof(flatbuffers::largest_scalar_t), field_size));

    absl::Span<const uint8_t> field_span(val, field_size);

    Extend(&b, field_span);

    if (type_code.is_repeating) {
      // Go look inside the vector and track the base size.
      val += flatbuffers::ReadScalar<flatbuffers::uoffset_t>(val);
      const flatbuffers::Vector<uint8_t> *vec =
          reinterpret_cast<const flatbuffers::Vector<uint8_t> *>(val);
      // Vector payload: a uoffset_t element count followed by the inline
      // elements (scalars, or uoffsets for strings/tables).
      absl::Span<const uint8_t> vec_span(
          val, sizeof(flatbuffers::uoffset_t) +
                   vec->size() * flatbuffers::InlineSize(elementary_type,
                                                         field_type_table));
      Extend(&b, vec_span);
      // Non-scalar vectors need their pointers followed.
      if (elementary_type == flatbuffers::ElementaryType::ET_STRING) {
        for (size_t i = 0; i < vec->size(); ++i) {
          const uint8_t *field_ptr =
              vec->Data() + i * InlineSize(elementary_type, field_type_table);
          std::pair<absl::Span<const uint8_t>, size_t> str_data =
              ExtentsString(reinterpret_cast<const flatbuffers::String *>(
                  field_ptr +
                  flatbuffers::ReadScalar<flatbuffers::uoffset_t>(field_ptr)));
          Extend(&b, str_data.first);
          alignment = std::max(alignment, str_data.second);
        }
      } else if (elementary_type == flatbuffers::ElementaryType::ET_SEQUENCE) {
        for (size_t i = 0; i < vec->size(); ++i) {
          const uint8_t *field_ptr =
              vec->Data() + i * InlineSize(elementary_type, field_type_table);
          CHECK(type_table->st == flatbuffers::ST_TABLE)
              << ": Only tables are supported right now. Patches welcome.";

          // Recurse into each sub-table, folding in its span and alignment.
          std::pair<absl::Span<const uint8_t>, size_t> sub_data = ExtentsTable(
              field_type_table,
              reinterpret_cast<const flatbuffers::Table *>(
                  field_ptr +
                  flatbuffers::ReadScalar<flatbuffers::uoffset_t>(field_ptr)));
          alignment = std::max(alignment, sub_data.second);
          Extend(&b, sub_data.first);
        }
      }

      continue;
    }

    switch (elementary_type) {
      case flatbuffers::ElementaryType::ET_UTYPE:
      case flatbuffers::ElementaryType::ET_BOOL:
      case flatbuffers::ElementaryType::ET_CHAR:
      case flatbuffers::ElementaryType::ET_UCHAR:
      case flatbuffers::ElementaryType::ET_SHORT:
      case flatbuffers::ElementaryType::ET_USHORT:
      case flatbuffers::ElementaryType::ET_INT:
      case flatbuffers::ElementaryType::ET_UINT:
      case flatbuffers::ElementaryType::ET_LONG:
      case flatbuffers::ElementaryType::ET_ULONG:
      case flatbuffers::ElementaryType::ET_FLOAT:
      case flatbuffers::ElementaryType::ET_DOUBLE:
        // This is covered by the field and size above.
        break;
      case flatbuffers::ElementaryType::ET_STRING: {
        // Follow the indirect uoffset to the string data.
        std::pair<absl::Span<const uint8_t>, size_t> str_data =
            ExtentsString(reinterpret_cast<const flatbuffers::String *>(
                val + flatbuffers::ReadScalar<flatbuffers::uoffset_t>(val)));
        alignment = std::max(alignment, str_data.second);
        Extend(&b, str_data.first);
      } break;
      case flatbuffers::ElementaryType::ET_SEQUENCE: {
        switch (type_table->st) {
          case flatbuffers::ST_TABLE: {
            // Follow the indirect uoffset to the sub-table and recurse.
            const flatbuffers::Table *sub_table =
                reinterpret_cast<const flatbuffers::Table *>(
                    val + flatbuffers::ReadScalar<flatbuffers::uoffset_t>(val));
            std::pair<absl::Span<const uint8_t>, size_t> sub_data =
                ExtentsTable(field_type_table, sub_table);
            alignment = std::max(alignment, sub_data.second);
            Extend(&b, sub_data.first);
          } break;
          case flatbuffers::ST_ENUM:
            LOG(FATAL) << "Copying enums not implemented yet";
          case flatbuffers::ST_STRUCT:
            LOG(FATAL) << "Copying structs not implemented yet";
          case flatbuffers::ST_UNION:
            LOG(FATAL) << "Copying unions not implemented yet";
        }
      }
    }
  }

  // To be a parsable flatbuffer, the flatbuffer needs to be aligned up to the
  // maximum internal alignment.  Both in length and starting point.  We know
  // that for this to be actually true, the start and end pointers will need to
  // be aligned to the required alignment.
  CHECK((alignment & (alignment - 1)) == 0)
      << ": Invalid alignment: " << alignment << ", needs to be a power of 2.";
  while (reinterpret_cast<uintptr_t>(b.min) & (alignment - 1)) {
    --b.min;
  }
  while (reinterpret_cast<uintptr_t>(b.max) & (alignment - 1)) {
    ++b.max;
  }

  return std::make_pair(b.span(), alignment);
}
706
707// Computes the offset, containing span, and alignment of the provided
708// flatbuffer.
709std::tuple<flatbuffers::Offset<flatbuffers::Table>, absl::Span<const uint8_t>,
710 size_t>
711Extents(const flatbuffers::TypeTable *type_table,
712 const flatbuffers::Table *t1) {
713 std::pair<absl::Span<const uint8_t>, size_t> data =
714 ExtentsTable(type_table, t1);
715
716 return std::make_tuple(flatbuffers::Offset<flatbuffers::Table>(
717 static_cast<flatbuffers::uoffset_t>(
718 data.first.data() + data.first.size() -
719 reinterpret_cast<const uint8_t *>(t1))),
720 data.first, data.second);
721}
722
723flatbuffers::Offset<flatbuffers::Table> CopyFlatBuffer(
724 const flatbuffers::Table *t1, const flatbuffers::TypeTable *typetable,
725 flatbuffers::FlatBufferBuilder *fbb) {
726 std::tuple<flatbuffers::Offset<flatbuffers::Table>, absl::Span<const uint8_t>,
727 size_t>
728 r = Extents(typetable, t1);
729
730 // Pad out enough so that the flatbuffer alignment is preserved.
731 fbb->Align(std::get<2>(r));
732
733 // Now push everything we found. And offsets are tracked from the end of the
734 // buffer while building, so recompute the offset returned from the back.
735 fbb->PushBytes(std::get<1>(r).data(), std::get<1>(r).size());
736 return fbb->GetSize() + std::get<0>(r).o - std::get<1>(r).size();
737}
738
Austin Schuh09d7ffa2019-10-03 23:43:34 -0700739} // namespace aos