/*
 * Copyright 2015 Google Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "flatbuffers/reflection.h"

#include "flatbuffers/util.h"

// Helper functionality for reflection.

namespace flatbuffers {

namespace {

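// Copies the bytes of an inline-stored field (a scalar or struct) from the
// source table into the buffer under construction, preserving alignment.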
static void CopyInline(FlatBufferBuilder &fbb, const reflection::Field &fielddef,
                       const Table &table, size_t align, size_t size) {
  fbb.Align(align);
  fbb.PushBytes(table.GetStruct<const uint8_t *>(fielddef.offset()), size);
  fbb.TrackField(fielddef.offset(), fbb.GetSize());
}

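// Verifies that a struct field lies within the buffer and is properly
// aligned. Fails if the field is required but absent.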
static bool VerifyStruct(flatbuffers::Verifier &v,
                         const flatbuffers::Table &parent_table,
                         voffset_t field_offset, const reflection::Object &obj,
                         bool required) {
  auto offset = parent_table.GetOptionalFieldOffset(field_offset);
  if (required && !offset) { return false; }

  return !offset ||
         v.VerifyFieldStruct(reinterpret_cast<const uint8_t *>(&parent_table),
                             offset, obj.bytesize(), obj.minalign());
}

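// Verifies a vector-of-structs field by bounds-checking the vector header
// and its inline payload of obj.bytesize()-sized elements.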
static bool VerifyVectorOfStructs(flatbuffers::Verifier &v,
                                  const flatbuffers::Table &parent_table,
                                  voffset_t field_offset,
                                  const reflection::Object &obj, bool required) {
  auto p = parent_table.GetPointer<const uint8_t *>(field_offset);
  if (required && !p) { return false; }

  return !p || v.VerifyVectorOrString(p, obj.bytesize());
}

// Forward declare to resolve the cyclic dependency between VerifyObject and
// VerifyVector.
static bool VerifyObject(flatbuffers::Verifier &v, const reflection::Schema &schema,
                         const reflection::Object &obj,
                         const flatbuffers::Table *table, bool required);

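// Verifies a single union value against the member type selected by "utype"
// in the schema's enum for this field.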
static bool VerifyUnion(flatbuffers::Verifier &v, const reflection::Schema &schema,
                        uint8_t utype, const uint8_t *elem,
                        const reflection::Field &union_field) {
  if (!utype) return true; // Not present.
  auto fb_enum = schema.enums()->Get(union_field.type()->index());
  if (utype >= fb_enum->values()->size()) return false;
  auto elem_type = fb_enum->values()->Get(utype)->union_type();
  switch (elem_type->base_type()) {
    case reflection::BaseType::Obj: {
      auto elem_obj = schema.objects()->Get(elem_type->index());
      if (elem_obj->is_struct()) {
        return v.VerifyFromPointer(elem, elem_obj->bytesize());
      } else {
        return VerifyObject(v, schema, *elem_obj,
                            reinterpret_cast<const flatbuffers::Table *>(elem),
                            true);
      }
    }
    case reflection::BaseType::String:
      return v.VerifyString(
          reinterpret_cast<const flatbuffers::String *>(elem));
    default: return false;
  }
}

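// Verifies a vector field: scalar element types are bounds-checked as a
// block, while string, table and union elements are verified individually.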
static bool VerifyVector(flatbuffers::Verifier &v, const reflection::Schema &schema,
                         const flatbuffers::Table &table,
                         const reflection::Field &vec_field) {
  FLATBUFFERS_ASSERT(vec_field.type()->base_type() == reflection::BaseType::Vector);
  if (!table.VerifyField<uoffset_t>(v, vec_field.offset(), sizeof(uoffset_t)))
    return false;

  switch (vec_field.type()->element()) {
    case reflection::BaseType::UType:
      return v.VerifyVector(flatbuffers::GetFieldV<uint8_t>(table, vec_field));
    case reflection::BaseType::Bool:
    case reflection::BaseType::Byte:
    case reflection::BaseType::UByte:
      return v.VerifyVector(flatbuffers::GetFieldV<int8_t>(table, vec_field));
    case reflection::BaseType::Short:
    case reflection::BaseType::UShort:
      return v.VerifyVector(flatbuffers::GetFieldV<int16_t>(table, vec_field));
    case reflection::BaseType::Int:
    case reflection::BaseType::UInt:
      return v.VerifyVector(flatbuffers::GetFieldV<int32_t>(table, vec_field));
    case reflection::BaseType::Long:
    case reflection::BaseType::ULong:
      return v.VerifyVector(flatbuffers::GetFieldV<int64_t>(table, vec_field));
    case reflection::BaseType::Float:
      return v.VerifyVector(flatbuffers::GetFieldV<float>(table, vec_field));
    case reflection::BaseType::Double:
      return v.VerifyVector(flatbuffers::GetFieldV<double>(table, vec_field));
    case reflection::BaseType::String: {
      auto vec_string =
          flatbuffers::GetFieldV<flatbuffers::Offset<flatbuffers::String>>(
              table, vec_field);
      return v.VerifyVector(vec_string) && v.VerifyVectorOfStrings(vec_string);
    }
    case reflection::BaseType::Obj: {
      auto obj = schema.objects()->Get(vec_field.type()->index());
      if (obj->is_struct()) {
        return VerifyVectorOfStructs(v, table, vec_field.offset(), *obj,
                                     vec_field.required());
      } else {
        auto vec =
            flatbuffers::GetFieldV<flatbuffers::Offset<flatbuffers::Table>>(
                table, vec_field);
        if (!v.VerifyVector(vec)) return false;
        if (!vec) return true;
        for (uoffset_t j = 0; j < vec->size(); j++) {
          if (!VerifyObject(v, schema, *obj, vec->Get(j), true)) {
            return false;
          }
        }
        return true;
      }
    }
    case reflection::BaseType::Union: {
      auto vec = flatbuffers::GetFieldV<flatbuffers::Offset<uint8_t>>(
          table, vec_field);
      if (!v.VerifyVector(vec)) return false;
      if (!vec) return true;
      auto type_vec = table.GetPointer<Vector<uint8_t> *>(vec_field.offset() -
                                                          sizeof(voffset_t));
      if (!v.VerifyVector(type_vec)) return false;
      for (uoffset_t j = 0; j < vec->size(); j++) {
        // Get the union element's type from the preceding type vector.
        auto utype = type_vec->Get(j);
        auto elem = vec->Get(j);
        if (!VerifyUnion(v, schema, utype, elem, vec_field)) return false;
      }
      return true;
    }
    case reflection::BaseType::Vector:
    case reflection::BaseType::None:
    default: FLATBUFFERS_ASSERT(false); return false;
  }
}

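// Recursively verifies a table against its schema definition, checking every
// field including subobjects, vectors and unions.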
static bool VerifyObject(flatbuffers::Verifier &v, const reflection::Schema &schema,
                         const reflection::Object &obj,
                         const flatbuffers::Table *table, bool required) {
  if (!table) return !required;
  if (!table->VerifyTableStart(v)) return false;
  for (uoffset_t i = 0; i < obj.fields()->size(); i++) {
    auto field_def = obj.fields()->Get(i);
    switch (field_def->type()->base_type()) {
      case reflection::BaseType::None: FLATBUFFERS_ASSERT(false); break;
      case reflection::BaseType::UType:
        if (!table->VerifyField<uint8_t>(v, field_def->offset(),
                                         sizeof(uint8_t)))
          return false;
        break;
      case reflection::BaseType::Bool:
      case reflection::BaseType::Byte:
      case reflection::BaseType::UByte:
        if (!table->VerifyField<int8_t>(v, field_def->offset(), sizeof(int8_t)))
          return false;
        break;
      case reflection::BaseType::Short:
      case reflection::BaseType::UShort:
        if (!table->VerifyField<int16_t>(v, field_def->offset(),
                                         sizeof(int16_t)))
          return false;
        break;
      case reflection::BaseType::Int:
      case reflection::BaseType::UInt:
        if (!table->VerifyField<int32_t>(v, field_def->offset(),
                                         sizeof(int32_t)))
          return false;
        break;
      case reflection::BaseType::Long:
      case reflection::BaseType::ULong:
        if (!table->VerifyField<int64_t>(v, field_def->offset(),
                                         sizeof(int64_t)))
          return false;
        break;
      case reflection::BaseType::Float:
        if (!table->VerifyField<float>(v, field_def->offset(), sizeof(float)))
          return false;
        break;
      case reflection::BaseType::Double:
        if (!table->VerifyField<double>(v, field_def->offset(), sizeof(double)))
          return false;
        break;
      case reflection::BaseType::String:
        if (!table->VerifyField<uoffset_t>(v, field_def->offset(),
                                           sizeof(uoffset_t)) ||
            !v.VerifyString(flatbuffers::GetFieldS(*table, *field_def))) {
          return false;
        }
        break;
      case reflection::BaseType::Vector:
        if (!VerifyVector(v, schema, *table, *field_def)) return false;
        break;
      case reflection::BaseType::Obj: {
        auto child_obj = schema.objects()->Get(field_def->type()->index());
        if (child_obj->is_struct()) {
          if (!VerifyStruct(v, *table, field_def->offset(), *child_obj,
                            field_def->required())) {
            return false;
          }
        } else {
          if (!VerifyObject(v, schema, *child_obj,
                            flatbuffers::GetFieldT(*table, *field_def),
                            field_def->required())) {
            return false;
          }
        }
        break;
      }
      case reflection::BaseType::Union: {
        // Get the union type from the preceding type field.
        voffset_t utype_offset = field_def->offset() - sizeof(voffset_t);
        auto utype = table->GetField<uint8_t>(utype_offset, 0);
        auto uval = reinterpret_cast<const uint8_t *>(
            flatbuffers::GetFieldT(*table, *field_def));
        if (!VerifyUnion(v, schema, utype, uval, *field_def)) { return false; }
        break;
      }
      default: FLATBUFFERS_ASSERT(false); break;
    }
  }

  if (!v.EndTable()) return false;

  return true;
}

} // namespace

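// Reads any scalar (or numeric string) at "data" as an int64_t, based on the
// reflected base type. Returns 0 for types without a numeric interpretation.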
int64_t GetAnyValueI(reflection::BaseType type, const uint8_t *data) {
  // clang-format off
  #define FLATBUFFERS_GET(T) static_cast<int64_t>(ReadScalar<T>(data))
  switch (type) {
    case reflection::BaseType::UType:
    case reflection::BaseType::Bool:
    case reflection::BaseType::UByte: return FLATBUFFERS_GET(uint8_t);
    case reflection::BaseType::Byte: return FLATBUFFERS_GET(int8_t);
    case reflection::BaseType::Short: return FLATBUFFERS_GET(int16_t);
    case reflection::BaseType::UShort: return FLATBUFFERS_GET(uint16_t);
    case reflection::BaseType::Int: return FLATBUFFERS_GET(int32_t);
    case reflection::BaseType::UInt: return FLATBUFFERS_GET(uint32_t);
    case reflection::BaseType::Long: return FLATBUFFERS_GET(int64_t);
    case reflection::BaseType::ULong: return FLATBUFFERS_GET(uint64_t);
    case reflection::BaseType::Float: return FLATBUFFERS_GET(float);
    case reflection::BaseType::Double: return FLATBUFFERS_GET(double);
    case reflection::BaseType::String: {
      auto s = reinterpret_cast<const String *>(ReadScalar<uoffset_t>(data) +
                                                data);
      return s ? StringToInt(s->c_str()) : 0;
    }
    default: return 0; // Tables & vectors do not make sense.
  }
  #undef FLATBUFFERS_GET
  // clang-format on
}

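// Reads any scalar (or numeric string) at "data" as a double.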
double GetAnyValueF(reflection::BaseType type, const uint8_t *data) {
  switch (type) {
    case reflection::BaseType::Float:
      return static_cast<double>(ReadScalar<float>(data));
    case reflection::BaseType::Double: return ReadScalar<double>(data);
    case reflection::BaseType::String: {
      auto s =
          reinterpret_cast<const String *>(ReadScalar<uoffset_t>(data) + data);
      if (s) {
        double d;
        StringToNumber(s->c_str(), &d);
        return d;
      } else {
        return 0.0;
      }
    }
    default: return static_cast<double>(GetAnyValueI(type, data));
  }
}

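// Converts any value to a string. Tables are rendered in a debug format that
// is NOT guaranteed to be valid JSON; "schema" is only needed for Obj types.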
std::string GetAnyValueS(reflection::BaseType type, const uint8_t *data,
                         const reflection::Schema *schema, int type_index) {
  switch (type) {
    case reflection::BaseType::Float:
    case reflection::BaseType::Double:
      return NumToString(GetAnyValueF(type, data));
    case reflection::BaseType::String: {
      auto s =
          reinterpret_cast<const String *>(ReadScalar<uoffset_t>(data) + data);
      return s ? s->c_str() : "";
    }
    case reflection::BaseType::Obj:
      if (schema) {
        // Convert the table to a string. This is mostly for debugging purposes,
        // and does NOT promise to be JSON compliant.
        // Also prefixes the type.
        auto &objectdef = *schema->objects()->Get(type_index);
        auto s = objectdef.name()->str();
        if (objectdef.is_struct()) {
          s += "(struct)"; // TODO: implement this as well.
        } else {
          auto table_field = reinterpret_cast<const Table *>(
              ReadScalar<uoffset_t>(data) + data);
          s += " { ";
          auto fielddefs = objectdef.fields();
          for (auto it = fielddefs->begin(); it != fielddefs->end(); ++it) {
            auto &fielddef = **it;
            if (!table_field->CheckField(fielddef.offset())) continue;
            auto val = GetAnyFieldS(*table_field, fielddef, schema);
            if (fielddef.type()->base_type() == reflection::BaseType::String) {
              std::string esc;
              flatbuffers::EscapeString(val.c_str(), val.length(), &esc, true,
                                        false);
              val = esc;
            }
            s += fielddef.name()->str();
            s += ": ";
            s += val;
            s += ", ";
          }
          s += "}";
        }
        return s;
      } else {
        return "(table)";
      }
    case reflection::BaseType::Vector:
      return "[(elements)]"; // TODO: implement this as well.
    case reflection::BaseType::Union: return "(union)"; // TODO: implement this as well.
    default: return NumToString(GetAnyValueI(type, data));
  }
}

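// Calls "func" once per field, iterating in field-ID order (or reverse ID
// order) rather than the order of object->fields(), which reflection stores
// sorted by name.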
void ForAllFields(const reflection::Object *object, bool reverse,
                  std::function<void(const reflection::Field *)> func) {
  std::vector<uint32_t> field_to_id_map;
  field_to_id_map.resize(object->fields()->size());

  // Create the mapping of field ID to the index into the vector.
  for (uint32_t i = 0; i < object->fields()->size(); ++i) {
    auto field = object->fields()->Get(i);
    field_to_id_map[field->id()] = i;
  }

  for (size_t i = 0; i < field_to_id_map.size(); ++i) {
    func(object->fields()->Get(
        field_to_id_map[reverse ? field_to_id_map.size() - i - 1 : i]));
  }
}

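// Writes an integer value to "data", cast to the width of the target scalar
// type; string targets are not yet supported.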
void SetAnyValueI(reflection::BaseType type, uint8_t *data, int64_t val) {
  // clang-format off
  #define FLATBUFFERS_SET(T) WriteScalar(data, static_cast<T>(val))
  switch (type) {
    case reflection::BaseType::UType:
    case reflection::BaseType::Bool:
    case reflection::BaseType::UByte: FLATBUFFERS_SET(uint8_t ); break;
    case reflection::BaseType::Byte: FLATBUFFERS_SET(int8_t ); break;
    case reflection::BaseType::Short: FLATBUFFERS_SET(int16_t ); break;
    case reflection::BaseType::UShort: FLATBUFFERS_SET(uint16_t); break;
    case reflection::BaseType::Int: FLATBUFFERS_SET(int32_t ); break;
    case reflection::BaseType::UInt: FLATBUFFERS_SET(uint32_t); break;
    case reflection::BaseType::Long: FLATBUFFERS_SET(int64_t ); break;
    case reflection::BaseType::ULong: FLATBUFFERS_SET(uint64_t); break;
    case reflection::BaseType::Float: FLATBUFFERS_SET(float ); break;
    case reflection::BaseType::Double: FLATBUFFERS_SET(double ); break;
    // TODO: support strings
    default: break;
  }
  #undef FLATBUFFERS_SET
  // clang-format on
}

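// Writes a floating point value, delegating to SetAnyValueI for integer
// target types.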
void SetAnyValueF(reflection::BaseType type, uint8_t *data, double val) {
  switch (type) {
    case reflection::BaseType::Float: WriteScalar(data, static_cast<float>(val)); break;
    case reflection::BaseType::Double: WriteScalar(data, val); break;
    // TODO: support strings.
    default: SetAnyValueI(type, data, static_cast<int64_t>(val)); break;
  }
}

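// Parses "val" as a number and writes it with the appropriate setter for the
// target type; string targets are not yet supported.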
void SetAnyValueS(reflection::BaseType type, uint8_t *data, const char *val) {
  switch (type) {
    case reflection::BaseType::Float:
    case reflection::BaseType::Double: {
      double d;
      StringToNumber(val, &d);
      SetAnyValueF(type, data, d);
      break;
    }
    // TODO: support strings.
    default: SetAnyValueI(type, data, StringToInt(val)); break;
  }
}

// Resize a FlatBuffer in-place by iterating through all offsets in the buffer
// and adjusting them by "delta" if they straddle the start offset.
// Once that is done, bytes can now be inserted/deleted safely.
// "delta" may be negative (shrinking).
// Unless "delta" is a multiple of the largest alignment, you'll create a small
// amount of garbage space in the buffer (usually 0..7 bytes).
// If your FlatBuffer's root table is not the schema's root table, you should
// pass in your root_table type as well.
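// For example (illustrative numbers): growing a 5-byte string to 10 bytes
// gives delta = 5, which the constructor rounds up to 8 (a multiple of
// largest_scalar_t), leaving 3 bytes of garbage padding in the buffer.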
class ResizeContext {
 public:
  ResizeContext(const reflection::Schema &schema, uoffset_t start, int delta,
                std::vector<uint8_t> *flatbuf,
                const reflection::Object *root_table = nullptr)
      : schema_(schema),
        startptr_(flatbuf->data() + start),
        delta_(delta),
        buf_(*flatbuf),
        dag_check_(flatbuf->size() / sizeof(uoffset_t), false) {
    auto mask = static_cast<int>(sizeof(largest_scalar_t) - 1);
    delta_ = (delta_ + mask) & ~mask;
    if (!delta_) return; // We can't shrink by less than largest_scalar_t.
    // Now change all the offsets by delta_.
    auto root = GetAnyRoot(buf_.data());
    Straddle<uoffset_t, 1>(buf_.data(), root, buf_.data());
    ResizeTable(root_table ? *root_table : *schema.root_table(), root);
    // We can now add or remove bytes at start.
    if (delta_ > 0)
      buf_.insert(buf_.begin() + start, delta_, 0);
    else
      buf_.erase(buf_.begin() + start + delta_, buf_.begin() + start);
  }

  // Check if the range between first (lower address) and second straddles
  // the insertion point. If it does, change the offset at offsetloc (of
  // type T, with direction D).
  template<typename T, int D>
  void Straddle(const void *first, const void *second, void *offsetloc) {
    if (first <= startptr_ && second >= startptr_) {
      WriteScalar<T>(offsetloc, ReadScalar<T>(offsetloc) + delta_ * D);
      DagCheck(offsetloc) = true;
    }
  }

  // This returns a boolean that records if the corresponding offset location
  // has been modified already. If so, we can't even read the corresponding
  // offset, since it is pointing to a location that is illegal until the
  // resize actually happens.
  // This must be checked for every offset, since we can't know which offsets
  // will straddle and which won't.
  uint8_t &DagCheck(const void *offsetloc) {
    auto dag_idx = reinterpret_cast<const uoffset_t *>(offsetloc) -
                   reinterpret_cast<const uoffset_t *>(buf_.data());
    return dag_check_[dag_idx];
  }

  void ResizeTable(const reflection::Object &objectdef, Table *table) {
    if (DagCheck(table)) return; // Table already visited.
    auto vtable = table->GetVTable();
    // Early out: since all fields inside the table must point forwards in
    // memory, if the insertion point is before the table we can stop here.
    auto tableloc = reinterpret_cast<uint8_t *>(table);
    if (startptr_ <= tableloc) {
      // Check if insertion point is between the table and a vtable that
      // precedes it. This can't happen in current construction code, but check
      // just in case we ever change the way flatbuffers are built.
      Straddle<soffset_t, -1>(vtable, table, table);
    } else {
      // Check each field.
      auto fielddefs = objectdef.fields();
      for (auto it = fielddefs->begin(); it != fielddefs->end(); ++it) {
        auto &fielddef = **it;
        auto base_type = fielddef.type()->base_type();
        // Ignore scalars.
        if (base_type <= reflection::BaseType::Double) continue;
        // Ignore fields that are not stored.
        auto offset = table->GetOptionalFieldOffset(fielddef.offset());
        if (!offset) continue;
        // Ignore structs.
        auto subobjectdef =
            base_type == reflection::BaseType::Obj
                ? schema_.objects()->Get(fielddef.type()->index())
                : nullptr;
        if (subobjectdef && subobjectdef->is_struct()) continue;
        // Get this field's offset, and read it if safe.
        auto offsetloc = tableloc + offset;
        if (DagCheck(offsetloc)) continue; // This offset already visited.
        auto ref = offsetloc + ReadScalar<uoffset_t>(offsetloc);
        Straddle<uoffset_t, 1>(offsetloc, ref, offsetloc);
        // Recurse.
        switch (base_type) {
          case reflection::BaseType::Obj: {
            ResizeTable(*subobjectdef, reinterpret_cast<Table *>(ref));
            break;
          }
          case reflection::BaseType::Vector: {
            auto elem_type = fielddef.type()->element();
            if (elem_type != reflection::BaseType::Obj &&
                elem_type != reflection::BaseType::String)
              break;
            auto vec = reinterpret_cast<Vector<uoffset_t> *>(ref);
            auto elemobjectdef =
                elem_type == reflection::BaseType::Obj
                    ? schema_.objects()->Get(fielddef.type()->index())
                    : nullptr;
            if (elemobjectdef && elemobjectdef->is_struct()) break;
            for (uoffset_t i = 0; i < vec->size(); i++) {
              auto loc = vec->Data() + i * sizeof(uoffset_t);
              if (DagCheck(loc)) continue; // This offset already visited.
              auto dest = loc + vec->Get(i);
              Straddle<uoffset_t, 1>(loc, dest, loc);
              if (elemobjectdef)
                ResizeTable(*elemobjectdef, reinterpret_cast<Table *>(dest));
            }
            break;
          }
          case reflection::BaseType::Union: {
            ResizeTable(GetUnionType(schema_, objectdef, fielddef, *table),
                        reinterpret_cast<Table *>(ref));
            break;
          }
          case reflection::BaseType::String: break;
          default: FLATBUFFERS_ASSERT(false);
        }
      }
      // Check if the vtable offset points beyond the insertion point.
      // Must do this last, since GetOptionalFieldOffset above still reads
      // this value.
      Straddle<soffset_t, -1>(table, vtable, table);
    }
  }

 private:
  const reflection::Schema &schema_;
  uint8_t *startptr_;
  int delta_;
  std::vector<uint8_t> &buf_;
  std::vector<uint8_t> dag_check_;
};

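// Overwrites a string in-place with "val", resizing the enclosing buffer
// through ResizeContext when the lengths differ.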
void SetString(const reflection::Schema &schema, const std::string &val,
               const String *str, std::vector<uint8_t> *flatbuf,
               const reflection::Object *root_table) {
  auto delta = static_cast<int>(val.size()) - static_cast<int>(str->size());
  auto str_start = static_cast<uoffset_t>(
      reinterpret_cast<const uint8_t *>(str) - flatbuf->data());
  auto start = str_start + static_cast<uoffset_t>(sizeof(uoffset_t));
  if (delta) {
    // Clear the old string, since we don't want parts of it remaining.
    memset(flatbuf->data() + start, 0, str->size());
    // Different size, we must expand (or contract).
    ResizeContext(schema, start, delta, flatbuf, root_table);
    // Set the new length.
    WriteScalar(flatbuf->data() + str_start,
                static_cast<uoffset_t>(val.size()));
  }
  // Copy new data. Safe because we created the right amount of space.
  memcpy(flatbuf->data() + start, val.c_str(), val.size() + 1);
}

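// Resizes the given vector in-place to "newsize" elements. Shrinking zeroes
// the discarded elements; growing zero-fills the new ones. Returns a pointer
// just past the original elements, where any added elements begin.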
uint8_t *ResizeAnyVector(const reflection::Schema &schema, uoffset_t newsize,
                         const VectorOfAny *vec, uoffset_t num_elems,
                         uoffset_t elem_size, std::vector<uint8_t> *flatbuf,
                         const reflection::Object *root_table) {
  auto delta_elem = static_cast<int>(newsize) - static_cast<int>(num_elems);
  auto delta_bytes = delta_elem * static_cast<int>(elem_size);
  auto vec_start = reinterpret_cast<const uint8_t *>(vec) - flatbuf->data();
  auto start = static_cast<uoffset_t>(vec_start) +
               static_cast<uoffset_t>(sizeof(uoffset_t)) +
               elem_size * num_elems;
  if (delta_bytes) {
    if (delta_elem < 0) {
      // Clear elements we're throwing away, since some might remain in the
      // buffer.
      auto size_clear = -delta_elem * elem_size;
      memset(flatbuf->data() + start - size_clear, 0, size_clear);
    }
    ResizeContext(schema, start, delta_bytes, flatbuf, root_table);
    WriteScalar(flatbuf->data() + vec_start, newsize); // Length field.
    // Set new elements to 0; this can be overwritten by the caller.
    if (delta_elem > 0) {
      memset(flatbuf->data() + start, 0,
             static_cast<size_t>(delta_elem) * elem_size);
    }
  }
  return flatbuf->data() + start;
}

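// Appends the contents of another FlatBuffer (minus its root offset) to this
// buffer, returning a pointer to the copied root table within "flatbuf".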
const uint8_t *AddFlatBuffer(std::vector<uint8_t> &flatbuf,
                             const uint8_t *newbuf, size_t newlen) {
  // Align to sizeof(uoffset_t) past sizeof(largest_scalar_t) since we're
  // going to chop off the root offset.
  while ((flatbuf.size() & (sizeof(uoffset_t) - 1)) ||
         !(flatbuf.size() & (sizeof(largest_scalar_t) - 1))) {
    flatbuf.push_back(0);
  }
  auto insertion_point = static_cast<uoffset_t>(flatbuf.size());
  // Insert the entire FlatBuffer minus the root pointer.
  flatbuf.insert(flatbuf.end(), newbuf + sizeof(uoffset_t), newbuf + newlen);
  auto root_offset = ReadScalar<uoffset_t>(newbuf) - sizeof(uoffset_t);
  return flatbuf.data() + insertion_point + root_offset;
}

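// Copies a table (and everything it references) into a FlatBufferBuilder
// using only reflection, by first depth-first copying all subobjects and
// then rebuilding the table from the collected offsets.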
622Offset<const Table *> CopyTable(FlatBufferBuilder &fbb,
623 const reflection::Schema &schema,
624 const reflection::Object &objectdef,
625 const Table &table, bool use_string_pooling) {
626 // Before we can construct the table, we have to first generate any
627 // subobjects, and collect their offsets.
628 std::vector<uoffset_t> offsets;
629 auto fielddefs = objectdef.fields();
630 for (auto it = fielddefs->begin(); it != fielddefs->end(); ++it) {
631 auto &fielddef = **it;
632 // Skip if field is not present in the source.
633 if (!table.CheckField(fielddef.offset())) continue;
634 uoffset_t offset = 0;
635 switch (fielddef.type()->base_type()) {
James Kuszmauldac091f2022-03-22 09:35:06 -0700636 case reflection::BaseType::String: {
Austin Schuhe89fa2d2019-08-14 20:24:23 -0700637 offset = use_string_pooling
638 ? fbb.CreateSharedString(GetFieldS(table, fielddef)).o
639 : fbb.CreateString(GetFieldS(table, fielddef)).o;
640 break;
641 }
James Kuszmauldac091f2022-03-22 09:35:06 -0700642 case reflection::BaseType::Obj: {
Austin Schuhe89fa2d2019-08-14 20:24:23 -0700643 auto &subobjectdef = *schema.objects()->Get(fielddef.type()->index());
644 if (!subobjectdef.is_struct()) {
Austin Schuh272c6132020-11-14 16:37:52 -0800645 offset = CopyTable(fbb, schema, subobjectdef,
646 *GetFieldT(table, fielddef), use_string_pooling)
647 .o;
Austin Schuhe89fa2d2019-08-14 20:24:23 -0700648 }
649 break;
650 }
James Kuszmauldac091f2022-03-22 09:35:06 -0700651 case reflection::BaseType::Union: {
Austin Schuhe89fa2d2019-08-14 20:24:23 -0700652 auto &subobjectdef = GetUnionType(schema, objectdef, fielddef, table);
Austin Schuh272c6132020-11-14 16:37:52 -0800653 offset = CopyTable(fbb, schema, subobjectdef,
654 *GetFieldT(table, fielddef), use_string_pooling)
655 .o;
Austin Schuhe89fa2d2019-08-14 20:24:23 -0700656 break;
657 }
James Kuszmauldac091f2022-03-22 09:35:06 -0700658 case reflection::BaseType::Vector: {
Austin Schuhe89fa2d2019-08-14 20:24:23 -0700659 auto vec =
660 table.GetPointer<const Vector<Offset<Table>> *>(fielddef.offset());
661 auto element_base_type = fielddef.type()->element();
662 auto elemobjectdef =
James Kuszmauldac091f2022-03-22 09:35:06 -0700663 element_base_type == reflection::BaseType::Obj
Austin Schuhe89fa2d2019-08-14 20:24:23 -0700664 ? schema.objects()->Get(fielddef.type()->index())
665 : nullptr;
666 switch (element_base_type) {
James Kuszmauldac091f2022-03-22 09:35:06 -0700667 case reflection::BaseType::String: {
Austin Schuhe89fa2d2019-08-14 20:24:23 -0700668 std::vector<Offset<const String *>> elements(vec->size());
669 auto vec_s = reinterpret_cast<const Vector<Offset<String>> *>(vec);
670 for (uoffset_t i = 0; i < vec_s->size(); i++) {
671 elements[i] = use_string_pooling
672 ? fbb.CreateSharedString(vec_s->Get(i)).o
673 : fbb.CreateString(vec_s->Get(i)).o;
674 }
675 offset = fbb.CreateVector(elements).o;
676 break;
677 }
James Kuszmauldac091f2022-03-22 09:35:06 -0700678 case reflection::BaseType::Obj: {
Austin Schuhe89fa2d2019-08-14 20:24:23 -0700679 if (!elemobjectdef->is_struct()) {
680 std::vector<Offset<const Table *>> elements(vec->size());
681 for (uoffset_t i = 0; i < vec->size(); i++) {
Austin Schuh272c6132020-11-14 16:37:52 -0800682 elements[i] = CopyTable(fbb, schema, *elemobjectdef,
683 *vec->Get(i), use_string_pooling);
Austin Schuhe89fa2d2019-08-14 20:24:23 -0700684 }
685 offset = fbb.CreateVector(elements).o;
686 break;
687 }
688 }
Austin Schuh272c6132020-11-14 16:37:52 -0800689 FLATBUFFERS_FALLTHROUGH(); // fall thru
690 default: { // Scalars and structs.
Austin Schuhe89fa2d2019-08-14 20:24:23 -0700691 auto element_size = GetTypeSize(element_base_type);
James Kuszmaul65541cb2022-11-08 14:53:47 -0800692 auto element_alignment = element_size; // For primitive elements
Austin Schuhe89fa2d2019-08-14 20:24:23 -0700693 if (elemobjectdef && elemobjectdef->is_struct())
694 element_size = elemobjectdef->bytesize();
James Kuszmaul65541cb2022-11-08 14:53:47 -0800695 fbb.StartVector(vec->size(), element_size, element_alignment);
Austin Schuhe89fa2d2019-08-14 20:24:23 -0700696 fbb.PushBytes(vec->Data(), element_size * vec->size());
697 offset = fbb.EndVector(vec->size());
698 break;
699 }
700 }
701 break;
702 }
703 default: // Scalars.
704 break;
705 }
706 if (offset) { offsets.push_back(offset); }
707 }
708 // Now we can build the actual table from either offsets or scalar data.
709 auto start = objectdef.is_struct() ? fbb.StartStruct(objectdef.minalign())
710 : fbb.StartTable();
711 size_t offset_idx = 0;
712 for (auto it = fielddefs->begin(); it != fielddefs->end(); ++it) {
713 auto &fielddef = **it;
714 if (!table.CheckField(fielddef.offset())) continue;
715 auto base_type = fielddef.type()->base_type();
716 switch (base_type) {
James Kuszmauldac091f2022-03-22 09:35:06 -0700717 case reflection::BaseType::Obj: {
Austin Schuhe89fa2d2019-08-14 20:24:23 -0700718 auto &subobjectdef = *schema.objects()->Get(fielddef.type()->index());
719 if (subobjectdef.is_struct()) {
720 CopyInline(fbb, fielddef, table, subobjectdef.minalign(),
721 subobjectdef.bytesize());
722 break;
723 }
724 }
Austin Schuh272c6132020-11-14 16:37:52 -0800725 FLATBUFFERS_FALLTHROUGH(); // fall thru
James Kuszmauldac091f2022-03-22 09:35:06 -0700726 case reflection::BaseType::Union:
727 case reflection::BaseType::String:
728 case reflection::BaseType::Vector:
Austin Schuhe89fa2d2019-08-14 20:24:23 -0700729 fbb.AddOffset(fielddef.offset(), Offset<void>(offsets[offset_idx++]));
730 break;
731 default: { // Scalars.
732 auto size = GetTypeSize(base_type);
733 CopyInline(fbb, fielddef, table, size, size);
734 break;
735 }
736 }
737 }
738 FLATBUFFERS_ASSERT(offset_idx == offsets.size());
739 if (objectdef.is_struct()) {
740 fbb.ClearOffsets();
741 return fbb.EndStruct();
742 } else {
743 return fbb.EndTable(start);
744 }
745}
746
Austin Schuhe89fa2d2019-08-14 20:24:23 -0700747
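// Verifies a buffer holding a table of type "root" using only reflection
// data, e.g. a schema loaded at runtime from a .bfbs file. A typical call
// (hypothetical; the limit values shown are illustrative) is:
//   Verify(schema, *schema.root_table(), buf, len, 64, 1000000);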
bool Verify(const reflection::Schema &schema, const reflection::Object &root,
            const uint8_t *const buf, const size_t length,
            const uoffset_t max_depth, const uoffset_t max_tables) {
  Verifier v(buf, length, max_depth, max_tables);
  return VerifyObject(v, schema, root, flatbuffers::GetAnyRoot(buf),
                      /*required=*/true);
}

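// Same as Verify, but for buffers that begin with a 32-bit size prefix.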
bool VerifySizePrefixed(const reflection::Schema &schema,
                        const reflection::Object &root,
                        const uint8_t *const buf, const size_t length,
                        const uoffset_t max_depth, const uoffset_t max_tables) {
  Verifier v(buf, length, max_depth, max_tables);
  return VerifyObject(v, schema, root, flatbuffers::GetAnySizePrefixedRoot(buf),
                      /*required=*/true);
}

} // namespace flatbuffers