blob: 2fb22204ded1bce46f8b5b887c121fb3d543c25b [file] [log] [blame]
Austin Schuhe89fa2d2019-08-14 20:24:23 -07001local N = require("flatbuffers.numTypes")
2local ba = require("flatbuffers.binaryarray")
3local compat = require("flatbuffers.compat")
4
5local m = {}
6
7local mt = {}
8
9-- get locals for faster access
10local VOffsetT = N.VOffsetT
11local UOffsetT = N.UOffsetT
12local SOffsetT = N.SOffsetT
13local Bool = N.Bool
14local Uint8 = N.Uint8
15local Uint16 = N.Uint16
16local Uint32 = N.Uint32
17local Uint64 = N.Uint64
18local Int8 = N.Int8
19local Int16 = N.Int16
20local Int32 = N.Int32
21local Int64 = N.Int64
22local Float32 = N.Float32
23local Float64 = N.Float64
24
25local MAX_BUFFER_SIZE = 0x80000000 -- 2 GB
26local VtableMetadataFields = 2
27
28local getAlignSize = compat.GetAlignSize
29
30local function vtableEqual(a, objectStart, b)
31 UOffsetT:EnforceNumber(objectStart)
32 if (#a * VOffsetT.bytewidth) ~= #b then
33 return false
34 end
35
36 for i, elem in ipairs(a) do
37 local x = string.unpack(VOffsetT.packFmt, b, 1 + (i - 1) * VOffsetT.bytewidth)
38 if x ~= 0 or elem ~= 0 then
39 local y = objectStart - elem
40 if x ~= y then
41 return false
42 end
43 end
44 end
45 return true
46end
47
48function m.New(initialSize)
49 assert(0 <= initialSize and initialSize < MAX_BUFFER_SIZE)
50 local o =
51 {
52 finished = false,
53 bytes = ba.New(initialSize),
54 nested = false,
55 head = initialSize,
56 minalign = 1,
57 vtables = {}
58 }
59 setmetatable(o, {__index = mt})
60 return o
61end
62
63function mt:Output(full)
64 assert(self.finished, "Builder Not Finished")
65 if full then
66 return self.bytes:Slice()
67 else
68 return self.bytes:Slice(self.head)
69 end
70end
71
72function mt:StartObject(numFields)
73 assert(not self.nested)
74
75 local vtable = {}
76
77 for _=1,numFields do
78 table.insert(vtable, 0)
79 end
80
81 self.currentVTable = vtable
82 self.objectEnd = self:Offset()
83 self.nested = true
84end
85
86function mt:WriteVtable()
87 self:PrependSOffsetTRelative(0)
88 local objectOffset = self:Offset()
89
90 local exisitingVTable
91 local i = #self.vtables
92 while i >= 1 do
93 if self.vtables[i] == 0 then
94 table.remove(self.vtables,i)
95 end
96 i = i - 1
97 end
98
99 i = #self.vtables
100 while i >= 1 do
101
102 local vt2Offset = self.vtables[i]
103 local vt2Start = #self.bytes - vt2Offset
104 local vt2lenstr = self.bytes:Slice(vt2Start, vt2Start+1)
105 local vt2Len = string.unpack(VOffsetT.packFmt, vt2lenstr, 1)
106
107 local metadata = VtableMetadataFields * VOffsetT.bytewidth
108 local vt2End = vt2Start + vt2Len
109 local vt2 = self.bytes:Slice(vt2Start+metadata,vt2End)
110
111 if vtableEqual(self.currentVTable, objectOffset, vt2) then
112 exisitingVTable = vt2Offset
113 break
114 end
115
116 i = i - 1
117 end
118
119 if not exisitingVTable then
120 i = #self.currentVTable
121 while i >= 1 do
122 local off = 0
123 local a = self.currentVTable[i]
124 if a and a ~= 0 then
125 off = objectOffset - a
126 end
127 self:PrependVOffsetT(off)
128
129 i = i - 1
130 end
131
132 local objectSize = objectOffset - self.objectEnd
133 self:PrependVOffsetT(objectSize)
134
135 local vBytes = #self.currentVTable + VtableMetadataFields
136 vBytes = vBytes * VOffsetT.bytewidth
137 self:PrependVOffsetT(vBytes)
138
139 local objectStart = #self.bytes - objectOffset
140 self.bytes:Set(SOffsetT:Pack(self:Offset() - objectOffset),objectStart)
141
142 table.insert(self.vtables, self:Offset())
143 else
144 local objectStart = #self.bytes - objectOffset
145 self.head = objectStart
146 self.bytes:Set(SOffsetT:Pack(exisitingVTable - objectOffset),self.head)
147 end
148
149 self.currentVTable = nil
150 return objectOffset
151end
152
153function mt:EndObject()
154 assert(self.nested)
155 self.nested = false
156 return self:WriteVtable()
157end
158
159local function growByteBuffer(self, desiredSize)
160 local s = #self.bytes
161 assert(s < MAX_BUFFER_SIZE, "Flat Buffers cannot grow buffer beyond 2 gigabytes")
162 local newsize = s
163 repeat
164 newsize = math.min(newsize * 2, MAX_BUFFER_SIZE)
165 if newsize == 0 then newsize = 1 end
166 until newsize > desiredSize
167
168 self.bytes:Grow(newsize)
169end
170
171function mt:Head()
172 return self.head
173end
174
175function mt:Offset()
176 return #self.bytes - self.head
177end
178
179function mt:Pad(n)
180 if n > 0 then
181 -- pads are 8-bit, so skip the bytewidth lookup
182 local h = self.head - n -- UInt8
183 self.head = h
184 self.bytes:Pad(n, h)
185 end
186end
187
188function mt:Prep(size, additionalBytes)
189 if size > self.minalign then
190 self.minalign = size
191 end
192
193 local h = self.head
194
195 local k = #self.bytes - h + additionalBytes
196 local alignsize = ((~k) + 1) & (size - 1) -- getAlignSize(k, size)
197
198 local desiredSize = alignsize + size + additionalBytes
199
200 while self.head < desiredSize do
201 local oldBufSize = #self.bytes
202 growByteBuffer(self, desiredSize)
203 local updatedHead = self.head + #self.bytes - oldBufSize
204 self.head = updatedHead
205 end
206
207 self:Pad(alignsize)
208end
209
210function mt:PrependSOffsetTRelative(off)
211 self:Prep(SOffsetT.bytewidth, 0)
212 assert(off <= self:Offset(), "Offset arithmetic error")
213 local off2 = self:Offset() - off + SOffsetT.bytewidth
214 self:Place(off2, SOffsetT)
215end
216
217function mt:PrependUOffsetTRelative(off)
218 self:Prep(UOffsetT.bytewidth, 0)
219 local soffset = self:Offset()
220 if off <= soffset then
221 local off2 = soffset - off + UOffsetT.bytewidth
222 self:Place(off2, UOffsetT)
223 else
224 error("Offset arithmetic error")
225 end
226end
227
228function mt:StartVector(elemSize, numElements, alignment)
229 assert(not self.nested)
230 self.nested = true
231 self:Prep(Uint32.bytewidth, elemSize * numElements)
232 self:Prep(alignment, elemSize * numElements)
233 return self:Offset()
234end
235
236function mt:EndVector(vectorNumElements)
237 assert(self.nested)
238 self.nested = false
239 self:Place(vectorNumElements, UOffsetT)
240 return self:Offset()
241end
242
243function mt:CreateString(s)
244 assert(not self.nested)
245 self.nested = true
246
247 assert(type(s) == "string")
248
249 self:Prep(UOffsetT.bytewidth, (#s + 1)*Uint8.bytewidth)
250 self:Place(0, Uint8)
251
252 local l = #s
253 self.head = self.head - l
254
255 self.bytes:Set(s, self.head, self.head + l)
256
257 return self:EndVector(#s)
258end
259
260function mt:CreateByteVector(x)
261 assert(not self.nested)
262 self.nested = true
263 self:Prep(UOffsetT.bytewidth, #x*Uint8.bytewidth)
264
265 local l = #x
266 self.head = self.head - l
267
268 self.bytes:Set(x, self.head, self.head + l)
269
270 return self:EndVector(#x)
271end
272
273function mt:Slot(slotnum)
274 assert(self.nested)
275 -- n.b. slot number is 0-based
276 self.currentVTable[slotnum + 1] = self:Offset()
277end
278
279local function finish(self, rootTable, sizePrefix)
280 UOffsetT:EnforceNumber(rootTable)
281 local prepSize = UOffsetT.bytewidth
282 if sizePrefix then
283 prepSize = prepSize + Int32.bytewidth
284 end
285
286 self:Prep(self.minalign, prepSize)
287 self:PrependUOffsetTRelative(rootTable)
288 if sizePrefix then
289 local size = #self.bytes - self.head
290 Int32:EnforceNumber(size)
291 self:PrependInt32(size)
292 end
293 self.finished = true
294 return self.head
295end
296
297function mt:Finish(rootTable)
298 return finish(self, rootTable, false)
299end
300
301function mt:FinishSizePrefixed(rootTable)
302 return finish(self, rootTable, true)
303end
304
305function mt:Prepend(flags, off)
306 self:Prep(flags.bytewidth, 0)
307 self:Place(off, flags)
308end
309
310function mt:PrependSlot(flags, o, x, d)
311 flags:EnforceNumber(x)
312 flags:EnforceNumber(d)
313 if x ~= d then
314 self:Prepend(flags, x)
315 self:Slot(o)
316 end
317end
318
319function mt:PrependBoolSlot(...) self:PrependSlot(Bool, ...) end
320function mt:PrependByteSlot(...) self:PrependSlot(Uint8, ...) end
321function mt:PrependUint8Slot(...) self:PrependSlot(Uint8, ...) end
322function mt:PrependUint16Slot(...) self:PrependSlot(Uint16, ...) end
323function mt:PrependUint32Slot(...) self:PrependSlot(Uint32, ...) end
324function mt:PrependUint64Slot(...) self:PrependSlot(Uint64, ...) end
325function mt:PrependInt8Slot(...) self:PrependSlot(Int8, ...) end
326function mt:PrependInt16Slot(...) self:PrependSlot(Int16, ...) end
327function mt:PrependInt32Slot(...) self:PrependSlot(Int32, ...) end
328function mt:PrependInt64Slot(...) self:PrependSlot(Int64, ...) end
329function mt:PrependFloat32Slot(...) self:PrependSlot(Float32, ...) end
330function mt:PrependFloat64Slot(...) self:PrependSlot(Float64, ...) end
331
332function mt:PrependUOffsetTRelativeSlot(o,x,d)
333 if x~=d then
334 self:PrependUOffsetTRelative(x)
335 self:Slot(o)
336 end
337end
338
339function mt:PrependStructSlot(v,x,d)
340 UOffsetT:EnforceNumber(d)
341 if x~=d then
342 UOffsetT:EnforceNumber(x)
343 assert(x == self:Offset(), "Tried to write a Struct at an Offset that is different from the current Offset of the Builder.")
344 self:Slot(v)
345 end
346end
347
348function mt:PrependBool(x) self:Prepend(Bool, x) end
349function mt:PrependByte(x) self:Prepend(Uint8, x) end
350function mt:PrependUint8(x) self:Prepend(Uint8, x) end
351function mt:PrependUint16(x) self:Prepend(Uint16, x) end
352function mt:PrependUint32(x) self:Prepend(Uint32, x) end
353function mt:PrependUint64(x) self:Prepend(Uint64, x) end
354function mt:PrependInt8(x) self:Prepend(Int8, x) end
355function mt:PrependInt16(x) self:Prepend(Int16, x) end
356function mt:PrependInt32(x) self:Prepend(Int32, x) end
357function mt:PrependInt64(x) self:Prepend(Int64, x) end
358function mt:PrependFloat32(x) self:Prepend(Float32, x) end
359function mt:PrependFloat64(x) self:Prepend(Float64, x) end
360function mt:PrependVOffsetT(x) self:Prepend(VOffsetT, x) end
361
362function mt:Place(x, flags)
363 local d = flags:EnforceNumberAndPack(x)
364 local h = self.head - flags.bytewidth
365 self.head = h
366 self.bytes:Set(d, h)
367end
368
369return m