local N = require("flatbuffers.numTypes")
local ba = require("flatbuffers.binaryarray")
local compat = require("flatbuffers.compat")

local m = {}

local mt = {}

-- get locals for faster access
local VOffsetT = N.VOffsetT
local UOffsetT = N.UOffsetT
local SOffsetT = N.SOffsetT
local Bool = N.Bool
local Uint8 = N.Uint8
local Uint16 = N.Uint16
local Uint32 = N.Uint32
local Uint64 = N.Uint64
local Int8 = N.Int8
local Int16 = N.Int16
local Int32 = N.Int32
local Int64 = N.Int64
local Float32 = N.Float32
local Float64 = N.Float64

local MAX_BUFFER_SIZE = 0x80000000 -- 2 GB
local VtableMetadataFields = 2

local getAlignSize = compat.GetAlignSize

-- Returns true when the in-progress vtable `a` (absolute field offsets,
-- compared relative to objectStart) matches the packed vtable bytes `b`.
local function vtableEqual(a, objectStart, b)
    UOffsetT:EnforceNumber(objectStart)
    if (#a * 2) ~= #b then
        return false
    end

    for i, elem in ipairs(a) do
        local x = string.unpack(VOffsetT.packFmt, b, 1 + (i - 1) * 2)
        if x ~= 0 or elem ~= 0 then
            local y = objectStart - elem
            if x ~= y then
                return false
            end
        end
    end
    return true
end

-- Creates a new builder backed by a binary array of `initialSize` bytes.
-- Data is written from the end of the array toward the front.
function m.New(initialSize)
    assert(0 <= initialSize and initialSize < MAX_BUFFER_SIZE)
    local o =
    {
        finished = false,
        bytes = ba.New(initialSize),
        nested = false,
        head = initialSize,
        minalign = 1,
        vtables = {}
    }
    setmetatable(o, {__index = mt})
    return o
end

-- Clears the builder and resets the state. It does not actually clear the backing binary array; it just reuses it as
-- needed. This is a performant way to use the builder for multiple constructions without the overhead of multiple
-- builder allocations.
function mt:Clear()
    self.finished = false
    self.nested = false
    self.minalign = 1
    self.currentVTable = nil
    self.objectEnd = nil
    self.head = #self.bytes -- place the head at the end of the binary array

    -- clear vtables instead of making a new table
    local vtable = self.vtables
    local vtableCount = #vtable
    for i=1,vtableCount do vtable[i] = nil end
end
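
-- A minimal usage sketch of the reuse pattern described above (kept as a
-- comment, not executed). It assumes the top-level module exposes this
-- builder as `flatbuffers.Builder`, and the `monster.*` helpers stand in for
-- whatever schema-generated code is actually in use.
--[[
local flatbuffers = require("flatbuffers")
local builder = flatbuffers.Builder(1024)

for _, hp in ipairs({10, 20, 30}) do
    monster.Start(builder)            -- hypothetical generated helper
    monster.AddHp(builder, hp)        -- hypothetical generated helper
    builder:Finish(monster.End(builder))
    local data = builder:Output()     -- serialized bytes for this message
    -- ... hand `data` off, then reuse the same backing buffer:
    builder:Clear()
end
]]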

-- Returns the serialized data. By default only the finished region (from the
-- head to the end of the buffer) is returned; pass `full` to get the entire
-- backing buffer.
function mt:Output(full)
    assert(self.finished, "Builder Not Finished")
    if full then
        return self.bytes:Slice()
    else
        return self.bytes:Slice(self.head)
    end
end

function mt:StartObject(numFields)
    assert(not self.nested)

    local vtable = {}

    for _=1,numFields do
        table.insert(vtable, 0)
    end

    self.currentVTable = vtable
    self.objectEnd = self:Offset()
    self.nested = true
end

-- Serializes the vtable for the table under construction, reusing an
-- existing identical vtable when one is found, and returns the offset of the
-- object the vtable describes.
function mt:WriteVtable()
    self:PrependSOffsetTRelative(0)
    local objectOffset = self:Offset()

    local existingVTable
    -- Remove any zero (cleared) entries from the cached vtable list.
    local i = #self.vtables
    while i >= 1 do
        if self.vtables[i] == 0 then
            table.remove(self.vtables, i)
        end
        i = i - 1
    end

    -- Search the cached vtables for one identical to the current vtable.
    i = #self.vtables
    while i >= 1 do

        local vt2Offset = self.vtables[i]
        local vt2Start = #self.bytes - vt2Offset
        local vt2lenstr = self.bytes:Slice(vt2Start, vt2Start + 1)
        local vt2Len = string.unpack(VOffsetT.packFmt, vt2lenstr, 1)

        local metadata = VtableMetadataFields * 2
        local vt2End = vt2Start + vt2Len
        local vt2 = self.bytes:Slice(vt2Start + metadata, vt2End)

        if vtableEqual(self.currentVTable, objectOffset, vt2) then
            existingVTable = vt2Offset
            break
        end

        i = i - 1
    end

    if not existingVTable then
        -- No match: write the field offsets in reverse, then the vtable metadata.
        i = #self.currentVTable
        while i >= 1 do
            local off = 0
            local a = self.currentVTable[i]
            if a and a ~= 0 then
                off = objectOffset - a
            end
            self:PrependVOffsetT(off)

            i = i - 1
        end

        local objectSize = objectOffset - self.objectEnd
        self:PrependVOffsetT(objectSize)

        local vBytes = #self.currentVTable + VtableMetadataFields
        vBytes = vBytes * 2
        self:PrependVOffsetT(vBytes)

        local objectStart = #self.bytes - objectOffset
        self.bytes:Set(SOffsetT:Pack(self:Offset() - objectOffset), objectStart)

        table.insert(self.vtables, self:Offset())
    else
        -- Match found: point the object at the existing vtable and drop the new one.
        local objectStart = #self.bytes - objectOffset
        self.head = objectStart
        self.bytes:Set(SOffsetT:Pack(existingVTable - objectOffset), self.head)
    end

    self.currentVTable = nil
    return objectOffset
end

function mt:EndObject()
    assert(self.nested)
    self.nested = false
    return self:WriteVtable()
end

local function growByteBuffer(self, desiredSize)
    local s = #self.bytes
    assert(s < MAX_BUFFER_SIZE, "FlatBuffers cannot grow buffer beyond 2 gigabytes")
    local newsize = s
    repeat
        newsize = math.min(newsize * 2, MAX_BUFFER_SIZE)
        if newsize == 0 then newsize = 1 end
    until newsize > desiredSize

    self.bytes:Grow(newsize)
end

function mt:Head()
    return self.head
end

function mt:Offset()
    return #self.bytes - self.head
end

function mt:Pad(n)
    if n > 0 then
        -- pads are 8-bit, so skip the bytewidth lookup
        local h = self.head - n -- UInt8
        self.head = h
        self.bytes:Pad(n, h)
    end
end

-- Prepares to write `additionalBytes` bytes followed by a value of `size`
-- bytes, growing the backing buffer as needed and prepending padding so that
-- the value ends up aligned to `size`.
function mt:Prep(size, additionalBytes)
    if size > self.minalign then
        self.minalign = size
    end

    local h = self.head

    local k = #self.bytes - h + additionalBytes
    local alignsize = ((~k) + 1) & (size - 1) -- getAlignSize(k, size)

    local desiredSize = alignsize + size + additionalBytes

    while self.head < desiredSize do
        local oldBufSize = #self.bytes
        growByteBuffer(self, desiredSize)
        local updatedHead = self.head + #self.bytes - oldBufSize
        self.head = updatedHead
    end

    self:Pad(alignsize)
end

function mt:PrependSOffsetTRelative(off)
    self:Prep(4, 0)
    assert(off <= self:Offset(), "Offset arithmetic error")
    local off2 = self:Offset() - off + 4
    self:Place(off2, SOffsetT)
end

function mt:PrependUOffsetTRelative(off)
    self:Prep(4, 0)
    local soffset = self:Offset()
    if off <= soffset then
        local off2 = soffset - off + 4
        self:Place(off2, UOffsetT)
    else
        error("Offset arithmetic error")
    end
end

function mt:StartVector(elemSize, numElements, alignment)
    assert(not self.nested)
    self.nested = true
    local elementSize = elemSize * numElements
    self:Prep(4, elementSize) -- Uint32 length
    self:Prep(alignment, elementSize)
    return self:Offset()
end

function mt:EndVector(vectorNumElements)
    assert(self.nested)
    self.nested = false
    self:Place(vectorNumElements, UOffsetT)
    return self:Offset()
end
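
-- A hedged sketch of how the two calls above are typically paired (comment
-- only, not executed). `builder` is assumed to be a builder from m.New.
-- Elements are prepended, so they are pushed in reverse for the vector to
-- read back in its original order.
--[[
local values = {1, 2, 3, 4}
builder:StartVector(4, #values, 4)       -- int32 elements: size 4, alignment 4
for i = #values, 1, -1 do
    builder:PrependInt32(values[i])      -- prepend in reverse order
end
local vec = builder:EndVector(#values)   -- offset of the finished vector
]]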

function mt:CreateString(s)
    assert(not self.nested)
    self.nested = true

    assert(type(s) == "string")

    self:Prep(4, #s + 1)
    self:Place(0, Uint8)

    local l = #s
    self.head = self.head - l

    self.bytes:Set(s, self.head, self.head + l)

    return self:EndVector(l)
end

function mt:CreateByteVector(x)
    assert(not self.nested)
    self.nested = true

    local l = #x
    self:Prep(4, l)

    self.head = self.head - l

    self.bytes:Set(x, self.head, self.head + l)

    return self:EndVector(l)
end

function mt:Slot(slotnum)
    assert(self.nested)
    -- n.b. slot number is 0-based
    self.currentVTable[slotnum + 1] = self:Offset()
end

local function finish(self, rootTable, sizePrefix)
    UOffsetT:EnforceNumber(rootTable)
    self:Prep(self.minalign, sizePrefix and 8 or 4)
    self:PrependUOffsetTRelative(rootTable)
    if sizePrefix then
        local size = #self.bytes - self.head
        Int32:EnforceNumber(size)
        self:PrependInt32(size)
    end
    self.finished = true
    return self.head
end

function mt:Finish(rootTable)
    return finish(self, rootTable, false)
end

function mt:FinishSizePrefixed(rootTable)
    return finish(self, rootTable, true)
end
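
-- A hedged end-to-end sketch of the build/finish cycle using only methods
-- defined in this file (comment only, not executed). Slot numbers, values and
-- defaults are illustrative, not tied to any particular schema.
--[[
local builder = m.New(1024)

local name = builder:CreateString("orc")         -- strings before StartObject

builder:StartObject(2)                           -- a table with two slots
builder:PrependUOffsetTRelativeSlot(0, name, 0)  -- slot 0: string field
builder:PrependInt16Slot(1, 150, 100)            -- slot 1: int16, default 100
local root = builder:EndObject()

builder:Finish(root)                             -- or FinishSizePrefixed(root)
local data = builder:Output()                    -- the finished FlatBuffer
]]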

function mt:Prepend(flags, off)
    self:Prep(flags.bytewidth, 0)
    self:Place(off, flags)
end

function mt:PrependSlot(flags, o, x, d)
    flags:EnforceNumbers(x, d)
--    flags:EnforceNumber(x)
--    flags:EnforceNumber(d)
    if x ~= d then
        self:Prepend(flags, x)
        self:Slot(o)
    end
end

function mt:PrependBoolSlot(...) self:PrependSlot(Bool, ...) end
function mt:PrependByteSlot(...) self:PrependSlot(Uint8, ...) end
function mt:PrependUint8Slot(...) self:PrependSlot(Uint8, ...) end
function mt:PrependUint16Slot(...) self:PrependSlot(Uint16, ...) end
function mt:PrependUint32Slot(...) self:PrependSlot(Uint32, ...) end
function mt:PrependUint64Slot(...) self:PrependSlot(Uint64, ...) end
function mt:PrependInt8Slot(...) self:PrependSlot(Int8, ...) end
function mt:PrependInt16Slot(...) self:PrependSlot(Int16, ...) end
function mt:PrependInt32Slot(...) self:PrependSlot(Int32, ...) end
function mt:PrependInt64Slot(...) self:PrependSlot(Int64, ...) end
function mt:PrependFloat32Slot(...) self:PrependSlot(Float32, ...) end
function mt:PrependFloat64Slot(...) self:PrependSlot(Float64, ...) end

function mt:PrependUOffsetTRelativeSlot(o, x, d)
    if x ~= d then
        self:PrependUOffsetTRelative(x)
        self:Slot(o)
    end
end

function mt:PrependStructSlot(v, x, d)
    UOffsetT:EnforceNumber(d)
    if x ~= d then
        UOffsetT:EnforceNumber(x)
        assert(x == self:Offset(), "Tried to write a Struct at an Offset that is different from the current Offset of the Builder.")
        self:Slot(v)
    end
end

function mt:PrependBool(x) self:Prepend(Bool, x) end
function mt:PrependByte(x) self:Prepend(Uint8, x) end
function mt:PrependUint8(x) self:Prepend(Uint8, x) end
function mt:PrependUint16(x) self:Prepend(Uint16, x) end
function mt:PrependUint32(x) self:Prepend(Uint32, x) end
function mt:PrependUint64(x) self:Prepend(Uint64, x) end
function mt:PrependInt8(x) self:Prepend(Int8, x) end
function mt:PrependInt16(x) self:Prepend(Int16, x) end
function mt:PrependInt32(x) self:Prepend(Int32, x) end
function mt:PrependInt64(x) self:Prepend(Int64, x) end
function mt:PrependFloat32(x) self:Prepend(Float32, x) end
function mt:PrependFloat64(x) self:Prepend(Float64, x) end
function mt:PrependVOffsetT(x) self:Prepend(VOffsetT, x) end

-- Packs `x` with the numeric type `flags` and writes it at the new head,
-- moving the head back by the type's byte width.
function mt:Place(x, flags)
    local d = flags:EnforceNumberAndPack(x)
    local h = self.head - flags.bytewidth
    self.head = h
    self.bytes:Set(d, h)
end

return m