-- Builder for constructing FlatBuffers: data is written back to front into a
-- growable binary array, with tables, vectors, and strings referenced by
-- offsets. Create one with m.New(initialSize).
local N = require("flatbuffers.numTypes")
local ba = require("flatbuffers.binaryarray")
local compat = require("flatbuffers.compat")

local m = {}

local mt = {}

-- get locals for faster access
local VOffsetT = N.VOffsetT
local UOffsetT = N.UOffsetT
local SOffsetT = N.SOffsetT
local Bool = N.Bool
local Uint8 = N.Uint8
local Uint16 = N.Uint16
local Uint32 = N.Uint32
local Uint64 = N.Uint64
local Int8 = N.Int8
local Int16 = N.Int16
local Int32 = N.Int32
local Int64 = N.Int64
local Float32 = N.Float32
local Float64 = N.Float64

local MAX_BUFFER_SIZE = 0x80000000 -- 2 GB
local VtableMetadataFields = 2

local getAlignSize = compat.GetAlignSize
-- Returns true if the in-progress vtable `a` (field offsets relative to
-- objectStart) describes the same layout as the serialized vtable bytes `b`.
local function vtableEqual(a, objectStart, b)
    UOffsetT:EnforceNumber(objectStart)
    if (#a * VOffsetT.bytewidth) ~= #b then
        return false
    end

    for i, elem in ipairs(a) do
        local x = string.unpack(VOffsetT.packFmt, b, 1 + (i - 1) * VOffsetT.bytewidth)
        if x ~= 0 or elem ~= 0 then
            local y = objectStart - elem
            if x ~= y then
                return false
            end
        end
    end
    return true
end

function m.New(initialSize)
    assert(0 <= initialSize and initialSize < MAX_BUFFER_SIZE)
    local o =
    {
        finished = false,
        bytes = ba.New(initialSize),
        nested = false,
        head = initialSize,
        minalign = 1,
        vtables = {}
    }
    setmetatable(o, {__index = mt})
    return o
end

-- Returns the serialized bytes as a string; pass full = true to include the
-- unused space at the front of the underlying buffer.
function mt:Output(full)
    assert(self.finished, "Builder Not Finished")
    if full then
        return self.bytes:Slice()
    else
        return self.bytes:Slice(self.head)
    end
end

-- Starts a new table with numFields slots, all initially set to the default
-- marker 0.
function mt:StartObject(numFields)
    assert(not self.nested)

    local vtable = {}

    for _=1,numFields do
        table.insert(vtable, 0)
    end

    self.currentVTable = vtable
    self.objectEnd = self:Offset()
    self.nested = true
end

-- Serializes the vtable for the table just finished. Identical vtables are
-- deduplicated: if a matching one was already written, the new table simply
-- points at it.
function mt:WriteVtable()
    self:PrependSOffsetTRelative(0)
    local objectOffset = self:Offset()

    local existingVTable
    -- Drop any zero entries from the cached vtable offsets.
    local i = #self.vtables
    while i >= 1 do
        if self.vtables[i] == 0 then
            table.remove(self.vtables, i)
        end
        i = i - 1
    end

    -- Search the cached vtables, newest first, for one that matches the
    -- vtable under construction.
    i = #self.vtables
    while i >= 1 do

        local vt2Offset = self.vtables[i]
        local vt2Start = #self.bytes - vt2Offset
        local vt2lenstr = self.bytes:Slice(vt2Start, vt2Start+1)
        local vt2Len = string.unpack(VOffsetT.packFmt, vt2lenstr, 1)

        local metadata = VtableMetadataFields * VOffsetT.bytewidth
        local vt2End = vt2Start + vt2Len
        local vt2 = self.bytes:Slice(vt2Start+metadata, vt2End)

        if vtableEqual(self.currentVTable, objectOffset, vt2) then
            existingVTable = vt2Offset
            break
        end

        i = i - 1
    end

    if not existingVTable then
        -- No match: serialize the new vtable. Slots are prepended in reverse
        -- order so they end up in slot order in the buffer.
        i = #self.currentVTable
        while i >= 1 do
            local off = 0
            local a = self.currentVTable[i]
            if a and a ~= 0 then
                -- Field offsets are stored relative to the start of the table.
                off = objectOffset - a
            end
            self:PrependVOffsetT(off)

            i = i - 1
        end

        -- Vtable metadata: the table's size, then the vtable's own size.
        local objectSize = objectOffset - self.objectEnd
        self:PrependVOffsetT(objectSize)

        local vBytes = #self.currentVTable + VtableMetadataFields
        vBytes = vBytes * VOffsetT.bytewidth
        self:PrependVOffsetT(vBytes)

        -- Patch the placeholder SOffsetT at the start of the table to point
        -- at the vtable just written.
        local objectStart = #self.bytes - objectOffset
        self.bytes:Set(SOffsetT:Pack(self:Offset() - objectOffset), objectStart)

        table.insert(self.vtables, self:Offset())
    else
        -- Match found: write the offset to the existing vtable into the
        -- already-allocated SOffsetT at the start of this table.
        local objectStart = #self.bytes - objectOffset
        self.head = objectStart
        self.bytes:Set(SOffsetT:Pack(existingVTable - objectOffset), self.head)
    end

    self.currentVTable = nil
    return objectOffset
end

function mt:EndObject()
    assert(self.nested)
    self.nested = false
    return self:WriteVtable()
end

local function growByteBuffer(self, desiredSize)
    local s = #self.bytes
    assert(s < MAX_BUFFER_SIZE, "Flat Buffers cannot grow buffer beyond 2 gigabytes")
    local newsize = s
    repeat
        -- Double the capacity (capped at MAX_BUFFER_SIZE) until it can hold
        -- desiredSize bytes.
        newsize = math.min(newsize * 2, MAX_BUFFER_SIZE)
        if newsize == 0 then newsize = 1 end
    until newsize > desiredSize

    self.bytes:Grow(newsize)
end

function mt:Head()
    return self.head
end

-- Current write offset, measured from the end of the buffer.
function mt:Offset()
    return #self.bytes - self.head
end

function mt:Pad(n)
    if n > 0 then
        -- pads are 8-bit, so skip the bytewidth lookup
        local h = self.head - n -- UInt8
        self.head = h
        self.bytes:Pad(n, h)
    end
end

-- Prepares to write an element of `size` bytes after `additionalBytes` have
-- been written (e.g. a vector's length field followed by its data), adding
-- alignment padding and growing the buffer as needed.
function mt:Prep(size, additionalBytes)
    if size > self.minalign then
        self.minalign = size
    end

    local h = self.head

    local k = #self.bytes - h + additionalBytes
    local alignsize = ((~k) + 1) & (size - 1) -- getAlignSize(k, size)

    local desiredSize = alignsize + size + additionalBytes

    while self.head < desiredSize do
        local oldBufSize = #self.bytes
        growByteBuffer(self, desiredSize)
        local updatedHead = self.head + #self.bytes - oldBufSize
        self.head = updatedHead
    end

    self:Pad(alignsize)
end

function mt:PrependSOffsetTRelative(off)
    self:Prep(SOffsetT.bytewidth, 0)
    assert(off <= self:Offset(), "Offset arithmetic error")
    local off2 = self:Offset() - off + SOffsetT.bytewidth
    self:Place(off2, SOffsetT)
end

function mt:PrependUOffsetTRelative(off)
    self:Prep(UOffsetT.bytewidth, 0)
    local soffset = self:Offset()
    if off <= soffset then
        local off2 = soffset - off + UOffsetT.bytewidth
        self:Place(off2, UOffsetT)
    else
        error("Offset arithmetic error")
    end
end

-- Begins a vector of numElements entries of elemSize bytes each; alignment is
-- the required alignment of the element type. Elements are then prepended
-- before EndVector is called.
function mt:StartVector(elemSize, numElements, alignment)
    assert(not self.nested)
    self.nested = true
    self:Prep(Uint32.bytewidth, elemSize * numElements)
    self:Prep(alignment, elemSize * numElements)
    return self:Offset()
end

function mt:EndVector(vectorNumElements)
    assert(self.nested)
    self.nested = false
    -- Prepend the element count to close out the vector.
    self:Place(vectorNumElements, UOffsetT)
    return self:Offset()
end

function mt:CreateString(s)
    assert(not self.nested)
    self.nested = true

    assert(type(s) == "string")

    self:Prep(UOffsetT.bytewidth, (#s + 1)*Uint8.bytewidth)
    self:Place(0, Uint8)

    local l = #s
    self.head = self.head - l

    self.bytes:Set(s, self.head, self.head + l)

    return self:EndVector(#s)
end

function mt:CreateByteVector(x)
    assert(not self.nested)
    self.nested = true
    self:Prep(UOffsetT.bytewidth, #x*Uint8.bytewidth)

    local l = #x
    self.head = self.head - l

    self.bytes:Set(x, self.head, self.head + l)

    return self:EndVector(#x)
end

function mt:Slot(slotnum)
    assert(self.nested)
    -- n.b. slot number is 0-based
    self.currentVTable[slotnum + 1] = self:Offset()
end

-- Finalizes the buffer with rootTable as the root object; when sizePrefix is
-- true the total buffer size is prepended as an int32.
local function finish(self, rootTable, sizePrefix)
    UOffsetT:EnforceNumber(rootTable)
    local prepSize = UOffsetT.bytewidth
    if sizePrefix then
        prepSize = prepSize + Int32.bytewidth
    end

    self:Prep(self.minalign, prepSize)
    self:PrependUOffsetTRelative(rootTable)
    if sizePrefix then
        local size = #self.bytes - self.head
        Int32:EnforceNumber(size)
        self:PrependInt32(size)
    end
    self.finished = true
    return self.head
end

function mt:Finish(rootTable)
    return finish(self, rootTable, false)
end

function mt:FinishSizePrefixed(rootTable)
    return finish(self, rootTable, true)
end

function mt:Prepend(flags, off)
    self:Prep(flags.bytewidth, 0)
    self:Place(off, flags)
end

-- Writes scalar x into vtable slot o, but only when it differs from the
-- default value d (default values are never stored).
function mt:PrependSlot(flags, o, x, d)
    flags:EnforceNumber(x)
    flags:EnforceNumber(d)
    if x ~= d then
        self:Prepend(flags, x)
        self:Slot(o)
    end
end

function mt:PrependBoolSlot(...) self:PrependSlot(Bool, ...) end
function mt:PrependByteSlot(...) self:PrependSlot(Uint8, ...) end
function mt:PrependUint8Slot(...) self:PrependSlot(Uint8, ...) end
function mt:PrependUint16Slot(...) self:PrependSlot(Uint16, ...) end
function mt:PrependUint32Slot(...) self:PrependSlot(Uint32, ...) end
function mt:PrependUint64Slot(...) self:PrependSlot(Uint64, ...) end
function mt:PrependInt8Slot(...) self:PrependSlot(Int8, ...) end
function mt:PrependInt16Slot(...) self:PrependSlot(Int16, ...) end
function mt:PrependInt32Slot(...) self:PrependSlot(Int32, ...) end
function mt:PrependInt64Slot(...) self:PrependSlot(Int64, ...) end
function mt:PrependFloat32Slot(...) self:PrependSlot(Float32, ...) end
function mt:PrependFloat64Slot(...) self:PrependSlot(Float64, ...) end

function mt:PrependUOffsetTRelativeSlot(o, x, d)
    if x ~= d then
        self:PrependUOffsetTRelative(x)
        self:Slot(o)
    end
end

function mt:PrependStructSlot(v, x, d)
    UOffsetT:EnforceNumber(d)
    if x ~= d then
        UOffsetT:EnforceNumber(x)
        -- Structs are stored inline, so they must be created immediately
        -- before the table that references them.
        assert(x == self:Offset(), "Tried to write a Struct at an Offset that is different from the current Offset of the Builder.")
        self:Slot(v)
    end
end

function mt:PrependBool(x) self:Prepend(Bool, x) end
function mt:PrependByte(x) self:Prepend(Uint8, x) end
function mt:PrependUint8(x) self:Prepend(Uint8, x) end
function mt:PrependUint16(x) self:Prepend(Uint16, x) end
function mt:PrependUint32(x) self:Prepend(Uint32, x) end
function mt:PrependUint64(x) self:Prepend(Uint64, x) end
function mt:PrependInt8(x) self:Prepend(Int8, x) end
function mt:PrependInt16(x) self:Prepend(Int16, x) end
function mt:PrependInt32(x) self:Prepend(Int32, x) end
function mt:PrependInt64(x) self:Prepend(Int64, x) end
function mt:PrependFloat32(x) self:Prepend(Float32, x) end
function mt:PrependFloat64(x) self:Prepend(Float64, x) end
function mt:PrependVOffsetT(x) self:Prepend(VOffsetT, x) end

-- Packs x using the given type descriptor and writes it directly at the new
-- head position.
function mt:Place(x, flags)
    local d = flags:EnforceNumberAndPack(x)
    local h = self.head - flags.bytewidth
    self.head = h
    self.bytes:Set(d, h)
end

return m
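
-- A minimal usage sketch (illustrative only; in practice the schema-generated
-- table code wraps these calls, and the two slots and defaults shown here are
-- hypothetical):
--
--   local builder = m.New(1024)
--   local name = builder:CreateString("example")
--   builder:StartObject(2)                            -- table with two fields
--   builder:PrependUOffsetTRelativeSlot(0, name, 0)   -- slot 0: string field
--   builder:PrependInt32Slot(1, 42, 0)                -- slot 1: int32, default 0
--   local root = builder:EndObject()
--   builder:Finish(root)
--   local buf = builder:Output()                      -- finished bytes as a string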