Squashed 'third_party/flatbuffers/' content from commit acc9990ab
Change-Id: I48550d40d78fea996ebe74e9723a5d1f910de491
git-subtree-dir: third_party/flatbuffers
git-subtree-split: acc9990abd2206491480291b0f85f925110102ea
diff --git a/lua/flatbuffers/binaryarray.lua b/lua/flatbuffers/binaryarray.lua
new file mode 100644
index 0000000..bf728cd
--- /dev/null
+++ b/lua/flatbuffers/binaryarray.lua
@@ -0,0 +1,123 @@
+local m = {} -- the module table
+
+local mt = {} -- the module metatable
+
+-- metamethod that returns the size of a binary array
+-- (e.g., #binaryArray calls this)
+function mt:__len()
+ return self.size
+end
+
+-- Create a new binary array from an initial size, or wrap an existing string of bytes
+function m.New(sizeOrString)
+ -- the array storage itself
+ local o = {}
+
+ if type(sizeOrString) == "string" then
+ o.str = sizeOrString
+ o.size = #sizeOrString
+ elseif type(sizeOrString) == "number" then
+ o.data = {}
+ o.size = sizeOrString
+ else
+ error("Expect a integer size value or string to construct a binary array")
+ end
+ -- set the inheritance
+ setmetatable(o, {__index = mt, __len = mt.__len})
+ return o
+end
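+
+-- Illustrative usage sketch (not part of the module API; shown only to
+-- clarify the two construction modes):
+--   local ba = require("flatbuffers.binaryarray")
+--   local writeBuf = ba.New(16)            -- table-backed, used when building
+--   local readBuf  = ba.New("\1\0\0\0")    -- string-backed, used when reading
+--   print(#writeBuf, #readBuf)             --> 16   4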
+
+-- Get a slice of the binary array from startPos (0-based) up to,
+-- but not including, endPos
+function mt:Slice(startPos, endPos)
+ startPos = startPos or 0
+ endPos = endPos or self.size
+ local d = self.data
+ if d then
+ -- if self.data is defined, we are building the buffer
+ -- in a Lua table
+
+ -- new table to store the slice components
+ local b = {}
+
+ -- starting at startPos, copy each stored value into the
+ -- new table to be concatenated later, advancing startPos
+ -- by the size of each value; missing entries become '\0'
+ while startPos < endPos do
+ local v = d[startPos] or '\0'
+ table.insert(b, v)
+ startPos = startPos + #v
+ end
+
+ -- combine the table of strings into one string
+ -- this is faster than doing a bunch of concats by themselves
+ return table.concat(b)
+ else
+ -- n.b. start/endPos come in 0-based with an exclusive end
+ -- (Python-slice style), so convert to Lua's 1-based, inclusive string.sub
+ return self.str:sub(startPos+1, endPos)
+ end
+end
+
+-- Grow the binary array to a new size, placing the existing data
+-- at the end of the new array
+function mt:Grow(newsize)
+ -- the new table to store the data
+ local newT = {}
+
+ -- the offset to be applied to existing entries
+ local offset = newsize - self.size
+
+ -- loop over all the current entries and
+ -- add them to the new table at the correct
+ -- offset location
+ local d = self.data
+ for i,data in pairs(d) do
+ newT[i + offset] = data
+ end
+
+ -- update this storage with the new table and size
+ self.data = newT
+ self.size = newsize
+end
+
+-- memoization cache for padding strings
+local pads = {}
+
+-- pad the binary with n \0 bytes at the starting position
+function mt:Pad(n, startPos)
+ -- use memoization to avoid repeatedly creating
+ -- identical padding strings
+ local s = pads[n]
+ if not s then
+ s = string.rep('\0', n)
+ pads[n] = s
+ end
+
+ -- store the padding string at the start position in the
+ -- Lua table
+ self.data[startPos] = s
+end
+
+-- Sets the binary array value at the specified position
+function mt:Set(value, position)
+ self.data[position] = value
+end
+
+-- locals for slightly faster access
+local sunpack = string.unpack
+local spack = string.pack
+
+-- Pack the data into a binary representation
+function m.Pack(fmt, ...)
+ return spack(fmt, ...)
+end
+
+-- Unpack data from its binary representation into a Lua value
+-- (s must be a string-backed binary array; pos is 0-based)
+function m.Unpack(fmt, s, pos)
+ return sunpack(fmt, s.str, pos + 1)
+end
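+
+-- Illustrative sketch of the Pack/Unpack round trip (assumes Lua 5.3's
+-- string.pack formats; not part of the module API):
+--   local ba = require("flatbuffers.binaryarray")
+--   local b = ba.New(ba.Pack("<I4", 42))   -- wrap the packed bytes for reading
+--   assert(ba.Unpack("<I4", b, 0) == 42)   -- positions are 0-based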
+
+-- Return the binary array module
+return m
\ No newline at end of file
diff --git a/lua/flatbuffers/builder.lua b/lua/flatbuffers/builder.lua
new file mode 100644
index 0000000..2fb2220
--- /dev/null
+++ b/lua/flatbuffers/builder.lua
@@ -0,0 +1,369 @@
+local N = require("flatbuffers.numTypes")
+local ba = require("flatbuffers.binaryarray")
+local compat = require("flatbuffers.compat")
+
+local m = {}
+
+local mt = {}
+
+-- get locals for faster access
+local VOffsetT = N.VOffsetT
+local UOffsetT = N.UOffsetT
+local SOffsetT = N.SOffsetT
+local Bool = N.Bool
+local Uint8 = N.Uint8
+local Uint16 = N.Uint16
+local Uint32 = N.Uint32
+local Uint64 = N.Uint64
+local Int8 = N.Int8
+local Int16 = N.Int16
+local Int32 = N.Int32
+local Int64 = N.Int64
+local Float32 = N.Float32
+local Float64 = N.Float64
+
+local MAX_BUFFER_SIZE = 0x80000000 -- 2 GB
+local VtableMetadataFields = 2
+
+local getAlignSize = compat.GetAlignSize
+
+local function vtableEqual(a, objectStart, b)
+ UOffsetT:EnforceNumber(objectStart)
+ if (#a * VOffsetT.bytewidth) ~= #b then
+ return false
+ end
+
+ for i, elem in ipairs(a) do
+ local x = string.unpack(VOffsetT.packFmt, b, 1 + (i - 1) * VOffsetT.bytewidth)
+ if x ~= 0 or elem ~= 0 then
+ local y = objectStart - elem
+ if x ~= y then
+ return false
+ end
+ end
+ end
+ return true
+end
+
+function m.New(initialSize)
+ assert(0 <= initialSize and initialSize < MAX_BUFFER_SIZE)
+ local o =
+ {
+ finished = false,
+ bytes = ba.New(initialSize),
+ nested = false,
+ head = initialSize,
+ minalign = 1,
+ vtables = {}
+ }
+ setmetatable(o, {__index = mt})
+ return o
+end
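+
+-- Minimal usage sketch (illustrative only; real applications drive the
+-- builder through flatc-generated code rather than by hand):
+--   local builder = require("flatbuffers.builder").New(1024)
+--   builder:StartObject(1)                -- a table with one field
+--   builder:PrependInt32Slot(0, 42, 0)    -- slot 0, value 42, default 0
+--   local root = builder:EndObject()
+--   builder:Finish(root)
+--   local buf = builder:Output()          -- finished buffer as a Lua string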
+
+function mt:Output(full)
+ assert(self.finished, "Builder Not Finished")
+ if full then
+ return self.bytes:Slice()
+ else
+ return self.bytes:Slice(self.head)
+ end
+end
+
+function mt:StartObject(numFields)
+ assert(not self.nested)
+
+ local vtable = {}
+
+ for _=1,numFields do
+ table.insert(vtable, 0)
+ end
+
+ self.currentVTable = vtable
+ self.objectEnd = self:Offset()
+ self.nested = true
+end
+
+function mt:WriteVtable()
+ self:PrependSOffsetTRelative(0)
+ local objectOffset = self:Offset()
+
+ local existingVTable
+ -- drop any zeroed entries from the cached vtable offsets
+ local i = #self.vtables
+ while i >= 1 do
+ if self.vtables[i] == 0 then
+ table.remove(self.vtables,i)
+ end
+ i = i - 1
+ end
+
+ -- search backwards through the cached vtables for one identical to
+ -- the vtable we are about to write, so it can be reused
+ i = #self.vtables
+ while i >= 1 do
+
+ local vt2Offset = self.vtables[i]
+ local vt2Start = #self.bytes - vt2Offset
+ local vt2lenstr = self.bytes:Slice(vt2Start, vt2Start+1)
+ local vt2Len = string.unpack(VOffsetT.packFmt, vt2lenstr, 1)
+
+ local metadata = VtableMetadataFields * VOffsetT.bytewidth
+ local vt2End = vt2Start + vt2Len
+ local vt2 = self.bytes:Slice(vt2Start+metadata,vt2End)
+
+ if vtableEqual(self.currentVTable, objectOffset, vt2) then
+ existingVTable = vt2Offset
+ break
+ end
+
+ i = i - 1
+ end
+
+ if not existingVTable then
+ -- no matching vtable was found, so write this one out
+ i = #self.currentVTable
+ while i >= 1 do
+ local off = 0
+ local a = self.currentVTable[i]
+ if a and a ~= 0 then
+ off = objectOffset - a
+ end
+ self:PrependVOffsetT(off)
+
+ i = i - 1
+ end
+
+ local objectSize = objectOffset - self.objectEnd
+ self:PrependVOffsetT(objectSize)
+
+ local vBytes = #self.currentVTable + VtableMetadataFields
+ vBytes = vBytes * VOffsetT.bytewidth
+ self:PrependVOffsetT(vBytes)
+
+ local objectStart = #self.bytes - objectOffset
+ self.bytes:Set(SOffsetT:Pack(self:Offset() - objectOffset),objectStart)
+
+ table.insert(self.vtables, self:Offset())
+ else
+ local objectStart = #self.bytes - objectOffset
+ self.head = objectStart
+ -- point this object's soffset at the existing, identical vtable
+ self.bytes:Set(SOffsetT:Pack(existingVTable - objectOffset),self.head)
+ end
+
+ self.currentVTable = nil
+ return objectOffset
+end
+
+function mt:EndObject()
+ assert(self.nested)
+ self.nested = false
+ return self:WriteVtable()
+end
+
+local function growByteBuffer(self, desiredSize)
+ local s = #self.bytes
+ assert(s < MAX_BUFFER_SIZE, "Flat Buffers cannot grow buffer beyond 2 gigabytes")
+ local newsize = s
+ repeat
+ newsize = math.min(newsize * 2, MAX_BUFFER_SIZE)
+ if newsize == 0 then newsize = 1 end
+ until newsize > desiredSize
+
+ self.bytes:Grow(newsize)
+end
+
+function mt:Head()
+ return self.head
+end
+
+function mt:Offset()
+ return #self.bytes - self.head
+end
+
+function mt:Pad(n)
+ if n > 0 then
+ -- pads are 8-bit, so skip the bytewidth lookup
+ local h = self.head - n -- UInt8
+ self.head = h
+ self.bytes:Pad(n, h)
+ end
+end
+
+-- Prepare to write 'size' bytes of aligned data after 'additionalBytes'
+-- of unaligned data, growing and padding the buffer as needed
+function mt:Prep(size, additionalBytes)
+ if size > self.minalign then
+ self.minalign = size
+ end
+
+ local h = self.head
+
+ local k = #self.bytes - h + additionalBytes
+ local alignsize = ((~k) + 1) & (size - 1) -- same computation as compat.GetAlignSize(k, size), inlined here
+
+ local desiredSize = alignsize + size + additionalBytes
+
+ while self.head < desiredSize do
+ local oldBufSize = #self.bytes
+ growByteBuffer(self, desiredSize)
+ local updatedHead = self.head + #self.bytes - oldBufSize
+ self.head = updatedHead
+ end
+
+ self:Pad(alignsize)
+end
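+
+-- Worked example of the alignment arithmetic above: with 3 bytes already
+-- written (k = 3) and a 4-byte value coming next (size = 4),
+-- alignsize = ((~3) + 1) & 3 = (-3) & 3 = 1, so one zero byte is padded
+-- first and the new value ends up 4-byte aligned relative to the buffer end.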
+
+function mt:PrependSOffsetTRelative(off)
+ self:Prep(SOffsetT.bytewidth, 0)
+ assert(off <= self:Offset(), "Offset arithmetic error")
+ local off2 = self:Offset() - off + SOffsetT.bytewidth
+ self:Place(off2, SOffsetT)
+end
+
+function mt:PrependUOffsetTRelative(off)
+ self:Prep(UOffsetT.bytewidth, 0)
+ local soffset = self:Offset()
+ if off <= soffset then
+ local off2 = soffset - off + UOffsetT.bytewidth
+ self:Place(off2, UOffsetT)
+ else
+ error("Offset arithmetic error")
+ end
+end
+
+function mt:StartVector(elemSize, numElements, alignment)
+ assert(not self.nested)
+ self.nested = true
+ self:Prep(Uint32.bytewidth, elemSize * numElements)
+ self:Prep(alignment, elemSize * numElements)
+ return self:Offset()
+end
+
+function mt:EndVector(vectorNumElements)
+ assert(self.nested)
+ self.nested = false
+ self:Place(vectorNumElements, UOffsetT)
+ return self:Offset()
+end
+
+function mt:CreateString(s)
+ assert(not self.nested)
+ self.nested = true
+
+ assert(type(s) == "string")
+
+ self:Prep(UOffsetT.bytewidth, (#s + 1)*Uint8.bytewidth)
+ self:Place(0, Uint8)
+
+ local l = #s
+ self.head = self.head - l
+
+ self.bytes:Set(s, self.head, self.head + l)
+
+ return self:EndVector(#s)
+end
+
+function mt:CreateByteVector(x)
+ assert(not self.nested)
+ self.nested = true
+ self:Prep(UOffsetT.bytewidth, #x*Uint8.bytewidth)
+
+ local l = #x
+ self.head = self.head - l
+
+ self.bytes:Set(x, self.head, self.head + l)
+
+ return self:EndVector(#x)
+end
+
+function mt:Slot(slotnum)
+ assert(self.nested)
+ -- n.b. slot number is 0-based
+ self.currentVTable[slotnum + 1] = self:Offset()
+end
+
+local function finish(self, rootTable, sizePrefix)
+ UOffsetT:EnforceNumber(rootTable)
+ local prepSize = UOffsetT.bytewidth
+ if sizePrefix then
+ prepSize = prepSize + Int32.bytewidth
+ end
+
+ self:Prep(self.minalign, prepSize)
+ self:PrependUOffsetTRelative(rootTable)
+ if sizePrefix then
+ local size = #self.bytes - self.head
+ Int32:EnforceNumber(size)
+ self:PrependInt32(size)
+ end
+ self.finished = true
+ return self.head
+end
+
+function mt:Finish(rootTable)
+ return finish(self, rootTable, false)
+end
+
+function mt:FinishSizePrefixed(rootTable)
+ return finish(self, rootTable, true)
+end
+
+function mt:Prepend(flags, off)
+ self:Prep(flags.bytewidth, 0)
+ self:Place(off, flags)
+end
+
+function mt:PrependSlot(flags, o, x, d)
+ flags:EnforceNumber(x)
+ flags:EnforceNumber(d)
+ if x ~= d then
+ self:Prepend(flags, x)
+ self:Slot(o)
+ end
+end
+
+function mt:PrependBoolSlot(...) self:PrependSlot(Bool, ...) end
+function mt:PrependByteSlot(...) self:PrependSlot(Uint8, ...) end
+function mt:PrependUint8Slot(...) self:PrependSlot(Uint8, ...) end
+function mt:PrependUint16Slot(...) self:PrependSlot(Uint16, ...) end
+function mt:PrependUint32Slot(...) self:PrependSlot(Uint32, ...) end
+function mt:PrependUint64Slot(...) self:PrependSlot(Uint64, ...) end
+function mt:PrependInt8Slot(...) self:PrependSlot(Int8, ...) end
+function mt:PrependInt16Slot(...) self:PrependSlot(Int16, ...) end
+function mt:PrependInt32Slot(...) self:PrependSlot(Int32, ...) end
+function mt:PrependInt64Slot(...) self:PrependSlot(Int64, ...) end
+function mt:PrependFloat32Slot(...) self:PrependSlot(Float32, ...) end
+function mt:PrependFloat64Slot(...) self:PrependSlot(Float64, ...) end
+
+function mt:PrependUOffsetTRelativeSlot(o,x,d)
+ if x~=d then
+ self:PrependUOffsetTRelative(x)
+ self:Slot(o)
+ end
+end
+
+function mt:PrependStructSlot(v,x,d)
+ UOffsetT:EnforceNumber(d)
+ if x~=d then
+ UOffsetT:EnforceNumber(x)
+ assert(x == self:Offset(), "Tried to write a Struct at an Offset that is different from the current Offset of the Builder.")
+ self:Slot(v)
+ end
+end
+
+function mt:PrependBool(x) self:Prepend(Bool, x) end
+function mt:PrependByte(x) self:Prepend(Uint8, x) end
+function mt:PrependUint8(x) self:Prepend(Uint8, x) end
+function mt:PrependUint16(x) self:Prepend(Uint16, x) end
+function mt:PrependUint32(x) self:Prepend(Uint32, x) end
+function mt:PrependUint64(x) self:Prepend(Uint64, x) end
+function mt:PrependInt8(x) self:Prepend(Int8, x) end
+function mt:PrependInt16(x) self:Prepend(Int16, x) end
+function mt:PrependInt32(x) self:Prepend(Int32, x) end
+function mt:PrependInt64(x) self:Prepend(Int64, x) end
+function mt:PrependFloat32(x) self:Prepend(Float32, x) end
+function mt:PrependFloat64(x) self:Prepend(Float64, x) end
+function mt:PrependVOffsetT(x) self:Prepend(VOffsetT, x) end
+
+function mt:Place(x, flags)
+ local d = flags:EnforceNumberAndPack(x)
+ local h = self.head - flags.bytewidth
+ self.head = h
+ self.bytes:Set(d, h)
+end
+
+return m
diff --git a/lua/flatbuffers/compat.lua b/lua/flatbuffers/compat.lua
new file mode 100644
index 0000000..89c992b
--- /dev/null
+++ b/lua/flatbuffers/compat.lua
@@ -0,0 +1,17 @@
+local m = {}
+
+local getAlignSize
+if _VERSION == "Lua 5.3" then
+ getAlignSize = function(k, size)
+ return ((~k) + 1) & (size - 1)
+ end
+else
+ -- Lua 5.2 fallback via bit32; same (k, size) signature as the 5.3 branch
+ getAlignSize = function(k, size)
+ local alignsize = bit32.bnot(k) + 1
+ return bit32.band(alignsize, (size - 1))
+ end
+end
+
+m.GetAlignSize = getAlignSize
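+
+-- e.g. m.GetAlignSize(3, 4) == 1 and m.GetAlignSize(8, 4) == 0 under either branch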
+
+return m
\ No newline at end of file
diff --git a/lua/flatbuffers/numTypes.lua b/lua/flatbuffers/numTypes.lua
new file mode 100644
index 0000000..8fec21c
--- /dev/null
+++ b/lua/flatbuffers/numTypes.lua
@@ -0,0 +1,204 @@
+local m = {}
+
+local ba = require("flatbuffers.binaryarray")
+
+local bpack = ba.Pack
+local bunpack = ba.Unpack
+
+local type_mt = {}
+
+function type_mt:Pack(value)
+ return bpack(self.packFmt, value)
+end
+
+function type_mt:Unpack(buf, pos)
+ return bunpack(self.packFmt, buf, pos)
+end
+
+function type_mt:ValidNumber(n)
+ if not self.min_value and not self.max_value then return true end
+ return self.min_value <= n and n <= self.max_value
+end
+
+function type_mt:EnforceNumber(n)
+ -- duplicates the ValidNumber check inline, since the overhead of an
+ -- extra function call matters for such a frequently used method
+ if not self.min_value and not self.max_value then
+ return
+ end
+
+ if self.min_value <= n and n <= self.max_value then
+ return
+ end
+
+ error("Number is not in the valid range")
+end
+
+function type_mt:EnforceNumberAndPack(n)
+ return bpack(self.packFmt, n)
+end
+
+function type_mt:ConvertType(n, otherType)
+ assert(self.bytewidth == otherType.bytewidth, "Cannot convert between types of different widths")
+ if self == otherType then
+ return n
+ end
+ -- re-read the packed bytes as the other type; Unpack expects a
+ -- string-backed binary array and a 0-based position
+ return otherType:Unpack(ba.New(self:Pack(n)), 0)
+end
+
+local bool_mt =
+{
+ bytewidth = 1,
+ min_value = false,
+ max_value = true,
+ lua_type = type(true),
+ name = "bool",
+ packFmt = "<I1",
+ Pack = function(self, value) return value and "\1" or "\0" end, -- booleans are stored as a single 0/1 byte
+ Unpack = function(self, buf, pos) return bunpack(self.packFmt, buf, pos) ~= 0 end,
+ ValidNumber = function(self, n) return true end, -- anything is a valid boolean in Lua
+ EnforceNumber = function(self, n) end, -- anything is a valid boolean in Lua
+ EnforceNumberAndPack = function(self, n) return self:Pack(n) end,
+}
+
+local uint8_mt =
+{
+ bytewidth = 1,
+ min_value = 0,
+ max_value = 2^8-1,
+ lua_type = type(1),
+ name = "uint8",
+ packFmt = "<I1"
+}
+
+local uint16_mt =
+{
+ bytewidth = 2,
+ min_value = 0,
+ max_value = 2^16-1,
+ lua_type = type(1),
+ name = "uint16",
+ packFmt = "<I2"
+}
+
+local uint32_mt =
+{
+ bytewidth = 4,
+ min_value = 0,
+ max_value = 2^32-1,
+ lua_type = type(1),
+ name = "uint32",
+ packFmt = "<I4"
+}
+
+local uint64_mt =
+{
+ bytewidth = 8,
+ min_value = 0,
+ max_value = 2^64-1,
+ lua_type = type(1),
+ name = "uint64",
+ packFmt = "<I8"
+}
+
+local int8_mt =
+{
+ bytewidth = 1,
+ min_value = -2^7,
+ max_value = 2^7-1,
+ lua_type = type(1),
+ name = "int8",
+ packFmt = "<i1"
+}
+
+local int16_mt =
+{
+ bytewidth = 2,
+ min_value = -2^15,
+ max_value = 2^15-1,
+ lua_type = type(1),
+ name = "int16",
+ packFmt = "<i2"
+}
+
+local int32_mt =
+{
+ bytewidth = 4,
+ min_value = -2^31,
+ max_value = 2^31-1,
+ lua_type = type(1),
+ name = "int32",
+ packFmt = "<i4"
+}
+
+local int64_mt =
+{
+ bytewidth = 8,
+ min_value = -2^63,
+ max_value = 2^63-1,
+ lua_type = type(1),
+ name = "int64",
+ packFmt = "<i8"
+}
+
+local float32_mt =
+{
+ bytewidth = 4,
+ min_value = nil,
+ max_value = nil,
+ lua_type = type(1.0),
+ name = "float32",
+ packFmt = "<f"
+}
+
+local float64_mt =
+{
+ bytewidth = 8,
+ min_value = nil,
+ max_value = nil,
+ lua_type = type(1.0),
+ name = "float64",
+ packFmt = "<d"
+}
+
+-- hook each concrete type up to the shared base (type_mt) via __index
+setmetatable(bool_mt, {__index = type_mt})
+setmetatable(uint8_mt, {__index = type_mt})
+setmetatable(uint16_mt, {__index = type_mt})
+setmetatable(uint32_mt, {__index = type_mt})
+setmetatable(uint64_mt, {__index = type_mt})
+setmetatable(int8_mt, {__index = type_mt})
+setmetatable(int16_mt, {__index = type_mt})
+setmetatable(int32_mt, {__index = type_mt})
+setmetatable(int64_mt, {__index = type_mt})
+setmetatable(float32_mt, {__index = type_mt})
+setmetatable(float64_mt, {__index = type_mt})
+
+
+m.Uint8 = uint8_mt
+m.Uint16 = uint16_mt
+m.Uint32 = uint32_mt
+m.Uint64 = uint64_mt
+m.Int8 = int8_mt
+m.Int16 = int16_mt
+m.Int32 = int32_mt
+m.Int64 = int64_mt
+m.Float32 = float32_mt
+m.Float64 = float64_mt
+
+m.UOffsetT = uint32_mt
+m.VOffsetT = uint16_mt
+m.SOffsetT = int32_mt
+
+local GenerateTypes = function(listOfTypes)
+ for _,t in pairs(listOfTypes) do
+ t.Pack = function(self, value) return bpack(self.packFmt, value) end
+ t.Unpack = function(self, buf, pos) return bunpack(self.packFmt, buf, pos) end
+ end
+end
+
+GenerateTypes(m)
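+
+-- Illustrative round-trip sketch using the generated Pack/Unpack (not part
+-- of the module API; reinterprets the bytes of -1 as an unsigned value):
+--   local ba = require("flatbuffers.binaryarray")
+--   local raw = m.Int32:Pack(-1)                  -- "\255\255\255\255"
+--   assert(m.Uint32:Unpack(ba.New(raw), 0) == 2^32 - 1)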
+
+-- added after the GenerateTypes call so Bool keeps its custom Pack/Unpack
+-- functions instead of having them overwritten
+m.Bool = bool_mt
+return m
diff --git a/lua/flatbuffers/view.lua b/lua/flatbuffers/view.lua
new file mode 100644
index 0000000..da0f8bf
--- /dev/null
+++ b/lua/flatbuffers/view.lua
@@ -0,0 +1,97 @@
+local m = {}
+local mt = {}
+
+local mt_name = "flatbuffers.view.mt"
+
+local N = require("flatbuffers.numTypes")
+local binaryarray = require("flatbuffers.binaryarray")
+
+function m.New(buf, pos)
+ N.UOffsetT:EnforceNumber(pos)
+
+ -- need to convert from a string buffer into
+ -- a binary array
+
+ local o = {
+ bytes = type(buf) == "string" and binaryarray.New(buf) or buf,
+ pos = pos
+ }
+ setmetatable(o, {__index = mt, __metatable = mt_name})
+ return o
+end
+
+-- returns the relative offset of the field stored in the given
+-- vtable slot, or 0 if the field is not present in this table
+function mt:Offset(vtableOffset)
+ local vtable = self.pos - self:Get(N.SOffsetT, self.pos)
+ local vtableEnd = self:Get(N.VOffsetT, vtable)
+ if vtableOffset < vtableEnd then
+ return self:Get(N.VOffsetT, vtable + vtableOffset)
+ end
+ return 0
+end
+
+function mt:Indirect(off)
+ N.UOffsetT:EnforceNumber(off)
+ return off + N.UOffsetT:Unpack(self.bytes, off)
+end
+
+function mt:String(off)
+ N.UOffsetT:EnforceNumber(off)
+ off = off + N.UOffsetT:Unpack(self.bytes, off)
+ local start = off + N.UOffsetT.bytewidth
+ local length = N.UOffsetT:Unpack(self.bytes, off)
+ return self.bytes:Slice(start, start+length)
+end
+
+function mt:VectorLen(off)
+ N.UOffsetT:EnforceNumber(off)
+ off = off + self.pos
+ off = off + N.UOffsetT:Unpack(self.bytes, off)
+ return N.UOffsetT:Unpack(self.bytes, off)
+end
+
+function mt:Vector(off)
+ N.UOffsetT:EnforceNumber(off)
+
+ off = off + self.pos
+ local x = off + self:Get(N.UOffsetT, off)
+ x = x + N.UOffsetT.bytewidth
+ return x
+end
+
+function mt:Union(t2, off)
+ assert(getmetatable(t2) == mt_name)
+ N.UOffsetT:EnforceNumber(off)
+
+ off = off + self.pos
+ t2.pos = off + self:Get(N.UOffsetT, off)
+ t2.bytes = self.bytes
+end
+
+function mt:Get(flags, off)
+ N.UOffsetT:EnforceNumber(off)
+ return flags:Unpack(self.bytes, off)
+end
+
+function mt:GetSlot(slot, d, validatorFlags)
+ N.VOffsetT:EnforceNumber(slot)
+ if validatorFlags then
+ validatorFlags:EnforceNumber(d)
+ end
+ local off = self:Offset(slot)
+ if off == 0 then
+ return d
+ end
+ return self:Get(validatorFlags, self.pos + off)
+end
+
+function mt:GetVOffsetTSlot(slot, d)
+ N.VOffsetT:EnforceNumber(slot)
+ N.VOffsetT:EnforceNumber(d)
+ local off = self:Offset(slot)
+ if off == 0 then
+ return d
+ end
+ return off
+end
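+
+-- Illustrative access sketch (how flatc-generated code typically uses a view;
+-- 'finishedBuf' is a hypothetical finished buffer string):
+--   local view = require("flatbuffers.view")
+--   local ba   = require("flatbuffers.binaryarray")
+--   local N    = require("flatbuffers.numTypes")
+--   local buf  = ba.New(finishedBuf)
+--   local t    = view.New(buf, N.UOffsetT:Unpack(buf, 0)) -- root table position
+--   local o    = t:Offset(4)                              -- vtable slot of the first field
+--   if o ~= 0 then print(t:Get(N.Int32, t.pos + o)) end   -- read an int32 field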
+
+return m
\ No newline at end of file