Example no. 1
    def __init__(self, inStream: FileInputStream, knownTypes):
        """
        Parses a binary SKilL file
        :param inStream: FileInputStream
        :param knownTypes: classes from the specification
        """
        self.blockCounter = 0
        self.seenTypes = set()
        self.blockIDBarrier = 0
        self.poolByName = dict()
        self.localFields = []
        self.fieldDataQueue = []
        self.offset = 0
        self.types = []
        self.inStream = inStream
        self.strings = StringPool(self.inStream)
        self.annotation = Annotation(self.types)

        self.knownTypes = knownTypes

        while not inStream.eof():
            self.stringBlock()
            self.typeBlock()
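
The constructor above drives the whole parse, alternating string blocks and type blocks until the end of the input stream. A minimal construction sketch follows; the file name is a placeholder, and the class name FileParser is taken from Example no. 4 below, which contains the same constructor.

# Hedged sketch: constructing the parser directly on an input stream.
# "example.sf" and the empty knownTypes list are placeholders.
parser = FileParser(FileInputStream.open("example.sf"), knownTypes=[])
# After construction, parser.poolByName and parser.types describe the file's type system.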
Example no. 2
def open(path, mode: list, knownTypes: list):
     """
     Create a new SKilL file based on the given path and mode.
     """
     actualMode = ActualMode(mode)
     try:
         if actualMode.openMode == Mode.Create:
             strings = StringPool(None)
             types = []
             annotation = Annotation(types)
             return SkillState({}, strings, annotation, types,
                                 FileInputStream.open(path), actualMode.closeMode, knownTypes)
         elif actualMode.openMode == Mode.Read:
             p = Parser(FileInputStream.open(path), knownTypes)
             return p.read(SkillState, actualMode.closeMode, knownTypes)
         else:
             raise Exception("should never happen")
     except SkillException:
         raise
     except Exception as e:
         raise SkillException(e)
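
For orientation, a minimal usage sketch of open follows; Mode.Read is taken from the branch above, while the file name and the empty knownTypes list are placeholders for whatever a generated binding would supply.

# Hedged usage sketch: only Mode.Read is confirmed by the code above; the path
# and the knownTypes list are placeholders, not part of the original example.
state = open("example.sf", [Mode.Read], [])
# state is the SkillState built either directly (Create) or via Parser.read (Read).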
Example no. 3
    def stringBlock(self):
        """
        Parses string block
        :return:
        """
        try:
            count = self.inStream.v64()

            if count != 0:
                offset = []
                last = 0
                for i in range(0, count):
                    offset.append(self.inStream.i32())
                for j in range(0, count):
                    add = StringPool.Position(self.inStream.position() + last,
                                              offset[j] - last)
                    self.strings.stringPositions.append(add)
                    self.strings.idMap.append(None)
                    last = offset[j]
                self.inStream.jump(self.inStream.position() + last)
        except Exception as e:
            raise ParseException(self.inStream, self.blockCounter, e,
                                 "corrupted string block")
Example no. 4
# FileInputStream, StringPool, StoragePool and the other runtime classes are
# provided by the SKilL Python runtime; their imports are omitted in this excerpt.
import binascii


class FileParser:
    def __init__(self, inStream: FileInputStream, knownTypes):
        """
        Parses a binary SKilL file
        :param inStream: FileInputStream
        :param knownTypes: classes from the specification
        """
        self.blockCounter = 0
        self.seenTypes = set()
        self.blockIDBarrier = 0
        self.poolByName = dict()
        self.localFields = []
        self.fieldDataQueue = []
        self.offset = 0
        self.types = []
        self.inStream = inStream
        self.strings = StringPool(self.inStream)
        self.annotation = Annotation(self.types)

        self.knownTypes = knownTypes

        while not inStream.eof():
            self.stringBlock()
            self.typeBlock()

    @staticmethod
    def newPool(name: str, superPool, types: list, knownTypes):
        # supplied by the generated binding; see the hedged sketch after this class
        raise NotImplementedError()

    def stringBlock(self):
        """
        Parses string block
        :return:
        """
        try:
            count = self.inStream.v64()

            if count != 0:
                offset = []
                last = 0
                for i in range(0, count):
                    offset.append(self.inStream.i32())
                for j in range(0, count):
                    add = StringPool.Position(self.inStream.position() + last,
                                              offset[j] - last)
                    self.strings.stringPositions.append(add)
                    self.strings.idMap.append(None)
                    last = offset[j]
                self.inStream.jump(self.inStream.position() + last)
        except Exception as e:
            raise ParseException(self.inStream, self.blockCounter, e,
                                 "corrupted string block")

    def typeDefinition(self):
        """
        Parses a single type definition within the type block.
        :return:
        """
        try:
            name = self.strings.get(self.inStream.v64())
        except InvalidPoolIndexException as e:
            raise ParseException(self.inStream, self.blockCounter, e,
                                 "corrupted type header")

        if name is None:
            raise ParseException(self.inStream, self.blockCounter, None,
                                 "corrupted file: nullptr in typename")

        if name in self.seenTypes:
            raise ParseException(self.inStream, self.blockCounter, None,
                                 "Duplicate definition of type {}", name)
        self.seenTypes.add(name)

        # try to parse the type definition
        try:
            count = self.inStream.v64()
            # definition: StoragePool = None
            if name in self.poolByName:
                definition: StoragePool = self.poolByName.get(name)
            else:
                self.typeRestriction()
                superID = self.inStream.v64()
                # superDef: StoragePool = None
                if superID == 0:
                    superDef = None
                elif superID > len(self.types):
                    raise ParseException(
                        self.inStream, self.blockCounter, None,
                        "Type {} refers to an ill-formed super fType.\n" +
                        "          found: {}; current number of other types {}",
                        name, superID, len(self.types))
                else:
                    superDef = self.types[superID - 1]

                try:
                    # search in knowntypes for this class
                    seen = False
                    typ = None
                    escapeName = "Z" + str(
                        binascii.hexlify(name.encode("utf-16_be")))[2:-1]
                    for i in self.knownTypes:
                        if name == i.__name__.lower(
                        ) or i.__name__ == escapeName:
                            typ = i
                            seen = True
                    if not seen:
                        superType = (SkillObject, ) if superDef is None else (
                            superDef._cls, )
                        typ = type(name, superType, dict())
                    definition = self.newPool(name, superDef, self.types, typ)
                    if definition.superPool is not superDef:
                        if superDef is None:
                            raise ParseException(
                                self.inStream, self.blockCounter, None,
                                "The file contains a super type {}"
                                "but {} is specified to be a base type.",
                                "<none>", name)
                        else:
                            raise ParseException(
                                self.inStream, self.blockCounter, None,
                                "The file contains a super type {}"
                                "but {} is specified to be a base type.",
                                superDef.__name__, name)
                    self.poolByName[name] = definition
                except Exception as ex:
                    raise ParseException(
                        self.inStream, self.blockCounter, ex,
                        "The super type of {} stored in the file does not match the specification!",
                        name)
            if self.blockIDBarrier < definition.typeID():
                self.blockIDBarrier = definition.typeID()
            else:
                raise ParseException(
                    self.inStream, self.blockCounter, None,
                    "Found unordered type block. Type {} has id {}, barrier was {}.",
                    name, definition.typeID(), self.blockIDBarrier)

            bpo = definition.basePool._cachedSize
            if definition.superPool is not None:
                if count != 0:
                    bpo += self.inStream.v64()
                else:
                    bpo += definition.superPool.lastBlock().bpo

            if definition.superPool is not None:
                b: Block = definition.superPool.lastBlock()
                if bpo < b.bpo or (b.bpo + b.staticCount) < bpo:
                    raise ParseException(self.inStream, self.blockCounter,
                                         None, "Found broken bpo.")

            definition.blocks.append(Block(bpo, count, count))
            definition.staticDataInstances += count

            fieldCount = self.inStream.v64()
            self.localFields.append(LFEntry(definition, fieldCount))
        except Exception as exc:
            raise ParseException(self.inStream, self.blockCounter, exc,
                                 "unexpected end of file")

    def typeBlock(self):
        """
        Parses type block.
        :return:
        """
        self.seenTypes.clear()
        self.blockIDBarrier = 0
        self.localFields.clear()
        self.fieldDataQueue.clear()
        self.offset = 0

        # parse type
        count = self.inStream.v64()
        for _ in range(0, count):
            self.typeDefinition()

        # resize pools by updating cachedSize and staticCount
        for e in self.localFields:
            p: StoragePool = e.p
            b: Block = p.lastBlock()
            p._cachedSize += b.count

            if b.count != 0:
                parent: StoragePool = p.superPool
                if parent is not None:
                    sb: Block = parent.lastBlock()
                    delta = sb.staticCount - (b.bpo - sb.bpo)
                    if delta > 0:
                        sb.staticCount -= delta
                        parent.staticDataInstances -= delta

        # parse fields
        for lfe in self.localFields:
            p: StoragePool = lfe.p
            legalFieldIDBarrier = 1 + len(p._dataFields)
            lastBlock: Block = p.blocks[-1]
            for fieldcounter in range(lfe.count, 0, -1):
                ID = self.inStream.v64()
                if ID > legalFieldIDBarrier or ID <= 0:
                    raise ParseException(self.inStream, self.blockCounter,
                                         None, "Found an illegal field ID: {}",
                                         ID)
                if ID == legalFieldIDBarrier:
                    fieldName = self.strings.get(self.inStream.v64())
                    if fieldName is None:
                        raise ParseException(
                            self.inStream, self.blockCounter, None,
                            "corrupted file: nullptr in field name")
                    t = self.fieldType()
                    self.fieldRestrictions(t)
                    end = self.inStream.v64()
                    try:
                        f: FieldDeclaration = p.addField(t, fieldName)
                        if len(p.blocks) == 1:
                            f.addChunk(
                                SimpleChunk(self.offset, end, lastBlock.bpo,
                                            lastBlock.count))
                        else:
                            f.addChunk(
                                BulkChunk(self.offset, end, p._cachedSize,
                                          len(p.blocks)))
                    except SkillException as e:
                        raise ParseException(self.inStream, self.blockCounter,
                                             None, e.message)
                    legalFieldIDBarrier += 1
                else:
                    end = self.inStream.v64()
                    p._dataFields[ID - 1].addChunk(
                        SimpleChunk(self.offset, end, lastBlock.bpo,
                                    lastBlock.count))

                self.offset = end
                self.fieldDataQueue.append(DataEntry(p, ID))
        self.processFieldData()

    def processFieldData(self):
        """
        Parses field data by setting the positions of the chunks.
        :return:
        """
        fileOffset = self.inStream.position()
        dataEnd = fileOffset
        for e in self.fieldDataQueue:
            f: FieldDeclaration = e.owner._dataFields[e.fieldID - 1]
            end: int = f._addOffsetToLastChunk(fileOffset)
            dataEnd = max(dataEnd, end)
        self.inStream.jump(dataEnd)

    def fieldType(self):
        """
        Reads typeID and returns corresponding FieldType.
        :return: FieldType
        """
        typeID = self.inStream.v64()
        if typeID == 0:
            return ConstantI8(self.inStream.i8())
        elif typeID == 1:
            return ConstantI16(self.inStream.i16())
        elif typeID == 2:
            return ConstantI32(self.inStream.i32())
        elif typeID == 3:
            return ConstantI64(self.inStream.i64())
        elif typeID == 4:
            return ConstantV64(self.inStream.v64())
        elif typeID == 5:
            return self.annotation
        elif typeID == 6:
            return BoolType()
        elif typeID == 7:
            return I8()
        elif typeID == 8:
            return I16()
        elif typeID == 9:
            return I32()
        elif typeID == 10:
            return I64()
        elif typeID == 11:
            return V64()
        elif typeID == 12:
            return F32()
        elif typeID == 13:
            return F64()
        elif typeID == 14:
            return self.strings
        elif typeID == 15:
            return ConstantLengthArray(self.inStream.v64(), self.fieldType())
        elif typeID == 17:
            return VariableLengthArray(self.fieldType())
        elif typeID == 18:
            return ListType(self.fieldType())
        elif typeID == 19:
            return SetType(self.fieldType())
        elif typeID == 20:
            return MapType(self.fieldType(), self.fieldType())
        elif typeID >= 32:
            return self.types[typeID - 32]
        else:
            raise ParseException(self.inStream, self.blockCounter, None,
                                 "Invalid type ID: []", typeID)

    def read(self, cls, writeMode, knownTypes):
        """
        Function to create a SkillState defined in the binding.
        :param cls: class of generated SkillState
        :param writeMode: current writeMode
        :param knownTypes: classes generated in binding
        :return:
        """
        try:
            r = cls(self.poolByName, self.strings, self.annotation, self.types,
                    self.inStream, writeMode, knownTypes)
            return r
        except Exception as e:
            raise ParseException(self.inStream, self.blockCounter, e,
                                 "State instantiation failed!")

    def typeRestriction(self):
        """
        Reads type restrictions without creating restriction objects, so binary files that contain restrictions can still be read.
        :return:
        """
        i = self.inStream.v64()
        for _ in range(i, 0, -1):
            self.inStream.v64()

    def fieldRestrictions(self, fType):
        """
        Reads field restrictions without creating restriction objects, so binary files that contain restrictions can still be read.
        :param fType: Field type of the field.
        :return:
        """
        count = self.inStream.v64()
        i = fType.typeID()
        for _ in range(count, 0, -1):
            index = self.inStream.v64()
            if index == 1:
                if i in [5, 14] or i >= 32:
                    self.inStream.v64()
                else:
                    fType.readSingleField(self.inStream)
            elif index == 3:
                fType.readSingleField(self.inStream)
                fType.readSingleField(self.inStream)
            elif index == 5:
                self.inStream.v64()
            elif index == 9:
                c = self.inStream.v64()
                for k in range(c, 0, -1):
                    self.inStream.v64()
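
FileParser.newPool is left unimplemented above; a generated binding is expected to override it so that each type definition found in the file is materialized as a concrete storage pool. The sketch below is a hedged illustration of such an override: GeneratedParser, GeneratedPool and its constructor arguments are assumed names, not taken from the code above; only the convention that user type IDs start at 32 is grounded in the fieldType mapping.

# Hedged sketch of a binding-side override of the template method newPool.
# GeneratedPool and its constructor are hypothetical stand-ins for whatever
# StoragePool subclass the binding generates.
class GeneratedParser(FileParser):
    @staticmethod
    def newPool(name: str, superPool, types: list, knownTypes):
        pool = GeneratedPool(32 + len(types), name, superPool, knownTypes)
        types.append(pool)
        return pool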