Example #1
0
 def __init__(self, name):
     """Initialise a Struct of a header chunk plus dependent sub-chunks.

     The header determines (via the project helper
     ``self.__total_utf8_bytes__``) how many trailing ``sub_byte``
     chunks follow, and ``value`` is computed from the whole context.
     """
     # Leading fixed-size chunk whose contents drive the rest.
     header_field = construct.Bytes('header', 8)
     # Total count derived from the header contents.
     byte_count = construct.Value(
         'total_bytes',
         lambda ctx: self.__total_utf8_bytes__(ctx['header']))
     # The remaining (total - 1) chunks.
     trailing = construct.MetaRepeater(
         lambda ctx: self.__total_utf8_bytes__(ctx['header']) - 1,
         construct.Bytes('sub_byte', 8))
     # Final decoded value computed from the parsed context.
     decoded_value = construct.Value(
         'value', lambda ctx: self.__calculate_utf8_value__(ctx))
     construct.Struct.__init__(
         self, name, header_field, byte_count, trailing, decoded_value)
Example #2
0
 def _struct(cls):
     """Return the construct Struct for one typed parameter.

     Layout: a type byte (``ParamType`` enum) followed by a value
     whose encoding is switched on that type; anything else falls
     through to a length-prefixed cp932 string.
     """
     return construct.Struct(
         "type" / construct.Enum(construct.Byte, ParamType),
         "value" / construct.Switch(
             construct.this.type,
             {
                 "Int":
                 construct.Int32sl,
                 # 10-byte float: left-pad to 16 bytes so numpy can read
                 # it as a longdouble, and keep the low 10 bytes when
                 # building.  NOTE(review): assumes numpy.longdouble is
                 # the 80-bit extended type on this platform -- confirm.
                 "Float":
                 construct.ExprAdapter(
                     construct.Bytes(10),
                     lambda obj, ctx: numpy.frombuffer(
                         obj.rjust(16, b"\x00"), dtype=numpy.longdouble),
                     lambda obj, ctx: numpy.longdouble(obj).tobytes()[-10:],
                 ),
                 "Flag":
                 construct.Byte,
                 "Str":
                 construct.PascalString(construct.Int32ul, "cp932"),
             },
             # else 'Var' variable name type
             construct.Select(
                 construct.PascalString(construct.Int32ul, "cp932"), ),
         ),
     )
Example #3
0
    def create_packet_block(data):
        """Build a pcapng-style packet block (block type 6) around *data*.

        The payload is padded with leading NUL bytes up to a 4-byte
        boundary (preserving the original implementation's leading-pad
        choice) and the two total-length fields account for the 32
        bytes of fixed header/trailer.

        :param data: raw packet bytes to embed.
        :returns: the serialized block as ``bytes``.
        """
        data_len = len(data)
        # Round up to a multiple of 4.  BUG FIX: this previously used
        # ``data_len % 4 is 0`` -- ``is`` tests identity, not equality,
        # and is only accidentally true for small ints in CPython.
        if data_len % 4 != 0:
            data_len += 4 - data_len % 4
        # Left-pad with NUL bytes so the payload fills the rounded length.
        data = b'\x00' * (data_len - len(data)) + data
        block_len = data_len + 32  # fixed header + trailer size
        packet_block = construct.Struct(
            "block_type" / construct.Int32ub,
            "block_total_len_1" / construct.Int32ub,
            "interface_id" / construct.Int32ub,
            # "high_timestamp" / construct.Int32ub,
            # "low_timestamp" / construct.Int32ub,
            "timestamp" / construct.Int64ub,
            "capture_packet_len" / construct.Int32ub,
            "original_packet_len" / construct.Int32ub,
            "packet_data" / construct.Bytes(data_len),
            "block_total_len_2" / construct.Int32ub,
        )
        printing.debug(block_len)
        r = packet_block.build(
            dict(
                block_type=0x00000006,
                block_total_len_1=block_len,
                interface_id=0x0,
                # high_timestamp=0x0,
                # low_timestamp=0x0,
                timestamp=int(time.time() * 1000000),  # microseconds
                capture_packet_len=data_len,
                original_packet_len=data_len,
                packet_data=data,
                block_total_len_2=block_len))

        return r
Example #4
0
 class ConstDefaultTest(DataclassMixin):
     """Dataclass-backed struct mixing Const and Default fields."""
     # Fixed magic bytes; parsing fails on any other value.
     const_bytes: bytes = csfield(cs.Const(b"BMP"))
     # Constant single byte equal to 5.
     const_int: int = csfield(cs.Const(5, cs.Int8ub))
     # One byte, defaulting to 28 when not supplied at build time.
     default_int: int = csfield(cs.Default(cs.Int8ub, 28))
     # const_int raw bytes; defaults to that many NULs (bytes(5)).
     default_lambda: bytes = csfield(
         cs.Default(cs.Bytes(cs.this.const_int),
                    lambda ctx: bytes(ctx.const_int)))
Example #5
0
def _get_data_subblocks(name):
    """Return Adapter to parse GIF data sub-blocks.

    A GIF data stream is a sequence of sub-blocks -- one size byte
    followed by that many data bytes -- terminated by a zero-size
    block.  Decoding joins the run into one string; encoding splits a
    string into <=255-byte chunks plus the terminator.
    (Python 2 code: note ``xrange`` and str-typed data.)
    """
    return construct.ExprAdapter(
        construct.Struct(
            name,
            construct.RepeatUntil(
                # stop once the zero-length terminator block is read
                lambda obj, ctx: obj.block_size == 0x00,
                construct.Struct(
                    'blocks',
                    construct.ULInt8('block_size'),
                    construct.Bytes('data_values', lambda ctx: ctx.block_size),
                ),
            ),
        ),
        # from comment string, build Containers
        encoder=lambda obj, ctx: construct.Container(blocks=[
            construct.Container(
                block_size=len(chunk),
                data_values=chunk,
            ) for chunk in [obj[i:i + 255] for i in xrange(0, len(obj), 255)]
        ] + [construct.Container(block_size=0, data_values='')], ),
        # from Containers, build comment string
        decoder=lambda obj, ctx: ''.join(dsb.data_values
                                         for dsb in obj.blocks),
    )
Example #6
0
 class Image(DataclassMixin):
     """Tiny image struct: 1-byte width/height plus raw pixel bytes."""
     width: int = csfield(cs.Int8ub)
     height: int = csfield(cs.Int8ub)
     # width*height pixel bytes; defaults to that many NUL bytes.
     pixels: t.Optional[bytes] = csfield(
         cs.Default(
             cs.Bytes(cs.this.width * cs.this.height),
             lambda ctx: bytes(ctx.width * ctx.height),
         ))
Example #7
0
 class Image(cst.TContainerBase):
     """Tiny image struct: 1-byte width/height plus raw pixel bytes."""
     width: int = cst.TStructField(cs.Int8ub)
     height: int = cst.TStructField(cs.Int8ub)
     # width*height pixel bytes; defaults to that many NUL bytes.
     pixels: cst.Opt[bytes] = cst.TStructField(
         cs.Default(
             cs.Bytes(cs.this.width * cs.this.height),
             lambda ctx: bytes(ctx.width * ctx.height),
         ))
Example #8
0
 def __init__(self, const):
     """Adapter around *const* that switches on ``c.this._building``:
     raw bytes (or Pass) when building, an optional Const when parsing.
     """
     self.const = const
     # Building side: accept len(const) raw bytes, else emit nothing.
     build_side = c.Select(c.Bytes(len(self.const)), c.Pass)
     # Parsing side: the constant may simply be absent.
     parse_side = Optional(c.Const(const))
     super().__init__(
         c.IfThenElse(c.this._building, build_side, parse_side))
Example #9
0
def UBInt24(name):  # noqa
    """
    A 24-bit integer, stored as three raw bytes wrapped in _UBInt24.

    :param name: The attribute name under which this value will be
        accessible.
    :type name: :py:class:`str`
    """
    raw_three_bytes = construct.Bytes(name, 3)
    return _UBInt24(raw_three_bytes)
Example #10
0
def PrefixedOffset(sizetype, type, offs=0):
    """Length-prefixed wrapper around *type*.

    Parses a ``sizetype`` size then ``size + offs`` bytes and re-parses
    those bytes as *type*; on build, serializes the content with *type*
    (forwarding context params) and rebuilds the size field from it.
    NOTE(review): parameter ``type`` shadows the builtin -- kept for
    interface compatibility.
    """
    return C.FocusedSeq(
        "content", "_data" / C.Rebuild(
            C.Struct("size" / C.Rebuild(sizetype,
                                        C.len_(this.data) - offs),
                     "data" / C.Bytes(this.size + offs)), lambda obj:
            {"data": type.build(obj.content, **{
                **obj._params,
                **obj
            })}), "content" / C.RestreamData(this._data.data, type))
Example #11
0
def decode_itempos(itempos):
    """
    Decodes a single itempos and returns extracted information.

    Returns a list: [itempos_size, filesize, modified timestamp,
    short filename, created timestamp, accessed timestamp, unicode
    filename].  Created/accessed timestamps and the unicode filename
    are empty strings when the extension version is too old to carry
    them (previously that case raised a NameError -- see below).
    (Python 2 code: ``StringIO`` and ``unicode`` preserved.)
    """
    itempos_io = StringIO.StringIO(itempos)
    itempos_struct = construct.Struct("itempos",
                                      construct.ULInt16("itempos_size"),
                                      construct.Padding(2),
                                      construct.ULInt32("filesize"),
                                      construct.Bytes("dos_date", 2),
                                      construct.Bytes("dos_time", 2),
                                      construct.ULInt16("file_attr"),
                                      construct.CString("filename")
                                      )
    parse_res = itempos_struct.parse_stream(itempos_io)
    # Entries are 2-byte aligned; skip the pad byte if present.
    if itempos_io.pos % 2 == 1:
        itempos_io.read(1)
    ext_struct = construct.Struct("ext",
                                  construct.ULInt16("ext_size"),
                                  construct.ULInt16("ext_version")
                                  )
    parse_ext = ext_struct.parse_stream(itempos_io)
    timestamp_created = ""
    timestamp_access = ""
    if parse_ext["ext_version"] >= 0x3:
        itempos2_struct = construct.Struct("itempos2",
                                           construct.Padding(2),  # 0004
                                           construct.Padding(2),  # BEEF
                                           construct.Bytes("creation_dos_date", 2),
                                           construct.Bytes("creation_dos_time", 2),
                                           construct.Bytes("access_dos_date", 2),
                                           construct.Bytes("access_dos_time", 2),
                                           construct.Padding(4)
                                           )
        parse_res2 = itempos2_struct.parse_stream(itempos_io)
        # BUG FIX: these two timestamps were previously computed
        # unconditionally after this block, so any entry with
        # ext_version < 0x3 raised NameError on undefined parse_res2.
        timestamp_created = dosdate(parse_res2["creation_dos_date"],
                                    parse_res2["creation_dos_time"]).strftime("%d/%m/%Y %H:%M:%S")
        timestamp_access = dosdate(parse_res2["access_dos_date"],
                                   parse_res2["access_dos_time"]).strftime("%d/%m/%Y %H:%M:%S")
    unicode_filename = ""
    if parse_ext["ext_version"] >= 0x7:
        itempos3_struct = construct.Struct("itempos3",
                                           construct.ULInt64("file_ref"),
                                           construct.Padding(8),
                                           construct.Padding(2),
                                           construct.Padding(4)
                                           )
        # Parsed only to advance the stream past the fixed fields.
        parse_res3 = itempos3_struct.parse_stream(itempos_io)
        unicode_filename = itempos_io.read().decode("utf16")
        # NOTE(review): trimming only when the name does NOT end in NUL
        # looks inverted relative to the comment; behavior preserved.
        if not unicode_filename.endswith("\0"):
            unicode_filename = unicode_filename[:-2]  # ditch last unused 2 bytes and \0 char
    elif parse_ext["ext_version"] >= 0x3:
        unicode_filename = itempos_io.read().decode("utf16")
        if not unicode_filename.endswith("\0"):
            unicode_filename = unicode_filename[:-2]  # ditch last unused 2 bytes and \0 char

    timestamp_modified = dosdate(parse_res["dos_date"], parse_res["dos_time"]).strftime("%d/%m/%Y %H:%M:%S")

    return [unicode(parse_res["itempos_size"]), unicode(parse_res["filesize"]), timestamp_modified,
            parse_res["filename"], timestamp_created, timestamp_access, unicode_filename]
    def __init__(self, encoding=None):
        """
        Construct-Adapter for UUID field.

        Parses either `utf8` encoded or raw byte strings into :class:`UUID`
        instances.

        Args:
            encoding (str): The encoding to use.  When falsy, the field
                is read as 16 raw bytes instead of an SGString.
        """
        # NOTE(review): super(self.__class__, ...) recurses infinitely
        # if this class is ever subclassed; the explicit class name is
        # preferable but is not visible in this chunk, so left as-is.
        if encoding:
            super(self.__class__, self).__init__(SGString(encoding))
        else:
            super(self.__class__, self).__init__(construct.Bytes(0x10))
Example #13
0
 def _struct(cls):
     """Return the construct Struct for this file format.

     A validated version and header counters are followed by one raw
     parameter-stream blob per command and a length-prefixed command
     list (first matching entry of ``_command_structs`` wins).
     """
     return construct.Struct(
         'version' / _LsbVersionValidator(construct.Int32ul),
         'flags' / construct.Byte,
         'command_count' / construct.Int32ul,
         'param_stream_size' / construct.Int32ul,
         # one param_stream_size-byte blob per command
         'command_params' / construct.Array(
             construct.this.command_count,
             _ParamStreamAdapter(
                 construct.Bytes(construct.this.param_stream_size)),
         ),
         'commands' / construct.PrefixedArray(
             construct.Int32ul, construct.Select(*_command_structs)),
     )
Example #14
0
 def _struct(cls):
     """Return the construct Struct for this file format.

     A validated version and header counters are followed by one raw
     parameter-stream blob per command and a length-prefixed command
     list (first matching entry of ``_command_structs`` wins).
     """
     return construct.Struct(
         "version" / LsbVersionValidator(construct.Int32ul),
         "flags" / construct.Byte,
         "command_count" / construct.Int32ul,
         "param_stream_size" / construct.Int32ul,
         # one param_stream_size-byte blob per command
         "command_params" / construct.Array(
             construct.this.command_count,
             _ParamStreamAdapter(
                 construct.Bytes(construct.this.param_stream_size)),
         ),
         "commands" / construct.PrefixedArray(
             construct.Int32ul, construct.Select(*_command_structs)),
     )
def LifeScanPacket(include_link_control: bool, ) -> construct.Struct:  # pylint: disable=invalid-name
    """Return the framing Struct for a LifeScan packet.

    Frame: STX, a rebuilt length byte, a link-control byte (the shared
    _LINK_CONTROL construct or a constant NUL), the message payload,
    ETX, then a CRC-CCITT checksum over the raw framed bytes.
    """
    link_control_construct = (
        _LINK_CONTROL if include_link_control else construct.Const(b"\x00"))

    framed = construct.Struct(
        construct.Const(b"\x02"),  # stx
        "length" / construct.Rebuild(
            construct.Byte, lambda this: len(this.message) + 6),
        "link_control" / link_control_construct,
        "message" / construct.Bytes(lambda this: this.length - 6),
        construct.Const(b"\x03"),  # etx
    )
    return construct.Struct(
        "data" / construct.RawCopy(framed),
        "checksum" / construct.Checksum(construct.Int16ul, lifescan.crc_ccitt,
                                        construct.this.data.data),
    )
Example #16
0
def LifeScanPacket(include_link_control):  # pylint: disable=invalid-name
    # type: (bool) -> construct.Struct
    """Return the framing Struct for a LifeScan packet.

    Frame: STX, a rebuilt length byte, a link-control byte (the shared
    _LINK_CONTROL construct or a constant NUL), the message payload,
    ETX, then a CRC-CCITT checksum over the raw framed bytes.
    """
    link_control_construct = (
        _LINK_CONTROL if include_link_control else construct.Const(b'\x00'))

    framed = construct.Struct(
        construct.Const(b'\x02'),  # stx
        'length' / construct.Rebuild(
            construct.Byte, lambda this: len(this.message) + 6),
        'link_control' / link_control_construct,
        'message' / construct.Bytes(lambda this: this.length - 6),
        construct.Const(b'\x03'),  # etx
    )
    return construct.Struct(
        'data' / construct.RawCopy(framed),
        'checksum' / construct.Checksum(construct.Int16ul, lifescan.crc_ccitt,
                                        construct.this.data.data),
    )
Example #17
0
def LifeScanPacket(command_prefix, include_link_control):
    """Return the framing Struct for a LifeScan packet with a fixed
    command-prefix byte.

    Frame: STX, a rebuilt length byte, a link-control byte, the
    constant command-prefix byte, the message payload, ETX, then a
    CRC-CCITT checksum over the raw framed bytes.
    """
    link_control_construct = (
        _LINK_CONTROL if include_link_control else construct.Const(b'\x00'))

    command_prefix_construct = construct.Const(command_prefix, construct.Byte)

    framed = construct.Struct(
        construct.Const(b'\x02'),  # stx
        'length' / construct.Rebuild(
            construct.Byte, lambda this: len(this.message) + 7),
        'link_control' / link_control_construct,
        'command_prefix' / command_prefix_construct,
        'message' / construct.Bytes(lambda this: this.length - 7),
        construct.Const(b'\x03'),  # etx
    )
    return construct.Struct(
        'data' / construct.RawCopy(framed),
        'checksum' / construct.Checksum(construct.Int16ul, lifescan.crc_ccitt,
                                        construct.this.data.data),
    )
Example #18
0
 def _struct(cls):
     """Return the construct Struct for one typed parameter.

     Layout: a type byte (``ParamType`` enum) followed by a value
     whose encoding is switched on that type; anything else falls
     through to a length-prefixed cp932 string.
     """
     return construct.Struct(
         'type' / construct.Enum(construct.Byte, ParamType),
         'value' / construct.Switch(
             construct.this.type,
             {
                 'Int': construct.Int32ul,
                 # 10-byte float: left-pad to 16 bytes so numpy can read
                 # it as a longdouble, keep the low 10 bytes on build.
                 # NOTE(review): assumes numpy.longdouble is the 80-bit
                 # extended type on this platform -- confirm.
                 'Float': construct.ExprAdapter(
                     construct.Bytes(10),
                     lambda obj, ctx: numpy.frombuffer(obj.rjust(16, b'\x00'),
                                                       dtype=numpy.longdouble),
                     lambda obj, ctx: numpy.longdouble(obj).tobytes()[-10:]
                 ),
                 'Flag': construct.Byte,
                 'Str': construct.PascalString(construct.Int32ul, 'cp932'),
             },
             # else 'Var' variable name type
             construct.Select(
                 construct.PascalString(construct.Int32ul, 'cp932'),
             ),
         ),
     )
Example #19
0
class MsgLinuxCpuState(SBP):
    """SBP class for message MSG_LINUX_CPU_STATE (0x7F00).

  You can have MSG_LINUX_CPU_STATE inherit its fields directly
  from an inherited SBP object, or construct it inline using a dict
  of its fields.

  
  This message indicates the process state of the top 10 heaviest
consumers of CPU on the system.


  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  index : int
    sequence of this status message, values from 0-9
  pid : int
    the PID of the process
  pcpu : int
    percent of cpu used, expressed as a fraction of 256
  tname : string
    fixed length string representing the thread name
  cmdline : string
    the command line (as much as it fits in the remaining packet)
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
    # Binary layout of the message payload, in wire order.
    _parser = construct.Struct(
        'index' / construct.Int8ul,
        'pid' / construct.Int16ul,
        'pcpu' / construct.Int8ul,
        'tname' / construct.Bytes(15),
        'cmdline' / construct.GreedyBytes,
    )
    # Payload attribute names, matching _parser field names.
    __slots__ = [
        'index',
        'pid',
        'pcpu',
        'tname',
        'cmdline',
    ]

    def __init__(self, sbp=None, **kwargs):
        # Either wrap an existing SBP frame (decoding its payload), or
        # build the message from keyword arguments.
        if sbp:
            super(MsgLinuxCpuState,
                  self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                                 sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)
        else:
            super(MsgLinuxCpuState, self).__init__()
            self.msg_type = SBP_MSG_LINUX_CPU_STATE
            self.sender = kwargs.pop('sender', SENDER_ID)
            self.index = kwargs.pop('index')
            self.pid = kwargs.pop('pid')
            self.pcpu = kwargs.pop('pcpu')
            self.tname = kwargs.pop('tname')
            self.cmdline = kwargs.pop('cmdline')

    def __repr__(self):
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Given a JSON-encoded string s, build a message object.

    """
        d = json.loads(s)
        return MsgLinuxCpuState.from_json_dict(d)

    @staticmethod
    def from_json_dict(d):
        sbp = SBP.from_json_dict(d)
        return MsgLinuxCpuState(sbp, **d)

    def from_binary(self, d):
        """Given a binary payload d, update the appropriate payload fields of
    the message.

    """
        p = MsgLinuxCpuState._parser.parse(d)
        for n in self.__class__.__slots__:
            setattr(self, n, getattr(p, n))

    def to_binary(self):
        """Produce a framed/packed SBP message.

    """
        c = containerize(exclude_fields(self))
        self.payload = MsgLinuxCpuState._parser.build(c)
        return self.pack()

    def to_json_dict(self):
        # Refresh the payload before serializing so the dict is current.
        self.to_binary()
        d = super(MsgLinuxCpuState, self).to_json_dict()
        j = walk_json_dict(exclude_fields(self))
        d.update(j)
        return d
Example #20
0
def UBInt24(name):  # noqa
    """Return a 24-bit integer field named *name* (three raw bytes
    wrapped in _UBInt24)."""
    raw_bytes = construct.Bytes(name, 3)
    return _UBInt24(raw_bytes)
Example #21
0
class GetClassSpecificDescriptorRequest(GetDescriptorRequest):
    """Generic request for class-specific (0x24) descriptors.

    Acts as a dispatcher: when decoding, it searches its subclasses for
    one whose CLASS_NUMBER / DESCRIPTOR_SUBTYPE (or an overridden
    matches_class_specifics) matches the owning interface's class and
    the descriptor's subtype byte, and delegates decoding to it.
    """

    # Specialized descriptor information -- either these two fields should be overridden,
    # or matches_class_specifics should be,
    CLASS_NUMBER       = -1
    DESCRIPTOR_SUBTYPE = -1

    # Generic descriptor information.
    DESCRIPTOR_NAME = "class-specific"
    BINARY_FORMAT = DescriptorFormat(
            "bLength"             / DescriptorField("Length"),
            "bDescriptorType"     / DescriptorNumber(0x24),
            "bDescriptorSubtype"  / DescriptorField("Descriptor Subtype"),
            "Data"                / construct.Bytes(this.bLength)
    )


    @classmethod
    def matches_class_specifics(cls, usb_class, subclass, protocol, subtype, is_interface):
        """
        Determines whether the given class handles the given class/subclass/protocol and
        descriptor subtype. Should be overridden in subordinate classes if CLASS_NUMBER
        and DESCRIPTOR_SUBTYPE aren't.
        """

        # Default implementation.
        return (usb_class == cls.CLASS_NUMBER) and (subtype == cls.DESCRIPTOR_SUBTYPE)


    @classmethod
    def find_specialized_descriptor(cls, data, interface_descriptor, subtype):
        """
        Finds any specialized ClassSpecificDescriptor request objects that correspond
        to the current interface -or- to the device's class, and the descriptor subtype.
        """

        # FIXME: read the device class, and set the usb_class/subclass/protocol here;
        # only defer to the interface descriptor if we have a composite device.
        if not interface_descriptor:
            return None
        else:
            usb_class = interface_descriptor['bInterfaceClass']
            subclass  = interface_descriptor['bInterfaceSubclass']
            protocol  = interface_descriptor['bInterfaceProtocol']
            is_device = False

        # Search all of our subclasses.
        # BUG FIX: the loop variable was previously also named ``subclass``,
        # shadowing the USB subclass number read above and passing the
        # candidate *class object* where the subclass number was expected.
        for candidate in cls.__subclasses__():
            matches = candidate.matches_class_specifics(usb_class, subclass, protocol, subtype, is_device)
            if matches:
                return candidate

        return None


    @classmethod
    def _add_subtype_names(cls, decoded, bytes_parsed, specialized_class):
        """Relabel the type/subtype rows of a decoded descriptor table."""

        decoded_descriptor_fields = list(decoded.keys())

        # Update the second entry (the class type) to be class-specific.
        if len(decoded_descriptor_fields) >= 2:
            descriptor_type_row = decoded_descriptor_fields[1]
            decoded[descriptor_type_row] = 'class-specific'

        # Update the third entry (the subclass type) to feature the subclass name.
        if len(decoded_descriptor_fields) >= 3:
            descriptor_subtype_row  = decoded_descriptor_fields[2]
            decoded[descriptor_subtype_row] = specialized_class.get_descriptor_name()

        return decoded, bytes_parsed




    @classmethod
    def decode_as_specialized_descriptor(cls, data, use_pretty_names, parent, subordinate_number):
        """Try to decode *data* via a more specific subclass; None if impossible."""

        # If we don't have at least three bytes, we can't read the subtype. Abort.
        if len(data) < 3:
            return None

        # Otherwise, the subtype is always stored in the third byte.
        subtype_number = data[2]

        # If we don't have a parent descriptor to work with, we can't figure out which class we belong to.
        if not parent:
            return None

        # Find the interface associated with this descriptor.
        interface_descriptor = parent.find_last_interface_descriptor(subordinate_number)

        # If we have an interface descriptor, try to figure out a more appropriate class to parse this structure.
        specialized_class = cls.find_specialized_descriptor(data, interface_descriptor, subtype_number)

        # If we found a more specialized class, use it!
        if specialized_class:
            decoded = specialized_class.decode_data_as_descriptor(data, use_pretty_names, parent, subordinate_number)

            # If we're using pretty names, add the more-specific subtype names.
            if use_pretty_names:
                decoded = cls._add_subtype_names(*decoded, specialized_class)

            return decoded

        return None



    @classmethod
    def decode_data_as_descriptor(cls, data, use_pretty_names=True, parent=None, subordinate_number=None):
        """Decode *data*, preferring a specialized subclass when one matches."""

        # (Removed an unused function-local ``import sys``.)

        # If we're being called from the GetClassSpecificDescriptor generic 'placeholder' class,
        # try to specialize.
        if cls == GetClassSpecificDescriptorRequest:
            specialized = cls.decode_as_specialized_descriptor(data, use_pretty_names, parent, subordinate_number)

            if specialized:
                return specialized

        # Otherwise, pass this down the chain.
        return super().decode_data_as_descriptor(data, use_pretty_names, parent)
Example #22
0
)

# Top-level PSBT layout: magic + separator, then key-value sequences.
PSBTKeyless = c.Struct(
    "magic" / c.Const(b'psbt'),
    "sep" / c.Const(b'\xff'),
    "general" / ValueSequence,
    # The first general entry's value is re-parsed as the raw
    # unsigned transaction.
    "transaction" / c.RestreamData(c.this.general[0].value, Transaction),
    # One key-value sequence per transaction input/output.
    "inputs" / c.Array(c.len_(c.this.transaction.inputs), ValueSequence),
    "outputs" / c.Array(c.len_(c.this.transaction.outputs), ValueSequence),
    c.Terminated,
)

# micro-parser for BIP32

# 4-byte master-key fingerprint followed by the derivation path as
# greedy little-endian uint32 indices.
BIP32Derivation = c.Struct(
    "fingerprint" / c.Bytes(4),
    "path" / c.GreedyRange(c.Int32ul),
)


def to_protobuf(psbt):
    """Make a protobuf message from a parsed Key-Value psbt"""
    # construct a protobuf repr now
    msg = bip174.PSBT(unsigned_transaction=psbt.general[0].value)

    for inp in psbt.inputs:
        in_msg = bip174.InputType()
        for entry in inp:
            if entry.key.type == 0:
                in_msg.non_witness_utxo = entry.value
            elif entry.key.type == 1:
Example #23
0
        return dt.isoformat()


def FILETIME(name):
    """Return a FILETIME field: a 64-bit little-endian value wrapped
    in FileTimeAdapter."""
    raw_filetime = construct.ULInt64(name)
    return FileTimeAdapter(raw_filetime)


# Common structs.

# Length-prefixed UTF-16 string.
UNICODE_STRING = construct.Struct(
    'UNICODE_STRING', construct.ULInt32('length'),
    construct.String('data', lambda ctx: ctx.length, encoding='utf16'))

# Length-prefixed raw byte blob.
SIZED_DATA = construct.Struct('SIZED_DATA', construct.ULInt32('size'),
                              construct.Bytes('data', lambda ctx: ctx.size))

# DPAPI structs.

DPAPI_BLOB = construct.Struct(
    'BLOB', construct.ULInt32('version'), GUID('provider'),
    construct.ULInt32('mk_version'),
    GUID('mk_guid'), construct.ULInt32('flags'),
    construct.Rename('description', UNICODE_STRING),
    construct.ULInt32('crypt_alg_id'), construct.ULInt32('crypt_alg_len'),
    construct.ULInt32('salt_len'),
    construct.Bytes('salt', lambda ctx: ctx.salt_len),
    construct.ULInt32('unknown1'), construct.ULInt32('hash_alg_id'),
    construct.ULInt32('hash_alg_len'), construct.ULInt32('hmac_len'),
    construct.Bytes('hmac', lambda ctx: ctx.hmac_len),
    construct.ULInt32('encrypted_len'),
Example #24
0
class MsgLinuxProcessSocketQueues(SBP):
    """SBP class for message MSG_LINUX_PROCESS_SOCKET_QUEUES (0x7F04).

  You can have MSG_LINUX_PROCESS_SOCKET_QUEUES inherit its fields directly
  from an inherited SBP object, or construct it inline using a dict
  of its fields.

  
  Top 10 list of sockets with deep queues.


  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  index : int
    sequence of this status message, values from 0-9
  pid : int
    the PID of the process in question
  recv_queued : int
    the total amount of receive data queued for this process
  send_queued : int
    the total amount of send data queued for this process
  socket_types : int
    A bitfield indicating the socket types used:
  0x1 (tcp), 0x2 (udp), 0x4 (unix stream), 0x8 (unix dgram), 0x10 (netlink),
  and 0x8000 (unknown)

  socket_states : int
    A bitfield indicating the socket states:
  0x1 (established), 0x2 (syn-sent), 0x4 (syn-recv), 0x8 (fin-wait-1),
  0x10 (fin-wait-2), 0x20 (time-wait), 0x40 (closed), 0x80 (close-wait),
  0x100 (last-ack), 0x200 (listen), 0x400 (closing), 0x800 (unconnected),
  and 0x8000 (unknown)

  address_of_largest : string
    Address of the largest queue, remote or local depending on the directionality
of the connection.

  cmdline : string
    the command line of the process in question
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
    # Binary layout of the message payload, in wire order.
    _parser = construct.Struct(
        'index' / construct.Int8ul,
        'pid' / construct.Int16ul,
        'recv_queued' / construct.Int16ul,
        'send_queued' / construct.Int16ul,
        'socket_types' / construct.Int16ul,
        'socket_states' / construct.Int16ul,
        'address_of_largest' / construct.Bytes(64),
        'cmdline' / construct.GreedyBytes,
    )
    # Payload attribute names, matching _parser field names.
    __slots__ = [
        'index',
        'pid',
        'recv_queued',
        'send_queued',
        'socket_types',
        'socket_states',
        'address_of_largest',
        'cmdline',
    ]

    def __init__(self, sbp=None, **kwargs):
        # Either wrap an existing SBP frame (decoding its payload), or
        # build the message from keyword arguments.
        if sbp:
            super(MsgLinuxProcessSocketQueues,
                  self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                                 sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)
        else:
            super(MsgLinuxProcessSocketQueues, self).__init__()
            self.msg_type = SBP_MSG_LINUX_PROCESS_SOCKET_QUEUES
            self.sender = kwargs.pop('sender', SENDER_ID)
            self.index = kwargs.pop('index')
            self.pid = kwargs.pop('pid')
            self.recv_queued = kwargs.pop('recv_queued')
            self.send_queued = kwargs.pop('send_queued')
            self.socket_types = kwargs.pop('socket_types')
            self.socket_states = kwargs.pop('socket_states')
            self.address_of_largest = kwargs.pop('address_of_largest')
            self.cmdline = kwargs.pop('cmdline')

    def __repr__(self):
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Given a JSON-encoded string s, build a message object.

    """
        d = json.loads(s)
        return MsgLinuxProcessSocketQueues.from_json_dict(d)

    @staticmethod
    def from_json_dict(d):
        sbp = SBP.from_json_dict(d)
        return MsgLinuxProcessSocketQueues(sbp, **d)

    def from_binary(self, d):
        """Given a binary payload d, update the appropriate payload fields of
    the message.

    """
        p = MsgLinuxProcessSocketQueues._parser.parse(d)
        for n in self.__class__.__slots__:
            setattr(self, n, getattr(p, n))

    def to_binary(self):
        """Produce a framed/packed SBP message.

    """
        c = containerize(exclude_fields(self))
        self.payload = MsgLinuxProcessSocketQueues._parser.build(c)
        return self.pack()

    def to_json_dict(self):
        # Refresh the payload before serializing so the dict is current.
        self.to_binary()
        d = super(MsgLinuxProcessSocketQueues, self).to_json_dict()
        j = walk_json_dict(exclude_fields(self))
        d.update(j)
        return d
Example #25
0
class KeychainParser(interface.FileObjectParser):
  """Parser for MacOS Keychain files.

  A keychain database consists of a file header, a schema that lists table
  offsets, and per-table records such as application and Internet password
  entries.
  """

  NAME = 'mac_keychain'
  DESCRIPTION = 'Parser for MacOS Keychain files.'

  KEYCHAIN_SIGNATURE = b'kych'
  KEYCHAIN_MAJOR_VERSION = 1
  KEYCHAIN_MINOR_VERSION = 0

  RECORD_TYPE_APPLICATION = 0x80000000
  RECORD_TYPE_INTERNET = 0x80000001

  # DB HEADER.
  KEYCHAIN_DB_HEADER = construct.Struct(
      'db_header',
      construct.Bytes('signature', 4),
      construct.UBInt16('major_version'),
      construct.UBInt16('minor_version'),
      construct.UBInt32('header_size'),
      construct.UBInt32('schema_offset'),
      construct.Padding(4))

  # DB SCHEMA.
  KEYCHAIN_DB_SCHEMA = construct.Struct(
      'db_schema',
      construct.UBInt32('size'),
      construct.UBInt32('number_of_tables'))

  # For each number_of_tables, the schema has a TABLE_OFFSET with the
  # offset starting in the DB_SCHEMA.
  TABLE_OFFSET = construct.UBInt32('table_offset')

  TABLE_HEADER = construct.Struct(
      'table_header',
      construct.UBInt32('table_size'),
      construct.UBInt32('record_type'),
      construct.UBInt32('number_of_records'),
      construct.UBInt32('first_record'),
      construct.UBInt32('index_offset'),
      construct.Padding(4),
      construct.UBInt32('recordnumbercount'))

  # Common record header; the UBInt32 attribute fields hold offsets,
  # relative to the start of the record, to the attribute values.
  RECORD_HEADER = construct.Struct(
      'record_header',
      construct.UBInt32('entry_length'),
      construct.Padding(12),
      construct.UBInt32('ssgp_length'),
      construct.Padding(4),
      construct.UBInt32('creation_time'),
      construct.UBInt32('last_modification_time'),
      construct.UBInt32('text_description'),
      construct.Padding(4),
      construct.UBInt32('comments'),
      construct.Padding(8),
      construct.UBInt32('entry_name'),
      construct.Padding(20),
      construct.UBInt32('account_name'),
      construct.Padding(4))

  RECORD_HEADER_APP = construct.Struct(
      'record_entry_app',
      RECORD_HEADER,
      construct.Padding(4))

  RECORD_HEADER_INET = construct.Struct(
      'record_entry_inet',
      RECORD_HEADER,
      construct.UBInt32('where'),
      construct.UBInt32('protocol'),
      construct.UBInt32('type'),
      construct.Padding(4),
      construct.UBInt32('url'))

  # Length-prefixed (32-bit big-endian) string attribute value.
  TEXT = construct.PascalString(
      'text', length_field=construct.UBInt32('length'))

  # Timestamps are stored as ASCII digit strings: YYYYMMDDHHMMSS + padding.
  TIME = construct.Struct(
      'timestamp',
      construct.String('year', 4),
      construct.String('month', 2),
      construct.String('day', 2),
      construct.String('hour', 2),
      construct.String('minute', 2),
      construct.String('second', 2),
      construct.Padding(2))

  TYPE_TEXT = construct.String('type', 4)

  # TODO: add more protocols.
  _PROTOCOL_TRANSLATION_DICT = {
      'htps': 'https',
      'smtp': 'smtp',
      'imap': 'imap',
      'http': 'http'}

  def _ReadEntryApplication(self, parser_mediator, file_object):
    """Extracts the information from an application password entry.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.
    """
    record_offset = file_object.tell()
    try:
      record_struct = self.RECORD_HEADER_APP.parse_stream(file_object)
    except (IOError, construct.FieldError):
      parser_mediator.ProduceExtractionError(
          'unable to parse record structure at offset: 0x{0:08x}'.format(
              record_offset))
      return

    (ssgp_hash, creation_time, last_modification_time, text_description,
     comments, entry_name, account_name) = self._ReadEntryHeader(
         parser_mediator, file_object, record_struct.record_header,
         record_offset)

    # Move to the end of the record so the next record can be read from a
    # known position regardless of how far the attribute reads seeked.
    next_record_offset = (
        record_offset + record_struct.record_header.entry_length)
    file_object.seek(next_record_offset, os.SEEK_SET)

    event_data = KeychainApplicationRecordEventData()
    event_data.account_name = account_name
    event_data.comments = comments
    event_data.entry_name = entry_name
    event_data.ssgp_hash = ssgp_hash
    event_data.text_description = text_description

    if creation_time:
      event = time_events.DateTimeValuesEvent(
          creation_time, definitions.TIME_DESCRIPTION_CREATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    if last_modification_time:
      event = time_events.DateTimeValuesEvent(
          last_modification_time, definitions.TIME_DESCRIPTION_MODIFICATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

  def _ReadEntryHeader(
      self, parser_mediator, file_object, record, record_offset):
    """Read the common record attributes.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.
      record (construct.Struct): record header structure.
      record_offset (int): offset of the start of the record.

    Returns:
      A tuple containing:
        ssgp_hash: Hash of the encrypted data (passwd, cert, note).
        creation_time (dfdatetime.TimeElements): entry creation time or None.
        last_modification_time (dfdatetime.TimeElements): entry last
            modification time or None.
        text_description: A brief description of the entry.
        comments: Comments attached to the entry.
        entry_name: Name of the entry.
        account_name: Name of the account.
    """
    # TODO: reduce number of seeks and/or offset calculations needed
    # for parsing.

    # Info: The hash header always start with the string ssgp follow by
    #       the hash. Furthermore The fields are always a multiple of four.
    #       Then if it is not multiple the value is padded by 0x00.
    ssgp_hash = binascii.hexlify(file_object.read(record.ssgp_length)[4:])

    creation_time = None

    structure_offset = record_offset + record.creation_time - 1
    file_object.seek(structure_offset, os.SEEK_SET)

    try:
      time_structure = self.TIME.parse_stream(file_object)
    except construct.FieldError as exception:
      time_structure = None
      parser_mediator.ProduceExtractionError(
          'unable to parse creation time with error: {0!s}'.format(exception))

    if time_structure:
      time_elements_tuple = (
          time_structure.year, time_structure.month, time_structure.day,
          time_structure.hour, time_structure.minute, time_structure.second)

      creation_time = dfdatetime_time_elements.TimeElements()
      try:
        creation_time.CopyFromStringTuple(
            time_elements_tuple=time_elements_tuple)
      except ValueError:
        creation_time = None
        parser_mediator.ProduceExtractionError(
            'invalid creation time value: {0!s}'.format(time_elements_tuple))

    last_modification_time = None

    structure_offset = record_offset + record.last_modification_time - 1
    file_object.seek(structure_offset, os.SEEK_SET)

    try:
      time_structure = self.TIME.parse_stream(file_object)
    except construct.FieldError as exception:
      time_structure = None
      parser_mediator.ProduceExtractionError(
          'unable to parse last modification time with error: {0!s}'.format(
              exception))

    if time_structure:
      time_elements_tuple = (
          time_structure.year, time_structure.month, time_structure.day,
          time_structure.hour, time_structure.minute, time_structure.second)

      last_modification_time = dfdatetime_time_elements.TimeElements()
      try:
        last_modification_time.CopyFromStringTuple(
            time_elements_tuple=time_elements_tuple)
      except ValueError:
        last_modification_time = None
        parser_mediator.ProduceExtractionError(
            'invalid last modification time value: {0!s}'.format(
                time_elements_tuple))

    text_description = 'N/A'
    if record.text_description:
      structure_offset = record_offset + record.text_description - 1
      file_object.seek(structure_offset, os.SEEK_SET)

      try:
        text_description = self.TEXT.parse_stream(file_object)
      except construct.FieldError as exception:
        parser_mediator.ProduceExtractionError(
            'unable to parse text description with error: {0!s}'.format(
                exception))

    comments = 'N/A'
    if record.comments:
      structure_offset = record_offset + record.comments - 1
      file_object.seek(structure_offset, os.SEEK_SET)

      try:
        comments = self.TEXT.parse_stream(file_object)
      except construct.FieldError as exception:
        parser_mediator.ProduceExtractionError(
            'unable to parse comments with error: {0!s}'.format(exception))

    structure_offset = record_offset + record.entry_name - 1
    file_object.seek(structure_offset, os.SEEK_SET)

    try:
      entry_name = self.TEXT.parse_stream(file_object)
    except construct.FieldError as exception:
      entry_name = 'N/A'
      parser_mediator.ProduceExtractionError(
          'unable to parse entry name with error: {0!s}'.format(exception))

    structure_offset = record_offset + record.account_name - 1
    file_object.seek(structure_offset, os.SEEK_SET)

    try:
      account_name = self.TEXT.parse_stream(file_object)
    except construct.FieldError as exception:
      account_name = 'N/A'
      parser_mediator.ProduceExtractionError(
          'unable to parse account name with error: {0!s}'.format(exception))

    return (
        ssgp_hash, creation_time, last_modification_time,
        text_description, comments, entry_name, account_name)

  def _ReadEntryInternet(self, parser_mediator, file_object):
    """Extracts the information from an Internet password entry.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.
    """
    record_offset = file_object.tell()
    try:
      record_header_struct = self.RECORD_HEADER_INET.parse_stream(file_object)
    except (IOError, construct.FieldError):
      parser_mediator.ProduceExtractionError((
          'unable to parse record header structure at offset: '
          '0x{0:08x}').format(record_offset))
      return

    (ssgp_hash, creation_time, last_modification_time, text_description,
     comments, entry_name, account_name) = self._ReadEntryHeader(
         parser_mediator, file_object, record_header_struct.record_header,
         record_offset)

    if not record_header_struct.where:
      where = 'N/A'
      protocol = 'N/A'
      type_protocol = 'N/A'

    else:
      offset = record_offset + record_header_struct.where - 1
      file_object.seek(offset, os.SEEK_SET)
      where = self.TEXT.parse_stream(file_object)

      offset = record_offset + record_header_struct.protocol - 1
      file_object.seek(offset, os.SEEK_SET)
      protocol = self.TYPE_TEXT.parse_stream(file_object)

      offset = record_offset + record_header_struct.type - 1
      file_object.seek(offset, os.SEEK_SET)
      type_protocol = self.TEXT.parse_stream(file_object)
      type_protocol = self._PROTOCOL_TRANSLATION_DICT.get(
          type_protocol, type_protocol)

      if record_header_struct.url:
        offset = record_offset + record_header_struct.url - 1
        file_object.seek(offset, os.SEEK_SET)
        url = self.TEXT.parse_stream(file_object)
        where = '{0:s}{1:s}'.format(where, url)

    # Move to the end of the record.
    next_record_offset = (
        record_offset + record_header_struct.record_header.entry_length)
    file_object.seek(next_record_offset, os.SEEK_SET)

    event_data = KeychainInternetRecordEventData()
    event_data.account_name = account_name
    event_data.comments = comments
    event_data.entry_name = entry_name
    event_data.protocol = protocol
    event_data.ssgp_hash = ssgp_hash
    event_data.text_description = text_description
    event_data.type_protocol = type_protocol
    event_data.where = where

    if creation_time:
      event = time_events.DateTimeValuesEvent(
          creation_time, definitions.TIME_DESCRIPTION_CREATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    if last_modification_time:
      event = time_events.DateTimeValuesEvent(
          last_modification_time, definitions.TIME_DESCRIPTION_MODIFICATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

  def _ReadTableOffsets(self, parser_mediator, file_object):
    """Reads the table offsets.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.

    Returns:
      list[int]: table offsets, empty when the offsets cannot be parsed.
    """
    # INFO: The HEADER KEYCHAIN:
    # [DBHEADER] + [DBSCHEMA] + [OFFSET TABLE A] + ... + [OFFSET TABLE Z]
    # Where the table offset is relative to the first byte of the DB Schema,
    # then we must add to this offset the size of the [DBHEADER].
    # Read the database schema and extract the offset for all the tables.
    # They are ordered by file position from the top to the bottom of the file.
    table_offsets = []

    try:
      db_schema_struct = self.KEYCHAIN_DB_SCHEMA.parse_stream(file_object)
    except (IOError, construct.FieldError):
      parser_mediator.ProduceExtractionError(
          'unable to parse database schema structure')
      return []

    for index in range(db_schema_struct.number_of_tables):
      try:
        table_offset = self.TABLE_OFFSET.parse_stream(file_object)
      except (IOError, construct.FieldError):
        parser_mediator.ProduceExtractionError(
            'unable to parse table offsets: {0:d}'.format(index))
        # Return an empty list instead of None so the caller can always
        # iterate the result (a bare return here caused a TypeError in
        # ParseFileObject).
        return []

      table_offsets.append(table_offset + self.KEYCHAIN_DB_HEADER.sizeof())

    return table_offsets

  @classmethod
  def GetFormatSpecification(cls):
    """Retrieves the format specification.

    Returns:
      FormatSpecification: format specification.
    """
    format_specification = specification.FormatSpecification(cls.NAME)
    format_specification.AddNewSignature(
        cls.KEYCHAIN_SIGNATURE, offset=0)
    return format_specification

  def ParseFileObject(self, parser_mediator, file_object, **kwargs):
    """Parses a MacOS keychain file-like object.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.

    Raises:
      UnableToParseFile: when the file cannot be parsed.
    """
    try:
      db_header = self.KEYCHAIN_DB_HEADER.parse_stream(file_object)
    except (IOError, construct.FieldError):
      raise errors.UnableToParseFile('Unable to parse file header.')

    if db_header.signature != self.KEYCHAIN_SIGNATURE:
      raise errors.UnableToParseFile('Not a MacOS keychain file.')

    if (db_header.major_version != self.KEYCHAIN_MAJOR_VERSION or
        db_header.minor_version != self.KEYCHAIN_MINOR_VERSION):
      # The version fields are parsed as integers, so use the decimal
      # format specifier ('s' would raise ValueError here).
      parser_mediator.ProduceExtractionError(
          'unsupported format version: {0:d}.{1:d}'.format(
              db_header.major_version, db_header.minor_version))
      return

    # TODO: document format and determine if -1 offset correction is needed.
    table_offsets = self._ReadTableOffsets(parser_mediator, file_object)
    for table_offset in table_offsets:
      # Skipping X bytes, unknown data at this point.
      file_object.seek(table_offset, os.SEEK_SET)

      try:
        table = self.TABLE_HEADER.parse_stream(file_object)
      except (IOError, construct.FieldError):
        parser_mediator.ProduceExtractionError(
            'unable to parse table structure at offset: 0x{0:08x}'.format(
                table_offset))
        continue

      # Table_offset: absolute byte in the file where the table starts.
      # table.first_record: first record in the table, relative to the
      #                     first byte of the table.
      file_object.seek(table_offset + table.first_record, os.SEEK_SET)

      if table.record_type == self.RECORD_TYPE_INTERNET:
        for _ in range(table.number_of_records):
          self._ReadEntryInternet(parser_mediator, file_object)

      elif table.record_type == self.RECORD_TYPE_APPLICATION:
        for _ in range(table.number_of_records):
          self._ReadEntryApplication(parser_mediator, file_object)
Example #26
0
}

# Maps the CLI-friendly script-type names to messages.OutputScriptType values.
OUTPUT_SCRIPTS = {
    "address": messages.OutputScriptType.PAYTOADDRESS,
    "segwit": messages.OutputScriptType.PAYTOWITNESS,
    "p2shsegwit": messages.OutputScriptType.PAYTOP2SHWITNESS,
}

# Default coin name.
DEFAULT_COIN = "Bitcoin"

# Binary layout of a serialized extended public key: 78 bytes total
# (4 + 1 + 4 + 4 + 32 + 33, all big-endian). Terminated rejects any
# trailing bytes after the key material.
XpubStruct = c.Struct(
    "version" / c.Int32ub,
    "depth" / c.Int8ub,
    "fingerprint" / c.Int32ub,
    "child_num" / c.Int32ub,
    "chain_code" / c.Bytes(32),
    "key" / c.Bytes(33),
    c.Terminated,
)


def xpub_deserialize(xpubstr):
    xpub_bytes = tools.b58check_decode(xpubstr)
    data = XpubStruct.parse(xpub_bytes)
    node = messages.HDNodeType(
        depth=data.depth,
        fingerprint=data.fingerprint,
        child_num=data.child_num,
        chain_code=data.chain_code,
    )
    if data.key[0] == 0:
Example #27
0
def GUID(name):
    """Return a 16-byte field named *name*, wrapped in a GuidAdapter."""
    return GuidAdapter(construct.Bytes(name, 16))
Example #28
0
class BinaryCookieParser(interface.FileObjectParser):
    """Parser for Safari Binary Cookie files."""

    NAME = u'binary_cookies'
    DESCRIPTION = u'Parser for Safari Binary Cookie files.'

    # File header: big-endian page count followed by one page size per page.
    COOKIE_HEADER = construct.Struct(
        u'binary_cookie_header', construct.UBInt32(u'pages'),
        construct.Array(lambda ctx: ctx.pages,
                        construct.UBInt32(u'page_sizes')))

    # Per-cookie record; note the offsets and dates are little-endian while
    # the file header above is big-endian. The *_offset fields locate the
    # string values relative to the start of the cookie record.
    COOKIE_DATA = construct.Struct(u'binary_cookie_data',
                                   construct.ULInt32(u'size'),
                                   construct.Bytes(u'unknown_1', 4),
                                   construct.ULInt32(u'flags'),
                                   construct.Bytes(u'unknown_2', 4),
                                   construct.ULInt32(u'url_offset'),
                                   construct.ULInt32(u'name_offset'),
                                   construct.ULInt32(u'path_offset'),
                                   construct.ULInt32(u'value_offset'),
                                   construct.Bytes(u'end_of_cookie', 8),
                                   construct.LFloat64(u'expiration_date'),
                                   construct.LFloat64(u'creation_date'))

    # Page header: 4-byte marker, cookie count, then one offset per cookie
    # (relative to the start of the page).
    PAGE_DATA = construct.Struct(
        u'page_data', construct.Bytes(u'header', 4),
        construct.ULInt32(u'number_of_cookies'),
        construct.Array(lambda ctx: ctx.number_of_cookies,
                        construct.ULInt32(u'offsets')))

    # Cookie flags.
    COOKIE_FLAG_NONE = 0
    COOKIE_FLAG_SECURE = 1
    COOKIE_FLAG_UNKNOWN = 2
    COOKIE_FLAG_HTTP_ONLY = 4

    def __init__(self):
        """Initializes a parser object."""
        super(BinaryCookieParser, self).__init__()
        self._cookie_plugins = (
            cookie_plugins_manager.CookiePluginsManager.GetPlugins())

    def _ParsePage(self, page_data, parser_mediator):
        """Extract events from a page and produce events.

    Args:
      page_data: Raw bytes of the page.
      parser_mediator: A parser mediator object (instance of ParserMediator).
    """
        try:
            page = self.PAGE_DATA.parse(page_data)
        except construct.FieldError:
            parser_mediator.ProduceParseError(u'Unable to parse page')
            return

        for page_offset in page.offsets:
            try:
                cookie = self.COOKIE_DATA.parse(page_data[page_offset:])
            except construct.FieldError:
                message = u'Unable to parse cookie data from offset: {0:d}'.format(
                    page_offset)
                parser_mediator.ProduceParseError(message)
                continue

            # The offset is determine by the range between the start of the current
            # offset until the start of the next offset. Thus we need to determine
            # the proper ordering of the offsets, since they are not always in the
            # same ordering.
            offset_dict = {
                cookie.url_offset: u'url',
                cookie.name_offset: u'name',
                cookie.value_offset: u'value',
                cookie.path_offset: u'path'
            }

            offsets = sorted(offset_dict.keys())
            # NOTE(review): the sorted start offsets are cookie-relative and
            # page_offset is added again in the slice below, but this end
            # sentinel already includes page_offset — the last field's end
            # appears to overshoot by page_offset. The b'\x00' partition
            # below likely masks it; confirm against the format.
            offsets.append(cookie.size + page_offset)

            # TODO: Find a better approach to parsing the data than this.
            data_dict = {}
            for current_offset in range(0, len(offsets) - 1):
                # Get the current offset and the offset for the next entry.
                start, end = offsets[current_offset:current_offset + 2]
                value = offset_dict.get(offsets[current_offset])
                # Read the data.
                data_all = page_data[start + page_offset:end + page_offset]
                # Values are NUL-terminated; keep only the bytes before the
                # first NUL.
                data, _, _ = data_all.partition(b'\x00')
                data_dict[value] = data

            url = data_dict.get(u'url')
            cookie_name = data_dict.get(u'name')
            cookie_value = data_dict.get(u'value')
            path = data_dict.get(u'path')

            # Decode the flags bitmask into human-readable labels.
            flags = []
            flag_int = cookie.flags
            if flag_int & self.COOKIE_FLAG_HTTP_ONLY:
                flags.append(u'HttpOnly')
            if flag_int & self.COOKIE_FLAG_UNKNOWN:
                flags.append(u'Unknown')
            if flag_int & self.COOKIE_FLAG_SECURE:
                flags.append(u'Secure')

            cookie_flags = u'|'.join(flags)

            if cookie.creation_date:
                event_object = BinaryCookieEvent(
                    cookie.creation_date,
                    eventdata.EventTimestamp.CREATION_TIME, cookie_flags, url,
                    cookie_value, cookie_name, path)
                parser_mediator.ProduceEvent(event_object)

            if cookie.expiration_date:
                event_object = BinaryCookieEvent(
                    cookie.expiration_date,
                    eventdata.EventTimestamp.EXPIRATION_TIME, cookie_flags,
                    url, cookie_value, cookie_name, path)
                parser_mediator.ProduceEvent(event_object)

            # Give every registered cookie plugin a chance to extract
            # additional events; WrongPlugin simply means "not mine".
            for cookie_plugin in self._cookie_plugins:
                try:
                    cookie_plugin.UpdateChainAndProcess(
                        parser_mediator,
                        cookie_name=data_dict.get(u'name'),
                        cookie_data=data_dict.get(u'value'),
                        url=data_dict.get(u'url'))
                except errors.WrongPlugin:
                    pass

    def ParseFileObject(self, parser_mediator, file_object, **kwargs):
        """Parses a Safari binary cookie file-like object.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      file_object: A file-like object.

    Raises:
      UnableToParseFile: when the file cannot be parsed.
    """
        # Start by verifying magic value.
        # We do this here instead of in the header parsing routine due to the
        # fact that we read an integer there and create an array, which is part
        # of the header. For false hits this could end up with reading large chunks
        # of data, which we don't want for false hits.
        magic = file_object.read(4)
        if magic != b'cook':
            raise errors.UnableToParseFile(
                u'The file is not a Binary Cookie file. Unsupported file signature.'
            )

        try:
            header = self.COOKIE_HEADER.parse_stream(file_object)
        except (IOError, construct.ArrayError, construct.FieldError):
            raise errors.UnableToParseFile(
                u'The file is not a Binary Cookie file (bad header).')

        for page_size in header.page_sizes:
            page = file_object.read(page_size)
            if len(page) != page_size:
                # A short read means the file is truncated; stop parsing.
                parser_mediator.ProduceParseError(
                    u'Unable to continue parsing Binary Cookie file')
                break

            self._ParsePage(page, parser_mediator)
Example #29
0
    ), _transform_vendor_trust, 2, _transform_vendor_trust, 2)

# Vendor header schema. Tell/Pointer/Rebuild are used so the total header
# length is computed from the actual end offset and patched back into the
# 4-byte slot at _start_offset + 4 (reserved above via the Padding field).
VendorHeader = c.Struct(
    "_start_offset" / c.Tell,
    "magic" / c.Const(b"TRZV"),
    "_header_len" / c.Padding(4),
    "expiry" / c.Int32ul,
    "version" / c.Struct(
        "major" / c.Int8ul,
        "minor" / c.Int8ul,
    ),
    "vendor_sigs_required" / c.Int8ul,
    # Number of pubkeys, recomputed from the pubkeys list when building.
    "vendor_sigs_n" / c.Rebuild(c.Int8ul, c.len_(c.this.pubkeys)),
    "vendor_trust" / VendorTrust,
    "reserved" / c.Padding(14),
    "pubkeys" / c.Bytes(32)[c.this.vendor_sigs_n],
    "vendor_string" / c.Aligned(4, c.PascalString(c.Int8ul, "utf-8")),
    "vendor_image" / Toif,
    "_data_end_offset" / c.Tell,
    # Pad so that data + sigmask (1) + signature (64) ends on a 512-byte
    # boundary.
    c.Padding(-(c.this._data_end_offset + 65) % 512),
    "sigmask" / c.Byte,
    "signature" / c.Bytes(64),
    "_end_offset" / c.Tell,
    "header_len" /
    c.Pointer(c.this._start_offset + 4,
              c.Rebuild(c.Int32ul, c.this._end_offset - c.this._start_offset)),
)

VersionLong = c.Struct(
    "major" / c.Int8ul,
    "minor" / c.Int8ul,
        #RPC_TYPE.KILL_SESSIONS_RESULT : RpcMessage_KILL_SESSIONS_RESULT,
        #RPC_TYPE.DEL_ENTRIES : RpcMessage_DEL_ENTRIES,
        #RPC_TYPE.DEL_ENTRIES_RESULT : RpcMessage_DEL_ENTRIES_RESULT,
        #RPC_TYPE.SHOW_ENTRIES : RpcMessage_SHOW_ENTRIES,
        #RPC_TYPE.SHOW_ENTRIES_RESULT : RpcMessage_SHOW_ENTRIES_RESULT,
        #RPC_TYPE.DUMP_MD : RpcMessage_DUMP_MD,
        #RPC_TYPE.DUMP_MD_RESULT : RpcMessage_DUMP_MD_RESULT,
        #RPC_TYPE.CLEAN_DB : RpcMessage_CLEAN_DB,
        #RPC_TYPE.DEBUGCTL : RpcMessage_DEBUGCTL,
    },
    default=None)

# RPC packet common header: a 32-bit big-endian payload length (rebuilt from
# the data field when building), the RPC_TYPE code, then the raw payload
# bytes (hex-dumped when pretty-printed).
rpc_packet_t = con.Struct("length" / Rebuild(Hex(Int32ub), len_(this.data)),
                          "code" / RPC_TYPE,
                          "data" / con.HexDump(con.Bytes(this.length)))


def rpc_message_build(code, **kwargs):
    """Serialize an RPC message body and wrap it in a framed RPC packet."""
    payload = RpcMessage.build(kwargs, code=code)
    packet = Container(code=code, data=payload)
    return rpc_packet_t.build(packet)


def rpc_message_parse(source):
    """
    Read and deserilize RPC message from a file-like object or socket)
    """