Ejemplo n.º 1
0
class MsgSettingsReadByIndexResp(SBP):
    """SBP message MSG_SETTINGS_READ_BY_INDEX_RESP (0x00A7).

    Response used when iterating over the device settings by index: it
    carries the requested index together with a NULL-terminated,
    delimited string with contents [SECTION_SETTING, SETTING, VALUE].

    Build it either from an existing SBP parent object (fields are
    inherited and the payload re-parsed) or inline from keyword
    arguments: ``index`` (int), ``setting`` (str), and an optional
    ``sender`` id that defaults to SENDER_ID.
    """
    # Wire layout of the payload.
    _parser = construct.Struct(
        'index' / construct.Int16ul,
        'setting' / construct.GreedyString(encoding='utf8'),
    )
    __slots__ = [
        'index',
        'setting',
    ]

    def __init__(self, sbp=None, **kwargs):
        if sbp:
            # Inherit frame fields from the parent and decode its payload.
            super(MsgSettingsReadByIndexResp, self).__init__(
                sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)
        else:
            super(MsgSettingsReadByIndexResp, self).__init__()
            self.msg_type = SBP_MSG_SETTINGS_READ_BY_INDEX_RESP
            self.sender = kwargs.pop('sender', SENDER_ID)
            self.index = kwargs.pop('index')
            self.setting = kwargs.pop('setting')

    def __repr__(self):
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Build a message object from a JSON-encoded string s."""
        return MsgSettingsReadByIndexResp.from_json_dict(json.loads(s))

    @staticmethod
    def from_json_dict(d):
        parent = SBP.from_json_dict(d)
        return MsgSettingsReadByIndexResp(parent, **d)

    def from_binary(self, d):
        """Update the payload fields of this message from binary payload d."""
        parsed = MsgSettingsReadByIndexResp._parser.parse(d)
        for field in self.__class__.__slots__:
            setattr(self, field, getattr(parsed, field))

    def to_binary(self):
        """Produce a framed/packed SBP message."""
        container = containerize(exclude_fields(self))
        self.payload = MsgSettingsReadByIndexResp._parser.build(container)
        return self.pack()

    def to_json_dict(self):
        self.to_binary()
        result = super(MsgSettingsReadByIndexResp, self).to_json_dict()
        result.update(walk_json_dict(exclude_fields(self)))
        return result
Ejemplo n.º 2
0
import construct
import struct

# Benchmark-style snippet: describe a record of three signed 64-bit
# little-endian integers, then serialize 10000 identical records.
_struct = construct.Struct('a' / construct.Int64sl, 'b' / construct.Int64sl,
                           'c' / construct.Int64sl)
# NOTE(review): `format` shadows the builtin of the same name — rename if
# this snippet is ever extended.
format = construct.Array(10000, _struct)
format.build([{'a': 100, 'b': 100, 'c': 100}] * 10000)
# The triple-quoted string below is a no-op expression statement — an API
# design sketch, not executed code.
"""

raw_get
raw_set
len


vector(intn, intn)
array(2, array(3, intn))
collection(intn, intn, intn)
struct(field('terry', intn), field('bobby', intn))
choice(
"""


class array:
    """Compiler node for a fixed-length array of `item_count` elements
    of `item_type`."""

    def __init__(self, item_count, item_type):
        self.item_count = item_count
        # Bug fix: this previously read `self.item_type = item_count`,
        # silently discarding the element type and storing the count twice,
        # which would break ___compiler's recursion into the item type.
        self.item_type = item_type

    def ___compiler(self):
        # Emit: repeat the item's program `item_count` times, collect into a list.
        # NOTE(review): this method is spelled ___compiler but recurses via
        # ___compile on the item type — confirm which spelling is intended.
        return [REPEAT] + self.item_type.___compile() + [
            TIMES(self.item_count), BUILD(list)
        ]
Ejemplo n.º 3
0
class MsgTrackingStateDetailed(SBP):
    """SBP class for message MSG_TRACKING_STATE_DETAILED (0x0011).

  You can have MSG_TRACKING_STATE_DETAILED inherit its fields directly
  from an inherited SBP object, or construct it inline using a dict
  of its fields.

  
  The tracking message returns a set tracking channel parameters for a
single tracking channel useful for debugging issues.


  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  recv_time : int
    Receiver clock time.
  tot : GPSTime
    Time of transmission of signal from satellite. TOW only valid when
TOW status is decoded or propagated. WN only valid when week
number valid flag is set.

  P : int
    Pseudorange observation. Valid only when pseudorange valid flag is
set.

  P_std : int
    Pseudorange observation standard deviation. Valid only when
pseudorange valid flag is set.

  L : CarrierPhase
    Carrier phase observation with typical sign convention. Valid only
when PLL pessimistic lock is achieved.

  cn0 : int
    Carrier-to-Noise density
  lock : int
    Lock time. It is encoded according to DF402 from the RTCM 10403.2
Amendment 2 specification. Valid values range from 0 to 15.

  sid : GnssSignal
    GNSS signal identifier.
  doppler : int
    Carrier Doppler frequency.
  doppler_std : int
    Carrier Doppler frequency standard deviation.
  uptime : int
    Number of seconds of continuous tracking. Specifies how much time
signal is in continuous track.

  clock_offset : int
    TCXO clock offset. Valid only when valid clock valid flag is set.

  clock_drift : int
    TCXO clock drift. Valid only when valid clock valid flag is set.

  corr_spacing : int
    Early-Prompt (EP) and Prompt-Late (PL) correlators spacing.
  acceleration : int
    Acceleration. Valid only when acceleration valid flag is set.
  sync_flags : int
    Synchronization status flags.
  tow_flags : int
    TOW status flags.
  track_flags : int
    Tracking loop status flags.
  nav_flags : int
    Navigation data status flags.
  pset_flags : int
    Parameters sets flags.
  misc_flags : int
    Miscellaneous flags.
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
    # Wire layout of the payload; nested construct.Struct entries reuse the
    # GPSTime, CarrierPhase and GnssSignal sub-parsers.
    _parser = construct.Struct(
        'recv_time' / construct.Int64ul,
        'tot' / construct.Struct(GPSTime._parser),
        'P' / construct.Int32ul,
        'P_std' / construct.Int16ul,
        'L' / construct.Struct(CarrierPhase._parser),
        'cn0' / construct.Int8ul,
        'lock' / construct.Int16ul,
        'sid' / construct.Struct(GnssSignal._parser),
        'doppler' / construct.Int32sl,
        'doppler_std' / construct.Int16ul,
        'uptime' / construct.Int32ul,
        'clock_offset' / construct.Int16sl,
        'clock_drift' / construct.Int16sl,
        'corr_spacing' / construct.Int16ul,
        'acceleration' / construct.Int8sl,
        'sync_flags' / construct.Int8ul,
        'tow_flags' / construct.Int8ul,
        'track_flags' / construct.Int8ul,
        'nav_flags' / construct.Int8ul,
        'pset_flags' / construct.Int8ul,
        'misc_flags' / construct.Int8ul,
    )
    # Payload field names in wire order; from_binary copies each parsed
    # value onto the instance by this list.
    __slots__ = [
        'recv_time',
        'tot',
        'P',
        'P_std',
        'L',
        'cn0',
        'lock',
        'sid',
        'doppler',
        'doppler_std',
        'uptime',
        'clock_offset',
        'clock_drift',
        'corr_spacing',
        'acceleration',
        'sync_flags',
        'tow_flags',
        'track_flags',
        'nav_flags',
        'pset_flags',
        'misc_flags',
    ]

    def __init__(self, sbp=None, **kwargs):
        if sbp:
            # Inherit frame fields from the parent SBP and decode its payload.
            super(MsgTrackingStateDetailed,
                  self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                                 sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)
        else:
            # Build inline from keyword arguments; missing required fields
            # raise KeyError via kwargs.pop.
            super(MsgTrackingStateDetailed, self).__init__()
            self.msg_type = SBP_MSG_TRACKING_STATE_DETAILED
            self.sender = kwargs.pop('sender', SENDER_ID)
            self.recv_time = kwargs.pop('recv_time')
            self.tot = kwargs.pop('tot')
            self.P = kwargs.pop('P')
            self.P_std = kwargs.pop('P_std')
            self.L = kwargs.pop('L')
            self.cn0 = kwargs.pop('cn0')
            self.lock = kwargs.pop('lock')
            self.sid = kwargs.pop('sid')
            self.doppler = kwargs.pop('doppler')
            self.doppler_std = kwargs.pop('doppler_std')
            self.uptime = kwargs.pop('uptime')
            self.clock_offset = kwargs.pop('clock_offset')
            self.clock_drift = kwargs.pop('clock_drift')
            self.corr_spacing = kwargs.pop('corr_spacing')
            self.acceleration = kwargs.pop('acceleration')
            self.sync_flags = kwargs.pop('sync_flags')
            self.tow_flags = kwargs.pop('tow_flags')
            self.track_flags = kwargs.pop('track_flags')
            self.nav_flags = kwargs.pop('nav_flags')
            self.pset_flags = kwargs.pop('pset_flags')
            self.misc_flags = kwargs.pop('misc_flags')

    def __repr__(self):
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Given a JSON-encoded string s, build a message object.

    """
        d = json.loads(s)
        return MsgTrackingStateDetailed.from_json_dict(d)

    @staticmethod
    def from_json_dict(d):
        sbp = SBP.from_json_dict(d)
        return MsgTrackingStateDetailed(sbp, **d)

    def from_binary(self, d):
        """Given a binary payload d, update the appropriate payload fields of
    the message.

    """
        p = MsgTrackingStateDetailed._parser.parse(d)
        for n in self.__class__.__slots__:
            setattr(self, n, getattr(p, n))

    def to_binary(self):
        """Produce a framed/packed SBP message.

    """
        c = containerize(exclude_fields(self))
        self.payload = MsgTrackingStateDetailed._parser.build(c)
        return self.pack()

    def to_json_dict(self):
        # Re-serialize first so the payload reflects current field values.
        self.to_binary()
        d = super(MsgTrackingStateDetailed, self).to_json_dict()
        j = walk_json_dict(exclude_fields(self))
        d.update(j)
        return d
Ejemplo n.º 4
0
class MsgOrientEuler(SBP):
  """SBP message MSG_ORIENT_EULER (0x0221).

  Reports the yaw, pitch and roll angles of the vehicle body frame.
  The rotations are applied intrinsically in the order yaw, pitch,
  roll to go from a frame aligned with the local-level NED frame to
  the vehicle body frame. Only available in future INS versions of
  Swift products; not produced by Piksi Multi or Duro.

  Build it either from an existing SBP parent object, or inline from
  keyword arguments: tow, roll, pitch, yaw, roll_accuracy,
  pitch_accuracy, yaw_accuracy, flags, plus an optional sender id
  that defaults to SENDER_ID.
  """
  # Wire layout of the payload.
  _parser = construct.Struct(
      'tow' / construct.Int32ul,
      'roll' / construct.Int32sl,
      'pitch' / construct.Int32sl,
      'yaw' / construct.Int32sl,
      'roll_accuracy' / construct.Float32l,
      'pitch_accuracy' / construct.Float32l,
      'yaw_accuracy' / construct.Float32l,
      'flags' / construct.Int8ul,
  )
  # Payload field names, in wire order.
  __slots__ = [
      'tow',
      'roll',
      'pitch',
      'yaw',
      'roll_accuracy',
      'pitch_accuracy',
      'yaw_accuracy',
      'flags',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      # Inherit frame fields from the parent and decode its payload.
      super(MsgOrientEuler, self).__init__(
          sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super(MsgOrientEuler, self).__init__()
      self.msg_type = SBP_MSG_ORIENT_EULER
      self.sender = kwargs.pop('sender', SENDER_ID)
      # Pop required fields in wire order; a missing one raises KeyError.
      for field in self.__class__.__slots__:
        setattr(self, field, kwargs.pop(field))

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Build a message object from a JSON-encoded string s."""
    return MsgOrientEuler.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    parent = SBP.from_json_dict(d)
    return MsgOrientEuler(parent, **d)

  def from_binary(self, d):
    """Update the payload fields of this message from binary payload d."""
    parsed = MsgOrientEuler._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(parsed, field))

  def to_binary(self):
    """Produce a framed/packed SBP message."""
    self.payload = MsgOrientEuler._parser.build(
        containerize(exclude_fields(self)))
    return self.pack()

  def into_buffer(self, buf, offset):
    """Produce a framed/packed SBP message into the provided buffer and
    offset."""
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgOrientEuler._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    self.to_binary()
    result = super(MsgOrientEuler, self).to_json_dict()
    result.update(walk_json_dict(exclude_fields(self)))
    return result
Ejemplo n.º 5
0
class MsgAngularRate(SBP):
  """SBP message MSG_ANGULAR_RATE (0x0222).

  Reports orientation rates in the vehicle body frame — the values a
  strapped-down gyroscope would measure, not the time derivative of
  the Euler angles. By convention x is forward, y is right and z is
  down; the user frame's orientation and origin come from device
  settings. Only available in future INS versions of Swift products;
  not produced by Piksi Multi or Duro.

  Build it either from an existing SBP parent object, or inline from
  keyword arguments: tow, x, y, z, flags, plus an optional sender id
  that defaults to SENDER_ID.
  """
  # Wire layout of the payload.
  _parser = construct.Struct(
      'tow' / construct.Int32ul,
      'x' / construct.Int32sl,
      'y' / construct.Int32sl,
      'z' / construct.Int32sl,
      'flags' / construct.Int8ul,
  )
  # Payload field names, in wire order.
  __slots__ = [
      'tow',
      'x',
      'y',
      'z',
      'flags',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      # Inherit frame fields from the parent and decode its payload.
      super(MsgAngularRate, self).__init__(
          sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super(MsgAngularRate, self).__init__()
      self.msg_type = SBP_MSG_ANGULAR_RATE
      self.sender = kwargs.pop('sender', SENDER_ID)
      # Pop required fields in wire order; a missing one raises KeyError.
      for field in self.__class__.__slots__:
        setattr(self, field, kwargs.pop(field))

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Build a message object from a JSON-encoded string s."""
    return MsgAngularRate.from_json_dict(json.loads(s))

  @staticmethod
  def from_json_dict(d):
    parent = SBP.from_json_dict(d)
    return MsgAngularRate(parent, **d)

  def from_binary(self, d):
    """Update the payload fields of this message from binary payload d."""
    parsed = MsgAngularRate._parser.parse(d)
    for field in self.__class__.__slots__:
      setattr(self, field, getattr(parsed, field))

  def to_binary(self):
    """Produce a framed/packed SBP message."""
    self.payload = MsgAngularRate._parser.build(
        containerize(exclude_fields(self)))
    return self.pack()

  def into_buffer(self, buf, offset):
    """Produce a framed/packed SBP message into the provided buffer and
    offset."""
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgAngularRate._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    self.to_binary()
    result = super(MsgAngularRate, self).to_json_dict()
    result.update(walk_json_dict(exclude_fields(self)))
    return result
        return self.offset

# On-disk header of a FileVault v2 ("encrcdsa") encrypted disk image.
# Uses the legacy construct 2.x API (Magic/UBInt*/String); all integer
# fields are big-endian (UBInt*).
FileVaultV2Header = construct.Struct("FileVaultV2Header",
    construct.Magic("encrcdsa"),          # 8-byte file signature
    construct.UBInt32("version"),
    construct.UBInt32("encIVSize"),
    construct.UBInt32("_unk1"),           # _unk* fields: meaning unknown here
    construct.UBInt32("_unk2"),
    construct.UBInt32("keyBits"),
    construct.UBInt32("_unk4"),
    construct.UBInt32("_unk5"),
    construct.Array(4, construct.UBInt32("UDIFID")),
    construct.UBInt32("blockSize"),
    construct.UBInt64("dataSize"),
    construct.UBInt64("dataOffset"),
    construct.Padding(0x260),             # reserved/unused region
    # Key-derivation parameters (PBKDF-style: algorithm, PRNG, iterations, salt).
    construct.UBInt32("kdfAlgorithm"),
    construct.UBInt32("kdfPRNGAlgorithm"),
    construct.UBInt32("kdfIterationCount"),
    construct.UBInt32("kdfSaltLen"),
    construct.String("kdfSalt", 0x20),
    # Encrypted keyblob parameters and data.
    construct.UBInt32("blobEncIVSize"),
    construct.String("blobEncIV", 0x20),
    construct.UBInt32("blobEncKeyBits"),
    construct.UBInt32("blobEncAlgorithm"),
    construct.UBInt32("blobEncPadding"),
    construct.UBInt32("blobEncMode"),
    construct.UBInt32("encryptedKeyblobSize"),
    construct.String("encryptedKeyblob", 0x30))

class FileVaultFile(BaseFile):
Ejemplo n.º 7
0
class UsersPlugin(interface.KeyPlugin):
    """SAM Windows Registry plugin for Users Account information."""

    NAME = 'winreg_sam_users'
    DESCRIPTION = u'Parser for SAM Users and Names Registry keys.'

    REG_KEYS = [u'\\SAM\\Domains\\Account\\Users']
    REG_TYPE = 'SAM'
    # Fixed-offset layout of the SAM "F" value: last-login and
    # password-reset FILETIME timestamps, the account RID and the login
    # count, separated by padding (legacy construct 2.x API).
    F_VALUE_STRUCT = construct.Struct('f_struct', construct.Padding(8),
                                      construct.ULInt64('last_login'),
                                      construct.Padding(8),
                                      construct.ULInt64('password_reset'),
                                      construct.Padding(16),
                                      construct.ULInt16('rid'),
                                      construct.Padding(16),
                                      construct.ULInt8('login_count'))
    # Header of the "V" value: 11 little-endian 32-bit values that hold
    # offsets/lengths for the variable-size strings that follow the header.
    V_VALUE_HEADER = construct.Struct(
        'v_header', construct.Array(11, construct.ULInt32('values')))
    # Size in bytes of the V value header; string offsets are relative to
    # the end of the header.
    V_VALUE_HEADER_SIZE = 0xCC

    def GetEntries(self,
                   parser_mediator,
                   key=None,
                   registry_type=None,
                   codepage='cp1252',
                   **unused_kwargs):
        """Collect data from Users and Names and produce event objects.

    Args:
      parser_mediator: A parser context object (instance of ParserContext).
      key: Optional Registry key (instance of winreg.WinRegKey).
           The default is None.
      registry_type: Optional Registry type string. The default is None.
    """

        name_dict = {}

        # Map account names to the last-written timestamp of their subkey
        # under "Names"; used below as the account creation time.
        name_key = key.GetSubkey('Names')
        if not name_key:
            parser_mediator.ProduceParseError(u'Unable to locate Names key.')
            return
        values = [(v.name, v.last_written_timestamp)
                  for v in name_key.GetSubkeys()]
        name_dict = dict(values)

        for subkey in key.GetSubkeys():
            text_dict = {}
            # Skip the "Names" index key itself; every other subkey is a
            # per-account key.
            if subkey.name == 'Names':
                continue
            text_dict['user_guid'] = subkey.name
            parsed_v_value = self._ParseVValue(subkey)
            if not parsed_v_value:
                parser_mediator.ProduceParseError(
                    u'Unable to parse SAM key: {0:s} V value.'.format(subkey))
                return
            username = parsed_v_value[0]
            full_name = parsed_v_value[1]
            comments = parsed_v_value[2]
            if username:
                text_dict['username'] = username
            if full_name:
                text_dict['full_name'] = full_name
            if comments:
                text_dict['comments'] = comments
            if name_dict:
                account_create_time = name_dict.get(text_dict.get('username'),
                                                    0)
            else:
                account_create_time = 0

            f_data = self._ParseFValue(subkey)
            last_login_time = timelib.Timestamp.FromFiletime(f_data.last_login)
            password_reset_time = timelib.Timestamp.FromFiletime(
                f_data.password_reset)
            text_dict['account_rid'] = f_data.rid
            text_dict['login_count'] = f_data.login_count

            # Emit one event per meaningful (non-zero) timestamp.
            if account_create_time > 0:
                event_object = windows_events.WindowsRegistryEvent(
                    account_create_time,
                    key.path,
                    text_dict,
                    usage=eventdata.EventTimestamp.ACCOUNT_CREATED,
                    offset=key.offset,
                    registry_type=registry_type,
                    source_append=u'User Account Information')
                parser_mediator.ProduceEvent(event_object)

            if last_login_time > 0:
                event_object = windows_events.WindowsRegistryEvent(
                    last_login_time,
                    key.path,
                    text_dict,
                    usage=eventdata.EventTimestamp.LAST_LOGIN_TIME,
                    offset=key.offset,
                    registry_type=registry_type,
                    source_append=u'User Account Information')
                parser_mediator.ProduceEvent(event_object)

            if password_reset_time > 0:
                event_object = windows_events.WindowsRegistryEvent(
                    password_reset_time,
                    key.path,
                    text_dict,
                    usage=eventdata.EventTimestamp.LAST_PASSWORD_RESET,
                    offset=key.offset,
                    registry_type=registry_type,
                    source_append=u'User Account Information')
                parser_mediator.ProduceEvent(event_object)

    def _ParseVValue(self, key):
        """Parses V value and returns name, fullname, and comments data.

    Args:
      key: Registry key (instance of winreg.WinRegKey).

    Returns:
      name: Name data parsed with name start and length values.
      fullname: Fullname data parsed with fullname start and length values.
      comments: Comments data parsed with comments start and length values.
    """

        v_value = key.GetValue('V')
        if not v_value:
            logging.error(u'Unable to locate V Value in key.')
            return
        try:
            structure = self.V_VALUE_HEADER.parse(v_value.data)
        except construct.FieldError as exception:
            logging.error(
                u'Unable to extract V value header data: {:s}'.format(
                    exception))
            return
        # values()[0] is the 11-entry header array; entries 3/4, 6/7 and
        # 9/10 appear to be (offset, length) pairs for the name, full name
        # and comments strings, with offsets relative to the header end
        # — TODO confirm against the SAM V value format.
        name_offset = structure.values()[0][3] + self.V_VALUE_HEADER_SIZE
        full_name_offset = structure.values()[0][6] + self.V_VALUE_HEADER_SIZE
        comments_offset = structure.values()[0][9] + self.V_VALUE_HEADER_SIZE
        name_raw = v_value.data[name_offset:name_offset +
                                structure.values()[0][4]]
        full_name_raw = v_value.data[full_name_offset:full_name_offset +
                                     structure.values()[0][7]]
        comments_raw = v_value.data[comments_offset:comments_offset +
                                    structure.values()[0][10]]
        # The raw slices are UTF-16 byte strings.
        name = binary.ReadUtf16(name_raw)
        full_name = binary.ReadUtf16(full_name_raw)
        comments = binary.ReadUtf16(comments_raw)
        return name, full_name, comments

    def _ParseFValue(self, key):
        """Parses F value and returns parsed F data construct object.

    Args:
      key: Registry key (instance of winreg.WinRegKey).

    Returns:
      f_data: Construct parsed F value containing rid, login count,
              and timestamp information.
    """
        f_value = key.GetValue('F')
        if not f_value:
            logging.error(u'Unable to locate F Value in key.')
            return
        try:
            f_data = self.F_VALUE_STRUCT.parse(f_value.data)
        except construct.FieldError as exception:
            logging.error(
                u'Unable to extract F value data: {:s}'.format(exception))
            return
        return f_data
Ejemplo n.º 8
0
class AutomaticDestinationsOLECFPlugin(interface.OLECFPlugin):
  """Plugin that parses an .automaticDestinations-ms OLECF file."""

  NAME = 'olecf_automatic_destinations'
  DESCRIPTION = 'Parser for *.automaticDestinations-ms OLECF files.'

  REQUIRED_ITEMS = frozenset(['DestList'])

  # Shell link items are streams whose name is a hexadecimal number.
  _RE_LNK_ITEM_NAME = re.compile(r'^[1-9a-f][0-9a-f]*$')

  # We cannot use the parser registry here since winlnk could be disabled.
  # TODO: see if there is a more elegant solution for this.
  _WINLNK_PARSER = winlnk.WinLnkParser()

  # DestList stream header (legacy construct 2.x API, little-endian).
  _DEST_LIST_STREAM_HEADER = construct.Struct(
      'dest_list_stream_header',
      construct.ULInt32('format_version'),
      construct.ULInt32('number_of_entries'),
      construct.ULInt32('number_of_pinned_entries'),
      construct.Padding(4),
      construct.ULInt32('last_entry_number'),
      construct.Padding(4),
      construct.ULInt32('last_revision_number'),
      construct.Padding(4))

  _DEST_LIST_STREAM_HEADER_SIZE = _DEST_LIST_STREAM_HEADER.sizeof()

  # Using Construct's utf-16 encoding here will create strings with their
  # end-of-string characters exposed. Instead the strings are read as
  # binary strings and converted using ReadUTF16().
  _DEST_LIST_STREAM_ENTRY_V1 = construct.Struct(
      'dest_list_stream_entry_v1',
      construct.Padding(8),
      construct.Bytes('droid_volume_identifier', 16),
      construct.Bytes('droid_file_identifier', 16),
      construct.Bytes('birth_droid_volume_identifier', 16),
      construct.Bytes('birth_droid_file_identifier', 16),
      construct.String('hostname', 16),
      construct.ULInt32('entry_number'),
      construct.Padding(8),
      construct.ULInt64('last_modification_time'),
      construct.ULInt32('pin_status'),
      construct.ULInt16('path_size'),
      construct.String('path', lambda ctx: ctx.path_size * 2))

  # Version 3 entries add 16 bytes of padding before the path and 4 after it.
  _DEST_LIST_STREAM_ENTRY_V3 = construct.Struct(
      'dest_list_stream_entry_v3',
      construct.Padding(8),
      construct.Bytes('droid_volume_identifier', 16),
      construct.Bytes('droid_file_identifier', 16),
      construct.Bytes('birth_droid_volume_identifier', 16),
      construct.Bytes('birth_droid_file_identifier', 16),
      construct.String('hostname', 16),
      construct.ULInt32('entry_number'),
      construct.Padding(8),
      construct.ULInt64('last_modification_time'),
      construct.ULInt32('pin_status'),
      construct.Padding(16),
      construct.ULInt16('path_size'),
      construct.String('path', lambda ctx: ctx.path_size * 2),
      construct.Padding(4))

  def _ParseDistributedTrackingIdentifier(
      self, parser_mediator, uuid_data, origin):
    """Extracts data from a Distributed Tracking identifier.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      uuid_data (bytes): UUID data of the Distributed Tracking identifier.
      origin (str): origin of the event (event source).

    Returns:
      str: UUID string of the Distributed Tracking identifier.
    """
    uuid_object = uuid.UUID(bytes_le=uuid_data)

    # Version 1 UUIDs embed a timestamp; emit a creation event for it.
    if uuid_object.version == 1:
      event_data = windows_events.WindowsDistributedLinkTrackingEventData(
          uuid_object, origin)
      date_time = dfdatetime_uuid_time.UUIDTime(timestamp=uuid_object.time)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_CREATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    return '{{{0!s}}}'.format(uuid_object)

  def ParseDestList(self, parser_mediator, olecf_item):
    """Parses the DestList OLECF item.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      olecf_item (pyolecf.item): OLECF item.

    Raises:
      UnableToParseFile: if the DestList header or an entry cannot be parsed.
    """
    try:
      header = self._DEST_LIST_STREAM_HEADER.parse_stream(olecf_item)
    except (IOError, construct.FieldError) as exception:
      raise errors.UnableToParseFile(
          'Unable to parse DestList header with error: {0!s}'.format(
              exception))

    if header.format_version == 1:
      dest_list_stream_entry = self._DEST_LIST_STREAM_ENTRY_V1
    elif header.format_version in (3, 4):
      dest_list_stream_entry = self._DEST_LIST_STREAM_ENTRY_V3
    else:
      parser_mediator.ProduceExtractionError(
          'unsupported format version: {0:d}.'.format(header.format_version))
      # Bug fix: previously execution fell through after reporting the
      # error, leaving dest_list_stream_entry unbound and raising a
      # NameError on the first entry parse below.
      return

    entry_offset = olecf_item.get_offset()
    while entry_offset < olecf_item.size:
      try:
        entry = dest_list_stream_entry.parse_stream(olecf_item)
      except (IOError, construct.FieldError) as exception:
        raise errors.UnableToParseFile(
            'Unable to parse DestList entry with error: {0!s}'.format(
                exception))

      if not entry:
        break

      display_name = 'DestList entry at offset: 0x{0:08x}'.format(entry_offset)

      # Each droid identifier is parsed best-effort: on failure record an
      # extraction error and continue with an empty string.
      try:
        droid_volume_identifier = self._ParseDistributedTrackingIdentifier(
            parser_mediator, entry.droid_volume_identifier, display_name)

      except (TypeError, ValueError) as exception:
        droid_volume_identifier = ''
        parser_mediator.ProduceExtractionError(
            'unable to read droid volume identifier with error: {0!s}'.format(
                exception))

      try:
        droid_file_identifier = self._ParseDistributedTrackingIdentifier(
            parser_mediator, entry.droid_file_identifier, display_name)

      except (TypeError, ValueError) as exception:
        droid_file_identifier = ''
        parser_mediator.ProduceExtractionError(
            'unable to read droid file identifier with error: {0!s}'.format(
                exception))

      try:
        birth_droid_volume_identifier = (
            self._ParseDistributedTrackingIdentifier(
                parser_mediator, entry.birth_droid_volume_identifier,
                display_name))

      except (TypeError, ValueError) as exception:
        birth_droid_volume_identifier = ''
        # Bug fix: '{0:s}'.format(exception) raises TypeError on an
        # exception object in Python 3; use !s like the handlers above.
        parser_mediator.ProduceExtractionError((
            'unable to read birth droid volume identifier with error: '
            '{0!s}').format(
                exception))

      try:
        birth_droid_file_identifier = self._ParseDistributedTrackingIdentifier(
            parser_mediator, entry.birth_droid_file_identifier, display_name)

      except (TypeError, ValueError) as exception:
        birth_droid_file_identifier = ''
        parser_mediator.ProduceExtractionError((
            'unable to read birth droid file identifier with error: '
            '{0!s}').format(
                exception))

      if entry.last_modification_time == 0:
        date_time = dfdatetime_semantic_time.SemanticTime('Not set')
      else:
        date_time = dfdatetime_filetime.Filetime(
            timestamp=entry.last_modification_time)

      event_data = AutomaticDestinationsDestListEntryEventData()
      event_data.birth_droid_file_identifier = birth_droid_file_identifier
      event_data.birth_droid_volume_identifier = birth_droid_volume_identifier
      event_data.droid_file_identifier = droid_file_identifier
      event_data.droid_volume_identifier = droid_volume_identifier
      event_data.entry_number = entry.entry_number
      event_data.hostname = binary.ByteStreamCopyToString(
          entry.hostname, codepage='ascii')
      event_data.offset = entry_offset
      event_data.path = binary.UTF16StreamCopyToString(entry.path)
      event_data.pin_status = entry.pin_status

      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

      entry_offset = olecf_item.get_offset()

  def Process(self, parser_mediator, root_item=None, **kwargs):
    """Parses an OLECF file.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      root_item (Optional[pyolecf.item]): root item of the OLECF file.

    Raises:
      ValueError: If the root_item is not set.
    """
    # This will raise if unhandled keyword arguments are passed.
    super(AutomaticDestinationsOLECFPlugin, self).Process(
        parser_mediator, **kwargs)

    if not root_item:
      raise ValueError('Root item not set.')

    for item in root_item.sub_items:
      if item.name == 'DestList':
        self.ParseDestList(parser_mediator, item)

      elif self._RE_LNK_ITEM_NAME.match(item.name):
        display_name = parser_mediator.GetDisplayName()
        if display_name:
          display_name = '{0:s} # {1:s}'.format(display_name, item.name)
        else:
          display_name = '# {0:s}'.format(item.name)

        self._WINLNK_PARSER.Parse(
            parser_mediator, item, display_name=display_name)
Ejemplo n.º 9
0
class UnicodeOrHexAdapter(construct.Adapter):
    '''Helper to pretty print string/hex and remove trailing zeroes.'''
    def _decode(self, obj, context):
        # Called by construct during parsing to turn raw bytes into a
        # printable value.
        try:
            # Interpret the bytes as UTF-16 and strip trailing NUL padding.
            decoded = obj.decode('utf16')
            decoded = decoded.rstrip('\00').encode('utf8')
        except UnicodeDecodeError:
            # Not valid UTF-16: fall back to a hexadecimal representation.
            # NOTE(review): the 'hex' codec via .encode() is Python 2 only;
            # on Python 3 this branch would fail — confirm target runtime.
            decoded = obj.encode('hex')
        return decoded


# Common structs.

# 32-bit little-endian length prefix followed by UTF-16 character data.
# NOTE(review): these use the legacy construct 2.5 API (name as the first
# positional argument); newer construct releases drop that form.
UNICODE_STRING = construct.Struct(
    'UNICODE_STRING', construct.ULInt32('length'),
    construct.String('data', lambda ctx: ctx.length, encoding='utf16'))

# Same layout, but the data is run through UnicodeOrHexAdapter so invalid
# UTF-16 falls back to a hex representation.
UNICODE_STRING_HEX = construct.Struct(
    'UNICODE_STRING_HEX', construct.ULInt32('length'),
    UnicodeOrHexAdapter(construct.Bytes('data', lambda ctx: ctx.length)))

# 32-bit little-endian size prefix followed by raw bytes rendered as hex.
SIZED_DATA = construct.Struct(
    'SIZED_DATA', construct.ULInt32('size'),
    BytesHexAdapter(construct.Bytes('data', lambda ctx: ctx.size)))

# DPAPI structs.

DPAPI_BLOB = construct.Struct(
    'BLOB', construct.ULInt32('version'), GUID('provider'),
    construct.ULInt32('mk_version'),
Ejemplo n.º 10
0
class MsgLog(SBP):
    """SBP class for message MSG_LOG (0x0401).

    A human-readable text payload from the device, carrying errors,
    warnings and informational output at ERROR, WARNING, DEBUG and INFO
    logging levels.

    You can have MsgLog inherit its fields from an existing SBP object,
    or construct it inline from a dict of its fields.

    Parameters
    ----------
    sbp : SBP
        SBP parent object to inherit from.
    level : int
        Logging level.
    text : string
        Human-readable string.
    sender : int
        Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
    """
    _parser = construct.Struct(
        'level' / construct.Int8ul,
        'text' / construct.GreedyBytes,
    )
    __slots__ = ['level', 'text']

    def __init__(self, sbp=None, **kwargs):
        if not sbp:
            # Built inline from keyword arguments.
            super(MsgLog, self).__init__()
            self.msg_type = SBP_MSG_LOG
            self.sender = kwargs.pop('sender', SENDER_ID)
            self.level = kwargs.pop('level')
            self.text = kwargs.pop('text')
        else:
            # Inherit framing from an existing SBP message, then decode
            # its payload into this message's fields.
            super(MsgLog, self).__init__(
                sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)

    def __repr__(self):
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Build a MsgLog from a JSON-encoded string s."""
        return MsgLog.from_json_dict(json.loads(s))

    @staticmethod
    def from_json_dict(d):
        """Build a MsgLog from a dict of JSON-decoded fields."""
        parent = SBP.from_json_dict(d)
        return MsgLog(parent, **d)

    def from_binary(self, d):
        """Decode the binary payload d into this message's fields."""
        parsed = MsgLog._parser.parse(d)
        for field in self.__class__.__slots__:
            setattr(self, field, getattr(parsed, field))

    def to_binary(self):
        """Return this message framed and packed as SBP bytes."""
        container = containerize(exclude_fields(self))
        self.payload = MsgLog._parser.build(container)
        return self.pack()

    def to_json_dict(self):
        """Return a JSON-serializable dict for this message."""
        self.to_binary()
        result = super(MsgLog, self).to_json_dict()
        result.update(walk_json_dict(exclude_fields(self)))
        return result
Ejemplo n.º 11
0
class MsgPrintDep(SBP):
    """SBP class for message MSG_PRINT_DEP (0x0010).

    Deprecated plain-text message.

    You can have MsgPrintDep inherit its fields from an existing SBP
    object, or construct it inline from a dict of its fields.

    Parameters
    ----------
    sbp : SBP
        SBP parent object to inherit from.
    text : string
        Human-readable string.
    sender : int
        Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
    """
    _parser = construct.Struct('text' / construct.GreedyBytes, )
    __slots__ = ['text']

    def __init__(self, sbp=None, **kwargs):
        if not sbp:
            # Built inline from keyword arguments.
            super(MsgPrintDep, self).__init__()
            self.msg_type = SBP_MSG_PRINT_DEP
            self.sender = kwargs.pop('sender', SENDER_ID)
            self.text = kwargs.pop('text')
        else:
            # Inherit framing from an existing SBP message, then decode
            # its payload into this message's fields.
            super(MsgPrintDep, self).__init__(
                sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)

    def __repr__(self):
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Build a MsgPrintDep from a JSON-encoded string s."""
        return MsgPrintDep.from_json_dict(json.loads(s))

    @staticmethod
    def from_json_dict(d):
        """Build a MsgPrintDep from a dict of JSON-decoded fields."""
        parent = SBP.from_json_dict(d)
        return MsgPrintDep(parent, **d)

    def from_binary(self, d):
        """Decode the binary payload d into this message's fields."""
        parsed = MsgPrintDep._parser.parse(d)
        for field in self.__class__.__slots__:
            setattr(self, field, getattr(parsed, field))

    def to_binary(self):
        """Return this message framed and packed as SBP bytes."""
        container = containerize(exclude_fields(self))
        self.payload = MsgPrintDep._parser.build(container)
        return self.pack()

    def to_json_dict(self):
        """Return a JSON-serializable dict for this message."""
        self.to_binary()
        result = super(MsgPrintDep, self).to_json_dict()
        result.update(walk_json_dict(exclude_fields(self)))
        return result
Ejemplo n.º 12
0
class MsgFwd(SBP):
    """SBP class for message MSG_FWD (0x0402).

    Forwards messages over SBP. This may take the form of wrapping up SBP
    messages received by Piksi for logging purposes, or wrapping another
    protocol with SBP.

    The source identifier indicates from what interface a forwarded
    stream derived. The protocol identifier identifies what the expected
    protocol the forwarded msg contains. Protocol 0 represents SBP and
    the remaining values are implementation defined.

    Parameters
    ----------
    sbp : SBP
        SBP parent object to inherit from.
    source : int
        source identifier
    protocol : int
        protocol identifier
    fwd_payload : string
        variable length wrapped binary message
    sender : int
        Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
    """
    _parser = construct.Struct(
        'source' / construct.Int8ul,
        'protocol' / construct.Int8ul,
        'fwd_payload' / construct.GreedyBytes,
    )
    __slots__ = ['source', 'protocol', 'fwd_payload']

    def __init__(self, sbp=None, **kwargs):
        if not sbp:
            # Built inline from keyword arguments.
            super(MsgFwd, self).__init__()
            self.msg_type = SBP_MSG_FWD
            self.sender = kwargs.pop('sender', SENDER_ID)
            self.source = kwargs.pop('source')
            self.protocol = kwargs.pop('protocol')
            self.fwd_payload = kwargs.pop('fwd_payload')
        else:
            # Inherit framing from an existing SBP message, then decode
            # its payload into this message's fields.
            super(MsgFwd, self).__init__(
                sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)

    def __repr__(self):
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Build a MsgFwd from a JSON-encoded string s."""
        return MsgFwd.from_json_dict(json.loads(s))

    @staticmethod
    def from_json_dict(d):
        """Build a MsgFwd from a dict of JSON-decoded fields."""
        parent = SBP.from_json_dict(d)
        return MsgFwd(parent, **d)

    def from_binary(self, d):
        """Decode the binary payload d into this message's fields."""
        parsed = MsgFwd._parser.parse(d)
        for field in self.__class__.__slots__:
            setattr(self, field, getattr(parsed, field))

    def to_binary(self):
        """Return this message framed and packed as SBP bytes."""
        container = containerize(exclude_fields(self))
        self.payload = MsgFwd._parser.build(container)
        return self.pack()

    def to_json_dict(self):
        """Return a JSON-serializable dict for this message."""
        self.to_binary()
        result = super(MsgFwd, self).to_json_dict()
        result.update(walk_json_dict(exclude_fields(self)))
        return result
Ejemplo n.º 13
0
class UserAssistPlugin(interface.WindowsRegistryPlugin):
  """Plugin that parses an UserAssist key."""

  NAME = u'userassist'
  DESCRIPTION = u'Parser for User Assist Registry data.'

  # One key path filter per known UserAssist GUID subkey.
  FILTERS = frozenset([
      UserAssistWindowsRegistryKeyPathFilter(
          u'FA99DFC7-6AC2-453A-A5E2-5E2AFF4507BD'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'F4E57C4B-2036-45F0-A9AB-443BCFE33D9F'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'F2A1CB5A-E3CC-4A2E-AF9D-505A7009D442'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'CEBFF5CD-ACE2-4F4F-9178-9926F41749EA'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'CAA59E3C-4792-41A5-9909-6A6A8D32490E'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'B267E3AD-A825-4A09-82B9-EEC22AA3B847'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'A3D53349-6E61-4557-8FC7-0028EDCEEBF6'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'9E04CAB2-CC14-11DF-BB8C-A2F1DED72085'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'75048700-EF1F-11D0-9888-006097DEACF9'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'5E6AB780-7743-11CF-A12B-00AA004AE837'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'0D6D4F41-2994-4BA0-8FEF-620E43CD2812'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'BCB48336-4DDD-48FF-BB0B-D3190DACB3E2')])

  URLS = [
      u'http://blog.didierstevens.com/programs/userassist/',
      u'https://code.google.com/p/winreg-kb/wiki/UserAssistKeys',
      u'http://intotheboxes.files.wordpress.com/2010/04'
      u'/intotheboxes_2010_q1.pdf']

  # UserAssist format version used in Windows 2000, XP, 2003, Vista.
  _USERASSIST_V3_STRUCT = construct.Struct(
      u'userassist_entry',
      construct.Padding(4),
      construct.ULInt32(u'number_of_executions'),
      construct.ULInt64(u'timestamp'))

  # UserAssist format version used in Windows 2008, 7, 8.
  _USERASSIST_V5_STRUCT = construct.Struct(
      u'userassist_entry',
      construct.Padding(4),
      construct.ULInt32(u'number_of_executions'),
      construct.ULInt32(u'application_focus_count'),
      construct.ULInt32(u'application_focus_duration'),
      construct.Padding(44),
      construct.ULInt64(u'timestamp'),
      construct.Padding(4))

  def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
    """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
    version_value = registry_key.GetValueByName(u'Version')
    count_subkey = registry_key.GetSubkeyByName(u'Count')

    # Validate the Version value before trusting the entry layout.
    if not version_value:
      parser_mediator.ProduceExtractionError(u'missing version value')
      return

    if not version_value.DataIsInteger():
      parser_mediator.ProduceExtractionError(
          u'unsupported version value data type')
      return

    format_version = version_value.GetDataAsObject()
    if format_version not in (3, 5):
      parser_mediator.ProduceExtractionError(
          u'unsupported format version: {0:d}'.format(format_version))
      return

    if not count_subkey:
      parser_mediator.ProduceExtractionError(u'missing count subkey')
      return

    userassist_entry_index = 0

    for registry_value in count_subkey.GetValues():
      # Value names are ROT-13 obfuscated; decode before use.
      # NOTE(review): str.decode(u'rot-13') is Python 2 only; Python 3
      # would need codecs.decode — confirm the target runtime.
      try:
        value_name = registry_value.name.decode(u'rot-13')
      except UnicodeEncodeError as exception:
        logging.debug((
            u'Unable to decode UserAssist string: {0:s} with error: {1:s}.\n'
            u'Attempting piecewise decoding.').format(
                registry_value.name, exception))

        # Fall back to decoding one character at a time, leaving any
        # character that cannot be decoded (or is non-ASCII) unchanged.
        characters = []
        for char in registry_value.name:
          if ord(char) < 128:
            try:
              characters.append(char.decode(u'rot-13'))
            except UnicodeEncodeError:
              characters.append(char)
          else:
            characters.append(char)

        value_name = u''.join(characters)

      if format_version == 5:
        # Version 5 value names embed known-folder GUIDs; translate each
        # path segment to its folder path when known.
        path_segments = value_name.split(u'\\')

        for segment_index in range(0, len(path_segments)):
          # Remove the { } from the path segment to get the GUID.
          guid = path_segments[segment_index][1:-1]
          path_segments[segment_index] = known_folder_ids.PATHS.get(
              guid, path_segments[segment_index])

        value_name = u'\\'.join(path_segments)
        # Check if we might need to substitute values.
        if u'%' in value_name:
          # TODO: fix missing self._knowledge_base
          environment_variables = self._knowledge_base.GetEnvironmentVariables()
          value_name = path_helper.PathHelper.ExpandWindowsPath(
              value_name, environment_variables)

      value_data_size = len(registry_value.data)
      if not registry_value.DataIsBinaryData():
        parser_mediator.ProduceExtractionError(
            u'unsupported value data type: {0:s}'.format(
                registry_value.data_type_string))

      elif value_name == u'UEME_CTLSESSION':
        # Session control entry: no execution data to extract.
        pass

      elif format_version == 3:
        if value_data_size != self._USERASSIST_V3_STRUCT.sizeof():
          parser_mediator.ProduceExtractionError(
              u'unsupported value data size: {0:d}'.format(value_data_size))

        else:
          parsed_data = self._USERASSIST_V3_STRUCT.parse(registry_value.data)
          timestamp = parsed_data.get(u'timestamp', None)

          # NOTE(review): presumably compensating for the version 3
          # execution-count bias of 5 — confirm against the format
          # references in URLS.
          number_of_executions = parsed_data.get(u'number_of_executions', None)
          if number_of_executions is not None and number_of_executions > 5:
            number_of_executions -= 5

          event_data = UserAssistWindowsRegistryEventData()
          event_data.key_path = count_subkey.path
          event_data.number_of_executions = number_of_executions
          event_data.offset = registry_value.offset
          event_data.value_name = value_name

          if not timestamp:
            date_time = dfdatetime_semantic_time.SemanticTime(u'Not set')
          else:
            date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)

          # TODO: check if last written is correct.
          event = time_events.DateTimeValuesEvent(
              date_time, eventdata.EventTimestamp.WRITTEN_TIME)
          parser_mediator.ProduceEventWithEventData(event, event_data)

      elif format_version == 5:
        if value_data_size != self._USERASSIST_V5_STRUCT.sizeof():
          parser_mediator.ProduceExtractionError(
              u'unsupported value data size: {0:d}'.format(value_data_size))

        # NOTE(review): unlike the version 3 branch, parsing proceeds even
        # when the size check above failed — confirm this is intentional.
        parsed_data = self._USERASSIST_V5_STRUCT.parse(registry_value.data)

        userassist_entry_index += 1
        timestamp = parsed_data.get(u'timestamp', None)

        event_data = UserAssistWindowsRegistryEventData()
        event_data.application_focus_count = parsed_data.get(
            u'application_focus_count', None)
        event_data.application_focus_duration = parsed_data.get(
            u'application_focus_duration', None)
        event_data.entry_index = userassist_entry_index
        event_data.key_path = count_subkey.path
        event_data.number_of_executions = parsed_data.get(
            u'number_of_executions', None)
        event_data.offset = count_subkey.offset
        event_data.value_name = value_name

        if not timestamp:
          date_time = dfdatetime_semantic_time.SemanticTime(u'Not set')
        else:
          date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)

        # TODO: check if last written is correct.
        event = time_events.DateTimeValuesEvent(
            date_time, eventdata.EventTimestamp.WRITTEN_TIME)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Ejemplo n.º 14
0
class MsgSettingsWrite(SBP):
    """SBP class for message MSG_SETTINGS_WRITE (0x00A0).

    The setting message writes the device configuration.

    You can have MsgSettingsWrite inherit its fields from an existing SBP
    object, or construct it inline from a dict of its fields.

    Parameters
    ----------
    sbp : SBP
        SBP parent object to inherit from.
    setting : string
        A NULL-terminated and delimited string with contents
        [SECTION_SETTING, SETTING, VALUE]. A device will only process
        this message when it is received from sender ID 0x42.
    sender : int
        Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
    """
    _parser = construct.Struct(
        'setting' / construct.GreedyString(encoding='utf8'), )
    __slots__ = ['setting']

    def __init__(self, sbp=None, **kwargs):
        if not sbp:
            # Built inline from keyword arguments.
            super(MsgSettingsWrite, self).__init__()
            self.msg_type = SBP_MSG_SETTINGS_WRITE
            self.sender = kwargs.pop('sender', SENDER_ID)
            self.setting = kwargs.pop('setting')
        else:
            # Inherit framing from an existing SBP message, then decode
            # its payload into this message's fields.
            super(MsgSettingsWrite, self).__init__(
                sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)

    def __repr__(self):
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Build a MsgSettingsWrite from a JSON-encoded string s."""
        return MsgSettingsWrite.from_json_dict(json.loads(s))

    @staticmethod
    def from_json_dict(d):
        """Build a MsgSettingsWrite from a dict of JSON-decoded fields."""
        parent = SBP.from_json_dict(d)
        return MsgSettingsWrite(parent, **d)

    def from_binary(self, d):
        """Decode the binary payload d into this message's fields."""
        parsed = MsgSettingsWrite._parser.parse(d)
        for field in self.__class__.__slots__:
            setattr(self, field, getattr(parsed, field))

    def to_binary(self):
        """Return this message framed and packed as SBP bytes."""
        container = containerize(exclude_fields(self))
        self.payload = MsgSettingsWrite._parser.build(container)
        return self.pack()

    def to_json_dict(self):
        """Return a JSON-serializable dict for this message."""
        self.to_binary()
        result = super(MsgSettingsWrite, self).to_json_dict()
        result.update(walk_json_dict(exclude_fields(self)))
        return result
Ejemplo n.º 15
0
#!/usr/bin/python

import binascii
import construct
import datetime
import sys

from binplist import binplist

# Presumably the offset in seconds between the HFS epoch (1904-01-01) and
# the POSIX epoch (1970-01-01) — TODO confirm against the callers.
HFS_to_Epoch = 2082844800

# Fixed-size prefix of a plist alias record: a big-endian length field and
# two big-endian 32-bit timestamps separated by padding.
# NOTE(review): these structures use the legacy construct 2.5 API (name as
# the first positional argument); newer construct releases drop that form.
s_alias = construct.Struct('plist_alias', construct.Padding(4),
                           construct.UBInt16('length'), construct.Padding(6),
                           construct.UBInt32('timestamp1'),
                           construct.Padding(18),
                           construct.UBInt32('timestamp2'),
                           construct.Padding(20))

# Big-endian 16-bit record type tag.
s_type = construct.UBInt16('type')

# Two volume name strings, each preceded by a byte length and a character
# count; presumably two-byte characters given the * 2 — TODO confirm.
s_volume = construct.Struct(
    'volume', construct.UBInt16('volume1_length'),
    construct.UBInt16('characters1'),
    construct.String('volume1', lambda ctx: ctx.characters1 * 2),
    construct.Padding(2), construct.UBInt16('volume2_length'),
    construct.UBInt16('characters2'),
    construct.String('volume2', lambda ctx: ctx.characters2 * 2))

# Mount point as a Pascal-style string with a 16-bit big-endian length.
s_mount_point = construct.PascalString(
    'mount_point', length_field=construct.UBInt16('length'))

Ejemplo n.º 16
0
class MsgExtEvent(SBP):
    """SBP class for message MSG_EXT_EVENT (0x0101).

    Reports detection of an external event, the GPS time it occurred,
    which pin it was and whether it was rising or falling.

    You can have MsgExtEvent inherit its fields from an existing SBP
    object, or construct it inline from a dict of its fields.

    Parameters
    ----------
    sbp : SBP
        SBP parent object to inherit from.
    wn : int
        GPS week number
    tow : int
        GPS time of week rounded to the nearest millisecond
    ns_residual : int
        Nanosecond residual of millisecond-rounded TOW (ranges from
        -500000 to 500000)
    flags : int
        Flags
    pin : int
        Pin number.  0..9 = DEBUG0..9.
    sender : int
        Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
    """
    _parser = construct.Struct(
        'wn' / construct.Int16ul,
        'tow' / construct.Int32ul,
        'ns_residual' / construct.Int32sl,
        'flags' / construct.Int8ul,
        'pin' / construct.Int8ul,
    )
    __slots__ = ['wn', 'tow', 'ns_residual', 'flags', 'pin']

    def __init__(self, sbp=None, **kwargs):
        if not sbp:
            # Built inline from keyword arguments.
            super(MsgExtEvent, self).__init__()
            self.msg_type = SBP_MSG_EXT_EVENT
            self.sender = kwargs.pop('sender', SENDER_ID)
            self.wn = kwargs.pop('wn')
            self.tow = kwargs.pop('tow')
            self.ns_residual = kwargs.pop('ns_residual')
            self.flags = kwargs.pop('flags')
            self.pin = kwargs.pop('pin')
        else:
            # Inherit framing from an existing SBP message, then decode
            # its payload into this message's fields.
            super(MsgExtEvent, self).__init__(
                sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)

    def __repr__(self):
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Build a MsgExtEvent from a JSON-encoded string s."""
        return MsgExtEvent.from_json_dict(json.loads(s))

    @staticmethod
    def from_json_dict(d):
        """Build a MsgExtEvent from a dict of JSON-decoded fields."""
        parent = SBP.from_json_dict(d)
        return MsgExtEvent(parent, **d)

    def from_binary(self, d):
        """Decode the binary payload d into this message's fields."""
        parsed = MsgExtEvent._parser.parse(d)
        for field in self.__class__.__slots__:
            setattr(self, field, getattr(parsed, field))

    def to_binary(self):
        """Return this message framed and packed as SBP bytes."""
        container = containerize(exclude_fields(self))
        self.payload = MsgExtEvent._parser.build(container)
        return self.pack()

    def into_buffer(self, buf, offset):
        """Pack this message into the provided buffer at offset."""
        self.payload = containerize(exclude_fields(self))
        self.parser = MsgExtEvent._parser
        self.stream_payload.reset(buf, offset)
        return self.pack_into(buf, offset, self._build_payload)

    def to_json_dict(self):
        """Return a JSON-serializable dict for this message."""
        self.to_binary()
        result = super(MsgExtEvent, self).to_json_dict()
        result.update(walk_json_dict(exclude_fields(self)))
        return result
Ejemplo n.º 17
0
import construct

# Three signed 16-bit little-endian accelerometer axes.
accelerometer_data = construct.Struct("accel_x" / construct.Int16sl,
                                      "accel_y" / construct.Int16sl,
                                      "accel_z" / construct.Int16sl)

# Three 24-bit gyroscope values, packed at the bit level.
gyroscope_data = construct.BitStruct("gyro_roll" / construct.BitsInteger(24),
                                     "gyro_yaw" / construct.BitsInteger(24),
                                     "gyro_pitch" / construct.BitsInteger(24))

# Magnetometer block: 6 bytes, currently skipped as padding.
magnet_data = construct.Struct(construct.Padding(6))

# One 16-bit touch sample: a pad flag, 3 extra bits and a 12-bit value.
touchscreen_coords_data = construct.BitStruct(
    "touch_pad" / construct.Bit, "touch_extra" / construct.BitsInteger(3),
    "touch_value" / construct.BitsInteger(12))
# Each touch point carries two samples (presumably x and y — confirm);
# the full report contains 10 points.
touchscreen_points_data = construct.Struct(
    "coords" / construct.Array(2, touchscreen_coords_data))
touchscreen_data = construct.Struct(
    "points" / construct.Array(10, touchscreen_points_data))

# Full input report: big-endian header fields followed by the embedded
# sensor blocks defined above.
# NOTE(review): construct.Embedded was removed in construct 2.10 — this
# module requires an older construct release; confirm the pinned version.
input_data = construct.Struct(
    "sequence_id" / construct.Int16ub, "buttons" / construct.Int16ub,
    "power_status" / construct.Int8ub, "battery_charge" / construct.Int8ub,
    "left_stick_x" / construct.Int16ub, "left_stick_y" / construct.Int16ub,
    "right_stick_x" / construct.Int16ub, "right_stick_y" / construct.Int16ub,
    "audio_volume" / construct.Int8ub, construct.Embedded(accelerometer_data),
    construct.Embedded(gyroscope_data), construct.Embedded(magnet_data),
    construct.Embedded(touchscreen_data),
    "unkown_0" / construct.BytesInteger(4), "extra_buttons" / construct.Int8ub,
    "unknown_1" / construct.BytesInteger(46),
    "fw_version_neg" / construct.Int8ub)
Ejemplo n.º 18
0
class KeychainParser(interface.FileObjectParser):
    """Parser for Keychain files."""

    NAME = 'mac_keychain'
    DESCRIPTION = 'Parser for MacOS Keychain files.'

    # Expected file signature and version of supported keychain files.
    KEYCHAIN_SIGNATURE = b'kych'
    KEYCHAIN_MAJOR_VERSION = 1
    KEYCHAIN_MINOR_VERSION = 0

    # Table record types for application and internet password entries.
    RECORD_TYPE_APPLICATION = 0x80000000
    RECORD_TYPE_INTERNET = 0x80000001

    # DB HEADER.
    # NOTE(review): these structures use the legacy construct 2.5 API
    # (name as the first positional argument); newer construct releases
    # drop that form — confirm the pinned construct version.
    KEYCHAIN_DB_HEADER = construct.Struct('db_header',
                                          construct.Bytes('signature', 4),
                                          construct.UBInt16('major_version'),
                                          construct.UBInt16('minor_version'),
                                          construct.UBInt32('header_size'),
                                          construct.UBInt32('schema_offset'),
                                          construct.Padding(4))

    # DB SCHEMA.
    KEYCHAIN_DB_SCHEMA = construct.Struct(
        'db_schema', construct.UBInt32('size'),
        construct.UBInt32('number_of_tables'))

    # For each number_of_tables, the schema has a TABLE_OFFSET with the
    # offset starting in the DB_SCHEMA.
    TABLE_OFFSET = construct.UBInt32('table_offset')

    TABLE_HEADER = construct.Struct('table_header',
                                    construct.UBInt32('table_size'),
                                    construct.UBInt32('record_type'),
                                    construct.UBInt32('number_of_records'),
                                    construct.UBInt32('first_record'),
                                    construct.UBInt32('index_offset'),
                                    construct.Padding(4),
                                    construct.UBInt32('recordnumbercount'))

    # Common record header; the *_time and name/description fields hold
    # offsets relative to the record start (used with a - 1 adjustment by
    # the readers below).
    RECORD_HEADER = construct.Struct(
        'record_header', construct.UBInt32('entry_length'),
        construct.Padding(12), construct.UBInt32('ssgp_length'),
        construct.Padding(4), construct.UBInt32('creation_time'),
        construct.UBInt32('last_modification_time'),
        construct.UBInt32('text_description'), construct.Padding(4),
        construct.UBInt32('comments'), construct.Padding(8),
        construct.UBInt32('entry_name'), construct.Padding(20),
        construct.UBInt32('account_name'), construct.Padding(4))

    # Application password record: the common header plus padding.
    RECORD_HEADER_APP = construct.Struct('record_entry_app', RECORD_HEADER,
                                         construct.Padding(4))

    # Internet password record: the common header plus location fields.
    RECORD_HEADER_INET = construct.Struct('record_entry_inet', RECORD_HEADER,
                                          construct.UBInt32('where'),
                                          construct.UBInt32('protocol'),
                                          construct.UBInt32('type'),
                                          construct.Padding(4),
                                          construct.UBInt32('url'))

    # Length-prefixed text attribute (32-bit big-endian length).
    TEXT = construct.PascalString('text',
                                  length_field=construct.UBInt32('length'))

    # Timestamps are stored as fixed-width character fields:
    # year(4) month(2) day(2) hour(2) minute(2) second(2) + padding.
    TIME = construct.Struct('timestamp', construct.String('year', 4),
                            construct.String('month', 2),
                            construct.String('day', 2),
                            construct.String('hour', 2),
                            construct.String('minute', 2),
                            construct.String('second', 2),
                            construct.Padding(2))

    TYPE_TEXT = construct.String('type', 4)

    # Maps 4-character on-disk protocol tags to protocol names.
    # TODO: add more protocols.
    _PROTOCOL_TRANSLATION_DICT = {
        'htps': 'https',
        'smtp': 'smtp',
        'imap': 'imap',
        'http': 'http'
    }

    def _ReadEntryApplication(self, parser_mediator, file_object):
        """Extracts the information from an application password entry.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.
    """
        # Remember where this record starts: header fields hold offsets
        # relative to it, and we seek past the whole record when done.
        record_offset = file_object.tell()
        try:
            record_struct = self.RECORD_HEADER_APP.parse_stream(file_object)
        except (IOError, construct.FieldError):
            # Malformed record: report and stop processing this entry.
            parser_mediator.ProduceExtractionError(
                'unable to parse record structure at offset: 0x{0:08x}'.format(
                    record_offset))
            return

        # Read the attributes shared by all record types.
        (ssgp_hash, creation_time, last_modification_time, text_description,
         comments, entry_name,
         account_name) = self._ReadEntryHeader(parser_mediator, file_object,
                                               record_struct.record_header,
                                               record_offset)

        # Move to the end of the record.
        next_record_offset = (record_offset +
                              record_struct.record_header.entry_length)
        file_object.seek(next_record_offset, os.SEEK_SET)

        event_data = KeychainApplicationRecordEventData()
        event_data.account_name = account_name
        event_data.comments = comments
        event_data.entry_name = entry_name
        event_data.ssgp_hash = ssgp_hash
        event_data.text_description = text_description

        # Emit one event per timestamp that was successfully parsed.
        if creation_time:
            event = time_events.DateTimeValuesEvent(
                creation_time, definitions.TIME_DESCRIPTION_CREATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        if last_modification_time:
            event = time_events.DateTimeValuesEvent(
                last_modification_time,
                definitions.TIME_DESCRIPTION_MODIFICATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

    def _ReadEntryHeader(self, parser_mediator, file_object, record,
                         record_offset):
        """Read the common record attributes.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.
      record (construct.Struct): record header structure.
      record_offset (int): offset of the start of the record.

    Returns:
      A tuple containing:
        ssgp_hash: Hash of the encrypted data (passwd, cert, note).
        creation_time (dfdatetime.TimeElements): entry creation time or None.
        last_modification_time ((dfdatetime.TimeElements): entry last
            modification time or None.
        text_description: A brief description of the entry.
        entry_name: Name of the entry
        account_name: Name of the account.
    """
        # TODO: reduce number of seeks and/or offset calculations needed
        # for parsing.

        # Info: The hash header always start with the string ssgp follow by
        #       the hash. Furthermore The fields are always a multiple of four.
        #       Then if it is not multiple the value is padded by 0x00.
        ssgp_hash = binascii.hexlify(file_object.read(record.ssgp_length)[4:])
        ssgp_hash = codecs.decode(ssgp_hash, 'ascii')

        creation_time = None

        structure_offset = record_offset + record.creation_time - 1
        file_object.seek(structure_offset, os.SEEK_SET)

        try:
            time_structure = self.TIME.parse_stream(file_object)
        except construct.FieldError as exception:
            time_structure = None
            parser_mediator.ProduceExtractionError(
                'unable to parse creation time with error: {0!s}'.format(
                    exception))

        if time_structure:
            time_elements_tuple = (time_structure.year, time_structure.month,
                                   time_structure.day, time_structure.hour,
                                   time_structure.minute,
                                   time_structure.second)

            creation_time = dfdatetime_time_elements.TimeElements()
            try:
                creation_time.CopyFromStringTuple(
                    time_elements_tuple=time_elements_tuple)
            except ValueError:
                creation_time = None
                parser_mediator.ProduceExtractionError(
                    'invalid creation time value: {0!s}'.format(
                        time_elements_tuple))

        last_modification_time = None

        structure_offset = record_offset + record.last_modification_time - 1
        file_object.seek(structure_offset, os.SEEK_SET)

        try:
            time_structure = self.TIME.parse_stream(file_object)
        except construct.FieldError as exception:
            time_structure = None
            parser_mediator.ProduceExtractionError(
                'unable to parse last modification time with error: {0!s}'.
                format(exception))

        if time_structure:
            time_elements_tuple = (time_structure.year, time_structure.month,
                                   time_structure.day, time_structure.hour,
                                   time_structure.minute,
                                   time_structure.second)

            last_modification_time = dfdatetime_time_elements.TimeElements()
            try:
                last_modification_time.CopyFromStringTuple(
                    time_elements_tuple=time_elements_tuple)
            except ValueError:
                last_modification_time = None
                parser_mediator.ProduceExtractionError(
                    'invalid last modification time value: {0!s}'.format(
                        time_elements_tuple))

        text_description = 'N/A'
        if record.text_description:
            structure_offset = record_offset + record.text_description - 1
            file_object.seek(structure_offset, os.SEEK_SET)

            try:
                text_description = self.TEXT.parse_stream(file_object)
                text_description = codecs.decode(text_description, 'utf-8')
            except construct.FieldError as exception:
                parser_mediator.ProduceExtractionError(
                    'unable to parse text description with error: {0!s}'.
                    format(exception))

        comments = 'N/A'
        if record.comments:
            structure_offset = record_offset + record.comments - 1
            file_object.seek(structure_offset, os.SEEK_SET)

            try:
                comments = self.TEXT.parse_stream(file_object)
            except construct.FieldError as exception:
                parser_mediator.ProduceExtractionError(
                    'unable to parse comments with error: {0!s}'.format(
                        exception))

        structure_offset = record_offset + record.entry_name - 1
        file_object.seek(structure_offset, os.SEEK_SET)

        try:
            entry_name = self.TEXT.parse_stream(file_object)
            entry_name = codecs.decode(entry_name, 'utf-8')
        except construct.FieldError as exception:
            entry_name = 'N/A'
            parser_mediator.ProduceExtractionError(
                'unable to parse entry name with error: {0!s}'.format(
                    exception))

        structure_offset = record_offset + record.account_name - 1
        file_object.seek(structure_offset, os.SEEK_SET)

        try:
            account_name = self.TEXT.parse_stream(file_object)
            account_name = codecs.decode(account_name, 'utf-8')
        except construct.FieldError as exception:
            account_name = 'N/A'
            parser_mediator.ProduceExtractionError(
                'unable to parse account name with error: {0!s}'.format(
                    exception))

        return (ssgp_hash, creation_time, last_modification_time,
                text_description, comments, entry_name, account_name)

    def _ReadEntryInternet(self, parser_mediator, file_object):
        """Extracts the information from an Internet password entry.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.
    """
        record_offset = file_object.tell()
        try:
            record_header_struct = self.RECORD_HEADER_INET.parse_stream(
                file_object)
        except (IOError, construct.FieldError):
            parser_mediator.ProduceExtractionError(
                ('unable to parse record header structure at offset: '
                 '0x{0:08x}').format(record_offset))
            return

        # The generic header fields are shared with application entries.
        (ssgp_hash, creation_time, last_modification_time, text_description,
         comments, entry_name, account_name) = self._ReadEntryHeader(
             parser_mediator, file_object, record_header_struct.record_header,
             record_offset)

        where = 'N/A'
        protocol = 'N/A'
        type_protocol = 'N/A'

        if record_header_struct.where:
            # String offsets in the record are 1-based, hence the -1.
            string_offset = record_offset + record_header_struct.where - 1
            file_object.seek(string_offset, os.SEEK_SET)
            where = codecs.decode(
                self.TEXT.parse_stream(file_object), 'utf-8')

            string_offset = record_offset + record_header_struct.protocol - 1
            file_object.seek(string_offset, os.SEEK_SET)
            protocol = codecs.decode(
                self.TYPE_TEXT.parse_stream(file_object), 'utf-8')

            string_offset = record_offset + record_header_struct.type - 1
            file_object.seek(string_offset, os.SEEK_SET)
            type_protocol = codecs.decode(
                self.TEXT.parse_stream(file_object), 'utf-8')
            type_protocol = self._PROTOCOL_TRANSLATION_DICT.get(
                type_protocol, type_protocol)

            if record_header_struct.url:
                string_offset = record_offset + record_header_struct.url - 1
                file_object.seek(string_offset, os.SEEK_SET)
                url = codecs.decode(
                    self.TEXT.parse_stream(file_object), 'utf-8')
                where = '{0:s}{1:s}'.format(where, url)

        # Position the stream at the start of the next record.
        next_record_offset = (record_offset +
                              record_header_struct.record_header.entry_length)
        file_object.seek(next_record_offset, os.SEEK_SET)

        event_data = KeychainInternetRecordEventData()
        event_data.account_name = account_name
        event_data.comments = comments
        event_data.entry_name = entry_name
        event_data.protocol = protocol
        event_data.ssgp_hash = ssgp_hash
        event_data.text_description = text_description
        event_data.type_protocol = type_protocol
        event_data.where = where

        if creation_time:
            event = time_events.DateTimeValuesEvent(
                creation_time, definitions.TIME_DESCRIPTION_CREATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        if last_modification_time:
            event = time_events.DateTimeValuesEvent(
                last_modification_time,
                definitions.TIME_DESCRIPTION_MODIFICATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)

    def _ReadTableOffsets(self, parser_mediator, file_object):
        """Reads the table offsets.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.

    Returns:
      list[int]: table offsets.
    """
        # Keychain header layout:
        # [DBHEADER] + [DBSCHEMA] + [OFFSET TABLE A] + ... + [OFFSET TABLE Z]
        # Each table offset stored in the schema is relative to the first byte
        # of the DB schema, so the size of the DB header must be added to get
        # an absolute file offset. Tables are ordered by file position, from
        # the top to the bottom of the file.
        try:
            db_schema_struct = self.KEYCHAIN_DB_SCHEMA.parse_stream(
                file_object)
        except (IOError, construct.FieldError):
            parser_mediator.ProduceExtractionError(
                'unable to parse database schema structure')
            return []

        db_header_size = self.KEYCHAIN_DB_HEADER.sizeof()

        table_offsets = []
        for table_index in range(db_schema_struct.number_of_tables):
            try:
                relative_offset = self.TABLE_OFFSET.parse_stream(file_object)
            except (IOError, construct.FieldError):
                parser_mediator.ProduceExtractionError(
                    'unable to parse table offsets: {0:d}'.format(table_index))
                return []

            table_offsets.append(relative_offset + db_header_size)

        return table_offsets

    @classmethod
    def GetFormatSpecification(cls):
        """Retrieves the format specification.

    Returns:
      FormatSpecification: format specification.
    """
        # The keychain signature is expected at the very start of the file.
        format_specification = specification.FormatSpecification(cls.NAME)
        format_specification.AddNewSignature(
            cls.KEYCHAIN_SIGNATURE, offset=0)
        return format_specification

    def ParseFileObject(self, parser_mediator, file_object, **kwargs):
        """Parses a MacOS keychain file-like object.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.

    Raises:
      UnableToParseFile: when the file cannot be parsed.
    """
        try:
            db_header = self.KEYCHAIN_DB_HEADER.parse_stream(file_object)
        except (IOError, construct.FieldError):
            raise errors.UnableToParseFile('Unable to parse file header.')

        if db_header.signature != self.KEYCHAIN_SIGNATURE:
            raise errors.UnableToParseFile('Not a MacOS keychain file.')

        if (db_header.major_version != self.KEYCHAIN_MAJOR_VERSION
                or db_header.minor_version != self.KEYCHAIN_MINOR_VERSION):
            # BUG FIX: the version fields are parsed integers; the previous
            # '{0:s}.{1:s}' format specifiers raised ValueError whenever this
            # error path was taken, masking the real extraction error.
            parser_mediator.ProduceExtractionError(
                'unsupported format version: {0:d}.{1:d}'.format(
                    db_header.major_version, db_header.minor_version))
            return

        # TODO: document format and determine if -1 offset correction is needed.
        table_offsets = self._ReadTableOffsets(parser_mediator, file_object)
        for table_offset in table_offsets:
            # Skipping X bytes, unknown data at this point.
            file_object.seek(table_offset, os.SEEK_SET)

            try:
                table = self.TABLE_HEADER.parse_stream(file_object)
            except (IOError, construct.FieldError):
                parser_mediator.ProduceExtractionError(
                    'unable to parse table structure at offset: 0x{0:08x}'.
                    format(table_offset))
                continue

            # table_offset: absolute byte in the file where the table starts.
            # table.first_record: first record in the table, relative to the
            #                     first byte of the table.
            file_object.seek(table_offset + table.first_record, os.SEEK_SET)

            if table.record_type == self.RECORD_TYPE_INTERNET:
                for _ in range(table.number_of_records):
                    self._ReadEntryInternet(parser_mediator, file_object)

            elif table.record_type == self.RECORD_TYPE_APPLICATION:
                for _ in range(table.number_of_records):
                    self._ReadEntryApplication(parser_mediator, file_object)
Ejemplo n.º 19
0
class AppCompatCacheDataParser(object):
    """Class that parses Application Compatibility Cache data."""

    # Format type constants, as returned by CheckSignature().
    FORMAT_TYPE_2000 = 1
    FORMAT_TYPE_XP = 2
    FORMAT_TYPE_2003 = 3
    FORMAT_TYPE_VISTA = 4
    FORMAT_TYPE_7 = 5
    FORMAT_TYPE_8 = 6
    FORMAT_TYPE_10 = 7

    # AppCompatCache format signature used in Windows XP.
    _HEADER_SIGNATURE_XP = 0xdeadbeef

    # AppCompatCache format used in Windows XP.
    # Header size: 4 * 4 + 96 * 4 = 400 bytes.
    _HEADER_XP_32BIT_STRUCT = construct.Struct(
        u'appcompatcache_header_xp', construct.ULInt32(u'signature'),
        construct.ULInt32(u'number_of_cached_entries'),
        construct.ULInt32(u'number_of_lru_entries'),
        construct.ULInt32(u'unknown1'),
        construct.Array(96, construct.ULInt32("lru_entry")))

    # Cached entry size: 528 + 3 * 8 = 552 bytes.
    _CACHED_ENTRY_XP_32BIT_STRUCT = construct.Struct(
        u'appcompatcache_cached_entry_xp_32bit',
        construct.Array(528, construct.Byte(u'path')),
        construct.ULInt64(u'last_modification_time'),
        construct.ULInt64(u'file_size'),
        construct.ULInt64(u'last_update_time'))

    # AppCompatCache format signature used in Windows 2003, Vista and 2008.
    _HEADER_SIGNATURE_2003 = 0xbadc0ffe

    # AppCompatCache format used in Windows 2003.
    _HEADER_2003_STRUCT = construct.Struct(
        u'appcompatcache_header_2003', construct.ULInt32(u'signature'),
        construct.ULInt32(u'number_of_cached_entries'))

    _CACHED_ENTRY_2003_32BIT_STRUCT = construct.Struct(
        u'appcompatcache_cached_entry_2003_32bit',
        construct.ULInt16(u'path_size'),
        construct.ULInt16(u'maximum_path_size'),
        construct.ULInt32(u'path_offset'),
        construct.ULInt64(u'last_modification_time'),
        construct.ULInt64(u'file_size'))

    _CACHED_ENTRY_2003_64BIT_STRUCT = construct.Struct(
        u'appcompatcache_cached_entry_2003_64bit',
        construct.ULInt16(u'path_size'),
        construct.ULInt16(u'maximum_path_size'),
        construct.ULInt32(u'unknown1'), construct.ULInt64(u'path_offset'),
        construct.ULInt64(u'last_modification_time'),
        construct.ULInt64(u'file_size'))

    # AppCompatCache format used in Windows Vista and 2008.
    # NOTE(review): ParseHeader references a _HEADER_VISTA_STRUCT that is not
    # defined among these constants — confirm it is defined elsewhere.
    _CACHED_ENTRY_VISTA_32BIT_STRUCT = construct.Struct(
        u'appcompatcache_cached_entry_vista_32bit',
        construct.ULInt16(u'path_size'),
        construct.ULInt16(u'maximum_path_size'),
        construct.ULInt32(u'path_offset'),
        construct.ULInt64(u'last_modification_time'),
        construct.ULInt32(u'insertion_flags'),
        construct.ULInt32(u'shim_flags'))

    _CACHED_ENTRY_VISTA_64BIT_STRUCT = construct.Struct(
        u'appcompatcache_cached_entry_vista_64bit',
        construct.ULInt16(u'path_size'),
        construct.ULInt16(u'maximum_path_size'),
        construct.ULInt32(u'unknown1'), construct.ULInt64(u'path_offset'),
        construct.ULInt64(u'last_modification_time'),
        construct.ULInt32(u'insertion_flags'),
        construct.ULInt32(u'shim_flags'))

    # AppCompatCache format signature used in Windows 7 and 2008 R2.
    _HEADER_SIGNATURE_7 = 0xbadc0fee

    # AppCompatCache format used in Windows 7 and 2008 R2.
    _HEADER_7_STRUCT = construct.Struct(
        u'appcompatcache_header_7', construct.ULInt32(u'signature'),
        construct.ULInt32(u'number_of_cached_entries'), construct.Padding(120))

    _CACHED_ENTRY_7_32BIT_STRUCT = construct.Struct(
        u'appcompatcache_cached_entry_7_32bit',
        construct.ULInt16(u'path_size'),
        construct.ULInt16(u'maximum_path_size'),
        construct.ULInt32(u'path_offset'),
        construct.ULInt64(u'last_modification_time'),
        construct.ULInt32(u'insertion_flags'),
        construct.ULInt32(u'shim_flags'), construct.ULInt32(u'data_size'),
        construct.ULInt32(u'data_offset'))

    _CACHED_ENTRY_7_64BIT_STRUCT = construct.Struct(
        u'appcompatcache_cached_entry_7_64bit',
        construct.ULInt16(u'path_size'),
        construct.ULInt16(u'maximum_path_size'),
        construct.ULInt32(u'unknown1'), construct.ULInt64(u'path_offset'),
        construct.ULInt64(u'last_modification_time'),
        construct.ULInt32(u'insertion_flags'),
        construct.ULInt32(u'shim_flags'), construct.ULInt64(u'data_size'),
        construct.ULInt64(u'data_offset'))

    # AppCompatCache format used in Windows 8.0 and 8.1.
    # CheckSignature also uses this value as the byte offset of the first
    # cached entry signature within the value data.
    _HEADER_SIGNATURE_8 = 0x00000080

    _HEADER_8_STRUCT = construct.Struct(u'appcompatcache_header_8',
                                        construct.ULInt32(u'signature'),
                                        construct.ULInt32(u'unknown1'),
                                        construct.Padding(120))

    _CACHED_ENTRY_HEADER_8_STRUCT = construct.Struct(
        u'appcompatcache_cached_entry_header_8',
        construct.ULInt32(u'signature'), construct.ULInt32(u'unknown1'),
        construct.ULInt32(u'cached_entry_data_size'),
        construct.ULInt16(u'path_size'))

    # AppCompatCache format used in Windows 8.0.
    _CACHED_ENTRY_SIGNATURE_8_0 = u'00ts'

    # AppCompatCache format used in Windows 8.1.
    _CACHED_ENTRY_SIGNATURE_8_1 = u'10ts'

    # AppCompatCache format used in Windows 10.
    # CheckSignature also uses this value as the byte offset of the first
    # cached entry signature within the value data.
    _HEADER_SIGNATURE_10 = 0x00000030

    _HEADER_10_STRUCT = construct.Struct(
        u'appcompatcache_header_8', construct.ULInt32(u'signature'),
        construct.ULInt32(u'unknown1'), construct.Padding(28),
        construct.ULInt32(u'number_of_cached_entries'), construct.Padding(8))

    def __init__(self, debug=False):
        """Initializes an Application Compatibility Cache data parser.

    Args:
      debug (Optional[bool]): True if debug information should be printed.
    """
        super(AppCompatCacheDataParser, self).__init__()
        # Enables the verbose hexdump/print output in the parse methods.
        self._debug = debug

    def CheckSignature(self, value_data):
        """Parses the signature.

    Args:
      value_data (bytes): value data.

    Returns:
      int: format type or None.
    """
        signature = construct.ULInt32(u'signature').parse(value_data)

        if signature == self._HEADER_SIGNATURE_XP:
            return self.FORMAT_TYPE_XP

        if signature == self._HEADER_SIGNATURE_2003:
            # TODO: determine which format version is used (2003 or Vista).
            return self.FORMAT_TYPE_2003

        if signature == self._HEADER_SIGNATURE_7:
            return self.FORMAT_TYPE_7

        if signature == self._HEADER_SIGNATURE_8:
            # The signature value doubles as the offset of the first cached
            # entry, whose own 4-character signature picks the sub-format.
            cached_entry_signature = value_data[signature:signature + 4]
            if cached_entry_signature in (self._CACHED_ENTRY_SIGNATURE_8_0,
                                          self._CACHED_ENTRY_SIGNATURE_8_1):
                return self.FORMAT_TYPE_8

        elif signature == self._HEADER_SIGNATURE_10:
            # Windows 10 uses the same cached entry signature as Windows 8.1.
            cached_entry_signature = value_data[signature:signature + 4]
            if cached_entry_signature == self._CACHED_ENTRY_SIGNATURE_8_1:
                return self.FORMAT_TYPE_10

        return None

    def ParseHeader(self, format_type, value_data):
        """Parses the header.

    Args:
      format_type (int): format type.
      value_data (bytes): value data.

    Returns:
      AppCompatCacheHeader: header.

    Raises:
      RuntimeError: if the format type is not supported.
    """
        if format_type not in (self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003,
                               self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7,
                               self.FORMAT_TYPE_8, self.FORMAT_TYPE_10):
            raise RuntimeError(
                u'Unsupported format type: {0:d}'.format(format_type))

        header_object = AppCompatCacheHeader()

        # Select the header structure matching the format type; the guard
        # above guarantees exactly one branch runs, so header_struct is
        # always bound below.
        if format_type == self.FORMAT_TYPE_XP:
            header_object.header_size = self._HEADER_XP_32BIT_STRUCT.sizeof()
            header_struct = self._HEADER_XP_32BIT_STRUCT.parse(value_data)

        elif format_type == self.FORMAT_TYPE_2003:
            header_object.header_size = self._HEADER_2003_STRUCT.sizeof()
            header_struct = self._HEADER_2003_STRUCT.parse(value_data)

        elif format_type == self.FORMAT_TYPE_VISTA:
            # NOTE(review): _HEADER_VISTA_STRUCT is not defined among the
            # class constants visible in this file — confirm it exists, or
            # this branch raises AttributeError.
            header_object.header_size = self._HEADER_VISTA_STRUCT.sizeof()
            header_struct = self._HEADER_VISTA_STRUCT.parse(value_data)

        elif format_type == self.FORMAT_TYPE_7:
            header_object.header_size = self._HEADER_7_STRUCT.sizeof()
            header_struct = self._HEADER_7_STRUCT.parse(value_data)

        elif format_type == self.FORMAT_TYPE_8:
            header_object.header_size = self._HEADER_8_STRUCT.sizeof()
            header_struct = self._HEADER_8_STRUCT.parse(value_data)

        elif format_type == self.FORMAT_TYPE_10:
            header_object.header_size = self._HEADER_10_STRUCT.sizeof()
            header_struct = self._HEADER_10_STRUCT.parse(value_data)

        if self._debug:
            print(u'Header data:')
            print(hexdump.Hexdump(value_data[0:header_object.header_size]))

        if self._debug:
            print(u'Signature\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
                header_struct.get(u'signature')))

        # All formats except Windows 8.x store the number of cached entries
        # in the header.
        if format_type in (self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003,
                           self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7,
                           self.FORMAT_TYPE_10):
            header_object.number_of_cached_entries = header_struct.get(
                u'number_of_cached_entries')

            if self._debug:
                print(u'Number of cached entries\t\t\t\t\t\t: {0:d}'.format(
                    header_object.number_of_cached_entries))

        if format_type == self.FORMAT_TYPE_XP:
            number_of_lru_entries = header_struct.get(u'number_of_lru_entries')
            if self._debug:
                print(u'Number of LRU entries\t\t\t\t\t\t\t: 0x{0:08x}'.format(
                    number_of_lru_entries))
                print(u'Unknown1\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
                    header_struct.get(u'unknown1')))

                print(u'LRU entries:')
            # The XP header embeds up to 96 LRU entry indexes starting at
            # byte offset 16 (after the 4 leading 32-bit header fields).
            data_offset = 16
            if number_of_lru_entries > 0 and number_of_lru_entries <= 96:
                for lru_entry_index in range(number_of_lru_entries):
                    lru_entry = construct.ULInt32(u'cache_entry_index').parse(
                        value_data[data_offset:data_offset + 4])
                    data_offset += 4

                    if self._debug:
                        # 400 is the XP header size and 552 the XP cached
                        # entry size (see the struct definitions).
                        print((u'LRU entry: {0:d}\t\t\t\t\t\t\t\t: {1:d} '
                               u'(offset: 0x{2:08x})').format(
                                   lru_entry_index, lru_entry,
                                   400 + (lru_entry * 552)))

                if self._debug:
                    print(u'')

            if self._debug:
                print(u'Unknown data:')
                print(hexdump.Hexdump(value_data[data_offset:400]))

        elif format_type == self.FORMAT_TYPE_8:
            if self._debug:
                print(u'Unknown1\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
                    header_struct.get(u'unknown1')))

        if self._debug:
            print(u'')

        return header_object

    def DetermineCacheEntrySize(self, format_type, value_data,
                                cached_entry_offset):
        """Determines the size of a cached entry.

    Args:
      format_type (int): format type.
      value_data (bytes): value data.
      cached_entry_offset (int): offset of the first cached entry data
          relative to the start of the value data.

    Returns:
      int: cached entry size or None if it could not be determined.

    Raises:
      RuntimeError: if the format type is not supported.
    """
        if format_type not in (self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003,
                               self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7,
                               self.FORMAT_TYPE_8, self.FORMAT_TYPE_10):
            raise RuntimeError(
                u'Unsupported format type: {0:d}'.format(format_type))

        cached_entry_data = value_data[cached_entry_offset:]
        cached_entry_size = 0

        if format_type == self.FORMAT_TYPE_XP:
            cached_entry_size = self._CACHED_ENTRY_XP_32BIT_STRUCT.sizeof()

        elif format_type in (self.FORMAT_TYPE_2003, self.FORMAT_TYPE_VISTA,
                             self.FORMAT_TYPE_7):
            path_size = construct.ULInt16(u'path_size').parse(
                cached_entry_data[0:2])
            maximum_path_size = construct.ULInt16(u'maximum_path_size').parse(
                cached_entry_data[2:4])
            path_offset_32bit = construct.ULInt32(u'path_offset').parse(
                cached_entry_data[4:8])
            # BUG FIX: the 64-bit path offset occupies 8 bytes; it was
            # previously parsed with ULInt32, which silently truncated it to
            # the lower 32 bits and could misclassify 64-bit entries.
            path_offset_64bit = construct.ULInt64(u'path_offset').parse(
                cached_entry_data[8:16])

            if maximum_path_size < path_size:
                logging.error(u'Path size value out of bounds.')
                return None

            # The path is stored with an end-of-string character, so a valid
            # entry has maximum_path_size exceeding path_size by exactly 2.
            path_end_of_string_size = maximum_path_size - path_size
            if path_size == 0 or path_end_of_string_size != 2:
                logging.error(u'Unsupported path size values.')
                return None

            # Assume the entry is 64-bit if the 32-bit path offset is 0 and
            # the 64-bit path offset is set.
            if path_offset_32bit == 0 and path_offset_64bit != 0:
                if format_type == self.FORMAT_TYPE_2003:
                    cached_entry_size = (
                        self._CACHED_ENTRY_2003_64BIT_STRUCT.sizeof())
                elif format_type == self.FORMAT_TYPE_VISTA:
                    cached_entry_size = (
                        self._CACHED_ENTRY_VISTA_64BIT_STRUCT.sizeof())
                elif format_type == self.FORMAT_TYPE_7:
                    cached_entry_size = (
                        self._CACHED_ENTRY_7_64BIT_STRUCT.sizeof())

            else:
                if format_type == self.FORMAT_TYPE_2003:
                    cached_entry_size = (
                        self._CACHED_ENTRY_2003_32BIT_STRUCT.sizeof())
                elif format_type == self.FORMAT_TYPE_VISTA:
                    cached_entry_size = (
                        self._CACHED_ENTRY_VISTA_32BIT_STRUCT.sizeof())
                elif format_type == self.FORMAT_TYPE_7:
                    cached_entry_size = (
                        self._CACHED_ENTRY_7_32BIT_STRUCT.sizeof())

        elif format_type in (self.FORMAT_TYPE_8, self.FORMAT_TYPE_10):
            # Windows 8/10 entries are variable-sized; return the fixed
            # header size here, ParseCachedEntry reads the remainder.
            cached_entry_size = self._CACHED_ENTRY_HEADER_8_STRUCT.sizeof()

        return cached_entry_size

    def ParseCachedEntry(self, format_type, value_data, cached_entry_index,
                         cached_entry_offset, cached_entry_size):
        """Parses a cached entry.

    Args:
      format_type (int): format type.
      value_data (bytess): value data.
      cached_entry_index (int): cached entry index.
      cached_entry_offset (int): offset of the first cached entry data
          relative to the start of the value data.
      cached_entry_size (int): cached entry data size.

    Returns:
      AppCompatCacheCachedEntry: cached entry.

    Raises:
      RuntimeError: if the format type is not supported.
    """
        if format_type not in (self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003,
                               self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7,
                               self.FORMAT_TYPE_8, self.FORMAT_TYPE_10):
            raise RuntimeError(
                u'Unsupported format type: {0:d}'.format(format_type))

        cached_entry_data = value_data[
            cached_entry_offset:cached_entry_offset + cached_entry_size]

        if format_type in (self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003,
                           self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7):
            if self._debug:
                print(u'Cached entry: {0:d} data:'.format(cached_entry_index))
                print(hexdump.Hexdump(cached_entry_data))

        elif format_type in (self.FORMAT_TYPE_8, self.FORMAT_TYPE_10):
            if self._debug:
                print(u'Cached entry: {0:d} header data:'.format(
                    cached_entry_index))
                print(hexdump.Hexdump(cached_entry_data[:-2]))

        cached_entry_struct = None

        if format_type == self.FORMAT_TYPE_XP:
            if cached_entry_size == self._CACHED_ENTRY_XP_32BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_XP_32BIT_STRUCT.parse(
                    cached_entry_data)

        elif format_type == self.FORMAT_TYPE_2003:
            if cached_entry_size == self._CACHED_ENTRY_2003_32BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_2003_32BIT_STRUCT.parse(
                    cached_entry_data)

            elif cached_entry_size == self._CACHED_ENTRY_2003_64BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_2003_64BIT_STRUCT.parse(
                    cached_entry_data)

        elif format_type == self.FORMAT_TYPE_VISTA:
            if cached_entry_size == self._CACHED_ENTRY_VISTA_32BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_VISTA_32BIT_STRUCT.parse(
                    cached_entry_data)

            elif cached_entry_size == self._CACHED_ENTRY_VISTA_64BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_VISTA_64BIT_STRUCT.parse(
                    cached_entry_data)

        elif format_type == self.FORMAT_TYPE_7:
            if cached_entry_size == self._CACHED_ENTRY_7_32BIT_STRUCT.sizeof():
                cached_entry_struct = self._CACHED_ENTRY_7_32BIT_STRUCT.parse(
                    cached_entry_data)

            elif cached_entry_size == self._CACHED_ENTRY_7_64BIT_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_7_64BIT_STRUCT.parse(
                    cached_entry_data)

        elif format_type in (self.FORMAT_TYPE_8, self.FORMAT_TYPE_10):
            if cached_entry_data[0:4] not in (
                    self._CACHED_ENTRY_SIGNATURE_8_0,
                    self._CACHED_ENTRY_SIGNATURE_8_1):
                raise RuntimeError(u'Unsupported cache entry signature')

            if cached_entry_size == self._CACHED_ENTRY_HEADER_8_STRUCT.sizeof(
            ):
                cached_entry_struct = self._CACHED_ENTRY_HEADER_8_STRUCT.parse(
                    cached_entry_data)

                cached_entry_data_size = cached_entry_struct.get(
                    u'cached_entry_data_size')
                cached_entry_size = 12 + cached_entry_data_size

                cached_entry_data = value_data[
                    cached_entry_offset:cached_entry_offset +
                    cached_entry_size]

        if not cached_entry_struct:
            raise RuntimeError(u'Unsupported cache entry size: {0:d}'.format(
                cached_entry_size))

        if format_type in (self.FORMAT_TYPE_8, self.FORMAT_TYPE_10):
            if self._debug:
                print(u'Cached entry: {0:d} data:'.format(cached_entry_index))
                print(hexdump.Hexdump(cached_entry_data))

        cached_entry_object = AppCompatCacheCachedEntry()
        cached_entry_object.cached_entry_size = cached_entry_size

        path_offset = 0
        data_size = 0

        if format_type == self.FORMAT_TYPE_XP:
            string_size = 0
            for string_index in xrange(0, 528, 2):
                if (ord(cached_entry_data[string_index]) == 0
                        and ord(cached_entry_data[string_index + 1]) == 0):
                    break
                string_size += 2

            cached_entry_object.path = cached_entry_data[0:string_size].decode(
                u'utf-16-le')

            if self._debug:
                print(u'Path\t\t\t\t\t\t\t\t\t: {0:s}'.format(
                    cached_entry_object.path))

        elif format_type in (self.FORMAT_TYPE_2003, self.FORMAT_TYPE_VISTA,
                             self.FORMAT_TYPE_7):
            path_size = cached_entry_struct.get(u'path_size')
            maximum_path_size = cached_entry_struct.get(u'maximum_path_size')
            path_offset = cached_entry_struct.get(u'path_offset')

            if self._debug:
                print(u'Path size\t\t\t\t\t\t\t\t: {0:d}'.format(path_size))
                print(u'Maximum path size\t\t\t\t\t\t\t: {0:d}'.format(
                    maximum_path_size))
                print(u'Path offset\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
                    path_offset))

        elif format_type in (self.FORMAT_TYPE_8, self.FORMAT_TYPE_10):
            path_size = cached_entry_struct.get(u'path_size')

            if self._debug:
                print(u'Signature\t\t\t\t\t\t\t\t: {0:s}'.format(
                    cached_entry_data[0:4]))
                print(u'Unknown1\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
                    cached_entry_struct.get(u'unknown1')))
                print(u'Cached entry data size\t\t\t\t\t\t\t: {0:d}'.format(
                    cached_entry_data_size))
                print(u'Path size\t\t\t\t\t\t\t\t: {0:d}'.format(path_size))

            cached_entry_data_offset = 14 + path_size
            cached_entry_object.path = cached_entry_data[
                14:cached_entry_data_offset].decode(u'utf-16-le')

            if self._debug:
                print(u'Path\t\t\t\t\t\t\t\t\t: {0:s}'.format(
                    cached_entry_object.path))

            if format_type == self.FORMAT_TYPE_8:
                remaining_data = cached_entry_data[cached_entry_data_offset:]

                cached_entry_object.insertion_flags = construct.ULInt32(
                    u'insertion_flags').parse(remaining_data[0:4])
                cached_entry_object.shim_flags = construct.ULInt32(
                    u'shim_flags').parse(remaining_data[4:8])

                if self._debug:
                    print(u'Insertion flags\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
                        cached_entry_object.insertion_flags))
                    print(u'Shim flags\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
                        cached_entry_object.shim_flags))

                if cached_entry_data[0:4] == self._CACHED_ENTRY_SIGNATURE_8_0:
                    cached_entry_data_offset += 8

                elif cached_entry_data[
                        0:4] == self._CACHED_ENTRY_SIGNATURE_8_1:
                    if self._debug:
                        print(u'Unknown1\t\t\t\t\t\t\t\t: 0x{0:04x}'.format(
                            construct.ULInt16(u'unknown1').parse(
                                remaining_data[8:10])))

                    cached_entry_data_offset += 10

            remaining_data = cached_entry_data[cached_entry_data_offset:]

        if format_type in (self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003,
                           self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7):
            cached_entry_object.last_modification_time = cached_entry_struct.get(
                u'last_modification_time')

        elif format_type in (self.FORMAT_TYPE_8, self.FORMAT_TYPE_10):
            cached_entry_object.last_modification_time = construct.ULInt64(
                u'last_modification_time').parse(remaining_data[0:8])

        if not cached_entry_object.last_modification_time:
            if self._debug:
                print(
                    u'Last modification time\t\t\t\t\t\t\t: 0x{0:08x}'.format(
                        cached_entry_object.last_modification_time))

        else:
            timestamp = cached_entry_object.last_modification_time // 10
            date_string = (datetime.datetime(1601, 1, 1) +
                           datetime.timedelta(microseconds=timestamp))

            if self._debug:
                print(
                    u'Last modification time\t\t\t\t\t\t\t: {0!s} (0x{1:08x})'.
                    format(date_string,
                           cached_entry_object.last_modification_time))

        if format_type in (self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003):
            cached_entry_object.file_size = cached_entry_struct.get(
                u'file_size')

            if self._debug:
                print(u'File size\t\t\t\t\t\t\t\t: {0:d}'.format(
                    cached_entry_object.file_size))

        elif format_type in (self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7):
            cached_entry_object.insertion_flags = cached_entry_struct.get(
                u'insertion_flags')
            cached_entry_object.shim_flags = cached_entry_struct.get(
                u'shim_flags')

            if self._debug:
                print(u'Insertion flags\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
                    cached_entry_object.insertion_flags))
                print(u'Shim flags\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
                    cached_entry_object.shim_flags))

        if format_type == self.FORMAT_TYPE_XP:
            cached_entry_object.last_update_time = cached_entry_struct.get(
                u'last_update_time')

            if not cached_entry_object.last_update_time:
                if self._debug:
                    print(u'Last update time\t\t\t\t\t\t\t: 0x{0:08x}'.format(
                        cached_entry_object.last_update_time))

            else:
                timestamp = cached_entry_object.last_update_time // 10
                date_string = (datetime.datetime(1601, 1, 1) +
                               datetime.timedelta(microseconds=timestamp))

                if self._debug:
                    print(u'Last update time\t\t\t\t\t\t\t: {0!s} (0x{1:08x})'.
                          format(date_string,
                                 cached_entry_object.last_update_time))

        if format_type == self.FORMAT_TYPE_7:
            data_offset = cached_entry_struct.get(u'data_offset')
            data_size = cached_entry_struct.get(u'data_size')

            if self._debug:
                print(u'Data offset\t\t\t\t\t\t\t\t: 0x{0:08x}'.format(
                    data_offset))
                print(u'Data size\t\t\t\t\t\t\t\t: {0:d}'.format(data_size))

        elif format_type in (self.FORMAT_TYPE_8, self.FORMAT_TYPE_10):
            data_offset = cached_entry_offset + cached_entry_data_offset + 12
            data_size = construct.ULInt32(u'data_size').parse(
                remaining_data[8:12])

            if self._debug:
                print(u'Data size\t\t\t\t\t\t\t\t: {0:d}'.format(data_size))

        if self._debug:
            print(u'')

        if path_offset > 0 and path_size > 0:
            path_size += path_offset
            maximum_path_size += path_offset

            if self._debug:
                print(u'Path data:')
                print(
                    hexdump.Hexdump(value_data[path_offset:maximum_path_size]))

            cached_entry_object.path = value_data[
                path_offset:path_size].decode(u'utf-16-le')

            if self._debug:
                print(u'Path\t\t\t\t\t\t\t\t\t: {0:s}'.format(
                    cached_entry_object.path))
                print(u'')

        if data_size > 0:
            data_size += data_offset

            cached_entry_object.data = value_data[data_offset:data_size]

            if self._debug:
                print(u'Data:')
                print(hexdump.Hexdump(cached_entry_object.data))

        return cached_entry_object
Ejemplo n.º 20
0
    """Convert bytes into single integer.

    :param data: bytes to convert
    :type data: bytes
    :param little_endian: indicate byte ordering in data, defaults to True
    :type little_endian: bool, optional
    :return: integer
    :rtype: int
    """
    byte_order = 'little' if little_endian else 'big'
    return int.from_bytes(data, byteorder=byte_order)

#: Version of the protocol used in serial communication. Wire order is
#: bugfix, minor, major, then a one-byte name field (all unsigned bytes).
PROTOCOL_VERSION = construct.Struct(
    'bugfix' / construct.Int8ul,
    'minor' / construct.Int8ul,
    'major' / construct.Int8ul,
    'name' / construct.Int8ul
)

#: Type of frame used for a ping response: protocol version, a 16-bit
#: options field and a 16-bit CRC (all little-endian).
PING_RESPONSE = construct.Struct(
    'version' / PROTOCOL_VERSION,
    'options' / construct.Int16ul,
    'crc' / construct.Int16ul
)


########################################################################################################################
# UART Interface Class
########################################################################################################################
class FPType(Enum):
Ejemplo n.º 21
0
class MsgOrientQuat(SBP):
    """SBP class for message MSG_ORIENT_QUAT (0x0220).

    You can have MSG_ORIENT_QUAT inherit its fields directly from an
    inherited SBP object, or construct it inline using a dict of its fields.

    This message reports the quaternion vector describing the vehicle body
    frame's orientation with respect to a local-level NED frame. The
    components of the vector should sum to a unit vector assuming that the
    LSB of each component as a value of 2^-31. This message will only be
    available in future INS versions of Swift Products and is not produced
    by Piksi Multi or Duro.

    Parameters
    ----------
    sbp : SBP
        SBP parent object to inherit from.
    tow : int
        GPS Time of Week
    w : int
        Real component
    x : int
        1st imaginary component
    y : int
        2nd imaginary component
    z : int
        3rd imaginary component
    w_accuracy : float
        Estimated standard deviation of w
    x_accuracy : float
        Estimated standard deviation of x
    y_accuracy : float
        Estimated standard deviation of y
    z_accuracy : float
        Estimated standard deviation of z
    flags : int
        Status flags
    sender : int
        Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

    """
    _parser = construct.Struct(
        'tow' / construct.Int32ul,
        'w' / construct.Int32sl,
        'x' / construct.Int32sl,
        'y' / construct.Int32sl,
        'z' / construct.Int32sl,
        'w_accuracy' / construct.Float32l,
        'x_accuracy' / construct.Float32l,
        'y_accuracy' / construct.Float32l,
        'z_accuracy' / construct.Float32l,
        'flags' / construct.Int8ul,
    )
    __slots__ = [
        'tow',
        'w',
        'x',
        'y',
        'z',
        'w_accuracy',
        'x_accuracy',
        'y_accuracy',
        'z_accuracy',
        'flags',
    ]

    def __init__(self, sbp=None, **kwargs):
        if sbp:
            super(MsgOrientQuat, self).__init__(
                sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)
        else:
            super(MsgOrientQuat, self).__init__()
            self.msg_type = SBP_MSG_ORIENT_QUAT
            self.sender = kwargs.pop('sender', SENDER_ID)
            # Required fields: pop each one so a missing field raises KeyError,
            # exactly like explicit per-field pops would.
            for field in self.__class__.__slots__:
                setattr(self, field, kwargs.pop(field))

    def __repr__(self):
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Given a JSON-encoded string s, build a message object.

        """
        return MsgOrientQuat.from_json_dict(json.loads(s))

    @staticmethod
    def from_json_dict(d):
        sbp = SBP.from_json_dict(d)
        return MsgOrientQuat(sbp, **d)

    def from_binary(self, d):
        """Given a binary payload d, update the appropriate payload fields of
        the message.

        """
        parsed = MsgOrientQuat._parser.parse(d)
        for name in self.__class__.__slots__:
            setattr(self, name, getattr(parsed, name))

    def to_binary(self):
        """Produce a framed/packed SBP message.

        """
        container = containerize(exclude_fields(self))
        self.payload = MsgOrientQuat._parser.build(container)
        return self.pack()

    def into_buffer(self, buf, offset):
        """Produce a framed/packed SBP message into the provided buffer and offset.

        """
        self.payload = containerize(exclude_fields(self))
        self.parser = MsgOrientQuat._parser
        self.stream_payload.reset(buf, offset)
        return self.pack_into(buf, offset, self._build_payload)

    def to_json_dict(self):
        self.to_binary()
        d = super(MsgOrientQuat, self).to_json_dict()
        d.update(walk_json_dict(exclude_fields(self)))
        return d
Ejemplo n.º 22
0
class CupsIppParser(interface.FileObjectParser):
    """Parser for CUPS IPP files. """

    NAME = u'cups_ipp'
    DESCRIPTION = u'Parser for CUPS IPP files.'

    # INFO:
    # For each file, we have only one document with three different timestamps:
    # Created, process and finished.
    # Format:
    # [HEADER: MAGIC + KNOWN_TYPE][GROUP A]...[GROUP Z][GROUP_END: 0x03]
    # GROUP: [GROUP ID][PAIR A]...[PAIR Z] where [PAIR: NAME + VALUE]
    #   GROUP ID: [1byte ID]
    #   PAIR: [TagID][\x00][Name][Value])
    #     TagID: 1 byte integer with the type of "Value".
    #     Name: [Length][Text][\00]
    #       Name can be empty when the name has more than one value.
    #       Example: family name "lopez mata" with more than one surname.
    #       Type_Text + [0x06, family, 0x00] + [0x05, lopez, 0x00] +
    #       Type_Text + [0x00, 0x00] + [0x04, mata, 0x00]
    #     Value: can be integer, boolean, or text provided by TagID.
    #       If boolean, Value: [\x01][0x00(False)] or [\x01(True)]
    #       If integer, Value: [\x04][Integer]
    #       If text,    Value: [Length text][Text][\00]

    # Magic number that identify the CUPS IPP supported version.
    IPP_MAJOR_VERSION = 2
    IPP_MINOR_VERSION = 0
    # Supported Operation ID.
    IPP_OP_ID = 5

    # CUPS IPP File header.
    CUPS_IPP_HEADER = construct.Struct(u'cups_ipp_header_struct',
                                       construct.UBInt8(u'major_version'),
                                       construct.UBInt8(u'minor_version'),
                                       construct.UBInt16(u'operation_id'),
                                       construct.UBInt32(u'request_id'))

    # Group ID that indicates the end of the IPP Control file.
    GROUP_END = 3
    # Identification Groups.
    GROUP_LIST = [1, 2, 4, 5, 6, 7]

    # Type ID, per cups source file ipp-support.c.
    TYPE_GENERAL_INTEGER = 0x20
    TYPE_INTEGER = 0x21
    TYPE_BOOL = 0x22
    TYPE_ENUMERATION = 0x23
    TYPE_DATETIME = 0x31

    # Type of values that can be extracted.
    INTEGER_8 = construct.UBInt8(u'integer')
    INTEGER_32 = construct.UBInt32(u'integer')
    TEXT = construct.PascalString(u'text',
                                  encoding='utf-8',
                                  length_field=construct.UBInt8(u'length'))
    BOOLEAN = construct.Struct(u'boolean_value', construct.Padding(1),
                               INTEGER_8)
    INTEGER = construct.Struct(u'integer_value', construct.Padding(1),
                               INTEGER_32)

    # This is an RFC 2579 datetime.
    DATETIME = construct.Struct(
        u'datetime',
        construct.Padding(1),
        construct.UBInt16(u'year'),
        construct.UBInt8(u'month'),
        construct.UBInt8(u'day'),
        construct.UBInt8(u'hour'),
        construct.UBInt8(u'minutes'),
        construct.UBInt8(u'seconds'),
        construct.UBInt8(u'deciseconds'),
        construct.String(u'direction_from_utc', length=1, encoding='ascii'),
        construct.UBInt8(u'hours_from_utc'),
        construct.UBInt8(u'minutes_from_utc'),
    )

    # Name of the pair.
    PAIR_NAME = construct.Struct(u'pair_name', TEXT, construct.Padding(1))

    # Specific CUPS IPP to generic name.
    NAME_PAIR_TRANSLATION = {
        u'printer-uri': u'uri',
        u'job-uuid': u'job_id',
        u'DestinationPrinterID': u'printer_id',
        u'job-originating-user-name': u'user',
        u'job-name': u'job_name',
        u'document-format': u'doc_type',
        u'job-originating-host-name': u'computer_name',
        u'com.apple.print.JobInfo.PMApplicationName': u'application',
        u'com.apple.print.JobInfo.PMJobOwner': u'owner'
    }

    def ParseFileObject(self, parser_mediator, file_object, **kwargs):
        """Parses a CUPS IPP file-like object.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      file_object: A file-like object.

    Raises:
      UnableToParseFile: when the file cannot be parsed.
    """
        try:
            header = self.CUPS_IPP_HEADER.parse_stream(file_object)
        except (IOError, construct.FieldError) as exception:
            raise errors.UnableToParseFile(
                u'Unable to parse CUPS IPP Header with error: {0:s}'.format(
                    exception))

        if (header.major_version != self.IPP_MAJOR_VERSION
                or header.minor_version != self.IPP_MINOR_VERSION):
            raise errors.UnableToParseFile(
                u'[{0:s}] Unsupported version number.'.format(self.NAME))

        if header.operation_id != self.IPP_OP_ID:
            # Warn if the operation ID differs from the standard one. We should be
            # able to parse the file nonetheless.
            logging.debug(
                u'[{0:s}] Unsupported operation identifier in file: {1:s}.'.
                format(self.NAME, parser_mediator.GetDisplayName()))

        # Read the pairs extracting the name and the value.
        data_dict = {}
        name, value = self.ReadPair(parser_mediator, file_object)
        while name or value:
            # Translate the known "name" CUPS IPP to a generic name value.
            pretty_name = self.NAME_PAIR_TRANSLATION.get(name, name)
            data_dict.setdefault(pretty_name, []).append(value)
            name, value = self.ReadPair(parser_mediator, file_object)

        # TODO: Refactor to use a lookup table to do event production.
        # Iterate over a materialized copy of the items because entries are
        # deleted from data_dict inside the loop; deleting from a dict while
        # iterating its live view raises RuntimeError on Python 3.
        time_dict = {}
        for key, value in list(data_dict.items()):
            if key.startswith(u'date-time-') or key.startswith(u'time-'):
                time_dict[key] = value
                del data_dict[key]

        if u'date-time-at-creation' in time_dict:
            event_object = CupsIppEvent(time_dict[u'date-time-at-creation'][0],
                                        eventdata.EventTimestamp.CREATION_TIME,
                                        data_dict)
            parser_mediator.ProduceEvent(event_object)

        if u'date-time-at-processing' in time_dict:
            event_object = CupsIppEvent(
                time_dict[u'date-time-at-processing'][0],
                eventdata.EventTimestamp.START_TIME, data_dict)
            parser_mediator.ProduceEvent(event_object)

        if u'date-time-at-completed' in time_dict:
            event_object = CupsIppEvent(
                time_dict[u'date-time-at-completed'][0],
                eventdata.EventTimestamp.END_TIME, data_dict)
            parser_mediator.ProduceEvent(event_object)

        if u'time-at-creation' in time_dict:
            time_value = time_dict[u'time-at-creation'][0]
            timestamp = timelib.Timestamp.FromPosixTime(time_value)
            event_object = CupsIppEvent(timestamp,
                                        eventdata.EventTimestamp.CREATION_TIME,
                                        data_dict)
            parser_mediator.ProduceEvent(event_object)

        if u'time-at-processing' in time_dict:
            time_value = time_dict[u'time-at-processing'][0]
            timestamp = timelib.Timestamp.FromPosixTime(time_value)
            event_object = CupsIppEvent(timestamp,
                                        eventdata.EventTimestamp.START_TIME,
                                        data_dict)
            parser_mediator.ProduceEvent(event_object)

        if u'time-at-completed' in time_dict:
            time_value = time_dict[u'time-at-completed'][0]
            timestamp = timelib.Timestamp.FromPosixTime(time_value)
            event_object = CupsIppEvent(timestamp,
                                        eventdata.EventTimestamp.END_TIME,
                                        data_dict)
            parser_mediator.ProduceEvent(event_object)

    def ReadPair(self, parser_mediator, file_object):
        """Reads an attribute name and value pair from a CUPS IPP event.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      file_object: a file-like object that points to a file.

    Returns:
      A list of name and value. If name and value cannot be read both are
      set to None.
    """
        # Pair = Type ID + Name + Value.
        try:
            # Can be:
            #   Group ID + IDtag = Group ID (1byte) + Tag ID (1byte) + '0x00'.
            #   IDtag = Tag ID (1byte) + '0x00'.
            type_id = self.INTEGER_8.parse_stream(file_object)
            if type_id == self.GROUP_END:
                return None, None

            elif type_id in self.GROUP_LIST:
                # If it is a group ID we must read the next byte that contains
                # the first TagID.
                type_id = self.INTEGER_8.parse_stream(file_object)

            # 0x00 separator character.
            _ = self.INTEGER_8.parse_stream(file_object)

        except (IOError, construct.FieldError):
            logging.warning(
                u'[{0:s}] Unsupported identifier in file: {1:s}.'.format(
                    self.NAME, parser_mediator.GetDisplayName()))
            return None, None

        # Name = Length name + name + 0x00
        try:
            name = self.PAIR_NAME.parse_stream(file_object).text
        except (IOError, construct.FieldError):
            logging.warning(u'[{0:s}] Unsupported name in file: {1:s}.'.format(
                self.NAME, parser_mediator.GetDisplayName()))
            return None, None

        # Value: can be integer, boolean or text select by Type ID.
        try:
            if type_id in [
                    self.TYPE_GENERAL_INTEGER, self.TYPE_INTEGER,
                    self.TYPE_ENUMERATION
            ]:
                value = self.INTEGER.parse_stream(file_object).integer

            elif type_id == self.TYPE_BOOL:
                value = bool(self.BOOLEAN.parse_stream(file_object).integer)

            elif type_id == self.TYPE_DATETIME:
                datetime = self.DATETIME.parse_stream(file_object)
                value = timelib.Timestamp.FromRFC2579Datetime(
                    datetime.year, datetime.month, datetime.day, datetime.hour,
                    datetime.minutes, datetime.seconds, datetime.deciseconds,
                    datetime.direction_from_utc, datetime.hours_from_utc,
                    datetime.minutes_from_utc)

            else:
                value = self.TEXT.parse_stream(file_object)

        except (IOError, UnicodeDecodeError, construct.FieldError):
            logging.warning(
                u'[{0:s}] Unsupported value in file: {1:s}.'.format(
                    self.NAME, parser_mediator.GetDisplayName()))
            return None, None

        return name, value
Ejemplo n.º 23
0
class MsgBaselineHeading(SBP):
    """SBP class for message MSG_BASELINE_HEADING (0x020F).

    You can have MSG_BASELINE_HEADING inherit its fields directly from an
    inherited SBP object, or construct it inline using a dict of its fields.

    This message reports the baseline heading pointing from the base station
    to the rover relative to True North. The full GPS time is given by the
    preceding MSG_GPS_TIME with the matching time-of-week (tow). It is
    intended that time-matched RTK mode is used when the base station is
    moving.

    Parameters
    ----------
    sbp : SBP
        SBP parent object to inherit from.
    tow : int
        GPS Time of Week
    heading : int
        Heading
    n_sats : int
        Number of satellites used in solution
    flags : int
        Status flags
    sender : int
        Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

    """
    _parser = construct.Struct(
        'tow' / construct.Int32ul,
        'heading' / construct.Int32ul,
        'n_sats' / construct.Int8ul,
        'flags' / construct.Int8ul,
    )
    __slots__ = ['tow', 'heading', 'n_sats', 'flags']

    def __init__(self, sbp=None, **kwargs):
        if sbp:
            super(MsgBaselineHeading, self).__init__(
                sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)
        else:
            super(MsgBaselineHeading, self).__init__()
            self.msg_type = SBP_MSG_BASELINE_HEADING
            self.sender = kwargs.pop('sender', SENDER_ID)
            # Required fields: pop each one so a missing field raises KeyError,
            # exactly like explicit per-field pops would.
            for field in self.__class__.__slots__:
                setattr(self, field, kwargs.pop(field))

    def __repr__(self):
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Given a JSON-encoded string s, build a message object.

        """
        return MsgBaselineHeading.from_json_dict(json.loads(s))

    @staticmethod
    def from_json_dict(d):
        sbp = SBP.from_json_dict(d)
        return MsgBaselineHeading(sbp, **d)

    def from_binary(self, d):
        """Given a binary payload d, update the appropriate payload fields of
        the message.

        """
        parsed = MsgBaselineHeading._parser.parse(d)
        for name in self.__class__.__slots__:
            setattr(self, name, getattr(parsed, name))

    def to_binary(self):
        """Produce a framed/packed SBP message.

        """
        container = containerize(exclude_fields(self))
        self.payload = MsgBaselineHeading._parser.build(container)
        return self.pack()

    def into_buffer(self, buf, offset):
        """Produce a framed/packed SBP message into the provided buffer and offset.

        """
        self.payload = containerize(exclude_fields(self))
        self.parser = MsgBaselineHeading._parser
        self.stream_payload.reset(buf, offset)
        return self.pack_into(buf, offset, self._build_payload)

    def to_json_dict(self):
        self.to_binary()
        d = super(MsgBaselineHeading, self).to_json_dict()
        d.update(walk_json_dict(exclude_fields(self)))
        return d
Ejemplo n.º 24
0
class MsgUserData(SBP):
    """SBP class for message MSG_USER_DATA (0x0800).

  You can have MSG_USER_DATA inherit its fields directly
  from an inherited SBP object, or construct it inline using a dict
  of its fields.

  
  This message can contain any application specific user data up to a
maximum length of 255 bytes per message.


  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  contents : array
    User data payload
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
    # The field name must wrap the GreedyRange itself
    # ('contents' / construct.GreedyRange(...)). Naming only the element
    # inside the range leaves the range anonymous within the Struct, so the
    # parsed container has no 'contents' attribute and from_binary() fails.
    _parser = construct.Struct(
        'contents' / construct.GreedyRange(construct.Int8ul), )
    __slots__ = [
        'contents',
    ]

    def __init__(self, sbp=None, **kwargs):
        if sbp:
            super(MsgUserData, self).__init__(sbp.msg_type, sbp.sender,
                                              sbp.length, sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)
        else:
            super(MsgUserData, self).__init__()
            self.msg_type = SBP_MSG_USER_DATA
            self.sender = kwargs.pop('sender', SENDER_ID)
            self.contents = kwargs.pop('contents')

    def __repr__(self):
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Given a JSON-encoded string s, build a message object.

    """
        d = json.loads(s)
        return MsgUserData.from_json_dict(d)

    @staticmethod
    def from_json_dict(d):
        sbp = SBP.from_json_dict(d)
        return MsgUserData(sbp, **d)

    def from_binary(self, d):
        """Given a binary payload d, update the appropriate payload fields of
    the message.

    """
        p = MsgUserData._parser.parse(d)
        for n in self.__class__.__slots__:
            setattr(self, n, getattr(p, n))

    def to_binary(self):
        """Produce a framed/packed SBP message.

    """
        c = containerize(exclude_fields(self))
        self.payload = MsgUserData._parser.build(c)
        return self.pack()

    def to_json_dict(self):
        self.to_binary()
        d = super(MsgUserData, self).to_json_dict()
        j = walk_json_dict(exclude_fields(self))
        d.update(j)
        return d
Ejemplo n.º 25
0
class WinJobParser(interface.FileObjectParser):
    """Parse Windows Scheduled Task files for job events."""

    NAME = u'winjob'
    DESCRIPTION = u'Parser for Windows Scheduled Task job (or At-job) files.'

    # A SYSTEMTIME of all zeros means the task has never run.
    _EMPTY_SYSTEM_TIME_TUPLE = (0, 0, 0, 0, 0, 0, 0, 0)

    _PRODUCT_VERSIONS = {
        0x0400: u'Windows NT 4.0',
        0x0500: u'Windows 2000',
        0x0501: u'Windows XP',
        0x0600: u'Windows Vista',
        0x0601: u'Windows 7',
        0x0602: u'Windows 8',
        0x0603: u'Windows 8.1',
        0x0a00: u'Windows 10',
    }

    _JOB_FIXED_LENGTH_SECTION_STRUCT = construct.Struct(
        u'job_fixed_length_section', construct.ULInt16(u'product_version'),
        construct.ULInt16(u'format_version'), construct.Bytes(u'job_uuid', 16),
        construct.ULInt16(u'application_length_offset'),
        construct.ULInt16(u'trigger_offset'),
        construct.ULInt16(u'error_retry_count'),
        construct.ULInt16(u'error_retry_interval'),
        construct.ULInt16(u'idle_deadline'), construct.ULInt16(u'idle_wait'),
        construct.ULInt32(u'priority'), construct.ULInt32(u'max_run_time'),
        construct.ULInt32(u'exit_code'), construct.ULInt32(u'status'),
        construct.ULInt32(u'flags'),
        construct.Struct(u'last_run_time', construct.ULInt16(u'year'),
                         construct.ULInt16(u'month'),
                         construct.ULInt16(u'weekday'),
                         construct.ULInt16(u'day'),
                         construct.ULInt16(u'hours'),
                         construct.ULInt16(u'minutes'),
                         construct.ULInt16(u'seconds'),
                         construct.ULInt16(u'milliseconds')))

    # Using Construct's utf-16 encoding here will create strings with their
    # null terminators exposed. Instead, we'll read these variables raw and
    # convert them using Plaso's ReadUTF16() for proper formatting.
    _JOB_VARIABLE_STRUCT = construct.Struct(
        u'job_variable_length_section',
        construct.ULInt16(u'running_instance_count'),
        construct.ULInt16(u'application_length'),
        construct.String(u'application',
                         lambda ctx: ctx.application_length * 2),
        construct.ULInt16(u'parameter_length'),
        construct.String(u'parameter', lambda ctx: ctx.parameter_length * 2),
        construct.ULInt16(u'working_directory_length'),
        construct.String(u'working_directory',
                         lambda ctx: ctx.working_directory_length * 2),
        construct.ULInt16(u'username_length'),
        construct.String(u'username', lambda ctx: ctx.username_length * 2),
        construct.ULInt16(u'comment_length'),
        construct.String(u'comment', lambda ctx: ctx.comment_length * 2),
        construct.ULInt16(u'userdata_length'),
        construct.String(u'userdata', lambda ctx: ctx.userdata_length),
        construct.ULInt16(u'reserved_length'),
        construct.String(u'reserved', lambda ctx: ctx.reserved_length),
        construct.ULInt16(u'number_of_triggers'))

    _TRIGGER_STRUCT = construct.Struct(u'trigger', construct.ULInt16(u'size'),
                                       construct.ULInt16(u'reserved1'),
                                       construct.ULInt16(u'start_year'),
                                       construct.ULInt16(u'start_month'),
                                       construct.ULInt16(u'start_day'),
                                       construct.ULInt16(u'end_year'),
                                       construct.ULInt16(u'end_month'),
                                       construct.ULInt16(u'end_day'),
                                       construct.ULInt16(u'start_hour'),
                                       construct.ULInt16(u'start_minute'),
                                       construct.ULInt32(u'duration'),
                                       construct.ULInt32(u'interval'),
                                       construct.ULInt32(u'trigger_flags'),
                                       construct.ULInt32(u'trigger_type'),
                                       construct.ULInt16(u'trigger_arg0'),
                                       construct.ULInt16(u'trigger_arg1'),
                                       construct.ULInt16(u'trigger_arg2'),
                                       construct.ULInt16(u'trigger_padding'),
                                       construct.ULInt16(u'trigger_reserved2'),
                                       construct.ULInt16(u'trigger_reserved3'))

    def ParseFileObject(self, parser_mediator, file_object, **kwargs):
        """Parses a Windows job file-like object.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.

    Raises:
      UnableToParseFile: when the file cannot be parsed.
    """
        try:
            header_struct = self._JOB_FIXED_LENGTH_SECTION_STRUCT.parse_stream(
                file_object)
        except (IOError, construct.FieldError) as exception:
            # {0!s} rather than {0:s}: applying the 's' format spec to an
            # exception object raises TypeError on Python 3.
            raise errors.UnableToParseFile(
                u'Unable to parse fixed-length section with error: {0!s}'.
                format(exception))

        if header_struct.product_version not in self._PRODUCT_VERSIONS:
            raise errors.UnableToParseFile(
                u'Unsupported product version in: 0x{0:04x}'.format(
                    header_struct.product_version))

        if header_struct.format_version != 1:
            raise errors.UnableToParseFile(
                u'Unsupported format version in: {0:d}'.format(
                    header_struct.format_version))

        try:
            job_variable_struct = self._JOB_VARIABLE_STRUCT.parse_stream(
                file_object)
        except (IOError, construct.FieldError) as exception:
            raise errors.UnableToParseFile(
                u'Unable to parse variable-length section with error: {0!s}'.
                format(exception))

        event_data = WinJobEventData()
        event_data.application = binary.ReadUTF16(
            job_variable_struct.application)
        event_data.comment = binary.ReadUTF16(job_variable_struct.comment)
        event_data.parameters = binary.ReadUTF16(job_variable_struct.parameter)
        event_data.username = binary.ReadUTF16(job_variable_struct.username)
        event_data.working_directory = binary.ReadUTF16(
            job_variable_struct.working_directory)

        systemtime_struct = header_struct.last_run_time
        system_time_tuple = (systemtime_struct.year, systemtime_struct.month,
                             systemtime_struct.weekday, systemtime_struct.day,
                             systemtime_struct.hours,
                             systemtime_struct.minutes,
                             systemtime_struct.seconds,
                             systemtime_struct.milliseconds)

        # Only emit a last-run event when the SYSTEMTIME is non-zero.
        date_time = None
        if system_time_tuple != self._EMPTY_SYSTEM_TIME_TUPLE:
            try:
                date_time = dfdatetime_systemtime.Systemtime(
                    system_time_tuple=system_time_tuple)
            except ValueError:
                parser_mediator.ProduceExtractionError(
                    u'invalid last run time: {0!s}'.format(system_time_tuple))

        if date_time:
            event = time_events.DateTimeValuesEvent(
                date_time, eventdata.EventTimestamp.LAST_RUNTIME)
            parser_mediator.ProduceEventWithEventData(event, event_data)

        # NOTE: event_data is shared and mutated per trigger; each produced
        # event reflects the trigger_type set at the time it is produced.
        for index in range(job_variable_struct.number_of_triggers):
            try:
                trigger_struct = self._TRIGGER_STRUCT.parse_stream(file_object)
            except (IOError, construct.FieldError) as exception:
                parser_mediator.ProduceExtractionError(
                    u'unable to parse trigger: {0:d} with error: {1!s}'.format(
                        index, exception))
                return

            event_data.trigger_type = trigger_struct.trigger_type

            time_elements_tuple = (trigger_struct.start_year,
                                   trigger_struct.start_month,
                                   trigger_struct.start_day,
                                   trigger_struct.start_hour,
                                   trigger_struct.start_minute, 0)

            if time_elements_tuple != (0, 0, 0, 0, 0, 0):
                try:
                    date_time = dfdatetime_time_elements.TimeElements(
                        time_elements_tuple=time_elements_tuple)
                    # Trigger times are stored in local time.
                    date_time.is_local_time = True
                    date_time.precision = dfdatetime_definitions.PRECISION_1_MINUTE
                except ValueError:
                    date_time = None
                    parser_mediator.ProduceExtractionError(
                        u'invalid trigger start time: {0!s}'.format(
                            time_elements_tuple))

                if date_time:
                    event = time_events.DateTimeValuesEvent(
                        date_time,
                        u'Scheduled to start',
                        time_zone=parser_mediator.timezone)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)

            time_elements_tuple = (trigger_struct.end_year,
                                   trigger_struct.end_month,
                                   trigger_struct.end_day, 0, 0, 0)

            if time_elements_tuple != (0, 0, 0, 0, 0, 0):
                try:
                    date_time = dfdatetime_time_elements.TimeElements(
                        time_elements_tuple=time_elements_tuple)
                    date_time.is_local_time = True
                    date_time.precision = dfdatetime_definitions.PRECISION_1_DAY
                except ValueError:
                    date_time = None
                    parser_mediator.ProduceExtractionError(
                        u'invalid trigger end time: {0!s}'.format(
                            time_elements_tuple))

                if date_time:
                    event = time_events.DateTimeValuesEvent(
                        date_time,
                        u'Scheduled to end',
                        time_zone=parser_mediator.timezone)
                    parser_mediator.ProduceEventWithEventData(
                        event, event_data)
Ejemplo n.º 26
0
class WinRecyclerInfo2Parser(interface.FileObjectParser):
    """Parses the Windows Recycler INFO2 file."""

    NAME = 'recycle_bin_info2'
    DESCRIPTION = 'Parser for Windows Recycler INFO2 files.'

    _FILE_HEADER_STRUCT = construct.Struct('file_header',
                                           construct.ULInt32('unknown1'),
                                           construct.ULInt32('unknown2'),
                                           construct.ULInt32('unknown3'),
                                           construct.ULInt32('record_size'),
                                           construct.ULInt32('unknown4'))

    _RECYCLER_RECORD_STRUCT = construct.Struct(
        'recycler_record', construct.ULInt32('index'),
        construct.ULInt32('drive_number'), construct.ULInt64('deletion_time'),
        construct.ULInt32('file_size'))

    _ASCII_STRING = construct.CString('string')

    # Offsets inside a record: the binary index record starts at 0x104 and,
    # for 800-byte records, the UTF-16 filename starts at 0x118.
    _RECORD_INDEX_OFFSET = 0x104
    _UNICODE_FILENAME_OFFSET = 0x118

    def _ParseRecord(self, parser_mediator, file_object, record_offset,
                     record_size):
        """Parses an INFO-2 record.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): file-like object.
      record_offset (int): record offset.
      record_size (int): record size.
    """
        record_data = file_object.read(record_size)

        # Initialize before the try block so the name is defined even when
        # parsing fails. The original reset ascii_filename to None *after*
        # the try block, which discarded the parsed value and made the
        # decode branch below unreachable.
        ascii_filename = None
        try:
            ascii_filename = self._ASCII_STRING.parse(record_data)

        except (IOError, construct.FieldError) as exception:
            parser_mediator.ProduceExtractionError((
                'unable to parse recycler ASCII filename at offset: 0x{0:08x} '
                'with error: {1!s}').format(record_offset, exception))

        try:
            recycler_record_struct = self._RECYCLER_RECORD_STRUCT.parse(
                record_data[self._RECORD_INDEX_OFFSET:])
        except (IOError, construct.FieldError) as exception:
            parser_mediator.ProduceExtractionError(
                ('unable to parse recycler index record at offset: 0x{0:08x} '
                 'with error: {1!s}').format(
                     record_offset + self._RECORD_INDEX_OFFSET, exception))
            # Without the index record there is nothing to build an event
            # from; continuing would raise NameError below.
            return

        unicode_filename = None
        if record_size == 800:
            unicode_filename = binary.ReadUTF16(
                record_data[self._UNICODE_FILENAME_OFFSET:])

        if ascii_filename and parser_mediator.codepage:
            try:
                ascii_filename = ascii_filename.decode(
                    parser_mediator.codepage)
            except UnicodeDecodeError:
                # Fall back to lossy decoding rather than dropping the name.
                ascii_filename = ascii_filename.decode(
                    parser_mediator.codepage, errors='replace')

        elif ascii_filename:
            ascii_filename = repr(ascii_filename)

        if recycler_record_struct.deletion_time == 0:
            date_time = dfdatetime_semantic_time.SemanticTime('Not set')
        else:
            date_time = dfdatetime_filetime.Filetime(
                timestamp=recycler_record_struct.deletion_time)

        event_data = WinRecycleBinEventData()
        event_data.drive_number = recycler_record_struct.drive_number
        event_data.original_filename = unicode_filename or ascii_filename
        event_data.file_size = recycler_record_struct.file_size
        event_data.offset = record_offset
        event_data.record_index = recycler_record_struct.index
        event_data.short_filename = ascii_filename

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_DELETED)
        parser_mediator.ProduceEventWithEventData(event, event_data)

    def ParseFileObject(self, parser_mediator, file_object, **kwargs):
        """Parses a Windows Recycler INFO2 file-like object.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): file-like object.
    """
        # Since this header value is really generic it is hard not to use filename
        # as an indicator too.

        # TODO: Rethink this and potentially make a better test.
        filename = parser_mediator.GetFilename()
        if not filename.startswith('INFO2'):
            return

        try:
            file_header_struct = self._FILE_HEADER_STRUCT.parse_stream(
                file_object)
        except (construct.FieldError, IOError) as exception:
            parser_mediator.ProduceExtractionError(
                'unable to parse file header with error: {0!s}'.format(
                    exception))
            return

        if file_header_struct.unknown1 != 5:
            parser_mediator.ProduceExtractionError(
                'unsupported format signature.')
            return

        record_size = file_header_struct.record_size
        if record_size not in (280, 800):
            parser_mediator.ProduceExtractionError(
                'unsupported record size: {0:d}'.format(record_size))
            return

        record_offset = self._FILE_HEADER_STRUCT.sizeof()
        file_size = file_object.get_size()

        while record_offset < file_size:
            self._ParseRecord(parser_mediator, file_object, record_offset,
                              record_size)

            record_offset += record_size
Ejemplo n.º 27
0
import construct as cs
import construct_typed as cst
import dataclasses
import typing as t
from . import GalleryItem

# Car record: brand and color are stored as single-byte enums, the wheel
# count as a plain unsigned byte.
constr = cs.Struct(
    "brand" / cs.Enum(cs.Int8ul, Porsche=0, Audi=4, VW=7),
    "wheels" / cs.Int8ul,
    "color" / cs.Enum(cs.Int8ul, Red=1, Green=10, Blue=11, Black=12),
)

# Gallery entry pairing the construct with sample payloads.
# NOTE(review): example "2" ends in 13, which is not a declared color value —
# presumably included to show how an unmapped enum byte renders; confirm.
gallery_item = GalleryItem(
    construct=constr,
    example_binarys={
        "Zeros": bytes(constr.sizeof()),
        "1": bytes([4, 4, 12]),
        "2": bytes([4, 4, 13]),
        "3": bytes([7, 2, 1]),
    },
)
Ejemplo n.º 28
0
class WinRecycleBinParser(interface.FileObjectParser):
    """Parses the Windows $Recycle.Bin $I files."""

    NAME = 'recycle_bin'
    DESCRIPTION = 'Parser for Windows $Recycle.Bin $I files.'

    _FILE_HEADER_STRUCT = construct.Struct('file_header',
                                           construct.ULInt64('format_version'),
                                           construct.ULInt64('file_size'),
                                           construct.ULInt64('deletion_time'))

    _FILENAME_V2_STRUCT = construct.Struct(
        'filename_v2', construct.ULInt32('number_of_characters'),
        construct.String('string', lambda ctx: ctx.number_of_characters * 2))

    def _ReadFilename(self, parser_mediator, file_object, format_version):
        """Reads the filename.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (FileIO): file-like object.
      format_version (int): format version.

    Returns:
      str: filename
    """
        # Version 1 stores the name as a bare UTF-16 stream; version 2
        # prefixes it with a character count.
        if format_version == 1:
            return binary.ReadUTF16Stream(file_object)

        try:
            name_struct = self._FILENAME_V2_STRUCT.parse_stream(file_object)
        except (IOError, construct.FieldError) as exception:
            parser_mediator.ProduceExtractionError(
                'unable to parse filename with error: {0!s}'.format(exception))
            return None

        return binary.ReadUTF16(name_struct.string)

    def ParseFileObject(self, parser_mediator, file_object, **kwargs):
        """Parses a Windows RecycleBin $Ixx file-like object.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): file-like object.
    """
        # The header is very generic, so the filename is used as an
        # additional indicator.

        # TODO: Rethink this and potentially make a better test.
        if not parser_mediator.GetFilename().startswith('$I'):
            return

        try:
            header = self._FILE_HEADER_STRUCT.parse_stream(file_object)
        except (IOError, construct.FieldError) as exception:
            parser_mediator.ProduceExtractionError(
                'unable to parse file header with error: {0!s}'.format(
                    exception))
            return

        version = header.format_version
        if version not in (1, 2):
            parser_mediator.ProduceExtractionError(
                'unsupported format version: {0:d}.'.format(version))
            return

        # A zero FILETIME means the deletion time was never recorded.
        if header.deletion_time:
            date_time = dfdatetime_filetime.Filetime(
                timestamp=header.deletion_time)
        else:
            date_time = dfdatetime_semantic_time.SemanticTime('Not set')

        event_data = WinRecycleBinEventData()
        event_data.original_filename = self._ReadFilename(
            parser_mediator, file_object, version)
        event_data.file_size = header.file_size

        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_DELETED)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Ejemplo n.º 29
0
class MsgTrackingState(SBP):
    """SBP class for message MSG_TRACKING_STATE (0x0041).

  You can have MSG_TRACKING_STATE inherit its fields directly
  from an inherited SBP object, or construct it inline using a dict
  of its fields.

  
  The tracking message returns a variable-length array of tracking
channel states. It reports status and carrier-to-noise density
measurements for all tracked satellites.


  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  states : array
    Signal tracking channel state
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
    # Payload layout: a greedy run of TrackingChannelState records exposed
    # as 'states'. NOTE: the name must be applied to the GreedyRange itself
    # and TrackingChannelState._parser used directly; in the original the
    # name was on a Struct wrapped *inside* an anonymous GreedyRange, so
    # the parsed list was discarded and from_binary's
    # getattr(p, 'states') would fail.
    _parser = construct.Struct(
        'states' / construct.GreedyRange(TrackingChannelState._parser),
    )
    __slots__ = [
        'states',
    ]

    def __init__(self, sbp=None, **kwargs):
        if sbp:
            # Inherit framing from the parent object, then decode its payload.
            super(MsgTrackingState,
                  self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                                 sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)
        else:
            super(MsgTrackingState, self).__init__()
            self.msg_type = SBP_MSG_TRACKING_STATE
            self.sender = kwargs.pop('sender', SENDER_ID)
            self.states = kwargs.pop('states')

    def __repr__(self):
        # Delegate to the shared SBP helper for a uniform message repr.
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Given a JSON-encoded string s, build a message object.

    """
        d = json.loads(s)
        return MsgTrackingState.from_json_dict(d)

    @staticmethod
    def from_json_dict(d):
        sbp = SBP.from_json_dict(d)
        return MsgTrackingState(sbp, **d)

    def from_binary(self, d):
        """Given a binary payload d, update the appropriate payload fields of
    the message.

    """
        p = MsgTrackingState._parser.parse(d)
        for n in self.__class__.__slots__:
            setattr(self, n, getattr(p, n))

    def to_binary(self):
        """Produce a framed/packed SBP message.

    """
        c = containerize(exclude_fields(self))
        self.payload = MsgTrackingState._parser.build(c)
        return self.pack()

    def to_json_dict(self):
        # Re-pack first so payload/CRC reflect the current field values.
        self.to_binary()
        d = super(MsgTrackingState, self).to_json_dict()
        j = walk_json_dict(exclude_fields(self))
        d.update(j)
        return d
Ejemplo n.º 30
0
class MsgFileioConfigResp(SBP):
    """SBP class for message MSG_FILEIO_CONFIG_RESP (0x1002).

  You can have MSG_FILEIO_CONFIG_RESP inherit its fields directly
  from an inherited SBP object, or construct it inline using a dict
  of its fields.

  
  The advice on the optimal configuration for a FileIO
transfer.  Newer version of FileIO can support greater
throughput by supporting a large window of FileIO data
that can be in-flight during read or write operations.


  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  sequence : int
    Advice sequence number
  window_size : int
    The number of SBP packets in the data in-flight window
  batch_size : int
    The number of SBP packets sent in one PDU
  fileio_version : int
    The version of FileIO that is supported
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
    # Payload layout: four little-endian 32-bit unsigned fields.
    _parser = construct.Struct(
        'sequence' / construct.Int32ul,
        'window_size' / construct.Int32ul,
        'batch_size' / construct.Int32ul,
        'fileio_version' / construct.Int32ul,
    )
    __slots__ = [
        'sequence',
        'window_size',
        'batch_size',
        'fileio_version',
    ]

    def __init__(self, sbp=None, **kwargs):
        """Build the message either fresh from kwargs or from a parent SBP frame."""
        if not sbp:
            super(MsgFileioConfigResp, self).__init__()
            self.msg_type = SBP_MSG_FILEIO_CONFIG_RESP
            self.sender = kwargs.pop('sender', SENDER_ID)
            self.sequence = kwargs.pop('sequence')
            self.window_size = kwargs.pop('window_size')
            self.batch_size = kwargs.pop('batch_size')
            self.fileio_version = kwargs.pop('fileio_version')
        else:
            # Inherit framing from the parent object, then decode its payload.
            super(MsgFileioConfigResp,
                  self).__init__(sbp.msg_type, sbp.sender, sbp.length,
                                 sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)

    def __repr__(self):
        # Delegate to the shared SBP helper for a uniform message repr.
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Deserialize a JSON-encoded string s into a MsgFileioConfigResp."""
        return MsgFileioConfigResp.from_json_dict(json.loads(s))

    @staticmethod
    def from_json_dict(d):
        """Build a MsgFileioConfigResp from an already-decoded JSON dict."""
        parent = SBP.from_json_dict(d)
        return MsgFileioConfigResp(parent, **d)

    def from_binary(self, d):
        """Decode binary payload d and populate this message's fields."""
        parsed = MsgFileioConfigResp._parser.parse(d)
        for field in self.__class__.__slots__:
            setattr(self, field, getattr(parsed, field))

    def to_binary(self):
        """Serialize the fields into self.payload and return the framed message."""
        container = containerize(exclude_fields(self))
        self.payload = MsgFileioConfigResp._parser.build(container)
        return self.pack()

    def into_buffer(self, buf, offset):
        """Produce a framed/packed SBP message into the provided buffer and offset.

    """
        self.parser = MsgFileioConfigResp._parser
        self.payload = containerize(exclude_fields(self))
        self.stream_payload.reset(buf, offset)
        return self.pack_into(buf, offset, self._build_payload)

    def to_json_dict(self):
        """Return a JSON-serializable dict of the full framed message."""
        # Re-pack first so payload/CRC reflect the current field values.
        self.to_binary()
        base = super(MsgFileioConfigResp, self).to_json_dict()
        base.update(walk_json_dict(exclude_fields(self)))
        return base