class JBSQ(ct.TContainerMixin):
    """Typed construct container describing the binary layout of a JBSQ file.

    Field order mirrors the on-disk layout; `events` is a trailing array
    whose length is given by the `num_events` field earlier in the header.
    """

    # File signature; any of the three 4-byte magics is accepted.
    magic: Optional[bytes] = ct.sfield(
        c.Select(c.Const(b"IJBQ"), c.Const(b"IJSQ"), c.Const(b"JBSQ")))
    # Number of entries in the trailing events array.
    num_events: int = ct.sfield(c.Int32ul)
    combo: int = ct.sfield(c.Int32ul)
    end_time: int = ct.sfield(c.Int32ul)
    _1: None = ct.sfield(c.Padding(2))  # 2 unused bytes
    starting_buttons: int = ct.sfield(c.Int16ul)
    start_time: int = ct.sfield(c.Int32ul)
    _2: None = ct.sfield(c.Padding(12))  # 12 unused bytes
    # 60 one-byte buckets; value semantics not visible in this file.
    density_graph: List[int] = ct.sfield(c.Byte[60])
    # Event records; count comes from num_events above.
    events: List[Event] = ct.sfield(
        c.Array(c.this.num_events, ct.TStruct(Event)))
def __init__(self):
    """Build the construct parsers for the ASTRM (audio stream) headers."""
    super(AudioHandler, self).__init__()
    # 4-byte bit-packed base header present on every ASTRM packet.
    self.header_base = construct.BitStruct(
        'ASTRMBaseHeader',
        construct.BitField('fmt', 3),
        construct.Bit('channel'),
        construct.Flag('vibrate'),
        construct.Bit('packet_type'),
        construct.BitField('seq_id', 10),
        construct.BitField('payload_size', 16))
    # Payload header used when packet_type == 0 (audio data).
    self.header_aud = construct.Struct('ASTRMAudioHeader',
                                       construct.ULInt32('timestamp'))
    # Payload header used when packet_type == 1 (message).
    self.header_msg = construct.Struct(
        'ASTRMMsgHeader',
        # This is kind of a hack, (there are two timestamp fields, which one is used
        # depends on packet_type
        construct.ULInt32('timestamp_audio'),
        construct.ULInt32('timestamp'),
        construct.Array(2, construct.ULInt32('freq_0')),  # -> mc_video
        construct.Array(2, construct.ULInt32('freq_1')),  # -> mc_sync
        construct.ULInt8('vid_format'),
        construct.Padding(3))
    # Full header: base bits plus the packet_type-selected payload header;
    # unknown packet types fall through to Pass (base header only).
    self.header = construct.Struct(
        'ASTRMHeader',
        construct.Embed(self.header_base),
        construct.Switch('format_hdr', lambda ctx: ctx.packet_type, {
            0: construct.Embed(self.header_aud),
            1: construct.Embed(self.header_msg),
        }, default=construct.Pass))
class TimeMachinePlugin(interface.PlistPlugin):
  """Plugin that extracts TimeMachine hard disk and backup events.

  Fields of interest:
    DestinationID: remote UUID of the hard disk where the backup is done.
    BackupAlias: structure that contains the extra information from the
        destinationID.
    SnapshotDates: list of the backup dates.
  """

  NAME = 'time_machine'
  DESCRIPTION = 'Parser for TimeMachine plist files.'

  PLIST_PATH = 'com.apple.TimeMachine.plist'
  PLIST_KEYS = frozenset(['Destinations', 'RootVolumeUUID'])

  TM_BACKUP_ALIAS = construct.Struct(
      'tm_backup_alias',
      construct.Padding(10),
      construct.PascalString(
          'value', length_field=construct.UBInt8('length')))

  # pylint: disable=arguments-differ
  def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
    """Extracts relevant TimeMachine entries.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
    """
    for destination in match.get('Destinations', []):
      device_identifier = (
          destination.get('DestinationID', None) or 'Unknown device')

      alias = destination.get('BackupAlias', '<ALIAS>')
      try:
        alias = self.TM_BACKUP_ALIAS.parse(alias).value
      except construct.FieldError:
        alias = 'Unknown alias'

      event_data = plist_event.PlistTimeEventData()
      event_data.desc = 'TimeMachine Backup in {0:s} ({1:s})'.format(
          alias, device_identifier)
      event_data.key = 'item/SnapshotDates'
      event_data.root = '/Destinations'

      for datetime_value in destination.get('SnapshotDates', []):
        timestamp = timelib.Timestamp.FromPythonDatetime(datetime_value)
        date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
            timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)
def MakeConstantRow():
    """Return the construct Struct for a metadata Constant table row.

    Layout: one type byte, one strict pad byte, then the coded HasConstant
    parent index and a blob-heap reference for the value.
    """
    row_fields = (
        construct.ULInt8('Type'),
        construct.Padding(1, strict=True),
        MDTag.HasConstant.parse('Parent'),
        MDTag.BlobHeapRef.parse('Value'),
    )
    return construct.Struct('ConstantRow', *row_fields)
class TimeMachinePlugin(interface.PlistPlugin):
  """Basic plugin to extract time machine hardisk and the backups.

  Extracted fields:
    DestinationID: remote UUID hard disk where the backup is done.
    BackupAlias: structure that contains the extra information from the
        destinationID.
    SnapshotDates: list of the backup dates.
  """

  NAME = u'time_machine'
  DESCRIPTION = u'Parser for TimeMachine plist files.'

  PLIST_PATH = u'com.apple.TimeMachine.plist'
  PLIST_KEYS = frozenset([u'Destinations', u'RootVolumeUUID'])

  TM_BACKUP_ALIAS = construct.Struct(
      u'tm_backup_alias',
      construct.Padding(10),
      construct.PascalString(
          u'value', length_field=construct.UBInt8(u'length')))

  def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
    """Extracts relevant TimeMachine entries.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      match: Optional dictionary containing keys extracted from PLIST_KEYS.
             The default is None.
    """
    try:
      destinations = match[u'Destinations']
    except KeyError:
      return

    root = u'/Destinations'
    key = u'item/SnapshotDates'

    for destination in destinations:
      hd_uuid = destination.get(u'DestinationID', None) or u'Unknown device'

      alias = destination.get(u'BackupAlias', u'<ALIAS>')
      try:
        alias = self.TM_BACKUP_ALIAS.parse(alias).value
      except construct.FieldError:
        alias = u'Unknown alias'

      # The description is invariant over the snapshots of one destination.
      description = u'TimeMachine Backup in {0:s} ({1:s})'.format(
          alias, hd_uuid)
      for timestamp in destination.get(u'SnapshotDates', []):
        event_object = plist_event.PlistEvent(root, key, timestamp,
                                              description)
        parser_mediator.ProduceEvent(event_object)
def subspec_from_grammar_file(filename, verbose=False, cls=SubSpec):
    """Build a SubSpec subclass from a grammar file.

    Args:
        filename: path of the grammar file to parse.
        verbose: passed through to convert_fields_to_struct_fields.
        cls: base class providing the `grammar` parser; the returned type
            derives from it.

    Returns:
        A new type named "SubSpec" whose `struct_cls` is the construct
        Struct assembled from the grammar's fields, padded to the next
        byte boundary.
    """
    with open(filename, "r") as infp:
        root = cls.grammar.parse(infp.read())
    fields = list(parse_grammar(root))
    struct_fields, total_bits = convert_fields_to_struct_fields(
        fields, verbose=verbose)
    # Pad up to the next byte boundary. `-total_bits % 8` is 0 when the
    # fields already end on a byte boundary (the previous `8 - total_bits % 8`
    # wrongly added a full extra unit of padding in that case).
    req_padding = -total_bits % 8
    if req_padding:
        # Bug fix: the padding must go into struct_fields (which feeds the
        # Struct below); appending it to `fields` had no effect.
        struct_fields.append("padding" / construct.Padding(req_padding))
    return type("SubSpec", (cls,),
                dict(struct_cls=construct.Struct(*struct_fields)))
def profile_spec_t():
    """Return the construct Struct describing a profile specification record.

    Layout: block location, profiling method, output port type, one pad
    byte, two little-endian float32 input values, then the state field.
    """
    return cstruct.Struct(
        "inst" / block_loc_t(),
        "method" / profile_type_t(),
        "output" / port_type_t(),
        cstruct.Padding(1),
        "in_vals" / cstruct.Array(2, cstruct.Float32l),
        "state" / state_t(),
    )
def __init__(s):
    """Build the CMD packet header parsers and the command dispatch tables."""
    # CMD0: generic command header with ids, flags and payload size.
    s.header_cmd0 = construct.Struct('CMD0Header',
        construct.UBInt8('magic'),
        construct.UBInt8('unk_0'),
        construct.UBInt8('unk_1'),
        construct.UBInt8('unk_2'),
        construct.UBInt8('unk_3'),
        construct.UBInt8('flags'),
        construct.UBInt8('id_primary'),
        construct.UBInt8('id_secondary'),
        construct.UBInt16('error_code'),
        construct.UBInt16('payload_size_cmd0')
    )
    # CMD1: fixed 48-byte opaque header.
    s.header_cmd1 = construct.Struct('CMD1Header',
        construct.Padding(48)
    )
    # CMD2: time header (JDN_base presumably a Julian day number -- TODO
    # confirm against the protocol documentation).
    s.header_cmd2 = construct.Struct('CMD2Header',
        construct.ULInt16('JDN_base'),
        construct.Padding(2),
        construct.ULInt32('seconds')
    )
    # Outer header: the embedded sub-header is selected by cmd_id, and only
    # parsed when payload_size is consistent with that sub-header's size;
    # otherwise (or for unknown cmd_id) parsing falls through to Pass.
    s.header = construct.Struct('CMDHeader',
        construct.ULInt16('packet_type'),
        construct.ULInt16('cmd_id'),
        construct.ULInt16('payload_size'),
        construct.ULInt16('seq_id'),
        construct.Switch('cmd_hdr', lambda ctx: ctx.cmd_id,
            {
                0 : construct.If(
                    lambda ctx: ctx.payload_size >= s.header_cmd0.sizeof(),
                    construct.Embed(s.header_cmd0)),
                1 : construct.If(
                    lambda ctx: ctx.payload_size == s.header_cmd1.sizeof(),
                    construct.Embed(s.header_cmd1)),
                2 : construct.If(
                    lambda ctx: ctx.payload_size == s.header_cmd2.sizeof(),
                    construct.Embed(s.header_cmd2))
            },
            default = construct.Pass
        )
    )
    # Dispatch table: cmd_id -> bound handler method.
    s.cmd_handlers = {
        0 : s.cmd0,
        1 : s.cmd1,
        2 : s.cmd2
    }
    # Nested dispatch for CMD0 packets: id_primary -> id_secondary -> handler.
    s.cmd0_handlers = {
        5 : { 6 : s.cmd0_5_6 },
    }
def decode_itempos(itempos):
    """
    Decodes a single itempos and returns extracted information
    """
    itempos_io = StringIO.StringIO(itempos)
    # Fixed leading part of an itempos entry, ending with the NUL-terminated
    # short (DOS) filename.
    itempos_struct = construct.Struct("itempos",
                                      construct.ULInt16("itempos_size"),
                                      construct.Padding(2),
                                      construct.ULInt32("filesize"),
                                      construct.Bytes("dos_date", 2),
                                      construct.Bytes("dos_time", 2),
                                      construct.ULInt16("file_attr"),
                                      construct.CString("filename")
                                      )
    parse_res = itempos_struct.parse_stream(itempos_io)
    # Re-align to a 2-byte boundary after the variable-length filename.
    if itempos_io.pos % 2 == 1:
        itempos_io.read(1)
    # Extension block header: size and version decide what follows.
    ext_struct = construct.Struct("ext",
                                  construct.ULInt16("ext_size"),
                                  construct.ULInt16("ext_version")
                                  )
    parse_ext = ext_struct.parse_stream(itempos_io)
    if parse_ext["ext_version"] >= 0x3:
        itempos2_struct = construct.Struct("itempos2",
                                           construct.Padding(2),  # 0004
                                           construct.Padding(2),  # BEEF
                                           construct.Bytes("creation_dos_date", 2),
                                           construct.Bytes("creation_dos_time", 2),
                                           construct.Bytes("access_dos_date", 2),
                                           construct.Bytes("access_dos_time", 2),
                                           construct.Padding(4)
                                           )
        parse_res2 = itempos2_struct.parse_stream(itempos_io)
    unicode_filename = ""
    if parse_ext["ext_version"] >= 0x7:
        # Version 7+ adds an NTFS file reference before the unicode name.
        itempos3_struct = construct.Struct("itempos3",
                                           construct.ULInt64("file_ref"),
                                           construct.Padding(8),
                                           construct.Padding(2),
                                           construct.Padding(4)
                                           )
        parse_res3 = itempos3_struct.parse_stream(itempos_io)
        unicode_filename = itempos_io.read().decode("utf16")
        # NOTE(review): the condition looks inverted relative to the comment
        # (it trims only when the string does NOT end in NUL) -- TODO confirm
        # against real shellbag data before changing.
        if not unicode_filename.endswith("\0"):
            unicode_filename = unicode_filename[:-2]  # ditch last unused 2 bytes and \0 char
    elif parse_ext["ext_version"] >= 0x3:
        unicode_filename = itempos_io.read().decode("utf16")
        if not unicode_filename.endswith("\0"):
            unicode_filename = unicode_filename[:-2]  # ditch last unused 2 bytes and \0 char
    # NOTE(review): parse_res2 is unbound when ext_version < 0x3, so the
    # strftime calls below would raise NameError; presumably such versions do
    # not occur in practice -- verify.
    timestamp_modified = dosdate(parse_res["dos_date"], parse_res["dos_time"]).strftime("%d/%m/%Y %H:%M:%S")
    timestamp_created = dosdate(parse_res2["creation_dos_date"], parse_res2["creation_dos_time"]).strftime(
        "%d/%m/%Y %H:%M:%S")
    timestamp_access = dosdate(parse_res2["access_dos_date"], parse_res2["access_dos_time"]).strftime(
        "%d/%m/%Y %H:%M:%S")
    return [unicode(parse_res["itempos_size"]), unicode(parse_res["filesize"]), timestamp_modified,
            parse_res["filename"], timestamp_created, timestamp_access, unicode_filename]
def __init__(self, subcon, length=1, pattern=b'\x00'):
    """
    A custom :class:`Subconstruct` that adds a termination character at the
    end of the child struct.

    Args:
        subcon (Construct): The subcon to add the terminated character to.
        length (int): The amount of termination characters to add.
        pattern (bytes): The termination pattern to use.
    """
    # Bug fix: super(self.__class__, self) recurses infinitely as soon as
    # this class is subclassed; the zero-argument form resolves the MRO
    # relative to the class this method is defined on.
    super().__init__(subcon)
    self.padding = construct.Padding(length, pattern)
class TimeMachinePlugin(interface.PlistPlugin):
  """Basic plugin to extract time machine hardisk and the backups."""

  NAME = 'plist_timemachine'
  DESCRIPTION = u'Parser for TimeMachine plist files.'

  PLIST_PATH = 'com.apple.TimeMachine.plist'
  PLIST_KEYS = frozenset(['Destinations', 'RootVolumeUUID'])

  # Generated events:
  # DestinationID: remote UUID hard disk where the backup is done.
  # BackupAlias: structure that contains the extra information from the
  # destinationID.
  # SnapshotDates: list of the backup dates.
  TM_BACKUP_ALIAS = construct.Struct(
      'tm_backup_alias',
      construct.Padding(10),
      construct.PascalString(
          'value', length_field=construct.UBInt8('length')))

  def GetEntries(self, parser_context, match=None, **unused_kwargs):
    """Extracts relevant TimeMachine entries.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      match: Optional dictionary containing keys extracted from PLIST_KEYS.
             The default is None.
    """
    root = '/Destinations'
    key = 'item/SnapshotDates'

    # Robustness fix: use .get() with defaults throughout -- the previous
    # direct indexing raised KeyError on a plist missing any of these keys
    # and aborted the remaining destinations.
    for destination in match.get('Destinations', []):
      hd_uuid = destination.get('DestinationID', None)
      if not hd_uuid:
        hd_uuid = u'Unknown device'

      alias = destination.get('BackupAlias', u'<ALIAS>')
      try:
        alias = self.TM_BACKUP_ALIAS.parse(alias).value
      except construct.FieldError:
        alias = u'Unknown alias'

      # For each Backup.
      for timestamp in destination.get('SnapshotDates', []):
        description = u'TimeMachine Backup in {0:s} ({1:s})'.format(
            alias, hd_uuid)
        event_object = plist_event.PlistEvent(root, key, timestamp,
                                              description)
        parser_context.ProduceEvent(event_object, plugin_name=self.NAME)
class TimeMachinePlugin(interface.PlistPlugin):
  """Basic plugin to extract time machine hardisk and the backups."""

  NAME = 'plist_timemachine'

  PLIST_PATH = 'com.apple.TimeMachine.plist'
  PLIST_KEYS = frozenset(['Destinations', 'RootVolumeUUID'])

  # Yield Events
  #
  # DestinationID: remote UUID hard disk where the backup is done.
  # BackupAlias: structure that contains the extra information from the
  # destinationID.
  # SnapshotDates: list of the backup dates.
  TM_BACKUP_ALIAS = construct.Struct(
      'tm_backup_alias',
      construct.Padding(10),
      construct.PascalString(
          'value', length_field=construct.UBInt8('length')))

  def GetEntries(self, match, **unused_kwargs):
    """Extracts relevant TimeMachine entries.

    Args:
      match: A dictionary containing keys extracted from PLIST_KEYS.

    Yields:
      EventObject objects extracted from the plist.
    """
    root = '/Destinations'
    key = 'item/SnapshotDates'

    for destination in match['Destinations']:
      hd_uuid = destination['DestinationID'] or u'Unknown device'

      alias = destination['BackupAlias']
      try:
        alias = self.TM_BACKUP_ALIAS.parse(alias).value
      except construct.FieldError:
        alias = u'Unknown alias'

      # The description is invariant over the snapshots of one destination.
      description = u'TimeMachine Backup in {} ({})'.format(alias, hd_uuid)
      for timestamp in destination['SnapshotDates']:
        yield plist_event.PlistEvent(root, key, timestamp, description)
def gate_factory(class_dict):
    """Build the nested construct Struct hierarchy for parsing a gate record.

    Args:
        class_dict: mapping of symbolic marker names (e.g. 'gate_level',
            'normal_color') to the raw byte sequences / ids used as
            constants and switch keys in the binary format.

    Returns:
        The top-level gate Struct.
    """
    # Colour given directly as category/colour bytes.
    normal_color_struct = cs.Struct(cs.Padding(2), 'category' / cs.Byte,
                                    'color' / cs.Byte)
    # Colour derived from another source id plus an HSV triplet.
    offset_color_struct = cs.Struct('source' / cs.Int,
                                    cs.Const(class_dict['color_triplet']),
                                    'hsv' / cs.Single[3])
    # One colourization entry; layout selected by the leading type short.
    color_entry_struct = cs.Struct(
        'type' / cs.RawCopy(cs.Short),
        'colors' / cs.Switch(
            cs.this.type.data, {
                class_dict['normal_color']: normal_color_struct,
                class_dict['offset_color']: offset_color_struct
            }))
    # Counted list of colourization entries.
    color_struct = cs.Struct(
        cs.Const(class_dict['colorization']), 'num_entries' / cs.Int,
        'colorizations' / color_entry_struct[cs.this.num_entries])
    # A single level: name, icon, colourization, description and type flags.
    level_struct = cs.Struct(
        cs.Const(class_dict['gate_level']), cs.Const(b'\x01'),
        'level_name' / cs.PascalString(cs.Short, 'ascii'), cs.Const(b'\x01'),
        'level_icon' / cs.PascalString(cs.Short, 'ascii'),
        'colorization' / color_struct, cs.Const(b'\x01'),
        'description' / cs.PascalString(cs.Short, 'ascii'),
        cs.Const(class_dict['level_type']), 'level_type' / cs.Byte,
        'restricted' / cs.Byte)
    # Velocity block: base velocity plus counted segment lengths.
    velocity_struct = cs.Struct('velocity' / cs.Single, cs.Const(b'\x01'),
                                'num_entries' / cs.Int,
                                'lengths' / cs.Single[cs.this.num_entries],
                                'start' / cs.Long)
    # A wheel: its levels, and (except for 'random_depth' wheels) a
    # trailing velocity block.
    wheel_struct = cs.Struct(
        'class_id' / cs.RawCopy(cs.Short), 'unknown' / cs.Byte[0x05],
        'num_levels' / cs.Short, 'levels' / level_struct[cs.this.num_levels],
        'velocity' / cs.If(cs.this.class_id.data != class_dict['random_depth'],
                           velocity_struct))
    # Top-level gate record.
    gate_struct = cs.Struct(
        cs.Const(b'\x01'), 'gate_id' / cs.Int, cs.Const(b'\x01'),
        'gate_name' / cs.PascalString(cs.Short, 'ascii'), cs.Const(b'\x01'),
        'gate_icon' / cs.PascalString(cs.Short, 'ascii'),
        'colorization' / color_struct, cs.Const(b'\x01'),
        'description' / cs.PascalString(cs.Short, 'ascii'),
        'unknown' / cs.Byte[0x16], 'num_wheels' / cs.Int,
        'wheels' / wheel_struct[cs.this.num_wheels],
        'class_id' / cs.Int16sb,
        # A negative class_id is followed by an extra string.
        cs.If(cs.this.class_id < 0, cs.PascalString(cs.Short, 'ascii')),
        'themes' / cs.Struct('unknown' / cs.Byte[0x07],
                             'themes' / cs.Byte[0x06]))
    return gate_struct
def __init__(s):
    """Set up the ASTRM header parsers and the PyAudio playback state."""
    super(ServiceASTRM, s).__init__()
    # 4-byte bit-packed base header present on every ASTRM packet.
    s.header_base = construct.BitStruct('ASTRMBaseHeader',
        construct.BitField('fmt', 3),
        construct.Bit('channel'),
        construct.Flag('vibrate'),
        construct.Bit('packet_type'),
        construct.BitField('seq_id', 10),
        construct.BitField('payload_size', 16)
    )
    # Payload header used when packet_type == 0 (audio data).
    s.header_aud = construct.Struct('ASTRMAudioHeader',
        construct.ULInt32('timestamp'),
        # construct.Array(lambda ctx: ctx.payload_size, construct.UBInt8("data"))
    )
    # Payload header used when packet_type == 1 (message).
    s.header_msg = construct.Struct('ASTRMMsgHeader',
        # This is kind of a hack, (there are two timestamp fields, which one is used depends on packet_type
        construct.ULInt32('timestamp_audio'),
        construct.ULInt32('timestamp'),
        construct.Array(2, construct.ULInt32('freq_0')),  # -> mc_video
        construct.Array(2, construct.ULInt32('freq_1')),  # -> mc_sync
        construct.ULInt8('vid_format'),
        construct.Padding(3)
    )
    # Full header: base bits plus the packet_type-selected payload header.
    s.header = construct.Struct('ASTRMHeader',
        construct.Embed(s.header_base),
        construct.Switch('format_hdr', lambda ctx: ctx.packet_type,
            {
                0 : construct.Embed(s.header_aud),
                1 : construct.Embed(s.header_msg),
            },
            default = construct.Pass
        )
    )
    # Playback state.
    s.is_streaming = False
    s.p = pyaudio.PyAudio()
    s.stream = None
    # Ring buffer of audio blocks handed to PyAudio.
    s.pa_num_bufs = 15
    # NOTE(review): list multiplication makes every slot reference the SAME
    # array.array object; if slots are mutated in place rather than replaced
    # wholesale, all entries change together -- confirm this is intended.
    s.pa_ring = [array.array('H', '\0' * 416 * 2)] * s.pa_num_bufs
    s.pa_wpos = s.pa_rpos = 0
class CupsIppParser(interface.FileObjectParser):
  """Parser for CUPS IPP files. """

  NAME = u'cups_ipp'
  DESCRIPTION = u'Parser for CUPS IPP files.'

  # INFO:
  # For each file, we have only one document with three different timestamps:
  # Created, process and finished.
  # Format:
  # [HEADER: MAGIC + KNOWN_TYPE][GROUP A]...[GROUP Z][GROUP_END: 0x03]
  # GROUP: [GROUP ID][PAIR A]...[PAIR Z] where [PAIR: NAME + VALUE]
  #   GROUP ID: [1byte ID]
  #   PAIR: [TagID][\x00][Name][Value])
  #     TagID: 1 byte integer with the type of "Value".
  #     Name: [Length][Text][\00]
  #       Name can be empty when the name has more than one value.
  #       Example: family name "lopez mata" with more than one surname.
  #       Type_Text + [0x06, family, 0x00] + [0x05, lopez, 0x00] +
  #       Type_Text + [0x00, 0x00] + [0x04, mata, 0x00]
  #     Value: can be integer, boolean, or text provided by TagID.
  #       If boolean, Value: [\x01][0x00(False)] or [\x01(True)]
  #       If integer, Value: [\x04][Integer]
  #       If text, Value: [Length text][Text][\00]

  # Magic number that identify the CUPS IPP supported version.
  IPP_MAJOR_VERSION = 2
  IPP_MINOR_VERSION = 0

  # Supported Operation ID.
  IPP_OP_ID = 5

  # CUPS IPP File header.
  CUPS_IPP_HEADER = construct.Struct(
      u'cups_ipp_header_struct',
      construct.UBInt8(u'major_version'),
      construct.UBInt8(u'minor_version'),
      construct.UBInt16(u'operation_id'),
      construct.UBInt32(u'request_id'))

  # Group ID that indicates the end of the IPP Control file.
  GROUP_END = 3

  # Identification Groups.
  GROUP_LIST = [1, 2, 4, 5, 6, 7]

  # Type ID, per cups source file ipp-support.c.
  TYPE_GENERAL_INTEGER = 0x20
  TYPE_INTEGER = 0x21
  TYPE_BOOL = 0x22
  TYPE_ENUMERATION = 0x23
  TYPE_DATETIME = 0x31

  # Type of values that can be extracted.
  INTEGER_8 = construct.UBInt8(u'integer')
  INTEGER_32 = construct.UBInt32(u'integer')
  TEXT = construct.PascalString(
      u'text',
      encoding='utf-8',
      length_field=construct.UBInt8(u'length'))
  BOOLEAN = construct.Struct(
      u'boolean_value',
      construct.Padding(1),
      INTEGER_8)
  INTEGER = construct.Struct(
      u'integer_value',
      construct.Padding(1),
      INTEGER_32)

  # This is an RFC 2579 datetime.
  DATETIME = construct.Struct(
      u'datetime',
      construct.Padding(1),
      construct.UBInt16(u'year'),
      construct.UBInt8(u'month'),
      construct.UBInt8(u'day'),
      construct.UBInt8(u'hour'),
      construct.UBInt8(u'minutes'),
      construct.UBInt8(u'seconds'),
      construct.UBInt8(u'deciseconds'),
      construct.String(u'direction_from_utc', length=1, encoding='ascii'),
      construct.UBInt8(u'hours_from_utc'),
      construct.UBInt8(u'minutes_from_utc'),
  )

  # Name of the pair.
  PAIR_NAME = construct.Struct(
      u'pair_name',
      TEXT,
      construct.Padding(1))

  # Specific CUPS IPP to generic name.
  NAME_PAIR_TRANSLATION = {
      u'printer-uri': u'uri',
      u'job-uuid': u'job_id',
      u'DestinationPrinterID': u'printer_id',
      u'job-originating-user-name': u'user',
      u'job-name': u'job_name',
      u'document-format': u'doc_type',
      u'job-originating-host-name': u'computer_name',
      u'com.apple.print.JobInfo.PMApplicationName': u'application',
      u'com.apple.print.JobInfo.PMJobOwner': u'owner'}

  def ParseFileObject(self, parser_mediator, file_object, **kwargs):
    """Parses a CUPS IPP file-like object.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      file_object: A file-like object.

    Raises:
      UnableToParseFile: when the file cannot be parsed.
    """
    try:
      header = self.CUPS_IPP_HEADER.parse_stream(file_object)
    except (IOError, construct.FieldError) as exception:
      raise errors.UnableToParseFile(
          u'Unable to parse CUPS IPP Header with error: {0:s}'.format(
              exception))

    if (header.major_version != self.IPP_MAJOR_VERSION or
        header.minor_version != self.IPP_MINOR_VERSION):
      raise errors.UnableToParseFile(
          u'[{0:s}] Unsupported version number.'.format(self.NAME))

    if header.operation_id != self.IPP_OP_ID:
      # Warn if the operation ID differs from the standard one. We should be
      # able to parse the file nonetheless.
      logging.debug(
          u'[{0:s}] Unsupported operation identifier in file: {1:s}.'.format(
              self.NAME, parser_mediator.GetDisplayName()))

    # Read the pairs extracting the name and the value.
    data_dict = {}
    name, value = self.ReadPair(parser_mediator, file_object)
    while name or value:
      # Translate the known "name" CUPS IPP to a generic name value.
      pretty_name = self.NAME_PAIR_TRANSLATION.get(name, name)
      data_dict.setdefault(pretty_name, []).append(value)
      name, value = self.ReadPair(parser_mediator, file_object)

    # TODO: Refactor to use a lookup table to do event production.
    # Move the time-related keys into their own dict.
    # NOTE(review): deleting from data_dict while iterating .items() is safe
    # on Python 2 (items() returns a list) but raises RuntimeError on
    # Python 3 -- needs list(...) if this code is ever ported.
    time_dict = {}
    for key, value in data_dict.items():
      if key.startswith(u'date-time-') or key.startswith(u'time-'):
        time_dict[key] = value
        del data_dict[key]

    # RFC 2579 datetime values (already converted to timestamps).
    if u'date-time-at-creation' in time_dict:
      event_object = CupsIppEvent(
          time_dict[u'date-time-at-creation'][0],
          eventdata.EventTimestamp.CREATION_TIME, data_dict)
      parser_mediator.ProduceEvent(event_object)

    if u'date-time-at-processing' in time_dict:
      event_object = CupsIppEvent(
          time_dict[u'date-time-at-processing'][0],
          eventdata.EventTimestamp.START_TIME, data_dict)
      parser_mediator.ProduceEvent(event_object)

    if u'date-time-at-completed' in time_dict:
      event_object = CupsIppEvent(
          time_dict[u'date-time-at-completed'][0],
          eventdata.EventTimestamp.END_TIME, data_dict)
      parser_mediator.ProduceEvent(event_object)

    # POSIX time values need converting before event production.
    if u'time-at-creation' in time_dict:
      time_value = time_dict[u'time-at-creation'][0]
      timestamp = timelib.Timestamp.FromPosixTime(time_value)
      event_object = CupsIppEvent(
          timestamp, eventdata.EventTimestamp.CREATION_TIME, data_dict)
      parser_mediator.ProduceEvent(event_object)

    if u'time-at-processing' in time_dict:
      time_value = time_dict[u'time-at-processing'][0]
      timestamp = timelib.Timestamp.FromPosixTime(time_value)
      event_object = CupsIppEvent(
          timestamp, eventdata.EventTimestamp.START_TIME, data_dict)
      parser_mediator.ProduceEvent(event_object)

    if u'time-at-completed' in time_dict:
      time_value = time_dict[u'time-at-completed'][0]
      timestamp = timelib.Timestamp.FromPosixTime(time_value)
      event_object = CupsIppEvent(
          timestamp, eventdata.EventTimestamp.END_TIME, data_dict)
      parser_mediator.ProduceEvent(event_object)

  def ReadPair(self, parser_mediator, file_object):
    """Reads an attribute name and value pair from a CUPS IPP event.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      file_object: a file-like object that points to a file.

    Returns:
      A list of name and value. If name and value cannot be read both are
      set to None.
    """
    # Pair = Type ID + Name + Value.
    try:
      # Can be:
      #   Group ID + IDtag = Group ID (1byte) + Tag ID (1byte) + '0x00'.
      #   IDtag = Tag ID (1byte) + '0x00'.
      type_id = self.INTEGER_8.parse_stream(file_object)
      if type_id == self.GROUP_END:
        return None, None

      elif type_id in self.GROUP_LIST:
        # If it is a group ID we must read the next byte that contains
        # the first TagID.
        type_id = self.INTEGER_8.parse_stream(file_object)

      # 0x00 separator character.
      _ = self.INTEGER_8.parse_stream(file_object)

    except (IOError, construct.FieldError):
      logging.warning(
          u'[{0:s}] Unsupported identifier in file: {1:s}.'.format(
              self.NAME, parser_mediator.GetDisplayName()))
      return None, None

    # Name = Length name + name + 0x00
    try:
      name = self.PAIR_NAME.parse_stream(file_object).text
    except (IOError, construct.FieldError):
      logging.warning(u'[{0:s}] Unsupported name in file: {1:s}.'.format(
          self.NAME, parser_mediator.GetDisplayName()))
      return None, None

    # Value: can be integer, boolean or text select by Type ID.
    try:
      if type_id in [
          self.TYPE_GENERAL_INTEGER, self.TYPE_INTEGER,
          self.TYPE_ENUMERATION]:
        value = self.INTEGER.parse_stream(file_object).integer

      elif type_id == self.TYPE_BOOL:
        value = bool(self.BOOLEAN.parse_stream(file_object).integer)

      elif type_id == self.TYPE_DATETIME:
        datetime = self.DATETIME.parse_stream(file_object)
        value = timelib.Timestamp.FromRFC2579Datetime(
            datetime.year, datetime.month, datetime.day, datetime.hour,
            datetime.minutes, datetime.seconds, datetime.deciseconds,
            datetime.direction_from_utc, datetime.hours_from_utc,
            datetime.minutes_from_utc)

      else:
        value = self.TEXT.parse_stream(file_object)

    except (IOError, UnicodeDecodeError, construct.FieldError):
      logging.warning(
          u'[{0:s}] Unsupported value in file: {1:s}.'.format(
              self.NAME, parser_mediator.GetDisplayName()))
      return None, None

    return name, value
# Response payload of the read-RTC command: success status followed by the
# device timestamp.
_READ_RTC_RESPONSE = construct.Struct(
    _COMMAND_SUCCESS,
    'timestamp' / lifescan_binary_protocol.VERIO_TIMESTAMP,
)

# Request to set the device clock to the given timestamp.
_WRITE_RTC_REQUEST = construct.Struct(
    construct.Const(b'\x03\x20\x01'),
    'timestamp' / lifescan_binary_protocol.VERIO_TIMESTAMP,
)

# Request/response pair for reading the configured glucose display unit;
# the response carries the unit plus three pad bytes.
_GLUCOSE_UNIT_REQUEST = construct.Const(b'\x03\x09\x02\x02')

_GLUCOSE_UNIT_RESPONSE = construct.Struct(
    _COMMAND_SUCCESS,
    'unit' / lifescan_binary_protocol.GLUCOSE_UNIT,
    construct.Padding(3),
)

# Command to erase the device memory.
_MEMORY_ERASE_REQUEST = construct.Const(b'\x03\x1a')

# Request/response pair for the number of stored records (16-bit LE count).
_READ_RECORD_COUNT_REQUEST = construct.Const(b'\x03\x27\x00')

_READ_RECORD_COUNT_RESPONSE = construct.Struct(
    _COMMAND_SUCCESS,
    'count' / construct.Int16ul,
)

# Request a single record by its 16-bit little-endian identifier.
_READ_RECORD_REQUEST = construct.Struct(
    construct.Const(b'\x03\x21'),
    'record_id' / construct.Int16ul,
)
class KeychainParser(interface.FileObjectParser):
  """Parser for Keychain files.

  Parses the binary MacOS keychain database format: a fixed header, a
  schema listing table offsets, and per-table records for application and
  Internet password entries.
  """

  NAME = 'mac_keychain'
  DESCRIPTION = 'Parser for MacOS Keychain files.'

  KEYCHAIN_SIGNATURE = b'kych'
  KEYCHAIN_MAJOR_VERSION = 1
  KEYCHAIN_MINOR_VERSION = 0

  RECORD_TYPE_APPLICATION = 0x80000000
  RECORD_TYPE_INTERNET = 0x80000001

  # DB HEADER.
  KEYCHAIN_DB_HEADER = construct.Struct(
      'db_header',
      construct.Bytes('signature', 4),
      construct.UBInt16('major_version'),
      construct.UBInt16('minor_version'),
      construct.UBInt32('header_size'),
      construct.UBInt32('schema_offset'),
      construct.Padding(4))

  # DB SCHEMA.
  KEYCHAIN_DB_SCHEMA = construct.Struct(
      'db_schema',
      construct.UBInt32('size'),
      construct.UBInt32('number_of_tables'))

  # For each number_of_tables, the schema has a TABLE_OFFSET with the
  # offset starting in the DB_SCHEMA.
  TABLE_OFFSET = construct.UBInt32('table_offset')

  TABLE_HEADER = construct.Struct(
      'table_header',
      construct.UBInt32('table_size'),
      construct.UBInt32('record_type'),
      construct.UBInt32('number_of_records'),
      construct.UBInt32('first_record'),
      construct.UBInt32('index_offset'),
      construct.Padding(4),
      construct.UBInt32('recordnumbercount'))

  # Common record header; most fields are offsets (relative to the record
  # start) to variable-length values read later by _ReadEntryHeader.
  RECORD_HEADER = construct.Struct(
      'record_header',
      construct.UBInt32('entry_length'),
      construct.Padding(12),
      construct.UBInt32('ssgp_length'),
      construct.Padding(4),
      construct.UBInt32('creation_time'),
      construct.UBInt32('last_modification_time'),
      construct.UBInt32('text_description'),
      construct.Padding(4),
      construct.UBInt32('comments'),
      construct.Padding(8),
      construct.UBInt32('entry_name'),
      construct.Padding(20),
      construct.UBInt32('account_name'),
      construct.Padding(4))

  RECORD_HEADER_APP = construct.Struct(
      'record_entry_app',
      RECORD_HEADER,
      construct.Padding(4))

  RECORD_HEADER_INET = construct.Struct(
      'record_entry_inet',
      RECORD_HEADER,
      construct.UBInt32('where'),
      construct.UBInt32('protocol'),
      construct.UBInt32('type'),
      construct.Padding(4),
      construct.UBInt32('url'))

  TEXT = construct.PascalString(
      'text', length_field=construct.UBInt32('length'))

  # Timestamps are stored as fixed-width ASCII digit strings.
  TIME = construct.Struct(
      'timestamp',
      construct.String('year', 4),
      construct.String('month', 2),
      construct.String('day', 2),
      construct.String('hour', 2),
      construct.String('minute', 2),
      construct.String('second', 2),
      construct.Padding(2))

  TYPE_TEXT = construct.String('type', 4)

  # TODO: add more protocols.
  _PROTOCOL_TRANSLATION_DICT = {
      'htps': 'https',
      'smtp': 'smtp',
      'imap': 'imap',
      'http': 'http'}

  def _ReadEntryApplication(self, parser_mediator, file_object):
    """Extracts the information from an application password entry.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.
    """
    record_offset = file_object.tell()
    try:
      record_struct = self.RECORD_HEADER_APP.parse_stream(file_object)
    except (IOError, construct.FieldError):
      parser_mediator.ProduceExtractionError(
          'unable to parse record structure at offset: 0x{0:08x}'.format(
              record_offset))
      return

    (ssgp_hash, creation_time, last_modification_time,
     text_description, comments, entry_name, account_name) = (
         self._ReadEntryHeader(
             parser_mediator, file_object, record_struct.record_header,
             record_offset))

    # Move to the end of the record so the next record can be read.
    next_record_offset = (
        record_offset + record_struct.record_header.entry_length)
    file_object.seek(next_record_offset, os.SEEK_SET)

    event_data = KeychainApplicationRecordEventData()
    event_data.account_name = account_name
    event_data.comments = comments
    event_data.entry_name = entry_name
    event_data.ssgp_hash = ssgp_hash
    event_data.text_description = text_description

    if creation_time:
      event = time_events.DateTimeValuesEvent(
          creation_time, definitions.TIME_DESCRIPTION_CREATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    if last_modification_time:
      event = time_events.DateTimeValuesEvent(
          last_modification_time,
          definitions.TIME_DESCRIPTION_MODIFICATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

  def _ReadEntryHeader(
      self, parser_mediator, file_object, record, record_offset):
    """Read the common record attributes.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.
      record (construct.Struct): record header structure.
      record_offset (int): offset of the start of the record.

    Returns:
      A tuple containing:
        ssgp_hash: Hash of the encrypted data (passwd, cert, note).
        creation_time (dfdatetime.TimeElements): entry creation time or None.
        last_modification_time (dfdatetime.TimeElements): entry last
            modification time or None.
        text_description: A brief description of the entry.
        comments: the entry comments or 'N/A'.
        entry_name: Name of the entry.
        account_name: Name of the account.
    """
    # TODO: reduce number of seeks and/or offset calculations needed
    # for parsing.

    # Info: The hash header always start with the string ssgp follow by
    # the hash. Furthermore The fields are always a multiple of four.
    # Then if it is not multiple the value is padded by 0x00.
    ssgp_hash = binascii.hexlify(file_object.read(record.ssgp_length)[4:])

    creation_time = None

    # NOTE(review): the -1 offset correction is applied to every value
    # offset in this record format — see the TODO in ParseFileObject.
    structure_offset = record_offset + record.creation_time - 1
    file_object.seek(structure_offset, os.SEEK_SET)

    try:
      time_structure = self.TIME.parse_stream(file_object)
    except construct.FieldError as exception:
      time_structure = None
      parser_mediator.ProduceExtractionError(
          'unable to parse creation time with error: {0!s}'.format(exception))

    if time_structure:
      time_elements_tuple = (
          time_structure.year, time_structure.month, time_structure.day,
          time_structure.hour, time_structure.minute, time_structure.second)

      creation_time = dfdatetime_time_elements.TimeElements()
      try:
        creation_time.CopyFromStringTuple(
            time_elements_tuple=time_elements_tuple)
      except ValueError:
        creation_time = None
        parser_mediator.ProduceExtractionError(
            'invalid creation time value: {0!s}'.format(time_elements_tuple))

    last_modification_time = None

    structure_offset = record_offset + record.last_modification_time - 1
    file_object.seek(structure_offset, os.SEEK_SET)

    try:
      time_structure = self.TIME.parse_stream(file_object)
    except construct.FieldError as exception:
      time_structure = None
      parser_mediator.ProduceExtractionError(
          'unable to parse last modification time with error: {0!s}'.format(
              exception))

    if time_structure:
      time_elements_tuple = (
          time_structure.year, time_structure.month, time_structure.day,
          time_structure.hour, time_structure.minute, time_structure.second)

      last_modification_time = dfdatetime_time_elements.TimeElements()
      try:
        last_modification_time.CopyFromStringTuple(
            time_elements_tuple=time_elements_tuple)
      except ValueError:
        last_modification_time = None
        parser_mediator.ProduceExtractionError(
            'invalid last modification time value: {0!s}'.format(
                time_elements_tuple))

    # A zero offset means the field does not contain data.
    text_description = 'N/A'
    if record.text_description:
      structure_offset = record_offset + record.text_description - 1
      file_object.seek(structure_offset, os.SEEK_SET)

      try:
        text_description = self.TEXT.parse_stream(file_object)
      except construct.FieldError as exception:
        parser_mediator.ProduceExtractionError(
            'unable to parse text description with error: {0!s}'.format(
                exception))

    comments = 'N/A'
    if record.comments:
      structure_offset = record_offset + record.comments - 1
      file_object.seek(structure_offset, os.SEEK_SET)

      try:
        comments = self.TEXT.parse_stream(file_object)
      except construct.FieldError as exception:
        parser_mediator.ProduceExtractionError(
            'unable to parse comments with error: {0!s}'.format(exception))

    structure_offset = record_offset + record.entry_name - 1
    file_object.seek(structure_offset, os.SEEK_SET)

    try:
      entry_name = self.TEXT.parse_stream(file_object)
    except construct.FieldError as exception:
      entry_name = 'N/A'
      parser_mediator.ProduceExtractionError(
          'unable to parse entry name with error: {0!s}'.format(exception))

    structure_offset = record_offset + record.account_name - 1
    file_object.seek(structure_offset, os.SEEK_SET)

    try:
      account_name = self.TEXT.parse_stream(file_object)
    except construct.FieldError as exception:
      account_name = 'N/A'
      parser_mediator.ProduceExtractionError(
          'unable to parse account name with error: {0!s}'.format(exception))

    return (
        ssgp_hash, creation_time, last_modification_time,
        text_description, comments, entry_name, account_name)

  def _ReadEntryInternet(self, parser_mediator, file_object):
    """Extracts the information from an Internet password entry.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.
    """
    record_offset = file_object.tell()
    try:
      record_header_struct = self.RECORD_HEADER_INET.parse_stream(file_object)
    except (IOError, construct.FieldError):
      parser_mediator.ProduceExtractionError((
          'unable to parse record header structure at offset: '
          '0x{0:08x}').format(record_offset))
      return

    (ssgp_hash, creation_time, last_modification_time,
     text_description, comments, entry_name, account_name) = (
         self._ReadEntryHeader(
             parser_mediator, file_object,
             record_header_struct.record_header, record_offset))

    if not record_header_struct.where:
      where = 'N/A'
      protocol = 'N/A'
      type_protocol = 'N/A'

    else:
      offset = record_offset + record_header_struct.where - 1
      file_object.seek(offset, os.SEEK_SET)
      where = self.TEXT.parse_stream(file_object)

      offset = record_offset + record_header_struct.protocol - 1
      file_object.seek(offset, os.SEEK_SET)
      protocol = self.TYPE_TEXT.parse_stream(file_object)

      offset = record_offset + record_header_struct.type - 1
      file_object.seek(offset, os.SEEK_SET)
      type_protocol = self.TEXT.parse_stream(file_object)

      # Map known protocol codes to canonical names; unknown codes are
      # passed through unchanged.
      type_protocol = self._PROTOCOL_TRANSLATION_DICT.get(
          type_protocol, type_protocol)

      if record_header_struct.url:
        offset = record_offset + record_header_struct.url - 1
        file_object.seek(offset, os.SEEK_SET)
        url = self.TEXT.parse_stream(file_object)
        where = '{0:s}{1:s}'.format(where, url)

    # Move to the end of the record so the next record can be read.
    next_record_offset = (
        record_offset + record_header_struct.record_header.entry_length)
    file_object.seek(next_record_offset, os.SEEK_SET)

    event_data = KeychainInternetRecordEventData()
    event_data.account_name = account_name
    event_data.comments = comments
    event_data.entry_name = entry_name
    event_data.protocol = protocol
    event_data.ssgp_hash = ssgp_hash
    event_data.text_description = text_description
    event_data.type_protocol = type_protocol
    event_data.where = where

    if creation_time:
      event = time_events.DateTimeValuesEvent(
          creation_time, definitions.TIME_DESCRIPTION_CREATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    if last_modification_time:
      event = time_events.DateTimeValuesEvent(
          last_modification_time,
          definitions.TIME_DESCRIPTION_MODIFICATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

  def _ReadTableOffsets(self, parser_mediator, file_object):
    """Reads the table offsets.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.

    Returns:
      list[int]: table offsets.
    """
    # INFO: The HEADER KEYCHAIN:
    # [DBHEADER] + [DBSCHEMA] + [OFFSET TABLE A] + ... + [OFFSET TABLE Z]
    # Where the table offset is relative to the first byte of the DB Schema,
    # then we must add to this offset the size of the [DBHEADER].
    # Read the database schema and extract the offset for all the tables.
    # They are ordered by file position from the top to the bottom of the file.
    table_offsets = []

    try:
      db_schema_struct = self.KEYCHAIN_DB_SCHEMA.parse_stream(file_object)
    except (IOError, construct.FieldError):
      parser_mediator.ProduceExtractionError(
          'unable to parse database schema structure')
      return []

    for index in range(db_schema_struct.number_of_tables):
      try:
        table_offset = self.TABLE_OFFSET.parse_stream(file_object)
      except (IOError, construct.FieldError):
        parser_mediator.ProduceExtractionError(
            'unable to parse table offsets: {0:d}'.format(index))
        # Return the offsets read so far instead of None, since the caller
        # iterates the result unconditionally.
        return table_offsets

      table_offsets.append(table_offset + self.KEYCHAIN_DB_HEADER.sizeof())

    return table_offsets

  @classmethod
  def GetFormatSpecification(cls):
    """Retrieves the format specification.

    Returns:
      FormatSpecification: format specification.
    """
    format_specification = specification.FormatSpecification(cls.NAME)
    format_specification.AddNewSignature(
        cls.KEYCHAIN_SIGNATURE, offset=0)
    return format_specification

  def ParseFileObject(self, parser_mediator, file_object, **kwargs):
    """Parses a MacOS keychain file-like object.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.

    Raises:
      UnableToParseFile: when the file cannot be parsed.
    """
    try:
      db_header = self.KEYCHAIN_DB_HEADER.parse_stream(file_object)
    except (IOError, construct.FieldError):
      raise errors.UnableToParseFile('Unable to parse file header.')

    if db_header.signature != self.KEYCHAIN_SIGNATURE:
      raise errors.UnableToParseFile('Not a MacOS keychain file.')

    if (db_header.major_version != self.KEYCHAIN_MAJOR_VERSION or
        db_header.minor_version != self.KEYCHAIN_MINOR_VERSION):
      # Note: the versions are integers so a decimal format specifier is
      # required; '{0:s}' would raise ValueError here.
      parser_mediator.ProduceExtractionError(
          'unsupported format version: {0:d}.{1:d}'.format(
              db_header.major_version, db_header.minor_version))
      return

    # TODO: document format and determine if -1 offset correction is needed.
    table_offsets = self._ReadTableOffsets(parser_mediator, file_object)
    for table_offset in table_offsets:
      # Skipping X bytes, unknown data at this point.
      file_object.seek(table_offset, os.SEEK_SET)

      try:
        table = self.TABLE_HEADER.parse_stream(file_object)
      except (IOError, construct.FieldError):
        parser_mediator.ProduceExtractionError(
            'unable to parse table structure at offset: 0x{0:08x}'.format(
                table_offset))
        continue

      # Table_offset: absolute byte in the file where the table starts.
      # table.first_record: first record in the table, relative to the
      # first byte of the table.
      file_object.seek(table_offset + table.first_record, os.SEEK_SET)

      if table.record_type == self.RECORD_TYPE_INTERNET:
        for _ in range(table.number_of_records):
          self._ReadEntryInternet(parser_mediator, file_object)

      elif table.record_type == self.RECORD_TYPE_APPLICATION:
        for _ in range(table.number_of_records):
          self._ReadEntryApplication(parser_mediator, file_object)
'u_dq' / con.Array(2, F32), 'i_dq' / con.Array(2, F32), 'mode' / ControlModeFormat, 'spinup_in_progress' / con.Flag, 'rotation_reversed' / con.Flag, 'controller_saturated' / con.Flag, ), ), 'hardware_test': con.Struct('progress' / F32, ), 'motor_identification': con.Struct('progress' / F32, ), 'low_level_manipulation': con.Struct('mode' / LowLevelManipulationModeFormat, ), }, default=con.Padding(1)) # noinspection PyUnresolvedReferences GeneralStatusMessageFormatV1 = con.Struct( 'timestamp' / TimeAdapter(U64), 'status_flags' / StatusFlagsFormat, 'current_task_id' / TaskIDFormat, con.Padding(3), 'temperature' / con.Struct( 'cpu' / F32, 'vsi' / F32, 'motor' / OptionalFloatAdapter(F32), ), 'dc' / con.Struct( 'voltage' / F32, 'current' / F32,
class KeychainParser(interface.BaseParser):
  """Parser for Keychain files.

  Legacy (BaseParser) implementation of the Mac OS X keychain parser:
  verifies the database header, walks the table offsets and produces
  events for application and Internet password records.
  """

  NAME = 'mac_keychain'
  DESCRIPTION = u'Parser for Mac OS X Keychain files.'

  KEYCHAIN_MAGIC_HEADER = 'kych'
  KEYCHAIN_MAJOR_VERSION = 1
  KEYCHAIN_MINOR_VERSION = 0

  RECORD_TYPE_APPLICATION = 0x80000000
  RECORD_TYPE_INTERNET = 0x80000001

  # DB HEADER.
  KEYCHAIN_DB_HEADER = construct.Struct(
      'db_header',
      construct.String('magic', 4),
      construct.UBInt16('major_version'),
      construct.UBInt16('minor_version'),
      construct.UBInt32('header_size'),
      construct.UBInt32('schema_offset'),
      construct.Padding(4))

  # DB SCHEMA.
  KEYCHAIN_DB_SCHEMA = construct.Struct(
      'db_schema',
      construct.UBInt32('size'),
      construct.UBInt32('number_of_tables'))

  # For each number_of_tables, the schema has a TABLE_OFFSET with the
  # offset starting in the DB_SCHEMA.
  TABLE_OFFSET = construct.UBInt32('table_offset')

  TABLE_HEADER = construct.Struct(
      'table_header',
      construct.UBInt32('table_size'),
      construct.UBInt32('record_type'),
      construct.UBInt32('number_of_records'),
      construct.UBInt32('first_record'),
      construct.UBInt32('index_offset'),
      construct.Padding(4),
      construct.UBInt32('recordnumbercount'))

  # Common record header; most fields are offsets (relative to the record
  # start) to variable-length values that are read by _ReadEntryHeader.
  RECORD_HEADER = construct.Struct(
      'record_header',
      construct.UBInt32('entry_length'),
      construct.Padding(12),
      construct.UBInt32('ssgp_length'),
      construct.Padding(4),
      construct.UBInt32('creation_time'),
      construct.UBInt32('last_mod_time'),
      construct.UBInt32('text_description'),
      construct.Padding(4),
      construct.UBInt32('comments'),
      construct.Padding(8),
      construct.UBInt32('entry_name'),
      construct.Padding(20),
      construct.UBInt32('account_name'),
      construct.Padding(4))

  RECORD_HEADER_APP = construct.Struct(
      'record_entry_app',
      RECORD_HEADER,
      construct.Padding(4))

  RECORD_HEADER_INET = construct.Struct(
      'record_entry_inet',
      RECORD_HEADER,
      construct.UBInt32('where'),
      construct.UBInt32('protocol'),
      construct.UBInt32('type'),
      construct.Padding(4),
      construct.UBInt32('url'))

  TEXT = construct.PascalString(
      'text', length_field=construct.UBInt32('length'))

  # Timestamps are stored as fixed-width ASCII digit strings.
  TIME = construct.Struct(
      'timestamp',
      construct.String('year', 4),
      construct.String('month', 2),
      construct.String('day', 2),
      construct.String('hour', 2),
      construct.String('minute', 2),
      construct.String('second', 2),
      construct.Padding(2))

  TYPE_TEXT = construct.String('type', 4)

  # TODO: add more protocols.
  _PROTOCOL_TRANSLATION_DICT = {
      u'htps': u'https',
      u'smtp': u'smtp',
      u'imap': u'imap',
      u'http': u'http'}

  def _GetTimestampFromEntry(self, parser_context, file_entry, structure):
    """Parse a time entry structure into a microseconds since Epoch in UTC.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      file_entry: A file entry object (instance of dfvfs.FileEntry).
      structure: TIME entry structure:
          year: String with the number of the year.
          month: String with the number of the month.
          day: String with the number of the day.
          hour: String with the number of the hour.
          minute: String with the number of the minute.
          second: String with the number of the second.

    Returns:
      Microseconds since Epoch in UTC, or 0 if the structure does not
      contain a valid time.
    """
    try:
      return timelib.Timestamp.FromTimeParts(
          int(structure.year, 10), int(structure.month, 10),
          int(structure.day, 10), int(structure.hour, 10),
          int(structure.minute, 10), int(structure.second, 10))
    except ValueError:
      logging.warning(
          u'[{0:s}] Invalid keychain time {1!s} in file: {2:s}'.format(
              self.NAME, parser_context.GetDisplayName(file_entry),
              structure))
      return 0

  def _ReadEntryApplication(self, parser_context, file_object, file_entry=None):
    """Extracts the information from an application password entry.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      file_object: A file-like object that points to an Keychain file.
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
                  The default is None.
    """
    offset = file_object.tell()
    try:
      record = self.RECORD_HEADER_APP.parse_stream(file_object)
    except (IOError, construct.FieldError):
      logging.warning((
          u'[{0:s}] Unsupported record header at 0x{1:08x} in file: '
          u'{2:s}').format(
              self.NAME, offset, parser_context.GetDisplayName(file_entry)))
      return

    (ssgp_hash, creation_time, last_mod_time, text_description,
     comments, entry_name, account_name) = self._ReadEntryHeader(
         parser_context, file_entry, file_object, record.record_header,
         offset)

    # Move to the end of the record, and then, prepared for the next record.
    file_object.seek(
        record.record_header.entry_length + offset - file_object.tell(),
        os.SEEK_CUR)

    event_object = KeychainApplicationRecordEvent(
        creation_time, eventdata.EventTimestamp.CREATION_TIME,
        entry_name, account_name, text_description, comments, ssgp_hash)
    parser_context.ProduceEvent(
        event_object, parser_name=self.NAME, file_entry=file_entry)

    if creation_time != last_mod_time:
      event_object = KeychainApplicationRecordEvent(
          last_mod_time, eventdata.EventTimestamp.MODIFICATION_TIME,
          entry_name, account_name, text_description, comments, ssgp_hash)
      parser_context.ProduceEvent(
          event_object, parser_name=self.NAME, file_entry=file_entry)

  def _ReadEntryHeader(
      self, parser_context, file_entry, file_object, record, offset):
    """Read the common record attributes.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      file_entry: A file entry object (instance of dfvfs.FileEntry).
      file_object: A file-like object that points to an Keychain file.
      record: Structure with the header of the record.
      offset: First byte of the record.

    Returns:
      A list of:
        ssgp_hash: Hash of the encrypted data (passwd, cert, note).
        creation_time: When the entry was created.
        last_mod_time: Last time the entry was updated.
        text_description: A brief description of the entry.
        comments: the entry comments or u'N/A'.
        entry_name: Name of the entry.
        account_name: Name of the account.
    """
    # Info: The hash header always start with the string ssgp follow by
    # the hash. Furthermore The fields are always a multiple of four.
    # Then if it is not multiple the value is padded by 0x00.
    ssgp_hash = binascii.hexlify(file_object.read(record.ssgp_length)[4:])

    # Each value offset is relative to the record start; the -1 corrects
    # the stored one-based offsets.
    file_object.seek(
        record.creation_time - file_object.tell() + offset - 1, os.SEEK_CUR)
    creation_time = self._GetTimestampFromEntry(
        parser_context, file_entry, self.TIME.parse_stream(file_object))

    file_object.seek(
        record.last_mod_time - file_object.tell() + offset - 1, os.SEEK_CUR)
    last_mod_time = self._GetTimestampFromEntry(
        parser_context, file_entry, self.TIME.parse_stream(file_object))

    # The text description field does not always contain data.
    if record.text_description:
      file_object.seek(
          record.text_description - file_object.tell() + offset - 1,
          os.SEEK_CUR)
      text_description = self.TEXT.parse_stream(file_object)
    else:
      text_description = u'N/A'

    # The comment field does not always contain data.
    if record.comments:
      # Seek to the comments offset; previously this erroneously used
      # record.text_description and read the wrong value.
      file_object.seek(
          record.comments - file_object.tell() + offset - 1, os.SEEK_CUR)
      comments = self.TEXT.parse_stream(file_object)
    else:
      comments = u'N/A'

    file_object.seek(
        record.entry_name - file_object.tell() + offset - 1, os.SEEK_CUR)
    entry_name = self.TEXT.parse_stream(file_object)

    file_object.seek(
        record.account_name - file_object.tell() + offset - 1, os.SEEK_CUR)
    account_name = self.TEXT.parse_stream(file_object)

    return (
        ssgp_hash, creation_time, last_mod_time, text_description,
        comments, entry_name, account_name)

  def _ReadEntryInternet(self, parser_context, file_object, file_entry=None):
    """Extracts the information from an Internet password entry.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      file_object: A file-like object that points to an Keychain file.
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
                  The default is None.
    """
    offset = file_object.tell()
    try:
      record = self.RECORD_HEADER_INET.parse_stream(file_object)
    except (IOError, construct.FieldError):
      logging.warning((
          u'[{0:s}] Unsupported record header at 0x{1:08x} in file: '
          u'{2:s}').format(
              self.NAME, offset, parser_context.GetDisplayName(file_entry)))
      return

    (ssgp_hash, creation_time, last_mod_time, text_description,
     comments, entry_name, account_name) = self._ReadEntryHeader(
         parser_context, file_entry, file_object, record.record_header,
         offset)

    if not record.where:
      where = u'N/A'
      protocol = u'N/A'
      type_protocol = u'N/A'
    else:
      file_object.seek(
          record.where - file_object.tell() + offset - 1, os.SEEK_CUR)
      where = self.TEXT.parse_stream(file_object)

      file_object.seek(
          record.protocol - file_object.tell() + offset - 1, os.SEEK_CUR)
      protocol = self.TYPE_TEXT.parse_stream(file_object)

      file_object.seek(
          record.type - file_object.tell() + offset - 1, os.SEEK_CUR)
      type_protocol = self.TEXT.parse_stream(file_object)

      # Map known protocol codes to canonical names; unknown codes pass
      # through unchanged.
      type_protocol = self._PROTOCOL_TRANSLATION_DICT.get(
          type_protocol, type_protocol)

      if record.url:
        file_object.seek(
            record.url - file_object.tell() + offset - 1, os.SEEK_CUR)
        url = self.TEXT.parse_stream(file_object)
        where = u'{0:s}{1:s}'.format(where, url)

    # Move to the end of the record, and then, prepared for the next record.
    file_object.seek(
        record.record_header.entry_length + offset - file_object.tell(),
        os.SEEK_CUR)

    event_object = KeychainInternetRecordEvent(
        creation_time, eventdata.EventTimestamp.CREATION_TIME,
        entry_name, account_name, text_description, comments, where,
        protocol, type_protocol, ssgp_hash)
    parser_context.ProduceEvent(
        event_object, parser_name=self.NAME, file_entry=file_entry)

    if creation_time != last_mod_time:
      # Pass ssgp_hash here as well, matching the creation-time event
      # above; it was previously omitted.
      event_object = KeychainInternetRecordEvent(
          last_mod_time, eventdata.EventTimestamp.MODIFICATION_TIME,
          entry_name, account_name, text_description, comments, where,
          protocol, type_protocol, ssgp_hash)
      parser_context.ProduceEvent(
          event_object, parser_name=self.NAME, file_entry=file_entry)

  def _VerifyStructure(self, file_object):
    """Verify that we are dealing with an Keychain entry.

    Args:
      file_object: A file-like object that points to an Keychain file.

    Returns:
      A list of table positions if it is a keychain, None otherwise.
    """
    # INFO: The HEADER KEYCHAIN:
    # [DBHEADER] + [DBSCHEMA] + [OFFSET TABLE A] + ... + [OFFSET TABLE Z]
    # Where the table offset is relative to the first byte of the DB Schema,
    # then we must add to this offset the size of the [DBHEADER].
    try:
      db_header = self.KEYCHAIN_DB_HEADER.parse_stream(file_object)
    except (IOError, construct.FieldError):
      return

    if (db_header.minor_version != self.KEYCHAIN_MINOR_VERSION or
        db_header.major_version != self.KEYCHAIN_MAJOR_VERSION or
        db_header.magic != self.KEYCHAIN_MAGIC_HEADER):
      return

    # Read the database schema and extract the offset for all the tables.
    # They are ordered by file position from the top to the bottom of the file.
    try:
      db_schema = self.KEYCHAIN_DB_SCHEMA.parse_stream(file_object)
    except (IOError, construct.FieldError):
      return

    table_offsets = []
    for _ in range(db_schema.number_of_tables):
      try:
        table_offset = self.TABLE_OFFSET.parse_stream(file_object)
      except (IOError, construct.FieldError):
        return
      table_offsets.append(table_offset + self.KEYCHAIN_DB_HEADER.sizeof())

    return table_offsets

  def Parse(self, parser_context, file_entry):
    """Extract data from a Keychain file.

    Args:
      parser_context: A parser context object (instance of ParserContext).
      file_entry: A file entry object (instance of dfvfs.FileEntry).

    Raises:
      UnableToParseFile: when the file is not a Keychain file.
    """
    file_object = file_entry.GetFileObject()
    table_offsets = self._VerifyStructure(file_object)
    if not table_offsets:
      file_object.close()
      raise errors.UnableToParseFile(u'The file is not a Keychain file.')

    for table_offset in table_offsets:
      # Skipping X bytes, unknown data at this point.
      file_object.seek(table_offset - file_object.tell(), os.SEEK_CUR)
      try:
        table = self.TABLE_HEADER.parse_stream(file_object)
      except construct.FieldError as exception:
        logging.warning((
            u'[{0:s}] Unable to parse table header in file: {1:s} '
            u'with error: {2:s}.').format(
                self.NAME, parser_context.GetDisplayName(file_entry),
                exception))
        continue

      # Table_offset: absolute byte in the file where the table starts.
      # table.first_record: first record in the table, relative to the
      # first byte of the table.
      file_object.seek(
          table_offset + table.first_record - file_object.tell(),
          os.SEEK_CUR)

      if table.record_type == self.RECORD_TYPE_INTERNET:
        for _ in range(table.number_of_records):
          self._ReadEntryInternet(
              parser_context, file_object, file_entry=file_entry)

      elif table.record_type == self.RECORD_TYPE_APPLICATION:
        for _ in range(table.number_of_records):
          self._ReadEntryApplication(
              parser_context, file_object, file_entry=file_entry)

    file_object.close()
This module implements an interface to send and receive these messages. """ __author__ = 'Diego Elio Pettenò' __email__ = '*****@*****.**' __copyright__ = 'Copyright © 2014-2018, Diego Elio Pettenò' __license__ = 'MIT' import construct from glucometerutils import common from glucometerutils.support import construct_extras from glucometerutils.support import lifescan _LINK_CONTROL = construct.BitStruct( construct.Padding(3), 'more' / construct.Default(construct.Flag, False), 'disconnect' / construct.Default(construct.Flag, False), 'acknowledge' / construct.Default(construct.Flag, False), 'expect_receive' / construct.Default(construct.Flag, False), 'sequence_number' / construct.Default(construct.Flag, False), ) def LifeScanPacket(command_prefix, include_link_control): if include_link_control: link_control_construct = _LINK_CONTROL else: link_control_construct = construct.Const(b'\x00') command_prefix_construct = construct.Const(command_prefix, construct.Byte)
"cam_power_freq" / construct.Int8ub, "cam_auto_expo" / construct.Int8ub, "cam_expo_abs" / construct.Int32ub, "cam_brightness" / construct.Int16ub, "cam_contrast" / construct.Int16ub, "cam_gain" / construct.Int16ub, "cam_hue" / construct.Int16ub, "cam_saturation" / construct.Int16ub, "cam_sharpness" / construct.Int16ub, "cam_gamma" / construct.Int16ub, "cam_key_frame" / construct.Int8ub, "cam_white_balance_auto" / construct.Int8ub, "cam_white_balance" / construct.Int32ub, "cam_multiplier" / construct.Int16ub, "cam_multiplier_limit" / construct.Int16ub, construct.Padding(2) ) header_cmd2 = construct.Struct( 'JDN_base' / construct.Int16ul, construct.Padding(2), 'seconds' / construct.Int32ul ) header = construct.Struct( 'packet_type' / construct.Int16ul, 'cmd_id' / construct.Int16ul, 'payload_size' / construct.Int16ul, 'seq_id' / construct.Int16ul, construct.Embedded( construct.Switch(lambda ctx: ctx.cmd_id, { 0: construct.If(
class UserAssistPlugin(interface.WindowsRegistryPlugin):
  """Plugin that parses an UserAssist key."""

  NAME = u'userassist'
  DESCRIPTION = u'Parser for User Assist Registry data.'

  # One path filter per known UserAssist {GUID} subkey.
  FILTERS = frozenset([
      UserAssistWindowsRegistryKeyPathFilter(
          u'FA99DFC7-6AC2-453A-A5E2-5E2AFF4507BD'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'F4E57C4B-2036-45F0-A9AB-443BCFE33D9F'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'F2A1CB5A-E3CC-4A2E-AF9D-505A7009D442'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'CEBFF5CD-ACE2-4F4F-9178-9926F41749EA'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'CAA59E3C-4792-41A5-9909-6A6A8D32490E'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'B267E3AD-A825-4A09-82B9-EEC22AA3B847'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'A3D53349-6E61-4557-8FC7-0028EDCEEBF6'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'9E04CAB2-CC14-11DF-BB8C-A2F1DED72085'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'75048700-EF1F-11D0-9888-006097DEACF9'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'5E6AB780-7743-11CF-A12B-00AA004AE837'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'0D6D4F41-2994-4BA0-8FEF-620E43CD2812'),
      UserAssistWindowsRegistryKeyPathFilter(
          u'BCB48336-4DDD-48FF-BB0B-D3190DACB3E2')])

  URLS = [
      u'http://blog.didierstevens.com/programs/userassist/',
      u'https://code.google.com/p/winreg-kb/wiki/UserAssistKeys',
      u'http://intotheboxes.files.wordpress.com/2010/04'
      u'/intotheboxes_2010_q1.pdf']

  # UserAssist format version used in Windows 2000, XP, 2003, Vista.
  # Layout: 4 unknown bytes, 32-bit run count, 64-bit FILETIME.
  _USERASSIST_V3_STRUCT = construct.Struct(
      u'userassist_entry',
      construct.Padding(4),
      construct.ULInt32(u'count'),
      construct.ULInt64(u'timestamp'))

  # UserAssist format version used in Windows 2008, 7, 8.
  _USERASSIST_V5_STRUCT = construct.Struct(
      u'userassist_entry',
      construct.Padding(4),
      construct.ULInt32(u'count'),
      construct.ULInt32(u'app_focus_count'),
      construct.ULInt32(u'focus_duration'),
      construct.Padding(44),
      construct.ULInt64(u'timestamp'),
      construct.Padding(4))

  # pylint: disable=arguments-differ
  def GetEntries(self, parser_mediator, registry_key, **kwargs):
    """Parses a UserAssist Registry key.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      registry_key: A Windows Registry key (instance of
                    dfwinreg.WinRegistryKey).
    """
    version_value = registry_key.GetValueByName(u'Version')
    count_subkey = registry_key.GetSubkeyByName(u'Count')

    if not version_value:
      parser_mediator.ProduceExtractionError(u'Missing version value')
      return

    if not version_value.DataIsInteger():
      parser_mediator.ProduceExtractionError(
          u'Unsupported version value data type')
      return

    format_version = version_value.GetDataAsObject()
    if format_version not in (3, 5):
      parser_mediator.ProduceExtractionError(
          u'Unsupported format version: {0:d}'.format(format_version))
      return

    if not count_subkey:
      parser_mediator.ProduceExtractionError(u'Missing count subkey')
      return

    userassist_entry_index = 0

    for registry_value in count_subkey.GetValues():
      # Value names are stored ROT-13 "encrypted".
      # NOTE(review): str.decode(u'rot-13') only exists on Python 2 byte
      # strings — this loop is Python 2 specific.
      try:
        value_name = registry_value.name.decode(u'rot-13')
      except UnicodeEncodeError as exception:
        logging.debug((
            u'Unable to decode UserAssist string: {0:s} with error: {1:s}.\n'
            u'Attempting piecewise decoding.').format(
                registry_value.name, exception))

        # Fall back to decoding character by character, keeping any
        # non-ASCII characters as-is.
        characters = []
        for char in registry_value.name:
          if ord(char) < 128:
            try:
              characters.append(char.decode(u'rot-13'))
            except UnicodeEncodeError:
              characters.append(char)
          else:
            characters.append(char)

        value_name = u''.join(characters)

      if format_version == 5:
        path_segments = value_name.split(u'\\')
        for segment_index in range(0, len(path_segments)):
          # Remove the { } from the path segment to get the GUID.
          guid = path_segments[segment_index][1:-1]
          path_segments[segment_index] = known_folder_ids.PATHS.get(
              guid, path_segments[segment_index])

        value_name = u'\\'.join(path_segments)
        # Check if we might need to substitute values.
        if u'%' in value_name:
          path_attributes = parser_mediator.knowledge_base.GetPathAttributes()
          value_name = environ_expand.ExpandWindowsEnvironmentVariables(
              value_name, path_attributes)

      value_data_size = len(registry_value.data)
      if not registry_value.DataIsBinaryData():
        parser_mediator.ProduceExtractionError(
            u'Unsupported value data type: {0:s}'.format(
                registry_value.data_type_string))

      elif value_name == u'UEME_CTLSESSION':
        # Session control entry carries no execution data; skip silently.
        pass

      elif format_version == 3:
        if value_data_size != self._USERASSIST_V3_STRUCT.sizeof():
          parser_mediator.ProduceExtractionError(
              u'Unsupported value data size: {0:d}'.format(value_data_size))

        else:
          parsed_data = self._USERASSIST_V3_STRUCT.parse(registry_value.data)
          filetime = parsed_data.get(u'timestamp', 0)
          count = parsed_data.get(u'count', 0)

          # The version 3 count is biased by 5.
          if count > 5:
            count -= 5

          values_dict = {}
          values_dict[value_name] = u'[Count: {0:d}]'.format(count)
          event_object = UserAssistWindowsRegistryEvent(
              filetime, count_subkey.path, registry_value.offset, values_dict)
          parser_mediator.ProduceEvent(event_object)

      elif format_version == 5:
        if value_data_size != self._USERASSIST_V5_STRUCT.sizeof():
          parser_mediator.ProduceExtractionError(
              u'Unsupported value data size: {0:d}'.format(value_data_size))

        # NOTE(review): unlike the version 3 branch there is no else here, so
        # the data is parsed even after a size mismatch was reported —
        # confirm whether that is intentional.
        parsed_data = self._USERASSIST_V5_STRUCT.parse(registry_value.data)

        userassist_entry_index += 1
        count = parsed_data.get(u'count', None)
        app_focus_count = parsed_data.get(u'app_focus_count', None)
        focus_duration = parsed_data.get(u'focus_duration', None)
        filetime = parsed_data.get(u'timestamp', 0)

        values_dict = {}
        values_dict[value_name] = (
            u'[UserAssist entry: {0:d}, Count: {1:d}, '
            u'Application focus count: {2:d}, Focus duration: {3:d}]').format(
                userassist_entry_index, count, app_focus_count,
                focus_duration)

        event_object = UserAssistWindowsRegistryEvent(
            filetime, count_subkey.path, count_subkey.offset, values_dict)
        parser_mediator.ProduceEvent(event_object)
class UtmpxParser(interface.FileObjectParser):
  """Parser for UTMPX files."""

  NAME = 'utmpx'
  DESCRIPTION = 'Parser for UTMPX files.'

  # INFO: Type is suppose to be a short (2 bytes),
  # however if we analyze the file it is always
  # byte follow by 3 bytes with \x00 value.
  # Fixed-size MacOS utmpx record; strings are NUL-padded fixed buffers.
  _UTMPX_ENTRY = construct.Struct(
      'utmpx_mac',
      construct.String('user', 256),
      construct.ULInt32('id'),
      construct.String('tty_name', 32),
      construct.ULInt32('pid'),
      construct.ULInt16('status_type'),
      construct.ULInt16('unknown'),
      construct.ULInt32('timestamp'),
      construct.ULInt32('microseconds'),
      construct.String('hostname', 256),
      construct.Padding(64))

  # On-disk size of a single record.
  _UTMPX_ENTRY_SIZE = _UTMPX_ENTRY.sizeof()

  # status_type of the SIGNAL/"header" record at the start of the file.
  _STATUS_TYPE_SIGNATURE = 10

  def _ReadEntry(self, parser_mediator, file_object):
    """Reads an UTMPX entry.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.

    Returns:
      bool: True if the UTMPX entry was successfully read.
    """
    data = file_object.read(self._UTMPX_ENTRY_SIZE)
    if len(data) != self._UTMPX_ENTRY_SIZE:
      # Short read: end of file (or truncated record); stop iteration.
      return False

    try:
      entry_struct = self._UTMPX_ENTRY.parse(data)
    except (IOError, construct.FieldError) as exception:
      logging.warning(
          'Unable to parse MacOS UTMPX entry with error: {0!s}'.format(
              exception))
      return False

    # Fixed-size string fields are NUL padded; keep only the prefix before
    # the first NUL byte.
    user, _, _ = entry_struct.user.partition(b'\x00')
    if not user:
      user = '******'

    terminal, _, _ = entry_struct.tty_name.partition(b'\x00')
    if not terminal:
      terminal = 'N/A'

    computer_name, _, _ = entry_struct.hostname.partition(b'\x00')
    if not computer_name:
      computer_name = 'localhost'

    event_data = UtmpxMacOSEventData()
    event_data.computer_name = computer_name
    # NOTE(review): tell() here is the offset AFTER the record was read, not
    # the record's start offset — confirm this is the intended value.
    event_data.offset = file_object.tell()
    event_data.status_type = entry_struct.status_type
    event_data.terminal = terminal
    event_data.user = user

    # Combine seconds and microseconds into a single POSIX microsecond
    # timestamp.
    timestamp = (entry_struct.timestamp * 1000000) + entry_struct.microseconds
    date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
        timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_START)
    parser_mediator.ProduceEventWithEventData(event, event_data)

    return True

  def _VerifyStructure(self, file_object):
    """Verify that we are dealing with an UTMPX entry.

    Args:
      file_object (dfvfs.FileIO): a file-like object.

    Returns:
      bool: True if it is a UTMPX entry or False otherwise.
    """
    # First entry is a SIGNAL entry of the file ("header").
    try:
      header_struct = self._UTMPX_ENTRY.parse_stream(file_object)
    except (IOError, construct.FieldError):
      return False
    user, _, _ = header_struct.user.partition(b'\x00')

    # The UTMPX_ENTRY structure will often successfully compile on various
    # structures, such as binary plist files, and thus we need to do some
    # additional validation. The first one is to check if the user name
    # can be converted into a Unicode string, otherwise we can assume
    # we are dealing with non UTMPX data.
    try:
      user.decode('utf-8')
    except UnicodeDecodeError:
      return False

    # The header record must carry the literal signature user name.
    if user != b'utmpx-1.00':
      return False

    if header_struct.status_type != self._STATUS_TYPE_SIGNATURE:
      return False

    # The header record carries no timestamp or process information.
    if (header_struct.timestamp != 0 or header_struct.microseconds != 0 or
        header_struct.pid != 0):
      return False

    tty_name, _, _ = header_struct.tty_name.partition(b'\x00')
    hostname, _, _ = header_struct.hostname.partition(b'\x00')
    if tty_name or hostname:
      return False

    return True

  def ParseFileObject(self, parser_mediator, file_object, **kwargs):
    """Parses an UTMPX file-like object.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.

    Raises:
      UnableToParseFile: when the file cannot be parsed.
    """
    if not self._VerifyStructure(file_object):
      raise errors.UnableToParseFile('The file is not an UTMPX file.')

    # _VerifyStructure consumed the header record; read data records until
    # a short read or parse failure.
    while self._ReadEntry(parser_mediator, file_object):
      pass
class SAMUsersWindowsRegistryPlugin(interface.WindowsRegistryPlugin):
  """Windows Registry plugin for SAM Users Account information."""

  NAME = u'windows_sam_users'
  DESCRIPTION = u'Parser for SAM Users and Names Registry keys.'

  FILTERS = frozenset([
      interface.WindowsRegistryKeyPathFilter(
          u'HKEY_LOCAL_MACHINE\\SAM\\Domains\\Account\\Users')])

  # Layout of the binary F value: last login and password reset FILETIMEs,
  # the account RID and the login count, separated by unknown padding.
  F_VALUE_STRUCT = construct.Struct(
      u'f_struct',
      construct.Padding(8),
      construct.ULInt64(u'last_login'),
      construct.Padding(8),
      construct.ULInt64(u'password_reset'),
      construct.Padding(16),
      construct.ULInt16(u'rid'),
      construct.Padding(16),
      construct.ULInt8(u'login_count'))

  # First 11 32-bit values of the V value header; the offset/size pairs of
  # the name (indices 3, 4), full name (6, 7) and comments (9, 10) fields
  # are read from this array below.
  V_VALUE_HEADER = construct.Struct(
      u'v_header', construct.Array(11, construct.ULInt32(u'values')))

  # Data offsets in the V value are relative to the end of its header.
  V_VALUE_HEADER_SIZE = 0xCC

  _SOURCE_APPEND = u'User Account Information'

  def _ParseFValue(self, key):
    """Parses F value and returns parsed F data construct object.

    Args:
      key: Registry key (instance of dfwinreg.WinRegistryKey).

    Returns:
      f_data: Construct parsed F value containing rid, login count,
              and timestamp information, or None when the F value is
              missing or cannot be parsed.
    """
    f_value = key.GetValueByName(u'F')
    if not f_value:
      logging.error(u'Unable to locate F Value in key.')
      return
    try:
      f_data = self.F_VALUE_STRUCT.parse(f_value.data)
    except construct.FieldError as exception:
      # BUG FIX: '{:s}'.format(exception) raises TypeError for non-string
      # objects; use the !s conversion instead.
      logging.error(
          u'Unable to extract F value data: {0!s}'.format(exception))
      return
    return f_data

  def _ParseVValue(self, key):
    """Parses V value and returns name, fullname, and comments data.

    Args:
      key: Registry key (instance of dfwinreg.WinRegistryKey).

    Returns:
      name: Name data parsed with name start and length values.
      fullname: Fullname data parsed with fullname start and length values.
      comments: Comments data parsed with comments start and length values.
      Returns None when the V value is missing or cannot be parsed.
    """
    v_value = key.GetValueByName(u'V')
    if not v_value:
      logging.error(u'Unable to locate V Value in key.')
      return
    try:
      structure = self.V_VALUE_HEADER.parse(v_value.data)
    except construct.FieldError as exception:
      # BUG FIX: use !s conversion; '{0:s}' cannot format an exception
      # object.
      logging.error(
          u'Unable to extract V value header data with error: {0!s}'.format(
              exception))
      return

    # structure.values()[0] is the 11-element 'values' array; offsets are
    # relative to the end of the 0xCC-byte header.
    name_offset = structure.values()[0][3] + self.V_VALUE_HEADER_SIZE
    full_name_offset = structure.values()[0][6] + self.V_VALUE_HEADER_SIZE
    comments_offset = structure.values()[0][9] + self.V_VALUE_HEADER_SIZE

    name_raw = v_value.data[
        name_offset:name_offset + structure.values()[0][4]]
    full_name_raw = v_value.data[
        full_name_offset:full_name_offset + structure.values()[0][7]]
    comments_raw = v_value.data[
        comments_offset:comments_offset + structure.values()[0][10]]

    name = binary.ReadUTF16(name_raw)
    full_name = binary.ReadUTF16(full_name_raw)
    comments = binary.ReadUTF16(comments_raw)
    return name, full_name, comments

  def GetEntries(self, parser_mediator, registry_key, **kwargs):
    """Collect data from Users and Names and produce event objects.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      registry_key: A Windows Registry key (instance of
                    dfwinreg.WinRegistryKey).
    """
    name_key = registry_key.GetSubkeyByName(u'Names')
    if not name_key:
      parser_mediator.ProduceParseError(u'Unable to locate Names key.')
      return
    # Map user name to the Names subkey last written time (account creation).
    values = [(v.name, v.last_written_time) for v in name_key.GetSubkeys()]
    name_dict = dict(values)

    for subkey in registry_key.GetSubkeys():
      if subkey.name == u'Names':
        continue

      parsed_v_value = self._ParseVValue(subkey)
      if not parsed_v_value:
        # BUG FIX: use !s; '{0:s}' cannot format a Registry key object.
        parser_mediator.ProduceParseError(
            u'Unable to parse SAM key: {0!s} V value.'.format(subkey))
        return

      username = parsed_v_value[0]
      full_name = parsed_v_value[1]
      comments = parsed_v_value[2]

      values_dict = {u'user_guid': subkey.name}
      if username:
        values_dict[u'username'] = username
      if full_name:
        values_dict[u'full_name'] = full_name
      if comments:
        values_dict[u'comments'] = comments

      if name_dict:
        account_create_time = name_dict.get(username, 0)
      else:
        account_create_time = 0

      # BUG FIX: _ParseFValue returns None when the F value is missing or
      # malformed; guard against dereferencing None.
      f_data = self._ParseFValue(subkey)
      if f_data is None:
        parser_mediator.ProduceParseError(
            u'Unable to parse SAM key: {0!s} F value.'.format(subkey))
      else:
        values_dict[u'account_rid'] = f_data.rid
        values_dict[u'login_count'] = f_data.login_count

      if account_create_time > 0:
        event_object = windows_events.WindowsRegistryEvent(
            account_create_time, registry_key.path, values_dict,
            usage=eventdata.EventTimestamp.ACCOUNT_CREATED,
            offset=registry_key.offset, source_append=self._SOURCE_APPEND)
        parser_mediator.ProduceEvent(event_object)

      if f_data is not None and f_data.last_login > 0:
        event_object = windows_events.WindowsRegistryEvent(
            f_data.last_login, registry_key.path, values_dict,
            usage=eventdata.EventTimestamp.LAST_LOGIN_TIME,
            offset=registry_key.offset, source_append=self._SOURCE_APPEND)
        parser_mediator.ProduceEvent(event_object)

      if f_data is not None and f_data.password_reset > 0:
        event_object = windows_events.WindowsRegistryEvent(
            f_data.password_reset, registry_key.path, values_dict,
            usage=eventdata.EventTimestamp.LAST_PASSWORD_RESET,
            offset=registry_key.offset, source_append=self._SOURCE_APPEND)
        parser_mediator.ProduceEvent(event_object)
"data" / c.Prefixed(c.Int32ul, c.GreedyBytes), ) VendorTrust = c.Transformed( c.BitStruct( "reserved" / c.Default(c.BitsInteger(9), 0), "show_vendor_string" / c.Flag, "require_user_click" / c.Flag, "red_background" / c.Flag, "delay" / c.BitsInteger(4), ), _transform_vendor_trust, 2, _transform_vendor_trust, 2) VendorHeader = c.Struct( "_start_offset" / c.Tell, "magic" / c.Const(b"TRZV"), "_header_len" / c.Padding(4), "expiry" / c.Int32ul, "version" / c.Struct( "major" / c.Int8ul, "minor" / c.Int8ul, ), "vendor_sigs_required" / c.Int8ul, "vendor_sigs_n" / c.Rebuild(c.Int8ul, c.len_(c.this.pubkeys)), "vendor_trust" / VendorTrust, "reserved" / c.Padding(14), "pubkeys" / c.Bytes(32)[c.this.vendor_sigs_n], "vendor_string" / c.Aligned(4, c.PascalString(c.Int8ul, "utf-8")), "vendor_image" / Toif, "_data_end_offset" / c.Tell, c.Padding(-(c.this._data_end_offset + 65) % 512), "sigmask" / c.Byte,
class UserAssistPlugin(interface.WindowsRegistryPlugin):
  """Plugin that parses an UserAssist key."""

  NAME = 'userassist'
  DESCRIPTION = 'Parser for User Assist Registry data.'

  # One path filter per known UserAssist {GUID} subkey.
  FILTERS = frozenset([
      UserAssistWindowsRegistryKeyPathFilter(
          'FA99DFC7-6AC2-453A-A5E2-5E2AFF4507BD'),
      UserAssistWindowsRegistryKeyPathFilter(
          'F4E57C4B-2036-45F0-A9AB-443BCFE33D9F'),
      UserAssistWindowsRegistryKeyPathFilter(
          'F2A1CB5A-E3CC-4A2E-AF9D-505A7009D442'),
      UserAssistWindowsRegistryKeyPathFilter(
          'CEBFF5CD-ACE2-4F4F-9178-9926F41749EA'),
      UserAssistWindowsRegistryKeyPathFilter(
          'CAA59E3C-4792-41A5-9909-6A6A8D32490E'),
      UserAssistWindowsRegistryKeyPathFilter(
          'B267E3AD-A825-4A09-82B9-EEC22AA3B847'),
      UserAssistWindowsRegistryKeyPathFilter(
          'A3D53349-6E61-4557-8FC7-0028EDCEEBF6'),
      UserAssistWindowsRegistryKeyPathFilter(
          '9E04CAB2-CC14-11DF-BB8C-A2F1DED72085'),
      UserAssistWindowsRegistryKeyPathFilter(
          '75048700-EF1F-11D0-9888-006097DEACF9'),
      UserAssistWindowsRegistryKeyPathFilter(
          '5E6AB780-7743-11CF-A12B-00AA004AE837'),
      UserAssistWindowsRegistryKeyPathFilter(
          '0D6D4F41-2994-4BA0-8FEF-620E43CD2812'),
      UserAssistWindowsRegistryKeyPathFilter(
          'BCB48336-4DDD-48FF-BB0B-D3190DACB3E2')
  ])

  URLS = [
      'http://blog.didierstevens.com/programs/userassist/',
      'https://code.google.com/p/winreg-kb/wiki/UserAssistKeys',
      'http://intotheboxes.files.wordpress.com/2010/04'
      '/intotheboxes_2010_q1.pdf'
  ]

  # UserAssist format version used in Windows 2000, XP, 2003, Vista.
  # Layout: 4 unknown bytes, 32-bit execution count, 64-bit FILETIME.
  _USERASSIST_V3_STRUCT = construct.Struct(
      'userassist_entry',
      construct.Padding(4),
      construct.ULInt32('number_of_executions'),
      construct.ULInt64('timestamp'))

  # UserAssist format version used in Windows 2008, 7, 8.
  _USERASSIST_V5_STRUCT = construct.Struct(
      'userassist_entry',
      construct.Padding(4),
      construct.ULInt32('number_of_executions'),
      construct.ULInt32('application_focus_count'),
      construct.ULInt32('application_focus_duration'),
      construct.Padding(44),
      construct.ULInt64('timestamp'),
      construct.Padding(4))

  def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
    """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
    version_value = registry_key.GetValueByName('Version')
    count_subkey = registry_key.GetSubkeyByName('Count')

    if not version_value:
      parser_mediator.ProduceExtractionError('missing version value')
      return

    if not version_value.DataIsInteger():
      parser_mediator.ProduceExtractionError(
          'unsupported version value data type')
      return

    format_version = version_value.GetDataAsObject()
    if format_version not in (3, 5):
      parser_mediator.ProduceExtractionError(
          'unsupported format version: {0:d}'.format(format_version))
      return

    if not count_subkey:
      parser_mediator.ProduceExtractionError('missing count subkey')
      return

    userassist_entry_index = 0

    for registry_value in count_subkey.GetValues():
      # Value names are stored ROT-13 "encrypted".
      # NOTE(review): str.decode('rot-13') only exists on Python 2 byte
      # strings — this decoding is Python 2 specific.
      try:
        value_name = registry_value.name.decode('rot-13')
      except UnicodeEncodeError as exception:
        logging.debug((
            'Unable to decode UserAssist string: {0:s} with error: {1!s}.\n'
            'Attempting piecewise decoding.').format(
                registry_value.name, exception))

        # Fall back to decoding character by character, keeping any
        # non-ASCII characters as-is.
        characters = []
        for char in registry_value.name:
          if ord(char) < 128:
            try:
              characters.append(char.decode('rot-13'))
            except UnicodeEncodeError:
              characters.append(char)
          else:
            characters.append(char)

        value_name = ''.join(characters)

      if format_version == 5:
        path_segments = value_name.split('\\')
        for segment_index in range(0, len(path_segments)):
          # Remove the { } from the path segment to get the GUID.
          guid = path_segments[segment_index][1:-1]
          path_segments[segment_index] = known_folder_ids.PATHS.get(
              guid, path_segments[segment_index])

        value_name = '\\'.join(path_segments)
        # Check if we might need to substitute values.
        if '%' in value_name:
          # TODO: fix missing self._knowledge_base
          # pylint: disable=no-member
          environment_variables = self._knowledge_base.GetEnvironmentVariables(
          )
          value_name = path_helper.PathHelper.ExpandWindowsPath(
              value_name, environment_variables)

      value_data_size = len(registry_value.data)
      if not registry_value.DataIsBinaryData():
        parser_mediator.ProduceExtractionError(
            'unsupported value data type: {0:s}'.format(
                registry_value.data_type_string))

      elif value_name == 'UEME_CTLSESSION':
        # Session control entry carries no execution data; skip silently.
        pass

      elif format_version == 3:
        if value_data_size != self._USERASSIST_V3_STRUCT.sizeof():
          parser_mediator.ProduceExtractionError(
              'unsupported value data size: {0:d}'.format(
                  value_data_size))

        else:
          parsed_data = self._USERASSIST_V3_STRUCT.parse(
              registry_value.data)

          timestamp = parsed_data.get('timestamp', None)
          number_of_executions = parsed_data.get(
              'number_of_executions', None)
          # The version 3 count is biased by 5.
          if number_of_executions is not None and number_of_executions > 5:
            number_of_executions -= 5

          event_data = UserAssistWindowsRegistryEventData()
          event_data.key_path = count_subkey.path
          event_data.number_of_executions = number_of_executions
          event_data.offset = registry_value.offset
          event_data.value_name = value_name

          if not timestamp:
            date_time = dfdatetime_semantic_time.SemanticTime(
                'Not set')
          else:
            date_time = dfdatetime_filetime.Filetime(
                timestamp=timestamp)

          # TODO: check if last written is correct.
          event = time_events.DateTimeValuesEvent(
              date_time, definitions.TIME_DESCRIPTION_WRITTEN)
          parser_mediator.ProduceEventWithEventData(
              event, event_data)

      elif format_version == 5:
        if value_data_size != self._USERASSIST_V5_STRUCT.sizeof():
          parser_mediator.ProduceExtractionError(
              'unsupported value data size: {0:d}'.format(
                  value_data_size))

        # NOTE(review): unlike the version 3 branch there is no else here,
        # so the data is parsed even after a size mismatch was reported —
        # confirm whether that is intentional.
        parsed_data = self._USERASSIST_V5_STRUCT.parse(
            registry_value.data)

        userassist_entry_index += 1
        timestamp = parsed_data.get('timestamp', None)

        event_data = UserAssistWindowsRegistryEventData()
        event_data.application_focus_count = parsed_data.get(
            'application_focus_count', None)
        event_data.application_focus_duration = parsed_data.get(
            'application_focus_duration', None)
        event_data.entry_index = userassist_entry_index
        event_data.key_path = count_subkey.path
        event_data.number_of_executions = parsed_data.get(
            'number_of_executions', None)
        event_data.offset = count_subkey.offset
        event_data.value_name = value_name

        if not timestamp:
          date_time = dfdatetime_semantic_time.SemanticTime(
              'Not set')
        else:
          date_time = dfdatetime_filetime.Filetime(
              timestamp=timestamp)

        # TODO: check if last written is correct.
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)
class AppCompatCacheKeyParser(object):
  """Class that parses the Application Compatibility Cache data."""

  FORMAT_TYPE_2000 = 1
  FORMAT_TYPE_XP = 2
  FORMAT_TYPE_2003 = 3
  FORMAT_TYPE_VISTA = 4
  FORMAT_TYPE_7 = 5
  FORMAT_TYPE_8 = 6

  # AppCompatCache format signature used in Windows XP.
  _HEADER_SIGNATURE_XP = 0xdeadbeef

  # AppCompatCache format used in Windows XP.
  _HEADER_XP_32BIT_STRUCT = construct.Struct(
      'appcompatcache_header_xp',
      construct.ULInt32('signature'),
      construct.ULInt32('number_of_cached_entries'),
      construct.ULInt32('unknown1'),
      construct.ULInt32('unknown2'),
      construct.Padding(384))

  _CACHED_ENTRY_XP_32BIT_STRUCT = construct.Struct(
      'appcompatcache_cached_entry_xp_32bit',
      construct.Array(528, construct.Byte('path')),
      construct.ULInt64('last_modification_time'),
      construct.ULInt64('file_size'),
      construct.ULInt64('last_update_time'))

  # AppCompatCache format signature used in Windows 2003, Vista and 2008.
  _HEADER_SIGNATURE_2003 = 0xbadc0ffe

  # AppCompatCache format used in Windows 2003.
  _HEADER_2003_STRUCT = construct.Struct(
      'appcompatcache_header_2003',
      construct.ULInt32('signature'),
      construct.ULInt32('number_of_cached_entries'))

  # BUG FIX: ParseHeader references _HEADER_VISTA_STRUCT for
  # FORMAT_TYPE_VISTA but no such struct was defined, which raised
  # AttributeError.  Windows Vista and 2008 share the 2003 header layout
  # (the header signature above is the same), so alias it.
  _HEADER_VISTA_STRUCT = _HEADER_2003_STRUCT

  _CACHED_ENTRY_2003_32BIT_STRUCT = construct.Struct(
      'appcompatcache_cached_entry_2003_32bit',
      construct.ULInt16('path_size'),
      construct.ULInt16('maximum_path_size'),
      construct.ULInt32('path_offset'),
      construct.ULInt64('last_modification_time'),
      construct.ULInt64('file_size'))

  _CACHED_ENTRY_2003_64BIT_STRUCT = construct.Struct(
      'appcompatcache_cached_entry_2003_64bit',
      construct.ULInt16('path_size'),
      construct.ULInt16('maximum_path_size'),
      construct.ULInt32('unknown1'),
      construct.ULInt64('path_offset'),
      construct.ULInt64('last_modification_time'),
      construct.ULInt64('file_size'))

  # AppCompatCache format used in Windows Vista and 2008.
  _CACHED_ENTRY_VISTA_32BIT_STRUCT = construct.Struct(
      'appcompatcache_cached_entry_vista_32bit',
      construct.ULInt16('path_size'),
      construct.ULInt16('maximum_path_size'),
      construct.ULInt32('path_offset'),
      construct.ULInt64('last_modification_time'),
      construct.ULInt32('insertion_flags'),
      construct.ULInt32('shim_flags'))

  _CACHED_ENTRY_VISTA_64BIT_STRUCT = construct.Struct(
      'appcompatcache_cached_entry_vista_64bit',
      construct.ULInt16('path_size'),
      construct.ULInt16('maximum_path_size'),
      construct.ULInt32('unknown1'),
      construct.ULInt64('path_offset'),
      construct.ULInt64('last_modification_time'),
      construct.ULInt32('insertion_flags'),
      construct.ULInt32('shim_flags'))

  # AppCompatCache format signature used in Windows 7 and 2008 R2.
  _HEADER_SIGNATURE_7 = 0xbadc0fee

  # AppCompatCache format used in Windows 7 and 2008 R2.
  _HEADER_7_STRUCT = construct.Struct(
      'appcompatcache_header_7',
      construct.ULInt32('signature'),
      construct.ULInt32('number_of_cached_entries'),
      construct.Padding(120))

  _CACHED_ENTRY_7_32BIT_STRUCT = construct.Struct(
      'appcompatcache_cached_entry_7_32bit',
      construct.ULInt16('path_size'),
      construct.ULInt16('maximum_path_size'),
      construct.ULInt32('path_offset'),
      construct.ULInt64('last_modification_time'),
      construct.ULInt32('insertion_flags'),
      construct.ULInt32('shim_flags'),
      construct.ULInt32('data_size'),
      construct.ULInt32('data_offset'))

  _CACHED_ENTRY_7_64BIT_STRUCT = construct.Struct(
      'appcompatcache_cached_entry_7_64bit',
      construct.ULInt16('path_size'),
      construct.ULInt16('maximum_path_size'),
      construct.ULInt32('unknown1'),
      construct.ULInt64('path_offset'),
      construct.ULInt64('last_modification_time'),
      construct.ULInt32('insertion_flags'),
      construct.ULInt32('shim_flags'),
      construct.ULInt64('data_size'),
      construct.ULInt64('data_offset'))

  # AppCompatCache format used in Windows 8.0 and 8.1.
  _HEADER_SIGNATURE_8 = 0x00000080

  _HEADER_8_STRUCT = construct.Struct(
      'appcompatcache_header_8',
      construct.ULInt32('signature'),
      construct.Padding(124))

  _CACHED_ENTRY_HEADER_8_STRUCT = construct.Struct(
      'appcompatcache_cached_entry_header_8',
      construct.ULInt32('signature'),
      construct.ULInt32('unknown1'),
      construct.ULInt32('cached_entry_data_size'),
      construct.ULInt16('path_size'))

  # AppCompatCache format used in Windows 8.0.
  _CACHED_ENTRY_SIGNATURE_8_0 = '00ts'

  # AppCompatCache format used in Windows 8.1.
  # NOTE(review): these signatures are native (byte) strings; the slice
  # comparisons below assume Python 2 string semantics.
  _CACHED_ENTRY_SIGNATURE_8_1 = '10ts'

  def CheckSignature(self, value_data):
    """Parses the signature.

    Args:
      value_data: a binary string containing the value data.

    Returns:
      The format type if successful or None otherwise.
    """
    signature = construct.ULInt32('signature').parse(value_data)
    if signature == self._HEADER_SIGNATURE_XP:
      return self.FORMAT_TYPE_XP

    elif signature == self._HEADER_SIGNATURE_2003:
      # TODO: determine which format version is used (2003 or Vista).
      return self.FORMAT_TYPE_2003

    elif signature == self._HEADER_SIGNATURE_7:
      return self.FORMAT_TYPE_7

    elif signature == self._HEADER_SIGNATURE_8:
      # For the Windows 8 format the signature value (0x80 == 128) equals
      # the header size, so it doubles as the offset of the first cached
      # entry, whose 4-byte entry signature is checked here.
      if value_data[signature:signature + 4] in [
          self._CACHED_ENTRY_SIGNATURE_8_0,
          self._CACHED_ENTRY_SIGNATURE_8_1]:
        return self.FORMAT_TYPE_8

  def ParseHeader(self, format_type, value_data):
    """Parses the header.

    Args:
      format_type: integer value that contains the format type.
      value_data: a binary string containing the value data.

    Returns:
      A header object (instance of AppCompatCacheHeader).

    Raises:
      RuntimeError: if the format type is not supported.
    """
    if format_type not in [
        self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003, self.FORMAT_TYPE_VISTA,
        self.FORMAT_TYPE_7, self.FORMAT_TYPE_8]:
      raise RuntimeError(
          u'Unsupported format type: {0:d}'.format(format_type))

    # TODO: change to collections.namedtuple or use __slots__ if the overhead
    # of a regular object becomes a problem.
    header_object = AppCompatCacheHeader()

    if format_type == self.FORMAT_TYPE_XP:
      header_object.header_size = self._HEADER_XP_32BIT_STRUCT.sizeof()
      header_struct = self._HEADER_XP_32BIT_STRUCT.parse(value_data)

    elif format_type == self.FORMAT_TYPE_2003:
      header_object.header_size = self._HEADER_2003_STRUCT.sizeof()
      header_struct = self._HEADER_2003_STRUCT.parse(value_data)

    elif format_type == self.FORMAT_TYPE_VISTA:
      header_object.header_size = self._HEADER_VISTA_STRUCT.sizeof()
      header_struct = self._HEADER_VISTA_STRUCT.parse(value_data)

    elif format_type == self.FORMAT_TYPE_7:
      header_object.header_size = self._HEADER_7_STRUCT.sizeof()
      header_struct = self._HEADER_7_STRUCT.parse(value_data)

    elif format_type == self.FORMAT_TYPE_8:
      header_object.header_size = self._HEADER_8_STRUCT.sizeof()
      header_struct = self._HEADER_8_STRUCT.parse(value_data)

    # The Windows 8 header does not store the number of cached entries.
    if format_type in [
        self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003, self.FORMAT_TYPE_VISTA,
        self.FORMAT_TYPE_7]:
      header_object.number_of_cached_entries = header_struct.get(
          'number_of_cached_entries')

    return header_object

  def DetermineCacheEntrySize(
      self, format_type, value_data, cached_entry_offset):
    """Parses a cached entry.

    Args:
      format_type: integer value that contains the format type.
      value_data: a binary string containing the value data.
      cached_entry_offset: integer value that contains the offset of
                           the first cached entry data relative to the start
                           of the value data.

    Returns:
      The cached entry size if successful or None otherwise.

    Raises:
      RuntimeError: if the format type is not supported.
    """
    if format_type not in [
        self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003, self.FORMAT_TYPE_VISTA,
        self.FORMAT_TYPE_7, self.FORMAT_TYPE_8]:
      raise RuntimeError(
          u'Unsupported format type: {0:d}'.format(format_type))

    cached_entry_data = value_data[cached_entry_offset:]
    cached_entry_size = 0

    if format_type == self.FORMAT_TYPE_XP:
      cached_entry_size = self._CACHED_ENTRY_XP_32BIT_STRUCT.sizeof()

    elif format_type in [
        self.FORMAT_TYPE_2003, self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7]:
      path_size = construct.ULInt16('path_size').parse(
          cached_entry_data[0:2])
      maximum_path_size = construct.ULInt16('maximum_path_size').parse(
          cached_entry_data[2:4])
      path_offset_32bit = construct.ULInt32('path_offset').parse(
          cached_entry_data[4:8])
      # BUG FIX: the 64-bit path offset spans the 8 bytes at [8:16]; it was
      # previously parsed with ULInt32, which only read the low 4 bytes and
      # could misclassify 64-bit entries whose low dword is 0.
      path_offset_64bit = construct.ULInt64('path_offset').parse(
          cached_entry_data[8:16])

      if maximum_path_size < path_size:
        logging.error(u'Path size value out of bounds.')
        return

      # A valid path is followed by exactly one UTF-16 NUL character
      # (2 bytes) of end-of-string space.
      path_end_of_string_size = maximum_path_size - path_size
      if path_size == 0 or path_end_of_string_size != 2:
        logging.error(u'Unsupported path size values.')
        return

      # Assume the entry is 64-bit if the 32-bit path offset is 0 and
      # the 64-bit path offset is set.
      if path_offset_32bit == 0 and path_offset_64bit != 0:
        if format_type == self.FORMAT_TYPE_2003:
          cached_entry_size = self._CACHED_ENTRY_2003_64BIT_STRUCT.sizeof()
        elif format_type == self.FORMAT_TYPE_VISTA:
          cached_entry_size = self._CACHED_ENTRY_VISTA_64BIT_STRUCT.sizeof()
        elif format_type == self.FORMAT_TYPE_7:
          cached_entry_size = self._CACHED_ENTRY_7_64BIT_STRUCT.sizeof()

      else:
        if format_type == self.FORMAT_TYPE_2003:
          cached_entry_size = self._CACHED_ENTRY_2003_32BIT_STRUCT.sizeof()
        elif format_type == self.FORMAT_TYPE_VISTA:
          cached_entry_size = self._CACHED_ENTRY_VISTA_32BIT_STRUCT.sizeof()
        elif format_type == self.FORMAT_TYPE_7:
          cached_entry_size = self._CACHED_ENTRY_7_32BIT_STRUCT.sizeof()

    elif format_type == self.FORMAT_TYPE_8:
      cached_entry_size = self._CACHED_ENTRY_HEADER_8_STRUCT.sizeof()

    return cached_entry_size

  def ParseCachedEntry(
      self, format_type, value_data, cached_entry_offset, cached_entry_size):
    """Parses a cached entry.

    Args:
      format_type: integer value that contains the format type.
      value_data: a binary string containing the value data.
      cached_entry_offset: integer value that contains the offset of
                           the cached entry data relative to the start of the
                           value data.
      cached_entry_size: integer value that contains the cached entry data
                         size.

    Returns:
      A cached entry object (instance of AppCompatCacheCachedEntry).

    Raises:
      RuntimeError: if the format type is not supported.
    """
    if format_type not in [
        self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003, self.FORMAT_TYPE_VISTA,
        self.FORMAT_TYPE_7, self.FORMAT_TYPE_8]:
      raise RuntimeError(
          u'Unsupported format type: {0:d}'.format(format_type))

    cached_entry_data = value_data[
        cached_entry_offset:cached_entry_offset + cached_entry_size]

    # Select the struct whose sizeof() matches the detected entry size.
    cached_entry_struct = None
    if format_type == self.FORMAT_TYPE_XP:
      if cached_entry_size == self._CACHED_ENTRY_XP_32BIT_STRUCT.sizeof():
        cached_entry_struct = self._CACHED_ENTRY_XP_32BIT_STRUCT.parse(
            cached_entry_data)

    elif format_type == self.FORMAT_TYPE_2003:
      if cached_entry_size == self._CACHED_ENTRY_2003_32BIT_STRUCT.sizeof():
        cached_entry_struct = self._CACHED_ENTRY_2003_32BIT_STRUCT.parse(
            cached_entry_data)
      elif cached_entry_size == self._CACHED_ENTRY_2003_64BIT_STRUCT.sizeof():
        cached_entry_struct = self._CACHED_ENTRY_2003_64BIT_STRUCT.parse(
            cached_entry_data)

    elif format_type == self.FORMAT_TYPE_VISTA:
      if cached_entry_size == self._CACHED_ENTRY_VISTA_32BIT_STRUCT.sizeof():
        cached_entry_struct = self._CACHED_ENTRY_VISTA_32BIT_STRUCT.parse(
            cached_entry_data)
      elif cached_entry_size == (
          self._CACHED_ENTRY_VISTA_64BIT_STRUCT.sizeof()):
        cached_entry_struct = self._CACHED_ENTRY_VISTA_64BIT_STRUCT.parse(
            cached_entry_data)

    elif format_type == self.FORMAT_TYPE_7:
      if cached_entry_size == self._CACHED_ENTRY_7_32BIT_STRUCT.sizeof():
        cached_entry_struct = self._CACHED_ENTRY_7_32BIT_STRUCT.parse(
            cached_entry_data)
      elif cached_entry_size == self._CACHED_ENTRY_7_64BIT_STRUCT.sizeof():
        cached_entry_struct = self._CACHED_ENTRY_7_64BIT_STRUCT.parse(
            cached_entry_data)

    elif format_type == self.FORMAT_TYPE_8:
      if cached_entry_data[0:4] not in [
          self._CACHED_ENTRY_SIGNATURE_8_0,
          self._CACHED_ENTRY_SIGNATURE_8_1]:
        raise RuntimeError(u'Unsupported cache entry signature')

      # The Windows 8 entry is variable size: re-read it using the size
      # stored in the entry header (12 header bytes + data size).
      if cached_entry_size == self._CACHED_ENTRY_HEADER_8_STRUCT.sizeof():
        cached_entry_struct = self._CACHED_ENTRY_HEADER_8_STRUCT.parse(
            cached_entry_data)
        cached_entry_data_size = cached_entry_struct.get(
            'cached_entry_data_size')
        cached_entry_size = 12 + cached_entry_data_size
        cached_entry_data = value_data[
            cached_entry_offset:cached_entry_offset + cached_entry_size]

    if not cached_entry_struct:
      raise RuntimeError(u'Unsupported cache entry size: {0:d}'.format(
          cached_entry_size))

    cached_entry_object = AppCompatCacheCachedEntry()
    cached_entry_object.cached_entry_size = cached_entry_size

    path_offset = 0
    data_size = 0

    if format_type == self.FORMAT_TYPE_XP:
      # The XP path is stored inline as a fixed 528-byte UTF-16 buffer;
      # scan for the 2-byte NUL terminator.
      # NOTE(review): xrange and ord() on byte-string indexing are
      # Python 2 only.
      string_size = 0
      for string_index in xrange(0, 528, 2):
        if (ord(cached_entry_data[string_index]) == 0 and
            ord(cached_entry_data[string_index + 1]) == 0):
          break
        string_size += 2

      cached_entry_object.path = binary.Ut16StreamCopyToString(
          cached_entry_data[0:string_size])

    elif format_type in [
        self.FORMAT_TYPE_2003, self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7]:
      path_size = cached_entry_struct.get('path_size')
      path_offset = cached_entry_struct.get('path_offset')

    elif format_type == self.FORMAT_TYPE_8:
      # The path immediately follows the 14-byte entry header.
      path_size = cached_entry_struct.get('path_size')

      cached_entry_data_offset = 14 + path_size
      cached_entry_object.path = binary.Ut16StreamCopyToString(
          cached_entry_data[14:cached_entry_data_offset])

      remaining_data = cached_entry_data[cached_entry_data_offset:]
      cached_entry_object.insertion_flags = construct.ULInt32(
          'insertion_flags').parse(remaining_data[0:4])
      cached_entry_object.shim_flags = construct.ULInt32(
          'shim_flags').parse(remaining_data[4:8])

      # 8.0 and 8.1 entries carry a different amount of flag data before
      # the timestamp.
      if cached_entry_data[0:4] == self._CACHED_ENTRY_SIGNATURE_8_0:
        cached_entry_data_offset += 8
      elif cached_entry_data[0:4] == self._CACHED_ENTRY_SIGNATURE_8_1:
        cached_entry_data_offset += 10

      remaining_data = cached_entry_data[cached_entry_data_offset:]

    if format_type in [
        self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003, self.FORMAT_TYPE_VISTA,
        self.FORMAT_TYPE_7]:
      cached_entry_object.last_modification_time = cached_entry_struct.get(
          'last_modification_time')

    elif format_type == self.FORMAT_TYPE_8:
      cached_entry_object.last_modification_time = construct.ULInt64(
          'last_modification_time').parse(remaining_data[0:8])

    if format_type in [self.FORMAT_TYPE_XP, self.FORMAT_TYPE_2003]:
      cached_entry_object.file_size = cached_entry_struct.get(
          'file_size')

    elif format_type in [self.FORMAT_TYPE_VISTA, self.FORMAT_TYPE_7]:
      cached_entry_object.insertion_flags = cached_entry_struct.get(
          'insertion_flags')
      cached_entry_object.shim_flags = cached_entry_struct.get(
          'shim_flags')

    if format_type == self.FORMAT_TYPE_XP:
      cached_entry_object.last_update_time = cached_entry_struct.get(
          'last_update_time')

    if format_type == self.FORMAT_TYPE_7:
      data_offset = cached_entry_struct.get('data_offset')
      data_size = cached_entry_struct.get('data_size')

    elif format_type == self.FORMAT_TYPE_8:
      data_offset = cached_entry_offset + cached_entry_data_offset + 12
      data_size = construct.ULInt32('data_size').parse(
          remaining_data[8:12])

    # For formats that store the path out-of-line, copy it from the value
    # data (path_offset is relative to the start of the value data).
    if path_offset > 0 and path_size > 0:
      path_size += path_offset
      cached_entry_object.path = binary.Ut16StreamCopyToString(
          value_data[path_offset:path_size])

    if data_size > 0:
      data_size += data_offset
      cached_entry_object.data = value_data[data_offset:data_size]

    return cached_entry_object
""" Setup and unique functionality for the wide-band correlator modes. A wideband correlator's FPGAs process all digitised data, which is a multiple of the FPGA clock rates. """ """ Revisions: 2011-07-07 PVP Initial revision. """ import construct, corr_functions # f-engine control register register_fengine_control = construct.BitStruct( 'control', construct.Padding(32 - 20 - 1), # 21 - 31 construct.Flag('tvgsel_noise'), # 20 construct.Flag('tvgsel_fdfs'), # 19 construct.Flag('tvgsel_pkt'), # 18 construct.Flag('tvgsel_ct'), # 17 construct.Flag('tvg_en'), # 16 construct.Padding(16 - 13 - 1), # 14 - 15 construct.Flag('adc_protect_disable'), # 13 construct.Flag('flasher_en'), # 12 construct.Padding(12 - 9 - 1), # 10 - 11 construct.Flag('gbe_enable'), # 9 construct.Flag('gbe_rst'), # 8 construct.Padding(8 - 3 - 1), # 4 - 7 construct.Flag('clr_status'), # 3 construct.Flag('arm'), # 2 construct.Flag('soft_sync'), # 1 construct.Flag('mrst')) # 0 # f-engine status
class UtmpParser(interface.SingleFileBaseParser):
  """Parser for Linux/Unix UTMP files."""

  _INITIAL_FILE_OFFSET = None

  NAME = 'utmp'
  DESCRIPTION = u'Parser for Linux/Unix UTMP files.'

  # Layout of a single Linux utmp record, little-endian.
  LINUX_UTMP_ENTRY = construct.Struct(
      'utmp_linux',
      construct.ULInt32('type'),
      construct.ULInt32('pid'),
      construct.String('terminal', 32),
      construct.ULInt32('terminal_id'),
      construct.String('username', 32),
      construct.String('hostname', 256),
      construct.ULInt16('termination'),
      construct.ULInt16('exit'),
      construct.ULInt32('session'),
      construct.ULInt32('timestamp'),
      construct.ULInt32('microsecond'),
      construct.ULInt32('address_a'),
      construct.ULInt32('address_b'),
      construct.ULInt32('address_c'),
      construct.ULInt32('address_d'),
      construct.Padding(20))

  # Size in bytes of a single record; used both to read records and to
  # detect a truncated trailing record.
  LINUX_UTMP_ENTRY_SIZE = LINUX_UTMP_ENTRY.sizeof()

  # Mapping of the utmp record type value to its symbolic name.
  STATUS_TYPE = {
      0: 'EMPTY',
      1: 'RUN_LVL',
      2: 'BOOT_TIME',
      3: 'NEW_TIME',
      4: 'OLD_TIME',
      5: 'INIT_PROCESS',
      6: 'LOGIN_PROCESS',
      7: 'USER_PROCESS',
      8: 'DEAD_PROCESS',
      9: 'ACCOUNTING'}

  # Sentinel passed as the default to _GetTextFromNullTerminatedString when
  # probing the first record: if the returned text equals this sentinel the
  # field was empty and the file is rejected. The value therefore must be
  # text that is highly unlikely to appear in a real terminal or username
  # field; otherwise its content is arbitrary.
  _DEFAULT_TEST_VALUE = u'Ekki Fraedilegur Moguleiki, thetta er bull ! = + _<>'

  def ParseFileObject(self, parser_mediator, file_object, **kwargs):
    """Parses an UTMP file-like object.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      file_object: The file-like object to extract data from.

    Raises:
      UnableToParseFile: when the file cannot be parsed.
    """
    file_object.seek(0, os.SEEK_SET)

    # Parse and sanity-check the first record before committing to the file.
    try:
      structure = self.LINUX_UTMP_ENTRY.parse_stream(file_object)
    except (IOError, construct.FieldError) as exception:
      raise errors.UnableToParseFile(
          u'Unable to parse UTMP Header with error: {0:s}'.format(exception))

    if structure.type not in self.STATUS_TYPE:
      raise errors.UnableToParseFile(
          (u'Not an UTMP file, unknown type '
           u'[{0:d}].').format(structure.type))

    if not self._VerifyTextField(structure.terminal):
      raise errors.UnableToParseFile(
          u'Not an UTMP file, unknown terminal.')
    if not self._VerifyTextField(structure.username):
      raise errors.UnableToParseFile(
          u'Not an UTMP file, unknown username.')
    if not self._VerifyTextField(structure.hostname):
      raise errors.UnableToParseFile(
          u'Not an UTMP file, unknown hostname.')

    # Check few values.
    terminal = self._GetTextFromNullTerminatedString(
        structure.terminal, self._DEFAULT_TEST_VALUE)
    if terminal == self._DEFAULT_TEST_VALUE:
      raise errors.UnableToParseFile(
          u'Not an UTMP file, no terminal set.')

    username = self._GetTextFromNullTerminatedString(
        structure.username, self._DEFAULT_TEST_VALUE)
    if username == self._DEFAULT_TEST_VALUE:
      raise errors.UnableToParseFile(
          u'Not an UTMP file, no username set.')

    if not structure.timestamp:
      raise errors.UnableToParseFile(
          u'Not an UTMP file, no timestamp set in the first record.')

    # Verification succeeded: rewind and emit an event per record.
    file_object.seek(0, os.SEEK_SET)
    event_object = self._ReadUtmpEvent(file_object)
    while event_object:
      # NOTE(review): tell() here is the offset *after* the record was read,
      # not the record's start offset — confirm this is intentional.
      event_object.offset = file_object.tell()
      parser_mediator.ProduceEvent(event_object)
      event_object = self._ReadUtmpEvent(file_object)

  def _VerifyTextField(self, text):
    """Check if a byte stream is a null terminated string.

    Args:
      text: text field from the structure.

    Returns:
      True if it is a null terminated string, False otherwise.
    """
    # Everything after the first NUL byte must also be NUL bytes for the
    # field to count as a properly terminated fixed-size string.
    _, _, null_chars = text.partition(b'\x00')
    if not null_chars:
      return False
    return len(null_chars) == null_chars.count(b'\x00')

  def _ReadUtmpEvent(self, file_object):
    """Returns an UtmpEvent from a single UTMP entry.

    Args:
      file_object: a file-like object that points to an UTMP file.

    Returns:
      An event object constructed from a single UTMP record or None if we
      have reached the end of the file (or EOF).
    """
    offset = file_object.tell()
    data = file_object.read(self.LINUX_UTMP_ENTRY_SIZE)
    if not data or len(data) != self.LINUX_UTMP_ENTRY_SIZE:
      # EOF or truncated trailing record.
      return
    try:
      entry = self.LINUX_UTMP_ENTRY.parse(data)
    except (IOError, construct.FieldError):
      logging.warning((
          u'UTMP entry at 0x{:x} couldn\'t be parsed.').format(offset))
      # Skip the unparsable record and recurse to try the next one.
      return self._ReadUtmpEvent(file_object)

    user = self._GetTextFromNullTerminatedString(entry.username)
    terminal = self._GetTextFromNullTerminatedString(entry.terminal)
    if terminal == '~':
      terminal = u'system boot'
    computer_name = self._GetTextFromNullTerminatedString(entry.hostname)
    if computer_name == u'N/A' or computer_name == u':0':
      computer_name = u'localhost'
    status = self.STATUS_TYPE.get(entry.type, u'N/A')

    if not entry.address_b:
      # Only the first address word is set: treat it as a packed IPv4
      # address.
      try:
        ip_address = socket.inet_ntoa(
            construct.ULInt32('int').build(entry.address_a))
        if ip_address == '0.0.0.0':
          ip_address = u'localhost'
      except (IOError, construct.FieldError, socket.error):
        ip_address = u'N/A'
    else:
      # NOTE(review): joining four 32-bit words with dots looks like an
      # attempt at rendering an IPv6 address as a dotted quad — confirm
      # this formatting is intended.
      ip_address = u'{0:d}.{1:d}.{2:d}.{3:d}'.format(
          entry.address_a, entry.address_b,
          entry.address_c, entry.address_d)

    return UtmpEvent(
        entry.timestamp, entry.microsecond, user, computer_name, terminal,
        status, ip_address, entry)

  def _GetTextFromNullTerminatedString(
      self, null_terminated_string, default_string=u'N/A'):
    """Get a UTF-8 text from a raw null terminated string.

    Args:
      null_terminated_string: Raw string terminated with null character.
      default_string: The default string returned if the parser fails.

    Returns:
      A decoded UTF-8 string or if unable to decode, the supplied default
      string.
    """
    text, _, _ = null_terminated_string.partition('\x00')
    try:
      text = text.decode('utf-8')
    except UnicodeDecodeError:
      logging.warning(
          u'[UTMP] Decode UTF8 failed, the message string may be cut short.')
      # Fall back to a lossy decode rather than failing the whole record.
      text = text.decode('utf-8', 'ignore')
    if not text:
      return default_string
    return text
payload_len = 128 #how big to make each packet in 64bit words brams=['bram_msb','bram_lsb','bram_oob'] tx_snap = 'snap_gbe0_tx' rx_snap = 'snap_gbe3_rx' tx_core_name = 'gbe0' test_core_bram = 'qdrBram' test_QDRReg = 'testQDR' test_fineFFT= 'vals_testQDR' fpga=[] snap_debug='snap_debug' snap_fengine_debug_coarse_fft = construct.BitStruct(snap_debug, construct.Padding(128 - (4*18)), construct.BitField("d0_r", 18), construct.BitField("d0_i", 18), construct.BitField("d1_r", 18), construct.BitField("d1_i", 18)) def bin2fp(bits, m = 8, e = 7): if m > 32: raise RuntimeError('Unsupported fixed format: %i.%i' % (m,e)) shift = 32 - m bits = bits << shift m = m + shift e = e + shift return float(numpy.int32(bits)) / (2**e)