def build_floats(x, byteorder=sys.byteorder):
    if byteorder == 'little':
        c = construct.Array(len(x), construct.LFloat32("x"))
    else:
        c = construct.Array(len(x), construct.BFloat32("x"))
    return c.build(x)
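# Hedged usage sketch (not part of the original module): round-trips a list of
# floats through build_floats(). Assumes construct 2.5.x, where LFloat32 and
# BFloat32 are the legacy little-/big-endian IEEE 754 single-precision fields,
# and that sys is imported as the default argument above implies.
import construct

def _demo_build_floats():
    values = [1.0, 2.5, -3.25]
    packed = build_floats(values, byteorder='little')
    assert len(packed) == len(values) * 4
    # Parsing back requires knowing the element count and byte order.
    parser = construct.Array(len(values), construct.LFloat32("x"))
    assert list(parser.parse(packed)) == values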
def read_trace(n, l, f=5):
    ret = []
    if PRINT:
        for i in range(n):
            buf = FH.read(l)
            # IBM floats - 4 byte - Must be big endian
            if f == 1:
                ret.append(
                    construct.BFloat32("x").parse(ibmfloat.ibm2ieee32(buf)))
            # INT - 4 byte or 2 byte
            elif f == 2:
                if ENDIAN == 'little':
                    # Swap 4 byte
                    b = construct.SLInt32("x").parse(buf)
                else:
                    b = construct.SBInt32("x").parse(buf)
                ret.append(b)
            elif f == 3:
                if ENDIAN == 'little':
                    # Swap 2 byte
                    b = construct.SLInt16("x").parse(buf)
                else:
                    b = construct.SBInt16("x").parse(buf)
                ret.append(b)
            # IEEE floats - 4 byte
            elif f == 5:
                if ENDIAN == 'little':
                    # Swap 4 byte
                    b = construct.LFloat32("x").parse(buf)
                else:
                    b = construct.BFloat32("x").parse(buf)
                ret.append(b)
            # INT - 1 byte
            elif f == 8:
                ret.append(construct.SBInt8("x").parse(buf))
    else:
        FH.read(n * l)
    return ret
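# Hedged sketch (assumes construct 2.5.x): the format-code 5 (IEEE float)
# branch of read_trace() rewritten without the module-level FH/ENDIAN/PRINT
# globals, reading from any file-like object. _read_ieee_trace and the demo
# below are hypothetical helpers for illustration only.
import io
import construct

def _read_ieee_trace(file_object, n_samples, endian='little'):
    float32 = (construct.LFloat32("x") if endian == 'little'
               else construct.BFloat32("x"))
    return [float32.parse(file_object.read(4)) for _ in range(n_samples)]

def _demo_read_ieee_trace():
    # Three little-endian samples from an in-memory buffer.
    buffer_object = io.BytesIO(
        construct.Array(3, construct.LFloat32("x")).build([0.5, 1.5, 2.5]))
    assert _read_ieee_trace(buffer_object, 3) == [0.5, 1.5, 2.5]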
class UserAssistCollector(collector.WindowsVolumeCollector):
  """Class that defines a Windows User Assist information collector."""

  _USER_ASSIST_KEY = (
      u'HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
      u'Explorer\\UserAssist')

  # UserAssist format version used in Windows 2000, XP, 2003, Vista.
  _USER_ASSIST_V3_STRUCT = construct.Struct(
      u'user_assist_entry',
      construct.ULInt32(u'unknown1'),
      construct.ULInt32(u'execution_count'),
      construct.ULInt64(u'last_execution_time'))

  # UserAssist format version used in Windows 2008, 7, 8.
  _USER_ASSIST_V5_STRUCT = construct.Struct(
      u'user_assist_entry',
      construct.ULInt32(u'unknown1'),
      construct.ULInt32(u'execution_count'),
      construct.ULInt32(u'application_focus_count'),
      construct.ULInt32(u'application_focus_duration'),
      construct.LFloat32(u'unknown2'),
      construct.LFloat32(u'unknown3'),
      construct.LFloat32(u'unknown4'),
      construct.LFloat32(u'unknown5'),
      construct.LFloat32(u'unknown6'),
      construct.LFloat32(u'unknown7'),
      construct.LFloat32(u'unknown8'),
      construct.LFloat32(u'unknown9'),
      construct.LFloat32(u'unknown10'),
      construct.LFloat32(u'unknown11'),
      construct.ULInt32(u'unknown12'),
      construct.ULInt64(u'last_execution_time'),
      construct.ULInt32(u'unknown13'))

  def __init__(self, debug=False, mediator=None):
    """Initializes the collector object.

    Args:
      debug: optional boolean value to indicate if debug information should
             be printed.
      mediator: a volume scanner mediator (instance of
                dfvfs.VolumeScannerMediator) or None.
    """
    super(UserAssistCollector, self).__init__(mediator=mediator)
    self._debug = debug
    registry_file_reader = collector.CollectorRegistryFileReader(self)
    self._registry = registry.WinRegistry(
        registry_file_reader=registry_file_reader)

    self.found_user_assist_key = False

  # TODO: replace print by output_writer.
  def _CollectUserAssistFromKey(self, unused_output_writer, guid_sub_key):
    """Collects the User Assist information from a GUID sub key.

    Args:
      output_writer: the output writer object.
      guid_sub_key: the User Assist GUID Registry key (instance of
                    dfwinreg.WinRegistryKey).
""" version_value = guid_sub_key.GetValueByName(u'Version') if not version_value: logging.warning(u'Missing Version value in sub key: {0:s}'.format( guid_sub_key.name)) return format_version = version_value.GetDataAsObject() if format_version == 3: value_data_size = self._USER_ASSIST_V3_STRUCT.sizeof() elif format_version == 5: value_data_size = self._USER_ASSIST_V5_STRUCT.sizeof() print(u'GUID\t\t: {0:s}'.format(guid_sub_key.name)) print(u'Format version\t: {0:d}'.format(format_version)) print(u'') count_sub_key = guid_sub_key.GetSubkeyByName(u'Count') for value in count_sub_key.GetValues(): output_string = u'Original name\t: {0:s}'.format(value.name) print(output_string.encode(u'utf-8')) try: value_name = value.name.decode(u'rot-13') except UnicodeEncodeError as exception: characters = [] for char in value.name: if ord(char) < 128: try: characters.append(char.decode(u'rot-13')) except UnicodeEncodeError: characters.append(char) else: characters.append(char) value_name = u''.join(characters) try: output_string = u'Converted name\t: {0:s}'.format(value_name) print(output_string.encode(u'utf-8')) except UnicodeEncodeError as exception: logging.warning( u'Unable to convert: {0:s} with error: {1:s}'.format( value.name, exception)) print(u'Value data:') print(hexdump.Hexdump(value.data)) if value_name != u'UEME_CTLSESSION': if value_data_size != len(value.data): logging.warning( (u'Version: {0:d} size mismatch (calculated: {1:d}, ' u'stored: {2:d}).').format(format_version, value_data_size, len(value.data))) return if format_version == 3: parsed_data = self._USER_ASSIST_V3_STRUCT.parse(value.data) elif format_version == 5: parsed_data = self._USER_ASSIST_V5_STRUCT.parse(value.data) print(u'Unknown1\t\t\t\t\t\t\t\t: 0x{0:08x}'.format( parsed_data.get(u'unknown1'))) print(u'Execution count\t\t\t\t\t\t\t\t: {0:d}'.format( parsed_data.get(u'execution_count'))) if format_version == 5: print( u'Application focus count\t\t\t\t\t\t\t: {0:d}'.format( parsed_data.get(u'application_focus_count'))) print(u'Application focus duration\t\t\t\t\t\t: {0:d}'. format( parsed_data.get(u'application_focus_duration'))) print(u'Unknown2\t\t\t\t\t\t\t\t: {0:.2f}'.format( parsed_data.get(u'unknown2'))) print(u'Unknown3\t\t\t\t\t\t\t\t: {0:.2f}'.format( parsed_data.get(u'unknown3'))) print(u'Unknown4\t\t\t\t\t\t\t\t: {0:.2f}'.format( parsed_data.get(u'unknown4'))) print(u'Unknown5\t\t\t\t\t\t\t\t: {0:.2f}'.format( parsed_data.get(u'unknown5'))) print(u'Unknown6\t\t\t\t\t\t\t\t: {0:.2f}'.format( parsed_data.get(u'unknown6'))) print(u'Unknown7\t\t\t\t\t\t\t\t: {0:.2f}'.format( parsed_data.get(u'unknown7'))) print(u'Unknown8\t\t\t\t\t\t\t\t: {0:.2f}'.format( parsed_data.get(u'unknown8'))) print(u'Unknown9\t\t\t\t\t\t\t\t: {0:.2f}'.format( parsed_data.get(u'unknown9'))) print(u'Unknown10\t\t\t\t\t\t\t\t: {0:.2f}'.format( parsed_data.get(u'unknown10'))) print(u'Unknown11\t\t\t\t\t\t\t\t: {0:.2f}'.format( parsed_data.get(u'unknown11'))) print(u'Unknown12\t\t\t\t\t\t\t\t: 0x{0:08x}'.format( parsed_data.get(u'unknown12'))) timestamp = parsed_data.get(u'last_execution_time') date_string = (datetime.datetime(1601, 1, 1) + datetime.timedelta(microseconds=timestamp / 10)) print(u'Last execution time\t\t\t\t\t\t\t: {0!s} (0x{1:08x})'. format(date_string, timestamp)) if format_version == 5: print(u'Unknown13\t\t\t\t\t\t\t\t: 0x{0:08x}'.format( parsed_data.get(u'unknown13'))) print(u'') def Collect(self, output_writer): """Collects the User Assist information. Args: output_writer: the output writer object. 
""" self.found_user_assist_key = False user_assist_key = self._registry.GetKeyByPath(self._USER_ASSIST_KEY) if not user_assist_key: return self.found_user_assist_key = True print(u'Key: {0:s}'.format(self._USER_ASSIST_KEY)) print(u'') for guid_sub_key in user_assist_key.GetSubkeys(): self._CollectUserAssistFromKey(output_writer, guid_sub_key)
class AutomaticDestinationsOlecfPlugin(interface.OlecfPlugin):
  """Plugin that parses an .automaticDestinations-ms OLECF file."""

  NAME = u'olecf_automatic_destinations'
  DESCRIPTION = u'Parser for *.automaticDestinations-ms OLECF files.'

  REQUIRED_ITEMS = frozenset([u'DestList'])

  _RE_LNK_ITEM_NAME = re.compile(r'^[1-9a-f][0-9a-f]*$')

  # We cannot use the parser registry here since winlnk could be disabled.
  # TODO: see if there is a more elegant solution for this.
  _WINLNK_PARSER = winlnk.WinLnkParser()

  _DEST_LIST_STREAM_HEADER = construct.Struct(
      u'dest_list_stream_header',
      construct.ULInt32(u'unknown1'),
      construct.ULInt32(u'number_of_entries'),
      construct.ULInt32(u'number_of_pinned_entries'),
      construct.LFloat32(u'unknown2'),
      construct.ULInt32(u'last_entry_number'),
      construct.Padding(4),
      construct.ULInt32(u'last_revision_number'),
      construct.Padding(4))

  _DEST_LIST_STREAM_HEADER_SIZE = _DEST_LIST_STREAM_HEADER.sizeof()

  # Using Construct's utf-16 encoding here will create strings with their
  # end-of-string characters exposed. Instead the strings are read as
  # binary strings and converted using ReadUtf16().
  _DEST_LIST_STREAM_ENTRY = construct.Struct(
      u'dest_list_stream_entry',
      construct.ULInt64(u'unknown1'),
      construct.Array(16, construct.Byte(u'droid_volume_identifier')),
      construct.Array(16, construct.Byte(u'droid_file_identifier')),
      construct.Array(16, construct.Byte(u'birth_droid_volume_identifier')),
      construct.Array(16, construct.Byte(u'birth_droid_file_identifier')),
      construct.String(u'hostname', 16),
      construct.ULInt32(u'entry_number'),
      construct.ULInt32(u'unknown2'),
      construct.LFloat32(u'unknown3'),
      construct.ULInt64(u'last_modification_time'),
      construct.ULInt32(u'pin_status'),
      construct.ULInt16(u'path_size'),
      construct.String(u'path', lambda ctx: ctx.path_size * 2))

  def ParseDestList(self, parser_mediator, olecf_item):
    """Parses the DestList OLECF item.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      olecf_item: An OLECF item (instance of pyolecf.item).
    """
    try:
      header = self._DEST_LIST_STREAM_HEADER.parse_stream(olecf_item)
    except (IOError, construct.FieldError) as exception:
      raise errors.UnableToParseFile(
          u'Unable to parse DestList header with error: {0:s}'.format(
              exception))

    if header.unknown1 != 1:
      # TODO: add format debugging notes to parser mediator.
      logging.debug(u'[{0:s}] unknown1 value: {1:d}.'.format(
          self.NAME, header.unknown1))

    entry_offset = olecf_item.get_offset()
    while entry_offset < olecf_item.size:
      try:
        entry = self._DEST_LIST_STREAM_ENTRY.parse_stream(olecf_item)
      except (IOError, construct.FieldError) as exception:
        raise errors.UnableToParseFile(
            u'Unable to parse DestList entry with error: {0:s}'.format(
                exception))

      if not entry:
        break

      event_object = AutomaticDestinationsDestListEntryEvent(
          entry.last_modification_time,
          eventdata.EventTimestamp.MODIFICATION_TIME, entry_offset, entry)
      parser_mediator.ProduceEvent(event_object)

      entry_offset = olecf_item.get_offset()

  def ParseItems(self, parser_mediator, file_entry=None, root_item=None,
                 **unused_kwargs):
    """Parses OLECF items.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      file_entry: Optional file entry object (instance of dfvfs.FileEntry).
                  The default is None.
      root_item: Optional root item of the OLECF file. The default is None.

    Raises:
      ValueError: If the root_item is not set.
""" if root_item is None: raise ValueError(u'Root item not set.') for item in root_item.sub_items: if item.name == u'DestList': self.ParseDestList(parser_mediator, item) elif self._RE_LNK_ITEM_NAME.match(item.name): if file_entry: display_name = u'{0:s} # {1:s}'.format( parser_mediator.GetDisplayName(), item.name) else: display_name = u'# {0:s}'.format(item.name) self._WINLNK_PARSER.UpdateChainAndParseFileObject( parser_mediator, item, display_name=display_name)
class SystemResourceUsageMonitorESEDBPlugin(interface.ESEDBPlugin):
  """Parses a System Resource Usage Monitor (SRUM) ESE database file."""

  NAME = 'srum'
  DESCRIPTION = (
      'Parser for System Resource Usage Monitor (SRUM) ESE database files.')

  # TODO: add support for tables:
  # {5C8CF1C7-7257-4F13-B223-970EF5939312}
  # {97C2CE28-A37B-4920-B1E9-8B76CD341EC5}
  # {B6D82AF1-F780-4E17-8077-6CB9AD8A6FC4}
  # {D10CA2FE-6FCF-4F6D-848E-B2E99266FA86}
  # {DA73FB89-2BEA-4DDC-86B8-6E048C6DA477}
  # {FEE4E14F-02A9-4550-B5CE-5FA2DA202E37}

  # TODO: convert interface_luid into string representation
  # TODO: convert l2_profile_flags into string representation in formatter

  OPTIONAL_TABLES = {
      '{973F5D5C-1D90-4944-BE8E-24B94231A174}': 'ParseNetworkDataUsage',
      '{D10CA2FE-6FCF-4F6D-848E-B2E99266FA89}':
          'ParseApplicationResourceUsage',
      '{DD6636C4-8929-4683-974E-22C046A43763}':
          'ParseNetworkConnectivityUsage'}

  REQUIRED_TABLES = {'SruDbIdMapTable': ''}

  _GUID_TABLE_VALUE_MAPPINGS = {
      'TimeStamp': '_ConvertValueBinaryDataToFloatingPointValue'}

  _FLOAT32_LITTLE_ENDIAN = construct.LFloat32('float32')
  _FLOAT64_LITTLE_ENDIAN = construct.LFloat64('float64')

  _APPLICATION_RESOURCE_USAGE_VALUES_MAP = {
      'application': 'AppId',
      'background_bytes_read': 'BackgroundBytesRead',
      'background_bytes_written': 'BackgroundBytesWritten',
      'background_context_switches': 'BackgroundContextSwitches',
      'background_cycle_time': 'BackgroundCycleTime',
      'background_number_for_flushes': 'BackgroundNumberOfFlushes',
      'background_number_for_read_operations': 'BackgroundNumReadOperations',
      'background_number_for_write_operations': 'BackgroundNumWriteOperations',
      'face_time': 'FaceTime',
      'foreground_bytes_read': 'ForegroundBytesRead',
      'foreground_bytes_written': 'ForegroundBytesWritten',
      'foreground_context_switches': 'ForegroundContextSwitches',
      'foreground_cycle_time': 'ForegroundCycleTime',
      'foreground_number_for_flushes': 'ForegroundNumberOfFlushes',
      'foreground_number_for_read_operations': 'ForegroundNumReadOperations',
      'foreground_number_for_write_operations': 'ForegroundNumWriteOperations',
      'identifier': 'AutoIncId',
      'user_identifier': 'UserId'}

  _NETWORK_CONNECTIVITY_USAGE_VALUES_MAP = {
      'application': 'AppId',
      'connected_time': 'ConnectedTime',
      'identifier': 'AutoIncId',
      'interface_luid': 'InterfaceLuid',
      'l2_profile_flags': 'L2ProfileFlags',
      'l2_profile_identifier': 'L2ProfileId',
      'user_identifier': 'UserId'}

  _NETWORK_DATA_USAGE_VALUES_MAP = {
      'application': 'AppId',
      'bytes_recieved': 'BytesRecvd',
      'bytes_sent': 'BytesSent',
      'identifier': 'AutoIncId',
      'interface_luid': 'InterfaceLuid',
      'l2_profile_flags': 'L2ProfileFlags',
      'l2_profile_identifier': 'L2ProfileId',
      'user_identifier': 'UserId'}

  _SUPPORTED_IDENTIFIER_TYPES = (0, 1, 2, 3)

  def _ConvertValueBinaryDataToFloatingPointValue(self, value):
    """Converts a binary data value into a floating-point value.

    Args:
      value (bytes): binary data value containing a 32-bit or 64-bit
          little-endian floating-point value or None.

    Returns:
      float: floating-point representation of binary data value or None.
    """
    if value:
      value_length = len(value)
      if value_length == 4:
        return self._FLOAT32_LITTLE_ENDIAN.parse(value)
      elif value_length == 8:
        return self._FLOAT64_LITTLE_ENDIAN.parse(value)

    return None

  def _GetIdentifierMappings(self, parser_mediator, cache, database):
    """Retrieves the identifier mappings from SruDbIdMapTable table.

    In the SRUM database individual tables contain numeric identifiers for
    the application ("AppId") and user identifier ("UserId"). A more
    descriptive string of these values can be found in the SruDbIdMapTable.
    For example, the numeric value 42 mapping to "DiagTrack". This method
    will cache the mappings of a specific SRUM database.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      cache (ESEDBCache): cache, which contains information about the
          identifiers stored in the SruDbIdMapTable table.
      database (pyesedb.file): ESE database.

    Returns:
      dict[int, str]: mapping of numeric identifiers to their string
          representation.
    """
    identifier_mappings = cache.GetResults('SruDbIdMapTable', default_value={})
    if not identifier_mappings:
      esedb_table = database.get_table_by_name('SruDbIdMapTable')
      if not esedb_table:
        parser_mediator.ProduceExtractionError(
            'unable to retrieve table: SruDbIdMapTable')
      else:
        identifier_mappings = self._ParseIdentifierMappingsTable(
            parser_mediator, esedb_table)
        cache.StoreDictInCache('SruDbIdMapTable', identifier_mappings)

    return identifier_mappings

  def _ParseGUIDTable(
      self, parser_mediator, cache, database, esedb_table, values_map,
      event_data_class):
    """Parses a table with a GUID as name.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      cache (ESEDBCache): cache, which contains information about the
          identifiers stored in the SruDbIdMapTable table.
      database (pyesedb.file): ESE database.
      esedb_table (pyesedb.table): table.
      values_map (dict[str, str]): mapping of event data attribute names to
          table column names.
      event_data_class (type): event data class.

    Raises:
      ValueError: if the cache, database or table value is missing.
    """
    if cache is None:
      raise ValueError('Missing cache value.')

    if database is None:
      raise ValueError('Missing database value.')

    if esedb_table is None:
      raise ValueError('Missing table value.')

    identifier_mappings = self._GetIdentifierMappings(
        parser_mediator, cache, database)

    for esedb_record in esedb_table.records:
      if parser_mediator.abort:
        break

      record_values = self._GetRecordValues(
          parser_mediator, esedb_table.name, esedb_record,
          value_mappings=self._GUID_TABLE_VALUE_MAPPINGS)

      event_data = event_data_class()

      for attribute_name, column_name in values_map.items():
        record_value = record_values.get(column_name, None)
        if attribute_name in ('application', 'user_identifier'):
          # Human readable versions of AppId and UserId values are stored
          # in the SruDbIdMapTable table; also referred to as identifier
          # mapping. Here we look up the numeric identifier stored in the
          # GUID table in SruDbIdMapTable.
          record_value = identifier_mappings.get(record_value, record_value)

        setattr(event_data, attribute_name, record_value)

      timestamp = record_values.get('TimeStamp')
      if timestamp:
        date_time = dfdatetime_ole_automation_date.OLEAutomationDate(
            timestamp=timestamp)
        timestamp_description = definitions.TIME_DESCRIPTION_SAMPLE
      else:
        date_time = dfdatetime_semantic_time.SemanticTime('Not set')
        timestamp_description = definitions.TIME_DESCRIPTION_NOT_A_TIME

      event = time_events.DateTimeValuesEvent(date_time, timestamp_description)
      parser_mediator.ProduceEventWithEventData(event, event_data)

      timestamp = record_values.get('ConnectStartTime')
      if timestamp:
        date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_FIRST_CONNECTED)
        parser_mediator.ProduceEventWithEventData(event, event_data)

  def _ParseIdentifierMappingRecord(
      self, parser_mediator, table_name, esedb_record):
    """Extracts an identifier mapping from a SruDbIdMapTable record.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      table_name (str): name of the table the record is stored in.
      esedb_record (pyesedb.record): record.

    Returns:
      tuple[int, str]: numeric identifier and its string representation or
          None, None if no identifier mapping can be retrieved from the record.
    """
    record_values = self._GetRecordValues(
        parser_mediator, table_name, esedb_record)

    identifier = record_values.get('IdIndex', None)
    if identifier is None:
      parser_mediator.ProduceExtractionError(
          'IdIndex value missing from table: SruDbIdMapTable')
      return None, None

    identifier_type = record_values.get('IdType', None)
    if identifier_type not in self._SUPPORTED_IDENTIFIER_TYPES:
      parser_mediator.ProduceExtractionError(
          'unsupported IdType value: {0!s} in table: SruDbIdMapTable'.format(
              identifier_type))
      return None, None

    mapped_value = record_values.get('IdBlob', None)
    if mapped_value is None:
      parser_mediator.ProduceExtractionError(
          'IdBlob value missing from table: SruDbIdMapTable')
      return None, None

    if identifier_type == 3:
      try:
        fwnt_identifier = pyfwnt.security_identifier()
        fwnt_identifier.copy_from_byte_stream(mapped_value)
        mapped_value = fwnt_identifier.get_string()
      except IOError:
        parser_mediator.ProduceExtractionError(
            'unable to decode IdBlob value as Windows NT security identifier')
        return None, None

    else:
      try:
        mapped_value = mapped_value.decode('utf-16le').rstrip('\0')
      except UnicodeDecodeError:
        parser_mediator.ProduceExtractionError(
            'unable to decode IdBlob value as UTF-16 little-endian string')
        return None, None

    return identifier, mapped_value

  def _ParseIdentifierMappingsTable(self, parser_mediator, esedb_table):
    """Extracts identifier mappings from the SruDbIdMapTable table.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      esedb_table (pyesedb.table): table.

    Returns:
      dict[int, str]: mapping of numeric identifiers to their string
          representation.
    """
    identifier_mappings = {}

    for esedb_record in esedb_table.records:
      if parser_mediator.abort:
        break

      identifier, mapped_value = self._ParseIdentifierMappingRecord(
          parser_mediator, esedb_table.name, esedb_record)
      if identifier is None or mapped_value is None:
        continue

      if identifier in identifier_mappings:
        parser_mediator.ProduceExtractionError(
            'identifier: {0:d} already exists in mappings.'.format(identifier))
        continue

      identifier_mappings[identifier] = mapped_value

    return identifier_mappings

  def ParseApplicationResourceUsage(
      self, parser_mediator, cache=None, database=None, table=None,
      **unused_kwargs):
    """Parses the application resource usage table.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      cache (Optional[ESEDBCache]): cache, which contains information about
          the identifiers stored in the SruDbIdMapTable table.
      database (Optional[pyesedb.file]): ESE database.
      table (Optional[pyesedb.table]): table.
    """
    self._ParseGUIDTable(
        parser_mediator, cache, database, table,
        self._APPLICATION_RESOURCE_USAGE_VALUES_MAP,
        SRUMApplicationResourceUsageEventData)

  def ParseNetworkDataUsage(
      self, parser_mediator, cache=None, database=None, table=None,
      **unused_kwargs):
    """Parses the network data usage monitor table.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      cache (Optional[ESEDBCache]): cache, which contains information about
          the identifiers stored in the SruDbIdMapTable table.
      database (Optional[pyesedb.file]): ESE database.
      table (Optional[pyesedb.table]): table.
    """
    self._ParseGUIDTable(
        parser_mediator, cache, database, table,
        self._NETWORK_DATA_USAGE_VALUES_MAP, SRUMNetworkDataUsageEventData)

  def ParseNetworkConnectivityUsage(
      self, parser_mediator, cache=None, database=None, table=None,
      **unused_kwargs):
    """Parses the network connectivity usage monitor table.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      cache (Optional[ESEDBCache]): cache, which contains information about
          the identifiers stored in the SruDbIdMapTable table.
      database (Optional[pyesedb.file]): ESE database.
      table (Optional[pyesedb.table]): table.
    """
    # TODO: consider making ConnectStartTime + ConnectedTime an event.
    self._ParseGUIDTable(
        parser_mediator, cache, database, table,
        self._NETWORK_CONNECTIVITY_USAGE_VALUES_MAP,
        SRUMNetworkConnectivityUsageEventData)
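# Hedged standalone sketch (assumes construct 2.5.x): the conversion applied by
# _ConvertValueBinaryDataToFloatingPointValue(), shown outside the plugin. SRUM
# stores the TimeStamp column as a little-endian 32- or 64-bit IEEE float
# holding an OLE automation date (days since 1899-12-30). _binary_to_float is a
# hypothetical helper for illustration only.
import construct

_FLOAT32_LE = construct.LFloat32('float32')
_FLOAT64_LE = construct.LFloat64('float64')

def _binary_to_float(value):
  if not value:
    return None
  if len(value) == 4:
    return _FLOAT32_LE.parse(value)
  if len(value) == 8:
    return _FLOAT64_LE.parse(value)
  return None

def _demo_binary_to_float():
  assert _binary_to_float(_FLOAT64_LE.build(43012.5)) == 43012.5
  assert _binary_to_float(b'') is None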
)

MESH_Data = construct.Struct("MESH_Data",
    construct.ULInt16("unk_word_00"),       # + 0x00
    construct.ULInt16("nb_vertices"),       # + 0x02
    construct.ULInt32("offset_vertices"),   # + 0x04
    construct.ULInt32("nb_triangles"),      # + 0x08
    construct.ULInt32("offset_triangles"),  # + 0x0C
    construct.ULInt32("unk_dword_00"),      # + 0x10
    construct.ULInt32("unk_dword_01"),      # + 0x14
    construct.ULInt32("unk_dword_02"),      # + 0x18
    construct.ULInt32("unk_dword_03"),      # + 0x1C
)

VERTEX_Data = construct.Struct("VERTEX_Data",
    construct.LFloat32("position_x"),  # + 0x00
    construct.LFloat32("position_y"),  # + 0x04
    construct.LFloat32("position_z"),  # + 0x08
    construct.LFloat32("normal_x"),    # + 0x0C
    construct.LFloat32("normal_y"),    # + 0x10
    construct.LFloat32("normal_z"),    # + 0x14
    construct.LFloat32("texture_u"),   # + 0x18
    construct.LFloat32("texture_v"),   # + 0x1C
)

TRIANGLES_Data = construct.Struct("TRIANGLES_Data",
    construct.Array(3, construct.ULInt16("vertex_indice"))
)

BONES_Data = construct.Struct("BONES_Data",
    construct.ULInt32("offset_name"),  # + 0x00
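# Hedged sketch (assumes construct 2.5.x): building and re-parsing one 32-byte
# VERTEX_Data record to confirm the little-endian float layout declared above.
# The values are synthetic and the demo function name is illustrative only.
import construct

def _demo_vertex_roundtrip():
    vertex = construct.Container(
        position_x=1.0, position_y=2.0, position_z=3.0,
        normal_x=0.0, normal_y=0.0, normal_z=1.0,
        texture_u=0.5, texture_v=0.25)
    data = VERTEX_Data.build(vertex)
    assert len(data) == 32  # 8 fields * 4 bytes each
    parsed = VERTEX_Data.parse(data)
    assert parsed.position_z == 3.0 and parsed.texture_v == 0.25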
class AutomaticDestinationsFile(object):
  """Class that contains an .automaticDestinations-ms file.

  Attributes:
    entries: list of the LNK file entries.
    recovered_entries: list of the recovered LNK file entries.
  """

  _DEST_LIST_STREAM_HEADER = construct.Struct(
      u'dest_list_stream_header',
      construct.ULInt32(u'format_version'),
      construct.ULInt32(u'number_of_entries'),
      construct.ULInt32(u'number_of_pinned_entries'),
      construct.LFloat32(u'unknown1'),
      construct.ULInt32(u'last_entry_number'),
      construct.ULInt32(u'unknown2'),
      construct.ULInt32(u'last_revision_number'),
      construct.ULInt32(u'unknown3'))

  _DEST_LIST_STREAM_ENTRY_V1 = construct.Struct(
      u'dest_list_stream_entry_v1',
      construct.ULInt64(u'unknown1'),
      construct.Bytes(u'droid_volume_identifier', 16),
      construct.Bytes(u'droid_file_identifier', 16),
      construct.Bytes(u'birth_droid_volume_identifier', 16),
      construct.Bytes(u'birth_droid_file_identifier', 16),
      construct.String(u'hostname', 16),
      construct.ULInt32(u'entry_number'),
      construct.ULInt32(u'unknown2'),
      construct.LFloat32(u'unknown3'),
      construct.ULInt64(u'last_modification_time'),
      construct.ULInt32(u'pin_status'),
      construct.ULInt16(u'path_size'))

  _DEST_LIST_STREAM_ENTRY_V3 = construct.Struct(
      u'dest_list_stream_entry_v3',
      construct.ULInt64(u'unknown1'),
      construct.Bytes(u'droid_volume_identifier', 16),
      construct.Bytes(u'droid_file_identifier', 16),
      construct.Bytes(u'birth_droid_volume_identifier', 16),
      construct.Bytes(u'birth_droid_file_identifier', 16),
      construct.String(u'hostname', 16),
      construct.ULInt32(u'entry_number'),
      construct.ULInt32(u'unknown2'),
      construct.LFloat32(u'unknown3'),
      construct.ULInt64(u'last_modification_time'),
      construct.ULInt32(u'pin_status'),
      construct.ULInt32(u'unknown4'),
      construct.ULInt32(u'unknown5'),
      construct.ULInt64(u'unknown6'),
      construct.ULInt16(u'path_size'))

  def __init__(self, debug=False):
    """Initializes the .automaticDestinations-ms file object.

    Args:
      debug: optional boolean value to indicate if debug information should
             be printed.
    """
    super(AutomaticDestinationsFile, self).__init__()
    self._debug = debug
    self._format_version = None
    self._file_object = None
    self._file_object_opened_in_object = False
    self._file_size = 0
    self._olecf_file = pyolecf.file()

    self.entries = []
    self.recovered_entries = []

  def _ReadDestList(self):
    """Reads the DestList stream.

    Raises:
      IOError: if the DestList stream cannot be read.
    """
    olecf_item = self._olecf_file.root_item.get_sub_item_by_name(u'DestList')

    self._ReadDestListHeader(olecf_item)

    stream_offset = olecf_item.get_offset()
    while stream_offset < olecf_item.get_size():
      entry_size = self._ReadDestListEntry(olecf_item, stream_offset)
      stream_offset += entry_size

  def _ReadDestListEntry(self, olecf_item, stream_offset):
    """Reads a DestList stream entry.

    Args:
      olecf_item: the OLECF item (instance of pyolecf.item).
      stream_offset: an integer containing the stream offset of the entry.

    Returns:
      An integer containing the entry data size.

    Raises:
      IOError: if the DestList stream entry cannot be read.
""" if self._format_version == 1: dest_list_entry = self._DEST_LIST_STREAM_ENTRY_V1 elif self._format_version >= 3: dest_list_entry = self._DEST_LIST_STREAM_ENTRY_V3 if self._debug: print(u'Reading entry at offset: 0x{0:08x}'.format(stream_offset)) entry_data = olecf_item.read(dest_list_entry.sizeof()) if self._debug: print(u'Entry data:') print(hexdump.Hexdump(entry_data)) try: dest_list_entry_struct = dest_list_entry.parse(entry_data) except construct.FieldError as exception: raise IOError(( u'Unable to parse entry with error: {0:s}').format(exception)) entry_path_size = dest_list_entry_struct.path_size * 2 if self._debug: print(u'Unknown1\t\t\t\t\t\t\t\t: 0x{0:08x}'.format( dest_list_entry_struct.unknown1)) try: uuid_object = uuid.UUID( bytes_le=dest_list_entry_struct.droid_volume_identifier) print(u'Droid volume identifier\t\t\t\t\t\t\t: {0:s}'.format( uuid_object)) except (TypeError, ValueError): pass try: uuid_object = uuid.UUID( bytes_le=dest_list_entry_struct.droid_file_identifier) print(u'Droid file identifier\t\t\t\t\t\t\t: {0:s}'.format( uuid_object)) except (TypeError, ValueError): pass try: uuid_object = uuid.UUID( bytes_le=dest_list_entry_struct.birth_droid_volume_identifier) print(u'Birth droid volume identifier\t\t\t\t\t\t: {0:s}'.format( uuid_object)) except (TypeError, ValueError): pass try: uuid_object = uuid.UUID( bytes_le=dest_list_entry_struct.birth_droid_file_identifier) print(u'Birth droid file identifier\t\t\t\t\t\t: {0:s}'.format( uuid_object)) except (TypeError, ValueError): pass hostname = dest_list_entry_struct.hostname hostname, _, _ = hostname.partition(u'\x00') print(u'Hostname\t\t\t\t\t\t\t\t: {0:s}'.format(hostname)) print(u'Entry number\t\t\t\t\t\t\t\t: {0:d}'.format( dest_list_entry_struct.entry_number)) print(u'Unknown2\t\t\t\t\t\t\t\t: 0x{0:08x}'.format( dest_list_entry_struct.unknown2)) print(u'Unknown3\t\t\t\t\t\t\t\t: {0:f}'.format( dest_list_entry_struct.unknown3)) print(u'Last modification time\t\t\t\t\t\t\t: {0!s}'.format( FromFiletime(dest_list_entry_struct.last_modification_time))) # TODO: debug print pin status. print(u'Pin status\t\t\t\t\t\t\t\t: 0x{0:08x}'.format( dest_list_entry_struct.pin_status)) if self._format_version >= 3: print(u'Unknown4\t\t\t\t\t\t\t\t: 0x{0:08x}'.format( dest_list_entry_struct.unknown4)) print(u'Unknown5\t\t\t\t\t\t\t\t: 0x{0:08x}'.format( dest_list_entry_struct.unknown5)) print(u'Unknown6\t\t\t\t\t\t\t\t: 0x{0:08x}'.format( dest_list_entry_struct.unknown6)) print(u'Path size\t\t\t\t\t\t\t\t: {0:d} ({1:d})'.format( dest_list_entry_struct.path_size, entry_path_size)) print(u'') entry_path_data = olecf_item.read(entry_path_size) if self._debug: print(u'Entry path data:') print(hexdump.Hexdump(entry_path_data)) try: path_string = entry_path_data.decode(u'utf16') except UnicodeDecodeError as exception: path_string = u'' if self._debug: print(u'Path string\t\t\t\t\t\t\t\t: {0:s}'.format(path_string)) print(u'') entry_footer_data = b'' if self._format_version >= 3: entry_footer_data = olecf_item.read(4) if self._debug: print(u'Entry footer data:') print(hexdump.Hexdump(entry_footer_data)) return len(entry_data) + len(entry_path_data) + len(entry_footer_data) def _ReadDestListHeader(self, olecf_item): """Reads the DestList stream header. Args: olecf_item: the OLECF item (instance of pyolecf.item). Raises: IOError: if the DestList stream header cannot be read. 
""" olecf_item.seek(0, os.SEEK_SET) if self._debug: print(u'Reading header at offset: 0x{0:08x}'.format(0)) header_data = olecf_item.read(self._DEST_LIST_STREAM_HEADER.sizeof()) if self._debug: print(u'Header data:') print(hexdump.Hexdump(header_data)) try: dest_list_header_struct = self._DEST_LIST_STREAM_HEADER.parse(header_data) except construct.FieldError as exception: raise IOError(( u'Unable to parse header with error: {0:s}').format(exception)) if self._debug: print(u'Format version\t\t\t\t\t\t\t\t: {0:d}'.format( dest_list_header_struct.format_version)) print(u'Number of entries\t\t\t\t\t\t\t: {0:d}'.format( dest_list_header_struct.number_of_entries)) print(u'Number of pinned entries\t\t\t\t\t\t: {0:d}'.format( dest_list_header_struct.number_of_pinned_entries)) print(u'Unknown1\t\t\t\t\t\t\t\t: {0:f}'.format( dest_list_header_struct.unknown1)) print(u'Last entry number\t\t\t\t\t\t\t: {0:d}'.format( dest_list_header_struct.last_entry_number)) print(u'Unknown2\t\t\t\t\t\t\t\t: 0x{0:08x}'.format( dest_list_header_struct.unknown2)) print(u'Last revision number\t\t\t\t\t\t\t: {0:d}'.format( dest_list_header_struct.last_revision_number)) print(u'Unknown3\t\t\t\t\t\t\t\t: 0x{0:08x}'.format( dest_list_header_struct.unknown3)) print(u'') if dest_list_header_struct.format_version not in (1, 3, 4): raise IOError( u'Unsupported format version: {0:d}'.format( dest_list_header_struct.format_version)) self._format_version = dest_list_header_struct.format_version def _ReadLNKFile(self, olecf_item): """Reads a LNK file. Args: olecf_item: the OLECF item (instance of pyolecf.item). Returns: A LNK file entry (instance of LNKFileEntry). Raises: IOError: if the LNK file cannot be read. """ if self._debug: print(u'Reading LNK file from stream: {0:s}'.format(olecf_item.name)) lnk_file_entry = LNKFileEntry(olecf_item.name) try: lnk_file_entry.Open(olecf_item) except IOError as exception: raise IOError(( u'Unable to parse LNK file from stream: {0:s} ' u'with error: {1:s}').format(olecf_item.name, exception)) if self._debug: print(u'') return lnk_file_entry def _ReadLNKFiles(self): """Reads the LNK files. Raises: IOError: if the LNK files cannot be read. """ for olecf_item in self._olecf_file.root_item.sub_items: if olecf_item.name == u'DestList': continue lnk_file_entry = self._ReadLNKFile(olecf_item) if lnk_file_entry: self.entries.append(lnk_file_entry) def Close(self): """Closes the .customDestinations-ms file.""" if self._olecf_file: self._olecf_file.close() if self._file_object_opened_in_object: self._file_object.close() self._file_object = None def Open(self, filename): """Opens the .customDestinations-ms file. Args: filename: the filename. """ stat_object = os.stat(filename) self._file_size = stat_object.st_size self._file_object = open(filename, 'rb') self._file_object_opened_in_object = True self._olecf_file.open_file_object(self._file_object) self._ReadDestList() self._ReadLNKFiles()
def lfloat():
    PFLOAT = construct.Struct("PFLOAT",
        construct.LFloat32("x"))
    return PFLOAT
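# Hedged usage sketch (assumes construct 2.5.x): building and re-parsing a
# single little-endian float with the PFLOAT struct returned by lfloat().
import construct

def _demo_lfloat():
    pfloat = lfloat()
    data = pfloat.build(construct.Container(x=9.75))
    assert len(data) == 4
    assert pfloat.parse(data).x == 9.75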
def bin_header_le_float():
    BIN = construct.Struct(
        "BIN",
        # Increment between evenly spaced samples (nominal value).
        construct.LFloat32("delta"),
        # Minimum value of dependent variable.
        construct.LFloat32("depmin"),
        # Maximum value of dependent variable.
        construct.LFloat32("depmax"),
        # Multiplying scale factor for dependent variable.
        construct.LFloat32("scale"),
        # Observed increment if different from nominal value.
        construct.LFloat32("odelta"),
        # Beginning value of the independent variable. [required]
        construct.LFloat32("b"),
        # Ending value of the independent variable. [required]
        construct.LFloat32("e"),
        # Event origin time (seconds relative to reference time).
        construct.LFloat32("o"),
        # First arrival time (seconds relative to reference time).
        construct.LFloat32("a"),
        construct.LFloat32("fmt"),
        construct.LFloat32("t0"),
        construct.LFloat32("t1"),
        construct.LFloat32("t2"),
        construct.LFloat32("t3"),
        construct.LFloat32("t4"),
        construct.LFloat32("t5"),
        construct.LFloat32("t6"),
        construct.LFloat32("t7"),
        construct.LFloat32("t8"),
        construct.LFloat32("t9"),
        # Fini or end of event time (seconds relative to reference time).
        construct.LFloat32("f"),
        construct.LFloat32("resp0"),
        construct.LFloat32("resp1"),
        construct.LFloat32("resp2"),
        construct.LFloat32("resp3"),
        construct.LFloat32("resp4"),
        construct.LFloat32("resp5"),
        construct.LFloat32("resp6"),
        construct.LFloat32("resp7"),
        construct.LFloat32("resp8"),
        construct.LFloat32("resp9"),
        # Station latitude (degrees, north positive).
        construct.LFloat32("stla"),
        # Station longitude (degrees, east positive).
        construct.LFloat32("stlo"),
        # Station elevation (meters). [not currently used]
        construct.LFloat32("stel"),
        # Station depth below surface (meters). [not currently used]
        construct.LFloat32("stdp"),
        # Event latitude (degrees, north positive).
        construct.LFloat32("evla"),
        # Event longitude (degrees, east positive).
        construct.LFloat32("evlo"),
        # Event elevation (meters). [not currently used]
        construct.LFloat32("evel"),
        # Event depth below surface (meters). [not currently used]
        construct.LFloat32("evdp"),
        # Event magnitude.
        construct.LFloat32("mag"),
        # User defined variable storage area {ai n}=0,9.
        construct.LFloat32("user0"),
        construct.LFloat32("user1"),
        construct.LFloat32("user2"),
        construct.LFloat32("user3"),
        construct.LFloat32("user4"),
        construct.LFloat32("user5"),
        construct.LFloat32("user6"),
        construct.LFloat32("user7"),
        construct.LFloat32("user8"),
        construct.LFloat32("user9"),
        # Station to event distance (km).
        construct.LFloat32("dist"),
        # Event to station azimuth (degrees).
        construct.LFloat32("az"),
        # Station to event azimuth (degrees).
        construct.LFloat32("baz"),
        # Station to event great circle arc length (degrees).
        construct.LFloat32("gcarc"),
        construct.LFloat32("sb"),
        construct.LFloat32("sdelta"),
        # Mean value of dependent variable.
        construct.LFloat32("depmen"),
        # Component azimuth (degrees, clockwise from north).
        construct.LFloat32("cmpaz"),
        # Component incident angle (degrees, from vertical).
        construct.LFloat32("cmpinc"),
        construct.LFloat32("xminimum"),
        construct.LFloat32("xmaximum"),
        construct.LFloat32("yminimum"),
        construct.LFloat32("ymaximum"),
        construct.LFloat32("unused6"),
        construct.LFloat32("unused7"),
        construct.LFloat32("unused8"),
        construct.LFloat32("unused9"),
        construct.LFloat32("unused10"),
        construct.LFloat32("unused11"),
        construct.LFloat32("unused12"))
    return BIN
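# Hedged sketch: the struct returned by bin_header_le_float() declares 70
# little-endian float fields, so its fixed size is 70 * 4 = 280 bytes
# (assumes construct 2.5.x). Parsing an all-zero buffer just demonstrates
# field access by name.
def _demo_sac_float_header():
    bin_struct = bin_header_le_float()
    assert bin_struct.sizeof() == 280
    header = bin_struct.parse(b'\x00' * bin_struct.sizeof())
    assert header.delta == 0.0
    assert header.stla == 0.0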