def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
  """Extracts relevant BT entries.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
  """
  paired_identifiers = match.get('PairedDevices', [])

  for device_identifier, device_values in iter(
      match.get('DeviceCache', {}).items()):
    device_name = device_values.get('Name', '')
    if device_name:
      device_name = ''.join(('Name:', device_name))

    # The same event data object is reused for every event of this device;
    # only the description and key are updated per timestamp.
    event_data = plist_event.PlistTimeEventData()
    event_data.root = '/DeviceCache'

    datetime_value = device_values.get('LastInquiryUpdate', None)
    if datetime_value:
      event_data.desc = ' '.join(
          filter(None, ('Bluetooth Discovery', device_name)))
      event_data.key = '{0:s}/LastInquiryUpdate'.format(device_identifier)

      event = time_events.PythonDatetimeEvent(
          datetime_value, definitions.TIME_DESCRIPTION_WRITTEN)
      parser_mediator.ProduceEventWithEventData(event, event_data)

      if device_identifier in paired_identifiers:
        event_data.desc = 'Paired:True {0:s}'.format(device_name)
        event_data.key = device_identifier

        event = time_events.PythonDatetimeEvent(
            datetime_value, definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)

    datetime_value = device_values.get('LastNameUpdate', None)
    if datetime_value:
      event_data.desc = ' '.join(
          filter(None, ('Device Name Set', device_name)))
      event_data.key = '{0:s}/LastNameUpdate'.format(device_identifier)

      event = time_events.PythonDatetimeEvent(
          datetime_value, definitions.TIME_DESCRIPTION_WRITTEN)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    datetime_value = device_values.get('LastServicesUpdate', None)
    if datetime_value:
      event_data.desc = ' '.join(
          filter(None, ('Services Updated', device_name)))
      event_data.key = '{0:s}/LastServicesUpdate'.format(device_identifier)

      event = time_events.PythonDatetimeEvent(
          datetime_value, definitions.TIME_DESCRIPTION_WRITTEN)
      parser_mediator.ProduceEventWithEventData(event, event_data)
def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
  """Extract device information from the iPod plist.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
  """
  for device_identifier, device_information in iter(
      match.get('Devices', {}).items()):
    connected_time = device_information.get('Connected', None)
    if not connected_time:
      continue

    event_data = IPodPlistEventData()
    event_data.device_id = device_identifier

    # TODO: refactor.
    # Copy the remaining device properties onto the event data, converting
    # plist key names to lower-cased, underscore-separated attribute names.
    for property_name, property_value in iter(device_information.items()):
      if property_name == 'Connected':
        continue
      attribute_name = property_name.lower().replace(' ', '_')
      setattr(event_data, attribute_name, property_value)

    event = time_events.PythonDatetimeEvent(
        connected_time, definitions.TIME_DESCRIPTION_LAST_CONNECTED)
    parser_mediator.ProduceEventWithEventData(event, event_data)
def GetEntries(self, parser_mediator, top_level=None, **unused_kwargs):
  """Extracts relevant install history entries.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    top_level (dict[str, object]): plist top-level key.
  """
  for entry in top_level:
    datetime_value = entry.get('date', None)
    package_identifiers = entry.get('packageIdentifiers', [])

    # Skip entries that lack either a timestamp or package information.
    if not datetime_value or not package_identifiers:
      continue

    description = (
        'Installation of [{0:s} {1:s}] using [{2:s}]. Packages: '
        '{3:s}.').format(
            entry.get('displayName', '<UNKNOWN>'),
            entry.get('displayVersion', '<DISPLAY_VERSION>'),
            entry.get('processName', '<PROCESS_NAME>'),
            ', '.join(package_identifiers))

    event_data = plist_event.PlistTimeEventData()
    event_data.desc = description
    event_data.key = ''
    event_data.root = '/item'

    event = time_events.PythonDatetimeEvent(
        datetime_value, definitions.TIME_DESCRIPTION_WRITTEN)
    parser_mediator.ProduceEventWithEventData(event, event_data)
def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
  """Extracts relevant Airport entries.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
  """
  if 'RememberedNetworks' not in match:
    return

  for network in match['RememberedNetworks']:
    ssid = network.get('SSIDString', 'UNKNOWN_SSID')
    security_type = network.get('SecurityType', 'UNKNOWN_SECURITY_TYPE')

    event_data = plist_event.PlistTimeEventData()
    event_data.desc = (
        '[WiFi] Connected to network: <{0:s}> using security {1:s}').format(
            ssid, security_type)
    event_data.key = 'item'
    event_data.root = '/RememberedNetworks'

    datetime_value = network.get('LastConnected', None)
    if not datetime_value:
      # No last connected timestamp: emit a placeholder "Not set"
      # semantic time instead.
      date_time = dfdatetime_semantic_time.SemanticTime('Not set')
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_NOT_A_TIME)
    else:
      event = time_events.PythonDatetimeEvent(
          datetime_value, definitions.TIME_DESCRIPTION_WRITTEN)

    parser_mediator.ProduceEventWithEventData(event, event_data)
def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
  """Extracts relevant Volume Configuration Spotlight entries.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
  """
  stores = match.get('Stores', {})
  for volume_name, volume in iter(stores.items()):
    datetime_value = volume.get('CreationDate', None)
    if not datetime_value:
      continue

    # Use get() with a placeholder instead of direct indexing so a store
    # entry without a PartialPath key does not raise KeyError and abort
    # processing of the remaining volumes.
    partial_path = volume.get('PartialPath', '<UNKNOWN>')

    event_data = plist_event.PlistTimeEventData()
    event_data.desc = 'Spotlight Volume {0:s} ({1:s}) activated.'.format(
        volume_name, partial_path)
    event_data.key = ''
    event_data.root = '/Stores'

    event = time_events.PythonDatetimeEvent(
        datetime_value, definitions.TIME_DESCRIPTION_WRITTEN)
    parser_mediator.ProduceEventWithEventData(event, event_data)
def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
  """Extracts relevant Spotlight entries.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
  """
  user_shortcuts = match.get('UserShortcuts', {})
  for search_term, shortcut_values in iter(user_shortcuts.items()):
    last_used = shortcut_values.get('LAST_USED', None)
    if not last_used:
      continue

    event_data = plist_event.PlistTimeEventData()
    event_data.desc = (
        'Spotlight term searched "{0:s}" associate to {1:s} ({2:s})').format(
            search_term,
            shortcut_values.get('DISPLAY_NAME', '<DISPLAY_NAME>'),
            shortcut_values.get('PATH', '<PATH>'))
    event_data.key = search_term
    event_data.root = '/UserShortcuts'

    event = time_events.PythonDatetimeEvent(
        last_used, definitions.TIME_DESCRIPTION_WRITTEN)
    parser_mediator.ProduceEventWithEventData(event, event_data)
def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
  """Extracts relevant TimeMachine entries.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
  """
  for destination in match.get('Destinations', []):
    destination_identifier = (
        destination.get('DestinationID', None) or 'Unknown device')

    alias = destination.get('BackupAlias', '<ALIAS>')
    try:
      alias = self.TM_BACKUP_ALIAS.parse(alias).value
      alias = codecs.decode(alias, 'utf-8')
    except construct.FieldError:
      # The alias could not be parsed from its binary representation.
      alias = 'Unknown alias'

    event_data = plist_event.PlistTimeEventData()
    event_data.desc = 'TimeMachine Backup in {0:s} ({1:s})'.format(
        alias, destination_identifier)
    event_data.key = 'item/SnapshotDates'
    event_data.root = '/Destinations'

    # One event per recorded snapshot date, all sharing the same event data.
    for datetime_value in destination.get('SnapshotDates', []):
      event = time_events.PythonDatetimeEvent(
          datetime_value, definitions.TIME_DESCRIPTION_WRITTEN)
      parser_mediator.ProduceEventWithEventData(event, event_data)
def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
  """Extracts relevant Apple Account entries.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
  """
  accounts = match.get('Accounts', {})
  for account_name, account in iter(accounts.items()):
    first_name = account.get('FirstName', '<FirstName>')
    last_name = account.get('LastName', '<LastName>')
    general_description = '{0:s} ({1:s} {2:s})'.format(
        account_name, first_name, last_name)

    # The same event data object is reused for all events of this account;
    # only the description changes per timestamp.
    event_data = plist_event.PlistTimeEventData()
    event_data.key = account_name
    event_data.root = '/Accounts'

    # Each known timestamp key maps to its event description template.
    for plist_key, description_template in (
        ('CreationDate', 'Configured Apple account {0:s}'),
        ('LastSuccessfulConnect', 'Connected Apple account {0:s}'),
        ('ValidationDate', 'Last validation Apple account {0:s}')):
      datetime_value = account.get(plist_key, None)
      if not datetime_value:
        continue

      event_data.desc = description_template.format(general_description)
      event = time_events.PythonDatetimeEvent(
          datetime_value, definitions.TIME_DESCRIPTION_WRITTEN)
      parser_mediator.ProduceEventWithEventData(event, event_data)
def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
  """Extracts relevant MacOS update entries.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
  """
  version = match.get('LastAttemptSystemVersion', 'N/A')
  pending_count = match.get('LastUpdatesAvailable', None)

  event_data = plist_event.PlistTimeEventData()
  event_data.desc = 'Last MacOS {0:s} full update.'.format(version)
  event_data.key = ''
  event_data.root = '/'

  datetime_value = match.get('LastFullSuccessfulDate', None)
  if datetime_value:
    event = time_events.PythonDatetimeEvent(
        datetime_value, definitions.TIME_DESCRIPTION_WRITTEN)
    parser_mediator.ProduceEventWithEventData(event, event_data)

  datetime_value = match.get('LastSuccessfulDate', None)
  if datetime_value and pending_count:
    package_names = []
    for recommended_update in match.get('RecommendedUpdates', []):
      update_identifier = recommended_update.get('Identifier', '<IDENTIFIER>')
      update_product_key = recommended_update.get(
          'Product Key', '<PRODUCT_KEY>')
      package_names.append('{0:s}({1:s})'.format(
          update_identifier, update_product_key))

    # Without any recommended updates there is no partial update to report.
    if not package_names:
      return

    event_data.desc = (
        'Last Mac OS {0!s} partially update, pending {1!s}: '
        '{2:s}.').format(version, pending_count, ','.join(package_names))

    event = time_events.PythonDatetimeEvent(
        datetime_value, definitions.TIME_DESCRIPTION_WRITTEN)
    parser_mediator.ProduceEventWithEventData(event, event_data)
def GetEntries(self, parser_mediator, top_level=None, **unused_kwargs): """Simple method to exact date values from a Plist. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. top_level (dict[str, object]): plist top-level key. """ for root, key, datetime_value in interface.RecurseKey(top_level): if not isinstance(datetime_value, datetime.datetime): continue event_data = plist_event.PlistTimeEventData() event_data.key = key event_data.root = root event = time_events.PythonDatetimeEvent( datetime_value, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data)
def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
  """Extracts relevant TimeMachine entries.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
  """
  backup_alias_map = self._GetDataTypeMap('timemachine_backup_alias')

  for destination in match.get('Destinations', []):
    destination_identifier = (
        destination.get('DestinationID', None) or 'Unknown device')

    alias_data = destination.get('BackupAlias', b'')
    try:
      backup_alias = self._ReadStructureFromByteStream(
          alias_data, 0, backup_alias_map)
      alias = backup_alias.string

    except (ValueError, errors.ParseError) as exception:
      # Report the malformed alias but keep processing the destination.
      parser_mediator.ProduceExtractionError(
          'unable to parse backup alias value with error: {0!s}'.format(
              exception))
      alias = 'Unknown alias'

    event_data = plist_event.PlistTimeEventData()
    event_data.desc = 'TimeMachine Backup in {0:s} ({1:s})'.format(
        alias, destination_identifier)
    event_data.key = 'item/SnapshotDates'
    event_data.root = '/Destinations'

    # One event per recorded snapshot date, all sharing the same event data.
    for datetime_value in destination.get('SnapshotDates', []):
      event = time_events.PythonDatetimeEvent(
          datetime_value, definitions.TIME_DESCRIPTION_WRITTEN)
      parser_mediator.ProduceEventWithEventData(event, event_data)
def ParseFileObject(self, parser_mediator, file_object):
  """Parses a file-like object using Hachoir.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    file_object (dfvfs.FileIO): a file-like object.

  Raises:
    UnableToParseFile: when the file cannot be parsed.
  """
  display_name = parser_mediator.GetDisplayName()

  def _RaiseUnableToParse(reason):
    """Raises UnableToParseFile with the uniformly formatted message."""
    raise errors.UnableToParseFile(
        '[{0:s}] unable to parse file {1:s}: {2:s}'.format(
            self.NAME, display_name, reason))

  try:
    input_stream = hachoir_core.stream.InputIOStream(
        file_object, None, tags=[])
  except hachoir_core.error.HachoirError as exception:
    _RaiseUnableToParse(exception)

  if not input_stream:
    _RaiseUnableToParse('Not fstream')

  try:
    guessed_parser = hachoir_parser.guessParser(input_stream)
  except hachoir_core.error.HachoirError as exception:
    _RaiseUnableToParse(exception)

  if not guessed_parser:
    _RaiseUnableToParse('Not parser')

  try:
    metadata = hachoir_metadata.extractMetadata(guessed_parser)
  except (AssertionError, AttributeError) as exception:
    _RaiseUnableToParse(exception)

  try:
    metadata_lines = metadata.exportPlaintext(human=False)
  except AttributeError as exception:
    _RaiseUnableToParse(exception)

  if not metadata_lines:
    _RaiseUnableToParse('No metadata')

  attributes = {}
  extracted_events = []
  for metadata_line in metadata_lines:
    # Metadata lines of interest have the form "- key: value".
    if not metadata_line.startswith('-') or len(metadata_line) < 3:
      continue

    key, _, value = metadata_line[2:].partition(': ')
    subkey, _, subvalue = value.partition(': ')

    if subkey == 'LastPrinted' and subvalue != 'False':
      date_object = timelib.Timestamp.FromTimeString(
          subvalue, timezone=parser_mediator.timezone)
      if isinstance(date_object, datetime.datetime):
        extracted_events.append((date_object, subkey))

    try:
      date = metadata.get(key)
      if isinstance(date, datetime.datetime):
        extracted_events.append((date, key))
    except ValueError:
      pass

    # Collect every value per key; repeated keys become a list of values.
    existing_value = attributes.get(key)
    if key not in attributes:
      attributes[key] = value
    elif isinstance(existing_value, list):
      existing_value.append(value)
    else:
      attributes[key] = [existing_value, value]

  if not extracted_events:
    _RaiseUnableToParse('No events discovered')

  event_data = HachoirEventData()
  event_data.metadata = attributes

  for datetime_value, usage in extracted_events:
    event = time_events.PythonDatetimeEvent(datetime_value, usage)
    parser_mediator.ProduceEventWithEventData(event, event_data)