def ParseBookmarkFolderRow(self, parser_mediator, query, row, **unused_kwargs):
  """Parses a bookmark folder row.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    query (str): query that created the row.
    row (sqlite3.Row): row.
  """
  query_hash = hash(query)

  event_data = FirefoxPlacesBookmarkFolderEventData()
  event_data.offset = self._GetRowValue(query_hash, row, 'id')
  event_data.query = query
  event_data.title = self._GetRowValue(query_hash, row, 'title') or 'N/A'

  # Produce one event per available timestamp column.
  for column_name, timestamp_description in (
      ('dateAdded', definitions.TIME_DESCRIPTION_ADDED),
      ('lastModified', definitions.TIME_DESCRIPTION_MODIFICATION)):
    timestamp = self._GetRowValue(query_hash, row, column_name)
    if timestamp:
      date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
          timestamp=timestamp)
      event = time_events.DateTimeValuesEvent(date_time, timestamp_description)
      parser_mediator.ProduceEventWithEventData(event, event_data)
def _CopyValueToDateTime(self, value):
  """Copies an event filter value to a date and time object.

  Args:
    value (str): event filter value.

  Returns:
    dfdatetime.PosixTimeInMicroseconds: date and time object.

  Raises:
    ValueError: if the value cannot be copied to a date and time object.
  """
  if not isinstance(value, int):
    try:
      # First interpret the value as a base 10 POSIX timestamp in
      # microseconds.
      value = int(value, 10)
    except (TypeError, ValueError):
      # Fall through to the date and time string handling below.
      pass

  if isinstance(value, int):
    date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
        timestamp=value)
  else:
    try:
      # Adjust the ISO 8601 string so it resembles a Python date and time
      # string.
      if value and len(value) > 10 and value[10] == 'T':
        value = ' '.join(value.split('T'))

      date_time = dfdatetime_posix_time.PosixTimeInMicroseconds()
      date_time.CopyFromDateTimeString(value)
    except (TypeError, ValueError):
      raise ValueError(
          'Unsupported timestamp value: {0!s}'.format(value))

  return date_time
def testProperties(self):
  """Tests the properties."""
  # With an explicit timestamp the property reflects the stored value.
  time_object = posix_time.PosixTimeInMicroseconds(
      timestamp=1281643591546875)
  self.assertEqual(time_object.timestamp, 1281643591546875)

  # Without a timestamp the property is None.
  time_object = posix_time.PosixTimeInMicroseconds()
  self.assertIsNone(time_object.timestamp)
def ParseFileObject(self, parser_mediator, file_object, **kwargs):
  """Parses an utmp file-like object.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    file_object (dfvfs.FileIO): a file-like object.

  Raises:
    UnableToParseFile: when the file cannot be parsed.
  """
  # The first entry is parsed strictly to determine whether this is an
  # utmp file at all; any defect rejects the whole file.
  try:
    timestamp, event_data = self._ReadEntry(parser_mediator, file_object, 0)
  except errors.ParseError as exception:
    raise errors.UnableToParseFile(
        'Unable to parse first utmp entry with error: {0!s}'.format(
            exception))

  if not event_data.username:
    raise errors.UnableToParseFile(
        'Unable to parse first utmp entry with error: missing username')

  if not timestamp:
    raise errors.UnableToParseFile(
        'Unable to parse first utmp entry with error: missing timestamp')

  date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
      timestamp=timestamp)
  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_START)
  parser_mediator.ProduceEventWithEventData(event, event_data)

  file_size = file_object.get_size()
  file_offset = file_object.tell()

  # Subsequent entries are parsed best-effort until the end of the file or
  # an abort request.
  while file_offset < file_size and not parser_mediator.abort:
    try:
      timestamp, event_data = self._ReadEntry(
          parser_mediator, file_object, file_offset)
    except errors.ParseError:
      # Note that the utmp file can contain trailing data.
      break

    date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
        timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_START)
    parser_mediator.ProduceEventWithEventData(event, event_data)

    file_offset = file_object.tell()
def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
  """Extracts relevant Apple Account entries.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
  """
  accounts = match.get('Accounts', {})
  for name_account, account in iter(accounts.items()):
    first_name = account.get('FirstName', '<FirstName>')
    last_name = account.get('LastName', '<LastName>')
    general_description = '{0:s} ({1:s} {2:s})'.format(
        name_account, first_name, last_name)

    event_data = plist_event.PlistTimeEventData()
    event_data.key = name_account
    event_data.root = '/Accounts'

    # Each plist key of interest maps onto the description used for the
    # corresponding event.
    for plist_key, description_format in (
        ('CreationDate', 'Configured Apple account {0:s}'),
        ('LastSuccessfulConnect', 'Connected Apple account {0:s}'),
        ('ValidationDate', 'Last validation Apple account {0:s}')):
      datetime_value = account.get(plist_key, None)
      if not datetime_value:
        continue

      event_data.desc = description_format.format(general_description)
      timestamp = timelib.Timestamp.FromPythonDatetime(datetime_value)
      date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
          timestamp=timestamp)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_WRITTEN)
      parser_mediator.ProduceEventWithEventData(event, event_data)
def testCopyToDateTimeString(self):
  """Tests the CopyToDateTimeString function."""
  time_object = posix_time.PosixTimeInMicroseconds(
      timestamp=1281643591546875)
  self.assertEqual(
      time_object.CopyToDateTimeString(), '2010-08-12 20:06:31.546875')

  # An unset date and time object yields None.
  time_object = posix_time.PosixTimeInMicroseconds()
  self.assertIsNone(time_object.CopyToDateTimeString())
def testCopyToStatTimeTuple(self):
  """Tests the CopyToStatTimeTuple function."""
  time_object = posix_time.PosixTimeInMicroseconds(
      timestamp=1281643591546875)
  self.assertEqual(
      time_object.CopyToStatTimeTuple(), (1281643591, 5468750))

  # An unset date and time object yields a tuple of None values.
  time_object = posix_time.PosixTimeInMicroseconds()
  self.assertEqual(time_object.CopyToStatTimeTuple(), (None, None))
def testGetDate(self):
  """Tests the GetDate function."""
  time_object = posix_time.PosixTimeInMicroseconds(
      timestamp=1281643591546875)
  self.assertEqual(time_object.GetDate(), (2010, 8, 12))

  # An unset date and time object yields a tuple of None values.
  time_object = posix_time.PosixTimeInMicroseconds()
  self.assertEqual(time_object.GetDate(), (None, None, None))
def testGetTimeOfDay(self):
  """Tests the GetTimeOfDay function."""
  time_object = posix_time.PosixTimeInMicroseconds(
      timestamp=1281643591546875)
  self.assertEqual(time_object.GetTimeOfDay(), (20, 6, 31))

  # An unset date and time object yields a tuple of None values.
  time_object = posix_time.PosixTimeInMicroseconds()
  self.assertEqual(time_object.GetTimeOfDay(), (None, None, None))
def testGetNormalizedTimestamp(self):
  """Tests the _GetNormalizedTimestamp function."""
  time_object = posix_time.PosixTimeInMicroseconds(
      timestamp=1281643591546875)
  self.assertEqual(
      time_object._GetNormalizedTimestamp(),
      decimal.Decimal('1281643591.546875'))

  # An unset date and time object yields None.
  time_object = posix_time.PosixTimeInMicroseconds()
  self.assertIsNone(time_object._GetNormalizedTimestamp())
def _PrintSessionDetailsAsTable(self, session, session_identifier):
  """Prints the details of a session as a table.

  Args:
    session (Session): session.
    session_identifier (str): session identifier, formatted as a UUID.
  """
  def _CopyTimestampToString(timestamp):
    # A session timestamp of None means the value was never recorded.
    if timestamp is None:
      return 'N/A'
    date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
        timestamp=timestamp)
    return date_time.CopyToDateTimeStringISO8601()

  start_time = _CopyTimestampToString(session.start_time)
  completion_time = _CopyTimestampToString(session.completion_time)

  if session.enabled_parser_names:
    enabled_parser_names = ', '.join(sorted(session.enabled_parser_names))
  else:
    enabled_parser_names = 'N/A'

  if session.artifact_filters:
    artifact_filters_string = ', '.join(session.artifact_filters)
  else:
    artifact_filters_string = 'N/A'

  title = 'Session: {0:s}'.format(session_identifier)
  table_view = views.ViewsFactory.GetTableView(
      self._views_format_type, title=title)

  table_view.AddRow(['Start time', start_time])
  table_view.AddRow(['Completion time', completion_time])
  table_view.AddRow(['Product name', session.product_name])
  table_view.AddRow(['Product version', session.product_version])
  table_view.AddRow(
      ['Command line arguments', session.command_line_arguments or 'N/A'])
  table_view.AddRow(
      ['Parser filter expression',
       session.parser_filter_expression or 'N/A'])
  table_view.AddRow(['Enabled parser and plugins', enabled_parser_names])
  table_view.AddRow(
      ['Preferred encoding', session.preferred_encoding or 'N/A'])
  table_view.AddRow(['Debug mode', session.debug_mode])
  table_view.AddRow(['Artifact filters', artifact_filters_string])
  table_view.AddRow(['Filter file', session.filter_file or 'N/A'])

  table_view.Write(self._output_writer)
def testGetPlasoTimestamp(self):
  """Tests the GetPlasoTimestamp function."""
  time_object = posix_time.PosixTimeInMicroseconds(
      timestamp=1281643591546875)
  self.assertEqual(time_object.GetPlasoTimestamp(), 1281643591546875)

  # An unset date and time object yields None.
  time_object = posix_time.PosixTimeInMicroseconds()
  self.assertIsNone(time_object.GetPlasoTimestamp())
def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
  """Extracts relevant Mac OS X update entries.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
  """
  version = match.get(u'LastAttemptSystemVersion', u'N/A')
  pending = match.get(u'LastUpdatesAvailable', None)

  event_data = plist_event.PlistTimeEventData()
  event_data.desc = u'Last Mac OS X {0:s} full update.'.format(version)
  event_data.key = u''
  event_data.root = u'/'

  datetime_value = match.get(u'LastFullSuccessfulDate', None)
  if datetime_value:
    timestamp = timelib.Timestamp.FromPythonDatetime(datetime_value)
    date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
        timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_WRITTEN)
    parser_mediator.ProduceEventWithEventData(event, event_data)

  datetime_value = match.get(u'LastSuccessfulDate', None)
  if datetime_value and pending:
    software = [
        u'{0:s}({1:s})'.format(
            update.get(u'Identifier', u'<IDENTIFIER>'),
            update.get(u'Product Key', u'<PRODUCT_KEY>'))
        for update in match.get(u'RecommendedUpdates', [])]
    if not software:
      return

    event_data.desc = (
        u'Last Mac OS {0!s} partially update, pending {1!s}: '
        u'{2:s}.').format(version, pending, u','.join(software))

    timestamp = timelib.Timestamp.FromPythonDatetime(datetime_value)
    date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
        timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_WRITTEN)
    parser_mediator.ProduceEventWithEventData(event, event_data)
def _FormatTime(self, event):
  """Formats the time.

  Args:
    event (EventObject): event.

  Returns:
    str: time field.
  """
  # TODO: preserve dfdatetime as an object.
  # TODO: add support for self._output_mediator.timezone
  date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
      timestamp=event.timestamp)

  date_tuple = date_time.GetDate()
  time_tuple = date_time.GetTimeOfDay()
  try:
    # Formatting the date validates it as well; an unset date and time
    # object yields None elements which raise TypeError below.
    _ = '{0:04d}-{1:02d}-{2:02d}'.format(*date_tuple)
    return '{0:02d}:{1:02d}:{2:02d}'.format(*time_tuple)
  except (TypeError, ValueError):
    self._ReportEventError(
        event,
        ('unable to copy timestamp: {0!s} to a human readable time. '
         'Defaulting to: "--:--:--"').format(event.timestamp))
    return '--:--:--'
def _FormatDateTime(self, event, event_data):
  """Formats the date and time.

  Args:
    event (EventObject): event.
    event_data (EventData): event data.

  Returns:
    str: date and time string or "N/A" if no event timestamp is available.
  """
  if not event.timestamp:
    return 'N/A'

  # TODO: preserve dfdatetime as an object.
  # TODO: add support for self._output_mediator.timezone
  date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
      timestamp=event.timestamp)

  date_and_time_tuple = date_time.GetDate() + date_time.GetTimeOfDay()
  try:
    return '{0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}'.format(
        *date_and_time_tuple)
  except (TypeError, ValueError):
    self._ReportEventError(event, event_data, (
        'unable to copy timestamp: {0!s} to a human readable date and '
        'time. Defaulting to: "0000-00-00 00:00:00"').format(
            event.timestamp))
    return '0000-00-00 00:00:00'
def CheckEventValues(self, storage_writer, event, expected_event_values):
  """Asserts that an event and its event data matches the expected values.

  Args:
    storage_writer (StorageWriter): storage writer.
    event (EventObject): event to check.
    expected_event_values (dict[str, list[str]]): expected values of the
        event and event data attribute values per name.
  """
  # The event data is retrieved lazily, only when a non-event attribute is
  # requested.
  event_data = None
  for name, expected_value in expected_event_values.items():
    if name == 'timestamp' and isinstance(expected_value, str):
      # A string expectation is compared against the date and time string
      # representation of the timestamp.
      date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
          timestamp=event.timestamp)
      value = date_time.CopyToDateTimeString()
    elif name in ('timestamp', 'timestamp_desc'):
      value = getattr(event, name, None)
    else:
      if not event_data:
        event_data = self._GetEventDataOfEvent(storage_writer, event)
      value = getattr(event_data, name, None)

    error_message = (
        'event value: "{0:s}" does not match expected value').format(name)
    self.assertEqual(value, expected_value, error_message)
def GetEntries(self, parser_mediator, top_level=None, **unused_kwargs):
  """Extracts relevant install history entries.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    top_level (dict[str, object]): plist top-level key.
  """
  for entry in top_level:
    datetime_value = entry.get('date', None)
    package_identifiers = entry.get('packageIdentifiers', [])

    # Entries without a date or without packages are not of interest.
    if not datetime_value or not package_identifiers:
      continue

    event_data = plist_event.PlistTimeEventData()
    event_data.desc = (
        'Installation of [{0:s} {1:s}] using [{2:s}]. Packages: '
        '{3:s}.').format(
            entry.get('displayName', '<UNKNOWN>'),
            entry.get('displayVersion', '<DISPLAY_VERSION>'),
            entry.get('processName', '<PROCESS_NAME>'),
            ', '.join(package_identifiers))
    event_data.key = ''
    event_data.root = '/item'

    timestamp = timelib.Timestamp.FromPythonDatetime(datetime_value)
    date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
        timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_WRITTEN)
    parser_mediator.ProduceEventWithEventData(event, event_data)
def WriteEventBody(self, event, event_data, event_tag):
  """Writes event values to the output.

  Args:
    event (EventObject): event.
    event_data (EventData): event data.
    event_tag (EventTag): event tag.
  """
  if not hasattr(event, 'timestamp'):
    return

  # TODO: preserve dfdatetime as an object.
  date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
      timestamp=event.timestamp)
  # Fall back to 0 when the timestamp cannot be represented as POSIX
  # seconds.
  posix_timestamp = date_time.CopyToPosixTimestamp() or 0

  output_text = '{0:d}|{1:s}|{2:s}|{3:s}|{4!s}\n'.format(
      posix_timestamp,
      self._FormatSource(event, event_data),
      self._FormatHostname(event_data),
      self._FormatUsername(event_data),
      self._FormatDescription(event, event_data))
  self._output_writer.Write(output_text)
def WriteEventBody(self, event):
  """Writes the body of an event object to the output.

  Args:
    event (EventObject): event.
  """
  if not hasattr(event, 'timestamp'):
    return

  # TODO: preserve dfdatetime as an object.
  date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
      timestamp=event.timestamp)
  # Fall back to 0 when the timestamp cannot be represented as POSIX
  # seconds.
  posix_timestamp = date_time.CopyToPosixTimestamp() or 0

  output_text = '{0:d}|{1:s}|{2:s}|{3:s}|{4:s}|{5!s}|{6!s}\n'.format(
      posix_timestamp,
      self._FormatSource(event),
      self._FormatHostname(event),
      self._FormatUsername(event),
      self._FormatDescription(event),
      self._output_mediator.timezone,
      self._FormatNotes(event))
  self._output_writer.Write(output_text)
def _PrintAnalysisReportsDetails(self, storage_reader):
  """Prints the details of the analysis reports.

  Args:
    storage_reader (StorageReader): storage reader.
  """
  for index, analysis_report in enumerate(
      storage_reader.GetAnalysisReports()):
    # NOTE(review): this skips the first self._number_of_analysis_reports
    # reports — presumably those were handled elsewhere; confirm intent.
    if index + 1 <= self._number_of_analysis_reports:
      continue

    date_time_string = None
    if analysis_report.time_compiled is not None:
      date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
          timestamp=analysis_report.time_compiled)
      date_time_string = date_time.CopyToDateTimeStringISO8601()

    table_view = views.ViewsFactory.GetTableView(
        self._views_format_type,
        title='Analysis report: {0:d}'.format(index))

    table_view.AddRow(['Name plugin', analysis_report.plugin_name or 'N/A'])
    table_view.AddRow(['Date and time', date_time_string or 'N/A'])
    table_view.AddRow(
        ['Event filter', analysis_report.event_filter or 'N/A'])

    if analysis_report.analysis_counter:
      table_view.AddRow(['Results', ''])
      for key, value in sorted(analysis_report.analysis_counter.items()):
        table_view.AddRow([key, value])
    else:
      table_view.AddRow(['Text', analysis_report.text or ''])

    table_view.Write(self._output_writer)
def _AddArgumentDateTime(self, **unused_kwargs):
  """Adds a date and time argument to the current expression.

  Note that this function is used as a callback by _GetNextToken.

  Returns:
    str: state or None if the argument could not be added to the current
        expression.

  Raises:
    ParseError: if datetime value does not contain a valid POSIX timestamp
        in microseconds or ISO 8601 date and time string.
  """
  datetime_value = self._datetime_value

  if isinstance(datetime_value, int):
    date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
        timestamp=datetime_value)

  elif isinstance(datetime_value, str):
    date_time = dfdatetime_time_elements.TimeElementsInMicroseconds()
    try:
      date_time.CopyFromStringISO8601(datetime_value)
    except ValueError:
      raise errors.ParseError('unsupported ISO 8601 string: {0:s}.'.format(
          datetime_value))

  else:
    raise errors.ParseError('unsupported datetime value: {0!s}.'.format(
        datetime_value))

  # The value is consumed once it is successfully converted.
  self._datetime_value = None
  return self._AddArgument(date_time)
def CreateEventFromValues(event_values):
  """Creates an event and event data from event values.

  Args:
    event_values (dict[str, str]): event values.

  Returns:
    tuple[EventObject, EventData, EventDataStream]: event, event data and
        event data stream for testing.
  """
  # The values are consumed from a copy; whatever remains becomes the
  # event data.
  remaining_values = dict(event_values)

  event = events.EventObject()
  for attribute_name in ('timestamp', 'timestamp_desc'):
    attribute_value = remaining_values.pop(attribute_name, None)
    if attribute_value is None:
      continue

    if attribute_name == 'timestamp' and isinstance(attribute_value, str):
      attribute_value = shared_test_lib.CopyTimestampFromString(
          attribute_value)
    setattr(event, attribute_name, attribute_value)

  event.date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
      timestamp=event.timestamp)

  event_data_stream = events.EventDataStream()
  for attribute_name in ('path_spec', 'md5_hash', 'sha256_hash'):
    attribute_value = remaining_values.pop(attribute_name, None)
    if attribute_value is not None:
      setattr(event_data_stream, attribute_name, attribute_value)

  event_data = events.EventData()
  event_data.CopyFromDict(remaining_values)

  return event, event_data, event_data_stream
def ParseMessagesRow(self, parser_mediator, query, row, **unused_kwargs):
  """Parses a Messages row.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    query (str): query that created the row.
    row (sqlite3.Row): row.
  """
  query_hash = hash(query)

  event_data = HangoutsMessageData()
  event_data.body = self._GetRowValue(query_hash, row, 'text')
  event_data.message_status = self._GetRowValue(query_hash, row, 'status')
  event_data.message_type = self._GetRowValue(query_hash, row, 'type')
  event_data.offset = self._GetRowValue(query_hash, row, '_id')
  event_data.query = query
  event_data.sender = self._GetRowValue(query_hash, row, 'full_name')

  timestamp = self._GetRowValue(query_hash, row, 'timestamp')
  date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
      timestamp=timestamp)
  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_CREATION)
  parser_mediator.ProduceEventWithEventData(event, event_data)
def testCopyFromString(self):
  """Tests the CopyFromString function."""
  posix_time_object = posix_time.PosixTimeInMicroseconds()

  # Each test case pairs a date and time string with the expected POSIX
  # timestamp in microseconds.
  test_cases = (
      (u'2010-08-12', 1281571200000000),
      (u'2010-08-12 21:06:31', 1281647191000000),
      (u'2010-08-12 21:06:31.546875', 1281647191546875),
      (u'2010-08-12 21:06:31.546875-01:00', 1281650791546875),
      (u'2010-08-12 21:06:31.546875+01:00', 1281643591546875),
      (u'1601-01-02 00:00:00', -11644387200000000))

  for date_time_string, expected_timestamp in test_cases:
    posix_time_object.CopyFromString(date_time_string)
    self.assertEqual(posix_time_object.timestamp, expected_timestamp)
def _GetValue(self, attribute_name, event, event_data, event_tag):
  """Retrieves the value of a specific event, data or tag attribute.

  Args:
    attribute_name (str): name of the attribute to retrieve the value from.
    event (EventObject): event to retrieve the value from.
    event_data (EventData): event data to retrieve the value from.
    event_tag (EventTag): event tag to retrieve the value from.

  Returns:
    object: attribute value or None if not available.
  """
  if attribute_name in self._DEPRECATED_ATTRIBUTE_NAMES:
    logging.warning(
        'Expansion of {0:s} in event filter no longer supported'.format(
            attribute_name))

  if attribute_name in self._EVENT_ATTRIBUTE_NAMES:
    attribute_value = getattr(event, attribute_name, None)

    # Make sure timestamp attribute values are (dfdatetime) date time
    # objects.
    # TODO: remove when timestamp values are (de)serialized as dfdatetime
    # objects.
    if attribute_name == 'timestamp' and not isinstance(
        attribute_value, dfdatetime_posix_time.PosixTimeInMicroseconds):
      attribute_value = dfdatetime_posix_time.PosixTimeInMicroseconds(
          timestamp=attribute_value)

    return attribute_value

  if attribute_name == 'tag':
    return getattr(event_tag, 'labels', None)

  return getattr(event_data, attribute_name, None)
def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
  """Extracts relevant Spotlight entries.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
  """
  shortcuts = match.get('UserShortcuts', {})
  for search_text, data in iter(shortcuts.items()):
    datetime_value = data.get('LAST_USED', None)
    if not datetime_value:
      continue

    event_data = plist_event.PlistTimeEventData()
    event_data.desc = (
        'Spotlight term searched "{0:s}" associate to {1:s} ({2:s})'
    ).format(
        search_text,
        data.get('DISPLAY_NAME', '<DISPLAY_NAME>'),
        data.get('PATH', '<PATH>'))
    event_data.key = search_text
    event_data.root = '/UserShortcuts'

    timestamp = timelib.Timestamp.FromPythonDatetime(datetime_value)
    date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
        timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_WRITTEN)
    parser_mediator.ProduceEventWithEventData(event, event_data)
def GetString(self):
  """Retrieves a string representation of the report.

  Returns:
    str: string representation of the report.
  """
  string_list = [
      'Report generated from: {0:s}'.format(self.plugin_name)]

  time_compiled = getattr(self, 'time_compiled', None)
  if time_compiled is not None:
    date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
        timestamp=time_compiled)
    string_list.append('Generated on: {0:s}'.format(
        date_time.CopyToDateTimeStringISO8601()))

  filter_string = getattr(self, 'filter_string', '')
  if filter_string:
    string_list.append('Filter String: {0:s}'.format(filter_string))

  if self.text:
    string_list.extend(['', 'Report text:', self.text])

  return '\n'.join(string_list)
def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
  """Extracts relevant TimeMachine entries.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
  """
  for destination in match.get('Destinations', []):
    destination_identifier = (
        destination.get('DestinationID', None) or 'Unknown device')

    alias = destination.get('BackupAlias', '<ALIAS>')
    try:
      alias = self.TM_BACKUP_ALIAS.parse(alias).value
    except construct.FieldError:
      alias = 'Unknown alias'

    event_data = plist_event.PlistTimeEventData()
    event_data.desc = 'TimeMachine Backup in {0:s} ({1:s})'.format(
        alias, destination_identifier)
    event_data.key = 'item/SnapshotDates'
    event_data.root = '/Destinations'

    # One event is produced per recorded snapshot date.
    for datetime_value in destination.get('SnapshotDates', []):
      timestamp = timelib.Timestamp.FromPythonDatetime(datetime_value)
      date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
          timestamp=timestamp)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_WRITTEN)
      parser_mediator.ProduceEventWithEventData(event, event_data)
def _ExtractContentSettingsExceptions(self, exceptions_dict, parser_mediator):
  """Extracts site specific events.

  Args:
    exceptions_dict (dict): Permission exceptions data from Preferences
        file.
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
  """
  for permission in exceptions_dict:
    if permission not in self._EXCEPTIONS_KEYS:
      continue

    for urls, url_dict in exceptions_dict.get(permission, {}).items():
      last_used = url_dict.get('last_used', None)
      if not last_used:
        continue

      # If secondary_url is '*', the permission applies to primary_url.
      # If secondary_url is a valid URL, the permission applies to
      # elements loaded from secondary_url being embedded in primary_url.
      primary_url, secondary_url = urls.split(',')

      event_data = ChromeContentSettingsExceptionsEventData()
      event_data.permission = permission
      event_data.primary_url = primary_url
      event_data.secondary_url = secondary_url

      # The last_used value is converted to microseconds.
      date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
          timestamp=int(last_used * 1000000))
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_LAST_VISITED)
      parser_mediator.ProduceEventWithEventData(event, event_data)
def _FormatDate(self, event, event_data):
  """Formats the date.

  Args:
    event (EventObject): event.
    event_data (EventData): event data.

  Returns:
    str: date field.
  """
  # TODO: preserve dfdatetime as an object.
  # TODO: add support for self._output_mediator.timezone
  date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
      timestamp=event.timestamp)

  date_tuple = date_time.GetDate()
  try:
    # An unset date and time object yields None elements which raise
    # TypeError below.
    return '{0:04d}-{1:02d}-{2:02d}'.format(*date_tuple)
  except (TypeError, ValueError):
    self._ReportEventError(
        event, event_data,
        ('unable to copy timestamp: {0!s} to a human readable date. '
         'Defaulting to: "0000-00-00"').format(event.timestamp))
    return '0000-00-00'