def CompileReport(self, mediator):
  """Compiles an analysis report.

  Args:
    mediator (AnalysisMediator): mediates interactions between analysis
        plugins and other components, such as storage and dfvfs.

  Returns:
    AnalysisReport: analysis report.
  """
  # Group the per-term counts by search engine; counter keys have the
  # form 'engine:term'. The redundant iter() wrapper was removed since
  # dict.items() is directly iterable.
  results = {}
  for key, count in self._counter.items():
    search_engine, _, search_term = key.partition(':')
    results.setdefault(search_engine, {})
    results[search_engine][search_term] = count

  lines_of_text = []
  for search_engine, terms in sorted(results.items()):
    lines_of_text.append(' == ENGINE: {0:s} =='.format(search_engine))

    # Sort by descending count; equal counts fall back to reverse
    # lexicographic order of the search term.
    for search_term, count in sorted(
        terms.items(), key=lambda x: (x[1], x[0]), reverse=True):
      lines_of_text.append('{0:d} {1:s}'.format(count, search_term))

    # An empty string is added to have SetText create an empty line.
    lines_of_text.append('')

  lines_of_text.append('')
  report_text = '\n'.join(lines_of_text)
  analysis_report = reports.AnalysisReport(
      plugin_name=self.NAME, text=report_text)
  analysis_report.report_array = self._search_term_timeline
  analysis_report.report_dict = results
  return analysis_report
def _ConvertDictToAnalysisReport(self, json_dict):
  """Converts a JSON dict into an analysis report.

  The dictionary of the JSON serialized objects consists of:
  {
      '__type__': 'AnalysisReport'
      '_event_tags': { ... }
      'report_array': { ... }
      'report_dict': { ... }
      ...
  }

  Here '__type__' indicates the object base type, which in this case
  should be 'AnalysisReport'. The remaining dictionary elements make up
  the analysis report attributes.

  Args:
    json_dict: a dictionary of the JSON serialized objects.

  Returns:
    An analysis report (instance of AnalysisReport).
  """
  # The plugin name is copied over as a regular attribute below, hence
  # the empty constructor argument.
  analysis_report = reports.AnalysisReport(u'')

  for attribute_name, attribute_value in json_dict.items():
    setattr(analysis_report, attribute_name, attribute_value)

  return analysis_report
def CompileReport(self, analysis_mediator):
  """Compiles an analysis report.

  Args:
    analysis_mediator: The analysis mediator object (instance of
                       AnalysisMediator).

  Returns:
    The analysis report (instance of AnalysisReport).
  """
  # Use items() instead of the Python 2 only iteritems() so this method
  # also runs on Python 3, where iteritems() raises AttributeError.
  results = {}
  for key, count in self._counter.items():
    search_engine, _, search_term = key.partition(u':')
    results.setdefault(search_engine, {})
    results[search_engine][search_term] = count

  lines_of_text = []
  for search_engine, terms in sorted(results.items()):
    lines_of_text.append(u' == ENGINE: {0:s} =='.format(search_engine))

    # Sort by descending count; equal counts fall back to reverse
    # lexicographic order of the search term.
    for search_term, count in sorted(
        terms.items(), key=lambda x: (x[1], x[0]), reverse=True):
      lines_of_text.append(u'{0:d} {1:s}'.format(count, search_term))

    # An empty string is added to have SetText create an empty line.
    lines_of_text.append(u'')

  lines_of_text.append(u'')
  report_text = u'\n'.join(lines_of_text)
  analysis_report = reports.AnalysisReport(self.NAME, text=report_text)
  analysis_report.report_array = self._search_term_timeline
  analysis_report.report_dict = results
  return analysis_report
def testReadAndWriteSerializedAnalysisReport(self):
  """Test ReadSerialized and WriteSerialized of AnalysisReport."""
  expected_report_text = (
      ' == USER: dude ==\n'
      ' Google Keep - notes and lists [hmjkmjkepdijhoojdojkdfohbdgmmhki]\n'
      '\n'
      ' == USER: frank ==\n'
      ' Google Play Music [icppfcnhkcmnfdhfhphakoifcfokfdhg]\n'
      ' YouTube [blpcfgokakmgnkcojhhkbfbldkacnbeo]\n'
      '\n')

  expected_analysis_report = reports.AnalysisReport(
      plugin_name='chrome_extension_test', text=expected_report_text)
  expected_analysis_report.time_compiled = 1431978243000000

  # Round-trip the report through the JSON serializer.
  serializer = json_serializer.JSONAttributeContainerSerializer

  json_string = serializer.WriteSerialized(expected_analysis_report)
  self.assertIsNotNone(json_string)

  analysis_report = serializer.ReadSerialized(json_string)
  self.assertIsNotNone(analysis_report)
  self.assertIsInstance(analysis_report, reports.AnalysisReport)
def CompileReport(self, mediator):
  """Compiles an analysis report.

  Args:
    mediator (AnalysisMediator): mediates interactions between analysis
        plugins and other components, such as storage and dfvfs.

  Returns:
    AnalysisReport: analysis report.
  """
  different_hashes = 0
  different_list = []
  # Iterate over the entry lists directly instead of iterating keys and
  # re-indexing the dictionary.
  for rpm_entry_list in self._rpmdb_dict.values():
    for rpm_item in rpm_entry_list:
      # Only report files whose hash in the RPM database differs from the
      # hash computed from the events.
      if rpm_item.sha256 == rpm_item.event_sha256:
        continue
      different_hashes += 1
      entry = (
          u'filename:{0}\n\tevent_sha256_sum:{1}\n\t'
          u'rpmdb_sha256_sum:{2}').format(
              rpm_item.path, rpm_item.event_sha256, rpm_item.sha256)
      different_list.append(entry)

  # Prepend the summary line so it appears first in the report.
  different_list.insert(0, (
      u'RPMDB plugin determined {0} sha256 hashes differ from the rpm '
      u'database.\n').format(different_hashes))

  report_text = u'\n'.join(different_list)
  analysis_report = reports.AnalysisReport(
      plugin_name=self.NAME, text=report_text)
  return analysis_report
def testReadAndWriteSerializedAnalysisReport(self):
  """Test ReadSerialized and WriteSerialized of AnalysisReport."""
  expected_comment = u'This is a test event tag.'
  expected_uuid = u'403818f93dce467bac497ef0f263fde8'
  expected_labels = [u'Test', u'AnotherTest']

  expected_report_dict = {
      u'dude': [
          [u'Google Keep - notes and lists',
           u'hmjkmjkepdijhoojdojkdfohbdgmmhki']],
      u'frank': [
          [u'YouTube', u'blpcfgokakmgnkcojhhkbfbldkacnbeo'],
          [u'Google Play Music', u'icppfcnhkcmnfdhfhphakoifcfokfdhg']]}

  expected_report_text = (
      u' == USER: dude ==\n'
      u' Google Keep - notes and lists [hmjkmjkepdijhoojdojkdfohbdgmmhki]\n'
      u'\n'
      u' == USER: frank ==\n'
      u' Google Play Music [icppfcnhkcmnfdhfhphakoifcfokfdhg]\n'
      u' YouTube [blpcfgokakmgnkcojhhkbfbldkacnbeo]\n'
      u'\n')

  expected_event_tag = events.EventTag(
      comment=expected_comment, event_uuid=expected_uuid)
  expected_event_tag.AddLabels(expected_labels)

  expected_analysis_report = reports.AnalysisReport(
      plugin_name=u'chrome_extension_test', text=expected_report_text)
  expected_analysis_report.report_dict = expected_report_dict
  expected_analysis_report.time_compiled = 1431978243000000
  expected_analysis_report.SetTags([expected_event_tag])

  # Round-trip the report through the JSON serializer.
  serializer = json_serializer.JSONAttributeContainerSerializer

  json_string = serializer.WriteSerialized(expected_analysis_report)
  self.assertIsNotNone(json_string)

  analysis_report = serializer.ReadSerialized(json_string)
  self.assertIsNotNone(analysis_report)
  self.assertIsInstance(analysis_report, reports.AnalysisReport)

  # TODO: preserve the tuples in the report dict.
  # TODO: add report_array tests.
  # TODO: remove _event_tags.
  expected_analysis_report_dict = {
      u'_event_tags': [],
      u'plugin_name': u'chrome_extension_test',
      u'report_dict': expected_report_dict,
      u'text': expected_report_text,
      u'time_compiled': 1431978243000000}

  analysis_report_dict = analysis_report.CopyToDict()
  self.assertEqual(
      sorted(analysis_report_dict.items()),
      sorted(expected_analysis_report_dict.items()))
def testWriteSerialized(self):
  """Tests the WriteSerialized function."""
  analysis_report = reports.AnalysisReport(
      u'chrome_extension_test', text=self._report_text)
  analysis_report.time_compiled = 1431978243000000
  analysis_report.report_dict = self._report_dict

  self._TestWriteSerialized(
      self._serializer, analysis_report, self._proto_string)
def testGetAttributeNames(self):
  """Tests the GetAttributeNames function."""
  attribute_container = reports.AnalysisReport()

  expected_attribute_names = [
      'filter_string',
      'plugin_name',
      'report_dict',
      'text',
      'time_compiled']

  attribute_names = sorted(attribute_container.GetAttributeNames())
  self.assertEqual(attribute_names, expected_attribute_names)
def CompileReport(self, analysis_mediator):
  """Compiles an analysis report.

  Args:
    analysis_mediator (AnalysisMediator): mediates interactions between
        analysis plugins and other components, such as storage and dfVFS.

  Returns:
    AnalysisReport: analysis report.
  """
  report_text = 'TestMemory report'
  return reports.AnalysisReport(plugin_name=self.NAME, text=report_text)
def testStoreReport(self):
  """Tests the StoreReport function."""
  report = reports.AnalysisReport(plugin_name=u'test', text=u'test report')

  with shared_test_lib.TempDirectory() as temp_directory:
    storage_path = os.path.join(temp_directory, u'storage.plaso')
    storage_file = zip_file.StorageFile(storage_path)
    storage_file.StoreReport(report)
    storage_file.Close()
def ReadSerializedObject(cls, proto): """Reads an analysis report from serialized form. Args: proto: a protobuf object containing the serialized form (instance of plaso_storage_pb2.AnalysisReport). Returns: An analysis report (instance of AnalysisReport). """ # Plugin name is set as one of the attributes. analysis_report = reports.AnalysisReport(u'') for proto_attribute, value in proto.ListFields(): if proto_attribute.name == u'_event_tags': event_tags = [] # pylint: disable=protected-access for proto_event_tag in proto._event_tags: event_tag = ProtobufEventTagSerializer.ReadSerializedObject( proto_event_tag) event_tags.append(event_tag) analysis_report.SetTags(event_tags) # TODO: replace by ReadSerializedListObject, need tests first. # list_object = ProtobufEventAttributeSerializer.ReadSerializedListObject( # proto.report_array) elif proto_attribute.name == u'report_array': report_array = [] for proto_array in proto.report_array.values: _, list_value = ProtobufEventAttributeSerializer.ReadSerializedObject( proto_array) report_array.append(list_value) analysis_report.report_array = report_array # TODO: replace by ReadSerializedDictObject, need tests first. # dict_object = ProtobufEventAttributeSerializer.ReadSerializedDictObject( # proto.report_dict) elif proto_attribute.name == u'report_dict': report_dict = {} for proto_dict in proto.report_dict.attributes: dict_key, dict_value = ( ProtobufEventAttributeSerializer.ReadSerializedObject(proto_dict)) report_dict[dict_key] = dict_value analysis_report.report_dict = report_dict else: setattr(analysis_report, proto_attribute.name, value) return analysis_report
def testReadAndWriteSerializedAnalysisReport(self):
  """Test ReadSerialized and WriteSerialized of AnalysisReport."""
  expected_report_dict = {
      'dude': [
          ['Google Keep - notes and lists',
           'hmjkmjkepdijhoojdojkdfohbdgmmhki']],
      'frank': [
          ['YouTube', 'blpcfgokakmgnkcojhhkbfbldkacnbeo'],
          ['Google Play Music', 'icppfcnhkcmnfdhfhphakoifcfokfdhg']]}

  expected_report_text = (
      ' == USER: dude ==\n'
      ' Google Keep - notes and lists [hmjkmjkepdijhoojdojkdfohbdgmmhki]\n'
      '\n'
      ' == USER: frank ==\n'
      ' Google Play Music [icppfcnhkcmnfdhfhphakoifcfokfdhg]\n'
      ' YouTube [blpcfgokakmgnkcojhhkbfbldkacnbeo]\n'
      '\n')

  expected_analysis_report = reports.AnalysisReport(
      plugin_name='chrome_extension_test', text=expected_report_text)
  expected_analysis_report.report_dict = expected_report_dict
  expected_analysis_report.time_compiled = 1431978243000000

  # Round-trip the report through the JSON serializer.
  serializer = json_serializer.JSONAttributeContainerSerializer

  json_string = serializer.WriteSerialized(expected_analysis_report)
  self.assertIsNotNone(json_string)

  analysis_report = serializer.ReadSerialized(json_string)
  self.assertIsNotNone(analysis_report)
  self.assertIsInstance(analysis_report, reports.AnalysisReport)

  # TODO: preserve the tuples in the report dict.
  # TODO: add report_array tests.
  expected_analysis_report_dict = {
      'plugin_name': 'chrome_extension_test',
      'report_dict': expected_report_dict,
      'text': expected_report_text,
      'time_compiled': 1431978243000000}

  analysis_report_dict = analysis_report.CopyToDict()
  self.assertEqual(
      sorted(analysis_report_dict.items()),
      sorted(expected_analysis_report_dict.items()))
def testCopyToDict(self):
  """Tests the CopyToDict function."""
  analysis_report = reports.AnalysisReport(
      plugin_name=u'test', text=u'This is a test analysis report')

  expected_dict = {
      u'plugin_name': u'test',
      u'text': u'This is a test analysis report'}

  self.assertEqual(analysis_report.CopyToDict(), expected_dict)
def testAddAnalysisReport(self):
  """Tests the AddAnalysisReport function."""
  report = reports.AnalysisReport(plugin_name=u'test', text=u'test report')

  with shared_test_lib.TempDirectory() as temp_directory:
    storage_path = os.path.join(temp_directory, u'storage.plaso')

    storage_file = gzip_file.GZIPStorageFile()
    storage_file.Open(path=storage_path, read_only=False)
    storage_file.AddAnalysisReport(report)
    storage_file.Close()
def CompileReport(self, mediator):
  """Compiles an analysis report.

  Args:
    mediator (AnalysisMediator): mediates interactions between analysis
        plugins and other components, such as storage and dfvfs.

  Returns:
    AnalysisReport: analysis report.
  """
  number_of_tags = self._number_of_event_tags
  report_text = u'Tagging plugin produced {0:d} tags.\n'.format(
      number_of_tags)
  analysis_report = reports.AnalysisReport(
      plugin_name=self.NAME, text=report_text)
  return analysis_report
def testCopyToDict(self):
  """Tests the CopyToDict function."""
  analysis_report = reports.AnalysisReport(
      u'test', text=u'This is a test analysis report')

  expected_dict = {
      u'_event_tags': [],
      u'plugin_name': u'test',
      u'text': u'This is a test analysis report'}

  self.assertEqual(analysis_report.CopyToDict(), expected_dict)
def CompileReport(self, analysis_mediator):
  """Compiles an analysis report.

  Args:
    analysis_mediator: The analysis mediator object (instance of
                       AnalysisMediator).

  Returns:
    The analysis report (instance of AnalysisReport).
  """
  number_of_tags = len(self._tags)
  report_text = u'Tagging plugin produced {0:d} tags.\n'.format(
      number_of_tags)

  analysis_report = reports.AnalysisReport(self.NAME, text=report_text)
  analysis_report.SetTags(self._tags)
  return analysis_report
def testAddAnalysisReport(self):
  """Tests the AddAnalysisReport function."""
  session = sessions.Session()
  report = reports.AnalysisReport(plugin_name='test', text='test report')

  storage_writer = fake_storage.FakeStorageWriter(session)
  storage_writer.Open()
  storage_writer.AddAnalysisReport(report)
  storage_writer.Close()

  # Adding a report to a closed writer is expected to fail.
  with self.assertRaises(IOError):
    storage_writer.AddAnalysisReport(report)
def testWriteSerialized(self):
  """Tests the WriteSerialized function."""
  event_tag = events.EventTag(
      comment=self._comment, event_uuid=self._event_uuid)
  event_tag.AddLabels(self._labels)
  self.assertTrue(event_tag.IsValidForSerialization())

  analysis_report = reports.AnalysisReport(
      u'chrome_extension_test', text=self._report_text)
  analysis_report.time_compiled = 1431978243000000
  analysis_report.report_dict = self._report_dict
  analysis_report.SetTags([event_tag])

  self._TestWriteSerialized(
      self._serializer, analysis_report, self._json_dict)
def CompileReport(self, mediator): """Compiles an analysis report. Args: mediator (AnalysisMediator): mediates interactions between analysis plugins and other components, such as storage and dfvfs. Returns: AnalysisReport: report. """ # TODO: refactor to update the counter on demand instead of # during reporting. path_specs_per_labels_counter = collections.Counter() tags = [] while self._ContinueReportCompilation(): try: self._LogProgressUpdateIfReasonable() hash_analysis = self.hash_analysis_queue.get( timeout=self._analysis_queue_timeout) except Queue.Empty: # The result queue is empty, but there could still be items that need # to be processed by the analyzer. continue pathspecs, labels, new_tags = self._HandleHashAnalysis( hash_analysis) tags.extend(new_tags) for label in labels: path_specs_per_labels_counter[label] += len(pathspecs) self._analyzer.SignalAbort() lines_of_text = [u'{0:s} hash tagging results'.format(self.NAME)] for label, count in path_specs_per_labels_counter.items(): line_of_text = ( u'{0:d} path specifications tagged with label: {1:s}'.format( count, label)) lines_of_text.append(line_of_text) lines_of_text.append(u'') report_text = u'\n'.join(lines_of_text) for event_tag in tags: mediator.ProduceEventTag(event_tag) return reports.AnalysisReport( plugin_name=self.NAME, text=report_text)
def CompileReport(self, mediator):
  """Compiles an analysis report.

  Args:
    mediator (AnalysisMediator): mediates interactions between analysis
        plugins and other components, such as storage and dfvfs.

  Returns:
    AnalysisReport: the analysis report.
  """
  lines_of_text = ['Listing domains visited by all users']
  lines_of_text.extend(sorted(self._domains))
  # Trailing empty entry yields a final newline in the joined text.
  lines_of_text.append('')

  report_text = '\n'.join(lines_of_text)
  return reports.AnalysisReport(plugin_name=self.NAME, text=report_text)
def CompileReport(self, analysis_mediator):
  """Compiles an analysis report.

  Args:
    analysis_mediator: The analysis mediator object (instance of
                       AnalysisMediator).

  Returns:
    The analysis report (instance of AnalysisReport).
  """
  header = u'Listing domains visited by all users'
  # Trailing empty entry yields a final newline in the joined text.
  lines_of_text = [header] + sorted(self._domains) + [u'']

  report_text = u'\n'.join(lines_of_text)
  return reports.AnalysisReport(plugin_name=self.NAME, text=report_text)
def CompileReport(self, mediator):
  """Compiles an analysis report.

  Args:
    mediator (AnalysisMediator): mediates interactions between analysis
        plugins and other components, such as storage and dfvfs.

  Returns:
    AnalysisReport: analysis report.
  """
  summary = (
      'Sessionize plugin identified {0:d} sessions and '
      'applied {1:d} tags.').format(
          len(self._events_per_session), self._number_of_event_tags)

  lines_of_text = [summary]
  # One line per session: the enumerate index is the session number.
  for session_number, event_count in enumerate(self._events_per_session):
    lines_of_text.append('\tSession {0:d}: {1:d} events'.format(
        session_number, event_count))

  report_text = '\n'.join(lines_of_text)
  return reports.AnalysisReport(plugin_name=self.NAME, text=report_text)
def CompileReport(self, mediator):
  """Compiles an analysis report.

  Args:
    mediator (AnalysisMediator): mediates interactions between analysis
        plugins and other components, such as storage and dfvfs.

  Returns:
    AnalysisReport: report.
  """
  lines_of_text = ['Listing file paths and hashes']
  # Sort on the comparable string of the path specification. The lambda
  # parameter was renamed: the original shadowed the builtin "tuple".
  for path_spec, hashes in sorted(
      self._paths_with_hashes.items(),
      key=lambda path_spec_and_hashes: path_spec_and_hashes[0].comparable):
    path_string = self._GeneratePathString(mediator, path_spec, hashes)
    lines_of_text.append(path_string)

  lines_of_text.append('')
  report_text = '\n'.join(lines_of_text)
  return reports.AnalysisReport(plugin_name=self.NAME, text=report_text)
def CompileReport(self, analysis_mediator):
  """Compiles an analysis report.

  Args:
    analysis_mediator: the analysis mediator object (instance of
                       AnalysisMediator).

  Returns:
    The analysis report (instance of AnalysisReport).
  """
  lines_of_text = [u'Listing file paths and hashes']
  # Sort on the comparable string of the path specification. The lambda
  # parameter was renamed: the original shadowed the builtin "tuple".
  for pathspec, hashes in sorted(
      self._paths_with_hashes.items(),
      key=lambda pathspec_and_hashes: pathspec_and_hashes[0].comparable):
    path_string = self._GeneratePathString(
        analysis_mediator, pathspec, hashes)
    lines_of_text.append(path_string)

  lines_of_text.append(u'')
  report_text = u'\n'.join(lines_of_text)
  return reports.AnalysisReport(plugin_name=self.NAME, text=report_text)
def testGetAnalysisReports(self):
  """Tests the GetAnalysisReports function."""
  report = reports.AnalysisReport(plugin_name='test', text='test report')

  with shared_test_lib.TempDirectory() as temp_directory:
    storage_path = os.path.join(temp_directory, 'plaso.sqlite')

    # Write the report to a new storage file.
    storage_file = sqlite_file.SQLiteStorageFile()
    storage_file.Open(path=storage_path, read_only=False)
    storage_file.AddAnalysisReport(report)
    storage_file.Close()

    # Re-open the file and verify the report can be read back.
    storage_file = sqlite_file.SQLiteStorageFile()
    storage_file.Open(path=storage_path)
    stored_reports = list(storage_file.GetAnalysisReports())
    self.assertEqual(len(stored_reports), 1)
    storage_file.Close()
def CompileReport(self, mediator):
  """Compiles a report of the analysis.

  After the plugin has received every copy of an event to analyze this
  function will be called so that the report can be assembled.

  Args:
    mediator (AnalysisMediator): mediates interactions between analysis
        plugins and other components, such as storage and dfvfs.

  Returns:
    AnalysisReport: report.
  """
  analysis_report = reports.AnalysisReport(plugin_name=self.NAME)

  # Record the compilation time in microseconds since the POSIX epoch.
  timestamp = calendar.timegm(time.gmtime())
  analysis_report.time_compiled = (
      timestamp * definitions.MICROSECONDS_PER_SECOND)
  analysis_report.analysis_counter = self._analysis_counter
  return analysis_report
def CompileReport(self, analysis_mediator):
  """Compiles an analysis report.

  Args:
    analysis_mediator: The analysis mediator object (instance of
                       AnalysisMediator).

  Returns:
    The analysis report (instance of AnalysisReport).
  """
  lines_of_text = []
  for username, extension_list in sorted(self._results.items()):
    lines_of_text.append(u' == USER: {0:s} =='.format(username))
    lines_of_text.extend(
        u' {0:s} [{1:s}]'.format(extension, extension_id)
        for extension, extension_id in sorted(extension_list))
    lines_of_text.append(u'')

  lines_of_text.append(u'')
  report_text = u'\n'.join(lines_of_text)

  analysis_report = reports.AnalysisReport(self.NAME, text=report_text)
  analysis_report.report_dict = self._results
  return analysis_report
def CompileReport(self, analysis_mediator):
  """Compiles an analysis report.

  Args:
    analysis_mediator: The analysis mediator object (instance of
                       AnalysisMediator).

  Returns:
    The analysis report (instance of AnalysisReport).
  """
  tags = []
  lines_of_text = [u'{0:s} hash tagging Results'.format(self.NAME)]
  # Drain the hash analysis queue until _ContinueReportCompilation()
  # indicates there is nothing left to wait for.
  while self._ContinueReportCompilation():
    try:
      self._LogProgressUpdateIfReasonable()
      hash_analysis = self.hash_analysis_queue.get(
          timeout=self._analysis_queue_timeout)
    except Queue.Empty:
      # The result queue is empty, but there could still be items that need
      # to be processed by the analyzer.
      continue
    pathspecs, tag_strings, new_tags = self._HandleHashAnalysis(
        hash_analysis)
    tags.extend(new_tags)
    for pathspec in pathspecs:
      # One report line per analyzed path specification.
      text_line = self._GenerateTextLine(
          analysis_mediator, pathspec, tag_strings)
      lines_of_text.append(text_line)

  # No more results will be consumed; signal the analyzer to abort.
  self._analyzer.SignalAbort()

  lines_of_text.append(u'')
  report_text = u'\n'.join(lines_of_text)
  analysis_report = reports.AnalysisReport(
      plugin_name=self.NAME, text=report_text)
  analysis_report.SetTags(tags)
  return analysis_report
def CompileReport(self, mediator):
  """Compiles an analysis report.

  Args:
    mediator (AnalysisMediator): mediates interactions between analysis
        plugins and other components, such as storage and dfvfs.

  Returns:
    AnalysisReport: analysis report.
  """
  lines_of_text = []
  for username, extension_list in sorted(self._results.items()):
    lines_of_text.append(' == USER: {0:s} =='.format(username))
    lines_of_text.extend(
        ' {0:s} [{1:s}]'.format(extension, extension_identifier)
        for extension, extension_identifier in sorted(extension_list))
    lines_of_text.append('')

  lines_of_text.append('')
  report_text = '\n'.join(lines_of_text)

  analysis_report = reports.AnalysisReport(
      plugin_name=self.NAME, text=report_text)
  analysis_report.report_dict = self._results
  return analysis_report