def testFileSizeDistributionReportPlugin(self):
  """The size-distribution report counts the one stored file in its bucket."""
  image_name = "winexec_img.dd"
  client_id, = self.SetupClients(1)

  # Store a single known file so the report has exactly one entry to count.
  image_pathspec = rdf_paths.PathSpec(
      pathtype=rdf_paths.PathSpec.PathType.OS,
      path=os.path.join(self.base_path, image_name))
  filestore_test_lib.AddFileToFileStore(
      image_pathspec, client_id=client_id, token=self.token)

  # Run the filestore stats cron flow; consuming the iterator drives the
  # flow to completion and makes the file visible to the report.
  list(flow_test_lib.TestFlowHelper(
      filestore_stats.FilestoreStatsCronFlow.__name__, token=self.token))

  report = report_plugins.GetReportByName(
      filestore_report_plugins.FileSizeDistributionReportPlugin.__name__)
  api_report_data = report.GetReportData(
      stats_api.ApiGetReportArgs(name=report.__class__.__name__),
      token=self.token)

  self.checkStaticData(api_report_data)

  # Only one file was added, so exactly one size bucket carries a count.
  for series in api_report_data.stack_chart.data:
    expected = [1] if series.label == "976.6 KiB - 4.8 MiB" else [0]
    self.assertEqual([p.y for p in series.points], expected)
def testEmptyFileHasNoBackreferences(self):
  """An empty file must not create any backreferences in the file store."""
  # (basename, expected backref count): a regular non-empty file produces
  # three backrefs, an empty file produces none.
  cases = [("tcpip.sig", 3), ("empty_file", 0)]
  for basename, expected_backrefs in cases:
    full_path = os.path.join(self.base_path, basename)
    filestore_test_lib.AddFileToFileStore(
        rdf_paths.PathSpec(
            pathtype=rdf_paths.PathSpec.PathType.OS, path=full_path),
        client_id=self.client_id,
        token=self.token)
    self.assertEqual(len(self._GetBackRefs(full_path)), expected_backrefs)
def AddFile(self, path):
  """Add file with a subpath (relative to winexec_img.dd) to the store."""
  # Start from the image file on the OS filesystem, then address the target
  # inside the image via a nested TSK path component.
  nested_spec = rdf_paths.PathSpec(
      pathtype=rdf_paths.PathSpec.PathType.OS,
      path=os.path.join(self.base_path, "winexec_img.dd"))
  nested_spec.Append(path=path, pathtype=rdf_paths.PathSpec.PathType.TSK)
  return filestore_test_lib.AddFileToFileStore(
      nested_spec, client_id=self.client_id, token=self.token)
def testHashAgeUpdatedWhenNewHitAddedAfterAFF4IndexCacheAge(self):
  """A hit added after the AFF4 cache age still refreshes the hash's age."""

  def _AddFileAtTime(relpath_parts, fake_time):
    # Freeze time.time so the file store records a deterministic timestamp.
    with utils.Stubber(time, "time", lambda: fake_time):
      filestore_test_lib.AddFileToFileStore(
          rdf_paths.PathSpec(
              pathtype=rdf_paths.PathSpec.PathType.OS,
              path=os.path.join(self.base_path, *relpath_parts)),
          client_id=self.client_id,
          token=self.token)

  def _HashesInRange(age_range):
    return list(
        filestore.HashFileStore.ListHashes(token=self.token, age=age_range))

  def _HitsFor(hash_obj):
    return list(
        filestore.HashFileStore.GetClientsForHash(hash_obj, token=self.token))

  # Nothing has been stored yet, so the probed age range is empty.
  self.assertEqual(len(_HashesInRange((41e6, 1e10))), 0)

  _AddFileAtTime(["one_a"], 42)

  hashes = _HashesInRange((41e6, 1e10))
  self.assertTrue(hashes)
  self.assertEqual(len(_HitsFor(hashes[0])), 1)

  # Add a second file only after the intermediate index cache has expired.
  latest_time = 42 + aff4.FACTORY.intermediate_cache_age + 1
  _AddFileAtTime(["a", "b", "c", "helloc.txt"], latest_time)

  # Now there are two hits for the previously added hash.
  self.assertEqual(len(_HitsFor(hashes[0])), 2)

  # The new hit must also have advanced the hash's age into a later range.
  self.assertTrue(_HashesInRange((43e6, 1e10)))