def testWritePathInfosStoresCopy(self):
    """Verifies that WritePathInfos snapshots values rather than aliasing them."""
    client_id = self.InitializeClient()

    path_info = objects.PathInfo.OS(components=["foo", "bar"])

    path_info.stat_entry.st_size = 1337
    path_info.hash_entry.sha256 = b"foo"
    self.db.WritePathInfos(client_id, [path_info])
    timestamp_1 = rdfvalue.RDFDatetime.Now()

    # Mutate the very same object and write it again; the first snapshot in
    # the database must be unaffected.
    path_info.stat_entry.st_size = 42
    path_info.hash_entry.sha256 = b"bar"
    self.db.WritePathInfos(client_id, [path_info])
    timestamp_2 = rdfvalue.RDFDatetime.Now()

    path_id = objects.PathID(["foo", "bar"])

    result_1 = self.db.FindPathInfoByPathID(
        client_id,
        objects.PathInfo.PathType.OS,
        path_id,
        timestamp=timestamp_1)
    self.assertEqual(result_1.stat_entry.st_size, 1337)
    self.assertEqual(result_1.hash_entry.sha256, b"foo")

    result_2 = self.db.FindPathInfoByPathID(
        client_id,
        objects.PathInfo.PathType.OS,
        path_id,
        timestamp=timestamp_2)
    self.assertEqual(result_2.stat_entry.st_size, 42)
    self.assertEqual(result_2.hash_entry.sha256, b"bar")
def testFindPathInfosByPathIDsMany(self):
    """Checks batch retrieval of several path infos in a single call."""
    client_id = self.InitializeClient()

    foobar_info = objects.PathInfo.OS(components=["foo", "bar"])
    foobar_info.stat_entry.st_mode = 42
    foobar_info.hash_entry.md5 = b"foobar"

    norf_info = objects.PathInfo.OS(components=["baz", "quux", "norf"])
    norf_info.hash_entry.sha256 = b"bazquuxnorf"

    blargh_info = objects.PathInfo.OS(components=["blargh"], directory=True)
    blargh_info.stat_entry.st_size = 1337

    self.db.WritePathInfos(client_id, [foobar_info, norf_info, blargh_info])

    foobar_id = objects.PathID(["foo", "bar"])
    norf_id = objects.PathID(["baz", "quux", "norf"])
    blargh_id = objects.PathID(["blargh"])

    results = self.db.FindPathInfosByPathIDs(
        client_id, objects.PathInfo.PathType.OS,
        [foobar_id, norf_id, blargh_id])

    # Each result must carry exactly the data written for its own path.
    self.assertEqual(results[foobar_id].components, ["foo", "bar"])
    self.assertEqual(results[foobar_id].stat_entry.st_mode, 42)
    self.assertEqual(results[foobar_id].hash_entry.md5, b"foobar")

    self.assertEqual(results[norf_id].components, ["baz", "quux", "norf"])
    self.assertEqual(results[norf_id].hash_entry.sha256, b"bazquuxnorf")

    self.assertEqual(results[blargh_id].components, ["blargh"])
    self.assertEqual(results[blargh_id].stat_entry.st_size, 1337)
    self.assertEqual(results[blargh_id].directory, True)
def testMultiWriteHistoryTwoPaths(self):
    """Tests writing hash history for two paths with one multi-write call."""
    client_id = self.InitializeClient()

    foo_timestamp = rdfvalue.RDFDatetime.FromHumanReadable("2010-10-10")
    bar_timestamp = rdfvalue.RDFDatetime.FromHumanReadable("2011-11-11")

    path_info_foo = rdf_objects.PathInfo.OS(
        components=["foo"], timestamp=foo_timestamp)
    path_info_bar = rdf_objects.PathInfo.OS(
        components=["bar"], timestamp=bar_timestamp)
    self.db.WritePathInfos(client_id, [path_info_foo, path_info_bar])

    hash_entries = {
        path_info_foo: rdf_crypto.Hash(md5=b"foo"),
        path_info_bar: rdf_crypto.Hash(md5=b"bar"),
    }
    self.db.MultiWritePathHistory(client_id, {}, hash_entries)

    # Each path must have picked up its own hash and its own timestamp.
    path_info = self.db.FindPathInfoByPathID(
        client_id, rdf_objects.PathInfo.PathType.OS,
        rdf_objects.PathID(["foo"]))
    self.assertEqual(path_info.hash_entry.md5, b"foo")
    self.assertEqual(path_info.last_hash_entry_timestamp, foo_timestamp)

    path_info = self.db.FindPathInfoByPathID(
        client_id, rdf_objects.PathInfo.PathType.OS,
        rdf_objects.PathID(["bar"]))
    self.assertEqual(path_info.hash_entry.md5, b"bar")
    self.assertEqual(path_info.last_hash_entry_timestamp, bar_timestamp)
def testMultiWriteHistoryTwoPathTypes(self):
    """Tests history multi-writes spanning two different path types."""
    client_id = self.InitializeClient()

    os_timestamp = rdfvalue.RDFDatetime.FromHumanReadable("1999-01-01")
    tsk_timestamp = rdfvalue.RDFDatetime.FromHumanReadable("1988-01-01")

    path_info_1 = rdf_objects.PathInfo.OS(components=["foo"])
    path_info_1.timestamp = os_timestamp

    path_info_2 = rdf_objects.PathInfo.TSK(components=["bar"])
    path_info_2.timestamp = tsk_timestamp

    self.db.WritePathInfos(client_id, [path_info_1, path_info_2])

    stat_entries = {
        path_info_1: rdf_client.StatEntry(st_mode=1337),
        path_info_2: rdf_client.StatEntry(st_mode=108),
    }
    self.db.MultiWritePathHistory(client_id, stat_entries, {})

    # The OS path carries the OS stat entry and its own timestamp.
    path_info = self.db.FindPathInfoByPathID(
        client_id, rdf_objects.PathInfo.PathType.OS,
        rdf_objects.PathID(["foo"]))
    self.assertEqual(path_info.stat_entry.st_mode, 1337)
    self.assertEqual(path_info.last_stat_entry_timestamp, os_timestamp)

    # Likewise for the TSK path.
    path_info = self.db.FindPathInfoByPathID(
        client_id, rdf_objects.PathInfo.PathType.TSK,
        rdf_objects.PathID(["bar"]))
    self.assertEqual(path_info.stat_entry.st_mode, 108)
    self.assertEqual(path_info.last_stat_entry_timestamp, tsk_timestamp)
def testFindDescendentPathIDsSingleResult(self):
    """A single descendant is returned when only one child path exists."""
    client_id = self.InitializeClient()

    self.db.WritePathInfos(
        client_id, [objects.PathInfo.OS(components=["foo", "bar"])])

    results = self.db.FindDescendentPathIDs(
        client_id, objects.PathInfo.PathType.OS, objects.PathID(["foo"]))

    self.assertItemsEqual(results, [objects.PathID(["foo", "bar"])])
def testHash(self):
    """PathIDs with equal components must hash equally for dict usage."""
    counts = {}

    counts[rdf_objects.PathID(["foo", "bar"])] = 4
    counts[rdf_objects.PathID(["foo", "baz"])] = 8
    counts[rdf_objects.PathID(["norf"])] = 15
    # Re-inserting with fresh-but-equal keys must overwrite, not duplicate.
    counts[rdf_objects.PathID(["foo", "bar"])] = 16
    counts[rdf_objects.PathID(["norf"])] = 23
    counts[rdf_objects.PathID(["thud"])] = 42

    self.assertEqual(counts[rdf_objects.PathID(["foo", "bar"])], 16)
    self.assertEqual(counts[rdf_objects.PathID(["foo", "baz"])], 8)
    self.assertEqual(counts[rdf_objects.PathID(["norf"])], 23)
    self.assertEqual(counts[rdf_objects.PathID(["thud"])], 42)
def testWritePathInfosMetadata(self):
    """Path metadata (type, components, directory flag) round-trips."""
    client_id = self.InitializeClient()

    self.db.WritePathInfos(
        client_id,
        [objects.PathInfo.TSK(components=["foo", "bar"], directory=True)])

    path_id = objects.PathID(["foo", "bar"])
    results = self.db.FindPathInfosByPathIDs(
        client_id, objects.PathInfo.PathType.TSK, [path_id])

    result_path_info = results[path_id]
    self.assertEqual(result_path_info.path_type,
                     objects.PathInfo.PathType.TSK)
    self.assertEqual(result_path_info.components, ["foo", "bar"])
    self.assertEqual(result_path_info.directory, True)
def testWritePathInfosUpdatesAncestors(self):
    """Writing a child path flips its ancestor into a directory."""
    client_id = self.InitializeClient()

    # Initially written as a non-directory...
    self.db.WritePathInfos(
        client_id, [objects.PathInfo.OS(components=["foo"], directory=False)])
    # ...then a child appears beneath it.
    self.db.WritePathInfos(
        client_id, [objects.PathInfo.OS(components=["foo", "bar"])])

    foo_id = objects.PathID(["foo"])
    results = self.db.FindPathInfosByPathIDs(
        client_id, objects.PathInfo.PathType.OS, [foo_id])

    self.assertEqual(len(results), 1)
    self.assertTrue(results[foo_id].directory)
def testFindPathInfoByPathIDNonExistent(self):
    """Looking up a path that was never written raises UnknownPathError."""
    client_id = self.InitializeClient()
    path_id = objects.PathID(["foo", "bar", "baz"])

    with self.assertRaises(db.UnknownPathError):
        self.db.FindPathInfoByPathID(client_id,
                                     objects.PathInfo.PathType.OS,
                                     path_id)
def testWritePathInfoHashAndStatEntrySeparateWrites(self):
    """Stat and hash entries written separately must both be retained.

    Also verifies that `last_stat_entry_timestamp` and
    `last_hash_entry_timestamp` each reflect the time of their own write,
    not the time of the other one.
    """
    client_id = self.InitializeClient()

    stat_entry = rdf_client.StatEntry(st_mode=1337)
    stat_entry_path_info = objects.PathInfo.OS(
        components=["foo"], stat_entry=stat_entry)

    stat_entry_timestamp = rdfvalue.RDFDatetime.Now()
    self.db.WritePathInfos(client_id, [stat_entry_path_info])

    # Bug fix: hashlib requires a bytes input — passing the text string
    # "foo" raises TypeError on Python 3.
    hash_entry = rdf_crypto.Hash(sha256=hashlib.sha256(b"foo").digest())
    hash_entry_path_info = objects.PathInfo.OS(
        components=["foo"], hash_entry=hash_entry)

    hash_entry_timestamp = rdfvalue.RDFDatetime.Now()
    self.db.WritePathInfos(client_id, [hash_entry_path_info])

    result = self.db.FindPathInfoByPathID(client_id,
                                          objects.PathInfo.PathType.OS,
                                          objects.PathID(["foo"]))
    now = rdfvalue.RDFDatetime.Now()

    # Both entries survive, each with a timestamp from its own write window.
    self.assertEqual(result.components, ["foo"])
    self.assertTrue(result.HasField("stat_entry"))
    self.assertTrue(result.HasField("hash_entry"))
    self.assertEqual(result.stat_entry, stat_entry)
    self.assertEqual(result.hash_entry, hash_entry)
    self.assertGreater(result.last_stat_entry_timestamp, stat_entry_timestamp)
    self.assertLess(result.last_stat_entry_timestamp, hash_entry_timestamp)
    self.assertGreater(result.last_hash_entry_timestamp, hash_entry_timestamp)
    self.assertLess(result.last_hash_entry_timestamp, now)
def testWritePathInfosTypeSeparated(self):
    """Paths with identical components but different types stay independent."""
    client_id = self.InitializeClient()

    self.db.WritePathInfos(client_id, [
        objects.PathInfo.OS(components=["foo"], directory=True),
        objects.PathInfo.TSK(components=["foo"], directory=False),
    ])

    foo_id = objects.PathID(["foo"])

    os_results = self.db.FindPathInfosByPathIDs(
        client_id, objects.PathInfo.PathType.OS, [foo_id])
    self.assertEqual(len(os_results), 1)
    self.assertTrue(os_results[foo_id].directory)

    tsk_results = self.db.FindPathInfosByPathIDs(
        client_id, objects.PathInfo.PathType.TSK, [foo_id])
    self.assertEqual(len(tsk_results), 1)
    self.assertFalse(tsk_results[foo_id].directory)
def testFindDescendentPathIDsLimited(self):
    """The `max_depth` argument bounds how deep the descendant listing goes."""
    client_id = self.InitializeClient()

    self.db.WritePathInfos(client_id, [
        rdf_objects.PathInfo.OS(components=["foo", "bar", "baz", "quux"]),
        rdf_objects.PathInfo.OS(components=["foo", "bar", "blargh"]),
        rdf_objects.PathInfo.OS(components=["foo", "norf", "thud", "plugh"]),
    ])

    results = self.db.FindDescendentPathIDs(
        client_id,
        rdf_objects.PathInfo.PathType.OS,
        rdf_objects.PathID(["foo"]),
        max_depth=2)

    # Descendants one and two levels down are included...
    for components in [["foo", "bar"],
                       ["foo", "bar", "baz"],
                       ["foo", "bar", "blargh"],
                       ["foo", "norf", "thud"]]:
        self.assertIn(rdf_objects.PathID(components), results)

    # ...but anything three levels deep is cut off.
    for components in [["foo", "bar", "baz", "quux"],
                       ["foo", "norf", "thud", "plugh"]]:
        self.assertNotIn(rdf_objects.PathID(components), results)
def testFindDescendentPathIDsAll(self):
    """Querying from the root path id returns every known path."""
    client_id = self.InitializeClient()

    self.db.WritePathInfos(client_id, [
        rdf_objects.PathInfo.OS(components=["foo", "bar"]),
        rdf_objects.PathInfo.OS(components=["baz", "quux"]),
    ])

    results = self.db.FindDescendentPathIDs(client_id,
                                            rdf_objects.PathInfo.PathType.OS,
                                            rdf_objects.PathID([]))

    expected = [
        rdf_objects.PathID(["foo"]),
        rdf_objects.PathID(["foo", "bar"]),
        rdf_objects.PathID(["baz"]),
        rdf_objects.PathID(["baz", "quux"]),
    ]
    self.assertItemsEqual(results, expected)
def testFindPathInfoByPathIDTimestampStatAndHashEntry(self):
    """Historic lookups merge the latest stat and hash known at that time."""
    client_id = self.InitializeClient()

    path_info = rdf_objects.PathInfo.OS(components=["foo"])
    path_id = rdf_objects.PathID(["foo"])

    # Four successive writes: stat only, hash only, stat only, then both.
    writes = [
        (rdf_client.StatEntry(st_mode=42), None),
        (None, rdf_crypto.Hash(md5=b"quux")),
        (rdf_client.StatEntry(st_mode=1337), None),
        (rdf_client.StatEntry(st_mode=4815162342),
         rdf_crypto.Hash(md5=b"norf")),
    ]
    timestamps = []
    for stat_entry, hash_entry in writes:
        path_info.stat_entry = stat_entry
        path_info.hash_entry = hash_entry
        self.db.WritePathInfos(client_id, [path_info])
        timestamps.append(rdfvalue.RDFDatetime.Now())

    def FindAt(timestamp):
        # Helper: fetch the path info snapshot as of `timestamp`.
        return self.db.FindPathInfoByPathID(
            client_id,
            rdf_objects.PathInfo.PathType.OS,
            path_id,
            timestamp=timestamp)

    path_info_1 = FindAt(timestamps[0])
    self.assertEqual(path_info_1.stat_entry.st_mode, 42)
    self.assertFalse(path_info_1.HasField("hash_entry"))

    path_info_2 = FindAt(timestamps[1])
    self.assertEqual(path_info_2.stat_entry.st_mode, 42)
    self.assertEqual(path_info_2.hash_entry.md5, b"quux")

    path_info_3 = FindAt(timestamps[2])
    self.assertEqual(path_info_3.stat_entry.st_mode, 1337)
    self.assertEqual(path_info_3.hash_entry.md5, b"quux")

    path_info_4 = FindAt(timestamps[3])
    self.assertEqual(path_info_4.stat_entry.st_mode, 4815162342)
    self.assertEqual(path_info_4.hash_entry.md5, b"norf")
def testFindDescendentPathIDsTypeSeparated(self):
    """Descendant listings are scoped to a single path type."""
    client_id = self.InitializeClient()

    self.db.WritePathInfos(client_id, [
        objects.PathInfo.OS(components=["usr", "bin", "javac"]),
        objects.PathInfo.TSK(components=["usr", "bin", "gdb"]),
    ])

    usr_bin_id = objects.PathID(["usr", "bin"])

    os_results = self.db.FindDescendentPathIDs(
        client_id, objects.PathInfo.PathType.OS, usr_bin_id)
    self.assertEqual(os_results, {objects.PathID(["usr", "bin", "javac"])})

    tsk_results = self.db.FindDescendentPathIDs(
        client_id, objects.PathInfo.PathType.TSK, usr_bin_id)
    self.assertEqual(tsk_results, {objects.PathID(["usr", "bin", "gdb"])})
def testWritePathInfosUpdates(self):
    """A later write updates fields of an already existing path."""
    client_id = self.InitializeClient()

    self.db.WritePathInfos(client_id, [
        objects.PathInfo.OS(components=["foo", "bar", "baz"],
                            directory=False),
    ])
    # The second write for the same path flips the directory flag.
    self.db.WritePathInfos(client_id, [
        objects.PathInfo.OS(components=["foo", "bar", "baz"],
                            directory=True),
    ])

    path_id = objects.PathID(["foo", "bar", "baz"])
    results = self.db.FindPathInfosByPathIDs(
        client_id, objects.PathInfo.PathType.OS, [path_id])

    self.assertTrue(results[path_id].directory)
def Handle(self, args, token=None):
    """Returns details of a single VFS file on the given client.

    Args:
      args: Request holding `client_id`, a categorized `file_path` and an
        optional `timestamp` selecting a historical version.
      token: Access token used for AFF4 operations.

    Returns:
      An `ApiGetFileDetailsResult` describing the requested file.
    """
    ValidateVfsPath(args.file_path)

    # An explicit timestamp selects that version; otherwise open all ages so
    # full details can be rendered.
    if args.timestamp:
        age = args.timestamp
    else:
        age = aff4.ALL_TIMES

    file_obj = aff4.FACTORY.Open(
        args.client_id.ToClientURN().Add(args.file_path),
        mode="r",
        age=age,
        token=token)

    if data_store.RelationalDBReadEnabled(category="vfs"):
        # These are not really "files" so they cannot be stored in the database
        # but they still can be queried so we need to return something. Sometimes
        # they contain a trailing slash so we need to take care of that.
        #
        # TODO(hanuszczak): Require VFS paths to be normalized so that trailing
        # slash is either forbidden or mandatory.
        if args.file_path.endswith("/"):
            args.file_path = args.file_path[:-1]
        if args.file_path in ["fs", "registry", "temp", "fs/os", "fs/tsk"]:
            api_file = ApiFile()
            api_file.name = api_file.path = args.file_path
            api_file.is_directory = True
            return ApiGetFileDetailsResult(file=api_file)

        path_type, components = rdf_objects.ParseCategorizedPath(
            args.file_path)

        # TODO(hanuszczak): The tests passed even without support for timestamp
        # filtering. The test suite should be probably improved in that regard.
        path_id = rdf_objects.PathID(components)
        path_info = data_store.REL_DB.FindPathInfoByPathID(
            str(args.client_id), path_type, path_id, timestamp=args.timestamp)
        if path_info:
            stat_entry = path_info.stat_entry
            hash_entry = path_info.hash_entry
        else:
            # Fall back to empty entries so rendering below does not break.
            stat_entry = rdf_client.StatEntry()
            hash_entry = rdf_crypto.Hash()
    else:
        # With relational reads disabled, `None` makes InitFromAff4Object pull
        # the entries straight from the AFF4 object instead.
        stat_entry = None
        hash_entry = None

    return ApiGetFileDetailsResult(
        file=ApiFile().InitFromAff4Object(
            file_obj,
            stat_entry=stat_entry,
            hash_entry=hash_entry,
            with_details=True))
def testFindPathInfoByPathIDValidatesTimestamp(self):
    """A non-RDFDatetime timestamp argument is rejected with TypeError."""
    client_id = self.InitializeClient()
    path_id = objects.PathID(["foo", "bar", "baz"])

    with self.assertRaises(TypeError):
        # A Duration is not a valid point-in-time timestamp.
        self.db.FindPathInfoByPathID(
            client_id,
            objects.PathInfo.PathType.REGISTRY,
            path_id,
            timestamp=rdfvalue.Duration("10s"))
def testWritePathInfosDuplicatedData(self):
    """Writing identical path info twice leaves one consistent record."""
    client_id = self.InitializeClient()

    for _ in range(2):
        self.db.WritePathInfos(
            client_id, [objects.PathInfo.OS(components=["foo", "bar"])])

    path_id = objects.PathID(["foo", "bar"])
    results = self.db.FindPathInfosByPathIDs(
        client_id, objects.PathInfo.PathType.OS, [path_id])
    self.assertEqual(len(results), 1)

    result_path_info = results[path_id]
    self.assertEqual(result_path_info.components, ["foo", "bar"])
    self.assertEqual(result_path_info.directory, False)
def testStatHistory(self):
    """Migrated VFS exposes historical stat entries by timestamp."""
    datetime = rdfvalue.RDFDatetime.FromHumanReadable

    client_urn = self.SetupClient(0)
    file_urn = client_urn.Add("fs/os").Add("foo")

    # Record three stat snapshots at distinct (fake) points in time.
    for date, size in [("2000-01-01", 10),
                       ("2000-02-02", 20),
                       ("2000-03-03", 30)]:
        with test_lib.FakeTime(datetime(date)):
            with self._Aff4Open(file_urn) as fd:
                fd.Set(fd.Schema.STAT, rdf_client.StatEntry(st_size=size))

    data_migration.MigrateClientVfs(client_urn)

    # A lookup at a later timestamp returns the snapshot current back then.
    for date, size in [("2000-01-10", 10),
                       ("2000-02-20", 20),
                       ("2000-03-30", 30)]:
        path_info = data_store.REL_DB.FindPathInfoByPathID(
            client_id=client_urn.Basename(),
            path_type=rdf_objects.PathInfo.PathType.OS,
            path_id=rdf_objects.PathID(["foo"]),
            timestamp=datetime(date))
        self.assertEqual(path_info.stat_entry.st_size, size)
def testHashHistory(self):
    """Migrated VFS exposes historical hash entries by timestamp."""
    datetime = rdfvalue.RDFDatetime.FromHumanReadable

    client_urn = self.SetupClient(0)
    file_urn = client_urn.Add("fs/os").Add("bar")

    # Record three hash snapshots at distinct (fake) points in time.
    for date, md5 in [("2010-01-01", b"quux"),
                      ("2020-01-01", b"norf"),
                      ("2030-01-01", b"blargh")]:
        with test_lib.FakeTime(datetime(date)):
            with self._Aff4Open(file_urn) as fd:
                fd.Set(fd.Schema.HASH, rdf_crypto.Hash(md5=md5))

    data_migration.MigrateClientVfs(client_urn)

    # A lookup at a later timestamp returns the snapshot current back then.
    for date, md5 in [("2010-12-31", b"quux"),
                      ("2020-12-31", b"norf"),
                      ("2030-12-31", b"blargh")]:
        path_info = data_store.REL_DB.FindPathInfoByPathID(
            client_id=client_urn.Basename(),
            path_type=rdf_objects.PathInfo.PathType.OS,
            path_id=rdf_objects.PathID(["bar"]),
            timestamp=datetime(date))
        self.assertEqual(path_info.hash_entry.md5, md5)
def testWritePathInfosExpansion(self):
    """Writing a deep path implicitly creates its ancestor directories."""
    client_id = self.InitializeClient()

    self.db.WritePathInfos(
        client_id, [objects.PathInfo.OS(components=["foo", "bar", "baz"])])

    results = self.db.FindPathInfosByPathIDs(
        client_id, objects.PathInfo.PathType.OS, [
            objects.PathID(["foo"]),
            objects.PathID(["foo", "bar"]),
            objects.PathID(["foo", "bar", "baz"]),
        ])
    self.assertEqual(len(results), 3)

    # Both ancestors exist and are marked as directories.
    foo = results[objects.PathID(["foo"])]
    self.assertEqual(foo.components, ["foo"])
    self.assertTrue(foo.directory)

    foobar = results[objects.PathID(["foo", "bar"])]
    self.assertEqual(foobar.components, ["foo", "bar"])
    self.assertTrue(foobar.directory)

    # The leaf itself is not a directory.
    foobarbaz = results[objects.PathID(["foo", "bar", "baz"])]
    self.assertEqual(foobarbaz.components, ["foo", "bar", "baz"])
    self.assertFalse(foobarbaz.directory)
def testWritePathInfosStatEntry(self):
    """A stat entry is stored on the leaf path only, not on its ancestors."""
    client_id = self.InitializeClient()

    stat_entry = rdf_client.StatEntry()
    stat_entry.pathspec.path = "foo/bar"
    stat_entry.pathspec.pathtype = rdf_paths.PathSpec.PathType.OS
    stat_entry.st_mode = 1337
    stat_entry.st_mtime = 108
    stat_entry.st_atime = 4815162342

    path_info = objects.PathInfo.FromStatEntry(stat_entry)
    self.db.WritePathInfos(client_id, [path_info])

    results = self.db.FindPathInfosByPathIDs(
        client_id, objects.PathInfo.PathType.OS, [
            objects.PathID([]),
            objects.PathID(["foo"]),
            objects.PathID(["foo", "bar"]),
        ])

    # Neither the root nor the intermediate ancestor gets a stat entry.
    self.assertFalse(results[objects.PathID([])].HasField("stat_entry"))
    self.assertFalse(results[objects.PathID(["foo"])].HasField("stat_entry"))

    # The leaf carries the written stat entry but no hash entry.
    foobar_path_info = results[objects.PathID(["foo", "bar"])]
    self.assertTrue(foobar_path_info.HasField("stat_entry"))
    self.assertFalse(foobar_path_info.HasField("hash_entry"))
    self.assertEqual(foobar_path_info.stat_entry.st_mode, 1337)
    self.assertEqual(foobar_path_info.stat_entry.st_mtime, 108)
    self.assertEqual(foobar_path_info.stat_entry.st_atime, 4815162342)
def testStatFromTree(self):
    """After migration only the leaf of a tree carries the stat entry."""
    client_urn = self.SetupClient(0)

    with self._Aff4Open(client_urn.Add("fs/os").Add("foo/bar/baz")) as fd:
        fd.Set(fd.Schema.STAT, rdf_client.StatEntry(st_mtime=101))

    data_migration.MigrateClientVfs(client_urn)

    foo_path_id = rdf_objects.PathID(["foo"])
    foo_bar_path_id = rdf_objects.PathID(["foo", "bar"])
    foo_bar_baz_path_id = rdf_objects.PathID(["foo", "bar", "baz"])

    path_infos = data_store.REL_DB.FindPathInfosByPathIDs(
        client_id=client_urn.Basename(),
        path_type=rdf_objects.PathInfo.PathType.OS,
        path_ids=[foo_path_id, foo_bar_path_id, foo_bar_baz_path_id])

    # Intermediate directories have no stat data of their own.
    self.assertEqual(path_infos[foo_path_id].stat_entry.st_mtime, None)
    self.assertEqual(path_infos[foo_bar_path_id].stat_entry.st_mtime, None)
    self.assertEqual(path_infos[foo_bar_baz_path_id].stat_entry.st_mtime, 101)
def GetUrnHashEntry(urn, token=None):
    """Returns an `rdf_crypto.Hash` instance for given URN of an AFF4 file."""
    # Legacy path: read the hash straight off the AFF4 object.
    if not data_store.RelationalDBReadEnabled(category="vfs"):
        with aff4.FACTORY.Open(urn, token=token) as fd:
            return GetFileHashEntry(fd)

    # Relational path: resolve the URN into client id + categorized VFS path
    # and fetch the stored path info.
    client_id, vfs_path = urn.Split(2)
    path_type, components = rdf_objects.ParseCategorizedPath(vfs_path)

    path_id = rdf_objects.PathID(components)
    path_info = data_store.REL_DB.FindPathInfoByPathID(client_id, path_type,
                                                       path_id)
    return path_info.hash_entry
def testFindPathInfoByPathIDTimestampStatEntry(self):
    """History lookups by timestamp return the stat entry of that moment."""
    client_id = self.InitializeClient()

    pathspec = rdf_paths.PathSpec(
        path="foo/bar/baz", pathtype=rdf_paths.PathSpec.PathType.OS)

    stat_entry = rdf_client.StatEntry(pathspec=pathspec, st_size=42)
    self.db.WritePathInfos(client_id,
                           [rdf_objects.PathInfo.FromStatEntry(stat_entry)])
    timestamp_1 = rdfvalue.RDFDatetime.Now()

    stat_entry = rdf_client.StatEntry(pathspec=pathspec, st_size=101)
    self.db.WritePathInfos(client_id,
                           [rdf_objects.PathInfo.FromStatEntry(stat_entry)])
    timestamp_2 = rdfvalue.RDFDatetime.Now()

    stat_entry = rdf_client.StatEntry(pathspec=pathspec, st_size=1337)
    self.db.WritePathInfos(client_id,
                           [rdf_objects.PathInfo.FromStatEntry(stat_entry)])
    timestamp_3 = rdfvalue.RDFDatetime.Now()

    path_id = rdf_objects.PathID(["foo", "bar", "baz"])

    # Without a timestamp the latest snapshot is returned.
    path_info_last = self.db.FindPathInfoByPathID(
        client_id, rdf_objects.PathInfo.PathType.OS, path_id)
    self.assertEqual(path_info_last.stat_entry.st_size, 1337)
    self.assertEqual(path_info_last.components, ["foo", "bar", "baz"])

    # Bug fix: the `components` assertions below previously re-checked
    # `path_info_last` instead of the snapshot under test.
    path_info_1 = self.db.FindPathInfoByPathID(
        client_id,
        rdf_objects.PathInfo.PathType.OS,
        path_id,
        timestamp=timestamp_1)
    self.assertEqual(path_info_1.stat_entry.st_size, 42)
    self.assertEqual(path_info_1.components, ["foo", "bar", "baz"])

    path_info_2 = self.db.FindPathInfoByPathID(
        client_id,
        rdf_objects.PathInfo.PathType.OS,
        path_id,
        timestamp=timestamp_2)
    self.assertEqual(path_info_2.stat_entry.st_size, 101)
    self.assertEqual(path_info_2.components, ["foo", "bar", "baz"])

    path_info_3 = self.db.FindPathInfoByPathID(
        client_id,
        rdf_objects.PathInfo.PathType.OS,
        path_id,
        timestamp=timestamp_3)
    self.assertEqual(path_info_3.stat_entry.st_size, 1337)
    self.assertEqual(path_info_3.components, ["foo", "bar", "baz"])
def testFindPathInfoByPathIDOlder(self):
    """Fetching one path is unaffected by later writes to another path."""
    client_id = self.InitializeClient()

    foo_info = rdf_objects.PathInfo.OS(components=["foo"])
    foo_info.stat_entry.st_mode = 42
    foo_info.hash_entry.md5 = b"foo"
    self.db.WritePathInfos(client_id, [foo_info])

    bar_info = rdf_objects.PathInfo.OS(components=["bar"])
    bar_info.stat_entry.st_mode = 1337
    bar_info.hash_entry.md5 = b"bar"
    self.db.WritePathInfos(client_id, [bar_info])

    # The older path still returns its own data, not the newer path's.
    path_info = self.db.FindPathInfoByPathID(
        client_id, rdf_objects.PathInfo.PathType.OS,
        rdf_objects.PathID(["foo"]))
    self.assertEqual(path_info.stat_entry.st_mode, 42)
    self.assertEqual(path_info.hash_entry.md5, b"foo")

    path_info = self.db.FindPathInfoByPathID(
        client_id, rdf_objects.PathInfo.PathType.OS,
        rdf_objects.PathID(["bar"]))
    self.assertEqual(path_info.stat_entry.st_mode, 1337)
    self.assertEqual(path_info.hash_entry.md5, b"bar")
def testStatEntryFromSimpleFile(self):
    """Migration carries a file's stat entry over to the relational store."""
    client_urn = self.SetupClient(0)

    with self._Aff4Open(client_urn.Add("fs/os").Add("foo")) as fd:
        fd.Set(fd.Schema.STAT, rdf_client.StatEntry(st_mode=1337, st_size=42))

    data_migration.MigrateClientVfs(client_urn)

    path_info = data_store.REL_DB.FindPathInfoByPathID(
        client_id=client_urn.Basename(),
        path_type=rdf_objects.PathInfo.PathType.OS,
        path_id=rdf_objects.PathID(["foo"]))

    self.assertEqual(path_info.stat_entry.st_mode, 1337)
    self.assertEqual(path_info.stat_entry.st_size, 42)
def testHashEntryFromSimpleFile(self):
    """Migration carries a file's hash entry over to the relational store."""
    client_urn = self.SetupClient(0)

    with self._Aff4Open(client_urn.Add("fs/os").Add("foo")) as fd:
        fd.Set(fd.Schema.HASH, rdf_crypto.Hash(md5=b"bar", sha256=b"baz"))

    data_migration.MigrateClientVfs(client_urn)

    path_info = data_store.REL_DB.FindPathInfoByPathID(
        client_id=client_urn.Basename(),
        path_type=rdf_objects.PathInfo.PathType.OS,
        path_id=rdf_objects.PathID(["foo"]))

    self.assertEqual(path_info.hash_entry.md5, b"bar")
    self.assertEqual(path_info.hash_entry.sha256, b"baz")
def testStatAndHashEntryFromSimpleFile(self):
    """Migration preserves both stat and hash entries set on one file."""
    client_urn = self.SetupClient(0)

    with self._Aff4Open(client_urn.Add("fs/os").Add("foo")) as fd:
        fd.Set(fd.Schema.STAT, rdf_client.StatEntry(st_mode=108))
        fd.Set(fd.Schema.HASH, rdf_crypto.Hash(sha256=b"quux"))

    data_migration.MigrateClientVfs(client_urn)

    path_info = data_store.REL_DB.FindPathInfoByPathID(
        client_id=client_urn.Basename(),
        path_type=rdf_objects.PathInfo.PathType.OS,
        path_id=rdf_objects.PathID(["foo"]))

    self.assertEqual(path_info.stat_entry.st_mode, 108)
    self.assertEqual(path_info.hash_entry.sha256, b"quux")