def testUpdateFromDirectory(self):
  """Merging in a record with directory=True sets the directory flag."""
  components = ["usr", "local", "bin"]
  target = objects.PathInfo(components=components)
  self.assertFalse(target.directory)

  source = objects.PathInfo(components=components, directory=True)
  target.UpdateFrom(source)
  self.assertTrue(target.directory)
def testMergePathInfoLastUpdate(self):
  """Merging keeps the maximum last_path_history_timestamp seen so far."""
  components = ["usr", "local", "bin"]

  def _At(human_readable):
    return rdfvalue.RDFDatetime.FromHumanReadable(human_readable)

  dest = objects.PathInfo(components=components)
  self.assertIsNone(dest.last_path_history_timestamp)

  dest.UpdateFrom(
      objects.PathInfo(
          components=components,
          last_path_history_timestamp=_At("2017-01-01")))
  self.assertEqual(dest.last_path_history_timestamp, _At("2017-01-01"))

  # Merging in a record without last_path_history_timestamp shouldn't change
  # it.
  dest.UpdateFrom(objects.PathInfo(components=components))
  self.assertEqual(dest.last_path_history_timestamp, _At("2017-01-01"))

  # Merging in a record with an earlier last_path_history_timestamp shouldn't
  # change it.
  dest.UpdateFrom(
      objects.PathInfo(
          components=components,
          last_path_history_timestamp=_At("2016-01-01")))
  self.assertEqual(dest.last_path_history_timestamp, _At("2017-01-01"))

  # Merging in a record with a later last_path_history_timestamp should
  # change it.
  dest.UpdateFrom(
      objects.PathInfo(
          components=components,
          last_path_history_timestamp=_At("2018-01-01")))
  self.assertEqual(dest.last_path_history_timestamp, _At("2018-01-01"))
def testPathTypeSeparates(self):
  """Entries written with different path types must not leak across types."""
  os_path = ["usr", "bin", "javac"]
  tsk_path = ["usr", "bin", "gdb"]
  client_id = "C.bbbbbbbbbbbbbbbb"

  self.db.WritePathInfos(client_id, [
      objects.PathInfo(
          components=os_path,
          path_type=objects.PathInfo.PathType.OS,
          last_path_history_timestamp=rdfvalue.RDFDatetime.FromHumanReadable(
              "2017-01-01")),
      objects.PathInfo(
          components=tsk_path,
          path_type=objects.PathInfo.PathType.TSK,
          last_path_history_timestamp=rdfvalue.RDFDatetime.FromHumanReadable(
              "2017-01-02")),
  ])

  # Querying for OS infos must find the OS path but never the TSK one.
  queried_ids = [
      objects.PathInfo.MakePathID(components)
      for components in [os_path, tsk_path, ["usr"], ["usr", "bin"]]
  ]
  results = self.db.FindPathInfosByPathIDs(
      client_id, objects.PathInfo.PathType.OS, queried_ids)
  self.assertIn(objects.PathInfo.MakePathID(os_path), results)
  self.assertNotIn(objects.PathInfo.MakePathID(tsk_path), results)

  # Descendants of /usr/bin under OS contain only the OS entry...
  results = self.db.FindDescendentPathIDs(
      client_id, objects.PathInfo.PathType.OS,
      objects.PathInfo.MakePathID(["usr", "bin"]))
  self.assertEqual(results, [objects.PathInfo.MakePathID(os_path)])

  # ... and under TSK only the TSK entry.
  results = self.db.FindDescendentPathIDs(
      client_id, objects.PathInfo.PathType.TSK,
      objects.PathInfo.MakePathID(["usr", "bin"]))
  self.assertEqual(results, [objects.PathInfo.MakePathID(tsk_path)])
def testFindDescendentPathIDs(self):
  """Descendant queries return unique IDs and honor max_depth."""
  path_1 = [
      "usr", "lib", "lightning", "chrome", "calendar-en-US", "locale",
      "en-US", "calendar", "calendar-occurrence-prompt.properties"
  ]
  path_2 = [
      "usr", "lib", "lightning", "chrome", "calendar", "content", "calendar",
      "calendar-creation.js"
  ]
  client_id = "C.bbbbbbbbbbbbbbbb"
  make_id = objects.PathInfo.MakePathID

  self.db.WritePathInfos(client_id, [
      objects.PathInfo(
          components=path_1, path_type=objects.PathInfo.PathType.OS),
      objects.PathInfo(
          components=path_2, path_type=objects.PathInfo.PathType.OS),
  ])

  # Read everything (no depth limit).
  results = self.db.FindDescendentPathIDs(
      client_id, objects.PathInfo.PathType.OS,
      make_id(["usr", "lib", "lightning"]))
  results_set = set(results)
  self.assertEqual(len(results), len(results_set))  # no duplicates
  self.assertIn(make_id(path_1), results_set)
  self.assertIn(make_id(path_2), results_set)
  # The start point itself is excluded, but its children are included.
  self.assertNotIn(make_id(["usr", "lib", "lightning"]), results_set)
  self.assertIn(make_id(["usr", "lib", "lightning", "chrome"]), results_set)

  # Read 2 levels only.
  results = self.db.FindDescendentPathIDs(
      client_id,
      objects.PathInfo.PathType.OS,
      make_id(["usr", "lib", "lightning"]),
      max_depth=2)
  results_set = set(results)
  self.assertEqual(len(results), len(results_set))
  # Our leaf nodes shouldn't be there, they are too deep.
  self.assertNotIn(make_id(path_1), results_set)
  self.assertNotIn(make_id(path_2), results_set)
  # We don't expect the start point to be included.
  self.assertNotIn(make_id(["usr", "lib", "lightning"]), results_set)
  # We do expect 2 layers, but no more.
  self.assertIn(make_id(["usr", "lib", "lightning", "chrome"]), results_set)
  self.assertIn(
      make_id(["usr", "lib", "lightning", "chrome", "calendar"]), results_set)
  self.assertNotIn(
      make_id(["usr", "lib", "lightning", "chrome", "calendar", "content"]),
      results_set)
def testUpdateFromStatEntryRetain(self):
  """Merging in a record without a stat entry keeps the existing one."""
  dst = objects.PathInfo(
      components=["foo", "bar"],
      stat_entry=rdf_client.StatEntry(st_mode=707))
  dst.UpdateFrom(objects.PathInfo(components=["foo", "bar"]))
  self.assertEqual(dst.stat_entry.st_mode, 707)
def testUpdateFromStatEntryUpdate(self):
  """Merging in a record with a stat entry fills in a missing one."""
  dst = objects.PathInfo(components=["foo", "bar"])
  src = objects.PathInfo(
      components=["foo", "bar"],
      stat_entry=rdf_client.StatEntry(st_mode=1337))
  dst.UpdateFrom(src)
  self.assertEqual(dst.stat_entry.st_mode, 1337)
def testWritePathInfosRawValidates(self):
  """Raw writes reject invalid client ids and infos without a path type."""
  components = ["usr", "local"]
  client_id = "C.bbbbbbbbbbbbbbbb"

  # An empty string is not a valid client_id.
  with self.assertRaises(ValueError):
    self.db.WritePathInfosRaw("", [
        objects.PathInfo(
            path_type=objects.PathInfo.PathType.OS, components=components)
    ])

  # Missing path_type.
  with self.assertRaises(ValueError):
    self.db.WritePathInfosRaw(
        client_id, [objects.PathInfo(components=components)])
def testWritePathInfosValidatesPathType(self):
  """Writing a PathInfo without a path type must fail."""
  client_id = "C.bbbbbbbbbbbbbbbb"
  info = objects.PathInfo(components=["usr", "local"])
  with self.assertRaises(ValueError):
    self.db.WritePathInfos(client_id, [info])
def __init__(self, path_type, components):
  """Initializes an empty node for the given typed path."""
  self._path_info = objects.PathInfo(
      path_type=path_type, components=components)
  # No history or children are known yet.
  self._stat_entries = {}
  self._hash_entries = {}
  self._children = set()
def testGetAncestorsOrder(self):
  """Ancestors are yielded from the immediate parent down to the root."""
  path_info = objects.PathInfo(components=["foo", "bar", "baz", "quux"])
  ancestors = list(path_info.GetAncestors())

  expected = [["foo", "bar", "baz"], ["foo", "bar"], ["foo"], []]
  self.assertEqual(len(ancestors), 4)
  for ancestor, components in zip(ancestors, expected):
    self.assertEqual(ancestor.components, components)
def testUpdateFromValidates(self):
  """UpdateFrom rejects non-PathInfo sources and mismatched paths."""
  components = ["usr", "local", "bin"]

  # Cannot merge from a string.
  # NOTE(review): testUpdateFromValidatesType expects TypeError for the same
  # call - confirm which exception type is canonical.
  with self.assertRaises(ValueError):
    objects.PathInfo(components=components).UpdateFrom("/usr/local/bin")

  # Both must refer to the same path type.
  with self.assertRaises(ValueError):
    objects.PathInfo(
        components=components,
        path_type=objects.PathInfo.PathType.OS,
    ).UpdateFrom(
        objects.PathInfo(
            components=components,
            path_type=objects.PathInfo.PathType.TSK,
        ))

  # Both must refer to the same path.
  with self.assertRaises(ValueError):
    objects.PathInfo(components=components).UpdateFrom(
        objects.PathInfo(components=components + ["protoc"]))
def CreateFileVersions(self, client_id, file_path):
  """Add a new version for a file.

  Writes the same AFF4 memory stream twice (at self.time_1 and self.time_2)
  so that the file has two distinct versions, mirroring each write into the
  relational path store when it is enabled.

  Args:
    client_id: The client URN the file belongs to.
    file_path: Categorized VFS path of the file (e.g. "fs/os/...").
  """
  path_type, components = rdf_objects.ParseCategorizedPath(file_path)
  token = access_control.ACLToken(username="******")

  def _WriteVersion(timestamp, content):
    # Write one AFF4 version under fake time and mirror the path in the
    # relational store. This body was previously duplicated verbatim for
    # both timestamps.
    with test_lib.FakeTime(timestamp):
      fd = aff4.FACTORY.Create(
          client_id.Add(file_path),
          aff4.AFF4MemoryStream,
          mode="w",
          token=token)
      fd.Write(content)
      fd.Close()

      if data_store.RelationalDBWriteEnabled():
        path_info = rdf_objects.PathInfo()
        path_info.path_type = path_type
        path_info.components = components
        path_info.directory = False
        data_store.REL_DB.WritePathInfos(client_id.Basename(), [path_info])

  _WriteVersion(self.time_1, "Hello World")
  _WriteVersion(self.time_2, "Goodbye World")
def CreateFileVersion(client_id, path, content, timestamp, token=None):
  """Add a new version for a file."""
  with test_lib.FakeTime(timestamp):
    with aff4.FACTORY.Create(
        client_id.Add(path), aff4_type=aff4_grr.VFSFile, mode="w",
        token=token) as fd:
      fd.Write(content)
      fd.Set(fd.Schema.CONTENT_LAST, rdfvalue.RDFDatetime.Now())

    if data_store.RelationalDBWriteEnabled():
      path_type, components = rdf_objects.ParseCategorizedPath(path)
      path_info = rdf_objects.PathInfo(
          path_type=path_type, components=components, directory=False)
      data_store.REL_DB.WritePathInfos(client_id.Basename(), [path_info])
def CreateFolder(client_id, path, timestamp, token=None):
  """Creates a VFS folder."""
  with test_lib.FakeTime(timestamp):
    with aff4.FACTORY.Create(
        client_id.Add(path), aff4_type=aff4_standard.VFSDirectory, mode="w",
        token=token) as _:
      pass

    if data_store.RelationalDBWriteEnabled():
      path_type, components = rdf_objects.ParseCategorizedPath(path)
      path_info = rdf_objects.PathInfo(
          path_type=path_type, components=components, directory=True)
      data_store.REL_DB.WritePathInfos(client_id.Basename(), [path_info])
def WritePathInfos(self, client_id, path_infos):
  """Writes a collection of path_info records for a client.

  If any records are already present in the database, they will be merged -
  see db_path_utils.MergePathInfo.

  Args:
    client_id: The client of interest.
    path_infos: A list of rdfvalue.objects.PathInfo records.
  """
  to_write = {info.path_id: info for info in path_infos}

  for info in path_infos:
    ancestors = rdf_objects.PathInfo.MakeAncestorPathIDs(info.components)
    for ancestor_id, ancestor_components in ancestors:
      if ancestor_id in to_write:
        continue
      # Ancestors not explicitly supplied are implied directories.
      to_write[ancestor_id] = rdf_objects.PathInfo(
          components=ancestor_components,
          path_type=info.path_type,
          directory=True)

  self.WritePathInfosRaw(client_id, to_write.values())
def testWritePathInfos(self):
  """Written paths are retrievable; parent directories are implied."""
  path_1 = ["usr", "bin", "javac"]
  path_2 = ["usr", "bin", "gdb"]
  client_id = "C.bbbbbbbbbbbbbbbb"

  def _At(human_readable):
    return rdfvalue.RDFDatetime.FromHumanReadable(human_readable)

  self.db.WritePathInfos(client_id, [
      objects.PathInfo(
          components=path_1,
          path_type=objects.PathInfo.PathType.OS,
          last_path_history_timestamp=_At("2017-01-01")),
      objects.PathInfo(
          components=path_2,
          path_type=objects.PathInfo.PathType.OS,
          last_path_history_timestamp=_At("2017-01-02")),
  ])

  # In addition to the 2 paths we put, we should also find the 2 shared
  # parent directories.
  queried = [path_1, path_2, ["usr"], ["usr", "bin"]]
  results = self.db.FindPathInfosByPathIDs(
      client_id, objects.PathInfo.PathType.OS,
      map(objects.PathInfo.MakePathID, queried))

  expected = {
      objects.PathInfo.MakePathID(path_1):
          objects.PathInfo(
              components=path_1,
              path_type=objects.PathInfo.PathType.OS,
              last_path_history_timestamp=_At("2017-01-01")),
      objects.PathInfo.MakePathID(path_2):
          objects.PathInfo(
              components=path_2,
              path_type=objects.PathInfo.PathType.OS,
              last_path_history_timestamp=_At("2017-01-02")),
      objects.PathInfo.MakePathID(["usr"]):
          objects.PathInfo(
              components=["usr"],
              path_type=objects.PathInfo.PathType.OS,
              directory=True),
      objects.PathInfo.MakePathID(["usr", "bin"]):
          objects.PathInfo(
              components=["usr", "bin"],
              path_type=objects.PathInfo.PathType.OS,
              directory=True),
  }
  self.assertEqual(results, expected)
def testRootPathDirectoryValidation(self):
  """Constructing a root PathInfo without components must fail."""
  with self.assertRaises(AssertionError):
    objects.PathInfo(components=[])
def testValidateEmptyComponent(self):
  """Empty path components are rejected with a descriptive error."""
  components = ["foo", "", "bar"]
  with self.assertRaisesRegexp(ValueError, "Empty"):
    objects.PathInfo(components=components)
def CreateClientObject(self, vfs_fixture):
  """Make a new client object.

  Replays the given VFS fixture into AFF4 (and, when enabled, into the
  relational path store) under a fixed fake time.

  Args:
    vfs_fixture: Iterable of (path, (aff4_type, attributes)) fixture entries;
      paths and string values may contain %-interpolation placeholders
      resolved against self.args.
  """
  # First remove the old fixture just in case its still there.
  aff4.FACTORY.Delete(self.client_id, token=self.token)

  # Create the fixture at a fixed time.
  with test_lib.FakeTime(self.age):
    for path, (aff4_type, attributes) in vfs_fixture:
      # Fixture paths may contain interpolation placeholders.
      path %= self.args

      aff4_object = aff4.FACTORY.Create(
          self.client_id.Add(path), aff4_type, mode="rw", token=self.token)

      if data_store.RelationalDBWriteEnabled():
        data_store.REL_DB.WriteClientMetadata(
            self.client_id.Basename(), fleetspeak_enabled=False)

        components = [component for component in path.split("/") if component]
        # Only paths under "fs/os" are mirrored into the relational store,
        # with the "fs/os" prefix stripped.
        if components[0:2] == ["fs", "os"]:
          path_info = rdf_objects.PathInfo()
          path_info.path_type = rdf_objects.PathInfo.PathType.OS
          path_info.components = components[2:]
          if aff4_type in [aff4_grr.VFSFile, aff4_grr.VFSMemoryFile]:
            path_info.directory = False
          elif aff4_type == aff4_standard.VFSDirectory:
            path_info.directory = True
          else:
            raise ValueError("Incorrect AFF4 type: %s" % aff4_type)
          data_store.REL_DB.WritePathInfos(
              client_id=self.client_id.Basename(), path_infos=[path_info])

      for attribute_name, value in attributes.items():
        attribute = aff4.Attribute.PREDICATES[attribute_name]
        if isinstance(value, (str, unicode)):
          # Interpolate the value
          value %= self.args

        # Is this supposed to be an RDFValue array?
        if aff4.issubclass(attribute.attribute_type,
                           rdf_protodict.RDFValueArray):
          rdfvalue_object = attribute()
          for item in value:
            new_object = rdfvalue_object.rdf_type.FromTextFormat(
                utils.SmartStr(item))
            rdfvalue_object.Append(new_object)

        # It is a text serialized protobuf.
        elif aff4.issubclass(attribute.attribute_type,
                             rdf_structs.RDFProtoStruct):
          # Use the alternate constructor - we always write protobufs in
          # textual form:
          rdfvalue_object = attribute.attribute_type.FromTextFormat(
              utils.SmartStr(value))

        elif aff4.issubclass(attribute.attribute_type, rdfvalue.RDFInteger):
          rdfvalue_object = attribute(int(value))
        else:
          rdfvalue_object = attribute(value)

        # If we don't already have a pathspec, try and get one from the stat.
        if aff4_object.Get(aff4_object.Schema.PATHSPEC) is None:
          # If the attribute was a stat, it has a pathspec nested in it.
          # We should add that pathspec as an attribute.
          if attribute.attribute_type == rdf_client.StatEntry:
            stat_object = attribute.attribute_type.FromTextFormat(
                utils.SmartStr(value))
            if stat_object.pathspec:
              pathspec_attribute = aff4.Attribute(
                  "aff4:pathspec", rdf_paths.PathSpec,
                  "The pathspec used to retrieve "
                  "this object from the client.", "pathspec")
              aff4_object.AddAttribute(pathspec_attribute,
                                       stat_object.pathspec)

        # NOTE(review): this list repeats "aff4:content" twice - one entry
        # may have been meant to be a different predicate; confirm.
        if attribute in ["aff4:content", "aff4:content"]:
          # For AFF4MemoryStreams we need to call Write() instead of
          # directly setting the contents..
          aff4_object.Write(rdfvalue_object)
        else:
          aff4_object.AddAttribute(attribute, rdfvalue_object)

          if (isinstance(rdfvalue_object, rdf_client.StatEntry) and
              rdfvalue_object.pathspec.pathtype != "UNSET"):
            if data_store.RelationalDBWriteEnabled():
              client_id = self.client_id.Basename()
              path_info = rdf_objects.PathInfo.FromStatEntry(rdfvalue_object)
              data_store.REL_DB.WritePathInfos(client_id, [path_info])

      # Populate the KB from the client attributes.
      if aff4_type == aff4_grr.VFSGRRClient:
        kb = rdf_client.KnowledgeBase()
        artifact.SetCoreGRRKnowledgeBaseValues(kb, aff4_object)
        aff4_object.Set(aff4_object.Schema.KNOWLEDGE_BASE, kb)

      # Make sure we do not actually close the object here - we only want to
      # sync back its attributes, not run any finalization code.
      aff4_object.Flush()
      if aff4_type == aff4_grr.VFSGRRClient:
        index = client_index.CreateClientIndex(token=self.token)
        index.AddClient(aff4_object)
def testValidateDoubleDotComponent(self):
  """'..' path components are rejected with a descriptive error."""
  components = ["..", "foo", "bar"]
  with self.assertRaisesRegexp(ValueError, "Incorrect"):
    objects.PathInfo(components=components)
def testGetAncestorsEmpty(self):
  """The root path has no ancestors."""
  root = objects.PathInfo(components=[], directory=True)
  ancestors = list(root.GetAncestors())
  self.assertEqual(ancestors, [])
def testUpdateFromValidatesComponents(self):
  """Merging from a record for a different path must fail."""
  shorter = objects.PathInfo(components=["usr", "local", "bin"])
  longer = objects.PathInfo(components=["usr", "local", "bin", "protoc"])
  with self.assertRaises(ValueError):
    shorter.UpdateFrom(longer)
def testUpdateFromValidatesType(self):
  """Merging from a non-PathInfo object must raise TypeError."""
  path_info = objects.PathInfo(components=["usr", "local", "bin"])
  with self.assertRaises(TypeError):
    path_info.UpdateFrom("/usr/local/bin")
def testGetAncestorsRoot(self):
  """A single-component path has exactly one ancestor: the root."""
  ancestors = list(objects.PathInfo(components=["foo"]).GetAncestors())
  self.assertEqual(len(ancestors), 1)
  self.assertEqual(ancestors[0].components, [])