def testDownloadCollectionWithFoldersEntries(self):
  """Check we can download a collection that also references folders."""
  # Build an indexed collection with one regular-file result and one
  # directory result (st_mode has S_IFDIR set).
  fd = sequential_collection.GeneralIndexedCollection(
      self.collection_urn, token=self.token)
  with data_store.DB.GetMutationPool(token=self.token) as pool:
    fd.Add(
        rdf_file_finder.FileFinderResult(
            stat_entry=rdf_client.StatEntry(
                pathspec=rdf_paths.PathSpec(path="testfile5",
                                            pathtype="OS"))),
        mutation_pool=pool)
    fd.Add(
        rdf_file_finder.FileFinderResult(
            stat_entry=rdf_client.StatEntry(
                pathspec=rdf_paths.PathSpec(path="testdir1", pathtype="OS"),
                st_mode=stat.S_IFDIR)),
        mutation_pool=pool)
  with utils.TempDirectory() as tmpdir:
    export_utils.DownloadCollection(self.collection_urn, tmpdir,
                                    overwrite=True, dump_client_info=True,
                                    token=self.token, max_threads=2)
    expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])
    # Check we found both files.
    self.assertTrue("testfile5" in os.listdir(expected_outdir))
    self.assertTrue("testdir1" in os.listdir(expected_outdir))
def testDownloadCollectionWithFoldersEntries(self):
  """Check we can download RDFValueCollection that also references folders."""
  # One regular-file entry and one directory entry (st_mode has S_IFDIR set).
  fd = aff4.FACTORY.Create(self.collection_urn, collects.RDFValueCollection,
                           token=self.token)
  fd.Add(
      rdf_file_finder.FileFinderResult(stat_entry=rdf_client.StatEntry(
          pathspec=rdf_paths.PathSpec(path="testfile5", pathtype="OS"))))
  fd.Add(
      rdf_file_finder.FileFinderResult(stat_entry=rdf_client.StatEntry(
          pathspec=rdf_paths.PathSpec(path="testdir1", pathtype="OS"),
          st_mode=stat.S_IFDIR)))
  fd.Close()
  with utils.TempDirectory() as tmpdir:
    export_utils.DownloadCollection(self.collection_urn, tmpdir,
                                    overwrite=True, dump_client_info=True,
                                    token=self.token, max_threads=2)
    expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])
    # Check we found both files.
    self.assertTrue("testfile5" in os.listdir(expected_outdir))
    self.assertTrue("testdir1" in os.listdir(expected_outdir))
def testDownloadHuntResultCollection(self): """Check we can download files references in HuntResultCollection.""" # Create a collection with URNs to some files. fd = results.HuntResultCollection(self.collection_urn, token=self.token) with data_store.DB.GetMutationPool(token=self.token) as pool: fd.AddAsMessage(rdfvalue.RDFURN(self.out.Add("testfile1")), self.client_id, mutation_pool=pool) fd.AddAsMessage(rdf_client.StatEntry( pathspec=rdf_paths.PathSpec(path="testfile2", pathtype="OS")), self.client_id, mutation_pool=pool) fd.AddAsMessage(rdf_file_finder.FileFinderResult( stat_entry=rdf_client.StatEntry(pathspec=rdf_paths.PathSpec( path="testfile5", pathtype="OS"))), self.client_id, mutation_pool=pool) fd.AddAsMessage(collectors.ArtifactFilesDownloaderResult( downloaded_file=rdf_client.StatEntry( pathspec=rdf_paths.PathSpec(path="testfile6", pathtype="OS"))), self.client_id, mutation_pool=pool) self._VerifyDownload()
def _ExecuteDownload(self, fname, stat_object, args):
  """Stat the file (resolving symlinks) and upload it.

  Returns a FileFinderResult carrying the stat entry and, when the upload
  succeeded, the uploaded-file descriptor (with its stat entry attached).
  """
  entry = self.Stat(fname, stat_object, True)
  upload = self.Upload(fname, stat_object, args.action.download,
                       args.upload_token)
  if upload:
    upload.stat_entry = entry
  return rdf_file_finder.FileFinderResult(
      stat_entry=entry, uploaded_file=upload)
def _ExecuteHash(self, fname, args):
  """Stat (resolving symlinks) and hash the file, returning a result."""
  options = rdf_file_finder.FileFinderStatActionOptions(
      resolve_links=True,
      collect_ext_attrs=args.action.hash.collect_ext_attrs)
  entry = self.Stat(fname, options)
  digest = self.Hash(fname, args.action.hash)
  return rdf_file_finder.FileFinderResult(
      stat_entry=entry, hash_entry=digest)
def Start(self):
  """Issue the find request."""
  super(FileFinder, self).Start()

  if not self.args.paths:
    # Nothing to do.
    return

  self.state.files_found = 0
  # Cheapest conditions first, so expensive ones run on fewer files.
  self.state.sorted_conditions = sorted(self.args.conditions,
                                        key=self._ConditionWeight)

  # TODO(user): We may change self.args just by accessing self.args.action
  # (a nested message will be created). Therefore we should be careful
  # about not modifying self.args: they will be written as FLOW_ARGS attribute
  # and will be different from what the user has actually passed in.
  # We need better semantics for RDFStructs - creating a nested field on
  # read access is totally unexpected.
  if self.args.HasField("action"):
    action = self.args.action.Copy()
  else:
    action = rdf_file_finder.FileFinderAction()

  # This is used by MultiGetFileMixin.
  if action.action_type == rdf_file_finder.FileFinderAction.Action.HASH:
    self.state.file_size = action.hash.max_size
  elif action.action_type == rdf_file_finder.FileFinderAction.Action.DOWNLOAD:
    self.state.file_size = action.download.max_size

  if self.args.pathtype in (rdf_paths.PathSpec.PathType.MEMORY,
                            rdf_paths.PathSpec.PathType.REGISTRY):
    # Memory and Registry StatEntries won't pass the file type check.
    self.args.process_non_regular_files = True

  if self.args.pathtype == rdf_paths.PathSpec.PathType.MEMORY:
    # If pathtype is MEMORY, we're treating provided paths not as globs,
    # but as paths to memory devices.
    for path in self.args.paths:
      pathspec = rdf_paths.PathSpec(
          path=utils.SmartUnicode(path),
          pathtype=rdf_paths.PathSpec.PathType.MEMORY)
      aff4path = aff4_grr.VFSGRRClient.PathspecToURN(pathspec,
                                                     self.client_id)
      stat_entry = rdf_client.StatEntry(aff4path=aff4path,
                                        pathspec=pathspec)
      # Feed each device straight into the condition chain.
      self.ApplyCondition(
          rdf_file_finder.FileFinderResult(stat_entry=stat_entry),
          condition_index=0)
  else:
    self.GlobForPaths(
        self.args.paths,
        pathtype=self.args.pathtype,
        process_non_regular_files=self.args.process_non_regular_files)
def _ExecuteDownload(self, fname, stat, args):
  """Stat and upload the file described by the download action.

  NOTE: mutates args.action.download.resolve_links (forced to True) before
  statting, matching the original behavior.
  """
  args.action.download.resolve_links = True
  entry = self.Stat(fname, stat, args.action.download.stat)
  upload = self.Upload(fname, stat, args.action.download,
                       args.upload_token)
  if upload:
    upload.stat_entry = entry
  return rdf_file_finder.FileFinderResult(
      stat_entry=entry, uploaded_file=upload)
def Run(self, args):
  """Walk the requested globs, apply conditions and the chosen action.

  STAT results are replied immediately; HASH stats then hashes the target;
  DOWNLOAD is not implemented in this version and raises.
  """
  self.follow_links = args.follow_links
  self.process_non_regular_files = args.process_non_regular_files

  # TODO(user): Add xdev support.
  for fname in self.CollectGlobs(args.paths):
    self.Progress()

    self.conditions = self.ParseConditions(args)
    try:
      stat_object = os.lstat(fname)
    except OSError:
      # File vanished or is unreadable; skip silently.
      continue

    if (not self.process_non_regular_files and
        not stat.S_ISREG(stat_object.st_mode)):
      continue

    result = rdf_file_finder.FileFinderResult()
    # All conditions must pass; each may also record matches on `result`.
    conditions_apply = True
    for c in self.conditions:
      if not c(fname, stat_object, result):
        conditions_apply = False
        break
    if not conditions_apply:
      continue

    if args.action.action_type == args.action.Action.STAT:
      result.stat_entry = self.Stat(fname, stat_object,
                                    args.action.stat.resolve_links)
      self.SendReply(result)
      continue
    else:
      result.stat_entry = self.Stat(fname, stat_object, True)

    # We never want to hash/download the link, always the target.
    if stat.S_ISLNK(stat_object.st_mode):
      try:
        stat_object = os.stat(fname)
      except OSError:
        continue

    if args.action.action_type == args.action.Action.DOWNLOAD:
      # TODO(user): DOWNLOAD
      raise NotImplementedError()
    elif args.action.action_type == args.action.Action.HASH:
      result.hash_entry = self.Hash(fname, stat_object,
                                    args.action.hash.max_size,
                                    args.action.hash.oversized_file_policy)

    self.SendReply(result)
def _ExecuteDownload(self, fname, args):
  """Stat the file (resolving symlinks) and upload it, returning a result."""
  options = rdf_file_finder.FileFinderStatActionOptions(
      resolve_links=True,
      collect_ext_attrs=args.action.download.collect_ext_attrs)
  entry = self.Stat(fname, options)
  upload = self.Upload(fname, args.action.download)
  if upload:
    upload.stat_entry = entry
  return rdf_file_finder.FileFinderResult(
      stat_entry=entry, uploaded_file=upload)
def _AddTestData(self, fd):
  """Populate fd with one value of each payload type the exporter handles."""
  test_values = [
      rdfvalue.RDFURN(self.out.Add("testfile1")),
      rdf_client.StatEntry(
          pathspec=rdf_paths.PathSpec(path="testfile2", pathtype="OS")),
      rdf_file_finder.FileFinderResult(
          stat_entry=rdf_client.StatEntry(
              pathspec=rdf_paths.PathSpec(path="testfile5",
                                          pathtype="OS"))),
      collectors.ArtifactFilesDownloaderResult(
          downloaded_file=rdf_client.StatEntry(
              pathspec=rdf_paths.PathSpec(path="testfile6",
                                          pathtype="OS"))),
  ]
  for value in test_values:
    fd.Add(value)
def testHuntAuthorizationIsRequiredToGenerateResultsArchive(self):
  """Download button without approval must prompt for an approval request."""
  stat_entry = rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
      path="/foo/bar", pathtype=rdf_paths.PathSpec.PathType.OS))
  values = [rdf_file_finder.FileFinderResult(stat_entry=stat_entry)]
  self.CreateGenericHuntWithCollection(values=values)

  # Navigate to the hunt's Results tab and try to download the archive.
  self.Open("/")
  self.Click("css=a[grrtarget=hunts]")
  self.Click("css=td:contains('GenericHunt')")
  self.Click("css=li[heading=Results]")
  self.Click("css=button.DownloadButton")
  # No approval was granted, so the approval dialog must appear.
  self.WaitUntil(self.IsTextPresent, "Create a new approval request")
def testShowsGenerateArchiveButtonForFileFinderHunt(self):
  """Results with stat entries must offer the archive-generation UI."""
  stat_entry = rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
      path="/foo/bar", pathtype=rdf_paths.PathSpec.PathType.OS))
  values = [rdf_file_finder.FileFinderResult(stat_entry=stat_entry)]
  self.CreateGenericHuntWithCollection(values=values)

  # Navigate to the hunt's Results tab.
  self.Open("/")
  self.Click("css=a[grrtarget=hunts]")
  self.Click("css=td:contains('GenericHunt')")
  self.Click("css=li[heading=Results]")
  self.WaitUntil(self.IsTextPresent,
                 "Files referenced in this collection can be downloaded")
def testShowsGenerateArchiveButtonForFileFinderHunt(self):
  """Results with stat entries must offer the archive-generation UI."""
  stat_entry = rdf_client.StatEntry(aff4path="aff4:/foo/bar")
  values = [rdf_file_finder.FileFinderResult(stat_entry=stat_entry)]
  with self.ACLChecksDisabled():
    self.CreateGenericHuntWithCollection(values=values)

  # Navigate to the hunt's Results tab.
  self.Open("/")
  self.Click("css=a[grrtarget=hunts]")
  self.Click("css=td:contains('GenericHunt')")
  self.Click("css=li[heading=Results]")
  self.WaitUntil(self.IsTextPresent,
                 "Files referenced in this collection can be downloaded")
def testHuntAuthorizationIsRequiredToGenerateResultsArchive(self):
  """Download button without approval must prompt for an approval request."""
  stat_entry = rdf_client.StatEntry(aff4path="aff4:/foo/bar")
  values = [rdf_file_finder.FileFinderResult(stat_entry=stat_entry)]
  with self.ACLChecksDisabled():
    self.CreateGenericHuntWithCollection(values=values)

  # Navigate to the hunt's Results tab and try to download the archive.
  self.Open("/")
  self.Click("css=a[grrtarget=hunts]")
  self.Click("css=td:contains('GenericHunt')")
  self.Click("css=li[heading=Results]")
  self.Click("css=button.DownloadButton")
  # No approval was granted, so the approval dialog must appear.
  self.WaitUntil(self.IsTextPresent, "Create a new approval request")
def Run(self, args):
  """Expand the requested paths, validate each and execute the action.

  Paths failing validation raise _SkipFileException and are silently
  skipped; all others produce one reply each.
  """
  self.stat_cache = utils.StatCache()
  action = self._ParseAction(args)
  for candidate in self._GetExpandedPaths(args):
    self.Progress()
    try:
      found = self._Validate(args, candidate)
      reply = rdf_file_finder.FileFinderResult()
      reply.matches = found
      action.Execute(candidate, reply)
      self.SendReply(reply)
    except _SkipFileException:
      pass
def testNetgroupBufferParser(self):
  """Ensure we can extract users from a netgroup file."""
  parser = linux_file_parser.NetgroupBufferParser()
  buf1 = rdf_client.BufferReference(data="group1 (-,user1,) (-,user2,) "
                                    "(-,user3,)\n")
  buf2 = rdf_client.BufferReference(
      data="super_group3 (-,user5,) (-,user6,)"
      " group1 group2\n")
  ff_result = rdf_file_finder.FileFinderResult(matches=[buf1, buf2])
  # user2 and user3 are blacklisted, so only user1/user5/user6 may appear.
  with test_lib.ConfigOverrider(
      {"Artifacts.netgroup_user_blacklist": ["user2", "user3"]}):
    out = list(parser.Parse(ff_result, None))
    self.assertItemsEqual([x.username for x in out],
                          [u"user1", u"user5", u"user6"])
def testExportCommandIsShownForStatEntryResults(self):
  """The results page must show the grr_api_shell export command."""
  stat_entry = rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
      path="/foo/bar", pathtype=rdf_paths.PathSpec.PathType.OS))
  values = [rdf_file_finder.FileFinderResult(stat_entry=stat_entry)]
  hunt_urn = self.CreateGenericHuntWithCollection(values=values)

  self.Open("/#/hunts/%s/results" % hunt_urn.Basename())
  self.Click("link=Show export command")

  # The command embeds the hunt id; ":" is not filename-safe and is
  # replaced by "_" in the output file name.
  self.WaitUntil(
      self.IsTextPresent,
      "/usr/bin/grr_api_shell 'http://localhost:8000/' "
      "--exec_code 'grrapi.Hunt(\"%s\").GetFilesArchive()."
      "WriteToFile(\"./hunt_results_%s.zip\")'" %
      (hunt_urn.Basename(), hunt_urn.Basename().replace(":", "_")))
def _testCollection(self, collection_type):
  """Create a collection of the given type referencing files, then verify
  the referenced files can be downloaded."""
  # Create a collection with URNs to some files.
  fd = aff4.FACTORY.Create(
      "aff4:/testcoll", collection_type, token=self.token)
  for item in [
      rdfvalue.RDFURN(self.out.Add("testfile1")),
      rdf_client.StatEntry(aff4path=self.out.Add("testfile2")),
      rdf_file_finder.FileFinderResult(
          stat_entry=rdf_client.StatEntry(
              aff4path=self.out.Add("testfile5"))),
      collectors.ArtifactFilesDownloaderResult(
          downloaded_file=rdf_client.StatEntry(
              aff4path=self.out.Add("testfile6"))),
  ]:
    fd.Add(item)
  fd.Close()

  self._VerifyDownload()
def testShowsPerFileDownloadButtonForFileFinderHunt(self):
  """Each file result row must carry its own download button."""
  stat_entry = rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
      path="/foo/bar", pathtype=rdf_paths.PathSpec.PathType.OS))
  values = [rdf_file_finder.FileFinderResult(stat_entry=stat_entry)]
  self.CreateGenericHuntWithCollection(values=values)

  # Navigate to the hunt's Results tab.
  self.Open("/")
  self.Click("css=a[grrtarget=hunts]")
  self.Click("css=td:contains('GenericHunt')")
  self.Click("css=li[heading=Results]")
  self.WaitUntil(
      self.IsElementPresent,
      "css=grr-results-collection button:has(span.glyphicon-download)")
def _AddTestData(self, fd):
  """Populate fd (via one mutation pool) with one value of each payload
  type the exporter handles."""
  test_values = [
      rdfvalue.RDFURN(self.out.Add("testfile1")),
      rdf_client.StatEntry(
          pathspec=rdf_paths.PathSpec(path="testfile2", pathtype="OS")),
      rdf_file_finder.FileFinderResult(
          stat_entry=rdf_client.StatEntry(
              pathspec=rdf_paths.PathSpec(path="testfile5",
                                          pathtype="OS"))),
      collectors.ArtifactFilesDownloaderResult(
          downloaded_file=rdf_client.StatEntry(
              pathspec=rdf_paths.PathSpec(path="testfile6",
                                          pathtype="OS"))),
  ]
  with data_store.DB.GetMutationPool(token=self.token) as pool:
    for value in test_values:
      fd.Add(value, mutation_pool=pool)
def testPasswdBufferParser(self):
  """Ensure we can extract users from a passwd file."""
  parser = linux_file_parser.PasswdBufferParser()
  buf1 = rdf_client.BufferReference(data="user1:x:1000:1000:User1"
                                    " Name,,,:/home/user1:/bin/bash\n")
  buf2 = rdf_client.BufferReference(data="user2:x:1000:1000:User2"
                                    " Name,,,:/home/user2:/bin/bash\n")
  ff_result = rdf_file_finder.FileFinderResult(matches=[buf1, buf2])
  out = list(parser.Parse(ff_result, None))
  self.assertEqual(len(out), 2)
  # Both parsed entries must be User objects. (The original asserted
  # isinstance(out[1], ...) twice, leaving out[0] unchecked.)
  self.assertTrue(isinstance(out[0], rdf_client.User))
  self.assertTrue(isinstance(out[1], rdf_client.User))
  self.assertEqual(out[0].username, "user1")
  self.assertEqual(out[0].full_name, "User1 Name,,,")
def testGenerateZipButtonGetsDisabledAfterClick(self):
  """Clicking the download button must disable it while generating."""
  stat_entry = rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
      path="/foo/bar", pathtype=rdf_paths.PathSpec.PathType.OS))
  values = [rdf_file_finder.FileFinderResult(stat_entry=stat_entry)]
  hunt_urn = self.CreateGenericHuntWithCollection(values=values)
  # Approval is granted so the click starts generation instead of
  # prompting for an approval request.
  self.GrantHuntApproval(hunt_urn)

  self.Open("/")
  self.Click("css=a[grrtarget=hunts]")
  self.Click("css=td:contains('GenericHunt')")
  self.Click("css=li[heading=Results]")
  self.Click("css=button.DownloadButton")

  self.WaitUntil(self.IsElementPresent,
                 "css=button.DownloadButton[disabled]")
  self.WaitUntil(self.IsTextPresent, "Generation has started")
def testShowsPerFileDownloadButtonForFileFinderHunt(self):
  """Each file result row must offer a downloadable-URN button."""
  with self.ACLChecksDisabled():
    client_id = self.SetupClients(1)[0]
  stat_entry = rdf_client.StatEntry(aff4path=client_id.Add("fs/os/foo/bar"))
  values = [rdf_file_finder.FileFinderResult(stat_entry=stat_entry)]
  # NOTE(review): ACLChecksDisabled is entered twice; could likely be a
  # single context — confirm before merging.
  with self.ACLChecksDisabled():
    self.CreateGenericHuntWithCollection(values=values)

  # Navigate to the hunt's Results tab.
  self.Open("/")
  self.Click("css=a[grrtarget=hunts]")
  self.Click("css=td:contains('GenericHunt')")
  self.Click("css=li[heading=Results]")
  self.WaitUntil(self.IsElementPresent,
                 "css=grr-results-collection grr-downloadable-urn button")
def _testCollection(self, collection_type):
  """Create a collection of the given type referencing files, then verify
  the referenced files can be downloaded."""
  # Create a collection with URNs to some files.
  fd = aff4.FACTORY.Create(self.collection_urn, collection_type,
                           token=self.token)
  for item in [
      rdfvalue.RDFURN(self.out.Add("testfile1")),
      rdf_client.StatEntry(
          pathspec=rdf_paths.PathSpec(path="testfile2", pathtype="OS")),
      rdf_file_finder.FileFinderResult(
          stat_entry=rdf_client.StatEntry(
              pathspec=rdf_paths.PathSpec(path="testfile5",
                                          pathtype="OS"))),
      collectors.ArtifactFilesDownloaderResult(
          downloaded_file=rdf_client.StatEntry(
              pathspec=rdf_paths.PathSpec(path="testfile6",
                                          pathtype="OS"))),
  ]:
    fd.Add(item)
  fd.Close()

  self._VerifyDownload()
def testDownloadHuntResultCollection(self): """Check we can download files references in HuntResultCollection.""" # Create a collection with URNs to some files. fd = aff4.FACTORY.Create( "aff4:/testcoll", results.HuntResultCollection, token=self.token) fd.AddAsMessage(rdfvalue.RDFURN(self.out.Add("testfile1")), self.client_id) fd.AddAsMessage( rdf_client.StatEntry(aff4path=self.out.Add("testfile2")), self.client_id) fd.AddAsMessage( rdf_file_finder.FileFinderResult(stat_entry=rdf_client.StatEntry( aff4path=self.out.Add("testfile5"))), self.client_id) fd.AddAsMessage( collectors.ArtifactFilesDownloaderResult( downloaded_file=rdf_client.StatEntry( aff4path=self.out.Add("testfile6"))), self.client_id) fd.Close() self._VerifyDownload()
def testDownloadCollectionWithFlattenOption(self): """Check we can download files references in a collection.""" # Create a collection with URNs to some files. fd = sequential_collection.GeneralIndexedCollection( self.collection_urn, token=self.token) with data_store.DB.GetMutationPool(token=self.token) as pool: fd.Add(rdfvalue.RDFURN(self.out.Add("testfile1")), mutation_pool=pool) fd.Add(rdf_client.StatEntry( pathspec=rdf_paths.PathSpec(path="testfile2", pathtype="OS")), mutation_pool=pool) fd.Add(rdf_file_finder.FileFinderResult( stat_entry=rdf_client.StatEntry(pathspec=rdf_paths.PathSpec( path="testfile5", pathtype="OS"))), mutation_pool=pool) with utils.TempDirectory() as tmpdir: export_utils.DownloadCollection(self.collection_urn, tmpdir, overwrite=True, dump_client_info=True, flatten=True, token=self.token, max_threads=2) # Check that "files" folder is filled with symlinks to downloaded files. symlinks = os.listdir(os.path.join(tmpdir, "files")) self.assertEqual(len(symlinks), 3) self.assertListEqual(sorted(symlinks), [ "C.1000000000000000_fs_os_testfile1", "C.1000000000000000_fs_os_testfile2", "C.1000000000000000_fs_os_testfile5" ]) self.assertEqual( os.readlink( os.path.join(tmpdir, "files", "C.1000000000000000_fs_os_testfile1")), os.path.join(tmpdir, "C.1000000000000000", "fs", "os", "testfile1"))
def testDownloadCollectionWithFlattenOption(self): """Check we can download files references in RDFValueCollection.""" # Create a collection with URNs to some files. fd = aff4.FACTORY.Create( "aff4:/testcoll", collects.RDFValueCollection, token=self.token) fd.Add(rdfvalue.RDFURN(self.out.Add("testfile1"))) fd.Add(rdf_client.StatEntry(aff4path=self.out.Add("testfile2"))) fd.Add( rdf_file_finder.FileFinderResult(stat_entry=rdf_client.StatEntry( aff4path=self.out.Add("testfile5")))) fd.Close() with utils.TempDirectory() as tmpdir: export_utils.DownloadCollection( "aff4:/testcoll", tmpdir, overwrite=True, dump_client_info=True, flatten=True, token=self.token, max_threads=2) # Check that "files" folder is filled with symlinks to downloaded files. symlinks = os.listdir(os.path.join(tmpdir, "files")) self.assertEqual(len(symlinks), 3) self.assertListEqual( sorted(symlinks), [ "C.1000000000000000_fs_os_testfile1", "C.1000000000000000_fs_os_testfile2", "C.1000000000000000_fs_os_testfile5" ]) self.assertEqual( os.readlink( os.path.join(tmpdir, "files", "C.1000000000000000_fs_os_testfile1")), os.path.join(tmpdir, "C.1000000000000000", "fs", "os", "testfile1"))
def GlobReportMatch(self, response):
  """Called by the glob mixin whenever a path matches.

  Wraps the StatEntry in a FileFinderResult and feeds it into the
  condition chain, starting at the first condition.
  """
  super(FileFinder, self).GlobReportMatch(response)
  match_result = rdf_file_finder.FileFinderResult(stat_entry=response)
  self.ApplyCondition(match_result, condition_index=0)
def Run(self, args):
  """Walk the requested globs, apply conditions and the chosen action.

  Supports STAT (reply immediately), DOWNLOAD (upload via the worker,
  honoring the oversized-file policy) and HASH actions. Device-boundary
  crossing is controlled by args.xdev.
  """
  self.follow_links = args.follow_links
  self.process_non_regular_files = args.process_non_regular_files

  # Generate a list of mount points where we stop recursive searches.
  if args.xdev == args.XDev.NEVER:
    # Never cross device boundaries, stop at all mount points.
    self.mountpoints_blacklist = set(
        [p.mountpoint for p in psutil.disk_partitions(all=True)])
  elif args.xdev == args.XDev.LOCAL:
    # Descend into file systems on physical devices only.
    self.mountpoints_blacklist = (
        set([p.mountpoint for p in psutil.disk_partitions(all=True)]) - set(
            [p.mountpoint for p in psutil.disk_partitions(all=False)]))
  elif args.xdev == args.XDev.ALWAYS:
    # Never stop at any device boundary.
    self.mountpoints_blacklist = set()

  for fname in self.CollectGlobs(args.paths):
    self.Progress()

    self.conditions = self.ParseConditions(args)
    try:
      stat_object = os.lstat(fname)
    except OSError:
      # File vanished or is unreadable; skip silently.
      continue

    if (not self.process_non_regular_files and
        not stat.S_ISREG(stat_object.st_mode)):
      continue

    result = rdf_file_finder.FileFinderResult()
    # All conditions must pass; each may also record matches on `result`.
    conditions_apply = True
    for c in self.conditions:
      if not c(fname, stat_object, result):
        conditions_apply = False
        break
    if not conditions_apply:
      continue

    if args.action.action_type == args.action.Action.STAT:
      result.stat_entry = self.Stat(fname, stat_object,
                                    args.action.stat.resolve_links)
      self.SendReply(result)
      continue
    else:
      stat_entry = self.Stat(fname, stat_object, True)

    # We never want to hash/download the link, always the target.
    if stat.S_ISLNK(stat_object.st_mode):
      try:
        stat_object = os.stat(fname)
      except OSError:
        continue

    if args.action.action_type == args.action.Action.DOWNLOAD:
      # Oversized files are either truncated at max_size or skipped,
      # depending on the configured policy.
      max_bytes = None
      max_size = args.action.download.max_size
      if stat_entry.st_size > max_size:
        policy = args.action.download.oversized_file_policy
        policy_enum = args.action.download.OversizedFilePolicy
        if policy == policy_enum.DOWNLOAD_TRUNCATED:
          max_bytes = max_size
        elif policy == policy_enum.SKIP:
          continue
        else:
          raise ValueError("Unknown oversized file policy %s." % int(policy))

      # NOTE(review): the file object is handed to UploadFile without an
      # explicit close here — presumably UploadFile consumes and closes
      # it; confirm to rule out a descriptor leak.
      uploaded_file = self.grr_worker.UploadFile(
          open(fname, "rb"),
          args.upload_token,
          max_bytes=max_bytes,
          network_bytes_limit=self.network_bytes_limit,
          session_id=self.session_id,
          progress_callback=self.Progress)
      uploaded_file.stat_entry = stat_entry
      result.uploaded_file = uploaded_file
      # NOTE(review): unlike the HASH branch, result.stat_entry is never
      # set on the DOWNLOAD path (only uploaded_file.stat_entry) — verify
      # this is intentional.
    elif args.action.action_type == args.action.Action.HASH:
      result.stat_entry = stat_entry
      result.hash_entry = self.Hash(fname, stat_object,
                                    args.action.hash.max_size,
                                    args.action.hash.oversized_file_policy)
    self.SendReply(result)
def Run(self, args):
  """Walk the requested globs, apply conditions and the chosen action.

  STAT results are replied immediately; HASH stats then hashes the target;
  DOWNLOAD is not implemented in this version and raises. Device-boundary
  crossing is controlled by args.xdev.
  """
  self.follow_links = args.follow_links
  self.process_non_regular_files = args.process_non_regular_files

  # Generate a list of mount points where we stop recursive searches.
  if args.xdev == args.XDev.NEVER:
    # Never cross device boundaries, stop at all mount points.
    self.mountpoints_blacklist = set(
        [p.mountpoint for p in psutil.disk_partitions(all=True)])
  elif args.xdev == args.XDev.LOCAL:
    # Descend into file systems on physical devices only.
    self.mountpoints_blacklist = (
        set([p.mountpoint for p in psutil.disk_partitions(all=True)]) - set(
            [p.mountpoint for p in psutil.disk_partitions(all=False)]))
  elif args.xdev == args.XDev.ALWAYS:
    # Never stop at any device boundary.
    self.mountpoints_blacklist = set()

  for fname in self.CollectGlobs(args.paths):
    self.Progress()

    self.conditions = self.ParseConditions(args)
    try:
      stat_object = os.lstat(fname)
    except OSError:
      # File vanished or is unreadable; skip silently.
      continue

    if (not self.process_non_regular_files and
        not stat.S_ISREG(stat_object.st_mode)):
      continue

    result = rdf_file_finder.FileFinderResult()
    # All conditions must pass; each may also record matches on `result`.
    conditions_apply = True
    for c in self.conditions:
      if not c(fname, stat_object, result):
        conditions_apply = False
        break
    if not conditions_apply:
      continue

    if args.action.action_type == args.action.Action.STAT:
      result.stat_entry = self.Stat(fname, stat_object,
                                    args.action.stat.resolve_links)
      self.SendReply(result)
      continue
    else:
      result.stat_entry = self.Stat(fname, stat_object, True)

    # We never want to hash/download the link, always the target.
    if stat.S_ISLNK(stat_object.st_mode):
      try:
        stat_object = os.stat(fname)
      except OSError:
        continue

    if args.action.action_type == args.action.Action.DOWNLOAD:
      # TODO(user): DOWNLOAD
      raise NotImplementedError()
    elif args.action.action_type == args.action.Action.HASH:
      result.hash_entry = self.Hash(fname, stat_object,
                                    args.action.hash.max_size,
                                    args.action.hash.oversized_file_policy)

    self.SendReply(result)