def testSelectArtifactsForChecks(self):
  """Checks that OS-appropriate artifacts are selected by the flow.

  Runs the flow once against a Linux client and once against a Windows
  client, and asserts that the flow state requests artifacts matching
  each platform.
  """
  # Linux client: Debian package list and sshd config should be wanted.
  client_id = self.SetupLinuxUser()
  session_id, _ = self.RunFlow(client_id)
  state = flow_test_lib.GetFlowState(self.client_id, session_id)
  for artifact_name in ("DebianPackagesStatus", "SshdConfigFile"):
    self.assertIn(artifact_name, state.artifacts_wanted)

  # Windows client: the WMI software inventory artifact should be wanted.
  client_id = self.SetupWindowsUser()
  session_id, _ = self.RunFlow(client_id)
  # NOTE(review): state is read via self.client_id rather than the locally
  # returned client_id — presumably the Setup*User helpers also update
  # self.client_id; confirm against the test base class.
  state = flow_test_lib.GetFlowState(self.client_id, session_id)
  self.assertIn("WMIInstalledSoftware", state.artifacts_wanted)
def testRandomFile(self):
  """Collects a random binary file and verifies the uploaded ciphertext.

  Mocks the GCS signed-URL upload protocol, runs the collection flow on a
  temp file of random bytes, then decrypts the captured upload with the
  flow's encryption key and compares it to the original content.
  """
  data = os.urandom(1024)

  # Fake the upload-session initiation: POST to the signed URL returns 201
  # with the resumable-upload location.
  init_response = responses.Response(responses.POST, "https://foo.bar/quux")
  init_response.status = 201
  init_response.headers = {
      "Location": "https://foo.bar/norf",
  }
  responses.add(init_response)

  # Capture every PUT against the upload location.
  upload_handler = gcs_test_lib.FakeUploadHandler()
  responses.add_callback(responses.PUT, "https://foo.bar/norf", upload_handler)

  with temp.AutoTempFilePath() as temp_path:
    with open(temp_path, mode="wb") as out_file:
      out_file.write(data)

    flow_id = self._Collect(path=temp_path, signed_url="https://foo.bar/quux")

  state = flow_test_lib.GetFlowState(self.client_id, flow_id)
  self.assertNotEmpty(state.encryption_key)

  # Decrypt what the fake handler captured; it must round-trip to the
  # original random bytes.
  plaintext = aead.Decrypt(io.BytesIO(upload_handler.content),
                           state.encryption_key)
  self.assertEqual(plaintext.read(), data)
def testMultiGetFileMultiFiles(self):
  """Test MultiGetFile downloading many files at once.

  Creates 30 small files, runs MultiGetFile over all of them with a
  pending-transfer cap of 10, then verifies the stored file contents and
  hashes (relational DB path) or the internal flow state and AFF4 contents
  (legacy path).
  """
  client_mock = action_mocks.MultiGetFileClientMock()

  pathspecs = []
  # Make 30 files to download.
  for i in range(30):
    path = os.path.join(self.temp_dir, "test_%s.txt" % i)
    with open(path, "wb") as fd:
      # Bug fix: the file is opened in binary mode, so a bytes literal is
      # required ("Hello" raises TypeError on Python 3; b"Hello" is
      # equivalent on Python 2).
      fd.write(b"Hello")
    pathspecs.append(
        rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                           path=path))

  # Cap concurrent transfers at 10 so the pending-queue limits below are
  # meaningful.
  args = transfer.MultiGetFileArgs(pathspecs=pathspecs,
                                   maximum_pending_files=10)
  session_id = flow_test_lib.TestFlowHelper(
      transfer.MultiGetFile.__name__,
      client_mock,
      token=self.token,
      client_id=self.client_id,
      args=args)

  if data_store.RelationalDBReadEnabled():
    # Now open each file and make sure the data is there.
    for pathspec in pathspecs:
      cp = db.ClientPath.FromPathSpec(self.client_id.Basename(), pathspec)
      fd_rel_db = file_store.OpenFile(cp)
      # Binary read must compare against bytes (matches the b"Hello"
      # written above).
      self.assertEqual(b"Hello", fd_rel_db.read())

      # Check that SHA256 hash of the file matches the contents
      # hash and that MD5 and SHA1 are set.
      history = data_store.REL_DB.ReadPathInfoHistory(
          cp.client_id, cp.path_type, cp.components)
      self.assertEqual(history[-1].hash_entry.sha256,
                       fd_rel_db.hash_id.AsBytes())
      self.assertIsNotNone(history[-1].hash_entry.sha1)
      self.assertIsNotNone(history[-1].hash_entry.md5)
  else:
    # Check up on the internal flow state.
    flow_state = flow_test_lib.GetFlowState(
        self.client_id, session_id, token=self.token)
    # All the pathspecs should be in this list.
    self.assertLen(flow_state.indexed_pathspecs, 30)

    # At any one time, there should not be more than 10 files or hashes
    # pending.
    self.assertLessEqual(len(flow_state.pending_files), 10)
    self.assertLessEqual(len(flow_state.pending_hashes), 10)

    # When we finish there should be no pathspecs stored in the flow state.
    for flow_pathspec in flow_state.indexed_pathspecs:
      self.assertIsNone(flow_pathspec)
    for flow_request_data in flow_state.request_data_list:
      self.assertIsNone(flow_request_data)

    for pathspec in pathspecs:
      urn = pathspec.AFF4Path(self.client_id)
      fd = aff4.FACTORY.Open(urn, token=self.token)
      self.assertEqual(b"Hello", fd.read())