def testWithFiles(self):
  foo = temp.AutoTempFilePath(suffix="foo")
  bar = temp.AutoTempFilePath(suffix="bar")
  baz = temp.AutoTempFilePath(suffix="baz")

  with context.MultiContext([foo, bar, baz]) as filepaths:
    self.assertLen(filepaths, 3)
    self.assertEndsWith(filepaths[0], "foo")
    self.assertEndsWith(filepaths[1], "bar")
    self.assertEndsWith(filepaths[2], "baz")

    wbopen = functools.partial(io.open, mode="wb")
    with context.MultiContext(list(map(wbopen, filepaths))) as filedescs:
      self.assertLen(filedescs, 3)
      filedescs[0].write(b"FOO")
      filedescs[1].write(b"BAR")
      filedescs[2].write(b"BAZ")

    # At this point all three files should be correctly written, closed and
    # ready for reading.

    rbopen = functools.partial(io.open, mode="rb")
    with context.MultiContext(list(map(rbopen, filepaths))) as filedescs:
      self.assertLen(filedescs, 3)
      self.assertEqual(filedescs[0].read(), b"FOO")
      self.assertEqual(filedescs[1].read(), b"BAR")
      self.assertEqual(filedescs[2].read(), b"BAZ")

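# A minimal sketch of the behavior `context.MultiContext` is assumed to
# provide above: entering a list of context managers in order and yielding
# the list of their results. The standard library's `contextlib.ExitStack`
# offers equivalent semantics; this helper is illustrative only, not the
# actual implementation.
import contextlib

@contextlib.contextmanager
def _MultiContextSketch(managers):
  with contextlib.ExitStack() as stack:
    # ExitStack unwinds the already-entered managers in reverse order, even
    # if one of the later __enter__ calls raises.
    yield [stack.enter_context(manager) for manager in managers]
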
def testFlowReportsErrorWhenCollectingFileAboveSingleLimit(self):
  with temp.AutoTempFilePath() as temp_file_path:
    target_bytes = 2**20  # 1 MiB
    less_than_necessary_bytes = target_bytes // 2

    with io.open(temp_file_path, "wb") as fd:
      fd.write(b"1" * target_bytes)

    table = f"""
    [
      {{ "collect_column": "{temp_file_path}" }}
    ]
    """

    with mock.patch.object(osquery_flow,
                           "FILE_COLLECTION_MAX_SINGLE_FILE_BYTES",
                           less_than_necessary_bytes):
      with osquery_test_lib.FakeOsqueryiOutput(stdout=table, stderr=""):
        flow_id = self._InitializeFlow(
            "Doesn't matter",
            ["collect_column"],
            check_flow_errors=False)
        progress = flow_test_lib.GetFlowProgress(self.client_id, flow_id)

    self.assertEqual(
        f"File with path {temp_file_path} is too big: "
        f"{target_bytes} bytes when the limit is "
        f"{less_than_necessary_bytes} bytes.",
        progress.error_message)

def testWorksWithReadLengthOnSeekableFile(self):
  with temp.AutoTempFilePath() as test_path:
    with open(test_path, "wb") as fd:
      fd.write(self.TEST_DATA)

    for offset, read_length in self.READ_LENGTH_INTERVALS:
      with self.subTest(
          offset=offset, read_length=read_length, stat_available=True):
        actual_bytes = self._ReadBytesWithGetFile(
            test_path,
            stat_available=True,
            offset=offset,
            read_length=read_length)
        self.assertEqual(
            self.TEST_DATA[offset:offset + read_length], actual_bytes)

      with self.subTest(
          offset=offset, read_length=read_length, stat_available=False):
        actual_bytes = self._ReadBytesWithGetFile(
            test_path,
            stat_available=False,
            offset=offset,
            read_length=read_length)
        self.assertEqual(
            self.TEST_DATA[offset:offset + read_length], actual_bytes)

def testUsrBin(self):
  args = self.grr_api.types.CreateFlowArgs("TimelineFlow")
  args.root = "/bin/".encode("utf-8")

  flow = self.RunFlowAndWait("TimelineFlow", args=args)

  with temp.AutoTempFilePath(suffix=".body") as temp_filepath:
    timeline_format = timeline_pb2.ApiGetCollectedTimelineArgs.Format.BODY

    body = flow.GetCollectedTimeline(timeline_format)
    body.WriteToFile(temp_filepath)

    with io.open(temp_filepath, mode="r", encoding="utf-8") as temp_filedesc:
      entries = list(csv.reader(temp_filedesc, delimiter="|"))

  paths = [entry[1] for entry in entries]
  # `/bin` might be a symlink to `/usr/bin`.
  self.assertTrue("/bin/bash" in paths or "/usr/bin/bash" in paths)
  self.assertTrue("/bin/cat" in paths or "/usr/bin/cat" in paths)
  self.assertTrue("/bin/chmod" in paths or "/usr/bin/chmod" in paths)
  self.assertTrue("/bin/cp" in paths or "/usr/bin/cp" in paths)
  self.assertTrue("/bin/rm" in paths or "/usr/bin/rm" in paths)
  self.assertTrue("/bin/sleep" in paths or "/usr/bin/sleep" in paths)

  for entry in entries:
    assertBodyEntrySanity(self, entry)

def testRandomFile(self):
  content = os.urandom(1024)

  response = responses.Response(responses.POST, "https://foo.bar/quux")
  response.status = 201
  response.headers = {
      "Location": "https://foo.bar/norf",
  }
  responses.add(response)

  handler = gcs_test_lib.FakeUploadHandler()
  responses.add_callback(responses.PUT, "https://foo.bar/norf", handler)

  with temp.AutoTempFilePath() as temp_path:
    with open(temp_path, mode="wb") as temp_file:
      temp_file.write(content)

    flow_id = self._Collect(path=temp_path, signed_url="https://foo.bar/quux")

  state = flow_test_lib.GetFlowState(self.client_id, flow_id)
  self.assertNotEmpty(state.encryption_key)

  encrypted_buf = io.BytesIO(handler.content)
  decrypted_buf = aead.Decrypt(encrypted_buf, state.encryption_key)
  self.assertEqual(decrypted_buf.read(), content)

def testLimitedAmount(self):
  action = FakeAction()
  uploader = uploading.TransferStoreUploader(action, chunk_size=3)

  with temp.AutoTempFilePath() as temp_filepath:
    with io.open(temp_filepath, "wb") as temp_file:
      temp_file.write(b"1234567890")

    blobdesc = uploader.UploadFilePath(temp_filepath, amount=5)

    self.assertEqual(action.charged_bytes, 5)
    self.assertLen(action.messages, 2)
    self.assertEqual(action.messages[0].item.data, zlib.compress(b"123"))
    self.assertEqual(action.messages[1].item.data, zlib.compress(b"45"))

    self.assertLen(blobdesc.chunks, 2)
    self.assertEqual(blobdesc.chunk_size, 3)
    self.assertEqual(blobdesc.chunks[0].offset, 0)
    self.assertEqual(blobdesc.chunks[0].length, 3)
    self.assertEqual(blobdesc.chunks[0].digest, Sha256(b"123"))
    self.assertEqual(blobdesc.chunks[1].offset, 3)
    self.assertEqual(blobdesc.chunks[1].length, 2)
    self.assertEqual(blobdesc.chunks[1].digest, Sha256(b"45"))

def testSize(self):
  # Named `temp_filepath` rather than `tempfile` to avoid shadowing the
  # standard library module of that name.
  with temp.AutoTempFilePath() as temp_filepath:
    with open(temp_filepath, mode="wb") as temp_filedesc:
      temp_filedesc.write(b"A" * 42)

    result = statx.Get(temp_filepath.encode("utf-8"))
    self.assertEqual(result.size, 42)

def testCustomOffset(self):
  action = FakeAction()
  uploader = uploading.TransferStoreUploader(action, chunk_size=2)

  with temp.AutoTempFilePath() as temp_filepath:
    with io.open(temp_filepath, "wb") as temp_file:
      temp_file.write(b"0123456")

    blobdesc = uploader.UploadFilePath(temp_filepath, offset=2)

    self.assertEqual(action.charged_bytes, 5)
    self.assertLen(action.messages, 3)
    self.assertEqual(action.messages[0].item.data, zlib.compress(b"23"))
    self.assertEqual(action.messages[1].item.data, zlib.compress(b"45"))
    self.assertEqual(action.messages[2].item.data, zlib.compress(b"6"))

    self.assertLen(blobdesc.chunks, 3)
    self.assertEqual(blobdesc.chunk_size, 2)
    self.assertEqual(blobdesc.chunks[0].offset, 2)
    self.assertEqual(blobdesc.chunks[0].length, 2)
    self.assertEqual(blobdesc.chunks[0].digest, Sha256(b"23"))
    self.assertEqual(blobdesc.chunks[1].offset, 4)
    self.assertEqual(blobdesc.chunks[1].length, 2)
    self.assertEqual(blobdesc.chunks[1].digest, Sha256(b"45"))
    self.assertEqual(blobdesc.chunks[2].offset, 6)
    self.assertEqual(blobdesc.chunks[2].length, 1)
    self.assertEqual(blobdesc.chunks[2].digest, Sha256(b"6"))

def testWorksWithReadLengthAndFileSizeOverrideOnSeekableFiles(self):
  with temp.AutoTempFilePath() as test_path:
    with open(test_path, "wb") as fd:
      fd.write(self.TEST_DATA)

    for (offset, read_length,
         file_size_override) in self.READ_LENGTH_FILE_SIZE_OVERRIDE_INTERVALS:
      upper_limit = min(offset + read_length, file_size_override)

      with self.subTest(
          offset=offset,
          read_length=read_length,
          file_size_override=file_size_override,
          stat_available=True):
        actual_bytes = self._ReadBytesWithGetFile(
            test_path,
            stat_available=True,
            offset=offset,
            read_length=read_length,
            file_size_override=file_size_override)
        self.assertEqual(self.TEST_DATA[offset:upper_limit], actual_bytes)

      with self.subTest(
          offset=offset,
          read_length=read_length,
          file_size_override=file_size_override,
          stat_available=False):
        actual_bytes = self._ReadBytesWithGetFile(
            test_path,
            stat_available=False,
            offset=offset,
            read_length=read_length,
            file_size_override=file_size_override)
        self.assertEqual(self.TEST_DATA[offset:upper_limit], actual_bytes)

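# The expected window in the read-length tests follows from combining the
# parameters: reading starts at `offset` and stops at whichever comes first
# of `offset + read_length`, `file_size_override`, and the end of the file.
# A hedged sketch of that arithmetic (the helper name is ours, not part of
# the GetFile API):
def _ExpectedReadWindow(data, offset, read_length=None,
                        file_size_override=None):
  upper = len(data)
  if read_length is not None:
    upper = min(upper, offset + read_length)
  if file_size_override is not None:
    upper = min(upper, file_size_override)
  return data[offset:upper]
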
def testManyChunks(self):
  action = FakeAction()
  uploader = uploading.TransferStoreUploader(action, chunk_size=3)

  with temp.AutoTempFilePath() as temp_filepath:
    # The file must be written and hashed as bytes: `zlib.compress` and
    # `Sha256` operate on byte strings, not text.
    with io.open(temp_filepath, "wb") as temp_file:
      temp_file.write(b"1234567890")

    blobdesc = uploader.UploadFilePath(temp_filepath)

    self.assertEqual(action.charged_bytes, 10)
    self.assertLen(action.messages, 4)
    self.assertEqual(action.messages[0].item.data, zlib.compress(b"123"))
    self.assertEqual(action.messages[1].item.data, zlib.compress(b"456"))
    self.assertEqual(action.messages[2].item.data, zlib.compress(b"789"))
    self.assertEqual(action.messages[3].item.data, zlib.compress(b"0"))

    self.assertLen(blobdesc.chunks, 4)
    self.assertEqual(blobdesc.chunk_size, 3)
    self.assertEqual(blobdesc.chunks[0].offset, 0)
    self.assertEqual(blobdesc.chunks[0].length, 3)
    self.assertEqual(blobdesc.chunks[0].digest, Sha256(b"123"))
    self.assertEqual(blobdesc.chunks[1].offset, 3)
    self.assertEqual(blobdesc.chunks[1].length, 3)
    self.assertEqual(blobdesc.chunks[1].digest, Sha256(b"456"))
    self.assertEqual(blobdesc.chunks[2].offset, 6)
    self.assertEqual(blobdesc.chunks[2].length, 3)
    self.assertEqual(blobdesc.chunks[2].digest, Sha256(b"789"))
    self.assertEqual(blobdesc.chunks[3].offset, 9)
    self.assertEqual(blobdesc.chunks[3].length, 1)
    self.assertEqual(blobdesc.chunks[3].digest, Sha256(b"0"))

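# A sketch of the chunking contract the uploader tests above rely on: the
# file content is split into `chunk_size`-byte chunks starting at `offset`,
# optionally capped at `amount` bytes, and every chunk is described by its
# offset, length and SHA-256 digest. Illustrative only; the real logic
# lives in `uploading.TransferStoreUploader`.
def _ChunkFileSketch(data, chunk_size, offset=0, amount=None):
  end = len(data) if amount is None else min(len(data), offset + amount)
  chunks = []
  position = offset
  while position < end:
    chunk = data[position:min(position + chunk_size, end)]
    chunks.append((position, len(chunk), Sha256(chunk)))
    position += len(chunk)
  return chunks

# For example, `_ChunkFileSketch(b"0123456", chunk_size=2, offset=2)` yields
# descriptors for b"23", b"45" and b"6" at offsets 2, 4 and 6, matching the
# expectations in `testCustomOffset` above.
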
def testGetSize(self):
  with temp.AutoTempFilePath() as temp_filepath:
    with io.open(temp_filepath, "wb") as fd:
      fd.write(b"foobarbaz")

    stat = filesystem.Stat.FromPath(temp_filepath, follow_symlink=False)
    self.assertEqual(stat.GetSize(), 9)

def testStatExtAttrUnicode(self):
  with temp.AutoTempFilePath() as temp_filepath:
    name_0 = "user.żółć".encode("utf-8")
    value_0 = "jaźń".encode("utf-8")
    filesystem_test_lib.SetExtAttr(temp_filepath, name=name_0, value=value_0)

    name_1 = "user.rtęć".encode("utf-8")
    value_1 = "kość".encode("utf-8")
    filesystem_test_lib.SetExtAttr(temp_filepath, name=name_1, value=value_1)

    action = rdf_file_finder.FileFinderAction.Stat(collect_ext_attrs=True)
    results = self._RunFileFinder([temp_filepath], action)
    self.assertLen(results, 1)

    ext_attrs = results[0].stat_entry.ext_attrs
    self.assertLen(ext_attrs, 2)
    self.assertEqual(ext_attrs[0].name, name_0)
    self.assertEqual(ext_attrs[0].value, value_0)
    self.assertEqual(ext_attrs[1].name, name_1)
    self.assertEqual(ext_attrs[1].value, value_1)

def testHash(self):

  def MD5(data):
    return text.Hexify(hashlib.md5(data).digest())

  def SHA256(data):
    return text.Hexify(hashlib.sha256(data).digest())

  with temp.AutoTempFilePath() as filepath:
    content = b"FOOBARBAZ"
    with io.open(filepath, "wb") as filedesc:
      filedesc.write(content)

    results = self._RunQuery("""
        SELECT md5, sha256
        FROM hash
        WHERE path = "{}";
    """.format(filepath))
    self.assertLen(results, 1)

    table = results[0].table
    self.assertLen(table.rows, 1)
    self.assertEqual(list(table.Column("md5")), [MD5(content)])
    self.assertEqual(list(table.Column("sha256")), [SHA256(content)])

def testFromStat(self):
  with temp.AutoTempFilePath() as filepath:
    time_before = round(time.time())

    with io.open(filepath, mode="wb") as filedesc:
      filedesc.write(b"1234567")

    time_after = round(time.time())

    # TODO(hanuszczak): `AutoTempFilePath` should return a `Path` object.
    filepath_bytes = filepath.encode("utf-8")
    filepath_stat = os.lstat(filepath)

    entry = rdf_timeline.TimelineEntry.FromStat(filepath_bytes, filepath_stat)

    self.assertEqual(entry.size, 7)
    self.assertTrue(stat_mode.S_ISREG(entry.mode))

    # TODO(hanuszczak): Switch this test to use nanosecond precision once we
    # are Python 3.7-only.
    self.assertBetween(round(entry.atime_ns / 1e9), time_before, time_after)
    self.assertBetween(round(entry.mtime_ns / 1e9), time_before, time_after)
    self.assertBetween(round(entry.ctime_ns / 1e9), time_before, time_after)

    self.assertEqual(entry.dev, filepath_stat.st_dev)
    self.assertEqual(entry.ino, filepath_stat.st_ino)
    self.assertEqual(entry.uid, filepath_stat.st_uid)
    self.assertEqual(entry.gid, filepath_stat.st_gid)

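# For reference, `os.stat_result` already exposes the nanosecond timestamps
# that `TimelineEntry.FromStat` is compared against; a small sketch of the
# fields involved (standard library only, helper name is ours):
def _StatTimesNsSketch(path):
  result = os.lstat(path)
  # st_atime_ns/st_mtime_ns/st_ctime_ns are integers in nanoseconds, which
  # the test above rounds down to second precision for comparison.
  return result.st_atime_ns, result.st_mtime_ns, result.st_ctime_ns
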
def testSavesRawDataCorrectly(self):
  with temp.AutoTempFilePath() as path:
    p = config_parser.YamlConfigFileParser(path)
    p.SaveData({"Section1": {"test": "val2"}})

    with open(path, "r") as fd:
      self.assertEqual(fd.read(), "Section1:\n  test: val2\n")

def testFlowReportsErrorWhenCollectingSingleFileAboveTotalLimit(self):
  with temp.AutoTempFilePath() as temp_file_path:
    target_bytes = 2**20  # 1 MiB
    less_than_necessary_bytes = target_bytes // 2

    with io.open(temp_file_path, "wb") as fd:
      fd.write(b"1" * target_bytes)

    table = f"""
    [
      {{ "collect_column": "{temp_file_path}" }}
    ]
    """

    with mock.patch.object(osquery_flow, "FILE_COLLECTION_MAX_TOTAL_BYTES",
                           less_than_necessary_bytes):
      with osquery_test_lib.FakeOsqueryiOutput(stdout=table, stderr=""):
        flow_id = self._InitializeFlow(
            file_collection_columns=["collect_column"],
            check_flow_errors=False)
        progress = flow_test_lib.GetFlowProgress(self.client_id, flow_id)

    self.assertEqual(
        "Files for collection exceed the total size limit of "
        f"{less_than_necessary_bytes} bytes.",
        progress.error_message)

def testWorksWithFileSizeOverrideOnSeekableFile(self):
  with temp.AutoTempFilePath() as test_path:
    with open(test_path, "wb") as fd:
      fd.write(self.TEST_DATA)

    for offset, file_size_override in self.FILE_SIZE_OVERRIDE_INTERVALS:
      with self.subTest(
          offset=offset,
          file_size_override=file_size_override,
          stat_available=True):
        actual_bytes = self._ReadBytesWithGetFile(
            test_path,
            stat_available=True,
            offset=offset,
            file_size_override=file_size_override)
        self.assertEqual(self.TEST_DATA[offset:file_size_override],
                         actual_bytes)

      with self.subTest(
          offset=offset,
          file_size_override=file_size_override,
          stat_available=False):
        actual_bytes = self._ReadBytesWithGetFile(
            test_path,
            stat_available=False,
            offset=offset,
            file_size_override=file_size_override)
        self.assertEqual(self.TEST_DATA[offset:file_size_override],
                         actual_bytes)

def testFlowReportsErrorWhenCollectingRowsAboveLimit(self):
  with temp.AutoTempFilePath() as temp_file_path:
    with io.open(temp_file_path, mode="w", encoding="utf-8") as fd:
      fd.write("Just sample text to put in the file.")

    table = f"""
    [
      {{ "collect_column": "{temp_file_path}" }},
      {{ "collect_column": "{temp_file_path}" }}
    ]
    """

    with mock.patch.object(osquery_flow, "FILE_COLLECTION_MAX_ROWS", 1):
      with osquery_test_lib.FakeOsqueryiOutput(stdout=table, stderr=""):
        flow_id = self._InitializeFlow(
            file_collection_columns=["collect_column"],
            check_flow_errors=False)
        progress = flow_test_lib.GetFlowProgress(self.client_id, flow_id)

    self.assertEqual(
        "Requested file collection on a table with 2 rows, "
        "but the limit is 1 rows.",
        progress.error_message)

def testRetryUpload(self):
  bq_client = bigquery.BigQueryClient()

  resp = mock.Mock()
  resp.status = 503
  error = mock.Mock()
  error.resp = resp
  job = mock.Mock()
  # Always raise errors.HttpError on job.execute().
  job.configure_mock(
      **{"execute.side_effect": errors.HttpError(resp, b"nocontent")})
  job_id = "hunts_HFFE1D044_Results_1446056474"

  with temp.AutoTempFilePath() as filepath:
    with io.open(filepath, "w", encoding="utf-8") as filedesc:
      filedesc.write("{data}")

    with mock.patch.object(time, "sleep") as mock_sleep:
      with self.assertRaises(bigquery.BigQueryJobUploadError):
        bq_client.RetryUpload(job, job_id, error)

  # Make sure retry sleeps are correct.
  max_calls = config.CONFIG["BigQuery.retry_max_attempts"]
  retry_interval = config.CONFIG["BigQuery.retry_interval"]
  multiplier = config.CONFIG["BigQuery.retry_multiplier"]
  self.assertEqual(job.execute.call_count, max_calls)
  mock_sleep.assert_has_calls([
      mock.call(retry_interval.ToFractional(rdfvalue.SECONDS)),
      mock.call(retry_interval.ToFractional(rdfvalue.SECONDS) * multiplier)
  ])

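# The sleep calls asserted above suggest a geometric backoff schedule: the
# first retry sleeps `retry_interval` seconds and every following retry
# multiplies the previous sleep by `retry_multiplier`. A hedged sketch of
# that schedule (the helper name is ours, not the BigQuery client's API):
def _RetrySleepsSketch(interval_secs, multiplier, max_attempts):
  # One sleep between each pair of consecutive attempts.
  return [interval_secs * multiplier**i for i in range(max_attempts - 1)]
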
def testRegular(self):
  with temp.AutoTempFilePath() as temp_filepath:
    stat = filesystem.Stat.FromPath(temp_filepath, follow_symlink=False)
    self.assertFalse(stat.IsDirectory())
    self.assertTrue(stat.IsRegular())
    self.assertFalse(stat.IsSocket())
    self.assertFalse(stat.IsSymlink())

def testStatExtAttrs(self):
  with temp.AutoTempFilePath() as temp_filepath:
    filesystem_test_lib.SetExtAttr(
        temp_filepath, name=b"user.foo", value=b"norf")
    filesystem_test_lib.SetExtAttr(
        temp_filepath, name=b"user.bar", value=b"quux")

    action = rdf_file_finder.FileFinderAction.Stat(collect_ext_attrs=True)
    results = self._RunFileFinder([temp_filepath], action)
    self.assertLen(results, 1)

    ext_attrs = results[0].stat_entry.ext_attrs
    self.assertEqual(ext_attrs[0].name, b"user.foo")
    self.assertEqual(ext_attrs[0].value, b"norf")
    self.assertEqual(ext_attrs[1].name, b"user.bar")
    self.assertEqual(ext_attrs[1].value, b"quux")

    action = rdf_file_finder.FileFinderAction.Stat(collect_ext_attrs=False)
    results = self._RunFileFinder([temp_filepath], action)
    self.assertLen(results, 1)

    ext_attrs = results[0].stat_entry.ext_attrs
    self.assertFalse(ext_attrs)

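# On Linux, the attributes set by `SetExtAttr` can also be inspected with
# the standard library directly; a minimal sketch (Linux-only, and only on
# filesystems that support the `user.` xattr namespace):
def _ListExtAttrsSketch(path):
  # os.listxattr/os.getxattr are available on Linux only.
  return {name: os.getxattr(path, name) for name in os.listxattr(path)}
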
def testReadsTheWholeStatableFileWhenNoSizesPassed(self):
  with temp.AutoTempFilePath() as test_path:
    with open(test_path, "wb") as fd:
      fd.write(self.TEST_DATA)

    actual_bytes = self._ReadBytesWithGetFile(test_path, stat_available=True)
    self.assertEqual(self.TEST_DATA, actual_bytes)

def testNoEncryptionKey(self):
  with temp.AutoTempFilePath() as temppath:
    args = rdf_large_file.CollectLargeFileArgs()
    args.path_spec.path = temppath
    args.path_spec.pathtype = rdf_paths.PathSpec.PathType.OS

    with self.assertRaisesRegex(ValueError, "key"):
      list(large_file.CollectLargeFile(args))

def testSavesRawDataCorrectly(self):
  with temp.AutoTempFilePath() as path:
    p = config_parser.IniConfigFileParser(path)
    p.SaveData({"Section1.test": "val2"})

    with open(path, "r") as fd:
      self.assertEqual(fd.read(), "[DEFAULT]\nSection1.test = val2\n\n")

def testRaisesWhenFileIsNotAccessible(self):
  with temp.AutoTempFilePath() as path:
    with open(path, "w") as fd:
      fd.write("")
    # Leave only the owner's write bit so that reading the file back fails.
    os.chmod(path, stat.S_IWUSR)

    with self.assertRaises(config_parser.ReadDataPermissionError):
      p = config_parser.IniConfigFileParser(path)
      p.ReadData()

def testFlowDoesntCollectWhenColumnsAboveLimit(self):
  with temp.AutoTempFilePath() as temp_file_path:
    with io.open(temp_file_path, mode="w", encoding="utf-8") as fd:
      fd.write("Just sample text to put in the file.")

    with mock.patch.object(osquery_flow, "FILE_COLLECTION_MAX_COLUMNS", 1):
      # Should raise immediately, no need to fake osquery output.
      with self.assertRaises(RuntimeError):
        self._RunFlow(file_collection_columns=["collect1", "collect2"])

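# The osquery file-collection limits exercised by these tests (single-file
# size, total size, row count, column count) appear to be simple threshold
# checks applied before any file content is fetched. A hedged sketch of
# that gating logic; the function and parameter names are ours, not the
# flow's actual API:
def _CheckCollectionLimitsSketch(file_sizes, num_columns, max_single_bytes,
                                 max_total_bytes, max_rows, max_columns):
  if num_columns > max_columns:
    raise RuntimeError("Too many file collection columns requested.")
  if len(file_sizes) > max_rows:
    raise RuntimeError("Too many rows to collect files from.")
  for size in file_sizes:
    if size > max_single_bytes:
      raise RuntimeError("A file exceeds the single-file size limit.")
  if sum(file_sizes) > max_total_bytes:
    raise RuntimeError("Files for collection exceed the total size limit.")
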
def testNonSqliteDatabase(self):
  with temp.AutoTempFilePath(suffix="-journal") as filepath:
    with io.open(filepath, "wb") as filedesc:
      filedesc.write(b"foobar")

    with io.open(filepath, "rb") as filedesc:
      # This should not fail, but return an empty list of results.
      results = list(chrome_history.ChromeParser().Parse(filepath, filedesc))
      self.assertEmpty(results)

def testReturnsForExistingPathLinux(self):
  with temp.AutoTempFilePath() as path:
    fstype = timeline.GetFilesystemType(path.encode("utf-8"))

    # `/proc/filesystems` lists all filesystems supported by the kernel.
    with open("/proc/filesystems", mode="r", encoding="utf-8") as proc_fs:
      supported_fstypes = set(proc_fs.read().split())

    self.assertIn(fstype, supported_fstypes)

def testGetFlagsSymlink(self):
  with temp.AutoTempDirPath(remove_non_empty=True) as temp_dirpath, \
       temp.AutoTempFilePath() as temp_filepath:
    temp_linkpath = os.path.join(temp_dirpath, "foo")
    os.symlink(temp_filepath, temp_linkpath)

    stat = filesystem.Stat.FromPath(temp_linkpath, follow_symlink=False)
    self.assertTrue(stat.IsSymlink())
    self.assertEqual(stat.GetLinuxFlags(), 0)
    self.assertEqual(stat.GetOsxFlags(), 0)

def testReadsRawDataCorrectly(self):
  with temp.AutoTempFilePath() as path:
    with open(path, "w") as fd:
      fd.write("""
Section1:
  test: val2
""")

    p = config_parser.YamlConfigFileParser(path)
    self.assertEqual(p.ReadData(), {"Section1": {"test": "val2"}})