def test_fail_safe_write_file(self):
    """Check write_binary_to_file: file creation, overwrite flags, and file_copy."""
    target = os.path.join(self.tmp_dir.name, "test_folder", "test_file")

    # Initial write creates the intermediate directory and the file itself.
    write_binary_to_file(b'this is a test', target)
    self.assertTrue(os.path.exists(target), "file not created")
    self.assertEqual(get_binary_from_file(target), b'this is a test', "written data not correct")

    # overwrite=False must leave the existing content untouched.
    write_binary_to_file(b'do not overwrite', target, overwrite=False)
    self.assertEqual(get_binary_from_file(target), b'this is a test', "written data not correct")

    # overwrite=True replaces the content.
    write_binary_to_file(b'overwrite', target, overwrite=True)
    self.assertEqual(get_binary_from_file(target), b'overwrite', "written data not correct")

    # file_copy=True writes a "<name>-1" sibling and keeps the original intact.
    write_binary_to_file(b'second_overwrite', target, file_copy=True)
    copy_path = "{}-1".format(target)
    self.assertTrue(os.path.exists(copy_path), "new file copy does not exist")
    self.assertEqual(get_binary_from_file(target), b'overwrite', "original file no longer correct")
    self.assertEqual(get_binary_from_file(copy_path), b'second_overwrite', "binary of new file not correct")
def test_fail_safe_read_file(self):
    """Check get_binary_from_file for a regular file, a missing file, and a symlink."""
    # Regular file: content is returned as bytes.
    test_file_path = os.path.join(self.get_directory_of_current_file(), "data", "read_test")
    self.assertEqual(get_binary_from_file(test_file_path), b'this is a test', "content not correct")

    # Missing file: the fail-safe read yields empty bytes instead of raising.
    none_existing_file_path = os.path.join(self.get_directory_of_current_file(), "data", "none_existing_file")
    self.assertEqual(get_binary_from_file(none_existing_file_path), b'', "content not correct")

    # Symbolic link: a textual description of the link target is returned.
    link_path = os.path.join(self.get_directory_of_current_file(), "data", "link_test")
    link_content = get_binary_from_file(link_path)
    assert link_content == 'symbolic link -> read_test'
def unpack_function(file_path, tmp_dir):
    """Extract the container's payload files into tmp_dir and return its meta data."""
    raw_content = get_binary_from_file(file_path)
    meta = _get_meta_data(raw_content)
    # Record the constant ascii85 size overhead so the caller can account for it.
    meta['encoding_overhead'] = ENCODING_OVERHEAD
    extracted_payloads = _get_payloads(raw_content)
    _store_files(extracted_payloads, tmp_dir)
    return meta
def test_set_binary():
    """set_binary() must derive the firmware's md5 from the given binary."""
    test_file = '{}/get_files_test/testfile1'.format(get_test_data_dir())
    binary_content = get_binary_from_file(test_file)
    firmware = Firmware()
    firmware.set_binary(binary_content)
    assert firmware.md5 == 'e802ca22f6cd2d9357cf3da1d191879e'
def _sync_view(self, plugin_path):
    """Push this plugin's web view (if one exists) to the view updater."""
    if not plugin_path:
        return
    view_source = self._get_view_file_path(plugin_path)
    if view_source is None:
        # Plugin ships no view file; nothing to synchronize.
        return
    view_content = get_binary_from_file(view_source)
    with ConnectTo(ViewUpdater, self.config) as connection:
        connection.update_view(self.NAME, view_content)
def generate_uid(file_path):
    """
    Derive the UID ("<sha256>_<size>") of the file at file_path.

    :param file_path: Path of the file to identify.
    :return: "<sha256 hex digest>_<file size>", or "0_0" if the file
             could not be read as bytes.
    """
    file_data = get_binary_from_file(file_path)
    # get_binary_from_file may yield empty or non-bytes content (e.g. on read
    # failure); use isinstance instead of an exact type() comparison and rely
    # on bytes truthiness for the empty check.
    if not isinstance(file_data, bytes) or not file_data:
        return "0_0"
    file_hash = sha256(file_data).hexdigest()
    file_size = get_file_size(file_path)
    return "{}_{}".format(file_hash, file_size)
def test_get_raw_payloads(self):
    """_get_raw_payloads should find all three ascii85 payloads in the test file."""
    payloads = _get_raw_payloads(get_binary_from_file(TEST_FILE))
    self.assertEqual(len(payloads), 3, "number of payloads not correct")
    expected = [
        (b'<~<+oue+DGm>FD,5.Anc:,F<FCgH#.D-A0C~>', "simple payload not correct"),
        (b'<~<+oue+DGm>@3BW&@rH6q+Dl72BHV,0DJ*O$+E1b7Ci<`m+EV:*F<GX<Dfol,+Cf>-FCAm$+\nEM+;ATD3q+Dbb0ATJu&DIal2D]it9/hSa~>', "multiline payload not correct"),
        (b'<~@;^"*BOu3kAoD^,@<;~>', "other header format"),
    ]
    for index, (payload, message) in enumerate(expected):
        self.assertEqual(payloads[index], payload, message)
def unpack_function(file_path, tmp_dir):
    """Run every configured extractor over the file and dump its findings into tmp_dir."""
    raw_binary = get_binary_from_file(file_path)
    meta_data = {}
    for extractor in EXTRACTOR_LIST:
        sections = extractor.extract_function(raw_binary, *extractor.optional_parameters)
        dump_files(sections, tmp_dir, suffix=extractor.file_suffix)
        # Record how many sections each extractor produced.
        meta_data[extractor.name] = len(sections)
    return meta_data
def test_store_blacklist():
    """_store_blacklist must write the entries newline-separated (no trailing newline)."""
    test_data = ['abc', 'def']
    # Use the context manager so the temporary directory is removed even if
    # an assertion fails (the original leaked it on failure).
    with TemporaryDirectory() as tmp_dir:
        tmp_file_path = os.path.join(tmp_dir, 'blacklist_test_file.txt')
        _store_blacklist(test_data, tmp_file_path)
        assert os.path.exists(tmp_file_path)
        result = get_binary_from_file(tmp_file_path)
        assert result == b'abc\ndef'
def get_template_as_string(view_name: str) -> str:
    '''
    Read template ``view_name`` from the template directory.

    :param view_name: The name of the template file.
    :return: The contents of the template file as string.
    '''
    template_path = Path(get_template_dir()) / view_name
    raw_template = get_binary_from_file(str(template_path))
    return raw_template.decode('utf-8')
def _repack_extracted_files(extraction_dir, out_file_path):
    """Pack everything in extraction_dir into a gzipped tar and return its binary."""
    tar_command = 'tar -C {} -cvzf {} .'.format(extraction_dir.name, out_file_path)
    with Popen(tar_command, shell=True, stdout=PIPE) as process:
        tar_output = process.stdout.read().decode()
        logging.debug('tar -cvzf:\n {}'.format(tar_output))
        return get_binary_from_file(out_file_path)
def __init__(self, file_path, raw=None):
    """Parse the container at file_path, or from the explicitly passed raw bytes."""
    self.errors = []
    self.warnings = []
    # An explicitly passed binary takes precedence over reading from disk.
    self.raw = get_binary_from_file(file_path) if raw is None else raw
    self.parse_header()
    self.extract_payload()
    self.log_errors_and_warnings()
def unpack_function(file_path, tmp_dir):
    """
    file_path specifies the input file.
    tmp_dir should be used to store the extracted files.
    Optional: Return a dict with meta information
    """
    firmware_binary = get_binary_from_file(file_path)
    commands = get_pjl_commands(firmware_binary)
    # Dump all upgrade payloads plus the printer fingerprint into tmp_dir.
    extract_all_upgrades(firmware_binary, commands, tmp_dir)
    extract_fingerprint(firmware_binary, tmp_dir)
    return {'pjl_commands': commands}
def unpack_function(file_path, tmp_dir):
    """
    file_path specifies the input file.
    tmp_dir should be used to store the extracted files.
    Optional: Return a dict with meta information
    """
    compressed_content = get_binary_from_file(file_path)
    inflated_content = zlib.decompress(compressed_content)
    write_binary_to_file(inflated_content, os.path.join(tmp_dir, "zlib_decompressed"))
    return {}
def test_extraction_new_container_format(self):
    """Unpacking a new-style HP container yields meta data, fingerprint, and payload."""
    input_file = Path(TEST_DATA_DIR, 'hp_new_container.pjl')
    _, meta_data = self.unpacker.extract_files_from_file(str(input_file), self.tmp_dir.name)
    assert len(meta_data) > 1
    assert Path(self.tmp_dir.name, 'fingerprint.txt').exists()
    payload_path = Path(self.tmp_dir.name, '812.bin')
    assert payload_path.exists()
    payload = get_binary_from_file(str(payload_path))
    # Payload must start with the expected magic bytes and have the full length.
    assert payload[0:4] == b'\x94\x1E\x12\x00'
    assert len(payload) == 224
def unpack_function(file_path, tmp_dir):
    """Cut the image at 0xff padding and dump decompressed LZMA streams into tmp_dir."""
    raw_binary = get_binary_from_file(file_path)
    padded_sections = cut_at_padding(raw_binary, padding_min_length=16, padding_pattern=b'\xff')
    lzma_streams = extract_lzma_streams(raw_binary)
    decompressed_streams = get_decompressed_lzma_streams(lzma_streams)
    dump_files(padded_sections, tmp_dir)
    dump_files(decompressed_streams, tmp_dir, suffix='_lzma_decompressed')
    return _get_meta_data(padded_sections, lzma_streams)
def test_extract_file_from_upgrade():
    """_extract_file_from_upgrade dumps the upgrade payload under its original name."""
    with TemporaryDirectory(prefix='fact_test_') as tmp_dir:
        dumped_file = Path(tmp_dir, 'HP_Color_LaserJet_CP4525.bin')
        _extract_file_from_upgrade(TEST_DATA_UPGRADE_RAW, TEST_COMMAND, tmp_dir)
        assert dumped_file.exists()
        assert get_binary_from_file(str(dumped_file)) == b'\x00Test File 1'
def tar_repack(self, file_path):
    """
    Extract the container at file_path and repack its contents as a tar.gz archive.

    :param file_path: Path of the container file to repack.
    :return: The binary content of the resulting .tar.gz archive.
    """
    # Context managers guarantee both temp dirs are removed even if extraction
    # or the tar call raises (the original leaked them on exception).
    with TemporaryDirectory(prefix='FAF_tar_repack') as extraction_dir, \
            TemporaryDirectory(prefix='FAF_tar_repack') as container_storage:
        self.extract_files_from_file(file_path, extraction_dir)
        out_file_path = os.path.join(container_storage, 'download.tar.gz')
        # Run Popen as a context manager so the stdout pipe is closed and the
        # child is reaped (matches _repack_extracted_files elsewhere in the file).
        with Popen('tar -C {} -cvzf {} .'.format(extraction_dir, out_file_path), shell=True, stdout=PIPE) as process:
            output = process.stdout.read().decode()
        logging.debug('tar -cvzf:\n {}'.format(output))
        return get_binary_from_file(out_file_path)
def unpack_function(file_path: str, tmp_dir: str) -> dict:
    """
    file_path specifies the input file.
    tmp_dir should be used to store the extracted files.
    Optional: Return a dict with meta information
    """
    raw_binary = get_binary_from_file(file_path)
    sections = cut_at_padding(raw_binary, padding_min_length=16)
    streams = extract_lzma_streams(raw_binary)
    inflated_streams = get_decompressed_lzma_streams(streams)
    dump_files(sections, tmp_dir)
    dump_files(inflated_streams, tmp_dir, suffix='_lzma_decompressed')
    return _get_meta_data(sections, streams)
def process_object(self, file_object):
    """
    Run binwalk signature and entropy analysis on the given file object.

    Stores the textual signature analysis, a summary derived from it, and the
    entropy graph PNG in ``file_object.processed_analysis[self.NAME]``.

    :param file_object: The file object to analyze (must have ``file_path``).
    :return: The same file object with analysis results attached.
    """
    result = {}
    # Context manager guarantees the temp dir is cleaned up even if binwalk
    # or the graph read raises (the original leaked it on exception).
    with TemporaryDirectory(prefix='faf_analysis_binwalk_') as dir_path:
        signature_analysis_result = execute_shell_command('(cd {} && xvfb-run -a binwalk -BEJ {})'.format(dir_path, file_object.file_path))
        result['signature_analysis'] = make_unicode_string(signature_analysis_result)
        result['summary'] = list(set(self._extract_summary(result['signature_analysis'])))
        # binwalk writes the entropy graph as "<input name>.png" into the cwd.
        pic_path = os.path.join(dir_path, '{}.png'.format(os.path.basename(file_object.file_path)))
        result['entropy_analysis_graph'] = get_binary_from_file(pic_path)
    file_object.processed_analysis[self.NAME] = result
    return file_object
def test_extraction(self):
    """The zlib unpacker must produce exactly one correctly named, correct file."""
    input_file = os.path.join(TEST_DATA_DIR, 'test.zlib')
    files, meta_data = self.unpacker.extract_files_from_file(input_file, self.tmp_dir.name)
    self.assertEqual(len(files), 1, 'number of extracted files not correct')
    expected_path = os.path.join(self.tmp_dir.name, 'zlib_decompressed')
    self.assertEqual(files[0], expected_path, 'file name not correct')
    extracted_binary = get_binary_from_file(files[0])
    self.assertEqual(get_sha256(extracted_binary), 'e429103649e24ca126077bfb38cce8c57cc913a966d7e36356e4fe0513ab02c4')
    self.assertEqual(len(meta_data.keys()), 4, 'more or fewer than standard keys in meta dict')
def test_header_and_binary(self):
    """Unpacking a DLM container yields the data binary plus the header meta data."""
    files, meta_data = self.unpacker.extract_files_from_file(self.test_file.file_path, self.tmp_dir.name)
    self.assertEqual(len(set(files)), 1, 'file number incorrect')
    data_bin = get_binary_from_file(os.path.join(self.tmp_dir.name, 'dlm_data.bin'))
    self.assertEqual(get_sha256(data_bin), '701962b0d11f50d9129d5a1655ee054865e90cd1b547d40d590ea96f7dfb64eb')
    expected_meta = [
        ('dlm_version', 'NO_DLM_VERSION_CHECK', 'meta: dlm_version not correct'),
        ('dlm_signature', '90ec11f7b52468378362987a4ed9e56855070915887e6afe567e1c47875b29f9', 'meta: dlm_signature not correct'),
        ('dlm_name', '080415_08142013', 'meta: dlm_name not correct'),
        ('dlm_extraction_criteria', 'upgradeExtract.sh /tmp/080415_08142013.dnld', 'meta: dlm_criteria not correct'),
    ]
    for key, value, message in expected_meta:
        self.assertEqual(meta_data[key], value, message)
def create_from_file(self, file_path):
    """Initialize this object's binary content and path from the file at file_path."""
    binary_content = get_binary_from_file(file_path)
    self.set_binary(binary_content)
    self.set_file_path(file_path)
def _get_uid(file_path, root_path: Path):
    """Compute the UID of the file at file_path, resolved below root_path."""
    # file_path starts with a '/', which must be stripped so it joins below root_path.
    absolute_path = root_path / file_path[1:]
    return create_uid(get_binary_from_file(str(absolute_path)))
def _create_from_file(self, file_path: str):
    """Load the file's binary and derive the path-based attributes from it."""
    file_binary = get_binary_from_file(file_path)
    self.set_binary(file_binary)
    self.create_binary_from_path()
def get_template_as_string(view_name):
    """Return the content of template ``view_name`` from the template directory, decoded as UTF-8."""
    template_path = os.path.join(get_template_dir(), view_name)
    raw_template = get_binary_from_file(template_path)
    return raw_template.decode('utf-8')
def _hash_compare(self, filename, hashsum):
    """Assert that the file named filename inside tmp_dir has the expected sha256."""
    file_content = get_binary_from_file(os.path.join(self.tmp_dir.name, filename))
    self.assertEqual(get_sha256(file_content), hashsum, 'Checksum incorrect for file {}'.format(filename))
def test_file_object_init_with_binary(self):
    """FileObject(binary) must set sha256 from the binary and leave file_name unset."""
    binary_content = get_binary_from_file('{}/test_data_file.bin'.format(get_test_data_dir()))
    test_object = FileObject(binary_content)
    self.assertEqual(test_object.sha256, '268d870ffa2b21784e4dc955d8e8b8eb5f3bcddd6720a1e6d31d2cf84bd1bff8', 'correct sha256')
    self.assertEqual(test_object.file_name, None, 'correct file name')
def check_file_presence_and_content(self, file_path, file_binary):
    """Assert the file exists and its on-disk content equals file_binary."""
    self.assertTrue(os.path.exists(file_path), 'file exists')
    actual_content = get_binary_from_file(file_path)
    self.assertEqual(actual_content, file_binary, 'correct content')
def get_analysis_view(view_name):
    """Return the analysis-plugin template ``view_name`` as a decoded UTF-8 string."""
    template_file = 'web_interface/templates/analysis_plugins/{}.html'.format(view_name)
    view_path = os.path.join(get_src_dir(), template_file)
    return get_binary_from_file(view_path).decode('utf-8')