def test_get_virtual_file_path(self):
    """The virtual file path of a FileObject defaults to its UID until one is assigned."""
    fo = FileObject(binary=b'file_object')
    # Fresh object: paths dict is keyed by the object's own UID.
    self.assertIn(fo.get_uid(), fo.get_virtual_file_paths().keys(), 'not correct if path _ name not set')
    fo.set_name('the_file_name.txt')
    # Setting only a name does not change the virtual path — it stays the UID.
    self.assertEqual(fo.get_virtual_file_paths()[fo.get_uid()][0], fo.get_uid(), 'not correct if path not set')
    # An explicitly assigned path is returned verbatim.
    fo.virtual_file_path = {fo.get_uid(): '/foo/bar/the_file_name.txt'}
    self.assertEqual(fo.get_virtual_file_paths()[fo.get_uid()], '/foo/bar/the_file_name.txt', 'not correct if path set')
def test_get_virtual_file_path(self):
    """Virtual file paths fall back to the UID until a real path is set."""
    fo = FileObject(binary=b'file_object')
    # No name, no path: the paths dict is keyed by the UID.
    assert fo.uid in fo.get_virtual_file_paths(), 'not correct if path _ name not set'
    fo.file_name = 'the_file_name.txt'
    # A file name alone does not alter the virtual path.
    assert fo.get_virtual_file_paths()[fo.uid][0] == fo.uid, 'not correct if path not set'
    fo.virtual_file_path = {fo.uid: '/foo/bar/the_file_name.txt'}
    # An explicitly assigned path is returned unchanged.
    assert fo.get_virtual_file_paths()[fo.uid] == '/foo/bar/the_file_name.txt', 'not correct if path set'
def generate_and_store_file_objects(self, file_paths: List[Path], extractor_dir: str, parent: FileObject):
    """Create and store a FileObject for every non-empty extracted file.

    :param file_paths: paths of the files extracted from ``parent``
    :param extractor_dir: extraction working directory (its ``files`` subdir holds the output)
    :param parent: the FileObject the files were unpacked from
    :return: dict mapping UID -> FileObject for all stored files

    Duplicate files inside one archive share a UID; repeated occurrences only
    append another virtual path instead of being stored again.
    """
    extracted_files = {}
    root_uid = parent.get_root_uid()
    for path in file_paths:
        if file_is_empty(path):
            # zero-byte files carry no information — skip them entirely
            continue
        fo = FileObject(file_path=str(path))
        virtual_path = '{}|{}|{}'.format(
            parent.get_base_of_virtual_path(parent.get_virtual_file_paths()[root_uid][0]),
            parent.get_uid(),
            get_object_path_excluding_fact_dirs(make_unicode_string(str(path)), str(Path(extractor_dir, 'files'))),
        )
        # remember the parent's mime type for later analysis stages
        fo.temporary_data['parent_fo_type'] = get_file_type_from_path(parent.file_path)['mime']
        if fo.get_uid() in extracted_files:
            # the same file is extracted multiple times from one archive
            extracted_files[fo.get_uid()].virtual_file_path[root_uid].append(virtual_path)
        else:
            self.db_interface.set_unpacking_lock(fo.uid)
            self.file_storage_system.store_file(fo)
            fo.virtual_file_path = {root_uid: [virtual_path]}
            fo.parent_firmware_uids.add(root_uid)
            extracted_files[fo.get_uid()] = fo
    return extracted_files
def create_test_file_object(bin_path='get_files_test/testfile1'):
    """Build a FileObject test fixture with canned analysis results.

    :param bin_path: fixture file path relative to the test data directory
    :return: a FileObject with dummy ``processed_analysis`` entries and its
             virtual file paths initialized
    """
    fo = FileObject(file_path=os.path.join(get_test_data_dir(), bin_path))
    fo.processed_analysis.update({
        'dummy': {'summary': ['sum a', 'file exclusive sum b'], 'content': 'file abcd'},
        'file_type': {'full': 'Not a PE file'},
    })
    fo.virtual_file_path = fo.get_virtual_file_paths()
    return fo
def generate_and_store_file_objects(self, file_paths: List[Path], extraction_dir: Path, parent: FileObject):
    """Create and store a FileObject for each non-empty extracted file.

    :param file_paths: paths of the files extracted from ``parent``
    :param extraction_dir: directory the files were extracted into
    :param parent: the FileObject the files were unpacked from
    :return: dict mapping UID -> FileObject for all stored files

    Files appearing more than once inside one archive share a UID; repeat
    occurrences only gain an extra virtual path entry instead of a new store.
    """
    extracted_files = {}
    root_uid = parent.get_root_uid()
    for path in file_paths:
        if file_is_empty(path):
            # zero-byte files are not worth storing — skip them
            continue
        fo = FileObject(file_path=str(path))
        prefix = get_base_of_virtual_path(parent.get_virtual_file_paths()[root_uid][0])
        virtual_path = join_virtual_path(prefix, parent.uid, get_relative_object_path(path, extraction_dir))
        # remember the parent's mime type for later analysis stages
        fo.temporary_data['parent_fo_type'] = get_file_type_from_path(parent.file_path)['mime']
        if fo.uid in extracted_files:
            # the same file is extracted multiple times from one archive
            extracted_files[fo.uid].virtual_file_path[root_uid].append(virtual_path)
        else:
            self.db_interface.set_unpacking_lock(fo.uid)
            self.file_storage_system.store_file(fo)
            fo.virtual_file_path = {root_uid: [virtual_path]}
            fo.parent_firmware_uids.add(root_uid)
            extracted_files[fo.uid] = fo
    return extracted_files