def test_get_temp_dir_path(caplog):
    """Verify temp-dir resolution: default, configured dir, and fallback on a bad path."""
    # with no config the hard-coded default is used
    assert get_temp_dir_path() == '/tmp'
    # an existing directory from the config is returned as-is
    assert get_temp_dir_path(MockConfig(get_test_data_dir())) == get_test_data_dir()
    # a path that is a file (not a dir) triggers a warning and the default fallback
    file_instead_of_dir = str(Path(get_test_data_dir()) / '__init__.py')
    with caplog.at_level(logging.WARNING):
        assert get_temp_dir_path(MockConfig(file_instead_of_dir)) == '/tmp'
        assert 'TempDir path does not exist and could not be created' in caplog.messages[0]
def unpack(self, current_fo: FileObject):
    '''
    Recursively extract all objects included in current_fo and add them to
    current_fo.files_included.

    :param current_fo: the file object to unpack
    :return: the list of extracted file objects (empty if the depth limit was reached)
    '''
    logging.debug('[worker {}] Extracting {}: Depth: {}'.format(self.worker_id, current_fo.uid, current_fo.depth))

    # guard: do not descend past the configured unpacking depth
    if current_fo.depth >= self.config.getint('unpack', 'max_depth'):
        logging.warning('{} is not extracted since depth limit ({}) is reached'.format(current_fo.uid, self.config.get('unpack', 'max_depth')))
        self._store_unpacking_depth_skip_info(current_fo)
        return []

    tmp_dir = TemporaryDirectory(prefix='fact_unpack_', dir=get_temp_dir_path(self.config))
    try:
        file_path = self._generate_local_file_path(current_fo)
        extracted_files = self.extract_files_from_file(file_path, tmp_dir.name)
        extracted_file_objects = self.generate_and_store_file_objects(extracted_files, tmp_dir.name, current_fo)
        extracted_file_objects = self.remove_duplicates(extracted_file_objects, current_fo)
        self.add_included_files_to_object(extracted_file_objects, current_fo)
        # set meta data (the extractor writes its report to reports/meta.json)
        current_fo.processed_analysis['unpacker'] = json.loads(Path(tmp_dir.name, 'reports', 'meta.json').read_text())
    finally:
        # previously cleanup was skipped when any of the steps above raised,
        # leaking the temporary extraction directory
        self.cleanup(tmp_dir)
    return extracted_file_objects
def process_object(self, file_object: FileObject):
    '''
    Write the file's binary to a temp dir, run the analysis docker container on it
    and store the parsed JSON result in file_object.processed_analysis.
    '''
    with TemporaryDirectory(prefix=self.NAME, dir=get_temp_dir_path(self.config)) as tmp_dir:
        target_path = Path(tmp_dir) / file_object.file_name
        target_path.write_bytes(file_object.binary)
        try:
            result = run_docker_container(
                DOCKER_IMAGE,
                TIMEOUT_IN_SECONDS,
                CONTAINER_TARGET_PATH,
                reraise=True,
                mount=(CONTAINER_TARGET_PATH, str(target_path)),
                label=self.NAME,
                include_stderr=False,
            )
            file_object.processed_analysis[self.NAME] = loads(result)
        except ReadTimeout:
            file_object.processed_analysis[self.NAME]['warning'] = 'Analysis timed out. It might not be complete.'
        except (DockerException, IOError):
            file_object.processed_analysis[self.NAME]['warning'] = 'Analysis issues. It might not be complete.'
        except JSONDecodeError:
            # container ran but produced unparsable output
            logging.error('Could not decode JSON output: {}'.format(repr(result)))
    return file_object
def _extract_metadata_from_file_system(self, file_object: FileObject, file_type: str):
    '''Mount the file system image and analyze the metadata of the mounted directory (mount errors are ignored).'''
    # derive the mount type option from the MIME subtype, e.g. "filesystem/ext4" -> "-t ext4"
    mount_option = '-t {}'.format(file_type.split('/')[1])
    with suppress(MountingError):
        with mount(file_object.file_path, mount_option, get_temp_dir_path(self.config)) as mounted_path:
            self._analyze_metadata_of_mounted_dir(mounted_path)
def tar_repack(self, file_path):
    '''
    Extract the given file and repack its contents into a gzipped tar archive.

    :param file_path: path of the file to extract and repack
    :return: the repacked archive as byte string
    '''
    extraction_directory = TemporaryDirectory(prefix='FACT_tar_repack', dir=get_temp_dir_path(self.config))
    archive_directory = TemporaryDirectory(prefix='FACT_tar_repack', dir=get_temp_dir_path(self.config))
    try:
        self.extract_files_from_file(file_path, extraction_directory.name)
        archive_path = os.path.join(archive_directory.name, 'download.tar.gz')
        # the extractor places the payload under <dir>/files
        tar_binary = self._repack_extracted_files(Path(extraction_directory.name, 'files'), archive_path)
    finally:
        # previously the cleanup was skipped when extraction or repacking raised,
        # leaking both temporary directories
        self._cleanup_directories(archive_directory, extraction_directory)
    return tar_binary
def unpack_fo(self, file_object: FileObject) -> Optional[TemporaryDirectory]:
    '''
    Extract the file object into a fresh temporary directory.

    :param file_object: the file object to unpack
    :return: the TemporaryDirectory holding the extracted files (caller is
        responsible for cleanup), or None if the file path could not be resolved
    '''
    if file_object.file_path:
        file_path = file_object.file_path
    else:
        # fall back to looking the path up in the database
        file_path = self._get_file_path_from_db(file_object.uid)
    if not file_path or not Path(file_path).is_file():
        logging.error('could not unpack {}: file path not found'.format(file_object.uid))
        return None

    extraction_dir = TemporaryDirectory(prefix='FACT_plugin_qemu_exec', dir=get_temp_dir_path(self.config))
    self.extract_files_from_file(file_path, extraction_dir.name)
    return extraction_dir
def get_uploaded_file_binary(request_file, config: ConfigParser):
    '''
    Retrieve the binary from the request file storage and return it as byte string.
    May return `None` if no file was provided or an exception occurred.

    :param request_file: a file contained in the flask request object
    :param config: the FACT configuration
    :return: the binary as byte string or `None` if no binary was found
    '''
    if not request_file:
        return None
    # the context manager guarantees cleanup; previously tmp_dir.cleanup() was
    # skipped on the exception path, leaking the temporary directory
    with TemporaryDirectory(prefix='fact_upload_', dir=get_temp_dir_path(config)) as tmp_dir:
        tmp_file_path = Path(tmp_dir) / 'upload.bin'
        try:
            request_file.save(str(tmp_file_path))
            return tmp_file_path.read_bytes()
        except Exception:
            # previously the error was swallowed silently; log it before giving up
            logging.error('Encountered error when trying to read uploaded file:', exc_info=True)
            return None
def get_version_for_component(self, result, file_object: FileObject):
    '''
    Collect the version strings for a software component from the yara string
    matches (and, for format-string matches, from a ghidra extraction pass) and
    store them in result['meta']['version'].
    '''
    versions = {
        self.get_version(make_unicode_string(matched), result['meta'])
        for _, _, matched in result['strings']
    }
    if result['meta'].get('format_string'):
        format_strings = [raw.decode() for _, _, raw in result['strings'] if b'%s' in raw]
        if format_strings:
            versions.update(
                extract_data_from_ghidra(file_object.binary, format_strings, get_temp_dir_path(self.config))
            )
    # if there are actual version results, remove the "empty" result
    if '' in versions and len(versions) > 1:
        versions.remove('')
    result['meta']['version'] = list(versions)
    return result
def _download_pdf_report(self, uid):
    '''
    Build a PDF report for the firmware identified by uid and return it as a
    flask attachment response (or an error page if the uid is unknown or the
    PDF build fails).
    '''
    with ConnectTo(FrontEndDbInterface, self._config) as db:
        object_exists = db.existence_quick_check(uid)
    if not object_exists:
        return render_template('uid_not_found.html', uid=uid)

    with ConnectTo(FrontEndDbInterface, self._config) as db:
        firmware = db.get_complete_object_including_all_summaries(uid)

    try:
        with TemporaryDirectory(dir=get_temp_dir_path(self._config)) as folder:
            pdf_path = build_pdf_report(firmware, folder)
            # read the bytes before the temp dir (and the file in it) disappears
            binary = pdf_path.read_bytes()
    except RuntimeError as error:
        return render_template('error.html', message=str(error))

    response = make_response(binary)
    response.headers['Content-Disposition'] = 'attachment; filename={}'.format(pdf_path.name)
    return response
def _get_uploaded_file_binary(request_file: FileStorage, config: ConfigParser) -> Optional[bytes]:
    '''
    Retrieves the binary from the request file storage and returns it as byte string.
    May return `None` if no binary was found or an exception occurred.

    :param request_file: A file contained in the flask request object.
    :param config: The FACT configuration.
    :return: The binary as byte string or `None` if no binary was found.
    '''
    if not request_file:
        return None
    # the context manager guarantees cleanup; previously tmp_dir.cleanup() was
    # skipped on the exception path, leaking the temporary directory
    with TemporaryDirectory(prefix='fact_upload_', dir=get_temp_dir_path(config)) as tmp_dir:
        tmp_file_path = Path(tmp_dir) / 'upload.bin'
        try:
            request_file.save(str(tmp_file_path))
            return tmp_file_path.read_bytes()
        except IOError:
            # previously any error was swallowed silently; log it before giving up
            logging.error('Encountered error when trying to read uploaded file:', exc_info=True)
            return None
def process_object(self, file_object):
    '''
    Run binwalk signature and entropy analysis on the file and store the text
    output, a summary and the entropy graph PNG in processed_analysis.
    '''
    result = {}
    # the context manager guarantees cleanup; previously tmp_dir.cleanup() was
    # skipped when binwalk or the PNG read raised, leaking the temp directory
    with TemporaryDirectory(prefix='fact_analysis_binwalk_', dir=get_temp_dir_path(self.config)) as dir_path:
        # run binwalk inside the temp dir so its artifacts (e.g. the graph) land there
        signature_analysis_result = execute_shell_command(
            '(cd {} && xvfb-run -a binwalk -BEJ {})'.format(dir_path, file_object.file_path)
        )
        result['signature_analysis'] = make_unicode_string(signature_analysis_result)
        result['summary'] = list(set(self._extract_summary(result['signature_analysis'])))
        # binwalk names the entropy graph after the analyzed file
        pic_path = os.path.join(dir_path, '{}.png'.format(os.path.basename(file_object.file_path)))
        result['entropy_analysis_graph'] = get_binary_from_file(pic_path)
    file_object.processed_analysis[self.NAME] = result
    return file_object
def process_object(self, file_object):
    '''
    Run binwalk signature and entropy analysis on the file and store the text
    output, a summary and the entropy graph PNG in processed_analysis.
    '''
    with TemporaryDirectory(prefix='fact_analysis_binwalk_', dir=get_temp_dir_path(self.config)) as tmp_dir:
        # run binwalk inside the temp dir so its artifacts (e.g. the graph) land there
        signature_analysis_result = execute_shell_command(
            f'(cd {tmp_dir} && xvfb-run -a binwalk -BEJ {file_object.file_path})'
        )
        # binwalk names the entropy graph after the analyzed file
        pic_path = Path(tmp_dir) / f'{Path(file_object.file_path).name}.png'
        try:
            graph = pic_path.read_bytes()
        except FileNotFoundError:
            # missing graph means binwalk did not run successfully
            result = {'failed': 'Binwalk analysis failed'}
            logging.error(
                f'Binwalk analysis on {file_object.uid} failed:\n{signature_analysis_result}'
            )
        else:
            result = {
                'entropy_analysis_graph': graph,
                'signature_analysis': signature_analysis_result,
                'summary': list(set(self._extract_summary(signature_analysis_result))),
            }
    file_object.processed_analysis[self.NAME] = result
    return file_object
def _get_uploaded_file_binary(request_file: FileStorage, config: ConfigParser) -> Optional[bytes]:
    '''
    Retrieves the binary from the request file storage and returns it as byte string.
    May return `None` if no binary was found or an exception occurred.

    :param request_file: A file contained in the flask request object.
    :param config: The FACT configuration.
    :return: The binary as byte string or `None` if no binary was found.
    '''
    if request_file:
        with TemporaryDirectory(prefix='fact_upload_', dir=get_temp_dir_path(config)) as tmp_dir:
            upload_path = Path(tmp_dir) / 'upload.bin'
            try:
                request_file.save(str(upload_path))
                binary = upload_path.read_bytes()
            except IOError:
                logging.error('Encountered error when trying to read uploaded file:', exc_info=True)
                return None
        return binary
    return None