def _crack_hash(self, passwd_entry, result_dict, key):
    '''
    Try to crack the hash of a passwd entry with john and the configured word list.

    :param passwd_entry: fields of one passwd/shadow line (sequence of bytes); only
                         the first two fields (user and hash) are fed to john
    :param result_dict: dict in which log and password are stored under ``key``
    :param key: the key in ``result_dict`` the results belong to
    :return: True if a password was found, else False
    '''
    with NamedTemporaryFile() as fp:
        # john expects a "user:hash" line as input file
        fp.write(b':'.join(passwd_entry[:2]))
        fp.seek(0)
        result_dict[key]['log'] = execute_shell_command('john --wordlist={} {}'.format(self.wordlist_path, fp.name))
        output = execute_shell_command('john --show {}'.format(fp.name)).split('\n')
        if len(output) > 2:
            # FIX: output[0].split(':')[1] raises IndexError (not KeyError) when the line
            # contains no ':' -- previously that exception escaped the suppress block
            with suppress(KeyError, IndexError):
                result_dict[key]['password'] = output[0].split(':')[1]
                return True
    return False
def _mount_single_filesystem(file_path, mime_type, tmp_dir):
    '''
    Loop-mount a filesystem image read-only, copy its contents to tmp_dir and unmount it.

    :param file_path: path of the filesystem image
    :param mime_type: MIME type used to look up an explicit ``-t`` mount parameter in TYPES
    :param tmp_dir: directory the filesystem contents are copied to
    :return: combined console output of the mount, copy and umount commands
    '''
    type_parameter = '-t {}'.format(TYPES[mime_type]) if mime_type in TYPES else ''
    mount_dir = TemporaryDirectory()
    output = ''
    try:
        output += execute_shell_command('sudo mount {} -v -o ro,loop {} {}'.format(type_parameter, file_path, mount_dir.name))
        output += execute_shell_command('sudo cp -av {}/* {}/'.format(mount_dir.name, tmp_dir))
    finally:
        # FIX: always unmount and remove the temporary mount point, even if an earlier step raised
        output += execute_shell_command('sudo umount -v {}'.format(mount_dir.name))
        mount_dir.cleanup()
    return output
def clean_firmadyne():
    '''
    Remove the results of the last firmadyne run by calling its delete scripts.

    :return: 1 on success, 0 if the first delete script returned a non-zero exit code
    '''
    change_dir_to_firmadyne_dir()
    # FIX: the command strings contained no '{}' placeholder, so the trailing
    # .format(FIRMADYNE_PATH) calls were no-ops and have been removed
    command = 'sudo ./scripts/delete.sh 1'
    _, rc = execute_interactive_shell_command(command, inputs={'Password for user firmadyne: ': 'firmadyne'}, timeout=120)
    if rc > 0:
        return 0
    execute_shell_command('sudo ./scripts/additional_delete.sh')
    return 1
def _crack_hash(self, passwd_entry, result_entry, format_term=''):
    '''
    Try to crack a password hash with john the ripper and the configured word list.

    :param passwd_entry: raw passwd/shadow entry (bytes) that is fed to john
    :param result_entry: dict that receives 'log', 'password' or 'ERROR' entries
    :param format_term: optional john format parameter (e.g. '--format=md5crypt')
    :return: True if a password was cracked, else False
    '''
    with NamedTemporaryFile() as fp:
        fp.write(passwd_entry)
        fp.seek(0)
        result_entry['log'] = execute_shell_command('{} --wordlist={} {} {}'.format(JOHN_PATH, self.wordlist_path, fp.name, format_term))
        output = execute_shell_command('{} {} --show {}'.format(JOHN_PATH, fp.name, format_term)).split('\n')
        if len(output) > 1:
            # FIX: output[0].split(':')[1] raises IndexError (not KeyError) when the line
            # contains no ':'; suppress both so a malformed line falls through to 'return False'
            with suppress(KeyError, IndexError):
                if '0 password hashes cracked' in output[-2]:
                    result_entry['ERROR'] = 'hash type is not supported'
                    return False
                result_entry['password'] = output[0].split(':')[1]
                return True
    return False
def mount(file_path, fs_type=''):
    '''
    Generator-based context manager: loop-mount a filesystem image read-only and
    yield the mount point path; the image is unmounted again on exit.

    :param file_path: path of the filesystem image to mount
    :param fs_type: optional mount type parameter (passed through to ``mount``)
    :raises MountingError: if the image could not be mounted
    '''
    mount_point = TemporaryDirectory()
    try:
        mount_output = execute_shell_command(f'sudo mount {fs_type} -v -o ro,loop {file_path} {mount_point.name}')
        if not _mount_was_successful(mount_point.name):
            logging.error(f'could not mount {file_path}: {mount_output}')
            raise MountingError('error while mounting fs')
        yield Path(mount_point.name)
    finally:
        # unmount and remove the temporary mount point in any case
        execute_shell_command(f'sudo umount -v {mount_point.name}')
        mount_point.cleanup()
def crack_hash(passwd_entry: bytes, result_entry: dict, format_term: str = '') -> bool:
    '''
    Try to crack a password hash from a passwd/shadow entry with john the ripper.

    :param passwd_entry: raw entry (bytes) that is written to a temp file for john
    :param result_entry: dict that receives 'log', 'password' or 'ERROR' entries
    :param format_term: optional john format parameter (e.g. '--format=md5crypt')
    :return: True if a password was cracked, else False
    '''
    with NamedTemporaryFile() as fp:
        fp.write(passwd_entry)
        fp.seek(0)
        command = f'{JOHN_PATH} --wordlist={WORDLIST_PATH} {fp.name} {format_term}'
        result_entry['log'] = execute_shell_command(command)
        output = execute_shell_command(f'{JOHN_PATH} {fp.name} --show {format_term}').split('\n')
        if len(output) > 1:
            # FIX: output[0].split(':')[1] raises IndexError (not KeyError) when the line
            # contains no ':'; suppress both so a malformed line leads to 'return False'
            with suppress(KeyError, IndexError):
                if '0 password hashes cracked' in output[-2]:
                    result_entry['ERROR'] = 'hash type is not supported'
                    return False
                result_entry['password'] = output[0].split(':')[1]
                return True
    return False
def unpack_function(file_path, tmp_dir):
    '''
    Unpack the input file with the phantom firmware tools.

    :param file_path: the input file
    :param tmp_dir: directory the extracted files are stored in
    :return: meta data dict containing the tool output
    '''
    if not path.exists(TOOL_PATH):
        return {'output': "Error: phantom_firmware_tools not installed! Re-Run the installation script!"}
    unpack_command = f'(cd {tmp_dir} && fakeroot {TOOL_PATH} -x -vv -p {file_path})'
    return {'output': execute_shell_command(unpack_command) + "\n"}
def unpack_function(file_path, tmp_dir):
    '''
    file_path specifies the input file.
    tmp_dir should be used to store the extracted files.
    '''
    unpacker = Uboot(file_path)
    meta = {}
    # write the payload with a file suffix derived from its compression type
    compression_suffix = uBootHeader.COMPRESSION[unpacker.ubootheader.compression_type]
    Path(tmp_dir, f'uboot.{compression_suffix}').write_bytes(unpacker.extract_uboot_image())
    # store the raw u-boot header separately
    Path(tmp_dir, 'uboot_header.bin').write_bytes(unpacker.extract_uboot_header())
    # dump all blocks the unpacker could not identify, keyed by their offset
    for offset, block in unpacker.get_remaining_blocks().items():
        Path(tmp_dir, f'{offset}_unknown.bin').write_bytes(block)
    # scan for device tree blobs
    if DTB_MAGIC in Path(file_path).read_bytes():
        dtb_command = f'''extract-dtb {file_path} -o {Path(tmp_dir) / 'dtb'}'''
        meta['extract-dtb'] = dtb_command + '\n' + execute_shell_command(dtb_command, timeout=10)
    return meta
def unpack_function(file_path, tmp_dir):
    '''
    file_path specifies the input file.
    tmp_dir should be used to store the extracted files.
    '''
    # run the external unpacker under fakeroot with tmp_dir as output directory
    unpack_command = f'fakeroot {stuffit_unpacker} -o {tmp_dir} {file_path}'
    return {'output': execute_shell_command(unpack_command)}
def unpack_function(file_path, tmp_dir):
    '''
    file_path specifies the input file.
    tmp_dir should be used to store the extracted files.
    '''
    # big endian images go to the unyaffs2 tool, all others to unyaffs
    if _is_big_endian(file_path):
        unpacker = f'{UNYAFFS2_EXECUTEABLE} -e'
    else:
        unpacker = f'{UNYAFFS_EXECUTEABLE} -v'
    return {'output': execute_shell_command(f'fakeroot {unpacker} {file_path} {tmp_dir}')}
def unpack_function(file_path, tmp_dir):
    '''
    Run the external decryption tool on the input file and store the result in tmp_dir.

    :param file_path: the input file
    :param tmp_dir: directory the decrypted image is stored in
    :return: dict with the tool output
    '''
    output_file = Path(tmp_dir) / 'decrypted_image'
    command = f'python3 {TOOL_PATH} -i {file_path} -o {output_file}'
    return {'output': execute_shell_command(command)}
def run_firmadyne(input_file):
    '''
    Run the firmadyne wrapper on the given file and return its JSON results.

    :param input_file: the firmware file handed to the firmadyne wrapper
    :return: the parsed results dict, or a 'Failed' dict if the result file could not be loaded
    '''
    execute_shell_command(
        '/usr/bin/python3 {}/firmadyne_wrapper.py {} &> {}/LOG.log'.format(
            INTERNAL_DIRECTORY_PATH, input_file, FIRMADYNE_INSTALLATION_DIR
        )
    )
    result_file = '{}/results.json'.format(FIRMADYNE_INSTALLATION_DIR)
    try:
        with open(result_file, 'r') as pointer:
            dict_results = json.loads(pointer.read())
        os.remove(result_file)
    except Exception as exception:  # deliberate catch-all: any failure yields a 'Failed' result
        error_message = 'could not load firmadyne result: {} {}'.format(type(exception).__name__, exception)
        logging.error(error_message)
        dict_results = {'result': 'Failed', 'error_message': error_message}
    return dict_results
def shutdown(self):
    '''Stop the local mongo database if one is configured; external servers are left untouched.'''
    db_config = self.config['data_storage']
    if db_config['mongo_server'] != 'localhost':
        return
    logging.info('stop local mongo database')
    # NOTE(review): the admin password ends up on the command line -- visible in the process list
    shutdown_command = 'mongo --eval "db.shutdownServer()" {}:{}/admin --username {} --password "{}"'.format(
        db_config['mongo_server'],
        db_config['mongo_port'],
        db_config['db_admin_user'],
        db_config['db_admin_pw'],
    )
    logging.debug(execute_shell_command(shutdown_command))
def unpack_function(file_path, tmp_dir):
    '''
    file_path specifies the input file.
    tmp_dir should be used to store the extracted files.
    '''
    # fall-back for files of unknown type: let binwalk carve and extract what it recognizes
    logging.debug(f'File Type unknown: execute binwalk on {file_path}')
    binwalk_command = f'binwalk --extract --carve --signature --directory {tmp_dir} {file_path}'
    return {'output': execute_shell_command(binwalk_command)}
def _execute_yara_search(self, rule_file_path, target_path=None):
    '''
    scans the (whole) db directory with the provided rule file and returns the (raw) results
    yara-python cannot be used, because it (currently) supports single-file scanning only

    :param rule_file_path: file path to yara rule file
    :param target_path: optional scan target; the db directory is scanned if omitted
    :return: output from yara scan
    '''
    if target_path is None:
        target_path = self.db_path
    return execute_shell_command('yara -r {} {}'.format(rule_file_path, target_path))
def unpack_function(file_path, tmp_dir):
    """
    file_path specifies the input file.
    tmp_dir should be used to store the extracted files.
    """
    # generic archive extraction via patool (10 minute timeout)
    extract_command = 'fakeroot patool extract --outdir {} {}'.format(tmp_dir, file_path)
    return {'output': execute_shell_command(extract_command, timeout=600)}
def _get_kernel_hardening_data(kernel_config: str) -> List[List[str]]:
    '''
    Run kconfig-hardened-check on a kernel configuration and return the parsed JSON result.

    :param kernel_config: the kernel config file contents
    :return: the parsed check results, or an empty list if the analysis failed
    '''
    try:
        with NamedTemporaryFile() as config_file:
            # the checker expects the config as a file on disk
            config_file.write(kernel_config.encode())
            config_file.seek(0)
            check_output = execute_shell_command(
                'kconfig-hardened-check -c {} -m json 2>/dev/null'.format(config_file.name)
            )
            return json.loads(check_output)
    except (JSONDecodeError, KeyError):
        logging.warning('kconfig-hardened-check analysis failed')
        return []
def unpack_function(file_path, tmp_dir):
    '''
    file_path specifies the input file.
    tmp_dir should be used to store the extracted files.
    '''
    # tpl-tool unpacker unpacks files in the directory of the input file -> copy input file and delete afterwards
    working_copy = os.path.join(tmp_dir, os.path.basename(file_path))
    copyfile(file_path, working_copy)
    result = {
        'output': execute_shell_command('fakeroot {} -x {}'.format(path_to_unpacker, working_copy)),
        'header-info': execute_shell_command('{} -s {}'.format(path_to_unpacker, working_copy)),
    }
    os.remove(working_copy)
    return result
def unpack_function(file_path, tmp_dir):
    '''
    file_path specifies the input file.
    tmp_dir should be used to store the extracted files.
    '''
    mime_type = get_file_type_from_path(file_path)['mime']
    # pass an explicit filesystem type to mount when the MIME type maps to one
    type_parameter = '-t {}'.format(type_dict[mime_type]) if mime_type in type_dict else ''
    mount_dir = TemporaryDirectory()
    output = ''
    try:
        output += execute_shell_command('sudo mount {} -v -o ro,loop {} {}'.format(type_parameter, file_path, mount_dir.name))
        output += execute_shell_command('sudo cp -av {}/* {}/'.format(mount_dir.name, tmp_dir))
    finally:
        # FIX: always unmount and remove the temporary mount point, even if an earlier step raised
        output += execute_shell_command('sudo umount -v {}'.format(mount_dir.name))
        mount_dir.cleanup()
    return {'output': output}
def unpack_function(file_path, tmp_dir):
    '''
    file_path specifies the input file.
    local_tmp_dir should be used to store the extracted files.
    '''
    # extract images from the UBI container into tmp_dir
    ubi_command = 'fakeroot ubireader_extract_images -i -w -v --output-dir {} {}'.format(tmp_dir, file_path)
    return {'output': execute_shell_command(ubi_command) + '\n'}
def unpack_function(file_path, tmp_dir):
    '''
    Unpack the input file with the phantom firmware tools and post-process the result files.

    :param file_path: the input file
    :param tmp_dir: directory the extracted files are stored in
    :return: meta data dict containing the tool output
    '''
    if not path.exists(TOOL_PATH):
        return {'output': 'Error: phantom_firmware_tools not installed! Re-Run the installation script!'}
    unpack_command = '(cd {} && fakeroot python3 {} -x -vv -p {})'.format(tmp_dir, TOOL_PATH, file_path)
    output = execute_shell_command(unpack_command) + '\n'
    # normalize the extracted files before returning
    _rename_files(tmp_dir)
    _remove_ini_files(tmp_dir)
    return {'output': output}
def start(self, _authenticate=True):
    '''Start a local mongo daemon if configured; otherwise only log the external server in use.'''
    db_config = self.config['data_storage']
    if db_config['mongo_server'] != 'localhost':
        logging.info('using external mongodb: {}:{}'.format(db_config['mongo_server'], db_config['mongo_port']))
        return
    logging.info("start local mongo database")
    self.check_file_and_directory_existence_and_permissions()
    # authentication can be disabled (e.g. for initial setup) via _authenticate=False
    auth_option = '--auth ' if _authenticate else ''
    start_command = 'mongod {}--config {} --fork --logpath {}'.format(auth_option, self.config_path, self.mongo_log_path)
    logging.debug(execute_shell_command(start_command))
def _execute_yara_search(self, rule_file_path: str, target_path: Optional[str] = None) -> str:
    '''
    Scans the (whole) db directory with the provided rule file and returns the (raw) results.
    Yara-python cannot be used, because it (currently) supports single-file scanning only.

    :param rule_file_path: The file path to the yara rule file.
    :param target_path: Optional scan target; the db directory is scanned if omitted.
    :return: The output from the yara scan.
    '''
    scan_target = target_path if target_path is not None else self.db_path
    return execute_shell_command('yara -r {} {}'.format(rule_file_path, scan_target))
def unpack_function(file_path, tmp_dir):
    '''
    file_path specifies the input file.
    tmp_dir should be used to store the extracted files.
    Optional: Return a dict with meta information
    '''
    # run the external extraction tool in superbrute mode
    command = 'python3 {} --superbrute --extract --output {} {}'.format(TOOL_PATH, tmp_dir, file_path)
    return {'output': execute_shell_command(command)}
def command_absolute_path(cmd):
    """
    we want to use the absolute path for a tool so we can execute in fakeroot

    :param cmd: command as a list of strings; cmd[0] (the tool) is replaced in place
                by its absolute path (cached in TOOL_PATHS)
    :return: the command joined into a single string
    """
    from shutil import which  # local import so the file-level import block is untouched

    tool = cmd[0]
    if tool not in TOOL_PATHS:
        # FIX: use stdlib shutil.which instead of shelling out to `which`;
        # `or ''` preserves the previous behavior (empty string) for unknown tools
        TOOL_PATHS[tool] = which(tool) or ''
    cmd[0] = TOOL_PATHS[tool]
    return ' '.join(cmd)
def unpack_function(file_path, tmp_dir):
    '''
    file_path specifies the input file.
    local_tmp_dir should be used to store the extracted files.
    '''
    # jefferson extracts the jffs2 image into a 'jffs-root' sub-directory
    extraction_dir = os.path.join(tmp_dir, 'jffs-root')
    output = execute_shell_command('fakeroot jefferson -v -d {} {}'.format(extraction_dir, file_path)) + '\n'
    logging.debug(output)
    return {'output': output}
def check_archives_validity(self, file_path: Path, command, search_key=None):
    '''
    Run a validity check command on an archive and remove the file if the check fails.

    :param file_path: path of the archive to check
    :param command: format string for the check command; '{}' is replaced with the file path
    :param search_key: if given, the file is removed when this key appears in the command
                       output; otherwise it is removed when the output is empty
    '''
    output = execute_shell_command(command.format(file_path))
    if search_key:
        is_invalid = search_key in output.replace('\n ', '')
    else:
        is_invalid = self.output_is_empty(output)
    if is_invalid:
        self.remove_file(file_path)
def do_analysis(self, file_path):
    '''
    Run pylint on the given file and return the relevant warnings.

    :param file_path: path of the file to analyze
    :return: list of relevant pylint warnings (empty if the pylint output could not be parsed)
    '''
    pylint_output = execute_shell_command(f'pylint --output-format=json {file_path}')
    try:
        warnings = json.loads(pylint_output)
    except json.JSONDecodeError:
        logging.warning(f'Failed to execute pylint:\n{pylint_output}')
        return []
    return self._extract_relevant_warnings(warnings)
def unpack_function(file_path, tmp_dir):
    '''
    file_path specifies the input file.
    local_tmp_dir should be used to store the extracted files.
    '''
    # jefferson extracts the jffs2 image into a 'jffs-root' sub-directory
    extraction_dir = Path(tmp_dir) / 'jffs-root'
    output = execute_shell_command('fakeroot jefferson -v -d {} {}'.format(extraction_dir, file_path)) + '\n'
    logging.debug(output)
    return {'output': output}
def check_kernel_config(kernel_config: str) -> dict:
    '''
    Run the checksec kernel analysis on a kernel configuration.

    :param kernel_config: the kernel config file contents
    :return: the filtered checksec result dict, or an empty dict if the analysis failed
    '''
    try:
        with NamedTemporaryFile() as config_file:
            # checksec expects the config as a file on disk
            config_file.write(kernel_config.encode())
            config_file.seek(0)
            checksec_command = f'{CHECKSEC_PATH} --kernel={config_file.name} --output=json 2>/dev/null'
            result = json.loads(execute_shell_command(checksec_command))
            whitelist_configs(result)
            return result
    except (JSONDecodeError, KeyError):
        logging.debug('Checksec kernel analysis failed')
        return {}