def test_get_scan(self):
    """get_scan must fetch the scan through session.query(...).get(scan_id)."""
    scan_id = '123'
    fake_query = MagicMock()
    self.mock_session.query.return_value = fake_query

    scan_manager.get_scan(scan_id, self.mock_session)

    assert fake_query.get.called
    fake_query.get.assert_called_once_with(scan_id)
def merge_results(results, scan_id=None):
    """Celery chord callback that finishes a scan once every analyzer ran.

    Logs each analyzer result, removes the temporary directory holding the
    cloned sources, marks the scan as DONE and, if the project has a hook
    configured, launches the notification task.

    :param results: iterable of results collected from the analyzer tasks
    :param scan_id: id of the scan being finished
    """
    # Fix: removed commented-out dead code (an old session_scope/get_scan
    # snippet) that was left inline in the body.
    for result in results:
        logger.info('{} merge results'.format(result))

    # After the merge we remove the folder with the scan source
    scan_dir = os.path.join(SHARED_VOLUME_PATH, scan_id)
    try:
        shutil.rmtree(scan_dir)
    except IOError as e:
        # best-effort cleanup: a failure to delete must not fail the scan
        logger.error("Error while removing tmp dir: {} - {}".format(
            scan_dir, e))

    with db.session_scope() as session:
        scan = get_scan(scan_id, session)
        project = scan.project
        scan = update_scan_state(scan, ScanState.DONE, session)
        session.commit()

    if project.hook_type != ProjectHookType.NONE.name:
        # launch notify task
        logger.debug('{} launch notify task for project.hook_type'.format(
            scan.id))
        notify_results.delay(scan.id)
def scan_deps(scan_id: str):
    """Extract and persist the dependency list for a scan.

    After saving the dependencies, compare them with the previous scan of
    the same project: identical dependency sets short-circuit the scan to
    SAME_DEPS_AS_PREVIOUS, otherwise the vulnerability lookup is queued.
    """
    with db.session_scope() as session:
        logger.debug('{} extract dependencies'.format(scan_id))
        scan = get_scan(scan_id, session)
        dependencies = get_dependencies(scan.lang, scan.source_path)
        logger.debug('found dependencies {}'.format(dependencies))

        # persist every dependency found for this scan
        add_scan_deps(scan.id, dependencies, datetime.now(), session)
        scan.total_packages = len(dependencies)
        session.commit()
        logger.debug('saved {} dependencies'.format(len(dependencies)))

        # compare against the last scan of the same project, if any
        previous_scan = get_previous_scan_for_project(
            scan.project_id, scan.id, session)
        if previous_scan is None:
            logger.debug('no previous scan found for {}'.format(scan_id))
            deps_equals = False
        else:
            logger.debug('previous scan to {} is {}'.format(
                scan_id, previous_scan.id))
            deps_equals = compare_scan_deps(scan.id, previous_scan.id, session)

        if deps_equals:
            update_scan_state(scan, ScanState.SAME_DEPS_AS_PREVIOUS, session)
            logger.debug('{} scan has same deps as {}'.format(
                scan_id, previous_scan.id))
        else:
            get_vulnerabilities.delay(scan_id)
def get_vulnerabilities(scan_id: str):
    """Query the Patton check-dependencies endpoint for the scan deps,
    persist every CVE found, mark the scan DONE and clean its workspace."""
    with db.session_scope() as session:
        logger.debug('{} extract dependencies'.format(scan_id))
        scan_deps = get_scan_deps(scan_id, session)
        scan = get_scan(scan_id, session)
        project = scan.project

        url = '{}/api/v1/check-dependencies?cpeDetailed=1'.format(PATTON_URI)
        req_body = {
            'method': 'auto',
            'source': 'auto',
            'libraries': [
                {'library': dep.library, 'version': dep.version}
                for dep in scan_deps
            ]
        }
        response = requests.post(url, json=req_body).json()

        total_vulnerabilities = 0
        if response:
            for raw_dep, match in response.items():
                if not match:
                    continue
                [library, version] = raw_dep.split(':')
                scan_dep = get_scan_dep_by_scan_id_and_raw_dep(
                    scan_id, '{}:{}'.format(library, version), session)
                for cpe_dict in match['cpes']:
                    cpe = cpe_dict['cpe']
                    cves = cpe_dict['cves']
                    total_vulnerabilities += len(cves)
                    # persist the vulnerabilities for this cpe
                    add_scan_vuln(scan_dep.id, scan.id, scan.lang, cpe, cves,
                                  session)
                    logger.info('saved {cves} cves for cpe {cpe}'.format(
                        cves=len(cves), cpe=cpe))

        scan.total_vulnerabilities = total_vulnerabilities
        update_scan_state(scan, ScanState.DONE, session)
        session.commit()

        # remove the folder holding the cloned scan sources
        scan_dir = os.path.join(SHARED_VOLUME_PATH, scan_id)
        try:
            shutil.rmtree(scan_dir)
        except IOError as e:
            logger.error("Error while removing tmp dir: {} - {}".format(
                scan_dir, e))

        if project.hook_type != ProjectHookType.NONE.name:
            # launch notify task
            logger.debug('{} launch notify task for project.hook_type'.format(
                scan.id))
            notify_results.delay(scan.id)
def notify_results(scan_id):
    """Send the library:version pairs with vulnerabilities to the project hook."""
    with db.session_scope() as session:
        scan = get_scan(scan_id, session)
        scan_vulns = []
        for scan_vuln in get_scan_vulnerabilities(scan_id, session):
            scan_vulns.append('{}:{}'.format(scan_vuln.library,
                                             scan_vuln.version))
        project = scan.project
        logger.debug('notify project data {}'.format(project.hook_data))
        notify_scan_results(project, scan_vulns)
def notify_results(scan_id):
    """Notify the unique raw dependencies that have vulnerabilities."""
    with db.session_scope() as session:
        scan = get_scan(scan_id, session)
        # deduplicate: several vulnerabilities may share one raw dependency
        scan_vulns = {
            scan_vuln.scan_dep.raw_dep
            for scan_vuln in get_scan_vulnerabilities(scan_id, session)
        }
        project = scan.project
        logger.debug('notify project data {}'.format(project.hook_data))
        notify_scan_results(project, scan_vulns)
def notify_results(scan_id):
    """Notify scan results through the project's configured hook (Slack only)."""
    with db.session_scope() as session:
        scan = get_scan(scan_id, session)
        project = scan.project
        logger.debug('notify project data {}'.format(project.hook_data))
        notif_text = 'project at {} has vulnerabilities'.format(project.repo)
        if project.hook_type == ProjectHookType.SLACK.name:
            # hook_data is stored as a JSON string holding the webhook config
            hook_config = json.loads(project.hook_data)
            slack.notify(hook_config.get('webhook_url'), notif_text)
def prepare_scan(scan_id: str):
    """Clone the project sources for a scan, resolve the scan language and
    queue the dependency-extraction task.

    Failure at any step marks the scan with the matching error state
    (INVALID_BRANCH, INVALID_YML_ON_PROJECT or CANT_GET_LANGUAGE) and
    re-raises.
    """
    with db.session_scope() as session:
        logger.info('{} START SCAN'.format(scan_id))
        scan = get_scan(scan_id, session)
        project = scan.project
        logger.debug('{} for project({})'.format(scan_id, project.id))

        try:
            # clone the repository into the shared volume
            cloned_dir = clone_project(config.SHARED_VOLUME_PATH, scan_id,
                                       project.repo, project.repo_auth_type,
                                       scan.branch)
            scan.source_path = cloned_dir
            session.add(scan)
            logger.debug('{} cloned dir {}'.format(scan_id, cloned_dir))
        except Exception as e:
            update_scan_state(scan, ScanState.INVALID_BRANCH, session)
            session.commit()
            logger.error(str(e))
            raise e

        try:
            # a .deeptracy.yml in the sources is parsed to a dict (or None)
            deeptracy_yml = parse_deeptracy_yml(cloned_dir)
            logger.debug('{} .deeptracy.yml {}'.format(
                scan_id, 'TRUE' if deeptracy_yml else 'FALSE'))
        except Exception as e:
            update_scan_state(scan, ScanState.INVALID_YML_ON_PROJECT, session)
            session.commit()
            logger.error('{} unable to parse .deeptracy.yml'.format(scan_id))
            raise e

        # the language comes from the scan itself or from .deeptracy.yml
        if scan.lang is None:
            if deeptracy_yml is None:
                update_scan_state(scan, ScanState.CANT_GET_LANGUAGE, session)
                session.commit()
                logger.debug(
                    '{} unable to retrieve language for scan'.format(scan_id))
                raise TaskException('unable to retrieve language for scan')
            # the parse ensures a valid lang key in the dict
            scan.lang = deeptracy_yml.get('lang')
            session.add(scan)

        # once the scan is ready continue with the dependency extraction
        scan_deps.delay(scan_id)
def get_vulnerabilities(scan_id: str):
    """Check every scan dependency against the Patton /batch endpoint,
    persist found vulnerabilities, mark the scan DONE and clean up.

    Fixes: removed a leftover ``print(response)`` debug statement that
    dumped raw service responses to stdout, replaced the list comprehension
    executed purely for its side effects with a plain ``for`` loop (the
    built list was discarded), inlined the single-use closure, and hoisted
    the loop-invariant URL out of the loop.

    :param scan_id: id of the scan whose dependencies are checked
    """
    with db.session_scope() as session:
        logger.debug('{} extract dependencies'.format(scan_id))
        scan_deps = get_scan_deps(scan_id, session)
        scan_deps_len = len(scan_deps)
        scan = get_scan(scan_id, session)
        project = scan.project
        total_vulnerabilities = []

        url = '{}/batch'.format(PATTON_URI)
        for i, scan_dep in enumerate(scan_deps):
            # raw_dep is stored as "package:version"
            [package, version] = scan_dep.raw_dep.split(':')
            response = requests.post(url, json=[[package, version]]).json()
            logger.info("Procesado {} de {}".format(i, scan_deps_len))
            if not response:
                continue
            for key in response:
                if response[key]:
                    total_vulnerabilities.append([package, version])
                    # save the vulnerabilities in the database
                    add_scan_vul(scan.id, package, version, response[key],
                                 session)
                    session.commit()
                    logger.info(
                        'saved {vulnerabilities} vulnerabilities for package '
                        '{package}:{version}'.format(
                            vulnerabilities=len(response),
                            package=package,
                            version=version))

        scan.total_vulnerabilities = len(total_vulnerabilities)
        update_scan_state(scan, ScanState.DONE, session)
        session.commit()

        # After the merge we remove the folder with the scan source
        scan_dir = os.path.join(SHARED_VOLUME_PATH, scan_id)
        try:
            shutil.rmtree(scan_dir)
        except IOError as e:
            logger.error("Error while removing tmp dir: {} - {}".format(
                scan_dir, e))

        if project.hook_type != ProjectHookType.NONE.name:
            # launch notify task
            logger.debug(
                '{} launch notify task for project.hook_type'.format(scan.id))
            notify_results.delay(scan.id)
def start_scan(scan_id: str):
    """Fan out one analysis task per plugin available for the scan's
    language, chorded into ``merge_results`` once all of them finish."""
    with db.session_scope() as session:
        logger.info('{} START SCAN'.format(scan_id))
        scan = get_scan(scan_id, session)
        project = scan.project
        logger.debug('{} for project({})'.format(scan_id, project.id))

        # plugins that know how to analyze this language
        plugins = get_plugins_for_lang(scan.lang, session)
        analysis_count = len(plugins)
        if not plugins:
            update_scan_state(scan, ScanState.NO_PLUGINS_FOR_LANGUAGE, session)
            logger.debug('{} no plugins found for language {}'.format(
                scan_id, scan.lang))
            raise TaskException('no plugins found for language {}'.format(
                scan.lang))

        # record counters and mark the scan as running
        scan.analysis_count = analysis_count
        scan.analysis_done = 0
        scan.state = ScanState.RUNNING.name
        session.add(scan)
        # commit now: the scan id is needed for the analysis rows below
        session.commit()

        # persist one analysis row per plugin and collect the generated ids
        scan_analysis_ids = []
        for plugin in plugins:
            scan_analysis = add_scan_analysis(scan.id, plugin.id, session)
            # commit so the scan_analysis row gets its id assigned
            session.commit()
            scan_analysis_ids.append(scan_analysis.id)

        # one task signature per analysis, all chorded into merge_results
        analyzers = [
            run_analyzer.s(analysis_id) for analysis_id in scan_analysis_ids
        ]
        chord(analyzers)(merge_results.s(scan_id=scan_id))
def test_get_scan_invalid_scan_id(self):
    """A None scan id must make get_scan raise AssertionError."""
    with self.assertRaises(AssertionError):
        scan_manager.get_scan(None, self.mock_session)