def trigger_analyses_merge(analysis):
    """
    Submit project and individual level auto-merge analyses if necessary.

    Merges are only considered once `analysis` reaches a final status
    (SUCCEEDED or FAILED), and a merge is submitted only when no sibling
    analyses of the same application are still pending.

    Arguments:
        analysis (dict): analysis instance with at least `status`,
            `application` and `targets` keys.
    """
    if analysis["status"] not in {"SUCCEEDED", "FAILED"}:
        return

    try:
        application = import_from_string(analysis["application"]["application_class"])()
    except ImportError:
        # best effort: skip merges if the application class can't be loaded
        return

    if application.has_project_auto_merge:
        # unique projects across all targets, keyed by primary key
        projects = {j["pk"]: j for i in analysis["targets"] for j in i["projects"]}

        for project in projects.values():
            # only merge once no analyses of this app remain pending for the project
            pending = api.get_instances_count(
                endpoint="analyses",
                status__in="STARTED,SUBMITTED",
                application=analysis["application"]["pk"],
                projects=project["pk"],
            )

            if not pending:
                click.secho(
                    f"Submitting project merge for {project} and "
                    f"application {analysis['application']}",
                    fg="green",
                )
                application.submit_merge_analysis(project)

    if application.has_individual_auto_merge:
        # unique individuals across all targets, keyed by primary key
        individuals = {
            i["sample"]["individual"]["pk"]: i["sample"]["individual"]
            for i in analysis["targets"]
        }

        for individual in individuals.values():
            # only merge once no analyses of this app remain pending for the individual
            pending = api.get_instances_count(
                endpoint="analyses",
                status__in="STARTED,SUBMITTED",
                application=analysis["application"]["pk"],
                targets__sample__individual__pk=individual["pk"],
            )

            if not pending:
                click.secho(
                    f"Submitting individual merge for {individual} and "
                    f"application {analysis['application']}",
                    fg="green",
                )
                application.submit_merge_analysis(individual)
def _set_analysis_permissions(analysis):
    """
    Update filesystem permissions of an analysis' results directory.

    Successful analyses get their results write-protected (`a-w`) unless the
    analysis is project level, is an individual level auto-merge, or the
    application opts out via `application_protect_results`. Unprotected
    results are made group-writable instead. Group ownership is set to
    `system_settings.DEFAULT_LINUX_GROUP` when configured (best effort).
    """
    protect_results = analysis.status == "SUCCEEDED"
    unique_analysis_per_individual = False
    application_protect_results = True
    # `false` is a no-op placeholder command when no linux group is configured
    chgrp_cmd = (
        ["false"]
        if not system_settings.DEFAULT_LINUX_GROUP
        else ["chgrp", "-R", system_settings.DEFAULT_LINUX_GROUP, analysis.storage_url]
    )

    try:
        # application flags override the defaults set above
        application = import_from_string(analysis.application.application_class)()
        unique_analysis_per_individual = application.unique_analysis_per_individual
        application_protect_results = application.application_protect_results
    except ImportError:
        pass

    if (
        # dont protect results if project level analysis
        analysis.project_level_analysis
        # dont protect results if individual level automerge
        or (analysis.individual_level_analysis and not unique_analysis_per_individual)
        # dont protect results if the application says so
        or not application_protect_results
    ):
        protect_results = False

    if protect_results:
        utils.check_admin()
        if analysis.ran_by != system_settings.api_username:
            # re-own via rsync: move aside, then copy back with `a-w` permissions
            src = analysis.storage_url + "__tmp"
            shutil.move(analysis.storage_url, src)
            cmd = utils.get_rsync_command(src, analysis.storage_url, chmod="a-w")
            subprocess.check_call(cmd, shell=True)
        else:
            subprocess.check_call(["chmod", "-R", "a-w", analysis.storage_url])
        try:
            # group ownership is best effort; failures are ignored
            subprocess.check_output(chgrp_cmd, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError:
            pass
    # NOTE(review): `not protect_results` is always True in this elif (we are in
    # the else branch of `if protect_results`), so the status check is redundant.
    # Also "FINISHED" is not a status used elsewhere in this module (which uses
    # SUCCEEDED/FAILED) — confirm whether it is a valid status.
    elif not protect_results or analysis.status in {"FAILED", "FINISHED"}:
        for i in [chgrp_cmd, ["chmod", "-R", "g+rwX", analysis.storage_url]]:
            try:
                # best effort: ignore chgrp/chmod failures
                subprocess.check_output(i, stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError:
                pass
def rerun_signals(filters):
    """Rerun failed signals."""
    failed_signals = api.get_instances(
        "signals", pk__gt=0, data__failure_traceback__isnull=False, **filters
    )

    for signal in failed_signals:
        click.secho(f"Rerunning signal: {signal.slug}", fg="yellow")
        target = api.get_instance(signal.target_endpoint, signal.target_id)

        try:
            # rerun the signal against its target; delete it only on success
            api._run_signals(
                endpoint=signal.target_endpoint,
                instance=target,
                signals=[import_from_string(signal.import_string)],
                raise_error=True,
            )
            api.delete_instance("signals", signal.pk)
        except exceptions.AutomationError:
            pass
def _get_analysis_results(analysis, raise_error=True):
    """
    Return application-computed results for an analysis.

    Arguments:
        analysis (object): analysis with `application`, `pk`, `storage_url`
            and `results` attributes.
        raise_error (bool): if True, re-raise any error hit while computing
            results; if False, print the traceback and return the analysis'
            current results instead.

    Returns:
        dict: the results computed by the application, or the analysis'
        existing results when the application class can't be imported.
    """
    app_name = f"{analysis.application.name} {analysis.application.version}"
    error_msg = f"\tFailed to patch {app_name}({analysis.pk}, {analysis.storage_url}):"
    results = analysis.results

    try:
        application = import_from_string(analysis.application.application_class)()
    except ImportError:
        click.secho(f"{error_msg} cant import application class", fg="red")
        return results

    if not analysis.storage_url:  # pragma: no cover
        analysis = application._patch_analysis(analysis)

    try:
        # sanity check that the analysis is linked to the expected application,
        # accounting for project and individual level auto-merge analyses
        expected_app = application.primary_key

        if analysis.project_level_analysis:
            expected_app = application.project_level_auto_merge_application.pk
        elif (
            analysis.individual_level_analysis
            and application.has_individual_auto_merge
        ):
            expected_app = application.individual_level_auto_merge_application.pk

        if analysis.application.pk != expected_app:
            # explicit raise instead of `assert`, which is stripped under -O
            raise AssertionError(
                f"{application.__class__} does not match: "
                f"{analysis.application.application_class}"
            )

        results = application._get_analysis_results(analysis)
    except Exception as error:  # pragma: no cover pylint: disable=broad-except
        click.secho(f"{error_msg} {error}", fg="red")

        if raise_error:
            raise  # bare raise preserves the original traceback

        print(traceback.format_exc())

    return results
def run_web_signals(filters):
    """Rerun web signals."""
    web_signal_imports = [
        "isabl_cli.signals.resume_analysis_signal",
        "isabl_cli.signals.force_analysis_signal",
    ]
    signals = api.get_instances(
        "signals", import_string__in=web_signal_imports, **filters
    )

    for signal in signals:
        click.secho(f"Running web signal: {signal.slug}", fg="yellow")
        target = api.get_instance(signal.target_endpoint, signal.target_id)

        try:
            # run the signal against its target; delete it only on success
            api._run_signals(
                endpoint=signal.target_endpoint,
                instance=target,
                signals=[import_from_string(signal.import_string)],
                raise_error=True,
            )
            api.delete_instance("signals", signal.pk)
        except exceptions.AutomationError:
            pass
def run_web_signals(analysis, restart=False, force=False):
    """
    Signal to trigger analyses executions.

    Runs the analysis' application against its own targets/references tuple,
    committing the run.

    NOTE(review): this definition reuses the name `run_web_signals`, which is
    also defined earlier in this module (for rerunning web signals) — the
    later definition shadows the earlier one at import time. Confirm the
    intended name for this function.

    Arguments:
        analysis (object): analysis with `targets`, `references` and
            `application.application_class` attributes.
        restart (bool): passed through to the application's `run`.
        force (bool): passed through to the application's `run`.
    """
    tuples = [(analysis.targets, analysis.references)]
    app = import_from_string(analysis.application.application_class)
    app().run(tuples=tuples, commit=True, restart=restart, force=force)
def merge_individual_analyses(individual, application):  # pragma: no cover
    """Merge analyses by individual."""
    # resolve API records, then delegate to the application's merge logic
    individual_instance = api.get_instance("individuals", individual)
    application_instance = api.get_instance("applications", application)
    application_class = import_from_string(application_instance["application_class"])
    application_class().run_individual_merge(individual_instance)
def merge_project_analyses(project, application):  # pragma: no cover
    """Merge analyses by project."""
    # resolve API records, then delegate to the application's merge logic
    project_instance = api.get_instance("projects", project)
    application_instance = api.get_instance("applications", application)
    application_class = import_from_string(application_instance["application_class"])
    application_class().run_project_merge(project_instance)