def run_task(project_id, event, task_id=None):
    secrets = get_project_secrets(project_id)
    if "control_tower_id" not in secrets:
        secrets = get_project_hidden_secrets(project_id)
    task_id = task_id if task_id else secrets["control_tower_id"]
    task = Task.query.filter(and_(Task.task_id == task_id)).first().to_json()
    app = run.connect_to_celery(1)
    celery_task = app.signature(
        "tasks.execute",
        kwargs={
            "task": unsecret(task, project_id=project_id),
            "event": unsecret(event, project_id=project_id)
        }
    )
    celery_task.apply_async()
    return {"message": "Accepted", "code": 200, "task_id": task_id}
def run_lambda(task, event):
    client = docker.from_env()
    container_name = NAME_CONTAINER_MAPPING.get(task['runtime'])
    if not container_name:
        return f"Container {task['runtime']} is not found"
    mount = Mount(type="volume", source=task['task_id'], target="/var/task")
    env_vars = loads(task.get("env_vars", "{}"))
    response = client.containers.run(f"lambci/{container_name}",
                                     command=[f"{task['task_handler']}", dumps(event)],
                                     mounts=[mount], stderr=True, remove=True,
                                     environment=env_vars)
    log = response.decode("utf-8", errors='ignore')
    if container_name == "lambda:python3.7":
        results = re.findall(r'({.+?})', log)[-1]
    else:
        # TODO: splitting on two consecutive newlines is flaky;
        #       rework with explicit, structured logging
        results = log.split("\n\n")[1]
    data = {"ts": int(mktime(datetime.utcnow().timetuple())), 'results': results, 'stderr': log}
    headers = {
        "Content-Type": "application/json",
        "Token": task['token']
    }
    auth_token = unsecret("{{secret.auth_token}}", project_id=task['project_id'])
    if auth_token:
        headers['Authorization'] = f'bearer {auth_token}'
    post(f'{APP_HOST}/api/v1/task/{task["task_id"]}/results', headers=headers, data=dumps(data))
    return results
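
# Hedged usage sketch: the dict below only mirrors the fields run_lambda reads
# (task_id, runtime, task_handler, env_vars, token, project_id); every value is
# hypothetical, and "runtime" has to match a key of NAME_CONTAINER_MAPPING.
_example_task = {
    "task_id": "demo-task-volume",                      # docker volume holding the packed code
    "runtime": "Python 3.7",                            # assumed mapping key
    "task_handler": "lambda_function.lambda_handler",
    "env_vars": '{"DEBUG": "true"}',
    "token": "example-task-token",
    "project_id": 1,
}
# run_lambda(_example_task, {"param": "value"})  # needs a local Docker daemon and the lambci image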
def get(self, project_id: int, task_id: str):
    args = self.get_parser.parse_args(strict=False)
    task = Task.query.filter_by(task_id=task_id).first()
    project = Project.get_or_404(project_id)
    if args.get("exec"):
        return unsecret(task.to_json(), project_id=project.id)
    return task.to_json()
def run_task(project_id, event, task_id=None):
    secrets = get_project_secrets(project_id)
    if "control_tower_id" not in secrets:
        secrets = get_project_hidden_secrets(project_id)
    task_id = task_id if task_id else secrets["control_tower_id"]
    task = Task.query.filter(and_(Task.task_id == task_id)).first().to_json()
    check_tasks_quota(task)
    statistic = Statistic.query.filter(Statistic.project_id == task['project_id']).first()
    setattr(statistic, 'tasks_executions', Statistic.tasks_executions + 1)
    statistic.commit()
    arbiter = get_arbiter()
    task_kwargs = {
        "task": unsecret(task, project_id=project_id),
        "event": unsecret(event, project_id=project_id),
        "galloper_url": unsecret("{{secret.galloper_url}}", project_id=task['project_id']),
        "token": unsecret("{{secret.auth_token}}", project_id=task['project_id'])
    }
    arbiter.apply("execute_lambda", queue=RABBIT_QUEUE_NAME, task_kwargs=task_kwargs)
    arbiter.close()
    return {"message": "Accepted", "code": 200, "task_id": task_id}
def check_tasks_quota(task):
    if not ProjectQuota.check_quota(project_id=task['project_id'], quota='tasks_executions'):
        data = {"ts": int(mktime(datetime.utcnow().timetuple())),
                'results': 'Forbidden',
                'stderr': "The number of task executions allowed in the project has been exceeded"}
        headers = {
            "Content-Type": "application/json",
            "Token": task['token']
        }
        auth_token = unsecret("{{secret.auth_token}}", project_id=task['project_id'])
        if auth_token:
            headers['Authorization'] = f'bearer {auth_token}'
        post(f'{APP_HOST}/api/v1/task/{task["task_id"]}/results', headers=headers, data=dumps(data))
        raise Forbidden(description="The number of task executions allowed in the project has been exceeded")
def execute_lambda(self, task, event, *args, **kwargs):
    if not ProjectQuota.check_quota(project_id=task['project_id'], quota='tasks_executions'):
        data = {"ts": int(mktime(datetime.utcnow().timetuple())),
                'results': 'Forbidden',
                'stderr': "The number of task executions allowed in the project has been exceeded"}
        headers = {
            "Content-Type": "application/json",
            "Token": task['token']
        }
        auth_token = unsecret("{{secret.auth_token}}", project_id=task['project_id'])
        if auth_token:
            headers['Authorization'] = f'bearer {auth_token}'
        post(f'{APP_HOST}/api/v1/task/{task["task_id"]}/results', headers=headers, data=dumps(data))
        raise Forbidden(description="The number of task executions allowed in the project has been exceeded")
    statistic = db_session.query(Statistic).filter(Statistic.project_id == task['project_id']).first()
    setattr(statistic, 'tasks_executions', Statistic.tasks_executions + 1)
    statistic.commit()
    res = run_lambda(task, event)
    if task['callback']:
        event['result'] = res
        task = db_session.query(Task).filter(Task.task_id == task['callback'])[0].to_json()
        execute_lambda.apply_async(kwargs=dict(task=task, event=event))
    return res
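
# Hedged illustration of the callback chain above: when task "a" declares task "b"
# as its callback, the result of "a" is injected into the event and execute_lambda
# re-queues itself for "b" with that enriched event (task ids are hypothetical).
#
#   event = {"param": "value"}
#   res = run_lambda(task_a, event)                     # task_a["callback"] == "b"
#   event["result"] = res
#   execute_lambda.apply_async(kwargs={"task": task_b, "event": event})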
def configure_execution_json(self, output="cc", execution=False, thresholds={}): """ Create configuration for execution """ # if output == "dusty": # global_dast_settings = dict() global_dast_settings["max_concurrent_scanners"] = 1 if "toolreports" in self.dast_settings.get("reporters_checked", list()): global_dast_settings[ "save_intermediates_to"] = "/tmp/intermediates" # scanners_config = dict() if "zap" in self.dast_settings.get("scanners_checked", list()): scanners_config["zap"] = { "scan_types": "all", "target": self.dast_settings.get("dast_target_url"), } if "w3af" in self.dast_settings.get("scanners_checked", list()): scanners_config["w3af"] = { "target": self.dast_settings.get("dast_target_url"), } if "nikto" in self.dast_settings.get("scanners_checked", list()): scanners_config["nikto"] = { "target": self.dast_settings.get("dast_target_url"), } if "nmap" in self.dast_settings.get("scanners_checked", list()): scanners_config["nmap"] = { "target": self.dast_settings.get("dast_target_url"), } if "masscan" in self.dast_settings.get("scanners_checked", list()): scanners_config["masscan"] = { "target": self.dast_settings.get("dast_target_url"), } if "sslyze" in self.dast_settings.get("scanners_checked", list()): scanners_config["sslyze"] = { "target": self.dast_settings.get("dast_target_url"), } if "aemhacker" in self.dast_settings.get("scanners_checked", list()): scanners_config["aemhacker"] = { "target": self.dast_settings.get("dast_target_url"), } # reporters_config = dict() reporters_config["galloper"] = { "url": unsecret("{{secret.galloper_url}}", project_id=self.project_id), "project_id": f"{self.project_id}", "token": unsecret("{{secret.auth_token}}", project_id=self.project_id), } if "toolreports" in self.dast_settings.get("reporters_checked", list()): reporters_config["galloper_tool_reports"] = { "bucket": "dast", "object": f"{self.test_uid}_tool_reports.zip", "source": "/tmp/intermediates", } if "quality" in self.dast_settings.get("reporters_checked", list()): reporters_config["galloper_junit_report"] = { "bucket": "dast", "object": f"{self.test_uid}_junit_report.xml", } reporters_config["galloper_quality_gate_report"] = { "bucket": "dast", "object": f"{self.test_uid}_quality_gate_report.json", } reporters_config["junit"] = { "file": "/tmp/{project_name}_{testing_type}_{build_id}_report.xml", } # if "jira" in self.dast_settings.get("reporters_checked", list()): project_secrets = get_project_hidden_secrets(self.project_id) if "jira" in project_secrets: jira_settings = loads(project_secrets["jira"]) reporters_config["jira"] = { "url": jira_settings["jira_url"], "username": jira_settings["jira_login"], "password": jira_settings["jira_password"], "project": jira_settings["jira_project"], "fields": { "Issue Type": jira_settings["issue_type"], } } # if "email" in self.dast_settings.get("reporters_checked", list()): project_secrets = get_project_hidden_secrets(self.project_id) if "smtp" in project_secrets: email_settings = loads(project_secrets["smtp"]) reporters_config["email"] = { "server": email_settings["smtp_host"], "port": email_settings["smtp_port"], "login": email_settings["smtp_user"], "password": email_settings["smtp_password"], "mail_to": self.dast_settings.get("email_recipients", ""), } reporters_config["html"] = { "file": "/tmp/{project_name}_{testing_type}_{build_id}_report.html", } # if "ado" in self.dast_settings.get("reporters_checked", list()): project_secrets = get_project_hidden_secrets(self.project_id) if "ado" in project_secrets: 
reporters_config["azure_devops"] = loads( project_secrets["ado"]) # Thresholds tholds = {} if thresholds and any( int(thresholds[key]) > -1 for key in thresholds.keys()): for key, value in thresholds.items(): if int(value) > -1: tholds[key.capitalize()] = int(value) # dusty_config = { "config_version": 2, "suites": { "dast": { "settings": { "project_name": self.dast_settings.get("project_name"), "project_description": self.name, "environment_name": "target", "testing_type": "DAST", "scan_type": "full", "build_id": self.test_uid, "dast": global_dast_settings }, "scanners": { "dast": scanners_config }, "processing": { "min_severity_filter": { "severity": "Info" }, "quality_gate": { "thresholds": tholds }, "false_positive": { "galloper": unsecret("{{secret.galloper_url}}", project_id=self.project_id), "project_id": f"{self.project_id}", "token": unsecret("{{secret.auth_token}}", project_id=self.project_id) }, "ignore_finding": { "galloper": unsecret("{{secret.galloper_url}}", project_id=self.project_id), "project_id": f"{self.project_id}", "token": unsecret("{{secret.auth_token}}", project_id=self.project_id) } }, "reporters": reporters_config } } } # return dusty_config # job_type = "dast" container = f"getcarrier/{job_type}:{CURRENT_RELEASE}" parameters = { "cmd": f"run -b galloper:{job_type}_{self.test_uid} -s {job_type}", "GALLOPER_URL": unsecret("{{secret.galloper_url}}", project_id=self.project_id), "GALLOPER_PROJECT_ID": f"{self.project_id}", "GALLOPER_AUTH_TOKEN": unsecret("{{secret.auth_token}}", project_id=self.project_id), } cc_env_vars = { "REDIS_HOST": unsecret("{{secret.redis_host}}", project_id=self.project_id), "REDIS_PASSWORD": unsecret("{{secret.redis_password}}", project_id=self.project_id), } concurrency = 1 # if output == "docker": return f"docker run --rm -i -t " \ f"-e project_id={self.project_id} " \ f"-e galloper_url={unsecret('{{secret.galloper_url}}', project_id=self.project_id)} " \ f"-e token=\"{unsecret('{{secret.auth_token}}', project_id=self.project_id)}\" " \ f"getcarrier/control_tower:latest " \ f"-tid {self.test_uid}" if output == "cc": execution_json = { "job_name": self.name, "job_type": job_type, "concurrency": concurrency, "container": container, "execution_params": dumps(parameters), "cc_env_vars": cc_env_vars, } if "quality" in self.dast_settings.get("reporters_checked", list()): execution_json["quality_gate"] = "True" return execution_json # return ""
def configure_execution_json(self, output='cc', browser=None, test_type=None, params=None,
                             env_vars=None, reporting=None, customization=None, cc_env_vars=None,
                             parallel=None, execution=False):
    reports = []
    for report in self.reporting:
        if report:
            reports.append(f"-r {report}")
    cmd = f"-f {self.file} -sc /tmp/data/{self.entrypoint} -l {self.loops} -b {browser} " \
          f"-a {self.aggregation} {' '.join(reports)}"
    execution_json = {
        "container": self.runner,
        "execution_params": {
            "cmd": cmd,
            "REMOTE_URL": f'{unsecret("{{secret.redis_host}}", project_id=self.project_id)}:4444'
        },
        "cc_env_vars": {},
        "bucket": self.bucket,
        "job_name": self.name,
        "artifact": self.file,
        "job_type": self.job_type,
        "concurrency": 1
    }
    if "jira" in self.reporting:
        execution_json["execution_params"]["JIRA"] = unsecret("{{secret.jira}}", project_id=self.project_id)
    if "quality" in self.reporting:
        execution_json["quality_gate"] = True
    if "junit" in self.reporting:
        execution_json["junit"] = True
    if self.env_vars:
        for key, value in self.env_vars.items():
            execution_json["execution_params"][key] = value
    if self.cc_env_vars:
        for key, value in self.cc_env_vars.items():
            execution_json["cc_env_vars"][key] = value
    if "REDIS_HOST" not in execution_json["cc_env_vars"].keys():
        execution_json["cc_env_vars"]["REDIS_HOST"] = "{{secret.redis_host}}"
    if "REDIS_PASSWORD" not in execution_json["cc_env_vars"].keys():
        execution_json["cc_env_vars"]["REDIS_PASSWORD"] = "******"
    if self.customization:
        for key, value in self.customization.items():
            if "additional_files" not in execution_json["execution_params"]:
                execution_json["execution_params"]["additional_files"] = dict()
            execution_json["execution_params"]["additional_files"][key] = value
    execution_json["execution_params"] = dumps(execution_json["execution_params"])
    if execution:
        execution_json = unsecret(execution_json, project_id=self.project_id)
    if output == 'cc':
        current_app.logger.error(execution_json)
        return execution_json
    return f'docker run -t --rm -e project_id={self.project_id} ' \
           f'-e galloper_url={unsecret("{{secret.galloper_url}}", project_id=self.project_id)} ' \
           f"-e token=\"{unsecret('{{secret.auth_token}}', project_id=self.project_id)}\" " \
           f'getcarrier/control_tower:latest ' \
           f'--test_id {self.test_uid}'
def configure_execution_json(self, output='cc', test_type=None, params=None, env_vars=None,
                             reporting=None, customization=None, cc_env_vars=None, parallel=None,
                             execution=False, emails=None):
    pairs = {
        "customization": [customization, self.customization],
        "params": [params, self.params],
        "env_vars": [env_vars, self.env_vars],
        "cc_env_vars": [cc_env_vars, self.cc_env_vars],
        "reporting": [reporting, self.reporting]
    }
    for pair in pairs.keys():
        if not pairs[pair][0]:
            pairs[pair][0] = pairs[pair][1]
        else:
            for each in list(pairs[pair][0].keys()) + list(
                    set(pairs[pair][1].keys()) - set(pairs[pair][0].keys())):
                pairs[pair][0][each] = pairs[pair][0][each] if each in list(pairs[pair][0].keys()) \
                    else pairs[pair][1][each]
    cmd = ''
    if not params:
        params = self.params
    if self.job_type == 'perfmeter':
        entrypoint = self.entrypoint if path.exists(self.entrypoint) \
            else path.join('/mnt/jmeter', self.entrypoint)
        cmd = f"-n -t {entrypoint}"
        for key, value in params.items():
            if test_type and key == "test.type":
                cmd += f" -Jtest.type={test_type}"
            else:
                cmd += f" -J{key}={value}"
    execution_json = {
        "container": self.runner,
        "execution_params": {
            "cmd": cmd
        },
        "cc_env_vars": {},
        "bucket": self.bucket,
        "job_name": self.name,
        "artifact": self.file,
        "job_type": self.job_type,
        "concurrency": self.parallel if not parallel else parallel
    }
    if self.reporting:
        if "junit" in self.reporting:
            execution_json["junit"] = "True"
        if "quality" in self.reporting:
            execution_json["quality_gate"] = "True"
        if "perfreports" in self.reporting:
            execution_json["save_reports"] = "True"
        if "jira" in self.reporting:
            execution_json["jira"] = "True"
        if "email" in self.reporting:
            execution_json["email"] = "True"
        if "rp" in self.reporting:
            execution_json["report_portal"] = "True"
        if "ado" in self.reporting:
            execution_json["azure_devops"] = "True"
    if emails:
        _emails = self.emails
        for each in emails.split(","):
            if each not in _emails:
                _emails += f",{each}"
        execution_json["email_recipients"] = _emails
    else:
        execution_json["email_recipients"] = self.emails
    if self.env_vars:
        for key, value in self.env_vars.items():
            execution_json["execution_params"][key] = value
    if "influxdb_host" not in execution_json["execution_params"].keys():
        execution_json["execution_params"]["influxdb_host"] = "{{secret.influx_ip}}"
    if "loki_host" not in execution_json["execution_params"].keys():
        execution_json["execution_params"]["loki_host"] = "{{secret.loki_host}}"
    if "loki_port" not in execution_json["execution_params"].keys():
        execution_json["execution_params"]["loki_port"] = "3100"
    if self.cc_env_vars:
        for key, value in self.cc_env_vars.items():
            execution_json["cc_env_vars"][key] = value
    if "REDIS_HOST" not in execution_json["cc_env_vars"].keys():
        execution_json["cc_env_vars"]["REDIS_HOST"] = "{{secret.redis_host}}"
    if "REDIS_PASSWORD" not in execution_json["cc_env_vars"].keys():
        execution_json["cc_env_vars"]["REDIS_PASSWORD"] = "******"
    if "GALLOPER_WEB_HOOK" not in execution_json["cc_env_vars"].keys():
        execution_json["cc_env_vars"]["GALLOPER_WEB_HOOK"] = "{{secret.post_processor}}"
    if self.customization:
        for key, value in self.customization.items():
            if "additional_files" not in execution_json["execution_params"]:
                execution_json["execution_params"]["additional_files"] = dict()
            execution_json["execution_params"]["additional_files"][key] = value
    if self.git:
        execution_json["git"] = self.git
    if self.job_type == "perfgun":
        execution_json["execution_params"]['test'] = self.entrypoint
        execution_json["execution_params"]["GATLING_TEST_PARAMS"] = ""
        for key, value in params.items():
            execution_json["execution_params"]["GATLING_TEST_PARAMS"] += f"-D{key}={value} "
    execution_json["execution_params"] = dumps(execution_json["execution_params"])
    if execution:
        execution_json = unsecret(execution_json, project_id=self.project_id)
    if output == 'cc':
        return execution_json
    else:
        return "docker run -e project_id=%s -e galloper_url=%s -e token=%s" \
               " getcarrier/control_tower:latest --test_id=%s" \
               "" % (self.project_id,
                     unsecret("{{secret.galloper_url}}", project_id=self.project_id),
                     unsecret("{{secret.auth_token}}", project_id=self.project_id),
                     self.test_uid)
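
# Hedged example of the 'cc' output produced above for a perfmeter test; every value
# is illustrative and depends on the project's secrets, runner image and settings.
#
# {
#     "container": "getcarrier/perfmeter:latest",
#     "execution_params": '{"cmd": "-n -t /mnt/jmeter/demo.jmx -JVUSERS=10", '
#                         '"influxdb_host": "{{secret.influx_ip}}", '
#                         '"loki_host": "{{secret.loki_host}}", "loki_port": "3100"}',
#     "cc_env_vars": {"REDIS_HOST": "{{secret.redis_host}}",
#                     "REDIS_PASSWORD": "******",
#                     "GALLOPER_WEB_HOOK": "{{secret.post_processor}}"},
#     "bucket": "tests",
#     "job_name": "demo",
#     "artifact": "demo.jmx",
#     "job_type": "perfmeter",
#     "concurrency": 1,
#     "email_recipients": ""
# }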
def configure_execution_json(self, output="cc", execution=False, thresholds={}): """ Create configuration for execution """ # if output == "dusty": # global_sast_settings = dict() global_sast_settings["max_concurrent_scanners"] = 1 if "toolreports" in self.sast_settings.get("reporters_checked", list()): global_sast_settings["save_intermediates_to"] = "/tmp/intermediates" # actions_config = dict() if self.sast_settings.get("sast_target_type") == "target_git": git_url = self.sast_settings.get("sast_target_repo") branch = "master" if "@" in git_url[5:]: branch = git_url[5:].split("@")[1] git_url = git_url.replace(f"@{branch}", "") actions_config["git_clone"] = { "source": git_url, "branch": branch, "target": "/tmp/code" } if self.sast_settings.get("sast_target_repo_user") != "": actions_config["git_clone"]["username"] = unsecret(self.sast_settings.get("sast_target_repo_user"), project_id=self.project_id) if self.sast_settings.get("sast_target_repo_pass") != "": actions_config["git_clone"]["password"] = unsecret(self.sast_settings.get("sast_target_repo_pass"), project_id=self.project_id) if self.sast_settings.get("sast_target_repo_key") != "": actions_config["git_clone"]["key_data"] = unsecret(self.sast_settings.get("sast_target_repo_key"), project_id=self.project_id) if self.sast_settings.get("sast_target_type") == "target_galloper_artifact": actions_config["galloper_artifact"] = { "bucket": self.sast_settings.get("sast_target_artifact_bucket"), "object": self.sast_settings.get("sast_target_artifact"), "target": "/tmp/code", "delete": False } if self.sast_settings.get("sast_target_type") == "target_code_path": actions_config["galloper_artifact"] = { "bucket": "sast", "object": f"{self.test_uid}.zip", "target": "/tmp/code", "delete": True } # scanners_config = dict() scanners_config[self.sast_settings.get("language")] = { "code": "/tmp/code" } if "composition" in self.sast_settings.get("options_checked", list()): scanners_config["dependencycheck"] = { "comp_path": "/tmp/code", "comp_opts": "--enableExperimental" } if "secretscan" in self.sast_settings.get("options_checked", list()): scanners_config["gitleaks"] = { "code": "/tmp/code" } # reporters_config = dict() reporters_config["galloper"] = { "url": unsecret("{{secret.galloper_url}}", project_id=self.project_id), "project_id": f"{self.project_id}", "token": unsecret("{{secret.auth_token}}", project_id=self.project_id), } if "toolreports" in self.sast_settings.get("reporters_checked", list()): reporters_config["galloper_tool_reports"] = { "bucket": "sast", "object": f"{self.test_uid}_tool_reports.zip", "source": "/tmp/intermediates", } if "quality" in self.sast_settings.get("reporters_checked", list()): reporters_config["galloper_junit_report"] = { "bucket": "sast", "object": f"{self.test_uid}_junit_report.xml", } reporters_config["galloper_quality_gate_report"] = { "bucket": "sast", "object": f"{self.test_uid}_quality_gate_report.json", } reporters_config["junit"] = { "file": "/tmp/{project_name}_{testing_type}_{build_id}_report.xml", } # if "jira" in self.sast_settings.get("reporters_checked", list()): project_secrets = get_project_hidden_secrets(self.project_id) if "jira" in project_secrets: jira_settings = loads(project_secrets["jira"]) reporters_config["jira"] = { "url": jira_settings["jira_url"], "username": jira_settings["jira_login"], "password": jira_settings["jira_password"], "project": jira_settings["jira_project"], "fields": { "Issue Type": jira_settings["issue_type"], } } # if "email" in 
self.sast_settings.get("reporters_checked", list()): project_secrets = get_project_hidden_secrets(self.project_id) if "smtp" in project_secrets: email_settings = loads(project_secrets["smtp"]) reporters_config["email"] = { "server": email_settings["smtp_host"], "port": email_settings["smtp_port"], "login": email_settings["smtp_user"], "password": email_settings["smtp_password"], "mail_to": self.sast_settings.get("email_recipients", ""), } reporters_config["html"] = { "file": "/tmp/{project_name}_{testing_type}_{build_id}_report.html", } # if "ado" in self.sast_settings.get("reporters_checked", list()): project_secrets = get_project_hidden_secrets(self.project_id) if "ado" in project_secrets: reporters_config["azure_devops"] = loads(project_secrets["ado"]) # if "rp" in self.sast_settings.get("reporters_checked", list()): project_secrets = get_project_hidden_secrets(self.project_id) if "rp" in project_secrets: rp = loads(project_secrets.get("rp")) reporters_config["reportportal"] = { "rp_host": rp["rp_host"], "rp_token": rp["rp_token"], "rp_project_name": rp["rp_project"], "rp_launch_name": "sast" } # Thresholds tholds = {} if thresholds and any(int(thresholds[key]) > -1 for key in thresholds.keys()): for key, value in thresholds.items(): if int(value) > -1: tholds[key.capitalize()] = int(value) # dusty_config = { "config_version": 2, "suites": { "sast": { "settings": { "project_name": self.sast_settings.get("project_name"), "project_description": self.name, "environment_name": "target", "testing_type": "SAST", "scan_type": "full", "build_id": self.test_uid, "sast": global_sast_settings }, "actions": actions_config, "scanners": { "sast": scanners_config }, "processing": { "min_severity_filter": { "severity": "Info" }, "quality_gate": { "thresholds": tholds }, "false_positive": { "galloper": unsecret("{{secret.galloper_url}}", project_id=self.project_id), "project_id": f"{self.project_id}", "token": unsecret("{{secret.auth_token}}", project_id=self.project_id) }, "ignore_finding": { "galloper": unsecret("{{secret.galloper_url}}", project_id=self.project_id), "project_id": f"{self.project_id}", "token": unsecret("{{secret.auth_token}}", project_id=self.project_id) } }, "reporters": reporters_config } } } # return dusty_config # job_type = "sast" container = f"getcarrier/{job_type}:{CURRENT_RELEASE}" parameters = { "cmd": f"run -b galloper:{job_type}_{self.test_uid} -s {job_type}", "GALLOPER_URL": unsecret("{{secret.galloper_url}}", project_id=self.project_id), "GALLOPER_PROJECT_ID": f"{self.project_id}", "GALLOPER_AUTH_TOKEN": unsecret("{{secret.auth_token}}", project_id=self.project_id), } if self.sast_settings.get("sast_target_type") == "target_code_path": parameters["code_path"] = self.sast_settings.get("sast_target_code") project_queues = get_project_queues(project_id=self.project_id) if self.region in project_queues["public"]: cc_env_vars = { "RABBIT_HOST": unsecret("{{secret.rabbit_host}}", project_id=self.project_id), "RABBIT_USER": unsecret("{{secret.rabbit_user}}", project_id=self.project_id), "RABBIT_PASSWORD": unsecret("{{secret.rabbit_password}}", project_id=self.project_id), "RABBIT_VHOST": "carrier" } else: cc_env_vars = { "RABBIT_HOST": unsecret("{{secret.rabbit_host}}", project_id=self.project_id), "RABBIT_USER": unsecret("{{secret.rabbit_project_user}}", project_id=self.project_id), "RABBIT_PASSWORD": unsecret("{{secret.rabbit_project_password}}", project_id=self.project_id), "RABBIT_VHOST": unsecret("{{secret.rabbit_project_vhost}}", project_id=self.project_id) } concurrency 
= 1 # if output == "docker": docker_run = f"docker run --rm -i -t" if self.sast_settings.get("sast_target_type") == "target_code_path": docker_run = f"docker run --rm -i -t -v \"{self.sast_settings.get('sast_target_code')}:/code\"" return f"{docker_run} " \ f"-e project_id={self.project_id} " \ f"-e galloper_url={unsecret('{{secret.galloper_url}}', project_id=self.project_id)} " \ f"-e token=\"{unsecret('{{secret.auth_token}}', project_id=self.project_id)}\" " \ f"getcarrier/control_tower:{CURRENT_RELEASE} " \ f"-tid {self.test_uid}" if output == "cc": execution_json = { "job_name": self.name, "job_type": job_type, "concurrency": concurrency, "container": container, "execution_params": dumps(parameters), "cc_env_vars": cc_env_vars, "channel": self.region } if "quality" in self.sast_settings.get("reporters_checked", list()): execution_json["quality_gate"] = "True" return execution_json # return ""