def get(self, project_id):
    """Provision project service tasks ('post_processor', 'control_tower' or 'all')
    and store the resulting values in the project's secret stores.

    Query arg ``name`` selects what to create; any other value is rejected
    with HTTP 400. The 'all' branch additionally seeds connection secrets
    (redis/loki/influx) from application-level constants.
    """
    project = Project.get_or_404(project_id)
    args = self.get_parser.parse_args(strict=False)
    # Up-front validation: everything past this point sees one of the three names.
    if args['name'] not in ['post_processor', 'control_tower', 'all']:
        return {"message": "go away", "code": 400}, 400
    secrets = get_project_hidden_secrets(project.id)
    project_secrets = {}
    if args['name'] == 'post_processor':
        secrets = self._create_pp(project, secrets)
    elif args['name'] == 'control_tower':
        secrets = self._create_cc(project, secrets)
    elif args['name'] == 'all':
        secrets = self._create_pp(project, secrets)
        secrets = self._create_cc(project, secrets)
        # Publicly visible secrets: base URL and project id.
        project_secrets["galloper_url"] = APP_HOST
        project_secrets["project_id"] = project.id
        # Hidden secrets: backend connection endpoints/credentials.
        secrets["redis_host"] = APP_IP
        # Loki is reached over plain HTTP internally.
        secrets["loki_host"] = EXTERNAL_LOKI_HOST.replace("https://", "http://")
        secrets["influx_ip"] = APP_IP
        secrets["influx_port"] = INFLUX_PORT
        secrets["loki_port"] = LOKI_PORT
        secrets["redis_password"] = REDIS_PASSWORD
        set_project_secrets(project.id, project_secrets)
    else:
        # NOTE(review): unreachable — the guard above already rejected other names.
        return {"message": "go away", "code": 400}, 400
    set_project_hidden_secrets(project.id, secrets)
    return {"message": "Done", "code": 200}
def jira_integration(args, project):
    """Test or save Jira integration settings for a project.

    With ``args["test"]`` truthy, probes the Jira REST API with the supplied
    credentials and returns a human-readable status string. Otherwise stores
    the Jira config in hidden secrets plus a default reporting config in
    plain secrets, returning a confirmation string. Any failure is reported
    as a string, never raised.
    """
    try:
        if args["test"]:
            url = f'{args["config"]["jira_url"]}/rest/api/2/project'
            res = get(url, auth=(args["config"]["jira_login"], args["config"]["jira_password"]))
            if res.status_code == 200:
                message = "Successfully connected to Jira"
            else:
                message = "Connection failed"
            # Connection may succeed while the configured project key is absent
            # from the project listing response.
            if "failed" not in message and args["config"]["jira_project"] not in res.text:
                message = "Connection succeed but project not found"
            return message
        else:
            # Default reporting thresholds/labels for the perf-API Jira reporter.
            jira_perf_api_config = {
                "assignee": args["config"]["jira_login"],
                "check_functional_errors": "True",
                "check_performance_degradation": "True",
                "check_missed_thresholds": "True",
                "performance_degradation_rate": 20,
                "missed_thresholds_rate": 50,
                "jira_labels": "performance, api",
                "jira_watchers": "",
                "jira_epic_key": ""
            }
            secrets = get_project_secrets(project.id)
            hidden_secrets = get_project_hidden_secrets(project.id)
            # Credentials go to hidden secrets; reporter defaults stay visible.
            hidden_secrets["jira"] = dumps(args["config"])
            secrets["jira_perf_api"] = dumps(jira_perf_api_config)
            set_project_secrets(project.id, secrets)
            set_project_hidden_secrets(project.id, hidden_secrets)
            return "Jira settings saved"
    except Exception as e:
        return f"Failed. Jira settings not saved. {str(e)}"
def get(self, project_id: int, secret: str) -> Tuple[dict, int]:  # pylint: disable=R0201,C0111
    """Return a single secret value for the project.

    Plain secrets take precedence; when the plain value is missing or falsy,
    the hidden secret store is consulted instead.
    """
    project = Project.get_or_404(project_id)
    value = get_project_secrets(project.id).get(secret)
    if not value:
        # Falsy (missing/empty) plain value -> fall back to hidden secrets.
        value = get_project_hidden_secrets(project.id).get(secret)
    return {"secret": value}, 200
def rp_integration(args, project):
    """Test or save Report Portal integration settings.

    With ``args["test"]`` truthy, calls the RP project endpoint with the
    bearer token and returns a status string. Otherwise persists the RP
    config in hidden secrets and a default reporter config in plain secrets.
    """
    if args["test"]:
        url = f'{args["config"]["rp_host"]}/api/v1/project/{args["config"]["rp_project"]}'
        headers = {
            'content-type': 'application/json',
            'Authorization': f'bearer {args["config"]["rp_token"]}'
        }
        res = get(url, headers=headers)
        if res.status_code == 200:
            message = "Successfully connected to RP"
        else:
            message = "Connection failed"
        return message
    else:
        # Default reporting thresholds for the perf-API RP reporter.
        rp_perf_api_config = {
            "rp_launch_name": "carrier",
            "check_functional_errors": "True",
            "check_performance_degradation": "True",
            "check_missed_thresholds": "True",
            "performance_degradation_rate": 20,
            "missed_thresholds_rate": 50
        }
        secrets = get_project_secrets(project.id)
        hidden_secrets = get_project_hidden_secrets(project.id)
        # Credentials go to hidden secrets; reporter defaults stay visible.
        hidden_secrets["rp"] = dumps(args["config"])
        secrets["rp_perf_api"] = dumps(rp_perf_api_config)
        set_project_secrets(project.id, secrets)
        set_project_hidden_secrets(project.id, hidden_secrets)
        return "RP settings saved"
def smtp_integration(args, project):
    """Test or save SMTP integration settings.

    With ``args["test"]`` truthy, attempts an SSL SMTP login and returns a
    status string. Otherwise stores the SMTP config in hidden secrets and
    creates (or updates) the 'email_notification' task with the config as
    its environment. All errors are returned as strings, never raised.
    """
    try:
        if args["test"]:
            try:
                s = smtplib.SMTP_SSL(host=args['config']['smtp_host'], port=int(args['config']['smtp_port']))
                s.ehlo()
                s.login(args['config']['smtp_user'], args['config']['smtp_password'])
                return "SMTP server connected"
            except smtplib.SMTPException as e:
                return f"SMTP server not connected. {str(e)}"
        else:
            secrets = get_project_secrets(project.id)
            hidden_secrets = get_project_hidden_secrets(project.id)
            hidden_secrets["smtp"] = dumps(args["config"])
            # NOTE: mutates args["config"] in place by aliasing it as env_vars.
            env_vars = args["config"]
            env_vars["error_rate"] = 10
            env_vars["performance_degradation_rate"] = 20
            env_vars["missed_thresholds"] = 50
            if "email_notification_id" in secrets:
                # Task already exists -> refresh its environment only.
                update_task(secrets["email_notification_id"], dumps(env_vars))
            elif "email_notification_id" in hidden_secrets:
                update_task(hidden_secrets["email_notification_id"], dumps(env_vars))
            else:
                # First-time setup: create the notification lambda task.
                email_notification_args = {
                    "funcname": "email_notification",
                    "invoke_func": "lambda_function.lambda_handler",
                    "runtime": "Python 3.7",
                    "env_vars": dumps(env_vars)
                }
                email_notification = create_task(project, File(EMAIL_NOTIFICATION_PATH), email_notification_args)
                hidden_secrets["email_notification_id"] = email_notification.task_id
            set_project_hidden_secrets(project.id, hidden_secrets)
            return "SMTP setting saved"
    except Exception as e:
        return f"Failed. SMTP server not connected. {str(e)}"
def drop_project_databases(project_id):
    """Drop the per-project Influx databases (jmeter/gatling/comparison/telegraf).

    Database names come from the project's hidden secrets; entries that are
    missing or empty are skipped (the original issued ``drop database None``
    for unset secrets).
    """
    hidden_secrets = get_project_hidden_secrets(project_id)
    client = get_client(project_id)
    for key in ("jmeter_db", "gatling_db", "comparison_db", "telegraf_db"):
        db_name = hidden_secrets.get(key)
        if db_name:  # guard against unset secrets producing "drop database None"
            client.query(f"drop database {db_name}")
def put(self, project_id: int, secret: str) -> Tuple[dict, int]:  # pylint: disable=C0111
    """Move a plain project secret into the hidden secret store.

    Returns 404 when the named secret does not exist in plain secrets
    (the original raised an unhandled KeyError, i.e. HTTP 500).
    """
    # Check project_id for validity
    project = Project.get_or_404(project_id)
    secrets = get_project_secrets(project.id)
    hidden_secrets = get_project_hidden_secrets(project.id)
    if secret not in secrets:
        return {"message": f"Project secret {secret} was not found"}, 404
    # Remove from plain store and add to hidden store atomically in memory,
    # then persist both.
    hidden_secrets[secret] = secrets.pop(secret)
    set_project_secrets(project.id, secrets)
    set_project_hidden_secrets(project.id, hidden_secrets)
    return {"message": "Project secret was moved to hidden secrets"}, 200
def create_project_databases(project_id):
    """Create the per-project Influx databases (jmeter/gatling/comparison/telegraf)
    with a 180-day retention policy.

    Database names come from the project's hidden secrets; entries that are
    missing or empty are skipped (the original issued ``create database None``
    for unset secrets).
    """
    hidden_secrets = get_project_hidden_secrets(project_id)
    client = get_client(project_id)
    for key in ("jmeter_db", "gatling_db", "comparison_db", "telegraf_db"):
        db_name = hidden_secrets.get(key)
        if db_name:  # guard against unset secrets producing "create database None"
            client.query(
                f"create database {db_name} with duration 180d replication 1 shard duration 7d name autogen"
            )
def get_client(project_id, db_name=None):
    """Build an InfluxDBClient for the project, resolving connection secrets.

    Each credential is taken from the plain secret store when present,
    otherwise from hidden secrets, defaulting to "".
    """
    secrets = get_project_secrets(project_id)
    hidden_secrets = get_project_hidden_secrets(project_id)

    def _resolve(key):
        # Plain secrets win; hidden secrets are the fallback.
        if key in secrets:
            return secrets.get(key)
        return hidden_secrets.get(key, "")

    return InfluxDBClient(_resolve("influx_ip"), 8086,
                          _resolve("influx_user"), _resolve("influx_password"),
                          db_name)
def ado_integration(args, project):
    """Test or save Azure DevOps integration settings.

    With ``args["test"]`` truthy, probes the ADO teams API using the PAT as
    basic-auth password and returns a status string. Otherwise stores the
    config in hidden secrets.
    """
    if args["test"]:
        url = f'https://dev.azure.com/{args["config"]["org"]}/_apis/teams?api-version=6.1-preview.3'
        # ADO PAT auth: empty username, token as password.
        res = get(url, auth=("", (args["config"]["pat"])), headers={'content-type': 'application/json'})
        if res.status_code == 200:
            message = "Successfully connected to ADO"
        else:
            message = "Connection failed"
        return message
    else:
        hidden_secrets = get_project_hidden_secrets(project.id)
        hidden_secrets["ado"] = dumps(args["config"])
        set_project_hidden_secrets(project.id, hidden_secrets)
        return "ADO settings saved"
def run_task(project_id, event, task_id=None):
    """Queue execution of a project task via Celery.

    Falls back to the project's 'control_tower_id' secret (plain, then
    hidden) when ``task_id`` is not given. Secret placeholders in the task
    definition and event payload are resolved with ``unsecret`` before
    dispatch. Returns an 'Accepted' payload immediately.
    """
    secrets = get_project_secrets(project_id)
    if "control_tower_id" not in secrets:
        # Fall back to the hidden store for the default task id.
        secrets = get_project_hidden_secrets(project_id)
    task_id = task_id if task_id else secrets["control_tower_id"]
    task = Task.query.filter(and_(Task.task_id == task_id)).first().to_json()
    app = run.connect_to_celery(1)
    celery_task = app.signature("tasks.execute",
                                kwargs={"task": unsecret(task, project_id=project_id),
                                        "event": unsecret(event, project_id=project_id)})
    celery_task.apply_async()
    return {"message": "Accepted", "code": 200, "task_id": task_id}
def run_task(project_id, event, task_id=None):
    """Queue execution of a project task via the arbiter/RabbitMQ path.

    Falls back to the project's 'control_tower_id' secret (plain, then
    hidden) when ``task_id`` is not given. Enforces the task execution
    quota, bumps the project's tasks_executions statistic, resolves secret
    placeholders with ``unsecret`` and dispatches an 'execute_lambda' job.
    """
    secrets = get_project_secrets(project_id)
    if "control_tower_id" not in secrets:
        # Fall back to the hidden store for the default task id.
        secrets = get_project_hidden_secrets(project_id)
    task_id = task_id if task_id else secrets["control_tower_id"]
    task = Task.query.filter(and_(Task.task_id == task_id)).first().to_json()
    # May raise/return per quota policy before anything is queued.
    check_tasks_quota(task)
    statistic = Statistic.query.filter(Statistic.project_id == task['project_id']).first()
    # SQL-level increment (column expression) rather than a read-modify-write.
    setattr(statistic, 'tasks_executions', Statistic.tasks_executions + 1)
    statistic.commit()
    arbiter = get_arbiter()
    task_kwargs = {"task": unsecret(task, project_id=project_id),
                   "event": unsecret(event, project_id=project_id),
                   "galloper_url": unsecret("{{secret.galloper_url}}", project_id=task['project_id']),
                   "token": unsecret("{{secret.auth_token}}", project_id=task['project_id'])}
    arbiter.apply("execute_lambda", queue=RABBIT_QUEUE_NAME, task_kwargs=task_kwargs)
    arbiter.close()
    return {"message": "Accepted", "code": 200, "task_id": task_id}
def get_project_queues(project_id):
    """Collect the worker queues visible to a project.

    Returns a dict with three lists:
      - "public":  workers on the shared 'carrier' vhost (root credentials)
      - "project": workers on the project's own vhost (project credentials)
      - "clouds":  names of cloud integrations configured in hidden secrets

    Credentials are taken from plain secrets when present, otherwise from
    hidden secrets (KeyError if absent from both). Broker probe failures
    yield an empty list for that scope; the original used bare ``except:``
    which also swallowed SystemExit/KeyboardInterrupt — narrowed here to
    ``except Exception``.
    """
    secrets = get_project_secrets(project_id)
    hidden_secrets = get_project_hidden_secrets(project_id)
    root_user = secrets["rabbit_user"] if "rabbit_user" in secrets \
        else hidden_secrets["rabbit_user"]
    root_password = secrets["rabbit_password"] if "rabbit_password" in secrets \
        else hidden_secrets["rabbit_password"]
    user = secrets["rabbit_project_user"] if "rabbit_project_user" in secrets \
        else hidden_secrets["rabbit_project_user"]
    password = secrets["rabbit_project_password"] if "rabbit_project_password" in secrets \
        else hidden_secrets["rabbit_project_password"]
    vhost = secrets["rabbit_project_vhost"] if "rabbit_project_vhost" in secrets \
        else hidden_secrets["rabbit_project_vhost"]
    queues = {"public": [], "project": [], "clouds": []}
    # Check public on demand queues
    arbiter = get_arbiter(user=root_user, password=root_password, vhost="carrier")
    try:
        queues["public"] = list(arbiter.workers().keys())
    except Exception:  # broker unreachable / auth failure -> no public queues
        queues["public"] = []
    arbiter.close()
    # Check project on demand queues
    arbiter = get_arbiter(user=user, password=password, vhost=vhost)
    try:
        queues["project"] = list(arbiter.workers().keys())
    except Exception:  # broker unreachable / auth failure -> no project queues
        queues["project"] = []
    arbiter.close()
    # Check project Cloud integrations
    for each in ["aws", "azure_cloud", "gcp", "kubernetes"]:
        if each in hidden_secrets:
            queues["clouds"].append(each)
    return queues
def post(self, project_id: int):
    """Re-run post processing for an existing API test report.

    Builds the post-processor event (with secret placeholders resolved at
    execution time), copies reporting integrations from the owning test
    definition, and dispatches via ``run_task`` using the project's
    'post_processor_id' secret.
    """
    args = self._parser_post.parse_args(strict=False)
    project = Project.get_or_404(project_id)
    report = APIReport.query.filter_by(
        project_id=project_id, id=args.get("report_id")).first().to_json()
    event = {
        # Placeholders are resolved by unsecret() inside run_task.
        "galloper_url": "{{secret.galloper_url}}",
        "project_id": project.id,
        "token": "{{secret.auth_token}}",
        "report_id": args["report_id"],
        "influx_host": "{{secret.influx_ip}}",
        "config_file": "{}",
        # Bucket name: lower-cased report name with spaces/underscores/dashes removed.
        "bucket": str(report["name"]).lower().replace(" ", "").replace("_", "").replace("-", ""),
        "prefix": f'test_results_{uuid4()}_',
    }
    task = PerformanceTests.query.filter(
        and_(PerformanceTests.project_id == project.id,
             PerformanceTests.test_uid == report["test_uid"])).first()
    event["email_recipients"] = task.emails
    # Copy over whichever reporting integrations the test has enabled.
    integration = []
    for each in ["jira", "report_portal", "email", "azure_devops"]:
        if each in task.reporting:
            integration.append(each)
    junit = True if "junit" in task.reporting else False
    event["integration"] = integration
    event["junit"] = junit
    secrets = get_project_secrets(project_id)
    if "post_processor_id" not in secrets:
        # Fall back to the hidden store for the post processor task id.
        secrets = get_project_hidden_secrets(project_id)
    return run_task(project.id, event, secrets["post_processor_id"])
def get(self, project_id):
    """Provision project service tasks ('post_processor', 'control_tower' or
    'all') and seed the project's secret stores.

    The 'all' branch additionally writes backend connection secrets
    (redis/loki/influx/rabbit) from application constants and, when no
    project-level RabbitMQ user exists yet, creates the project's broker
    user and vhost.
    """
    project = Project.get_or_404(project_id)
    args = self.get_parser.parse_args(strict=False)
    # Up-front validation: everything past this point sees one of the three names.
    if args['name'] not in ['post_processor', 'control_tower', 'all']:
        return {"message": "go away", "code": 400}, 400
    secrets = get_project_hidden_secrets(project.id)
    project_secrets = get_project_secrets(project.id)
    if args['name'] == 'post_processor':
        self.create_pp_task(project)
    elif args['name'] == 'control_tower':
        self.create_cc_task(project)
    elif args['name'] == 'all':
        self.create_pp_task(project)
        self.create_cc_task(project)
        # Publicly visible secrets: base URL and project id.
        project_secrets["galloper_url"] = APP_HOST
        project_secrets["project_id"] = project.id
        # Hidden secrets: backend endpoints and credentials.
        secrets["post_processor"] = f"{APP_HOST}/task/{secrets['post_processor_id']}"
        secrets["redis_host"] = APP_IP
        # Loki is reached over plain HTTP internally.
        secrets["loki_host"] = EXTERNAL_LOKI_HOST.replace("https://", "http://")
        secrets["influx_ip"] = APP_IP
        secrets["influx_port"] = INFLUX_PORT
        secrets["influx_user"] = INFLUX_USER
        secrets["influx_password"] = INFLUX_PASSWORD
        secrets["loki_port"] = LOKI_PORT
        secrets["redis_password"] = REDIS_PASSWORD
        secrets["rabbit_host"] = APP_IP
        secrets["rabbit_user"] = RABBIT_USER
        secrets["rabbit_password"] = RABBIT_PASSWORD
        set_project_secrets(project.id, project_secrets)
    else:
        # NOTE(review): unreachable — the guard above already rejected other names.
        return {"message": "go away", "code": 400}, 400
    set_project_hidden_secrets(project.id, secrets)
    # First provisioning: give the project its own RabbitMQ user/vhost.
    if "rabbit_project_user" not in project_secrets and "rabbit_project_user" not in secrets:
        create_project_user_and_vhost(project_id)
    return {"message": "Done", "code": 200}
def create_project_user_and_vhost(project_id):
    """Create a dedicated RabbitMQ user and vhost for the project and store
    the generated credentials in the project's plain secrets.

    Uses the root broker credentials from hidden secrets to talk to the
    RabbitMQ management API.
    """
    secrets = get_project_secrets(project_id)
    hidden_secrets = get_project_hidden_secrets(project_id)
    # connect to RabbitMQ management api
    rabbit_api = AdminAPI(url=f'http://carrier-rabbit:15672',
                          auth=(hidden_secrets["rabbit_user"], hidden_secrets["rabbit_password"]))
    # prepare user credentials
    user = f"rabbit_user_{project_id}"
    password = password_generator()
    vhost = f"project_{project_id}_vhost"
    # create project user and vhost
    rabbit_api.create_vhost(vhost)
    rabbit_api.create_user(user, password)
    rabbit_api.create_user_permission(user, vhost)
    # set project secrets
    secrets["rabbit_project_user"] = user
    secrets["rabbit_project_password"] = password
    secrets["rabbit_project_vhost"] = vhost
    set_project_secrets(project_id, secrets)
def set_grafana_datasources(project_id):
    """Register the project's Influx databases (jmeter/gatling/telegraf) as
    Grafana datasources.

    Credentials and the Grafana API key are resolved from plain secrets
    first, then hidden secrets, defaulting to "". One POST per datasource;
    responses are not checked.
    """
    secrets = get_project_secrets(project_id)
    hidden_secrets = get_project_hidden_secrets(project_id)
    influx_user = secrets.get("influx_user") if "influx_user" in secrets \
        else hidden_secrets.get("influx_user", "")
    influx_password = secrets.get("influx_password") if "influx_password" in secrets else \
        hidden_secrets.get("influx_password", "")
    grafana_api_key = secrets.get("gf_api_key") if "gf_api_key" in secrets \
        else hidden_secrets.get("gf_api_key", "")
    url = f"http://carrier-grafana:3000/grafana/api/datasources"
    headers = {
        "Authorization": f"Bearer {grafana_api_key}",
        "Content-Type": "application/json"
    }
    for each in ["jmeter", "gatling", "telegraf"]:
        # Copy the datasource template so the shared constant is never mutated.
        data = deepcopy(DATASOURCE)
        data["name"] = f"{each}_{project_id}"
        data["database"] = f"{each}_{project_id}"
        data["user"] = influx_user
        data["password"] = influx_password
        post(url, json=data, headers=headers)
def aws_integration(args, project):
    """Test or save AWS integration settings.

    With ``args["test"]`` truthy, performs a DryRun spot-fleet request to
    validate the credentials/roles and returns a status string (a
    'DryRunOperation' error means the credentials are valid). Otherwise
    stores the config in hidden secrets.
    """
    if args["test"]:
        ec2 = boto3.client('ec2',
                           aws_access_key_id=args["config"]["aws_access_key"],
                           aws_secret_access_key=args["config"]["aws_secret_access_key"],
                           region_name=args["config"]["region_name"])
        # Minimal throwaway fleet config; never actually launched (DryRun).
        config = {
            "Type": "request",
            'AllocationStrategy': "lowestPrice",
            "IamFleetRole": args["config"]["iam_fleet_role"],
            "TargetCapacity": 1,
            "SpotPrice": "0.1",
            "TerminateInstancesWithExpiration": True,
            'ValidFrom': datetime(2021, 1, 1),
            'ValidUntil': datetime(2022, 1, 1),
            'LaunchSpecifications': [
                {
                    "ImageId": args["config"]["image_id"],
                    "InstanceType": "t3.medium",
                    "KeyName": "carrier-test",
                    "BlockDeviceMappings": [],
                    "SpotPrice": "0.1",
                    "NetworkInterfaces": []
                }
            ]
        }
        try:
            ec2.request_spot_fleet(DryRun=True, SpotFleetRequestConfig=config)
        except Exception as e:
            # DryRun success surfaces as a 'DryRunOperation' error; anything
            # else is a genuine failure.
            if 'DryRunOperation' not in str(e):
                return f"Failed: {e}"
        return "Connected"
    else:
        hidden_secrets = get_project_hidden_secrets(project.id)
        hidden_secrets["aws"] = dumps(args["config"])
        set_project_hidden_secrets(project.id, hidden_secrets)
        return "AWS settings saved"
def configure_execution_json(self, output="cc", execution=False, thresholds={}):
    """ Create configuration for execution

    Builds one of three artefacts for a DAST run, selected by ``output``:
      - "dusty":  full dusty suite config (scanners, reporters, processing)
      - "docker": a copy-paste docker command string for control_tower
      - "cc":     the control-tower execution JSON (default)
    Returns "" for any other value.

    NOTE(review): mutable default ``thresholds={}`` — it is only read here,
    but a ``None`` default would be safer; confirm no caller relies on it.
    """
    #
    if output == "dusty":
        #
        global_dast_settings = dict()
        global_dast_settings["max_concurrent_scanners"] = 1
        if "toolreports" in self.dast_settings.get("reporters_checked", list()):
            # Tool reports need raw scanner output preserved on disk.
            global_dast_settings["save_intermediates_to"] = "/tmp/intermediates"
        #
        # One entry per scanner ticked in the UI; all target the same URL.
        scanners_config = dict()
        if "zap" in self.dast_settings.get("scanners_checked", list()):
            scanners_config["zap"] = {
                "scan_types": "all",
                "target": self.dast_settings.get("dast_target_url"),
            }
        if "w3af" in self.dast_settings.get("scanners_checked", list()):
            scanners_config["w3af"] = {
                "target": self.dast_settings.get("dast_target_url"),
            }
        if "nikto" in self.dast_settings.get("scanners_checked", list()):
            scanners_config["nikto"] = {
                "target": self.dast_settings.get("dast_target_url"),
            }
        if "nmap" in self.dast_settings.get("scanners_checked", list()):
            scanners_config["nmap"] = {
                "target": self.dast_settings.get("dast_target_url"),
            }
        if "masscan" in self.dast_settings.get("scanners_checked", list()):
            scanners_config["masscan"] = {
                "target": self.dast_settings.get("dast_target_url"),
            }
        if "sslyze" in self.dast_settings.get("scanners_checked", list()):
            scanners_config["sslyze"] = {
                "target": self.dast_settings.get("dast_target_url"),
            }
        if "aemhacker" in self.dast_settings.get("scanners_checked", list()):
            scanners_config["aemhacker"] = {
                "target": self.dast_settings.get("dast_target_url"),
            }
        #
        reporters_config = dict()
        # Galloper reporter is always on; placeholders resolved via unsecret.
        reporters_config["galloper"] = {
            "url": unsecret("{{secret.galloper_url}}", project_id=self.project_id),
            "project_id": f"{self.project_id}",
            "token": unsecret("{{secret.auth_token}}", project_id=self.project_id),
        }
        if "toolreports" in self.dast_settings.get("reporters_checked", list()):
            reporters_config["galloper_tool_reports"] = {
                "bucket": "dast",
                "object": f"{self.test_uid}_tool_reports.zip",
                "source": "/tmp/intermediates",
            }
        if "quality" in self.dast_settings.get("reporters_checked", list()):
            reporters_config["galloper_junit_report"] = {
                "bucket": "dast",
                "object": f"{self.test_uid}_junit_report.xml",
            }
            reporters_config["galloper_quality_gate_report"] = {
                "bucket": "dast",
                "object": f"{self.test_uid}_quality_gate_report.json",
            }
            reporters_config["junit"] = {
                "file": "/tmp/{project_name}_{testing_type}_{build_id}_report.xml",
            }
        #
        if "jira" in self.dast_settings.get("reporters_checked", list()):
            project_secrets = get_project_hidden_secrets(self.project_id)
            if "jira" in project_secrets:
                jira_settings = loads(project_secrets["jira"])
                reporters_config["jira"] = {
                    "url": jira_settings["jira_url"],
                    "username": jira_settings["jira_login"],
                    "password": jira_settings["jira_password"],
                    "project": jira_settings["jira_project"],
                    "fields": {
                        "Issue Type": jira_settings["issue_type"],
                    }
                }
        #
        if "email" in self.dast_settings.get("reporters_checked", list()):
            project_secrets = get_project_hidden_secrets(self.project_id)
            if "smtp" in project_secrets:
                email_settings = loads(project_secrets["smtp"])
                reporters_config["email"] = {
                    "server": email_settings["smtp_host"],
                    "port": email_settings["smtp_port"],
                    "login": email_settings["smtp_user"],
                    "password": email_settings["smtp_password"],
                    "mail_to": self.dast_settings.get("email_recipients", ""),
                }
                reporters_config["html"] = {
                    "file": "/tmp/{project_name}_{testing_type}_{build_id}_report.html",
                }
        #
        if "ado" in self.dast_settings.get("reporters_checked", list()):
            project_secrets = get_project_hidden_secrets(self.project_id)
            if "ado" in project_secrets:
                reporters_config["azure_devops"] = loads(project_secrets["ado"])
        # Thresholds: keep only non-negative entries, capitalized for dusty.
        tholds = {}
        if thresholds and any(int(thresholds[key]) > -1 for key in thresholds.keys()):
            for key, value in thresholds.items():
                if int(value) > -1:
                    tholds[key.capitalize()] = int(value)
        #
        dusty_config = {
            "config_version": 2,
            "suites": {
                "dast": {
                    "settings": {
                        "project_name": self.dast_settings.get("project_name"),
                        "project_description": self.name,
                        "environment_name": "target",
                        "testing_type": "DAST",
                        "scan_type": "full",
                        "build_id": self.test_uid,
                        "dast": global_dast_settings
                    },
                    "scanners": {
                        "dast": scanners_config
                    },
                    "processing": {
                        "min_severity_filter": {
                            "severity": "Info"
                        },
                        "quality_gate": {
                            "thresholds": tholds
                        },
                        "false_positive": {
                            "galloper": unsecret("{{secret.galloper_url}}", project_id=self.project_id),
                            "project_id": f"{self.project_id}",
                            "token": unsecret("{{secret.auth_token}}", project_id=self.project_id)
                        },
                        "ignore_finding": {
                            "galloper": unsecret("{{secret.galloper_url}}", project_id=self.project_id),
                            "project_id": f"{self.project_id}",
                            "token": unsecret("{{secret.auth_token}}", project_id=self.project_id)
                        }
                    },
                    "reporters": reporters_config
                }
            }
        }
        #
        return dusty_config
    #
    job_type = "dast"
    container = f"getcarrier/{job_type}:{CURRENT_RELEASE}"
    parameters = {
        "cmd": f"run -b galloper:{job_type}_{self.test_uid} -s {job_type}",
        "GALLOPER_URL": unsecret("{{secret.galloper_url}}", project_id=self.project_id),
        "GALLOPER_PROJECT_ID": f"{self.project_id}",
        "GALLOPER_AUTH_TOKEN": unsecret("{{secret.auth_token}}", project_id=self.project_id),
    }
    cc_env_vars = {
        "REDIS_HOST": unsecret("{{secret.redis_host}}", project_id=self.project_id),
        "REDIS_PASSWORD": unsecret("{{secret.redis_password}}", project_id=self.project_id),
    }
    concurrency = 1
    #
    if output == "docker":
        # NOTE(review): uses control_tower:latest while the SAST counterpart
        # pins CURRENT_RELEASE — confirm whether this is intentional.
        return f"docker run --rm -i -t " \
               f"-e project_id={self.project_id} " \
               f"-e galloper_url={unsecret('{{secret.galloper_url}}', project_id=self.project_id)} " \
               f"-e token=\"{unsecret('{{secret.auth_token}}', project_id=self.project_id)}\" " \
               f"getcarrier/control_tower:latest " \
               f"-tid {self.test_uid}"
    if output == "cc":
        execution_json = {
            "job_name": self.name,
            "job_type": job_type,
            "concurrency": concurrency,
            "container": container,
            "execution_params": dumps(parameters),
            "cc_env_vars": cc_env_vars,
        }
        if "quality" in self.dast_settings.get("reporters_checked", list()):
            execution_json["quality_gate"] = "True"
        return execution_json
    #
    return ""
def configure_execution_json(self, output="cc", execution=False, thresholds={}):
    """ Create configuration for execution

    Builds one of three artefacts for a SAST run, selected by ``output``:
      - "dusty":  full dusty suite config (actions, scanners, reporters)
      - "docker": a copy-paste docker command string for control_tower
      - "cc":     the control-tower execution JSON (default)
    Returns "" for any other value.

    NOTE(review): mutable default ``thresholds={}`` — it is only read here,
    but a ``None`` default would be safer; confirm no caller relies on it.
    """
    #
    if output == "dusty":
        #
        global_sast_settings = dict()
        global_sast_settings["max_concurrent_scanners"] = 1
        if "toolreports" in self.sast_settings.get("reporters_checked", list()):
            # Tool reports need raw scanner output preserved on disk.
            global_sast_settings["save_intermediates_to"] = "/tmp/intermediates"
        #
        # How the code-under-test reaches /tmp/code: git clone or artifact pull.
        actions_config = dict()
        if self.sast_settings.get("sast_target_type") == "target_git":
            git_url = self.sast_settings.get("sast_target_repo")
            branch = "master"
            # Slicing off the first 5 chars skips the URL scheme/user prefix so
            # a trailing "@branch" suffix can be detected — presumably to avoid
            # matching "git@" in ssh URLs; TODO confirm for all URL forms.
            if "@" in git_url[5:]:
                branch = git_url[5:].split("@")[1]
                git_url = git_url.replace(f"@{branch}", "")
            actions_config["git_clone"] = {
                "source": git_url,
                "branch": branch,
                "target": "/tmp/code"
            }
            if self.sast_settings.get("sast_target_repo_user") != "":
                actions_config["git_clone"]["username"] = unsecret(
                    self.sast_settings.get("sast_target_repo_user"), project_id=self.project_id)
            if self.sast_settings.get("sast_target_repo_pass") != "":
                actions_config["git_clone"]["password"] = unsecret(
                    self.sast_settings.get("sast_target_repo_pass"), project_id=self.project_id)
            if self.sast_settings.get("sast_target_repo_key") != "":
                actions_config["git_clone"]["key_data"] = unsecret(
                    self.sast_settings.get("sast_target_repo_key"), project_id=self.project_id)
        if self.sast_settings.get("sast_target_type") == "target_galloper_artifact":
            actions_config["galloper_artifact"] = {
                "bucket": self.sast_settings.get("sast_target_artifact_bucket"),
                "object": self.sast_settings.get("sast_target_artifact"),
                "target": "/tmp/code",
                "delete": False
            }
        if self.sast_settings.get("sast_target_type") == "target_code_path":
            # Local-path runs upload a temp zip named after the test uid;
            # delete it after extraction.
            actions_config["galloper_artifact"] = {
                "bucket": "sast",
                "object": f"{self.test_uid}.zip",
                "target": "/tmp/code",
                "delete": True
            }
        #
        # Language scanner always on; composition/secret scanners are opt-in.
        scanners_config = dict()
        scanners_config[self.sast_settings.get("language")] = {
            "code": "/tmp/code"
        }
        if "composition" in self.sast_settings.get("options_checked", list()):
            scanners_config["dependencycheck"] = {
                "comp_path": "/tmp/code",
                "comp_opts": "--enableExperimental"
            }
        if "secretscan" in self.sast_settings.get("options_checked", list()):
            scanners_config["gitleaks"] = {
                "code": "/tmp/code"
            }
        #
        reporters_config = dict()
        # Galloper reporter is always on; placeholders resolved via unsecret.
        reporters_config["galloper"] = {
            "url": unsecret("{{secret.galloper_url}}", project_id=self.project_id),
            "project_id": f"{self.project_id}",
            "token": unsecret("{{secret.auth_token}}", project_id=self.project_id),
        }
        if "toolreports" in self.sast_settings.get("reporters_checked", list()):
            reporters_config["galloper_tool_reports"] = {
                "bucket": "sast",
                "object": f"{self.test_uid}_tool_reports.zip",
                "source": "/tmp/intermediates",
            }
        if "quality" in self.sast_settings.get("reporters_checked", list()):
            reporters_config["galloper_junit_report"] = {
                "bucket": "sast",
                "object": f"{self.test_uid}_junit_report.xml",
            }
            reporters_config["galloper_quality_gate_report"] = {
                "bucket": "sast",
                "object": f"{self.test_uid}_quality_gate_report.json",
            }
            reporters_config["junit"] = {
                "file": "/tmp/{project_name}_{testing_type}_{build_id}_report.xml",
            }
        #
        if "jira" in self.sast_settings.get("reporters_checked", list()):
            project_secrets = get_project_hidden_secrets(self.project_id)
            if "jira" in project_secrets:
                jira_settings = loads(project_secrets["jira"])
                reporters_config["jira"] = {
                    "url": jira_settings["jira_url"],
                    "username": jira_settings["jira_login"],
                    "password": jira_settings["jira_password"],
                    "project": jira_settings["jira_project"],
                    "fields": {
                        "Issue Type": jira_settings["issue_type"],
                    }
                }
        #
        if "email" in self.sast_settings.get("reporters_checked", list()):
            project_secrets = get_project_hidden_secrets(self.project_id)
            if "smtp" in project_secrets:
                email_settings = loads(project_secrets["smtp"])
                reporters_config["email"] = {
                    "server": email_settings["smtp_host"],
                    "port": email_settings["smtp_port"],
                    "login": email_settings["smtp_user"],
                    "password": email_settings["smtp_password"],
                    "mail_to": self.sast_settings.get("email_recipients", ""),
                }
                reporters_config["html"] = {
                    "file": "/tmp/{project_name}_{testing_type}_{build_id}_report.html",
                }
        #
        if "ado" in self.sast_settings.get("reporters_checked", list()):
            project_secrets = get_project_hidden_secrets(self.project_id)
            if "ado" in project_secrets:
                reporters_config["azure_devops"] = loads(project_secrets["ado"])
        #
        if "rp" in self.sast_settings.get("reporters_checked", list()):
            project_secrets = get_project_hidden_secrets(self.project_id)
            if "rp" in project_secrets:
                rp = loads(project_secrets.get("rp"))
                reporters_config["reportportal"] = {
                    "rp_host": rp["rp_host"],
                    "rp_token": rp["rp_token"],
                    "rp_project_name": rp["rp_project"],
                    "rp_launch_name": "sast"
                }
        # Thresholds: keep only non-negative entries, capitalized for dusty.
        tholds = {}
        if thresholds and any(int(thresholds[key]) > -1 for key in thresholds.keys()):
            for key, value in thresholds.items():
                if int(value) > -1:
                    tholds[key.capitalize()] = int(value)
        #
        dusty_config = {
            "config_version": 2,
            "suites": {
                "sast": {
                    "settings": {
                        "project_name": self.sast_settings.get("project_name"),
                        "project_description": self.name,
                        "environment_name": "target",
                        "testing_type": "SAST",
                        "scan_type": "full",
                        "build_id": self.test_uid,
                        "sast": global_sast_settings
                    },
                    "actions": actions_config,
                    "scanners": {
                        "sast": scanners_config
                    },
                    "processing": {
                        "min_severity_filter": {
                            "severity": "Info"
                        },
                        "quality_gate": {
                            "thresholds": tholds
                        },
                        "false_positive": {
                            "galloper": unsecret("{{secret.galloper_url}}", project_id=self.project_id),
                            "project_id": f"{self.project_id}",
                            "token": unsecret("{{secret.auth_token}}", project_id=self.project_id)
                        },
                        "ignore_finding": {
                            "galloper": unsecret("{{secret.galloper_url}}", project_id=self.project_id),
                            "project_id": f"{self.project_id}",
                            "token": unsecret("{{secret.auth_token}}", project_id=self.project_id)
                        }
                    },
                    "reporters": reporters_config
                }
            }
        }
        #
        return dusty_config
    #
    job_type = "sast"
    container = f"getcarrier/{job_type}:{CURRENT_RELEASE}"
    parameters = {
        "cmd": f"run -b galloper:{job_type}_{self.test_uid} -s {job_type}",
        "GALLOPER_URL": unsecret("{{secret.galloper_url}}", project_id=self.project_id),
        "GALLOPER_PROJECT_ID": f"{self.project_id}",
        "GALLOPER_AUTH_TOKEN": unsecret("{{secret.auth_token}}", project_id=self.project_id),
    }
    if self.sast_settings.get("sast_target_type") == "target_code_path":
        parameters["code_path"] = self.sast_settings.get("sast_target_code")
    # Pick broker credentials depending on whether the selected region is a
    # shared (public) queue or a project-private one.
    project_queues = get_project_queues(project_id=self.project_id)
    if self.region in project_queues["public"]:
        cc_env_vars = {
            "RABBIT_HOST": unsecret("{{secret.rabbit_host}}", project_id=self.project_id),
            "RABBIT_USER": unsecret("{{secret.rabbit_user}}", project_id=self.project_id),
            "RABBIT_PASSWORD": unsecret("{{secret.rabbit_password}}", project_id=self.project_id),
            "RABBIT_VHOST": "carrier"
        }
    else:
        cc_env_vars = {
            "RABBIT_HOST": unsecret("{{secret.rabbit_host}}", project_id=self.project_id),
            "RABBIT_USER": unsecret("{{secret.rabbit_project_user}}", project_id=self.project_id),
            "RABBIT_PASSWORD": unsecret("{{secret.rabbit_project_password}}", project_id=self.project_id),
            "RABBIT_VHOST": unsecret("{{secret.rabbit_project_vhost}}", project_id=self.project_id)
        }
    concurrency = 1
    #
    if output == "docker":
        docker_run = f"docker run --rm -i -t"
        if self.sast_settings.get("sast_target_type") == "target_code_path":
            # Local-path runs mount the code directory into the container.
            docker_run = f"docker run --rm -i -t -v \"{self.sast_settings.get('sast_target_code')}:/code\""
        return f"{docker_run} " \
               f"-e project_id={self.project_id} " \
               f"-e galloper_url={unsecret('{{secret.galloper_url}}', project_id=self.project_id)} " \
               f"-e token=\"{unsecret('{{secret.auth_token}}', project_id=self.project_id)}\" " \
               f"getcarrier/control_tower:{CURRENT_RELEASE} " \
               f"-tid {self.test_uid}"
    if output == "cc":
        execution_json = {
            "job_name": self.name,
            "job_type": job_type,
            "concurrency": concurrency,
            "container": container,
            "execution_params": dumps(parameters),
            "cc_env_vars": cc_env_vars,
            "channel": self.region
        }
        if "quality" in self.sast_settings.get("reporters_checked", list()):
            execution_json["quality_gate"] = "True"
        return execution_json
    #
    return ""