def delete(self, project_id: int) -> Tuple[dict, int]:
    """Unselect *project_id* if it is the project active in the session.

    Returns a JSON-serializable message and HTTP 200 in all cases;
    a 404 is raised by ``get_or_404`` when the project does not exist.
    """
    project = Project.get_or_404(project_id)
    # Only clear the session slot when this project is the selected one
    if project.id == SessionProject.get():
        SessionProject.pop()
    return {
        "message": f"Project with id {project.id} was successfully unselected"
    }, 200
def post(self, project_id: Optional[int] = None) -> Tuple[dict, int]:
    """Store the requesting user in the session and, when *project_id*
    is supplied, also mark that project as selected.

    Returns a confirmation message and HTTP 200.
    """
    args = self._parser_post.parse_args()
    SessionUser.set(
        dict(username=args["username"], groups=args.get("groups")))
    # No project requested: only the user session was configured
    if not project_id:
        return {"message": "user session configured"}, 200
    project = Project.get_or_404(project_id)
    SessionProject.set(project.id)
    return {
        "message": f"Project with id {project.id} was successfully selected"
    }, 200
def get(self, project_id: Optional[int] = None) -> Tuple[dict, int]:
    """Return the requested project as JSON.

    Falls back to the project selected in the session when *project_id*
    is not given; answers 404 when neither source yields a project id.
    """
    target_id = project_id or SessionProject.get()
    if not target_id:
        return {"message": "No project selected in session"}, 404
    project = Project.get_or_404(target_id)
    return project.to_json(
        exclude_fields=Project.API_EXCLUDE_FIELDS), 200
def post(self, project_id: Optional[int] = None) -> Tuple[dict, int]:
    """Create a new project with its quota, statistics row, service tasks
    (post processor and control tower) and per-project secrets.

    Fixes over the previous revision:
    * ``False if x == "disabled" else True`` ternaries replaced with the
      direct ``!= "disabled"`` comparison (same truth table).
    * ``getattr(project_quota, "custom")`` replaced with a plain attribute
      access — the name is a literal, no reflection is needed.
    * bare ``except:`` around Vault initialization narrowed to
      ``except Exception`` so system-exiting exceptions still propagate.
    * spurious ``f`` prefix dropped from the placeholder-free return message.

    Args:
        project_id: unused for creation; kept for route-signature parity.

    Returns:
        A JSON-serializable message and HTTP status 200.
    """
    data = self._parser_post.parse_args()
    name_ = data["name"]
    owner_ = data["owner"]
    package = data["package"].lower()
    # A feature is enabled unless explicitly set to "disabled"
    dast_enabled_ = data["dast_enabled"] != "disabled"
    sast_enabled_ = data["sast_enabled"] != "disabled"
    performance_enabled_ = data["performance_enabled"] != "disabled"
    perf_tests_limit = data["perf_tests_limit"]
    ui_perf_tests_limit = data["ui_perf_tests_limit"]
    sast_scans_limit = data["sast_scans_limit"]
    dast_scans_limit = data["dast_scans_limit"]
    tasks_count_limit = data["tasks_count_limit"]
    task_executions_limit = data["task_executions_limit"]
    storage_space_limit = data["storage_space_limit"]
    data_retention_limit = data["data_retention_limit"]

    project = Project(name=name_,
                      dast_enabled=dast_enabled_,
                      project_owner=owner_,
                      sast_enabled=sast_enabled_,
                      performance_enabled=performance_enabled_,
                      package=package)
    project_secrets = {}
    project_hidden_secrets = {}
    project.insert()
    # The freshly created project becomes the session's selected project
    SessionProject.set(project.id)

    # Apply the quota preset; "custom" takes explicit limits, any other
    # package name is a preset function on project_quota taking only the id
    if package == "custom":
        project_quota.custom(project.id, perf_tests_limit,
                             ui_perf_tests_limit, sast_scans_limit,
                             dast_scans_limit, -1, storage_space_limit,
                             data_retention_limit, tasks_count_limit,
                             task_executions_limit)
    else:
        getattr(project_quota, package)(project.id)

    # NOTE(review): datetime.utcnow() kept for backward-compatible string
    # format; it is naive UTC — confirm before migrating to aware datetimes
    statistic = Statistic(project_id=project.id,
                          start_time=str(datetime.utcnow()),
                          performance_test_runs=0,
                          sast_scans=0,
                          dast_scans=0,
                          ui_performance_test_runs=0,
                          public_pool_workers=0,
                          tasks_executions=0)
    statistic.insert()

    # Service task 1: post processor lambda
    pp_args = {
        "funcname": "post_processor",
        "invoke_func": "lambda_function.lambda_handler",
        "runtime": "Python 3.7",
        "env_vars": dumps({
            "jmeter_db": "{{secret.jmeter_db}}",
            "gatling_db": "{{secret.gatling_db}}",
            "comparison_db": "{{secret.comparison_db}}",
            "AWS_LAMBDA_FUNCTION_TIMEOUT": 900
        })
    }
    pp = create_task(project, File(POST_PROCESSOR_PATH), pp_args)

    # Service task 2: control tower lambda
    cc_args = {
        "funcname": "control_tower",
        "invoke_func": "lambda.handler",
        "runtime": "Python 3.7",
        "env_vars": dumps({
            "token": "{{secret.auth_token}}",
            "galloper_url": "{{secret.galloper_url}}",
            "GALLOPER_WEB_HOOK": '{{secret.post_processor}}',
            "project_id": '{{secret.project_id}}',
            "loki_host": '{{secret.loki_host}}',
            "AWS_LAMBDA_FUNCTION_TIMEOUT": 900
        })
    }
    cc = create_task(project, File(CONTROL_TOWER_PATH), cc_args)

    # Plain secrets are user-visible; hidden secrets are infrastructure-only
    project_secrets["galloper_url"] = APP_HOST
    project_secrets["project_id"] = project.id
    project_hidden_secrets["post_processor"] = f'{APP_HOST}{pp.webhook}'
    project_hidden_secrets["post_processor_id"] = pp.task_id
    project_hidden_secrets["redis_host"] = APP_IP
    # Loki is reached over plain http from inside the cluster
    project_hidden_secrets["loki_host"] = EXTERNAL_LOKI_HOST.replace(
        "https://", "http://")
    project_hidden_secrets["influx_ip"] = APP_IP
    project_hidden_secrets["influx_port"] = INFLUX_PORT
    project_hidden_secrets["loki_port"] = LOKI_PORT
    project_hidden_secrets["redis_password"] = REDIS_PASSWORD
    project_hidden_secrets["rabbit_host"] = APP_IP
    project_hidden_secrets["rabbit_user"] = RABBIT_USER
    project_hidden_secrets["rabbit_password"] = RABBIT_PASSWORD
    project_hidden_secrets["control_tower_id"] = cc.task_id
    project_hidden_secrets["influx_user"] = INFLUX_USER
    project_hidden_secrets["influx_password"] = INFLUX_PASSWORD
    project_hidden_secrets["jmeter_db"] = f'jmeter_{project.id}'
    project_hidden_secrets["gatling_db"] = f'gatling_{project.id}'
    project_hidden_secrets["comparison_db"] = f'comparison_{project.id}'
    project_hidden_secrets["telegraf_db"] = f'telegraf_{project.id}'
    project_hidden_secrets["gf_api_key"] = GF_API_KEY

    # Vault is optional: fall back to empty credentials when unreachable
    project_vault_data = {"auth_role_id": "", "auth_secret_id": ""}
    try:
        project_vault_data = initialize_project_space(project.id)
    except Exception:
        current_app.logger.warning("Vault is not configured")
    project.secrets_json = {
        "vault_auth_role_id": project_vault_data["auth_role_id"],
        "vault_auth_secret_id": project_vault_data["auth_secret_id"],
    }
    project.commit()

    set_project_secrets(project.id, project_secrets)
    set_project_hidden_secrets(project.id, project_hidden_secrets)
    create_project_user_and_vhost(project.id)
    create_project_databases(project.id)
    set_grafana_datasources(project.id)
    return {"message": "Project was successfully created"}, 200
def apply_full_delete_by_pk(cls, pk: int) -> None:
    """Delete a project and every artifact attached to it.

    Removes, in one DB transaction: the project row, task/security/API/UI
    results and reports, performance tests, statistics and quotas. After a
    successful commit it also removes the project's MinIO buckets, its
    Influx test data, and any docker volumes named after its task ids.
    Finally, the project is unselected from the session if it was selected.

    Fix over the previous revision: the except clause listed eight psycopg2
    exception classes that all inherit from ``psycopg2.Error`` — catching
    the base class alone is equivalent and clearer.

    Args:
        pk: primary key of the project; 404s when it does not exist.
    """
    # Local imports keep heavy/cyclic dependencies out of module import time
    import docker
    import psycopg2
    from galloper.processors.minio import MinioClient
    from galloper.dal.influx_results import delete_test_data
    from galloper.database.models.task_results import Results
    from galloper.database.models.task import Task
    from galloper.database.models.security_results import SecurityResults
    from galloper.database.models.security_reports import SecurityReport
    from galloper.database.models.security_details import SecurityDetails
    from galloper.database.models.api_reports import APIReport
    from galloper.database.models.api_release import APIRelease
    from galloper.database.models.performance_tests import PerformanceTests
    from galloper.database.models.ui_report import UIReport
    from galloper.database.models.ui_result import UIResult
    from galloper.database.models.statistic import Statistic
    from galloper.database.models.project_quota import ProjectQuota

    _logger = logging.getLogger(cls.__name__.lower())
    _logger.info("Start deleting entire project within transaction")

    project = cls.query.get_or_404(pk)
    minio_client = MinioClient(project=project)
    docker_client = docker.from_env()
    # Collect bucket names before the project row disappears
    buckets_for_removal = minio_client.list_bucket()

    db_session.query(Project).filter_by(id=pk).delete()
    for model_class in (Results, SecurityResults, SecurityReport,
                        SecurityDetails, APIRelease):
        db_session.query(model_class).filter_by(project_id=pk).delete()

    # Remember what to purge from Influx after the DB commit succeeds
    influx_result_data = []
    for api_report in APIReport.query.filter_by(project_id=pk).all():
        influx_result_data.append(
            (api_report.build_id, api_report.name, api_report.lg_type))
        api_report.delete(commit=False)

    # Remember task ids so their docker volumes can be removed afterwards
    task_ids = []
    for task in Task.query.filter_by(project_id=pk).all():
        task_ids.append(task.task_id)
        task.delete(commit=False)

    for test in PerformanceTests.query.filter_by(project_id=pk).all():
        test.delete(commit=False)
    for result in UIResult.query.filter_by(project_id=pk).all():
        result.delete(commit=False)
    for result in UIReport.query.filter_by(project_id=pk).all():
        result.delete(commit=False)
    for stats in Statistic.query.filter_by(project_id=pk).all():
        stats.delete(commit=False)
    for quota in ProjectQuota.query.filter_by(project_id=pk).all():
        quota.delete(commit=False)

    try:
        db_session.flush()
    except psycopg2.Error as exc:
        # Any database failure rolls the whole deletion back;
        # external resources (MinIO/Influx/docker) are left untouched
        db_session.rollback()
        _logger.error(str(exc))
    else:
        db_session.commit()
        # External cleanup only after the transaction is safely committed
        for bucket in buckets_for_removal:
            minio_client.remove_bucket(bucket=bucket)
        for influx_item_data in influx_result_data:
            delete_test_data(*influx_item_data)
        for task_id in task_ids:
            try:
                volume = docker_client.volumes.get(task_id)
            except docker.errors.NotFound as docker_exc:
                # Missing volume is fine — the task may never have run
                _logger.info(str(docker_exc))
            else:
                volume.remove(force=True)
        _logger.info("Project successfully deleted!")

    # Drop the project from the session if it was the selected one
    selected_project_id = SessionProject.get()
    if pk == selected_project_id:
        SessionProject.pop()
def used_in_session(self):
    """Return ``True`` when this project is the one selected in the session."""
    return SessionProject.get() == self.id