def recalculate(self, project, results):
    """Re-extract browser performance metrics for each result from its
    archived browsertime report in MinIO and persist the updated values.

    :param project: project providing MinIO credentials
    :param results: iterable of result records with a ``file_name`` attribute
    :return: ``{"message": "Done"}`` regardless of per-result failures
    """
    minio = MinioClient(project=project)
    for result in results:
        # Archives are keyed by the trailing token of the report file name,
        # e.g. "report_<browsertime>.html" -> "<browsertime>.zip".
        browsertime = result.file_name.replace('.html', '').split('_')[-1]
        try:
            fobj = minio.download_file('reports', f'{browsertime}.zip')
            report = zipfile.ZipFile(io.BytesIO(fobj))
            data = loads(report.read(f'{browsertime}.json'))[0]
            result.fcp = data["browserScripts"][0]["timings"][
                "paintTiming"]["first-contentful-paint"]
            result.lcp = data["browserScripts"][0]["timings"][
                "largestContentfulPaint"]["renderTime"]
            result.cls = round(
                data["browserScripts"][0]['pageinfo']['layoutShift'], 3)
            result.tbt = data["cpu"][0]["longTasks"]["totalBlockingTime"]
            result.fvc = data["visualMetrics"][0]["FirstVisualChange"]
            result.lvc = data["visualMetrics"][0]["LastVisualChange"]
            result.browser_time = data
            result.commit()
        except Exception:  # was a bare except: don't mask SystemExit/KeyboardInterrupt
            current_app.logger.error(format_exc())
            current_app.logger.error(
                f"Bucket: reports File: {browsertime}.zip")
            continue
    return {"message": "Done"}
def insert(self):
    """Persist this record and ensure the project's "dast" bucket exists.

    Generates a ``test_uid`` on first insert when one was not supplied.
    """
    if not self.test_uid:
        self.test_uid = str(uuid4())
    super().insert()
    # Bug fix: this lookup was commented out while `project` was still used
    # below, which raised NameError at runtime.
    project = Project.query.get_or_404(self.project_id)
    minio_client = MinioClient(project=project)
    minio_client.create_bucket(bucket="dast")
def index(project: Project):
    """Render the artifact browser for one of the project's MinIO buckets.

    Redirects to the first available bucket when the requested one ("q"
    query parameter) is missing or unknown.
    """
    bucket_name = request.args.get("q", None)
    minio_client = MinioClient(project=project)
    buckets_list = minio_client.list_bucket()
    # Bug fix: a project with no buckets raised IndexError on
    # buckets_list[0]; render an empty listing instead.
    if not buckets_list:
        return render_template("artifacts/files.html",
                               files=[], buckets=[], bucket=None)
    if not bucket_name or bucket_name not in buckets_list:
        return redirect(url_for("artifacts.index", q=buckets_list[0]))
    return render_template("artifacts/files.html",
                           files=minio_client.list_files(bucket_name),
                           buckets=buckets_list,
                           bucket=bucket_name)
def to_json(self, exclude_fields: tuple = ()) -> dict:
    """Serialize the record, enriched with total MinIO storage usage
    (in MB, rounded to 2 decimals) and the project's task count.
    """
    # NOTE(review): `exclude_fields` is accepted but never forwarded to
    # super().to_json() — confirm whether that is intentional.
    json_dict = super().to_json()
    project = Project.query.get_or_404(json_dict["project_id"])
    minio_client = MinioClient(project=project)
    # Sum the size of every file across every bucket of the project.
    total_bytes = sum(
        file_info["size"]
        for bucket in minio_client.list_bucket()
        for file_info in minio_client.list_files(bucket)
    )
    json_dict["storage_space"] = round(total_bytes / 1000000, 2)
    json_dict["tasks_count"] = len(
        Task.query.filter_by(project_id=json_dict["project_id"]).all())
    return json_dict
def upload_file(bucket, f, project, create_if_not_exists=True):
    """Upload a file object to the project's MinIO bucket, enforcing the
    project's storage-space quota.

    :param bucket: target bucket name
    :param f: file-like object exposing ``filename`` (werkzeug FileStorage)
    :param project: project providing MinIO credentials and quotas
    :param create_if_not_exists: create the bucket when it is missing
    :raises Forbidden: when the upload would exceed the storage quota
    """
    name = f.filename
    content = f.read()
    # Bug fix: the payload size is simply len(content). The old
    # seek/tell dance plus `f.remove()` (no such method on file objects,
    # hidden by a bare `except: pass`) is gone.
    file_size = len(content)
    storage_space_quota = project.get_storage_space_quota()
    statistic = Statistic.query.filter_by(project_id=project.id).first().to_json()
    # A quota of -1 means unlimited.
    # NOTE(review): assumes statistic['storage_space'] is in bytes while the
    # quota is in MB — confirm against Statistic.to_json().
    if storage_space_quota != -1 and statistic['storage_space'] + file_size > storage_space_quota * 1000000:
        raise Forbidden(description="The storage space limit allowed in the project has been exceeded")
    # Reuse one client instead of instantiating MinioClient three times.
    minio_client = MinioClient(project=project)
    if create_if_not_exists and bucket not in minio_client.list_bucket():
        minio_client.create_bucket(bucket)
    minio_client.upload_file(bucket, content, name)
def upload_file(bucket, f, project, create_if_not_exists=False):
    """Upload a file object to the project's MinIO bucket, enforcing the
    project's storage-space quota; creates the bucket on a 404 response.

    :param bucket: target bucket name
    :param f: file-like object exposing ``filename`` (werkzeug FileStorage)
    :param project: project providing MinIO credentials and quotas
    :param create_if_not_exists: TODO(review) — currently unused: a missing
        bucket is always created on 404. Honoring it would change behavior
        for callers relying on the current auto-create; confirm intent.
    :raises Forbidden: when the upload would exceed the storage quota
    :raises ClientError: for any MinIO failure other than a missing bucket
    """
    name = f.filename
    content = f.read()
    file_size = len(content)  # simpler than the previous seek/tell pair
    storage_space_quota = project.get_storage_space_quota()
    statistic = Statistic.query.filter_by(
        project_id=project.id).first().to_json()
    # A quota of -1 means unlimited.
    if storage_space_quota != -1 and statistic[
            'storage_space'] + file_size > storage_space_quota * 1000000:
        raise Forbidden(
            description=
            "The storage space limit allowed in the project has been exceeded")
    minio_client = MinioClient(project=project)
    try:
        minio_client.upload_file(bucket, content, name)
    except ClientError as err:
        if err.response["ResponseMetadata"]["HTTPStatusCode"] == 404:
            # Bucket does not exist yet: create it and retry once.
            minio_client.create_bucket(bucket)
            minio_client.upload_file(bucket, content, name)
        else:
            # Bug fix: non-404 errors used to be silently swallowed,
            # making failed uploads look successful.
            raise
def run_test(project: Project):
    """Run a one-shot UI performance test against a URL submitted via the
    request form, record a video through the perfui listener, and store
    both the video and the generated HTML report in the project's
    "reports" bucket.

    :return: 302 redirect to the observer index pointing at the new report
    """
    current_app.logger.info(request.form)
    url = request.form.get("url")
    remote_driver_address = request.form.get("remote_address", "127.0.0.1:4444")
    listener_address = request.form.get("perfui_listener", "127.0.0.1:9999")
    from selenium import webdriver
    from requests import get
    # Bug fix: `sleep` is used in the readiness poll below but was never
    # imported here.
    from time import time, sleep
    start_time = time()
    chrome_options = webdriver.ChromeOptions()
    chrome_options.add_argument("--window-size=1360,1020")
    driver = webdriver.Remote(
        command_executor=f"http://{remote_driver_address}/wd/hub",
        desired_capabilities=chrome_options.to_capabilities())
    get(f"http://{listener_address}/record/start")
    current_time = time() - start_time
    driver.get(url)
    # Poll until the page is fully loaded before collecting metrics.
    while not driver.execute_script(
            "return document.readyState === \"complete\" && performance.timing.loadEventEnd > 0"
    ):
        sleep(0.1)
    results = driver.execute_script(check_ui_performance)
    video_results = get(f"http://{listener_address}/record/stop").content
    results["info"]["testStart"] = int(current_time)
    driver.quit()
    videofolder = tempfile.mkdtemp()
    video_path = join(videofolder, "Video.mp4")
    current_app.logger.info(videofolder)
    with open(video_path, "w+b") as f:
        f.write(video_results)
    video_name = f"{results['info']['title']}_{int(start_time)}.mp4"
    # Bug fix: the read handle used to be leaked; close it after the upload.
    with open(video_path, "rb") as video_file:
        MinioClient(project=project).upload_file("reports", video_file, video_name)
    report = prepareReport(video_path, results, videofolder, True)
    report = report.get_report()
    report_name = f"{results['info']['title']}_{int(start_time)}.html"
    MinioClient(project=project).upload_file("reports", report, report_name)
    rmtree(videofolder)
    return redirect(url_for(
        "observer.index",
        message=f"/api/v1/artifacts/{project.id}/reports/{report_name}"),
        code=302)
def post(self, project_id: int, bucket: str):
    """Create a MinIO bucket for the project, optionally configuring a
    lifecycle expiration.

    The expiration is taken from the "expiration_measure"/"expiration_value"
    request arguments; otherwise the project's data-retention limit (if any)
    is applied.

    :raises Forbidden: when the requested expiration exceeds the project's
        data-retention limit
    """
    args = self._parser_post.parse_args()
    expiration_measure = args["expiration_measure"]
    expiration_value = args["expiration_value"]
    project = Project.get_or_404(project_id)
    data_retention_limit = project.get_data_retention_limit()
    minio_client = MinioClient(project=project)
    # Bug fix: -1 means "no retention limit" and used to leak through as an
    # invalid lifecycle of -1 days; treat it (and falsy values) as "no limit".
    days = data_retention_limit if data_retention_limit and data_retention_limit != -1 else None
    if expiration_value and expiration_measure:
        today_date = datetime.today().date()
        expiration_date = today_date + relativedelta(**{expiration_measure: expiration_value})
        time_delta = expiration_date - today_date
        days = time_delta.days
        # Bug fix: when the limit is None, `days > None` raised TypeError;
        # skip the check unless a real limit is configured.
        if data_retention_limit not in (None, -1) and days > data_retention_limit:
            raise Forbidden(description="The data retention limit allowed in the project has been exceeded")
    created = minio_client.create_bucket(bucket)
    if created and days:
        minio_client.configure_bucket_lifecycle(bucket=bucket, days=days)
    return {"message": "Created", "code": 200}
def delete(self, project_id: int, bucket: str, filename: str):
    """Delete one or more files from a bucket.

    File names come from the "fname[]" request argument; the ``filename``
    path segment is kept for URL compatibility but is not used by itself.
    """
    args = self._parser_delete.parse_args(strict=False)
    project = Project.get_or_404(project_id)
    minio_client = MinioClient(project=project)
    # Bug fix: the loop variable used to shadow the `filename` parameter;
    # also construct the client once instead of per iteration.
    for fname in args.get("fname[]", ()) or ():
        minio_client.remove_file(bucket, fname)
    return {"message": "Deleted", "code": 200}
def get(self, project_id: int, bucket: str, filename: str):
    """Download a single artifact from MinIO and stream it to the client
    as an attachment.
    """
    project = Project.get_or_404(project_id)
    # Bug fix: the log line printed the literal "(unknown)" instead of the
    # requested file name.
    current_app.logger.info(f"Bucket: {bucket} File: {filename}")
    fobj = MinioClient(project=project).download_file(bucket, filename)
    return send_file(BytesIO(fobj), attachment_filename=filename)
def delete(self, project_id: int, bucket: str):
    """Remove an entire bucket from the project's MinIO storage."""
    target_project = Project.get_or_404(project_id)
    minio_client = MinioClient(project=target_project)
    minio_client.remove_bucket(bucket)
    return {"message": "Deleted", "code": 200}
def get(self, project_id: int, bucket: str):
    """List the files stored in the given bucket of the project."""
    target_project = Project.get_or_404(project_id)
    client = MinioClient(project=target_project)
    return client.list_files(bucket)
def apply_full_delete_by_pk(cls, pk: int) -> None:
    """Completely delete a project and everything attached to it.

    Deletes all related DB rows inside one transaction, then (only after a
    successful commit) removes the project's MinIO buckets, its Influx test
    data, and any Docker volumes named after its task ids.

    :param pk: primary key of the project to delete
    """
    # Local imports avoid circular dependencies between model modules.
    import docker
    import psycopg2
    from galloper.processors.minio import MinioClient
    from galloper.dal.influx_results import delete_test_data
    from galloper.database.models.task_results import Results
    from galloper.database.models.task import Task
    from galloper.database.models.security_results import SecurityResults
    from galloper.database.models.security_reports import SecurityReport
    from galloper.database.models.security_details import SecurityDetails
    from galloper.database.models.api_reports import APIReport
    from galloper.database.models.api_release import APIRelease
    from galloper.database.models.performance_tests import PerformanceTests
    from galloper.database.models.ui_report import UIReport
    from galloper.database.models.ui_result import UIResult
    from galloper.database.models.statistic import Statistic
    from galloper.database.models.project_quota import ProjectQuota
    _logger = logging.getLogger(cls.__name__.lower())
    _logger.info("Start deleting entire project within transaction")
    project = cls.query.get_or_404(pk)
    minio_client = MinioClient(project=project)
    docker_client = docker.from_env()
    # Snapshot bucket names before the project row is deleted.
    buckets_for_removal = minio_client.list_bucket()
    db_session.query(Project).filter_by(id=pk).delete()
    # Bulk-delete rows that need no per-row bookkeeping.
    for model_class in (Results, SecurityResults, SecurityReport,
                        SecurityDetails, APIRelease):
        db_session.query(model_class).filter_by(project_id=pk).delete()
    # Collect (build_id, name, lg_type) tuples so the matching Influx data
    # can be removed after the DB commit succeeds.
    influx_result_data = []
    for api_report in APIReport.query.filter_by(project_id=pk).all():
        influx_result_data.append(
            (api_report.build_id, api_report.name, api_report.lg_type))
        api_report.delete(commit=False)
    # Remember task ids so their Docker volumes can be removed later.
    task_ids = []
    for task in Task.query.filter_by(project_id=pk).all():
        task_ids.append(task.task_id)
        task.delete(commit=False)
    for test in PerformanceTests.query.filter_by(project_id=pk).all():
        test.delete(commit=False)
    for result in UIResult.query.filter_by(project_id=pk).all():
        result.delete(commit=False)
    for result in UIReport.query.filter_by(project_id=pk).all():
        result.delete(commit=False)
    for stats in Statistic.query.filter_by(project_id=pk).all():
        stats.delete(commit=False)
    for quota in ProjectQuota.query.filter_by(project_id=pk).all():
        quota.delete(commit=False)
    try:
        db_session.flush()
    except (psycopg2.DatabaseError, psycopg2.DataError,
            psycopg2.ProgrammingError, psycopg2.OperationalError,
            psycopg2.IntegrityError, psycopg2.InterfaceError,
            psycopg2.InternalError, psycopg2.Error) as exc:
        # Any DB failure aborts the whole deletion; external resources
        # (MinIO/Influx/Docker) are intentionally left untouched.
        db_session.rollback()
        _logger.error(str(exc))
    else:
        db_session.commit()
        # External cleanup runs only after the transaction committed.
        for bucket in buckets_for_removal:
            minio_client.remove_bucket(bucket=bucket)
        for influx_item_data in influx_result_data:
            delete_test_data(*influx_item_data)
        for task_id in task_ids:
            try:
                volume = docker_client.volumes.get(task_id)
            except docker.errors.NotFound as docker_exc:
                # Missing volume is fine — the task may never have run.
                _logger.info(str(docker_exc))
            else:
                volume.remove(force=True)
        _logger.info("Project successfully deleted!")
    # Drop the deleted project from the user's session selection, if set.
    selected_project_id = SessionProject.get()
    if pk == selected_project_id:
        SessionProject.pop()
def insert(self) -> None:
    """Persist this record, then make sure its "reports" bucket exists."""
    from galloper.processors.minio import MinioClient
    super().insert()
    client = MinioClient(project=self)
    client.create_bucket(bucket="reports")