def generate_text_report(report):
    """Generate a text report"""
    _update_report(report)
    results = _get_results(report)
    if not results:
        _set_report_empty(report)
        return
    # Create file with header
    text_file = StringIO()
    text_file.write("Test Report\n")
    text_file.write("\n")
    text_file.write("Filter: {}\n".format(report["params"].get("filter", "")))
    text_file.write("Source: {}\n".format(report["params"]["source"]))
    text_file.write("\n")
    # Now loop through the results and summarise them
    summary = {
        "passed": 0,
        "failed": 0,
        "skipped": 0,
        "error": 0,
        "xpassed": 0,
        "xfailed": 0,
        "other": 0,
    }
    for result in results:
        if result["result"] in summary:
            summary[result["result"]] += 1
        else:
            summary["other"] += 1
    text_file.writelines(["{}: {}\n".format(key, value) for key, value in summary.items()])
    text_file.write("\n")
    for result in results:
        result_path = _make_result_path(result)
        text_file.write("{}: {}\n".format(result_path, result["result"]))
    # Write the report to the database
    text_file.seek(0)
    report_file = ReportFile(
        filename=report["filename"],
        data={"contentType": "text/plain"},
        report_id=report["id"],
        content=text_file.read().encode("utf8"),
    )
    session.add(report_file)
    session.commit()
    _set_report_done(report)

def generate_json_report(report):
    """Generate a JSON report"""
    _update_report(report)
    results = _get_results(report)
    if not results:
        _set_report_empty(report)
        return
    report_dict = _make_dict(results)
    # Write the report to the database
    report_file = ReportFile(
        filename=report["filename"],
        data={"contentType": "application/json"},
        report_id=report["id"],
        content=json.dumps(report_dict, indent=2).encode("utf8"),
    )
    session.add(report_file)
    session.commit()
    _set_report_done(report)

def add_project(project=None, token_info=None, user=None):
    """Create a project

    :param body: Project
    :type body: dict | bytes

    :rtype: Project
    """
    if not connexion.request.is_json:
        return "Bad request, JSON required", 400
    project = Project.from_dict(**connexion.request.get_json())
    user = User.query.get(user)
    if user:
        project.owner = user
        project.users.append(user)
    session.add(project)
    session.commit()
    return project.to_dict(), 201

def prune_old_files(months=5):
    """Delete artifact files older than the specified number of months (a month is defined as 30 days)."""
    try:
        if isinstance(months, str):
            months = int(months)

        if months < 2:
            # we don't want to remove files more recent than 2 months
            return
        max_date = datetime.utcnow() - timedelta(days=months * DAYS_IN_MONTH)

        # delete artifact files older than max_date
        delete_statement = Artifact.__table__.delete().where(Artifact.upload_date < max_date)
        session.execute(delete_statement)
        session.commit()
    except Exception:
        # we don't want to continually retry this task
        return

def _create_result(tar, run_id, result, artifacts, project_id=None, metadata=None):
    """Create a result with artifacts, used in the archive importer"""
    old_id = None
    result_id = result.get("id")
    if is_uuid(result_id):
        result_record = session.query(Result).get(result_id)
    else:
        result_record = None
    if result_record:
        result_record.run_id = run_id
    else:
        old_id = result["id"]
        if "id" in result:
            result.pop("id")
        result["run_id"] = run_id
        if project_id:
            result["project_id"] = project_id
        if metadata:
            result["metadata"] = result.get("metadata", {})
            result["metadata"].update(metadata)
        result["env"] = result.get("metadata", {}).get("env")
        result["component"] = result.get("metadata", {}).get("component")
        result_record = Result.from_dict(**result)
    session.add(result_record)
    session.commit()
    result = result_record.to_dict()
    for artifact in artifacts:
        session.add(
            Artifact(
                filename=artifact.name.split("/")[-1],
                result_id=result["id"],
                data={"contentType": "text/plain", "resultId": result["id"]},
                content=tar.extractfile(artifact).read(),
            )
        )
    session.commit()
    return old_id

def admin_update_project(id_, project=None, token_info=None, user=None):
    """Update a project

    :param id: ID of test project
    :type id: str
    :param body: Project
    :type body: dict | bytes

    :rtype: Project
    """
    check_user_is_admin(user)
    if not connexion.request.is_json:
        return "Bad request, JSON required", 400
    if not is_uuid(id_):
        id_ = convert_objectid_to_uuid(id_)
    project = Project.query.get(id_)
    if not project:
        abort(404)
    # Grab the fields from the request
    project_dict = connexion.request.get_json()
    # If the "owner" field is set, ignore it
    project_dict.pop("owner", None)
    # handle updating users separately
    for username in project_dict.pop("users", []):
        user_to_add = User.query.filter_by(email=username).first()
        if user_to_add and user_to_add not in project.users:
            project.users.append(user_to_add)
    # Make sure the project owner is in the list of users
    if project_dict.get("owner_id"):
        owner = User.query.get(project_dict["owner_id"])
        if owner and owner not in project.users:
            project.users.append(owner)
    # update the rest of the project info
    project.update(project_dict)
    session.add(project)
    session.commit()
    return project.to_dict()

def delete_dashboard(id_, token_info=None, user=None):
    """Deletes a dashboard

    :param id: ID of the dashboard to delete
    :type id: str

    :rtype: tuple
    """
    dashboard = Dashboard.query.get(id_)
    if not dashboard:
        return "Not Found", 404
    if not project_has_user(dashboard.project, user):
        return "Forbidden", 403
    widget_configs = WidgetConfig.query.filter(WidgetConfig.dashboard_id == dashboard.id).all()
    for widget_config in widget_configs:
        session.delete(widget_config)
    session.delete(dashboard)
    session.commit()
    return "OK", 200

def recover(email=None):
    """Recover a user account

    :param email: The e-mail address of the user
    """
    if not connexion.request.is_json:
        return "Bad request, JSON is required", 400
    login = connexion.request.get_json()
    if not login.get("email"):
        return "Bad request", 400
    user = User.query.filter(User.email == login["email"]).first()
    if not user:
        return "Bad request", 400
    # Create a random activation code. Base64 just for funsies
    user.activation_code = urlsafe_b64encode(str(uuid4()).encode("utf8")).strip(b"=")
    session.add(user)
    session.commit()
    return {}, 201

def generate_html_report(report):
    """Generate an HTML report"""
    _update_report(report)
    results = _get_results(report)
    if not results:
        _set_report_empty(report)
        return
    report_dict = _make_dict(results)
    tree = deepcopy(TREE_ROOT)
    counts = {
        "passed": 0,
        "failed": 0,
        "skipped": 0,
        "error": 0,
        "xpassed": 0,
        "xfailed": 0,
        "other": 0,
    }
    for _, result in report_dict.items():
        _build_tree(result["name"], tree, result)
        try:
            counts[result["statuses"]["overall"]] += 1
        except Exception:
            counts["other"] += 1
    html_report = render_template(
        "reports/html-report.html",
        report_name=report["name"],
        tree=tree,
        results=report_dict,
        report=report,
        counts=counts,
        current_counts=counts,
    )
    # Write the report to the database
    report_file = ReportFile(
        filename=report["filename"],
        data={"contentType": "text/html"},
        report_id=report["id"],
        content=html_report.encode("utf8"),
    )
    session.add(report_file)
    session.commit()
    _set_report_done(report)

def update_group(id_, group=None):
    """Update a group

    :param id: The ID of the group
    :type id: str
    :param body: The updated group
    :type body: dict | bytes

    :rtype: Group
    """
    if not connexion.request.is_json:
        return "Bad request, JSON required", 400
    group = Group.query.get(id_)
    if not group:
        return "Group not found", 404
    group.update(connexion.request.get_json())
    session.add(group)
    session.commit()
    return group.to_dict()

def reset_password(activation_code=None, password=None):
    """Reset the password from the recover page

    :param activation_code: The activation code supplied to the reset page
    :param password: The new password for the user
    """
    if not connexion.request.is_json:
        return "Bad request, JSON is required", 400
    login = connexion.request.get_json()
    if not login.get("activation_code") or not login.get("password"):
        return "Bad request", 400
    user = User.query.filter(User.activation_code == login["activation_code"]).first()
    if not user:
        return "Invalid activation code", 400
    user.password = login["password"]
    user.activation_code = None
    session.add(user)
    session.commit()
    return {}, 201

def add_token(token=None, token_info=None, user=None):
    """Create a new token

    :param body: Token object
    :type body: dict | bytes

    :rtype: Token
    """
    if not connexion.request.is_json:
        return "Bad request, JSON is required", 400
    user = User.query.get(user)
    if not user:
        return "Not authorized", 401
    token = Token.from_dict(**connexion.request.get_json())
    token.user = user
    token.expires = datetime.fromisoformat(token.expires.replace("Z", "+00:00"))
    token.token = generate_token(user.id, token.expires.timestamp())
    session.add(token)
    session.commit()
    return token.to_dict(), 201

def activate(activation_code=None):
    """Activate a user's account

    :param activation_code: The activation code
    """
    if not activation_code:
        return "Not Found", 404
    user = User.query.filter(User.activation_code == activation_code).first()
    login_url = build_url(
        current_app.config.get("FRONTEND_URL", "http://localhost:3000"), "login"
    )
    if user:
        user.is_active = True
        user.activation_code = None
        session.add(user)
        session.commit()
        return redirect(f"{login_url}?st=success&msg=Account+activated,+please+log+in.")
    else:
        return redirect(
            f"{login_url}?st=error&msg=Invalid+activation+code,+please+check+the+link"
            "+in+your+email."
        )

def _add_artifacts(result, testcase, traceback, session):
    """Add traceback and captured-output artifacts to a result (extracted to reduce cognitive complexity)"""
    if traceback:
        session.add(
            Artifact(
                filename="traceback.log",
                result_id=result.id,
                data={"contentType": "text/plain", "resultId": result.id},
                content=traceback,
            )
        )
    if testcase.find("system-out"):
        system_out = bytes(str(testcase["system-out"]), "utf8")
        session.add(
            Artifact(
                filename="system-out.log",
                result_id=result.id,
                data={"contentType": "text/plain", "resultId": result.id},
                content=system_out,
            )
        )
    if testcase.find("system-err"):
        system_err = bytes(str(testcase["system-err"]), "utf8")
        session.add(
            Artifact(
                filename="system-err.log",
                result_id=result.id,
                data={"contentType": "text/plain", "resultId": result.id},
                content=system_err,
            )
        )
    session.commit()

def prune_old_runs(months=12):
    """
    Remove runs older than the specified number of months (a month is defined as 30 days).

    IMPORTANT NOTE: to avoid primary key errors, 'months' must be greater than
    what is used in 'prune_old_results'
    """
    try:
        if isinstance(months, str):
            months = int(months)

        if months < 10:
            # we don't want to remove runs more recent than 10 months
            return
        max_date = datetime.utcnow() - timedelta(days=months * DAYS_IN_MONTH)

        # delete runs older than max_date
        delete_statement = Run.__table__.delete().where(Run.start_time < max_date)
        session.execute(delete_statement)
        session.commit()
    except Exception:
        # we don't want to continually retry this task
        return

def get_user_from_keycloak(auth_data):
    """Get a user object from the keycloak server"""
    config = get_keycloak_config(is_private=True)
    response = requests.get(
        config["user_url"],
        headers={"Authorization": "Bearer " + auth_data["access_token"]},
    )
    if response.status_code == 200:
        user_json = response.json()
        user = User.query.filter(User.email == user_json["email"]).first()
        if not user:
            user = User(
                email=user_json["email"],
                name=user_json["name"],
                _password=user_json["sub"],
                is_active=True,
                is_superadmin=False,
            )
            session.add(user)
            session.commit()
        return user
    else:
        print("Error getting user, response:", response.text)
        return None

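# Illustrative sketch only: get_user_from_keycloak() above assumes the Keycloak
# userinfo endpoint returns the standard OIDC claims it reads ("email", "name",
# "sub"). The values below are made up.
EXAMPLE_KEYCLOAK_USERINFO = {
    "sub": "f3a2c1d4-0000-4000-8000-000000000000",
    "name": "Example User",
    "email": "user@example.com",
}
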
def update_project(id_, project=None, token_info=None, user=None):
    """Update a project

    :param id: ID of test project
    :type id: str
    :param body: Project
    :type body: dict | bytes

    :rtype: Project
    """
    if not connexion.request.is_json:
        return "Bad request, JSON required", 400
    if not is_uuid(id_):
        id_ = convert_objectid_to_uuid(id_)
    project = Project.query.get(id_)
    if not project:
        return "Project not found", 404
    user = User.query.get(user)
    if not user.is_superadmin and (not project.owner or project.owner.id != user.id):
        return "Forbidden", 403
    # handle updating users separately
    updates = connexion.request.get_json()
    for username in updates.pop("users", []):
        user_to_add = User.query.filter_by(email=username).first()
        if user_to_add and user_to_add not in project.users:
            project.users.append(user_to_add)
    # update the rest of the project info
    project.update(updates)
    session.add(project)
    session.commit()
    return project.to_dict()

def _update_report(report):
    """Update the report with the parameters, etc."""
    report_type = report["params"]["type"]
    report["name"] = _generate_report_name(report["params"])
    report_filename = "{}.{}".format(report["name"], REPORTS[report_type]["extension"])
    report.update(
        {
            "filename": report_filename,
            "mimetype": REPORTS[report_type]["mimetype"],
            "url": "{}/api/report/{}/download/{}".format(
                current_app.config.get("BACKEND_URL", "http://localhost:8080"),
                report["id"],
                report_filename,
            ),
            "download_url": "{}/api/report/{}/download/{}".format(
                current_app.config.get("BACKEND_URL", "http://localhost:8080"),
                report["id"],
                report_filename,
            ),
            "view_url": "{}/api/report/{}/view/{}".format(
                current_app.config.get("BACKEND_URL", "http://localhost:8080"),
                report["id"],
                report_filename,
            ),
            "status": "running",
        }
    )
    report_record = Report.query.get(report["id"])
    report_record.update(report)
    session.add(report_record)
    session.commit()

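# Illustrative sketch only: _update_report() above indexes into a REPORTS registry
# defined elsewhere in this module. Its exact contents are an assumption, but each
# entry is expected to provide an "extension" and a "mimetype", roughly like this.
EXAMPLE_REPORTS = {
    "text": {"extension": "txt", "mimetype": "text/plain"},
    "json": {"extension": "json", "mimetype": "application/json"},
    "html": {"extension": "html", "mimetype": "text/html"},
}
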
def create_app(self):
    logging.getLogger("connexion.operation").setLevel("ERROR")
    extra_config = {
        "TESTING": True,
        "LIVESERVER_PORT": 0,
        "SQLALCHEMY_DATABASE_URI": "sqlite:///:memory:",
        "GOOGLE_CLIENT_ID": "*****@*****.**",
        "GITHUB_CLIENT_ID": None,
        "FACEBOOK_APP_ID": None,
        "GITLAB_CLIENT_ID": "dfgfdgh4563453456dsfgdsfg456",
        "GITLAB_BASE_URL": "https://gitlab.com",
        "JWT_SECRET": "89807erkjhdfgu768dfsgdsfg345r",
        "KEYCLOAK_BASE_URL": None,
        "KEYCLOAK_CLIENT_ID": None,
    }
    app = get_app(**extra_config)
    create_celery_app(app)
    # Add a test user
    with app.app_context():
        self.test_user = User(name="Test User", email="*****@*****.**", is_active=True)
        session.add(self.test_user)
        session.commit()
        self.jwt_token = generate_token(self.test_user.id)
        token = Token(name="login-token", user=self.test_user, token=self.jwt_token)
        session.add(token)
        session.commit()
        session.refresh(self.test_user)
    if ibutsu_server.tasks.task is None:
        ibutsu_server.tasks.task = mock_task
    return app

def generate_exception_report(report):
    """Generate an exception report"""
    _update_report(report)
    # TODO speed up with filtering
    # join with original filter in deepcopied report
    results = _get_results(report)
    if not results:
        _set_report_empty(report)
        return
    exception_results = [
        result
        for result in results
        if result["result"] in ["error", "failed"] and "short_tb" in result["metadata"]
    ]
    total_count = len(exception_results)
    exception_type_indexed = defaultdict(list)
    for result in exception_results:
        exception_name = _exception_metadata_hack(result)
        if exception_name is None:
            continue
        exception_type_indexed[exception_name].append(result)

    # list of tuples for easy unpacking in the jinja template
    # exception_type, count, color designation
    warn_count = int(round(FAILURE_PERC_WARN * total_count))
    danger_count = int(round(FAILURE_PERC_DANGER * total_count))
    exception_counts = []
    for exception_type, exceptions in exception_type_indexed.items():
        severity_level = BSTRAP_INFO
        if len(exceptions) >= danger_count:
            severity_level = BSTRAP_DANGER
        elif len(exceptions) >= warn_count:
            severity_level = BSTRAP_WARN
        exception_counts.append((exception_type, len(exceptions), severity_level))

    report_dict = _make_dict(exception_results)
    tree = deepcopy(TREE_ROOT)
    counts = defaultdict(int)
    for result in report_dict.values():
        # build tree for each result
        _build_tree(result["name"], tree, result)
        try:
            counts[result["statuses"]["overall"]] += 1
        except Exception:
            counts["other"] += 1
    exception_report = render_template(
        "reports/exception-report.html",
        report_name=report["name"],
        tree=tree,
        results=report_dict,
        report=report,
        exceptions=exception_type_indexed,
        exception_counts=exception_counts,
        counts=counts,
        current_counts=counts,
    )
    # Write the report to the database
    report_file = ReportFile(
        filename=report["filename"],
        data={"contentType": "text/html"},
        report_id=report["id"],
        content=exception_report.encode("utf8"),
    )
    session.add(report_file)
    session.commit()
    _set_report_done(report)

def run_junit_import(import_):
    """Import a test run from a JUnit file"""
    # Update the status of the import
    import_record = Import.query.get(import_["id"])
    _update_import_status(import_record, "running")
    # Fetch the file contents
    import_file = ImportFile.query.filter(ImportFile.import_id == import_["id"]).first()
    if not import_file:
        _update_import_status(import_record, "error")
        return
    # Parse the XML and create a run object(s)
    tree = objectify.fromstring(import_file.content)
    import_record.data["run_id"] = []
    # Use current time as start time if no start time is present
    start_time = parser.parse(tree.get("timestamp")) if tree.get("timestamp") else datetime.utcnow()
    run_dict = {
        "created": datetime.utcnow(),
        "start_time": start_time,
        "duration": float(tree.get("time", 0.0)),
        "summary": {
            "errors": int(tree.get("errors", 0)),
            "failures": int(tree.get("failures", 0)),
            "skips": int(tree.get("skipped", 0)),
            "xfailures": int(tree.get("xfailures", 0)),
            "xpasses": int(tree.get("xpasses", 0)),
            "tests": int(tree.get("tests", 0)),
        },
    }
    if import_record.data.get("project_id"):
        run_dict["project_id"] = import_record.data["project_id"]
    if import_record.data.get("source"):
        run_dict["source"] = import_record.data["source"]
    metadata = None
    if import_record.data.get("metadata"):
        # metadata is expected to be a json dict
        metadata = import_record.data["metadata"]
        run_dict["data"] = metadata
        # add env and component directly to the run dict if it exists in the metadata
        run_dict["env"] = metadata.get("env")
        run_dict["component"] = metadata.get("component")
    # Insert the run, and then update the import with the run id
    run = Run.from_dict(**run_dict)
    session.add(run)
    session.commit()
    run_dict = run.to_dict()
    import_record.run_id = run.id
    import_record.data["run_id"].append(run.id)
    # If the top level "testsuites" element doesn't have these, we'll need to build them manually
    run_data = {
        "duration": 0.0,
        "errors": 0,
        "failures": 0,
        "skips": 0,
        "xfailures": 0,
        "xpasses": 0,
        "tests": 0,
    }
    # Handle structures where testsuite is/isn't the top level tag
    testsuites = _get_ts_element(tree)
    # Run through the test suites and import all the test results
    for ts in testsuites:
        run_data["duration"] += float(ts.get("time", 0.0))
        run_data["errors"] += int(ts.get("errors", 0))
        run_data["failures"] += int(ts.get("failures", 0))
        run_data["skips"] += int(ts.get("skipped", 0))
        run_data["xfailures"] += int(ts.get("xfailures", 0))
        run_data["xpasses"] += int(ts.get("xpasses", 0))
        run_data["tests"] += int(ts.get("tests", 0))
        for testcase in ts.iterchildren(tag="testcase"):
            test_name, backup_fspath = _get_test_name_path(testcase)
            result_dict = {
                "test_id": test_name,
                "start_time": run_dict["start_time"],
                "duration": float(testcase.get("time") or 0),
                "run_id": run.id,
                "metadata": {
                    "run": run.id,
                    "fspath": testcase.get("file") or backup_fspath,
                    "line": testcase.get("line"),
                },
                "params": {},
                "source": ts.get("name"),
            }
            _populate_result_metadata(run_dict, result_dict, import_record, metadata is not None)
            result_dict, traceback = _process_result(result_dict, testcase)
            result = Result.from_dict(**result_dict)
            session.add(result)
            session.commit()
            # Add the traceback and captured output artifacts for this result
            _add_artifacts(result, testcase, traceback, session)
    # Check if we need to update the run
    if not run.duration:
        run.duration = run_data["duration"]
    if not run.summary["errors"]:
        run.summary["errors"] = run_data["errors"]
    if not run.summary["failures"]:
        run.summary["failures"] = run_data["failures"]
    if not run.summary["skips"]:
        run.summary["skips"] = run_data["skips"]
    if not run.summary["xfailures"]:
        run.summary["xfailures"] = run_data["xfailures"]
    if not run.summary["xpasses"]:
        run.summary["xpasses"] = run_data["xpasses"]
    if not run.summary["tests"]:
        run.summary["tests"] = run_data["tests"]
    session.add(run)
    session.commit()
    # Update the status of the import, now that we're all done
    _update_import_status(import_record, "done")

def run_archive_import(import_):
    """Import a test run from an Ibutsu archive file"""
    # Update the status of the import
    import_record = Import.query.get(str(import_["id"]))
    metadata = {}
    if import_record.data.get("metadata"):
        # metadata is expected to be a json dict
        metadata = import_record.data["metadata"]
    _update_import_status(import_record, "running")
    # Fetch the file contents
    import_file = ImportFile.query.filter(ImportFile.import_id == import_["id"]).first()
    if not import_file:
        _update_import_status(import_record, "error")
        return
    # First open the tarball and pull in the results
    run = None
    run_artifacts = []
    results = []
    result_artifacts = {}
    start_time = None
    file_object = BytesIO(import_file.content)
    with tarfile.open(fileobj=file_object) as tar:
        for member in tar.getmembers():
            # We don't care about directories, skip them
            if member.isdir():
                continue
            # Grab the run id
            run_id, rest = member.name.split("/", 1)
            if "/" not in rest:
                if member.name.endswith("run.json"):
                    run = json.loads(tar.extractfile(member).read())
                else:
                    run_artifacts.append(member)
                continue
            result_id, file_name = rest.split("/")
            if member.name.endswith("result.json"):
                result = json.loads(tar.extractfile(member).read())
                result_start_time = result.get("start_time")
                if not start_time or start_time > result_start_time:
                    start_time = result_start_time
                results.append(result)
            else:
                try:
                    result_artifacts[result_id].append(member)
                except KeyError:
                    result_artifacts[result_id] = [member]
        run_dict = run or {
            "duration": 0,
            "summary": {
                "errors": 0,
                "failures": 0,
                "skips": 0,
                "xfailures": 0,
                "xpasses": 0,
                "tests": 0,
            },
        }
        # patch things up a bit, if necessary
        run_dict["metadata"] = run_dict.get("metadata", {})
        run_dict["metadata"].update(metadata)
        _populate_metadata(run_dict, import_record)
        _populate_created_times(run_dict, start_time)
        # If this run has a valid ID, check if this run exists
        if is_uuid(run_dict.get("id")):
            run = session.query(Run).get(run_dict["id"])
        if run:
            run.update(run_dict)
        else:
            run = Run.from_dict(**run_dict)
        session.add(run)
        session.commit()
        import_record.run_id = run.id
        import_record.data["run_id"] = [run.id]
        # Loop through any artifacts associated with the run and upload them
        for artifact in run_artifacts:
            session.add(
                Artifact(
                    filename=artifact.name.split("/")[-1],
                    run_id=run.id,
                    data={"contentType": "text/plain", "runId": run.id},
                    content=tar.extractfile(artifact).read(),
                )
            )
        # Now loop through all the results, and create or update them
        for result in results:
            artifacts = result_artifacts.get(result["id"], [])
            _create_result(
                tar,
                run.id,
                result,
                artifacts,
                project_id=run_dict.get("project_id") or import_record.data.get("project_id"),
                metadata=metadata,
            )
    # Update the import record
    _update_import_status(import_record, "done")
    if run:
        update_run.delay(run.id)

def _set_report_error(self, report):
    report.status = "error"
    session.add(report)
    session.commit()

def _set_report_status(report_id, status):
    """Set a report's status"""
    report = Report.query.get(report_id)
    report.status = status
    session.add(report)
    session.commit()

def _update_import_status(import_record, status):
    """Update the status of the import"""
    import_record.status = status
    session.add(import_record)
    session.commit()

def seed_users(projects):
    """
    Add users and add users to projects in database.

    Schema for the request to /admin/run-task in JSON should be:

    .. code-block:: json

        {
            "task": "db.seed_users",
            "token": "<admin-token>",
            "params": {
                "projects": {
                    "my-project": {
                        "owner": "*****@*****.**",
                        "users": [
                            "*****@*****.**",
                            "*****@*****.**",
                            ...
                        ],
                    },
                    "new-project": {
                        "users": [
                            "*****@*****.**",
                            "*****@*****.**",
                            ...
                        ]
                    }
                }
            }
        }
    """
    try:
        if not projects:
            print("No users to add, exiting...")
            return
        for project_name, project_info in projects.items():
            project = Project.query.filter_by(name=project_name).first()
            if not project:
                print(f"Project with name {project_name} not found.")
                continue
            # create/set the project owner
            if project_info.get("owner"):
                project_owner = User.query.filter_by(email=project_info["owner"]).first()
                if not project_owner:
                    project_owner = User(
                        email=project_info["owner"],
                        name=project_info["owner"].split("@")[0],
                        is_active=True,
                    )
                project.owner = project_owner
                session.add(project)
                session.commit()
            # add the users
            for user_email in project_info.get("users", []):
                user = User.query.filter_by(email=user_email).first()
                # create the user if they don't exist
                if not user:
                    user = User(email=user_email, name=user_email.split("@")[0], is_active=True)
                # add the project if the user needs to be added to the project
                if project not in user.projects:
                    user.projects.append(project)
                session.add(user)
                session.commit()
    except Exception as e:
        # we don't want to continually retry this task
        print(e)
        return

def upload_artifact(body, token_info=None, user=None):
    """Uploads an artifact

    :param result_id: ID of result to attach artifact to
    :type result_id: str
    :param run_id: ID of run to attach artifact to
    :type run_id: str
    :param filename: filename for storage
    :type filename: string
    :param file: file to upload
    :type file: werkzeug.datastructures.FileStorage
    :param additional_metadata: Additional data to pass to server
    :type additional_metadata: object

    :rtype: tuple
    """
    result_id = body.get("result_id") or body.get("resultId")
    run_id = body.get("run_id") or body.get("runId")
    result = Result.query.get(result_id)
    if result and not project_has_user(result.project, user):
        return "Forbidden", 403
    filename = body.get("filename")
    additional_metadata = body.get("additional_metadata", {})
    file_ = connexion.request.files["file"]
    content_type = magic.from_buffer(file_.read())
    data = {
        "contentType": content_type,
        "resultId": result_id,
        "runId": run_id,
        "filename": filename,
    }
    if additional_metadata:
        if isinstance(additional_metadata, str):
            try:
                additional_metadata = json.loads(additional_metadata)
            except (ValueError, TypeError):
                return "Bad request, additionalMetadata is not valid JSON", 400
        if not isinstance(additional_metadata, dict):
            return "Bad request, additionalMetadata is not a JSON object", 400
        data["additionalMetadata"] = additional_metadata
    # Reset the file pointer
    file_.seek(0)
    if data.get("runId"):
        artifact = Artifact(
            filename=filename,
            run_id=data["runId"],
            content=file_.read(),
            upload_date=datetime.utcnow(),
            data=additional_metadata,
        )
    else:
        artifact = Artifact(
            filename=filename,
            result_id=data["resultId"],
            content=file_.read(),
            upload_date=datetime.utcnow(),
            data=additional_metadata,
        )
    session.add(artifact)
    session.commit()
    return artifact.to_dict(), 201

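# Hedged client-side sketch (not part of the server code): one way an artifact
# upload might be posted with the requests library. The "/api/artifact" path is an
# assumption; the form field names mirror the body.get() keys used above.
import requests

def example_upload_artifact(base_url, result_id, file_path):
    with open(file_path, "rb") as file_handle:
        response = requests.post(
            f"{base_url}/api/artifact",
            data={"result_id": result_id, "filename": file_path.split("/")[-1]},
            files={"file": file_handle},
        )
    return response.json()
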