# The hookwrapper registration is required for the `(yield).get_result()` pattern below.
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """Pytest hook for report preparation.

    Submit tests' data to a database.
    """
    run_id = item.config.getoption("db_submit")
    if not run_id:
        yield
        return

    data = item._request.test_info["db_info"].copy()
    data["results"] = item._request.test_info["results"].copy()
    data["raw_results"] = item._request.test_info["raw_results"].copy()
    data["cpu_info"] = get_cpu_info()
    data["status"] = "not_finished"
    data["error_msg"] = ""

    report = (yield).get_result()
    if call.when in ["setup", "call"]:
        if call.when == "call":
            if not report.passed:
                data["status"] = "failed"
                data["error_msg"] = report.longrepr.reprcrash.message
            else:
                data["status"] = "passed"

        db_url = item.config.getoption("db_url")
        db_collection = item.config.getoption("db_collection")
        logging.info(f"Upload data to {db_url}/timetests.{db_collection}. "
                     f"Data: {data}")
        upload_data(data, db_url, 'timetests', db_collection)
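

# NOTE: a minimal sketch of what an upload helper could look like, assuming a
# MongoDB backend reachable at `db_url`. The real `upload_data` used above is
# defined elsewhere in the test utilities; the name `_upload_data_sketch` and
# the use of pymongo here are illustrative assumptions, not the project's API.
def _upload_data_sketch(data: dict, db_url: str, db_name: str, db_collection: str):
    """Insert or update a single test record keyed by its deterministic '_id'."""
    from pymongo import MongoClient  # local import: illustration only

    client = MongoClient(db_url)
    collection = client[db_name][db_collection]
    # Upsert so the "setup"-phase upload (status "not_finished") is overwritten
    # by the final "call"-phase upload ("passed"/"failed") for the same '_id'.
    collection.replace_one({"_id": data["_id"]}, data, upsert=True)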


@pytest.fixture(scope="function")
def prepare_db_info(request, instance, executable, niter, manifest_metadata):
    """Fixture for preparing and validating data to submit to a database.

    Fixture prepares data and metadata to submit to a database. One of the steps
    is parsing of build information from the build manifest. After preparation,
    it checks if the data contains the required properties.
    """
    FIELDS_FOR_ID = ['run_id', 'test_exe', 'model', 'device', 'niter']

    run_id = request.config.getoption("db_submit")
    if not run_id:
        yield
        return

    instance["db"] = {}

    # add db_metadata
    db_meta_path = request.config.getoption("db_metadata")
    if db_meta_path:
        with open(db_meta_path, "r") as db_meta_f:
            instance["db"].update(json.load(db_meta_f))

    # add test info
    info = {
        # results will be added immediately before uploading to DB in `pytest_runtest_makereport`
        **instance["orig_instance"],  # TODO: think about using `instance` instead of `orig_instance`
        "run_id": run_id,
        "test_exe": str(executable.stem),
        "niter": niter,
        "test_name": request.node.name,
        "os": "_".join([str(item) for item in [get_os_name(), *get_os_version()]]),
        "cpu_info": get_cpu_info(),
        "status": "not_finished",
        "error_msg": "",
        "results": {},
        "raw_results": {},
        "references": instance["instance"].get("references", {}),  # upload actual references that were used
        "ref_factor": REFS_FACTOR,
    }
    info['_id'] = hashlib.sha256(
        ''.join([str(info[key]) for key in FIELDS_FOR_ID]).encode()).hexdigest()

    # add metadata
    instance["db"].update(info)
    # add manifest metadata
    instance["db"].update(manifest_metadata)

    # validate db_info
    schema = """
    {
        "type": "object",
        "properties": {
            "device": {
                "type": "object",
                "properties": {
                    "name": {"type": "string"}
                },
                "required": ["name"]
            },
            "model": {
                "type": "object",
                "properties": {
                    "name": {"type": "string"},
                    "precision": {"type": "string"},
                    "framework": {"type": "string"}
                },
                "required": ["name", "precision"]
            },
            "run_id": {"type": "string"},
            "test_exe": {"type": "string"},
            "niter": {"type": "integer"},
            "test_name": {"type": "string"},
            "os": {"type": "string"},
            "cpu_info": {"type": "string"},
            "status": {"type": "string"},
            "error_msg": {"type": "string"},
            "results": {"type": "object"},
            "raw_results": {"type": "object"},
            "references": {"type": "object"},
            "_id": {"type": "string"}
        },
        "required": [
            "device", "model", "run_id", "test_exe", "niter", "test_name", "os",
            "cpu_info", "status", "error_msg", "results", "raw_results",
            "references", "_id"
        ],
        "additionalProperties": true
    }
    """
    schema = json.loads(schema)

    try:
        validate(instance=instance["db"], schema=schema)
    except ValidationError:
        request.config.option.db_submit = False
        raise
    yield
    instance["db"]["results"] = instance["results"]
    instance["db"]["raw_results"] = instance["raw_results"]
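

# NOTE: a small standalone illustration of how the deterministic '_id' above is
# built: SHA-256 over the concatenated string values of FIELDS_FOR_ID, so the
# same run_id/test_exe/model/device/niter combination always maps to the same
# database document (letting the upload step upsert rather than duplicate).
# The field values below are made up for the example only.
def _example_build_id():
    import hashlib

    example_info = {
        "run_id": "nightly-123",            # hypothetical run identifier
        "test_exe": "timetest_infer",       # hypothetical executable stem
        "model": {"name": "resnet-50", "precision": "FP32"},
        "device": {"name": "CPU"},
        "niter": 3,
    }
    fields_for_id = ['run_id', 'test_exe', 'model', 'device', 'niter']
    return hashlib.sha256(
        ''.join(str(example_info[key]) for key in fields_for_id).encode()
    ).hexdigest()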