def get(self, job_name):
    """Retrieves a single job instance.

    Query-string parameters:
        start / end  -- ISO-8601 bounds of the build-time window
                        (defaults: Unix epoch .. now, normalized to UTC)
        num_results  -- page size (default 25)
        start_result -- page offset (default 0)
    """

    def to_utc(timestamp):
        # Normalize any ISO-8601 input to UTC so the range query compares
        # consistently against br_build_date_time.
        return pendulum.parse(timestamp).in_timezone("UTC")

    query_parser = reqparse.RequestParser()
    query_parser.add_argument(
        "start",
        type=to_utc,
        default=pendulum.parse("1970-01-01T00:00:00+00:00").in_timezone("UTC"),
    )
    query_parser.add_argument(
        "end",
        type=to_utc,
        default=pendulum.now().in_timezone("UTC"),
    )
    query_parser.add_argument("num_results", type=int, default=25)
    query_parser.add_argument("start_result", type=int, default=0)
    params = query_parser.parse_args()

    # Exact job-name match restricted to the requested time window.
    job_filter = Q("term", br_job_name__raw=job_name)
    window_filter = Q(
        "range",
        br_build_date_time={"gte": params.start, "lt": params.end},
    )

    builds = make_query(
        app.config["ES_INDEX"],
        job_filter & window_filter,
        includes=detailed_build_info["includes"],
        excludes=detailed_build_info["excludes"],
        size=params.num_results,
        start=params.start_result,
    )
    if not builds:
        return ns.abort(404, "Job {job} not found.".format(job=job_name))
    return {"builds": builds}
def test_make_query(mock_build_results):
    """Basic smoke test for make_query.

    NOTE(review): the original wrote ``assert mock.called_with(...)``.
    ``called_with`` is not a real Mock method — Mock auto-creates the
    attribute and returns a truthy child Mock, so every assertion passed
    vacuously.  Switched to the actual ``assert_called_with`` API.
    Assumes make_query records calls on these exact attribute paths; if it
    chains on returned objects, the paths need ``.return_value`` — verify
    against make_query's implementation.
    """
    make_query("test_index", None, [], [], agg=None, size=1, start=0)
    mock_build_results.search.assert_called_with("test_index")
    mock_build_results.search.source.assert_called_with([], [])
    mock_build_results.search.query.assert_called_with("bool", filter=[None])
    mock_build_results.search.execute.assert_called_with()
def test_make_query_agg(mock_build_results):
    """Basic smoke test for make_query with aggregation.

    NOTE(review): the original wrote ``assert mock.called_with(...)``.
    ``called_with`` is not a real Mock method — Mock auto-creates the
    attribute and returns a truthy child Mock, so every assertion passed
    vacuously.  Switched to the actual ``assert_called_with`` API.
    Assumes make_query records calls on these exact attribute paths; if it
    chains on returned objects, the paths need ``.return_value`` — verify
    against make_query's implementation.
    """
    make_query("test_index", None, [], [], agg="agg", size=1, start=0)
    mock_build_results.search.assert_called_with("test_index")
    mock_build_results.search.source.assert_called_with([], [])
    mock_build_results.search.aggs.metric.assert_called_with(
        "fail_count", "agg")
    mock_build_results.search.query.assert_called_with("bool", filter=[None])
    mock_build_results.search.execute.assert_called_with()
def get(self, job_name, build_id):
    """Retrieves a single Build object.

    Looks up the build by exact job name and build id; aborts with 404
    when no matching document exists.
    """
    match_job_name = Q("term", br_job_name__raw=job_name)
    match_build_id = Q("term", br_build_id_key=build_id)
    # Combine with `&` (bool must), consistent with the other endpoints in
    # this module; for leaf queries the original `+` produced the same
    # bool-must combination in elasticsearch-dsl.
    combined_filter = match_job_name & match_build_id
    result = make_query(
        app.config["ES_INDEX"],
        combined_filter,
        includes=detailed_build_info["includes"],
        excludes=detailed_build_info["excludes"],
    )
    if not result:
        return ns.abort(
            404,
            "Build #{build} for job {job} not found.".format(
                build=build_id, job=job_name))
    # Unwrap the raw hit payload from the elasticsearch-dsl response object.
    return result[0].__getstate__()[0]
def get(self, job_name, build_id):
    """Retrieves a collection of tests.

    Optional ``test_status`` query-string argument narrows the result to
    passed, failed, or skipped tests; when omitted, all three are returned.
    """
    parser = reqparse.RequestParser()
    parser.add_argument(
        "test_status",
        choices=["passed", "failed", "skipped"],
        default=None,
        help=
        'Test status must be "passed", "failed", or "skipped". Leave blank for all tests.',
    )
    args = parser.parse_args()

    # Map each status to the _source sub-object holding its tests; keep only
    # the requested status, or all three when none was given.  Insertion
    # order (failed, passed, skipped) mirrors the original include order.
    source_by_status = {
        "failed": "br_tests_object.br_tests_failed_object.*",
        "passed": "br_tests_object.br_tests_passed_object.*",
        "skipped": "br_tests_object.br_tests_skipped_object.*",
    }
    include_tests = [
        source for status, source in source_by_status.items()
        if args.test_status in (None, status)
    ]

    job_query = Q("term", br_job_name__raw=job_name)
    build_query = Q("term", br_build_id_key=build_id)
    result = make_query(
        app.config["ES_INDEX"],
        job_query + build_query,
        includes=include_tests,
        excludes=[],
    )
    if not result:
        return ns.abort(
            404,
            "Tests for Build #{build} for job {job} not found.".format(
                build=build_id, job=job_name))
    return result[0]
def get(self, job_name=None):
    """Gets the AggregatedTests instance.

    Counts failing tests per fully-qualified test name over a time window,
    optionally restricted to a single job.  The ``job_name`` route
    parameter (from api/job/<job_name>/tests/failing) takes precedence
    over the ``job_name`` query-string argument.
    """
    parser = reqparse.RequestParser()
    parser.add_argument(
        "job_name",
        default=None,
        help=
        "Input name of job to restrict search to. Leave blank for all tests."
    )
    parser.add_argument(
        "test_status",
        choices=["failed"],
        default=None,
        help='Test status must be "failed"')
    parser.add_argument(
        "start",
        type=lambda s: pendulum.parse(s).in_timezone("UTC"),
        default=pendulum.parse("1970-01-01T00:00:00+00:00").in_timezone(
            "UTC"),
    )
    parser.add_argument(
        "end",
        type=lambda s: pendulum.parse(s).in_timezone("UTC"),
        default=pendulum.now().in_timezone("UTC"))
    args = parser.parse_args()

    # Route parameter wins; otherwise fall back to the query-string value.
    effective_job = job_name if job_name is not None else args.get(
        "job_name", None)

    # Match "failure"/"FAILURE"/"unstable"/"UNSTABLE" build statuses within
    # the requested window.
    failed_or_unstable = (
        Q("match", br_status_key=BuildResults.BuildStatus.FAILURE.name)
        | Q("match", br_status_key=BuildResults.BuildStatus.UNSTABLE.name))
    in_window = Q(
        "range",
        br_build_date_time={"gte": args.start, "lt": args.end})
    combined_filter = failed_or_unstable & in_window
    if effective_job:
        # Restrict to the exact job name when one was supplied.
        combined_filter &= Q("term", br_job_name__raw=effective_job)

    # Aggregate failure counts per fully-qualified test name.
    per_test_failures = A(
        "terms",
        field="br_tests_object.br_tests_failed_object.br_fullname.raw")
    results = make_query(
        app.config["ES_INDEX"],
        combined_filter,
        includes=[],
        excludes=[],
        agg=per_test_failures)
    return {"tests": results.__getstate__()[0]}