def list(self, request, project, jm):
    """
    GET method implementation for list view

    Optional parameters (default):
    - offset (0)
    - count (10)
    - return_type (dict)
    """
    filter = UrlQueryFilter(request.query_params)

    offset = int(filter.pop("offset", 0))
    # cap the page size so a single request stays bounded
    count = min(int(filter.pop("count", 10)), 2000)
    return_type = filter.pop("return_type", "dict").lower()
    exclusion_profile = filter.pop("exclusion_profile", "default")
    visibility = filter.pop("visibility", "included")
    # the client may explicitly disable exclusion profiles
    if exclusion_profile in ('false', 'null'):
        exclusion_profile = None
    results = jm.get_job_list(offset, count,
                              conditions=filter.conditions,
                              exclusion_profile=exclusion_profile,
                              visibility=visibility)

    if results:
        option_collections = jm.refdata_model.get_all_option_collections()
        for job in results:
            job["platform_option"] = get_option(job, option_collections)

    response_body = dict(meta={"repository": project}, results=[])

    if results and return_type == "list":
        # "list" mode: send the property names once, then bare value rows
        response_body["job_property_names"] = results[0].keys()
        results = [job.values() for job in results]
    response_body["results"] = results
    response_body["meta"].update(offset=offset, count=count)

    return Response(response_body)
def list(self, request, project, jm):
    """
    GET method implementation for list view

    Optional parameters (default):
    - offset (0)
    - count (10)
    - return_type (dict)
    """
    MAX_JOBS_COUNT = 2000

    query_filter = UrlQueryFilter(request.query_params)
    offset = int(query_filter.pop("offset", 0))
    count = int(query_filter.pop("count", 10))

    if "last_modified" in query_filter.conditions:
        # could be more than one value; this is a set
        for condition in query_filter.conditions["last_modified"]:
            datestr = condition[1]
            try:
                # ensure last_modified parses as a date
                parser.parse(datestr)
            except ValueError:
                return Response(
                    "Invalid date value for `last_modified`: {}".format(
                        datestr),
                    status=HTTP_400_BAD_REQUEST)

    if count > MAX_JOBS_COUNT:
        msg = "Specified count exceeds API MAX_JOBS_COUNT value: {}".format(
            MAX_JOBS_COUNT)
        return Response({"error": msg}, status=HTTP_400_BAD_REQUEST)

    return_type = query_filter.pop("return_type", "dict").lower()
    exclusion_profile = query_filter.pop("exclusion_profile", "default")
    visibility = query_filter.pop("visibility", "included")
    # the client may explicitly disable exclusion profiles
    if exclusion_profile in ('false', 'null'):
        exclusion_profile = None

    results = jm.get_job_list(offset, count,
                              conditions=query_filter.conditions,
                              exclusion_profile=exclusion_profile,
                              visibility=visibility)

    if results:
        option_collection_map = self._get_option_collection_map()
        for job in results:
            option_hash = job['option_collection_hash']
            if option_hash:
                job["platform_option"] = option_collection_map[option_hash]

    response_body = dict(meta={"repository": project}, results=[])
    if results and return_type == "list":
        # "list" mode: send the property names once, then bare value rows
        response_body["job_property_names"] = results[0].keys()
        results = [job.values() for job in results]
    response_body["results"] = results
    response_body["meta"].update(offset=offset, count=count)

    return Response(response_body)
def similar_jobs(self, request, project, jm, pk=None):
    """
    Get a list of jobs similar to the one selected.
    """
    job = jm.get_job(pk)
    if not job:
        return Response("No job with id: {0}".format(pk),
                        status=HTTP_404_NOT_FOUND)

    # similarity = same job type, excluding the selected job itself
    query_params = request.query_params.copy()
    query_params['job_type_id'] = job[0]['job_type_id']
    query_params['id__ne'] = job[0]['id']

    url_query_filter = UrlQueryFilter(query_params)
    offset = int(url_query_filter.pop("offset", 0))
    # we don't need a big page size on this endpoint,
    # so cap it to 50 elements
    count = min(int(url_query_filter.pop("count", 10)), 50)
    return_type = url_query_filter.pop("return_type", "dict").lower()

    results = jm.get_job_list_sorted(
        offset, count, conditions=url_query_filter.conditions)

    response_body = dict(meta={"repository": project}, results=[])
    if results and return_type == "list":
        # "list" mode: send the property names once, then bare value rows
        response_body["job_property_names"] = results[0].keys()
        results = [item.values() for item in results]
    response_body["results"] = results
    response_body["meta"].update(offset=offset, count=count)
    return Response(response_body)
def list(self, request, project):
    """
    return a list of job artifacts
    """
    # @todo: remove after old data expires from this change on 3/5/2015
    qparams = request.QUERY_PARAMS.copy()
    name = qparams.get('name', None)
    if name == 'text_log_summary':
        # match both the new and the legacy artifact name
        qparams['name__in'] = 'text_log_summary,Structured Log'
        del qparams['name']
    # end remove block

    # @todo: change ``qparams`` back to ``request.QUERY_PARAMS``
    filter = UrlQueryFilter(qparams)

    offset = int(filter.pop("offset", 0))
    # cap the page size so a single request stays bounded
    count = min(int(filter.pop("count", 10)), 1000)

    with ArtifactsModel(project) as artifacts_model:
        objs = artifacts_model.get_job_artifact_list(
            offset, count, filter.conditions)
    return Response(objs)
def similar_jobs(self, request, project, jm, pk=None):
    """
    Get a list of jobs similar to the one selected.
    """
    job = jm.get_job(pk)
    if not job:
        return Response("No job with id: {0}".format(pk), 404)

    # similarity = same job type, excluding the selected job itself
    query_params = request.query_params.copy()
    query_params['job_type_id'] = job[0]['job_type_id']
    query_params['id__ne'] = job[0]['id']

    url_query_filter = UrlQueryFilter(query_params)
    offset = int(url_query_filter.pop("offset", 0))
    # we don't need a big page size on this endpoint,
    # so cap it to 50 elements
    count = min(int(url_query_filter.pop("count", 10)), 50)
    return_type = url_query_filter.pop("return_type", "dict").lower()

    results = jm.get_job_list_sorted(
        offset, count, conditions=url_query_filter.conditions)

    response_body = dict(meta={"repository": project}, results=[])
    if results and return_type == "list":
        # "list" mode: send the property names once, then bare value rows
        response_body["job_property_names"] = results[0].keys()
        results = [item.values() for item in results]
    response_body["results"] = results
    response_body["meta"].update(offset=offset, count=count)
    return Response(response_body)
def list(self, request, project, jm):
    """Return a paginated list of bug/job mappings for the project."""
    query_filter = UrlQueryFilter(request.query_params)

    start = int(query_filter.pop("offset", 0))
    # cap the page size so a single request stays bounded
    page_size = min(int(query_filter.pop("count", 10)), 1000)

    mappings = jm.get_bug_job_map_list(start, page_size,
                                       query_filter.conditions)
    return Response(mappings)
def list(self, request, project, jm):
    """Return a paginated list of bug/job mappings for the project."""
    filter = UrlQueryFilter(request.QUERY_PARAMS)

    # query params arrive as strings; coerce both paging values to int
    # (offset was previously passed through un-coerced)
    offset = int(filter.pop("offset", 0))
    count = min(int(filter.pop("count", 10)), 1000)

    objs = jm.get_bug_job_map_list(offset, count, filter.conditions)
    return Response(objs)
def list(self, request, project, jm):
    """
    GET method implementation for list view

    Optional parameters (default):
    - offset (0)
    - count (10)
    - return_type (dict)
    """
    MAX_JOBS_COUNT = 2000

    url_filter = UrlQueryFilter(request.query_params)
    offset = int(url_filter.pop("offset", 0))
    count = int(url_filter.pop("count", 10))

    if "last_modified" in url_filter.conditions:
        # could be more than one value; this is a set
        for lm_condition in url_filter.conditions["last_modified"]:
            date_value = lm_condition[1]
            try:
                # ensure last_modified parses as a date
                parser.parse(date_value)
            except ValueError:
                return Response(
                    "Invalid date value for `last_modified`: {}".format(
                        date_value),
                    status=HTTP_400_BAD_REQUEST)

    if count > MAX_JOBS_COUNT:
        msg = "Specified count exceeds API MAX_JOBS_COUNT value: {}".format(
            MAX_JOBS_COUNT)
        return Response({"error": msg}, status=HTTP_400_BAD_REQUEST)

    return_type = url_filter.pop("return_type", "dict").lower()
    exclusion_profile = url_filter.pop("exclusion_profile", "default")
    visibility = url_filter.pop("visibility", "included")
    # the client may explicitly disable exclusion profiles
    if exclusion_profile in ('false', 'null'):
        exclusion_profile = None

    results = jm.get_job_list(offset, count,
                              conditions=url_filter.conditions,
                              exclusion_profile=exclusion_profile,
                              visibility=visibility)

    if results:
        option_collection_map = self._get_option_collection_map()
        for job in results:
            option_hash = job['option_collection_hash']
            if option_hash:
                job["platform_option"] = option_collection_map[option_hash]

    response_body = dict(meta={"repository": project}, results=[])
    if results and return_type == "list":
        # "list" mode: send the property names once, then bare value rows
        response_body["job_property_names"] = results[0].keys()
        results = [job.values() for job in results]
    response_body["results"] = results
    response_body["meta"].update(offset=offset, count=count)

    return Response(response_body)
def list(self, request, project, jm):
    """
    return a list of job artifacts
    """
    filter = UrlQueryFilter(request.QUERY_PARAMS)

    # query params arrive as strings; coerce both paging values to int
    # (offset was previously passed through un-coerced)
    offset = int(filter.pop("offset", 0))
    count = min(int(filter.pop("count", 10)), 1000)

    objs = jm.get_job_artifact_list(offset, count, filter.conditions)
    return Response(objs)
def list(self, request, project, jm):
    """
    return a list of job artifacts
    """
    filter = UrlQueryFilter(request.QUERY_PARAMS)

    # query params arrive as strings; coerce both paging values to int,
    # and cap the page size so a single request stays bounded
    # (matches the sibling artifact-list endpoint's 1000 cap)
    offset = int(filter.pop("offset", 0))
    count = min(int(filter.pop("count", 10)), 1000)

    objs = jm.get_job_artifact_list(offset, count, filter.conditions)
    return Response(objs)
def trigger_runnable_jobs(self, request, project, jm, pk=None):
    """
    Add new jobs to a resultset.
    """
    if not pk:
        return Response({"message": "resultset id required"}, status=400)

    # Making sure a resultset with this id exists
    filter = UrlQueryFilter({"id": pk})
    full = filter.pop('full', 'true').lower() == 'true'
    result_set_list = jm.get_result_set_list(0, 1, full, filter.conditions)
    if not result_set_list:
        return Response(
            {"message": "No resultset with id: {0}".format(pk)},
            status=404)

    buildernames = request.data.get('buildernames', [])
    if len(buildernames) == 0:
        # BUG FIX: this Response was previously constructed but not
        # returned, so an action with an empty buildername list was
        # still queued.
        return Response(
            {"message": "The list of buildernames cannot be empty"},
            status=400)

    publish_resultset_runnable_job_action.apply_async(
        args=[project, pk, request.user.email, buildernames],
        routing_key='publish_to_pulse')

    return Response(
        {"message": "New jobs added for push '{0}'".format(pk)})
def trigger_runnable_jobs(self, request, project, jm, pk=None):
    """
    Add new jobs to a resultset.
    """
    if not pk:
        return Response({"message": "resultset id required"},
                        status=HTTP_400_BAD_REQUEST)

    # Making sure a resultset with this id exists
    filter = UrlQueryFilter({"id": pk})
    full = filter.pop('full', 'true').lower() == 'true'
    result_set_list = jm.get_result_set_list(0, 1, full, filter.conditions)
    if not result_set_list:
        return Response({"message": "No resultset with id: {0}".format(pk)},
                        status=HTTP_404_NOT_FOUND)

    requested_jobs = request.data.get('requested_jobs', [])
    decisionTaskID = request.data.get('decisionTaskID', [])
    if len(requested_jobs) == 0:
        # BUG FIX: this Response was previously constructed but not
        # returned, so an action with an empty job list was still queued.
        return Response(
            {"message": "The list of requested_jobs cannot be empty"},
            status=HTTP_400_BAD_REQUEST)

    publish_resultset_runnable_job_action.apply_async(
        args=[project, pk, request.user.email, requested_jobs,
              decisionTaskID],
        routing_key='publish_to_pulse'
    )

    return Response({"message": "New jobs added for push '{0}'".format(pk)})
def trigger_runnable_jobs(self, request, project, jm, pk=None):
    """
    Add new jobs to a resultset.
    """
    if not pk:
        return Response({"message": "resultset id required"},
                        status=HTTP_400_BAD_REQUEST)

    # Making sure a resultset with this id exists
    filter = UrlQueryFilter({"id": pk})
    full = filter.pop('full', 'true').lower() == 'true'
    result_set_list = jm.get_result_set_list(0, 1, full, filter.conditions)
    if not result_set_list:
        return Response(
            {"message": "No resultset with id: {0}".format(pk)},
            status=HTTP_404_NOT_FOUND)

    requested_jobs = request.data.get('requested_jobs', [])
    decision_task_id = request.data.get('decision_task_id', [])
    if not requested_jobs:
        # BUG FIX: this Response was previously constructed but not
        # returned, so an action with an empty job list was still queued.
        return Response(
            {"message": "The list of requested_jobs cannot be empty"},
            status=HTTP_400_BAD_REQUEST)

    publish_resultset_runnable_job_action.apply_async(
        args=[
            project, pk, request.user.email, requested_jobs, decision_task_id
        ],
        routing_key='publish_to_pulse')

    return Response(
        {"message": "New jobs added for push '{0}'".format(pk)})
def list(self, request, project, jm):
    """
    GET method implementation for list view
    """
    filter = UrlQueryFilter(request.QUERY_PARAMS)

    # query params arrive as strings; coerce both paging values to int
    # (offset was previously passed through un-coerced)
    offset = int(filter.pop("offset", 0))
    count = min(int(filter.pop("count", 10)), 1000)

    full = filter.pop('full', 'true').lower() == 'true'

    objs = jm.get_job_list(offset, count, full, filter.conditions)

    if objs:
        option_collections = jm.refdata_model.get_all_option_collections()
        for job in objs:
            job["platform_option"] = get_option(job, option_collections)

    return Response(objs)
def list(self, request, project, jm):
    """
    GET method for list of ``resultset`` records with revisions
    """
    filter = UrlQueryFilter(request.QUERY_PARAMS)

    # query params arrive as strings; coerce both paging values to int
    # (they were previously passed through un-coerced)
    offset = int(filter.pop("offset", 0))
    count = int(filter.pop("count", 10))
    full = filter.pop('full', 'true').lower() == 'true'

    objs = jm.get_result_set_list(offset, count, full, filter.conditions)

    return Response(self.get_resultsets_with_jobs(jm, objs, full, {}))
def list(self, request, project, jm):
    """
    return a list of job artifacts
    """
    # @todo: remove after old data expires from this change on 3/5/2015
    qparams = request.QUERY_PARAMS.copy()
    name = qparams.get('name', None)
    if name == 'text_log_summary':
        # match both the new and the legacy artifact name
        qparams['name__in'] = 'text_log_summary,Structured Log'
        del qparams['name']
    # end remove block

    # @todo: change ``qparams`` back to ``request.QUERY_PARAMS``
    filter = UrlQueryFilter(qparams)

    # query params arrive as strings; coerce both paging values to int
    # (offset was previously passed through un-coerced)
    offset = int(filter.pop("offset", 0))
    count = min(int(filter.pop("count", 10)), 1000)

    objs = jm.get_job_artifact_list(offset, count, filter.conditions)
    return Response(objs)
def retrieve(self, request, project, jm, pk=None):
    """
    GET method implementation for detail view of ``resultset``
    """
    id_filter = UrlQueryFilter({"id": pk})
    full = id_filter.pop('full', 'true').lower() == 'true'

    matches = jm.get_result_set_list(0, 1, full, id_filter.conditions)
    if not matches:
        return Response("No resultset with id: {0}".format(pk),
                        status=HTTP_404_NOT_FOUND)
    return Response(matches[0])
def retrieve(self, request, project, jm, pk=None):
    """
    GET method implementation for detail view of ``resultset``
    """
    id_filter = UrlQueryFilter({"id": pk})
    full = id_filter.pop('full', 'true').lower() == 'true'

    matches = jm.get_result_set_list(0, 1, full, id_filter.conditions)
    if not matches:
        return Response("No resultset with id: {0}".format(pk), 404)

    resultsets = self.get_resultsets_with_jobs(jm, matches, full, {})
    return Response(resultsets[0])
def retrieve(self, request, project, jm, pk=None):
    """
    GET method implementation for detail view of ``resultset``
    """
    id_filter = UrlQueryFilter({"id": pk})
    full = id_filter.pop('full', 'true').lower() == 'true'

    matches = jm.get_result_set_list(0, 1, full, id_filter.conditions)
    if not matches:
        return Response("No resultset with id: {0}".format(pk), 404)

    debug = request.QUERY_PARAMS.get('debug', None)
    resultsets = self.get_resultsets_with_jobs(jm, matches, full, {}, debug)
    return Response(resultsets[0])
def list(self, request, project, jm):
    """
    GET method for list of ``resultset`` records with revisions
    """
    # What is the upper limit on the number of resultsets returned by the api
    MAX_RESULTS_COUNT = 1000

    # make a mutable copy of these params
    filter_params = request.query_params.copy()

    # This will contain some meta data about the request and results
    meta = {}

    # support ranges for date as well as revisions(changes) like old tbpl
    for param in ["fromchange", "tochange", "startdate", "enddate",
                  "revision"]:
        value = filter_params.get(param, None)
        if value:
            del filter_params[param]
            meta[param] = value

    # create a timestamp lookup based on the from/to change params that may
    # exist. This means we only make 1 DB query rather than 2, if we have
    # both a ``fromchange`` and a ``tochange`` value.
    ts_lookup = jm.get_resultset_all_revision_lookup(
        [meta[key] for key in ['fromchange', 'tochange'] if key in meta])

    # translate these params into our own filtering mechanism
    if 'fromchange' in meta:
        filter_params.update({
            "push_timestamp__gte":
                ts_lookup[meta['fromchange']]["push_timestamp"]
        })
    if 'tochange' in meta:
        filter_params.update({
            "push_timestamp__lte":
                ts_lookup[meta['tochange']]["push_timestamp"]
        })
    if 'startdate' in meta:
        filter_params.update(
            {"push_timestamp__gte": to_timestamp(meta['startdate'])})
    if 'enddate' in meta:
        # add a day because we aren't supplying a time, just a date.  So
        # we're doing ``less than``, rather than ``less than or equal to``.
        filter_params.update(
            {"push_timestamp__lt": to_timestamp(meta['enddate']) + 86400})
    if 'revision' in meta:
        # Allow the user to search by either the short or long version of
        # a revision.
        rev_key = ("revisions_long_revision"
                   if len(meta['revision']) == 40
                   else "revisions_short_revision")
        filter_params.update({rev_key: meta['revision']})

    meta['filter_params'] = filter_params

    filter = UrlQueryFilter(filter_params)

    offset_id = int(filter.pop("id__lt", 0))
    count = int(filter.pop("count", 10))

    if count > MAX_RESULTS_COUNT:
        msg = "Specified count exceeds api limit: {}".format(
            MAX_RESULTS_COUNT)
        return Response({"error": msg}, status=HTTP_400_BAD_REQUEST)

    full = filter.pop('full', 'true').lower() == 'true'
    results = jm.get_result_set_list(offset_id, count, full,
                                     filter.conditions)

    for rs in results:
        rs["revisions_uri"] = reverse("resultset-revisions",
                                      kwargs={"project": jm.project,
                                              "pk": rs["id"]})

    meta['count'] = len(results)
    meta['repository'] = project

    return Response({'meta': meta, 'results': results})
def list(self, request, project, jm):
    """
    GET method for list of ``resultset`` records with revisions
    """
    # make a mutable copy of these params
    filter_params = request.QUERY_PARAMS.copy()

    # This will contain some meta data about the request and results
    meta = {}

    # support ranges for date as well as revisions(changes) like old tbpl
    for param in ["fromchange", "tochange", "startdate", "enddate"]:
        value = filter_params.get(param, None)
        if value:
            del filter_params[param]
            meta[param] = value

    # translate these params into our own filtering mechanism
    if 'fromchange' in meta:
        filter_params.update({
            "push_timestamp__gte":
                jm.get_revision_timestamp(meta['fromchange'])
        })
    if 'tochange' in meta:
        filter_params.update({
            "push_timestamp__lte":
                jm.get_revision_timestamp(meta['tochange'])
        })
    if 'startdate' in meta:
        filter_params.update(
            {"push_timestamp__gte": to_timestamp(meta['startdate'])})
    if 'enddate' in meta:
        # add a day because we aren't supplying a time, just a date.  So
        # we're doing ``less than``, rather than ``less than or equal to``.
        filter_params.update(
            {"push_timestamp__lt": to_timestamp(meta['enddate']) + 86400})

    meta['filter_params'] = filter_params

    filter = UrlQueryFilter(filter_params)

    offset_id = int(filter.pop("id__lt", 0))
    # cap the page size so a single request stays bounded
    count = min(int(filter.pop("count", 10)), 1000)
    full = filter.pop('full', 'true').lower() == 'true'

    results = jm.get_result_set_list(offset_id, count, full,
                                     filter.conditions)

    for rs in results:
        rs["revisions_uri"] = reverse("resultset-revisions",
                                      kwargs={"project": jm.project,
                                              "pk": rs["id"]})

    meta['count'] = len(results)
    meta['repository'] = project

    return Response({'meta': meta, 'results': results})
def list(self, request, project, jm):
    """
    GET method for list of ``resultset`` records with revisions
    """
    # make a mutable copy of these params
    filter_params = request.query_params.copy()

    # This will contain some meta data about the request and results
    meta = {}

    # support ranges for date as well as revisions(changes) like old tbpl
    for param in ["fromchange", "tochange", "startdate", "enddate",
                  "revision"]:
        value = filter_params.get(param, None)
        if value:
            del filter_params[param]
            meta[param] = value

    # translate these params into our own filtering mechanism
    if 'fromchange' in meta:
        filter_params.update({
            "push_timestamp__gte":
                jm.get_revision_timestamp(meta['fromchange'])
        })
    if 'tochange' in meta:
        filter_params.update({
            "push_timestamp__lte":
                jm.get_revision_timestamp(meta['tochange'])
        })
    if 'startdate' in meta:
        filter_params.update(
            {"push_timestamp__gte": to_timestamp(meta['startdate'])})
    if 'enddate' in meta:
        # add a day because we aren't supplying a time, just a date.  So
        # we're doing ``less than``, rather than ``less than or equal to``.
        filter_params.update(
            {"push_timestamp__lt": to_timestamp(meta['enddate']) + 86400})
    if 'revision' in meta:
        # TODO: modify to use ``short_revision`` or ``long_revision`` fields
        # when addressing Bug 1079796
        # It ends up that we store sometimes long, sometimes short
        # revisions in the ``revision`` field, depending on the repo/source.
        # (gaia, for instance). So we must search
        # for EITHER the short or long, when long is passed in.
        revision = meta['revision']
        if len(revision) > 12:
            filter_params.update(
                {"revision__in": "{},{}".format(revision, revision[:12])})
        else:
            filter_params.update({"revision": revision})

    meta['filter_params'] = filter_params

    filter = UrlQueryFilter(filter_params)

    offset_id = int(filter.pop("id__lt", 0))
    # cap the page size so a single request stays bounded
    count = min(int(filter.pop("count", 10)), 1000)
    full = filter.pop('full', 'true').lower() == 'true'

    results = jm.get_result_set_list(offset_id, count, full,
                                     filter.conditions)

    for rs in results:
        rs["revisions_uri"] = reverse("resultset-revisions",
                                      kwargs={"project": jm.project,
                                              "pk": rs["id"]})

    meta['count'] = len(results)
    meta['repository'] = project

    return Response({'meta': meta, 'results': results})
def list(self, request, project, jm):
    """
    GET method for list of ``resultset`` records with revisions
    """
    # What is the upper limit on the number of resultsets returned by the api
    MAX_RESULTS_COUNT = 1000

    # make a mutable copy of these params
    filter_params = request.query_params.copy()

    # This will contain some meta data about the request and results
    meta = {}

    # support ranges for date as well as revisions(changes) like old tbpl
    for param in ["fromchange", "tochange", "startdate", "enddate",
                  "revision"]:
        value = filter_params.get(param, None)
        if value:
            del filter_params[param]
            meta[param] = value

    # create a timestamp lookup based on the from/to change params that may
    # exist. This means we only make 1 DB query rather than 2, if we have
    # both a ``fromchange`` and a ``tochange`` value.
    ts_lookup = jm.get_resultset_all_revision_lookup(
        [meta[key] for key in ['fromchange', 'tochange'] if key in meta])

    # translate these params into our own filtering mechanism
    if 'fromchange' in meta:
        filter_params.update({
            "push_timestamp__gte":
                ts_lookup[meta['fromchange']]["push_timestamp"]
        })
    if 'tochange' in meta:
        filter_params.update({
            "push_timestamp__lte":
                ts_lookup[meta['tochange']]["push_timestamp"]
        })
    if 'startdate' in meta:
        filter_params.update(
            {"push_timestamp__gte": to_timestamp(meta['startdate'])})
    if 'enddate' in meta:
        # add a day because we aren't supplying a time, just a date.  So
        # we're doing ``less than``, rather than ``less than or equal to``.
        filter_params.update(
            {"push_timestamp__lt": to_timestamp(meta['enddate']) + 86400})
    if 'revision' in meta:
        # Allow the user to search by either the short or long version of
        # a revision.
        rev_key = ("revisions_long_revision"
                   if len(meta['revision']) == 40
                   else "revisions_short_revision")
        filter_params.update({rev_key: meta['revision']})

    meta['filter_params'] = filter_params

    filter = UrlQueryFilter(filter_params)

    offset_id = int(filter.pop("id__lt", 0))
    count = int(filter.pop("count", 10))

    if count > MAX_RESULTS_COUNT:
        msg = "Specified count exceeds api limit: {}".format(
            MAX_RESULTS_COUNT)
        return Response({"error": msg}, status=HTTP_400_BAD_REQUEST)

    full = filter.pop('full', 'true').lower() == 'true'
    results = jm.get_result_set_list(offset_id, count, full,
                                     filter.conditions)

    for rs in results:
        rs["revisions_uri"] = reverse("resultset-revisions",
                                      kwargs={"project": jm.project,
                                              "pk": rs["id"]})

    meta['count'] = len(results)
    meta['repository'] = project

    return Response({'meta': meta, 'results': results})