def get_all(self, **kwargs): """Get all the jobs. Using filters, only get a subset of jobs. :param kwargs: job filters :return: a list of jobs """ context = t_context.extract_context_from_environ() if not policy.enforce(context, policy.ADMIN_API_JOB_LIST): return utils.format_api_error( 403, _('Unauthorized to show all jobs')) is_valid_filter, filters = self._get_filters(kwargs) if not is_valid_filter: msg = (_('Unsupported filter type: %(filters)s') % { 'filters': ', '.join([filter_name for filter_name in filters]) }) return utils.format_api_error(400, msg) filters = [{'key': key, 'comparator': 'eq', 'value': value} for key, value in six.iteritems(filters)] try: jobs_in_job_table = db_api.list_jobs(context, filters) jobs_in_job_log_table = db_api.list_jobs_from_log(context, filters) jobs = jobs_in_job_table + jobs_in_job_log_table return {'jobs': [self._get_more_readable_job(job) for job in jobs]} except Exception as e: LOG.exception('Failed to show all asynchronous jobs: ' '%(exception)s ', {'exception': e}) return utils.format_api_error( 500, _('Failed to show all asynchronous jobs'))
def get_all(self, **kwargs): """Get all the jobs. Using filters, only get a subset of jobs. :param kwargs: job filters :return: a list of jobs """ context = t_context.extract_context_from_environ() if not policy.enforce(context, policy.ADMIN_API_JOB_LIST): return utils.format_api_error( 403, _('Unauthorized to show all jobs')) # check limit and marker, default value -1 means no pagination _limit = kwargs.pop('limit', -1) try: limit = int(_limit) limit = utils.get_pagination_limit(limit) except ValueError as e: LOG.exception('Failed to convert pagination limit to an integer: ' '%(exception)s ', {'exception': e}) msg = (_("Limit should be an integer or a valid literal " "for int() rather than '%s'") % _limit) return utils.format_api_error(400, msg) marker = kwargs.pop('marker', None) sorts = [('timestamp', 'desc'), ('id', 'desc')] is_valid_filter, filters = self._get_filters(kwargs) if not is_valid_filter: msg = (_('Unsupported filter type: %(filters)s') % { 'filters': ', '.join( [filter_name for filter_name in filters]) }) return utils.format_api_error(400, msg) # project ID from client should be equal to the one from # context, since only the project ID in which the user # is authorized will be used as the filter. filters['project_id'] = context.project_id filters = [{'key': key, 'comparator': 'eq', 'value': value} for key, value in six.iteritems(filters)] try: if marker is not None: try: # verify whether the marker is effective db_api.get_job(context, marker) jobs = db_api.list_jobs(context, filters, sorts, limit, marker) jobs_from_log = [] if len(jobs) < limit: jobs_from_log = db_api.list_jobs_from_log( context, filters, sorts, limit - len(jobs), None) job_collection = jobs + jobs_from_log except t_exc.ResourceNotFound: try: db_api.get_job_from_log(context, marker) jobs_from_log = db_api.list_jobs_from_log( context, filters, sorts, limit, marker) job_collection = jobs_from_log except t_exc.ResourceNotFound: msg = (_('Invalid marker: %(marker)s') % {'marker': marker}) return utils.format_api_error(400, msg) else: jobs = db_api.list_jobs(context, filters, sorts, limit, marker) jobs_from_log = [] if len(jobs) < limit: jobs_from_log = db_api.list_jobs_from_log( context, filters, sorts, limit - len(jobs), None) job_collection = jobs + jobs_from_log # add link links = [] if len(job_collection) >= limit: marker = job_collection[-1]['id'] base = constants.JOB_PATH link = "%s?limit=%s&marker=%s" % (base, limit, marker) links.append({"rel": "next", "href": link}) result = {'jobs': [self._get_more_readable_job(job) for job in job_collection]} if links: result['jobs_links'] = links return result except Exception as e: LOG.exception('Failed to show all asynchronous jobs: ' '%(exception)s ', {'exception': e}) return utils.format_api_error( 500, _('Failed to show all asynchronous jobs'))
def test_delete(self, mock_context):
    mock_context.return_value = self.context

    # cover all job types. Each iteration of the 'for' loop adds one item
    # to the job log table, so the count variable tracks the running total
    # of entries in that table.
    count = 1
    for job_type in self.job_resource_map.keys():
        job = self._prepare_job_element(job_type)

        resource_id = '#'.join([job['resource'][resource_id]
                                for resource_type, resource_id
                                in self.job_resource_map[job_type]])

        # failure case, only admin can delete the job
        job_1 = db_api.new_job(self.context, job['project_id'], job_type,
                               resource_id)
        self.context.is_admin = False
        res = self.controller.delete(job_1['id'])
        self._validate_error_code(res, 403)

        self.context.is_admin = True
        db_api.delete_job(self.context, job_1['id'])

        # failure case, job not found
        res = self.controller.delete(-123)
        self._validate_error_code(res, 404)

        # failure case, delete a running job
        job_2 = db_api.register_job(self.context, job['project_id'],
                                    job_type, resource_id)
        job = db_api.get_job(self.context, job_2['id'])
        res = self.controller.delete(job_2['id'])
        self._validate_error_code(res, 400)

        # finish the job and delete it
        db_api.finish_job(self.context, job_2['id'], False,
                          timeutils.utcnow())
        db_api.delete_job(self.context, job_2['id'])

        # successful case, delete a successful job. A successful job in
        # the job log table can't be deleted, so this successful job is
        # taken from the job table.
        job_3 = self._prepare_job_element(job_type)

        resource_id_3 = '#'.join([job_3['resource'][resource_id_3]
                                  for resource_type_3, resource_id_3
                                  in self.job_resource_map[job_type]])

        job_4 = db_api.new_job(self.context, job_3['project_id'],
                               job_type, resource_id_3)

        with self.context.session.begin():
            job_dict = {'status': constants.JS_Success,
                        'timestamp': timeutils.utcnow(),
                        'extra_id': uuidutils.generate_uuid()}
            core.update_resource(self.context, models.AsyncJob,
                                 job_4['id'], job_dict)

        job_4_succ = db_api.get_job(self.context, job_4['id'])
        self.controller.delete(job_4['id'])

        filters_job_4 = [
            {'key': 'type', 'comparator': 'eq',
             'value': job_4_succ['type']},
            {'key': 'status', 'comparator': 'eq',
             'value': job_4_succ['status']},
            {'key': 'resource_id', 'comparator': 'eq',
             'value': job_4_succ['resource_id']},
            {'key': 'extra_id', 'comparator': 'eq',
             'value': job_4_succ['extra_id']}]
        self.assertEqual(0, len(db_api.list_jobs(self.context,
                                                 filters_job_4)))
        self.assertEqual(count,
                         len(db_api.list_jobs_from_log(self.context)))
        count = count + 1

        # successful case, delete a new job
        job_5 = db_api.new_job(self.context, job['project_id'], job_type,
                               resource_id)
        self.controller.delete(job_5['id'])

        filters_job_5 = [
            {'key': 'type', 'comparator': 'eq', 'value': job_5['type']},
            {'key': 'status', 'comparator': 'eq',
             'value': job_5['status']},
            {'key': 'resource_id', 'comparator': 'eq',
             'value': job_5['resource_id']},
            {'key': 'extra_id', 'comparator': 'eq',
             'value': job_5['extra_id']}]
        self.assertEqual(0, len(db_api.list_jobs(self.context,
                                                 filters_job_5)))

        # successful case, delete a failed job
        job_6 = db_api.new_job(self.context, job['project_id'], job_type,
                               resource_id)
        db_api.finish_job(self.context, job_6['id'], False,
                          timeutils.utcnow())
        job_6_failed = db_api.get_job(self.context, job_6['id'])
        self.controller.delete(job_6['id'])

        filters_job_6 = [
            {'key': 'type', 'comparator': 'eq',
             'value': job_6_failed['type']},
            {'key': 'status', 'comparator': 'eq',
             'value': job_6_failed['status']},
            {'key': 'resource_id', 'comparator': 'eq',
             'value': job_6_failed['resource_id']},
            {'key': 'extra_id', 'comparator': 'eq',
             'value': job_6_failed['extra_id']}]
        self.assertEqual(0, len(db_api.list_jobs(self.context,
                                                 filters_job_6)))
def test_get_all_jobs_with_pagination(self, mock_context):
    self.context.project_id = uuidutils.generate_uuid()
    mock_context.return_value = self.context

    # map job type to project id for later project id filter validation.
    job_project_id_map = {}
    amount_of_all_jobs = len(self.job_resource_map.keys())
    amount_of_running_jobs = 3
    count = 1

    # cover all job types.
    for job_type in self.job_resource_map.keys():
        job = self._prepare_job_element(job_type)
        if count > 1:
            # for test convenience, the first job has a project ID
            # that is different from the context.project_id
            job['project_id'] = self.context.project_id

        job_project_id_map[job_type] = job['project_id']

        resource_id = '#'.join([job['resource'][resource_id]
                                for resource_type, resource_id
                                in self.job_resource_map[job_type]])
        if count <= amount_of_running_jobs:
            db_api.register_job(self.context, job['project_id'],
                                job_type, resource_id)
            # jobs are sorted by timestamp; without a delay they would
            # all be created at the same time and paginate_query could
            # not tell them apart
            time.sleep(1)
        else:
            db_api.new_job(self.context, job['project_id'],
                           job_type, resource_id)
            time.sleep(1)
        count = count + 1

    # query the jobs with several kinds of filters.
    # supported filters: project id, job status, job type.
    job_status_filter_1 = {'status': 'new'}
    job_status_filter_2 = {'status': 'fail'}
    job_status_filter_3 = {'status': 'running'}
    invalid_filter = {'status': "new-x"}
    unsupported_filter = {'fake_filter': "fake_filter"}
    count = 1
    for job_type in self.job_resource_map.keys():
        job_type_filter_1 = {'type': job_type}
        job_type_filter_2 = {'type': job_type + '_1'}

        # failure case, only admin can list the jobs
        self.context.is_admin = False
        res = self.controller.get_all()
        self._validate_error_code(res, 403)

        self.context.is_admin = True

        # when the client specifies a project ID filter that differs
        # from the one in the context, it is ignored and the project ID
        # from the context is used instead.
        filter1 = {'project_id': uuidutils.generate_uuid()}
        res1 = self.controller.get_all(**filter1)

        filter2 = {'project_id': self.context.project_id}
        res2 = self.controller.get_all(**filter2)
        self.assertEqual(len(res2['jobs']), len(res1['jobs']))

        res3 = self.controller.get_all()
        # one job has a project ID different from context.project_id;
        # since the list operation only retrieves jobs whose project ID
        # equals context.project_id, that job entry won't be returned.
        self.assertEqual(len(res3['jobs']), len(res2['jobs']))

        # successful case, filter by job type
        jobs_job_type_filter_1 = self.controller.get_all(
            **job_type_filter_1)
        if count == 1:
            self.assertEqual(0, len(jobs_job_type_filter_1['jobs']))
        else:
            self.assertEqual(1, len(jobs_job_type_filter_1['jobs']))

        jobs_job_type_filter_2 = self.controller.get_all(
            **job_type_filter_2)
        self.assertEqual(0, len(jobs_job_type_filter_2['jobs']))

        # successful case, filter by job status and job type
        if count <= amount_of_running_jobs:
            all_filters = dict(list(job_status_filter_3.items()) +
                               list(job_type_filter_1.items()))
            jobs_all_filters = self.controller.get_all(**all_filters)
            if count == 1:
                self.assertEqual(0, len(jobs_all_filters['jobs']))
            else:
                self.assertEqual(1, len(jobs_all_filters['jobs']))
        else:
            all_filters = dict(list(job_status_filter_1.items()) +
                               list(job_type_filter_1.items()))
            jobs_all_filters = self.controller.get_all(**all_filters)
            self.assertEqual(1, len(jobs_all_filters['jobs']))

        # successful case, contradictory filter
        contradict_filters = dict(list(job_status_filter_2.items()) +
                                  list(job_type_filter_2.items()))
        jobs_contradict_filters = self.controller.get_all(
            **contradict_filters)
        self.assertEqual(0, len(jobs_contradict_filters['jobs']))
        count = count + 1

    # failure case, unsupported filter
    res = self.controller.get_all(**unsupported_filter)
    self._validate_error_code(res, 400)

    # successful case, invalid filter
    jobs_invalid_filter = self.controller.get_all(**invalid_filter)
    self.assertEqual(0, len(jobs_invalid_filter['jobs']))

    # successful case, list jobs without filters
    jobs_empty_filters = self.controller.get_all()
    self.assertEqual(amount_of_all_jobs - 1,
                     len(jobs_empty_filters['jobs']))

    # successful case, filter by job status
    jobs_job_status_filter_1 = self.controller.get_all(
        **job_status_filter_1)
    self.assertEqual(amount_of_all_jobs - amount_of_running_jobs,
                     len(jobs_job_status_filter_1['jobs']))

    jobs_job_status_filter_2 = self.controller.get_all(
        **job_status_filter_2)
    self.assertEqual(0, len(jobs_job_status_filter_2['jobs']))

    jobs_job_status_filter_3 = self.controller.get_all(
        **job_status_filter_3)
    self.assertEqual(amount_of_running_jobs - 1,
                     len(jobs_job_status_filter_3['jobs']))

    # test for paginate query
    job_paginate_no_filter_1 = self.controller.get_all()
    self.assertEqual(amount_of_all_jobs - 1,
                     len(job_paginate_no_filter_1['jobs']))

    # no limit no marker
    job_paginate_filter_1 = {'status': 'new'}
    jobs_paginate_filter_1 = self.controller.get_all(
        **job_paginate_filter_1)
    self.assertEqual(amount_of_all_jobs - amount_of_running_jobs,
                     len(jobs_paginate_filter_1['jobs']))

    # failure case, unsupported limit type
    job_paginate_filter_2 = {'limit': '2test'}
    res = self.controller.get_all(**job_paginate_filter_2)
    self._validate_error_code(res, 400)

    # successful cases
    job_paginate_filter_4 = {'status': 'new', 'limit': '2'}
    res = self.controller.get_all(**job_paginate_filter_4)
    self.assertEqual(2, len(res['jobs']))

    job_paginate_filter_5 = {'status': 'new', 'limit': 2}
    res = self.controller.get_all(**job_paginate_filter_5)
    self.assertEqual(2, len(res['jobs']))

    job_paginate_filter_6 = {'status': 'running', 'limit': 1}
    res1 = self.controller.get_all(**job_paginate_filter_6)
    marker = res1['jobs'][0]['id']
    job_paginate_filter_7 = {'status': 'running', 'marker': marker}
    res2 = self.controller.get_all(**job_paginate_filter_7)
    self.assertEqual(amount_of_running_jobs - 1, len(res2['jobs']))

    job_paginate_filter_8 = {'status': 'new', 'limit': 3}
    res = self.controller.get_all(**job_paginate_filter_8)
    self.assertGreaterEqual(res['jobs'][0]['timestamp'],
                            res['jobs'][1]['timestamp'])
    self.assertGreaterEqual(res['jobs'][1]['timestamp'],
                            res['jobs'][2]['timestamp'])

    # unsupported marker type
    res = self.controller.get_all(marker=None)
    self.assertEqual(amount_of_all_jobs - 1, len(res['jobs']))

    res = self.controller.get_all(marker='-123')
    self._validate_error_code(res, 400)

    # marker not found in either the job table or the job log table
    job_paginate_filter_9 = {'marker': uuidutils.generate_uuid()}
    res = self.controller.get_all(**job_paginate_filter_9)
    self._validate_error_code(res, 400)

    # test marker and limit
    limit = 2
    pt = r'/v1.0/jobs\?limit=\w+&marker=([\w-]+)'
    job_paginate_filter = {'status': 'new', 'limit': limit}
    res = self.controller.get_all(**job_paginate_filter)
    while 'jobs_links' in res:
        m = re.match(pt, res['jobs_links'][0]['href'])
        marker = m.group(1)
        self.assertEqual(limit, len(res['jobs']))
        job_paginate_filter = {'status': 'new', 'limit': limit,
                               'marker': marker}
        res = self.controller.get_all(**job_paginate_filter)

    job_paginate_filter_10 = {'status': 'running'}
    res = self.controller.get_all(**job_paginate_filter_10)
    self.assertEqual(amount_of_running_jobs - 1, len(res['jobs']))
    # add some rows to the job log table
    for i in xrange(amount_of_running_jobs - 1):
        db_api.finish_job(self.context, res['jobs'][i]['id'], True,
                          timeutils.utcnow())
        time.sleep(1)
    res_success_log = db_api.list_jobs_from_log(self.context, None)
    self.assertEqual(amount_of_running_jobs - 1, len(res_success_log))

    res_in_job = db_api.list_jobs(self.context, None)
    self.assertEqual(amount_of_all_jobs - (amount_of_running_jobs - 1),
                     len(res_in_job))

    job_paginate_filter_11 = {'limit': 2}
    res = self.controller.get_all(**job_paginate_filter_11)
    self.assertIsNotNone(res['jobs_links'][0]['href'])
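# Illustrative only (not part of the test suite): a tiny check showing that
# the 'next' href built by get_all(), "<JOB_PATH>?limit=<n>&marker=<id>",
# is exactly what the raw-string pattern used above captures. It assumes
# JOB_PATH is '/v1.0/jobs' and uses a made-up marker UUID.
import re

sample_href = '/v1.0/jobs?limit=2&marker=3f4ecf30-87e4-4efd-b0b8-2a0f355571d7'
pattern = r'/v1.0/jobs\?limit=\w+&marker=([\w-]+)'
match = re.match(pattern, sample_href)
assert match is not None
assert match.group(1) == '3f4ecf30-87e4-4efd-b0b8-2a0f355571d7'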