def get_object(self, request, **kwargs):
    """
    Look up the job and return it, or return None if the job wasn't found.
    """
    if not hasattr(request, 'instance'):
        request.instance = utils.get_job(kwargs['job_uuid'])

    return request.instance
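# A usage sketch (the two handlers below are hypothetical, not from the
# source): because get_object() caches the job on request.instance, the
# not-found check and the GET handler share a single lookup per request
# instead of hitting the job store twice.
def is_not_found(self, request, response, **kwargs):
    return self.get_object(request, **kwargs) is None

def get(self, request, **kwargs):
    job = self.get_object(request, **kwargs)
    return {'id': job.id, 'status': job.status}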
def test_create_and_cancel(self):
    # Create 3 meaningless jobs. We're just testing job setup and
    # cancellation here, not the execution.
    job_options = {
        'name': 'Job X',
    }
    job_x_id = utils.async_get_result_rows(None, None, {}, job_options)

    job_options = {
        'name': 'Job Y',
        'query_name': 'job_y_query',
    }
    job_y_id = utils.async_get_result_rows(None, None, {}, job_options)

    job_z_id = utils.async_get_result_rows(None, None, {})

    self.assertEqual(async_utils.get_job_count(), 3)

    jobs = async_utils.get_jobs()
    self.assertEqual(len(jobs), 3)

    job_x = async_utils.get_job(job_x_id)
    self.assertTrue(job_x in jobs)
    self.assertEqual(job_x.meta['name'], 'Job X')

    job_y = async_utils.get_job(job_y_id)
    job_z = async_utils.get_job(job_z_id)

    self.assertEqual(async_utils.cancel_job(job_x_id), None)

    # Prove the job is gone from Redis.
    with self.assertRaises(NoSuchJobError):
        job_x.refresh()

    # Prove the job is gone from the queue.
    self.assertEqual(async_utils.get_job_count(), 2)

    # Canceling a non-existent job should leave the queue untouched.
    async_utils.cancel_job('invalid_id')
    self.assertEqual(async_utils.get_job_count(), 2)

    self.assertTrue('canceled' in async_utils.cancel_job(job_y_id))
    self.assertEqual(async_utils.get_job_count(), 1)

    async_utils.cancel_all_jobs()
    with self.assertRaises(NoSuchJobError):
        job_y.refresh()
    with self.assertRaises(NoSuchJobError):
        job_z.refresh()
    self.assertEqual(async_utils.get_job_count(), 0)
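# For context, a minimal sketch (an assumption, not the project's actual
# implementation) of why refresh() raises NoSuchJobError after a cancel:
# deleting a job removes its Redis hash entirely, so any later refresh on
# a stale handle fails.
from rq.exceptions import NoSuchJobError
from rq.job import Job

def cancel_and_delete(job_id, connection):
    try:
        job = Job.fetch(job_id, connection=connection)
    except NoSuchJobError:
        return None  # unknown ids are ignored, matching the test above
    job.delete()  # removes the job hash; job.refresh() will now raise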
def test_get_session(self):
    # Make sure we have a session query.
    query = DataQuery(user=self.user, name='Query', session=True)
    query.save()

    # All results for the session query.
    response = self.client.get('/api/async/queries/session/results/',
                               HTTP_ACCEPT='application/json')
    self.assertEqual(
        response.status_code, HttpResponseRedirect.status_code)
    normal_job_id = response['Location'].split('/')[-2]

    # A single page of results for the session query.
    response = self.client.get('/api/async/queries/session/results/3/',
                               HTTP_ACCEPT='application/json')
    self.assertEqual(
        response.status_code, HttpResponseRedirect.status_code)
    paged_job_id = response['Location'].split('/')[-2]

    # A page range of results for the session query.
    response = self.client.get('/api/async/queries/session/results/1...5/',
                               HTTP_ACCEPT='application/json')
    self.assertEqual(
        response.status_code, HttpResponseRedirect.status_code)
    range_job_id = response['Location'].split('/')[-2]

    # The three requests above should have triggered 3 queued jobs.
    self.assertEqual(utils.get_job_count(), 3)
    for job_id in [normal_job_id, paged_job_id, range_job_id]:
        self.assert_job_status_equal(utils.get_job(job_id), 'queued')

    # Sleeping a couple of seconds should leave plenty of time for the
    # worker to finish the three jobs from above.
    utils.run_jobs()
    time.sleep(3)

    # The three previous requests should now all be completed and their
    # items should match what we expect.
    for job_id in [normal_job_id, paged_job_id, range_job_id]:
        self.assert_job_status_equal(utils.get_job(job_id), 'finished')
        self.assert_job_result_equal(utils.get_job(job_id), [])
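# The fixed time.sleep(3) above leaves slack for the worker but can be
# flaky on slow machines. A possible alternative (a sketch assuming
# utils.get_job() returns rq-style jobs with a .status attribute;
# wait_for_jobs is a hypothetical helper, not part of the codebase):
def wait_for_jobs(job_ids, timeout=10.0, interval=0.25):
    """Poll until every job reaches a terminal state or the timeout lapses."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        statuses = [utils.get_job(job_id).status for job_id in job_ids]
        if all(status in ('finished', 'failed') for status in statuses):
            return statuses
        time.sleep(interval)
    raise AssertionError('jobs still unfinished after %.1fs' % timeout)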
def test_invalid_job_result(self):
    context = DataContext()
    view = DataView()

    # Page numbers are 1-based, so requesting page 0 should cause the
    # job to fail rather than produce a result.
    query_options = {
        'page': 0,
    }

    job_id = utils.async_get_result_rows(context, view, query_options)
    self.assertEqual(async_utils.get_job_count(), 1)
    async_utils.run_jobs()
    time.sleep(1)
    self.assertEqual(async_utils.get_job_result(job_id), None)
    self.assertEqual(async_utils.get_job(job_id).status, JobStatus.FAILED)
def test_job_result(self):
    context = DataContext()
    view = DataView()

    limit = 3
    query_options = {
        'limit': limit,
        'page': 1,
    }

    job_id = utils.async_get_result_rows(context, view, query_options)
    self.assertEqual(async_utils.get_job_count(), 1)
    async_utils.run_jobs()
    time.sleep(1)

    result = async_utils.get_job_result(job_id)
    self.assertEqual(
        async_utils.get_job(job_id).status, JobStatus.FINISHED)
    self.assertEqual(len(result['rows']), limit)
    self.assertEqual(result['limit'], limit)
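# Pagination arithmetic implied by the assertions above (a sketch under
# the assumption of standard offset/limit slicing; page_bounds is a
# hypothetical helper, not part of the codebase):
def page_bounds(page, limit):
    """Map a 1-based page number to the half-open row interval it covers."""
    offset = (page - 1) * limit
    return offset, offset + limit

assert page_bounds(1, 3) == (0, 3)  # page 1 with limit 3 -> rows 0..2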
def test_no_processor(self):
    # An async request with no processor specified.
    response = self.client.get('/api/async/preview/',
                               HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code,
                     HttpResponseRedirect.status_code)
    normal_job_id = response['Location'].split('/')[-2]

    # An async request with a valid processor.
    response = self.client.get('/api/async/preview/?processor=manager',
                               HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code,
                     HttpResponseRedirect.status_code)
    valid_job_id = response['Location'].split('/')[-2]

    # The Parametizer cleaning process should fall back to the default
    # value when the processor is not in the list of choices, which in
    # our case is the list of available query processors, so we should
    # end up with the default processor.
    response = self.client.get('/api/async/preview/?processor=INVALID',
                               HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code,
                     HttpResponseRedirect.status_code)
    invalid_job_id = response['Location'].split('/')[-2]

    # The three requests above should have triggered 3 queued jobs.
    self.assertEqual(utils.get_job_count(), 3)
    for job_id in [normal_job_id, valid_job_id, invalid_job_id]:
        self.assert_job_status_equal(utils.get_job(job_id), 'queued')

    # Sleeping a couple of seconds should leave plenty of time for the
    # worker to finish the three jobs from above.
    utils.run_jobs()
    time.sleep(3)

    # The three previous requests should now all be completed and their
    # items should match what we expect.
    for job_id in [normal_job_id, valid_job_id, invalid_job_id]:
        self.assert_job_status_equal(utils.get_job(job_id), 'finished')

    # When no processor is specified, all rows should be in the job result.
    response = self.client.get(
        '/api/jobs/{0}/result/'.format(normal_job_id),
        HTTP_ACCEPT='application/json')
    content = json.loads(response.content)
    self.assertEqual(len(content['items']), 6)

    # When the manager processor is used, only a single row should be in
    # the result set.
    response = self.client.get(
        '/api/jobs/{0}/result/'.format(valid_job_id),
        HTTP_ACCEPT='application/json')
    content = json.loads(response.content)
    self.assertEqual(len(content['items']), 1)

    # When an invalid processor is specified, the default processor should
    # be used and, as above, the default processor returns all the rows.
    response = self.client.get(
        '/api/jobs/{0}/result/'.format(invalid_job_id),
        HTTP_ACCEPT='application/json')
    content = json.loads(response.content)
    self.assertEqual(len(content['items']), 6)