def test_json(self): """Tests coverting a CreateBatchRecipes message to and from JSON""" # Previous batch with three recipes recipe_type = recipe_test_utils.create_recipe_type() prev_batch = batch_test_utils.create_batch(recipe_type=recipe_type, is_creation_done=True, recipes_total=3) recipe_1 = recipe_test_utils.create_recipe(batch=prev_batch) recipe_2 = recipe_test_utils.create_recipe(batch=prev_batch) recipe_3 = recipe_test_utils.create_recipe(batch=prev_batch) definition = BatchDefinition() definition.root_batch_id = prev_batch.root_batch_id batch = batch_test_utils.create_batch(recipe_type=recipe_type, definition=definition) # Create message message = create_batch_recipes_message(batch.id) # Convert message to JSON and back, and then execute message_json_dict = message.to_json() new_message = CreateBatchRecipes.from_json(message_json_dict) result = new_message.execute() self.assertTrue(result) # Should be one reprocess_recipes message for the three recipes self.assertEqual(len(new_message.new_messages), 1) message = new_message.new_messages[0] self.assertEqual(message.type, 'reprocess_recipes') self.assertSetEqual(set(message._root_recipe_ids), {recipe_1.id, recipe_2.id, recipe_3.id})
def test_get_source_products(self):
    """Tests calling get_source_products()"""

    from batch.test import utils as batch_test_utils
    from product.test import utils as product_test_utils

    # Two published products derived from the same source file, each linked
    # through a different batch
    job_exe_1 = job_utils.create_job_exe()
    job_exe_2 = job_utils.create_job_exe()
    product_1 = product_test_utils.create_product(job_exe=job_exe_1, has_been_published=True, workspace=self.workspace)
    product_2 = product_test_utils.create_product(job_exe=job_exe_2, has_been_published=True, workspace=self.workspace)
    batch_1 = batch_test_utils.create_batch()
    batch_2 = batch_test_utils.create_batch()
    product_test_utils.create_file_link(ancestor=self.src_file, descendant=product_1, job=job_exe_1.job, job_exe=job_exe_1, batch=batch_1)
    product_test_utils.create_file_link(ancestor=self.src_file, descendant=product_2, job=job_exe_2.job, job_exe=job_exe_2, batch=batch_2)

    # Filtering by batch_1 should return only the product linked through it
    products = SourceFile.objects.get_source_products(self.src_file.id, batch_ids=[batch_1.id])

    self.assertEqual(len(products), 1)
    self.assertEqual(products[0].id, product_1.id)
def setUp(self):
    """Create two recipe types, each with one batch in a distinct status."""
    django.setup()

    # Create both recipe types first, then their batches
    self.recipe_type1 = recipe_test_utils.create_recipe_type(name='test1', version='1.0')
    self.recipe_type2 = recipe_test_utils.create_recipe_type(name='test2', version='1.0')

    self.batch1 = batch_test_utils.create_batch(recipe_type=self.recipe_type1, status='SUBMITTED')
    self.batch2 = batch_test_utils.create_batch(recipe_type=self.recipe_type2, status='CREATED')
def setUp(self):
    """Log in a test client and create one in-progress and one finished batch."""
    django.setup()
    rest.login_client(self.client)

    # Create both v6 recipe types up front, then their batches
    self.recipe_type_1 = recipe_test_utils.create_recipe_type_v6()
    self.recipe_type_2 = recipe_test_utils.create_recipe_type_v6()

    self.batch_1 = batch_test_utils.create_batch(recipe_type=self.recipe_type_1, is_creation_done=False)
    self.batch_2 = batch_test_utils.create_batch(recipe_type=self.recipe_type_2, is_creation_done=True)
def test_schedule_trigger_rule_custom(self):
    """Tests calling BatchManager.schedule_recipes() using a custom trigger rule."""

    # Source file that matches the custom trigger rule below
    file1 = storage_test_utils.create_file(media_type='text/custom', data_type='test')

    definition = {
        'trigger_rule': {
            'condition': {
                'media_type': 'text/custom',
                'data_types': ['test'],
            },
            'data': {
                'input_data_name': 'Recipe Input',
                'workspace_name': self.workspace.name,
            },
        },
    }
    batch = batch_test_utils.create_batch(recipe_type=self.recipe_type, definition=definition)

    Batch.objects.schedule_recipes(batch.id)

    # One new recipe should have been created for the matching file
    batch = Batch.objects.get(pk=batch.id)
    self.assertEqual(batch.status, 'CREATED')
    self.assertEqual(batch.total_count, 1)

    batch_recipes = BatchRecipe.objects.all()
    self.assertEqual(len(batch_recipes), 1)
    self.assertEqual(batch_recipes[0].batch, batch)
    self.assertEqual(batch_recipes[0].recipe.recipe_type, self.recipe_type)
    # Trigger-rule recipes are brand new, not re-processed from an old recipe
    self.assertIsNone(batch_recipes[0].superseded_recipe)
    self.assertEqual(batch_recipes[0].recipe.data['input_data'][0]['file_id'], file1.id)
def test_schedule_job_names(self):
    """Tests calling BatchManager.schedule_recipes() for a batch that forces all jobs to be re-processed"""

    handler = Recipe.objects.create_recipe(recipe_type=self.recipe_type, data=RecipeData(self.data), event=self.event)
    recipe_test_utils.edit_recipe_type(self.recipe_type, self.definition_2)

    # Force re-processing of the named job
    definition = {
        'job_names': ['Job 1'],
    }
    batch = batch_test_utils.create_batch(recipe_type=self.recipe_type, definition=definition)

    Batch.objects.schedule_recipes(batch.id)

    batch = Batch.objects.get(pk=batch.id)
    self.assertEqual(batch.status, 'CREATED')
    self.assertEqual(batch.created_count, 1)
    self.assertEqual(batch.total_count, 1)

    # The original recipe should be superseded by the batch's new recipe
    batch_recipes = BatchRecipe.objects.all()
    self.assertEqual(len(batch_recipes), 1)
    self.assertEqual(batch_recipes[0].batch, batch)
    self.assertEqual(batch_recipes[0].recipe.recipe_type, self.recipe_type)
    self.assertEqual(batch_recipes[0].superseded_recipe, handler.recipe)

    # Both job types from the recipe definition get batch jobs
    batch_jobs = BatchJob.objects.all()
    self.assertEqual(len(batch_jobs), 2)
    for batch_job in batch_jobs:
        self.assertIn(batch_job.job.job_type, [self.job_type_1, self.job_type_2])
def test_schedule_date_range_created(self):
    """Tests calling BatchManager.schedule_recipes() for a batch with a created date range restriction"""

    # Three recipes with created timestamps forced to Jan, Feb and Mar 2016
    # NOTE(review): these datetimes are tz-naive — assumes the test settings
    # run without USE_TZ (the tz-aware variant elsewhere uses tzinfo=utc); confirm
    recipe1 = Recipe.objects.create_recipe(recipe_type=self.recipe_type, data=RecipeData(self.data), event=self.event).recipe
    Recipe.objects.filter(pk=recipe1.id).update(created=datetime.datetime(2016, 1, 1))
    recipe2 = Recipe.objects.create_recipe(recipe_type=self.recipe_type, data=RecipeData(self.data), event=self.event).recipe
    Recipe.objects.filter(pk=recipe2.id).update(created=datetime.datetime(2016, 2, 1))
    recipe3 = Recipe.objects.create_recipe(recipe_type=self.recipe_type, data=RecipeData(self.data), event=self.event).recipe
    Recipe.objects.filter(pk=recipe3.id).update(created=datetime.datetime(2016, 3, 1))

    recipe_test_utils.edit_recipe_type(self.recipe_type, self.definition_2)

    # Restrict the batch to recipes created between Jan 10 and Feb 10
    definition = {
        'date_range': {
            'started': '2016-01-10T00:00:00.000Z',
            'ended': '2016-02-10T00:00:00.000Z',
        },
    }
    batch = batch_test_utils.create_batch(recipe_type=self.recipe_type, definition=definition)

    Batch.objects.schedule_recipes(batch.id)

    batch = Batch.objects.get(pk=batch.id)
    self.assertEqual(batch.status, 'CREATED')
    self.assertEqual(batch.created_count, 1)
    self.assertEqual(batch.total_count, 1)

    # Only the recipe created inside the range (recipe2) is re-processed
    batch_recipes = BatchRecipe.objects.all()
    self.assertEqual(len(batch_recipes), 1)
    self.assertEqual(batch_recipes[0].superseded_recipe, recipe2)
def test_schedule_partial_batch(self):
    """Tests calling BatchManager.schedule_recipes() for a batch that is incomplete"""

    # Five recipes that are eligible on the first scheduling pass
    for i in range(5):
        Recipe.objects.create_recipe(recipe_type=self.recipe_type, data=RecipeData(self.data), event=self.event)

    # Five more recipes temporarily marked superseded so the first pass skips them
    partials = []
    for i in range(5):
        handler = Recipe.objects.create_recipe(recipe_type=self.recipe_type, data=RecipeData(self.data), event=self.event)
        handler.recipe.is_superseded = True
        handler.recipe.save()
        partials.append(handler.recipe)

    recipe_test_utils.edit_recipe_type(self.recipe_type, self.definition_2)

    batch = batch_test_utils.create_batch(recipe_type=self.recipe_type)
    Batch.objects.schedule_recipes(batch.id)

    # First pass only picks up the five non-superseded recipes
    batch = Batch.objects.get(pk=batch.id)
    self.assertEqual(batch.created_count, 5)
    self.assertEqual(batch.total_count, 5)

    # Un-supersede the rest and re-submit the batch to pick them up
    for recipe in partials:
        recipe.is_superseded = False
        recipe.save()
    batch.status = 'SUBMITTED'
    batch.save()

    Batch.objects.schedule_recipes(batch.id)

    batch = Batch.objects.get(pk=batch.id)
    self.assertEqual(batch.status, 'CREATED')
    self.assertEqual(batch.created_count, 10)
    self.assertEqual(batch.total_count, 10)

    batch_recipes = BatchRecipe.objects.all()
    self.assertEqual(len(batch_recipes), 10)
def test_json(self):
    """Tests converting a CreateConditions message to and from JSON"""

    batch = batch_test_utils.create_batch()
    recipe = recipe_test_utils.create_recipe(batch=batch)
    # node_1's condition was not accepted, node_2's was
    conditions = [Condition('node_1', False), Condition('node_2', True)]

    # Create message
    message = create_conditions_messages(recipe, conditions)[0]

    # Convert message to JSON and back, and then execute
    message_json_dict = message.to_json()
    new_message = CreateConditions.from_json(message_json_dict)
    result = new_message.execute()
    self.assertTrue(result)

    # Both condition nodes should now exist on the recipe
    self.assertEqual(RecipeCondition.objects.filter(recipe_id=recipe.id).count(), 2)
    recipe_nodes = RecipeNode.objects.select_related('condition').filter(recipe_id=recipe.id).order_by('node_name')
    self.assertEqual(len(recipe_nodes), 2)
    self.assertEqual(recipe_nodes[0].node_name, 'node_1')
    self.assertEqual(recipe_nodes[1].node_name, 'node_2')
    condition_2 = recipe_nodes[1].condition

    # Should be one message for processing condition for node 2
    self.assertEqual(len(new_message.new_messages), 1)
    process_condition_msg = new_message.new_messages[0]
    self.assertEqual(process_condition_msg.type, 'process_condition')
    self.assertEqual(process_condition_msg.condition_id, condition_2.id)
def setUp(self):
    django.setup()

    from batch.test import utils as batch_test_utils
    from product.test import utils as product_test_utils

    self.country = storage_test_utils.create_country()
    self.src_file = source_test_utils.create_source()

    # Operational job type with one published product (no batch link)
    self.job_type1 = job_test_utils.create_job_type(name='test1', category='test-1', is_operational=True)
    self.job1 = job_test_utils.create_job(job_type=self.job_type1)
    self.job_exe1 = job_test_utils.create_job_exe(job=self.job1)
    self.product1 = product_test_utils.create_product(job_exe=self.job_exe1, has_been_published=True, is_published=True, file_name='test.txt', countries=[self.country])
    product_test_utils.create_file_link(ancestor=self.src_file, descendant=self.product1, job=self.job1, job_exe=self.job_exe1)

    self.batch = batch_test_utils.create_batch()

    # Non-operational job type with three products linked through the batch:
    # unpublished (2a), superseded (2b), and published (2c)
    self.job_type2 = job_test_utils.create_job_type(name='test2', category='test-2', is_operational=False)
    self.job2 = job_test_utils.create_job(job_type=self.job_type2)
    self.job_exe2 = job_test_utils.create_job_exe(job=self.job2)
    self.product2a = product_test_utils.create_product(job_exe=self.job_exe2, has_been_published=True, is_published=False, countries=[self.country])
    product_test_utils.create_file_link(ancestor=self.src_file, descendant=self.product2a, job=self.job2, job_exe=self.job_exe2, batch=self.batch)
    self.product2b = product_test_utils.create_product(job_exe=self.job_exe2, has_been_published=True, is_published=True, is_superseded=True, countries=[self.country])
    product_test_utils.create_file_link(ancestor=self.src_file, descendant=self.product2b, job=self.job2, job_exe=self.job_exe2, batch=self.batch)
    self.product2c = product_test_utils.create_product(job_exe=self.job_exe2, has_been_published=True, is_published=True, countries=[self.country])
    product_test_utils.create_file_link(ancestor=self.src_file, descendant=self.product2c, job=self.job2, job_exe=self.job_exe2, batch=self.batch)
def test_invalid_version(self):
    """Requesting the batch comparison view under an unsupported API version yields 404."""
    new_batch = batch_test_utils.create_batch()

    # v1 does not expose the comparison endpoint
    bad_version_url = '/v1/batches/comparison/%d/' % new_batch.root_batch_id
    response = self.client.generic('GET', bad_version_url)

    self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND, response.content)
def test_schedule_invalid_status(self):
    """Re-scheduling a batch that was already created raises BatchError."""
    Recipe.objects.create_recipe(recipe_type=self.recipe_type, data=RecipeData(self.data), event=self.event)
    new_batch = batch_test_utils.create_batch(recipe_type=self.recipe_type)

    # First scheduling attempt succeeds
    Batch.objects.schedule_recipes(new_batch.id)

    # A second attempt must be rejected
    with self.assertRaises(BatchError):
        Batch.objects.schedule_recipes(new_batch.id)
def test_order_by(self):
    """Tests successfully calling the batches view with sorting."""

    # Two more versions of the 'test1' recipe type, each with its own batch
    # (the base fixtures presumably come from setUp — confirm against the class)
    recipe_type1b = recipe_test_utils.create_recipe_type(name='test1', version='2.0')
    batch_test_utils.create_batch(recipe_type=recipe_type1b)

    recipe_type1c = recipe_test_utils.create_recipe_type(name='test1', version='3.0')
    batch_test_utils.create_batch(recipe_type=recipe_type1c)

    # Sort by recipe type name ascending, then version descending
    url = rest_util.get_url('/batches/?order=recipe_type__name&order=-recipe_type__version')
    response = self.client.generic('GET', url)
    self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)

    result = json.loads(response.content)
    self.assertEqual(len(result['results']), 4)
    self.assertEqual(result['results'][0]['recipe_type']['id'], recipe_type1c.id)
    self.assertEqual(result['results'][1]['recipe_type']['id'], recipe_type1b.id)
    self.assertEqual(result['results'][2]['recipe_type']['id'], self.recipe_type1.id)
    self.assertEqual(result['results'][3]['recipe_type']['id'], self.recipe_type2.id)
def setUp(self):
    """Register the AMQP messaging backend and create a batch fixture."""
    django.setup()

    add_message_backend(AMQPMessagingBackend)

    # mock out threading.start
    self.recipe_type = recipe_test_utils.create_recipe_type_v6()
    self.batch = batch_test_utils.create_batch(recipe_type=self.recipe_type)
def test_validate(self):
    """Tests calling BatchConfiguration.validate()"""

    batch = batch_test_utils.create_batch()

    # Valid configuration
    json_dict = {'version': '6', 'priority': 202}
    # Renamed from `json` so the local no longer shadows the json module
    configuration_v6 = BatchConfigurationV6(configuration=json_dict)
    configuration = configuration_v6.get_configuration()
    # validate() raising would fail the test; no exception means success
    configuration.validate(batch)
def test_schedule_date_range_data_ended(self):
    """Tests calling BatchManager.schedule_recipes() for a batch with a data ended date range restriction"""

    # Recipe whose input file's data range ends before the cutoff (should match)
    file1 = storage_test_utils.create_file()
    file1.data_started = datetime.datetime(2016, 1, 1, tzinfo=utc)
    file1.data_ended = datetime.datetime(2016, 1, 10, tzinfo=utc)
    file1.save()
    data1 = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'file_id': file1.id,
        }],
        'workspace_id': self.workspace.id,
    }
    recipe1 = Recipe.objects.create_recipe(recipe_type=self.recipe_type, data=RecipeData(data1), event=self.event).recipe

    # Recipe whose input file's data range ends after the cutoff (should be excluded)
    file2 = storage_test_utils.create_file()
    file2.data_started = datetime.datetime(2016, 2, 1, tzinfo=utc)
    file2.data_ended = datetime.datetime(2016, 2, 10, tzinfo=utc)
    file2.save()
    data2 = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'file_id': file2.id,
        }],
        'workspace_id': self.workspace.id,
    }
    Recipe.objects.create_recipe(recipe_type=self.recipe_type, data=RecipeData(data2), event=self.event)

    recipe_test_utils.edit_recipe_type(self.recipe_type, self.definition_2)

    # Data-based date range with only an 'ended' cutoff
    definition = {
        'date_range': {
            'type': 'data',
            'ended': '2016-01-15T00:00:00.000Z',
        },
    }
    batch = batch_test_utils.create_batch(recipe_type=self.recipe_type, definition=definition)

    Batch.objects.schedule_recipes(batch.id)

    batch = Batch.objects.get(pk=batch.id)
    self.assertEqual(batch.status, 'CREATED')
    self.assertEqual(batch.created_count, 1)
    self.assertEqual(batch.total_count, 1)

    # Only the first recipe qualifies for re-processing
    batch_recipes = BatchRecipe.objects.all()
    self.assertEqual(len(batch_recipes), 1)
    self.assertEqual(batch_recipes[0].superseded_recipe, recipe1)
def setUp(self):
    django.setup()
    rest.login_client(self.client, is_staff=True)

    self.recipe_type_1 = recipe_test_utils.create_recipe_type_v6()
    self.batch_1 = batch_test_utils.create_batch(recipe_type=self.recipe_type_1, is_creation_done=False)

    self.recipe_type_2 = recipe_test_utils.create_recipe_type_v6()
    self.batch_2 = batch_test_utils.create_batch(recipe_type=self.recipe_type_2, is_creation_done=True)

    # Recipe type whose definition points node_a at a real seed job type
    self.job_type1 = job_test_utils.create_seed_job_type(manifest=job_test_utils.MINIMUM_MANIFEST)
    self.sub_definition = copy.deepcopy(recipe_test_utils.SUB_RECIPE_DEFINITION)
    self.sub_definition['nodes']['node_a']['node_type']['job_type_name'] = self.job_type1.name
    self.sub_definition['nodes']['node_a']['node_type']['job_type_version'] = self.job_type1.version
    self.sub_definition['nodes']['node_a']['node_type']['job_type_revision'] = self.job_type1.revision_num

    # Third batch already has a recipe attached
    self.recipe_type_3 = recipe_test_utils.create_recipe_type_v6(definition=self.sub_definition)
    self.batch_3 = batch_test_utils.create_batch(recipe_type=self.recipe_type_3, is_creation_done=True)
    recipe_test_utils.create_recipe(recipe_type=self.recipe_type_3, batch=self.batch_3)
def test_json(self):
    """Tests converting a ReprocessRecipes message to and from JSON"""

    batch = batch_test_utils.create_batch()
    event = trigger_test_utils.create_trigger_event()

    # Create message
    message = create_reprocess_recipes_messages(self.old_recipe_ids, self.recipe_1.recipe_type_rev_id, event.id,
                                                all_jobs=True, job_names=['Job 1', 'Job 2'], batch_id=batch.id)[0]

    # Convert message to JSON and back, and then execute
    message_json_dict = message.to_json()
    new_message = ReprocessRecipes.from_json(message_json_dict)
    result = new_message.execute()
    self.assertTrue(result)

    # Make sure new recipes supersede the old ones
    for recipe in Recipe.objects.filter(id__in=self.old_recipe_ids):
        self.assertTrue(recipe.is_superseded)
    new_recipe_1 = Recipe.objects.get(superseded_recipe_id=self.recipe_1.id)
    self.assertEqual(new_recipe_1.batch_id, batch.id)
    self.assertEqual(new_recipe_1.event_id, event.id)
    self.assertDictEqual(new_recipe_1.input, self.recipe_1.input)
    new_recipe_2 = Recipe.objects.get(superseded_recipe_id=self.recipe_2.id)
    self.assertEqual(new_recipe_2.batch_id, batch.id)
    self.assertEqual(new_recipe_2.event_id, event.id)
    self.assertDictEqual(new_recipe_2.input, self.recipe_2.input)

    # Make sure old jobs are superseded
    for job in Job.objects.filter(id__in=self.old_job_ids):
        self.assertTrue(job.is_superseded)

    # Should be three messages, two for processing new recipe input and one for canceling superseded jobs
    self.assertEqual(len(new_message.new_messages), 3)
    found_process_recipe_input_1 = False
    found_process_recipe_input_2 = False
    found_cancel_jobs = False
    for msg in new_message.new_messages:
        if msg.type == 'process_recipe_input':
            # First match sets flag 1; a second match sets flag 2
            if found_process_recipe_input_1:
                found_process_recipe_input_2 = True
            found_process_recipe_input_1 = True
        elif msg.type == 'cancel_jobs':
            found_cancel_jobs = True
    self.assertTrue(found_process_recipe_input_1)
    self.assertTrue(found_process_recipe_input_2)
    self.assertTrue(found_cancel_jobs)
def test_json(self):
    """Tests converting an UpdateBatchMetrics message to and from JSON"""

    batch = batch_test_utils.create_batch()

    # Recipe 1: five jobs in assorted states (1 failed, 1 canceled, 2 blocked, 1 completed)
    recipe_1 = recipe_test_utils.create_recipe(batch=batch)
    job_1 = job_test_utils.create_job(status='FAILED')
    job_2 = job_test_utils.create_job(status='CANCELED')
    job_3 = job_test_utils.create_job(status='BLOCKED')
    job_4 = job_test_utils.create_job(status='BLOCKED')
    job_5 = job_test_utils.create_job(status='COMPLETED')
    recipe_test_utils.create_recipe_job(recipe=recipe_1, job=job_1)
    recipe_test_utils.create_recipe_job(recipe=recipe_1, job=job_2)
    recipe_test_utils.create_recipe_job(recipe=recipe_1, job=job_3)
    recipe_test_utils.create_recipe_job(recipe=recipe_1, job=job_4)
    recipe_test_utils.create_recipe_job(recipe=recipe_1, job=job_5)

    # Recipe 2: marked completed, with three completed jobs
    recipe_2 = recipe_test_utils.create_recipe(batch=batch)
    recipe_2.is_completed = True
    recipe_2.save()
    job_6 = job_test_utils.create_job(status='COMPLETED')
    job_7 = job_test_utils.create_job(status='COMPLETED')
    job_8 = job_test_utils.create_job(status='COMPLETED')
    recipe_test_utils.create_recipe_job(recipe=recipe_2, job=job_6)
    recipe_test_utils.create_recipe_job(recipe=recipe_2, job=job_7)
    recipe_test_utils.create_recipe_job(recipe=recipe_2, job=job_8)

    # Generate recipe metrics
    Recipe.objects.update_recipe_metrics([recipe_1.id, recipe_2.id])

    # Add batch to message
    message = UpdateBatchMetrics()
    if message.can_fit_more():
        message.add_batch(batch.id)

    # Convert message to JSON and back, and then execute
    message_json_dict = message.to_json()
    new_message = UpdateBatchMetrics.from_json(message_json_dict)
    result = new_message.execute()
    self.assertTrue(result)

    # Metrics should roll up all eight jobs and both recipes
    batch = Batch.objects.get(id=batch.id)
    self.assertEqual(batch.jobs_total, 8)
    self.assertEqual(batch.jobs_pending, 0)
    self.assertEqual(batch.jobs_blocked, 2)
    self.assertEqual(batch.jobs_queued, 0)
    self.assertEqual(batch.jobs_running, 0)
    self.assertEqual(batch.jobs_failed, 1)
    self.assertEqual(batch.jobs_completed, 4)
    self.assertEqual(batch.jobs_canceled, 1)
    self.assertEqual(batch.recipes_total, 2)
    self.assertEqual(batch.recipes_completed, 1)
def test_json(self):
    """Tests converting a RequeueJobsBulk message to and from JSON"""

    sys_err = error_test_utils.create_error(category='SYSTEM')
    data = JobData()
    batch = batch_test_utils.create_batch()
    recipe = recipe_test_utils.create_recipe()
    job_type = job_test_utils.create_job_type()

    # Failed job linked to the batch and recipe — matches every filter below
    job_1 = job_test_utils.create_job(job_type=job_type, num_exes=3, status='FAILED', error=sys_err, input=data.get_dict())
    job_1.batch_id = batch.id
    job_1.recipe_id = recipe.id
    job_1.save()

    # Canceled job — excluded by the FAILED status filter
    job_2 = job_test_utils.create_job(job_type=job_type, num_exes=3, status='CANCELED', error=sys_err, input=data.get_dict())

    # Create message
    message = RequeueJobsBulk()
    message.started = job_1.last_modified - timedelta(seconds=1)
    message.ended = job_1.last_modified + timedelta(seconds=1)
    message.error_categories = ['SYSTEM']
    message.error_ids = [sys_err.id]
    message.job_ids = [job_1.id]
    message.job_type_ids = [job_type.id]
    message.priority = 1
    message.status = 'FAILED'
    message.job_type_names = [job_type.name]
    message.batch_ids = [batch.id]
    message.recipe_ids = [recipe.id]
    message.is_superseded = False

    # Convert message to JSON and back, and then execute
    message_json_dict = message.to_json()
    new_message = RequeueJobsBulk.from_json(message_json_dict)
    result = new_message.execute()
    self.assertTrue(result)

    # Should be one re-queue message for job 1
    self.assertEqual(len(new_message.new_messages), 1)
    message = new_message.new_messages[0]
    self.assertEqual(message.type, 'requeue_jobs')
    self.assertListEqual(message._requeue_jobs, [QueuedJob(job_1.id, job_1.num_exes)])
    self.assertEqual(message.priority, 1)
def test_schedule_date_range_data_ended(self):
    """Tests calling BatchManager.schedule_recipes() for a batch with a data ended date range restriction"""

    # Recipe whose input file's data range ends before the cutoff (should match)
    # NOTE(review): these datetimes are tz-naive, unlike the tzinfo=utc variant
    # of this test elsewhere — confirm the intended timezone handling
    file1 = storage_test_utils.create_file()
    file1.data_started = datetime.datetime(2016, 1, 1)
    file1.data_ended = datetime.datetime(2016, 1, 10)
    file1.save()
    data1 = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'file_id': file1.id,
        }],
        'workspace_id': self.workspace.id,
    }
    recipe1 = Recipe.objects.create_recipe(recipe_type=self.recipe_type, data=RecipeData(data1), event=self.event).recipe

    # Recipe whose input file's data range ends after the cutoff (should be excluded)
    file2 = storage_test_utils.create_file()
    file2.data_started = datetime.datetime(2016, 2, 1)
    file2.data_ended = datetime.datetime(2016, 2, 10)
    file2.save()
    data2 = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'file_id': file2.id,
        }],
        'workspace_id': self.workspace.id,
    }
    Recipe.objects.create_recipe(recipe_type=self.recipe_type, data=RecipeData(data2), event=self.event)

    recipe_test_utils.edit_recipe_type(self.recipe_type, self.definition_2)

    # Data-based date range with only an 'ended' cutoff
    definition = {
        'date_range': {
            'type': 'data',
            'ended': '2016-01-15T00:00:00.000Z',
        },
    }
    batch = batch_test_utils.create_batch(recipe_type=self.recipe_type, definition=definition)

    Batch.objects.schedule_recipes(batch.id)

    batch = Batch.objects.get(pk=batch.id)
    self.assertEqual(batch.status, 'CREATED')
    self.assertEqual(batch.created_count, 1)
    self.assertEqual(batch.total_count, 1)

    # Only the first recipe qualifies for re-processing
    batch_recipes = BatchRecipe.objects.all()
    self.assertEqual(len(batch_recipes), 1)
    self.assertEqual(batch_recipes[0].superseded_recipe, recipe1)
def test_create_successful(self):
    """Tests calling BatchManager.create_batch() successfully"""
    created = batch_test_utils.create_batch(self.recipe_type)

    # Reload from the database to verify persisted state
    batch = Batch.objects.get(pk=created.id)
    self.assertIsNotNone(batch.title)
    self.assertIsNotNone(batch.description)
    self.assertEqual(batch.status, 'SUBMITTED')
    self.assertEqual(batch.recipe_type, self.recipe_type)

    # Exactly one creator job should exist and be linked to the batch
    creator_jobs = Job.objects.filter(job_type__name='scale-batch-creator')
    self.assertEqual(len(creator_jobs), 1)
    self.assertEqual(batch.creator_job.id, creator_jobs[0].id)
def test_batch_recipe(self):
    """Tests creating a link that has a recipe and batch."""
    exe = job_test_utils.create_job_exe()
    recipe_job = recipe_test_utils.create_recipe_job(job=exe.job)
    linked_batch = batch_test_utils.create_batch()

    # Associate both the recipe and the job with the batch
    BatchRecipe.objects.create(batch_id=linked_batch.id, recipe_id=recipe_job.recipe.id)
    BatchJob.objects.create(batch_id=linked_batch.id, job_id=exe.job_id)

    # The ancestry link should pick up both associations
    FileAncestryLink.objects.create_file_ancestry_links([self.file_1.id], None, exe.job, exe.id)

    link = FileAncestryLink.objects.get(job_exe=exe)
    self.assertEqual(link.recipe_id, recipe_job.recipe_id)
    self.assertEqual(link.batch_id, linked_batch.id)
def test_schedule_no_changes(self):
    """Tests calling BatchManager.schedule_recipes() for a recipe type that has nothing to reprocess"""
    Recipe.objects.create_recipe(recipe_type=self.recipe_type, data=RecipeData(self.data), event=self.event)
    new_batch = batch_test_utils.create_batch(recipe_type=self.recipe_type)

    Batch.objects.schedule_recipes(new_batch.id)

    # The recipe type is unchanged, so the batch completes with zero recipes
    refreshed = Batch.objects.get(pk=new_batch.id)
    self.assertEqual(refreshed.status, 'CREATED')
    self.assertEqual(refreshed.total_count, 0)

    self.assertEqual(len(BatchRecipe.objects.all()), 0)
def test_batch(self):
    """Tests filtering jobs by batch"""
    filter_batch = batch_test_utils.create_batch()

    # Attach only job1 to the batch
    self.job1.batch_id = filter_batch.id
    self.job1.save()

    url = '/%s/sources/%d/jobs/?batch_id=%d' % (self.api, self.src_file.id, filter_batch.id)
    response = self.client.generic('GET', url)
    self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)

    # Only the batch-linked job is returned
    results = json.loads(response.content)['results']
    self.assertEqual(len(results), 1)
    self.assertEqual(results[0]['id'], self.job1.id)
def test_edit_invalid_configuration(self):
    """Tests editing a batch with an invalid configuration"""
    batch = batch_test_utils.create_batch()

    payload = {
        'title': 'New Title',
        'description': 'New Description',
        'configuration': {'bad': 'foo'},
    }

    url = '/v6/batches/%d/' % batch.id
    response = self.client.generic('PATCH', url, json.dumps(payload), 'application/json')

    # The unrecognized configuration field should be rejected
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, response.content)
def test_edit_put_not_allowed(self):
    """Tests editing a batch with HTTP PUT to ensure it is not allowed"""
    batch = batch_test_utils.create_batch()

    payload = {
        'title': 'New Title',
        'description': 'New Description',
        'configuration': {'priority': 267},
    }

    # The batch endpoint only supports PATCH for edits
    url = '/v6/batches/%d/' % batch.id
    response = self.client.generic('PUT', url, json.dumps(payload), 'application/json')

    self.assertEqual(response.status_code, 405, response.content)
def test_edit_invalid_version(self):
    """Tests editing a batch with an invalid REST API version"""
    batch = batch_test_utils.create_batch()

    payload = {
        'title': 'New Title',
        'description': 'New Description',
        'configuration': {'priority': 200},
    }

    # v1 does not expose batch editing
    url = '/v1/batches/%d/' % batch.id
    response = self.client.generic('PATCH', url, json.dumps(payload), 'application/json')

    self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND, response.content)
def test_batch_link(self):
    """Tests calling ProductFileManager.upload_files() successfully when associated with a batch"""

    job_type = job_test_utils.create_job_type(name='scale-batch-creator')
    job_exe = job_test_utils.create_job_exe(job_type=job_type)
    recipe_job = recipe_test_utils.create_recipe_job(job=job_exe.job)
    batch = batch_test_utils.create_batch()

    # Link both the recipe and the job to the batch
    BatchRecipe.objects.create(batch_id=batch.id, recipe_id=recipe_job.recipe.id)
    BatchJob.objects.create(batch_id=batch.id, job_id=job_exe.job_id)

    products_no = ProductFile.objects.upload_files(self.files_no, [self.source_file.id], self.job_exe_no, self.workspace)
    products = ProductFile.objects.upload_files(self.files, [self.source_file.id, products_no[0].id], job_exe, self.workspace)

    # Products from the batch-linked job execution inherit the batch ID
    self.assertEqual(batch.id, products[0].batch_id)
def test_execute(self):
    """Tests calling CreateConditions.execute() successfully"""

    batch = batch_test_utils.create_batch()
    recipe = recipe_test_utils.create_recipe(batch=batch)
    # node_1's condition was not accepted, node_2's was
    conditions = [Condition('node_1', False), Condition('node_2', True)]

    # Create and execute message
    message = create_conditions_messages(recipe, conditions)[0]
    result = message.execute()
    self.assertTrue(result)

    self.assertEqual(RecipeCondition.objects.filter(recipe_id=recipe.id).count(), 2)
    recipe_nodes = RecipeNode.objects.select_related('condition').filter(recipe_id=recipe.id).order_by('node_name')
    self.assertEqual(len(recipe_nodes), 2)
    self.assertEqual(recipe_nodes[0].node_name, 'node_1')
    self.assertEqual(recipe_nodes[1].node_name, 'node_2')
    condition_2 = recipe_nodes[1].condition

    # Should be one message for processing condition for node 2
    self.assertEqual(len(message.new_messages), 1)
    process_condition_msg = message.new_messages[0]
    self.assertEqual(process_condition_msg.type, 'process_condition')
    self.assertEqual(process_condition_msg.condition_id, condition_2.id)

    # Test executing message again — the condition count must stay at 2
    # (no duplicate conditions on re-execution)
    message_json_dict = message.to_json()
    message = CreateConditions.from_json(message_json_dict)
    result = message.execute()
    self.assertTrue(result)

    self.assertEqual(RecipeCondition.objects.filter(recipe_id=recipe.id).count(), 2)
    recipe_nodes = RecipeNode.objects.select_related('condition').filter(recipe_id=recipe.id).order_by('node_name')
    self.assertEqual(len(recipe_nodes), 2)
    self.assertEqual(recipe_nodes[0].node_name, 'node_1')
    self.assertEqual(recipe_nodes[1].node_name, 'node_2')
    condition_2.id = recipe_nodes[1].condition_id

    # Should be one message for processing condition for node 2
    self.assertEqual(len(message.new_messages), 1)
    process_condition_msg = message.new_messages[0]
    self.assertEqual(process_condition_msg.type, 'process_condition')
    self.assertEqual(process_condition_msg.condition_id, condition_2.id)
def test_batch_link(self):
    """Tests calling ProductFileManager.upload_files() successfully when associated with a batch"""

    manifest = job_test_utils.create_seed_manifest(name='scale-batch-creator')
    job_type = job_test_utils.create_seed_job_type(manifest=manifest)
    job_exe = job_test_utils.create_job_exe(job_type=job_type)
    batch = batch_test_utils.create_batch()

    # Link the job execution to the batch and persist that link on the job
    # execution itself. (Bug fix: the original called batch.save(), which
    # re-saved the unchanged batch and left job_exe's batch assignment
    # unpersisted.)
    job_exe.batch = batch
    job_exe.save()

    recipe_job = recipe_test_utils.create_recipe_job(job=job_exe.job)

    products_no = ProductFile.objects.upload_files(self.files_no, [self.source_file.id], self.job_exe_no, self.workspace)
    products = ProductFile.objects.upload_files(self.files, [self.source_file.id, products_no[0].id], job_exe, self.workspace)

    # Products from the batch-linked job execution inherit the batch ID
    self.assertEqual(batch.id, products[0].batch_id)
def test_create_successful_v6(self):
    """Tests calling BatchManager.create_batch_v6"""
    from batch.messages.create_batch_recipes import create_batch_recipes_message

    created = batch_test_utils.create_batch(recipe_type=self.recipe_type_v6)

    # Reload and verify the persisted batch
    batch = Batch.objects.get(pk=created.id)
    self.assertIsNotNone(batch.title)
    self.assertIsNotNone(batch.description)
    self.assertEqual(batch.recipe_type, self.recipe_type_v6)

    # Create and execute the recipe-creation message for the batch
    message = create_batch_recipes_message(batch.id)
    self.assertTrue(message.execute())

    # Recipe creation should now be marked done
    batch = Batch.objects.get(pk=batch.id)
    self.assertTrue(batch.is_creation_done)
def test_schedule_priority(self):
    """Tests calling BatchManager.schedule_recipes() for a batch that overrides job priority"""

    Recipe.objects.create_recipe(recipe_type=self.recipe_type, data=RecipeData(self.data), event=self.event)

    # Re-process all jobs with an overridden priority
    definition = {
        'all_jobs': True,
        'priority': 1111,
    }
    batch = batch_test_utils.create_batch(recipe_type=self.recipe_type, definition=definition)

    Batch.objects.schedule_recipes(batch.id)

    batch = Batch.objects.get(pk=batch.id)
    self.assertEqual(batch.status, 'CREATED')
    self.assertEqual(batch.created_count, 1)
    self.assertEqual(batch.total_count, 1)

    # The re-created job should carry the overridden priority
    batch_job = BatchJob.objects.get(batch=batch, job__job_type=self.job_type_1)
    self.assertEqual(batch_job.job.priority, 1111)
def test_schedule_new_batch(self):
    """Tests calling BatchManager.schedule_recipes() for a batch that has never been started"""

    handler = Recipe.objects.create_recipe(recipe_type=self.recipe_type, data=RecipeData(self.data), event=self.event)
    # Editing the recipe type makes the existing recipe eligible for re-processing
    recipe_test_utils.edit_recipe_type(self.recipe_type, self.definition_2)

    batch = batch_test_utils.create_batch(recipe_type=self.recipe_type)
    Batch.objects.schedule_recipes(batch.id)

    batch = Batch.objects.get(pk=batch.id)
    self.assertEqual(batch.status, 'CREATED')
    self.assertEqual(batch.created_count, 1)
    self.assertEqual(batch.total_count, 1)

    # The batch's new recipe supersedes the original one
    batch_recipes = BatchRecipe.objects.all()
    self.assertEqual(len(batch_recipes), 1)
    self.assertEqual(batch_recipes[0].batch, batch)
    self.assertEqual(batch_recipes[0].recipe.recipe_type, self.recipe_type)
    self.assertEqual(batch_recipes[0].superseded_recipe, handler.recipe)
def setUp(self):
    django.setup()

    # Storage fixtures
    self.workspace1 = storage_test_utils.create_workspace(name='ws1')
    self.country = storage_test_utils.create_country()

    # Seed job type renamed to 'test1' so file lookups are unambiguous
    manifest = copy.deepcopy(job_test_utils.COMPLETE_MANIFEST)
    manifest['job']['name'] = 'test1'
    self.job_type1 = job_test_utils.create_seed_job_type(manifest=manifest)
    self.job1 = job_test_utils.create_job(job_type=self.job_type1)
    self.job_exe1 = job_test_utils.create_job_exe(job=self.job1)

    # Recipe/batch fixtures (v6 recipe type)
    self.recipe_type1 = recipe_test_utils.create_recipe_type_v6()
    self.recipe1 = recipe_test_utils.create_recipe(recipe_type=self.recipe_type1)
    self.batch1 = batch_test_utils.create_batch(recipe_type=self.recipe_type1,
                                                is_creation_done=True)

    # A fully-populated, superseded source file tied to the objects above
    file_fields = dict(
        file_name='test.txt', file_type='SOURCE', media_type='image/png', file_size=1000,
        data_type_tags=['png'], file_path='/test/path', workspace=self.workspace1,
        is_deleted=False, last_modified='', data_started='2017-01-01T00:00:00Z',
        data_ended='2017-01-01T00:00:00Z', source_started='2017-01-01T00:00:00Z',
        source_ended='2017-01-01T00:00:00Z', geometry='', center_point='', meta_data='',
        countries=[self.country], job_exe=self.job_exe1, job_output='output_name_1',
        recipe=self.recipe1, recipe_node='my-recipe', batch=self.batch1, is_superseded=True,
        superseded='2017-01-01T00:00:00Z')
    self.file = storage_test_utils.create_file(**file_fields)

    rest.login_client(self.client)
def setUp(self):
    django.setup()

    # Storage fixtures
    self.workspace1 = storage_test_utils.create_workspace(name='ws1')
    self.country = storage_test_utils.create_country()

    # Legacy (pre-seed) job type and a single execution
    self.job_type1 = job_test_utils.create_job_type(name='test1', category='test-1',
                                                    is_operational=True)
    self.job1 = job_test_utils.create_job(job_type=self.job_type1)
    self.job_exe1 = job_test_utils.create_job_exe(job=self.job1)

    # Recipe/batch fixtures (legacy recipe type)
    self.recipe_type1 = recipe_test_utils.create_recipe_type()
    self.recipe1 = recipe_test_utils.create_recipe(recipe_type=self.recipe_type1)
    self.batch1 = batch_test_utils.create_batch(recipe_type=self.recipe_type1,
                                                is_creation_done=True)

    # A fully-populated, superseded source file tied to the objects above
    file_fields = dict(
        file_name='test.txt', file_type='SOURCE', media_type='image/png', file_size=1000,
        data_type='png', file_path='/test/path', workspace=self.workspace1, is_deleted=False,
        last_modified='', data_started='2017-01-01T00:00:00Z',
        data_ended='2017-01-01T00:00:00Z', source_started='2017-01-01T00:00:00Z',
        source_ended='2017-01-01T00:00:00Z', geometry='', center_point='', meta_data='',
        countries=[self.country], job_exe=self.job_exe1, job_output='output_name_1',
        recipe=self.recipe1, recipe_node='my-recipe', batch=self.batch1, is_superseded=True,
        superseded='2017-01-01T00:00:00Z')
    self.file = storage_test_utils.create_file(**file_fields)
def test_schedule_date_range_data_none(self):
    """Tests calling BatchManager.schedule_recipes() for a batch data date range where no data matches"""

    Recipe.objects.create_recipe(recipe_type=self.recipe_type, data=RecipeData(self.data),
                                 event=self.event)
    recipe_test_utils.edit_recipe_type(self.recipe_type, self.definition_2)

    # Data date range chosen so that no existing data falls inside it
    batch_definition = {
        'date_range': {
            'type': 'data',
            'started': '2016-01-01T00:00:00.000Z',
            'ended': '2016-01-10T00:00:00.000Z',
        },
    }
    new_batch = batch_test_utils.create_batch(recipe_type=self.recipe_type,
                                              definition=batch_definition)
    Batch.objects.schedule_recipes(new_batch.id)

    # Nothing matched, so nothing was created
    new_batch = Batch.objects.get(pk=new_batch.id)
    self.assertEqual(new_batch.status, 'CREATED')
    self.assertEqual(new_batch.created_count, 0)
    self.assertEqual(new_batch.total_count, 0)
def test_schedule_all_jobs(self):
    """Tests calling BatchManager.schedule_recipes() for a batch that forces all jobs to be re-processed"""

    recipe_handler = Recipe.objects.create_recipe(recipe_type=self.recipe_type,
                                                  data=RecipeData(self.data), event=self.event)

    # Definition that forces re-processing even without a recipe type revision
    batch_definition = {
        'all_jobs': True,
    }
    new_batch = batch_test_utils.create_batch(recipe_type=self.recipe_type,
                                              definition=batch_definition)
    Batch.objects.schedule_recipes(new_batch.id)

    new_batch = Batch.objects.get(pk=new_batch.id)
    self.assertEqual(new_batch.status, 'CREATED')
    self.assertEqual(new_batch.created_count, 1)
    self.assertEqual(new_batch.total_count, 1)

    # Exactly one batch/recipe link, superseding the original recipe
    linked_recipes = list(BatchRecipe.objects.all())
    self.assertEqual(len(linked_recipes), 1)
    only_link = linked_recipes[0]
    self.assertEqual(only_link.batch, new_batch)
    self.assertEqual(only_link.recipe.recipe_type, self.recipe_type)
    self.assertEqual(only_link.superseded_recipe, recipe_handler.recipe)
def setUp(self):
    django.setup()

    # Minimal fixtures: one recipe type and one batch built from it
    self.recipe_type = recipe_test_utils.create_recipe_type()
    self.batch = batch_test_utils.create_batch(recipe_type=self.recipe_type)