def test_json(self):
    """Tests converting a CreateBatchRecipes message to and from JSON"""

    # Set up a previous batch containing three recipes
    recipe_type = recipe_test_utils.create_recipe_type()
    prev_batch = batch_test_utils.create_batch(recipe_type=recipe_type, is_creation_done=True, recipes_total=3)
    recipes = [recipe_test_utils.create_recipe(batch=prev_batch) for _ in range(3)]

    # New batch that re-processes everything in the previous batch
    definition = BatchDefinition()
    definition.root_batch_id = prev_batch.root_batch_id
    new_batch = batch_test_utils.create_batch(recipe_type=recipe_type, definition=definition)

    # Create the message, round-trip it through JSON, and then execute it
    message = create_batch_recipes_message(new_batch.id)
    message_json_dict = message.to_json()
    new_message = CreateBatchRecipes.from_json(message_json_dict)
    result = new_message.execute()
    self.assertTrue(result)

    # All three previous recipes fit into a single reprocess_recipes message
    self.assertEqual(len(new_message.new_messages), 1)
    reprocess_msg = new_message.new_messages[0]
    self.assertEqual(reprocess_msg.type, 'reprocess_recipes')
    self.assertSetEqual(set(reprocess_msg._root_recipe_ids), {recipe.id for recipe in recipes})
def get_definition(self):
    """Returns the batch definition represented by this JSON

    :returns: The batch definition
    :rtype: :class:`batch.definition.definition.BatchDefinition`:
    """

    definition = BatchDefinition()
    json_dict = self._definition

    if 'dataset' in json_dict:
        definition.dataset = json_dict['dataset']
    if 'forced_nodes' in json_dict:
        definition.forced_nodes = ForcedNodesV6(json_dict['forced_nodes']).get_forced_nodes()
    if 'supersedes' in json_dict:
        definition.supersedes = json_dict['supersedes']

    if 'previous_batch' in json_dict:
        prev_batch_dict = json_dict['previous_batch']
        definition.root_batch_id = prev_batch_dict['root_batch_id']
        # Forced nodes under previous_batch replace any top-level forced_nodes value
        if 'forced_nodes' in prev_batch_dict:
            definition.forced_nodes = ForcedNodesV6(prev_batch_dict['forced_nodes']).get_forced_nodes()

    return definition
def create_batch(title=None, description=None, recipe_type=None, definition=None, configuration=None,
                 is_creation_done=False, recipes_total=None):
    """Creates a batch model for unit testing

    :returns: The batch model
    :rtype: :class:`batch.models.Batch`
    """

    if not recipe_type:
        recipe_type = recipe_test_utils.create_recipe_type()

    if not definition:
        definition = BatchDefinition()
        # Create a previous batch so we can create a valid definition
        # TODO: this can be replaced by a DataSet once they are implemented
        root_batch = Batch()
        root_batch.recipe_type = recipe_type
        root_batch.recipe_type_rev = RecipeTypeRevision.objects.get_revision(recipe_type.id,
                                                                             recipe_type.revision_num)
        root_batch.event = TriggerEvent.objects.create_trigger_event('USER', None, {'user': '******'}, now())
        root_batch.is_creation_done = True
        root_batch.recipes_total = 10
        root_batch.save()
        # A second save is needed: the batch must have a database ID before it can be its own root
        root_batch.root_batch_id = root_batch.id
        root_batch.save()
        definition.root_batch_id = root_batch.root_batch_id

    if not configuration:
        configuration = BatchConfiguration()

    if not title:
        global BATCH_TITLE_COUNTER
        title = 'Test Batch Title %i' % BATCH_TITLE_COUNTER
        BATCH_TITLE_COUNTER += 1
    if not description:
        global BATCH_DESCRIPTION_COUNTER
        description = 'Test Batch Description %i' % BATCH_DESCRIPTION_COUNTER
        BATCH_DESCRIPTION_COUNTER += 1

    trigger_event = TriggerEvent.objects.create_trigger_event('USER', None, {'user': '******'}, now())
    new_batch = Batch.objects.create_batch_v6(title, description, recipe_type, trigger_event, definition,
                                              configuration)
    if is_creation_done:
        new_batch.is_creation_done = True
    if recipes_total is not None:
        new_batch.recipes_total = recipes_total
    new_batch.save()
    return new_batch
def get_definition(self):
    """Returns the batch definition represented by this JSON

    :returns: The batch definition
    :rtype: :class:`batch.definition.definition.BatchDefinition`:
    """

    definition = BatchDefinition()

    # Without a previous batch there is nothing else to populate
    if 'previous_batch' not in self._definition:
        return definition

    prev_dict = self._definition['previous_batch']
    definition.root_batch_id = prev_dict['root_batch_id']
    if 'job_names' in prev_dict:
        definition.job_names = prev_dict['job_names']
    if 'all_jobs' in prev_dict:
        definition.all_jobs = prev_dict['all_jobs']
    return definition
def test_convert_definition_to_v6(self):
    """Tests calling convert_definition_to_v6()"""

    # An empty definition should convert and validate cleanly
    empty_def = BatchDefinition()
    empty_json = convert_definition_to_v6(empty_def)
    BatchDefinitionV6(definition=empty_json.get_dict(), do_validate=True)  # Revalidate

    # A definition with a previous batch ID should round-trip the ID through JSON
    prev_def = BatchDefinition()
    prev_def.root_batch_id = 1234
    prev_json = convert_definition_to_v6(prev_def)
    BatchDefinitionV6(definition=prev_json.get_dict(), do_validate=True)  # Revalidate
    self.assertEqual(prev_json.get_definition().root_batch_id, prev_def.root_batch_id)
def test_successful(self, mock_msg_mgr):
    """Tests successfully calling the v6 batch comparison view"""

    job_type_1 = job_test_utils.create_seed_job_type()
    job_type_2 = job_test_utils.create_seed_job_type()
    job_type_3 = job_test_utils.create_seed_job_type()
    # Recipe type revision 1: job_a feeds its output image into job_b
    rt_definition_1 = {
        'version': '6',
        'input': {'files': [{'name': 'INPUT_IMAGE', 'media_types': ['image/png'], 'required': True,
                             'multiple': False}],
                  'json': []},
        'nodes': {
            'job_a': {
                'dependencies': [],
                'input': {'INPUT_IMAGE': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                'node_type': {
                    'node_type': 'job',
                    'job_type_name': job_type_1.name,
                    'job_type_version': job_type_1.version,
                    'job_type_revision': 1,
                }
            },
            'job_b': {
                'dependencies': [{'name': 'job_a'}],
                'input': {'INPUT_IMAGE': {'type': 'dependency', 'node': 'job_a', 'output': 'OUTPUT_IMAGE'}},
                'node_type': {
                    'node_type': 'job',
                    'job_type_name': job_type_2.name,
                    'job_type_version': job_type_2.version,
                    'job_type_revision': 1,
                }
            }
        }
    }
    # Recipe type revision 2: job_a is replaced by job_c, which now feeds job_b
    rt_definition_2 = {
        'version': '6',
        'input': {'files': [{'name': 'INPUT_IMAGE', 'media_types': ['image/png'], 'required': True,
                             'multiple': False}],
                  'json': []},
        'nodes': {
            'job_c': {
                'dependencies': [],
                'input': {'INPUT_IMAGE': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                'node_type': {
                    'node_type': 'job',
                    'job_type_name': job_type_3.name,
                    'job_type_version': job_type_3.version,
                    'job_type_revision': 1,
                }
            },
            'job_b': {
                'dependencies': [{'name': 'job_c'}],
                'input': {'INPUT_IMAGE': {'type': 'dependency', 'node': 'job_c', 'output': 'OUTPUT_IMAGE'}},
                'node_type': {
                    'node_type': 'job',
                    'job_type_name': job_type_2.name,
                    'job_type_version': job_type_2.version,
                    'job_type_revision': 1,
                }
            }
        }
    }
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=rt_definition_1)

    # Create a chain of two batches
    batch_1 = batch_test_utils.create_batch(recipe_type=recipe_type, is_creation_done=True, recipes_total=2)
    # Right now test utils will automatically have batch_1 supersede another batch, so we reset this so batch_1 is
    # its own chain
    batch_1.root_batch_id = batch_1.id
    batch_1.superseded_batch = None
    batch_1.save()

    # Change recipe type to new revision
    recipe_test_utils.edit_recipe_type_v6(recipe_type=recipe_type, definition=rt_definition_2, auto_update=True)
    recipe_type = RecipeType.objects.get(id=recipe_type.id)
    # batch_2 re-processes batch_1's chain under the new recipe type revision
    definition_2 = BatchDefinition()
    definition_2.root_batch_id = batch_1.root_batch_id
    batch_2 = batch_test_utils.create_batch(recipe_type=recipe_type, definition=definition_2)

    # Set metrics to test values
    Batch.objects.filter(id=batch_1.id).update(jobs_total=24, jobs_pending=0, jobs_blocked=10, jobs_queued=0,
                                               jobs_running=0, jobs_failed=2, jobs_completed=12, jobs_canceled=0,
                                               recipes_estimated=2, recipes_total=2, recipes_completed=1)
    Batch.objects.filter(id=batch_2.id).update(jobs_total=26, jobs_pending=2, jobs_blocked=6, jobs_queued=3,
                                               jobs_running=5, jobs_failed=6, jobs_completed=3, jobs_canceled=1,
                                               recipes_estimated=2, recipes_total=2, recipes_completed=0)
    # Per-job metrics for job_a in batch_1 (job_a does not exist in batch_2)
    min_seed_duration_1a = timedelta(seconds=43)
    avg_seed_duration_1a = timedelta(seconds=68)
    max_seed_duration_1a = timedelta(seconds=77)
    min_job_duration_1a = timedelta(seconds=45)
    avg_job_duration_1a = timedelta(seconds=70)
    max_job_duration_1a = timedelta(seconds=79)
    qry = BatchMetrics.objects.filter(batch_id=batch_1.id, job_name='job_a')
    qry.update(jobs_total=12, jobs_pending=0, jobs_blocked=0, jobs_queued=0, jobs_running=0, jobs_failed=0,
               jobs_completed=12, jobs_canceled=0, min_seed_duration=min_seed_duration_1a,
               avg_seed_duration=avg_seed_duration_1a, max_seed_duration=max_seed_duration_1a,
               min_job_duration=min_job_duration_1a, avg_job_duration=avg_job_duration_1a,
               max_job_duration=max_job_duration_1a)
    # Per-job metrics for job_b in batch_1
    min_seed_duration_1b = timedelta(seconds=15)
    avg_seed_duration_1b = timedelta(seconds=18)
    max_seed_duration_1b = timedelta(seconds=23)
    min_job_duration_1b = timedelta(seconds=18)
    avg_job_duration_1b = timedelta(seconds=21)
    max_job_duration_1b = timedelta(seconds=26)
    qry = BatchMetrics.objects.filter(batch_id=batch_1.id, job_name='job_b')
    qry.update(jobs_total=12, jobs_pending=0, jobs_blocked=10, jobs_queued=0, jobs_running=0, jobs_failed=2,
               jobs_completed=0, jobs_canceled=0, min_seed_duration=min_seed_duration_1b,
               avg_seed_duration=avg_seed_duration_1b, max_seed_duration=max_seed_duration_1b,
               min_job_duration=min_job_duration_1b, avg_job_duration=avg_job_duration_1b,
               max_job_duration=max_job_duration_1b)
    # Per-job metrics for job_b in batch_2
    min_seed_duration_2b = timedelta(seconds=9)
    avg_seed_duration_2b = timedelta(seconds=12)
    max_seed_duration_2b = timedelta(seconds=17)
    min_job_duration_2b = timedelta(seconds=12)
    avg_job_duration_2b = timedelta(seconds=15)
    max_job_duration_2b = timedelta(seconds=20)
    qry = BatchMetrics.objects.filter(batch_id=batch_2.id, job_name='job_b')
    qry.update(jobs_total=13, jobs_pending=0, jobs_blocked=0, jobs_queued=0, jobs_running=3, jobs_failed=6,
               jobs_completed=3, jobs_canceled=1, min_seed_duration=min_seed_duration_2b,
               avg_seed_duration=avg_seed_duration_2b, max_seed_duration=max_seed_duration_2b,
               min_job_duration=min_job_duration_2b, avg_job_duration=avg_job_duration_2b,
               max_job_duration=max_job_duration_2b)
    # Per-job metrics for job_c in batch_2 (job_c does not exist in batch_1)
    min_seed_duration_2c = timedelta(seconds=101)
    avg_seed_duration_2c = timedelta(seconds=136)
    max_seed_duration_2c = timedelta(seconds=158)
    min_job_duration_2c = timedelta(seconds=111)
    avg_job_duration_2c = timedelta(seconds=146)
    max_job_duration_2c = timedelta(seconds=168)
    qry = BatchMetrics.objects.filter(batch_id=batch_2.id, job_name='job_c')
    qry.update(jobs_total=13, jobs_pending=2, jobs_blocked=6, jobs_queued=3, jobs_running=2, jobs_failed=0,
               jobs_completed=0, jobs_canceled=0, min_seed_duration=min_seed_duration_2c,
               avg_seed_duration=avg_seed_duration_2c, max_seed_duration=max_seed_duration_2c,
               min_job_duration=min_job_duration_2c, avg_job_duration=avg_job_duration_2c,
               max_job_duration=max_job_duration_2c)

    # Each metric maps to a two-element list: [batch_1 value, batch_2 value], with None where the
    # job name does not exist in that batch
    expected_job_metrics = {'job_a': {'jobs_total': [12, None], 'jobs_pending': [0, None],
                                      'jobs_blocked': [0, None], 'jobs_queued': [0, None],
                                      'jobs_running': [0, None], 'jobs_failed': [0, None],
                                      'jobs_completed': [12, None], 'jobs_canceled': [0, None],
                                      'min_seed_duration': [duration_to_string(min_seed_duration_1a), None],
                                      'avg_seed_duration': [duration_to_string(avg_seed_duration_1a), None],
                                      'max_seed_duration': [duration_to_string(max_seed_duration_1a), None],
                                      'min_job_duration': [duration_to_string(min_job_duration_1a), None],
                                      'avg_job_duration': [duration_to_string(avg_job_duration_1a), None],
                                      'max_job_duration': [duration_to_string(max_job_duration_1a), None]},
                            'job_b': {'jobs_total': [12, 13], 'jobs_pending': [0, 0], 'jobs_blocked': [10, 0],
                                      'jobs_queued': [0, 0], 'jobs_running': [0, 3], 'jobs_failed': [2, 6],
                                      'jobs_completed': [0, 3], 'jobs_canceled': [0, 1],
                                      'min_seed_duration': [duration_to_string(min_seed_duration_1b),
                                                            duration_to_string(min_seed_duration_2b)],
                                      'avg_seed_duration': [duration_to_string(avg_seed_duration_1b),
                                                            duration_to_string(avg_seed_duration_2b)],
                                      'max_seed_duration': [duration_to_string(max_seed_duration_1b),
                                                            duration_to_string(max_seed_duration_2b)],
                                      'min_job_duration': [duration_to_string(min_job_duration_1b),
                                                           duration_to_string(min_job_duration_2b)],
                                      'avg_job_duration': [duration_to_string(avg_job_duration_1b),
                                                           duration_to_string(avg_job_duration_2b)],
                                      'max_job_duration': [duration_to_string(max_job_duration_1b),
                                                           duration_to_string(max_job_duration_2b)]},
                            'job_c': {'jobs_total': [None, 13], 'jobs_pending': [None, 2],
                                      'jobs_blocked': [None, 6], 'jobs_queued': [None, 3],
                                      'jobs_running': [None, 2], 'jobs_failed': [None, 0],
                                      'jobs_completed': [None, 0], 'jobs_canceled': [None, 0],
                                      'min_seed_duration': [None, duration_to_string(min_seed_duration_2c)],
                                      'avg_seed_duration': [None, duration_to_string(avg_seed_duration_2c)],
                                      'max_seed_duration': [None, duration_to_string(max_seed_duration_2c)],
                                      'min_job_duration': [None, duration_to_string(min_job_duration_2c)],
                                      'avg_job_duration': [None, duration_to_string(avg_job_duration_2c)],
                                      'max_job_duration': [None, duration_to_string(max_job_duration_2c)]}
                            }
    expected_result = {'batches': [{'id': batch_1.id, 'title': batch_1.title,
                                    'description': batch_1.description,
                                    'created': datetime_to_string(batch_1.created)},
                                   {'id': batch_2.id, 'title': batch_2.title,
                                    'description': batch_2.description,
                                    'created': datetime_to_string(batch_2.created)}],
                       'metrics': {'jobs_total': [24, 26], 'jobs_pending': [0, 2], 'jobs_blocked': [10, 6],
                                   'jobs_queued': [0, 3], 'jobs_running': [0, 5], 'jobs_failed': [2, 6],
                                   'jobs_completed': [12, 3], 'jobs_canceled': [0, 1],
                                   'recipes_estimated': [2, 2], 'recipes_total': [2, 2],
                                   'recipes_completed': [1, 0], 'job_metrics': expected_job_metrics}
                       }

    # The comparison view is keyed by the chain's root batch ID
    url = '/v6/batches/comparison/%d/' % batch_2.root_batch_id
    response = self.client.get(url)
    self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)
    result = json.loads(response.content)
    self.assertDictEqual(result, expected_result)
def test_execute(self):
    """Tests calling CreateBatchRecipes.execute() successfully"""

    # Importing module here to patch the max recipe num
    import batch.messages.create_batch_recipes
    batch.messages.create_batch_recipes.MAX_RECIPE_NUM = 5

    # Previous batch with six recipes (one more than the patched maximum)
    recipe_type = recipe_test_utils.create_recipe_type()
    prev_batch = batch_test_utils.create_batch(recipe_type=recipe_type, is_creation_done=True, recipes_total=6)
    recipes = [recipe_test_utils.create_recipe(batch=prev_batch) for _ in range(6)]

    # New batch that re-processes the previous batch's chain
    definition = BatchDefinition()
    definition.root_batch_id = prev_batch.root_batch_id
    new_batch = batch_test_utils.create_batch(recipe_type=recipe_type, definition=definition)

    # Create message and keep a JSON copy so the same message can be executed again later
    message = batch.messages.create_batch_recipes.CreateBatchRecipes()
    message.batch_id = new_batch.id
    message_json = message.to_json()

    # Execute message
    self.assertTrue(message.execute())

    # First pass covers the five newest recipes and emits a follow-on create_batch_recipes
    # message plus one reprocess_recipes message
    self.assertEqual(len(message.new_messages), 2)
    next_message, reprocess_message = message.new_messages
    self.assertEqual(next_message.type, 'create_batch_recipes')
    self.assertEqual(next_message.batch_id, new_batch.id)
    self.assertFalse(next_message.is_prev_batch_done)
    self.assertEqual(next_message.current_recipe_id, recipes[1].id)
    self.assertEqual(reprocess_message.type, 'reprocess_recipes')
    self.assertSetEqual(set(reprocess_message._root_recipe_ids), {recipe.id for recipe in recipes[1:]})

    # Executing the message again from its JSON copy must produce the same messages
    message = batch.messages.create_batch_recipes.CreateBatchRecipes.from_json(message_json)
    self.assertTrue(message.execute())
    self.assertEqual(len(message.new_messages), 2)
    next_message, reprocess_message = message.new_messages
    self.assertEqual(next_message.type, 'create_batch_recipes')
    self.assertEqual(next_message.batch_id, new_batch.id)
    self.assertFalse(next_message.is_prev_batch_done)
    self.assertEqual(next_message.current_recipe_id, recipes[1].id)
    self.assertEqual(reprocess_message.type, 'reprocess_recipes')
    self.assertSetEqual(set(reprocess_message._root_recipe_ids), {recipe.id for recipe in recipes[1:]})

    # The follow-on message handles the one remaining recipe and marks the previous batch done
    self.assertTrue(next_message.execute())
    self.assertEqual(len(next_message.new_messages), 1)
    last_reprocess = next_message.new_messages[0]
    self.assertTrue(next_message.is_prev_batch_done)
    self.assertEqual(last_reprocess.type, 'reprocess_recipes')
    self.assertSetEqual(set(last_reprocess._root_recipe_ids), {recipes[0].id})