def test_json_forced_nodes(self):
    """Tests converting a ProcessRecipeInput message to and from JSON with forced nodes provided"""

    data_dict = convert_data_to_v6_json(Data()).get_dict()
    recipe = recipe_test_utils.create_recipe(input=data_dict)

    # Force every node so the resulting update_recipe message carries an all-nodes diff
    all_forced = ForcedNodes()
    all_forced.set_all_nodes()
    expected_forced_dict = convert_forced_nodes_to_v6(all_forced).get_dict()

    # Create message
    original_msg = create_process_recipe_input_messages([recipe.id], forced_nodes=all_forced)[0]

    # Round-trip the message through JSON and execute the restored copy
    restored_msg = ProcessRecipeInput.from_json(original_msg.to_json())
    self.assertTrue(restored_msg.execute())

    recipe = Recipe.objects.get(id=recipe.id)
    self.assertEqual(len(restored_msg.new_messages), 1)
    update_msg = restored_msg.new_messages[0]
    self.assertEqual(update_msg.type, 'update_recipe')
    self.assertEqual(update_msg.root_recipe_id, recipe.id)
    self.assertDictEqual(convert_forced_nodes_to_v6(update_msg.forced_nodes).get_dict(),
                         expected_forced_dict)

    # Recipe should have input_file_size set to 0 (no input files)
    self.assertEqual(recipe.input_file_size, 0.0)
def _get_forced_nodes_private(self, forced_nodes_dict):
    """A private helper method to recursively return the forced nodes object from the JSON dict

    :param forced_nodes_dict: The forced nodes JSON dict
    :type forced_nodes_dict: dict
    :returns: The forced nodes
    :rtype: :class:`recipe.diff.forced_nodes.ForcedNodes`
    """

    result = ForcedNodes()

    # An "all" diff short-circuits: individual node/sub-recipe entries are irrelevant
    if forced_nodes_dict['all']:
        result.set_all_nodes()
        return result

    for name in forced_nodes_dict.get('nodes', []):
        result.add_node(name)
    # Recurse into each sub-recipe's own forced-nodes dict
    for name, sub_dict in forced_nodes_dict.get('sub_recipes', {}).items():
        result.add_subrecipe(name, self._get_forced_nodes_private(sub_dict))
    return result
def create_update_recipe_messages_from_node(root_recipe_ids):
    """Creates messages to update the given recipes from the root IDs. This is intended to be used by recipe nodes
    that have been updated and need to then update the recipes that contain the nodes.

    :param root_recipe_ids: The root recipe IDs
    :type root_recipe_ids: :func:`list`
    :return: The list of messages
    :rtype: :func:`list`
    """

    # Force every node to reprocess: when an update is triggered by a recipe node change
    # (completed job, failed job, completed recipe, etc) all new nodes must be created,
    # not copied. Copying only occurs in the initial creation messages of a reprocess
    # while recipe diffs are being evaluated.
    all_nodes = ForcedNodes()
    all_nodes.set_all_nodes()
    return [create_update_recipe_message(root_id, forced_nodes=all_nodes)
            for root_id in root_recipe_ids]
def test_convert_forced_nodes_to_v6_full(self):
    """Tests calling convert_forced_nodes_to_v6() with a full diff with all types (deleted, new, changed, etc) of
    nodes"""

    # Forced nodes for sub-recipe 'D'
    sub_forced = ForcedNodes()
    for node_name in ('1', '2'):
        sub_forced.add_node(node_name)

    # Top-level forced nodes: job node 'C' plus sub-recipe node 'D'
    top_forced = ForcedNodes()
    top_forced.add_node('C')
    top_forced.add_subrecipe('D', sub_forced)

    expected = {
        'version': '7',
        'all': False,
        'nodes': [u'C', u'D'],
        'sub_recipes': {
            'D': {
                'version': '7',
                'all': False,
                'nodes': ['1', '2']
            }
        }
    }
    self.assertDictEqual(convert_forced_nodes_to_v6(top_forced).get_dict(), expected)
def test_json_forced_nodes(self):
    """Tests converting an UpdateRecipe message to and from JSON when forced nodes are provided"""

    # Build a recipe containing a completed job node feeding a sub-recipe node
    data_dict = convert_data_to_v6_json(Data()).get_dict()
    job_completed = job_test_utils.create_job(status='COMPLETED', input=data_dict, output=data_dict)
    sub_recipe_type = recipe_test_utils.create_recipe_type_v6()
    definition = RecipeDefinition(Interface())
    definition.add_job_node('job_completed', job_completed.job_type.name, job_completed.job_type.version,
                            job_completed.job_type_rev.revision_num)
    definition.add_recipe_node('the_sub_recipe', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_dependency('job_completed', 'the_sub_recipe')
    definition_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_dict)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_completed', job=job_completed)

    # Force all nodes within the sub-recipe only
    forced_nodes = ForcedNodes()
    sub_forced_nodes = ForcedNodes()
    sub_forced_nodes.set_all_nodes()
    forced_nodes.add_subrecipe('the_sub_recipe', sub_forced_nodes)

    # Create message
    message = create_update_recipe_message(recipe.id, forced_nodes=forced_nodes)

    # Convert message to JSON and back, and then execute
    message_json_dict = message.to_json()
    new_message = UpdateRecipe.from_json(message_json_dict)
    result = new_message.execute()
    self.assertTrue(result)

    # Check for message to create sub-recipe
    self.assertEqual(len(new_message.new_messages), 1)
    msg = new_message.new_messages[0]
    self.assertEqual(msg.type, 'create_recipes')
    self.assertEqual(msg.event_id, recipe.event_id)
    # The forced-nodes object must survive the JSON round trip intact
    msg_forced_nodes_dict = convert_forced_nodes_to_v6(msg.forced_nodes).get_dict()
    expected_forced_nodes_dict = convert_forced_nodes_to_v6(forced_nodes).get_dict()
    self.assertDictEqual(msg_forced_nodes_dict, expected_forced_nodes_dict)
    self.assertEqual(msg.create_recipes_type, SUB_RECIPE_TYPE)
    self.assertEqual(msg.recipe_id, recipe.id)
    self.assertEqual(msg.root_recipe_id, recipe.root_superseded_recipe_id)
    self.assertIsNone(msg.superseded_recipe_id)
    sub = SubRecipe(sub_recipe_type.name, sub_recipe_type.revision_num, 'the_sub_recipe', True)
    self.assertListEqual(msg.sub_recipes, [sub])
def test_execute(self):
    """Tests calling CompletedJobs.execute() successfully"""

    from recipe.test import utils as recipe_test_utils
    # job_1: has an execution, not in a recipe; job_2: running with output, in a recipe;
    # job_3: pending with no executions, so completion should not apply to it
    recipe_1 = recipe_test_utils.create_recipe()
    job_1 = job_test_utils.create_job(num_exes=1, status='QUEUED')
    job_test_utils.create_job_exe(job=job_1)
    job_2 = job_test_utils.create_job(num_exes=1, status='RUNNING', recipe=recipe_1)
    job_test_utils.create_job_exe(job=job_2, output=JobResults())
    job_3 = job_test_utils.create_job(num_exes=0, status='PENDING')
    job_ids = [job_1.id, job_2.id, job_3.id]
    recipe_test_utils.create_recipe_job(recipe=recipe_1, job=job_2)
    when_ended = now()

    # Add jobs to message
    message = CompletedJobs()
    message.ended = when_ended
    if message.can_fit_more():
        message.add_completed_job(CompletedJob(job_1.id, job_1.num_exes))
    if message.can_fit_more():
        message.add_completed_job(CompletedJob(job_2.id, job_2.num_exes))
    if message.can_fit_more():
        message.add_completed_job(CompletedJob(job_3.id, job_3.num_exes))

    # Execute message
    result = message.execute()
    self.assertTrue(result)

    # Expected forced-nodes payload of the resulting update_recipe message (all nodes)
    from recipe.diff.forced_nodes import ForcedNodes
    from recipe.diff.json.forced_nodes_v6 import convert_forced_nodes_to_v6
    forced_nodes = ForcedNodes()
    forced_nodes.set_all_nodes()
    forced_nodes_dict = convert_forced_nodes_to_v6(forced_nodes).get_dict()

    jobs = Job.objects.filter(id__in=job_ids).order_by('id')
    self.assertEqual(len(message.new_messages), 3)
    update_recipe_metrics_msg = None
    update_recipe_msg = None
    publish_job_msg = None
    for msg in message.new_messages:
        if msg.type == 'update_recipe':
            update_recipe_msg = msg
        elif msg.type == 'publish_job':
            publish_job_msg = msg
        elif msg.type == 'update_recipe_metrics':
            update_recipe_metrics_msg = msg
    self.assertIsNotNone(update_recipe_msg)
    self.assertIsNotNone(publish_job_msg)
    self.assertIsNotNone(update_recipe_metrics_msg)
    self.assertEqual(publish_job_msg.job_id, job_2.id)
    # Job 1 should be completed
    self.assertEqual(jobs[0].status, 'COMPLETED')
    self.assertEqual(jobs[0].num_exes, 1)
    self.assertEqual(jobs[0].ended, when_ended)
    # Job 2 should be completed and has output, so should be in update_recipe message
    self.assertEqual(jobs[1].status, 'COMPLETED')
    self.assertEqual(jobs[1].num_exes, 1)
    self.assertEqual(jobs[1].ended, when_ended)
    self.assertEqual(update_recipe_msg.root_recipe_id, recipe_1.id)
    self.assertDictEqual(convert_forced_nodes_to_v6(update_recipe_msg.forced_nodes).get_dict(),
                         forced_nodes_dict)
    # Job 3 should ignore update
    self.assertEqual(jobs[2].status, 'PENDING')
    self.assertEqual(jobs[2].num_exes, 0)

    # Test executing message again — the message must be idempotent, so a later
    # "ended" time must not overwrite the one recorded on the first execution
    new_ended = when_ended + datetime.timedelta(minutes=5)
    message_json_dict = message.to_json()
    message = CompletedJobs.from_json(message_json_dict)
    message.ended = new_ended
    result = message.execute()
    self.assertTrue(result)

    # Should have the same messages as before
    jobs = Job.objects.filter(id__in=job_ids).order_by('id')
    self.assertEqual(len(message.new_messages), 3)
    update_recipe_metrics_msg = None
    update_recipe_msg = None
    publish_job_msg = None
    for msg in message.new_messages:
        if msg.type == 'update_recipe':
            update_recipe_msg = msg
        elif msg.type == 'publish_job':
            publish_job_msg = msg
        elif msg.type == 'update_recipe_metrics':
            update_recipe_metrics_msg = msg
    self.assertIsNotNone(update_recipe_msg)
    self.assertIsNotNone(publish_job_msg)
    self.assertIsNotNone(update_recipe_metrics_msg)
    self.assertEqual(publish_job_msg.job_id, job_2.id)
    # Job 1 should be completed
    self.assertEqual(jobs[0].status, 'COMPLETED')
    self.assertEqual(jobs[0].num_exes, 1)
    self.assertEqual(jobs[0].ended, when_ended)
    # Job 2 should be completed and has output, so should be in update_recipe message
    self.assertEqual(jobs[1].status, 'COMPLETED')
    self.assertEqual(jobs[1].num_exes, 1)
    self.assertEqual(jobs[1].ended, when_ended)
    self.assertEqual(update_recipe_msg.root_recipe_id, recipe_1.id)
    self.assertDictEqual(convert_forced_nodes_to_v6(update_recipe_msg.forced_nodes).get_dict(),
                         forced_nodes_dict)
    # Job 3 should ignore update
    self.assertEqual(jobs[2].status, 'PENDING')
    self.assertEqual(jobs[2].num_exes, 0)
def test_execute_reprocess(self):
    """Tests calling CreateBatchRecipes.execute() successfully when re-processing recipes"""

    # Importing module here to patch the max recipe num (5 recipes per message)
    import batch.messages.create_batch_recipes
    batch.messages.create_batch_recipes.MAX_RECIPE_NUM = 5

    jt_1 = job_test_utils.create_seed_job_type()
    jt_2 = job_test_utils.create_seed_job_type()
    jt_3 = job_test_utils.create_seed_job_type()
    jt_4 = job_test_utils.create_seed_job_type()

    # Sub-recipe type: two independent job nodes fed by the recipe input
    recipe_def = {
        'version': '7',
        'input': {
            'files': [{'name': 'INPUT_IMAGE', 'media_types': ['image/png'],
                       'required': True, 'multiple': False}],
            'json': []
        },
        'nodes': {
            'node_a': {
                'dependencies': [],
                'input': {'input_a': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                'node_type': {'node_type': 'job', 'job_type_name': jt_1.name,
                              'job_type_version': jt_1.version,
                              'job_type_revision': jt_1.revision_num}
            },
            'node_b': {
                'dependencies': [],
                'input': {'input_a': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                'node_type': {'node_type': 'job', 'job_type_name': jt_2.name,
                              'job_type_version': jt_2.version,
                              'job_type_revision': jt_2.revision_num}
            }
        }
    }
    sub_recipe_type = recipe_test_utils.create_recipe_type_v6(definition=recipe_def)

    # Recipe with two jobs and one subrecipe (c -> d -> r)
    recipe_def = {
        'version': '7',
        'input': {
            'files': [{'name': 'INPUT_IMAGE', 'media_types': ['image/png'],
                       'required': True, 'multiple': False}],
            'json': []
        },
        'nodes': {
            'recipe_node': {
                'dependencies': [{'name': 'node_d', 'acceptance': True}],
                'input': {'input_a': {'type': 'dependency', 'node': 'node_d',
                                      'output': 'OUTPUT_IMAGE'}},
                'node_type': {'node_type': 'recipe',
                              'recipe_type_name': sub_recipe_type.name,
                              'recipe_type_revision': sub_recipe_type.revision_num}
            },
            'node_c': {
                'dependencies': [],
                'input': {'INPUT_IMAGE': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                'node_type': {'node_type': 'job', 'job_type_name': jt_3.name,
                              'job_type_version': jt_3.version,
                              'job_type_revision': jt_3.revision_num}
            },
            'node_d': {
                'dependencies': [{'name': 'node_c', 'acceptance': True}],
                'input': {'INPUT_IMAGE': {'type': 'dependency', 'node': 'node_c',
                                          'output': 'OUTPUT_IMAGE'}},
                'node_type': {'node_type': 'job', 'job_type_name': jt_4.name,
                              'job_type_version': jt_4.version,
                              'job_type_revision': jt_4.revision_num}
            }
        }
    }
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=recipe_def)

    # Create a dataset of 6 files
    dataset_def = {
        'parameters': {
            'files': [{'media_types': ['image/png'], 'required': True,
                       'multiple': False, 'name': 'INPUT_IMAGE'}],
            'json': []
        }
    }
    the_dataset = data_test_utils.create_dataset(definition=dataset_def)
    workspace = storage_test_utils.create_workspace()

    # Create 6 files & recipes to go along
    src_file_ids = []
    recipe_ids = []
    data_list = []
    for i in range(0, 6):
        file_name = 'input_%d.png' % i
        src_file = storage_test_utils.create_file(file_name=file_name, file_type='SOURCE',
                                                  media_type='image/png', file_size=10,
                                                  data_type_tags=['type'], file_path='the_path',
                                                  workspace=workspace)
        src_file_ids.append(src_file.id)
        data_dict = {'version': '6', 'files': {'INPUT_IMAGE': [src_file.id]}, 'json': {}}
        data_list.append(DataV6(data=data_dict).get_dict())
        recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_dict)
        recipe_ids.append(recipe.id)
    members = data_test_utils.create_dataset_members(dataset=the_dataset, data_list=data_list)
    recipe_test_utils.process_recipe_inputs(recipe_ids)

    # Batch that supersedes (reprocesses) the existing recipes with all nodes forced
    batch_definition = BatchDefinition()
    batch_definition.dataset = the_dataset.id
    batch_definition.supersedes = True
    forced_nodes = ForcedNodes()
    forced_nodes.all_nodes = True
    batch_definition.forced_nodes = forced_nodes
    new_batch = batch_test_utils.create_batch(recipe_type=recipe_type, definition=batch_definition)

    # Create message
    message = batch.messages.create_batch_recipes.CreateBatchRecipes()
    message.batch_id = new_batch.id

    # Execute message
    result = message.execute()
    self.assertTrue(result)
    # 6 recipes exceed MAX_RECIPE_NUM (5), so a follow-on create_batch_recipes
    # message is emitted along with the first create_recipes message
    self.assertEqual(len(message.new_messages), 2)
    batch_recipes_message = message.new_messages[0]
    create_recipes_message = message.new_messages[1]
    self.assertEqual(batch_recipes_message.type, 'create_batch_recipes')
    self.assertEqual(batch_recipes_message.batch_id, new_batch.id)
    self.assertFalse(batch_recipes_message.is_prev_batch_done)
    self.assertEqual(batch_recipes_message.current_recipe_id, recipe_ids[1])

    # Test the create_recipes_message
    self.assertEqual(create_recipes_message.type, 'create_recipes')
    self.assertSetEqual(set(create_recipes_message.root_recipe_ids),
                        {recipe_ids[5], recipe_ids[4], recipe_ids[3], recipe_ids[2], recipe_ids[1]})
    self.assertEqual(create_recipes_message.batch_id, new_batch.id)
    self.assertEqual(create_recipes_message.event_id, new_batch.event_id)
    self.assertEqual(create_recipes_message.recipe_type_name, new_batch.recipe_type.name)
    self.assertEqual(create_recipes_message.recipe_type_rev_num, new_batch.recipe_type.revision_num)

    # Execute next create_batch_recipes messages
    result = batch_recipes_message.execute()
    self.assertTrue(result)

    # Should only have one last create_recipes message
    self.assertEqual(len(batch_recipes_message.new_messages), 1)
    create_recipes_message = batch_recipes_message.new_messages[0]
    self.assertTrue(batch_recipes_message.is_prev_batch_done)
    self.assertEqual(create_recipes_message.type, 'create_recipes')
    self.assertSetEqual(set(create_recipes_message.root_recipe_ids), {recipe_ids[0]})
    self.assertEqual(create_recipes_message.batch_id, new_batch.id)
    self.assertEqual(create_recipes_message.event_id, new_batch.event_id)
    self.assertEqual(create_recipes_message.recipe_type_name, new_batch.recipe_type.name)
    self.assertEqual(create_recipes_message.recipe_type_rev_num, new_batch.recipe_type.revision_num)

    # Test setting supersedes to false and make sure we don't have any reprocess messages
    batch_definition_2 = BatchDefinition()
    batch_definition_2.dataset = the_dataset.id
    batch_definition_2.supersedes = False
    forced_nodes = ForcedNodes()
    forced_nodes.all_nodes = True
    batch_definition_2.forced_nodes = forced_nodes
    new_batch_2 = batch_test_utils.create_batch(recipe_type=recipe_type, definition=batch_definition_2)

    # Create message
    message_2 = batch.messages.create_batch_recipes.CreateBatchRecipes()
    message_2.batch_id = new_batch_2.id

    # Execute message
    result_2 = message_2.execute()
    self.assertTrue(result_2)
    self.assertEqual(len(message_2.new_messages), 6)
    batch_recipes_message_2 = message_2.new_messages[0]
    self.assertEqual(batch_recipes_message_2.type, 'create_batch_recipes')
    self.assertEqual(batch_recipes_message_2.batch_id, new_batch_2.id)
    self.assertFalse(batch_recipes_message_2.is_prev_batch_done)

    # Make sure we've got 5 create-new-recipe messages
    for msg in message_2.new_messages[1:]:
        self.assertEqual(msg.create_recipes_type, 'new-recipe')
        self.assertEqual(msg.batch_id, new_batch_2.id)
        self.assertEqual(msg.event_id, new_batch_2.event_id)
        self.assertEqual(msg.recipe_type_name, new_batch_2.recipe_type.name)
        self.assertEqual(msg.recipe_type_rev_num, new_batch_2.recipe_type.revision_num)

    # Execute next create_batch_recipes messages
    result_3 = batch_recipes_message_2.execute()
    self.assertTrue(result_3)

    # Should only have one last create_recipes message
    self.assertEqual(len(batch_recipes_message_2.new_messages), 1)
    create_recipes_message_3 = batch_recipes_message_2.new_messages[0]
    self.assertTrue(batch_recipes_message_2.is_prev_batch_done)
    self.assertEqual(create_recipes_message_3.type, 'create_recipes')
    self.assertEqual(create_recipes_message_3.batch_id, new_batch_2.id)
    self.assertEqual(create_recipes_message_3.event_id, new_batch_2.event_id)
    self.assertEqual(create_recipes_message_3.recipe_type_name, new_batch_2.recipe_type.name)
    self.assertEqual(create_recipes_message_3.recipe_type_rev_num, new_batch_2.recipe_type.revision_num)
def test_set_force_reprocess(self):
    """Tests calling RecipeDiff.set_force_reprocess()"""

    # Two identical recipe interfaces so the diff itself reports no changes
    interface_1 = Interface()
    interface_1.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface_1.add_parameter(JsonParameter('json_param_1', 'object'))
    interface_2 = Interface()
    interface_2.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface_2.add_parameter(JsonParameter('json_param_1', 'object'))

    # Graph: A -> B, A -> C, C -> D (sub-recipe), C -> E
    definition_1 = RecipeDefinition(interface_1)
    definition_1.add_job_node('A', 'job_type_1', '1.0', 1)
    definition_1.add_job_node('B', 'job_type_2', '2.0', 1)
    definition_1.add_job_node('C', 'job_type_3', '1.0', 2)
    definition_1.add_recipe_node('D', 'recipe_type_1', 1)
    definition_1.add_job_node('E', 'job_type_4', '1.0', 1)
    definition_1.add_dependency('A', 'B')
    definition_1.add_dependency('A', 'C')
    definition_1.add_dependency('C', 'D')
    definition_1.add_dependency('C', 'E')
    definition_1.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition_1.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    definition_1.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition_1.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition_1.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')
    definition_1.add_dependency_input_connection('E', 'e_input_1', 'C', 'c_output_1')

    # No changes in definition 2
    definition_2 = RecipeDefinition(interface_2)
    definition_2.add_job_node('A', 'job_type_1', '1.0', 1)
    definition_2.add_job_node('B', 'job_type_2', '2.0', 1)
    definition_2.add_job_node('C', 'job_type_3', '1.0', 2)
    definition_2.add_recipe_node('D', 'recipe_type_1', 1)
    definition_2.add_job_node('E', 'job_type_4', '1.0', 1)
    definition_2.add_dependency('A', 'B')
    definition_2.add_dependency('A', 'C')
    definition_2.add_dependency('C', 'D')
    definition_2.add_dependency('C', 'E')
    definition_2.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition_2.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    definition_2.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition_2.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition_2.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')
    definition_2.add_dependency_input_connection('E', 'e_input_1', 'C', 'c_output_1')

    # Force node C plus specific nodes ('1', '2') inside sub-recipe D
    recipe_d_forced_nodes = ForcedNodes()
    recipe_d_forced_nodes.add_node('1')
    recipe_d_forced_nodes.add_node('2')
    top_forced_nodes = ForcedNodes()
    top_forced_nodes.add_node('C')
    top_forced_nodes.add_subrecipe('D', recipe_d_forced_nodes)

    diff = RecipeDiff(definition_1, definition_2)
    diff.set_force_reprocess(top_forced_nodes)

    # No recipe input changes so recipe can be reprocessed
    self.assertTrue(diff.can_be_reprocessed)
    self.assertListEqual(diff.reasons, [])
    # Check each node for correct fields
    node_a = diff.graph['A']
    self.assertEqual(node_a.status, NodeDiff.UNCHANGED)
    self.assertFalse(node_a.reprocess_new_node)
    self.assertListEqual(node_a.changes, [])
    node_b = diff.graph['B']
    self.assertEqual(node_b.status, NodeDiff.UNCHANGED)
    self.assertFalse(node_b.reprocess_new_node)
    self.assertListEqual(node_b.changes, [])
    node_c = diff.graph['C']
    self.assertEqual(node_c.status, NodeDiff.UNCHANGED)
    self.assertTrue(node_c.reprocess_new_node)  # Force reprocess
    self.assertListEqual(node_c.changes, [])
    node_d = diff.graph['D']
    self.assertEqual(node_d.status, NodeDiff.UNCHANGED)
    self.assertTrue(node_d.reprocess_new_node)  # Force reprocess
    self.assertListEqual(node_d.changes, [])
    # Check forced nodes object that got passed to recipe node D
    self.assertEqual(node_d.force_reprocess_nodes, recipe_d_forced_nodes)
    node_e = diff.graph['E']
    self.assertEqual(node_e.status, NodeDiff.UNCHANGED)
    self.assertTrue(node_e.reprocess_new_node)  # Force reprocess due to C being forced
    self.assertListEqual(node_e.changes, [])
    # Check nodes to copy, supersede, and unpublish
    self.assertSetEqual(set(diff.get_nodes_to_copy().keys()), {'A', 'B'})
    self.assertSetEqual(set(diff.get_nodes_to_supersede().keys()), {'C', 'D', 'E'})
    self.assertSetEqual(set(diff.get_nodes_to_unpublish().keys()), set())
def post(self, request, recipe_id):
    """Schedules a recipe for reprocessing and returns it in JSON form

    :param request: the HTTP POST request
    :type request: :class:`rest_framework.request.Request`
    :param recipe_id: The id of the recipe
    :type recipe_id: int encoded as a str
    :rtype: :class:`rest_framework.response.Response`
    :returns: the HTTP response to send back to the user
    """

    # Optional parameters controlling which jobs are forced and at what priority
    job_names = rest_util.parse_string_list(request, 'job_names', required=False)
    all_jobs = rest_util.parse_bool(request, 'all_jobs', required=False)
    priority = rest_util.parse_int(request, 'priority', required=False)

    try:
        recipe = Recipe.objects.select_related('recipe_type', 'recipe_type_rev').get(id=recipe_id)
    except Recipe.DoesNotExist:
        raise Http404

    # A superseded recipe has already been replaced, so it cannot be reprocessed again
    if recipe.is_superseded:
        raise BadParameter('Cannot reprocess a superseded recipe')

    event = TriggerEvent.objects.create_trigger_event('USER', None, {'user': '******'}, now())
    # Reprocess from the root of the supersede chain if this recipe superseded another
    root_recipe_id = recipe.root_superseded_recipe_id if recipe.root_superseded_recipe_id else recipe.id
    recipe_type_name = recipe.recipe_type.name
    revision_num = recipe.recipe_type_rev.revision_num

    # Translate request parameters into a ForcedNodes object (all_jobs wins over job_names)
    forced_nodes = ForcedNodes()
    if all_jobs:
        forced_nodes.set_all_nodes()
    elif job_names:
        for job_name in job_names:
            forced_nodes.add_node(job_name)

    # Execute all of the messages to perform the reprocess
    # (synchronous drain: each executed message may produce follow-on messages)
    messages = create_reprocess_messages([root_recipe_id], recipe_type_name, revision_num, event.id,
                                         forced_nodes=forced_nodes)
    while messages:
        msg = messages.pop(0)
        result = msg.execute()
        if not result:
            raise Exception('Reprocess failed on message type \'%s\'' % msg.type)
        messages.extend(msg.new_messages)

    # Update job priorities
    if priority is not None:
        Job.objects.filter(event_id=event.id).update(priority=priority)
        from queue.models import Queue
        Queue.objects.filter(job__event_id=event.id).update(priority=priority)

    # Fetch the newly-created (non-superseded) recipe in the chain for the response
    new_recipe = Recipe.objects.get(root_superseded_recipe_id=root_recipe_id, is_superseded=False)
    try:
        # TODO: remove this check when REST API v5 is removed
        if request.version == 'v6':
            new_recipe = Recipe.objects.get_details(new_recipe.id)
        else:
            new_recipe = Recipe.objects.get_details_v5(new_recipe.id)
    except Recipe.DoesNotExist:
        raise Http404

    serializer = self.get_serializer(new_recipe)
    url = reverse('recipe_details_view', args=[new_recipe.id], request=request)
    return Response(serializer.data, status=status.HTTP_201_CREATED, headers=dict(location=url))
def test_execute(self):
    """Tests calling CancelJobs.execute() successfully"""

    when = now()
    data = JobData()
    from recipe.test import utils as recipe_test_utils
    # Jobs covering each cancel outcome: FAILED (cancelable), already CANCELED,
    # COMPLETED (not cancelable), and PENDING with no executions (cancelable)
    recipe = recipe_test_utils.create_recipe()
    job_type = job_test_utils.create_seed_job_type()
    job_1 = job_test_utils.create_job(job_type=job_type, num_exes=3, status='FAILED',
                                      input=data.get_dict(), recipe=recipe)
    job_2 = job_test_utils.create_job(job_type=job_type, num_exes=3, status='CANCELED',
                                      input=data.get_dict(), recipe=recipe)
    job_3 = job_test_utils.create_job(job_type=job_type, num_exes=1, status='COMPLETED',
                                      input=data.get_dict(), recipe=recipe)
    job_4 = job_test_utils.create_job(job_type=job_type, num_exes=0, status='PENDING', recipe=recipe)
    job_ids = [job_1.id, job_2.id, job_3.id, job_4.id]
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_1', job=job_1)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_2', job=job_2)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_3', job=job_3)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_4', job=job_4)

    # Add jobs to message
    message = CancelJobs()
    message.when = when
    if message.can_fit_more():
        message.add_job(job_1.id)
    if message.can_fit_more():
        message.add_job(job_2.id)
    if message.can_fit_more():
        message.add_job(job_3.id)
    if message.can_fit_more():
        message.add_job(job_4.id)

    # Execute message
    result = message.execute()
    self.assertTrue(result)

    jobs = Job.objects.filter(id__in=job_ids).order_by('id')
    # Job 1 should have been canceled
    self.assertEqual(jobs[0].status, 'CANCELED')
    self.assertEqual(jobs[0].last_status_change, when)
    # Job 2 was already canceled
    self.assertEqual(jobs[1].status, 'CANCELED')
    self.assertNotEqual(jobs[1].last_status_change, when)
    # Job 3 was already COMPLETED, so can't be canceled
    self.assertEqual(jobs[2].status, 'COMPLETED')
    self.assertNotEqual(jobs[2].last_status_change, when)
    # Job 4 should have been canceled
    self.assertEqual(jobs[3].status, 'CANCELED')
    self.assertEqual(jobs[3].last_status_change, when)

    # Expected forced-nodes payload of the resulting update_recipe message (all nodes)
    from recipe.diff.forced_nodes import ForcedNodes
    from recipe.diff.json.forced_nodes_v6 import convert_forced_nodes_to_v6
    forced_nodes = ForcedNodes()
    forced_nodes.set_all_nodes()
    forced_nodes_dict = convert_forced_nodes_to_v6(forced_nodes).get_dict()

    # Should be messages to update recipe and update recipe metrics after canceling jobs
    self.assertEqual(len(message.new_messages), 2)
    update_recipe_msg = None
    update_recipe_metrics_msg = None
    for msg in message.new_messages:
        if msg.type == 'update_recipe':
            update_recipe_msg = msg
        elif msg.type == 'update_recipe_metrics':
            update_recipe_metrics_msg = msg
    self.assertIsNotNone(update_recipe_msg)
    self.assertIsNotNone(update_recipe_metrics_msg)
    self.assertEqual(update_recipe_msg.root_recipe_id, recipe.id)
    self.assertDictEqual(convert_forced_nodes_to_v6(update_recipe_msg.forced_nodes).get_dict(),
                         forced_nodes_dict)
    self.assertListEqual(update_recipe_metrics_msg._recipe_ids, [recipe.id])

    # Test executing message again — results must be identical (idempotent)
    message.new_messages = []
    result = message.execute()
    self.assertTrue(result)

    # All results should be the same
    jobs = Job.objects.filter(id__in=job_ids).order_by('id')
    # Job 1 should have been canceled
    self.assertEqual(jobs[0].status, 'CANCELED')
    self.assertEqual(jobs[0].last_status_change, when)
    # Job 2 was already canceled
    self.assertEqual(jobs[1].status, 'CANCELED')
    self.assertNotEqual(jobs[1].last_status_change, when)
    # Job 3 was already COMPLETED, so can't be canceled
    self.assertEqual(jobs[2].status, 'COMPLETED')
    self.assertNotEqual(jobs[2].last_status_change, when)
    # Job 4 should have been canceled
    self.assertEqual(jobs[3].status, 'CANCELED')
    self.assertEqual(jobs[3].last_status_change, when)

    # Should be messages to update recipe and update recipe metrics after canceling jobs
    self.assertEqual(len(message.new_messages), 2)
    update_recipe_msg = None
    update_recipe_metrics_msg = None
    for msg in message.new_messages:
        if msg.type == 'update_recipe':
            update_recipe_msg = msg
        elif msg.type == 'update_recipe_metrics':
            update_recipe_metrics_msg = msg
    self.assertIsNotNone(update_recipe_msg)
    self.assertIsNotNone(update_recipe_metrics_msg)
    self.assertEqual(update_recipe_msg.root_recipe_id, recipe.id)
    self.assertDictEqual(convert_forced_nodes_to_v6(update_recipe_msg.forced_nodes).get_dict(),
                         forced_nodes_dict)
    self.assertListEqual(update_recipe_metrics_msg._recipe_ids, [recipe.id])
def test_execute_forced_nodes(self):
    """Tests calling CreateBatchRecipes.execute() when only specific nodes are forced"""

    # Importing module here to patch the max recipe num (5 recipes per message)
    import batch.messages.create_batch_recipes
    batch.messages.create_batch_recipes.MAX_RECIPE_NUM = 5

    jt_1 = job_test_utils.create_seed_job_type()
    jt_2 = job_test_utils.create_seed_job_type()
    jt_3 = job_test_utils.create_seed_job_type()
    jt_4 = job_test_utils.create_seed_job_type()

    # Sub-recipe type: two independent job nodes fed by the recipe input
    recipe_def = {
        'version': '7',
        'input': {
            'files': [{'name': 'INPUT_IMAGE', 'media_types': ['image/png'],
                       'required': True, 'multiple': False}],
            'json': []
        },
        'nodes': {
            'node_a': {
                'dependencies': [],
                'input': {'input_a': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                'node_type': {'node_type': 'job', 'job_type_name': jt_1.name,
                              'job_type_version': jt_1.version,
                              'job_type_revision': jt_1.revision_num}
            },
            'node_b': {
                'dependencies': [],
                'input': {'input_a': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                'node_type': {'node_type': 'job', 'job_type_name': jt_2.name,
                              'job_type_version': jt_2.version,
                              'job_type_revision': jt_2.revision_num}
            }
        }
    }
    sub_recipe_type = recipe_test_utils.create_recipe_type_v6(definition=recipe_def)
    # NOTE(review): sub_recipe is never referenced after creation — confirm whether it
    # is needed as database setup or is leftover
    sub_recipe = recipe_test_utils.create_recipe(recipe_type=sub_recipe_type)

    # Recipe with two jobs and one subrecipe (c -> d -> r)
    recipe_def = {
        'version': '7',
        'input': {
            'files': [{'name': 'INPUT_IMAGE', 'media_types': ['image/png'],
                       'required': True, 'multiple': False}],
            'json': []
        },
        'nodes': {
            'recipe_node': {
                'dependencies': [{'name': 'node_d', 'acceptance': True}],
                'input': {'input_a': {'type': 'dependency', 'node': 'node_d',
                                      'output': 'OUTPUT_IMAGE'}},
                'node_type': {'node_type': 'recipe',
                              'recipe_type_name': sub_recipe_type.name,
                              'recipe_type_revision': sub_recipe_type.revision_num}
            },
            'node_c': {
                'dependencies': [],
                'input': {'INPUT_IMAGE': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                'node_type': {'node_type': 'job', 'job_type_name': jt_3.name,
                              'job_type_version': jt_3.version,
                              'job_type_revision': jt_3.revision_num}
            },
            'node_d': {
                'dependencies': [{'name': 'node_c', 'acceptance': True}],
                'input': {'INPUT_IMAGE': {'type': 'dependency', 'node': 'node_c',
                                          'output': 'OUTPUT_IMAGE'}},
                'node_type': {'node_type': 'job', 'job_type_name': jt_4.name,
                              'job_type_version': jt_4.version,
                              'job_type_revision': jt_4.revision_num}
            }
        }
    }
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=recipe_def)

    # Create a dataset of 6 files
    dataset_def = {
        'parameters': {
            'files': [{'media_types': ['image/png'], 'required': True,
                       'multiple': False, 'name': 'INPUT_IMAGE'}],
            'json': []
        }
    }
    the_dataset = data_test_utils.create_dataset(definition=dataset_def)
    workspace = storage_test_utils.create_workspace()

    # Create 6 files & recipes to go along
    src_file_ids = []
    recipe_ids = []
    data_list = []
    for i in range(0, 6):
        file_name = 'input_%d.png' % i
        src_file = storage_test_utils.create_file(file_name=file_name, file_type='SOURCE',
                                                  media_type='image/png', file_size=10,
                                                  data_type_tags=['type'], file_path='the_path',
                                                  workspace=workspace)
        src_file_ids.append(src_file.id)
        data_dict = {'version': '6', 'files': {'INPUT_IMAGE': [src_file.id]}, 'json': {}}
        data_list.append(DataV6(data=data_dict).get_dict())
        recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_dict)
        recipe_ids.append(recipe.id)
    members = data_test_utils.create_dataset_members(dataset=the_dataset, data_list=data_list)
    recipe_test_utils.process_recipe_inputs(recipe_ids)

    # Batch definition forcing only 'node_d' (not all nodes)
    batch_definition = BatchDefinition()
    batch_definition.dataset = the_dataset.id
    forced_nodes = ForcedNodes()
    forced_nodes.add_node('node_d')
    forced_nodes.all_nodes = False
    batch_definition.forced_nodes = forced_nodes
    new_batch = batch_test_utils.create_batch(recipe_type=recipe_type, definition=batch_definition)
    # NOTE(review): the visible portion of this test only performs setup — no message is
    # executed and no assertions follow in this chunk; confirm whether assertions were intended
def test_execute_with_top_level_recipe(self):
    """Tests calling UpdateRecipeMetrics.execute() successfully where messages need to be sent to update a
    top-level recipe
    """

    # A sub-recipe nested under a top-level recipe, both in the same batch
    batch = batch_test_utils.create_batch()
    top_recipe = recipe_test_utils.create_recipe(batch=batch)
    recipe = recipe_test_utils.create_recipe(batch=batch)
    recipe.recipe = top_recipe
    recipe.root_recipe = top_recipe
    recipe.save()
    recipe_node_1 = recipe_test_utils.create_recipe_node(recipe=top_recipe, sub_recipe=recipe)
    # Expected forced nodes dict for the update_recipe message produced below
    forced_nodes = ForcedNodes()
    forced_nodes.set_all_nodes()
    forced_nodes_dict = convert_forced_nodes_to_v6(forced_nodes).get_dict()

    # Recipe jobs
    job_1 = job_test_utils.create_job(status='FAILED', save=False)
    job_2 = job_test_utils.create_job(status='CANCELED', save=False)
    job_3 = job_test_utils.create_job(status='BLOCKED', save=False)
    job_4 = job_test_utils.create_job(status='BLOCKED', save=False)
    job_5 = job_test_utils.create_job(status='COMPLETED', save=False)
    Job.objects.bulk_create([job_1, job_2, job_3, job_4, job_5])

    # Recipe nodes
    recipe_node_2 = recipe_test_utils.create_recipe_node(recipe=recipe, job=job_1)
    recipe_node_3 = recipe_test_utils.create_recipe_node(recipe=recipe, job=job_2)
    recipe_node_4 = recipe_test_utils.create_recipe_node(recipe=recipe, job=job_3)
    recipe_node_5 = recipe_test_utils.create_recipe_node(recipe=recipe, job=job_4)
    recipe_node_6 = recipe_test_utils.create_recipe_node(recipe=recipe, job=job_5)
    RecipeNode.objects.bulk_create([
        recipe_node_1, recipe_node_2, recipe_node_3, recipe_node_4, recipe_node_5, recipe_node_6
    ])

    # Add recipes to message
    message = UpdateRecipeMetrics()
    if message.can_fit_more():
        message.add_recipe(recipe.id)

    # Execute message
    result = message.execute()
    self.assertTrue(result)

    # Sub-recipe metrics should reflect the five jobs created above
    recipe = Recipe.objects.get(id=recipe.id)
    self.assertEqual(recipe.jobs_total, 5)
    self.assertEqual(recipe.jobs_pending, 0)
    self.assertEqual(recipe.jobs_blocked, 2)
    self.assertEqual(recipe.jobs_queued, 0)
    self.assertEqual(recipe.jobs_running, 0)
    self.assertEqual(recipe.jobs_failed, 1)
    self.assertEqual(recipe.jobs_completed, 1)
    self.assertEqual(recipe.jobs_canceled, 1)
    self.assertEqual(recipe.sub_recipes_total, 0)
    self.assertEqual(recipe.sub_recipes_completed, 0)

    # Make sure message is created to update top-level recipe and recipe metrics
    # There should be no message to update batch metrics since we did not update a top-level recipe
    self.assertEqual(len(message.new_messages), 2)
    update_recipe_metrics_msg = message.new_messages[0]
    update_recipe_msg = message.new_messages[1]
    self.assertEqual(update_recipe_metrics_msg.type, 'update_recipe_metrics')
    self.assertListEqual(update_recipe_metrics_msg._recipe_ids, [top_recipe.id])
    self.assertEqual(update_recipe_msg.type, 'update_recipe')
    self.assertEqual(update_recipe_msg.root_recipe_id, top_recipe.id)
    self.assertDictEqual(convert_forced_nodes_to_v6(update_recipe_msg.forced_nodes).get_dict(),
                         forced_nodes_dict)

    # Test executing message again (round-trip through JSON) — results must be identical
    message_json_dict = message.to_json()
    message = UpdateRecipeMetrics.from_json(message_json_dict)
    result = message.execute()
    self.assertTrue(result)

    recipe = Recipe.objects.get(id=recipe.id)
    self.assertEqual(recipe.jobs_total, 5)
    self.assertEqual(recipe.jobs_pending, 0)
    self.assertEqual(recipe.jobs_blocked, 2)
    self.assertEqual(recipe.jobs_queued, 0)
    self.assertEqual(recipe.jobs_running, 0)
    self.assertEqual(recipe.jobs_failed, 1)
    self.assertEqual(recipe.jobs_completed, 1)
    self.assertEqual(recipe.jobs_canceled, 1)
    self.assertEqual(recipe.sub_recipes_total, 0)
    self.assertEqual(recipe.sub_recipes_completed, 0)

    # Make sure message is created to update top-level recipe and recipe metrics
    # There should be no message to update batch metrics since we did not update a top-level recipe
    self.assertEqual(len(message.new_messages), 2)
    update_recipe_metrics_msg = message.new_messages[0]
    update_recipe_msg = message.new_messages[1]
    self.assertEqual(update_recipe_metrics_msg.type, 'update_recipe_metrics')
    self.assertListEqual(update_recipe_metrics_msg._recipe_ids, [top_recipe.id])
    self.assertEqual(update_recipe_msg.type, 'update_recipe')
    self.assertEqual(update_recipe_msg.root_recipe_id, top_recipe.id)
    self.assertDictEqual(convert_forced_nodes_to_v6(update_recipe_msg.forced_nodes).get_dict(),
                         forced_nodes_dict)
def test_execute_new(self):
    """Tests calling CreateBatchRecipes.execute() successfully"""

    # Importing module here to patch the max recipe num
    import batch.messages.create_batch_recipes
    batch.messages.create_batch_recipes.MAX_RECIPE_NUM = 5

    # jt_1/jt_2 go in sub-recipe type 1, jt_3/jt_4 in sub-recipe type 2
    jt_1 = job_test_utils.create_seed_job_type()
    jt_2 = job_test_utils.create_seed_job_type()
    jt_3 = job_test_utils.create_seed_job_type()
    jt_4 = job_test_utils.create_seed_job_type()

    # Sub-recipe type 1: two independent job nodes
    recipe_def = {
        'version': '7',
        'input': {
            'files': [{'name': 'INPUT_IMAGE', 'media_types': ['image/png'], 'required': True,
                       'multiple': False}],
            'json': []
        },
        'nodes': {
            'node_a': {
                'dependencies': [],
                'input': {'input_a': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                'node_type': {'node_type': 'job', 'job_type_name': jt_1.name,
                              'job_type_version': jt_1.version, 'job_type_revision': jt_1.revision_num}
            },
            'node_b': {
                'dependencies': [],
                'input': {'input_a': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                'node_type': {'node_type': 'job', 'job_type_name': jt_2.name,
                              'job_type_version': jt_2.version, 'job_type_revision': jt_2.revision_num}
            }
        }
    }
    sub_recipe_type_1 = recipe_test_utils.create_recipe_type_v6(definition=recipe_def)

    # Sub-recipe type 2: same shape as type 1 but with different job types
    recipe_def = {
        'version': '7',
        'input': {
            'files': [{'name': 'INPUT_IMAGE', 'media_types': ['image/png'], 'required': True,
                       'multiple': False}],
            'json': []
        },
        'nodes': {
            'node_a': {
                'dependencies': [],
                'input': {'input_a': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                'node_type': {'node_type': 'job', 'job_type_name': jt_3.name,
                              'job_type_version': jt_3.version, 'job_type_revision': jt_3.revision_num}
            },
            'node_b': {
                'dependencies': [],
                'input': {'input_a': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                'node_type': {'node_type': 'job', 'job_type_name': jt_4.name,
                              'job_type_version': jt_4.version, 'job_type_revision': jt_4.revision_num}
            }
        }
    }
    sub_recipe_type_2 = recipe_test_utils.create_recipe_type_v6(definition=recipe_def)

    jt_5 = job_test_utils.create_seed_job_type()
    jt_6 = job_test_utils.create_seed_job_type()

    # Top-level recipe: two sub-recipe nodes plus job chain node_c -> node_d -> recipe_node_b
    recipe_def = {
        'version': '7',
        'input': {
            'files': [{'name': 'INPUT_IMAGE', 'media_types': ['image/png'], 'required': True,
                       'multiple': False}],
            'json': []
        },
        'nodes': {
            'recipe_node_a': {
                'dependencies': [],
                'input': {'input_a': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                'node_type': {'node_type': 'recipe', 'recipe_type_name': sub_recipe_type_1.name,
                              'recipe_type_revision': sub_recipe_type_1.revision_num}
            },
            'recipe_node_b': {
                'dependencies': [{'name': 'node_d', 'acceptance': True}],
                'input': {'input_a': {'type': 'dependency', 'node': 'node_d', 'output': 'OUTPUT_IMAGE'}},
                'node_type': {'node_type': 'recipe', 'recipe_type_name': sub_recipe_type_2.name,
                              'recipe_type_revision': sub_recipe_type_2.revision_num}
            },
            'node_c': {
                'dependencies': [],
                'input': {'INPUT_IMAGE': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                'node_type': {'node_type': 'job', 'job_type_name': jt_5.name,
                              'job_type_version': jt_5.version, 'job_type_revision': jt_5.revision_num}
            },
            'node_d': {
                'dependencies': [{'name': 'node_c', 'acceptance': True}],
                'input': {'INPUT_IMAGE': {'type': 'dependency', 'node': 'node_c', 'output': 'OUTPUT_IMAGE'}},
                'node_type': {'node_type': 'job', 'job_type_name': jt_6.name,
                              'job_type_version': jt_6.version, 'job_type_revision': jt_6.revision_num}
            }
        }
    }
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=recipe_def)

    # Create a dataset of 6 files
    dataset_def = {
        'parameters': {
            'files': [{'media_types': ['image/png'], 'required': True, 'multiple': False,
                       'name': 'INPUT_IMAGE'}],
            'json': []
        }
    }
    the_dataset = data_test_utils.create_dataset(definition=dataset_def)
    workspace = storage_test_utils.create_workspace()

    # Create 6 files
    src_file_ids = []
    data_list = []
    for i in range(0, 6):
        file_name = 'input_%d.png' % i
        src_file = storage_test_utils.create_file(file_name=file_name, file_type='SOURCE',
                                                  media_type='image/png', file_size=10,
                                                  data_type_tags=['type'], file_path='the_path',
                                                  workspace=workspace)
        src_file_ids.append(src_file.id)
        data_dict = {'version': '6', 'files': {'INPUT_IMAGE': [src_file.id]}, 'json': {}}
        data_list.append(DataV6(data=data_dict).get_dict())
    members = data_test_utils.create_dataset_members(dataset=the_dataset, data_list=data_list)

    # Batch definition forcing all nodes to reprocess
    batch_definition = BatchDefinition()
    batch_definition.dataset = the_dataset.id
    forced_nodes = ForcedNodes()
    forced_nodes.all_nodes = True
    batch_definition.forced_nodes = forced_nodes
    new_batch = batch_test_utils.create_batch(recipe_type=recipe_type, definition=batch_definition)

    # Create message
    message = batch.messages.create_batch_recipes.CreateBatchRecipes()
    message.batch_id = new_batch.id

    # Copy JSON for running same message again later
    # NOTE(review): message_json is never used in this test — presumably leftover scaffolding
    message_json = message.to_json()

    # Execute message
    result = message.execute()
    self.assertTrue(result)

    # Should be 6 messages, one for next create_batch_recipes and 5 for creating new recipes
    self.assertEqual(len(message.new_messages), 6)

    # Create batch message
    batch_recipes_message = message.new_messages[0]
    self.assertEqual(batch_recipes_message.type, 'create_batch_recipes')
    self.assertEqual(batch_recipes_message.current_dataset_file_id, src_file_ids[1])
    self.assertFalse(batch_recipes_message.is_prev_batch_done)

    from recipe.models import Recipe

    # Verify each message has a different input and execute
    src_ids = copy.deepcopy(src_file_ids)
    for msg in message.new_messages[1:]:
        self.assertEqual(msg.type, 'create_recipes')
        self.assertEqual(msg.create_recipes_type, 'new-recipe')
        file_id = DataV6(msg.recipe_input_data).get_data().values['INPUT_IMAGE'].file_ids[0]
        self.assertTrue(file_id in src_ids)
        src_ids.remove(file_id)
        # Execute the create_recipes messages
        result = msg.execute()
        self.assertTrue(result)

    # Verify 5 recipes have been created and they have the proper input files:
    recipes = Recipe.objects.all()
    self.assertEqual(len(recipes), 5)
    src_ids = copy.deepcopy(src_file_ids)
    for recipe in recipes:
        self.assertEqual(recipe.recipe_type.name, new_batch.recipe_type.name)
        file_id = recipe.get_input_data().values['INPUT_IMAGE'].file_ids[0]
        self.assertTrue(file_id in src_ids)
        src_ids.remove(file_id)

    # Execute next create_batch_recipes messages
    result = batch_recipes_message.execute()
    self.assertTrue(result)

    # Should only have one last create_recipes message
    self.assertEqual(len(batch_recipes_message.new_messages), 1)
    create_recipes_message = batch_recipes_message.new_messages[0]
    self.assertTrue(batch_recipes_message.is_prev_batch_done)
    self.assertEqual(create_recipes_message.type, 'create_recipes')
    self.assertEqual(create_recipes_message.create_recipes_type, 'new-recipe')
    self.assertEqual(create_recipes_message.batch_id, new_batch.id)
    self.assertEqual(create_recipes_message.event_id, new_batch.event_id)
    self.assertEqual(create_recipes_message.recipe_type_name, new_batch.recipe_type.name)
    self.assertEqual(create_recipes_message.recipe_type_rev_num, new_batch.recipe_type.revision_num)
def _create_messages(self, new_recipes):
    """Creates any messages resulting from the new recipes

    Fans out three kinds of downstream messages: supersede_recipe_nodes for any recipes that the
    new ones replace, process_recipe_input/update_recipe to move the new recipes forward, and
    update_recipe_metrics for the containing recipe when sub-recipes were created.

    :param new_recipes: The list of new recipe models
    :type new_recipes: list
    """

    forced_nodes_by_id = {}  # {Recipe ID: Forced nodes}

    # Send supersede_recipe_nodes messages if new recipes are superseding old ones
    for recipe_diff in self._recipe_diffs:
        recipe_ids = []
        supersede_jobs = set()
        supersede_subrecipes = set()
        unpublish_jobs = set()
        supersede_recursive = set()
        unpublish_recursive = set()
        # Gather up superseded recipe IDs for this diff
        for recipe_pair in recipe_diff.recipe_pairs:
            recipe_ids.append(recipe_pair.superseded_recipe.id)
        # Supersede applicable jobs and sub-recipes
        for node_diff in recipe_diff.diff.get_nodes_to_supersede().values():
            if node_diff.node_type == JobNodeDefinition.NODE_TYPE:
                supersede_jobs.add(node_diff.name)
            elif node_diff.node_type == RecipeNodeDefinition.NODE_TYPE:
                supersede_subrecipes.add(node_diff.name)
        # Recursively supersede applicable sub-recipes
        for node_diff in recipe_diff.diff.get_nodes_to_recursively_supersede().values():
            if node_diff.node_type == RecipeNodeDefinition.NODE_TYPE:
                supersede_recursive.add(node_diff.name)
                # Update forced nodes so that the new sub-recipes know to force reprocess all nodes
                if not recipe_diff.diff.forced_nodes:
                    recipe_diff.diff.forced_nodes = ForcedNodes()
                force_all = ForcedNodes()
                force_all.set_all_nodes()
                recipe_diff.diff.forced_nodes.add_subrecipe(node_diff.name, force_all)
        # Unpublish applicable jobs and recursively unpublish applicable sub-recipes
        for node_diff in recipe_diff.diff.get_nodes_to_unpublish().values():
            if node_diff.node_type == JobNodeDefinition.NODE_TYPE:
                unpublish_jobs.add(node_diff.name)
            elif node_diff.node_type == RecipeNodeDefinition.NODE_TYPE:
                unpublish_recursive.add(node_diff.name)
        # Only send a supersede message when there is something for it to do
        if supersede_jobs or supersede_subrecipes or unpublish_jobs or supersede_recursive or unpublish_recursive:
            msgs = create_supersede_recipe_nodes_messages(recipe_ids, self._when, supersede_jobs,
                                                          supersede_subrecipes, unpublish_jobs,
                                                          supersede_recursive, unpublish_recursive)
            self.new_messages.extend(msgs)
        if recipe_diff.diff.forced_nodes:
            # Store the forced nodes for this diff by every new recipe ID in the diff
            ids = [pair.new_recipe.id for pair in recipe_diff.recipe_pairs]
            forced_nodes_by_id.update({recipe_id: recipe_diff.diff.forced_nodes for recipe_id in ids})

    # Send messages to further process/update the new recipes
    if self.create_recipes_type == NEW_RECIPE_TYPE or self.create_recipes_type == REPROCESS_TYPE:
        # Top-level recipes already have their input, so process it directly
        msgs = create_process_recipe_input_messages([r.id for r in new_recipes],
                                                    forced_nodes=self.forced_nodes)
        self.new_messages.extend(msgs)
    elif self.create_recipes_type == SUB_RECIPE_TYPE:
        # Local import — presumably avoids a circular import between the message modules
        from recipe.messages.update_recipe import create_update_recipe_message
        for new_recipe in new_recipes:
            process_input = self._process_input.get(new_recipe.id, False)
            forced_nodes = forced_nodes_by_id.get(new_recipe.id, None)
            if new_recipe.has_input() or process_input:
                # This new recipe is all ready to have its input processed
                msg = create_process_recipe_input_messages([new_recipe.id],
                                                           forced_nodes=forced_nodes)[0]
            else:
                # Recipe not ready for its input yet, but send update_recipe for it to create its nodes awhile
                root_id = new_recipe.id
                if new_recipe.root_superseded_recipe_id:
                    root_id = new_recipe.root_superseded_recipe_id
                msg = create_update_recipe_message(root_id, forced_nodes=forced_nodes)
            self.new_messages.append(msg)
    if self.recipe_id:
        # Update the metrics for the recipe containing the new sub-recipes we just created
        self.new_messages.extend(create_update_recipe_metrics_messages([self.recipe_id]))
def test_convert_forced_nodes_to_v6_empty(self):
    """Tests that a freshly constructed ForcedNodes object converts to the minimal v6 JSON dict"""

    converted = convert_forced_nodes_to_v6(ForcedNodes())
    expected_dict = {'version': '7', 'all': False}
    self.assertDictEqual(converted.get_dict(), expected_dict)
def test_execute(self):
    """Tests calling UpdateRecipe.execute() successfully"""

    # Build a recipe graph mixing completed/pending/blocked/failed jobs, a sub-recipe,
    # an uncreated sub-recipe node (node_e), an uncreated job node (node_f), and conditions
    data_dict = convert_data_to_v6_json(Data()).get_dict()
    job_a = job_test_utils.create_job(status='COMPLETED', input=data_dict, output=data_dict)
    recipe_b = recipe_test_utils.create_recipe()
    job_c = job_test_utils.create_job(status='PENDING')
    job_d = job_test_utils.create_job(status='BLOCKED')
    recipe_type_e = recipe_test_utils.create_recipe_type_v6()
    job_type_f = job_test_utils.create_seed_job_type()
    job_type_k = job_test_utils.create_seed_job_type()
    job_g = job_test_utils.create_job(status='FAILED', input=data_dict)
    job_h = job_test_utils.create_job(status='PENDING')
    condition_i = recipe_test_utils.create_recipe_condition(save=True)
    definition = RecipeDefinition(Interface())
    definition.add_job_node('node_a', job_a.job_type.name, job_a.job_type.version,
                            job_a.job_type_rev.revision_num)
    definition.add_recipe_node('node_b', recipe_b.recipe_type.name, recipe_b.recipe_type.revision_num)
    definition.add_job_node('node_c', job_c.job_type.name, job_c.job_type.version,
                            job_c.job_type_rev.revision_num)
    definition.add_job_node('node_d', job_d.job_type.name, job_d.job_type.version,
                            job_d.job_type_rev.revision_num)
    definition.add_recipe_node('node_e', recipe_type_e.name, recipe_type_e.revision_num)
    definition.add_job_node('node_f', job_type_f.name, job_type_f.version, job_type_f.revision_num)
    definition.add_job_node('node_g', job_g.job_type.name, job_g.job_type.version,
                            job_g.job_type_rev.revision_num)
    definition.add_job_node('node_h', job_h.job_type.name, job_h.job_type.version,
                            job_h.job_type_rev.revision_num)
    definition.add_condition_node('node_i', Interface(), DataFilter())  #True
    definition.add_condition_node('node_j', Interface(), DataFilter())  #True
    definition.add_job_node('node_k', job_type_k.name, job_type_k.version, job_type_k.revision_num)
    definition.add_dependency('node_a', 'node_c')
    definition.add_dependency('node_a', 'node_e')
    definition.add_dependency('node_a', 'node_g')
    definition.add_dependency('node_c', 'node_d')
    definition.add_dependency('node_e', 'node_f')
    definition.add_dependency('node_g', 'node_h')
    definition.add_dependency('node_a', 'node_i')
    definition.add_dependency('node_a', 'node_j')
    definition.add_dependency('node_i', 'node_k')
    definition.add_dependency('node_j', 'node_k')
    definition_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_dict)
    # Only a subset of nodes exist yet; node_e, node_f, node_j, node_k must be created by the message
    node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_a', job=job_a,
                                                  save=False)
    node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_b',
                                                  sub_recipe=recipe_b, save=False)
    node_c = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_c', job=job_c,
                                                  save=False)
    node_d = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_d', job=job_d,
                                                  save=False)
    node_g = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_g', job=job_g,
                                                  save=False)
    node_h = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_h', job=job_h,
                                                  save=False)
    node_i = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_i',
                                                  condition=condition_i, save=False)
    RecipeNode.objects.bulk_create([node_a, node_b, node_c, node_d, node_g, node_h, node_i])
    # Force all nodes within the node_e sub-recipe
    forced_nodes = ForcedNodes()
    forced_nodes_e = ForcedNodes()
    forced_nodes_e.set_all_nodes()
    forced_nodes.add_subrecipe('node_e', forced_nodes_e)

    # Create and execute message
    message = create_update_recipe_message(recipe.id, forced_nodes=forced_nodes)
    result = message.execute()
    self.assertTrue(result)

    self.assertEqual(len(message.new_messages), 8)

    # Check messages
    blocked_jobs_msg = None
    pending_jobs_msg = None
    create_cond_msg = None
    create_jobs_msg = None
    create_recipes_msg = None
    process_condition_msg = None
    process_job_input_msg = None
    process_recipe_input_msg = None
    for msg in message.new_messages:
        if msg.type == 'blocked_jobs':
            blocked_jobs_msg = msg
        elif msg.type == 'pending_jobs':
            pending_jobs_msg = msg
        elif msg.type == 'create_conditions':
            create_cond_msg = msg
        elif msg.type == 'create_jobs':
            create_jobs_msg = msg
        elif msg.type == 'create_recipes':
            create_recipes_msg = msg
        elif msg.type == 'process_condition':
            process_condition_msg = msg
        elif msg.type == 'process_job_input':
            process_job_input_msg = msg
        elif msg.type == 'process_recipe_input':
            process_recipe_input_msg = msg
    self.assertIsNotNone(blocked_jobs_msg)
    self.assertIsNotNone(pending_jobs_msg)
    self.assertIsNotNone(create_cond_msg)
    self.assertIsNotNone(create_jobs_msg)
    self.assertIsNotNone(create_recipes_msg)
    self.assertIsNotNone(process_condition_msg)
    self.assertIsNotNone(process_job_input_msg)
    self.assertIsNotNone(process_recipe_input_msg)
    # Check message to change jobs to BLOCKED
    self.assertListEqual(blocked_jobs_msg._blocked_job_ids, [job_h.id])
    # Check message to change jobs to PENDING
    self.assertListEqual(pending_jobs_msg._pending_job_ids, [job_d.id])
    # Check message to create conditions
    self.assertEqual(create_cond_msg.recipe_id, recipe.id)
    self.assertEqual(create_cond_msg.root_recipe_id, recipe.root_superseded_recipe_id)
    condition = Condition('node_j', True)
    self.assertListEqual(create_cond_msg.conditions, [condition])
    # Check message to create jobs
    self.assertEqual(create_jobs_msg.event_id, recipe.event_id)
    self.assertEqual(create_jobs_msg.create_jobs_type, RECIPE_TYPE)
    self.assertEqual(create_jobs_msg.recipe_id, recipe.id)
    self.assertEqual(create_jobs_msg.root_recipe_id, recipe.root_superseded_recipe_id)
    self.assertIsNone(create_jobs_msg.superseded_recipe_id)
    recipe_job = RecipeJob(job_type_f.name, job_type_f.version, job_type_f.revision_num, 'node_f',
                           False)
    self.assertListEqual(create_jobs_msg.recipe_jobs, [recipe_job])
    # Check message to create sub-recipes
    self.assertEqual(create_recipes_msg.event_id, recipe.event_id)
    msg_forced_nodes_dict = convert_forced_nodes_to_v6(create_recipes_msg.forced_nodes).get_dict()
    expected_forced_nodes_dict = convert_forced_nodes_to_v6(forced_nodes).get_dict()
    self.assertDictEqual(msg_forced_nodes_dict, expected_forced_nodes_dict)
    self.assertEqual(create_recipes_msg.create_recipes_type, SUB_RECIPE_TYPE)
    self.assertEqual(create_recipes_msg.recipe_id, recipe.id)
    self.assertEqual(create_recipes_msg.root_recipe_id, recipe.root_superseded_recipe_id)
    self.assertIsNone(create_recipes_msg.superseded_recipe_id)
    sub = SubRecipe(recipe_type_e.name, recipe_type_e.revision_num, 'node_e', True)
    self.assertListEqual(create_recipes_msg.sub_recipes, [sub])
    # Check message to process condition
    self.assertEqual(process_condition_msg.condition_id, condition_i.id)
    # Check message to process job input
    self.assertEqual(process_job_input_msg.job_id, job_c.id)
    # Check message to process recipe input
    self.assertEqual(process_recipe_input_msg.recipe_id, recipe_b.id)

    # Test executing message again (round-trip through JSON) — results must be identical
    message_json_dict = message.to_json()
    message = UpdateRecipe.from_json(message_json_dict)
    result = message.execute()
    self.assertTrue(result)

    # Make sure the same messages are returned
    self.assertEqual(len(message.new_messages), 8)
    blocked_jobs_msg = None
    pending_jobs_msg = None
    create_cond_msg = None
    create_jobs_msg = None
    create_recipes_msg = None
    process_condition_msg = None
    process_job_input_msg = None
    process_recipe_input_msg = None
    for msg in message.new_messages:
        if msg.type == 'blocked_jobs':
            blocked_jobs_msg = msg
        elif msg.type == 'pending_jobs':
            pending_jobs_msg = msg
        elif msg.type == 'create_conditions':
            create_cond_msg = msg
        elif msg.type == 'create_jobs':
            create_jobs_msg = msg
        elif msg.type == 'create_recipes':
            create_recipes_msg = msg
        elif msg.type == 'process_condition':
            process_condition_msg = msg
        elif msg.type == 'process_job_input':
            process_job_input_msg = msg
        elif msg.type == 'process_recipe_input':
            process_recipe_input_msg = msg
    self.assertIsNotNone(blocked_jobs_msg)
    self.assertIsNotNone(pending_jobs_msg)
    self.assertIsNotNone(create_cond_msg)
    self.assertIsNotNone(create_jobs_msg)
    self.assertIsNotNone(create_recipes_msg)
    self.assertIsNotNone(process_condition_msg)
    self.assertIsNotNone(process_job_input_msg)
    self.assertIsNotNone(process_recipe_input_msg)
    # Check message to change jobs to BLOCKED
    self.assertListEqual(blocked_jobs_msg._blocked_job_ids, [job_h.id])
    # Check message to change jobs to PENDING
    self.assertListEqual(pending_jobs_msg._pending_job_ids, [job_d.id])
    # Check message to create conditions
    self.assertEqual(create_cond_msg.recipe_id, recipe.id)
    self.assertEqual(create_cond_msg.root_recipe_id, recipe.root_superseded_recipe_id)
    condition = Condition('node_j', True)
    self.assertListEqual(create_cond_msg.conditions, [condition])
    # Check message to create jobs
    self.assertEqual(create_jobs_msg.event_id, recipe.event_id)
    self.assertEqual(create_jobs_msg.create_jobs_type, RECIPE_TYPE)
    self.assertEqual(create_jobs_msg.recipe_id, recipe.id)
    self.assertEqual(create_jobs_msg.root_recipe_id, recipe.root_superseded_recipe_id)
    self.assertIsNone(create_jobs_msg.superseded_recipe_id)
    recipe_job = RecipeJob(job_type_f.name, job_type_f.version, job_type_f.revision_num, 'node_f',
                           False)
    self.assertListEqual(create_jobs_msg.recipe_jobs, [recipe_job])
    # Check message to create sub-recipes
    self.assertEqual(create_recipes_msg.event_id, recipe.event_id)
    msg_forced_nodes_dict = convert_forced_nodes_to_v6(create_recipes_msg.forced_nodes).get_dict()
    expected_forced_nodes_dict = convert_forced_nodes_to_v6(forced_nodes).get_dict()
    self.assertDictEqual(msg_forced_nodes_dict, expected_forced_nodes_dict)
    self.assertEqual(create_recipes_msg.create_recipes_type, SUB_RECIPE_TYPE)
    self.assertEqual(create_recipes_msg.recipe_id, recipe.id)
    self.assertEqual(create_recipes_msg.root_recipe_id, recipe.root_superseded_recipe_id)
    self.assertIsNone(create_recipes_msg.superseded_recipe_id)
    sub = SubRecipe(recipe_type_e.name, recipe_type_e.revision_num, 'node_e', True)
    self.assertListEqual(create_recipes_msg.sub_recipes, [sub])
    # Check message to process condition
    self.assertEqual(process_condition_msg.condition_id, condition_i.id)
    # Check message to process job input
    self.assertEqual(process_job_input_msg.job_id, job_c.id)
    # Check message to process recipe input
    self.assertEqual(process_recipe_input_msg.recipe_id, recipe_b.id)
def test_execute_subrecipes_superseded(self): """Tests calling CreateRecipes.execute() successfully when creating sub-recipes that supersede other sub-recipes """ # Creates definitions for sub-recipe A and sub-recipe B batch = batch_test_utils.create_batch() event = trigger_test_utils.create_trigger_event() top_recipe_type = recipe_test_utils.create_recipe_type() job_type_a_1 = job_test_utils.create_seed_job_type() job_type_a_2 = job_test_utils.create_seed_job_type() sub_definition_a = RecipeDefinition(Interface()) sub_definition_a.add_job_node('node_1', job_type_a_1.name, job_type_a_1.version, job_type_a_1.revision_num) sub_definition_a.add_job_node('node_2', job_type_a_2.name, job_type_a_2.version, job_type_a_2.revision_num) sub_definition_a.add_dependency('node_1', 'node_2') sub_definition_a_dict = convert_recipe_definition_to_v6_json( sub_definition_a).get_dict() recipe_type_a = recipe_test_utils.create_recipe_type( definition=sub_definition_a_dict) job_type_b_x = job_test_utils.create_seed_job_type() job_type_b_y = job_test_utils.create_seed_job_type() recipe_type_b_z = recipe_test_utils.create_recipe_type() sub_definition_b = RecipeDefinition(Interface()) sub_definition_b.add_job_node('node_x', job_type_b_x.name, job_type_b_x.version, job_type_b_x.revision_num) sub_definition_b.add_job_node('node_y', job_type_b_y.name, job_type_b_y.version, job_type_b_y.revision_num) sub_definition_b.add_recipe_node('node_z', recipe_type_b_z.name, recipe_type_b_z.revision_num) sub_definition_b.add_dependency('node_x', 'node_z') sub_definition_b.add_dependency('node_y', 'node_z') sub_definition_b_dict = convert_recipe_definition_to_v6_json( sub_definition_b).get_dict() recipe_type_b = recipe_test_utils.create_recipe_type( definition=sub_definition_b_dict) # Create previous recipe containing sub-recipe A and B in order to be superseded prev_recipe_a = recipe_test_utils.create_recipe( recipe_type=recipe_type_a, save=False) prev_job_a_1 = 
job_test_utils.create_job(job_type=job_type_a_1, save=False) prev_job_a_2 = job_test_utils.create_job(job_type=job_type_a_2, save=False) prev_recipe_b = recipe_test_utils.create_recipe( recipe_type=recipe_type_b, save=False) prev_job_b_x = job_test_utils.create_job(job_type=job_type_b_x, save=False) prev_job_b_y = job_test_utils.create_job(job_type=job_type_b_y, save=False) prev_recipe_b_z = recipe_test_utils.create_recipe( recipe_type=recipe_type_b_z, save=False) prev_top_recipe = recipe_test_utils.create_recipe( recipe_type=top_recipe_type, save=False) new_top_recipe = recipe_test_utils.create_recipe( recipe_type=top_recipe_type, save=False) Job.objects.bulk_create( [prev_job_a_1, prev_job_a_2, prev_job_b_x, prev_job_b_y]) Recipe.objects.bulk_create([ prev_recipe_a, prev_recipe_b, prev_recipe_b_z, prev_top_recipe, new_top_recipe ]) recipe_node_a = recipe_test_utils.create_recipe_node( recipe=prev_top_recipe, sub_recipe=prev_recipe_a, node_name='node_a', save=False) recipe_node_a_1 = recipe_test_utils.create_recipe_node( recipe=prev_recipe_a, job=prev_job_a_1, node_name='node_1', save=False) recipe_node_a_2 = recipe_test_utils.create_recipe_node( recipe=prev_recipe_a, job=prev_job_a_2, node_name='node_2', save=False) recipe_node_b = recipe_test_utils.create_recipe_node( recipe=prev_top_recipe, sub_recipe=prev_recipe_b, node_name='node_b', save=False) recipe_node_b_x = recipe_test_utils.create_recipe_node( recipe=prev_recipe_b, job=prev_job_b_x, node_name='node_x', save=False) recipe_node_b_y = recipe_test_utils.create_recipe_node( recipe=prev_recipe_b, job=prev_job_b_y, node_name='node_y', save=False) recipe_node_b_z = recipe_test_utils.create_recipe_node( recipe=prev_recipe_b, sub_recipe=prev_recipe_b_z, node_name='node_z', save=False) RecipeNode.objects.bulk_create([ recipe_node_a, recipe_node_a_1, recipe_node_a_2, recipe_node_b, recipe_node_b_x, recipe_node_b_y, recipe_node_b_z ]) # Create message to create sub-recipes A and B for new_top_recipe which 
supersedes prev_top_recipe sub_recipes = [ SubRecipe(recipe_type_a.name, recipe_type_a.revision_num, 'node_a', True), SubRecipe(recipe_type_b.name, recipe_type_b.revision_num, 'node_b', False) ] forced_nodes = ForcedNodes() sub_forced_nodes_b = ForcedNodes() sub_forced_nodes_y = ForcedNodes() sub_forced_nodes_b.add_subrecipe('node_y', sub_forced_nodes_y) forced_nodes.add_subrecipe('node_b', sub_forced_nodes_b) message = create_subrecipes_messages( new_top_recipe.id, new_top_recipe.root_superseded_recipe_id, sub_recipes, event.id, superseded_recipe_id=prev_top_recipe.id, forced_nodes=forced_nodes, batch_id=batch.id)[0] # Execute message result = message.execute() self.assertTrue(result) # Check for new sub-recipes qry = RecipeNode.objects.select_related('sub_recipe') recipe_nodes = qry.filter( recipe_id=new_top_recipe.id).order_by('node_name') self.assertEqual(len(recipe_nodes), 2) self.assertEqual(recipe_nodes[0].node_name, 'node_a') self.assertEqual(recipe_nodes[1].node_name, 'node_b') sub_recipe_a = recipe_nodes[0].sub_recipe sub_recipe_b = recipe_nodes[1].sub_recipe self.assertEqual(sub_recipe_a.recipe_type_id, recipe_type_a.id) self.assertEqual(sub_recipe_a.superseded_recipe_id, prev_recipe_a.id) self.assertEqual(sub_recipe_a.root_superseded_recipe_id, prev_recipe_a.id) self.assertEqual(sub_recipe_b.recipe_type_id, recipe_type_b.id) self.assertEqual(sub_recipe_b.superseded_recipe_id, prev_recipe_b.id) self.assertEqual(sub_recipe_b.root_superseded_recipe_id, prev_recipe_b.id) # Check for sub-recipes to contain correct copied nodes # Nodes 1 and 2 in sub-recipe A should be copied recipe_nodes = RecipeNode.objects.select_related('job').filter( recipe_id=sub_recipe_a.id).order_by('node_name') self.assertEqual(len(recipe_nodes), 2) self.assertEqual(recipe_nodes[0].node_name, 'node_1') self.assertFalse(recipe_nodes[0].is_original) self.assertEqual(recipe_nodes[0].job_id, prev_job_a_1.id) self.assertEqual(recipe_nodes[1].node_name, 'node_2') 
self.assertFalse(recipe_nodes[1].is_original) self.assertEqual(recipe_nodes[1].job_id, prev_job_a_2.id) # Node X in sub-recipe B should be copied recipe_nodes = RecipeNode.objects.select_related('sub_recipe').filter( recipe_id=sub_recipe_b.id) self.assertEqual(len(recipe_nodes), 1) self.assertEqual(recipe_nodes[0].node_name, 'node_x') self.assertFalse(recipe_nodes[0].is_original) # Should be four messages, two for superseding recipe nodes, one for processing recipe input, and one for # updating metrics for the recipe containing the new sub-recipes self.assertEqual(len(message.new_messages), 4) supersede_recipe_a_msg = None supersede_recipe_b_msg = None process_recipe_input_msg = None update_metrics_msg = None for msg in message.new_messages: if msg.type == 'supersede_recipe_nodes': if msg._recipe_ids[0] == prev_recipe_a.id: supersede_recipe_a_msg = msg if msg._recipe_ids[0] == prev_recipe_b.id: supersede_recipe_b_msg = msg elif msg.type == 'process_recipe_input': process_recipe_input_msg = msg elif msg.type == 'update_recipe_metrics': update_metrics_msg = msg self.assertIsNotNone(supersede_recipe_a_msg) self.assertIsNotNone(supersede_recipe_b_msg) self.assertIsNotNone(process_recipe_input_msg) self.assertIsNotNone(update_metrics_msg) # Check message for superseding previous sub-recipe A self.assertFalse(supersede_recipe_a_msg.supersede_all) self.assertSetEqual(supersede_recipe_a_msg.supersede_jobs, set()) self.assertSetEqual(supersede_recipe_a_msg.supersede_subrecipes, set()) self.assertFalse(supersede_recipe_a_msg.unpublish_all) self.assertSetEqual(supersede_recipe_a_msg.unpublish_jobs, set()) self.assertFalse(supersede_recipe_a_msg.supersede_recursive_all) self.assertSetEqual(supersede_recipe_a_msg.supersede_recursive, set()) self.assertFalse(supersede_recipe_a_msg.unpublish_recursive_all) self.assertSetEqual(supersede_recipe_a_msg.unpublish_recursive, set()) # Check message for superseding previous sub-recipe B 
self.assertFalse(supersede_recipe_b_msg.supersede_all) self.assertSetEqual(supersede_recipe_b_msg.supersede_jobs, {'node_y'}) self.assertSetEqual(supersede_recipe_b_msg.supersede_subrecipes, {'node_z'}) self.assertFalse(supersede_recipe_b_msg.unpublish_all) self.assertSetEqual(supersede_recipe_b_msg.unpublish_jobs, set()) self.assertFalse(supersede_recipe_b_msg.supersede_recursive_all) self.assertSetEqual(supersede_recipe_b_msg.supersede_recursive, {'node_z'}) self.assertFalse(supersede_recipe_b_msg.unpublish_recursive_all) self.assertSetEqual(supersede_recipe_b_msg.unpublish_recursive, set()) # Check message to process recipe input for new sub-recipe A self.assertEqual(process_recipe_input_msg.recipe_id, sub_recipe_a.id) # Check message to update recipe metrics for the recipe containing the new sub-recipes self.assertListEqual(update_metrics_msg._recipe_ids, [new_top_recipe.id]) # Test executing message again message_json_dict = message.to_json() message = CreateRecipes.from_json(message_json_dict) result = message.execute() self.assertTrue(result) # Check sub-recipes to make sure we didn't create them again a second time qry = RecipeNode.objects.select_related('sub_recipe') recipe_nodes = qry.filter( recipe_id=new_top_recipe.id).order_by('node_name') self.assertEqual(len(recipe_nodes), 2) self.assertEqual(recipe_nodes[0].node_name, 'node_a') self.assertEqual(recipe_nodes[1].node_name, 'node_b') sub_recipe_a = recipe_nodes[0].sub_recipe sub_recipe_b = recipe_nodes[1].sub_recipe self.assertEqual(sub_recipe_a.recipe_type_id, recipe_type_a.id) self.assertEqual(sub_recipe_a.superseded_recipe_id, prev_recipe_a.id) self.assertEqual(sub_recipe_a.root_superseded_recipe_id, prev_recipe_a.id) self.assertEqual(sub_recipe_b.recipe_type_id, recipe_type_b.id) self.assertEqual(sub_recipe_b.superseded_recipe_id, prev_recipe_b.id) self.assertEqual(sub_recipe_b.root_superseded_recipe_id, prev_recipe_b.id) # Check for sub-recipes to contain correct copied nodes # Nodes 1 and 2 in 
sub-recipe A should be copied recipe_nodes = RecipeNode.objects.select_related('job').filter( recipe_id=sub_recipe_a.id).order_by('node_name') self.assertEqual(len(recipe_nodes), 2) self.assertEqual(recipe_nodes[0].node_name, 'node_1') self.assertFalse(recipe_nodes[0].is_original) self.assertEqual(recipe_nodes[0].job_id, prev_job_a_1.id) self.assertEqual(recipe_nodes[1].node_name, 'node_2') self.assertFalse(recipe_nodes[1].is_original) self.assertEqual(recipe_nodes[1].job_id, prev_job_a_2.id) # Node X in sub-recipe B should be copied recipe_nodes = RecipeNode.objects.select_related('sub_recipe').filter( recipe_id=sub_recipe_b.id) self.assertEqual(len(recipe_nodes), 1) self.assertEqual(recipe_nodes[0].node_name, 'node_x') self.assertFalse(recipe_nodes[0].is_original) # Check messages again # Should be four messages, two for superseding recipe nodes, one for processing recipe input, and one for # updating metrics for the recipe containing the new sub-recipes self.assertEqual(len(message.new_messages), 4) supersede_recipe_a_msg = None supersede_recipe_b_msg = None process_recipe_input_msg = None update_metrics_msg = None for msg in message.new_messages: if msg.type == 'supersede_recipe_nodes': if msg._recipe_ids[0] == prev_recipe_a.id: supersede_recipe_a_msg = msg if msg._recipe_ids[0] == prev_recipe_b.id: supersede_recipe_b_msg = msg elif msg.type == 'process_recipe_input': process_recipe_input_msg = msg elif msg.type == 'update_recipe_metrics': update_metrics_msg = msg self.assertIsNotNone(supersede_recipe_a_msg) self.assertIsNotNone(supersede_recipe_b_msg) self.assertIsNotNone(process_recipe_input_msg) self.assertIsNotNone(update_metrics_msg) # Check message for superseding previous sub-recipe A self.assertFalse(supersede_recipe_a_msg.supersede_all) self.assertSetEqual(supersede_recipe_a_msg.supersede_jobs, set()) self.assertSetEqual(supersede_recipe_a_msg.supersede_subrecipes, set()) self.assertFalse(supersede_recipe_a_msg.unpublish_all) 
self.assertSetEqual(supersede_recipe_a_msg.unpublish_jobs, set()) self.assertFalse(supersede_recipe_a_msg.supersede_recursive_all) self.assertSetEqual(supersede_recipe_a_msg.supersede_recursive, set()) self.assertFalse(supersede_recipe_a_msg.unpublish_recursive_all) self.assertSetEqual(supersede_recipe_a_msg.unpublish_recursive, set()) # Check message for superseding previous sub-recipe B self.assertFalse(supersede_recipe_b_msg.supersede_all) self.assertSetEqual(supersede_recipe_b_msg.supersede_jobs, {'node_y'}) self.assertSetEqual(supersede_recipe_b_msg.supersede_subrecipes, {'node_z'}) self.assertFalse(supersede_recipe_b_msg.unpublish_all) self.assertSetEqual(supersede_recipe_b_msg.unpublish_jobs, set()) self.assertFalse(supersede_recipe_b_msg.supersede_recursive_all) self.assertSetEqual(supersede_recipe_b_msg.supersede_recursive, {'node_z'}) self.assertFalse(supersede_recipe_b_msg.unpublish_recursive_all) self.assertSetEqual(supersede_recipe_b_msg.unpublish_recursive, set()) # Check message to process recipe input for new sub-recipe A self.assertEqual(process_recipe_input_msg.recipe_id, sub_recipe_a.id) # Check message to update recipe metrics for the recipe containing the new sub-recipes self.assertListEqual(update_metrics_msg._recipe_ids, [new_top_recipe.id])
def test_execute(self):
    """Tests calling UncancelJobs.execute() successfully.

    Verifies that only CANCELED jobs that have never executed are uncanceled,
    that the expected update_recipe and update_recipe_metrics messages are
    created, and that re-executing the same message is idempotent.
    """
    old_when = now()
    # New status-change time, one hour after the jobs were created
    when = old_when + datetime.timedelta(minutes=60)
    recipe = recipe_test_utils.create_recipe()
    # Job 1: PENDING, never executed - not CANCELED, so should be untouched
    job_1 = job_test_utils.create_job(num_exes=0, status='PENDING',
                                      last_status_change=old_when)
    # Job 2: CANCELED, never executed, in a recipe - the only job expected to
    # be uncanceled
    job_2 = job_test_utils.create_job(num_exes=0, status='CANCELED',
                                      last_status_change=old_when,
                                      recipe=recipe)
    # Job 3: CANCELED but already executed once - should be untouched
    job_3 = job_test_utils.create_job(num_exes=1, status='CANCELED',
                                      last_status_change=old_when)
    # Job 4: FAILED - not CANCELED, so should be untouched
    job_4 = job_test_utils.create_job(num_exes=1, status='FAILED',
                                      last_status_change=old_when)
    job_ids = [job_1.id, job_2.id, job_3.id, job_4.id]
    recipe_test_utils.create_recipe_job(recipe=recipe, job=job_2)

    # Add jobs to message
    message = UncancelJobs()
    message.when = when
    if message.can_fit_more():
        message.add_job(job_1.id)
    if message.can_fit_more():
        message.add_job(job_2.id)
    if message.can_fit_more():
        message.add_job(job_3.id)
    if message.can_fit_more():
        message.add_job(job_4.id)

    # Execute message
    result = message.execute()
    self.assertTrue(result)

    # Imported locally (presumably to avoid an import cycle at module load -
    # TODO confirm)
    from recipe.diff.forced_nodes import ForcedNodes
    from recipe.diff.json.forced_nodes_v6 import convert_forced_nodes_to_v6
    # Expected forced-nodes payload of the update_recipe message: all nodes
    forced_nodes = ForcedNodes()
    forced_nodes.set_all_nodes()
    forced_nodes_dict = convert_forced_nodes_to_v6(forced_nodes).get_dict()

    jobs = Job.objects.filter(id__in=job_ids).order_by('id')
    # Job 1 should not be updated because it was not CANCELED
    self.assertEqual(jobs[0].status, 'PENDING')
    self.assertEqual(jobs[0].last_status_change, old_when)
    # Job 2 should be uncanceled
    self.assertEqual(jobs[1].status, 'PENDING')
    self.assertEqual(jobs[1].last_status_change, when)
    # Job 3 should not be updated since it has already been queued
    self.assertEqual(jobs[2].status, 'CANCELED')
    self.assertEqual(jobs[2].last_status_change, old_when)
    # Job 4 should not be updated because it was not CANCELED
    self.assertEqual(jobs[3].status, 'FAILED')
    self.assertEqual(jobs[3].last_status_change, old_when)

    # Make sure update_recipe and update_recipe_metrics messages were created
    self.assertEqual(len(message.new_messages), 2)
    update_recipe_msg = None
    update_recipe_metrics_msg = None
    for msg in message.new_messages:
        if msg.type == 'update_recipe':
            update_recipe_msg = msg
        elif msg.type == 'update_recipe_metrics':
            update_recipe_metrics_msg = msg
    self.assertIsNotNone(update_recipe_msg)
    self.assertIsNotNone(update_recipe_metrics_msg)
    self.assertEqual(update_recipe_msg.root_recipe_id, recipe.id)
    self.assertDictEqual(
        convert_forced_nodes_to_v6(
            update_recipe_msg.forced_nodes).get_dict(), forced_nodes_dict)
    self.assertListEqual(update_recipe_metrics_msg._recipe_ids, [recipe.id])

    # Test executing message again (round-tripped through JSON) to verify
    # that the second execution makes no further changes
    newer_when = when + datetime.timedelta(minutes=60)
    message_json_dict = message.to_json()
    message = UncancelJobs.from_json(message_json_dict)
    message.when = newer_when
    result = message.execute()
    self.assertTrue(result)

    jobs = Job.objects.filter(id__in=job_ids).order_by('id')
    # Job 1 should not be updated because it was not CANCELED
    self.assertEqual(jobs[0].status, 'PENDING')
    self.assertEqual(jobs[0].last_status_change, old_when)
    # Job 2 should not be updated since it was already uncanceled during the
    # last message execution (time stays `when`, not `newer_when`)
    self.assertEqual(jobs[1].status, 'PENDING')
    self.assertEqual(jobs[1].last_status_change, when)
    # Job 3 should not be updated since it has already been queued
    self.assertEqual(jobs[2].status, 'CANCELED')
    self.assertEqual(jobs[2].last_status_change, old_when)
    # Job 4 should not be updated because it was not CANCELED
    self.assertEqual(jobs[3].status, 'FAILED')
    self.assertEqual(jobs[3].last_status_change, old_when)

    # Make sure update_recipe and update_recipe_metrics messages were created
    self.assertEqual(len(message.new_messages), 2)
    update_recipe_msg = None
    update_recipe_metrics_msg = None
    for msg in message.new_messages:
        if msg.type == 'update_recipe':
            update_recipe_msg = msg
        elif msg.type == 'update_recipe_metrics':
            update_recipe_metrics_msg = msg
    self.assertIsNotNone(update_recipe_msg)
    self.assertIsNotNone(update_recipe_metrics_msg)
    self.assertEqual(update_recipe_msg.root_recipe_id, recipe.id)
    self.assertDictEqual(
        convert_forced_nodes_to_v6(
            update_recipe_msg.forced_nodes).get_dict(), forced_nodes_dict)
    self.assertListEqual(update_recipe_metrics_msg._recipe_ids, [recipe.id])
def test_json_new(self):
    """Tests converting a CreateBatchRecipes message to and from JSON.

    Covers both paths of batch recipe creation from a dataset: creating new
    recipes ('new-recipe') when no recipes exist for the dataset members, and
    reprocessing ('reprocess') when matching recipes already exist.
    """
    jt_1 = job_test_utils.create_seed_job_type()
    jt_2 = job_test_utils.create_seed_job_type()
    jt_3 = job_test_utils.create_seed_job_type()
    jt_4 = job_test_utils.create_seed_job_type()

    # Sub-recipe type with two independent job nodes (node_a, node_b) that
    # each take the recipe's input image directly
    recipe_def = {
        'version': '7',
        'input': {
            'files': [{'name': 'INPUT_IMAGE',
                       'media_types': ['image/png'],
                       'required': True,
                       'multiple': False}],
            'json': []
        },
        'nodes': {
            'node_a': {
                'dependencies': [],
                'input': {'input_a': {'type': 'recipe',
                                      'input': 'INPUT_IMAGE'}},
                'node_type': {'node_type': 'job',
                              'job_type_name': jt_1.name,
                              'job_type_version': jt_1.version,
                              'job_type_revision': jt_1.revision_num}
            },
            'node_b': {
                'dependencies': [],
                'input': {'input_a': {'type': 'recipe',
                                      'input': 'INPUT_IMAGE'}},
                'node_type': {'node_type': 'job',
                              'job_type_name': jt_2.name,
                              'job_type_version': jt_2.version,
                              'job_type_revision': jt_2.revision_num}
            }
        }
    }
    sub_recipe_type = recipe_test_utils.create_recipe_type_v6(
        definition=recipe_def)
    # NOTE(review): this recipe appears unused below - confirm it is needed
    sub_recipe = recipe_test_utils.create_recipe(
        recipe_type=sub_recipe_type)

    # Recipe with two jobs and one subrecipe (c -> d -> r)
    recipe_def = {
        'version': '7',
        'input': {
            'files': [{'name': 'INPUT_IMAGE',
                       'media_types': ['image/png'],
                       'required': True,
                       'multiple': False}],
            'json': []
        },
        'nodes': {
            'recipe_node': {
                'dependencies': [{'name': 'node_d', 'acceptance': True}],
                'input': {'input_a': {'type': 'dependency',
                                      'node': 'node_d',
                                      'output': 'OUTPUT_IMAGE'}},
                'node_type': {'node_type': 'recipe',
                              'recipe_type_name': sub_recipe_type.name,
                              'recipe_type_revision': sub_recipe_type.revision_num}
            },
            'node_c': {
                'dependencies': [],
                'input': {'INPUT_IMAGE': {'type': 'recipe',
                                          'input': 'INPUT_IMAGE'}},
                'node_type': {'node_type': 'job',
                              'job_type_name': jt_3.name,
                              'job_type_version': jt_3.version,
                              'job_type_revision': jt_3.revision_num}
            },
            'node_d': {
                'dependencies': [{'name': 'node_c', 'acceptance': True}],
                'input': {'INPUT_IMAGE': {'type': 'dependency',
                                          'node': 'node_c',
                                          'output': 'OUTPUT_IMAGE'}},
                'node_type': {'node_type': 'job',
                              'job_type_name': jt_4.name,
                              'job_type_version': jt_4.version,
                              'job_type_revision': jt_4.revision_num}
            }
        }
    }
    recipe_type = recipe_test_utils.create_recipe_type_v6(
        definition=recipe_def)

    # Create a dataset (two member files are added below)
    dataset_def = {
        'parameters': {
            'files': [{'media_types': ['image/png'],
                       'required': True,
                       'multiple': False,
                       'name': 'INPUT_IMAGE'}],
            'json': []
        }
    }
    the_dataset = data_test_utils.create_dataset(definition=dataset_def)
    workspace = storage_test_utils.create_workspace()
    src_file_a = storage_test_utils.create_file(file_name='input_a.PNG',
                                                file_type='SOURCE',
                                                media_type='image/png',
                                                file_size=10,
                                                data_type_tags=['type'],
                                                file_path='the_path',
                                                workspace=workspace)
    src_file_b = storage_test_utils.create_file(file_name='input_b.PNG',
                                                file_type='SOURCE',
                                                media_type='image/png',
                                                file_size=10,
                                                data_type_tags=['type'],
                                                file_path='the_path',
                                                workspace=workspace)
    # One dataset member per source file
    data_list = []
    data_dict = {'version': '6',
                 'files': {'INPUT_IMAGE': [src_file_a.id]},
                 'json': {}}
    data_list.append(DataV6(data=data_dict).get_dict())
    data_dict = {'version': '6',
                 'files': {'INPUT_IMAGE': [src_file_b.id]},
                 'json': {}}
    data_list.append(DataV6(data=data_dict).get_dict())
    member_2 = data_test_utils.create_dataset_members(dataset=the_dataset,
                                                      data_list=data_list)

    # Create the batch
    batch_definition = BatchDefinition()
    batch_definition.dataset = the_dataset.id
    forced_nodes = ForcedNodes()
    forced_nodes.all_nodes = True
    batch_definition.forced_nodes = forced_nodes
    batch = batch_test_utils.create_batch(recipe_type=recipe_type,
                                          definition=batch_definition)

    # Create the message
    message = create_batch_recipes_message(batch.id)

    # Convert message to JSON and back, and then execute
    message_json_dict = message.to_json()
    new_message = CreateBatchRecipes.from_json(message_json_dict)
    result = new_message.execute()
    self.assertTrue(result)

    # Should be two create_recipes message for the two files in the dataset
    self.assertEqual(len(new_message.new_messages), 2)

    # Verify each message has a different input
    src_ids = [src_file_a.id, src_file_b.id]
    for message in new_message.new_messages:
        self.assertEqual(message.type, 'create_recipes')
        self.assertEqual(message.create_recipes_type, 'new-recipe')
        file_id = DataV6(message.recipe_input_data).get_data(
            ).values['INPUT_IMAGE'].file_ids[0]
        self.assertTrue(file_id in src_ids)
        # Remove the matched id so a duplicated input would fail next time
        src_ids.remove(file_id)

    # Test re-processing existing recipes
    data_dict = {'version': '6',
                 'files': {'INPUT_IMAGE': [src_file_a.id]},
                 'json': {}}
    recipe_1 = recipe_test_utils.create_recipe(recipe_type=recipe_type,
                                               input=data_dict)
    data_dict = {'version': '6',
                 'files': {'INPUT_IMAGE': [src_file_b.id]},
                 'json': {}}
    recipe_2 = recipe_test_utils.create_recipe(recipe_type=recipe_type,
                                               input=data_dict)
    recipe_test_utils.process_recipe_inputs([recipe_1.id, recipe_2.id])
    batch_definition_2 = BatchDefinition()
    batch_definition_2.dataset = the_dataset.id
    forced_nodes = ForcedNodes()
    forced_nodes.all_nodes = True
    batch_definition_2.forced_nodes = forced_nodes
    batch_2 = batch_test_utils.create_batch(recipe_type=recipe_type,
                                            definition=batch_definition_2)

    # Create the message
    message = create_batch_recipes_message(batch_2.id)

    # Convert message to JSON and back, and then execute
    message_json_dict_2 = message.to_json()
    new_message_2 = CreateBatchRecipes.from_json(message_json_dict_2)
    result_2 = new_message_2.execute()
    self.assertTrue(result_2)

    # Both existing recipes go into a single 'reprocess' create_recipes
    # message instead of two 'new-recipe' messages
    self.assertEqual(len(new_message_2.new_messages), 1)
    message = new_message_2.new_messages[0]
    self.assertEqual(message.type, 'create_recipes')
    self.assertEqual(message.create_recipes_type, 'reprocess')
    self.assertSetEqual(set(message.root_recipe_ids),
                        {recipe_1.id, recipe_2.id})