def test_json(self): """Tests converting a ProcessCondition message to and from JSON""" definition = RecipeDefinition(Interface()) # TODO: once DataFilter is implemented, create a DataFilter object here that accepts the inputs definition.add_condition_node('node_a', Interface(), DataFilter(True)) definition_dict = convert_recipe_definition_to_v6_json( definition).get_dict() recipe_type = recipe_test_utils.create_recipe_type( definition=definition_dict) recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type) condition = recipe_test_utils.create_recipe_condition(recipe=recipe, save=True) recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_a', condition=condition, save=True) # Create message message = create_process_condition_messages([condition.id])[0] # Convert message to JSON and back, and then execute message_json_dict = message.to_json() new_message = ProcessCondition.from_json(message_json_dict) result = new_message.execute() self.assertTrue(result) condition = RecipeCondition.objects.get(id=condition.id) self.assertEqual(len(new_message.new_messages), 1) self.assertEqual(new_message.new_messages[0].type, 'update_recipe') self.assertEqual(new_message.new_messages[0].root_recipe_id, recipe.id) self.assertTrue(condition.is_processed) self.assertIsNotNone(condition.processed) self.assertTrue(condition.is_accepted)
def test_json(self): """Tests converting a ProcessCondition message to and from JSON""" definition = RecipeDefinition(Interface()) cond_interface_1 = Interface() cond_interface_1.add_parameter(JsonParameter('cond_int', 'integer')) df1 = DataFilter(filter_list=[{ 'name': 'cond_int', 'type': 'integer', 'condition': '==', 'values': [0] }]) definition = RecipeDefinition(cond_interface_1) definition.add_condition_node('node_a', cond_interface_1, df1) definition.add_recipe_input_connection('node_a', 'cond_int', 'cond_int') definition_dict = convert_recipe_definition_to_v6_json( definition).get_dict() recipe_type = recipe_test_utils.create_recipe_type_v6( definition=definition_dict) data_1 = Data() data_1.add_value(JsonValue('cond_int', 0)) data_1_dict = convert_data_to_v6_json(data_1).get_dict() recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_1_dict) condition = recipe_test_utils.create_recipe_condition(recipe=recipe, save=True) recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_a', condition=condition, save=True) # Create message message = create_process_condition_messages([condition.id])[0] # Convert message to JSON and back, and then execute message_json_dict = message.to_json() new_message = ProcessCondition.from_json(message_json_dict) result = new_message.execute() self.assertTrue(result) condition = RecipeCondition.objects.get(id=condition.id) self.assertEqual(len(new_message.new_messages), 1) self.assertEqual(new_message.new_messages[0].type, 'update_recipe') self.assertEqual(new_message.new_messages[0].root_recipe_id, recipe.id) self.assertTrue(condition.is_processed) self.assertIsNotNone(condition.processed) self.assertTrue(condition.is_accepted)
def execute(self): """See :meth:`messaging.messages.message.CommandMessage.execute` """ with transaction.atomic(): self._perform_locking() condition_models = self._find_existing_conditions() if not condition_models: condition_models = self._create_conditions() process_input_condition_ids = [] for condition_model in condition_models: # process_input indicates if condition is ready to get its input from its dependencies process_input = self._process_input.get(condition_model.id, False) if process_input: # This new condition is all ready to have its input processed process_input_condition_ids.append(condition_model.id) self.new_messages.extend( create_process_condition_messages(process_input_condition_ids)) return True
def execute(self): """See :meth:`messaging.messages.message.CommandMessage.execute` """ recipe = Recipe.objects.get_recipe_instance_from_root( self.root_recipe_id) recipe_model = recipe.recipe_model when = now() jobs_to_update = recipe.get_jobs_to_update() blocked_job_ids = jobs_to_update['BLOCKED'] pending_job_ids = jobs_to_update['PENDING'] nodes_to_create = recipe.get_nodes_to_create() nodes_to_process_input = recipe.get_nodes_to_process_input() if not recipe_model.is_completed and recipe.has_completed(): Recipe.objects.complete_recipes([recipe_model.id], when) # Create new messages for changing job statuses if len(blocked_job_ids): logger.info('Found %d job(s) that should transition to BLOCKED', len(blocked_job_ids)) self.new_messages.extend( create_blocked_jobs_messages(blocked_job_ids, when)) if len(pending_job_ids): logger.info('Found %d job(s) that should transition to PENDING', len(pending_job_ids)) self.new_messages.extend( create_pending_jobs_messages(pending_job_ids, when)) # Create new messages to create recipe nodes conditions = [] recipe_jobs = [] subrecipes = [] for node_name, node_def in nodes_to_create.items(): process_input = False if node_name in nodes_to_process_input: process_input = True del nodes_to_process_input[node_name] if node_def.node_type == ConditionNodeDefinition.NODE_TYPE: condition = Condition(node_name, process_input) conditions.append(condition) elif node_def.node_type == JobNodeDefinition.NODE_TYPE: job = RecipeJob(node_def.job_type_name, node_def.job_type_version, node_def.revision_num, node_name, process_input) recipe_jobs.append(job) elif node_def.node_type == RecipeNodeDefinition.NODE_TYPE: subrecipe = SubRecipe(node_def.recipe_type_name, node_def.revision_num, node_name, process_input) subrecipes.append(subrecipe) if len(conditions): logger.info('Found %d condition(s) to create for this recipe', len(conditions)) self.new_messages.extend( create_conditions_messages(recipe_model, conditions)) if len(recipe_jobs): logger.info('Found %d job(s) to create for this recipe', len(recipe_jobs)) self.new_messages.extend( create_jobs_messages_for_recipe(recipe_model, recipe_jobs)) if len(subrecipes): logger.info('Found %d sub-recipe(s) to create for this recipe', len(subrecipes)) self.new_messages.extend( create_subrecipes_messages(recipe_model, subrecipes, forced_nodes=self.forced_nodes)) # Create new messages for processing recipe node input process_condition_ids = [] process_job_ids = [] process_recipe_ids = [] for node_name, node in nodes_to_process_input.items(): if node.node_type == ConditionNodeDefinition.NODE_TYPE: process_condition_ids.append(node.condition.id) elif node.node_type == JobNodeDefinition.NODE_TYPE: process_job_ids.append(node.job.id) elif node.node_type == RecipeNodeDefinition.NODE_TYPE: process_recipe_ids.append(node.recipe.id) if len(process_condition_ids): logger.info('Found %d condition(s) to process their input', len(process_condition_ids)) self.new_messages.extend( create_process_condition_messages(process_condition_ids)) if len(process_job_ids): logger.info( 'Found %d job(s) to process their input and move to the queue', len(process_job_ids)) self.new_messages.extend( create_process_job_input_messages(process_job_ids)) if len(process_recipe_ids): logger.info( 'Found %d sub-recipe(s) to process their input and begin processing', len(process_recipe_ids)) self.new_messages.extend( create_process_recipe_input_messages(process_recipe_ids)) return True
def test_execute(self):
    """Tests calling ProcessCondition.execute() successfully"""

    workspace = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(workspace=workspace, file_size=104857600.0)
    file_2 = storage_test_utils.create_file(workspace=workspace, file_size=987654321.0)
    file_3 = storage_test_utils.create_file(workspace=workspace, file_size=65456.0)
    file_4 = storage_test_utils.create_file(workspace=workspace, file_size=24564165456.0)

    manifest_1 = {
        'seedVersion': '1.0.0',
        'job': {
            'name': 'job-a',
            'jobVersion': '1.0.0',
            'packageVersion': '1.0.0',
            'title': '',
            'description': '',
            'maintainer': {
                'name': 'John Doe',
                'email': '*****@*****.**'
            },
            'timeout': 10,
            'interface': {
                'command': '',
                'inputs': {
                    'files': [],
                    'json': []
                },
                'outputs': {
                    'files': [{'name': 'OUTPUT_A', 'pattern': '*.png', 'multiple': True}]
                }
            }
        }
    }
    job_type_1 = job_test_utils.create_job_type(interface=manifest_1)

    manifest_2 = {
        'seedVersion': '1.0.0',
        'job': {
            'name': 'job-b',
            'jobVersion': '1.0.0',
            'packageVersion': '1.0.0',
            'title': '',
            'description': '',
            'maintainer': {
                'name': 'John Doe',
                'email': '*****@*****.**'
            },
            'timeout': 10,
            'interface': {
                'command': '',
                'inputs': {
                    'files': []
                },
                'outputs': {
                    'files': [{'name': 'OUTPUT_B', 'pattern': '*.png', 'multiple': True}]
                }
            }
        }
    }
    job_type_2 = job_test_utils.create_job_type(interface=manifest_2)

    output_1_dict = {
        'version': '1.0',
        'output_data': [{'name': 'OUTPUT_A', 'file_ids': [file_1.id, file_2.id]}]
    }
    output_2_dict = {
        'version': '1.0',
        'output_data': [{'name': 'OUTPUT_B', 'file_ids': [file_3.id, file_4.id]}]
    }

    cond_interface = Interface()
    cond_interface.add_parameter(FileParameter('INPUT_C_1', [], multiple=True))
    cond_interface.add_parameter(FileParameter('INPUT_C_2', [], multiple=True))

    definition = RecipeDefinition(Interface())
    definition.add_job_node('node_a', job_type_1.name, job_type_1.version, job_type_1.revision_num)
    definition.add_job_node('node_b', job_type_2.name, job_type_2.version, job_type_2.revision_num)
    # TODO: once DataFilter is implemented, create a DataFilter object here that accepts the inputs
    definition.add_condition_node('node_c', cond_interface, DataFilter(True))
    definition.add_dependency('node_a', 'node_c')
    definition.add_dependency('node_b', 'node_c')
    definition.add_dependency_input_connection('node_c', 'INPUT_C_1', 'node_a', 'OUTPUT_A')
    definition.add_dependency_input_connection('node_c', 'INPUT_C_2', 'node_b', 'OUTPUT_B')
    def_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type(definition=def_dict)

    recipe_data_dict = {'version': '1.0', 'input_data': [], 'workspace_id': workspace.id}
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=recipe_data_dict)
    job_1 = job_test_utils.create_job(job_type=job_type_1, num_exes=1, status='COMPLETED', output=output_1_dict,
                                      recipe=recipe)
    job_2 = job_test_utils.create_job(job_type=job_type_2, num_exes=1, status='COMPLETED', output=output_2_dict,
                                      recipe=recipe)
    condition = recipe_test_utils.create_recipe_condition(recipe=recipe, save=True)
    node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_a', job=job_1, save=False)
    node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_b', job=job_2, save=False)
    node_c = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_c', condition=condition, save=False)
    RecipeNode.objects.bulk_create([node_a, node_b, node_c])

    # Create message
    message = create_process_condition_messages([condition.id])[0]

    # Execute message
    result = message.execute()
    self.assertTrue(result)

    condition = RecipeCondition.objects.get(id=condition.id)
    # Check for update_recipe message
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'update_recipe')
    self.assertEqual(message.new_messages[0].root_recipe_id, recipe.id)
    # Check condition flags
    self.assertTrue(condition.is_processed)
    self.assertIsNotNone(condition.processed)
    self.assertTrue(condition.is_accepted)
    # Check condition for expected data
    self.assertSetEqual(set(condition.get_data().values.keys()), {'INPUT_C_1', 'INPUT_C_2'})
    self.assertListEqual(condition.get_data().values['INPUT_C_1'].file_ids, [file_1.id, file_2.id])
    self.assertListEqual(condition.get_data().values['INPUT_C_2'].file_ids, [file_3.id, file_4.id])

    # Test executing message again
    message_json_dict = message.to_json()
    message = ProcessCondition.from_json(message_json_dict)
    result = message.execute()
    self.assertTrue(result)

    # Still should have update_recipe message
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'update_recipe')
    self.assertEqual(message.new_messages[0].root_recipe_id, recipe.id)