def setUp(self):
    """Create job/recipe fixtures: a seed job type with two output files, a recipe,
    a trigger event, and a RUNNING job with one RUNNING execution."""
    django.setup()

    # NOTE(review): this first recipe type/recipe pair is immediately overwritten
    # below and looks like copy-paste residue; it is kept only so the database
    # fixture counts stay identical to the original setup.
    self.recipe_type = recipe_utils.create_recipe_type_v6()
    self.recipe = recipe_utils.create_recipe(recipe_type=self.recipe_type)

    # Removed unused locals 'cmd'/'cmd_args'; the manifest takes the command
    # string directly.
    outputs = [{'name': 'arg1', 'pattern': '*_.txt'},
               {'name': 'arg2', 'pattern': '*_.txt'}]
    manifest = job_utils.create_seed_manifest(command='command args', outputs_files=outputs)
    self.job_type = job_utils.create_seed_job_type(job_version='1.0', manifest=manifest)

    self.recipe_type = recipe_utils.create_recipe_type_v6()
    self.recipe = recipe_utils.create_recipe(recipe_type=self.recipe_type)
    self.event = TriggerEvent.objects.create_trigger_event('TEST', None, {}, now())
    self.job = job_utils.create_job(job_type=self.job_type, event=self.event, status='RUNNING',
                                    recipe=self.recipe)
    self.job_exe = job_utils.create_job_exe(job=self.job, status='RUNNING')
def setUp(self):
    """Log in a test client and create two batches: one whose recipe creation is
    still in progress and one whose creation has finished."""
    django.setup()
    rest.login_client(self.client)

    self.recipe_type_1 = recipe_test_utils.create_recipe_type_v6()
    self.batch_1 = batch_test_utils.create_batch(recipe_type=self.recipe_type_1,
                                                 is_creation_done=False)

    self.recipe_type_2 = recipe_test_utils.create_recipe_type_v6()
    self.batch_2 = batch_test_utils.create_batch(recipe_type=self.recipe_type_2,
                                                 is_creation_done=True)
def setUp(self):
    """Create recipe types and seed job types, then link recipe types to job types."""
    django.setup()

    self.rt1 = recipe_test_utils.create_recipe_type_v6()
    self.rt2 = recipe_test_utils.create_recipe_type_v6()
    self.rt3 = recipe_test_utils.create_recipe_type_v6()
    self.jt3 = job_test_utils.create_seed_job_type()
    self.jt4 = job_test_utils.create_seed_job_type()
    self.jt5 = job_test_utils.create_seed_job_type()
    self.jt6 = job_test_utils.create_seed_job_type()

    # Links: rt1 -> jt3, rt1 -> jt4, rt2 -> jt5 (rt3 and jt6 stay unlinked)
    self.parents = [self.rt1.id, self.rt1.id, self.rt2.id]
    self.children = [self.jt3.id, self.jt4.id, self.jt5.id]
    RecipeTypeJobLink.objects.create_recipe_type_job_links(self.parents, self.children)
def setUp(self):
    """Build a sub-recipe type around job_type1 and a main recipe definition whose
    job nodes run job_type2 and whose node_d embeds the sub-recipe type."""
    django.setup()
    self.job_type1 = job_test_utils.create_seed_job_type(manifest=job_test_utils.MINIMUM_MANIFEST)
    self.job_type2 = job_test_utils.create_seed_job_type()

    # Sub-recipe definition pointing its single node at job_type1
    self.sub_definition = copy.deepcopy(recipe_test_utils.SUB_RECIPE_DEFINITION)
    sub_node = self.sub_definition['nodes']['node_a']['node_type']
    sub_node['job_type_name'] = self.job_type1.name
    sub_node['job_type_version'] = self.job_type1.version
    sub_node['job_type_revision'] = self.job_type1.revision_num
    self.sub_def = RecipeDefinitionV6(self.sub_definition).get_definition()
    self.recipe_type1 = recipe_test_utils.create_recipe_type_v6(definition=self.sub_definition,
                                                                description="A sub recipe",
                                                                is_active=False, is_system=False)

    # Main definition: node_a/node_b run job_type2, node_d embeds recipe_type1
    self.main_definition = copy.deepcopy(recipe_test_utils.RECIPE_DEFINITION)
    for node_name in ('node_a', 'node_b'):
        node = self.main_definition['nodes'][node_name]['node_type']
        node['job_type_name'] = self.job_type2.name
        node['job_type_version'] = self.job_type2.version
        node['job_type_revision'] = self.job_type2.revision_num
    node_d = self.main_definition['nodes']['node_d']['node_type']
    node_d['recipe_type_name'] = self.recipe_type1.name
    node_d['recipe_type_revision'] = self.recipe_type1.revision_num
    self.v6_recipe_def = RecipeDefinitionV6(self.main_definition).get_definition()
def test_json(self):
    """Tests converting a CreateBatchRecipes message to and from JSON"""
    # (fixed docstring typo: "coverting" -> "converting")

    # Previous batch with three recipes
    recipe_type = recipe_test_utils.create_recipe_type_v6()
    prev_batch = batch_test_utils.create_batch(recipe_type=recipe_type, is_creation_done=True,
                                               recipes_total=3)
    recipe_1 = recipe_test_utils.create_recipe(batch=prev_batch)
    recipe_2 = recipe_test_utils.create_recipe(batch=prev_batch)
    recipe_3 = recipe_test_utils.create_recipe(batch=prev_batch)

    # New batch that re-processes the previous batch's recipes
    definition = BatchDefinition()
    definition.root_batch_id = prev_batch.root_batch_id
    batch = batch_test_utils.create_batch(recipe_type=recipe_type, definition=definition)

    # Create message
    message = create_batch_recipes_message(batch.id)

    # Convert message to JSON and back, and then execute
    message_json_dict = message.to_json()
    new_message = CreateBatchRecipes.from_json(message_json_dict)
    result = new_message.execute()
    self.assertTrue(result)

    # Should be one create_recipes message for the three recipes
    self.assertEqual(len(new_message.new_messages), 1)
    message = new_message.new_messages[0]
    self.assertEqual(message.type, 'create_recipes')
    self.assertSetEqual(set(message.root_recipe_ids), {recipe_1.id, recipe_2.id, recipe_3.id})
def test_validate_successful_all(self):
    """Tests calling StrikeConfiguration.validate() successfully with all information"""
    recipe_type = recipe_test_utils.create_recipe_type_v6(
        definition=recipe_test_utils.RECIPE_DEFINITION)
    config = {
        'workspace': self.workspace.name,
        'monitor': {
            'type': 'dir-watcher',
            'transfer_suffix': '_tmp',
        },
        'files_to_ingest': [{
            'filename_regex': '.*txt',
            'data_types': ['one', 'two'],
            'new_file_path': os.path.join('my', 'path'),
            'new_workspace': self.new_workspace.name,
        }],
        'recipe': {
            'name': recipe_type.name,
            'revision_num': recipe_type.revision_num,
        },
    }

    # No exception is success
    configuration = StrikeConfigurationV6(config).get_configuration()
    configuration.validate()
def test_json(self):
    """Tests converting an UpdateRecipe message to and from JSON"""
    data_dict = convert_data_to_v6_json(Data()).get_dict()
    job_failed = job_test_utils.create_job(status='FAILED', input=data_dict)
    job_pending = job_test_utils.create_job(status='PENDING')

    # Recipe where a pending job depends on a failed job
    definition = RecipeDefinition(Interface())
    definition.add_job_node('job_failed', job_failed.job_type.name, job_failed.job_type.version,
                            job_failed.job_type_rev.revision_num)
    definition.add_job_node('job_pending', job_pending.job_type.name, job_pending.job_type.version,
                            job_pending.job_type_rev.revision_num)
    definition.add_dependency('job_failed', 'job_pending')
    definition_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_failed', job=job_failed)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_pending', job=job_pending)

    # Create the message, round-trip it through JSON, then execute
    message = create_update_recipe_message(recipe.id)
    message_json_dict = message.to_json()
    new_message = UpdateRecipe.from_json(message_json_dict)
    result = new_message.execute()
    self.assertTrue(result)

    # Check for message to set job_pending to BLOCKED
    self.assertEqual(len(new_message.new_messages), 1)
    msg = new_message.new_messages[0]
    self.assertEqual(msg.type, 'blocked_jobs')
    self.assertListEqual(msg._blocked_job_ids, [job_pending.id])
def test_convert_recipe_to_v6_json(self):
    """Tests calling convert_recipe_to_v6_json() successfully"""
    job_type_1 = job_test_utils.create_seed_job_type()
    job_type_2 = job_test_utils.create_seed_job_type()
    job_type_3 = job_test_utils.create_seed_job_type()
    job_type_4 = job_test_utils.create_seed_job_type()
    recipe_type_1 = recipe_test_utils.create_recipe_type_v6()

    interface = Interface()
    interface.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface.add_parameter(JsonParameter('json_param_1', 'object'))
    df1 = DataFilter(filter_list=[{'name': 'file_param_1', 'type': 'media-type',
                                   'condition': '==', 'values': ['image/gif']},
                                  {'name': 'json_param_1', 'type': 'object',
                                   'condition': 'superset of', 'values': [{}]}], all=False)

    # Graph: A fans out to B, C, and condition F; C feeds sub-recipe D; B feeds E;
    # F gates G
    definition = RecipeDefinition(interface)
    definition.add_job_node('A', job_type_1.name, job_type_1.version, job_type_1.revision_num)
    definition.add_job_node('B', job_type_2.name, job_type_2.version, job_type_2.revision_num)
    definition.add_job_node('C', job_type_3.name, job_type_3.version, job_type_3.revision_num)
    definition.add_recipe_node('D', recipe_type_1.name, recipe_type_1.revision_num)
    definition.add_job_node('E', job_type_4.name, job_type_4.version, job_type_4.revision_num)
    definition.add_condition_node('F', interface, df1)  # False
    definition.add_job_node('G', job_type_4.name, job_type_4.version, job_type_4.revision_num)
    definition.add_dependency('A', 'B')
    definition.add_dependency('A', 'C')
    definition.add_dependency('B', 'E')
    definition.add_dependency('C', 'D')
    definition.add_dependency('A', 'F')
    definition.add_dependency('F', 'G')
    definition.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    definition.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')

    recipe = recipe_test_utils.create_recipe()
    job_a = job_test_utils.create_job(job_type=job_type_1, status='COMPLETED', save=False)
    job_b = job_test_utils.create_job(job_type=job_type_2, status='RUNNING', save=False)
    job_c = job_test_utils.create_job(job_type=job_type_3, status='COMPLETED', save=False)
    job_e = job_test_utils.create_job(job_type=job_type_4, status='PENDING', num_exes=0,
                                      save=False)
    Job.objects.bulk_create([job_a, job_b, job_c, job_e])
    condition_f = recipe_test_utils.create_recipe_condition(is_processed=True, is_accepted=False,
                                                            save=True)
    recipe_d = recipe_test_utils.create_recipe(recipe_type=recipe_type_1)
    recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='A', job=job_a,
                                                         save=False)
    recipe_node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='B', job=job_b,
                                                         save=False)
    recipe_node_c = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='C', job=job_c,
                                                         save=False)
    recipe_node_d = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='D',
                                                         sub_recipe=recipe_d, save=False)
    recipe_node_e = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='E', job=job_e,
                                                         save=False)
    recipe_node_f = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='F',
                                                         condition=condition_f, save=False)
    recipe_nodes = [recipe_node_a, recipe_node_b, recipe_node_c, recipe_node_d, recipe_node_e,
                    recipe_node_f]

    recipe_instance = RecipeInstance(definition, recipe, recipe_nodes)
    json = convert_recipe_to_v6_json(recipe_instance)
    RecipeInstanceV6(json=json.get_dict(), do_validate=True)  # Revalidate
    self.assertSetEqual(set(json.get_dict()['nodes'].keys()), {'A', 'B', 'C', 'D', 'E', 'F'})
def setUp(self):
    """Register the AMQP messaging backend and create a recipe type with a batch."""
    django.setup()
    add_message_backend(AMQPMessagingBackend)  # mock out threading.start

    self.recipe_type = recipe_test_utils.create_recipe_type_v6()
    self.batch = batch_test_utils.create_batch(recipe_type=self.recipe_type)
def test_successful_manual_kickoff(self, mock_msg_mgr):
    """Tests successfully producing an ingest that immediately calls a recipe"""
    ingest = ingest_test_utils.create_ingest(source_file=self.source_file)
    recipe_type = recipe_test_utils.create_recipe_type_v6(
        definition=recipe_test_utils.RECIPE_DEFINITION)

    # Call method to test
    IngestRecipeHandler().process_manual_ingested_source_file(ingest.id, self.source_file, now(),
                                                              recipe_type.id)

    # Exactly one recipe of the expected type should have been created
    self.assertEqual(Recipe.objects.all().count(), 1)
    self.assertEqual(Recipe.objects.first().recipe_type.name, recipe_type.name)
def test_json_forced_nodes(self):
    """Tests converting an UpdateRecipe message to and from JSON when forced nodes are provided"""
    data_dict = convert_data_to_v6_json(Data()).get_dict()
    job_completed = job_test_utils.create_job(status='COMPLETED', input=data_dict,
                                              output=data_dict)
    sub_recipe_type = recipe_test_utils.create_recipe_type_v6()

    # A completed job feeding a sub-recipe node
    definition = RecipeDefinition(Interface())
    definition.add_job_node('job_completed', job_completed.job_type.name,
                            job_completed.job_type.version,
                            job_completed.job_type_rev.revision_num)
    definition.add_recipe_node('the_sub_recipe', sub_recipe_type.name,
                               sub_recipe_type.revision_num)
    definition.add_dependency('job_completed', 'the_sub_recipe')
    definition_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_dict)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_completed',
                                        job=job_completed)

    # Force every node inside the sub-recipe to reprocess
    forced_nodes = ForcedNodes()
    sub_forced_nodes = ForcedNodes()
    sub_forced_nodes.set_all_nodes()
    forced_nodes.add_subrecipe('the_sub_recipe', sub_forced_nodes)

    # Create the message, round-trip it through JSON, then execute
    message = create_update_recipe_message(recipe.id, forced_nodes=forced_nodes)
    message_json_dict = message.to_json()
    new_message = UpdateRecipe.from_json(message_json_dict)
    result = new_message.execute()
    self.assertTrue(result)

    # Check for message to create sub-recipe
    self.assertEqual(len(new_message.new_messages), 1)
    msg = new_message.new_messages[0]
    self.assertEqual(msg.type, 'create_recipes')
    self.assertEqual(msg.event_id, recipe.event_id)
    msg_forced_nodes_dict = convert_forced_nodes_to_v6(msg.forced_nodes).get_dict()
    expected_forced_nodes_dict = convert_forced_nodes_to_v6(forced_nodes).get_dict()
    self.assertDictEqual(msg_forced_nodes_dict, expected_forced_nodes_dict)
    self.assertEqual(msg.create_recipes_type, SUB_RECIPE_TYPE)
    self.assertEqual(msg.recipe_id, recipe.id)
    self.assertEqual(msg.root_recipe_id, recipe.root_superseded_recipe_id)
    self.assertIsNone(msg.superseded_recipe_id)
    sub = SubRecipe(sub_recipe_type.name, sub_recipe_type.revision_num, 'the_sub_recipe', True)
    self.assertListEqual(msg.sub_recipes, [sub])
def test_successful_manual_kickoff(self, mock_create, mock_msg_mgr):
    """Tests successfully producing an ingest that immediately calls a recipe"""
    ingest = ingest_test_utils.create_ingest(source_file=self.source_file)
    recipe_type = recipe_test_utils.create_recipe_type_v6(
        definition=recipe_test_utils.RECIPE_DEFINITION)

    # Call method to test
    IngestRecipeHandler().process_manual_ingested_source_file(ingest.id, self.source_file, now(),
                                                              recipe_type.id)

    # Both the message manager and recipe creation should fire exactly once
    mock_msg_mgr.assert_called_once()
    mock_create.assert_called_once()
def setUp(self):
    """Log in as a staff client and create batches in various creation states,
    plus a third batch whose recipe type has a real sub-recipe definition."""
    django.setup()
    rest.login_client(self.client, is_staff=True)

    self.recipe_type_1 = recipe_test_utils.create_recipe_type_v6()
    self.batch_1 = batch_test_utils.create_batch(recipe_type=self.recipe_type_1,
                                                 is_creation_done=False)
    self.recipe_type_2 = recipe_test_utils.create_recipe_type_v6()
    self.batch_2 = batch_test_utils.create_batch(recipe_type=self.recipe_type_2,
                                                 is_creation_done=True)

    # Third recipe type: sub-definition pointing at a concrete job type
    self.job_type1 = job_test_utils.create_seed_job_type(manifest=job_test_utils.MINIMUM_MANIFEST)
    self.sub_definition = copy.deepcopy(recipe_test_utils.SUB_RECIPE_DEFINITION)
    node_a = self.sub_definition['nodes']['node_a']['node_type']
    node_a['job_type_name'] = self.job_type1.name
    node_a['job_type_version'] = self.job_type1.version
    node_a['job_type_revision'] = self.job_type1.revision_num
    self.recipe_type_3 = recipe_test_utils.create_recipe_type_v6(definition=self.sub_definition)
    self.batch_3 = batch_test_utils.create_batch(recipe_type=self.recipe_type_3,
                                                 is_creation_done=True)
    recipe_test_utils.create_recipe(recipe_type=self.recipe_type_3, batch=self.batch_3)
def setUp(self):
    """Create a tagged source file, a seed job type that accepts it, and two
    recipe types (v6 and v7 definitions) wrapping that job type."""
    django.setup()
    add_message_backend(AMQPMessagingBackend)

    self.workspace = storage_test_utils.create_workspace()
    self.source_file = ScaleFile.objects.create(file_name='input_file', file_type='SOURCE',
                                                media_type='text/plain', file_size=10,
                                                data_type_tags=['type1'], file_path='the_path',
                                                workspace=self.workspace)
    self.source_file.add_data_type_tag('type1')
    self.source_file.add_data_type_tag('type2')
    self.source_file.add_data_type_tag('type3')

    manifest = job_test_utils.create_seed_manifest(
        inputs_files=[{'name': 'INPUT_FILE', 'media_types': ['text/plain'], 'required': True,
                       'multiple': True}],
        inputs_json=[])
    self.jt1 = job_test_utils.create_seed_job_type(manifest=manifest)

    def _recipe_def(version):
        # One-node recipe definition passing INPUT_FILE straight into jt1; the v6
        # and v7 variants differ only in the schema version string.
        return {'version': version,
                'input': {'files': [{'name': 'INPUT_FILE', 'media_types': ['text/plain'],
                                     'required': True, 'multiple': True}],
                          'json': []},
                'nodes': {'node_a': {'dependencies': [],
                                     'input': {'INPUT_FILE': {'type': 'recipe',
                                                              'input': 'INPUT_FILE'}},
                                     'node_type': {'node_type': 'job',
                                                   'job_type_name': self.jt1.name,
                                                   'job_type_version': self.jt1.version,
                                                   'job_type_revision': 1}}}}

    self.recipe = recipe_test_utils.create_recipe_type_v6(name='test-recipe',
                                                          definition=_recipe_def('6'))
    self.recipe_v7 = recipe_test_utils.create_recipe_type_v6(name='test-recipe-v7',
                                                             definition=_recipe_def('7'))
def test_json(self):
    """Tests converting a ProcessCondition message to and from JSON"""
    # Condition node whose filter accepts cond_int == 0.
    # (Removed the dead 'definition = RecipeDefinition(Interface())' statement that
    # was immediately overwritten by the real definition below.)
    cond_interface_1 = Interface()
    cond_interface_1.add_parameter(JsonParameter('cond_int', 'integer'))
    df1 = DataFilter(filter_list=[{'name': 'cond_int', 'type': 'integer', 'condition': '==',
                                   'values': [0]}])
    definition = RecipeDefinition(cond_interface_1)
    definition.add_condition_node('node_a', cond_interface_1, df1)
    definition.add_recipe_input_connection('node_a', 'cond_int', 'cond_int')
    definition_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_dict)

    # Recipe input satisfies the filter, so the condition should be accepted
    data_1 = Data()
    data_1.add_value(JsonValue('cond_int', 0))
    data_1_dict = convert_data_to_v6_json(data_1).get_dict()
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_1_dict)
    condition = recipe_test_utils.create_recipe_condition(recipe=recipe, save=True)
    recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_a', condition=condition,
                                         save=True)

    # Create message
    message = create_process_condition_messages([condition.id])[0]

    # Convert message to JSON and back, and then execute
    message_json_dict = message.to_json()
    new_message = ProcessCondition.from_json(message_json_dict)
    result = new_message.execute()
    self.assertTrue(result)

    condition = RecipeCondition.objects.get(id=condition.id)
    self.assertEqual(len(new_message.new_messages), 1)
    self.assertEqual(new_message.new_messages[0].type, 'update_recipe')
    self.assertEqual(new_message.new_messages[0].root_recipe_id, recipe.id)
    self.assertTrue(condition.is_processed)
    self.assertIsNotNone(condition.processed)
    self.assertTrue(condition.is_accepted)
def setUp(self):
    """Create a sub-recipe type (single jt2 node) and a main recipe type whose
    chain node_a -> node_b -> node_c ends in that sub-recipe."""
    django.setup()
    self.jt = job_test_utils.create_seed_job_type()
    self.jt2 = job_test_utils.create_seed_job_type(manifest=job_test_utils.MINIMUM_MANIFEST)

    # Sub-recipe: one job node running jt2 with no inputs
    def_v6_dict_sub = {
        'version': '6',
        'input': {'files': [], 'json': []},
        'nodes': {
            'node_a': {
                'dependencies': [],
                'input': {},
                'node_type': {'node_type': 'job', 'job_type_name': self.jt2.name,
                              'job_type_version': self.jt2.version,
                              'job_type_revision': self.jt2.revision_num},
            },
        },
    }
    self.sub = recipe_test_utils.create_recipe_type_v6(definition=def_v6_dict_sub)

    # Main recipe: node_a and node_b run jt; node_c embeds the sub-recipe
    def_v6_dict_main = {
        'version': '6',
        'input': {'files': [{'name': 'INPUT_IMAGE', 'media_types': ['image/tiff'],
                             'required': True, 'multiple': False}],
                  'json': [{'name': 'bar', 'type': 'string', 'required': False}]},
        'nodes': {
            'node_a': {
                'dependencies': [],
                'input': {'INPUT_IMAGE': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                'node_type': {'node_type': 'job', 'job_type_name': self.jt.name,
                              'job_type_version': self.jt.version,
                              'job_type_revision': self.jt.revision_num},
            },
            'node_b': {
                'dependencies': [{'name': 'node_a'}],
                'input': {'INPUT_IMAGE': {'type': 'dependency', 'node': 'node_a',
                                          'output': 'OUTPUT_IMAGE'}},
                'node_type': {'node_type': 'job', 'job_type_name': self.jt.name,
                              'job_type_version': self.jt.version,
                              'job_type_revision': self.jt.revision_num},
            },
            'node_c': {
                'dependencies': [{'name': 'node_b'}],
                'input': {'input_a': {'type': 'recipe', 'input': 'bar'},
                          'input_b': {'type': 'dependency', 'node': 'node_b',
                                      'output': 'OUTPUT_IMAGE'}},
                'node_type': {'node_type': 'recipe', 'recipe_type_name': self.sub.name,
                              'recipe_type_revision': self.sub.revision_num},
            },
        },
    }
    self.rt = recipe_test_utils.create_recipe_type_v6(definition=def_v6_dict_main)
def test_has_completed_empty(self):
    """Tests calling Recipe.has_completed() when a recipe is empty and has not created
    its nodes yet"""
    job_type = job_test_utils.create_seed_job_type()
    sub_recipe_type = recipe_test_utils.create_recipe_type_v6()

    # Graph of job nodes plus two sub-recipe nodes (B and G); no recipe nodes are
    # actually created, so the recipe is still "empty"
    definition = RecipeDefinition(Interface())
    definition.add_job_node('A', job_type.name, job_type.version, job_type.revision_num)
    definition.add_recipe_node('B', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_job_node('C', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('D', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('E', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('F', job_type.name, job_type.version, job_type.revision_num)
    definition.add_recipe_node('G', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_job_node('H', job_type.name, job_type.version, job_type.revision_num)
    definition.add_dependency('A', 'C')
    definition.add_dependency('A', 'E')
    definition.add_dependency('A', 'H')
    definition.add_dependency('C', 'D')
    definition.add_dependency('G', 'H')

    definition_json_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_json_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)

    recipe_instance = Recipe.objects.get_recipe_instance(recipe.id)
    self.assertFalse(recipe_instance.has_completed())
def create_batch(title=None, description=None, recipe_type=None, definition=None,
                 configuration=None, is_creation_done=False, recipes_total=None):
    """Creates a batch model for unit testing

    Any argument left as None is filled in with a generated default. When no
    definition is supplied, a completed "previous" batch is created first so the
    new batch's definition has a valid root batch to point at.

    :returns: The batch model
    :rtype: :class:`batch.models.Batch`
    """

    if not recipe_type:
        recipe_type = recipe_test_utils.create_recipe_type_v6()
    if not definition:
        definition = BatchDefinition()
        # Create a previous batch so we can create a valid definition
        # TODO: this can be replaced by a DataSet once they are implemented
        prev_batch = Batch()
        prev_batch.recipe_type = recipe_type
        prev_batch.recipe_type_rev = RecipeTypeRevision.objects.get_revision(
            recipe_type.name, recipe_type.revision_num)
        prev_batch.event = TriggerEvent.objects.create_trigger_event('USER', None,
                                                                     {'user': '******'}, now())
        prev_batch.is_creation_done = True
        prev_batch.recipes_total = 10
        prev_batch.save()
        # root_batch_id can only be set once the row exists and has a pk
        prev_batch.root_batch_id = prev_batch.id
        prev_batch.save()
        definition.root_batch_id = prev_batch.root_batch_id
    if not isinstance(definition, BatchDefinition):
        definition = BatchDefinitionV6(definition).get_definition()
    if not configuration:
        configuration = BatchConfiguration()
    if not title:
        global BATCH_TITLE_COUNTER
        title = 'Test Batch Title %i' % BATCH_TITLE_COUNTER
        BATCH_TITLE_COUNTER += 1
    if not description:
        global BATCH_DESCRIPTION_COUNTER
        description = 'Test Batch Description %i' % BATCH_DESCRIPTION_COUNTER
        BATCH_DESCRIPTION_COUNTER += 1

    event = TriggerEvent.objects.create_trigger_event('USER', None, {'user': '******'}, now())
    batch = Batch.objects.create_batch_v6(title, description, recipe_type, event, definition,
                                          configuration)
    if is_creation_done:
        batch.is_creation_done = True
    if recipes_total is not None:
        batch.recipes_total = recipes_total
    batch.save()
    return batch
def setUp(self):
    """Create six recipe types and link rt1/rt2 as parents of rt3/rt4/rt5."""
    django.setup()

    self.rt1 = recipe_test_utils.create_recipe_type_v6()
    self.rt2 = recipe_test_utils.create_recipe_type_v6()
    self.rt3 = recipe_test_utils.create_recipe_type_v6()
    self.rt4 = recipe_test_utils.create_recipe_type_v6()
    self.rt5 = recipe_test_utils.create_recipe_type_v6()
    self.rt6 = recipe_test_utils.create_recipe_type_v6()

    # Links: rt1 -> rt3, rt1 -> rt4, rt2 -> rt5 (rt6 stays unlinked)
    self.parents = [self.rt1.id, self.rt1.id, self.rt2.id]
    self.children = [self.rt3.id, self.rt4.id, self.rt5.id]
    RecipeTypeSubLink.objects.create_recipe_type_sub_links(self.parents, self.children)
def setUp(self):
    """Create a superseded source file wired to a job execution, a recipe, and a
    batch, then log in a test client."""
    django.setup()

    self.workspace1 = storage_test_utils.create_workspace(name='ws1')
    self.country = storage_test_utils.create_country()

    manifest = copy.deepcopy(job_test_utils.COMPLETE_MANIFEST)
    manifest['job']['name'] = 'test1'
    self.job_type1 = job_test_utils.create_seed_job_type(manifest=manifest)
    self.job1 = job_test_utils.create_job(job_type=self.job_type1)
    self.job_exe1 = job_test_utils.create_job_exe(job=self.job1)

    self.recipe_type1 = recipe_test_utils.create_recipe_type_v6()
    self.recipe1 = recipe_test_utils.create_recipe(recipe_type=self.recipe_type1)
    self.batch1 = batch_test_utils.create_batch(recipe_type=self.recipe_type1,
                                                is_creation_done=True)

    self.file = storage_test_utils.create_file(
        file_name='test.txt', file_type='SOURCE', media_type='image/png', file_size=1000,
        data_type_tags=['png'], file_path='/test/path', workspace=self.workspace1,
        is_deleted=False, last_modified='', data_started='2017-01-01T00:00:00Z',
        data_ended='2017-01-01T00:00:00Z', source_started='2017-01-01T00:00:00Z',
        source_ended='2017-01-01T00:00:00Z', geometry='', center_point='', meta_data='',
        countries=[self.country], job_exe=self.job_exe1, job_output='output_name_1',
        recipe=self.recipe1, recipe_node='my-recipe', batch=self.batch1, is_superseded=True,
        superseded='2017-01-01T00:00:00Z')

    rest.login_client(self.client)
def test_successful(self):
    """Tests successfully calling the v6 batch details view"""
    job_type = job_test_utils.create_seed_job_type()
    recipe_definition_dict = {
        'version': '6',
        'input': {'files': [{'name': 'INPUT_IMAGE', 'media_types': ['image/png'],
                             'required': True, 'multiple': False}],
                  'json': []},
        'nodes': {
            'job_a': {
                'dependencies': [],
                'input': {'INPUT_IMAGE': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                'node_type': {
                    'node_type': 'job',
                    'job_type_name': job_type.name,
                    'job_type_version': job_type.version,
                    'job_type_revision': 1,
                },
            },
        },
    }
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=recipe_definition_dict)
    configuration = BatchConfiguration()
    configuration.priority = 100
    batch = batch_test_utils.create_batch(recipe_type=recipe_type, configuration=configuration)

    url = '/v6/batches/%d/' % batch.id
    response = self.client.get(url)
    self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)

    # The details payload should echo back the batch's fields
    result = json.loads(response.content)
    self.assertEqual(result['id'], batch.id)
    self.assertEqual(result['title'], batch.title)
    self.assertEqual(result['description'], batch.description)
    self.assertEqual(result['recipe_type']['id'], batch.recipe_type.id)
    self.assertDictEqual(result['definition'], batch.get_v6_definition_json())
    self.assertDictEqual(result['configuration'], batch.get_v6_configuration_json())
def test_condition_hit(self):
    """Tests a recipe definition where a condition node accepts the parse job's output.

    Graph: parse-job -> condition-node -> recipe-node
    (The original docstring was copy-pasted from a has_completed() test and was
    followed by a second no-op bare string; both merged into this one docstring.)
    """
    manifest_1 = {
        'seedVersion': '1.0.0',
        'job': {
            'name': 'parse-job',
            'jobVersion': '1.0.0',
            'packageVersion': '1.0.0',
            'title': 'Test Parse Job',
            'description': 'Test Parse job',
            'maintainer': {'name': 'John Doe', 'email': '*****@*****.**'},
            'timeout': 10,
            'interface': {
                'command': '',
                'inputs': {
                    'files': [{'name': 'INPUT_FILE', 'mediaTypes': ['image/x-hdf5-image'],
                               'required': True}],
                    'json': []
                },
                'outputs': {
                    'files': [{'name': 'OUTPUT_A', 'pattern': '*.png', 'multiple': True}]
                }
            }
        }
    }
    job_type_1 = job_test_utils.create_seed_job_type(manifest=manifest_1)

    input_interface = Interface()
    input_interface.add_parameter(FileParameter('INPUT_FILE', ['image/x-hdf5-image'],
                                                multiple=False))
    definition = RecipeDefinition(input_interface)
    definition.add_job_node('parse-job', job_type_1.name, job_type_1.version,
                            job_type_1.revision_num)

    cond_interface = Interface()
    cond_interface.add_parameter(FileParameter('INPUT_FILE', ['image/png']))
    # BUGFIX: the filter dict used the key 'value'; every other DataFilter in this
    # file uses 'values', so the original filter never carried its comparison values.
    df = DataFilter(filter_list=[{'name': 'cond', 'type': 'media-type', 'condition': '==',
                                  'values': ['image/png']}])
    definition.add_condition_node('condition-node', cond_interface, df)

    sub_job_manifest = {
        'seedVersion': '1.0.0',
        'job': {
            'name': 'recipe-job',
            'jobVersion': '1.0.0',
            'packageVersion': '1.0.0',
            'title': 'Test Recipe Job',
            'description': 'Test Recipe job',
            'maintainer': {'name': 'John Doe', 'email': '*****@*****.**'},
            'timeout': 10,
            'interface': {
                'command': '',
                'inputs': {
                    'files': [{'name': 'INPUT_FILE', 'mediaTypes': ['image/png'],
                               'required': True}],
                    'json': []
                },
                'outputs': {
                    'files': [{'name': 'OUTPUT_A', 'pattern': '*.png', 'multiple': True}]
                }
            }
        }
    }
    sub_job = job_test_utils.create_seed_job_type(manifest=sub_job_manifest)

    sub_interface = Interface()
    sub_interface.add_parameter(FileParameter('INPUT_FILE', ['image/png']))
    definition_b = RecipeDefinition(sub_interface)
    definition_b.add_job_node('job_b', sub_job.name, sub_job.version, sub_job.revision_num)
    definition_b.add_recipe_input_connection('job_b', 'INPUT_FILE', 'INPUT_FILE')
    definition_b_dict = convert_recipe_definition_to_v6_json(definition_b).get_dict()
    sub_recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_b_dict)
    definition.add_recipe_node('recipe-node', sub_recipe_type.name, sub_recipe_type.revision_num)

    # Connect the recipe input to the parse job
    definition.add_recipe_input_connection('parse-job', 'INPUT_FILE', 'INPUT_FILE')
    # Connect the condition node to the parse job output
    definition.add_dependency_input_connection('condition-node', 'cond', 'parse-job', 'OUTPUT_A')
    # Connect the sub recipe to the condition output
    definition.add_dependency_input_connection('recipe-node', 'INPUT_FILE', 'condition-node',
                                               'cond')
def test_has_completed_true(self):
    """Tests calling Recipe.has_completed() when an entire recipe has completed"""
    data_dict = convert_data_to_v6_json(Data()).get_dict()
    job_type = job_test_utils.create_seed_job_type()
    sub_recipe_type = recipe_test_utils.create_recipe_type_v6()

    cond_interface_1 = Interface()
    cond_interface_1.add_parameter(JsonParameter('cond_int', 'integer'))
    df2 = DataFilter(filter_list=[{'name': 'cond_int', 'type': 'integer', 'condition': '==',
                                   'values': [0]},
                                  {'name': 'cond_int', 'type': 'integer', 'condition': '!=',
                                   'values': [0]}], all=True)  # always False

    definition = RecipeDefinition(Interface())
    definition.add_job_node('A', job_type.name, job_type.version, job_type.revision_num)
    definition.add_recipe_node('B', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_job_node('C', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('D', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('E', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('F', job_type.name, job_type.version, job_type.revision_num)
    definition.add_recipe_node('G', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_job_node('H', job_type.name, job_type.version, job_type.revision_num)
    definition.add_condition_node('I', cond_interface_1, df2)  # False
    definition.add_job_node('J', job_type.name, job_type.version, job_type.revision_num)
    definition.add_dependency('A', 'C')
    definition.add_dependency('A', 'E')
    definition.add_dependency('A', 'H')
    definition.add_dependency('C', 'D')
    definition.add_dependency('G', 'H')
    definition.add_dependency('A', 'I')
    definition.add_dependency('I', 'J')

    # Every job node is COMPLETED; J never runs because condition I rejects
    job_a = job_test_utils.create_job(job_type=job_type, status='COMPLETED', output=data_dict,
                                      save=False)
    job_c = job_test_utils.create_job(job_type=job_type, status='COMPLETED', output=data_dict,
                                      save=False)
    job_d = job_test_utils.create_job(job_type=job_type, status='COMPLETED', output=data_dict,
                                      save=False)
    job_e = job_test_utils.create_job(job_type=job_type, status='COMPLETED', output=data_dict,
                                      save=False)
    job_f = job_test_utils.create_job(job_type=job_type, status='COMPLETED', output=data_dict,
                                      save=False)
    job_h = job_test_utils.create_job(job_type=job_type, status='COMPLETED', output=data_dict,
                                      save=False)
    Job.objects.bulk_create([job_a, job_c, job_d, job_e, job_f, job_h])

    condition_i = recipe_test_utils.create_recipe_condition(is_processed=True, is_accepted=False,
                                                            save=True)
    recipe_b = recipe_test_utils.create_recipe(recipe_type=sub_recipe_type, save=False)
    recipe_b.is_completed = True
    recipe_g = recipe_test_utils.create_recipe(recipe_type=sub_recipe_type, save=False)
    recipe_g.is_completed = True
    Recipe.objects.bulk_create([recipe_b, recipe_g])

    definition_json_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_json_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)

    recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='A', job=job_a,
                                                         save=False, is_original=False)
    recipe_node_c = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='C', job=job_c,
                                                         save=False, is_original=False)
    recipe_node_d = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='D', job=job_d,
                                                         save=False, is_original=False)
    recipe_node_e = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='E', job=job_e,
                                                         save=False)
    recipe_node_f = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='F', job=job_f,
                                                         save=False)
    recipe_node_h = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='H', job=job_h,
                                                         save=False)
    recipe_node_i = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='I',
                                                         condition=condition_i, save=False)
    recipe_node_g = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='G',
                                                         sub_recipe=recipe_g, save=False,
                                                         is_original=False)
    recipe_node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='B',
                                                         sub_recipe=recipe_b, save=False)
    RecipeNode.objects.bulk_create([recipe_node_a, recipe_node_b, recipe_node_c, recipe_node_d,
                                    recipe_node_e, recipe_node_f, recipe_node_g, recipe_node_h,
                                    recipe_node_i])

    recipe_instance = Recipe.objects.get_recipe_instance(recipe.id)
    self.assertTrue(recipe_instance.has_completed())
def test_has_completed_false(self):
    """Tests calling Recipe.has_completed() when an entire recipe has not completed"""

    # Empty data dict used as the output for every completed job
    data_dict = convert_data_to_v6_json(Data()).get_dict()
    job_type = job_test_utils.create_seed_job_type()
    sub_recipe_type = recipe_test_utils.create_recipe_type_v6()

    # Recipe graph: job nodes A, C, D, E, F, H and sub-recipe nodes B, G
    definition = RecipeDefinition(Interface())
    definition.add_job_node('A', job_type.name, job_type.version, job_type.revision_num)
    definition.add_recipe_node('B', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_job_node('C', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('D', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('E', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('F', job_type.name, job_type.version, job_type.revision_num)
    definition.add_recipe_node('G', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_job_node('H', job_type.name, job_type.version, job_type.revision_num)
    definition.add_dependency('A', 'C')
    definition.add_dependency('A', 'E')
    definition.add_dependency('A', 'H')
    definition.add_dependency('C', 'D')
    definition.add_dependency('G', 'H')

    # Every job node is COMPLETED with output data (created unsaved, then bulk-created)
    job_a = job_test_utils.create_job(job_type=job_type, status='COMPLETED', output=data_dict, save=False)
    job_c = job_test_utils.create_job(job_type=job_type, status='COMPLETED', output=data_dict, save=False)
    job_d = job_test_utils.create_job(job_type=job_type, status='COMPLETED', output=data_dict, save=False)
    job_e = job_test_utils.create_job(job_type=job_type, status='COMPLETED', output=data_dict, save=False)
    job_f = job_test_utils.create_job(job_type=job_type, status='COMPLETED', output=data_dict, save=False)
    job_h = job_test_utils.create_job(job_type=job_type, status='COMPLETED', output=data_dict, save=False)
    Job.objects.bulk_create([job_a, job_c, job_d, job_e, job_f, job_h])

    # Sub-recipe B is completed but sub-recipe G is NOT - this is what should keep
    # the overall recipe from being reported as completed
    recipe_b = recipe_test_utils.create_recipe(recipe_type=sub_recipe_type, save=False)
    recipe_b.is_completed = True
    recipe_g = recipe_test_utils.create_recipe(recipe_type=sub_recipe_type, save=False)
    recipe_g.is_completed = False
    Recipe.objects.bulk_create([recipe_b, recipe_g])

    definition_json_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_json_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)

    # Attach the jobs and sub-recipes to the recipe as its nodes (some flagged
    # is_original=False; that flag is irrelevant to completion, which looks at all nodes)
    recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='A', job=job_a, save=False, is_original=False)
    recipe_node_c = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='C', job=job_c, save=False, is_original=False)
    recipe_node_d = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='D', job=job_d, save=False, is_original=False)
    recipe_node_e = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='E', job=job_e, save=False)
    recipe_node_f = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='F', job=job_f, save=False)
    recipe_node_h = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='H', job=job_h, save=False)
    recipe_node_g = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='G', sub_recipe=recipe_g, save=False, is_original=False)
    recipe_node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='B', sub_recipe=recipe_b, save=False)
    RecipeNode.objects.bulk_create([
        recipe_node_a, recipe_node_b, recipe_node_c, recipe_node_d, recipe_node_e,
        recipe_node_f, recipe_node_g, recipe_node_h
    ])

    # With sub-recipe G incomplete, the recipe instance must not report completed
    recipe_instance = Recipe.objects.get_recipe_instance(recipe.id)
    self.assertFalse(recipe_instance.has_completed())
def test_get_original_leaf_nodes(self):
    """Tests calling Recipe.get_original_leaf_nodes()"""

    job_type = job_test_utils.create_seed_job_type()
    sub_recipe_type = recipe_test_utils.create_recipe_type_v6()

    # Recipe graph: leaf nodes (nothing depends on them) are B, D, E, F and H
    definition = RecipeDefinition(Interface())
    definition.add_job_node('A', job_type.name, job_type.version, job_type.revision_num)
    definition.add_recipe_node('B', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_job_node('C', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('D', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('E', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('F', job_type.name, job_type.version, job_type.revision_num)
    definition.add_recipe_node('G', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_job_node('H', job_type.name, job_type.version, job_type.revision_num)
    definition.add_dependency('A', 'C')
    definition.add_dependency('A', 'E')
    definition.add_dependency('A', 'H')
    definition.add_dependency('C', 'D')
    definition.add_dependency('G', 'H')

    # Jobs in assorted states; A is additionally superseded
    job_a = job_test_utils.create_job(job_type=job_type, status='COMPLETED', save=False, is_superseded=True)
    job_c = job_test_utils.create_job(job_type=job_type, status='CANCELED', num_exes=0, save=False)
    job_d = job_test_utils.create_job(job_type=job_type, status='PENDING', num_exes=0, save=False)
    job_e = job_test_utils.create_job(job_type=job_type, status='BLOCKED', num_exes=0, save=False)
    job_f = job_test_utils.create_job(job_type=job_type, status='PENDING', num_exes=0, save=False)
    job_h = job_test_utils.create_job(job_type=job_type, status='PENDING', num_exes=0, save=False)
    Job.objects.bulk_create([job_a, job_c, job_d, job_e, job_f, job_h])

    # Sub-recipes with partial progress metrics
    recipe_b = recipe_test_utils.create_recipe(recipe_type=sub_recipe_type, save=False)
    recipe_b.jobs_completed = 3
    recipe_b.jobs_running = 2
    recipe_b.jobs_total = 5
    recipe_g = recipe_test_utils.create_recipe(recipe_type=sub_recipe_type, save=False)
    recipe_g.jobs_completed = 2
    recipe_g.jobs_failed = 1
    recipe_g.jobs_total = 3
    Recipe.objects.bulk_create([recipe_b, recipe_g])

    definition_json_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_json_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)

    # Nodes A, C, D and G are flagged is_original=False, so leaf node D and leaf
    # sub-recipe G must be excluded from the "original leaf" results below
    recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='A', job=job_a, save=False, is_original=False)
    recipe_node_c = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='C', job=job_c, save=False, is_original=False)
    recipe_node_d = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='D', job=job_d, save=False, is_original=False)
    recipe_node_e = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='E', job=job_e, save=False)
    recipe_node_f = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='F', job=job_f, save=False)
    recipe_node_h = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='H', job=job_h, save=False)
    recipe_node_g = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='G', sub_recipe=recipe_g, save=False, is_original=False)
    recipe_node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='B', sub_recipe=recipe_b, save=False)
    RecipeNode.objects.bulk_create([
        recipe_node_a, recipe_node_b, recipe_node_c, recipe_node_d, recipe_node_e,
        recipe_node_f, recipe_node_g, recipe_node_h
    ])

    recipe_instance = Recipe.objects.get_recipe_instance(recipe.id)
    results = recipe_instance.get_original_leaf_nodes()

    # Expect the 4 original leaf nodes: jobs E, F, H and sub-recipe B
    self.assertEqual(len(results.values()), 4)
    leaf_jobs = [
        node.job.id for node in results.values()
        if node.node_type == JobNodeDefinition.NODE_TYPE
    ]
    leaf_recipes = [
        node.recipe.id for node in results.values()
        if node.node_type == RecipeNodeDefinition.NODE_TYPE
    ]
    # NOTE(review): assertItemsEqual is the Python 2 spelling (assertCountEqual
    # in Python 3) - confirm against the interpreter this suite targets
    self.assertItemsEqual(
        leaf_jobs,
        [recipe_node_e.job.id, recipe_node_f.job.id, recipe_node_h.job.id])
    self.assertItemsEqual(leaf_recipes,
                          [recipe_node_b.sub_recipe.id])
def test_get_jobs_to_update(self):
    """Tests calling Recipe.get_jobs_to_update()"""

    job_type = job_test_utils.create_seed_job_type()
    sub_recipe_type = recipe_test_utils.create_recipe_type_v6()

    # Recipe graph with both job and sub-recipe dependencies
    definition = RecipeDefinition(Interface())
    definition.add_job_node('A', job_type.name, job_type.version, job_type.revision_num)
    definition.add_recipe_node('B', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_job_node('C', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('D', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('E', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('F', job_type.name, job_type.version, job_type.revision_num)
    definition.add_recipe_node('G', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_job_node('H', job_type.name, job_type.version, job_type.revision_num)
    definition.add_dependency('A', 'C')
    definition.add_dependency('A', 'E')
    definition.add_dependency('A', 'H')
    definition.add_dependency('B', 'E')
    definition.add_dependency('B', 'G')
    definition.add_dependency('C', 'D')
    definition.add_dependency('E', 'F')
    definition.add_dependency('G', 'H')

    # A completed; C canceled; D/F/H pending; E currently (stale) BLOCKED
    job_a = job_test_utils.create_job(job_type=job_type, status='COMPLETED', save=False)
    job_c = job_test_utils.create_job(job_type=job_type, status='CANCELED', num_exes=0, save=False)
    job_d = job_test_utils.create_job(job_type=job_type, status='PENDING', num_exes=0, save=False)
    job_e = job_test_utils.create_job(job_type=job_type, status='BLOCKED', num_exes=0, save=False)
    job_f = job_test_utils.create_job(job_type=job_type, status='PENDING', num_exes=0, save=False)
    job_h = job_test_utils.create_job(job_type=job_type, status='PENDING', num_exes=0, save=False)
    Job.objects.bulk_create([job_a, job_c, job_d, job_e, job_f, job_h])

    # Sub-recipe B is still running (no failures); sub-recipe G has a failed job
    recipe_b = recipe_test_utils.create_recipe(recipe_type=sub_recipe_type, save=False)
    recipe_b.jobs_completed = 3
    recipe_b.jobs_running = 2
    recipe_b.jobs_total = 5
    recipe_g = recipe_test_utils.create_recipe(recipe_type=sub_recipe_type, save=False)
    recipe_g.jobs_completed = 2
    recipe_g.jobs_failed = 1
    recipe_g.jobs_total = 3
    Recipe.objects.bulk_create([recipe_b, recipe_g])

    definition_json_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_json_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)

    recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='A', job=job_a, save=False)
    recipe_node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='B', sub_recipe=recipe_b, save=False)
    recipe_node_c = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='C', job=job_c, save=False)
    recipe_node_d = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='D', job=job_d, save=False)
    recipe_node_e = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='E', job=job_e, save=False)
    recipe_node_f = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='F', job=job_f, save=False)
    recipe_node_g = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='G', sub_recipe=recipe_g, save=False)
    recipe_node_h = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='H', job=job_h, save=False)
    RecipeNode.objects.bulk_create([
        recipe_node_a, recipe_node_b, recipe_node_c, recipe_node_d, recipe_node_e,
        recipe_node_f, recipe_node_g, recipe_node_h
    ])

    recipe_instance = Recipe.objects.get_recipe_instance(recipe.id)
    results = recipe_instance.get_jobs_to_update()

    # Expected per the asserts: D blocked (its parent C is canceled), H blocked
    # (its parent G has a failure), while E should flip back to PENDING since its
    # parents A (completed) and B (progressing) are not blocking it
    self.assertSetEqual(set(results['BLOCKED']), {job_d.id, job_h.id})
    self.assertSetEqual(set(results['PENDING']), {job_e.id})
def test_get_nodes_to_process_input(self):
    """Tests calling Recipe.get_nodes_to_process_input()"""

    data_dict = convert_data_to_v6_json(Data()).get_dict()
    job_type = job_test_utils.create_seed_job_type()
    sub_recipe_type = recipe_test_utils.create_recipe_type_v6()

    # Create recipe
    definition = RecipeDefinition(Interface())
    cond_interface_1 = Interface()
    cond_interface_1.add_parameter(JsonParameter('cond_int', 'integer'))
    definition.add_job_node('A', job_type.name, job_type.version, job_type.revision_num)
    # df1 ORs "== 0" with "!= 0", so it accepts any integer
    df1 = DataFilter(filter_list=[{
        'name': 'cond_int',
        'type': 'integer',
        'condition': '==',
        'values': [0]
    }, {
        'name': 'cond_int',
        'type': 'integer',
        'condition': '!=',
        'values': [0]
    }], all=False)  #always True
    # df2 ANDs the same two filters, so it can never accept
    df2 = DataFilter(filter_list=[{
        'name': 'cond_int',
        'type': 'integer',
        'condition': '==',
        'values': [0]
    }, {
        'name': 'cond_int',
        'type': 'integer',
        'condition': '!=',
        'values': [0]
    }], all=True)  #always False
    definition.add_condition_node('B', cond_interface_1, df1)  #True
    definition.add_condition_node('C', cond_interface_1, df1)  #True
    definition.add_condition_node('D', cond_interface_1, df2)  #False
    definition.add_job_node('E', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('F', job_type.name, job_type.version, job_type.revision_num)
    definition.add_recipe_node('G', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_recipe_node('H', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_dependency('A', 'D')
    definition.add_dependency('A', 'E')
    definition.add_dependency('B', 'E')
    definition.add_dependency('B', 'F')
    definition.add_dependency('C', 'F')
    definition.add_dependency('D', 'G')
    definition.add_dependency('E', 'G')
    definition.add_dependency('E', 'H')
    definition_json_dict = convert_recipe_definition_to_v6_json(
        definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type_v6(
        definition=definition_json_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_dict)

    # Nodes A, B, and D already exist
    # A is completed; condition B was processed and accepted; condition D was
    # processed and rejected
    job_a = job_test_utils.create_job(job_type=job_type, status='COMPLETED', input=data_dict, output=data_dict, save=True)
    condition_b = recipe_test_utils.create_recipe_condition(
        is_processed=True, is_accepted=True, save=False)
    condition_d = recipe_test_utils.create_recipe_condition(
        is_processed=True, is_accepted=False, save=False)
    RecipeCondition.objects.bulk_create([condition_b, condition_d])
    recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='A', job=job_a, save=False)
    recipe_node_b = recipe_test_utils.create_recipe_node(
        recipe=recipe, node_name='B', condition=condition_b, save=False)
    recipe_node_d = recipe_test_utils.create_recipe_node(
        recipe=recipe, node_name='D', condition=condition_d, save=False)
    RecipeNode.objects.bulk_create(
        [recipe_node_a, recipe_node_b, recipe_node_d])

    recipe_instance = Recipe.objects.get_recipe_instance(recipe.id)
    nodes_to_process = recipe_instance.get_nodes_to_process_input()

    # Per the assert: C (no dependencies, not created yet) and E (parents A and B
    # both done/accepted) are ready for input; F still waits on C, and D's
    # rejection keeps its downstream from processing
    self.assertSetEqual(set(nodes_to_process.keys()), {'C', 'E'})
def test_successful(self, mock_msg_mgr):
    """Tests successfully calling the v6 batch comparison view"""
    # NOTE: mock_msg_mgr is injected by a @patch decorator above this method
    # (outside this view) - it is unused directly here

    job_type_1 = job_test_utils.create_seed_job_type()
    job_type_2 = job_test_utils.create_seed_job_type()
    job_type_3 = job_test_utils.create_seed_job_type()

    # Revision 1 of the recipe type: job_a -> job_b
    rt_definition_1 = {
        'version': '6',
        'input': {'files': [{'name': 'INPUT_IMAGE', 'media_types': ['image/png'], 'required': True, 'multiple': False}],
                  'json': []},
        'nodes': {
            'job_a': {
                'dependencies': [],
                'input': {'INPUT_IMAGE': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                'node_type': {
                    'node_type': 'job',
                    'job_type_name': job_type_1.name,
                    'job_type_version': job_type_1.version,
                    'job_type_revision': 1,
                }
            },
            'job_b': {
                'dependencies': [{'name': 'job_a'}],
                'input': {'INPUT_IMAGE': {'type': 'dependency', 'node': 'job_a', 'output': 'OUTPUT_IMAGE'}},
                'node_type': {
                    'node_type': 'job',
                    'job_type_name': job_type_2.name,
                    'job_type_version': job_type_2.version,
                    'job_type_revision': 1,
                }
            }
        }
    }

    # Revision 2: job_a is replaced by job_c (new job type); job_b now follows job_c
    rt_definition_2 = {
        'version': '6',
        'input': {'files': [{'name': 'INPUT_IMAGE', 'media_types': ['image/png'], 'required': True, 'multiple': False}],
                  'json': []},
        'nodes': {
            'job_c': {
                'dependencies': [],
                'input': {'INPUT_IMAGE': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                'node_type': {
                    'node_type': 'job',
                    'job_type_name': job_type_3.name,
                    'job_type_version': job_type_3.version,
                    'job_type_revision': 1,
                }
            },
            'job_b': {
                'dependencies': [{'name': 'job_c'}],
                'input': {'INPUT_IMAGE': {'type': 'dependency', 'node': 'job_c', 'output': 'OUTPUT_IMAGE'}},
                'node_type': {
                    'node_type': 'job',
                    'job_type_name': job_type_2.name,
                    'job_type_version': job_type_2.version,
                    'job_type_revision': 1,
                }
            }
        }
    }
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=rt_definition_1)

    # Create a chain of two batches
    batch_1 = batch_test_utils.create_batch(recipe_type=recipe_type, is_creation_done=True, recipes_total=2)
    # Right now test utils will automatically have batch_1 supersede another batch, so we reset this so batch_1 is
    # its own chain
    batch_1.root_batch_id = batch_1.id
    batch_1.superseded_batch = None
    batch_1.save()

    # Change recipe type to new revision
    recipe_test_utils.edit_recipe_type_v6(recipe_type=recipe_type, definition=rt_definition_2, auto_update=True)
    recipe_type = RecipeType.objects.get(id=recipe_type.id)

    # batch_2 joins batch_1's chain via the shared root_batch_id
    definition_2 = BatchDefinition()
    definition_2.root_batch_id = batch_1.root_batch_id
    batch_2 = batch_test_utils.create_batch(recipe_type=recipe_type, definition=definition_2)

    # Set metrics to test values
    Batch.objects.filter(id=batch_1.id).update(jobs_total=24, jobs_pending=0, jobs_blocked=10, jobs_queued=0,
                                               jobs_running=0, jobs_failed=2, jobs_completed=12, jobs_canceled=0,
                                               recipes_estimated=2, recipes_total=2, recipes_completed=1)
    Batch.objects.filter(id=batch_2.id).update(jobs_total=26, jobs_pending=2, jobs_blocked=6, jobs_queued=3,
                                               jobs_running=5, jobs_failed=6, jobs_completed=3, jobs_canceled=1,
                                               recipes_estimated=2, recipes_total=2, recipes_completed=0)

    # Per-job metrics for batch_1 / job_a
    min_seed_duration_1a = timedelta(seconds=43)
    avg_seed_duration_1a = timedelta(seconds=68)
    max_seed_duration_1a = timedelta(seconds=77)
    min_job_duration_1a = timedelta(seconds=45)
    avg_job_duration_1a = timedelta(seconds=70)
    max_job_duration_1a = timedelta(seconds=79)
    qry = BatchMetrics.objects.filter(batch_id=batch_1.id, job_name='job_a')
    qry.update(jobs_total=12, jobs_pending=0, jobs_blocked=0, jobs_queued=0, jobs_running=0, jobs_failed=0,
               jobs_completed=12, jobs_canceled=0, min_seed_duration=min_seed_duration_1a,
               avg_seed_duration=avg_seed_duration_1a, max_seed_duration=max_seed_duration_1a,
               min_job_duration=min_job_duration_1a, avg_job_duration=avg_job_duration_1a,
               max_job_duration=max_job_duration_1a)

    # Per-job metrics for batch_1 / job_b
    min_seed_duration_1b = timedelta(seconds=15)
    avg_seed_duration_1b = timedelta(seconds=18)
    max_seed_duration_1b = timedelta(seconds=23)
    min_job_duration_1b = timedelta(seconds=18)
    avg_job_duration_1b = timedelta(seconds=21)
    max_job_duration_1b = timedelta(seconds=26)
    qry = BatchMetrics.objects.filter(batch_id=batch_1.id, job_name='job_b')
    qry.update(jobs_total=12, jobs_pending=0, jobs_blocked=10, jobs_queued=0, jobs_running=0, jobs_failed=2,
               jobs_completed=0, jobs_canceled=0, min_seed_duration=min_seed_duration_1b,
               avg_seed_duration=avg_seed_duration_1b, max_seed_duration=max_seed_duration_1b,
               min_job_duration=min_job_duration_1b, avg_job_duration=avg_job_duration_1b,
               max_job_duration=max_job_duration_1b)

    # Per-job metrics for batch_2 / job_b
    min_seed_duration_2b = timedelta(seconds=9)
    avg_seed_duration_2b = timedelta(seconds=12)
    max_seed_duration_2b = timedelta(seconds=17)
    min_job_duration_2b = timedelta(seconds=12)
    avg_job_duration_2b = timedelta(seconds=15)
    max_job_duration_2b = timedelta(seconds=20)
    qry = BatchMetrics.objects.filter(batch_id=batch_2.id, job_name='job_b')
    qry.update(jobs_total=13, jobs_pending=0, jobs_blocked=0, jobs_queued=0, jobs_running=3, jobs_failed=6,
               jobs_completed=3, jobs_canceled=1, min_seed_duration=min_seed_duration_2b,
               avg_seed_duration=avg_seed_duration_2b, max_seed_duration=max_seed_duration_2b,
               min_job_duration=min_job_duration_2b, avg_job_duration=avg_job_duration_2b,
               max_job_duration=max_job_duration_2b)

    # Per-job metrics for batch_2 / job_c
    min_seed_duration_2c = timedelta(seconds=101)
    avg_seed_duration_2c = timedelta(seconds=136)
    max_seed_duration_2c = timedelta(seconds=158)
    min_job_duration_2c = timedelta(seconds=111)
    avg_job_duration_2c = timedelta(seconds=146)
    max_job_duration_2c = timedelta(seconds=168)
    qry = BatchMetrics.objects.filter(batch_id=batch_2.id, job_name='job_c')
    qry.update(jobs_total=13, jobs_pending=2, jobs_blocked=6, jobs_queued=3, jobs_running=2, jobs_failed=0,
               jobs_completed=0, jobs_canceled=0, min_seed_duration=min_seed_duration_2c,
               avg_seed_duration=avg_seed_duration_2c, max_seed_duration=max_seed_duration_2c,
               min_job_duration=min_job_duration_2c, avg_job_duration=avg_job_duration_2c,
               max_job_duration=max_job_duration_2c)

    # Expected per-job metric pairs [batch_1 value, batch_2 value]; None where the
    # job name does not exist in that batch's revision
    expected_job_metrics = {'job_a': {'jobs_total': [12, None], 'jobs_pending': [0, None],
                                      'jobs_blocked': [0, None], 'jobs_queued': [0, None],
                                      'jobs_running': [0, None], 'jobs_failed': [0, None],
                                      'jobs_completed': [12, None], 'jobs_canceled': [0, None],
                                      'min_seed_duration': [duration_to_string(min_seed_duration_1a), None],
                                      'avg_seed_duration': [duration_to_string(avg_seed_duration_1a), None],
                                      'max_seed_duration': [duration_to_string(max_seed_duration_1a), None],
                                      'min_job_duration': [duration_to_string(min_job_duration_1a), None],
                                      'avg_job_duration': [duration_to_string(avg_job_duration_1a), None],
                                      'max_job_duration': [duration_to_string(max_job_duration_1a), None]},
                            'job_b': {'jobs_total': [12, 13], 'jobs_pending': [0, 0], 'jobs_blocked': [10, 0],
                                      'jobs_queued': [0, 0], 'jobs_running': [0, 3], 'jobs_failed': [2, 6],
                                      'jobs_completed': [0, 3], 'jobs_canceled': [0, 1],
                                      'min_seed_duration': [duration_to_string(min_seed_duration_1b),
                                                            duration_to_string(min_seed_duration_2b)],
                                      'avg_seed_duration': [duration_to_string(avg_seed_duration_1b),
                                                            duration_to_string(avg_seed_duration_2b)],
                                      'max_seed_duration': [duration_to_string(max_seed_duration_1b),
                                                            duration_to_string(max_seed_duration_2b)],
                                      'min_job_duration': [duration_to_string(min_job_duration_1b),
                                                           duration_to_string(min_job_duration_2b)],
                                      'avg_job_duration': [duration_to_string(avg_job_duration_1b),
                                                           duration_to_string(avg_job_duration_2b)],
                                      'max_job_duration': [duration_to_string(max_job_duration_1b),
                                                           duration_to_string(max_job_duration_2b)]},
                            'job_c': {'jobs_total': [None, 13], 'jobs_pending': [None, 2], 'jobs_blocked': [None, 6],
                                      'jobs_queued': [None, 3], 'jobs_running': [None, 2], 'jobs_failed': [None, 0],
                                      'jobs_completed': [None, 0], 'jobs_canceled': [None, 0],
                                      'min_seed_duration': [None, duration_to_string(min_seed_duration_2c)],
                                      'avg_seed_duration': [None, duration_to_string(avg_seed_duration_2c)],
                                      'max_seed_duration': [None, duration_to_string(max_seed_duration_2c)],
                                      'min_job_duration': [None, duration_to_string(min_job_duration_2c)],
                                      'avg_job_duration': [None, duration_to_string(avg_job_duration_2c)],
                                      'max_job_duration': [None, duration_to_string(max_job_duration_2c)]}
                            }
    expected_result = {'batches': [{'id': batch_1.id, 'title': batch_1.title,
                                    'description': batch_1.description,
                                    'created': datetime_to_string(batch_1.created)},
                                   {'id': batch_2.id, 'title': batch_2.title,
                                    'description': batch_2.description,
                                    'created': datetime_to_string(batch_2.created)}],
                       'metrics': {'jobs_total': [24, 26], 'jobs_pending': [0, 2], 'jobs_blocked': [10, 6],
                                   'jobs_queued': [0, 3], 'jobs_running': [0, 5], 'jobs_failed': [2, 6],
                                   'jobs_completed': [12, 3], 'jobs_canceled': [0, 1], 'recipes_estimated': [2, 2],
                                   'recipes_total': [2, 2], 'recipes_completed': [1, 0],
                                   'job_metrics': expected_job_metrics}
                       }

    # The comparison view is keyed by the chain's root batch ID
    url = '/v6/batches/comparison/%d/' % batch_2.root_batch_id
    response = self.client.get(url)
    self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)
    result = json.loads(response.content)
    self.assertDictEqual(result, expected_result)
def test_execute_with_data(self):
    """Tests calling ProcessRecipeInput.execute() successfully when the recipe already has data populated"""

    # A workspace holding three input files of known sizes
    workspace = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(workspace=workspace, file_size=10485760.0)
    file_2 = storage_test_utils.create_file(workspace=workspace, file_size=104857600.0)
    file_3 = storage_test_utils.create_file(workspace=workspace, file_size=987654321.0)

    # Recipe type with one single-file input and one multi-file input
    interface = Interface()
    interface.add_parameter(FileParameter('input_a', ['text/plain']))
    interface.add_parameter(FileParameter('input_b', ['text/plain'], multiple=True))
    recipe_def = RecipeDefinition(interface)
    recipe_type = recipe_test_utils.create_recipe_type_v6(
        definition=convert_recipe_definition_to_v6_json(recipe_def).get_dict())

    # The recipe is created with its input data already populated
    input_data = Data()
    input_data.add_value(FileValue('input_a', [file_1.id]))
    input_data.add_value(FileValue('input_b', [file_2.id, file_3.id]))
    recipe = recipe_test_utils.create_recipe(
        recipe_type=recipe_type,
        input=convert_data_to_v6_json(input_data).get_dict())

    # Create and execute the message
    message = ProcessRecipeInput()
    message.recipe_id = recipe.id
    self.assertTrue(message.execute())
    recipe = Recipe.objects.get(id=recipe.id)

    # An update_recipe message should have been queued for this recipe
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'update_recipe')
    self.assertEqual(message.new_messages[0].root_recipe_id, recipe.id)

    # The combined size of the three input files should have been recorded
    # (units per the ScaleFile/Recipe convention - appears to be MiB)
    self.assertEqual(recipe.input_file_size, 1052.0)

    # Each input file should now have a RecipeInputFile model tied to the
    # correct recipe input name
    expected_inputs = {file_1.id: 'input_a', file_2.id: 'input_b', file_3.id: 'input_b'}
    recipe_input_files = RecipeInputFile.objects.filter(recipe_id=recipe.id)
    self.assertEqual(len(recipe_input_files), 3)
    for recipe_input_file in recipe_input_files:
        if recipe_input_file.input_file_id not in expected_inputs:
            self.fail('Invalid input file ID: %s' % recipe_input_file.input_file_id)
        self.assertEqual(recipe_input_file.recipe_input,
                         expected_inputs[recipe_input_file.input_file_id])

    # Re-executing the message (round-tripped through JSON) must be idempotent
    message = ProcessRecipeInput.from_json(message.to_json())
    self.assertTrue(message.execute())

    # Still should have update_recipe message
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'update_recipe')

    # Input file models are unchanged by the second execution
    self.assertEqual(len(RecipeInputFile.objects.filter(recipe_id=recipe.id)), 3)
def test_execute_with_recipe_legacy(self):
    """Tests calling ProcessRecipeInput.execute() successfully when a legacy sub-recipe has to get its data from its recipe
    """

    workspace = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(workspace=workspace, file_size=104857600.0)
    file_2 = storage_test_utils.create_file(workspace=workspace, file_size=987654321.0)
    file_3 = storage_test_utils.create_file(workspace=workspace, file_size=65456.0)
    file_4 = storage_test_utils.create_file(workspace=workspace, file_size=24564165456.0)

    # Seed job type A: no inputs, single file output 'output_a'
    manifest_a = {
        'seedVersion': '1.0.0',
        'job': {
            'name': 'job-a',
            'jobVersion': '1.0.0',
            'packageVersion': '1.0.0',
            'title': '',
            'description': '',
            'maintainer': {
                'name': 'John Doe',
                'email': '*****@*****.**'
            },
            'timeout': 10,
            'interface': {
                'command': '',
                'inputs': {
                    'files': [],
                    'json': []
                },
                'outputs': {
                    'files': [{
                        'name': 'output_a',
                        'pattern': '*.png'
                    }]
                }
            }
        }
    }
    job_type_a = job_test_utils.create_seed_job_type(manifest=manifest_a)
    output_data_a = Data()
    output_data_a.add_value(FileValue('output_a', [file_1.id]))
    output_data_a_dict = convert_data_to_v6_json(output_data_a).get_dict()

    # Seed job type B: no inputs, multiple file output 'output_b'
    manifest_b = {
        'seedVersion': '1.0.0',
        'job': {
            'name': 'job-b',
            'jobVersion': '1.0.0',
            'packageVersion': '1.0.0',
            'title': '',
            'description': '',
            'maintainer': {
                'name': 'John Doe',
                'email': '*****@*****.**'
            },
            'timeout': 10,
            'interface': {
                'command': '',
                'inputs': {
                    'files': [],
                    'json': []
                },
                'outputs': {
                    'files': [{
                        'name': 'output_b',
                        'pattern': '*.png',
                        'multiple': True
                    }]
                }
            }
        }
    }
    job_type_b = job_test_utils.create_seed_job_type(manifest=manifest_b)
    output_data_b = Data()
    output_data_b.add_value(
        FileValue('output_b', [file_2.id, file_3.id, file_4.id]))
    output_data_b_dict = convert_data_to_v6_json(output_data_b).get_dict()

    # Completed jobs whose outputs will feed the sub-recipe's inputs
    job_a = job_test_utils.create_job(job_type=job_type_a, num_exes=1, status='COMPLETED',
                                      output=output_data_a_dict)
    job_b = job_test_utils.create_job(job_type=job_type_b, num_exes=1, status='COMPLETED',
                                      output=output_data_b_dict)

    # Sub-recipe C takes one file on input_a and many files on input_b.
    # NOTE(review): the definition is serialized with the v1 converter but passed
    # to create_recipe_type_v6 - presumably exercising the legacy path; confirm
    sub_recipe_interface_c = Interface()
    sub_recipe_interface_c.add_parameter(
        FileParameter('input_a', ['image/png']))
    sub_recipe_interface_c.add_parameter(
        FileParameter('input_b', ['image/png'], multiple=True))
    sub_recipe_def_c = RecipeDefinition(sub_recipe_interface_c)
    sub_recipe_def_dict_c = convert_recipe_definition_to_v1_json(
        sub_recipe_def_c).get_dict()
    sub_recipe_type_c = recipe_test_utils.create_recipe_type_v6(
        definition=sub_recipe_def_dict_c)
    sub_recipe_c = recipe_test_utils.create_recipe(
        recipe_type=sub_recipe_type_c)

    # Parent recipe: node_a and node_b feed node_c's inputs via connections
    definition = RecipeDefinition(Interface())
    definition.add_job_node('node_a', job_type_a.name, job_type_a.version, job_type_a.revision_num)
    definition.add_job_node('node_b', job_type_b.name, job_type_b.version, job_type_b.revision_num)
    definition.add_recipe_node('node_c', sub_recipe_type_c.name, sub_recipe_type_c.revision_num)
    definition.add_dependency('node_c', 'node_a')
    definition.add_dependency_input_connection('node_c', 'input_a', 'node_a', 'output_a')
    definition.add_dependency('node_c', 'node_b')
    definition.add_dependency_input_connection('node_c', 'input_b', 'node_b', 'output_b')
    def_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type_v6(
        definition=def_dict)

    # Legacy (v1) recipe input data with a workspace ID
    recipe_data_dict = {
        'version': '1.0',
        'input_data': [],
        'workspace_id': workspace.id
    }
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=recipe_data_dict)
    recipe_node_a = recipe_test_utils.create_recipe_node(
        recipe=recipe, node_name='node_a', job=job_a)
    recipe_node_b = recipe_test_utils.create_recipe_node(
        recipe=recipe, node_name='node_b', job=job_b)
    recipe_node_c = recipe_test_utils.create_recipe_node(
        recipe=recipe, node_name='node_c', sub_recipe=sub_recipe_c)
    RecipeNode.objects.bulk_create(
        [recipe_node_a, recipe_node_b, recipe_node_c])
    job_a.recipe = recipe
    job_a.save()
    job_b.recipe = recipe
    job_b.save()
    sub_recipe_c.recipe = recipe
    sub_recipe_c.save()

    # Create message
    message = ProcessRecipeInput()
    message.recipe_id = sub_recipe_c.id

    # Execute message
    result = message.execute()
    self.assertTrue(result)
    sub_recipe_c = Recipe.objects.get(id=sub_recipe_c.id)

    # Check for update_recipe message
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'update_recipe')

    # Check sub-recipe for expected input_file_size
    self.assertEqual(sub_recipe_c.input_file_size, 24469.0)

    # Check sub-recipe for expected input data
    # NOTE(review): asserting version '7' while the original comment says legacy
    # data - presumably the input was converted on execute; confirm
    self.assertEqual(sub_recipe_c.input['version'], '7')  # Should be legacy input data with workspace ID
    self.assertSetEqual(set(sub_recipe_c.get_input_data().values.keys()),
                        {'input_a', 'input_b'})
    self.assertListEqual(
        sub_recipe_c.get_input_data().values['input_a'].file_ids,
        [file_1.id])
    self.assertListEqual(
        sub_recipe_c.get_input_data().values['input_b'].file_ids,
        [file_2.id, file_3.id, file_4.id])

    # Make sure sub-recipe input file models are created
    input_files = RecipeInputFile.objects.filter(recipe_id=sub_recipe_c.id)
    self.assertEqual(len(input_files), 4)
    file_ids = {input_file.input_file_id for input_file in input_files}
    self.assertSetEqual(file_ids,
                        {file_1.id, file_2.id, file_3.id, file_4.id})

    # Test executing message again
    message_json_dict = message.to_json()
    message = ProcessRecipeInput.from_json(message_json_dict)
    result = message.execute()
    self.assertTrue(result)

    # Still should have update_recipe message
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'update_recipe')

    # Make sure recipe input file models are unchanged
    input_files = RecipeInputFile.objects.filter(recipe_id=sub_recipe_c.id)
    self.assertEqual(len(input_files), 4)