def setUp(self):
    """Set up two recipe types, each with one batch in a different state."""
    django.setup()

    # Two distinct recipe types so batch filtering/ordering by type can be tested
    self.recipe_type1 = recipe_test_utils.create_recipe_type(name='test1', version='1.0')
    self.recipe_type2 = recipe_test_utils.create_recipe_type(name='test2', version='1.0')

    # One batch per recipe type, with differing statuses
    self.batch1 = batch_test_utils.create_batch(recipe_type=self.recipe_type1, status='SUBMITTED')
    self.batch2 = batch_test_utils.create_batch(recipe_type=self.recipe_type2, status='CREATED')
def test_get_nodes_to_create(self):
    """Tests calling Recipe.get_nodes_to_create()"""

    job_type = job_test_utils.create_job_type()
    sub_recipe_type = recipe_test_utils.create_recipe_type()

    # Create recipe: jobs A/E/F, condition nodes B/C/D, sub-recipes G/H,
    # wired together by the dependencies below
    definition = RecipeDefinition(Interface())
    definition.add_job_node('A', job_type.name, job_type.version, job_type.revision_num)
    definition.add_condition_node('B', Interface(), DataFilter(True))
    definition.add_condition_node('C', Interface(), DataFilter(True))
    definition.add_condition_node('D', Interface(), DataFilter(False))
    definition.add_job_node('E', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('F', job_type.name, job_type.version, job_type.revision_num)
    definition.add_recipe_node('G', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_recipe_node('H', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_dependency('A', 'D')
    definition.add_dependency('A', 'E')
    definition.add_dependency('B', 'E')
    definition.add_dependency('B', 'F')
    definition.add_dependency('C', 'F')
    definition.add_dependency('D', 'G')
    definition.add_dependency('E', 'G')
    definition.add_dependency('E', 'H')
    definition_json_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type(definition=definition_json_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)

    # Nodes A, B, and D already exist (B accepted, D not accepted)
    job_a = job_test_utils.create_job(job_type=job_type, status='COMPLETED', save=True)
    condition_b = recipe_test_utils.create_recipe_condition(is_processed=True, is_accepted=True, save=False)
    condition_d = recipe_test_utils.create_recipe_condition(is_processed=True, is_accepted=False, save=False)
    RecipeCondition.objects.bulk_create([condition_b, condition_d])
    recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='A', job=job_a, save=False)
    recipe_node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='B', condition=condition_b,
                                                         save=False)
    recipe_node_d = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='D', condition=condition_d,
                                                         save=False)
    RecipeNode.objects.bulk_create([recipe_node_a, recipe_node_b, recipe_node_d])

    recipe_instance = Recipe.objects.get_recipe_instance(recipe.id)
    nodes_to_create = recipe_instance.get_nodes_to_create()

    # Only C, E and H are expected to be ready for creation next
    self.assertSetEqual(set(nodes_to_create.keys()), {'C', 'E', 'H'})
def setUp(self):
    """Create three jobs and a two-job recipe (Job 2 depends on Job 1)."""
    django.setup()

    self.job_1 = job_test_utils.create_job(status='RUNNING', num_exes=1)
    self.job_2 = job_test_utils.create_job(input={}, num_exes=0)
    self.job_3 = job_test_utils.create_job(status='FAILED', num_exes=1)

    # Recipe definition referencing the first two jobs' types
    job_1_type = {'name': self.job_1.job_type.name, 'version': self.job_1.job_type.version}
    job_2_type = {'name': self.job_2.job_type.name, 'version': self.job_2.job_type.version}
    definition = {
        'version': '1.0',
        'input_data': [],
        'jobs': [
            {'name': 'Job 1', 'job_type': job_1_type},
            {'name': 'Job 2', 'job_type': job_2_type, 'dependencies': [{'name': 'Job 1'}]},
        ],
    }

    self.recipe_type = recipe_test_utils.create_recipe_type(definition=definition)
    self.recipe = recipe_test_utils.create_recipe(recipe_type=self.recipe_type)
    self.recipe_job = recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='Job 1', job=self.job_1)
    self.recipe_job = recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='Job 2', job=self.job_2)
def setUp(self):
    """Create two jobs and a two-job recipe (Job 2 depends on Job 1)."""
    django.setup()

    self.job_1 = job_test_utils.create_job(status='RUNNING')
    self.job_2 = job_test_utils.create_job(data={})

    # Recipe definition referencing the two jobs' types
    job_1_type = {'name': self.job_1.job_type.name, 'version': self.job_1.job_type.version}
    job_2_type = {'name': self.job_2.job_type.name, 'version': self.job_2.job_type.version}
    definition = {
        'version': '1.0',
        'input_data': [],
        'jobs': [
            {'name': 'Job 1', 'job_type': job_1_type},
            {'name': 'Job 2', 'job_type': job_2_type, 'dependencies': [{'name': 'Job 1'}]},
        ],
    }

    self.recipe_type = recipe_test_utils.create_recipe_type(definition=definition)
    self.recipe = recipe_test_utils.create_recipe(recipe_type=self.recipe_type)
    self.recipe_job = recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='Job 1', job=self.job_1)
    self.recipe_job = recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='Job 2', job=self.job_2)
def test_successful(self):
    """Tests calling the queue recipe view successfully."""
    recipe_type = recipe_test_utils.create_recipe_type()
    workspace = storage_test_utils.create_workspace()

    # POST payload: queue a new recipe of the created type with empty input
    payload = {
        'recipe_type_id': recipe_type.id,
        'recipe_data': {'version': '1.0', 'input_data': [], 'workspace_id': workspace.id},
    }

    url = rest_util.get_url('/queue/new-recipe/')
    response = self.client.generic('POST', url, json.dumps(payload), 'application/json')
    self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.content)

    # The created resource should be identified in the body and Location header
    result = json.loads(response.content)
    self.assertTrue(response['Location'])
    self.assertEqual(result['recipe_type']['id'], recipe_type.id)
def setUp(self):
    """Create three jobs and a two-job recipe (Job 2 depends on Job 1)."""
    django.setup()

    self.job_1 = job_test_utils.create_job(status="RUNNING")
    self.job_2 = job_test_utils.create_job(data={})
    self.job_3 = job_test_utils.create_job(status="FAILED")

    # Recipe definition referencing the first two jobs' types
    job_1_type = {"name": self.job_1.job_type.name, "version": self.job_1.job_type.version}
    job_2_type = {"name": self.job_2.job_type.name, "version": self.job_2.job_type.version}
    definition = {
        "version": "1.0",
        "input_data": [],
        "jobs": [
            {"name": "Job 1", "job_type": job_1_type},
            {"name": "Job 2", "job_type": job_2_type, "dependencies": [{"name": "Job 1"}]},
        ],
    }

    self.recipe_type = recipe_test_utils.create_recipe_type(definition=definition)
    self.recipe = recipe_test_utils.create_recipe(recipe_type=self.recipe_type)
    self.recipe_job = recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name="Job 1", job=self.job_1)
    self.recipe_job = recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name="Job 2", job=self.job_2)
def test_json(self):
    """Tests converting a ProcessCondition message to and from JSON"""

    # Recipe with a single condition node
    definition = RecipeDefinition(Interface())
    # TODO: once DataFilter is implemented, create a DataFilter object here that accepts the inputs
    definition.add_condition_node('node_a', Interface(), DataFilter(True))
    definition_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type(definition=definition_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)
    condition = recipe_test_utils.create_recipe_condition(recipe=recipe, save=True)
    recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_a', condition=condition, save=True)

    # Create message
    message = create_process_condition_messages([condition.id])[0]

    # Convert message to JSON and back, and then execute
    message_json_dict = message.to_json()
    new_message = ProcessCondition.from_json(message_json_dict)
    result = new_message.execute()
    self.assertTrue(result)

    # Re-fetch the condition to see the changes made by execute()
    condition = RecipeCondition.objects.get(id=condition.id)
    # Executing should trigger an update of the owning recipe
    self.assertEqual(len(new_message.new_messages), 1)
    self.assertEqual(new_message.new_messages[0].type, 'update_recipe')
    self.assertEqual(new_message.new_messages[0].root_recipe_id, recipe.id)
    # The condition should now be processed and accepted
    self.assertTrue(condition.is_processed)
    self.assertIsNotNone(condition.processed)
    self.assertTrue(condition.is_accepted)
def test_execute_force_stop(self): """Tests calling PurgeRecipe.execute() successfully""" # Create PurgeResults entry file_2 = storage_test_utils.create_file(file_type='SOURCE') trigger = trigger_test_utils.create_trigger_event() PurgeResults.objects.create(source_file_id=file_2.id, trigger_event=trigger, force_stop_purge=True) self.assertEqual( PurgeResults.objects.values_list( 'num_recipes_deleted', flat=True).get(trigger_event=trigger.id), 0) # Create recipes recipe_type = recipe_test_utils.create_recipe_type() recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type) # Create message message = create_purge_recipe_message(recipe_id=recipe.id, trigger_id=trigger.id, source_file_id=file_2.id) # Execute message result = message.execute() self.assertTrue(result) # Check results are accurate self.assertEqual( PurgeResults.objects.values_list( 'num_recipes_deleted', flat=True).get(source_file_id=file_2.id), 0)
def test_json(self):
    """Tests converting a CreateBatchRecipes message to and from JSON"""

    # Previous batch that already produced three recipes
    recipe_type = recipe_test_utils.create_recipe_type()
    prev_batch = batch_test_utils.create_batch(recipe_type=recipe_type, is_creation_done=True, recipes_total=3)
    prev_recipes = [recipe_test_utils.create_recipe(batch=prev_batch) for _ in range(3)]

    # New batch re-processing the previous batch's recipes
    definition = BatchDefinition()
    definition.root_batch_id = prev_batch.root_batch_id
    batch = batch_test_utils.create_batch(recipe_type=recipe_type, definition=definition)

    # Create message, round-trip it through JSON, then execute
    message_json_dict = create_batch_recipes_message(batch.id).to_json()
    new_message = CreateBatchRecipes.from_json(message_json_dict)
    self.assertTrue(new_message.execute())

    # Should be one reprocess_recipes message for the three recipes
    self.assertEqual(len(new_message.new_messages), 1)
    reprocess_msg = new_message.new_messages[0]
    self.assertEqual(reprocess_msg.type, 'reprocess_recipes')
    self.assertSetEqual(set(reprocess_msg._root_recipe_ids), {recipe.id for recipe in prev_recipes})
def create_batch_old(recipe_type=None, definition=None, title=None, description=None, status=None, recipe_count=0):
    """Creates a batch model for unit testing

    :param recipe_type: The recipe type for the batch (a new one is created if None)
    :param definition: The old-style batch definition, either a dict or an
        :class:`OldBatchDefinition` (an empty definition is used if None)
    :param title: The batch title (auto-generated from a global counter if None)
    :type title: string
    :param description: The batch description (auto-generated from a global counter if None)
    :type description: string
    :param status: If provided, overrides the batch's status after creation
    :type status: string
    :param recipe_count: Number of recipes of the given type to create before the batch
    :type recipe_count: int
    :returns: The batch model
    :rtype: :class:`batch.models.Batch`
    """

    if not recipe_type:
        recipe_type = recipe_test_utils.create_recipe_type()
    if not definition:
        definition = {}
    # Accept either a raw dict or an already-wrapped definition object
    if not isinstance(definition, OldBatchDefinition):
        definition = OldBatchDefinition(definition)
    if not title:
        global BATCH_TITLE_COUNTER
        title = 'Test Batch Title %i' % BATCH_TITLE_COUNTER
        BATCH_TITLE_COUNTER += 1
    if not description:
        global BATCH_DESCRIPTION_COUNTER
        description = 'Test Batch Description %i' % BATCH_DESCRIPTION_COUNTER
        BATCH_DESCRIPTION_COUNTER += 1

    # Pre-create recipes of this type so the batch has recipes to operate on
    for _ in range(recipe_count):
        recipe_test_utils.create_recipe(recipe_type=recipe_type)

    batch = Batch.objects.create_batch_old(recipe_type=recipe_type, definition=definition, title=title,
                                           description=description)
    if status:
        batch.status = status
        batch.save()
    return batch
def test_json(self):
    """Tests converting an UpdateRecipe message to and from JSON"""

    data_dict = convert_data_to_v6_json(Data()).get_dict()
    job_failed = job_test_utils.create_job(status='FAILED', input=data_dict)
    job_pending = job_test_utils.create_job(status='PENDING')

    # Recipe where the pending job depends on the failed job
    definition = RecipeDefinition(Interface())
    definition.add_job_node('job_failed', job_failed.job_type.name, job_failed.job_type.version,
                            job_failed.job_type_rev.revision_num)
    definition.add_job_node('job_pending', job_pending.job_type.name, job_pending.job_type.version,
                            job_pending.job_type_rev.revision_num)
    definition.add_dependency('job_failed', 'job_pending')
    definition_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type(definition=definition_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_failed', job=job_failed)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_pending', job=job_pending)

    # Create message
    message = create_update_recipe_message(recipe.id)

    # Convert message to JSON and back, and then execute
    message_json_dict = message.to_json()
    new_message = UpdateRecipe.from_json(message_json_dict)
    result = new_message.execute()
    self.assertTrue(result)

    # Check for message to set job_pending to BLOCKED
    self.assertEqual(len(new_message.new_messages), 1)
    msg = new_message.new_messages[0]
    self.assertEqual(msg.type, 'blocked_jobs')
    self.assertListEqual(msg._blocked_job_ids, [job_pending.id])
def setUp(self):
    """Set up a recipe type with a single 'kml' job, one recipe of that type
    with a recipe job, and one unrelated recipe.
    """
    django.setup()

    self.job_type1 = job_test_utils.create_job_type()

    # Recipe definition: a single 'kml' job fed by the recipe's input file
    definition = {
        'version': '1.0',
        'input_data': [{
            'media_types': [
                'image/x-hdf5-image',
            ],
            'type': 'file',
            'name': 'input_file',
        }],
        'jobs': [{
            'job_type': {
                'name': self.job_type1.name,
                'version': self.job_type1.version,
            },
            'name': 'kml',
            'recipe_inputs': [{
                'job_input': 'input_file',
                'recipe_input': 'input_file',
            }],
        }],
    }
    self.recipe_type = recipe_test_utils.create_recipe_type(name='my-type', definition=definition)

    # Pass recipe_type by keyword for consistency with the other call sites
    self.recipe1 = recipe_test_utils.create_recipe(recipe_type=self.recipe_type)
    self.recipe_job1 = recipe_test_utils.create_recipe_job(recipe=self.recipe1)
    self.recipe2 = recipe_test_utils.create_recipe()
def setUp(self):
    """Set up a two-job recipe type (Job 1's output feeds Job 2), input data
    for it, and a mock queue event processor.
    """
    django.setup()

    workspace = storage_test_utils.create_workspace()
    source_file = source_test_utils.create_source(workspace=workspace)
    self.event = trigger_test_utils.create_trigger_event()

    # Job 1: takes a plain-text file, outputs PNG files
    interface_1 = {
        "version": "1.0",
        "command": "test_command",
        "command_arguments": "test_arg",
        "input_data": [{"name": "Test Input 1", "type": "file", "media_types": ["text/plain"]}],
        "output_data": [{"name": "Test Output 1", "type": "files", "media_type": "image/png"}],
    }
    self.job_type_1 = job_test_utils.create_job_type(interface=interface_1)

    # Job 2: takes image files, outputs a single file
    interface_2 = {
        "version": "1.0",
        "command": "test_command",
        "command_arguments": "test_arg",
        "input_data": [{"name": "Test Input 2", "type": "files", "media_types": ["image/png", "image/tiff"]}],
        "output_data": [{"name": "Test Output 2", "type": "file"}],
    }
    self.job_type_2 = job_test_utils.create_job_type(interface=interface_2)

    # Recipe: the recipe input feeds Job 1; Job 1's output connects to Job 2
    definition = {
        "version": "1.0",
        "input_data": [{"name": "Recipe Input", "type": "file", "media_types": ["text/plain"]}],
        "jobs": [
            {
                "name": "Job 1",
                "job_type": {"name": self.job_type_1.name, "version": self.job_type_1.version},
                "recipe_inputs": [{"recipe_input": "Recipe Input", "job_input": "Test Input 1"}],
            },
            {
                "name": "Job 2",
                "job_type": {"name": self.job_type_2.name, "version": self.job_type_2.version},
                "dependencies": [
                    {"name": "Job 1", "connections": [{"output": "Test Output 1", "input": "Test Input 2"}]}
                ],
            },
        ],
    }
    # Validate that the job interfaces wire together before creating the type
    recipe_definition = RecipeDefinition(definition)
    recipe_definition.validate_job_interfaces()
    self.recipe_type = recipe_test_utils.create_recipe_type(definition=definition)

    # Input data used by the tests to queue the recipe
    self.data = {
        "version": "1.0",
        "input_data": [{"name": "Recipe Input", "file_id": source_file.id}],
        "workspace_id": workspace.id,
    }

    # Register a fake processor
    self.mock_processor = MagicMock(QueueEventProcessor)
    Queue.objects.register_processor(lambda: self.mock_processor)
def test_order_by(self):
    """Tests successfully calling the batches view with sorting."""

    # Add two more versions of recipe type 'test1', each with its own batch
    recipe_type1b = recipe_test_utils.create_recipe_type(name='test1', version='2.0')
    batch_test_utils.create_batch(recipe_type=recipe_type1b)
    recipe_type1c = recipe_test_utils.create_recipe_type(name='test1', version='3.0')
    batch_test_utils.create_batch(recipe_type=recipe_type1c)

    url = rest_util.get_url('/batches/?order=recipe_type__name&order=-recipe_type__version')
    response = self.client.generic('GET', url)
    self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)

    results = json.loads(response.content)['results']
    self.assertEqual(len(results), 4)

    # Expect ordering by type name ascending, then version descending
    expected_type_ids = [recipe_type1c.id, recipe_type1b.id, self.recipe_type1.id, self.recipe_type2.id]
    for entry, expected_id in zip(results, expected_type_ids):
        self.assertEqual(entry['recipe_type']['id'], expected_id)
def test_execute_with_sub_recipe(self):
    """Tests calling PurgeRecipe.execute() successfully"""

    # Recipe type containing a single sub-recipe node 'A'
    sub_recipe_type = recipe_test_utils.create_recipe_type()
    definition = RecipeDefinition(Interface())
    definition.add_recipe_node('A', sub_recipe_type.name, sub_recipe_type.revision_num)

    # Sub-recipe with some job progress recorded on it
    recipe_a = recipe_test_utils.create_recipe(recipe_type=sub_recipe_type, save=False)
    recipe_a.jobs_completed = 3
    recipe_a.jobs_running = 2
    recipe_a.jobs_total = 5
    Recipe.objects.bulk_create([recipe_a])

    # Parent recipe with node 'A' pointing at the sub-recipe
    definition_json_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type(definition=definition_json_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)
    recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='A', sub_recipe=recipe_a,
                                                         save=False)
    RecipeNode.objects.bulk_create([recipe_node_a])

    # Create and execute the purge message for the parent recipe
    message = create_purge_recipe_message(recipe_id=recipe.id, trigger_id=self.trigger.id,
                                          source_file_id=self.file_1.id)
    self.assertTrue(message.execute())

    # Exactly one purge_recipe message should target the sub-recipe
    purge_msgs = [msg for msg in message.new_messages if msg.type == 'purge_recipe']
    self.assertEqual(len(purge_msgs), 1)
    for purge_msg in purge_msgs:
        self.assertEqual(purge_msg.recipe_id, recipe_node_a.sub_recipe.id)
def test_json_forced_nodes(self):
    """Tests converting an UpdateRecipe message to and from JSON when forced nodes are provided"""

    data_dict = convert_data_to_v6_json(Data()).get_dict()
    job_completed = job_test_utils.create_job(status='COMPLETED', input=data_dict, output=data_dict)
    sub_recipe_type = recipe_test_utils.create_recipe_type()

    # Recipe where the sub-recipe depends on the completed job
    definition = RecipeDefinition(Interface())
    definition.add_job_node('job_completed', job_completed.job_type.name, job_completed.job_type.version,
                            job_completed.job_type_rev.revision_num)
    definition.add_recipe_node('the_sub_recipe', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_dependency('job_completed', 'the_sub_recipe')
    definition_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type(definition=definition_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_dict)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_completed', job=job_completed)

    # Force all nodes within the sub-recipe to be processed
    forced_nodes = ForcedNodes()
    sub_forced_nodes = ForcedNodes()
    sub_forced_nodes.set_all_nodes()
    forced_nodes.add_subrecipe('the_sub_recipe', sub_forced_nodes)

    # Create message
    message = create_update_recipe_message(recipe.id, forced_nodes=forced_nodes)

    # Convert message to JSON and back, and then execute
    message_json_dict = message.to_json()
    new_message = UpdateRecipe.from_json(message_json_dict)
    result = new_message.execute()
    self.assertTrue(result)

    # Check for message to create sub-recipe, with the forced nodes preserved
    self.assertEqual(len(new_message.new_messages), 1)
    msg = new_message.new_messages[0]
    self.assertEqual(msg.type, 'create_recipes')
    self.assertEqual(msg.event_id, recipe.event_id)
    msg_forced_nodes_dict = convert_forced_nodes_to_v6(msg.forced_nodes).get_dict()
    expected_forced_nodes_dict = convert_forced_nodes_to_v6(forced_nodes).get_dict()
    self.assertDictEqual(msg_forced_nodes_dict, expected_forced_nodes_dict)
    self.assertEqual(msg.create_recipes_type, SUB_RECIPE_TYPE)
    self.assertEqual(msg.recipe_id, recipe.id)
    self.assertEqual(msg.root_recipe_id, recipe.root_superseded_recipe_id)
    self.assertIsNone(msg.superseded_recipe_id)
    sub = SubRecipe(sub_recipe_type.name, sub_recipe_type.revision_num, 'the_sub_recipe', True)
    self.assertListEqual(msg.sub_recipes, [sub])
def create_batch(title=None, description=None, recipe_type=None, definition=None, configuration=None,
                 is_creation_done=False, recipes_total=None):
    """Creates a batch model for unit testing

    :param title: The batch title (auto-generated from a global counter if None)
    :param description: The batch description (auto-generated from a global counter if None)
    :param recipe_type: The recipe type for the batch (a new one is created if None)
    :param definition: The batch definition (a default one, backed by a synthetic
        previous batch, is created if None)
    :param configuration: The batch configuration (a default one is created if None)
    :param is_creation_done: Whether the batch should be marked as done creating recipes
    :param recipes_total: If given, the total recipe count to record on the batch
    :returns: The batch model
    :rtype: :class:`batch.models.Batch`
    """

    if not recipe_type:
        recipe_type = recipe_test_utils.create_recipe_type()
    if not definition:
        definition = BatchDefinition()
        # Create a previous batch so we can create a valid definition
        # TODO: this can be replaced by a DataSet once they are implemented
        prev_batch = Batch()
        prev_batch.recipe_type = recipe_type
        prev_batch.recipe_type_rev = RecipeTypeRevision.objects.get_revision(recipe_type.id,
                                                                             recipe_type.revision_num)
        prev_batch.event = TriggerEvent.objects.create_trigger_event('USER', None, {'user': '******'}, now())
        prev_batch.is_creation_done = True
        prev_batch.recipes_total = 10
        prev_batch.save()
        # A batch with no predecessor is its own root; requires a second save
        # because the id is only known after the first save
        prev_batch.root_batch_id = prev_batch.id
        prev_batch.save()
        definition.root_batch_id = prev_batch.root_batch_id
    if not configuration:
        configuration = BatchConfiguration()
    if not title:
        global BATCH_TITLE_COUNTER
        title = 'Test Batch Title %i' % BATCH_TITLE_COUNTER
        BATCH_TITLE_COUNTER += 1
    if not description:
        global BATCH_DESCRIPTION_COUNTER
        description = 'Test Batch Description %i' % BATCH_DESCRIPTION_COUNTER
        BATCH_DESCRIPTION_COUNTER += 1
    event = TriggerEvent.objects.create_trigger_event('USER', None, {'user': '******'}, now())

    batch = Batch.objects.create_batch_v6(title, description, recipe_type, event, definition, configuration)
    if is_creation_done:
        batch.is_creation_done = True
    if recipes_total is not None:
        batch.recipes_total = recipes_total
    batch.save()
    return batch
def setUp(self):
    """Set up a batch-creator recipe type, a recipe created from real input
    data, a plain recipe, and a superseded recipe.
    """
    django.setup()

    self.job_type1 = job_test_utils.create_job_type(name='scale-batch-creator')

    # Recipe definition: a single 'kml' job fed by the recipe's input file
    definition = {
        'version': '1.0',
        'input_data': [{
            'media_types': [
                'image/x-hdf5-image',
            ],
            'type': 'file',
            'name': 'input_file',
        }],
        'jobs': [{
            'job_type': {
                'name': self.job_type1.name,
                'version': self.job_type1.version,
            },
            'name': 'kml',
            'recipe_inputs': [{
                'job_input': 'input_file',
                'recipe_input': 'input_file',
            }],
        }],
    }

    # Real input data pointing at a file in a workspace
    workspace1 = storage_test_utils.create_workspace()
    file1 = storage_test_utils.create_file(workspace=workspace1)
    data = {
        'version': '1.0',
        'input_data': [{
            'name': 'input_file',
            'file_id': file1.id,
        }],
        'workspace_id': workspace1.id,
    }

    self.recipe_type = recipe_test_utils.create_recipe_type(name='my-type', definition=definition)
    recipe_handler = recipe_test_utils.create_recipe_handler(recipe_type=self.recipe_type, data=data)
    self.recipe1 = recipe_handler.recipe
    self.recipe1_jobs = recipe_handler.recipe_jobs
    self.recipe2 = recipe_test_utils.create_recipe()
    self.recipe3 = recipe_test_utils.create_recipe(is_superseded=True)
def test_has_completed_empty(self):
    """Tests calling Recipe.has_completed() when a recipe is empty and has not created its nodes yet"""

    job_type = job_test_utils.create_job_type()
    sub_recipe_type = recipe_test_utils.create_recipe_type()

    # Definition with several job and sub-recipe nodes and dependencies
    definition = RecipeDefinition(Interface())
    definition.add_job_node('A', job_type.name, job_type.version, job_type.revision_num)
    definition.add_recipe_node('B', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_job_node('C', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('D', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('E', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('F', job_type.name, job_type.version, job_type.revision_num)
    definition.add_recipe_node('G', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_job_node('H', job_type.name, job_type.version, job_type.revision_num)
    definition.add_dependency('A', 'C')
    definition.add_dependency('A', 'E')
    definition.add_dependency('A', 'H')
    definition.add_dependency('C', 'D')
    definition.add_dependency('G', 'H')
    definition_json_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type(definition=definition_json_dict)

    # No recipe nodes are created, so the recipe must not report completion
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)
    recipe_instance = Recipe.objects.get_recipe_instance(recipe.id)
    self.assertFalse(recipe_instance.has_completed())
def test_successful(self):
    """Tests calling the queue recipe view successfully."""
    recipe_type = recipe_test_utils.create_recipe_type()
    workspace = storage_test_utils.create_workspace()

    # POST payload: queue a new recipe of the created type with empty input
    payload = {
        "recipe_type_id": recipe_type.id,
        "recipe_data": {"version": "1.0", "input_data": [], "workspace_id": workspace.id},
    }

    response = self.client.generic("POST", rest_util.get_url("/queue/new-recipe/"), json.dumps(payload),
                                   "application/json")
    self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.content)

    # The created resource should be identified in the body and Location header
    result = json.loads(response.content)
    self.assertTrue(response["Location"])
    self.assertEqual(result["recipe_type"]["id"], recipe_type.id)
def setUp(self):
    """Set up a 'kml' recipe type, a recipe created from real input data, a
    plain recipe, and a superseded recipe.
    """
    django.setup()

    self.job_type1 = job_test_utils.create_job_type()

    # Recipe definition: a single 'kml' job fed by the recipe's input file
    definition = {
        'version': '1.0',
        'input_data': [{
            'media_types': [
                'image/x-hdf5-image',
            ],
            'type': 'file',
            'name': 'input_file',
        }],
        'jobs': [{
            'job_type': {
                'name': self.job_type1.name,
                'version': self.job_type1.version,
            },
            'name': 'kml',
            'recipe_inputs': [{
                'job_input': 'input_file',
                'recipe_input': 'input_file',
            }],
        }],
    }

    # Real input data pointing at a file in a workspace
    workspace1 = storage_test_utils.create_workspace()
    file1 = storage_test_utils.create_file(workspace=workspace1)
    data = {
        'version': '1.0',
        'input_data': [{
            'name': 'input_file',
            'file_id': file1.id,
        }],
        'workspace_id': workspace1.id,
    }

    self.recipe_type = recipe_test_utils.create_recipe_type(name='my-type', definition=definition)
    recipe_handler = recipe_test_utils.create_recipe_handler(recipe_type=self.recipe_type, data=data)
    self.recipe1 = recipe_handler.recipe
    self.recipe1_jobs = recipe_handler.recipe_jobs
    self.recipe2 = recipe_test_utils.create_recipe()
    self.recipe3 = recipe_test_utils.create_recipe(is_superseded=True)
def setUp(self):
    """Set up a PARSE trigger rule and a recipe type with two independent jobs."""
    django.setup()

    self.job_type1 = job_test_utils.create_job_type()
    self.job_type2 = job_test_utils.create_job_type()
    self.workspace = storage_test_utils.create_workspace()

    # Trigger on parsed plain-text files, feeding 'input_file'
    self.trigger_config = {
        'version': '1.0',
        'condition': {
            'media_type': 'text/plain',
        },
        'data': {
            'input_data_name': 'input_file',
            'workspace_name': self.workspace.name,
        }
    }
    self.trigger_rule = trigger_test_utils.create_trigger_rule(trigger_type='PARSE', is_active=True,
                                                               configuration=self.trigger_config)

    # Two jobs with no dependencies between them
    self.definition = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': self.job_type1.name,
                'version': self.job_type1.version,
            },
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': self.job_type2.name,
                'version': self.job_type2.version,
            },
        }],
    }
    self.recipe_type = recipe_test_utils.create_recipe_type(name='my-type', definition=self.definition,
                                                            trigger_rule=self.trigger_rule)
def test_execute_no_leaf_nodes(self):
    """Tests calling PurgeRecipe.execute() successfully"""

    # A recipe with no nodes at all
    empty_recipe_type = recipe_test_utils.create_recipe_type()
    empty_recipe = recipe_test_utils.create_recipe(recipe_type=empty_recipe_type)

    # Create and execute the purge message
    message = create_purge_recipe_message(recipe_id=empty_recipe.id, trigger_id=self.trigger.id,
                                          source_file_id=self.file_1.id)
    self.assertTrue(message.execute())

    # The recipe and its (nonexistent) nodes should be gone from the database
    self.assertEqual(Recipe.objects.filter(id=empty_recipe.id).count(), 0)
    self.assertEqual(RecipeNode.objects.filter(recipe=empty_recipe).count(), 0)
def setUp(self):
    """Set up a superseded SOURCE file linked to a job execution, recipe, and batch."""
    django.setup()

    self.workspace1 = storage_test_utils.create_workspace(name='ws1')
    self.country = storage_test_utils.create_country()
    self.job_type1 = job_test_utils.create_job_type(name='test1', category='test-1', is_operational=True)
    self.job1 = job_test_utils.create_job(job_type=self.job_type1)
    self.job_exe1 = job_test_utils.create_job_exe(job=self.job1)
    self.recipe_type1 = recipe_test_utils.create_recipe_type()
    self.recipe1 = recipe_test_utils.create_recipe(recipe_type=self.recipe_type1)
    self.batch1 = batch_test_utils.create_batch(recipe_type=self.recipe_type1, is_creation_done=True)

    # A file populated with every field the code under test can filter/display
    self.file = storage_test_utils.create_file(file_name='test.txt', file_type='SOURCE', media_type='image/png',
                                               file_size=1000, data_type='png', file_path='/test/path',
                                               workspace=self.workspace1, is_deleted=False, last_modified='',
                                               data_started='2017-01-01T00:00:00Z',
                                               data_ended='2017-01-01T00:00:00Z',
                                               source_started='2017-01-01T00:00:00Z',
                                               source_ended='2017-01-01T00:00:00Z', geometry='',
                                               center_point='', meta_data='', countries=[self.country],
                                               job_exe=self.job_exe1, job_output='output_name_1',
                                               recipe=self.recipe1, recipe_node='my-recipe', batch=self.batch1,
                                               is_superseded=True, superseded='2017-01-01T00:00:00Z')
def test_successful(self):
    """Tests calling the queue recipe view successfully."""
    recipe_type = recipe_test_utils.create_recipe_type()
    workspace = storage_test_utils.create_workspace()

    # POST payload: queue a new recipe of the created type with empty input
    recipe_data = {
        'version': '1.0',
        'input_data': [],
        'workspace_id': workspace.id,
    }
    json_data = {
        'recipe_type_id': recipe_type.id,
        'recipe_data': recipe_data,
    }

    url = '/queue/new-recipe/'
    response = self.client.generic('POST', url, json.dumps(json_data), 'application/json')
    # Pass response.content as the assertion message (consistent with the
    # other queue view tests) so failures show the error body
    self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.content)
def setUp(self):
    """Set up a PARSE trigger rule and a recipe type with two independent jobs."""
    django.setup()

    self.job_type1 = job_test_utils.create_job_type()
    self.job_type2 = job_test_utils.create_job_type()
    self.workspace = storage_test_utils.create_workspace()

    # Trigger on parsed plain-text files, feeding 'input_file'
    self.trigger_config = {
        'version': '1.0',
        'condition': {
            'media_type': 'text/plain',
        },
        'data': {
            'input_data_name': 'input_file',
            'workspace_name': self.workspace.name,
        }
    }
    self.trigger_rule = trigger_test_utils.create_trigger_rule(trigger_type='PARSE', is_active=True,
                                                               configuration=self.trigger_config)

    # Two jobs with no dependencies between them
    self.definition = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': self.job_type1.name,
                'version': self.job_type1.version,
            },
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': self.job_type2.name,
                'version': self.job_type2.version,
            },
        }],
    }
    self.recipe_type = recipe_test_utils.create_recipe_type(name='my-type', definition=self.definition,
                                                            trigger_rule=self.trigger_rule)
def setUp(self):
    """Set up a recipe type named 'my-type' containing two independent jobs."""
    django.setup()

    self.job_type1 = job_test_utils.create_job_type()
    self.job_type2 = job_test_utils.create_job_type()

    # Two jobs with no dependencies between them
    definition = {
        'version': '1.0',
        'input_data': [],
        'jobs': [
            {
                'name': 'Job 1',
                'job_type': {
                    'name': self.job_type1.name,
                    'version': self.job_type1.version,
                },
            },
            {
                'name': 'Job 2',
                'job_type': {
                    'name': self.job_type2.name,
                    'version': self.job_type2.version,
                },
            },
        ],
    }
    self.recipe_type = recipe_test_utils.create_recipe_type(name='my-type', definition=definition)
def test_convert_recipe_to_v6_json(self):
    """Tests calling convert_recipe_to_v6_json() successfully"""

    job_type_1 = job_test_utils.create_job_type()
    job_type_2 = job_test_utils.create_job_type()
    job_type_3 = job_test_utils.create_job_type()
    job_type_4 = job_test_utils.create_job_type()
    recipe_type_1 = recipe_test_utils.create_recipe_type()

    # Recipe interface: one file input and one JSON input
    interface = Interface()
    interface.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface.add_parameter(JsonParameter('json_param_1', 'object'))

    # Definition: job nodes A, B, C, E plus sub-recipe node D, with
    # dependencies and input connections between them
    definition = RecipeDefinition(interface)
    definition.add_job_node('A', job_type_1.name, job_type_1.version, job_type_1.revision_num)
    definition.add_job_node('B', job_type_2.name, job_type_2.version, job_type_2.revision_num)
    definition.add_job_node('C', job_type_3.name, job_type_3.version, job_type_3.revision_num)
    definition.add_recipe_node('D', recipe_type_1.name, recipe_type_1.revision_num)
    definition.add_job_node('E', job_type_4.name, job_type_4.version, job_type_4.revision_num)
    definition.add_dependency('A', 'B')
    definition.add_dependency('A', 'C')
    definition.add_dependency('B', 'E')
    definition.add_dependency('C', 'D')
    definition.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    definition.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')

    # Create the recipe's nodes in assorted states
    recipe = recipe_test_utils.create_recipe()
    job_a = job_test_utils.create_job(job_type=job_type_1, status='COMPLETED', save=False)
    job_b = job_test_utils.create_job(job_type=job_type_2, status='RUNNING', save=False)
    job_c = job_test_utils.create_job(job_type=job_type_3, status='COMPLETED', save=False)
    job_e = job_test_utils.create_job(job_type=job_type_4, status='PENDING', num_exes=0, save=False)
    Job.objects.bulk_create([job_a, job_b, job_c, job_e])
    recipe_d = recipe_test_utils.create_recipe(recipe_type=recipe_type_1)
    recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='A', job=job_a, save=False)
    recipe_node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='B', job=job_b, save=False)
    recipe_node_c = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='C', job=job_c, save=False)
    recipe_node_d = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='D', sub_recipe=recipe_d,
                                                         save=False)
    recipe_node_e = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='E', job=job_e, save=False)
    recipe_nodes = [recipe_node_a, recipe_node_b, recipe_node_c, recipe_node_d, recipe_node_e]

    recipe_instance = RecipeInstance(definition, recipe_nodes)
    json = convert_recipe_to_v6_json(recipe_instance)

    # The generated JSON must re-validate and contain every node
    RecipeInstanceV6(json=json.get_dict(), do_validate=True)  # Revalidate
    self.assertSetEqual(set(json.get_dict()['nodes'].keys()), {'A', 'B', 'C', 'D', 'E'})
def setUp(self):
    """Creates the trigger rules, job types, recipe definition, and parsed
    SourceFile shared by the tests in this class.
    """
    django.setup()

    self.input_name = 'Test Input'
    self.output_name = 'Test Output'
    self.workspace = storage_test_utils.create_workspace()

    # This job trigger should not match due to a different media type
    job_trigger_config = {
        'version': '1.0',
        'condition': {
            'media_type': 'image/png',
        },
        'data': {
            'input_data_name': self.input_name,
            'workspace_name': self.workspace.name
        },
    }
    job_trigger_rule = trigger_test_utils.create_trigger_rule(configuration=job_trigger_config)

    # Job type 1: single file input, no declared outputs
    interface_1 = {
        'version': '1.0',
        'command': 'my_cmd',
        'command_arguments': 'args',
        'input_data': [{
            'name': self.input_name,
            'type': 'file',
        }],
    }
    self.job_type_1 = job_test_utils.create_job_type(interface=interface_1, trigger_rule=job_trigger_rule)

    # Job type 2: file input plus a file output that feeds Job 1 below
    interface_2 = {
        'version': '1.0',
        'command': 'my_cmd',
        'command_arguments': 'args',
        'input_data': [{
            'name': self.input_name,
            'type': 'file',
        }],
        'output_data': [{
            'name': self.output_name,
            'type': 'file',
        }],
    }
    self.job_type_2 = job_test_utils.create_job_type(interface=interface_2, trigger_rule=job_trigger_rule)

    # create a recipe that runs both jobs: Job 2 takes the recipe input and
    # its output is wired into Job 1's input
    definition_1 = {
        'version': '1.0',
        'input_data': [{
            'name': self.input_name,
            'type': 'file',
            'required': True,
        }],
        'jobs': [{
            'name': 'Job 2',
            'job_type': {
                'name': self.job_type_2.name,
                'version': self.job_type_2.version,
            },
            'recipe_inputs': [{
                'recipe_input': self.input_name,
                'job_input': self.input_name,
            }],
        }, {
            'name': 'Job 1',
            'job_type': {
                'name': self.job_type_1.name,
                'version': self.job_type_1.version,
            },
            'dependencies': [{
                'name': 'Job 2',
                'connections': [{
                    'output': self.output_name,
                    'input': self.input_name,
                }],
            }],
        }],
    }

    # Source file whose media type matches the recipe trigger below
    self.when_parsed = now()
    self.file_name = 'my_file.txt'
    self.data_type = 'test_file_type'
    self.media_type = 'text/plain'
    self.source_file = SourceFile.objects.create(file_name=self.file_name, media_type=self.media_type,
                                                 file_size=10, data_type=self.data_type, file_path='the_path',
                                                 workspace=self.workspace)
    self.source_file.add_data_type_tag('type1')
    self.source_file.add_data_type_tag('type2')
    self.source_file.add_data_type_tag('type3')
    # NOTE(review): 'parsed' is set in memory only and not saved here —
    # presumably the tests save or re-trigger as needed; confirm.
    self.source_file.parsed = now()

    # Recipe trigger that DOES match the text/plain source file
    recipe_trigger_config = {
        'version': '1.0',
        'condition': {
            'media_type': 'text/plain',
        },
        'data': {
            'input_data_name': self.input_name,
            'workspace_name': self.workspace.name
        },
    }
    self.trigger_rule = trigger_test_utils.create_trigger_rule(configuration=recipe_trigger_config)
    self.recipe_type_1 = recipe_test_utils.create_recipe_type(definition=definition_1, trigger_rule=self.trigger_rule)
def test_execute_with_data(self):
    """Tests calling ProcessRecipeInput.execute() successfully when the recipe
    already has data populated.

    Also re-executes the same message from its JSON form to verify the
    handler is idempotent (no duplicate input-file models or messages).
    """
    workspace = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(workspace=workspace, file_size=10485760.0)
    file_2 = storage_test_utils.create_file(workspace=workspace, file_size=104857600.0)
    file_3 = storage_test_utils.create_file(workspace=workspace, file_size=987654321.0)

    # Recipe type with a single-file input and a multiple-file input
    recipe_interface = Interface()
    recipe_interface.add_parameter(FileParameter('input_a', ['text/plain']))
    recipe_interface.add_parameter(FileParameter('input_b', ['text/plain'], multiple=True))
    definition = RecipeDefinition(recipe_interface)
    definition_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type(definition=definition_dict)

    # The recipe is created with its input data already populated
    data = Data()
    data.add_value(FileValue('input_a', [file_1.id]))
    data.add_value(FileValue('input_b', [file_2.id, file_3.id]))
    data_dict = convert_data_to_v6_json(data).get_dict()
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_dict)

    # Create message
    message = ProcessRecipeInput()
    message.recipe_id = recipe.id

    # Execute message
    result = message.execute()
    self.assertTrue(result)
    recipe = Recipe.objects.get(id=recipe.id)

    # Check for update_recipes message
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'update_recipes')

    # Check recipe for expected input_file_size
    # (sum of the three file sizes above, presumably expressed in MiB — confirm units)
    self.assertEqual(recipe.input_file_size, 1052.0)

    # Make sure recipe input file models are created, one per input file
    recipe_input_files = RecipeInputFile.objects.filter(recipe_id=recipe.id)
    self.assertEqual(len(recipe_input_files), 3)
    for recipe_input_file in recipe_input_files:
        if recipe_input_file.input_file_id == file_1.id:
            self.assertEqual(recipe_input_file.recipe_input, 'input_a')
        elif recipe_input_file.input_file_id == file_2.id:
            self.assertEqual(recipe_input_file.recipe_input, 'input_b')
        elif recipe_input_file.input_file_id == file_3.id:
            self.assertEqual(recipe_input_file.recipe_input, 'input_b')
        else:
            self.fail('Invalid input file ID: %s' % recipe_input_file.input_file_id)

    # Test executing message again (round-tripped through JSON)
    message_json_dict = message.to_json()
    message = ProcessRecipeInput.from_json(message_json_dict)
    result = message.execute()
    self.assertTrue(result)

    # Still should have update_recipes message
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'update_recipes')

    # Make sure recipe input file models are unchanged
    recipe_input_files = RecipeInputFile.objects.filter(recipe_id=recipe.id)
    self.assertEqual(len(recipe_input_files), 3)
def test_successful_supersede(self):
    """Tests calling QueueManager.queue_new_recipe() successfully when superseding a recipe.

    Job 1 is identical between the two recipe types (so its completed job is
    copied into the new recipe), while Job 2 gets a new job-type version and
    must be re-queued. Only the new recipe should reach completion.
    """
    # Queue initial recipe and complete its first job
    node = node_test_utils.create_node()
    recipe_id = Queue.objects.queue_new_recipe(self.recipe_type, self.data, self.event)
    recipe = Recipe.objects.get(id=recipe_id)
    recipe_job_1 = RecipeJob.objects.select_related('job__job_exe').get(recipe_id=recipe_id, job_name='Job 1')
    job_exe_1 = JobExecution.objects.get(job_id=recipe_job_1.job_id)
    queued_job_exe = QueuedJobExecution(Queue.objects.get(job_exe_id=job_exe_1.id))
    queued_job_exe.accepted(node, JobResources(cpus=10, mem=1000, disk_in=1000, disk_out=1000, disk_total=2000))
    Queue.objects.schedule_job_executions('123', [queued_job_exe], {})
    results = JobResults()
    results.add_file_list_parameter('Test Output 1', [product_test_utils.create_product().file_id])
    JobExecution.objects.filter(id=job_exe_1.id).update(results=results.get_dict())
    Queue.objects.handle_job_completion(job_exe_1.id, now())

    # Create a new recipe type that has a new version of job 2 (job 1 is identical)
    new_job_type_2 = job_test_utils.create_job_type(name=self.job_type_2.name, version='New Version',
                                                    interface=self.job_type_2.interface)
    new_definition = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'jobs': [{
            'name': 'New Job 1',
            'job_type': {
                'name': self.job_type_1.name,
                'version': self.job_type_1.version,
            },
            'recipe_inputs': [{
                'recipe_input': 'Recipe Input',
                'job_input': 'Test Input 1',
            }]
        }, {
            'name': 'New Job 2',
            'job_type': {
                'name': new_job_type_2.name,
                'version': new_job_type_2.version,
            },
            'dependencies': [{
                'name': 'New Job 1',
                'connections': [{
                    'output': 'Test Output 1',
                    'input': 'Test Input 2',
                }]
            }]
        }]
    }
    new_recipe_type = recipe_test_utils.create_recipe_type(name=self.recipe_type.name, definition=new_definition)
    event = trigger_test_utils.create_trigger_event()
    recipe_job_1 = RecipeJob.objects.select_related('job').get(recipe_id=recipe_id, job_name='Job 1')
    recipe_job_2 = RecipeJob.objects.select_related('job').get(recipe_id=recipe_id, job_name='Job 2')
    superseded_jobs = {'Job 1': recipe_job_1.job, 'Job 2': recipe_job_2.job}
    graph_a = self.recipe_type.get_recipe_definition().get_graph()
    graph_b = new_recipe_type.get_recipe_definition().get_graph()
    delta = RecipeGraphDelta(graph_a, graph_b)

    # Queue new recipe that supersedes the old recipe
    new_recipe_id = Queue.objects.queue_new_recipe(new_recipe_type, None, event, recipe, delta, superseded_jobs)

    # Ensure old recipe is superseded
    recipe = Recipe.objects.get(id=recipe_id)
    self.assertTrue(recipe.is_superseded)

    # Ensure new recipe supersedes old recipe
    new_recipe = Recipe.objects.get(id=new_recipe_id)
    self.assertEqual(new_recipe.superseded_recipe_id, recipe_id)

    # Ensure that job 1 is already completed (it was copied from original recipe) and that job 2 is queued
    new_recipe_job_1 = RecipeJob.objects.select_related('job').get(recipe_id=new_recipe_id, job_name='New Job 1')
    new_recipe_job_2 = RecipeJob.objects.select_related('job').get(recipe_id=new_recipe_id, job_name='New Job 2')
    self.assertEqual(new_recipe_job_1.job.status, 'COMPLETED')
    self.assertFalse(new_recipe_job_1.is_original)
    self.assertEqual(new_recipe_job_2.job.status, 'QUEUED')
    self.assertTrue(new_recipe_job_2.is_original)

    # Complete both the old and new job 2 and check that only the new recipe completes
    job_exe_2 = JobExecution.objects.get(job_id=recipe_job_2.job_id)
    queued_job_exe_2 = QueuedJobExecution(Queue.objects.get(job_exe_id=job_exe_2.id))
    queued_job_exe_2.accepted(node, JobResources(cpus=10, mem=1000, disk_in=1000, disk_out=1000, disk_total=2000))
    Queue.objects.schedule_job_executions('123', [queued_job_exe_2], {})
    Queue.objects.handle_job_completion(job_exe_2.id, now())
    new_job_exe_2 = JobExecution.objects.get(job_id=new_recipe_job_2.job_id)
    new_queued_job_exe_2 = QueuedJobExecution(Queue.objects.get(job_exe_id=new_job_exe_2.id))
    new_queued_job_exe_2.accepted(node, JobResources(cpus=10, mem=1000, disk_in=1000, disk_out=1000, disk_total=2000))
    Queue.objects.schedule_job_executions('123', [new_queued_job_exe_2], {})
    Queue.objects.handle_job_completion(new_job_exe_2.id, now())
    recipe = Recipe.objects.get(id=recipe.id)
    new_recipe = Recipe.objects.get(id=new_recipe.id)
    self.assertIsNone(recipe.completed)
    self.assertIsNotNone(new_recipe.completed)
def setUp(self):
    """Creates standalone jobs plus two recipes used by the re-queue tests:
    recipe A exercises a job that should become PENDING after re-queue,
    recipe B one that should become BLOCKED.
    """
    django.setup()

    self.new_priority = 200
    self.standalone_failed_job = job_test_utils.create_job(status='FAILED', num_exes=3, priority=100)
    self.standalone_superseded_job = job_test_utils.create_job(status='FAILED', num_exes=1)
    self.standalone_canceled_job = job_test_utils.create_job(status='CANCELED', num_exes=1, priority=100)
    self.standalone_completed_job = job_test_utils.create_job(status='COMPLETED')
    Job.objects.supersede_jobs([self.standalone_superseded_job], now())

    # Create recipe for re-queing a job that should now be PENDING (and its dependencies)
    job_type_a_1 = job_test_utils.create_job_type()
    job_type_a_2 = job_test_utils.create_job_type()
    definition_a = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': job_type_a_1.name,
                'version': job_type_a_1.version,
            }
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': job_type_a_2.name,
                'version': job_type_a_2.version,
            },
            'dependencies': [{
                'name': 'Job 1'
            }],
        }],
    }
    recipe_type_a = recipe_test_utils.create_recipe_type(definition=definition_a)
    self.job_a_1 = job_test_utils.create_job(job_type=job_type_a_1, status='FAILED', num_exes=1)
    self.job_a_2 = job_test_utils.create_job(job_type=job_type_a_2, status='BLOCKED')
    data_a = {
        'version': '1.0',
        'input_data': [],
        'workspace_id': 1,
    }
    recipe_a = recipe_test_utils.create_recipe(recipe_type=recipe_type_a, data=data_a)
    recipe_test_utils.create_recipe_job(recipe=recipe_a, job_name='Job 1', job=self.job_a_1)
    recipe_test_utils.create_recipe_job(recipe=recipe_a, job_name='Job 2', job=self.job_a_2)

    # Create recipe for re-queing a job that should now be BLOCKED (and its dependencies)
    job_type_b_1 = job_test_utils.create_job_type()
    job_type_b_2 = job_test_utils.create_job_type()
    job_type_b_3 = job_test_utils.create_job_type()
    definition_b = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': job_type_b_1.name,
                'version': job_type_b_1.version,
            }
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': job_type_b_2.name,
                'version': job_type_b_2.version,
            },
            'dependencies': [{
                'name': 'Job 1'
            }],
        }, {
            'name': 'Job 3',
            'job_type': {
                'name': job_type_b_3.name,
                'version': job_type_b_3.version,
            },
            'dependencies': [{
                'name': 'Job 2'
            }],
        }],
    }
    recipe_type_b = recipe_test_utils.create_recipe_type(definition=definition_b)
    self.job_b_1 = job_test_utils.create_job(job_type=job_type_b_1, status='FAILED')
    self.job_b_2 = job_test_utils.create_job(job_type=job_type_b_2, status='CANCELED')
    self.job_b_3 = job_test_utils.create_job(job_type=job_type_b_3, status='BLOCKED')
    data_b = {
        'version': '1.0',
        'input_data': [],
        'workspace_id': 1,
    }
    recipe_b = recipe_test_utils.create_recipe(recipe_type=recipe_type_b, data=data_b)
    recipe_test_utils.create_recipe_job(recipe=recipe_b, job_name='Job 1', job=self.job_b_1)
    recipe_test_utils.create_recipe_job(recipe=recipe_b, job_name='Job 2', job=self.job_b_2)
    recipe_test_utils.create_recipe_job(recipe=recipe_b, job_name='Job 3', job=self.job_b_3)

    # Job IDs to re-queue
    self.job_ids = [self.standalone_failed_job.id, self.standalone_canceled_job.id,
                    self.standalone_completed_job.id, self.job_a_1.id, self.job_b_2.id]

    # Register a fake processor
    self.mock_processor = MagicMock(QueueEventProcessor)
    Queue.objects.register_processor(lambda: self.mock_processor)
def test_json(self):
    """Tests converting an UpdateRecipes message to and from JSON.

    Creates two recipes, round-trips the message through JSON, executes the
    deserialized copy, and expects one follow-up message per recipe.
    """
    # Recipe 1: failed job with a pending dependent
    self.job_1_failed = job_test_utils.create_job(status='FAILED')
    self.job_1_pending = job_test_utils.create_job(status='PENDING')
    definition_1 = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'job_failed',
            'job_type': {
                'name': self.job_1_failed.job_type.name,
                'version': self.job_1_failed.job_type.version,
            },
        }, {
            'name': 'job_pending',
            'job_type': {
                'name': self.job_1_pending.job_type.name,
                'version': self.job_1_pending.job_type.version,
            },
            'dependencies': [{
                'name': 'job_failed',
            }],
        }],
    }
    self.recipe_type_1 = recipe_test_utils.create_recipe_type(definition=definition_1)
    self.recipe_1 = recipe_test_utils.create_recipe(recipe_type=self.recipe_type_1)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_1, job_name='job_failed', job=self.job_1_failed)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_1, job_name='job_pending', job=self.job_1_pending)

    # Recipe 2: running job with a blocked dependent
    self.job_2_running = job_test_utils.create_job(status='RUNNING')
    self.job_2_blocked = job_test_utils.create_job(status='BLOCKED')
    definition_2 = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'job_running',
            'job_type': {
                'name': self.job_2_running.job_type.name,
                'version': self.job_2_running.job_type.version,
            },
        }, {
            'name': 'job_blocked',
            'job_type': {
                'name': self.job_2_blocked.job_type.name,
                'version': self.job_2_blocked.job_type.version,
            },
            'dependencies': [{
                'name': 'job_running',
            }],
        }],
    }
    self.recipe_type_2 = recipe_test_utils.create_recipe_type(definition=definition_2)
    self.recipe_2 = recipe_test_utils.create_recipe(recipe_type=self.recipe_type_2)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_2, job_name='job_running', job=self.job_2_running)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_2, job_name='job_blocked', job=self.job_2_blocked)

    # Add recipes to message
    message = UpdateRecipes()
    if message.can_fit_more():
        message.add_recipe(self.recipe_1.id)
    if message.can_fit_more():
        message.add_recipe(self.recipe_2.id)

    # Convert message to JSON and back, and then execute
    message_json_dict = message.to_json()
    new_message = UpdateRecipes.from_json(message_json_dict)
    result = new_message.execute()
    self.assertTrue(result)
    self.assertEqual(len(new_message.new_messages), 2)
def test_successful_supersede_different_recipe_type(self):
    """Tests calling RecipeManager.create_recipe() to supersede a recipe with a different recipe type version that
    has one identical node, and deletes another node to replace it with a new one.
    """
    # New job type for the replacement node ('Job 3')
    interface_3 = {
        'version': '1.0',
        'command': 'my_command',
        'command_arguments': 'args',
        'input_data': [{
            'name': 'Test Input 3',
            'type': 'files',
            'media_types': ['image/tiff'],
        }],
        'output_data': [{
            'name': 'Test Output 3',
            'type': 'file',
        }]}
    job_type_3 = job_test_utils.create_job_type(interface=interface_3)

    # New definition keeps 'Job 1' identical but replaces 'Job 2' with 'Job 3'
    new_definition = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': self.job_type_1.name,
                'version': self.job_type_1.version,
            },
            'recipe_inputs': [{
                'recipe_input': 'Recipe Input',
                'job_input': 'Test Input 1',
            }]
        }, {
            'name': 'Job 3',
            'job_type': {
                'name': job_type_3.name,
                'version': job_type_3.version,
            },
            'dependencies': [{
                'name': 'Job 1',
                'connections': [{
                    'output': 'Test Output 1',
                    'input': 'Test Input 3',
                }]
            }]
        }]
    }
    new_recipe_type = recipe_test_utils.create_recipe_type(name=self.recipe_type.name, definition=new_definition)
    event = trigger_test_utils.create_trigger_event()
    handler = Recipe.objects.create_recipe(recipe_type=self.recipe_type, event=event, data=RecipeData(self.data))
    recipe = Recipe.objects.get(id=handler.recipe.id)
    recipe_job_1 = RecipeJob.objects.select_related('job').get(recipe_id=handler.recipe.id, job_name='Job 1')
    recipe_job_2 = RecipeJob.objects.select_related('job').get(recipe_id=handler.recipe.id, job_name='Job 2')
    job_exe_2 = job_test_utils.create_job_exe(job=recipe_job_2.job)

    # The product app is optional; skip the product checks if it is absent
    try:
        from product.models import ProductFile
        from product.test import utils as product_test_utils
        product = product_test_utils.create_product(job_exe=job_exe_2, has_been_published=True, is_published=True)
    except ImportError:
        product = None
    superseded_jobs = {'Job 1': recipe_job_1.job, 'Job 2': recipe_job_2.job}

    # Create a new recipe with a different version
    graph_a = self.recipe_type.get_recipe_definition().get_graph()
    graph_b = new_recipe_type.get_recipe_definition().get_graph()
    delta = RecipeGraphDelta(graph_a, graph_b)
    new_handler = Recipe.objects.create_recipe(recipe_type=new_recipe_type, event=event, data=None,
                                               superseded_recipe=recipe, delta=delta,
                                               superseded_jobs=superseded_jobs)

    # Check that old recipe and job 2 are superseded, job 1 should be copied (not superseded)
    recipe = Recipe.objects.get(id=recipe.id)
    job_1 = Job.objects.get(id=recipe_job_1.job_id)
    job_2 = Job.objects.get(id=recipe_job_2.job_id)
    self.assertTrue(recipe.is_superseded)
    self.assertFalse(job_1.is_superseded)
    self.assertTrue(job_2.is_superseded)

    # Check that product of job 2 (which was superseded with no new job) was unpublished
    if product:
        product = ProductFile.objects.get(id=product.id)
        self.assertFalse(product.is_published)
        self.assertIsNotNone(product.unpublished)

    # Check that new recipe supersedes the old one, job 1 is copied from old recipe, and job 2 is new and does not
    # supersede anything
    new_recipe = Recipe.objects.get(id=new_handler.recipe.id)
    new_recipe_job_1 = RecipeJob.objects.select_related('job').get(recipe_id=new_handler.recipe.id, job_name='Job 1')
    new_recipe_job_2 = RecipeJob.objects.select_related('job').get(recipe_id=new_handler.recipe.id, job_name='Job 3')
    self.assertEqual(new_recipe.superseded_recipe_id, recipe.id)
    self.assertEqual(new_recipe.root_superseded_recipe_id, recipe.id)
    self.assertDictEqual(new_recipe.data, recipe.data)
    self.assertEqual(new_recipe_job_1.job.id, job_1.id)
    self.assertFalse(new_recipe_job_1.is_original)
    self.assertIsNone(new_recipe_job_1.job.superseded_job)
    self.assertIsNone(new_recipe_job_1.job.root_superseded_job)
    self.assertNotEqual(new_recipe_job_2.job.id, job_2.id)
    self.assertTrue(new_recipe_job_2.is_original)
    self.assertIsNone(new_recipe_job_2.job.superseded_job_id)
    self.assertIsNone(new_recipe_job_2.job.root_superseded_job_id)
def test_get_existing_jobs_to_queue(self):
    """Tests calling RecipeHandler.get_existing_jobs_to_queue().

    Job 1 can be queued (its input comes straight from the recipe input);
    Job 2 depends on Job 1's output and so must not be returned yet.
    """
    input_name_1 = 'Test Input 1'
    output_name_1 = 'Test Output 1'
    interface_1 = {
        'version': '1.0',
        'command': 'my_cmd',
        'command_arguments': 'args',
        'input_data': [{
            'name': input_name_1,
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'output_data': [{
            'name': output_name_1,
            'type': 'files',
            'media_type': 'image/png',
        }],
    }
    job_type_1 = job_test_utils.create_job_type(interface=interface_1)
    job_1 = job_test_utils.create_job(job_type=job_type_1)

    input_name_2 = 'Test Input 2'
    output_name_2 = 'Test Output 2'
    interface_2 = {
        'version': '1.0',
        'command': 'my_cmd',
        'command_arguments': 'args',
        'input_data': [{
            'name': input_name_2,
            'type': 'files',
            'media_types': ['image/png', 'image/tiff'],
        }],
        'output_data': [{
            'name': output_name_2,
            'type': 'file',
        }],
    }
    job_type_2 = job_test_utils.create_job_type(interface=interface_2)
    job_2 = job_test_utils.create_job(job_type=job_type_2)

    workspace = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(workspace=workspace, media_type='text/plain')

    # Recipe: Job 1 fed by the recipe input, Job 2 fed by Job 1's output
    definition = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': job_type_1.name,
                'version': job_type_1.version,
            },
            'recipe_inputs': [{
                'recipe_input': 'Recipe Input',
                'job_input': input_name_1,
            }]
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': job_type_2.name,
                'version': job_type_2.version,
            },
            'dependencies': [{
                'name': 'Job 1',
                'connections': [{
                    'output': output_name_1,
                    'input': input_name_2,
                }],
            }],
        }],
    }
    data = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'file_id': file_1.id,
        }],
        'workspace_id': workspace.id,
    }
    recipe_type = recipe_test_utils.create_recipe_type(definition=definition)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, data=data)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='Job 1', job=job_1)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='Job 2', job=job_2)
    recipe_jobs = list(RecipeJob.objects.filter(recipe_id=recipe.id))
    handler = RecipeHandler(recipe, recipe_jobs)

    jobs_to_queue = handler.get_existing_jobs_to_queue()

    # Make sure only Job 1 is returned and that its job data is correct
    self.assertEqual(len(jobs_to_queue), 1)
    self.assertEqual(jobs_to_queue[0][0].id, job_1.id)
    self.assertDictEqual(jobs_to_queue[0][1].get_dict(), {
        'version': '1.0',
        'input_data': [{
            'name': input_name_1,
            'file_id': file_1.id,
        }],
        'output_data': [{
            'name': output_name_1,
            'workspace_id': workspace.id,
        }],
    })
def setUp(self):
    """Creates two completed recipes (with two completed jobs each) of the
    same recipe type, plus the id collections the tests iterate over.
    """
    django.setup()

    self.trigger = trigger_test_utils.create_trigger_event()
    self.workspace = storage_test_utils.create_workspace()
    self.file_1 = storage_test_utils.create_file()
    self.file_2 = storage_test_utils.create_file()

    interface_1 = {
        'version': '1.0',
        'command': 'my_command',
        'command_arguments': 'args',
        'input_data': [{
            'name': 'Test Input 1',
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'output_data': [{
            'name': 'Test Output 1',
            'type': 'files',
            'media_type': 'image/png',
        }]
    }
    self.job_type_1 = job_test_utils.create_job_type(interface=interface_1)

    interface_2 = {
        'version': '1.0',
        'command': 'my_command',
        'command_arguments': 'args',
        'input_data': [{
            'name': 'Test Input 2',
            'type': 'files',
            'media_types': ['image/png', 'image/tiff'],
        }],
        'output_data': [{
            'name': 'Test Output 2',
            'type': 'file',
        }]
    }
    self.job_type_2 = job_test_utils.create_job_type(interface=interface_2)

    # Both jobs take the recipe input directly (no dependency edges).
    # NOTE(review): 'Job 2' maps the recipe input to job_input 'Test Input 1'
    # although job_type_2's declared input is 'Test Input 2' — confirm this
    # is intentional for these tests.
    definition = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': self.job_type_1.name,
                'version': self.job_type_1.version,
            },
            'recipe_inputs': [{
                'recipe_input': 'Recipe Input',
                'job_input': 'Test Input 1',
            }]
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': self.job_type_2.name,
                'version': self.job_type_2.version,
            },
            'recipe_inputs': [{
                'recipe_input': 'Recipe Input',
                'job_input': 'Test Input 1',
            }]
        }]
    }
    self.recipe_type = recipe_test_utils.create_recipe_type(definition=definition)

    # Recipe 1 is created without input data and gets an input-file model
    # instead; NOTE(review): input_1 is built but never passed to recipe_1
    # (contrast with input_2/recipe_2) — confirm intentional.
    self.input_1 = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'file_id': self.file_1.id,
        }],
        'workspace_id': self.workspace.id,
    }
    self.recipe_1 = recipe_test_utils.create_recipe(recipe_type=self.recipe_type)
    recipe_test_utils.create_input_file(recipe=self.recipe_1)
    self.job_1_1 = job_test_utils.create_job(job_type=self.job_type_1, status='COMPLETED')
    recipe_test_utils.create_recipe_job(recipe=self.recipe_1, job_name='Job 1', job=self.job_1_1)
    self.job_1_2 = job_test_utils.create_job(job_type=self.job_type_2, status='COMPLETED')
    recipe_test_utils.create_recipe_job(recipe=self.recipe_1, job_name='Job 2', job=self.job_1_2)

    # Recipe 2 is created with its input populated directly
    self.input_2 = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'file_id': self.file_2.id,
        }],
        'workspace_id': self.workspace.id,
    }
    self.recipe_2 = recipe_test_utils.create_recipe(recipe_type=self.recipe_type, input=self.input_2)
    self.job_2_1 = job_test_utils.create_job(job_type=self.job_type_1, status='COMPLETED')
    recipe_test_utils.create_recipe_job(recipe=self.recipe_2, job_name='Job 1', job=self.job_2_1)
    self.job_2_2 = job_test_utils.create_job(job_type=self.job_type_2, status='COMPLETED')
    recipe_test_utils.create_recipe_job(recipe=self.recipe_2, job_name='Job 2', job=self.job_2_2)

    # Id collections used by the tests
    self.old_recipe_ids = [self.recipe_1.id, self.recipe_2.id]
    self.old_job_ids = [self.job_1_1.id, self.job_1_2.id, self.job_2_1.id, self.job_2_2.id]
    self.old_job_1_ids = [self.job_1_1.id, self.job_2_1.id]
    self.old_job_2_ids = [self.job_1_2.id, self.job_2_2.id]
def setUp(self):
    """Provision a named recipe type with one recipe, plus an unrelated recipe."""
    django.setup()

    # A recipe type with a known name, and a recipe that belongs to it
    named_type = recipe_test_utils.create_recipe_type(name='my-type')
    self.recipe_type = named_type
    self.recipe1 = recipe_test_utils.create_recipe(named_type)

    # A second recipe backed by an auto-generated recipe type
    self.recipe2 = recipe_test_utils.create_recipe()
def setUp(self):
    """Provision a recipe type and a batch tied to that type."""
    django.setup()

    # The batch must reference the recipe type, so create the type first
    rtype = recipe_test_utils.create_recipe_type()
    self.recipe_type = rtype
    self.batch = batch_test_utils.create_batch(recipe_type=rtype)
def setUp(self):
    """Provision one named/versioned recipe type and a recipe of that type."""
    django.setup()

    # Recipe type with an explicit name and version, and one recipe of it
    rtype = recipe_test_utils.create_recipe_type(name='test1', version='1.0')
    self.recipe_type1 = rtype
    self.recipe1 = recipe_test_utils.create_recipe(recipe_type=rtype)
def setUp(self):
    """Creates the trigger rules, job types, recipe definition, and parsed
    source ScaleFile shared by the tests in this class.
    """
    django.setup()

    self.input_name = 'Test Input'
    self.output_name = 'Test Output'
    self.workspace = storage_test_utils.create_workspace()

    # This job trigger should not match due to a different media type
    job_trigger_config = {
        'version': '1.0',
        'condition': {
            'media_type': 'image/png',
        },
        'data': {
            'input_data_name': self.input_name,
            'workspace_name': self.workspace.name
        },
    }
    job_trigger_rule = trigger_test_utils.create_trigger_rule(configuration=job_trigger_config)

    # Job type 1: single file input, no declared outputs
    interface_1 = {
        'version': '1.0',
        'command': 'my_cmd',
        'command_arguments': 'args',
        'input_data': [{
            'name': self.input_name,
            'type': 'file',
        }],
    }
    self.job_type_1 = job_test_utils.create_job_type(interface=interface_1, trigger_rule=job_trigger_rule)

    # Job type 2: file input plus a file output that feeds Job 1 below
    interface_2 = {
        'version': '1.0',
        'command': 'my_cmd',
        'command_arguments': 'args',
        'input_data': [{
            'name': self.input_name,
            'type': 'file',
        }],
        'output_data': [{
            'name': self.output_name,
            'type': 'file',
        }],
    }
    self.job_type_2 = job_test_utils.create_job_type(interface=interface_2, trigger_rule=job_trigger_rule)

    # create a recipe that runs both jobs: Job 2 takes the recipe input and
    # its output is wired into Job 1's input
    definition_1 = {
        'version': '1.0',
        'input_data': [{
            'name': self.input_name,
            'type': 'file',
            'required': True,
        }],
        'jobs': [{
            'name': 'Job 2',
            'job_type': {
                'name': self.job_type_2.name,
                'version': self.job_type_2.version,
            },
            'recipe_inputs': [{
                'recipe_input': self.input_name,
                'job_input': self.input_name,
            }],
        }, {
            'name': 'Job 1',
            'job_type': {
                'name': self.job_type_1.name,
                'version': self.job_type_1.version,
            },
            'dependencies': [{
                'name': 'Job 2',
                'connections': [{
                    'output': self.output_name,
                    'input': self.input_name,
                }],
            }],
        }],
    }

    # Source file (ScaleFile with file_type='SOURCE') whose media type
    # matches the recipe trigger below
    self.when_parsed = now()
    self.file_name = 'my_file.txt'
    self.data_type = 'test_file_type'
    self.media_type = 'text/plain'
    self.source_file = ScaleFile.objects.create(file_name=self.file_name, file_type='SOURCE',
                                                media_type=self.media_type, file_size=10,
                                                data_type=self.data_type, file_path='the_path',
                                                workspace=self.workspace)
    self.source_file.add_data_type_tag('type1')
    self.source_file.add_data_type_tag('type2')
    self.source_file.add_data_type_tag('type3')
    # NOTE(review): 'parsed' is set in memory only and not saved here —
    # presumably the tests save or re-trigger as needed; confirm.
    self.source_file.parsed = now()

    # Recipe trigger that DOES match the text/plain source file
    recipe_trigger_config = {
        'version': '1.0',
        'condition': {
            'media_type': 'text/plain',
        },
        'data': {
            'input_data_name': self.input_name,
            'workspace_name': self.workspace.name
        },
    }
    self.trigger_rule = trigger_test_utils.create_trigger_rule(configuration=recipe_trigger_config)
    self.recipe_type_1 = recipe_test_utils.create_recipe_type(definition=definition_1, trigger_rule=self.trigger_rule)
def setUp(self):
    """Creates a standalone running job plus two recipes whose failed/canceled
    jobs (and their dependents) are exercised by the tests.
    """
    django.setup()

    self.standalone_job = job_test_utils.create_job(status='RUNNING')

    # Recipe A: failed 'Job 1' with a blocked dependent 'Job 2'
    job_type_a_1 = job_test_utils.create_job_type()
    job_type_a_2 = job_test_utils.create_job_type()
    definition_a = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': job_type_a_1.name,
                'version': job_type_a_1.version,
            }
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': job_type_a_2.name,
                'version': job_type_a_2.version,
            },
            'dependencies': [{
                'name': 'Job 1'
            }],
        }],
    }
    recipe_type_a = recipe_test_utils.create_recipe_type(definition=definition_a)
    self.job_a_1 = job_test_utils.create_job(job_type=job_type_a_1, status='FAILED', num_exes=1)
    self.job_a_2 = job_test_utils.create_job(job_type=job_type_a_2, status='BLOCKED')
    data_a = {
        'version': '1.0',
        'input_data': [],
        'workspace_id': 1,
    }
    self.recipe_a = recipe_test_utils.create_recipe(recipe_type=recipe_type_a, data=data_a)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_a, job_name='Job 1', job=self.job_a_1)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_a, job_name='Job 2', job=self.job_a_2)

    # Create recipe for re-queing a job that should now be BLOCKED (and its dependencies)
    job_type_b_1 = job_test_utils.create_job_type()
    job_type_b_2 = job_test_utils.create_job_type()
    job_type_b_3 = job_test_utils.create_job_type()
    definition_b = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': job_type_b_1.name,
                'version': job_type_b_1.version,
            }
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': job_type_b_2.name,
                'version': job_type_b_2.version,
            },
            'dependencies': [{
                'name': 'Job 1'
            }],
        }, {
            'name': 'Job 3',
            'job_type': {
                'name': job_type_b_3.name,
                'version': job_type_b_3.version,
            },
            'dependencies': [{
                'name': 'Job 2'
            }],
        }],
    }
    recipe_type_b = recipe_test_utils.create_recipe_type(definition=definition_b)
    self.job_b_1 = job_test_utils.create_job(job_type=job_type_b_1, status='FAILED')
    self.job_b_2 = job_test_utils.create_job(job_type=job_type_b_2, status='CANCELED')
    self.job_b_3 = job_test_utils.create_job(job_type=job_type_b_3, status='BLOCKED')
    data_b = {
        'version': '1.0',
        'input_data': [],
        'workspace_id': 1,
    }
    self.recipe_b = recipe_test_utils.create_recipe(recipe_type=recipe_type_b, data=data_b)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_b, job_name='Job 1', job=self.job_b_1)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_b, job_name='Job 2', job=self.job_b_2)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_b, job_name='Job 3', job=self.job_b_3)

    # Ids the tests operate on, and the dependents they expect to be affected
    self.job_ids = [self.standalone_job.id, self.job_a_1.id, self.job_b_2.id]
    self.dependent_job_ids = {self.job_a_2.id, self.job_b_3.id}
def test_execute(self):
    """Tests calling UpdateRecipes.execute() successfully.

    Builds four recipes covering: jobs that should become BLOCKED/PENDING,
    input being set for a recipe's starting job, and input being set for a
    child job whose parent has completed.  Verifies the four resulting
    messages and the populated job inputs, then re-executes the message from
    its JSON form and verifies the same messages are produced again.
    """

    # Create recipes for testing the setting of jobs to BLOCKED/PENDING
    self.job_1_failed = job_test_utils.create_job(status='FAILED')
    self.job_1_pending = job_test_utils.create_job(status='PENDING')
    definition_1 = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'job_failed',
            'job_type': {
                'name': self.job_1_failed.job_type.name,
                'version': self.job_1_failed.job_type.version,
            },
        }, {
            'name': 'job_pending',
            'job_type': {
                'name': self.job_1_pending.job_type.name,
                'version': self.job_1_pending.job_type.version,
            },
            'dependencies': [{
                'name': 'job_failed',
            }],
        }],
    }
    self.recipe_type_1 = recipe_test_utils.create_recipe_type(definition=definition_1)
    self.recipe_1 = recipe_test_utils.create_recipe(recipe_type=self.recipe_type_1)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_1, job_name='job_failed', job=self.job_1_failed)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_1, job_name='job_pending', job=self.job_1_pending)

    self.job_2_running = job_test_utils.create_job(status='RUNNING')
    self.job_2_blocked = job_test_utils.create_job(status='BLOCKED')
    definition_2 = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'job_running',
            'job_type': {
                'name': self.job_2_running.job_type.name,
                'version': self.job_2_running.job_type.version,
            },
        }, {
            'name': 'job_blocked',
            'job_type': {
                'name': self.job_2_blocked.job_type.name,
                'version': self.job_2_blocked.job_type.version,
            },
            'dependencies': [{
                'name': 'job_running',
            }],
        }],
    }
    self.recipe_type_2 = recipe_test_utils.create_recipe_type(definition=definition_2)
    self.recipe_2 = recipe_test_utils.create_recipe(recipe_type=self.recipe_type_2)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_2, job_name='job_running', job=self.job_2_running)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_2, job_name='job_blocked', job=self.job_2_blocked)

    # Create recipe for testing the setting of input for a starting job in a recipe (no parents)
    input_name_1 = 'Test Input 1'
    output_name_1 = 'Test Output 1'
    interface_1 = {
        'version': '1.0',
        'command': 'my_cmd',
        'command_arguments': 'args',
        'input_data': [{
            'name': input_name_1,
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'output_data': [{
            'name': output_name_1,
            'type': 'files',
            'media_type': 'image/png',
        }],
    }
    job_type_3 = job_test_utils.create_job_type(interface=interface_1)
    job_3 = job_test_utils.create_job(job_type=job_type_3, status='PENDING', num_exes=0)
    input_name_2 = 'Test Input 2'
    output_name_2 = 'Test Output 2'
    interface_2 = {
        'version': '1.0',
        'command': 'my_cmd',
        'command_arguments': 'args',
        'input_data': [{
            'name': input_name_2,
            'type': 'files',
            'media_types': ['image/png', 'image/tiff'],
        }],
        'output_data': [{
            'name': output_name_2,
            'type': 'file',
        }],
    }
    job_type_4 = job_test_utils.create_job_type(interface=interface_2)
    job_4 = job_test_utils.create_job(job_type=job_type_4, status='PENDING', num_exes=0)
    workspace = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(workspace=workspace, media_type='text/plain')
    definition = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': job_type_3.name,
                'version': job_type_3.version,
            },
            'recipe_inputs': [{
                'recipe_input': 'Recipe Input',
                'job_input': input_name_1,
            }]
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': job_type_4.name,
                'version': job_type_4.version,
            },
            'dependencies': [{
                'name': 'Job 1',
                'connections': [{
                    'output': output_name_1,
                    'input': input_name_2,
                }],
            }],
        }],
    }
    data = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'file_id': file_1.id,
        }],
        'workspace_id': workspace.id,
    }
    self.recipe_type_3 = recipe_test_utils.create_recipe_type(definition=definition)
    self.recipe_3 = recipe_test_utils.create_recipe(recipe_type=self.recipe_type_3, input=data)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_3, job_name='Job 1', job=job_3)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_3, job_name='Job 2', job=job_4)

    # Create recipe for testing the setting of input for a child job
    job_5 = job_test_utils.create_job(job_type=job_type_3, status='COMPLETED')
    file_2 = storage_test_utils.create_file(workspace=workspace, media_type='text/plain')
    job_5_output_dict = {
        'version': '1.0',
        'output_data': [{
            'name': output_name_1,
            'file_ids': [file_2.id]
        }]
    }
    job_test_utils.create_job_exe(job=job_5, output=JobResults(job_5_output_dict))
    # Complete job 5 and set its output so that update recipe message can give go ahead for child job 6
    Job.objects.process_job_output([job_5.id], now())
    job_6 = job_test_utils.create_job(job_type=job_type_4, status='PENDING', num_exes=0)
    self.recipe_4 = recipe_test_utils.create_recipe(recipe_type=self.recipe_type_3, input=data)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_4, job_name='Job 1', job=job_5)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_4, job_name='Job 2', job=job_6)

    def assert_expected_messages(messages):
        """Asserts the four expected messages are present: blocked_jobs, pending_jobs,
        and process_job_input messages for both job_3 and job_6
        """
        found_blocked_jobs = False
        found_pending_jobs = False
        found_input_for_job_3 = False
        found_input_for_job_6 = False
        for new_msg in messages:
            if new_msg.type == 'blocked_jobs':
                found_blocked_jobs = True
            elif new_msg.type == 'pending_jobs':
                found_pending_jobs = True
            elif new_msg.type == 'process_job_input':
                if new_msg.job_id == job_3.id:
                    found_input_for_job_3 = True
                elif new_msg.job_id == job_6.id:
                    found_input_for_job_6 = True
        self.assertTrue(found_blocked_jobs)
        self.assertTrue(found_pending_jobs)
        self.assertTrue(found_input_for_job_3)
        self.assertTrue(found_input_for_job_6)

    # Add recipes to message
    message = UpdateRecipes()
    if message.can_fit_more():
        message.add_recipe(self.recipe_1.id)
    if message.can_fit_more():
        message.add_recipe(self.recipe_2.id)
    if message.can_fit_more():
        message.add_recipe(self.recipe_3.id)
    if message.can_fit_more():
        message.add_recipe(self.recipe_4.id)

    # Execute message
    result = message.execute()
    self.assertTrue(result)
    self.assertEqual(len(message.new_messages), 4)

    # Check message types
    assert_expected_messages(message.new_messages)

    # Make sure Job 3 has its input populated
    job = Job.objects.get(id=job_3.id)
    self.assertDictEqual(job.input, {
        'version': '1.0',
        'input_data': [{
            'name': input_name_1,
            'file_id': file_1.id,
        }],
        'output_data': [{
            'name': output_name_1,
            'workspace_id': workspace.id,
        }],
    })
    # Make sure Job 6 has its input populated
    job = Job.objects.get(id=job_6.id)
    self.assertDictEqual(job.input, {
        'version': '1.0',
        'input_data': [{
            'name': input_name_2,
            'file_ids': [file_2.id],
        }],
        'output_data': [{
            'name': output_name_2,
            'workspace_id': workspace.id,
        }],
    })

    # Test executing message again
    message_json_dict = message.to_json()
    message = UpdateRecipes.from_json(message_json_dict)
    result = message.execute()
    self.assertTrue(result)

    # Make sure the same four messages are returned
    self.assertEqual(len(message.new_messages), 4)
    assert_expected_messages(message.new_messages)
def setUp(self): django.setup() self.workspace = storage_test_utils.create_workspace() self.file = storage_test_utils.create_file() interface_1 = { 'version': '1.0', 'command': 'my_command', 'command_arguments': 'args', 'input_data': [{ 'name': 'Test Input 1', 'type': 'file', 'media_types': ['text/plain'], }], 'output_data': [{ 'name': 'Test Output 1', 'type': 'files', 'media_type': 'image/png', }]} self.job_type_1 = job_test_utils.create_job_type(interface=interface_1) interface_2 = { 'version': '1.0', 'command': 'my_command', 'command_arguments': 'args', 'input_data': [{ 'name': 'Test Input 2', 'type': 'files', 'media_types': ['image/png', 'image/tiff'], }], 'output_data': [{ 'name': 'Test Output 2', 'type': 'file', }]} self.job_type_2 = job_test_utils.create_job_type(interface=interface_2) definition = { 'version': '1.0', 'input_data': [{ 'name': 'Recipe Input', 'type': 'file', 'media_types': ['text/plain'], }], 'jobs': [{ 'name': 'Job 1', 'job_type': { 'name': self.job_type_1.name, 'version': self.job_type_1.version, }, 'recipe_inputs': [{ 'recipe_input': 'Recipe Input', 'job_input': 'Test Input 1', }] }, { 'name': 'Job 2', 'job_type': { 'name': self.job_type_2.name, 'version': self.job_type_2.version, }, 'dependencies': [{ 'name': 'Job 1', 'connections': [{ 'output': 'Test Output 1', 'input': 'Test Input 2', }] }] }] } RecipeDefinition(definition).validate_job_interfaces() self.recipe_type = recipe_test_utils.create_recipe_type(definition=definition) self.data = { 'version': '1.0', 'input_data': [{ 'name': 'Recipe Input', 'file_id': self.file.id, }], 'workspace_id': self.workspace.id, }
def test_execute(self):
    """Tests calling CreateBatchRecipes.execute() successfully.

    Patches MAX_RECIPE_NUM down to 5 so a previous batch of six recipes forces
    pagination: the first execute emits a follow-on create_batch_recipes
    message plus a reprocess_recipes message for five recipes, and the
    follow-on message emits one final reprocess_recipes message.
    """

    # Importing module here to patch the max recipe num
    import batch.messages.create_batch_recipes
    original_max_recipe_num = batch.messages.create_batch_recipes.MAX_RECIPE_NUM
    batch.messages.create_batch_recipes.MAX_RECIPE_NUM = 5
    # Restore the module-level constant after this test so the patched value
    # does not leak into other tests running in the same process
    self.addCleanup(setattr, batch.messages.create_batch_recipes, 'MAX_RECIPE_NUM',
                    original_max_recipe_num)

    # Previous batch with six recipes
    recipe_type = recipe_test_utils.create_recipe_type()
    prev_batch = batch_test_utils.create_batch(recipe_type=recipe_type, is_creation_done=True,
                                               recipes_total=6)
    recipe_1 = recipe_test_utils.create_recipe(batch=prev_batch)
    recipe_2 = recipe_test_utils.create_recipe(batch=prev_batch)
    recipe_3 = recipe_test_utils.create_recipe(batch=prev_batch)
    recipe_4 = recipe_test_utils.create_recipe(batch=prev_batch)
    recipe_5 = recipe_test_utils.create_recipe(batch=prev_batch)
    recipe_6 = recipe_test_utils.create_recipe(batch=prev_batch)
    definition = BatchDefinition()
    definition.root_batch_id = prev_batch.root_batch_id
    new_batch = batch_test_utils.create_batch(recipe_type=recipe_type, definition=definition)

    # Create message
    message = batch.messages.create_batch_recipes.CreateBatchRecipes()
    message.batch_id = new_batch.id

    # Copy JSON for running same message again later
    message_json = message.to_json()

    # Execute message
    result = message.execute()
    self.assertTrue(result)

    # Should be two messages, one for next create_batch_recipes and one for re-processing recipes
    self.assertEqual(len(message.new_messages), 2)
    batch_recipes_message = message.new_messages[0]
    reprocess_message = message.new_messages[1]
    self.assertEqual(batch_recipes_message.type, 'create_batch_recipes')
    self.assertEqual(batch_recipes_message.batch_id, new_batch.id)
    self.assertFalse(batch_recipes_message.is_prev_batch_done)
    self.assertEqual(batch_recipes_message.current_recipe_id, recipe_2.id)
    self.assertEqual(reprocess_message.type, 'reprocess_recipes')
    self.assertSetEqual(set(reprocess_message._root_recipe_ids),
                        {recipe_2.id, recipe_3.id, recipe_4.id, recipe_5.id, recipe_6.id})

    # Test executing message again
    message = batch.messages.create_batch_recipes.CreateBatchRecipes.from_json(message_json)
    result = message.execute()
    self.assertTrue(result)

    # Should have same messages returned
    self.assertEqual(len(message.new_messages), 2)
    batch_recipes_message = message.new_messages[0]
    reprocess_message = message.new_messages[1]
    self.assertEqual(batch_recipes_message.type, 'create_batch_recipes')
    self.assertEqual(batch_recipes_message.batch_id, new_batch.id)
    self.assertFalse(batch_recipes_message.is_prev_batch_done)
    self.assertEqual(batch_recipes_message.current_recipe_id, recipe_2.id)
    self.assertEqual(reprocess_message.type, 'reprocess_recipes')
    self.assertSetEqual(set(reprocess_message._root_recipe_ids),
                        {recipe_2.id, recipe_3.id, recipe_4.id, recipe_5.id, recipe_6.id})

    # Execute next create_batch_recipes messages
    result = batch_recipes_message.execute()
    self.assertTrue(result)

    # Should only have one last reprocess message
    self.assertEqual(len(batch_recipes_message.new_messages), 1)
    reprocess_message = batch_recipes_message.new_messages[0]
    self.assertTrue(batch_recipes_message.is_prev_batch_done)
    self.assertEqual(reprocess_message.type, 'reprocess_recipes')
    self.assertSetEqual(set(reprocess_message._root_recipe_ids), {recipe_1.id})
def setUp(self): django.setup() self.input_name = 'Test Input' self.output_name = 'Test Output' interface_1 = { 'version': '1.0', 'command': 'my_cmd', 'command_arguments': 'args', 'input_data': [{ 'name': self.input_name, 'type': 'file', }], } self.job_type_1 = job_test_utils.create_job_type(interface=interface_1) interface_2 = { 'version': '1.0', 'command': 'my_cmd', 'command_arguments': 'args', 'input_data': [{ 'name': self.input_name, 'type': 'file', }], 'output_data': [{ 'name': self.output_name, 'type': 'file', }], } self.job_type_2 = job_test_utils.create_job_type(interface=interface_2) # create a recipe that runs both jobs definition_1 = { 'version': '1.0', 'input_data': [{ 'name': self.input_name, 'type': 'file', 'required': True, }], 'jobs': [{ 'name': 'Job 2', 'job_type': { 'name': self.job_type_2.name, 'version': self.job_type_2.version, }, 'recipe_inputs': [{ 'recipe_input': self.input_name, 'job_input': self.input_name, }], }, { 'name': 'Job 1', 'job_type': { 'name': self.job_type_1.name, 'version': self.job_type_1.version, }, 'dependencies': [{ 'name': 'Job 2', 'connections': [{ 'output': self.output_name, 'input': self.input_name, }], }], }], } self.recipe_type_1 = recipe_test_utils.create_recipe_type(definition=definition_1) self.file_name = 'my_file.txt' self.data_type = 'test_file_type' self.media_type = 'text/plain' self.workspace = storage_test_utils.create_workspace() self.source_file = SourceFile.objects.create(file_name=self.file_name, media_type=self.media_type, file_size=10, data_type=self.data_type, file_path='the_path', workspace=self.workspace) self.source_file.add_data_type_tag('type1') self.source_file.add_data_type_tag('type2') self.source_file.add_data_type_tag('type3')
def setUp(self): django.setup() self.workspace = storage_test_utils.create_workspace() self.recipe_type_1 = recipe_test_utils.create_recipe_type() self.recipe_type_2 = recipe_test_utils.create_recipe_type()
def test_execute_create_jobs(self):
    """Tests calling UpdateRecipes.execute() successfully where recipe jobs need to be created

    Recipe 1 is a plain two-job recipe; recipe 2 belongs to a batch (priority 999)
    and supersedes an older recipe, so its new jobs must supersede the old jobs.
    """

    configuration = BatchConfiguration()
    configuration.priority = 999
    batch = batch_test_utils.create_batch(configuration=configuration)

    # Create recipes
    job_type_1 = job_test_utils.create_job_type()
    job_type_2 = job_test_utils.create_job_type()
    definition_1 = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'job_1',
            'job_type': {
                'name': job_type_1.name,
                'version': job_type_1.version,
            },
        }, {
            'name': 'job_2',
            'job_type': {
                'name': job_type_2.name,
                'version': job_type_2.version,
            },
            'dependencies': [{
                'name': 'job_1',
            }],
        }]
    }
    recipe_type_1 = recipe_test_utils.create_recipe_type(definition=definition_1)
    recipe_1 = recipe_test_utils.create_recipe(recipe_type=recipe_type_1)

    job_type_3 = job_test_utils.create_job_type()
    job_type_4 = job_test_utils.create_job_type()
    definition_2 = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'job_a',
            'job_type': {
                'name': job_type_3.name,
                'version': job_type_3.version,
            },
        }, {
            'name': 'job_b',
            'job_type': {
                'name': job_type_4.name,
                'version': job_type_4.version,
            },
            'dependencies': [{
                'name': 'job_a',
            }],
        }]
    }
    # Superseded recipe/jobs that recipe 2's newly created jobs must supersede
    superseded_recipe = recipe_test_utils.create_recipe(is_superseded=True)
    superseded_job_a = job_test_utils.create_job(is_superseded=True)
    superseded_job_b = job_test_utils.create_job(is_superseded=True)
    recipe_test_utils.create_recipe_job(recipe=superseded_recipe, job_name='job_a',
                                        job=superseded_job_a)
    recipe_test_utils.create_recipe_job(recipe=superseded_recipe, job_name='job_b',
                                        job=superseded_job_b)
    recipe_type_2 = recipe_test_utils.create_recipe_type(definition=definition_2)
    recipe_2 = recipe_test_utils.create_recipe(recipe_type=recipe_type_2, batch=batch,
                                               superseded_recipe=superseded_recipe)

    # Add recipes to message
    message = UpdateRecipes()
    if message.can_fit_more():
        message.add_recipe(recipe_1.id)
    if message.can_fit_more():
        message.add_recipe(recipe_2.id)

    # Execute message
    result = message.execute()
    self.assertTrue(result)

    # Make sure jobs get created and that "top" recipe jobs (job_1 and job_a) have input populated
    # Recipe 2 jobs (job_a and job_b) should have priority set to 999 from batch
    # Recipe 2 jobs (job_a and job_b) should supersede old jobs
    rj_qry = RecipeNode.objects.select_related('job').filter(recipe_id__in=[recipe_1.id,
                                                                            recipe_2.id])
    # Ordering by (recipe_id, node_name) makes the index positions below deterministic
    recipe_jobs = rj_qry.order_by('recipe_id', 'node_name')
    self.assertEqual(len(recipe_jobs), 4)
    self.assertEqual(recipe_jobs[0].recipe_id, recipe_1.id)
    self.assertEqual(recipe_jobs[0].node_name, 'job_1')
    self.assertEqual(recipe_jobs[0].job.job_type_id, job_type_1.id)
    self.assertTrue(recipe_jobs[0].is_original)
    self.assertTrue(recipe_jobs[0].job.has_input())
    self.assertEqual(recipe_jobs[1].recipe_id, recipe_1.id)
    self.assertEqual(recipe_jobs[1].node_name, 'job_2')
    self.assertEqual(recipe_jobs[1].job.job_type_id, job_type_2.id)
    self.assertTrue(recipe_jobs[1].is_original)
    self.assertFalse(recipe_jobs[1].job.has_input())
    self.assertEqual(recipe_jobs[2].recipe_id, recipe_2.id)
    self.assertEqual(recipe_jobs[2].node_name, 'job_a')
    self.assertEqual(recipe_jobs[2].job.job_type_id, job_type_3.id)
    self.assertTrue(recipe_jobs[2].is_original)
    self.assertTrue(recipe_jobs[2].job.has_input())
    self.assertEqual(recipe_jobs[2].job.priority, 999)
    self.assertEqual(recipe_jobs[2].job.superseded_job_id, superseded_job_a.id)
    self.assertEqual(recipe_jobs[3].recipe_id, recipe_2.id)
    self.assertEqual(recipe_jobs[3].node_name, 'job_b')
    self.assertEqual(recipe_jobs[3].job.job_type_id, job_type_4.id)
    self.assertTrue(recipe_jobs[3].is_original)
    self.assertFalse(recipe_jobs[3].job.has_input())
    self.assertEqual(recipe_jobs[3].job.priority, 999)
    self.assertEqual(recipe_jobs[3].job.superseded_job_id, superseded_job_b.id)
    jobs = Job.objects.filter(recipe_id__in=[recipe_1.id, recipe_2.id])
    self.assertEqual(len(jobs), 4)

    # Should have two messages for processing inputs for job_1 and job_a
    self.assertEqual(len(message.new_messages), 2)
    self.assertEqual(message.new_messages[0].type, 'process_job_input')
    self.assertEqual(message.new_messages[1].type, 'process_job_input')
    self.assertSetEqual({message.new_messages[0].job_id, message.new_messages[1].job_id},
                        {recipe_jobs[0].job_id, recipe_jobs[2].job_id})

    # Test executing message again
    message_json_dict = message.to_json()
    message = UpdateRecipes.from_json(message_json_dict)
    result = message.execute()
    self.assertTrue(result)

    # Make sure no additional jobs are created
    rj_qry = RecipeNode.objects.select_related('job').filter(recipe_id__in=[recipe_1.id,
                                                                            recipe_2.id])
    recipe_jobs = rj_qry.order_by('recipe_id', 'node_name')
    self.assertEqual(len(recipe_jobs), 4)

    # Make sure the same messages are returned
    self.assertEqual(len(message.new_messages), 2)
    self.assertEqual(message.new_messages[0].type, 'process_job_input')
    self.assertEqual(message.new_messages[1].type, 'process_job_input')
    self.assertSetEqual({message.new_messages[0].job_id, message.new_messages[1].job_id},
                        {recipe_jobs[0].job_id, recipe_jobs[2].job_id})
def setUp(self):
    """Creates standalone jobs and two recipes for re-queue testing, and registers
    a mock queue event processor.
    """

    django.setup()

    self.new_priority = 200
    self.standalone_failed_job = job_test_utils.create_job(status='FAILED', num_exes=3,
                                                           priority=100)
    self.standalone_canceled_job = job_test_utils.create_job(status='CANCELED', num_exes=1,
                                                             priority=100)
    self.standalone_completed_job = job_test_utils.create_job(status='COMPLETED')

    # Create recipe for re-queing a job that should now be PENDING (and its dependencies)
    job_type_a_1 = job_test_utils.create_job_type()
    job_type_a_2 = job_test_utils.create_job_type()
    definition_a = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': job_type_a_1.name,
                'version': job_type_a_1.version,
            }
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': job_type_a_2.name,
                'version': job_type_a_2.version,
            },
            'dependencies': [{
                'name': 'Job 1'
            }],
        }],
    }
    recipe_type_a = recipe_test_utils.create_recipe_type(definition=definition_a)
    self.job_a_1 = job_test_utils.create_job(job_type=job_type_a_1, status='FAILED', num_exes=1)
    self.job_a_2 = job_test_utils.create_job(job_type=job_type_a_2, status='BLOCKED')
    data_a = {
        'version': '1.0',
        'input_data': [],
        'workspace_id': 1,
    }
    recipe_a = recipe_test_utils.create_recipe(recipe_type=recipe_type_a, data=data_a)
    recipe_test_utils.create_recipe_job(recipe=recipe_a, job_name='Job 1', job=self.job_a_1)
    recipe_test_utils.create_recipe_job(recipe=recipe_a, job_name='Job 2', job=self.job_a_2)

    # Create recipe for re-queing a job that should now be BLOCKED (and its dependencies)
    job_type_b_1 = job_test_utils.create_job_type()
    job_type_b_2 = job_test_utils.create_job_type()
    job_type_b_3 = job_test_utils.create_job_type()
    definition_b = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': job_type_b_1.name,
                'version': job_type_b_1.version,
            }
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': job_type_b_2.name,
                'version': job_type_b_2.version,
            },
            'dependencies': [{
                'name': 'Job 1'
            }],
        }, {
            'name': 'Job 3',
            'job_type': {
                'name': job_type_b_3.name,
                'version': job_type_b_3.version,
            },
            'dependencies': [{
                'name': 'Job 2'
            }],
        }],
    }
    recipe_type_b = recipe_test_utils.create_recipe_type(definition=definition_b)
    self.job_b_1 = job_test_utils.create_job(job_type=job_type_b_1, status='FAILED')
    self.job_b_2 = job_test_utils.create_job(job_type=job_type_b_2, status='CANCELED')
    self.job_b_3 = job_test_utils.create_job(job_type=job_type_b_3, status='BLOCKED')
    data_b = {
        'version': '1.0',
        'input_data': [],
        'workspace_id': 1,
    }
    recipe_b = recipe_test_utils.create_recipe(recipe_type=recipe_type_b, data=data_b)
    recipe_test_utils.create_recipe_job(recipe=recipe_b, job_name='Job 1', job=self.job_b_1)
    recipe_test_utils.create_recipe_job(recipe=recipe_b, job_name='Job 2', job=self.job_b_2)
    recipe_test_utils.create_recipe_job(recipe=recipe_b, job_name='Job 3', job=self.job_b_3)

    # Job IDs to re-queue
    self.job_ids = [
        self.standalone_failed_job.id,
        self.standalone_canceled_job.id,
        self.standalone_completed_job.id,
        self.job_a_1.id,
        self.job_b_2.id
    ]

    # Register a fake processor
    self.mock_processor = MagicMock(QueueEventProcessor)
    Queue.objects.register_processor(lambda: self.mock_processor)
def test_execute(self):
    """Tests calling UpdateBatchMetrics.execute() successfully.

    Builds a batch with three recipes covering every job status and several
    job/seed durations, runs the metrics update message, and verifies both the
    batch-level counts and the per-job-name BatchMetrics rows.  Then re-executes
    the message from its JSON form to confirm it is idempotent.
    """

    job_type = job_test_utils.create_job_type()
    # Recipe graph: b -> (c, d); d -> e; f -> g; a and h have no dependencies
    definition = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'a',
            'job_type': {'name': job_type.name, 'version': job_type.version},
        }, {
            'name': 'b',
            'job_type': {'name': job_type.name, 'version': job_type.version},
        }, {
            'name': 'c',
            'job_type': {'name': job_type.name, 'version': job_type.version},
            'dependencies': [{'name': 'b'}],
        }, {
            'name': 'd',
            'job_type': {'name': job_type.name, 'version': job_type.version},
            'dependencies': [{'name': 'b'}],
        }, {
            'name': 'e',
            'job_type': {'name': job_type.name, 'version': job_type.version},
            'dependencies': [{'name': 'd'}],
        }, {
            'name': 'f',
            'job_type': {'name': job_type.name, 'version': job_type.version},
        }, {
            'name': 'g',
            'job_type': {'name': job_type.name, 'version': job_type.version},
            'dependencies': [{'name': 'f'}],
        }, {
            'name': 'h',
            'job_type': {'name': job_type.name, 'version': job_type.version},
        }]
    }
    recipe_type = recipe_test_utils.create_recipe_type(definition=definition)
    batch = batch_test_utils.create_batch(recipe_type=recipe_type)

    started = now()
    ended_1 = started + datetime.timedelta(minutes=1)
    ended_2 = started + datetime.timedelta(minutes=2)
    ended_3 = started + datetime.timedelta(minutes=3)
    ended_4 = started + datetime.timedelta(minutes=7)

    def make_seed_task_results(ended):
        """Creates task results with a single 'main' task running from started to ended"""
        results_dict = {
            'version': '1.0',
            'tasks': [{
                'task_id': '1234',
                'type': 'main',
                'started': datetime_to_string(started),
                'ended': datetime_to_string(ended)
            }]
        }
        return TaskResults(task_results=results_dict, do_validate=False)

    # Recipe 1: jobs in every status; job_1 has a job duration, job_2 a seed duration
    recipe_1 = recipe_test_utils.create_recipe(batch=batch, recipe_type=recipe_type)
    job_1 = job_test_utils.create_job(status='COMPLETED', started=started, ended=ended_1)
    job_2 = job_test_utils.create_job(status='COMPLETED')
    job_test_utils.create_job_exe(job=job_2, status='COMPLETED',
                                  task_results=make_seed_task_results(ended_2))
    job_3 = job_test_utils.create_job(status='QUEUED')
    job_4 = job_test_utils.create_job(status='QUEUED')
    job_5 = job_test_utils.create_job(status='RUNNING')
    job_6 = job_test_utils.create_job(status='RUNNING')
    job_7 = job_test_utils.create_job(status='RUNNING')
    job_8 = job_test_utils.create_job(status='PENDING')
    job_9 = job_test_utils.create_job(status='PENDING')
    job_10 = job_test_utils.create_job(status='CANCELED')
    job_11 = job_test_utils.create_job(status='BLOCKED')
    job_12 = job_test_utils.create_job(status='FAILED')
    for job, job_name in [(job_1, 'a'), (job_2, 'b'), (job_3, 'c'), (job_4, 'c'), (job_5, 'c'),
                          (job_6, 'd'), (job_7, 'd'), (job_8, 'e'), (job_9, 'e'), (job_10, 'f'),
                          (job_11, 'g'), (job_12, 'h')]:
        recipe_test_utils.create_recipe_job(recipe=recipe_1, job=job, job_name=job_name)

    # Recipe 2: marked completed, jobs without durations
    recipe_2 = recipe_test_utils.create_recipe(batch=batch, recipe_type=recipe_type)
    recipe_2.is_completed = True
    recipe_2.save()
    job_13 = job_test_utils.create_job(status='FAILED')
    job_14 = job_test_utils.create_job(status='COMPLETED')
    job_15 = job_test_utils.create_job(status='RUNNING')
    job_16 = job_test_utils.create_job(status='RUNNING')
    job_17 = job_test_utils.create_job(status='QUEUED')
    job_18 = job_test_utils.create_job(status='QUEUED')
    job_19 = job_test_utils.create_job(status='QUEUED')
    job_20 = job_test_utils.create_job(status='QUEUED')
    job_21 = job_test_utils.create_job(status='PENDING')
    for job, job_name in [(job_13, 'a'), (job_14, 'b'), (job_15, 'c'), (job_16, 'c'),
                          (job_17, 'd'), (job_18, 'd'), (job_19, 'd'), (job_20, 'd'),
                          (job_21, 'e')]:
        recipe_test_utils.create_recipe_job(recipe=recipe_2, job=job, job_name=job_name)

    # Recipe 3: marked completed, completed jobs supplying job and seed durations
    recipe_3 = recipe_test_utils.create_recipe(batch=batch, recipe_type=recipe_type)
    recipe_3.is_completed = True
    recipe_3.save()
    job_22 = job_test_utils.create_job(status='COMPLETED')
    job_23 = job_test_utils.create_job(status='COMPLETED')
    job_test_utils.create_job_exe(job=job_23, status='COMPLETED',
                                  task_results=make_seed_task_results(ended_3))
    job_24 = job_test_utils.create_job(status='COMPLETED', started=started, ended=ended_2)
    job_25 = job_test_utils.create_job(status='COMPLETED', started=started, ended=ended_3)
    job_26 = job_test_utils.create_job(status='COMPLETED', started=started, ended=ended_4)
    job_27 = job_test_utils.create_job(status='COMPLETED')
    for job, job_name in [(job_22, 'a'), (job_23, 'b'), (job_24, 'c'), (job_25, 'c'),
                          (job_26, 'c'), (job_27, 'c')]:
        recipe_test_utils.create_recipe_job(recipe=recipe_3, job=job, job_name=job_name)

    # Generate recipe metrics
    Recipe.objects.update_recipe_metrics([recipe_1.id, recipe_2.id, recipe_3.id])

    # Add batch to message
    message = UpdateBatchMetrics()
    if message.can_fit_more():
        message.add_batch(batch.id)

    # Execute message
    result = message.execute()
    self.assertTrue(result)

    batch = Batch.objects.get(id=batch.id)
    self.assertEqual(batch.jobs_total, 27)
    self.assertEqual(batch.jobs_pending, 3)
    self.assertEqual(batch.jobs_blocked, 1)
    self.assertEqual(batch.jobs_queued, 6)
    self.assertEqual(batch.jobs_running, 5)
    self.assertEqual(batch.jobs_failed, 2)
    self.assertEqual(batch.jobs_completed, 9)
    self.assertEqual(batch.jobs_canceled, 1)
    self.assertEqual(batch.recipes_total, 3)
    self.assertEqual(batch.recipes_completed, 2)

    batch_metrics = BatchMetrics.objects.filter(batch_id=batch.id).order_by('job_name')
    self.assertEqual(len(batch_metrics), 8)

    def check_metrics(metrics, job_name, jobs_total, pending=0, blocked=0, queued=0, running=0,
                      failed=0, completed=0, canceled=0, min_job=None, avg_job=None,
                      max_job=None, min_seed=None, avg_seed=None, max_seed=None):
        """Checks one per-job-name metrics row; duration args default to None (no duration)"""
        self.assertEqual(metrics.job_name, job_name)
        self.assertEqual(metrics.jobs_total, jobs_total)
        self.assertEqual(metrics.jobs_pending, pending)
        self.assertEqual(metrics.jobs_blocked, blocked)
        self.assertEqual(metrics.jobs_queued, queued)
        self.assertEqual(metrics.jobs_running, running)
        self.assertEqual(metrics.jobs_failed, failed)
        self.assertEqual(metrics.jobs_completed, completed)
        self.assertEqual(metrics.jobs_canceled, canceled)
        self.assertEqual(metrics.min_job_duration, min_job)
        self.assertEqual(metrics.avg_job_duration, avg_job)
        self.assertEqual(metrics.max_job_duration, max_job)
        self.assertEqual(metrics.min_seed_duration, min_seed)
        self.assertEqual(metrics.avg_seed_duration, avg_seed)
        self.assertEqual(metrics.max_seed_duration, max_seed)

    one_minute = datetime.timedelta(minutes=1)
    check_metrics(batch_metrics[0], 'a', 3, failed=1, completed=2,
                  min_job=one_minute, avg_job=one_minute, max_job=one_minute)
    check_metrics(batch_metrics[1], 'b', 3, completed=3,
                  min_seed=datetime.timedelta(minutes=2),
                  avg_seed=datetime.timedelta(minutes=2, seconds=30),
                  max_seed=datetime.timedelta(minutes=3))
    check_metrics(batch_metrics[2], 'c', 9, queued=2, running=3, completed=4,
                  min_job=datetime.timedelta(minutes=2),
                  avg_job=datetime.timedelta(minutes=4),
                  max_job=datetime.timedelta(minutes=7))
    check_metrics(batch_metrics[3], 'd', 6, queued=4, running=2)
    check_metrics(batch_metrics[4], 'e', 3, pending=3)
    check_metrics(batch_metrics[5], 'f', 1, canceled=1)
    check_metrics(batch_metrics[6], 'g', 1, blocked=1)
    check_metrics(batch_metrics[7], 'h', 1, failed=1)

    # Test executing message again
    message_json_dict = message.to_json()
    message = UpdateBatchMetrics.from_json(message_json_dict)
    result = message.execute()
    self.assertTrue(result)
def setUp(self):
    """Build a recipe of eleven jobs in assorted states, plus its recipe type.

    The job names encode the states of their dependencies (e.g. job_fa_co_a
    depends on a FAILED and a COMPLETED job) so the tests can check how
    dependent-job status is derived.
    """
    django.setup()

    # One job per node name, each created with the status the tests expect.
    # Order matters: jobs are created in the same sequence as the original
    # so database IDs line up with any ordering assumptions in the tests.
    job_states = [
        ('job_failed', 'FAILED'),
        ('job_completed', 'COMPLETED'),
        ('job_running', 'RUNNING'),
        ('job_queued', 'QUEUED'),
        ('job_canceled', 'CANCELED'),
        ('job_fa_co_a', 'BLOCKED'),
        ('job_fa_co_b', 'PENDING'),
        ('job_co_ru_qu_a', 'BLOCKED'),
        ('job_co_ru_qu_b', 'BLOCKED'),
        ('job_qu_ca_a', 'PENDING'),
        ('job_qu_ca_b', 'PENDING'),
    ]
    for attr_name, status in job_states:
        setattr(self, attr_name, job_test_utils.create_job(status=status))

    # Dependency graph for the legacy (v1.0) recipe definition: maps each
    # node name to the node names it depends on (empty list = root node).
    dependency_map = [
        ('job_failed', []),
        ('job_completed', []),
        ('job_running', []),
        ('job_queued', []),
        ('job_canceled', []),
        ('job_fa_co_a', ['job_failed', 'job_completed']),
        ('job_fa_co_b', ['job_fa_co_a']),
        ('job_co_ru_qu_a', ['job_completed', 'job_running', 'job_queued']),
        ('job_co_ru_qu_b', ['job_co_ru_qu_a']),
        ('job_qu_ca_a', ['job_queued', 'job_canceled']),
        ('job_qu_ca_b', ['job_qu_ca_a']),
    ]
    job_entries = []
    for job_name, parents in dependency_map:
        job = getattr(self, job_name)
        entry = {
            'name': job_name,
            'job_type': {
                'name': job.job_type.name,
                'version': job.job_type.version,
            },
        }
        # Root nodes carry no 'dependencies' key at all, matching the
        # legacy definition schema.
        if parents:
            entry['dependencies'] = [{'name': parent} for parent in parents]
        job_entries.append(entry)

    self.definition = {
        'version': '1.0',
        'input_data': [],
        'jobs': job_entries,
    }

    self.recipe_type = recipe_test_utils.create_recipe_type(definition=self.definition)
    self.recipe = recipe_test_utils.create_recipe(recipe_type=self.recipe_type)

    # Attach every job to the recipe under its node name, in creation order.
    for job_name, _ in job_states:
        recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name=job_name,
                                            job=getattr(self, job_name))

    self.recipe_jobs = list(RecipeJob.objects.filter(recipe_id=self.recipe.id))
def test_execute_with_recipe_legacy(self):
    """Tests calling ProcessRecipeInput.execute() successfully when a legacy
    sub-recipe has to get its data from its recipe
    """

    workspace = storage_test_utils.create_workspace()
    # Four input files; their sizes drive the input_file_size assertion below
    file_1 = storage_test_utils.create_file(workspace=workspace, file_size=104857600.0)
    file_2 = storage_test_utils.create_file(workspace=workspace, file_size=987654321.0)
    file_3 = storage_test_utils.create_file(workspace=workspace, file_size=65456.0)
    file_4 = storage_test_utils.create_file(workspace=workspace, file_size=24564165456.0)

    # Seed manifest for job-a with a single-file output named 'output_a'
    manifest_a = {
        'seedVersion': '1.0.0',
        'job': {
            'name': 'job-a',
            'jobVersion': '1.0.0',
            'packageVersion': '1.0.0',
            'title': '',
            'description': '',
            'maintainer': {
                'name': 'John Doe',
                'email': '*****@*****.**'
            },
            'timeout': 10,
            'interface': {
                'command': '',
                'inputs': {
                    'files': [],
                    'json': []
                },
                'outputs': {
                    'files': [{
                        'name': 'output_a',
                        'pattern': '*.png'
                    }]
                }
            }
        }
    }
    job_type_a = job_test_utils.create_job_type(interface=manifest_a)
    output_data_a = Data()
    output_data_a.add_value(FileValue('output_a', [file_1.id]))
    output_data_a_dict = convert_data_to_v6_json(output_data_a).get_dict()

    # Seed manifest for job-b with a multiple-file output named 'output_b'
    manifest_b = {
        'seedVersion': '1.0.0',
        'job': {
            'name': 'job-b',
            'jobVersion': '1.0.0',
            'packageVersion': '1.0.0',
            'title': '',
            'description': '',
            'maintainer': {
                'name': 'John Doe',
                'email': '*****@*****.**'
            },
            'timeout': 10,
            'interface': {
                'command': '',
                'inputs': {
                    'files': [],
                    'json': []
                },
                'outputs': {
                    'files': [{
                        'name': 'output_b',
                        'pattern': '*.png',
                        'multiple': True
                    }]
                }
            }
        }
    }
    job_type_b = job_test_utils.create_job_type(interface=manifest_b)
    output_data_b = Data()
    output_data_b.add_value(FileValue('output_b', [file_2.id, file_3.id, file_4.id]))
    output_data_b_dict = convert_data_to_v6_json(output_data_b).get_dict()

    # Both upstream jobs are COMPLETED with output, so the sub-recipe's
    # input can be fully resolved from its recipe's dependency connections
    job_a = job_test_utils.create_job(job_type=job_type_a, num_exes=1, status='COMPLETED',
                                      output=output_data_a_dict)
    job_b = job_test_utils.create_job(job_type=job_type_b, num_exes=1, status='COMPLETED',
                                      output=output_data_b_dict)

    # Sub-recipe type C uses a legacy (v1) definition — the case under test
    sub_recipe_interface_c = Interface()
    sub_recipe_interface_c.add_parameter(FileParameter('input_a', ['image/png']))
    sub_recipe_interface_c.add_parameter(FileParameter('input_b', ['image/png'], multiple=True))
    sub_recipe_def_c = RecipeDefinition(sub_recipe_interface_c)
    sub_recipe_def_dict_c = convert_recipe_definition_to_v1_json(sub_recipe_def_c).get_dict()
    sub_recipe_type_c = recipe_test_utils.create_recipe_type(definition=sub_recipe_def_dict_c)
    sub_recipe_c = recipe_test_utils.create_recipe(recipe_type=sub_recipe_type_c)

    # Parent recipe: node_c (the sub-recipe) depends on node_a and node_b
    # and receives input_a/input_b from their respective outputs
    definition = RecipeDefinition(Interface())
    definition.add_job_node('node_a', job_type_a.name, job_type_a.version, job_type_a.revision_num)
    definition.add_job_node('node_b', job_type_b.name, job_type_b.version, job_type_b.revision_num)
    definition.add_recipe_node('node_c', sub_recipe_type_c.name, sub_recipe_type_c.revision_num)
    definition.add_dependency('node_c', 'node_a')
    definition.add_dependency_input_connection('node_c', 'input_a', 'node_a', 'output_a')
    definition.add_dependency('node_c', 'node_b')
    definition.add_dependency_input_connection('node_c', 'input_b', 'node_b', 'output_b')
    def_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type(definition=def_dict)
    recipe_data_dict = {
        'version': '1.0',
        'input_data': [],
        'workspace_id': workspace.id
    }
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=recipe_data_dict)
    recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_a', job=job_a)
    recipe_node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_b', job=job_b)
    recipe_node_c = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_c',
                                                         sub_recipe=sub_recipe_c)
    RecipeNode.objects.bulk_create([recipe_node_a, recipe_node_b, recipe_node_c])
    job_a.recipe = recipe
    job_a.save()
    job_b.recipe = recipe
    job_b.save()
    sub_recipe_c.recipe = recipe
    sub_recipe_c.save()

    # Create message
    message = ProcessRecipeInput()
    message.recipe_id = sub_recipe_c.id

    # Execute message
    result = message.execute()
    self.assertTrue(result)
    sub_recipe_c = Recipe.objects.get(id=sub_recipe_c.id)

    # Check for update_recipes message
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'update_recipes')

    # Check sub-recipe for expected input_file_size
    # (presumably the combined size of the four input files in MiB — TODO confirm units)
    self.assertEqual(sub_recipe_c.input_file_size, 24469.0)

    # Check sub-recipe for expected input data
    self.assertEqual(sub_recipe_c.input['version'], '1.0')
    # Should be legacy input data with workspace ID
    self.assertEqual(sub_recipe_c.input['workspace_id'], workspace.id)
    self.assertSetEqual(set(sub_recipe_c.get_input_data().values.keys()), {'input_a', 'input_b'})
    self.assertListEqual(sub_recipe_c.get_input_data().values['input_a'].file_ids, [file_1.id])
    self.assertListEqual(sub_recipe_c.get_input_data().values['input_b'].file_ids,
                         [file_2.id, file_3.id, file_4.id])

    # Make sure sub-recipe input file models are created
    input_files = RecipeInputFile.objects.filter(recipe_id=sub_recipe_c.id)
    self.assertEqual(len(input_files), 4)
    file_ids = {input_file.input_file_id for input_file in input_files}
    self.assertSetEqual(file_ids, {file_1.id, file_2.id, file_3.id, file_4.id})

    # Test executing message again — results below must be unchanged
    message_json_dict = message.to_json()
    message = ProcessRecipeInput.from_json(message_json_dict)
    result = message.execute()
    self.assertTrue(result)

    # Still should have update_recipes message
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'update_recipes')

    # Make sure recipe input file models are unchanged
    input_files = RecipeInputFile.objects.filter(recipe_id=sub_recipe_c.id)
    self.assertEqual(len(input_files), 4)
def test_execute_with_recipe(self):
    """Tests calling CreateJobs.execute() successfully with a recipe that supersedes another recipe"""
    from batch.test import utils as batch_test_utils
    from recipe.models import RecipeNode
    from recipe.test import utils as recipe_test_utils

    node_name = 'recipe_node'
    seed_job_type = job_test_utils.create_seed_job_type()
    recipe_type = recipe_test_utils.create_recipe_type()

    # Superseded recipe with one superseded job attached at the node
    old_recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, is_superseded=True)
    old_job = job_test_utils.create_job(job_type=seed_job_type, is_superseded=True)
    recipe_test_utils.create_recipe_node(recipe=old_recipe, node_name=node_name, job=old_job,
                                         save=True)

    # The new recipe supersedes the old one and belongs to a batch
    the_batch = batch_test_utils.create_batch()
    new_recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type,
                                                 superseded_recipe=old_recipe, batch=the_batch)

    # Create and execute message
    message = create_jobs_message_for_recipe(new_recipe, node_name, seed_job_type.name,
                                             seed_job_type.version, seed_job_type.revision_num,
                                             process_input=True)
    self.assertTrue(message.execute())

    # The created job must inherit its lineage from the superseded recipe/job
    created_job = Job.objects.get(job_type_id=seed_job_type.id, recipe_id=new_recipe.id)
    expected_fields = {
        'event_id': new_recipe.event_id,
        'recipe_id': new_recipe.id,
        'root_recipe_id': old_recipe.id,
        'root_superseded_job_id': old_job.id,
        'superseded_job_id': old_job.id,
        'batch_id': the_batch.id,
    }
    for field_name, expected_value in expected_fields.items():
        self.assertEqual(getattr(created_job, field_name), expected_value)
    self.assertEqual(RecipeNode.objects.filter(recipe_id=new_recipe.id, node_name=node_name,
                                               job_id=created_job.id).count(), 1)

    # Check for process_job_input message (because process_input=True)
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'process_job_input')
    self.assertEqual(message.new_messages[0].job_id, created_job.id)

    # Test executing message again
    message.new_messages = []
    self.assertTrue(message.execute())

    # Make sure a new job is not created
    self.assertEqual(Job.objects.filter(recipe_id=new_recipe.id).count(), 1)
    self.assertEqual(RecipeNode.objects.filter(recipe_id=new_recipe.id,
                                               node_name=node_name).count(), 1)

    # Check for same process_job_input message
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'process_job_input')
    self.assertEqual(message.new_messages[0].job_id, created_job.id)
def setUp(self):
    """Build a two-job recipe type, matching recipe input data, and a mock queue processor."""
    django.setup()

    workspace = storage_test_utils.create_workspace()
    source_file = source_test_utils.create_source(workspace=workspace)
    self.event = trigger_test_utils.create_trigger_event()

    def legacy_interface(inputs, outputs):
        # Shared shell for the two legacy (v1.0) job interfaces below.
        return {
            'version': '1.0',
            'command': 'test_command',
            'command_arguments': 'test_arg',
            'input_data': inputs,
            'output_data': outputs,
        }

    # Job 1: consumes a plain-text file, produces multiple PNG files.
    interface_1 = legacy_interface(
        [{'name': 'Test Input 1', 'type': 'file', 'media_types': ['text/plain']}],
        [{'name': 'Test Output 1', 'type': 'files', 'media_type': 'image/png'}])
    self.job_type_1 = job_test_utils.create_job_type(interface=interface_1)

    # Job 2: consumes multiple image files, produces a single file.
    interface_2 = legacy_interface(
        [{'name': 'Test Input 2', 'type': 'files', 'media_types': ['image/png', 'image/tiff']}],
        [{'name': 'Test Output 2', 'type': 'file'}])
    self.job_type_2 = job_test_utils.create_job_type(interface=interface_2)

    # Recipe: the recipe input feeds Job 1, whose output feeds Job 2.
    definition = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': self.job_type_1.name,
                'version': self.job_type_1.version,
            },
            'recipe_inputs': [{
                'recipe_input': 'Recipe Input',
                'job_input': 'Test Input 1',
            }],
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': self.job_type_2.name,
                'version': self.job_type_2.version,
            },
            'dependencies': [{
                'name': 'Job 1',
                'connections': [{
                    'output': 'Test Output 1',
                    'input': 'Test Input 2',
                }],
            }],
        }],
    }
    # Validate job interfaces before registering the recipe type.
    recipe_definition = RecipeDefinition(definition)
    recipe_definition.validate_job_interfaces()
    self.recipe_type = recipe_test_utils.create_recipe_type(definition=definition)

    self.data = RecipeData({
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'file_id': source_file.id,
        }],
        'workspace_id': workspace.id,
    })

    # Register a fake processor so queue events can be observed by the tests.
    self.mock_processor = MagicMock(QueueEventProcessor)
    Queue.objects.register_processor(lambda: self.mock_processor)