def create_batch_old(recipe_type=None, definition=None, title=None, description=None, status=None, recipe_count=0):
    """Creates a batch model for unit testing, filling in any missing pieces

    :returns: The batch model
    :rtype: :class:`batch.models.Batch`
    """

    if not recipe_type:
        recipe_type = recipe_test_utils.create_recipe_type()
    if not definition:
        definition = {}
    if not isinstance(definition, OldBatchDefinition):
        definition = OldBatchDefinition(definition)

    # Auto-generate unique title/description using the module-level counters
    if not title:
        global BATCH_TITLE_COUNTER
        title = 'Test Batch Title %i' % BATCH_TITLE_COUNTER
        BATCH_TITLE_COUNTER += 1
    if not description:
        global BATCH_DESCRIPTION_COUNTER
        description = 'Test Batch Description %i' % BATCH_DESCRIPTION_COUNTER
        BATCH_DESCRIPTION_COUNTER += 1

    # Populate recipes for the batch's recipe type
    for _ in range(recipe_count):
        recipe_test_utils.create_recipe(recipe_type=recipe_type)

    batch = Batch.objects.create_batch_old(recipe_type=recipe_type, definition=definition, title=title,
                                           description=description)
    if status:
        batch.status = status
        batch.save()
    return batch
def test_json(self):
    """Tests converting a CreateBatchRecipes message to and from JSON"""

    # A previous batch containing three recipes to be reprocessed
    recipe_type = recipe_test_utils.create_recipe_type()
    prev_batch = batch_test_utils.create_batch(recipe_type=recipe_type, is_creation_done=True, recipes_total=3)
    prev_recipes = [recipe_test_utils.create_recipe(batch=prev_batch) for _ in range(3)]

    definition = BatchDefinition()
    definition.root_batch_id = prev_batch.root_batch_id
    batch = batch_test_utils.create_batch(recipe_type=recipe_type, definition=definition)

    # Round-trip the message through JSON and execute it
    message = create_batch_recipes_message(batch.id)
    message_json_dict = message.to_json()
    new_message = CreateBatchRecipes.from_json(message_json_dict)
    result = new_message.execute()
    self.assertTrue(result)

    # A single reprocess_recipes message should cover all three recipes
    self.assertEqual(len(new_message.new_messages), 1)
    reprocess_msg = new_message.new_messages[0]
    self.assertEqual(reprocess_msg.type, 'reprocess_recipes')
    self.assertSetEqual(set(reprocess_msg._root_recipe_ids), {recipe.id for recipe in prev_recipes})
def setUp(self):
    """Sets up a seed job type and a RUNNING recipe job with its execution"""

    django.setup()

    outputs = [{'name': 'arg1', 'pattern': '*_.txt'},
               {'name': 'arg2', 'pattern': '*_.txt'}]
    # NOTE(review): 'outputs_files' may be a typo for 'output_files' in
    # create_seed_manifest's signature -- confirm against the test utility.
    manifest = job_utils.create_seed_manifest(command='command args', outputs_files=outputs)
    self.job_type = job_utils.create_seed_job_type(job_version='1.0', manifest=manifest)

    # The original created recipe_type/recipe twice and immediately overwrote the
    # first pair (and left unused 'cmd'/'cmd_args' locals); create them once here.
    self.recipe_type = recipe_utils.create_recipe_type_v6()
    self.recipe = recipe_utils.create_recipe(recipe_type=self.recipe_type)

    self.event = TriggerEvent.objects.create_trigger_event('TEST', None, {}, now())
    self.job = job_utils.create_job(job_type=self.job_type, event=self.event, status='RUNNING',
                                    recipe=self.recipe)
    self.job_exe = job_utils.create_job_exe(job=self.job, status='RUNNING')
def setUp(self):
    """Sets up a recipe type with a single 'kml' job node plus two recipes"""

    django.setup()

    self.job_type1 = job_test_utils.create_job_type()
    definition = {
        'version': '1.0',
        'input_data': [{
            'media_types': ['image/x-hdf5-image'],
            'type': 'file',
            'name': 'input_file',
        }],
        'jobs': [{
            'job_type': {
                'name': self.job_type1.name,
                'version': self.job_type1.version,
            },
            'name': 'kml',
            'recipe_inputs': [{
                'job_input': 'input_file',
                'recipe_input': 'input_file',
            }],
        }],
    }
    self.recipe_type = recipe_test_utils.create_recipe_type(name='my-type', definition=definition)
    self.recipe1 = recipe_test_utils.create_recipe(self.recipe_type)
    self.recipe_job1 = recipe_test_utils.create_recipe_job(recipe=self.recipe1)
    self.recipe2 = recipe_test_utils.create_recipe()
def test_execute_with_parent_recipe(self):
    """Tests PurgeRecipe.execute() for a recipe nested inside a parent recipe"""

    # A sub-recipe attached to a parent recipe as node 'A'
    recipe = recipe_test_utils.create_recipe()
    parent_recipe = recipe_test_utils.create_recipe()
    recipe_test_utils.create_recipe_node(recipe=parent_recipe, node_name='A', sub_recipe=recipe, save=True)

    # Create and execute the purge message for the sub-recipe
    message = create_purge_recipe_message(recipe_id=recipe.id, trigger_id=self.trigger.id,
                                          source_file_id=self.file_1.id)
    self.assertTrue(message.execute())

    # Exactly one follow-on purge_recipe message should target the parent
    purge_msgs = [msg for msg in message.new_messages if msg.type == 'purge_recipe']
    self.assertEqual(len(purge_msgs), 1)
    for msg in purge_msgs:
        self.assertEqual(msg.recipe_id, parent_recipe.id)

    # The recipe and its nodes should have been deleted
    self.assertEqual(Recipe.objects.filter(id=recipe.id).count(), 0)
    self.assertEqual(RecipeNode.objects.filter(recipe=recipe).count(), 0)
def test_convert_recipe_to_v6_json(self):
    """Tests calling convert_recipe_to_v6_json() successfully"""

    # Job types and a sub-recipe type for the definition's nodes
    job_type_1 = job_test_utils.create_seed_job_type()
    job_type_2 = job_test_utils.create_seed_job_type()
    job_type_3 = job_test_utils.create_seed_job_type()
    job_type_4 = job_test_utils.create_seed_job_type()
    recipe_type_1 = recipe_test_utils.create_recipe_type_v6()

    # Recipe interface plus the filter driving condition node 'F'
    interface = Interface()
    interface.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface.add_parameter(JsonParameter('json_param_1', 'object'))
    df1 = DataFilter(filter_list=[{'name': 'file_param_1', 'type': 'media-type', 'condition': '==',
                                   'values': ['image/gif']},
                                  {'name': 'json_param_1', 'type': 'object', 'condition': 'superset of',
                                   'values': [{}]}], all=False)

    definition = RecipeDefinition(interface)
    definition.add_job_node('A', job_type_1.name, job_type_1.version, job_type_1.revision_num)
    definition.add_job_node('B', job_type_2.name, job_type_2.version, job_type_2.revision_num)
    definition.add_job_node('C', job_type_3.name, job_type_3.version, job_type_3.revision_num)
    definition.add_recipe_node('D', recipe_type_1.name, recipe_type_1.revision_num)
    definition.add_job_node('E', job_type_4.name, job_type_4.version, job_type_4.revision_num)
    definition.add_condition_node('F', interface, df1)  # False
    definition.add_job_node('G', job_type_4.name, job_type_4.version, job_type_4.revision_num)
    definition.add_dependency('A', 'B')
    definition.add_dependency('A', 'C')
    definition.add_dependency('B', 'E')
    definition.add_dependency('C', 'D')
    definition.add_dependency('A', 'F')
    definition.add_dependency('F', 'G')
    definition.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    definition.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')

    # Recipe and node models; jobs are bulk-created unsaved for speed
    recipe = recipe_test_utils.create_recipe()
    job_a = job_test_utils.create_job(job_type=job_type_1, status='COMPLETED', save=False)
    job_b = job_test_utils.create_job(job_type=job_type_2, status='RUNNING', save=False)
    job_c = job_test_utils.create_job(job_type=job_type_3, status='COMPLETED', save=False)
    job_e = job_test_utils.create_job(job_type=job_type_4, status='PENDING', num_exes=0, save=False)
    Job.objects.bulk_create([job_a, job_b, job_c, job_e])
    condition_f = recipe_test_utils.create_recipe_condition(is_processed=True, is_accepted=False, save=True)
    recipe_d = recipe_test_utils.create_recipe(recipe_type=recipe_type_1)
    recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='A', job=job_a, save=False)
    recipe_node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='B', job=job_b, save=False)
    recipe_node_c = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='C', job=job_c, save=False)
    recipe_node_d = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='D', sub_recipe=recipe_d,
                                                         save=False)
    recipe_node_e = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='E', job=job_e, save=False)
    recipe_node_f = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='F', condition=condition_f,
                                                         save=False)
    recipe_nodes = [recipe_node_a, recipe_node_b, recipe_node_c, recipe_node_d, recipe_node_e, recipe_node_f]

    recipe_instance = RecipeInstance(definition, recipe, recipe_nodes)
    json = convert_recipe_to_v6_json(recipe_instance)
    RecipeInstanceV6(json=json.get_dict(), do_validate=True)  # Revalidate
    self.assertSetEqual(set(json.get_dict()['nodes'].keys()), {'A', 'B', 'C', 'D', 'E', 'F'})
def test_json(self):
    """Tests converting an UpdateBatchMetrics message to and from JSON"""

    batch = batch_test_utils.create_batch()

    # First recipe: five jobs covering FAILED, CANCELED, two BLOCKED, COMPLETED
    recipe_1 = recipe_test_utils.create_recipe(batch=batch)
    job_1 = job_test_utils.create_job(status='FAILED')
    job_2 = job_test_utils.create_job(status='CANCELED')
    job_3 = job_test_utils.create_job(status='BLOCKED')
    job_4 = job_test_utils.create_job(status='BLOCKED')
    job_5 = job_test_utils.create_job(status='COMPLETED')
    for job in (job_1, job_2, job_3, job_4, job_5):
        recipe_test_utils.create_recipe_job(recipe=recipe_1, job=job)

    # Second recipe: marked completed, with three COMPLETED jobs
    recipe_2 = recipe_test_utils.create_recipe(batch=batch)
    recipe_2.is_completed = True
    recipe_2.save()
    job_6 = job_test_utils.create_job(status='COMPLETED')
    job_7 = job_test_utils.create_job(status='COMPLETED')
    job_8 = job_test_utils.create_job(status='COMPLETED')
    for job in (job_6, job_7, job_8):
        recipe_test_utils.create_recipe_job(recipe=recipe_2, job=job)

    # Generate recipe metrics
    Recipe.objects.update_recipe_metrics([recipe_1.id, recipe_2.id])

    # Add batch to message
    message = UpdateBatchMetrics()
    if message.can_fit_more():
        message.add_batch(batch.id)

    # Round-trip the message through JSON and execute it
    message_json_dict = message.to_json()
    new_message = UpdateBatchMetrics.from_json(message_json_dict)
    self.assertTrue(new_message.execute())

    # Verify the aggregated per-status job counts and recipe counts
    batch = Batch.objects.get(id=batch.id)
    self.assertEqual(batch.jobs_total, 8)
    self.assertEqual(batch.jobs_pending, 0)
    self.assertEqual(batch.jobs_blocked, 2)
    self.assertEqual(batch.jobs_queued, 0)
    self.assertEqual(batch.jobs_running, 0)
    self.assertEqual(batch.jobs_failed, 1)
    self.assertEqual(batch.jobs_completed, 4)
    self.assertEqual(batch.jobs_canceled, 1)
    self.assertEqual(batch.recipes_total, 2)
    self.assertEqual(batch.recipes_completed, 1)
def setUp(self):
    """Sets up a batch-creator recipe type, a recipe with real input data, and two extras"""

    django.setup()

    self.job_type1 = job_test_utils.create_job_type(name='scale-batch-creator')
    definition = {
        'version': '1.0',
        'input_data': [{
            'media_types': ['image/x-hdf5-image'],
            'type': 'file',
            'name': 'input_file',
        }],
        'jobs': [{
            'job_type': {
                'name': self.job_type1.name,
                'version': self.job_type1.version,
            },
            'name': 'kml',
            'recipe_inputs': [{
                'job_input': 'input_file',
                'recipe_input': 'input_file',
            }],
        }],
    }

    # Workspace/file backing the recipe's input data
    workspace1 = storage_test_utils.create_workspace()
    file1 = storage_test_utils.create_file(workspace=workspace1)
    data = {
        'version': '1.0',
        'input_data': [{
            'name': 'input_file',
            'file_id': file1.id,
        }],
        'workspace_id': workspace1.id,
    }

    self.recipe_type = recipe_test_utils.create_recipe_type(name='my-type', definition=definition)
    recipe_handler = recipe_test_utils.create_recipe_handler(recipe_type=self.recipe_type, data=data)
    self.recipe1 = recipe_handler.recipe
    self.recipe1_jobs = recipe_handler.recipe_jobs
    self.recipe2 = recipe_test_utils.create_recipe()
    self.recipe3 = recipe_test_utils.create_recipe(is_superseded=True)
def setUp(self):
    """Sets up one recipe with three named recipe jobs"""

    django.setup()

    self.recipe = recipe_test_utils.create_recipe()
    self.recipe_job1 = recipe_test_utils.create_recipe_job(self.recipe, job_name='job 1')
    self.recipe_job2 = recipe_test_utils.create_recipe_job(self.recipe, job_name='job 2')
    self.recipe_job3 = recipe_test_utils.create_recipe_job(self.recipe, job_name='job 3')
def test_json(self):
    """Tests converting a ProcessCondition message to and from JSON"""

    definition = RecipeDefinition(Interface())
    # TODO: once DataFilter is implemented, create a DataFilter object here that accepts the inputs
    definition.add_condition_node('node_a', Interface(), DataFilter(True))
    definition_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type(definition=definition_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)
    condition = recipe_test_utils.create_recipe_condition(recipe=recipe, save=True)
    recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_a', condition=condition, save=True)

    # Round-trip the message through JSON and execute it
    message = create_process_condition_messages([condition.id])[0]
    message_json_dict = message.to_json()
    new_message = ProcessCondition.from_json(message_json_dict)
    self.assertTrue(new_message.execute())

    # The condition should be processed/accepted and trigger a recipe update
    condition = RecipeCondition.objects.get(id=condition.id)
    self.assertEqual(len(new_message.new_messages), 1)
    self.assertEqual(new_message.new_messages[0].type, 'update_recipe')
    self.assertEqual(new_message.new_messages[0].root_recipe_id, recipe.id)
    self.assertTrue(condition.is_processed)
    self.assertIsNotNone(condition.processed)
    self.assertTrue(condition.is_accepted)
def setUp(self):
    """Sets up a two-job recipe where 'Job 2' depends on 'Job 1'"""

    django.setup()

    self.job_1 = job_test_utils.create_job(status="RUNNING")
    self.job_2 = job_test_utils.create_job(data={})
    self.job_3 = job_test_utils.create_job(status="FAILED")
    definition = {
        "version": "1.0",
        "input_data": [],
        "jobs": [{
            "name": "Job 1",
            "job_type": {"name": self.job_1.job_type.name, "version": self.job_1.job_type.version},
        }, {
            "name": "Job 2",
            "job_type": {"name": self.job_2.job_type.name, "version": self.job_2.job_type.version},
            "dependencies": [{"name": "Job 1"}],
        }],
    }
    self.recipe_type = recipe_test_utils.create_recipe_type(definition=definition)
    self.recipe = recipe_test_utils.create_recipe(recipe_type=self.recipe_type)
    self.recipe_job = recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name="Job 1", job=self.job_1)
    self.recipe_job = recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name="Job 2", job=self.job_2)
def test_execute_force_stop(self):
    """Tests PurgeRecipe.execute() when the purge has been force-stopped"""

    # PurgeResults entry flagged with force_stop_purge
    file_2 = storage_test_utils.create_file(file_type='SOURCE')
    trigger = trigger_test_utils.create_trigger_event()
    PurgeResults.objects.create(source_file_id=file_2.id, trigger_event=trigger, force_stop_purge=True)
    self.assertEqual(
        PurgeResults.objects.values_list('num_recipes_deleted', flat=True).get(trigger_event=trigger.id), 0)

    # Recipe that would otherwise be purged
    recipe_type = recipe_test_utils.create_recipe_type()
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)

    # Execute the purge message
    message = create_purge_recipe_message(recipe_id=recipe.id, trigger_id=trigger.id,
                                          source_file_id=file_2.id)
    self.assertTrue(message.execute())

    # Nothing should have been deleted because of the force stop
    self.assertEqual(
        PurgeResults.objects.values_list('num_recipes_deleted', flat=True).get(source_file_id=file_2.id), 0)
def test_json(self):
    """Tests converting an UpdateRecipe message to and from JSON"""

    # A failed job with a pending dependent inside one recipe
    data_dict = convert_data_to_v6_json(Data()).get_dict()
    job_failed = job_test_utils.create_job(status='FAILED', input=data_dict)
    job_pending = job_test_utils.create_job(status='PENDING')
    definition = RecipeDefinition(Interface())
    definition.add_job_node('job_failed', job_failed.job_type.name, job_failed.job_type.version,
                            job_failed.job_type_rev.revision_num)
    definition.add_job_node('job_pending', job_pending.job_type.name, job_pending.job_type.version,
                            job_pending.job_type_rev.revision_num)
    definition.add_dependency('job_failed', 'job_pending')
    definition_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_failed', job=job_failed)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_pending', job=job_pending)

    # Round-trip the message through JSON and execute it
    message = create_update_recipe_message(recipe.id)
    message_json_dict = message.to_json()
    new_message = UpdateRecipe.from_json(message_json_dict)
    self.assertTrue(new_message.execute())

    # Check for message to set job_pending to BLOCKED
    self.assertEqual(len(new_message.new_messages), 1)
    msg = new_message.new_messages[0]
    self.assertEqual(msg.type, 'blocked_jobs')
    self.assertListEqual(msg._blocked_job_ids, [job_pending.id])
def test_json_forced_nodes(self):
    """Tests converting a ProcessRecipeInput message to and from JSON with forced nodes provided"""

    data_dict = convert_data_to_v6_json(Data()).get_dict()
    recipe = recipe_test_utils.create_recipe(input=data_dict)
    forced_nodes = ForcedNodes()
    forced_nodes.set_all_nodes()
    forced_nodes_dict = convert_forced_nodes_to_v6(forced_nodes).get_dict()

    # Round-trip the message through JSON and execute it
    message = create_process_recipe_input_messages([recipe.id], forced_nodes=forced_nodes)[0]
    message_json_dict = message.to_json()
    new_message = ProcessRecipeInput.from_json(message_json_dict)
    self.assertTrue(new_message.execute())

    # The follow-on update_recipe message must carry the forced nodes through
    recipe = Recipe.objects.get(id=recipe.id)
    self.assertEqual(len(new_message.new_messages), 1)
    msg = new_message.new_messages[0]
    self.assertEqual(msg.type, 'update_recipe')
    self.assertEqual(msg.root_recipe_id, recipe.id)
    self.assertDictEqual(convert_forced_nodes_to_v6(msg.forced_nodes).get_dict(), forced_nodes_dict)

    # Recipe should have input_file_size set to 0 (no input files)
    self.assertEqual(recipe.input_file_size, 0.0)
def test_json(self):
    """Tests converting a CreateConditions message to and from JSON"""

    batch = batch_test_utils.create_batch()
    recipe = recipe_test_utils.create_recipe(batch=batch)
    conditions = [Condition('node_1', False), Condition('node_2', True)]

    # Round-trip the message through JSON and execute it
    message = create_conditions_messages(recipe, conditions)[0]
    message_json_dict = message.to_json()
    new_message = CreateConditions.from_json(message_json_dict)
    self.assertTrue(new_message.execute())

    # Both condition models should exist, attached to their recipe nodes
    self.assertEqual(RecipeCondition.objects.filter(recipe_id=recipe.id).count(), 2)
    recipe_nodes = RecipeNode.objects.select_related('condition').filter(
        recipe_id=recipe.id).order_by('node_name')
    self.assertEqual(len(recipe_nodes), 2)
    self.assertEqual(recipe_nodes[0].node_name, 'node_1')
    self.assertEqual(recipe_nodes[1].node_name, 'node_2')
    condition_2 = recipe_nodes[1].condition

    # Should be one message for processing condition for node 2
    self.assertEqual(len(new_message.new_messages), 1)
    process_condition_msg = new_message.new_messages[0]
    self.assertEqual(process_condition_msg.type, 'process_condition')
    self.assertEqual(process_condition_msg.condition_id, condition_2.id)
def setUp(self):
    """Sets up a two-job recipe ('Job 2' depends on 'Job 1') with execution counts"""

    django.setup()

    self.job_1 = job_test_utils.create_job(status='RUNNING', num_exes=1)
    self.job_2 = job_test_utils.create_job(input={}, num_exes=0)
    self.job_3 = job_test_utils.create_job(status='FAILED', num_exes=1)
    definition = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': self.job_1.job_type.name,
                'version': self.job_1.job_type.version,
            },
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': self.job_2.job_type.name,
                'version': self.job_2.job_type.version,
            },
            'dependencies': [{'name': 'Job 1'}],
        }],
    }
    self.recipe_type = recipe_test_utils.create_recipe_type(definition=definition)
    self.recipe = recipe_test_utils.create_recipe(recipe_type=self.recipe_type)
    self.recipe_job = recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='Job 1', job=self.job_1)
    self.recipe_job = recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='Job 2', job=self.job_2)
def test_execute_with_recipe(self):
    """Tests calling PurgeSourceFile.execute() successfully"""

    # A source file tracked by a PurgeResults entry
    source_file = storage_test_utils.create_file(file_type='SOURCE')
    trigger = trigger_test_utils.create_trigger_event()
    PurgeResults.objects.create(source_file_id=source_file.id, trigger_event=trigger)

    # A recipe fed by that source file
    recipe = recipe_test_utils.create_recipe()
    recipe_test_utils.create_input_file(recipe=recipe, input_file=source_file)

    # Execute the purge message
    message = create_purge_source_file_message(source_file_id=source_file.id, trigger_id=trigger.id)
    self.assertTrue(message.execute())

    # A single follow-on message should purge the recipe
    self.assertEqual(len(message.new_messages), 1)
    for msg in message.new_messages:
        self.assertEqual(msg.recipe_id, recipe.id)
        self.assertEqual(msg.type, 'purge_recipe')
def test_execute_with_recipe(self):
    """Tests calling PurgeJobs.execute() successfully with job as part of recipe"""

    # A completed job attached to a recipe as node 'A'
    recipe = recipe_test_utils.create_recipe()
    job_exe = job_test_utils.create_job_exe(status='COMPLETED')
    job = job_exe.job
    recipe_test_utils.create_recipe_node(recipe=recipe, node_name='A', job=job, save=True)
    source_file = storage_test_utils.create_file(file_type='SOURCE')
    trigger = trigger_test_utils.create_trigger_event()
    PurgeResults.objects.create(source_file_id=source_file.id, trigger_event=trigger)

    # Build the purge message by hand
    message = PurgeJobs()
    message._purge_job_ids = [job.id]
    message.trigger_id = trigger.id
    message.source_file_id = source_file.id
    message.status_change = timezone.now()

    # Execute message
    self.assertTrue(message.execute())

    # One follow-on purge_recipe message should target the containing recipe
    purge_msgs = [msg for msg in message.new_messages if msg.type == 'purge_recipe']
    self.assertEqual(len(purge_msgs), 1)
    for msg in purge_msgs:
        self.assertEqual(msg.recipe_id, recipe.id)
def setUp(self):
    """Sets up a two-job recipe where 'Job 2' depends on 'Job 1'"""

    django.setup()

    self.job_1 = job_test_utils.create_job(status='RUNNING')
    self.job_2 = job_test_utils.create_job(data={})
    definition = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': self.job_1.job_type.name,
                'version': self.job_1.job_type.version,
            },
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': self.job_2.job_type.name,
                'version': self.job_2.job_type.version,
            },
            'dependencies': [{'name': 'Job 1'}],
        }],
    }
    self.recipe_type = recipe_test_utils.create_recipe_type(definition=definition)
    self.recipe = recipe_test_utils.create_recipe(recipe_type=self.recipe_type)
    self.recipe_job = recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='Job 1', job=self.job_1)
    self.recipe_job = recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='Job 2', job=self.job_2)
def create_batch_recipe(batch=None, recipe=None, superseded_recipe=None):
    """Creates a BatchRecipe model for unit testing

    :returns: The BatchRecipe model
    :rtype: :class:`batch.models.BatchRecipe`
    """

    # Fill in any models the caller did not supply
    if not batch:
        batch = create_batch()
    if not recipe:
        recipe = recipe_test_utils.create_recipe()
    if not superseded_recipe:
        superseded_recipe = recipe_test_utils.create_recipe()

    return BatchRecipe.objects.create(batch=batch, recipe=recipe, superseded_recipe=superseded_recipe)
def setUp(self):
    """Sets up a recipe type, a recipe built from real input data, and two extra recipes"""

    django.setup()

    self.job_type1 = job_test_utils.create_job_type()
    definition = {
        'version': '1.0',
        'input_data': [{
            'media_types': ['image/x-hdf5-image'],
            'type': 'file',
            'name': 'input_file',
        }],
        'jobs': [{
            'job_type': {
                'name': self.job_type1.name,
                'version': self.job_type1.version,
            },
            'name': 'kml',
            'recipe_inputs': [{
                'job_input': 'input_file',
                'recipe_input': 'input_file',
            }],
        }],
    }

    # Workspace/file backing the recipe's input data
    workspace1 = storage_test_utils.create_workspace()
    file1 = storage_test_utils.create_file(workspace=workspace1)
    data = {
        'version': '1.0',
        'input_data': [{
            'name': 'input_file',
            'file_id': file1.id,
        }],
        'workspace_id': workspace1.id,
    }

    self.recipe_type = recipe_test_utils.create_recipe_type(name='my-type', definition=definition)
    recipe_handler = recipe_test_utils.create_recipe_handler(recipe_type=self.recipe_type, data=data)
    self.recipe1 = recipe_handler.recipe
    self.recipe1_jobs = recipe_handler.recipe_jobs
    self.recipe2 = recipe_test_utils.create_recipe()
    self.recipe3 = recipe_test_utils.create_recipe(is_superseded=True)
def test_get_nodes_to_create(self):
    """Tests calling Recipe.get_nodes_to_create()"""

    job_type = job_test_utils.create_job_type()
    sub_recipe_type = recipe_test_utils.create_recipe_type()

    # Graph: A->{D,E}, B->{E,F}, C->F, D->G, E->{G,H}; B/C/D are condition nodes
    definition = RecipeDefinition(Interface())
    definition.add_job_node('A', job_type.name, job_type.version, job_type.revision_num)
    definition.add_condition_node('B', Interface(), DataFilter(True))
    definition.add_condition_node('C', Interface(), DataFilter(True))
    definition.add_condition_node('D', Interface(), DataFilter(False))
    definition.add_job_node('E', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('F', job_type.name, job_type.version, job_type.revision_num)
    definition.add_recipe_node('G', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_recipe_node('H', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_dependency('A', 'D')
    definition.add_dependency('A', 'E')
    definition.add_dependency('B', 'E')
    definition.add_dependency('B', 'F')
    definition.add_dependency('C', 'F')
    definition.add_dependency('D', 'G')
    definition.add_dependency('E', 'G')
    definition.add_dependency('E', 'H')
    definition_json_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type(definition=definition_json_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)

    # Nodes A, B, and D already exist
    job_a = job_test_utils.create_job(job_type=job_type, status='COMPLETED', save=True)
    condition_b = recipe_test_utils.create_recipe_condition(is_processed=True, is_accepted=True, save=False)
    condition_d = recipe_test_utils.create_recipe_condition(is_processed=True, is_accepted=False, save=False)
    RecipeCondition.objects.bulk_create([condition_b, condition_d])
    recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='A', job=job_a, save=False)
    recipe_node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='B', condition=condition_b,
                                                         save=False)
    recipe_node_d = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='D', condition=condition_d,
                                                         save=False)
    RecipeNode.objects.bulk_create([recipe_node_a, recipe_node_b, recipe_node_d])

    recipe_instance = Recipe.objects.get_recipe_instance(recipe.id)
    nodes_to_create = recipe_instance.get_nodes_to_create()
    self.assertSetEqual(set(nodes_to_create.keys()), {'C', 'E', 'H'})
def test_convert_recipe_to_v6_json_empty(self):
    """Tests calling convert_recipe_to_v6_json() with an empty recipe instance"""

    recipe = recipe_test_utils.create_recipe()
    empty_definition = RecipeDefinition(Interface())
    recipe_instance = RecipeInstance(empty_definition, recipe, [])

    json = convert_recipe_to_v6_json(recipe_instance)
    RecipeInstanceV6(json=json.get_dict(), do_validate=True)  # Revalidate
    self.assertDictEqual(json.get_dict()['nodes'], {})
def setUp(self):
    """Sets up one recipe with three named recipe jobs"""

    django.setup()

    self.recipe = recipe_test_utils.create_recipe()
    self.recipe_job1 = recipe_test_utils.create_recipe_job(self.recipe, job_name='job 1')
    self.recipe_job2 = recipe_test_utils.create_recipe_job(self.recipe, job_name='job 2')
    self.recipe_job3 = recipe_test_utils.create_recipe_job(self.recipe, job_name='job 3')
def test_execute_with_sub_recipe(self):
    """Tests PurgeRecipe.execute() when the recipe contains a sub-recipe"""

    # Sub-recipe 'A' with some job counts, inserted via bulk_create
    sub_recipe_type = recipe_test_utils.create_recipe_type()
    definition = RecipeDefinition(Interface())
    definition.add_recipe_node('A', sub_recipe_type.name, sub_recipe_type.revision_num)
    recipe_a = recipe_test_utils.create_recipe(recipe_type=sub_recipe_type, save=False)
    recipe_a.jobs_completed = 3
    recipe_a.jobs_running = 2
    recipe_a.jobs_total = 5
    Recipe.objects.bulk_create([recipe_a])

    # Parent recipe whose definition references the sub-recipe node
    definition_json_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type(definition=definition_json_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)
    recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='A', sub_recipe=recipe_a,
                                                         save=False)
    RecipeNode.objects.bulk_create([recipe_node_a])

    # Execute the purge message for the parent recipe
    message = create_purge_recipe_message(recipe_id=recipe.id, trigger_id=self.trigger.id,
                                          source_file_id=self.file_1.id)
    self.assertTrue(message.execute())

    # Exactly one follow-on purge_recipe message should target the sub-recipe
    purge_msgs = [msg for msg in message.new_messages if msg.type == 'purge_recipe']
    self.assertEqual(len(purge_msgs), 1)
    for msg in purge_msgs:
        self.assertEqual(msg.recipe_id, recipe_node_a.sub_recipe.id)
def setUp(self):
    """Sets up three batches; the third has a recipe whose definition uses a seed job type"""

    django.setup()

    rest.login_client(self.client, is_staff=True)

    self.recipe_type_1 = recipe_test_utils.create_recipe_type_v6()
    self.batch_1 = batch_test_utils.create_batch(recipe_type=self.recipe_type_1, is_creation_done=False)
    self.recipe_type_2 = recipe_test_utils.create_recipe_type_v6()
    self.batch_2 = batch_test_utils.create_batch(recipe_type=self.recipe_type_2, is_creation_done=True)

    # Recipe type 3 points node_a at a concrete seed job type
    self.job_type1 = job_test_utils.create_seed_job_type(manifest=job_test_utils.MINIMUM_MANIFEST)
    self.sub_definition = copy.deepcopy(recipe_test_utils.SUB_RECIPE_DEFINITION)
    node_type = self.sub_definition['nodes']['node_a']['node_type']
    node_type['job_type_name'] = self.job_type1.name
    node_type['job_type_version'] = self.job_type1.version
    node_type['job_type_revision'] = self.job_type1.revision_num
    self.recipe_type_3 = recipe_test_utils.create_recipe_type_v6(definition=self.sub_definition)
    self.batch_3 = batch_test_utils.create_batch(recipe_type=self.recipe_type_3, is_creation_done=True)
    recipe_test_utils.create_recipe(recipe_type=self.recipe_type_3, batch=self.batch_3)
def test_update_recipe_fields(self):
    """Tests running the database update to populate new recipe fields in job model"""

    # recipe_2 supersedes recipe_1
    recipe_1 = recipe_test_utils.create_recipe(is_superseded=True)
    recipe_2 = recipe_test_utils.create_recipe()
    recipe_2.root_superseded_recipe = recipe_1
    recipe_2.superseded_recipe = recipe_1
    recipe_2.completed = now()
    recipe_2.save()

    # Jobs spread across both recipes (job_1 appears in both)
    job_1 = job_test_utils.create_job()
    job_2 = job_test_utils.create_job(is_superseded=True)
    job_3 = job_test_utils.create_job()
    recipe_test_utils.create_recipe_job(recipe=recipe_1, job=job_1)
    recipe_test_utils.create_recipe_job(recipe=recipe_1, job=job_2)
    recipe_job_3 = recipe_test_utils.create_recipe_job(recipe=recipe_2, job=job_1)
    # NOTE(review): this flag is set on the in-memory model but never saved, so the
    # database row keeps its original is_original value -- confirm this is intended.
    recipe_job_3.is_original = False
    recipe_test_utils.create_recipe_job(recipe=recipe_2, job=job_3)

    # Run update
    updater = DatabaseUpdater()
    updater.update()

    # Check results: each job's recipe/root_recipe fields were populated
    job_1 = Job.objects.get(id=job_1.id)
    self.assertEqual(job_1.recipe_id, recipe_1.id)
    self.assertEqual(job_1.root_recipe_id, recipe_1.id)
    job_2 = Job.objects.get(id=job_2.id)
    self.assertEqual(job_2.recipe_id, recipe_1.id)
    self.assertEqual(job_2.root_recipe_id, recipe_1.id)
    job_3 = Job.objects.get(id=job_3.id)
    self.assertEqual(job_3.recipe_id, recipe_2.id)
    self.assertEqual(job_3.root_recipe_id, recipe_1.id)
    recipe_2 = Recipe.objects.get(id=recipe_2.id)
    self.assertTrue(recipe_2.is_completed)
def test_successful_recipe_path(self, mock_upload_files, mock_create_file_ancestry_links):
    """Tests calling ProductDataFileType.store_files() successfully with a job that is in a recipe"""
    job_exe_in_recipe = job_utils.create_job_exe(status='RUNNING')
    recipe = recipe_utils.create_recipe()
    _recipe_job = recipe_utils.create_recipe_job(recipe=recipe, job_name='My Job', job=job_exe_in_recipe.job)
    # Expected remote prefix encodes both the recipe type and the job type
    remote_base_path_with_recipe = os.path.join('recipes', get_valid_filename(recipe.recipe_type.name),
                                                get_valid_filename(recipe.recipe_type.version), 'jobs',
                                                get_valid_filename(job_exe_in_recipe.job.job_type.name),
                                                get_valid_filename(job_exe_in_recipe.job.job_type.version))
    local_path_1 = os.path.join('my', 'path', 'one', 'my_test.txt')
    local_path_2 = os.path.join('my', 'path', 'one', 'my_test.json')
    local_path_3 = os.path.join('my', 'path', 'three', 'my_test.png')
    local_path_4 = os.path.join('my', 'path', 'four', 'my_test.xml')
    media_type_1 = 'text/plain'
    media_type_2 = 'application/json'
    media_type_3 = 'image/png'
    media_type_4 = None
    file_id_by_path = {local_path_1: 1, local_path_2: 2, local_path_3: 3, local_path_4: 4}

    # Fake out the upload so we can inspect the remote paths it receives
    def new_upload_files(upload_dir, work_dir, file_entries, input_file_ids, job_exe, workspace):
        results = []
        for file_entry in file_entries:
            # Check base remote path for recipe type and job type information
            self.assertTrue(file_entry[1].startswith(remote_base_path_with_recipe))
            if file_entry[0] in file_id_by_path:
                mock_file = MagicMock()
                mock_file.id = file_id_by_path[file_entry[0]]
                results.append(mock_file)
        return results

    mock_upload_files.side_effect = new_upload_files

    data_files = {self.workspace_1.id: [(local_path_1, media_type_1), (local_path_2, media_type_2)],
                  self.workspace_2.id: [(local_path_3, media_type_3), (local_path_4, media_type_4)]}
    parent_ids = {98, 99}  # Dummy values

    upload_dir = 'upload_dir'
    ProductDataFileStore().store_files(upload_dir, 'work_dir', data_files, parent_ids, job_exe_in_recipe)
def test_json(self):
    """Tests converting a ProcessCondition message to and from JSON"""
    # Recipe type with one condition node that accepts cond_int == 0
    # (removed a dead `definition = RecipeDefinition(Interface())` assignment
    # that was immediately overwritten below)
    cond_interface_1 = Interface()
    cond_interface_1.add_parameter(JsonParameter('cond_int', 'integer'))
    df1 = DataFilter(filter_list=[{
        'name': 'cond_int',
        'type': 'integer',
        'condition': '==',
        'values': [0]
    }])
    definition = RecipeDefinition(cond_interface_1)
    definition.add_condition_node('node_a', cond_interface_1, df1)
    definition.add_recipe_input_connection('node_a', 'cond_int', 'cond_int')
    definition_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_dict)

    # Recipe input that satisfies the condition filter
    data_1 = Data()
    data_1.add_value(JsonValue('cond_int', 0))
    data_1_dict = convert_data_to_v6_json(data_1).get_dict()
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_1_dict)
    condition = recipe_test_utils.create_recipe_condition(recipe=recipe, save=True)
    recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_a', condition=condition, save=True)

    # Create message
    message = create_process_condition_messages([condition.id])[0]

    # Convert message to JSON and back, and then execute
    message_json_dict = message.to_json()
    new_message = ProcessCondition.from_json(message_json_dict)
    result = new_message.execute()
    self.assertTrue(result)

    # Condition is processed/accepted and an update_recipe message follows
    condition = RecipeCondition.objects.get(id=condition.id)
    self.assertEqual(len(new_message.new_messages), 1)
    self.assertEqual(new_message.new_messages[0].type, 'update_recipe')
    self.assertEqual(new_message.new_messages[0].root_recipe_id, recipe.id)
    self.assertTrue(condition.is_processed)
    self.assertIsNotNone(condition.processed)
    self.assertTrue(condition.is_accepted)
def test_json(self):
    """Tests converting a RequeueJobsBulk message to and from JSON"""
    system_error = error_test_utils.create_error(category='SYSTEM')
    job_data = JobData()
    batch = batch_test_utils.create_batch()
    recipe = recipe_test_utils.create_recipe()
    job_type = job_test_utils.create_job_type()
    job_1 = job_test_utils.create_job(job_type=job_type, num_exes=3, status='FAILED', error=system_error,
                                      input=job_data.get_dict())
    job_1.batch_id = batch.id
    job_1.recipe_id = recipe.id
    job_1.save()
    # Second job is CANCELED, so the FAILED status filter below excludes it
    job_2 = job_test_utils.create_job(job_type=job_type, num_exes=3, status='CANCELED', error=system_error,
                                      input=job_data.get_dict())

    # Build a message whose filters all match job 1
    message = RequeueJobsBulk()
    message.started = job_1.last_modified - timedelta(seconds=1)
    message.ended = job_1.last_modified + timedelta(seconds=1)
    message.error_categories = ['SYSTEM']
    message.error_ids = [system_error.id]
    message.job_ids = [job_1.id]
    message.job_type_ids = [job_type.id]
    message.priority = 1
    message.status = 'FAILED'
    message.job_type_names = [job_type.name]
    message.batch_ids = [batch.id]
    message.recipe_ids = [recipe.id]
    message.is_superseded = False

    # Round-trip through JSON, then execute
    round_tripped = RequeueJobsBulk.from_json(message.to_json())
    self.assertTrue(round_tripped.execute())

    # Should be one re-queue message for job 1
    self.assertEqual(len(round_tripped.new_messages), 1)
    requeue_msg = round_tripped.new_messages[0]
    self.assertEqual(requeue_msg.type, 'requeue_jobs')
    self.assertListEqual(requeue_msg._requeue_jobs, [QueuedJob(job_1.id, job_1.num_exes)])
    self.assertEqual(requeue_msg.priority, 1)
def test_successful_recipe_path(self, mock_upload_files, mock_create_file_ancestry_links):
    """Tests calling ProductDataFileType.store_files() successfully with a job that is in a recipe"""
    job_exe_in_recipe = job_utils.create_job_exe(status='RUNNING')
    recipe = recipe_utils.create_recipe()
    _recipe_job = recipe_utils.create_recipe_job(recipe=recipe, job_name='My Job', job=job_exe_in_recipe.job)
    # Expected remote prefix encodes both the recipe type and the job type
    remote_base_path_with_recipe = os.path.join('recipes', get_valid_filename(recipe.recipe_type.name),
                                                get_valid_filename(recipe.recipe_type.version), 'jobs',
                                                get_valid_filename(job_exe_in_recipe.job.job_type.name),
                                                get_valid_filename(job_exe_in_recipe.job.job_type.version))
    local_path_1 = os.path.join('my', 'path', 'one', 'my_test.txt')
    local_path_2 = os.path.join('my', 'path', 'one', 'my_test.json')
    local_path_3 = os.path.join('my', 'path', 'three', 'my_test.png')
    local_path_4 = os.path.join('my', 'path', 'four', 'my_test.xml')
    media_type_1 = 'text/plain'
    media_type_2 = 'application/json'
    media_type_3 = 'image/png'
    media_type_4 = None
    file_id_by_path = {local_path_1: 1, local_path_2: 2, local_path_3: 3, local_path_4: 4}

    # Fake out the upload so we can inspect the remote paths it receives
    def new_upload_files(file_entries, input_file_ids, job_exe, workspace):
        results = []
        for file_entry in file_entries:
            # Check base remote path for recipe type and job type information
            self.assertTrue(file_entry[1].startswith(remote_base_path_with_recipe))
            if file_entry[0] in file_id_by_path:
                mock_file = MagicMock()
                mock_file.id = file_id_by_path[file_entry[0]]
                results.append(mock_file)
        return results

    mock_upload_files.side_effect = new_upload_files

    data_files = {self.workspace_1.id: [(local_path_1, media_type_1), (local_path_2, media_type_2)],
                  self.workspace_2.id: [(local_path_3, media_type_3), (local_path_4, media_type_4)]}
    parent_ids = {98, 99}  # Dummy values

    ProductDataFileStore().store_files(data_files, parent_ids, job_exe_in_recipe)
def setUp(self):
    """Creates a recipe with three job nodes named job-1 through job-3."""
    django.setup()

    self.recipe = recipe_test_utils.create_recipe()

    created_nodes = []
    for node_name in ('job-1', 'job-2', 'job-3'):
        seed_job_type = job_test_utils.create_seed_job_type()
        job = job_test_utils.create_job(job_type=seed_job_type)
        created_nodes.append(recipe_test_utils.create_recipe_node(recipe=self.recipe, node_name=node_name, job=job))
    self.recipe_node1, self.recipe_node2, self.recipe_node3 = created_nodes

    # Persist all three nodes in a single bulk insert
    RecipeNode.objects.bulk_create(created_nodes)
def test_execute(self):
    """Tests calling CreateConditions.execute() successfully"""
    batch = batch_test_utils.create_batch()
    recipe = recipe_test_utils.create_recipe(batch=batch)
    conditions = [Condition('node_1', False), Condition('node_2', True)]

    # Create and execute message
    message = create_conditions_messages(recipe, conditions)[0]
    result = message.execute()
    self.assertTrue(result)

    self.assertEqual(RecipeCondition.objects.filter(recipe_id=recipe.id).count(), 2)
    recipe_nodes = RecipeNode.objects.select_related('condition').filter(
        recipe_id=recipe.id).order_by('node_name')
    self.assertEqual(len(recipe_nodes), 2)
    self.assertEqual(recipe_nodes[0].node_name, 'node_1')
    self.assertEqual(recipe_nodes[1].node_name, 'node_2')
    condition_2 = recipe_nodes[1].condition
    # Should be one message for processing condition for node 2
    self.assertEqual(len(message.new_messages), 1)
    process_condition_msg = message.new_messages[0]
    self.assertEqual(process_condition_msg.type, 'process_condition')
    self.assertEqual(process_condition_msg.condition_id, condition_2.id)

    # Test executing message again - conditions must not be duplicated
    message_json_dict = message.to_json()
    message = CreateConditions.from_json(message_json_dict)
    result = message.execute()
    self.assertTrue(result)

    self.assertEqual(RecipeCondition.objects.filter(recipe_id=recipe.id).count(), 2)
    recipe_nodes = RecipeNode.objects.select_related('condition').filter(
        recipe_id=recipe.id).order_by('node_name')
    self.assertEqual(len(recipe_nodes), 2)
    self.assertEqual(recipe_nodes[0].node_name, 'node_1')
    self.assertEqual(recipe_nodes[1].node_name, 'node_2')
    # Bug fix: re-fetch the condition from the database instead of mutating the
    # in-memory model's primary key in place (`condition_2.id = ...`), which was
    # misleading and left condition_2 pointing at a stale instance
    condition_2 = recipe_nodes[1].condition
    # Should be one message for processing condition for node 2
    self.assertEqual(len(message.new_messages), 1)
    process_condition_msg = message.new_messages[0]
    self.assertEqual(process_condition_msg.type, 'process_condition')
    self.assertEqual(process_condition_msg.condition_id, condition_2.id)
def test_recipe_link(self):
    """Tests calling ProductFileManager.upload_files() successfully when associated with a recipe"""
    test_recipe = recipe_test_utils.create_recipe()
    recipe_job = recipe_test_utils.create_recipe_job(job=self.job_exe.job, recipe=test_recipe)

    # First upload is outside the recipe; its product feeds the recipe job's upload
    products_no = ProductFile.objects.upload_files(self.files_no, [self.source_file.id], self.job_exe_no,
                                                   self.workspace)
    products = ProductFile.objects.upload_files(self.files, [self.source_file.id, products_no[0].id], self.job_exe,
                                                self.workspace)

    # Product from the recipe job carries the recipe linkage fields
    product = products[0]
    self.assertEqual(recipe_job.recipe.id, product.recipe_id)
    self.assertEqual(recipe_job.node_name, product.recipe_node)
    self.assertEqual(self.files[0].output_name, product.job_output)
    recipe_manager = RecipeManager()
    self.assertEqual(recipe_manager.get_details(recipe_job.recipe.id).recipe_type, product.recipe_type)
def test_json(self):
    """Tests converting a CompletedJobs message to and from JSON"""
    from recipe.test import utils as recipe_test_utils

    job_1 = job_test_utils.create_job(num_exes=1, status='QUEUED')
    job_test_utils.create_job_exe(job=job_1)
    job_2 = job_test_utils.create_job(num_exes=1, status='RUNNING')
    job_test_utils.create_job_exe(job=job_2)
    job_3 = job_test_utils.create_job(num_exes=0, status='PENDING')
    job_ids = [job_1.id, job_2.id, job_3.id]
    recipe_1 = recipe_test_utils.create_recipe()
    recipe_test_utils.create_recipe_job(recipe=recipe_1, job=job_2)
    when_ended = now()

    # Add jobs to message
    message = CompletedJobs()
    message.ended = when_ended
    for job in (job_1, job_2, job_3):
        if message.can_fit_more():
            message.add_completed_job(CompletedJob(job.id, job.num_exes))

    # Convert message to JSON and back, and then execute
    new_message = CompletedJobs.from_json(message.to_json())
    self.assertTrue(new_message.execute())

    jobs = Job.objects.filter(id__in=job_ids).order_by('id')
    # Jobs 1 and 2 should be completed
    for completed_job in (jobs[0], jobs[1]):
        self.assertEqual(completed_job.status, 'COMPLETED')
        self.assertEqual(completed_job.num_exes, 1)
        self.assertEqual(completed_job.ended, when_ended)
    # Job 3 should ignore the update
    self.assertEqual(jobs[2].status, 'PENDING')
    self.assertEqual(jobs[2].num_exes, 0)
def setUp(self):
    """Creates a job/execution pair plus, when the optional apps are present,
    a linked recipe and product."""
    django.setup()

    self.file = storage_test_utils.create_file()
    self.job = job_test_utils.create_job(data={"input_data": [{"name": "input_file", "file_id": self.file.id}]})
    self.job_exe = job_test_utils.create_job_exe(job=self.job)

    # Attempt to stage related models; skip gracefully if the app is absent
    try:
        import recipe.test.utils as recipe_test_utils
        self.recipe = recipe_test_utils.create_recipe()
        # Bug fix: was `create_recipe_job(recipe, ...)` referencing an undefined
        # name `recipe` - the NameError was silently swallowed by a bare except
        self.recipe_job = recipe_test_utils.create_recipe_job(recipe=self.recipe, job=self.job)
    except ImportError:
        self.recipe = None
        # Bug fix: attribute was misspelled `receip_job`, leaving recipe_job unset
        self.recipe_job = None

    try:
        import product.test.utils as product_test_utils
        self.product = product_test_utils.create_product(job_exe=self.job_exe)
    except ImportError:
        self.product = None
def setUp(self):
    """Builds one recipe containing a job for every status, plus dependent
    jobs whose names encode the statuses of their upstream jobs.

    Naming convention: ``job_fa_co_a`` depends on the FAILED and COMPLETED
    jobs; ``job_fa_co_b`` depends on ``job_fa_co_a``; likewise for the
    ``co_ru_qu`` (completed/running/queued) and ``qu_ca`` (queued/canceled)
    chains.
    """
    django.setup()

    # One independent job per status under test
    self.job_failed = job_test_utils.create_job(status='FAILED')
    self.job_completed = job_test_utils.create_job(status='COMPLETED')
    self.job_running = job_test_utils.create_job(status='RUNNING')
    self.job_queued = job_test_utils.create_job(status='QUEUED')
    self.job_canceled = job_test_utils.create_job(status='CANCELED')
    # Dependent jobs, pre-set to the statuses the tests start from
    self.job_fa_co_a = job_test_utils.create_job(status='BLOCKED')
    self.job_fa_co_b = job_test_utils.create_job(status='PENDING')
    self.job_co_ru_qu_a = job_test_utils.create_job(status='BLOCKED')
    self.job_co_ru_qu_b = job_test_utils.create_job(status='BLOCKED')
    self.job_qu_ca_a = job_test_utils.create_job(status='PENDING')
    self.job_qu_ca_b = job_test_utils.create_job(status='PENDING')

    # Legacy v1.0 recipe definition wiring the dependency graph together
    self.definition = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'job_failed',
            'job_type': {
                'name': self.job_failed.job_type.name,
                'version': self.job_failed.job_type.version,
            },
        }, {
            'name': 'job_completed',
            'job_type': {
                'name': self.job_completed.job_type.name,
                'version': self.job_completed.job_type.version,
            },
        }, {
            'name': 'job_running',
            'job_type': {
                'name': self.job_running.job_type.name,
                'version': self.job_running.job_type.version,
            },
        }, {
            'name': 'job_queued',
            'job_type': {
                'name': self.job_queued.job_type.name,
                'version': self.job_queued.job_type.version,
            },
        }, {
            'name': 'job_canceled',
            'job_type': {
                'name': self.job_canceled.job_type.name,
                'version': self.job_canceled.job_type.version,
            },
        }, {
            'name': 'job_fa_co_a',
            'job_type': {
                'name': self.job_fa_co_a.job_type.name,
                'version': self.job_fa_co_a.job_type.version,
            },
            'dependencies': [{
                'name': 'job_failed',
            }, {
                'name': 'job_completed',
            }],
        }, {
            'name': 'job_fa_co_b',
            'job_type': {
                'name': self.job_fa_co_b.job_type.name,
                'version': self.job_fa_co_b.job_type.version,
            },
            'dependencies': [{
                'name': 'job_fa_co_a',
            }],
        }, {
            'name': 'job_co_ru_qu_a',
            'job_type': {
                'name': self.job_co_ru_qu_a.job_type.name,
                'version': self.job_co_ru_qu_a.job_type.version,
            },
            'dependencies': [{
                'name': 'job_completed',
            }, {
                'name': 'job_running',
            }, {
                'name': 'job_queued',
            }],
        }, {
            'name': 'job_co_ru_qu_b',
            'job_type': {
                'name': self.job_co_ru_qu_b.job_type.name,
                'version': self.job_co_ru_qu_b.job_type.version,
            },
            'dependencies': [{
                'name': 'job_co_ru_qu_a',
            }],
        }, {
            'name': 'job_qu_ca_a',
            'job_type': {
                'name': self.job_qu_ca_a.job_type.name,
                'version': self.job_qu_ca_a.job_type.version,
            },
            'dependencies': [{
                'name': 'job_queued',
            }, {
                'name': 'job_canceled',
            }],
        }, {
            'name': 'job_qu_ca_b',
            'job_type': {
                'name': self.job_qu_ca_b.job_type.name,
                'version': self.job_qu_ca_b.job_type.version,
            },
            'dependencies': [{
                'name': 'job_qu_ca_a',
            }],
        }],
    }

    # Persist the recipe and link each pre-created job to its named node
    self.recipe_type = recipe_test_utils.create_recipe_type(definition=self.definition)
    self.recipe = recipe_test_utils.create_recipe(recipe_type=self.recipe_type)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_failed', job=self.job_failed)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_completed', job=self.job_completed)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_running', job=self.job_running)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_queued', job=self.job_queued)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_canceled', job=self.job_canceled)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_fa_co_a', job=self.job_fa_co_a)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_fa_co_b', job=self.job_fa_co_b)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_co_ru_qu_a', job=self.job_co_ru_qu_a)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_co_ru_qu_b', job=self.job_co_ru_qu_b)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_qu_ca_a', job=self.job_qu_ca_a)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_qu_ca_b', job=self.job_qu_ca_b)

    # All recipe-job links, loaded back from the database for the tests to use
    self.recipe_jobs = list(RecipeJob.objects.filter(recipe_id=self.recipe.id))
def test_get_existing_jobs_to_queue(self):
    """Tests calling RecipeHandler.get_existing_jobs_to_queue()"""
    # Job type 1: takes a plain-text file, produces PNG files
    input_name_1 = 'Test Input 1'
    output_name_1 = 'Test Output 1'
    interface_1 = {
        'version': '1.0',
        'command': 'my_cmd',
        'command_arguments': 'args',
        'input_data': [{
            'name': input_name_1,
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'output_data': [{
            'name': output_name_1,
            'type': 'files',
            'media_type': 'image/png',
        }],
    }
    job_type_1 = job_test_utils.create_job_type(interface=interface_1)
    job_1 = job_test_utils.create_job(job_type=job_type_1)

    # Job type 2: consumes image files (to be fed by job 1's output)
    input_name_2 = 'Test Input 2'
    output_name_2 = 'Test Output 2'
    interface_2 = {
        'version': '1.0',
        'command': 'my_cmd',
        'command_arguments': 'args',
        'input_data': [{
            'name': input_name_2,
            'type': 'files',
            'media_types': ['image/png', 'image/tiff'],
        }],
        'output_data': [{
            'name': output_name_2,
            'type': 'file',
        }],
    }
    job_type_2 = job_test_utils.create_job_type(interface=interface_2)
    job_2 = job_test_utils.create_job(job_type=job_type_2)

    workspace = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(workspace=workspace, media_type='text/plain')

    # Recipe wiring: the recipe input feeds Job 1; Job 2 depends on Job 1's output,
    # so only Job 1 has its inputs satisfied and is queueable
    definition = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': job_type_1.name,
                'version': job_type_1.version,
            },
            'recipe_inputs': [{
                'recipe_input': 'Recipe Input',
                'job_input': input_name_1,
            }]
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': job_type_2.name,
                'version': job_type_2.version,
            },
            'dependencies': [{
                'name': 'Job 1',
                'connections': [{
                    'output': output_name_1,
                    'input': input_name_2,
                }],
            }],
        }],
    }
    data = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'file_id': file_1.id,
        }],
        'workspace_id': workspace.id,
    }
    recipe_type = recipe_test_utils.create_recipe_type(definition=definition)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, data=data)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='Job 1', job=job_1)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='Job 2', job=job_2)
    recipe_jobs = list(RecipeJob.objects.filter(recipe_id=recipe.id))

    handler = RecipeHandler(recipe, recipe_jobs)
    jobs_to_queue = handler.get_existing_jobs_to_queue()

    # Make sure only Job 1 is returned and that its job data is correct
    self.assertEqual(len(jobs_to_queue), 1)
    self.assertEqual(jobs_to_queue[0][0].id, job_1.id)
    self.assertDictEqual(jobs_to_queue[0][1].get_dict(), {
        'version': '1.0',
        'input_data': [{
            'name': input_name_1,
            'file_id': file_1.id,
        }],
        'output_data': [{
            'name': output_name_1,
            'workspace_id': workspace.id,
        }],
    })
def setUp(self):
    """Creates a standalone job plus two recipes for re-queue testing:
    recipe A (failed job with a blocked dependent) and recipe B (a three-job
    chain with a canceled middle job)."""
    django.setup()

    self.standalone_job = job_test_utils.create_job(status='RUNNING')

    # Recipe A: Job 1 (FAILED, already executed once) -> Job 2 (BLOCKED)
    job_type_a_1 = job_test_utils.create_job_type()
    job_type_a_2 = job_test_utils.create_job_type()
    definition_a = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': job_type_a_1.name,
                'version': job_type_a_1.version,
            }
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': job_type_a_2.name,
                'version': job_type_a_2.version,
            },
            'dependencies': [{
                'name': 'Job 1'
            }],
        }],
    }
    recipe_type_a = recipe_test_utils.create_recipe_type(definition=definition_a)
    self.job_a_1 = job_test_utils.create_job(job_type=job_type_a_1, status='FAILED', num_exes=1)
    self.job_a_2 = job_test_utils.create_job(job_type=job_type_a_2, status='BLOCKED')
    data_a = {
        'version': '1.0',
        'input_data': [],
        'workspace_id': 1,
    }
    self.recipe_a = recipe_test_utils.create_recipe(recipe_type=recipe_type_a, data=data_a)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_a, job_name='Job 1', job=self.job_a_1)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_a, job_name='Job 2', job=self.job_a_2)

    # Create recipe for re-queing a job that should now be BLOCKED (and its dependencies)
    job_type_b_1 = job_test_utils.create_job_type()
    job_type_b_2 = job_test_utils.create_job_type()
    job_type_b_3 = job_test_utils.create_job_type()
    definition_b = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': job_type_b_1.name,
                'version': job_type_b_1.version,
            }
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': job_type_b_2.name,
                'version': job_type_b_2.version,
            },
            'dependencies': [{
                'name': 'Job 1'
            }],
        }, {
            'name': 'Job 3',
            'job_type': {
                'name': job_type_b_3.name,
                'version': job_type_b_3.version,
            },
            'dependencies': [{
                'name': 'Job 2'
            }],
        }],
    }
    recipe_type_b = recipe_test_utils.create_recipe_type(definition=definition_b)
    self.job_b_1 = job_test_utils.create_job(job_type=job_type_b_1, status='FAILED')
    self.job_b_2 = job_test_utils.create_job(job_type=job_type_b_2, status='CANCELED')
    self.job_b_3 = job_test_utils.create_job(job_type=job_type_b_3, status='BLOCKED')
    data_b = {
        'version': '1.0',
        'input_data': [],
        'workspace_id': 1,
    }
    self.recipe_b = recipe_test_utils.create_recipe(recipe_type=recipe_type_b, data=data_b)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_b, job_name='Job 1', job=self.job_b_1)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_b, job_name='Job 2', job=self.job_b_2)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_b, job_name='Job 3', job=self.job_b_3)

    # Jobs the tests re-queue, and the dependents expected to be affected
    self.job_ids = [self.standalone_job.id, self.job_a_1.id, self.job_b_2.id]
    self.dependent_job_ids = {self.job_a_2.id, self.job_b_3.id}
def setUp(self):
    """Creates one recipe of a named type and one of a default type."""
    django.setup()

    self.recipe_type = recipe_test_utils.create_recipe_type(name='my-type')
    # Consistency fix: pass recipe_type by keyword, matching every other
    # create_recipe() call site in these tests
    self.recipe1 = recipe_test_utils.create_recipe(recipe_type=self.recipe_type)
    self.recipe2 = recipe_test_utils.create_recipe()
def setUp(self):
    """Creates standalone jobs in various states plus two recipes, then
    registers a mock queue processor, for testing bulk re-queueing at a new
    priority."""
    django.setup()

    # Priority the re-queue operations will apply
    self.new_priority = 200

    # Standalone jobs covering failed, superseded, canceled and completed cases
    self.standalone_failed_job = job_test_utils.create_job(status='FAILED', num_exes=3, priority=100)
    self.standalone_superseded_job = job_test_utils.create_job(status='FAILED', num_exes=1)
    self.standalone_canceled_job = job_test_utils.create_job(status='CANCELED', num_exes=1, priority=100)
    self.standalone_completed_job = job_test_utils.create_job(status='COMPLETED')
    Job.objects.supersede_jobs([self.standalone_superseded_job], now())

    # Create recipe for re-queing a job that should now be PENDING (and its dependencies)
    job_type_a_1 = job_test_utils.create_job_type()
    job_type_a_2 = job_test_utils.create_job_type()
    definition_a = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': job_type_a_1.name,
                'version': job_type_a_1.version,
            }
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': job_type_a_2.name,
                'version': job_type_a_2.version,
            },
            'dependencies': [{
                'name': 'Job 1'
            }],
        }],
    }
    recipe_type_a = recipe_test_utils.create_recipe_type(definition=definition_a)
    self.job_a_1 = job_test_utils.create_job(job_type=job_type_a_1, status='FAILED', num_exes=1)
    self.job_a_2 = job_test_utils.create_job(job_type=job_type_a_2, status='BLOCKED')
    data_a = {
        'version': '1.0',
        'input_data': [],
        'workspace_id': 1,
    }
    recipe_a = recipe_test_utils.create_recipe(recipe_type=recipe_type_a, data=data_a)
    recipe_test_utils.create_recipe_job(recipe=recipe_a, job_name='Job 1', job=self.job_a_1)
    recipe_test_utils.create_recipe_job(recipe=recipe_a, job_name='Job 2', job=self.job_a_2)

    # Create recipe for re-queing a job that should now be BLOCKED (and its dependencies)
    job_type_b_1 = job_test_utils.create_job_type()
    job_type_b_2 = job_test_utils.create_job_type()
    job_type_b_3 = job_test_utils.create_job_type()
    definition_b = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': job_type_b_1.name,
                'version': job_type_b_1.version,
            }
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': job_type_b_2.name,
                'version': job_type_b_2.version,
            },
            'dependencies': [{
                'name': 'Job 1'
            }],
        }, {
            'name': 'Job 3',
            'job_type': {
                'name': job_type_b_3.name,
                'version': job_type_b_3.version,
            },
            'dependencies': [{
                'name': 'Job 2'
            }],
        }],
    }
    recipe_type_b = recipe_test_utils.create_recipe_type(definition=definition_b)
    self.job_b_1 = job_test_utils.create_job(job_type=job_type_b_1, status='FAILED')
    self.job_b_2 = job_test_utils.create_job(job_type=job_type_b_2, status='CANCELED')
    self.job_b_3 = job_test_utils.create_job(job_type=job_type_b_3, status='BLOCKED')
    data_b = {
        'version': '1.0',
        'input_data': [],
        'workspace_id': 1,
    }
    recipe_b = recipe_test_utils.create_recipe(recipe_type=recipe_type_b, data=data_b)
    recipe_test_utils.create_recipe_job(recipe=recipe_b, job_name='Job 1', job=self.job_b_1)
    recipe_test_utils.create_recipe_job(recipe=recipe_b, job_name='Job 2', job=self.job_b_2)
    recipe_test_utils.create_recipe_job(recipe=recipe_b, job_name='Job 3', job=self.job_b_3)

    # Job IDs to re-queue
    self.job_ids = [self.standalone_failed_job.id, self.standalone_canceled_job.id,
                    self.standalone_completed_job.id, self.job_a_1.id, self.job_b_2.id]

    # Register a fake processor
    self.mock_processor = MagicMock(QueueEventProcessor)
    Queue.objects.register_processor(lambda: self.mock_processor)
def setUp(self):
    """Creates a named, versioned recipe type and one recipe of that type."""
    django.setup()

    created_type = recipe_test_utils.create_recipe_type(name='test1', version='1.0')
    self.recipe_type1 = created_type
    self.recipe1 = recipe_test_utils.create_recipe(recipe_type=created_type)