def test_json(self):
    """Tests converting an UpdateRecipe message to and from JSON"""

    # Build a recipe where a PENDING job depends on a FAILED job, so that
    # executing the update message must emit a blocked_jobs message
    data_dict = convert_data_to_v6_json(Data()).get_dict()
    job_failed = job_test_utils.create_job(status='FAILED', input=data_dict)
    job_pending = job_test_utils.create_job(status='PENDING')
    definition = RecipeDefinition(Interface())
    definition.add_job_node('job_failed', job_failed.job_type.name, job_failed.job_type.version,
                            job_failed.job_type_rev.revision_num)
    definition.add_job_node('job_pending', job_pending.job_type.name, job_pending.job_type.version,
                            job_pending.job_type_rev.revision_num)
    definition.add_dependency('job_failed', 'job_pending')
    definition_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_failed', job=job_failed)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_pending', job=job_pending)

    # Create message
    message = create_update_recipe_message(recipe.id)

    # Convert message to JSON and back, and then execute
    message_json_dict = message.to_json()
    new_message = UpdateRecipe.from_json(message_json_dict)
    result = new_message.execute()
    self.assertTrue(result)

    # Check for message to set job_pending to BLOCKED
    self.assertEqual(len(new_message.new_messages), 1)
    msg = new_message.new_messages[0]
    self.assertEqual(msg.type, 'blocked_jobs')
    self.assertListEqual(msg._blocked_job_ids, [job_pending.id])
def test_products(self):
    """Tests creating links for inputs with generated products at the same time."""
    product_file = storage_test_utils.create_file()
    exe = job_test_utils.create_job_exe()
    recipe_test_utils.create_recipe_job(job=exe.job)
    input_ids = [self.file_4.id, self.file_6.id, self.file_7.id]

    # Record ancestry for the inputs and the freshly generated product together
    FileAncestryLink.objects.create_file_ancestry_links(input_ids, [product_file.id], exe)

    # Direct links: the product points straight at its immediate input files
    direct_links = FileAncestryLink.objects.filter(descendant=product_file, job_exe=exe,
                                                   ancestor_job__isnull=True)
    self.assertEqual(direct_links.count(), 3)
    self.assertSetEqual({link.ancestor_id for link in direct_links},
                        {self.file_4.id, self.file_6.id, self.file_7.id})

    # Indirect links: the product also links back to earlier-generation ancestors
    indirect_links = FileAncestryLink.objects.filter(descendant=product_file, job_exe=exe,
                                                     ancestor_job__isnull=False)
    self.assertEqual(indirect_links.count(), 3)
    self.assertSetEqual({link.ancestor_id for link in indirect_links},
                        {self.file_1.id, self.file_2.id, self.file_3.id})
def setUp(self):
    """Creates published products linked to their source files via ancestry rows.

    The first job execution produces two products descending from source files
    1 and 2; the second produces one product descending from source files 3 and 4.
    """
    django.setup()

    self.src_file_1 = source_test_utils.create_source()
    self.src_file_2 = source_test_utils.create_source()
    self.src_file_3 = source_test_utils.create_source()
    self.src_file_4 = source_test_utils.create_source()

    # First execution: products 1 and 2, each linked to source files 1 and 2
    self.job_exe_1 = job_test_utils.create_job_exe()
    self.recipe_job_1 = recipe_test_utils.create_recipe_job(job=self.job_exe_1.job)
    self.product_1 = prod_test_utils.create_product(self.job_exe_1, has_been_published=True)
    self.product_2 = prod_test_utils.create_product(self.job_exe_1, has_been_published=True)
    FileAncestryLink.objects.create(ancestor=self.src_file_1, descendant=self.product_1, job_exe=self.job_exe_1,
                                    job=self.job_exe_1.job, recipe=self.recipe_job_1.recipe)
    FileAncestryLink.objects.create(ancestor=self.src_file_1, descendant=self.product_2, job_exe=self.job_exe_1,
                                    job=self.job_exe_1.job, recipe=self.recipe_job_1.recipe)
    FileAncestryLink.objects.create(ancestor=self.src_file_2, descendant=self.product_1, job_exe=self.job_exe_1,
                                    job=self.job_exe_1.job, recipe=self.recipe_job_1.recipe)
    FileAncestryLink.objects.create(ancestor=self.src_file_2, descendant=self.product_2, job_exe=self.job_exe_1,
                                    job=self.job_exe_1.job, recipe=self.recipe_job_1.recipe)

    # Second execution: product 3, linked to source files 3 and 4
    self.job_exe_2 = job_test_utils.create_job_exe()
    self.recipe_job_2 = recipe_test_utils.create_recipe_job(job=self.job_exe_2.job)
    self.product_3 = prod_test_utils.create_product(self.job_exe_2, has_been_published=True)
    FileAncestryLink.objects.create(ancestor=self.src_file_3, descendant=self.product_3, job_exe=self.job_exe_2,
                                    job=self.job_exe_2.job, recipe=self.recipe_job_2.recipe)
    FileAncestryLink.objects.create(ancestor=self.src_file_4, descendant=self.product_3, job_exe=self.job_exe_2,
                                    job=self.job_exe_2.job, recipe=self.recipe_job_2.recipe)
def setUp(self):
    """Creates a two-job recipe where 'Job 2' depends on the running 'Job 1'."""
    django.setup()

    self.job_1 = job_test_utils.create_job(status='RUNNING', num_exes=1)
    self.job_2 = job_test_utils.create_job(input={}, num_exes=0)
    # NOTE(review): job_3 does not appear in the recipe definition below --
    # presumably used by tests as a job outside any recipe; confirm
    self.job_3 = job_test_utils.create_job(status='FAILED', num_exes=1)
    definition = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': self.job_1.job_type.name,
                'version': self.job_1.job_type.version,
            }
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': self.job_2.job_type.name,
                'version': self.job_2.job_type.version,
            },
            'dependencies': [{
                'name': 'Job 1'
            }],
        }],
    }
    self.recipe_type = recipe_test_utils.create_recipe_type(definition=definition)
    self.recipe = recipe_test_utils.create_recipe(recipe_type=self.recipe_type)
    # NOTE(review): both assignments target self.recipe_job, so the 'Job 1'
    # link is overwritten -- confirm tests only need the 'Job 2' link
    self.recipe_job = recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='Job 1', job=self.job_1)
    self.recipe_job = recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='Job 2', job=self.job_2)
def setUp(self):
    """Creates a two-job recipe where 'Job 2' depends on the running 'Job 1'."""
    django.setup()

    self.job_1 = job_test_utils.create_job(status='RUNNING')
    self.job_2 = job_test_utils.create_job(data={})
    definition = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': self.job_1.job_type.name,
                'version': self.job_1.job_type.version,
            }
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': self.job_2.job_type.name,
                'version': self.job_2.job_type.version,
            },
            'dependencies': [{
                'name': 'Job 1'
            }],
        }],
    }
    self.recipe_type = recipe_test_utils.create_recipe_type(definition=definition)
    self.recipe = recipe_test_utils.create_recipe(recipe_type=self.recipe_type)
    # NOTE(review): both assignments target self.recipe_job, so the 'Job 1'
    # link is overwritten -- confirm tests only need the 'Job 2' link
    self.recipe_job = recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='Job 1', job=self.job_1)
    self.recipe_job = recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='Job 2', job=self.job_2)
def test_inputs_and_products(self):
    """Tests creating links for inputs and then later replacing with generated products."""
    file_8 = storage_test_utils.create_file()
    parent_ids = [self.file_4.id, self.file_6.id, self.file_7.id]
    child_ids = [file_8.id]
    job_exe = job_test_utils.create_job_exe()
    recipe_test_utils.create_recipe_job(job=job_exe.job)

    # First create only the input files
    FileAncestryLink.objects.create_file_ancestry_links(parent_ids, None, job_exe)

    # Replace the inputs with the new links for both inputs and products
    FileAncestryLink.objects.create_file_ancestry_links(parent_ids, child_ids, job_exe)

    # Make sure the old entries (descendant is null) were deleted
    old_direct_qry = FileAncestryLink.objects.filter(descendant__isnull=True, job_exe=job_exe,
                                                     ancestor_job__isnull=True)
    self.assertEqual(len(old_direct_qry), 0)
    old_indirect_qry = FileAncestryLink.objects.filter(descendant__isnull=True, job_exe=job_exe,
                                                       ancestor_job__isnull=False)
    self.assertEqual(len(old_indirect_qry), 0)

    # Direct links now point from the product to its immediate parents
    direct_qry = FileAncestryLink.objects.filter(descendant=file_8, job_exe=job_exe, ancestor_job__isnull=True)
    self.assertEqual(direct_qry.count(), 3)
    file_8_parent_ids = {link.ancestor_id for link in direct_qry}
    self.assertSetEqual(file_8_parent_ids, {self.file_4.id, self.file_6.id, self.file_7.id})

    # Indirect links reach back to earlier-generation ancestors
    indirect_qry = FileAncestryLink.objects.filter(descendant=file_8, job_exe=job_exe, ancestor_job__isnull=False)
    self.assertEqual(indirect_qry.count(), 3)
    file_8_ancestor_ids = {link.ancestor_id for link in indirect_qry}
    self.assertSetEqual(file_8_ancestor_ids, {self.file_1.id, self.file_2.id, self.file_3.id})
def setUp(self):
    """Creates a two-job recipe where 'Job 2' depends on the running 'Job 1'."""
    django.setup()

    self.job_1 = job_test_utils.create_job(status="RUNNING")
    self.job_2 = job_test_utils.create_job(data={})
    # NOTE(review): job_3 does not appear in the recipe definition below --
    # presumably used by tests as a job outside any recipe; confirm
    self.job_3 = job_test_utils.create_job(status="FAILED")
    definition = {
        "version": "1.0",
        "input_data": [],
        "jobs": [
            {
                "name": "Job 1",
                "job_type": {"name": self.job_1.job_type.name, "version": self.job_1.job_type.version},
            },
            {
                "name": "Job 2",
                "job_type": {"name": self.job_2.job_type.name, "version": self.job_2.job_type.version},
                "dependencies": [{"name": "Job 1"}],
            },
        ],
    }
    self.recipe_type = recipe_test_utils.create_recipe_type(definition=definition)
    self.recipe = recipe_test_utils.create_recipe(recipe_type=self.recipe_type)
    # NOTE(review): both assignments target self.recipe_job, so the 'Job 1'
    # link is overwritten -- confirm tests only need the 'Job 2' link
    self.recipe_job = recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name="Job 1", job=self.job_1)
    self.recipe_job = recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name="Job 2", job=self.job_2)
def setUp(self):
    """Creates one recipe with three named recipe-job links for the tests."""
    django.setup()

    self.recipe = recipe_test_utils.create_recipe()
    # Attach three recipe jobs, exposed as self.recipe_job1..3
    for index in (1, 2, 3):
        link = recipe_test_utils.create_recipe_job(self.recipe, job_name='job %d' % index)
        setattr(self, 'recipe_job%d' % index, link)
def test_inputs_and_products(self):
    """Tests creating links for inputs and then later replacing with generated products."""
    file_8 = storage_test_utils.create_file()
    parent_ids = [self.file_4.id, self.file_6.id, self.file_7.id]
    child_ids = [file_8.id]
    job_exe = job_test_utils.create_job_exe()
    recipe_test_utils.create_recipe_job(job=job_exe.job)

    # First create only the input files
    # (this variant passes job and job_exe id separately instead of the job_exe model)
    FileAncestryLink.objects.create_file_ancestry_links(parent_ids, None, job_exe.job, job_exe.id)

    # Replace the inputs with the new links for both inputs and products
    FileAncestryLink.objects.create_file_ancestry_links(parent_ids, child_ids, job_exe.job, job_exe.id)

    # Make sure the old entries (descendant is null) were deleted
    old_direct_qry = FileAncestryLink.objects.filter(descendant__isnull=True, job_exe=job_exe,
                                                     ancestor_job__isnull=True)
    self.assertEqual(len(old_direct_qry), 0)
    old_indirect_qry = FileAncestryLink.objects.filter(descendant__isnull=True, job_exe=job_exe,
                                                       ancestor_job__isnull=False)
    self.assertEqual(len(old_indirect_qry), 0)

    # NOTE(review): unlike the older-API variant of this test, only 2 direct links
    # are expected and they point at file_1/file_2 rather than the passed parents --
    # presumably this API resolves links back to original source files; confirm
    direct_qry = FileAncestryLink.objects.filter(descendant=file_8, job_exe=job_exe, ancestor_job__isnull=True)
    self.assertEqual(direct_qry.count(), 2)
    file_8_parent_ids = {link.ancestor_id for link in direct_qry}
    self.assertSetEqual(file_8_parent_ids, {self.file_1.id, self.file_2.id})
def test_inputs(self):
    """Tests creating links for only input files before any products are generated."""
    exe = job_test_utils.create_job_exe()
    recipe_test_utils.create_recipe_job(job=exe.job)
    input_ids = [self.file_4.id, self.file_6.id, self.file_7.id]

    # No children yet, so only the input side of the ancestry is recorded
    FileAncestryLink.objects.create_file_ancestry_links(input_ids, None, exe)

    direct_links = FileAncestryLink.objects.filter(descendant__isnull=True, job_exe=exe,
                                                   ancestor_job__isnull=True)
    self.assertEqual(direct_links.count(), 3)
    self.assertSetEqual({link.ancestor_id for link in direct_links},
                        {self.file_4.id, self.file_6.id, self.file_7.id})

    indirect_links = FileAncestryLink.objects.filter(descendant__isnull=True, job_exe=exe,
                                                     ancestor_job__isnull=False)
    self.assertEqual(indirect_links.count(), 3)
    self.assertSetEqual({link.ancestor_id for link in indirect_links},
                        {self.file_1.id, self.file_2.id, self.file_3.id})
def setUp(self):
    """Creates one recipe with three named recipe-job links for the tests."""
    django.setup()

    self.recipe = recipe_test_utils.create_recipe()
    make_link = recipe_test_utils.create_recipe_job
    self.recipe_job1 = make_link(self.recipe, job_name='job 1')
    self.recipe_job2 = make_link(self.recipe, job_name='job 2')
    self.recipe_job3 = make_link(self.recipe, job_name='job 3')
def setUp(self):
    """Creates a recipe type whose single 'kml' job is fed by the recipe's input file."""
    django.setup()

    self.job_type1 = job_test_utils.create_job_type()

    definition = {
        'version': '1.0',
        'input_data': [{
            'media_types': [
                'image/x-hdf5-image',
            ],
            'type': 'file',
            'name': 'input_file',
        }],
        'jobs': [{
            'job_type': {
                'name': self.job_type1.name,
                'version': self.job_type1.version,
            },
            'name': 'kml',
            'recipe_inputs': [{
                'job_input': 'input_file',
                'recipe_input': 'input_file',
            }],
        }],
    }
    self.recipe_type = recipe_test_utils.create_recipe_type(name='my-type', definition=definition)

    # recipe1 uses the custom type and has an attached recipe job;
    # recipe2 uses a default recipe type with no attached job
    self.recipe1 = recipe_test_utils.create_recipe(self.recipe_type)
    self.recipe_job1 = recipe_test_utils.create_recipe_job(recipe=self.recipe1)
    self.recipe2 = recipe_test_utils.create_recipe()
def setUp(self):
    """Creates a three-generation file ancestry tree.

    Generation 1 holds two plain stored files; generation 2 holds three products
    of the first job execution; generation 3 holds one product of the second job
    execution. file_7 is a stand-alone product with no ancestry rows.
    """
    django.setup()

    # Generation 1
    self.file_1 = storage_test_utils.create_file()
    self.file_2 = storage_test_utils.create_file()

    # Generation 2
    job_exe_1 = job_test_utils.create_job_exe()
    recipe_job_1 = recipe_test_utils.create_recipe_job(job=job_exe_1.job)
    self.file_3 = prod_test_utils.create_product(job_exe=job_exe_1)
    self.file_4 = prod_test_utils.create_product(job_exe=job_exe_1)
    self.file_5 = prod_test_utils.create_product(job_exe=job_exe_1)

    # Generation 3
    job_exe_2 = job_test_utils.create_job_exe()
    recipe_job_2 = recipe_test_utils.create_recipe_job(job=job_exe_2.job)
    self.file_6 = prod_test_utils.create_product(job_exe=job_exe_2)

    # Stand alone file
    self.file_7 = prod_test_utils.create_product()

    # First job links generation 1 to 2
    FileAncestryLink.objects.create(ancestor=self.file_1, descendant=self.file_3, job_exe=job_exe_1,
                                    job=job_exe_1.job, recipe=recipe_job_1.recipe)
    FileAncestryLink.objects.create(ancestor=self.file_1, descendant=self.file_4, job_exe=job_exe_1,
                                    job=job_exe_1.job, recipe=recipe_job_1.recipe)
    FileAncestryLink.objects.create(ancestor=self.file_1, descendant=self.file_5, job_exe=job_exe_1,
                                    job=job_exe_1.job, recipe=recipe_job_1.recipe)
    FileAncestryLink.objects.create(ancestor=self.file_2, descendant=self.file_3, job_exe=job_exe_1,
                                    job=job_exe_1.job, recipe=recipe_job_1.recipe)
    FileAncestryLink.objects.create(ancestor=self.file_2, descendant=self.file_4, job_exe=job_exe_1,
                                    job=job_exe_1.job, recipe=recipe_job_1.recipe)
    FileAncestryLink.objects.create(ancestor=self.file_2, descendant=self.file_5, job_exe=job_exe_1,
                                    job=job_exe_1.job, recipe=recipe_job_1.recipe)

    # Second job links generation 2 to 3; the generation-1 ancestors carry
    # ancestor_job/ancestor_job_exe to mark them as indirect links
    FileAncestryLink.objects.create(ancestor=self.file_3, descendant=self.file_6, job_exe=job_exe_2,
                                    job=job_exe_2.job, recipe=recipe_job_2.recipe)
    FileAncestryLink.objects.create(ancestor=self.file_1, descendant=self.file_6, job_exe=job_exe_2,
                                    job=job_exe_2.job, recipe=recipe_job_2.recipe, ancestor_job_exe=job_exe_1,
                                    ancestor_job=job_exe_1.job)
    FileAncestryLink.objects.create(ancestor=self.file_2, descendant=self.file_6, job_exe=job_exe_2,
                                    job=job_exe_2.job, recipe=recipe_job_2.recipe, ancestor_job_exe=job_exe_1,
                                    ancestor_job=job_exe_1.job)
def test_inputs(self):
    """Tests creating links for only input files before any products are generated."""
    input_ids = [self.file_4.id, self.file_6.id, self.file_7.id]
    execution = job_test_utils.create_job_exe()
    recipe_test_utils.create_recipe_job(job=execution.job)

    FileAncestryLink.objects.create_file_ancestry_links(input_ids, None, execution)

    # Without products, the links have no descendant; split them into the
    # direct input links and the indirect links to earlier generations
    pending_links = FileAncestryLink.objects.filter(descendant__isnull=True, job_exe=execution)
    direct = pending_links.filter(ancestor_job__isnull=True)
    indirect = pending_links.filter(ancestor_job__isnull=False)

    self.assertEqual(direct.count(), 3)
    self.assertSetEqual({link.ancestor_id for link in direct},
                        {self.file_4.id, self.file_6.id, self.file_7.id})
    self.assertEqual(indirect.count(), 3)
    self.assertSetEqual({link.ancestor_id for link in indirect},
                        {self.file_1.id, self.file_2.id, self.file_3.id})
def test_json_forced_nodes(self):
    """Tests converting an UpdateRecipe message to and from JSON when forced nodes are provided"""

    # Recipe with a completed job feeding a sub-recipe node, so executing the
    # update message must emit a create_recipes message for the sub-recipe
    data_dict = convert_data_to_v6_json(Data()).get_dict()
    job_completed = job_test_utils.create_job(status='COMPLETED', input=data_dict, output=data_dict)
    sub_recipe_type = recipe_test_utils.create_recipe_type_v6()
    definition = RecipeDefinition(Interface())
    definition.add_job_node('job_completed', job_completed.job_type.name, job_completed.job_type.version,
                            job_completed.job_type_rev.revision_num)
    definition.add_recipe_node('the_sub_recipe', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_dependency('job_completed', 'the_sub_recipe')
    definition_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_dict)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_completed', job=job_completed)

    # Force all nodes of the sub-recipe to be reprocessed
    forced_nodes = ForcedNodes()
    sub_forced_nodes = ForcedNodes()
    sub_forced_nodes.set_all_nodes()
    forced_nodes.add_subrecipe('the_sub_recipe', sub_forced_nodes)

    # Create message
    message = create_update_recipe_message(recipe.id, forced_nodes=forced_nodes)

    # Convert message to JSON and back, and then execute
    message_json_dict = message.to_json()
    new_message = UpdateRecipe.from_json(message_json_dict)
    result = new_message.execute()
    self.assertTrue(result)

    # Check for message to create sub-recipe, with the forced nodes preserved
    # through the JSON round trip
    self.assertEqual(len(new_message.new_messages), 1)
    msg = new_message.new_messages[0]
    self.assertEqual(msg.type, 'create_recipes')
    self.assertEqual(msg.event_id, recipe.event_id)
    msg_forced_nodes_dict = convert_forced_nodes_to_v6(msg.forced_nodes).get_dict()
    expected_forced_nodes_dict = convert_forced_nodes_to_v6(forced_nodes).get_dict()
    self.assertDictEqual(msg_forced_nodes_dict, expected_forced_nodes_dict)
    self.assertEqual(msg.create_recipes_type, SUB_RECIPE_TYPE)
    self.assertEqual(msg.recipe_id, recipe.id)
    self.assertEqual(msg.root_recipe_id, recipe.root_superseded_recipe_id)
    self.assertIsNone(msg.superseded_recipe_id)
    sub = SubRecipe(sub_recipe_type.name, sub_recipe_type.revision_num, 'the_sub_recipe', True)
    self.assertListEqual(msg.sub_recipes, [sub])
def test_products(self):
    """Tests creating links for inputs with generated products at the same time."""
    generated = storage_test_utils.create_file()
    execution = job_test_utils.create_job_exe()
    recipe_test_utils.create_recipe_job(job=execution.job)

    FileAncestryLink.objects.create_file_ancestry_links(
        [self.file_4.id, self.file_6.id, self.file_7.id], [generated.id], execution)

    # All links for this product, split into direct parents and indirect ancestors
    all_links = FileAncestryLink.objects.filter(descendant=generated, job_exe=execution)
    direct = all_links.filter(ancestor_job__isnull=True)
    indirect = all_links.filter(ancestor_job__isnull=False)

    self.assertEqual(direct.count(), 3)
    self.assertSetEqual({link.ancestor_id for link in direct},
                        {self.file_4.id, self.file_6.id, self.file_7.id})
    self.assertEqual(indirect.count(), 3)
    self.assertSetEqual({link.ancestor_id for link in indirect},
                        {self.file_1.id, self.file_2.id, self.file_3.id})
def test_update_recipe_fields(self):
    """Tests running the database update to populate new recipe fields in job model"""

    # recipe_2 supersedes recipe_1 and is marked completed
    recipe_1 = recipe_test_utils.create_recipe(is_superseded=True)
    recipe_2 = recipe_test_utils.create_recipe()
    recipe_2.root_superseded_recipe = recipe_1
    recipe_2.superseded_recipe = recipe_1
    recipe_2.completed = now()
    recipe_2.save()
    job_1 = job_test_utils.create_job()
    job_2 = job_test_utils.create_job(is_superseded=True)
    job_3 = job_test_utils.create_job()
    recipe_job_1 = recipe_test_utils.create_recipe_job(recipe=recipe_1, job=job_1)
    recipe_job_2 = recipe_test_utils.create_recipe_job(recipe=recipe_1, job=job_2)
    recipe_job_3 = recipe_test_utils.create_recipe_job(recipe=recipe_2, job=job_1)
    # NOTE(review): this flag is set on the in-memory model only -- there is no
    # recipe_job_3.save(), so the change never reaches the database; confirm
    # whether the updater is expected to see is_original=False here
    recipe_job_3.is_original = False
    recipe_job_4 = recipe_test_utils.create_recipe_job(recipe=recipe_2, job=job_3)

    # Run update
    updater = DatabaseUpdater()
    updater.update()

    # Check results: each job's recipe/root-recipe fields were populated from
    # its recipe-job links, and recipe_2's completed time set is_completed
    job_1 = Job.objects.get(id=job_1.id)
    self.assertEqual(job_1.recipe_id, recipe_1.id)
    self.assertEqual(job_1.root_recipe_id, recipe_1.id)
    job_2 = Job.objects.get(id=job_2.id)
    self.assertEqual(job_2.recipe_id, recipe_1.id)
    self.assertEqual(job_2.root_recipe_id, recipe_1.id)
    job_3 = Job.objects.get(id=job_3.id)
    self.assertEqual(job_3.recipe_id, recipe_2.id)
    self.assertEqual(job_3.root_recipe_id, recipe_1.id)
    recipe_2 = Recipe.objects.get(id=recipe_2.id)
    self.assertTrue(recipe_2.is_completed)
def test_successful_recipe_path(self, mock_upload_files, mock_create_file_ancestry_links):
    """Tests calling ProductDataFileType.store_files() successfully with a job that is in a recipe"""

    job_exe_in_recipe = job_utils.create_job_exe(status='RUNNING')
    recipe = recipe_utils.create_recipe()
    _recipe_job = recipe_utils.create_recipe_job(recipe=recipe, job_name='My Job', job=job_exe_in_recipe.job)
    # Expected remote path prefix built from the recipe type and job type names/versions
    remote_base_path_with_recipe = os.path.join('recipes', get_valid_filename(recipe.recipe_type.name),
                                                get_valid_filename(recipe.recipe_type.version), 'jobs',
                                                get_valid_filename(job_exe_in_recipe.job.job_type.name),
                                                get_valid_filename(job_exe_in_recipe.job.job_type.version))

    local_path_1 = os.path.join('my', 'path', 'one', 'my_test.txt')
    media_type_1 = 'text/plain'
    local_path_2 = os.path.join('my', 'path', 'one', 'my_test.json')
    media_type_2 = 'application/json'
    local_path_3 = os.path.join('my', 'path', 'three', 'my_test.png')
    media_type_3 = 'image/png'
    local_path_4 = os.path.join('my', 'path', 'four', 'my_test.xml')
    media_type_4 = None

    # Set up mocks: the fake upload verifies every remote path starts with the
    # recipe-aware prefix and hands back stub files with fixed ids
    def new_upload_files(upload_dir, work_dir, file_entries, input_file_ids, job_exe, workspace):
        results = []
        for file_entry in file_entries:
            # Check base remote path for recipe type and job type information
            self.assertTrue(file_entry[1].startswith(remote_base_path_with_recipe))
            if file_entry[0] == local_path_1:
                mock_1 = MagicMock()
                mock_1.id = 1
                results.append(mock_1)
            elif file_entry[0] == local_path_2:
                mock_2 = MagicMock()
                mock_2.id = 2
                results.append(mock_2)
            elif file_entry[0] == local_path_3:
                mock_3 = MagicMock()
                mock_3.id = 3
                results.append(mock_3)
            elif file_entry[0] == local_path_4:
                mock_4 = MagicMock()
                mock_4.id = 4
                results.append(mock_4)
        return results
    mock_upload_files.side_effect = new_upload_files

    data_files = {self.workspace_1.id: [(local_path_1, media_type_1), (local_path_2, media_type_2)],
                  self.workspace_2.id: [(local_path_3, media_type_3), (local_path_4, media_type_4)]}
    parent_ids = {98, 99}  # Dummy values

    upload_dir = 'upload_dir'
    ProductDataFileStore().store_files(upload_dir, 'work_dir', data_files, parent_ids, job_exe_in_recipe)
def test_successful_recipe_path(self, mock_upload_files, mock_create_file_ancestry_links):
    """Tests calling ProductDataFileType.store_files() successfully with a job that is in a recipe"""

    job_exe_in_recipe = job_utils.create_job_exe(status='RUNNING')
    recipe = recipe_utils.create_recipe()
    _recipe_job = recipe_utils.create_recipe_job(recipe=recipe, job_name='My Job', job=job_exe_in_recipe.job)
    # Expected remote path prefix built from the recipe type and job type names/versions
    remote_base_path_with_recipe = os.path.join('recipes', get_valid_filename(recipe.recipe_type.name),
                                                get_valid_filename(recipe.recipe_type.version), 'jobs',
                                                get_valid_filename(job_exe_in_recipe.job.job_type.name),
                                                get_valid_filename(job_exe_in_recipe.job.job_type.version))

    local_path_1 = os.path.join('my', 'path', 'one', 'my_test.txt')
    media_type_1 = 'text/plain'
    local_path_2 = os.path.join('my', 'path', 'one', 'my_test.json')
    media_type_2 = 'application/json'
    local_path_3 = os.path.join('my', 'path', 'three', 'my_test.png')
    media_type_3 = 'image/png'
    local_path_4 = os.path.join('my', 'path', 'four', 'my_test.xml')
    media_type_4 = None

    # Set up mocks: the fake upload verifies every remote path starts with the
    # recipe-aware prefix and hands back stub files with fixed ids
    def new_upload_files(file_entries, input_file_ids, job_exe, workspace):
        results = []
        for file_entry in file_entries:
            # Check base remote path for recipe type and job type information
            self.assertTrue(file_entry[1].startswith(remote_base_path_with_recipe))
            if file_entry[0] == local_path_1:
                mock_1 = MagicMock()
                mock_1.id = 1
                results.append(mock_1)
            elif file_entry[0] == local_path_2:
                mock_2 = MagicMock()
                mock_2.id = 2
                results.append(mock_2)
            elif file_entry[0] == local_path_3:
                mock_3 = MagicMock()
                mock_3.id = 3
                results.append(mock_3)
            elif file_entry[0] == local_path_4:
                mock_4 = MagicMock()
                mock_4.id = 4
                results.append(mock_4)
        return results
    mock_upload_files.side_effect = new_upload_files

    data_files = {self.workspace_1.id: [(local_path_1, media_type_1), (local_path_2, media_type_2)],
                  self.workspace_2.id: [(local_path_3, media_type_3), (local_path_4, media_type_4)]}
    parent_ids = {98, 99}  # Dummy values

    ProductDataFileStore().store_files(data_files, parent_ids, job_exe_in_recipe)
def test_batch_recipe(self):
    """Tests creating a link that has a recipe and batch."""
    execution = job_test_utils.create_job_exe()
    recipe_job = recipe_test_utils.create_recipe_job(job=execution.job)
    batch = batch_test_utils.create_batch()
    BatchRecipe.objects.create(batch_id=batch.id, recipe_id=recipe_job.recipe.id)
    BatchJob.objects.create(batch_id=batch.id, job_id=execution.job_id)

    FileAncestryLink.objects.create_file_ancestry_links([self.file_1.id], None, execution.job, execution.id)

    # The single link created must carry both the recipe and batch references
    link = FileAncestryLink.objects.get(job_exe=execution)
    self.assertEqual(link.recipe_id, recipe_job.recipe_id)
    self.assertEqual(link.batch_id, batch.id)
def test_batch_link(self):
    """Tests calling ProductFileManager.upload_files() successfully when associated with a batch"""

    # Batch membership is recorded through BatchRecipe/BatchJob rows
    job_type = job_test_utils.create_job_type(name='scale-batch-creator')
    job_exe = job_test_utils.create_job_exe(job_type=job_type)
    recipe_job = recipe_test_utils.create_recipe_job(job=job_exe.job)
    batch = batch_test_utils.create_batch()
    BatchRecipe.objects.create(batch_id=batch.id, recipe_id=recipe_job.recipe.id)
    BatchJob.objects.create(batch_id=batch.id, job_id=job_exe.job_id)

    # First upload establishes a product outside the batch to use as a parent
    products_no = ProductFile.objects.upload_files(self.files_no, [self.source_file.id], self.job_exe_no,
                                                   self.workspace)
    products = ProductFile.objects.upload_files(self.files, [self.source_file.id, products_no[0].id], job_exe,
                                                self.workspace)

    self.assertEqual(batch.id, products[0].batch_id)
def test_recipe_link(self):
    """Tests calling ProductFileManager.upload_files() successfully when associated with a recipe"""

    test_recipe = recipe_test_utils.create_recipe()
    recipe_job = recipe_test_utils.create_recipe_job(job=self.job_exe.job, recipe=test_recipe)

    # First upload establishes a product outside the recipe to use as a parent
    products_no = ProductFile.objects.upload_files(self.files_no, [self.source_file.id], self.job_exe_no,
                                                   self.workspace)
    products = ProductFile.objects.upload_files(self.files, [self.source_file.id, products_no[0].id],
                                                self.job_exe, self.workspace)

    # The uploaded product carries the recipe id, node name, output name, and recipe type
    self.assertEqual(recipe_job.recipe.id, products[0].recipe_id)
    self.assertEqual(recipe_job.node_name, products[0].recipe_node)
    self.assertEqual(self.files[0].output_name, products[0].job_output)
    recipe_manager = RecipeManager()
    self.assertEqual(recipe_manager.get_details(recipe_job.recipe.id).recipe_type, products[0].recipe_type)
def test_batch_link(self):
    """Tests calling ProductFileManager.upload_files() successfully when associated with a batch"""

    manifest = job_test_utils.create_seed_manifest(name='scale-batch-creator')
    job_type = job_test_utils.create_seed_job_type(manifest=manifest)
    job_exe = job_test_utils.create_job_exe(job_type=job_type)
    batch = batch_test_utils.create_batch()
    job_exe.batch = batch
    # Bug fix: job_exe is the object that was modified, so it is the one that
    # must be saved (the original called batch.save(), persisting nothing new)
    job_exe.save()
    recipe_job = recipe_test_utils.create_recipe_job(job=job_exe.job)

    # First upload establishes a product outside the batch to use as a parent
    products_no = ProductFile.objects.upload_files(self.files_no, [self.source_file.id], self.job_exe_no,
                                                   self.workspace)
    products = ProductFile.objects.upload_files(self.files, [self.source_file.id, products_no[0].id], job_exe,
                                                self.workspace)

    self.assertEqual(batch.id, products[0].batch_id)
def setUp(self):
    """Creates a job with one input file and best-effort related fixtures.

    The recipe and product apps may not be available in every test
    configuration, so those fixtures are created inside try blocks and left
    as None when creation fails.
    """
    django.setup()

    self.file = storage_test_utils.create_file()
    self.job = job_test_utils.create_job(data={"input_data": [{"name": "input_file", "file_id": self.file.id}]})
    self.job_exe = job_test_utils.create_job_exe(job=self.job)

    # Attempt to stage related models
    try:
        import recipe.test.utils as recipe_test_utils
        self.recipe = recipe_test_utils.create_recipe()
        # Bug fix: the recipe was referenced by the bare name 'recipe', a
        # NameError that the old bare except silently swallowed, so the
        # recipe job was never actually created
        self.recipe_job = recipe_test_utils.create_recipe_job(self.recipe, job=self.job)
    except Exception:  # narrowed from bare except so SystemExit/KeyboardInterrupt propagate
        self.recipe = None
        self.recipe_job = None  # fixed attribute-name typo ('receip_job')

    try:
        import product.test.utils as product_test_utils
        self.product = product_test_utils.create_product(job_exe=self.job_exe)
    except Exception:  # narrowed from bare except
        self.product = None
def test_get_existing_jobs_to_queue(self):
    """Tests calling RecipeHandler.get_existing_jobs_to_queue()"""

    # Job type 1: takes one text file, produces PNG files
    input_name_1 = 'Test Input 1'
    output_name_1 = 'Test Output 1'
    interface_1 = {
        'version': '1.0',
        'command': 'my_cmd',
        'command_arguments': 'args',
        'input_data': [{
            'name': input_name_1,
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'output_data': [{
            'name': output_name_1,
            'type': 'files',
            'media_type': 'image/png',
        }],
    }
    job_type_1 = job_test_utils.create_job_type(interface=interface_1)
    job_1 = job_test_utils.create_job(job_type=job_type_1)

    # Job type 2: consumes the image files produced by job type 1
    input_name_2 = 'Test Input 2'
    output_name_2 = 'Test Output 2'
    interface_2 = {
        'version': '1.0',
        'command': 'my_cmd',
        'command_arguments': 'args',
        'input_data': [{
            'name': input_name_2,
            'type': 'files',
            'media_types': ['image/png', 'image/tiff'],
        }],
        'output_data': [{
            'name': output_name_2,
            'type': 'file',
        }],
    }
    job_type_2 = job_test_utils.create_job_type(interface=interface_2)
    job_2 = job_test_utils.create_job(job_type=job_type_2)

    file_1 = storage_test_utils.create_file(media_type='text/plain')

    # Recipe: 'Job 1' fed from the recipe input, 'Job 2' fed from Job 1's output
    definition = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': job_type_1.name,
                'version': job_type_1.version,
            },
            'recipe_inputs': [{
                'recipe_input': 'Recipe Input',
                'job_input': input_name_1,
            }]
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': job_type_2.name,
                'version': job_type_2.version,
            },
            'dependencies': [{
                'name': 'Job 1',
                'connections': [{
                    'output': output_name_1,
                    'input': input_name_2,
                }],
            }],
        }],
    }
    data = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'file_id': file_1.id,
        }],
        'workspace_id': 1,
    }
    recipe_type = recipe_test_utils.create_recipe_type(definition=definition)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, data=data)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='Job 1', job=job_1)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='Job 2', job=job_2)

    handler = Recipe.objects.get_recipe_handlers_for_jobs([job_1.id])[job_1.id]
    jobs_to_queue = handler.get_existing_jobs_to_queue()

    # Make sure only Job 1 is returned and that its job data is correct
    # (Job 2 cannot queue until Job 1's output exists)
    self.assertEqual(len(jobs_to_queue), 1)
    self.assertEqual(jobs_to_queue[0][0].id, job_1.id)
    self.assertDictEqual(jobs_to_queue[0][1].get_dict(), {
        'version': '1.0',
        'input_data': [{
            'name': input_name_1,
            'file_id': file_1.id,
        }],
        'output_data': [{
            'name': output_name_1,
            'workspace_id': 1,
        }],
    })
def setUp(self):
    """Builds an 11-job recipe fixture covering every dependency scenario.

    Five root jobs in terminal/active states (FAILED, COMPLETED, RUNNING,
    QUEUED, CANCELED) plus dependent jobs named after the statuses of their
    parents (e.g. job_fa_co_a depends on the FAILED and COMPLETED jobs).
    The resulting RecipeJob rows are captured in self.recipe_jobs.
    """
    django.setup()

    # Root jobs, one per status of interest
    self.job_failed = job_test_utils.create_job(status='FAILED')
    self.job_completed = job_test_utils.create_job(status='COMPLETED')
    self.job_running = job_test_utils.create_job(status='RUNNING')
    self.job_queued = job_test_utils.create_job(status='QUEUED')
    self.job_canceled = job_test_utils.create_job(status='CANCELED')

    # Dependent jobs; suffixes encode the parents' statuses
    self.job_fa_co_a = job_test_utils.create_job(status='BLOCKED')
    self.job_fa_co_b = job_test_utils.create_job(status='PENDING')
    self.job_co_ru_qu_a = job_test_utils.create_job(status='BLOCKED')
    self.job_co_ru_qu_b = job_test_utils.create_job(status='BLOCKED')
    self.job_qu_ca_a = job_test_utils.create_job(status='PENDING')
    self.job_qu_ca_b = job_test_utils.create_job(status='PENDING')

    self.definition = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'job_failed',
            'job_type': {
                'name': self.job_failed.job_type.name,
                'version': self.job_failed.job_type.version,
            },
        }, {
            'name': 'job_completed',
            'job_type': {
                'name': self.job_completed.job_type.name,
                'version': self.job_completed.job_type.version,
            },
        }, {
            'name': 'job_running',
            'job_type': {
                'name': self.job_running.job_type.name,
                'version': self.job_running.job_type.version,
            },
        }, {
            'name': 'job_queued',
            'job_type': {
                'name': self.job_queued.job_type.name,
                'version': self.job_queued.job_type.version,
            },
        }, {
            'name': 'job_canceled',
            'job_type': {
                'name': self.job_canceled.job_type.name,
                'version': self.job_canceled.job_type.version,
            },
        }, {
            # Depends on one FAILED and one COMPLETED parent
            'name': 'job_fa_co_a',
            'job_type': {
                'name': self.job_fa_co_a.job_type.name,
                'version': self.job_fa_co_a.job_type.version,
            },
            'dependencies': [{
                'name': 'job_failed',
            }, {
                'name': 'job_completed',
            }],
        }, {
            'name': 'job_fa_co_b',
            'job_type': {
                'name': self.job_fa_co_b.job_type.name,
                'version': self.job_fa_co_b.job_type.version,
            },
            'dependencies': [{
                'name': 'job_fa_co_a',
            }],
        }, {
            # Depends on COMPLETED, RUNNING, and QUEUED parents
            'name': 'job_co_ru_qu_a',
            'job_type': {
                'name': self.job_co_ru_qu_a.job_type.name,
                'version': self.job_co_ru_qu_a.job_type.version,
            },
            'dependencies': [{
                'name': 'job_completed',
            }, {
                'name': 'job_running',
            }, {
                'name': 'job_queued',
            }],
        }, {
            'name': 'job_co_ru_qu_b',
            'job_type': {
                'name': self.job_co_ru_qu_b.job_type.name,
                'version': self.job_co_ru_qu_b.job_type.version,
            },
            'dependencies': [{
                'name': 'job_co_ru_qu_a',
            }],
        }, {
            # Depends on QUEUED and CANCELED parents
            'name': 'job_qu_ca_a',
            'job_type': {
                'name': self.job_qu_ca_a.job_type.name,
                'version': self.job_qu_ca_a.job_type.version,
            },
            'dependencies': [{
                'name': 'job_queued',
            }, {
                'name': 'job_canceled',
            }],
        }, {
            'name': 'job_qu_ca_b',
            'job_type': {
                'name': self.job_qu_ca_b.job_type.name,
                'version': self.job_qu_ca_b.job_type.version,
            },
            'dependencies': [{
                'name': 'job_qu_ca_a',
            }],
        }],
    }

    self.recipe_type = recipe_test_utils.create_recipe_type(definition=self.definition)
    self.recipe = recipe_test_utils.create_recipe(recipe_type=self.recipe_type)
    # Link every job into the recipe under its definition name
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_failed', job=self.job_failed)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_completed', job=self.job_completed)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_running', job=self.job_running)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_queued', job=self.job_queued)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_canceled', job=self.job_canceled)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_fa_co_a', job=self.job_fa_co_a)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_fa_co_b', job=self.job_fa_co_b)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_co_ru_qu_a', job=self.job_co_ru_qu_a)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_co_ru_qu_b', job=self.job_co_ru_qu_b)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_qu_ca_a', job=self.job_qu_ca_a)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_qu_ca_b', job=self.job_qu_ca_b)

    # Materialize the recipe's job links for use by the tests
    self.recipe_jobs = list(RecipeJob.objects.filter(recipe_id=self.recipe.id))
def test_get_existing_jobs_to_queue(self):
    """Tests calling RecipeHandler.get_existing_jobs_to_queue()

    Same two-job recipe shape as the manager-based variant, but constructs
    the RecipeHandler directly from the recipe and its RecipeJob rows and
    uses a real workspace for the input file and recipe data.
    """
    # Job type 1: accepts a single text/plain file, produces multiple PNG files
    input_name_1 = 'Test Input 1'
    output_name_1 = 'Test Output 1'
    interface_1 = {
        'version': '1.0',
        'command': 'my_cmd',
        'command_arguments': 'args',
        'input_data': [{
            'name': input_name_1,
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'output_data': [{
            'name': output_name_1,
            'type': 'files',
            'media_type': 'image/png',
        }],
    }
    job_type_1 = job_test_utils.create_job_type(interface=interface_1)
    job_1 = job_test_utils.create_job(job_type=job_type_1)

    # Job type 2: consumes the image files produced by job type 1
    input_name_2 = 'Test Input 2'
    output_name_2 = 'Test Output 2'
    interface_2 = {
        'version': '1.0',
        'command': 'my_cmd',
        'command_arguments': 'args',
        'input_data': [{
            'name': input_name_2,
            'type': 'files',
            'media_types': ['image/png', 'image/tiff'],
        }],
        'output_data': [{
            'name': output_name_2,
            'type': 'file',
        }],
    }
    job_type_2 = job_test_utils.create_job_type(interface=interface_2)
    job_2 = job_test_utils.create_job(job_type=job_type_2)

    workspace = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(workspace=workspace, media_type='text/plain')

    # Recipe definition: Job 1 is fed the recipe input; Job 2 depends on Job 1
    definition = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': job_type_1.name,
                'version': job_type_1.version,
            },
            'recipe_inputs': [{
                'recipe_input': 'Recipe Input',
                'job_input': input_name_1,
            }]
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': job_type_2.name,
                'version': job_type_2.version,
            },
            'dependencies': [{
                'name': 'Job 1',
                'connections': [{
                    'output': output_name_1,
                    'input': input_name_2,
                }],
            }],
        }],
    }
    data = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'file_id': file_1.id,
        }],
        'workspace_id': workspace.id,
    }
    recipe_type = recipe_test_utils.create_recipe_type(definition=definition)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, data=data)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='Job 1', job=job_1)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='Job 2', job=job_2)
    recipe_jobs = list(RecipeJob.objects.filter(recipe_id=recipe.id))
    handler = RecipeHandler(recipe, recipe_jobs)

    jobs_to_queue = handler.get_existing_jobs_to_queue()

    # Make sure only Job 1 is returned and that its job data is correct
    self.assertEqual(len(jobs_to_queue), 1)
    self.assertEqual(jobs_to_queue[0][0].id, job_1.id)
    self.assertDictEqual(jobs_to_queue[0][1].get_dict(), {
        'version': '1.0',
        'input_data': [{
            'name': input_name_1,
            'file_id': file_1.id,
        }],
        'output_data': [{
            'name': output_name_1,
            'workspace_id': workspace.id,
        }],
    })
def setUp(self):
    """Builds fixtures for re-queue dependency tests.

    Creates a standalone RUNNING job plus two recipes: recipe A, whose
    FAILED Job 1 blocks Job 2, and recipe B, a three-job chain whose
    CANCELED Job 2 blocks Job 3. self.job_ids holds the jobs to re-queue
    and self.dependent_job_ids the downstream jobs expected to be affected.
    """
    django.setup()

    self.standalone_job = job_test_utils.create_job(status='RUNNING')

    # Recipe A: Job 1 (FAILED, already executed) -> Job 2 (BLOCKED)
    job_type_a_1 = job_test_utils.create_job_type()
    job_type_a_2 = job_test_utils.create_job_type()
    definition_a = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': job_type_a_1.name,
                'version': job_type_a_1.version,
            }
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': job_type_a_2.name,
                'version': job_type_a_2.version,
            },
            'dependencies': [{
                'name': 'Job 1'
            }],
        }],
    }
    recipe_type_a = recipe_test_utils.create_recipe_type(definition=definition_a)
    self.job_a_1 = job_test_utils.create_job(job_type=job_type_a_1, status='FAILED', num_exes=1)
    self.job_a_2 = job_test_utils.create_job(job_type=job_type_a_2, status='BLOCKED')
    data_a = {
        'version': '1.0',
        'input_data': [],
        'workspace_id': 1,
    }
    self.recipe_a = recipe_test_utils.create_recipe(recipe_type=recipe_type_a, data=data_a)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_a, job_name='Job 1', job=self.job_a_1)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_a, job_name='Job 2', job=self.job_a_2)

    # Create recipe for re-queing a job that should now be BLOCKED (and its dependencies)
    job_type_b_1 = job_test_utils.create_job_type()
    job_type_b_2 = job_test_utils.create_job_type()
    job_type_b_3 = job_test_utils.create_job_type()
    definition_b = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': job_type_b_1.name,
                'version': job_type_b_1.version,
            }
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': job_type_b_2.name,
                'version': job_type_b_2.version,
            },
            'dependencies': [{
                'name': 'Job 1'
            }],
        }, {
            'name': 'Job 3',
            'job_type': {
                'name': job_type_b_3.name,
                'version': job_type_b_3.version,
            },
            'dependencies': [{
                'name': 'Job 2'
            }],
        }],
    }
    recipe_type_b = recipe_test_utils.create_recipe_type(definition=definition_b)
    self.job_b_1 = job_test_utils.create_job(job_type=job_type_b_1, status='FAILED')
    self.job_b_2 = job_test_utils.create_job(job_type=job_type_b_2, status='CANCELED')
    self.job_b_3 = job_test_utils.create_job(job_type=job_type_b_3, status='BLOCKED')
    data_b = {
        'version': '1.0',
        'input_data': [],
        'workspace_id': 1,
    }
    self.recipe_b = recipe_test_utils.create_recipe(recipe_type=recipe_type_b, data=data_b)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_b, job_name='Job 1', job=self.job_b_1)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_b, job_name='Job 2', job=self.job_b_2)
    recipe_test_utils.create_recipe_job(recipe=self.recipe_b, job_name='Job 3', job=self.job_b_3)

    # Jobs acted upon by the tests, and the downstream jobs they should affect
    self.job_ids = [self.standalone_job.id, self.job_a_1.id, self.job_b_2.id]
    self.dependent_job_ids = {self.job_a_2.id, self.job_b_3.id}
def setUp(self):
    """Builds the same 11-job dependency-graph recipe fixture.

    Variant of the sibling setUp that does not capture the RecipeJob rows:
    five root jobs in distinct states (FAILED, COMPLETED, RUNNING, QUEUED,
    CANCELED) with dependent jobs named after their parents' statuses.
    """
    django.setup()

    # Root jobs, one per status of interest
    self.job_failed = job_test_utils.create_job(status='FAILED')
    self.job_completed = job_test_utils.create_job(status='COMPLETED')
    self.job_running = job_test_utils.create_job(status='RUNNING')
    self.job_queued = job_test_utils.create_job(status='QUEUED')
    self.job_canceled = job_test_utils.create_job(status='CANCELED')

    # Dependent jobs; suffixes encode the parents' statuses
    self.job_fa_co_a = job_test_utils.create_job(status='BLOCKED')
    self.job_fa_co_b = job_test_utils.create_job(status='PENDING')
    self.job_co_ru_qu_a = job_test_utils.create_job(status='BLOCKED')
    self.job_co_ru_qu_b = job_test_utils.create_job(status='BLOCKED')
    self.job_qu_ca_a = job_test_utils.create_job(status='PENDING')
    self.job_qu_ca_b = job_test_utils.create_job(status='PENDING')

    self.definition = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'job_failed',
            'job_type': {
                'name': self.job_failed.job_type.name,
                'version': self.job_failed.job_type.version,
            },
        }, {
            'name': 'job_completed',
            'job_type': {
                'name': self.job_completed.job_type.name,
                'version': self.job_completed.job_type.version,
            },
        }, {
            'name': 'job_running',
            'job_type': {
                'name': self.job_running.job_type.name,
                'version': self.job_running.job_type.version,
            },
        }, {
            'name': 'job_queued',
            'job_type': {
                'name': self.job_queued.job_type.name,
                'version': self.job_queued.job_type.version,
            },
        }, {
            'name': 'job_canceled',
            'job_type': {
                'name': self.job_canceled.job_type.name,
                'version': self.job_canceled.job_type.version,
            },
        }, {
            # Depends on one FAILED and one COMPLETED parent
            'name': 'job_fa_co_a',
            'job_type': {
                'name': self.job_fa_co_a.job_type.name,
                'version': self.job_fa_co_a.job_type.version,
            },
            'dependencies': [{
                'name': 'job_failed',
            }, {
                'name': 'job_completed',
            }],
        }, {
            'name': 'job_fa_co_b',
            'job_type': {
                'name': self.job_fa_co_b.job_type.name,
                'version': self.job_fa_co_b.job_type.version,
            },
            'dependencies': [{
                'name': 'job_fa_co_a',
            }],
        }, {
            # Depends on COMPLETED, RUNNING, and QUEUED parents
            'name': 'job_co_ru_qu_a',
            'job_type': {
                'name': self.job_co_ru_qu_a.job_type.name,
                'version': self.job_co_ru_qu_a.job_type.version,
            },
            'dependencies': [{
                'name': 'job_completed',
            }, {
                'name': 'job_running',
            }, {
                'name': 'job_queued',
            }],
        }, {
            'name': 'job_co_ru_qu_b',
            'job_type': {
                'name': self.job_co_ru_qu_b.job_type.name,
                'version': self.job_co_ru_qu_b.job_type.version,
            },
            'dependencies': [{
                'name': 'job_co_ru_qu_a',
            }],
        }, {
            # Depends on QUEUED and CANCELED parents
            'name': 'job_qu_ca_a',
            'job_type': {
                'name': self.job_qu_ca_a.job_type.name,
                'version': self.job_qu_ca_a.job_type.version,
            },
            'dependencies': [{
                'name': 'job_queued',
            }, {
                'name': 'job_canceled',
            }],
        }, {
            'name': 'job_qu_ca_b',
            'job_type': {
                'name': self.job_qu_ca_b.job_type.name,
                'version': self.job_qu_ca_b.job_type.version,
            },
            'dependencies': [{
                'name': 'job_qu_ca_a',
            }],
        }],
    }

    self.recipe_type = recipe_test_utils.create_recipe_type(definition=self.definition)
    self.recipe = recipe_test_utils.create_recipe(recipe_type=self.recipe_type)
    # Link every job into the recipe under its definition name
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_failed', job=self.job_failed)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_completed', job=self.job_completed)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_running', job=self.job_running)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_queued', job=self.job_queued)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_canceled', job=self.job_canceled)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_fa_co_a', job=self.job_fa_co_a)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_fa_co_b', job=self.job_fa_co_b)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_co_ru_qu_a', job=self.job_co_ru_qu_a)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_co_ru_qu_b', job=self.job_co_ru_qu_b)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_qu_ca_a', job=self.job_qu_ca_a)
    recipe_test_utils.create_recipe_job(recipe=self.recipe, job_name='job_qu_ca_b', job=self.job_qu_ca_b)
def setUp(self):
    """Builds fixtures for re-queue tests against the queue.

    Creates three standalone jobs (FAILED with prior executions, CANCELED,
    COMPLETED), recipe A whose FAILED Job 1 blocks Job 2, and recipe B, a
    three-job chain whose CANCELED Job 2 blocks Job 3. self.job_ids holds
    the IDs to re-queue and a mock QueueEventProcessor is registered.
    """
    django.setup()

    self.new_priority = 200
    self.standalone_failed_job = job_test_utils.create_job(status='FAILED', num_exes=3, priority=100)
    self.standalone_canceled_job = job_test_utils.create_job(status='CANCELED', num_exes=1, priority=100)
    self.standalone_completed_job = job_test_utils.create_job(status='COMPLETED')

    # Create recipe for re-queing a job that should now be PENDING (and its dependencies)
    job_type_a_1 = job_test_utils.create_job_type()
    job_type_a_2 = job_test_utils.create_job_type()
    definition_a = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': job_type_a_1.name,
                'version': job_type_a_1.version,
            }
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': job_type_a_2.name,
                'version': job_type_a_2.version,
            },
            'dependencies': [{
                'name': 'Job 1'
            }],
        }],
    }
    recipe_type_a = recipe_test_utils.create_recipe_type(definition=definition_a)
    self.job_a_1 = job_test_utils.create_job(job_type=job_type_a_1, status='FAILED', num_exes=1)
    self.job_a_2 = job_test_utils.create_job(job_type=job_type_a_2, status='BLOCKED')
    data_a = {
        'version': '1.0',
        'input_data': [],
        'workspace_id': 1,
    }
    recipe_a = recipe_test_utils.create_recipe(recipe_type=recipe_type_a, data=data_a)
    recipe_test_utils.create_recipe_job(recipe=recipe_a, job_name='Job 1', job=self.job_a_1)
    recipe_test_utils.create_recipe_job(recipe=recipe_a, job_name='Job 2', job=self.job_a_2)

    # Create recipe for re-queing a job that should now be BLOCKED (and its dependencies)
    job_type_b_1 = job_test_utils.create_job_type()
    job_type_b_2 = job_test_utils.create_job_type()
    job_type_b_3 = job_test_utils.create_job_type()
    definition_b = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': job_type_b_1.name,
                'version': job_type_b_1.version,
            }
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': job_type_b_2.name,
                'version': job_type_b_2.version,
            },
            'dependencies': [{
                'name': 'Job 1'
            }],
        }, {
            'name': 'Job 3',
            'job_type': {
                'name': job_type_b_3.name,
                'version': job_type_b_3.version,
            },
            'dependencies': [{
                'name': 'Job 2'
            }],
        }],
    }
    recipe_type_b = recipe_test_utils.create_recipe_type(definition=definition_b)
    self.job_b_1 = job_test_utils.create_job(job_type=job_type_b_1, status='FAILED')
    self.job_b_2 = job_test_utils.create_job(job_type=job_type_b_2, status='CANCELED')
    self.job_b_3 = job_test_utils.create_job(job_type=job_type_b_3, status='BLOCKED')
    data_b = {
        'version': '1.0',
        'input_data': [],
        'workspace_id': 1,
    }
    recipe_b = recipe_test_utils.create_recipe(recipe_type=recipe_type_b, data=data_b)
    recipe_test_utils.create_recipe_job(recipe=recipe_b, job_name='Job 1', job=self.job_b_1)
    recipe_test_utils.create_recipe_job(recipe=recipe_b, job_name='Job 2', job=self.job_b_2)
    recipe_test_utils.create_recipe_job(recipe=recipe_b, job_name='Job 3', job=self.job_b_3)

    # Job IDs to re-queue
    self.job_ids = [
        self.standalone_failed_job.id, self.standalone_canceled_job.id, self.standalone_completed_job.id,
        self.job_a_1.id, self.job_b_2.id
    ]

    # Register a fake processor
    self.mock_processor = MagicMock(QueueEventProcessor)
    Queue.objects.register_processor(lambda: self.mock_processor)
def setUp(self):
    """Builds fixtures for re-queue tests, including a superseded job.

    Same shape as the sibling re-queue setUp, plus a standalone FAILED job
    that is immediately superseded via Job.objects.supersede_jobs() so the
    tests can verify superseded jobs are excluded from re-queueing.
    """
    django.setup()

    self.new_priority = 200
    self.standalone_failed_job = job_test_utils.create_job(status='FAILED', num_exes=3, priority=100)
    self.standalone_superseded_job = job_test_utils.create_job(status='FAILED', num_exes=1)
    self.standalone_canceled_job = job_test_utils.create_job(status='CANCELED', num_exes=1, priority=100)
    self.standalone_completed_job = job_test_utils.create_job(status='COMPLETED')
    # Mark the superseded job as replaced as of now
    Job.objects.supersede_jobs([self.standalone_superseded_job], now())

    # Create recipe for re-queing a job that should now be PENDING (and its dependencies)
    job_type_a_1 = job_test_utils.create_job_type()
    job_type_a_2 = job_test_utils.create_job_type()
    definition_a = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': job_type_a_1.name,
                'version': job_type_a_1.version,
            }
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': job_type_a_2.name,
                'version': job_type_a_2.version,
            },
            'dependencies': [{
                'name': 'Job 1'
            }],
        }],
    }
    recipe_type_a = recipe_test_utils.create_recipe_type(definition=definition_a)
    self.job_a_1 = job_test_utils.create_job(job_type=job_type_a_1, status='FAILED', num_exes=1)
    self.job_a_2 = job_test_utils.create_job(job_type=job_type_a_2, status='BLOCKED')
    data_a = {
        'version': '1.0',
        'input_data': [],
        'workspace_id': 1,
    }
    recipe_a = recipe_test_utils.create_recipe(recipe_type=recipe_type_a, data=data_a)
    recipe_test_utils.create_recipe_job(recipe=recipe_a, job_name='Job 1', job=self.job_a_1)
    recipe_test_utils.create_recipe_job(recipe=recipe_a, job_name='Job 2', job=self.job_a_2)

    # Create recipe for re-queing a job that should now be BLOCKED (and its dependencies)
    job_type_b_1 = job_test_utils.create_job_type()
    job_type_b_2 = job_test_utils.create_job_type()
    job_type_b_3 = job_test_utils.create_job_type()
    definition_b = {
        'version': '1.0',
        'input_data': [],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': job_type_b_1.name,
                'version': job_type_b_1.version,
            }
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': job_type_b_2.name,
                'version': job_type_b_2.version,
            },
            'dependencies': [{
                'name': 'Job 1'
            }],
        }, {
            'name': 'Job 3',
            'job_type': {
                'name': job_type_b_3.name,
                'version': job_type_b_3.version,
            },
            'dependencies': [{
                'name': 'Job 2'
            }],
        }],
    }
    recipe_type_b = recipe_test_utils.create_recipe_type(definition=definition_b)
    self.job_b_1 = job_test_utils.create_job(job_type=job_type_b_1, status='FAILED')
    self.job_b_2 = job_test_utils.create_job(job_type=job_type_b_2, status='CANCELED')
    self.job_b_3 = job_test_utils.create_job(job_type=job_type_b_3, status='BLOCKED')
    data_b = {
        'version': '1.0',
        'input_data': [],
        'workspace_id': 1,
    }
    recipe_b = recipe_test_utils.create_recipe(recipe_type=recipe_type_b, data=data_b)
    recipe_test_utils.create_recipe_job(recipe=recipe_b, job_name='Job 1', job=self.job_b_1)
    recipe_test_utils.create_recipe_job(recipe=recipe_b, job_name='Job 2', job=self.job_b_2)
    recipe_test_utils.create_recipe_job(recipe=recipe_b, job_name='Job 3', job=self.job_b_3)

    # Job IDs to re-queue (the superseded job is deliberately excluded)
    self.job_ids = [
        self.standalone_failed_job.id, self.standalone_canceled_job.id, self.standalone_completed_job.id,
        self.job_a_1.id, self.job_b_2.id
    ]

    # Register a fake processor
    self.mock_processor = MagicMock(QueueEventProcessor)
    Queue.objects.register_processor(lambda: self.mock_processor)
def test_execute(self):
    """Tests calling CancelJobs.execute() successfully

    Cancels four recipe jobs in assorted states, verifies the resulting
    statuses and follow-up messages, then re-executes the same message to
    confirm the operation is idempotent.
    """
    when = now()
    data = JobData()
    from recipe.test import utils as recipe_test_utils
    recipe = recipe_test_utils.create_recipe()
    job_type = job_test_utils.create_seed_job_type()
    # Jobs 1-3 have input data; Job 4 is PENDING with no input yet.
    # NOTE(review): assertions below index jobs after order_by('id'), so they
    # assume IDs are assigned in creation order.
    job_1 = job_test_utils.create_job(job_type=job_type, num_exes=3, status='FAILED', input=data.get_dict(),
                                      recipe=recipe)
    job_2 = job_test_utils.create_job(job_type=job_type, num_exes=3, status='CANCELED', input=data.get_dict(),
                                      recipe=recipe)
    job_3 = job_test_utils.create_job(job_type=job_type, num_exes=1, status='COMPLETED', input=data.get_dict(),
                                      recipe=recipe)
    job_4 = job_test_utils.create_job(job_type=job_type, num_exes=0, status='PENDING', recipe=recipe)
    job_ids = [job_1.id, job_2.id, job_3.id, job_4.id]
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_1', job=job_1)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_2', job=job_2)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_3', job=job_3)
    recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_4', job=job_4)

    # Add jobs to message
    message = CancelJobs()
    message.when = when
    if message.can_fit_more():
        message.add_job(job_1.id)
    if message.can_fit_more():
        message.add_job(job_2.id)
    if message.can_fit_more():
        message.add_job(job_3.id)
    if message.can_fit_more():
        message.add_job(job_4.id)

    # Execute message
    result = message.execute()
    self.assertTrue(result)

    jobs = Job.objects.filter(id__in=job_ids).order_by('id')
    # Job 1 should have been canceled
    self.assertEqual(jobs[0].status, 'CANCELED')
    self.assertEqual(jobs[0].last_status_change, when)
    # Job 2 was already canceled
    self.assertEqual(jobs[1].status, 'CANCELED')
    self.assertNotEqual(jobs[1].last_status_change, when)
    # Job 3 was already COMPLETED, so can't be canceled
    self.assertEqual(jobs[2].status, 'COMPLETED')
    self.assertNotEqual(jobs[2].last_status_change, when)
    # Job 4 should have been canceled
    self.assertEqual(jobs[3].status, 'CANCELED')
    self.assertEqual(jobs[3].last_status_change, when)

    # Expected forced-nodes payload: all nodes forced
    from recipe.diff.forced_nodes import ForcedNodes
    from recipe.diff.json.forced_nodes_v6 import convert_forced_nodes_to_v6
    forced_nodes = ForcedNodes()
    forced_nodes.set_all_nodes()
    forced_nodes_dict = convert_forced_nodes_to_v6(forced_nodes).get_dict()

    # Should be messages to update recipe and update recipe metrics after canceling jobs
    self.assertEqual(len(message.new_messages), 2)
    update_recipe_msg = None
    update_recipe_metrics_msg = None
    for msg in message.new_messages:
        if msg.type == 'update_recipe':
            update_recipe_msg = msg
        elif msg.type == 'update_recipe_metrics':
            update_recipe_metrics_msg = msg
    self.assertIsNotNone(update_recipe_msg)
    self.assertIsNotNone(update_recipe_metrics_msg)
    self.assertEqual(update_recipe_msg.root_recipe_id, recipe.id)
    self.assertDictEqual(convert_forced_nodes_to_v6(update_recipe_msg.forced_nodes).get_dict(), forced_nodes_dict)
    self.assertListEqual(update_recipe_metrics_msg._recipe_ids, [recipe.id])

    # Test executing message again
    message.new_messages = []
    result = message.execute()
    self.assertTrue(result)

    # All results should be the same
    jobs = Job.objects.filter(id__in=job_ids).order_by('id')
    # Job 1 should have been canceled
    self.assertEqual(jobs[0].status, 'CANCELED')
    self.assertEqual(jobs[0].last_status_change, when)
    # Job 2 was already canceled
    self.assertEqual(jobs[1].status, 'CANCELED')
    self.assertNotEqual(jobs[1].last_status_change, when)
    # Job 3 was already COMPLETED, so can't be canceled
    self.assertEqual(jobs[2].status, 'COMPLETED')
    self.assertNotEqual(jobs[2].last_status_change, when)
    # Job 4 should have been canceled
    self.assertEqual(jobs[3].status, 'CANCELED')
    self.assertEqual(jobs[3].last_status_change, when)

    # Should be messages to update recipe and update recipe metrics after canceling jobs
    self.assertEqual(len(message.new_messages), 2)
    update_recipe_msg = None
    update_recipe_metrics_msg = None
    for msg in message.new_messages:
        if msg.type == 'update_recipe':
            update_recipe_msg = msg
        elif msg.type == 'update_recipe_metrics':
            update_recipe_metrics_msg = msg
    self.assertIsNotNone(update_recipe_msg)
    self.assertIsNotNone(update_recipe_metrics_msg)
    self.assertEqual(update_recipe_msg.root_recipe_id, recipe.id)
    self.assertDictEqual(convert_forced_nodes_to_v6(update_recipe_msg.forced_nodes).get_dict(), forced_nodes_dict)
    self.assertListEqual(update_recipe_metrics_msg._recipe_ids, [recipe.id])