def test_successful_with_partial_recipe(self): """Tests calling QueueManager.handle_job_completion() successfully with a job in a recipe.""" # Queue the recipe recipe_id = Queue.objects.queue_new_recipe(self.recipe_type, self.data, self.event) # Fake out completing Job 1 job_1 = RecipeJob.objects.select_related('job').get(recipe_id=recipe_id, job_name='Job 1').job job_exe_1 = JobExecution.objects.get(job_id=job_1.id) output_file_1 = product_test_utils.create_product(job_exe=job_exe_1, workspace=self.workspace) output_file_2 = product_test_utils.create_product(job_exe=job_exe_1, workspace=self.workspace) results = JobResults() results.add_file_list_parameter('Test Output 1', [output_file_1.id, output_file_2.id]) JobExecution.objects.post_steps_results(job_exe_1.id, results, ResultsManifest()) Job.objects.filter(pk=job_1.id).update(status='RUNNING') JobExecution.objects.filter(pk=job_exe_1.id).update(status='RUNNING') # Call method to test Queue.objects.handle_job_completion(job_exe_1.id, now()) # Make sure processor was called self.assertTrue(self.mock_processor.process_completed.called) # Make sure Job 2 in the recipe is successfully queued recipe_job_2 = RecipeJob.objects.select_related('job', 'recipe').get(recipe_id=recipe_id, job_name='Job 2') self.assertEqual(recipe_job_2.job.status, 'QUEUED') self.assertIsNone(recipe_job_2.recipe.completed)
def test_successful_with_full_recipe(self): """Tests calling QueueManager.handle_job_completion() successfully with all jobs in a recipe.""" # Queue the recipe handler = Queue.objects.queue_new_recipe(self.recipe_type, self.data, self.event) # Fake out completing Job 1 job_1 = RecipeJob.objects.select_related('job').get( recipe_id=handler.recipe.id, job_name='Job 1').job job_exe_1 = job_test_utils.create_job_exe(job=job_1, status='RUNNING') output_file_1 = product_test_utils.create_product( job_exe=job_exe_1, workspace=self.workspace) output_file_2 = product_test_utils.create_product( job_exe=job_exe_1, workspace=self.workspace) results = JobResults() results.add_file_list_parameter('Test Output 1', [output_file_1.id, output_file_2.id]) job_exe_output_1 = JobExecutionOutput() job_exe_output_1.job_exe_id = job_exe_1.id job_exe_output_1.job_id = job_exe_1.job_id job_exe_output_1.job_type_id = job_exe_1.job_type_id job_exe_output_1.exe_num = job_exe_1.exe_num job_exe_output_1.output = results.get_dict() job_exe_output_1.save() Job.objects.filter(pk=job_1.id).update(status='RUNNING') Queue.objects.handle_job_completion(job_1.id, job_1.num_exes, now()) # Fake out completing Job 2 job_2 = RecipeJob.objects.select_related('job').get( recipe_id=handler.recipe.id, job_name='Job 2').job job_exe_2 = job_test_utils.create_job_exe(job=job_2, status='RUNNING') output_file_1 = product_test_utils.create_product( job_exe=job_exe_2, workspace=self.workspace) output_file_2 = product_test_utils.create_product( job_exe=job_exe_2, workspace=self.workspace) results = JobResults() results.add_file_list_parameter('Test Output 2', [output_file_1.id, output_file_2.id]) job_exe_output_2 = JobExecutionOutput() job_exe_output_2.job_exe_id = job_exe_2.id job_exe_output_2.job_id = job_exe_2.job_id job_exe_output_2.job_type_id = job_exe_2.job_type_id job_exe_output_2.exe_num = job_exe_2.exe_num job_exe_output_2.output = results.get_dict() job_exe_output_2.save() 
Job.objects.filter(pk=job_2.id).update(status='RUNNING') # Call method to test Queue.objects.handle_job_completion(job_2.id, job_2.num_exes, now()) # Make sure final recipe attributes are updated recipe = Recipe.objects.get(pk=handler.recipe.id) self.assertIsNotNone(recipe.completed)
def test_successful_file_list(self):
    """Tests calling JobResults.add_output_to_data() successfully with a file list parameter"""

    # Build a results object holding one multi-file output parameter
    out_param = 'foo'
    in_param = 'bar'
    ids = [1, 2, 3, 4]
    results = JobResults()
    results.add_file_list_parameter(out_param, ids)

    # Forward the output into a mocked job data object and verify the
    # file IDs were passed through unchanged
    mock_data = MagicMock()
    results.add_output_to_data(out_param, mock_data, in_param)
    mock_data.add_file_list_input.assert_called_with(in_param, ids)
def test_successful_file_list(self):
    """Tests calling JobResults.add_output_to_data() successfully with a file list parameter"""

    # One output parameter mapped to several file IDs
    param = u'foo'
    target = u'bar'
    ids = [1, 2, 3, 4]
    results = JobResults()
    results.add_file_list_parameter(param, ids)

    # The mocked job data records the forwarded call so we can verify
    # the IDs arrive intact under the requested input name
    data_mock = MagicMock()
    results.add_output_to_data(param, data_mock, target)
    data_mock.add_file_list_input.assert_called_with(target, ids)
def test_successful_with_full_recipe(self): """Tests calling QueueManager.handle_job_completion() successfully with all jobs in a recipe.""" # Queue the recipe recipe_id = Queue.objects.queue_new_recipe(self.recipe_type, self.data, self.event) # Fake out completing Job 1 job_1 = RecipeJob.objects.select_related("job").get(recipe_id=recipe_id, job_name="Job 1").job job_exe_1 = JobExecution.objects.get(job_id=job_1.id) output_file_1 = product_test_utils.create_product(job_exe=job_exe_1, workspace=self.workspace) output_file_2 = product_test_utils.create_product(job_exe=job_exe_1, workspace=self.workspace) results = JobResults() results.add_file_list_parameter("Test Output 1", [output_file_1.id, output_file_2.id]) JobExecution.objects.post_steps_results(job_exe_1.id, results, ResultsManifest()) Job.objects.filter(pk=job_1.id).update(status="RUNNING") JobExecution.objects.filter(pk=job_exe_1.id).update(status="RUNNING") Queue.objects.handle_job_completion(job_exe_1.id, now()) # Fake out completing Job 2 job_2 = RecipeJob.objects.select_related("job").get(recipe_id=recipe_id, job_name="Job 2").job job_exe_2 = JobExecution.objects.get(job_id=job_2.id) output_file_1 = product_test_utils.create_product(job_exe=job_exe_2, workspace=self.workspace) output_file_2 = product_test_utils.create_product(job_exe=job_exe_2, workspace=self.workspace) results = JobResults() results.add_file_list_parameter("Test Output 2", [output_file_1.id, output_file_2.id]) JobExecution.objects.post_steps_results(job_exe_2.id, results, ResultsManifest()) Job.objects.filter(pk=job_2.id).update(status="RUNNING") JobExecution.objects.filter(pk=job_exe_2.id).update(status="RUNNING") # Call method to test Queue.objects.handle_job_completion(job_exe_2.id, now()) # Make sure processor was called self.assertEqual(self.mock_processor.process_completed.call_count, 2) # Make sure final recipe attributes are updated recipe = Recipe.objects.get(pk=recipe_id) self.assertIsNotNone(recipe.completed)
def store_output_data_files(self, data_files, job_exe):
    """Stores the given data output files

    :param data_files: Dict with each file parameter name mapping to a list of ProductFileMetadata classes
    :type data_files: {string: [`ProductFileMetadata`]}
    :param job_exe: The job execution model (with related job and job_type fields) that is storing the output
        data files
    :type job_exe: :class:`job.models.JobExecution`
    :returns: The job results
    :rtype: :class:`job.configuration.results.job_results.JobResults`
    """

    # Group the output files by destination workspace and remember which
    # output parameter each local file path belongs to
    workspace_files = {}  # Workspace ID -> [ProductFileMetadata]
    params_by_file_path = {}  # Absolute local file path -> output parameter name
    output_workspaces = JobData.create_output_workspace_dict(data_files.keys(), self, job_exe)
    for name, entry in data_files.items():
        file_list = workspace_files.setdefault(output_workspaces[name], [])
        if isinstance(entry, list):
            for file_entry in entry:
                file_path = os.path.normpath(file_entry.local_path)
                if not os.path.isfile(file_path):
                    raise Exception('%s is not a valid file' % file_path)
                params_by_file_path[file_path] = name
                file_list.append(file_entry)
        else:
            file_path = os.path.normpath(entry.local_path)
            if not os.path.isfile(file_path):
                raise Exception('%s is not a valid file' % file_path)
            params_by_file_path[file_path] = name
            entry.local_path = file_path
            file_list.append(entry)

    # Hand the grouped files to the configured store
    data_file_store = DATA_FILE_STORE['DATA_FILE_STORE']
    if not data_file_store:
        raise Exception('No data file store found')
    stored_files = data_file_store.store_files(workspace_files, self.get_input_file_ids(), job_exe)

    # Map each output parameter back to the ID (or list of IDs) of its stored file(s)
    param_file_ids = {}  # Output parameter name -> file ID or [file IDs]
    for file_path in stored_files:
        stored_id = stored_files[file_path]
        param_name = params_by_file_path[file_path]
        if isinstance(data_files[param_name], list):
            param_file_ids.setdefault(param_name, []).append(stored_id)
        else:
            param_file_ids[param_name] = stored_id

    # Build the job results from the collected file IDs
    results = JobResults()
    for param_name, id_entry in param_file_ids.items():
        if isinstance(id_entry, list):
            results.add_file_list_parameter(param_name, id_entry)
        else:
            results.add_file_parameter(param_name, id_entry)
    return results
def store_output_data_files(self, data_files, job_exe):
    """Stores the given data output files

    :param data_files: Dict with each file parameter name mapping to a tuple of absolute local file path and
        media type (media type is optionally None) for a single file parameter and a list of tuples for a
        multiple file parameter
    :type data_files: {string: tuple(string, string)} or [tuple(string, string)]
    :param job_exe: The job execution model (with related job and job_type fields) that is storing the output
        data files
    :type job_exe: :class:`job.models.JobExecution`
    :returns: The job results
    :rtype: :class:`job.configuration.results.job_results.JobResults`
    """

    # Organize the data files by destination workspace
    workspace_files = {}  # Workspace ID -> [(absolute local file path, media type)]
    params_by_file_path = {}  # Absolute local file path -> output parameter name
    for name in data_files:
        file_output = self.data_outputs_by_name[name]
        workspace_id = file_output['workspace_id']
        if workspace_id in workspace_files:
            workspace_file_list = workspace_files[workspace_id]
        else:
            workspace_file_list = []
            workspace_files[workspace_id] = workspace_file_list
        data_file_entry = data_files[name]
        if isinstance(data_file_entry, list):
            for file_tuple in data_file_entry:
                file_path = os.path.normpath(file_tuple[0])
                if not os.path.isfile(file_path):
                    raise Exception('%s is not a valid file' % file_path)
                params_by_file_path[file_path] = name
                # Re-pack the tuple with the normalized path and the parameter
                # name appended; an optional third element is carried through.
                # NOTE(review): the original comment said "relative to
                # upload_dir" but only normpath is applied — confirm intent.
                if len(file_tuple) == 2:
                    new_tuple = (file_path, file_tuple[1], name)
                else:
                    new_tuple = (file_path, file_tuple[1], name, file_tuple[2])
                workspace_file_list.append(new_tuple)
        else:
            file_path = os.path.normpath(data_file_entry[0])
            if not os.path.isfile(file_path):
                raise Exception('%s is not a valid file' % file_path)
            params_by_file_path[file_path] = name
            # Same re-packing for the single-file case
            if len(data_file_entry) == 2:
                new_tuple = (file_path, data_file_entry[1], name)
            else:
                new_tuple = (file_path, data_file_entry[1], name, data_file_entry[2])
            workspace_file_list.append(new_tuple)

    # Store all files through the configured data file store
    data_file_store = DATA_FILE_STORE['DATA_FILE_STORE']
    if not data_file_store:
        raise Exception('No data file store found')
    stored_files = data_file_store.store_files(workspace_files, self.get_input_file_ids(), job_exe)

    # Organize results: map each parameter back to its stored file ID(s)
    param_file_ids = {}  # Output parameter name -> file ID or [file IDs]
    for file_path in stored_files:
        file_id = stored_files[file_path]
        name = params_by_file_path[file_path]
        if isinstance(data_files[name], list):
            if name in param_file_ids:
                file_id_list = param_file_ids[name]
            else:
                file_id_list = []
                param_file_ids[name] = file_id_list
            file_id_list.append(file_id)
        else:
            param_file_ids[name] = file_id

    # Create job results
    results = JobResults()
    for name in param_file_ids:
        param_entry = param_file_ids[name]
        if isinstance(param_entry, list):
            results.add_file_list_parameter(name, param_entry)
        else:
            results.add_file_parameter(name, param_entry)
    return results
def store_output_data_files(self, data_files, job_exe):
    """Stores the given data output files

    :param data_files: Dict with each file parameter name mapping to a tuple of absolute local file path and
        media type (media type is optionally None) for a single file parameter and a list of tuples for a
        multiple file parameter
    :type data_files: dict of str -> tuple(str, str) or list of tuple(str, str)
    :param job_exe: The job execution model (with related job and job_type fields) that is storing the output
        data files
    :type job_exe: :class:`job.models.JobExecution`
    :returns: The job results
    :rtype: :class:`job.configuration.results.job_results.JobResults`
    """

    # Organize the data files by destination workspace
    workspace_files = {}  # Workspace ID -> list of (absolute local file path, media type)
    params_by_file_path = {}  # Absolute local file path -> output parameter name
    for name in data_files:
        file_output = self.data_outputs_by_name[name]
        workspace_id = file_output['workspace_id']
        if workspace_id in workspace_files:
            workspace_file_list = workspace_files[workspace_id]
        else:
            workspace_file_list = []
            workspace_files[workspace_id] = workspace_file_list
        data_file_entry = data_files[name]
        if isinstance(data_file_entry, list):
            for file_tuple in data_file_entry:
                file_path = os.path.normpath(file_tuple[0])
                if not os.path.isfile(file_path):
                    raise Exception('%s is not a valid file' % file_path)
                params_by_file_path[file_path] = name
                # Re-pack the tuple with the normalized path; an optional
                # third element is carried through unchanged.
                # NOTE(review): the original comment said "relative to
                # upload_dir" but only normpath is applied — confirm intent.
                if len(file_tuple) == 2:
                    new_tuple = (file_path, file_tuple[1])
                else:
                    new_tuple = (file_path, file_tuple[1], file_tuple[2])
                workspace_file_list.append(new_tuple)
        else:
            file_path = os.path.normpath(data_file_entry[0])
            if not os.path.isfile(file_path):
                raise Exception('%s is not a valid file' % file_path)
            params_by_file_path[file_path] = name
            # Same re-packing for the single-file case
            if len(data_file_entry) == 2:
                new_tuple = (file_path, data_file_entry[1])
            else:
                new_tuple = (file_path, data_file_entry[1], data_file_entry[2])
            workspace_file_list.append(new_tuple)

    # Store all files through the configured data file store
    data_file_store = DATA_FILE_STORE['DATA_FILE_STORE']
    if not data_file_store:
        raise Exception('No data file store found')
    stored_files = data_file_store.store_files(workspace_files, self.get_input_file_ids(), job_exe)

    # Organize results: map each parameter back to its stored file ID(s)
    param_file_ids = {}  # Output parameter name -> file ID or list of file IDs
    for file_path in stored_files:
        file_id = stored_files[file_path]
        name = params_by_file_path[file_path]
        if isinstance(data_files[name], list):
            if name in param_file_ids:
                file_id_list = param_file_ids[name]
            else:
                file_id_list = []
                param_file_ids[name] = file_id_list
            file_id_list.append(file_id)
        else:
            param_file_ids[name] = file_id

    # Create job results
    results = JobResults()
    for name in param_file_ids:
        param_entry = param_file_ids[name]
        if isinstance(param_entry, list):
            results.add_file_list_parameter(name, param_entry)
        else:
            results.add_file_parameter(name, param_entry)
    return results
def test_successful_supersede(self):
    """Tests calling QueueManager.queue_new_recipe() successfully when superseding a recipe.

    Job 1 is identical between the old and new recipe types, so it should be
    copied into the new recipe as already COMPLETED; only the changed Job 2
    should be re-queued, and only the new recipe should ever complete.
    """

    # Queue initial recipe and complete its first job
    node = node_test_utils.create_node()
    recipe_id = Queue.objects.queue_new_recipe(self.recipe_type, self.data, self.event)
    recipe = Recipe.objects.get(id=recipe_id)
    recipe_job_1 = RecipeJob.objects.select_related('job__job_exe').get(recipe_id=recipe_id, job_name='Job 1')
    job_exe_1 = JobExecution.objects.get(job_id=recipe_job_1.job_id)
    queued_job_exe = QueuedJobExecution(Queue.objects.get(job_exe_id=job_exe_1.id))
    queued_job_exe.accepted(node, JobResources(cpus=10, mem=1000, disk_in=1000, disk_out=1000, disk_total=2000))
    Queue.objects.schedule_job_executions('123', [queued_job_exe], {})
    results = JobResults()
    results.add_file_list_parameter('Test Output 1', [product_test_utils.create_product().file_id])
    JobExecution.objects.filter(id=job_exe_1.id).update(results=results.get_dict())
    Queue.objects.handle_job_completion(job_exe_1.id, now())

    # Create a new recipe type that has a new version of job 2 (job 1 is identical)
    new_job_type_2 = job_test_utils.create_job_type(name=self.job_type_2.name, version='New Version',
                                                    interface=self.job_type_2.interface)
    new_definition = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'jobs': [{
            'name': 'New Job 1',
            'job_type': {
                'name': self.job_type_1.name,
                'version': self.job_type_1.version,
            },
            'recipe_inputs': [{
                'recipe_input': 'Recipe Input',
                'job_input': 'Test Input 1',
            }]
        }, {
            'name': 'New Job 2',
            'job_type': {
                'name': new_job_type_2.name,
                'version': new_job_type_2.version,
            },
            'dependencies': [{
                'name': 'New Job 1',
                'connections': [{
                    'output': 'Test Output 1',
                    'input': 'Test Input 2',
                }]
            }]
        }]
    }
    new_recipe_type = recipe_test_utils.create_recipe_type(name=self.recipe_type.name, definition=new_definition)
    event = trigger_test_utils.create_trigger_event()
    recipe_job_1 = RecipeJob.objects.select_related('job').get(recipe_id=recipe_id, job_name='Job 1')
    recipe_job_2 = RecipeJob.objects.select_related('job').get(recipe_id=recipe_id, job_name='Job 2')
    superseded_jobs = {'Job 1': recipe_job_1.job, 'Job 2': recipe_job_2.job}
    graph_a = self.recipe_type.get_recipe_definition().get_graph()
    graph_b = new_recipe_type.get_recipe_definition().get_graph()
    delta = RecipeGraphDelta(graph_a, graph_b)

    # Queue new recipe that supersedes the old recipe
    new_recipe_id = Queue.objects.queue_new_recipe(new_recipe_type, None, event, recipe, delta, superseded_jobs)

    # Ensure old recipe is superseded
    recipe = Recipe.objects.get(id=recipe_id)
    self.assertTrue(recipe.is_superseded)

    # Ensure new recipe supersedes old recipe
    new_recipe = Recipe.objects.get(id=new_recipe_id)
    self.assertEqual(new_recipe.superseded_recipe_id, recipe_id)

    # Ensure that job 1 is already completed (it was copied from original recipe) and that job 2 is queued
    new_recipe_job_1 = RecipeJob.objects.select_related('job').get(recipe_id=new_recipe_id, job_name='New Job 1')
    new_recipe_job_2 = RecipeJob.objects.select_related('job').get(recipe_id=new_recipe_id, job_name='New Job 2')
    self.assertEqual(new_recipe_job_1.job.status, 'COMPLETED')
    self.assertFalse(new_recipe_job_1.is_original)
    self.assertEqual(new_recipe_job_2.job.status, 'QUEUED')
    self.assertTrue(new_recipe_job_2.is_original)

    # Complete both the old and new job 2 and check that only the new recipe completes
    job_exe_2 = JobExecution.objects.get(job_id=recipe_job_2.job_id)
    queued_job_exe_2 = QueuedJobExecution(Queue.objects.get(job_exe_id=job_exe_2.id))
    queued_job_exe_2.accepted(node, JobResources(cpus=10, mem=1000, disk_in=1000, disk_out=1000, disk_total=2000))
    Queue.objects.schedule_job_executions('123', [queued_job_exe_2], {})
    Queue.objects.handle_job_completion(job_exe_2.id, now())
    new_job_exe_2 = JobExecution.objects.get(job_id=new_recipe_job_2.job_id)
    new_queued_job_exe_2 = QueuedJobExecution(Queue.objects.get(job_exe_id=new_job_exe_2.id))
    new_queued_job_exe_2.accepted(node, JobResources(cpus=10, mem=1000, disk_in=1000, disk_out=1000, disk_total=2000))
    Queue.objects.schedule_job_executions('123', [new_queued_job_exe_2], {})
    Queue.objects.handle_job_completion(new_job_exe_2.id, now())
    recipe = Recipe.objects.get(id=recipe.id)
    new_recipe = Recipe.objects.get(id=new_recipe.id)
    self.assertIsNone(recipe.completed)
    self.assertIsNotNone(new_recipe.completed)
def test_successful_job_1_completed(self, mock_store):
    """Tests calling RecipeDefinition.get_next_jobs_to_queue() successfully when job 1 has been completed.

    With Job 1 completed, only Job 2 should be returned, and its job data
    must receive Job 1's output file IDs through the defined connection.
    """

    # Two-job recipe: Job 2 depends on Job 1's output
    definition = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': self.job_type_1.name,
                'version': self.job_type_1.version,
            },
            'recipe_inputs': [{
                'recipe_input': 'Recipe Input',
                'job_input': self.input_name_1,
            }]
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': self.job_type_2.name,
                'version': self.job_type_2.version,
            },
            'dependencies': [{
                'name': 'Job 1',
                'connections': [{
                    'output': self.output_name_1,
                    'input': self.input_name_2,
                }],
            }],
        }],
    }
    recipe_definition = RecipeDefinition(definition)
    recipe_definition.validate_job_interfaces()
    data = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'file_id': self.file_1.id,
        }],
        'workspace_id': 1,
    }
    recipe_data = RecipeData(data)
    recipe_definition.validate_data(recipe_data)

    # Fake Job 1's completed results on the job model
    png_file_ids = [98, 99, 100]
    job_results = JobResults()
    job_results.add_file_list_parameter(self.output_name_1, png_file_ids)
    job_1 = Job.objects.select_related('job_type').get(pk=self.job_1.id)
    job_1.results = job_results.get_dict()
    job_1.save()
    job_2 = Job.objects.select_related('job_type').get(pk=self.job_2.id)

    results = recipe_definition.get_next_jobs_to_queue(recipe_data, {'Job 2': job_2}, {'Job 1': job_1})

    # Make sure only Job 2 is returned and that its job data is correct
    self.assertListEqual([self.job_2.id], results.keys())
    self.assertDictEqual(results[self.job_2.id].get_dict(), {
        'version': '1.0',
        'input_data': [{
            'name': self.input_name_2,
            'file_ids': png_file_ids,
        }],
        'output_data': [{
            'name': self.output_name_2,
            'workspace_id': 1,
        }],
    })
def test_successful_job_1_completed(self, mock_store):
    """Tests calling RecipeDefinition.get_next_jobs_to_queue() successfully when job 1 has been completed.

    With Job 1 completed, only Job 2 should be returned, and its job data
    must receive Job 1's output file IDs through the defined connection.
    """

    # Two-job recipe: Job 2 depends on Job 1's output
    definition = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': self.job_type_1.name,
                'version': self.job_type_1.version,
            },
            'recipe_inputs': [{
                'recipe_input': 'Recipe Input',
                'job_input': self.input_name_1,
            }]
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': self.job_type_2.name,
                'version': self.job_type_2.version,
            },
            'dependencies': [{
                'name': 'Job 1',
                'connections': [{
                    'output': self.output_name_1,
                    'input': self.input_name_2,
                }],
            }],
        }],
    }
    recipe_definition = RecipeDefinition(definition)
    recipe_definition.validate_job_interfaces()
    data = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'file_id': self.file_1.id,
        }],
        'workspace_id': 1,
    }
    recipe_data = RecipeData(data)
    recipe_definition.validate_data(recipe_data)

    # Fake Job 1's completed results on the job model
    png_file_ids = [98, 99, 100]
    job_results = JobResults()
    job_results.add_file_list_parameter(self.output_name_1, png_file_ids)
    job_1 = Job.objects.select_related('job_type').get(pk=self.job_1.id)
    job_1.results = job_results.get_dict()
    job_1.save()
    job_2 = Job.objects.select_related('job_type').get(pk=self.job_2.id)

    results = recipe_definition.get_next_jobs_to_queue(recipe_data, {'Job 2': job_2}, {'Job 1': job_1})

    # Make sure only Job 2 is returned and that its job data is correct
    self.assertListEqual([self.job_2.id], results.keys())
    self.assertDictEqual(results[self.job_2.id].get_dict(), {
        'version': '1.0',
        'input_data': [{
            'name': self.input_name_2,
            'file_ids': png_file_ids,
        }],
        'output_data': [{
            'name': self.output_name_2,
            'workspace_id': 1,
        }],
    })
def test_successful_supersede(self):
    """Tests calling QueueManager.queue_new_recipe() successfully when superseding a recipe.

    Job 1 is identical between the old and new recipe types, so it should be
    copied into the new recipe as already COMPLETED; only the changed Job 2
    should be queued as an original job.
    """

    # Queue initial recipe and complete its first job
    handler = Queue.objects.queue_new_recipe(self.recipe_type, self.data, self.event)
    recipe = Recipe.objects.get(id=handler.recipe.id)
    recipe_job_1 = RecipeNode.objects.select_related('job')
    recipe_job_1 = recipe_job_1.get(recipe_id=handler.recipe.id, node_name='Job 1')
    Job.objects.update_jobs_to_running([recipe_job_1.job], now())
    results = JobResults()
    results.add_file_list_parameter('Test Output 1', [product_test_utils.create_product().id])
    job_test_utils.create_job_exe(job=recipe_job_1.job, status='COMPLETED', output=results)
    Job.objects.update_jobs_to_completed([recipe_job_1.job], now())
    Job.objects.process_job_output([recipe_job_1.job_id], now())

    # Create a new recipe type that has a new version of job 2 (job 1 is identical)
    new_job_type_2 = job_test_utils.create_job_type(name=self.job_type_2.name, version='New Version',
                                                    interface=self.job_type_2.manifest)
    new_definition = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'jobs': [{
            'name': 'New Job 1',
            'job_type': {
                'name': self.job_type_1.name,
                'version': self.job_type_1.version,
            },
            'recipe_inputs': [{
                'recipe_input': 'Recipe Input',
                'job_input': 'Test Input 1',
            }]
        }, {
            'name': 'New Job 2',
            'job_type': {
                'name': new_job_type_2.name,
                'version': new_job_type_2.version,
            },
            'dependencies': [{
                'name': 'New Job 1',
                'connections': [{
                    'output': 'Test Output 1',
                    'input': 'Test Input 2',
                }]
            }]
        }]
    }
    new_recipe_type = recipe_test_utils.create_recipe_type(name=self.recipe_type.name, definition=new_definition)
    event = trigger_test_utils.create_trigger_event()
    recipe_job_1 = RecipeNode.objects.select_related('job').get(recipe_id=handler.recipe.id, node_name='Job 1')
    recipe_job_2 = RecipeNode.objects.select_related('job').get(recipe_id=handler.recipe.id, node_name='Job 2')
    superseded_jobs = {'Job 1': recipe_job_1.job, 'Job 2': recipe_job_2.job}
    graph_a = self.recipe_type.get_recipe_definition().get_graph()
    graph_b = new_recipe_type.get_recipe_definition().get_graph()
    delta = RecipeGraphDelta(graph_a, graph_b)

    # Queue new recipe that supersedes the old recipe
    new_handler = Queue.objects.queue_new_recipe(new_recipe_type, None, event, superseded_recipe=recipe,
                                                 delta=delta, superseded_jobs=superseded_jobs)

    # Ensure old recipe is superseded
    recipe = Recipe.objects.get(id=handler.recipe.id)
    self.assertTrue(recipe.is_superseded)

    # Ensure new recipe supersedes old recipe
    new_recipe = Recipe.objects.get(id=new_handler.recipe.id)
    self.assertEqual(new_recipe.superseded_recipe_id, handler.recipe.id)

    # Ensure that job 1 is already completed (it was copied from original recipe) and that job 2 is queued
    new_recipe_job_1 = RecipeNode.objects.select_related('job').get(recipe_id=new_handler.recipe.id,
                                                                    node_name='New Job 1')
    new_recipe_job_2 = RecipeNode.objects.select_related('job').get(recipe_id=new_handler.recipe.id,
                                                                    node_name='New Job 2')
    self.assertEqual(new_recipe_job_1.job.status, 'COMPLETED')
    self.assertFalse(new_recipe_job_1.is_original)
    self.assertEqual(new_recipe_job_2.job.status, 'QUEUED')
    self.assertTrue(new_recipe_job_2.is_original)