def setUp(self):
    """Build a sub-recipe type and a main recipe definition that references it."""
    django.setup()

    self.job_type1 = job_test_utils.create_seed_job_type(manifest=job_test_utils.MINIMUM_MANIFEST)
    self.job_type2 = job_test_utils.create_seed_job_type()

    # Point the sub recipe's node_a at job_type1
    self.sub_definition = copy.deepcopy(recipe_test_utils.SUB_RECIPE_DEFINITION)
    sub_node_a = self.sub_definition['nodes']['node_a']['node_type']
    sub_node_a['job_type_name'] = self.job_type1.name
    sub_node_a['job_type_version'] = self.job_type1.version
    sub_node_a['job_type_revision'] = self.job_type1.revision_num
    self.sub_def = RecipeDefinitionV6(self.sub_definition).get_definition()
    self.recipe_type1 = recipe_test_utils.create_recipe_type_v6(definition=self.sub_definition,
                                                                description="A sub recipe",
                                                                is_active=False, is_system=False)

    # Main recipe: node_a and node_b run job_type2, node_d runs the sub recipe
    self.main_definition = copy.deepcopy(recipe_test_utils.RECIPE_DEFINITION)
    for node_name in ('node_a', 'node_b'):
        node = self.main_definition['nodes'][node_name]['node_type']
        node['job_type_name'] = self.job_type2.name
        node['job_type_version'] = self.job_type2.version
        node['job_type_revision'] = self.job_type2.revision_num
    node_d = self.main_definition['nodes']['node_d']['node_type']
    node_d['recipe_type_name'] = self.recipe_type1.name
    node_d['recipe_type_revision'] = self.recipe_type1.revision_num
    self.v6_recipe_def = RecipeDefinitionV6(self.main_definition).get_definition()
def test_by_hour(self):
    """Tests successfully binning the metric plot view by hour."""
    # One metrics entry two hours ago and a second entry one hour ago
    jt_two_hours = job_test_utils.create_seed_job_type()
    metrics_test_utils.create_job_type(job_type=jt_two_hours,
                                       occurred=timezone.now() - datetime.timedelta(hours=2),
                                       job_time_sum=2200, job_time_min=200, job_time_max=2000,
                                       job_time_avg=200)
    jt_one_hour = job_test_utils.create_seed_job_type()
    metrics_test_utils.create_job_type(job_type=jt_one_hour,
                                       occurred=timezone.now() - datetime.timedelta(hours=1),
                                       job_time_sum=1100, job_time_min=100, job_time_max=1000,
                                       job_time_avg=100)

    response = self.client.generic('GET', '/v6/metrics/job-types/plot-data/?column=job_time_avg')
    self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)

    result = json.loads(response.content)
    self.assertEqual(len(result['results']), 1)
    self.assertEqual(len(result['results'][0]['values']), 3)
def setUp(self):
    """Create three job types with RUNNING jobs at known status-change times."""
    django.setup()

    manifest1 = job_test_utils.create_seed_manifest(name='type-1', jobVersion='1.0.0')
    self.job_type_1 = job_test_utils.create_seed_job_type(manifest=manifest1)
    manifest2 = job_test_utils.create_seed_manifest(name='type-2', jobVersion='2.0.0')
    self.job_type_2 = job_test_utils.create_seed_job_type(manifest=manifest2)
    manifest3 = job_test_utils.create_seed_manifest(name='type-1', jobVersion='2.0.0')
    self.job_type_3 = job_test_utils.create_seed_job_type(manifest=manifest3)

    def _at(seconds):
        # Fixed UTC timestamps so the orderings below are deterministic
        return datetime.datetime.utcfromtimestamp(seconds).replace(tzinfo=timezone.utc)

    self.entry_1_longest = _at(500000)
    self.entry_1_shortest = _at(650000)
    self.entry_2_longest = _at(600000)
    self.entry_2_shortest = _at(750000)
    self.entry_3_longest = _at(700000)
    self.entry_3_shortest = _at(800000)

    # Duplicated entries below are intentional -- they match the original fixture exactly
    for job_type, change in [(self.job_type_1, self.entry_1_longest),
                             (self.job_type_1, self.entry_1_shortest),
                             (self.job_type_2, self.entry_2_shortest),
                             (self.job_type_2, self.entry_2_longest),
                             (self.job_type_2, self.entry_2_shortest),
                             (self.job_type_3, self.entry_3_shortest),
                             (self.job_type_3, self.entry_3_longest),
                             (self.job_type_3, self.entry_3_longest),
                             (self.job_type_3, self.entry_3_shortest)]:
        job_test_utils.create_job(job_type=job_type, status='RUNNING', last_status_change=change)
def test_convert_recipe_to_v6_json(self):
    """Tests calling convert_recipe_to_v6_json() successfully.

    Fix: the local variable `json` shadowed the module-level ``json`` import
    used elsewhere in this file; renamed to ``recipe_json``.
    """
    job_type_1 = job_test_utils.create_seed_job_type()
    job_type_2 = job_test_utils.create_seed_job_type()
    job_type_3 = job_test_utils.create_seed_job_type()
    job_type_4 = job_test_utils.create_seed_job_type()
    recipe_type_1 = recipe_test_utils.create_recipe_type_v6()

    interface = Interface()
    interface.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface.add_parameter(JsonParameter('json_param_1', 'object'))
    df1 = DataFilter(filter_list=[{'name': 'file_param_1', 'type': 'media-type', 'condition': '==',
                                   'values': ['image/gif']},
                                  {'name': 'json_param_1', 'type': 'object',
                                   'condition': 'superset of', 'values': [{}]}], all=False)

    # Build the definition: A feeds B/C/F, C feeds D, B feeds E, F (condition) gates G
    definition = RecipeDefinition(interface)
    definition.add_job_node('A', job_type_1.name, job_type_1.version, job_type_1.revision_num)
    definition.add_job_node('B', job_type_2.name, job_type_2.version, job_type_2.revision_num)
    definition.add_job_node('C', job_type_3.name, job_type_3.version, job_type_3.revision_num)
    definition.add_recipe_node('D', recipe_type_1.name, recipe_type_1.revision_num)
    definition.add_job_node('E', job_type_4.name, job_type_4.version, job_type_4.revision_num)
    definition.add_condition_node('F', interface, df1)  # False
    definition.add_job_node('G', job_type_4.name, job_type_4.version, job_type_4.revision_num)
    definition.add_dependency('A', 'B')
    definition.add_dependency('A', 'C')
    definition.add_dependency('B', 'E')
    definition.add_dependency('C', 'D')
    definition.add_dependency('A', 'F')
    definition.add_dependency('F', 'G')
    definition.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    definition.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')

    recipe = recipe_test_utils.create_recipe()
    job_a = job_test_utils.create_job(job_type=job_type_1, status='COMPLETED', save=False)
    job_b = job_test_utils.create_job(job_type=job_type_2, status='RUNNING', save=False)
    job_c = job_test_utils.create_job(job_type=job_type_3, status='COMPLETED', save=False)
    job_e = job_test_utils.create_job(job_type=job_type_4, status='PENDING', num_exes=0, save=False)
    Job.objects.bulk_create([job_a, job_b, job_c, job_e])
    condition_f = recipe_test_utils.create_recipe_condition(is_processed=True, is_accepted=False,
                                                            save=True)
    recipe_d = recipe_test_utils.create_recipe(recipe_type=recipe_type_1)
    recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='A', job=job_a,
                                                         save=False)
    recipe_node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='B', job=job_b,
                                                         save=False)
    recipe_node_c = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='C', job=job_c,
                                                         save=False)
    recipe_node_d = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='D',
                                                         sub_recipe=recipe_d, save=False)
    recipe_node_e = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='E', job=job_e,
                                                         save=False)
    recipe_node_f = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='F',
                                                         condition=condition_f, save=False)
    recipe_nodes = [recipe_node_a, recipe_node_b, recipe_node_c, recipe_node_d, recipe_node_e,
                    recipe_node_f]

    recipe_instance = RecipeInstance(definition, recipe, recipe_nodes)
    # Renamed from `json` so the module-level json import is not shadowed
    recipe_json = convert_recipe_to_v6_json(recipe_instance)
    RecipeInstanceV6(json=recipe_json.get_dict(), do_validate=True)  # Revalidate
    # Node G is absent because its gating condition F was not accepted
    self.assertSetEqual(set(recipe_json.get_dict()['nodes'].keys()),
                        {'A', 'B', 'C', 'D', 'E', 'F'})
def test_get_metrics_type_choices(self):
    """Tests getting the metrics type with choices."""
    job_test_utils.create_seed_job_type()

    metrics_type = MetricsJobType.objects.get_metrics_type(include_choices=True)

    self.assertEqual(metrics_type.name, 'job-types')
    self.assertEqual(len(metrics_type.filters), 2)
    self.assertEqual(len(metrics_type.choices), 1)
def setUp(self):
    """Create three job types with distinct file interfaces plus a plain-text file."""
    django.setup()

    # Job type 1: text input, png output
    self.input_name_1 = 'Test_Input_1'
    self.output_name_1 = 'Test_Output_1'
    manifest_1 = job_test_utils.create_seed_manifest(
        command='my_cmd args',
        inputs_files=[{'name': self.input_name_1, 'mediaTypes': ['text/plain']}],
        inputs_json=[],
        outputs_files=[{'name': self.output_name_1, 'mediaType': 'image/png',
                        'pattern': '*_.png'}],
        outputs_json=[])
    self.job_type_1 = job_test_utils.create_seed_job_type(manifest=manifest_1)

    # Job type 2: image input, text output
    self.input_name_2 = 'Test_Input_2'
    self.output_name_2 = 'Test_Output_2'
    manifest_2 = job_test_utils.create_seed_manifest(
        command='my_cmd args',
        inputs_files=[{'name': self.input_name_2, 'mediaTypes': ['image/png', 'image/tiff']}],
        inputs_json=[],
        outputs_files=[{'name': self.output_name_2, 'mediaType': 'text/plain',
                        'pattern': '*_.txt'}],
        outputs_json=[])
    self.job_type_2 = job_test_utils.create_seed_job_type(manifest=manifest_2)

    # Job type 3: text input, no outputs
    self.input_name_3 = 'Test_Input_3'
    self.output_name_3 = 'Test_Output_3'
    manifest_3 = job_test_utils.create_seed_manifest(
        command='my_cmd args',
        inputs_files=[{'name': self.input_name_3, 'mediaTypes': ['text/plain']}],
        inputs_json=[],
        outputs_files=[],
        outputs_json=[])
    self.job_type_3 = job_test_utils.create_seed_job_type(manifest=manifest_3)

    self.file_1 = storage_test_utils.create_file(media_type='text/plain')
def setUp(self):
    """Create four job types; tag 'oneandfour' is shared by types 1 and 4."""
    django.setup()

    self.job_type1 = job_test_utils.create_seed_job_type()
    self.tag_set1 = ["tag1", "tag2", "oneandfour"]
    self.job_type2 = job_test_utils.create_seed_job_type()
    self.tag_set2 = ["tag3", "tag4"]
    self.job_type3 = job_test_utils.create_seed_job_type()
    self.tag_set3 = ["tag5", "tag6"]
    self.job_type4 = job_test_utils.create_seed_job_type()
    self.tag_set4 = ["tag7", "tag8", "oneandfour"]

    # NOTE(review): tags are never created for job_type2 -- presumably deliberate
    # so there is an untagged job type to test against; confirm.
    JobTypeTag.objects.create_job_type_tags(self.job_type1, self.tag_set1)
    JobTypeTag.objects.create_job_type_tags(self.job_type3, self.tag_set3)
    JobTypeTag.objects.create_job_type_tags(self.job_type4, self.tag_set4)
def setUp(self):
    """Link recipe types to job types: rt1->jt3, rt1->jt4, rt2->jt5."""
    django.setup()

    self.rt1 = recipe_test_utils.create_recipe_type_v6()
    self.rt2 = recipe_test_utils.create_recipe_type_v6()
    self.rt3 = recipe_test_utils.create_recipe_type_v6()
    self.jt3 = job_test_utils.create_seed_job_type()
    self.jt4 = job_test_utils.create_seed_job_type()
    self.jt5 = job_test_utils.create_seed_job_type()
    self.jt6 = job_test_utils.create_seed_job_type()

    # rt3 and jt6 are intentionally left without links
    self.parents = [self.rt1.id, self.rt1.id, self.rt2.id]
    self.children = [self.jt3.id, self.jt4.id, self.jt5.id]
    RecipeTypeJobLink.objects.create_recipe_type_job_links(self.parents, self.children)
def setUp(self):
    """Log the client in and create three job types with contrasting metrics."""
    django.setup()
    rest.login_client(self.client)

    # Type 1 has count metrics; types 2 and 3 have timing metrics at two scales
    self.job_type1 = job_test_utils.create_seed_job_type()
    metrics_test_utils.create_job_type(job_type=self.job_type1, completed_count=8,
                                       failed_count=2, total_count=10)
    self.job_type2 = job_test_utils.create_seed_job_type()
    metrics_test_utils.create_job_type(job_type=self.job_type2, job_time_sum=220,
                                       job_time_min=20, job_time_max=200, job_time_avg=110)
    self.job_type3 = job_test_utils.create_seed_job_type()
    metrics_test_utils.create_job_type(job_type=self.job_type3, job_time_sum=1100,
                                       job_time_min=100, job_time_max=1000, job_time_avg=550)
def setUp(self):
    """Create a recipe containing three job nodes."""
    django.setup()

    self.recipe = recipe_test_utils.create_recipe()
    jobs = []
    for _ in range(3):
        job_type = job_test_utils.create_seed_job_type()
        jobs.append(job_test_utils.create_job(job_type=job_type))

    self.recipe_node1 = recipe_test_utils.create_recipe_node(recipe=self.recipe,
                                                             node_name='job-1', job=jobs[0])
    self.recipe_node2 = recipe_test_utils.create_recipe_node(recipe=self.recipe,
                                                             node_name='job-2', job=jobs[1])
    self.recipe_node3 = recipe_test_utils.create_recipe_node(recipe=self.recipe,
                                                             node_name='job-3', job=jobs[2])
    # NOTE(review): if create_recipe_node persists by default, this bulk_create
    # would insert duplicate rows -- confirm the utility's save default.
    RecipeNode.objects.bulk_create([self.recipe_node1, self.recipe_node2, self.recipe_node3])
def test_uuid_use_properties(self):
    """Tests setting UUIDs on products with different property values."""
    inputs_json = [{'name': 'property1', 'type': 'string'},
                   {'name': 'property2', 'type': 'string'}]
    manifest = job_test_utils.create_seed_manifest(name='test-job', inputs_json=inputs_json,
                                                   command='my_command')
    manifest['job']['interface']['inputs']['files'] = []
    job_type = job_test_utils.create_seed_job_type(manifest=manifest)

    def _make_exe(property1_value):
        # One job execution whose input differs only in property1
        job = job_test_utils.create_job(job_type=job_type)
        job_exe = job_test_utils.create_job_exe(job=job)
        data = job_exe.job.get_input_data()
        data.add_value(JsonValue('property1', property1_value))
        data.add_value(JsonValue('property2', 'value2'))
        job_exe.job.input = convert_data_to_v6_json(data).get_dict()
        return job_exe

    job_exe1 = _make_exe('value1')
    job_exe2 = _make_exe('diffvalue1')

    products1 = ProductFile.objects.upload_files(self.files, [self.source_file.id], job_exe1,
                                                 self.workspace)
    products2 = ProductFile.objects.upload_files(self.files, [self.source_file.id], job_exe2,
                                                 self.workspace)

    # Make sure the product files have different UUIDs
    self.assertIsNotNone(products1[0].uuid)
    self.assertIsNotNone(products1[1].uuid)
    self.assertNotEqual(products1[0].uuid, products2[0].uuid)
    self.assertNotEqual(products1[1].uuid, products2[1].uuid)
def test_calculate_negative_times(self):
    """Tests calculating times when machine clocks are out of sync."""
    occurred_day = datetime.datetime(2015, 1, 1, tzinfo=utc)
    job_type = job_test_utils.create_seed_job_type()
    job = job_test_utils.create_job(job_type=job_type, status='COMPLETED', ended=occurred_day)
    # Queued AFTER started/ended to simulate clock skew between machines
    job_test_utils.create_job_exe(
        job=job,
        status=job.status,
        queued=datetime.datetime(2015, 1, 1, 1, 10, tzinfo=utc),
        started=datetime.datetime(2015, 1, 1, 1, 5, tzinfo=utc),
        ended=occurred_day,
    )

    MetricsJobType.objects.calculate(occurred_day)

    entries = MetricsJobType.objects.filter(occurred=occurred_day)
    self.assertEqual(len(entries), 1)
    entry = entries.first()
    # Negative queue durations must be recorded as zero
    self.assertEqual(entry.queue_time_min, 0)
    self.assertEqual(entry.queue_time_max, 0)
def setUp(self):
    """Create a RUNNING job with an execution, tied to a recipe and trigger event.

    Fix: removed the unused locals ``cmd`` and ``cmd_args`` (the manifest uses the
    literal ``'command args'`` directly).
    """
    django.setup()

    # NOTE(review): a recipe type/recipe pair is created twice; the first pair is
    # orphaned but still present in the database. Kept as-is in case other tests
    # depend on the extra rows -- confirm whether the first pair can be dropped.
    self.recipe_type = recipe_utils.create_recipe_type_v6()
    self.recipe = recipe_utils.create_recipe(recipe_type=self.recipe_type)

    outputs = [{
        'name': 'arg1',
        'pattern': '*_.txt'
    }, {
        'name': 'arg2',
        'pattern': '*_.txt'
    }]
    manifest = job_utils.create_seed_manifest(command='command args', outputs_files=outputs)
    self.job_type = job_utils.create_seed_job_type(job_version='1.0', manifest=manifest)

    self.recipe_type = recipe_utils.create_recipe_type_v6()
    self.recipe = recipe_utils.create_recipe(recipe_type=self.recipe_type)
    self.event = TriggerEvent.objects.create_trigger_event('TEST', None, {}, now())
    self.job = job_utils.create_job(job_type=self.job_type, event=self.event, status='RUNNING',
                                    recipe=self.recipe)
    self.job_exe = job_utils.create_job_exe(job=self.job, status='RUNNING')
def setUp(self):
    """Create a seed job type and look up its current revision."""
    django.setup()
    self.seed_job_type = job_test_utils.create_seed_job_type()
    self.seed_job_type_rev = JobTypeRevision.objects.get_revision(self.seed_job_type.name,
                                                                  self.seed_job_type.version,
                                                                  self.seed_job_type.revision_num)
def test_json(self):
    """Tests converting a CancelJobs message to and from JSON"""
    when = now()
    data = JobData()
    job_type = job_test_utils.create_seed_job_type()
    pending_job = job_test_utils.create_job(job_type=job_type, status='PENDING')
    failed_job = job_test_utils.create_job(job_type=job_type, num_exes=3, status='FAILED',
                                           input=data.get_dict())
    job_ids = [pending_job.id, failed_job.id]

    # Add jobs to message
    message = CancelJobs()
    message.when = when
    for job_id in job_ids:
        if message.can_fit_more():
            message.add_job(job_id)

    # Convert message to JSON and back, and then execute
    new_message = CancelJobs.from_json(message.to_json())
    self.assertTrue(new_message.execute())

    # Both jobs should have been canceled
    for job in Job.objects.filter(id__in=job_ids).order_by('id'):
        self.assertEqual(job.status, 'CANCELED')
        self.assertEqual(job.last_status_change, when)
    # No new messages since these jobs do not belong to a recipe
    self.assertEqual(len(new_message.new_messages), 0)
def setUp(self):
    """Create one legacy and one seed job type, each with a RUNNING job and execution."""
    django.setup()

    timeout = 60
    interface = {
        'version': '1.0',
        'command': 'command',
        'command_arguments': 'run test',
    }
    self.job_type = job_utils.create_job_type(name='Test', version='1.0', interface=interface)
    self.seed_job_type = job_utils.create_seed_job_type()

    self.event = TriggerEvent.objects.create_trigger_event('TEST', None, {}, now())
    self.job = job_utils.create_job(job_type=self.job_type, event=self.event, status='RUNNING')
    self.seed_job = job_utils.create_job(job_type=self.seed_job_type, event=self.event,
                                         status='RUNNING')
    self.job_exe = job_utils.create_job_exe(job=self.job, status='RUNNING', timeout=timeout,
                                            queued=now())
    self.seed_exe = job_utils.create_job_exe(job=self.seed_job, status='RUNNING', timeout=timeout,
                                             queued=now())
def test_job_type_limit(self):
    """Tests calling perform_scheduling() with a job type limit"""
    Queue.objects.all().delete()
    job_type_with_limit = job_test_utils.create_seed_job_type()
    job_type_with_limit.max_scheduled = 4
    job_type_with_limit.save()
    running_job_exe_1 = job_test_utils.create_running_job_exe(agent_id=self.agent_1.agent_id,
                                                              job_type=job_type_with_limit,
                                                              node=self.node_1)
    # Queue six jobs of the limited type
    for _ in range(6):
        queue_test_utils.create_queue(job_type=job_type_with_limit)
    job_type_mgr.sync_with_database()

    # One job of this type is already running
    job_exe_mgr.schedule_job_exes([running_job_exe_1], [])

    offer_1 = ResourceOffer('offer_1', self.agent_1.agent_id, self.framework_id,
                            NodeResources([Cpus(0.0), Mem(1024.0), Disk(1024.0)]), now(), None)
    offer_2 = ResourceOffer('offer_2', self.agent_2.agent_id, self.framework_id,
                            NodeResources([Cpus(25.0), Mem(2048.0), Disk(2048.0)]), now(), None)
    resource_mgr.add_new_offers([offer_1, offer_2])

    scheduling_manager = SchedulingManager()
    num_tasks = scheduling_manager.perform_scheduling(self._client, now())
    # One is already running, should only be able to schedule 3 more
    self.assertEqual(num_tasks, 3)
def test_publish_products_unpublish_superseded(self):
    """Tests calling ProductFileManager.publish_products() where the job has superseded job
    products that must be unpublished
    """
    # Job 1 is superseded by Job 2 and Job 2 is superseded by Job 3
    job_exe_1 = job_test_utils.create_job_exe()
    product_1_a = prod_test_utils.create_product(job_exe=job_exe_1, has_been_published=True,
                                                 is_published=True)
    product_1_b = prod_test_utils.create_product(job_exe=job_exe_1, has_been_published=True,
                                                 is_published=True)
    job_type = job_test_utils.create_seed_job_type()
    event = trigger_test_utils.create_trigger_event()
    job_type_rev = JobTypeRevision.objects.get_by_natural_key(job_type, job_type.revision_num)

    job_2 = Job.objects.create_job_v6(job_type_rev, event_id=event.id,
                                      superseded_job=job_exe_1.job)
    job_2.save()
    job_exe_2 = job_test_utils.create_job_exe(job=job_2)
    Job.objects.supersede_jobs([job_exe_1.job.id], now())
    product_2_a = prod_test_utils.create_product(job_exe=job_exe_2, has_been_published=True,
                                                 is_published=True)
    product_2_b = prod_test_utils.create_product(job_exe=job_exe_2, has_been_published=True,
                                                 is_published=True)

    job_3 = Job.objects.create_job_v6(job_type_rev, event_id=event.id,
                                      superseded_job=job_exe_2.job)
    job_3.save()
    job_exe_3 = job_test_utils.create_job_exe(job=job_3)
    Job.objects.supersede_jobs([job_2.id], now())
    product_3_a = prod_test_utils.create_product(job_exe=job_exe_3)
    product_3_b = prod_test_utils.create_product(job_exe=job_exe_3)

    when = now()
    ProductFile.objects.publish_products(job_exe_3, job_3, when)

    # Make sure products from Job 1 and Job 2 are unpublished
    for superseded_id in (product_1_a.id, product_1_b.id, product_2_a.id, product_2_b.id):
        refreshed = ScaleFile.objects.get(id=superseded_id)
        self.assertTrue(refreshed.has_been_published)
        self.assertFalse(refreshed.is_published)
        self.assertEqual(refreshed.unpublished, when)

    # Make sure Job 3 products are published
    for current_id in (product_3_a.id, product_3_b.id):
        refreshed = ScaleFile.objects.get(id=current_id)
        self.assertTrue(refreshed.has_been_published)
        self.assertTrue(refreshed.is_published)
        self.assertFalse(refreshed.is_superseded)
        self.assertEqual(refreshed.published, when)
        self.assertIsNone(refreshed.superseded)
def test_execute_input_data(self):
    """Tests calling CreateJobs.execute() with input data"""
    manifest = {
        'seedVersion': '1.0.0',
        'job': {
            'name': 'name',
            'jobVersion': '1.0.0',
            'packageVersion': '1.0.0',
            'title': 'Title',
            'description': 'This is a description',
            'maintainer': {
                'name': 'John Doe',
                'email': '*****@*****.**'
            },
            'timeout': 10,
            'interface': {
                'command': 'the command'
            }
        }
    }
    job_type = job_test_utils.create_seed_job_type(manifest=manifest)
    event = trigger_test_utils.create_trigger_event()
    data = Data()

    # Create and execute message
    message = create_jobs_message(job_type.name, job_type.version, job_type.revision_num,
                                  event.id, input_data=data)
    self.assertTrue(message.execute())

    # Check for job creation
    job_count = Job.objects.filter(job_type_id=job_type.id, event_id=event.id).count()
    self.assertEqual(job_count, 1)

    # Check for process_job_input message
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'process_job_input')

    # Test executing message again -- must be idempotent
    message = CreateJobs.from_json(message.to_json())
    self.assertTrue(message.execute())

    # Check that a second job is not created
    job_count = Job.objects.filter(job_type_id=job_type.id, event_id=event.id).count()
    self.assertEqual(job_count, 1)

    # Check for process_job_input message
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'process_job_input')
def test_get_seed_job_results(self):
    """Test retrieving job results from a Seed job type.

    Fix: the local ``input`` shadowed the builtin; renamed to ``input_dict``.
    The job type is also passed by keyword for consistency with the other
    ``create_job`` calls in this file.
    """
    job_type = job_test_utils.create_seed_job_type()
    input_dict = {"version": "1.0", "input_data": {}, "output_data": {}}
    job = job_test_utils.create_job(job_type=job_type, input=input_dict)
    self.assertIsInstance(job.get_job_results(), SeedJobResults)
def setUp(self):
    """Create three job types with staggered job-load entries."""
    django.setup()

    # sleep's are needed because if the job load entries end up with the same timestamp,
    # there will be fewer entries in the GET then expected in the tests. sleep's ensure
    # the timestamps will be different as they maintain 3 sig figs in the decimal
    self.job_type1 = job_test_utils.create_seed_job_type(priority=1)
    queue_test_utils.create_job_load(job_type=self.job_type1, pending_count=1)
    time.sleep(0.001)

    self.job_type2 = job_test_utils.create_seed_job_type(priority=2)
    queue_test_utils.create_job_load(job_type=self.job_type2, queued_count=1)
    time.sleep(0.001)

    self.job_type3 = job_test_utils.create_seed_job_type(priority=3)
    queue_test_utils.create_job_load(job_type=self.job_type3, running_count=1)
def setUp(self):
    """Create two job-produced files; the first carries full source metadata."""
    django.setup()

    self.country = storage_test_utils.create_country()
    manifest = copy.deepcopy(job_test_utils.COMPLETE_MANIFEST)

    # File 1: produced by job type 'test1' with full source metadata and tags
    manifest['job']['name'] = 'test1'
    self.job_type1 = job_test_utils.create_seed_job_type(manifest=manifest)
    self.job1 = job_test_utils.create_job(job_type=self.job_type1)
    self.job_exe1 = job_test_utils.create_job_exe(job=self.job1)
    self.f1_source_started = dt.datetime(2016, 1, 1, tzinfo=utc)
    self.f1_source_ended = dt.datetime(2016, 1, 2, tzinfo=utc)
    self.source_sensor_class = 'classA'
    self.source_sensor = '1'
    self.source_collection = '12345'
    self.source_task = 'test-task'
    self.file1 = storage_test_utils.create_file(job_exe=self.job_exe1, job_output='out_name',
                                                file_name='test.txt', countries=[self.country],
                                                recipe_node='test-recipe-node',
                                                source_started=self.f1_source_started,
                                                source_ended=self.f1_source_ended,
                                                source_sensor_class=self.source_sensor_class,
                                                source_sensor=self.source_sensor,
                                                source_collection=self.source_collection,
                                                source_task=self.source_task,
                                                data_type_tags=['type1', 'type2'])

    # File 2: produced by job type 'test2' with only source start/end times
    manifest['job']['name'] = 'test2'
    self.job_type2 = job_test_utils.create_seed_job_type(manifest=manifest)
    self.job2 = job_test_utils.create_job(job_type=self.job_type2)
    self.job_exe2 = job_test_utils.create_job_exe(job=self.job2)
    self.f2_source_started = dt.datetime(2016, 1, 2, tzinfo=utc)
    self.f2_source_ended = dt.datetime(2016, 1, 3, tzinfo=utc)
    self.file2 = storage_test_utils.create_file(job_exe=self.job_exe2, countries=[self.country],
                                                source_started=self.f2_source_started,
                                                source_ended=self.f2_source_ended)

    rest.login_client(self.client)
def test_successful(self):
    """Tests calling QueueManager.queue_new_job() successfully with a Seed job type"""
    workspace = storage_test_utils.create_workspace()
    source_file = source_test_utils.create_source(workspace=workspace)
    event = trigger_test_utils.create_trigger_event()

    manifest = {
        'seedVersion': '1.0.0',
        'job': {
            'name': 'test-job',
            'jobVersion': '1.0.0',
            'packageVersion': '1.0.0',
            'title': 'Test Job',
            'description': 'This is a test job',
            'maintainer': {
                'name': 'John Doe',
                'email': '*****@*****.**'
            },
            'timeout': 10,
            'interface': {
                'command': '',
                'inputs': {
                    'files': [{'name': 'input_a'}]
                },
                'outputs': {
                    'files': [{'name': 'output_a', 'multiple': True, 'pattern': '*.png'}]
                }
            }
        }
    }
    job_type = job_test_utils.create_seed_job_type(manifest=manifest)

    data_dict = {
        'version': '1.0',
        'input_data': [{'name': 'input_a', 'file_id': source_file.id}],
        'output_data': [{'name': 'output_a', 'workspace_id': workspace.id}]
    }

    job = Queue.objects.queue_new_job(job_type, JobData(data_dict), event)
    self.assertEqual(job.status, 'QUEUED')
def create_job_type(job_type=None, occurred=None, **kwargs):
    """Creates a metrics job type model for unit testing

    :param job_type: The job type to record metrics for (a seed job type is created when omitted)
    :param occurred: When the metrics entry occurred (defaults to now)
    :param kwargs: Additional metric field values passed through to the model
    :returns: The metrics job type model
    :rtype: :class:`metrics.models.MetricsJobType`
    """
    job_type = job_type or job_test_utils.create_seed_job_type()
    occurred = occurred or timezone.now()
    return MetricsJobType.objects.create(job_type=job_type, occurred=occurred, **kwargs)
def setUp(self):
    """Build a mocked workspace, a seed job execution with JSON input, and product metadata."""
    django.setup()

    def upload_files(file_uploads):
        for file_upload in file_uploads:
            file_upload.file.save()

    # NOTE(review): the delete-side mock saves instead of deleting -- presumably it only
    # needs to persist model state without touching real storage; confirm.
    def delete_files(files):
        for scale_file in files:
            scale_file.save()

    self.workspace = storage_test_utils.create_workspace()
    self.workspace.upload_files = MagicMock(side_effect=upload_files)
    self.workspace.delete_files = MagicMock(side_effect=delete_files)

    self.source_file = source_test_utils.create_source(file_name='input1.txt',
                                                       workspace=self.workspace)

    # Seed job type with JSON-only inputs (file inputs cleared out)
    inputs_json = [{'name': 'property1', 'type': 'string'},
                   {'name': 'property2', 'type': 'string'}]
    manifest = job_test_utils.create_seed_manifest(inputs_json=inputs_json, command='my_command')
    manifest['job']['interface']['inputs']['files'] = []
    job_type = job_test_utils.create_seed_job_type(manifest=manifest)
    self.job_exe = job_test_utils.create_job_exe(job_type=job_type)

    data = self.job_exe.job.get_input_data()
    data.add_value(JsonValue('property1', 'value1'))
    data.add_value(JsonValue('property2', 'value2'))
    self.job_exe.job.input = convert_data_to_v6_json(data).get_dict()
    self.job_exe.job.source_sensor_class = 'classA'
    self.job_exe.job.source_sensor = '1'
    self.job_exe.job.source_collection = '12345'
    self.job_exe.job.source_task = 'my-task'
    self.job_exe.job.save()

    # A second execution with no source metadata
    self.job_exe_no = job_test_utils.create_job_exe()

    self.local_path_1 = os.path.join(SCALE_JOB_EXE_OUTPUT_PATH, 'local/1/file.txt')
    self.local_path_2 = os.path.join(SCALE_JOB_EXE_OUTPUT_PATH, 'local/2/file.json')
    self.local_path_3 = os.path.join(SCALE_JOB_EXE_OUTPUT_PATH, 'local/3/file.h5')
    self.files = [
        ProductFileMetadata(output_name='output_name_1', local_path=self.local_path_1,
                            remote_path='remote/1/file.txt'),
        ProductFileMetadata(output_name='output_name_2', local_path=self.local_path_2,
                            media_type='application/x-custom-json',
                            remote_path='remote/2/file.json', source_sensor_class='classB',
                            source_sensor='2', source_collection='12346',
                            source_task='my-task-2'),
    ]
    self.files_no = [
        ProductFileMetadata(output_name='output_name_3', local_path=self.local_path_3,
                            media_type='image/x-hdf5-image', remote_path='remote/3/file.h5')
    ]
def test_json(self):
    """Tests converting a RequeueJobsBulk message to and from JSON"""
    sys_err = error_test_utils.create_error(category='SYSTEM')
    data = JobData()
    batch = batch_test_utils.create_batch()
    recipe = recipe_test_utils.create_recipe()
    job_type = job_test_utils.create_seed_job_type()
    job_1 = job_test_utils.create_job(job_type=job_type, num_exes=3, status='FAILED',
                                      error=sys_err, input=data.get_dict())
    job_1.batch_id = batch.id
    job_1.recipe_id = recipe.id
    job_1.save()
    # CANCELED job that the FAILED-status filter must exclude
    job_2 = job_test_utils.create_job(job_type=job_type, num_exes=3, status='CANCELED',
                                      error=sys_err, input=data.get_dict())

    # Create a message whose filters all match job_1
    message = RequeueJobsBulk()
    message.started = job_1.last_modified - timedelta(seconds=1)
    message.ended = job_1.last_modified + timedelta(seconds=1)
    message.error_categories = ['SYSTEM']
    message.error_ids = [sys_err.id]
    message.job_ids = [job_1.id]
    message.job_type_ids = [job_type.id]
    message.priority = 1
    message.status = 'FAILED'
    message.job_type_names = [job_type.name]
    message.batch_ids = [batch.id]
    message.recipe_ids = [recipe.id]
    message.is_superseded = False

    # Convert message to JSON and back, and then execute
    new_message = RequeueJobsBulk.from_json(message.to_json())
    self.assertTrue(new_message.execute())

    # Should be one re-queue message for job 1
    self.assertEqual(len(new_message.new_messages), 1)
    requeue_msg = new_message.new_messages[0]
    self.assertEqual(requeue_msg.type, 'requeue_jobs')
    self.assertListEqual(requeue_msg._requeue_jobs, [QueuedJob(job_1.id, job_1.num_exes)])
    self.assertEqual(requeue_msg.priority, 1)
def test_execute(self):
    """Tests calling CancelJobsBulk.execute() successfully.

    Patches the module-level MAX_BATCH_SIZE down to 5 so that a single execute()
    produces one follow-up bulk message plus one cancel message, and restores the
    original value afterwards so the patch cannot leak into other tests.
    """
    # Importing module here to patch the max batch size
    import job.messages.cancel_jobs_bulk
    original_batch_size = job.messages.cancel_jobs_bulk.MAX_BATCH_SIZE
    job.messages.cancel_jobs_bulk.MAX_BATCH_SIZE = 5
    try:
        sys_err = error_test_utils.create_error(category='SYSTEM')

        job_type = job_test_utils.create_seed_job_type()
        job_1 = job_test_utils.create_job(job_type=job_type, num_exes=3, status='FAILED', error=sys_err)
        job_2 = job_test_utils.create_job(job_type=job_type, num_exes=3, status='FAILED', error=sys_err)
        job_3 = job_test_utils.create_job(job_type=job_type, num_exes=1, status='COMPLETED')
        job_4 = job_test_utils.create_job(job_type=job_type, status='BLOCKED')
        job_5 = job_test_utils.create_job(job_type=job_type, num_exes=3, status='CANCELED')
        job_6 = job_test_utils.create_job(job_type=job_type, status='PENDING')
        job_7 = job_test_utils.create_job(job_type=job_type, num_exes=3, status='FAILED', error=sys_err)

        # Create message
        message = job.messages.cancel_jobs_bulk.CancelJobsBulk()
        message.job_type_ids = [job_type.id]

        # Execute message
        result = message.execute()
        self.assertTrue(result)

        # Should be two messages, one for next bulk cancel and one for canceling the specific jobs
        self.assertEqual(len(message.new_messages), 2)
        cancel_bulk_message = message.new_messages[0]
        cancel_message = message.new_messages[1]
        self.assertEqual(cancel_bulk_message.type, 'cancel_jobs_bulk')
        self.assertEqual(cancel_bulk_message.current_job_id, job_3.id)
        self.assertEqual(cancel_message.type, 'cancel_jobs')
        # Job 5 is skipped due to being CANCELED and job 3 is skipped due to being COMPLETED
        self.assertListEqual(cancel_message._job_ids, [job_7.id, job_6.id, job_4.id])

        # Test executing message again
        message.new_messages = []
        result = message.execute()
        self.assertTrue(result)

        # Should have same messages returned
        self.assertEqual(len(message.new_messages), 2)
        cancel_bulk_message = message.new_messages[0]
        cancel_message = message.new_messages[1]
        self.assertEqual(cancel_bulk_message.type, 'cancel_jobs_bulk')
        self.assertEqual(cancel_bulk_message.current_job_id, job_3.id)
        self.assertEqual(cancel_message.type, 'cancel_jobs')
        # Job 5 is skipped due to being CANCELED and job 3 is skipped due to being COMPLETED
        self.assertListEqual(cancel_message._job_ids, [job_7.id, job_6.id, job_4.id])
    finally:
        # Restore the patched module constant so other tests see the real batch size
        job.messages.cancel_jobs_bulk.MAX_BATCH_SIZE = original_batch_size
def setUp(self):
    """Creates a sub recipe type and a main recipe type (job -> job -> sub-recipe chain)."""
    django.setup()
    self.jt = job_test_utils.create_seed_job_type()
    self.jt2 = job_test_utils.create_seed_job_type(manifest=job_test_utils.MINIMUM_MANIFEST)

    def _job_node_type(job_type):
        # v6 node_type dict pointing at a specific seed job type revision
        return {'node_type': 'job',
                'job_type_name': job_type.name,
                'job_type_version': job_type.version,
                'job_type_revision': job_type.revision_num}

    # Sub recipe: a single job node with no inputs
    sub_definition = {
        'version': '6',
        'input': {'files': [], 'json': []},
        'nodes': {
            'node_a': {'dependencies': [],
                       'input': {},
                       'node_type': _job_node_type(self.jt2)},
        },
    }
    self.sub = recipe_test_utils.create_recipe_type_v6(definition=sub_definition)

    # Main recipe: node_a -> node_b (jobs) -> node_c (the sub recipe)
    main_definition = {
        'version': '6',
        'input': {
            'files': [{'name': 'INPUT_IMAGE', 'media_types': ['image/tiff'],
                       'required': True, 'multiple': False}],
            'json': [{'name': 'bar', 'type': 'string', 'required': False}],
        },
        'nodes': {
            'node_a': {'dependencies': [],
                       'input': {'INPUT_IMAGE': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                       'node_type': _job_node_type(self.jt)},
            'node_b': {'dependencies': [{'name': 'node_a'}],
                       'input': {'INPUT_IMAGE': {'type': 'dependency', 'node': 'node_a',
                                                 'output': 'OUTPUT_IMAGE'}},
                       'node_type': _job_node_type(self.jt)},
            'node_c': {'dependencies': [{'name': 'node_b'}],
                       'input': {'input_a': {'type': 'recipe', 'input': 'bar'},
                                 'input_b': {'type': 'dependency', 'node': 'node_b',
                                             'output': 'OUTPUT_IMAGE'}},
                       'node_type': {'node_type': 'recipe',
                                     'recipe_type_name': self.sub.name,
                                     'recipe_type_revision': self.sub.revision_num}},
        },
    }
    self.rt = recipe_test_utils.create_recipe_type_v6(definition=main_definition)
def setUp(self):
    """Creates a completed job execution with three products across two workspaces,
    plus a separate source file that has a PurgeResults record."""
    django.setup()
    self.count = 1
    self.job_type = job_test_utils.create_seed_job_type()
    self.job = job_test_utils.create_job(job_type=self.job_type)
    self.job_exe = job_test_utils.create_job_exe(status='COMPLETED', job=self.job)
    self.wp1 = storage_test_utils.create_workspace()
    self.wp2 = storage_test_utils.create_workspace()
    # Two product files in the first workspace, one in the second, all from the same execution
    self.prod1 = storage_test_utils.create_file(file_type='PRODUCT', workspace=self.wp1, job_exe=self.job_exe)
    self.prod2 = storage_test_utils.create_file(file_type='PRODUCT', workspace=self.wp1, job_exe=self.job_exe)
    self.prod3 = storage_test_utils.create_file(file_type='PRODUCT', workspace=self.wp2, job_exe=self.job_exe)
    # Independent source file tracked by a purge-results record
    self.file_1 = storage_test_utils.create_file(file_type='SOURCE')
    self.event = trigger_test_utils.create_trigger_event()
    PurgeResults.objects.create(source_file_id=self.file_1.id, trigger_event=self.event)
def test_execute_invalid_data(self):
    """Tests calling CreateJobs.execute() when the input data is invalid."""
    # Seed manifest whose interface requires a file input named input_a
    interface = {
        'command': 'the command',
        'inputs': {
            'files': [{'name': 'input_a'}],
        },
    }
    manifest = {
        'seedVersion': '1.0.0',
        'job': {
            'name': 'name',
            'jobVersion': '1.0.0',
            'packageVersion': '1.0.0',
            'title': 'Title',
            'description': 'This is a description',
            'maintainer': {
                'name': 'John Doe',
                'email': '*****@*****.**',
            },
            'timeout': 10,
            'interface': interface,
        },
    }
    job_type = job_test_utils.create_seed_job_type(manifest=manifest)
    event = trigger_test_utils.create_trigger_event()

    # Data does not provide required input_a so it is invalid
    invalid_data = convert_data_to_v6_json(Data()).get_dict()

    # Create and execute message
    message = create_jobs_message(job_type.name, job_type.version, job_type.revision_num,
                                  event.id, count=10, input_data_dict=invalid_data)
    self.assertTrue(message.execute())

    # No jobs should have been created for the invalid data
    self.assertEqual(Job.objects.filter(job_type_id=job_type.id, event_id=event.id).count(), 0)

    # Should be no new messages
    self.assertEqual(len(message.new_messages), 0)