def setUp(self):
    """Create three job types and RUNNING jobs with fixed status-change times.

    Uses datetime.fromtimestamp(..., tz=timezone.utc) instead of the deprecated
    utcfromtimestamp().replace(tzinfo=...); the resulting aware datetimes are
    identical. The u'' prefixes were dropped as redundant (other blocks in this
    file already pass plain string literals to the same utilities).
    """
    django.setup()

    self.job_type_1 = job_test_utils.create_job_type(name='Type 1', version='1.0')
    self.job_type_2 = job_test_utils.create_job_type(name='Type 2', version='2.0')
    self.job_type_3 = job_test_utils.create_job_type(name='Type 1', version='2.0')

    # "longest"/"shortest" refer to how long the job has been in its current
    # status: an earlier timestamp means a longer time in RUNNING.
    self.entry_1_longest = datetime.datetime.fromtimestamp(500000, tz=timezone.utc)
    self.entry_1_shortest = datetime.datetime.fromtimestamp(650000, tz=timezone.utc)
    self.entry_2_longest = datetime.datetime.fromtimestamp(600000, tz=timezone.utc)
    self.entry_2_shortest = datetime.datetime.fromtimestamp(750000, tz=timezone.utc)
    self.entry_3_longest = datetime.datetime.fromtimestamp(700000, tz=timezone.utc)
    self.entry_3_shortest = datetime.datetime.fromtimestamp(800000, tz=timezone.utc)

    # NOTE(review): some (job_type, timestamp) pairs below are created more than
    # once — presumably intentional so the per-type job counts differ; confirm.
    job_test_utils.create_job(job_type=self.job_type_1, status='RUNNING', last_status_change=self.entry_1_longest)
    job_test_utils.create_job(job_type=self.job_type_1, status='RUNNING', last_status_change=self.entry_1_shortest)
    job_test_utils.create_job(job_type=self.job_type_2, status='RUNNING', last_status_change=self.entry_2_shortest)
    job_test_utils.create_job(job_type=self.job_type_2, status='RUNNING', last_status_change=self.entry_2_longest)
    job_test_utils.create_job(job_type=self.job_type_2, status='RUNNING', last_status_change=self.entry_2_shortest)
    job_test_utils.create_job(job_type=self.job_type_3, status='RUNNING', last_status_change=self.entry_3_shortest)
    job_test_utils.create_job(job_type=self.job_type_3, status='RUNNING', last_status_change=self.entry_3_longest)
    job_test_utils.create_job(job_type=self.job_type_3, status='RUNNING', last_status_change=self.entry_3_longest)
    job_test_utils.create_job(job_type=self.job_type_3, status='RUNNING', last_status_change=self.entry_3_shortest)
def setUp(self):
    """Create a source file linked, via products, to jobs in several states."""
    django.setup()

    from product.test import utils as product_test_utils

    self.src_file = source_test_utils.create_source()

    # Running batch-creator job linked to the source file.
    self.job_type1 = job_test_utils.create_job_type(name='scale-batch-creator', version='1.0', category='test-1')
    self.job1 = job_test_utils.create_job(job_type=self.job_type1, status='RUNNING')
    self.job_exe1 = job_test_utils.create_job_exe(job=self.job1)
    product_test_utils.create_file_link(ancestor=self.src_file, job=self.job1, job_exe=self.job_exe1)

    # Pending job of a second type.
    self.job_type2 = job_test_utils.create_job_type(name='test2', version='1.0', category='test-2')
    self.job2 = job_test_utils.create_job(job_type=self.job_type2, status='PENDING')
    self.job_exe2 = job_test_utils.create_job_exe(job=self.job2)
    product_test_utils.create_file_link(ancestor=self.src_file, job=self.job2, job_exe=self.job_exe2)

    # Superseded job, also linked to the same source file.
    self.job3 = job_test_utils.create_job(is_superseded=True)
    self.job_exe3 = job_test_utils.create_job_exe(job=self.job3)
    product_test_utils.create_file_link(ancestor=self.src_file, job=self.job3, job_exe=self.job_exe3)
def setUp(self):
    """Create five prioritized job types with shared-resource requirements and nodes."""
    django.setup()

    cpu_count = 50
    memory_mb = 500
    disk_mb = 50

    # Five identical job types, priorities 1 through 5 (creation order preserved).
    (self.job_type_1, self.job_type_2, self.job_type_3,
     self.job_type_4, self.job_type_5) = [
        job_test_utils.create_job_type(priority=p, cpus=cpu_count, mem=memory_mb, disk=disk_mb)
        for p in range(1, 6)
    ]

    resource_global = shared_resource_test_utils.create_resource()
    resource_private = shared_resource_test_utils.create_resource(is_global=False)
    resource_limited = shared_resource_test_utils.create_resource(limit=1000)

    shared_resource_test_utils.create_requirement(job_type=self.job_type_1, shared_resource=resource_global)
    shared_resource_test_utils.create_requirement(job_type=self.job_type_2, shared_resource=resource_private)
    # Three job types share the limited resource with different usage levels.
    shared_resource_test_utils.create_requirement(job_type=self.job_type_3, shared_resource=resource_limited, usage=400)
    shared_resource_test_utils.create_requirement(job_type=self.job_type_4, shared_resource=resource_limited, usage=200)
    shared_resource_test_utils.create_requirement(job_type=self.job_type_5, shared_resource=resource_limited, usage=100)

    self.trigger_event_1 = trigger_test_utils.create_trigger_event()
    self.node_1 = node_test_utils.create_node()
    self.node_2 = node_test_utils.create_node()
    self.node_3 = node_test_utils.create_node()
def setUp(self):
    """Create limited/unlimited/restricted shared resources, job types, and nodes."""
    django.setup()

    self.resource_no_limit = shared_resource_test_utils.create_resource()
    self.resource_1 = shared_resource_test_utils.create_resource(limit=RESOURCE_LIMIT)
    self.resource_2 = shared_resource_test_utils.create_resource(limit=RESOURCE_LIMIT)
    # Non-global resource: only nodes explicitly granted access may use it.
    self.resource_restricted = shared_resource_test_utils.create_resource(limit=RESOURCE_LIMIT, is_global=False)

    self.job_type_1 = job_test_utils.create_job_type()
    self.job_type_1a = job_test_utils.create_job_type()
    self.job_type_2 = job_test_utils.create_job_type()
    self.non_global_job = job_test_utils.create_job_type()

    # Two job types draw from resource_1; one draws from the restricted resource.
    shared_resource_test_utils.create_requirement(job_type=self.job_type_1,
                                                  shared_resource=self.resource_1,
                                                  usage=JOB_TYPE_1_USAGE)
    shared_resource_test_utils.create_requirement(job_type=self.job_type_1a,
                                                  shared_resource=self.resource_1,
                                                  usage=JOB_TYPE_1A_USAGE)
    shared_resource_test_utils.create_requirement(job_type=self.non_global_job,
                                                  shared_resource=self.resource_restricted,
                                                  usage=JOB_TYPE_3_USAGE)

    self.global_job_types = [self.job_type_1, self.job_type_2]

    self.node_without_special_access = node_test_utils.create_node()
    self.node_with_special_access = node_test_utils.create_node()
    self.resource_restricted.nodes.add(self.node_with_special_access)
def setUp(self):
    """Populate queue-depth history (by job type and by priority) at three times."""
    django.setup()

    self.job_type_1 = job_test_utils.create_job_type(priority=1)
    self.job_type_2 = job_test_utils.create_job_type(priority=1)
    self.job_type_3 = job_test_utils.create_job_type(priority=3)
    self.job_type_4 = job_test_utils.create_job_type(priority=4)
    self.job_type_5 = job_test_utils.create_job_type(priority=4)

    self.time_1 = now()
    self.time_2 = self.time_1 + timedelta(hours=1)
    self.time_3 = self.time_2 + timedelta(hours=1)

    # Queue depth per job type at each sample time.
    per_type_rows = [
        (self.job_type_1, self.time_1, 5),
        (self.job_type_2, self.time_1, 7),
        (self.job_type_3, self.time_1, 3),
        (self.job_type_4, self.time_1, 1),
        (self.job_type_1, self.time_2, 25),
        (self.job_type_2, self.time_2, 19),
        (self.job_type_4, self.time_2, 10),
        (self.job_type_1, self.time_3, 0),
    ]
    for job_type, depth_time, depth in per_type_rows:
        QueueDepthByJobType.objects.create(job_type=job_type, depth_time=depth_time, depth=depth)

    # Queue depth per priority at each sample time.
    per_priority_rows = [
        (1, self.time_1, 12),
        (3, self.time_1, 3),
        (4, self.time_1, 1),
        (1, self.time_2, 44),
        (4, self.time_2, 10),
        (1, self.time_3, 0),
    ]
    for priority, depth_time, depth in per_priority_rows:
        QueueDepthByPriority.objects.create(priority=priority, depth_time=depth_time, depth=depth)
def setUp(self):
    """Create products in various publish states, some linked through a batch."""
    django.setup()

    from batch.test import utils as batch_test_utils
    from product.test import utils as product_test_utils

    self.country = storage_test_utils.create_country()
    self.src_file = source_test_utils.create_source()

    # Operational job type with a published product.
    self.job_type1 = job_test_utils.create_job_type(name='test1', category='test-1', is_operational=True)
    self.job1 = job_test_utils.create_job(job_type=self.job_type1)
    self.job_exe1 = job_test_utils.create_job_exe(job=self.job1)
    self.product1 = product_test_utils.create_product(job_exe=self.job_exe1, has_been_published=True,
                                                      is_published=True, file_name='test.txt',
                                                      countries=[self.country])
    product_test_utils.create_file_link(ancestor=self.src_file, descendant=self.product1, job=self.job1,
                                        job_exe=self.job_exe1)

    self.batch = batch_test_utils.create_batch()

    # Non-operational job type with three products: unpublished, superseded, and published.
    self.job_type2 = job_test_utils.create_job_type(name='test2', category='test-2', is_operational=False)
    self.job2 = job_test_utils.create_job(job_type=self.job_type2)
    self.job_exe2 = job_test_utils.create_job_exe(job=self.job2)

    self.product2a = product_test_utils.create_product(job_exe=self.job_exe2, has_been_published=True,
                                                       is_published=False, countries=[self.country])
    product_test_utils.create_file_link(ancestor=self.src_file, descendant=self.product2a, job=self.job2,
                                        job_exe=self.job_exe2, batch=self.batch)

    self.product2b = product_test_utils.create_product(job_exe=self.job_exe2, has_been_published=True,
                                                       is_published=True, is_superseded=True,
                                                       countries=[self.country])
    product_test_utils.create_file_link(ancestor=self.src_file, descendant=self.product2b, job=self.job2,
                                        job_exe=self.job_exe2, batch=self.batch)

    self.product2c = product_test_utils.create_product(job_exe=self.job_exe2, has_been_published=True,
                                                       is_published=True, countries=[self.country])
    product_test_utils.create_file_link(ancestor=self.src_file, descendant=self.product2c, job=self.job2,
                                        job_exe=self.job_exe2, batch=self.batch)
def setUp(self):
    """Build the shared-resource fixture: resources, job types, requirements, nodes."""
    django.setup()

    # Resources: one unbounded, two with the standard limit, one non-global.
    self.resource_no_limit = shared_resource_test_utils.create_resource()
    self.resource_1 = shared_resource_test_utils.create_resource(limit=RESOURCE_LIMIT)
    self.resource_2 = shared_resource_test_utils.create_resource(limit=RESOURCE_LIMIT)
    self.resource_restricted = shared_resource_test_utils.create_resource(limit=RESOURCE_LIMIT,
                                                                          is_global=False)

    # Job types; two of them share resource_1 at different usage levels.
    self.job_type_1 = job_test_utils.create_job_type()
    self.job_type_1a = job_test_utils.create_job_type()
    self.job_type_2 = job_test_utils.create_job_type()
    self.non_global_job = job_test_utils.create_job_type()

    shared_resource_test_utils.create_requirement(
        job_type=self.job_type_1, shared_resource=self.resource_1, usage=JOB_TYPE_1_USAGE)
    shared_resource_test_utils.create_requirement(
        job_type=self.job_type_1a, shared_resource=self.resource_1, usage=JOB_TYPE_1A_USAGE)
    shared_resource_test_utils.create_requirement(
        job_type=self.non_global_job, shared_resource=self.resource_restricted, usage=JOB_TYPE_3_USAGE)

    self.global_job_types = [self.job_type_1, self.job_type_2]

    # Only one of the two nodes is granted access to the restricted resource.
    self.node_without_special_access = node_test_utils.create_node()
    self.node_with_special_access = node_test_utils.create_node()
    self.resource_restricted.nodes.add(self.node_with_special_access)
def setUp(self):
    """Create three job types with one job-load entry each (pending/queued/running)."""
    django.setup()

    self.job_type1 = job_test_utils.create_job_type(name='test1', version='1.0', category='test-1', priority=1)
    queue_test_utils.create_job_load(job_type=self.job_type1, pending_count=1)
    # The sleeps guarantee distinct timestamps on the job-load rows (3 decimal
    # places of precision); identical timestamps would collapse entries and the
    # GET tests would see fewer rows than expected.
    time.sleep(0.001)

    self.job_type2 = job_test_utils.create_job_type(name='test2', version='1.0', category='test-2', priority=2)
    queue_test_utils.create_job_load(job_type=self.job_type2, queued_count=1)
    time.sleep(0.001)

    self.job_type3 = job_test_utils.create_job_type(name='test3', version='1.0', category='test-3', priority=3)
    queue_test_utils.create_job_load(job_type=self.job_type3, running_count=1)
def setUp(self):
    """Create one RUNNING and one PENDING job of two different job types."""
    django.setup()

    self.job_type1 = job_test_utils.create_job_type(name="test1", version="1.0", category="test-1")
    self.job1 = job_test_utils.create_job(job_type=self.job_type1, status="RUNNING")

    self.job_type2 = job_test_utils.create_job_type(name="test2", version="1.0", category="test-2")
    self.job2 = job_test_utils.create_job(job_type=self.job_type2, status="PENDING")
def setUp(self):
    """Create two job types, each with an associated metrics record."""
    django.setup()

    self.job_type1 = job_test_utils.create_job_type()
    metrics_test_utils.create_job_type(job_type=self.job_type1)

    self.job_type2 = job_test_utils.create_job_type()
    metrics_test_utils.create_job_type(job_type=self.job_type2)
def test_get_metrics_type_choices(self):
    """Tests getting the metrics type with choices."""
    # Docstring switched from ''' to """ for PEP 257 consistency with the
    # identical sibling test elsewhere in this file.
    job_test_utils.create_job_type()

    metrics_type = MetricsJobType.objects.get_metrics_type(include_choices=True)

    self.assertEqual(metrics_type.name, 'job-types')
    self.assertEqual(len(metrics_type.filters), 2)
    # The single job type created above is the only choice.
    self.assertEqual(len(metrics_type.choices), 1)
def test_get_metrics_type_choices(self):
    """Tests getting the metrics type with choices."""
    job_test_utils.create_job_type()

    metrics_type = MetricsJobType.objects.get_metrics_type(include_choices=True)

    self.assertEqual(metrics_type.name, 'job-types')
    self.assertEqual(len(metrics_type.filters), 2)
    # Exactly one job type exists, so exactly one choice is expected.
    self.assertEqual(len(metrics_type.choices), 1)
def setUp(self):
    """Build a two-job recipe type plus queue data and a fake event processor."""
    django.setup()

    workspace = storage_test_utils.create_workspace()
    source_file = source_test_utils.create_source(workspace=workspace)
    self.event = trigger_test_utils.create_trigger_event()

    # Job type 1: single text input, multiple PNG outputs.
    interface_1 = {
        "version": "1.0",
        "command": "test_command",
        "command_arguments": "test_arg",
        "input_data": [{"name": "Test Input 1", "type": "file", "media_types": ["text/plain"]}],
        "output_data": [{"name": "Test Output 1", "type": "files", "media_type": "image/png"}],
    }
    self.job_type_1 = job_test_utils.create_job_type(interface=interface_1)

    # Job type 2: consumes job type 1's image outputs.
    interface_2 = {
        "version": "1.0",
        "command": "test_command",
        "command_arguments": "test_arg",
        "input_data": [{"name": "Test Input 2", "type": "files", "media_types": ["image/png", "image/tiff"]}],
        "output_data": [{"name": "Test Output 2", "type": "file"}],
    }
    self.job_type_2 = job_test_utils.create_job_type(interface=interface_2)

    # Recipe: Job 1 feeds Job 2 through the output/input connection.
    definition = {
        "version": "1.0",
        "input_data": [{"name": "Recipe Input", "type": "file", "media_types": ["text/plain"]}],
        "jobs": [
            {
                "name": "Job 1",
                "job_type": {"name": self.job_type_1.name, "version": self.job_type_1.version},
                "recipe_inputs": [{"recipe_input": "Recipe Input", "job_input": "Test Input 1"}],
            },
            {
                "name": "Job 2",
                "job_type": {"name": self.job_type_2.name, "version": self.job_type_2.version},
                "dependencies": [
                    {"name": "Job 1", "connections": [{"output": "Test Output 1", "input": "Test Input 2"}]}
                ],
            },
        ],
    }
    recipe_definition = RecipeDefinition(definition)
    recipe_definition.validate_job_interfaces()
    self.recipe_type = recipe_test_utils.create_recipe_type(definition=definition)

    self.data = {
        "version": "1.0",
        "input_data": [{"name": "Recipe Input", "file_id": source_file.id}],
        "workspace_id": workspace.id,
    }

    # Register a fake processor so queue events can be observed by the tests.
    self.mock_processor = MagicMock(QueueEventProcessor)
    Queue.objects.register_processor(lambda: self.mock_processor)
def setUp(self):
    """Create five job types spanning priorities 1, 3, and 4, plus a trigger event."""
    django.setup()

    self.job_type_1 = job_test_utils.create_job_type(priority=1)
    self.job_type_2 = job_test_utils.create_job_type(priority=1)
    self.job_type_3 = job_test_utils.create_job_type(priority=3)
    self.job_type_4 = job_test_utils.create_job_type(priority=4)
    self.job_type_5 = job_test_utils.create_job_type(priority=4)

    self.trigger_event_1 = trigger_test_utils.create_trigger_event()
def setUp(self):
    """Create three job types with distinct interfaces and one text input file."""
    django.setup()

    # Type 1: text file in, multiple PNGs out.
    self.input_name_1 = 'Test Input 1'
    self.output_name_1 = 'Test Output 1'
    interface_1 = {
        'version': '1.0',
        'command': 'my_cmd',
        'command_arguments': 'args',
        'input_data': [{
            'name': self.input_name_1,
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'output_data': [{
            'name': self.output_name_1,
            'type': 'files',
            'media_type': 'image/png',
        }],
    }
    self.job_type_1 = job_test_utils.create_job_type(interface=interface_1)

    # Type 2: multiple images in, single file out.
    self.input_name_2 = 'Test Input 2'
    self.output_name_2 = 'Test Output 2'
    interface_2 = {
        'version': '1.0',
        'command': 'my_cmd',
        'command_arguments': 'args',
        'input_data': [{
            'name': self.input_name_2,
            'type': 'files',
            'media_types': ['image/png', 'image/tiff'],
        }],
        'output_data': [{
            'name': self.output_name_2,
            'type': 'file',
        }],
    }
    self.job_type_2 = job_test_utils.create_job_type(interface=interface_2)

    # Type 3: text file in, no declared outputs.
    self.input_name_3 = 'Test Input 3'
    self.output_name_3 = 'Test Output 3'
    interface_3 = {
        'version': '1.0',
        'command': 'my_cmd',
        'command_arguments': 'args',
        'input_data': [{
            'name': self.input_name_3,
            'type': 'file',
            'media_types': ['text/plain'],
        }],
    }
    self.job_type_3 = job_test_utils.create_job_type(interface=interface_3)

    self.file_1 = storage_test_utils.create_file(media_type='text/plain')
def setUp(self):
    """Queue two job executions: one valid and one with a bad configuration."""
    django.setup()

    self.node = node_test_utils.create_node()

    self.job_type_1 = job_test_utils.create_job_type()
    # This configuration is deliberately invalid JSON schema, so job_exe_2
    # should fail to schedule.
    self.job_type_2 = job_test_utils.create_job_type(configuration={'INVALID': 'SCHEMA'})

    self.queue_1 = queue_test_utils.create_queue(job_type=self.job_type_1)
    self.queue_2 = queue_test_utils.create_queue(job_type=self.job_type_2)

    self.job_exe_1 = self.queue_1.job_exe
    self.job_exe_2 = self.queue_2.job_exe
def setUp(self):
    """Create jobs with execution histories whose ordering the tests rely on.

    The sleeps keep creation timestamps strictly increasing so that "last run"
    queries are deterministic.
    """
    django.setup()

    def hours_ago(count):
        # Aware timestamp `count` hours before now (evaluated at call time).
        return timezone.now() - datetime.timedelta(hours=count)

    self.job_type_1 = job_test_utils.create_job_type()
    self.job_type_2 = job_test_utils.create_job_type()

    # Job 1a: two failures, a completion, then a running execution (the last run).
    self.job_1a = job_test_utils.create_job(job_type=self.job_type_1, status="COMPLETED")
    job_test_utils.create_job_exe(job=self.job_1a, status="FAILED", created=hours_ago(3))
    time.sleep(0.01)
    job_test_utils.create_job_exe(job=self.job_1a, status="FAILED", created=hours_ago(2))
    time.sleep(0.01)
    job_test_utils.create_job_exe(job=self.job_1a, status="COMPLETED", created=hours_ago(1),
                                  last_modified=hours_ago(1))
    time.sleep(0.01)
    self.last_run_1a = job_test_utils.create_job_exe(job=self.job_1a, status="RUNNING")

    # Job 1b: a single failed execution.
    self.job_1b = job_test_utils.create_job(job_type=self.job_type_1, status="FAILED")
    time.sleep(0.01)
    self.last_run_1b = job_test_utils.create_job_exe(job=self.job_1b, status="FAILED")

    # Job 2a: two failures, a completion, then a running execution (the last run).
    self.job_2a = job_test_utils.create_job(job_type=self.job_type_2, status="RUNNING")
    time.sleep(0.01)
    job_test_utils.create_job_exe(job=self.job_2a, status="FAILED", created=hours_ago(3),
                                  last_modified=hours_ago(2))
    time.sleep(0.01)
    job_test_utils.create_job_exe(job=self.job_2a, status="FAILED", created=hours_ago(2),
                                  last_modified=hours_ago(1))
    time.sleep(0.01)
    job_test_utils.create_job_exe(job=self.job_2a, status="COMPLETED", created=hours_ago(1))
    time.sleep(0.01)
    self.last_run_2a = job_test_utils.create_job_exe(job=self.job_2a, status="RUNNING")

    # Job 2b: a single completed execution.
    self.job_2b = job_test_utils.create_job(job_type=self.job_type_2, status="COMPLETED")
    time.sleep(0.01)
    self.last_run_2b = job_test_utils.create_job_exe(job=self.job_2b, status="COMPLETED")
def setUp(self):
    """Create one published product for an operational and a non-operational job type."""
    django.setup()

    self.job_type1 = job_test_utils.create_job_type(name='test1', category='test-1', is_operational=True)
    self.job1 = job_test_utils.create_job(job_type=self.job_type1)
    self.job_exe1 = job_test_utils.create_job_exe(job=self.job1)
    self.product1 = product_test_utils.create_product(job_exe=self.job_exe1, has_been_published=True,
                                                      file_name='test.txt')

    self.job_type2 = job_test_utils.create_job_type(name='test2', category='test-2', is_operational=False)
    self.job2 = job_test_utils.create_job(job_type=self.job_type2)
    self.job_exe2 = job_test_utils.create_job_exe(job=self.job2)
    self.product2 = product_test_utils.create_product(job_exe=self.job_exe2, has_been_published=True)
def setUp(self):
    """Create three job types with metrics covering counts and timing statistics."""
    django.setup()

    # Metrics with completion/failure counts only.
    self.job_type1 = job_test_utils.create_job_type()
    metrics_test_utils.create_job_type(job_type=self.job_type1, completed_count=8, failed_count=2,
                                       total_count=10)

    # Metrics with job-time statistics (sum/min/max/avg).
    self.job_type2 = job_test_utils.create_job_type()
    metrics_test_utils.create_job_type(job_type=self.job_type2, job_time_sum=220, job_time_min=20,
                                       job_time_max=200, job_time_avg=110)

    self.job_type3 = job_test_utils.create_job_type()
    metrics_test_utils.create_job_type(job_type=self.job_type3, job_time_sum=1100, job_time_min=100,
                                       job_time_max=1000, job_time_avg=550)
def setUp(self):
    """Create two files produced by jobs of different operational status."""
    django.setup()

    self.country = storage_test_utils.create_country()

    # File 1: from an operational job type, with full provenance fields.
    self.job_type1 = job_test_utils.create_job_type(name='test1', category='test-1', is_operational=True)
    self.job1 = job_test_utils.create_job(job_type=self.job_type1)
    self.job_exe1 = job_test_utils.create_job_exe(job=self.job1)
    self.file1 = storage_test_utils.create_file(job_exe=self.job_exe1, job_output='out_name',
                                                file_name='test.txt', countries=[self.country],
                                                recipe_node='test-recipe-node')

    # File 2: from a non-operational job type, minimal fields.
    self.job_type2 = job_test_utils.create_job_type(name='test2', category='test-2', is_operational=False)
    self.job2 = job_test_utils.create_job(job_type=self.job_type2)
    self.job_exe2 = job_test_utils.create_job_exe(job=self.job2)
    self.file2 = storage_test_utils.create_file(job_exe=self.job_exe2, countries=[self.country])
def setUp(self):
    """Create two published products, one per operational/non-operational job type."""
    django.setup()

    self.country = storage_test_utils.create_country()

    self.job_type1 = job_test_utils.create_job_type(name='test1', category='test-1', is_operational=True)
    self.job1 = job_test_utils.create_job(job_type=self.job_type1)
    self.job_exe1 = job_test_utils.create_job_exe(job=self.job1)
    self.product1 = product_test_utils.create_product(job_exe=self.job_exe1, has_been_published=True,
                                                      file_name='test.txt', countries=[self.country])

    self.job_type2 = job_test_utils.create_job_type(name='test2', category='test-2', is_operational=False)
    self.job2 = job_test_utils.create_job(job_type=self.job_type2)
    self.job_exe2 = job_test_utils.create_job_exe(job=self.job2)
    self.product2 = product_test_utils.create_product(job_exe=self.job_exe2, has_been_published=True,
                                                      countries=[self.country])
def setUp(self):
    """Create recipe types and job types, then link three (recipe, job) pairs."""
    django.setup()

    self.rt1 = recipe_test_utils.create_recipe_type_v6()
    self.rt2 = recipe_test_utils.create_recipe_type_v6()
    self.rt3 = recipe_test_utils.create_recipe_type_v6()

    self.jt3 = job_test_utils.create_job_type()
    self.jt4 = job_test_utils.create_job_type()
    self.jt5 = job_test_utils.create_job_type()
    self.jt6 = job_test_utils.create_job_type()

    # Parallel lists: rt1->jt3, rt1->jt4, rt2->jt5. rt3 and jt6 stay unlinked.
    self.parents = [self.rt1.id, self.rt1.id, self.rt2.id]
    self.children = [self.jt3.id, self.jt4.id, self.jt5.id]
    RecipeTypeJobLink.objects.create_recipe_type_job_links(self.parents, self.children)
def setUp(self):
    """Create two job types (with jobs) whose interfaces chain file outputs to inputs."""
    django.setup()

    # Type 1 and its job: text file in, multiple PNGs out.
    self.input_name_1 = 'Test Input 1'
    self.output_name_1 = 'Test Output 1'
    interface_1 = {
        'version': '1.0',
        'command': 'my_cmd',
        'command_arguments': 'args',
        'input_data': [{
            'name': self.input_name_1,
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'output_data': [{
            'name': self.output_name_1,
            'type': 'files',
            'media_type': 'image/png',
        }],
    }
    self.job_type_1 = job_test_utils.create_job_type(interface=interface_1)
    self.job_1 = job_test_utils.create_job(job_type=self.job_type_1)

    # Type 2 and its job: multiple images in, single file out.
    self.input_name_2 = 'Test Input 2'
    self.output_name_2 = 'Test Output 2'
    interface_2 = {
        'version': '1.0',
        'command': 'my_cmd',
        'command_arguments': 'args',
        'input_data': [{
            'name': self.input_name_2,
            'type': 'files',
            'media_types': ['image/png', 'image/tiff'],
        }],
        'output_data': [{
            'name': self.output_name_2,
            'type': 'file',
        }],
    }
    self.job_type_2 = job_test_utils.create_job_type(interface=interface_2)
    self.job_2 = job_test_utils.create_job(job_type=self.job_type_2)

    self.file_1 = storage_test_utils.create_file(media_type='text/plain')
def setUp(self):
    """Create a single-job recipe type plus one typed and one default recipe."""
    django.setup()

    self.job_type1 = job_test_utils.create_job_type()

    # Recipe with one HDF5-image input wired straight into the 'kml' job.
    definition = {
        'version': '1.0',
        'input_data': [{
            'media_types': [
                'image/x-hdf5-image',
            ],
            'type': 'file',
            'name': 'input_file',
        }],
        'jobs': [{
            'job_type': {
                'name': self.job_type1.name,
                'version': self.job_type1.version,
            },
            'name': 'kml',
            'recipe_inputs': [{
                'job_input': 'input_file',
                'recipe_input': 'input_file',
            }],
        }],
    }
    self.recipe_type = recipe_test_utils.create_recipe_type(name='my-type', definition=definition)

    self.recipe1 = recipe_test_utils.create_recipe(self.recipe_type)
    self.recipe_job1 = recipe_test_utils.create_recipe_job(recipe=self.recipe1)
    self.recipe2 = recipe_test_utils.create_recipe()
def setUp(self):
    """Create a single-try job with one RUNNING execution and remember its id."""
    django.setup()

    job_type = job_test_utils.create_job_type(max_tries=1)
    job = job_test_utils.create_job(job_type=job_type, num_exes=1)
    job_exe = job_test_utils.create_job_exe(job=job, status='RUNNING')
    self._job_exe_id = job_exe.id
def create_ingest(file_name='test.txt', status='TRANSFERRING', transfer_started=None, transfer_ended=None,
                  ingest_started=None, ingest_ended=None, data_started=None, data_ended=None, workspace=None,
                  strike=None, scan=None, source_file=None):
    """Create and return an Ingest model for testing.

    Arguments left as None are filled with newly created test objects or with
    timestamps appropriate for the requested status.
    """
    if not workspace:
        workspace = storage_test_utils.create_workspace()
    if not source_file:
        source_file = source_test_utils.create_source(file_name=file_name, data_started=data_started,
                                                      data_ended=data_ended, workspace=workspace)
    if not transfer_started:
        transfer_started = timezone.now()
    # Statuses past TRANSFERRING imply the ingest has started (and possibly ended).
    if status not in ['QUEUED', 'TRANSFERRING'] and not ingest_started:
        ingest_started = timezone.now()
    if status not in ['QUEUED', 'TRANSFERRING', 'INGESTING'] and not ingest_ended:
        ingest_ended = timezone.now()

    try:
        job_type = Ingest.objects.get_ingest_job_type()
    except Exception:
        # Was a bare except: narrowed so SystemExit/KeyboardInterrupt propagate.
        # Fall back to a generic test job type when the ingest job type is absent.
        job_type = job_utils.create_job_type()
    job = job_utils.create_job(job_type=job_type)
    job_utils.create_job_exe(job=job)

    return Ingest.objects.create(file_name=file_name, file_size=source_file.file_size, status=status, job=job,
                                 bytes_transferred=source_file.file_size, transfer_started=transfer_started,
                                 transfer_ended=transfer_ended, media_type='text/plain',
                                 ingest_started=ingest_started, ingest_ended=ingest_ended,
                                 data_started=source_file.data_started, data_ended=source_file.data_ended,
                                 workspace=workspace, strike=strike, scan=scan, source_file=source_file)
def test_uuid_use_properties(self):
    """Tests setting UUIDs on products with different property values."""
    job_type = job_test_utils.create_job_type()

    # First execution: property1=value1.
    job1 = job_test_utils.create_job(job_type=job_type)
    job_exe1 = job_test_utils.create_job_exe(job=job1)
    data1 = job_exe1.job.get_job_data()
    data1.add_property_input('property1', 'value1')
    data1.add_property_input('property2', 'value2')
    job_exe1.job.data = data1.get_dict()

    # Second execution: property1 differs, property2 is the same.
    job2 = job_test_utils.create_job(job_type=job_type)
    job_exe2 = job_test_utils.create_job_exe(job=job2)
    data2 = job_exe2.job.get_job_data()
    data2.add_property_input('property1', 'diffvalue1')
    data2.add_property_input('property2', 'value2')
    job_exe2.job.data = data2.get_dict()

    products1 = ProductFile.objects.upload_files(self.files, [self.source_file.id], job_exe1, self.workspace)
    products2 = ProductFile.objects.upload_files(self.files, [self.source_file.id], job_exe2, self.workspace)

    # Differing property values must yield different product UUIDs.
    self.assertIsNotNone(products1[0].uuid)
    self.assertIsNotNone(products1[1].uuid)
    self.assertNotEqual(products1[0].uuid, products2[0].uuid)
    self.assertNotEqual(products1[1].uuid, products2[1].uuid)
def setUp(self):
    """Create a RUNNING job/execution for a job type with two file outputs."""
    django.setup()

    cmd = 'command'
    cmd_args = 'args'
    interface = {
        'version': '1.0',
        'command': cmd,
        'command_arguments': cmd_args,
        'input_data': [],
        'output_data': [{
            'name': 'arg1',
            'type': 'file'
        }, {
            'name': 'arg2',
            'type': 'file'
        }]
    }

    self.job_type = job_utils.create_job_type(name='Test', version='1.0', interface=interface)
    self.event = TriggerEvent.objects.create_trigger_event('TEST', None, {}, now())
    self.job = job_utils.create_job(job_type=self.job_type, event=self.event, status='RUNNING')
    self.job_exe = job_utils.create_job_exe(job=self.job, status='RUNNING')
def test_job_type_limit(self, mock_taskinfo):
    """Tests running the scheduling thread with a job type limit"""
    mock_taskinfo.return_value = MagicMock()

    Queue.objects.all().delete()

    # Cap the job type at 4 simultaneously scheduled jobs.
    job_type_with_limit = job_test_utils.create_job_type()
    job_type_with_limit.max_scheduled = 4
    job_type_with_limit.save()

    job_exe_1 = job_test_utils.create_job_exe(job_type=job_type_with_limit, status='RUNNING')
    for _ in range(6):
        queue_test_utils.create_queue(job_type=job_type_with_limit)
    job_type_mgr.sync_with_database()

    # One job of this type is already running
    running_job_mgr.add_job_exes([RunningJobExecution(job_exe_1)])

    offer_1 = ResourceOffer('offer_1', self.node_agent_1,
                            NodeResources(cpus=200.0, mem=102400.0, disk=102400.0))
    offer_2 = ResourceOffer('offer_2', self.node_agent_2,
                            NodeResources(cpus=200.0, mem=204800.0, disk=204800.0))
    offer_mgr.add_new_offers([offer_1, offer_2])

    # Ignore cleanup tasks
    for node in node_mgr.get_nodes():
        node.initial_cleanup_completed()

    num_tasks = self._scheduling_thread._perform_scheduling()
    # One is already running, so only 3 of the 6 queued jobs fit under the limit.
    self.assertEqual(num_tasks, 3)
def test_job_type_limit(self, mock_taskinfo):
    """Tests running the scheduling thread with a job type limit"""
    mock_taskinfo.return_value = MagicMock()

    Queue.objects.all().delete()

    # Cap the job type at 4 simultaneously scheduled jobs.
    job_type_with_limit = job_test_utils.create_job_type()
    job_type_with_limit.max_scheduled = 4
    job_type_with_limit.save()

    job_exe_1 = job_test_utils.create_job_exe(job_type=job_type_with_limit, status='RUNNING')
    queue_1_limit = queue_test_utils.create_queue(job_type=job_type_with_limit)
    queue_2_limit = queue_test_utils.create_queue(job_type=job_type_with_limit)
    queue_3_limit = queue_test_utils.create_queue(job_type=job_type_with_limit)
    queue_4_limit = queue_test_utils.create_queue(job_type=job_type_with_limit)
    queue_5_limit = queue_test_utils.create_queue(job_type=job_type_with_limit)
    queue_6_limit = queue_test_utils.create_queue(job_type=job_type_with_limit)
    self._job_type_manager.sync_with_database()

    # One job of this type is already running
    self._job_exe_manager.add_job_exes([RunningJobExecution(job_exe_1)])

    offer_1 = ResourceOffer('offer_1', self.node_agent_1,
                            NodeResources(cpus=200.0, mem=102400.0, disk=102400.0))
    offer_2 = ResourceOffer('offer_2', self.node_agent_2,
                            NodeResources(cpus=200.0, mem=204800.0, disk=204800.0))
    self._offer_manager.add_new_offers([offer_1, offer_2])

    num_tasks = self._scheduling_thread._perform_scheduling()
    # One is already running, so only 3 of the 6 queued jobs fit under the limit.
    self.assertEqual(num_tasks, 3)
def setUp(self):
    """Create a RUNNING job and execution for a minimal two-field interface.

    Removed locals that were assigned but never used (priority, max_tries,
    cpus, mem, disk) — none of them were passed to any call below.
    """
    django.setup()

    cmd = 'command'
    cmd_args = 'run test'
    timeout = 60

    interface = {
        'version': '1.0',
        'command': cmd,
        'command_arguments': cmd_args
    }

    self.job_type = job_utils.create_job_type(name='Test', version='1.0', interface=interface)
    self.event = TriggerEvent.objects.create_trigger_event('TEST', None, {}, now())
    self.job = job_utils.create_job(job_type=self.job_type, event=self.event, status='RUNNING')
    self.job_exe = job_utils.create_job_exe(job=self.job, status='RUNNING', command_arguments=cmd_args,
                                            timeout=timeout, queued=now())
def create_ingest(file_name='test.txt', status='TRANSFERRING', transfer_started=None, transfer_ended=None,
                  ingest_started=None, ingest_ended=None, data_started=None, data_ended=None, workspace=None,
                  strike=None, source_file=None):
    """Create and return an Ingest model for testing.

    Arguments left as None are filled with newly created test objects or with
    timestamps appropriate for the requested status.
    """
    if not workspace:
        workspace = storage_test_utils.create_workspace()
    if not strike:
        strike = create_strike()
    if not source_file:
        source_file = source_test_utils.create_source(file_name=file_name, data_started=data_started,
                                                      data_ended=data_ended, workspace=workspace)
    if not transfer_started:
        transfer_started = timezone.now()
    # Statuses past TRANSFERRING imply the ingest has started (and possibly ended).
    if status not in ['QUEUED', 'TRANSFERRING'] and not ingest_started:
        ingest_started = timezone.now()
    if status not in ['QUEUED', 'TRANSFERRING', 'INGESTING'] and not ingest_ended:
        ingest_ended = timezone.now()

    try:
        job_type = Ingest.objects.get_ingest_job_type()
    except Exception:
        # Was a bare except: narrowed so SystemExit/KeyboardInterrupt propagate.
        # Fall back to a generic test job type when the ingest job type is absent.
        job_type = job_utils.create_job_type()
    job = job_utils.create_job(job_type=job_type)
    job_utils.create_job_exe(job=job)

    return Ingest.objects.create(file_name=file_name, file_size=source_file.file_size, status=status, job=job,
                                 bytes_transferred=source_file.file_size, transfer_started=transfer_started,
                                 transfer_ended=transfer_ended, media_type='text/plain',
                                 ingest_started=ingest_started, ingest_ended=ingest_ended,
                                 workspace=workspace, strike=strike, source_file=source_file)
def setUp(self):
    """Create a PNG-in/PNG-out job type plus a workspace with one file."""
    django.setup()

    # Version 1.1 interface: one PNG input, one PNG output, no shared resources.
    self.interface = {
        'version': '1.1',
        'command': 'test_cmd',
        'command_arguments': 'test_arg',
        'input_data': [{
            'media_types': ['image/png'],
            'type': 'file',
            'name': 'input_file',
        }],
        'output_data': [{
            'name': 'output_file',
            'type': 'file',
            'media_type': 'image/png',
        }],
        'shared_resources': [],
    }
    self.job_type = job_test_utils.create_job_type(interface=self.interface)

    self.workspace = storage_test_utils.create_workspace()
    self.file1 = storage_test_utils.create_file(workspace=self.workspace)
def test_job_type_limit(self, mock_taskinfo):
    """Tests calling perform_scheduling() with a job type limit"""
    mock_taskinfo.return_value = MagicMock()

    Queue.objects.all().delete()

    # Job type that may only have 4 executions scheduled at once
    limited_type = job_test_utils.create_job_type()
    limited_type.max_scheduled = 4
    limited_type.save()

    running_exe = job_test_utils.create_running_job_exe(agent_id=self.agent_1.agent_id,
                                                        job_type=limited_type, node=self.node_1)
    # Six queued executions of the limited type
    for _ in range(6):
        queue_test_utils.create_queue(job_type=limited_type)

    job_type_mgr.sync_with_database()
    # One job of this type is already running
    job_exe_mgr.schedule_job_exes([running_exe], [])

    offer_1 = ResourceOffer('offer_1', self.agent_1.agent_id, self.framework_id,
                            NodeResources([Cpus(0.0), Mem(1024.0), Disk(1024.0)]), now())
    offer_2 = ResourceOffer('offer_2', self.agent_2.agent_id, self.framework_id,
                            NodeResources([Cpus(25.0), Mem(2048.0), Disk(2048.0)]), now())
    resource_mgr.add_new_offers([offer_1, offer_2])

    scheduling_manager = SchedulingManager()
    num_tasks = scheduling_manager.perform_scheduling(self._driver, now())
    self.assertEqual(num_tasks, 3)  # One is already running, should only be able to schedule 3 more
def setUp(self):
    """Create two workspaces, a running job execution, and the expected remote path prefix."""
    django.setup()

    self.workspace_1 = Workspace.objects.create(name='Test workspace 1')
    self.workspace_2 = Workspace.objects.create(name='Test workspace 2', is_active=False)

    job_iface = {'version': '1.0', 'command': 'my command'}
    job_type = job_utils.create_job_type(name='Type 1', version='1.0', interface=job_iface)
    event = TriggerEvent.objects.create_trigger_event('TEST', None, {}, now())
    self.job = job_utils.create_job(job_type=job_type, event=event, status='RUNNING',
                                    last_status_change=now())
    self.job_exe = job_utils.create_job_exe(job=self.job, status='RUNNING', timeout=1, queued=now())

    # Remote paths are namespaced by sanitized job type name and version
    type_name = get_valid_filename(self.job.job_type.name)
    type_version = get_valid_filename(self.job.job_type.version)
    self.remote_base_path = os.path.join('jobs', type_name, type_version)
def setUp(self):
    """Create a parsed source file with a published product descended from it."""
    django.setup()

    from product.test import utils as product_test_utils

    self.country = storage_test_utils.create_country()

    self.job_type1 = job_test_utils.create_job_type(name='test1', category='test-1', is_operational=True)
    self.job1 = job_test_utils.create_job(job_type=self.job_type1)
    self.job_exe1 = job_test_utils.create_job_exe(job=self.job1)

    self.src_file = source_test_utils.create_source(data_started='2016-01-01T00:00:00Z',
                                                    data_ended='2016-01-01T00:00:00Z',
                                                    file_name='test.txt', is_parsed=True)
    self.product1 = product_test_utils.create_product(job_exe=self.job_exe1, has_been_published=True,
                                                      is_published=True, file_name='test_prod.txt',
                                                      countries=[self.country])
    # Link the product to its source file ancestry
    product_test_utils.create_file_link(ancestor=self.src_file, descendant=self.product1, job=self.job1,
                                        job_exe=self.job_exe1)
def test_calculate_negative_times(self):
    """Tests calculating times when machine clocks are out of sync."""
    jt = job_test_utils.create_job_type()
    job = job_test_utils.create_job(job_type=jt, status='COMPLETED',
                                    ended=datetime.datetime(2015, 1, 1, tzinfo=utc))
    # Execution where started precedes queued (clock skew), giving a negative raw queue time
    job_test_utils.create_job_exe(
        job=job,
        status=job.status,
        queued=datetime.datetime(2015, 1, 1, 1, 10, tzinfo=utc),
        started=datetime.datetime(2015, 1, 1, 1, 5, tzinfo=utc),
        ended=datetime.datetime(2015, 1, 1, tzinfo=utc),
    )

    MetricsJobType.objects.calculate(datetime.date(2015, 1, 1))
    entries = MetricsJobType.objects.filter(occurred=datetime.date(2015, 1, 1))
    self.assertEqual(len(entries), 1)

    # Negative durations should be clamped to zero rather than stored as negative
    entry = entries.first()
    self.assertEqual(entry.queue_time_min, 0)
    self.assertEqual(entry.queue_time_max, 0)
def test_json(self):
    """Tests converting a RequeueJobsBulk message to and from JSON"""
    sys_err = error_test_utils.create_error(category='SYSTEM')

    data = JobData()
    job_type = job_test_utils.create_job_type()
    job_1 = job_test_utils.create_job(job_type=job_type, num_exes=3, status='FAILED', error=sys_err,
                                      input=data.get_dict())
    # Canceled job matching every other filter; it must NOT be re-queued
    job_2 = job_test_utils.create_job(job_type=job_type, num_exes=3, status='CANCELED', error=sys_err,
                                      input=data.get_dict())

    # Create message with every filter populated
    message = RequeueJobsBulk()
    message.started = job_1.last_modified - timedelta(seconds=1)
    message.ended = job_1.last_modified + timedelta(seconds=1)
    message.error_categories = ['SYSTEM']
    message.error_ids = [sys_err.id]
    message.job_ids = [job_1.id]
    message.job_type_ids = [job_type.id]
    message.priority = 1
    message.status = 'FAILED'

    # Convert message to JSON and back, and then execute
    message_json_dict = message.to_json()
    new_message = RequeueJobsBulk.from_json(message_json_dict)
    result = new_message.execute()
    self.assertTrue(result)

    # Should be one re-queue message for job 1
    self.assertEqual(len(new_message.new_messages), 1)
    requeue_msg = new_message.new_messages[0]
    self.assertEqual(requeue_msg.type, 'requeue_jobs')
    self.assertListEqual(requeue_msg._requeue_jobs, [QueuedJob(job_1.id, job_1.num_exes)])
    self.assertEqual(requeue_msg.priority, 1)
def test_json(self):
    """Tests converting a CancelJobs message to and from JSON"""
    when = now()
    data = JobData()
    job_type = job_test_utils.create_job_type()
    job_1 = job_test_utils.create_job(job_type=job_type, status='PENDING')
    job_2 = job_test_utils.create_job(job_type=job_type, num_exes=3, status='FAILED', input=data.get_dict())
    job_ids = [job_1.id, job_2.id]

    # Add jobs to message
    message = CancelJobs()
    message.when = when
    for job_id in job_ids:
        if message.can_fit_more():
            message.add_job(job_id)

    # Convert message to JSON and back, and then execute
    message_json_dict = message.to_json()
    new_message = CancelJobs.from_json(message_json_dict)
    result = new_message.execute()
    self.assertTrue(result)

    jobs = Job.objects.filter(id__in=job_ids).order_by('id')

    # Both jobs should have been canceled
    self.assertEqual(jobs[0].status, 'CANCELED')
    self.assertEqual(jobs[0].last_status_change, when)
    self.assertEqual(jobs[1].status, 'CANCELED')
    self.assertEqual(jobs[1].last_status_change, when)

    # No new messages since these jobs do not belong to a recipe
    self.assertEqual(len(new_message.new_messages), 0)
def setUp(self):
    """Initialize the scheduler and record the id of one running job execution."""
    django.setup()

    Scheduler.objects.initialize_scheduler()

    jt = job_test_utils.create_job_type(max_tries=1)
    job = job_test_utils.create_job(job_type=jt, num_exes=1)
    exe = job_test_utils.create_job_exe(job=job, status='RUNNING')
    self._job_exe_id = exe.id
def test_get_nodes_to_create(self):
    """Tests calling Recipe.get_nodes_to_create()"""
    job_type = job_test_utils.create_job_type()
    sub_recipe_type = recipe_test_utils.create_recipe_type()

    # Create recipe definition: jobs A, E, F; conditions B, C, D; sub-recipes G, H
    definition = RecipeDefinition(Interface())
    definition.add_job_node('A', job_type.name, job_type.version, job_type.revision_num)
    definition.add_condition_node('B', Interface(), DataFilter(True))
    definition.add_condition_node('C', Interface(), DataFilter(True))
    definition.add_condition_node('D', Interface(), DataFilter(False))
    definition.add_job_node('E', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('F', job_type.name, job_type.version, job_type.revision_num)
    definition.add_recipe_node('G', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_recipe_node('H', sub_recipe_type.name, sub_recipe_type.revision_num)
    for parent, child in [('A', 'D'), ('A', 'E'), ('B', 'E'), ('B', 'F'),
                          ('C', 'F'), ('D', 'G'), ('E', 'G'), ('E', 'H')]:
        definition.add_dependency(parent, child)

    definition_json_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type(definition=definition_json_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)

    # Nodes A, B, and D already exist
    job_a = job_test_utils.create_job(job_type=job_type, status='COMPLETED', save=True)
    condition_b = recipe_test_utils.create_recipe_condition(is_processed=True, is_accepted=True, save=False)
    condition_d = recipe_test_utils.create_recipe_condition(is_processed=True, is_accepted=False, save=False)
    RecipeCondition.objects.bulk_create([condition_b, condition_d])
    recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='A', job=job_a, save=False)
    recipe_node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='B', condition=condition_b,
                                                         save=False)
    recipe_node_d = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='D', condition=condition_d,
                                                         save=False)
    RecipeNode.objects.bulk_create([recipe_node_a, recipe_node_b, recipe_node_d])

    recipe_instance = Recipe.objects.get_recipe_instance(recipe.id)
    nodes_to_create = recipe_instance.get_nodes_to_create()
    self.assertSetEqual(set(nodes_to_create.keys()), {'C', 'E', 'H'})
def test_publish_products_unpublish_superseded(self):
    """Tests calling ProductFileManager.publish_products() where the job has superseded job products that must be
    unpublished
    """
    # Job 1 is superseded by Job 2 and Job 2 is superseded by Job 3
    job_exe_1 = job_test_utils.create_job_exe()
    product_1_a = prod_test_utils.create_product(job_exe=job_exe_1, has_been_published=True, is_published=True)
    product_1_b = prod_test_utils.create_product(job_exe=job_exe_1, has_been_published=True, is_published=True)

    job_type = job_test_utils.create_job_type()
    event = trigger_test_utils.create_trigger_event()
    job_2 = Job.objects.create_job(job_type=job_type, event=event, superseded_job=job_exe_1.job)
    job_2.save()
    job_exe_2 = job_test_utils.create_job_exe(job=job_2)
    Job.objects.supersede_jobs([job_exe_1.job], now())
    product_2_a = prod_test_utils.create_product(job_exe=job_exe_2, has_been_published=True, is_published=True)
    product_2_b = prod_test_utils.create_product(job_exe=job_exe_2, has_been_published=True, is_published=True)

    job_3 = Job.objects.create_job(job_type=job_type, event=event, superseded_job=job_exe_2.job)
    job_3.save()
    job_exe_3 = job_test_utils.create_job_exe(job=job_3)
    Job.objects.supersede_jobs([job_2], now())
    product_3_a = prod_test_utils.create_product(job_exe=job_exe_3)
    product_3_b = prod_test_utils.create_product(job_exe=job_exe_3)

    when = now()
    ProductFile.objects.publish_products(job_exe_3, when)

    # Make sure products from Job 1 and Job 2 are unpublished
    for old_id in [product_1_a.id, product_1_b.id, product_2_a.id, product_2_b.id]:
        old_product = ProductFile.objects.get(id=old_id)
        self.assertTrue(old_product.has_been_published)
        self.assertFalse(old_product.is_published)
        self.assertEqual(old_product.unpublished, when)

    # Make sure Job 3 products are published
    for new_id in [product_3_a.id, product_3_b.id]:
        new_product = ProductFile.objects.get(id=new_id)
        self.assertTrue(new_product.has_been_published)
        self.assertTrue(new_product.is_published)
        self.assertFalse(new_product.is_superseded)
        self.assertEqual(new_product.published, when)
        self.assertIsNone(new_product.superseded)
def test_publish_products_unpublish_superseded(self):
    """Tests calling ProductFileManager.publish_products() where the job has superseded job products that must be
    unpublished
    """
    # Job 1 is superseded by Job 2 and Job 2 is superseded by Job 3
    job_exe_1 = job_test_utils.create_job_exe()
    product_1_a = prod_test_utils.create_product(job_exe=job_exe_1, has_been_published=True, is_published=True)
    product_1_b = prod_test_utils.create_product(job_exe=job_exe_1, has_been_published=True, is_published=True)

    job_type = job_test_utils.create_job_type()
    event = trigger_test_utils.create_trigger_event()
    job_2 = Job.objects.create_job_old(job_type=job_type, event_id=event.id, superseded_job=job_exe_1.job)
    job_2.save()
    job_exe_2 = job_test_utils.create_job_exe(job=job_2)
    Job.objects.supersede_jobs_old([job_exe_1.job], now())
    product_2_a = prod_test_utils.create_product(job_exe=job_exe_2, has_been_published=True, is_published=True)
    product_2_b = prod_test_utils.create_product(job_exe=job_exe_2, has_been_published=True, is_published=True)

    job_3 = Job.objects.create_job_old(job_type=job_type, event_id=event.id, superseded_job=job_exe_2.job)
    job_3.save()
    job_exe_3 = job_test_utils.create_job_exe(job=job_3)
    Job.objects.supersede_jobs_old([job_2], now())
    product_3_a = prod_test_utils.create_product(job_exe=job_exe_3)
    product_3_b = prod_test_utils.create_product(job_exe=job_exe_3)

    when = now()
    ProductFile.objects.publish_products(job_exe_3, job_3, when)

    # Make sure products from Job 1 and Job 2 are unpublished
    for old_id in [product_1_a.id, product_1_b.id, product_2_a.id, product_2_b.id]:
        old_product = ScaleFile.objects.get(id=old_id)
        self.assertTrue(old_product.has_been_published)
        self.assertFalse(old_product.is_published)
        self.assertEqual(old_product.unpublished, when)

    # Make sure Job 3 products are published
    for new_id in [product_3_a.id, product_3_b.id]:
        new_product = ScaleFile.objects.get(id=new_id)
        self.assertTrue(new_product.has_been_published)
        self.assertTrue(new_product.is_published)
        self.assertFalse(new_product.is_superseded)
        self.assertEqual(new_product.published, when)
        self.assertIsNone(new_product.superseded)
def setUp(self):
    """Create a RUNNING and a PENDING job that both take the same input file."""
    django.setup()

    self.file = storage_test_utils.create_file()

    self.job_type1 = job_test_utils.create_job_type(name='test1', category='test-1')
    self.job1 = job_test_utils.create_job(
        job_type=self.job_type1, status='RUNNING',
        data={'input_data': [{'name': 'input_file', 'file_id': self.file.id}]})

    self.job_type2 = job_test_utils.create_job_type(name='test2', category='test-2')
    self.job2 = job_test_utils.create_job(
        job_type=self.job_type2, status='PENDING',
        data={'input_data': [{'name': 'input_file', 'file_id': self.file.id}]})
def setUp(self):
    """Create jobs with mixed-status execution histories for the tests."""
    django.setup()

    def hours_ago(hours):
        # Fresh timestamp per call, mirroring the per-call timezone.now() offsets
        return timezone.now() - datetime.timedelta(hours=hours)

    self.job_type_1 = job_test_utils.create_job_type()
    self.job_type_2 = job_test_utils.create_job_type()

    # Job 1: three old FAILED executions followed by a RUNNING one
    self.job_1 = job_test_utils.create_job(job_type=self.job_type_1, status='COMPLETED')
    self.job_exe_1a = job_test_utils.create_job_exe(job=self.job_1, status='FAILED', created=hours_ago(3))
    self.job_exe_1b = job_test_utils.create_job_exe(job=self.job_1, status='FAILED', created=hours_ago(2))
    self.job_exe_1c = job_test_utils.create_job_exe(job=self.job_1, status='FAILED', created=hours_ago(1),
                                                    last_modified=hours_ago(1))
    self.last_exe_1 = job_test_utils.create_job_exe(job=self.job_1, status='RUNNING')

    self.job_2 = job_test_utils.create_job(job_type=self.job_type_1, status='FAILED')
    self.last_exe_2 = job_test_utils.create_job_exe(job=self.job_2, status='FAILED')

    # Job 3: failures, then a completion, then a running execution
    job_3 = job_test_utils.create_job(job_type=self.job_type_2, status='RUNNING')
    job_test_utils.create_job_exe(job=job_3, status='FAILED', created=hours_ago(3),
                                  last_modified=hours_ago(2))
    job_test_utils.create_job_exe(job=job_3, status='FAILED', created=hours_ago(2),
                                  last_modified=hours_ago(1))
    job_test_utils.create_job_exe(job=job_3, status='COMPLETED', created=hours_ago(1))
    job_test_utils.create_job_exe(job=job_3, status='RUNNING')

    job_4 = job_test_utils.create_job(job_type=self.job_type_2, status='COMPLETED')
    job_test_utils.create_job_exe(job=job_4, status='COMPLETED')
def setUp(self):
    """Create a PARSE trigger rule and a two-job recipe type for the tests."""
    django.setup()

    self.job_type1 = job_test_utils.create_job_type()
    self.job_type2 = job_test_utils.create_job_type()
    self.workspace = storage_test_utils.create_workspace()

    self.trigger_config = {
        'version': '1.0',
        'condition': {'media_type': 'text/plain'},
        'data': {'input_data_name': 'input_file', 'workspace_name': self.workspace.name},
    }
    self.trigger_rule = trigger_test_utils.create_trigger_rule(trigger_type='PARSE', is_active=True,
                                                               configuration=self.trigger_config)

    # Recipe definition referencing both job types
    job_nodes = []
    for label, jt in [('Job 1', self.job_type1), ('Job 2', self.job_type2)]:
        job_nodes.append({'name': label, 'job_type': {'name': jt.name, 'version': jt.version}})
    self.definition = {'version': '1.0', 'input_data': [], 'jobs': job_nodes}

    self.recipe_type = recipe_test_utils.create_recipe_type(name='my-type', definition=self.definition,
                                                            trigger_rule=self.trigger_rule)
def test_calculate_stats_partial(self):
    """Tests individual statistics are null when information is unavailable.

    Fix: the ``ended`` datetimes are now timezone-aware (``tzinfo=utc``), matching
    the sibling metrics tests and avoiding naive-datetime warnings under USE_TZ.
    """
    job_type = job_test_utils.create_job_type()
    # FAILED and CANCELED jobs with no execution timing information
    job_test_utils.create_job(job_type=job_type, status='FAILED',
                              ended=datetime.datetime(2015, 1, 1, tzinfo=utc))
    job_test_utils.create_job(job_type=job_type, status='CANCELED',
                              ended=datetime.datetime(2015, 1, 1, tzinfo=utc))

    MetricsJobType.objects.calculate(datetime.date(2015, 1, 1))
    entries = MetricsJobType.objects.filter(occurred=datetime.date(2015, 1, 1))
    self.assertEqual(len(entries), 1)

    entry = entries.first()
    self.assertEqual(entry.occurred, datetime.date(2015, 1, 1))
    self.assertEqual(entry.completed_count, 0)
    self.assertEqual(entry.failed_count, 1)
    self.assertEqual(entry.canceled_count, 1)
    self.assertEqual(entry.total_count, 2)

    self.assertEqual(entry.error_system_count, 0)
    self.assertEqual(entry.error_data_count, 0)
    self.assertEqual(entry.error_algorithm_count, 0)

    # Without execution records, every timing statistic must be null
    self.assertIsNone(entry.queue_time_sum)
    self.assertIsNone(entry.queue_time_min)
    self.assertIsNone(entry.queue_time_max)
    self.assertIsNone(entry.queue_time_avg)

    self.assertIsNone(entry.pre_time_sum)
    self.assertIsNone(entry.pre_time_min)
    self.assertIsNone(entry.pre_time_max)
    self.assertIsNone(entry.pre_time_avg)

    self.assertIsNone(entry.job_time_sum)
    self.assertIsNone(entry.job_time_min)
    self.assertIsNone(entry.job_time_max)
    self.assertIsNone(entry.job_time_avg)

    self.assertIsNone(entry.post_time_sum)
    self.assertIsNone(entry.post_time_min)
    self.assertIsNone(entry.post_time_max)
    self.assertIsNone(entry.post_time_avg)

    self.assertIsNone(entry.run_time_sum)
    self.assertIsNone(entry.run_time_min)
    self.assertIsNone(entry.run_time_max)
    self.assertIsNone(entry.run_time_avg)

    self.assertIsNone(entry.stage_time_sum)
    self.assertIsNone(entry.stage_time_min)
    self.assertIsNone(entry.stage_time_max)
    self.assertIsNone(entry.stage_time_avg)
def test_order_by(self):
    """Tests successfully calling the jobs view with sorting."""
    job_type1b = job_test_utils.create_job_type(name='test1', version='2.0', category='test-1')
    job_test_utils.create_job(job_type=job_type1b, status='RUNNING')
    job_type1c = job_test_utils.create_job_type(name='test1', version='3.0', category='test-1')
    job_test_utils.create_job(job_type=job_type1c, status='RUNNING')

    response = self.client.generic('GET', '/jobs/?order=job_type__name&order=-job_type__version')
    result = json.loads(response.content)
    self.assertEqual(response.status_code, status.HTTP_200_OK)

    results = result['results']
    self.assertEqual(len(results), 4)
    # Name ascending, then version descending within the same name
    expected_ids = [job_type1c.id, job_type1b.id, self.job_type1.id, self.job_type2.id]
    for index, expected_id in enumerate(expected_ids):
        self.assertEqual(results[index]['job_type']['id'], expected_id)
def setUp(self):
    """Create a running job whose interface declares two output arguments."""
    django.setup()

    command = 'command'
    arguments = 'args'
    interface = {
        'version': '1.0',
        'command': command,
        'command_arguments': arguments,
        'inputs': [],
        'outputs': [{'name': 'arg1'}, {'name': 'arg2'}],
    }

    self.job_type = job_utils.create_job_type(name='Test', version='1.0', interface=interface)
    self.event = TriggerEvent.objects.create_trigger_event('TEST', None, {}, now())
    self.job = job_utils.create_job(job_type=self.job_type, event=self.event, status='RUNNING')
    self.job_exe = job_utils.create_job_exe(job=self.job, status='RUNNING', command_arguments=arguments,
                                            queued=now())
def create_job_type(job_type=None, occurred=None, **kwargs):
    """Creates a metrics job type model for unit testing

    :returns: The metrics job type model
    :rtype: :class:`metrics.models.MetricsJobType`
    """
    # Fill in defaults for any argument left falsy
    job_type = job_type or job_test_utils.create_job_type()
    occurred = occurred or timezone.now()
    return MetricsJobType.objects.create(job_type=job_type, occurred=occurred, **kwargs)
def setUp(self):
    """Create published, unpublished, and superseded products across two job types."""
    django.setup()

    self.country = storage_test_utils.create_country()

    # Operational job type with a single published product
    self.job_type1 = job_test_utils.create_job_type(name='test1', category='test-1', is_operational=True)
    self.job1 = job_test_utils.create_job(job_type=self.job_type1)
    self.job_exe1 = job_test_utils.create_job_exe(job=self.job1)
    self.product1 = product_test_utils.create_product(job_exe=self.job_exe1, has_been_published=True,
                                                      is_published=True, file_name='test.txt',
                                                      countries=[self.country])

    # Non-operational job type with unpublished, superseded, and published products
    self.job_type2 = job_test_utils.create_job_type(name='test2', category='test-2', is_operational=False)
    self.job2 = job_test_utils.create_job(job_type=self.job_type2)
    self.job_exe2 = job_test_utils.create_job_exe(job=self.job2)
    self.product2a = product_test_utils.create_product(job_exe=self.job_exe2, has_been_published=True,
                                                       is_published=False, countries=[self.country])
    self.product2b = product_test_utils.create_product(job_exe=self.job_exe2, has_been_published=True,
                                                       is_published=True, is_superseded=True,
                                                       countries=[self.country])
    self.product2c = product_test_utils.create_product(job_exe=self.job_exe2, has_been_published=True,
                                                       is_published=True, countries=[self.country])
def create_requirement(job_type=None, shared_resource=None, usage=None):
    """Creates a shared resource requirement model for unit testing

    :returns: The shared resource requirement model
    :rtype: :class:`shared_resource.models.SharedResourceRequirement`
    """
    # Fill in defaults for any argument left falsy
    job_type = job_type or job_test_utils.create_job_type()
    shared_resource = shared_resource or create_resource()
    return SharedResourceRequirement.objects.create(job_type=job_type, shared_resource=shared_resource,
                                                    usage=usage)
def setUp(self):
    """Create a recipe type containing two jobs for the tests."""
    django.setup()

    self.job_type1 = job_test_utils.create_job_type()
    self.job_type2 = job_test_utils.create_job_type()

    # Recipe definition referencing both job types
    jobs = [
        {'name': 'Job 1', 'job_type': {'name': self.job_type1.name, 'version': self.job_type1.version}},
        {'name': 'Job 2', 'job_type': {'name': self.job_type2.name, 'version': self.job_type2.version}},
    ]
    self.recipe_type = recipe_test_utils.create_recipe_type(
        name='my-type', definition={'version': '1.0', 'input_data': [], 'jobs': jobs})
def setUp(self):
    """Create two workspaces and a running job execution for the tests."""
    django.setup()

    self.workspace_1 = Workspace.objects.create(name='Test workspace 1')
    self.workspace_2 = Workspace.objects.create(name='Test workspace 2', is_active=False)

    job_type = job_utils.create_job_type(name='Type 1', version='1.0',
                                         interface={'version': '1.0', 'command': 'my command'})
    event = TriggerEvent.objects.create_trigger_event('TEST', None, {}, now())
    self.job = job_utils.create_job(job_type=job_type, event=event, status='RUNNING',
                                    last_status_change=now())
    self.job_exe = job_utils.create_job_exe(job=self.job, status='RUNNING', timeout=1, queued=now())
def setUp(self):
    """Build a PARSE trigger rule and a two-job recipe type for the tests."""
    django.setup()

    self.job_type1 = job_test_utils.create_job_type()
    self.job_type2 = job_test_utils.create_job_type()
    self.workspace = storage_test_utils.create_workspace()

    self.trigger_config = {
        'version': '1.0',
        'condition': {'media_type': 'text/plain'},
        'data': {'input_data_name': 'input_file', 'workspace_name': self.workspace.name},
    }
    self.trigger_rule = trigger_test_utils.create_trigger_rule(trigger_type='PARSE', is_active=True,
                                                               configuration=self.trigger_config)

    def job_entry(name, jt):
        # Recipe definition entry referencing an existing job type
        return {'name': name, 'job_type': {'name': jt.name, 'version': jt.version}}

    self.definition = {
        'version': '1.0',
        'input_data': [],
        'jobs': [job_entry('Job 1', self.job_type1), job_entry('Job 2', self.job_type2)],
    }
    self.recipe_type = recipe_test_utils.create_recipe_type(name='my-type', definition=self.definition,
                                                            trigger_rule=self.trigger_rule)