예제 #1
0
    def setUp(self):
        """Build a legacy job type and a Seed job type, each with a RUNNING job and execution."""
        django.setup()

        exe_timeout = 60
        legacy_interface = {
            'version': '1.0',
            'command': 'command',
            'command_arguments': 'run test',
        }

        self.job_type = job_utils.create_job_type(name='Test', version='1.0', interface=legacy_interface)
        self.seed_job_type = job_utils.create_seed_job_type()

        # The jobs below require a trigger event
        self.event = TriggerEvent.objects.create_trigger_event('TEST', None, {}, now())

        self.job = job_utils.create_job(job_type=self.job_type, event=self.event, status='RUNNING')
        self.seed_job = job_utils.create_job(job_type=self.seed_job_type, event=self.event, status='RUNNING')

        self.job_exe = job_utils.create_job_exe(job=self.job, status='RUNNING', timeout=exe_timeout, queued=now())
        self.seed_exe = job_utils.create_job_exe(job=self.seed_job, status='RUNNING', timeout=exe_timeout,
                                                 queued=now())
예제 #2
0
    def setUp(self):
        """Create a workspace with mocked storage calls, a source file, and two job executions
        (one flagged non-operational)."""
        django.setup()

        self.workspace = storage_test_utils.create_workspace()
        # Avoid touching real storage during tests
        self.workspace.upload_files = MagicMock()
        self.workspace.delete_files = MagicMock()

        self.upload_dir = os.path.join('upload', 'dir')
        self.work_dir = os.path.join('work', 'dir')
        self.workspace_work_dir = ScaleFile.objects._get_workspace_work_dir(self.work_dir, self.workspace)

        self.source_file = source_test_utils.create_source(file_name=u'input1.txt', workspace=self.workspace)

        self.job_exe = job_test_utils.create_job_exe()
        self.job_exe_no = job_test_utils.create_job_exe()
        # Mark the second execution's job and job type as non-operational in one transaction
        with transaction.atomic():
            self.job_exe_no.job.is_operational = False
            self.job_exe_no.job.job_type.is_operational = False
            self.job_exe_no.job.save()
            self.job_exe_no.job.job_type.save()

        # (local path, remote path, media type) tuples used by upload tests
        self.files = [
            (u'local/1/file.txt', u'remote/1/file.txt', None),
            (u'local/2/file.json', u'remote/2/file.json', u'application/x-custom-json'),
        ]
        self.files_no = [
            (u'local/3/file.h5', u'remote/3/file.h5', u'image/x-hdf5-image'),
        ]
예제 #3
0
    def test_uuid_use_properties(self):
        """Tests setting UUIDs on products with different property values."""

        json_inputs = [
            {'name': 'property1', 'type': 'string'},
            {'name': 'property2', 'type': 'string'},
        ]
        manifest = job_test_utils.create_seed_manifest(name='test-job', inputs_json=json_inputs, command='my_command')
        manifest['job']['interface']['inputs']['files'] = []
        job_type = job_test_utils.create_seed_job_type(manifest=manifest)

        # First job: property1 = value1
        job1 = job_test_utils.create_job(job_type=job_type)
        job_exe1 = job_test_utils.create_job_exe(job=job1)
        data1 = job_exe1.job.get_input_data()
        data1.add_value(JsonValue('property1', 'value1'))
        data1.add_value(JsonValue('property2', 'value2'))
        job_exe1.job.input = convert_data_to_v6_json(data1).get_dict()

        # Second job: identical except property1 = diffvalue1
        job2 = job_test_utils.create_job(job_type=job_type)
        job_exe2 = job_test_utils.create_job_exe(job=job2)
        data2 = job_exe2.job.get_input_data()
        data2.add_value(JsonValue('property1', 'diffvalue1'))
        data2.add_value(JsonValue('property2', 'value2'))
        job_exe2.job.input = convert_data_to_v6_json(data2).get_dict()

        products1 = ProductFile.objects.upload_files(self.files, [self.source_file.id], job_exe1, self.workspace)
        products2 = ProductFile.objects.upload_files(self.files, [self.source_file.id], job_exe2, self.workspace)

        # Make sure the product files have different UUIDs
        self.assertIsNotNone(products1[0].uuid)
        self.assertIsNotNone(products1[1].uuid)
        self.assertNotEqual(products1[0].uuid, products2[0].uuid)
        self.assertNotEqual(products1[1].uuid, products2[1].uuid)
예제 #4
0
    def test_priority(self):
        """Tests successfully calling the requeue view changing the job priority."""

        job_test_utils.create_job_exe(job=self.job_2, status='FAILED')
        job_test_utils.create_job_exe(job=self.job_2, status='FAILED')

        # make sure the job is in the right state despite not actually having been run
        Job.objects.update_status([self.job_2], 'FAILED', timezone.now(), error_test_utils.create_error())
        self.job_2.num_exes = 2
        self.job_2.save()

        payload = {
            'job_ids': [self.job_2.id],
            'priority': 123,
        }
        response = self.client.post('/queue/requeue-jobs/', json.dumps(payload), 'application/json')
        result = json.loads(response.content)

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        requeued = result['results']
        self.assertEqual(len(requeued), 1)
        self.assertEqual(requeued[0]['id'], self.job_2.id)
        self.assertEqual(requeued[0]['status'], 'QUEUED')
        self.assertEqual(requeued[0]['priority'], 123)
예제 #5
0
파일: utils.py 프로젝트: AppliedIS/scale
def create_ingest(file_name='test.txt', status='TRANSFERRING', transfer_started=None, transfer_ended=None,
                  ingest_started=None, ingest_ended=None, data_started=None, data_ended=None, workspace=None,
                  strike=None, source_file=None):
    """Creates and saves an ingest model for unit testing.

    Arguments left as None (workspace, strike, source file, timestamps) are filled in with
    defaults consistent with the requested ingest status.

    :returns: the saved ingest model
    :rtype: :class:`ingest.models.Ingest`
    """
    if not workspace:
        workspace = storage_test_utils.create_workspace()
    if not strike:
        strike = create_strike()
    if not source_file:
        source_file = source_test_utils.create_source(file_name=file_name, data_started=data_started,
                                                      data_ended=data_ended, workspace=workspace)
    if not transfer_started:
        transfer_started = timezone.now()
    # Statuses beyond TRANSFERRING imply the ingest has started (and possibly ended)
    if status not in ['QUEUED', 'TRANSFERRING'] and not ingest_started:
        ingest_started = timezone.now()
    if status not in ['QUEUED', 'TRANSFERRING', 'INGESTING'] and not ingest_ended:
        ingest_ended = timezone.now()

    try:
        job_type = Ingest.objects.get_ingest_job_type()
    except Exception:
        # Bug fix: the original bare except also swallowed SystemExit/KeyboardInterrupt.
        # Fall back to a generic job type when the ingest job type is unavailable.
        job_type = job_utils.create_job_type()
    job = job_utils.create_job(job_type=job_type)
    job_utils.create_job_exe(job=job)

    return Ingest.objects.create(file_name=file_name, file_size=source_file.file_size, status=status, job=job,
                                 bytes_transferred=source_file.file_size, transfer_started=transfer_started,
                                 transfer_ended=transfer_ended, media_type='text/plain', ingest_started=ingest_started,
                                 ingest_ended=ingest_ended, workspace=workspace, strike=strike, source_file=source_file)
예제 #6
0
    def setUp(self):
        """Initialize the scheduler with one active node, one paused node, a running execution on
        each, and queue entries with varying resource requirements."""
        django.setup()

        Scheduler.objects.initialize_scheduler()

        self.node_agent = 'agent_1'
        self.node_agent_paused = 'agent_paused'

        # Active node, initialized so it can be scheduled
        self.node_model = node_test_utils.create_node(slave_id=self.node_agent)
        self.node = Node(self.node_agent, self.node_model)
        self.node._is_image_pulled = True
        self.node._initial_cleanup_completed()
        self.node._update_state()

        # Paused node
        self.paused_node_model = node_test_utils.create_node(slave_id=self.node_agent_paused)
        self.paused_node_model.is_paused = True
        self.paused_node = Node(self.node_agent_paused, self.paused_node_model)

        # One running execution per node, both with the same scheduled resources
        self.running_job_exe_1 = job_test_utils.create_job_exe(status='RUNNING', node=self.paused_node_model)
        self.running_job_exe_2 = job_test_utils.create_job_exe(status='RUNNING', node=self.node_model)
        for running_exe in (self.running_job_exe_1, self.running_job_exe_2):
            running_exe.cpus_scheduled = 2.0
            running_exe.mem_scheduled = 512.0
            running_exe.disk_in_scheduled = 100.0
            running_exe.disk_out_scheduled = 200.0
            running_exe.disk_total_scheduled = 300.0

        # Queue entries with varying resource requirements
        self.queue_1 = queue_test_utils.create_queue(cpus_required=4.0, mem_required=1024.0, disk_in_required=100.0,
                                                     disk_out_required=200.0, disk_total_required=300.0)
        self.queue_2 = queue_test_utils.create_queue(cpus_required=8.0, mem_required=512.0, disk_in_required=400.0,
                                                     disk_out_required=45.0, disk_total_required=445.0)
        self.queue_high_cpus = queue_test_utils.create_queue(cpus_required=200.0, mem_required=1024.0,
                                                             disk_in_required=100.0, disk_out_required=200.0,
                                                             disk_total_required=300.0)
        self.queue_high_mem = queue_test_utils.create_queue(cpus_required=2.0, mem_required=10240.0,
                                                            disk_in_required=100.0, disk_out_required=200.0,
                                                            disk_total_required=300.0)
        self.queue_high_disk = queue_test_utils.create_queue(cpus_required=2.0, mem_required=1024.0,
                                                             disk_in_required=10000.0, disk_out_required=20000.0,
                                                             disk_total_required=30000.0)
예제 #7
0
    def setUp(self):
        """Create one operational product and three batch-linked non-operational products,
        all descended from a single source file."""
        django.setup()

        from batch.test import utils as batch_test_utils
        from product.test import utils as product_test_utils

        self.country = storage_test_utils.create_country()
        self.src_file = source_test_utils.create_source()

        # Operational job with a single published product
        self.job_type1 = job_test_utils.create_job_type(name='test1', category='test-1', is_operational=True)
        self.job1 = job_test_utils.create_job(job_type=self.job_type1)
        self.job_exe1 = job_test_utils.create_job_exe(job=self.job1)
        self.product1 = product_test_utils.create_product(job_exe=self.job_exe1, has_been_published=True,
                                                          is_published=True, file_name='test.txt',
                                                          countries=[self.country])
        product_test_utils.create_file_link(ancestor=self.src_file, descendant=self.product1, job=self.job1,
                                            job_exe=self.job_exe1)

        # Non-operational job with three products in a batch:
        # unpublished (2a), superseded (2b), and published (2c)
        self.batch = batch_test_utils.create_batch()
        self.job_type2 = job_test_utils.create_job_type(name='test2', category='test-2', is_operational=False)
        self.job2 = job_test_utils.create_job(job_type=self.job_type2)
        self.job_exe2 = job_test_utils.create_job_exe(job=self.job2)

        self.product2a = product_test_utils.create_product(job_exe=self.job_exe2, has_been_published=True,
                                                           is_published=False, countries=[self.country])
        self.product2b = product_test_utils.create_product(job_exe=self.job_exe2, has_been_published=True,
                                                           is_published=True, is_superseded=True,
                                                           countries=[self.country])
        self.product2c = product_test_utils.create_product(job_exe=self.job_exe2, has_been_published=True,
                                                           is_published=True, countries=[self.country])
        for descendant in (self.product2a, self.product2b, self.product2c):
            product_test_utils.create_file_link(ancestor=self.src_file, descendant=descendant, job=self.job2,
                                                job_exe=self.job_exe2, batch=self.batch)
예제 #8
0
    def test_schedule_job_executions(self):
        """Scheduling two queued executions marks both RUNNING with the requested node and resources."""
        job_exe_1 = job_test_utils.create_job_exe(status='QUEUED')
        job_exe_2 = job_test_utils.create_job_exe(status='QUEUED')
        node_1 = node_test_utils.create_node()
        node_2 = node_test_utils.create_node()
        resources_1 = JobResources(cpus=1, mem=2, disk_in=3, disk_out=4, disk_total=7)
        resources_2 = JobResources(cpus=10, mem=11, disk_in=12, disk_out=13, disk_total=25)

        job_exes = JobExecution.objects.schedule_job_executions([(job_exe_1, node_1, resources_1),
                                                                 (job_exe_2, node_2, resources_2)])

        # Expected (node id, cpus, mem, disk_in, disk_out, disk_total) per execution id
        expected = {
            job_exe_1.id: (node_1.id, 1, 2, 3, 4, 7),
            job_exe_2.id: (node_2.id, 10, 11, 12, 13, 25),
        }
        for job_exe in job_exes:
            node_id, cpus, mem, disk_in, disk_out, disk_total = expected[job_exe.id]
            self.assertEqual(job_exe.status, 'RUNNING')
            self.assertEqual(job_exe.job.status, 'RUNNING')
            self.assertEqual(job_exe.node_id, node_id)
            self.assertIsNotNone(job_exe.started)
            self.assertEqual(job_exe.cpus_scheduled, cpus)
            self.assertEqual(job_exe.mem_scheduled, mem)
            self.assertEqual(job_exe.disk_in_scheduled, disk_in)
            self.assertEqual(job_exe.disk_out_scheduled, disk_out)
            self.assertEqual(job_exe.disk_total_scheduled, disk_total)
            self.assertEqual(job_exe.requires_cleanup, job_exe.job.job_type.requires_cleanup)
예제 #9
0
    def setUp(self):
        """Create four source files and link them as ancestors of three products across two recipes."""
        django.setup()

        self.src_file_1 = source_test_utils.create_source()
        self.src_file_2 = source_test_utils.create_source()
        self.src_file_3 = source_test_utils.create_source()
        self.src_file_4 = source_test_utils.create_source()

        # Recipe 1: source files 1 and 2 are each ancestors of products 1 and 2
        self.job_exe_1 = job_test_utils.create_job_exe()
        self.recipe_job_1 = recipe_test_utils.create_recipe_job(job=self.job_exe_1.job)
        self.product_1 = prod_test_utils.create_product(self.job_exe_1, has_been_published=True)
        self.product_2 = prod_test_utils.create_product(self.job_exe_1, has_been_published=True)
        for ancestor in (self.src_file_1, self.src_file_2):
            for descendant in (self.product_1, self.product_2):
                FileAncestryLink.objects.create(ancestor=ancestor, descendant=descendant, job_exe=self.job_exe_1,
                                                job=self.job_exe_1.job, recipe=self.recipe_job_1.recipe)

        # Recipe 2: source files 3 and 4 are ancestors of product 3
        self.job_exe_2 = job_test_utils.create_job_exe()
        self.recipe_job_2 = recipe_test_utils.create_recipe_job(job=self.job_exe_2.job)
        self.product_3 = prod_test_utils.create_product(self.job_exe_2, has_been_published=True)
        for ancestor in (self.src_file_3, self.src_file_4):
            FileAncestryLink.objects.create(ancestor=ancestor, descendant=self.product_3, job_exe=self.job_exe_2,
                                            job=self.job_exe_2.job, recipe=self.recipe_job_2.recipe)
예제 #10
0
    def test_calculate_negative_times(self):
        """Tests calculating times when machine clocks are out of sync."""
        day_one = datetime.datetime(2015, 1, 1, tzinfo=utc)

        job_type = job_test_utils.create_job_type()
        job = job_test_utils.create_job(job_type=job_type, status='COMPLETED', ended=day_one)
        # Execution whose "started" precedes "queued", and whose "ended" precedes both
        job_test_utils.create_job_exe(
            job=job,
            status=job.status,
            queued=datetime.datetime(2015, 1, 1, 1, 10, tzinfo=utc),
            started=datetime.datetime(2015, 1, 1, 1, 5, tzinfo=utc),
            ended=day_one,
        )

        MetricsJobType.objects.calculate(datetime.date(2015, 1, 1))

        entries = MetricsJobType.objects.filter(occurred=datetime.date(2015, 1, 1))
        self.assertEqual(len(entries), 1)

        # The negative queue durations are expected to come out as zero, not negative
        entry = entries.first()
        self.assertEqual(entry.queue_time_min, 0)
        self.assertEqual(entry.queue_time_max, 0)
예제 #11
0
파일: test_models.py 프로젝트: Carl4/scale
    def setUp(self):
        """Build two recipes' worth of products with ancestry links back to four source files."""
        django.setup()

        self.src_file_1 = source_test_utils.create_source()
        self.src_file_2 = source_test_utils.create_source()
        self.src_file_3 = source_test_utils.create_source()
        self.src_file_4 = source_test_utils.create_source()

        # Recipe 1: products 1 and 2, each descended from source files 1 and 2
        self.job_exe_1 = job_test_utils.create_job_exe()
        self.recipe_job_1 = recipe_test_utils.create_recipe_job(job=self.job_exe_1.job)
        self.product_1 = prod_test_utils.create_product(self.job_exe_1, has_been_published=True)
        self.product_2 = prod_test_utils.create_product(self.job_exe_1, has_been_published=True)
        recipe_1_links = [
            (self.src_file_1, self.product_1),
            (self.src_file_1, self.product_2),
            (self.src_file_2, self.product_1),
            (self.src_file_2, self.product_2),
        ]
        for src, product in recipe_1_links:
            FileAncestryLink.objects.create(ancestor=src, descendant=product, job_exe=self.job_exe_1,
                                            job=self.job_exe_1.job, recipe=self.recipe_job_1.recipe)

        # Recipe 2: product 3, descended from source files 3 and 4
        self.job_exe_2 = job_test_utils.create_job_exe()
        self.recipe_job_2 = recipe_test_utils.create_recipe_job(job=self.job_exe_2.job)
        self.product_3 = prod_test_utils.create_product(self.job_exe_2, has_been_published=True)
        for src in (self.src_file_3, self.src_file_4):
            FileAncestryLink.objects.create(ancestor=src, descendant=self.product_3, job_exe=self.job_exe_2,
                                            job=self.job_exe_2.job, recipe=self.recipe_job_2.recipe)
예제 #12
0
    def test_get_source_products(self):
        """Tests calling get_source_products()"""

        from batch.test import utils as batch_test_utils
        from product.test import utils as product_test_utils

        # Two products derived from the same source file, linked through different batches
        job_exe_1 = job_utils.create_job_exe()
        job_exe_2 = job_utils.create_job_exe()
        product_1 = product_test_utils.create_product(job_exe=job_exe_1, has_been_published=True,
                                                      workspace=self.workspace)
        product_2 = product_test_utils.create_product(job_exe=job_exe_2, has_been_published=True,
                                                      workspace=self.workspace)
        batch_1 = batch_test_utils.create_batch()
        batch_2 = batch_test_utils.create_batch()
        product_test_utils.create_file_link(ancestor=self.src_file, descendant=product_1, job=job_exe_1.job,
                                            job_exe=job_exe_1, batch=batch_1)
        product_test_utils.create_file_link(ancestor=self.src_file, descendant=product_2, job=job_exe_2.job,
                                            job_exe=job_exe_2, batch=batch_2)

        # Filtering by the first batch should return only the first product
        products = SourceFile.objects.get_source_products(self.src_file.id, batch_ids=[batch_1.id])
        self.assertEqual(len(products), 1)
        self.assertEqual(products[0].id, product_1.id)
예제 #13
0
파일: test_models.py 프로젝트: Carl4/scale
    def setUp(self):
        """Create a mocked workspace, a source file, and a pair of job executions, the second of
        which is marked non-operational."""
        django.setup()

        self.workspace = storage_test_utils.create_workspace()
        # Stub out real storage operations
        self.workspace.upload_files = MagicMock()
        self.workspace.delete_files = MagicMock()

        self.upload_dir = os.path.join('upload', 'dir')
        self.work_dir = os.path.join('work', 'dir')
        self.workspace_work_dir = ScaleFile.objects._get_workspace_work_dir(
            self.work_dir, self.workspace)

        self.source_file = source_test_utils.create_source(
            file_name=u'input1.txt', workspace=self.workspace)

        self.job_exe = job_test_utils.create_job_exe()
        self.job_exe_no = job_test_utils.create_job_exe()
        # Flag the second execution's job and job type as non-operational atomically
        with transaction.atomic():
            self.job_exe_no.job.is_operational = False
            self.job_exe_no.job.job_type.is_operational = False
            self.job_exe_no.job.save()
            self.job_exe_no.job.job_type.save()

        # (local path, remote path, media type) tuples for upload tests
        self.files = [
            (u'local/1/file.txt', u'remote/1/file.txt', None),
            (u'local/2/file.json', u'remote/2/file.json', u'application/x-custom-json'),
        ]
        self.files_no = [
            (u'local/3/file.h5', u'remote/3/file.h5', u'image/x-hdf5-image'),
        ]
예제 #14
0
    def test_execute_force_stop_purge(self):
        """Tests calling PurgeJobs.execute() successfully with force_stop_purge set (no action should be completed)"""

        # Create a purge-results record flagged to force-stop any purging
        source_file = storage_test_utils.create_file()
        trigger = trigger_test_utils.create_trigger_event()
        PurgeResults.objects.create(source_file_id=source_file.id, trigger_event=trigger, force_stop_purge=True)

        def num_jobs_deleted():
            return PurgeResults.objects.values_list('num_jobs_deleted', flat=True).get(
                source_file_id=source_file.id)

        self.assertEqual(num_jobs_deleted(), 0)

        exes = [job_test_utils.create_job_exe(status='COMPLETED') for _ in range(3)]

        # Build the purge message targeting all three jobs
        message = PurgeJobs()
        message.source_file_id = source_file.id
        message.trigger_id = trigger.id
        message._purge_job_ids = [exe.job.id for exe in exes]
        message.status_change = timezone.now()

        # Execute message
        self.assertTrue(message.execute())

        # Nothing should have been deleted since force_stop_purge was set
        self.assertEqual(num_jobs_deleted(), 0)
예제 #15
0
    def setUp(self):
        """Create one active node and one paused node, a running execution on each, and queue
        entries with varying resource requirements."""
        django.setup()

        self.node_agent = 'agent_1'
        self.node_agent_paused = 'agent_paused'
        self.node = node_test_utils.create_node(slave_id=self.node_agent)
        self.paused_node = node_test_utils.create_node(slave_id=self.node_agent_paused)
        self.paused_node.is_paused = True

        # One RUNNING execution per node, both with identical scheduled resources
        self.running_job_exe_1 = job_test_utils.create_job_exe(status='RUNNING', node=self.paused_node)
        self.running_job_exe_2 = job_test_utils.create_job_exe(status='RUNNING', node=self.node)
        for exe in (self.running_job_exe_1, self.running_job_exe_2):
            exe.cpus_scheduled = 2.0
            exe.mem_scheduled = 512.0
            exe.disk_in_scheduled = 100.0
            exe.disk_out_scheduled = 200.0
            exe.disk_total_scheduled = 300.0

        # Queue entries with varying resource requirements
        self.queue_1 = queue_test_utils.create_queue(cpus_required=4.0,
                                                     mem_required=1024.0,
                                                     disk_in_required=100.0,
                                                     disk_out_required=200.0,
                                                     disk_total_required=300.0)
        self.queue_2 = queue_test_utils.create_queue(cpus_required=8.0,
                                                     mem_required=512.0,
                                                     disk_in_required=400.0,
                                                     disk_out_required=45.0,
                                                     disk_total_required=445.0)
        self.queue_high_cpus = queue_test_utils.create_queue(cpus_required=200.0,
                                                             mem_required=1024.0,
                                                             disk_in_required=100.0,
                                                             disk_out_required=200.0,
                                                             disk_total_required=300.0)
        self.queue_high_mem = queue_test_utils.create_queue(cpus_required=2.0,
                                                            mem_required=10240.0,
                                                            disk_in_required=100.0,
                                                            disk_out_required=200.0,
                                                            disk_total_required=300.0)
        self.queue_high_disk = queue_test_utils.create_queue(cpus_required=2.0,
                                                             mem_required=1024.0,
                                                             disk_in_required=10000.0,
                                                             disk_out_required=20000.0,
                                                             disk_total_required=30000.0)
예제 #16
0
파일: test_views.py 프로젝트: ctc-oss/scale
    def setUp(self):
        """Create three jobs (running, pending, superseded), each file-linked to one source file."""
        django.setup()

        from product.test import utils as product_test_utils

        self.src_file = source_test_utils.create_source()

        # Running batch-creator job
        self.job_type1 = job_test_utils.create_job_type(name='scale-batch-creator', version='1.0', category='test-1')
        self.job1 = job_test_utils.create_job(job_type=self.job_type1, status='RUNNING')
        self.job_exe1 = job_test_utils.create_job_exe(job=self.job1)
        product_test_utils.create_file_link(ancestor=self.src_file, job=self.job1, job_exe=self.job_exe1)

        # Pending job
        self.job_type2 = job_test_utils.create_job_type(name='test2', version='1.0', category='test-2')
        self.job2 = job_test_utils.create_job(job_type=self.job_type2, status='PENDING')
        self.job_exe2 = job_test_utils.create_job_exe(job=self.job2)
        product_test_utils.create_file_link(ancestor=self.src_file, job=self.job2, job_exe=self.job_exe2)

        # Superseded job
        self.job3 = job_test_utils.create_job(is_superseded=True)
        self.job_exe3 = job_test_utils.create_job_exe(job=self.job3)
        product_test_utils.create_file_link(ancestor=self.src_file, job=self.job3, job_exe=self.job_exe3)
예제 #17
0
파일: utils.py 프로젝트: mnjstwins/scale
def create_ingest(file_name='test.txt', status='TRANSFERRING', transfer_started=None, transfer_ended=None,
                  ingest_started=None, ingest_ended=None, data_started=None, data_ended=None, workspace=None,
                  strike=None, scan=None, source_file=None):
    """Create and return a saved Ingest model for testing.

    Related models (workspace, source file, ingest job) and timestamps that the
    caller does not supply are filled in with sensible defaults, so tests only
    need to specify the fields they actually care about.

    :returns: a saved Ingest model in the requested status
    """
    if not workspace:
        workspace = storage_test_utils.create_workspace()
    if not source_file:
        source_file = source_test_utils.create_source(file_name=file_name, data_started=data_started,
                                                      data_ended=data_ended, workspace=workspace)
    if not transfer_started:
        transfer_started = timezone.now()
    # An ingest that has progressed past these states must have the
    # corresponding timestamps populated for the model to be consistent.
    if status not in ['QUEUED', 'TRANSFERRING'] and not ingest_started:
        ingest_started = timezone.now()
    if status not in ['QUEUED', 'TRANSFERRING', 'INGESTING'] and not ingest_ended:
        ingest_ended = timezone.now()

    try:
        job_type = Ingest.objects.get_ingest_job_type()
    except Exception:
        # Narrowed from a bare except: never swallow SystemExit/KeyboardInterrupt.
        # Fall back to a generic job type when the ingest job type is unavailable.
        job_type = job_utils.create_job_type()
    job = job_utils.create_job(job_type=job_type)
    job_utils.create_job_exe(job=job)

    return Ingest.objects.create(file_name=file_name, file_size=source_file.file_size, status=status, job=job,
                                 bytes_transferred=source_file.file_size, transfer_started=transfer_started,
                                 transfer_ended=transfer_ended, media_type='text/plain', ingest_started=ingest_started,
                                 ingest_ended=ingest_ended, data_started=source_file.data_started,
                                 data_ended=source_file.data_ended, workspace=workspace, strike=strike, scan=scan,
                                 source_file=source_file)
예제 #18
0
    def testResourceRemainingUnrelatedJob(self):
        """An execution of an unrelated job type must leave resource_2 untouched."""
        unrelated_job = job_test_utils.create_job(self.job_type_1)
        job_test_utils.create_job_exe(job=unrelated_job)

        self.assertEqual(SharedResource.objects.get_resource_remaining(self.resource_2), RESOURCE_LIMIT)
예제 #19
0
    def test_requeue_failed(self):
        """Tests calling the requeue view successfully for a job that was previously queued."""

        # Two failed executions so the job appears to have actually run.
        for _ in range(2):
            job_test_utils.create_job_exe(job=self.job_2, status='FAILED')

        # Force the job model into the FAILED state.
        Job.objects.update_status([self.job_2], 'FAILED', timezone.now(), error_test_utils.create_error())
        self.job_2.num_exes = 2
        self.job_2.save()

        queue_count_before = Queue.objects.count()

        url = rest_util.get_url('/queue/requeue-jobs/')
        response = self.client.post(url, json.dumps({'job_ids': [self.job_2.id]}), 'application/json')
        self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)

        result = json.loads(response.content)
        self.assertEqual(len(result['results']), 1)
        self.assertEqual(result['results'][0]['id'], self.job_2.id)
        self.assertEqual(result['results'][0]['status'], 'QUEUED')

        # Exactly one new queue entry should have been created.
        self.assertEqual(Queue.objects.count() - queue_count_before, 1)
예제 #20
0
    def test_uuid_use_properties(self):
        """Tests setting UUIDs on products with different property values."""
        job_type = job_test_utils.create_job_type()

        # First execution: property1 = 'value1'.
        job1 = job_test_utils.create_job(job_type=job_type)
        job_exe1 = job_test_utils.create_job_exe(job=job1)
        data1 = job_exe1.job.get_job_data()
        data1.add_property_input('property1', 'value1')
        data1.add_property_input('property2', 'value2')
        job_exe1.job.data = data1.get_dict()

        # Second execution: property1 differs, property2 matches.
        job2 = job_test_utils.create_job(job_type=job_type)
        job_exe2 = job_test_utils.create_job_exe(job=job2)
        data2 = job_exe2.job.get_job_data()
        data2.add_property_input('property1', 'diffvalue1')
        data2.add_property_input('property2', 'value2')
        job_exe2.job.data = data2.get_dict()

        products1 = ProductFile.objects.upload_files(self.files, [self.source_file.id], job_exe1, self.workspace)
        products2 = ProductFile.objects.upload_files(self.files, [self.source_file.id], job_exe2, self.workspace)

        # Differing property values must yield differing product UUIDs.
        self.assertIsNotNone(products1[0].uuid)
        self.assertIsNotNone(products1[1].uuid)
        self.assertNotEqual(products1[0].uuid, products2[0].uuid)
        self.assertNotEqual(products1[1].uuid, products2[1].uuid)
예제 #21
0
    def test_execute(self):
        """Tests calling UnpublishJobs.execute() successfully."""

        when = now()
        exes = [job_test_utils.create_job_exe(status='COMPLETED') for _ in range(2)]
        published = [product_test_utils.create_product(job_exe=exe, is_published=True) for exe in exes]

        # Build the message with both jobs.
        message = UnpublishJobs()
        message.when = when
        for exe in exes:
            if message.can_fit_more():
                message.add_job(exe.job_id)

        # Execute the message.
        self.assertTrue(message.execute())

        # Both products should now be unpublished with the message timestamp.
        products = ProductFile.objects.filter(id__in=[p.id for p in published])
        self.assertEqual(len(products), 2)
        for product in products:
            self.assertFalse(product.is_published)
            self.assertEqual(product.unpublished, when)
예제 #22
0
    def setUp(self):
        """Create two job executions and eight products for last-modified window queries.

        Products 4-8 are created between ``last_modified_start`` and
        ``last_modified_end``; the ``time.sleep`` calls force distinct
        last-modified timestamps between groups so window filters are reliable.
        """
        django.setup()

        self.job_exe_1 = job_test_utils.create_job_exe()
        self.job_type_1_id = self.job_exe_1.job.job_type.id
        self.job_exe_2 = job_test_utils.create_job_exe()
        self.job_type_2_id = self.job_exe_2.job.job_type.id

        # Products created before the captured window begins.
        self.product_1 = prod_test_utils.create_product()
        self.product_2 = prod_test_utils.create_product(
            has_been_published=True)
        self.product_3 = prod_test_utils.create_product(
            self.job_exe_2, has_been_published=True)

        # Small delay guarantees a strictly later timestamp for the window start.
        time.sleep(0.001)
        self.last_modified_start = now()
        self.product_4 = prod_test_utils.create_product()
        self.product_5 = prod_test_utils.create_product(self.job_exe_2)
        self.product_6 = prod_test_utils.create_product(
            self.job_exe_2, has_been_published=True)
        time.sleep(0.001)
        self.product_7 = prod_test_utils.create_product(
            self.job_exe_1, has_been_published=True)
        time.sleep(0.001)
        self.product_8 = prod_test_utils.create_product(
            has_been_published=True)
        self.last_modified_end = now()
예제 #23
0
    def test_json(self):
        """Tests round-tripping an UnpublishJobs message through JSON."""

        when = now()
        exe_a = job_test_utils.create_job_exe(status='COMPLETED')
        exe_b = job_test_utils.create_job_exe(status='COMPLETED')
        prod_a = product_test_utils.create_product(job_exe=exe_a, is_published=True)
        prod_b = product_test_utils.create_product(job_exe=exe_b, is_published=True)

        # Build the message with both jobs.
        message = UnpublishJobs()
        message.when = when
        for exe in (exe_a, exe_b):
            if message.can_fit_more():
                message.add_job(exe.job_id)

        # Serialize, deserialize, then execute the reconstructed message.
        new_message = UnpublishJobs.from_json(message.to_json())
        self.assertTrue(new_message.execute())

        products = ProductFile.objects.filter(id__in=[prod_a.id, prod_b.id])
        self.assertEqual(len(products), 2)
        for product in products:
            self.assertFalse(product.is_published)
            self.assertEqual(product.unpublished, when)
예제 #24
0
    def testResourceRemainingReduceByJob(self):
        """A running execution of job_type_1 reduces resource_1 by its usage."""
        consuming_job = job_test_utils.create_job(self.job_type_1)
        job_test_utils.create_job_exe(job=consuming_job)

        self.assertEqual(SharedResource.objects.get_resource_remaining(self.resource_1),
                         RESOURCE_LIMIT - JOB_TYPE_1_USAGE)
예제 #25
0
    def test_requeue_failed(self):
        """Tests calling the requeue view successfully for a job that was previously queued."""

        # Two failed executions so the job appears to have actually run.
        for _ in range(2):
            job_test_utils.create_job_exe(job=self.job_2, status='FAILED')

        # Force the job model into the FAILED state.
        Job.objects.update_status([self.job_2], 'FAILED', timezone.now(), error_test_utils.create_error())
        self.job_2.num_exes = 2
        self.job_2.save()

        queue_count_before = Queue.objects.count()

        response = self.client.post('/queue/requeue-job/', json.dumps({'job_id': self.job_2.id}), 'application/json')
        result = json.loads(response.content)

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(result['id'], self.job_2.id)
        self.assertEqual(result['status'], 'QUEUED')
        self.assertEqual(len(result['job_exes']), 3)

        # Re-fetch the job and verify exactly one new queue entry exists.
        Job.objects.get(id=self.job_2.id)
        self.assertEqual(Queue.objects.count() - queue_count_before, 1)
예제 #26
0
    def test_priority(self):
        """Tests successfully calling the requeue view changing the job priority."""

        # Two failed executions so the job appears to have actually run.
        for _ in range(2):
            job_test_utils.create_job_exe(job=self.job_2, status='FAILED')

        # Force the job model into the FAILED state.
        Job.objects.update_status([self.job_2], 'FAILED', timezone.now(), error_test_utils.create_error())
        self.job_2.num_exes = 2
        self.job_2.save()

        payload = {'job_ids': [self.job_2.id], 'priority': 123}
        response = self.client.post(rest_util.get_url('/queue/requeue-jobs/'), json.dumps(payload), 'application/json')
        self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)

        result = json.loads(response.content)
        self.assertEqual(len(result['results']), 1)
        entry = result['results'][0]
        self.assertEqual(entry['id'], self.job_2.id)
        self.assertEqual(entry['status'], 'QUEUED')
        # The requested priority override must be reflected in the response.
        self.assertEqual(entry['priority'], 123)
예제 #27
0
    def test_publish_products_unpublish_superseded(self):
        """Tests calling ProductFileManager.publish_products() where the job has superseded job products that must be
        unpublished
        """

        # Job 1 is superseded by Job 2 and Job 2 is superseded by Job 3
        job_exe_1 = job_test_utils.create_job_exe()
        product_1_a = prod_test_utils.create_product(job_exe=job_exe_1, has_been_published=True, is_published=True)
        product_1_b = prod_test_utils.create_product(job_exe=job_exe_1, has_been_published=True, is_published=True)
        job_type = job_test_utils.create_seed_job_type()
        event = trigger_test_utils.create_trigger_event()
        job_type_rev = JobTypeRevision.objects.get_by_natural_key(job_type, job_type.revision_num)
        # Job 2 supersedes Job 1; mark Job 1 superseded before creating its products' successor.
        job_2 = Job.objects.create_job_v6(job_type_rev, event_id=event.id, superseded_job=job_exe_1.job)
        job_2.save()
        job_exe_2 = job_test_utils.create_job_exe(job=job_2)
        Job.objects.supersede_jobs([job_exe_1.job.id], now())
        product_2_a = prod_test_utils.create_product(job_exe=job_exe_2, has_been_published=True, is_published=True)
        product_2_b = prod_test_utils.create_product(job_exe=job_exe_2, has_been_published=True, is_published=True)
        # Job 3 supersedes Job 2; its products start unpublished.
        job_3 = Job.objects.create_job_v6(job_type_rev, event_id=event.id, superseded_job=job_exe_2.job)
        job_3.save()
        job_exe_3 = job_test_utils.create_job_exe(job=job_3)
        Job.objects.supersede_jobs([job_2.id], now())
        product_3_a = prod_test_utils.create_product(job_exe=job_exe_3)
        product_3_b = prod_test_utils.create_product(job_exe=job_exe_3)

        # Publishing Job 3's products should also unpublish the whole superseded chain.
        when = now()
        ProductFile.objects.publish_products(job_exe_3, job_3, when)

        # Make sure products from Job 1 and Job 2 are unpublished
        product_1_a = ScaleFile.objects.get(id=product_1_a.id)
        product_1_b = ScaleFile.objects.get(id=product_1_b.id)
        product_2_a = ScaleFile.objects.get(id=product_2_a.id)
        product_2_b = ScaleFile.objects.get(id=product_2_b.id)
        self.assertTrue(product_1_a.has_been_published)
        self.assertFalse(product_1_a.is_published)
        self.assertEqual(product_1_a.unpublished, when)
        self.assertTrue(product_1_b.has_been_published)
        self.assertFalse(product_1_b.is_published)
        self.assertEqual(product_1_b.unpublished, when)
        self.assertTrue(product_2_a.has_been_published)
        self.assertFalse(product_2_a.is_published)
        self.assertEqual(product_2_a.unpublished, when)
        self.assertTrue(product_2_b.has_been_published)
        self.assertFalse(product_2_b.is_published)
        self.assertEqual(product_2_b.unpublished, when)

        # Make sure Job 3 products are published
        product_3_a = ScaleFile.objects.get(id=product_3_a.id)
        product_3_b = ScaleFile.objects.get(id=product_3_b.id)
        self.assertTrue(product_3_a.has_been_published)
        self.assertTrue(product_3_a.is_published)
        self.assertFalse(product_3_a.is_superseded)
        self.assertEqual(product_3_a.published, when)
        self.assertIsNone(product_3_a.superseded)
        self.assertTrue(product_3_b.has_been_published)
        self.assertTrue(product_3_b.is_published)
        self.assertFalse(product_3_b.is_superseded)
        self.assertEqual(product_3_b.published, when)
        self.assertIsNone(product_3_b.superseded)
예제 #28
0
    def test_successful_with_full_recipe(self):
        """Tests calling QueueManager.handle_job_completion() successfully with all jobs in a recipe."""

        # Queue the recipe
        handler = Queue.objects.queue_new_recipe(self.recipe_type, self.data,
                                                 self.event)

        # Fake out completing Job 1
        job_1 = RecipeJob.objects.select_related('job').get(
            recipe_id=handler.recipe.id, job_name='Job 1').job
        job_exe_1 = job_test_utils.create_job_exe(job=job_1, status='RUNNING')
        output_file_1 = product_test_utils.create_product(
            job_exe=job_exe_1, workspace=self.workspace)
        output_file_2 = product_test_utils.create_product(
            job_exe=job_exe_1, workspace=self.workspace)

        # Record Job 1's output so downstream jobs can consume it.
        results = JobResults()
        results.add_file_list_parameter('Test Output 1',
                                        [output_file_1.id, output_file_2.id])
        job_exe_output_1 = JobExecutionOutput()
        job_exe_output_1.job_exe_id = job_exe_1.id
        job_exe_output_1.job_id = job_exe_1.job_id
        job_exe_output_1.job_type_id = job_exe_1.job_type_id
        job_exe_output_1.exe_num = job_exe_1.exe_num
        job_exe_output_1.output = results.get_dict()
        job_exe_output_1.save()

        # Bypass the normal state machine: force the job to RUNNING directly.
        Job.objects.filter(pk=job_1.id).update(status='RUNNING')

        Queue.objects.handle_job_completion(job_1.id, job_1.num_exes, now())

        # Fake out completing Job 2
        job_2 = RecipeJob.objects.select_related('job').get(
            recipe_id=handler.recipe.id, job_name='Job 2').job
        job_exe_2 = job_test_utils.create_job_exe(job=job_2, status='RUNNING')
        output_file_1 = product_test_utils.create_product(
            job_exe=job_exe_2, workspace=self.workspace)
        output_file_2 = product_test_utils.create_product(
            job_exe=job_exe_2, workspace=self.workspace)

        # Record Job 2's output as well.
        results = JobResults()
        results.add_file_list_parameter('Test Output 2',
                                        [output_file_1.id, output_file_2.id])
        job_exe_output_2 = JobExecutionOutput()
        job_exe_output_2.job_exe_id = job_exe_2.id
        job_exe_output_2.job_id = job_exe_2.job_id
        job_exe_output_2.job_type_id = job_exe_2.job_type_id
        job_exe_output_2.exe_num = job_exe_2.exe_num
        job_exe_output_2.output = results.get_dict()
        job_exe_output_2.save()

        Job.objects.filter(pk=job_2.id).update(status='RUNNING')

        # Call method to test
        Queue.objects.handle_job_completion(job_2.id, job_2.num_exes, now())

        # Make sure final recipe attributes are updated
        recipe = Recipe.objects.get(pk=handler.recipe.id)
        self.assertIsNotNone(recipe.completed)
예제 #29
0
파일: test_models.py 프로젝트: wong-j/scale
    def testResourceRemainingUnrelatedJob(self):
        """An execution of an unrelated job type must leave resource_2 untouched."""
        unrelated_job = job_test_utils.create_job(self.job_type_1)
        job_test_utils.create_job_exe(job=unrelated_job)

        remaining = SharedResource.objects.get_resource_remaining(self.resource_2)
        self.assertEqual(remaining, RESOURCE_LIMIT)
예제 #30
0
파일: test_models.py 프로젝트: wong-j/scale
    def testResourceRemainingReduceByJob(self):
        """A running execution of job_type_1 reduces resource_1 by its usage."""
        consuming_job = job_test_utils.create_job(self.job_type_1)
        job_test_utils.create_job_exe(job=consuming_job)

        remaining = SharedResource.objects.get_resource_remaining(self.resource_1)
        self.assertEqual(remaining, RESOURCE_LIMIT - JOB_TYPE_1_USAGE)
예제 #31
0
    def test_publish_products_unpublish_superseded(self):
        """Tests calling ProductFileManager.publish_products() where the job has superseded job products that must be
        unpublished
        """

        # Job 1 is superseded by Job 2 and Job 2 is superseded by Job 3
        job_exe_1 = job_test_utils.create_job_exe()
        product_1_a = prod_test_utils.create_product(job_exe=job_exe_1, has_been_published=True, is_published=True)
        product_1_b = prod_test_utils.create_product(job_exe=job_exe_1, has_been_published=True, is_published=True)
        job_type = job_test_utils.create_job_type()
        event = trigger_test_utils.create_trigger_event()
        # Job 2 supersedes Job 1; mark Job 1 superseded before publishing.
        job_2 = Job.objects.create_job(job_type=job_type, event=event, superseded_job=job_exe_1.job)
        job_2.save()
        job_exe_2 = job_test_utils.create_job_exe(job=job_2)
        Job.objects.supersede_jobs([job_exe_1.job], now())
        product_2_a = prod_test_utils.create_product(job_exe=job_exe_2, has_been_published=True, is_published=True)
        product_2_b = prod_test_utils.create_product(job_exe=job_exe_2, has_been_published=True, is_published=True)
        # Job 3 supersedes Job 2; its products start unpublished.
        job_3 = Job.objects.create_job(job_type=job_type, event=event, superseded_job=job_exe_2.job)
        job_3.save()
        job_exe_3 = job_test_utils.create_job_exe(job=job_3)
        Job.objects.supersede_jobs([job_2], now())
        product_3_a = prod_test_utils.create_product(job_exe=job_exe_3)
        product_3_b = prod_test_utils.create_product(job_exe=job_exe_3)

        # Publishing Job 3's products should also unpublish the whole superseded chain.
        when = now()
        ProductFile.objects.publish_products(job_exe_3, when)

        # Make sure products from Job 1 and Job 2 are unpublished
        product_1_a = ProductFile.objects.get(id=product_1_a.id)
        product_1_b = ProductFile.objects.get(id=product_1_b.id)
        product_2_a = ProductFile.objects.get(id=product_2_a.id)
        product_2_b = ProductFile.objects.get(id=product_2_b.id)
        self.assertTrue(product_1_a.has_been_published)
        self.assertFalse(product_1_a.is_published)
        self.assertEqual(product_1_a.unpublished, when)
        self.assertTrue(product_1_b.has_been_published)
        self.assertFalse(product_1_b.is_published)
        self.assertEqual(product_1_b.unpublished, when)
        self.assertTrue(product_2_a.has_been_published)
        self.assertFalse(product_2_a.is_published)
        self.assertEqual(product_2_a.unpublished, when)
        self.assertTrue(product_2_b.has_been_published)
        self.assertFalse(product_2_b.is_published)
        self.assertEqual(product_2_b.unpublished, when)

        # Make sure Job 3 products are published
        product_3_a = ProductFile.objects.get(id=product_3_a.id)
        product_3_b = ProductFile.objects.get(id=product_3_b.id)
        self.assertTrue(product_3_a.has_been_published)
        self.assertTrue(product_3_a.is_published)
        self.assertFalse(product_3_a.is_superseded)
        self.assertEqual(product_3_a.published, when)
        self.assertIsNone(product_3_a.superseded)
        self.assertTrue(product_3_b.has_been_published)
        self.assertTrue(product_3_b.is_published)
        self.assertFalse(product_3_b.is_superseded)
        self.assertEqual(product_3_b.published, when)
        self.assertIsNone(product_3_b.superseded)
예제 #32
0
    def setUp(self):
        """Build a scheduler, two nodes, five running executions, and five queue entries."""
        django.setup()

        Scheduler.objects.initialize_scheduler()

        # One active node and one paused node.
        self.node_agent = 'agent_1'
        self.node_agent_paused = 'agent_paused'
        self.node_model = node_test_utils.create_node(slave_id=self.node_agent)
        self.node = Node(self.node_agent, self.node_model)
        self.node.initial_cleanup_completed()
        self.paused_node_model = node_test_utils.create_node(slave_id=self.node_agent_paused)
        self.paused_node_model.is_paused = True
        self.paused_node = Node(self.node_agent_paused, self.paused_node_model)

        def running_exe(cpus, mem, disk_in, disk_out, disk_total):
            # Create a RUNNING execution with the given scheduled resources.
            exe = job_test_utils.create_job_exe(status='RUNNING')
            exe.cpus_scheduled = cpus
            exe.mem_scheduled = mem
            exe.disk_in_scheduled = disk_in
            exe.disk_out_scheduled = disk_out
            exe.disk_total_scheduled = disk_total
            return exe

        # Baseline executions plus one with extreme CPU, memory, and disk each.
        self.running_job_exe_1 = running_exe(2.0, 512.0, 100.0, 200.0, 300.0)
        self.running_job_exe_2 = running_exe(4.0, 1024.0, 500.0, 50.0, 550.0)
        self.running_job_exe_high_cpus = running_exe(200.0, 512.0, 100.0, 200.0, 300.0)
        self.running_job_exe_high_mem = running_exe(2.0, 1048576.0, 100.0, 200.0, 300.0)
        self.running_job_exe_high_disk = running_exe(2.0, 512.0, 10000.0, 20000.0, 30000.0)

        # Matching queue entries, again with extreme CPU/memory/disk variants.
        self.queue_1 = queue_test_utils.create_queue(cpus_required=2.0, mem_required=1024.0, disk_in_required=100.0,
                                                     disk_out_required=200.0, disk_total_required=300.0)
        self.queue_2 = queue_test_utils.create_queue(cpus_required=8.0, mem_required=512.0, disk_in_required=400.0,
                                                     disk_out_required=45.0, disk_total_required=445.0)
        self.queue_high_cpus = queue_test_utils.create_queue(cpus_required=200.0, mem_required=1024.0,
                                                             disk_in_required=100.0, disk_out_required=200.0,
                                                             disk_total_required=300.0)
        self.queue_high_mem = queue_test_utils.create_queue(cpus_required=2.0, mem_required=10240.0,
                                                            disk_in_required=100.0, disk_out_required=200.0,
                                                            disk_total_required=300.0)
        self.queue_high_disk = queue_test_utils.create_queue(cpus_required=2.0, mem_required=1024.0,
                                                             disk_in_required=10000.0, disk_out_required=20000.0,
                                                             disk_total_required=30000.0)
예제 #33
0
    def testJobTypesForAcessWithJustEnoughUsage(self):
        """With two executions of job_type_1 running, job_type_1a must still be runnable."""
        for _ in range(2):
            job = job_test_utils.create_job(self.job_type_1)
            job_test_utils.create_job_exe(job=job)

        runnable = SharedResource.objects.runnable_job_types(self.node_with_special_access)
        self.assertIn(self.job_type_1a, runnable)
예제 #34
0
    def test_calculate_filtered(self):
        """Tests generating metrics with only certain job executions."""
        statuses = ('QUEUED', 'RUNNING', 'FAILED', 'COMPLETED', 'CANCELED')

        # Jobs with no executions and no ended date should not produce metrics.
        for job_status in statuses:
            job_test_utils.create_job(status=job_status)

        # One job + execution per status, all ended on the target date.
        when = datetime.datetime(2015, 1, 1, tzinfo=utc)
        for job_status in statuses:
            job = job_test_utils.create_job(status=job_status, ended=when)
            job_test_utils.create_job_exe(job=job, status=job.status, ended=job.ended)

        MetricsJobType.objects.calculate(when)
        entries = MetricsJobType.objects.filter(occurred=when)

        # Only three of the five statuses yield metrics entries.
        self.assertEqual(len(entries), 3)
예제 #35
0
파일: test_models.py 프로젝트: wong-j/scale
    def testJobTypesForAcessWithJustEnoughUsage(self):
        job1 = job_test_utils.create_job(self.job_type_1)
        job_test_utils.create_job_exe(job=job1)

        job2 = job_test_utils.create_job(self.job_type_1)
        job_test_utils.create_job_exe(job=job2)

        runnable_job_types = SharedResource.objects.runnable_job_types(
            self.node_with_special_access)
        self.assertIn(self.job_type_1a, runnable_job_types)
예제 #36
0
    def test_calculate_repeated(self):
        """Regenerating metrics for a date that already has metrics must not duplicate them."""
        occurred = datetime.date(2015, 1, 1)
        job = job_test_utils.create_job(status='COMPLETED', ended=datetime.datetime(2015, 1, 1))
        job_test_utils.create_job_exe(job=job, status=job.status, ended=job.ended)

        # Calculate twice for the same day; the second run should replace, not append.
        MetricsJobType.objects.calculate(occurred)
        MetricsJobType.objects.calculate(occurred)

        self.assertEqual(len(MetricsJobType.objects.filter(occurred=occurred)), 1)
예제 #37
0
    def test_calculate_repeated(self):
        """Regenerating metrics for a date that already has metrics must not duplicate them."""
        occurred = datetime.date(2015, 1, 1)
        job = job_test_utils.create_job(status='COMPLETED', ended=datetime.datetime(2015, 1, 1, tzinfo=utc))
        job_test_utils.create_job_exe(job=job, status=job.status, ended=job.ended)

        # Calculate twice for the same day; the second run should replace, not append.
        MetricsJobType.objects.calculate(occurred)
        MetricsJobType.objects.calculate(occurred)

        self.assertEqual(len(MetricsJobType.objects.filter(occurred=occurred)), 1)
예제 #38
0
    def test_calculate_repeated(self):
        """Regenerating error metrics for a date that already has metrics must not duplicate them."""
        occurred = datetime.date(2015, 1, 1)
        error = error_test_utils.create_error(is_builtin=True)
        job = job_test_utils.create_job(status='FAILED', error=error, ended=datetime.datetime(2015, 1, 1, tzinfo=utc))
        job_test_utils.create_job_exe(job=job, error=error, status=job.status, ended=job.ended)

        # Calculate twice for the same day; the second run should replace, not append.
        MetricsError.objects.calculate(occurred)
        MetricsError.objects.calculate(occurred)

        self.assertEqual(len(MetricsError.objects.filter(occurred=occurred)), 1)
예제 #39
0
    def test_exception_with_canceled_job(self):
        """Tests calling QueueManager.handle_job_cancellation() with a canceled job that results in an exception."""

        # A job already in CANCELED with a canceled execution cannot be canceled again.
        job = job_test_utils.create_job(status='CANCELED')
        job_test_utils.create_job_exe(job=job, status='FAILED')
        time.sleep(0.001)  # guarantee the second execution gets a later timestamp
        job_test_utils.create_job_exe(job=job, status='CANCELED')

        # Cancelling again must raise.
        self.assertRaises(Exception, Queue.objects.handle_job_cancellation, job.id, now())
예제 #40
0
    def test_calculate_repeated(self):
        """Tests regenerating metrics for a date that already has metrics.

        Running the calculation twice for the same day must leave exactly one
        metrics entry for that day.
        """
        error = error_test_utils.create_error(is_builtin=True)
        # Use a timezone-aware datetime; the sibling tests in this file pass
        # tzinfo=utc, and Django warns (or errors with USE_TZ) when naive
        # datetimes are stored in aware fields.
        job = job_test_utils.create_job(status='FAILED', error=error, ended=datetime.datetime(2015, 1, 1, tzinfo=utc))
        job_test_utils.create_job_exe(job=job, error=error, status=job.status, ended=job.ended)

        MetricsError.objects.calculate(datetime.date(2015, 1, 1))
        MetricsError.objects.calculate(datetime.date(2015, 1, 1))
        entries = MetricsError.objects.filter(occurred=datetime.date(2015, 1, 1))

        self.assertEqual(len(entries), 1)
예제 #41
0
    def test_calculate_stats(self):
        """Tests calculating individual statistics for a metrics entry.

        Two FAILED jobs share one builtin error, and one additional job fails
        for each of a SYSTEM, DATA, and ALGORITHM error, so the calculation
        should produce four MetricsError entries for the day -- one per
        distinct error, with the shared error counted twice.
        """
        error = error_test_utils.create_error(is_builtin=True)
        # NOTE(review): the job `ended` values below are naive datetimes while
        # the execution timestamps are timezone-aware -- confirm USE_TZ intent
        job1 = job_test_utils.create_job(error=error, status='FAILED', ended=datetime.datetime(2015, 1, 1))
        job_test_utils.create_job_exe(
            job=job1, error=error, status=job1.status,
            queued=datetime.datetime(2015, 1, 1, tzinfo=timezone.utc),
            started=datetime.datetime(2015, 1, 1, 0, 10, 2, tzinfo=timezone.utc),
            pre_started=datetime.datetime(2015, 1, 1, 0, 30, 4, tzinfo=timezone.utc),
            pre_completed=datetime.datetime(2015, 1, 1, 1, 6, tzinfo=timezone.utc),
            job_started=datetime.datetime(2015, 1, 1, 1, 40, 8, tzinfo=timezone.utc),
            job_completed=datetime.datetime(2015, 1, 1, 2, 30, 10, tzinfo=timezone.utc),
            post_started=datetime.datetime(2015, 1, 1, 3, 30, 12, tzinfo=timezone.utc),
            post_completed=datetime.datetime(2015, 1, 1, 4, 40, 14, tzinfo=timezone.utc),
            ended=datetime.datetime(2015, 1, 1, 6, 0, 16, tzinfo=timezone.utc),
        )
        # Second job with the same error; its phases take longer than job1's
        job2 = job_test_utils.create_job(error=error, status='FAILED', ended=datetime.datetime(2015, 1, 1))
        job_test_utils.create_job_exe(
            job=job2, error=error, status=job2.status,
            queued=datetime.datetime(2015, 1, 1, tzinfo=timezone.utc),
            started=datetime.datetime(2015, 1, 1, 2, 10, 2, tzinfo=timezone.utc),
            pre_started=datetime.datetime(2015, 1, 1, 4, 30, 4, tzinfo=timezone.utc),
            pre_completed=datetime.datetime(2015, 1, 1, 6, 0, 8, tzinfo=timezone.utc),
            job_started=datetime.datetime(2015, 1, 1, 8, 40, 14, tzinfo=timezone.utc),
            job_completed=datetime.datetime(2015, 1, 1, 10, 30, 22, tzinfo=timezone.utc),
            post_started=datetime.datetime(2015, 1, 1, 12, 30, 32, tzinfo=timezone.utc),
            post_completed=datetime.datetime(2015, 1, 1, 14, 40, 44, tzinfo=timezone.utc),
            ended=datetime.datetime(2015, 1, 1, 16, 0, 58, tzinfo=timezone.utc),
        )

        # One failure each in the remaining error categories
        sys_error = error_test_utils.create_error(category='SYSTEM', is_builtin=True)
        job3a = job_test_utils.create_job(error=sys_error, status='FAILED', ended=datetime.datetime(2015, 1, 1))
        job_test_utils.create_job_exe(job=job3a, status=job3a.status, ended=job3a.ended, error=sys_error)

        data_error = error_test_utils.create_error(category='DATA', is_builtin=True)
        job3b = job_test_utils.create_job(error=data_error, status='FAILED', ended=datetime.datetime(2015, 1, 1))
        job_test_utils.create_job_exe(job=job3b, status=job3b.status, ended=job3b.ended, error=data_error)

        algo_error = error_test_utils.create_error(category='ALGORITHM', is_builtin=True)
        job3c = job_test_utils.create_job(error=algo_error, status='FAILED', ended=datetime.datetime(2015, 1, 1))
        job_test_utils.create_job_exe(job=job3c, status=job3c.status, ended=job3c.ended, error=algo_error)

        MetricsError.objects.calculate(datetime.date(2015, 1, 1))

        entries = MetricsError.objects.filter(occurred=datetime.date(2015, 1, 1))
        self.assertEqual(len(entries), 4)

        # The shared error accounts for two failures; every other error for one
        for entry in entries:
            self.assertEqual(entry.occurred, datetime.date(2015, 1, 1))
            if entry.error == error:
                self.assertEqual(entry.total_count, 2)
            else:
                self.assertEqual(entry.total_count, 1)
예제 #42
0
    def setUp(self):
        """Creates two nodes (one paused), one running execution per node, and
        queue entries with varying resource requirements, including three that
        exceed capacity in exactly one dimension."""
        django.setup()

        self.node_agent = 'agent_1'
        self.node_agent_paused = 'agent_paused'
        self.node = node_test_utils.create_node(slave_id=self.node_agent)
        self.paused_node = node_test_utils.create_node(slave_id=self.node_agent_paused)
        self.paused_node.is_paused = True

        # One running execution per node, both with identical scheduled resources
        self.running_job_exe_1 = job_test_utils.create_job_exe(status='RUNNING', node=self.paused_node)
        self.running_job_exe_2 = job_test_utils.create_job_exe(status='RUNNING', node=self.node)
        for running_exe in (self.running_job_exe_1, self.running_job_exe_2):
            running_exe.cpus_scheduled = 2.0
            running_exe.mem_scheduled = 512.0
            running_exe.disk_in_scheduled = 100.0
            running_exe.disk_out_scheduled = 200.0
            running_exe.disk_total_scheduled = 300.0

        # Two entries that fit within a node's resources
        self.queue_1 = queue_test_utils.create_queue(cpus_required=4.0, mem_required=1024.0,
                                                     disk_in_required=100.0, disk_out_required=200.0,
                                                     disk_total_required=300.0)
        self.queue_2 = queue_test_utils.create_queue(cpus_required=8.0, mem_required=512.0,
                                                     disk_in_required=400.0, disk_out_required=45.0,
                                                     disk_total_required=445.0)
        # Entries that each exceed capacity in a single resource dimension
        self.queue_high_cpus = queue_test_utils.create_queue(cpus_required=200.0, mem_required=1024.0,
                                                             disk_in_required=100.0, disk_out_required=200.0,
                                                             disk_total_required=300.0)
        self.queue_high_mem = queue_test_utils.create_queue(cpus_required=2.0, mem_required=10240.0,
                                                            disk_in_required=100.0, disk_out_required=200.0,
                                                            disk_total_required=300.0)
        self.queue_high_disk = queue_test_utils.create_queue(cpus_required=2.0, mem_required=1024.0,
                                                             disk_in_required=10000.0, disk_out_required=20000.0,
                                                             disk_total_required=30000.0)
예제 #43
0
File: test_views.py  Project: Carl4/scale
    def setUp(self):
        """Creates an operational and a non-operational job type, each with a
        job, an execution, and a published product."""
        django.setup()

        # Operational job type; its product gets a known file name
        operational_type = job_test_utils.create_job_type(name='test1', category='test-1', is_operational=True)
        operational_job = job_test_utils.create_job(job_type=operational_type)
        self.job_type1 = operational_type
        self.job1 = operational_job
        self.job_exe1 = job_test_utils.create_job_exe(job=operational_job)
        self.product1 = product_test_utils.create_product(job_exe=self.job_exe1, has_been_published=True,
                                                          file_name='test.txt')

        # Non-operational counterpart
        restricted_type = job_test_utils.create_job_type(name='test2', category='test-2', is_operational=False)
        restricted_job = job_test_utils.create_job(job_type=restricted_type)
        self.job_type2 = restricted_type
        self.job2 = restricted_job
        self.job_exe2 = job_test_utils.create_job_exe(job=restricted_job)
        self.product2 = product_test_utils.create_product(job_exe=self.job_exe2, has_been_published=True)
예제 #44
0
    def setUp(self):
        """Builds a workspace with mocked upload/delete behavior, a Seed job
        execution carrying JSON inputs and source metadata, and product file
        metadata fixtures."""
        django.setup()

        def upload_files(file_uploads):
            # Persist each uploaded file model, as the real workspace would
            for file_upload in file_uploads:
                file_upload.file.save()

        def delete_files(files):
            for scale_file in files:
                scale_file.save()

        self.workspace = storage_test_utils.create_workspace()
        self.workspace.upload_files = MagicMock(side_effect=upload_files)
        self.workspace.delete_files = MagicMock(side_effect=delete_files)

        self.source_file = source_test_utils.create_source(file_name='input1.txt', workspace=self.workspace)

        # Seed manifest with two string inputs and no file inputs
        inputs_json = [{'name': 'property1', 'type': 'string'},
                       {'name': 'property2', 'type': 'string'}]
        manifest = job_test_utils.create_seed_manifest(inputs_json=inputs_json, command='my_command')
        manifest['job']['interface']['inputs']['files'] = []
        job_type = job_test_utils.create_seed_job_type(manifest=manifest)

        self.job_exe = job_test_utils.create_job_exe(job_type=job_type)
        seed_job = self.job_exe.job
        data = seed_job.get_input_data()
        data.add_value(JsonValue('property1', 'value1'))
        data.add_value(JsonValue('property2', 'value2'))
        seed_job.input = convert_data_to_v6_json(data).get_dict()
        seed_job.source_sensor_class = 'classA'
        seed_job.source_sensor = '1'
        seed_job.source_collection = '12345'
        seed_job.source_task = 'my-task'
        seed_job.save()
        self.job_exe_no = job_test_utils.create_job_exe()

        output_root = SCALE_JOB_EXE_OUTPUT_PATH
        self.local_path_1 = os.path.join(output_root, 'local/1/file.txt')
        self.local_path_2 = os.path.join(output_root, 'local/2/file.json')
        self.local_path_3 = os.path.join(output_root, 'local/3/file.h5')

        self.files = [
            ProductFileMetadata(output_name='output_name_1', local_path=self.local_path_1,
                                remote_path='remote/1/file.txt'),
            ProductFileMetadata(output_name='output_name_2', local_path=self.local_path_2,
                                media_type='application/x-custom-json', remote_path='remote/2/file.json',
                                source_sensor_class='classB', source_sensor='2', source_collection='12346',
                                source_task='my-task-2'),
        ]
        self.files_no = [
            ProductFileMetadata(output_name='output_name_3', local_path=self.local_path_3,
                                media_type='image/x-hdf5-image', remote_path='remote/3/file.h5'),
        ]
예제 #45
0
    def test_uuid_use_job_exe(self):
        """Tests setting UUIDs on products from multiple job executions of the same type."""
        job = job_test_utils.create_job()
        first_exe = job_test_utils.create_job_exe(job=job)
        second_exe = job_test_utils.create_job_exe(job=job)

        source_ids = [self.source_file.id]
        first_products = ProductFile.objects.upload_files(self.files, source_ids, first_exe, self.workspace)
        second_products = ProductFile.objects.upload_files(self.files, source_ids, second_exe, self.workspace)

        # Products produced by multiple runs of the same job type must have the
        # same (non-empty) UUIDs
        self.assertIsNotNone(first_products[0].uuid)
        self.assertIsNotNone(first_products[1].uuid)
        for first, second in zip(first_products, second_products):
            self.assertEqual(first.uuid, second.uuid)
예제 #46
0
    def setUp(self):
        """Creates a mocked workspace, a source file, an operational and a
        non-operational job execution, and product file metadata fixtures."""
        django.setup()

        def upload_files(file_uploads):
            # Persist each uploaded file model, as the real workspace would
            for file_upload in file_uploads:
                file_upload.file.save()

        def delete_files(files):
            for scale_file in files:
                scale_file.save()

        self.workspace = storage_test_utils.create_workspace()
        self.workspace.upload_files = MagicMock(side_effect=upload_files)
        self.workspace.delete_files = MagicMock(side_effect=delete_files)

        self.source_file = source_test_utils.create_source(file_name='input1.txt', workspace=self.workspace)

        # Operational execution whose job data carries two properties
        self.job_exe = job_test_utils.create_job_exe()
        data = self.job_exe.job.get_job_data()
        data.add_property_input('property1', 'value1')
        data.add_property_input('property2', 'value2')
        self.job_exe.job.data = data.get_dict()
        self.job_exe.job.save()

        # Non-operational execution; flip job and job type flags atomically
        self.job_exe_no = job_test_utils.create_job_exe()
        with transaction.atomic():
            self.job_exe_no.job.is_operational = False
            self.job_exe_no.job.job_type.is_operational = False
            self.job_exe_no.job.save()
            self.job_exe_no.job.job_type.save()

        output_root = SCALE_JOB_EXE_OUTPUT_PATH
        self.local_path_1 = os.path.join(output_root, 'local/1/file.txt')
        self.local_path_2 = os.path.join(output_root, 'local/2/file.json')
        self.local_path_3 = os.path.join(output_root, 'local/3/file.h5')

        self.files = [
            ProductFileMetadata('output_name_1', self.local_path_1, remote_path='remote/1/file.txt'),
            ProductFileMetadata('output_name_2', self.local_path_2, 'application/x-custom-json',
                                'remote/2/file.json'),
        ]
        self.files_no = [
            ProductFileMetadata('output_name_3', self.local_path_3, 'image/x-hdf5-image', 'remote/3/file.h5'),
        ]
예제 #47
0
    def test_uuid_use_job_exe(self):
        """Tests setting UUIDs on products from multiple job executions of the same type."""
        job = job_test_utils.create_job()
        exe_a = job_test_utils.create_job_exe(job=job)
        exe_b = job_test_utils.create_job_exe(job=job)

        parents = [self.source_file.id]
        products_a = ProductFile.objects.upload_files(self.files, parents, exe_a, self.workspace)
        products_b = ProductFile.objects.upload_files(self.files, parents, exe_b, self.workspace)

        # Repeated runs of the same job type must yield matching, non-empty UUIDs
        self.assertIsNotNone(products_a[0].uuid)
        self.assertIsNotNone(products_a[1].uuid)
        self.assertEqual(products_a[0].uuid, products_b[0].uuid)
        self.assertEqual(products_a[1].uuid, products_b[1].uuid)
예제 #48
0
    def test_wrong_status(self):
        """Tests calling the requeue view when the job hasn't failed.

        Requeueing a job that completed successfully must be rejected with a
        409 CONFLICT response.
        """

        job_test_utils.create_job_exe(job=self.job_2, status='COMPLETED')
        Job.objects.update_status([self.job_2], 'COMPLETED', timezone.now())

        json_data = {
            'job_id': self.job_2.id,
        }

        url = '/queue/requeue-job/'
        response = self.client.post(url, json.dumps(json_data), 'application/json')

        # Include the response body in the failure message for easier
        # debugging, matching the other requeue tests in this suite; the
        # previously re-fetched (and unused) job model is dropped.
        self.assertEqual(response.status_code, status.HTTP_409_CONFLICT, response.content)
예제 #49
0
File: test_models.py  Project: Carl4/scale
    def test_get_available_based_on_usage(self):
        """Tests that scheduling skips a queued job type whose resource needs
        exceed the node's availability, scheduling the lower-priority type instead."""
        job1 = job_test_utils.create_job(job_type=self.job_type_3, event=self.trigger_event_1)
        job_test_utils.create_job_exe(job=job1)

        # Use keyword arguments, consistent with the job1 setup above
        job2 = job_test_utils.create_job(job_type=self.job_type_3, event=self.trigger_event_1)
        job_test_utils.create_job_exe(job=job2)

        Queue.objects.queue_new_job(self.job_type_3, {}, self.trigger_event_1)
        Queue.objects.queue_new_job(self.job_type_4, {}, self.trigger_event_1)

        job_exes = Queue.objects.schedule_jobs_on_node(50, 500, 50, self.node_3)
        self.assertEqual(len(job_exes), 1)

        # We should see job type 4 despite 3 being a higher priority since 3 requires too many resources.
        # assertEqual reports both values on failure, unlike assertTrue(a == b).
        self.assertEqual(job_exes[0].job.job_type, self.job_type_4)
예제 #50
0
    def test_inputs_and_products(self):
        """Tests creating links for inputs and then later replacing with generated products.

        After the second call, the placeholder input-only links (descendant is
        null) must be gone, replaced by direct and indirect links to the product.
        """

        file_8 = storage_test_utils.create_file()

        parent_ids = [self.file_4.id, self.file_6.id, self.file_7.id]
        child_ids = [file_8.id]
        job_exe = job_test_utils.create_job_exe()
        recipe_test_utils.create_recipe_job(job=job_exe.job)

        # First create only the input files
        FileAncestryLink.objects.create_file_ancestry_links(parent_ids, None, job_exe)

        # Replace the inputs with the new links for both inputs and products
        FileAncestryLink.objects.create_file_ancestry_links(parent_ids, child_ids, job_exe)

        # Make sure the old entries were deleted. Use count() consistently; the
        # original mixed len(queryset) (which materializes all rows) with count().
        old_direct_qry = FileAncestryLink.objects.filter(descendant__isnull=True, job_exe=job_exe,
                                                         ancestor_job__isnull=True)
        self.assertEqual(old_direct_qry.count(), 0)

        old_indirect_qry = FileAncestryLink.objects.filter(descendant__isnull=True, job_exe=job_exe,
                                                           ancestor_job__isnull=False)
        self.assertEqual(old_indirect_qry.count(), 0)

        # Direct links: one per declared parent
        direct_qry = FileAncestryLink.objects.filter(descendant=file_8, job_exe=job_exe, ancestor_job__isnull=True)
        self.assertEqual(direct_qry.count(), 3)
        file_8_parent_ids = {link.ancestor_id for link in direct_qry}
        self.assertSetEqual(file_8_parent_ids, {self.file_4.id, self.file_6.id, self.file_7.id})

        # Indirect links: ancestors inherited through earlier jobs
        indirect_qry = FileAncestryLink.objects.filter(descendant=file_8, job_exe=job_exe, ancestor_job__isnull=False)
        self.assertEqual(indirect_qry.count(), 3)
        file_8_ancestor_ids = {link.ancestor_id for link in indirect_qry}
        self.assertSetEqual(file_8_ancestor_ids, {self.file_1.id, self.file_2.id, self.file_3.id})
예제 #51
0
    def test_requeue_ignored(self,):
        """Tests calling the requeue view when the job has already completed."""

        job_test_utils.create_job_exe(job=self.job_2, status="COMPLETED")
        Job.objects.update_status([self.job_2], "COMPLETED", timezone.now())

        payload = json.dumps({"job_ids": [self.job_2.id]})
        response = self.client.post(rest_util.get_url("/queue/requeue-jobs/"), payload, "application/json")
        self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)

        # The completed job is reported back unchanged rather than requeued
        results = json.loads(response.content)["results"]
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]["id"], self.job_2.id)
        self.assertEqual(results[0]["status"], "COMPLETED")
예제 #52
0
File: utils.py  Project: AppliedIS/scale
def create_queue(job_type=None, priority=1, cpus_required=1.0, mem_required=512.0, disk_in_required=200.0,
                 disk_out_required=100.0, disk_total_required=300.0):
    """Creates a queue model for unit testing

    A job and a job execution in QUEUED status are created to back the queue
    entry; the given job type is used if provided, otherwise one is created.

    :param job_type: The job type
    :type job_type: :class:`job.models.JobType`
    :param priority: The priority
    :type priority: int
    :param cpus_required: The number of CPUs required
    :type cpus_required: float
    :param mem_required: The memory required in MiB
    :type mem_required: float
    :param disk_in_required: The input disk space required in MiB
    :type disk_in_required: float
    :param disk_out_required: The output disk space required in MiB
    :type disk_out_required: float
    :param disk_total_required: The total disk space required in MiB
    :type disk_total_required: float
    :returns: The queue model
    :rtype: :class:`queue.models.Queue`
    """

    job = job_test_utils.create_job(job_type=job_type, status='QUEUED')
    job_exe = job_test_utils.create_job_exe(job=job, status='QUEUED')

    return Queue.objects.create(job_exe=job_exe, job=job, job_type=job.job_type, priority=priority,
                                cpus_required=cpus_required, mem_required=mem_required,
                                disk_in_required=disk_in_required, disk_out_required=disk_out_required,
                                disk_total_required=disk_total_required, queued=timezone.now())
예제 #53
0
    def setUp(self):
        """Creates a single-try job type with one running execution and records
        the execution id for use by the test."""
        django.setup()

        single_try_type = job_test_utils.create_job_type(max_tries=1)
        running_job = job_test_utils.create_job(job_type=single_try_type, num_exes=1)
        running_exe = job_test_utils.create_job_exe(job=running_job, status='RUNNING')
        self._job_exe_id = running_exe.id
예제 #54
0
    def test_job_type_limit(self, mock_taskinfo):
        """Tests running the scheduling thread with a job type limit"""
        mock_taskinfo.return_value = MagicMock()

        Queue.objects.all().delete()
        job_type_with_limit = job_test_utils.create_job_type()
        job_type_with_limit.max_scheduled = 4
        job_type_with_limit.save()
        running_exe = job_test_utils.create_job_exe(job_type=job_type_with_limit, status='RUNNING')
        # Six queued jobs of the limited type
        for _ in range(6):
            queue_test_utils.create_queue(job_type=job_type_with_limit)
        job_type_mgr.sync_with_database()
        # One job of this type is already running
        running_job_mgr.add_job_exes([RunningJobExecution(running_exe)])

        offer_mgr.add_new_offers([
            ResourceOffer('offer_1', self.node_agent_1, NodeResources(cpus=200.0, mem=102400.0, disk=102400.0)),
            ResourceOffer('offer_2', self.node_agent_2, NodeResources(cpus=200.0, mem=204800.0, disk=204800.0)),
        ])

        # Ignore cleanup tasks
        for node in node_mgr.get_nodes():
            node.initial_cleanup_completed()

        # With max_scheduled=4 and one already running, only 3 more may start
        num_tasks = self._scheduling_thread._perform_scheduling()
        self.assertEqual(num_tasks, 3)
예제 #55
0
File: utils.py  Project: AppliedIS/scale
def create_product(job_exe=None, workspace=None, has_been_published=False, is_published=False, uuid=None,
                   file_name='my_test_file.txt', file_path='/file/path/my_test_file.txt', media_type='text/plain',
                   file_size=100, countries=None, is_superseded=False, superseded=None):
    """Creates a product file model for unit testing

    Any of job_exe, workspace, uuid, or superseded left unset is filled in
    automatically. When no UUID is given, one is derived from the job type id
    and file name so repeated runs of the same job type get matching UUIDs.

    :returns: The product model
    :rtype: :class:`product.models.ProductFile`
    """

    if not job_exe:
        job_exe = job_utils.create_job_exe()
    if not workspace:
        workspace = storage_utils.create_workspace()

    if not uuid:
        builder = hashlib.md5()
        # NOTE(review): feeding str objects to hashlib works on Python 2 only;
        # Python 3 requires bytes (str(...).encode('utf-8')) -- confirm target
        builder.update(str(job_exe.job.job_type.id))
        builder.update(file_name)
        uuid = builder.hexdigest()

    if is_superseded and not superseded:
        # A superseded product needs a timestamp; default to now
        superseded = timezone.now()

    product_file = ProductFile.objects.create(job_exe=job_exe, job=job_exe.job, job_type=job_exe.job.job_type,
                                              has_been_published=has_been_published, is_published=is_published,
                                              uuid=uuid, file_name=file_name, media_type=media_type,
                                              file_size=file_size, file_path=file_path, workspace=workspace,
                                              is_superseded=is_superseded, superseded=superseded)
    if countries:
        product_file.countries = countries
        product_file.save()
    return product_file
예제 #56
0
    def setUp(self):
        """Creates one job execution with three products attached to it."""
        django.setup()

        self.job_exe = job_test_utils.create_job_exe()

        # Three products, all tied to the same execution
        self.product_1, self.product_2, self.product_3 = (
            prod_test_utils.create_product(job_exe=self.job_exe) for _ in range(3))
예제 #57
0
File: test_models.py  Project: Carl4/scale
    def setUp(self):
        """Builds a three-generation file ancestry: two source files, three
        products from a first job, one product from a second job, and a
        stand-alone file."""
        django.setup()

        # Generation 1
        self.file_1 = storage_test_utils.create_file()
        self.file_2 = storage_test_utils.create_file()

        # Generation 2
        job_exe_1 = job_test_utils.create_job_exe()
        recipe_job_1 = recipe_test_utils.create_recipe_job(job=job_exe_1.job)
        self.file_3 = prod_test_utils.create_product(job_exe=job_exe_1)
        self.file_4 = prod_test_utils.create_product(job_exe=job_exe_1)
        self.file_5 = prod_test_utils.create_product(job_exe=job_exe_1)

        # Generation 3
        job_exe_2 = job_test_utils.create_job_exe()
        recipe_job_2 = recipe_test_utils.create_recipe_job(job=job_exe_2.job)
        self.file_6 = prod_test_utils.create_product(job_exe=job_exe_2)

        # Stand alone file
        self.file_7 = prod_test_utils.create_product()

        # First job links generation 1 to 2: each source is a direct ancestor
        # of every first-job product
        for ancestor in (self.file_1, self.file_2):
            for descendant in (self.file_3, self.file_4, self.file_5):
                FileAncestryLink.objects.create(ancestor=ancestor, descendant=descendant, job_exe=job_exe_1,
                                                job=job_exe_1.job, recipe=recipe_job_1.recipe)

        # Second job links generation 2 to 3, plus indirect links back to
        # generation 1 through the first job
        FileAncestryLink.objects.create(ancestor=self.file_3, descendant=self.file_6, job_exe=job_exe_2,
                                        job=job_exe_2.job, recipe=recipe_job_2.recipe)
        for ancestor in (self.file_1, self.file_2):
            FileAncestryLink.objects.create(ancestor=ancestor, descendant=self.file_6, job_exe=job_exe_2,
                                            job=job_exe_2.job, recipe=recipe_job_2.recipe,
                                            ancestor_job_exe=job_exe_1, ancestor_job=job_exe_1.job)
예제 #58
0
    def setUp(self):
        """Initializes the scheduler and records the id of a running execution
        belonging to a single-try job type."""
        django.setup()

        Scheduler.objects.initialize_scheduler()
        single_try_type = job_test_utils.create_job_type(max_tries=1)
        active_job = job_test_utils.create_job(job_type=single_try_type, num_exes=1)
        self._job_exe_id = job_test_utils.create_job_exe(job=active_job, status='RUNNING').id
예제 #59
0
    def test_requeue_ignored(self,):
        """Tests calling the requeue view when the job has already completed."""

        job_test_utils.create_job_exe(job=self.job_2, status='COMPLETED')
        Job.objects.update_status([self.job_2], 'COMPLETED', timezone.now())

        payload = json.dumps({'job_ids': [self.job_2.id]})
        response = self.client.post('/queue/requeue-jobs/', payload, 'application/json')
        result = json.loads(response.content)

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # The completed job is reported back unchanged rather than requeued
        results = result['results']
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['id'], self.job_2.id)
        self.assertEqual(results[0]['status'], 'COMPLETED')