Code Example #1
File: test_filter.py Project: sau29/scale
    def test_is_data_accepted(self):
        """Tests calling DataFilter.is_data_accepted()"""

        data_filter = DataFilter(all=False)
        data_filter.add_filter({'name': 'input_a', 'type': 'media-type', 'condition': '==', 'values': ['application/json']})
        data_filter.add_filter({'name': 'input_b', 'type': 'string', 'condition': 'contains', 'values': ['abcde']})
        data_filter.add_filter({'name': 'input_c', 'type': 'integer', 'condition': '>', 'values': ['0']})
        data_filter.add_filter({'name': 'input_d', 'type': 'integer', 'condition': 'between', 'values': ['0', '100']})
        data_filter.add_filter({'name': 'input_f', 'type': 'meta-data', 'condition': 'in', 'values': [['foo', 'baz']],
                                'fields': [['a', 'b']]})
        
        data = Data()

        file_value = FileValue('input_a', [self.file1.id])
        data.add_value(file_value)
        
        # first filter passes, so data is accepted if all is set to false
        self.assertTrue(data_filter.is_data_accepted(data))
        data_filter.all = True
        # other filters fail so data is not accepted
        self.assertFalse(data_filter.is_data_accepted(data))
        
        # get other filters to pass
        json_value = JsonValue('input_b', 'abcdefg')
        data.add_value(json_value)
        json_value = JsonValue('input_c', '10')
        data.add_value(json_value)
        json_value = JsonValue('input_d', 50)
        data.add_value(json_value)
        file_value = FileValue('input_f', [self.file2.id])
        data.add_value(file_value)

        self.assertTrue(data_filter.is_data_accepted(data))
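
The test above drives DataFilter's "all" flag; a minimal standalone sketch of the implied semantics, assuming each filter exposes a hypothetical matches(data) check (not the real Scale API):

    def is_data_accepted_sketch(filters, data, all_required):
        # all_required=True: every filter must pass; all_required=False: any one passing filter accepts the data
        results = [f.matches(data) for f in filters]
        return all(results) if all_required else any(results)
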
Code Example #2
    def test_validate(self):
        """Tests calling Data.validate()"""

        interface = Interface()
        data = Data()

        interface.add_parameter(FileParameter('input_1', ['application/json']))
        interface.add_parameter(JsonParameter('input_2', 'integer'))
        data.add_value(FileValue('input_1', [123]))
        data.add_value(JsonValue('input_2', 100))
        data.add_value(JsonValue('extra_input_1', 'hello'))
        data.add_value(JsonValue('extra_input_2', 'there'))

        # Valid data
        data.validate(interface)
        # Ensure extra data values are removed
        self.assertSetEqual(set(data.values.keys()), {'input_1', 'input_2'})

        # Data is missing required input 3
        interface.add_parameter(FileParameter('input_3', ['image/gif'], required=True))
        with self.assertRaises(InvalidData) as context:
            data.validate(interface)
        self.assertEqual(context.exception.error.name, 'PARAM_REQUIRED')

        data.add_value(FileValue('input_3', [999]))  # Input 3 taken care of now

        # Invalid data
        interface.add_parameter(JsonParameter('input_4', 'string'))
        mock_value = MagicMock()
        mock_value.name = 'input_4'
        mock_value.validate.side_effect = InvalidData('MOCK', '')
        data.add_value(mock_value)
        with self.assertRaises(InvalidData) as context:
            data.validate(interface)
        self.assertEqual(context.exception.error.name, 'MOCK')
Code Example #3
    def test_uuid_use_properties(self):
        """Tests setting UUIDs on products with different property values."""

        inputs_json = [
            {'name': 'property1', 'type': 'string'},
            {'name': 'property2', 'type': 'string'}
        ]

        manifest = job_test_utils.create_seed_manifest(name='test-job', inputs_json=inputs_json, command='my_command')
        manifest['job']['interface']['inputs']['files'] = []
        job_type = job_test_utils.create_seed_job_type(manifest=manifest)

        job1 = job_test_utils.create_job(job_type=job_type)
        job_exe1 = job_test_utils.create_job_exe(job=job1)
        data1 = job_exe1.job.get_input_data()
        data1.add_value(JsonValue('property1', 'value1'))
        data1.add_value(JsonValue('property2', 'value2'))
        job_exe1.job.input = convert_data_to_v6_json(data1).get_dict()
        job2 = job_test_utils.create_job(job_type=job_type)
        job_exe2 = job_test_utils.create_job_exe(job=job2)
        data2 = job_exe2.job.get_input_data()
        data2.add_value(JsonValue('property1', 'diffvalue1'))
        data2.add_value(JsonValue('property2', 'value2'))
        job_exe2.job.input = convert_data_to_v6_json(data2).get_dict()

        products1 = ProductFile.objects.upload_files(self.files, [self.source_file.id], job_exe1, self.workspace)
        products2 = ProductFile.objects.upload_files(self.files, [self.source_file.id], job_exe2, self.workspace)

        # Make sure the product files have different UUIDs
        self.assertIsNotNone(products1[0].uuid)
        self.assertIsNotNone(products1[1].uuid)
        self.assertNotEqual(products1[0].uuid, products2[0].uuid)
        self.assertNotEqual(products1[1].uuid, products2[1].uuid)
Code Example #4
    def test_convert_data_to_v1_json(self):
        """Tests calling convert_data_to_v1_json()"""

        # Try data and interface with nothing set
        data = Data()
        interface = Interface()
        json = convert_data_to_v1_json(data, interface)
        DataV1(data=json.get_dict())  # Revalidate

        # Try data with a variety of values
        data = Data()
        data.add_value(FileValue('input_a', [1234]))
        data.add_value(FileValue('input_b', [1235, 1236]))
        data.add_value(JsonValue('input_c', 'hello'))
        data.add_value(JsonValue('input_d', 11.9))
        json = convert_data_to_v1_json(data, interface)
        self.assertDictEqual(
            json.get_dict(), {
                u'input_data': [{
                    u'name': u'input_d',
                    u'value': 11.9
                }, {
                    u'name': u'input_b',
                    u'file_ids': [1235, 1236]
                }, {
                    u'name': u'input_c',
                    u'value': u'hello'
                }, {
                    u'name': u'input_a',
                    u'file_id': 1234
                }],
                u'version': u'1.0'
            })
        DataV1(data=json.get_dict())  # Revalidate
        self.assertSetEqual(
            set(DataV6(json.get_dict()).get_data().values.keys()),
            {'input_a', 'input_b', 'input_c', 'input_d'})

        # Try data with a single file list that should be a directory
        data = Data()
        data.add_value(FileValue('input_a', [1234]))
        interface = Interface()
        file_param = FileParameter('input_a', [], True, True)
        interface.add_parameter(file_param)
        json = convert_data_to_v1_json(data, interface)

        self.assertDictEqual(
            json.get_dict(), {
                u'input_data': [{
                    u'name': u'input_a',
                    u'file_ids': [1234]
                }],
                u'version': u'1.0'
            })
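
The two assertDictEqual checks above imply the v1 rule for file values: when the parameter allows multiple files, or the value holds more than one file id, the entry is written as 'file_ids'; otherwise the single id is written as 'file_id'. A hedged sketch of that rule (not the library code):

    def v1_file_entry(name, file_ids, multiple=False):
        # Rule implied by the assertions above
        if multiple or len(file_ids) > 1:
            return {'name': name, 'file_ids': file_ids}
        return {'name': name, 'file_id': file_ids[0]}
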
Code Example #5
    def setUp(self):
        django.setup()

        def upload_files(file_uploads):
            for file_upload in file_uploads:
                file_upload.file.save()

        def delete_files(files):
            for scale_file in files:
                scale_file.save()

        self.workspace = storage_test_utils.create_workspace()
        self.workspace.upload_files = MagicMock(side_effect=upload_files)
        self.workspace.delete_files = MagicMock(side_effect=delete_files)

        self.source_file = source_test_utils.create_source(file_name='input1.txt', workspace=self.workspace)

        inputs_json = [
            {'name': 'property1', 'type': 'string'},
            {'name': 'property2', 'type': 'string'}
        ]
        manifest = job_test_utils.create_seed_manifest(inputs_json=inputs_json, command='my_command')
        manifest['job']['interface']['inputs']['files'] = []
        job_type = job_test_utils.create_seed_job_type(manifest=manifest)
        self.job_exe = job_test_utils.create_job_exe(job_type=job_type)
        data = self.job_exe.job.get_input_data()
        data.add_value(JsonValue('property1', 'value1'))
        data.add_value(JsonValue('property2', 'value2'))
        self.job_exe.job.input = convert_data_to_v6_json(data).get_dict()
        self.job_exe.job.source_sensor_class = 'classA'
        self.job_exe.job.source_sensor = '1'
        self.job_exe.job.source_collection = '12345'
        self.job_exe.job.source_task = 'my-task'
        self.job_exe.job.save()
        self.job_exe_no = job_test_utils.create_job_exe()

        self.local_path_1 = os.path.join(SCALE_JOB_EXE_OUTPUT_PATH, 'local/1/file.txt')
        self.local_path_2 = os.path.join(SCALE_JOB_EXE_OUTPUT_PATH, 'local/2/file.json')
        self.local_path_3 = os.path.join(SCALE_JOB_EXE_OUTPUT_PATH, 'local/3/file.h5')

        self.files = [
            ProductFileMetadata(output_name='output_name_1', local_path=self.local_path_1,
                                remote_path='remote/1/file.txt'),
            ProductFileMetadata(output_name='output_name_2', local_path=self.local_path_2,
                                media_type='application/x-custom-json', remote_path='remote/2/file.json',
                                source_sensor_class='classB', source_sensor='2', source_collection='12346',
                                source_task='my-task-2'),
        ]
        self.files_no = [
            ProductFileMetadata(output_name='output_name_3', local_path=self.local_path_3, media_type='image/x-hdf5-image', remote_path='remote/3/file.h5')
        ]
Code Example #6
File: test_definition.py Project: ctc-oss/scale
    def test_generate_node_input_data(self):
        """Tests calling RecipeDefinition.generate_node_input_data()"""

        input_interface = Interface()
        input_interface.add_parameter(
            FileParameter('recipe_input_1', ['image/gif'], multiple=True))
        input_interface.add_parameter(JsonParameter('recipe_input_2',
                                                    'string'))
        definition = RecipeDefinition(input_interface)
        definition.add_job_node('node_a', 'job_type_1', '1.0', 1)
        definition.add_job_node('node_b', 'job_type_2', '1.0', 1)
        definition.add_job_node('node_c', 'job_type_3', '1.0', 1)
        definition.add_dependency('node_c', 'node_b')
        definition.add_dependency('node_c', 'node_a')
        definition.add_recipe_input_connection('node_c', 'input_1',
                                               'recipe_input_1')
        definition.add_recipe_input_connection('node_c', 'input_2',
                                               'recipe_input_2')
        definition.add_dependency_input_connection('node_c', 'input_3',
                                                   'node_a', 'output_a_1')
        definition.add_dependency_input_connection('node_c', 'input_4',
                                                   'node_a', 'output_a_2')
        definition.add_dependency_input_connection('node_c', 'input_5',
                                                   'node_b', 'output_b_1')

        recipe_data = Data()
        recipe_data.add_value(FileValue('recipe_input_1', [1, 2, 3, 4, 5]))
        recipe_data.add_value(JsonValue('recipe_input_2', 'Scale is awesome!'))
        a_output_data = Data()
        a_output_data.add_value(FileValue('output_a_1', [1234]))
        a_output_data.add_value(JsonValue('output_a_2', {'foo': 'bar'}))
        b_output_data = Data()
        b_output_data.add_value(JsonValue('output_b_1', 12.34))
        node_outputs = {
            'node_a': RecipeNodeOutput('node_a', 'job', 1, a_output_data),
            'node_b': RecipeNodeOutput('node_b', 'job', 1, b_output_data)
        }

        node_data = definition.generate_node_input_data(
            'node_c', recipe_data, node_outputs)
        self.assertSetEqual(
            set(node_data.values.keys()),
            {'input_1', 'input_2', 'input_3', 'input_4', 'input_5'})
        self.assertListEqual(node_data.values['input_1'].file_ids,
                             [1, 2, 3, 4, 5])
        self.assertEqual(node_data.values['input_2'].value,
                         'Scale is awesome!')
        self.assertListEqual(node_data.values['input_3'].file_ids, [1234])
        self.assertDictEqual(node_data.values['input_4'].value, {'foo': 'bar'})
        self.assertEqual(node_data.values['input_5'].value, 12.34)
Code Example #7
File: daily_metrics.py Project: sau29/scale
    def process_event(self, event, last_event=None):
        """See :meth:`job.clock.ClockEventProcessor.process_event`.

        Compares the new event with the last event and schedules jobs for any missing metrics days.
        """

        # Attempt to get the daily metrics job type; filter().last() returns None if it is missing
        job_type = JobType.objects.filter(name='scale-daily-metrics').last()
        if not job_type:
            raise ClockEventError('Missing required job type: scale-daily-metrics')

        if last_event:
            # Build a list of days that require metrics
            day_count = xrange(
                (event.occurred.date() - last_event.occurred.date()).days)
            days = [
                last_event.occurred.date() + datetime.timedelta(days=d)
                for d in day_count
            ]
        else:
            # Use the previous day when first triggered
            days = [timezone.now().date() - datetime.timedelta(days=1)]

        # Schedule one job for each required day
        for day in days:
            job_data = Data()
            job_data.add_value(JsonValue('DAY', day.strftime('%Y-%m-%d')))
            job = Queue.objects.queue_new_job_v6(job_type, job_data, event)
            CommandMessageManager().send_messages(
                create_process_job_input_messages([job.id]))
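
A standalone sketch of the gap-fill logic above (plain datetime, no Scale imports), showing which days get metrics jobs when the clock events are three days apart:

    import datetime

    last_occurred = datetime.date(2019, 1, 1)  # date of the previous clock event
    new_occurred = datetime.date(2019, 1, 4)   # date of the current clock event

    day_count = range((new_occurred - last_occurred).days)
    days = [last_occurred + datetime.timedelta(days=d) for d in day_count]
    # days == [2019-01-01, 2019-01-02, 2019-01-03]: one job per elapsed day,
    # excluding the day of the new event itself
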
Code Example #8
    def test_convert_definition_to_v6_json(self):
        """Tests calling convert_data_to_v6_json()"""

        # Try a definition with nothing set
        definition = DataSetDefinitionV6()
        json = convert_definition_to_v6_json(definition.get_definition())
        DataSetDefinitionV6(definition=json.get_dict(),
                            do_validate=True)  # Revalidate

        # Try a definition with a variety of parameters and values
        definition = DataSetDefinition(definition={})
        file_param = FileParameter('input_a', ['application/json'])
        json_param = JsonParameter('input_b', 'integer')
        file_param2 = FileParameter('input_c', ['application/json'])
        json_param2 = JsonParameter('input_d', 'integer')
        definition.add_global_parameter(file_param)
        definition.add_global_parameter(json_param)
        definition.add_global_value(FileValue('input_a', [123]))
        definition.add_global_value(JsonValue('input_b', 100))
        definition.add_parameter(file_param2)
        definition.add_parameter(json_param2)
        json = convert_definition_to_v6_json(definition)
        DataSetDefinitionV6(definition=json.get_dict(),
                            do_validate=True)  # Revalidate
        self.assertSetEqual(set(json.get_definition().get_parameters()),
                            {'input_a', 'input_b', 'input_c', 'input_d'})
Code Example #9
File: test_data_v6.py Project: ctc-oss/scale
    def test_convert_data_to_v6_json(self):
        """Tests calling convert_data_to_v6_json()"""

        # Try data with nothing set
        data = Data()
        json = convert_data_to_v6_json(data)
        DataV6(data=json.get_dict(), do_validate=True)  # Revalidate

        # Try data with a variety of values
        data = Data()
        data.add_value(FileValue('input_a', [1234]))
        data.add_value(FileValue('input_b', [1235, 1236]))
        data.add_value(JsonValue('input_c', 'hello'))
        data.add_value(JsonValue('input_d', 11.9))
        json = convert_data_to_v6_json(data)
        DataV6(data=json.get_dict(), do_validate=True)  # Revalidate
        self.assertSetEqual(set(json.get_data().values.keys()), {'input_a', 'input_b', 'input_c', 'input_d'})
Code Example #10
    def execute(self):
        """See :meth:`messaging.messages.message.CommandMessage.execute`
        """
        from ingest.models import Ingest
        ingest_job_type = Ingest.objects.get_ingest_job_type()
        
        # Grab the ingest object
        ingest = Ingest.objects.get(pk=self.ingest_id)
        
        when = ingest.transfer_ended if ingest.transfer_ended else now()
        desc = {'file_name': ingest.file_name}

        event = None
        ingest_id = ingest.id
        with transaction.atomic():
            # Create the appropriate triggerevent
            if self.create_ingest_type == STRIKE_JOB_TYPE:
                desc['strike_id'] = self.strike_id
                event = TriggerEvent.objects.create_trigger_event('STRIKE_TRANSFER', None, desc, when)
            elif self.create_ingest_type == SCAN_JOB_TYPE:
                ingest_id = Ingest.objects.get(scan_id=self.scan_id, file_name=ingest.file_name).id
                desc['scan_id'] = self.scan_id
                event = TriggerEvent.objects.create_trigger_event('SCAN_TRANSFER', None, desc, when)
            
        data = Data()
        data.add_value(JsonValue('ingest_id', ingest_id))
        data.add_value(JsonValue('workspace', ingest.workspace.name))
        if ingest.new_workspace:
            data.add_value(JsonValue('new_workspace', ingest.new_workspace.name))

        ingest_job = None
        with transaction.atomic():
            ingest_job = Queue.objects.queue_new_job_v6(ingest_job_type, data, event)
            ingest.job = ingest_job
            ingest.status = 'QUEUED'
            ingest.save()
            
        # Send message to start processing the job input. This is done outside the transaction
        # in the hope that the queued job exists by now, but it can race with a slow DB.
        job = Job.objects.get_details(ingest_job.id)
        self.new_messages.extend(create_process_job_input_messages([job.id]))
        
        return True
Code Example #11
    def add_json_input(self, data, add_to_internal=True):
        """Adds a new json parameter to this job data.

        :param data: The json parameter dict
        :type data: dict
        :param add_to_internal: Whether we should add to private data dict. Unneeded when used from __init__
        :type add_to_internal: bool
        """

        self._new_data.add_value(JsonValue(data['name'], data['value']))
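
A hedged usage sketch for add_json_input(); job_data is a hypothetical object exposing the method above, and the input name and value are illustrative:

    job_data.add_json_input({'name': 'threshold', 'value': 0.75})
    # Internally this becomes JsonValue('threshold', 0.75) on the private Data object,
    # so adding the same name twice would raise InvalidData ('DUPLICATE_VALUE').
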
Code Example #12
File: create_ingest_jobs.py Project: matttalda/scale
    def execute(self):
        """See :meth:`messaging.messages.message.CommandMessage.execute`
        """
        from ingest.models import Ingest
        ingest_job_type = Ingest.objects.get_ingest_job_type()

        # Grab the ingest object
        ingest = Ingest.objects.get(pk=self.ingest_id)

        when = ingest.transfer_ended if ingest.transfer_ended else now()
        desc = {'file_name': ingest.file_name}

        event = None
        ingest_id = ingest.id
        with transaction.atomic():
            # Create the appropriate triggerevent
            if self.create_ingest_type == STRIKE_JOB_TYPE:
                desc['strike_id'] = self.strike_id
                event = TriggerEvent.objects.create_trigger_event(
                    'STRIKE_TRANSFER', None, desc, when)
            elif self.create_ingest_type == SCAN_JOB_TYPE:
                ingest_id = Ingest.objects.get(scan_id=self.scan_id,
                                               file_name=ingest.file_name).id
                desc['scan_id'] = self.scan_id
                event = TriggerEvent.objects.create_trigger_event(
                    'SCAN_TRANSFER', None, desc, when)

        data = Data()
        data.add_value(JsonValue('ingest_id', ingest_id))
        data.add_value(JsonValue('workspace', ingest.workspace.name))
        if ingest.new_workspace:
            data.add_value(
                JsonValue('new_workspace', ingest.new_workspace.name))

        ingest_job = None
        with transaction.atomic():
            ingest_job = Queue.objects.queue_new_job_v6(
                ingest_job_type, data, event)
            ingest.job = ingest_job
            ingest.status = 'QUEUED'
            ingest.save()

        return True
Code Example #13
File: job_results.py Project: sau29/scale
    def add_output_json(self, output_name, value):
        """Adds the given output json from the seed.outputs.json file

        :param output_name: Output JSON key used to capture from output file
        :type output_name: str
        :param value: Raw value provided by job
        :type value: float or str or dict or list
        """

        self._results_data.add_value(JsonValue(output_name, value))
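
A minimal usage sketch, assuming a results object that exposes add_output_json() as above; the parsed outputs dict and its keys are illustrative:

    outputs_json = {'cloud_cover': 0.12, 'sensor': 'classA'}  # parsed seed.outputs.json (illustrative)
    for output_name, value in outputs_json.items():
        job_results.add_output_json(output_name, value)  # stored as JsonValue(output_name, value)
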
Code Example #14
    def test_json(self):
        """Tests converting a ProcessCondition message to and from JSON"""

        definition = RecipeDefinition(Interface())

        cond_interface_1 = Interface()
        cond_interface_1.add_parameter(JsonParameter('cond_int', 'integer'))
        df1 = DataFilter(filter_list=[{
            'name': 'cond_int',
            'type': 'integer',
            'condition': '==',
            'values': [0]
        }])
        definition = RecipeDefinition(cond_interface_1)
        definition.add_condition_node('node_a', cond_interface_1, df1)
        definition.add_recipe_input_connection('node_a', 'cond_int',
                                               'cond_int')

        definition_dict = convert_recipe_definition_to_v6_json(
            definition).get_dict()
        recipe_type = recipe_test_utils.create_recipe_type_v6(
            definition=definition_dict)

        data_1 = Data()
        data_1.add_value(JsonValue('cond_int', 0))
        data_1_dict = convert_data_to_v6_json(data_1).get_dict()
        recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type,
                                                 input=data_1_dict)
        condition = recipe_test_utils.create_recipe_condition(recipe=recipe,
                                                              save=True)
        recipe_test_utils.create_recipe_node(recipe=recipe,
                                             node_name='node_a',
                                             condition=condition,
                                             save=True)

        # Create message
        message = create_process_condition_messages([condition.id])[0]

        # Convert message to JSON and back, and then execute
        message_json_dict = message.to_json()
        new_message = ProcessCondition.from_json(message_json_dict)
        result = new_message.execute()

        self.assertTrue(result)
        condition = RecipeCondition.objects.get(id=condition.id)
        self.assertEqual(len(new_message.new_messages), 1)
        self.assertEqual(new_message.new_messages[0].type, 'update_recipe')
        self.assertEqual(new_message.new_messages[0].root_recipe_id, recipe.id)
        self.assertTrue(condition.is_processed)
        self.assertIsNotNone(condition.processed)
        self.assertTrue(condition.is_accepted)
Code Example #15
    def get_data(self):
        """Returns the data represented by this JSON

        :returns: The data
        :rtype: :class:`data.data.data.Data`
        """

        data = Data()

        for name, file_ids in self._data['files'].items():
            file_value = FileValue(name, file_ids)
            data.add_value(file_value)
        for name, json in self._data['json'].items():
            json_value = JsonValue(name, json)
            data.add_value(json_value)

        return data
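
The loop above implies that self._data is a dict with 'files' and 'json' sections; a sketch of that shape and the Data it would yield (the literal values are illustrative):

    _data = {
        'files': {'input_a': [1234], 'input_b': [1235, 1236]},
        'json': {'input_c': 'hello', 'input_d': 11.9},
    }
    # get_data() would return a Data whose values keys are
    # {'input_a', 'input_b', 'input_c', 'input_d'}: FileValue for the 'files'
    # entries and JsonValue for the 'json' entries.
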
Code Example #16
    def test_add_value(self):
        """Tests calling Data.add_value()"""

        data = Data()

        file_value = FileValue('input_1', [123])
        data.add_value(file_value)

        json_value = JsonValue('input_2', {'foo': 'bar'})
        data.add_value(json_value)

        self.assertSetEqual(set(data.values.keys()), {'input_1', 'input_2'})

        # Duplicate value
        dup_value = FileValue('input_1', [123])
        with self.assertRaises(InvalidData) as context:
            data.add_value(dup_value)
        self.assertEqual(context.exception.error.name, 'DUPLICATE_VALUE')
Code Example #17
    def test_add_value_from_output_data(self):
        """Tests calling Data.add_value_from_output_data()"""

        data = Data()
        output_data = Data()

        file_value = FileValue('output_1', [1, 2, 3])
        output_data.add_value(file_value)
        json_value = JsonValue('output_2', 'hello')
        output_data.add_value(json_value)

        data.add_value_from_output_data('input_1', 'output_1', output_data)
        self.assertSetEqual(set(data.values.keys()), {'input_1'})
        self.assertListEqual(data.values['input_1'].file_ids, [1, 2, 3])

        # Duplicate parameter
        with self.assertRaises(InvalidData) as context:
            data.add_value_from_output_data('input_1', 'output_1', output_data)
        self.assertEqual(context.exception.error.name, 'DUPLICATE_VALUE')
Code Example #18
File: spawn_delete_files_job.py Project: sau29/scale
    def execute(self):
        """See :meth:`messaging.messages.message.CommandMessage.execute`
        """

        # Check to see if a force stop was placed on this purge process
        results = PurgeResults.objects.get(trigger_event=self.trigger_id)
        if results.force_stop_purge:
            return True

        files_to_delete = ScaleFile.objects.filter_files(job_ids=[self.job_id])

        if files_to_delete:
            # Construct input data
            files = []
            workspaces = []

            for f in files_to_delete:
                files.append({
                    'id': f.id,
                    'file_path': f.file_path,
                    'workspace': f.workspace.name
                })
                if f.workspace.name not in [
                        k for wrkspc in workspaces for k in wrkspc.keys()
                ]:
                    workspaces.append(
                        {f.workspace.name: f.workspace.json_config})

            inputs = Data()
            inputs.add_value(JsonValue('job_id', str(self.job_id)))
            inputs.add_value(JsonValue('trigger_id', str(self.trigger_id)))
            inputs.add_value(
                JsonValue('source_file_id', str(self.source_file_id)))
            inputs.add_value(JsonValue('purge', str(self.purge)))
            inputs.add_value(JsonValue('files', json.dumps(files)))
            inputs.add_value(JsonValue('workspaces', json.dumps(workspaces)))

            # Send message to create system job to delete files
            msg = create_jobs_message(job_type_name="scale-delete-files",
                                      job_type_version="1.0.0",
                                      event_id=self.trigger_id,
                                      job_type_rev_num=1,
                                      input_data=inputs)
            self.new_messages.append(msg)

        return True
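
The workspace de-duplication above rebuilds a flat list of keys on every iteration; an equivalent sketch that tracks seen names with a set (same f.workspace fields assumed):

    seen_names = set()
    workspaces = []
    for f in files_to_delete:
        if f.workspace.name not in seen_names:
            seen_names.add(f.workspace.name)
            workspaces.append({f.workspace.name: f.workspace.json_config})
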
Code Example #19
    def test_execute_with_recipe(self):
        """Tests calling ProcessRecipeInput.execute() successfully when a sub-recipe has to get its data from its
        recipe
        """

        workspace = storage_test_utils.create_workspace()
        file_1 = storage_test_utils.create_file(workspace=workspace,
                                                file_size=104857600.0)
        file_2 = storage_test_utils.create_file(workspace=workspace,
                                                file_size=987654321.0)
        file_3 = storage_test_utils.create_file(workspace=workspace,
                                                file_size=65456.0)
        file_4 = storage_test_utils.create_file(workspace=workspace,
                                                file_size=24564165456.0)
        manifest_a = {
            'seedVersion': '1.0.0',
            'job': {
                'name': 'job-a',
                'jobVersion': '1.0.0',
                'packageVersion': '1.0.0',
                'title': '',
                'description': '',
                'maintainer': {
                    'name': 'John Doe',
                    'email': '*****@*****.**'
                },
                'timeout': 10,
                'interface': {
                    'command': '',
                    'inputs': {
                        'files': [],
                        'json': []
                    },
                    'outputs': {
                        'files': [{
                            'name': 'output_a',
                            'pattern': '*.png'
                        }]
                    }
                }
            }
        }
        job_type_a = job_test_utils.create_job_type(interface=manifest_a)
        output_data_a = Data()
        output_data_a.add_value(FileValue('output_a', [file_1.id]))
        output_data_a_dict = convert_data_to_v6_json(output_data_a).get_dict()
        manifest_b = {
            'seedVersion': '1.0.0',
            'job': {
                'name': 'job-b',
                'jobVersion': '1.0.0',
                'packageVersion': '1.0.0',
                'title': '',
                'description': '',
                'maintainer': {
                    'name': 'John Doe',
                    'email': '*****@*****.**'
                },
                'timeout': 10,
                'interface': {
                    'command': '',
                    'inputs': {
                        'files': [],
                        'json': []
                    },
                    'outputs': {
                        'files': [{
                            'name': 'output_b',
                            'pattern': '*.png',
                            'multiple': True
                        }]
                    }
                }
            }
        }
        job_type_b = job_test_utils.create_job_type(interface=manifest_b)
        output_data_b = Data()
        output_data_b.add_value(
            FileValue('output_b', [file_2.id, file_3.id, file_4.id]))
        output_data_b_dict = convert_data_to_v6_json(output_data_b).get_dict()
        job_a = job_test_utils.create_job(job_type=job_type_a,
                                          num_exes=1,
                                          status='COMPLETED',
                                          output=output_data_a_dict)
        job_b = job_test_utils.create_job(job_type=job_type_b,
                                          num_exes=1,
                                          status='COMPLETED',
                                          output=output_data_b_dict)
        sub_recipe_interface_c = Interface()
        sub_recipe_interface_c.add_parameter(
            FileParameter('input_a', ['image/png']))
        sub_recipe_interface_c.add_parameter(
            FileParameter('input_b', ['image/png'], multiple=True))
        sub_recipe_interface_c.add_parameter(JsonParameter(
            'input_c', 'string'))
        sub_recipe_def_c = RecipeDefinition(sub_recipe_interface_c)
        sub_recipe_def_dict_c = convert_recipe_definition_to_v6_json(
            sub_recipe_def_c).get_dict()
        sub_recipe_type_c = recipe_test_utils.create_recipe_type(
            definition=sub_recipe_def_dict_c)
        sub_recipe_c = recipe_test_utils.create_recipe(
            recipe_type=sub_recipe_type_c)

        recipe_interface = Interface()
        recipe_interface.add_parameter(JsonParameter('recipe_input', 'string'))
        definition = RecipeDefinition(recipe_interface)
        definition.add_job_node('node_a', job_type_a.name, job_type_a.version,
                                job_type_a.revision_num)
        definition.add_job_node('node_b', job_type_b.name, job_type_b.version,
                                job_type_b.revision_num)
        definition.add_recipe_node('node_c', sub_recipe_type_c.name,
                                   sub_recipe_type_c.revision_num)
        definition.add_recipe_input_connection('node_c', 'input_c',
                                               'recipe_input')
        definition.add_dependency('node_c', 'node_a')
        definition.add_dependency_input_connection('node_c', 'input_a',
                                                   'node_a', 'output_a')
        definition.add_dependency('node_c', 'node_b')
        definition.add_dependency_input_connection('node_c', 'input_b',
                                                   'node_b', 'output_b')
        def_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
        recipe_type = recipe_test_utils.create_recipe_type(definition=def_dict)
        recipe_data = Data()
        recipe_data.add_value(JsonValue('recipe_input', 'hello'))
        recipe_data_dict = convert_data_to_v6_json(recipe_data).get_dict()
        recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type,
                                                 input=recipe_data_dict)
        recipe_node_a = recipe_test_utils.create_recipe_node(
            recipe=recipe, node_name='node_a', job=job_a)
        recipe_node_b = recipe_test_utils.create_recipe_node(
            recipe=recipe, node_name='node_b', job=job_b)
        recipe_node_c = recipe_test_utils.create_recipe_node(
            recipe=recipe, node_name='node_c', sub_recipe=sub_recipe_c)
        RecipeNode.objects.bulk_create(
            [recipe_node_a, recipe_node_b, recipe_node_c])
        job_a.recipe = recipe
        job_a.save()
        job_b.recipe = recipe
        job_b.save()
        sub_recipe_c.recipe = recipe
        sub_recipe_c.save()

        # Create message
        message = ProcessRecipeInput()
        message.recipe_id = sub_recipe_c.id

        # Execute message
        result = message.execute()
        self.assertTrue(result)

        sub_recipe_c = Recipe.objects.get(id=sub_recipe_c.id)
        # Check for update_recipes message
        self.assertEqual(len(message.new_messages), 1)
        self.assertEqual(message.new_messages[0].type, 'update_recipes')

        # Check sub-recipe for expected input_file_size
        self.assertEqual(sub_recipe_c.input_file_size, 24469.0)
        # Check sub-recipe for expected input data
        self.assertNotEqual(sub_recipe_c.input['version'],
                            '1.0')  # Should not be legacy
        self.assertSetEqual(set(sub_recipe_c.get_input_data().values.keys()),
                            {'input_a', 'input_b', 'input_c'})
        self.assertListEqual(
            sub_recipe_c.get_input_data().values['input_a'].file_ids,
            [file_1.id])
        self.assertListEqual(
            sub_recipe_c.get_input_data().values['input_b'].file_ids,
            [file_2.id, file_3.id, file_4.id])
        self.assertEqual(sub_recipe_c.get_input_data().values['input_c'].value,
                         'hello')

        # Make sure sub-recipe input file models are created
        input_files = RecipeInputFile.objects.filter(recipe_id=sub_recipe_c.id)
        self.assertEqual(len(input_files), 4)
        file_ids = {input_file.input_file_id for input_file in input_files}
        self.assertSetEqual(file_ids,
                            {file_1.id, file_2.id, file_3.id, file_4.id})

        # Test executing message again
        message_json_dict = message.to_json()
        message = ProcessRecipeInput.from_json(message_json_dict)
        result = message.execute()
        self.assertTrue(result)

        # Still should have update_recipes message
        self.assertEqual(len(message.new_messages), 1)
        self.assertEqual(message.new_messages[0].type, 'update_recipes')

        # Make sure recipe input file models are unchanged
        input_files = RecipeInputFile.objects.filter(recipe_id=sub_recipe_c.id)
        self.assertEqual(len(input_files), 4)
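
Where the 24469.0 in the input_file_size assertion above comes from: it is the sum of the four input file sizes converted from bytes to MiB, apparently rounded up by the model:

    total_bytes = 104857600.0 + 987654321.0 + 65456.0 + 24564165456.0
    print(total_bytes / (1024 * 1024))  # ~24468.23 MiB, stored as 24469.0
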
Code Example #20
    def test_validate(self):
        """Tests calling JsonValue.validate()"""

        file_param = FileParameter('input_1', ['application/json'])
        json_param = JsonParameter('input_1', 'string')
        json_value = JsonValue('input_1', 'hello')

        # Invalid parameter type
        with self.assertRaises(InvalidData) as context:
            json_value.validate(file_param)
        self.assertEqual(context.exception.error.name, 'MISMATCHED_PARAM_TYPE')

        # Invalid array
        json_param = JsonParameter('input_1', 'array')
        json_value = JsonValue('input_1', 123)
        with self.assertRaises(InvalidData) as context:
            json_value.validate(json_param)
        self.assertEqual(context.exception.error.name, 'INVALID_JSON_TYPE')

        # Invalid boolean
        json_param = JsonParameter('input_1', 'boolean')
        json_value = JsonValue('input_1', 123)
        with self.assertRaises(InvalidData) as context:
            json_value.validate(json_param)
        self.assertEqual(context.exception.error.name, 'INVALID_JSON_TYPE')

        # Invalid integer
        json_param = JsonParameter('input_1', 'integer')
        json_value = JsonValue('input_1', 123.5)
        with self.assertRaises(InvalidData) as context:
            json_value.validate(json_param)
        self.assertEqual(context.exception.error.name, 'INVALID_JSON_TYPE')

        # Invalid number
        json_param = JsonParameter('input_1', 'number')
        json_value = JsonValue('input_1', 'foo')
        with self.assertRaises(InvalidData) as context:
            json_value.validate(json_param)
        self.assertEqual(context.exception.error.name, 'INVALID_JSON_TYPE')

        # Invalid object
        json_param = JsonParameter('input_1', 'object')
        json_value = JsonValue('input_1', 123)
        with self.assertRaises(InvalidData) as context:
            json_value.validate(json_param)
        self.assertEqual(context.exception.error.name, 'INVALID_JSON_TYPE')

        # Invalid string
        json_param = JsonParameter('input_1', 'string')
        json_value = JsonValue('input_1', 123)
        with self.assertRaises(InvalidData) as context:
            json_value.validate(json_param)
        self.assertEqual(context.exception.error.name, 'INVALID_JSON_TYPE')

        # Valid array value
        json_param = JsonParameter('input_1', 'array')
        json_value = JsonValue('input_1', [1, 2, 3])
        warnings = json_value.validate(json_param)
        self.assertListEqual(warnings, [])

        # Valid boolean value
        json_param = JsonParameter('input_1', 'boolean')
        json_value = JsonValue('input_1', True)
        warnings = json_value.validate(json_param)
        self.assertListEqual(warnings, [])

        # Valid integer value
        json_param = JsonParameter('input_1', 'integer')
        json_value = JsonValue('input_1', 1234)
        warnings = json_value.validate(json_param)
        self.assertListEqual(warnings, [])

        # Valid number value
        json_param = JsonParameter('input_1', 'number')
        json_value = JsonValue('input_1', 1234.5)
        warnings = json_value.validate(json_param)
        self.assertListEqual(warnings, [])

        # Valid object value
        json_param = JsonParameter('input_1', 'object')
        json_value = JsonValue('input_1', {'foo': 'bar'})
        warnings = json_value.validate(json_param)
        self.assertListEqual(warnings, [])

        # Valid string value
        json_param = JsonParameter('input_1', 'string')
        json_value = JsonValue('input_1', 'hello')
        warnings = json_value.validate(json_param)
        self.assertListEqual(warnings, [])
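
The cases above suggest the following mapping from JsonParameter types to accepted Python value types; this is a summary inferred from the test, not taken from the library source:

    ACCEPTED_JSON_TYPES = {
        'array': list,           # [1, 2, 3] is valid, 123 is not
        'boolean': bool,         # True is valid, 123 is not
        'integer': int,          # 1234 is valid, 123.5 is not
        'number': (int, float),  # 1234.5 is valid, 'foo' is not; int acceptance is assumed
        'object': dict,          # {'foo': 'bar'} is valid, 123 is not
        'string': str,           # 'hello' is valid, 123 is not
    }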