Example #1
    def process_event(self, event, last_event=None):
        """See :meth:`job.clock.ClockEventProcessor.process_event`.

        Compares the new event with the last event and schedules any missing metrics jobs.
        """

        # Attempt to get the daily metrics job type; filter().last() returns
        # None instead of raising DoesNotExist, so check for None explicitly
        job_type = JobType.objects.filter(name='scale-daily-metrics').last()
        if not job_type:
            raise ClockEventError(
                'Missing required job type: scale-daily-metrics')

        if last_event:
            # Build a list of days that require metrics
            day_count = xrange(
                (event.occurred.date() - last_event.occurred.date()).days)
            days = [
                last_event.occurred.date() + datetime.timedelta(days=d)
                for d in day_count
            ]
        else:
            # Use the previous day when first triggered
            days = [timezone.now().date() - datetime.timedelta(days=1)]

        # Schedule one job for each required day
        for day in days:
            job_data = Data()
            job_data.add_value(JsonValue('DAY', day.strftime('%Y-%m-%d')))
            job = Queue.objects.queue_new_job_v6(job_type, job_data, event)
            CommandMessageManager().send_messages(
                create_process_job_input_messages([job.id]))
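
The day-range logic above back-fills one metrics job per calendar day from the last clock event's date up to, but not including, the new event's date. A standalone sketch of just that computation, using plain datetime and no Scale imports:

    import datetime

    def days_between(last_date, new_date):
        """Each calendar day from last_date (inclusive) to new_date (exclusive)."""
        gap = (new_date - last_date).days
        return [last_date + datetime.timedelta(days=d) for d in range(gap)]

    # days_between(datetime.date(2024, 1, 1), datetime.date(2024, 1, 4))
    # -> [2024-01-01, 2024-01-02, 2024-01-03]; the 4th is covered by the next event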
Example #2
    def process_manual_ingested_source_file(self, ingest_id, source_file, when,
                                            recipe_type_id):
        """Processes a manual ingest where a strike or scan is not involved. All database
        changes are made in an atomic transaction

        :param ingest_id:
        :type ingest_id: int

        :param source_file: The source file that was ingested
        :type source_file: :class:`source.models.SourceFile`
        :param when: When the source file was ingested
        :type when: :class:`datetime.datetime`
        :param recipe_type_id: id of the Recipe type to kick off
        :type recipe_type_id: int
        """

        # filter().first() returns None when no row matches; get() would raise
        # DoesNotExist and make the not-found branch below unreachable
        recipe_type = RecipeType.objects.filter(id=recipe_type_id).first()

        if recipe_type and recipe_type.is_active:
            recipe_data = Data()
            input_name = recipe_type.get_definition().get_input_keys()[0]
            recipe_data.add_value(FileValue(input_name, [source_file.id]))
            event = self._create_trigger_event(None, source_file, when)
            ingest_event = self._create_ingest_event(ingest_id, None,
                                                     source_file, when)

            logger.info('Queueing new recipe of type %s %s', recipe_type.name,
                        recipe_type.revision_num)
            Queue.objects.queue_new_recipe_v6(recipe_type, recipe_data, event,
                                              ingest_event)

        else:
            logger.info(
                'No recipe type found for id %s or recipe type is inactive',
                recipe_type_id)
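
Note the lookup style: QuerySet.get() raises DoesNotExist when no row matches, so a following "if recipe_type" test would never see None, while filter(...).first() returns None and lets the not-found and inactive cases share one log branch. A minimal illustration of that standard Django queryset behavior (RecipeType as used throughout these examples):

    def load_active_recipe_type(recipe_type_id):
        # .first() yields None on no match rather than raising,
        # so "missing" and "inactive" collapse into a single check
        recipe_type = RecipeType.objects.filter(id=recipe_type_id).first()
        if recipe_type is None or not recipe_type.is_active:
            return None
        return recipe_type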
Example #3
    def process_ingested_source_file_cm(self, ingest_id, source, source_file,
                                        when):
        """Processes the given ingested source file by kicking off its recipe.
        All database changes are made in an atomic transaction.

        :param ingest_id: The ID of the ingest
        :type ingest_id: int
        :param source: The strike or scan that triggered the ingest
        :type source: :class:`object`
        :param source_file: The source file that was ingested
        :type source_file: :class:`source.models.SourceFile`
        :param when: When the source file was ingested
        :type when: :class:`datetime.datetime`
        """

        # Create the recipe handler associated with the ingest strike/scan
        source_recipe_config = source.configuration['recipe']
        recipe_name = source_recipe_config['name']
        recipe_revision = source_recipe_config.get('revision_num')

        recipe_type = RecipeType.objects.filter(name=recipe_name).first()
        if recipe_revision:
            recipe_type = RecipeTypeRevision.objects.get_revision(
                recipe_name, recipe_revision).recipe_type

        # Check active status first so the not-found branch is reachable and
        # get_definition() is never called on None
        if not recipe_type or not recipe_type.is_active:
            logger.info(
                'No recipe type found for %s %s or recipe type is inactive',
                recipe_name, recipe_revision)
            return

        if not recipe_type.get_definition().get_input_keys():
            logger.info(
                'No inputs defined for recipe %s. Recipe will not be run.',
                recipe_name)
            return

        # Assuming one input per recipe, so pull the first defined input
        recipe_data = Data()
        input_name = recipe_type.get_definition().get_input_keys()[0]
        recipe_data.add_value(FileValue(input_name, [source_file.id]))
        event = self._create_trigger_event(source, source_file, when)
        ingest_event = self._create_ingest_event(ingest_id, source,
                                                 source_file, when)

        # This can cause a race condition with a slow DB.
        messages = create_recipes_messages(
            recipe_type.name, recipe_type.revision_num,
            convert_data_to_v6_json(recipe_data).get_dict(), event.id,
            ingest_event.id)
        CommandMessageManager().send_messages(messages)
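
The race-condition comment refers to the command messages possibly being consumed before the surrounding atomic transaction commits, in which case the handler may not find the new rows yet. A common Django mitigation, shown only as a sketch (Scale's actual handling may differ), is to defer the send until commit:

    from django.db import transaction

    def send_messages_after_commit(manager, messages):
        # Fires only after the enclosing transaction commits;
        # a rollback silently drops the queued send
        transaction.on_commit(lambda: manager.send_messages(messages))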
Example #4
    def test_json(self):
        """Tests converting a ProcessCondition message to and from JSON"""

        cond_interface_1 = Interface()
        cond_interface_1.add_parameter(JsonParameter('cond_int', 'integer'))
        df1 = DataFilter(filter_list=[{
            'name': 'cond_int',
            'type': 'integer',
            'condition': '==',
            'values': [0]
        }])
        definition = RecipeDefinition(cond_interface_1)
        definition.add_condition_node('node_a', cond_interface_1, df1)
        definition.add_recipe_input_connection('node_a', 'cond_int',
                                               'cond_int')

        definition_dict = convert_recipe_definition_to_v6_json(
            definition).get_dict()
        recipe_type = recipe_test_utils.create_recipe_type_v6(
            definition=definition_dict)

        data_1 = Data()
        data_1.add_value(JsonValue('cond_int', 0))
        data_1_dict = convert_data_to_v6_json(data_1).get_dict()
        recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type,
                                                 input=data_1_dict)
        condition = recipe_test_utils.create_recipe_condition(recipe=recipe,
                                                              save=True)
        recipe_test_utils.create_recipe_node(recipe=recipe,
                                             node_name='node_a',
                                             condition=condition,
                                             save=True)

        # Create message
        message = create_process_condition_messages([condition.id])[0]

        # Convert message to JSON and back, and then execute
        message_json_dict = message.to_json()
        new_message = ProcessCondition.from_json(message_json_dict)
        result = new_message.execute()

        self.assertTrue(result)
        condition = RecipeCondition.objects.get(id=condition.id)
        self.assertEqual(len(new_message.new_messages), 1)
        self.assertEqual(new_message.new_messages[0].type, 'update_recipe')
        self.assertEqual(new_message.new_messages[0].root_recipe_id, recipe.id)
        self.assertTrue(condition.is_processed)
        self.assertIsNotNone(condition.processed)
        self.assertTrue(condition.is_accepted)
Example #5
    def test_json(self):
        """Tests converting an UpdateRecipe message to and from JSON"""

        data_dict = convert_data_to_v6_json(Data()).get_dict()
        job_failed = job_test_utils.create_job(status='FAILED', input=data_dict)
        job_pending = job_test_utils.create_job(status='PENDING')
        definition = RecipeDefinition(Interface())
        definition.add_job_node('job_failed', job_failed.job_type.name, job_failed.job_type.version,
                                job_failed.job_type_rev.revision_num)
        definition.add_job_node('job_pending', job_pending.job_type.name, job_pending.job_type.version,
                                job_pending.job_type_rev.revision_num)
        definition.add_dependency('job_failed', 'job_pending')
        definition_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
        recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_dict)
        recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)
        recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_failed', job=job_failed)
        recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_pending', job=job_pending)

        # Create message
        message = create_update_recipe_message(recipe.id)

        # Convert message to JSON and back, and then execute
        message_json_dict = message.to_json()
        new_message = UpdateRecipe.from_json(message_json_dict)
        result = new_message.execute()
        self.assertTrue(result)

        # Check for message to set job_pending to BLOCKED
        self.assertEqual(len(new_message.new_messages), 1)
        msg = new_message.new_messages[0]
        self.assertEqual(msg.type, 'blocked_jobs')
        self.assertListEqual(msg._blocked_job_ids, [job_pending.id])
Example #6
    def queue_bake_jobs(self, request):
        """Creates and queues the specified number of Scale Bake jobs

        :param request: the HTTP POST request
        :type request: :class:`rest_framework.request.Request`
        :rtype: :class:`rest_framework.response.Response`
        :returns: the HTTP response to send back to the user
        """

        num = rest_util.parse_int(request, 'num')

        if num < 1:
            raise BadParameter('num must be at least 1')

        # TODO: in the future, send command message to do this asynchronously
        try:
            recipe_type = RecipeType.objects.get(name='scale-bake',
                                                 revision_num='1')
            for _ in xrange(num):
                Queue.objects.queue_new_recipe_for_user_v6(recipe_type, Data())
        except (InvalidData, InvalidRecipeData, InactiveRecipeType) as ex:
            message = 'Unable to create new recipe'
            logger.exception(message)
            raise BadParameter('%s: %s' % (message, unicode(ex)))

        return Response(status=status.HTTP_202_ACCEPTED)
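
rest_util.parse_int() pulls the 'num' field out of the POST body before the range check. A hypothetical minimal version for orientation only (the real helper's behavior beyond this call site is an assumption):

    def parse_int(request, name):
        # Hypothetical: read a POST field and require it to be an integer
        value = request.data.get(name)
        try:
            return int(value)
        except (TypeError, ValueError):
            raise BadParameter('%s must be an integer' % name)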
Example #7
    def test_json_forced_nodes(self):
        """Tests coverting a ProcessRecipeInput message to and from JSON with forced nodes provided"""

        data_dict = convert_data_to_v6_json(Data()).get_dict()
        recipe = recipe_test_utils.create_recipe(input=data_dict)
        forced_nodes = ForcedNodes()
        forced_nodes.set_all_nodes()
        forced_nodes_dict = convert_forced_nodes_to_v6(forced_nodes).get_dict()

        # Create message
        message = create_process_recipe_input_messages(
            [recipe.id], forced_nodes=forced_nodes)[0]

        # Convert message to JSON and back, and then execute
        message_json_dict = message.to_json()
        new_message = ProcessRecipeInput.from_json(message_json_dict)
        result = new_message.execute()

        self.assertTrue(result)
        recipe = Recipe.objects.get(id=recipe.id)
        self.assertEqual(len(new_message.new_messages), 1)
        msg = new_message.new_messages[0]
        self.assertEqual(msg.type, 'update_recipe')
        self.assertEqual(msg.root_recipe_id, recipe.id)
        self.assertDictEqual(
            convert_forced_nodes_to_v6(msg.forced_nodes).get_dict(),
            forced_nodes_dict)
        # Recipe should have input_file_size set to 0 (no input files)
        self.assertEqual(recipe.input_file_size, 0.0)
Example #8
    def generate_input_data(self, recipe_input_data, node_outputs,
                            optional_outputs):
        """Generates the input data for this node

        :param recipe_input_data: The input data for the recipe
        :type recipe_input_data: :class:`data.data.data.Data`
        :param node_outputs: The RecipeNodeOutput tuples stored in a dict by node name
        :type node_outputs: dict
        :param optional_outputs: Names of node outputs that are optional and may be absent
        :returns: The input data for this node
        :rtype: :class:`data.data.data.Data`

        :raises :class:`data.data.exceptions.InvalidData`: If there is a duplicate data value
        """

        input_data = Data()
        for connection in self.connections.values():
            try:
                connection.add_value_to_data(input_data, recipe_input_data,
                                             node_outputs)
            except InvalidData:
                if optional_outputs:
                    if connection.output_name not in optional_outputs:
                        logger.warning(
                            "Output name %s not found in optional outputs",
                            connection.output_name)
                        raise
                    logger.info(
                        "InvalidData exception occurred due to an optional "
                        "output not being present. Proceeding with job "
                        "execution.")
                else:
                    logger.exception(
                        "InvalidData exception occurred generating input data")
                    raise

        return input_data
Example #9
    def test_execute_input_data(self):
        """Tests calling CreateJobs.execute() with input data"""

        manifest = {
            'seedVersion': '1.0.0',
            'job': {
                'name': 'name',
                'jobVersion': '1.0.0',
                'packageVersion': '1.0.0',
                'title': 'Title',
                'description': 'This is a description',
                'maintainer': {
                    'name': 'John Doe',
                    'email': '*****@*****.**'
                },
                'timeout': 10,
                'interface': {
                    'command': 'the command'
                }
            }
        }
        job_type = job_test_utils.create_seed_job_type(manifest=manifest)
        event = trigger_test_utils.create_trigger_event()
        data = Data()

        # Create and execute message
        message = create_jobs_message(job_type.name,
                                      job_type.version,
                                      job_type.revision_num,
                                      event.id,
                                      input_data=data)
        result = message.execute()
        self.assertTrue(result)

        # Check for job creation
        self.assertEqual(
            Job.objects.filter(job_type_id=job_type.id,
                               event_id=event.id).count(), 1)

        # Check for process_job_input message
        self.assertEqual(len(message.new_messages), 1)
        msg = message.new_messages[0]
        self.assertEqual(msg.type, 'process_job_input')

        # Test executing message again
        message_json_dict = message.to_json()
        message = CreateJobs.from_json(message_json_dict)
        result = message.execute()
        self.assertTrue(result)

        # Check that a second job is not created
        self.assertEqual(
            Job.objects.filter(job_type_id=job_type.id,
                               event_id=event.id).count(), 1)

        # Check for process_job_input message
        self.assertEqual(len(message.new_messages), 1)
        msg = message.new_messages[0]
        self.assertEqual(msg.type, 'process_job_input')
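
Executing a message, serializing it with to_json(), rebuilding it with from_json(), and executing again is the recurring idempotence check in these tests: the second run must not duplicate side effects. The round-trip reduces to a small helper (an illustrative sketch, not a Scale utility):

    def roundtrip_execute(message, message_cls):
        # Rebuild the message from its JSON form and run it again;
        # callers then assert that side effects did not double up
        rebuilt = message_cls.from_json(message.to_json())
        assert rebuilt.execute()
        return rebuilt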
Example #10
    def execute(self):
        """See :meth:`messaging.messages.message.CommandMessage.execute`
        """
        from ingest.models import Ingest
        ingest_job_type = Ingest.objects.get_ingest_job_type()
        
        # Grab the ingest object
        ingest = Ingest.objects.get(pk=self.ingest_id)
        
        when = ingest.transfer_ended if ingest.transfer_ended else now()
        desc = {'file_name': ingest.file_name}

        event = None
        ingest_id = ingest.id
        with transaction.atomic():
            # Create the appropriate trigger event
            if self.create_ingest_type == STRIKE_JOB_TYPE:
                desc['strike_id'] = self.strike_id
                event = TriggerEvent.objects.create_trigger_event('STRIKE_TRANSFER', None, desc, when)
            elif self.create_ingest_type == SCAN_JOB_TYPE:
                ingest_id = Ingest.objects.get(scan_id=self.scan_id, file_name=ingest.file_name).id
                desc['scan_id'] = self.scan_id
                event = TriggerEvent.objects.create_trigger_event('SCAN_TRANSFER', None, desc, when)
            
        data = Data()
        data.add_value(JsonValue('ingest_id', ingest_id))
        data.add_value(JsonValue('workspace', ingest.workspace.name))
        if ingest.new_workspace:
            data.add_value(JsonValue('new_workspace', ingest.new_workspace.name))

        ingest_job = None
        with transaction.atomic():
            ingest_job = Queue.objects.queue_new_job_v6(ingest_job_type, data, event)
            ingest.job = ingest_job
            ingest.status = 'QUEUED'
            ingest.save()
            
        # Send message to start processing job input (done outside the transaction so the job row exists when the message is handled)
        # This can cause a race condition with a slow DB.
        job = Job.objects.get_details(ingest_job.id)
        self.new_messages.extend(create_process_job_input_messages([job.id]))
        
        return True
Example #11
    def test_add_value(self):
        """Tests calling Data.add_value()"""

        data = Data()

        file_value = FileValue('input_1', [123])
        data.add_value(file_value)

        json_value = JsonValue('input_2', {'foo': 'bar'})
        data.add_value(json_value)

        self.assertSetEqual(set(data.values.keys()), {'input_1', 'input_2'})

        # Duplicate value
        dup_value = FileValue('input_1', [123])
        with self.assertRaises(InvalidData) as context:
            data.add_value(dup_value)
        self.assertEqual(context.exception.error.name, 'DUPLICATE_VALUE')
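
The duplicate check exercised here is easy to picture: Data keys its values by name and refuses a second value under an existing name. A toy stand-in (not the real data.data.data.Data class, which raises InvalidData with a DUPLICATE_VALUE error):

    class MiniData(object):
        """Toy stand-in for Scale's Data, keyed by value name."""

        def __init__(self):
            self.values = {}

        def add_value(self, value):
            # Mirrors the DUPLICATE_VALUE rejection seen in the test above
            if value.name in self.values:
                raise ValueError('Duplicate data value: %s' % value.name)
            self.values[value.name] = value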
Example #12
    def test_execute_invalid_data(self):
        """Tests calling CreateJobs.execute() when the input data is invalid"""

        manifest = {
            'seedVersion': '1.0.0',
            'job': {
                'name': 'name',
                'jobVersion': '1.0.0',
                'packageVersion': '1.0.0',
                'title': 'Title',
                'description': 'This is a description',
                'maintainer': {
                    'name': 'John Doe',
                    'email': '*****@*****.**'
                },
                'timeout': 10,
                'interface': {
                    'command': 'the command',
                    'inputs': {
                        'files': [{
                            'name': 'input_a'
                        }]
                    }
                }
            }
        }
        job_type = job_test_utils.create_seed_job_type(manifest=manifest)
        event = trigger_test_utils.create_trigger_event()
        # Data does not provide required input_a so it is invalid
        data_dict = convert_data_to_v6_json(Data()).get_dict()

        # Create and execute message
        message = create_jobs_message(job_type.name,
                                      job_type.version,
                                      job_type.revision_num,
                                      event.id,
                                      count=10,
                                      input_data_dict=data_dict)
        result = message.execute()

        self.assertTrue(result)
        self.assertEqual(
            Job.objects.filter(job_type_id=job_type.id,
                               event_id=event.id).count(), 0)

        # Should be no new messages
        self.assertEqual(len(message.new_messages), 0)
Example #13
    def test_queue_job_timestamps(self):
        """Tests that job attributes are updated when a job is queued."""

        data_dict = convert_data_to_v6_json(Data()).get_dict()
        job = job_test_utils.create_job(num_exes=1,
                                        status='CANCELED',
                                        input=data_dict,
                                        started=timezone.now(),
                                        ended=timezone.now())

        Job.objects.update_jobs_to_queued([job], timezone.now(), requeue=True)
        job = Job.objects.get(pk=job.id)

        self.assertEqual(job.status, 'QUEUED')
        self.assertIsNotNone(job.queued)
        self.assertIsNone(job.started)
        self.assertIsNone(job.ended)
Example #14
    def execute(self):
        """See :meth:`messaging.messages.message.CommandMessage.execute`
        """
        from ingest.models import Ingest
        ingest_job_type = Ingest.objects.get_ingest_job_type()

        # Grab the ingest object
        ingest = Ingest.objects.get(pk=self.ingest_id)

        when = ingest.transfer_ended if ingest.transfer_ended else now()
        desc = {'file_name': ingest.file_name}

        event = None
        ingest_id = ingest.id
        with transaction.atomic():
            # Create the appropriate trigger event
            if self.create_ingest_type == STRIKE_JOB_TYPE:
                desc['strike_id'] = self.strike_id
                event = TriggerEvent.objects.create_trigger_event(
                    'STRIKE_TRANSFER', None, desc, when)
            elif self.create_ingest_type == SCAN_JOB_TYPE:
                ingest_id = Ingest.objects.get(scan_id=self.scan_id,
                                               file_name=ingest.file_name).id
                desc['scan_id'] = self.scan_id
                event = TriggerEvent.objects.create_trigger_event(
                    'SCAN_TRANSFER', None, desc, when)

        data = Data()
        data.add_value(JsonValue('ingest_id', ingest_id))
        data.add_value(JsonValue('workspace', ingest.workspace.name))
        if ingest.new_workspace:
            data.add_value(
                JsonValue('new_workspace', ingest.new_workspace.name))

        ingest_job = None
        with transaction.atomic():
            ingest_job = Queue.objects.queue_new_job_v6(
                ingest_job_type, data, event)
            ingest.job = ingest_job
            ingest.status = 'QUEUED'
            ingest.save()

        return True
Example #15
    def test_json_no_recipe(self):
        """Tests converting a CreateJobs message (without a recipe) to and from JSON"""

        manifest = {
            'seedVersion': '1.0.0',
            'job': {
                'name': 'name',
                'jobVersion': '1.0.0',
                'packageVersion': '1.0.0',
                'title': 'Title',
                'description': 'This is a description',
                'maintainer': {
                    'name': 'John Doe',
                    'email': '*****@*****.**'
                },
                'timeout': 10
            }
        }
        job_type = job_test_utils.create_seed_job_type(manifest=manifest)
        event = trigger_test_utils.create_trigger_event()
        data_dict = convert_data_to_v6_json(Data()).get_dict()
        count = 10

        # Create message
        message = create_jobs_message(job_type.name,
                                      job_type.version,
                                      job_type.revision_num,
                                      event.id,
                                      count=count,
                                      input_data_dict=data_dict)

        # Convert message to JSON and back, and then execute
        message_json_dict = message.to_json()
        new_message = CreateJobs.from_json(message_json_dict)
        result = new_message.execute()

        self.assertTrue(result)
        self.assertEqual(
            Job.objects.filter(job_type_id=job_type.id,
                               event_id=event.id).count(), count)

        # Check for process_job_input messages
        self.assertEqual(len(new_message.new_messages), count)
        for msg in new_message.new_messages:
            self.assertEqual(msg.type, 'process_job_input')
Example #16
    def test_json_forced_nodes(self):
        """Tests converting an UpdateRecipe message to and from JSON when forced nodes are provided"""

        data_dict = convert_data_to_v6_json(Data()).get_dict()
        job_completed = job_test_utils.create_job(status='COMPLETED', input=data_dict, output=data_dict)
        sub_recipe_type = recipe_test_utils.create_recipe_type_v6()
        definition = RecipeDefinition(Interface())
        definition.add_job_node('job_completed', job_completed.job_type.name, job_completed.job_type.version,
                                job_completed.job_type_rev.revision_num)
        definition.add_recipe_node('the_sub_recipe', sub_recipe_type.name, sub_recipe_type.revision_num)
        definition.add_dependency('job_completed', 'the_sub_recipe')
        definition_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
        recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_dict)
        recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_dict)
        recipe_test_utils.create_recipe_job(recipe=recipe, job_name='job_completed', job=job_completed)
        forced_nodes = ForcedNodes()
        sub_forced_nodes = ForcedNodes()
        sub_forced_nodes.set_all_nodes()
        forced_nodes.add_subrecipe('the_sub_recipe', sub_forced_nodes)

        # Create message
        message = create_update_recipe_message(recipe.id, forced_nodes=forced_nodes)

        # Convert message to JSON and back, and then execute
        message_json_dict = message.to_json()
        new_message = UpdateRecipe.from_json(message_json_dict)
        result = new_message.execute()
        self.assertTrue(result)

        # Check for message to create sub-recipe
        self.assertEqual(len(new_message.new_messages), 1)
        msg = new_message.new_messages[0]
        self.assertEqual(msg.type, 'create_recipes')
        self.assertEqual(msg.event_id, recipe.event_id)
        msg_forced_nodes_dict = convert_forced_nodes_to_v6(msg.forced_nodes).get_dict()
        expected_forced_nodes_dict = convert_forced_nodes_to_v6(forced_nodes).get_dict()
        self.assertDictEqual(msg_forced_nodes_dict, expected_forced_nodes_dict)
        self.assertEqual(msg.create_recipes_type, SUB_RECIPE_TYPE)
        self.assertEqual(msg.recipe_id, recipe.id)
        self.assertEqual(msg.root_recipe_id, recipe.root_superseded_recipe_id)
        self.assertIsNone(msg.superseded_recipe_id)
        sub = SubRecipe(sub_recipe_type.name, sub_recipe_type.revision_num, 'the_sub_recipe', True)
        self.assertListEqual(msg.sub_recipes, [sub])
Example #17
File: node.py Project: sau29/scale
    def generate_input_data(self, recipe_input_data, node_outputs):
        """Generates the input data for this node

        :param recipe_input_data: The input data for the recipe
        :type recipe_input_data: :class:`data.data.data.Data`
        :param node_outputs: The RecipeNodeOutput tuples stored in a dict by node name
        :type node_outputs: dict
        :returns: The input data for this node
        :rtype: :class:`data.data.data.Data`

        :raises :class:`data.data.exceptions.InvalidData`: If there is a duplicate data value
        """

        input_data = Data()

        for connection in self.connections.values():
            connection.add_value_to_data(input_data, recipe_input_data, node_outputs)

        return input_data
Example #18
File: initialize.py Project: sau29/scale
def initialize_system():
    """Performs any necessary functions needed for initializing Scale"""

    logger.info('Initializing system')

    Scheduler.objects.initialize_scheduler()

    # Make sure clock job has been created
    clock_job_type = JobType.objects.get_clock_job_type()
    count = Job.objects.filter(job_type_id=clock_job_type.id).count()
    if not count:
        logger.info('Queuing Scale Clock job')
        with transaction.atomic():
            init_event = TriggerEvent.objects.create_trigger_event(
                'SCALE_INIT', None, {}, now())
            job = Queue.objects.queue_new_job_v6(clock_job_type, Data(),
                                                 init_event)
            CommandMessageManager().send_messages(
                create_process_job_input_messages([job.id]))
Example #19
    def test_json(self):
        """Tests coverting a ProcessRecipeInput message to and from JSON"""

        data_dict = convert_data_to_v6_json(Data()).get_dict()
        recipe = recipe_test_utils.create_recipe(input=data_dict)

        # Create message
        message = ProcessRecipeInput()
        message.recipe_id = recipe.id

        # Convert message to JSON and back, and then execute
        message_json_dict = message.to_json()
        new_message = ProcessRecipeInput.from_json(message_json_dict)
        result = new_message.execute()

        self.assertTrue(result)
        recipe = Recipe.objects.get(id=recipe.id)
        self.assertEqual(len(new_message.new_messages), 1)
        self.assertEqual(new_message.new_messages[0].type, 'update_recipes')
        # Recipe should have input_file_size set to 0 (no input files)
        self.assertEqual(recipe.input_file_size, 0.0)
Example #20
    def get_data(self):
        """Returns the data represented by this JSON

        :returns: The data
        :rtype: :class:`data.data.data.Data`
        """

        data = Data()

        for name, file_ids in self._data['files'].items():
            file_value = FileValue(name, file_ids)
            data.add_value(file_value)
        for name, json in self._data['json'].items():
            json_value = JsonValue(name, json)
            data.add_value(json_value)

        return data
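
get_data() above walks the two top-level maps of the v6 data JSON, so the dict it consumes has this shape (input names invented for illustration; surrounding schema keys such as a version field are omitted):

    data_v6_dict = {
        'files': {'input_a': [1234], 'input_b': [1235, 1236]},  # name -> list of file IDs
        'json': {'input_c': 'hello'},  # name -> arbitrary JSON value
    }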
Example #21
    def test_execute_with_data(self):
        """Tests calling ProcessRecipeInput.execute() successfully when the recipe already has data populated"""

        workspace = storage_test_utils.create_workspace()
        file_1 = storage_test_utils.create_file(workspace=workspace,
                                                file_size=10485760.0)
        file_2 = storage_test_utils.create_file(workspace=workspace,
                                                file_size=104857600.0)
        file_3 = storage_test_utils.create_file(workspace=workspace,
                                                file_size=987654321.0)
        recipe_interface = Interface()
        recipe_interface.add_parameter(FileParameter('input_a',
                                                     ['text/plain']))
        recipe_interface.add_parameter(
            FileParameter('input_b', ['text/plain'], multiple=True))
        definition = RecipeDefinition(recipe_interface)
        definition_dict = convert_recipe_definition_to_v6_json(
            definition).get_dict()
        recipe_type = recipe_test_utils.create_recipe_type(
            definition=definition_dict)

        data = Data()
        data.add_value(FileValue('input_a', [file_1.id]))
        data.add_value(FileValue('input_b', [file_2.id, file_3.id]))
        data_dict = convert_data_to_v6_json(data).get_dict()
        recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type,
                                                 input=data_dict)

        # Create message
        message = ProcessRecipeInput()
        message.recipe_id = recipe.id

        # Execute message
        result = message.execute()
        self.assertTrue(result)

        recipe = Recipe.objects.get(id=recipe.id)
        # Check for update_recipes message
        self.assertEqual(len(message.new_messages), 1)
        self.assertEqual(message.new_messages[0].type, 'update_recipes')

        # Check recipe for expected input_file_size
        self.assertEqual(recipe.input_file_size, 1052.0)

        # Make sure recipe input file models are created
        recipe_input_files = RecipeInputFile.objects.filter(
            recipe_id=recipe.id)
        self.assertEqual(len(recipe_input_files), 3)
        for recipe_input_file in recipe_input_files:
            if recipe_input_file.input_file_id == file_1.id:
                self.assertEqual(recipe_input_file.recipe_input, 'input_a')
            elif recipe_input_file.input_file_id == file_2.id:
                self.assertEqual(recipe_input_file.recipe_input, 'input_b')
            elif recipe_input_file.input_file_id == file_3.id:
                self.assertEqual(recipe_input_file.recipe_input, 'input_b')
            else:
                self.fail('Invalid input file ID: %s' %
                          recipe_input_file.input_file_id)

        # Test executing message again
        message_json_dict = message.to_json()
        message = ProcessRecipeInput.from_json(message_json_dict)
        result = message.execute()
        self.assertTrue(result)

        # Still should have update_recipes message
        self.assertEqual(len(message.new_messages), 1)
        self.assertEqual(message.new_messages[0].type, 'update_recipes')

        # Make sure recipe input file models are unchanged
        recipe_input_files = RecipeInputFile.objects.filter(
            recipe_id=recipe.id)
        self.assertEqual(len(recipe_input_files), 3)
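
The expected 1052.0 follows from summing the three input file sizes and converting bytes to mebibytes (the MiB conversion is inferred from the numbers in this test, not from cited documentation):

    total_bytes = 10485760.0 + 104857600.0 + 987654321.0
    print(total_bytes / (1024 * 1024))  # ~1051.9, stored rounded as 1052.0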
Example #22
    def test_execute_with_recipe_legacy(self):
        """Tests calling ProcessRecipeInput.execute() successfully when a legacy sub-recipe has to get its data from its
        recipe
        """

        workspace = storage_test_utils.create_workspace()
        file_1 = storage_test_utils.create_file(workspace=workspace,
                                                file_size=104857600.0)
        file_2 = storage_test_utils.create_file(workspace=workspace,
                                                file_size=987654321.0)
        file_3 = storage_test_utils.create_file(workspace=workspace,
                                                file_size=65456.0)
        file_4 = storage_test_utils.create_file(workspace=workspace,
                                                file_size=24564165456.0)
        manifest_a = {
            'seedVersion': '1.0.0',
            'job': {
                'name': 'job-a',
                'jobVersion': '1.0.0',
                'packageVersion': '1.0.0',
                'title': '',
                'description': '',
                'maintainer': {
                    'name': 'John Doe',
                    'email': '*****@*****.**'
                },
                'timeout': 10,
                'interface': {
                    'command': '',
                    'inputs': {
                        'files': [],
                        'json': []
                    },
                    'outputs': {
                        'files': [{
                            'name': 'output_a',
                            'pattern': '*.png'
                        }]
                    }
                }
            }
        }
        job_type_a = job_test_utils.create_job_type(interface=manifest_a)
        output_data_a = Data()
        output_data_a.add_value(FileValue('output_a', [file_1.id]))
        output_data_a_dict = convert_data_to_v6_json(output_data_a).get_dict()
        manifest_b = {
            'seedVersion': '1.0.0',
            'job': {
                'name': 'job-b',
                'jobVersion': '1.0.0',
                'packageVersion': '1.0.0',
                'title': '',
                'description': '',
                'maintainer': {
                    'name': 'John Doe',
                    'email': '*****@*****.**'
                },
                'timeout': 10,
                'interface': {
                    'command': '',
                    'inputs': {
                        'files': [],
                        'json': []
                    },
                    'outputs': {
                        'files': [{
                            'name': 'output_b',
                            'pattern': '*.png',
                            'multiple': True
                        }]
                    }
                }
            }
        }
        job_type_b = job_test_utils.create_job_type(interface=manifest_b)
        output_data_b = Data()
        output_data_b.add_value(
            FileValue('output_b', [file_2.id, file_3.id, file_4.id]))
        output_data_b_dict = convert_data_to_v6_json(output_data_b).get_dict()
        job_a = job_test_utils.create_job(job_type=job_type_a,
                                          num_exes=1,
                                          status='COMPLETED',
                                          output=output_data_a_dict)
        job_b = job_test_utils.create_job(job_type=job_type_b,
                                          num_exes=1,
                                          status='COMPLETED',
                                          output=output_data_b_dict)
        sub_recipe_interface_c = Interface()
        sub_recipe_interface_c.add_parameter(
            FileParameter('input_a', ['image/png']))
        sub_recipe_interface_c.add_parameter(
            FileParameter('input_b', ['image/png'], multiple=True))
        sub_recipe_def_c = RecipeDefinition(sub_recipe_interface_c)
        sub_recipe_def_dict_c = convert_recipe_definition_to_v1_json(
            sub_recipe_def_c).get_dict()
        sub_recipe_type_c = recipe_test_utils.create_recipe_type(
            definition=sub_recipe_def_dict_c)
        sub_recipe_c = recipe_test_utils.create_recipe(
            recipe_type=sub_recipe_type_c)

        definition = RecipeDefinition(Interface())
        definition.add_job_node('node_a', job_type_a.name, job_type_a.version,
                                job_type_a.revision_num)
        definition.add_job_node('node_b', job_type_b.name, job_type_b.version,
                                job_type_b.revision_num)
        definition.add_recipe_node('node_c', sub_recipe_type_c.name,
                                   sub_recipe_type_c.revision_num)
        definition.add_dependency('node_c', 'node_a')
        definition.add_dependency_input_connection('node_c', 'input_a',
                                                   'node_a', 'output_a')
        definition.add_dependency('node_c', 'node_b')
        definition.add_dependency_input_connection('node_c', 'input_b',
                                                   'node_b', 'output_b')
        def_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
        recipe_type = recipe_test_utils.create_recipe_type(definition=def_dict)
        recipe_data_dict = {
            'version': '1.0',
            'input_data': [],
            'workspace_id': workspace.id
        }
        recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type,
                                                 input=recipe_data_dict)
        recipe_node_a = recipe_test_utils.create_recipe_node(
            recipe=recipe, node_name='node_a', job=job_a)
        recipe_node_b = recipe_test_utils.create_recipe_node(
            recipe=recipe, node_name='node_b', job=job_b)
        recipe_node_c = recipe_test_utils.create_recipe_node(
            recipe=recipe, node_name='node_c', sub_recipe=sub_recipe_c)
        RecipeNode.objects.bulk_create(
            [recipe_node_a, recipe_node_b, recipe_node_c])
        job_a.recipe = recipe
        job_a.save()
        job_b.recipe = recipe
        job_b.save()
        sub_recipe_c.recipe = recipe
        sub_recipe_c.save()

        # Create message
        message = ProcessRecipeInput()
        message.recipe_id = sub_recipe_c.id

        # Execute message
        result = message.execute()
        self.assertTrue(result)

        sub_recipe_c = Recipe.objects.get(id=sub_recipe_c.id)
        # Check for update_recipes message
        self.assertEqual(len(message.new_messages), 1)
        self.assertEqual(message.new_messages[0].type, 'update_recipes')

        # Check sub-recipe for expected input_file_size
        self.assertEqual(sub_recipe_c.input_file_size, 24469.0)
        # Check sub-recipe for expected input data
        self.assertEqual(
            sub_recipe_c.input['version'],
            '1.0')  # Should be legacy input data with workspace ID
        self.assertEqual(sub_recipe_c.input['workspace_id'], workspace.id)
        self.assertSetEqual(set(sub_recipe_c.get_input_data().values.keys()),
                            {'input_a', 'input_b'})
        self.assertListEqual(
            sub_recipe_c.get_input_data().values['input_a'].file_ids,
            [file_1.id])
        self.assertListEqual(
            sub_recipe_c.get_input_data().values['input_b'].file_ids,
            [file_2.id, file_3.id, file_4.id])

        # Make sure sub-recipe input file models are created
        input_files = RecipeInputFile.objects.filter(recipe_id=sub_recipe_c.id)
        self.assertEqual(len(input_files), 4)
        file_ids = {input_file.input_file_id for input_file in input_files}
        self.assertSetEqual(file_ids,
                            {file_1.id, file_2.id, file_3.id, file_4.id})

        # Test executing message again
        message_json_dict = message.to_json()
        message = ProcessRecipeInput.from_json(message_json_dict)
        result = message.execute()
        self.assertTrue(result)

        # Still should have update_recipes message
        self.assertEqual(len(message.new_messages), 1)
        self.assertEqual(message.new_messages[0].type, 'update_recipes')

        # Make sure recipe input file models are unchanged
        input_files = RecipeInputFile.objects.filter(recipe_id=sub_recipe_c.id)
        self.assertEqual(len(input_files), 4)
Example #23
    def test_convert_data_to_v6_json(self):
        """Tests calling convert_data_to_v6_json()"""

        # Try data with nothing set
        data = Data()
        json = convert_data_to_v6_json(data)
        DataV6(data=json.get_dict(), do_validate=True)  # Revalidate

        # Try data with a variety of values
        data = Data()
        data.add_value(FileValue('input_a', [1234]))
        data.add_value(FileValue('input_b', [1235, 1236]))
        data.add_value(JsonValue('input_c', 'hello'))
        data.add_value(JsonValue('input_d', 11.9))
        json = convert_data_to_v6_json(data)
        DataV6(data=json.get_dict(), do_validate=True)  # Revalidate
        self.assertSetEqual(set(json.get_data().values.keys()), {'input_a', 'input_b', 'input_c', 'input_d'})
Example #24
File: test_filter.py Project: sau29/scale
    def test_is_data_accepted(self):
        """Tests calling DataFilter.is_data_accepted()"""

        data_filter = DataFilter(all=False)
        data_filter.add_filter({'name': 'input_a', 'type': 'media-type', 'condition': '==', 'values': ['application/json']})
        data_filter.add_filter({'name': 'input_b', 'type': 'string', 'condition': 'contains', 'values': ['abcde']})
        data_filter.add_filter({'name': 'input_c', 'type': 'integer', 'condition': '>', 'values': ['0']})
        data_filter.add_filter({'name': 'input_d', 'type': 'integer', 'condition': 'between', 'values': ['0', '100']})
        data_filter.add_filter({'name': 'input_f', 'type': 'meta-data', 'condition': 'in', 'values': [['foo', 'baz']],
                                'fields': [['a', 'b']]})
        
        data = Data()

        file_value = FileValue('input_a', [self.file1.id])
        data.add_value(file_value)
        
        # first filter passes, so data is accepted if all is set to false
        self.assertTrue(data_filter.is_data_accepted(data))
        data_filter.all = True
        # other filters fail so data is not accepted
        self.assertFalse(data_filter.is_data_accepted(data))
        
        # get other filters to pass
        json_value = JsonValue('input_b', 'abcdefg')
        data.add_value(json_value)
        json_value = JsonValue('input_c', '10')
        data.add_value(json_value)
        json_value = JsonValue('input_d', 50)
        data.add_value(json_value)
        file_value = FileValue('input_f', [self.file2.id])
        data.add_value(file_value)

        self.assertTrue(data_filter.is_data_accepted(data))
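
The all flag flips the combinator: with all=False a single passing filter accepts the data, while all=True requires every filter to pass, which is exactly what the two assertions above exercise. The equivalent plain-Python reduction (a sketch of the semantics, not DataFilter's internals):

    def combine_filter_results(results, require_all):
        # results: one boolean per configured filter
        return all(results) if require_all else any(results)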
Example #25
    def test_has_completed_false(self):
        """Tests calling Recipe.has_completed() when an entire recipe has not completed"""

        data_dict = convert_data_to_v6_json(Data()).get_dict()
        job_type = job_test_utils.create_seed_job_type()
        sub_recipe_type = recipe_test_utils.create_recipe_type_v6()

        definition = RecipeDefinition(Interface())
        definition.add_job_node('A', job_type.name, job_type.version,
                                job_type.revision_num)
        definition.add_recipe_node('B', sub_recipe_type.name,
                                   sub_recipe_type.revision_num)
        definition.add_job_node('C', job_type.name, job_type.version,
                                job_type.revision_num)
        definition.add_job_node('D', job_type.name, job_type.version,
                                job_type.revision_num)
        definition.add_job_node('E', job_type.name, job_type.version,
                                job_type.revision_num)
        definition.add_job_node('F', job_type.name, job_type.version,
                                job_type.revision_num)
        definition.add_recipe_node('G', sub_recipe_type.name,
                                   sub_recipe_type.revision_num)
        definition.add_job_node('H', job_type.name, job_type.version,
                                job_type.revision_num)
        definition.add_dependency('A', 'C')
        definition.add_dependency('A', 'E')
        definition.add_dependency('A', 'H')
        definition.add_dependency('C', 'D')
        definition.add_dependency('G', 'H')

        job_a = job_test_utils.create_job(job_type=job_type,
                                          status='COMPLETED',
                                          output=data_dict,
                                          save=False)
        job_c = job_test_utils.create_job(job_type=job_type,
                                          status='COMPLETED',
                                          output=data_dict,
                                          save=False)
        job_d = job_test_utils.create_job(job_type=job_type,
                                          status='COMPLETED',
                                          output=data_dict,
                                          save=False)
        job_e = job_test_utils.create_job(job_type=job_type,
                                          status='COMPLETED',
                                          output=data_dict,
                                          save=False)
        job_f = job_test_utils.create_job(job_type=job_type,
                                          status='COMPLETED',
                                          output=data_dict,
                                          save=False)
        job_h = job_test_utils.create_job(job_type=job_type,
                                          status='COMPLETED',
                                          output=data_dict,
                                          save=False)
        Job.objects.bulk_create([job_a, job_c, job_d, job_e, job_f, job_h])

        recipe_b = recipe_test_utils.create_recipe(recipe_type=sub_recipe_type,
                                                   save=False)
        recipe_b.is_completed = True
        recipe_g = recipe_test_utils.create_recipe(recipe_type=sub_recipe_type,
                                                   save=False)
        recipe_g.is_completed = False
        Recipe.objects.bulk_create([recipe_b, recipe_g])

        definition_json_dict = convert_recipe_definition_to_v6_json(
            definition).get_dict()
        recipe_type = recipe_test_utils.create_recipe_type_v6(
            definition=definition_json_dict)
        recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)
        recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe,
                                                             node_name='A',
                                                             job=job_a,
                                                             save=False,
                                                             is_original=False)
        recipe_node_c = recipe_test_utils.create_recipe_node(recipe=recipe,
                                                             node_name='C',
                                                             job=job_c,
                                                             save=False,
                                                             is_original=False)
        recipe_node_d = recipe_test_utils.create_recipe_node(recipe=recipe,
                                                             node_name='D',
                                                             job=job_d,
                                                             save=False,
                                                             is_original=False)
        recipe_node_e = recipe_test_utils.create_recipe_node(recipe=recipe,
                                                             node_name='E',
                                                             job=job_e,
                                                             save=False)
        recipe_node_f = recipe_test_utils.create_recipe_node(recipe=recipe,
                                                             node_name='F',
                                                             job=job_f,
                                                             save=False)
        recipe_node_h = recipe_test_utils.create_recipe_node(recipe=recipe,
                                                             node_name='H',
                                                             job=job_h,
                                                             save=False)

        recipe_node_g = recipe_test_utils.create_recipe_node(
            recipe=recipe,
            node_name='G',
            sub_recipe=recipe_g,
            save=False,
            is_original=False)
        recipe_node_b = recipe_test_utils.create_recipe_node(
            recipe=recipe, node_name='B', sub_recipe=recipe_b, save=False)
        RecipeNode.objects.bulk_create([
            recipe_node_a, recipe_node_b, recipe_node_c, recipe_node_d,
            recipe_node_e, recipe_node_f, recipe_node_g, recipe_node_h
        ])

        recipe_instance = Recipe.objects.get_recipe_instance(recipe.id)
        self.assertFalse(recipe_instance.has_completed())
Example #26
    def test_validate(self):
        """Tests calling Data.validate()"""

        interface = Interface()
        data = Data()

        interface.add_parameter(FileParameter('input_1', ['application/json']))
        interface.add_parameter(JsonParameter('input_2', 'integer'))
        data.add_value(FileValue('input_1', [123]))
        data.add_value(JsonValue('input_2', 100))
        data.add_value(JsonValue('extra_input_1', 'hello'))
        data.add_value(JsonValue('extra_input_2', 'there'))

        # Valid data
        data.validate(interface)
        # Ensure extra data values are removed
        self.assertSetEqual(set(data.values.keys()), {'input_1', 'input_2'})

        # Data is missing required input 3
        interface.add_parameter(FileParameter('input_3', ['image/gif'], required=True))
        with self.assertRaises(InvalidData) as context:
            data.validate(interface)
        self.assertEqual(context.exception.error.name, 'PARAM_REQUIRED')

        data.add_value(FileValue('input_3', [999]))  # Input 3 taken care of now

        # Invalid data
        interface.add_parameter(JsonParameter('input_4', 'string'))
        mock_value = MagicMock()
        mock_value.name = 'input_4'
        mock_value.validate.side_effect = InvalidData('MOCK', '')
        data.add_value(mock_value)
        with self.assertRaises(InvalidData) as context:
            data.validate(interface)
        self.assertEqual(context.exception.error.name, 'MOCK')
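
validate() both enforces required parameters and, as the first assertion shows, prunes values the interface does not declare. The pruning step alone, as a toy sketch (the real implementation and its error handling differ):

    def prune_extra_values(values, declared_names):
        # Keep only values whose names the interface declares
        return {name: value for name, value in values.items()
                if name in declared_names}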
Example #27
    def test_get_nodes_to_process_input(self):
        """Tests calling Recipe.get_nodes_to_process_input()"""

        data_dict = convert_data_to_v6_json(Data()).get_dict()
        job_type = job_test_utils.create_seed_job_type()
        sub_recipe_type = recipe_test_utils.create_recipe_type_v6()

        # Create recipe
        definition = RecipeDefinition(Interface())
        cond_interface_1 = Interface()
        cond_interface_1.add_parameter(JsonParameter('cond_int', 'integer'))
        definition.add_job_node('A', job_type.name, job_type.version,
                                job_type.revision_num)
        df1 = DataFilter(filter_list=[
            {'name': 'cond_int', 'type': 'integer', 'condition': '==', 'values': [0]},
            {'name': 'cond_int', 'type': 'integer', 'condition': '!=', 'values': [0]},
        ], all=False)  # always True
        df2 = DataFilter(filter_list=[
            {'name': 'cond_int', 'type': 'integer', 'condition': '==', 'values': [0]},
            {'name': 'cond_int', 'type': 'integer', 'condition': '!=', 'values': [0]},
        ], all=True)  # always False
        definition.add_condition_node('B', cond_interface_1, df1)  # always True
        definition.add_condition_node('C', cond_interface_1, df1)  # always True
        definition.add_condition_node('D', cond_interface_1, df2)  # always False
        definition.add_job_node('E', job_type.name, job_type.version,
                                job_type.revision_num)
        definition.add_job_node('F', job_type.name, job_type.version,
                                job_type.revision_num)
        definition.add_recipe_node('G', sub_recipe_type.name,
                                   sub_recipe_type.revision_num)
        definition.add_recipe_node('H', sub_recipe_type.name,
                                   sub_recipe_type.revision_num)
        definition.add_dependency('A', 'D')
        definition.add_dependency('A', 'E')
        definition.add_dependency('B', 'E')
        definition.add_dependency('B', 'F')
        definition.add_dependency('C', 'F')
        definition.add_dependency('D', 'G')
        definition.add_dependency('E', 'G')
        definition.add_dependency('E', 'H')
        definition_json_dict = convert_recipe_definition_to_v6_json(
            definition).get_dict()
        recipe_type = recipe_test_utils.create_recipe_type_v6(
            definition=definition_json_dict)
        recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type,
                                                 input=data_dict)

        # Nodes A, B, and D already exist
        job_a = job_test_utils.create_job(job_type=job_type,
                                          status='COMPLETED',
                                          input=data_dict,
                                          output=data_dict,
                                          save=True)
        condition_b = recipe_test_utils.create_recipe_condition(
            is_processed=True, is_accepted=True, save=False)
        condition_d = recipe_test_utils.create_recipe_condition(
            is_processed=True, is_accepted=False, save=False)
        RecipeCondition.objects.bulk_create([condition_b, condition_d])
        recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe,
                                                             node_name='A',
                                                             job=job_a,
                                                             save=False)
        recipe_node_b = recipe_test_utils.create_recipe_node(
            recipe=recipe, node_name='B', condition=condition_b, save=False)
        recipe_node_d = recipe_test_utils.create_recipe_node(
            recipe=recipe, node_name='D', condition=condition_d, save=False)
        RecipeNode.objects.bulk_create(
            [recipe_node_a, recipe_node_b, recipe_node_d])

        recipe_instance = Recipe.objects.get_recipe_instance(recipe.id)
        nodes_to_process = recipe_instance.get_nodes_to_process_input()
        self.assertSetEqual(set(nodes_to_process.keys()), {'C', 'E'})
Example #28
File: test_models.py Project: sau29/scale
    def test_process_recipe_input(self):
        """Tests calling RecipeManager.process_recipe_input()"""

        date_1 = now()
        min_src_started_recipe_1 = date_1 - datetime.timedelta(days=200)
        max_src_ended_recipe_1 = date_1 + datetime.timedelta(days=200)
        date_2 = date_1 + datetime.timedelta(minutes=30)
        date_3 = date_1 + datetime.timedelta(minutes=40)
        date_4 = date_1 + datetime.timedelta(minutes=50)
        min_src_started_recipe_2 = date_1 - datetime.timedelta(days=500)
        max_src_ended_recipe_2 = date_1 + datetime.timedelta(days=500)
        s_class = 'A'
        s_sensor = '1'
        collection = '12345'
        task = 'abcd'
        workspace = storage_test_utils.create_workspace()
        file_1 = storage_test_utils.create_file(workspace=workspace, file_size=10485760.0,
                                                source_sensor_class=s_class, source_sensor=s_sensor,
                                                source_collection=collection, source_task=task)
        file_2 = storage_test_utils.create_file(workspace=workspace, file_size=104857600.0,
                                                source_started=date_2, source_ended=date_3,
                                                source_sensor_class=s_class, source_sensor=s_sensor,
                                                source_collection=collection, source_task=task)
        file_3 = storage_test_utils.create_file(workspace=workspace, file_size=987654321.0,
                                                source_started=min_src_started_recipe_1, source_ended=date_4)
        file_4 = storage_test_utils.create_file(workspace=workspace, file_size=46546.0,
                                                source_ended=max_src_ended_recipe_1)
        file_5 = storage_test_utils.create_file(workspace=workspace, file_size=83457.0, source_started=date_2)
        file_6 = storage_test_utils.create_file(workspace=workspace, file_size=42126588636633.0, source_ended=date_4)
        file_7 = storage_test_utils.create_file(workspace=workspace, file_size=76645464662354.0)
        file_8 = storage_test_utils.create_file(workspace=workspace, file_size=4654.0,
                                                source_started=min_src_started_recipe_2)
        file_9 = storage_test_utils.create_file(workspace=workspace, file_size=545.0, source_started=date_3,
                                                source_ended=max_src_ended_recipe_2)
        file_10 = storage_test_utils.create_file(workspace=workspace, file_size=0.154, source_ended=date_4,
                                                 source_sensor_class=s_class, source_sensor=s_sensor,
                                                 source_collection=collection, source_task=task)
        recipe_interface = Interface()
        recipe_interface.add_parameter(FileParameter('input_a', ['text/plain']))
        recipe_interface.add_parameter(FileParameter('input_b', ['text/plain'], multiple=True))
        definition = RecipeDefinition(recipe_interface)
        definition_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
        recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_dict)

        data_1 = Data()
        data_1.add_value(FileValue('input_a', [file_1.id]))
        data_1.add_value(FileValue('input_b', [file_2.id, file_3.id, file_4.id, file_5.id]))
        data_1_dict = convert_data_to_v6_json(data_1).get_dict()
        data_2 = Data()
        data_2.add_value(FileValue('input_a', [file_6.id]))
        data_2.add_value(FileValue('input_b', [file_7.id, file_8.id, file_9.id, file_10.id]))
        data_2_dict = convert_data_to_v6_json(data_2).get_dict()
        data_3 = Data()
        data_3_dict = convert_data_to_v6_json(data_3).get_dict()

        recipe_1 = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_1_dict)
        recipe_2 = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_2_dict)
        recipe_3 = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_3_dict)

        # Execute method
        Recipe.objects.process_recipe_input(recipe_1)
        Recipe.objects.process_recipe_input(recipe_2)
        Recipe.objects.process_recipe_input(recipe_3)

        # Retrieve updated recipe models
        recipes = Recipe.objects.filter(id__in=[recipe_1.id, recipe_2.id, recipe_3.id]).order_by('id')
        recipe_1 = recipes[0]
        recipe_2 = recipes[1]
        recipe_3 = recipes[2]

        # Check recipes for expected fields
        self.assertEqual(recipe_1.input_file_size, 1053.0)
        self.assertEqual(recipe_1.source_started, min_src_started_recipe_1)
        self.assertEqual(recipe_1.source_ended, max_src_ended_recipe_1)
        self.assertEqual(recipe_1.source_sensor_class, s_class)
        self.assertEqual(recipe_1.source_sensor, s_sensor)
        self.assertEqual(recipe_1.source_collection, collection)
        self.assertEqual(recipe_1.source_task, task)
        self.assertEqual(recipe_2.input_file_size, 113269857.0)
        self.assertEqual(recipe_2.source_started, min_src_started_recipe_2)
        self.assertEqual(recipe_2.source_ended, max_src_ended_recipe_2)
        self.assertEqual(recipe_2.source_sensor_class, s_class)
        self.assertEqual(recipe_2.source_sensor, s_sensor)
        self.assertEqual(recipe_2.source_collection, collection)
        self.assertEqual(recipe_2.source_task, task)
        self.assertEqual(recipe_3.input_file_size, 0.0)
        self.assertIsNone(recipe_3.source_started)
        self.assertIsNone(recipe_3.source_ended)

        # Make sure recipe input file models are created
        recipe_input_files = RecipeInputFile.objects.filter(recipe_id=recipe_1.id)
        self.assertEqual(len(recipe_input_files), 5)
        input_files_dict = {'input_a': set(), 'input_b': set()}
        for recipe_input_file in recipe_input_files:
            input_files_dict[recipe_input_file.recipe_input].add(recipe_input_file.input_file_id)
        self.assertDictEqual(input_files_dict, {'input_a': {file_1.id}, 'input_b': {file_2.id, file_3.id, file_4.id,
                                                                                    file_5.id}})
        recipe_input_files = RecipeInputFile.objects.filter(recipe_id=recipe_2.id)
        self.assertEqual(len(recipe_input_files), 5)
        input_files_dict = {'input_a': set(), 'input_b': set()}
        for recipe_input_file in recipe_input_files:
            input_files_dict[recipe_input_file.recipe_input].add(recipe_input_file.input_file_id)
        self.assertDictEqual(input_files_dict, {'input_a': {file_6.id}, 'input_b': {file_7.id, file_8.id, file_9.id,
                                                                                    file_10.id}})

        self.assertEqual(RecipeInputFile.objects.filter(recipe_id=recipe_3.id).count(), 0)
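
The assertions above suggest the rollup rule process_recipe_input() applies: the recipe's source_started is the earliest source_started across its input files, source_ended is the latest source_ended, and the sensor/collection/task fields carry over. The sketch below restates that min/max rollup on plain dicts; it is an assumption drawn from the test, not Scale's implementation, and rollup_source_range() is a name invented here.

import datetime

# Sketch of the assumed rollup: earliest start and latest end across files,
# ignoring files that have no value for the field.
def rollup_source_range(files):
    started = [f['source_started'] for f in files if f.get('source_started')]
    ended = [f['source_ended'] for f in files if f.get('source_ended')]
    return (min(started) if started else None,
            max(ended) if ended else None)

now = datetime.datetime(2020, 1, 1)
files = [{'source_started': now - datetime.timedelta(days=200)},
         {'source_started': now, 'source_ended': now + datetime.timedelta(days=200)}]
print(rollup_source_range(files))  # earliest start, latest end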
Example #29
    def test_has_completed_true(self):
        """Tests calling Recipe.has_completed() when an entire recipe has completed"""

        data_dict = convert_data_to_v6_json(Data()).get_dict()
        job_type = job_test_utils.create_seed_job_type()
        sub_recipe_type = recipe_test_utils.create_recipe_type_v6()
        cond_interface_1 = Interface()
        cond_interface_1.add_parameter(JsonParameter('cond_int', 'integer'))
        df2 = DataFilter(filter_list=[{
            'name': 'cond_int',
            'type': 'integer',
            'condition': '==',
            'values': [0]
        }, {
            'name': 'cond_int',
            'type': 'integer',
            'condition': '!=',
            'values': [0]
        }],
                         all=True)  # always False

        definition = RecipeDefinition(Interface())
        definition.add_job_node('A', job_type.name, job_type.version,
                                job_type.revision_num)
        definition.add_recipe_node('B', sub_recipe_type.name,
                                   sub_recipe_type.revision_num)
        definition.add_job_node('C', job_type.name, job_type.version,
                                job_type.revision_num)
        definition.add_job_node('D', job_type.name, job_type.version,
                                job_type.revision_num)
        definition.add_job_node('E', job_type.name, job_type.version,
                                job_type.revision_num)
        definition.add_job_node('F', job_type.name, job_type.version,
                                job_type.revision_num)
        definition.add_recipe_node('G', sub_recipe_type.name,
                                   sub_recipe_type.revision_num)
        definition.add_job_node('H', job_type.name, job_type.version,
                                job_type.revision_num)
        definition.add_condition_node('I', cond_interface_1, df2)  # False
        definition.add_job_node('J', job_type.name, job_type.version,
                                job_type.revision_num)
        definition.add_dependency('A', 'C')
        definition.add_dependency('A', 'E')
        definition.add_dependency('A', 'H')
        definition.add_dependency('C', 'D')
        definition.add_dependency('G', 'H')
        definition.add_dependency('A', 'I')
        definition.add_dependency('I', 'J')

        job_a = job_test_utils.create_job(job_type=job_type,
                                          status='COMPLETED',
                                          output=data_dict,
                                          save=False)
        job_c = job_test_utils.create_job(job_type=job_type,
                                          status='COMPLETED',
                                          output=data_dict,
                                          save=False)
        job_d = job_test_utils.create_job(job_type=job_type,
                                          status='COMPLETED',
                                          output=data_dict,
                                          save=False)
        job_e = job_test_utils.create_job(job_type=job_type,
                                          status='COMPLETED',
                                          output=data_dict,
                                          save=False)
        job_f = job_test_utils.create_job(job_type=job_type,
                                          status='COMPLETED',
                                          output=data_dict,
                                          save=False)
        job_h = job_test_utils.create_job(job_type=job_type,
                                          status='COMPLETED',
                                          output=data_dict,
                                          save=False)
        Job.objects.bulk_create([job_a, job_c, job_d, job_e, job_f, job_h])

        condition_i = recipe_test_utils.create_recipe_condition(
            is_processed=True, is_accepted=False, save=True)
        recipe_b = recipe_test_utils.create_recipe(recipe_type=sub_recipe_type,
                                                   save=False)
        recipe_b.is_completed = True
        recipe_g = recipe_test_utils.create_recipe(recipe_type=sub_recipe_type,
                                                   save=False)
        recipe_g.is_completed = True
        Recipe.objects.bulk_create([recipe_b, recipe_g])

        definition_json_dict = convert_recipe_definition_to_v6_json(
            definition).get_dict()
        recipe_type = recipe_test_utils.create_recipe_type_v6(
            definition=definition_json_dict)
        recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)
        recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe,
                                                             node_name='A',
                                                             job=job_a,
                                                             save=False,
                                                             is_original=False)
        recipe_node_c = recipe_test_utils.create_recipe_node(recipe=recipe,
                                                             node_name='C',
                                                             job=job_c,
                                                             save=False,
                                                             is_original=False)
        recipe_node_d = recipe_test_utils.create_recipe_node(recipe=recipe,
                                                             node_name='D',
                                                             job=job_d,
                                                             save=False,
                                                             is_original=False)
        recipe_node_e = recipe_test_utils.create_recipe_node(recipe=recipe,
                                                             node_name='E',
                                                             job=job_e,
                                                             save=False)
        recipe_node_f = recipe_test_utils.create_recipe_node(recipe=recipe,
                                                             node_name='F',
                                                             job=job_f,
                                                             save=False)
        recipe_node_h = recipe_test_utils.create_recipe_node(recipe=recipe,
                                                             node_name='H',
                                                             job=job_h,
                                                             save=False)
        recipe_node_i = recipe_test_utils.create_recipe_node(
            recipe=recipe, node_name='I', condition=condition_i, save=False)

        recipe_node_g = recipe_test_utils.create_recipe_node(
            recipe=recipe,
            node_name='G',
            sub_recipe=recipe_g,
            save=False,
            is_original=False)
        recipe_node_b = recipe_test_utils.create_recipe_node(
            recipe=recipe, node_name='B', sub_recipe=recipe_b, save=False)
        RecipeNode.objects.bulk_create([
            recipe_node_a, recipe_node_b, recipe_node_c, recipe_node_d,
            recipe_node_e, recipe_node_f, recipe_node_g, recipe_node_h,
            recipe_node_i
        ])

        recipe_instance = Recipe.objects.get_recipe_instance(recipe.id)
        self.assertTrue(recipe_instance.has_completed())
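
What makes has_completed() return True here even though node J never ran: J sits below condition I, which evaluated False, so that branch is pruned rather than left pending. The following is an illustrative sketch of that rule; recipe_has_completed() is an invented helper, not Scale's recipe-graph code.

# Sketch: a recipe is complete when every node is COMPLETED, a False
# condition, or downstream of a False condition (and therefore pruned).
def recipe_has_completed(statuses, parents):
    """statuses: name -> 'COMPLETED' | 'PENDING' | 'CONDITION_FALSE';
    parents: name -> iterable of parent node names."""
    def pruned(name, seen=frozenset()):
        return any(statuses.get(p) == 'CONDITION_FALSE'
                   or (p not in seen and pruned(p, seen | {p}))
                   for p in parents.get(name, ()))
    return all(s in ('COMPLETED', 'CONDITION_FALSE') or pruned(n)
               for n, s in statuses.items())

statuses = {'A': 'COMPLETED', 'I': 'CONDITION_FALSE', 'J': 'PENDING'}
parents = {'I': ['A'], 'J': ['I']}
assert recipe_has_completed(statuses, parents)  # J is pruned below I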
Example #30
    def execute(self):
        """See :meth:`messaging.messages.message.CommandMessage.execute`
        """

        # Check to see if a force stop was placed on this purge process
        results = PurgeResults.objects.get(trigger_event=self.trigger_id)
        if results.force_stop_purge:
            return True

        files_to_delete = ScaleFile.objects.filter_files(job_ids=[self.job_id])

        if files_to_delete:
            # Construct input data
            files = []
            workspaces = []

            for f in files_to_delete:
                files.append({
                    'id': f.id,
                    'file_path': f.file_path,
                    'workspace': f.workspace.name
                })
                if f.workspace.name not in [
                        k for wrkspc in workspaces for k in wrkspc.keys()
                ]:
                    workspaces.append(
                        {f.workspace.name: f.workspace.json_config})

            inputs = Data()
            inputs.add_value(JsonValue('job_id', str(self.job_id)))
            inputs.add_value(JsonValue('trigger_id', str(self.trigger_id)))
            inputs.add_value(
                JsonValue('source_file_id', str(self.source_file_id)))
            inputs.add_value(JsonValue('purge', str(self.purge)))
            inputs.add_value(JsonValue('files', json.dumps(files)))
            inputs.add_value(JsonValue('workspaces', json.dumps(workspaces)))

            # Send message to create system job to delete files
            msg = create_jobs_message(job_type_name="scale-delete-files",
                                      job_type_version="1.0.0",
                                      event_id=self.trigger_id,
                                      job_type_rev_num=1,
                                      input_data=inputs)
            self.new_messages.append(msg)

        return True
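
One note on the workspace handling above: the membership test rebuilds a flat list of keys from every accumulated dict on each iteration, which is quadratic in the number of files. A dict keyed on workspace name collects each config once in linear time; the helper below is a sketch of that simplification, not a drop-in patch for this message class.

# Sketch: de-duplicate workspaces by name with a dict instead of rescanning
# the accumulated list; assumes the same f.workspace.name / json_config
# attributes used by execute() above.
def collect_workspaces(files_to_delete):
    seen = {}
    for f in files_to_delete:
        seen.setdefault(f.workspace.name, f.workspace.json_config)
    return [{name: config} for name, config in seen.items()]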