Example #1
    def execute(self, net, color_descriptor, active_tokens, service_interfaces):
        operation_id = self.args['operation_id']
        op = factory.load_operation(net, operation_id)

        parallel_size = color_descriptor.group.size

        workflow_data = io.extract_workflow_data(net, active_tokens)
        parallel_id = _parallel_id_from_workflow_data(workflow_data)
        parent_parallel_id = parallel_id.parent_identifier

        # Gather each per-slot output into a single array value, stored under
        # the parent parallel_id.
        for property_name in op.output_properties:
            array_value = self.collect_array_output(net=net,
                    operation=op,
                    parallel_size=parallel_size,
                    property_name=property_name,
                    parallel_id=parent_parallel_id)

            op.store_output(property_name, value=array_value,
                    parallel_id=parent_parallel_id)

        # Roll workflow_data back to the parent parallel_id and emit one token
        # in the parent color group.
        workflow_data['parallel_id'] = list(parent_parallel_id)
        token = net.create_token(color=color_descriptor.group.parent_color,
            color_group_idx=color_descriptor.group.parent_color_group_idx,
            data={'workflow_data': workflow_data})

        return [token], defer.succeed(None)
Example #2
    def execute(self, net, color_descriptor, active_tokens, service_interfaces):
        # created token should have workflow_data on it so observers can
        # know what parallel_id failed.
        workflow_data = io.extract_workflow_data(net, active_tokens)
        parallel_id = _parallel_id_from_workflow_data(workflow_data)
        workflow_data['parallel_id'] = list(parallel_id.parent_identifier)

        data = {'workflow_data': workflow_data}
        token = net.create_token(color=color_descriptor.group.parent_color,
            color_group_idx=color_descriptor.group.parent_color_group_idx,
            data=data)

        return [token], defer.succeed(None)
Example #3
    def execute(self, net, color_descriptor, active_tokens, service_interfaces):
        workflow_data = io.extract_workflow_data(net, active_tokens)
        parallel_id = ParallelIdentifier(workflow_data.get('parallel_id', []))

        # Gather the converged inputs for this parallel_id and store them as
        # this operation's outputs.
        outputs = self.converge_inputs(net=net, parallel_id=parallel_id)
        io.store_outputs(net=net, operation_id=self.args['operation_id'],
                outputs=outputs, parallel_id=list(parallel_id))

        output_token = net.create_token(color=color_descriptor.color,
                color_group_idx=color_descriptor.group.idx,
                data={'workflow_data': workflow_data})

        return [output_token], defer.succeed(None)
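
Neither ParallelIdentifier nor the module-level helper _parallel_id_from_workflow_data used in Examples #1, #2 and #4 appears on this page. Judging only from the usage above (built from a list, serialized back with list(...), and asked for .parent_identifier), a minimal stand-in could look like the sketch below; the parent semantics (dropping the innermost entry) and the helper body are assumptions, not the flow project's actual code.

class ParallelIdentifier(object):
    # Minimal stand-in inferred from the calls visible in these examples.
    def __init__(self, entries=None):
        self._entries = list(entries or [])

    def __iter__(self):
        # list(parallel_id) turns the identifier back into plain workflow_data.
        return iter(self._entries)

    @property
    def parent_identifier(self):
        # Assumption: the parent identifier drops the innermost (last) entry.
        return ParallelIdentifier(self._entries[:-1])


def _parallel_id_from_workflow_data(workflow_data):
    # Presumably mirrors the inline construction shown in Example #3.
    return ParallelIdentifier(workflow_data.get('parallel_id', []))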
Example #4
    def execute(self, net, color_descriptor, active_tokens, service_interfaces):
        workflow_data = io.extract_workflow_data(net, active_tokens)

        parallel_property = self.args['parallel_property']
        parallel_id = _parallel_id_from_workflow_data(workflow_data)

        operation = factory.load_operation(net=net,
                operation_id=self.args['operation_id'])

        parallel_input = operation.load_input(
                name=parallel_property,
                parallel_id=parallel_id)

        self.store_parallel_input(operation=operation,
                parallel_input=parallel_input,
                parallel_property=parallel_property,
                parallel_id=parallel_id)

        # Fan out: create one token per element of the parallel input.
        tokens = self._create_tokens(num_tokens=len(parallel_input),
                color_descriptor=color_descriptor,
                workflow_data=workflow_data, net=net)

        return tokens, defer.succeed(None)
Example #5
    def test_extract_data_from_tokens(self):
        results = io.extract_workflow_data(self.net, self.token_keys)
        expected_results = {str(i): 'val_%d' % i
                for i in xrange(self.num_tokens)}
        self.assertEqual(expected_results, results)
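
The fixture behind Example #5 is not shown, but the assertion implies that extract_workflow_data merges the workflow_data dicts carried by the individual tokens into one flat dict. The snippet below only illustrates that expected merge in plain Python; num_tokens and the per-token dicts are made-up stand-ins, not the flow API.

num_tokens = 3  # hypothetical stand-in for self.num_tokens
per_token_data = [{str(i): 'val_%d' % i} for i in xrange(num_tokens)]

# Merge the per-token dicts the way the test expects the extracted result to look.
merged = {}
for token_data in per_token_data:
    merged.update(token_data)

assert merged == {str(i): 'val_%d' % i for i in xrange(num_tokens)}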