Example #1
    def execute(self, net, color_descriptor, active_tokens, service_interfaces):
        # Gather the workflow data carried by the tokens that fired this
        # transition, along with the parallel identifier (if any).
        workflow_data = io.extract_workflow_data(net, active_tokens)
        parallel_id = ParallelIdentifier(workflow_data.get('parallel_id', []))

        # Converge the results of the parallel branches and persist them on
        # the net under this operation's id.
        outputs = self.converge_inputs(net=net, parallel_id=parallel_id)
        io.store_outputs(net=net, operation_id=self.args['operation_id'],
                outputs=outputs, parallel_id=list(parallel_id))

        # Emit a single output token that carries the workflow data forward.
        output_token = net.create_token(color=color_descriptor.color,
                color_group_idx=color_descriptor.group.idx,
                data={'workflow_data': workflow_data})

        # Return the new token and an already-fired deferred (no async work).
        return [output_token], defer.succeed(None)
Example #2
    def store_outputs_then_load_inputs(self, store_parallel_id,
            load_parallel_id):
        # Store outputs under one parallel id, then load them back as inputs
        # under a (possibly different) parallel id.
        spi = ParallelIdentifier(store_parallel_id)
        lpi = ParallelIdentifier(load_parallel_id)
        outputs = {
            'bar1': 'value A',
            'bar2': 'value B',
        }
        # Map this operation's input names to the output names of the
        # producing operation: foo1 <- bar1, foo2 <- bar2.
        input_connections = {
            self.output_operation_id: {
                'foo1': 'bar1',
                'foo2': 'bar2',
            }
        }

        io.store_outputs(net=self.net, operation_id=self.output_operation_id,
                outputs=outputs, parallel_id=spi)

        # Loading the inputs should resolve each name through the connection
        # map to the values stored above.
        expected_inputs = {
            'foo1': 'value A',
            'foo2': 'value B',
        }
        self.assertEqual(expected_inputs, io.load_inputs(net=self.net,
            input_connections=input_connections, parallel_id=lpi))
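
The connection map in this test drives how `io.load_inputs` resolves names: each input name is looked up through the producing operation's stored outputs. Below is a minimal sketch of that resolution logic, not the library's implementation; `stored_outputs` and the operation id `7` are illustrative stand-ins for the data the net actually holds.

def resolve_inputs(stored_outputs, input_connections):
    # stored_outputs:    {operation_id: {output_name: value}}   -- illustrative stand-in
    # input_connections: {operation_id: {input_name: output_name}}
    inputs = {}
    for op_id, mapping in input_connections.items():
        for input_name, output_name in mapping.items():
            inputs[input_name] = stored_outputs[op_id][output_name]
    return inputs

# With data shaped like the test above:
#   resolve_inputs({7: {'bar1': 'value A', 'bar2': 'value B'}},
#                  {7: {'foo1': 'bar1', 'foo2': 'bar2'}})
# returns {'foo1': 'value A', 'foo2': 'value B'}.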
Example #3
def read_and_store_outputs(file_obj, net, operation_id, parallel_id):
    # Parse the JSON outputs from the open file object and persist them on the
    # net for the given operation and parallel id. Assumes `json` and the
    # workflow `io` helper module are imported at module level.
    outputs = json.load(file_obj)

    io.store_outputs(net=net, operation_id=operation_id, outputs=outputs, parallel_id=parallel_id)
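
A hypothetical call to this helper is sketched below; the file name, the `net` object, and the ids are illustrative assumptions, not part of the original example.

# Usage sketch (assumes `net` is an existing net object in scope and that
# 'operation_outputs.json' contains a JSON object of output name/value pairs).
with open('operation_outputs.json') as f:
    read_and_store_outputs(
        file_obj=f,
        net=net,
        operation_id=42,   # illustrative operation id
        parallel_id=[],    # empty parallel id: no parallel context
    )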