def execute(self, net, color_descriptor, active_tokens, service_interfaces):
    """Converge parallel branches back into the parent color group.

    For each output property on this operation, collect the per-branch
    values into one array, store the array under the parent parallel id,
    then emit a single token colored for the parent color group.
    """
    op = factory.load_operation(net, self.args['operation_id'])
    group = color_descriptor.group

    workflow_data = io.extract_workflow_data(net, active_tokens)
    parent_parallel_id = _parallel_id_from_workflow_data(
            workflow_data).parent_identifier

    for prop in op.output_properties:
        collected = self.collect_array_output(
                net=net,
                operation=op,
                parallel_size=group.size,
                property_name=prop,
                parallel_id=parent_parallel_id)
        op.store_output(prop, value=collected,
                parallel_id=parent_parallel_id)

    # Rewrite the parallel id so downstream transitions see the parent scope.
    workflow_data['parallel_id'] = list(parent_parallel_id)

    token = net.create_token(
            color=group.parent_color,
            color_group_idx=group.parent_color_group_idx,
            data={'workflow_data': workflow_data})
    return [token], defer.succeed(None)
def write_outputs(self, net, operation_id, output_properties, outputs_file):
    """Dump the operation's root-level outputs to *outputs_file* as JSON.

    Does nothing when *outputs_file* is falsy.
    """
    if not outputs_file:
        return
    operation = factory.load_operation(net=net, operation_id=operation_id)
    outputs = operation.load_outputs(parallel_id=ParallelIdentifier())
    with open(outputs_file, 'w') as handle:
        json.dump(outputs, handle)
def converge_inputs(self, net, parallel_id):
    """Load this operation's inputs for *parallel_id* and reorder them
    according to the configured input/output property ordering."""
    op = factory.load_operation(net, self.args['operation_id'])
    loaded = op.load_inputs(parallel_id=parallel_id)
    return order_outputs(
            loaded,
            input_property_order=self.args['input_property_order'],
            output_properties=self.args['output_properties'])
def _load_operation(self, net_key, operation_id):
    """Return the operation for (net_key, operation_id), memoized per instance.

    The cache key normalizes operation_id to int so equivalent string and
    integer ids share a single cache entry.
    """
    key = (net_key, int(operation_id))
    try:
        return self._cached_operations[key]
    except KeyError:
        operation = load_operation(
                net=Net(self._connection, key=net_key),
                operation_id=operation_id)
        self._cached_operations[key] = operation
        return operation
def write_inputs(file_object, net, parallel_id, operation_id):
    """Serialize the operation's inputs as JSON to *file_object* and flush."""
    op = factory.load_operation(net, operation_id)
    loaded_inputs = op.load_inputs(parallel_id)
    LOG.debug("Inputs: %s", loaded_inputs)
    json.dump(loaded_inputs, file_object)
    file_object.flush()
def create_operation(self, operation_id, name, **kwargs):
    """Persist a 'direct_storage' future operation on self.net and return
    the freshly loaded operation object."""
    pending = future_operation.FutureOperation(
            operation_class='direct_storage',
            operation_id=operation_id,
            name=name,
            parent=future_operation.NullFutureOperation(),
            log_dir='/exciting/log/dir',
            **kwargs)
    pending.save(self.net)
    return factory.load_operation(self.net, operation_id)
def execute(self, net, color_descriptor, active_tokens, service_interfaces):
    """Fan out over a parallel input property.

    Loads the configured parallel input, stores its elements for the
    child branches, and creates one token per element in the new color
    group.
    """
    workflow_data = io.extract_workflow_data(net, active_tokens)
    prop = self.args['parallel_property']
    pid = _parallel_id_from_workflow_data(workflow_data)

    op = factory.load_operation(net=net,
            operation_id=self.args['operation_id'])
    values = op.load_input(name=prop, parallel_id=pid)

    self.store_parallel_input(operation=op,
            parallel_input=values,
            parallel_property=prop,
            parallel_id=pid)

    tokens = self._create_tokens(num_tokens=len(values),
            color_descriptor=color_descriptor,
            workflow_data=workflow_data,
            net=net)
    return tokens, defer.succeed(None)
def _get_operation(self, net):
    """Load and return the operation identified by self.args['operation_id']."""
    operation_id = self.args['operation_id']
    return factory.load_operation(net, operation_id)