def execute(self, net, color_descriptor, active_tokens, service_interfaces):
    """Collapse the active input tokens into a single output token.

    With exactly one active token, that token is reused as-is.  With
    several, a fresh token is created in this color/color-group and the
    inputs' data are merged into it via ``self.merge_data``.

    Returns a ``([token], deferred)`` pair; the deferred is already fired.
    """
    if len(active_tokens) != 1:
        # Multiple inputs: mint a new token and fold all input data into it.
        merged_token = net.create_token(
                color=color_descriptor.color,
                color_group_idx=color_descriptor.group.idx)
        self.merge_data(net, merged_token, active_tokens)
        return [merged_token], defer.succeed(None)

    # Single input: pass the existing token straight through.
    sole_idx = head(active_tokens)
    return [net.token(sole_idx)], defer.succeed(None)
def load_operation(net, operation_id):
    """Reconstruct an operation object stored in the net's variables.

    The stored dict's ``_class`` entry names a ``flow_workflow.operations``
    entry point; the remaining entries become keyword arguments to the
    loaded class's constructor.  Note ``pop('_class')`` mutates the dict
    fetched from the net's variables.
    """
    op_data = net.variables[operation_variable_name(operation_id)]
    LOG.debug('Loading operation %s using dict: %s', operation_id, op_data)

    class_name = op_data.pop('_class')
    entry_point = head(pkg_resources.iter_entry_points(
            'flow_workflow.operations', class_name))
    operation_class = entry_point.load()

    LOG.debug('Loaded operation %s (%r) from net %s: %s',
            operation_id, operation_class, net.key, op_data)
    return operation_class(net=net, **op_data)
def adapter(operation_type, *args, **kwargs):
    """Instantiate the adapter registered for *operation_type*.

    Bumps the module-level ``_NEXT_OPERATION_ID`` counter and hands the
    new id to the adapter class, which is resolved through the
    ``flow_workflow.adapters`` entry-point group after sanitizing the
    operation type name.
    """
    global _NEXT_OPERATION_ID
    _NEXT_OPERATION_ID += 1

    LOG.debug('Loading adapter for operation_type %s, '
            'with args: %s -- and kwargs: %s', operation_type, args, kwargs)

    entry_point = head(pkg_resources.iter_entry_points(
            'flow_workflow.adapters',
            sanitize_operation_type(operation_type)))
    adapter_class = entry_point.load()

    return adapter_class(operation_id=_NEXT_OPERATION_ID, *args, **kwargs)
def execute(self, net, color_descriptor, active_tokens, service_interfaces):
    """Emit a copy of the sole input token with configured fields removed.

    Expects exactly one active token.  Each name in
    ``self.args['fields']`` is dropped from the token's data (missing
    fields are ignored), and a new token carrying the pruned data is
    created in the current color/color-group.

    NOTE(review): the pops mutate the dict returned by
    ``old_token.data.value`` in place — presumably that value is a fresh
    copy per access; confirm against the token implementation.
    """
    assert len(active_tokens) == 1

    source_token = net.token(head(active_tokens))
    payload = source_token.data.value
    for field_name in self.args['fields']:
        payload.pop(field_name, None)

    stripped_token = net.create_token(
            color=color_descriptor.color,
            color_group_idx=color_descriptor.group.idx,
            data=payload)
    return [stripped_token], defer.succeed(None)
def execute(self, net, color_descriptor, active_tokens, service_interfaces):
    """Report this operation's status to the workflow historian.

    Skips the update entirely when the net's ``UR_DBI_NO_COMMIT``
    environment flag is perl-truthy.  Otherwise, the parallel identifier
    is pulled from the first active token's workflow data and the actual
    historian call is delegated to ``self._execute``.

    Returns ``(tokens, deferred)`` where tokens is the (unmodified)
    mapping of the active token indexes back to token objects.
    """
    passthrough_tokens = map(net.token, active_tokens)

    if env_is_perl_true(net, 'UR_DBI_NO_COMMIT'):
        LOG.debug('UR_DBI_NO_COMMIT is set, not updating status.')
        return passthrough_tokens, defer.succeed(None)

    historian = service_interfaces['workflow_historian']
    token_data = net.token(head(active_tokens)).data.value
    workflow_data = token_data.get('workflow_data', {})
    parallel_id = ParallelIdentifier(workflow_data.get('parallel_id', []))

    update_deferred = self._execute(
            historian=historian,
            net=net,
            color_descriptor=color_descriptor,
            parallel_id=parallel_id,
            token_data=token_data)
    return passthrough_tokens, update_deferred