def load(connection, flow, flow_detail=None, book=None, store=None,
         engine_cls=SerialActionEngine, **options):
    """Load a flow into an engine.

    :param connection: storage backend connection
    :param flow: flow to load
    :param flow_detail: FlowDetail that holds the state of the flow (if
        one is not provided then one will be created for you in the
        provided backend)
    :param book: LogBook to create the flow detail in if flow_detail
        is None
    :param store: dict -- data to put into storage to satisfy the flow's
        requirements
    :param engine_cls: engine class to instantiate
    :param options: options for the engine
    :returns: engine
    """
    if flow_detail is None:
        flow_detail = p_utils.create_flow_detail(flow, book=book,
                                                 connection=connection)
    try:
        engine = engine_cls(flow, flow_detail, connection, options)
    except Exception:
        raise exc.NotFound("Could not find engine '%s'" % str(engine_cls))
    else:
        if store:
            engine.storage.inject(store)
    return engine
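
# Hedged usage sketch for load() above (illustrative, not part of the
# module). It assumes taskflow's public ``linear_flow`` pattern and
# ``task.Task`` base class are available, and that ``connection`` is an
# already-open storage backend connection (the name comes from load()'s
# signature; how it is obtained is backend specific). The ``store`` dict
# pre-seeds storage so the task's ``name`` requirement is satisfied
# before the engine runs.
def _example_load_usage(connection):
    from taskflow import task
    from taskflow.patterns import linear_flow

    class MakeGreeting(task.Task):
        default_provides = 'greeting'

        def execute(self, name):
            return "Hello, %s!" % name

    flow = linear_flow.Flow('greeter').add(MakeGreeting())
    engine = load(connection, flow, store={'name': 'taskflow'})
    engine.run()
    # Results land in storage under the task's ``default_provides`` name.
    return engine.storage.fetch('greeting')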


def _get_next_value(self, values, history):
    # Fetches the next resolution result to try, removes overlapping
    # entries with what has already been tried and then returns the first
    # resolution strategy remaining.
    remaining = misc.sequence_minus(values, history.provided_iter())
    if not remaining:
        raise exc.NotFound("No elements left in collection of iterable "
                           "retry controller %s" % self.name)
    return remaining[0]


def inject_atom_args(self, atom_name, pairs, transient=True):
    """Add values into storage for a specific atom only.

    :param transient: save the data in-memory only instead of persisting
        the data to backend storage (useful for resource-like objects or
        similar objects which can **not** be persisted)

    This method injects a dictionary/pairs of arguments for an atom so
    that when that atom is scheduled for execution it will have immediate
    access to these arguments.

    .. note::

        Injected atom arguments take precedence over arguments provided by
        predecessor atoms or arguments provided by injecting into the flow
        scope (using the :py:meth:`~taskflow.storage.Storage.inject`
        method).

    .. warning::

        It should be noted that injected atom arguments (that are scoped
        to the atom with the given name) *should* be serializable whenever
        possible. This is a **requirement** for the :doc:`worker based
        engine <workers>` which **must** serialize (typically using
        ``json``) all atom :py:meth:`~taskflow.atom.Atom.execute` and
        :py:meth:`~taskflow.atom.Atom.revert` arguments to be able to
        transmit those arguments to the target worker(s). If the use-case
        being applied/desired is to later use the worker based engine then
        it is highly recommended to ensure all injected atom arguments
        (even transient ones) are serializable to avoid issues that *may*
        appear later (when an object turns out to not actually be
        serializable).
    """
    if atom_name not in self._atom_name_to_uuid:
        raise exceptions.NotFound("Unknown atom name '%s'" % atom_name)

    def save_transient():
        self._injected_args.setdefault(atom_name, {})
        self._injected_args[atom_name].update(pairs)

    def save_persistent():
        source, clone = self._atomdetail_by_name(atom_name, clone=True)
        injected = source.meta.get(META_INJECTED)
        if not injected:
            injected = {}
        injected.update(pairs)
        clone.meta[META_INJECTED] = injected
        self._with_connection(self._save_atom_detail, source, clone)

    with self._lock.write_lock():
        if transient:
            save_transient()
        else:
            save_persistent()
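
# Hedged usage sketch for inject_atom_args() above: injecting a
# non-serializable, resource-like object (an sqlite3 handle) for a single
# atom. ``storage`` is assumed to be an instance of the owning class, for
# a flow containing an atom named 'send-records' (a hypothetical name).
# transient=True keeps the handle in memory only, which (per the warning
# above) makes it unusable with the worker based engine.
def _example_inject_atom_args(storage):
    import sqlite3

    db_handle = sqlite3.connect(':memory:')
    storage.inject_atom_args('send-records', {'db_handle': db_handle},
                             transient=True)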


def destroy_logbook(self, book_uuid):
    try:
        with self.session.begin():
            query = dbapi.model_query(self.session,
                                      LogBook).filter_by(uuid=book_uuid)
            if query.delete() == 0:
                raise exc.NotFound("No logbook found with uuid '%s'"
                                   % book_uuid)
    except sa_exc.DBAPIError:
        exc.raise_with_cause(exc.StorageFailure,
                             "Failed destroying logbook '%s'" % book_uuid)


def _item_from_first_of(providers, looking_for):
    """Returns item from the *first* successful container extraction."""
    for (provider, container) in providers:
        try:
            return (provider, _item_from(container, provider.index))
        except _EXTRACTION_EXCEPTIONS:
            pass
    providers = [p[0] for p in providers]
    raise exceptions.NotFound(
        "Unable to find result %r, expected to be able to find it"
        " created by one of %s but was unable to perform successful"
        " extraction" % (looking_for, providers))


def get_atom_details(self, ad_uuid):
    try:
        query = dbapi.model_query(self.session,
                                  AtomDetail).filter_by(uuid=ad_uuid)
        with self.session.begin():
            atomdetail = query.one_or_none()
            if not atomdetail:
                raise exc.NotFound("No atom details found with uuid"
                                   " '%s'" % ad_uuid)
            return self._converter.convert_atom_detail(atomdetail)
    except sa_exc.SQLAlchemyError:
        exc.raise_with_cause(
            exc.StorageFailure,
            "Failed getting atom details with uuid '%s'" % ad_uuid)


def fetch(self, name, many_handler=None):
    """Fetch a named ``execute`` result."""
    def _many_handler(values):
        # By default we just return the first of many (unless provided
        # a different callback that can translate many results into
        # something more meaningful).
        return values[0]
    if many_handler is None:
        many_handler = _many_handler
    try:
        maybe_providers = self._reverse_mapping[name]
    except KeyError:
        raise exceptions.NotFound("Name %r is not mapped as a produced"
                                  " output by any providers" % name)
    locator = _ProviderLocator(
        self._transients,
        functools.partial(self._fetch_providers,
                          providers=maybe_providers),
        lambda atom_name: self._get(atom_name, 'last_results', 'failure',
                                    _EXECUTE_STATES_WITH_RESULTS,
                                    states.EXECUTE))
    values = []
    searched_providers, providers = locator.find(
        name, short_circuit=False,
        # NOTE(harlowja): There are no scopes used here (as of now), so
        # we just return all known providers as if it was one large
        # scope.
        scope_walker=[[p.name for p in maybe_providers]])
    for provider, results in providers:
        values.append(_item_from_single(provider, results, name))
    if not values:
        raise exceptions.NotFound(
            "Unable to find result %r, searched %s providers"
            % (name, len(searched_providers)))
    else:
        return many_handler(values)
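
# Hedged usage sketch for fetch() above: when several atoms provide the
# same named output, all collected values are handed to ``many_handler``
# (the default keeps only values[0]). ``storage`` is assumed to be an
# instance of the owning class and 'host_ip' a name provided by more than
# one atom (both hypothetical).
def _example_fetch_many(storage):
    def merge_all(values):
        # Collapse duplicate provider results into one deduplicated,
        # ordered list instead of silently keeping the first value.
        return sorted(set(values))

    return storage.fetch('host_ip', many_handler=merge_all)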


def get_logbook(self, book_uuid, lazy=False):
    try:
        with self.session.begin():
            query = dbapi.model_query(self.session,
                                      LogBook).filter_by(uuid=book_uuid)
            logbook = query.one_or_none()
            if not logbook:
                raise exc.NotFound("No logbook found with"
                                   " uuid '%s'" % book_uuid)
            book = self._converter.convert_book(logbook)
            if not lazy:
                self._converter.populate_book(self.session, book)
            return book
    except sa_exc.DBAPIError:
        exc.raise_with_cause(exc.StorageFailure,
                             "Failed getting logbook '%s'" % book_uuid)
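
# Hedged usage sketch for get_logbook() above: ``conn`` is assumed to be
# an open connection object exposing this method and ``book_uuid`` the
# uuid of an existing logbook. With lazy=True the returned book is not
# populated with its flow details.
def _example_get_logbook(conn, book_uuid):
    shallow = conn.get_logbook(book_uuid, lazy=True)  # book only
    full = conn.get_logbook(book_uuid)                # flow details loaded
    return shallow, full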


def get_flow_details(self, fd_uuid, lazy=False):
    try:
        query = dbapi.model_query(self.session,
                                  FlowDetail).filter_by(uuid=fd_uuid)
        with self.session.begin():
            flowdetail = query.one_or_none()
            if not flowdetail:
                raise exc.NotFound("No flow details found with uuid"
                                   " '%s'" % fd_uuid)
            fd = self._converter.convert_flow_detail(flowdetail)
            if not lazy:
                self._converter.populate_flow_detail(self.session, fd)
            return fd
    except sa_exc.SQLAlchemyError:
        exc.raise_with_cause(
            exc.StorageFailure,
            "Failed getting flow details with uuid '%s'" % fd_uuid)


def update_atom_details(self, atom_detail):
    try:
        with self.session.begin():
            query = dbapi.model_query(
                self.session,
                AtomDetail).filter_by(uuid=atom_detail.uuid)
            atomdetail = query.one_or_none()
            if not atomdetail:
                raise exc.NotFound("No atom details found with uuid '%s'"
                                   % atom_detail.uuid)
            e_ad = self._converter.convert_atom_detail(atomdetail)
            self._update_atom_details(atom_detail, e_ad)
            return e_ad
    except sa_exc.SQLAlchemyError:
        exc.raise_with_cause(
            exc.StorageFailure,
            "Failed updating atom details with uuid '%s'"
            % atom_detail.uuid)


def update_flow_details(self, flow_detail):
    try:
        with self.session.begin():
            query = dbapi.model_query(
                self.session,
                FlowDetail).filter_by(uuid=flow_detail.uuid)
            flowdetail = query.one_or_none()
            if not flowdetail:
                raise exc.NotFound("No flow details found with uuid '%s'"
                                   % flow_detail.uuid)
            e_fd = self._converter.convert_flow_detail(flowdetail)
            self._converter.populate_flow_detail(self.session, e_fd)
            self._update_flow_details(flow_detail, e_fd)
            return e_fd
    except sa_exc.SQLAlchemyError:
        exc.raise_with_cause(
            exc.StorageFailure,
            "Failed updating flow details with uuid '%s'"
            % flow_detail.uuid)


def fetch_mapped_args(self, args_mapping,
                      atom_name=None, scope_walker=None,
                      optional_args=None):
    """Fetch ``execute`` arguments for an atom using its args mapping."""
    def _extract_first_from(name, sources):
        """Extracts/returns first occurrence of key in list of dicts."""
        for i, source in enumerate(sources):
            if not source:
                continue
            if name in source:
                return (i, source[name])
        raise KeyError(name)
    if optional_args is None:
        optional_args = []
    if atom_name:
        source, _clone = self._atomdetail_by_name(atom_name)
        injected_sources = [
            self._injected_args.get(atom_name, {}),
            source.meta.get(META_INJECTED, {}),
        ]
        if scope_walker is None:
            scope_walker = self._scope_fetcher(atom_name)
    else:
        injected_sources = []
    if not args_mapping:
        return {}
    get_results = lambda atom_name: \
        self._get(atom_name, 'last_results', 'failure',
                  _EXECUTE_STATES_WITH_RESULTS, states.EXECUTE)
    mapped_args = {}
    for (bound_name, name) in six.iteritems(args_mapping):
        if LOG.isEnabledFor(logging.TRACE):
            if atom_name:
                LOG.trace("Looking for %r <= %r for atom '%s'",
                          bound_name, name, atom_name)
            else:
                LOG.trace("Looking for %r <= %r", bound_name, name)
        try:
            source_index, value = _extract_first_from(
                name, injected_sources)
            mapped_args[bound_name] = value
            if LOG.isEnabledFor(logging.TRACE):
                if source_index == 0:
                    LOG.trace("Matched %r <= %r to %r (from injected"
                              " atom-specific transient"
                              " values)", bound_name, name, value)
                else:
                    LOG.trace("Matched %r <= %r to %r (from injected"
                              " atom-specific persistent"
                              " values)", bound_name, name, value)
        except KeyError:
            try:
                maybe_providers = self._reverse_mapping[name]
            except KeyError:
                if bound_name in optional_args:
                    LOG.trace("Argument %r is optional, skipping",
                              bound_name)
                    continue
                raise exceptions.NotFound("Name %r is not mapped as a"
                                          " produced output by any"
                                          " providers" % name)
            locator = _ProviderLocator(
                self._transients,
                functools.partial(self._fetch_providers,
                                  providers=maybe_providers),
                get_results)
            searched_providers, providers = locator.find(
                name, scope_walker=scope_walker)
            if not providers:
                raise exceptions.NotFound(
                    "Mapped argument %r <= %r was not produced"
                    " by any accessible provider (%s possible"
                    " providers were scanned)"
                    % (bound_name, name, len(searched_providers)))
            provider, value = _item_from_first_of(providers, name)
            mapped_args[bound_name] = value
            LOG.trace("Matched %r <= %r to %r (from %s)",
                      bound_name, name, value, provider)
    return mapped_args
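
# Hedged usage sketch for fetch_mapped_args() above: resolving an atom's
# bound argument names to the flow-level names that provide them. The
# atom and argument names are hypothetical; since 'timeout' is listed in
# optional_args, a missing provider for 'boot_timeout' is skipped rather
# than raising NotFound.
def _example_fetch_mapped_args(storage):
    return storage.fetch_mapped_args(
        {'server_id': 'created_server_id', 'timeout': 'boot_timeout'},
        atom_name='wait-for-server',
        optional_args=['timeout'])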