def save_logbook(self, book):
    """Insert or update *book* (and its flow details) in one transaction.

    If a logbook with ``book.uuid`` already exists, the stored copy is
    merged with *book* and the merged logbook is returned; otherwise
    *book* is inserted as-is and returned unchanged.

    :raises exc.StorageFailure: if the database operation fails.
    """
    try:
        with self.session.begin():
            query = dbapi.model_query(
                self.session, LogBook).filter_by(uuid=book.uuid)
            logbook = query.one_or_none()
            if logbook:
                # Merge the incoming book into the already stored copy.
                e_lb = self._converter.convert_book(logbook)
                self._converter.populate_book(self.session, e_lb)
                e_lb.merge(book)
                logbook.update(e_lb.to_dict())
                for fd in book:
                    e_fd = e_lb.find(fd.uuid)
                    if e_fd is None:
                        # Flow detail not seen before; attach + insert.
                        e_lb.add(fd)
                        self._insert_flow_details(fd, e_lb.uuid)
                    else:
                        self._update_flow_details(fd, e_fd)
                return e_lb
            else:
                # First time this book has been seen; insert everything.
                self.session.add(LogBook(**book.to_dict()))
                self.session.flush()
                for fd in book:
                    self._insert_flow_details(fd, book.uuid)
                return book
    # NOTE(review): widened from sa_exc.DBAPIError so that ORM-level
    # errors (SQLAlchemyError subclasses that are not DBAPIError) are
    # also wrapped, matching the sibling methods that already catch
    # sa_exc.SQLAlchemyError.
    except sa_exc.SQLAlchemyError:
        exc.raise_with_cause(exc.StorageFailure,
                             "Failed saving logbook '%s'" % book.uuid)
def _item_from_single(provider, container, looking_for):
    """Extract and return the item that a *single* provider produced."""
    try:
        return _item_from(container, provider.index)
    except _EXTRACTION_EXCEPTIONS:
        # Translate low-level extraction failures into a NotFound that
        # carries context about what we were looking for and where.
        msg = ("Unable to find result %r, expected to be able to find it"
               " created by %s but was unable to perform successful"
               " extraction" % (looking_for, provider))
        exceptions.raise_with_cause(exceptions.NotFound, msg)
def clear_all(self):
    """Delete every stored logbook.

    Deletes row-by-row (presumably so per-object ORM cascade/cleanup
    rules fire for each logbook — TODO confirm against the models).

    :raises exc.StorageFailure: if the database operation fails.
    """
    try:
        with self.session.begin():
            query = dbapi.model_query(self.session, LogBook)
            for logbook in query:
                self.session.delete(logbook)
            self.session.flush()
    # NOTE(review): widened from sa_exc.DBAPIError for consistency with
    # the sibling methods that already catch sa_exc.SQLAlchemyError, so
    # ORM-level failures are wrapped as StorageFailure too.
    except sa_exc.SQLAlchemyError:
        exc.raise_with_cause(exc.StorageFailure,
                             "Failed clearing all entries")
def destroy_logbook(self, book_uuid):
    """Delete the logbook with the given uuid.

    :raises exc.NotFound: if no logbook with ``book_uuid`` exists.
    :raises exc.StorageFailure: if the database operation fails.
    """
    try:
        with self.session.begin():
            query = dbapi.model_query(
                self.session, LogBook).filter_by(uuid=book_uuid)
            # Bulk delete returns the number of matched rows; zero means
            # there was nothing to destroy.
            if query.delete() == 0:
                raise exc.NotFound("No logbook found with uuid '%s'"
                                   % book_uuid)
    # NOTE(review): widened from sa_exc.DBAPIError for consistency with
    # the sibling methods that already catch sa_exc.SQLAlchemyError
    # (exc.NotFound is not a SQLAlchemyError, so it still propagates).
    except sa_exc.SQLAlchemyError:
        exc.raise_with_cause(exc.StorageFailure,
                             "Failed destroying logbook '%s'" % book_uuid)
def get_atoms_for_flow(self, fd_uuid):
    """Yield the atom details belonging to flow detail ``fd_uuid``.

    :raises exc.StorageFailure: if the database operation fails.
    """
    gathered = []
    try:
        with self.session.begin():
            # Materialize everything inside the transaction; yielding
            # lazily would hold the transaction open across consumer
            # code (and consumer exceptions would hit our handler).
            for ad in self._converter.atom_query_iter(
                    self.session, fd_uuid):
                gathered.append(ad)
    # NOTE(review): widened from sa_exc.DBAPIError for consistency with
    # the sibling methods that already catch sa_exc.SQLAlchemyError.
    except sa_exc.SQLAlchemyError:
        exc.raise_with_cause(
            exc.StorageFailure,
            "Failed getting atom details in flow"
            " detail '%s'" % fd_uuid)
    for atom_details in gathered:
        yield atom_details
def get_atom_details(self, ad_uuid):
    """Fetch and convert the atom detail with the given uuid."""
    try:
        with self.session.begin():
            # Query construction is lazy, so building it inside the
            # transaction block changes nothing but reads more clearly.
            row = dbapi.model_query(
                self.session,
                AtomDetail).filter_by(uuid=ad_uuid).one_or_none()
            if not row:
                raise exc.NotFound("No atom details found with uuid"
                                   " '%s'" % ad_uuid)
            return self._converter.convert_atom_detail(row)
    except sa_exc.SQLAlchemyError:
        exc.raise_with_cause(
            exc.StorageFailure,
            "Failed getting atom details with"
            " uuid '%s'" % ad_uuid)
def get_logbooks(self, lazy=False):
    """Yield all stored logbooks.

    :param lazy: when false (the default) each logbook is populated
        (its flow details loaded) before being yielded.
    :raises exc.StorageFailure: if the database operation fails.
    """
    gathered = []
    try:
        query = dbapi.model_query(self.session, LogBook)
        with self.session.begin():
            # Materialize inside the transaction; yield afterwards so
            # consumer exceptions are not mistaken for storage failures.
            for logbook in query:
                book = self._converter.convert_book(logbook)
                if not lazy:
                    self._converter.populate_book(self.session, book)
                gathered.append(book)
    # NOTE(review): widened from sa_exc.DBAPIError for consistency with
    # the sibling methods that already catch sa_exc.SQLAlchemyError.
    except sa_exc.SQLAlchemyError:
        exc.raise_with_cause(exc.StorageFailure,
                             "Failed getting logbooks")
    for book in gathered:
        yield book
def get_flows_for_book(self, book_uuid, lazy=False):
    """Yield the flow details stored under logbook ``book_uuid``.

    :param lazy: when false (the default) each flow detail is populated
        (its atom details loaded) before being yielded.
    :raises exc.StorageFailure: if the database operation fails.
    """
    gathered = []
    try:
        with self.session.begin():
            # Materialize inside the transaction; yield afterwards so
            # consumer exceptions are not mistaken for storage failures.
            for fd in self._converter.flow_query_iter(
                    self.session, book_uuid):
                if not lazy:
                    self._converter.populate_flow_detail(self.session, fd)
                gathered.append(fd)
    # NOTE(review): widened from sa_exc.DBAPIError for consistency with
    # the sibling methods that already catch sa_exc.SQLAlchemyError.
    except sa_exc.SQLAlchemyError:
        exc.raise_with_cause(
            exc.StorageFailure,
            "Failed getting flow details in"
            " logbook '%s'" % book_uuid)
    for flow_details in gathered:
        yield flow_details
def get_logbook(self, book_uuid, lazy=False):
    """Fetch a single logbook by uuid.

    :param lazy: when false (the default) the logbook is populated
        (its flow details loaded) before being returned.
    :raises exc.NotFound: if no logbook with ``book_uuid`` exists.
    :raises exc.StorageFailure: if the database operation fails.
    """
    try:
        with self.session.begin():
            query = dbapi.model_query(
                self.session, LogBook).filter_by(uuid=book_uuid)
            logbook = query.one_or_none()
            if not logbook:
                raise exc.NotFound("No logbook found with"
                                   " uuid '%s'" % book_uuid)
            book = self._converter.convert_book(logbook)
            if not lazy:
                self._converter.populate_book(self.session, book)
            return book
    # NOTE(review): widened from sa_exc.DBAPIError for consistency with
    # the sibling methods that already catch sa_exc.SQLAlchemyError
    # (exc.NotFound is not a SQLAlchemyError, so it still propagates).
    except sa_exc.SQLAlchemyError:
        exc.raise_with_cause(exc.StorageFailure,
                             "Failed getting logbook '%s'" % book_uuid)
def get_flow_details(self, fd_uuid, lazy=False):
    """Fetch the flow detail with the given uuid.

    When ``lazy`` is false (the default) the flow detail is populated
    (its atom details loaded) before being returned.
    """
    try:
        with self.session.begin():
            # Query construction is lazy, so building it inside the
            # transaction block changes nothing but reads more clearly.
            row = dbapi.model_query(
                self.session,
                FlowDetail).filter_by(uuid=fd_uuid).one_or_none()
            if not row:
                raise exc.NotFound("No flow details found with uuid"
                                   " '%s'" % fd_uuid)
            fd = self._converter.convert_flow_detail(row)
            if not lazy:
                self._converter.populate_flow_detail(self.session, fd)
            return fd
    except sa_exc.SQLAlchemyError:
        exc.raise_with_cause(
            exc.StorageFailure,
            "Failed getting flow details with"
            " uuid '%s'" % fd_uuid)
def update_atom_details(self, atom_detail):
    """Merge *atom_detail* into its stored counterpart and return it."""
    try:
        with self.session.begin():
            row = dbapi.model_query(
                self.session,
                AtomDetail).filter_by(uuid=atom_detail.uuid).one_or_none()
            if not row:
                raise exc.NotFound("No atom details found with uuid '%s'"
                                   % atom_detail.uuid)
            e_ad = self._converter.convert_atom_detail(row)
            self._update_atom_details(atom_detail, e_ad)
            return e_ad
    except sa_exc.SQLAlchemyError:
        exc.raise_with_cause(
            exc.StorageFailure,
            "Failed updating atom details"
            " with uuid '%s'" % atom_detail.uuid)
def update_flow_details(self, flow_detail):
    """Merge *flow_detail* into its stored counterpart and return it."""
    try:
        with self.session.begin():
            row = dbapi.model_query(
                self.session,
                FlowDetail).filter_by(uuid=flow_detail.uuid).one_or_none()
            if not row:
                raise exc.NotFound("No flow details found with uuid '%s'"
                                   % flow_detail.uuid)
            e_fd = self._converter.convert_flow_detail(row)
            # Load the existing children before merging so the update
            # sees the full stored state.
            self._converter.populate_flow_detail(self.session, e_fd)
            self._update_flow_details(flow_detail, e_fd)
            return e_fd
    except sa_exc.SQLAlchemyError:
        exc.raise_with_cause(
            exc.StorageFailure,
            "Failed updating flow details with"
            " uuid '%s'" % flow_detail.uuid)
def _find(self, looking_for, scope_walker=None,
          short_circuit=True, find_potentials=False):
    """Locate the providers (and their results) of name ``looking_for``.

    Returns a two-tuple of (searched_providers, providers_and_results)
    where the latter is a list of ``(provider, results)`` pairs.

    :param scope_walker: iterable of iterables of accessible atom names,
        walked outward scope-by-scope (empty list when not given).
    :param short_circuit: when true, return as soon as any scope level
        (or the default providers) yields at least one match.
    :param find_potentials: when true, tolerate providers whose results
        are not yet extractable (recording them with empty results for
        atom providers).
    """
    if scope_walker is None:
        scope_walker = []
    default_providers, atom_providers = self.providers_fetcher(looking_for)
    searched_providers = set()
    providers_and_results = []
    if default_providers:
        for p in default_providers:
            searched_providers.add(p)
            try:
                provider_results = self._try_get_results(
                    looking_for, p, find_potentials=find_potentials,
                    # For default providers always look into their
                    # results as default providers are statically setup
                    # and therefore looking into their provided results
                    # should fail early.
                    look_into_results=True)
            except exceptions.NotFound:
                if not find_potentials:
                    raise
            else:
                providers_and_results.append((p, provider_results))
        if short_circuit:
            # Default providers matched; don't bother walking scopes.
            return (searched_providers, providers_and_results)
    if not atom_providers:
        # Nothing else can possibly provide this name.
        return (searched_providers, providers_and_results)
    atom_providers_by_name = dict((p.name, p) for p in atom_providers)
    for accessible_atom_names in iter(scope_walker):
        # *Always* retain the scope ordering (if any matches
        # happen); instead of retaining the possible provider match
        # order (which isn't that important and may be different from
        # the scope requested ordering).
        maybe_atom_providers = [
            atom_providers_by_name[atom_name]
            for atom_name in accessible_atom_names
            if atom_name in atom_providers_by_name
        ]
        tmp_providers_and_results = []
        if find_potentials:
            # Potential matches only; results are intentionally left
            # empty since they may not be extractable yet.
            for p in maybe_atom_providers:
                searched_providers.add(p)
                tmp_providers_and_results.append((p, {}))
        else:
            for p in maybe_atom_providers:
                searched_providers.add(p)
                try:
                    # Don't at this point look into the provider results
                    # as calling code will grab all providers, and then
                    # get the result from the *first* provider that
                    # actually provided it (or die).
                    provider_results = self._try_get_results(
                        looking_for, p, find_potentials=find_potentials,
                        look_into_results=False)
                except exceptions.DisallowedAccess as e:
                    if e.state != states.IGNORE:
                        exceptions.raise_with_cause(
                            exceptions.NotFound,
                            "Expected to be able to find output %r"
                            " produced by %s but was unable to get at"
                            " that providers results" % (looking_for, p))
                    else:
                        # Ignored atoms are skipped rather than treated
                        # as an error.
                        LOG.trace(
                            "Avoiding using the results of"
                            " %r (from %s) for name %r because"
                            " it was ignored", p.name, p, looking_for)
                else:
                    tmp_providers_and_results.append((p, provider_results))
        if tmp_providers_and_results and short_circuit:
            # First scope level with matches wins.
            return (searched_providers, tmp_providers_and_results)
        else:
            providers_and_results.extend(tmp_providers_and_results)
    return (searched_providers, providers_and_results)