Example #1
 def _step_create():
     for path in (self._book_path, self._flow_path, self._task_path):
         try:
             misc.ensure_tree(path)
         except EnvironmentError as e:
             raise exc.StorageError("Unable to create logbooks"
                                    " required child path %s" % path, e)
Example #2
 def _destroy_logbook(self, session, lb_id):
     try:
         lb = _logbook_get_model(lb_id, session=session)
         session.delete(lb)
     except sa_exc.DBAPIError as e:
         LOG.exception('Failed destroying logbook')
         raise exc.StorageError("Failed destroying logbook %s" % lb_id, e)
Example #3
 def _make_session(self):
     try:
         return self._session_maker()
     except sa_exc.SQLAlchemyError as e:
         LOG.exception('Failed creating database session')
         raise exc.StorageError("Failed creating database session: %s" % e,
                                e)
Example #4
 def close(self):
     self._validated = False
     if not self._owned:
         return
     try:
         k_utils.finalize_client(self._client)
     except (k_exc.KazooException, k_exc.ZookeeperError) as e:
         raise exc.StorageError("Unable to finalize client", e)
Example #5
 def get_logbook(self, book_uuid):
     session = self._make_session()
     try:
         lb = _logbook_get_model(book_uuid, session=session)
         return _convert_lb_to_external(lb)
     except sa_exc.DBAPIError as e:
         LOG.exception('Failed getting logbook')
         raise exc.StorageError("Failed getting logbook %s" % book_uuid, e)
Example #6
 def get_logbooks(self):
     try:
         books = list(self._get_logbooks())
     except EnvironmentError as e:
         raise exc.StorageError("Unable to fetch logbooks", e)
     else:
         for b in books:
             yield b
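
Example #6 materializes the logbooks into a list inside the try block before yielding, so any EnvironmentError raised while reading them is translated into exc.StorageError as soon as the generator is first advanced, rather than partway through iteration. A minimal sketch of a caller that relies on this behavior, assuming a connection object exposing the generator above (count_books is a hypothetical helper, not part of these examples):

    def count_books(connection):
        # Hypothetical helper: iterating the generator from Example #6 can
        # only raise exc.StorageError, never a raw EnvironmentError.
        try:
            return sum(1 for _book in connection.get_logbooks())
        except exc.StorageError:
            return 0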
Example #7
def logbook_get(lb_id):
    session = db_session.get_session()
    try:
        lb_m = _logbook_get_model(lb_id, session=session)
        return _convert_lb_to_external(lb_m)
    except sql_exc.DBAPIError as e:
        raise exc.StorageError("Failed getting"
                               " logbook %s: %s" % (lb_id, e), e)
Example #8
 def _destroy_tasks(task_details):
     for task_detail in task_details:
         task_path = os.path.join(self._task_path, task_detail.uuid)
         try:
             shutil.rmtree(task_path)
         except EnvironmentError as e:
             if e.errno != errno.ENOENT:
                 raise exc.StorageError("Unable to remove task"
                                        " directory %s" % task_path, e)
Example #9
 def _clear_all(self, session):
     # NOTE(harlowja): because our relationships are set up with
     # cascading deletes enabled, this will cause all associated
     # task details and flow details to be purged automatically.
     try:
         return session.query(models.LogBook).delete()
     except sa_exc.DBAPIError as e:
         LOG.exception('Failed clearing all entries')
         raise exc.StorageError("Failed clearing all entries: %s" % e, e)
Example #10
def logbook_destroy(lb_id):
    session = db_session.get_session()
    with session.begin():
        try:
            lb = _logbook_get_model(lb_id, session=session)
            session.delete(lb)
        except sql_exc.DBAPIError as e:
            raise exc.StorageError(
                "Failed destroying"
                " logbook %s: %s" % (lb_id, e), e)
Example #11
def clear_all():
    session = db_session.get_session()
    with session.begin():
        # NOTE(harlowja): because our relationships are set up with
        # cascading deletes enabled, this will cause all associated task
        # details and flow details to be purged automatically.
        try:
            return session.query(models.LogBook).delete()
        except sql_exc.DBAPIError as e:
            raise exc.StorageError("Failed clearing all entries: %s" % e, e)
Example #12
 def get_logbooks(self):
     session = self._make_session()
     try:
         raw_books = session.query(models.LogBook).all()
         books = [_convert_lb_to_external(lb) for lb in raw_books]
     except sa_exc.DBAPIError as e:
         LOG.exception('Failed getting logbooks')
         raise exc.StorageError("Failed getting logbooks: %s" % e, e)
     for lb in books:
         yield lb
Example #13
 def _destroy_book():
     book = self._get_logbook(book_uuid)
     book_path = os.path.join(self._book_path, book.uuid)
     self._run_with_process_lock("flow", _destroy_flows, list(book))
     try:
         shutil.rmtree(book_path)
     except EnvironmentError as e:
         if e.errno != errno.ENOENT:
             raise exc.StorageError("Unable to remove book"
                                    " directory %s" % book_path, e)
Example #14
 def _destroy_flows(flow_details):
     for flow_detail in flow_details:
         flow_path = os.path.join(self._flow_path, flow_detail.uuid)
         self._run_with_process_lock("task", _destroy_tasks,
                                     list(flow_detail))
         try:
             shutil.rmtree(flow_path)
         except EnvironmentError as e:
             if e.errno != errno.ENOENT:
                 raise exc.StorageError("Unable to remove flow"
                                        " directory %s" % flow_path, e)
Example #15
 def _run_with_process_lock(self, lock_name, functor, *args, **kwargs):
     lock_path = os.path.join(self.backend.lock_path, lock_name)
     with lock_utils.InterProcessLock(lock_path):
         try:
             return functor(*args, **kwargs)
         except exc.TaskFlowException:
             raise
         except Exception as e:
             LOG.exception("Failed running locking file based session")
             # NOTE(harlowja): trap all other errors as storage errors.
             raise exc.StorageError("Storage backend internal error", e)
Example #16
 def _run_in_session(self, functor, *args, **kwargs):
     """Runs a function in a session and makes sure that sqlalchemy
     exceptions aren't emitted from that sessions actions (as that would
     expose the underlying backends exception model).
     """
     try:
         session = self._make_session()
         with session.begin():
             return functor(session, *args, **kwargs)
     except sa_exc.SQLAlchemyError as e:
         LOG.exception('Failed running database session')
         raise exc.StorageError("Storage backend internal error", e)
Example #17
 def upgrade(self):
     try:
         with contextlib.closing(self._engine.connect()) as conn:
             # NOTE(imelnikov): Alembic does not support SQLite,
             # and we don't recommend using SQLite in production
             # deployments, so migrations are rarely needed for
             # SQLite. We therefore don't bother working around
             # SQLite limitations, and create the database from
             # the models when it is in use.
             if 'sqlite' in self._engine.url.drivername:
                 models.BASE.metadata.create_all(conn)
             else:
                 migration.db_sync(conn)
     except sa_exc.SQLAlchemyError as e:
         LOG.exception('Failed upgrading database version')
         raise exc.StorageError("Failed upgrading database version", e)
Example #18
 def _exc_wrapper(self):
     """Exception wrapper which wraps kazoo exceptions and groups them
     to taskflow exceptions.
     """
     try:
         yield
     except self._client.handler.timeout_exception as e:
         raise exc.ConnectionFailure("Storage backend timeout: %s" % e)
     except k_exc.SessionExpiredError as e:
         raise exc.ConnectionFailure("Storage backend session"
                                     " has expired: %s" % e)
     except k_exc.NoNodeError as e:
         raise exc.NotFound("Storage backend node not found: %s" % e)
     except k_exc.NodeExistsError as e:
         raise exc.AlreadyExists("Storage backend duplicate node: %s" % e)
     except (k_exc.KazooException, k_exc.ZookeeperError) as e:
         raise exc.StorageError("Storage backend internal error", e)
Example #19
 def _save_logbook(self, session, lb):
     try:
         lb_m = _logbook_get_model(lb.uuid, session=session)
         # NOTE(harlowja): Merge them (note that this doesn't provide
         # 100% correct update semantics due to how databases have
         # MVCC). This is where a stored procedure or a better backing
         # store would handle this better by allowing this merge logic
         # to exist in the database itself.
         lb_m = _logbook_merge(lb_m, lb)
     except exc.NotFound:
         lb_m = _convert_lb_to_internal(lb)
     try:
         lb_m = session.merge(lb_m)
         return _convert_lb_to_external(lb_m)
     except sa_exc.DBAPIError as e:
         LOG.exception('Failed saving logbook')
         raise exc.StorageError("Failed saving logbook %s" % lb.uuid, e)
Example #20
    def upgrade(self):

        def _step_create():
            for path in (self._book_path, self._flow_path, self._task_path):
                try:
                    misc.ensure_tree(path)
                except EnvironmentError as e:
                    raise exc.StorageError("Unable to create logbooks"
                                           " required child path %s" % path, e)

        for path in (self._backend.base_path, self._backend.lock_path):
            try:
                misc.ensure_tree(path)
            except EnvironmentError as e:
                raise exc.StorageError("Unable to create logbooks required"
                                       " path %s" % path, e)

        self._run_with_process_lock("init", _step_create)
Example #21
def logbook_save(lb):
    session = db_session.get_session()
    with session.begin():
        try:
            lb_m = _logbook_get_model(lb.uuid, session=session)
            # NOTE(harlowja): Merge them (note that this doesn't provide 100%
            # correct update semantics due to how databases have MVCC). This
            # is where a stored procedure or a better backing store would
            # handle this better (something more suited to this type of data).
            for fd in lb:
                existing_fd = False
                for fd_m in lb_m.flowdetails:
                    if fd_m.uuid == fd.uuid:
                        existing_fd = True
                        if fd_m.meta != fd.meta:
                            fd_m.meta = fd.meta
                        if fd_m.state != fd.state:
                            fd_m.state = fd.state
                        for td in fd:
                            existing_td = False
                            for td_m in fd_m.taskdetails:
                                if td_m.uuid == td.uuid:
                                    existing_td = True
                                    td_m = _taskdetails_merge(td_m, td)
                                    break
                            if not existing_td:
                                td_m = _convert_td_to_internal(td, fd_m.uuid)
                                fd_m.taskdetails.append(td_m)
                if not existing_fd:
                    lb_m.flowdetails.append(
                        _convert_fd_to_internal(fd, lb_m.uuid))
        except exc.NotFound:
            lb_m = _convert_lb_to_internal(lb)
        try:
            lb_m = session.merge(lb_m)
            return _convert_lb_to_external(lb_m)
        except sql_exc.DBAPIError as e:
            raise exc.StorageError(
                "Failed saving"
                " logbook %s: %s" % (lb.uuid, e), e)