Example #1
    def save_logbook(self, book):
        # Get an existing logbook model (or create it if it isn't there).
        try:
            e_lb = self.backend.log_books[book.uuid]
        except KeyError:
            e_lb = logbook.LogBook(book.name, book.uuid,
                                   updated_at=book.updated_at,
                                   created_at=timeutils.utcnow())
            self.backend.log_books[e_lb.uuid] = e_lb
        else:
            # TODO(harlowja): figure out a better way to set this property
            # without actually setting a 'private' property.
            e_lb._updated_at = timeutils.utcnow()

        p_utils.logbook_merge(e_lb, book, deep_copy=True)
        # Add anything from the incoming logbook that isn't already
        # in the existing logbook.
        for flow_detail in book:
            try:
                e_fd = self.backend.flow_details[flow_detail.uuid]
            except KeyError:
                e_fd = logbook.FlowDetail(name=flow_detail.name,
                                          uuid=flow_detail.uuid)
                # Attach the backend's copy, not the caller's object.
                e_lb.add(e_fd)
                self.backend.flow_details[flow_detail.uuid] = e_fd
            p_utils.flow_details_merge(e_fd, flow_detail, deep_copy=True)
            self._save_flowdetail_tasks(e_fd, flow_detail)
        return e_lb
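
Both branches above implement a get-or-create-then-merge pattern: look the logbook up by UUID, build it with created_at=timeutils.utcnow() when absent, refresh _updated_at when present, then merge flow details in. A minimal usage sketch; `conn` stands in for a backend connection exposing this method, and the import path is an assumption, not part of the excerpt above:

    from taskflow.persistence import logbook  # assumed import path

    book = logbook.LogBook('my-book')
    saved = conn.save_logbook(book)    # first save: created_at is stamped
    saved = conn.save_logbook(book)    # later saves refresh _updated_at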
Example #2
 def save_logbook(self, book):
     # Get an existing logbook model (or create it if it isn't there).
     try:
         e_lb = _logbook_merge(_LOG_BOOKS[book.uuid], book)
         # Add anything from the incoming logbook that isn't already
         # in the existing logbook.
         for flow_detail in book:
             if e_lb.find(flow_detail.uuid) is None:
                 _FLOW_DETAILS[flow_detail.uuid] = _copy(flow_detail)
                 e_lb.add(flow_detail)
             if flow_detail.uuid not in _FLOW_DETAILS:
                 _FLOW_DETAILS[flow_detail.uuid] = _copy(flow_detail)
             flow_detail.update(self.update_flow_details(flow_detail))
         # TODO(harlowja): figure out a better way to set this property
         # without actually setting a 'private' property.
         e_lb._updated_at = timeutils.utcnow()
     except KeyError:
         # OK, the one given is now the one we will save.
         e_lb = _copy(book)
         # TODO(harlowja): figure out a better way to set this property
         # without actually setting a 'private' property.
         e_lb._created_at = timeutils.utcnow()
         # Record all the pieces as being saved.
         _LOG_BOOKS[e_lb.uuid] = e_lb
         for flow_detail in e_lb:
             _FLOW_DETAILS[flow_detail.uuid] = _copy(flow_detail)
             flow_detail.update(self.update_flow_details(flow_detail))
     return e_lb
Example #3
    def add(self, fd):
        """Adds a new entry to the underlying logbook.

        Does not *guarantee* that the details will be immediately saved.
        """
        self._flowdetails_by_id[fd.uuid] = fd
        self.updated_at = timeutils.utcnow()
Example #4
 def delete(self, session=None):
     """Delete this object."""
     self.deleted = True
     self.deleted_at = timeutils.utcnow()
     if not session:
         session = sql_session.get_session()
     session.delete(self)
     session.flush()
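
Note that this delete() both stamps the soft-delete columns and hard-deletes the row in one flush; when no session is passed it fetches one itself. A hedged usage sketch; `MyModel` and the id lookup are stand-ins, not names from the excerpt:

    session = sql_session.get_session()
    obj = session.query(MyModel).get(some_id)   # hypothetical model/id
    obj.delete(session=session)                 # stamps deleted_at, then deletes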
Example #5
 def soft_delete(self, synchronize_session='evaluate'):
     return self.update(
         {
             'deleted': literal_column('id'),
             'updated_at': literal_column('updated_at'),
             'deleted_at': timeutils.utcnow()
         },
         synchronize_session=synchronize_session)
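
The literal_column('id') assignment is the interesting part: each soft-deleted row records its own id in deleted, so a unique constraint spanning (..., deleted) still permits exactly one live row (deleted = 0) per key, while updated_at is reassigned to itself so the UPDATE leaves it untouched. A hedged usage sketch, assuming the session is configured with a Query subclass that provides this soft_delete():

    count = session.query(MyModel).\
        filter_by(project_id='p1').\
        soft_delete(synchronize_session='evaluate')   # stand-in names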
Example #6
 def __init__(self, name, uuid=None):
     if uuid:
         self._uuid = uuid
     else:
         self._uuid = uuidutils.generate_uuid()
     self._name = name
     self._flowdetails_by_id = {}
     self.created_at = timeutils.utcnow()
     self.updated_at = None
     self.meta = {}
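
A minimal sketch of this constructor together with the add() method from Example #3; FlowDetail is assumed to accept name and uuid keyword arguments, as in the earlier examples:

    book = LogBook('my-workflow')             # uuid is auto-generated
    fd = FlowDetail(name='step-1',
                    uuid=uuidutils.generate_uuid())
    book.add(fd)                              # refreshes book.updated_at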
Example #7
def drop_old_duplicate_entries_from_table(migrate_engine, table_name,
                                          use_soft_delete, *uc_column_names):
    """Drop all old rows having the same values for columns in uc_columns.

    This method drops (or marks as `deleted`, if use_soft_delete is True)
    old duplicate rows from the table named `table_name`.

    :param migrate_engine:  SQLAlchemy engine
    :param table_name:      Table with duplicates
    :param use_soft_delete: If True - values will be marked as `deleted`,
                            if False - values will be removed from table
    :param uc_column_names: Unique constraint columns
    """
    meta = MetaData()
    meta.bind = migrate_engine

    table = Table(table_name, meta, autoload=True)
    columns_for_group_by = [table.c[name] for name in uc_column_names]

    columns_for_select = [func.max(table.c.id)]
    columns_for_select.extend(columns_for_group_by)

    duplicated_rows_select = select(columns_for_select,
                                    group_by=columns_for_group_by,
                                    having=func.count(table.c.id) > 1)

    for row in migrate_engine.execute(duplicated_rows_select):
        # NOTE(boris-42): Do not remove row that has the biggest ID.
        delete_condition = table.c.id != row[0]
        is_none = None  # workaround for pyflakes
        delete_condition &= table.c.deleted_at == is_none
        for name in uc_column_names:
            delete_condition &= table.c[name] == row[name]

        rows_to_delete_select = select([table.c.id]).where(delete_condition)
        for row in migrate_engine.execute(rows_to_delete_select).fetchall():
            LOG.info(_("Deleting duplicated row with id: %(id)s from table: "
                       "%(table)s") % dict(id=row[0], table=table_name))

        if use_soft_delete:
            delete_statement = table.update().\
                where(delete_condition).\
                values({
                    'deleted': literal_column('id'),
                    'updated_at': literal_column('updated_at'),
                    'deleted_at': timeutils.utcnow()
                })
        else:
            delete_statement = table.delete().where(delete_condition)
        migrate_engine.execute(delete_statement)
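
The helper works in three steps: select max(id) grouped by the unique-constraint columns with count(id) > 1, build a delete condition matching every other still-live row in each group, then either hard-delete or soft-delete (reusing the literal_column('id') trick from Example #5). A hedged invocation sketch; the engine URL, table, and column names are made up, and the table is assumed to carry id, deleted, deleted_at, and updated_at columns:

    from sqlalchemy import create_engine

    engine = create_engine('sqlite:///example.db')     # hypothetical DB
    drop_old_duplicate_entries_from_table(engine, 'backups',
                                          True,        # use_soft_delete
                                          'user_id', 'name')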
Example #8
def logbook_save(lb):
    # Acquire all the locks that will be needed to perform this operation
    # without being affected by other threads doing it at the same time.
    with threading_utils.MultiLock(READ_SAVE_ORDER):
        # Get an existing logbook model (or create it if it isn't there).
        try:
            backing_lb = LOG_BOOKS[lb.uuid]
            if backing_lb.meta != lb.meta:
                backing_lb.meta = lb.meta
            # Add anything from the incoming logbook that isn't already
            # in the existing loaded logbook.
            for fd in lb:
                if fd not in backing_lb:
                    FLOW_DETAILS[fd.uuid] = copy.deepcopy(fd)
                    backing_lb.add(flowdetails_save(fd))
                else:
                    # Previously added but not saved into the flowdetails
                    # 'permanent' storage.
                    if fd.uuid not in FLOW_DETAILS:
                        FLOW_DETAILS[fd.uuid] = copy.deepcopy(fd)
                    flowdetails_save(fd)
            # TODO(harlowja): figure out a better way to set this property
            # without actually letting others set it externally.
            backing_lb._updated_at = timeutils.utcnow()
        except KeyError:
            backing_lb = copy.deepcopy(lb)
            # TODO(harlowja): figure out a better way to set this property
            # without actually letting others set it externally.
            backing_lb._created_at = timeutils.utcnow()
            # Record all the pieces as being saved.
            LOG_BOOKS[lb.uuid] = backing_lb
            for fd in backing_lb:
                FLOW_DETAILS[fd.uuid] = fd
                for td in fd:
                    TASK_DETAILS[td.uuid] = td
        return backing_lb
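
What keeps this read-modify-write safe is MultiLock: it acquires a fixed, ordered collection of locks on entry and releases them on exit, so concurrent savers always contend in the same order and cannot deadlock. A simplified sketch of the idea, not taskflow's actual implementation:

    import threading

    class MultiLock(object):
        """Acquire several locks, always in the given (fixed) order."""

        def __init__(self, locks):
            self._locks = list(locks)

        def __enter__(self):
            for lock in self._locks:
                lock.acquire()

        def __exit__(self, exc_type, exc_val, exc_tb):
            for lock in reversed(self._locks):
                lock.release()

    # The fixed ordering of this tuple is what prevents deadlock.
    READ_SAVE_ORDER = (threading.RLock(), threading.RLock())

    with MultiLock(READ_SAVE_ORDER):
        pass  # ... read/modify the shared dictionaries here ...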
Example #9
 def soft_delete(self, session=None):
     """Mark this object as deleted."""
     self.deleted = self.id
     self.deleted_at = timeutils.utcnow()
     self.save(session=session)
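
This instance-level variant pairs with the query-level soft_delete() of Example #5: it records the row's own id in deleted (the same unique-constraint trick) and persists through the model's save(). A hedged usage sketch; the model and lookup are hypothetical:

    instance = session.query(MyModel).get(some_id)   # stand-in names
    instance.soft_delete(session=session)            # deleted = id, deleted_at = now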