Example #1
class GitRepository(db.Model):
    """Information about a GitHub repository."""

    id = db.Column(db.Integer, primary_key=True)
    external_id = db.Column(db.Integer, unique=False, nullable=False)

    host = db.Column(db.String(255), nullable=False)
    owner = db.Column(db.String(255), nullable=False)
    name = db.Column(db.String(255), nullable=False)

    __tablename__ = 'git_repository'
    __table_args__ = (db.UniqueConstraint(
        'host', 'owner', 'name', name='uq_git_repository_unique_constraint'), )

    @classmethod
    def create_or_get(cls, external_id, host, owner, name):
        """."""
        try:
            repo = cls.query.filter_by(host=host, owner=owner, name=name).one()
        except NoResultFound:
            repo = cls(external_id=external_id,
                       host=host,
                       owner=owner,
                       name=name)
            db.session.add(repo)
        return repo
class GitWebhook(db.Model):
    """Webook for a Git repository."""

    __tablename__ = 'git_webhook'
    __table_args__ = (db.UniqueConstraint(
        'event_type', 'repo_id', name='uq_git_webhook_unique_constraint'), )

    id = db.Column(db.Integer, primary_key=True)
    event_type = db.Column(db.String(255), nullable=False)

    external_id = db.Column(db.String(255), nullable=False)
    secret = db.Column(db.String(32), nullable=True)

    repo_id = db.Column(db.Integer, db.ForeignKey(GitRepository.id))
    repo = db.relationship(GitRepository,
                           backref=db.backref("webhooks",
                                              cascade="all, delete-orphan"))
Example #3
def do_upgrade():
    """Implement your upgrades here."""
    if not op.has_table('remoteACCOUNT'):
        op.create_table('remoteACCOUNT',
                        db.Column('id',
                                  db.Integer(display_width=15),
                                  nullable=False),
                        db.Column('user_id',
                                  db.Integer(display_width=15),
                                  nullable=False),
                        db.Column('client_id',
                                  db.String(length=255),
                                  nullable=False),
                        db.Column('extra_data', db.JSON, nullable=True),
                        db.ForeignKeyConstraint(
                            ['user_id'],
                            ['user.id'],
                        ),
                        db.PrimaryKeyConstraint('id'),
                        db.UniqueConstraint('user_id', 'client_id'),
                        mysql_charset='utf8',
                        mysql_engine='MyISAM')
    else:
        warnings.warn("*** Creation of table 'remoteACCOUNT table skipped!'")

    if not op.has_table('remoteTOKEN'):
        op.create_table('remoteTOKEN',
                        db.Column('id_remote_account',
                                  db.Integer(display_width=15),
                                  nullable=False),
                        db.Column('token_type',
                                  db.String(length=40),
                                  nullable=False),
                        db.Column('access_token', db.Text(), nullable=False),
                        db.Column('secret', db.Text(), nullable=False),
                        db.ForeignKeyConstraint(
                            ['id_remote_account'],
                            ['remoteACCOUNT.id'],
                        ),
                        db.PrimaryKeyConstraint('id_remote_account',
                                                'token_type'),
                        mysql_charset='utf8',
                        mysql_engine='MyISAM')
    else:
        warnings.warn("*** Creation of table 'remoteTOKEN' skipped!'")
Example #4
class HarvestedItem(db.Model):
    """The items harvested from a repository"""

    __tablename__ = 'iroko_harvest_items'
    __table_args__ = (db.UniqueConstraint(
        'source_uuid',
        'identifier',
        name='identifier_in_repository'
        ),
        )
    id = db.Column(db.Integer, primary_key=True)

    source_uuid = db.Column(
        UUIDType,
        db.ForeignKey('iroko_source_repositories.source_uuid',
                      ondelete='CASCADE'),
        nullable=False, index=True
        )
    repository = db.relationship(
        "Repository", backref=db.backref(
            'harvested_items'
            )
        )

    """identifier in the repo"""
    identifier = db.Column(db.String, nullable=False)

    # el uuid del iroko record asociado
    record = db.Column(UUIDType, nullable=True)

    status = db.Column(db.Enum(HarvestedItemStatus))
    error_log = db.Column(db.String)

    data = db.Column(JSONType)
    """Any other relevant data to be used in the future could be here."""

    def __str__(self):
        """Representation."""
        return self.identifier
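A hypothetical query sketch against this model (``some_source_uuid`` is a placeholder, and the ``ERROR`` member of ``HarvestedItemStatus`` is assumed, since the enum is not shown above):

failed = (HarvestedItem.query
          .filter_by(source_uuid=some_source_uuid,
                     status=HarvestedItemStatus.ERROR)
          .all())
for item in failed:
    print(item, item.error_log)   # __str__ returns the identifier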
Example #5
class GitWebhookSubscriber(db.Model):
    """Records subscribed to the git repository events."""

    __tablename__ = 'git_subscriber'
    __table_args__ = (db.UniqueConstraint(
        'record_id',
        'webhook_id',
        name='uq_git_webhook_subscriber_unique_constraint'), )

    id = db.Column(db.Integer, primary_key=True)

    status = db.Column(db.Enum('active', 'deleted', name='git_webhook_status'),
                       nullable=False,
                       default='active')

    record_id = db.Column(UUIDType,
                          db.ForeignKey(RecordMetadata.id),
                          nullable=False)
    record = db.relationship(RecordMetadata,
                             backref=db.backref("webhooks",
                                                cascade="all, delete-orphan"))

    webhook_id = db.Column(db.Integer,
                           db.ForeignKey(GitWebhook.id),
                           nullable=False)
    webhook = db.relationship(GitWebhook,
                              backref=db.backref("subscribers",
                                                 cascade="all, delete-orphan"))

    user_id = db.Column(db.Integer, db.ForeignKey(User.id), nullable=False)
    user = db.relationship(User)

    snapshots = db.relationship(GitSnapshot,
                                order_by="desc(GitSnapshot.created)",
                                secondary='git_subscriber_snapshots')

    @property
    def repo(self):
        """Repository linked to this subscriber's webhook."""
        return self.webhook.repo
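A sketch of subscribing a record to an existing webhook (illustrative; ``record_metadata``, ``webhook`` and ``user`` are assumed to be existing rows):

subscriber = GitWebhookSubscriber(record_id=record_metadata.id,
                                  webhook=webhook,
                                  user=user)
db.session.add(subscriber)
db.session.commit()

# The convenience property resolves the repository through the webhook.
assert subscriber.repo is webhook.repo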
Example #6
class RemoteAccount(db.Model):
    """Storage for remote linked accounts."""

    __tablename__ = 'oauthclient_remoteaccount'

    __table_args__ = (db.UniqueConstraint('user_id', 'client_id'), )

    #
    # Fields
    #
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    """Primary key."""

    user_id = db.Column(db.Integer, db.ForeignKey(User.id), nullable=False)
    """Local user linked with a remote app via the access token."""

    client_id = db.Column(db.String(255), nullable=False)
    """Client ID of remote application (defined in OAUTHCLIENT_REMOTE_APPS)."""

    extra_data = db.Column(MutableDict.as_mutable(JSONType), nullable=False)
    """Extra data associated with this linked account."""

    #
    # Relationships properties
    #
    user = db.relationship(User, backref='remote_accounts')
    """SQLAlchemy relationship to user."""
    @classmethod
    def get(cls, user_id, client_id):
        """Get RemoteAccount object for user.

        :param user_id: User id
        :param client_id: Client id.
        :returns: A :class:`invenio_oauthclient.models.RemoteAccount` instance.
        """
        return cls.query.filter_by(
            user_id=user_id,
            client_id=client_id,
        ).first()

    @classmethod
    def create(cls, user_id, client_id, extra_data):
        """Create new remote account for user.

        :param user_id: User id.
        :param client_id: Client id.
        :param extra_data: JSON-serializable dictionary of any extra data that
            needs to be saved together with this link.
        :returns: A :class:`invenio_oauthclient.models.RemoteAccount` instance.
        """
        with db.session.begin_nested():
            account = cls(user_id=user_id,
                          client_id=client_id,
                          extra_data=extra_data or dict())
            db.session.add(account)
        return account

    def delete(self):
        """Delete remote account together with all stored tokens."""
        with db.session.begin_nested():
            db.session.delete(self)

    def __repr__(self):
        """String representation for model."""
        return 'Remote Account <id={0.id}, user_id={0.user.id}>'.format(self)
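A get-or-create pattern on top of the two classmethods above (placeholder values):

account = RemoteAccount.get(user_id=1, client_id='my-remote-app')
if account is None:
    account = RemoteAccount.create(user_id=1,
                                   client_id='my-remote-app',
                                   extra_data={})
    db.session.commit()   # create() only adds inside a nested transaction

account.extra_data['last_login'] = '2020-01-01'   # MutableDict tracks changes
db.session.commit()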
Example #7
class Index(db.Model, Timestamp):
    """
    Represent an index.

    The Index object contains ``created`` and ``updated`` properties
    that are updated automatically.
    """

    __tablename__ = 'index'

    __table_args__ = (db.UniqueConstraint('parent',
                                          'position',
                                          name='uix_position'), )

    id = db.Column(db.BigInteger, primary_key=True, unique=True)
    """Identifier of the index."""

    parent = db.Column(db.BigInteger, nullable=False, default=0)
    """Parent Information of the index."""

    position = db.Column(db.Integer, nullable=False, default=0)
    """Children position of parent."""

    index_name = db.Column(db.Text, nullable=True, default='')
    """Name of the index."""

    index_name_english = db.Column(db.Text, nullable=False, default='')
    """English Name of the index."""

    index_link_name = db.Column(db.Text, nullable=True, default='')
    """Name of the index link."""

    index_link_name_english = db.Column(db.Text, nullable=False, default='')
    """English Name of the index link."""

    harvest_spec = db.Column(db.Text, nullable=True, default='')
    """Harvest Spec."""

    index_link_enabled = db.Column(db.Boolean(name='index_link_enabled'),
                                   nullable=False,
                                   default=False)
    """Index link enable flag."""

    comment = db.Column(db.Text, nullable=True, default='')
    """Comment of the index."""

    more_check = db.Column(db.Boolean(name='more_check'),
                           nullable=False,
                           default=False)
    """More Status of the index."""

    display_no = db.Column(db.Integer, nullable=False, default=0)
    """Display number of the index."""

    harvest_public_state = db.Column(db.Boolean(name='harvest_public_state'),
                                     nullable=False,
                                     default=True)
    """Harvest public State of the index."""

    display_format = db.Column(db.Text, nullable=True, default='1')
    """The Format of Search Resault."""

    image_name = db.Column(db.Text, nullable=False, default='')
    """The Name of upload image."""

    public_state = db.Column(db.Boolean(name='public_state'),
                             nullable=False,
                             default=False)
    """Public State of the index."""

    public_date = db.Column(db.DateTime().with_variant(mysql.DATETIME(fsp=6),
                                                       "mysql"),
                            nullable=True)
    """Public Date of the index."""

    recursive_public_state = db.Column(db.Boolean(name='recs_public_state'),
                                       nullable=True,
                                       default=False)
    """Recursive Public State of the index."""

    coverpage_state = db.Column(db.Boolean(name='coverpage_state'),
                                nullable=True,
                                default=False)
    """PDF Cover Page State of the index."""

    recursive_coverpage_check = db.Column(
        db.Boolean(name='recursive_coverpage_check'),
        nullable=True,
        default=False)
    """Recursive PDF Cover Page State of the index."""

    browsing_role = db.Column(db.Text, nullable=True)
    """Browsing Role of the  ."""

    recursive_browsing_role = db.Column(db.Boolean(name='recs_browsing_role'),
                                        nullable=True,
                                        default=False)
    """Recursive Browsing Role of the index."""

    contribute_role = db.Column(db.Text, nullable=True)
    """Contribute Role of the index."""

    recursive_contribute_role = db.Column(
        db.Boolean(name='recs_contribute_role'), nullable=True, default=False)
    """Recursive Browsing Role of the index."""

    browsing_group = db.Column(db.Text, nullable=True)
    """Browsing Group of the  ."""

    recursive_browsing_group = db.Column(
        db.Boolean(name='recs_browsing_group'), nullable=True, default=False)
    """Recursive Browsing Group of the index."""

    contribute_group = db.Column(db.Text, nullable=True)
    """Contribute Group of the index."""

    recursive_contribute_group = db.Column(
        db.Boolean(name='recs_contribute_group'), nullable=True, default=False)
    """Recursive Browsing Group of the index."""

    owner_user_id = db.Column(db.Integer, nullable=True, default=0)
    """Owner user id of the index."""

    # item_custom_sort = db.Column(db.Text, nullable=True, default='')

    item_custom_sort = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                                 default=lambda: dict(),
                                 nullable=True)
    """The sort of item by custom setting"""

    # index_items = db.relationship('IndexItems', back_populates='index', cascade='delete')

    def __iter__(self):
        """Iter."""
        for name in dir(Index):
            if not name.startswith('__') and not name.startswith('_') \
                    and name not in dir(Timestamp):
                value = getattr(self, name)
                if value is None:
                    value = ""
                if isinstance(value, str) or isinstance(value, bool) \
                        or isinstance(value, datetime) \
                        or isinstance(value, int):
                    yield (name, value)

    # format setting for community admin page

    def __str__(self):
        """Representation."""
        return 'Index <id={0.id}, index_name={0.index_name_english}>'.format(
            self)

    @classmethod
    def have_children(cls, id):
        """Have Children."""
        children = cls.query.filter_by(parent=id).all()
        return False if (children is None or len(children) == 0) else True
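Because ``__iter__`` yields ``(name, value)`` pairs for the simple attributes, an Index row can be flattened into a plain dict, e.g. for the community admin page mentioned in the comment; a short sketch (the id is illustrative):

index = Index.query.get(1)
as_dict = dict(index)                    # uses Index.__iter__
print(as_dict.get('index_name_english'))
print(Index.have_children(index.id))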
Example #8
class MultipartObject(db.Model, Timestamp):
    """Model for storing files in chunks.

    A multipart object belongs to a specific bucket and key and is identified
    by an upload id. You can have multiple multipart uploads for the same
    bucket and key. Once all parts of a multipart object are uploaded, the state
    is changed to ``completed``. Afterwards it is not possible to upload
    new parts. Once completed, the multipart object is merged, and added as
    a new version in the current object/bucket.

    All parts for a multipart upload must be of the same size, except for the
    last part.
    """

    __tablename__ = 'files_multipartobject'

    __table_args__ = (db.UniqueConstraint('upload_id',
                                          'bucket_id',
                                          'key',
                                          name='uix_item'), )

    upload_id = db.Column(
        UUIDType,
        default=uuid.uuid4,
        primary_key=True,
    )
    """Identifier for the specific version of an object."""

    bucket_id = db.Column(
        UUIDType,
        db.ForeignKey(Bucket.id, ondelete='RESTRICT'),
    )
    """Bucket identifier."""

    key = db.Column(db.Text().with_variant(mysql.VARCHAR(255), 'mysql'), )
    """Key identifying the object."""

    file_id = db.Column(UUIDType,
                        db.ForeignKey(FileInstance.id, ondelete='RESTRICT'),
                        nullable=False)
    """File instance for this multipart object."""

    chunk_size = db.Column(db.Integer, nullable=True)
    """Size of chunks for file."""

    size = db.Column(db.BigInteger, nullable=True)
    """Size of file."""

    completed = db.Column(db.Boolean, nullable=False, default=False)
    """Defines if object is the completed."""

    # Relationships definitions
    bucket = db.relationship(Bucket, backref='multipart_objects')
    """Relationship to buckets."""

    file = db.relationship(FileInstance, backref='multipart_objects')
    """Relationship to buckets."""
    def __repr__(self):
        """Return representation of the multipart object."""
        return "{0}:{2}:{1}".format(self.bucket_id, self.key, self.upload_id)

    @property
    def last_part_number(self):
        """Get last part number."""
        return int(self.size / self.chunk_size)

    @property
    def last_part_size(self):
        """Get size of last part."""
        return self.size % self.chunk_size

    @validates('key')
    def validate_key(self, key, key_):
        """Validate key."""
        return validate_key(key_)

    @staticmethod
    def is_valid_chunksize(chunk_size):
        """Check if size is valid."""
        min_csize = current_app.config['FILES_REST_MULTIPART_CHUNKSIZE_MIN']
        max_csize = current_app.config['FILES_REST_MULTIPART_CHUNKSIZE_MAX']
        return chunk_size >= min_csize and chunk_size <= max_csize

    @staticmethod
    def is_valid_size(size, chunk_size):
        """Validate max theoretical size."""
        min_csize = current_app.config['FILES_REST_MULTIPART_CHUNKSIZE_MIN']
        max_size = \
            chunk_size * current_app.config['FILES_REST_MULTIPART_MAX_PARTS']
        return size > min_csize and size <= max_size

    def expected_part_size(self, part_number):
        """Get expected part size for a particular part number."""
        last_part = self.last_part_number

        if part_number == last_part:
            return self.last_part_size
        elif part_number >= 0 and part_number < last_part:
            return self.chunk_size
        else:
            raise MultipartInvalidPartNumber()

    @ensure_uncompleted()
    def complete(self):
        """Mark a multipart object as complete."""
        if Part.count(self) != self.last_part_number + 1:
            raise MultipartMissingParts()

        with db.session.begin_nested():
            self.completed = True
            self.file.readable = True
            self.file.writable = False
        return self

    @ensure_completed()
    def merge_parts(self, **kwargs):
        """Merge parts into object version."""
        self.file.update_checksum(**kwargs)
        with db.session.begin_nested():
            obj = ObjectVersion.create(self.bucket,
                                       self.key,
                                       _file_id=self.file_id)
            self.delete()
        return obj

    def delete(self):
        """Delete a multipart object."""
        # Update bucket size.
        self.bucket.size -= self.size
        # Remove parts
        Part.query_by_multipart(self).delete()
        # Remove self
        self.query.filter_by(upload_id=self.upload_id).delete()

    @classmethod
    def create(cls, bucket, key, size, chunk_size):
        """Create a new object in a bucket."""
        bucket = as_bucket(bucket)

        if bucket.locked:
            raise BucketLockedError()

        # Validate chunk size.
        if not cls.is_valid_chunksize(chunk_size):
            raise MultipartInvalidChunkSize()

        # Validate max theoretical size.
        if not cls.is_valid_size(size, chunk_size):
            raise MultipartInvalidSize()

        # Validate max bucket size.
        bucket_limit = bucket.size_limit
        if bucket_limit and size > bucket_limit:
            desc = 'File size limit exceeded.' \
                if isinstance(bucket_limit, int) else bucket_limit.reason
            raise FileSizeError(description=desc)

        with db.session.begin_nested():
            file_ = FileInstance.create()
            file_.size = size
            obj = cls(
                upload_id=uuid.uuid4(),
                bucket=bucket,
                key=key,
                chunk_size=chunk_size,
                size=size,
                completed=False,
                file=file_,
            )
            bucket.size += size
            db.session.add(obj)
        file_.init_contents(
            size=size,
            default_location=bucket.location.uri,
            default_storage_class=bucket.default_storage_class,
        )
        return obj

    @classmethod
    def get(cls, bucket, key, upload_id, with_completed=False):
        """Fetch a specific multipart object."""
        q = cls.query.filter_by(
            upload_id=upload_id,
            bucket_id=as_bucket_id(bucket),
            key=key,
        )
        if not with_completed:
            q = q.filter(cls.completed.is_(False))

        return q.one_or_none()

    @classmethod
    def query_expired(cls, dt, bucket=None):
        """Query all uncompleted multipart uploads."""
        q = cls.query.filter(cls.created < dt).filter_by(completed=True)
        if bucket:
            q = q.filter(cls.bucket_id == as_bucket_id(bucket))
        return q

    @classmethod
    def query_by_bucket(cls, bucket):
        """Query all uncompleted multipart uploads."""
        return cls.query.filter(cls.bucket_id == as_bucket_id(bucket))
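A sketch of the lifecycle described in the class docstring (``bucket`` is assumed to exist, and the actual part uploads happen elsewhere, e.g. in the REST views):

# 1. Start an upload: reserves a FileInstance and validates the sizes.
mp = MultipartObject.create(bucket, 'data.bin',
                            size=10 * 2**20, chunk_size=5 * 2**20)
db.session.commit()

# 2. ... all parts are uploaded elsewhere ...

# 3. Finish: mark as completed, then merge the parts into an ObjectVersion.
mp.complete()
obj = mp.merge_parts()
db.session.commit()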
Example #9
class TaxonomyTerm(db.Model):
    """TaxonomyTerm adjacency list model."""
    __tablename__ = "taxonomy_term"
    __table_args__ = (
        db.UniqueConstraint('slug',
                            'parent_id',
                            name='uq_taxonomy_term_slug_parent'),
        db.UniqueConstraint('slug',
                            'tree_id',
                            name='uq_taxonomy_term_slug_tree'),
        #
        # can not use constraints here as db can not perform update
        # which would temporarily invalidate constraints
        #
        # db.UniqueConstraint('left', 'tree_id', name='uq_taxonomy_term_left_tree'),
        # db.UniqueConstraint('right', 'tree_id', name='uq_taxonomy_term_right_tree'),
        # db.UniqueConstraint('order', 'parent_id', name='uq_taxonomy_term_order_parent'),
    )

    id = db.Column(db.Integer, primary_key=True)
    slug = db.Column(db.String(64), unique=False, index=True)
    extra_data = db.Column(db.JSON().with_variant(
        sqlalchemy.dialects.postgresql.JSONB, 'postgresql'))

    tree_id = db.Column("tree_id", db.Integer, nullable=False)
    left = db.Column("left", db.Integer, nullable=False)
    right = db.Column("right", db.Integer, nullable=False)
    level = db.Column("level", db.Integer, nullable=False)
    order = db.Column("order", db.Integer, nullable=False)

    parent_id = db.Column(db.Integer, db.ForeignKey(__tablename__ + '.id'))
    parent = relationship("TaxonomyTerm",
                          back_populates="children",
                          remote_side=[id])
    children = relationship("TaxonomyTerm",
                            back_populates="parent",
                            order_by=order,
                            cascade="all, delete",
                            lazy="dynamic")

    def __init__(self,
                 slug: str,
                 extra_data: dict = None,
                 parent=None,
                 tree_id=None,
                 left=None,
                 right=None,
                 level=None,
                 order=None):
        """Taxonomy Term constructor."""
        self.slug = slug
        self.extra_data = extra_data
        self.parent = parent
        if not tree_id:
            # can not use db generator as it is not supported in sqlite
            with db.session.begin_nested():
                t = TreeId()
                db.session.add(t)
            tree_id = t.id

        self.tree_id = tree_id
        self.left = left
        self.right = right
        self.level = level
        self.order = order

    def create_term(self, **kwargs):
        """Create a child term under this term and return it."""
        db.session.refresh(self)

        with db.session.begin_nested():
            # move all following nodes by 2 to make space for the new node
            t = TaxonomyTerm.__table__
            following_terms_cond = and_(TaxonomyTerm.left > self.right,
                                        TaxonomyTerm.tree_id == self.tree_id)
            db.session.execute(t.update().where(following_terms_cond).values(
                left=TaxonomyTerm.left + 2, right=TaxonomyTerm.right + 2))

            # on path to parent move the right property by 2
            # as the hierarchy will contain the new node
            ancestors_cond = and_(TaxonomyTerm.left <= self.left,
                                  TaxonomyTerm.right >= self.right,
                                  TaxonomyTerm.tree_id == self.tree_id)
            db.session.execute(t.update().where(ancestors_cond).values(
                right=TaxonomyTerm.right + 2))

            # self is still not modified here, so can use its unchanged "right" property
            term = self.__class__(**kwargs,
                                  tree_id=self.tree_id,
                                  left=self.right,
                                  right=self.right + 1,
                                  level=self.level + 1,
                                  order=self.children.count(),
                                  parent=self)
            db.session.add(term)

        return term

    def update(self, extra_data: dict = None):
        """Update Taxonomy Term data."""
        self.extra_data = extra_data
        with db.session.begin_nested():
            db.session.add(self)

    def delete(self):
        """Delete this term and its children, re-packing the nested-set tree."""
        # refetch it from db just to be sure that all props are up to date
        db.session.refresh(self)

        parent_id = self.parent_id
        order = self.order
        right = self.right
        left = self.left
        tree_id = self.tree_id

        if parent_id is None:
            # top-level object
            with db.session.begin_nested():
                db.session.delete(self)
        else:

            # get space occupied by the term
            occupied_space = right - left + 1

            with db.session.begin_nested():

                # delete current object and all its children
                with db.session.begin_nested():
                    db.session.delete(self)

                with db.session.begin_nested():
                    t = TaxonomyTerm.__table__
                    # update order on following siblings

                    following_siblings_cond = and_(
                        TaxonomyTerm.order > order,
                        TaxonomyTerm.parent_id == parent_id)
                    db.session.execute(
                        t.update().where(following_siblings_cond).values(
                            order=TaxonomyTerm.order - 1))

                    # move all following nodes to the left by the occupied space
                    following_terms_cond = and_(
                        TaxonomyTerm.left > right,
                        TaxonomyTerm.tree_id == tree_id)
                    db.session.execute(
                        t.update().where(following_terms_cond).values(
                            left=TaxonomyTerm.left - occupied_space,
                            right=TaxonomyTerm.right - occupied_space))

                    # on path to parent move the right property left by the occupied space
                    ancestors_cond = and_(TaxonomyTerm.left < left,
                                          TaxonomyTerm.right > right,
                                          TaxonomyTerm.tree_id == tree_id)
                    db.session.execute(t.update().where(ancestors_cond).values(
                        right=TaxonomyTerm.right - occupied_space))

    def check(self, path=None):
        """
        Checks consistency of MPTT tree

        :param path:    internal, always pass None
        :raise ValueError:  when tree is corrupted
        """
        if not path:
            path = []
            db.session.refresh(self)

        path = path + [self.slug]

        children = list(self.children)
        for c in children:
            db.session.refresh(c)  # make sure cached data are not used here

        if not children:
            if self.left + 1 != self.right:
                raise ValueError(  # pragma: no cover
                    'Error in childless element {}: bad left {} or right {}'.
                    format(path, self.left, self.right))
        else:
            # check lefts and rights
            if self.left + 1 != children[0].left:
                raise ValueError(  # pragma: no cover
                    'First child "{}" of {} has bad left {}, expecting {}'.
                    format(children[0].slug, path, children[0].left,
                           self.left + 1))
            if self.right - 1 != children[-1].right:
                raise ValueError(  # pragma: no cover
                    'Last child "{}" of {} has bad right {}, expecting {}'.
                    format(children[-1].slug, path, children[-1].right,
                           self.right - 1))
            # check lefts and rights between children
            for i in range(0, len(children) - 1):
                c1 = children[i]
                c2 = children[i + 1]
                if c1.right + 1 != c2.left:
                    raise ValueError(  # pragma: no cover
                        'Child with slug "{}" of element {} has bad left {}, expecting {}'
                        .format(c2.slug, path, c2.left, c1.right + 1))
            for ci, c in enumerate(children):
                if c.level != self.level + 1:
                    raise ValueError(  # pragma: no cover
                        'Child with slug "{}" of {} has bad level {}, expecting {}'
                        .format(c.slug, path, c.level, self.level + 1))
                if c.order != ci:
                    raise ValueError(  # pragma: no cover
                        'Child with slug "{}" of {} has bad order {}, expecting {}'
                        .format(c.slug, path, c.order, ci))

            for c in children:
                c.check(path)

    def move(self, target_node, position: MovePosition):
        """Move this term inside, before or after ``target_node``."""
        db.session.refresh(self)
        db.session.refresh(target_node)

        if self == target_node:
            raise TaxonomyError(
                'Can not move term inside, before or after the same term')

        if (self.tree_id == target_node.tree_id
                and self.left < target_node.left
                and self.right > target_node.right):
            raise TaxonomyError(
                'Can not move a term inside its own descendants')

        with db.session.begin_nested():
            if self.tree_id != target_node.tree_id:
                return self._move_between_trees(target_node, position)

            return self._move_within_tree(target_node, position)

    def _move_within_tree(self, target_node, position):
        (target_parent_id, target_parent_left, target_parent_right,
         target_left, target_level,
         target_order) = target_node._get_insertion_position(position)

        self_right = self.right
        self_left = self.left
        self_level = self.level
        self_parent_id = self.parent_id
        self_order = self.order
        occupied_space = self_right - self_left + 1
        tree_id = self.tree_id

        t = TaxonomyTerm.__table__

        if logger.isEnabledFor(logging.DEBUG):  # pragma: no cover
            logging.debug(
                f'target parent id {target_parent_id}, '
                f'target parent left {target_parent_left}, '
                f'target parent right {target_parent_right}\n'
                f'target_left {target_left}, target_level {target_level}, '
                f'target_order {target_order}, occupied space {occupied_space}\n'
                f'self left {self_left}, '
                f'self right {self_right}\n')

        if logger.isEnabledFor(logging.DEBUG):  # pragma: no cover
            logging.debug("Phase 0: before move right   \n%s",
                          self.taxonomy.dump(True))

        if self_left == target_left:
            # nothing to do, already there
            return

        # just a sanity check that can not move the term inside itself
        assert not self_left <= target_left < self_right

        with db.session.begin_nested():
            # make space for the tree by moving subsequent terms to the right
            target_following_cond = and_(TaxonomyTerm.left >= target_left,
                                         TaxonomyTerm.tree_id == tree_id)
            db.session.execute(t.update().where(target_following_cond).values(
                left=TaxonomyTerm.left + occupied_space,
                right=TaxonomyTerm.right + occupied_space))

        with db.session.begin_nested():
            # update ancestors' right
            if target_parent_left < target_left:
                # target parent's right has not been moved
                # in the previous condition, so add it as well
                target_ancestors_cond = and_(
                    TaxonomyTerm.left <= target_parent_left,
                    TaxonomyTerm.right >= target_parent_right,
                    TaxonomyTerm.tree_id == tree_id)
                db.session.execute(
                    t.update().where(target_ancestors_cond).values(
                        right=TaxonomyTerm.right + occupied_space))
            else:  # pragma: no cover
                raise RuntimeError(
                    'Should not get here as we are moving into parent, not onto it'
                )

        if self_left >= target_left:
            # if self was in subsequent terms, fix its left and right
            self_left += occupied_space
            self_right += occupied_space

        if logger.isEnabledFor(logging.DEBUG):  # pragma: no cover
            logging.debug("Phase 1: after move right    \n%s",
                          self.taxonomy.dump(True))

        with db.session.begin_nested():
            # update order on the future siblings that will be after the moved element
            future_siblings_cond = and_(
                TaxonomyTerm.order >= target_order,
                TaxonomyTerm.parent_id == target_parent_id)
            db.session.execute(t.update().where(future_siblings_cond).values(
                order=TaxonomyTerm.order + 1))

        if logger.isEnabledFor(logging.DEBUG):  # pragma: no cover
            logging.debug("Phase 2: after order future siblings\n%s",
                          self.taxonomy.dump(True))

        with db.session.begin_nested():
            # move the descendants of the moved term to the new location, fixing left, right, level
            term_descendants_cond = and_(TaxonomyTerm.left > self_left,
                                         TaxonomyTerm.right < self_right,
                                         TaxonomyTerm.tree_id == tree_id)
            db.session.execute(t.update().where(term_descendants_cond).values(
                left=TaxonomyTerm.left + target_left - self_left,
                right=TaxonomyTerm.right + target_left - self_left,
                level=TaxonomyTerm.level + target_level - self_level))

        if logger.isEnabledFor(logging.DEBUG):  # pragma: no cover
            logging.debug("Phase 3: after move children \n%s",
                          self.taxonomy.dump(True))

        with db.session.begin_nested():
            # move the self only to the new location, fixing left, right, level, parent and order
            db.session.execute(
                t.update().where(TaxonomyTerm.id == self.id).values(
                    left=TaxonomyTerm.left + target_left - self_left,
                    right=TaxonomyTerm.right + target_left - self_left,
                    level=target_level,
                    parent_id=target_parent_id,
                    order=target_order))

        if logger.isEnabledFor(logging.DEBUG):  # pragma: no cover
            logging.debug("Phase 4: after move term     \n%s",
                          self.taxonomy.dump(True))

        with db.session.begin_nested():
            # remove the space left by the moved element
            orig_following_cond = and_(TaxonomyTerm.left >= self_left,
                                       TaxonomyTerm.tree_id == tree_id)
            db.session.execute(t.update().where(orig_following_cond).values(
                left=TaxonomyTerm.left - occupied_space,
                right=TaxonomyTerm.right - occupied_space))

        with db.session.begin_nested():
            # and parents ...
            orig_parents_cond = and_(TaxonomyTerm.left < self_left,
                                     TaxonomyTerm.right > self_right,
                                     TaxonomyTerm.tree_id == tree_id)
            db.session.execute(t.update().where(orig_parents_cond).values(
                right=TaxonomyTerm.right - occupied_space))

        if logger.isEnabledFor(logging.DEBUG):  # pragma: no cover
            logging.debug("Phase 5: after removing space\n%s",
                          self.taxonomy.dump(True))

        with db.session.begin_nested():
            # fix order on siblings following the previously moved term
            orig_siblings_cond = and_(TaxonomyTerm.order > self_order,
                                      TaxonomyTerm.parent_id == self_parent_id)
            db.session.execute(t.update().where(orig_siblings_cond).values(
                order=TaxonomyTerm.order - 1))

        if logger.isEnabledFor(logging.DEBUG):  # pragma: no cover
            logging.debug("Phase 6: after fixing order, done\n%s",
                          self.taxonomy.dump(True))

    def _get_insertion_position(self, position: MovePosition):
        db.session.refresh(self)
        if position == MovePosition.INSIDE:
            return self.id, self.left, self.right, self.right, \
                   self.level + 1, self.children.count()
        parent = self.parent
        db.session.refresh(parent)
        if position == MovePosition.AFTER:
            return parent.id, parent.left, parent.right, self.right + 1, \
                   self.level, self.order + 1
        if position == MovePosition.BEFORE:
            return parent.id, parent.left, parent.right, self.left, \
                   self.level, self.order
        raise Exception('Unhandled MovePosition %s' %
                        position)  # pragma: no cover

    @property
    def taxonomy(self):
        term = TaxonomyTerm.query.filter_by(tree_id=self.tree_id,
                                            level=1).one()
        return Taxonomy(term)

    @property
    def tree_path(self) -> Optional[str]:
        """Get path in a taxonomy tree."""
        path = [x[0] for x in self.ancestors_or_self.values(TaxonomyTerm.slug)]
        if not path:
            return None
        return '/' + '/'.join(path)

    def __repr__(self):
        """Represent taxonomy term instance as a unique string."""
        return "<TaxonomyTerm({slug}:{path})>" \
            .format(slug=self.slug, path=self.id)

    @property
    def descendants(self):
        # need to have up to date left and right
        db.session.refresh(self)

        return TaxonomyTerm.query.filter(
            TaxonomyTerm.tree_id == self.tree_id,
            TaxonomyTerm.left > self.left,
            TaxonomyTerm.right < self.right).order_by('left')

    @property
    def descendants_or_self(self):
        # need to have up to date left and right
        db.session.refresh(self)

        return TaxonomyTerm.query.filter(
            TaxonomyTerm.tree_id == self.tree_id,
            TaxonomyTerm.left >= self.left,
            TaxonomyTerm.right <= self.right).order_by('left')

    @property
    def ancestors(self):
        ancestor_cond = and_(
            TaxonomyTerm.tree_id == self.tree_id,
            TaxonomyTerm.left > 1,  # do not take root
            TaxonomyTerm.left < self.left,
            TaxonomyTerm.right > self.right)
        return TaxonomyTerm.query.filter(ancestor_cond).order_by(
            TaxonomyTerm.left)

    @property
    def ancestors_or_self(self):
        ancestor_cond = and_(
            TaxonomyTerm.tree_id == self.tree_id,
            TaxonomyTerm.left > 1,  # do not take root
            TaxonomyTerm.left <= self.left,
            TaxonomyTerm.right >= self.right)
        return TaxonomyTerm.query.filter(ancestor_cond).order_by(
            TaxonomyTerm.left)

    @property
    def link_self(self):
        taxonomy_code, term_path = self.tree_path.lstrip('/').split('/', 1)
        return url_for(
            "taxonomies.taxonomy_get_term",
            taxonomy_code=taxonomy_code,
            term_path=term_path,
            _external=True,
        )

    @property
    def link_tree(self):
        """External URL of the term including drilldown into its subtree."""
        taxonomy_code, term_path = self.tree_path.lstrip('/').split('/', 1)

        return url_for(
            "taxonomies.taxonomy_get_term",
            taxonomy_code=taxonomy_code,
            term_path=term_path,
            drilldown=True,
            _external=True,
        )
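All the moves and deletions above rely on nested-set arithmetic: every node occupies exactly two slots (``left`` and ``right``), which is why the code shifts neighbours by ``occupied_space = right - left + 1``. A small helper sketch that follows from this invariant:

def subtree_node_count(term):
    """Number of nodes in the subtree rooted at ``term`` (itself included)."""
    return (term.right - term.left + 1) // 2

# A leaf has right == left + 1 and therefore counts as exactly one node.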
Example #10
class GroupAdmin(db.Model):
    """Represent an administrator of a group."""

    __tablename__ = 'accounts_group_admin'

    __table_args__ = (db.UniqueConstraint('group_id', 'admin_type',
                                          'admin_id'),
                      getattr(db.Model, '__table_args__', {}))

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    """GroupAdmin identifier."""

    group_id = db.Column(db.Integer, db.ForeignKey(Group.id), nullable=False)
    """Group for membership."""

    admin_type = db.Column(db.Unicode(255))
    """Generic relationship to an object."""

    admin_id = db.Column(db.Integer, nullable=False)
    """Generic relationship to an object."""

    #
    # Relations
    #

    group = db.relationship(Group,
                            backref=db.backref('admins',
                                               cascade='all, delete-orphan'))
    """Group relationship."""

    admin = generic_relationship(admin_type, admin_id)
    """Generic relationship to administrator of group."""
    @classmethod
    def create(cls, group, admin):
        """Create a new group admin.

        :param group: Group object.
        :param admin: Admin object.
        :returns: Newly created GroupAdmin object.
        :raises: IntegrityError
        """
        with db.session.begin_nested():
            obj = cls(
                group=group,
                admin=admin,
            )
            db.session.add(obj)
        db.session.commit()
        return obj

    @classmethod
    def get(cls, group, admin):
        """
        Get specific GroupAdmin object.

        :param group: Group object.
        :param admin: Admin object.
        :returns: GroupAdmin object or None.
        """
        try:
            ga = cls.query.filter_by(
                group=group,
                admin_id=admin.get_id(),
                admin_type=resolve_admin_type(admin)).one()
            return ga
        except Exception:
            return None

    @classmethod
    def delete(cls, group, admin):
        """Delete admin from group.

        :param group: Group object.
        :param admin: Admin object.
        """
        with db.session.begin_nested():
            obj = cls.query.filter(cls.admin == admin,
                                   cls.group == group).one()
            db.session.delete(obj)
        db.session.commit()

    @classmethod
    def query_by_group(cls, group):
        """
        Get all admins for a specific group.

        :param group: Group object.
        :returns: Admin object for specific group.
        """
        return cls.query.filter_by(group=group)

    @classmethod
    def query_by_admin(cls, admin):
        """
        Get all groups for a specific admin.

        :param admin: Admin object.
        :returns: Group objects for specific admin.
        """
        return cls.query.filter_by(admin_type=resolve_admin_type(admin),
                                   admin_id=admin.get_id())

    @classmethod
    def query_admins_by_group_ids(cls, groups_ids=None):
        """
        Get count of admins per group.

        :param list groups_ids: Optional list of group ids to filter by.
        :returns: Query object.
        """
        assert groups_ids is None or isinstance(groups_ids, list)

        query = db.session.query(Group.id, func.count(
            GroupAdmin.id)).join(GroupAdmin).group_by(Group.id)

        if groups_ids:
            query = query.filter(Group.id.in_(groups_ids))

        return query
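A short usage sketch for the admin-count query (group ids are placeholders):

counts = GroupAdmin.query_admins_by_group_ids(groups_ids=[1, 2, 3]).all()
for group_id, admin_count in counts:      # (Group.id, count) tuples
    print(group_id, admin_count)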
Example #11
class GitRepository(db.Model):
    """Information about a GitHub repository."""

    __tablename__ = 'git_connected_repositories'
    __table_args__ = (db.UniqueConstraint('recid',
                                          'git_repo_id',
                                          'branch',
                                          name='unique_ids_constraint'), )

    id = db.Column(db.Integer, primary_key=True)
    git_repo_id = db.Column(db.Integer, unique=False, index=True)

    # CAP relations
    recid = db.Column(db.String(255), unique=False, nullable=False)
    repo_saved_name = db.Column(db.String(255), nullable=True)
    user_id = db.Column(db.Integer, db.ForeignKey(User.id))
    user = db.relationship(User)

    # git specific attributes
    url = db.Column(db.String(255), nullable=False)
    host = db.Column(db.String(255), nullable=False)
    owner = db.Column(db.String(255), nullable=False)
    name = db.Column(db.String(255), nullable=False)
    branch = db.Column(db.String(255), nullable=False, default='master')

    # enable/disable the hook through this field (hook id)
    hook = db.Column(db.String(255), nullable=True)
    hook_secret = db.Column(db.String(32), nullable=True)

    # check if the repo should be downloaded every time an event occurs
    # do not remove create_constraint, sqlalchemy bug workaround
    for_download = db.Column(db.Boolean(create_constraint=False),
                             nullable=False,
                             default=False)

    @classmethod
    def create_or_get(cls,
                      git,
                      data_url,
                      user_id,
                      record_id,
                      hook,
                      hook_secret,
                      name=None,
                      for_download=False):
        """Create a new repository instance, using the API information."""
        repo = cls.get_by(git.repo_id, branch=git.branch)

        if not repo:
            # avoid celery trouble with serializing
            user = User.query.filter_by(id=user_id).first()
            repo = cls(git_repo_id=git.repo_id,
                       host=git.host,
                       owner=git.owner,
                       name=git.repo,
                       branch=git.branch,
                       url=data_url,
                       recid=record_id,
                       user=user,
                       user_id=user.id,
                       hook=hook,
                       hook_secret=hook_secret,
                       for_download=for_download,
                       repo_saved_name=name)
            db.session.add(repo)
            db.session.commit()

        return repo

    @classmethod
    def get_by(cls, repo_id, branch='master'):
        """Get a repo by its ID and branch if available."""
        return cls.query.filter(cls.git_repo_id == repo_id,
                                cls.branch == branch).first()

    def update_hook(self, hook_id=None, hook_secret=None):
        """Update the hook of the retrieved repo."""
        self.hook = hook_id
        self.hook_secret = hook_secret
        db.session.commit()

    def __repr__(self):
        """Get repository representation."""
        return '<Repository {self.name}: {self.git_repo_id}>'.format(self=self)
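``create_or_get`` expects a small git API wrapper; a hedged sketch of only the attributes it actually reads (the ``GitInfo`` stand-in and all values are illustrative, not part of the example):

from collections import namedtuple

GitInfo = namedtuple('GitInfo', ['repo_id', 'host', 'owner', 'repo', 'branch'])

git = GitInfo(repo_id=42, host='github.com', owner='alice',
              repo='analysis-code', branch='master')
repo = GitRepository.create_or_get(
    git,
    data_url='https://github.com/alice/analysis-code',
    user_id=1,
    record_id='abc123',
    hook=None,
    hook_secret=None)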
Example #12
class Index(db.Model, Timestamp):
    """
    Represent an index.

    The Index object contains ``created`` and ``updated`` properties
    that are updated automatically.
    """

    __tablename__ = 'index'

    __table_args__ = (db.UniqueConstraint('parent',
                                          'position',
                                          name='uix_position'), )

    id = db.Column(db.BigInteger, primary_key=True, unique=True)
    """Identifier of the index."""

    parent = db.Column(db.BigInteger, nullable=False, default=0)
    """Parent Information of the index."""

    position = db.Column(db.Integer, nullable=False, default=0)
    """Children position of parent."""

    index_name = db.Column(db.Text, nullable=True, default='')
    """Name of the index."""

    index_name_english = db.Column(db.Text, nullable=False, default='')
    """English Name of the index."""

    comment = db.Column(db.Text, nullable=True, default='')
    """Comment of the index."""

    more_check = db.Column(db.Boolean(name='more_check'),
                           nullable=False,
                           default=False)
    """More Status of the index."""

    display_no = db.Column(db.Integer, nullable=False, default=0)
    """Display number of the index."""

    harvest_public_state = db.Column(db.Boolean(name='harvest_public_state'),
                                     nullable=False,
                                     default=True)
    """Harvest public State of the index."""

    display_format = db.Column(db.Text, nullable=True, default='1')
    """The Format of Search Resault."""

    image_name = db.Column(db.Text, nullable=False, default='')
    """The Name of upload image."""

    public_state = db.Column(db.Boolean(name='public_state'),
                             nullable=False,
                             default=False)
    """Public State of the index."""

    public_date = db.Column(db.DateTime().with_variant(mysql.DATETIME(fsp=6),
                                                       "mysql"),
                            nullable=True)
    """Public Date of the index."""

    recursive_public_state = db.Column(db.Boolean(name='recs_public_state'),
                                       nullable=True,
                                       default=False)
    """Recursive Public State of the index."""

    browsing_role = db.Column(db.Text, nullable=True)
    """Browsing Role of the  ."""

    recursive_browsing_role = db.Column(db.Boolean(name='recs_browsing_role'),
                                        nullable=True,
                                        default=False)
    """Recursive Browsing Role of the index."""

    contribute_role = db.Column(db.Text, nullable=True)
    """Contribute Role of the index."""

    recursive_contribute_role = db.Column(
        db.Boolean(name='recs_contribute_role'), nullable=True, default=False)
    """Recursive Browsing Role of the index."""

    browsing_group = db.Column(db.Text, nullable=True)
    """Browsing Group of the  ."""

    recursive_browsing_group = db.Column(
        db.Boolean(name='recs_browsing_group'), nullable=True, default=False)
    """Recursive Browsing Group of the index."""

    contribute_group = db.Column(db.Text, nullable=True)
    """Contribute Group of the index."""

    recursive_contribute_group = db.Column(
        db.Boolean(name='recs_contribute_group'), nullable=True, default=False)
    """Recursive Browsing Group of the index."""

    owner_user_id = db.Column(db.Integer, nullable=True, default=0)
    """Owner user id of the index."""

    # index_items = db.relationship('IndexItems', back_populates='index', cascade='delete')

    def __iter__(self):
        """Iterate over simple (name, value) attribute pairs."""
        for name in dir(Index):
            if not name.startswith('__') and not name.startswith('_') \
                    and name not in dir(Timestamp):
                value = getattr(self, name)
                if value is None:
                    value = ""
                if isinstance(value, str) or isinstance(value, bool) \
                        or isinstance(value, datetime) \
                        or isinstance(value, int):
                    yield (name, value)

    # format setting for community admin page

    def __str__(self):
        """Representation."""
        return 'Index <id={0.id}, index_name={0.index_name_english}>'.format(
            self)