Example #1
class IndexTree(db.Model, Timestamp):
    """Represent an index tree structure.

    The IndexTree object contains ``created`` and ``updated``
    properties that are automatically updated.
    """

    __tablename__ = 'index_tree'

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    """Identifier of the index tree."""

    tree = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                     default=lambda: dict(),
                     nullable=True)
    """Store the index tree structure in JSON format."""
Example #2
File: models.py Project: mhaya/weko
class Authors(db.Model, Timestamp):
    """
    Represent an index.

    The Index object contains a ``created`` and  a ``updated``
    properties that are automatically updated.
    """

    __tablename__ = 'authors'

    id = db.Column(db.BigInteger, primary_key=True, unique=True)
    """id of the authors."""

    gather_flg = db.Column(
        db.BigInteger,
        primary_key=False,
        unique=False,
        default=0)
    """gather_flg of the authors."""

    json = db.Column(
        db.JSON().with_variant(
            postgresql.JSONB(none_as_null=True),
            'postgresql',
        ).with_variant(
            JSONType(),
            'sqlite',
        ).with_variant(
            JSONType(),
            'mysql',
        ),
        default=lambda: dict(),
        nullable=True
    )
    """json for author info"""
Example #3
class FileMetadata(db.Model, Timestamp):
    """Represent a record metadata.

    The FileMetadata object contains a ``created`` and  a ``updated``
    properties that are automatically updated.
    """

    # Enables SQLAlchemy-Continuum versioning
    __versioned__ = {}

    __tablename__ = 'file_metadata'

    id = db.Column(
        db.Integer(),
        autoincrement=True,
        primary_key=True
    )

    pid = db.Column(
        db.Integer()
    )
    """Record identifier."""

    # uid = db.Column(
    #     UUIDType,
    #     default=uuid.uuid4
    # )

    contents = db.Column(
        LargeBinary,
        nullable=True
    )

    json = db.Column(
        db.JSON().with_variant(
            postgresql.JSONB(none_as_null=True),
            'postgresql',
        ).with_variant(
            JSONType(),
            'sqlite',
        ).with_variant(
            JSONType(),
            'mysql',
        ),
        default=lambda: dict(),
        nullable=True
    )
    """Store metadata in JSON format.

    When you create a new ``Record`` the ``json`` field value should never be
    ``NULL``. Default value is an empty dict. ``NULL`` value means that the
    record metadata has been deleted.
    """

    version_id = db.Column(db.Integer, nullable=False)
    """Used by SQLAlchemy for optimistic concurrency control."""

    __mapper_args__ = {
        'version_id_col': version_id
    }
Example #4
class OAIProvider(db.Model):
    __tablename__ = "oarepo_oai_provider"
    id = db.Column(db.Integer, primary_key=True)
    code = db.Column(db.String(16), nullable=False, unique=True)
    description = db.Column(db.String(2048), nullable=True)
    oai_endpoint = db.Column(db.String(2048), nullable=False)
    set_ = db.Column(db.String(256), name="set")
    metadata_prefix = db.Column(db.String(32), default="oai_dc")
    constant_fields = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                                default=lambda: dict(),
                                nullable=True)

    def get_parsers(self):
        return registry.parsers.get(self.code) or {}

    def get_rules(self, parser_name):
        return registry.rules.get(parser_name)
Example #5
class CircTransactions(db.Model, Timestamp):
    """Circulation transactions record."""

    __tablename__ = 'circulation_transactions'

    id = db.Column(
        UUIDType,
        primary_key=True,
        default=uuid.uuid4,
    )
    """Transaction record identifier."""

    json = db.Column(
        db.JSON().with_variant(
            postgresql.JSONB(none_as_null=True),
            'postgresql',
        ).with_variant(
            JSONType(),
            'sqlite',
        ).with_variant(
            JSONType(),
            'mysql',
        ),
        default=lambda: dict(),
        nullable=True
    )
    """Store Transaction record metadata in JSON format."""
Example #6
class FeedbackMailList(db.Model, Timestamp):
    """Represent an feedback mail list.

    Stored table stored list email address base on item_id
    """

    __tablename__ = 'feedback_mail_list'

    id = db.Column(db.Integer(), primary_key=True, autoincrement=True)
    """Feedback mail list identifier."""

    item_id = db.Column(
        UUIDType,
        nullable=False,
        default=uuid.uuid4,
    )
    """Item identifier."""

    mail_list = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                          default=lambda: dict(),
                          nullable=True)
    """List of feedback mail in json format."""
Example #7
class GroupMetadata(db.Model, Timestamp):
    """Metadata for a group."""

    __tablename__ = 'groupmetadata'

    # TODO: assert group.type == GroupType.Identity
    group_id = db.Column(UUIDType,
                         db.ForeignKey(Group.id,
                                       onupdate='CASCADE',
                                       ondelete='CASCADE'),
                         primary_key=True)
    group = db.relationship(
        Group,
        backref=backref('data', uselist=False),
        single_parent=True,
    )
    json = db.Column(
        db.JSON().with_variant(postgresql.JSONB(none_as_null=True),
                               'postgresql').with_variant(
                                   JSONType(), 'sqlite'),
        default=dict,
    )

    # Identifier metadata
    SCHEMA = {
        '$schema': 'http://json-schema.org/draft-06/schema#',
        'definitions': COMMON_SCHEMA_DEFINITIONS,
        'additionalProperties': False,
        'properties': {
            k: v
            for k, v in OBJECT_TYPE_SCHEMA['properties'].items()
            if k in OVERRIDABLE_KEYS or k in MERGEABLE_KEYS
        },
    }

    def update(self, payload: dict, validate: bool = True):
        """Update the metadata of a group."""
        new_json = deepcopy(self.json or {})
        for key in OVERRIDABLE_KEYS:
            if payload.get(key):
                if key == 'Type':
                    type_val = (payload['Type'] or {}).get('Name', 'unknown')
                    if type_val == 'unknown':
                        continue
                new_json[key] = payload[key]
        for key in MERGEABLE_KEYS:
            mergeKey(new_json, payload, key)
        # Set "Type" to "unknown" if not provided
        if not new_json.get('Type', {}).get('Name'):
            new_json['Type'] = {'Name': 'unknown'}
        if validate:
            jsonschema.validate(new_json, self.SCHEMA)
        self.json = new_json
        flag_modified(self, 'json')
        return self
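Note the ``flag_modified(self, 'json')`` call above: in-place mutations of a
JSON column are not tracked by SQLAlchemy's unit of work, so the attribute has
to be marked dirty explicitly. A minimal, self-contained sketch of that pattern
(hypothetical model, not one of the examples in this list):

from sqlalchemy import JSON, Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base
from sqlalchemy.orm.attributes import flag_modified

Base = declarative_base()


class Doc(Base):
    """Hypothetical model with a plain JSON column."""

    __tablename__ = 'doc'
    id = Column(Integer, primary_key=True)
    json = Column(JSON, default=dict)


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    doc = Doc(json={'Title': 'old'})
    session.add(doc)
    session.commit()

    doc.json['Title'] = 'new'     # in-place mutation: invisible to the session
    flag_modified(doc, 'json')    # mark the attribute as changed
    session.commit()              # now the UPDATE is emitted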
Example #8
class DraftMetadataBase(Timestamp):
    """Represent a base class for draft metadata.

    The DraftMetadata object contains `created` and `updated`
    properties that are automatically updated.
    """

    # Enables SQLAlchemy-Continuum versioning
    __versioned__ = {}

    id = db.Column(
        UUIDType,
        primary_key=True,
        default=uuid.uuid4,
    )
    """Draft identifier."""

    fork_id = db.Column(UUIDType)
    """Draft identifier, it is the same than the record it is draft of"""

    fork_version_id = db.Column(db.Integer)
    """Version id of the record it is draft of."""

    version_id = db.Column(db.Integer, nullable=False)
    """Used by SQLAlchemy for optimistic concurrency control."""

    status = db.Column(db.String(255), default="draft", nullable=False)
    """Status for workflow management."""

    expiry_date = db.Column(db.DateTime().with_variant(mysql.DATETIME(fsp=6),
                                                       "mysql"),
                            default=datetime.utcnow,
                            nullable=True)
    """Specifies when the it expires. If `NULL` the draft does not expire"""

    json = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                     default=lambda: dict(),
                     nullable=True)
    """Store metadata in JSON format.
    When you create a new `Record the `json field value should never be
    `NULL`. Default value is an empty dict. `NULL` value means that the
    record metadata has been deleted.
    """

    __mapper_args__ = {'version_id_col': version_id}
Example #9
class HarvestLogs(db.Model):
    """Harvest Logs."""

    __tablename__ = "harvest_logs"

    id = db.Column(db.Integer, primary_key=True)
    harvest_setting_id = db.Column(db.Integer, nullable=False)
    start_time = db.Column(db.DateTime, default=datetime.datetime.now)
    end_time = db.Column(db.DateTime, nullable=True)
    status = db.Column(db.String(10), nullable=False, default='Running')
    errmsg = db.Column(db.String(255), nullable=True, default=None)
    requrl = db.Column(db.String(255), nullable=True, default=None)
    counter = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                        default=lambda: dict(),
                        nullable=True)
    setting = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                        default=lambda: dict(),
                        nullable=True)
Example #10
class ItemTypeMapping(db.Model, Timestamp):
    """Represent a record metadata.

    The ItemTypeMapping object contains a ``created`` and  a ``updated``
    properties that are automatically updated.
    """

    # Enables SQLAlchemy-Continuum versioning
    __versioned__ = {}

    __tablename__ = 'item_type_mapping'

    id = db.Column(
        db.Integer(),
        primary_key=True,
        autoincrement=True
    )
    """Record identifier."""

    item_type_id = db.Column(db.Integer)
    """ID of item type."""

    mapping = db.Column(
        db.JSON().with_variant(
            postgresql.JSONB(none_as_null=True),
            'postgresql',
        ).with_variant(
            JSONType(),
            'sqlite',
        ).with_variant(
            JSONType(),
            'mysql',
        ),
        default=lambda: dict(),
        nullable=True
    )
    """Store mapping in JSON format.
     When you create a new ``Record`` the ``mapping`` field value
     should never be ``NULL``. Default value is an empty dict.
     ``NULL`` value means that the record metadata has been deleted.
    """

    version_id = db.Column(db.Integer, nullable=False)
    """Used by SQLAlchemy for optimistic concurrency control."""

    __mapper_args__ = {
        'version_id_col': version_id
    }
Example #11
class RecordMetadata(db.Model, Timestamp):
    """Represent a record metadata inside the SQL database.

    Additionally it contains two columns ``created`` and ``updated``
    with automatically managed timestamps.
    """

    # Enables SQLAlchemy-Continuum versioning
    __versioned__ = {}

    __tablename__ = 'records_metadata'

    id = db.Column(
        UUIDType,
        primary_key=True,
        default=uuid.uuid4,
    )
    """Record identifier."""

    json = db.Column(
        db.JSON().with_variant(
            postgresql.JSONB(none_as_null=True),
            'postgresql',
        ).with_variant(
            JSONType(),
            'sqlite',
        ).with_variant(
            JSONType(),
            'mysql',
        ),
        default=lambda: dict(),
        nullable=True
    )
    """Store metadata in JSON format.

    When you create new ``Record`` the ``json`` field value should
    never be ``NULL``.  Default value is an empty dict.  ``NULL``
    value means that the record metadata has been deleted.
    """

    version_id = db.Column(db.Integer, nullable=False)
    """It is used by SQLAlchemy for optimistic concurrency control."""

    __mapper_args__ = {
        'version_id_col': version_id
    }
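The ``version_id_col`` mapper argument makes SQLAlchemy increment ``version_id``
on every flush and raise ``StaleDataError`` when a concurrent session has
already updated the row. A minimal, self-contained sketch of the mechanism
(hypothetical model, SQLAlchemy 1.4+ assumed):

from sqlalchemy import JSON, Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class VersionedRecord(Base):
    """Hypothetical record with optimistic concurrency control."""

    __tablename__ = 'versioned_record'
    id = Column(Integer, primary_key=True)
    json = Column(JSON, default=dict)
    version_id = Column(Integer, nullable=False)

    __mapper_args__ = {'version_id_col': version_id}


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    rec = VersionedRecord(json={})
    session.add(rec)
    session.commit()
    assert rec.version_id == 1      # set automatically on INSERT

    rec.json = {'title': 'updated'}
    session.commit()
    assert rec.version_id == 2      # bumped on every UPDATE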
Example #12
class ResyncLogs(db.Model, Timestamp):
    """Harvest Logs."""

    __tablename__ = "resync_logs"

    id = db.Column(db.Integer, primary_key=True)

    resync_indexes_id = db.Column(
        db.Integer,
        db.ForeignKey(ResyncIndexes.id,
                      ondelete='CASCADE'),
        nullable=True
    )
    log_type = db.Column(db.String(10))

    task_id = db.Column(db.String(40), default=None)

    start_time = db.Column(db.DateTime, default=datetime.now)

    end_time = db.Column(db.DateTime, nullable=True)

    status = db.Column(db.String(10), nullable=False, default='Running')

    errmsg = db.Column(db.String(255), nullable=True, default=None)

    counter = db.Column(
        db.JSON().with_variant(
            postgresql.JSONB(none_as_null=True),
            'postgresql',
        ).with_variant(
            JSONType(),
            'sqlite',
        ).with_variant(
            JSONType(),
            'mysql',
        ),
        default=lambda: dict(),
        nullable=True
    )

    resync_index = db.relationship(
        ResyncIndexes,
        backref='resync_indexes_id',
        foreign_keys=[resync_indexes_id])
    """Relation to the Resync Identifier."""
Example #13
class ErrorMonitoring(db.Model, Timestamp):
    """Error monitoring model."""

    __tablename__ = 'error_monitoring'

    id = db.Column(UUIDType, default=uuid.uuid4, primary_key=True)
    event_id = db.Column(UUIDType)
    origin = db.Column(db.String, nullable=False)
    error = db.Column(db.String)
    n_retries = db.Column(db.Integer)
    payload = db.Column(
        db.JSON().with_variant(postgresql.JSONB(none_as_null=True),
                               'postgresql').with_variant(
                                   JSONType(), 'sqlite'),
        default=dict,
    )

    @classmethod
    def getLastWeeksErrors(cls, **kwargs):
        """Gets all the errors from last week where it has been rerun for at least 2 times all ready"""
        last_week = datetime.datetime.now() - datetime.timedelta(days=8)
        resp = cls.query.filter(cls.created > str(last_week),
                                cls.n_retries > 2).all()
        return resp

    @classmethod
    def getFromEvent(cls, event_id):
        """Dictionary representation of the error."""
        return cls.query.filter_by(event_id=event_id).one_or_none()

    def to_dict(self):
        """Dictionary representation of the error."""
        return dict(created=self.created,
                    updated=self.updated,
                    id=self.id,
                    origin=self.origin,
                    error=self.error,
                    payload=self.payload)

    def __repr__(self):
        """String representation of the error."""
        return str(self.to_dict())
Example #14
class ItemTypeEditHistory(db.Model, Timestamp):
    """Represent an item type edit history.

    The ItemTypeEditHistory object contains ``created`` and ``updated``
    properties that are automatically updated.
    """

    __tablename__ = 'item_type_edit_history'

    id = db.Column(db.Integer(), primary_key=True, autoincrement=True)
    """Identifier of item type edit history."""

    item_type_id = db.Column(db.Integer(),
                             db.ForeignKey(ItemType.id),
                             nullable=False)
    """Identifier for item type."""

    user_id = db.Column(db.Integer(), db.ForeignKey(User.id), nullable=False)
    """Identifier for author of item type."""

    notes = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                      default=lambda: dict(),
                      nullable=True)
    """Edit notes for item type."""
    @classmethod
    def get_latest_by_item_type_id(cls, item_type_id=0):
        """Get latest notes for item type."""
        pass
Example #15
class ActionJournal(db.Model, TimestampMixin):
    """Define journal info."""

    __tablename__ = 'workflow_action_journal'

    id = db.Column(db.Integer(),
                   nullable=False,
                   primary_key=True,
                   autoincrement=True)
    """Activity_Action identifier."""

    activity_id = db.Column(db.String(24),
                            nullable=False,
                            unique=False,
                            index=True)
    """Activity id of Activity Action."""

    action_id = db.Column(db.Integer(),
                          db.ForeignKey(Action.id),
                          nullable=True,
                          unique=False)
    """Action id."""

    action_journal = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                               default=lambda: dict(),
                               nullable=True)
    """Action journal info."""
Example #16
class Index(db.Model, Timestamp):
    """
    Represent an index.

    The Index object contains ``created`` and ``updated``
    properties that are automatically updated.
    """

    __tablename__ = 'index'

    __table_args__ = (db.UniqueConstraint('parent',
                                          'position',
                                          name='uix_position'), )

    id = db.Column(db.BigInteger, primary_key=True, unique=True)
    """Identifier of the index."""

    parent = db.Column(db.BigInteger, nullable=False, default=0)
    """Parent Information of the index."""

    position = db.Column(db.Integer, nullable=False, default=0)
    """Children position of parent."""

    index_name = db.Column(db.Text, nullable=True, default='')
    """Name of the index."""

    index_name_english = db.Column(db.Text, nullable=False, default='')
    """English Name of the index."""

    index_link_name = db.Column(db.Text, nullable=True, default='')
    """Name of the index link."""

    index_link_name_english = db.Column(db.Text, nullable=False, default='')
    """English Name of the index link."""

    harvest_spec = db.Column(db.Text, nullable=True, default='')
    """Harvest Spec."""

    index_link_enabled = db.Column(db.Boolean(name='index_link_enabled'),
                                   nullable=False,
                                   default=False)
    """Index link enable flag."""

    comment = db.Column(db.Text, nullable=True, default='')
    """Comment of the index."""

    more_check = db.Column(db.Boolean(name='more_check'),
                           nullable=False,
                           default=False)
    """More Status of the index."""

    display_no = db.Column(db.Integer, nullable=False, default=0)
    """Display number of the index."""

    harvest_public_state = db.Column(db.Boolean(name='harvest_public_state'),
                                     nullable=False,
                                     default=True)
    """Harvest public State of the index."""

    display_format = db.Column(db.Text, nullable=True, default='1')
    """The Format of Search Resault."""

    image_name = db.Column(db.Text, nullable=False, default='')
    """The Name of upload image."""

    public_state = db.Column(db.Boolean(name='public_state'),
                             nullable=False,
                             default=False)
    """Public State of the index."""

    public_date = db.Column(db.DateTime().with_variant(mysql.DATETIME(fsp=6),
                                                       "mysql"),
                            nullable=True)
    """Public Date of the index."""

    recursive_public_state = db.Column(db.Boolean(name='recs_public_state'),
                                       nullable=True,
                                       default=False)
    """Recursive Public State of the index."""

    coverpage_state = db.Column(db.Boolean(name='coverpage_state'),
                                nullable=True,
                                default=False)
    """PDF Cover Page State of the index."""

    recursive_coverpage_check = db.Column(
        db.Boolean(name='recursive_coverpage_check'),
        nullable=True,
        default=False)
    """Recursive PDF Cover Page State of the index."""

    browsing_role = db.Column(db.Text, nullable=True)
    """Browsing Role of the  ."""

    recursive_browsing_role = db.Column(db.Boolean(name='recs_browsing_role'),
                                        nullable=True,
                                        default=False)
    """Recursive Browsing Role of the index."""

    contribute_role = db.Column(db.Text, nullable=True)
    """Contribute Role of the index."""

    recursive_contribute_role = db.Column(
        db.Boolean(name='recs_contribute_role'), nullable=True, default=False)
    """Recursive Browsing Role of the index."""

    browsing_group = db.Column(db.Text, nullable=True)
    """Browsing Group of the  ."""

    recursive_browsing_group = db.Column(
        db.Boolean(name='recs_browsing_group'), nullable=True, default=False)
    """Recursive Browsing Group of the index."""

    contribute_group = db.Column(db.Text, nullable=True)
    """Contribute Group of the index."""

    recursive_contribute_group = db.Column(
        db.Boolean(name='recs_contribute_group'), nullable=True, default=False)
    """Recursive Browsing Group of the index."""

    owner_user_id = db.Column(db.Integer, nullable=True, default=0)
    """Owner user id of the index."""

    # item_custom_sort = db.Column(db.Text, nullable=True, default='')

    item_custom_sort = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                                 default=lambda: dict(),
                                 nullable=True)
    """The sort of item by custom setting"""

    # index_items = db.relationship('IndexItems', back_populates='index', cascade='delete')

    def __iter__(self):
        """Iter."""
        for name in dir(Index):
            if not name.startswith('__') and not name.startswith('_') \
                    and name not in dir(Timestamp):
                value = getattr(self, name)
                if value is None:
                    value = ""
                if isinstance(value, str) or isinstance(value, bool) \
                        or isinstance(value, datetime) \
                        or isinstance(value, int):
                    yield (name, value)

    # format setting for community admin page

    def __str__(self):
        """Representation."""
        return 'Index <id={0.id}, index_name={0.index_name_english}>'.format(
            self)

    @classmethod
    def have_children(cls, id):
        """Have Children."""
        children = cls.query.filter_by(parent=id).all()
        return False if (children is None or len(children) == 0) else True
Example #17
from sqlalchemy.dialects import postgresql
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import validates
from sqlalchemy_utils import IPAddressType, Timestamp
from sqlalchemy_utils.types import JSONType

from .errors import AlreadyLinkedError
from .profiles import UserPreferenceDict, UserProfileDict
from .utils import validate_username

json_field = (db.JSON().with_variant(
    postgresql.JSONB(none_as_null=True),
    "postgresql",
).with_variant(
    JSONType(),
    "sqlite",
).with_variant(
    JSONType(),
    "mysql",
))

userrole = db.Table(
    "accounts_userrole",
    db.Column(
        "user_id",
        db.Integer(),
        db.ForeignKey("accounts_user.id", name="fk_accounts_userrole_user_id"),
    ),
    db.Column(
        "role_id",
        db.Integer(),
        db.ForeignKey("accounts_role.id", name="fk_accounts_userrole_role_id"),
    ),
)
Example #18
class ReanaJob(db.Model):
    """Model defining REANA job."""
    __tablename__ = 'reana'

    id = db.Column(UUIDType,
                   primary_key=True,
                   nullable=False,
                   default=uuid.uuid4)

    user_id = db.Column(
        db.Integer,
        db.ForeignKey(User.id),
        nullable=False,
    )

    record_id = db.Column(
        UUIDType,
        db.ForeignKey(RecordMetadata.id),
        nullable=False,
    )

    name = db.Column(db.String(100), unique=False, nullable=False)

    params = db.Column(
        db.JSON().with_variant(
            postgresql.JSONB(none_as_null=True),
            'postgresql',
        ).with_variant(
            JSONType(),
            'sqlite',
        ).with_variant(
            JSONType(),
            'mysql',
        ),
        default=lambda: dict(),
        nullable=True
    )

    output = db.Column(
        db.JSON().with_variant(
            postgresql.JSONB(none_as_null=True),
            'postgresql',
        ).with_variant(
            JSONType(),
            'sqlite',
        ).with_variant(
            JSONType(),
            'mysql',
        ),
        default=lambda: dict(),
        nullable=True
    )

    user = db.relationship('User')
    record = db.relationship('RecordMetadata')

    @classmethod
    def get_jobs(cls, user_id, record_id):
        """Return all the jobs run by user for this record."""
        return cls.query.filter_by(user_id=user_id,
                                   record_id=record_id).all()
Example #19
File: models.py Project: mhaya/weko
class RankingSettings(db.Model):
    """Ranking settings."""

    __tablename__ = 'ranking_settings'

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)

    is_show = db.Column(db.Boolean(name='show'), nullable=False, default=False)

    new_item_period = db.Column(db.Integer, nullable=False, default=14)

    statistical_period = db.Column(db.Integer, nullable=False, default=365)

    display_rank = db.Column(db.Integer, nullable=False, default=10)

    rankings = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                         default=lambda: dict(),
                         nullable=True)

    @classmethod
    def get(cls, id=0):
        """Get ranking settings."""
        return cls.query.filter_by(id=id).first()

    @classmethod
    def update(cls, id=0, data=None):
        """Update/Create ranking settings."""
        try:
            with db.session.begin_nested():
                new_data_flag = False
                settings = cls.query.filter_by(id=id).first()
                if not settings:
                    settings = RankingSettings()
                    new_data_flag = True
                settings.id = id
                settings.is_show = data.is_show
                settings.new_item_period = data.new_item_period
                settings.statistical_period = data.statistical_period
                settings.display_rank = data.display_rank
                settings.rankings = data.rankings
                if new_data_flag:
                    db.session.add(settings)
                else:
                    db.session.merge(settings)
            db.session.commit()
        except BaseException as ex:
            db.session.rollback()
            current_app.logger.debug(ex)
            raise
        return cls

    @classmethod
    def delete(cls, id=0):
        """Delete settings."""
        try:
            with db.session.begin_nested():
                cls.query.filter_by(id=id).delete()
            db.session.commit()
        except BaseException as ex:
            db.session.rollback()
            current_app.logger.debug(ex)
            raise ex
        return cls
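RankingSettings.update/delete (and several models below, such as
WidgetDesignSetting and ApiCertificate) follow the same transactional idiom:
mutate inside ``db.session.begin_nested()`` (a SAVEPOINT), commit the outer
transaction, and roll back on any exception. A minimal, self-contained sketch
of that idiom with plain SQLAlchemy (hypothetical model and helper, SQLAlchemy
1.4+ assumed):

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Setting(Base):
    """Hypothetical settings table."""

    __tablename__ = 'setting'
    id = Column(Integer, primary_key=True)
    value = Column(String(255))


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = Session(engine)


def update_setting(setting_id, value):
    """Update or create a row; return True on success, False on failure."""
    try:
        with session.begin_nested():          # SAVEPOINT
            obj = session.get(Setting, setting_id) or Setting(id=setting_id)
            obj.value = value
            session.merge(obj)
        session.commit()                      # commit the outer transaction
        return True
    except Exception:
        session.rollback()
        return False


print(update_setting(1, 'enabled'))   # True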
Example #20
class WidgetDesignSetting(db.Model):
    """Database for admin WidgetDesignSetting."""

    __tablename__ = 'widget_design_setting'

    repository_id = db.Column(db.String(100), nullable=False, primary_key=True)

    settings = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                         default=lambda: dict(),
                         nullable=True)

    @classmethod
    def select_all(cls):
        """Get all information about widget setting in database.

        :return: Widget setting list.
        """
        query_result = cls.query.all()
        result = []
        if query_result:
            for record in query_result:
                data = dict()
                data['repository_id'] = record.repository_id
                data['settings'] = record.settings
                result.append(data)
        return result

    @classmethod
    def select_by_repository_id(cls, repository_id):
        """Get widget setting value by repository id.

        :param repository_id: Identifier of the repository
        :return: Widget setting
        """
        query_result = cls.query.filter_by(
            repository_id=str(repository_id)).one_or_none()
        data = {}
        if query_result is not None:
            data['repository_id'] = query_result.repository_id
            data['settings'] = query_result.settings

        return data

    @classmethod
    def update(cls, repository_id, settings):
        """Update widget setting.

        :param repository_id: Identifier of the repository
        :param settings: The setting data
        :return: True if success, otherwise False
        """
        query_result = cls.query.filter_by(
            repository_id=str(repository_id)).one_or_none()
        if query_result is None:
            return False
        else:
            try:
                with db.session.begin_nested():
                    query_result.settings = settings
                    db.session.merge(query_result)
                db.session.commit()
                return True
            except Exception as ex:
                current_app.logger.debug(ex)
                db.session.rollback()
                return False

    @classmethod
    def create(cls, repository_id, settings=None):
        """Insert new widget setting.

        :param repository_id: Identifier of the repository
        :param settings: The setting data
        :return: True if success, otherwise False
        """
        try:
            widget_setting = WidgetDesignSetting()
            with db.session.begin_nested():
                if repository_id is not None:
                    widget_setting.repository_id = repository_id
                    widget_setting.settings = settings
                db.session.add(widget_setting)
            db.session.commit()
            return True
        except Exception as ex:
            db.session.rollback()
            current_app.logger.debug(ex)
            return False
Example #21
File: models.py Project: mhaya/weko
class SearchManagement(db.Model):
    """Search setting model."""

    __tablename__ = 'search_management'

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)

    default_dis_num = db.Column(db.Integer, nullable=False, default=20)
    """ Default display number of search results"""

    default_dis_sort_index = db.Column(db.Text, nullable=True, default="")
    """ Default display sort of index search"""

    default_dis_sort_keyword = db.Column(db.Text, nullable=True, default="")
    """ Default display sort of keyword search"""

    sort_setting = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                             default=lambda: dict(),
                             nullable=True)
    """ The list of sort setting"""

    search_conditions = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                                  default=lambda: dict(),
                                  nullable=True)
    """ The list of search condition """

    search_setting_all = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                                   default=lambda: dict(),
                                   nullable=True)
    """ The list of search condition """

    create_date = db.Column(db.DateTime, default=datetime.now)
    """Create Time"""
    @classmethod
    def create(cls, data):
        """Create data."""
        try:
            dataObj = SearchManagement()
            with db.session.begin_nested():
                dataObj.default_dis_num = data.get('dlt_dis_num_selected')
                dataObj.default_dis_sort_index = data.get(
                    'dlt_index_sort_selected')
                dataObj.default_dis_sort_keyword = data.get(
                    'dlt_keyword_sort_selected')
                dataObj.sort_setting = data.get('sort_options')
                dataObj.search_conditions = data.get('detail_condition')
                dataObj.search_setting_all = data
                db.session.add(dataObj)
            db.session.commit()
        except BaseException as ex:
            db.session.rollback()
            current_app.logger.debug(ex)
            raise
        return cls

    @classmethod
    def get(cls):
        """Get setting."""
        id = db.session.query(func.max(SearchManagement.id)).first()[0]
        if id is None:
            return None
        return cls.query.filter_by(id=id).one_or_none()

    @classmethod
    def update(cls, id, data):
        """Update setting."""
        try:
            with db.session.begin_nested():
                setting_data = cls.query.filter_by(id=id).one()
                setting_data.default_dis_num = data.get('dlt_dis_num_selected')
                setting_data.default_dis_sort_index = data.get(
                    'dlt_index_sort_selected')
                setting_data.default_dis_sort_keyword = data.get(
                    'dlt_keyword_sort_selected')
                setting_data.sort_setting = data.get('sort_options')
                setting_data.search_conditions = data.get('detail_condition')
                setting_data.search_setting_all = data
                db.session.merge(setting_data)
            db.session.commit()
        except BaseException as ex:
            db.session.rollback()
            current_app.logger.debug(ex)
            raise
        return cls
Example #22
class ItemTypeProperty(db.Model, Timestamp):
    """Represent an itemtype property.

    The ItemTypeProperty object contains ``created`` and
    ``updated`` properties that are automatically updated.
    """

    __tablename__ = 'item_type_property'

    id = db.Column(
        db.Integer(),
        primary_key=True,
        autoincrement=True
    )
    """Identifier of itemtype property."""

    name = db.Column(
        db.Text,
        nullable=False,
        unique=True
    )
    """Name identifier of itemtype property."""

    schema = db.Column(
        db.JSON().with_variant(
            postgresql.JSONB(none_as_null=True),
            'postgresql',
        ).with_variant(
            JSONType(),
            'sqlite',
        ).with_variant(
            JSONType(),
            'mysql',
        ),
        default=lambda: dict(),
        nullable=True
    )
    """Store schema in JSON format. When you create a new 
    ``ItemTypeProperty`` the ``schema`` field value should never be ``NULL``. 
    Default value is an empty dict. ``NULL`` value means that the record 
    metadata has been deleted. """

    form = db.Column(
        db.JSON().with_variant(
            postgresql.JSONB(none_as_null=True),
            'postgresql',
        ).with_variant(
            JSONType(),
            'sqlite',
        ).with_variant(
            JSONType(),
            'mysql',
        ),
        default=lambda: dict(),
        nullable=True
    )
    """Store schema form (single) in JSON format. When you create a new 
    ``ItemTypeProperty`` the ``form`` field value should never be ``NULL``. 
    Default value is an empty dict. ``NULL`` value means that the record 
    metadata has been deleted. """

    forms = db.Column(
        db.JSON().with_variant(
            postgresql.JSONB(none_as_null=True),
            'postgresql',
        ).with_variant(
            JSONType(),
            'sqlite',
        ).with_variant(
            JSONType(),
            'mysql',
        ),
        default=lambda: dict(),
        nullable=True
    )
    """Store schema form (array) in JSON format. When you create a new 
    ``ItemTypeProperty`` the ``forms`` field value should never be ``NULL``. 
    Default value is an empty dict. ``NULL`` value means that the record 
    metadata has been deleted. """

    delflg = db.Column(db.Boolean(name='delFlg'),
                       default=False, nullable=False)
    """record delete flag
Example #23
class WidgetItem(db.Model):
    """Database for WidgetItem."""

    __tablename__ = 'widget_items'

    widget_id = db.Column(db.Integer, primary_key=True, nullable=False)
    repository_id = db.Column(db.String(100), nullable=False)
    widget_type = db.Column(db.String(100),
                            db.ForeignKey(WidgetType.type_id),
                            nullable=False)
    settings = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                         default=lambda: dict(),
                         nullable=True)
    browsing_role = db.Column(db.Text, nullable=True)
    edit_role = db.Column(db.Text, nullable=True)
    is_enabled = db.Column(db.Boolean(name='enable'), default=True)
    is_deleted = db.Column(db.Boolean(name='deleted'), default=False)

    #
    # Relation
    #
    widgettype = db.relationship(
        WidgetType,
        backref=db.backref('repositories', cascade='all, delete-orphan'))

    #
    # Query Operation
    #
    @classmethod
    def get_by_id(cls, widget_item_id):
        """Get a widget item by id."""
        widget = cls.query.filter_by(widget_id=widget_item_id).one_or_none()
        return widget

    @classmethod
    def get_id_by_repository_and_type(cls, repository, widget_type):
        """Get id by repository id and widget type.

        :param repository: Repository id
        :param widget_type: Widget type
        :return: List of widget ids.
        """
        widget_data = cls.query.filter_by(repository_id=repository,
                                          widget_type=widget_type,
                                          is_deleted=False).all()
        if not widget_data:
            return None

        list_id = list()
        for data in widget_data:
            list_id.append(data.widget_id)
        return list_id

    @classmethod
    def get_sequence(cls, session):
        """Get widget item next sequence.

        :param session: Session
        :return: Next sequence.
        """
        if not session:
            session = db.session
        seq = Sequence('widget_items_widget_id_seq')
        next_id = session.execute(seq)
        return next_id

    @classmethod
    def create(cls, widget_data, session):
        """Create widget item.

        :param widget_data: widget data
        :param session: session
        :return:
        """
        if not session:
            return None
        data = cls(**widget_data)
        session.add(data)

    @classmethod
    def update_by_id(cls, widget_item_id, widget_data, session=None):
        """Update the widget by id.

        Arguments:
            widget_item_id {Integer} -- Id of widget
            widget_data {Dictionary} -- data

        Returns:
            widget -- if success

        """
        if not session:
            session = db.session
        widget = cls.get_by_id(widget_item_id)
        if not widget:
            return
        for k, v in widget_data.items():
            setattr(widget, k, v)
        session.merge(widget)
        return widget

    @classmethod
    def delete_by_id(cls, widget_id, session):
        """Delete the widget by id.

        Arguments:
            widget_id {Integer} -- The widget id

        Returns:
            widget -- If success

        """
        widget = cls.get_by_id(widget_id)
        if not widget:
            return
        setattr(widget, 'is_deleted', True)
        session.merge(widget)
        return widget
Example #24
File: models.py Project: mhaya/weko
class ApiCertificate(db.Model):
    """Database for API Certificate."""

    __tablename__ = 'api_certificate'

    api_code = db.Column(db.String(3),
                         primary_key=True,
                         nullable=False,
                         unique=True)

    api_name = db.Column(db.String(25), nullable=False, unique=True)

    cert_data = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                          default=lambda: dict(),
                          nullable=True)

    @classmethod
    def select_all(cls):
        """Get all information about certificates in database.

        :return: list of dicts with api code, api name and certificate data
        """
        query_result = cls.query.all()
        result = []
        for record in query_result:
            data = dict()
            data['api_code'] = record.api_code
            data['api_name'] = record.api_name
            data['cert_data'] = record.cert_data
            result.append(data)
        return result

    @classmethod
    def select_by_api_code(cls, api_code):
        """Get certificate value by certificate type.

        :param api_code: input api short name
        :return: certificate data corresponding with api code
        """
        query_result = cls.query.filter_by(api_code=api_code).one_or_none()
        data = {}
        if query_result is not None:
            data['api_code'] = query_result.api_code
            data['api_name'] = query_result.api_name
            data['cert_data'] = query_result.cert_data

            return data
        else:
            return None

    @classmethod
    def update_cert_data(cls, api_code, cert_data):
        """Update certification data.

        Overwrite if certificate existed,
        otherwise insert new certificate into database.

        :param api_code: input api short name
        :param cert_data: input certificate value
        :return: true if success, otherwise false
        """
        query_result = cls.query.filter_by(api_code=api_code).one_or_none()
        # Update in case certificate already existed in database
        if query_result is None:
            return False
        else:
            try:
                with db.session.begin_nested():
                    query_result.cert_data = cert_data
                    db.session.merge(query_result)
                db.session.commit()
                return True
            except Exception as ex:
                current_app.logger.debug(ex)
                db.session.rollback()
                return False

    @classmethod
    def insert_new_api_cert(cls, api_code, api_name, cert_data=None):
        """Insert new certificate.

        :param api_code: input api code
        :param api_name: input api name
        :param cert_data: input certificate value with json format
        :return: True if success, otherwise False
        """
        try:
            dataObj = ApiCertificate()
            with db.session.begin_nested():
                if api_code is not None:
                    dataObj.api_code = api_code
                    dataObj.api_name = api_name
                if cert_data is not None:
                    dataObj.cert_data = cert_data
                db.session.add(dataObj)
            db.session.commit()
            return True
        except Exception as ex:
            db.session.rollback()
            current_app.logger.debug(ex)
            return False

    @classmethod
    def update_api_cert(cls, api_code, api_name, cert_data):
        """Update API certification.

        Overwrite if certificate existed,
        otherwise insert new certificate into database.

        :param api_code: input api code
        :param api_name: input api name
        :param cert_data: input certificate value
        :return: true if success, otherwise false
        """
        # Get current api data
        query_result = cls.query.filter_by(api_code=api_code).one_or_none()

        if query_result is None:
            return False
        else:
            try:
                with db.session.begin_nested():
                    query_result.api_name = api_name
                    query_result.cert_data = cert_data
                    db.session.merge(query_result)
                db.session.commit()
                return True
            except Exception as ex:
                current_app.logger.debug(ex)
                db.session.rollback()
                return False
Example #25
class GroupRelationshipMetadata(db.Model, Timestamp):
    """Metadata for a group relationship."""

    __tablename__ = 'grouprelationshipmetadata'

    # TODO: assert group_relationship.type == GroupType.Identity
    group_relationship_id = db.Column(UUIDType,
                                      db.ForeignKey(GroupRelationship.id,
                                                    onupdate='CASCADE',
                                                    ondelete='CASCADE'),
                                      primary_key=True)
    group_relationship = db.relationship(
        GroupRelationship,
        backref=backref('data', uselist=False),
        single_parent=True,
    )
    json = db.Column(
        db.JSON().with_variant(postgresql.JSONB(none_as_null=True),
                               'postgresql').with_variant(
                                   JSONType(), 'sqlite'),
        default=list,
    )

    # Relationship metadata
    SCHEMA = {
        '$schema': 'http://json-schema.org/draft-06/schema#',
        'definitions': COMMON_SCHEMA_DEFINITIONS,
        'type': 'array',
        'items': {
            'type': 'object',
            'additionalProperties': False,
            'properties': {
                'LinkPublicationDate': {
                    '$ref': '#/definitions/DateType'
                },
                'LinkProvider': {
                    'type': 'array',
                    'items': {
                        '$ref': '#/definitions/PersonOrOrgType'
                    }
                },
                'LicenseURL': {
                    'type': 'string'
                },
            },
            'required': ['LinkPublicationDate', 'LinkProvider'],
        }
    }

    def update(self,
               payload: dict,
               validate: bool = True,
               multi: bool = False):
        """Updates the metadata of a group relationship."""
        new_json = deepcopy(self.json or [])
        if multi:
            new_json.extend(payload)
        else:
            new_json.append(payload)
        if validate:
            jsonschema.validate(new_json, self.SCHEMA)
        self.json = new_json
        flag_modified(self, 'json')
        return self
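Relationship metadata is stored as a JSON array and re-validated against
``SCHEMA`` on every update. A standalone sketch of that validate-then-append
pattern (simplified schema and payload, not the project's actual ones):

import jsonschema

SCHEMA = {
    '$schema': 'http://json-schema.org/draft-06/schema#',
    'type': 'array',
    'items': {
        'type': 'object',
        'additionalProperties': False,
        'properties': {
            'LinkPublicationDate': {'type': 'string'},
            'LinkProvider': {'type': 'array', 'items': {'type': 'object'}},
            'LicenseURL': {'type': 'string'},
        },
        'required': ['LinkPublicationDate', 'LinkProvider'],
    },
}

existing = []                      # current value of the ``json`` column
payload = {
    'LinkPublicationDate': '2021-01-01',
    'LinkProvider': [{'Name': 'Example provider'}],
}
new_json = existing + [payload]    # the ``update(..., multi=False)`` branch
jsonschema.validate(new_json, SCHEMA)   # raises ValidationError on bad input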
Example #26
class WidgetMultiLangData(db.Model):
    """Database for widget multiple language data."""

    __tablename__ = 'widget_multi_lang_data'

    id = db.Column(db.Integer, primary_key=True, nullable=False)
    widget_id = db.Column(db.Integer, nullable=False)
    lang_code = db.Column(db.String(3), nullable=False)
    label = db.Column(db.String(100), nullable=False)
    description_data = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                                 default=lambda: dict(),
                                 nullable=True)

    is_deleted = db.Column(db.Boolean(name='deleted'), default=False)

    #
    # Query Operation
    #
    @classmethod
    def get_by_id(cls, widget_multi_lang_id):
        """Get widget multi language data by id.

        Arguments:
            widget_multilanguage_id {Integer} -- The ID

        Returns:
            data -- Widget multi language data

        """
        data = cls.query.filter_by(id=widget_multi_lang_id).one_or_none()
        return data

    @classmethod
    def create(cls, data, session):
        """Create Widget multi language data.

        :param data: Widget multi language data
        :param session: session
        :return:
        """
        if not data:
            return None
        obj = cls(**data)
        session.add(obj)
        return obj

    @classmethod
    def get_by_widget_id(cls, widget_id):
        """Get list widget multilanguage data by widget ID.

        Arguments:
            widget_id {Integer} -- The widget id

        Returns:
            data -- List widget multilanguage data

        """
        list_data = cls.query.filter_by(widget_id=widget_id).all()
        return list_data

    @classmethod
    def update_by_id(cls, widget_item_id, data):
        """Update widget multilanguage data by id.

        Arguments:
            id {Integer} -- The id
            data {WidgetMultiLangData} -- The Widget multilanguage data

        Returns:
            widget_multi_lang -- The updated data, if successful

        """
        widget_multi_lang = cls.get_by_id(widget_item_id)
        if not data:
            return
        for k, v in data.items():
            setattr(widget_multi_lang, k, v)
        db.session.merge(widget_multi_lang)
        return widget_multi_lang

    @classmethod
    def delete_by_widget_id(cls, widget_id, session):
        """Delete widget by id.

        :param widget_id: id of widget
        :param session: session of delete
        :return:
        """
        if not session:
            session = db.session
        multi_data = cls.get_by_widget_id(widget_id)
        if not multi_data:
            return False
        for data in multi_data:
            setattr(data, 'is_deleted', True)
        return True
Example #27
class ItemType(db.Model, Timestamp):
    """Represent an item type.

    The ItemType object contains ``created`` and ``updated``
    properties that are automatically updated.
    """

    # Enables SQLAlchemy-Continuum versioning
    __versioned__ = {}

    __tablename__ = 'item_type'

    id = db.Column(
        db.Integer(),
        primary_key=True,
        autoincrement=True
    )
    """Identifier of item type."""

    name_id = db.Column(
        db.Integer(),
        db.ForeignKey(
            'item_type_name.id',
            name='fk_item_type_name_id'
        ),
        nullable=False
    )
    """Name identifier of item type."""

    item_type_name = db.relationship(
        'ItemTypeName',
        backref=db.backref('item_type', lazy='dynamic',
                           order_by=desc('item_type.tag'))
    )
    """Name information from ItemTypeName class."""

    schema = db.Column(
        db.JSON().with_variant(
            postgresql.JSONB(none_as_null=True),
            'postgresql',
        ).with_variant(
            JSONType(),
            'sqlite',
        ).with_variant(
            JSONType(),
            'mysql',
        ),
        default=lambda: dict(),
        nullable=True
    )
    """Store schema in JSON format. When you create a new ``item type`` the 
    ``schema`` field value should never be ``NULL``. Default value is an 
    empty dict. ``NULL`` value means that the record metadata has been 
    deleted. """

    form = db.Column(
        db.JSON().with_variant(
            postgresql.JSONB(none_as_null=True),
            'postgresql',
        ).with_variant(
            JSONType(),
            'sqlite',
        ).with_variant(
            JSONType(),
            'mysql',
        ),
        default=lambda: dict(),
        nullable=True
    )
    """Store schema form in JSON format.
    When you create a new ``item type`` the ``form`` field value should never be
    ``NULL``. Default value is an empty dict. ``NULL`` value means that the
    record metadata has been deleted.
    """

    render = db.Column(
        db.JSON().with_variant(
            postgresql.JSONB(none_as_null=True),
            'postgresql',
        ).with_variant(
            JSONType(),
            'sqlite',
        ).with_variant(
            JSONType(),
            'mysql',
        ),
        default=lambda: dict(),
        nullable=True
    )
    """Store page render information in JSON format. When you create a new 
    ``item type`` the ``render`` field value should never be ``NULL``. 
    Default value is an empty dict. ``NULL`` value means that the record 
    metadata has been deleted. """

    tag = db.Column(db.Integer, nullable=False)
    """Tag of item type."""

    version_id = db.Column(db.Integer, nullable=False)
    """Used by SQLAlchemy for optimistic concurrency control."""

    __mapper_args__ = {
        'version_id_col': version_id
    }
Example #28
class TaxonomyTerm(db.Model):
    """TaxonomyTerm adjacency list model."""
    __tablename__ = "taxonomy_term"
    __table_args__ = (
        db.UniqueConstraint('slug',
                            'parent_id',
                            name='uq_taxonomy_term_slug_parent'),
        db.UniqueConstraint('slug',
                            'tree_id',
                            name='uq_taxonomy_term_slug_tree'),
        #
        # can not use constraints here as db can not perform update
        # which would temporarily invalidate constraints
        #
        # db.UniqueConstraint('left', 'tree_id', name='uq_taxonomy_term_left_tree'),
        # db.UniqueConstraint('right', 'tree_id', name='uq_taxonomy_term_right_tree'),
        # db.UniqueConstraint('order', 'parent_id', name='uq_taxonomy_term_order_parent'),
    )

    id = db.Column(db.Integer, primary_key=True)
    slug = db.Column(db.String(64), unique=False, index=True)
    extra_data = db.Column(db.JSON().with_variant(
        sqlalchemy.dialects.postgresql.JSONB, 'postgresql'))

    tree_id = db.Column("tree_id", db.Integer, nullable=False)
    left = db.Column("left", db.Integer, nullable=False)
    right = db.Column("right", db.Integer, nullable=False)
    level = db.Column("level", db.Integer, nullable=False)
    order = db.Column("order", db.Integer, nullable=False)

    parent_id = db.Column(db.Integer, db.ForeignKey(__tablename__ + '.id'))
    parent = relationship("TaxonomyTerm",
                          back_populates="children",
                          remote_side=[id])
    children = relationship("TaxonomyTerm",
                            back_populates="parent",
                            order_by=order,
                            cascade="all, delete",
                            lazy="dynamic")

    def __init__(self,
                 slug: str,
                 extra_data: dict = None,
                 parent=None,
                 tree_id=None,
                 left=None,
                 right=None,
                 level=None,
                 order=None):
        """Taxonomy Term constructor."""
        self.slug = slug
        self.extra_data = extra_data
        self.parent = parent
        if not tree_id:
            # cannot use a database-side sequence generator here,
            # as SQLite does not support sequences
            with db.session.begin_nested():
                t = TreeId()
                db.session.add(t)
            tree_id = t.id

        self.tree_id = tree_id
        self.left = left
        self.right = right
        self.level = level
        self.order = order

    def create_term(self, **kwargs):
        """Create a new term as the last child of ``self``."""
        # make sure left/right/level are up to date
        db.session.refresh(self)

        with db.session.begin_nested():
            # move all following nodes by 2 to make space for the new node
            t = TaxonomyTerm.__table__
            following_terms_cond = and_(TaxonomyTerm.left > self.right,
                                        TaxonomyTerm.tree_id == self.tree_id)
            db.session.execute(t.update().where(following_terms_cond).values(
                left=TaxonomyTerm.left + 2, right=TaxonomyTerm.right + 2))

            # increase the "right" of self and all its ancestors by 2,
            # as their subtrees will contain the new node
            ancestors_cond = and_(TaxonomyTerm.left <= self.left,
                                  TaxonomyTerm.right >= self.right,
                                  TaxonomyTerm.tree_id == self.tree_id)
            db.session.execute(t.update().where(ancestors_cond).values(
                right=TaxonomyTerm.right + 2))

            # the in-memory self has not been refreshed, so its "right" still holds the pre-update value
            term = self.__class__(**kwargs,
                                  tree_id=self.tree_id,
                                  left=self.right,
                                  right=self.right + 1,
                                  level=self.level + 1,
                                  order=self.children.count(),
                                  parent=self)
            db.session.add(term)

        return term

    def update(self, extra_data: dict = None):
        """Update Taxonomy Term data."""
        self.extra_data = extra_data
        with db.session.begin_nested():
            db.session.add(self)

    def delete(self):
        """Delete the term together with all of its descendants."""
        # refetch from the db to be sure that all properties are up to date
        db.session.refresh(self)

        parent_id = self.parent_id
        order = self.order
        right = self.right
        left = self.left
        tree_id = self.tree_id

        if parent_id is None:
            # top-level object
            with db.session.begin_nested():
                db.session.delete(self)
        else:

            # width of the gap occupied by the term and its descendants
            occupied_space = right - left + 1

            with db.session.begin_nested():

                # delete current object and all its children
                with db.session.begin_nested():
                    db.session.delete(self)

                with db.session.begin_nested():
                    t = TaxonomyTerm.__table__
                    # update order on following siblings

                    following_siblings_cond = and_(
                        TaxonomyTerm.order > order,
                        TaxonomyTerm.parent_id == parent_id)
                    db.session.execute(
                        t.update().where(following_siblings_cond).values(
                            order=TaxonomyTerm.order - 1))

                    # move all following nodes to the left by the occupied space
                    following_terms_cond = and_(
                        TaxonomyTerm.left > right,
                        TaxonomyTerm.tree_id == tree_id)
                    db.session.execute(
                        t.update().where(following_terms_cond).values(
                            left=TaxonomyTerm.left - occupied_space,
                            right=TaxonomyTerm.right - occupied_space))

                    # decrease the "right" of all ancestors by the occupied space
                    ancestors_cond = and_(TaxonomyTerm.left < left,
                                          TaxonomyTerm.right > right,
                                          TaxonomyTerm.tree_id == tree_id)
                    db.session.execute(t.update().where(ancestors_cond).values(
                        right=TaxonomyTerm.right - occupied_space))

    def check(self, path=None):
        """Check the consistency of the nested-set (MPTT) tree.

        :param path: internal, always pass None
        :raise ValueError: when the tree is corrupted
        """
        if not path:
            path = []
            db.session.refresh(self)

        path = path + [self.slug]

        children = list(self.children)
        for c in children:
            db.session.refresh(c)  # make sure cached data are not used here

        if not children:
            if self.left + 1 != self.right:
                raise ValueError(  # pragma: no cover
                    'Error in childless element {}: bad left {} or right {}'.
                    format(path, self.left, self.right))
        else:
            # check lefts and rights
            if self.left + 1 != children[0].left:
                raise ValueError(  # pragma: no cover
                    'First child "{}" of {} has bad left {}, expecting {}'.
                    format(children[0].slug, path, children[0].left,
                           self.left + 1))
            if self.right - 1 != children[-1].right:
                raise ValueError(  # pragma: no cover
                    'Last child "{}" of {} has bad right {}, expecting {}'.
                    format(children[-1].slug, path, children[-1].right,
                           self.right - 1))
            # check lefts and rights between children
            for i in range(0, len(children) - 1):
                c1 = children[i]
                c2 = children[i + 1]
                if c1.right + 1 != c2.left:
                    raise ValueError(  # pragma: no cover
                        'Child with slug "{}" of element {} has bad left {}, expecting {}'
                        .format(c2.slug, path, c2.left, c1.right + 1))
            for ci, c in enumerate(children):
                if c.level != self.level + 1:
                    raise ValueError(  # pragma: no cover
                        'Child with slug "{}" of {} has bad level {}, expecting {}'
                        .format(c.slug, path, c.level, self.level + 1))
                if c.order != ci:
                    raise ValueError(  # pragma: no cover
                        'Child with slug "{}" of {} has bad order {}, expecting {}'
                        .format(c.slug, path, c.order, ci))

            for c in children:
                c.check(path)

    def move(self, target_node, position: MovePosition):
        """Move the term inside, before or after ``target_node``."""
        db.session.refresh(self)
        db.session.refresh(target_node)

        if self == target_node:
            raise TaxonomyError(
                'Cannot move term inside, before or after the same term')

        if (self.tree_id == target_node.tree_id
                and self.left < target_node.left
                and self.right > target_node.right):
            raise TaxonomyError(
                'Cannot move a term inside its own descendants')

        with db.session.begin_nested():
            if self.tree_id != target_node.tree_id:
                return self._move_between_trees(target_node, position)

            return self._move_within_tree(target_node, position)

    def _move_within_tree(self, target_node, position):
        """Move the term to a new position within the same nested-set tree."""
        (target_parent_id, target_parent_left, target_parent_right,
         target_left, target_level,
         target_order) = target_node._get_insertion_position(position)

        self_right = self.right
        self_left = self.left
        self_level = self.level
        self_parent_id = self.parent_id
        self_order = self.order
        occupied_space = self_right - self_left + 1
        tree_id = self.tree_id

        t = TaxonomyTerm.__table__

        if logger.isEnabledFor(logging.DEBUG):  # pragma: no cover
            logger.debug(
                f'target parent id {target_parent_id}, '
                f'target parent left {target_parent_left}, '
                f'target parent right {target_parent_right}\n'
                f'target_left {target_left}, target_level {target_level}, '
                f'target_order {target_order}, occupied space {occupied_space}\n'
                f'self left {self_left}, '
                f'self right {self_right}\n')

        if logger.isEnabledFor(logging.DEBUG):  # pragma: no cover
            logging.debug("Phase 0: before move right   \n%s",
                          self.taxonomy.dump(True))

        if self_left == target_left:
            # nothing to do, already there
            return

        # sanity check: the term must not be moved inside itself
        assert not self_left <= target_left < self_right

        with db.session.begin_nested():
            # make space for the moved subtree by shifting subsequent terms to the right
            target_following_cond = and_(TaxonomyTerm.left >= target_left,
                                         TaxonomyTerm.tree_id == tree_id)
            db.session.execute(t.update().where(target_following_cond).values(
                left=TaxonomyTerm.left + occupied_space,
                right=TaxonomyTerm.right + occupied_space))

        with db.session.begin_nested():
            # update ancestors' right
            if target_parent_left < target_left:
                # target parent's right has not been moved
                # in the previous condition, so add it as well
                target_ancestors_cond = and_(
                    TaxonomyTerm.left <= target_parent_left,
                    TaxonomyTerm.right >= target_parent_right,
                    TaxonomyTerm.tree_id == tree_id)
                db.session.execute(
                    t.update().where(target_ancestors_cond).values(
                        right=TaxonomyTerm.right + occupied_space))
            else:  # pragma: no cover
                raise RuntimeError(
                    'Should not get here as we are moving into parent, not onto it'
                )

        if self_left >= target_left:
            # if self was in subsequent terms, fix its left and right
            self_left += occupied_space
            self_right += occupied_space

        if logger.isEnabledFor(logging.DEBUG):  # pragma: no cover
            logging.debug("Phase 1: after move right    \n%s",
                          self.taxonomy.dump(True))

        with db.session.begin_nested():
            # update order on the future siblings that will be after the moved element
            future_siblings_cond = and_(
                TaxonomyTerm.order >= target_order,
                TaxonomyTerm.parent_id == target_parent_id)
            db.session.execute(t.update().where(future_siblings_cond).values(
                order=TaxonomyTerm.order + 1))

        if logger.isEnabledFor(logging.DEBUG):  # pragma: no cover
            logging.debug("Phase 2: after order future siblings\n%s",
                          self.taxonomy.dump(True))

        with db.session.begin_nested():
            # move the descendants of the moved term to the new location, fixing left, right, level
            term_descendants_cond = and_(TaxonomyTerm.left > self_left,
                                         TaxonomyTerm.right < self_right,
                                         TaxonomyTerm.tree_id == tree_id)
            db.session.execute(t.update().where(term_descendants_cond).values(
                left=TaxonomyTerm.left + target_left - self_left,
                right=TaxonomyTerm.right + target_left - self_left,
                level=TaxonomyTerm.level + target_level - self_level))

        if logger.isEnabledFor(logging.DEBUG):  # pragma: no cover
            logging.debug("Phase 3: after move children \n%s",
                          self.taxonomy.dump(True))

        with db.session.begin_nested():
            # move the node itself to the new location, fixing left, right, level, parent and order
            db.session.execute(
                t.update().where(TaxonomyTerm.id == self.id).values(
                    left=TaxonomyTerm.left + target_left - self_left,
                    right=TaxonomyTerm.right + target_left - self_left,
                    level=target_level,
                    parent_id=target_parent_id,
                    order=target_order))

        if logger.isEnabledFor(logging.DEBUG):  # pragma: no cover
            logging.debug("Phase 4: after move term     \n%s",
                          self.taxonomy.dump(True))

        with db.session.begin_nested():
            # remove the space left by the moved element
            orig_following_cond = and_(TaxonomyTerm.left >= self_left,
                                       TaxonomyTerm.tree_id == tree_id)
            db.session.execute(t.update().where(orig_following_cond).values(
                left=TaxonomyTerm.left - occupied_space,
                right=TaxonomyTerm.right - occupied_space))

        with db.session.begin_nested():
            # and parents ...
            orig_parents_cond = and_(TaxonomyTerm.left < self_left,
                                     TaxonomyTerm.right > self_right,
                                     TaxonomyTerm.tree_id == tree_id)
            db.session.execute(t.update().where(orig_parents_cond).values(
                right=TaxonomyTerm.right - occupied_space))

        if logger.isEnabledFor(logging.DEBUG):  # pragma: no cover
            logging.debug("Phase 5: after removing space\n%s",
                          self.taxonomy.dump(True))

        with db.session.begin_nested():
            # fix order on siblings following the previously moved term
            orig_siblings_cond = and_(TaxonomyTerm.order > self_order,
                                      TaxonomyTerm.parent_id == self_parent_id)
            db.session.execute(t.update().where(orig_siblings_cond).values(
                order=TaxonomyTerm.order - 1))

        if logger.isEnabledFor(logging.DEBUG):  # pragma: no cover
            logging.debug("Phase 6: after fixing order, done\n%s",
                          self.taxonomy.dump(True))

    def _get_insertion_position(self, position: MovePosition):
        """Return (parent_id, parent_left, parent_right, left, level, order) for the new position."""
        db.session.refresh(self)
        if position == MovePosition.INSIDE:
            return self.id, self.left, self.right, self.right, \
                   self.level + 1, self.children.count()
        parent = self.parent
        db.session.refresh(parent)
        if position == MovePosition.AFTER:
            return parent.id, parent.left, parent.right, self.right + 1, \
                   self.level, self.order + 1
        if position == MovePosition.BEFORE:
            return parent.id, parent.left, parent.right, self.left, \
                   self.level, self.order
        raise Exception('Unhandled MovePosition %s' %
                        position)  # pragma: no cover

    @property
    def taxonomy(self):
        """Return the Taxonomy wrapping the root term of this tree."""
        term = TaxonomyTerm.query.filter_by(tree_id=self.tree_id,
                                            level=1).one()
        return Taxonomy(term)

    @property
    def tree_path(self):
        """Get the slug path of the term in its taxonomy tree (``None`` if empty)."""
        path = [x[0] for x in self.ancestors_or_self.values(TaxonomyTerm.slug)]
        if not path:
            return None
        return '/' + '/'.join(path)

    def __repr__(self):
        """Represent taxonomy term instance as a unique string."""
        return "<TaxonomyTerm({slug}:{path})>" \
            .format(slug=self.slug, path=self.id)

    @property
    def descendants(self):
        """Return a query of all descendants of this term (excluding self)."""
        # need to have up to date left and right
        db.session.refresh(self)

        return TaxonomyTerm.query.filter(
            TaxonomyTerm.tree_id == self.tree_id,
            TaxonomyTerm.left > self.left,
            TaxonomyTerm.right < self.right).order_by('left')

    @property
    def descendants_or_self(self):
        """Return a query of this term together with all its descendants."""
        # need to have up to date left and right
        db.session.refresh(self)

        return TaxonomyTerm.query.filter(
            TaxonomyTerm.tree_id == self.tree_id,
            TaxonomyTerm.left >= self.left,
            TaxonomyTerm.right <= self.right).order_by('left')

    @property
    def ancestors(self):
        """Return a query of all ancestors of this term, excluding the tree root."""
        ancestor_cond = and_(
            TaxonomyTerm.tree_id == self.tree_id,
            TaxonomyTerm.left > 1,  # do not take root
            TaxonomyTerm.left < self.left,
            TaxonomyTerm.right > self.right)
        return TaxonomyTerm.query.filter(ancestor_cond).order_by(
            TaxonomyTerm.left)

    @property
    def ancestors_or_self(self):
        """Return a query of this term and its ancestors, excluding the tree root."""
        ancestor_cond = and_(
            TaxonomyTerm.tree_id == self.tree_id,
            TaxonomyTerm.left > 1,  # do not take root
            TaxonomyTerm.left <= self.left,
            TaxonomyTerm.right >= self.right)
        return TaxonomyTerm.query.filter(ancestor_cond).order_by(
            TaxonomyTerm.left)

    @property
    def link_self(self):
        """Return the external URL of this term."""
        taxonomy_code, term_path = self.tree_path.lstrip('/').split('/', 1)
        return url_for(
            "taxonomies.taxonomy_get_term",
            taxonomy_code=taxonomy_code,
            term_path=term_path,
            _external=True,
        )

    @property
    def link_tree(self):
        """Return the external URL of this term with drilldown into its subtree."""
        taxonomy_code, term_path = self.tree_path.lstrip('/').split('/', 1)

        return url_for(
            "taxonomies.taxonomy_get_term",
            taxonomy_code=taxonomy_code,
            term_path=term_path,
            drilldown=True,
            _external=True,
        )
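
A brief usage sketch of the nested-set operations above. It is illustrative only: the root lookup mirrors the ``taxonomy`` property, ``tree_id=1`` is a placeholder, and an application context with an open database session is assumed.

# obtain a root term of an existing taxonomy tree (placeholder tree_id)
root = TaxonomyTerm.query.filter_by(tree_id=1, level=1).one()

cities = root.create_term(slug='cities')    # appended as the last child of root
people = root.create_term(slug='people')

people.move(cities, MovePosition.BEFORE)    # reorder siblings within the tree
root.check()                                # raises ValueError on inconsistent left/right/level/order

db.session.commit()
print(cities.tree_path)                     # e.g. '/cities'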
Example #29
class OAIServerSchema(db.Model, Timestamp):
    """Represent an OAI server schema.

    The OAIServerSchema object contains ``created`` and ``updated``
    properties that are updated automatically.
    """

    # Enables SQLAlchemy-Continuum versioning
    __versioned__ = {}

    __tablename__ = 'oaiserver_schema'

    id = db.Column(
        UUIDType,
        primary_key=True,
        default=uuid.uuid4,
    )
    """schema identifier."""

    schema_name = db.Column(db.String(255), nullable=False, unique=True)
    """Mapping Name of schema"""

    form_data = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                          default=lambda: dict(),
                          nullable=True)
    """Data(schema name,root name,description) of form."""

    xsd = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                    default=lambda: OrderedDict(),
                    nullable=False)
    """Xsd schema"""

    namespaces = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                           default=lambda: dict(),
                           nullable=True)
    """NameSpace for xml"""

    schema_location = db.Column(db.String(255))
    """Schema location URL."""

    isvalid = db.Column(db.Boolean(name='isvalid'),
                        nullable=False,
                        default=lambda: False)

    is_mapping = db.Column(db.Boolean(name='is_mapping'),
                           nullable=False,
                           default=lambda: False)

    isfixed = db.Column(db.Boolean(name='isfixed'),
                        nullable=False,
                        default=lambda: False)

    version_id = db.Column(db.Integer, nullable=False)
    """Used by SQLAlchemy for optimistic concurrency control."""

    __mapper_args__ = {'version_id_col': version_id}
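
A minimal sketch of persisting a schema row; the values below are placeholders rather than a real mapping definition, and ``version_id`` is filled in by SQLAlchemy's versioning.

schema = OAIServerSchema(
    schema_name='oai_dc_mapping',                      # placeholder name
    form_data={'name': 'oai_dc', 'root_name': 'dc'},   # placeholder form data
    xsd=OrderedDict(),                                 # parsed XSD structure would go here
    namespaces={'dc': 'http://purl.org/dc/elements/1.1/'},
    schema_location='http://www.openarchives.org/OAI/2.0/oai_dc.xsd',
)
db.session.add(schema)
db.session.commit()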
Example #30
class ResyncIndexes(db.Model, Timestamp):
    """ResyncIndexes model.

    Stores ResourceSync harvesting settings attached to an index.
    """

    __tablename__ = 'resync_indexes'

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    """Identifier of resource list."""

    status = db.Column(db.String(),
                       nullable=False,
                       default=lambda: current_app.config[
                           'INVENIO_RESYNC_INDEXES_STATUS'].get('automatic'))
    """Status of resource list."""

    index_id = db.Column(db.BigInteger,
                         db.ForeignKey(Index.id, ondelete='CASCADE'),
                         nullable=True)
    """Index Identifier relation to resync indexes."""

    repository_name = db.Column(db.String(50), nullable=False)
    """Repository name."""

    from_date = db.Column(db.DateTime, nullable=True)
    """From Date."""

    to_date = db.Column(db.DateTime, nullable=True)
    """To Date."""

    resync_save_dir = db.Column(db.String(50), nullable=False)
    """Directory where harvested resync data is saved."""

    resync_mode = db.Column(db.String(20),
                            nullable=False,
                            default=lambda: current_app.config[
                                'INVENIO_RESYNC_INDEXES_MODE'].get('baseline'))
    """Resync mode."""

    saving_format = db.Column(
        db.String(10),
        nullable=False,
        default=lambda: current_app.config[
            'INVENIO_RESYNC_INDEXES_SAVING_FORMAT'].get('jpcoar'))
    """Saving format."""

    base_url = db.Column(db.String(255), nullable=False)
    """Base URL of the resync source."""

    is_running = db.Column(db.Boolean(), default=True)
    """Whether the resync task is currently running."""

    interval_by_day = db.Column(db.Integer, nullable=False)
    """Time cycle for each change list."""

    task_id = db.Column(db.String(40), default=None)
    """Identifier of the background task handling this resync, if any."""

    result = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                       default=lambda: dict(),
                       nullable=True)
    """Result of the latest resync run, stored in JSON format."""

    index = db.relationship(Index,
                            backref='resync_index_id',
                            foreign_keys=[index_id])
    """Relation to the Index Identifier."""