Ejemplo n.º 1
0
class FileMetadata(db.Model, Timestamp):
    """File metadata record.

    The ``created`` and ``updated`` timestamp columns are maintained
    automatically by the :class:`Timestamp` mixin.
    """

    # Track row history via SQLAlchemy-Continuum.
    __versioned__ = {}

    __tablename__ = 'file_metadata'

    id = db.Column(db.Integer(), autoincrement=True, primary_key=True)

    pid = db.Column(db.Integer())
    """Record identifier."""

    contents = db.Column(LargeBinary, nullable=True)

    json = db.Column(
        db.JSON()
        .with_variant(postgresql.JSONB(none_as_null=True), 'postgresql')
        .with_variant(JSONType(), 'sqlite')
        .with_variant(JSONType(), 'mysql'),
        default=dict,
        nullable=True,
    )
    """Metadata payload stored as JSON.

    A freshly created record always carries a dict (empty by default);
    ``NULL`` marks record metadata that has been deleted.
    """

    version_id = db.Column(db.Integer, nullable=False)
    """Version counter used by SQLAlchemy for optimistic concurrency control."""

    __mapper_args__ = {'version_id_col': version_id}
Ejemplo n.º 2
0
class CircTransactions(db.Model, Timestamp):
    """Circulation transaction record with managed timestamps."""

    __tablename__ = 'circulation_transactions'

    id = db.Column(UUIDType, primary_key=True, default=uuid.uuid4)
    """Transaction record identifier."""

    json = db.Column(
        db.JSON()
        .with_variant(postgresql.JSONB(none_as_null=True), 'postgresql')
        .with_variant(JSONType(), 'sqlite')
        .with_variant(JSONType(), 'mysql'),
        default=dict,
        nullable=True,
    )
    """Transaction record metadata stored as JSON."""
Ejemplo n.º 3
0
class IndexTree(db.Model, Timestamp):
    """Index tree structure.

    The ``created`` and ``updated`` timestamp columns are maintained
    automatically by the :class:`Timestamp` mixin.
    """

    __tablename__ = 'index_tree'

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    """Identifier of the index tree."""

    tree = db.Column(
        db.JSON()
        .with_variant(postgresql.JSONB(none_as_null=True), 'postgresql')
        .with_variant(JSONType(), 'sqlite')
        .with_variant(JSONType(), 'mysql'),
        default=dict,
        nullable=True,
    )
    """Index tree structure serialized as JSON."""
Ejemplo n.º 4
0
Archivo: models.py Proyecto: mhaya/weko
class Authors(db.Model, Timestamp):
    """Author record.

    The ``created`` and ``updated`` timestamp columns are maintained
    automatically by the :class:`Timestamp` mixin.
    """

    __tablename__ = 'authors'

    id = db.Column(db.BigInteger, primary_key=True, unique=True)
    """Author identifier."""

    gather_flg = db.Column(
        db.BigInteger,
        primary_key=False,
        unique=False,
        default=0,
    )
    """Gather flag of the author (0 by default)."""

    json = db.Column(
        db.JSON()
        .with_variant(postgresql.JSONB(none_as_null=True), 'postgresql')
        .with_variant(JSONType(), 'sqlite')
        .with_variant(JSONType(), 'mysql'),
        default=dict,
        nullable=True,
    )
    """Author information stored as JSON."""
Ejemplo n.º 5
0
class FeedbackMailList(db.Model, Timestamp):
    """Feedback mail list.

    Each row stores the list of e-mail addresses associated with an item.
    """

    __tablename__ = 'feedback_mail_list'

    id = db.Column(db.Integer(), primary_key=True, autoincrement=True)
    """Feedback mail list identifier."""

    item_id = db.Column(UUIDType, nullable=False, default=uuid.uuid4)
    """Item identifier."""

    mail_list = db.Column(
        db.JSON()
        .with_variant(postgresql.JSONB(none_as_null=True), 'postgresql')
        .with_variant(JSONType(), 'sqlite')
        .with_variant(JSONType(), 'mysql'),
        default=dict,
        nullable=True,
    )
    """Feedback mail addresses stored as JSON."""
Ejemplo n.º 6
0
class DraftMetadataBase(Timestamp):
    """Base class for draft metadata.

    The ``created`` and ``updated`` timestamp columns are maintained
    automatically by the :class:`Timestamp` mixin.
    """

    # Track row history via SQLAlchemy-Continuum.
    __versioned__ = {}

    id = db.Column(UUIDType, primary_key=True, default=uuid.uuid4)
    """Draft identifier."""

    fork_id = db.Column(UUIDType)
    """Identifier of the record this draft was forked from (same id)."""

    fork_version_id = db.Column(db.Integer)
    """Version id of the record this draft was forked from."""

    version_id = db.Column(db.Integer, nullable=False)
    """Version counter used by SQLAlchemy for optimistic concurrency control."""

    status = db.Column(db.String(255), default="draft", nullable=False)
    """Workflow status of the draft."""

    expiry_date = db.Column(
        db.DateTime().with_variant(mysql.DATETIME(fsp=6), "mysql"),
        default=datetime.utcnow,
        nullable=True,
    )
    """Expiry timestamp; ``NULL`` means the draft never expires."""

    json = db.Column(
        db.JSON()
        .with_variant(postgresql.JSONB(none_as_null=True), 'postgresql')
        .with_variant(JSONType(), 'sqlite')
        .with_variant(JSONType(), 'mysql'),
        default=dict,
        nullable=True,
    )
    """Metadata stored as JSON.

    A freshly created draft always carries a dict (empty by default);
    ``NULL`` marks metadata that has been deleted.
    """

    __mapper_args__ = {'version_id_col': version_id}
Ejemplo n.º 7
0
class ReanaJob(db.Model):
    """Model defining REANA job."""

    __tablename__ = 'reana'

    id = db.Column(UUIDType, primary_key=True, nullable=False,
                   default=uuid.uuid4)

    # Owner of the job.
    user_id = db.Column(db.Integer, db.ForeignKey(User.id), nullable=False)

    # Record the job was run for.
    record_id = db.Column(UUIDType, db.ForeignKey(RecordMetadata.id),
                          nullable=False)

    # Human-readable job name (not unique).
    name = db.Column(db.String(100), unique=False, nullable=False)

    params = db.Column(
        JSONType()
        .with_variant(postgresql.JSONB(none_as_null=True), 'postgresql')
        .with_variant(JSONType(), 'sqlite'),
        default=dict,
        nullable=True,
    )

    output = db.Column(
        JSONType()
        .with_variant(postgresql.JSONB(none_as_null=True), 'postgresql')
        .with_variant(JSONType(), 'sqlite'),
        default=dict,
        nullable=True,
    )

    user = db.relationship('User')
    record = db.relationship('RecordMetadata')

    @classmethod
    def get_jobs(cls, user_id, record_id):
        """Return all the jobs run by user for this record."""
        query = cls.query.filter_by(user_id=user_id, record_id=record_id)
        return query.all()
Ejemplo n.º 8
0
class ItemTypeMapping(db.Model, Timestamp):
    """Mapping record for an item type.

    The ``created`` and ``updated`` timestamp columns are maintained
    automatically by the :class:`Timestamp` mixin.
    """

    # Track row history via SQLAlchemy-Continuum.
    __versioned__ = {}

    __tablename__ = 'item_type_mapping'

    id = db.Column(db.Integer(), primary_key=True, autoincrement=True)
    """Record identifier."""

    item_type_id = db.Column(db.Integer)
    """ID of item type."""

    mapping = db.Column(
        db.JSON()
        .with_variant(postgresql.JSONB(none_as_null=True), 'postgresql')
        .with_variant(JSONType(), 'sqlite')
        .with_variant(JSONType(), 'mysql'),
        default=dict,
        nullable=True,
    )
    """Mapping stored as JSON.

    A freshly created record always carries a dict (empty by default);
    ``NULL`` marks record metadata that has been deleted.
    """

    version_id = db.Column(db.Integer, nullable=False)
    """Version counter used by SQLAlchemy for optimistic concurrency control."""

    __mapper_args__ = {'version_id_col': version_id}
Ejemplo n.º 9
0
class Migration(db.Model, Timestamp):
    """Represent a community migration inside the SQL database.

    Additionally it contains two columns ``created`` and ``updated``
    with automatically managed timestamps.
    """

    __tablename__ = 'b2share_migrations'

    id = db.Column(
        UUIDType,
        primary_key=True,
        default=uuid.uuid4,
    )
    """Migration identifier."""

    # migration version
    version = db.Column(db.String(80), nullable=False)

    data = db.Column(JSONType().with_variant(
        postgresql.JSON(none_as_null=True),
        'postgresql',
    ),
                     nullable=True)
    """Migration result data; nullable, so it may be ``None``."""

    @property
    def success(self):
        """Return whether the last migration succeeded.

        ``data`` is a nullable column; guard against ``None`` before the
        key lookup (the original ``'status' in self.data`` raised
        ``TypeError`` when no migration data was recorded).
        """
        return (self.data or {}).get('status') == 'success'
def upgrade():
    """Upgrade database."""
    op.create_table(
        'workflows_record_sources',
        sa.Column('source', sa.Text, default='', nullable=False),
        sa.Column(
            'record_id',
            UUIDType,
            sa.ForeignKey('records_metadata.id', ondelete='CASCADE'),
            nullable=False,
        ),
        # Composite key: one source payload per (record, source) pair.
        sa.PrimaryKeyConstraint('record_id', 'source'),
        sa.Column(
            'json',
            JSONType().with_variant(
                postgresql.JSON(none_as_null=True),
                'postgresql',
            ),
            default=dict,
        ),
    )
Ejemplo n.º 11
0
class RecordMetadata(db.Model, Timestamp):
    """Record metadata stored in the SQL database.

    Carries ``created``/``updated`` columns with automatically managed
    timestamps (from :class:`Timestamp`).
    """

    # Track row history via SQLAlchemy-Continuum.
    __versioned__ = {}

    __tablename__ = 'records_metadata'

    id = db.Column(UUIDType, primary_key=True, default=uuid.uuid4)
    """Record identifier."""

    json = db.Column(
        db.JSON()
        .with_variant(postgresql.JSONB(none_as_null=True), 'postgresql')
        .with_variant(JSONType(), 'sqlite')
        .with_variant(JSONType(), 'mysql'),
        default=dict,
        nullable=True,
    )
    """Metadata stored as JSON.

    A freshly created record always carries a dict (empty by default);
    ``NULL`` marks record metadata that has been deleted.
    """

    version_id = db.Column(db.Integer, nullable=False)
    """Version counter used by SQLAlchemy for optimistic concurrency control."""

    __mapper_args__ = {'version_id_col': version_id}
Ejemplo n.º 12
0
def _include_sqlalchemy(obj, engine=None):
    """Init all required SQLAlchemy's types.

    Attaches legacy type aliases (``Integer``, ``TinyText``, ``Double``,
    binary variants, ...) and helpers as attributes on *obj*.  When
    ``engine == 'mysql'`` the MySQL dialect types are used; otherwise the
    generic ``sqlalchemy.types`` serve as fallbacks.
    """
    # for module in sqlalchemy, sqlalchemy.orm:
    #    for key in module.__all__:
    #        if not hasattr(obj, key):
    #            setattr(obj, key,
    #                    getattr(module, key))

    if engine == 'mysql':
        from sqlalchemy.dialects import mysql as engine_types
    else:
        from sqlalchemy import types as engine_types

    # Length is provided to JSONType to ensure MySQL uses LONGTEXT instead
    # of TEXT which only provides for 64kb storage compared to 4gb for
    # LONGTEXT.
    setattr(obj, 'JSON', JSONType(length=2 ** 32 - 2))
    setattr(obj, 'Char', engine_types.CHAR)
    # TINYTEXT exists only on the MySQL dialect; fall back to TEXT elsewhere.
    try:
        setattr(obj, 'TinyText', engine_types.TINYTEXT)
    except Exception:
        setattr(obj, 'TinyText', engine_types.TEXT)
    setattr(obj, 'hybrid_property', hybrid_property)
    # DOUBLE exists only on the MySQL dialect; fall back to FLOAT elsewhere.
    try:
        setattr(obj, 'Double', engine_types.DOUBLE)
    except Exception:
        setattr(obj, 'Double', engine_types.FLOAT)
    # All legacy binary aliases map to the same LargeBinary type.
    setattr(obj, 'Binary', sqlalchemy.types.LargeBinary)
    setattr(obj, 'iBinary', sqlalchemy.types.LargeBinary)
    setattr(obj, 'iLargeBinary', sqlalchemy.types.LargeBinary)
    setattr(obj, 'iMediumBinary', sqlalchemy.types.LargeBinary)
    setattr(obj, 'UUID', GUID)
    setattr(obj, 'Integer', LegacyInteger)
    setattr(obj, 'MediumInteger', LegacyMediumInteger)
    setattr(obj, 'SmallInteger', LegacySmallInteger)
    setattr(obj, 'TinyInteger', LegacyTinyInteger)
    setattr(obj, 'BigInteger', LegacyBigInteger)

    # Importing the engine module registers MySQL-specific behavior as a
    # side effect; the name itself is unused.
    if engine == 'mysql':
        from .engines import mysql as dummy_mysql  # noqa
    #    module = invenio.sqlalchemyutils_mysql
    #    for key in module.__dict__:
    #        setattr(obj, key,
    #                getattr(module, key))

    obj.AsBINARY = AsBINARY
    obj.MarshalBinary = MarshalBinary
    obj.PickleBinary = PickleBinary

    # Overwrite :meth:`MutableDict.update` to detect changes.
    from sqlalchemy.ext.mutable import MutableDict

    def update_mutable_dict(self, *args, **kwargs):
        # dict.update bypasses MutableDict's change tracking, so flag the
        # change explicitly after delegating to the parent implementation.
        super(MutableDict, self).update(*args, **kwargs)
        self.changed()

    MutableDict.update = update_mutable_dict
    obj.MutableDict = MutableDict
Ejemplo n.º 13
0
class Workflow(db.Model):
    """Represents a workflow instance storing the state of the workflow."""

    __tablename__ = "workflows_workflow"

    # BUGFIX: ``default`` must be the callable itself, not ``uuid.uuid4()``.
    # Calling it here evaluates once at import time, so every new row would
    # share the same UUID and collide on the primary key.
    uuid = db.Column(UUIDType,
                     primary_key=True,
                     nullable=False,
                     default=uuid.uuid4)
    name = db.Column(db.String(255),
                     default="Default workflow",
                     nullable=False)
    created = db.Column(db.DateTime, default=datetime.now, nullable=False)
    modified = db.Column(db.DateTime,
                         default=datetime.now,
                         onupdate=datetime.now,
                         nullable=False)
    id_user = db.Column(db.Integer, default=0, nullable=False)
    extra_data = db.Column(JSONType().with_variant(
        postgresql.JSON(none_as_null=True),
        'postgresql',
    ),
                           default=lambda: dict(),
                           nullable=False)
    status = db.Column(ChoiceType(WorkflowStatus, impl=db.Integer()),
                       default=WorkflowStatus.NEW,
                       nullable=False)
    objects = db.relationship("WorkflowObjectModel",
                              backref='workflows_workflow',
                              cascade="all, delete-orphan")

    def __repr__(self):
        """Represent a Workflow instance."""
        return "<Workflow(name: %s, cre: %s, mod: %s," \
               "id_user: %s, status: %s)>" % \
               (str(self.name), str(self.created), str(self.modified),
                str(self.id_user), str(self.status))

    def __str__(self):
        """Represent a Workflow instance."""
        return self.__repr__()

    @classmethod
    def delete(cls, uuid):
        """Delete the workflow with the given ``uuid``."""
        to_delete = Workflow.query.get(uuid)
        db.session.delete(to_delete)

    def save(self, status=None):
        """Save object to persistent storage.

        Optionally updates ``status``; ensures ``extra_data`` is a dict and
        flags it as modified so JSON mutations are persisted.
        """
        with db.session.begin_nested():
            self.modified = datetime.now()
            if status is not None:
                self.status = status
            if self.extra_data is None:
                self.extra_data = dict()
            flag_modified(self, 'extra_data')
            db.session.merge(self)
Ejemplo n.º 14
0
def upgrade():
    """Upgrade database."""

    def _json_type():
        # Cross-dialect JSON storage: JSONB on PostgreSQL, generic
        # JSONType on MySQL and SQLite.
        return (
            sa.JSON()
            .with_variant(JSONType(), "mysql")
            .with_variant(
                postgresql.JSONB(none_as_null=True, astext_type=sa.Text()),
                "postgresql",
            )
            .with_variant(JSONType(), "sqlite")
        )

    # 'profile' and 'preferences' share the same column type.
    for column_name in ("profile", "preferences"):
        op.add_column(
            "accounts_user",
            sa.Column(column_name, _json_type(), nullable=True),
        )

    # the user name is split into two columns:
    # 'displayname' which stores the original version of the username, and
    # 'username' which stores a lower-case version to ensure uniqueness
    op.add_column(
        "accounts_user",
        sa.Column("displayname", sa.String(length=255), nullable=True),
    )
    op.add_column(
        "accounts_user",
        sa.Column("username", sa.String(length=255), nullable=True),
    )
    op.create_unique_constraint(
        op.f("uq_accounts_user_username"), "accounts_user", ["username"]
    )
Ejemplo n.º 15
0
class ResyncLogs(db.Model, Timestamp):
    """Harvest Logs."""

    __tablename__ = "resync_logs"

    id = db.Column(db.Integer, primary_key=True)

    resync_indexes_id = db.Column(
        db.Integer,
        db.ForeignKey(ResyncIndexes.id,
                      ondelete='CASCADE'),
        nullable=True
    )
    log_type = db.Column(db.String(10))

    task_id = db.Column(db.String(40), default=None)

    # BUGFIX: pass the callable, not ``datetime.now()``.  The original
    # called it at import time, so every row shared the module-load
    # timestamp instead of getting its own start time.
    start_time = db.Column(db.DateTime, default=datetime.now)

    end_time = db.Column(db.DateTime, nullable=True)

    status = db.Column(db.String(10), nullable=False, default='Running')

    errmsg = db.Column(db.String(255), nullable=True, default=None)

    counter = db.Column(
        db.JSON().with_variant(
            postgresql.JSONB(none_as_null=True),
            'postgresql',
        ).with_variant(
            JSONType(),
            'sqlite',
        ).with_variant(
            JSONType(),
            'mysql',
        ),
        default=lambda: dict(),
        nullable=True
    )

    resync_index = db.relationship(
        ResyncIndexes,
        backref='resync_indexes_id',
        foreign_keys=[resync_indexes_id])
    """Relation to the Resync Identifier."""
Ejemplo n.º 16
0
class GroupRelationshipMetadata(db.Model, Timestamp):
    """Metadata for a group relationship."""

    __tablename__ = 'grouprelationshipmetadata'

    # TODO: assert group_relationship.type == GroupType.Identity
    group_relationship_id = Column(
        UUIDType,
        ForeignKey(GroupRelationship.id,
                   onupdate='CASCADE',
                   ondelete='CASCADE'),
        primary_key=True)
    group_relationship = orm_relationship(
        GroupRelationship,
        backref=backref('data', uselist=False),
        single_parent=True,
    )
    json = Column(
        JSON()
        .with_variant(postgresql.JSONB(none_as_null=True), 'postgresql')
        .with_variant(JSONType(), 'sqlite'),
        default=list,
    )

    # Relationship metadata
    SCHEMA = {
        '$schema': 'http://json-schema.org/draft-06/schema#',
        'definitions': COMMON_SCHEMA_DEFINITIONS,
        'type': 'array',
        'items': {
            'type': 'object',
            'additionalProperties': False,
            'properties': {
                'LinkPublicationDate': {'$ref': '#/definitions/DateType'},
                'LinkProvider': {
                    'type': 'array',
                    'items': {'$ref': '#/definitions/PersonOrOrgType'}
                },
                'LicenseURL': {'type': 'string'},
            },
            'required': ['LinkPublicationDate', 'LinkProvider'],
        }
    }

    def update(self, payload, validate=True, multi=False):
        """Updates the metadata of a group relationship.

        Appends *payload* (or extends with it when ``multi`` is true) to a
        copy of the current list, optionally validating against
        :data:`SCHEMA` before committing the change to ``self.json``.
        """
        # Work on a copy so a failed validation leaves ``self.json`` intact.
        updated = deepcopy(self.json or [])
        if multi:
            updated.extend(payload)
        else:
            updated.append(payload)
        if validate:
            jsonschema.validate(updated, self.SCHEMA)
        self.json = updated
        flag_modified(self, 'json')
        return self
Ejemplo n.º 17
0
class GroupMetadata(db.Model, Timestamp):
    """Metadata for a group."""

    __tablename__ = 'groupmetadata'

    # TODO: assert group.type == GroupType.Identity
    group_id = db.Column(UUIDType,
                         db.ForeignKey(Group.id,
                                       onupdate='CASCADE',
                                       ondelete='CASCADE'),
                         primary_key=True)
    group = db.relationship(
        Group,
        backref=backref('data', uselist=False),
        single_parent=True,
    )
    json = db.Column(
        db.JSON()
        .with_variant(postgresql.JSONB(none_as_null=True), 'postgresql')
        .with_variant(JSONType(), 'sqlite'),
        default=dict,
    )

    # Identifier metadata
    SCHEMA = {
        '$schema': 'http://json-schema.org/draft-06/schema#',
        'definitions': COMMON_SCHEMA_DEFINITIONS,
        'additionalProperties': False,
        'properties': {
            k: v
            for k, v in OBJECT_TYPE_SCHEMA['properties'].items()
            if k in OVERRIDABLE_KEYS or k in MERGEABLE_KEYS
        },
    }

    def update(self, payload: dict, validate: bool = True):
        """Update the metadata of a group.

        Overridable keys replace existing values (a ``Type`` of
        ``unknown`` never overrides); mergeable keys are merged via
        :func:`mergeKey`.  The result is validated against
        :data:`SCHEMA` before being stored.
        """
        # Merge into a copy so validation failures don't corrupt state.
        merged = deepcopy(self.json or {})
        for key in OVERRIDABLE_KEYS:
            if not payload.get(key):
                continue
            if key == 'Type':
                type_name = (payload['Type'] or {}).get('Name', 'unknown')
                if type_name == 'unknown':
                    continue
            merged[key] = payload[key]
        for key in MERGEABLE_KEYS:
            mergeKey(merged, payload, key)
        # Set "Type" to "unknown" if not provided
        if not merged.get('Type', {}).get('Name'):
            merged['Type'] = {'Name': 'unknown'}
        if validate:
            jsonschema.validate(merged, self.SCHEMA)
        self.json = merged
        flag_modified(self, 'json')
        return self
Ejemplo n.º 18
0
class ItemTypeEditHistory(db.Model, Timestamp):
    """Edit history of an item type.

    The ``created`` and ``updated`` timestamp columns are maintained
    automatically by the :class:`Timestamp` mixin.
    """

    __tablename__ = 'item_type_edit_history'

    id = db.Column(db.Integer(), primary_key=True, autoincrement=True)
    """Identifier of item type edit history."""

    item_type_id = db.Column(db.Integer(),
                             db.ForeignKey(ItemType.id),
                             nullable=False)
    """Identifier for item type."""

    user_id = db.Column(db.Integer(), db.ForeignKey(User.id), nullable=False)
    """Identifier for author of item type."""

    notes = db.Column(
        db.JSON()
        .with_variant(postgresql.JSONB(none_as_null=True), 'postgresql')
        .with_variant(JSONType(), 'sqlite')
        .with_variant(JSONType(), 'mysql'),
        default=dict,
        nullable=True,
    )
    """Edit notes for item type."""

    @classmethod
    def get_latest_by_item_type_id(cls, item_type_id=0):
        """Get latest notes for item type."""
        # Not implemented yet.
        pass
Ejemplo n.º 19
0
class StatusCheck(db.Model):
    """Model defining the Status table for services."""

    __tablename__ = 'status_checks'

    id = db.Column(UUIDType, primary_key=True, unique=True, nullable=False,
                   default=uuid.uuid4)

    created_date = db.Column(db.DateTime, nullable=False,
                             default=datetime.utcnow)

    # Service name and its reported status.
    service = db.Column(db.String(20), nullable=False)
    status = db.Column(db.String(10), nullable=False)

    message = db.Column(
        JSONType()
        .with_variant(postgresql.JSONB(none_as_null=True), 'postgresql')
        .with_variant(JSONType(), 'sqlite'),
        default=None,
        nullable=True,
    )

    @classmethod
    def truncate_table_older_than(cls, days_to_delete=1):
        """Truncates the status check table since a certain date."""
        cutoff = datetime.today() - timedelta(days=days_to_delete)
        cls.query.filter(cls.created_date <= cutoff).delete()
        db.session.commit()
Ejemplo n.º 20
0
class HarvestLogs(db.Model):
    """Harvest Logs."""

    __tablename__ = "harvest_logs"

    id = db.Column(db.Integer, primary_key=True)
    harvest_setting_id = db.Column(db.Integer, nullable=False)
    # BUGFIX: pass the callable, not ``datetime.datetime.now()``.  The
    # original called it at import time, so every row shared the
    # module-load timestamp instead of getting its own start time.
    start_time = db.Column(db.DateTime, default=datetime.datetime.now)
    end_time = db.Column(db.DateTime, nullable=True)
    status = db.Column(db.String(10), nullable=False, default='Running')
    errmsg = db.Column(db.String(255), nullable=True, default=None)
    requrl = db.Column(db.String(255), nullable=True, default=None)
    counter = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                        default=lambda: dict(),
                        nullable=True)
    setting = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                        default=lambda: dict(),
                        nullable=True)
Ejemplo n.º 21
0
def upgrade():
    """Upgrade database."""
    op.create_table(
        'b2share_migrations',
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.Column('updated', sa.DateTime(), nullable=False),
        sa.Column('id', UUIDType, default=uuid.uuid4, nullable=False),
        sa.Column(
            'version',
            sa.String(80),
            default=__version__,
            nullable=False,
        ),
        sa.Column(
            'data',
            JSONType().with_variant(
                postgresql.JSON(none_as_null=True), 'postgresql'),
            default=dict,
            nullable=True,
        ),
        sa.PrimaryKeyConstraint('id'),
    )
Ejemplo n.º 22
0
class ActionJournal(db.Model, TimestampMixin):
    """Journal info for a workflow action."""

    __tablename__ = 'workflow_action_journal'

    id = db.Column(db.Integer(), nullable=False, primary_key=True,
                   autoincrement=True)
    """Activity_Action identifier."""

    activity_id = db.Column(db.String(24), nullable=False, unique=False,
                            index=True)
    """Activity id of Activity Action."""

    action_id = db.Column(db.Integer(), db.ForeignKey(Action.id),
                          nullable=True, unique=False)
    """Action id."""

    action_journal = db.Column(
        db.JSON()
        .with_variant(postgresql.JSONB(none_as_null=True), 'postgresql')
        .with_variant(JSONType(), 'sqlite')
        .with_variant(JSONType(), 'mysql'),
        default=dict,
        nullable=True,
    )
    """Action journal info stored as JSON."""
Ejemplo n.º 23
0
class GroupMetadata(db.Model, Timestamp):
    """Metadata for a group."""

    __tablename__ = 'groupmetadata'

    # TODO: assert group.type == GroupType.Identity
    group_id = Column(
        UUIDType,
        ForeignKey(Group.id, onupdate='CASCADE', ondelete='CASCADE'),
        primary_key=True)
    group = orm_relationship(
        Group,
        backref=backref('data', uselist=False),
        single_parent=True,
    )
    json = Column(
        JSON()
        .with_variant(postgresql.JSONB(none_as_null=True), 'postgresql')
        .with_variant(JSONType(), 'sqlite'),
        default=dict,
    )

    # Identifier metadata
    SCHEMA = {
        '$schema': 'http://json-schema.org/draft-06/schema#',
        'definitions': COMMON_SCHEMA_DEFINITIONS,
        'additionalProperties': False,
        'properties': {
            k: v for k, v in OBJECT_TYPE_SCHEMA['properties'].items()
            if k in OVERRIDABLE_KEYS
        },
    }

    def update(self, payload, validate=True):
        """Update the metadata of a group.

        Truthy overridable keys from *payload* replace existing values;
        ``Type`` is only replaced when :func:`_is_type_overridable` allows
        it.  The merged result is validated against :data:`SCHEMA`.
        """
        # Merge into a copy so validation failures don't corrupt state.
        merged = deepcopy(self.json or {})
        for key in OVERRIDABLE_KEYS:
            if not payload.get(key):
                continue
            if key == 'Type' and not _is_type_overridable(payload):
                continue
            merged[key] = payload[key]
        if validate:
            jsonschema.validate(merged, self.SCHEMA)
        self.json = merged
        flag_modified(self, 'json')
        return self
Ejemplo n.º 24
0
class ErrorMonitoring(db.Model, Timestamp):
    """Error monitoring model."""

    __tablename__ = 'error_monitoring'

    id = db.Column(UUIDType, default=uuid.uuid4, primary_key=True)
    event_id = db.Column(UUIDType)
    origin = db.Column(db.String, nullable=False)
    error = db.Column(db.String)
    n_retries = db.Column(db.Integer)
    payload = db.Column(
        db.JSON()
        .with_variant(postgresql.JSONB(none_as_null=True), 'postgresql')
        .with_variant(JSONType(), 'sqlite'),
        default=dict,
    )

    @classmethod
    def getLastWeeksErrors(cls, **kwargs):
        """Return errors from the past 8 days with more than two retries."""
        cutoff = datetime.datetime.now() - datetime.timedelta(days=8)
        return cls.query.filter(cls.created > str(cutoff),
                                cls.n_retries > 2).all()

    @classmethod
    def getFromEvent(cls, event_id):
        """Return the error recorded for *event_id*, or ``None``."""
        return cls.query.filter_by(event_id=event_id).one_or_none()

    def to_dict(self):
        """Dictionary representation of the error."""
        return {
            'created': self.created,
            'updated': self.updated,
            'id': self.id,
            'origin': self.origin,
            'error': self.error,
            'payload': self.payload,
        }

    def __repr__(self):
        """String representation of the error."""
        return str(self.to_dict())
Ejemplo n.º 25
0
class WorkflowsRecordSources(db.Model):
    """Source payload attached to a workflow record."""

    __tablename__ = 'workflows_record_sources'
    # Composite key: one payload per (record, source) pair.
    __table_args__ = (db.PrimaryKeyConstraint('record_id', 'source'), )

    source = db.Column(db.Text, default='', nullable=False)

    record_id = db.Column(
        UUIDType,
        db.ForeignKey('records_metadata.id', ondelete='CASCADE'),
        nullable=False,
    )

    json = db.Column(
        JSONType().with_variant(
            postgresql.JSON(none_as_null=True),
            'postgresql',
        ),
        default=dict,
    )
Ejemplo n.º 26
0
class CollectionMetadata(db.Model):
    """Represent a collection's metadata inside the SQL database."""

    __tablename__ = 'collections_metadata'

    collection_id = db.Column(
        db.Integer,
        db.ForeignKey(Collection.id),
        primary_key=True,
        nullable=False,
        # NOTE: no unique constraint, to keep future options open.
    )
    """Collection identifier."""

    infos = db.Column(
        JSONType().with_variant(
            postgresql.JSON(none_as_null=True),
            'postgresql',
        ),
        default=lambda: dict(),
        nullable=True,
    )
    """Extra information about the collection, stored as JSON."""

    collection = db.relationship(Collection)
    """Relationship to the parent ``Collection`` row."""
# Copyright (C) 2016-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Adds extra_data column to userprofiles."""

import sqlalchemy as sa
from alembic import op

from sqlalchemy.dialects import postgresql
from sqlalchemy_utils.types import JSONType

# Column type for the new ``extra_data`` column: JSONB on PostgreSQL,
# plain serialized JSON text on SQLite (JSONType's generic behavior).
json_type = JSONType().with_variant(
    postgresql.JSONB(none_as_null=True),
    'postgresql',
).with_variant(
    JSONType(),
    'sqlite',
)

# revision identifiers, used by Alembic.
revision = 'a25efff50ffa'
down_revision = 'c25ef2c50ffa'
branch_labels = ()
depends_on = None


def upgrade():
    """Upgrade database: add the ``extra_data`` JSON column."""
    op.add_column('userprofiles_userprofile',
                  sa.Column('extra_data', json_type, nullable=True))


def downgrade():
    """Downgrade database: drop the ``extra_data`` column again.

    The original migration defined no ``downgrade``, which makes the
    revision irreversible; dropping the added column restores the
    previous schema.
    """
    op.drop_column('userprofiles_userprofile', 'extra_data')
Ejemplo n.º 28
0
class OAIServerSchema(db.Model, Timestamp):
    """Represent an OAIServer schema.

    The OAIServerSchema object contains ``created`` and ``updated``
    properties that are automatically updated.
    """

    # Enables SQLAlchemy-Continuum versioning
    __versioned__ = {}

    __tablename__ = 'oaiserver_schema'

    id = db.Column(
        UUIDType,
        primary_key=True,
        default=uuid.uuid4,
    )
    """Schema identifier."""

    schema_name = db.Column(db.String(255), nullable=False, unique=True)
    """Mapping name of the schema."""

    form_data = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                          # ``dict`` is already a zero-arg factory;
                          # no lambda wrapper needed.
                          default=dict,
                          nullable=True)
    """Data (schema name, root name, description) of the form."""

    xsd = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                    # OrderedDict keeps the XSD element order stable.
                    default=OrderedDict,
                    nullable=False)
    """XSD schema."""

    namespaces = db.Column(db.JSON().with_variant(
        postgresql.JSONB(none_as_null=True),
        'postgresql',
    ).with_variant(
        JSONType(),
        'sqlite',
    ).with_variant(
        JSONType(),
        'mysql',
    ),
                           default=dict,
                           nullable=True)
    """Namespaces for the XML output."""

    schema_location = db.Column(db.String(255))
    """Schema URL."""

    # Scalar defaults suffice here; SQLAlchemy accepts plain values.
    isvalid = db.Column(db.Boolean(name='isvalid'),
                        nullable=False,
                        default=False)

    is_mapping = db.Column(db.Boolean(name='is_mapping'),
                           nullable=False,
                           default=False)

    isfixed = db.Column(db.Boolean(name='isfixed'),
                        nullable=False,
                        default=False)

    version_id = db.Column(db.Integer, nullable=False)
    """Used by SQLAlchemy for optimistic concurrency control."""

    __mapper_args__ = {'version_id_col': version_id}
Ejemplo n.º 29
0
class ItemType(db.Model, Timestamp):
    """Represent an item type.

    The ItemType object contains a ``created`` and  a ``updated``
    properties that are automatically updated.
    """

    # Enables SQLAlchemy-Continuum versioning
    __versioned__ = {}

    __tablename__ = 'item_type'

    id = db.Column(
        db.Integer(),
        primary_key=True,
        autoincrement=True
    )
    """Identifier of item type."""

    name_id = db.Column(
        db.Integer(),
        db.ForeignKey(
            'item_type_name.id',
            name='fk_item_type_name_id'
        ),
        nullable=False
    )
    """Name identifier of item type."""

    item_type_name = db.relationship(
        'ItemTypeName',
        backref=db.backref('item_type', lazy='dynamic',
                           order_by=desc('item_type.tag'))
    )
    """Name information from ItemTypeName class."""

    schema = db.Column(
        db.JSON().with_variant(
            postgresql.JSONB(none_as_null=True),
            'postgresql',
        ).with_variant(
            JSONType(),
            'sqlite',
        ).with_variant(
            JSONType(),
            'mysql',
        ),
        default=lambda: dict(),
        nullable=True
    )
    """Store schema in JSON format. When you create a new ``item type`` the 
    ``schema`` field value should never be ``NULL``. Default value is an 
    empty dict. ``NULL`` value means that the record metadata has been 
    deleted. """

    form = db.Column(
        db.JSON().with_variant(
            postgresql.JSONB(none_as_null=True),
            'postgresql',
        ).with_variant(
            JSONType(),
            'sqlite',
        ).with_variant(
            JSONType(),
            'mysql',
        ),
        default=lambda: dict(),
        nullable=True
    )
    """Store schema form in JSON format.
    When you create a new ``item type`` the ``form`` field value should never be
    ``NULL``. Default value is an empty dict. ``NULL`` value means that the
    record metadata has been deleted.
    """

    render = db.Column(
        db.JSON().with_variant(
            postgresql.JSONB(none_as_null=True),
            'postgresql',
        ).with_variant(
            JSONType(),
            'sqlite',
        ).with_variant(
            JSONType(),
            'mysql',
        ),
        default=lambda: dict(),
        nullable=True
    )
    """Store page render information in JSON format. When you create a new 
    ``item type`` the ``render`` field value should never be ``NULL``. 
    Default value is an empty dict. ``NULL`` value means that the record 
    metadata has been deleted. """

    tag = db.Column(db.Integer, nullable=False)
    """Tag of item type."""

    version_id = db.Column(db.Integer, nullable=False)
    """Used by SQLAlchemy for optimistic concurrency control."""

    __mapper_args__ = {
        'version_id_col': version_id
    }
Ejemplo n.º 30
0
class ItemTypeProperty(db.Model, Timestamp):
    """Represent an itemtype property.

    The ItemTypeProperty object contains a ``created`` and  a
    ``updated`` properties that are automatically updated.
    """

    __tablename__ = 'item_type_property'

    id = db.Column(
        db.Integer(),
        primary_key=True,
        autoincrement=True
    )
    """Identifier of itemtype property."""

    name = db.Column(
        db.Text,
        nullable=False,
        unique=True
    )
    """Name identifier of itemtype property."""

    schema = db.Column(
        db.JSON().with_variant(
            postgresql.JSONB(none_as_null=True),
            'postgresql',
        ).with_variant(
            JSONType(),
            'sqlite',
        ).with_variant(
            JSONType(),
            'mysql',
        ),
        default=lambda: dict(),
        nullable=True
    )
    """Store schema in JSON format. When you create a new 
    ``ItemTypeProperty`` the ``schema`` field value should never be ``NULL``. 
    Default value is an empty dict. ``NULL`` value means that the record 
    metadata has been deleted. """

    form = db.Column(
        db.JSON().with_variant(
            postgresql.JSONB(none_as_null=True),
            'postgresql',
        ).with_variant(
            JSONType(),
            'sqlite',
        ).with_variant(
            JSONType(),
            'mysql',
        ),
        default=lambda: dict(),
        nullable=True
    )
    """Store schema form (single) in JSON format. When you create a new 
    ``ItemTypeProperty`` the ``form`` field value should never be ``NULL``. 
    Default value is an empty dict. ``NULL`` value means that the record 
    metadata has been deleted. """

    forms = db.Column(
        db.JSON().with_variant(
            postgresql.JSONB(none_as_null=True),
            'postgresql',
        ).with_variant(
            JSONType(),
            'sqlite',
        ).with_variant(
            JSONType(),
            'mysql',
        ),
        default=lambda: dict(),
        nullable=True
    )
    """Store schema form (array) in JSON format. When you create a new 
    ``ItemTypeProperty`` the ``forms`` field value should never be ``NULL``. 
    Default value is an empty dict. ``NULL`` value means that the record 
    metadata has been deleted. """

    delflg = db.Column(db.Boolean(name='delFlg'),
                       default=False, nullable=False)
    """record delete flag