Example No. 1
class CompareFiles(DBBASE):

    __tablename__ = "compare_files"
    __maxsize__ = 40 * 1024 * 1024

    m_index = Column(Integer,
                     ForeignKey(Indexer.m_index),
                     unique=False,
                     nullable=False)
    filtered = Column(Boolean, unique=False)
    constraint = PrimaryKeyConstraint("m_index",
                                      "filtered",
                                      name="compare_idx")
    refmap = Column(LargeBinary(length=__maxsize__))
    tmap = Column(LargeBinary(length=__maxsize__))
    stats = Column(LargeBinary(length=__maxsize__))
    __table_args__ = (constraint, )

    def __init__(self, index, base, filtered):

        if not isinstance(filtered, bool):
            raise ValueError("Invalid filtered value: {}".format(filtered))

        assert index is not None
        self.m_index = index
        self.filtered = filtered
        with open("{}.tmap".format(base), "rb") as handle:
            self.tmap = memoryview(handle.read())
        with open("{}.refmap".format(base), "rb") as handle:
            self.refmap = memoryview(handle.read())
        with open("{}.stats".format(base), "rb") as handle:
            self.stats = memoryview(handle.read())
        print(self.m_index, base, self.filtered)
Example No. 2
class ReadStatus(Base):
    class MyPickleType(PickleType):
        impl = LargeBinary(length=2**30)

    __tablename__ = 'read_status'
    __table_args__ = {'mysql_engine': 'InnoDB'}

    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('users.id'), index=True)
    read_status_data = Column(MyPickleType)
    read_status_numbers = Column(LargeBinary(length=2**30))
    read_status_markers = Column(LargeBinary(length=2**30))

    user = relationship(User, backref='read_status')

    def __init__(self, user, read_status_data):
        '''
        @type user: model.User
        @type read_status_data: read_status_manager.ReadStatus
        '''
        self.user = user
        self.read_status_data = None
        self.read_status_numbers = b''  # LargeBinary columns expect bytes
        self.read_status_markers = b''

    def __repr__(self):
        return "<ReadStatus('%s', '%s')>" % (self.user.username,
                                             self.read_status_data)
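
The nested MyPickleType widens PickleType's default LargeBinary backing column to 1 GiB, while reads and writes of read_status_data are still pickled and unpickled transparently by SQLAlchemy. A minimal, self-contained sketch of the same pattern (illustrative names, assuming SQLAlchemy 1.4+; not from the source):

from sqlalchemy import Column, Integer, LargeBinary, PickleType, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class BigPickleType(PickleType):
    # same idea as MyPickleType above: a 1 GiB LargeBinary backing type
    impl = LargeBinary(length=2**30)


class Blob(Base):
    __tablename__ = 'blob'
    id = Column(Integer, primary_key=True)
    payload = Column(BigPickleType)  # any picklable Python object


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(Blob(payload={'numbers': list(range(5))}))
    session.commit()
    print(session.query(Blob).one().payload)  # {'numbers': [0, 1, 2, 3, 4]}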
Example No. 3
class DbCertificate(Base):
    __tablename__ = 'certificate'
    id = Column(Integer, primary_key=True)
    valid_start = Column(DateTime())
    valid_end = Column(DateTime())
    latest_valid = Column(Boolean())
    name = Column(String(256))
    ca_name = Column(String(256))
    public_key = Column(LargeBinary())
    private_key = Column(LargeBinary())
    account_id = Column(Integer, ForeignKey('account.id'))
    domain_id = Column(Integer, ForeignKey('domain.id'))
Example No. 4
class User(Base, UserMixin):
    __tablename__ = 'users'

    user_id = Column(Integer, primary_key=True)
    email = Column(String(50), unique=True)
    password = Column(LargeBinary())
    is_admin = Column(Boolean, default=False)

    def __init__(self, email, password, is_admin):
        self.email = email
        self.password = password
        self.is_admin = is_admin

    def get_id(self):
        return self.user_id

    def __repr__(self):
        # obj = {
        # 	'user_id': self.user_id,
        # 	'email': self.email,
        # 	'password': self.password,
        # 	'is_admin': self.is_admin
        # }
        # return "<User(user_id={0}, email={1}, password={2})>".format(self.user_id, self.email, self.password)
        return "{0},{1},{2},{3}".format(self.user_id, self.email,
                                        self.password, self.is_admin)
Example No. 5
    class Reports(Base):
        """
            Embeded class for ORM map NmapReport to a
            simple three column table
        """
        __tablename__ = 'reports'

        id = Column('report_id', Integer, primary_key=True)
        inserted = Column('inserted', DateTime(), default='now')
        host = Column('host', String(256))
        command_line = Column('command_line', String(256))
        report_json = Column('report_json', LargeBinary())

        def __init__(self, obj_NmapReport):
            # self.inserted = datetime.fromtimestamp(obj_NmapReport.endtime)
            self.host = obj_NmapReport.address
            self.command_line = obj_NmapReport.commandline

            dumped_json = json.dumps(obj_NmapReport,
                                     cls=ReportEncoder)
            self.report_json = bytes(dumped_json.encode('UTF-8'))

        def decode(self):
            json_decoded = self.report_json.decode('utf-8')
            nmap_report_obj = json.loads(json_decoded,
                                         cls=ReportDecoder)
            return nmap_report_obj
Example No. 6
class BlockChainMixin2(object):
    hash = Column('hash', LargeBinary())
    __tablename__ = 'block_chain'

    @hybrid_property
    def hash_hex2(self):
        return self.hash.hex()

    @hash_hex2.expression
    def hash_hex2(cls):
        return cls.hash.hex()
Example No. 7
def _get_test_vertex_name_to_table():
    """Return a dict mapping the name of each VertexType to the underlying SQLAlchemy Table."""
    metadata = MetaData()
    table1 = Table(
        "Table1",
        metadata,
        Column("column_with_supported_type", String(), primary_key=True),
        Column("column_with_non_supported_type", LargeBinary()),
        Column("column_with_mssql_type", TINYINT()),
        Column("source_column", Integer(),
               ForeignKey("Table2.destination_column")),
        Column("unique_column", Integer(), unique=True),
    )

    table2 = Table(
        "Table2",
        metadata,
        Column("destination_column", Integer(), primary_key=True),
    )

    table3 = Table(
        "Table3",
        metadata,
        Column("primary_key_column1", Integer()),
        Column("primary_key_column2", Integer()),
        PrimaryKeyConstraint("primary_key_column1", "primary_key_column2"),
    )

    table4 = Table(
        "Table4",
        metadata,
        Column("primary_key_column_with_unsupported_type", LargeBinary()),
        PrimaryKeyConstraint("primary_key_column_with_unsupported_type"),
    )

    return {
        "Table1": table1,
        "ArbitraryObjectName": table2,
        "TableWithMultiplePrimaryKeyColumns": table3,
        "TableWithNonSupportedPrimaryKeyType": table4,
    }
Example No. 8
class Token(Base):
    __tablename__ = 'token'
    id = Column(Integer(), primary_key=True)
    token = Column(LargeBinary(size.password))
    activated = Column(Boolean)
    lastlogin = Column(DateTime())
    expiration = Column(DateTime())
    deleted = Column(Boolean())

    def __init__(self, **kw):
        self.token = kw['token']
        self.activated = kw['activated']
        self.deleted = kw['deleted']
Example No. 9
    def get_type(self, col_type, params=None, unsigned=False):
        """Map the type to valid Column Types.

        Notes:
            http://docs.sqlalchemy.org/en/latest/core/type_basics.html

        Args:
            col_type (str): Type of column
            params (dict, optional): Defaults to None. Additional Column Options.
            unsigned (bool, optional): Defaults to False. If it is an unsigned integer or not.

        Returns:
            sqlalchemy.types.TypeEngine: Type for new column
        """
        # TODO: Check if vendor specific types like json, mediumint, etc work
        params = {} if params is None else params

        # Get number types
        if (
            'integer' in col_type or 'increments' in col_type or col_type == 'decimal' or
            col_type == 'double' or col_type == 'float'
        ):
            return get_number_type(col_type, params, unsigned)
        # Get String types
        elif (
            'text' in col_type or col_type == 'char' or col_type == 'json' or col_type == 'string'
        ):
            return get_string_type(col_type, params)
        # Get Date/Time Types
        elif 'date' in col_type or 'time' in col_type:
            return get_time_type(col_type)
        # Get BINARY type
        elif col_type == 'binary':
            return LargeBinary()
        # Get Boolean type
        elif col_type == 'boolean':
            return Boolean()
        # Get Enum Type
        elif col_type == 'enum':
            return Enum(*params.get('fields', []))
        # Get Array type
        elif col_type == 'array':
            arr_type = self.get_type(params.get('arr_type', 'text'))
            return arr_type.with_variant(
                ARRAY(arr_type, dimensions=params.get('dimensions')),
                'postgresql'
            )

        return Text()
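
The 'array' branch above relies on with_variant() so the column stays a plain text type on most backends but becomes a native ARRAY on PostgreSQL. A small standalone sketch of that pattern (table and column names are made up, not from the source):

from sqlalchemy import ARRAY, Column, Integer, MetaData, Table, Text

metadata = MetaData()

# Text() everywhere, but a real text[] array when the PostgreSQL dialect is used.
tags_type = Text().with_variant(ARRAY(Text(), dimensions=1), 'postgresql')

articles = Table(
    'articles',
    metadata,
    Column('id', Integer, primary_key=True),
    Column('tags', tags_type),
)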
Example No. 10
class User(Base, UserMixin):
    __tablename__ = 'users'

    id = Column(Integer, primary_key=True, unique=True)
    email = Column(String(50))
    password = Column(LargeBinary())
    is_admin = Column(Integer)

    def __repr__(self):
        return "{0},{1},{2},{3}".format(self.id, self.email, self.password,
                                        self.is_admin)
Example No. 11
    class Reports(Base):
        """
        Embeded class for ORM map NmapReport to a
        simple three column table
        """

        __tablename__ = "reports"

        id = Column("report_id", Integer, primary_key=True)
        inserted = Column("inserted", DateTime(), default="now")
        report_json = Column("report_json", LargeBinary())

        def __init__(self, obj_NmapReport):
            self.inserted = datetime.fromtimestamp(obj_NmapReport.endtime)
            dumped_json = json.dumps(obj_NmapReport, cls=ReportEncoder)
            self.report_json = bytes(dumped_json.encode("UTF-8"))

        def decode(self):
            json_decoded = self.report_json.decode("utf-8")
            nmap_report_obj = json.loads(json_decoded, cls=ReportDecoder)
            return nmap_report_obj
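
Reports keeps the whole scan as bytes in the report_json LargeBinary column: __init__ serializes with json.dumps and encodes to UTF-8, and decode() reverses both steps. The round trip itself is plain json plus encode/decode, sketched here with an ordinary dict instead of an NmapReport (purely illustrative):

import json

report = {'host': '127.0.0.1', 'ports': [22, 80]}

# what __init__ does: dump to JSON text, then encode to bytes for LargeBinary
report_json = json.dumps(report).encode('UTF-8')

# what decode() does: bytes back to text, then parse the JSON
restored = json.loads(report_json.decode('utf-8'))
assert restored == report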
Example No. 12
class User(Base):
    __tablename__ = 'user'
    id = Column(Integer(), primary_key=True)
    username = Column(String(size.username))
    password = Column(LargeBinary(size.password))
    fullname = Column(String(size.fullname))
    tokenhash = Column(String(size.password))
    email = Column(String(size.email))
    activated = Column(Boolean())
    lastlogin = Column(DateTime())
    deleted = Column(Boolean())
    receive_email = Column(Boolean())

    def __init__(self, **kw):
        self.username = kw['username']
        self.password = kw['password']
        self.tokenhash = kw['tokenhash']
        self.fullname = kw['fullname']
        self.email = kw['email'] if 'email' in kw else None
        self.activated = kw['activated'] if 'activated' in kw else False
        self.deleted = kw['deleted'] if 'deleted' in kw else False
        self.receive_email = kw[
            'receive_email'] if 'receive_email' in kw else False
Example No. 13
class ContentRevisionRO(DeclarativeBase):
    """
    Revision of Content. It's immutable, update or delete an existing ContentRevisionRO will throw
    ContentRevisionUpdateError errors.
    """

    __tablename__ = 'content_revisions'

    revision_id = Column(Integer, primary_key=True)
    content_id = Column(Integer, ForeignKey('content.id'), nullable=False)
    owner_id = Column(Integer, ForeignKey('users.user_id'), nullable=True)

    label = Column(Unicode(1024), unique=False, nullable=False)
    description = Column(Text(), unique=False, nullable=False, default='')
    file_extension = Column(
        Unicode(255),
        unique=False,
        nullable=False,
        server_default='',
    )
    file_mimetype = Column(Unicode(255),
                           unique=False,
                           nullable=False,
                           default='')
    file_content = deferred(Column(LargeBinary(), unique=False, nullable=True))
    properties = Column('properties',
                        Text(),
                        unique=False,
                        nullable=False,
                        default='')

    type = Column(Unicode(32), unique=False, nullable=False)
    status = Column(Unicode(32),
                    unique=False,
                    nullable=False,
                    default=ContentStatus.OPEN)
    created = Column(DateTime,
                     unique=False,
                     nullable=False,
                     default=datetime.utcnow)
    updated = Column(DateTime,
                     unique=False,
                     nullable=False,
                     default=datetime.utcnow)
    is_deleted = Column(Boolean, unique=False, nullable=False, default=False)
    is_archived = Column(Boolean, unique=False, nullable=False, default=False)
    is_temporary = Column(Boolean, unique=False, nullable=False, default=False)
    revision_type = Column(Unicode(32),
                           unique=False,
                           nullable=False,
                           default='')

    workspace_id = Column(Integer,
                          ForeignKey('workspaces.workspace_id'),
                          unique=False,
                          nullable=True)
    workspace = relationship('Workspace', remote_side=[Workspace.workspace_id])

    parent_id = Column(Integer,
                       ForeignKey('content.id'),
                       nullable=True,
                       default=None)
    parent = relationship("Content",
                          foreign_keys=[parent_id],
                          back_populates="children_revisions")

    node = relationship("Content",
                        foreign_keys=[content_id],
                        back_populates="revisions")
    owner = relationship('User', remote_side=[User.user_id])
    """ List of column copied when make a new revision from another """
    _cloned_columns = (
        'content_id',
        'created',
        'description',
        'file_content',
        'file_mimetype',
        'file_extension',
        'is_archived',
        'is_deleted',
        'label',
        'node',
        'owner',
        'owner_id',
        'parent',
        'parent_id',
        'properties',
        'revision_type',
        'status',
        'type',
        'updated',
        'workspace',
        'workspace_id',
        'is_temporary',
    )

    # read_by must be used like this:
    # read_datetime = revision.read_by[<User instance>]
    # if the user did not read the content, a KeyError is raised
    read_by = association_proxy(
        'revision_read_statuses',  # name of the attribute
        'view_datetime',  # attribute the value is taken from
        creator=lambda k, v: \
            RevisionReadStatus(user=k, view_datetime=v)
    )

    @property
    def file_name(self):
        return '{0}{1}'.format(
            self.label,
            self.file_extension,
        )

    @classmethod
    def new_from(cls, revision: 'ContentRevisionRO') -> 'ContentRevisionRO':
        """

        Return new instance of ContentRevisionRO where properties are copied from revision parameter.
        Look at ContentRevisionRO._cloned_columns to see what columns are copieds.

        :param revision: revision to copy
        :type revision: ContentRevisionRO
        :return: new revision from revision parameter
        :rtype: ContentRevisionRO
        """
        new_rev = cls()

        for column_name in cls._cloned_columns:
            column_value = getattr(revision, column_name)
            setattr(new_rev, column_name, column_value)

        new_rev.updated = datetime.utcnow()

        return new_rev

    def __setattr__(self, key: str, value: 'mixed'):
        """
        ContentRevisionUpdateError is raised if tried to update column and revision own identity
        :param key: attribute name
        :param value: attribute value
        :return:
        """
        if key in (
                '_sa_instance_state',
        ):  # Prevent infinite loop from SQLAlchemy code and altered set
            return super().__setattr__(key, value)

        if inspect(self).has_identity \
                and key in self._cloned_columns \
                and not RevisionsIntegrity.is_updatable(self):
            raise ContentRevisionUpdateError(
                "Can't modify revision. To work on new revision use tracim.model.new_revision "
                + "context manager.")

        super().__setattr__(key, value)

    def get_status(self) -> ContentStatus:
        return ContentStatus(self.status)

    def get_label(self) -> str:
        return self.label or self.file_name or ''

    def get_last_action(self) -> ActionDescription:
        return ActionDescription(self.revision_type)

    def has_new_information_for(self, user: User) -> bool:
        """
        :param user: the session current user
        :return: bool, True if there is new information for given user else False
                       False if the user is None
        """
        if not user:
            return False

        if user not in self.read_by.keys():
            return True

        return False

    def get_label_as_file(self):
        file_extension = self.file_extension or ''

        if self.type == ContentType.Thread:
            file_extension = '.html'
        elif self.type == ContentType.Page:
            file_extension = '.html'

        return '{0}{1}'.format(
            self.label,
            file_extension,
        )
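
The file_content column above is wrapped in deferred(), so the potentially large blob is left out of the initial SELECT and only fetched when the attribute is first accessed; undefer() opts back in per query. A hedged usage sketch (session, revision_id and the query style are assumed, not from the source):

from sqlalchemy.orm import undefer

# default behaviour: file_content is omitted from the SELECT and loaded
# lazily on first access (a second query is issued here)
revision = session.query(ContentRevisionRO).get(revision_id)
data = revision.file_content

# load the blob eagerly for this query only
revision = (
    session.query(ContentRevisionRO)
    .options(undefer(ContentRevisionRO.file_content))
    .get(revision_id)
)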
Example No. 14
class GTSPokemon(TableBase):
    __tablename__ = 'gts_pokemon'
    id = Column(Integer, primary_key=True, autoincrement=True)
    pid = Column(Integer)
    pokemon_blob = Column(LargeBinary(292), nullable=False)
Example No. 15
class User(DeclarativeBase):
    """
    User definition.

    This is the user definition used by :mod:`repoze.who`, which requires at
    least the ``user_name`` column.

    In addition, we specify all the local user information that we
    will be storing.

    """
    __tablename__ = 'tg_user'

    ##{B:Columns}

    user_id = Column(Integer, autoincrement=True, primary_key=True)

    user_name = Column(Unicode(16), unique=True, nullable=False)

    email_address = Column(Unicode(1024), unique=True, nullable=False)

    display_name = Column(Unicode(255))

    title = Column(Unicode(64))

    streetaddress = Column(Unicode(255))

    city = Column(Unicode(255))

    state_province = Column(Unicode(255))

    postal_code = Column(Unicode(12))

    country = Column(Unicode(32))

    phones = Column(Unicode(1024)) # JSON Encoded

    logo = Column(LargeBinary(1024*256))

    callingcard = Column(LargeBinary(1024*256))

    photo = Column(LargeBinary(1024*256))

    external_links = Column(Unicode(1024*256)) # JSON Encoded

    default_summary = Column(Unicode(8192))

    analyticsid = Column(Unicode(32))

    _password = Column('password', Unicode(128),
                       info={'rum': {'field':'Password'}})

    created = Column(DateTime, default=datetime.now)

    jobs = relationship("JobHistory", backref="user", order_by="JobHistory.order")
    skillgroups = relationship("SkillGroups", backref="user", order_by="SkillGroups.order")
    projects = relationship("ProjectHistory", backref="user", order_by="ProjectHistory.order")
    education = relationship("Education", backref="user", order_by="Education.order")
    awards = relationship("Award", backref="user", order_by="Award.order")
    resumes = relationship("Resume", backref="user", order_by="Resume.name")

    ##{E:Columns}

    #{ Special methods

    def __repr__(self):
        return ('<User: name=%s, email=%s, display=%s>' % (
                self.user_name, self.email_address, self.display_name)).encode('utf-8')

    def __unicode__(self):
        return self.display_name or self.user_name

    #{ Getters and setters

    @property
    def permissions(self):
        """Return a set with all permissions granted to the user."""
        perms = set()
        for g in self.groups:
            perms = perms | set(g.permissions)
        return perms

    @classmethod
    def by_email_address(cls, email):
        """Return the user object whose email address is ``email``."""
        return DBSession.query(cls).filter_by(email_address=email).first()

    @classmethod
    def by_user_name(cls, username):
        """Return the user object whose user name is ``username``."""
        return DBSession.query(cls).filter_by(user_name=username).first()

    @classmethod
    def _hash_password(cls, password):
        # Make sure password is a str because we cannot hash unicode objects
        if isinstance(password, unicode):
            password = password.encode('utf-8')
        salt = sha256()
        salt.update(os.urandom(60))
        hash = sha256()
        hash.update(password + salt.hexdigest())
        password = salt.hexdigest() + hash.hexdigest()
        # Make sure the hashed password is a unicode object at the end of the
        # process because SQLAlchemy _wants_ unicode objects for Unicode cols
        if not isinstance(password, unicode):
            password = password.decode('utf-8')
        return password

    def _set_password(self, password):
        """Hash ``password`` on the fly and store its hashed version."""
        self._password = self._hash_password(password)

    def _get_password(self):
        """Return the hashed version of the password."""
        return self._password

    password = synonym('_password', descriptor=property(_get_password,
                                                        _set_password))

    #}

    def validate_password(self, password):
        """
        Check the password against existing credentials.

        :param password: the password that was provided by the user to
            try and authenticate. This is the clear text version that we will
            need to match against the hashed one in the database.
        :type password: unicode object.
        :return: Whether the password is valid.
        :rtype: bool

        """
        hash = sha256()
        if isinstance(password, unicode):
            password = password.encode('utf-8')
        hash.update(password + str(self.password[:64]))
        return self.password[64:] == hash.hexdigest()

    def phones_to_string(self):
        phones = simplejson.loads(self.phones)
        return ", ".join(["%s: %s" % (x, phones[x]) for x in sorted(phones)])

    def phones_to_dict(self):
        return simplejson.loads(self.phones)
    
    def links_to_dict(self):
        return simplejson.loads(self.external_links)
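
_hash_password stores the salt's hex digest (64 characters) followed by the salted hash's hex digest (another 64), which is why validate_password re-hashes the candidate with self.password[:64] and compares against self.password[64:]. A standalone Python 3 illustration of that layout (hashlib only; not part of the class):

import os
from hashlib import sha256

password = b's3cret'

salt = sha256()
salt.update(os.urandom(60))
hashed = sha256()
hashed.update(password + salt.hexdigest().encode('utf-8'))

stored = salt.hexdigest() + hashed.hexdigest()  # 64 + 64 hex characters

# validation: re-hash the candidate with the stored salt (first 64 chars)
check = sha256()
check.update(password + stored[:64].encode('utf-8'))
assert stored[64:] == check.hexdigest()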
Example No. 16
    Column('moc_id',
           None,
           ForeignKey('moc.id', onupdate='RESTRICT', ondelete='CASCADE'),
           nullable=False), Column('order', Integer, nullable=False),
    Column('cell', Integer, nullable=False),
    Index('idx_moc_cell', 'moc_id', 'order', 'cell', unique=True),
    **table_opts)

moc_fits = Table(
    'moc_fits', metadata,
    Column('moc_id',
           None,
           ForeignKey('moc.id', onupdate='RESTRICT', ondelete='CASCADE'),
           primary_key=True,
           nullable=False),
    Column('fits', LargeBinary(2**32 - 1), nullable=False), **table_opts)

person = Table(
    'person', metadata, Column('id', Integer, primary_key=True),
    Column('name', Unicode(255), nullable=False),
    Column('title', Integer, nullable=True),
    Column('public', Boolean, default=False, nullable=False),
    Column('user_id',
           None,
           ForeignKey('user.id', onupdate='RESTRICT', ondelete='RESTRICT'),
           unique=True),
    Column(
        'institution_id', None,
        ForeignKey('institution.id', onupdate='RESTRICT',
                   ondelete='RESTRICT')),
    Column('admin', Boolean, default=False, nullable=False), **table_opts)
Example No. 17
class Classifier(Base):
    __tablename__ = 'classifier'
    id = Column(Integer(), primary_key=True)
    timestamp = Column(DateTime())
    uid = Column(String(256))
    model = Column(LargeBinary())
Example No. 18
from typing import List
from uuid import uuid4

import bcrypt
from sqlalchemy import Column, Table
from sqlalchemy.types import String, DateTime, LargeBinary
from sqlalchemy.sql import func
from fastapi import HTTPException

from api.main import database, metadata
from api.models.user import User, UserIn

users = Table(
    "users", metadata, Column("id", String(36), primary_key=True),
    Column("email", String(128), unique=True, nullable=False),
    Column("_password", LargeBinary(60)), Column("pseudonym", String(128)),
    Column("created_at", DateTime, server_default=func.now(), nullable=False),
    Column("modified_at",
           DateTime,
           server_default=func.now(),
           onupdate=func.now(),
           nullable=False))


async def get_user(user_id: str) -> User:
    """ Returns a User with the given id. """
    query = users.select().where(users.c.id == user_id)
    return await database.fetch_one(query)


async def get_user_by_email(email: str) -> User:
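    # NOTE: the original snippet is truncated at this point. The body below is
    # a minimal sketch of a likely continuation, assuming the same `databases`
    # query pattern used in get_user() above (assumption, not from the source).
    query = users.select().where(users.c.email == email)
    return await database.fetch_one(query)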
Example No. 19
class MyPickleType(PickleType):
    impl = LargeBinary(length=2**30)
Example No. 20
class ContentRevisionRO(DeclarativeBase):
    """
    Revision of Content. It's immutable, update or delete an existing ContentRevisionRO will throw
    ContentRevisionUpdateError errors.
    """

    __tablename__ = 'content_revisions'

    revision_id = Column(Integer, primary_key=True)
    content_id = Column(Integer, ForeignKey('content.id'), nullable=False)
    owner_id = Column(Integer, ForeignKey('users.user_id'), nullable=True)

    label = Column(Unicode(1024), unique=False, nullable=False)
    description = Column(Text(), unique=False, nullable=False, default='')
    file_extension = Column(
        Unicode(255),
        unique=False,
        nullable=False,
        server_default='',
    )
    file_mimetype = Column(Unicode(255),
                           unique=False,
                           nullable=False,
                           default='')
    # TODO - A.P - 2017-07-03 - future removal planned
    # file_content is to be replaced by depot_file, for now both coexist as
    # this:
    # - file_content data is still setted
    # - newly created revision also gets depot_file data setted
    # - access to the file of a revision from depot_file exclusively
    # Here is the tasks workflow of the DB to OnDisk Switch :
    # - Add depot_file "prototype style"
    #   https://github.com/tracim/tracim/issues/233 - DONE
    # - Integrate preview generator feature "prototype style"
    #   https://github.com/tracim/tracim/issues/232 - DONE
    # - Write migrations
    #   https://github.com/tracim/tracim/issues/245
    #   https://github.com/tracim/tracim/issues/246
    # - Stabilize preview generator integration
    #   includes dropping DB file content
    #   https://github.com/tracim/tracim/issues/249
    file_content = deferred(Column(LargeBinary(), unique=False, nullable=True))
    # INFO - A.P - 2017-07-03 - Depot Doc
    # http://depot.readthedocs.io/en/latest/#attaching-files-to-models
    # http://depot.readthedocs.io/en/latest/api.html#module-depot.fields
    depot_file = Column(UploadedFileField, unique=False, nullable=True)
    properties = Column('properties',
                        Text(),
                        unique=False,
                        nullable=False,
                        default='')

    type = Column(Unicode(32), unique=False, nullable=False)
    status = Column(Unicode(32),
                    unique=False,
                    nullable=False,
                    default=ContentStatus.OPEN)
    created = Column(DateTime,
                     unique=False,
                     nullable=False,
                     default=datetime.utcnow)
    updated = Column(DateTime,
                     unique=False,
                     nullable=False,
                     default=datetime.utcnow)
    is_deleted = Column(Boolean, unique=False, nullable=False, default=False)
    is_archived = Column(Boolean, unique=False, nullable=False, default=False)
    is_temporary = Column(Boolean, unique=False, nullable=False, default=False)
    revision_type = Column(Unicode(32),
                           unique=False,
                           nullable=False,
                           default='')

    workspace_id = Column(Integer,
                          ForeignKey('workspaces.workspace_id'),
                          unique=False,
                          nullable=True)
    workspace = relationship('Workspace', remote_side=[Workspace.workspace_id])

    parent_id = Column(Integer,
                       ForeignKey('content.id'),
                       nullable=True,
                       default=None)
    parent = relationship("Content",
                          foreign_keys=[parent_id],
                          back_populates="children_revisions")

    node = relationship("Content",
                        foreign_keys=[content_id],
                        back_populates="revisions")
    owner = relationship('User', remote_side=[User.user_id])
    """ List of column copied when make a new revision from another """
    _cloned_columns = (
        'content_id',
        'created',
        'description',
        'file_content',
        'file_mimetype',
        'file_extension',
        'is_archived',
        'is_deleted',
        'label',
        'node',
        'owner',
        'owner_id',
        'parent',
        'parent_id',
        'properties',
        'revision_type',
        'status',
        'type',
        'updated',
        'workspace',
        'workspace_id',
        'is_temporary',
    )

    # read_by must be used like this:
    # read_datetime = revision.read_by[<User instance>]
    # if the user did not read the content, a KeyError is raised
    read_by = association_proxy(
        'revision_read_statuses',  # name of the attribute
        'view_datetime',  # attribute the value is taken from
        creator=lambda k, v: \
            RevisionReadStatus(user=k, view_datetime=v)
    )

    @property
    def file_name(self):
        return '{0}{1}'.format(
            self.label,
            self.file_extension,
        )

    @classmethod
    def new_from(cls, revision: 'ContentRevisionRO') -> 'ContentRevisionRO':
        """

        Return new instance of ContentRevisionRO where properties are copied from revision parameter.
        Look at ContentRevisionRO._cloned_columns to see what columns are copieds.

        :param revision: revision to copy
        :type revision: ContentRevisionRO
        :return: new revision from revision parameter
        :rtype: ContentRevisionRO
        """
        new_rev = cls()

        for column_name in cls._cloned_columns:
            column_value = getattr(revision, column_name)
            setattr(new_rev, column_name, column_value)

        new_rev.updated = datetime.utcnow()
        # TODO APY tweaks here depot_file
        # import pudb; pu.db
        # new_rev.depot_file = DepotManager.get().get(revision.depot_file)
        new_rev.depot_file = revision.file_content

        return new_rev

    def __setattr__(self, key: str, value: 'mixed'):
        """
        ContentRevisionUpdateError is raised if tried to update column and revision own identity
        :param key: attribute name
        :param value: attribute value
        :return:
        """
        if key in (
                '_sa_instance_state',
        ):  # Prevent infinite loop from SQLAlchemy code and altered set
            return super().__setattr__(key, value)

        if inspect(self).has_identity \
                and key in self._cloned_columns \
                and not RevisionsIntegrity.is_updatable(self):
            raise ContentRevisionUpdateError(
                "Can't modify revision. To work on new revision use tracim.model.new_revision "
                + "context manager.")

        super().__setattr__(key, value)

    def get_status(self) -> ContentStatus:
        return ContentStatus(self.status)

    def get_label(self) -> str:
        return self.label or self.file_name or ''

    def get_last_action(self) -> ActionDescription:
        return ActionDescription(self.revision_type)

    def has_new_information_for(self, user: User) -> bool:
        """
        :param user: the session current user
        :return: bool, True if there is new information for given user else False
                       False if the user is None
        """
        if not user:
            return False

        if user not in self.read_by.keys():
            return True

        return False

    def get_label_as_file(self):
        file_extension = self.file_extension or ''

        if self.type == ContentType.Thread:
            file_extension = '.html'
        elif self.type == ContentType.Page:
            file_extension = '.html'

        return '{0}{1}'.format(
            self.label,
            file_extension,
        )
Example No. 21
    def test_warn_when_type_is_not_supported(self):
        with pytest.warns(Warning):
            try_get_graphql_scalar_type("binary", LargeBinary())
Example No. 22
    'file_systems', metadata,
    Column('id', INT, autoincrement=True, nullable=False, primary_key=True),
    Column('host_id', INT, ForeignKey("host.id")), Column('mnt_pnt', TEXT),
    Column('fs_type', TEXT), Column('f_favail', BigInteger),
    Column('f_bsize', INT), Column('f_frsize', INT),
    Column('f_blocks', BigInteger), Column('f_bfree', BigInteger),
    Column('f_bavail', BigInteger), Column('f_files', BigInteger),
    Column('f_ffree', BigInteger), Column('f_fssize', BigInteger))

batch_queue = Table(
    'batch_queue', metadata,
    Column('id', Integer, primary_key=True, autoincrement=True),
    Column('arrival', TIMESTAMP, nullable=False),
    Column('added', Boolean, nullable=False),
    Column('hw_uuid', VARCHAR(36), nullable=False),
    Column('data', LargeBinary(1000000)))


class Host(object):
    def __init__(self,
                 selinux_enabled=False,
                 rating=0,
                 last_modified=datetime.today()):
        self.selinux_enabled = selinux_enabled
        self.rating = rating
        self.last_modified = last_modified


class HostArchive(object):
    def __init__(self, rating=0):
        self.rating = rating