def upgrade():
    """Build the initial schema for the store/inventory application.

    Auto generated by Alembic; table order matters because later tables
    carry foreign keys onto earlier ones.
    """

    # Audit-timestamp pair shared by most tables.  A fresh pair is built
    # for every table because a SQLAlchemy Column object can only be
    # attached to a single table.
    def _timestamps():
        return (
            sa.Column('created_at', sa.DateTime(),
                      server_default=sa.text('now()'), nullable=True),
            sa.Column('updated_at', sa.DateTime(),
                      server_default=sa.text('now()'), nullable=True),
        )

    # JWT / session token blacklist.
    op.create_table(
        'blacklist_tokens',
        *_timestamps(),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('token', sa.String(length=500), nullable=False),
        sa.Column('blacklisted_on', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('token'))

    op.create_table(
        'brands',
        *_timestamps(),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=100), nullable=True),
        sa.PrimaryKeyConstraint('id'))

    op.create_table(
        'stores',
        *_timestamps(),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=100), nullable=True),
        sa.Column('email', sa.String(length=80), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'))

    op.create_table(
        'users',
        *_timestamps(),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=100), nullable=True),
        sa.Column('email', sa.String(length=80), nullable=False),
        sa.Column('password_hash', sa.String(length=100), nullable=False),
        sa.Column('dp', sa.String(length=120), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'))

    # Each inventory belongs to exactly one store.
    op.create_table(
        'inventories',
        *_timestamps(),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=50), nullable=True),
        sa.Column('store_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['store_id'], ['stores.id'], ),
        sa.PrimaryKeyConstraint('id'))

    # Roles are scoped per store.
    op.create_table(
        'roles',
        *_timestamps(),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.Enum('ADMIN', 'WORKER', name='roletype'),
                  nullable=True),
        sa.Column('store_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['store_id'], ['stores.id'], ),
        sa.PrimaryKeyConstraint('id'))

    op.create_table(
        'products',
        *_timestamps(),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=50), nullable=True),
        sa.Column('dp', sa.String(length=120), nullable=True),
        sa.Column('inventory_id', sa.Integer(), nullable=False),
        sa.Column('brand_id', sa.Integer(), nullable=False),
        sa.Column('quantity_available', sa.Integer(), nullable=True),
        sa.Column('max_quantity', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['brand_id'], ['brands.id'], ),
        sa.ForeignKeyConstraint(['inventory_id'], ['inventories.id'], ),
        sa.PrimaryKeyConstraint('id'))

    # Plain many-to-many link table (no surrogate key, no timestamps).
    op.create_table(
        'role_user_association',
        sa.Column('role_id', sa.Integer(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['role_id'], ['roles.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ))

    # Individual serialized units of a product.
    op.create_table(
        'items',
        *_timestamps(),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('serial_number', sa.String(length=50), nullable=False),
        sa.Column('is_damaged', sa.Boolean(), nullable=True),
        sa.Column('is_sold', sa.Boolean(), nullable=True),
        sa.Column('product_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['product_id'], ['products.id'], ),
        sa.PrimaryKeyConstraint('id'))
# revision identifiers, used by Alembic. revision = '569e98a8132b' down_revision = 'f9263d6df56' # Change to ['*'] if this migration applies to all plugins migration_for_plugins = ['neutron.services.metering.metering_plugin.' 'MeteringPlugin'] from alembic import op import sqlalchemy as sa from neutron.db import migration meteringlabels_direction = sa.Enum('ingress', 'egress', name='meteringlabels_direction') def downgrade(active_plugins=None, options=None): if not migration.should_run(active_plugins, migration_for_plugins): return op.drop_table('meteringlabelrules') meteringlabels_direction.drop(op.get_bind(), checkfirst=False) op.drop_table('meteringlabels') def upgrade(active_plugins=None, options=None): if not migration.should_run(active_plugins, migration_for_plugins): return
def upgrade():
    """Create the initial schema for the iHome (`ih_*`) application.

    Auto generated by Alembic; tables are created parents-first so that
    foreign keys always reference an existing table.
    """

    # create_time/update_time audit pair; built fresh per table because a
    # Column object can belong to only one table.
    def _audit_cols():
        return (
            sa.Column('create_time', sa.DateTime(), nullable=True),
            sa.Column('update_time', sa.DateTime(), nullable=True),
        )

    op.create_table(
        'ih_area_info',
        *_audit_cols(),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=32), nullable=False),
        sa.PrimaryKeyConstraint('id'))

    op.create_table(
        'ih_facility_info',
        *_audit_cols(),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=32), nullable=False),
        sa.PrimaryKeyConstraint('id'))

    op.create_table(
        'ih_user_profile',
        *_audit_cols(),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=32), nullable=False),
        sa.Column('password_hash', sa.String(length=128), nullable=False),
        sa.Column('mobile', sa.String(length=11), nullable=False),
        sa.Column('real_name', sa.String(length=32), nullable=True),
        sa.Column('id_card', sa.String(length=20), nullable=True),
        sa.Column('avatar_url', sa.String(length=128), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('mobile'),
        sa.UniqueConstraint('name'))

    # House listing; owned by a user, located in an area.
    op.create_table(
        'ih_house_info',
        *_audit_cols(),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('area_id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(length=64), nullable=False),
        sa.Column('price', sa.Integer(), nullable=True),
        sa.Column('address', sa.String(length=512), nullable=True),
        sa.Column('room_count', sa.Integer(), nullable=True),
        sa.Column('acreage', sa.Integer(), nullable=True),
        sa.Column('unit', sa.String(length=32), nullable=True),
        sa.Column('capacity', sa.Integer(), nullable=True),
        sa.Column('beds', sa.String(length=64), nullable=True),
        sa.Column('deposit', sa.Integer(), nullable=True),
        sa.Column('min_days', sa.Integer(), nullable=True),
        sa.Column('max_days', sa.Integer(), nullable=True),
        sa.Column('order_count', sa.Integer(), nullable=True),
        sa.Column('index_image_url', sa.String(length=256), nullable=True),
        sa.ForeignKeyConstraint(['area_id'], ['ih_area_info.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['ih_user_profile.id'], ),
        sa.PrimaryKeyConstraint('id'))

    # Many-to-many: house <-> facility, composite primary key.
    op.create_table(
        'ih_house_facility',
        sa.Column('house_id', sa.Integer(), nullable=False),
        sa.Column('facility_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['facility_id'], ['ih_facility_info.id'], ),
        sa.ForeignKeyConstraint(['house_id'], ['ih_house_info.id'], ),
        sa.PrimaryKeyConstraint('house_id', 'facility_id'))

    op.create_table(
        'ih_house_image',
        *_audit_cols(),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('house_id', sa.Integer(), nullable=False),
        sa.Column('url', sa.String(length=256), nullable=False),
        sa.ForeignKeyConstraint(['house_id'], ['ih_house_info.id'], ),
        sa.PrimaryKeyConstraint('id'))

    op.create_table(
        'ih_order_info',
        *_audit_cols(),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('house_id', sa.Integer(), nullable=False),
        sa.Column('begin_date', sa.DateTime(), nullable=False),
        sa.Column('end_date', sa.DateTime(), nullable=False),
        sa.Column('days', sa.Integer(), nullable=False),
        sa.Column('house_price', sa.Integer(), nullable=False),
        sa.Column('amount', sa.Integer(), nullable=False),
        sa.Column('status',
                  sa.Enum('WAIT_ACCEPT', 'WAIT_PAYMENT', 'PAID',
                          'WAIT_COMMENT', 'COMPLETE', 'CANCELED',
                          'REJECTED'),
                  nullable=True),
        sa.Column('comment', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['house_id'], ['ih_house_info.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['ih_user_profile.id'], ),
        sa.PrimaryKeyConstraint('id'))

    # Orders are routinely filtered by status.
    op.create_index(op.f('ix_ih_order_info_status'), 'ih_order_info',
                    ['status'], unique=False)
class PullRequest(BASE):
    """ Stores the pull requests created on a project.

    Table -- pull_requests
    """

    __tablename__ = 'pull_requests'

    id = sa.Column(sa.Integer, primary_key=True)
    # Globally-unique identifier, also used to build mail_id below.
    uid = sa.Column(sa.String(32), unique=True, nullable=False)
    title = sa.Column(sa.Text, nullable=False)
    # Target project; part of the composite primary key with `id`.
    project_id = sa.Column(
        sa.Integer,
        sa.ForeignKey('projects.id', ondelete='CASCADE', onupdate='CASCADE'),
        primary_key=True)
    branch = sa.Column(sa.Text(), nullable=False)
    # Source project; NULL when the PR comes from a remote git repo
    # (see the CheckConstraint in __table_args__ and the `remote` property).
    project_id_from = sa.Column(
        sa.Integer,
        sa.ForeignKey('projects.id', ondelete='CASCADE', onupdate='CASCADE'),
        nullable=True)
    remote_git = sa.Column(sa.Text(), nullable=True)
    branch_from = sa.Column(sa.Text(), nullable=False)
    commit_start = sa.Column(sa.Text(), nullable=True)
    commit_stop = sa.Column(sa.Text(), nullable=True)
    initial_comment = sa.Column(sa.Text(), nullable=True)
    user_id = sa.Column(
        sa.Integer,
        sa.ForeignKey('users.id', onupdate='CASCADE'),
        nullable=False,
        index=True)
    assignee_id = sa.Column(
        sa.Integer,
        sa.ForeignKey('users.id', onupdate='CASCADE'),
        nullable=True,
        index=True)
    merge_status = sa.Column(
        sa.Enum('NO_CHANGE', 'FFORWARD', 'CONFLICTS', 'MERGE',
                name='merge_status_enum'),
        nullable=True)
    # Status is a FK into a lookup table rather than an enum.
    status = sa.Column(
        sa.String(255),
        sa.ForeignKey('status_pull_requests.status', onupdate='CASCADE'),
        default='Open',
        nullable=False)
    closed_by_id = sa.Column(
        sa.Integer,
        sa.ForeignKey('users.id', onupdate='CASCADE'),
        nullable=True)
    closed_at = sa.Column(sa.DateTime, nullable=True)
    date_created = sa.Column(sa.DateTime, nullable=False,
                             default=datetime.datetime.utcnow)
    updated_on = sa.Column(sa.DateTime, nullable=False,
                           default=sa.func.now(),
                           onupdate=sa.func.now())

    # A pull request must come from either a local project or a remote
    # git URL -- never neither.
    __table_args__ = (
        sa.CheckConstraint(
            'NOT(project_id_from IS NULL AND remote_git IS NULL)'),
    )

    project = relation(
        'Project', foreign_keys=[project_id], remote_side=[Project.id],
        backref=backref(
            'requests', cascade="delete, delete-orphan",
        ),
        single_parent=True)

    project_from = relation(
        'Project', foreign_keys=[project_id_from], remote_side=[Project.id])

    user = relation('User', foreign_keys=[user_id],
                    remote_side=[User.id], backref='pull_requests')
    assignee = relation('User', foreign_keys=[assignee_id],
                        remote_side=[User.id], backref='assigned_requests')
    closed_by = relation('User', foreign_keys=[closed_by_id],
                         remote_side=[User.id], backref='closed_requests')

    def __repr__(self):
        return 'PullRequest(%s, project:%s, user:%s, title:%s)' % (
            self.id, self.project.name, self.user.user, self.title)

    @property
    def isa(self):
        ''' A string to allow finding out that this is a pull-request. '''
        return 'pull-request'

    @property
    def mail_id(self):
        ''' Return a unique representation of the pull-request as a string
        that can be used when sending emails.
        '''
        return '%s-pull-request-%s@pagure' % (self.project.name, self.uid)

    @property
    def discussion(self):
        ''' Return the list of comments related to the pull-request itself,
        ie: not related to a specific commit.
        '''
        return [
            comment
            for comment in self.comments
            if not comment.commit_id
        ]

    @property
    def score(self):
        ''' Return the review score of the pull-request by checking the
        number of +1, -1, :thumbup: and :thumbdown: in the comments of the
        pull-request. This includes only the main comments not the inline
        ones.

        A user can only give one +1 and one -1.
        '''
        positive = set()
        negative = set()
        for comment in self.discussion:
            # `break` ensures a comment counts at most once per direction,
            # and the sets deduplicate per user.
            for word in ['+1', ':thumbsup:']:
                if word in comment.comment:
                    positive.add(comment.user_id)
                    break
            for word in ['-1', ':thumbsdown:']:
                if word in comment.comment:
                    negative.add(comment.user_id)
                    break

        return len(positive) - len(negative)

    @property
    def remote(self):
        ''' Return whether the current PullRequest is a remote pull-request
        or not.
        '''
        return self.remote_git is not None

    @property
    def user_comments(self):
        ''' Return user comments only, filter it from notifications. '''
        return [
            comment
            for comment in self.comments
            if not comment.notification
        ]

    def to_json(self, public=False, api=False, with_comments=True):
        ''' Returns a dictionary representation of the pull-request.

        '''
        # Timestamps are serialized as Unix epoch strings ('%s').
        output = {
            'id': self.id,
            'uid': self.uid,
            'title': self.title,
            'branch': self.branch,
            'project': self.project.to_json(public=public, api=api),
            'branch_from': self.branch_from,
            'repo_from': self.project_from.to_json(
                public=public, api=api) if self.project_from else None,
            'remote_git': self.remote_git,
            'date_created': self.date_created.strftime('%s'),
            'updated_on': self.updated_on.strftime('%s'),
            'closed_at': self.closed_at.strftime(
                '%s') if self.closed_at else None,
            'user': self.user.to_json(public=public),
            'assignee': self.assignee.to_json(
                public=public) if self.assignee else None,
            'status': self.status,
            'commit_start': self.commit_start,
            'commit_stop': self.commit_stop,
            'closed_by': self.closed_by.to_json(
                public=public) if self.closed_by else None,
            'initial_comment': self.initial_comment,
        }

        comments = []
        if with_comments:
            for comment in self.comments:
                comments.append(comment.to_json(public=public))

        output['comments'] = comments

        return output
class TradeAgreement(ModelBase):
    """
    An agreement to transfer energy between two users, proposed by one
    party (user_proposed) from a sender (user_from) to a recipient
    (user_to).  Transfer size is bounded by `amount`/`unit` and/or
    `amount_percent`, optionally limited to the recipient's consumption.
    """
    __tablename__ = 'agreements_agreement'
    __table_args__ = (
        sa.UniqueConstraint('public_id'),
        # amount_percent is either unset or within [1, 100].
        sa.CheckConstraint(
            "(amount_percent IS NULL) OR (amount_percent >= 1 AND amount_percent <= 100)",
            name="amount_percent_is_NULL_or_between_1_and_100",
        ),
        # When not limiting to consumption, a fixed amount + unit is required.
        sa.CheckConstraint(
            "(limit_to_consumption = 'f' and amount is not null and unit is not null) or (limit_to_consumption = 't')",
            name="limit_to_consumption_OR_amount_and_unit",
        ),
    )

    # Meta
    id = sa.Column(sa.Integer(), primary_key=True, autoincrement=True, index=True)
    public_id = sa.Column(sa.String(), index=True, nullable=False)
    created = sa.Column(sa.DateTime(timezone=True), server_default=sa.func.now())
    declined = sa.Column(sa.DateTime(timezone=True))
    cancelled = sa.Column(sa.DateTime(timezone=True))

    # Involved parties (users)
    user_proposed_id = sa.Column(sa.Integer(), sa.ForeignKey('auth_user.id'), index=True, nullable=False)
    user_proposed = relationship('User', foreign_keys=[user_proposed_id], lazy='joined')
    user_from_id = sa.Column(sa.Integer(), sa.ForeignKey('auth_user.id'), index=True, nullable=False)
    user_from = relationship('User', foreign_keys=[user_from_id], lazy='joined')
    user_to_id = sa.Column(sa.Integer(), sa.ForeignKey('auth_user.id'), index=True, nullable=False)
    user_to = relationship('User', foreign_keys=[user_to_id], lazy='joined')

    # Outbound facilities
    facility_gsrn = sa.Column(ARRAY(sa.Integer()))

    # Agreement details
    state = sa.Column(sa.Enum(AgreementState), index=True, nullable=False)
    date_from = sa.Column(sa.Date(), nullable=False)
    date_to = sa.Column(sa.Date(), nullable=False)
    technologies = sa.Column(ARRAY(sa.String()), index=True)
    reference = sa.Column(sa.String())

    # Max. amount to transfer (per begin)
    amount = sa.Column(sa.Integer())
    unit = sa.Column(sa.Enum(Unit))

    # Transfer percentage (though never exceed max. amount - "amount" above)
    amount_percent = sa.Column(sa.Integer())

    # Limit transferred amount to recipient's consumption?
    limit_to_consumption = sa.Column(sa.Boolean())

    # Lowest number = highest priority
    # Is set when user accepts the agreement, otherwise None
    transfer_priority = sa.Column(sa.Integer())

    # Senders proposal note to recipient
    proposal_note = sa.Column(sa.String())

    @property
    def user_proposed_to(self):
        """
        The counterparty of the proposer (whichever of user_to/user_from
        is not the proposer).

        :rtype: User
        """
        if self.user_from_id == self.user_proposed_id:
            return self.user_to
        else:
            return self.user_from

    @property
    def transfer_reference(self):
        """
        :rtype: str
        """
        return self.public_id

    @property
    def calculated_amount(self):
        """
        Amount scaled by the unit's numeric value.

        :rtype: int
        """
        return self.amount * self.unit.value

    def is_proposed_by(self, user):
        """
        :param User user:
        :rtype: bool
        """
        return user.id == self.user_proposed_id

    def is_inbound_to(self, user):
        """
        :param User user:
        :rtype: bool
        """
        return user.id == self.user_to_id

    def is_outbound_from(self, user):
        """
        :param User user:
        :rtype: bool
        """
        return user.id == self.user_from_id

    def is_pending(self):
        """
        :rtype: bool
        """
        return self.state == AgreementState.PENDING

    def decline_proposal(self):
        # Marks the agreement declined and records the time.
        self.state = AgreementState.DECLINED
        self.declined = func.now()

    def cancel(self):
        # Cancelling also clears the transfer priority so the agreement
        # no longer participates in prioritized transfers.
        self.state = AgreementState.CANCELLED
        self.cancelled = func.now()
        self.transfer_priority = None
def upgrade():
    """Create the initial schema for the news application (`info_*` tables).

    Auto generated by Alembic; parents are created before the tables that
    reference them.
    """

    # create_time/update_time audit pair; a fresh pair is returned per
    # table because a Column object may only belong to one table.
    def _audit_cols():
        return (
            sa.Column('create_time', sa.DateTime(), nullable=True),
            sa.Column('update_time', sa.DateTime(), nullable=True),
        )

    op.create_table(
        'info_category',
        *_audit_cols(),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=64), nullable=False),
        sa.PrimaryKeyConstraint('id'))

    op.create_table(
        'info_user',
        *_audit_cols(),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('nick_name', sa.String(length=32), nullable=False),
        sa.Column('password_hash', sa.String(length=128), nullable=False),
        sa.Column('mobile', sa.String(length=11), nullable=False),
        sa.Column('avatar_url', sa.String(length=256), nullable=True),
        sa.Column('last_login', sa.DateTime(), nullable=True),
        sa.Column('is_admin', sa.Boolean(), nullable=True),
        sa.Column('signature', sa.String(length=512), nullable=True),
        sa.Column('gender', sa.Enum('MAN', 'WOMAN'), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('mobile'),
        sa.UniqueConstraint('nick_name'))

    op.create_table(
        'info_news',
        *_audit_cols(),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(length=256), nullable=False),
        sa.Column('source', sa.String(length=64), nullable=False),
        sa.Column('digest', sa.String(length=512), nullable=False),
        sa.Column('content', sa.Text(), nullable=False),
        sa.Column('clicks', sa.Integer(), nullable=True),
        sa.Column('index_image_url', sa.String(length=256), nullable=True),
        sa.Column('category_id', sa.Integer(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('status', sa.Integer(), nullable=True),
        sa.Column('reason', sa.String(length=256), nullable=True),
        sa.ForeignKeyConstraint(['category_id'], ['info_category.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['info_user.id'], ),
        sa.PrimaryKeyConstraint('id'))

    # Self-referencing follower/followed association on info_user.
    op.create_table(
        'info_user_fans',
        sa.Column('follower_id', sa.Integer(), nullable=False),
        sa.Column('followed_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['followed_id'], ['info_user.id'], ),
        sa.ForeignKeyConstraint(['follower_id'], ['info_user.id'], ),
        sa.PrimaryKeyConstraint('follower_id', 'followed_id'))

    # Comments may nest via parent_id (self-referencing FK).
    op.create_table(
        'info_comment',
        *_audit_cols(),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('news_id', sa.Integer(), nullable=False),
        sa.Column('content', sa.Text(), nullable=False),
        sa.Column('parent_id', sa.Integer(), nullable=True),
        sa.Column('like_count', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['news_id'], ['info_news.id'], ),
        sa.ForeignKeyConstraint(['parent_id'], ['info_comment.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['info_user.id'], ),
        sa.PrimaryKeyConstraint('id'))

    # User bookmarks for news articles.
    op.create_table(
        'info_user_collection',
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('news_id', sa.Integer(), nullable=False),
        sa.Column('create_time', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['news_id'], ['info_news.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['info_user.id'], ),
        sa.PrimaryKeyConstraint('user_id', 'news_id'))

    # Per-user comment likes.
    op.create_table(
        'info_comment_like',
        *_audit_cols(),
        sa.Column('comment_id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['comment_id'], ['info_comment.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['info_user.id'], ),
        sa.PrimaryKeyConstraint('comment_id', 'user_id'))
Revision ID: ede4e3f1a232 Revises: d6f033d8fa5b Create Date: 2018-11-27 22:00:52.080713 """ # revision identifiers, used by Alembic. revision = 'ede4e3f1a232' down_revision = 'd6f033d8fa5b' from alembic import op import sqlalchemy as sa # TODO: The enum value should be further discussed. state = sa.Enum('Initial', 'Bound', 'BindFailed', name='state') substate = sa.Enum('Initial', name='substate') attach_type = sa.Enum('PCI', 'MDEV', name='attach_type') cpid_type = sa.Enum('PCI', name='cpid_type') control_type = sa.Enum('PCI', name='control_type') device_type = sa.Enum('GPU', 'FPGA', 'MLU', name='device_type') def upgrade(): # drop old table: deployable, accelerator op.drop_table('attributes') op.drop_table('deployables') op.drop_table('accelerators') op.create_table('devices', sa.Column('created_at', sa.DateTime(), nullable=True),
nullable=False, primary_key=True), sqlalchemy.Column( "account_from", sqlalchemy.Integer, sqlalchemy.ForeignKey("account.id"), nullable=False, ), sqlalchemy.Column( "account_to", sqlalchemy.Integer, sqlalchemy.ForeignKey("account.id"), nullable=False, ), sqlalchemy.Column("type", sqlalchemy.Enum(models.TransactionType), nullable=False), sqlalchemy.Column("amount", sqlalchemy.Integer, nullable=False), sqlalchemy.Column("created", sqlalchemy.DateTime, nullable=False, index=True), ) def init_db(engine: sqlalchemy.engine.Engine): """ Create all required tables and insert prerequisite data. """ metadata.create_all(engine) with engine.connect() as conn:
def _get_sqlalchemy_type(cls):
    """Map a Spyne model class to the SQLAlchemy column type to store it.

    The issubclass checks are ORDER-SENSITIVE: several Spyne types are
    subclasses of Unicode (or of each other), so the more specific checks
    must run first -- see the inline "must be above ..." notes.  Returns
    None implicitly if no mapping applies.
    """
    # An explicit db_type on the class wins over any inference.
    db_type = cls.Attributes.db_type
    if db_type is not None:
        return db_type

    # must be above Unicode, because Ltree is Unicode's subclass
    if issubclass(cls, Ltree):
        return PGLTree

    # must be above Unicode, because Ip*Address is Unicode's subclass
    if issubclass(cls, (IpAddress, Ipv4Address, Ipv6Address)):
        return PGInet

    # must be above Unicode, because Uuid is Unicode's subclass
    if issubclass(cls, Uuid):
        return PGUuid(as_uuid=True)

    # must be above Unicode, because Point is Unicode's subclass
    if issubclass(cls, Point):
        return PGGeometry("POINT", dimension=cls.Attributes.dim)

    # must be above Unicode, because Line is Unicode's subclass
    if issubclass(cls, Line):
        return PGGeometry("LINESTRING", dimension=cls.Attributes.dim)

    # must be above Unicode, because Polygon is Unicode's subclass
    if issubclass(cls, Polygon):
        return PGGeometry("POLYGON", dimension=cls.Attributes.dim)

    # must be above Unicode, because MultiPoint is Unicode's subclass
    if issubclass(cls, MultiPoint):
        return PGGeometry("MULTIPOINT", dimension=cls.Attributes.dim)

    # must be above Unicode, because MultiLine is Unicode's subclass
    if issubclass(cls, MultiLine):
        return PGGeometry("MULTILINESTRING", dimension=cls.Attributes.dim)

    # must be above Unicode, because MultiPolygon is Unicode's subclass
    if issubclass(cls, MultiPolygon):
        return PGGeometry("MULTIPOLYGON", dimension=cls.Attributes.dim)

    # must be above Unicode, because String is Unicode's subclass
    if issubclass(cls, String):
        if cls.Attributes.max_len == String.Attributes.max_len:
            # Default is arbitrary-length
            return sqlalchemy.Text
        else:
            return sqlalchemy.String(cls.Attributes.max_len)

    if issubclass(cls, Unicode):
        if cls.Attributes.max_len == Unicode.Attributes.max_len:
            # Default is arbitrary-length
            return sqlalchemy.UnicodeText
        else:
            return sqlalchemy.Unicode(cls.Attributes.max_len)

    if issubclass(cls, EnumBase):
        return sqlalchemy.Enum(*cls.__values__, name=cls.__type_name__)

    if issubclass(cls, AnyXml):
        return PGXml

    if issubclass(cls, AnyHtml):
        return PGHtml

    if issubclass(cls, AnyDict):
        # NOTE: local `sa` here is the store_as marker, not sqlalchemy.
        sa = cls.Attributes.store_as
        if isinstance(sa, c_json):
            return PGJson
        raise NotImplementedError(dict(cls=AnyDict, store_as=sa))

    if issubclass(cls, ByteArray):
        return sqlalchemy.LargeBinary

    # Fixed-width integers: signed and unsigned share the same SQL type.
    if issubclass(cls, (Integer64, UnsignedInteger64)):
        return sqlalchemy.BigInteger

    if issubclass(cls, (Integer32, UnsignedInteger32)):
        return sqlalchemy.Integer

    if issubclass(cls, (Integer16, UnsignedInteger16)):
        return sqlalchemy.SmallInteger

    if issubclass(cls, (Integer8, UnsignedInteger8)):
        return sqlalchemy.SmallInteger

    if issubclass(cls, Float):
        return FLOAT

    if issubclass(cls, Double):
        return DOUBLE_PRECISION

    # Arbitrary-precision integers fall back to DECIMAL.
    if issubclass(cls, (Integer, UnsignedInteger)):
        return sqlalchemy.DECIMAL

    if issubclass(cls, Decimal):
        return sqlalchemy.DECIMAL

    if issubclass(cls, Boolean):
        if cls.Attributes.store_as is bool:
            return sqlalchemy.Boolean
        if cls.Attributes.store_as is int:
            return sqlalchemy.SmallInteger

        raise ValueError("Boolean.store_as has invalid value %r" %
                         cls.Attributes.store_as)

    if issubclass(cls, Date):
        return sqlalchemy.Date

    if issubclass(cls, DateTime):
        if cls.Attributes.timezone is None:
            if cls.Attributes.as_timezone is None:
                return sqlalchemy.DateTime(timezone=True)
            else:
                return sqlalchemy.DateTime(timezone=False)
        else:
            return sqlalchemy.DateTime(timezone=cls.Attributes.timezone)

    if issubclass(cls, Time):
        return sqlalchemy.Time

    if issubclass(cls, Duration):
        return sqlalchemy.dialects.postgresql.base.INTERVAL

    if issubclass(cls, XmlModifier):
        # Unwrap the modifier and map the underlying type recursively.
        retval = _get_sqlalchemy_type(cls.type)
        return retval
import sqlalchemy as sa """description of revision Revision ID: 0ff9e3881597 Revises: fb0167bd9639 Create Date: 2019-02-27 14:40:15.492884 """ # revision identifiers, used by Alembic. revision = '0ff9e3881597' down_revision = 'fb0167bd9639' network_segment_range_network_type = sa.Enum( 'vlan', 'vxlan', 'gre', 'geneve', name='network_segment_range_network_type') def upgrade(): op.create_table( 'network_segment_ranges', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('name', sa.String(length=255), nullable=True), sa.Column('default', sa.Boolean(), nullable=False), sa.Column('shared', sa.Boolean(), nullable=False), sa.Column('project_id', sa.String(length=255), nullable=True), sa.Column('network_type', network_segment_range_network_type, nullable=False),
class GeographicAttribute(db.Model, SerializerMixin):
    """A single named/valued attribute (e.g. a yearly statistic) attached to
    a geographic code within a dataset."""

    __tablename__ = 'geographic_attributes'

    geographic_attribute_id = db.Column(db.Integer, primary_key=True)
    geo_code_id = db.Column(db.Integer,
                            db.ForeignKey('geo_codes.geo_code_id'),
                            nullable=False)
    dataset_id = db.Column(
        db.Integer,
        db.ForeignKey('geographic_datasets.geographic_dataset_id'),
        nullable=False)
    attribute_name = db.Column(db.Text, nullable=False)
    attribute_value = db.Column(db.Numeric, nullable=False)
    # NOTE(review): `create_type` is a PostgreSQL-dialect ENUM option;
    # confirm it is accepted through sa.Enum in this SQLAlchemy version.
    attribute_value_type = db.Column(sa.Enum('percent', 'count',
                                             name='value_type',
                                             create_type=True),
                                     server_default='percent')
    attribute_year = db.Column(db.SmallInteger)
    attribute_relative_weight = db.Column(
        sa.Enum('high', 'medium', 'low', name='relative_weights',
                create_type=True))
    fips_code = db.Column(db.Text)
    # Soft-delete marker.
    deleted_at = db.Column(db.DateTime)

    # One value per (geo code, attribute, year, dataset).
    __table_args__ = (db.UniqueConstraint('geo_code_id',
                                          'attribute_name',
                                          'attribute_year',
                                          'dataset_id',
                                          name='_attribute_year_dataset_uc'), )

    @property
    def geo_name(self):
        # Delegates to the related geo_code row.
        return self.geo_code.geo_name

    def to_dict(self):
        """Return a plain-dict view of the attribute (subset of columns)."""
        attribute_dict = dict()
        attribute_dict['attribute_name'] = self.attribute_name
        attribute_dict['attribute_value'] = self.attribute_value
        attribute_dict['fips_code'] = self.fips_code
        attribute_dict['dataset_id'] = self.dataset_id
        attribute_dict['attribute_year'] = self.attribute_year
        return attribute_dict

    @classmethod
    def get_attribute_years(cls, dataset_id, attribute_name):
        """Return the distinct years for an attribute in a dataset,
        newest first."""
        year_rows = cls.query\
            .with_entities(cls.attribute_year)\
            .filter_by(dataset_id=dataset_id,
                       attribute_name=attribute_name).all()
        year_list = [row.attribute_year for row in year_rows]
        distinct_year_list = list(set(year_list))
        distinct_year_list.sort(reverse=True)
        return distinct_year_list

    @staticmethod
    def bulk_insert(attributes, dataset_id):
        """Insert a batch of attribute dicts (keys like 'attribute-name')
        for a dataset; rows with no value or an unknown geo code are
        skipped.  geo_code_id falls back to 0 when lookup fails."""
        insert_list = []
        for attribute in attributes:
            geocode = GeoCode.get_geocode(attribute.get('geographic-label'))
            row = {}
            row['geo_code_id'] = geocode.geo_code_id if geocode else 0
            row['fips_code'] = geocode.fips_code if geocode else None
            row['dataset_id'] = dataset_id
            row['attribute_name'] = attribute.get('attribute-name')
            row['attribute_value'] = attribute.get('attribute-value')
            row['attribute_value_type'] = 'percent'
            row['attribute_year'] = attribute.get('attribute-year')
            insert_list.append(row)
        # insert_list = [{k: v for d in insert_list for k, v in d.items() if v is not None}]
        # This was the only way I could get the import to work. I think there
        # is a problem with the csv parsing.
        for row in insert_list:
            if row['attribute_value'] is not None and row['geo_code_id'] != 0:
                db.session.execute(GeographicAttribute.__table__.insert(), row)
class Container(BASE, SoftDeleteMixIn, ModelBase):
    """Represents a Container for Secrets in the datastore.

    Containers store secret references. Containers are owned by Projects.
    Containers can be generic or have a predefined type. Predefined typed
    containers allow users to store structured key relationship
    inside Barbican.
    """
    __tablename__ = 'containers'

    name = sa.Column(sa.String(255))
    type = sa.Column(
        sa.Enum('generic', 'rsa', 'dsa', 'certificate',
                name='container_types'))
    project_id = sa.Column(
        sa.String(36),
        sa.ForeignKey('projects.id', name='containers_project_fk'),
        index=True,
        nullable=False)
    consumers = sa.orm.relationship("ContainerConsumerMetadatum")
    creator_id = sa.Column(sa.String(255))

    def __init__(self, parsed_request=None):
        """Creates a Container entity from a dict.

        When a parsed request is given, copies its name/type/creator_id,
        marks the container ACTIVE, and attaches one ContainerSecret per
        entry in 'secret_refs', extracting the secret id from each ref URL.
        """
        super(Container, self).__init__()

        if parsed_request:
            self.name = parsed_request.get('name')
            self.type = parsed_request.get('type')
            self.status = States.ACTIVE
            self.creator_id = parsed_request.get('creator_id')

            secret_refs = parsed_request.get('secret_refs')
            if secret_refs:
                for secret_ref in parsed_request.get('secret_refs'):
                    container_secret = ContainerSecret()
                    container_secret.name = secret_ref.get('name')

                    # TODO(hgedikli) move this into a common location
                    # TODO(hgedikli) validate provided url
                    # TODO(hgedikli) parse out secret_id with regex
                    # The ref may be a URL; keep only the trailing id
                    # segment (handling an optional trailing slash).
                    secret_id = secret_ref.get('secret_ref')
                    if secret_id.endswith('/'):
                        secret_id = secret_id.rsplit('/', 2)[1]
                    elif '/' in secret_id:
                        secret_id = secret_id.rsplit('/', 1)[1]
                    else:
                        secret_id = secret_id
                    container_secret.secret_id = secret_id
                    self.container_secrets.append(container_secret)

    def _do_delete_children(self, session):
        """Sub-class hook: delete children relationships."""
        for container_secret in self.container_secrets:
            session.delete(container_secret)

        for container_acl in self.container_acls:
            session.delete(container_acl)

    def _do_extra_dict_fields(self):
        """Sub-class hook method: return dict of fields."""
        return {
            'container_id': self.id,
            'name': self.name,
            'type': self.type,
            'creator_id': self.creator_id,
            'secret_refs': [
                {
                    'secret_id': container_secret.secret_id,
                    'name': container_secret.name
                    if hasattr(container_secret, 'name') else None
                } for container_secret in self.container_secrets],
            # Deleted consumers are filtered out of the view.
            'consumers': [
                {
                    'name': consumer.name,
                    'URL': consumer.URL
                } for consumer in self.consumers if not consumer.deleted]
        }
def upgrade():
    """Create the initial social-graph schema.

    Parent tables ('communities', 'interests', 'user', 'events',
    'friendship') are created before the association tables that hold
    foreign keys into them, so the DDL order must be preserved.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('communities',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('name', sa.String(length=100), nullable=False),
                    sa.Column('description', sa.Text(), nullable=True),
                    sa.Column('userscount', sa.Integer(), nullable=True),
                    sa.PrimaryKeyConstraint('id'))
    op.create_table('interests',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('title', sa.String(length=100), nullable=False),
                    sa.PrimaryKeyConstraint('id'))
    # NOTE(review): password is stored as raw bytes (BYTEA) — presumably a
    # hash; confirm against the application model.
    op.create_table(
        'user',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_name', sa.String(length=20), nullable=False),
        sa.Column('email', sa.String(length=50), nullable=False),
        sa.Column('first_name', sa.String(length=20), nullable=False),
        sa.Column('last_name', sa.String(length=20), nullable=False),
        sa.Column('password', postgresql.BYTEA(), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    op.create_table('events',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('title', sa.String(length=100), nullable=False),
                    sa.Column('description', sa.Text(), nullable=True),
                    sa.Column('creator_id', sa.Integer(), nullable=True),
                    sa.ForeignKeyConstraint(['creator_id'], ['user.id'], ),
                    sa.PrimaryKeyConstraint('id'))
    # A friendship is a directed requester -> target edge with a status
    # lifecycle stored in the 'friendshipstatus' enum type.
    op.create_table(
        'friendship',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('requester_id', sa.Integer(), nullable=True),
        sa.Column('target_id', sa.Integer(), nullable=True),
        sa.Column('status',
                  sa.Enum('requested', 'accepted', 'accepted_second_level',
                          'second_level', name='friendshipstatus'),
                  nullable=True),
        sa.ForeignKeyConstraint(['requester_id'], ['user.id'], ),
        sa.ForeignKeyConstraint(['target_id'], ['user.id'], ),
        sa.PrimaryKeyConstraint('id'))
    # Pure association tables below: composite primary key, no surrogate id.
    op.create_table(
        'users_interests',
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('interests_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['interests_id'], ['interests.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
        sa.PrimaryKeyConstraint('user_id', 'interests_id'))
    op.create_table(
        'event_interests',
        sa.Column('event_id', sa.Integer(), nullable=False),
        sa.Column('interests_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['event_id'], ['events.id'], ),
        sa.ForeignKeyConstraint(['interests_id'], ['interests.id'], ),
        sa.PrimaryKeyConstraint('event_id', 'interests_id'))
    op.create_table(
        'user_friendship',
        sa.Column('user', sa.Integer(), nullable=False),
        sa.Column('friendship', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['friendship'], ['friendship.id'], ),
        sa.ForeignKeyConstraint(['user'], ['user.id'], ),
        sa.PrimaryKeyConstraint('user', 'friendship'))
class Subnet(standard_attr.HasStandardAttributes, model_base.BASEV2,
             model_base.HasId, model_base.HasProject):
    """Represents a neutron subnet.

    When a subnet is created the first and last entries will be created.
    These are used for the IP allocation.
    """

    name = sa.Column(sa.String(db_const.NAME_FIELD_SIZE))
    network_id = sa.Column(sa.String(36), sa.ForeignKey('networks.id'),
                           nullable=False)
    # Added by the segments service plugin
    segment_id = sa.Column(sa.String(36), sa.ForeignKey('networksegments.id'))
    subnetpool_id = sa.Column(sa.String(36), index=True)
    # NOTE: Explicitly specify join conditions for the relationship because
    # subnetpool_id in subnet might be 'prefix_delegation' when the IPv6
    # Prefix Delegation is enabled
    subnetpool = orm.relationship(
        'SubnetPool', lazy='joined',
        foreign_keys='Subnet.subnetpool_id',
        primaryjoin='Subnet.subnetpool_id==SubnetPool.id')
    ip_version = sa.Column(sa.Integer, nullable=False)
    cidr = sa.Column(sa.String(64), nullable=False)
    gateway_ip = sa.Column(sa.String(64))
    # Reaches the owning network's standard-attribute row through the
    # 'networks' table so changes here can bump the network revision.
    network_standard_attr = orm.relationship(
        'StandardAttribute', lazy='subquery', viewonly=True,
        secondary='networks', uselist=False, load_on_pending=True)
    revises_on_change = ('network_standard_attr', )
    allocation_pools = orm.relationship(IPAllocationPool,
                                        backref='subnet',
                                        lazy="subquery",
                                        cascade='delete')
    enable_dhcp = sa.Column(sa.Boolean())
    dns_nameservers = orm.relationship(DNSNameServer,
                                       backref='subnet',
                                       cascade='all, delete, delete-orphan',
                                       order_by=DNSNameServer.order,
                                       lazy='subquery')
    routes = orm.relationship(SubnetRoute,
                              backref='subnet',
                              cascade='all, delete, delete-orphan',
                              lazy='subquery')
    ipv6_ra_mode = sa.Column(sa.Enum(constants.IPV6_SLAAC,
                                     constants.DHCPV6_STATEFUL,
                                     constants.DHCPV6_STATELESS,
                                     name='ipv6_ra_modes'),
                             nullable=True)
    ipv6_address_mode = sa.Column(sa.Enum(constants.IPV6_SLAAC,
                                          constants.DHCPV6_STATEFUL,
                                          constants.DHCPV6_STATELESS,
                                          name='ipv6_address_modes'),
                                  nullable=True)
    # subnets don't have their own rbac_entries, they just inherit from
    # the network rbac entries
    rbac_entries = orm.relationship(
        rbac_db_models.NetworkRBAC, lazy='subquery', uselist=True,
        foreign_keys='Subnet.network_id',
        primaryjoin='Subnet.network_id==NetworkRBAC.object_id')
    api_collections = [subnet_def.COLLECTION_NAME]
    collection_resource_map = {
        subnet_def.COLLECTION_NAME: subnet_def.RESOURCE_NAME
    }
    tag_support = True
class Schema(
        Base,
        Referenceable,
        Describeable,
        Modifiable,
        ):
    """
    An object that describes how an EAV schema is generated.
    Typically, an EAV schema represents a group of attributes that
    represent a meaningful data set. (e.g. contact details, name,
    test result.) Resulting schema objects can then be used to produce
    forms such as Zope-style interfaces.
    """

    __tablename__ = 'schema'

    # Override for max length of 32 characters
    name = sa.Column(sa.String, nullable=False)

    categories = orm.relationship(
        Category,
        secondary=schema_category_table,
        collection_class=set,
        backref=orm.backref('schemata', collection_class=set),
        doc='Listing of schema categories')

    storage = sa.Column(
        sa.Enum(*sorted(['eav', 'resource', 'table']),
                name='schema_storage'),
        nullable=False,
        server_default='eav',
        doc="""
            How the generated objects will be stored. Storage methods are:
                eav - values are stored in a type-sharded set of tables;
                resource - the object exists in an external service;
                table - the object is stored in a conventional SQL table;
            """)

    publish_date = sa.Column(
        sa.Date,
        doc='The date the schema was published for data collection')

    retract_date = sa.Column(sa.Date)

    is_association = sa.Column(
        sa.Boolean,
        doc="""
            If set and True, the schema is an defines an association
            for multiple schemata.
            """)

    @hybrid_property
    def has_private(self):
        # True when any attribute in the schema is marked private.
        for attribute in self.attributes.values():
            if attribute.is_private:
                return True
        return False

    @orm.validates('name')
    def valid_name(self, key, name):
        # Reject names that do not match the module-level name pattern.
        if not RE_VALID_NAME.match(name):
            raise ValueError('Invalid name: "%s"' % name)
        return name

    @declared_attr
    def __table_args__(cls):
        # A schema can never be retracted before it was published.
        return (
            sa.CheckConstraint('publish_date <= retract_date',
                               name='ck_%s_valid_publication'
                               % cls.__tablename__),
            )

    def itertraverse(self):
        """ Useful for iterating through attributes as a hierarchy """
        # Yields only top-level attributes (no parent), in display order.
        for attribute in sorted(itervalues(self.attributes),
                                key=lambda a: a.order):
            if attribute.parent_attribute is None:
                yield attribute

    def iterleafs(self):
        """ Lists all attributes flattened without their sections """
        for attribute in sorted(itervalues(self.attributes),
                                key=lambda a: a.order):
            if attribute.type != 'section':
                yield attribute

    def iterlist(self):
        """ Flattens the schema into a sorted list of all children """
        return chain.from_iterable(a.iterlist() for a in self.itertraverse())

    def __copy__(self):
        # Shallow copy: only scalar identifying fields are carried over.
        keys = ('name', 'title', 'description', 'storage')
        return self.__class__(**dict([(k, getattr(self, k)) for k in keys]))

    def __deepcopy__(self, memo):
        duplicate = copy(self)
        duplicate.categories = set([c for c in self.categories])
        for attribute in self.itertraverse():
            duplicate.attributes[attribute.name] = deepcopy(attribute)
        return duplicate

    @classmethod
    def from_json(cls, data):
        """
        Loads a schema from parsed JSON data

        Parameters:
        data -- parsed json data (i.e. a dict)
        """
        attributes = data.pop('attributes', None)
        # NOTE(review): cls(**data) initially receives the raw ISO date
        # strings; they are overwritten with parsed date objects below.
        schema = cls(**data)
        if data.get('publish_date'):
            schema.publish_date = \
                datetime.strptime(data['publish_date'], '%Y-%m-%d').date()
        if data.get('retract_date'):
            schema.retract_date = \
                datetime.strptime(data['retract_date'], '%Y-%m-%d').date()
        if attributes:
            for key, attribute in iteritems(attributes):
                schema.attributes[key] = Attribute.from_json(attribute)
        return schema

    def to_json(self, deep=False):
        """ Serializes to a JSON-ready dictionary """
        data = {
            'name': self.name,
            'title': self.title,
            'description': self.description,
            'storage': self.storage,
            'publish_date': (self.publish_date
                             and self.publish_date.isoformat()),
            'retract_date': (self.retract_date
                             and self.retract_date.isoformat())
        }
        if deep:
            # Recursively serialize top-level attributes as well.
            data['attributes'] = \
                dict([(a.name, a.to_json(deep)) for a in self.itertraverse()])
        return data
def upgrade():
    """Create the initial event/group/poll schema (all UUID primary keys).

    Base tables come first; join and child tables that carry foreign keys
    ('event_group', 'poll', 'profile', role tables, 'selection',
    'profile_selection') follow, so the order must be preserved.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('event',
                    sa.Column('id', postgresql.UUID(as_uuid=True),
                              nullable=False),
                    sa.Column('name', sa.String(length=100), nullable=False),
                    sa.Column('description', sa.String(length=100),
                              nullable=True),
                    sa.Column('time', sa.String(length=100), nullable=True),
                    sa.Column('location', sa.String(length=100),
                              nullable=True),
                    sa.Column('created_at', sa.DateTime(), nullable=True),
                    sa.Column('updated_at', sa.DateTime(), nullable=True),
                    sa.Column('deleted_at', sa.DateTime(), nullable=True),
                    sa.PrimaryKeyConstraint('id')
                    )
    op.create_table('group',
                    sa.Column('id', postgresql.UUID(as_uuid=True),
                              nullable=False),
                    sa.Column('name', sa.String(length=100), nullable=False),
                    sa.Column('description', sa.String(length=100),
                              nullable=False),
                    sa.Column('group_img', sa.String(length=100),
                              nullable=False),
                    sa.Column('created_at', sa.DateTime(), nullable=True),
                    sa.Column('updated_at', sa.DateTime(), nullable=True),
                    sa.Column('deleted_at', sa.DateTime(), nullable=True),
                    sa.PrimaryKeyConstraint('id')
                    )
    op.create_table('refresh_token',
                    sa.Column('id', postgresql.UUID(as_uuid=True),
                              nullable=False),
                    sa.Column('name', sa.String(length=100), nullable=False),
                    sa.Column('token', sa.String(length=100), nullable=False),
                    sa.Column('created_at', sa.DateTime(), nullable=True),
                    sa.Column('updated_at', sa.DateTime(), nullable=True),
                    sa.Column('deleted_at', sa.DateTime(), nullable=True),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('name')
                    )
    op.create_table('tag',
                    sa.Column('id', postgresql.UUID(as_uuid=True),
                              nullable=False),
                    sa.Column('name', sa.String(length=100), nullable=False),
                    sa.Column('created_at', sa.DateTime(), nullable=True),
                    sa.Column('updated_at', sa.DateTime(), nullable=True),
                    sa.Column('deleted_at', sa.DateTime(), nullable=True),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('name')
                    )
    op.create_table('user',
                    sa.Column('id', postgresql.UUID(as_uuid=True),
                              nullable=False),
                    sa.Column('username', sa.String(length=100),
                              nullable=False),
                    sa.Column('password', sa.String(length=100),
                              nullable=False),
                    sa.Column('email', sa.String(length=100),
                              nullable=False),
                    sa.Column('created_at', sa.DateTime(), nullable=True),
                    sa.Column('updated_at', sa.DateTime(), nullable=True),
                    sa.Column('deleted_at', sa.DateTime(), nullable=True),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('username')
                    )
    op.create_table('event_group',
                    sa.Column('id', postgresql.UUID(as_uuid=True),
                              nullable=False),
                    sa.Column('event_id', postgresql.UUID(as_uuid=True),
                              nullable=True),
                    sa.Column('group_id', postgresql.UUID(as_uuid=True),
                              nullable=True),
                    sa.Column('created_at', sa.DateTime(), nullable=True),
                    sa.Column('updated_at', sa.DateTime(), nullable=True),
                    sa.Column('deleted_at', sa.DateTime(), nullable=True),
                    sa.ForeignKeyConstraint(['event_id'], ['event.id'], ),
                    sa.ForeignKeyConstraint(['group_id'], ['group.id'], ),
                    sa.PrimaryKeyConstraint('id')
                    )
    op.create_table('event_tag',
                    sa.Column('id', postgresql.UUID(as_uuid=True),
                              nullable=False),
                    sa.Column('event_id', postgresql.UUID(as_uuid=True),
                              nullable=True),
                    sa.Column('tag_id', postgresql.UUID(as_uuid=True),
                              nullable=True),
                    sa.ForeignKeyConstraint(['event_id'], ['event.id'], ),
                    sa.ForeignKeyConstraint(['tag_id'], ['tag.id'], ),
                    sa.PrimaryKeyConstraint('id')
                    )
    op.create_table('poll',
                    sa.Column('id', postgresql.UUID(as_uuid=True),
                              nullable=False),
                    sa.Column('question', sa.String(length=100),
                              nullable=False),
                    sa.Column('event_id', postgresql.UUID(as_uuid=True),
                              nullable=True),
                    sa.Column('type', sa.Enum('checklist', 'description',
                                              name='poll_type'),
                              nullable=True),
                    sa.Column('scope', sa.Enum('moderator_only',
                                               'all_members',
                                               name='poll_scope'),
                              nullable=True),
                    sa.Column('created_at', sa.DateTime(), nullable=True),
                    sa.Column('updated_at', sa.DateTime(), nullable=True),
                    sa.Column('deleted_at', sa.DateTime(), nullable=True),
                    sa.ForeignKeyConstraint(['event_id'], ['event.id'], ),
                    sa.PrimaryKeyConstraint('id')
                    )
    op.create_table('profile',
                    sa.Column('id', postgresql.UUID(as_uuid=True),
                              nullable=False),
                    sa.Column('name', sa.String(length=100), nullable=False),
                    sa.Column('profile_img', sa.String(length=100),
                              nullable=True),
                    sa.Column('phone_number', sa.String(length=100),
                              nullable=True),
                    sa.Column('description', sa.String(length=100),
                              nullable=True),
                    sa.Column('user_id', postgresql.UUID(as_uuid=True),
                              nullable=True),
                    sa.Column('created_at', sa.DateTime(), nullable=True),
                    sa.Column('updated_at', sa.DateTime(), nullable=True),
                    sa.Column('deleted_at', sa.DateTime(), nullable=True),
                    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
                    sa.PrimaryKeyConstraint('id')
                    )
    # NOTE(review): both role tables declare sa.Enum(..., name='role_type');
    # on PostgreSQL the second CREATE TYPE for the same name can fail —
    # verify this migration ran cleanly, or share the type explicitly.
    op.create_table('profile_event_role',
                    sa.Column('id', postgresql.UUID(as_uuid=True),
                              nullable=False),
                    sa.Column('profile_id', postgresql.UUID(as_uuid=True),
                              nullable=True),
                    sa.Column('event_id', postgresql.UUID(as_uuid=True),
                              nullable=True),
                    sa.Column('role', sa.Enum('leader', 'admin', 'member',
                                              name='role_type'),
                              nullable=True),
                    sa.Column('created_at', sa.DateTime(), nullable=True),
                    sa.Column('updated_at', sa.DateTime(), nullable=True),
                    sa.Column('deleted_at', sa.DateTime(), nullable=True),
                    sa.ForeignKeyConstraint(['event_id'], ['event.id'], ),
                    sa.ForeignKeyConstraint(['profile_id'], ['profile.id'], ),
                    sa.PrimaryKeyConstraint('id')
                    )
    op.create_table('profile_group_role',
                    sa.Column('id', postgresql.UUID(as_uuid=True),
                              nullable=False),
                    sa.Column('profile_id', postgresql.UUID(as_uuid=True),
                              nullable=True),
                    sa.Column('group_id', postgresql.UUID(as_uuid=True),
                              nullable=True),
                    sa.Column('role', sa.Enum('leader', 'admin', 'member',
                                              name='role_type'),
                              nullable=True),
                    sa.Column('created_at', sa.DateTime(), nullable=True),
                    sa.Column('updated_at', sa.DateTime(), nullable=True),
                    sa.Column('deleted_at', sa.DateTime(), nullable=True),
                    sa.ForeignKeyConstraint(['group_id'], ['group.id'], ),
                    sa.ForeignKeyConstraint(['profile_id'], ['profile.id'], ),
                    sa.PrimaryKeyConstraint('id')
                    )
    op.create_table('selection',
                    sa.Column('id', postgresql.UUID(as_uuid=True),
                              nullable=False),
                    sa.Column('name', sa.String(length=100), nullable=False),
                    sa.Column('poll_id', postgresql.UUID(as_uuid=True),
                              nullable=True),
                    sa.Column('created_at', sa.DateTime(), nullable=True),
                    sa.Column('updated_at', sa.DateTime(), nullable=True),
                    sa.Column('deleted_at', sa.DateTime(), nullable=True),
                    sa.ForeignKeyConstraint(['poll_id'], ['poll.id'], ),
                    sa.PrimaryKeyConstraint('id')
                    )
    op.create_table('profile_selection',
                    sa.Column('id', postgresql.UUID(as_uuid=True),
                              nullable=False),
                    sa.Column('profile_id', postgresql.UUID(as_uuid=True),
                              nullable=True),
                    sa.Column('selection_id', postgresql.UUID(as_uuid=True),
                              nullable=True),
                    sa.Column('created_at', sa.DateTime(), nullable=True),
                    sa.Column('updated_at', sa.DateTime(), nullable=True),
                    sa.Column('deleted_at', sa.DateTime(), nullable=True),
                    sa.ForeignKeyConstraint(['profile_id'], ['profile.id'], ),
                    sa.ForeignKeyConstraint(['selection_id'],
                                            ['selection.id'], ),
                    sa.PrimaryKeyConstraint('id')
                    )
Create Date: 2018-10-02 00:34:43.822096 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '32c352490472' down_revision = '3bb2c6ca0e3e' branch_labels = None depends_on = None old_options = ('initial_import', 'update_status', 'update_tax_photo') new_options = old_options + ('order_created', ) old_type = sa.Enum(*old_options, name='event_type') new_type = sa.Enum(*new_options, name='event_type') tmp_type = sa.Enum(*new_options, name='_event_type') def upgrade(): # Create a temporary "_event_type" type, convert and drop the "old" type tmp_type.create(op.get_bind(), checkfirst=False) op.execute('ALTER TABLE events ALTER COLUMN type TYPE _event_type' ' USING type::TEXT::_event_type') old_type.drop(op.get_bind(), checkfirst=False) # Create and convert to the "new" type type new_type.create(op.get_bind(), checkfirst=False) op.execute('ALTER TABLE events ALTER COLUMN type TYPE event_type' ' USING type::TEXT::event_type') tmp_type.drop(op.get_bind(), checkfirst=False)
def upgrade():
    """Create the resource-accounting schema.

    Standalone tables first ('admins', batch-story tables, 'clusters',
    'projects', 'teams'), then tables whose foreign keys reference them
    ('servers', 'services_groups', 'services', and the two metrics
    tables) — the creation order must be preserved.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('admins',
                    sa.Column('id', sa.Integer(), autoincrement=True,
                              nullable=False),
                    sa.Column('password', sa.String(length=255),
                              nullable=False),
                    sa.PrimaryKeyConstraint('id')
                    )
    # Batch-story tables record one row per collection run; 'time' is
    # unique so a run cannot be recorded twice.
    op.create_table('batches_story_billing',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('time', sa.DateTime(), nullable=True),
                    sa.Column('status', sa.Integer(), nullable=True),
                    sa.Column('put_into_queue', sa.DateTime(), nullable=True),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('time')
                    )
    op.create_table('batches_story_peaks',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('time', sa.DateTime(), nullable=True),
                    sa.Column('status', sa.Integer(), nullable=True),
                    sa.Column('put_into_queue', sa.DateTime(), nullable=True),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('time')
                    )
    op.create_table('clusters',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('title', sa.String(), nullable=False),
                    sa.Column('cpu_price', sa.Float(), nullable=True),
                    sa.Column('memory_price', sa.Float(), nullable=True),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('title')
                    )
    op.create_table('projects',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('title', sa.String(), nullable=False),
                    sa.Column('created', sa.DateTime(),
                              server_default=sa.text('now()'),
                              nullable=True),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('title')
                    )
    op.create_table('teams',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('title', sa.String(), nullable=False),
                    sa.Column('created', sa.DateTime(),
                              server_default=sa.text('now()'),
                              nullable=True),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('title')
                    )
    op.create_table('servers',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('title', sa.String(), nullable=False),
                    sa.Column('cpu_price', sa.Float(), nullable=True),
                    sa.Column('memory_price', sa.Float(), nullable=True),
                    sa.Column('cluster_id', sa.Integer(), nullable=True),
                    sa.ForeignKeyConstraint(['cluster_id'], ['clusters.id'],
                                            name='clusters_fk'),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('title')
                    )
    op.create_table('services_groups',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('title', sa.String(), nullable=False),
                    sa.Column('project_id', sa.Integer(), nullable=True),
                    sa.Column('team_id', sa.Integer(), nullable=True),
                    sa.ForeignKeyConstraint(['project_id'], ['projects.id'],
                                            name='project_fk'),
                    sa.ForeignKeyConstraint(['team_id'], ['teams.id'],
                                            name='team_fk'),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('title')
                    )
    # A service instance is unique per (title, instance, group, server).
    op.create_table('services',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('title', sa.String(), nullable=False),
                    sa.Column('instance', sa.String(), nullable=False),
                    sa.Column('server_id', sa.Integer(), nullable=True),
                    sa.Column('services_group_id', sa.Integer(),
                              nullable=True),
                    sa.ForeignKeyConstraint(['server_id'], ['servers.id'],
                                            name='servers_fk'),
                    sa.ForeignKeyConstraint(['services_group_id'],
                                            ['services_groups.id'],
                                            name='services_group_fk'),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('title', 'instance',
                                        'services_group_id', 'server_id',
                                        name='ut_2')
                    )
    # One metric value per (batch, type, service, group) — the unique
    # constraint makes re-ingesting a batch idempotent at the DB level.
    op.create_table('metrics_billing',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('value', sa.Float(), nullable=False),
                    sa.Column('type', sa.Enum('system_cpu_percent',
                                              'user_cpu_percent', 'vsize',
                                              name='billing_metric_types'),
                              nullable=True),
                    sa.Column('batch_id', sa.Integer(), nullable=True),
                    sa.Column('service_id', sa.Integer(), nullable=True),
                    sa.Column('services_group_id', sa.Integer(),
                              nullable=True),
                    sa.Column('project_id', sa.Integer(), nullable=True),
                    sa.Column('team_id', sa.Integer(), nullable=True),
                    sa.ForeignKeyConstraint(['batch_id'],
                                            ['batches_story_billing.id'],
                                            name='batch_story_billing_fk'),
                    sa.ForeignKeyConstraint(['project_id'], ['projects.id'],
                                            name='projects_fk'),
                    sa.ForeignKeyConstraint(['service_id'], ['services.id'],
                                            name='service_fk'),
                    sa.ForeignKeyConstraint(['services_group_id'],
                                            ['services_groups.id'],
                                            name='services_groups_fk'),
                    sa.ForeignKeyConstraint(['team_id'], ['teams.id'],
                                            name='teams_fk'),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('batch_id', 'type', 'service_id',
                                        'services_group_id',
                                        name='metric_billing_ut_1')
                    )
    op.create_table('metrics_peaks',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('value', sa.Float(), nullable=False),
                    sa.Column('type', sa.Enum('system_cpu_percent',
                                              'user_cpu_percent', 'vsize',
                                              name='peaks_metric_types'),
                              nullable=True),
                    sa.Column('batch_id', sa.Integer(), nullable=True),
                    sa.Column('service_id', sa.Integer(), nullable=True),
                    sa.Column('services_group_id', sa.Integer(),
                              nullable=True),
                    sa.Column('project_id', sa.Integer(), nullable=True),
                    sa.Column('team_id', sa.Integer(), nullable=True),
                    sa.ForeignKeyConstraint(['batch_id'],
                                            ['batches_story_peaks.id'],
                                            name='batch_story_peaks_fk'),
                    sa.ForeignKeyConstraint(['project_id'], ['projects.id'],
                                            name='projects_fk'),
                    sa.ForeignKeyConstraint(['service_id'], ['services.id'],
                                            name='service_fk'),
                    sa.ForeignKeyConstraint(['services_group_id'],
                                            ['services_groups.id'],
                                            name='services_groups_fk'),
                    sa.ForeignKeyConstraint(['team_id'], ['teams.id'],
                                            name='teams_fk'),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('batch_id', 'type', 'service_id',
                                        'services_group_id',
                                        name='metric_peaks_ut_1')
                    )
def upgrade():
    """Create the job-board schema: user, company, employee, job, send.

    'user' must exist before 'company'/'employee' (FK targets), and
    'company' before 'job'; 'send' references three of them, so it is
    created last.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'user',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=32), nullable=False),
        sa.Column('email', sa.String(length=64), nullable=False),
        sa.Column('password', sa.String(length=256), nullable=False),
        sa.Column('logo_img', sa.String(length=128), nullable=True),
        sa.Column('role', sa.SmallInteger(), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    # Unique indexes enforce one account per email / display name.
    op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=True)
    op.create_index(op.f('ix_user_name'), 'user', ['name'], unique=True)
    op.create_table(
        'company',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('website', sa.String(length=128), nullable=True),
        sa.Column('oneword', sa.String(length=256), nullable=True),
        sa.Column('description', sa.String(length=256), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_company_user_id'), 'company', ['user_id'],
                    unique=False)
    op.create_table(
        'employee',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('sex', sa.Enum('NONE', 'MALE', 'FEMALE', name='sextype'),
                  nullable=True),
        sa.Column('location', sa.String(length=128), nullable=True),
        sa.Column('description', sa.String(length=256), nullable=True),
        sa.Column('resume', sa.String(length=128), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
        sa.PrimaryKeyConstraint('id'))
    # Jobs are removed automatically when their company row is deleted.
    op.create_table(
        'job',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=64), nullable=True),
        sa.Column('wage', sa.String(length=64), nullable=True),
        sa.Column('location', sa.String(length=64), nullable=True),
        sa.Column('company_id', sa.Integer(), nullable=True),
        sa.Column('description', sa.String(length=256), nullable=True),
        sa.Column('requirement', sa.String(length=256), nullable=True),
        sa.ForeignKeyConstraint(['company_id'], ['company.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))
    # 'send' tracks an application: employee -> job, with review status.
    op.create_table(
        'send',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('company_id', sa.Integer(), nullable=True),
        sa.Column('job_id', sa.Integer(), nullable=True),
        sa.Column('employee_id', sa.Integer(), nullable=True),
        sa.Column('qualify', sa.Enum('UNREAD', 'READ', 'REFUSE', 'ACCEPT',
                                     name='qualify_type'), nullable=True),
        sa.ForeignKeyConstraint(['company_id'], ['company.id'], ),
        sa.ForeignKeyConstraint(['employee_id'], ['employee.id'], ),
        sa.ForeignKeyConstraint(['job_id'], ['job.id'], ),
        sa.PrimaryKeyConstraint('id'))
Create Date: 2015-04-10 12:36:51.619419 """ # revision identifiers, used by Alembic. revision = '31cd4f4891ec' down_revision = '23530d604b96' from alembic import op from sqlalchemy.dialects.postgresql import ENUM import sqlalchemy as sa OLD_ENUM = ['rfc4733', 'inband', 'info'] NEW_ENUM = ['rfc4733', 'inband', 'info', 'auto'] old_type = sa.Enum(*OLD_ENUM, name='pjsip_dtmf_mode_values') new_type = sa.Enum(*NEW_ENUM, name='pjsip_dtmf_mode_values_v2') tcr = sa.sql.table('ps_endpoints', sa.Column('dtmf_mode', new_type, nullable=True)) def upgrade(): context = op.get_context() # Upgrading to this revision WILL clear your directmedia values. if context.bind.dialect.name != 'postgresql': op.alter_column('ps_endpoints', 'dtmf_mode', type_=new_type, existing_type=old_type)
sa.sql.column('typeval'), ) dialaction_action = sa.Enum( 'none', 'endcall:busy', 'endcall:congestion', 'endcall:hangup', 'user', 'group', 'queue', 'voicemail', 'extension', 'outcall', 'application:callbackdisa', 'application:disa', 'application:directory', 'application:faxtomail', 'application:voicemailmain', 'application:password', 'sound', 'custom', 'ivr', 'conference', 'switchboard', 'application:custom', name='dialaction_action', ) extenumbers_type = sa.Enum( 'extenfeatures',
""" from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql # revision identifiers, used by Alembic. revision = '1176fec745c0' down_revision = 'a82a9b16e320' branch_labels = None depends_on = None old_options = ('PAYMENT', 'DISBURSEMENT', 'WITHDRAWAL') new_options = ('PAYMENT', 'DEPOSIT', 'WITHDRAWAL', 'EXCHANGE') tmp_options = sorted(old_options + ('DEPOSIT', 'EXCHANGE')) old_type = sa.Enum(*old_options, name='transfertypeenum') new_type = sa.Enum(*new_options, name='transfertypeenum') tmp_type = sa.Enum(*tmp_options, name='_transfertypeenum') tcr = sa.sql.table('credit_transfer', sa.Column('transfer_type', new_type, nullable=False)) def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column( 'credit_transfer', sa.Column('transfer_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True))
def upgrade():
    """Introduce testsuite/testgroup tables and migrate existing tests.

    Schema changes: create 'testsuite', 'testgroup' and the
    'testgroup_test' M2M table, add test.suite_id, then drop the old
    test.group/group_sha columns. Between the DDL steps, every existing
    'test' row is assigned to a suite (from its old 'group' value, or
    'default') and to a group (from its package, or the last dotted
    component of its name), with per-group counters accumulated.
    """
    from changes.constants import Result

    # Lightweight table stubs for data manipulation (not DDL).
    testsuites_table = table(
        'testsuite',
        sa.Column('id', sa.GUID(), nullable=False),
        sa.Column('build_id', sa.GUID(), nullable=False),
        sa.Column('project_id', sa.GUID(), nullable=False),
        sa.Column('name_sha', sa.String(length=40), nullable=False),
        sa.Column('name', sa.Text(), nullable=True),
        sa.Column('date_created', sa.DateTime(), nullable=True),
    )
    testgroups_table = table(
        'testgroup',
        sa.Column('id', sa.GUID(), nullable=False),
        sa.Column('build_id', sa.GUID(), nullable=False),
        sa.Column('project_id', sa.GUID(), nullable=False),
        sa.Column('name_sha', sa.String(length=40), nullable=False),
        sa.Column('duration', sa.Integer(), nullable=True),
        sa.Column('num_tests', sa.Integer(), nullable=True),
        sa.Column('num_failed', sa.Integer(), nullable=True),
        sa.Column('name', sa.Text(), nullable=True),
        sa.Column('date_created', sa.DateTime(), nullable=True),
    )
    testgroups_m2m_table = table(
        'testgroup_test',
        sa.Column('group_id', sa.GUID(), nullable=False),
        sa.Column('test_id', sa.GUID(), nullable=False),
    )
    testcases_table = table(
        'test',
        sa.Column('id', sa.GUID(), nullable=False),
        sa.Column('build_id', sa.GUID(), nullable=False),
        sa.Column('project_id', sa.GUID(), nullable=False),
        sa.Column('package', sa.Text(), nullable=True),
        sa.Column('name', sa.Text(), nullable=True),
        sa.Column('group', sa.Text(), nullable=True),
        sa.Column('suite_id', sa.GUID(), nullable=True),
        sa.Column('duration', sa.Integer(), nullable=True),
        sa.Column('result', sa.Enum(), nullable=True),
    )

    connection = op.get_bind()

    ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'testsuite',
        sa.Column('id', sa.GUID(), nullable=False),
        sa.Column('build_id', sa.GUID(), nullable=False),
        sa.Column('project_id', sa.GUID(), nullable=False),
        sa.Column('name_sha', sa.String(length=40), nullable=False),
        sa.Column('name', sa.Text(), nullable=True),
        sa.Column('date_created', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['build_id'], ['build.id'], ),
        sa.ForeignKeyConstraint(['project_id'], ['project.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('build_id', 'name_sha', name='_suite_key'))
    op.create_table(
        'testgroup',
        sa.Column('id', sa.GUID(), nullable=False),
        sa.Column('build_id', sa.GUID(), nullable=False),
        sa.Column('project_id', sa.GUID(), nullable=False),
        sa.Column('suite_id', sa.GUID(), nullable=True),
        sa.Column('parent_id', sa.GUID(), nullable=True),
        sa.Column('name_sha', sa.String(length=40), nullable=False),
        sa.Column('name', sa.Text(), nullable=True),
        sa.Column('duration', sa.Integer(), default=0, nullable=True),
        sa.Column('num_tests', sa.Integer(), default=0, nullable=True),
        sa.Column('num_failed', sa.Integer(), default=0, nullable=True),
        sa.Column('data', sa.JSONEncodedDict(), nullable=True),
        sa.Column('date_created', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['build_id'], ['build.id'], ),
        sa.ForeignKeyConstraint(['parent_id'], ['testgroup.id'], ),
        sa.ForeignKeyConstraint(['project_id'], ['project.id'], ),
        sa.ForeignKeyConstraint(['suite_id'], ['testsuite.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('build_id', 'suite_id', 'name_sha',
                            name='_group_key'))
    op.create_table('testgroup_test',
                    sa.Column('group_id', sa.GUID(), nullable=False),
                    sa.Column('test_id', sa.GUID(), nullable=False),
                    sa.ForeignKeyConstraint(['group_id'], ['testgroup.id'], ),
                    sa.ForeignKeyConstraint(['test_id'], ['test.id'], ),
                    sa.PrimaryKeyConstraint('group_id', 'test_id'))
    op.add_column(u'test', sa.Column('suite_id', sa.GUID(), nullable=True))

    # perform data migrations
    for testcase in connection.execute(testcases_table.select()):
        # migrate group to suite
        print("Migrating TestCase %s" % (testcase.id, ))
        suite_name = testcase.group or 'default'
        suite_sha = sha1(suite_name).hexdigest()

        # Reuse the suite for this build if it was already created.
        result = connection.execute(testsuites_table.select().where(
            sa.and_(
                testsuites_table.c.build_id == testcase.build_id,
                testsuites_table.c.name_sha == suite_sha,
            )).limit(1)).fetchone()
        if not result:
            suite_id = uuid4()
            connection.execute(testsuites_table.insert().values(
                id=suite_id,
                build_id=testcase.build_id,
                project_id=testcase.project_id,
                name=suite_name,
                name_sha=suite_sha,
                date_created=datetime.utcnow(),
            ))
        else:
            suite_id = result[0]

        connection.execute(testcases_table.update().where(
            testcases_table.c.id == testcase.id,
        ).values({
            testcases_table.c.suite_id: suite_id,
        }))

        # add package as group
        # BUGFIX: use [-1] instead of [1] — when the name contains no '.'
        # and package is unset, rsplit returns a single element and the
        # old [1] index raised IndexError, aborting the migration.
        group_name = testcase.package or testcase.name.rsplit('.', 1)[-1]
        group_sha = sha1(group_name).hexdigest()

        result = connection.execute(testgroups_table.select().where(
            sa.and_(
                testgroups_table.c.build_id == testcase.build_id,
                testgroups_table.c.name_sha == group_sha,
            )).limit(1)).fetchone()
        if not result:
            group_id = uuid4()
            connection.execute(testgroups_table.insert().values(
                id=group_id,
                build_id=testcase.build_id,
                project_id=testcase.project_id,
                name=group_name,
                name_sha=group_sha,
                date_created=datetime.utcnow(),
                duration=0,
                num_tests=0,
                num_failed=0,
            ))
        else:
            group_id = result[0]

        # Accumulate counters on the (possibly pre-existing) group row.
        update_values = {
            testgroups_table.c.num_tests: testgroups_table.c.num_tests + 1,
            testgroups_table.c.duration:
                testgroups_table.c.duration + testcase.duration,
        }
        if testcase.result == Result.failed.value:
            update_values[testgroups_table.c.num_failed] = \
                testgroups_table.c.num_failed + 1

        connection.execute(testgroups_m2m_table.insert().values({
            testgroups_m2m_table.c.group_id: group_id,
            testgroups_m2m_table.c.test_id: testcase.id,
        }))

        connection.execute(testgroups_table.update().where(
            testgroups_table.c.id == group_id,
        ).values(update_values))

    # Old denormalized columns are fully superseded by the new tables.
    op.drop_column(u'test', u'group')
    op.drop_column(u'test', u'group_sha')
class Group(Base, mixins.Timestamps):
    """A named group of users.

    Membership is a many-to-many to ``User`` via the ``user_group`` table.
    The group's ACL (``__acl__``) and its ``type`` are derived from the
    ``joinable_by``/``readable_by``/``writeable_by`` enum columns.
    """

    __tablename__ = 'group'

    id = sa.Column(sa.Integer, autoincrement=True, primary_key=True)

    # We don't expose the integer PK to the world, so we generate a short
    # random string to use as the publicly visible ID.
    pubid = sa.Column(sa.Text(),
                      default=pubid.generate,
                      unique=True,
                      nullable=False)
    authority = sa.Column(sa.UnicodeText(), nullable=False)
    name = sa.Column(sa.UnicodeText(), nullable=False, index=True)

    creator_id = sa.Column(
        sa.Integer, sa.ForeignKey('user.id'))
    creator = sa.orm.relationship('User')

    description = sa.Column(sa.UnicodeText())

    #: Which type of user is allowed to join this group, possible values are:
    #: authority, None
    joinable_by = sa.Column(sa.Enum(JoinableBy, name='group_joinable_by'),
                            nullable=True)

    #: Which type of user is allowed to read annotations in this group,
    #: possible values are: authority, members, world
    readable_by = sa.Column(sa.Enum(ReadableBy, name='group_readable_by'),
                            nullable=True,
                            index=True)

    #: Which type of user is allowed to write to this group, possible values
    #: are: authority, members
    writeable_by = sa.Column(sa.Enum(WriteableBy, name='group_writeable_by'),
                             nullable=True)

    # Group membership
    members = sa.orm.relationship(
        'User', secondary='user_group', backref=sa.orm.backref(
            'groups', order_by='Group.name'))

    scopes = sa.orm.relationship('GroupScope',
                                 backref='group',
                                 cascade='all, delete-orphan')

    def __init__(self, **kwargs):
        # NOTE(review): adds nothing over the default constructor; kept for
        # interface stability.
        super(Group, self).__init__(**kwargs)

    @sa.orm.validates('name')
    def validate_name(self, key, name):
        # Enforce the length bounds on every assignment, not only at the DB.
        if not GROUP_NAME_MIN_LENGTH <= len(name) <= GROUP_NAME_MAX_LENGTH:
            raise ValueError('name must be between {min} and {max} characters '
                             'long'.format(min=GROUP_NAME_MIN_LENGTH,
                                           max=GROUP_NAME_MAX_LENGTH))
        return name

    @property
    def slug(self):
        """A version of this group's name suitable for use in a URL."""
        return slugify.slugify(self.name)

    @property
    def type(self):
        """
        The "type" of this group, e.g. "open" or "private".

        :rtype: string
        :raises ValueError: if the type of the group isn't recognized
        """
        # Compare this group's permission flags against each known
        # flag combination to classify it.
        self_type_flags = TypeFlags(
            joinable_by=self.joinable_by,
            readable_by=self.readable_by,
            writeable_by=self.writeable_by)

        for type_, type_flags in (('open', OPEN_GROUP_TYPE_FLAGS),
                                  ('private', PRIVATE_GROUP_TYPE_FLAGS),
                                  ('restricted', RESTRICTED_GROUP_TYPE_FLAGS)):
            if self_type_flags == type_flags:
                return type_

        raise ValueError(
            "This group doesn't seem to match any known type of group. "
            "This shouldn't be in the database!")

    @property
    def is_public(self):
        # Public means anyone in the world may read the group's annotations.
        return self.readable_by == ReadableBy.world

    def __acl__(self):
        # Build the ACL from the permission flags; each helper maps a flag
        # to the principal (if any) that receives the permission.
        terms = []

        join_principal = _join_principal(self)
        if join_principal is not None:
            terms.append((security.Allow, join_principal, 'join'))

        read_principal = _read_principal(self)
        if read_principal is not None:
            terms.append((security.Allow, read_principal, 'read'))

        write_principal = _write_principal(self)
        if write_principal is not None:
            terms.append((security.Allow, write_principal, 'write'))

        # The creator always gets full admin rights.
        if self.creator:
            terms.append((security.Allow, self.creator.userid, 'admin'))

        # Deny everything not explicitly allowed above.
        terms.append(security.DENY_ALL)

        return terms

    def __repr__(self):
        return '<Group: %s>' % self.slug

    @classmethod
    def created_by(cls, session, user):
        """Return a query object filtering groups by creator."""
        return session.query(cls).filter(Group.creator == user)
""" import sqlalchemy as sa from alembic import op # revision identifiers, used by Alembic. revision = '2d6ce5f0bea2' down_revision = '3805d51e5bdd' branch_labels = None depends_on = None old_options = ('Received', 'Processing', 'Found', 'Printed', 'Mailed/Pickup', 'Not_Found', 'Letter_Generated', 'Undeliverable', 'Refunded', 'Done') new_options = old_options + ('Emailed', ) old_status = sa.Enum(*old_options, name='status') new_status = sa.Enum(*new_options, name='status') tmp_status = sa.Enum(*new_options, name='_status') def upgrade(): tmp_status.create(op.get_bind(), checkfirst=False) op.execute('ALTER TABLE suborders ALTER COLUMN status TYPE _status' ' USING status::TEXT::_status') old_status.drop(op.get_bind(), checkfirst=False) # Create and convert to the "new" type type new_status.create(op.get_bind(), checkfirst=False) op.execute('ALTER TABLE suborders ALTER COLUMN status TYPE status' ' USING status::TEXT::status') tmp_status.drop(op.get_bind(), checkfirst=False)
def upgrade():
    """Create the initial loyalty-program tables.

    ``transacciones`` and ``vouchers`` carry foreign keys into
    ``miembros`` and ``productos``, so those two parent tables are
    created first.

    NOTE(review): the ``sa.Enum`` columns here have no explicit ``name=``;
    that is fine on MySQL/SQLite but PostgreSQL requires named enum
    types -- confirm the target dialect.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'miembros',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('nombre', sa.String(length=150), nullable=False),
        sa.Column('apellido', sa.String(length=150), nullable=False),
        sa.Column('email', sa.String(length=150), nullable=False),
        # NOTE(review): stored as a plain 150-char string; confirm callers
        # hash the password before persisting it.
        sa.Column('password', sa.String(length=150), nullable=False),
        sa.Column('puntos', sa.Integer(), nullable=True),
        sa.Column('comentario', sa.String(length=300), nullable=True),
        sa.Column('fecha_de_registro', sa.DateTime(timezone=True),
                  nullable=True),
        sa.Column('fecha_de_nacimiento', sa.Date(), nullable=True),
        sa.Column('estado', sa.Enum('Activo', 'Inactivo'), nullable=False),
        sa.Column('permiso', sa.String(length=300), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'productos',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('nombre', sa.String(length=150), nullable=False),
        sa.Column('urlImage', sa.String(length=150), nullable=False),
        sa.Column('puntos', sa.Integer(), nullable=False),
        sa.Column('descripcion', sa.String(length=150), nullable=False),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'transacciones',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('descripcion', sa.String(length=300), nullable=True),
        sa.Column('tipo', sa.Enum('Acumulo', 'Canje'), nullable=False),
        sa.Column('monto', sa.Integer(), nullable=True),
        sa.Column('puntos', sa.Integer(), nullable=True),
        sa.Column('fecha_creacion', sa.DateTime(timezone=True),
                  nullable=True),
        sa.Column('fecha_ultima_actualizacion', sa.DateTime(timezone=True),
                  nullable=True),
        sa.Column('id_miembro', sa.Integer(), nullable=True),
        sa.Column('id_producto', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ['id_miembro'], ['miembros.id'], ),
        sa.ForeignKeyConstraint(
            ['id_producto'], ['productos.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'vouchers',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('estado', sa.Enum('Disponible', 'Usado'), nullable=False),
        sa.Column('fecha_de_creacion', sa.DateTime(timezone=True),
                  nullable=True),
        sa.Column('fecha_de_vencimiento', sa.DateTime(timezone=True),
                  nullable=True),
        sa.Column('id_miembro', sa.Integer(), nullable=True),
        sa.Column('id_producto', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ['id_miembro'], ['miembros.id'], ),
        sa.ForeignKeyConstraint(
            ['id_producto'], ['productos.id'], ),
        sa.PrimaryKeyConstraint('id'))
def upgrade():
    """Create the reporting schema.

    Creation order respects the foreign keys:
    ``report_table_definition`` and ``source`` first, then ``report``,
    then ``report_table_instance``, then ``cell``.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "report_table_definition",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column(
            "system",
            sa.Enum("LAW_ENFORCEMENT", "COURT_PROCESSES", "CORRECTIONS",
                    name="system"),
            nullable=True,
        ),
        sa.Column(
            "metric_type",
            sa.Enum(
                "ADMISSIONS",
                "ARRESTS",
                "POPULATION",
                "REVOCATIONS",
                "TERMINATIONS",
                name="metrictype",
            ),
            nullable=True,
        ),
        sa.Column(
            "measurement_type",
            sa.Enum(
                "INSTANT",
                "AVERAGE",
                "DELTA",
                "PERSON_BASED_DELTA",
                name="measurementtype",
            ),
            nullable=True,
        ),
        sa.Column("filtered_dimensions", sa.ARRAY(sa.String(length=255)),
                  nullable=True),
        sa.Column("filtered_dimension_values", sa.ARRAY(sa.String(length=255)),
                  nullable=True),
        sa.Column("aggregated_dimensions", sa.ARRAY(sa.String(length=255)),
                  nullable=True),
        sa.PrimaryKeyConstraint("id"),
        # A definition is identified by its metric, measurement, and
        # dimension filters/aggregations.
        sa.UniqueConstraint(
            "metric_type",
            "measurement_type",
            "filtered_dimensions",
            "filtered_dimension_values",
            "aggregated_dimensions",
        ),
    )
    op.create_table(
        "source",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("name"),
    )
    op.create_table(
        "report",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("source_id", sa.Integer(), nullable=False),
        sa.Column("type", sa.String(length=255), nullable=False),
        sa.Column("instance", sa.String(length=255), nullable=False),
        sa.Column("publish_date", sa.Date(), nullable=False),
        sa.Column(
            "acquisition_method",
            sa.Enum("SCRAPED", "UPLOADED", "MANUALLY_ENTERED",
                    name="acquisitionmethod"),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(
            ["source_id"],
            ["source.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        # One report per (source, type, instance).
        sa.UniqueConstraint("source_id", "type", "instance"),
    )
    op.create_table(
        "report_table_instance",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("report_id", sa.Integer(), nullable=False),
        sa.Column("report_table_definition_id", sa.Integer(), nullable=False),
        sa.Column("time_window_start", sa.Date(), nullable=False),
        sa.Column("time_window_end", sa.Date(), nullable=False),
        sa.Column("methodology", sa.String(length=255), nullable=True),
        sa.ForeignKeyConstraint(
            ["report_id"],
            ["report.id"],
        ),
        sa.ForeignKeyConstraint(
            ["report_table_definition_id"],
            ["report_table_definition.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        # One instance per definition per report per time window.
        sa.UniqueConstraint(
            "report_id",
            "report_table_definition_id",
            "time_window_start",
            "time_window_end",
        ),
    )
    op.create_table(
        "cell",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("report_table_instance_id", sa.Integer(), nullable=False),
        sa.Column(
            "aggregated_dimension_values",
            sa.ARRAY(sa.String(length=255)),
            nullable=False,
        ),
        sa.Column("value", sa.Numeric(), nullable=False),
        sa.ForeignKeyConstraint(
            ["report_table_instance_id"],
            ["report_table_instance.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("report_table_instance_id",
                            "aggregated_dimension_values"),
    )
class Attribute(
        Base,
        Referenceable,
        Describeable,
        Modifiable,
        ):
    """
    An object that describes how an EAV attribute is generated.

    Typically, an attribute is a meaningful property in the class data set
    (e.g. user.firstname, user.lastname, contact.address, etc..).
    Note that if the attribute's type is an object, an object_class must be
    specified as well as a flag setting whether the object is to be rendered
    inline.

    Resulting attribute objects can then be used to produce forms such as
    Zope-style schema field.
    """

    __tablename__ = 'attribute'

    # Override of the mixin's name column.
    # NOTE(review): the original comment claimed a "maximum character
    # length of 20" but the column allows 100 -- confirm intent.
    name = sa.Column(sa.String(100), nullable=False)

    # Override for nullable=True
    title = sa.Column(sa.Unicode, nullable=True)

    # The FK constraint itself is declared in __table_args__ below.
    schema_id = sa.Column(
        sa.Integer,
        nullable=False,
        )

    schema = orm.relationship(
        Schema,
        backref=orm.backref(
            name='attributes',
            collection_class=attribute_mapped_collection('name'),
            order_by='Attribute.order',
            cascade='all, delete, delete-orphan'),
        doc=u'The schema that this attribute belongs to')

    # Self-referential parent link; used by 'section' attributes to nest
    # sub-attributes.
    parent_attribute_id = sa.Column(sa.Integer)

    attributes = orm.relationship(
        'Attribute',
        collection_class=attribute_mapped_collection('name'),
        order_by='Attribute.order',
        cascade='all, delete',
        backref=orm.backref(
            name='parent_attribute',
            remote_side='Attribute.id'))

    type = sa.Column(
        sa.Enum(*sorted([
            'number', 'choice', 'date', 'datetime',
            'string', 'text', 'section', 'blob'
            ]), name='attribute_type'),
        nullable=False)

    is_collection = sa.Column(
        sa.Boolean,
        nullable=False,
        default=False,
        server_default=sa.sql.false(),
        doc='Single or Multiple choice answers')

    is_shuffled = sa.Column(
        sa.Boolean,
        nullable=False,
        default=False,
        server_default=sa.sql.false(),
        doc='Display answer choices in random order')

    is_required = sa.Column(
        sa.Boolean,
        nullable=False,
        default=False,
        server_default=sa.sql.false(),
        doc='Forces attribute value to be required')

    is_private = sa.Column(
        sa.Boolean,
        nullable=False,
        default=False,
        server_default=sa.sql.false(),
        doc='Stores Personnally Identifiable Information (PII).')

    is_system = sa.Column(
        sa.Boolean,
        nullable=False,
        default=False,
        server_default=sa.sql.false(),
        doc='Is a variable that can only be managed by underlying system')

    is_readonly = sa.Column(
        sa.Boolean,
        nullable=False,
        default=False,
        server_default=sa.sql.false(),
        doc='The user may not modify this variable')

    widget = sa.Column(
        sa.Enum(*sorted(['checkbox', 'email', 'radio', 'select', 'phone']),
                name='attribute_widget'))

    value_min = sa.Column(sa.Integer, doc='Minimum length or value')
    value_max = sa.Column(sa.Integer, doc='Maximum length or value')
    collection_min = sa.Column(sa.Integer, doc='Minimum list length')
    collection_max = sa.Column(sa.Integer, doc='Maximum list length')
    pattern = sa.Column(sa.String, doc='String format regular expression')
    decimal_places = sa.Column(sa.Integer)
    constraint_logic = sa.Column(sa.UnicodeText)
    skip_logic = sa.Column(sa.UnicodeText)
    order = sa.Column(sa.Integer, nullable=False, doc='Display order')

    @orm.validates('name')
    def validate_name(self, key, name):
        # Names must match the identifier pattern and may not collide with
        # reserved words.
        if not RE_VALID_NAME.match(name):
            raise ValueError('Invalid name: "%s"' % name)
        if name in RESERVED_WORDS:
            raise ValueError('Cannot use reserved word as attribute name: %s'
                             % name)
        return name

    @orm.validates('schema')
    def validate_schema(self, key, schema):
        """ Cascade schema setting to children (SA won't do this) """
        if self.type == 'section':
            for subattribute in itervalues(self.attributes):
                subattribute.schema = schema
        return schema

    @orm.validates('parent_attribute')
    def validate_parent_attribute(self, key, parent_attribute):
        """ Pass the schema if being set as a subattribute (SA won't do this) """
        if parent_attribute:
            self.schema = parent_attribute.schema
        return parent_attribute

    def itertraverse(self):
        """ Useful for iterating through attributes as a hierarchy """
        return iter(sorted(itervalues(self.attributes),
                           key=lambda a: a.order))

    def iterlist(self):
        """ Flattens the current attribute into an sorted list with all children """
        yield self
        # Only sections have children to recurse into.
        if self.type == 'section':
            for a in chain.from_iterable(a.iterlist()
                                         for a in self.itertraverse()):
                yield a

    def iterchoices(self):
        """ Useful for iterating through attributes in order """
        # TODO: Maybe apply shuffling here?
        return iter(sorted(itervalues(self.choices), key=lambda c: c.order))

    @declared_attr
    def __table_args__(cls):
        return (
            sa.ForeignKeyConstraint(
                columns=['schema_id'],
                refcolumns=['schema.id'],
                name='fk_%s_schema_id' % cls.__tablename__,
                ondelete='CASCADE'),
            sa.ForeignKeyConstraint(
                columns=['parent_attribute_id'],
                refcolumns=['attribute.id'],
                name='fk_%s_attribute_id' % cls.__tablename__,
                ondelete='CASCADE'),
            # DEFERRED so rows can be reordered within a single transaction.
            sa.UniqueConstraint(
                'schema_id', 'order',
                name='uq_%s_order' % cls.__tablename__,
                deferrable=True,
                initially='DEFERRED'),
            sa.CheckConstraint(
                "collection_min IS NULL OR collection_min >= 0",
                name='ck_%s_unsigned_collection_min' % cls.__tablename__),
            sa.CheckConstraint(
                "collection_max IS NULL OR collection_max >= 0",
                name='ck_%s_unsigned_collection_max' % cls.__tablename__),
            sa.CheckConstraint(
                "collection_min < collection_max",
                name='ck_%s_valid_collection' % cls.__tablename__),
            sa.CheckConstraint(
                "value_min IS NULL OR value_min >= 0",
                name='ck_%s_unsigned_value_min' % cls.__tablename__),
            sa.CheckConstraint(
                "value_max IS NULL OR value_max >= 0",
                name='ck_%s_unsigned_value_max' % cls.__tablename__),
            sa.CheckConstraint(
                "value_min <= value_max",
                name='ck_%s_valid_value' % cls.__tablename__),
            # decimal_places only makes sense for 'number' attributes.
            sa.CheckConstraint(
                "CASE WHEN type != 'number' THEN decimal_places IS NULL END",
                name='ck_%s_number_decimal_places' % cls.__tablename__),
            # Widget choice must be compatible with the attribute type.
            sa.CheckConstraint("""
                CASE
                    WHEN widget IS NOT NULL THEN
                        CASE type
                            WHEN 'string' THEN widget IN ('phone', 'email')
                            WHEN 'choice' THEN
                                CASE
                                    WHEN is_collection THEN
                                        widget IN ('select', 'checkbox')
                                    ELSE
                                        widget IN ('select', 'radio')
                                END
                        END
                END
                """, name='ck_%s_type_widget' % cls.__tablename__))

    def __copy__(self):
        # Shallow copy of scalar settings only; children and choices are
        # handled by __deepcopy__.
        # NOTE(review): 'is_private' is not in this key list, so copies
        # reset it to the default -- confirm whether that is intentional.
        keys = ('name', 'title', 'description', 'type', 'is_collection',
                'is_required', 'is_system', 'is_readonly', 'is_shuffled',
                'widget', 'skip_logic', 'constraint_logic', 'decimal_places',
                'collection_min', 'collection_max', 'value_min', 'value_max',
                'pattern', 'order')
        return self.__class__(**dict([(k, getattr(self, k)) for k in keys]))

    def __deepcopy__(self, memo):
        # Copy the attribute itself, then recursively copy its choices and
        # (for sections) its sub-attributes.
        duplicate = copy(self)
        for choice in itervalues(self.choices):
            duplicate.choices[choice.name] = deepcopy(choice)
        for attribute in itervalues(self.attributes):
            duplicate.attributes[attribute.name] = deepcopy(attribute)
        return duplicate

    @classmethod
    def from_json(cls, data):
        """
        Loads an attribute from parsed JSON data

        Parameters:
        data -- parsed json data (i.e. a dict)
        """
        # Pop the nested collections first so cls(**data) only receives
        # scalar column values.
        attributes = data.pop('attributes', None)
        choices = data.pop('choices', None)
        attribute = cls(**data)
        if attributes:
            for key, sub in iteritems(attributes):
                attribute.attributes[key] = Attribute.from_json(sub)
        if choices:
            for key, choice in iteritems(choices):
                attribute.choices[key] = Choice.from_json(choice)
        return attribute

    def to_json(self, deep=False):
        """ Serializes to a JSON-ready dictionary """
        data = {
            'name': self.name,
            'title': self.title,
            'description': self.description,
            'type': self.type,
            'is_required': self.is_required,
            'is_collection': self.is_collection,
            'is_private': self.is_private,
            'is_system': self.is_system,
            'is_readonly': self.is_readonly,
            'is_shuffled': self.is_shuffled,
            'value_min': self.value_min,
            'value_max': self.value_max,
            'pattern': self.pattern,
            'decimal_places': self.decimal_places,
            'constraint_logic': self.constraint_logic,
            'skip_logic': self.skip_logic,
            'collection_min': self.collection_min,
            'collection_max': self.collection_max,
            'order': self.order,
        }
        if deep:
            data['attributes'] = \
                dict([(a.name, a.to_json(deep))
                      for a in itervalues(self.attributes)])
            data['choices'] = \
                dict([(c.name, c.to_json(deep))
                      for c in itervalues(self.choices)])
        return data

    def apply(self, data):
        # Apply a parsed JSON payload to this attribute in place.
        self.name = data['name']
        self.title = data['title']
        self.description = data['description']
        self.type = data['type']

        # Sections are containers; the flags below only apply to
        # data-bearing attributes.
        # NOTE(review): indentation reconstructed -- confirm all four flag
        # assignments belong inside this guard.
        if self.type != 'section':
            self.is_required = data['is_required']
            self.is_private = data['is_private']
            self.is_readonly = data['is_readonly']
            self.is_system = data['is_system']

        if self.type in ('string', 'number', 'choice'):
            self.value_min = data['value_min']
            self.value_max = data['value_max']

        if self.type == 'number':
            self.decimal_places = data['decimal_places']

        if self.type == 'string':
            self.pattern = data['pattern']

        if self.type == 'choice':
            self.is_collection = data['is_collection']
            self.is_shuffled = data['is_shuffled']
            # Drop choices no longer present, then upsert the incoming ones
            # and renumber their display order.
            new_codes = set(c['name'] for c in data['choices'])
            old_codes = list(iterkeys(self.choices))
            for code in old_codes:
                if code not in new_codes:
                    del self.choices[code]
            for i, choice_data in enumerate(data['choices']):
                name = choice_data['name']
                if name in self.choices:
                    choice = self.choices[name]
                else:
                    self.choices[name] = choice = Choice(name=name)
                choice.title = choice_data['title']
                choice.order = i
def type(cls) -> sa.Column: return sa.Column(sa.Enum(ValueType), nullable=False)
Revision ID: 30bcf72d7430 Revises: 4d09adff27fb Create Date: 2015-12-08 13:09:19.160565 """ # revision identifiers, used by Alembic. revision = '30bcf72d7430' down_revision = '4d09adff27fb' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql old_rating_ride_reason_enum = sa.Enum('late', 'no_credit_card', 'bad_itinerary', 'dirty_taxi', name='rating_ride_reason_enum') tmp_rating_ride_reason_enum = sa.Enum('ko', 'payment', 'courtesy', 'route', 'cleanliness', 'late', 'no_credit_card', 'bad_itinerary', 'dirty_taxi', name='_rating_ride_reason_enum') new_rating_ride_reason_enum = sa.Enum('ko', 'payment', 'courtesy', 'route', 'cleanliness', 'late', 'no_credit_card', 'bad_itinerary', 'dirty_taxi', name='rating_ride_reason_enum') old_reporting_customer_reason_enum = sa.Enum('late', 'aggressive', 'no_show', name='reporting_customer_reason_enum') tmp_reporting_customer_reason_enum = sa.Enum('ko', 'payment', 'courtesy', 'route', 'cleanliness', 'late', 'aggressive', 'no_show', name='_reporting_customer_reason_enum') new_reporting_customer_reason_enum = sa.Enum('ko', 'payment', 'courtesy', 'route', 'cleanliness', 'late', 'aggressive', 'no_show', name='reporting_customer_reason_enum')