def upgrade():
    """Create the initial ``users``-schema tables (role, user, login) and the
    ``billing``-schema ``information`` table.

    Requires the ``uuid-ossp`` PostgreSQL extension to be installed, since
    every primary key defaults to ``uuid_generate_v4()``.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('role',
                    sa.Column('ID', postgresql.UUID(as_uuid=True), server_default=sa.text('uuid_generate_v4()'), nullable=False),
                    sa.Column('createdDate', sa.DateTime(), nullable=False),
                    sa.Column('lastModifiedDate', sa.DateTime(), nullable=False),
                    # restId links rows to an external/REST-facing integer id.
                    sa.Column('restId', sa.Integer(), nullable=True),
                    sa.PrimaryKeyConstraint('ID'),
                    sa.UniqueConstraint('restId'),
                    schema='users')
    op.create_table('user',
                    sa.Column('ID', postgresql.UUID(as_uuid=True), server_default=sa.text('uuid_generate_v4()'), nullable=False),
                    sa.Column('createdDate', sa.DateTime(), nullable=False),
                    sa.Column('lastModifiedDate', sa.DateTime(), nullable=False),
                    sa.Column('restId', sa.Integer(), nullable=True),
                    # NOTE(review): 'userame' looks like a typo for 'username'
                    # (the login table below spells it 'username'). It is used
                    # consistently here (column + unique constraint), so renaming
                    # it would change the deployed schema — fix in a follow-up
                    # migration, not by editing this one.
                    sa.Column('userame', sa.String(length=80), nullable=True),
                    sa.Column('firstname', sa.String(length=80), nullable=True),
                    sa.Column('lastname', sa.String(length=80), nullable=True),
                    sa.PrimaryKeyConstraint('ID'),
                    sa.UniqueConstraint('restId'),
                    sa.UniqueConstraint('userame'),
                    schema='users')
    op.create_table('information',
                    sa.Column('ID', postgresql.UUID(as_uuid=True), server_default=sa.text('uuid_generate_v4()'), nullable=False),
                    sa.Column('createdDate', sa.DateTime(), nullable=False),
                    sa.Column('lastModifiedDate', sa.DateTime(), nullable=False),
                    sa.Column('restId', sa.Integer(), nullable=True),
                    # NOTE(review): a random-UUID server default on a foreign-key
                    # column is suspicious — an INSERT that omits userId would get
                    # a UUID that almost certainly violates the FK. Presumably the
                    # default was copy-pasted from the PK column; confirm.
                    sa.Column('userId', postgresql.UUID(as_uuid=True), server_default=sa.text('uuid_generate_v4()'), nullable=True),
                    sa.ForeignKeyConstraint(
                        ['userId'],
                        ['users.user.ID'],
                    ),
                    sa.PrimaryKeyConstraint('ID'),
                    sa.UniqueConstraint('restId'),
                    schema='billing')
    op.create_table('login',
                    sa.Column('ID', postgresql.UUID(as_uuid=True), server_default=sa.text('uuid_generate_v4()'), nullable=False),
                    sa.Column('createdDate', sa.DateTime(), nullable=False),
                    sa.Column('lastModifiedDate', sa.DateTime(), nullable=False),
                    sa.Column('restId', sa.Integer(), nullable=True),
                    sa.Column('username', sa.String(length=80), nullable=True),
                    sa.Column('passwordHash', sa.String(length=80), nullable=True),
                    # NOTE(review): same questionable uuid_generate_v4() default on
                    # an FK column as in 'information' above — confirm intent.
                    sa.Column('userId', postgresql.UUID(as_uuid=True), server_default=sa.text('uuid_generate_v4()'), nullable=True),
                    sa.ForeignKeyConstraint(
                        ['userId'],
                        ['users.user.ID'],
                    ),
                    sa.PrimaryKeyConstraint('ID'),
                    # NOTE(review): a UNIQUE constraint on passwordHash means two
                    # users can never share a hash; with salted hashing that is
                    # harmless but pointless, without salts it leaks information.
                    # Verify this is intentional.
                    sa.UniqueConstraint('passwordHash'),
                    sa.UniqueConstraint('restId'),
                    sa.UniqueConstraint('username'),
                    schema='users')
def load_dialect_impl(self, dialect: dialects) -> DialectType:
    """Select the concrete column type for the active database dialect.

    PostgreSQL gets its native UUID type; every other backend falls back
    to a 32-character CHAR (the hex form of a UUID without dashes).
    """
    chosen = postgresql.UUID() if dialect.name == 'postgresql' else CHAR(32)
    return dialect.type_descriptor(chosen)
def upgrade():
    """Create the organizations tables (organizations, invitations, name
    catalog, project links and roles) plus their indexes.

    Primary keys default to ``gen_random_uuid()`` (pgcrypto / PG13+ builtin).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "organizations",
        sa.Column(
            "id",
            postgresql.UUID(as_uuid=True),
            server_default=sa.text("gen_random_uuid()"),
            nullable=False,
        ),
        sa.Column("name", sa.Text(), nullable=False),
        sa.Column("display_name", sa.Text(), nullable=False),
        sa.Column("orgtype", sa.Text(), nullable=False),
        sa.Column("link_url", sqlalchemy_utils.types.url.URLType(), nullable=False),
        sa.Column("description", sa.Text(), nullable=False),
        # Organizations start inactive/unapproved until moderated.
        sa.Column("is_active", sa.Boolean(), nullable=False, server_default=sa.sql.false()),
        sa.Column("is_approved", sa.Boolean(), nullable=True),
        sa.Column("created", sa.DateTime(), server_default=sa.text("now()"), nullable=False),
        sa.Column("date_approved", sa.DateTime(), nullable=True),
        # Case-insensitive (~*) check: names are alphanumeric with ._- allowed
        # only in the interior, never leading or trailing.
        sa.CheckConstraint(
            "name ~* '^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$'::text",
            name="organizations_valid_name",
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_organizations_created"), "organizations", ["created"], unique=False)
    op.create_table(
        "organization_invitations",
        sa.Column(
            "id",
            postgresql.UUID(as_uuid=True),
            server_default=sa.text("gen_random_uuid()"),
            nullable=False,
        ),
        sa.Column("invite_status", sa.Text(), nullable=False),
        sa.Column("token", sa.Text(), nullable=False),
        sa.Column("user_id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("organization_id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(
            ["organization_id"],
            ["organizations.id"],
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"], onupdate="CASCADE", ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        # A user may hold at most one pending invitation per organization.
        sa.UniqueConstraint(
            "user_id",
            "organization_id",
            name="_organization_invitations_user_organization_uc",
        ),
    )
    op.create_index(
        op.f("ix_organization_invitations_organization_id"),
        "organization_invitations",
        ["organization_id"],
        unique=False,
    )
    op.create_index(
        op.f("ix_organization_invitations_user_id"),
        "organization_invitations",
        ["user_id"],
        unique=False,
    )
    # NOTE(review): this index duplicates ix_organization_invitations_user_id
    # above (same table, same ["user_id"] column list) — redundant storage and
    # write overhead. Don't delete it here (the matching downgrade presumably
    # drops both names); remove it in a follow-up migration instead.
    op.create_index(
        "organization_invitations_user_id_idx",
        "organization_invitations",
        ["user_id"],
        unique=False,
    )
    op.create_table(
        "organization_name_catalog",
        sa.Column(
            "id",
            postgresql.UUID(as_uuid=True),
            server_default=sa.text("gen_random_uuid()"),
            nullable=False,
        ),
        sa.Column("normalized_name", sa.Text(), nullable=False),
        sa.Column("organization_id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(
            ["organization_id"],
            ["organizations.id"],
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint(
            "normalized_name",
            "organization_id",
            name="_organization_name_catalog_normalized_name_organization_uc",
        ),
    )
    op.create_index(
        "organization_name_catalog_normalized_name_idx",
        "organization_name_catalog",
        ["normalized_name"],
        unique=False,
    )
    op.create_index(
        "organization_name_catalog_organization_id_idx",
        "organization_name_catalog",
        ["organization_id"],
        unique=False,
    )
    op.create_table(
        "organization_project",
        sa.Column(
            "id",
            postgresql.UUID(as_uuid=True),
            server_default=sa.text("gen_random_uuid()"),
            nullable=False,
        ),
        sa.Column("is_active", sa.Boolean(), nullable=False, server_default=sa.sql.false()),
        sa.Column("organization_id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("project_id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(
            ["organization_id"],
            ["organizations.id"],
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(["project_id"], ["projects.id"], onupdate="CASCADE", ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        # Each project can be linked to a given organization at most once.
        sa.UniqueConstraint(
            "organization_id",
            "project_id",
            name="_organization_project_organization_project_uc",
        ),
    )
    op.create_index(
        "organization_project_organization_id_idx",
        "organization_project",
        ["organization_id"],
        unique=False,
    )
    op.create_index(
        "organization_project_project_id_idx",
        "organization_project",
        ["project_id"],
        unique=False,
    )
    op.create_table(
        "organization_roles",
        sa.Column(
            "id",
            postgresql.UUID(as_uuid=True),
            server_default=sa.text("gen_random_uuid()"),
            nullable=False,
        ),
        sa.Column("role_name", sa.Text(), nullable=False),
        sa.Column("user_id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("organization_id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(
            ["organization_id"],
            ["organizations.id"],
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"], onupdate="CASCADE", ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        # A user holds exactly one role per organization.
        sa.UniqueConstraint(
            "user_id",
            "organization_id",
            name="_organization_roles_user_organization_uc",
        ),
    )
    op.create_index(
        "organization_roles_organization_id_idx",
        "organization_roles",
        ["organization_id"],
        unique=False,
    )
    op.create_index(
        "organization_roles_user_id_idx",
        "organization_roles",
        ["user_id"],
        unique=False,
    )
def upgrade():
    """Create the driver/endpoint tables for the MQTT integration.

    Requires the ``uuid-ossp`` PostgreSQL extension, since every primary key
    defaults to ``uuid_generate_v4()``.

    Note: every table also declares ``sa.UniqueConstraint('uuid')`` on its
    primary-key column. That constraint is redundant (a PK is already unique)
    but it is kept so the emitted schema stays identical to what earlier runs
    of this migration produced.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'drivers',
        sa.Column('uuid', postgresql.UUID(as_uuid=True), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('driver_type', sa.Enum('mqtt', name='drivertypeenum'), nullable=True),
        sa.Column('comment', sa.String(length=128), nullable=True),
        sa.PrimaryKeyConstraint('uuid'),
        sa.UniqueConstraint('uuid'))
    op.create_table(
        'mqtt_types',
        sa.Column('uuid', postgresql.UUID(as_uuid=True), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('read_template', sa.String(length=256), nullable=False),
        sa.Column('write_template', sa.String(length=256), nullable=True),
        sa.Column('comment', sa.String(length=64), nullable=True),
        # BUG FIX: none_as_null is a boolean flag (persist Python None as SQL
        # NULL instead of JSON 'null'); the original passed 256, clearly
        # copy-pasted from the length=256 above. 256 is truthy, so spelling it
        # True preserves the effective behaviour while making intent explicit.
        sa.Column('parameters', postgresql.JSON(none_as_null=True, astext_type=sa.Text()), nullable=True),
        sa.PrimaryKeyConstraint('uuid'),
        sa.UniqueConstraint('uuid'))
    op.create_table(
        'driver_parameters',
        sa.Column('uuid', postgresql.UUID(as_uuid=True), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('driver_uuid', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('param_name', sa.String(length=64), nullable=True),
        sa.Column('param_type', sa.String(length=64), nullable=True),
        sa.Column('param_value', sa.String(length=64), nullable=True),
        sa.ForeignKeyConstraint(
            ['driver_uuid'],
            ['drivers.uuid'],
        ),
        sa.PrimaryKeyConstraint('uuid'),
        sa.UniqueConstraint('uuid'))
    op.create_table(
        'endpoints',
        sa.Column('uuid', postgresql.UUID(as_uuid=True), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('name', sa.String(length=64), nullable=True),
        sa.Column('driver_uuid', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('driver_type', sa.Enum('mqtt', name='drivertypeenum'), nullable=True),
        sa.ForeignKeyConstraint(
            ['driver_uuid'],
            ['drivers.uuid'],
        ),
        sa.PrimaryKeyConstraint('uuid'),
        sa.UniqueConstraint('uuid'))
    op.create_table(
        'mqtt_params',
        sa.Column('uuid', postgresql.UUID(as_uuid=True), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('ep_uuid', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('type_uuid', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('topic_read', sa.String(length=128), nullable=False),
        sa.Column('topic_write', sa.String(length=128), nullable=True),
        sa.ForeignKeyConstraint(
            ['ep_uuid'],
            ['endpoints.uuid'],
        ),
        sa.ForeignKeyConstraint(
            ['type_uuid'],
            ['mqtt_types.uuid'],
        ),
        sa.PrimaryKeyConstraint('uuid'),
        sa.UniqueConstraint('uuid'))
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import Enum
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import Text
from sqlalchemy import Unicode
from sqlalchemy import UniqueConstraint
from sqlalchemy.orm import relationship, backref
from sqlalchemy.dialects import postgresql as psql

# Self-referential association table pairing a parent interface with its
# slave interfaces (presumably bonding/bridging — confirm against the
# 'slaves' relationship below). Rows disappear automatically when either
# side is deleted (ondelete='CASCADE').
# NOTE(review): 'Base' and 'uuid' must be imported/defined elsewhere in
# this module; they are not visible in this chunk.
interface_slaves = Table(
    'interface_slaves', Base.metadata,
    Column('parent', psql.UUID(as_uuid=True), ForeignKey('interface.id', ondelete='CASCADE')),
    Column('slave', psql.UUID(as_uuid=True), ForeignKey('interface.id', ondelete='CASCADE')))


class Interface(Base):
    """A network interface record (continues past this chunk)."""

    __tablename__ = 'interface'

    # Application-generated UUID primary key (default set client-side).
    id = Column(psql.UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    name = Column(Unicode(128))
    # 17 characters fits a colon-separated MAC address (AA:BB:CC:DD:EE:FF).
    mac = Column(Unicode(17), nullable=False)
    node_id = Column(Unicode)
    if_type = Column(Unicode)
    # Self-referential relationship via interface_slaves; the remaining
    # relationship() arguments are defined beyond this chunk.
    slaves = relationship(
        'Interface',
def upgrade():
    """Add a nullable ``likes`` column to the ``tweets`` table."""
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): UUID is an unusual type for a column named 'likes' — a
    # like *count* would be Integer, and a reference would normally carry a
    # ForeignKey. Presumably it points at another row; confirm intent before
    # relying on it.
    op.add_column('tweets', sa.Column('likes', postgresql.UUID(as_uuid=True), nullable=True))
def downgrade():
    """Revert the schema: restore the ethereum/deliverynote columns, rename
    ``amount`` back to ``deposit``, and re-create the ``proof`` table family.

    Statement order matters: ``lot.receiver_address`` is dropped and re-added
    so the restored column carries the FK to ``common.user.ethereum_address``,
    and the ``proof`` base table must exist before its subtype tables.
    """
    op.add_column('deliverynote', sa.Column('ethereum_address', citext.CIText(), nullable=True),
                  schema=f'{get_inv()}')
    op.alter_column('deliverynote', 'amount', new_column_name='deposit', schema=f'{get_inv()}')
    op.add_column('computer', sa.Column('deliverynote_address', citext.CIText(), nullable=True),
                  schema=f'{get_inv()}')
    op.add_column('lot', sa.Column('deliverynote_address', citext.CIText(), nullable=True),
                  schema=f'{get_inv()}')
    # =====
    op.alter_column('computer', 'amount', new_column_name='deposit', schema=f'{get_inv()}')
    op.alter_column('lot', 'amount', new_column_name='deposit', schema=f'{get_inv()}')
    # =====
    op.add_column('computer', sa.Column('ethereum_address', citext.CIText(), nullable=True),
                  schema=f'{get_inv()}')
    op.add_column('user', sa.Column('ethereum_address', citext.CIText(), unique=True, nullable=True),
                  schema='common')
    # Drop and re-add so the restored column gets its foreign key back.
    op.drop_column('lot', 'receiver_address', schema=f'{get_inv()}')
    op.add_column('lot',
                  sa.Column('receiver_address', citext.CIText(),
                            sa.ForeignKey('common.user.ethereum_address'),
                            nullable=True),
                  schema=f'{get_inv()}')
    # =====
    # Base 'proof' table; the proof_* tables below share its UUID primary key
    # (joined-table inheritance style: child PK is also an FK to proof.id).
    op.create_table(
        'proof',
        sa.Column(
            'updated',
            sa.TIMESTAMP(timezone=True),
            server_default=sa.text('CURRENT_TIMESTAMP'),
            nullable=False,
            comment=
            'The last time Devicehub recorded a change for \n this thing.\n '
        ),
        sa.Column('created',
                  sa.TIMESTAMP(timezone=True),
                  server_default=sa.text('CURRENT_TIMESTAMP'),
                  nullable=False,
                  comment='When Devicehub created this.'),
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('type', sa.Unicode(), nullable=False),
        sa.Column('ethereum_hash', citext.CIText(), nullable=False),
        sa.Column('device_id', sa.BigInteger(), nullable=False),
        sa.ForeignKeyConstraint(
            ['device_id'],
            [f'{get_inv()}.device.id'],
        ),
        sa.PrimaryKeyConstraint('id'),
        schema=f'{get_inv()}')
    op.create_index(op.f('ix_proof_created'), 'proof', ['created'], unique=False,
                    schema=f'{get_inv()}')
    op.create_index(op.f('ix_proof_updated'), 'proof', ['updated'], unique=False,
                    schema=f'{get_inv()}')
    op.create_table('proof_recycling',
                    sa.Column('collection_point', citext.CIText(), nullable=False),
                    sa.Column('date', sa.DateTime(), nullable=False),
                    sa.Column('contact', citext.CIText(), nullable=False),
                    sa.Column('ticket', citext.CIText(), nullable=False),
                    sa.Column('gps_location', citext.CIText(), nullable=False),
                    sa.Column('recycler_code', citext.CIText(), nullable=False),
                    sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
                    sa.ForeignKeyConstraint(
                        ['id'],
                        [f'{get_inv()}.proof.id'],
                    ),
                    sa.PrimaryKeyConstraint('id'),
                    schema=f'{get_inv()}')
    # Proof reuse table
    op.create_table('proof_reuse',
                    sa.Column('receiver_segment', citext.CIText(), nullable=False),
                    sa.Column('id_receipt', citext.CIText(), nullable=False),
                    sa.Column('supplier_id', postgresql.UUID(as_uuid=True), nullable=True),
                    sa.Column('receiver_id', postgresql.UUID(as_uuid=True), nullable=True),
                    sa.Column('price', sa.Integer(), nullable=True),
                    sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
                    sa.ForeignKeyConstraint(
                        ['id'],
                        [f'{get_inv()}.proof.id'],
                    ),
                    sa.ForeignKeyConstraint(
                        ['receiver_id'],
                        ['common.user.id'],
                    ),
                    sa.ForeignKeyConstraint(
                        ['supplier_id'],
                        ['common.user.id'],
                    ),
                    sa.PrimaryKeyConstraint('id'),
                    schema=f'{get_inv()}')
    # Proof transfer table
    op.create_table('proof_transfer',
                    sa.Column('supplier_id', postgresql.UUID(as_uuid=True), nullable=False),
                    sa.Column('receiver_id', postgresql.UUID(as_uuid=True), nullable=False),
                    sa.Column('deposit', sa.Integer(), nullable=True),
                    sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
                    sa.ForeignKeyConstraint(
                        ['id'],
                        [f'{get_inv()}.proof.id'],
                    ),
                    sa.ForeignKeyConstraint(
                        ['receiver_id'],
                        ['common.user.id'],
                    ),
                    sa.ForeignKeyConstraint(
                        ['supplier_id'],
                        ['common.user.id'],
                    ),
                    sa.PrimaryKeyConstraint('id'),
                    schema=f'{get_inv()}')
    # ProofDataWipe table
    op.create_table('proof_data_wipe',
                    sa.Column('date', sa.DateTime(), nullable=False),
                    sa.Column(
                        'result',
                        sa.Boolean(),
                        nullable=False,
                        comment='Identifies proof datawipe as a result.'),
                    sa.Column('proof_author_id', postgresql.UUID(as_uuid=True), nullable=False),
                    sa.Column('erasure_id', postgresql.UUID(as_uuid=True), nullable=False),
                    sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
                    sa.ForeignKeyConstraint(
                        ['erasure_id'],
                        [f'{get_inv()}.erase_basic.id'],
                    ),
                    sa.ForeignKeyConstraint(
                        ['id'],
                        [f'{get_inv()}.proof.id'],
                    ),
                    sa.ForeignKeyConstraint(
                        ['proof_author_id'],
                        ['common.user.id'],
                    ),
                    sa.PrimaryKeyConstraint('id'),
                    schema=f'{get_inv()}')
    # PRoofFuntion
    op.create_table('proof_function',
                    sa.Column('disk_usage', sa.Integer(), nullable=True),
                    sa.Column('proof_author_id', postgresql.UUID(as_uuid=True), nullable=False),
                    sa.Column('rate_id', postgresql.UUID(as_uuid=True), nullable=False),
                    sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
                    sa.ForeignKeyConstraint(
                        ['id'],
                        [f'{get_inv()}.proof.id'],
                    ),
                    sa.ForeignKeyConstraint(
                        ['proof_author_id'],
                        ['common.user.id'],
                    ),
                    sa.ForeignKeyConstraint(
                        ['rate_id'],
                        [f'{get_inv()}.rate.id'],
                    ),
                    sa.PrimaryKeyConstraint('id'),
                    schema=f'{get_inv()}')
class Token(Base, mixins.Timestamps):
    """
    An API access token for a user.

    These fall into two categories:

    - Long-lived developer tokens, which are generated for an account for
      third-party integrations. These do not expire.
    - Temporary access tokens, which are currently only issued from JWTs
      generated by third party `AuthClient`s. These do expire.
    """

    __tablename__ = 'token'

    id = sqlalchemy.Column(sqlalchemy.Integer,
                           autoincrement=True,
                           primary_key=True)

    userid = sqlalchemy.Column(sqlalchemy.UnicodeText(), nullable=False)

    value = sqlalchemy.Column(sqlalchemy.UnicodeText(),
                              nullable=False,
                              unique=True)

    #: A timestamp after which this token will no longer be considered valid.
    #: A NULL value in this column indicates a token that does not expire.
    expires = sqlalchemy.Column(sqlalchemy.DateTime, nullable=True)

    #: A refresh token that can be exchanged for a new token (with a new value
    #: and expiry time). A NULL value in this column indicates a token that
    #: cannot be refreshed.
    refresh_token = sqlalchemy.Column(sqlalchemy.UnicodeText(),
                                      unique=True,
                                      nullable=True)

    #: A timestamp after which this token's refresh token will no longer be
    #: considered valid. A NULL value in this column indicates a refresh token
    #: that does not expire.
    refresh_token_expires = sqlalchemy.Column(sqlalchemy.DateTime,
                                              nullable=True)

    _authclient_id = sqlalchemy.Column('authclient_id',
                                       postgresql.UUID(),
                                       sqlalchemy.ForeignKey('authclient.id',
                                                             ondelete='cascade'),
                                       nullable=True)

    #: The authclient which created the token.
    #: A NULL value means it is a developer token.
    authclient = sqlalchemy.orm.relationship('AuthClient')

    @property
    def expired(self):
        """True if this access token has expired, False otherwise."""
        if not self.expires:
            # No expiry timestamp: the token never expires.
            return False
        return datetime.datetime.utcnow() > self.expires

    @property
    def refresh_token_expired(self):
        """True if this refresh token has expired, False otherwise."""
        if not self.refresh_token_expires:
            # No expiry timestamp: the refresh token never expires.
            return False
        return datetime.datetime.utcnow() > self.refresh_token_expires

    @property
    def ttl(self):
        """The amount of time from now until this token expires, in seconds."""
        if not self.expires:
            return None

        remaining = self.expires - datetime.datetime.utcnow()
        # int() truncates (rather than rounds) the float seconds value:
        # 2.3 becomes 2, but 2.9 also becomes 2.
        return int(remaining.total_seconds())
def upgrade(): op.create_table('account_google', sa.Column('google_user_id', sa.String(), nullable=False), sa.Column('account_id', sa.Integer(), nullable=False), sa.PrimaryKeyConstraint('google_user_id')) op.create_index('account_google_idx_account_id', 'account_google', ['account_id'], unique=False) op.create_table( 'account_stats_control', sa.Column('id', sa.Integer(), nullable=False), sa.Column('last_updated', sa.DateTime(timezone=True), nullable=False), sa.PrimaryKeyConstraint('id')) op.create_table( 'acoustid_mb_replication_control', sa.Column('id', sa.Integer(), nullable=False), sa.Column('current_schema_sequence', sa.Integer(), nullable=False), sa.Column('current_replication_sequence', sa.Integer(), nullable=True), sa.Column('last_replication_date', sa.DateTime(timezone=True), nullable=True), sa.PrimaryKeyConstraint('id')) op.create_table( 'application', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(), nullable=False), sa.Column('version', sa.String(), nullable=False), sa.Column('apikey', sa.String(), nullable=False), sa.Column('created', sa.DateTime(timezone=True), server_default=sa.text(u'CURRENT_TIMESTAMP'), nullable=True), sa.Column('active', sa.Boolean(), server_default=sa.text(u'true'), nullable=True), sa.Column('account_id', sa.Integer(), nullable=False), sa.Column('email', sa.String(), nullable=True), sa.Column('website', sa.String(), nullable=True), sa.PrimaryKeyConstraint('id')) op.create_index('application_idx_apikey', 'application', ['apikey'], unique=True) op.create_table('fingerprint_index_queue', sa.Column('fingerprint_id', sa.Integer(), nullable=False)) op.create_table('foreignid_vendor', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(), nullable=False), sa.PrimaryKeyConstraint('id')) op.create_index('foreignid_vendor_idx_name', 'foreignid_vendor', ['name'], unique=True) op.create_table('format', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(), 
nullable=False), sa.PrimaryKeyConstraint('id')) op.create_index('format_idx_name', 'format', ['name'], unique=True) op.create_table('meta', sa.Column('id', sa.Integer(), nullable=False), sa.Column('track', sa.String(), nullable=True), sa.Column('artist', sa.String(), nullable=True), sa.Column('album', sa.String(), nullable=True), sa.Column('album_artist', sa.String(), nullable=True), sa.Column('track_no', sa.Integer(), nullable=True), sa.Column('disc_no', sa.Integer(), nullable=True), sa.Column('year', sa.Integer(), nullable=True), sa.PrimaryKeyConstraint('id')) op.create_table( 'mirror_queue', sa.Column('id', sa.Integer(), nullable=False), sa.Column('txid', sa.BigInteger(), server_default=sa.text(u'txid_current()'), nullable=False), sa.Column('tblname', sa.String(), nullable=False), sa.Column('op', sa.CHAR(length=1), nullable=False), sa.Column('data', sa.Text(), nullable=False), sa.PrimaryKeyConstraint('id')) op.create_table( 'recording_acoustid', sa.Column('id', sa.Integer(), autoincrement=False, nullable=False), sa.Column('acoustid', postgresql.UUID(), nullable=False), sa.Column('recording', postgresql.UUID(), nullable=False), sa.Column('disabled', sa.Boolean(), server_default=sa.text(u'false'), nullable=False), sa.Column('created', sa.DateTime(timezone=True), server_default=sa.text(u'CURRENT_TIMESTAMP'), nullable=True), sa.Column('updated', sa.DateTime(timezone=True), nullable=True), sa.PrimaryKeyConstraint('id')) op.create_index(op.f('recording_acoustid_idx_acoustid'), 'recording_acoustid', ['acoustid'], unique=False) op.create_index('recording_acoustid_idx_uniq', 'recording_acoustid', ['recording', 'acoustid'], unique=True) op.create_table( 'replication_control', sa.Column('id', sa.Integer(), nullable=False), sa.Column('current_schema_sequence', sa.Integer(), nullable=False), sa.Column('current_replication_sequence', sa.Integer(), nullable=True), sa.Column('last_replication_date', sa.DateTime(timezone=True), nullable=True), sa.PrimaryKeyConstraint('id')) 
op.create_table( 'stats', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(), nullable=False), sa.Column('date', sa.Date(), server_default=sa.text(u'CURRENT_DATE'), nullable=False), sa.Column('value', sa.Integer(), nullable=False), sa.PrimaryKeyConstraint('id')) op.create_index('stats_idx_date', 'stats', ['date'], unique=False) op.create_index('stats_idx_name_date', 'stats', ['name', 'date'], unique=False) op.create_table( 'stats_lookups', sa.Column('id', sa.Integer(), nullable=False), sa.Column('date', sa.Date(), nullable=False), sa.Column('hour', sa.Integer(), nullable=False), sa.Column('application_id', sa.Integer(), nullable=False), sa.Column('count_nohits', sa.Integer(), server_default=sa.text(u'0'), nullable=False), sa.Column('count_hits', sa.Integer(), server_default=sa.text(u'0'), nullable=False), sa.PrimaryKeyConstraint('id')) op.create_index('stats_lookups_idx_date', 'stats_lookups', ['date'], unique=False) op.create_table( 'stats_user_agents', sa.Column('id', sa.Integer(), nullable=False), sa.Column('date', sa.Date(), nullable=False), sa.Column('application_id', sa.Integer(), nullable=False), sa.Column('user_agent', sa.String(), nullable=False), sa.Column('ip', sa.String(), nullable=False), sa.Column('count', sa.Integer(), server_default=sa.text(u'0'), nullable=False), sa.PrimaryKeyConstraint('id')) op.create_index('stats_user_agents_idx_date', 'stats_user_agents', ['date'], unique=False) op.create_table( 'track', sa.Column('id', sa.Integer(), nullable=False), sa.Column('created', sa.DateTime(timezone=True), server_default=sa.text(u'CURRENT_TIMESTAMP'), nullable=True), sa.Column('new_id', sa.Integer(), nullable=True), sa.Column('gid', postgresql.UUID(), nullable=False), sa.ForeignKeyConstraint(['new_id'], ['track.id'], name=op.f('track_fk_new_id')), sa.PrimaryKeyConstraint('id')) op.create_index('track_idx_gid', 'track', ['gid'], unique=True) op.create_table( 'account', sa.Column('id', sa.Integer(), nullable=False), 
sa.Column('name', sa.String(), nullable=False), sa.Column('apikey', sa.String(), nullable=False), sa.Column('mbuser', sa.String(), nullable=True), sa.Column('anonymous', sa.Boolean(), server_default=sa.text(u'false'), nullable=True), sa.Column('created', sa.DateTime(timezone=True), server_default=sa.text(u'CURRENT_TIMESTAMP'), nullable=True), sa.Column('lastlogin', sa.DateTime(timezone=True), nullable=True), sa.Column('submission_count', sa.Integer(), server_default=sa.text(u'0'), nullable=False), sa.Column('application_id', sa.Integer(), nullable=True), sa.Column('application_version', sa.String(), nullable=True), sa.Column('created_from', postgresql.INET(), nullable=True), sa.ForeignKeyConstraint(['application_id'], ['application.id'], name=op.f('account_fk_application_id')), sa.PrimaryKeyConstraint('id')) op.create_index('account_idx_apikey', 'account', ['apikey'], unique=True) op.create_index('account_idx_mbuser', 'account', ['mbuser'], unique=True) op.create_table( 'fingerprint', sa.Column('id', sa.Integer(), nullable=False), sa.Column('fingerprint', postgresql.ARRAY(sa.Integer()), nullable=False), sa.Column('length', sa.SmallInteger(), nullable=False), sa.Column('bitrate', sa.SmallInteger(), nullable=True), sa.Column('format_id', sa.Integer(), nullable=True), sa.Column('created', sa.DateTime(timezone=True), server_default=sa.text(u'CURRENT_TIMESTAMP'), nullable=False), sa.Column('track_id', sa.Integer(), nullable=False), sa.Column('submission_count', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['format_id'], ['format.id'], name=op.f('fingerprint_fk_format_id')), sa.ForeignKeyConstraint(['track_id'], ['track.id'], name=op.f('fingerprint_fk_track_id')), sa.PrimaryKeyConstraint('id'), sa.CheckConstraint('length > 0', name=op.f('fingerprint_length_check')), sa.CheckConstraint('bitrate > 0', name=op.f('fingerprint_bitrate_check')), ) op.create_index('fingerprint_idx_length', 'fingerprint', ['length'], unique=False) 
op.create_index('fingerprint_idx_track_id', 'fingerprint', ['track_id'], unique=False) op.create_table( 'foreignid', sa.Column('id', sa.Integer(), nullable=False), sa.Column('vendor_id', sa.Integer(), nullable=False), sa.Column('name', sa.Text(), nullable=False), sa.ForeignKeyConstraint(['vendor_id'], ['foreignid_vendor.id'], name=op.f('foreignid_fk_vendor_id')), sa.PrimaryKeyConstraint('id')) op.create_index('foreignid_idx_vendor', 'foreignid', ['vendor_id'], unique=False) op.create_index('foreignid_idx_vendor_name', 'foreignid', ['vendor_id', 'name'], unique=True) op.create_table( 'track_mbid', sa.Column('track_id', sa.Integer(), nullable=False), sa.Column('mbid', postgresql.UUID(), nullable=False), sa.Column('created', sa.DateTime(timezone=True), server_default=sa.text(u'CURRENT_TIMESTAMP'), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('submission_count', sa.Integer(), nullable=False), sa.Column('disabled', sa.Boolean(), server_default=sa.text(u'false'), nullable=False), sa.ForeignKeyConstraint(['track_id'], ['track.id'], name=op.f('track_mbid_fk_track_id')), sa.PrimaryKeyConstraint('id')) op.create_index(op.f('track_mbid_idx_mbid'), 'track_mbid', ['mbid'], unique=False) op.create_index('track_mbid_idx_uniq', 'track_mbid', ['track_id', 'mbid'], unique=False) op.create_table( 'track_meta', sa.Column('id', sa.Integer(), nullable=False), sa.Column('track_id', sa.Integer(), nullable=False), sa.Column('meta_id', sa.Integer(), nullable=False), sa.Column('created', sa.DateTime(timezone=True), server_default=sa.text(u'CURRENT_TIMESTAMP'), nullable=True), sa.Column('submission_count', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['meta_id'], ['meta.id'], name=op.f('track_meta_fk_meta_id')), sa.ForeignKeyConstraint(['track_id'], ['track.id'], name=op.f('track_meta_fk_track_id')), sa.PrimaryKeyConstraint('id')) op.create_index(op.f('track_meta_idx_meta_id'), 'track_meta', ['meta_id'], unique=False) 
op.create_index('track_meta_idx_uniq', 'track_meta', ['track_id', 'meta_id'], unique=False) op.create_table( 'track_puid', sa.Column('track_id', sa.Integer(), nullable=False), sa.Column('puid', postgresql.UUID(), nullable=False), sa.Column('created', sa.DateTime(timezone=True), server_default=sa.text(u'CURRENT_TIMESTAMP'), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('submission_count', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['track_id'], ['track.id'], name=op.f('track_puid_fk_track_id')), sa.PrimaryKeyConstraint('id')) op.create_index(op.f('track_puid_idx_puid'), 'track_puid', ['puid'], unique=False) op.create_index('track_puid_idx_uniq', 'track_puid', ['track_id', 'puid'], unique=False) op.create_table( 'account_openid', sa.Column('openid', sa.String(), nullable=False), sa.Column('account_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['account_id'], ['account.id'], name=op.f('account_openid_fk_account_id')), sa.PrimaryKeyConstraint('openid')) op.create_index('account_openid_idx_account_id', 'account_openid', ['account_id'], unique=False) op.create_table( 'source', sa.Column('id', sa.Integer(), nullable=False), sa.Column('application_id', sa.Integer(), nullable=False), sa.Column('account_id', sa.Integer(), nullable=False), sa.Column('version', sa.String(), nullable=True), sa.ForeignKeyConstraint(['account_id'], ['account.id'], name=op.f('source_fk_account_id')), sa.ForeignKeyConstraint(['application_id'], ['application.id'], name=op.f('source_fk_application_id')), sa.PrimaryKeyConstraint('id')) op.create_index('source_idx_uniq', 'source', ['application_id', 'account_id', 'version'], unique=True) op.create_table( 'stats_top_accounts', sa.Column('id', sa.Integer(), nullable=False), sa.Column('account_id', sa.Integer(), nullable=False), sa.Column('count', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['account_id'], ['account.id'], name=op.f('stats_top_accounts_fk_account_id')), 
sa.PrimaryKeyConstraint('id')) op.create_table( 'track_foreignid', sa.Column('id', sa.Integer(), nullable=False), sa.Column('track_id', sa.Integer(), nullable=False), sa.Column('foreignid_id', sa.Integer(), nullable=False), sa.Column('created', sa.DateTime(timezone=True), server_default=sa.text(u'CURRENT_TIMESTAMP'), nullable=True), sa.Column('submission_count', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['foreignid_id'], ['foreignid.id'], name=op.f('track_foreignid_fk_foreignid_id')), sa.ForeignKeyConstraint(['track_id'], ['track.id'], name=op.f('track_foreignid_fk_track_id')), sa.PrimaryKeyConstraint('id')) op.create_index(op.f('track_foreignid_idx_foreignid_id'), 'track_foreignid', ['foreignid_id'], unique=False) op.create_index('track_foreignid_idx_uniq', 'track_foreignid', ['track_id', 'foreignid_id'], unique=False) op.create_table( 'track_mbid_change', sa.Column('id', sa.Integer(), nullable=False), sa.Column('track_mbid_id', sa.Integer(), nullable=False), sa.Column('account_id', sa.Integer(), nullable=False), sa.Column('created', sa.DateTime(timezone=True), server_default=sa.text(u'CURRENT_TIMESTAMP'), nullable=True), sa.Column('disabled', sa.Boolean(), nullable=False), sa.Column('note', sa.Text(), nullable=True), sa.ForeignKeyConstraint(['account_id'], ['account.id'], name=op.f('track_mbid_change_fk_account_id')), sa.ForeignKeyConstraint( ['track_mbid_id'], ['track_mbid.id'], name=op.f('track_mbid_change_fk_track_mbid_id')), sa.PrimaryKeyConstraint('id')) op.create_index(op.f('track_mbid_change_idx_track_mbid_id'), 'track_mbid_change', ['track_mbid_id'], unique=False) op.create_table( 'track_mbid_flag', sa.Column('id', sa.Integer(), nullable=False), sa.Column('track_mbid_id', sa.Integer(), nullable=False), sa.Column('account_id', sa.Integer(), nullable=False), sa.Column('handled', sa.Boolean(), server_default=sa.text(u'false'), nullable=False), sa.Column('created', sa.DateTime(timezone=True), server_default=sa.text(u'CURRENT_TIMESTAMP'), 
nullable=True), sa.ForeignKeyConstraint(['account_id'], ['account.id'], name=op.f('track_mbid_flag_fk_account_id')), sa.ForeignKeyConstraint(['track_mbid_id'], ['track_mbid.id'], name=op.f('track_mbid_flag_fk_track_mbid_id')), sa.PrimaryKeyConstraint('id')) op.create_table( 'submission', sa.Column('id', sa.Integer(), nullable=False), sa.Column('fingerprint', postgresql.ARRAY(sa.Integer()), nullable=False), sa.Column('length', sa.SmallInteger(), nullable=False), sa.Column('bitrate', sa.SmallInteger(), nullable=True), sa.Column('format_id', sa.Integer(), nullable=True), sa.Column('created', sa.DateTime(timezone=True), server_default=sa.text(u'CURRENT_TIMESTAMP'), nullable=False), sa.Column('source_id', sa.Integer(), nullable=False), sa.Column('mbid', postgresql.UUID(), nullable=True), sa.Column('handled', sa.Boolean(), server_default=sa.text(u'false'), nullable=True), sa.Column('puid', postgresql.UUID(), nullable=True), sa.Column('meta_id', sa.Integer(), nullable=True), sa.Column('foreignid_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['foreignid_id'], ['foreignid.id'], name=op.f('submission_fk_foreignid_id')), sa.ForeignKeyConstraint(['format_id'], ['format.id'], name=op.f('submission_fk_format_id')), sa.ForeignKeyConstraint(['meta_id'], ['meta.id'], name=op.f('submission_fk_meta_id')), sa.ForeignKeyConstraint(['source_id'], ['source.id'], name=op.f('submission_fk_source_id')), sa.PrimaryKeyConstraint('id'), sa.CheckConstraint('length > 0', name=op.f('submission_length_check')), sa.CheckConstraint('bitrate > 0', name=op.f('submission_bitrate_check')), ) op.create_index('submission_idx_handled', 'submission', ['id'], unique=False, postgresql_where=sa.text(u'handled = false')) op.create_table( 'fingerprint_source', sa.Column('id', sa.Integer(), nullable=False), sa.Column('fingerprint_id', sa.Integer(), nullable=False), sa.Column('submission_id', sa.Integer(), nullable=False), sa.Column('source_id', sa.Integer(), nullable=False), sa.Column('created', 
sa.DateTime(timezone=True), server_default=sa.text(u'CURRENT_TIMESTAMP'), nullable=True), sa.ForeignKeyConstraint( ['fingerprint_id'], ['fingerprint.id'], name=op.f('fingerprint_source_fk_fingerprint_id')), sa.ForeignKeyConstraint(['source_id'], ['source.id'], name=op.f('fingerprint_source_fk_source_id')), sa.ForeignKeyConstraint( ['submission_id'], ['submission.id'], name=op.f('fingerprint_source_fk_submission_id')), sa.PrimaryKeyConstraint('id')) op.create_index('fingerprint_source_idx_submission_id', 'fingerprint_source', ['submission_id'], unique=False) op.create_table( 'track_foreignid_source', sa.Column('id', sa.Integer(), nullable=False), sa.Column('track_foreignid_id', sa.Integer(), nullable=False), sa.Column('submission_id', sa.Integer(), nullable=False), sa.Column('source_id', sa.Integer(), nullable=False), sa.Column('created', sa.DateTime(timezone=True), server_default=sa.text(u'CURRENT_TIMESTAMP'), nullable=True), sa.ForeignKeyConstraint( ['source_id'], ['source.id'], name=op.f('track_foreignid_source_fk_source_id')), sa.ForeignKeyConstraint( ['submission_id'], ['submission.id'], name=op.f('track_foreignid_source_fk_submission_id')), sa.ForeignKeyConstraint( ['track_foreignid_id'], ['track_foreignid.id'], name=op.f('track_foreignid_source_fk_track_foreignid_id')), sa.PrimaryKeyConstraint('id')) op.create_table( 'track_mbid_source', sa.Column('id', sa.Integer(), nullable=False), sa.Column('track_mbid_id', sa.Integer(), nullable=False), sa.Column('submission_id', sa.Integer(), nullable=True), sa.Column('source_id', sa.Integer(), nullable=False), sa.Column('created', sa.DateTime(timezone=True), server_default=sa.text(u'CURRENT_TIMESTAMP'), nullable=True), sa.ForeignKeyConstraint(['source_id'], ['source.id'], name=op.f('track_mbid_source_fk_source_id')), sa.ForeignKeyConstraint( ['submission_id'], ['submission.id'], name=op.f('track_mbid_source_fk_submission_id')), sa.ForeignKeyConstraint( ['track_mbid_id'], ['track_mbid.id'], 
name=op.f('track_mbid_source_fk_track_mbid_id')), sa.PrimaryKeyConstraint('id')) op.create_index(op.f('track_mbid_source_idx_source_id'), 'track_mbid_source', ['source_id'], unique=False) op.create_index(op.f('track_mbid_source_idx_track_mbid_id'), 'track_mbid_source', ['track_mbid_id'], unique=False) op.create_table( 'track_meta_source', sa.Column('id', sa.Integer(), nullable=False), sa.Column('track_meta_id', sa.Integer(), nullable=False), sa.Column('submission_id', sa.Integer(), nullable=False), sa.Column('source_id', sa.Integer(), nullable=False), sa.Column('created', sa.DateTime(timezone=True), server_default=sa.text(u'CURRENT_TIMESTAMP'), nullable=True), sa.ForeignKeyConstraint(['source_id'], ['source.id'], name=op.f('track_meta_source_fk_source_id')), sa.ForeignKeyConstraint( ['submission_id'], ['submission.id'], name=op.f('track_meta_source_fk_submission_id')), sa.ForeignKeyConstraint( ['track_meta_id'], ['track_meta.id'], name=op.f('track_meta_source_fk_track_meta_id')), sa.PrimaryKeyConstraint('id')) op.create_table( 'track_puid_source', sa.Column('id', sa.Integer(), nullable=False), sa.Column('track_puid_id', sa.Integer(), nullable=False), sa.Column('submission_id', sa.Integer(), nullable=False), sa.Column('source_id', sa.Integer(), nullable=False), sa.Column('created', sa.DateTime(timezone=True), server_default=sa.text(u'CURRENT_TIMESTAMP'), nullable=True), sa.ForeignKeyConstraint(['source_id'], ['source.id'], name=op.f('track_puid_source_fk_source_id')), sa.ForeignKeyConstraint( ['submission_id'], ['submission.id'], name=op.f('track_puid_source_fk_submission_id')), sa.ForeignKeyConstraint( ['track_puid_id'], ['track_puid.id'], name=op.f('track_puid_source_fk_track_puid_id')), sa.PrimaryKeyConstraint('id'))
def upgrade():
    # EDITED MIGRATION: backfill NULLs in existing rows, then tighten the
    # columns to NOT NULL so the schema change cannot fail on legacy data.
    bind = op.get_bind()
    session = Session(bind=bind)

    # NOTE(review): naive local time, as in the original migration — confirm
    # UTC is not required by the application before changing.
    now = datetime.datetime.now().isoformat()
    empty_uuid = '00000000-0000-0000-0000-000000000000'

    quoted_now = "'%s'" % now
    quoted_uuid = "'%s'" % empty_uuid
    empty_jsonb = "jsonb_object('{}')"

    # (table, column, SQL literal) triples, applied in the original order.
    # All values are generated locally above, so plain string formatting is
    # safe here (no external input reaches the SQL text).
    backfills = [
        ("adminnote", "created_at", quoted_now),
        ("adminnote", "creator_id", quoted_uuid),
        ("adminnote", "note", "'Empty Note'"),
        ("adminnote", "updated_at", quoted_now),
        ("adminnote", "user_id", quoted_uuid),
        ("sitedataschema", "created_at", quoted_now),
        ("sitedataschema", "schema", empty_jsonb),
        ("sitedataschema", "updated_at", quoted_now),
        ("usersitedata", "created_at", quoted_now),
        ("usersitedata", "data", empty_jsonb),
        ("usersitedata", "updated_at", quoted_now),
    ]
    sql = "UPDATE {table} SET {column} = {value} WHERE {column} IS NULL;"
    for table, column, value in backfills:
        session.execute(sql.format(table=table, column=column, value=value))
    session.commit()

    # ### commands auto generated by Alembic - please adjust! ###
    # No NULLs remain, so the NOT NULL constraints can now be applied.
    not_null_columns = [
        ("adminnote", "created_at", postgresql.TIMESTAMP()),
        ("adminnote", "creator_id", postgresql.UUID()),
        ("adminnote", "note", sa.TEXT()),
        ("adminnote", "updated_at", postgresql.TIMESTAMP()),
        ("adminnote", "user_id", postgresql.UUID()),
        ("sitedataschema", "created_at", postgresql.TIMESTAMP()),
        ("sitedataschema", "schema", postgresql.JSON(astext_type=sa.Text())),
        ("sitedataschema", "updated_at", postgresql.TIMESTAMP()),
        ("usersitedata", "created_at", postgresql.TIMESTAMP()),
        ("usersitedata", "data", postgresql.JSON(astext_type=sa.Text())),
        ("usersitedata", "updated_at", postgresql.TIMESTAMP()),
    ]
    for table, column, existing_type in not_null_columns:
        op.alter_column(table, column, existing_type=existing_type, nullable=False)
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Creates jobs/candidates plus their association table, switches users
    # to unique-email login, and seeds one candidate row.

    # Column factories: SQLAlchemy Column objects cannot be shared between
    # tables, so each call builds a fresh one.
    def _uuid_pk():
        return sa.Column(
            "id",
            postgresql.UUID(as_uuid=True),
            server_default=sa.text("uuid_generate_v4()"),
            nullable=False,
        )

    def _stamp(name):
        return sa.Column(name, sa.DateTime(), server_default=sa.text("now()"), nullable=False)

    op.create_table(
        "jobs",
        _uuid_pk(),
        _stamp("created_at"),
        _stamp("updated_at"),
        sa.Column("title", sa.String(), nullable=False),
        sa.Column("description", sa.String(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("description"),
        sa.UniqueConstraint("title"),
    )
    candidates_table = op.create_table(
        "candidates",
        _uuid_pk(),
        _stamp("created_at"),
        _stamp("updated_at"),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("email", sa.String(), nullable=False),
        sa.Column("linkedin_url", sa.String(), nullable=False),
        sa.Column("user_id", postgresql.UUID(as_uuid=True), nullable=True),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"]),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("email"),
        sa.UniqueConstraint("linkedin_url"),
    )
    # Association table: composite PK prevents duplicate candidate/job pairs.
    op.create_table(
        "candidate_jobs",
        _stamp("created_at"),
        _stamp("updated_at"),
        sa.Column("candidate_id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("job_id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(["candidate_id"], ["candidates.id"]),
        sa.ForeignKeyConstraint(["job_id"], ["jobs.id"]),
        sa.PrimaryKeyConstraint("candidate_id", "job_id"),
    )
    # users: add unique email, drop the password hash column.
    op.add_column("users", sa.Column("email", sa.String(), nullable=False))
    op.create_unique_constraint(None, "users", ["email"])
    op.drop_column("users", "hashed_password")
    # Seed a single known candidate.
    op.bulk_insert(
        candidates_table,
        [
            {
                "id": "518efc42-4b0e-4618-97f0-c4a5277baecd",
                "name": "Ezequiel",
                "email": "*****@*****.**",
                "linkedin_url": "https://www.linkedin.com/in/epicatto/",
            },
        ],
    )
CUBEDASH_SCHEMA = "cubedash" METADATA = MetaData(schema=CUBEDASH_SCHEMA) GRIDCELL_COL_SPEC = f"{CUBEDASH_SCHEMA}.gridcell" # Albers equal area. Allows us to show coverage in m^2 easily. FOOTPRINT_SRID = 3577 DATASET_SPATIAL = Table( "dataset_spatial", METADATA, # Note that we deliberately don't foreign-key to datacube tables: # - We don't want to add an external dependency on datacube core # (breaking, eg, product deletion scripts) # - they may be in a separate database. Column("id", postgres.UUID(as_uuid=True), primary_key=True, comment="Dataset ID"), Column( "dataset_type_ref", SmallInteger, comment="The ODC dataset_type id)", nullable=False, ), Column("center_time", DateTime(timezone=True), nullable=False), # When was the dataset created? # Creation_time if it has one, otherwise datacube index time. Column("creation_time", DateTime(timezone=True), nullable=False), # Nullable: Some products have no region. Column("region_code", String, comment=""), # Size of this dataset in bytes, if the product includes it.
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Blog schema: sites own posts/settings/terms; users author posts;
    # comments hang off posts; post_term_relations joins posts to terms.

    # Fresh-column factories (Column objects must not be shared across tables).
    def _uuid_id():
        return sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False)

    def _ts(name):
        return sa.Column(name, sa.DateTime(timezone=True), nullable=False)

    op.create_table(
        'sites',
        _uuid_id(),
        _ts('created_time'),
        _ts('updated_time'),
        sa.Column('domain', sa.String(length=50), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('domain'),
    )
    op.create_table(
        'users',
        _uuid_id(),
        _ts('created_time'),
        _ts('updated_time'),
        sa.Column('name', sa.String(length=50), nullable=False),
        sa.Column('password', sa.String(length=255), nullable=True),
        sa.Column('email', sa.String(length=32), nullable=True),
        sa.Column('display', sa.String(length=50), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('display'),
        sa.UniqueConstraint('email'),
        sa.UniqueConstraint('name'),
    )
    op.create_table(
        'posts',
        _uuid_id(),
        _ts('created_time'),
        _ts('updated_time'),
        sa.Column('site_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('author_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('reviser_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('type', sa.String(length=4), nullable=True),
        sa.Column('name', sa.String(length=128), nullable=True),
        sa.Column('title', sa.String(length=128), nullable=True),
        sa.Column('content', sa.Text(), nullable=True),
        sa.Column('published', sa.Boolean(), nullable=True),
        sa.Column('publish_status', sa.String(length=20), nullable=True),
        sa.Column('published_time', sa.DateTime(timezone=True), nullable=True),
        sa.Column('allow_comment', sa.Boolean(), nullable=True),
        sa.ForeignKeyConstraint(['author_id'], ['users.id']),
        sa.ForeignKeyConstraint(['reviser_id'], ['users.id']),
        sa.ForeignKeyConstraint(['site_id'], ['sites.id']),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'site_settings',
        _uuid_id(),
        _ts('created_time'),
        _ts('updated_time'),
        sa.Column('site_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('key', sa.String(length=128), nullable=True),
        sa.Column('value', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['site_id'], ['sites.id']),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'terms',
        _uuid_id(),
        _ts('created_time'),
        _ts('updated_time'),
        sa.Column('site_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('type', sa.String(length=50), nullable=True),
        sa.Column('name', sa.String(length=50), nullable=True),
        sa.Column('desc', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['site_id'], ['sites.id']),
        sa.PrimaryKeyConstraint('id'),
    )
    # comments has no updated_time column.
    op.create_table(
        'comments',
        _uuid_id(),
        _ts('created_time'),
        sa.Column('post_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('commenter_name', sa.String(length=50), nullable=True),
        sa.Column('commenter_email', sa.String(length=32), nullable=True),
        sa.Column('commenter_ip', sa.String(length=19), nullable=True),
        sa.Column('commenter_url', sa.Text(), nullable=True),
        sa.Column('content', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['post_id'], ['posts.id']),
        sa.PrimaryKeyConstraint('id'),
    )
    # Pure join table — no primary key declared in the original.
    op.create_table(
        'post_term_relations',
        sa.Column('term_id', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('post_id', postgresql.UUID(as_uuid=True), nullable=True),
        sa.ForeignKeyConstraint(['post_id'], ['posts.id']),
        sa.ForeignKeyConstraint(['term_id'], ['terms.id']),
    )
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Adds translation tables for set bonuses and item stats, plus the
    # custom-stat tables that reference them; adds ordering columns and
    # cascades item_stat deletes; drops the superseded alt_stat columns.

    def _uuid_col(name, **kwargs):
        return sa.Column(name, postgresql.UUID(as_uuid=True), **kwargs)

    def _uuid_pk():
        return _uuid_col(
            "uuid", server_default=sa.text("uuid_generate_v4()"), nullable=False
        )

    op.create_table(
        "set_bonus_translation",
        _uuid_pk(),
        _uuid_col("set_translation_id", nullable=False),
        sa.Column("locale", sa.String(), nullable=False),
        sa.ForeignKeyConstraint(
            ["set_translation_id"],
            ["set_bonus.uuid"],
            name=op.f("fk_set_bonus_translation_set_translation_id_set_bonus"),
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("uuid", name=op.f("pk_set_bonus_translation")),
    )
    op.create_table(
        "item_stat_translation",
        _uuid_pk(),
        _uuid_col("item_stat_id", nullable=False),
        sa.Column("locale", sa.String(), nullable=False),
        sa.ForeignKeyConstraint(
            ["item_stat_id"],
            ["item_stat.uuid"],
            name=op.f("fk_item_stat_translation_item_stat_id_item_stat"),
        ),
        sa.PrimaryKeyConstraint("uuid", name=op.f("pk_item_stat_translation")),
    )
    # Lookup indexes on the translation table.
    for indexed in ("item_stat_id", "locale"):
        op.create_index(
            op.f("ix_item_stat_translation_%s" % indexed),
            "item_stat_translation",
            [indexed],
            unique=False,
        )
    op.create_table(
        "set_custom_bonus",
        _uuid_pk(),
        _uuid_col("set_bonus_translation_id", nullable=False),
        sa.Column("custom_stat", sa.String(), nullable=True),
        sa.ForeignKeyConstraint(
            ["set_bonus_translation_id"],
            ["set_bonus_translation.uuid"],
            name=op.f(
                "fk_set_custom_bonus_set_bonus_translation_id_set_bonus_translation"
            ),
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("uuid", name=op.f("pk_set_custom_bonus")),
    )
    op.create_table(
        "item_custom_stat",
        _uuid_pk(),
        _uuid_col("item_stat_translation_id", nullable=False),
        sa.Column("custom_stat", sa.String(), nullable=True),
        sa.ForeignKeyConstraint(
            ["item_stat_translation_id"],
            ["item_stat_translation.uuid"],
            name=op.f(
                "fk_item_custom_stat_item_stat_translation_id_item_stat_translation"
            ),
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("uuid", name=op.f("pk_item_custom_stat")),
    )
    # Explicit ordering for slots and stats.
    op.add_column("item_slot", sa.Column("order", sa.Integer(), nullable=False))
    op.add_column("item_stat", sa.Column("order", sa.Integer(), nullable=False))
    # Recreate the item_stat -> item FK with ON DELETE CASCADE.
    op.drop_constraint("fk_item_stats_item_id_item", "item_stat", type_="foreignkey")
    op.create_foreign_key(
        op.f("fk_item_stat_item_id_item"),
        "item_stat",
        "item",
        ["item_id"],
        ["uuid"],
        ondelete="CASCADE",
    )
    op.drop_column("item_stat", "alt_stat")
    op.drop_column("set_bonus", "alt_stat")
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Initial Dappest schema. Creation order respects FK dependencies:
    # lookup tables (blockchain, ranking_name, category) before
    # dappest_user/dapp, which come before the dependent
    # review/metric/ranking/review_like tables.
    # NOTE(review): every table also declares UniqueConstraint('id') on its
    # own primary key — redundant, but kept exactly as autogenerated.
    op.create_table('mailing_list',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('email', sa.String(length=80), nullable=False),
        sa.Column('uploaded_at', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'),
        sa.UniqueConstraint('id'))
    op.create_table(
        'daily_item',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('item_name', sa.String(length=32), nullable=False),
        sa.Column('item_id', sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('id'),
        sa.UniqueConstraint('item_name'))
    op.create_table('blockchain',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=32), nullable=False),
        sa.Column('symbol', sa.String(length=16), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('id'),
        sa.UniqueConstraint('name'),
        sa.UniqueConstraint('symbol'))
    op.create_table('ranking_name',
        sa.Column('id', sa.SmallInteger(), nullable=False),
        sa.Column('name', sa.String(length=32), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('id'),
        sa.UniqueConstraint('name'))
    op.create_table('category',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=32), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('id'),
        sa.UniqueConstraint('name'))
    # Time/block window per blockchain, referenced by metric and ranking rows.
    op.create_table(
        'block_interval',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('blockchain_id', sa.Integer(), nullable=False),
        sa.Column('time_start', sa.Integer(), nullable=False),
        sa.Column('time_stop', sa.Integer(), nullable=False),
        sa.Column('block_start', sa.Integer(), nullable=False),
        sa.Column('block_stop', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['blockchain_id'],
            ['blockchain.id'],
        ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('id'))
    op.create_table(
        'dappest_user',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('address', sa.String(length=64), nullable=False),
        sa.Column('username', sa.String(length=64), nullable=False),
        sa.Column('email', sa.String(length=64), nullable=True),
        sa.Column('profile_picture', sa.String(length=100), nullable=True),
        sa.Column('blockchain_id', sa.Integer(), nullable=False),
        sa.Column('s3_id', postgresql.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('nonce', sa.String(length=9), nullable=False),
        sa.ForeignKeyConstraint(
            ['blockchain_id'],
            ['blockchain.id'],
        ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('address'),
        sa.UniqueConstraint('id'),
        sa.UniqueConstraint('s3_id'),
        sa.UniqueConstraint('username'))
    op.create_table(
        'dapp',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=80), nullable=False),
        sa.Column('url', sa.String(length=80), nullable=False),
        sa.Column('address', postgresql.ARRAY(sa.String()), nullable=False),
        sa.Column('author', postgresql.ARRAY(sa.String()), nullable=False),
        sa.Column('email', sa.String(length=80), nullable=False),
        sa.Column('logo_path', sa.String(length=100), nullable=False),
        sa.Column('screenshot', postgresql.ARRAY(sa.String()), nullable=False),
        sa.Column('tagline', sa.String(length=40), nullable=False),
        sa.Column('description', sa.String(length=100), nullable=False),
        sa.Column('whitepaper', sa.String(length=80), nullable=True),
        sa.Column('social_media', postgresql.JSONB(), nullable=False),
        sa.Column('category_id', sa.Integer(), nullable=False),
        sa.Column('blockchain_id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('s3_id', postgresql.UUID(), nullable=False),
        sa.Column('uploaded_at', sa.DateTime(), nullable=False),
        sa.Column('launch_date', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ['blockchain_id'],
            ['blockchain.id'],
        ),
        sa.ForeignKeyConstraint(
            ['category_id'],
            ['category.id'],
        ),
        sa.ForeignKeyConstraint(
            ['user_id'],
            ['dappest_user.id'],
        ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('address'),
        sa.UniqueConstraint('id'),
        sa.UniqueConstraint('logo_path'),
        sa.UniqueConstraint('name'),
        sa.UniqueConstraint('s3_id'),
        sa.UniqueConstraint('screenshot'),
        sa.UniqueConstraint('url'))
    # Same shape as dapp plus a 'status' column — a pending submission row.
    op.create_table(
        'dapp_submission',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=80), nullable=False),
        sa.Column('url', sa.String(length=80), nullable=False),
        sa.Column('address', postgresql.ARRAY(sa.String()), nullable=False),
        sa.Column('blockchain_id', sa.Integer(), nullable=False),
        sa.Column('category_id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('author', postgresql.ARRAY(sa.String()), nullable=False),
        sa.Column('email', sa.String(length=80), nullable=False),
        sa.Column('logo_path', sa.String(length=100), nullable=False),
        sa.Column('screenshot', postgresql.ARRAY(sa.String()), nullable=False),
        sa.Column('tagline', sa.String(length=40), nullable=False),
        sa.Column('description', sa.String(length=100), nullable=False),
        sa.Column('whitepaper', sa.String(length=80), nullable=True),
        sa.Column('social_media', postgresql.JSONB(), nullable=False),
        sa.Column('s3_id', postgresql.UUID(), nullable=False),
        sa.Column('status', sa.SmallInteger(), nullable=False),
        sa.Column('uploaded_at', sa.DateTime(), nullable=False),
        sa.Column('launch_date', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ['blockchain_id'],
            ['blockchain.id'],
        ),
        sa.ForeignKeyConstraint(
            ['category_id'],
            ['category.id'],
        ),
        sa.ForeignKeyConstraint(
            ['user_id'],
            ['dappest_user.id'],
        ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('id'),
        sa.UniqueConstraint('s3_id'),
        sa.UniqueConstraint('screenshot'),
        sa.UniqueConstraint('whitepaper'))
    op.create_table(
        'review',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('dapp_id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('rating', sa.SmallInteger(), nullable=False),
        sa.Column('title', sa.String(length=50), nullable=False),
        sa.Column('review', sa.Text(), nullable=False),
        sa.Column('feature', postgresql.JSONB(), nullable=False),
        sa.Column('uploaded_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ['dapp_id'],
            ['dapp.id'],
        ),
        sa.ForeignKeyConstraint(
            ['user_id'],
            ['dappest_user.id'],
        ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('id'))
    op.create_table(
        'metric',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('dapp_id', sa.Integer(), nullable=False),
        sa.Column('block_interval_id', sa.Integer(), nullable=False),
        sa.Column('data', postgresql.JSONB(), nullable=False),
        sa.ForeignKeyConstraint(
            ['block_interval_id'],
            ['block_interval.id'],
        ),
        sa.ForeignKeyConstraint(
            ['dapp_id'],
            ['dapp.id'],
        ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('id'))
    op.create_table(
        'ranking',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('dapp_id', sa.Integer(), nullable=False),
        sa.Column('block_interval_id', sa.Integer(), nullable=False),
        sa.Column('ranking_name_id', sa.SmallInteger(), nullable=False),
        sa.Column('rank', sa.SmallInteger(), nullable=False),
        sa.ForeignKeyConstraint(
            ['block_interval_id'],
            ['block_interval.id'],
        ),
        sa.ForeignKeyConstraint(
            ['dapp_id'],
            ['dapp.id'],
        ),
        sa.ForeignKeyConstraint(
            ['ranking_name_id'],
            ['ranking_name.id'],
        ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('id'))
    # One helpful/unhelpful vote per (user, review, helpful) combination.
    op.create_table(
        'review_like',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('dapp_id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('review_id', sa.Integer(), nullable=False),
        sa.Column('helpful', sa.SmallInteger(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ['dapp_id'],
            ['dapp.id'],
        ),
        sa.ForeignKeyConstraint(
            ['review_id'],
            ['review.id'],
        ),
        sa.ForeignKeyConstraint(
            ['user_id'],
            ['dappest_user.id'],
        ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('id'),
        sa.UniqueConstraint('user_id', 'review_id', 'helpful'))
def test_success(self): column = Column("my_uuid", postgresql.UUID(as_uuid=True)) _, django_field = fields.to_django_field(TestTable, column) assert isinstance(django_field, models.UUIDField)
def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.alter_column("buy_orders", "round_id", existing_type=postgresql.UUID(), nullable=False)
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Adds per-locale translation tables for sets and items, moves names out
    # of item/set into those tables, and relaxes set_bonus.stat to nullable.
    op.create_table(
        "set_translation",
        sa.Column(
            "uuid",
            postgresql.UUID(as_uuid=True),
            server_default=sa.text("uuid_generate_v4()"),
            nullable=False,
        ),
        sa.Column("set_id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("locale", sa.String(), nullable=False),
        sa.Column("name", sa.String(), nullable=False),
        sa.ForeignKeyConstraint(
            ["set_id"], ["set.uuid"], name=op.f("fk_set_translation_set_id_set")
        ),
        sa.PrimaryKeyConstraint("uuid", name=op.f("pk_set_translation")),
    )
    op.create_table(
        "item_translation",
        sa.Column(
            "uuid",
            postgresql.UUID(as_uuid=True),
            server_default=sa.text("uuid_generate_v4()"),
            nullable=False,
        ),
        sa.Column("item_id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("locale", sa.String(), nullable=False),
        sa.Column("name", sa.String(), nullable=False),
        sa.ForeignKeyConstraint(
            ["item_id"], ["item.uuid"], name=op.f("fk_item_translation_item_id_item")
        ),
        sa.PrimaryKeyConstraint("uuid", name=op.f("pk_item_translation")),
    )
    # item_condition is replaced by a JSON column directly on item.
    op.drop_index("ix_item_condition_item_id", table_name="item_condition")
    op.drop_table("item_condition")
    # NOTE(review): "condtions" looks like a typo of "conditions" — kept
    # as-is because it must match the column name the ORM model uses.
    op.add_column("item", sa.Column("condtions", sa.JSON(), nullable=True))
    op.add_column("item", sa.Column("dofus_db_id", sa.String(), nullable=False))
    op.drop_column("item", "name")  # item names now live in item_translation
    op.add_column("item_stat", sa.Column("alt_stat", sa.String(), nullable=True))
    op.add_column("set", sa.Column("dofus_db_id", sa.String(), nullable=False))
    op.drop_column("set", "name")  # set names now live in set_translation
    op.add_column("set_bonus", sa.Column("alt_stat", sa.String(), nullable=True))
    op.add_column("set_bonus", sa.Column("value", sa.Integer(), nullable=True))
    # stat becomes nullable so a bonus may carry only an alt_stat string.
    op.alter_column(
        "set_bonus",
        "stat",
        existing_type=postgresql.ENUM(
            "VITALITY", "AP", "MP", "INITIATIVE", "PROSPECTING", "RANGE",
            "SUMMON", "WISDOM", "STRENGTH", "INTELLIGENCE", "CHANCE",
            "AGILITY", "AP_PARRY", "AP_REDUCTION", "MP_PARRY", "MP_REDUCTION",
            "CRITICAL", "HEALS", "LOCK", "DODGE", "PCT_FINAL_DAMAGE", "POWER",
            "DAMAGE", "CRITICAL_DAMAGE", "NEUTRAL_DAMAGE", "EARTH_DAMAGE",
            "FIRE_DAMAGE", "WATER_DAMAGE", "AIR_DAMAGE", "REFLECT",
            "TRAP_DAMAGE", "TRAP_POWER", "PUSHBACK_DAMAGE", "PCT_SPELL_DAMAGE",
            "PCT_WEAPON_DAMAGE", "PCT_RANGED_DAMAGE", "PCT_MELEE_DAMAGE",
            "NEUTRAL_RES", "PCT_NEUTRAL_RES", "EARTH_RES", "PCT_EARTH_RES",
            "FIRE_RES", "PCT_FIRE_RES", "WATER_RES", "PCT_WATER_RES",
            "AIR_RES", "PCT_AIR_RES", "CRITICAL_RES", "PUSHBACK_RES",
            "PCT_RANGED_RES", "PCT_MELEE_RES", "PODS",
            name="stat",
        ),
        nullable=True,
    )
    op.drop_column("set_bonus", "max_value")
import uuid import sqlalchemy.orm from sqlalchemy.dialects import postgresql from .db import db from . import applications from . import security user_table = db.Table('user', db.Column('user_id', postgresql.UUID(as_uuid=True), primary_key=True, default=uuid.uuid4), db.Column('login', db.String(32), nullable=False), db.Column('name', db.String(128), nullable=False), db.Column('email', db.String(256), nullable=False), db.Column('password', db.String(256), nullable=True), db.Column('active', db.Boolean()), db.Column('confirmed_at', db.DateTime()), ) db.Index('idx_user_login', user_table.c.login, unique=True) db.Index('idx_user_email', user_table.c.email, unique=True) sqlalchemy.orm.mapper(security.User, user_table) application_settings_table = db.Table('application_settings', db.Column('application_settings_id', postgresql.UUID(as_uuid=True), primary_key=True, default=uuid.uuid4), db.Column('user_id', postgresql.UUID(as_uuid=True), nullable=False), db.Column('application_id', db.String(64), nullable=False), db.Column('settings', postgresql.JSON, nullable=False),
def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column( "game", sa.Column("player_opponent", postgresql.UUID(), nullable=True)) op.create_foreign_key("game_opponent_player_uuid_fk", "game", "player", ["player_opponent"], ["uuid"])
def upgrade():
    """Create the initial AiiDA SQLA schema (users, nodes, groups, links,
    legacy workflow tables) and install the transitive-closure machinery.

    Tables are created parents-first so every ForeignKeyConstraint can
    resolve; do not reorder the create_table calls.
    """
    # Account table — everything else hangs off db_dbuser.id.
    op.create_table('db_dbuser',
                    sa.Column('id', sa.INTEGER(), nullable=False),
                    sa.Column('email', sa.VARCHAR(length=254), autoincrement=False, nullable=True),
                    sa.Column('password', sa.VARCHAR(length=128), autoincrement=False, nullable=True),
                    sa.Column('is_superuser', sa.BOOLEAN(), autoincrement=False, nullable=False),
                    sa.Column('first_name', sa.VARCHAR(length=254), autoincrement=False, nullable=True),
                    sa.Column('last_name', sa.VARCHAR(length=254), autoincrement=False, nullable=True),
                    sa.Column('institution', sa.VARCHAR(length=254), autoincrement=False, nullable=True),
                    sa.Column('is_staff', sa.BOOLEAN(), autoincrement=False, nullable=True),
                    sa.Column('is_active', sa.BOOLEAN(), autoincrement=False, nullable=True),
                    sa.Column('last_login', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
                    sa.Column('date_joined', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
                    sa.PrimaryKeyConstraint('id', name='db_dbuser_pkey'),
                    postgresql_ignore_search_path=False)
    op.create_index('ix_db_dbuser_email', 'db_dbuser', ['email'], unique=True)
    # Legacy workflow engine tables (db_dbworkflow*).
    op.create_table('db_dbworkflow',
                    sa.Column('id', sa.INTEGER(), nullable=False),
                    sa.Column('uuid', postgresql.UUID(), autoincrement=False, nullable=True),
                    sa.Column('ctime', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
                    sa.Column('mtime', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
                    sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=True),
                    sa.Column('label', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
                    sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True),
                    sa.Column('nodeversion', sa.INTEGER(), autoincrement=False, nullable=True),
                    sa.Column('lastsyncedversion', sa.INTEGER(), autoincrement=False, nullable=True),
                    sa.Column('state', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
                    sa.Column('report', sa.TEXT(), autoincrement=False, nullable=True),
                    sa.Column('module', sa.TEXT(), autoincrement=False, nullable=True),
                    sa.Column('module_class', sa.TEXT(), autoincrement=False, nullable=True),
                    sa.Column('script_path', sa.TEXT(), autoincrement=False, nullable=True),
                    sa.Column('script_md5', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
                    sa.ForeignKeyConstraint(['user_id'], ['db_dbuser.id'], name='db_dbworkflow_user_id_fkey'),
                    sa.PrimaryKeyConstraint('id', name='db_dbworkflow_pkey'),
                    postgresql_ignore_search_path=False)
    op.create_index('ix_db_dbworkflow_label', 'db_dbworkflow', ['label'])
    # A step belongs to a workflow; step names are unique within a workflow.
    op.create_table(
        'db_dbworkflowstep',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('parent_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('name', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
        sa.Column('time', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.Column('nextcall', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
        sa.Column('state', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['parent_id'], ['db_dbworkflow.id'], name='db_dbworkflowstep_parent_id_fkey'),
        sa.ForeignKeyConstraint(['user_id'], ['db_dbuser.id'], name='db_dbworkflowstep_user_id_fkey'),
        sa.PrimaryKeyConstraint('id', name='db_dbworkflowstep_pkey'),
        sa.UniqueConstraint('parent_id', 'name', name='db_dbworkflowstep_parent_id_name_key'),
        postgresql_ignore_search_path=False)
    # Compute resources.
    op.create_table(
        'db_dbcomputer',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('uuid', postgresql.UUID(), autoincrement=False, nullable=True),
        sa.Column('name', sa.VARCHAR(length=255), autoincrement=False, nullable=False),
        sa.Column('hostname', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
        sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True),
        sa.Column('enabled', sa.BOOLEAN(), autoincrement=False, nullable=True),
        sa.Column('transport_type', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
        sa.Column('scheduler_type', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
        sa.Column('transport_params', postgresql.JSONB(), autoincrement=False, nullable=True),
        sa.Column('metadata', postgresql.JSONB(), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name='db_dbcomputer_pkey'),
        sa.UniqueConstraint('name', name='db_dbcomputer_name_key'))
    # Per-(user, computer) authentication data; one row per pair.
    op.create_table(
        'db_dbauthinfo',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('aiidauser_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('dbcomputer_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('metadata', postgresql.JSONB(), autoincrement=False, nullable=True),
        sa.Column('auth_params', postgresql.JSONB(), autoincrement=False, nullable=True),
        sa.Column('enabled', sa.BOOLEAN(), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['aiidauser_id'], ['db_dbuser.id'],
                                name='db_dbauthinfo_aiidauser_id_fkey',
                                ondelete='CASCADE', initially='DEFERRED', deferrable=True),
        sa.ForeignKeyConstraint(['dbcomputer_id'], ['db_dbcomputer.id'],
                                name='db_dbauthinfo_dbcomputer_id_fkey',
                                ondelete='CASCADE', initially='DEFERRED', deferrable=True),
        sa.PrimaryKeyConstraint('id', name='db_dbauthinfo_pkey'),
        sa.UniqueConstraint(
            'aiidauser_id', 'dbcomputer_id',
            name='db_dbauthinfo_aiidauser_id_dbcomputer_id_key'))
    # Node groups; (name, type) pairs are unique.
    op.create_table(
        'db_dbgroup',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('uuid', postgresql.UUID(), autoincrement=False, nullable=True),
        sa.Column('name', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
        sa.Column('type', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
        sa.Column('time', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True),
        sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['db_dbuser.id'],
                                name='db_dbgroup_user_id_fkey',
                                ondelete='CASCADE', initially='DEFERRED', deferrable=True),
        sa.PrimaryKeyConstraint('id', name='db_dbgroup_pkey'),
        sa.UniqueConstraint('name', 'type', name='db_dbgroup_name_type_key'))
    op.create_index('ix_db_dbgroup_name', 'db_dbgroup', ['name'])
    op.create_index('ix_db_dbgroup_type', 'db_dbgroup', ['type'])
    # Central provenance table: every calculation/data object is a node.
    # user_id is NOT NULL; computer is optional. RESTRICT prevents deleting
    # a user/computer that still owns nodes.
    op.create_table('db_dbnode',
                    sa.Column('id', sa.INTEGER(), nullable=False),
                    sa.Column('uuid', postgresql.UUID(), autoincrement=False, nullable=True),
                    sa.Column('type', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
                    sa.Column('label', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
                    sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True),
                    sa.Column('ctime', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
                    sa.Column('mtime', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
                    sa.Column('nodeversion', sa.INTEGER(), autoincrement=False, nullable=True),
                    sa.Column('public', sa.BOOLEAN(), autoincrement=False, nullable=True),
                    sa.Column('attributes', postgresql.JSONB(), autoincrement=False, nullable=True),
                    sa.Column('extras', postgresql.JSONB(), autoincrement=False, nullable=True),
                    sa.Column('dbcomputer_id', sa.INTEGER(), autoincrement=False, nullable=True),
                    sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False),
                    sa.ForeignKeyConstraint(
                        ['dbcomputer_id'], ['db_dbcomputer.id'],
                        name='db_dbnode_dbcomputer_id_fkey',
                        ondelete='RESTRICT', initially='DEFERRED', deferrable=True),
                    sa.ForeignKeyConstraint(['user_id'], ['db_dbuser.id'],
                                            name='db_dbnode_user_id_fkey',
                                            ondelete='RESTRICT', initially='DEFERRED', deferrable=True),
                    sa.PrimaryKeyConstraint('id', name='db_dbnode_pkey'),
                    postgresql_ignore_search_path=False)
    op.create_index('ix_db_dbnode_label', 'db_dbnode', ['label'])
    op.create_index('ix_db_dbnode_type', 'db_dbnode', ['type'])
    # Many-to-many: group membership of nodes.
    op.create_table(
        'db_dbgroup_dbnodes',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('dbnode_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('dbgroup_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['dbgroup_id'], ['db_dbgroup.id'],
                                name='db_dbgroup_dbnodes_dbgroup_id_fkey',
                                initially='DEFERRED', deferrable=True),
        sa.ForeignKeyConstraint(['dbnode_id'], ['db_dbnode.id'],
                                name='db_dbgroup_dbnodes_dbnode_id_fkey',
                                initially='DEFERRED', deferrable=True),
        sa.PrimaryKeyConstraint('id', name='db_dbgroup_dbnodes_pkey'))
    # Advisory lock table keyed by string.
    op.create_table(
        'db_dblock',
        sa.Column('key', sa.VARCHAR(length=255), autoincrement=False, nullable=False),
        sa.Column('creation', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.Column('timeout', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('owner', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('key', name='db_dblock_pkey'))
    # Key/value data attached to a legacy workflow.
    op.create_table(
        'db_dbworkflowdata',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('parent_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('name', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
        sa.Column('time', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.Column('data_type', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
        sa.Column('value_type', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
        sa.Column('json_value', sa.TEXT(), autoincrement=False, nullable=True),
        sa.Column('aiida_obj_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['aiida_obj_id'], ['db_dbnode.id'], name='db_dbworkflowdata_aiida_obj_id_fkey'),
        sa.ForeignKeyConstraint(['parent_id'], ['db_dbworkflow.id'], name='db_dbworkflowdata_parent_id_fkey'),
        sa.PrimaryKeyConstraint('id', name='db_dbworkflowdata_pkey'),
        sa.UniqueConstraint(
            'parent_id', 'name', 'data_type',
            name='db_dbworkflowdata_parent_id_name_data_type_key'))
    # Directed provenance edges between nodes.
    op.create_table(
        'db_dblink',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('input_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('output_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('label', sa.VARCHAR(length=255), autoincrement=False, nullable=False),
        sa.Column('type', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['input_id'], ['db_dbnode.id'],
                                name='db_dblink_input_id_fkey',
                                initially='DEFERRED', deferrable=True),
        sa.ForeignKeyConstraint(['output_id'], ['db_dbnode.id'],
                                name='db_dblink_output_id_fkey',
                                ondelete='CASCADE', initially='DEFERRED', deferrable=True),
        sa.PrimaryKeyConstraint('id', name='db_dblink_pkey'),
    )
    op.create_index('ix_db_dblink_label', 'db_dblink', ['label'])
    # Many-to-many: calculations attached to a workflow step.
    op.create_table(
        'db_dbworkflowstep_calculations',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('dbworkflowstep_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('dbnode_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(
            ['dbnode_id'], ['db_dbnode.id'],
            name='db_dbworkflowstep_calculations_dbnode_id_fkey'),
        sa.ForeignKeyConstraint(
            ['dbworkflowstep_id'], ['db_dbworkflowstep.id'],
            name='db_dbworkflowstep_calculations_dbworkflowstep_id_fkey'),
        sa.PrimaryKeyConstraint('id', name='db_dbworkflowstep_calculations_pkey'),
        sa.UniqueConstraint(
            'dbworkflowstep_id', 'dbnode_id',
            name='db_dbworkflowstep_calculations_id_dbnode_id_key'))
    # Transitive-closure table over db_dblink, maintained by install_tc() below.
    op.create_table(
        'db_dbpath',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('parent_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('child_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('depth', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('entry_edge_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('direct_edge_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('exit_edge_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['child_id'], ['db_dbnode.id'],
                                name='db_dbpath_child_id_fkey',
                                initially='DEFERRED', deferrable=True),
        sa.ForeignKeyConstraint(['parent_id'], ['db_dbnode.id'],
                                name='db_dbpath_parent_id_fkey',
                                initially='DEFERRED', deferrable=True),
        sa.PrimaryKeyConstraint('id', name='db_dbpath_pkey'))
    # Calculation state history; one row per (node, state).
    op.create_table(
        'db_dbcalcstate',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('dbnode_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('state', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
        sa.Column('time', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['dbnode_id'], ['db_dbnode.id'],
                                name='db_dbcalcstate_dbnode_id_fkey',
                                ondelete='CASCADE', initially='DEFERRED', deferrable=True),
        sa.PrimaryKeyConstraint('id', name='db_dbcalcstate_pkey'),
        sa.UniqueConstraint('dbnode_id', 'state', name='db_dbcalcstate_dbnode_id_state_key'))
    op.create_index('ix_db_dbcalcstate_state', 'db_dbcalcstate', ['state'])
    # Global key/value settings store.
    op.create_table(
        'db_dbsetting',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('key', sa.VARCHAR(length=255), autoincrement=False, nullable=False),
        sa.Column('val', postgresql.JSONB(), autoincrement=False, nullable=True),
        sa.Column('description', sa.VARCHAR(length=255), autoincrement=False, nullable=False),
        sa.Column('time', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name='db_dbsetting_pkey'),
        sa.UniqueConstraint('key', name='db_dbsetting_key_key'))
    op.create_index('ix_db_dbsetting_key', 'db_dbsetting', ['key'])
    # User comments attached to nodes.
    op.create_table(
        'db_dbcomment',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('uuid', postgresql.UUID(), autoincrement=False, nullable=True),
        sa.Column('dbnode_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('ctime', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.Column('mtime', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('content', sa.TEXT(), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['dbnode_id'], ['db_dbnode.id'],
                                name='db_dbcomment_dbnode_id_fkey',
                                ondelete='CASCADE', initially='DEFERRED', deferrable=True),
        sa.ForeignKeyConstraint(['user_id'], ['db_dbuser.id'],
                                name='db_dbcomment_user_id_fkey',
                                ondelete='CASCADE', initially='DEFERRED', deferrable=True),
        sa.PrimaryKeyConstraint('id', name='db_dbcomment_pkey'))
    # Log records (objname/objpk identify the logged entity).
    op.create_table(
        'db_dblog',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('time', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.Column('loggername', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
        sa.Column('levelname', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
        sa.Column('objname', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
        sa.Column('objpk', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('message', sa.TEXT(), autoincrement=False, nullable=True),
        sa.Column('metadata', postgresql.JSONB(), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name='db_dblog_pkey'))
    op.create_index('ix_db_dblog_levelname', 'db_dblog', ['levelname'])
    op.create_index('ix_db_dblog_loggername', 'db_dblog', ['loggername'])
    op.create_index('ix_db_dblog_objname', 'db_dblog', ['objname'])
    op.create_index('ix_db_dblog_objpk', 'db_dblog', ['objpk'])
    # Many-to-many: sub-workflows spawned by a workflow step.
    op.create_table(
        'db_dbworkflowstep_sub_workflows',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('dbworkflowstep_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('dbworkflow_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(
            ['dbworkflow_id'], ['db_dbworkflow.id'],
            name='db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey'),
        sa.ForeignKeyConstraint(
            ['dbworkflowstep_id'], ['db_dbworkflowstep.id'],
            name='db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey'),
        sa.PrimaryKeyConstraint('id', name='db_dbworkflowstep_sub_workflows_pkey'),
        sa.UniqueConstraint(
            'dbworkflowstep_id', 'dbworkflow_id',
            name='db_dbworkflowstep_sub_workflows_id_dbworkflow__key'))
    # I get the session using the alembic connection
    # (Keep in mind that alembic uses the AiiDA SQLA
    # session)
    session = Session(bind=op.get_bind())
    install_tc(session)
def upgrade():
    """Create the book-tracking schema (dimensions + facts) and seed the
    dimension/genre tables from module-level fixtures."""
    # ### commands auto generated by Alembic - please adjust! ###
    bind = op.get_bind()
    session = Session(bind=bind)
    # uuid_generate_v4(), used as the server default below, comes from the
    # uuid-ossp extension.
    session.execute('create extension if not exists "uuid-ossp";')
    ### DIM BOOK STATUSES ###
    book_statuses_table = op.create_table(
        'dim_book_statuses',
        sa.Column(
            'id', postgresql.UUID(as_uuid=True), server_default=sa.text('uuid_generate_v4()'), nullable=False
        ),
        sa.Column('name', sa.String(length=2048), nullable=False),
        sa.Column('display_name', sa.String(length=2048), nullable=False),
        sa.Column('order_index', sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    # Fixture lists are defined elsewhere in this module.
    op.bulk_insert(book_statuses_table, book_statuses_fixture)
    ### DIM USERS ###
    users_table = op.create_table(
        'dim_users',
        sa.Column(
            'id', postgresql.UUID(as_uuid=True), server_default=sa.text('uuid_generate_v4()'), nullable=False
        ),
        sa.Column('name', sa.String(length=2048), nullable=False),
        sa.Column('email', sa.String(length=2048), nullable=False),
        # Hashed password bytes.
        sa.Column('password', sa.LargeBinary(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email')
    )
    op.bulk_insert(users_table, users_fixture)
    ### FCT BOOKS ###
    # Fact table: one row per book, linked to its owner and current status.
    op.create_table(
        'fct_books',
        sa.Column(
            'id', postgresql.UUID(as_uuid=True), server_default=sa.text('uuid_generate_v4()'), nullable=False
        ),
        sa.Column('author', sa.String(length=2048), nullable=False),
        sa.Column('image_key', sa.String(length=2048), nullable=True),
        sa.Column('synopsis', sa.String(length=2048), nullable=True),
        sa.Column('timestamp', sa.DateTime(), nullable=False),
        sa.Column('title', sa.String(length=2048), nullable=False),
        sa.Column('dim_user_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('dim_book_status_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['dim_book_status_id'], ['dim_book_statuses.id'], ),
        sa.ForeignKeyConstraint(['dim_user_id'], ['dim_users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    ### FCT GENRES ###
    # Genres are per-user (dim_user_id FK), seeded from a fixture.
    genres_table = op.create_table(
        'fct_genres',
        sa.Column(
            'id', postgresql.UUID(as_uuid=True), server_default=sa.text('uuid_generate_v4()'), nullable=False
        ),
        sa.Column('bucket_name', sa.String(length=2048), nullable=True),
        sa.Column('display_name', sa.String(length=2048), nullable=False),
        sa.Column('is_primary', sa.Boolean(), nullable=False),
        sa.Column('name', sa.String(length=2048), nullable=False),
        sa.Column('dim_user_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['dim_user_id'], ['dim_users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.bulk_insert(genres_table, genres_fixture)
    ### FCT BOOK GENRES ###
    # Many-to-many join between books and genres.
    op.create_table(
        'fct_book_genres',
        sa.Column(
            'id', postgresql.UUID(as_uuid=True), server_default=sa.text('uuid_generate_v4()'), nullable=False
        ),
        sa.Column('fct_genre_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('fct_book_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['fct_book_id'], ['fct_books.id'], ),
        sa.ForeignKeyConstraint(['fct_genre_id'], ['fct_genres.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    ### FCT SESSIONS ###
    # Login sessions: unique token per session, timestamped server-side.
    op.create_table(
        'fct_sessions',
        sa.Column(
            'id', postgresql.UUID(as_uuid=True), server_default=sa.text('uuid_generate_v4()'), nullable=False
        ),
        sa.Column('token', sa.String(), nullable=False),
        sa.Column(
            'timestamp', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True
        ),
        sa.Column('dim_user_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['dim_user_id'], ['dim_users.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('token')
    )
def upgrade():
    """Create the team-metrics schema: reference data (questions, teams,
    activities, sprints) and measurement tables (overtime, team health
    check, burndown)."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Team-health-check question bank; one row per topic.
    op.create_table(
        "res_thc_questions",
        sa.Column("question_id", sa.Integer(), nullable=False),
        sa.Column("deck", sa.String(), nullable=True),
        sa.Column("topic", sa.String(), nullable=False),
        sa.Column("answer_green", sa.String(), nullable=True),
        sa.Column("answer_red", sa.String(), nullable=True),
        sa.PrimaryKeyConstraint("question_id"),
        sa.UniqueConstraint("topic", name="res_thc_questions_topic_key"),
    )
    # Teams form a tree via the self-referencing parent_id FK.
    op.create_table(
        "teams",
        sa.Column("team_id", sa.Integer(), nullable=False),
        sa.Column("parent_id", sa.Integer(), nullable=True),
        sa.Column("code", sa.String(), nullable=False),
        sa.Column("name", sa.String(), nullable=False),
        sa.ForeignKeyConstraint(
            ["parent_id"],
            ["teams.team_id"],
        ),
        sa.PrimaryKeyConstraint("team_id"),
        sa.UniqueConstraint("code", name="teams_code_key"),
    )
    # An activity maps a team to a Jira project.
    op.create_table(
        "activities",
        sa.Column("activity_id", sa.Integer(), nullable=False),
        sa.Column("team_id", sa.Integer(), nullable=True),
        sa.Column("activity_name", sa.String(), nullable=True),
        sa.Column("jira_project_key", sa.String(), nullable=True),
        sa.Column("jira_project_name", sa.String(), nullable=True),
        sa.ForeignKeyConstraint(
            ["team_id"],
            ["teams.team_id"],
        ),
        sa.PrimaryKeyConstraint("activity_id"),
        sa.UniqueConstraint("activity_name", name="activities_activity_name_key"),
    )
    # Overtime measurements; measurement_id is a globally unique UUID.
    op.create_table(
        "m_overtime",
        sa.Column("pk", sa.Integer(), nullable=False),
        sa.Column("measurement_id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("measurement_date", sa.Date(), nullable=False),
        sa.Column("team_id", sa.Integer(), nullable=True),
        sa.Column("workdays_fix", sa.Integer(), nullable=True),
        sa.Column("workdays_actual", sa.Integer(), nullable=False),
        sa.Column("overtime", sa.Interval(), nullable=True),
        sa.ForeignKeyConstraint(
            ["team_id"],
            ["teams.team_id"],
        ),
        sa.PrimaryKeyConstraint("pk"),
        sa.UniqueConstraint("measurement_id", name="m_overtime_measurement_id_key"),
    )
    # Team-health-check answers: red/yellow/green vote counts per
    # (session, team, question), each combination recorded at most once.
    op.create_table(
        "m_thc",
        sa.Column("pk", sa.Integer(), nullable=False),
        sa.Column("measurement_id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("measurement_date", sa.DateTime(), nullable=False),
        sa.Column("session_name", sa.String(), nullable=False),
        sa.Column("team_id", sa.Integer(), nullable=True),
        sa.Column("question_id", sa.Integer(), nullable=True),
        sa.Column("result_red", sa.Integer(), nullable=True),
        sa.Column("result_yellow", sa.Integer(), nullable=True),
        sa.Column("result_green", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["question_id"],
            ["res_thc_questions.question_id"],
        ),
        sa.ForeignKeyConstraint(
            ["team_id"],
            ["teams.team_id"],
        ),
        sa.PrimaryKeyConstraint("pk"),
        sa.UniqueConstraint("measurement_id", name="m_thc_measurement_id_key"),
        sa.UniqueConstraint(
            "session_name",
            "team_id",
            "question_id",
            name="m_thc_session_name_team_id_question_id_key",
        ),
    )
    # Sprint calendar with planned story points.
    op.create_table(
        "sprints",
        sa.Column("sprint_id", sa.Integer(), nullable=False),
        sa.Column("team_id", sa.Integer(), nullable=True),
        sa.Column("start_date", sa.DateTime(), nullable=False),
        sa.Column("end_date", sa.DateTime(), nullable=False),
        sa.Column("sp_plan", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["team_id"],
            ["teams.team_id"],
        ),
        sa.PrimaryKeyConstraint("sprint_id"),
    )
    # Daily burndown snapshots; one row per (date, sprint).
    op.create_table(
        "m_burndown",
        sa.Column("pk", sa.Integer(), nullable=False),
        sa.Column("measurement_id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("measurement_date", sa.DateTime(), nullable=False),
        sa.Column("activity_id", sa.Integer(), nullable=True),
        sa.Column("sprint_id", sa.Integer(), nullable=True),
        sa.Column("sp_not_done", sa.Integer(), nullable=True),
        sa.Column("sp_added", sa.Integer(), nullable=True),
        sa.Column("sp_swapped", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["activity_id"],
            ["activities.activity_id"],
        ),
        sa.ForeignKeyConstraint(
            ["sprint_id"],
            ["sprints.sprint_id"],
        ),
        sa.PrimaryKeyConstraint("pk"),
        sa.UniqueConstraint(
            "measurement_date",
            "sprint_id",
            name="m_burndown_measurement_date_sprint_id_key",
        ),
        sa.UniqueConstraint("measurement_id", name="m_burndown_measurement_id_key"),
    )
from sqlalchemy.dialects import postgresql
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = "133fc1714306"
down_revision = "426b74e836df"
branch_labels = ()
depends_on = None

# Lightweight table stubs (each with its own throwaway MetaData) so this
# migration can query data without importing the application's models.
Build = sa.Table(
    "build",
    sa.MetaData(),
    sa.Column("id", postgresql.UUID(), primary_key=True),
    sa.Column("repository_id", postgresql.UUID()),
    sa.Column("author_id", postgresql.UUID()),
    sa.Column("label", sa.String()),
    sa.Column("revision_sha", sa.String()),
)

Revision = sa.Table(
    "revision",
    sa.MetaData(),
    sa.Column("repository_id", postgresql.UUID()),
    sa.Column("sha", sa.String()),
    sa.Column("message", sa.String()),
    sa.Column("author_id", postgresql.UUID()),
)
def downgrade():
    """Recreate the movie-database tables removed by upgrade().

    Parent tables (persons, genres, movies) are created first so the
    association tables' foreign keys can resolve.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('persons',
                    sa.Column('id', postgresql.UUID(), autoincrement=False, nullable=False),
                    sa.Column('name', sa.VARCHAR(length=255), autoincrement=False, nullable=False),
                    sa.PrimaryKeyConstraint('id', name='persons_pkey'),
                    postgresql_ignore_search_path=False)
    op.create_table('genres',
                    sa.Column('id', postgresql.UUID(), autoincrement=False, nullable=False),
                    sa.Column('name', sa.VARCHAR(length=255), autoincrement=False, nullable=False),
                    sa.PrimaryKeyConstraint('id', name='genres_pkey'),
                    postgresql_ignore_search_path=False)
    op.create_table('movies',
                    sa.Column('id', postgresql.UUID(), autoincrement=False, nullable=False),
                    sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=False),
                    sa.Column('plot', sa.TEXT(), autoincrement=False, nullable=True),
                    sa.Column('imdb_rating', sa.NUMERIC(), server_default=sa.text('0'),
                              autoincrement=False, nullable=True),
                    sa.PrimaryKeyConstraint('id', name='movies_pkey'),
                    postgresql_ignore_search_path=False)
    # Association table movie <-> person with the person's role (position).
    op.create_table(
        'movies_person',
        sa.Column('id', postgresql.UUID(), autoincrement=False, nullable=False),
        sa.Column('movie_id', postgresql.UUID(), autoincrement=False, nullable=False),
        sa.Column('person_id', postgresql.UUID(), autoincrement=False, nullable=False),
        sa.Column('position', postgresql.ENUM('director', 'writer', 'actor', name='person_position'),
                  autoincrement=False, nullable=True),
        # BUG FIX: the autogenerated constraints referenced the surrogate
        # 'id' column for both parents; they must use the association
        # columns, as the constraint names themselves indicate.
        sa.ForeignKeyConstraint(['movie_id'], ['movies.id'], name='movies_person_movie'),
        sa.ForeignKeyConstraint(['person_id'], ['persons.id'], name='movies_person_person'),
        sa.PrimaryKeyConstraint('id', name='movies_person_pkey'))
    # Association table movie <-> genre.
    op.create_table(
        'movies_genres',
        sa.Column('id', postgresql.UUID(), autoincrement=False, nullable=False),
        sa.Column('movie_id', postgresql.UUID(), autoincrement=False, nullable=False),
        sa.Column('genre_id', postgresql.UUID(), autoincrement=False, nullable=False),
        # BUG FIX: same 'id' -> association-column correction as above.
        sa.ForeignKeyConstraint(['genre_id'], ['genres.id'], name='movies_genres_genre'),
        sa.ForeignKeyConstraint(['movie_id'], ['movies.id'], name='movies_genres_movie'),
        sa.PrimaryKeyConstraint('id', name='movies_genres_pkey'))
def test_should_postgresql_uuid_convert():
    """A PostgreSQL UUID column should be exposed as a graphene String."""
    converted = get_field(postgresql.UUID())
    assert converted.type == graphene.String
def upgrade():
    """Attach asset instances to scenes: add `scene_id`, relax the old
    entity columns, rebuild the uniqueness constraints around the new
    column, and add `temporal_entity_id` to output files."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Join table linking entities to asset instances.
    op.create_table(
        'asset_instance_link',
        sa.Column('entity_id',
                  sqlalchemy_utils.types.uuid.UUIDType(binary=False),
                  default=uuid.uuid4,
                  nullable=False),
        sa.Column('asset_instance_id',
                  sqlalchemy_utils.types.uuid.UUIDType(binary=False),
                  default=uuid.uuid4,
                  nullable=False),
        sa.ForeignKeyConstraint(
            ['asset_instance_id'],
            ['asset_instance.id'],
        ),
        sa.ForeignKeyConstraint(
            ['entity_id'],
            ['entity.id'],
        ),
        sa.PrimaryKeyConstraint('entity_id', 'asset_instance_id'))
    # Asset instances now belong to a scene (an `entity` row).
    op.add_column(
        'asset_instance',
        sa.Column('scene_id',
                  sqlalchemy_utils.types.uuid.UUIDType(binary=False),
                  default=uuid.uuid4,
                  nullable=True))
    # The old entity columns become optional now that scene_id exists.
    op.alter_column('asset_instance', 'entity_id',
                    existing_type=postgresql.UUID(),
                    nullable=True)
    op.alter_column('asset_instance', 'entity_type_id',
                    existing_type=postgresql.UUID(),
                    nullable=True)
    op.create_index(op.f('ix_asset_instance_scene_id'),
                    'asset_instance', ['scene_id'],
                    unique=False)
    # Rebuild uniqueness around scene_id instead of the old entity columns.
    op.drop_constraint('asset_instance_name_uc', 'asset_instance', type_='unique')
    op.create_unique_constraint('asset_instance_name_uc', 'asset_instance',
                                ['scene_id', 'name'])
    op.drop_constraint('asset_instance_uc', 'asset_instance', type_='unique')
    op.create_unique_constraint('asset_instance_uc', 'asset_instance',
                                ['asset_id', 'scene_id', 'number'])
    op.drop_index('ix_asset_instance_entity_id', table_name='asset_instance')
    op.drop_index('ix_asset_instance_entity_type_id', table_name='asset_instance')
    # NOTE(review): passing None lets Alembic autogenerate the FK name,
    # which makes a later targeted drop harder — consider naming it.
    op.create_foreign_key(None, 'asset_instance', 'entity', ['scene_id'], ['id'])
    # Output files gain an optional temporal entity and it becomes part of
    # the output_file uniqueness constraint.
    op.add_column(
        'output_file',
        sa.Column('temporal_entity_id',
                  sqlalchemy_utils.types.uuid.UUIDType(binary=False),
                  default=uuid.uuid4,
                  nullable=True))
    op.drop_constraint('output_file_uc', 'output_file', type_='unique')
    op.create_unique_constraint('output_file_uc', 'output_file', [
        'name', 'entity_id', 'asset_instance_id', 'output_type_id',
        'task_type_id', 'temporal_entity_id', 'representation', 'revision'
    ])
    op.create_foreign_key(None, 'output_file', 'entity', ['temporal_entity_id'], ['id'])
    # Uploaded-movie columns are no longer used.
    op.drop_column('output_file', 'uploaded_movie_name')
    op.drop_column('output_file', 'uploaded_movie_url')
def upgrade():
    """Give each team an optional, indexed (non-unique) UUID column."""
    # ### commands auto generated by Alembic - please adjust! ###
    uuid_column = sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=True)
    op.add_column('team', uuid_column)
    op.create_index(op.f('ix_team_uuid'), 'team', ['uuid'], unique=False)
def test_convert_UUID(self, converter):
    """A PostgreSQL UUID column property should convert to a UUID field."""
    prop = make_property(postgresql.UUID())
    field = converter.property2field(prop)
    # Idiom fix: compare classes with `is` (identity), not `==` — PEP 8;
    # the exact-type check (rather than isinstance) is kept deliberately so
    # a subclass would still fail the test.
    assert type(field) is fields.UUID
def upgrade() -> None:
    """Create the core schema: maps, product types, products, roles, users
    and the roles<->users association table."""
    conn = op.get_bind()
    # uuid_generate_v4() (used as the id server default below) requires the
    # uuid-ossp extension.
    conn.execute(
        'CREATE EXTENSION IF NOT EXISTS "uuid-ossp" WITH SCHEMA public')
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "maps",
        sa.Column(
            "id",
            sqlalchemy_utils.types.uuid.UUIDType(),
            server_default=sa.text("uuid_generate_v4()"),
            nullable=False,
        ),
        sa.Column("name", sa.String(length=510), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("size_x", sa.Integer(), nullable=True),
        sa.Column("size_y", sa.Integer(), nullable=True),
        sa.Column("status", sa.String(length=255), nullable=False),
        sa.Column(
            "created_at",
            db.UtcTimestamp(timezone=True),
            server_default=sa.text("current_timestamp"),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            db.UtcTimestamp(timezone=True),
            server_default=sa.text("current_timestamp"),
            nullable=False,
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("name"),
    )
    op.create_table(
        "product_types",
        sa.Column(
            "id",
            sqlalchemy_utils.types.uuid.UUIDType(),
            server_default=sa.text("uuid_generate_v4()"),
            nullable=False,
        ),
        sa.Column("product_type", sa.String(length=510), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("product_type"),
    )
    op.create_table(
        "products",
        sa.Column(
            "id",
            sqlalchemy_utils.types.uuid.UUIDType(),
            server_default=sa.text("uuid_generate_v4()"),
            nullable=False,
        ),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("description", sa.Text(), nullable=False),
        sa.Column(
            "created_at",
            db.UtcTimestamp(timezone=True),
            server_default=sa.text("current_timestamp"),
            nullable=False,
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("name"),
    )
    op.create_table(
        "roles",
        sa.Column(
            "id",
            sqlalchemy_utils.types.uuid.UUIDType(),
            server_default=sa.text("uuid_generate_v4()"),
            nullable=False,
        ),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column(
            "created_at",
            db.UtcTimestamp(timezone=True),
            server_default=sa.text("current_timestamp"),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            db.UtcTimestamp(timezone=True),
            server_default=sa.text("current_timestamp"),
            nullable=False,
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("name"),
    )
    op.create_table(
        "users",
        sa.Column(
            "id",
            sqlalchemy_utils.types.uuid.UUIDType(),
            server_default=sa.text("uuid_generate_v4()"),
            nullable=False,
        ),
        sa.Column("username", sa.String(length=32), nullable=False),
        sa.Column("email", sa.String(length=255), nullable=False),
        sa.Column("hashed_password", sa.String(length=255), nullable=False),
        sa.Column("is_active", sa.Boolean(), nullable=False),
        sa.Column("is_superuser", sa.Boolean(), nullable=False),
        sa.Column(
            "created_at",
            db.UtcTimestamp(timezone=True),
            server_default=sa.text("current_timestamp"),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            db.UtcTimestamp(timezone=True),
            server_default=sa.text("current_timestamp"),
            nullable=False,
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("username"),
    )
    op.create_index(op.f("ix_users_email"), "users", ["email"], unique=True)
    # Many-to-many association between users and roles.
    # NOTE(review): these FK columns use postgresql.UUID while the
    # referenced PKs use sqlalchemy_utils UUIDType — both map to the native
    # uuid type on PostgreSQL, presumably intentional, but confirm.
    op.create_table(
        "roles_users",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("user_id", postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column("role_id", postgresql.UUID(as_uuid=True), nullable=True),
        sa.ForeignKeyConstraint(
            ["role_id"],
            ["roles.id"],
        ),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )