def add(self, name):
    """
    Add a new schema

    :param name: name of the schema
    :rtype: MigrationSchema instance
    """
    with cnx(self.migration) as conn:
        conn.execute(CreateSchema(name))
    return MigrationSchema(self.migration, name)
def _before_create(target, connection, **kw):
    # SQLAlchemy doesn't create schemas so we need to take care of it...
    schemas = {table.schema for table in kw['tables']}
    for schema in schemas:
        if not _schema_exists(connection, schema):
            CreateSchema(schema).execute(connection)
            signals.db_schema_created.send(unicode(schema), connection=connection)
    # Create the indico_unaccent function
    create_unaccent_function(connection)
def test_dataset(engine):
    dataset = "".join(random.choices(string.ascii_lowercase, k=10))
    with engine.connect() as conn:
        logging.info(f"Creating dataset {dataset}")
        conn.execute(CreateSchema(dataset))
        yield dataset
        logging.info(f"Dropping dataset {dataset}")
        conn.execute(DropSchema(dataset))
def _before_create(target, connection, **kw):
    # SQLAlchemy doesn't create schemas so we need to take care of it...
    schemas = {table.schema for table in kw['tables']}
    for schema in schemas:
        if not _schema_exists(connection, schema):
            CreateSchema(schema).execute(connection)
            signals.core.db_schema_created.send(str(schema), connection=connection)
    # Create our custom functions
    create_unaccent_function(connection)
    create_natsort_function(connection)
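Both `_before_create` variants above rely on a `_schema_exists` helper that is not shown in these snippets. A minimal sketch of what such a check could look like, assuming a PostgreSQL connection and a lookup in `information_schema.schemata` (helper name and query are illustrative, not the original implementation):

from sqlalchemy import text

def _schema_exists(connection, name):
    # Hypothetical helper: report whether a schema with this name already exists.
    query = text("SELECT COUNT(*) FROM information_schema.schemata WHERE schema_name = :name")
    return bool(connection.execute(query, {'name': name}).scalar())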
def _create_schemas(self, metadata):
    if self._ddl_created:
        return
    all_schemas = {table.schema for table in metadata.tables.values() if table.schema}
    for schema in all_schemas:
        if self.default_schema == schema:
            continue
        statement = CreateSchema(schema, quote=True)
        self.engine.execute(statement)
def upgrade():
    op.execute(CreateSchema('plugin_example'))
    op.create_table('foo',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('bar', sa.String(), nullable=True),
                    sa.Column('location_id', sa.Integer(), nullable=False),
                    sa.ForeignKeyConstraint(['location_id'], ['roombooking.locations.id']),
                    sa.PrimaryKeyConstraint('id'),
                    schema='plugin_example')
def create_namespaces():
    """Create the loaded table namespaces (schemas) in database."""
    ext = current_app.extensions['bdc-db']
    with _db.session.begin_nested():
        for namespace in ext.namespaces:
            if not _db.engine.dialect.has_schema(_db.engine, namespace):
                click.secho(f'Creating namespace {namespace}...', bold=True, fg='yellow')
                _db.engine.execute(CreateSchema(namespace))
    _db.session.commit()
    click.secho('Namespaces created!', bold=True, fg='green')
def upgrade():
    op.execute(CreateSchema('plugin_vc_vidyo'))
    op.create_table(
        'vidyo_extensions',
        sa.Column('vc_room_id', sa.Integer(), nullable=False),
        sa.Column('extension', sa.BigInteger(), nullable=True),
        sa.Column('owned_by_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['vc_room_id'], ['events.vc_rooms.id'],
                                name='vidyo_extensions_vc_room_id_fkey'),
        sa.PrimaryKeyConstraint('vc_room_id', name='vidyo_extensions_pkey'),
        sa.Index('ix_plugin_vc_vidyo_vidyo_extensions_extension', 'extension'),
        sa.Index('ix_plugin_vc_vidyo_vidyo_extensions_owned_by_id', 'owned_by_id'),
        schema='plugin_vc_vidyo')
def upgrade():
    op.execute(CreateSchema('categories'))
    op.execute('ALTER TABLE indico.category_index SET SCHEMA categories')
    op.create_table('legacy_id_map',
                    sa.Column('legacy_category_id', sa.String(), nullable=False, index=True),
                    sa.Column('category_id', sa.Integer(), autoincrement=False, nullable=False),
                    sa.PrimaryKeyConstraint('legacy_category_id', 'category_id'),
                    schema='categories')
def make_create_ddl(self, metadata: MetaData) -> DdlString:
    if not self.dialect:
        raise ValueError(
            "Dialect must be specified to use default metadata creation function"
        )
    ddl = []
    if metadata.schema:
        schema_ddl = str(CreateSchema(metadata.schema).compile(dialect=self.dialect))
        ddl.append(schema_ddl)
    for table_obj in metadata.tables.values():
        table_ddl = str(CreateTable(table_obj).compile(dialect=self.dialect))
        ddl.append(table_ddl)
    return ";\n".join(d for d in ddl) + ";\n"
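The `CreateSchema(...).compile(dialect=...)` pattern used in `make_create_ddl` can also be exercised on its own to render DDL strings offline. A minimal standalone sketch, with a hypothetical schema and table name, compiling against the PostgreSQL dialect:

from sqlalchemy import Column, Integer, MetaData, Table
from sqlalchemy.dialects import postgresql
from sqlalchemy.schema import CreateSchema, CreateTable

# Hypothetical metadata bound to a "reporting" schema, purely for illustration.
metadata = MetaData(schema="reporting")
Table("events", metadata, Column("id", Integer, primary_key=True))

dialect = postgresql.dialect()
ddl = [str(CreateSchema(metadata.schema).compile(dialect=dialect))]
ddl += [str(CreateTable(table).compile(dialect=dialect)) for table in metadata.tables.values()]
print(";\n".join(ddl) + ";\n")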
async def create_dataset(dataset) -> None:
    # Create dataset record and dataset schema
    async with ContextEngine("WRITE"):
        await datasets.create_dataset(dataset)
        await db.status(CreateSchema(dataset))
        await db.status(f"GRANT USAGE ON SCHEMA {dataset} TO {READER_USERNAME};")
        await db.status(
            f"ALTER DEFAULT PRIVILEGES IN SCHEMA {dataset} GRANT SELECT ON TABLES TO {READER_USERNAME};"
        )
    row = await datasets.get_dataset(dataset)
    assert row.dataset == dataset
    assert dataset == await db.scalar(
        f"SELECT schema_name FROM information_schema.schemata WHERE schema_name = '{dataset}';"
    )
def connection(sync_engine):
    with sync_engine.connect() as conn:
        metadata = MetaData()
        Table("table", metadata, Column("column1", Integer, primary_key=True))
        Table("table2", metadata, Column("fk_column", ForeignKey("table.column1")))
        if conn.dialect.name != "sqlite":
            conn.execute(CreateSchema("altschema"))
            Table("table3", metadata, Column("fk_column", Integer), schema="altschema")
        metadata.create_all(conn)
        yield conn
        if conn.dialect.name != "sqlite":
            metadata.drop_all(conn)
            conn.execute(DropSchema("altschema"))
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.execute(CreateSchema('plugin_vc_zoom'))
    op.create_table('zoom_meetings',
                    sa.Column('vc_room_id', sa.Integer(), nullable=False),
                    sa.Column('meeting', sa.BigInteger(), nullable=True),
                    sa.Column('url_zoom', sa.Text(), nullable=False),
                    sa.Column('owned_by_id', sa.Integer(), nullable=False),
                    sa.ForeignKeyConstraint(['owned_by_id'], [u'users.users.id'],
                                            name=op.f('fk_zoom_meetings_owned_by_id_users')),
                    sa.ForeignKeyConstraint(['vc_room_id'], [u'events.vc_rooms.id'],
                                            name=op.f('fk_zoom_meetings_vc_room_id_vc_rooms')),
                    sa.PrimaryKeyConstraint('vc_room_id', name=op.f('pk_zoom_meetings')),
                    schema='plugin_vc_zoom')
    op.create_index(op.f('ix_zoom_meetings_meeting'), 'zoom_meetings', ['meeting'],
                    unique=False, schema='plugin_vc_zoom')
    op.create_index(op.f('ix_zoom_meetings_owned_by_id'), 'zoom_meetings', ['owned_by_id'],
                    unique=False, schema='plugin_vc_zoom')
    op.create_index(op.f('ix_zoom_meetings_url_zoom'), 'zoom_meetings', ['url_zoom'],
                    unique=False, schema='plugin_vc_zoom')
def upgrade():
    op.execute(CreateSchema('event_editing'))
    op.create_table(
        'file_types',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('event_id', sa.Integer(), nullable=False, index=True),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('extensions', postgresql.ARRAY(sa.String()), nullable=False),
        sa.Column('allow_multiple_files', sa.Boolean(), nullable=False),
        sa.Column('required', sa.Boolean(), nullable=False),
        sa.Column('publishable', sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
        sa.PrimaryKeyConstraint('id'),
        schema='event_editing'
    )
    op.create_index('ix_uq_file_types_event_id_name_lower', 'file_types',
                    ['event_id', sa.text('lower(name)')], unique=True, schema='event_editing')
def upgrade():
    op.execute(CreateSchema('plugin_vc_vidyo'))
    op.create_table('vidyo_extensions',
                    sa.Column('vc_room_id', sa.Integer(), nullable=False),
                    sa.Column('extension', sa.BigInteger(), nullable=True, index=True),
                    sa.Column('owned_by_id', sa.Integer(), nullable=False, index=True),
                    sa.ForeignKeyConstraint(['owned_by_id'], ['users.users.id']),
                    sa.ForeignKeyConstraint(['vc_room_id'], ['events.vc_rooms.id']),
                    sa.PrimaryKeyConstraint('vc_room_id'),
                    schema='plugin_vc_vidyo')
def database(app, request):
    db.app = app
    db.init_app(app)
    db.drop_all()
    try:
        db.engine.execute(DropSchema('tests', cascade=True))
    except ProgrammingError:
        pass
    db.engine.execute(CreateSchema('tests'))
    db.create_all()

    @request.addfinalizer
    def drop_database():
        db.drop_all()
        db.engine.execute(DropSchema('tests', cascade=True))

    return db
def upgrade():
    op.execute(CreateSchema('event_paper_reviewing'))
    op.create_table('contribution_roles',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('user_id', sa.Integer(), nullable=False, index=True),
                    sa.Column('contribution_id', sa.Integer(), nullable=False, index=True),
                    sa.Column('role', PyIntEnum(PaperReviewingRoleType), nullable=False, index=True),
                    sa.ForeignKeyConstraint(['contribution_id'], [u'events.contributions.id']),
                    sa.ForeignKeyConstraint(['user_id'], [u'users.users.id']),
                    sa.PrimaryKeyConstraint('id'),
                    schema='event_paper_reviewing')
    op.create_table('paper_files',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('contribution_id', sa.Integer(), nullable=False, index=True),
                    sa.Column('revision_id', sa.Integer(), nullable=True),
                    sa.Column('storage_backend', sa.String(), nullable=False),
                    sa.Column('content_type', sa.String(), nullable=False),
                    sa.Column('size', sa.BigInteger(), nullable=False),
                    sa.Column('storage_file_id', sa.String(), nullable=False),
                    sa.Column('filename', sa.String(), nullable=False),
                    sa.Column('created_dt', UTCDateTime, nullable=False),
                    sa.ForeignKeyConstraint(['contribution_id'], [u'events.contributions.id']),
                    sa.PrimaryKeyConstraint('id'),
                    schema='event_paper_reviewing')
def upgrade():
    op.execute(CreateSchema('plugin_chat'))
    op.create_table('chatrooms',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('jid_node', sa.String(), nullable=False),
                    sa.Column('name', sa.String(), nullable=False),
                    sa.Column('description', sa.Text(), nullable=False),
                    sa.Column('password', sa.String(), nullable=False),
                    sa.Column('custom_server', sa.String(), nullable=False),
                    sa.Column('created_by_id', sa.Integer(), nullable=False, index=True),
                    sa.Column('created_dt', UTCDateTime, nullable=False),
                    sa.Column('modified_dt', UTCDateTime, nullable=True),
                    sa.ForeignKeyConstraint(['created_by_id'], ['users.users.id']),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('jid_node', 'custom_server'),
                    schema='plugin_chat')
    op.create_table('chatroom_events',
                    sa.Column('event_id', sa.Integer(), autoincrement=False, nullable=False, index=True),
                    sa.Column('chatroom_id', sa.Integer(), autoincrement=False, nullable=False, index=True),
                    sa.Column('hidden', sa.Boolean(), nullable=False),
                    sa.Column('show_password', sa.Boolean(), nullable=False),
                    sa.ForeignKeyConstraint(['chatroom_id'], ['plugin_chat.chatrooms.id']),
                    sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
                    sa.PrimaryKeyConstraint('event_id', 'chatroom_id'),
                    schema='plugin_chat')
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine and associate a
    connection with the context.
    """
    connectible = None
    print(f"run_migrations_online")
    print(f"environment:{api_config.environment}")
    print(f"connection string:{connection_string}")
    connectible = create_engine(connection_string, poolclass=pool.NullPool)
    SCHEMA_NAME = "NOT_test_fktdb"

    def include_object(object, name, type_, reflected, compare_to):
        if False:  # (type_ == "table"):
            return object.schema == 'Common'
        else:
            return True

    if connectible is not None:
        # Create schema; if it already exists, skip this
        try:
            connectible.execute(CreateSchema("Common"))
        except sqlalchemy.exc.ProgrammingError:
            pass

    with connectible.connect() as connection:
        context.configure(connection=connection,
                          target_metadata=target_metadata,
                          compare_type=True,
                          compare_server_default=True,
                          include_schemas=True,
                          version_table='AlembicVersion',
                          version_table_schema='Common',
                          include_object=include_object)
        with context.begin_transaction():
            context.run_migrations()
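The snippet above makes schema creation idempotent by swallowing ProgrammingError when the schema already exists. An alternative sketch, under the same SQLAlchemy 1.x execution style, is to check for the schema first via the inspector instead of catching the error (the connection_string name is taken from the snippet; everything else is illustrative):

from sqlalchemy import create_engine, inspect
from sqlalchemy.schema import CreateSchema

engine = create_engine(connection_string)
# Only issue CREATE SCHEMA when "Common" is not already present.
if "Common" not in inspect(engine).get_schema_names():
    engine.execute(CreateSchema("Common"))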
def upgrade():
    op.execute(CreateSchema('plugin_outlook'))
    op.create_table('queue',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('user_id', sa.Integer(), nullable=False, index=True),
                    sa.Column('event_id', sa.Integer(), nullable=False, index=True),
                    sa.Column('action', PyIntEnum(OutlookAction), nullable=False),
                    sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
                    sa.ForeignKeyConstraint(['user_id'], ['users.users.id']),
                    sa.PrimaryKeyConstraint('id'),
                    schema='plugin_outlook')
    op.create_index(None, 'queue', ['user_id', 'event_id', 'action'], schema='plugin_outlook')
def engine(request, tmpdir_factory):
    engine = None
    if request.param == 'sqlite-file':
        tmpdir = tmpdir_factory.mktemp('asphalt-sqlalchemy')
        db_path = str(tmpdir.join('test.db'))
        engine = create_engine('sqlite:///' + db_path)
    elif request.param == 'sqlite-memory':
        engine = create_engine('sqlite:///:memory:')
    elif request.param == 'mysql':
        url = request.getfixturevalue('mysql_url')
        engine = create_engine(url)
    elif request.param == 'postgresql':
        url = request.getfixturevalue('postgresql_url')
        engine = create_engine(url)

    if engine.dialect.name != 'sqlite':
        engine.execute(CreateSchema('altschema'))

    if request.param != 'sqlite-memory':
        metadata = MetaData()
        Table('table', metadata, Column('column1', Integer, primary_key=True))
        Table('table2', metadata, Column('fk_column', ForeignKey('table.column1')))
        if engine.dialect.name != 'sqlite':
            Table('table3', metadata, Column('fk_column', Integer), schema='altschema')
        metadata.create_all(engine)

    yield engine

    if engine.dialect.name != 'sqlite':
        metadata.drop_all(engine)
        engine.execute(DropSchema('altschema'))
def upgrade():
    op.execute(CreateSchema('plugin_print_checkin'))
    # ### commands auto generated by Alembic - please adjust! ###
    pass
def upgrade():
    op.execute(CreateSchema('attachments'))
    op.create_table(
        'folders',
        sa.Column('link_type', PyIntEnum(LinkType), nullable=False),
        sa.Column('category_id', sa.Integer(), nullable=True, index=True),
        sa.Column('event_id', sa.Integer(), nullable=True, index=True),
        sa.Column('session_id', sa.String(), nullable=True),
        sa.Column('contribution_id', sa.String(), nullable=True),
        sa.Column('subcontribution_id', sa.String(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(), nullable=True),
        sa.Column('description', sa.Text(), nullable=False),
        sa.Column('is_deleted', sa.Boolean(), nullable=False),
        sa.Column('is_default', sa.Boolean(), nullable=False),
        sa.Column('is_always_visible', sa.Boolean(), nullable=False),
        sa.Column('protection_mode', PyIntEnum(ProtectionMode, exclude_values={ProtectionMode.public}),
                  nullable=False),
        sa.Index(None, 'category_id', unique=True, postgresql_where=sa.text('link_type = 1 AND is_default')),
        sa.Index(None, 'event_id', unique=True, postgresql_where=sa.text('link_type = 2 AND is_default')),
        sa.Index(None, 'event_id', 'contribution_id', unique=True,
                 postgresql_where=sa.text('link_type = 3 AND is_default')),
        sa.Index(None, 'event_id', 'contribution_id', 'subcontribution_id', unique=True,
                 postgresql_where=sa.text('link_type = 4 AND is_default')),
        sa.Index(None, 'event_id', 'session_id', unique=True,
                 postgresql_where=sa.text('link_type = 5 AND is_default')),
        sa.CheckConstraint(
            'link_type != 1 OR (event_id IS NULL AND contribution_id IS NULL AND '
            'subcontribution_id IS NULL AND session_id IS NULL AND category_id IS NOT NULL)',
            name='valid_category_link'),
        sa.CheckConstraint(
            'link_type != 2 OR (contribution_id IS NULL AND subcontribution_id IS NULL AND '
            'category_id IS NULL AND session_id IS NULL AND event_id IS NOT NULL)',
            name='valid_event_link'),
        sa.CheckConstraint(
            'link_type != 3 OR (subcontribution_id IS NULL AND category_id IS NULL AND '
            'session_id IS NULL AND event_id IS NOT NULL AND contribution_id IS NOT NULL)',
            name='valid_contribution_link'),
        sa.CheckConstraint(
            'link_type != 4 OR (category_id IS NULL AND session_id IS NULL AND event_id IS NOT NULL AND '
            'contribution_id IS NOT NULL AND subcontribution_id IS NOT NULL)',
            name='valid_subcontribution_link'),
        sa.CheckConstraint(
            'link_type != 5 OR (contribution_id IS NULL AND subcontribution_id IS NULL AND '
            'category_id IS NULL AND event_id IS NOT NULL AND session_id IS NOT NULL)',
            name='valid_session_link'),
        sa.CheckConstraint('not (is_default and protection_mode != 1)', name='default_inheriting'),
        sa.CheckConstraint('is_default = (title IS NULL)', name='default_or_title'),
        sa.CheckConstraint('not (is_default and is_deleted)', name='default_not_deleted'),
        sa.PrimaryKeyConstraint('id'),
        schema='attachments')
    op.create_table(
        'attachments',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('folder_id', sa.Integer(), nullable=False, index=True),
        sa.Column('user_id', sa.Integer(), nullable=False, index=True),
        sa.Column('is_deleted', sa.Boolean(), nullable=False),
        sa.Column('title', sa.String(), nullable=False),
        sa.Column('description', sa.Text(), nullable=False),
        sa.Column('modified_dt', UTCDateTime, nullable=False),
        sa.Column('type', PyIntEnum(AttachmentType), nullable=False),
        sa.Column('link_url', sa.String(), nullable=True),
        sa.Column('file_id', sa.Integer(), nullable=True),
        sa.Column('protection_mode', PyIntEnum(ProtectionMode, exclude_values={ProtectionMode.public}),
                  nullable=False),
        sa.CheckConstraint('link_url IS NULL OR file_id IS NULL', name='link_or_file'),
        sa.CheckConstraint('type != 2 OR (link_url IS NOT NULL AND file_id IS NULL)', name='valid_link'),
        sa.ForeignKeyConstraint(['file_id'], ['attachments.files.id'], use_alter=True),
        sa.ForeignKeyConstraint(['folder_id'], ['attachments.folders.id']),
        sa.ForeignKeyConstraint(['user_id'], ['users.users.id']),
        sa.PrimaryKeyConstraint('id'),
        schema='attachments')
    op.create_table('files',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('attachment_id', sa.Integer(), nullable=False, index=True),
                    sa.Column('user_id', sa.Integer(), nullable=False, index=True),
                    sa.Column('created_dt', UTCDateTime, nullable=False),
                    sa.Column('filename', sa.String(), nullable=False),
                    sa.Column('content_type', sa.String(), nullable=False),
                    sa.Column('size', sa.BigInteger(), nullable=False),
                    sa.Column('storage_backend', sa.String(), nullable=False),
                    sa.Column('storage_file_id', sa.String(), nullable=False),
                    sa.ForeignKeyConstraint(['attachment_id'], ['attachments.attachments.id']),
                    sa.ForeignKeyConstraint(['user_id'], ['users.users.id']),
                    sa.PrimaryKeyConstraint('id'),
                    schema='attachments')
    op.create_foreign_key(None, 'attachments', 'files', ['file_id'], ['id'],
                          source_schema='attachments', referent_schema='attachments')
    op.create_table(
        'folder_principals',
        sa.Column('type', PyIntEnum(PrincipalType), nullable=True),
        sa.Column('mp_group_provider', sa.String(), nullable=True),
        sa.Column('mp_group_name', sa.String(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('folder_id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True, index=True),
        sa.Column('local_group_id', sa.Integer(), nullable=True, index=True),
        sa.CheckConstraint(
            'type != 1 OR (local_group_id IS NULL AND mp_group_provider IS NULL '
            'AND mp_group_name IS NULL AND user_id IS NOT NULL)',
            name='valid_user'),
        sa.CheckConstraint(
            'type != 2 OR (user_id IS NULL AND mp_group_provider IS NULL AND mp_group_name IS NULL AND '
            'local_group_id IS NOT NULL)',
            name='valid_local_group'),
        sa.CheckConstraint(
            'type != 3 OR (local_group_id IS NULL AND user_id IS NULL AND '
            'mp_group_provider IS NOT NULL AND mp_group_name IS NOT NULL)',
            name='valid_multipass_group'),
        sa.ForeignKeyConstraint(['folder_id'], ['attachments.folders.id']),
        sa.ForeignKeyConstraint(['local_group_id'], ['users.groups.id']),
        sa.ForeignKeyConstraint(['user_id'], ['users.users.id']),
        sa.PrimaryKeyConstraint('id'),
        schema='attachments')
    op.create_index(None, 'folder_principals', ['mp_group_provider', 'mp_group_name'], schema='attachments')
    op.create_table(
        'attachment_principals',
        sa.Column('type', PyIntEnum(PrincipalType), nullable=True),
        sa.Column('mp_group_provider', sa.String(), nullable=True),
        sa.Column('mp_group_name', sa.String(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('attachment_id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True, index=True),
        sa.Column('local_group_id', sa.Integer(), nullable=True, index=True),
        sa.CheckConstraint(
            'type != 1 OR (local_group_id IS NULL AND mp_group_provider IS NULL '
            'AND mp_group_name IS NULL AND user_id IS NOT NULL)',
            name='valid_user'),
        sa.CheckConstraint(
            'type != 2 OR (user_id IS NULL AND mp_group_provider IS NULL AND mp_group_name IS NULL AND '
            'local_group_id IS NOT NULL)',
            name='valid_local_group'),
        sa.CheckConstraint(
            'type != 3 OR (local_group_id IS NULL AND user_id IS NULL AND '
            'mp_group_provider IS NOT NULL AND mp_group_name IS NOT NULL)',
            name='valid_multipass_group'),
        sa.ForeignKeyConstraint(['attachment_id'], ['attachments.attachments.id']),
        sa.ForeignKeyConstraint(['local_group_id'], ['users.groups.id']),
        sa.ForeignKeyConstraint(['user_id'], ['users.users.id']),
        sa.PrimaryKeyConstraint('id'),
        schema='attachments')
    op.create_index(None, 'attachment_principals', ['mp_group_provider', 'mp_group_name'], schema='attachments')
def _before_create(target, connection, **kw):
    # SQLAlchemy doesn't create schemas so we need to take care of it...
    schemas = {table.schema for table in kw['tables']}
    for schema in schemas:
        CreateSchema(schema).execute_if(callable_=_should_create_schema).execute(connection)
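The `_should_create_schema` callable handed to `execute_if` above is not included in the snippet. A possible sketch, assuming the (ddl, target, connection, **kw) signature SQLAlchemy documents for `execute_if` callables and a dialect that provides `has_schema` (e.g. PostgreSQL); the body is illustrative rather than the original:

def _should_create_schema(ddl, target, connection, **kw):
    # ``ddl`` is the CreateSchema construct; ``ddl.element`` is the schema name.
    return not connection.dialect.has_schema(connection, ddl.element)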
def upgrade():
    op.execute(CreateSchema('plugin_livesync'))
    op.create_table('agents',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('backend_name', sa.String(), nullable=False),
                    sa.Column('name', sa.String(), nullable=False),
                    sa.Column('initial_data_exported', sa.Boolean(), nullable=False),
                    sa.Column('last_run', UTCDateTime, nullable=False),
                    sa.Column('settings', postgresql.JSON(astext_type=sa.Text()), nullable=False),
                    sa.PrimaryKeyConstraint('id'),
                    schema='plugin_livesync')
    op.create_table(
        'queues',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('agent_id', sa.Integer(), nullable=False, index=True),
        sa.Column('timestamp', UTCDateTime, nullable=False),
        sa.Column('processed', sa.Boolean(), nullable=False),
        sa.Column('change', PyIntEnum(ChangeType), nullable=False),
        sa.Column('type', PyIntEnum(EntryType), nullable=False),
        sa.Column('category_id', sa.Integer(), nullable=True, index=True),
        sa.Column('event_id', sa.Integer(), nullable=True, index=True),
        sa.Column('contribution_id', sa.Integer(), nullable=True, index=True),
        sa.Column('session_id', sa.Integer(), nullable=True, index=True),
        sa.Column('subcontribution_id', sa.Integer(), nullable=True, index=True),
        sa.CheckConstraint(
            'type != 1 OR (contribution_id IS NULL AND event_id IS NULL AND session_id '
            'IS NULL AND subcontribution_id IS NULL AND category_id IS NOT NULL)',
            name='valid_category_entry'),
        sa.CheckConstraint(
            'type != 2 OR (category_id IS NULL AND contribution_id IS NULL AND session_id '
            'IS NULL AND subcontribution_id IS NULL AND event_id IS NOT NULL)',
            name='valid_event_entry'),
        sa.CheckConstraint(
            'type != 3 OR (category_id IS NULL AND event_id IS NULL AND session_id '
            'IS NULL AND subcontribution_id IS NULL AND contribution_id IS NOT NULL)',
            name='valid_contribution_entry'),
        sa.CheckConstraint(
            'type != 4 OR (category_id IS NULL AND contribution_id IS NULL AND event_id '
            'IS NULL AND session_id IS NULL AND subcontribution_id IS NOT NULL)',
            name='valid_subcontribution_entry'),
        sa.CheckConstraint(
            'type != 5 OR (category_id IS NULL AND contribution_id IS NULL AND event_id '
            'IS NULL AND subcontribution_id IS NULL AND session_id IS NOT NULL)',
            name='valid_session_entry'),
        sa.ForeignKeyConstraint(['agent_id'], ['plugin_livesync.agents.id']),
        sa.ForeignKeyConstraint(['category_id'], ['categories.categories.id']),
        sa.ForeignKeyConstraint(['contribution_id'], ['events.contributions.id']),
        sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
        sa.ForeignKeyConstraint(['session_id'], ['events.sessions.id']),
        sa.ForeignKeyConstraint(['subcontribution_id'], ['events.subcontributions.id']),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_queues')),
        schema='plugin_livesync')
def upgrade():
    op.execute(CreateSchema('plugin_citadel'))
    op.create_table(
        'id_map',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('citadel_id', sa.Integer(), nullable=False, index=True, unique=True),
        sa.Column('entry_type', PyIntEnum(_EntryType), nullable=False),
        sa.Column('event_id', sa.Integer(), nullable=True, index=True, unique=True),
        sa.Column('contrib_id', sa.Integer(), nullable=True, index=True, unique=True),
        sa.Column('subcontrib_id', sa.Integer(), nullable=True, index=True, unique=True),
        sa.Column('attachment_id', sa.Integer(), nullable=True, index=True, unique=True),
        sa.Column('note_id', sa.Integer(), nullable=True, index=True, unique=True),
        sa.Column('attachment_file_id', sa.Integer(), nullable=True, index=True, unique=True),
        sa.CheckConstraint(
            'entry_type != 1 OR (event_id IS NOT NULL AND attachment_id IS NULL AND contrib_id IS NULL AND note_id IS NULL AND subcontrib_id IS NULL)',
            name='valid_event_entry'),
        sa.CheckConstraint(
            'entry_type != 2 OR (contrib_id IS NOT NULL AND attachment_id IS NULL AND event_id IS NULL AND note_id IS NULL AND subcontrib_id IS NULL)',
            name='valid_contribution_entry'),
        sa.CheckConstraint(
            'entry_type != 3 OR (subcontrib_id IS NOT NULL AND attachment_id IS NULL AND contrib_id IS NULL AND event_id IS NULL AND note_id IS NULL)',
            name='valid_subcontribution_entry'),
        sa.CheckConstraint(
            'entry_type != 4 OR (attachment_id IS NOT NULL AND contrib_id IS NULL AND event_id IS NULL AND note_id IS NULL AND subcontrib_id IS NULL)',
            name='valid_attachment_entry'),
        sa.CheckConstraint(
            'entry_type != 5 OR (note_id IS NOT NULL AND attachment_id IS NULL AND contrib_id IS NULL AND event_id IS NULL AND subcontrib_id IS NULL)',
            name='valid_note_entry'),
        sa.ForeignKeyConstraint(['attachment_id'], ['attachments.attachments.id']),
        sa.ForeignKeyConstraint(['contrib_id'], ['events.contributions.id']),
        sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
        sa.ForeignKeyConstraint(['note_id'], ['events.notes.id']),
        sa.ForeignKeyConstraint(['subcontrib_id'], ['events.subcontributions.id']),
        sa.ForeignKeyConstraint(['attachment_file_id'], ['attachments.files.id']),
        sa.PrimaryKeyConstraint('id'),
        schema='plugin_citadel')
def upgrade():
    op.execute(CreateSchema('event_surveys'))
    op.create_table('surveys',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('event_id', sa.Integer(), nullable=False, index=True),
                    sa.Column('title', sa.String(), nullable=False),
                    sa.Column('introduction', sa.Text(), nullable=False),
                    sa.Column('anonymous', sa.Boolean(), nullable=False),
                    sa.Column('require_user', sa.Boolean(), nullable=False),
                    sa.Column('submission_limit', sa.Integer(), nullable=True),
                    sa.Column('start_dt', UTCDateTime, nullable=True),
                    sa.Column('end_dt', UTCDateTime, nullable=True),
                    sa.Column('is_deleted', sa.Boolean(), nullable=False),
                    sa.Column('start_notification_sent', sa.Boolean(), nullable=False),
                    sa.Column('notifications_enabled', sa.Boolean(), nullable=False),
                    sa.Column('notify_participants', sa.Boolean(), nullable=False),
                    sa.Column('start_notification_emails', postgresql.ARRAY(sa.String()), nullable=False),
                    sa.Column('new_submission_emails', postgresql.ARRAY(sa.String()), nullable=False),
                    sa.CheckConstraint('anonymous OR require_user', name='valid_anonymous_user'),
                    sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
                    sa.PrimaryKeyConstraint('id'),
                    schema='event_surveys')
    op.create_table(
        'items',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('survey_id', sa.Integer(), nullable=False, index=True),
        sa.Column('parent_id', sa.Integer(), nullable=True, index=True),
        sa.Column('position', sa.Integer(), nullable=False),
        sa.Column('type', PyIntEnum(SurveyItemType), nullable=False),
        sa.Column('title', sa.String(), nullable=True),
        sa.Column('description', sa.Text(), nullable=False),
        sa.Column('is_required', sa.Boolean(), nullable=True),
        sa.Column('field_type', sa.String(), nullable=True),
        sa.Column('field_data', postgresql.JSON(), nullable=False),
        sa.Column('display_as_section', sa.Boolean(), nullable=True),
        sa.CheckConstraint(
            "type != 1 OR (title IS NOT NULL AND is_required IS NOT NULL AND field_type IS NOT NULL AND "
            "parent_id IS NOT NULL AND display_as_section IS NULL)",
            name='valid_question'),
        sa.CheckConstraint(
            "type != 2 OR (title IS NOT NULL AND is_required IS NULL AND field_type IS NULL AND "
            "field_data::text = '{}' AND parent_id IS NULL AND display_as_section IS NOT NULL)",
            name='valid_section'),
        sa.CheckConstraint(
            "type != 3 OR (title IS NULL AND is_required IS NULL AND field_type IS NULL "
            "AND field_data::text = '{}' AND parent_id IS NOT NULL AND display_as_section IS NULL)",
            name='valid_text'),
        sa.ForeignKeyConstraint(['survey_id'], ['event_surveys.surveys.id']),
        sa.ForeignKeyConstraint(['parent_id'], ['event_surveys.items.id']),
        sa.PrimaryKeyConstraint('id'),
        schema='event_surveys')
    op.create_table('submissions',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('survey_id', sa.Integer(), nullable=False, index=True),
                    sa.Column('user_id', sa.Integer(), nullable=True, index=True),
                    sa.Column('submitted_dt', UTCDateTime, nullable=False),
                    sa.ForeignKeyConstraint(['survey_id'], ['event_surveys.surveys.id']),
                    sa.ForeignKeyConstraint(['user_id'], ['users.users.id']),
                    sa.PrimaryKeyConstraint('id'),
                    schema='event_surveys')
    op.create_table('answers',
                    sa.Column('submission_id', sa.Integer(), nullable=False),
                    sa.Column('question_id', sa.Integer(), nullable=False),
                    sa.Column('data', postgresql.JSON(), nullable=False),
                    sa.ForeignKeyConstraint(['question_id'], ['event_surveys.items.id']),
                    sa.ForeignKeyConstraint(['submission_id'], ['event_surveys.submissions.id']),
                    sa.PrimaryKeyConstraint('submission_id', 'question_id'),
                    schema='event_surveys')
from flaskdb.database import db

app = Flask(__name__)


@app.route('/')
def new_user():
    db.session.add(User(username='******'))
    db.session.commit()
    return 'New user created!!'


if __name__ == '__main__':
    DATABASE_URI = os.getenv('DATABASE_URI')
    assert DATABASE_URI, 'Environment variable "DATABASE_URI" required.'
    app.config['SQLALCHEMY_DATABASE_URI'] = DATABASE_URI
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    db.app = app
    db.init_app(app)
    try:
        db.engine.execute(DropSchema('tests', cascade=True))
    except ProgrammingError:
        pass
    db.engine.execute(CreateSchema('tests'))
    db.create_all()
    app.run()