def upgrade():
    """Create the ``plugin_vc_zoom`` schema with its tables and indexes."""
    schema = 'plugin_vc_zoom'
    op.execute(CreateSchema(schema))
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'zoom_extensions',
        sa.Column('vc_room_id', sa.Integer(), nullable=False),
        sa.Column('url_zoom', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(['vc_room_id'], ['events.vc_rooms.id'],
                                name=op.f('fk_zoom_extensions_vc_room_id_vc_rooms')),
        sa.PrimaryKeyConstraint('vc_room_id', name=op.f('pk_zoom_extensions')),
        schema=schema,
    )
    op.create_index(op.f('ix_zoom_extensions_url_zoom'), 'zoom_extensions',
                    ['url_zoom'], unique=False, schema=schema)
    op.create_table(
        'zoom_licenses',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('license_id', sa.Text(), nullable=False),
        sa.Column('license_name', sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_zoom_licenses')),
        schema=schema,
    )
    op.create_index(op.f('ix_uq_zoom_licenses_license_id'), 'zoom_licenses',
                    ['license_id'], unique=True, schema=schema)
    op.create_index(op.f('ix_zoom_licenses_license_name'), 'zoom_licenses',
                    ['license_name'], unique=False, schema=schema)
def upgrade():
    """Create the ``plugin_checkin_webhook`` schema and its settings table."""
    schema = 'plugin_checkin_webhook'
    op.execute(CreateSchema(schema))
    # ### commands auto generated by Alembic ###
    op.create_table(
        'checkin_webhook_settings',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('event_id', sa.Integer(), nullable=False),
        sa.Column('webhookurl', sa.String(), nullable=False),
        sa.Column('ticket_template_id', sa.Integer(), nullable=False),
        sa.Column('send_json', sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(['event_id'], ['events.events.id'],
                                name=op.f('fk_checkin_webhook_settings_event_id_events')),
        sa.ForeignKeyConstraint(['ticket_template_id'], ['indico.designer_templates.id'],
                                name=op.f('fk_checkin_webhook_settings_ticket_template_id_designer_templates')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_checkin_webhook_settings')),
        schema=schema,
    )
    op.create_index(op.f('ix_checkin_webhook_settings_event_id'),
                    'checkin_webhook_settings', ['event_id'],
                    unique=False, schema=schema)
    op.create_index(op.f('ix_checkin_webhook_settings_ticket_template_id'),
                    'checkin_webhook_settings', ['ticket_template_id'],
                    unique=False, schema=schema)
def upgrade():
    """Create the ``oauth`` schema with the application and token tables."""
    schema = 'oauth'
    op.execute(CreateSchema(schema))
    op.create_table(
        'applications',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('description', sa.Text(), nullable=False),
        sa.Column('client_id', postgresql.UUID(), nullable=False),
        sa.Column('client_secret', postgresql.UUID, nullable=False),
        sa.Column('default_scopes', postgresql.ARRAY(sa.String()), nullable=False),
        sa.Column('redirect_uris', postgresql.ARRAY(sa.String()), nullable=False),
        sa.Column('is_enabled', sa.Boolean(), nullable=False),
        sa.Column('is_trusted', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('client_id'),
        schema=schema,
    )
    # case-insensitive uniqueness on the application name
    op.create_index(op.f('ix_uq_applications_name_lower'), 'applications',
                    [sa.text('lower(name)')], unique=True, schema=schema)
    op.create_table(
        'tokens',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('application_id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False, index=True),
        sa.Column('access_token', postgresql.UUID, nullable=False),
        sa.Column('scopes', postgresql.ARRAY(sa.String()), nullable=True),
        sa.Column('last_used_dt', UTCDateTime, nullable=True),
        sa.ForeignKeyConstraint(['application_id'], ['oauth.applications.id']),
        sa.ForeignKeyConstraint(['user_id'], ['users.users.id']),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('access_token'),
        # one token per (application, user) pair
        sa.UniqueConstraint('application_id', 'user_id'),
        schema=schema,
    )
def upgrade():
    """Create the ``plugin_cern_access`` schema and its request tables."""
    schema = 'plugin_cern_access'
    op.execute(CreateSchema(schema))
    op.create_table(
        'access_requests',
        sa.Column('registration_id', sa.Integer(), nullable=False),
        sa.Column('request_state', PyIntEnum(CERNAccessRequestState), nullable=False),
        sa.Column('reservation_code', sa.String(), nullable=False),
        sa.Column('birth_place', sa.String(), nullable=True),
        sa.Column('nationality', sa.String(), nullable=True),
        sa.Column('birth_date', sa.Date(), nullable=True),
        sa.ForeignKeyConstraint(['registration_id'],
                                ['event_registration.registrations.id']),
        sa.PrimaryKeyConstraint('registration_id'),
        schema=schema,
    )
    op.create_table(
        'access_request_regforms',
        sa.Column('form_id', sa.Integer(), nullable=False),
        sa.Column('request_state', PyIntEnum(CERNAccessRequestState), nullable=False),
        sa.ForeignKeyConstraint(['form_id'], ['event_registration.forms.id']),
        sa.PrimaryKeyConstraint('form_id'),
        schema=schema,
    )
def _before_create(target, connection, **kw):
    """Create any missing schemas before tables are created.

    SQLAlchemy does not create schemas on its own, so each distinct schema
    referenced by the tables about to be created is created here when absent;
    a ``db_schema_created`` signal is sent for each schema actually created.
    """
    for schema in {tbl.schema for tbl in kw['tables']}:
        # skip tables without an explicit schema and schemas that already exist
        if not schema or _schema_exists(connection, schema):
            continue
        CreateSchema(schema).execute(connection)
        signals.db_schema_created.send(str(schema), connection=connection)
def upgrade():
    """Create the ``plugin_livesync`` schema with the agent and queue tables."""
    schema = 'plugin_livesync'
    op.execute(CreateSchema(schema))
    op.create_table(
        'agents',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('backend_name', sa.String(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('initial_data_exported', sa.Boolean(), nullable=False),
        sa.Column('last_run', UTCDateTime(), nullable=False),
        sa.Column('settings', postgresql.JSON(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='agents_pkey'),
        schema=schema,
    )
    op.create_table(
        'queues',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('agent_id', sa.Integer(), nullable=False),
        sa.Column('timestamp', UTCDateTime(), nullable=False),
        sa.Column('processed', sa.Boolean(), nullable=False),
        sa.Column('change', PyIntEnum(ChangeType), nullable=False),
        sa.Column('type', sa.String(), nullable=False),
        sa.Column('category_id', sa.String()),
        sa.Column('event_id', sa.String()),
        sa.Column('contrib_id', sa.String()),
        sa.Column('subcontrib_id', sa.String()),
        sa.ForeignKeyConstraint(['agent_id'], ['plugin_livesync.agents.id'],
                                name='queues_agent_id_fkey'),
        sa.PrimaryKeyConstraint('id', name='queues_pkey'),
        sa.Index('ix_plugin_livesync_queues_agent_id', 'agent_id'),
        schema=schema,
    )
    # later migrations expect the old name...
    op.execute(_rename_constraint(schema, 'queues',
                                  'ck_queues_valid_enum_change',
                                  'queues_change_check'))
def _before_create(target, connection, **kw):
    """Create schemas for the tables about to be created, then ensure the
    ``indico_unaccent`` SQL function exists."""
    # SQLAlchemy doesn't create schemas so we need to take care of it...
    schemas = {table.schema for table in kw['tables']}
    for schema in schemas:
        # NOTE(review): `execute_if` attaches a conditional callable that is
        # normally consulted when the DDL runs via metadata events; chaining
        # `.execute()` directly here presumably still honors
        # `_should_create_schema` — confirm against the SQLAlchemy version in use.
        CreateSchema(schema).execute_if(
            callable_=_should_create_schema).execute(connection)
    # Create the indico_unaccent function
    create_unaccent_function(connection)
def upgrade():
    """Create the ``event_abstracts`` schema and link contributions to it."""
    schema = 'event_abstracts'
    op.execute(CreateSchema(schema))
    op.create_table(
        'abstracts',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('friendly_id', sa.Integer(), nullable=False),
        sa.Column('event_id', sa.Integer(), nullable=False, index=True),
        sa.Column('description', sa.Text(), nullable=False),
        sa.Column('accepted_track_id', sa.Integer(), nullable=True, index=True),
        sa.Column('accepted_type_id', sa.Integer(), nullable=True, index=True),
        sa.Column('type_id', sa.Integer(), nullable=True, index=True),
        sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
        sa.ForeignKeyConstraint(['accepted_type_id'], ['events.contribution_types.id']),
        sa.ForeignKeyConstraint(['type_id'], ['events.contribution_types.id']),
        # friendly ids are only unique within one event
        sa.UniqueConstraint('friendly_id', 'event_id'),
        sa.PrimaryKeyConstraint('id'),
        schema=schema,
    )
    op.create_table(
        'abstract_field_values',
        sa.Column('data', postgresql.JSON(), nullable=False),
        sa.Column('abstract_id', sa.Integer(), nullable=False, index=True),
        sa.Column('contribution_field_id', sa.Integer(), nullable=False, index=True),
        sa.ForeignKeyConstraint(['abstract_id'], ['event_abstracts.abstracts.id']),
        sa.ForeignKeyConstraint(['contribution_field_id'],
                                ['events.contribution_fields.id'],
                                name='fk_abstract_field_values_contribution_field'),
        sa.PrimaryKeyConstraint('abstract_id', 'contribution_field_id'),
        schema=schema,
    )
    # point the existing contributions table at the new abstracts table
    op.create_foreign_key(None, 'contributions', 'abstracts',
                          ['abstract_id'], ['id'],
                          source_schema='events', referent_schema=schema)
def _before_create(target, connection, **kw):
    """Create missing schemas and the unaccent function before table creation.

    SQLAlchemy does not create schemas itself, so every schema referenced by
    the tables about to be created is created here when absent.
    """
    for schema in {tbl.schema for tbl in kw['tables']}:
        if _schema_exists(connection, schema):
            continue
        CreateSchema(schema).execute(connection)
        # legacy Python 2 code path: the signal expects a `unicode` schema name
        signals.db_schema_created.send(unicode(schema), connection=connection)
    # Create the indico_unaccent function
    create_unaccent_function(connection)
def upgrade():
    """Create the ``users`` schema with the user and email tables."""
    schema = 'users'
    op.execute(CreateSchema(schema))
    op.create_table(
        'users',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('first_name', sa.String(), nullable=False, index=True),
        sa.Column('last_name', sa.String(), nullable=False, index=True),
        sa.Column('title', PyIntEnum(UserTitle), nullable=False),
        sa.Column('phone', sa.String(), nullable=False),
        sa.Column('affiliation', sa.String(), nullable=False, index=True),
        sa.Column('address', sa.Text(), nullable=False),
        sa.Column('merged_into_id', sa.Integer(), nullable=True),
        sa.Column('is_admin', sa.Boolean(), nullable=False, index=True),
        sa.Column('is_blocked', sa.Boolean(), nullable=False),
        sa.Column('is_deleted', sa.Boolean(), nullable=False),
        sa.Column('is_pending', sa.Boolean(), nullable=False),
        # self-referencing FK: a user may be merged into another user
        sa.ForeignKeyConstraint(['merged_into_id'], ['users.users.id']),
        sa.PrimaryKeyConstraint('id'),
        schema=schema,
    )
    op.create_table(
        'emails',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False, index=True),
        sa.Column('email', sa.String(), nullable=False, index=True),
        sa.Column('is_primary', sa.Boolean(), nullable=False),
        sa.Column('is_user_deleted', sa.Boolean(), nullable=False),
        sa.CheckConstraint('email = lower(email)', name='lowercase_email'),
        sa.ForeignKeyConstraint(['user_id'], ['users.users.id']),
        sa.PrimaryKeyConstraint('id'),
        schema=schema,
    )
    # an email may only be claimed once across non-deleted users
    op.create_index(None, 'emails', ['email'], unique=True, schema=schema,
                    postgresql_where=sa.text('NOT is_user_deleted'))
    # each non-deleted user has at most one primary email
    op.create_index(None, 'emails', ['user_id'], unique=True, schema=schema,
                    postgresql_where=sa.text('is_primary AND NOT is_user_deleted'))
def create_schema(schema):
    """Create *schema* if it doesn't already exist.

    :param schema: name of the schema to create
    """
    # NOTE(review): the schema name is interpolated straight into SQL, so
    # `schema` must come from trusted code, never from user input.
    result = engine.execute(SCHEMA_EXISTS_QUERY % schema)
    # `rowcount` is not reliably populated for SELECT statements on every
    # DBAPI driver; checking for an actual result row is portable.
    if result.first() is None:
        engine.execute(CreateSchema(schema))
def add(self, name):
    """Add a new schema.

    :param name: name of the schema to create
    :rtype: MigrationSchema instance
    """
    with cnx(self.migration) as connection:
        connection.execute(CreateSchema(name))
    return MigrationSchema(self.migration, name)
def _before_create(target, connection, **kw):
    """Ensure schemas and our custom SQL functions exist before tables.

    SQLAlchemy never creates schemas on its own, so every schema referenced
    by the tables about to be created is created here when absent, with a
    ``db_schema_created`` signal sent for each one.
    """
    for schema in {tbl.schema for tbl in kw['tables']}:
        if _schema_exists(connection, schema):
            continue
        CreateSchema(schema).execute(connection)
        signals.core.db_schema_created.send(str(schema), connection=connection)
    # Create our custom functions
    create_unaccent_function(connection)
    create_natsort_function(connection)
def test_dataset(engine):
    """Yield a temporary randomly-named dataset (schema), dropped on teardown.

    :param engine: SQLAlchemy engine used to create and drop the schema
    """
    dataset = "".join(random.choices(string.ascii_lowercase, k=10))
    with engine.connect() as conn:
        # lazy %-style args: the message is only formatted if actually emitted
        logging.info("Creating dataset %s", dataset)
        conn.execute(CreateSchema(dataset))
        yield dataset
        logging.info("Dropping dataset %s", dataset)
        conn.execute(DropSchema(dataset))
def _create_schemas(self, metadata):
    """Create every non-default schema referenced by *metadata*'s tables."""
    if self._ddl_created:
        # DDL was already emitted for this instance - nothing to do
        return
    referenced = {t.schema for t in metadata.tables.values() if t.schema}
    for schema in referenced:
        # the default schema always exists; only create the others
        if schema == self.default_schema:
            continue
        self.engine.execute(CreateSchema(schema, quote=True))
def upgrade():
    """Create the ``plugin_example`` schema and its ``foo`` table."""
    schema = 'plugin_example'
    op.execute(CreateSchema(schema))
    op.create_table(
        'foo',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('bar', sa.String(), nullable=True),
        sa.Column('location_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['location_id'], ['roombooking.locations.id']),
        sa.PrimaryKeyConstraint('id'),
        schema=schema,
    )
def upgrade():
    """Create the ``plugin_vc_vidyo`` schema and its extensions table."""
    schema = 'plugin_vc_vidyo'
    op.execute(CreateSchema(schema))
    op.create_table(
        'vidyo_extensions',
        sa.Column('vc_room_id', sa.Integer(), nullable=False),
        sa.Column('extension', sa.BigInteger(), nullable=True),
        sa.Column('owned_by_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['vc_room_id'], ['events.vc_rooms.id'],
                                name='vidyo_extensions_vc_room_id_fkey'),
        sa.PrimaryKeyConstraint('vc_room_id', name='vidyo_extensions_pkey'),
        sa.Index('ix_plugin_vc_vidyo_vidyo_extensions_extension', 'extension'),
        sa.Index('ix_plugin_vc_vidyo_vidyo_extensions_owned_by_id', 'owned_by_id'),
        schema=schema,
    )
def create_namespaces():
    """Create the loaded table namespaces (schemas) in database."""
    ext = current_app.extensions['bdc-db']
    with _db.session.begin_nested():
        for namespace in ext.namespaces:
            # only create namespaces that are not present yet
            if _db.engine.dialect.has_schema(_db.engine, namespace):
                continue
            click.secho(f'Creating namespace {namespace}...', bold=True, fg='yellow')
            _db.engine.execute(CreateSchema(namespace))
    _db.session.commit()
    click.secho('Namespaces created!', bold=True, fg='green')
def upgrade():
    """Create the ``categories`` schema and move/add its tables."""
    schema = 'categories'
    op.execute(CreateSchema(schema))
    # relocate the existing index table into the new schema
    op.execute('ALTER TABLE indico.category_index SET SCHEMA categories')
    op.create_table(
        'legacy_id_map',
        sa.Column('legacy_category_id', sa.String(), nullable=False, index=True),
        sa.Column('category_id', sa.Integer(), autoincrement=False, nullable=False),
        sa.PrimaryKeyConstraint('legacy_category_id', 'category_id'),
        schema=schema,
    )
def make_create_ddl(self, metadata: MetaData) -> DdlString:
    """Render CREATE DDL (schema plus tables) for *metadata* as one string.

    :param metadata: SQLAlchemy metadata describing the schema and tables
    :return: the DDL statements, each terminated by a semicolon and newline
    :raises ValueError: if no dialect is configured on this instance
    """
    if not self.dialect:
        raise ValueError(
            "Dialect must be specified to use default metadata creation function"
        )
    ddl = []
    if metadata.schema:
        ddl.append(str(CreateSchema(metadata.schema).compile(dialect=self.dialect)))
    for table_obj in metadata.tables.values():
        ddl.append(str(CreateTable(table_obj).compile(dialect=self.dialect)))
    # join the list directly; the former `d for d in ddl` generator was redundant
    return ";\n".join(ddl) + ";\n"
async def create_dataset(dataset) -> None:
    """Create a dataset record plus its schema, grant read access, and verify."""
    # Create dataset record and dataset schema
    async with ContextEngine("WRITE"):
        await datasets.create_dataset(dataset)
        await db.status(CreateSchema(dataset))
        await db.status(
            f"GRANT USAGE ON SCHEMA {dataset} TO {READER_USERNAME};")
        await db.status(
            f"ALTER DEFAULT PRIVILEGES IN SCHEMA {dataset} GRANT SELECT ON TABLES TO {READER_USERNAME};"
        )
    # verify both the record and the schema exist
    row = await datasets.get_dataset(dataset)
    assert row.dataset == dataset
    assert dataset == await db.scalar(
        f"SELECT schema_name FROM information_schema.schemata WHERE schema_name = '{dataset}';"
    )
def connection(sync_engine):
    """Yield a connection with a small test metadata created (and torn down)."""
    with sync_engine.connect() as conn:
        metadata = MetaData()
        Table("table", metadata, Column("column1", Integer, primary_key=True))
        Table("table2", metadata, Column("fk_column", ForeignKey("table.column1")))
        # sqlite has no CREATE SCHEMA support, so skip the schema-based table there
        supports_schemas = conn.dialect.name != "sqlite"
        if supports_schemas:
            conn.execute(CreateSchema("altschema"))
            Table("table3", metadata, Column("fk_column", Integer), schema="altschema")
        metadata.create_all(conn)
        yield conn
        if supports_schemas:
            metadata.drop_all(conn)
            conn.execute(DropSchema("altschema"))
def upgrade():
    """Create the ``plugin_vc_zoom`` schema and its meetings table."""
    # ### commands auto generated by Alembic - please adjust! ###
    schema = 'plugin_vc_zoom'
    op.execute(CreateSchema(schema))
    op.create_table(
        'zoom_meetings',
        sa.Column('vc_room_id', sa.Integer(), nullable=False),
        sa.Column('meeting', sa.BigInteger(), nullable=True),
        sa.Column('url_zoom', sa.Text(), nullable=False),
        sa.Column('owned_by_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['owned_by_id'], ['users.users.id'],
                                name=op.f('fk_zoom_meetings_owned_by_id_users')),
        sa.ForeignKeyConstraint(['vc_room_id'], ['events.vc_rooms.id'],
                                name=op.f('fk_zoom_meetings_vc_room_id_vc_rooms')),
        sa.PrimaryKeyConstraint('vc_room_id', name=op.f('pk_zoom_meetings')),
        schema=schema,
    )
    op.create_index(op.f('ix_zoom_meetings_meeting'), 'zoom_meetings',
                    ['meeting'], unique=False, schema=schema)
    op.create_index(op.f('ix_zoom_meetings_owned_by_id'), 'zoom_meetings',
                    ['owned_by_id'], unique=False, schema=schema)
    op.create_index(op.f('ix_zoom_meetings_url_zoom'), 'zoom_meetings',
                    ['url_zoom'], unique=False, schema=schema)
def upgrade():
    """Create the ``event_editing`` schema and its file types table."""
    schema = 'event_editing'
    op.execute(CreateSchema(schema))
    op.create_table(
        'file_types',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('event_id', sa.Integer(), nullable=False, index=True),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('extensions', postgresql.ARRAY(sa.String()), nullable=False),
        sa.Column('allow_multiple_files', sa.Boolean(), nullable=False),
        sa.Column('required', sa.Boolean(), nullable=False),
        sa.Column('publishable', sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
        sa.PrimaryKeyConstraint('id'),
        schema=schema,
    )
    # file type names are unique per event, case-insensitively
    op.create_index('ix_uq_file_types_event_id_name_lower', 'file_types',
                    ['event_id', sa.text('lower(name)')], unique=True,
                    schema=schema)
def upgrade():
    """Create the ``plugin_vc_vidyo`` schema and its extensions table."""
    schema = 'plugin_vc_vidyo'
    op.execute(CreateSchema(schema))
    op.create_table(
        'vidyo_extensions',
        sa.Column('vc_room_id', sa.Integer(), nullable=False),
        sa.Column('extension', sa.BigInteger(), nullable=True, index=True),
        sa.Column('owned_by_id', sa.Integer(), nullable=False, index=True),
        sa.ForeignKeyConstraint(['owned_by_id'], ['users.users.id']),
        sa.ForeignKeyConstraint(['vc_room_id'], ['events.vc_rooms.id']),
        sa.PrimaryKeyConstraint('vc_room_id'),
        schema=schema,
    )
def database(app, request):
    """Provide a database bound to *app* with a fresh ``tests`` schema.

    Drops any leftover schema from a previous run, recreates everything,
    and registers a finalizer that tears the schema down again.
    """
    db.app = app
    db.init_app(app)
    db.drop_all()
    try:
        db.engine.execute(DropSchema('tests', cascade=True))
    except ProgrammingError:
        # the schema didn't exist yet - nothing to drop
        pass
    db.engine.execute(CreateSchema('tests'))
    db.create_all()

    @request.addfinalizer
    def drop_database():
        db.drop_all()
        db.engine.execute(DropSchema('tests', cascade=True))

    return db
def upgrade():
    """Create the ``event_paper_reviewing`` schema with roles and files."""
    schema = 'event_paper_reviewing'
    op.execute(CreateSchema(schema))
    op.create_table(
        'contribution_roles',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False, index=True),
        sa.Column('contribution_id', sa.Integer(), nullable=False, index=True),
        sa.Column('role', PyIntEnum(PaperReviewingRoleType), nullable=False, index=True),
        sa.ForeignKeyConstraint(['contribution_id'], ['events.contributions.id']),
        sa.ForeignKeyConstraint(['user_id'], ['users.users.id']),
        sa.PrimaryKeyConstraint('id'),
        schema=schema,
    )
    op.create_table(
        'paper_files',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('contribution_id', sa.Integer(), nullable=False, index=True),
        sa.Column('revision_id', sa.Integer(), nullable=True),
        sa.Column('storage_backend', sa.String(), nullable=False),
        sa.Column('content_type', sa.String(), nullable=False),
        sa.Column('size', sa.BigInteger(), nullable=False),
        sa.Column('storage_file_id', sa.String(), nullable=False),
        sa.Column('filename', sa.String(), nullable=False),
        sa.Column('created_dt', UTCDateTime, nullable=False),
        sa.ForeignKeyConstraint(['contribution_id'], ['events.contributions.id']),
        sa.PrimaryKeyConstraint('id'),
        schema=schema,
    )
def upgrade():
    """Create the ``plugin_chat`` schema with chatrooms and their events."""
    schema = 'plugin_chat'
    op.execute(CreateSchema(schema))
    op.create_table(
        'chatrooms',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('jid_node', sa.String(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('description', sa.Text(), nullable=False),
        sa.Column('password', sa.String(), nullable=False),
        sa.Column('custom_server', sa.String(), nullable=False),
        sa.Column('created_by_id', sa.Integer(), nullable=False, index=True),
        sa.Column('created_dt', UTCDateTime, nullable=False),
        sa.Column('modified_dt', UTCDateTime, nullable=True),
        sa.ForeignKeyConstraint(['created_by_id'], ['users.users.id']),
        sa.PrimaryKeyConstraint('id'),
        # a JID node may only exist once per server
        sa.UniqueConstraint('jid_node', 'custom_server'),
        schema=schema,
    )
    op.create_table(
        'chatroom_events',
        sa.Column('event_id', sa.Integer(), autoincrement=False, nullable=False,
                  index=True),
        sa.Column('chatroom_id', sa.Integer(), autoincrement=False, nullable=False,
                  index=True),
        sa.Column('hidden', sa.Boolean(), nullable=False),
        sa.Column('show_password', sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(['chatroom_id'], ['plugin_chat.chatrooms.id']),
        sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
        sa.PrimaryKeyConstraint('event_id', 'chatroom_id'),
        schema=schema,
    )
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    print("run_migrations_online")
    print(f"environment:{api_config.environment}")
    print(f"connection string:{connection_string}")
    # NullPool: each migration run gets its own short-lived connections
    connectible = create_engine(connection_string, poolclass=pool.NullPool)

    def include_object(object, name, type_, reflected, compare_to):
        # Schema filtering is currently disabled: every object is included.
        # To restrict autogenerate to the 'Common' schema, re-enable:
        #     if type_ == "table":
        #         return object.schema == 'Common'
        return True

    # Create schema; if it already exists, skip this
    try:
        connectible.execute(CreateSchema("Common"))
    except sqlalchemy.exc.ProgrammingError:
        pass

    with connectible.connect() as connection:
        context.configure(connection=connection,
                          target_metadata=target_metadata,
                          compare_type=True,
                          compare_server_default=True,
                          include_schemas=True,
                          version_table='AlembicVersion',
                          version_table_schema='Common',
                          include_object=include_object)
        with context.begin_transaction():
            context.run_migrations()
def upgrade():
    """Create the ``plugin_outlook`` schema and its queue table."""
    schema = 'plugin_outlook'
    op.execute(CreateSchema(schema))
    op.create_table(
        'queue',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False, index=True),
        sa.Column('event_id', sa.Integer(), nullable=False, index=True),
        sa.Column('action', PyIntEnum(OutlookAction), nullable=False),
        sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
        sa.ForeignKeyConstraint(['user_id'], ['users.users.id']),
        sa.PrimaryKeyConstraint('id'),
        schema=schema,
    )
    op.create_index(None, 'queue', ['user_id', 'event_id', 'action'],
                    schema=schema)