def _add_timetable_consistency_trigger(target, conn, **kw): sql = """ CREATE CONSTRAINT TRIGGER consistent_timetable AFTER INSERT OR UPDATE OF event_id, session_id, session_block_id, duration ON {} DEFERRABLE INITIALLY DEFERRED FOR EACH ROW EXECUTE PROCEDURE events.check_timetable_consistency('contribution'); """.format(target.fullname) DDL(sql).execute(conn)
def _add_timetable_consistency_trigger(target, conn, **kw):
    sql = '''
        CREATE CONSTRAINT TRIGGER consistent_timetable
        AFTER INSERT OR UPDATE OF duration
        ON {}
        DEFERRABLE INITIALLY DEFERRED
        FOR EACH ROW
        EXECUTE PROCEDURE events.check_timetable_consistency('break');
    '''.format(target.fullname)
    DDL(sql).execute(conn)
def _add_timetable_consistency_trigger(target, conn, **kw): sql = """ CREATE CONSTRAINT TRIGGER consistent_timetable AFTER UPDATE OF start_dt, end_dt ON {table} DEFERRABLE INITIALLY DEFERRED FOR EACH ROW EXECUTE PROCEDURE events.check_timetable_consistency('event'); """.format(table=target.fullname) DDL(sql).execute(conn)
def _add_deletion_consistency_trigger(target, conn, **kw):
    sql = """
        CREATE CONSTRAINT TRIGGER consistent_deleted
        AFTER INSERT OR UPDATE OF category_id, is_deleted
        ON {table}
        DEFERRABLE INITIALLY DEFERRED
        FOR EACH ROW
        EXECUTE PROCEDURE categories.check_consistency_deleted();
    """.format(table=target.fullname)
    DDL(sql).execute(conn)
def _add_cycle_check_trigger(target, conn, **kw):
    sql = """
        CREATE CONSTRAINT TRIGGER no_cycles
        AFTER INSERT OR UPDATE OF parent_id
        ON {table}
        NOT DEFERRABLE
        FOR EACH ROW
        EXECUTE PROCEDURE categories.check_cycles();
    """.format(table=target.fullname)
    DDL(sql).execute(conn)
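All five listeners above use the (target, connection, **kw) signature that SQLAlchemy passes to DDL event callbacks, and each formats its trigger statement against target.fullname. A minimal wiring sketch; the table definition is illustrative, not taken from the source:

from sqlalchemy import Column, Integer, MetaData, Table, event

metadata = MetaData()

# Illustrative stand-in for the real contributions table.
contributions = Table(
    'contributions', metadata,
    Column('id', Integer, primary_key=True),
    Column('event_id', Integer),
    Column('session_id', Integer),
    Column('session_block_id', Integer),
    Column('duration', Integer),
)

# 'after_create' fires once the table exists, so the CREATE CONSTRAINT
# TRIGGER statement can reference it on the same connection.
event.listen(contributions, 'after_create', _add_timetable_consistency_trigger)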
def sync_db():
    from platformio_api.models import LibFTS
    event.listen(
        LibFTS.__table__, "after_create",
        DDL("ALTER TABLE %s ADD FULLTEXT(name, description, keywords, "
            "headerslist, authornames, frameworkslist, platformslist)"
            % LibFTS.__tablename__))
    Base.metadata.create_all(bind=engine)
def setup(self):
    self.col_num = 150
    self.metadata = MetaData(testing.db)
    t = Table(
        "base_table", self.metadata,
        *[Column("long_named_column_number_%d" % i, Integer)
          for i in range(self.col_num)])
    self.view_str = view_str = (
        "CREATE VIEW huge_named_view AS SELECT %s FROM base_table"
        % (",".join("long_named_column_number_%d" % i
                    for i in range(self.col_num))))
    assert len(view_str) > 4000
    event.listen(t, "after_create", DDL(view_str))
    event.listen(t, "before_drop", DDL("DROP VIEW huge_named_view"))
    self.metadata.create_all()
def define_tables(cls, metadata):
    Table(
        "cattable", metadata,
        Column("id", Integer),
        Column("description", String(50)),
        PrimaryKeyConstraint("id", name="PK_cattable"),
    )
    Table(
        "matchtable", metadata,
        Column("id", Integer),
        Column("title", String(200)),
        Column("category_id", Integer, ForeignKey("cattable.id")),
        PrimaryKeyConstraint("id", name="PK_matchtable"),
    )
    event.listen(
        metadata, "before_create",
        DDL("CREATE FULLTEXT CATALOG Catalog AS DEFAULT"),
    )
    event.listen(
        metadata, "after_create",
        DDL("CREATE FULLTEXT INDEX ON cattable (description) "
            "KEY INDEX PK_cattable"),
    )
    event.listen(
        metadata, "after_create",
        DDL("CREATE FULLTEXT INDEX ON matchtable (title) "
            "KEY INDEX PK_matchtable"),
    )
    event.listen(
        metadata, "after_drop",
        DDL("DROP FULLTEXT CATALOG Catalog"),
    )
def intercept_authenticator():
    """Replace the existing authenticate function with our custom one."""
    meta = MetaData(bind=Session.get_bind(), reflect=True)
    if ('user' in meta.tables
            and 'login_attempts' not in meta.tables['user'].columns):
        LOG.warning("'login_attempts' field does not exist, adding...")
        DDL(
            "ALTER TABLE public.user ADD COLUMN login_attempts SMALLINT DEFAULT 0"
        ).execute(Session.get_bind())
    UsernamePasswordAuthenticator.authenticate = QGOVAuthenticator().authenticate
def define_tables(cls, metadata):
    col_num = 150
    t = Table(
        "base_table", metadata,
        *[Column("long_named_column_number_%d" % i, Integer)
          for i in range(col_num)],
    )
    cls.view_str = view_str = (
        "CREATE VIEW huge_named_view AS SELECT %s FROM base_table"
        % (",".join("long_named_column_number_%d" % i
                    for i in range(col_num))))
    assert len(view_str) > 4000
    event.listen(t, "after_create", DDL(view_str))
    event.listen(t, "before_drop", DDL("DROP VIEW huge_named_view"))
def setup_bigint_id_for_all_tables(metadata):
    """Register an after_create listener per table that applies its
    bigint-id DDL.

    This is aimed at the Base.metadata.create_all() flow rather than at
    migrations; Alembic migrations handle the same change through their
    own flow.
    """
    for table in metadata.sorted_tables:
        next_bigint_id_sql = generate_next_bigint_id_sql_for_table(table)
        if next_bigint_id_sql:
            alter_table_bigint_id = DDL(next_bigint_id_sql)
            event.listen(table, 'after_create', alter_table_bigint_id)
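generate_next_bigint_id_sql_for_table is not shown here. A hypothetical sketch of its shape, assuming it returns ALTER TABLE DDL for tables that need a bigint id default and None otherwise; next_bigint_id() is an assumed server-side function, not from the source:

def generate_next_bigint_id_sql_for_table(table):
    # Hypothetical: skip tables without an id column so the caller's
    # truthiness check filters them out.
    if 'id' not in table.columns:
        return None
    return ("ALTER TABLE {0} ALTER COLUMN id "
            "SET DEFAULT next_bigint_id('{0}')".format(table.name))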
def create_schemas(*args, **kwargs):
    try:
        from app.db.models import get_schemas
    except ImportError:
        schemas = ['admin']
    else:
        schemas = get_schemas()
    for schema in schemas:
        _sa.engine.execute(DDL(f'CREATE SCHEMA IF NOT EXISTS "{schema}"'))
    _sa.session.commit()
def disable_partition_and_purge_trigger(db_engine, table_name):
    """
    :param db_engine: The database engine on which to execute the SQL
        statements.
    :param table_name: The table name used to derive the names of the
        insert trigger and partition handler.
    :return: None. Only prints output.
    """
    drop_trigger_ddl_stmt = (
        "DROP RULE IF EXISTS autocall_createfuturepartitions_%s ON %s RESTRICT; "
        "DROP TRIGGER IF EXISTS purge_%s_trigger ON %s RESTRICT;"
        % (table_name, table_name, table_name, table_name))
    db_engine.execute(DDL(drop_trigger_ddl_stmt))
    drop_function_ddl_stmt = (
        "DROP FUNCTION IF EXISTS createfuturepartitions_%s(timestamp without time zone) RESTRICT; "
        "DROP FUNCTION IF EXISTS purge_%s() RESTRICT;"
        % (table_name, table_name))
    db_engine.execute(DDL(drop_function_ddl_stmt))
    print(
        "Purge trigger and partition rule disabled for table '%s'. "
        "Existing partitions left as is." % table_name)
def create_db(self):
    '''Create the tables in the database.'''
    self.Base.metadata.bind = self.engine
    insert_update_trigger = DDL('''
        CREATE TRIGGER insert_update_trigger
        AFTER INSERT OR UPDATE ON file
        FOR EACH ROW
        SET NEW.source = concat(NEW.host, ':', NEW.base_path, '/', NEW.filename)
    ''')
    event.listen(File.__table__, 'after_create', insert_update_trigger)
    self.Base.metadata.create_all()
def create_tables():
    engine.execute(
        DDL('CREATE SCHEMA IF NOT EXISTS {schema}'.format(schema=schema_name)))
    SABase.metadata.create_all()
    engine.execute(
        DDL('''
            DROP VIEW IF EXISTS {schema}.recommendations;
            CREATE OR REPLACE VIEW {schema}.recommendations AS
            SELECT t.name
                 , p.name AS recommended_by
                 , t.notes
                 , t.location
                 , k.kind
                 , t.created_at AS created
            FROM things t
            INNER JOIN people p USING (pid)
            INNER JOIN kind k USING (kid);
        '''.format(schema=schema_name)))
def check_postgres_database_exist():
    db = create_engine(database_uri)
    try:
        db.connect()
        db.execute('SELECT * FROM History')
    except Exception:
        # Database objects are missing; create the schema and tables.
        print("Create new postgres database")
        event.listen(Base.metadata, 'before_create',
                     DDL("CREATE SCHEMA IF NOT EXISTS sqlalchemyqueenspuzzle"))
        db = create_engine(database_uri)
        Base.metadata.create_all(db)
def initialise_db(app):
    from application.models.models import CommunicationTemplate, CommunicationType  # NOQA  # pylint: disable=wrong-import-position
    from application.models.classification_type import ClassificationType  # NOQA  # pylint: disable=wrong-import-position

    # Set up the database.
    with app.app_context():
        db.init_app(app)
        # Create the schema first; the tables can't be created without it.
        event.listen(db.metadata, 'before_create',
                     DDL("CREATE SCHEMA IF NOT EXISTS templatesvc"))
        # Create the tables from the models.
        db.create_all()
def create_db():
    from sqlalchemy import DDL
    from sqlalchemy import event
    from split_expenses_api.api.database import Base, db_engine, get_default_schema

    default_schema = get_default_schema()
    # once=True: fire this listener only the first time the event occurs.
    event.listen(Base.metadata, 'before_create',
                 DDL("CREATE SCHEMA IF NOT EXISTS {}".format(default_schema)),
                 once=True)
    Base.metadata.create_all(db_engine)
def _update_version(connection, version):
    """Update the version stored in the db to the given version.

    Args:
        connection (sqlalchemy connection): sqlalchemy session in which to
            update the version.
        version (int): version of the migration.
    """
    if connection.engine.name == 'sqlite':
        connection.execute('PRAGMA user_version = {}'.format(version))
    elif connection.engine.name == 'postgresql':
        connection.execute(
            DDL('CREATE SCHEMA IF NOT EXISTS {};'.format(POSTGRES_SCHEMA_NAME)))
        connection.execute(
            DDL('CREATE SCHEMA IF NOT EXISTS {};'.format(
                POSTGRES_PARTITION_SCHEMA_NAME)))
        connection.execute(
            'CREATE TABLE IF NOT EXISTS {}.user_version(version INTEGER NOT NULL);'
            .format(POSTGRES_SCHEMA_NAME))
        # Upsert: update the row if it exists, otherwise insert it.
        if connection.execute('SELECT * FROM {}.user_version;'.format(
                POSTGRES_SCHEMA_NAME)).fetchone():
            connection.execute(
                'UPDATE {}.user_version SET version = {};'.format(
                    POSTGRES_SCHEMA_NAME, version))
        else:
            connection.execute(
                'INSERT INTO {}.user_version (version) VALUES ({})'.format(
                    POSTGRES_SCHEMA_NAME, version))
    else:
        raise DatabaseMissingError(
            'Do not know how to migrate {} engine.'.format(
                connection.engine.name))
def test_ensure_version_is_qualified(self, future_connection, testing_engine,
                                     metadata):
    default_schema_name = future_connection.dialect.default_schema_name
    event.listen(
        metadata, "after_create",
        DDL("""
            CREATE OR REPLACE FUNCTION %s.version() RETURNS integer AS $$
            BEGIN
                return 0;
            END;
            $$ LANGUAGE plpgsql;
        """ % (default_schema_name,)),
    )
    event.listen(
        metadata, "before_drop",
        DDL("DROP FUNCTION %s.version" % (default_schema_name,)),
    )
    metadata.create_all(future_connection)
    future_connection.commit()

    e = testing_engine()

    @event.listens_for(e, "do_connect")
    def receive_do_connect(dialect, conn_rec, cargs, cparams):
        conn = dialect.dbapi.connect(*cargs, **cparams)
        cursor = conn.cursor()
        cursor.execute(
            "set search_path = %s,pg_catalog" % (default_schema_name,))
        cursor.close()
        return conn

    with e.connect():
        pass

    eq_(
        e.dialect.server_version_info,
        future_connection.dialect.server_version_info,
    )
def createtable(self):
    from sqlalchemy import event, DDL
    from sqlalchemy.schema import CreateSchema
    # event.listen(db.metadata, 'before_create', CreateSchema('my_schema'))
    event.listen(db.metadata, 'before_create',
                 DDL("CREATE SCHEMA IF NOT EXISTS used_car CHARACTER SET = utf8mb4;"))
    event.listen(db.metadata, 'before_create',
                 DDL("CREATE SCHEMA IF NOT EXISTS 법정동정보 CHARACTER SET = utf8mb4;"))
    event.listen(db.metadata, 'before_create',
                 DDL("CREATE SCHEMA IF NOT EXISTS 아파트정보 CHARACTER SET = utf8mb4;"))
    try:
        from ..models.aptinfo import AptInfo
        from ..models.bjdinfo import BjdInfo
        db.create_all()
        result = '<h1>It works.</h1>'
    except Exception as e:
        print('Table creation error')
        print(e.args)
        result = '<h1>Something is broken.</h1>' + str(e.args)
    try:
        from ..models.aptinfo import AptInfo
        from ..models.bjdinfo import BjdInfo
        df = pd.read_csv(filepath_or_buffer=self.bjdCode_path, encoding='utf-8')
        df.to_sql(name="법정동정보", con=db.engine, if_exists='replace')
        result = 'nothing'
    except Exception as e:
        print('Table creation error')
        print(e.args)
        result = '<h1>Something is broken.</h1>' + str(e.args)
    return result
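The commented-out CreateSchema line above is the portable route: sqlalchemy.schema.CreateSchema renders CREATE SCHEMA for the active dialect instead of hard-coding MySQL syntax. A sketch; note that the if_not_exists flag requires SQLAlchemy 2.0+, and per-schema character-set options still need raw DDL:

from sqlalchemy import MetaData, event
from sqlalchemy.schema import CreateSchema

metadata = MetaData()

# Dialect-aware equivalent of the raw "CREATE SCHEMA IF NOT EXISTS ..."
# strings used above.
event.listen(metadata, 'before_create',
             CreateSchema('used_car', if_not_exists=True))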
def create_tables():
    engine.execute(
        DDL('CREATE SCHEMA IF NOT EXISTS {schema}'.format(schema=schema_name)))
    engine.execute(
        DDL('''CREATE TABLE {schema}.todos (
            tid serial primary key,
            title text not null,
            completed_at timestamp without time zone default null,
            notes text,
            created_at timestamp without time zone not null default now(),
            modified_at timestamp without time zone not null default now(),
            time_commitment int CHECK (time_commitment between 1 and 10),
            due_time timestamp without time zone,
            category text,
            person_waiting text,
            life_importance int CHECK (life_importance between 1 and 10),
            career_importance int CHECK (career_importance between 1 and 10),
            urgency int CHECK (urgency between 1 and 10),
            deleted_at timestamp without time zone default null
        )'''.format(schema=schema_name)))
def create_tables():
    engine.execute(
        DDL('CREATE SCHEMA IF NOT EXISTS {schema}'.format(
            schema=network_schema_name)))
    SABase.metadata.create_all()
    engine.execute(
        DDL('''
            DROP VIEW IF EXISTS {schema}.network_history;
            CREATE OR REPLACE VIEW {schema}.network_history AS
            SELECT e.eid
                 , e.timefrom
                 , e.timeto
                 , s.status
                 , ip.ip
                 , d.mac
                 , d.name
            FROM entry e
            INNER JOIN status s USING (sid)
            INNER JOIN ip USING (ipid)
            INNER JOIN devices d USING (did)
            WHERE e.timeto = (SELECT max(timeto) FROM entry)
            ORDER BY regexp_replace(ip, '.*\\.', '')::int;

            -- thanks to https://stackoverflow.com/a/18939742/2821804
            CREATE OR REPLACE FUNCTION {schema}.uppercase_mac_on_insert()
            RETURNS trigger AS $uppercase_mac_on_insert$
            BEGIN
                NEW.mac = upper(NEW.mac);
                RETURN NEW;
            END;
            $uppercase_mac_on_insert$ LANGUAGE plpgsql;

            -- https://stackoverflow.com/a/40479291/2821804
            -- Trigger names are not schema-qualified in PostgreSQL; the
            -- trigger lives with its table.
            DROP TRIGGER IF EXISTS uppercase_mac_on_insert_trigger
                ON {schema}.devices;
            CREATE TRIGGER uppercase_mac_on_insert_trigger
                BEFORE INSERT OR UPDATE ON {schema}.devices
                FOR EACH ROW EXECUTE PROCEDURE {schema}.uppercase_mac_on_insert();
        '''.format(schema=network_schema_name)))
def updateTableConnexion(session):
    add_column = DDL('ALTER TABLE connexion ADD COLUMN ANNEE_CONN INTEGER AFTER DATE_CONN')
    session.execute(add_column)
    add_column = DDL('ALTER TABLE connexion ADD COLUMN MOIS_CONN INTEGER AFTER ANNEE_CONN')
    session.execute(add_column)
    add_column = DDL('ALTER TABLE connexion ADD COLUMN JOUR_CONN INTEGER AFTER MOIS_CONN')
    session.execute(add_column)
    add_column = DDL('ALTER TABLE connexion ADD COLUMN TIME_CONN INTEGER AFTER JOUR_CONN')
    session.execute(add_column)
    add_column = DDL('ALTER TABLE connexion ADD COLUMN HEURE_CONN INTEGER AFTER TIME_CONN')
    session.execute(add_column)
    # session.commit()
    CI = aliased(tables.ConnexionINDSQLITE)
    listeConnexion = session.query(CI.SESAME_ETU, CI.DATE_CONN).all()
    for connexion in listeConnexion:
        date = DateTime(connexion[1])
        session.execute(
            "update connexion set ANNEE_CONN=%s, MOIS_CONN=%s, JOUR_CONN=%s, "
            "TIME_CONN='0', HEURE_CONN=0 where SESAME_ETU='%s' and DATE_CONN='%s'"
            % (date.year(), date.month(), date.day(), connexion[0], connexion[1]))
    listeConnexion = session.query(
        CI.SESAME_ETU, CI.DATE_CONN, CI.ANNEE_CONN, CI.MOIS_CONN,
        CI.JOUR_CONN, CI.TIME_CONN, CI.HEURE_CONN).all()
    return listeConnexion
class BinaryBlob(BaseSQLAlchemy):
    __tablename__ = 'binary_blobs'
    # Store binary data in its own schema
    metadata = MetaData(schema=BINARY_STORAGE_SCHEMA)

    id = Column(GUID, primary_key=True, default=uuid.uuid4)
    object_type = Column(String, nullable=False)
    object_id = Column(GUID, nullable=False)
    # TODO: Figure this out and think it through...
    binary_blob = Column(LargeBinary)

    event.listen(
        metadata, 'before_create',
        DDL('''CREATE SCHEMA IF NOT EXISTS "{}";'''.format(BINARY_STORAGE_SCHEMA)))
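Because the listener is attached to the model's own MetaData, creating the tables creates the schema first; a usage sketch with an assumed engine URL:

from sqlalchemy import create_engine

engine = create_engine('postgresql:///blobstore')  # assumed URL

# 'before_create' fires ahead of CREATE TABLE, so the schema named by
# BINARY_STORAGE_SCHEMA exists before binary_blobs is created.
BinaryBlob.metadata.create_all(engine)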
def _run_test(self, specs, attributes):
    columns = [Column('c%i' % (i + 1), t[0]) for i, t in enumerate(specs)]

    # Early 5.0 releases seem to report more "general" types for columns
    # in a view, e.g. char -> varchar, tinyblob -> mediumblob
    use_views = testing.db.dialect.server_version_info > (5, 0, 10)

    m = self.metadata
    Table('mysql_types', m, *columns)

    if use_views:
        event.listen(
            m, 'after_create',
            DDL('CREATE OR REPLACE VIEW mysql_types_v '
                'AS SELECT * from mysql_types'))
        event.listen(m, 'before_drop',
                     DDL("DROP VIEW IF EXISTS mysql_types_v"))

    m.create_all()

    m2 = MetaData(testing.db)
    tables = [Table('mysql_types', m2, autoload=True)]
    if use_views:
        tables.append(Table('mysql_types_v', m2, autoload=True))

    for table in tables:
        for i, (reflected_col, spec) in enumerate(zip(table.c, specs)):
            expected_spec = spec[1]
            reflected_type = reflected_col.type
            is_(type(reflected_type), type(expected_spec))
            for attr in attributes:
                eq_(
                    getattr(reflected_type, attr),
                    getattr(expected_spec, attr),
                    "Column %s: Attribute %s value of %s does not "
                    "match %s for type %s" % (
                        "c%i" % (i + 1), attr,
                        getattr(reflected_type, attr),
                        getattr(expected_spec, attr), spec[0]))
def _insert_default_values(Author: Base, Book: Base):
    """Add after_create handlers that seed the DB with default values."""
    event.listen(
        Author.__table__, 'after_create',
        DDL('''
            INSERT INTO %(table)s (id, fullname) VALUES
                (1, 'Henry Charles Bukowski'),
                (2, 'Nelle Harper Lee'),
                (3, 'Arthur Conan Doyle')
        '''))
    event.listen(
        Book.__table__, 'after_create',
        DDL('''
            INSERT INTO %(table)s (title, publication_year, author_id) VALUES
                ('Post Office', 1971, 1),
                ('Factotum', 1975, 1),
                ('Hollywood', 1989, 1),
                ('To Kill a Mockingbird', 1960, 2),
                ('The Hound of the Baskervilles', 1901, 3),
                ('The Adventure of the Red Circle', 1911, 3)
        '''))
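DDL expands %(table)s (along with %(schema)s and %(fullname)s) from the event target, so the same seed statement works whatever the tables are named. A usage sketch, reusing the snippet's own Author, Book and Base:

from sqlalchemy import create_engine

engine = create_engine('sqlite://')
_insert_default_values(Author, Book)

# Each 'after_create' fires as its table is created; DDL substitutes
# %(table)s with the freshly created table's name before executing.
Base.metadata.create_all(engine)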
def setup_trigger(db):
    r"""When player battle values are updated, create new records in
    today's diff_battle table.
    """
    engine = create_engine(
        "{protocol}://{user}:{password}@{address}/{name}".format(**db),
        echo=False)
    Session = sessionmaker(bind=engine)
    session = Session()

    battle_ddl = DDL("""
        CREATE TRIGGER update_battles BEFORE UPDATE ON players
        FOR EACH ROW
        BEGIN
            IF (OLD.battles < NEW.battles) THEN
                INSERT INTO {} VALUES (NEW.account_id, NEW.battles);
                INSERT INTO {} VALUES (NEW.account_id, NEW.battles - OLD.battles);
            END IF;
        END
    """.format(Total_Battles.__tablename__, Diff_Battles.__tablename__))
    event.listen(Player.__table__, 'after_create',
                 battle_ddl.execute_if(dialect='mysql'))

    newplayer_ddl = DDL("""
        CREATE TRIGGER new_player AFTER INSERT ON players
        FOR EACH ROW
            INSERT INTO {} VALUES (NEW.account_id, NEW.battles);
    """.format(Total_Battles.__tablename__))
    event.listen(Player.__table__, 'after_create',
                 newplayer_ddl.execute_if(dialect='mysql'))

    Base.metadata.create_all(engine)
    session.execute("""
        DROP TRIGGER IF EXISTS new_player;
        DROP TRIGGER IF EXISTS update_battles;
    """)
    session.execute(battle_ddl)
    session.execute(newplayer_ddl)
    session.commit()
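execute_if(dialect='mysql') keeps the MySQL-only trigger syntax from firing against other backends during create_all. The same gating also accepts a callable predicate; a sketch, where the server-version check is an assumed extra condition, not from the source:

from sqlalchemy import DDL, event

def mysql_5_or_later(ddl, target, bind, **kw):
    # The predicate receives the DDL element, the event target, and the
    # connection; returning False skips execution.
    return (bind.dialect.name == 'mysql'
            and bind.dialect.server_version_info >= (5, 0))

drop_ddl = DDL("DROP TRIGGER IF EXISTS update_battles")
event.listen(Player.__table__, 'before_drop',
             drop_ddl.execute_if(callable_=mysql_5_or_later))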
def register_last_modified_trigger_listener(table: Table, id_column: str = "id"):
    """
    Registers an after-create event listener that creates a trigger to update
    the last_modified column.

    :param table: the table to add the trigger to
    :param id_column: the name of the id column
    """
    statement = f"""\
        CREATE TRIGGER update_last_modified_{table.name}
        AFTER UPDATE ON {table.name}
        BEGIN
            UPDATE {table.name} SET last_modified = datetime('now')
            WHERE {id_column} = new.{id_column};
        END;"""
    event.listen(table, 'after_create', DDL(statement))
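A usage sketch for the helper above; the table layout is assumed, needing only the id and last_modified columns the trigger body touches:

from sqlalchemy import Column, DateTime, Integer, MetaData, Table, create_engine

metadata = MetaData()
items = Table(
    'items', metadata,
    Column('id', Integer, primary_key=True),
    Column('last_modified', DateTime),
)

register_last_modified_trigger_listener(items)

engine = create_engine('sqlite://')
metadata.create_all(engine)  # emits CREATE TABLE, then the trigger DDL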
def insert_data(cls, connection):
    cattable, matchtable = cls.tables("cattable", "matchtable")
    connection.execute(
        cattable.insert(),
        [
            {"id": 1, "description": "Python"},
            {"id": 2, "description": "Ruby"},
        ],
    )
    connection.execute(
        matchtable.insert(),
        [
            {"id": 1, "title": "Web Development with Rails", "category_id": 2},
            {"id": 2, "title": "Dive Into Python", "category_id": 1},
            {"id": 3, "title": "Programming Matz's Ruby", "category_id": 2},
            {"id": 4, "title": "Guide to Django", "category_id": 1},
            {"id": 5, "title": "Python in a Nutshell", "category_id": 1},
        ],
    )
    # SQL Server populates full-text indexes asynchronously, so give the
    # indexer time to catch up before the match tests run.
    connection.execute(DDL("WAITFOR DELAY '00:00:05'"))