def attach_triggers():
    """Attach database triggers to the File table."""
    function_snippet = DDL("""
    CREATE OR REPLACE FUNCTION update_file_search_text_vector()
    RETURNS TRIGGER AS $$
    BEGIN
        IF TG_OP = 'INSERT' THEN
            NEW.search_text = to_tsvector('pg_catalog.english', NEW.tags) ||
                to_tsvector('pg_catalog.english', translate(NEW.path, '/.', ' '));
        END IF;
        IF TG_OP = 'UPDATE' THEN
            -- 'OR', not '||': '||' is string concatenation in Postgres and
            -- would not parse between boolean comparisons.
            IF NEW.tags <> OLD.tags OR NEW.path <> OLD.path THEN
                NEW.search_text = to_tsvector('pg_catalog.english', NEW.tags) ||
                    to_tsvector('pg_catalog.english', translate(NEW.path, '/.', ' '));
            END IF;
        END IF;
        RETURN NEW;
    END
    $$ LANGUAGE 'plpgsql';
    """)
    trigger_snippet = DDL("""
    CREATE TRIGGER search_text_update BEFORE INSERT OR UPDATE
    ON files
    FOR EACH ROW EXECUTE PROCEDURE update_file_search_text_vector()
    """)
    event.listen(File.__table__, 'after_create',
                 function_snippet.execute_if(dialect='postgresql'))
    event.listen(File.__table__, 'after_create',
                 trigger_snippet.execute_if(dialect='postgresql'))
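# A minimal usage sketch for attach_triggers(), assuming File is a declarative
# model (with tags, path and search_text columns) registered on Base, and that
# engine points at the target database. execute_if() makes the DDL a no-op on
# non-PostgreSQL dialects, so the call is safe everywhere:
attach_triggers()                     # register the after_create listeners
Base.metadata.create_all(engine)      # emits tables, then function + trigger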
def initialize_db(db):
    from sqlalchemy import Table, Column, Integer, String, Sequence

    if 'scheduled_jobs' in db['metadata'].tables:
        # Table already exists. Nothing to do.
        return

    scheduled_jobs = Table(
        'scheduled_jobs', db['metadata'],
        Column("id", Integer, Sequence('scheduled_jobs_id_seq', start=1000),
               primary_key=True),
        Column("owner", String(50), nullable=False, index=True),
        Column("name", String(100), nullable=False, unique=True),
        Column("timeout_minutes", Integer, nullable=False),
        Column("code_uri", String(300), nullable=False),
        Column("commandline", String, nullable=False),
        Column("data_bucket", String(200), nullable=False),
        Column("num_workers", Integer, nullable=True),
        Column("output_dir", String(100), nullable=False),
        Column("output_visibility", String(10), nullable=False),
        Column("schedule_minute", String(20), nullable=False),
        Column("schedule_hour", String(20), nullable=False),
        Column("schedule_day_of_month", String(20), nullable=False),
        Column("schedule_month", String(20), nullable=False),
        Column("schedule_day_of_week", String(20), nullable=False)
    )

    # Postgres-specific stuff
    seq_default = DDL("ALTER TABLE scheduled_jobs ALTER COLUMN id "
                      "SET DEFAULT nextval('scheduled_jobs_id_seq');")
    event.listen(scheduled_jobs, "after_create",
                 seq_default.execute_if(dialect='postgresql'))

    # Create the table
    db['metadata'].create_all(tables=[scheduled_jobs])
def after_table(self):
    statement = self.statement
    if callable(statement):
        statement = statement()
    if not isinstance(statement, list):
        statement = [statement]
    for s in statement:
        ddl = DDL(s, self.on, self.context)
        # execute_at() is the legacy pre-0.7 API; event.listen() is the
        # modern equivalent.
        ddl.execute_at(self.when, self.entity.table)
def create_session(metadata, autoincrement=True, session_id_start=1000):
    """Create Session table.

    This function creates the Session table for tracking the various
    simulations run. For MySQL, it adds a post-create command to set the
    lower limit of the auto increment value.

    Table Description:

    This table contains the log of all simulations (MySQL) or a single
    simulation (SQLite). Simulation runs are identified by the combination
    of the hostname and session Id: *sessionHost_sessionId*.

    Parameters
    ----------
    metadata : sqlalchemy.MetaData
        The database object that collects the tables.
    autoincrement : bool
        A flag to set auto incrementing on the sessionID column.
    session_id_start : int
        A new starting session Id for counting new simulations.

    Returns
    -------
    sqlalchemy.Table
        The Session table object.
    """
    table = Table("Session", metadata,
                  Column("sessionId", Integer, primary_key=True,
                         autoincrement=autoincrement, nullable=False,
                         doc="Numeric identifier for the current simulation instance."),
                  Column("sessionUser", String(80), nullable=False,
                         doc="Computer username of the simulation runner."),
                  Column("sessionHost", String(80), nullable=False,
                         doc="Computer hostname where the simulation was run."),
                  Column("sessionDate", DATETIME, nullable=False,
                         doc="The UTC date/time of the simulation start."),
                  Column("version", String(25), nullable=True,
                         doc="The version number of the SOCS code."),
                  Column("runComment", String(200), nullable=True,
                         doc="A description of the simulation setup."))

    Index("s_host_user_date_idx", table.c.sessionUser, table.c.sessionHost,
          table.c.sessionDate, unique=True)

    alter_table = DDL("ALTER TABLE %(table)s AUTO_INCREMENT={};".format(session_id_start))
    event.listen(table, 'after_create', alter_table.execute_if(dialect='mysql'))

    return table
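# A minimal usage sketch for create_session() (assumes SQLAlchemy's MetaData
# and create_engine alongside the imports the snippet already relies on).
# The AUTO_INCREMENT statement is gated on the MySQL dialect by execute_if(),
# so creating against SQLite simply skips it:
from sqlalchemy import MetaData, create_engine

metadata = MetaData()
session_table = create_session(metadata, session_id_start=2000)
engine = create_engine("sqlite://")
metadata.create_all(engine)   # Session table created; MySQL-only DDL skipped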
def sync_db():
    from platformio_api.models import LibFTS
    event.listen(
        LibFTS.__table__, "after_create",
        DDL("ALTER TABLE %s ADD FULLTEXT(name, description, keywords, "
            "headerslist, authornames, frameworkslist, platformslist)" %
            LibFTS.__tablename__))
    Base.metadata.create_all(bind=engine)
def _add_cycle_check_trigger(target, conn, **kw):
    sql = """
        CREATE CONSTRAINT TRIGGER no_cycles
        AFTER INSERT OR UPDATE OF parent_id
        ON {table}
        NOT DEFERRABLE
        FOR EACH ROW
        EXECUTE PROCEDURE categories.check_cycles();
    """.format(table=target.fullname)
    DDL(sql).execute(conn)
def _add_deletion_consistency_trigger(target, conn, **kw):
    sql = """
        CREATE CONSTRAINT TRIGGER consistent_deleted
        AFTER INSERT OR UPDATE OF category_id, is_deleted
        ON {table}
        DEFERRABLE INITIALLY DEFERRED
        FOR EACH ROW
        EXECUTE PROCEDURE categories.check_consistency_deleted();
    """.format(table=target.fullname)
    DDL(sql).execute(conn)
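# The two helpers above follow SQLAlchemy's DDL-event callback signature
# (target, connection, **kw), so they are wired up with event.listen() rather
# than called directly. A minimal sketch, assuming a declarative Category
# model carrying the parent_id/category_id/is_deleted columns the triggers
# reference:
from sqlalchemy import event

event.listen(Category.__table__, 'after_create', _add_cycle_check_trigger)
event.listen(Category.__table__, 'after_create', _add_deletion_consistency_trigger)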
def setup(self):
    self.col_num = 150
    self.metadata = MetaData(testing.db)
    t = Table(
        "base_table", self.metadata,
        *[
            Column("long_named_column_number_%d" % i, Integer)
            for i in range(self.col_num)
        ])
    self.view_str = view_str = (
        "CREATE VIEW huge_named_view AS SELECT %s FROM base_table" %
        (",".join("long_named_column_number_%d" % i
                  for i in range(self.col_num))))
    assert len(view_str) > 4000
    event.listen(t, "after_create", DDL(view_str))
    event.listen(t, "before_drop", DDL("DROP VIEW huge_named_view"))
    self.metadata.create_all()
def _add_timetable_consistency_trigger(target, conn, **kw):
    sql = """
        CREATE CONSTRAINT TRIGGER consistent_timetable
        AFTER INSERT OR UPDATE OF event_id, session_id, session_block_id, duration
        ON {}
        DEFERRABLE INITIALLY DEFERRED
        FOR EACH ROW
        EXECUTE PROCEDURE events.check_timetable_consistency('contribution');
    """.format(target.fullname)
    DDL(sql).execute(conn)
def on_drop(class_name, sqltext):
    ddl = DDL(sqltext)

    def listener(tablename, ddl, table, bind, **kw):
        if table.name == tablename:
            ddl(table, bind, **kw)

    listen(Table, 'before_drop',
           partial(listener, class_name.__table__.name, ddl))
def _add_timetable_consistency_trigger(target, conn, **kw):
    sql = '''
        CREATE CONSTRAINT TRIGGER consistent_timetable
        AFTER INSERT OR UPDATE
        ON {}
        DEFERRABLE INITIALLY DEFERRED
        FOR EACH ROW
        EXECUTE PROCEDURE events.check_timetable_consistency('timetable_entry');
    '''.format(target.fullname)
    DDL(sql).execute(conn)
def _add_timetable_consistency_trigger(target, conn, **kw):
    sql = """
        CREATE CONSTRAINT TRIGGER consistent_timetable
        AFTER UPDATE OF start_dt, end_dt
        ON {table}
        DEFERRABLE INITIALLY DEFERRED
        FOR EACH ROW
        EXECUTE PROCEDURE events.check_timetable_consistency('event');
    """.format(table=target.fullname)
    DDL(sql).execute(conn)
def on_create(class_name, sqltext):
    ddl = DDL(sqltext)

    def listener(tablename, ddl, table, bind, **kw):
        if table.name == tablename:
            ddl(table, bind, **kw)

    listen(Table, 'after_create',
           partial(listener, class_name.__table__.name, ddl))
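# Usage sketch for the on_create()/on_drop() helpers above. MyModel and the
# index SQL are illustrative assumptions; the helpers attach a global
# Table-level listener and filter by table name, so the DDL fires whenever a
# table with that name is created or dropped through any MetaData:
on_create(MyModel, "CREATE INDEX ix_mymodel_extra ON mymodel (extra)")
on_drop(MyModel, "DROP INDEX IF EXISTS ix_mymodel_extra")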
def define_tables(cls, metadata):
    Table(
        "cattable", metadata,
        Column("id", Integer),
        Column("description", String(50)),
        PrimaryKeyConstraint("id", name="PK_cattable"),
    )
    Table(
        "matchtable", metadata,
        Column("id", Integer),
        Column("title", String(200)),
        Column("category_id", Integer, ForeignKey("cattable.id")),
        PrimaryKeyConstraint("id", name="PK_matchtable"),
    )
    event.listen(
        metadata, "before_create",
        DDL("CREATE FULLTEXT CATALOG Catalog AS DEFAULT"),
    )
    event.listen(
        metadata, "after_create",
        DDL("""CREATE FULLTEXT INDEX ON cattable (description)
               KEY INDEX PK_cattable"""),
    )
    event.listen(
        metadata, "after_create",
        DDL("""CREATE FULLTEXT INDEX ON matchtable (title)
               KEY INDEX PK_matchtable"""),
    )
    event.listen(
        metadata, "after_drop",
        DDL("DROP FULLTEXT CATALOG Catalog"),
    )
def intercept_authenticator():
    """Replace the existing authenticate function with our custom one."""
    meta = MetaData(bind=Session.get_bind(), reflect=True)
    if 'user' in meta.tables and \
            'login_attempts' not in meta.tables['user'].columns:
        LOG.warning("'login_attempts' field does not exist, adding...")
        DDL("ALTER TABLE public.user "
            "ADD COLUMN login_attempts SMALLINT DEFAULT 0").execute(Session.get_bind())
    UsernamePasswordAuthenticator.authenticate = QGOVAuthenticator().authenticate
def setup_bigint_id_for_all_tables(metadata):
    """
    This is more for Base.create_all() usage than for migrations, but still
    important for that flow. Alembic migrations have a different flow.
    """
    tables = metadata.sorted_tables
    for table in tables:
        next_bigint_id_sql = generate_next_bigint_id_sql_for_table(table)
        if next_bigint_id_sql:
            alter_table_bigint_id = DDL(next_bigint_id_sql)
            event.listen(table, 'after_create', alter_table_bigint_id)
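# generate_next_bigint_id_sql_for_table() is not shown in this snippet. A
# plausible, entirely hypothetical Postgres-flavoured sketch would return an
# ALTER SEQUENCE statement for tables that have an id column, and None
# otherwise:
def generate_next_bigint_id_sql_for_table(table):
    # Hypothetical: only touch tables with a column literally named "id".
    if 'id' not in table.c:
        return None
    # Assumes the conventional Postgres serial sequence name <table>_id_seq
    # and an arbitrary starting value.
    return "ALTER SEQUENCE {}_id_seq RESTART WITH 1000000000".format(table.name)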
def define_tables(cls, metadata):
    col_num = 150
    t = Table(
        "base_table",
        metadata,
        *[
            Column("long_named_column_number_%d" % i, Integer)
            for i in range(col_num)
        ],
    )
    cls.view_str = (
        view_str
    ) = "CREATE VIEW huge_named_view AS SELECT %s FROM base_table" % (
        ",".join("long_named_column_number_%d" % i for i in range(col_num)))
    assert len(view_str) > 4000
    event.listen(t, "after_create", DDL(view_str))
    event.listen(t, "before_drop", DDL("DROP VIEW huge_named_view"))
def create_db():
    from sqlalchemy import DDL
    from sqlalchemy import event
    from split_expenses_api.api.database import Base, \
        db_engine, get_default_schema

    default_schema = get_default_schema()
    event.listen(Base.metadata, 'before_create',
                 DDL("CREATE SCHEMA IF NOT EXISTS {}".format(default_schema)),
                 once=True)
    Base.metadata.create_all(db_engine)
def create_schemas(*args, **kwargs):
    try:
        from app.db.models import get_schemas
    except ImportError:
        schemas = ['admin']
    else:
        schemas = get_schemas()
    for schema in schemas:
        _sa.engine.execute(DDL(f'CREATE SCHEMA IF NOT EXISTS "{schema}"'))
    _sa.session.commit()
def disable_partition_and_purge_trigger(db_engine, table_name):
    """
    :param db_engine: The database engine used to execute the SQL statements.
    :param table_name: The name of the table, used to construct the names of
        the insert trigger and partition handler.
    :return: None. Only prints output.
    """
    drop_trigger_ddl_stmt = (
        "DROP RULE IF EXISTS autocall_createfuturepartitions_%s ON %s RESTRICT; "
        "DROP TRIGGER IF EXISTS purge_%s_trigger ON %s RESTRICT;" %
        (table_name, table_name, table_name, table_name))
    db_engine.execute(DDL(drop_trigger_ddl_stmt))

    drop_function_ddl_stmt = (
        "DROP FUNCTION IF EXISTS createfuturepartitions_%s(timestamp without time zone) RESTRICT;"
        "DROP FUNCTION IF EXISTS purge_%s() RESTRICT;" %
        (table_name, table_name))
    db_engine.execute(DDL(drop_function_ddl_stmt))

    print("Purge trigger and partition rule disabled for table '%s'. "
          "Existing partitions left as is." % table_name)
def check_postgres_database_exist():
    db = create_engine(database_uri)
    try:
        db.connect()
        db.execute('SELECT * FROM History')
    except Exception:
        # Switch database component of the uri
        print("Create new postgres database")
        event.listen(Base.metadata, 'before_create',
                     DDL("CREATE SCHEMA IF NOT EXISTS sqlalchemyqueenspuzzle"))
        db = create_engine(database_uri)
        Base.metadata.create_all(db)
def initialise_db(app):
    from application.models.models import CommunicationTemplate, CommunicationType  # NOQA # pylint: disable=wrong-import-position
    from application.models.classification_type import ClassificationType  # NOQA # pylint: disable=wrong-import-position

    # Set up database
    with app.app_context():
        db.init_app(app)
        # Create the schema first; the tables can't be created otherwise.
        event.listen(db.metadata, 'before_create',
                     DDL("CREATE SCHEMA IF NOT EXISTS templatesvc"))
        # Create the tables from the models.
        db.create_all()
def create_db(self):
    '''Create the tables in the database.'''
    self.Base.metadata.bind = self.engine
    # MySQL does not allow one trigger for both INSERT and UPDATE, and NEW
    # values may only be modified in BEFORE triggers, so define one BEFORE
    # trigger per event.
    insert_trigger = DDL('''CREATE TRIGGER insert_trigger \
        BEFORE INSERT ON file \
        FOR EACH ROW \
        SET NEW.source = concat(NEW.host, ':', NEW.base_path, '/', NEW.filename)''')
    update_trigger = DDL('''CREATE TRIGGER update_trigger \
        BEFORE UPDATE ON file \
        FOR EACH ROW \
        SET NEW.source = concat(NEW.host, ':', NEW.base_path, '/', NEW.filename)''')
    event.listen(File.__table__, 'after_create', insert_trigger)
    event.listen(File.__table__, 'after_create', update_trigger)
    self.Base.metadata.create_all()
def create_tables():
    engine.execute(
        DDL('CREATE SCHEMA IF NOT EXISTS {schema}'.format(
            schema=schema_name,
        )))
    SABase.metadata.create_all()
    engine.execute(
        DDL('''
            DROP VIEW IF EXISTS {schema}.recommendations;
            CREATE OR REPLACE VIEW {schema}.recommendations AS
                SELECT t.name
                     , p.name AS recommended_by
                     , t.notes
                     , t.location
                     , k.kind
                     , t.created_at AS created
                FROM things t
                INNER JOIN people p USING(pid)
                INNER JOIN kind k USING(kid);
        '''.format(schema=schema_name)))
def _update_version(connection, version):
    """
    Updates version in the db to the given version.

    Args:
        connection (sqlalchemy connection): sqlalchemy session where to
            update version.
        version (int): version of the migration.
    """
    if connection.engine.name == 'sqlite':
        connection.execute('PRAGMA user_version = {}'.format(version))
    elif connection.engine.name == 'postgresql':
        connection.execute(
            DDL('CREATE SCHEMA IF NOT EXISTS {};'.format(
                POSTGRES_SCHEMA_NAME)))
        connection.execute(
            DDL('CREATE SCHEMA IF NOT EXISTS {};'.format(
                POSTGRES_PARTITION_SCHEMA_NAME)))
        connection.execute(
            'CREATE TABLE IF NOT EXISTS {}.user_version(version INTEGER NOT NULL);'
            .format(POSTGRES_SCHEMA_NAME))

        # Upsert.
        if connection.execute('SELECT * FROM {}.user_version;'.format(
                POSTGRES_SCHEMA_NAME)).fetchone():
            # Update
            connection.execute(
                'UPDATE {}.user_version SET version = {};'.format(
                    POSTGRES_SCHEMA_NAME, version))
        else:
            # Insert
            connection.execute(
                'INSERT INTO {}.user_version (version) VALUES ({})'.format(
                    POSTGRES_SCHEMA_NAME, version))
    else:
        raise DatabaseMissingError(
            'Do not know how to migrate {} engine.'.format(
                connection.engine.driver))
def createtable(self):
    from sqlalchemy import event, DDL
    from sqlalchemy.schema import CreateSchema
    # event.listen(db.metadata, 'before_create', CreateSchema('my_schema'))
    event.listen(db.metadata, 'before_create',
                 DDL("CREATE SCHEMA IF NOT EXISTS used_car CHARACTER SET = utf8mb4;"))
    event.listen(db.metadata, 'before_create',
                 DDL("CREATE SCHEMA IF NOT EXISTS 법정동정보 CHARACTER SET = utf8mb4;"))
    event.listen(db.metadata, 'before_create',
                 DDL("CREATE SCHEMA IF NOT EXISTS 아파트정보 CHARACTER SET = utf8mb4;"))
    try:
        from ..models.aptinfo import AptInfo
        from ..models.bjdinfo import BjdInfo
        db.create_all()
        result = '<h1>It works.</h1>'
    except Exception as e:
        print('Table creation error')
        print(e.args)
        result = '<h1>Something is broken.</h1>' + str(e.args)
    try:
        from ..models.aptinfo import AptInfo
        from ..models.bjdinfo import BjdInfo
        import csv
        with open(self.bjdCode_path, mode='r', encoding='utf-8') as f:
            df = pd.read_csv(filepath_or_buffer=self.bjdCode_path, encoding='utf-8')
        df.to_sql(name="법정동정보", con=db.engine, if_exists='replace')
        result = 'nothing'
    except Exception as e:
        print('Table creation error')
        print(e.args)
        result = '<h1>Something is broken.</h1>' + str(e.args)
    return result
def test_ensure_version_is_qualified(self, future_connection, testing_engine,
                                     metadata):
    default_schema_name = future_connection.dialect.default_schema_name
    event.listen(
        metadata,
        "after_create",
        DDL("""
            CREATE OR REPLACE FUNCTION %s.version() RETURNS integer AS $$
            BEGIN
                return 0;
            END;
            $$ LANGUAGE plpgsql;""" % (default_schema_name,)),
    )
    event.listen(
        metadata,
        "before_drop",
        DDL("DROP FUNCTION %s.version" % (default_schema_name,)),
    )
    metadata.create_all(future_connection)
    future_connection.commit()

    e = testing_engine()

    @event.listens_for(e, "do_connect")
    def receive_do_connect(dialect, conn_rec, cargs, cparams):
        conn = dialect.dbapi.connect(*cargs, **cparams)
        cursor = conn.cursor()
        cursor.execute("set search_path = %s,pg_catalog" %
                       (default_schema_name,))
        cursor.close()
        return conn

    with e.connect():
        pass

    eq_(
        e.dialect.server_version_info,
        future_connection.dialect.server_version_info,
    )
def updateTableConnexion(session):
    add_column = DDL('ALTER TABLE connexion ADD COLUMN ANNEE_CONN INTEGER AFTER DATE_CONN')
    session.execute(add_column)
    add_column = DDL('ALTER TABLE connexion ADD COLUMN MOIS_CONN INTEGER AFTER ANNEE_CONN')
    session.execute(add_column)
    add_column = DDL('ALTER TABLE connexion ADD COLUMN JOUR_CONN INTEGER AFTER MOIS_CONN')
    session.execute(add_column)
    add_column = DDL('ALTER TABLE connexion ADD COLUMN TIME_CONN INTEGER AFTER JOUR_CONN')
    session.execute(add_column)
    # HEURE_CONN goes after TIME_CONN; a column cannot be placed after itself.
    add_column = DDL('ALTER TABLE connexion ADD COLUMN HEURE_CONN INTEGER AFTER TIME_CONN')
    session.execute(add_column)
    # session.commit()

    CI = aliased(tables.ConnexionINDSQLITE)
    listeConnexion = session.query(CI.SESAME_ETU, CI.DATE_CONN).all()
    for connexion in listeConnexion:
        date = DateTime(connexion[1])
        session.execute(
            "update connexion set ANNEE_CONN=%s, MOIS_CONN=%s, JOUR_CONN=%s, "
            "TIME_CONN='0', HEURE_CONN=0 where SESAME_ETU='%s' and DATE_CONN='%s'" %
            (date.year(), date.month(), date.day(), connexion[0], connexion[1]))
    listeConnexion = session.query(CI.SESAME_ETU, CI.DATE_CONN, CI.ANNEE_CONN,
                                   CI.MOIS_CONN, CI.JOUR_CONN, CI.TIME_CONN,
                                   CI.HEURE_CONN).all()
    return listeConnexion
def create_tables():
    engine.execute(
        DDL('CREATE SCHEMA IF NOT EXISTS {schema}'.format(
            schema=schema_name,
        )))
    engine.execute(
        DDL('''CREATE TABLE {schema}.todos (
            tid serial primary key,
            title text not null,
            completed_at timestamp without time zone default null,
            notes text,
            created_at timestamp without time zone not null default now(),
            modified_at timestamp without time zone not null default now(),
            time_commitment int CHECK (time_commitment between 1 and 10),
            due_time timestamp without time zone,
            category text,
            person_waiting text,
            life_importance int CHECK (life_importance between 1 and 10),
            career_importance int CHECK (career_importance between 1 and 10),
            urgency int CHECK (urgency between 1 and 10),
            deleted_at timestamp without time zone default null
        )
        '''.format(schema=schema_name)))
def create_tables():
    engine.execute(
        DDL('CREATE SCHEMA IF NOT EXISTS {schema}'.format(
            schema=network_schema_name,
        )))
    SABase.metadata.create_all()
    engine.execute(
        DDL('''
            DROP VIEW IF EXISTS {schema}.network_history;
            CREATE OR REPLACE VIEW {schema}.network_history AS
                select e.eid
                     , e.timefrom
                     , e.timeto
                     , s.status
                     , ip.ip
                     , d.mac
                     , d.name
                from entry e
                inner join status s using(sid)
                inner join ip using(ipid)
                inner join devices d using(did)
                where e.timeto = (select max(timeto) from entry)
                order by regexp_replace(ip, '.*\.', '')::int;

            -- thanks to https://stackoverflow.com/a/18939742/2821804
            CREATE OR REPLACE FUNCTION {schema}.uppercase_mac_on_insert() RETURNS trigger AS
            $uppercase_mac_on_insert$
            BEGIN
                NEW.mac = upper(NEW.mac);
                RETURN NEW;
            END;
            $uppercase_mac_on_insert$ LANGUAGE plpgsql;

            -- https://stackoverflow.com/a/40479291/2821804
            -- Trigger names cannot be schema-qualified; a trigger lives in
            -- its table's schema, so only the table carries the schema.
            DROP TRIGGER IF EXISTS uppercase_mac_on_insert_trigger ON {schema}.devices;
            CREATE TRIGGER uppercase_mac_on_insert_trigger
            BEFORE INSERT OR UPDATE ON {schema}.devices
            FOR EACH ROW EXECUTE PROCEDURE {schema}.uppercase_mac_on_insert();
        '''.format(schema=network_schema_name)))
class BinaryBlob(BaseSQLAlchemy):
    __tablename__ = 'binary_blobs'
    # Store binary data in its own schema
    metadata = MetaData(schema=BINARY_STORAGE_SCHEMA)

    id = Column(GUID, primary_key=True, default=uuid.uuid4)
    object_type = Column(String, nullable=False)
    object_id = Column(GUID, nullable=False)
    # TODO: Figure this out and think it through...
    binary_blob = Column(LargeBinary)

    event.listen(metadata, 'before_create',
                 DDL('''CREATE SCHEMA IF NOT EXISTS "{}";'''.format(BINARY_STORAGE_SCHEMA)))
def _run_test(self, specs, attributes):
    columns = [Column('c%i' % (i + 1), t[0]) for i, t in enumerate(specs)]

    # Early 5.0 releases seem to report more "general" for columns
    # in a view, e.g. char -> varchar, tinyblob -> mediumblob
    use_views = testing.db.dialect.server_version_info > (5, 0, 10)

    m = self.metadata
    Table('mysql_types', m, *columns)

    if use_views:
        event.listen(
            m, 'after_create',
            DDL('CREATE OR REPLACE VIEW mysql_types_v '
                'AS SELECT * from mysql_types'))
        event.listen(m, 'before_drop',
                     DDL("DROP VIEW IF EXISTS mysql_types_v"))

    m.create_all()

    m2 = MetaData(testing.db)
    tables = [Table('mysql_types', m2, autoload=True)]
    if use_views:
        tables.append(Table('mysql_types_v', m2, autoload=True))

    for table in tables:
        for i, (reflected_col, spec) in enumerate(zip(table.c, specs)):
            expected_spec = spec[1]
            reflected_type = reflected_col.type
            is_(type(reflected_type), type(expected_spec))
            for attr in attributes:
                eq_(
                    getattr(reflected_type, attr),
                    getattr(expected_spec, attr),
                    "Column %s: Attribute %s value of %s does not "
                    "match %s for type %s" % (
                        "c%i" % (i + 1), attr,
                        getattr(reflected_type, attr),
                        getattr(expected_spec, attr), spec[0]))
def _insert_default_values(Author: Base, Book: Base):
    """Add after_create handlers to init the DB with default values."""
    event.listen(
        Author.__table__, 'after_create',
        DDL('''
            INSERT INTO %(table)s(id, fullname) VALUES
                (1, 'Henry Charles Bukowski'),
                (2, 'Nelle Harper Lee'),
                (3, 'Arthur Conan Doyle')
        '''))
    event.listen(
        Book.__table__, 'after_create',
        DDL('''
            INSERT INTO %(table)s(title, publication_year, author_id) VALUES
                ('Post Office', 1971, 1),
                ('Factotum', 1975, 1),
                ('Hollywood', 1989, 1),
                ('To Kill a Mockingbird', 1960, 2),
                ('The Hound of the Baskervilles', 1901, 3),
                ('The Adventure of the Red Circle', 1911, 3)
        '''))
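# The %(table)s token in the DDL above is SQLAlchemy's built-in context
# substitution: when the statement fires against its target table, the token
# is replaced with that table's name. End-to-end sketch (Author and Book
# assumed to be declarative models on Base, per the signature above):
from sqlalchemy import create_engine

engine = create_engine('sqlite://')
_insert_default_values(Author, Book)
Base.metadata.create_all(engine)   # creates both tables, then seeds the rows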
def test_ddl_execute(self):
    engine = create_engine("sqlite:///")
    cx = engine.connect()
    table = self.users
    ddl = DDL("SELECT 1")

    eng_msg = r"The Engine.execute\(\) method is considered legacy"
    ddl_msg = r"The DDL.execute\(\) method is considered legacy"
    for spec in (
        (engine.execute, ddl, eng_msg),
        (engine.execute, ddl, table, eng_msg),
        (ddl.execute, engine, ddl_msg),
        (ddl.execute, engine, table, ddl_msg),
        (ddl.execute, cx, ddl_msg),
        (ddl.execute, cx, table, ddl_msg),
    ):
        fn = spec[0]
        arg = spec[1:-1]
        warning = spec[-1]
        with testing.expect_deprecated_20(warning):
            r = fn(*arg)
        eq_(list(r), [(1,)])

    for fn, kw in ((ddl.execute, {}), (ddl.execute, dict(target=table))):
        with testing.expect_deprecated_20(ddl_msg):
            assert_raises(exc.UnboundExecutionError, fn, **kw)

    for bind in engine, cx:
        ddl.bind = bind
        for fn, kw in (
            (ddl.execute, {}),
            (ddl.execute, dict(target=table)),
        ):
            with testing.expect_deprecated_20(ddl_msg):
                r = fn(**kw)
            eq_(list(r), [(1,)])
def load_ddl():
    for script in ('triggers.sql', 'rpmvercmp.sql'):
        with open(os.path.join(get_config('directories.datadir'), script)) as ddl_script:
            ddl = DDL(ddl_script.read())
        listen(Base.metadata, 'after_create',
               ddl.execute_if(dialect='postgresql'))
            db.session.query(cls, func.count(PostTag.c.post_id).label('num_posts'))
            .outerjoin(PostTag)
            .group_by(cls)
            .order_by(db.desc('num_posts'))
        )

    @property
    def url_list(self):
        return url_for('.tag_list')

    @property
    def url_show(self):
        return url_for('.post_list', tag_id=self.id)

    @property
    def url_no_delete(self):
        return self.url_list

    @property
    def url_edit(self):
        return url_for('.tag_edit', tag_id=self.id)


_here = os.path.dirname(__file__)
_sql_path = os.path.join(_here, 'ddl-post.sql')
_on_ddl = DDL(open(_sql_path).read())
event.listen(Post.__table__, 'after_create',
             _on_ddl.execute_if(dialect='postgresql'))
    def dictify(self):
        artist_dict = dict()
        artist_dict['artist_id'] = self.artist_id
        artist_dict['name'] = self.name
        artist_dict['num_followers'] = self.num_followers
        artist_dict['image_url'] = self.image_url
        artist_dict['popularity'] = self.popularity
        artist_dict['charted_songs'] = [
            song.song_name for song in self.charted_songs]
        artist_dict['genres'] = [genre.name for genre in self.genres]
        return artist_dict


# Create a trigger to check for updates to Artist and update the TsVector
# accordingly.
ARTIST_VECTOR_TRIGGER = DDL("""
    CREATE TRIGGER artist_tsvector_update BEFORE INSERT OR UPDATE
    ON "Artist"
    FOR EACH ROW EXECUTE PROCEDURE
    tsvector_update_trigger(tsvector_col, 'pg_catalog.english', 'name')
    """)
event.listen(Artist.__table__, 'after_create',
             ARTIST_VECTOR_TRIGGER.execute_if(dialect='postgresql'))


class Year(BASE):
    """
    Database model of table 'Year', which stores:
        year: the year's number
        top_album_name: the name of the top album
        top_album_id: the Spotify id of the top album
        top_genre_name: the name of the genre of the year's top album
        top_album_artist_id: the id of the artist who made the top album
        top_genre: the genre of the year's top album
elif action == "edit": return url_for("domain_edit", domain=self.name, _external=_external, **kwargs) @classmethod def get(cls, name, create=False): name = name.lower() result = cls.query.filter_by(name=name).one_or_none() if not result and create: result = cls(name=name, is_webmail=name in webmail_domains) db.session.add(result) return result create_domain_search_trigger = DDL( """ CREATE FUNCTION domain_search_vector_update() RETURNS TRIGGER AS $$ BEGIN NEW.search_vector = to_tsvector('english', COALESCE(NEW.name, '') || ' ' || COALESCE(NEW.title, '') || ' ' || COALESCE(NEW.legal_title, '') || ' ' || COALESCE(NEW.description, '')); RETURN NEW; END $$ LANGUAGE 'plpgsql'; CREATE TRIGGER domain_search_vector_trigger BEFORE INSERT OR UPDATE ON domain FOR EACH ROW EXECUTE PROCEDURE domain_search_vector_update(); CREATE INDEX ix_domain_search_vector ON domain USING gin(search_vector); """ ) event.listen(Domain.__table__, "after_create", create_domain_search_trigger.execute_if(dialect="postgresql"))
    def get_by_username(cls, email):
        return _DBSESSION.query(cls).filter(cls.email == email).first()

    @classmethod
    def check_password(cls, email, password):
        user = cls.get_by_username(email)
        if not user:
            return False
        return crypt.check(user.password, password)


notify_ddl = DDL("""
    ALTER TABLE %(table)s ALTER COLUMN "xid" SET DEFAULT txid_current();

    CREATE OR REPLACE FUNCTION snovault_transaction_notify() RETURNS trigger AS $$
    DECLARE
    BEGIN
        PERFORM pg_notify('snovault.transaction', NEW.xid::TEXT);
        RETURN NEW;
    END;
    $$ LANGUAGE plpgsql;

    CREATE TRIGGER snovault_transactions_insert AFTER INSERT ON %(table)s
    FOR EACH ROW EXECUTE PROCEDURE snovault_transaction_notify();
""")

event.listen(
    TransactionRecord.__table__,
    'after_create',
    notify_ddl.execute_if(dialect='postgresql'),
)


@event.listens_for(PropertySheet, 'before_insert')
def set_tid(mapper, connection, target):
    def sorted_properties(self):
        return sorted(self.properties.items())


trig_ddl = DDL("""
    ALTER TABLE pep ADD COLUMN search_col tsvector;
    CREATE INDEX search_col_gin_idx ON pep USING gin(search_col);

    CREATE OR REPLACE FUNCTION search_col_update_trigger() RETURNS trigger AS $$
    begin
        new.search_col :=
            setweight(to_tsvector('english', new.number || ''), 'A') ||
            setweight(to_tsvector('english', coalesce(new.title, '')), 'B') ||
            setweight(to_tsvector('english', coalesce(new.properties->'author', '')), 'C') ||
            setweight(to_tsvector('english', coalesce(new.content, '')), 'D');
        return new;
    end
    $$ LANGUAGE plpgsql;

    CREATE TRIGGER search_col_update BEFORE INSERT OR UPDATE
    ON pep
    FOR EACH ROW EXECUTE PROCEDURE search_col_update_trigger();
""")

event.listen(Pep.__table__, "after_create",
             trig_ddl.execute_if(dialect="postgresql"))

event.listen(
    Pep.__table__, "before_create",
    DDL("CREATE EXTENSION IF NOT EXISTS hstore").execute_if(dialect="postgresql")
)
    elif action == 'edit':
        return url_for('domain_edit', domain=self.name, _external=_external, **kwargs)

    @classmethod
    def get(cls, name, create=False):
        name = name.lower()
        result = cls.query.filter_by(name=name).one_or_none()
        if not result and create:
            result = cls(name=name, is_webmail=name in webmail_domains)
            db.session.add(result)
        return result


create_domain_search_trigger = DDL(
    '''
    CREATE FUNCTION domain_search_vector_update() RETURNS TRIGGER AS $$
    BEGIN
        NEW.search_vector = to_tsvector('english',
            COALESCE(NEW.name, '') || ' '
            || COALESCE(NEW.title, '') || ' '
            || COALESCE(NEW.legal_title, '') || ' '
            || COALESCE(NEW.description, ''));
        RETURN NEW;
    END
    $$ LANGUAGE 'plpgsql';

    CREATE TRIGGER domain_search_vector_trigger BEFORE INSERT OR UPDATE ON domain
    FOR EACH ROW EXECUTE PROCEDURE domain_search_vector_update();

    CREATE INDEX ix_domain_search_vector ON domain USING gin(search_vector);
    ''')

event.listen(Domain.__table__, 'after_create',
             create_domain_search_trigger.execute_if(dialect='postgresql'))
    code = db.Column(db.Unicode(100), nullable=False, default=generate_coupon_code)
    usage_limit = db.Column(db.Integer, nullable=False, default=1)
    used_count = cached(db.Column(db.Integer, nullable=False, default=0))

    discount_policy_id = db.Column(None, db.ForeignKey('discount_policy.id'),
        nullable=False)
    discount_policy = db.relationship(DiscountPolicy,
        backref=db.backref('discount_coupons', cascade='all, delete-orphan'))

    @classmethod
    def is_signed_code_usable(cls, policy, code):
        obj = cls.query.filter(cls.discount_policy == policy, cls.code == code,
            cls.used_count == cls.usage_limit).one_or_none()
        if obj:
            return False
        return True

    def update_used_count(self):
        from ..models import LineItem, LINE_ITEM_STATUS

        self.used_count = db.select([db.func.count()]) \
            .where(LineItem.discount_coupon == self) \
            .where(LineItem.status == LINE_ITEM_STATUS.CONFIRMED) \
            .as_scalar()


create_title_trgm_trigger = DDL(
    '''
    CREATE EXTENSION IF NOT EXISTS pg_trgm;
    CREATE INDEX idx_discount_policy_title_trgm on discount_policy USING gin (title gin_trgm_ops);
    ''')

event.listen(DiscountPolicy.__table__, 'after_create',
             create_title_trgm_trigger.execute_if(dialect='postgresql'))
    __tablename__ = 'geo_alt_name'

    geonameid = db.Column(None, db.ForeignKey('geo_name.id'), nullable=False)
    geoname = db.relationship(GeoName,
        backref=db.backref('alternate_titles', cascade='all, delete-orphan'))
    lang = db.Column(db.Unicode(7), nullable=True, index=True)
    title = db.Column(db.Unicode(200), nullable=False)
    is_preferred_name = db.Column(db.Boolean, nullable=False)
    is_short_name = db.Column(db.Boolean, nullable=False)
    is_colloquial = db.Column(db.Boolean, nullable=False)
    is_historic = db.Column(db.Boolean, nullable=False)

    def __repr__(self):
        return '<GeoAltName %s "%s" of %s>' % (
            self.lang, self.title,
            repr(self.geoname)[1:-1] if self.geoname else None)


create_geo_country_info_index = DDL(
    "CREATE INDEX ix_geo_country_info_title ON geo_country_info (lower(title) varchar_pattern_ops);")
event.listen(GeoCountryInfo.__table__, 'after_create',
             create_geo_country_info_index.execute_if(dialect='postgresql'))

create_geo_name_index = DDL(
    "CREATE INDEX ix_geo_name_title ON geo_name (lower(title) varchar_pattern_ops); "
    "CREATE INDEX ix_geo_name_ascii_title ON geo_name (lower(ascii_title) varchar_pattern_ops);")
event.listen(GeoName.__table__, 'after_create',
             create_geo_name_index.execute_if(dialect='postgresql'))

create_geo_alt_name_index = DDL(
    "CREATE INDEX ix_geo_alt_name_title ON geo_alt_name (lower(title) varchar_pattern_ops);")
event.listen(GeoAltName.__table__, 'after_create',
             create_geo_alt_name_index.execute_if(dialect='postgresql'))
    def __repr__(self):
        return "<Monkey #{0}>".format(self.id)


change_monkey_friends_count_trigger_ddl = DDL(
    """
    CREATE OR REPLACE FUNCTION process_change_monkey_friends_count()
    RETURNS TRIGGER AS $change_monkey_friends_count$
    BEGIN
        IF (TG_OP = 'DELETE') THEN
            UPDATE monkeys SET friends_count = friends_count - 1
                WHERE id = OLD.monkey_id;
            RETURN OLD;
        ELSIF (TG_OP = 'INSERT') THEN
            UPDATE monkeys SET friends_count = friends_count + 1
                WHERE id = NEW.monkey_id;
            RETURN NEW;
        END IF;
        RETURN NULL;
    END;
    $change_monkey_friends_count$ LANGUAGE plpgsql;

    CREATE TRIGGER change_monkey_friends_count
    AFTER INSERT OR DELETE ON friends
        FOR EACH ROW EXECUTE PROCEDURE process_change_monkey_friends_count();
    """
)

event.listen(
    friends_relationships,
    "after_create",
    change_monkey_friends_count_trigger_ddl.execute_if(dialect="postgresql")
)
        # Add Twitter/GitHub accounts to the head of results
        users = cls.query.filter(cls.status == USER_STATUS.ACTIVE, cls.id.in_(
            db.session.query(UserExternalId.user_id).filter(
                UserExternalId.service.in_(UserExternalId.__at_username_services__),
                db.func.lower(UserExternalId.username).like(db.func.lower(query[1:]))
            ).subquery())).options(*cls._defercols).limit(100).all() + users
    elif '@' in query:
        users = cls.query.filter(cls.status == USER_STATUS.ACTIVE, cls.id.in_(
            db.session.query(UserEmail.user_id).filter(UserEmail.user_id != None).filter(  # NOQA
                db.func.lower(UserEmail.email).like(db.func.lower(query))
            ).subquery())).options(*cls._defercols).limit(100).all() + users
    return users


create_user_index = DDL(
    'CREATE INDEX ix_user_username_lower ON "user" (lower(username) varchar_pattern_ops); '
    'CREATE INDEX ix_user_fullname_lower ON "user" (lower(fullname) varchar_pattern_ops);')
event.listen(User.__table__, 'after_create',
             create_user_index.execute_if(dialect='postgresql'))


class UserOldId(TimestampMixin, db.Model):
    __tablename__ = 'useroldid'
    __bind_key__ = 'lastuser'
    query_class = CoasterQuery

    # userid here is NOT a foreign key since it has to continue to exist
    # even if the User record is removed
    userid = db.Column(db.String(22), nullable=False, primary_key=True)
    olduser = db.relationship(User,
        primaryjoin=foreign(userid) == remote(User.userid),
        backref=db.backref('oldid', uselist=False))
def create_triggers(db, tables):
    db = create_engine(engine_name)
    db.echo = True
    db.connect()
    metadata = MetaData(db)

    insp = reflection.Inspector.from_engine(db)
    tables = []
    for table_name in insp.get_table_names():
        if not table_name.endswith('_aud'):
            table = Table(table_name, metadata, autoload=True, autoload_with=db)
            tables.append(table)
            #print("TABLE: %s" % table)
            #print table.__repr__
        else:
            table = Table(table_name, metadata, autoload=True, autoload_with=db)
            table.drop(db)
            metadata.remove(table)

    drop_trigger_text = """DROP TRIGGER IF EXISTS %(trigger_name)s;"""
    for table in tables:
        pk_cols = [c.name for c in table.primary_key]
        for pk_col in pk_cols:
            try:
                db.execute(drop_trigger_text % {
                    'trigger_name': table.name + "_ins_trig",
                })
            except Exception:
                pass
        for pk_col in pk_cols:
            try:
                db.execute(drop_trigger_text % {
                    'trigger_name': table.name + "_upd_trig",
                })
            except Exception:
                pass

    #metadata.create_all()
    trigger_text = """
        CREATE TRIGGER %(trigger_name)s AFTER %(action)s ON %(table_name)s
        FOR EACH ROW
        BEGIN
            INSERT INTO %(table_name)s_aud
                SELECT d.*, '%(action)s', NULL, date('now')
                FROM %(table_name)s AS d WHERE %(pkd)s;
        END
    """
    for table in tables:
        pk_cols = [c.name for c in table.primary_key]
        pkd = ["d.%s = NEW.%s" % (pk_col, pk_col) for pk_col in pk_cols]
        text_dict = {
            'action': 'INSERT',
            'trigger_name': table.name + "_ins_trig",
            'table_name': table.name,
            'pkd': ' and '.join(pkd),
        }
        logging.info(trigger_text % text_dict)
        trig_ddl = DDL(trigger_text % text_dict)
        # execute_at('after-create', ...) is the legacy spelling;
        # event.listen() is the modern equivalent.
        event.listen(table.metadata, 'after_create', trig_ddl)

        text_dict['action'] = 'UPDATE'
        text_dict['trigger_name'] = table.name + "_upd_trig"
        trig_ddl = DDL(trigger_text % text_dict)
        event.listen(table.metadata, 'after_create', trig_ddl)

    metadata.create_all()
from datetime import datetime

from sqlalchemy import event, DDL, Index

from app import db
from util.hstore import HSTORE


class Pep(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    number = db.Column(db.Integer, unique=True)
    added = db.Column(db.DateTime, nullable=False, default=datetime.now)
    updated = db.Column(db.DateTime, nullable=False, default=datetime.now)
    properties = db.Column(HSTORE, nullable=False, default={})
    content = db.Column(db.Text)
    raw_content = db.Column(db.Text)
    filename = db.Column(db.String(200))

    __table_args__ = (
        Index('pep_number_idx', 'properties'),
    )


trig_ddl = DDL("CREATE INDEX content_gin_idx ON pep "
               "USING gin(to_tsvector('english', content))")
event.listen(Pep.__table__, 'after_create',
             trig_ddl.execute_if(dialect='postgresql'))

event.listen(Pep.__table__, 'before_create',
             DDL("CREATE EXTENSION IF NOT EXISTS hstore").execute_if(dialect='postgresql'))
            organization_dict[key] = getattr(self, key)()

        if include_extras:
            for key in ('current_events', 'current_projects', 'current_stories'):
                organization_dict[key] = getattr(self, key)()

        return organization_dict


tbl = Organization.__table__

# Index the tsvector column
db.Index('index_org_tsv_body', tbl.c.tsv_body, postgresql_using='gin')

# Trigger to populate the search index column
trig_ddl = DDL("""
    CREATE TRIGGER tsvupdate_orgs_trigger BEFORE INSERT OR UPDATE ON organization
    FOR EACH ROW EXECUTE PROCEDURE
    tsvector_update_trigger(tsv_body, 'pg_catalog.english', name);
""")

# Initialize the trigger after table is created
event.listen(tbl, 'after_create', trig_ddl.execute_if(dialect='postgresql'))


class Story(db.Model):
    '''Blog posts from a Brigade.'''
    # Columns
    id = db.Column(db.Integer(), primary_key=True)
    title = db.Column(db.Unicode())
    link = db.Column(db.Unicode())
    type = db.Column(db.Unicode())
    keep = db.Column(db.Boolean())
        hashid = random_long_key()
        if not hashid.isdigit() and model.query.filter_by(hashid=hashid).isempty():
            break
    return hashid


create_jobpost_search_trigger = DDL(
    '''
    CREATE FUNCTION jobpost_search_vector_update() RETURNS TRIGGER AS $$
    BEGIN
        IF TG_OP = 'INSERT' THEN
            NEW.search_vector = to_tsvector('english',
                COALESCE(NEW.company_name, '') || ' '
                || COALESCE(NEW.headline, '') || ' '
                || COALESCE(NEW.headlineb, '') || ' '
                || COALESCE(NEW.description, '') || ' '
                || COALESCE(NEW.perks, ''));
        END IF;
        IF TG_OP = 'UPDATE' THEN
            IF NEW.headline <> OLD.headline
                    OR COALESCE(NEW.headlineb, '') <> COALESCE(OLD.headlineb, '')
                    OR NEW.description <> OLD.description
                    OR NEW.perks <> OLD.perks THEN
                NEW.search_vector = to_tsvector('english',
                    COALESCE(NEW.company_name, '') || ' '
                    || COALESCE(NEW.headline, '') || ' '
                    || COALESCE(NEW.headlineb, '') || ' '
                    || COALESCE(NEW.description, '') || ' '
                    || COALESCE(NEW.perks, ''));
            END IF;
        END IF;
        RETURN NEW;
    END
    $$ LANGUAGE 'plpgsql';

    CREATE TRIGGER jobpost_search_vector_trigger BEFORE INSERT OR UPDATE ON jobpost
    FOR EACH ROW EXECUTE PROCEDURE jobpost_search_vector_update();

    CREATE INDEX ix_jobpost_search_vector ON jobpost USING gin(search_vector);
    ''')

event.listen(JobPost.__table__, 'after_create',
             create_jobpost_search_trigger.execute_if(dialect='postgresql'))
        ('vlm.description', 'B', 'english'),
        ('vlm.search_keywords', 'B', 'english'),   # simple?
        ('a.name', 'C', 'english'),                # simple?
    )
)

account_user_search_vector_trigger = DDL('''
    create or replace function account_user_search_vector_trigger() returns trigger as $$
    declare
        vector tsvector;
    begin
        select %s into vector
        from account a
        left join video v on v.account = a.id
        left join video_locale_meta vlm on vlm.video = v.id and vlm.locale = ''
        where a.id = new.account
        group by a.id;
        new.search_vector := vector;
        return new;
    end
    $$ language plpgsql;

    create trigger account_user_search_vector_update
    before insert or update on account_user
    for each row execute procedure account_user_search_vector_trigger();
''' % ACCOUNT_USER_SEARCH_VECTOR)

event.listen(AccountUser.__table__, 'after_create',
             account_user_search_vector_trigger.execute_if(dialect='postgresql'))

video_search_vector_trigger = DDL('''
    create or replace function video_search_vector_trigger() returns trigger as $$
    addrloc_ip = Column(Text)
    addr_act = Column(Text)
    addr_obj = Column(Text)
    ogrn = Column(Text, index=True)
    inn = Column(Text, index=True)
    goal = Column(Text)
    osn_datestart = Column(Text)
    osn_dateend = Column(Text)
    osn_datestart2 = Column(Text)
    osn_other = Column(Text)
    check_month = Column(Text)
    check_days = Column(Text)
    check_hours = Column(Text)
    check_form = Column(Text)
    check_org = Column(Text)
    details_tsvector = Column(TsVector)


# Trigger on the genproc table
trigger_snippet = DDL("""
    CREATE TRIGGER details_tsvector_update BEFORE INSERT OR UPDATE
    ON genproc
    FOR EACH ROW EXECUTE PROCEDURE
    tsvector_update_trigger(details_tsvector, 'pg_catalog.russian', 'name', 'inn', 'ogrn')
""")
event.listen(Genproc.__table__, "after_create",
             trigger_snippet.execute_if(dialect="postgresql"))
               ForeignKey('entities.entity_id'), default=None),
    Column('version', Integer, nullable=False),
    Column('deleted_at_version', Integer, default=None),
    mysql_engine='InnoDB'
)
Index('idx_attrs_entity_version', ATTR_TABLE.c.entity_id,
      ATTR_TABLE.c.version, ATTR_TABLE.c.deleted_at_version)
Index('idx_attrs_key', ATTR_TABLE.c.key)
Index('idx_attrs_subkey', ATTR_TABLE.c.subkey)

create_index = DDL('CREATE INDEX idx_attrs_str_value on %(table)s (string_value(20))')
event.listen(ATTR_TABLE, 'after_create', create_index.execute_if(dialect='mysql'))

create_index = DDL('CREATE INDEX idx_attrs_str_value on %(table)s ((substring(string_value,0,20)))')
event.listen(ATTR_TABLE, 'after_create', create_index.execute_if(dialect='postgresql'))

create_index = DDL('CREATE INDEX idx_attrs_str_value on %(table)s (string_value)')
event.listen(ATTR_TABLE, 'after_create', create_index.execute_if(dialect='sqlite'))

COUNTER_TABLE = Table(
    'counters', METADATA,
    Column('counter_id', Integer, primary_key=True),
    Column('entity_id', Integer, ForeignKey('entities.entity_id'), nullable=False),
    Column('attr_key', String(256, convert_unicode=True)),
    Column('value', Integer, default=0),
    mysql_engine='InnoDB'
)
Column("number", Integer, nullable=True, default=None), Column("datatype", String(32), default="string", nullable=False), Column("int_value", Integer, default=None), Column("string_value", Text(convert_unicode=True, assert_unicode=None), default=None), Column("datetime_value", DateTime, default=None), Column("relation_id", Integer, ForeignKey("entities.entity_id"), default=None), Column("version", Integer, nullable=False), Column("deleted_at_version", Integer, default=None), mysql_engine="InnoDB", ) Index("idx_attrs_entity_version", ATTR_TABLE.c.entity_id, ATTR_TABLE.c.version, ATTR_TABLE.c.deleted_at_version) Index("idx_attrs_key", ATTR_TABLE.c.key) Index("idx_attrs_subkey", ATTR_TABLE.c.subkey) create_index = DDL("CREATE INDEX idx_attrs_str_value on %(table)s (string_value(20))") event.listen(ATTR_TABLE, "after_create", create_index.execute_if(dialect="mysql")) create_index = DDL("CREATE INDEX idx_attrs_str_value on %(table)s ((substring(string_value,0,20)))") event.listen(ATTR_TABLE, "after_create", create_index.execute_if(dialect="postgresql")) create_index = DDL("CREATE INDEX idx_attrs_str_value on %(table)s (string_value)") event.listen(ATTR_TABLE, "after_create", create_index.execute_if(dialect="sqlite")) COUNTER_TABLE = Table( "counters", METADATA, Column("counter_id", Integer, primary_key=True), Column("entity_id", Integer, ForeignKey("entities.entity_id"), nullable=False), Column("attr_key", String(256, convert_unicode=True, assert_unicode=None)), Column("value", Integer, default=0),
)

title_table = sa.Table(
    "title",
    meta.metadata,
    sa.Column("title_id", sa.types.Integer, primary_key=True),
    sa.Column("name", sa.types.Unicode(255), nullable=False),
    sa.Column("year", sa.types.SmallInteger, nullable=False),
    sa.Column("type", EnumIntType(config.TITLE_TYPES), nullable=False),
    sa.Column("created", sa.types.DateTime(), nullable=False,
              default=datetime.datetime.now),
    sa.Column("modified", sa.types.DateTime(), nullable=False,
              default=datetime.datetime.now, onupdate=datetime.datetime.now),
    sa.UniqueConstraint("name", "year", "type", name="title_info_unq"),
)

title_lower_index = DDL("create index title_name_lower_idx on title ((lower(name)))")
title_trgm_index = DDL("create index title_name_trgm_idx "
                       "on title using gin (name gin_trgm_ops)")
event.listen(title_table, "after_create",
             title_lower_index.execute_if(dialect="postgresql"))
event.listen(title_table, "after_create",
             title_trgm_index.execute_if(dialect="postgresql"))

aka_title_table = sa.Table(
    "aka_title",
    meta.metadata,
    sa.Column("aka_title_id", sa.types.Integer, primary_key=True),
    sa.Column("title_id", sa.types.Integer, sa.ForeignKey("title.title_id")),
    sa.Column("name", sa.types.Unicode(511), nullable=False),
    sa.Column("year", sa.types.SmallInteger, nullable=False),
    sa.Column("region", sa.types.Unicode(100), nullable=False),
    sa.Column("created", sa.types.DateTime(), nullable=False,
              default=datetime.datetime.now),
    sa.Column("modified", sa.types.DateTime(), nullable=False,
              default=datetime.datetime.now, onupdate=datetime.datetime.now
trigger = DDL(""" CREATE OR REPLACE FUNCTION update_last_complete_build() RETURNS TRIGGER AS $$ BEGIN UPDATE package SET last_complete_build_id = lcb.id, last_complete_build_state = lcb.state FROM (SELECT id, state FROM build WHERE package_id = NEW.package_id AND (state = 3 OR state = 5) ORDER BY id DESC LIMIT 1) AS lcb WHERE package.id = NEW.package_id; RETURN NEW; END $$ LANGUAGE plpgsql; CREATE OR REPLACE FUNCTION update_last_build() RETURNS TRIGGER AS $$ BEGIN UPDATE package SET last_build_id = lb.id FROM (SELECT id, state, started FROM build WHERE package_id = NEW.package_id ORDER BY id DESC LIMIT 1) AS lb WHERE package.id = NEW.package_id; RETURN NEW; END $$ LANGUAGE plpgsql; CREATE OR REPLACE FUNCTION update_last_build_del() RETURNS TRIGGER AS $$ BEGIN UPDATE package SET last_build_id = lb.id FROM (SELECT id, state, started FROM build WHERE package_id = OLD.package_id AND build.id != OLD.id ORDER BY id DESC LIMIT 1) AS lb WHERE package.id = OLD.package_id; RETURN OLD; END $$ LANGUAGE plpgsql; DROP TRIGGER IF EXISTS update_last_complete_build_trigger ON build; DROP TRIGGER IF EXISTS update_last_build_trigger ON build; CREATE TRIGGER update_last_complete_build_trigger AFTER INSERT ON build FOR EACH ROW WHEN (NEW.state = 3 OR NEW.state = 5) EXECUTE PROCEDURE update_last_complete_build(); CREATE TRIGGER update_last_build_trigger AFTER INSERT ON build FOR EACH ROW EXECUTE PROCEDURE update_last_build(); DROP TRIGGER IF EXISTS update_last_complete_build_trigger_up ON build; CREATE TRIGGER update_last_complete_build_trigger_up AFTER UPDATE ON build FOR EACH ROW WHEN (OLD.state != NEW.state) EXECUTE PROCEDURE update_last_complete_build(); DROP TRIGGER IF EXISTS update_last_build_trigger_del ON build; CREATE TRIGGER update_last_build_trigger_del BEFORE DELETE ON build FOR EACH ROW EXECUTE PROCEDURE update_last_build_del(); """)