Esempio n. 1
0
def attach_triggers():
    """Attach search-vector maintenance triggers to the ``files`` table.

    Registers two PostgreSQL-only DDL snippets that run after the ``File``
    table is created: a PL/pgSQL function that rebuilds ``search_text``
    from ``tags`` and ``path``, and the trigger that invokes it before
    every INSERT/UPDATE on ``files``.
    """
    function_snippet = DDL("""
    CREATE OR REPLACE FUNCTION update_file_search_text_vector() RETURNS TRIGGER AS $$
    BEGIN
        IF TG_OP = 'INSERT' THEN
            new.search_text = to_tsvector('pg_catalog.english', NEW.tags) || to_tsvector('pg_catalog.english', translate(NEW.path, '/.', '  '));
        END IF;
        IF TG_OP = 'UPDATE' THEN
            -- BUGFIX: this condition previously joined the two comparisons
            -- with ``||`` (SQL string concatenation); ``OR`` is the boolean
            -- operator intended here.
            IF NEW.tags <> OLD.tags OR NEW.path <> OLD.path THEN
                new.search_text = to_tsvector('pg_catalog.english', NEW.tags) || to_tsvector('pg_catalog.english', translate(NEW.path, '/.', '  '));
            END IF;
        END IF;
        RETURN NEW;
    END
    $$ LANGUAGE 'plpgsql';
    """)

    trigger_snippet = DDL("""
    CREATE TRIGGER search_text_update BEFORE INSERT OR UPDATE
    ON files
    FOR EACH ROW EXECUTE PROCEDURE
    update_file_search_text_vector()
    """)

    # Both snippets only fire when the active dialect is PostgreSQL.
    event.listen(File.__table__, 'after_create',
                 function_snippet.execute_if(dialect='postgresql'))
    event.listen(File.__table__, 'after_create',
                 trigger_snippet.execute_if(dialect='postgresql'))
Esempio n. 2
0
def mount_declare_schema(target_schema):
    """Register PostgreSQL-only schema-creation DDL and return the schema object."""
    create_stmt = pg.Schema(target_schema.schema).create_statement()
    schema_ddl = DDL(create_stmt)
    event.listen(target_schema, 'before_create',
                 schema_ddl.execute_if(dialect='postgresql'))
    return target_schema
Esempio n. 3
0
def load_ddl():
    """Read each bundled SQL script and register it as PostgreSQL-only DDL."""
    for script_name in ('triggers.sql', 'rpmvercmp.sql'):
        script_path = os.path.join(get_config('directories.datadir'),
                                   script_name)
        with open(script_path) as script_file:
            ddl = DDL(script_file.read())
        listen(Base.metadata, 'after_create',
               ddl.execute_if(dialect='postgresql'))
Esempio n. 4
0
def initialize_db(db):
    """Create the ``scheduled_jobs`` table (with a Postgres sequence default) if absent.

    ``db`` is a dict carrying a SQLAlchemy ``metadata`` object.
    """
    from sqlalchemy import Table, Column, Integer, String, Sequence

    metadata = db['metadata']
    if 'scheduled_jobs' in metadata.tables:
        # Table is already registered; nothing to create.
        return

    columns = [
        Column("id", Integer,
               Sequence('scheduled_jobs_id_seq', start=1000), primary_key=True),
        Column("owner", String(50), nullable=False, index=True),
        Column("name", String(100), nullable=False, unique=True),
        Column("timeout_minutes", Integer, nullable=False),
        Column("code_uri", String(300), nullable=False),
        Column("commandline", String, nullable=False),
        Column("data_bucket", String(200), nullable=False),
        Column("num_workers", Integer, nullable=True),
        Column("output_dir", String(100), nullable=False),
        Column("output_visibility", String(10), nullable=False),
        Column("schedule_minute", String(20), nullable=False),
        Column("schedule_hour", String(20), nullable=False),
        Column("schedule_day_of_month", String(20), nullable=False),
        Column("schedule_month", String(20), nullable=False),
        Column("schedule_day_of_week", String(20), nullable=False),
    ]
    scheduled_jobs = Table('scheduled_jobs', metadata, *columns)

    # PostgreSQL only: have the id column default to the next sequence value.
    seq_default = DDL("ALTER TABLE scheduled_jobs ALTER COLUMN id SET DEFAULT nextval('scheduled_jobs_id_seq');")
    event.listen(scheduled_jobs, "after_create", seq_default.execute_if(dialect='postgresql'))

    # Create the table
    metadata.create_all(tables=[scheduled_jobs])
Esempio n. 5
0
    def create_reference_update_trigger(self, table_name, description,
                                        reference_table, model):
        """Register DDL that keeps ``target_id`` rows in sync and annotate *description*.

        Installs (PostgreSQL only, after ``model``'s table is created) a
        function plus trigger that rewrites ``target_id`` references in
        ``table_name`` whenever a row of ``reference_table`` is updated.
        Returns *description* extended with a note about this behaviour.
        """
        func_name = f"{table_name}_update_reference_id"
        function_ddl = DDL(f"""
                    CREATE or REPLACE function {func_name}()
                    returns TRIGGER
                    as $func$
                    begin
                        update {table_name} ref
                        set target_id = new.superceded_id
                        where ref.target_id = old.id;
                        return new;
                    end;
                    $func$ language plpgsql;
                    """)
        trigger_ddl = DDL(
            f"""CREATE TRIGGER update_{table_name}_target_id AFTER UPDATE ON {reference_table}
                    FOR EACH ROW EXECUTE PROCEDURE {func_name}();""")

        # The function must be registered before the trigger that calls it.
        for snippet in (function_ddl, trigger_ddl):
            event.listen(
                model.__table__,
                "after_create",
                snippet.execute_if(dialect="postgresql"),
            )

        description += f" [Note: This table '{model.__name__}' will update the 'target_id' foreign_key when updates are made to the '{reference_table}' table]"
        return description
Esempio n. 6
0
def before_first():
    """Start the scheduler, register periodic jobs, and install timetravel triggers."""
    sched = CoreScheduler()
    sched.start(app)
    epoch = datetime.strptime("01/01/18", "%d/%m/%y")
    # (callable, period-in-seconds) pairs, scheduled in this order.
    periodic_jobs = (
        (advert.update_statuses, 10),
        (product.update_statuses, 10),
        (companydocument.date_doct, 86400),
        (companyproduct.update_status, 86400),
        (company.update_statuses, 86400),
    )
    for job, period in periodic_jobs:
        sched.schedule(job, seconds=period, start_date=epoch)
    app.db = db

    # Attach a timetravel trigger to every CouchSync-derived model in db.
    for attr in dir(db):
        member = getattr(db, attr)
        if not isclass(member):
            continue
        if (issubclass(member, Base) and issubclass(member, CouchSync)
                and member.__name__ != CouchSync.__name__):
            trigger = DDL("""
                CREATE TRIGGER timetravel_{0}
        BEFORE INSERT OR DELETE OR UPDATE ON {0}
        FOR EACH ROW
        EXECUTE PROCEDURE
          timetravel(_created, _deleted);
                """.format(attr))
            event.listen(member.__table__, 'after_create',
                         trigger.execute_if(dialect='postgresql'))
Esempio n. 7
0
def initialize_db(db):
    """Create the ``scheduled_jobs`` table (with a Postgres sequence default) if absent.

    ``db`` is a dict carrying a SQLAlchemy ``metadata`` object.
    """
    from sqlalchemy import Table, Column, Integer, String, Sequence

    metadata = db['metadata']
    if 'scheduled_jobs' in metadata.tables:
        # Table is already registered; nothing to create.
        return

    columns = [
        Column("id", Integer,
               Sequence('scheduled_jobs_id_seq', start=1000), primary_key=True),
        Column("owner", String(50), nullable=False, index=True),
        Column("name", String(100), nullable=False, unique=True),
        Column("timeout_minutes", Integer, nullable=False),
        Column("code_uri", String(300), nullable=False),
        Column("commandline", String, nullable=False),
        Column("data_bucket", String(200), nullable=False),
        Column("num_workers", Integer, nullable=True),
        Column("output_dir", String(100), nullable=False),
        Column("output_visibility", String(10), nullable=False),
        Column("schedule_minute", String(20), nullable=False),
        Column("schedule_hour", String(20), nullable=False),
        Column("schedule_day_of_month", String(20), nullable=False),
        Column("schedule_month", String(20), nullable=False),
        Column("schedule_day_of_week", String(20), nullable=False),
    ]
    scheduled_jobs = Table('scheduled_jobs', metadata, *columns)

    # PostgreSQL only: have the id column default to the next sequence value.
    seq_default = DDL("ALTER TABLE scheduled_jobs ALTER COLUMN id SET DEFAULT nextval('scheduled_jobs_id_seq');")
    event.listen(scheduled_jobs, "after_create", seq_default.execute_if(dialect='postgresql'))

    # Create the table
    metadata.create_all(tables=[scheduled_jobs])
def setup_trigger(db):
    r"""
    When player battle values are updated, create new records in today's
    diff_battle table
    """
    url = "{protocol}://{user}:{password}@{address}/{name}".format(**db)
    engine = create_engine(url, echo=False)
    session = sessionmaker(bind=engine)()
    battle_ddl = DDL("""
        CREATE TRIGGER update_battles BEFORE UPDATE ON players
        FOR EACH ROW
        BEGIN
            IF (OLD.battles < NEW.battles) THEN
                INSERT INTO {} VALUES (NEW.account_id, NEW.battles);
                INSERT INTO {} VALUES (NEW.account_id, NEW.battles - OLD.battles);
            END IF;
        END
    """.format(Total_Battles.__tablename__, Diff_Battles.__tablename__))
    newplayer_ddl = DDL("""
        CREATE TRIGGER new_player AFTER INSERT ON players
        FOR EACH ROW INSERT INTO {} VALUES (NEW.account_id, NEW.battles);
    """.format(Total_Battles.__tablename__))
    # MySQL-only: install both triggers whenever the players table is created.
    for ddl in (battle_ddl, newplayer_ddl):
        event.listen(Player.__table__, 'after_create',
                     ddl.execute_if(dialect='mysql'))
    Base.metadata.create_all(engine)
    # Drop any pre-existing triggers, then (re-)install them explicitly.
    session.execute("""
        DROP TRIGGER IF EXISTS new_player;
        DROP TRIGGER IF EXISTS update_battles;
    """)
    session.execute(battle_ddl)
    session.execute(newplayer_ddl)
    session.commit()
Esempio n. 9
0
def build_tsvector_trigger():
    """Register the built-in tsvector_update_trigger on the company table.

    Uses PostgreSQL's stock trigger procedure; see
    https://www.postgresql.org/docs/10/functions-textsearch.html and
    https://www.postgresql.org/docs/9.5/textsearch-features.html
    """
    statement = """
    CREATE TRIGGER trig_searchable_tsvector BEFORE INSERT OR UPDATE
    ON company
    FOR EACH ROW EXECUTE PROCEDURE
    tsvector_update_trigger(
        searchable,'pg_catalog.english', 'name', 'about'
    )
    """
    event.listen(Company.__table__, 'after_create',
                 DDL(statement).execute_if(dialect='postgresql'))
Esempio n. 10
0
def before_first():
    """Expose the db module on the app and install timetravel triggers."""
    app.db = db

    # Attach a timetravel trigger to every CouchSync-derived model in db.
    for attr in dir(db):
        member = getattr(db, attr)
        if not isclass(member):
            continue
        if (issubclass(member, Base) and issubclass(member, CouchSync)
                and member.__name__ != CouchSync.__name__):
            trigger = DDL("""
                CREATE TRIGGER timetravel_{0}
        BEFORE INSERT OR DELETE OR UPDATE ON {0}
        FOR EACH ROW
        EXECUTE PROCEDURE
          timetravel(_created, _deleted);
                """.format(attr))
            event.listen(member.__table__, 'after_create',
                         trigger.execute_if(dialect='postgresql'))
Esempio n. 11
0
def create_session(metadata, autoincrement=True, session_id_start=1000):
    """Create Session table.

    Builds the Session table used to track simulation runs. For MySQL a
    post-create DDL statement raises the AUTO_INCREMENT floor so new
    session ids start at *session_id_start*. Runs are identified by the
    combination *sessionHost_sessionId*.

    Parameters
    ----------
    metadata : sqlalchemy.MetaData
        The database object that collects the tables.
    autoincrement : bool
        A flag to set auto incrementing on the sessionID column.
    session_id_start : int
        A new starting session Id for counting new simulations.

    Returns
    -------
    sqlalchemy.Table
        The Session table object.
    """
    session_table = Table(
        "Session", metadata,
        Column("sessionId", Integer, primary_key=True, autoincrement=autoincrement, nullable=False,
               doc="Numeric identifier for the current simulation instance."),
        Column("sessionUser", String(80), nullable=False,
               doc="Computer username of the simulation runner."),
        Column("sessionHost", String(80), nullable=False,
               doc="Computer hostname where the simulation was run."),
        Column("sessionDate", DATETIME, nullable=False,
               doc="The UTC date/time of the simulation start."),
        Column("version", String(25), nullable=True, doc="The version number of the SOCS code."),
        Column("runComment", String(200), nullable=True,
               doc="A description of the simulation setup."),
    )

    # One run per (user, host, date) combination.
    Index("s_host_user_date_idx", session_table.c.sessionUser, session_table.c.sessionHost,
          session_table.c.sessionDate, unique=True)

    # MySQL only: %(table)s is substituted by SQLAlchemy's DDL machinery.
    alter_table = DDL("ALTER TABLE %(table)s AUTO_INCREMENT={};".format(session_id_start))
    event.listen(session_table, 'after_create', alter_table.execute_if(dialect='mysql'))

    return session_table
Esempio n. 12
0
    __tablename__ = 'geo_alt_name'

    geonameid = db.Column(None, db.ForeignKey('geo_name.id'), nullable=False)
    geoname = db.relationship(GeoName, backref=db.backref('alternate_titles', cascade='all, delete-orphan'))
    lang = db.Column(db.Unicode(7), nullable=True, index=True)
    title = db.Column(db.Unicode(200), nullable=False)
    is_preferred_name = db.Column(db.Boolean, nullable=False)
    is_short_name = db.Column(db.Boolean, nullable=False)
    is_colloquial = db.Column(db.Boolean, nullable=False)
    is_historic = db.Column(db.Boolean, nullable=False)

    def __repr__(self):
        # Show language, title, and the parent GeoName repr sans its angle brackets.
        parent = repr(self.geoname)[1:-1] if self.geoname else None
        return '<GeoAltName %s "%s" of %s>' % (self.lang, self.title, parent)


# PostgreSQL-only expression indexes over lower(title) using
# varchar_pattern_ops so case-insensitive prefix (LIKE 'x%') searches
# can use an index. Each is attached to its table's after_create event.
create_geo_country_info_index = DDL(
    "CREATE INDEX ix_geo_country_info_title ON geo_country_info (lower(title) varchar_pattern_ops);")
event.listen(GeoCountryInfo.__table__, 'after_create',
    create_geo_country_info_index.execute_if(dialect='postgresql'))

# geo_name gets two indexes: one on title, one on ascii_title.
create_geo_name_index = DDL(
    "CREATE INDEX ix_geo_name_title ON geo_name (lower(title) varchar_pattern_ops); "
    "CREATE INDEX ix_geo_name_ascii_title ON geo_name (lower(ascii_title) varchar_pattern_ops);")
event.listen(GeoName.__table__, 'after_create',
    create_geo_name_index.execute_if(dialect='postgresql'))

create_geo_alt_name_index = DDL(
    "CREATE INDEX ix_geo_alt_name_title ON geo_alt_name (lower(title) varchar_pattern_ops);")
event.listen(GeoAltName.__table__, 'after_create',
    create_geo_alt_name_index.execute_if(dialect='postgresql'))
Esempio n. 13
0
            for key in ('current_events', 'current_projects', 'current_stories'):
                organization_dict[key] = getattr(self, key)()

        return organization_dict


tbl = Organization.__table__
# GIN index over the tsv_body tsvector column (PostgreSQL only).
db.Index('index_org_tsv_body', tbl.c.tsv_body, postgresql_using='gin')

# Trigger that repopulates tsv_body from the name column on every
# INSERT/UPDATE, via PostgreSQL's built-in tsvector_update_trigger.
trig_ddl = DDL("""
    CREATE TRIGGER tsvupdate_orgs_trigger BEFORE INSERT OR UPDATE ON organization FOR EACH ROW EXECUTE PROCEDURE tsvector_update_trigger(tsv_body, 'pg_catalog.english', name);
""")
# Install the trigger after the table is created; no-op on other dialects.
event.listen(tbl, 'after_create', trig_ddl.execute_if(dialect='postgresql'))


class Story(db.Model):
    '''
        Blog posts from a Brigade.
    '''
    # Columns
    id = db.Column(db.Integer(), primary_key=True)
    title = db.Column(db.Unicode())
    link = db.Column(db.Unicode())
    type = db.Column(db.Unicode())
    keep = db.Column(db.Boolean())

    # Relationships
    # child
Esempio n. 14
0
            db.session.query(cls,
                             func.count(PostTag.c.post_id).label('num_posts'))
            .outerjoin(PostTag)
            .group_by(cls)
            .order_by('num_posts DESC')
        )

    @property
    def url_list(self):
        """URL of the tag list view."""
        return url_for('.tag_list')

    @property
    def url_show(self):
        """URL of the post list filtered to this tag."""
        return url_for('.post_list', tag_id=self.id)

    @property
    def url_no_delete(self):
        """Alias for :attr:`url_list`; presumably the redirect target when deletion is disallowed — confirm against callers."""
        return self.url_list

    @property
    def url_edit(self):
        """URL of the edit view for this tag."""
        return url_for('.tag_edit', tag_id=self.id)


# Load the Postgres-specific DDL shipped alongside this module and
# register it to run after the Post table is created.
_here = os.path.dirname(__file__)
_sql_path = os.path.join(_here, 'ddl-post.sql')
# BUGFIX: use a context manager so the file handle is closed promptly;
# the previous open(...).read() relied on garbage collection to close it.
with open(_sql_path) as _sql_file:
    _on_ddl = DDL(_sql_file.read())

event.listen(Post.__table__, 'after_create',
             _on_ddl.execute_if(dialect='postgresql'))
Esempio n. 15
0
    Column("datatype", String(32), default="string", nullable=False),
    Column("int_value", Integer, default=None),
    Column("string_value", Text(convert_unicode=True, assert_unicode=None), default=None),
    Column("datetime_value", DateTime, default=None),
    Column("relation_id", Integer, ForeignKey("entities.entity_id"), default=None),
    Column("version", Integer, nullable=False),
    Column("deleted_at_version", Integer, default=None),
    mysql_engine="InnoDB",
)
# Composite index over (entity_id, version, deleted_at_version).
Index("idx_attrs_entity_version", ATTR_TABLE.c.entity_id, ATTR_TABLE.c.version, ATTR_TABLE.c.deleted_at_version)

Index("idx_attrs_key", ATTR_TABLE.c.key)
Index("idx_attrs_subkey", ATTR_TABLE.c.subkey)

# string_value needs a dialect-specific index: MySQL supports prefix
# indexes, PostgreSQL uses an expression index on a substring, and
# SQLite indexes the full column. %(table)s is substituted by
# SQLAlchemy's DDL machinery; exactly one listener fires per dialect.
create_index = DDL("CREATE INDEX idx_attrs_str_value on %(table)s (string_value(20))")
event.listen(ATTR_TABLE, "after_create", create_index.execute_if(dialect="mysql"))

create_index = DDL("CREATE INDEX idx_attrs_str_value on %(table)s ((substring(string_value,0,20)))")
event.listen(ATTR_TABLE, "after_create", create_index.execute_if(dialect="postgresql"))

create_index = DDL("CREATE INDEX idx_attrs_str_value on %(table)s (string_value)")
event.listen(ATTR_TABLE, "after_create", create_index.execute_if(dialect="sqlite"))

COUNTER_TABLE = Table(
    "counters",
    METADATA,
    Column("counter_id", Integer, primary_key=True),
    Column("entity_id", Integer, ForeignKey("entities.entity_id"), nullable=False),
    Column("attr_key", String(256, convert_unicode=True, assert_unicode=None)),
    Column("value", Integer, default=0),
    mysql_engine="InnoDB",
Esempio n. 16
0
    addrloc_ip = Column(Text)
    addr_act = Column(Text)
    addr_obj = Column(Text)
    ogrn = Column(Text, index=True)
    inn = Column(Text, index=True)
    goal = Column(Text)
    osn_datestart = Column(Text)
    osn_dateend = Column(Text)
    osn_datestart2 = Column(Text)
    osn_other = Column(Text)
    check_month = Column(Text)
    check_days = Column(Text)
    check_hours = Column(Text)
    check_form = Column(Text)
    check_org = Column(Text)

    details_tsvector = Column(TsVector)


# Trigger on the genproc table (comment translated from Russian):
# keeps details_tsvector current from name/inn/ogrn using the built-in
# tsvector_update_trigger with the Russian text-search configuration.
trigger_snippet = DDL(
    """
    CREATE TRIGGER details_tsvector_update BEFORE INSERT OR UPDATE
    ON genproc
    FOR EACH ROW EXECUTE PROCEDURE
    tsvector_update_trigger(details_tsvector,'pg_catalog.russian', 'name', 'inn', 'ogrn')
"""
)

# PostgreSQL only; no-op under other dialects.
event.listen(Genproc.__table__, "after_create", trigger_snippet.execute_if(dialect="postgresql"))
Esempio n. 17
0
        elif action == 'edit':
            return url_for('domain_edit', domain=self.name, _external=_external, **kwargs)

    @classmethod
    def get(cls, name, create=False):
        """Fetch the row for *name* (lowercased); optionally create it if missing."""
        name = name.lower()
        found = cls.query.filter_by(name=name).one_or_none()
        if found or not create:
            return found
        fresh = cls(name=name, is_webmail=name in webmail_domains)
        db.session.add(fresh)
        return fresh


# PostgreSQL-only full-text search support for the domain table: a
# PL/pgSQL function that rebuilds search_vector from name/title/
# legal_title/description, the trigger that calls it on INSERT/UPDATE,
# and a GIN index over the vector.
create_domain_search_trigger = DDL(
    '''
    CREATE FUNCTION domain_search_vector_update() RETURNS TRIGGER AS $$
    BEGIN
        NEW.search_vector = to_tsvector('english', COALESCE(NEW.name, '') || ' ' || COALESCE(NEW.title, '') || ' ' || COALESCE(NEW.legal_title, '') || ' ' || COALESCE(NEW.description, ''));
        RETURN NEW;
    END
    $$ LANGUAGE 'plpgsql';

    CREATE TRIGGER domain_search_vector_trigger BEFORE INSERT OR UPDATE ON domain
    FOR EACH ROW EXECUTE PROCEDURE domain_search_vector_update();

    CREATE INDEX ix_domain_search_vector ON domain USING gin(search_vector);
    ''')

event.listen(Domain.__table__, 'after_create',
    create_domain_search_trigger.execute_if(dialect='postgresql'))
Esempio n. 18
0
def load_ddl():
    """Read each bundled SQL script and register it as PostgreSQL-only DDL."""
    for name in ('triggers.sql', 'rpmvercmp.sql'):
        path = os.path.join(get_config('directories.datadir'), name)
        with open(path) as fh:
            ddl = DDL(fh.read())
        listen(Base.metadata, 'after_create', ddl.execute_if(dialect='postgresql'))
Esempio n. 19
0

# PostgreSQL
# When running against PostgreSQL, keep the events_tsv full-text vector
# current via the built-in tsvector_update_trigger over title/content.
if PSQL_ENVIRONMENT:
    trigger_snippet = DDL(
        """
    CREATE TRIGGER ix_events_tsv_update BEFORE INSERT OR UPDATE
    ON events
    FOR EACH ROW EXECUTE PROCEDURE
    tsvector_update_trigger(events_tsv,'pg_catalog.english','title','content')
    """, )

    # Install after the events table is created; dialect-guarded anyway.
    event.listen(
        Event.__table__,
        "after_create",
        trigger_snippet.execute_if(dialect="postgresql"),
    )


class Invitation(Base):
    """ORM model linking a recipient user to an event they are invited to."""

    __tablename__ = "invitations"

    id = Column(Integer, primary_key=True, index=True)
    # Starts as "unread" until the recipient acts on it.
    status = Column(String, nullable=False, default="unread")
    recipient_id = Column(Integer, ForeignKey("users.id"))
    event_id = Column(Integer, ForeignKey("events.id"))
    # NOTE(review): datetime.now is naive local time — confirm UTC is not required.
    creation = Column(DateTime, default=datetime.now)

    recipient = relationship("User")
    event = relationship("Event")
Esempio n. 20
0
                    UserExternalId.service.in_(UserExternalId.__at_username_services__),
                    db.func.lower(UserExternalId.username).like(db.func.lower(query[1:]))
                ).subquery())).options(*cls._defercols).limit(100).all() + users
        elif '@' in query:
            users = cls.query.filter(cls.status == USER_STATUS.ACTIVE, cls.id.in_(
                db.session.query(UserEmail.user_id).filter(UserEmail.user_id != None).filter(  # NOQA
                    db.func.lower(UserEmail.email).like(db.func.lower(query))
                ).subquery())).options(*cls._defercols).limit(100).all() + users
        return users


# PostgreSQL-only lower() expression indexes with varchar_pattern_ops so
# case-insensitive prefix searches on username/fullname can use an index.
create_user_index = DDL(
    'CREATE INDEX ix_user_username_lower ON "user" (lower(username) varchar_pattern_ops); '
    'CREATE INDEX ix_user_fullname_lower ON "user" (lower(fullname) varchar_pattern_ops);')
event.listen(User.__table__, 'after_create',
    create_user_index.execute_if(dialect='postgresql'))


class UserOldId(TimestampMixin, db.Model):
    """Maps a retired userid string to the current User record."""

    __tablename__ = 'useroldid'
    __bind_key__ = 'lastuser'
    query_class = CoasterQuery

    # userid here is NOT a foreign key since it has to continue to exist
    # even if the User record is removed
    userid = db.Column(db.String(22), nullable=False, primary_key=True)
    olduser = db.relationship(User, primaryjoin=foreign(userid) == remote(User.userid),
        backref=db.backref('oldid', uselist=False))
    user_id = db.Column(None, db.ForeignKey('user.id'), nullable=False)
    user = db.relationship(User, primaryjoin=user_id == User.id,
        backref=db.backref('oldids', cascade='all, delete-orphan'))
Esempio n. 21
0
# Title master table: one row per unique (name, year, type).
title_table = sa.Table(
    "title",
    meta.metadata,
    sa.Column("title_id", sa.types.Integer, primary_key=True),
    sa.Column("name", sa.types.Unicode(255), nullable=False),
    sa.Column("year", sa.types.SmallInteger, nullable=False),
    sa.Column("type", EnumIntType(config.TITLE_TYPES), nullable=False),
    sa.Column("created", sa.types.DateTime(), nullable=False, default=datetime.datetime.now),
    sa.Column(
        "modified", sa.types.DateTime(), nullable=False, default=datetime.datetime.now, onupdate=datetime.datetime.now
    ),
    sa.UniqueConstraint("name", "year", "type", name="title_info_unq"),
)
# PostgreSQL-only helper indexes: one for case-insensitive lookups, one
# trigram GIN index for fuzzy matching.
title_lower_index = DDL("create index title_name_lower_idx on title ((lower(name)))")
# BUGFIX: the adjacent string literals previously concatenated to
# "...title_name_trgm_idxon title..." (missing space), producing invalid SQL.
title_trgm_index = DDL("create index title_name_trgm_idx on title using gin (name gin_trgm_ops)")
event.listen(title_table, "after_create", title_lower_index.execute_if(dialect="postgresql"))
event.listen(title_table, "after_create", title_trgm_index.execute_if(dialect="postgresql"))

aka_title_table = sa.Table(
    "aka_title",
    meta.metadata,
    sa.Column("aka_title_id", sa.types.Integer, primary_key=True),
    sa.Column("title_id", sa.types.Integer, sa.ForeignKey("title.title_id")),
    sa.Column("name", sa.types.Unicode(511), nullable=False),
    sa.Column("year", sa.types.SmallInteger, nullable=False),
    sa.Column("region", sa.types.Unicode(100), nullable=False),
    sa.Column("created", sa.types.DateTime(), nullable=False, default=datetime.datetime.now),
    sa.Column(
        "modified", sa.types.DateTime(), nullable=False, default=datetime.datetime.now, onupdate=datetime.datetime.now
    ),
    sa.UniqueConstraint("name", "year", "region", name="aka_title_info_unq"),
Esempio n. 22
0
        else:
            basequery = basequery.filter(cls.keywords == '')

        if filters.get('anywhere'):
            basequery = basequery.filter(cls.remote_location == True)  # NOQA
        else:
            basequery = basequery.filter(cls.remote_location == False)  # NOQA

        return basequery.one_or_none()


@event.listens_for(Filterset, 'before_update')
@event.listens_for(Filterset, 'before_insert')
def _format_and_validate(mapper, connection, target):
    """Normalize geonameids ordering and reject duplicate filter sets before save."""
    with db.session.no_autoflush:
        if target.geonameids:
            target.geonameids = sorted(target.geonameids)
        # A different existing row with identical filters is a conflict.
        existing = Filterset.from_filters(target.board, target.to_filters())
        if existing and existing.id != target.id:
            raise ValueError(
                "There already exists a filter set with this filter criteria")


# NOTE(review): despite the "_trigger" name, this DDL creates a GIN
# index on the geonameids array column (PostgreSQL only).
create_geonameids_trigger = DDL('''
    CREATE INDEX ix_filterset_geonameids on filterset USING gin (geonameids);
''')

event.listen(Filterset.__table__, 'after_create',
             create_geonameids_trigger.execute_if(dialect='postgresql'))
Esempio n. 23
0
        return "<Monkey #{0}>".format(self.id)


# PostgreSQL-only denormalization: keep monkeys.friends_count in sync by
# incrementing/decrementing it whenever a friends row is inserted/deleted.
change_monkey_friends_count_trigger_ddl = DDL(
    """
CREATE OR REPLACE FUNCTION process_change_monkey_friends_count()
RETURNS TRIGGER AS $change_monkey_friends_count$
    BEGIN
        IF (TG_OP = 'DELETE') THEN
            UPDATE monkeys SET friends_count = friends_count - 1
                WHERE id = OLD.monkey_id;
            RETURN OLD;
        ELSIF (TG_OP = 'INSERT') THEN
            UPDATE monkeys SET friends_count = friends_count + 1
                WHERE id = NEW.monkey_id;
            RETURN NEW;
        END IF;
        RETURN NULL;
    END;
$change_monkey_friends_count$ LANGUAGE plpgsql;

CREATE TRIGGER change_monkey_friends_count
AFTER INSERT OR DELETE ON friends
    FOR EACH ROW EXECUTE PROCEDURE process_change_monkey_friends_count();
"""
)

# Install after the friends association table is created.
event.listen(
    friends_relationships, "after_create", change_monkey_friends_count_trigger_ddl.execute_if(dialect="postgresql")
)
Esempio n. 24
0
# Database wiring: engine, session factory, and declarative base.
engine = create_engine(SQLALCHEMY_DATABASE_URL)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

Base = declarative_base()

# PL/pgSQL function stamping data_insercao on INSERT and
# data_atualizacao on UPDATE.
# NOTE(review): if TG_OP is neither INSERT nor UPDATE the function falls
# off the end without a RETURN, which PostgreSQL treats as an error —
# confirm the trigger is only ever attached for INSERT/UPDATE.
func = DDL("""
    CREATE OR REPLACE FUNCTION insere_data_registro()
    RETURNS TRIGGER AS $$
        BEGIN
            IF (TG_OP = 'INSERT') THEN
                new.data_insercao = current_timestamp;
                RETURN NEW;    
            ELSEIF (TG_OP = 'UPDATE') THEN
                new.data_atualizacao = current_timestamp;
                RETURN NEW;
            END IF;
        END;
    $$ LANGUAGE PLPGSQL
""")

# Listening on the Table class applies this to every table's after_create.
event.listen(Table, 'after_create', func.execute_if(dialect='postgresql'))


def get_db():
    """Yield a database session, guaranteeing it is closed afterwards."""
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
Esempio n. 25
0
class UserTable(Base, SQLAlchemyBaseUserTable):
    """User model extended with a unit code and insert/update timestamps."""

    cod_unidade = Column(Integer)  # presumably an organisational unit code — confirm
    # Stamped by the insere_data_registro database trigger, not by Python.
    data_atualizacao = Column(DateTime)
    data_insercao = Column(DateTime)
    pass

users = UserTable.__table__

# PostgreSQL trigger that stamps insert/update timestamps on public.user
# via the insere_data_registro() function defined elsewhere.
trigger = DDL("""
    CREATE TRIGGER inseredata_trigger
    BEFORE INSERT OR UPDATE ON public.user
    FOR EACH ROW EXECUTE PROCEDURE insere_data_registro();
"""
)

# Install the trigger once the user table has been created.
event.listen(
    UserTable.__table__,
    'after_create',
    trigger.execute_if(dialect='postgresql')
)

user_db = SQLAlchemyUserDatabase(UserDB, database_meta, users)

engine = sqlalchemy.create_engine(SQLALCHEMY_DATABASE_URL)

# Create all declared tables (fires the after_create listeners above).
Base.metadata.create_all(engine)

class UserInDB(User):
    """User variant that also carries the stored password hash."""

    hashed_password: str

Esempio n. 26
0
        elif action == "edit":
            return url_for("domain_edit", domain=self.name, _external=_external, **kwargs)

    @classmethod
    def get(cls, name, create=False):
        """Fetch the row for *name* (lowercased); optionally create it if missing."""
        name = name.lower()
        found = cls.query.filter_by(name=name).one_or_none()
        if found or not create:
            return found
        fresh = cls(name=name, is_webmail=name in webmail_domains)
        db.session.add(fresh)
        return fresh


# PostgreSQL-only full-text search support for the domain table:
# function + trigger that rebuild search_vector from name/title/
# legal_title/description, and a GIN index over the vector.
create_domain_search_trigger = DDL(
    """
    CREATE FUNCTION domain_search_vector_update() RETURNS TRIGGER AS $$
    BEGIN
        NEW.search_vector = to_tsvector('english', COALESCE(NEW.name, '') || ' ' || COALESCE(NEW.title, '') || ' ' || COALESCE(NEW.legal_title, '') || ' ' || COALESCE(NEW.description, ''));
        RETURN NEW;
    END
    $$ LANGUAGE 'plpgsql';

    CREATE TRIGGER domain_search_vector_trigger BEFORE INSERT OR UPDATE ON domain
    FOR EACH ROW EXECUTE PROCEDURE domain_search_vector_update();

    CREATE INDEX ix_domain_search_vector ON domain USING gin(search_vector);
    """
)

event.listen(Domain.__table__, "after_create", create_domain_search_trigger.execute_if(dialect="postgresql"))
Esempio n. 27
0
from datetime import datetime

from sqlalchemy import event, DDL, Index

from app import db
from util.hstore import HSTORE


class Pep(db.Model):
    """A PEP document stored with its content and hstore metadata."""

    id = db.Column(db.Integer, primary_key=True)
    number = db.Column(db.Integer, unique=True)  # the PEP's own number
    added = db.Column(db.DateTime, nullable=False, default=datetime.now)
    updated = db.Column(db.DateTime, nullable=False, default=datetime.now)

    # hstore key/value pairs; presumably the PEP header fields — confirm.
    properties = db.Column(HSTORE, nullable=False, default={})
    content = db.Column(db.Text)
    raw_content = db.Column(db.Text)
    filename = db.Column(db.String(200))

    __table_args__ = (
        # NOTE(review): named pep_number_idx but it indexes 'properties',
        # not 'number' — confirm which was intended.
        Index('pep_number_idx', 'properties'),
    )

# PostgreSQL-only GIN index for full-text search over content.
trig_ddl = DDL("CREATE INDEX content_gin_idx ON pep USING gin(to_tsvector('english', content))")
event.listen(Pep.__table__, 'after_create', trig_ddl.execute_if(dialect='postgresql'))
# The hstore extension must exist before the table (properties column) is created.
event.listen(Pep.__table__, 'before_create', DDL("CREATE EXTENSION IF NOT EXISTS hstore").execute_if(dialect='postgresql'))
Esempio n. 28
0
                         is_webmail=is_public_email_domain(name,
                                                           default=False))
            db.session.add(result)
        return result

    @classmethod
    def autocomplete(cls, prefix):
        """Return non-banned rows whose name matches the escaped *prefix* (case-insensitive)."""
        pattern = escape_for_sql_like(prefix)
        matches = cls.query.filter(cls.name.ilike(pattern),
                                   cls.is_banned.is_(False))
        return matches.all()


# PostgreSQL-only full-text search support for the domain table:
# function + trigger that rebuild search_vector from name/title/
# legal_title/description, and a GIN index over the vector.
create_domain_search_trigger = DDL('''
    CREATE FUNCTION domain_search_vector_update() RETURNS TRIGGER AS $$
    BEGIN
        NEW.search_vector = to_tsvector('english', COALESCE(NEW.name, '') || ' ' || COALESCE(NEW.title, '') || ' ' || COALESCE(NEW.legal_title, '') || ' ' || COALESCE(NEW.description, ''));
        RETURN NEW;
    END
    $$ LANGUAGE 'plpgsql';

    CREATE TRIGGER domain_search_vector_trigger BEFORE INSERT OR UPDATE ON domain
    FOR EACH ROW EXECUTE PROCEDURE domain_search_vector_update();

    CREATE INDEX ix_domain_search_vector ON domain USING gin(search_vector);
    ''')

event.listen(
    Domain.__table__,
    'after_create',
    create_domain_search_trigger.execute_if(dialect='postgresql'),
)
Esempio n. 29
0
    code = db.Column(db.Unicode(100), nullable=False, default=generate_coupon_code)
    usage_limit = db.Column(db.Integer, nullable=False, default=1)

    used_count = cached(db.Column(db.Integer, nullable=False, default=0))

    discount_policy_id = db.Column(None, db.ForeignKey('discount_policy.id'), nullable=False)
    discount_policy = db.relationship(DiscountPolicy, backref=db.backref('discount_coupons', cascade='all, delete-orphan'))

    @classmethod
    def is_signed_code_usable(cls, policy, code):
        """Return True unless *code* for *policy* has already hit its usage limit."""
        exhausted = cls.query.filter(
            cls.discount_policy == policy, cls.code == code,
            cls.used_count == cls.usage_limit).one_or_none()
        return not exhausted

    def update_used_count(self):
        """Set used_count to a scalar subquery counting confirmed line items using this coupon."""
        # Local import — presumably avoids a circular dependency at module load; confirm.
        from ..models import LineItem, LINE_ITEM_STATUS

        self.used_count = db.select([db.func.count()]).where(LineItem.discount_coupon == self).where(LineItem.status == LINE_ITEM_STATUS.CONFIRMED).as_scalar()


# NOTE(review): despite the "_trigger" name this DDL enables pg_trgm and
# creates a trigram GIN index on discount_policy.title (PostgreSQL only).
create_title_trgm_trigger = DDL(
    '''
    CREATE EXTENSION IF NOT EXISTS pg_trgm;
    CREATE INDEX idx_discount_policy_title_trgm on discount_policy USING gin (title gin_trgm_ops);
    ''')

event.listen(DiscountPolicy.__table__, 'after_create',
    create_title_trgm_trigger.execute_if(dialect='postgresql'))
Esempio n. 30
0
                        'current_stories'):
                organization_dict[key] = getattr(self, key)()

        return organization_dict


tbl = Organization.__table__
# GIN index over the tsv_body tsvector column (PostgreSQL only).
db.Index('index_org_tsv_body', tbl.c.tsv_body, postgresql_using='gin')

# Trigger that repopulates tsv_body from the name column on every
# INSERT/UPDATE, via PostgreSQL's built-in tsvector_update_trigger.
trig_ddl = DDL("""
    CREATE TRIGGER tsvupdate_orgs_trigger BEFORE INSERT OR UPDATE ON organization FOR EACH ROW EXECUTE PROCEDURE tsvector_update_trigger(tsv_body, 'pg_catalog.english', name);
""")
# Install the trigger after the table is created; no-op on other dialects.
event.listen(tbl, 'after_create', trig_ddl.execute_if(dialect='postgresql'))


class Story(db.Model):
    '''
        Blog posts from a Brigade.
    '''
    # Columns
    id = db.Column(db.Integer(), primary_key=True)
    title = db.Column(db.Unicode())
    link = db.Column(db.Unicode())
    type = db.Column(db.Unicode())
    keep = db.Column(db.Boolean())

    # Relationships
    # child
Esempio n. 31
0
            hashid = random_long_key()
            if not hashid.isdigit() and model.query.filter_by(hashid=hashid).isempty():
                break
    return hashid


# Full-text search support for jobpost: a trigger function rebuilding
# search_vector from company_name/headline/headlineb/description/perks,
# the BEFORE INSERT OR UPDATE trigger that calls it, and a GIN index.
# NOTE(review): the UPDATE guard does not compare company_name, so a change
# to company_name alone will not refresh search_vector — confirm intended.
create_jobpost_search_trigger = DDL(
    '''
    CREATE FUNCTION jobpost_search_vector_update() RETURNS TRIGGER AS $$
    BEGIN
        IF TG_OP = 'INSERT' THEN
            NEW.search_vector = to_tsvector('english', COALESCE(NEW.company_name, '') || ' ' || COALESCE(NEW.headline, '') || ' ' || COALESCE(NEW.headlineb, '') || ' ' || COALESCE(NEW.description, '') || ' ' || COALESCE(NEW.perks, ''));
        END IF;
        IF TG_OP = 'UPDATE' THEN
            IF NEW.headline <> OLD.headline OR COALESCE(NEW.headlineb, '') <> COALESCE(OLD.headlineb, '') OR NEW.description <> OLD.description OR NEW.perks <> OLD.perks THEN
                NEW.search_vector = to_tsvector('english', COALESCE(NEW.company_name, '') || ' ' || COALESCE(NEW.headline, '') || ' ' || COALESCE(NEW.headlineb, '') || ' ' || COALESCE(NEW.description, '') || ' ' || COALESCE(NEW.perks, ''));
            END IF;
        END IF;
        RETURN NEW;
    END
    $$ LANGUAGE 'plpgsql';

    CREATE TRIGGER jobpost_search_vector_trigger BEFORE INSERT OR UPDATE ON jobpost
    FOR EACH ROW EXECUTE PROCEDURE jobpost_search_vector_update();

    CREATE INDEX ix_jobpost_search_vector ON jobpost USING gin(search_vector);
    ''')

# Install once the jobpost table exists; PostgreSQL only.
event.listen(JobPost.__table__, 'after_create',
    create_jobpost_search_trigger.execute_if(dialect='postgresql'))
Esempio n. 32
0
    @classmethod
    def by_posts_num(cls):
        """Return a query of (tag, num_posts) pairs, most-used tags first.

        The outer join keeps tags with no posts (num_posts = 0).
        """
        post_total = func.count(PostTag.c.post_id).label('num_posts')
        query = db.session.query(cls, post_total)
        query = query.outerjoin(PostTag).group_by(cls)
        return query.order_by('num_posts DESC')

    @property
    def url_list(self):
        # URL of the tag index page.
        return url_for('.tag_list')

    @property
    def url_show(self):
        # URL listing the posts carrying this tag.
        return url_for('.post_list', tag_id=self.id)

    @property
    def url_no_delete(self):
        # Tags cannot be deleted; fall back to the index URL.
        return self.url_list

    @property
    def url_edit(self):
        # URL of the edit form for this tag.
        return url_for('.tag_edit', tag_id=self.id)


# Load the post-table DDL (triggers etc.) that ships next to this module.
_here = os.path.dirname(__file__)
_sql_path = os.path.join(_here, 'ddl-post.sql')
# Use a context manager so the file handle is closed promptly; the previous
# bare open(...).read() leaked the handle until garbage collection.
with open(_sql_path) as _sql_file:
    _on_ddl = DDL(_sql_file.read())

# Apply the DDL after the table is created; PostgreSQL only.
event.listen(Post.__table__, 'after_create',
             _on_ddl.execute_if(dialect='postgresql'))
def trigger_for_table(table, timestamp):
    """Register a TimescaleDB hypertable conversion for *table*.

    After the table is created (PostgreSQL only), run create_hypertable()
    to partition it on the *timestamp* column.
    """
    statement = "SELECT create_hypertable('{}', '{}');".format(table.name, timestamp)
    hypertable_ddl = DDL(statement)
    event.listen(table, 'after_create',
                 hypertable_ddl.execute_if(dialect='postgresql'))
Esempio n. 34
0
                   Column('version', Integer, nullable=False),
                   Column('deleted_at_version', Integer, default=None),
                   mysql_engine='InnoDB'

                   )
# Composite index covering the common entity/version lookup,
# including the soft-delete marker.
Index('idx_attrs_entity_version',
      ATTR_TABLE.c.entity_id,
      ATTR_TABLE.c.version,
      ATTR_TABLE.c.deleted_at_version)

# Single-column indexes for key/subkey filtering.
Index('idx_attrs_key', ATTR_TABLE.c.key)
Index('idx_attrs_subkey', ATTR_TABLE.c.subkey)

# Dialect-specific prefix index on string_value; each listener fires only on
# its own dialect, so exactly one of these runs per database.
# MySQL supports native column-prefix indexes.
create_index = DDL('CREATE INDEX idx_attrs_str_value on %(table)s (string_value(20))')
event.listen(ATTR_TABLE, 'after_create', create_index.execute_if(dialect='mysql'))

# PostgreSQL: emulate the prefix index with an expression index.
create_index = DDL('CREATE INDEX idx_attrs_str_value on %(table)s ((substring(string_value,0,20)))')
event.listen(ATTR_TABLE, 'after_create', create_index.execute_if(dialect='postgresql'))

# SQLite has no prefix indexes; index the whole column.
create_index = DDL('CREATE INDEX idx_attrs_str_value on %(table)s (string_value)')
event.listen(ATTR_TABLE, 'after_create', create_index.execute_if(dialect='sqlite'))

# Named per-entity counters keyed by attr_key.
COUNTER_TABLE = Table('counters', METADATA,
                      Column('counter_id', Integer, primary_key=True),
                      Column('entity_id', Integer, ForeignKey('entities.entity_id'), nullable=False),
                      Column('attr_key', String(256, convert_unicode=True)),
                      Column('value', Integer, default=0),
                      mysql_engine='InnoDB'
                      )
Esempio n. 35
0
        artist_dict['num_followers'] = self.num_followers
        artist_dict['image_url'] = (self.image_url)
        artist_dict['popularity'] = self.popularity
        artist_dict['charted_songs'] = [
            (song.song_name) for song in self.charted_songs]
        artist_dict['genres'] = [(genre.name) for genre in self.genres]
        return artist_dict

# Keep Artist.tsvector_col in sync with the name column on every
# INSERT/UPDATE via PostgreSQL's built-in tsvector_update_trigger.
ARTIST_VECTOR_TRIGGER = DDL("""
    CREATE TRIGGER artist_tsvector_update BEFORE INSERT OR UPDATE ON "Artist" FOR EACH ROW EXECUTE PROCEDURE
    tsvector_update_trigger(tsvector_col, 'pg_catalog.english', 'name')
    """)
# Register after table creation; PostgreSQL only (tsvector is PG-specific).
event.listen(Artist.__table__, 'after_create',
             ARTIST_VECTOR_TRIGGER.execute_if(dialect='postgresql'))


class Year(BASE):

    """
    Database model of table 'Year', which stores:
        year: the year's number
        top_album_name: the name of the top album
        top_album_id: the Spotify id of the top album
        top_genre_name: the name of the genre of the year's top album
        top_album_artist_id: the id of the artist who made the top album
        top_genre: the genre of the year's top album
        top_songs: The top 100 songs of that year
    """
    __tablename__ = 'Year'
Esempio n. 36
0
                          Integer,
                          ForeignKey('entities.entity_id'),
                          default=None),
                   Column('version', Integer, nullable=False),
                   Column('deleted_at_version', Integer, default=None),
                   mysql_engine='InnoDB')
# Composite index for entity/version lookups, including the soft-delete marker.
Index('idx_attrs_entity_version', ATTR_TABLE.c.entity_id, ATTR_TABLE.c.version,
      ATTR_TABLE.c.deleted_at_version)

# Single-column indexes for key/subkey filtering.
Index('idx_attrs_key', ATTR_TABLE.c.key)
Index('idx_attrs_subkey', ATTR_TABLE.c.subkey)

# Dialect-specific prefix index on string_value; each listener fires only on
# its own dialect, so exactly one of these runs per database.
# MySQL supports native column-prefix indexes.
create_index = DDL(
    'CREATE INDEX idx_attrs_str_value on %(table)s (string_value(20))')
event.listen(ATTR_TABLE, 'after_create',
             create_index.execute_if(dialect='mysql'))

# PostgreSQL: emulate the prefix index with an expression index.
create_index = DDL(
    'CREATE INDEX idx_attrs_str_value on %(table)s ((substring(string_value,0,20)))'
)
event.listen(ATTR_TABLE, 'after_create',
             create_index.execute_if(dialect='postgresql'))

# SQLite has no prefix indexes; index the whole column.
create_index = DDL(
    'CREATE INDEX idx_attrs_str_value on %(table)s (string_value)')
event.listen(ATTR_TABLE, 'after_create',
             create_index.execute_if(dialect='sqlite'))

COUNTER_TABLE = Table('counters',
                      METADATA,
                      Column('counter_id', Integer, primary_key=True),
Esempio n. 37
0
    pass


# PostgreSQL: keep the events_tsv full-text column in sync with title and
# content. Guarded by PSQL_ENVIRONMENT since tsvector support is PG-specific.
if PSQL_ENVIRONMENT:
    trigger_snippet = DDL("""
    CREATE TRIGGER ix_events_tsv_update BEFORE INSERT OR UPDATE
    ON events
    FOR EACH ROW EXECUTE PROCEDURE
    tsvector_update_trigger(events_tsv,'pg_catalog.english','title','content')
    """)

    # Install the trigger right after the events table is created.
    event.listen(
        Event.__table__,
        'after_create',
        trigger_snippet.execute_if(dialect='postgresql')
    )


class Invitation(Base):
    """An invitation sent to a user (recipient) for a specific event."""

    __tablename__ = "invitations"

    id = Column(Integer, primary_key=True, index=True)
    # Delivery state; new invitations start out "unread".
    status = Column(String, nullable=False, default="unread")
    recipient_id = Column(Integer, ForeignKey("users.id"))
    event_id = Column(Integer, ForeignKey("events.id"))
    # NOTE(review): datetime.now is timezone-naive (local time) — confirm
    # whether UTC was intended here.
    creation = Column(DateTime, default=datetime.now)

    recipient = relationship("User")
    event = relationship("Event")
Esempio n. 38
0
        return sorted(self.properties.items())


# One-time schema additions for full-text search on the pep table: add a
# weighted tsvector column (number > title > author > content), index it
# with GIN, and keep it current via a BEFORE INSERT/UPDATE trigger.
trig_ddl = DDL(
    """
ALTER TABLE pep ADD COLUMN search_col tsvector;
CREATE INDEX search_col_gin_idx ON pep USING gin(search_col);

CREATE OR REPLACE FUNCTION search_col_update_trigger() RETURNS trigger AS $$
begin
  new.search_col :=
    setweight(to_tsvector('english', new.number || ''), 'A') ||
    setweight(to_tsvector('english', coalesce(new.title,'')), 'B') ||
    setweight(to_tsvector('english', coalesce(new.properties->'author','')), 'C') ||
    setweight(to_tsvector('english', coalesce(new.content,'')), 'D');
  return new;
end
$$ LANGUAGE plpgsql;


CREATE TRIGGER search_col_update BEFORE INSERT OR UPDATE
ON pep FOR EACH ROW EXECUTE PROCEDURE
search_col_update_trigger();
"""
)

# search_col machinery goes in after the table exists; the hstore extension
# (needed for the properties->'author' lookup) must exist before creation.
event.listen(Pep.__table__, "after_create", trig_ddl.execute_if(dialect="postgresql"))
event.listen(
    Pep.__table__, "before_create", DDL("CREATE EXTENSION IF NOT EXISTS hstore").execute_if(dialect="postgresql")
)
Esempio n. 39
0
    def is_signed_code_usable(cls, policy, code):
        """Return True unless this signed code has hit its usage limit.

        A coupon row under *policy* with *code* whose used_count equals
        usage_limit means the code is exhausted.
        """
        query = cls.query.filter(cls.discount_policy == policy)
        query = query.filter(cls.code == code)
        query = query.filter(cls.used_count == cls.usage_limit)
        return query.one_or_none() is None

    def update_used_count(self):
        """Set used_count to a scalar subquery counting confirmed line items
        that redeemed this coupon."""
        confirmed = db.select([db.func.count()]).where(
            LineItem.discount_coupon == self
        ).where(LineItem.status == LINE_ITEM_STATUS.CONFIRMED)
        self.used_count = confirmed.as_scalar()


# GIN trigram index on discount_policy.title for fuzzy title search.
# NOTE(review): this assumes the pg_trgm extension is already installed —
# there is no CREATE EXTENSION here; confirm it is created elsewhere.
create_title_trgm_trigger = DDL('''
    CREATE INDEX idx_discount_policy_title_trgm on discount_policy USING gin (title gin_trgm_ops);
    ''')

# Run right after the table is created; PostgreSQL only.
event.listen(
    DiscountPolicy.__table__,
    'after_create',
    create_title_trgm_trigger.execute_if(
        dialect='postgresql'  # type: ignore[arg-type]
    ),
)

# Tail imports
from .line_item import LINE_ITEM_STATUS, LineItem  # isort:skip
Esempio n. 40
0

# Integrity guard: every row an event references (type, status, hive) must
# belong to the same user as the event itself; otherwise raise.
func = DDL("""
    CREATE FUNCTION check_user_events() RETURNS trigger AS $check_user_events$
        BEGIN
            IF NEW.user_id NOT IN (SELECT t.user_id FROM event_types as t WHERE t.id = NEW.type_id) THEN
                RAISE EXCEPTION 'Different user for event_types';
            END IF;

            IF NEW.user_id NOT IN (SELECT t.user_id FROM event_statuses as t WHERE t.id = NEW.status_id) THEN
                RAISE EXCEPTION 'Different user for event_statuses';
            END IF;

            IF NEW.hive_id IS NOT NULL AND NEW.user_id NOT IN (SELECT t.user_id FROM hives as t WHERE t.id = NEW.hive_id) THEN
                RAISE EXCEPTION 'Different user for hives';
            END IF;

            RETURN NEW;
        END; $check_user_events$ LANGUAGE PLPGSQL
""")

# Run the check on every insert/update of events.
trigger = DDL("""
    CREATE TRIGGER trigger_user_events BEFORE INSERT OR UPDATE ON events
        FOR EACH ROW EXECUTE PROCEDURE check_user_events();
""")

# Function must exist before the trigger; listeners fire in registration
# order. PostgreSQL only.
event.listen(Events.metadata, "after_create",
             func.execute_if(dialect="postgresql"))
event.listen(Events.metadata, "after_create",
             trigger.execute_if(dialect="postgresql"))
Esempio n. 41
0
        left join video v on v.account = a.id
        left join video_locale_meta vlm on vlm.video = v.id and vlm.locale = ''
        where a.id = new.account
        group by a.id;

        new.search_vector := vector;
        return new;
    end
    $$ language plpgsql;

    create trigger account_user_search_vector_update
    before insert or update on account_user
    for each row execute procedure account_user_search_vector_trigger();
''' % ACCOUNT_USER_SEARCH_VECTOR)
# Install the account_user search-vector trigger after table creation (PG only).
event.listen(AccountUser.__table__, 'after_create',
             account_user_search_vector_trigger.execute_if(dialect='postgresql'))


video_search_vector_trigger = DDL('''
    create or replace function video_search_vector_trigger() returns trigger as $$
    declare vector tsvector;
    begin
        update account_user set date_updated = new.date_updated where account = new.account;

        select %s into vector
        from account a
        left join video_locale_meta vlm on vlm.video = new.id and vlm.locale = ''
        where a.id = new.account
        group by a.id, vlm.id;

        new.search_vector := vector;
Esempio n. 42
0
                  AFTER INSERT ON build FOR EACH ROW
                  EXECUTE PROCEDURE update_last_build();
              DROP TRIGGER IF EXISTS update_last_complete_build_trigger_up
                    ON build;
              CREATE TRIGGER update_last_complete_build_trigger_up
                  AFTER UPDATE ON build FOR EACH ROW
                  WHEN (OLD.state != NEW.state)
                  EXECUTE PROCEDURE update_last_complete_build();
              DROP TRIGGER IF EXISTS update_last_build_trigger_del
                    ON build;
              CREATE TRIGGER update_last_build_trigger_del
                  BEFORE DELETE ON build FOR EACH ROW
                  EXECUTE PROCEDURE update_last_build_del();
              """)

listen(Base.metadata, 'after_create', trigger.execute_if(dialect='postgresql'))


def grant_db_access(_, conn, *args, **kwargs):
    """After-create hook: grant DML rights on all tables and sequences in
    the public schema to the configured unprivileged role.

    No-op when 'unpriv_db_username' is not configured.
    """
    user = get_config('unpriv_db_username', None)
    if not user:
        return
    # NOTE(review): the role name is interpolated into SQL unescaped; it comes
    # from local configuration, but confirm it is never user-controlled.
    grants = """
                     GRANT SELECT, INSERT, UPDATE, DELETE
                     ON ALL TABLES IN SCHEMA PUBLIC TO {user};
                     GRANT SELECT, USAGE ON ALL SEQUENCES
                     IN SCHEMA PUBLIC TO {user};
                     """.format(user=user)
    conn.execute(grants)


listen(Table, 'after_create', grant_db_access)
Esempio n. 43
0
# One file per (channel, filename, platform).
Index(
    'package_version_filename_index',
    PackageVersion.channel_name,
    PackageVersion.filename,
    PackageVersion.platform,
    unique=True,
)

# A package version is unique across all of its identifying attributes.
UniqueConstraint(
    PackageVersion.channel_name,
    PackageVersion.package_name,
    PackageVersion.package_format,
    PackageVersion.platform,
    PackageVersion.version,
    PackageVersion.build_string,
    PackageVersion.build_number,
    name='package_version_index',
)

# Case-insensitive ICU collation (secondary strength, non-deterministic)
# used for name comparisons.
collation = DDL("CREATE COLLATION IF NOT EXISTS nocase ("
                "provider = icu, "
                "locale = 'und-u-ks-level2', "
                "deterministic = false);")

# The collation must exist before the table that uses it (PostgreSQL only).
event.listen(
    Channel.__table__,
    'before_create',
    collation.execute_if(dialect='postgresql'),  # type: ignore
)
Esempio n. 44
0
            -- because (NULL = NULL) is always FALSE.
            IF found IS NULL THEN

              -- If we didn't find a collision then leave the LOOP.
              EXIT;
            END IF;

            -- We haven't EXITed yet, so return to the top of the LOOP
            -- and try again.
          END LOOP;

          -- NEW and OLD are available in TRIGGER PROCEDURES.
          -- NEW is the mutated row that will actually be INSERTed.
          -- We're replacing id, regardless of what it was before
          NEW.id = key;

          -- The RECORD returned here is what will actually be INSERTed,
          -- or what the next trigger will get if there is one.
          RETURN NEW;
        END;
        $$ language 'plpgsql';

        CREATE TRIGGER trigger_gen_id
        BEFORE INSERT
        ON account
        FOR EACH ROW
        EXECUTE PROCEDURE unique_short_id();
    """)

event.listen(Account.__table__, 'after_create', id_url62_trigger_function.execute_if(dialect='postgresql'))
Esempio n. 45
0
                  AFTER INSERT ON build FOR EACH ROW
                  EXECUTE PROCEDURE update_last_build();
              DROP TRIGGER IF EXISTS update_last_complete_build_trigger_up
                    ON build;
              CREATE TRIGGER update_last_complete_build_trigger_up
                  AFTER UPDATE ON build FOR EACH ROW
                  WHEN (OLD.state != NEW.state)
                  EXECUTE PROCEDURE update_last_complete_build();
              DROP TRIGGER IF EXISTS update_last_build_trigger_del
                    ON build;
              CREATE TRIGGER update_last_build_trigger_del
                  BEFORE DELETE ON build FOR EACH ROW
                  EXECUTE PROCEDURE update_last_build_del();
              """)

listen(Base.metadata, 'after_create', trigger.execute_if(dialect='postgresql'))


def grant_db_access(_, conn, *args, **kwargs):
    """After-create hook: grant DML rights on all tables and sequences in
    the public schema to the configured unprivileged role.

    No-op when 'unpriv_db_username' is not configured.
    """
    user = get_config('unpriv_db_username', None)
    if not user:
        return
    # NOTE(review): the role name is interpolated into SQL unescaped; it comes
    # from local configuration, but confirm it is never user-controlled.
    grants = """
                     GRANT SELECT, INSERT, UPDATE, DELETE
                     ON ALL TABLES IN SCHEMA PUBLIC TO {user};
                     GRANT SELECT, USAGE ON ALL SEQUENCES
                     IN SCHEMA PUBLIC TO {user};
                     """.format(user=user)
    conn.execute(grants)


listen(Table, 'after_create', grant_db_access)
Esempio n. 46
0
    # Parent menu item; deleting the item cascades to its reviews.
    menuItem = relationship(MenuItem,
                            backref=backref("review", cascade="all, delete"))

    @property
    def serialize(self):
        """Return this review as a plain dict suitable for JSON output."""
        data = {}
        data['content'] = self.content
        data['rate'] = self.rate
        data['date'] = self.created_date
        data['id'] = self.id
        data['user id'] = self.user_id
        data['menu item id'] = self.menuItem_id
        return data


# SQLite trigger: after each new review, fold its rating into the parent
# menu item's running average and bump the review counter.
# NOTE(review): if rate/reviews are INTEGER columns, SQLite performs integer
# division here — confirm rate is stored as REAL.
trigger = DDL("""
CREATE TRIGGER aft_insert2 AFTER INSERT ON review
BEGIN
UPDATE menuItem SET rate = ((rate * reviews) + NEW.rate)/(reviews + 1),
reviews = reviews + 1 WHERE NEW.menuItem_id = menuItem.id;
END;
""")

# Install after the review table is created; SQLite only.
event.listen(Review.__table__, 'after_create',
             trigger.execute_if(dialect='sqlite'))

engine = create_engine('sqlite:///restaurantmenu.db')
# Add classes as tables to the 'restaurantmenu.db' database
Base.metadata.create_all(engine)
Esempio n. 47
0
    ALTER TABLE %(table)s ALTER COLUMN "xid" SET DEFAULT txid_current();
    CREATE OR REPLACE FUNCTION snovault_transaction_notify() RETURNS trigger AS $$
    DECLARE
    BEGIN
        PERFORM pg_notify('snovault.transaction', NEW.xid::TEXT);
        RETURN NEW;
    END;
    $$ LANGUAGE plpgsql;

    CREATE TRIGGER snovault_transactions_insert AFTER INSERT ON %(table)s
    FOR EACH ROW EXECUTE PROCEDURE snovault_transaction_notify();
""")

# Emit a NOTIFY on 'snovault.transaction' for every new transaction row
# (PostgreSQL only).
event.listen(
    TransactionRecord.__table__, 'after_create',
    notify_ddl.execute_if(dialect='postgresql'),
)


@event.listens_for(PropertySheet, 'before_insert')
def set_tid(mapper, connection, target):
    """Before insert, stamp the row with the current transaction's tid.

    An explicitly supplied tid is left untouched.
    """
    if target.tid is None:
        current_txn = transaction.get()
        target.tid = current_txn._extension['tid']


def register(DBSession):
    """Hook transaction bookkeeping into the session lifecycle."""
    hooks = (
        ('before_flush', add_transaction_record),
        ('before_commit', record_transaction_data),
    )
    for hook_name, handler in hooks:
        event.listen(DBSession, hook_name, handler)
Esempio n. 48
0
            IF NEW.user_id NOT IN (SELECT t.user_id FROM owners as t WHERE t.id = NEW.owner_id) THEN
                RAISE EXCEPTION 'Different user for owners';
            END IF;

            IF NEW.swarm_id IS NOT NULL AND NEW.user_id NOT IN (SELECT t.user_id FROM swarms as t WHERE t.id = NEW.swarm_id) THEN
                RAISE EXCEPTION 'Different user for swarms';
            END IF;

            IF NEW.apiary_id IS NOT NULL AND NEW.user_id NOT IN (SELECT t.user_id FROM apiaries as t WHERE t.id = NEW.apiary_id) THEN
                RAISE EXCEPTION 'Different user for apiaries';
            END IF;

            IF NEW.user_id NOT IN (SELECT t.user_id FROM hive_conditions as t WHERE t.id = NEW.condition_id) THEN
                RAISE EXCEPTION 'Different user for hive_conditions';
            END IF;

            RETURN NEW;
        END; $check_user_hives$ LANGUAGE PLPGSQL
"""
)

# Fire the per-user ownership check on every insert/update of hives.
trigger = DDL(
    """
    CREATE TRIGGER trigger_user_hives BEFORE INSERT OR UPDATE ON hives
        FOR EACH ROW EXECUTE PROCEDURE check_user_hives();
"""
)

# Function before trigger: listeners fire in registration order (PG only).
event.listen(Hives.metadata, "after_create", func.execute_if(dialect="postgresql"))
event.listen(Hives.metadata, "after_create", trigger.execute_if(dialect="postgresql"))
Esempio n. 49
0
            hashid = random_long_key()
            if not hashid.isdigit() and model.query.filter_by(
                    hashid=hashid).isempty():
                break
    return hashid


# Full-text search support for jobpost: a trigger function rebuilding
# search_vector from company_name/headline/headlineb/description/perks,
# the BEFORE INSERT OR UPDATE trigger that calls it, and a GIN index.
# NOTE(review): the UPDATE guard does not compare company_name, so a change
# to company_name alone will not refresh search_vector — confirm intended.
create_jobpost_search_trigger = DDL('''
    CREATE FUNCTION jobpost_search_vector_update() RETURNS TRIGGER AS $$
    BEGIN
        IF TG_OP = 'INSERT' THEN
            NEW.search_vector = to_tsvector('english', COALESCE(NEW.company_name, '') || ' ' || COALESCE(NEW.headline, '') || ' ' || COALESCE(NEW.headlineb, '') || ' ' || COALESCE(NEW.description, '') || ' ' || COALESCE(NEW.perks, ''));
        END IF;
        IF TG_OP = 'UPDATE' THEN
            IF NEW.headline <> OLD.headline OR COALESCE(NEW.headlineb, '') <> COALESCE(OLD.headlineb, '') OR NEW.description <> OLD.description OR NEW.perks <> OLD.perks THEN
                NEW.search_vector = to_tsvector('english', COALESCE(NEW.company_name, '') || ' ' || COALESCE(NEW.headline, '') || ' ' || COALESCE(NEW.headlineb, '') || ' ' || COALESCE(NEW.description, '') || ' ' || COALESCE(NEW.perks, ''));
            END IF;
        END IF;
        RETURN NEW;
    END
    $$ LANGUAGE 'plpgsql';

    CREATE TRIGGER jobpost_search_vector_trigger BEFORE INSERT OR UPDATE ON jobpost
    FOR EACH ROW EXECUTE PROCEDURE jobpost_search_vector_update();

    CREATE INDEX ix_jobpost_search_vector ON jobpost USING gin(search_vector);
    ''')

# Install once the jobpost table exists; PostgreSQL only.
event.listen(JobPost.__table__, 'after_create',
             create_jobpost_search_trigger.execute_if(dialect='postgresql'))
Esempio n. 50
0
                cls.status == USER_STATUS.ACTIVE,
                cls.id.in_(
                    db.session.query(UserEmail.user_id).filter(
                        UserEmail.user_id != None).filter(  # NOQA
                            db.func.lower(UserEmail.email).like(
                                db.func.lower(query))).subquery())).options(
                                    *cls._defercols).limit(100).all() + users
        return users


# Functional indexes for case-insensitive prefix search on username and
# fullname (varchar_pattern_ops lets LIKE 'foo%' use the index).
create_user_index = DDL(
    'CREATE INDEX ix_user_username_lower ON "user" (lower(username) varchar_pattern_ops); '
    'CREATE INDEX ix_user_fullname_lower ON "user" (lower(fullname) varchar_pattern_ops);'
)
event.listen(User.__table__, 'after_create',
             create_user_index.execute_if(dialect='postgresql'))

# Drop the functional indexes before the table itself is dropped (PG only).
event.listen(
    User.__table__, 'before_drop',
    DDL('DROP INDEX ix_user_username_lower; DROP INDEX ix_user_fullname_lower;'
        ).execute_if(dialect='postgresql'))


class UserOldId(UuidMixin, BaseMixin, db.Model):
    __tablename__ = 'useroldid'
    __uuid_primary_key__ = True

    #: Old user account, if still present
    olduser = db.relationship(
        User,
        primaryjoin='foreign(UserOldId.id) == remote(User.uuid)',