def test_drops_triggers_and_functions(self):
    """drop_trigger removes both the trigger and its backing function."""
    bind = self.session.bind
    sync_trigger(bind, 'article', 'search_vector', ['name', 'content'])

    def trigger_exist():
        # Count matching rows in the system trigger catalog.
        return bind.execute("""SELECT COUNT(*) FROM pg_trigger WHERE tgname='article_search_vector_trigger' """).scalar()

    def function_exist():
        # Count matching rows in the system function catalog.
        return bind.execute("""SELECT COUNT(*) FROM pg_proc WHERE proname='article_search_vector_update' """).scalar()

    assert trigger_exist() == 1
    assert function_exist() == 1
    drop_trigger(bind, 'article', 'search_vector')
    assert trigger_exist() == 0
    assert function_exist() == 0
def upgrade():
    """Create the user_data table and install its search_vector trigger."""
    bind = op.get_bind()
    op.create_table(
        'user_data',
        sa.Column('data_id', postgresql.UUID(), nullable=False),
        sa.Column('field_type', sa.String(), nullable=False),
        sa.Column('field_data', sa.String(), nullable=False),
        sa.Column('search_vector', TSVectorType('field_data'), nullable=False),
        sa.Column('user_id', postgresql.UUID(), nullable=False),
        sa.Column('add_name', sa.String(), nullable=True),
        sa.Column('add_date', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
        sa.Column('last_maintenance_name', sa.String(), nullable=True),
        sa.Column('last_maintenance_date', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
        sa.PrimaryKeyConstraint('data_id'),
        sa.ForeignKeyConstraint(['user_id'], ['users.user_id'], ondelete='CASCADE'),
    )
    # Keep the tsvector column in sync with field_data via a DB trigger.
    sync_trigger(conn=bind, table_name='user_data', tsvector_column='search_vector', indexed_columns=['field_data'])
def upgrade():
    """Add a GIN-indexed search_vector column to queries and sync its trigger."""
    bind = op.get_bind()
    op.add_column('queries', sa.Column('search_vector', su.TSVectorType()))
    op.create_index('ix_queries_search_vector', 'queries', ['search_vector'], unique=False, postgresql_using='gin')
    ss.sync_trigger(bind, 'queries', 'search_vector', ['name', 'description', 'query'])
def upgrade():
    """Add a search_vector column to challenges and install its trigger."""
    bind = op.get_bind()
    op.add_column('challenges', sa.Column('search_vector', sqlalchemy_utils.types.ts_vector.TSVectorType(), nullable=True))
    sync_trigger(bind, "challenges", "search_vector", ["title", "content"])
def upgrade():
    """Add a search_vector column to upcomings and install its trigger."""
    op.add_column('upcomings', sa.Column('search_vector', TSVectorType(), nullable=True))
    bind = op.get_bind()
    sync_trigger(bind, 'upcomings', 'search_vector', ['name', 'description'])
def test_creates_triggers_and_functions(self):
    """sync_trigger installs a trigger that populates search_vector on insert."""
    bind = self.session.bind
    sync_trigger(bind, 'article', 'search_vector', ['name', 'content'])
    bind.execute('''INSERT INTO article (name, content) VALUES ('some name', 'some content')''')
    result = bind.execute('SELECT search_vector FROM article').scalar()
    assert result == "'content':4 'name':2"
def test_custom_vectorizers(self):
    """A registered vectorizer rewrites column content before indexing."""
    article_table = sa.Table('article', self.Base.metadata, autoload=True, autoload_with=self.session.bind)

    @vectorizer(article_table.c.content)
    def vectorize_content(column):
        # Rewrites 'bad' to 'good' before the text is vectorized.
        return sa.func.replace(column, 'bad', 'good')

    bind = self.session.bind
    sync_trigger(bind, 'article', 'search_vector', ['name', 'content'], metadata=self.Base.metadata)
    bind.execute('''INSERT INTO article (name, content) VALUES ('some name', 'some bad content')''')
    result = bind.execute('SELECT search_vector FROM article').scalar()
    assert result == "'content':5 'good':4 'name':2"
def upgrade():
    """Create the blogs and images tables, plus blog search machinery."""
    bind = op.get_bind()
    op.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"')
    op.create_table(
        'blogs',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.Text(), nullable=False),
        sa.Column('slug', sa.Text(), nullable=False),
        sa.Column('tags', postgresql.ARRAY(sa.Text()), nullable=False),
        sa.Column('content', sa.Text(), nullable=False),
        sa.Column('creation_date', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('edit_date', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        # Title matches weigh more than content matches ("A" > "B").
        sa.Column('search_vector', sqlalchemy_utils.types.ts_vector.TSVectorType("title", "content", weights={"title": "A", "content": "B"}), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('slug'),
        sa.UniqueConstraint('title'),
    )
    op.create_index('blogs_tags_array_idx', 'blogs', ['tags'], unique=False, postgresql_using='gin')
    op.create_table(
        'images',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('filetype', sa.Text(), nullable=False),
        sa.Column('image', sa.LargeBinary(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
    )
    bind.execute(sql_expressions.statement)
    sync_trigger(bind, "blogs", "search_vector", ["title", "content"])
def upgrade():
    """Drop sections, add a GIN-indexed search_vector to notes, sync its trigger."""
    bind = op.get_bind()
    op.drop_table('sections')
    op.add_column('notes', sa.Column('search_vector', sqlalchemy_utils.types.ts_vector.TSVectorType(), nullable=True))
    op.create_index('ix_notes_search_vector', 'notes', ['search_vector'], unique=False, postgresql_using='gin')
    sync_trigger(bind, 'notes', 'search_vector', ['title', 'body'])
def downgrade():
    """Rebuild the queries search index and trigger in their previous form."""
    bind = op.get_bind()
    ss.drop_trigger(bind, 'queries', 'search_vector')
    op.drop_index('ix_queries_search_vector', table_name='queries')
    op.create_index('ix_queries_search_vector', 'queries', ['search_vector'], unique=False, postgresql_using='gin')
    ss.sync_trigger(bind, 'queries', 'search_vector', ['name', 'description', 'query'])
def downgrade():
    """Remove the writeups table together with its index and trigger."""
    bind = op.get_bind()
    op.drop_index("writeups_tags_array_idx", table_name="writeups")
    sync_trigger(bind, "writeups", "search_vector", ["title", "content"])
    op.drop_table("writeups")
def test_trigger_with_reserved_word(self):
    """Indexed columns named after SQL reserved words must be quoted properly."""
    bind = self.session.bind
    bind.execute('''INSERT INTO article (name, content, "current_user") VALUES ('some name', 'some bad content', now())''')
    sync_trigger(bind, 'article', 'search_vector', ['name', 'content', 'current_user'])
    # Without reserved-word handling the trigger would raise ProgrammingError here.
    bind.execute('''UPDATE article SET name=name''')
def test_updates_column_values(self):
    """Re-running sync_trigger after a schema change refreshes existing rows."""
    bind = self.session.bind
    sync_trigger(bind, 'article', 'search_vector', ['name', 'content'])
    bind.execute('''INSERT INTO article (name, content) VALUES ('some name', 'some content')''')
    bind.execute('ALTER TABLE article DROP COLUMN name')
    sync_trigger(bind, 'article', 'search_vector', ['content'])
    result = bind.execute('SELECT search_vector FROM article').scalar()
    assert result == "'content':2"
def index_search():
    """Synchronise the search trigger of every index configured in settings.

    Each entry of ``settings.searches_index`` is an argument tuple for
    ``sync_trigger`` (table name, tsvector column, indexed columns, ...).
    """
    # Fix: the previous try/except only re-raised every exception, which is a
    # no-op that obscured the control flow; exceptions now propagate naturally.
    from settings import searches_index

    for index_args in searches_index:
        # NOTE(review): the manager is re-created per iteration as before;
        # presumably it returns a shared db handle — confirm before hoisting.
        db = DataBaseConnectionManager().db
        sync_trigger(db.engine, *index_args)
def upgrade():
    """Add a GIN-indexed search_vector column to queries and sync its trigger."""
    bind = op.get_bind()
    op.add_column("queries", sa.Column("search_vector", su.TSVectorType()))
    op.create_index("ix_queries_search_vector", "queries", ["search_vector"], unique=False, postgresql_using="gin")
    ss.sync_trigger(bind, "queries", "search_vector", ["name", "description", "query"])
def downgrade():
    """Drop every table created by the corresponding upgrade."""
    bind = op.get_bind()
    op.drop_table('Archive')
    op.drop_table('tab')
    op.drop_table('tag')
    sync_trigger(bind, 'Bookmarks', 'fulltext_vector', ['full_text'])
    sync_trigger(bind, 'Bookmarks', 'search_vector', ['title', 'description'])
    op.drop_table('Bookmarks')
    op.drop_table('roles_users')
    op.drop_table('User')
    op.drop_table('role')
def downgrade():
    """Rebuild the queries search index and trigger in their previous form."""
    bind = op.get_bind()
    ss.drop_trigger(bind, "queries", "search_vector")
    op.drop_index("ix_queries_search_vector", table_name="queries")
    op.create_index("ix_queries_search_vector", "queries", ["search_vector"], unique=False, postgresql_using="gin")
    ss.sync_trigger(bind, "queries", "search_vector", ["name", "description", "query"])
def upgrade():
    """Add a GIN-indexed search_vector column to note and sync its trigger."""
    op.add_column('note', sa.Column('search_vector', sqlalchemy_utils.types.ts_vector.TSVectorType(), nullable=True))
    op.create_index('ix_note_search_vector', 'note', ['search_vector'], unique=False, postgresql_using='gin')
    sync_trigger(op.get_bind(), 'note', 'search_vector', ['content'])
def test_creates_triggers_and_functions(self):
    """sync_trigger installs a trigger that populates search_vector on insert."""
    bind = self.session.bind
    sync_trigger(bind, 'article', 'search_vector', ['name', 'content'])
    bind.execute('''INSERT INTO article (name, content) VALUES ('some name', 'some content')''')
    result = bind.execute('SELECT search_vector FROM article').scalar()
    assert result == "'content':4 'name':2"
def upgrade():
    """Create quote_type, user, quote and like tables plus quote search."""
    op.create_table(
        'quote_type',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=25), nullable=False),
        sa.Column('display_name', sa.String(length=50), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name'),
    )
    op.create_table(
        'user',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('firebase_user_id', sa.String(), nullable=False),
        sa.Column('is_admin', sa.Boolean(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('picture_url', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('firebase_user_id'),
    )
    op.create_table(
        'quote',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('author', sa.String(length=100), nullable=False),
        sa.Column('quotation', sa.String(length=200), nullable=False),
        sa.Column('source', sa.String(), nullable=True),
        sa.Column('likes', sa.Integer(), nullable=False),
        sa.Column('contributor_id', sa.Integer(), nullable=False),
        sa.Column('type_id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('search_vector', TSVectorType(), nullable=False),
        sa.ForeignKeyConstraint(['contributor_id'], ['user.id'],),
        sa.ForeignKeyConstraint(['type_id'], ['quote_type.id'],),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index('ix_quote_search_vector', 'quote', ['search_vector'], unique=False, postgresql_using='gin')
    op.create_table(
        'like',
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('quote_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['quote_id'], ['quote.id'],),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'],),
        sa.PrimaryKeyConstraint('user_id', 'quote_id'),
    )
    # Keep quote.search_vector in sync with quotation and author.
    sync_trigger(op.get_bind(), 'quote', 'search_vector', ['quotation', 'author'])
def upgrade():
    """Sync the TBL_Products search trigger and tighten column constraints."""
    bind = op.get_bind()
    # NOTE(review): 'sub_aise' looks like a typo for 'sub_aisle', but it must
    # match the actual column name in TBL_Products — verify before changing.
    sync_trigger(bind, 'TBL_Products', 'search_vector', ['product_name', 'brand', 'aisle', 'sub_aise', 'shelf', 'shelf_gran'])
    op.alter_column('TBL_Products', 'aisle', existing_type=sa.VARCHAR(length=200), nullable=False)
    op.alter_column('TBL_Products', 'brand', existing_type=sa.VARCHAR(length=200), nullable=False)
    op.alter_column('TBL_Products', 'product_name', existing_type=sa.VARCHAR(length=300), nullable=False)
    op.create_index(op.f('ix_TBL_Products_product_id'), 'TBL_Products', ['product_id'], unique=False)
def upgrade():
    """Rebuild the queries search trigger with a custom id vectorizer."""
    ss.vectorizer.clear()
    bind = op.get_bind()
    meta = sa.MetaData(bind=bind)
    queries_table = sa.Table('queries', meta, autoload=True)

    @ss.vectorizer(queries_table.c.id)
    def integer_vectorizer(column):
        # Integer ids must be cast to text before they can be vectorized.
        return sa.func.cast(column, sa.Text)

    ss.sync_trigger(bind, 'queries', 'search_vector', ['id', 'name', 'description', 'query'], metadata=meta)
def test_updates_column_values(self):
    """Re-running sync_trigger after a schema change refreshes existing rows."""
    bind = self.session.bind
    sync_trigger(bind, 'article', 'search_vector', ['name', 'content'])
    bind.execute('''INSERT INTO article (name, content) VALUES ('some name', 'some content')''')
    bind.execute('ALTER TABLE article DROP COLUMN name')
    sync_trigger(bind, 'article', 'search_vector', ['content'])
    result = bind.execute('SELECT search_vector FROM article').scalar()
    assert result == "'content':2"
def test_trigger_with_reserved_word(self):
    """Indexed columns named after SQL reserved words must be quoted properly."""
    bind = self.session.bind
    bind.execute('''INSERT INTO article (name, content, "current_user") VALUES ('some name', 'some bad content', now())''')
    sync_trigger(bind, 'article', 'search_vector', ['name', 'content', 'current_user'])
    # Without reserved-word handling the trigger would raise ProgrammingError here.
    bind.execute('''UPDATE article SET name=name''')
def downgrade():
    """Restore the plain-text search_vector column and relax constraints."""
    bind = op.get_bind()
    op.add_column('TBL_Products', sa.Column('search_vector', sa.Text))
    # NOTE(review): 'sub_aise' matches the upgrade's spelling; presumably the
    # real column name — verify against the table definition.
    sync_trigger(bind, 'TBL_Products', 'search_vector', ['product_name', 'brand', 'aisle', 'sub_aise', 'shelf', 'shelf_gran'])
    op.drop_index(op.f('ix_TBL_Products_product_id'), table_name='TBL_Products')
    op.alter_column('TBL_Products', 'product_name', existing_type=sa.VARCHAR(length=300), nullable=True)
    op.alter_column('TBL_Products', 'brand', existing_type=sa.VARCHAR(length=200), nullable=True)
    op.alter_column('TBL_Products', 'aisle', existing_type=sa.VARCHAR(length=200), nullable=True)
def upgrade():
    """Add a GIN-indexed search_vector to word, covering all word forms."""
    op.add_column('word', sa.Column('search_vector', sqlalchemy_utils.TSVectorType(), nullable=True))
    op.create_index('ix_word_search_vector', 'word', ['search_vector'], unique=False, postgresql_using='gin')
    bind = op.get_bind()
    sync_trigger(bind, 'word', 'search_vector', [
        'singular', 'plural', 'infinitive', 'past_tense', 'present_perfect_tense',
        'positive', 'comparative', 'superlative', 'adverb', 'conjunction',
        'preposition', 'meaning', 'examples',
    ])
def upgrade():
    """Create the blogs table with a weighted search vector and tags index."""
    bind = op.get_bind()
    op.create_table(
        "blogs",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("title", sa.Text(), nullable=False),
        sa.Column("slug", sa.Text(), nullable=False),
        sa.Column("tags", sa.ARRAY(sa.Text()), nullable=False),
        sa.Column("content", sa.Text(), nullable=False),
        sa.Column("creation_date", sa.DateTime(), server_default=sa.text("now()"), nullable=False),
        sa.Column("edit_date", sa.DateTime(), server_default=sa.text("now()"), nullable=False),
        # Title matches weigh more than content matches ("A" > "B").
        sa.Column("search_vector", sqlalchemy_utils.types.ts_vector.TSVectorType("title", "content", weights={"title": "A", "content": "B"}), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("slug"),
        sa.UniqueConstraint("title"),
    )
    op.create_index("blogs_tags_array_idx", "blogs", ["tags"], unique=False, postgresql_using="gin")
    sync_trigger(bind, "blogs", "search_vector", ["title", "content"])
def upgrade():
    """Rebuild the queries search trigger with a custom id vectorizer."""
    ss.vectorizer.clear()
    bind = op.get_bind()
    meta = sa.MetaData(bind=bind)
    queries_table = sa.Table("queries", meta, autoload=True)

    @ss.vectorizer(queries_table.c.id)
    def integer_vectorizer(column):
        # Integer ids must be cast to text before they can be vectorized.
        return sa.func.cast(column, sa.Text)

    ss.sync_trigger(bind, "queries", "search_vector", ["id", "name", "description", "query"], metadata=meta)
def upgrade():
    """Rename shortDesc, add a GIN-indexed package search vector and trigger."""
    op.alter_column('package', 'shortDesc', nullable=False, new_column_name='short_desc')
    op.add_column('package', sa.Column('search_vector', TSVectorType("title", "short_desc", "desc"), nullable=True))
    op.create_index('ix_package_search_vector', 'package', ['search_vector'], unique=False, postgresql_using='gin')
    bind = op.get_bind()
    sync_trigger(bind, 'package', 'search_vector', ["title", "short_desc", "desc"])
def test_custom_vectorizers(self):
    """A registered vectorizer rewrites column content before indexing."""
    article_table = sa.Table('article', self.Base.metadata, autoload=True, autoload_with=self.session.bind)

    @vectorizer(article_table.c.content)
    def vectorize_content(column):
        # Rewrites 'bad' to 'good' before the text is vectorized.
        return sa.func.replace(column, 'bad', 'good')

    bind = self.session.bind
    sync_trigger(bind, 'article', 'search_vector', ['name', 'content'], metadata=self.Base.metadata)
    bind.execute('''INSERT INTO article (name, content) VALUES ('some name', 'some bad content')''')
    result = bind.execute('SELECT search_vector FROM article').scalar()
    assert result == "'content':5 'good':4 'name':2"
def upgrade():
    """Rebuild the queries search trigger with a custom id vectorizer."""
    ss.vectorizer.clear()
    bind = op.get_bind()
    meta = sa.MetaData(bind=bind)
    queries_table = sa.Table('queries', meta, autoload=True)

    @ss.vectorizer(queries_table.c.id)
    def integer_vectorizer(column):
        # Integer ids must be cast to text before they can be vectorized.
        return sa.func.cast(column, sa.Text)

    ss.sync_trigger(bind, 'queries', 'search_vector', ['id', 'name', 'description', 'query'], metadata=meta)
def upgrade():
    """Create the pokedexes table and install its search trigger."""
    # NOTE(review): `searchable` is presumably a module-level list of column
    # names shared by these migrations — confirm it is defined in this file.
    op.create_table(
        'pokedexes',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.Text, unique=True, nullable=False),
        sa.Column('official_name', sa.Text, unique=True, nullable=False),
        sa.Column('region', sa.Text, nullable=True, default=None),
        sa.Column('description', sa.Text, nullable=True, default=None),
        sa.Column('search_vector', TSVectorType(searchable)),
        sa.Column('inserted_at', sa.DateTime, default=sa.func.current_timestamp(), nullable=False),
        sa.Column('updated_at', sa.DateTime, default=sa.func.current_timestamp(), onupdate=sa.func.current_timestamp(), nullable=False),
    )
    bind = op.get_bind()
    sync_trigger(bind, 'pokedexes', 'search_vector', searchable)
def upgrade():
    """Create the pokemon table and install its search trigger."""
    op.create_table(
        'pokemon',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.Text, unique=True, nullable=False),
        sa.Column('flavor_text', sa.Text, nullable=False),
        sa.Column('habitat', sa.Text, nullable=True, default=None),
        sa.Column('color', sa.Text, nullable=False),
        sa.Column('shape', sa.Text, nullable=False),
        sa.Column('search_vector', TSVectorType(searchable)),
        sa.Column('inserted_at', sa.DateTime, default=sa.func.current_timestamp(), nullable=False),
        sa.Column('updated_at', sa.DateTime, default=sa.func.current_timestamp(), onupdate=sa.func.current_timestamp(), nullable=False),
    )
    bind = op.get_bind()
    sync_trigger(bind, 'pokemon', 'search_vector', searchable)
def upgrade():
    """Add GIN-indexed search_vector columns to category, group and project."""
    bind = op.get_bind()
    op.add_column('category', sa.Column('search_vector', sqlalchemy_utils.types.ts_vector.TSVectorType(), nullable=True))
    op.create_index('ix_category_search_vector', 'category', ['search_vector'], unique=False, postgresql_using='gin')
    op.add_column('group', sa.Column('search_vector', sqlalchemy_utils.types.ts_vector.TSVectorType(), nullable=True))
    op.create_index('ix_group_search_vector', 'group', ['search_vector'], unique=False, postgresql_using='gin')
    op.add_column('project', sa.Column('search_vector', sqlalchemy_utils.types.ts_vector.TSVectorType(), nullable=True))
    op.create_index('ix_project_search_vector', 'project', ['search_vector'], unique=False, postgresql_using='gin')
    sync_trigger(bind, 'category', 'search_vector', ['name'])
    sync_trigger(bind, 'group', 'search_vector', ['name'])
    sync_trigger(bind, 'project', 'search_vector', ['name', 'summary'])
def upgrade():
    """Add a GIN-indexed search_vector to log spanning many text columns."""
    bind = op.get_bind()
    op.add_column("log", sa.Column("search_vector", sqlalchemy_utils.types.ts_vector.TSVectorType(), nullable=True))
    op.create_index("ix_log_search_vector", "log", ["search_vector"], unique=False, postgresql_using="gin")
    sync_trigger(bind, "log", "search_vector", [
        "call", "comment", "country", "email", "name", "notes", "operator",
        "owner_callsign", "qslmsg", "station_callsign", "web", "qsl_comment",
    ])
def upgrade():
    """Create the writeups table with a weighted search vector and tags index."""
    bind = op.get_bind()
    # op.execute("CREATE EXTENSION pg_trgm")
    op.create_table(
        "writeups",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("author_id", sa.BigInteger(), nullable=False),
        sa.Column("title", sa.Text(), nullable=False),
        sa.Column("tags", sa.ARRAY(sa.Text()), nullable=False),
        sa.Column("content", sa.Text(), nullable=False),
        # Title matches weigh more than content matches ("A" > "B").
        sa.Column("search_vector", sqlalchemy_utils.types.ts_vector.TSVectorType("title", "content", weights={"title": "A", "content": "B"}), nullable=True),
        sa.ForeignKeyConstraint(["author_id"], ["users.discord_id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index("writeups_tags_array_idx", "writeups", ["tags"], unique=False, postgresql_using="gin")
    bind.execute(sql_expressions.statement)
    sync_trigger(bind, "writeups", "search_vector", ["title", "content"])
def upgrade():
    """Create the moves table and install its search trigger."""
    op.create_table(
        'moves',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.Text, unique=True, nullable=False),
        sa.Column('flavor_text', sa.Text, nullable=True, default=None),
        sa.Column('short_effect', sa.Text, nullable=False),
        sa.Column('effect', sa.Text, nullable=False),
        sa.Column('damage_class', sa.Text, nullable=True, default=None),
        sa.Column('power_points', sa.Integer, nullable=True, default=None),
        sa.Column('power', sa.Integer, nullable=True, default=None),
        sa.Column('accuracy', sa.Integer, nullable=True, default=None),
        sa.Column('search_vector', TSVectorType(searchable)),
        sa.Column('inserted_at', sa.DateTime, default=sa.func.current_timestamp(), nullable=False),
        sa.Column('updated_at', sa.DateTime, default=sa.func.current_timestamp(), onupdate=sa.func.current_timestamp(), nullable=False),
    )
    bind = op.get_bind()
    sync_trigger(bind, 'moves', 'search_vector', searchable)
def test_drops_triggers_and_functions(self):
    """drop_trigger removes both the trigger and its backing function."""
    bind = self.session.bind
    sync_trigger(bind, 'article', 'search_vector', ['name', 'content'])

    def trigger_exist():
        # Count matching rows in the system trigger catalog.
        return bind.execute(
            """SELECT COUNT(*) FROM pg_trigger WHERE tgname = 'article_search_vector_trigger' """
        ).scalar()

    def function_exist():
        # Count matching rows in the system function catalog.
        return bind.execute(
            """SELECT COUNT(*) FROM pg_proc WHERE proname = 'article_search_vector_update' """
        ).scalar()

    assert trigger_exist() == 1
    assert function_exist() == 1
    drop_trigger(bind, 'article', 'search_vector')
    assert trigger_exist() == 0
    assert function_exist() == 0
def upgrade():
    """Add a GIN-indexed search_vector to episode and sync its trigger."""
    bind = op.get_bind()
    op.add_column('episode', sa.Column('search_vector', sqlalchemy_utils.types.ts_vector.TSVectorType(), nullable=True))
    op.create_index('ix_episode_search_vector', 'episode', ['search_vector'], unique=False, postgresql_using='gin')
    sync_trigger(bind, 'episode', 'search_vector', ['title', 'description'])
def downgrade():
    """Re-sync the passport lexemes trigger, then drop the lexemes column."""
    # Fix: `conn` was referenced without ever being defined, so this
    # migration raised NameError at runtime; obtain the bind from Alembic.
    conn = op.get_bind()
    sync_trigger(conn, 'passport', 'lexemes', ['surname', 'name'])
    op.drop_column('passport', 'lexemes')
def upgrade_triggers():
    """Add a plain-text search_vector column to TBL_Products and sync it."""
    bind = op.get_bind()
    op.add_column('TBL_Products', db.Column('search_vector', db.Text))
    # NOTE(review): 'sub_aise' looks like a typo for 'sub_aisle', but it must
    # match the actual column name in TBL_Products — verify before changing.
    sync_trigger(bind, 'TBL_Products', 'search_vector', ['product_name', 'brand', 'aisle', 'sub_aise', 'shelf', 'shelf_gran'])
def sync_triggers():
    """Synchronise the search_vector trigger of every searchable table.

    Fix: ``dispatch_user`` was synced twice (first and last call); the
    duplicate has been removed. The repeated calls are also collapsed into a
    single data-driven loop so adding a table is a one-line change.
    """
    from sqlalchemy_searchable import sync_trigger

    # (table name, columns folded into that table's search_vector)
    searchable_tables = [
        ("definition", ["text"]),
        ("dispatch_user", ["email"]),
        ("document", ["name"]),
        ("incident", ["name", "title", "description"]),
        ("incident_cost_type", ["name", "description"]),
        ("incident_priority", ["name", "description"]),
        ("incident_type", ["name", "description"]),
        ("individual_contact", ["name", "title", "company", "notes"]),
        ("notification", ["name", "description"]),
        ("plugin", ["title", "slug", "type", "description"]),
        ("report", ["details_raw"]),
        ("search_filter", ["name", "description"]),
        ("service", ["name"]),
        ("tag", ["name"]),
        ("task", ["description"]),
        ("team_contact", ["name", "company", "notes"]),
        ("term", ["text"]),
        ("workflow", ["name", "description"]),
    ]
    for table_name, columns in searchable_tables:
        sync_trigger(engine, table_name, "search_vector", columns)
def index_search():
    """Synchronise the search trigger of every configured index.

    Each entry of ``searches_index`` is an argument tuple for
    ``sync_trigger`` (table name, tsvector column, indexed columns, ...).
    """
    # Fix: the previous try/except only re-raised every exception, which is a
    # no-op that obscured the control flow; exceptions now propagate naturally.
    for index_args in searches_index:
        sync_trigger(db.engine, *index_args)
def upgrade():
    """Add the lexemes tsvector column to passport and install its trigger."""
    op.add_column('passport', sa.Column('lexemes', TSVectorType(), nullable=False))
    # Fix: `conn` was referenced without ever being defined, so this
    # migration raised NameError at runtime; obtain the bind from Alembic.
    conn = op.get_bind()
    sync_trigger(conn, 'passport', 'lexemes', ['surname', 'name'])
def upgrade():
    """Create role, User, roles_users, Bookmarks, tag, tab and Archive tables,
    and install both Bookmarks search triggers."""
    bind = op.get_bind()
    op.create_table(
        'role',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=80), nullable=True),
        sa.Column('description', sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name'),
    )
    op.create_table(
        'User',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('email', sa.String(length=255), nullable=True),
        sa.Column('password', sa.String(length=255), nullable=True),
        sa.Column('first_name', sa.String(length=255), nullable=True),
        sa.Column('last_name', sa.String(length=255), nullable=True),
        sa.Column('active', sa.Boolean(), nullable=True),
        sa.Column('confirmed_at', sa.DateTime(), nullable=True),
        sa.Column('bookmarks_per_page', sa.Integer(), nullable=True),
        sa.Column('api_key', sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('api_key'),
        sa.UniqueConstraint('email'),
    )
    op.create_table(
        'roles_users',
        sa.Column('User_id', sa.Integer(), nullable=True),
        sa.Column('role_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['User_id'], ['User.id'], ),
        sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
    )
    op.create_table(
        'Bookmarks',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(length=1024), nullable=True),
        sa.Column('description', sa.String(length=256), nullable=True),
        sa.Column('main_url', sa.String(length=2000), nullable=True),
        sa.Column('added_on', sa.DateTime(), nullable=True),
        sa.Column('user', sa.Integer(), nullable=True),
        sa.Column('deleted', sa.Boolean(), nullable=True),
        sa.Column('search_vector', TSVECTOR),
        sa.Column('readability_html', sa.Text(), nullable=True),
        sa.Column('tags', postgresql.ARRAY(sa.String()), nullable=True),
        sa.Column('full_text', sa.Text(), nullable=True),
        sa.Column('fulltext_vector', TSVECTOR),
        sa.ForeignKeyConstraint(['user'], ['User.id'], ),
        sa.PrimaryKeyConstraint('id'),
    )
    # Two independent vectors: full page text and title/description metadata.
    sync_trigger(bind, 'Bookmarks', 'fulltext_vector', ['full_text'])
    sync_trigger(bind, 'Bookmarks', 'search_vector', ['title', 'description'])
    op.create_table(
        'tag',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('text', sa.String(length=255), nullable=True),
        sa.Column('user', sa.Integer(), nullable=True),
        sa.Column('count', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['user'], ['User.id'], ),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'tab',
        sa.Column('id', sa.String(length=50), nullable=False),
        sa.Column('tabs', sa.PickleType(), nullable=True),
        sa.Column('added_on', sa.DateTime(), nullable=True),
        sa.Column('user', sa.Integer(), nullable=True),
        sa.Column('title', sa.String(length=255), nullable=True),
        sa.ForeignKeyConstraint(['user'], ['User.id'], ),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'Archive',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('web_page', sa.Integer(), nullable=True),
        sa.Column('service', sa.String(length=2048), nullable=True),
        sa.Column('archived_on', sa.DateTime(), nullable=True),
        sa.Column('archive_url', sa.String(length=2000), nullable=True),
        sa.Column('status', sa.String(length=2000), nullable=True),
        sa.ForeignKeyConstraint(['web_page'], ['Bookmarks.id'], ),
        sa.PrimaryKeyConstraint('id'),
    )
def downgrade():
    """Re-sync the TBL_Products search trigger with the previous column set."""
    bind = op.get_bind()
    #op.add_column('TBL_Products', sa.Column('search_vector', sa.Text))
    sync_trigger(bind, 'TBL_Products', 'search_vector', ['product_name', 'brand', 'aisle', 'sub_aisle', 'shelf', 'shelf_gran'])