def test_ddl_execute(self):
    """Exercise DDL execution through every supported invocation path."""
    engine = create_engine("sqlite:///")
    cx = engine.connect()
    table = self.users
    ddl = DDL("SELECT 1")

    # Each entry is (callable, *args); all eight call paths must
    # produce a single (1,) row.
    call_specs = [
        (engine.execute, ddl),
        (engine.execute, ddl, table),
        (cx.execute, ddl),
        (cx.execute, ddl, table),
        (ddl.execute, engine),
        (ddl.execute, engine, table),
        (ddl.execute, cx),
        (ddl.execute, cx, table),
    ]
    for spec in call_specs:
        fn, args = spec[0], spec[1:]
        result = fn(*args)
        eq_(list(result), [(1,)])

    # With no bind configured, DDL.execute must refuse to run.
    for fn, kw in ((ddl.execute, {}), (ddl.execute, dict(target=table))):
        assert_raises(tsa.exc.UnboundExecutionError, fn, **kw)

    # Once bound -- to either the engine or the connection -- the same
    # calls succeed.
    for bind in engine, cx:
        ddl.bind = bind
        for fn, kw in (
            (ddl.execute, {}),
            (ddl.execute, dict(target=table)),
        ):
            result = fn(**kw)
            eq_(list(result), [(1,)])
def test_ddl_execute(self):
    """Exercise DDL execution through every supported invocation path.

    Rewritten from an eval()-based loop: building the calls as
    (callable, *args) tuples is clearer, debuggable, and avoids eval
    entirely.
    """
    engine = create_engine('sqlite:///')
    cx = engine.connect()
    table = self.users
    ddl = DDL('SELECT 1')

    for spec in (
        (engine.execute, ddl),
        (engine.execute, ddl, table),
        (cx.execute, ddl),
        (cx.execute, ddl, table),
        (ddl.execute, engine),
        (ddl.execute, engine, table),
        (ddl.execute, cx),
        (ddl.execute, cx, table),
    ):
        fn, args = spec[0], spec[1:]
        r = fn(*args)
        assert list(r) == [(1,)], spec

    # An unbound DDL must raise rather than silently execute.
    for fn, kw in ((ddl.execute, {}), (ddl.execute, dict(target=table))):
        try:
            fn(**kw)
            assert False
        except tsa.exc.UnboundExecutionError:
            pass

    # Bound to either the engine or the connection, the calls succeed.
    for bind in engine, cx:
        ddl.bind = bind
        for fn, kw in ((ddl.execute, {}), (ddl.execute, dict(target=table))):
            r = fn(**kw)
            assert list(r) == [(1,)]
def test_platform_escape(self):
    """test the escaping of % characters in the DDL construct."""

    compiler = testing.db.dialect.statement_compiler(
        testing.db.dialect, None
    )
    default_from = compiler.default_from()

    # A SELECT is pushed through DDL() purely so the %-escape round
    # trip can be verified.  DDL() triggers autocommit, which prevents
    # some DBAPIs (pyodbc) from returning results, so run inside an
    # explicit transaction.
    with testing.db.begin() as conn:
        plain = conn.execute(
            text("select 'foo%something'" + default_from)
        ).scalar()
        eq_(plain, "foo%something")

        escaped = conn.execute(
            DDL("select 'foo%%something'" + default_from)
        ).scalar()
        eq_(escaped, "foo%something")
def test_ddl_execute(self):
    """Exercise DDL execution through every supported invocation path.

    Rewritten from an eval()-based loop: the calls are expressed as
    (callable, *args) tuples, which is clearer and avoids eval.
    """
    engine = create_engine("sqlite:///")
    cx = engine.connect()
    table = self.users
    ddl = DDL("SELECT 1")

    for spec in (
        (engine.execute, ddl),
        (engine.execute, ddl, table),
        (cx.execute, ddl),
        (cx.execute, ddl, table),
        (ddl.execute, engine),
        (ddl.execute, engine, table),
        (ddl.execute, cx),
        (ddl.execute, cx, table),
    ):
        fn, args = spec[0], spec[1:]
        r = fn(*args)
        assert list(r) == [(1, )], spec

    # Without a bind, DDL.execute must raise.
    for fn, kw in ((ddl.execute, {}), (ddl.execute, dict(target=table))):
        try:
            fn(**kw)
            assert False
        except tsa.exc.UnboundExecutionError:
            pass

    # Bound to either engine or connection, the calls succeed.
    for bind in engine, cx:
        ddl.bind = bind
        for fn, kw in ((ddl.execute, {}), (ddl.execute, dict(target=table))):
            r = fn(**kw)
            assert list(r) == [(1, )]
def test_filter(self):
    """DDL._should_execute honors execute_if dialect/callable filters."""
    cx = self.mock_engine()
    tbl = Table('t', MetaData(), Column('id', Integer))
    target = cx.name

    # A bare DDL always executes.
    assert DDL('')._should_execute(tbl, cx)

    # dialect= filter: matching dialect name passes, unknown blocks.
    assert DDL('').execute_if(dialect=target)._should_execute(tbl, cx)
    assert not DDL('').execute_if(
        dialect='bogus')._should_execute(tbl, cx)

    # callable_= filter: a truthy return lets the DDL run.
    accept_all = lambda d, y, z, **kw: True
    assert DDL('').execute_if(
        callable_=accept_all)._should_execute(tbl, cx)

    # The third positional argument is the bind; its engine name is
    # available to the filter.
    not_bogus = lambda d, y, z, **kw: z.engine.name != 'bogus'
    assert DDL('').execute_if(
        callable_=not_bogus)._should_execute(tbl, cx)
def search_trigger_ddl(self, column):
    """
    Returns the ddl for creating an automatically updated search trigger.

    :param column: TSVectorType typed SQLAlchemy column object
    """
    trigger_sql = CreateSearchTriggerSQL(column)
    return DDL(str(trigger_sql))
def attach_ddl_listeners(self):
    """Rebuild the search-vector DDL event listeners for this manager.

    First detaches every listener added by a previous call, then
    re-attaches create/drop hooks for each processed tsvector column.
    """
    # Remove all previously added listeners, so that same listener don't
    # get added twice in situations where class configuration happens in
    # multiple phases (issue #31).
    for listener in self.listeners:
        event.remove(*listener)
    self.listeners = []
    for column in self.processed_columns:
        # This sets up the trigger that keeps the tsvector column up to
        # date.
        if column.type.columns:
            table = column.table
            if self.option(column, 'remove_symbols'):
                # A custom search function is only needed when symbol
                # removal is configured; register its create/drop pair.
                self.add_listener((
                    table,
                    'after_create',
                    self.search_function_ddl(column)
                ))
                self.add_listener((
                    table,
                    'after_drop',
                    DDL(str(DropSearchFunctionSQL(column)))
                ))
            # The trigger itself is always (re)registered for the table.
            self.add_listener((
                table,
                'after_create',
                self.search_trigger_ddl(column)
            ))
def _setup_mysql_fulltext_indexes():
    """Attach MySQL-only FULLTEXT index DDL to the media_fulltext table.

    Uses ``.items()`` instead of the Python-2-only ``.iteritems()`` so
    the function also runs under Python 3.
    """
    for name, cols in _fulltext_indexes.items():
        # The doubled %% keeps %(table)s intact for DDL's own
        # interpolation pass at execution time.
        sql = ('ALTER TABLE %%(table)s '
               'ADD FULLTEXT INDEX media_fulltext_%(name)s (%(cols)s)') % {
            'name': name,
            'cols': ', '.join(col.name for col in cols)
        }
        DDL(sql, on='mysql').execute_at('after-create', media_fulltext)
def test_ddl_execute(self):
    """A DDL construct executes on a connection inside a transaction."""
    engine = create_engine("sqlite:///")
    connection = engine.connect()
    connection.begin()
    statement = DDL("SELECT 1")
    result = connection.execute(statement)
    eq_(list(result), [(1, )])
def _setup_postgres_ddl(table):
    """
    Barcode default for PostgreSQL and a sequence to support the legacy DB
    """
    # Only runs on PostgreSQL; fired after the table itself is created.
    barcode_type_ddl = DDL("""
    ALTER TABLE %(table)s ALTER COLUMN barcode SET DATA TYPE cenix_barcode
    """, on='postgres')
    barcode_type_ddl.execute_at('after-create', table)
def before_create(event, metadata):
    """Register before_create DDL on *metadata*.

    Loads every SQL function file matching FNS_PATTERN, plus the
    extensions the schema needs (btree_gist, pg_trgm).
    """
    for fn_sql in glob(str(FNS_PATTERN)):
        # Read via a context manager so the file handle is closed
        # promptly -- the original bare open() leaked it.
        with open(fn_sql) as f:
            event.listen(
                metadata,
                'before_create',
                DDL(f.read())
            )
    stmts = [
        "CREATE EXTENSION IF NOT EXISTS btree_gist",
        "CREATE EXTENSION IF NOT EXISTS pg_trgm"
    ]
    for stmt in stmts:
        event.listen(
            metadata,
            'before_create',
            DDL(stmt)
        )
def test_tokens(self):
    """%(schema)s / %(table)s / %(fullname)s tokens quote as required."""
    m = MetaData()
    sane_alone = Table("t", m, Column("id", Integer))
    sane_schema = Table("t", m, Column("id", Integer), schema="s")
    insane_alone = Table("t t", m, Column("id", Integer))
    insane_schema = Table("t t", m, Column("id", Integer), schema="s s")
    dialect = self.mock_engine().dialect

    ddl = DDL("%(schema)s-%(table)s-%(fullname)s")
    for target, expected in [
        (sane_alone, "-t-t"),
        (sane_schema, "s-t-s.t"),
        (insane_alone, '-"t t"-"t t"'),
        (insane_schema, '"s s"-"t t"-"s s"."t t"'),
    ]:
        self.assert_compile(
            ddl.against(target), expected, dialect=dialect)

    # overrides are used piece-meal and verbatim.
    ddl = DDL(
        "%(schema)s-%(table)s-%(fullname)s-%(bonus)s",
        context={
            "schema": "S S",
            "table": "T T",
            "bonus": "b"
        },
    )
    for target, expected in [
        (sane_alone, "S S-T T-t-b"),
        (sane_schema, "S S-T T-s.t-b"),
        (insane_alone, 'S S-T T-"t t"-b'),
        (insane_schema, 'S S-T T-"s s"."t t"-b'),
    ]:
        self.assert_compile(
            ddl.against(target), expected, dialect=dialect)
def _setup_mysql_fulltext_indexes():
    """Attach MySQL-only FULLTEXT index DDL to the media_fulltext table.

    Uses ``.items()`` instead of the Python-2-only ``.iteritems()`` so
    the function also runs under Python 3.
    """
    for name, cols in _fulltext_indexes.items():
        # The doubled %% keeps %(table)s intact for DDL's own
        # interpolation pass at execution time.
        sql = ('ALTER TABLE %%(table)s '
               'ADD FULLTEXT INDEX media_fulltext_%(name)s (%(cols)s)') % {
            'name': name,
            'cols': ', '.join(col.name for col in cols)
        }
        event.listen(media_fulltext, u'after_create',
                     DDL(sql).execute_if(dialect=u'mysql'))
def timestamps_triggers(table):
    """Wire created_at/updated_at maintenance triggers onto *table*.

    The plpgsql helper functions are registered on metadata creation;
    the per-table BEFORE INSERT/UPDATE triggers on table creation.
    """
    create_update_fcn_ddl = DDL('''\
CREATE OR REPLACE FUNCTION update_updated_at() RETURNS trigger AS $$
BEGIN
    new.updated_at = now();
    return new;
END;
$$ LANGUAGE 'plpgsql' IMMUTABLE CALLED ON NULL INPUT SECURITY INVOKER;
''')
    create_create_fcn_ddl = DDL('''\
CREATE OR REPLACE FUNCTION set_created_at() RETURNS trigger AS $$
BEGIN
    new.created_at = now();
    return new;
END;
$$ LANGUAGE 'plpgsql' IMMUTABLE CALLED ON NULL INPUT SECURITY INVOKER;
''')
    # Both helper functions must exist before any table is created.
    for fcn_ddl in (create_update_fcn_ddl, create_create_fcn_ddl):
        event.listen(
            metadata,
            'before_create',
            fcn_ddl
        )

    name = table.__tablename__
    update_ddl = DDL('''\
CREATE TRIGGER update_updated_at_on_%s BEFORE UPDATE ON "%s"
FOR EACH ROW EXECUTE PROCEDURE update_updated_at()
''' % (name, name))
    event.listen(table.__table__, 'after_create', update_ddl)

    create_ddl = DDL('''\
CREATE TRIGGER set_created_at_on_%s BEFORE INSERT ON "%s"
FOR EACH ROW EXECUTE PROCEDURE set_created_at()
''' % (name, name))
    event.listen(table.__table__, 'after_create', create_ddl)
def polymorphic_view(self) -> DDL:
    """Return DDL that creates a view named ``self.view_name`` over
    ``self.selectable``."""
    sql = """
    CREATE VIEW IF NOT EXISTS %(name)s AS %(select)s
    """ % {
        'name': self.view_name,
        'select': self.selectable.compile(),
    }
    return DDL(sql)
def test_filter_deprecated(self):
    """The legacy on= filter still works but emits deprecation warnings."""
    cx = self.engine
    tbl = Table("t", MetaData(), Column("id", Integer))
    target = cx.name

    # No filter at all: executes, no warning expected.
    assert DDL("")._should_execute_deprecated("x", tbl, cx)

    # String dialect filters: matching name passes, unknown blocks.
    with testing.expect_deprecated(".* is deprecated .*"):
        assert DDL("", on=target)._should_execute_deprecated("x", tbl, cx)
    with testing.expect_deprecated(".* is deprecated .*"):
        assert not DDL("", on="bogus")._should_execute_deprecated(
            "x", tbl, cx)

    # Callable filters: truthy return lets the DDL run; the fourth
    # positional argument exposes the bind.
    with testing.expect_deprecated(".* is deprecated .*"):
        accept_all = lambda d, x, y, z: True
        assert DDL("", on=accept_all)._should_execute_deprecated(
            "x", tbl, cx)
    with testing.expect_deprecated(".* is deprecated .*"):
        not_bogus = lambda d, x, y, z: z.engine.name != "bogus"
        assert DDL("", on=not_bogus)._should_execute_deprecated(
            "x", tbl, cx)
def define_views(cls, metadata, schema):
    """Attach CREATE/DROP VIEW DDL for the users and email_addresses
    tables, schema-qualified when *schema* is given."""
    for table_name in ('users', 'email_addresses'):
        if schema:
            fullname = "%s.%s" % (schema, table_name)
        else:
            fullname = table_name
        view_name = fullname + '_v'
        create_sql = "CREATE VIEW %s AS SELECT * FROM %s" % (
            view_name, fullname)
        event.listen(metadata, "after_create", DDL(create_sql))
        event.listen(metadata, "before_drop",
                     DDL("DROP VIEW %s" % view_name))
def dump_schema():
    """Print the full CREATE schema, including triggers and grants, as
    SQL rendered through a mock (non-executing) engine."""
    from sqlalchemy import create_engine
    import os.path

    directory = os.path.dirname(__file__)
    with open(os.path.join(directory, "triggers.sql")) as f:
        triggers = f.read()
    with open(os.path.join(directory, "grants.sql")) as f:
        grants = f.read()

    # Append the hand-written SQL after the generated CREATE statements.
    for extra_sql in (triggers, grants):
        event.listen(Base.metadata, "after_create", DDL(extra_sql))

    def dump(sql, *multiparams, **params):
        # Mock executor: render each statement instead of executing it.
        print(sql.compile(dialect=engine.dialect), ";")

    engine = create_engine('postgresql://', strategy='mock', executor=dump)
    Base.metadata.create_all(engine, checkfirst=False)
def test_metadata(self):
    """execute_at hooks on MetaData fire only during their own phase."""
    metadata, engine = self.metadata, self.engine
    # phase -> unique marker string emitted by that phase's hook
    for phase, marker in (
        ('before-create', 'mxyzptlk'),
        ('after-create', 'klptzyxm'),
        ('before-drop', 'xyzzy'),
        ('after-drop', 'fnord'),
    ):
        DDL(marker).execute_at(phase, metadata)

    metadata.create_all()
    strings = [str(x) for x in engine.mock]
    assert 'mxyzptlk' in strings
    assert 'klptzyxm' in strings
    assert 'xyzzy' not in strings
    assert 'fnord' not in strings

    del engine.mock[:]
    metadata.drop_all()
    strings = [str(x) for x in engine.mock]
    assert 'mxyzptlk' not in strings
    assert 'klptzyxm' not in strings
    assert 'xyzzy' in strings
    assert 'fnord' in strings
def test_table_standalone(self):
    """execute_at hooks on a Table fire only during their own phase."""
    users, engine = self.users, self.engine
    for phase, marker in (
        ('before-create', 'mxyzptlk'),
        ('after-create', 'klptzyxm'),
        ('before-drop', 'xyzzy'),
        ('after-drop', 'fnord'),
    ):
        DDL(marker).execute_at(phase, users)

    users.create()
    strings = [str(x) for x in engine.mock]
    assert 'mxyzptlk' in strings
    assert 'klptzyxm' in strings
    assert 'xyzzy' not in strings
    assert 'fnord' not in strings

    del engine.mock[:]
    users.drop()
    strings = [str(x) for x in engine.mock]
    assert 'mxyzptlk' not in strings
    assert 'klptzyxm' not in strings
    assert 'xyzzy' in strings
    assert 'fnord' in strings
def test_table_by_metadata_deprecated(self):
    """Table-level execute_at hooks fire when driven via metadata
    create_all/drop_all."""
    metadata, users, engine = self.metadata, self.users, self.engine
    for phase, marker in (
        ("before-create", "mxyzptlk"),
        ("after-create", "klptzyxm"),
        ("before-drop", "xyzzy"),
        ("after-drop", "fnord"),
    ):
        DDL(marker).execute_at(phase, users)

    metadata.create_all()
    strings = [str(x) for x in engine.mock]
    assert "mxyzptlk" in strings
    assert "klptzyxm" in strings
    assert "xyzzy" not in strings
    assert "fnord" not in strings

    del engine.mock[:]
    metadata.drop_all()
    strings = [str(x) for x in engine.mock]
    assert "mxyzptlk" not in strings
    assert "klptzyxm" not in strings
    assert "xyzzy" in strings
    assert "fnord" in strings
def test_table_standalone(self):
    """event.listen hooks on a Table fire only during their own phase."""
    users, engine = self.users, self.engine
    for evt, marker in (
        ("before_create", "mxyzptlk"),
        ("after_create", "klptzyxm"),
        ("before_drop", "xyzzy"),
        ("after_drop", "fnord"),
    ):
        event.listen(users, evt, DDL(marker))

    users.create()
    strings = [str(x) for x in engine.mock]
    assert "mxyzptlk" in strings
    assert "klptzyxm" in strings
    assert "xyzzy" not in strings
    assert "fnord" not in strings

    del engine.mock[:]
    users.drop()
    strings = [str(x) for x in engine.mock]
    assert "mxyzptlk" not in strings
    assert "klptzyxm" not in strings
    assert "xyzzy" in strings
    assert "fnord" in strings
def test_table_standalone(self):
    """event.listen hooks on a Table fire only during their own phase."""
    users, engine = self.users, self.engine
    for evt, marker in (
        ('before_create', 'mxyzptlk'),
        ('after_create', 'klptzyxm'),
        ('before_drop', 'xyzzy'),
        ('after_drop', 'fnord'),
    ):
        event.listen(users, evt, DDL(marker))

    users.create()
    strings = [str(x) for x in engine.mock]
    assert 'mxyzptlk' in strings
    assert 'klptzyxm' in strings
    assert 'xyzzy' not in strings
    assert 'fnord' not in strings

    del engine.mock[:]
    users.drop()
    strings = [str(x) for x in engine.mock]
    assert 'mxyzptlk' not in strings
    assert 'klptzyxm' not in strings
    assert 'xyzzy' in strings
    assert 'fnord' in strings
def fts_virtual_table(self) -> DDL:
    """Return DDL creating the external-content fts5 virtual table
    backed by ``self.view_name``."""
    column_list = ', '.join(c.key for c in self.columns)
    sql = """
    CREATE VIRTUAL TABLE IF NOT EXISTS %(name)s USING fts5(%(columns)s,
        content=%(view_name)s, content_rowid=%(rowid_name)s)
    """ % {
        'name': self.idx_name,
        'columns': column_list,
        'view_name': self.view_name,
        'rowid_name': self.rowid_c.key,
    }
    return DDL(sql)
def test_metadata(self):
    """event.listen hooks on MetaData fire only during their own phase."""
    metadata, engine = self.metadata, self.engine
    for evt, marker in (
        ("before_create", "mxyzptlk"),
        ("after_create", "klptzyxm"),
        ("before_drop", "xyzzy"),
        ("after_drop", "fnord"),
    ):
        event.listen(metadata, evt, DDL(marker))

    metadata.create_all()
    strings = [str(x) for x in engine.mock]
    assert "mxyzptlk" in strings
    assert "klptzyxm" in strings
    assert "xyzzy" not in strings
    assert "fnord" not in strings

    del engine.mock[:]
    metadata.drop_all()
    strings = [str(x) for x in engine.mock]
    assert "mxyzptlk" not in strings
    assert "klptzyxm" not in strings
    assert "xyzzy" in strings
    assert "fnord" in strings
def test_metadata(self):
    """event.listen hooks on MetaData fire only during their own phase."""
    metadata, engine = self.metadata, self.engine
    for evt, marker in (
        ('before_create', 'mxyzptlk'),
        ('after_create', 'klptzyxm'),
        ('before_drop', 'xyzzy'),
        ('after_drop', 'fnord'),
    ):
        event.listen(metadata, evt, DDL(marker))

    metadata.create_all()
    strings = [str(x) for x in engine.mock]
    assert 'mxyzptlk' in strings
    assert 'klptzyxm' in strings
    assert 'xyzzy' not in strings
    assert 'fnord' not in strings

    del engine.mock[:]
    metadata.drop_all()
    strings = [str(x) for x in engine.mock]
    assert 'mxyzptlk' not in strings
    assert 'klptzyxm' not in strings
    assert 'xyzzy' in strings
    assert 'fnord' in strings
def init():
    """Initializes the service."""

    def _load_json(path):
        # Read via a context manager so the file handle is closed
        # promptly -- the original json.load(open(...)) leaked handles.
        with open(path) as f:
            return json.load(f)

    # Create ES indexes.
    es = Elasticsearch(app.config['ELASTICSEARCH_URL'])
    for key in ['ELASTICSEARCH_GLOSSARY', 'ELASTICSEARCH_SIMILARITY']:
        try:
            if config_name == 'test':
                es.indices.delete(index=app.config[key], ignore=[400, 404])
            es.indices.create(index=app.config[key])
        except TransportError as e:
            # ignore already existing index
            if e.error == 'resource_already_exists_exception':
                pass
            else:
                raise
    es.indices.put_mapping(
        doc_type='_doc',
        body=_load_json('./elasticsearch/alegre_glossary.json'),
        index=app.config['ELASTICSEARCH_GLOSSARY']
    )
    es.indices.put_mapping(
        doc_type='_doc',
        body=_load_json('./elasticsearch/alegre_similarity.json'),
        index=app.config['ELASTICSEARCH_SIMILARITY']
    )
    # Settings can only be changed while the index is closed.
    es.indices.close(index=app.config['ELASTICSEARCH_SIMILARITY'])
    es.indices.put_settings(
        body=_load_json('./elasticsearch/alegre_similarity_settings.json'),
        index=app.config['ELASTICSEARCH_SIMILARITY']
    )
    es.indices.open(index=app.config['ELASTICSEARCH_SIMILARITY'])

    # Create database.
    with app.app_context():
        if not database_exists(db.engine.url):
            create_database(db.engine.url)
        if config_name == 'test':
            db.drop_all()
        # bit_count() is used by similarity queries; it must exist
        # before the tables are created.
        sqlalchemy.event.listen(
            db.metadata,
            'before_create',
            DDL("""
            CREATE OR REPLACE FUNCTION bit_count(value bigint)
            RETURNS integer AS $$ SELECT length(replace(value::bit(64)::text,'0','')); $$
            LANGUAGE SQL IMMUTABLE STRICT;
            """)
        )
        db.create_all()
def _create_model(self, model):
    """Build and register a concrete declarative model from a mixin class.

    Mixins may defer to another model via ``__inherit_from__``; if the
    base is not registered yet the mixin is queued in ``self._bases``
    and the method returns None.  Otherwise a new declarative class is
    created and any ``__create_sql__`` / ``__drop_sql__`` statements
    are attached as metadata DDL events.

    :param model: a mixin class (must NOT already be declarative)
    :return: ``(model, name)`` tuple, or None when queued for later
    """
    model_name = model.__name__
    meta = type(self._base_declarative)
    # Guard: only mixins may be registered, never finished declaratives.
    if isinstance(model, meta):
        raise ImproperlyConfigured('Cannot register declarative classes '
                                   'only mixins allowed')
    base = getattr(model, '__inherit_from__', None)
    if base:
        if base not in self._declarative_register:
            # Base not created yet -- queue this mixin until it is.
            models = self._bases.get(base)
            if not models:
                self._bases[base] = models = []
            models.append(model)
            return
        else:
            base = self._declarative_register[base]
    else:
        base = self._base_declarative
    #
    # Create SqlAlchemy Model
    model = meta(model_name, (model, base), {})
    create = getattr(model, '__create_sql__', None)
    name = model_name.lower()
    if create:
        # NOTE(review): .format() is called with a dict as a single
        # positional argument, so the template must use {0[name]}-style
        # fields; a plain {name} template would raise here.  Confirm the
        # __create_sql__/__drop_sql__ templates match.
        event.listen(self.metadata, 'after_create',
                     DDL(create.format({'name': name})))
        drop = getattr(model, '__drop_sql__', None)
        if not drop:
            logger.warning('Model %s has create statement but not drop. '
                           'To mute this warning add a __drop_sql__ '
                           'statement in the model class', name)
        else:
            event.listen(self.metadata, 'before_drop',
                         DDL(drop.format({'name': name})))
    return model, name
def define_temp_tables(cls, metadata):
    """Define the tmp.user_tmp TEMPORARY table, plus a companion view
    when the backend supports reflecting temporary views."""
    user_tmp = Table(
        "user_tmp",
        metadata,
        Column("id", sa.INT, primary_key=True),
        Column('name', sa.VARCHAR(50)),
        Column('foo', sa.INT),
        sa.UniqueConstraint('name', name='user_tmp_uq'),
        sa.Index("user_tmp_ix", "foo"),
        prefixes=["TEMPORARY"],
        schema='tmp',
    )
    views_supported = (
        testing.requires.view_reflection.enabled
        and testing.requires.temporary_views.enabled
    )
    if views_supported:
        event.listen(
            user_tmp,
            "after_create",
            DDL("create view user_tmp_v as "
                "select * from tmp.user_tmp")
        )
        event.listen(
            user_tmp,
            "before_drop",
            DDL("drop view user_tmp_v")
        )
def test_tokens(self):
    """%(schema)s / %(table)s / %(fullname)s tokens quote as required."""
    m = MetaData()
    sane_alone = Table('t', m, Column('id', Integer))
    sane_schema = Table('t', m, Column('id', Integer), schema='s')
    insane_alone = Table('t t', m, Column('id', Integer))
    insane_schema = Table('t t', m, Column('id', Integer), schema='s s')
    dialect = self.mock_engine().dialect

    ddl = DDL('%(schema)s-%(table)s-%(fullname)s')
    for target, expected in [
        (sane_alone, '-t-t'),
        (sane_schema, 's-t-s.t'),
        (insane_alone, '-"t t"-"t t"'),
        (insane_schema, '"s s"-"t t"-"s s"."t t"'),
    ]:
        self.assert_compile(
            ddl.against(target), expected, dialect=dialect)

    # overrides are used piece-meal and verbatim.
    ddl = DDL('%(schema)s-%(table)s-%(fullname)s-%(bonus)s',
              context={
                  'schema': 'S S',
                  'table': 'T T',
                  'bonus': 'b'
              })
    for target, expected in [
        (sane_alone, 'S S-T T-t-b'),
        (sane_schema, 'S S-T T-s.t-b'),
        (insane_alone, 'S S-T T-"t t"-b'),
        (insane_schema, 'S S-T T-"s s"."t t"-b'),
    ]:
        self.assert_compile(
            ddl.against(target), expected, dialect=dialect)