def test_python_fn_default(self):
    metadata = MetaData()
    table = Table(
        "sometable",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("data", String),
        Column("foo", Integer, default=lambda: 10),
    )
    values = [
        {"id": 1, "data": "data1"},
        {"id": 2, "data": "data2", "foo": 15},
        {"id": 3, "data": "data3"},
    ]
    checkparams = {
        "id_m0": 1,
        "id_m1": 2,
        "id_m2": 3,
        "data_m0": "data1",
        "data_m1": "data2",
        "data_m2": "data3",
        "foo": None,  # evaluated later
        "foo_m1": 15,
        "foo_m2": None,  # evaluated later
    }

    stmt = table.insert().values(values)
    eq_(
        dict(
            [
                (k, v.type._type_affinity)
                for (k, v) in stmt.compile(
                    dialect=postgresql.dialect()
                ).binds.items()
            ]
        ),
        {
            "foo": Integer,
            "data_m2": String,
            "id_m0": Integer,
            "id_m2": Integer,
            "foo_m1": Integer,
            "data_m1": String,
            "id_m1": Integer,
            "foo_m2": Integer,
            "data_m0": String,
        },
    )

    self.assert_compile(
        stmt,
        "INSERT INTO sometable (id, data, foo) VALUES "
        "(%(id_m0)s, %(data_m0)s, %(foo)s), "
        "(%(id_m1)s, %(data_m1)s, %(foo_m1)s), "
        "(%(id_m2)s, %(data_m2)s, %(foo_m2)s)",
        checkparams=checkparams,
        dialect=postgresql.dialect(),
    )
def test_mysql(self):
    from sqlalchemy.dialects import mysql as diale_
    dialect = diale_.dialect()

    query = str(self.group_concat_md.compile(dialect=dialect))
    expected = (
        '''SELECT metric_dimension.dimension_set_id, '''
        '''GROUP_CONCAT(concat(concat(metric_dimension.name, '''
        ''''='), metric_dimension.value) SEPARATOR ',') AS dimensions '''
        ''' FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
    self.assertEqual(expected, query)

    query = str(self.group_concat_md_order.compile(dialect=dialect))
    expected = (
        '''SELECT metric_dimension.dimension_set_id, '''
        '''GROUP_CONCAT(concat(concat(metric_dimension.name, '''
        ''''='), metric_dimension.value) ORDER BY metric_dimension.name ASC '''
        '''SEPARATOR ',') AS dimensions '''
        ''' FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
    self.assertEqual(expected, query)

    expected = (
        '''SELECT metric_dimension.dimension_set_id \n'''
        '''FROM metric_dimension '''
        '''ORDER BY FIELD(metric_dimension.dimension_set_id, 'A', 'B', 'C') ASC''')
    query = str(self.order_by_field.compile(dialect=dialect))
    self.assertEqual(expected, query)
def test_render_explain_analyze(self, session, Article):
    assert str(
        explain_analyze(session.query(Article))
        .compile(dialect=postgresql.dialect())
    ).startswith('EXPLAIN (ANALYZE true) SELECT')
def test_server_default(self):
    metadata = MetaData()
    table = Table(
        "sometable",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("data", String),
        Column("foo", Integer, server_default=func.foobar()),
    )
    values = [
        {"id": 1, "data": "data1"},
        {"id": 2, "data": "data2", "foo": "plainfoo"},
        {"id": 3, "data": "data3"},
    ]
    checkparams = {
        "id_m0": 1,
        "id_m1": 2,
        "id_m2": 3,
        "data_m0": "data1",
        "data_m1": "data2",
        "data_m2": "data3",
    }

    self.assert_compile(
        table.insert().values(values),
        "INSERT INTO sometable (id, data) VALUES "
        "(%(id_m0)s, %(data_m0)s), "
        "(%(id_m1)s, %(data_m1)s), "
        "(%(id_m2)s, %(data_m2)s)",
        checkparams=checkparams,
        dialect=postgresql.dialect(),
    )
def test_server_default_absent_value(self):
    metadata = MetaData()
    table = Table(
        'sometable', metadata,
        Column('id', Integer, primary_key=True),
        Column('data', String),
        Column('foo', Integer, server_default=func.foobar()))
    values = [
        {'id': 1, 'data': 'data1', 'foo': 'plainfoo'},
        {'id': 2, 'data': 'data2'},
        {'id': 3, 'data': 'data3', 'foo': 'otherfoo'},
    ]
    checkparams = {
        'id_0': 1,
        'id_1': 2,
        'id_2': 3,
        'data_0': 'data1',
        'data_1': 'data2',
        'data_2': 'data3',
        'foo_0': 'plainfoo',
        'foo_2': 'otherfoo',
    }

    # note the effect here is that the first set of params
    # takes effect for the rest of them, when one is absent
    self.assert_compile(
        table.insert().values(values),
        'INSERT INTO sometable (id, data, foo) VALUES '
        '(%(id_0)s, %(data_0)s, %(foo_0)s), '
        '(%(id_1)s, %(data_1)s, %(foo_0)s), '
        '(%(id_2)s, %(data_2)s, %(foo_2)s)',
        checkparams=checkparams,
        dialect=postgresql.dialect())
def print_query(query_result):
    try:
        return str(query_result.statement.compile(
            dialect=postgresql.dialect(),
            compile_kwargs={"literal_binds": True}))
    except Exception as e:
        logging.getLogger(__name__).warning(str(e))
        return str(query_result)
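# A minimal usage sketch for print_query above; `Widget` and `session` are
# hypothetical stand-ins for a mapped model and an active SQLAlchemy session.
q = session.query(Widget).filter(Widget.name == "example")
print(print_query(q))  # renders the statement with literal binds inlined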
def test_modify_metadata(self):
    metadata = ConceptNameTransform.modify_metadata(self.metadata)
    indexes = metadata.tables['table1'].indexes
    self.assertEqual(len(indexes), 2, 'Indexes created')
    for index in indexes:
        index_sql = str(CreateIndex(index).compile(
            dialect=postgresql.dialect()))
        if index.name == 'tab_fcn_81387a6132eed7f69b5_ix':
            expected = clean("""
                CREATE INDEX tab_fcn_81387a6132eed7f69b5_ix
                ON table1 (foo_concept_name)
            """)
            self.assertEqual(index_sql, expected)
        elif index.name == 'tab_bcn_7baa5e16ad1f8129b90_ix':
            expected = clean("""
                CREATE INDEX tab_bcn_7baa5e16ad1f8129b90_ix
                ON table1 (bar_concept_name)
            """)
            self.assertEqual(index_sql, expected)
        else:
            self.fail(
                'Unexpected index encountered: {}'.format(index.name))
def test_modify_select(self):
    select_obj = select([self.table1])
    join_obj = self.table1
    select_obj, join_obj = ConceptNameTransform.modify_select(
        self.metadata, 'user', select_obj, join_obj)
    select_obj = select_obj.select_from(join_obj)
    new_sql = str(select_obj.compile(dialect=postgresql.dialect()))
    expected = clean("""
        SELECT table1.foo_concept_id, table1.bar_concept_id,
        concept_1.concept_name AS foo_concept_name,
        concept_2.concept_name AS bar_concept_name
        {NL}FROM table1
        LEFT OUTER JOIN concept AS concept_1
        ON concept_1.concept_id = foo_concept_id
        LEFT OUTER JOIN concept AS concept_2
        ON concept_2.concept_id = bar_concept_id
    """)
    self.maxDiff = None
    self.assertEqual(expected, new_sql)
def test_insert_returning(self):
    dialect = postgresql.dialect()
    table1 = table(
        "mytable",
        column("myid", Integer),
        column("name", String(128)),
        column("description", String(128)),
    )

    i = insert(table1, values=dict(name="foo")).returning(
        table1.c.myid, table1.c.name
    )
    self.assert_compile(
        i,
        "INSERT INTO mytable (name) VALUES "
        "(%(name)s) RETURNING mytable.myid, "
        "mytable.name",
        dialect=dialect,
    )

    i = insert(table1, values=dict(name="foo")).returning(table1)
    self.assert_compile(
        i,
        "INSERT INTO mytable (name) VALUES "
        "(%(name)s) RETURNING mytable.myid, "
        "mytable.name, mytable.description",
        dialect=dialect,
    )

    i = insert(table1, values=dict(name="foo")).returning(
        func.length(table1.c.name)
    )
    self.assert_compile(
        i,
        "INSERT INTO mytable (name) VALUES "
        "(%(name)s) RETURNING length(mytable.name) "
        "AS length_1",
        dialect=dialect,
    )
def test_sql_expression_pk_noautoinc_returning(self):
    # test that return_defaults() works with a primary key where we are
    # sending a SQL expression, and we need to get the server-calculated
    # value back.  [ticket:3133]
    metadata = MetaData()
    table = Table(
        "sometable",
        metadata,
        Column("id", Integer, autoincrement=False, primary_key=True),
        Column("data", String),
    )
    stmt = table.insert().return_defaults().values(id=func.foobar())
    returning_dialect = postgresql.dialect()
    returning_dialect.implicit_returning = True
    compiled = stmt.compile(
        dialect=returning_dialect, column_keys=["data"]
    )
    eq_(compiled.postfetch, [])
    eq_(compiled.returning, [table.c.id])

    self.assert_compile(
        stmt,
        "INSERT INTO sometable (id, data) VALUES "
        "(foobar(), %(data)s) RETURNING sometable.id",
        checkparams={"data": "foo"},
        params={"data": "foo"},
        dialect=returning_dialect,
    )
def _raise_query(self, q):
    """
    useful for debugging. just do...
    self._raise_query(q)
    """
    c = q.compile(dialect=postgresql.dialect())
    raise ValueError(c)
def test_postgres(self):
    from sqlalchemy.dialects import postgresql as diale_
    dialect = diale_.dialect()

    query = str(self.group_concat_md.compile(dialect=dialect))
    expected = (
        '''SELECT metric_dimension.dimension_set_id, STRING_AGG(metric_dimension.name '''
        '''|| '=' || metric_dimension.value, ',' ) AS dimensions '''
        ''' FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
    self.assertEqual(expected, query)

    query = str(self.group_concat_md_order.compile(dialect=dialect))
    expected = (
        '''SELECT metric_dimension.dimension_set_id, STRING_AGG(metric_dimension.name '''
        '''|| '=' || metric_dimension.value, ',' ORDER BY metric_dimension.name ASC) '''
        '''AS dimensions '''
        ''' FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
    self.assertEqual(expected, query)

    expected = (
        """SELECT metric_dimension.dimension_set_id \n"""
        """FROM metric_dimension ORDER BY CASE WHEN metric_dimension.dimension_set_id='A'"""
        """ THEN 0 WHEN metric_dimension.dimension_set_id='B' THEN 1 WHEN"""
        """ metric_dimension.dimension_set_id='C' THEN 2 ELSE 3 END ASC""")
    query = str(self.order_by_field.compile(dialect=dialect))
    self.assertEqual(expected, query)
def test_update_returning(self):
    dialect = postgresql.dialect()
    table1 = table(
        'mytable',
        column('myid', Integer),
        column('name', String(128)),
        column('description', String(128)))

    u = update(table1, values=dict(name='foo')).returning(
        table1.c.myid, table1.c.name)
    self.assert_compile(u,
                        'UPDATE mytable SET name=%(name)s '
                        'RETURNING mytable.myid, mytable.name',
                        dialect=dialect)

    u = update(table1, values=dict(name='foo')).returning(table1)
    self.assert_compile(u,
                        'UPDATE mytable SET name=%(name)s '
                        'RETURNING mytable.myid, mytable.name, '
                        'mytable.description',
                        dialect=dialect)

    u = update(table1, values=dict(name='foo')).returning(
        func.length(table1.c.name))
    self.assert_compile(u,
                        'UPDATE mytable SET name=%(name)s '
                        'RETURNING length(mytable.name) AS length_1',
                        dialect=dialect)
def test_insert_returning(self):
    dialect = postgresql.dialect()
    table1 = table(
        'mytable',
        column('myid', Integer),
        column('name', String(128)),
        column('description', String(128)),
    )

    i = insert(table1, values=dict(name='foo')).returning(
        table1.c.myid, table1.c.name)
    self.assert_compile(i,
                        'INSERT INTO mytable (name) VALUES '
                        '(%(name)s) RETURNING mytable.myid, '
                        'mytable.name',
                        dialect=dialect)

    i = insert(table1, values=dict(name='foo')).returning(table1)
    self.assert_compile(i,
                        'INSERT INTO mytable (name) VALUES '
                        '(%(name)s) RETURNING mytable.myid, '
                        'mytable.name, mytable.description',
                        dialect=dialect)

    i = insert(table1, values=dict(name='foo')).returning(
        func.length(table1.c.name))
    self.assert_compile(i,
                        'INSERT INTO mytable (name) VALUES '
                        '(%(name)s) RETURNING length(mytable.name) '
                        'AS length_1',
                        dialect=dialect)
def test_python_fn_default(self):
    metadata = MetaData()
    table = Table(
        'sometable', metadata,
        Column('id', Integer, primary_key=True),
        Column('data', String),
        Column('foo', Integer, default=lambda: 10))
    values = [
        {'id': 1, 'data': 'data1'},
        {'id': 2, 'data': 'data2', 'foo': 15},
        {'id': 3, 'data': 'data3'},
    ]
    checkparams = {
        'id_0': 1,
        'id_1': 2,
        'id_2': 3,
        'data_0': 'data1',
        'data_1': 'data2',
        'data_2': 'data3',
        'foo': None,  # evaluated later
        'foo_1': 15,
        'foo_2': None,  # evaluated later
    }

    self.assert_compile(
        table.insert().values(values),
        "INSERT INTO sometable (id, data, foo) VALUES "
        "(%(id_0)s, %(data_0)s, %(foo)s), "
        "(%(id_1)s, %(data_1)s, %(foo_1)s), "
        "(%(id_2)s, %(data_2)s, %(foo_2)s)",
        checkparams=checkparams,
        dialect=postgresql.dialect())
def _mquery(self, query_obj=None, *filter, **filter_by):
    """ Get a MongoQuery with initial filtering applied

        :param query_obj: Query Object
        :type query_obj: dict|None
        :param filter: Additional filter() criteria
        :param filter_by: Additional filter_by() criteria
        :rtype: sqlalchemy.orm.Query, list of fields
    """
    mongo_query = self._getCrudHelper().mquery(
        self._query().filter(*filter).filter_by(**filter_by),
        query_obj
    )
    sqlalchemy_query = mongo_query.end()
    try:
        dialect = pg.dialect()
        sql_query = sqlalchemy_query.statement.compile(dialect=dialect)
        if PY2:
            sql_str = (sql_query.string.encode(dialect.encoding)
                       % sql_query.params).decode(dialect.encoding)
        else:
            sql_str = sql_query.string % sql_query.params
        self.sqlalchemy_queries.append(sql_str)
    except Exception as e:
        logging.error('Error generating SQL string: %s', e)
    return sqlalchemy_query, mongo_query.get_project()
def test_modify_select(self):
    select_obj = select([self.table1])
    join_obj = self.table1
    select_obj, join_obj = AgeTransform.modify_select(
        self.metadata, 'table1', select_obj, join_obj)
    select_obj = select_obj.select_from(join_obj)
    new_sql = str(select_obj.compile(dialect=postgresql.dialect()))
    expected = clean("""
        SELECT table1.foo_start_time, table1.bar_start_time,
        table1.person_id,
        months_in_interval(person.time_of_birth,
        table1.foo_start_time) AS foo_start_age_in_months,
        months_in_interval(person.time_of_birth,
        table1.bar_start_time) AS bar_start_age_in_months
        {NL}FROM table1
        JOIN person ON person.person_id = table1.person_id
    """)
    self.maxDiff = None
    self.assertEqual(new_sql, expected)
def test_modify_metadata(self):
    metadata = AgeTransform.modify_metadata(self.metadata)
    indexes = metadata.tables['table1'].indexes
    self.assertEqual(len(indexes), 2, 'Indexes created')
    for index in indexes:
        index_sql = str(CreateIndex(index).compile(
            dialect=postgresql.dialect()))
        if index.name == 'tab_fsaim_107eee9e009461416_ix':
            expected = clean("""
                CREATE INDEX tab_fsaim_107eee9e009461416_ix
                ON table1 (foo_start_age_in_months)
            """)
            self.assertEqual(index_sql, expected)
        elif index.name == 'tab_bsaim_ca07fdbcdf9bfef7a_ix':
            expected = clean("""
                CREATE INDEX tab_bsaim_ca07fdbcdf9bfef7a_ix
                ON table1 (bar_start_age_in_months)
            """)
            self.assertEqual(index_sql, expected)
        else:
            self.fail(
                'Unexpected index encountered: {}'.format(index.name))
def test_inline_default(self):
    metadata = MetaData()
    table = Table(
        'sometable', metadata,
        Column('id', Integer, primary_key=True),
        Column('data', String),
        Column('foo', Integer, default=func.foobar()))
    values = [
        {'id': 1, 'data': 'data1'},
        {'id': 2, 'data': 'data2', 'foo': 'plainfoo'},
        {'id': 3, 'data': 'data3'},
    ]
    checkparams = {
        'id_m0': 1,
        'id_m1': 2,
        'id_m2': 3,
        'data_m0': 'data1',
        'data_m1': 'data2',
        'data_m2': 'data3',
        'foo_m1': 'plainfoo',
    }

    self.assert_compile(
        table.insert().values(values),
        'INSERT INTO sometable (id, data, foo) VALUES '
        '(%(id_m0)s, %(data_m0)s, foobar()), '
        '(%(id_m1)s, %(data_m1)s, %(foo_m1)s), '
        '(%(id_m2)s, %(data_m2)s, foobar())',
        checkparams=checkparams,
        dialect=postgresql.dialect())
def copy_to(source, dest, engine_or_conn, **flags):
    """Export a query or select to a file. For flags, see the PostgreSQL
    documentation at http://www.postgresql.org/docs/9.5/static/sql-copy.html.

    Examples: ::
        select = MyTable.select()
        with open('/path/to/file.tsv', 'w') as fp:
            copy_to(select, fp, conn)

        query = session.query(MyModel)
        with open('/path/to/file.csv', 'w') as fp:
            copy_to(query, fp, engine, format='csv', null='.')

    :param source: SQLAlchemy query or select
    :param dest: Destination file pointer, in write mode
    :param engine_or_conn: SQLAlchemy engine, connection, or raw_connection
    :param **flags: Options passed through to COPY

    If an existing connection is passed to `engine_or_conn`, it is the
    caller's responsibility to commit and close.
    """
    dialect = postgresql.dialect()
    statement = getattr(source, 'statement', source)
    compiled = statement.compile(dialect=dialect)
    conn, autoclose = raw_connection_from(engine_or_conn)
    cursor = conn.cursor()
    query = cursor.mogrify(compiled.string, compiled.params).decode()
    formatted_flags = '({})'.format(format_flags(flags)) if flags else ''
    copy = 'COPY ({}) TO STDOUT {}'.format(query, formatted_flags)
    cursor.copy_expert(copy, dest)
    if autoclose:
        conn.close()
def test_python_scalar_default(self):
    metadata = MetaData()
    table = Table(
        'sometable', metadata,
        Column('id', Integer, primary_key=True),
        Column('data', String),
        Column('foo', Integer, default=10))
    values = [
        {'id': 1, 'data': 'data1'},
        {'id': 2, 'data': 'data2', 'foo': 15},
        {'id': 3, 'data': 'data3'},
    ]
    checkparams = {
        'id_m0': 1,
        'id_m1': 2,
        'id_m2': 3,
        'data_m0': 'data1',
        'data_m1': 'data2',
        'data_m2': 'data3',
        'foo': None,  # evaluated later
        'foo_m1': 15,
        'foo_m2': None,  # evaluated later
    }

    stmt = table.insert().values(values)
    eq_(
        dict([
            (k, v.type._type_affinity)
            for (k, v) in
            stmt.compile(dialect=postgresql.dialect()).binds.items()]),
        {
            'foo': Integer,
            'data_m2': String,
            'id_m0': Integer,
            'id_m2': Integer,
            'foo_m1': Integer,
            'data_m1': String,
            'id_m1': Integer,
            'foo_m2': Integer,
            'data_m0': String}
    )

    self.assert_compile(
        stmt,
        'INSERT INTO sometable (id, data, foo) VALUES '
        '(%(id_m0)s, %(data_m0)s, %(foo)s), '
        '(%(id_m1)s, %(data_m1)s, %(foo_m1)s), '
        '(%(id_m2)s, %(data_m2)s, %(foo_m2)s)',
        checkparams=checkparams,
        dialect=postgresql.dialect())
def test_format(self):
    seq = Sequence("my_seq_no_schema")
    dialect = postgresql.dialect()
    assert (
        dialect.identifier_preparer.format_sequence(seq)
        == "my_seq_no_schema"
    )

    seq = Sequence("my_seq", schema="some_schema")
    assert (
        dialect.identifier_preparer.format_sequence(seq)
        == "some_schema.my_seq"
    )

    seq = Sequence("My_Seq", schema="Some_Schema")
    assert (
        dialect.identifier_preparer.format_sequence(seq)
        == '"Some_Schema"."My_Seq"'
    )
def setup_class(cls):
    cls.autogen_context = {
        'opts': {
            'sqlalchemy_module_prefix': 'sa.',
            'alembic_module_prefix': 'op.',
        },
        'dialect': postgresql.dialect()
    }
def q2sql(q):
    """ Convert an SqlAlchemy query to string """
    # See: http://stackoverflow.com/a/4617623/134904
    # This intentionally does not escape values!
    stmt = q.statement
    dialect = pg.dialect()
    query = stmt.compile(dialect=dialect)
    return (query.string.encode(dialect.encoding)
            % query.params).decode(dialect.encoding)
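# Hypothetical usage of q2sql above; `User` and `session` are assumed
# stand-ins for a mapped model and an active session. Since the rendered
# values are unescaped, the result is only suitable for logging/debugging,
# never for execution.
q = session.query(User).filter(User.id == 5)
print(q2sql(q))  # e.g. SELECT ... FROM users WHERE users.id = 5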
def test_load_dialect_impl_postgres(self):
    """Assert that with PostgreSQL, a UUID type is used."""
    guid = models.GUID()
    dialect = postgresql.dialect()
    result = guid.load_dialect_impl(dialect)
    self.assertTrue(isinstance(result, postgresql.UUID))
def test_xxx(self):
    from sqlalchemy.dialects import postgresql
    # >>> print(str(q.statement.compile(dialect=postgresql.dialect())))
    # SELECT DISTINCT ON (name.value) name.id, name.value
    # FROM name ORDER BY name.value
    q = db.session.query(User).join(Token)
    # db.session.query(User).all()
    print(str(q.statement.compile(dialect=postgresql.dialect())))
def test_process_bind_param_uuid_postgres(self):
    """Assert UUIDs with PostgreSQL are normal string representations of UUIDs."""
    guid = models.GUID()
    uuid = uuid4()
    dialect = postgresql.dialect()
    result = guid.process_bind_param(uuid, dialect)
    self.assertEqual(str(uuid), result)
def test_create_index_with_ops(self):
    m = MetaData()
    tbl = Table(
        "testtbl",
        m,
        Column("data", String),
        Column("data2", Integer, key="d2"),
    )

    idx = Index(
        "test_idx1",
        tbl.c.data,
        postgresql_ops={"data": "text_pattern_ops"},
    )

    idx2 = Index(
        "test_idx2",
        tbl.c.data,
        tbl.c.d2,
        postgresql_ops={"data": "text_pattern_ops", "d2": "int4_ops"},
    )

    self.assert_compile(
        schema.CreateIndex(idx),
        "CREATE INDEX test_idx1 ON testtbl "
        "(data text_pattern_ops)",
        dialect=postgresql.dialect(),
    )
    self.assert_compile(
        schema.CreateIndex(idx2),
        "CREATE INDEX test_idx2 ON testtbl "
        "(data text_pattern_ops, data2 int4_ops)",
        dialect=postgresql.dialect(),
    )
def setup_class(cls):
    cls.autogen_context = {
        "opts": {
            "sqlalchemy_module_prefix": "sa.",
            "alembic_module_prefix": "op.",
        },
        "dialect": mysql.dialect(),
    }
    cls.pg_autogen_context = {
        "opts": {
            "sqlalchemy_module_prefix": "sa.",
            "alembic_module_prefix": "op.",
        },
        "dialect": postgresql.dialect(),
    }
def fetch_content(query_text, column, text_column, page, sources=None):
    session = g.session
    tsq = build_tsquery(query_text)
    search = None
    if column == db.WebPages.title:
        query = session \
            .query(db.WebPages, func.ts_rank_cd(func.to_tsvector("english", column), func.to_tsquery(tsq))) \
            .filter(
                func.to_tsvector("english", column).match(tsq, postgresql_regconfig='english')
            ) \
            .order_by(func.ts_rank_cd(func.to_tsvector("english", column), func.to_tsquery(tsq)).desc())
    elif column == db.WebPages.tsv_content:
        query = session \
            .query(db.WebPages, func.ts_rank_cd(column, func.to_tsquery(tsq))) \
            .filter(
                column.match(tsq)
            )
        if "'" in query_text or '"' in query_text:
            search = query_text.replace("!", " ").replace("?", " ") \
                .replace("'", " ").replace('"', " ") \
                .replace(',', " ").replace('.', " ").strip()
            # collapse runs of whitespace down to single spaces
            while "  " in search:
                search = search.replace("  ", " ")
            search = search.strip()
            search = '%{}%'.format(search.lower())
            query = query.filter(func.lower(text_column).like(search))
        query = query.order_by(func.ts_rank_cd(column, func.to_tsquery(tsq)).desc())
        if sources:
            query = query.filter(db.WebPages.netloc.in_(sources))
    else:
        raise ValueError("Wat?")

    print(str(query.statement.compile(dialect=postgresql.dialect())))
    print("param: '%s', '%s', '%s'" % (tsq, sources, search))

    try:
        entries = paginate(query, page, per_page=50)
    except sqlalchemy.exc.ProgrammingError:
        traceback.print_exc()
        print("ProgrammingError - Rolling back!")
        g.session.rollback()
        raise
    except sqlalchemy.exc.InternalError:
        traceback.print_exc()
        print("InternalError - Rolling back!")
        g.session.rollback()
        raise
    except sqlalchemy.exc.OperationalError:
        traceback.print_exc()
        print("OperationalError - Rolling back!")
        g.session.rollback()
        raise

    return entries
def test_generate_date_series_start_date_none(end_date, interval, granularity):
    (interval_param, interval, interval_str) = interval
    (end_date_param, end_date, end_date_str) = end_date
    (granularity_param, granularity, granularity_str) = granularity
    start_date_str = end_date.strftime('%Y-%m-%d %H:%M:%S.%f')

    query = generate_date_series(None, end_date_param, interval_param,
                                 granularity_param)
    expression = query.compile(dialect=postgresql.dialect())

    expected = 'generate_series(DATE {} - INTERVAL {}, {}, {})'.format(
        start_date_str, interval_str, end_date_str, granularity_str)
    actual = str(expression) % expression.params

    assert expected == actual
def test_anticipate_no_pk_non_composite_pk_prefetch(self):
    t = Table(
        't', MetaData(),
        Column('x', Integer, primary_key=True, autoincrement=False),
        Column('q', Integer))
    d = postgresql.dialect()
    d.implicit_returning = False
    assert_raises_message(
        exc.CompileError,
        "Column 't.x' is marked as a member.*"
        "may not store NULL.$",
        t.insert().compile,
        dialect=d, column_keys=['q'])
async def delete_by_id(todo_id: int) -> Union[int, None]:
    row = await read_by_id(todo_id)
    logger.debug("row: {}".format(row))
    if not row:
        return None
    stmt = todos.delete().where(todos.columns.id == todo_id)
    logger.debug("stmt: {}".format(
        stmt.compile(dialect=postgresql.dialect(),
                     compile_kwargs={"literal_binds": True})))
    await database.execute(stmt)
    return todo_id
def get_data(self, start=None, limit=None):
    query_context = self.query_context()
    for qm in query_context.query_meta.values():
        query = qm._build_query().compile(dialect=postgresql.dialect())
        if ICDS_COMPARE_QUERIES_AGAINST_CITUS.enabled(
                uuid.uuid4().hex, NAMESPACE_OTHER):
            params = json.loads(
                json.dumps(self.filter_values, cls=DjangoJSONEncoder))
            call_citus_experiment(
                str(query), params, data_source=self.__class__.__name__)
    return super(IcdsSqlData, self).get_data(start, limit)
def test_overlap(info):
    filters = {'tags_overlap': ['a', 'b']}

    post_query = Query(Post)
    query = PostFilter.filter(info, post_query, filters)

    sql = str(query.statement.compile(dialect=postgresql.dialect()))
    ok = (
        'SELECT post.id, post.tags \n'
        'FROM post \n'
        'WHERE post.tags && CAST(%(param_1)s::VARCHAR(10)[] AS VARCHAR(10)[])'
    )
    assert sql == ok
def test_time_exp_lowr_col_sec_1y(self):
    """
    DB Eng Specs (postgres): Test grain expr lower case 1 YEAR
    """
    col = column("lower_case")
    expr = PostgresEngineSpec.get_timestamp_expr(col, "epoch_s", "P1Y")
    result = str(expr.compile(None, dialect=postgresql.dialect()))
    self.assertEqual(
        result,
        "DATE_TRUNC('year', "
        "(timestamp 'epoch' + lower_case * interval '1 second'))",
    )
def print_query(query, literals=True):
    '''
    Print a SQLAlchemy query with literals inserted and adjusted for the
    PostgreSQL dialect.
    '''
    from sqlalchemy.dialects import postgresql
    sql = query.statement.compile(dialect=postgresql.dialect(),
                                  compile_kwargs={'literal_binds': literals})
    print('---- SQL ({} literals) ----'.format(
        'with' if literals else 'without'))
    print(str(sql))
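# A usage sketch for print_query above; `Order` and `session` are
# hypothetical stand-ins for a mapped model and an active session.
print_query(session.query(Order).filter(Order.total > 100))
# Pass literals=False to keep %(param)s placeholders instead of values:
print_query(session.query(Order).filter(Order.total > 100), literals=False)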
def test_insert_from_select_seq(self):
    m = MetaData()
    t1 = Table(
        't', m,
        Column('id', Integer, Sequence('id_seq'), primary_key=True),
        Column('data', String))
    stmt = t1.insert().from_select(('data',), select([t1.c.data]))
    self.assert_compile(
        stmt,
        "INSERT INTO t (data, id) SELECT t.data, "
        "nextval('id_seq') AS next_value_1 FROM t",
        dialect=postgresql.dialect())
def test_array_agg_with_default(self):
    Base = sa.ext.declarative.declarative_base()

    class Article(Base):
        __tablename__ = 'article'
        id = sa.Column(sa.Integer, primary_key=True)

    assert str(
        sa.func.array_agg(Article.id, [1])
        .compile(dialect=postgresql.dialect())
    ) == (
        'coalesce(array_agg(article.id), CAST(ARRAY[%(param_1)s]'
        ' AS INTEGER[]))'
    )
async def update(payload: TodoUpdate) -> Union[dict, None]:
    row = await read_by_id(payload.id)
    if not row:
        return None
    stmt = (todos.update()
            .where(todos.columns.id == payload.id)
            .values(title=payload.title, completed=payload.completed))
    logger.debug("stmt: {}".format(
        stmt.compile(dialect=postgresql.dialect(),
                     compile_kwargs={"literal_binds": True})))
    await database.execute(stmt)
    return payload
def test_unconsumed_names_values_dict(self):
    table1 = self.tables.mytable
    checkparams = {"myid": 3, "name": "jack", "unknowncol": "oops"}
    stmt = insert(table1, values=checkparams)
    assert_raises_message(
        exc.CompileError,
        "Unconsumed column names: unknowncol",
        stmt.compile,
        dialect=postgresql.dialect(),
    )
class PostgreSQLJDBC(JDBC):
    DRIVER = 'org.postgresql.Driver'
    JDBC_NAME = 'jdbc:postgresql'
    DIALECT: Dialect = postgresql.dialect(paramstyle='named')

    @staticmethod
    def reset_generated_id(cursor: jaydebeapi.Cursor) -> None:
        pass

    @staticmethod
    def last_generated_id(cursor: jaydebeapi.Cursor) -> int:
        return 0
async def create(payload: TodoCreate) -> Union[int, None]:
    logger.debug("payload: {}".format(payload))
    stmt = todos.insert().values(title=payload.title,
                                 completed=payload.completed)
    logger.debug("stmt: {}".format(
        stmt.compile(dialect=postgresql.dialect(),
                     compile_kwargs={"literal_binds": True})))
    id = await database.execute(stmt)
    logger.debug("id: {}".format(id))
    if not id:
        return None
    return id
def test_from_only(self):
    m = MetaData()
    tbl1 = Table('testtbl1', m, Column('id', Integer))
    tbl2 = Table('testtbl2', m, Column('id', Integer))

    stmt = tbl1.select().with_hint(tbl1, 'ONLY', 'postgresql')
    expected = 'SELECT testtbl1.id FROM ONLY testtbl1'
    self.assert_compile(stmt, expected)

    talias1 = tbl1.alias('foo')
    stmt = talias1.select().with_hint(talias1, 'ONLY', 'postgresql')
    expected = 'SELECT foo.id FROM ONLY testtbl1 AS foo'
    self.assert_compile(stmt, expected)

    stmt = select([tbl1, tbl2]).with_hint(tbl1, 'ONLY', 'postgresql')
    expected = ('SELECT testtbl1.id, testtbl2.id FROM ONLY testtbl1, '
                'testtbl2')
    self.assert_compile(stmt, expected)

    stmt = select([tbl1, tbl2]).with_hint(tbl2, 'ONLY', 'postgresql')
    expected = ('SELECT testtbl1.id, testtbl2.id FROM testtbl1, ONLY '
                'testtbl2')
    self.assert_compile(stmt, expected)

    stmt = select([tbl1, tbl2])
    stmt = stmt.with_hint(tbl1, 'ONLY', 'postgresql')
    stmt = stmt.with_hint(tbl2, 'ONLY', 'postgresql')
    expected = ('SELECT testtbl1.id, testtbl2.id FROM ONLY testtbl1, '
                'ONLY testtbl2')
    self.assert_compile(stmt, expected)

    stmt = update(tbl1, values=dict(id=1))
    stmt = stmt.with_hint('ONLY', dialect_name='postgresql')
    expected = 'UPDATE ONLY testtbl1 SET id=%(id)s'
    self.assert_compile(stmt, expected)

    stmt = delete(tbl1).with_hint('ONLY', selectable=tbl1,
                                  dialect_name='postgresql')
    expected = 'DELETE FROM ONLY testtbl1'
    self.assert_compile(stmt, expected)

    tbl3 = Table('testtbl3', m, Column('id', Integer),
                 schema='testschema')
    stmt = tbl3.select().with_hint(tbl3, 'ONLY', 'postgresql')
    expected = ('SELECT testschema.testtbl3.id FROM '
                'ONLY testschema.testtbl3')
    self.assert_compile(stmt, expected)

    assert_raises(
        exc.CompileError,
        tbl3.select().with_hint(tbl3, "FAKE", "postgresql").compile,
        dialect=postgresql.dialect())
def build_sql_query(query):
    """Takes a sqlalchemy.orm.query.Query and returns a string representing
    the final query to be addressed to the lagoon-server"""
    d = psql.dialect()
    q = query.statement.compile(dialect=d)
    # The following is not ideal, as q.params and str(q) should
    # normally be passed separately to the PostgreSQL database:
    ps = {}
    for k, v in q.params.items():
        ps[k] = psql_ext.adapt(v).getquoted().decode(d.encoding)
    return str(q) % ps
def compile_query(query):  # type: ignore
    dialect = postgresql.dialect()
    compiled_query = query.compile(dialect=dialect)
    params = {}
    for k, v in compiled_query.params.items():
        if isinstance(v, str):
            params[k] = sqlescape(v.encode('utf-8'))
        else:
            params[k] = sqlescape(v)
    return compiled_query.string % params
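# An alternative sketch to compile_query above that leans on SQLAlchemy's
# built-in literal rendering instead of manual sqlescape(); types without a
# literal processor will raise, so treat this as an approximation rather than
# a drop-in replacement.
def compile_query_literal(query):
    return str(query.compile(dialect=postgresql.dialect(),
                             compile_kwargs={"literal_binds": True}))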
def test_drop_index_concurrently(self):
    m = MetaData()
    tbl = Table('testtbl', m, Column('data', Integer))
    idx1 = Index('test_idx1', tbl.c.data, postgresql_concurrently=True)
    self.assert_compile(schema.DropIndex(idx1),
                        "DROP INDEX CONCURRENTLY test_idx1")

    dialect_9_1 = postgresql.dialect()
    dialect_9_1._supports_drop_index_concurrently = False
    self.assert_compile(schema.DropIndex(idx1),
                        "DROP INDEX test_idx1",
                        dialect=dialect_9_1)
def as_sql(expression, **params):
    """Convert sqlalchemy expression to SQL string.

    (primarily for debugging: to see what sqlalchemy is doing)

    This has its literal values bound, so it's more readable
    than the engine's query logging.
    """
    if params:
        expression = expression.params(**params)
    return str(expression.compile(
        dialect=postgres.dialect(),
        compile_kwargs={"literal_binds": True}))
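# Hypothetical call showing as_sql above with deferred parameters; `my_table`
# stands in for a real Table object.
from sqlalchemy import bindparam
stmt = my_table.select().where(my_table.c.id == bindparam("id"))
print(as_sql(stmt, id=7))  # renders with id bound to the literal 7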
def test_anticipate_no_pk_non_composite_pk_prefetch(self):
    t = Table(
        't', MetaData(),
        Column('x', Integer, primary_key=True, autoincrement=False),
        Column('q', Integer))
    d = postgresql.dialect()
    d.implicit_returning = False
    with expect_warnings(
            "Column 't.x' is marked as a member.*"
            "may not store NULL.$"):
        self.assert_compile(
            t.insert(),
            "INSERT INTO t (q) VALUES (%(q)s)",
            params={"q": 5},
            dialect=d)
def test_anticipate_no_pk_composite_pk_implicit_returning(self):
    t = Table(
        't', MetaData(),
        Column('x', Integer, primary_key=True),
        Column('y', Integer, primary_key=True))
    d = postgresql.dialect()
    d.implicit_returning = True
    with expect_warnings(
            "Column 't.y' is marked as a member.*"
            "Note that as of SQLAlchemy 1.1,"):
        self.assert_compile(
            t.insert(),
            "INSERT INTO t (x) VALUES (%(x)s)",
            params={"x": 5},
            dialect=d)
def definition(self):
    if isinstance(self._definition, str):
        return inspect.cleandoc(self._definition)
    if isinstance(self._definition, Selectable):
        return str(
            self._definition.compile(
                dialect=postgresql.dialect(),
                compile_kwargs={'literal_binds': True}))
    raise ValueError(
        f"definition must be str or Selectable, not {type(self._definition)}"
    )
def get_word_stats(self, n: int = 4, limit: int = 20, start: str = None,
                   end: str = None, user: Tuple[int, str] = None,
                   **kwargs) -> Tuple[str, None]:
    """
    Print table of lexeme statistics.

    :param n: Only consider lexemes with length of at least n
    :param limit: Number of top lexemes to return
    :param start: Start timestamp (e.g. 2019, 2019-01, 2019-01-01, "2019-01-01 14:21")
    :param end: End timestamp (e.g. 2019, 2019-01, 2019-01-01, "2019-01-01 14:21")
    """
    q = select(messages.c['text_index_col'])
    if user:
        q = q.where(messages.c['from_user'] == user[0])
    if start:
        q = q.where(messages.c['date'] >= str(pd.to_datetime(start)))
    if end:
        q = q.where(messages.c['date'] < str(pd.to_datetime(end)))
    q = q.scalar_subquery()

    f = TsStat(q)
    stmt = select([f.c['word'], f.c['ndoc'], f.c['nentry']]) \
        .select_from(f)
    if n:
        stmt = stmt.where(func.length(f.c['word']) >= n)
    stmt = stmt.order_by(f.c.nentry.desc(), f.c.ndoc.desc(), f.c.word)
    if limit:
        stmt = stmt.limit(limit)
    stmt = stmt.compile(dialect=postgresql.dialect())

    with self.engine.connect() as con:
        df = pd.read_sql_query(stmt, con)
    df.columns = ['Lexeme', 'Messages', 'Uses']
    text = df.to_string(index=False, header=True,
                        float_format=lambda x: f"{x:.1f}")
    if user:
        return (f"**Most frequently used lexemes, "
                f"{escape_markdown(user[1].lstrip('@'))}:**\n```\n{text}\n```", None)
    else:
        return f"**Most frequently used lexemes, all users:**\n```\n{text}\n```", None
def _load_boundaries_tables(self, extent):
    multipolygon_cast = Geometry(geometry_type='MULTIPOLYGON', srid=4326)
    multilinestring_cast = Geometry(geometry_type='MULTILINESTRING', srid=4326)
    table_casts = {
        'sea_a': multipolygon_cast,
        'landmass_a': multipolygon_cast,
        'coastline_l': multilinestring_cast,
    }
    for table_name in self._osm_boundaries_tables:
        source_table_meta = self._table_metas[table_name]
        query = select([
            source_table_meta.c.ogc_fid,
            source_table_meta.c.fid,
            source_table_meta.c.wkb_geometry
        ])
        query = query.where(
            func.ST_Intersects(source_table_meta.c.wkb_geometry, extent.ewkt))
        self._execute_and_insert_into_local_db(
            query,
            source_table_meta,
            source_engine=self._osm_boundaries_db_engine)

        from sqlalchemy_views import CreateView
        view_definition_query = select([
            source_table_meta.c.ogc_fid,
            source_table_meta.c.fid,
            expression.cast(
                func.ST_Multi(
                    func.ST_Intersection(source_table_meta.c.wkb_geometry,
                                         extent.ewkt)),
                table_casts[table_name]).label('geom')
        ]).where(
            func.ST_Intersects(source_table_meta.c.wkb_geometry, extent.ewkt))
        view_meta = MetaData()
        view = Table(table_name, view_meta, schema='view_osmaxx')

        from sqlalchemy.dialects import postgresql
        from sqlalchemy.sql import text
        query_definition_string = str(
            view_definition_query.compile(
                dialect=postgresql.dialect(),
                compile_kwargs={"literal_binds": True}))
        query_definition_string = query_definition_string.replace(
            'ST_AsEWKB(CAST', 'CAST')
        query_definition_string = query_definition_string.replace(
            '))) AS geom', ')) AS geom')
        query_definition_text = text(query_definition_string)
        create_view = CreateView(view, query_definition_text, or_replace=True)
        self._local_db_engine.execute(create_view)
def show(self, prop=None):
    ''' Prints info to the console

    Displays the query to the console with parameter variables plugged in.
    Only works in LOCAL mode. Input prop can be one of query, tables,
    joins, or filter.

    Allowed Values for Prop:
        query - displays the entire query (default if nothing specified)
        tables - displays the tables that have been joined in the query
        joins - same as tables
        filter - displays only the filter used on the query

    Parameters:
        prop (str):
            The type of info to print.

    Example:
        TODO add example

    '''
    assert prop in [None, 'query', 'tables', 'joins', 'filter'], \
        'Input must be query, joins, or filter'
    if self.mode == 'local':
        if not prop or 'query' in prop:
            print(self.query.statement.compile(
                dialect=postgresql.dialect(),
                compile_kwargs={'literal_binds': True}))
        elif prop == 'tables':
            print(self.joins)
        elif prop == 'filter':
            # oddly this does not update when bound parameters change,
            # but the statement above does
            print(self.query.whereclause.compile(
                dialect=postgresql.dialect(),
                compile_kwargs={'literal_binds': True}))
        else:
            print(self.__getattribute__(prop))
    elif self.mode == 'remote':
        print('Cannot show full SQL query in remote mode, '
              'use the Results showQuery')
def compile_query(self, query=None, dialect=postgresql.dialect()):
    # dialect = query.session.bind.dialect
    if query is None:
        query = self.query
    statement = query.statement
    comp = compiler.SQLCompiler(dialect, statement)
    comp.compile()
    enc = dialect.encoding
    params = {}
    for k, v in comp.params.iteritems():
        if isinstance(v, unicode):
            v = v.encode(enc)
        params[k] = sqlescape(v)
    return (comp.string.encode(enc) % params).decode(enc)
def compiled_query(query, label=None, literal_binds=False):
    """Compile an SQLAlchemy query with an optional label."""
    query = str(
        query.compile(
            dialect=postgresql.dialect(),
            compile_kwargs={'literal_binds': literal_binds},
        ))
    query = sqlparse.format(query, reindent=True, keyword_case='upper')
    if label:
        logger.debug(f'\033[4m{label}:\033[0m\n{query}')
        sys.stdout.write(f'\033[4m{label}:\033[0m\n{query}')
    else:
        logger.debug(f'{query}')
        sys.stdout.write(f'{query}')
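# Hedged usage of compiled_query above; `stmt` is assumed to be any
# compilable SQLAlchemy Core or ORM statement.
compiled_query(stmt, label='Candidate rows', literal_binds=True)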
def test_create_index_concurrently(self):
    m = MetaData()
    tbl = Table('testtbl', m, Column('data', Integer))
    idx1 = Index('test_idx1', tbl.c.data, postgresql_concurrently=True)
    self.assert_compile(
        schema.CreateIndex(idx1),
        "CREATE INDEX CONCURRENTLY test_idx1 ON testtbl (data)")

    dialect_8_1 = postgresql.dialect()
    dialect_8_1._supports_create_index_concurrently = False
    self.assert_compile(schema.CreateIndex(idx1),
                        "CREATE INDEX test_idx1 ON testtbl (data)",
                        dialect=dialect_8_1)
def test_create_partial_index(self):
    m = MetaData()
    tbl = Table('testtbl', m, Column('data', Integer))
    idx = Index('test_idx1', tbl.c.data,
                postgresql_where=and_(tbl.c.data > 5, tbl.c.data < 10))

    # test quoting and all that
    idx2 = Index('test_idx2', tbl.c.data,
                 postgresql_where=and_(tbl.c.data > 'a', tbl.c.data < "b's"))

    self.assert_compile(schema.CreateIndex(idx),
                        'CREATE INDEX test_idx1 ON testtbl (data) '
                        'WHERE data > 5 AND data < 10',
                        dialect=postgresql.dialect())
    self.assert_compile(schema.CreateIndex(idx2),
                        "CREATE INDEX test_idx2 ON testtbl (data) "
                        "WHERE data > 'a' AND data < 'b''s'",
                        dialect=postgresql.dialect())
def _create_sql_query(self):
    left = self.tables[0]
    joinq = left
    select_cols = list(left.c)
    for right in self.tables[1:]:
        # and_() takes clauses as positional args, so unpack the generator
        on = and_(*(left.c.get(col) == right.c.get(col)
                    for col in self.join_cols))
        joinq = joinq.join(right, on)
        select_cols += [c for c in right.c if c.name not in self.join_cols]
    query = select(*select_cols).select_from(joinq)
    self.sql_query = str(
        query.compile(
            dialect=postgresql.dialect(),
            compile_kwargs={"literal_binds": True},
        ))
def setup_class(cls):
    cls.autogen_context = {
        'opts': {
            'sqlalchemy_module_prefix': 'sa.',
            'alembic_module_prefix': 'op.',
        },
        'dialect': mysql.dialect()
    }
    cls.pg_autogen_context = {
        'opts': {
            'sqlalchemy_module_prefix': 'sa.',
            'alembic_module_prefix': 'op.',
        },
        'dialect': postgresql.dialect()
    }