def test_fixed_width_format_without_widths():
    copy = dialect.CopyCommand(
        tbl,
        format=commands.Format.fixed_width,
        data_location='s3://bucket',
        access_key_id=access_key_id,
        secret_access_key=secret_access_key,
    )
    with pytest.raises(sa_exc.CompileError,
                       match=r"^'fixed_width' argument required.*$"):
        compile_query(copy)

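# ---------------------------------------------------------------------------
# The tests below rely on shared fixtures, helpers, and module-level constants
# that are not shown in this excerpt.  The sketch that follows is a hedged
# reconstruction inferred from the expected SQL in the assertions; the concrete
# names, values, and helper implementations in the real suite may differ.
# ---------------------------------------------------------------------------
import re

import pytest
import sqlalchemy as sa
from packaging.version import Version
from sqlalchemy import MetaData, Table, inspect
from sqlalchemy import exc as sa_exc
from sqlalchemy_redshift import commands, dialect

sa_version = Version(sa.__version__)

# Hypothetical placeholder credentials; only their shape matters to the tests.
access_key_id = 'IO1IWSZL5YRFM3BEW256'
secret_access_key = 'secret'
creds = 'aws_access_key_id={0};aws_secret_access_key={1}'.format(
    access_key_id, secret_access_key)

metadata = sa.MetaData()

# The UNLOAD tests select from a bare table t1 with an id column.
table = sa.Table('t1', metadata, sa.Column('id', sa.Integer))

# The COPY tests use a schema-qualified schema1.t1 with col1/col2, plus an
# unqualified variant for the FORMAT tests.
tbl = sa.Table(
    't1', metadata,
    sa.Column('col1', sa.Unicode),
    sa.Column('col2', sa.Unicode),
    schema='schema1',
)
tbl2 = sa.Table('t1', sa.MetaData(), sa.Column('col1', sa.Unicode))


def compile_query(q, compile_dialect=None):
    """Render a SQLAlchemy construct to SQL text with literal binds."""
    compile_dialect = compile_dialect or dialect.RedshiftDialect()
    return str(q.compile(dialect=compile_dialect,
                         compile_kwargs={'literal_binds': True}))


def clean(query):
    """Collapse whitespace so multi-line SQL strings compare reliably."""
    return re.sub(r'\s+', ' ', str(query)).strip()
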
def test_parquet_format__bad_options_crash(kwargs):
    """Verify we crash if we use the Parquet format with a bad option."""
    unload = dialect.UnloadFromSelect(
        select=sa.select([sa.func.count(table.c.id)]),
        unload_location='s3://bucket/key',
        access_key_id=access_key_id,
        secret_access_key=secret_access_key,
        format=dialect.Format.parquet,
        **kwargs
    )
    with pytest.raises(ValueError):
        compile_query(unload)

def test_csv_format__bad_options_crash(delimiter, fixed_width):
    """Test that UnloadFromSelect crashes if you try to use DELIMITER and/or
    FIXEDWIDTH with the CSV format.
    """
    unload = dialect.UnloadFromSelect(
        select=sa.select([sa.func.count(table.c.id)]),
        unload_location='s3://bucket/key',
        access_key_id=access_key_id,
        secret_access_key=secret_access_key,
        format=dialect.Format.csv,
        delimiter=delimiter,
        fixed_width=fixed_width,
    )
    with pytest.raises(ValueError):
        compile_query(unload)

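# Both crash tests above are parametrized; the exact parameter lists are not
# shown in this excerpt.  A plausible (hypothetical) parametrization would feed
# in text-only options that the chosen format rejects, e.g.:
#
#   @pytest.mark.parametrize('kwargs', [
#       {'delimiter': '|'},
#       {'fixed_width': [('count_1', 50)]},
#       {'add_quotes': True},
#   ])
#
#   @pytest.mark.parametrize('delimiter, fixed_width', [
#       ('|', None),
#       (None, [('count_1', 50)]),
#       ('|', [('count_1', 50)]),
#   ])
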
def test_iam_role(stub_redshift_dialect):
    """Tests the use of iam role instead of access keys."""
    aws_account_id = '000123456789'
    iam_role_name = 'redshiftrole'
    creds = 'aws_iam_role=arn:aws:iam::{0}:role/{1}'.format(
        aws_account_id,
        iam_role_name,
    )
    unload = dialect.UnloadFromSelect(
        select=sa.select([sa.func.count(table.c.id)]),
        unload_location='s3://bucket/key',
        aws_account_id=aws_account_id,
        iam_role_name=iam_role_name,
    )
    expected_result = """
        UNLOAD ('SELECT count(t1.id) AS count_1 FROM t1')
        TO 's3://bucket/key'
        CREDENTIALS '{creds}'
    """.format(creds=creds)
    assert clean(compile_query(unload, stub_redshift_dialect)) == \
        clean(expected_result)

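# Several tests take a `stub_redshift_dialect` fixture so that statements can
# be compiled without a database connection.  A minimal sketch of such a
# fixture (assumed; the real fixture may stub out more of the dialect):
@pytest.fixture
def stub_redshift_dialect():
    return dialect.RedshiftDialect()
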
def test_ascii_nul_as_redshift_null():
    expected_result = """
        COPY schema1.t1 FROM 's3://mybucket/data/listing/'
        WITH CREDENTIALS AS '%s'
        DELIMITER AS ','
        BZIP2
        BLANKSASNULL
        EMPTYASNULL
        IGNOREHEADER AS 0
        NULL AS '\0'
        TRUNCATECOLUMNS
    """ % creds
    copy = dialect.CopyCommand(
        tbl,
        data_location='s3://mybucket/data/listing/',
        access_key_id=access_key_id,
        secret_access_key=secret_access_key,
        compression='BZIP2',
        dangerous_null_delimiter=u'\000',
        truncate_columns=True,
        delimiter=',',
        ignore_header=0,
        empty_as_null=True,
        blanks_as_null=True,
    )
    assert clean(expected_result) == clean(compile_query(copy))

def test_delete_stmt_joinedwhereclause1():
    del_stmt = sa.delete(orders).where(orders.c.customer_id == customers.c.id)
    expected = """
        DELETE FROM orders
        USING customers
        WHERE orders.customer_id = customers.id"""
    assert clean(compile_query(del_stmt)) == clean(expected)

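# The DELETE tests reference several tables assumed to be defined elsewhere in
# the suite.  A sketch consistent with the expected SQL (column types are
# guesses; `delete_meta` is a hypothetical name):
delete_meta = sa.MetaData()
customers = sa.Table(
    'customers', delete_meta,
    sa.Column('id', sa.Integer),
    sa.Column('email', sa.String),
)
orders = sa.Table(
    'orders', delete_meta,
    sa.Column('id', sa.Integer),
    sa.Column('customer_id', sa.Integer),
)
items = sa.Table(
    'items', delete_meta,
    sa.Column('order_id', sa.Integer),
    sa.Column('product_id', sa.Integer),
    sa.Column('name', sa.String),
)
product = sa.Table(
    'products', delete_meta,
    sa.Column('id', sa.Integer),
    sa.Column('parent_id', sa.Integer),
)
ham = sa.Table('ham', delete_meta, sa.Column('id', sa.Integer))
hammy_spam = sa.Table(
    'ham, spam', delete_meta, sa.Column('ham_id', sa.Integer))
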
def test_iam_role_partition():
    """Tests the use of iam role with a custom partition"""
    aws_partition = 'aws-us-gov'
    aws_account_id = '000123456789'
    iam_role_name = 'redshiftrole'
    creds = 'aws_iam_role=arn:{0}:iam::{1}:role/{2}'.format(
        aws_partition,
        aws_account_id,
        iam_role_name,
    )
    unload = dialect.UnloadFromSelect(
        select=sa.select([sa.func.count(table.c.id)]),
        unload_location='s3://bucket/key',
        aws_partition=aws_partition,
        aws_account_id=aws_account_id,
        iam_role_name=iam_role_name,
    )
    expected_result = """
        UNLOAD ('SELECT count(t1.id) AS count_1 FROM t1')
        TO 's3://bucket/key'
        CREDENTIALS '{creds}'
    """.format(creds=creds)
    assert clean(compile_query(unload)) == clean(expected_result)

def test_delete_on_comma(stub_redshift_dialect):
    del_stmt = sa.delete(ham).where(ham.c.id == hammy_spam.c.ham_id)
    expected = """
        DELETE FROM ham
        USING "ham, spam"
        WHERE ham.id = "ham, spam".ham_id"""
    assert clean(compile_query(del_stmt, stub_redshift_dialect)) == \
        clean(expected)

def test_delete_stmt_with_comma_subquery_alias_join(stub_redshift_dialect):
    parent_ = sa.alias(product)
    del_stmt = (
        sa.delete(items)
        .where(items.c.order_id == orders.c.id)
        .where(orders.c.customer_id.in_(
            sa.select([customers.c.id])
            .where(customers.c.email.endswith('test.com'))))
        .where(items.c.product_id == product.c.id)
        .where(product.c.parent_id == parent_.c.id)
        .where(parent_.c.id != hammy_spam.c.ham_id)
    )
    expected = """
        DELETE FROM items
        USING orders, products, products AS products_1, "ham, spam"
        WHERE items.order_id = orders.id
        AND orders.customer_id IN
            (SELECT customers.id
             FROM customers
             WHERE (customers.email LIKE '%%' || 'test.com'))
        AND items.product_id = products.id
        AND products.parent_id = products_1.id
        AND products_1.id != "ham, spam".ham_id"""
    assert clean(compile_query(del_stmt, stub_redshift_dialect)) == \
        clean(expected)

def test_json_upload_with_manifest_region_ordered_columns():
    expected_result = """
        COPY schema1.t1 (col1, col2) FROM 's3://mybucket/data/listing.manifest'
        WITH CREDENTIALS AS '%s'
        FORMAT AS JSON AS 's3://mybucket/data/jsonpath.json'
        GZIP
        MANIFEST
        REGION AS 'us-east-1'
        ACCEPTANYDATE
        TIMEFORMAT AS 'auto'
    """ % creds
    copy = dialect.CopyCommand(
        [tbl.c.col1, tbl.c.col2],
        data_location='s3://mybucket/data/listing.manifest',
        access_key_id=access_key_id,
        secret_access_key=secret_access_key,
        manifest=True,
        region='us-east-1',
        format='JSON',
        path_file='s3://mybucket/data/jsonpath.json',
        compression='GZIP',
        time_format='auto',
        accept_any_date=True,
    )
    assert clean(expected_result) == clean(compile_query(copy))

def test_basic_copy_case():
    expected_result = """
        COPY schema1.t1 FROM 's3://mybucket/data/listing/'
        WITH CREDENTIALS AS '%s'
        DELIMITER AS ','
        BLANKSASNULL
        EMPTYASNULL
        IGNOREHEADER AS 0
        TRUNCATECOLUMNS
        REGION 'eu-west-3'
    """ % creds
    copy = dialect.CopyCommand(
        tbl,
        data_location='s3://mybucket/data/listing/',
        access_key_id=access_key_id,
        secret_access_key=secret_access_key,
        truncate_columns=True,
        delimiter=',',
        ignore_header=0,
        empty_as_null=True,
        blanks_as_null=True,
        region='eu-west-3',
    )
    assert clean(expected_result) == clean(compile_query(copy))

def test_delete_stmt_with_comma_subquery_alias_join():
    parent_ = sa.alias(product)
    del_stmt = (
        sa.delete(items)
        .where(items.c.order_id == orders.c.id)
        .where(orders.c.customer_id.in_(
            sa.select([customers.c.id])
            .where(customers.c.email.endswith('test.com'))))
        .where(items.c.product_id == product.c.id)
        .where(product.c.parent_id == parent_.c.id)
        .where(parent_.c.id != hammy_spam.c.ham_id)
    )
    expected = """
        DELETE FROM items
        USING orders, products, products AS products_1, "ham, spam"
        WHERE items.order_id = orders.id
        AND orders.customer_id IN
            (SELECT customers.id
             FROM customers
             WHERE (customers.email LIKE '%%' || 'test.com'))
        AND items.product_id = products.id
        AND products.parent_id = products_1.id
        AND products_1.id != "ham, spam".ham_id"""
    assert clean(compile_query(del_stmt)) == clean(expected)

def test_iam_role_partition(stub_redshift_dialect):
    """Tests the use of iam role with a custom partition"""
    aws_partition = 'aws-us-gov'
    aws_account_id = '000123456789'
    iam_role_name = 'redshiftrole'
    creds = 'aws_iam_role=arn:{0}:iam::{1}:role/{2}'.format(
        aws_partition,
        aws_account_id,
        iam_role_name,
    )
    expected_result = """
        COPY schema1.t1 FROM 's3://mybucket/data/listing/'
        WITH CREDENTIALS AS '{creds}'
    """.format(creds=creds)
    copy = dialect.CopyCommand(
        tbl,
        data_location='s3://mybucket/data/listing/',
        aws_partition=aws_partition,
        aws_account_id=aws_account_id,
        iam_role_name=iam_role_name,
    )
    assert clean(expected_result) == \
        clean(compile_query(copy, stub_redshift_dialect))

def test_basic_copy_case(stub_redshift_dialect):
    expected_result = """
        COPY schema1.t1 FROM 's3://mybucket/data/listing/'
        WITH CREDENTIALS AS '%s'
        DELIMITER AS ','
        BLANKSASNULL
        EMPTYASNULL
        IGNOREHEADER AS 0
        TRUNCATECOLUMNS
        REGION 'eu-west-3'
    """ % creds
    copy = dialect.CopyCommand(
        tbl,
        data_location='s3://mybucket/data/listing/',
        access_key_id=access_key_id,
        secret_access_key=secret_access_key,
        truncate_columns=True,
        delimiter=',',
        ignore_header=0,
        empty_as_null=True,
        blanks_as_null=True,
        region='eu-west-3',
    )
    assert clean(expected_result) == \
        clean(compile_query(copy, stub_redshift_dialect))

def test_basic_materialized_view(selectable):
    expected_result = """
        CREATE MATERIALIZED VIEW test_view
        AS SELECT t1.id, t1.name FROM t1
    """
    view = dialect.CreateMaterializedView("test_view", selectable)
    assert clean(expected_result) == clean(compile_query(view))

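# The materialized-view tests take a `selectable` fixture; judging by the
# expected DDL it is assumed to be a plain SELECT of t1.id and t1.name:
@pytest.fixture
def selectable():
    t1 = sa.Table(
        't1', sa.MetaData(),
        sa.Column('id', sa.Integer),
        sa.Column('name', sa.String),
    )
    return sa.select([t1.c.id, t1.c.name])
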
def test_format(stub_redshift_dialect):
    expected_result = """
        COPY t1 FROM 's3://mybucket/data/listing/'
        WITH CREDENTIALS AS '%s'
        FORMAT AS JSON AS 'auto'
        DELIMITER AS ','
        BLANKSASNULL
        EMPTYASNULL
        IGNOREHEADER AS 0
        TRUNCATECOLUMNS
    """ % creds
    copy = dialect.CopyCommand(
        tbl2,
        data_location='s3://mybucket/data/listing/',
        access_key_id=access_key_id,
        secret_access_key=secret_access_key,
        format='JSON',
        truncate_columns=True,
        delimiter=',',
        ignore_header=0,
        empty_as_null=True,
        blanks_as_null=True,
    )
    assert clean(expected_result) == \
        clean(compile_query(copy, stub_redshift_dialect))

def test_delete_stmt_simplewhereclause1():
    del_stmt = sa.delete(customers).where(
        customers.c.email == '*****@*****.**')
    expected = """
        DELETE FROM customers
        WHERE customers.email = '*****@*****.**'"""
    assert clean(compile_query(del_stmt)) == clean(expected)

def test_delete_stmt_simplewhereclause2():
    del_stmt = sa.delete(customers).where(
        customers.c.email.endswith('test.com'))
    expected = """
        DELETE FROM customers
        WHERE customers.email LIKE '%%' || 'test.com'"""
    assert clean(compile_query(del_stmt)) == clean(expected)

def test_delete_stmt_on_alias():
    parent_ = sa.alias(product)
    del_stmt = sa.delete(product).where(product.c.parent_id == parent_.c.id)
    expected = """
        DELETE FROM products
        USING products AS products_1
        WHERE products.parent_id = products_1.id"""
    assert clean(compile_query(del_stmt)) == clean(expected)

def test_delete_stmt_on_subquerycomma():
    del_stmt = sa.delete(ham).where(
        ham.c.id.in_(sa.select([hammy_spam.c.ham_id])))
    expected = """
        DELETE FROM ham
        WHERE ham.id IN (SELECT "ham, spam".ham_id FROM "ham, spam")"""
    assert clean(compile_query(del_stmt)) == clean(expected)

def test_diststyle_materialized_view(selectable):
    expected_result = """
        CREATE MATERIALIZED VIEW test_view
        DISTSTYLE ALL
        AS SELECT t1.id, t1.name FROM t1
    """
    view = dialect.CreateMaterializedView(
        "test_view", selectable, diststyle='ALL')
    assert clean(expected_result) == clean(compile_query(view))

def test_delete_stmt_subquery():
    del_stmt = sa.delete(orders).where(
        orders.c.customer_id.in_(
            sa.select([customers.c.id])
            .where(customers.c.email.endswith('test.com'))))
    expected = """
        DELETE FROM orders
        WHERE orders.customer_id IN
            (SELECT customers.id
             FROM customers
             WHERE (customers.email LIKE '%%' || 'test.com'))"""
    assert clean(compile_query(del_stmt)) == clean(expected)

def test_no_backup_materialized_view(selectable, stub_redshift_dialect):
    expected_result = """
        CREATE MATERIALIZED VIEW test_view
        BACKUP NO
        AS SELECT t1.id, t1.name FROM t1
    """
    view = dialect.CreateMaterializedView(
        "test_view", selectable, backup=False)
    assert clean(expected_result) == \
        clean(compile_query(view, stub_redshift_dialect))

def test_interleaved_sortkey_materialized_view(selectable):
    expected_result = """
        CREATE MATERIALIZED VIEW test_view
        INTERLEAVED SORTKEY (id)
        AS SELECT t1.id, t1.name FROM t1
    """
    for key in ("id", selectable.c.id):
        view = dialect.CreateMaterializedView(
            "test_view", selectable, interleaved_sortkey=key)
        assert clean(expected_result) == clean(compile_query(view))

def test_sortkey_materialized_view(selectable, stub_redshift_dialect):
    expected_result = """
        CREATE MATERIALIZED VIEW test_view
        SORTKEY (id)
        AS SELECT t1.id, t1.name FROM t1
    """
    for key in ("id", selectable.c.id):
        view = dialect.CreateMaterializedView(
            "test_view", selectable, sortkey=key)
        assert clean(expected_result) == \
            clean(compile_query(view, stub_redshift_dialect))

def test_delete_stmt_simplewhereclause2(stub_redshift_dialect):
    del_stmt = sa.delete(customers).where(
        customers.c.email.endswith('test.com'))
    if sa_version >= Version('1.4.0'):
        expected = """
            DELETE FROM customers
            WHERE (customers.email LIKE '%%' || 'test.com')"""
    else:
        expected = """
            DELETE FROM customers
            WHERE customers.email LIKE '%%' || 'test.com'"""
    assert clean(compile_query(del_stmt, stub_redshift_dialect)) == \
        clean(expected)

def test_view_reflection(redshift_engine):
    table_ddl = "CREATE TABLE my_table (col1 INTEGER, col2 INTEGER)"
    view_query = "SELECT my_table.col1, my_table.col2 FROM my_table"
    view_ddl = "CREATE VIEW my_view AS %s" % view_query
    conn = redshift_engine.connect()
    conn.execute(table_ddl)
    conn.execute(view_ddl)
    insp = inspect(redshift_engine)
    view_definition = insp.get_view_definition('my_view')
    assert clean(compile_query(view_definition)) == clean(view_query)
    view = Table('my_view', MetaData(),
                 autoload=True, autoload_with=redshift_engine)
    assert len(view.columns) == 2

def test_format__columnar(format_type):
    expected_result = """
        COPY t1 FROM 's3://mybucket/data/listing/'
        WITH CREDENTIALS AS '%s'
        FORMAT AS %s
    """ % (creds, format_type.value.upper())
    copy = dialect.CopyCommand(
        tbl2,
        data_location='s3://mybucket/data/listing/',
        access_key_id=access_key_id,
        secret_access_key=secret_access_key,
        format=format_type,
    )
    assert clean(expected_result) == clean(compile_query(copy))

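# `format_type` above is parametrized over the columnar COPY formats; a
# plausible (hypothetical) parameter list would be something like:
#
#   @pytest.mark.parametrize('format_type',
#                            [dialect.Format.orc, dialect.Format.parquet])
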
def test_format():
    expected_result = """
        COPY t1 FROM 's3://mybucket/data/listing/'
        CREDENTIALS '%s'
        JSON
        TRUNCATECOLUMNS
        DELIMITER ','
        IGNOREHEADER 0
        EMPTYASNULL
        BLANKSASNULL
    """ % creds
    copy = dialect.CopyCommand(
        table=tbl2,
        data_location='s3://mybucket/data/listing/',
        access_key_id=access_key_id,
        secret_access_key=secret_access_key,
        format='JSON',
    )
    assert clean(expected_result) == clean(compile_query(copy))

def test_compression():
    expected_result = """
        COPY schema1.t1 FROM 's3://mybucket/data/listing/'
        CREDENTIALS '%s'
        CSV
        TRUNCATECOLUMNS
        DELIMITER ','
        IGNOREHEADER 0
        LZOP
        EMPTYASNULL
        BLANKSASNULL
    """ % creds
    copy = dialect.CopyCommand(
        table=tbl,
        data_location='s3://mybucket/data/listing/',
        access_key_id=access_key_id,
        secret_access_key=secret_access_key,
        compression='LZOP',
    )
    assert clean(expected_result) == clean(compile_query(copy))

def test_or_replace():
    name = 'SomeLibrary'
    where = 's3://bucket/path/to/archive.zip'
    expected_result = """
        CREATE OR REPLACE LIBRARY "{name}"
        LANGUAGE plpythonu
        FROM '{where}'
        WITH CREDENTIALS AS '{creds}'
    """.format(name=name, where=where, creds=CREDS)
    cmd = commands.CreateLibraryCommand(
        name, where,
        access_key_id=ACCESS_KEY_ID,
        secret_access_key=SECRET_ACCESS_KEY,
        replace=True)
    assert clean(expected_result) == clean(compile_query(cmd))

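# The CREATE LIBRARY tests use module-level credential constants that are not
# shown in this excerpt; hypothetical placeholders consistent with the expected
# SQL would be:
ACCESS_KEY_ID = 'IO1IWSZL5YRFM3BEW256'
SECRET_ACCESS_KEY = 'secret'
CREDS = 'aws_access_key_id={0};aws_secret_access_key={1}'.format(
    ACCESS_KEY_ID, SECRET_ACCESS_KEY)
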
def test_basic():
    name = 'asdfghjkl'
    where = 'https://www.example.com/libraries/extension.zip'
    expected_result = """
        CREATE LIBRARY "{name}"
        LANGUAGE plpythonu
        FROM '{where}'
        WITH CREDENTIALS AS '{creds}'
    """.format(name=name, where=where, creds=CREDS)
    cmd = commands.CreateLibraryCommand(
        name, where,
        access_key_id=ACCESS_KEY_ID,
        secret_access_key=SECRET_ACCESS_KEY)
    assert clean(expected_result) == clean(compile_query(cmd))

def test_late_binding_view_reflection(redshift_engine):
    table_ddl = "CREATE TABLE my_table (col1 INTEGER, col2 INTEGER)"
    view_query = "SELECT my_table.col1, my_table.col2 FROM public.my_table"
    view_ddl = ("CREATE VIEW my_late_view AS "
                "%s WITH NO SCHEMA BINDING" % view_query)
    conn = redshift_engine.connect()
    conn.execute(table_ddl)
    conn.execute(view_ddl)
    insp = inspect(redshift_engine)
    view_definition = insp.get_view_definition('my_late_view')
    # For some reason, Redshift returns the entire DDL for late binding views.
    assert clean(compile_query(view_definition)) == clean(view_ddl)
    view = Table('my_late_view', MetaData(),
                 autoload=True, autoload_with=redshift_engine)
    assert len(view.columns) == 2

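# Note: both reflection tests above assume a `redshift_engine` fixture bound to
# a live Redshift (or Redshift-compatible) cluster; unlike the other tests they
# execute DDL and inspect real catalog state instead of comparing compiled SQL.
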
def test_delete_stmt_subqueryplusjoin():
    del_stmt = (
        sa.delete(orders)
        .where(orders.c.customer_id.in_(
            sa.select([customers.c.id])
            .where(customers.c.email.endswith('test.com'))))
        .where(orders.c.id == items.c.order_id)
        .where(items.c.name == 'test product')
    )
    expected = """
        DELETE FROM orders
        USING items
        WHERE orders.customer_id IN
            (SELECT customers.id
             FROM customers
             WHERE (customers.email LIKE '%%' || 'test.com'))
        AND orders.id = items.order_id
        AND items.name = 'test product'"""
    assert clean(compile_query(del_stmt)) == clean(expected)

def test_basic_unload_case():
    """Tests that the simplest type of UnloadFromSelect works."""
    unload = dialect.UnloadFromSelect(
        select=sa.select([sa.func.count(table.c.id)]),
        unload_location='s3://bucket/key',
        access_key_id=access_key_id,
        secret_access_key=secret_access_key,
    )
    expected_result = """
        UNLOAD ('SELECT count(t1.id) AS count_1 FROM t1')
        TO 's3://bucket/key'
        CREDENTIALS '{creds}'
    """.format(creds=creds)
    assert clean(compile_query(unload)) == clean(expected_result)

def test_region():
    name = 'SomeLibrary'
    where = 's3://bucket/path/to/archive.zip'
    region = 'sa-east-1'
    expected_result = """
        CREATE LIBRARY "{name}"
        LANGUAGE plpythonu
        FROM '{where}'
        WITH CREDENTIALS AS '{creds}'
        REGION '{region}'
    """.format(name=name, where=where, creds=CREDS, region=region)
    cmd = commands.CreateLibraryCommand(
        name, where,
        access_key_id=ACCESS_KEY_ID,
        secret_access_key=SECRET_ACCESS_KEY,
        region=region)
    assert clean(expected_result) == clean(compile_query(cmd))

def test_delete_stmt_joinedwhereclause2():
    del_stmt = (
        sa.delete(orders)
        .where(orders.c.customer_id == customers.c.id)
        .where(orders.c.id == items.c.order_id)
        .where(customers.c.email.endswith('test.com'))
        .where(items.c.name == 'test product')
    )
    expected = """
        DELETE FROM orders
        USING customers, items
        WHERE orders.customer_id = customers.id
        AND orders.id = items.order_id
        AND (customers.email LIKE '%%' || 'test.com')
        AND items.name = 'test product'"""
    assert clean(compile_query(del_stmt)) == clean(expected)

def test_stat_update_maxerror_and_escape():
    expected_result = """
        COPY schema1.t1 FROM 's3://mybucket/data/listing/'
        WITH CREDENTIALS AS '%s'
        ESCAPE
        NULL AS '\x00'
        MAXERROR AS 0
        STATUPDATE ON
    """ % creds
    copy = dialect.CopyCommand(
        tbl,
        data_location='s3://mybucket/data/listing/',
        access_key_id=access_key_id,
        secret_access_key=secret_access_key,
        max_error=0,
        dangerous_null_delimiter=u'\000',
        stat_update=True,
        escape=True,
    )
    assert clean(expected_result) == clean(compile_query(copy))

def test_all_redshift_options():
    """Tests that UnloadFromSelect handles all options correctly."""
    unload = dialect.UnloadFromSelect(
        sa.select([sa.func.count(table.c.id)]),
        unload_location='s3://bucket/key',
        access_key_id=access_key_id,
        secret_access_key=secret_access_key,
        manifest=True,
        delimiter=',',
        fixed_width=[('count_1', 50)],
        encrypted=True,
        gzip=True,
        add_quotes=True,
        null='---',
        escape=True,
        allow_overwrite=True,
        parallel=False,
        region='us-west-2',
        max_file_size=10 * 1024**2,
    )
    expected_result = """
        UNLOAD ('SELECT count(t1.id) AS count_1 FROM t1')
        TO 's3://bucket/key'
        CREDENTIALS '{creds}'
        MANIFEST
        DELIMITER AS ','
        ENCRYPTED
        FIXEDWIDTH AS 'count_1:50'
        GZIP
        ADDQUOTES
        NULL AS '---'
        ESCAPE
        ALLOWOVERWRITE
        PARALLEL OFF
        REGION 'us-west-2'
        MAXFILESIZE 10.0 MB
    """.format(creds=creds)
    assert clean(compile_query(unload)) == clean(expected_result)

def test_iam_role():
    """Tests the use of iam role instead of access keys."""
    aws_account_id = '000123456789'
    iam_role_name = 'redshiftrole'
    creds = 'aws_iam_role=arn:aws:iam::{0}:role/{1}'.format(
        aws_account_id,
        iam_role_name,
    )
    expected_result = """
        COPY schema1.t1 FROM 's3://mybucket/data/listing/'
        WITH CREDENTIALS AS '{creds}'
    """.format(creds=creds)
    copy = dialect.CopyCommand(
        tbl,
        data_location='s3://mybucket/data/listing/',
        aws_account_id=aws_account_id,
        iam_role_name=iam_role_name,
    )
    assert clean(expected_result) == clean(compile_query(copy))

def test_json_upload_with_manifest_ordered_columns():
    expected_result = """
        COPY schema1.t1 (col1, col2) FROM 's3://mybucket/data/listing.manifest'
        WITH CREDENTIALS AS '%s'
        FORMAT AS JSON AS 's3://mybucket/data/jsonpath.json'
        GZIP
        MANIFEST
        ACCEPTANYDATE
        TIMEFORMAT AS 'auto'
    """ % creds
    copy = dialect.CopyCommand(
        [tbl.c.col1, tbl.c.col2],
        data_location='s3://mybucket/data/listing.manifest',
        access_key_id=access_key_id,
        secret_access_key=secret_access_key,
        manifest=True,
        format='JSON',
        path_file='s3://mybucket/data/jsonpath.json',
        compression='GZIP',
        time_format='auto',
        accept_any_date=True,
    )
    assert clean(expected_result) == clean(compile_query(copy))

def test_iam_role():
    """Tests the use of iam role instead of access keys."""
    aws_account_id = '000123456789'
    iam_role_name = 'redshiftrole'
    creds = 'aws_iam_role=arn:aws:iam::{0}:role/{1}'.format(
        aws_account_id,
        iam_role_name,
    )
    unload = dialect.UnloadFromSelect(
        select=sa.select([sa.func.count(table.c.id)]),
        unload_location='s3://bucket/key',
        aws_account_id=aws_account_id,
        iam_role_name=iam_role_name,
    )
    expected_result = """
        UNLOAD ('SELECT count(t1.id) AS count_1 FROM t1')
        TO 's3://bucket/key'
        CREDENTIALS '{creds}'
    """.format(creds=creds)
    assert clean(compile_query(unload)) == clean(expected_result)

def test_format():
    expected_result = """
        COPY t1 FROM 's3://mybucket/data/listing/'
        WITH CREDENTIALS AS '%s'
        FORMAT AS JSON AS 'auto'
        DELIMITER AS ','
        BLANKSASNULL
        EMPTYASNULL
        IGNOREHEADER AS 0
        TRUNCATECOLUMNS
    """ % creds
    copy = dialect.CopyCommand(
        tbl2,
        data_location='s3://mybucket/data/listing/',
        access_key_id=access_key_id,
        secret_access_key=secret_access_key,
        format='JSON',
        truncate_columns=True,
        delimiter=',',
        ignore_header=0,
        empty_as_null=True,
        blanks_as_null=True,
    )
    assert clean(expected_result) == clean(compile_query(copy))

def test_append__basic(kwargs, expected_query):
    source = sa.Table('src', sa.MetaData(), sa.Column('value', sa.Integer))
    target = sa.Table('trg', sa.MetaData(), sa.Column('value', sa.Integer))
    command = dialect.AlterTableAppendCommand(source, target, **kwargs)
    assert clean(compile_query(command)) == clean(expected_query)

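# `kwargs` and `expected_query` above are parametrized; the command compiles to
# Redshift's ALTER TABLE ... APPEND syntax, so a plausible (hypothetical)
# parameter list would look roughly like:
#
#   @pytest.mark.parametrize('kwargs, expected_query', [
#       ({}, 'ALTER TABLE trg APPEND FROM src'),
#       ({'ignore_extra': True},
#        'ALTER TABLE trg APPEND FROM src IGNOREEXTRA'),
#       ({'fill_target': True},
#        'ALTER TABLE trg APPEND FROM src FILLTARGET'),
#   ])
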
def test_delete_on_comma():
    del_stmt = sa.delete(ham).where(ham.c.id == hammy_spam.c.ham_id)
    expected = """
        DELETE FROM ham
        USING "ham, spam"
        WHERE ham.id = "ham, spam".ham_id"""
    assert clean(compile_query(del_stmt)) == clean(expected)

def test_delete_stmt_nowhereclause():
    del_stmt = sa.delete(customers)
    assert clean(compile_query(del_stmt)) == "DELETE FROM customers"