def _put_list_rm_files_in_stage(tmpdir, conn_cnx, db_parameters, elem):
    """Exercise PUT, LIST, GET and RM against an S3-backed named stage.

    ``elem`` supplies 's3location', 'stage_name' and 'data_file_name'.
    The stage is created with AWS credentials taken from the environment,
    a small CSV is uploaded and verified via LIST, downloaded via GET,
    then removed (and the stage dropped) in the ``finally`` clean-up.
    """
    s3location = elem['s3location']
    stage_name = elem['stage_name']
    data_file_name = elem['data_file_name']

    from io import open
    from snowflake.connector.compat import (UTF8, TO_UNICODE)

    # Write a tiny two-row CSV fixture to upload.
    tmp_dir = str(tmpdir.mkdir('data'))
    data_file = os.path.join(tmp_dir, data_file_name)
    with open(data_file, 'w', encoding=UTF8) as f:
        f.write(TO_UNICODE("123,456,string1\n"))
        f.write(TO_UNICODE("789,012,string2\n"))

    output_dir = str(tmpdir.mkdir('output'))

    with conn_cnx(
            user=db_parameters['s3_user'],
            account=db_parameters['s3_account'],
            password=db_parameters['s3_password']) as cnx:
        cnx.cursor().execute("""
create or replace stage {stage_name}
    url='s3://{s3location}'
    credentials=(
        AWS_KEY_ID='{aws_key_id}'
        AWS_SECRET_KEY='{aws_secret_key}')
""".format(
            s3location=s3location,
            stage_name=stage_name,
            aws_key_id=os.getenv('AWS_ACCESS_KEY_ID'),
            aws_secret_key=os.getenv('AWS_SECRET_ACCESS_KEY')))
    try:
        with conn_cnx(
                user=db_parameters['s3_user'],
                account=db_parameters['s3_account'],
                password=db_parameters['s3_password']) as cnx:
            # Start from an empty stage.
            cnx.cursor().execute("""
RM @{stage_name}
""".format(stage_name=stage_name))

            rec = cnx.cursor().execute("""
PUT file://{file} @{stage_name}
""".format(file=data_file, stage_name=stage_name)).fetchone()
            assert rec[0] == data_file_name
            assert rec[6] == 'UPLOADED'

            # fix: dropped the unused ``output_dir`` keyword that was
            # previously passed to this .format() call.
            rec = cnx.cursor().execute("""
LIST @{stage_name}
""".format(stage_name=stage_name)).fetchone()
            assert rec, 'LIST should return something'
            assert rec[0].startswith('s3://'), "The file location in S3"

            rec = cnx.cursor().execute("""
GET @{stage_name} file://{output_dir}
""".format(stage_name=stage_name, output_dir=output_dir)).fetchone()
            # PUT compresses by default, so the downloaded name gains .gz.
            assert rec[0] == data_file_name + '.gz'
            assert rec[2] == 'DOWNLOADED'
    finally:
        # Best-effort cleanup of both stage contents and the stage itself.
        with conn_cnx(
                user=db_parameters['s3_user'],
                account=db_parameters['s3_account'],
                password=db_parameters['s3_password']) as cnx:
            cnx.cursor().execute("""
RM @{stage_name}
""".format(stage_name=stage_name))
            cnx.cursor().execute(
                "drop stage if exists {stage_name}".format(
                    stage_name=stage_name))
def db_parameters():
    """
    Sets the db connection parameters
    """
    ret = {}
    os.environ['TZ'] = 'UTC'
    # fix: time.tzset() does not exist on Windows and raised
    # AttributeError; guard it (the sibling get_db_parameters() guards
    # the same call with an OS check).
    if hasattr(time, 'tzset'):
        time.tzset()
    for k, v in CONNECTION_PARAMETERS.items():
        ret[k] = v
    for k, v in DEFAULT_PARAMETERS.items():
        if k not in ret:
            ret[k] = v
    # Refuse to run against the placeholder account from the template.
    if 'account' in ret and ret['account'] == DEFAULT_PARAMETERS['account']:
        help()
        sys.exit(2)
    # Derive the host from the account when it was not given explicitly.
    if 'host' in ret and ret['host'] == DEFAULT_PARAMETERS['host']:
        ret['host'] = ret['account'] + '.snowflakecomputing.com'
    # a unique table name
    ret['name'] = ('sqlalchemy_tests_' + TO_UNICODE(uuid.uuid4())).replace(
        '-', '_')
    ret['schema'] = TEST_SCHEMA
    return ret
def test_concurrent_insert(conn_cnx, db_parameters):
    """
    Concurrent insert tests. Inserts block on the one that's running.
    """
    number_of_threads = 22  # change this to increase the concurrency
    expected_success_runs = number_of_threads - 1
    cnx_array = []
    try:
        with conn_cnx() as cnx:
            # Dedicated warehouse and target table for this test.
            cnx.cursor().execute("""
create or replace warehouse {0}
warehouse_type=standard
warehouse_size=small
""".format(db_parameters['name_wh']))
            sql = """
create or replace table {name} (c1 integer, c2 string)
""".format(name=db_parameters['name'])
            cnx.cursor().execute(sql)
            # One connection-parameter dict per worker thread; 'idx' tags
            # the row each worker inserts.
            for i in range(number_of_threads):
                cnx_array.append({
                    'host': db_parameters['host'],
                    'port': db_parameters['port'],
                    'user': db_parameters['user'],
                    'password': db_parameters['password'],
                    'account': db_parameters['account'],
                    'database': db_parameters['database'],
                    'schema': db_parameters['schema'],
                    'table': db_parameters['name'],
                    'idx': TO_UNICODE(i),
                    'warehouse': db_parameters['name_wh']
                })
            pool = ThreadPool(processes=number_of_threads)
            results = pool.map(
                _concurrent_insert, cnx_array)
            success = 0
            for record in results:
                success += 1 if record['success'] else 0
            # 21 threads or more
            assert success >= expected_success_runs, "Number of success run"
            c = cnx.cursor()
            sql = "select * from {name} order by 1".format(
                name=db_parameters['name'])
            c.execute(sql)
            for rec in c:
                logger.debug(rec)
            c.close()
    finally:
        # Drop both objects even when the assertions above fail.
        with conn_cnx() as cnx:
            cnx.cursor().execute(
                "drop table if exists {0}".format(db_parameters['name']))
            cnx.cursor().execute(
                "drop warehouse if exists {0}".format(db_parameters['name_wh']))
def test_different_key_length(is_public_test, request, conn_cnx, db_parameters):
    """Connect using RSA key pairs of several lengths (2048/4096/8192)."""
    if is_public_test:
        pytest.skip(
            'This test requires ACCOUNTADMIN privilege to set the public key')

    test_user = "******" + TO_UNICODE(uuid.uuid4()).replace('-', '_')

    db_config = {
        'protocol': db_parameters['protocol'],
        'account': db_parameters['account'],
        'user': test_user,
        'host': db_parameters['host'],
        'port': db_parameters['port'],
        'database': db_parameters['database'],
        'schema': db_parameters['schema'],
        'timezone': 'UTC',
    }

    def _drop_test_user():
        # Teardown: remove the temporary user regardless of the outcome.
        with conn_cnx() as cnx:
            cnx.cursor().execute("""
use role accountadmin
""")
            cnx.cursor().execute("""
drop user if exists {user}
""".format(user=test_user))

    request.addfinalizer(_drop_test_user)

    with conn_cnx() as cnx:
        cur = cnx.cursor()
        cur.execute("""
use role accountadmin
""")
        cur.execute("create user " + test_user)
        # Register each key length in turn and confirm a connection works.
        for key_length in (2048, 4096, 8192):
            private_key_der, public_key_der_encoded = generate_key_pair(
                key_length)
            cnx.cursor().execute("""
alter user {user} set rsa_public_key='{public_key}'
""".format(user=test_user, public_key=public_key_der_encoded))
            db_config['private_key'] = private_key_der
            with snowflake.connector.connect(**db_config) as _:
                pass
def test_is_dst():
    """
    SNOW-6020: Failed to convert to local time during DST is being changed
    """
    conv = DefaultConverterClass()()
    conv.set_parameter('TIMEZONE', 'America/Los_Angeles')

    # (epoch seconds, expected local time, description) — one case per
    # direction of the DST transition.
    cases = (
        ('1414890189.000', u'2014-11-01 18:03:09-07:00',
         'Timestamp during from DST to non-DST'),
        ('1425780189.000', u'2015-03-07 18:03:09-08:00',
         'Timestamp during from non-DST to DST'),
    )
    for epoch_str, expected, description in cases:
        col_meta = {
            'name': 'CREATED_ON',
            'type': 6,
            'length': None,
            'precision': None,
            'scale': 3,
            'nullable': True,
        }
        to_python = conv.to_python_method('TIMESTAMP_LTZ', col_meta)
        assert TO_UNICODE(to_python(epoch_str)) == expected, description
def db_parameters():
    """
    Sets the db connection parameters
    """
    os.environ['TZ'] = 'UTC'
    if not IS_WINDOWS:
        time.tzset()

    ret = {}
    # testaccount connection info
    ret.update(CONNECTION_PARAMETERS)
    for key, value in DEFAULT_PARAMETERS.items():
        ret.setdefault(key, value)

    # s3 testaccount connection info. Not available in TravisCI
    s3_source = CONNECTION_PARAMETERS_S3 or CONNECTION_PARAMETERS
    for key, value in s3_source.items():
        ret['s3_' + key] = value

    # azure testaccount connection info. Not available in TravisCI
    azure_source = CONNECTION_PARAMETERS_AZURE or CONNECTION_PARAMETERS
    for key, value in azure_source.items():
        ret['azure_' + key] = value

    # snowflake admin account. Not available in TravisCI
    for key, value in CONNECTION_PARAMETERS_ADMIN.items():
        ret['sf_' + key] = value

    if 'host' in ret and ret['host'] == DEFAULT_PARAMETERS['host']:
        ret['host'] = ret['account'] + '.snowflakecomputing.com'
    if 'account' in ret and ret['account'] == DEFAULT_PARAMETERS['account']:
        help()
        sys.exit(2)

    # a unique table name
    ret['name'] = 'python_tests_' + TO_UNICODE(uuid.uuid4()).replace('-', '_')
    ret['name_wh'] = ret['name'] + 'wh'
    ret['schema'] = TEST_SCHEMA
    return ret
def get_db_parameters():
    """
    Sets the db connection parameters
    """
    ret = {}
    os.environ['TZ'] = 'UTC'
    if not IS_WINDOWS:
        time.tzset()
    for k, v in CONNECTION_PARAMETERS.items():
        ret[k] = v
    for k, v in DEFAULT_PARAMETERS.items():
        if k not in ret:
            ret[k] = v
    # Refuse to run against the placeholder account from the template.
    if 'account' in ret and ret['account'] == DEFAULT_PARAMETERS['account']:
        help()
        sys.exit(2)
    # Derive the host from the account when it was not given explicitly.
    if 'host' in ret and ret['host'] == DEFAULT_PARAMETERS['host']:
        ret['host'] = ret['account'] + '.snowflakecomputing.com'
    # a unique table name
    ret['name'] = ('sqlalchemy_tests_' + TO_UNICODE(uuid.uuid4())).replace(
        '-', '_')
    ret['schema'] = TEST_SCHEMA
    # This reduces a chance to exposing password in test output.
    # fix: 17 copy-pasted assignments (a00..a16) collapsed into one loop.
    for i in range(17):
        ret['a{0:02d}'.format(i)] = 'dummy parameter'
    return ret
def test_cte(): from snowflake.sqlalchemy import snowdialect from datetime import date from sqlalchemy import (MetaData, Table, Column, Integer, Date, select, literal, and_, exists, create_engine) metadata = MetaData() visitors = Table('visitors', metadata, Column('product_id', Integer), Column('date1', Date), Column('count', Integer)) product_id = 1 day = date.today() count = 5 with_bar = select([literal(product_id), literal(day), literal(count)]).cte('bar') sel = select([with_bar]) ins = visitors.insert().from_select( [visitors.c.product_id, visitors.c.date1, visitors.c.count], sel) assert TO_UNICODE(ins.compile(dialect=snowdialect.dialect( ))) == u"""INSERT INTO visitors (product_id, date1, count) WITH bar AS
def test_cte():
    """Compile INSERT ... FROM SELECT over a CTE with the Snowflake dialect."""
    from snowflake.sqlalchemy import snowdialect
    from datetime import date
    from sqlalchemy import (MetaData, Table, Column, Integer, Date, select,
                            literal)

    visitors_tbl = Table(
        'visitors', MetaData(),
        Column('product_id', Integer),
        Column('date1', Date),
        Column('count', Integer))

    # The CTE supplies the three literal values to insert.
    bar_cte = select(
        [literal(1), literal(date.today()), literal(5)]).cte('bar')
    ins = visitors_tbl.insert().from_select(
        [visitors_tbl.c.product_id, visitors_tbl.c.date1,
         visitors_tbl.c.count],
        select([bar_cte]))

    compiled = TO_UNICODE(ins.compile(dialect=snowdialect.dialect()))
    assert compiled == (
        "INSERT INTO visitors (product_id, date1, count) WITH bar AS \n"
        "(SELECT %(param_1)s AS anon_1, %(param_2)s AS anon_2, %(param_3)s AS anon_3)\n"
        " SELECT bar.anon_1, bar.anon_2, bar.anon_3 \n"
        "FROM bar")
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. # from . import base from . import snowdialect from .custom_commands import (MergeInto, CSVFormatter, JSONFormatter, PARQUETFormatter, CopyIntoStorage, AWSBucket, AzureContainer) from .util import _url as URL from .version import VERSION from snowflake.connector.compat import TO_UNICODE from .custom_types import VARIANT, ARRAY, OBJECT, TIMESTAMP_LTZ, TIMESTAMP_TZ, TIMESTAMP_NTZ SNOWFLAKE_CONNECTOR_VERSION = '.'.join(TO_UNICODE(v) for v in VERSION[0:3]) base.dialect = dialect = snowdialect.dialect __version__ = SNOWFLAKE_CONNECTOR_VERSION __all__ = ('VARIANT', 'ARRAY', 'OBJECT', 'TIMESTAMP_LTZ', 'TIMESTAMP_TZ', 'TIMESTAMP_NTZ', 'MergeInto', 'CSVFormatter', 'JSONFormatter', 'PARQUETFormatter', 'CopyIntoStorage', 'AWSBucket', 'AzureContainer')
except: CONNECTION_PARAMETERS_ADMIN = {} import snowflake.connector from snowflake.connector.compat import (UTF8, TO_UNICODE, PY_ISSUE_23517, IS_WINDOWS) from snowflake.connector.converter import SnowflakeConverter from snowflake.connector.converter_issue23517 import ( SnowflakeConverterIssue23517) logger = getLogger(__name__) if os.getenv('TRAVIS') == 'true': TEST_SCHEMA = 'TRAVIS_JOB_{0}'.format(os.getenv('TRAVIS_JOB_ID')) else: TEST_SCHEMA = 'python_connector_tests_' + TO_UNICODE(uuid.uuid4()).replace( '-', '_') DEFAULT_PARAMETERS = { 'account': '<account_name>', 'user': '******', 'password': '******', 'database': '<database_name>', 'schema': '<schema_name>', 'protocol': 'https', 'host': '<host>', 'port': '443', } def help(): print("""Connection parameter must be specified in parameters.py,
def get_db_parameters():
    """
    Sets the db connection parameters
    """
    ret = {}
    os.environ['TZ'] = 'UTC'
    if not IS_WINDOWS:
        time.tzset()
    # testaccount connection info
    for k, v in CONNECTION_PARAMETERS.items():
        ret[k] = v
    for k, v in DEFAULT_PARAMETERS.items():
        if k not in ret:
            ret[k] = v
    # s3 testaccount connection info. Not available in TravisCI
    if CONNECTION_PARAMETERS_S3:
        for k, v in CONNECTION_PARAMETERS_S3.items():
            ret['s3_' + k] = v
    else:
        for k, v in CONNECTION_PARAMETERS.items():
            ret['s3_' + k] = v
    # azure testaccount connection info. Not available in TravisCI
    if CONNECTION_PARAMETERS_AZURE:
        for k, v in CONNECTION_PARAMETERS_AZURE.items():
            ret['azure_' + k] = v
    else:
        for k, v in CONNECTION_PARAMETERS.items():
            ret['azure_' + k] = v
    # snowflake admin account. Not available in TravisCI
    for k, v in CONNECTION_PARAMETERS_ADMIN.items():
        ret['sf_' + k] = v
    if 'host' in ret and ret['host'] == DEFAULT_PARAMETERS['host']:
        ret['host'] = ret['account'] + '.snowflakecomputing.com'
    if 'account' in ret and ret['account'] == DEFAULT_PARAMETERS['account']:
        help()
        sys.exit(2)
    # a unique table name
    ret['name'] = 'python_tests_' + TO_UNICODE(uuid.uuid4()).replace('-', '_')
    ret['name_wh'] = ret['name'] + 'wh'
    ret['schema'] = TEST_SCHEMA
    # This reduces a chance to exposing password in test output.
    # fix: 17 copy-pasted assignments (a00..a16) collapsed into one loop.
    for i in range(17):
        ret['a{0:02d}'.format(i)] = 'dummy parameter'
    return ret
def test_multiple_key_pair(is_public_test, request, conn_cnx, db_parameters):
    """Rotate between rsa_public_key and rsa_public_key_2 for one user.

    Registers key one and connects, verifies that connecting with key two
    fails with a JWT error (no password fallback), then registers key two
    as rsa_public_key_2 and connects successfully with it.
    """
    if is_public_test:
        pytest.skip(
            'This test requires ACCOUNTADMIN privilege to set the public key')

    test_user = "******" + TO_UNICODE(uuid.uuid4()).replace(
        '-', '_')
    db_config = {
        'protocol': db_parameters['protocol'],
        'account': db_parameters['account'],
        'user': test_user,
        'host': db_parameters['host'],
        'port': db_parameters['port'],
        'database': db_parameters['database'],
        'schema': db_parameters['schema'],
        'timezone': 'UTC',
    }

    def fin():
        # Teardown: drop the temporary user regardless of test outcome.
        with conn_cnx() as cnx:
            cnx.cursor().execute("""
use role accountadmin
""")
            cnx.cursor().execute("""
drop user if exists {user}
""".format(user=test_user))

    request.addfinalizer(fin)

    private_key_one_der, public_key_one_der_encoded = generate_key_pair(2048)
    private_key_two_der, public_key_two_der_encoded = generate_key_pair(2048)

    with conn_cnx() as cnx:
        cnx.cursor().execute("""
use role accountadmin
""")
        cnx.cursor().execute("""
create user {user}
""".format(user=test_user))
        cnx.cursor().execute("""
alter user {user} set rsa_public_key='{public_key}'
""".format(user=test_user, public_key=public_key_one_der_encoded))

    # Key one is registered: connecting with it must succeed.
    db_config['private_key'] = private_key_one_der
    with snowflake.connector.connect(**db_config) as _:
        pass

    # assert exception since different key pair is used
    db_config['private_key'] = private_key_two_der
    # although specifying password,
    # key pair authentication should used and it should fail since we don't do fall back
    db_config['password'] = '******'
    with pytest.raises(snowflake.connector.errors.DatabaseError) as exec_info:
        snowflake.connector.connect(**db_config)
    assert (exec_info.value.errno == 250001)
    assert (exec_info.value.sqlstate == '08001')
    assert ("JWT token is invalid" in exec_info.value.msg)

    with conn_cnx() as cnx:
        cnx.cursor().execute("""
use role accountadmin
""")
        cnx.cursor().execute("""
alter user {user} set rsa_public_key_2='{public_key}'
""".format(user=test_user, public_key=public_key_two_der_encoded))

    # db_config still holds private_key_two_der; now that its public half
    # is registered as rsa_public_key_2, the connection must succeed.
    with snowflake.connector.connect(**db_config) as _:
        pass
def _test_snow5871(conn_cnx, db_parameters,
                   number_of_threads=10,
                   rt_max_outgoing_rate=60,
                   rt_max_burst_size=1,
                   rt_max_borrowing_limt=1000,
                   rt_reset_period=10000):
    """
    SNOW-5871: rate limiting for creation of non-recycable objects

    Tightens the system-wide RT_* rate limits via the admin (sf_) account,
    then has ``number_of_threads`` workers each create a table and asserts
    at least one succeeds; restores the defaults in ``finally``.

    NOTE(review): ``rt_max_borrowing_limt`` keeps its misspelled name
    because callers may pass it as a keyword argument.
    """
    logger.debug((
        'number_of_threads = %s, rt_max_outgoing_rate = %s, '
        'rt_max_burst_size = %s, rt_max_borrowing_limt = %s, '
        'rt_reset_period = %s'),
        number_of_threads, rt_max_outgoing_rate, rt_max_burst_size,
        rt_max_borrowing_limt, rt_reset_period)
    # Apply the tightened limits with the admin account.
    with conn_cnx(user=db_parameters['sf_user'],
                  password=db_parameters['sf_password'],
                  account=db_parameters['sf_account']) as cnx:
        cnx.cursor().execute("""
alter system set
    RT_MAX_OUTGOING_RATE={},
    RT_MAX_BURST_SIZE={},
    RT_MAX_BORROWING_LIMIT={},
    RT_RESET_PERIOD={}""".format(
            rt_max_outgoing_rate, rt_max_burst_size,
            rt_max_borrowing_limt, rt_reset_period))
    try:
        with conn_cnx() as cnx:
            cnx.cursor().execute(
                "create or replace database {name}_db".format(
                    name=db_parameters['name']))
            # One work item per thread; each creates its own table.
            meta = []
            for i in range(number_of_threads):
                meta.append({'idx': TO_UNICODE(i + 1),
                             'cnx': cnx,
                             'name': db_parameters[
                                         'name'] + 'tbl_5871_' + TO_UNICODE(
                                 i + 1)})
            pool = ThreadPool(processes=number_of_threads)
            results = pool.map(_create_a_table, meta)
            success = 0
            for r in results:
                success += 1 if r['success'] else 0
            # at least one should be success
            assert success >= 1, 'success queries'
    finally:
        with conn_cnx() as cnx:
            cnx.cursor().execute(
                "drop database if exists {name}_db".format(
                    name=db_parameters['name']))
        # Restore the system defaults with the admin account.
        with conn_cnx(user=db_parameters['sf_user'],
                      password=db_parameters['sf_password'],
                      account=db_parameters['sf_account']) as cnx:
            cnx.cursor().execute("""
alter system set
    RT_MAX_OUTGOING_RATE=default,
    RT_MAX_BURST_SIZE=default,
    RT_RESET_PERIOD=default,
    RT_MAX_BORROWING_LIMIT=default""")
import uuid
from logging import getLogger

import pytest
from parameters import (CONNECTION_PARAMETERS)
from sqlalchemy import create_engine

import snowflake.connector
from snowflake.connector.compat import TO_UNICODE
from snowflake.sqlalchemy import URL, dialect

# NOTE(review): os.getenv is used below but no `import os` is visible in
# this chunk — presumably imported elsewhere in the file; confirm.
if os.getenv('TRAVIS') == 'true':
    # One schema per Travis job so concurrent builds do not collide.
    TEST_SCHEMA = 'TRAVIS_JOB_{0}'.format(os.getenv('TRAVIS_JOB_ID'))
else:
    # Unique schema name for a local run.
    TEST_SCHEMA = ('sqlalchemy_tests_' +
                   TO_UNICODE(uuid.uuid4()).replace('-', '_'))


def help():
    """Print instructions for supplying the required connection parameters."""
    print("""Connection parameter must be specified in parameters.py,
for example:
CONNECTION_PARAMETERS = {
    'account': 'testaccount',
    'user': '******',
    'password': '******',
    'database': 'testdb',
    'schema': 'public',
}""")


logger = getLogger(__name__)
def test_concurrent_multiple_user_queries(conn_cnx, db_parameters):
    """
    Multithreaded multiple users tests

    Lowers the query-gateway limits, runs 50 workers split between two
    users/warehouses, and asserts the combined number of successful
    queries matches the per-instance cap (within one).
    """
    max_per_user = 10
    max_per_account = 20
    max_per_instance = 10
    # Tighten the query-gateway limits via the admin (sf_) account.
    with conn_cnx(user=db_parameters['sf_user'],
                  password=db_parameters['sf_password'],
                  account=db_parameters['sf_account']) as cnx:
        cnx.cursor().execute("alter system set QUERY_GATEWAY_ENABLED=true")
        cnx.cursor().execute(
            "alter system set QUERY_GATEWAY_MAX_PER_USER={0}".format(
                max_per_user))
        cnx.cursor().execute(
            "alter system set QUERY_GATEWAY_MAX_PER_ACCOUNT={0}".format(
                max_per_account))
        cnx.cursor().execute(
            "alter system set QUERY_GATEWAY_MAX_PER_INSTANCE={0}".format(
                max_per_instance))
    try:
        with conn_cnx() as cnx:
            # Two warehouses and a second user (snowwoman) for user B.
            cnx.cursor().execute(
                "create or replace warehouse regress1 "
                "warehouse_type='medium' warehouse_size=small")
            cnx.cursor().execute(
                "create or replace warehouse regress2 "
                "warehouse_type='medium' warehouse_size=small")
            cnx.cursor().execute("use role securityadmin")
            cnx.cursor().execute("create or replace user snowwoman "
                                 "password='******'")
            cnx.cursor().execute("use role accountadmin")
            cnx.cursor().execute("grant role sysadmin to user snowwoman")
            cnx.cursor().execute("grant all on warehouse regress2 to sysadmin")
            cnx.cursor().execute(
                "alter user snowwoman set default_role=sysadmin")
        suc_cnt1 = 0
        suc_cnt2 = 0
        with conn_cnx() as cnx1:
            with conn_cnx(user='******', password='******') as cnx2:
                cnx1.cursor().execute('use warehouse regress1')
                cnx2.cursor().execute('use warehouse regress2')
                number_of_threads = 50
                # First half of the workers run as user A on cnx1, the
                # second half as user B on cnx2.
                meta = []
                for i in range(number_of_threads):
                    cnx = cnx1 if i < number_of_threads / 2 else cnx2
                    user = '******' if i < number_of_threads / 2 else 'B'
                    # NOTE(review): both branches produce the same value;
                    # presumably a leftover from an earlier variant.
                    idx = TO_UNICODE(i + 1) \
                        if i < number_of_threads / 2 \
                        else TO_UNICODE(i + 1)
                    meta.append({'user': user, 'idx': idx, 'cnx': cnx})
                pool = ThreadPool(processes=number_of_threads)
                all_results = pool.map(_run_more_query, meta)
                assert len(all_results) == number_of_threads, \
                    'total number of jobs'
                for r in all_results:
                    if r['user'] == 'A' and r['result'] > 0:
                        suc_cnt1 += 1
                    elif r['user'] == 'B' and r['result'] > 0:
                        suc_cnt2 += 1
                logger.debug("A success: %s", suc_cnt1)
                logger.debug("B success: %s", suc_cnt2)
                # NOTE: if the previous test cancels a query, the incoming
                # query counter may not be reduced asynchrously, so
                # the maximum number of runnable queries can be one less
                assert suc_cnt1 + suc_cnt2 in (max_per_instance * 2,
                                               max_per_instance * 2 - 1), \
                    'success queries for user A and B'
    finally:
        # Drop the warehouses and the extra user, then restore defaults.
        with conn_cnx() as cnx:
            cnx.cursor().execute("use role accountadmin")
            cnx.cursor().execute("drop warehouse if exists regress2")
            cnx.cursor().execute("drop warehouse if exists regress1")
            cnx.cursor().execute("use role securityadmin")
            cnx.cursor().execute("drop user if exists snowwoman")
        with conn_cnx(user=db_parameters['sf_user'],
                      password=db_parameters['sf_password'],
                      account=db_parameters['sf_account']) as cnx:
            cnx.cursor().execute(
                "alter system set QUERY_GATEWAY_MAX_PER_USER=default")
            cnx.cursor().execute(
                "alter system set QUERY_GATEWAY_MAX_PER_INSTANCE=default")
            cnx.cursor().execute(
                "alter system set QUERY_GATEWAY_MAX_PER_ACCOUNT=default")