# Example n. 1
def test_upload_one_file_to_s3_econnreset():
    """Tests upload to S3 with retry on retryable low-level socket errors.

    For errno.ECONNRESET, errno.ETIMEDOUT, errno.EPIPE and the catch-all -1
    the upload is retried up to DEFAULT_MAX_RETRY times, and the last
    attempted max_concurrency must not be recorded in the metadata.
    """
    for error_code in [errno.ECONNRESET, errno.ETIMEDOUT, errno.EPIPE, -1]:
        # upload_file always fails with a retryable SysCallError.
        upload_file = MagicMock(side_effect=OpenSSL.SSL.SysCallError(
            error_code, 'mock err. connection aborted'))
        s3object = MagicMock(metadata=defaultdict(str),
                             upload_file=upload_file)
        s3client = Mock()
        s3client.Object.return_value = s3object
        initial_parallel = 100
        upload_meta = {
            u'no_sleeping_time': True,
            u'parallel': initial_parallel,
            u'put_callback': None,
            u'put_callback_output_stream': None,
            u'existing_files': [],
            SHA256_DIGEST: '123456789abcdef',
            u'stage_location': 'sfc-teststage/rwyitestacco/users/1234/',
            u's3client': s3client,
            u'dst_file_name': 'data1.txt.gz',
            u'src_file_name': path.join(THIS_DIR, 'data', 'put_get_1.txt'),
        }
        upload_meta[u'real_src_file_name'] = upload_meta['src_file_name']
        upload_meta[u'upload_size'] = os.stat(
            upload_meta['src_file_name']).st_size
        # Fix: pytest.raises replaces the fragile try/raise/except pattern,
        # failing the test if no SysCallError escapes.
        with pytest.raises(OpenSSL.SSL.SysCallError):
            SnowflakeS3Util.upload_one_file_to_s3(upload_meta)
        assert upload_file.call_count == DEFAULT_MAX_RETRY
        # Retryable socket errors must not change the concurrency hint.
        assert 'last_max_concurrency' not in upload_meta
# Example n. 2
def test_upload_one_file_to_s3_unknown_openssl_error():
    """Tests upload to S3 failing with an unknown OpenSSL error.

    An errno the retry logic does not recognize must not be retried:
    upload_file is invoked exactly once.
    """
    for error_code in [123]:  # arbitrary errno unknown to the retry logic
        upload_file = MagicMock(side_effect=OpenSSL.SSL.SysCallError(
            error_code, 'mock err. connection aborted'))
        s3client = Mock()
        s3client.Object.return_value = MagicMock(metadata=defaultdict(str),
                                                 upload_file=upload_file)
        initial_parallel = 100
        upload_meta = {
            u'no_sleeping_time': True,
            u'parallel': initial_parallel,
            u'put_callback': None,
            u'put_callback_output_stream': None,
            u'existing_files': [],
            SHA256_DIGEST: '123456789abcdef',
            u'stage_location': 'sfc-teststage/rwyitestacco/users/1234/',
            u's3client': s3client,
            u'dst_file_name': 'data1.txt.gz',
            u'src_file_name': path.join(THIS_DIR, 'data', 'put_get_1.txt'),
        }
        upload_meta[u'real_src_file_name'] = upload_meta['src_file_name']
        upload_meta[u'upload_size'] = os.stat(
            upload_meta['src_file_name']).st_size
        # Fix: pytest.raises replaces the fragile try/raise/except pattern,
        # failing the test if no SysCallError escapes.
        with pytest.raises(OpenSSL.SSL.SysCallError):
            SnowflakeS3Util.upload_one_file_to_s3(upload_meta)
        # Unknown errors are not retried.
        assert upload_file.call_count == 1
# Example n. 3
def test_download_syscall_error(caplog, error_no, result_status):
    """Tests whether a syscall error is handled as expected when downloading."""
    caplog.set_level(logging.DEBUG, "snowflake.connector")
    s3_obj = MagicMock()
    syscall_err = OpenSSL.SSL.SysCallError(error_no)
    s3_obj.download_file.side_effect = syscall_err
    # Build the file metadata directly with keyword arguments.
    meta = SnowflakeFileMeta(
        name="f",
        stage_location_type="S3",
        client_meta=SFResourceMeta(cloud_client=s3_obj,
                                   stage_info={"location": "loc"}),
        sha256_digest="asd",
        src_file_name="f",
        src_file_size=99,
        get_callback_output_stream=None,
        show_progress_bar=False,
        get_callback=None,
    )
    patch_target = "snowflake.connector.s3_util.SnowflakeS3Util._get_s3_object"
    with mock.patch(patch_target, return_value=s3_obj):
        SnowflakeS3Util._native_download_file(meta, "f", 4)
    # The very exception instance raised must be recorded as last_error.
    assert meta.last_error is syscall_err
    assert meta.result_status == result_status
# Example n. 4
def test_download_expiry_error(caplog):
    """Tests whether token expiry error is handled as expected when downloading."""
    caplog.set_level(logging.DEBUG, "snowflake.connector")
    s3_obj = MagicMock()
    # The mocked download fails as if the AWS session token expired.
    s3_obj.download_file.side_effect = botocore.exceptions.ClientError(
        {"Error": {"Code": "ExpiredToken", "Message": "Just testing"}},
        "Testing")
    meta = SnowflakeFileMeta(
        name="f",
        src_file_name="f",
        stage_location_type="S3",
        sha256_digest="asd",
        client_meta=SFResourceMeta(cloud_client=s3_obj,
                                   stage_info={"location": "loc"}),
        src_file_size=99,
        get_callback_output_stream=None,
        show_progress_bar=False,
        get_callback=None,
    )
    patch_target = "snowflake.connector.s3_util.SnowflakeS3Util._get_s3_object"
    with mock.patch(patch_target, return_value=s3_obj):
        SnowflakeS3Util._native_download_file(meta, "f", 4)
    # Expired token maps to RENEW_TOKEN so the caller refreshes credentials.
    assert meta.result_status == ResultStatus.RENEW_TOKEN
def test_download_unknown_error(caplog):
    """Tests whether an unknown error is handled as expected when downloading."""
    caplog.set_level(logging.DEBUG, 'snowflake.connector')
    s3_obj = MagicMock()
    s3_obj.download_file.side_effect = botocore.exceptions.ClientError(
        {'Error': {'Code': 'unknown', 'Message': 'Just testing'}}, 'Testing')
    meta = SnowflakeFileMeta(
        name='f',
        src_file_name='f',
        stage_location_type='S3',
        client_meta=SFResourceMeta(cloud_client=s3_obj,
                                   stage_info={'location': 'loc'}),
        sha256_digest='asd',
        src_file_size=99,
        get_callback_output_stream=None,
        show_progress_bar=False,
        get_callback=None,
    )
    expected_msg = (r'An error occurred \(unknown\) when calling the '
                    r'Testing operation: Just testing')
    patch_target = 'snowflake.connector.s3_util.SnowflakeS3Util._get_s3_object'
    # Unknown client errors must be re-raised to the caller.
    with mock.patch(patch_target, return_value=s3_obj):
        with pytest.raises(botocore.exceptions.ClientError,
                           match=expected_msg):
            SnowflakeS3Util._native_download_file(meta, 'f', 4)
    expected_record = (
        'snowflake.connector.s3_util',
        logging.DEBUG,
        'Failed to download a file: f, err: An error occurred (unknown) when '
        'calling the Testing operation: Just testing')
    assert expected_record in caplog.record_tuples
def test_upload_unknown_error(caplog):
    """Tests whether unknown errors are handled as expected when uploading."""
    caplog.set_level(logging.DEBUG, 'snowflake.connector')
    s3_resource = MagicMock()
    s3_obj = MagicMock()
    s3_obj.bucket_name = 'bucket'
    s3_obj.key = 'key'
    s3_obj.upload_file.side_effect = botocore.exceptions.ClientError(
        {'Error': {'Code': 'unknown', 'Message': 'Just testing'}}, 'Testing')
    s3_resource.Object.return_value = s3_obj
    meta = SnowflakeFileMeta(
        name='f',
        src_file_name='f',
        stage_location_type='S3',
        client_meta=SFResourceMeta(cloud_client=s3_resource,
                                   stage_info={'location': 'loc'}),
        sha256_digest='asd',
        dst_file_name='f',
        put_callback=None,
    )
    expected_msg = (r'An error occurred \(unknown\) when calling the '
                    r'Testing operation: Just testing')
    # Unknown client errors must be re-raised to the caller.
    with mock.patch('snowflake.connector.s3_util.SnowflakeS3Util'
                    '.extract_bucket_name_and_path'):
        with pytest.raises(botocore.exceptions.ClientError,
                           match=expected_msg):
            SnowflakeS3Util.upload_file('f', meta, {}, 4, 67108864)
# Example n. 7
def test_download_expiry_error(caplog):
    """Tests whether token expiry error is handled as expected when downloading.

    An ExpiredToken ClientError from S3 must not propagate: the metadata's
    'result_status' is set to ResultStatus.RENEW_TOKEN instead.
    """
    caplog.set_level(logging.DEBUG, 'snowflake.connector')
    mock_resource = MagicMock()
    # Simulate AWS rejecting the download because the session token expired.
    mock_resource.download_file.side_effect = botocore.exceptions.ClientError(
        {'Error': {
            'Code': 'ExpiredToken',
            'Message': 'Just testing'
        }}, 'Testing')
    # Minimal metadata dict accepted by _native_download_file (older
    # dict-based API of this test suite).
    meta = {
        'client': mock_resource,
        'sha256_digest': 'asd',
        'stage_info': {
            'location': 'loc'
        },
        'src_file_name': 'f',
        'src_file_size': 99,
        'get_callback_output_stream': None,
        'show_progress_bar': False,
        'get_callback': None
    }
    # Patch the S3 object factory so no real AWS resource is created.
    with mock.patch(
            'snowflake.connector.s3_util.SnowflakeS3Util._get_s3_object',
            return_value=mock_resource):
        SnowflakeS3Util._native_download_file(meta, 'f', 4)
    # RENEW_TOKEN signals the caller to refresh credentials and retry.
    assert meta['result_status'] == ResultStatus.RENEW_TOKEN
def test_get_header_expiry_error(caplog):
    """Tests whether token expiry error is handled as expected when getting header.

    An ExpiredToken ClientError must be logged at DEBUG and mapped to
    ResultStatus.RENEW_TOKEN in the metadata.
    """
    # Fix: capture connector DEBUG records explicitly; without set_level the
    # record_tuples assertion below can fail under the default log level
    # (the sibling variant of this test sets it).
    caplog.set_level(logging.DEBUG, 'snowflake.connector')
    meta = {}
    mock_resource = MagicMock()
    # S3.Object.load rejects the call with an expired-token error.
    mock_resource.load.side_effect = botocore.exceptions.ClientError(
        {'Error': {'Code': 'ExpiredToken', 'Message': 'Just testing'}}, 'Testing')
    with mock.patch('snowflake.connector.s3_util.SnowflakeS3Util._get_s3_object', return_value=mock_resource):
        SnowflakeS3Util.get_file_header(meta, 'file.txt')
    assert ('snowflake.connector.s3_util', logging.DEBUG, 'AWS Token expired. Renew and retry') in caplog.record_tuples
    assert meta['result_status'] == ResultStatus.RENEW_TOKEN
# Example n. 9
def test_extract_bucket_name_and_path():
    """Extract bucket name and S3 path from various stage locations."""
    util = SnowflakeS3Util()

    # (stage location, expected bucket name, expected S3 path)
    cases = [
        ('sfc-dev1-regression/test_sub_dir/',
         'sfc-dev1-regression', 'test_sub_dir/'),
        ('sfc-dev1-regression/stakeda/test_stg/test_sub_dir/',
         'sfc-dev1-regression', 'stakeda/test_stg/test_sub_dir/'),
        ('sfc-dev1-regression/', 'sfc-dev1-regression', ''),
        ('sfc-dev1-regression//', 'sfc-dev1-regression', '/'),
        ('sfc-dev1-regression///', 'sfc-dev1-regression', '//'),
    ]
    for location, expected_bucket, expected_path in cases:
        s3_loc = util.extract_bucket_name_and_path(location)
        assert s3_loc.bucket_name == expected_bucket
        assert s3_loc.s3path == expected_path
def test_pretend_to_put_but_list(tmpdir, conn_cnx, db_parameters):
    """[s3] SNOW-6154: Pretends to PUT but LIST.

    The temporary AWS credentials returned by a PUT are expected not to
    permit listing the stage's bucket (verified via the ClientError).
    """
    # create a data file
    fname = str(tmpdir.join('test_put_get_with_aws_token.txt'))
    with gzip.open(fname, 'wb') as f:
        f.write("123,test1\n456,test2".encode(UTF8))
    table_name = random_string(5, 'snow6154_list_')

    with conn_cnx() as cnx:
        cnx.cursor().execute(
            "create or replace table {} (a int, b string)".format(table_name))
        # _execute_helper returns the raw server response, including the
        # stage location and temporary AWS credentials for the PUT.
        ret = cnx.cursor()._execute_helper("put file://{} @%{}".format(
            fname, table_name))
        stage_location = ret['data']['stageInfo']['location']
        stage_credentials = ret['data']['stageInfo']['creds']

        s3location = SnowflakeS3Util.extract_bucket_name_and_path(
            stage_location)

        # listing
        client = boto3.resource(
            's3',
            aws_access_key_id=stage_credentials['AWS_ID'],
            aws_secret_access_key=stage_credentials['AWS_KEY'],
            aws_session_token=stage_credentials['AWS_TOKEN'])
        from botocore.exceptions import ClientError
        # Listing the bucket with PUT-scoped credentials must be denied.
        with pytest.raises(ClientError):
            _s3bucket_list(client, s3location.bucket_name)
def test_download_syscall_error(caplog, error_no, result_status):
    """Tests whether a syscall error is handled as expected when downloading."""
    s3_obj = MagicMock()
    syscall_err = OpenSSL.SSL.SysCallError(error_no)
    s3_obj.download_file.side_effect = syscall_err
    meta = dict(client=s3_obj,
                sha256_digest='asd',
                stage_info={'location': 'loc'},
                src_file_name='f',
                src_file_size=99,
                get_callback_output_stream=None,
                show_progress_bar=False,
                get_callback=None)
    patch_target = 'snowflake.connector.s3_util.SnowflakeS3Util._get_s3_object'
    with mock.patch(patch_target, return_value=s3_obj):
        SnowflakeS3Util._native_download_file(meta, 'f', 4)
    # The exact exception instance must be recorded as last_error.
    assert meta['last_error'] is syscall_err
    assert meta['result_status'] == result_status
# Example n. 12
def test_upload_expiry_error(caplog):
    """Tests whether token expiry error is handled as expected when uploading.

    An ExpiredToken ClientError from S3 must not propagate: upload_file
    returns None and the metadata's 'result_status' becomes RENEW_TOKEN.
    """
    caplog.set_level(logging.DEBUG, 'snowflake.connector')
    mock_resource, mock_object = MagicMock(), MagicMock()
    mock_resource.Object.return_value = mock_object
    # Simulate AWS rejecting the upload because the session token expired.
    mock_object.upload_file.side_effect = botocore.exceptions.ClientError(
        {'Error': {
            'Code': 'ExpiredToken',
            'Message': 'Just testing'
        }}, 'Testing')
    # Minimal metadata dict (older dict-based API of this test suite).
    meta = {
        'client': mock_resource,
        'sha256_digest': 'asd',
        'stage_info': {
            'location': 'loc'
        },
        'dst_file_name': 'f',
        'put_callback': None
    }
    # Patch bucket/path extraction so the mocked client is used as-is.
    with mock.patch(
            'snowflake.connector.s3_util.SnowflakeS3Util.extract_bucket_name_and_path'
    ):
        # upload_file returns None when the token must be renewed.
        assert SnowflakeS3Util.upload_file('f', meta, {}, 4) is None
    assert ('snowflake.connector.s3_util', logging.DEBUG,
            'AWS Token expired. Renew and retry') in caplog.record_tuples
    assert meta['result_status'] == ResultStatus.RENEW_TOKEN
# Example n. 13
def test_upload_failed_error(caplog):
    """Tests whether token expiry error is handled as expected when uploading."""
    caplog.set_level(logging.DEBUG, "snowflake.connector")
    s3_resource = MagicMock()
    s3_obj = MagicMock()
    s3_resource.Object.return_value = s3_obj
    s3_obj.upload_file.side_effect = S3UploadFailedError("ExpiredToken")
    meta = SnowflakeFileMeta(
        name="f",
        src_file_name="f",
        stage_location_type="S3",
        client_meta=SFResourceMeta(cloud_client=s3_resource,
                                   stage_info={"location": "loc"}),
        sha256_digest="asd",
        dst_file_name="f",
        put_callback=None,
    )
    patch_target = ("snowflake.connector.s3_util.SnowflakeS3Util"
                    ".extract_bucket_name_and_path")
    with mock.patch(patch_target):
        # None return signals the token must be renewed and the upload retried.
        assert SnowflakeS3Util.upload_file("f", meta, {}, 4, 67108864) is None
    expected_record = (
        "snowflake.connector.s3_util",
        logging.DEBUG,
        "Failed to upload a file: f, err: ExpiredToken. Renewing AWS Token and Retrying",
    )
    assert expected_record in caplog.record_tuples
    assert meta.result_status == ResultStatus.RENEW_TOKEN
# Example n. 14
def test_get_s3_file_object_http_400_error():
    """Tests Get S3 file object with HTTP 400 error.

    Looks like HTTP 400 is returned when AWS token expires and S3.Object.load is called.
    """
    bad_request = botocore.exceptions.ClientError(
        {"Error": {"Code": "400", "Message": "Bad Request"}},
        operation_name="mock load")
    s3object = MagicMock(load=MagicMock(side_effect=bad_request))
    client = Mock()
    client.Object.return_value = s3object
    client.load.return_value = None
    type(client).s3path = PropertyMock(return_value="s3://testbucket/")
    meta = SnowflakeFileMeta(
        name="data1.txt.gz",
        stage_location_type="S3",
        src_file_name=path.join(THIS_DIR, "../data", "put_get_1.txt"),
        client_meta=SFResourceMeta(
            cloud_client=client,
            stage_info={
                "location": "sfc-teststage/rwyitestacco/users/1234/",
                "locationType": "S3",
            }),
    )
    header = SnowflakeS3Util.get_file_header(meta, "/path1/file2.txt")
    # HTTP 400 yields no header and asks the caller to renew the token.
    assert header is None
    assert meta.result_status == ResultStatus.RENEW_TOKEN
def test_get_s3_file_object_http_400_error():
    """Tests Get S3 file object with HTTP 400 error.

    Looks like HTTP 400 is returned when AWS token expires and S3.Object.load is called.
    """
    # S3.Object.load fails with a generic HTTP 400 ClientError.
    load_method = MagicMock(
        side_effect=botocore.exceptions.ClientError(
            {'Error': {'Code': '400', 'Message': 'Bad Request'}},
            operation_name='mock load'))
    s3object = MagicMock(load=load_method)
    client = Mock()
    client.Object.return_value = s3object
    client.load.return_value = None
    type(client).s3path = PropertyMock(return_value='s3://testbucket/')
    # Minimal metadata dict (older dict-based API of this test suite).
    meta = {
        'client': client,
        'stage_info': {
            'location': 'sfc-teststage/rwyitestacco/users/1234/',
            'locationType': 'S3',
        }
    }
    filename = "/path1/file2.txt"
    akey = SnowflakeS3Util.get_file_header(meta, filename)
    # HTTP 400 is treated like an expired token: no header, RENEW_TOKEN.
    assert akey is None
    assert meta['result_status'] == ResultStatus.RENEW_TOKEN
def test_download_retry_exceeded_error(caplog):
    """Tests whether a retry exceeded error is handled as expected when downloading."""
    s3_obj = MagicMock()
    retries_err = RetriesExceededError(Boto3Error())
    s3_obj.download_file.side_effect = retries_err
    meta = dict(client=s3_obj,
                sha256_digest='asd',
                stage_info={'location': 'loc'},
                src_file_name='f',
                src_file_size=99,
                get_callback_output_stream=None,
                show_progress_bar=False,
                get_callback=None)
    patch_target = 'snowflake.connector.s3_util.SnowflakeS3Util._get_s3_object'
    with mock.patch(patch_target, return_value=s3_obj):
        SnowflakeS3Util._native_download_file(meta, 'f', 4)
    # Exhausted retries record the exception and request another attempt.
    assert meta['last_error'] is retries_err
    assert meta['result_status'] == ResultStatus.NEED_RETRY
def test_pretend_to_put_but_list(tmpdir, conn_cnx, db_parameters):
    """
    [s3] SNOW-6154: pretend to PUT but LIST
    """
    # create a data file; fix: the context manager guarantees the gzip
    # stream is flushed and the handle closed even if the write raises
    # (the original leaked `f` on a write failure).
    fname = str(tmpdir.join('test_put_get_with_aws_token.txt'))
    with gzip.open(fname, 'wb') as f:
        f.write("123,test1\n456,test2".encode(UTF8))

    with conn_cnx(
            user=db_parameters['s3_user'],
            account=db_parameters['s3_account'],
            password=db_parameters['s3_password']) as cnx:
        cnx.cursor().execute(
            "create or replace table snow6154 (a int, b string)")
        # _execute_helper exposes the raw server response, including the
        # stage location and the temporary AWS credentials for the PUT.
        ret = cnx.cursor()._execute_helper(
            "put file://{} @%snow6154".format(fname))
        stage_location = ret['data']['stageInfo']['location']
        stage_credentials = ret['data']['stageInfo']['creds']

        s3location = SnowflakeS3Util.extract_bucket_name_and_path(
            stage_location)

        # listing with the PUT-scoped credentials is expected to be denied
        client = boto3.resource(
            's3',
            aws_access_key_id=stage_credentials['AWS_ID'],
            aws_secret_access_key=stage_credentials['AWS_KEY'],
            aws_session_token=stage_credentials['AWS_TOKEN'])
        with pytest.raises(Exception):
            _s3bucket_list(client, s3location.bucket_name)
# Example n. 18
def test_upload_one_file_to_s3_wsaeconnaborted():
    """Tests upload to S3 with retry on ERRORNO_WSAECONNABORTED.

    The last attempted max_concurrency should be reduced with every retry,
    ending at initial_parallel / DEFAULT_MAX_RETRY but never below 1.
    """
    upload_file = MagicMock(side_effect=OpenSSL.SSL.SysCallError(
        ERRORNO_WSAECONNABORTED, 'mock err. connection aborted'))
    s3object = MagicMock(metadata=defaultdict(str), upload_file=upload_file)
    s3client = Mock()
    s3client.Object.return_value = s3object
    upload_meta = {
        u'no_sleeping_time': True,
        u'put_callback': None,
        u'put_callback_output_stream': None,
        u'existing_files': [],
        u's3client': s3client,
        SHA256_DIGEST: '123456789abcdef',
        u'stage_location': 'sfc-customer-stage/rwyi-testacco/users/9220/',
        u'dst_file_name': 'data1.txt.gz',
        u'src_file_name': path.join(THIS_DIR, 'data', 'put_get_1.txt'),
    }
    upload_meta[u'real_src_file_name'] = upload_meta['src_file_name']
    upload_meta[u'upload_size'] = os.stat(upload_meta['src_file_name']).st_size

    def _assert_retries_and_concurrency(parallel, expected_concurrency):
        # One upload attempt on a fresh copy of the metadata: it must
        # exhaust DEFAULT_MAX_RETRY retries and record the reduced
        # last_max_concurrency. (Fix: pytest.raises replaces the duplicated
        # try/raise/except blocks of the original.)
        upload_file.reset_mock()
        meta = dict(upload_meta, parallel=parallel)
        with pytest.raises(OpenSSL.SSL.SysCallError):
            SnowflakeS3Util.upload_one_file_to_s3(meta)
        assert upload_file.call_count == DEFAULT_MAX_RETRY
        assert meta['last_max_concurrency'] == expected_concurrency

    initial_parallel = 100
    _assert_retries_and_concurrency(
        initial_parallel, initial_parallel / DEFAULT_MAX_RETRY)
    # min parallel == 1
    _assert_retries_and_concurrency(4, 1)
# Example n. 19
def test_download_unknown_error(caplog):
    """Tests whether an unknown error is handled as expected when downloading."""
    caplog.set_level(logging.DEBUG, "snowflake.connector")
    s3_obj = MagicMock()
    s3_obj.download_file.side_effect = botocore.exceptions.ClientError(
        {"Error": {"Code": "unknown", "Message": "Just testing"}}, "Testing")
    meta = SnowflakeFileMeta(
        name="f",
        src_file_name="f",
        stage_location_type="S3",
        client_meta=SFResourceMeta(cloud_client=s3_obj,
                                   stage_info={"location": "loc"}),
        sha256_digest="asd",
        src_file_size=99,
        get_callback_output_stream=None,
        show_progress_bar=False,
        get_callback=None,
    )
    expected_msg = (r"An error occurred \(unknown\) when calling the "
                    r"Testing operation: Just testing")
    patch_target = "snowflake.connector.s3_util.SnowflakeS3Util._get_s3_object"
    # Unknown client errors must be re-raised to the caller.
    with mock.patch(patch_target, return_value=s3_obj):
        with pytest.raises(botocore.exceptions.ClientError,
                           match=expected_msg):
            SnowflakeS3Util._native_download_file(meta, "f", 4)
    expected_record = (
        "snowflake.connector.s3_util",
        logging.DEBUG,
        "Failed to download a file: f, err: An error occurred (unknown) when "
        "calling the Testing operation: Just testing",
    )
    assert expected_record in caplog.record_tuples
# Example n. 20
def test_get_header_expiry_error(caplog):
    """Tests whether token expiry error is handled as expected when getting header."""
    caplog.set_level(logging.DEBUG, "snowflake.connector")
    # NOTE(review): MINIMAL_METADATA is shared module-level state and its
    # result_status is mutated below -- confirm other tests don't rely on it.
    meta = MINIMAL_METADATA
    mock_resource = MagicMock()
    # S3.Object.load rejects the call with an expired-token error.
    mock_resource.load.side_effect = botocore.exceptions.ClientError(
        {"Error": {
            "Code": "ExpiredToken",
            "Message": "Just testing"
        }}, "Testing")
    with mock.patch(
            "snowflake.connector.s3_util.SnowflakeS3Util._get_s3_object",
            return_value=mock_resource,
    ):
        SnowflakeS3Util.get_file_header(meta, "file.txt")
    # The expiry is logged at DEBUG and surfaced via result_status.
    assert (
        "snowflake.connector.s3_util",
        logging.DEBUG,
        "AWS Token expired. Renew and retry",
    ) in caplog.record_tuples
    assert meta.result_status == ResultStatus.RENEW_TOKEN
def test_put_with_invalid_token(tmpdir, conn_cnx, db_parameters):
    """
    [s3] SNOW-6154: Use invalid combination of AWS credential
    """
    # create a data file; fix: the context manager guarantees the gzip
    # stream is flushed and the handle closed even if the write raises
    # (the original leaked `f` on a write failure).
    fname = str(tmpdir.join('test_put_get_with_aws_token.txt.gz'))
    with gzip.open(fname, 'wb') as f:
        f.write("123,test1\n456,test2".encode(UTF8))

    with conn_cnx(
            user=db_parameters['s3_user'],
            account=db_parameters['s3_account'],
            password=db_parameters['s3_password']) as cnx:
        cnx.cursor().execute(
            "create or replace table snow6154 (a int, b string)")
        # _execute_helper exposes the raw server response, including the
        # stage location and the temporary AWS credentials for the PUT.
        ret = cnx.cursor()._execute_helper(
            "put file://{} @%snow6154".format(fname))
        stage_location = ret['data']['stageInfo']['location']
        stage_credentials = ret['data']['stageInfo']['creds']

        s3location = SnowflakeS3Util.extract_bucket_name_and_path(
            stage_location)

        s3path = s3location.s3path + os.path.basename(fname) + ".gz"

        # positive case
        client = boto3.resource(
            's3',
            aws_access_key_id=stage_credentials['AWS_ID'],
            aws_secret_access_key=stage_credentials['AWS_KEY'],
            aws_session_token=stage_credentials['AWS_TOKEN'])

        client.meta.client.upload_file(
            fname, s3location.bucket_name, s3path)

        # negative: wrong location, attempting to put the file in the
        # parent path
        parent_s3path = os.path.dirname(os.path.dirname(s3path)) + '/'

        with pytest.raises(Exception):
            client.meta.client.upload_file(
                fname, s3location.bucket_name, parent_s3path)

        # negative: missing AWS_TOKEN
        client = boto3.resource(
            's3',
            aws_access_key_id=stage_credentials['AWS_ID'],
            aws_secret_access_key=stage_credentials['AWS_KEY'])
        with pytest.raises(Exception):
            client.meta.client.upload_file(
                fname, s3location.bucket_name, s3path)
def test_download_syscall_error(caplog, error_no, result_status):
    """Tests whether a syscall error is handled as expected when downloading."""
    caplog.set_level(logging.DEBUG, 'snowflake.connector')
    s3_obj = MagicMock()
    syscall_err = OpenSSL.SSL.SysCallError(error_no)
    s3_obj.download_file.side_effect = syscall_err
    meta = SnowflakeFileMeta(
        name='f',
        stage_location_type='S3',
        client_meta=SFResourceMeta(cloud_client=s3_obj,
                                   stage_info={'location': 'loc'}),
        sha256_digest='asd',
        src_file_name='f',
        src_file_size=99,
        get_callback_output_stream=None,
        show_progress_bar=False,
        get_callback=None,
    )
    patch_target = 'snowflake.connector.s3_util.SnowflakeS3Util._get_s3_object'
    with mock.patch(patch_target, return_value=s3_obj):
        SnowflakeS3Util._native_download_file(meta, 'f', 4)
    # The exact exception instance must be recorded as last_error.
    assert meta.last_error is syscall_err
    assert meta.result_status == result_status
# Example n. 23
def test_upload_unknown_error(caplog):
    """Tests whether unknown errors are handled as expected when uploading.

    A ClientError whose code the uploader does not recognize must be
    re-raised to the caller (asserted via pytest.raises below).
    """
    caplog.set_level(logging.DEBUG, "snowflake.connector")
    mock_resource, mock_object = MagicMock(), MagicMock()
    mock_resource.Object.return_value = mock_object
    mock_object.bucket_name = "bucket"
    mock_object.key = "key"
    # Unknown error code: neither retryable nor a token-expiry case.
    mock_object.upload_file.side_effect = botocore.exceptions.ClientError(
        {"Error": {
            "Code": "unknown",
            "Message": "Just testing"
        }}, "Testing")
    client_meta = {
        "cloud_client": mock_resource,
        "stage_info": {
            "location": "loc"
        },
    }
    meta = {
        "name": "f",
        "src_file_name": "f",
        "stage_location_type": "S3",
        "client_meta": SFResourceMeta(**client_meta),
        "sha256_digest": "asd",
        "dst_file_name": "f",
        "put_callback": None,
    }
    meta = SnowflakeFileMeta(**meta)
    # Patch bucket/path extraction so the mocked client is used as-is.
    with mock.patch(
            "snowflake.connector.s3_util.SnowflakeS3Util.extract_bucket_name_and_path"
    ):
        with pytest.raises(
                botocore.exceptions.ClientError,
                match=
                r"An error occurred \(unknown\) when calling the Testing operation: Just testing",
        ):
            SnowflakeS3Util.upload_file("f", meta, {}, 4, 67108864)
def test_get_header_unexpected_error(caplog):
    """Tests whether unexpected errors are handled as expected when getting header.

    An unrecognized ClientError code must yield a None header, a DEBUG log
    record, and result_status == ERROR.
    """
    # Fix: capture connector DEBUG records explicitly; without set_level the
    # record_tuples assertion below can fail under the default log level
    # (the sibling variant of this test sets it).
    caplog.set_level(logging.DEBUG, 'snowflake.connector')
    meta = {}
    mock_resource = MagicMock()
    mock_resource.load.side_effect = botocore.exceptions.ClientError(
        {'Error': {'Code': '???', 'Message': 'Just testing'}}, 'Testing')
    mock_resource.bucket_name = "bucket"
    mock_resource.key = "key"
    with mock.patch('snowflake.connector.s3_util.SnowflakeS3Util._get_s3_object', return_value=mock_resource):
        assert SnowflakeS3Util.get_file_header(meta, 'file.txt') is None
    assert ('snowflake.connector.s3_util',
            logging.DEBUG,
            'Failed to get metadata for bucket, key: An error occurred (???) when calling '
            'the Testing operation: Just testing') in caplog.record_tuples
    assert meta['result_status'] == ResultStatus.ERROR
def test_upload_failed_error(caplog):
    """Tests whether token expiry error is handled as expected when uploading.

    An S3UploadFailedError('ExpiredToken') must be swallowed (None return),
    logged at DEBUG, and mapped to ResultStatus.RENEW_TOKEN.
    """
    # Fix: capture connector DEBUG records explicitly; without set_level the
    # record_tuples assertion below can fail under the default log level
    # (the sibling variant of this test sets it).
    caplog.set_level(logging.DEBUG, 'snowflake.connector')
    mock_resource, mock_object = MagicMock(), MagicMock()
    mock_resource.Object.return_value = mock_object
    mock_object.upload_file.side_effect = S3UploadFailedError('ExpiredToken')
    meta = {'client': mock_resource,
            'sha256_digest': 'asd',
            'stage_info': {'location': 'loc'},
            'dst_file_name': 'f',
            'put_callback': None}
    with mock.patch('snowflake.connector.s3_util.SnowflakeS3Util.extract_bucket_name_and_path'):
        assert SnowflakeS3Util.upload_file('f', meta, {}, 4) is None
    assert ('snowflake.connector.s3_util',
            logging.DEBUG,
            'Failed to upload a file: f, err: ExpiredToken. Renewing AWS Token and Retrying') in caplog.record_tuples
    assert meta['result_status'] == ResultStatus.RENEW_TOKEN
# Example n. 26
def test_encrypt_decrypt_large_file(tmpdir, test_files):
    """Encrypt and decrypt a large generated file and verify its line count."""
    s3_util = SnowflakeS3Util()
    s3_metadata = {}

    encryption_material = SnowflakeS3FileEncryptionMaterial(
        query_stage_master_key='ztke8tIdVt1zmlQIZm0BMA==',
        query_id='123873c7-3a66-40c4-ab89-e3722fbccce1',
        smk_id=3112)

    # generates N files
    number_of_files = 1
    number_of_lines = 10000
    tmp_dir = test_files(tmpdir, number_of_lines, number_of_files)

    files = glob.glob(os.path.join(tmp_dir, 'file*'))
    input_file = files[0]
    encrypted_file = None
    decrypted_file = None
    try:
        encrypted_file = s3_util.encrypt_file(s3_metadata, encryption_material,
                                              input_file)
        decrypted_file = s3_util.decrypt_file(s3_metadata, encryption_material,
                                              encrypted_file)

        # Count the decrypted lines; fix: `with` closes the handle (the
        # original leaked the file descriptor) and the unused `contents`
        # accumulator is dropped.
        cnt = 0
        with codecs.open(decrypted_file, 'r', encoding=UTF8) as fd:
            for _ in fd:
                cnt += 1
        assert cnt == number_of_lines, "number of lines"
    finally:
        # Always clean up the temporary artifacts, pass or fail.
        os.remove(input_file)
        if encrypted_file:
            os.remove(encrypted_file)
        if decrypted_file:
            os.remove(decrypted_file)
# Example n. 27
def test_encrypt_decrypt_file():
    """
    Encrypt and Decrypt a file.

    Writes a small known payload, round-trips it through ``encrypt_file``
    / ``decrypt_file``, and asserts the decrypted contents equal the
    original data.
    """
    s3_util = SnowflakeS3Util()
    s3_metadata = {}

    encryption_material = SnowflakeS3FileEncryptionMaterial(
        query_stage_master_key='ztke8tIdVt1zmlQIZm0BMA==',
        query_id='123873c7-3a66-40c4-ab89-e3722fbccce1',
        smk_id=3112)
    data = 'test data'
    input_fd, input_file = tempfile.mkstemp()
    encrypted_file = None
    decrypted_file = None
    try:
        with codecs.open(input_file, 'w', encoding=UTF8) as fd:
            fd.write(data)

        encrypted_file = s3_util.encrypt_file(s3_metadata, encryption_material,
                                              input_file)
        decrypted_file = s3_util.decrypt_file(s3_metadata, encryption_material,
                                              encrypted_file)

        # Read back via a context manager: the original opened this handle
        # and never closed it (resource leak).
        with codecs.open(decrypted_file, 'r', encoding=UTF8) as fd:
            contents = fd.read()
        assert data == contents, "encrypted and decrypted contents"
    finally:
        # close the mkstemp descriptor and remove all temp files
        os.close(input_fd)
        os.remove(input_file)
        if encrypted_file:
            os.remove(encrypted_file)
        if decrypted_file:
            os.remove(decrypted_file)
Esempio n. 28
0
def test_get_header_unexpected_error(caplog):
    """Tests whether unexpected errors are handled as expected when getting header."""
    caplog.set_level(logging.DEBUG, "snowflake.connector")
    meta = MINIMAL_METADATA
    # Build an S3 object mock whose .load() raises an unrecognized
    # ClientError code, simulating an unexpected AWS failure.
    s3_object = MagicMock()
    s3_object.bucket_name = "bucket"
    s3_object.key = "key"
    error_response = {"Error": {
        "Code": "???",
        "Message": "Just testing"
    }}
    s3_object.load.side_effect = botocore.exceptions.ClientError(
        error_response, "Testing")

    patcher = mock.patch(
        "snowflake.connector.s3_util.SnowflakeS3Util._get_s3_object",
        return_value=s3_object,
    )
    with patcher:
        assert SnowflakeS3Util.get_file_header(meta, "file.txt") is None

    # The failure must be logged at DEBUG and flagged on the metadata.
    expected_record = (
        "snowflake.connector.s3_util",
        logging.DEBUG,
        "Failed to get metadata for bucket, key: An error occurred (???) when calling "
        "the Testing operation: Just testing",
    )
    assert expected_record in caplog.record_tuples
    assert meta.result_status == ResultStatus.ERROR
def test_put_with_invalid_token(tmpdir, conn_cnx, db_parameters, from_path):
    """[s3] SNOW-6154: Uses invalid combination of AWS credential.

    Uploads a file with valid scoped credentials, then verifies that
    (1) writing outside the granted path and (2) omitting AWS_TOKEN both
    fail with an S3 error.
    """
    # create a data file
    fname = str(tmpdir.join('test_put_get_with_aws_token.txt.gz'))
    with gzip.open(fname, 'wb') as f:
        f.write("123,test1\n456,test2".encode(UTF8))
    table_name = random_string(5, 'snow6154_')

    # BUG FIX: bind before the try block. Previously file_stream was first
    # assigned deep inside the try; an early failure (e.g. in execute)
    # made the finally block raise NameError, masking the real error.
    file_stream = None
    with conn_cnx() as cnx:
        try:
            cnx.cursor().execute("create or replace table {} (a int, b string)".format(table_name))
            ret = cnx.cursor()._execute_helper("put file://{} @%{}".format(fname, table_name))
            stage_location = ret['data']['stageInfo']['location']
            stage_credentials = ret['data']['stageInfo']['creds']

            s3location = SnowflakeS3Util.extract_bucket_name_and_path(stage_location)

            s3path = s3location.s3path + os.path.basename(fname) + ".gz"

            # positive case
            client = boto3.resource(
                's3',
                aws_access_key_id=stage_credentials['AWS_ID'],
                aws_secret_access_key=stage_credentials['AWS_KEY'],
                aws_session_token=stage_credentials['AWS_TOKEN'])

            file_stream = None if from_path else open(fname, 'rb')

            if from_path:
                client.meta.client.upload_file(
                    fname, s3location.bucket_name, s3path)
            else:
                client.meta.client.upload_fileobj(
                    file_stream, s3location.bucket_name, s3path)

            # s3 closes stream; close any lingering handle before reopening
            # so repeated negative cases do not leak file descriptors
            if file_stream:
                file_stream.close()
            file_stream = None if from_path else open(fname, 'rb')

            # negative: wrong location, attempting to put the file in the
            # parent path
            parent_s3path = os.path.dirname(os.path.dirname(s3path)) + '/'

            with pytest.raises((S3UploadFailedError, ClientError)):
                if from_path:
                    client.meta.client.upload_file(fname, s3location.bucket_name, parent_s3path)
                else:
                    client.meta.client.upload_fileobj(file_stream, s3location.bucket_name, parent_s3path)

            # s3 closes stream; again close before reopening
            if file_stream:
                file_stream.close()
            file_stream = None if from_path else open(fname, 'rb')

            # negative: missing AWS_TOKEN
            client = boto3.resource(
                's3',
                aws_access_key_id=stage_credentials['AWS_ID'],
                aws_secret_access_key=stage_credentials['AWS_KEY'])

            with pytest.raises((S3UploadFailedError, ClientError)):
                if from_path:
                    client.meta.client.upload_file(
                        fname, s3location.bucket_name, s3path)
                else:
                    client.meta.client.upload_fileobj(
                        file_stream, s3location.bucket_name, s3path)
        finally:
            if file_stream:
                file_stream.close()
            cnx.cursor().execute("drop table if exists {}".format(table_name))