def test_store_client(self):
        store = S3Store(client='foo', resource='bar')
        assert store.client == 'foo'
        assert store.resource == 'bar'

        store = S3Store()
        assert isinstance(store.client, BaseClient)
        assert isinstance(store.resource, ServiceResource)
    def test_check_bucket(self):
        """check_bucket is truthy only for buckets that actually exist."""
        store = S3Store()
        store.get_bucket('bucket').create()

        assert store.check_bucket('bucket')
        assert not store.check_bucket('not-a-bucket')
    def test_upload_bytes(self):
        """Bytes uploaded under a key round-trip through S3 unchanged."""
        store = S3Store()
        store.client.create_bucket(Bucket='bucket')

        store.upload_bytes(b'Content', 'my_key', 'bucket')

        stored = store.resource.Object('bucket', 'my_key').get()['Body'].read()
        assert stored == b'Content'
    def test_read_key(self):
        """read_key decodes stored UTF-8 bytes into a unicode string."""
        store = S3Store()
        store.client.create_bucket(Bucket='bucket')
        # b'M\xC3\xA9nar' is the UTF-8 encoding of u'Ménar'.
        store.client.put_object(
            Bucket='bucket', Key='my_key', Body=b'M\xC3\xA9nar')

        assert store.read_key('my_key', 'bucket') == u'Ménar'
    def test_upload_string(self):
        """upload_string stores the unicode string as UTF-8 bytes."""
        store = S3Store()
        store.client.create_bucket(Bucket='bucket')

        store.upload_string(u'Ménar', 'my_key', 'bucket')

        stored = store.resource.Object('bucket', 'my_key').get()['Body'].read()
        assert stored == b'M\xC3\xA9nar'
    def test_upload_download_directory(self):
        """upload_dir/download_dir round-trip a nested directory tree.

        Exercises both use_basename modes: False uploads/downloads the
        directory contents directly under the key/local path, True keeps
        the directory's own name as an extra path component.
        """
        store = S3Store()
        store.client.create_bucket(Bucket='bucket')

        # Fixture tree: src_dir/{test1.txt,test2.txt,<nested>/test3.txt}
        src_dir = tempfile.mkdtemp()
        for fname, payload in (('test1.txt', 'data1'), ('test2.txt', 'data2')):
            with open(src_dir + '/' + fname, 'w') as handle:
                handle.write(payload)

        nested_dir = tempfile.mkdtemp(prefix=src_dir + '/')
        with open(nested_dir + '/test3.txt', 'w') as handle:
            handle.write('data3')

        base1 = src_dir.split('/')[-1]
        base2 = nested_dir.split('/')[-1]

        # --- use_basename=False: contents go straight under 'mykey' ---
        store.upload_dir(src_dir, 'mykey', 'bucket', use_basename=False)
        for expected_key in ('mykey/test1.txt',
                             'mykey/test2.txt',
                             'mykey/{}/test3.txt'.format(base2)):
            assert store.check_key(expected_key, 'bucket') is True

        dest_dir = tempfile.mkdtemp()
        store.download_dir('mykey', dest_dir, 'bucket', use_basename=False)
        assert sorted(os.listdir(dest_dir)) == sorted(
            [base2, 'test1.txt', 'test2.txt'])
        assert os.listdir('{}/{}'.format(dest_dir, base2)) == ['test3.txt']

        # --- use_basename=True: the directory name becomes a prefix ---
        store.upload_dir(src_dir, 'mykey', 'bucket', use_basename=True)
        for expected_key in ('mykey/{}/test1.txt'.format(base1),
                             'mykey/{}/test2.txt'.format(base1),
                             'mykey/{}/{}/test3.txt'.format(base1, base2)):
            assert store.check_key(expected_key, 'bucket') is True

        dest_dir = tempfile.mkdtemp()
        store.download_dir('mykey/{}'.format(base1),
                           dest_dir,
                           'bucket',
                           use_basename=True)
        assert os.listdir(dest_dir) == [base1]
        assert sorted(os.listdir('{}/{}'.format(dest_dir, base1))) == sorted(
            [base2, 'test1.txt', 'test2.txt'])
        assert os.listdir(
            '{}/{}/{}'.format(dest_dir, base1, base2)) == ['test3.txt']
    def test_check_key(self):
        """check_key accepts (key, bucket) pairs and full s3:// URLs."""
        store = S3Store()
        bucket = store.get_bucket('bucket')
        bucket.create()
        bucket.put_object(Key='a', Body=b'a')

        # Present key, addressed both ways.
        assert store.check_key('a', 'bucket') is True
        assert store.check_key('s3://bucket//a') is True
        # Missing key, addressed both ways.
        assert store.check_key('b', 'bucket') is False
        assert store.check_key('s3://bucket//b') is False
    def test_list_size(self):
        """list() paginates past S3's 1000-key single-page limit."""
        store = S3Store()
        bucket = store.get_bucket('bucket')
        bucket.create()

        total = 1001  # one more than a single listing page holds
        for idx in range(total):
            bucket.put_object(Key='a' + str(idx), Body=b'a')

        listed = store.list(bucket_name='bucket', delimiter='/')
        assert len(listed['keys']) == total
# Example 9
    def test_list_keys(self):
        """list_keys honours prefix/delimiter and yields (name, size) pairs."""
        store = S3Store()
        bucket = store.get_bucket('bucket')
        bucket.create()
        bucket.put_object(Key='a', Body=b'a')
        bucket.put_object(Key='dir/b', Body=b'b')

        expectations = [
            (dict(prefix='non-existent/'), []),
            (dict(), [('a', 1), ('dir/b', 1)]),
            (dict(delimiter='/'), [('a', 1)]),
            (dict(prefix='dir/'), [('b', 1)]),
        ]
        for kwargs, expected in expectations:
            assert store.list_keys(bucket_name='bucket', **kwargs) == expected
# Example 10
    def test_get_key(self):
        """get_key resolves plain keys and s3:// URLs; bad buckets raise."""
        store = S3Store()
        bucket = store.get_bucket('bucket')
        bucket.create()
        bucket.put_object(Key='a', Body=b'a')

        assert store.get_key('a', 'bucket').key == 'a'
        assert store.get_key('s3://bucket/a').key == 'a'

        # A missing bucket surfaces as the store's own exception type.
        with self.assertRaises(PolyaxonStoresException):
            store.get_key('a', 'nobucket')
# Example 11
 def test_check_prefix_format(self):
     """check_prefix_format appends the delimiter only when appropriate."""
     cases = [
         (('foo', ''), 'foo'),
         (('foo', '/'), 'foo/'),
         (('foo/', '/'), 'foo/'),
         (('/foo/', '/'), '/foo/'),
         (('/foo/boo', '/'), '/foo/boo/'),
         (('', '/'), ''),
     ]
     for (prefix, delimiter), expected in cases:
         assert S3Store.check_prefix_format(
             prefix=prefix, delimiter=delimiter) == expected
# Example 12
    def test_list_keys_paged(self):
        """Keys spread over multiple pages are still all returned."""
        store = S3Store()
        bucket = store.get_bucket('bucket')
        bucket.create()

        names = ['x', 'y']
        for name in names:
            bucket.put_object(Key=name, Body=b'a')

        found = store.list_keys(bucket_name='bucket',
                                delimiter='/',
                                page_size=2)
        assert sorted(found) == sorted((name, 1) for name in names)
# Example 13
    def test_list_prefixes_paged(self):
        """Prefixes spread over multiple pages are still all returned."""
        store = S3Store()
        bucket = store.get_bucket('bucket')
        bucket.create()

        # Two top-level "directories", forced onto separate pages.
        for key in ('x/b', 'y/b'):
            bucket.put_object(Key=key, Body=b'a')

        found = store.list_prefixes(bucket_name='bucket',
                                    delimiter='/',
                                    page_size=1)
        assert sorted(found) == ['x', 'y']
# Example 14
    def test_ls(self):
        """ls splits a location into files (with sizes) and sub-directories."""
        store = S3Store()
        bucket = store.get_bucket('bucket')
        bucket.create()
        bucket.put_object(Key='a', Body=b'a')
        bucket.put_object(Key='dir/b', Body=b'b')

        expectations = {
            's3://bucket': {'files': [('a', 1)], 'dirs': ['dir']},
            's3://bucket/': {'files': [('a', 1)], 'dirs': ['dir']},
            's3://bucket/non-existent': {'dirs': [], 'files': []},
            's3://bucket/non-existent/': {'dirs': [], 'files': []},
            's3://bucket/dir': {'dirs': [], 'files': [('b', 1)]},
            's3://bucket/dir/': {'dirs': [], 'files': [('b', 1)]},
        }
        for url, expected in expectations.items():
            assert store.ls(url) == expected
# Example 15
    def test_upload_download(self):
        """Round-trips single files through upload_file/download_file.

        Covers use_basename on both directions: uploading with
        use_basename=True appends the file's name to the key prefix, and
        downloading with use_basename=True appends the key's basename to
        the local directory.
        """
        store = S3Store()
        store.client.create_bucket(Bucket='bucket')

        dirname = tempfile.mkdtemp()
        fpath1 = dirname + '/test1.txt'
        with open(fpath1, 'w') as f:
            f.write('data1')

        fpath2 = dirname + '/test2.txt'
        with open(fpath2, 'w') as f:
            f.write('data2')

        fpath3 = dirname + '/test3.txt'
        with open(fpath3, 'w') as f:
            f.write('data3')

        # Upload
        store.upload_file(fpath1, 'my_key1.txt', 'bucket', use_basename=False)
        assert store.check_key('my_key1.txt', 'bucket') is True

        store.upload_file(fpath2, 'my_key2.txt', 'bucket', use_basename=False)
        assert store.check_key('my_key2.txt', 'bucket') is True

        store.upload_file(fpath3, 'foo/', 'bucket', use_basename=True)
        assert store.check_key('foo/test3.txt', 'bucket') is True

        # Download to an explicit local path.
        store.download_file('my_key1.txt',
                            local_path=dirname + '/foo1.txt',
                            bucket_name='bucket',
                            use_basename=False)
        # Fixed: the original asserted os.path.basename of a literal string,
        # which is always true; check the downloaded file actually exists.
        assert os.path.isfile(dirname + '/foo1.txt')
        with open(dirname + '/foo1.txt') as f:
            assert f.read() == 'data1'

        # Download into a directory, deriving the file name from the key.
        dirname2 = tempfile.mkdtemp()
        store.download_file('foo/test3.txt',
                            local_path=dirname2,
                            bucket_name='bucket',
                            use_basename=True)
        assert os.path.isfile(dirname2 + '/test3.txt')
        with open(dirname2 + '/test3.txt') as f:
            assert f.read() == 'data3'
# Example 16
    def get_store(cls, store_type=None, **kwargs):
        """Instantiate and return the store matching ``store_type``.

        Falls back to the ``POLYAXON_STORE`` environment variable when no
        type is given; ``None`` is then treated as the local store.
        Extra keyword arguments are forwarded to the store constructor
        (the local store takes none).

        Raises PolyaxonStoresException for an unrecognised type.
        """
        store_type = store_type or get_from_env(['POLYAXON_STORE'])
        if store_type is None:
            # We assume that `None` refers to local store as well
            store_type = cls._LOCAL_STORE

        if store_type not in cls._STORE_TYPES:
            raise PolyaxonStoresException(
                'Received an unrecognised store type `{}`.'.format(store_type))

        # Imports are deferred so only the selected backend is loaded.
        if store_type == cls._LOCAL_STORE:
            from polystores.stores.local_store import LocalStore
            store = LocalStore()
        elif store_type == cls._AZURE_STORE:
            from polystores.stores.azure_store import AzureStore
            store = AzureStore(**kwargs)
        elif store_type == cls._S3_STORE:
            from polystores.stores.s3_store import S3Store
            store = S3Store(**kwargs)
        elif store_type == cls._GCS_STORE:
            from polystores.stores.gcs_store import GCSStore
            store = GCSStore(**kwargs)
        else:
            raise PolyaxonStoresException(
                'Received an unrecognised store type `{}`.'.format(store_type))
        return store
# Example 17
 def test_get_bucket(self):
     """get_bucket returns a non-None bucket handle for a name."""
     handle = S3Store().get_bucket('bucket')
     assert handle is not None
# Example 18
    def test_check_bucket_raises_with_invalid_client_resources(self):
        """check_bucket errors out when client/resource are bogus objects."""
        broken = S3Store(client='foo', resource='bar')

        with self.assertRaises(Exception):
            broken.check_bucket('bucket')
# Example 19
 def test_parse_s3_url(self):
     """parse_s3_url splits an s3:// URL into (bucket, key)."""
     bucket, key = S3Store.parse_s3_url('s3://test/this/is/valid/key.txt')
     assert bucket == 'test'
     assert key == 'this/is/valid/key.txt'