def test_public_read(self, mock_connection):
    content = '''
    <AccessControlPolicy xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
      <Owner>
        <ID>75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a</ID>
        <DisplayName>[email protected]</DisplayName>
      </Owner>
      <AccessControlList>
        <Grant>
          <Grantee xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="CanonicalUser">
            <ID>75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a</ID>
            <DisplayName>[email protected]</DisplayName>
            <URI>http://acs.amazonaws.com/groups/global/AllUsers</URI>
          </Grantee>
          <Permission>READ</Permission>
        </Grant>
        <Grant>
          <Grantee xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="CanonicalUser">
            <ID>75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a</ID>
            <DisplayName>[email protected]</DisplayName>
          </Grantee>
          <Permission>FULL_CONTROL</Permission>
        </Grant>
      </AccessControlList>
    </AccessControlPolicy>
    '''
    mock_server = MockConnection()
    mock_connection.return_value = mock_server
    mock_server.mock_add_request(MockResponse('GET',
                                              'https://localhost:9000/hello/?acl',
                                              {'User-Agent': _DEFAULT_USER_AGENT},
                                              200, content=content))
    client = Minio('localhost:9000')
    acl = client.get_bucket_acl('hello')
    eq_(Acl.public_read(), acl)
# -*- coding: utf-8 -*-
# Minio Python Library for Amazon S3 compatible cloud storage, (C) 2015 Minio, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from minio import Acl
from minio import Minio

__author__ = 'minio'

client = Minio('https://s3.amazonaws.com',
               access_key='YOUR-ACCESSKEYID',
               secret_key='YOUR-SECRETACCESSKEY')

# Set private ACL.
client.set_bucket_acl('my-bucket', Acl.private())

# Print current ACL.
print(client.get_bucket_acl('my-bucket'))
# Minio Python Library for Amazon S3 Compatible Cloud Storage, (C) 2015 Minio, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Note: YOUR-ACCESSKEYID, YOUR-SECRETACCESSKEY and my-bucketname are
# dummy values, please replace them with original values.

from minio import Acl
from minio import Minio
from minio.error import ResponseError

client = Minio('s3.amazonaws.com',
               access_key='YOUR-ACCESSKEYID',
               secret_key='YOUR-SECRETACCESSKEY')

# Print current bucket acl.
try:
    print(client.get_bucket_acl('my-bucketname'))
except ResponseError as err:
    print(err)
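# A companion sketch to the get_bucket_acl example above: setting a bucket ACL
# with the same client. set_bucket_acl() and the Acl helpers (Acl.private(),
# Acl.public_read(), Acl.public_read_write()) are used elsewhere in this
# library; the endpoint and credentials below are the same dummy values as in
# the example above, not real ones.
from minio import Acl
from minio import Minio
from minio.error import ResponseError

client = Minio('s3.amazonaws.com',
               access_key='YOUR-ACCESSKEYID',
               secret_key='YOUR-SECRETACCESSKEY')

# Make the bucket publicly readable, then read the ACL back to confirm.
try:
    client.set_bucket_acl('my-bucketname', Acl.public_read())
    print(client.get_bucket_acl('my-bucketname'))
except ResponseError as err:
    print(err)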
def test_bucket_is_not_empty_string(self):
    # A whitespace-only bucket name is expected to be rejected.
    client = Minio('localhost:9000')
    client.get_bucket_acl(' \t \n ')
def test_bucket_is_string(self):
    # A non-string bucket name is expected to be rejected.
    client = Minio('localhost:9000')
    client.get_bucket_acl(1234)
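# The two validation tests above presumably assert that an exception is raised
# for bad bucket names. With nose (eq_ from nose.tools appears in the unit test
# further up), that is commonly written with the @raises decorator, as in the
# sketch below. The exact exception types (ValueError / TypeError) are
# assumptions here, not confirmed from this code.
from unittest import TestCase

from nose.tools import raises

from minio import Minio


class BucketNameValidationSketch(TestCase):
    @raises(ValueError)  # assumed: whitespace-only names rejected with ValueError
    def test_bucket_is_not_empty_string(self):
        client = Minio('localhost:9000')
        client.get_bucket_acl(' \t \n ')

    @raises(TypeError)  # assumed: non-string names rejected with TypeError
    def test_bucket_is_string(self):
        client = Minio('localhost:9000')
        client.get_bucket_acl(1234)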
def main():
    """
    Functional testing of minio python library.
    """
    fake = Factory.create()
    client = Minio('s3.amazonaws.com',
                   os.getenv('ACCESS_KEY'),
                   os.getenv('SECRET_KEY'))

    _http = urllib3.PoolManager(
        cert_reqs='CERT_REQUIRED',
        ca_certs=certifi.where()
    )

    # Get unique bucket_name, object_name.
    bucket_name = uuid.uuid4().__str__()
    object_name = uuid.uuid4().__str__()

    # Enable trace
    # client.trace_on(sys.stderr)

    # Make a new bucket.
    bucket_name = 'minio-pytest'

    print(client.make_bucket(bucket_name))
    print(client.make_bucket(bucket_name+'.unique',
                             location='us-west-1'))

    # Check that the error codes returned by the server are valid.
    try:
        client.make_bucket(bucket_name+'.unique',
                           location='us-west-1')
    except ResponseError as err:
        if str(err.code) in ['BucketAlreadyOwnedByYou',
                             'BucketAlreadyExists']:
            pass
        else:
            raise

    # Check if bucket was created properly.
    print(client.bucket_exists(bucket_name))
    print(client.bucket_exists(bucket_name+'.unique'))

    # Set bucket ACL to private.
    print(client.set_bucket_acl(bucket_name, Acl.private()))
    print(client.set_bucket_acl(bucket_name+'.unique', Acl.private()))

    # Verify current bucket ACL.
    acl = client.get_bucket_acl(bucket_name)
    if acl != 'private':
        raise ValueError('Invalid acl type found: ' + acl)

    acl = client.get_bucket_acl(bucket_name+'.unique')
    if acl != 'private':
        raise ValueError('Invalid acl type found: ' + acl)

    # Set bucket ACL to public-read.
    print(client.set_bucket_acl(bucket_name, Acl.public_read()))
    print(client.set_bucket_acl(bucket_name+'.unique', Acl.public_read()))

    # Verify current bucket ACL.
    acl = client.get_bucket_acl(bucket_name)
    if acl != 'public-read':
        raise ValueError('Invalid acl type found: ' + acl)

    acl = client.get_bucket_acl(bucket_name+'.unique')
    if acl != 'public-read':
        raise ValueError('Invalid acl type found: ' + acl)

    # Set bucket ACL to public-read-write.
    print(client.set_bucket_acl(bucket_name, Acl.public_read_write()))
    print(client.set_bucket_acl(bucket_name+'.unique', Acl.public_read_write()))

    # Verify current bucket ACL.
    acl = client.get_bucket_acl(bucket_name)
    if acl != 'public-read-write':
        raise ValueError('Invalid acl type found: ' + acl)

    acl = client.get_bucket_acl(bucket_name+'.unique')
    if acl != 'public-read-write':
        raise ValueError('Invalid acl type found: ' + acl)

    # List all buckets.
    buckets = client.list_buckets()
    for bucket in buckets:
        print(bucket.name, bucket.creation_date)

    with open('testfile', 'wb') as file_data:
        file_data.write(fake.text().encode('utf-8'))
    file_data.close()

    # Put a file.
    file_stat = os.stat('testfile')
    with open('testfile', 'rb') as file_data:
        client.put_object(bucket_name, object_name, file_data,
                          file_stat.st_size)
    file_data.close()

    # Fput a file.
    print(client.fput_object(bucket_name, object_name+'-f', 'testfile'))

    # Fetch stats on your object.
    print(client.stat_object(bucket_name, object_name))

    # Get a full object.
    object_data = client.get_object(bucket_name, object_name)
    with open('newfile', 'wb') as file_data:
        for data in object_data:
            file_data.write(data)
    file_data.close()

    # Get a full object locally.
    print(client.fget_object(bucket_name, object_name, 'newfile-f'))

    # List all object paths in bucket.
    objects = client.list_objects(bucket_name, recursive=True)
    for obj in objects:
        print(obj.bucket_name, obj.object_name, obj.last_modified,
              obj.etag, obj.size, obj.content_type)

    presigned_get_object_url = client.presigned_get_object(bucket_name,
                                                           object_name)
    response = _http.urlopen('GET', presigned_get_object_url)
    if response.status != 200:
        response_error = ResponseError(response)
        raise response_error.get(bucket_name, object_name)

    presigned_put_object_url = client.presigned_put_object(bucket_name,
                                                           object_name)
    value = fake.text().encode('utf-8')
    data = io.BytesIO(value).getvalue()
    response = _http.urlopen('PUT', presigned_put_object_url, body=data)
    if response.status != 200:
        response_error = ResponseError(response)
        raise response_error.put(bucket_name, object_name)

    object_data = client.get_object(bucket_name, object_name)
    if object_data.read() != value:
        raise ValueError('Bytes not equal')

    # Post policy.
    policy = PostPolicy()
    policy.set_bucket_name(bucket_name)
    policy.set_key_startswith('objectPrefix/')

    expires_date = datetime.utcnow()+timedelta(days=10)
    policy.set_expires(expires_date)
    print(client.presigned_post_policy(policy))

    # Remove an object.
    print(client.remove_object(bucket_name, object_name))
    print(client.remove_object(bucket_name, object_name+'-f'))

    # Remove a bucket. This operation will only work if your bucket is empty.
    print(client.remove_bucket(bucket_name))
    print(client.remove_bucket(bucket_name+'.unique'))

    # Remove temporary files.
    os.remove('testfile')
    os.remove('newfile')
    os.remove('newfile-f')
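# A hedged sketch, separate from the functional test above: presigned URLs can
# be limited to a shorter lifetime by passing an explicit timedelta. The
# `expires` keyword argument to presigned_get_object()/presigned_put_object()
# and the one-hour value are assumptions used for illustration only.
from datetime import timedelta


def presigned_urls_with_expiry(client, bucket_name, object_name):
    # Both URLs below are valid for one hour instead of the default expiry.
    get_url = client.presigned_get_object(bucket_name, object_name,
                                          expires=timedelta(hours=1))
    put_url = client.presigned_put_object(bucket_name, object_name,
                                          expires=timedelta(hours=1))
    return get_url, put_url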
# -*- coding: utf-8 -*-
# Minio Python Library for Amazon S3 Compatible Cloud Storage, (C) 2015 Minio, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from minio import Acl
from minio import Minio

client = Minio('https://s3.amazonaws.com',
               access_key='YOUR-ACCESSKEYID',
               secret_key='YOUR-SECRETACCESSKEY')

# Print current bucket acl.
print(client.get_bucket_acl('bucketName'))
def main():
    """
    Functional testing of minio python library.
    """
    fake = Factory.create()
    client = Minio('https://play.minio.io:9002',
                   'Q3AM3UQ867SPQQA43P2F',
                   'zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG')

    # Get unique bucket_name, object_name.
    bucket_name = uuid.uuid4().__str__()
    object_name = uuid.uuid4().__str__()

    # Make a new bucket.
    print(client.make_bucket(bucket_name))

    # Check if bucket was created properly.
    print(client.bucket_exists(bucket_name))

    # Set bucket ACL to private.
    print(client.set_bucket_acl(bucket_name, Acl.private()))

    # Print current bucket acl.
    print(client.get_bucket_acl(bucket_name))

    # List all buckets.
    buckets = client.list_buckets()
    for bucket in buckets:
        print(bucket.name, bucket.creation_date)

    with open('testfile', 'wb') as file_data:
        file_data.write(fake.text().encode('utf-8'))
    file_data.close()

    # Put a file.
    file_stat = os.stat('testfile')
    with open('testfile', 'rb') as file_data:
        client.put_object(bucket_name, object_name, file_data,
                          file_stat.st_size)
    file_data.close()

    # Fetch stats on your object.
    print(client.stat_object(bucket_name, object_name))

    # Get a full object.
    data = client.get_object(bucket_name, object_name)
    with open('newfile', 'wb') as file_data:
        for d in data:
            file_data.write(d)
    file_data.close()

    # List all object paths in bucket.
    objects = client.list_objects(bucket_name)
    for obj in objects:
        print(obj.bucket_name, obj.object_name, obj.last_modified,
              obj.etag, obj.size, obj.content_type)

    # List all incomplete uploads in bucket.
    uploads = client.list_incomplete_uploads(bucket_name, prefix='',
                                             recursive=True)
    for obj in uploads:
        print(obj.bucket_name, obj.object_name, obj.upload_id)

    print(client.presigned_get_object(bucket_name, object_name))
    print(client.presigned_put_object(bucket_name, object_name))

    # Remove an object.
    print(client.remove_object(bucket_name, object_name))

    # Remove a bucket.
    # This operation will only work if your bucket is empty.
    print(client.remove_bucket(bucket_name))

    # Remove temporary files.
    os.remove('testfile')
    os.remove('newfile')
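# A hedged follow-up sketch, not part of the functional test above: the
# incomplete uploads listed in the loop above can be aborted with
# remove_incomplete_upload(). The helper name below is made up for
# illustration; only the client calls come from the library.
def drop_incomplete_uploads(client, bucket_name):
    # Abort every multipart upload that was started but never completed.
    for upload in client.list_incomplete_uploads(bucket_name, prefix='',
                                                 recursive=True):
        client.remove_incomplete_upload(bucket_name, upload.object_name)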