Ejemplo n.º 1
0
 def test_public_read(self, mock_connection):
     """get_bucket_acl() should map this grant set to Acl.public_read().

     The mocked ACL document contains a READ grant naming the global
     AllUsers group URI plus a FULL_CONTROL grant for the owner.
     """
     # Raw XML body returned by the mocked GET /hello/?acl request.
     content = '''
               <AccessControlPolicy xmlns="http://s3.amazonaws.com/doc/2006-03-01/"> \
                 <Owner> \
                   <ID>75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a</ID> \
                   <DisplayName>[email protected]</DisplayName> \
                 </Owner> \
                 <AccessControlList> \
                   <Grant> \
                     <Grantee xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="CanonicalUser"> \
                       <ID>75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a</ID> \
                       <DisplayName>[email protected]</DisplayName> \
                       <URI>http://acs.amazonaws.com/groups/global/AllUsers</URI> \
                     </Grantee> \
                     <Permission>READ</Permission> \
                   </Grant> \
                   <Grant> \
                     <Grantee xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="CanonicalUser"> \
                       <ID>75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a</ID> \
                       <DisplayName>[email protected]</DisplayName> \
                     </Grantee> \
                     <Permission>FULL_CONTROL</Permission> \
                   </Grant> \
                 </AccessControlList> \
               </AccessControlPolicy>
               '''
     mock_server = MockConnection()
     mock_connection.return_value = mock_server
     # Queue the canned ACL response for the client's GET ?acl call.
     mock_server.mock_add_request(MockResponse('GET',
                                               'https://localhost:9000/hello/?acl',
                                               {'User-Agent': _DEFAULT_USER_AGENT},
                                               200, content=content))
     client = Minio('localhost:9000')
     acl = client.get_bucket_acl('hello')
     eq_(Acl.public_read(), acl)
Ejemplo n.º 2
0
def get_presigned_get_url(filename, config, secrets):
    """Build a presigned GET URL for *filename* via the Minio S3 client.

    Args:
        filename: Name of file to use in S3
        config: Pyglidein cluster config dictionary
        secrets: Pyglidein cluster secrets dictionary

    Returns:
        string: Presigned Get URL, or None when the request fails
        (the error is printed rather than raised).
    """
    from minio import Minio
    from minio.error import ResponseError

    startd_cfg = config['StartdLogging']
    startd_secrets = secrets['StartdLogging']

    s3_client = Minio(
        startd_cfg['url'],
        access_key=startd_secrets['access_key'],
        secret_key=startd_secrets['secret_key'],
        secure=True,
    )

    try:
        url = s3_client.presigned_get_object(startd_cfg['bucket'], filename)
    except ResponseError as err:
        # Best-effort: report the failure and fall through (returns None).
        print(err)
    else:
        return url
Ejemplo n.º 3
0
    def test_empty_list_objects_works(self, mock_connection):
        mock_data = """<?xml version="1.0"?>
<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
  <Name>bucket</Name>
  <Prefix/>
  <Marker/>
  <IsTruncated>false</IsTruncated>
  <MaxKeys>1000</MaxKeys>
  <Delimiter/>
</ListBucketResult>
        """
        mock_server = MockConnection()
        mock_connection.return_value = mock_server
        mock_server.mock_add_request(
            MockResponse(
                "GET",
                "https://localhost:9000/bucket/?max-keys=1000",
                {"User-Agent": _DEFAULT_USER_AGENT},
                200,
                content=mock_data,
            )
        )
        client = Minio("localhost:9000")
        bucket_iter = client.list_objects("bucket", recursive=True)
        buckets = []
        for bucket in bucket_iter:
            buckets.append(bucket)
        eq_(0, len(buckets))
Ejemplo n.º 4
0
 def test_remove_bucket_works(self, mock_connection):
     """remove_bucket() issues a DELETE and succeeds on an HTTP 204 reply."""
     server = MockConnection()
     mock_connection.return_value = server
     response = MockResponse('DELETE', 'http://localhost:9000/hello/',
                             {'User-Agent': _DEFAULT_USER_AGENT}, 204)
     server.mock_add_request(response)
     Minio('http://localhost:9000').remove_bucket('hello')
 def test_empty_list_uploads_test(self, mock_connection):
     """_list_incomplete_uploads() on a listing with no <Upload> entries yields nothing."""
     # ListMultipartUploads response containing zero uploads.
     mock_data = '''<?xml version="1.0"?>
                    <ListMultipartUploadsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
                      <Bucket>golang</Bucket>
                      <KeyMarker/>
                      <UploadIdMarker/>
                      <NextKeyMarker/>
                      <NextUploadIdMarker/>
                      <EncodingType/>
                      <MaxUploads>1000</MaxUploads>
                      <IsTruncated>false</IsTruncated>
                      <Prefix/>
                      <Delimiter/>
                    </ListMultipartUploadsResult>
                 '''
     mock_server = MockConnection()
     mock_connection.return_value = mock_server
     mock_server.mock_add_request(
         MockResponse('GET',
                      'https://localhost:9000/bucket/?max-uploads=1000&uploads',
                      {'User-Agent': _DEFAULT_USER_AGENT}, 200, content=mock_data))
     client = Minio('localhost:9000')
     # Positional args appear to be (bucket, prefix, recursive, ...) --
     # TODO confirm against the _list_incomplete_uploads signature.
     upload_iter = client._list_incomplete_uploads('bucket', '', True, False)
     uploads = []
     for upload in upload_iter:
         uploads.append(upload)
     eq_(0, len(uploads))
    def test_empty_list_parts_works(self, mock_connection):
        """_list_object_parts() on a listing with no <Part> entries yields nothing."""
        # ListParts response for a multipart upload that has zero parts.
        mock_data = '''<?xml version="1.0"?>
                       <ListPartsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
                         <Bucket>bucket</Bucket>
                         <Key>go1.4.2</Key>
                         <UploadId>ntWSjzBytPT2xKLaMRonzXncsO10EH4Fc-Iq2-4hG-ulRYB</UploadId>
                         <Initiator>
                           <ID>minio</ID>
                           <DisplayName>minio</DisplayName>
                         </Initiator>
                         <Owner>
                           <ID>minio</ID>
                           <DisplayName>minio</DisplayName>
                         </Owner>
                         <StorageClass>STANDARD</StorageClass>
                         <PartNumberMarker>0</PartNumberMarker>
                         <NextPartNumberMarker>0</NextPartNumberMarker>
                         <MaxParts>1000</MaxParts>
                         <IsTruncated>false</IsTruncated>
                       </ListPartsResult>
                    '''
        mock_server = MockConnection()
        mock_connection.return_value = mock_server
        mock_server.mock_add_request(
            MockResponse('GET',
                         'https://localhost:9000/bucket/key?max-parts=1000&uploadId=upload_id',
                         {'User-Agent': _DEFAULT_USER_AGENT}, 200, content=mock_data))

        client = Minio('localhost:9000')
        part_iter = client._list_object_parts('bucket', 'key', 'upload_id')
        parts = []
        for part in part_iter:
            parts.append(part)
        eq_(0, len(parts))
 def test_notification_config_id_key_is_optional(self, mock_connection):
     """A queue configuration without an 'Id' key should still be accepted."""
     server = MockConnection()
     mock_connection.return_value = server
     expected_headers = {
         'Content-Md5': 'f+TfVp/A4pNnI7S4S+MkFg==',
         'Content-Length': '196',
         'User-Agent': _DEFAULT_USER_AGENT,
     }
     server.mock_add_request(MockResponse(
         'PUT',
         'https://localhost:9000/my-test-bucket/?notification=',
         expected_headers,
         200, content=""
     ))
     notification = {
         'QueueConfigurations': [
             {'Arn': 'arn1', 'Events': ['s3:ObjectCreated:*']},
         ]
     }
     Minio('localhost:9000').set_bucket_notification('my-test-bucket',
                                                     notification)
Ejemplo n.º 8
0
 def test_set_bucket_acl_works(self, mock_connection):
     """set_bucket_acl() should PUT the canned ACL via the x-amz-acl header."""
     server = MockConnection()
     mock_connection.return_value = server
     expected_headers = {'x-amz-acl': 'private',
                         'User-Agent': _DEFAULT_USER_AGENT}
     server.mock_add_request(
         MockResponse('PUT', 'http://localhost:9000/hello/?acl',
                      expected_headers, 200))
     Minio('http://localhost:9000').set_bucket_acl('hello', Acl.private())
    def test_list_uploads_works(self, mock_connection):
        """_list_incomplete_uploads() should yield one item per <Upload> entry."""
        # ListMultipartUploads response with exactly two in-progress uploads.
        mock_data = '''<?xml version="1.0"?>
                       <ListMultipartUploadsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
                         <Bucket>golang</Bucket>
                         <KeyMarker/>
                         <UploadIdMarker/>
                         <NextKeyMarker>keymarker</NextKeyMarker>
                         <NextUploadIdMarker>uploadidmarker</NextUploadIdMarker>
                         <EncodingType/>
                         <MaxUploads>1000</MaxUploads>
                         <IsTruncated>false</IsTruncated>
                         <Upload>
                           <Key>go1.4.2</Key>
                           <UploadId>uploadid</UploadId>
                           <Initiator>
                             <ID/>
                             <DisplayName/>
                           </Initiator>
                           <Owner>
                             <ID/>
                             <DisplayName/>
                           </Owner>
                           <StorageClass/>
                           <Initiated>2015-05-30T14:43:35.349Z</Initiated>
                         </Upload>
                         <Upload>
                           <Key>go1.5.0</Key>
                           <UploadId>uploadid2</UploadId>
                           <Initiator>
                             <ID/>
                             <DisplayName/>
                           </Initiator>
                           <Owner>
                             <ID/>
                             <DisplayName/>
                           </Owner>
                           <StorageClass/>
                           <Initiated>2015-05-30T15:00:07.759Z</Initiated>
                         </Upload>
                         <Prefix/>
                         <Delimiter/>
                       </ListMultipartUploadsResult>
                    '''
        mock_server = MockConnection()
        mock_connection.return_value = mock_server
        # Non-recursive listing: client sends delimiter=/ in the query string.
        mock_server.mock_add_request(
            MockResponse('GET',
                         'https://localhost:9000/bucket/?delimiter=%2F&max-uploads=1000&uploads',
                         {'User-Agent': _DEFAULT_USER_AGENT},
                         200, content=mock_data))

        client = Minio('localhost:9000')
        upload_iter = client._list_incomplete_uploads('bucket', '', False, False)
        uploads = []
        for upload in upload_iter:
            uploads.append(upload)
        eq_(2, len(uploads))
Ejemplo n.º 10
0
 def test_bucket_exists_works(self, mock_connection):
     """Exercise bucket_exists() against a mocked HEAD returning HTTP 400."""
     server = MockConnection()
     mock_connection.return_value = server
     server.mock_add_request(
         MockResponse('HEAD',
                      'https://localhost:9000/hello/',
                      {'User-Agent': _DEFAULT_USER_AGENT},
                      400))
     # NOTE(review): the result is never asserted; presumably an
     # expected-exception decorator outside this view checks the 400
     # path -- confirm.
     result = Minio('localhost:9000').bucket_exists('hello')
Ejemplo n.º 11
0
 def test_make_bucket_throws_fail(self, mock_connection):
     """Exercise make_bucket() against a mocked 409 conflict response."""
     # Canned S3 error document returned as the PUT response body.
     error_xml = generate_error('code', 'message', 'request_id',
                                'host_id', 'resource')
     server = MockConnection()
     mock_connection.return_value = server
     response = MockResponse('PUT',
                             'http://localhost:9000/hello/',
                             {'User-Agent': _DEFAULT_USER_AGENT},
                             409, content=error_xml)
     server.mock_add_request(response)
     Minio('http://localhost:9000').make_bucket('hello')
Ejemplo n.º 12
0
 def test_get_object_throws_fail(self, mock_connection):
     """Exercise get_object() against a mocked 404 error response."""
     # Canned S3 error document returned as the GET response body.
     error_xml = generate_error('code', 'message', 'request_id',
                                'host_id', 'resource', 'bucket',
                                'object')
     server = MockConnection()
     mock_connection.return_value = server
     response = MockResponse('GET',
                             'https://localhost:9000/hello/key',
                             {'User-Agent': _DEFAULT_USER_AGENT},
                             404, content=error_xml)
     server.mock_add_request(response)
     Minio('localhost:9000').get_object('hello', 'key')
 def test_notification_config_events_key_is_present(self):
     """Call set_bucket_notification() with a queue config lacking 'Events'."""
     config = {
         'QueueConfigurations': [
             {'Id': '1', 'Arn': 'arn1'},
         ]
     }
     Minio('localhost:9000').set_bucket_notification('my-test-bucket', config)
 def test_can_include_response_headers(self):
     """Presigned GET URLs should carry the requested response-header overrides."""
     client = Minio('localhost:9000', 'my_access_key', 'my_secret_key',
                    secure=True)
     client._get_bucket_region = mock.Mock(return_value='us-east-1')
     overrides = {
         'Response-Content-Type': 'application/pdf',
         'Response-Content-Disposition': 'inline;  filename="test.pdf"'
     }
     url = client.presigned_get_object('mybucket', 'myfile.pdf',
                                       response_headers=overrides)
     # The overrides must survive URL-encoding into the query string.
     for fragment in ('inline', 'test.pdf', 'application%2Fpdf'):
         self.assertIn(fragment, url)
 def test_notification_config_arn_key_is_present(self):
     """Call set_bucket_notification() with a queue config lacking 'Arn'."""
     config = {
         'QueueConfigurations': [
             {'Id': '1', 'Events': ['s3:ObjectCreated:*']},
         ]
     }
     Minio('localhost:9000').set_bucket_notification('my-test-bucket', config)
 def test_notification_config_id_key_is_string(self):
     """Call set_bucket_notification() with a non-string 'Id' value."""
     config = {
         'QueueConfigurations': [
             {'Id': 1, 'Arn': 'abc', 'Events': ['s3:ObjectCreated:*']},
         ]
     }
     Minio('localhost:9000').set_bucket_notification('my-test-bucket', config)
Ejemplo n.º 17
0
 def test_empty_list_buckets_works(self, mock_connection):
     """list_buckets() over an empty <Buckets/> listing yields nothing."""
     mock_data = '<ListAllMyBucketsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><Buckets>' \
                 '</Buckets><Owner><ID>minio</ID><DisplayName>minio</DisplayName></Owner></ListAllMyBucketsResult>'
     server = MockConnection()
     mock_connection.return_value = server
     server.mock_add_request(MockResponse('GET', 'https://localhost:9000/',
                                          {'User-Agent': _DEFAULT_USER_AGENT},
                                          200, content=mock_data))
     buckets = Minio('localhost:9000').list_buckets()
     eq_(0, sum(1 for _ in buckets))
Ejemplo n.º 18
0
 def test_object_is_tuple(self, mock_connection):
     """remove_objects() should accept a tuple of object names."""
     server = MockConnection()
     mock_connection.return_value = server
     expected_headers = {'Content-Length': 95,
                         'User-Agent': _DEFAULT_USER_AGENT,
                         'Content-Md5': u'5Tg5SmU9Or43L4+iIyfPrQ=='}
     server.mock_add_request(
         MockResponse('POST',
                      'https://localhost:9000/hello/?delete',
                      expected_headers, 200,
                      content='<Delete/>')
     )
     client = Minio('localhost:9000')
     # Drain the error iterator; any errors are just echoed.
     for err in client.remove_objects('hello', ('Ab', 'c')):
         print(err)
Ejemplo n.º 19
0
 def test_stat_object_works(self, mock_connection):
     """stat_object() should succeed when HEAD returns object metadata."""
     object_headers = {
         'content-type': 'application/octet-stream',
         'last-modified': 'Fri, 26 Jun 2015 19:05:37 GMT',
         'content-length': 11,
         'etag': '5eb63bbbe01eeed093cb22bb8f5acdc3'
     }
     server = MockConnection()
     mock_connection.return_value = server
     server.mock_add_request(
         MockResponse('HEAD', 'http://localhost:9000/hello/world',
                      {'User-Agent': _DEFAULT_USER_AGENT}, 200,
                      response_headers=object_headers))
     Minio('http://localhost:9000').stat_object('hello', 'world')
 def test_notification_config_has_valid_keys(self):
     """Call set_bucket_notification() with an unrecognised top-level key."""
     # 'QueueConfiguration' (singular) instead of 'QueueConfigurations'.
     config = {
         'QueueConfiguration': [
             {'Id': '1', 'Arn': 'arn1', 'Events': ['s3:ObjectCreated:*']},
         ]
     }
     Minio('localhost:9000').set_bucket_notification('my-test-bucket', config)
 def test_notification_config_has_valid_event_names(self):
     """Call set_bucket_notification() with an unrecognised event name."""
     config = {
         'QueueConfigurations': [
             {'Id': '1', 'Arn': 'arn1', 'Events': ['object_created']},
         ]
     }
     Minio('localhost:9000').set_bucket_notification('my-test-bucket', config)
Ejemplo n.º 22
0
    def test_list_objects_works(self, mock_connection):
        """list_objects() should yield one item per <Contents> entry, from one request."""
        # Complete (IsTruncated=false) listing containing two objects.
        mock_data = '''<?xml version="1.0"?>
<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
  <Name>bucket</Name>
  <Prefix/>
  <Marker/>
  <MaxKeys>1000</MaxKeys>
  <Delimiter/>
  <IsTruncated>false</IsTruncated>
  <Contents>
    <Key>key1</Key>
    <LastModified>2015-05-05T02:21:15.716Z</LastModified>
    <ETag>5eb63bbbe01eeed093cb22bb8f5acdc3</ETag>
    <Size>11</Size>
    <StorageClass>STANDARD</StorageClass>
    <Owner>
      <ID>minio</ID>
      <DisplayName>minio</DisplayName>
    </Owner>
  </Contents>
  <Contents>
    <Key>key2</Key>
    <LastModified>2015-05-05T20:36:17.498Z</LastModified>
    <ETag>2a60eaffa7a82804bdc682ce1df6c2d4</ETag>
    <Size>1661</Size>
    <StorageClass>STANDARD</StorageClass>
    <Owner>
      <ID>minio</ID>
      <DisplayName>minio</DisplayName>
    </Owner>
  </Contents>
</ListBucketResult>
        '''
        mock_server = MockConnection()
        mock_connection.return_value = mock_server
        mock_server.mock_add_request(MockResponse('GET',
                                                  'https://localhost:9000/bucket/?delimiter=%2F&max-keys=1000&prefix=',
                                                  {'User-Agent': _DEFAULT_USER_AGENT}, 200, content=mock_data))
        client = Minio('localhost:9000')
        bucket_iter = client.list_objects('bucket')
        buckets = []
        for bucket in bucket_iter:
            # cause an xml exception and fail if we try retrieving again:
            # queue an empty (unparseable) body so a second, unexpected
            # request would blow up instead of silently succeeding.
            mock_server.mock_add_request(MockResponse('GET',
                                                      'https://localhost:9000/bucket/?delimiter=%2F&max-keys=1000&prefix=',
                                                      {'User-Agent': _DEFAULT_USER_AGENT}, 200, content=''))
            buckets.append(bucket)

        eq_(2, len(buckets))
 def test_notification_config_filterspec_is_valid_1(self):
     """Call set_bucket_notification() with a 'Filter' value that is a list."""
     config = {
         'QueueConfigurations': [
             {
                 'Id': '1',
                 'Arn': 'arn1',
                 'Events': ['s3:ObjectCreated:*'],
                 'Filter': []
             }
         ]
     }
     Minio('localhost:9000').set_bucket_notification('my-test-bucket', config)
Ejemplo n.º 24
0
    def test_empty_list_objects_works(self, mock_connection):
        mock_data = '''<?xml version="1.0"?>
<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
  <Name>bucket</Name>
  <Prefix/>
  <Marker/>
  <MaxKeys>1000</MaxKeys>
  <Delimiter/>
  <IsTruncated>true</IsTruncated>
</ListBucketResult>
        '''
        mock_server = MockConnection()
        mock_connection.return_value = mock_server
        mock_server.mock_add_request(MockResponse('GET', 'http://localhost:9000/bucket/?max-keys=1000', {}, 200, content=mock_data))
        client = Minio('http://localhost:9000')
        bucket_iter = client.list_objects('bucket', recursive=True)
        buckets = []
        for bucket in bucket_iter:
            buckets.append(bucket)
        eq_(0, len(buckets))
Ejemplo n.º 25
0
    def get(cls, raw_msg_id):
        """
        Get raw message from db or ObjectStorage service.

        If the db row holds no inline raw_data but carries a URI, the
        message body is fetched from the referenced s3 object store.

        :param raw_msg_id: identifier of the raw message to load
        :return: a RawMessage or NotFound exception
        """
        try:
            raw_msg = super(RawMessage, cls).get(raw_msg_id)
        except Exception as exc:
            log.warn(exc)
            raise NotFound

        if raw_msg.raw_data == '' and raw_msg.uri != '':
            # means raw message data have been stored in object store
            # need to retrieve raw_data from it
            # NOTE(review): `urlparse.urlsplit` is the Python-2 spelling;
            # presumably this module targets py2 -- confirm.
            url = urlparse.urlsplit(raw_msg.uri)
            path = url.path.strip("/")
            if url.scheme == 's3':
                # Build a client from the 'object_store' section of the
                # global configuration; netloc is the bucket name.
                minioConf = Configuration("global").get("object_store")
                minioClient = Minio(minioConf["endpoint"],
                                    access_key=minioConf["access_key"],
                                    secret_key=minioConf["secret_key"],
                                    secure=False,
                                    region=minioConf["location"])
                try:
                    resp = minioClient.get_object(url.netloc, path)
                except Exception as exc:
                    log.warn(exc)
                    raise NotFound
                # resp is a urllib3.response.HTTPResponse class
                try:
                    raw_msg.raw_data = resp.data
                except Exception as exc:
                    log.warn(exc)
                    raise NotFound
            else:
                log.warn("raw message uri scheme not implemented")
                raise NotFound

        return raw_msg
Ejemplo n.º 26
0
 def test_list_buckets_works(self, mock_connection):
     """list_buckets() should yield one Bucket per <Bucket> entry, with parsed dates."""
     # ListAllMyBuckets response containing two buckets: 'hello' and 'world'.
     mock_data = '<ListAllMyBucketsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><Buckets>' \
                 '<Bucket><Name>hello</Name><CreationDate>2015-06-22T23:07:43.240Z</CreationDate></Bucket><Bucket>' \
                 '<Name>world</Name><CreationDate>2015-06-22T23:07:56.766Z</CreationDate></Bucket>' \
                 '</Buckets><Owner><ID>minio</ID><DisplayName>minio</DisplayName></Owner></ListAllMyBucketsResult>'
     mock_server = MockConnection()
     mock_connection.return_value = mock_server
     mock_server.mock_add_request(MockResponse('GET', 'https://localhost:9000/',
                                               {'User-Agent': _DEFAULT_USER_AGENT},
                                               200, content=mock_data))
     client = Minio('localhost:9000')
     buckets = client.list_buckets()
     buckets_list = []
     count = 0
     for bucket in buckets:
         count += 1
         buckets_list.append(bucket)
     eq_(2, count)
     # Creation dates must round-trip as timezone-aware UTC datetimes.
     eq_('hello', buckets_list[0].name)
     eq_(datetime(2015, 6, 22, 23, 7, 43, 240000, pytz.utc), buckets_list[0].creation_date)
     eq_('world', buckets_list[1].name)
     eq_(datetime(2015, 6, 22, 23, 7, 56, 766000, pytz.utc), buckets_list[1].creation_date)
Ejemplo n.º 27
0
    def test_connection():
        """Smoke-test connectivity by listing buckets through a hardened HTTP client."""
        # HTTP client with a 30s timeout, certificate verification, and
        # three retries (with backoff) on the common transient 5xx codes.
        http_client = urllib3.PoolManager(
            timeout=30,
            cert_reqs='CERT_REQUIRED',
            ca_certs=certifi.where(),
            retries=urllib3.Retry(
                total=3,
                backoff_factor=0.2,
                status_forcelist=[500, 502, 503, 504],
            ),
        )

        # Initialize the client with the endpoint and access/secret keys.
        client = Minio(S3_CONN_INFO['endpoint'],
                       access_key=S3_CONN_INFO['access_key'],
                       secret_key=S3_CONN_INFO['secret_key'],
                       secure=False,
                       http_client=http_client)

        buckets = client.list_buckets()
 def test_notification_config_filterspec_is_valid_8(self, mock_connection):
     """A filter with a 'suffix' rule should serialize and PUT successfully."""
     server = MockConnection()
     mock_connection.return_value = server
     expected_headers = {
         'Content-Length': '206',
         'Content-Md5': 'AGCNfbD5OuiyIJFd+r67MA==',
         'User-Agent': _DEFAULT_USER_AGENT,
     }
     server.mock_add_request(MockResponse(
         'PUT',
         'https://localhost:9000/my-test-bucket/?notification=',
         expected_headers,
         200, content=""
     ))
     filter_spec = {
         'Key': {
             'FilterRules': [
                 {'Name': 'suffix', 'Value': 'abc'}
             ]
         }
     }
     config = {
         'QueueConfigurations': [
             {
                 'Id': '1',
                 'Arn': 'arn1',
                 'Events': ['s3:ObjectCreated:*'],
                 'Filter': filter_spec
             }
         ]
     }
     Minio('localhost:9000').set_bucket_notification('my-test-bucket', config)
Ejemplo n.º 29
0
#
# * Neither the name of the copyright holder nor the names of its
#   contributors may be used to endorse or promote products derived from
#   this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys

from minio import Minio

# SECURITY(review): hard-coded credentials; move to environment/config.
minioClient = Minio('localhost:9000',
                    access_key='miniouser',
                    secret_key='leftfoot1', secure=False)

# BUG FIX: sys.argv always contains at least the script name, so the
# original `len(sys.argv) == 0` could never be true -- the list-buckets
# branch was unreachable and running with no argument crashed on
# sys.argv[1]. "No extra argument" is len(sys.argv) < 2.
if len(sys.argv) < 2:
    # No bucket given: print the names of all buckets.
    buckets = minioClient.list_buckets()
    print([bucket.name for bucket in buckets])
else:
    # Bucket given: print its total object size in megabytes.
    objects = minioClient.list_objects(sys.argv[1])
    sizes = [obj.size for obj in objects]  # renamed: `object` shadowed a builtin
    print(sum(sizes) / 1e6)
Ejemplo n.º 30
0
 def __init__(self, _bucket="default"):
     """Create the Minio client from the instance's connection settings."""
     self.bucket = _bucket
     self.client = Minio(
         self.server,
         access_key=self.secret_id,
         secret_key=self.secret_key,
         secure=self.isSSL,
     )
Ejemplo n.º 31
0
class S3DataStore(DataStore):
    """
    An implementation of the data store using S3 for storing policy checkpoints when using Coach in distributed mode.
    The policy checkpoints are written by the trainer and read by the rollout worker.
    """
    def __init__(self, params: S3DataStoreParameters):
        """
        :param params: The parameters required to use the S3 data store.
        """

        super(S3DataStore, self).__init__(params)
        self.params = params
        access_key = None
        secret_key = None
        if params.creds_file:
            # Credentials file uses the AWS-style INI layout ([default] section).
            config = ConfigParser()
            config.read(params.creds_file)
            try:
                access_key = config.get('default', 'aws_access_key_id')
                secret_key = config.get('default', 'aws_secret_access_key')
            except Error as e:
                # Bug fix: print() previously received two arguments
                # ("fmt", e) instead of a formatted string.
                print("Error when reading S3 credentials file: %s" % e)
        else:
            access_key = os.environ.get('ACCESS_KEY_ID')
            secret_key = os.environ.get('SECRET_ACCESS_KEY')
        self.mc = Minio(self.params.end_point,
                        access_key=access_key,
                        secret_key=secret_key)

    def deploy(self) -> bool:
        """Nothing to provision for S3; always succeeds."""
        return True

    def get_info(self):
        """Return a human-readable S3 URI for the checkpoint location."""
        # Bug fix: the format string had two placeholders but only one
        # argument, so get_info() raised IndexError whenever it was called.
        return "s3://{}/{}".format(self.params.bucket_name,
                                   self.params.checkpoint_dir)

    def undeploy(self) -> bool:
        """Nothing to tear down for S3; always succeeds."""
        return True

    def save_to_store(self):
        """Upload the configured checkpoint directory to S3."""
        self._save_to_store(self.params.checkpoint_dir)

    def _save_to_store(self, checkpoint_dir):
        """
        save_to_store() uploads the policy checkpoint, gifs and videos to the S3 data store. It reads the checkpoint state files and
        uploads only the latest checkpoint files to S3. It is used by the trainer in Coach when used in the distributed mode.
        """
        try:
            # remove lock file if it exists
            self.mc.remove_object(self.params.bucket_name,
                                  SyncFiles.LOCKFILE.value)

            # Acquire lock
            self.mc.put_object(self.params.bucket_name,
                               SyncFiles.LOCKFILE.value, io.BytesIO(b''), 0)

            state_file = CheckpointStateFile(os.path.abspath(checkpoint_dir))
            if state_file.exists():
                ckpt_state = state_file.read()
                checkpoint_file = None
                for root, dirs, files in os.walk(checkpoint_dir):
                    for filename in files:
                        if filename == CheckpointStateFile.checkpoint_state_filename:
                            # Defer the state file itself; it is uploaded last,
                            # after all checkpoint data files are in place.
                            checkpoint_file = (root, filename)
                            continue
                        if filename.startswith(ckpt_state.name):
                            abs_name = os.path.abspath(
                                os.path.join(root, filename))
                            rel_name = os.path.relpath(abs_name,
                                                       checkpoint_dir)
                            self.mc.fput_object(self.params.bucket_name,
                                                rel_name, abs_name)

                # Upload the checkpoint state file last.
                abs_name = os.path.abspath(
                    os.path.join(checkpoint_file[0], checkpoint_file[1]))
                rel_name = os.path.relpath(abs_name, checkpoint_dir)
                self.mc.fput_object(self.params.bucket_name, rel_name,
                                    abs_name)

            # upload Finished if present
            if os.path.exists(
                    os.path.join(checkpoint_dir, SyncFiles.FINISHED.value)):
                self.mc.put_object(self.params.bucket_name,
                                   SyncFiles.FINISHED.value, io.BytesIO(b''),
                                   0)

            # upload Ready if present
            if os.path.exists(
                    os.path.join(checkpoint_dir,
                                 SyncFiles.TRAINER_READY.value)):
                self.mc.put_object(self.params.bucket_name,
                                   SyncFiles.TRAINER_READY.value,
                                   io.BytesIO(b''), 0)

            # release lock
            self.mc.remove_object(self.params.bucket_name,
                                  SyncFiles.LOCKFILE.value)

            if self.params.expt_dir and os.path.exists(self.params.expt_dir):
                for filename in os.listdir(self.params.expt_dir):
                    if filename.endswith((".csv", ".json")):
                        self.mc.fput_object(
                            self.params.bucket_name, filename,
                            os.path.join(self.params.expt_dir, filename))

            if self.params.expt_dir and os.path.exists(
                    os.path.join(self.params.expt_dir, 'videos')):
                for filename in os.listdir(
                        os.path.join(self.params.expt_dir, 'videos')):
                    self.mc.fput_object(
                        self.params.bucket_name, filename,
                        os.path.join(self.params.expt_dir, 'videos', filename))

            if self.params.expt_dir and os.path.exists(
                    os.path.join(self.params.expt_dir, 'gifs')):
                for filename in os.listdir(
                        os.path.join(self.params.expt_dir, 'gifs')):
                    self.mc.fput_object(
                        self.params.bucket_name, filename,
                        os.path.join(self.params.expt_dir, 'gifs', filename))

        except ResponseError as e:
            # Bug fix: print() previously received two arguments instead of a
            # formatted string.
            print("Got exception: %s\n while saving to S3" % e)

    def load_from_store(self):
        """
        load_from_store() downloads a new checkpoint from the S3 data store when it is not available locally. It is used
        by the rollout workers when using Coach in distributed mode.
        """
        try:
            state_file = CheckpointStateFile(
                os.path.abspath(self.params.checkpoint_dir))

            # wait until lock is removed
            while True:
                objects = self.mc.list_objects_v2(self.params.bucket_name,
                                                  SyncFiles.LOCKFILE.value)

                if next(objects, None) is None:
                    try:
                        # fetch checkpoint state file from S3
                        self.mc.fget_object(self.params.bucket_name,
                                            state_file.filename,
                                            state_file.path)
                    except Exception as e:
                        # State file not present yet — retry immediately.
                        continue
                    break
                time.sleep(10)

            # Check if there's a finished file
            objects = self.mc.list_objects_v2(self.params.bucket_name,
                                              SyncFiles.FINISHED.value)

            if next(objects, None) is not None:
                try:
                    self.mc.fget_object(
                        self.params.bucket_name, SyncFiles.FINISHED.value,
                        os.path.abspath(
                            os.path.join(self.params.checkpoint_dir,
                                         SyncFiles.FINISHED.value)))
                except Exception as e:
                    pass

            # Check if there's a ready file
            objects = self.mc.list_objects_v2(self.params.bucket_name,
                                              SyncFiles.TRAINER_READY.value)

            if next(objects, None) is not None:
                try:
                    self.mc.fget_object(
                        self.params.bucket_name, SyncFiles.TRAINER_READY.value,
                        os.path.abspath(
                            os.path.join(self.params.checkpoint_dir,
                                         SyncFiles.TRAINER_READY.value)))
                except Exception as e:
                    pass

            checkpoint_state = state_file.read()
            if checkpoint_state is not None:
                objects = self.mc.list_objects_v2(self.params.bucket_name,
                                                  prefix=checkpoint_state.name,
                                                  recursive=True)
                for obj in objects:
                    filename = os.path.abspath(
                        os.path.join(self.params.checkpoint_dir,
                                     obj.object_name))
                    if not os.path.exists(filename):
                        self.mc.fget_object(obj.bucket_name, obj.object_name,
                                            filename)

        except ResponseError as e:
            # Bug fix: print() previously received two arguments instead of a
            # formatted string.
            print("Got exception: %s\n while loading from S3" % e)

    def setup_checkpoint_dir(self, crd=None):
        """Seed the store from a pre-existing checkpoint directory, if given."""
        if crd:
            self._save_to_store(crd)
Ejemplo n.º 32
0
# -*- coding: utf-8 -*-
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Note: YOUR-ACCESSKEYID, YOUR-SECRETACCESSKEY, my-bucketname and my-prefixname
# are dummy values, please replace them with original values.

from minio import Minio

# Dummy credentials; replace with real values before running.
client = Minio('s3.amazonaws.com',
               access_key='YOUR-ACCESSKEYID',
               secret_key='YOUR-SECRETACCESSKEY')

# List all object paths in bucket that begin with my-prefixname.
uploads = client.list_incomplete_uploads('my-bucketname',
                                         prefix='my-prefixname',
                                         recursive=True)
# Each entry represents a multipart upload that was started but not completed.
for obj in uploads:
    print(obj.bucket_name, obj.object_name, obj.upload_id, obj.size)
    cos_endpoint = get_secret_creds("/app/secrets/cos_endpoint")
    cos_access_key = get_secret_creds("/app/secrets/cos_access_key")
    cos_secret_key = get_secret_creds("/app/secrets/cos_secret_key")

    ''' Remove possible http scheme for Minio '''
    url = re.compile(r"https?://")
    cos_endpoint = url.sub('', cos_endpoint)

    WML_CREDENTIALS = {
                       "url": wml_url,
                       "instance_id": wml_instance_id,
                       "apikey": wml_apikey
                      }
    ''' Load Spark model '''
    cos = Minio(cos_endpoint,
                access_key=cos_access_key,
                secret_key=cos_secret_key,
                secure=True)

    cos.fget_object(cos_bucket_name, model_filepath, model_filepath)
    cos.fget_object(cos_bucket_name, train_data_filepath, train_data_filepath)
    cos.fget_object(cos_bucket_name, 'evaluation.json', 'evaluation.json')
    if aios_manifest_path:
        cos.fget_object(cos_bucket_name, aios_manifest_path, aios_manifest_path)

    os.system('unzip %s' % model_filepath)
    print('model ' + model_filepath + ' is downloaded')
    os.system('unzip %s' % train_data_filepath)
    print('train_data ' + train_data_filepath + ' is downloaded')

    sc = SparkContext()
    model = PipelineModel.load(model_filepath.split('.')[0])
Ejemplo n.º 34
0
 def test_bucket_is_not_empty_string(self):
     """remove_bucket must reject a bucket name that is only whitespace."""
     whitespace_name = '  \t \n  '
     Minio('localhost:9000').remove_bucket(whitespace_name)
Ejemplo n.º 35
0
 def test_object_is_string(self):
     """presigned_put_object must reject a non-string object name."""
     minio_client = Minio('localhost:9000')
     # 1234 is an int, not a str.
     minio_client.presigned_put_object('hello', 1234)
Ejemplo n.º 36
0
    # Full text transcription of the meme; required.
    transcription = db.Column(db.Text, nullable=False)
    # NOTE(review): presumably flags whether this upload is the original
    # version — confirm semantics against the upload code.
    original = db.Column(db.Boolean)
    # IP address the meme was uploaded from; required.
    upload_ip = db.Column(db.String(128), nullable=False)

    # returns a direct link to the image
    def get_url(self):
        """Direct URL of this meme's stored image, based on MINIO_URL."""
        base_url = app.config['MINIO_URL']
        return '{}/memes/{}'.format(base_url, self.id)

    # returns a link to the meme page for this meme
    def get_page(self):
        """Relative URL of the page that displays this meme."""
        meme_path = '/meme/{}'.format(self.id)
        return meme_path


# Minio configuration
# The endpoint is the host[:port] part of MINIO_URL (index 2 after splitting
# on '/'); TLS is enabled when the configured URL uses https://.
minioClient = Minio(app.config['MINIO_URL'].split('/')[2],
                    access_key=app.config['MINIO_ACCESS_KEY'],
                    secret_key=app.config['MINIO_SECRET_KEY'],
                    secure='https://' in app.config['MINIO_URL'])

# Allowed image file extensions
ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg', 'gif'}


# does a file have the correct extension
def allowed_file(filename):
    """Return True when `filename` has an extension in ALLOWED_EXTENSIONS."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1].lower()
    return extension in ALLOWED_EXTENSIONS


# TODO: this is really basic CSRF that uses sessions, when adding accounts this should be replaced
@app.before_request
def csrf_protect():
Ejemplo n.º 37
0
from minio.error import ResponseError

post_policy = PostPolicy()
# set bucket name location for uploads.
post_policy.set_bucket_name('my-bucketname')
# set key prefix for all incoming uploads.
post_policy.set_key_startswith('my-objectname')
# set content length for incoming uploads.
post_policy.set_content_length_range(10, 1024)

# set expiry 10 days into future.
expires_date = datetime.utcnow()+timedelta(days=10)
post_policy.set_expires(expires_date)

client = Minio('s3.amazonaws.com',
               access_key='YOUR-ACCESSKEYID',
               secret_key='YOUR-SECRETACCESSKEY')

try:
    curl_str = 'curl -X POST my-bucketname.s3.amazonaws.com/'
    curl_cmd = [curl_str]
    signed_form_data = client.presigned_post_policy(post_policy)
    for field in signed_form_data:
        curl_cmd.append('-F {0}={1}'.format(field, signed_form_data[field]))

    # Bug fix: the file-field append and the print were previously indented
    # inside the loop, duplicating '-F file=@<FILE>' and printing the partial
    # command once per form field. They belong after the loop (cf. the other
    # presigned_post_policy example in this codebase).
    curl_cmd.append('-F file=@<FILE>')
    print(' '.join(curl_cmd))
except ResponseError as err:
    print(err)
Ejemplo n.º 38
0
 def test_default_user_agent(self):
     """A client built with defaults advertises the library's default User-Agent."""
     default_client = Minio('localhost')
     self.assertEqual(default_client._user_agent, _DEFAULT_USER_AGENT)
Ejemplo n.º 39
0
# Use Pillow for thumbnail
from PIL import Image

config.load_incluster_config()  # running in-cluster; use the pod's service account

v1 = client.CoreV1Api()

# Pull MinIO credentials from the Kubernetes secret named 'minio'.
# NOTE(review): base64.b64decode returns bytes on Python 3 — confirm the Minio
# constructor accepts bytes keys, or decode to str.
for secrets in v1.list_secret_for_all_namespaces().items:
    if secrets.metadata.name == 'minio':
        access_key = base64.b64decode(secrets.data['access_key'])
        secret_key = base64.b64decode(secrets.data['secret_key'])

# Replace the DNS below with the minio service name (helm release name -svc)
# Note: this rebinds `client` from the Kubernetes API client to the MinIO client.
client = Minio('minio-minio-svc:9000',
               access_key=access_key,
               secret_key=secret_key,
               secure=False)


def thumbnail(context):
    bucket = os.path.dirname(context['Key'])
    _, file_extension = os.path.splitext(context['Key'])
    filename = os.path.basename(context['Key'])

    print file_extension.upper()

    if file_extension.upper() != ".JPEG":
        return "Not a picture"

    if context['EventType'] == "s3:ObjectCreated:Put" and bucket == 'foobar':
Ejemplo n.º 40
0
from minio import Minio

from minio.error import ResponseError
# Plain-HTTP client against a self-hosted MinIO endpoint.
minioClient = Minio('ggtaiwanmini.117503445.top:9000',
                    access_key='admin',
                    secret_key='admin123',
                    secure=False)

# Upload an object from a local file.
# Put an object 'myobject' with contents from '/tmp/otherobject', upon success prints the etag identifier computed by server.
# Returns e.g. ('0200eb05a5d0e2a77478699a9244a941', None)
# This example uploads an image file.
try:
    print(
        minioClient.fput_object('liqiqi', 'myobjectsss.jpg',
                                'C://User//minioPythonDemo//test.jpg'))
except ResponseError as err:
    print(err)

# Put on object 'myobject.csv' with contents from
# '/tmp/otherobject.csv' as 'application/csv'.
# try:
#     print(minioClient.fput_object('mybucket', 'myobject.csv',
#                              '/tmp/otherobject.csv',
#                              content_type='application/csv'))
# except ResponseError as err:
#     print(err)
Ejemplo n.º 41
0
 def __init__(self, host, bucket, access_key, secret_key, region):
     """Connect to MinIO at `host` and verify that `bucket` exists."""
     self._bucket = bucket
     self._conn = Minio(host, access_key=access_key,
                        secret_key=secret_key, region=region)
     # Fail fast when the target bucket is missing.
     if not self._conn.bucket_exists(bucket):
         quit('Minio: Unable to connect to ' + host + '/' + bucket)
Ejemplo n.º 42
0
 def test_bucket_is_string(self):
     """remove_bucket must reject a non-string bucket name."""
     # An int bucket name should trigger a validation error.
     Minio('localhost:9000').remove_bucket(1234)
Ejemplo n.º 43
0
def check_obj_exists(s3: minio.Minio, bucket_name, object_name):
    """Stat `object_name` in `bucket_name`; return the stat result, or False if missing."""
    try:
        stat_result = s3.stat_object(bucket_name, object_name)
    except minio.error.S3Error:
        return False
    return stat_result
# Placeholder configuration for STS AssumeRole; replace each value.
role_arn = "ROLE-ARN"

# Role session name if available.
role_session_name = "ROLE-SESSION-NAME"

# External ID if available.
external_id = "EXTERNAL-ID"

# Policy if available.
policy = "POLICY"

# Region if available.
region = "REGION"

# sts_endpoint, access_key and secret_key are defined earlier in this script.
provider = AssumeRoleProvider(
    sts_endpoint,
    access_key,
    secret_key,
    policy=policy,
    region=region,
    role_arn=role_arn,
    role_session_name=role_session_name,
    external_id=external_id,
)

# The client authenticates with the credentials supplied by the provider.
client = Minio("MINIO-HOST:MINIO-PORT", credentials=provider)

# Get information of an object.
stat = client.stat_object("my-bucket", "my-object")
print(stat)
Ejemplo n.º 45
0
class CosClient(LoggingConfigurable):
    """Thin wrapper around a MinIO client bound to one object-storage bucket."""
    client = None

    def __init__(self,
                 config=None,
                 endpoint=None,
                 access_key=None,
                 secret_key=None,
                 bucket=None):
        """
        Configure the connection either from a metadata `config` object
        (cos_endpoint / cos_username / cos_password / cos_bucket keys) or
        from the individual endpoint/credential/bucket arguments.
        """
        super().__init__()
        if config:
            self.endpoint = urlparse(config.metadata['cos_endpoint'])
            self.access_key = config.metadata['cos_username']
            self.secret_key = config.metadata['cos_password']
            self.bucket = config.metadata['cos_bucket']
        else:
            self.endpoint = urlparse(endpoint)
            self.access_key = access_key
            self.secret_key = secret_key
            self.bucket = bucket
        # Infer secure from the endpoint's scheme.
        self.secure = self.endpoint.scheme == 'https'

        self.client = self.__initialize_object_store()

    def __initialize_object_store(self):
        """Create the MinIO client and ensure the configured bucket exists."""
        # Initialize minioClient with an endpoint and access/secret keys.
        self.client = Minio(endpoint=self.endpoint.netloc,
                            access_key=self.access_key,
                            secret_key=self.secret_key,
                            secure=self.secure)

        # Make a bucket with the make_bucket API call.
        # Bug fix: these handlers used `raise ex from ex`, which sets the
        # exception as its own __cause__; a bare `raise` re-raises cleanly
        # with the original traceback.
        try:
            if not self.client.bucket_exists(self.bucket):
                self.client.make_bucket(self.bucket)
        except BucketAlreadyOwnedByYou:
            self.log.warning("Object Storage bucket already owned by you",
                             exc_info=True)
            raise
        except BucketAlreadyExists:
            self.log.warning("Object Storage bucket already exists",
                             exc_info=True)
            raise
        except ResponseError:
            self.log.error("Object Storage error", exc_info=True)
            raise
        except SignatureDoesNotMatch:
            self.log.error("Incorrect Object Storage credentials supplied")
            raise

        return self.client

    def upload_file(self, file_name, file_path):
        """
        Uploads contents from a file, located on the local filesystem at `file_path`,
        as `file_name` in object storage.
        :param file_name: Name of the file object in object storage
        :param file_path: Path on the local filesystem from which object data will be read.
        :return:
        """

        try:
            self.client.fput_object(bucket_name=self.bucket,
                                    object_name=file_name,
                                    file_path=file_path)
        except BaseException:
            self.log.error('Error uploading file {} to bucket {}'.format(
                file_path, self.bucket),
                           exc_info=True)
            raise

    def upload_file_to_dir(self, dir, file_name, file_path):
        """
        Uploads contents from a file, located on the local filesystem at `file_path`,
        as `file_name` in object storage.
        :param dir: the directory where the file should be uploaded to
        :param file_name: Name of the file object in object storage
        :param file_path: Path on the local filesystem from which object data will be read.
        :return:
        """
        self.upload_file(os.path.join(dir, file_name), file_path)

    def download_file(self, file_name, file_path):
        """
        Downloads and saves the object as a file in the local filesystem.
        :param file_name: Name of the file object in object storage
        :param file_path: Path on the local filesystem to which the object data will be written.
        :return:
        """
        try:
            self.client.fget_object(bucket_name=self.bucket,
                                    object_name=file_name,
                                    file_path=file_path)
        except BaseException:
            self.log.error('Error reading file {} from bucket {}'.format(
                file_name, self.bucket),
                           exc_info=True)
            raise

    def download_file_from_dir(self, dir, file_name, file_path):
        """
        Downloads and saves the object as a file in the local filesystem.
        :param dir: the directory where the file is located
        :param file_name: Name of the file object in object storage
        :param file_path: Path on the local filesystem to which the object data will be written.
        :return:
        """

        self.download_file(os.path.join(dir, file_name), file_path)
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# A Chain credentials provider, provides a way of chaining multiple providers
# together and will pick the first available using priority order of the
# 'providers' list

from minio import Minio
from minio.credentials import (AWSConfigProvider, ChainedProvider,
                               EnvAWSProvider, IamAwsProvider)

# Chain providers: the first one that yields credentials wins — IAM role,
# then ~/.aws config file, then AWS_* environment variables.
client = Minio(
    's3.amazonaws.com',
    credentials=ChainedProvider(
        [
            IamAwsProvider(),
            AWSConfigProvider(),
            EnvAWSProvider(),
        ]
    )
)

# Get information of an object.
stat = client.stat_object("my-bucketname", "my-objectname")
print(stat)
Ejemplo n.º 47
0
from minio import Minio

MINIO_URL = os.environ.get('MINIO_URL')  # MinIO endpoint; None when the variable is unset


@contextlib.contextmanager
def nostdout():
    """Temporarily silence writes to sys.stdout inside a `with` block.

    Bug fixes: the original had no try/finally, so an exception raised in the
    body left stdout permanently redirected; and it used io.BytesIO, which
    rejects the str data that print() writes on Python 3.
    """
    save_stdout = sys.stdout
    sys.stdout = io.StringIO()  # text-mode sink compatible with print()
    try:
        yield
    finally:
        sys.stdout = save_stdout


# Plain-HTTP MinIO client using credentials from the environment.
minioClient = Minio(MINIO_URL,
                    access_key=os.environ.get('MINIO_ACCESS_KEY'),
                    secret_key=os.environ.get('MINIO_SECRET_KEY'),
                    secure=False)

caffe.set_mode_cpu()

# Select desired model
# NOTE(review): paths are relative — presumably the script runs from the
# directory containing ./models and ./resources; confirm the working dir.
net = caffe.Net('./models/colorization_deploy_v2.prototxt',
                './models/colorization_release_v2.caffemodel', caffe.TEST)

(H_in, W_in) = net.blobs['data_l'].data.shape[2:]  # get input shape
(H_out, W_out) = net.blobs['class8_ab'].data.shape[2:]  # get output shape

pts_in_hull = np.load('./resources/pts_in_hull.npy')  # load cluster centers
net.params['class8_ab'][0].data[:, :, 0, 0] = pts_in_hull.transpose(
    (1, 0))  # populate cluster centers as 1x1 convolution kernel
Ejemplo n.º 48
0
from minio.error import ResponseError

post_policy = PostPolicy()
# set bucket name location for uploads.
post_policy.set_bucket_name('my-bucketname')
# set key prefix for all incoming uploads.
post_policy.set_key_startswith('my-objectname')
# set content length for incoming uploads.
post_policy.set_content_length_range(10, 1024)

# set expiry 10 days into future.
expires_date = datetime.utcnow()+timedelta(days=10)
post_policy.set_expires(expires_date)

client = Minio('s3.amazonaws.com',
               access_key='YOUR-ACCESSKEYID',
               secret_key='YOUR-SECRETACCESSKEY')

try:
    # presigned_post_policy returns the upload URL plus the signed form fields.
    url_str, signed_form_data = client.presigned_post_policy(post_policy)
    curl_str = 'curl -X POST {0}'.format(url_str)
    curl_cmd = [curl_str]
    for field in signed_form_data:
        curl_cmd.append('-F {0}={1}'.format(field, signed_form_data[field]))

    # print curl command to upload files.
    curl_cmd.append('-F file=@<FILE>')
    print(' '.join(curl_cmd))
except ResponseError as err:
    print(err)
Ejemplo n.º 49
0
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Note: YOUR-ACCESSKEYID, YOUR-SECRETACCESSKEY and my-bucketname are
# dummy values, please replace them with original values.

from minio import Minio
from minio.commonconfig import ENABLED, Filter
from minio.lifecycleconfig import Expiration, LifecycleConfig, Rule

# play.min.io is MinIO's public sandbox; these are its published demo credentials.
client = Minio(
    "play.min.io",
    access_key="Q3AM3UQ867SPQQA43P2F",
    secret_key="zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG",
)

# Lifecycle rule: expire objects under the "logs/" prefix 365 days after creation.
config = LifecycleConfig([
    Rule(
        ENABLED,
        rule_filter=Filter(prefix="logs/"),
        rule_id="rule2",
        expiration=Expiration(days=365),
    ),
], )
client.set_bucket_lifecycle("my-bucketname", config)
Ejemplo n.º 50
0
    filename = data_url
    response = requests.get(data_url, allow_redirects=True)
    if data_url.find('/'):
        filename = data_url.rsplit('/', 1)[1]

    open(filename, 'wb').write(response.content)
    ''' Read data with Spark SQL '''
    spark = SparkSession.builder.getOrCreate()
    df_data = spark.read.csv(path=filename,
                             sep=",",
                             header=True,
                             inferSchema=True)
    df_data.head()
    ''' Upload data to Cloud object storage '''
    cos = Minio(cos_endpoint,
                access_key=cos_access_key,
                secret_key=cos_secret_key,
                secure=True)

    if not cos.bucket_exists(cos_bucket_name):
        try:
            cos.make_bucket(cos_bucket_name)
        except ResponseError as err:
            print(err)

    cos.fput_object(cos_bucket_name, filename, filename)

    print('Data ' + filename + ' is uploaded to bucket at ' + cos_bucket_name)
    with open("/tmp/filename", "w") as report:
        report.write(filename)

    df_data.printSchema()
Ejemplo n.º 51
0
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import os, time
import sys
import requests
from minio import Minio
from minio.error import ResponseError
import json

MINIO_URL = os.environ.get('MINIO_URL', 'minionas.uvadcos.io/')  # endpoint; note default ends with '/'
MINIO_ACCESS_KEY = os.environ.get('MINIO_ACCESS_KEY')
MINIO_SECRET = os.environ.get('MINIO_SECRET')

ORS_URL = os.environ.get("ORS_URL", "http://mds.ors/")

# NOTE(review): the Minio client expects host[:port] without a trailing
# slash — confirm the deployed MINIO_URL value matches that form.
minioClient = Minio(MINIO_URL,
                    access_key=MINIO_ACCESS_KEY,
                    secret_key=MINIO_SECRET,
                    secure=False)


def get_distribution(id):
    """Validates that given identifier exists in Mongo.
        Returns location in minio. """
    if isinstance(id, list):
        locations = []
        names = []
        for i in id:
            location, name = get_distribution(i)
            if location == '':
                continue
            locations.append(location)
            names.append(name)
Ejemplo n.º 52
0
def main():
    """
    Functional testing of minio python library.

    End-to-end smoke test against s3.amazonaws.com: bucket create/exists/list,
    object put/fput/copy/stat/get/fget, presigned GET/PUT URLs, a POST policy,
    bucket policy get/set, multi-object delete, and final cleanup.
    Credentials come from the ACCESS_KEY / SECRET_KEY environment variables.
    """
    fake = Factory.create()
    client = Minio('s3.amazonaws.com', os.getenv('ACCESS_KEY'),
                   os.getenv('SECRET_KEY'))

    _http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED',
                                ca_certs=certifi.where())

    # Get unique bucket_name, object_name.
    bucket_name = uuid.uuid4().__str__()
    object_name = uuid.uuid4().__str__()

    # Enable trace
    # client.trace_on(sys.stderr)

    # Make a new bucket.
    bucket_name = 'minio-pytest'

    print(client.make_bucket(bucket_name))
    print(client.make_bucket(bucket_name + '.unique', location='us-west-1'))

    # Check that the server returns a valid error code for a duplicate bucket.
    try:
        client.make_bucket(bucket_name + '.unique', location='us-west-1')
    except ResponseError as err:
        if str(err.code) in ['BucketAlreadyOwnedByYou', 'BucketAlreadyExists']:
            pass
        else:
            raise

    # Check if bucket was created properly.
    print(client.bucket_exists(bucket_name))
    print(client.bucket_exists(bucket_name + '.unique'))

    # List all buckets.
    buckets = client.list_buckets()
    for bucket in buckets:
        print(bucket.name, bucket.creation_date)

    with open('testfile', 'wb') as file_data:
        file_data.write(fake.text().encode('utf-8'))
    file_data.close()  # redundant: the with-block already closed the file

    # Put a file
    file_stat = os.stat('testfile')
    with open('testfile', 'rb') as file_data:
        client.put_object(bucket_name, object_name, file_data,
                          file_stat.st_size)
    file_data.close()  # redundant: the with-block already closed the file

    # Fput a file
    print(client.fput_object(bucket_name, object_name + '-f', 'testfile'))

    # Copy a file
    print(
        client.copy_object(bucket_name, object_name + '-copy',
                           '/' + bucket_name + '/' + object_name + '-f'))

    # A copy guarded by a non-matching ETag precondition must fail.
    try:
        copy_conditions = CopyConditions()
        copy_conditions.set_match_etag('test-etag')
        print(
            client.copy_object(bucket_name, object_name + '-copy',
                               '/' + bucket_name + '/' + object_name + '-f',
                               copy_conditions))
    except ResponseError as err:
        if err.code != 'PreconditionFailed':
            raise
        if err.message != 'At least one of the pre-conditions you specified did not hold':
            raise

    # Fetch stats on your object.
    print(client.stat_object(bucket_name, object_name))

    # Fetch stats on your object.
    print(client.stat_object(bucket_name, object_name + '-f'))

    # Fetch stats on your object.
    print(client.stat_object(bucket_name, object_name + '-copy'))

    # Get a full object
    object_data = client.get_object(bucket_name, object_name)
    with open('newfile', 'wb') as file_data:
        for data in object_data:
            file_data.write(data)
    file_data.close()  # redundant: the with-block already closed the file

    # Get a full object locally.
    print(client.fget_object(bucket_name, object_name, 'newfile-f'))

    # List all object paths in bucket.
    print("Listing using ListObjects API")
    objects = client.list_objects(bucket_name, recursive=True)
    for obj in objects:
        print(obj.bucket_name, obj.object_name, obj.last_modified, \
            obj.etag, obj.size, obj.content_type)

    # List all object paths in bucket using V2 API.
    print("Listing using ListObjectsV2 API")
    objects = client.list_objects_v2(bucket_name, recursive=True)
    for obj in objects:
        print(obj.bucket_name, obj.object_name, obj.last_modified, \
            obj.etag, obj.size, obj.content_type)

    # A presigned GET URL must be directly fetchable.
    presigned_get_object_url = client.presigned_get_object(
        bucket_name, object_name)
    response = _http.urlopen('GET', presigned_get_object_url)
    if response.status != 200:
        response_error = ResponseError(response)
        raise response_error.get(bucket_name, object_name)

    # A presigned PUT URL must accept an upload without further credentials.
    presigned_put_object_url = client.presigned_put_object(
        bucket_name, object_name)
    value = fake.text().encode('utf-8')
    data = io.BytesIO(value).getvalue()
    response = _http.urlopen('PUT', presigned_put_object_url, body=data)
    if response.status != 200:
        response_error = ResponseError(response)
        raise response_error.put(bucket_name, object_name)

    # Round trip: the bytes uploaded via the presigned URL must read back equal.
    object_data = client.get_object(bucket_name, object_name)
    if object_data.read() != value:
        raise ValueError('Bytes not equal')

    # Post policy.
    policy = PostPolicy()
    policy.set_bucket_name(bucket_name)
    policy.set_key_startswith('objectPrefix/')

    expires_date = datetime.utcnow() + timedelta(days=10)
    policy.set_expires(expires_date)
    print(client.presigned_post_policy(policy))

    # Remove an object.
    print(client.remove_object(bucket_name, object_name))
    print(client.remove_object(bucket_name, object_name + '-f'))
    print(client.remove_object(bucket_name, object_name + '-copy'))

    policy_name = client.get_bucket_policy(bucket_name)
    if policy_name != Policy.NONE:
        raise ValueError('Policy name is invalid ' + policy_name)

    # Set read-write policy successfully.
    client.set_bucket_policy(bucket_name, '', Policy.READ_WRITE)

    # Reset policy to NONE.
    client.set_bucket_policy(bucket_name, '', Policy.NONE)

    # Validate if the policy is reverted back to NONE.
    policy_name = client.get_bucket_policy(bucket_name)
    if policy_name != Policy.NONE:
        raise ValueError('Policy name is invalid ' + policy_name)

    # Upload some new objects to prepare for multi-object delete test.
    print("Prepare for remove_objects() test.")
    object_names = []
    for i in range(10):
        curr_object_name = object_name + "-{}".format(i)
        print("object-name: {}".format(curr_object_name))
        print(client.fput_object(bucket_name, curr_object_name, "testfile"))
        object_names.append(curr_object_name)

    # delete the objects in a single library call.
    print("Performing remove_objects() test.")
    # NOTE(review): remove_objects appears to return an iterable of errors —
    # any iteration implies at least one delete failed; confirm with SDK docs.
    del_errs = client.remove_objects(bucket_name, object_names)
    had_errs = False
    for del_err in del_errs:
        had_errs = True
        print("Err is {}".format(del_err))
    if had_errs:
        print("remove_objects() FAILED - it had unexpected errors.")
    else:
        print("remove_objects() worked as expected.")

    # Remove a bucket. This operation will only work if your bucket is empty.
    print(client.remove_bucket(bucket_name))
    print(client.remove_bucket(bucket_name + '.unique'))

    # Remove temporary files.
    os.remove('testfile')
    os.remove('newfile')
    os.remove('newfile-f')
Ejemplo n.º 53
0
def test_create_minio_bucket():
    """Test if buckets can be put into the Minio storage.

    Creates the two test buckets (package-level and EPV-level data),
    uploads the local JSON fixtures into them, and asserts both buckets
    exist afterwards.
    """
    # Bucket names used by the test fixtures.
    config.AWS_PKG_BUCKET = "test-bayesian-core-package-data"
    config.AWS_EPV_BUCKET = "test-bayesian-core-data"

    minio_client = Minio(config.LOCAL_MINIO_ENDPOINT,
                         access_key=config.MINIO_ACCESS_KEY,
                         secret_key=config.MINIO_SECRET_KEY,
                         secure=False)

    # Create each bucket in its own try-block so that an already-existing
    # first bucket does not skip creation of the second one (the original
    # single try-block had that flaw).
    for bucket in (config.AWS_EPV_BUCKET, config.AWS_PKG_BUCKET):
        try:
            minio_client.make_bucket(bucket, location="us-east-1")
        except (BucketAlreadyOwnedByYou, BucketAlreadyExists):
            # Re-using an existing bucket is fine for this test.
            pass
        except ResponseError as err:
            logger.error(err)

    # Every local fixture lives at test/data/S3-data/<object-key>, so the
    # object key doubles as the relative file path.
    fixtures = [
        (config.AWS_PKG_BUCKET, 'pypi/access_points/github_details.json'),
        (config.AWS_PKG_BUCKET, 'pypi/access_points/libraries_io.json'),
        (config.AWS_EPV_BUCKET, 'pypi/access_points/0.4.59/code_metrics.json'),
        (config.AWS_EPV_BUCKET,
         'pypi/access_points/0.4.59/security_issues.json'),
        (config.AWS_EPV_BUCKET,
         'pypi/access_points/0.4.59/source_licenses.json'),
        (config.AWS_EPV_BUCKET, 'pypi/access_points/0.4.59/metadata.json'),
        (config.AWS_EPV_BUCKET, 'pypi/access_points/0.4.59.json'),
    ]
    try:
        for bucket, key in fixtures:
            minio_client.fput_object(bucket, key, 'test/data/S3-data/' + key)
    except ResponseError as err:
        logger.error(err)

    assert minio_client.bucket_exists(config.AWS_PKG_BUCKET)
    assert minio_client.bucket_exists(config.AWS_EPV_BUCKET)
Ejemplo n.º 54
0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Note: YOUR-ACCESSKEYID, YOUR-SECRETACCESSKEY and my-bucketname are
# dummy values, please replace them with original values.

from minio import Minio
from minio.error import ResponseError
from minio.policy import Policy

# Placeholder credentials (see the note above) — replace before running.
client = Minio('s3.amazonaws.com',
               access_key='YOUR-ACCESSKEYID',
               secret_key='YOUR-SECRETACCESSKEY')

# Apply canned access policies to a bucket at different prefixes.
# NOTE(review): this example is truncated here — the ``try`` below has no
# matching ``except`` clause within this chunk.
try:
    # Set policy Policy.READ_ONLY to bucket 'my-bucketname' which
    # enables 'my-bucketname' readable by everyone.
    client.set_bucket_policy('my-bucketname', '', Policy.READ_ONLY)

    # Set policy Policy.READ_WRITE to bucket 'my-bucketname' and
    # prefix 'public-folder/' which enables
    # 'my-bucketname/public-folder/' read/writeable by everyone.
    client.set_bucket_policy('my-bucketname', 'public-folder/',
                             Policy.READ_WRITE)

    # Set policy Policy.WRITE_ONLY to bucket 'my-bucketname' and
Ejemplo n.º 55
0
# -*- coding: utf-8 -*-
# Minio Python Library for Amazon S3 Compatible Cloud Storage, (C) 2015 Minio, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Note: YOUR-ACCESSKEYID and YOUR-SECRETACCESSKEY are
# dummy values, please replace them with original values.

from minio import Minio

# Placeholder credentials — substitute real keys before running.
client = Minio('s3.amazonaws.com',
               access_key='YOUR-ACCESSKEYID',
               secret_key='YOUR-SECRETACCESSKEY')

# Print the name and creation time of every bucket owned by this account.
for bucket in client.list_buckets():
    print(bucket.name, bucket.creation_date)
Ejemplo n.º 56
0
import os
import time
import openpyxl
from minio import Minio
from minio.error import ResponseError

# MinIO endpoint under test (demo credentials, plain HTTP).
minio_client = Minio("192.168.8.106:9000",
                     access_key="AKIAIOSFODNN7EXAMPLE",
                     secret_key="wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
                     secure=False)

# One worksheet per direction: uploads on sheet 0, downloads on sheet 1.
excel_client = openpyxl.Workbook()
excel_sheet1 = excel_client.create_sheet(index=0, title="上传文件测试结果")
excel_sheet2 = excel_client.create_sheet(index=1, title="下载文件测试结果")
excel_row = ['A', 'B', 'C', 'D']

# Both sheets get the same header row: service name plus three payload sizes.
for _sheet in (excel_sheet1, excel_sheet2):
    for _cell, _label in zip(('A1', 'B1', 'C1', 'D1'),
                             ("对象服务", "0.7 MB", "5.88 MB", "88.9 MB")):
        _sheet[_cell] = _label

def upload(object_name, file_path):
    try:
        minio_client.fput_object("test1",
                                 object_name,
                                 file_path,
Ejemplo n.º 57
0
 def test_set_app_info(self):
     """set_app_info must append ' <name>/<version>' to the user agent."""
     app_name = 'hello'
     client = Minio('localhost')
     client.set_app_info(app_name, minio_version)
     self.assertEqual(
         _DEFAULT_USER_AGENT + ' ' + app_name + '/' + minio_version,
         client._user_agent)
Ejemplo n.º 58
0
 def test_remove_bucket_invalid_name(self):
     """An uppercase bucket name is invalid and must be rejected."""
     minio_client = Minio('localhost:9000')
     minio_client.remove_bucket('ABCD')
Ejemplo n.º 59
0
# -*- coding: utf-8 -*-
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Note: YOUR-ACCESSKEYID, YOUR-SECRETACCESSKEY and my-bucketname are
# dummy values, please replace them with original values.

from minio import Minio
from minio.error import ResponseError

# Placeholder credentials — substitute real keys before running.
_ENDPOINT = 's3.amazonaws.com'

client = Minio(_ENDPOINT,
               access_key='YOUR-ACCESSKEYID',
               secret_key='YOUR-SECRETACCESSKEY')

# Create the bucket; an S3 error (e.g. the name is already taken) is
# reported rather than propagated.
try:
    client.make_bucket('my-bucketname')
except ResponseError as err:
    print(err)
Ejemplo n.º 60
0
 def test_set_app_info_requires_non_empty_version(self):
     """An empty app version string must raise ValueError."""
     minio_client = Minio('localhost:9000')
     with self.assertRaises(ValueError):
         minio_client.set_app_info('hello', '')