Beispiel #1
0
    def test_put_file_to_key(self):
        """Verify put_file_to_key stores file contents under the given key.

        Exercises both calling conventions -- passing a filename string and
        passing an open file object -- each with the trailing boolean flag
        False and True.  ACL checks are skipped (moto does not support them).
        """
        # TODO: Test callback

        #
        # Using filename
        #

        with self.setUpTearDown():
            bucket = self.s3.get_bucket(DOWNLOAD_SITE_BUCKET)
            put_file_to_key(bucket, 'testkey1', self.temp_file_path, False)
            key = bucket.get_key('testkey1')
            self.assertEqual(key.get_contents_as_string(), 'I am a file')
            put_file_to_key(bucket, 'testkey2', self.temp_file_path, True)
            key = bucket.get_key('testkey2')
            self.assertEqual(key.get_contents_as_string(), 'I am a file')
            # moto doesn't support ACL stuff
            # bucket.get_acl('testkey')

        #
        # Using file object
        #

        # NOTE(review): presumably a file object can only be consumed once,
        # which is why each file-object case opens the file afresh -- confirm.
        with self.setUpTearDown(), open(self.temp_file_path, 'r') as file:
            bucket = self.s3.get_bucket(DOWNLOAD_SITE_BUCKET)
            put_file_to_key(bucket, 'testkey1', file, False)
            key = bucket.get_key('testkey1')
            self.assertEqual(key.get_contents_as_string(), 'I am a file')
            # moto doesn't support ACL stuff
            # bucket.get_acl('testkey')

        with self.setUpTearDown(), open(self.temp_file_path, 'r') as file:
            bucket = self.s3.get_bucket(DOWNLOAD_SITE_BUCKET)
            put_file_to_key(bucket, 'testkey2', file, True)
            key = bucket.get_key('testkey2')
            self.assertEqual(key.get_contents_as_string(), 'I am a file')
    def test_put_file_to_key(self):
        """Check that put_file_to_key round-trips file contents to S3.

        Covers the filename-string and open-file-object input forms, each
        with the final boolean argument both False and True.
        """
        # TODO: Test callback

        #
        # Using filename
        #

        with self.setUpTearDown():
            bucket = self.s3.get_bucket(DOWNLOAD_SITE_BUCKET)
            put_file_to_key(bucket, 'testkey1', self.temp_file_path, False)
            key = bucket.get_key('testkey1')
            self.assertEqual(key.get_contents_as_string(), 'I am a file')
            put_file_to_key(bucket, 'testkey2', self.temp_file_path, True)
            key = bucket.get_key('testkey2')
            self.assertEqual(key.get_contents_as_string(), 'I am a file')
            # moto doesn't support ACL stuff
            # bucket.get_acl('testkey')

        #
        # Using file object
        #

        with self.setUpTearDown(), open(self.temp_file_path, 'r') as file:
            bucket = self.s3.get_bucket(DOWNLOAD_SITE_BUCKET)
            put_file_to_key(bucket, 'testkey1', file, False)
            key = bucket.get_key('testkey1')
            self.assertEqual(key.get_contents_as_string(), 'I am a file')
            # moto doesn't support ACL stuff
            # bucket.get_acl('testkey')

        with self.setUpTearDown(), open(self.temp_file_path, 'r') as file:
            bucket = self.s3.get_bucket(DOWNLOAD_SITE_BUCKET)
            put_file_to_key(bucket, 'testkey2', file, True)
            key = bucket.get_key('testkey2')
            self.assertEqual(key.get_contents_as_string(), 'I am a file')
Beispiel #3
0
 def get_size(self, path):
     """Return the size in bytes of the object stored at *path*.

     Raises OSError when no object exists at that path.
     """
     resolved = self._init_path(path)
     # lookup() only issues a HEAD request, so a missing object is None here.
     key = self._s3_bucket.lookup(resolved)
     if not key:
         raise OSError('No such key: \'{0}\''.format(resolved))
     # Reading the body once makes moto report key.size correctly.
     key.get_contents_as_string()
     return key.size
    def test_put_string_to_key(self):
        """Verify put_string_to_key writes content and skips unchanged writes.

        The final section compares last-modified timestamps: re-uploading
        identical content must leave the key untouched, while different
        content must update it.  The one-second sleeps ensure the
        timestamps are able to differ between uploads.
        """
        import time

        # TODO: Test callback
        with self.setUpTearDown():
            bucket = self.s3.get_bucket(DOWNLOAD_SITE_BUCKET)
            put_string_to_key(bucket, 'testkey', 'testcontent', False)
            key = bucket.get_key('testkey')
            self.assertEqual(key.get_contents_as_string(), 'testcontent')
            # moto doesn't support ACL stuff
            # bucket.get_acl('testkey')

        with self.setUpTearDown():
            bucket = self.s3.get_bucket(DOWNLOAD_SITE_BUCKET)
            put_string_to_key(bucket, 'testkey', 'testcontent', True)
            key = bucket.get_key('testkey')
            self.assertEqual(key.get_contents_as_string(), 'testcontent')
            # moto doesn't support ACL stuff
            # bucket.get_acl('testkey')

        #
        # Changed vs. unchanged content
        #

        with self.setUpTearDown():
            bucket = self.s3.get_bucket(DOWNLOAD_SITE_BUCKET)

            # Write something
            put_string_to_key(bucket, 'testkey', 'testcontent', True)
            key = bucket.get_key('testkey')
            self.assertEqual(key.get_contents_as_string(), 'testcontent')
            # Record the modified time
            last_modified_1 = key.last_modified

            time.sleep(1)

            # Try to write the same thing
            put_string_to_key(bucket, 'testkey', 'testcontent', True)
            key = bucket.get_key('testkey')
            # Should be no change
            self.assertEqual(key.last_modified, last_modified_1)

            time.sleep(1)

            # Write a different thing
            put_string_to_key(bucket, 'testkey', 'testcontent new', True)
            key = bucket.get_key('testkey')
            # Should be changed
            self.assertNotEqual(key.last_modified, last_modified_1)
Beispiel #5
0
    def test_put_string_to_key(self):
        """Check put_string_to_key round-trips content and detects no-op writes.

        Uploads with the boolean flag both False and True, then verifies
        via last_modified that an identical re-upload does not rewrite the
        key while changed content does.
        """
        import time

        # TODO: Test callback
        with self.setUpTearDown():
            bucket = self.s3.get_bucket(DOWNLOAD_SITE_BUCKET)
            put_string_to_key(bucket, 'testkey', 'testcontent', False)
            key = bucket.get_key('testkey')
            self.assertEqual(key.get_contents_as_string(), 'testcontent')
            # moto doesn't support ACL stuff
            # bucket.get_acl('testkey')

        with self.setUpTearDown():
            bucket = self.s3.get_bucket(DOWNLOAD_SITE_BUCKET)
            put_string_to_key(bucket, 'testkey', 'testcontent', True)
            key = bucket.get_key('testkey')
            self.assertEqual(key.get_contents_as_string(), 'testcontent')
            # moto doesn't support ACL stuff
            # bucket.get_acl('testkey')

        #
        # Changed vs. unchanged content
        #

        with self.setUpTearDown():
            bucket = self.s3.get_bucket(DOWNLOAD_SITE_BUCKET)

            # Write something
            put_string_to_key(bucket, 'testkey', 'testcontent', True)
            key = bucket.get_key('testkey')
            self.assertEqual(key.get_contents_as_string(), 'testcontent')
            # Record the modified time
            last_modified_1 = key.last_modified

            # Sleep so a rewritten key would get a visibly newer timestamp.
            time.sleep(1)

            # Try to write the same thing
            put_string_to_key(bucket, 'testkey', 'testcontent', True)
            key = bucket.get_key('testkey')
            # Should be no change
            self.assertEqual(key.last_modified, last_modified_1)

            time.sleep(1)

            # Write a different thing
            put_string_to_key(bucket, 'testkey', 'testcontent new', True)
            key = bucket.get_key('testkey')
            # Should be changed
            self.assertNotEqual(key.last_modified, last_modified_1)
Beispiel #6
0
def s3_iter_bucket_process_key(key):
    """Fetch one key's contents and return the ``(key, contents)`` pair.

    Logically belongs to `s3_iter_bucket`, but must live at module level
    so that multiprocessing can pickle it.
    """
    contents = key.get_contents_as_string()
    return key, contents
Beispiel #7
0
def s3_iter_bucket_process_key(key):
    """Download *key* and pair it with its body.

    Kept at module scope (rather than nested inside `s3_iter_bucket`)
    to stay picklable for worker pools.
    """
    return (key, key.get_contents_as_string())
Beispiel #8
0
 def get_archive(self):
     """Return the cached archive dict, lazily loading it from S3.

     The archive is fetched only when no cipher is present and no records
     have been loaded yet.  A missing S3 object (404) is treated as
     "start a new archive" rather than as an error; other S3 errors are
     swallowed silently, preserving the original best-effort behaviour.
     """
     # Bug fix: the fallback for a missing 'records' entry was 0, which made
     # len(0) raise TypeError; an empty list keeps the intended semantics.
     if ('cipher' not in self.__archive__) and (len(self.__archive__.get('records', [])) == 0):
         try:
             key = boto.s3.key.Key(self.__bucket__, self.__filename__)
             self.__archive__ = json.loads(key.get_contents_as_string())
         except boto.exception.S3ResponseError as e:
             if ('404' in [str(e.status), str(e.error_code)]):
                 logging.warning("{} not found in S3; creating a new archive...".format(self.__filename__))
     return self.__archive__
Beispiel #9
0
def get_latest_version(package):
    """Return the latest release version of *package*.

    Reads the ``<package>/LATEST`` object from the gearbox S3 bucket and
    raises RuntimeError when it cannot be fetched.
    """
    connection = boto.connect_s3()
    latest_key = boto.s3.key.Key(connection.get_bucket(bucket_name))
    latest_key.key = '{0}/LATEST'.format(package)
    try:
        return latest_key.get_contents_as_string()
    except boto.exception.S3ResponseError:
        raise RuntimeError('Can\'t find the LATEST in S3 file for {0}, are you sure you have the right package?'.format(package))
Beispiel #10
0
    def get_content(self, path):
        """Return the raw contents stored under *path*.

        Raises IOError when the key does not exist, preserving the
        historical interface of this storage driver.
        """
        self._initialize_cloud_conn()
        full_path = self._init_path(path)
        key = self._key_class(self._cloud_bucket, full_path)
        try:
            return key.get_contents_as_string()
        except S3ResponseError as err:
            # Translate "key missing" into IOError; re-raise anything else.
            if err.error_code == "NoSuchKey":
                raise IOError("No such key: '{0}'".format(full_path))

            raise
Beispiel #11
0
    def test_put_string_to_key_in_bucket(self):
        """Verify put_string_to_key_in_bucket for old- and new-style bucket ids.

        Old-style ids are read back from a bucket named after the id; new-style
        ids are read back from a prefixed key inside DOWNLOAD_SITE_BUCKET --
        TODO confirm that mapping against put_string_to_key_in_bucket itself.
        """
        with self.setUpTearDown():
            put_string_to_key_in_bucket(self.aws_creds,
                                        self.old_style_bucket_id, 'testkey',
                                        'testcontent', False)
            key = self.s3.get_bucket(
                self.old_style_bucket_id).get_key('testkey')
            self.assertEqual(key.get_contents_as_string(), 'testcontent')
            # moto doesn't support ACL stuff
            # self.s3.get_bucket(self.old_style_bucket_id).get_acl('testkey')

        with self.setUpTearDown():
            put_string_to_key_in_bucket(self.aws_creds,
                                        self.old_style_bucket_id, 'testkey',
                                        'testcontent', True)
            key = self.s3.get_bucket(
                self.old_style_bucket_id).get_key('testkey')
            self.assertEqual(key.get_contents_as_string(), 'testcontent')
            # moto doesn't support ACL stuff
            # self.s3.get_bucket(self.old_style_bucket_id).get_acl('testkey')

        with self.setUpTearDown():
            put_string_to_key_in_bucket(self.aws_creds,
                                        self.new_style_bucket_id, 'testkey',
                                        'testcontent', False)
            # NOTE(review): the lookup path below is built from
            # self.old_style_bucket_id although the write above used
            # self.new_style_bucket_id -- verify this is intentional.
            key = self.s3.get_bucket(DOWNLOAD_SITE_BUCKET)\
                         .get_key('%s/%s/testkey' % (DOWNLOAD_SITE_PREFIX, self.old_style_bucket_id))
            self.assertEqual(key.get_contents_as_string(), 'testcontent')
            # moto doesn't support ACL stuff
            # self.s3.get_bucket(self.old_style_bucket_id).get_acl('testkey')

        with self.setUpTearDown():
            put_string_to_key_in_bucket(self.aws_creds,
                                        self.new_style_bucket_id, 'testkey',
                                        'testcontent', True)
            key = self.s3.get_bucket(DOWNLOAD_SITE_BUCKET)\
                         .get_key('%s/%s/testkey' % (DOWNLOAD_SITE_PREFIX, self.old_style_bucket_id))
            self.assertEqual(key.get_contents_as_string(), 'testcontent')
Beispiel #12
0
def get_latest_version(package):
    """Fetch the ``<package>/LATEST`` object from S3 and return its contents.

    Raises RuntimeError when the LATEST file cannot be retrieved.
    """
    conn = boto.connect_s3()
    target_bucket = conn.get_bucket(bucket_name)
    version_key = boto.s3.key.Key(target_bucket)
    version_key.key = '{0}/LATEST'.format(package)
    try:
        return version_key.get_contents_as_string()
    except boto.exception.S3ResponseError:
        message = ('Can\'t find the LATEST in S3 file for {0}, are you sure you have the right package?'
                   .format(package))
        raise RuntimeError(message)
Beispiel #13
0
    def test_put_string_to_key_in_bucket(self):
        """Exercise put_string_to_key_in_bucket with both bucket-id styles.

        Each case writes 'testcontent' and reads it back; the flag argument
        is covered both False and True for each id style.
        """
        with self.setUpTearDown():
            put_string_to_key_in_bucket(self.aws_creds,
                                        self.old_style_bucket_id,
                                        'testkey', 'testcontent', False)
            key = self.s3.get_bucket(self.old_style_bucket_id).get_key('testkey')
            self.assertEqual(key.get_contents_as_string(), 'testcontent')
            # moto doesn't support ACL stuff
            # self.s3.get_bucket(self.old_style_bucket_id).get_acl('testkey')

        with self.setUpTearDown():
            put_string_to_key_in_bucket(self.aws_creds,
                                        self.old_style_bucket_id,
                                        'testkey', 'testcontent', True)
            key = self.s3.get_bucket(self.old_style_bucket_id).get_key('testkey')
            self.assertEqual(key.get_contents_as_string(), 'testcontent')
            # moto doesn't support ACL stuff
            # self.s3.get_bucket(self.old_style_bucket_id).get_acl('testkey')

        with self.setUpTearDown():
            put_string_to_key_in_bucket(self.aws_creds,
                                        self.new_style_bucket_id,
                                        'testkey', 'testcontent', False)
            # NOTE(review): the read-back path uses self.old_style_bucket_id
            # while the write used self.new_style_bucket_id -- confirm this
            # mismatch is intentional.
            key = self.s3.get_bucket(DOWNLOAD_SITE_BUCKET)\
                         .get_key('%s/%s/testkey' % (DOWNLOAD_SITE_PREFIX, self.old_style_bucket_id))
            self.assertEqual(key.get_contents_as_string(), 'testcontent')
            # moto doesn't support ACL stuff
            # self.s3.get_bucket(self.old_style_bucket_id).get_acl('testkey')

        with self.setUpTearDown():
            put_string_to_key_in_bucket(self.aws_creds,
                                        self.new_style_bucket_id,
                                        'testkey', 'testcontent', True)
            key = self.s3.get_bucket(DOWNLOAD_SITE_BUCKET)\
                         .get_key('%s/%s/testkey' % (DOWNLOAD_SITE_PREFIX, self.old_style_bucket_id))
            self.assertEqual(key.get_contents_as_string(), 'testcontent')
Beispiel #14
0
def s3_iter_bucket_process_key(key, retries=3):
    """Download *key*, retrying transient SSL failures, and return (key, body).

    Top-level (not nested in `s3_iter_bucket`) so multiprocessing can
    pickle it.
    """
    # Works around https://github.com/boto/boto/issues/2409: network hiccups
    # on either side can surface as SSLError, so retry a bounded number of
    # times before giving up.
    attempt = 0
    while True:
        try:
            return key, key.get_contents_as_string()
        except SSLError:
            # Out of retries: let the final failure propagate.
            if attempt == retries:
                raise
            attempt += 1
    def test_create_get_delete_object(self):
        """Create a bucket, round-trip an object through it, then delete it.

        After deletion, reading the key must raise NoSuchKey.
        """
        # S3 Create, get and delete object
        bucket_name = rand_name("s3bucket-")
        object_name = rand_name("s3object-")
        content = 'x' * 42
        bucket = self.client.create_bucket(bucket_name)
        # Register cleanup so the bucket is destroyed even if asserts fail.
        self.addResourceCleanUp(self.destroy_bucket,
                                self.client.connection_data, bucket_name)

        self.assertTrue(bucket.name == bucket_name)
        with contextlib.closing(boto.s3.key.Key(bucket)) as key:
            key.key = object_name
            key.set_contents_from_string(content)
            readback = key.get_contents_as_string()
            self.assertTrue(readback == content)
            bucket.delete_key(key)
            self.assertBotoError(self.s3_error_code.client.NoSuchKey,
                                 key.get_contents_as_string)
Beispiel #16
0
def slow_updates_list(config):
    """Yield one subscriber dict per stored update key, oldest first.

    Each S3 key under "updates/" is named after the subscriber's email
    address and stores their postcode.  Postcode or candidate lookup
    failures are printed and the entry skipped rather than aborting the
    generator.
    """
    bucket = _get_s3_bucket(config)

    prefix = "updates/"
    results = bucket.list(prefix)
    # Oldest keys first, so subscribers are processed in signup order.
    results = sorted(results, key=lambda k: k.last_modified)

    for key in results:
        # Key names look like "updates/<email>"; the body is the postcode.
        email = re.match("updates/(.*)", key.name).group(1)
        postcode = key.get_contents_as_string().strip().decode('ascii')
        constituency = lookup_postcode(postcode)
        if 'error' in constituency:
            print("ERROR looking up postcode", postcode)
            continue
        last_modified = boto.utils.parse_ts(key.last_modified)

        candidates = lookup_candidates(config, constituency['id'])
        if 'errors' in candidates:
            print("ERROR looking up candidates", postcode)
            continue

        candidates = augment_if_has_cv(config, candidates)
        candidates_no_cv, candidates_no_email, candidates_have_cv = split_candidates_by_type(config, candidates)
        candidates_cv_created, candidates_cv_updated = split_candidates_by_updates(config, candidates, last_modified)

        subscriber = {
            'email': email,
            'postcode': postcode,
            'constituency': constituency,

            'candidates': candidates,

            'has_cv_count': len(candidates_have_cv),
            'no_cv_count': len(candidates_no_cv),
            'no_email_count': len(candidates_no_email),

            'candidates_cv_created': candidates_cv_created,
            'candidates_cv_updated': candidates_cv_updated,
            'last_modified': last_modified
        }


        yield subscriber
    def test_create_get_delete_object(self):
        """Round-trip an object through a freshly created bucket, then delete.

        Verifies the written content reads back identically and that a
        deleted key raises NoSuchKey on subsequent reads.
        """
        # S3 Create, get and delete object
        bucket_name = data_utils.rand_name("s3bucket")
        object_name = data_utils.rand_name("s3object")
        content = 'x' * 42
        bucket = self.client.create_bucket(bucket_name)
        # Cleanup runs even when the assertions below fail.
        self.addResourceCleanUp(self.destroy_bucket,
                                self.client.connection_data,
                                bucket_name)

        self.assertTrue(bucket.name == bucket_name)
        with contextlib.closing(boto.s3.key.Key(bucket)) as key:
            key.key = object_name
            key.set_contents_from_string(content)
            readback = key.get_contents_as_string()
            self.assertTrue(readback == content)
            bucket.delete_key(key)
            self.assertBotoError(self.s3_error_code.client.NoSuchKey,
                                 key.get_contents_as_string)
Beispiel #18
0
    def deleteAMI(self, amiId):
        """
        Deletes an ami id from Amazon S3.

        Looks up the image's manifest, deletes every part file listed in
        it, then deletes the manifest itself.
        @param amiId: the ami id to delete.
        @type amiId: C{str}
        @return: the ami id deleted
        @rtype: C{str}
        """
        # Returns a one item list of the amiId we asked for.
        image = self.ec2conn.get_all_images(image_ids=amiId)

        # It's possible this image has already been deleted, handle that case
        # gracefully.
        if not image:
            raise mint_error.AMIInstanceDoesNotExist()
        image = image[0]

        # Image location should be of the format:
        # bucket-name/manifest-xml-file-name.xml
        manifest_path_bits = image.location.split('/')
        bucketName = manifest_path_bits[0]
        # Bug fix: rejoin the remaining components with '/' so a manifest key
        # that lives in a subdirectory is reconstructed correctly (''.join()
        # silently dropped the separators).
        keyName = '/'.join(manifest_path_bits[1:])

        bucket = self.s3conn.get_bucket(bucketName)
        key = boto.s3.key.Key(bucket, keyName)

        parts = []
        try:
            # Load the contents of the manifest, and read all the part
            # filenames and save them in parts.
            manifest_contents = key.get_contents_as_string()
            document = xml.dom.minidom.parseString(manifest_contents)
            parts = [x.firstChild.data \
                     for x in document.getElementsByTagName("filename")]

            # Delete each part.
            for part in parts:
                bucket.delete_key(part)

            # Delete the manifest.
            bucket.delete_key(keyName)
        except S3ResponseError as e:
            # 'as e' replaces the legacy 'except X, e' form (Python 2 only),
            # so the module also parses under Python 3.
            raise mint_error.EC2Exception(ErrorResponseObject(e))
        # Return the id as documented in the docstring above.
        return amiId
Beispiel #19
0
def updates_list(config):
    """Yield a subscriber dict for each "updates/" key in S3, oldest first.

    Key names encode the subscriber's email; the key body is their
    postcode.  Entries whose postcode or candidate lookup fails are
    printed and skipped instead of stopping the generator.
    """
    bucket = _get_s3_bucket(config)

    prefix = "updates/"
    results = bucket.list(prefix)
    # Process subscribers in the order their update keys were written.
    results = sorted(results, key=lambda k: k.last_modified)

    for key in results:
        # "updates/<email>" -> email; key body holds the postcode.
        email = re.match("updates/(.*)", key.name).group(1)
        postcode = key.get_contents_as_string().strip().decode('ascii')
        constituency = lookup_postcode(postcode)
        if 'error' in constituency:
            print("ERROR looking up postcode", postcode)
            continue
        last_modified = boto.utils.parse_ts(key.last_modified)

        candidates = lookup_candidates(constituency['id'])
        if 'errors' in candidates:
            print("ERROR looking up candidates", postcode)
            continue

        candidates = augment_if_has_cv(config, candidates)
        candidates_no_cv, candidates_no_email, candidates_have_cv = split_candidates_by_type(config, candidates)
        candidates_cv_created, candidates_cv_updated = split_candidates_by_updates(config, candidates, last_modified)

        subscriber = {
            'email': email,
            'postcode': postcode,
            'constituency': constituency,

            'candidates': candidates,

            'has_cv_count': len(candidates_have_cv),
            'no_cv_count': len(candidates_no_cv),
            'no_email_count': len(candidates_no_email),

            'candidates_cv_created': candidates_cv_created,
            'candidates_cv_updated': candidates_cv_updated,
            'last_modified': last_modified
        }


        yield subscriber
Beispiel #20
0
def _fetch_candidates(config):
    """Return a csv.DictReader over the YourNextMP candidates export.

    Tries the live site first and caches a successful download in S3;
    on any non-200 response, falls back to the cached copy in the bucket.
    """
    bucket = _get_s3_bucket(config)
    key_name = "cache/candidates.csv"

    url = "https://yournextmp.com/media/candidates.csv"
    response = requests.get(url)

    if response.status_code != 200:
        print("couldn't read from YourNextMP; loading candidates from S3")
        text = bucket.get_key(key_name).get_contents_as_string().decode('utf-8')
    else:
        response.encoding = 'utf-8'
        text = response.text
        # Refresh the S3 cache with the newly downloaded copy.
        cache_key = boto.s3.key.Key(bucket)
        cache_key.key = key_name
        cache_key.set_contents_from_string(text)

    return csv.DictReader(io.StringIO(text))
Beispiel #21
0
def _fetch_candidates(config):
    """Load the candidates CSV, preferring the live download over the S3 cache."""
    bucket = _get_s3_bucket(config)
    key_name = "cache/candidates.csv"
    url = "https://yournextmp.com/media/candidates.csv"

    resp = requests.get(url)
    if resp.status_code == 200:
        resp.encoding = 'utf-8'
        text = resp.text
        # Store the fresh copy back into the bucket for later fallback use.
        key = boto.s3.key.Key(bucket)
        key.key = key_name
        key.set_contents_from_string(text)
        return csv.DictReader(io.StringIO(text))

    print("couldn't read from YourNextMP; loading candidates from S3")
    text = bucket.get_key(key_name).get_contents_as_string().decode('utf-8')
    return csv.DictReader(io.StringIO(text))
Beispiel #22
0
 def get_content(self, path):
     """Return the raw contents stored at *path*; IOError when absent."""
     full_path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, full_path)
     if not key.exists():
         raise IOError('No such key: \'{0}\''.format(full_path))
     return key.get_contents_as_string()
Beispiel #23
0
 def get_content(self, path):
     """Fetch and return the object stored under *path*.

     Raises IOError when the key is missing.
     """
     resolved = self._init_path(path)
     target = boto.s3.key.Key(self._s3_bucket, resolved)
     if not target.exists():
         raise IOError('No such key: \'{0}\''.format(resolved))
     return target.get_contents_as_string()
def test():
    """Walk through the boto S3 API end to end (Python 2 example script).

    Creates a bucket; probes bucket existence via head_bucket() and
    lookup(); uploads objects from a filename, a file object, and
    strings; lists and pages keys; then deletes everything.
    """
    print '--- running AWS s3 examples ---'
    c = boto.s3.connection.S3Connection(conf.AWS_ACCESS_KEY, conf.AWS_SECRET_ACCESS_KEY)

    print 'original bucket number:', len(c.get_all_buckets())

    bucket_name = 'yet.another.s3.example.code'
    print 'creating a bucket:', bucket_name
    try:
        bucket = c.create_bucket(bucket_name)
    except boto.exception.S3CreateError  as e:
        print ' ' * 4, 'error occured:'
        print ' ' * 8, 'http status code:', e.status
        print ' ' * 8, 'reason:', e.reason
        print ' ' * 8, 'body:', e.body
        return

    test_bucket_name = 'no.existence.yet.another.s3.example.code'
    print 'if you just want to know whether the bucket(\'%s\') exists or not' % (test_bucket_name,), \
        'and don\'t want to get this bucket'
    try:
        test_bucket = c.head_bucket(test_bucket_name)
    except boto.exception.S3ResponseError as e:
        if e.status == 403 and e.reason == 'Forbidden':
            print ' ' * 4, 'the bucket(\'%s\') exists but you don\'t have the permission.' % (test_bucket_name,)
        elif e.status == 404 and e.reason == 'Not Found':
            print ' ' * 4, 'the bucket(\'%s\') doesn\'t exist.' % (test_bucket_name,)

    print 'or use lookup() instead of head_bucket() to do the same thing.', \
        'it will return None if the bucket does not exist instead of throwing an exception.'
    test_bucket = c.lookup(test_bucket_name)
    if test_bucket is None:
        print ' ' * 4, 'the bucket(\'%s\') doesn\'t exist.' % (test_bucket_name,)

    print 'now you can get the bucket(\'%s\')' % (bucket_name,)
    bucket = c.get_bucket(bucket_name)

    print 'add some objects to bucket ', bucket_name
    keys = ['sample.txt', 'notes/2006/January/sample.txt', 'notes/2006/February/sample2.txt',\
           'notes/2006/February/sample3.txt', 'notes/2006/February/sample4.txt', 'notes/2006/sample5.txt']
    print ' ' * 4, 'these key names are:'
    for name in keys:
        print ' ' * 8, name

    filename = './_test_dir/sample.txt'
    print ' ' * 4, 'you can contents of object(\'%s\') from filename(\'%s\')' % (keys[0], filename,)
    key = boto.s3.key.Key(bucket, keys[0])
    bytes_written = key.set_contents_from_filename(filename)
    assert bytes_written == os.path.getsize(filename), '    error occured:broken file'

    print ' ' * 4, 'or set contents of object(\'%s\') by opened file object' % (keys[1],)
    fp = open(filename, 'r')
    key = boto.s3.key.Key(bucket, keys[1])
    bytes_written = key.set_contents_from_file(fp)
    assert bytes_written == os.path.getsize(filename), '    error occured:broken file'

    print ' ' * 4, 'you can also set contents the remaining key objects from string'
    for name in keys[2:]:
        print ' ' * 8, 'key:', name
        key = boto.s3.key.Key(bucket, name)
        s = 'This is the content of %s ' % (name,)
        key.set_contents_from_string(s)
        print ' ' * 8, '..contents:', key.get_contents_as_string()
        # use get_contents_to_filename() to save contents to a specific file in the filesystem.

    #print 'You have %d objects in bucket %s' % ()    

    print 'list all objects added into \'%s\' bucket' % (bucket_name,)
    print ' ' * 4, 'list() automatically handles all of the result paging from S3.'
    print ' ' * 4, 'You just need to keep iterating until there are no more results.'
    print ' ' * 4, '---------------------------------------------------------------'
    bucket_size = 0
    for key in bucket.list():
        print ' ' * 4, key.name
        bucket_size += key.size
    print ' ' * 4, 'bucket size:', bucket_size, 'bytes.'
    # do not caculate bucket size or number of objects when you have millions of objects in a bucket.

    p = 'notes/2006/'
    print 'list objects start with \'%s\'' % (p,)
    objs = bucket.list(prefix = p)
    for key in objs:
        print ' ' * 4, key.name

    print 'list objects or key prefixs like \'%s/*\', something like what\'s in the top of \'%s\' folder ?' % (p, p,)
    objs = bucket.list(prefix = p, delimiter = '/')
    for key in objs:
        print ' ' * 4, key.name

    keys_per_page = 4
    print 'manually handle the results paging from s3,', ' number of keys per page:', keys_per_page
    print ' ' * 4, 'get page 1'
    objs = bucket.get_all_keys(max_keys = keys_per_page)
    for key in objs:
        print ' ' * 8, key.name

    print ' ' * 4, 'get page 2'
    last_key_name = objs[-1].name   #last key of last page is the marker to retrive next page.
    objs = bucket.get_all_keys(max_keys = keys_per_page, marker = last_key_name)
    for key in objs:
        print ' ' * 8, key.name
    """
    get_all_keys() a lower-level method for listing contents of a bucket.
    This closely models the actual S3 API and requires you to manually handle the paging of results. 
    For a higher-level method that handles the details of paging for you, you can use the list() method.
    """

    print 'you must delete all objects in the bucket \'%s\' before delete this bucket' % (bucket_name, )
    print ' ' * 4, 'you can delete objects one by one'
    bucket.delete_key(keys[0])
    print ' ' * 4, 'or you can delete multiple objects using a single HTTP request with delete_keys().'
    bucket.delete_keys(keys[1:])

    #TODO print 'after previous deletion, we now have %d objects in bucket(\'%s\')' % (len(bucket.list()), bucket_name,)
    print 'now you can delete the bucket \'%s\'' % (bucket_name,)
    c.delete_bucket(bucket)
Beispiel #25
0
 def get_content(self, path):
     """Return the object at *path* decoded as UTF-8 text.

     Raises IOError when no such key exists.
     """
     resolved = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, resolved)
     if not key.exists():
         raise IOError("No such key: '{}'".format(resolved))
     return key.get_contents_as_string().decode('utf-8')