Exemplo n.º 1
0
    def __init__(self, app, local_to_me_profile, remote_to_me_profile, version,
                 account_name, container_name, object_name):
        """Wire up providers for both sides of a cloud-connector request.

        :param app: WSGI app; passed to the base controller and used for
            its logger.
        :param local_to_me_profile: sync profile for the storage local to
            this connector.
        :param remote_to_me_profile: sync profile for the remote storage.
        :param version: API version for this request (stored as-is).
        :param account_name: UTF8-encoded account name.
        :param container_name: UTF8-encoded container name.
        :param object_name: UTF8-encoded object name.
        """
        super(CloudConnectorController, self).__init__(app)

        self.version = version
        self.account_name = account_name  # UTF8-encoded string
        self.container_name = container_name  # UTF8-encoded string
        self.object_name = object_name  # UTF8-encoded string

        # Either profile may be an "all containers" profile that must be
        # specialized for this request's container before use.
        self.local_to_me_profile, local_per_account = \
            maybe_munge_profile_for_all_containers(local_to_me_profile,
                                                   container_name)
        self.local_to_me_provider = create_provider(
            self.local_to_me_profile, max_conns=1,
            per_account=local_per_account, logger=self.app.logger)

        self.remote_to_me_profile, remote_per_account = \
            maybe_munge_profile_for_all_containers(remote_to_me_profile,
                                                   container_name)
        # NOTE(review): the extra header presumably asks the remote end's
        # shunt middleware to step aside -- confirm against that middleware.
        self.remote_to_me_provider = create_provider(
            self.remote_to_me_profile, max_conns=1,
            per_account=remote_per_account, logger=self.app.logger,
            extra_headers={SHUNT_BYPASS_HEADER: 'true'})

        # Quoted account/container/object path; used only in log messages.
        self.aco_str = urllib.quote('/'.join(
            filter(None, (account_name, container_name, object_name))))
        self.app.logger.debug('For %s using local_to_me profile %r',
                              self.aco_str,
                              safe_profile_config(self.local_to_me_profile))
        self.app.logger.debug('For %s using remote_to_me profile %r',
                              self.aco_str,
                              safe_profile_config(self.remote_to_me_profile))
Exemplo n.º 2
0
    def test_s3_archive_range_get(self):
        """A ranged GET of an S3-archived object must not trigger a restore.

        Puts an object through the provider, issues a ranged GET through
        Swift, and checks that both the HEAD and the ranged GET are served
        by S3Proxy (Jetty 'server' header) rather than by a restored local
        copy.
        """
        content = 's3 archive and get'
        key = 'test_s3_archive'
        s3_mapping = self.s3_restore_mapping()
        provider = create_provider(s3_mapping, 1)
        provider.put_object(key, {}, content)

        hdrs = self.local_swift('head_object', s3_mapping['container'], key)
        self.assertIn('server', hdrs)
        self.assertTrue(hdrs['server'].startswith('Jetty'))

        hdrs, body = self.local_swift('get_object',
                                      s3_mapping['container'],
                                      key,
                                      content,
                                      headers={'Range': 'bytes=0-5'})
        # The etag reflects the whole object even for a ranged response.
        self.assertEqual(hashlib.md5(content).hexdigest(), hdrs['etag'])
        swift_content = ''.join([chunk for chunk in body])
        # bytes=0-5 is inclusive: the first six bytes.
        self.assertEqual(content[:6], swift_content)
        # There should be a "server" header, set to Jetty for S3Proxy
        self.assertEqual('Jetty(9.2.z-SNAPSHOT)', hdrs['server'])

        # the object should not be restored
        hdrs = self.local_swift('head_object', s3_mapping['container'], key)
        # assertIn for consistency with the membership check above (was a
        # bare assertTrue on `'server' in hdrs`).
        self.assertIn('server', hdrs)
        clear_s3_bucket(self.s3_client, s3_mapping['aws_bucket'])
        clear_swift_container(self.swift_src, s3_mapping['container'])
Exemplo n.º 3
0
    def test_provider_s3_put_object_no_prefix(self):
        """PUT through the provider with an empty ``custom_prefix``.

        The object lands in S3 under the bare Swift key, and content
        length, content type, body, and user metadata all round-trip.
        """
        sync_mapping = self.s3_sync_mapping()
        sync_mapping['custom_prefix'] = ''
        provider = create_provider(sync_mapping, 1)

        # NOTE: as long as the response includes a Content-Length, the
        # SeekableFileLikeIter will bound reads to the content-length, even if
        # the iterable goes further
        swift_key = u'a-\u062a-b-c'
        body_len = 87  # take an int for convenience
        put_resp = provider.put_object(
            swift_key,
            {
                'content-length': body_len,
                'content-type': 'text/plain',
                'x-object-meta-foobie-bar': 'bam',
            },
            repeat('b'))
        self.assertTrue(put_resp.success)

        get_resp = self.s3('get_object',
                           Bucket=sync_mapping['aws_bucket'],
                           Key=swift_key)
        self.assertEqual(body_len, get_resp['ContentLength'])
        self.assertEqual('text/plain', get_resp['ContentType'])
        self.assertEqual('b' * body_len, get_resp['Body'].read())
        self.assertEqual('bam', get_resp['Metadata']['foobie-bar'])
Exemplo n.º 4
0
    def setUp(self):
        """Build providers and an S3 client aimed at the cloud-connector."""
        super(TestCloudConnector, self).setUp()

        self.mapping = self._find_mapping(
            # NOTE: Swift container name must be a valid Amazon S3 bucket name.
            # Also, this mapping has a `copy_after` of 3600 meaning that
            # background daemons won't be mucking about with our objects while
            # these tests are trying to do _their_ job.
            lambda m: m.get('container') == u"s3-restore")
        self.conn_noshunt = self.conn_for_acct_noshunt(u'AUTH_test')
        self.local_to_me_provider = create_provider(self.mapping, 1, False)
        self.cc_endpoint = "http://cloud-connector:%d" % (
            self.PORTS['cloud_connector'],)
        self.cc_mapping = {
            "account": u"AUTH_test",
            "container": "s3-restore",
            "aws_bucket": "s3-restore",
            "aws_endpoint": self.cc_endpoint,
            "aws_identity": u"test:tester",
            "aws_secret": u"testing",
            "protocol": "s3",
            "custom_prefix": '',
        }
        self.cc_provider = create_provider(self.cc_mapping, 1, False)

        # Path-style addressing + v2 ('s3') signatures for the
        # cloud-connector endpoint. Stash the original client first;
        # presumably it gets restored elsewhere -- verify in tearDown.
        boto_session = boto3.session.Session(
            aws_access_key_id=self.cc_mapping['aws_identity'],
            aws_secret_access_key=self.cc_mapping['aws_secret'])
        boto_config = boto3.session.Config(
            signature_version='s3', s3={'addressing_style': 'path'})
        self._orig_s3_client = self.s3_client
        self.s3_client = boto_session.client(
            's3', config=boto_config, endpoint_url=self.cc_endpoint)

        # make sure our container & bucket are clear
        self.tearDown()
Exemplo n.º 5
0
    def test_provider_s3_put_object_defaults(self):
        """PUT without content-length/content-type: defaults must apply.

        S3 should report the body's actual length, the default
        'application/octet-stream' content type, and round-trip the user
        metadata.
        """
        sync_mapping = self.s3_sync_mapping()
        provider = create_provider(sync_mapping, 1)

        # NOTE: as long as the response includes a Content-Length, the
        # SeekableFileLikeIter will bound reads to the content-length, even if
        # the iterable goes further
        swift_key = u'a-\u062a-b-c'
        content = 'this str has a length'
        put_resp = provider.put_object(swift_key, {
            'x-object-meta-foobie-bar': 'blam',
        }, content)
        self.assertTrue(put_resp.success)

        s3_key = s3_key_name(sync_mapping, swift_key)
        get_resp = self.s3(
            'get_object', Bucket=sync_mapping['aws_bucket'], Key=s3_key)
        self.assertEqual(len(content), get_resp['ContentLength'])
        self.assertEqual('application/octet-stream', get_resp['ContentType'])
        self.assertEqual(content, get_resp['Body'].read())
        self.assertEqual('blam', get_resp['Metadata']['foobie-bar'])
Exemplo n.º 6
0
    def test_s3_archive_get(self):
        """A full GET of an S3-archived object restores it into Swift.

        The first GET is served by S3Proxy (Jetty 'server' header); the
        subsequent GET comes back from Swift (no 'server' header).
        Exercises both a non-empty and an empty object.
        """
        tests = [{
            'content': 's3 archive and get',
            'key': 'test_s3_archive'
        }, {
            'content': '',
            'key': 'test-empty'
        }]

        s3_mapping = self.s3_restore_mapping()
        provider = create_provider(s3_mapping, 1)
        for test in tests:
            content = test['content']
            key = test['key']
            provider.put_object(key, {}, content)

            hdrs = self.local_swift('head_object', s3_mapping['container'],
                                    key)
            self.assertIn('server', hdrs)
            self.assertTrue(hdrs['server'].startswith('Jetty'))

            hdrs, body = self.local_swift('get_object',
                                          s3_mapping['container'], key,
                                          content)
            self.assertEqual(hashlib.md5(content).hexdigest(), hdrs['etag'])
            swift_content = ''.join([chunk for chunk in body])
            self.assertEqual(content, swift_content)
            # There should be a "server" header, set to Jetty for S3Proxy
            self.assertEqual('Jetty(9.2.z-SNAPSHOT)', hdrs['server'])

            # the subsequent request should come back from Swift
            hdrs, body = self.local_swift('get_object',
                                          s3_mapping['container'], key)
            swift_content = ''.join([chunk for chunk in body])
            self.assertEqual(content, swift_content)
            # assertNotIn gives a clearer failure message than the original
            # assertEqual(False, 'server' in hdrs).
            self.assertNotIn('server', hdrs)
            # Clean up per-iteration so the next case starts fresh.
            clear_s3_bucket(self.s3_client, s3_mapping['aws_bucket'])
            clear_swift_container(self.swift_src, s3_mapping['container'])
Exemplo n.º 7
0
    def test_s3_archive_slo_restore(self):
        """Restore a Swift SLO archived to S3 as a multipart upload.

        Uploads the SLO manifest and the merged object (as an MPU) directly
        to S3, then GETs through Swift and verifies the content, the merged
        etag, and that the restored copy subsequently serves from Swift.

        Fixes from review: removed the dead ``manifest_key = s3_key_name``
        assignment (it bound the *function object* and was immediately
        overwritten), hoisted the repeated 5MB size into ``part_size``, and
        iterate the header dict directly instead of via ``.keys()``.
        """
        # Satisfy the 5MB minimum MPU part size
        part_size = 5 * 1024 * 1024
        content = 'A' * (6 * 1024 * 1024)
        key = 'test_swift_archive'
        mapping = self.s3_restore_mapping()
        provider = create_provider(mapping, 1)
        provider.put_object(key, {}, content)
        s3_key = s3_key_name(mapping, key)
        prefix, account, container, _ = s3_key.split('/', 3)
        key_hash = hashlib.sha256(key).hexdigest()
        manifest_key = '/'.join([
            prefix, '.manifests', account, container,
            '%s.swift_slo_manifest' % (key_hash)
        ])
        # content is all 'A's, so slicing it yields the same bytes the
        # original literal repetitions produced.
        manifest = [{
            'bytes': part_size,
            'name': '/segments/part1',
            'hash': hashlib.md5(content[:part_size]).hexdigest()
        }, {
            'bytes': 1024 * 1024,
            'name': '/segments/part2',
            'hash': hashlib.md5(content[part_size:]).hexdigest()
        }]
        self.s3('put_object',
                Bucket=mapping['aws_bucket'],
                Key=manifest_key,
                Body=json.dumps(manifest))
        resp = self.s3('create_multipart_upload',
                       Bucket=mapping['aws_bucket'],
                       Key=s3_key,
                       Metadata={'x-static-large-object': 'True'})
        self.s3('upload_part',
                Bucket=mapping['aws_bucket'],
                Key=s3_key,
                PartNumber=1,
                UploadId=resp['UploadId'],
                Body=content[:part_size])
        self.s3('upload_part',
                Bucket=mapping['aws_bucket'],
                Key=s3_key,
                PartNumber=2,
                UploadId=resp['UploadId'],
                Body=content[part_size:])
        self.s3('complete_multipart_upload',
                Bucket=mapping['aws_bucket'],
                Key=s3_key,
                UploadId=resp['UploadId'],
                MultipartUpload={
                    'Parts': [{
                        'PartNumber': 1,
                        'ETag': hashlib.md5(content[:part_size]).hexdigest()
                    }, {
                        'PartNumber': 2,
                        'ETag': hashlib.md5(content[part_size:]).hexdigest()
                    }]
                })

        # Before the restore, Swift reports no local objects; listing
        # entries carry a content_location pointing at the remote copy.
        hdrs, listing = self.local_swift('get_container', mapping['container'])
        self.assertEqual(0, int(hdrs['x-container-object-count']))
        for entry in listing:
            self.assertIn('content_location', entry)

        hdrs, body = self.local_swift('get_object', mapping['container'], key)
        # NOTE: this is different from real S3 as all of the parts are merged
        # and this is the content ETag
        self.assertEqual(hashlib.md5(content).hexdigest(), hdrs['etag'])
        swift_content = ''.join(body)
        self.assertEqual(content, swift_content)
        self.assertEqual('True', hdrs['x-static-large-object'])

        # the subsequent request should come back from Swift
        hdrs, listing = self.local_swift('get_container', mapping['container'])
        self.assertEqual(1, int(hdrs['x-container-object-count']))
        # We get back an entry for the remote and the local object
        self.assertEqual(1, len(listing))
        self.assertEqual(2, len(listing[0]['content_location']))
        hdrs, body = self.local_swift('get_object', mapping['container'], key)
        swift_content = ''.join([chunk for chunk in body])
        self.assertEqual(content, swift_content)

        # No remote-side headers should leak through once served locally.
        for k in hdrs:
            self.assertFalse(k.startswith('Remote-'))
        clear_s3_bucket(self.s3_client, mapping['aws_bucket'])
        clear_swift_container(self.swift_src, mapping['container'])