Example #1
    def list_files(self, volume_path, recursive):
        """See :meth:`storage.brokers.broker.Broker.list_files`
        """

        with S3Client(self._credentials, self._region_name) as client:
            return client.list_objects(self._bucket_name, recursive,
                                       volume_path)
Example #2
    def test_list_objects_prefix(self, mock_func):
        mock_func.return_value = self.sample_response

        with S3Client(self.credentials) as client:
            results = client.list_objects('sample-bucket', False, 'test/')

        self.assertEqual(len(list(results)), 1)
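The test methods shown in Examples #2, #4, #6, #7, #9, and #12 each receive a mock_func argument and refer to fixtures such as self.credentials, self.sample_response, and self.sample_content that are defined elsewhere in the test case. A minimal sketch of the scaffolding they appear to rely on follows; the patch target and the fixture values are assumptions for illustration, not the project's actual test setup.

from copy import deepcopy
from unittest import TestCase
from unittest.mock import patch

# Hypothetical patch target: the real tests patch whichever boto3 call
# S3Client.list_objects() delegates to.
@patch('storage.brokers.s3_broker.S3Client._list_objects_call')
class TestS3ClientListObjects(TestCase):

    def setUp(self):
        # Fixtures shaped like a boto3 ListObjects response (assumed values)
        self.credentials = None  # anonymous access is enough for this sketch
        self.sample_content = {'Key': 'test/file.txt', 'Size': 100}
        self.sample_response = {
            'Contents': [deepcopy(self.sample_content)],
            'IsTruncated': False,
        }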
Example #3
    def download_files(self, volume_path, file_downloads):
        """See :meth:`storage.brokers.broker.Broker.download_files`"""

        with S3Client(self._credentials, self._region_name) as client:
            for file_download in file_downloads:
                # If the file supports partial access and a volume is configured, attempt a sym-link
                if file_download.partial and self._volume:
                    logger.debug('Partial S3 file accessed by mounted bucket.')
                    path_to_download = os.path.join(
                        volume_path, file_download.file.file_path)

                    logger.info('Checking path %s', path_to_download)
                    if not os.path.exists(path_to_download):
                        raise MissingFile(file_download.file.file_name)

                    # Create symlink to the file in the host mount
                    logger.info('Creating link %s -> %s',
                                file_download.local_path, path_to_download)
                    execute_command_line([
                        'ln', '-s', path_to_download, file_download.local_path
                    ])
                # Fall-back to default S3 file download
                else:
                    try:
                        s3_object = client.get_object(
                            self._bucket_name, file_download.file.file_path)
                    except FileDoesNotExist:
                        raise MissingFile(file_download.file.file_name)

                    self._download_file(s3_object, file_download.file,
                                        file_download.local_path)
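Example #3 expects each entry in file_downloads to expose a file object (with file_path and file_name attributes), a local_path, and a partial flag. A sketch of driving download_files with hypothetical stand-ins for those objects could look like the following; broker is assumed to be an already-configured instance of this S3 broker, and the real project uses its own ScaleFile and FileDownload classes rather than these namedtuples.

from collections import namedtuple

# Hypothetical stand-ins for the broker's file and download objects
FakeFile = namedtuple('FakeFile', ['file_path', 'file_name'])
FakeDownload = namedtuple('FakeDownload', ['file', 'local_path', 'partial'])

downloads = [
    FakeDownload(file=FakeFile('ingest/input.dat', 'input.dat'),
                 local_path='/tmp/work/input.dat',
                 partial=False),
]

# volume_path points at the host mount of the bucket (only used for partial files)
broker.download_files('/mnt/s3-bucket', downloads)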
Example #4
    def test_list_objects_empty_bucket(self, mock_func):
        response = self.sample_response
        del response['Contents']
        mock_func.return_value = response

        with S3Client(self.credentials) as client:
            results = client.list_objects('empty-bucket', True)

        self.assertEqual(len(list(results)), 0)
Example #5
    def upload_files(self, volume_path, file_uploads):
        """See :meth:`storage.brokers.broker.Broker.upload_files`"""

        with S3Client(self._credentials, self._region_name) as client:
            for file_upload in file_uploads:
                s3_object = client.get_object(self._bucket_name, file_upload.file.file_path, False)

                self._upload_file(s3_object, file_upload.file, file_upload.local_path)

                # Create new model
                file_upload.file.save()
Example #6
    def test_list_objects_iteration(self, mock_func):
        response1 = self.sample_response
        response1['IsTruncated'] = True
        response2 = deepcopy(response1)
        response2['IsTruncated'] = False
        mock_func.side_effect = [response1, response2]

        with S3Client(self.credentials) as client:
            results = client.list_objects('iterating-bucket', True)

        self.assertEqual(len(list(results)), 2)
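Example #6 only makes sense if list_objects keeps requesting pages until the service stops reporting IsTruncated, yielding the entries of each page as it goes; with one object per mocked page, two pages produce two results. A simplified sketch of that pagination pattern (not the actual S3Client implementation) is shown below.

def iterate_objects(fetch_page, bucket_name, prefix=None):
    """Yield object entries from every page of a bucket listing.

    fetch_page is a stand-in for the underlying boto3 list call and is
    expected to return a dict shaped like a ListObjects response.
    """
    marker = None
    while True:
        response = fetch_page(bucket_name, prefix=prefix, marker=marker)
        contents = response.get('Contents', [])
        for entry in contents:
            yield entry
        if not response.get('IsTruncated') or not contents:
            break
        # Resume after the last key of this page (an assumption about how
        # the real client advances between pages).
        marker = contents[-1]['Key']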
Example #7
    def test_list_objects_bucket_not_found(self, mock_func):
        error_response = {
            'Error': {
                'Code': 'NoSuchBucket',
                'Message': 'The specified bucket does not exist'
            }
        }
        mock_func.side_effect = ClientError(error_response, 'ListObjects')

        with self.assertRaises(ClientError):
            with S3Client(self.credentials) as client:
                items = list(client.list_objects('nonexistent-bucket'))
Example #8
    def delete_files(self, volume_path, files):
        """See :meth:`storage.brokers.broker.Broker.delete_files`"""

        with S3Client(self._credentials, self._region_name) as client:
            for scale_file in files:
                s3_object = client.get_object(self._bucket_name,
                                              scale_file.file_path)

                self._delete_file(s3_object, scale_file)

                # Update model attributes
                scale_file.set_deleted()
                scale_file.save()
Example #9
    def test_list_objects_prefix_recursive(self, mock_func):
        response = self.sample_response
        response['Contents'] = [
            deepcopy(self.sample_content),
            deepcopy(self.sample_content)
        ]
        response['Contents'][0]['Key'] = 'string'
        mock_func.return_value = response

        with S3Client(self.credentials) as client:
            results = client.list_objects('sample-bucket', True)

        self.assertEqual(len(list(results)), 2)
Example #10
    def move_files(self, volume_path, file_moves):
        """See :meth:`storage.brokers.broker.Broker.move_files`"""

        with S3Client(self._credentials, self._region_name) as client:
            for file_move in file_moves:
                try:
                    s3_object_src = client.get_object(self._bucket_name, file_move.file.file_path)
                except FileDoesNotExist:
                    raise MissingFile(file_move.file.file_name)
                s3_object_dest = client.get_object(self._bucket_name, file_move.new_path, False)

                self._move_file(s3_object_src, s3_object_dest, file_move.file, file_move.new_path)

                # Update model attributes
                file_move.file.file_path = file_move.new_path
                file_move.file.save()
Example #11
    def validate_configuration(self, config):
        """See :meth:`storage.brokers.broker.Broker.validate_configuration`"""

        warnings = []
        if 'bucket_name' not in config or not config['bucket_name']:
            raise InvalidBrokerConfiguration('INVALID_BROKER', 'S3 broker requires "bucket_name" to be populated')
        region_name = config.get('region_name')

        credentials = AWSClient.instantiate_credentials_from_config(config)

        # Check whether the bucket can actually be accessed
        with S3Client(credentials, region_name) as client:
            try:
                client.get_bucket(config['bucket_name'])
            except (ClientError, NoCredentialsError):
                warnings.append(ValidationWarning('bucket_access',
                                                  'Unable to access bucket. Check the bucket name and credentials.'))

        return warnings
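Example #11 accepts a plain configuration dict and only requires bucket_name; region_name and credentials are optional. A hypothetical configuration and call might look like the following; the shape of the credentials block is an assumption based on AWSClient.instantiate_credentials_from_config, and broker is assumed to be an already-created instance of this S3 broker.

config = {
    'bucket_name': 'my-ingest-bucket',  # required
    'region_name': 'us-east-1',         # optional
    'credentials': {                    # optional; key names are assumed
        'access_key_id': 'AKIA...',
        'secret_access_key': '...',
    },
}

warnings = broker.validate_configuration(config)
for warning in warnings:
    # Each entry is a ValidationWarning, e.g. 'bucket_access' when the
    # bucket cannot be reached with the supplied credentials.
    print(warning)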
Example #12
    def test_list_objects_invalid_bucket_name(self):
        with self.assertRaises(ParamValidationError):
            with S3Client(self.credentials) as client:
                items = list(client.list_objects('invalid:bucket:name'))