Example #1
    def create_bucket(self,
                      bucket_name,
                      headers=None,
                      location=Location.DEFAULT,
                      policy=None,
                      storage_class='STANDARD'):
        """
        Creates a new bucket. By default it's located in the USA. You can
        pass Location.EU to create a bucket in the EU. You can also pass
        a LocationConstraint for where the bucket should be located, and
        a StorageClass describing how the data should be stored.

        :type bucket_name: string
        :param bucket_name: The name of the new bucket.

        :type headers: dict
        :param headers: Additional headers to pass along with the request to GCS.

        :type location: :class:`boto.gs.connection.Location`
        :param location: The location of the new bucket.

        :type policy: :class:`boto.gs.acl.CannedACLStrings`
        :param policy: A canned ACL policy that will be applied to the new key
                       in GCS.

        :type storage_class: string
        :param storage_class: Either 'STANDARD' or 'DURABLE_REDUCED_AVAILABILITY'.

        """
        check_lowercase_bucketname(bucket_name)

        if policy:
            if headers:
                headers[self.provider.acl_header] = policy
            else:
                headers = {self.provider.acl_header: policy}
        if not location:
            location = Location.DEFAULT
        location_elem = ('<LocationConstraint>%s</LocationConstraint>' %
                         location)
        if storage_class:
            storage_class_elem = ('<StorageClass>%s</StorageClass>' %
                                  storage_class)
        else:
            storage_class_elem = ''
        data = ('<CreateBucketConfiguration>%s%s</CreateBucketConfiguration>' %
                (location_elem, storage_class_elem))
        response = self.make_request('PUT',
                                     get_utf8_value(bucket_name),
                                     headers=headers,
                                     data=get_utf8_value(data))
        body = response.read()
        if response.status == 409:
            raise self.provider.storage_create_error(response.status,
                                                     response.reason, body)
        if response.status == 200:
            return self.bucket_class(self, bucket_name)
        else:
            raise self.provider.storage_response_error(response.status,
                                                       response.reason, body)
Example #2
    def create_bucket(self, bucket_name, headers=None,
                      location=Location.DEFAULT, policy=None,
                      storage_class='STANDARD'):
        """
        Creates a new bucket. By default it's located in the USA. You can
        pass Location.EU to create a bucket in the EU. You can also pass
        a LocationConstraint for where the bucket should be located, and 
        a StorageClass describing how the data should be stored.

        :type bucket_name: string
        :param bucket_name: The name of the new bucket.
        
        :type headers: dict
        :param headers: Additional headers to pass along with the request to GCS.

        :type location: :class:`boto.gs.connection.Location`
        :param location: The location of the new bucket.

        :type policy: :class:`boto.gs.acl.CannedACLStrings`
        :param policy: A canned ACL policy that will be applied to the new key
                       in GCS.

        :type storage_class: string
        :param storage_class: Either 'STANDARD' or 'DURABLE_REDUCED_AVAILABILITY'.
             
        """
        check_lowercase_bucketname(bucket_name)

        if policy:
            if headers:
                headers[self.provider.acl_header] = policy
            else:
                headers = {self.provider.acl_header: policy}
        if not location:
            location = Location.DEFAULT
        location_elem = ('<LocationConstraint>%s</LocationConstraint>'
                         % location)
        if storage_class:
            storage_class_elem = ('<StorageClass>%s</StorageClass>'
                                  % storage_class)
        else:
            storage_class_elem = ''
        data = ('<CreateBucketConfiguration>%s%s</CreateBucketConfiguration>'
                 % (location_elem, storage_class_elem))
        response = self.make_request(
            'PUT', get_utf8_value(bucket_name), headers=headers,
            data=get_utf8_value(data))
        body = response.read()
        if response.status == 409:
            raise self.provider.storage_create_error(
                response.status, response.reason, body)
        if response.status == 200:
            return self.bucket_class(self, bucket_name)
        else:
            raise self.provider.storage_response_error(
                response.status, response.reason, body)
Example #3
    def set_cors(self, cors, headers=None):
        """Sets a bucket's CORS XML document.

        :param str cors: A string containing the CORS XML.
        :param dict headers: Additional headers to send with the request.
        """
        response = self.connection.make_request(
            "PUT", get_utf8_value(self.name), data=get_utf8_value(cors), query_args=CORS_ARG, headers=headers
        )
        body = response.read()
        if response.status != 200:
            raise self.connection.provider.storage_response_error(response.status, response.reason, body)
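A short sketch of calling set_cors on an existing bucket; the CORS XML below is an illustrative document and the bucket name is a placeholder:

    import boto

    conn = boto.connect_gs()
    bucket = conn.get_bucket('my-example-bucket')
    cors_xml = ('<CorsConfig><Cors>'
                '<Origins><Origin>https://example.com</Origin></Origins>'
                '<Methods><Method>GET</Method></Methods>'
                '</Cors></CorsConfig>')
    bucket.set_cors(cors_xml)  # raises a storage error unless the PUT returns 200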
Example #4
    def set_cors(self, cors, headers=None):
        """Sets a bucket's CORS XML document.

        :param str cors: A string containing the CORS XML.
        :param dict headers: Additional headers to send with the request.
        """
        response = self.connection.make_request(
            'PUT', get_utf8_value(self.name), data=get_utf8_value(cors),
            query_args=CORS_ARG, headers=headers)
        body = response.read()
        if response.status != 200:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, body)
Example #5
    def compose(self, components, content_type=None, headers=None):
        """Create a new object from a sequence of existing objects.

        The content of the object representing this Key will be the
        concatenation of the given object sequence. For more detail, visit

            https://developers.google.com/storage/docs/composite-objects

        :type components: list
        :param components: List of gs.Keys representing the component objects.

        :type content_type: string
        :param content_type: (optional) Content type for the new composite object.
        """
        compose_req = []
        for key in components:
            if key.bucket.name != self.bucket.name:
                raise BotoClientError(
                    'GCS does not support inter-bucket composing')

            generation_tag = ''
            if key.generation:
                generation_tag = ('<Generation>%s</Generation>' %
                                  str(key.generation))
            compose_req.append('<Component><Name>%s</Name>%s</Component>' %
                               (key.name, generation_tag))
        compose_req_xml = ('<ComposeRequest>%s</ComposeRequest>' %
                           ''.join(compose_req))
        headers = headers or {}
        if content_type:
            headers['Content-Type'] = content_type
        resp = self.bucket.connection.make_request(
            'PUT',
            get_utf8_value(self.bucket.name),
            get_utf8_value(self.name),
            headers=headers,
            query_args='compose',
            data=get_utf8_value(compose_req_xml))
        if resp.status < 200 or resp.status > 299:
            raise self.bucket.connection.provider.storage_response_error(
                resp.status, resp.reason, resp.read())

        # Return the generation so that the result URI can be built with this
        # for automatic parallel uploads.
        return resp.getheader('x-goog-generation')
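A sketch of composing two existing objects in the same bucket into a new one; the bucket and object names are hypothetical:

    import boto

    conn = boto.connect_gs()
    bucket = conn.get_bucket('my-example-bucket')
    parts = [bucket.get_key('logs/part-1'), bucket.get_key('logs/part-2')]
    composite = bucket.new_key('logs/combined')
    generation = composite.compose(parts, content_type='text/plain')
    # 'generation' holds the x-goog-generation of the newly written composite object.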
Example #6
    def _set_acl_helper(self,
                        acl_or_str,
                        key_name,
                        headers,
                        query_args,
                        generation,
                        if_generation,
                        if_metageneration,
                        canned=False):
        """Provides common functionality for set_acl, set_xml_acl,
        set_canned_acl, set_def_acl, set_def_xml_acl, and
        set_def_canned_acl()."""

        headers = headers or {}
        data = ''
        if canned:
            headers[self.connection.provider.acl_header] = acl_or_str
        else:
            data = acl_or_str

        if generation:
            query_args += '&generation=%s' % generation

        if if_metageneration is not None and if_generation is None:
            raise ValueError("Received if_metageneration argument with no "
                             "if_generation argument. A metageneration has no "
                             "meaning without a content generation.")
        if not key_name and (if_generation or if_metageneration):
            raise ValueError("Received if_generation or if_metageneration "
                             "parameter while setting the ACL of a bucket.")
        if if_generation is not None:
            headers['x-goog-if-generation-match'] = str(if_generation)
        if if_metageneration is not None:
            headers['x-goog-if-metageneration-match'] = str(if_metageneration)

        response = self.connection.make_request('PUT',
                                                get_utf8_value(self.name),
                                                get_utf8_value(key_name),
                                                data=get_utf8_value(data),
                                                headers=headers,
                                                query_args=query_args)
        body = response.read()
        if response.status != 200:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, body)
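_set_acl_helper is internal; callers normally go through the public ACL setters on the bucket, roughly as in this sketch (bucket and object names are placeholders):

    import boto

    conn = boto.connect_gs()
    bucket = conn.get_bucket('my-example-bucket')
    bucket.set_acl('public-read')                        # canned ACL on the bucket itself
    bucket.set_acl('public-read', key_name='photo.jpg')  # canned ACL on one object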
Example #7
    def set_storage_class(self, storage_class, headers=None):
        """
        Sets a bucket's storage class.

        :param str storage_class: A string containing the storage class.
        :param dict headers: Additional headers to send with the request.
        """
        req_body = self.StorageClassBody % (get_utf8_value(storage_class))
        self.set_subresource(STORAGE_CLASS_ARG, req_body, headers=headers)
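A one-line usage sketch, assuming the bucket already exists (the name is a placeholder):

    import boto

    bucket = boto.connect_gs().get_bucket('my-example-bucket')
    bucket.set_storage_class('DURABLE_REDUCED_AVAILABILITY')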
Example #8
    def configure_lifecycle(self, lifecycle_config, headers=None):
        """
        Configure lifecycle for this bucket.

        :type lifecycle_config: :class:`boto.gs.lifecycle.LifecycleConfig`
        :param lifecycle_config: The lifecycle configuration you want
            to configure for this bucket.
        """
        xml = lifecycle_config.to_xml()
        response = self.connection.make_request(
            'PUT', get_utf8_value(self.name), data=get_utf8_value(xml),
            query_args=LIFECYCLE_ARG, headers=headers)
        body = response.read()
        if response.status == 200:
            return True
        else:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, body)
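A sketch of building a LifecycleConfig and applying it; the add_rule signature shown is an assumption about boto.gs.lifecycle, and the rule values are illustrative:

    import boto
    from boto.gs.lifecycle import LifecycleConfig

    conn = boto.connect_gs()
    bucket = conn.get_bucket('my-example-bucket')
    lifecycle = LifecycleConfig()
    # Assumed signature: add_rule(action, action_params, conditions).
    lifecycle.add_rule('Delete', None, {'Age': 365})
    bucket.configure_lifecycle(lifecycle)  # returns True on a 200 response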
Example #9
    def set_storage_class(self, storage_class, headers=None):
        """
        Sets a bucket's storage class.

        :param str storage_class: A string containing the storage class.
        :param dict headers: Additional headers to send with the request.
        """
        req_body = self.StorageClassBody % (get_utf8_value(storage_class))
        self.set_subresource(STORAGE_CLASS_ARG, req_body, headers=headers)
Example #10
    def configure_website(self,
                          main_page_suffix=None,
                          error_key=None,
                          headers=None):
        """Configure this bucket to act as a website

        :type main_page_suffix: str
        :param main_page_suffix: Suffix that is appended to a request that is
            for a "directory" on the website endpoint (e.g. if the suffix is
            index.html and you make a request to samplebucket/images/ the data
            that is returned will be for the object with the key name
            images/index.html). The suffix must not be empty and must not
            include a slash character. This parameter is optional and the
            property is disabled if excluded.

        :type error_key: str
        :param error_key: The object key name to use when a 404 error occurs.
            This parameter is optional and the property is disabled if excluded.

        :param dict headers: Additional headers to send with the request.
        """
        if main_page_suffix:
            main_page_frag = self.WebsiteMainPageFragment % main_page_suffix
        else:
            main_page_frag = ''

        if error_key:
            error_frag = self.WebsiteErrorFragment % error_key
        else:
            error_frag = ''

        body = self.WebsiteBody % (main_page_frag, error_frag)
        response = self.connection.make_request('PUT',
                                                get_utf8_value(self.name),
                                                data=get_utf8_value(body),
                                                query_args='websiteConfig',
                                                headers=headers)
        body = response.read()
        if response.status == 200:
            return True
        else:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, body)
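A minimal sketch of enabling website serving on an existing bucket; the bucket and page names are placeholders:

    import boto

    bucket = boto.connect_gs().get_bucket('my-example-bucket')
    bucket.configure_website(main_page_suffix='index.html', error_key='404.html')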
Example #11
    def _set_acl_helper(
        self, acl_or_str, key_name, headers, query_args, generation, if_generation, if_metageneration, canned=False
    ):
        """Provides common functionality for set_acl, set_xml_acl,
        set_canned_acl, set_def_acl, set_def_xml_acl, and
        set_def_canned_acl()."""

        headers = headers or {}
        data = ""
        if canned:
            headers[self.connection.provider.acl_header] = acl_or_str
        else:
            data = acl_or_str

        if generation:
            query_args += "&generation=%s" % generation

        if if_metageneration is not None and if_generation is None:
            raise ValueError(
                "Received if_metageneration argument with no "
                "if_generation argument. A metageneration has no "
                "meaning without a content generation."
            )
        if not key_name and (if_generation or if_metageneration):
            raise ValueError(
                "Received if_generation or if_metageneration " "parameter while setting the ACL of a bucket."
            )
        if if_generation is not None:
            headers["x-goog-if-generation-match"] = str(if_generation)
        if if_metageneration is not None:
            headers["x-goog-if-metageneration-match"] = str(if_metageneration)

        response = self.connection.make_request(
            "PUT",
            get_utf8_value(self.name),
            get_utf8_value(key_name),
            data=get_utf8_value(data),
            headers=headers,
            query_args=query_args,
        )
        body = response.read()
        if response.status != 200:
            raise self.connection.provider.storage_response_error(response.status, response.reason, body)
Example #12
    def compose(self, components, content_type=None, headers=None):
        """Create a new object from a sequence of existing objects.

        The content of the object representing this Key will be the
        concatenation of the given object sequence. For more detail, visit

            https://developers.google.com/storage/docs/composite-objects

        :type components: list
        :param components: List of gs.Keys representing the component objects.

        :type content_type: string
        :param content_type: (optional) Content type for the new composite object.
        """
        compose_req = []
        for key in components:
            if key.bucket.name != self.bucket.name:
                raise BotoClientError(
                    'GCS does not support inter-bucket composing')

            generation_tag = ''
            if key.generation:
                generation_tag = ('<Generation>%s</Generation>'
                                  % str(key.generation))
            compose_req.append('<Component><Name>%s</Name>%s</Component>' %
                               (key.name, generation_tag))
        compose_req_xml = ('<ComposeRequest>%s</ComposeRequest>' %
                         ''.join(compose_req))
        headers = headers or {}
        if content_type:
            headers['Content-Type'] = content_type
        resp = self.bucket.connection.make_request(
            'PUT', get_utf8_value(self.bucket.name), get_utf8_value(self.name),
            headers=headers, query_args='compose',
            data=get_utf8_value(compose_req_xml))
        if resp.status < 200 or resp.status > 299:
            raise self.bucket.connection.provider.storage_response_error(
                resp.status, resp.reason, resp.read())

        # Return the generation so that the result URI can be built with this
        # for automatic parallel uploads.
        return resp.getheader('x-goog-generation')
Example #13
File: bucket.py Project: 10sr/hue
    def configure_website(self, main_page_suffix=None, error_key=None,
                          headers=None):
        """Configure this bucket to act as a website

        :type main_page_suffix: str
        :param main_page_suffix: Suffix that is appended to a request that is
            for a "directory" on the website endpoint (e.g. if the suffix is
            index.html and you make a request to samplebucket/images/ the data
            that is returned will be for the object with the key name
            images/index.html). The suffix must not be empty and must not
            include a slash character. This parameter is optional and the
            property is disabled if excluded.

        :type error_key: str
        :param error_key: The object key name to use when a 404 error occurs.
            This parameter is optional and the property is disabled if excluded.

        :param dict headers: Additional headers to send with the request.
        """
        if main_page_suffix:
            main_page_frag = self.WebsiteMainPageFragment % main_page_suffix
        else:
            main_page_frag = ''

        if error_key:
            error_frag = self.WebsiteErrorFragment % error_key
        else:
            error_frag = ''

        body = self.WebsiteBody % (main_page_frag, error_frag)
        response = self.connection.make_request(
            'PUT', get_utf8_value(self.name), data=get_utf8_value(body),
            query_args='websiteConfig', headers=headers)
        body = response.read()
        if response.status == 200:
            return True
        else:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, body)
Example #14
File: key.py Project: epowers/boto
    def compose(self, components, content_type=None, headers=None):
        """Create a new object from a sequence of existing objects.

        The content of the object representing this Key will be the
        concatenation of the given object sequence. For more detail, visit

            https://developers.google.com/storage/docs/composite-objects

        :type components: list
        :param components: List of gs.Keys representing the component objects.

        :type content_type: string
        :param content_type: (optional) Content type for the new composite object.
        """
        compose_req = []
        for key in components:
            if key.bucket.name != self.bucket.name:
                raise BotoClientError("GCS does not support inter-bucket composing")

            generation_tag = ""
            if key.generation:
                generation_tag = "<Generation>%s</Generation>" % str(key.generation)
            compose_req.append("<Component><Name>%s</Name>%s</Component>" % (key.name, generation_tag))
        compose_req_xml = "<ComposeRequest>%s</ComposeRequest>" % "".join(compose_req)
        headers = headers or {}
        if content_type:
            headers["Content-Type"] = content_type
        resp = self.bucket.connection.make_request(
            "PUT",
            get_utf8_value(self.bucket.name),
            get_utf8_value(self.name),
            headers=headers,
            query_args="compose",
            data=get_utf8_value(compose_req_xml),
        )
        if resp.status < 200 or resp.status > 299:
            raise self.bucket.connection.provider.storage_response_error(resp.status, resp.reason, resp.read())
Example #15
    def set_contents_from_string(self,
                                 s,
                                 headers=None,
                                 replace=True,
                                 cb=None,
                                 num_cb=10,
                                 policy=None,
                                 md5=None,
                                 if_generation=None):
        """
        Store an object in GCS using the name of the Key object as the
        key in GCS and the string 's' as the contents.
        See set_contents_from_file method for details about the
        parameters.

        :type headers: dict
        :param headers: Additional headers to pass along with the
                        request to AWS.

        :type replace: bool
        :param replace: If True, replaces the contents of the file if
                        it already exists.

        :type cb: function
        :param cb: a callback function that will be called to report
                   progress on the upload. The callback should accept
                   two integer parameters, the first representing the
                   number of bytes that have been successfully
                   transmitted to GCS and the second representing the
                   size of the to be transmitted object.

        :type num_cb: int
        :param num_cb: (optional) If a callback is specified with
                       the cb parameter this parameter determines the
                       granularity of the callback by defining
                       the maximum number of times the callback will
                       be called during the file transfer.

        :type policy: :class:`boto.gs.acl.CannedACLStrings`
        :param policy: A canned ACL policy that will be applied to the
                       new key in GCS.

        :type md5: A tuple containing the hexdigest version of the MD5
                   checksum of the file as the first element and the
                   Base64-encoded version of the plain checksum as the
                   second element. This is the same format returned by
                   the compute_md5 method.
        :param md5: If you need to compute the MD5 for any reason prior
                    to upload, it's silly to have to do it twice so this
                    param, if present, will be used as the MD5 values
                    of the file. Otherwise, the checksum will be computed.

        :type if_generation: int
        :param if_generation: (optional) If set to a generation number, the
            object will only be written to if its current generation number is
            this value. If set to the value 0, the object will only be written
            if it doesn't already exist.
        """

        # Clear out any previously computed md5 hashes, since we are setting the content.
        self.md5 = None
        self.base64md5 = None

        fp = StringIO.StringIO(get_utf8_value(s))
        r = self.set_contents_from_file(fp,
                                        headers,
                                        replace,
                                        cb,
                                        num_cb,
                                        policy,
                                        md5,
                                        if_generation=if_generation)
        fp.close()
        return r
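A short sketch of writing a string to a new object; if_generation=0 asks GCS to write only if the object does not already exist, and the names used here are placeholders:

    import boto

    bucket = boto.connect_gs().get_bucket('my-example-bucket')
    key = bucket.new_key('greetings/hello.txt')
    key.set_contents_from_string('Hello, GCS!',
                                 headers={'Content-Type': 'text/plain'},
                                 policy='public-read',
                                 if_generation=0)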
Example #16
    def set_contents_from_string(self, s, headers=None, replace=True,
                                 cb=None, num_cb=10, policy=None, md5=None,
                                 if_generation=None):
        """
        Store an object in GCS using the name of the Key object as the
        key in GCS and the string 's' as the contents.
        See set_contents_from_file method for details about the
        parameters.

        :type headers: dict
        :param headers: Additional headers to pass along with the
                        request to AWS.

        :type replace: bool
        :param replace: If True, replaces the contents of the file if
                        it already exists.

        :type cb: function
        :param cb: a callback function that will be called to report
                   progress on the upload. The callback should accept
                   two integer parameters, the first representing the
                   number of bytes that have been successfully
                   transmitted to GCS and the second representing the
                   size of the to be transmitted object.

        :type num_cb: int
        :param num_cb: (optional) If a callback is specified with
                       the cb parameter this parameter determines the
                       granularity of the callback by defining
                       the maximum number of times the callback will
                       be called during the file transfer.

        :type policy: :class:`boto.gs.acl.CannedACLStrings`
        :param policy: A canned ACL policy that will be applied to the
                       new key in GCS.

        :type md5: A tuple containing the hexdigest version of the MD5
                   checksum of the file as the first element and the
                   Base64-encoded version of the plain checksum as the
                   second element. This is the same format returned by
                   the compute_md5 method.
        :param md5: If you need to compute the MD5 for any reason prior
                    to upload, it's silly to have to do it twice so this
                    param, if present, will be used as the MD5 values
                    of the file. Otherwise, the checksum will be computed.

        :type if_generation: int
        :param if_generation: (optional) If set to a generation number, the
            object will only be written to if its current generation number is
            this value. If set to the value 0, the object will only be written
            if it doesn't already exist.
        """

        # Clear out any previously computed md5 hashes, since we are setting the content.
        self.md5 = None
        self.base64md5 = None

        fp = StringIO.StringIO(get_utf8_value(s))
        r = self.set_contents_from_file(fp, headers, replace, cb, num_cb,
                                        policy, md5,
                                        if_generation=if_generation)
        fp.close()
        return r