def _set_container_meta(self, container: OpenStackContainer, meta_data: MetaData) -> None:
    """Store user metadata on an existing container.

    :param container: Container to set metadata to.
    :type container: :class:`openstack.object_store.v1.container.Container`

    :param meta_data: A map of metadata to store with the container.
    :type meta_data: dict

    :return: NoneType
    :rtype: None

    :raises CloudStorageError: If setting the metadata failed.
    """
    url = self._get_server_public_url('cloudFiles') + '/' + quote(container.id)

    # Every user key is sent as a header carrying the container meta prefix.
    request_headers = {
        self._CONTAINER_META_PREFIX + key: value
        for key, value in meta_data.items()
    }
    request_headers['X-Auth-Token'] = self._token

    response = requests.post(url, headers=request_headers)
    # A successful container POST returns 204 No Content.
    if response.status_code != HTTPStatus.NO_CONTENT:
        raise CloudStorageError(response.text)
def generate_container_upload_url(
    self,
    container: Container,
    blob_name: str,
    expires: int = 3600,
    acl: str = None,
    meta_data: MetaData = None,
    content_disposition: str = None,
    content_length: ContentLength = None,
    content_type: str = None,
    cache_control: str = None,
    extra: ExtraOptions = None,
) -> FormPost:
    """Generate a presigned POST policy for uploading a blob to the bucket.

    :param container: The container (bucket) the object will be uploaded to.
    :type container: :class:`.Container`

    :param blob_name: Key of the object to upload.
    :type blob_name: str

    :param expires: (optional) Seconds until the presigned POST expires.
    :type expires: int

    :param acl: (optional) Canned ACL to apply to the object.
    :type acl: str or None

    :param meta_data: (optional) User metadata stored with the object.
    :type meta_data: dict or None

    :param content_disposition: (optional) Content-Disposition the client
        must send with the upload.
    :type content_disposition: str or None

    :param content_length: (optional) (min, max) allowed upload size.
    :type content_length: tuple or None

    :param content_type: (optional) Content-Type the client must send.
    :type content_type: str or None

    :param cache_control: (optional) Cache-Control the client must send.
    :type cache_control: str or None

    :param extra: (optional) Extra driver parameters.
    :type extra: dict or None

    :return: The presigned POST url and form fields.
    :rtype: dict
    """
    meta_data = meta_data if meta_data is not None else {}
    extra = extra if extra is not None else {}
    normalized_extra = self._normalize_parameters(extra, self._POST_OBJECT_KEYS)

    policy_conditions = []  # type: List[Any]
    form_fields = {}  # type: Dict[Any, Any]

    if acl:
        form_fields["acl"] = acl
        policy_conditions.append({"acl": acl})

    # Standard HTTP headers the client may send with the upload; each one
    # present becomes both a form field and an exact-match policy condition.
    for header_name, header_value in (
        ("Content-Disposition", content_disposition),
        ("Content-Type", content_type),
        ("Cache-Control", cache_control),
    ):
        if not header_value:
            continue
        form_fields[header_name.lower()] = header_value
        policy_conditions.append(["eq", "$" + header_name, header_value])

    # content_length is a (min, max) tuple expressed as a range condition.
    if content_length:
        policy_conditions.append(
            ["content-length-range", content_length[0], content_length[1]]
        )

    # User metadata, prefixed per the driver's object metadata convention.
    for raw_key, meta_value in meta_data.items():
        prefixed_key = self._OBJECT_META_PREFIX + raw_key
        form_fields[prefixed_key] = meta_value
        policy_conditions.append({prefixed_key: meta_value})

    # Extra, already-normalized driver parameters.
    for extra_key, extra_value in normalized_extra.items():
        form_fields[extra_key] = extra_value
        policy_conditions.append({extra_key: extra_value})

    return self.s3.meta.client.generate_presigned_post(
        Bucket=container.name,
        Key=blob_name,
        Fields=form_fields,
        Conditions=policy_conditions,
        ExpiresIn=int(expires),
    )
def generate_container_upload_url(
    self,
    container: Container,
    blob_name: str,
    expires: int = 3600,
    acl: str = None,
    meta_data: MetaData = None,
    content_disposition: str = None,
    content_length: ContentLength = None,
    content_type: str = None,
    cache_control: str = None,
    extra: ExtraOptions = None,
) -> FormPost:
    """Generate a SAS upload URL and headers for uploading a blob.

    :param container: The container the blob will be uploaded to.
    :type container: :class:`.Container`

    :param blob_name: Name of the blob to upload.
    :type blob_name: str

    :param expires: (optional) Seconds until the SAS token expires.
    :type expires: int

    :param acl: (optional) Not supported by this driver; logged and ignored.
    :type acl: str or None

    :param meta_data: (optional) User metadata stored with the blob, sent as
        prefixed request headers.
    :type meta_data: dict or None

    :param content_disposition: (optional) Content-Disposition of the blob.
    :type content_disposition: str or None

    :param content_length: (optional) Not supported by this driver; logged
        and ignored.
    :type content_length: tuple or None

    :param content_type: (optional) Content-Type of the blob.
    :type content_type: str or None

    :param cache_control: (optional) Cache-Control of the blob.
    :type cache_control: str or None

    :param extra: (optional) Extra SAS parameters.
    :type extra: dict or None

    :return: The upload url, no form fields, and the headers the client must
        send with the upload request.
    :rtype: dict
    """
    if acl:
        logger.info(messages.OPTION_NOT_SUPPORTED, "acl")
    # Fix: a SAS token cannot enforce an upload size range, and this option
    # was previously dropped silently — surface it to the caller instead.
    if content_length:
        logger.info(messages.OPTION_NOT_SUPPORTED, "content_length")

    meta_data = meta_data if meta_data is not None else {}
    extra = extra if extra is not None else {}
    params = self._normalize_parameters(extra, self._POST_OBJECT_KEYS)

    azure_container = self._get_azure_container(container.name)
    expires_at = datetime.utcnow() + timedelta(seconds=expires)

    sas_token = self.service.generate_container_shared_access_signature(
        container_name=azure_container.name,
        permission=BlobPermissions.WRITE,
        expiry=expires_at,
        content_disposition=content_disposition,
        content_type=content_type,
        **params,
    )

    # Headers the client sends with the upload; metadata keys are prefixed
    # so the service stores them as user metadata on the blob.
    headers = {
        "x-ms-blob-type": "BlockBlob",
        "x-ms-blob-content-type": content_type,
        "x-ms-blob-content-disposition": content_disposition,
        "x-ms-blob-cache-control": cache_control,
    }
    for meta_key, meta_value in meta_data.items():
        key = self._OBJECT_META_PREFIX + meta_key
        headers[key] = meta_value

    upload_url = self.service.make_blob_url(
        container_name=azure_container.name,
        blob_name=blob_name,
        sas_token=sas_token,
    )
    return {"url": upload_url, "fields": None, "headers": headers}
def generate_container_upload_url(
    self,
    container: Container,
    blob_name: str,
    expires: int = 3600,
    acl: str = None,
    meta_data: MetaData = None,
    content_disposition: str = None,
    content_length: ContentLength = None,
    content_type: str = None,
    cache_control: str = None,
    extra: ExtraOptions = None,
) -> FormPost:
    """Generate a presigned POST policy for uploading a blob.

    :param container: The container (bucket) the object will be uploaded to.
    :type container: :class:`.Container`

    :param blob_name: Key prefix the uploaded object's key must start with.
    :type blob_name: str

    :param expires: (optional) Seconds until the policy expires.
    :type expires: int

    :param acl: (optional) Not supported by this driver; logged and ignored.
    :type acl: str or None

    :param meta_data: (optional) User metadata stored with the object.
    :type meta_data: dict or None

    :param content_disposition: (optional) Not supported by this driver;
        logged and ignored.
    :type content_disposition: str or None

    :param content_length: (optional) (min, max) allowed upload size.
    :type content_length: tuple or None

    :param content_type: (optional) Content-Type the client must send.
    :type content_type: str or None

    :param cache_control: (optional) Not supported by this driver; logged
        and ignored.
    :type cache_control: str or None

    :param extra: (optional) Not supported by this driver; logged and
        ignored.
    :type extra: dict or None

    :return: The presigned POST url and form fields.
    :rtype: dict
    """
    # Fix: report every unsupported option. Previously only
    # content_disposition and cache_control were logged, while acl and extra
    # were dropped silently.
    if content_disposition:
        logger.warning(messages.OPTION_NOT_SUPPORTED, "content_disposition")
    if cache_control:
        logger.warning(messages.OPTION_NOT_SUPPORTED, "cache_control")
    if acl:
        logger.warning(messages.OPTION_NOT_SUPPORTED, "acl")
    if extra:
        logger.warning(messages.OPTION_NOT_SUPPORTED, "extra")

    meta_data = {} if meta_data is None else meta_data

    post_policy = PostPolicy()
    post_policy.set_bucket_name(container.name)
    # Accept any key that starts with blob_name (supports prefix uploads).
    post_policy.set_key_startswith(blob_name)

    if content_length:
        min_range, max_range = content_length
        post_policy.set_content_length_range(min_range, max_range)

    if content_type:
        post_policy.set_content_type(content_type)

    # User metadata: each key is prefixed and pinned with an exact-match
    # policy condition plus a matching form field.
    for meta_name, meta_value in meta_data.items():
        meta_name = self._OBJECT_META_PREFIX + meta_name
        post_policy.policies.append(("eq", "$%s" % meta_name, meta_value))
        post_policy.form_data[meta_name] = meta_value

    expires_date = datetime.utcnow() + timedelta(seconds=expires)
    post_policy.set_expires(expires_date)

    url, fields = self.client.presigned_post_policy(post_policy)
    return {"url": url, "fields": fields}
def generate_container_upload_url(
    self,
    container: Container,
    blob_name: str,
    expires: int = 3600,
    acl: str = None,
    meta_data: MetaData = None,
    content_disposition: str = None,
    content_length: ContentLength = None,
    content_type: str = None,
    cache_control: str = None,
    extra: ExtraOptions = None,
) -> FormPost:
    """Generate a signed POST policy for uploading objects to the bucket.

    :param container: The container (bucket) the object will be uploaded to.
    :type container: :class:`.Container`

    :param blob_name: Key (or key prefix) of the object to upload. When
        empty or without a file suffix, the uploaded file's own name is
        substituted via the ``${filename}`` policy variable.
    :type blob_name: str

    :param expires: (optional) Seconds until the policy expires.
    :type expires: int

    :param acl: (optional) Canned ACL to apply to the object.
    :type acl: str or None

    :param meta_data: (optional) User metadata stored with the object.
    :type meta_data: dict or None

    :param content_disposition: (optional) Content-Disposition the client
        must send with the upload.
    :type content_disposition: str or None

    :param content_length: (optional) (min, max) allowed upload size.
    :type content_length: tuple or None

    :param content_type: (optional) Content-Type the client must send.
    :type content_type: str or None

    :param cache_control: (optional) Cache-Control the client must send.
    :type cache_control: str or None

    :param extra: (optional) Extra policy parameters.
    :type extra: dict or None

    :return: The upload url and signed form fields.
    :rtype: dict
    """
    meta_data = meta_data if meta_data is not None else {}
    extra = extra if extra is not None else {}
    extra_norm = self._normalize_parameters(extra, self._POST_OBJECT_KEYS)

    bucket = self._get_bucket(container.name)

    conditions = [
        # file name can start with any valid character.
        ["starts-with", "$key", ""]
    ]  # type: List[Any]
    fields = {}

    if acl:
        conditions.append({"acl": acl})
        fields["acl"] = acl

    headers = {
        "Content-Disposition": content_disposition,
        "Content-Type": content_type,
        "Cache-Control": cache_control,
    }
    for header_name, header_value in headers.items():
        if not header_value:
            continue
        fields[header_name.lower()] = header_value
        conditions.append(["eq", "$" + header_name, header_value])

    # Add content-length-range which is a tuple
    if content_length:
        min_range, max_range = content_length
        conditions.append(["content-length-range", min_range, max_range])

    for meta_name, meta_value in meta_data.items():
        meta_name = self._OBJECT_META_PREFIX + meta_name
        fields[meta_name] = meta_value
        conditions.append({meta_name: meta_value})

    # Add extra conditions and fields
    for extra_name, extra_value in extra_norm.items():
        fields[extra_name] = extra_value
        conditions.append({extra_name: extra_value})

    # Determine the "key" form field. "${filename}" is the POST policy
    # variable the service substitutes with the name of the file the user
    # uploads. (Fix: this was previously the invalid literal "$(unknown)".)
    if not blob_name:
        # user provided filename
        fields["key"] = "${filename}"
    else:
        path = pathlib.Path(blob_name)
        if path.suffix:
            # blob_name is filename
            fields["key"] = blob_name
        else:
            # prefix + user provided filename
            fields["key"] = blob_name + "${filename}"

    logger.debug("conditions=%s", conditions)
    logger.debug("fields=%s", fields)

    expiration = datetime.utcnow() + timedelta(seconds=expires)

    # noinspection PyTypeChecker
    policy = bucket.generate_upload_policy(
        conditions=conditions, expiration=expiration
    )
    fields.update(policy)

    url = "https://{bucket_name}.storage.googleapis.com".format(
        bucket_name=container.name
    )
    return {"url": url, "fields": fields}