def _get_all(self, element_map, initial_query_string='', headers=None,
             **params):
    query_args = self._get_all_query_args(
        params, initial_query_string=initial_query_string)
    response = self.connection.make_request('GET', self.name,
                                            headers=headers,
                                            query_args=query_args)
    body = response.read()
    if response.status == 200:
        rs = ResultSet(element_map)
        h = handler.XmlHandler(rs, self)
        if not isinstance(body, bytes):
            body = body.encode('utf-8')
        try:
            xml.sax.parseString(body, h)
        except Exception as exc:
            # SAX parsing failed; fall back to PyQuery and rebuild the
            # result set from the <Contents> elements directly.
            print('XML parse failed with {0}; falling back to PyQuery'.format(exc))
            doc = PyQuery(body)
            for content in doc('Contents').items():
                new_key = Key(bucket=self)
                new_key.name = content('Key').text()
                new_key.etag = content('Etag').text()
                new_key.size = content('Size').text()
                new_key.last_modified = content('LastModified').text()
                new_key.storage_class = content('StorageClass').text()
                rs.append(new_key)
        return rs
    else:
        raise S3ResponseError(response.status, response.reason, body)
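# Standalone sketch of the PyQuery fallback above, run against a hand-written
# ListBucketResult snippet rather than a live response. Assumes the `pyquery`
# package is installed; element names mirror the selectors used above.
from pyquery import PyQuery

sample = b"""<ListBucketResult>
  <Contents>
    <Key>photos/cat.jpg</Key>
    <Etag>"abc123"</Etag>
    <Size>1024</Size>
    <LastModified>2020-01-01T00:00:00.000Z</LastModified>
    <StorageClass>STANDARD</StorageClass>
  </Contents>
</ListBucketResult>"""

for content in PyQuery(sample)('Contents').items():
    print(content('Key').text(), content('Size').text())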
def get_all_parts(self, max_parts=None, part_number_marker=None,
                  encoding_type=None):
    """
    Return the uploaded parts of this MultiPart Upload.

    This is a lower-level method that requires you to manually page
    through results. To simplify this process, you can just use the
    object itself as an iterator and it will automatically handle
    all of the paging with S3.
    """
    self._parts = []
    query_args = 'uploadId=%s' % self.id
    if max_parts:
        query_args += '&max-parts=%d' % max_parts
    if part_number_marker:
        query_args += '&part-number-marker=%s' % part_number_marker
    if encoding_type:
        query_args += '&encoding-type=%s' % encoding_type
    response = self.bucket.connection.make_request('GET',
                                                   self.bucket.name,
                                                   self.key_name,
                                                   query_args=query_args)
    body = response.read()
    if response.status == 200:
        h = handler.XmlHandler(self, self)
        xml.sax.parseString(body, h)
        return self._parts
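# Usage sketch, not part of the SDK: manual paging with get_all_parts, in
# contrast to simply iterating the upload object. `mp` is assumed to be a
# MultiPartUpload returned by initiate_multipart_upload(); attribute names
# follow the boto-2 Part class.
marker = None
while True:
    parts = mp.get_all_parts(max_parts=100, part_number_marker=marker)
    for part in parts:
        print(part.part_number, part.size, part.etag)
    if len(parts) < 100:
        break  # final page
    marker = parts[-1].part_number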
def initiate_multipart_upload(self, key_name, headers=None,
                              reduced_redundancy=False, metadata=None,
                              encrypt_key=False, policy=None,
                              calc_encrypt_md5=True):
    """
    Start a multipart upload operation.

    Note: After you initiate a multipart upload and upload one or more
    parts, you must either complete or abort the upload in order to stop
    getting charged for storage of the uploaded parts. Only after you
    complete or abort the upload does Amazon S3 free the parts storage
    and stop charging you for it.
    """
    query_args = 'uploads'
    provider = self.connection.provider
    headers = headers or {}
    if policy:
        headers[provider.acl_header] = policy
    if reduced_redundancy:
        storage_class_header = provider.storage_class_header
        if storage_class_header:
            headers[storage_class_header] = 'REDUCED_REDUNDANCY'
        # TODO: what if the provider doesn't support reduced redundancy?
    if encrypt_key:
        headers[provider.server_side_encryption_header] = 'AES256'
    if metadata is None:
        metadata = {}
    headers = ks3.utils.merge_meta(headers, metadata,
                                   self.connection.provider)
    if self.connection.local_encrypt:
        # Record a digest of the encryption key and the first IV as object
        # metadata so the uploaded parts can be decrypted later.
        crypts = Crypts(self.connection.key)
        crypts.calc_md5 = calc_encrypt_md5
        crypts.action_info = "init_multi"
        md5_generator = hashlib.md5()
        md5_generator.update(crypts.key)
        headers["x-kss-meta-key"] = base64.b64encode(
            md5_generator.hexdigest().encode()).decode()
        headers["x-kss-meta-iv"] = base64.b64encode(crypts.first_iv).decode()
    response = self.connection.make_request('POST', self.name, key_name,
                                            query_args=query_args,
                                            headers=headers)
    body = response.read()
    if response.status == 200:
        resp = MultiPartUpload(self)
        if self.connection.local_encrypt:
            resp.set_crypt_context(crypts)
        h = handler.XmlHandler(resp, self)
        if not isinstance(body, bytes):
            body = body.encode('utf-8')
        xml.sax.parseString(body, h)
        return resp
    else:
        raise self.connection.provider.storage_response_error(
            response.status, response.reason, body)
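# Usage sketch, not part of the SDK: the full multipart flow around the
# initiator above. Assumes a boto-style connection `conn` with valid
# credentials and that MultiPartUpload exposes the boto-2 helpers
# upload_part_from_file / complete_upload / cancel_upload; all names
# (bucket, key, sizes) are illustrative.
import os
from io import BytesIO

data = os.urandom(6 * 1024 * 1024)      # sample payload
part_size = 5 * 1024 * 1024             # minimum part size for all but the last part
bucket = conn.get_bucket('my-bucket')   # hypothetical bucket
mp = bucket.initiate_multipart_upload('big.bin')
try:
    for i in range(0, len(data), part_size):
        mp.upload_part_from_file(BytesIO(data[i:i + part_size]),
                                 part_num=i // part_size + 1)
    mp.complete_upload()
except Exception:
    mp.cancel_upload()  # abort so the parts stop accruing storage charges
    raise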
def get_bucket_location(self, bucket_name):
    response = self.make_request('GET', bucket_name, query_args='location')
    body = response.read()
    if response.status == 200:
        loc = BucketLocation()
        h = handler.XmlHandler(loc, self)
        xml.sax.parseString(body, h)
        return loc.location
    else:
        raise S3ResponseError(response.status, response.reason, body)
def get_all_buckets(self, headers=None):
    response = self.make_request('GET', headers=headers)
    body = response.read()
    if response.status > 300:
        raise S3ResponseError(response.status, response.reason, body)
    rs = ResultSet([('Bucket', Bucket)])
    h = handler.XmlHandler(rs, self)
    if not isinstance(body, bytes):
        body = body.encode('utf-8')
    xml.sax.parseString(body, h)
    return rs
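# Usage sketch, not part of the SDK: list every bucket on the account.
# The import path and endpoint follow the ks3 SDK's boto-2 style layout;
# credentials and host are placeholders.
from ks3.connection import Connection

conn = Connection('<access-key>', '<secret-key>',
                  host='ks3-cn-beijing.ksyuncs.com')
for bucket in conn.get_all_buckets():
    print(bucket.name)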
def get_bucket_versioning(self, headers=None):
    response = self.connection.make_request('GET', self.name,
                                            query_args='versioning',
                                            headers=headers)
    body = response.read()
    if response.status == 200:
        versioning_config = BucketVersioningConfig()
        h = handler.XmlHandler(versioning_config, self)
        if not isinstance(body, bytes):
            body = body.encode('utf-8')
        xml.sax.parseString(body, h)
        return versioning_config
    else:
        raise S3ResponseError(response.status, response.reason, body)
def get_bucket_lifecyle(self, headers=None):
    response = self.connection.make_request('GET', self.name,
                                            query_args='lifecycle',
                                            headers=headers)
    body = response.read()
    if response.status == 200:
        lifecycle = BucketLifecyle()
        h = handler.XmlHandler(lifecycle, self)
        if not isinstance(body, bytes):
            body = body.encode('utf-8')
        xml.sax.parseString(body, h)
        return lifecycle
    else:
        raise S3ResponseError(response.status, response.reason, body)
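# Usage sketch, not part of the SDK: the simple bucket-level getters in this
# module (location lives on the connection; versioning, lifecycle, and logging
# on the bucket). `conn` and `bucket` are assumed to exist as in the sketches
# above.
print(conn.get_bucket_location('my-bucket'))  # e.g. a region string
versioning = bucket.get_bucket_versioning()
lifecycle = bucket.get_bucket_lifecyle()      # method name as spelled in this module
logging_status = bucket.get_logging_status()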
def copy_key(self, new_key_name, src_bucket_name, src_key_name,
             headers=None, query_args=None, encrypt_key=False):
    """
    Create a new key in the bucket by copying an existing key.

    :param new_key_name: The name of the new key
    :param src_bucket_name: The name of the source bucket
    :param src_key_name: The name of the source key
    :param headers: A dictionary of header name/value pairs.
    :param query_args: A string of additional querystring arguments
        to append to the request
    :param encrypt_key: If True, the new copy of the object will be
        encrypted on the server side by KS3 and will be stored in an
        encrypted form while at rest in KS3.
    :return: The new Key object, populated from the copy response.
    """
    if not new_key_name or not src_key_name:
        raise ValueError('Empty key names are not allowed')
    if not src_bucket_name:
        raise ValueError('Empty bucket names are not allowed')
    headers = headers or {}
    provider = self.connection.provider
    if encrypt_key:
        headers[provider.server_side_encryption_header] = 'AES256'
    src = '/%s/%s' % (src_bucket_name,
                      parse.quote_plus(src_key_name.encode('utf-8')))
    src = src.replace('//', '/%2F')
    headers[provider.copy_source_header] = str(src)
    response = self.connection.make_request('PUT', self.name, new_key_name,
                                            headers=headers,
                                            query_args=query_args)
    body = response.read()
    if response.status == 200:
        key = self.new_key(new_key_name)
        h = handler.XmlHandler(key, self)
        if not isinstance(body, bytes):
            body = body.encode('utf-8')
        xml.sax.parseString(body, h)
        # A 200 response can still carry an error document; surface it.
        if hasattr(key, 'Error'):
            raise provider.storage_copy_error(key.Code, key.Message, body)
        return key
    else:
        raise provider.storage_response_error(response.status,
                                              response.reason, body)
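# Usage sketch, not part of the SDK: server-side copy into another bucket,
# requesting server-side encryption for the copy. All names are hypothetical.
dst_bucket = conn.get_bucket('backup-bucket')
copied = dst_bucket.copy_key('archive/report.csv',        # new key name
                             'source-bucket',             # source bucket
                             'reports/2020/report.csv',   # source key
                             encrypt_key=True)
print(copied.name)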
def initiate_multipart_upload(self, key_name, headers=None,
                              reduced_redundancy=False, metadata=None,
                              encrypt_key=False, policy=None):
    """
    Start a multipart upload operation.

    Note: After you initiate a multipart upload and upload one or more
    parts, you must either complete or abort the upload in order to stop
    getting charged for storage of the uploaded parts. Only after you
    complete or abort the upload does Amazon S3 free the parts storage
    and stop charging you for it.
    """
    query_args = 'uploads'
    provider = self.connection.provider
    headers = headers or {}
    if policy:
        headers[provider.acl_header] = policy
    if reduced_redundancy:
        storage_class_header = provider.storage_class_header
        if storage_class_header:
            headers[storage_class_header] = 'REDUCED_REDUNDANCY'
        # TODO: what if the provider doesn't support reduced redundancy?
    if encrypt_key:
        headers[provider.server_side_encryption_header] = 'AES256'
    if metadata is None:
        metadata = {}
    headers = ks3.utils.merge_meta(headers, metadata,
                                   self.connection.provider)
    response = self.connection.make_request('POST', self.name, key_name,
                                            query_args=query_args,
                                            headers=headers)
    body = response.read()
    if response.status == 200:
        resp = MultiPartUpload(self)
        h = handler.XmlHandler(resp, self)
        if not isinstance(body, bytes):
            body = body.encode('utf-8')
        xml.sax.parseString(body, h)
        return resp
    else:
        raise self.connection.provider.storage_response_error(
            response.status, response.reason, body)
def get_acl(self, key_name='', headers=None, version_id=None):
    query_args = 'acl'
    if version_id:
        query_args += '&versionId=%s' % version_id
    response = self.connection.make_request('GET', self.name, key_name,
                                            query_args=query_args,
                                            headers=headers)
    body = response.read()
    if response.status == 200:
        policy = Policy(self)
        h = handler.XmlHandler(policy, self)
        if not isinstance(body, bytes):
            body = body.encode('utf-8')
        xml.sax.parseString(body, h)
        return policy
    else:
        raise S3ResponseError(response.status, response.reason, body)
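# Usage sketch, not part of the SDK: read the bucket ACL and one object's
# ACL. Assumes boto-style Policy/ACL/Grant objects; the key name is
# hypothetical.
bucket_policy = bucket.get_acl()
for grant in bucket_policy.acl.grants:
    print(grant.permission, grant.display_name)
object_policy = bucket.get_acl(key_name='photos/cat.jpg')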
def get_object_tagging(self, object_key, version_id=None, headers=None):
    query_args = 'tagging'
    if version_id is not None:
        query_args = query_args + '&versionId=' + version_id
    response = self.connection.make_request('GET', self.name, object_key,
                                            query_args=query_args,
                                            headers=headers)
    body = response.read()
    if response.status == 200:
        object_tagging = ObjectTagging()
        h = handler.XmlHandler(object_tagging, self)
        if not isinstance(body, bytes):
            body = body.encode('utf-8')
        xml.sax.parseString(body, h)
        return object_tagging
    else:
        raise S3ResponseError(response.status, response.reason, body)
def complete_multipart_upload(self, key_name, upload_id, xml_body,
                              headers=None):
    """
    Complete a multipart upload operation.
    """
    query_args = 'uploadId=%s' % upload_id
    if headers is None:
        headers = {}
    headers['Content-Type'] = 'text/xml'
    response = self.connection.make_request('POST', self.name, key_name,
                                            query_args=query_args,
                                            headers=headers, data=xml_body)
    contains_error = False
    body = response.read().decode('utf-8')
    # Some errors will be reported in the body of the response
    # even though the HTTP response code is 200. This check
    # does a quick and dirty peek in the body for an error element.
    if body.find('<Error>') > 0:
        contains_error = True
    if response.status == 200 and not contains_error:
        resp = CompleteMultiPartUpload(self)
        h = handler.XmlHandler(resp, self)
        if not isinstance(body, bytes):
            body = body.encode('utf-8')
        xml.sax.parseString(body, h)
        # Use a dummy key to parse various response headers
        # for versioning, encryption info and then explicitly
        # set the completed MPU object values from key.
        k = Key(self)
        k.handle_version_headers(response)
        k.handle_encryption_headers(response)
        resp.version_id = k.version_id
        resp.encrypted = k.encrypted
        resp.status = response.status
        return resp
    else:
        raise self.connection.provider.storage_response_error(
            response.status, response.reason, body)
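# Sketch, not part of the SDK, of the xml_body the method above expects: the
# standard CompleteMultipartUpload document pairing each part number with the
# ETag returned when that part was uploaded. Part numbers and ETags below are
# placeholders; `mp` is assumed to be the in-progress MultiPartUpload.
parts = [(1, '"etag-for-part-1"'), (2, '"etag-for-part-2"')]
xml_body = '<CompleteMultipartUpload>'
for part_num, etag in parts:
    xml_body += ('<Part><PartNumber>%d</PartNumber>'
                 '<ETag>%s</ETag></Part>' % (part_num, etag))
xml_body += '</CompleteMultipartUpload>'
bucket.complete_multipart_upload('big.bin', mp.id, xml_body)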
def get_logging_status(self, headers=None):
    """
    Get the logging status for this bucket.

    :rtype: :class:`boto.s3.bucketlogging.BucketLogging`
    :return: A BucketLogging object for this bucket.
    """
    response = self.connection.make_request('GET', self.name,
                                            query_args='logging',
                                            headers=headers)
    body = response.read()
    if response.status == 200:
        blogging = BucketLogging()
        h = handler.XmlHandler(blogging, self)
        if not isinstance(body, bytes):
            body = body.encode('utf-8')
        xml.sax.parseString(body, h)
        return blogging
    else:
        raise S3ResponseError(response.status, response.reason, body)
def _get_all(self, element_map, initial_query_string='', headers=None,
             **params):
    query_args = self._get_all_query_args(
        params, initial_query_string=initial_query_string)
    response = self.connection.make_request('GET', self.name,
                                            headers=headers,
                                            query_args=query_args)
    body = response.read()
    if response.status == 200:
        rs = ResultSet(element_map)
        h = handler.XmlHandler(rs, self)
        if not isinstance(body, bytes):
            body = body.encode('utf-8')
        xml.sax.parseString(body, h)
        return rs
    else:
        raise S3ResponseError(response.status, response.reason, body)
def _get_all(self, element_map, initial_query_string='', headers=None,
             **params):
    query_args = self._get_all_query_args(
        params, initial_query_string=initial_query_string)
    response = self.connection.make_request('GET', self.name,
                                            headers=headers,
                                            query_args=query_args)
    body = response.read()
    if response.status == 200:
        rs = ResultSet(element_map)
        h = handler.XmlHandler(rs, self)
        if not isinstance(body, bytes):
            body = body.encode('utf-8')
        try:
            xml.sax.parseString(body, h)
        except Exception as exc:
            # SAX parsing failed; rebuild the key list from the
            # <Contents> elements with PyQuery instead.
            print('XML parse failed with {0}; using PyQuery fallback'.format(exc))
            doc = PyQuery(body)
            for content in doc('Contents').items():
                new_key = Key(bucket=self)
                new_key.name = content('Key').text()
                new_key.etag = content('Etag').text()
                new_key.size = content('Size').text()
                new_key.last_modified = content('LastModified').text()
                new_key.storage_class = content('StorageClass').text()
                rs.append(new_key)
        return rs
    else:
        raise S3ResponseError(response.status, response.reason, body)
def delete_keys2(hdrs):
    hdrs = hdrs or {}
    data = u"""<?xml version="1.0" encoding="UTF-8"?>"""
    data += u"<Delete>"
    if quiet:
        data += u"<Quiet>true</Quiet>"
    count = 0
    while count < 1000:
        try:
            key = next(ikeys)
        except StopIteration:
            break
        if isinstance(key, six.string_types):
            key_name = key
            version_id = None
        elif isinstance(key, tuple) and len(key) == 2:
            key_name, version_id = key
        elif (isinstance(key, Key) or isinstance(key, DeleteMarker)) and key.name:
            key_name = key.name
            version_id = key.version_id
        else:
            if isinstance(key, Prefix):
                key_name = key.name
                code = 'PrefixSkipped'  # don't delete a Prefix
            else:
                key_name = repr(key)  # try to get a string
                code = 'InvalidArgument'  # other, unknown type
            message = 'Invalid. No delete action taken for this object.'
            error = Error(key_name, code=code, message=message)
            result.errors.append(error)
            continue
        count += 1
        data += u"<Object><Key>%s</Key>" % xml.sax.saxutils.escape(key_name)
        if version_id:
            data += u"<VersionId>%s</VersionId>" % version_id
        data += u"</Object>"
    data += u"</Delete>"
    if count <= 0:
        return False  # no more keys
    data = data.encode('utf-8')
    fp = BytesIO(data)
    md5 = boto.utils.compute_md5(fp)
    hdrs['Content-MD5'] = md5[1]
    hdrs['Content-Type'] = 'text/xml'
    if mfa_token:
        hdrs[provider.mfa_header] = ' '.join(mfa_token)
    response = self.connection.make_request('POST', self.name,
                                            headers=hdrs,
                                            query_args=query_args,
                                            data=data)
    body = response.read()
    if response.status == 200:
        h = handler.XmlHandler(result, self)
        if not isinstance(body, bytes):
            body = body.encode('utf-8')
        xml.sax.parseString(body, h)
        return count >= 1000  # more to delete?
    else:
        raise provider.storage_response_error(response.status,
                                              response.reason, body)
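# Usage sketch, not part of the SDK: the public bulk-delete entry point this
# closure backs (boto-style Bucket.delete_keys). It accepts plain strings,
# (name, version_id) tuples, or Key objects, matching the type checks above;
# names are hypothetical.
result = bucket.delete_keys(['old/a.txt', ('old/b.txt', None)], quiet=True)
for error in result.errors:
    print(error.key, error.code, error.message)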