def account_listing_response(account, req, response_content_type, info=None,
                             listing=None, s3_buckets_only=False):
    """
    Build the response for an account container listing (oio flavor).

    :param account: account name, embedded in the XML envelope
    :param req: the swob request being answered
    :param response_content_type: negotiated response content type
    :param info: account stats dict; a zeroed placeholder is built if None
    :param listing: iterable of container rows
        ``(name, object_count, bytes_used, is_subdir[, mtime])``
    :param s3_buckets_only: when True, subdir rows are dropped from JSON/XML
        output (S3 bucket listings have no notion of subdirs)
    :returns: HTTPOk carrying the formatted listing, or HTTPNoContent for an
              empty plain-text listing
    """
    now = time.time()
    if info is None:
        info = {'containers': 0, 'objects': 0, 'bytes': 0,
                'metadata': {}, 'ctime': Timestamp(now).internal}
    if listing is None:
        listing = []
    elif listing and len(listing[0]) < 5:
        # oio-sds < 4.2 does not return mtime; pad rows with "now" so the
        # 5-tuple unpacking below always works
        listing = [x + [now] for x in listing]
    resp_headers = get_response_headers(info)
    if response_content_type == 'application/json':
        data = []
        for (name, object_count, bytes_used, is_subdir, mtime) in listing:
            if is_subdir:
                if not s3_buckets_only:
                    data.append({'subdir': name})
            else:
                data.append({'name': name, 'count': object_count,
                             'bytes': bytes_used,
                             'last_modified': Timestamp(mtime).isoformat})
        account_list = json.dumps(data)
    elif response_content_type.endswith('/xml'):
        output_list = ['<?xml version="1.0" encoding="UTF-8"?>',
                       '<account name=%s>' % saxutils.quoteattr(account)]
        for (name, object_count, bytes_used, is_subdir, mtime) in listing:
            if is_subdir:
                if not s3_buckets_only:
                    output_list.append(
                        '<subdir name=%s />' % saxutils.quoteattr(name))
            else:
                item = '<container><name>%s</name><count>%s</count>' \
                       '<bytes>%s</bytes><last_modified>%s</last_modified>' \
                       '</container>' % \
                       (saxutils.escape(name), object_count, bytes_used,
                        Timestamp(mtime).isoformat)
                output_list.append(item)
        output_list.append('</account>')
        account_list = '\n'.join(output_list)
    else:
        # plain text: an empty listing is a 204, not an empty 200 body
        if not listing:
            resp = HTTPNoContent(request=req, headers=resp_headers)
            resp.content_type = response_content_type
            resp.charset = 'utf-8'
            return resp
        account_list = '\n'.join(r[0] for r in listing) + '\n'
    ret = HTTPOk(body=account_list, request=req, headers=resp_headers)
    ret.content_type = response_content_type
    ret.charset = 'utf-8'
    return ret
def account_listing_response(account, req, response_content_type, broker=None,
                             limit='', marker='', end_marker='', prefix='',
                             delimiter='', reverse=False):
    """
    Build the response for an account container listing.

    :param account: account name, embedded in the XML envelope
    :param req: the swob request being answered
    :param response_content_type: negotiated response content type
    :param broker: account broker; a FakeAccountBroker stands in when None
        (e.g. account DB not found but autocreate applies)
    :param limit, marker, end_marker, prefix, delimiter, reverse: listing
        window parameters, passed straight through to the broker
    :returns: HTTPOk carrying the formatted listing, or HTTPNoContent for an
              empty plain-text listing
    """
    if broker is None:
        broker = FakeAccountBroker()
    resp_headers = get_response_headers(broker)
    account_list = broker.list_containers_iter(limit, marker, end_marker,
                                               prefix, delimiter, reverse)
    if response_content_type == 'application/json':
        data = []
        for (name, object_count, bytes_used, is_subdir) in account_list:
            if is_subdir:
                data.append({'subdir': name})
            else:
                data.append({'name': name, 'count': object_count,
                             'bytes': bytes_used})
        account_list = json.dumps(data)
    elif response_content_type.endswith('/xml'):
        output_list = ['<?xml version="1.0" encoding="UTF-8"?>',
                       '<account name=%s>' % saxutils.quoteattr(account)]
        for (name, object_count, bytes_used, is_subdir) in account_list:
            if is_subdir:
                output_list.append('<subdir name=%s />' %
                                   saxutils.quoteattr(name))
            else:
                item = '<container><name>%s</name><count>%s</count>' \
                       '<bytes>%s</bytes></container>' % \
                       (saxutils.escape(name), object_count, bytes_used)
                output_list.append(item)
        output_list.append('</account>')
        account_list = '\n'.join(output_list)
    else:
        # plain text: an empty listing is a 204, not an empty 200 body
        if not account_list:
            resp = HTTPNoContent(request=req, headers=resp_headers)
            resp.content_type = response_content_type
            resp.charset = 'utf-8'
            return resp
        account_list = '\n'.join(r[0] for r in account_list) + '\n'
    ret = HTTPOk(body=account_list, request=req, headers=resp_headers)
    ret.content_type = response_content_type
    ret.charset = 'utf-8'
    return ret
def __call__(self, req):
    """
    WSGI entry point: route archive-extract PUTs and bulk-delete
    POST/DELETE requests to the bulk handlers; pass everything else on.
    """
    def negotiated_type():
        # A malformed Accept header makes best_match raise ValueError;
        # treat that the same as "no acceptable format".
        try:
            return req.accept.best_match(ACCEPTABLE_FORMATS)
        except ValueError:
            return None

    resp = None
    archive_ext = req.params.get('extract-archive')
    if archive_ext is not None and req.method == 'PUT':
        compression = {'tar': '', 'tar.gz': 'gz',
                       'tar.bz2': 'bz2'}.get(archive_ext.lower().strip('.'))
        if compression is None:
            resp = HTTPBadRequest("Unsupported archive format")
        else:
            resp = HTTPOk(request=req)
            out_content_type = negotiated_type()
            if out_content_type:
                resp.content_type = out_content_type
            resp.app_iter = self.handle_extract_iter(
                req, compression, out_content_type=out_content_type)
    if 'bulk-delete' in req.params and req.method in ('POST', 'DELETE'):
        resp = HTTPOk(request=req)
        out_content_type = negotiated_type()
        if out_content_type:
            resp.content_type = out_content_type
        resp.app_iter = self.handle_delete_iter(
            req, out_content_type=out_content_type)
    return resp or self.app
def account_listing_response(account, req, response_content_type, broker=None,
                             limit='', marker='', end_marker='', prefix='',
                             delimiter='', reverse=False):
    """
    This is an exact copy of swift.account.utils.account_listing_response()
    except for one difference i.e this method passes response_content_type
    to broker.list_containers_iter() method.

    :param account: account name, embedded in the XML envelope
    :param req: the swob request being answered
    :param response_content_type: negotiated response content type
    :param broker: account broker; a FakeAccountBroker stands in when None
    :returns: HTTPOk carrying the formatted listing, or HTTPNoContent for an
              empty plain-text listing
    """
    if broker is None:
        broker = FakeAccountBroker()
    resp_headers = get_response_headers(broker)
    account_list = broker.list_containers_iter(limit, marker, end_marker,
                                               prefix, delimiter,
                                               response_content_type,
                                               reverse)
    if response_content_type == 'application/json':
        data = []
        for (name, object_count, bytes_used, put_tstamp, is_subdir) \
                in account_list:
            if is_subdir:
                data.append({'subdir': name})
            else:
                data.append({'name': name, 'count': object_count,
                             'bytes': bytes_used,
                             'last_modified': Timestamp(put_tstamp).isoformat})
        account_list = json.dumps(data)
    elif response_content_type.endswith('/xml'):
        output_list = ['<?xml version="1.0" encoding="UTF-8"?>',
                       '<account name=%s>' % saxutils.quoteattr(account)]
        for (name, object_count, bytes_used, put_tstamp, is_subdir) \
                in account_list:
            if is_subdir:
                output_list.append(
                    '<subdir name=%s />' % saxutils.quoteattr(name))
            else:
                # BUG FIX: the original continued the line INSIDE the string
                # literal ('...</last_modified> \<newline>   </container>'),
                # which baked a run of raw indentation whitespace into the
                # emitted XML between </last_modified> and </container>.
                # Use adjacent-literal concatenation like the other variants.
                item = '<container><name>%s</name><count>%s</count>' \
                       '<bytes>%s</bytes><last_modified>%s</last_modified>' \
                       '</container>' % \
                       (saxutils.escape(name), object_count, bytes_used,
                        Timestamp(put_tstamp).isoformat)
                output_list.append(item)
        output_list.append('</account>')
        account_list = '\n'.join(output_list)
    else:
        # plain text: an empty listing is a 204, not an empty 200 body
        if not account_list:
            resp = HTTPNoContent(request=req, headers=resp_headers)
            resp.content_type = response_content_type
            resp.charset = 'utf-8'
            return resp
        account_list = '\n'.join(r[0] for r in account_list) + '\n'
    ret = HTTPOk(body=account_list, request=req, headers=resp_headers)
    ret.content_type = response_content_type
    ret.charset = 'utf-8'
    return ret
def account_listing_response(account, req, response_content_type, broker=None,
                             limit='', marker='', end_marker='', prefix='',
                             delimiter=''):
    """
    Build the response for an account container listing (Python 2 era:
    note broker.metadata.iteritems() below).

    :param account: account name, embedded in the XML envelope
    :param req: the swob request being answered
    :param response_content_type: negotiated response content type
    :param broker: account broker; a FakeAccountBroker stands in when None
    :returns: HTTPOk carrying the formatted listing, or HTTPNoContent for an
              empty plain-text listing
    """
    if broker is None:
        broker = FakeAccountBroker()
    info = broker.get_info()
    resp_headers = {
        'X-Account-Container-Count': info['container_count'],
        'X-Account-Object-Count': info['object_count'],
        'X-Account-Bytes-Used': info['bytes_used'],
        'X-Timestamp': info['created_at'],
        'X-PUT-Timestamp': info['put_timestamp']}
    # metadata values are (value, timestamp) pairs; expose only non-empty
    # values as response headers
    resp_headers.update((key, value)
                        for key, (value, timestamp) in
                        broker.metadata.iteritems() if value != '')
    account_list = broker.list_containers_iter(limit, marker, end_marker,
                                               prefix, delimiter)
    if response_content_type == 'application/json':
        data = []
        for (name, object_count, bytes_used, is_subdir) in account_list:
            if is_subdir:
                data.append({'subdir': name})
            else:
                data.append({'name': name, 'count': object_count,
                             'bytes': bytes_used})
        account_list = json.dumps(data)
    elif response_content_type.endswith('/xml'):
        output_list = ['<?xml version="1.0" encoding="UTF-8"?>',
                       '<account name=%s>' % saxutils.quoteattr(account)]
        for (name, object_count, bytes_used, is_subdir) in account_list:
            if is_subdir:
                output_list.append(
                    '<subdir name=%s />' % saxutils.quoteattr(name))
            else:
                item = '<container><name>%s</name><count>%s</count>' \
                       '<bytes>%s</bytes></container>' % \
                       (saxutils.escape(name), object_count, bytes_used)
                output_list.append(item)
        output_list.append('</account>')
        account_list = '\n'.join(output_list)
    else:
        # plain text: an empty listing is a 204, not an empty 200 body
        if not account_list:
            resp = HTTPNoContent(request=req, headers=resp_headers)
            resp.content_type = response_content_type
            resp.charset = 'utf-8'
            return resp
        account_list = '\n'.join(r[0] for r in account_list) + '\n'
    ret = HTTPOk(body=account_list, request=req, headers=resp_headers)
    ret.content_type = response_content_type
    ret.charset = 'utf-8'
    return ret
def account_listing_response(account, req, response_content_type, info=None,
                             listing=None):
    """
    Build the response for an account container listing.

    :param account: account name, embedded in the XML envelope
    :param req: the swob request being answered
    :param response_content_type: negotiated response content type
    :param info: account stats dict; a zeroed placeholder is built if None
    :param listing: iterable of ``(name, object_count, bytes_used,
        is_subdir)`` rows; treated as empty if None
    :returns: HTTPOk carrying the formatted listing, or HTTPNoContent for an
              empty plain-text listing
    """
    if info is None:
        now = Timestamp(time.time()).internal
        info = {'containers': 0, 'objects': 0, 'bytes': 0,
                'metadata': {}, 'ctime': now}
    if listing is None:
        listing = []
    resp_headers = get_response_headers(info)
    if response_content_type == 'application/json':
        data = []
        for (name, object_count, bytes_used, is_subdir) in listing:
            if is_subdir:
                data.append({'subdir': name})
            else:
                data.append({'name': name, 'count': object_count,
                             'bytes': bytes_used})
        account_list = json.dumps(data)
    elif response_content_type.endswith('/xml'):
        output_list = ['<?xml version="1.0" encoding="UTF-8"?>',
                       '<account name=%s>' % saxutils.quoteattr(account)]
        for (name, object_count, bytes_used, is_subdir) in listing:
            if is_subdir:
                output_list.append(
                    '<subdir name=%s />' % saxutils.quoteattr(name))
            else:
                item = '<container><name>%s</name><count>%s</count>' \
                       '<bytes>%s</bytes></container>' % \
                       (saxutils.escape(name), object_count, bytes_used)
                output_list.append(item)
        output_list.append('</account>')
        account_list = '\n'.join(output_list)
    else:
        # plain text: an empty listing is a 204, not an empty 200 body
        if not listing:
            resp = HTTPNoContent(request=req, headers=resp_headers)
            resp.content_type = response_content_type
            resp.charset = 'utf-8'
            return resp
        account_list = '\n'.join(r[0] for r in listing) + '\n'
    ret = HTTPOk(body=account_list, request=req, headers=resp_headers)
    ret.content_type = response_content_type
    ret.charset = 'utf-8'
    return ret
def GET(self, req):
    """
    Handle GET Service request.

    Lists the account's containers via the proxy (forced to JSON) and
    renders them as an S3 ListAllMyBucketsResult document.

    :param req: swob.Request for the service root
    :returns: HTTPOk with the bucket-list XML, or an S3 error document
    """
    req.query_string = 'format=json'
    resp = req.get_response(self.app)
    status = resp.status_int
    if status != HTTP_OK:
        # map the backend failure to the matching S3 error document
        if status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
            return get_err_response('AccessDenied')
        else:
            return get_err_response('InvalidURI')
    containers = loads(resp.body)
    # we don't keep the creation time of a bucket (s3cmd doesn't
    # work without that) so we use something bogus.
    body = '<?xml version="1.0" encoding="UTF-8"?>' \
           '<ListAllMyBucketsResult ' \
           'xmlns="http://doc.s3.amazonaws.com/2006-03-01">' \
           '<Buckets>%s</Buckets>' \
           '</ListAllMyBucketsResult>' \
           % ("".join(['<Bucket><Name>%s</Name><CreationDate>'
                       '2009-02-03T16:45:09.000Z</CreationDate></Bucket>'
                       % xml_escape(i['name'])
                       for i in containers]))
    return HTTPOk(content_type='application/xml', body=body)
def _manifest_head_response(self, req, response_headers):
    """Build the empty-body 200 response for a HEAD of an SLO manifest."""
    # Honour any alternate-etag location the request asked to match on.
    etag_override = resolve_etag_is_at_header(req, response_headers)
    return HTTPOk(request=req,
                  headers=response_headers,
                  body='',
                  conditional_etag=etag_override,
                  conditional_response=True)
def PUT(self, req):
    """
    Handles PUT Bucket acl and PUT Object acl.

    Object ACLs are not supported; bucket ACLs are translated from the
    S3 XML body into Swift ACL headers and applied with a container POST.
    """
    if self.object_name:
        # Handle Object ACL
        return get_err_response('Unsupported')
    else:
        # Handle Bucket ACL
        # We very likely have an XML-based ACL request.
        translated_acl = swift_acl_translate(req.body, xml=True)
        if translated_acl == 'Unsupported':
            return get_err_response('Unsupported')
        elif translated_acl == 'InvalidArgument':
            return get_err_response('InvalidArgument')
        for header, acl in translated_acl:
            req.headers[header] = acl
        # Swift applies ACLs via POST, S3 uses PUT
        req.method = 'POST'
        resp = req.get_response(self.app)
        status = resp.status_int
        if status != HTTP_CREATED and status != HTTP_NO_CONTENT:
            if status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
                return get_err_response('AccessDenied')
            elif status == HTTP_ACCEPTED:
                return get_err_response('BucketAlreadyExists')
            else:
                return get_err_response('InvalidURI')
        return HTTPOk(headers={'Location': self.container_name})
def handle_multipart_delete(self, req):
    """
    Will delete all the segments in the SLO manifest and then, if
    successful, will delete the manifest file.

    :params req: a swob.Request with an obj in path
    :returns: swob.Response whose app_iter set to Bulk.handle_delete_iter
    """
    resp = HTTPOk(request=req)
    negotiated = req.accept.best_match(ACCEPTABLE_FORMATS)
    if negotiated:
        resp.content_type = negotiated
    # deletions happen lazily as the response body is iterated
    resp.app_iter = self.bulk_deleter.handle_delete_iter(
        req,
        objs_to_delete=self.get_segments_to_delete_iter(req),
        user_agent='MultipartDELETE',
        swift_source='SLO',
        out_content_type=negotiated)
    return resp
def __call__(self, req):
    """
    WSGI entry point: route archive-extract PUTs and bulk DELETEs to the
    bulk handlers; pass every other request straight to the app.
    """
    resp = None
    archive_ext = req.params.get('extract-archive')
    if archive_ext is not None and req.method == 'PUT':
        compression = {'tar': '', 'tar.gz': 'gz',
                       'tar.bz2': 'bz2'}.get(archive_ext.lower().strip('.'))
        if compression is None:
            resp = HTTPBadRequest("Unsupported archive format")
        else:
            resp = HTTPOk(request=req)
            resp.app_iter = self.handle_extract_iter(req, compression)
    if 'bulk-delete' in req.params and req.method == 'DELETE':
        resp = HTTPOk(request=req)
        resp.app_iter = self.handle_delete_iter(req)
    return resp or self.app
def __call__(self, req):
    """
    WSGI entry point: route archive-extract PUTs and bulk-delete
    POST/DELETE requests to the bulk handlers; otherwise pass through.
    """
    resp = None
    archive_ext = req.params.get('extract-archive')
    if archive_ext is not None and req.method == 'PUT':
        compression = {'tar': '', 'tar.gz': 'gz',
                       'tar.bz2': 'bz2'}.get(archive_ext.lower().strip('.'))
        if compression is None:
            resp = HTTPBadRequest("Unsupported archive format")
        else:
            resp = HTTPOk(request=req)
            out_content_type = req.accept.best_match(ACCEPTABLE_FORMATS)
            if out_content_type:
                resp.content_type = out_content_type
            resp.app_iter = self.handle_extract_iter(
                req, compression, out_content_type=out_content_type)
    if 'bulk-delete' in req.params and req.method in ('POST', 'DELETE'):
        resp = HTTPOk(request=req)
        out_content_type = req.accept.best_match(ACCEPTABLE_FORMATS)
        if out_content_type:
            resp.content_type = out_content_type
        resp.app_iter = self.handle_delete_iter(
            req, out_content_type=out_content_type)
    return resp or self.app
def handle_multipart_delete(self, req):
    """
    Will delete all the segments in the SLO manifest and then, if
    successful, will delete the manifest file.

    :params req: a swob.Request with an obj in path
    :raises HTTPServerError: on invalid manifest
    :returns: swob.Response whose app_iter set to Bulk.handle_delete_iter
    """
    if not check_utf8(req.path_info):
        raise HTTPPreconditionFailed(request=req,
                                     body='Invalid UTF8 or contains NULL')
    try:
        vrs, account, container, obj = req.split_path(4, 4, True)
    except ValueError:
        raise HTTPBadRequest('Not an SLO manifest')
    # re-issue the request as a GET for the raw manifest JSON
    new_env = req.environ.copy()
    new_env['REQUEST_METHOD'] = 'GET'
    del (new_env['wsgi.input'])
    new_env['QUERY_STRING'] = 'multipart-manifest=get'
    new_env['CONTENT_LENGTH'] = 0
    new_env['HTTP_USER_AGENT'] = \
        '%s MultipartDELETE' % req.environ.get('HTTP_USER_AGENT')
    new_env['swift.source'] = 'SLO'
    get_man_resp = \
        Request.blank('', new_env).get_response(self.app)
    if get_man_resp.status_int // 100 == 2:
        # only a real SLO manifest may be bulk-deleted this way
        if not config_true_value(
                get_man_resp.headers.get('X-Static-Large-Object')):
            raise HTTPBadRequest('Not an SLO manifest')
        try:
            manifest = json.loads(get_man_resp.body)
            # append the manifest file for deletion at the end
            manifest.append(
                {'name': '/'.join(['', container, obj]).decode('utf-8')})
        except ValueError:
            raise HTTPServerError('Invalid manifest file')
        resp = HTTPOk(request=req)
        # segments are deleted lazily as the response body is iterated
        resp.app_iter = self.bulk_deleter.handle_delete_iter(
            req,
            objs_to_delete=[o['name'].encode('utf-8') for o in manifest],
            user_agent='MultipartDELETE', swift_source='SLO')
        return resp
    return get_man_resp
def account_listing_response(account, req, response_content_type, broker=None,
                             limit='', marker='', end_marker='', prefix='',
                             delimiter='', reverse=False):
    """
    Build the response for an account container listing.

    :param account: account name, embedded in the XML envelope
    :param req: the swob request being answered
    :param response_content_type: negotiated response content type
    :param broker: account broker; a FakeAccountBroker stands in when None
    :param limit, marker, end_marker, prefix, delimiter, reverse: listing
        window parameters, passed straight through to the broker
    :returns: HTTPOk carrying the formatted listing, or HTTPNoContent for an
              empty plain-text listing
    """
    if broker is None:
        broker = FakeAccountBroker()
    resp_headers = get_response_headers(broker)
    account_list = broker.list_containers_iter(limit, marker, end_marker,
                                               prefix, delimiter, reverse)
    if response_content_type == 'application/json':
        data = []
        for (name, object_count, bytes_used, is_subdir) in account_list:
            if is_subdir:
                data.append({'subdir': name})
            else:
                data.append({'name': name, 'count': object_count,
                             'bytes': bytes_used})
        account_list = json.dumps(data)
    elif response_content_type.endswith('/xml'):
        output_list = ['<?xml version="1.0" encoding="UTF-8"?>',
                       '<account name=%s>' % saxutils.quoteattr(account)]
        for (name, object_count, bytes_used, is_subdir) in account_list:
            if is_subdir:
                output_list.append(
                    '<subdir name=%s />' % saxutils.quoteattr(name))
            else:
                item = '<container><name>%s</name><count>%s</count>' \
                       '<bytes>%s</bytes></container>' % \
                       (saxutils.escape(name), object_count, bytes_used)
                output_list.append(item)
        output_list.append('</account>')
        account_list = '\n'.join(output_list)
    else:
        # plain text: an empty listing is a 204, not an empty 200 body
        if not account_list:
            resp = HTTPNoContent(request=req, headers=resp_headers)
            resp.content_type = response_content_type
            resp.charset = 'utf-8'
            return resp
        account_list = '\n'.join(r[0] for r in account_list) + '\n'
    ret = HTTPOk(body=account_list, request=req, headers=resp_headers)
    ret.content_type = response_content_type
    ret.charset = 'utf-8'
    return ret
def PUT(self, req):
    """
    Handle PUT Object and PUT Object (Copy) request.

    Translates S3 request headers (x-amz-meta-*, Content-MD5,
    x-amz-copy-source) to their Swift equivalents before forwarding.
    Python 2 only: uses str.decode('base64')/encode('hex').
    """
    for key, value in req.environ.items():
        if key.startswith('HTTP_X_AMZ_META_'):
            # x-amz-meta-* -> x-object-meta-*
            del req.environ[key]
            req.environ['HTTP_X_OBJECT_META_' + key[16:]] = value
        elif key == 'HTTP_CONTENT_MD5':
            if value == '':
                return get_err_response('InvalidDigest')
            try:
                # S3 sends base64 MD5; Swift expects it hex-encoded as ETag
                req.environ['HTTP_ETAG'] = \
                    value.decode('base64').encode('hex')
            except Exception:
                return get_err_response('InvalidDigest')
            if req.environ['HTTP_ETAG'] == '':
                return get_err_response('SignatureDoesNotMatch')
        elif key == 'HTTP_X_AMZ_COPY_SOURCE':
            req.environ['HTTP_X_COPY_FROM'] = value
    resp = req.get_response(self.app)
    status = resp.status_int
    if status != HTTP_CREATED:
        # map the backend failure to the matching S3 error document
        if status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
            return get_err_response('AccessDenied')
        elif status == HTTP_NOT_FOUND:
            return get_err_response('NoSuchBucket')
        elif status == HTTP_UNPROCESSABLE_ENTITY:
            return get_err_response('InvalidDigest')
        elif status == HTTP_REQUEST_ENTITY_TOO_LARGE:
            return get_err_response('EntityTooLarge')
        else:
            return get_err_response('InvalidURI')
    if 'HTTP_X_COPY_FROM' in req.environ:
        # PUT Object (Copy) replies with a CopyObjectResult document
        body = '<CopyObjectResult>' \
               '<ETag>"%s"</ETag>' \
               '</CopyObjectResult>' % resp.etag
        return HTTPOk(body=body)
    return HTTPOk(etag=resp.etag)
def handle_multipart_delete(self, req):
    """
    Will delete all the segments in the SLO manifest and then, if
    successful, will delete the manifest file.

    :params req: a swob.Request with an obj in path
    :raises HTTPServerError: on invalid manifest
    :returns: swob.Response whose app_iter set to Bulk.handle_delete_iter
    """
    if not check_utf8(req.path_info):
        raise HTTPPreconditionFailed(
            request=req, body='Invalid UTF8 or contains NULL')
    try:
        vrs, account, container, obj = req.split_path(4, 4, True)
    except ValueError:
        raise HTTPBadRequest('Not an SLO manifest')
    # re-issue the request as a GET for the raw manifest JSON
    new_env = req.environ.copy()
    new_env['REQUEST_METHOD'] = 'GET'
    del(new_env['wsgi.input'])
    new_env['QUERY_STRING'] = 'multipart-manifest=get'
    new_env['CONTENT_LENGTH'] = 0
    new_env['HTTP_USER_AGENT'] = \
        '%s MultipartDELETE' % req.environ.get('HTTP_USER_AGENT')
    new_env['swift.source'] = 'SLO'
    get_man_resp = \
        Request.blank('', new_env).get_response(self.app)
    if get_man_resp.status_int // 100 == 2:
        # only a real SLO manifest may be bulk-deleted this way
        if not config_true_value(
                get_man_resp.headers.get('X-Static-Large-Object')):
            raise HTTPBadRequest('Not an SLO manifest')
        try:
            manifest = json.loads(get_man_resp.body)
            # append the manifest file for deletion at the end
            manifest.append(
                {'name': '/'.join(['', container, obj]).decode('utf-8')})
        except ValueError:
            raise HTTPServerError('Invalid manifest file')
        resp = HTTPOk(request=req)
        # segments are deleted lazily as the response body is iterated
        resp.app_iter = self.bulk_deleter.handle_delete_iter(
            req,
            objs_to_delete=[o['name'].encode('utf-8') for o in manifest],
            user_agent='MultipartDELETE', swift_source='SLO')
        return resp
    return get_man_resp
def POST(self, req):
    """
    Handles Delete Multiple Objects.

    Parses the S3 <Delete> XML body, issues one Swift DELETE per key and
    accumulates per-key <Deleted>/<Error> elements into a DeleteResult
    document. Versioned deletes are not supported.
    """
    def object_key_iter(xml):
        # yield (key, version-or-None) for each <Object> in the request
        dom = parseString(xml)
        delete = dom.getElementsByTagName('Delete')[0]
        for obj in delete.getElementsByTagName('Object'):
            key = obj.getElementsByTagName('Key')[0].firstChild.data
            version = None
            if obj.getElementsByTagName('VersionId').length > 0:
                version = obj.getElementsByTagName('VersionId')[0]\
                    .firstChild.data
            yield (key, version)

    def get_deleted_elem(key):
        return '  <Deleted>\r\n' \
               '    <Key>%s</Key>\r\n' \
               '  </Deleted>\r\n' % (key)

    def get_err_elem(key, err_code, message):
        return '  <Error>\r\n' \
               '    <Key>%s</Key>\r\n' \
               '    <Code>%s</Code>\r\n' \
               '    <Message>%s</Message>\r\n' \
               '  </Error>\r\n' % (key, err_code, message)

    body = '<?xml version="1.0" encoding="UTF-8"?>\r\n' \
           '<DeleteResult ' \
           'xmlns="http://doc.s3.amazonaws.com/2006-03-01">\r\n'
    for key, version in object_key_iter(req.body):
        if version is not None:
            # TODO: delete the specific version of the object
            return get_err_response('Unsupported')
        # build a fresh sub-request so each DELETE is independent
        sub_req = Request(req.environ.copy())
        sub_req.query_string = ''
        sub_req.content_length = 0
        sub_req.method = 'DELETE'
        controller = ObjectController(sub_req, self.app, self.account_name,
                                      req.environ['HTTP_X_AUTH_TOKEN'],
                                      self.container_name, key)
        sub_resp = controller.DELETE(sub_req)
        status = sub_resp.status_int
        # S3 reports an already-missing key as successfully deleted
        if status == HTTP_NO_CONTENT or status == HTTP_NOT_FOUND:
            body += get_deleted_elem(key)
        else:
            if status == HTTP_UNAUTHORIZED:
                body += get_err_elem(key, 'AccessDenied', 'Access Denied')
            else:
                body += get_err_elem(key, 'InvalidURI', 'Invalid URI')
    body += '</DeleteResult>\r\n'
    return HTTPOk(body=body)
def handle_multipart_delete(self, req):
    """
    Will delete all the segments in the SLO manifest and then, if
    successful, will delete the manifest file.

    :params req: a swob.Request with an obj in path
    :raises HTTPServerError: on invalid manifest
    :returns: swob.Response whose app_iter set to Bulk.handle_delete_iter
    """
    if not check_utf8(req.path_info):
        raise HTTPPreconditionFailed(
            request=req, body='Invalid UTF8 or contains NULL')
    resp = HTTPOk(request=req)
    negotiated = req.accept.best_match(ACCEPTABLE_FORMATS)
    if negotiated:
        resp.content_type = negotiated
    # deletions happen lazily as the response body is iterated
    resp.app_iter = self.bulk_deleter.handle_delete_iter(
        req,
        objs_to_delete=self.get_segments_to_delete_iter(req),
        user_agent='MultipartDELETE',
        swift_source='SLO',
        out_content_type=negotiated)
    return resp
def dump_status(self):
    """
    Build a response with the current status of the server as a json
    object.
    """
    if self.cur_reqs:
        in_flight = self.cur_reqs.value
    else:
        in_flight = 0
    payload = {
        'stat.cur_reqs': in_flight,
        'stat.workers': self.workers,
    }
    return HTTPOk(body=json.dumps(payload),
                  headers={'Content-Type': 'application/json'})
def GETorHEAD(self, req):
    """Handler for HTTP GET/HEAD requests."""
    """
    Handles requests to /info
    Should return a WSGI-style callable (such as swob.Response).

    :param req: swob.Request object
    """
    if not self.expose_info:
        return HTTPForbidden(request=req)
    admin_request = False
    sig = req.params.get('swiftinfo_sig', '')
    expires = req.params.get('swiftinfo_expires', '')
    if sig != '' or expires != '':
        # a signed request asks for the admin sections of /info
        admin_request = True
        if not self.admin_key:
            return HTTPForbidden(request=req)
        try:
            expires = int(expires)
        except ValueError:
            return HTTPUnauthorized(request=req)
        if expires < time():
            return HTTPUnauthorized(request=req)
        valid_sigs = []
        for method in self.allowed_hmac_methods[req.method]:
            valid_sigs.append(
                get_hmac(method, '/info', expires, self.admin_key))
        # While it's true that any() will short-circuit, this doesn't
        # affect the timing-attack resistance since the only way this will
        # short-circuit is when a valid signature is passed in.
        is_valid_hmac = any(
            streq_const_time(valid_sig, sig) for valid_sig in valid_sigs)
        if not is_valid_hmac:
            return HTTPUnauthorized(request=req)
    headers = {}
    if 'Origin' in req.headers:
        # minimal CORS support for browser consumers of /info
        headers['Access-Control-Allow-Origin'] = req.headers['Origin']
        headers['Access-Control-Expose-Headers'] = ', '.join(
            ['x-trans-id'])
    info = json.dumps(
        get_swift_info(admin=admin_request,
                       disallowed_sections=self.disallowed_sections))
    return HTTPOk(request=req, headers=headers, body=info,
                  content_type='application/json; charset=UTF-8')
def account_listing_response(account, req, response_content_type, broker=None,
                             limit=constraints.ACCOUNT_LISTING_LIMIT,
                             marker='', end_marker='', prefix='',
                             delimiter='', reverse=False):
    """
    Build the response for an account container listing, delegating
    format rendering to listing_formats.

    :param account: account name, embedded in the XML envelope
    :param req: the swob request being answered; allow_reserved_names is
        forwarded to the broker
    :param response_content_type: negotiated response content type
    :param broker: account broker; a FakeAccountBroker stands in when None
    :returns: HTTPOk with the formatted listing, or HTTPNoContent for an
              empty plain-text listing
    """
    if broker is None:
        broker = FakeAccountBroker()
    resp_headers = get_response_headers(broker)
    account_list = broker.list_containers_iter(limit, marker, end_marker,
                                               prefix, delimiter, reverse,
                                               req.allow_reserved_names)
    data = []
    for (name, object_count, bytes_used, put_timestamp, is_subdir) \
            in account_list:
        # broker rows are bytes on py2, text on py3; normalize to text
        name_ = name.decode('utf8') if six.PY2 else name
        if is_subdir:
            data.append({'subdir': name_})
        else:
            data.append({'name': name_, 'count': object_count,
                         'bytes': bytes_used,
                         'last_modified': Timestamp(put_timestamp).isoformat})
    if response_content_type.endswith('/xml'):
        account_list = listing_formats.account_to_xml(data, account)
        ret = HTTPOk(body=account_list, request=req, headers=resp_headers)
    elif response_content_type.endswith('/json'):
        account_list = json.dumps(data).encode('ascii')
        ret = HTTPOk(body=account_list, request=req, headers=resp_headers)
    elif data:
        account_list = listing_formats.listing_to_text(data)
        ret = HTTPOk(body=account_list, request=req, headers=resp_headers)
    else:
        # plain text with no rows: 204, not an empty 200 body
        ret = HTTPNoContent(request=req, headers=resp_headers)
    ret.content_type = response_content_type
    ret.charset = 'utf-8'
    return ret
def account_listing_response(account, req, response_content_type, broker=None,
                             limit='', marker='', end_marker='', prefix='',
                             delimiter='', reverse=False):
    """
    Build the response for an account container listing, delegating
    format rendering to listing_formats.

    :param account: account name, embedded in the XML envelope
    :param req: the swob request being answered
    :param response_content_type: negotiated response content type
    :param broker: account broker; a FakeAccountBroker stands in when None
    :returns: HTTPOk with the formatted listing, or HTTPNoContent for an
              empty plain-text listing
    """
    if broker is None:
        broker = FakeAccountBroker()
    resp_headers = get_response_headers(broker)
    account_list = broker.list_containers_iter(limit, marker, end_marker,
                                               prefix, delimiter, reverse)
    data = []
    for (name, object_count, bytes_used, put_timestamp, is_subdir) \
            in account_list:
        # broker rows are bytes on py2, text on py3; normalize to text
        name_ = name.decode('utf8') if six.PY2 else name
        if is_subdir:
            data.append({'subdir': name_})
        else:
            data.append(
                {'name': name_, 'count': object_count,
                 'bytes': bytes_used,
                 'last_modified': Timestamp(put_timestamp).isoformat})
    if response_content_type.endswith('/xml'):
        account_list = listing_formats.account_to_xml(data, account)
        ret = HTTPOk(body=account_list, request=req, headers=resp_headers)
    elif response_content_type.endswith('/json'):
        account_list = json.dumps(data).encode('ascii')
        ret = HTTPOk(body=account_list, request=req, headers=resp_headers)
    elif data:
        account_list = listing_formats.listing_to_text(data)
        ret = HTTPOk(body=account_list, request=req, headers=resp_headers)
    else:
        # plain text with no rows: 204, not an empty 200 body
        ret = HTTPNoContent(request=req, headers=resp_headers)
    ret.content_type = response_content_type
    ret.charset = 'utf-8'
    return ret
def account_listing_bucket_response(account, req, response_content_type,
                                    listing=None):
    """
    Build a JSON bucket listing for an S3 account request.

    :param account: account name (unused in the body; kept for signature
        parity with account_listing_response)
    :param req: the swob request being answered
    :param response_content_type: must be 'application/json'
    :param listing: iterable of dicts with 'name', 'objects', 'bytes',
        'mtime' keys
    :returns: HTTPOk with the JSON listing, or HTTPPreconditionFailed for
              any non-JSON content type
    """
    if response_content_type != 'application/json':
        # AWS S3 always calls with format=json
        # check method GET in ServiceController (swift3/controllers/service.py)
        return HTTPPreconditionFailed(body='Invalid content type')
    data = []
    for entry in listing:
        data.append({'name': entry['name'],
                     'count': entry['objects'],
                     'bytes': entry['bytes'],
                     'last_modified': Timestamp(entry['mtime']).isoformat})
    # Python 2 json.dumps signature (encoding kwarg)
    account_list = json.dumps(data, encoding="utf-8")
    ret = HTTPOk(body=account_list, request=req, headers={})
    ret.content_type = response_content_type
    ret.charset = 'utf-8'
    return ret
def GETorHEAD(self, req):
    """
    Handler for HTTP GET/HEAD requests to /info.

    Should return a WSGI-style callable (such as swob.Response) carrying
    the JSON capability document; admin sections require a valid signed
    swiftinfo_sig/swiftinfo_expires query.

    :param req: swob.Request object
    """
    import hmac  # stdlib; for constant-time signature comparison

    if not self.expose_info:
        return HTTPForbidden(request=req)
    admin_request = False
    sig = req.params.get('swiftinfo_sig', '')
    expires = req.params.get('swiftinfo_expires', '')
    if sig != '' or expires != '':
        # a signed request asks for the admin sections of /info
        admin_request = True
        if not self.admin_key:
            return HTTPForbidden(request=req)
        try:
            expires = int(expires)
        except ValueError:
            return HTTPUnauthorized(request=req)
        if expires < time():
            return HTTPUnauthorized(request=req)
        valid_sigs = []
        for method in self.allowed_hmac_methods[req.method]:
            valid_sigs.append(
                get_hmac(method, '/info', expires, self.admin_key))
        # SECURITY FIX: `sig not in valid_sigs` compares the attacker-
        # supplied signature with ordinary (short-circuiting) string
        # equality, leaking timing information about how many leading
        # characters matched. Compare every candidate in constant time
        # instead; the loop deliberately does not break early.
        is_valid_hmac = False
        for valid_sig in valid_sigs:
            if hmac.compare_digest(valid_sig, sig):
                is_valid_hmac = True
        if not is_valid_hmac:
            return HTTPUnauthorized(request=req)
    headers = {}
    if 'Origin' in req.headers:
        # minimal CORS support for browser consumers of /info
        headers['Access-Control-Allow-Origin'] = req.headers['Origin']
        headers['Access-Control-Expose-Headers'] = ', '.join(
            ['x-trans-id'])
    info = json.dumps(
        get_swift_info(admin=admin_request,
                       disallowed_sections=self.disallowed_sections))
    return HTTPOk(request=req, headers=headers, body=info,
                  content_type='application/json; charset=UTF-8')
def handle_get_listing(self, req):
    """Retrieve a new-line separated list of all access keys.

    Required headers:

    - `x-s3auth-admin-key`: admin key
    """
    path = quote(self.akd_container_url)
    resp = make_pre_authed_request(req.environ, 'GET',
                                   path).get_response(self.app)
    if resp.status_int // 100 != 2:
        raise Exception('Could not GET access key listing: {} {}'.format(
            path, resp.status_int))
    names = [entry['name'] for entry in json.loads(resp.body)]
    return HTTPOk(request=req, body='\n'.join(names))
def handle_get_access_key(self, req, access_key):
    """Get auth details of access key.

    Required headers:

    - `x-s3auth-admin-key`: admin key

    :return: JSON: {"secret_key": secret_key, "account": account}
    """
    secret_key, account = self._get_details(req, access_key)
    if not secret_key:
        return HTTPNotFound(request=req)
    payload = {'secret_key': secret_key, 'account': account}
    return HTTPOk(body=json.dumps(payload))
def handle_prep(self, req):
    """Prepare the backing store Swift cluster for use with the auth
    system.

    Required headers:

    - `x-s3auth-prep-key`: must be same as key in config
    - `x-s3auth-hash-key`: hash key used for hashing admin key
    - `x-s3auth-admin-key`: admin key

    Note: The call can also be used to change current s3auth-admin key.
    """
    prep_key = req.headers.get("x-s3auth-prep-key")
    hash_key = req.headers.get("x-s3auth-hash-key")
    admin_key = req.headers.get('x-s3auth-admin-key')
    if not all((prep_key, hash_key, admin_key)):
        return HTTPBadRequest(
            body='Headers x-s3auth-prep-key, x-s3auth-hash-key, '
                 'x-s3auth-admin-key all required', request=req)
    if self.prep_key != prep_key:
        return _denied_response(req)
    # only the hashed admin key is persisted, never the key itself
    hashed_admin_key = _hash_msg(admin_key, hash_key)
    # create (or update) the auth account carrying the key material
    path = quote('/v1/{}'.format(self.auth_account))
    resp = make_pre_authed_request(req.environ, 'PUT', path, headers={
        HKEY_HASH_KEY: hash_key,
        HKEY_HASHED_ADMIN_KEY: hashed_admin_key,
    }).get_response(self.app)
    if resp.status_int // 100 != 2:
        raise Exception('Could not PUT auth account: {} {}'.format(
            path, resp.status))
    # create the container that will hold per-access-key detail objects
    path = quote(self.akd_container_url)
    resp = make_pre_authed_request(req.environ, 'PUT',
                                   path).get_response(self.app)
    if resp.status_int // 100 != 2:
        raise Exception(
            'Could not PUT access key details container: {} {}'.format(
                path, resp.status))
    return HTTPOk(request=req)
def GET(self, req):
    """
    Handles GET Bucket versioning.
    """
    resp = req.get_response(self.app)
    status = resp.status_int
    if status != HTTP_OK:
        # map the backend failure to the matching S3 error document
        if status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
            return get_err_response('AccessDenied')
        if status == HTTP_NOT_FOUND:
            return get_err_response('NoSuchBucket')
        return get_err_response('InvalidURI')
    # Just report there is no versioning configured here.
    body = ('<VersioningConfiguration '
            'xmlns="http://s3.amazonaws.com/doc/2006-03-01/"/>')
    return HTTPOk(body=body, content_type="text/plain")
def GET(self, req):
    """
    Handles GET Bucket logging.
    """
    resp = req.get_response(self.app)
    status = resp.status_int
    if status != HTTP_OK:
        # map the backend failure to the matching S3 error document
        if status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
            return get_err_response('AccessDenied')
        if status == HTTP_NOT_FOUND:
            return get_err_response('NoSuchBucket')
        return get_err_response('InvalidURI')
    # logging disabled
    body = ('<?xml version="1.0" encoding="UTF-8"?>'
            '<BucketLoggingStatus '
            'xmlns="http://doc.s3.amazonaws.com/2006-03-01" />')
    return HTTPOk(body=body, content_type='application/xml')
def PUT(self, req):
    """
    Handle PUT Bucket request.

    Creates the backing Swift container; an x-amz-acl header, if present,
    is translated into Swift ACL headers first.
    """
    if 'HTTP_X_AMZ_ACL' in req.environ:
        amz_acl = req.environ['HTTP_X_AMZ_ACL']
        # Translate the Amazon ACL to something that can be
        # implemented in Swift, 501 otherwise. Swift uses POST
        # for ACLs, whereas S3 uses PUT.
        del req.environ['HTTP_X_AMZ_ACL']
        if req.query_string:
            req.query_string = ''
        translated_acl = swift_acl_translate(amz_acl)
        if translated_acl == 'Unsupported':
            return get_err_response('Unsupported')
        elif translated_acl == 'InvalidArgument':
            return get_err_response('InvalidArgument')
        for header, acl in translated_acl:
            req.headers[header] = acl
    if 'CONTENT_LENGTH' in req.environ:
        # reject negative or non-numeric Content-Length up front
        try:
            if req.content_length < 0:
                return get_err_response('InvalidArgument')
        except (ValueError, TypeError):
            return get_err_response('InvalidArgument')
    resp = req.get_response(self.app)
    status = resp.status_int
    if status != HTTP_CREATED and status != HTTP_NO_CONTENT:
        # 202 means the container already existed
        if status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
            return get_err_response('AccessDenied')
        elif status == HTTP_ACCEPTED:
            return get_err_response('BucketAlreadyExists')
        else:
            return get_err_response('InvalidURI')
    return HTTPOk(headers={'Location': self.container_name})
def __call__(self, req):
    """
    WSGI entry point: intercept extract-archive PUTs and bulk DELETEs,
    passing every other request straight through to the wrapped app.
    """
    resp = None
    if req.method == "PUT":
        extract_type = req.params.get("extract-archive")
        if extract_type is not None:
            suffix = extract_type.lower().strip(".")
            archive_type = {"tar": "",
                            "tar.gz": "gz",
                            "tar.bz2": "bz2"}.get(suffix)
            if archive_type is None:
                resp = HTTPBadRequest("Unsupported archive format")
            else:
                resp = HTTPOk(request=req)
                out_content_type = req.accept.best_match(ACCEPTABLE_FORMATS)
                if out_content_type:
                    resp.content_type = out_content_type
                    resp.app_iter = self.handle_extract_iter(
                        req, archive_type,
                        out_content_type=out_content_type)
    if req.method == "DELETE" and "bulk-delete" in req.params:
        resp = HTTPOk(request=req)
        out_content_type = req.accept.best_match(ACCEPTABLE_FORMATS)
        if out_content_type:
            resp.content_type = out_content_type
            resp.app_iter = self.handle_delete_iter(
                req, out_content_type=out_content_type)
    return resp or self.app
def GET(self, req):
    """
    Handles GET Bucket location.
    """
    backend_resp = req.get_response(self.app)
    code = backend_resp.status_int
    if code != HTTP_OK:
        if code in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
            error = 'AccessDenied'
        elif code == HTTP_NOT_FOUND:
            error = 'NoSuchBucket'
        else:
            error = 'InvalidURI'
        return get_err_response(error)
    # The 'US' region is represented by an empty LocationConstraint.
    body = ('<?xml version="1.0" encoding="UTF-8"?>'
            '<LocationConstraint '
            'xmlns="http://s3.amazonaws.com/doc/2006-03-01/"')
    body += '/>' if self.location == 'US' \
        else ('>%s</LocationConstraint>' % self.location)
    return HTTPOk(body=body, content_type='application/xml')
def handle_delete(self, req, objs_to_delete=None, user_agent='BulkDelete',
                  swift_source='BD'):
    """
    Delete multiple objects (newline-separated names) in one request by
    issuing an internal DELETE subrequest per object.

    :params req: a swob Request
    :raises HTTPException: on unhandled errors
    :returns: a swob Response
    """
    try:
        # Path must contain at least /<version>/<account>.
        vrs, account, _junk = req.split_path(2, 3, True)
    except ValueError:
        return HTTPNotFound(request=req)
    incoming_format = req.headers.get('Content-Type')
    if incoming_format and not incoming_format.startswith('text/plain'):
        # For now only accept newline separated object names
        return HTTPNotAcceptable(request=req)
    out_content_type = req.accept.best_match(ACCEPTABLE_FORMATS)
    if not out_content_type:
        return HTTPNotAcceptable(request=req)
    if objs_to_delete is None:
        objs_to_delete = self.get_objs_to_delete(req)
    failed_files = []
    success_count = not_found_count = 0
    # Escalated to HTTPBadGateway if any backend DELETE returns a 5xx.
    failed_file_response_type = HTTPBadRequest
    for obj_to_delete in objs_to_delete:
        obj_to_delete = obj_to_delete.strip().lstrip('/')
        if not obj_to_delete:
            # Skip blank lines in the request body.
            continue
        delete_path = '/'.join(['', vrs, account, obj_to_delete])
        if not check_utf8(delete_path):
            failed_files.append(
                [quote(delete_path), HTTPPreconditionFailed().status])
            continue
        # Build a bodiless subrequest environ from the incoming request;
        # wsgi.input must not be shared with the parent request.
        new_env = req.environ.copy()
        new_env['PATH_INFO'] = delete_path
        del (new_env['wsgi.input'])
        new_env['CONTENT_LENGTH'] = 0
        new_env['HTTP_USER_AGENT'] = \
            '%s %s' % (req.environ.get('HTTP_USER_AGENT'), user_agent)
        new_env['swift.source'] = swift_source
        delete_obj_req = Request.blank(delete_path, new_env)
        resp = delete_obj_req.get_response(self.app)
        if resp.status_int // 100 == 2:
            success_count += 1
        elif resp.status_int == HTTP_NOT_FOUND:
            # Already gone: counted separately, not treated as a failure.
            not_found_count += 1
        elif resp.status_int == HTTP_UNAUTHORIZED:
            # Auth failure aborts the whole bulk operation immediately.
            return HTTPUnauthorized(request=req)
        else:
            if resp.status_int // 100 == 5:
                failed_file_response_type = HTTPBadGateway
            failed_files.append([quote(delete_path), resp.status])
    resp_body = get_response_body(out_content_type, {
        'Number Deleted': success_count,
        'Number Not Found': not_found_count
    }, failed_files)
    if (success_count or not_found_count) and not failed_files:
        return HTTPOk(resp_body, content_type=out_content_type)
    if failed_files:
        return failed_file_response_type(resp_body,
                                         content_type=out_content_type)
    # Nothing deleted, nothing not-found, nothing failed: empty request.
    return HTTPBadRequest('Invalid bulk delete.')
def get_shard_ranges(self, broker, args):
    """Return all of *broker*'s shard range data as a JSON 200 response."""
    payload = json.dumps(broker.get_all_shard_range_data())
    return HTTPOk(headers={'Content-Type': 'application/json'},
                  body=payload)
def _manifest_head_response(self, req, response_headers):
    """Build an empty-bodied, conditional 200 response for a manifest HEAD."""
    return HTTPOk(body='', request=req, headers=response_headers,
                  conditional_response=True)
def OPTIONS(self, req):
    """Advertise the request methods supported by this resource."""
    allowed = {'Allow': 'HEAD, GET, OPTIONS'}
    return HTTPOk(request=req, headers=allowed)
def GET(request, api_library, app):
    """
    GET /file/:file_id/data

    Download file data.

    To retrieve file data, an application submits an HTTP GET request to
    the file data resource that represents the data for the file.

    :param request: the incoming WSGI request
    :param api_library: StackSync API library used for metadata and
                        workspace lookups
    :param app: the middleware app (provides the logger)
    :returns: HTTPOk with the joined file content, or an error response
    """
    try:
        # /file/:file_id/data[/version/:version_id] -> 4 to 6 segments.
        _, _, file_id, _, version, _ = split_path(request.path, 4, 6, False)
    except ValueError:
        # split_path raises ValueError on malformed paths; a bare except
        # here would also have hidden unrelated programming errors.
        app.logger.error(
            "StackSync API: data_resource GET: Wrong resource path: %s path_info: %s",
            str(400), str(request.path_info))
        return create_error_response(
            400,
            "Wrong resource path. Expected /file/:file_id/data[/version/:version_id]]")

    app.logger.info('StackSync API: data_resource GET: path info: %s ',
                    str(request.path_info))
    user_id = request.environ["stacksync_user_id"]
    metadata = api_library.get_metadata(user_id, file_id,
                                        include_chunks=True,
                                        specific_version=version,
                                        is_folder=False)
    response = create_response(metadata, status_code=200)
    if not is_valid_status(response.status_int):
        app.logger.error(
            "StackSync API: data_resource GET: status code: %s. body: %s",
            str(response.status_int), str(response.body))
        return response
    metadata = json.loads(metadata)

    data_handler = DataHandler(app)
    workspace_info = api_library.get_workspace_info(user_id, file_id)
    response = create_response(workspace_info, status_code=200)
    if not is_valid_status(response.status_int):
        app.logger.error(
            "StackSync API: data_resource GET: status code: %s. body: %s",
            str(response.status_int), str(response.body))
        return response
    workspace_info = json.loads(workspace_info)
    container_name = workspace_info['swift_container']

    # Leftover debug 'print' statement replaced with a logger call.
    app.logger.info(
        'StackSync API: data_resource GET: chunks to retrieve: %s',
        str(metadata['chunks']))
    file_compress_content, status = data_handler.get_chunks(
        request.environ, metadata['chunks'], container_name)
    if is_valid_status(status):
        if len(file_compress_content) > 0:
            joined_file = BuildFile("", file_compress_content)
            joined_file.join()
            headers = {'Content-Type': metadata['mimetype']}
            return HTTPOk(body=joined_file.content, headers=headers)
        elif len(metadata['chunks']) == 0:
            # A valid file with no chunks is simply empty.
            return HTTPOk(body='')
        else:
            app.logger.error(
                "StackSync API: data_resource GET: Unexpected case. File_id: %s.",
                str(file_id))
            return create_error_response(
                500,
                "Could not retrieve file. Please contact an administrator.")
    else:
        app.logger.error(
            "StackSync API: data_resource GET: Cannot retrieve chunks. File_id: %s. Status: %s",
            str(file_id), str(status))
        return create_error_response(
            status, "Cannot retrieve chunks from storage backend.")