def _listing(self, env, start_response, prefix=None):
    """
    Sends an HTML object listing to the remote client.

    :param env: The original WSGI environment dict.
    :param start_response: The original WSGI start_response hook.
    :param prefix: Any prefix desired for the container listing.
    """
    label = env['PATH_INFO']
    if self._listings_label:
        # Replace the raw request path with the configured label followed
        # by the object path (components after /v1/<account>/<container>/).
        groups = env['PATH_INFO'].split('/')
        label = '{0}/{1}'.format(self._listings_label,
                                 '/'.join(groups[4:]))

    if not config_true_value(self._listings):
        # Listings are disabled for this container: serve a static
        # explanatory page with a 404 status instead of the listing.
        body = '<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 ' \
            'Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">\n' \
            '<html>\n' \
            '<head>\n' \
            '<title>Listing of %s</title>\n' % cgi.escape(label)
        if self._listings_css:
            body += ' <link rel="stylesheet" type="text/css" ' \
                'href="%s" />\n' % self._build_css_path(prefix or '')
        else:
            body += ' <style type="text/css">\n' \
                ' h1 {font-size: 1em; font-weight: bold;}\n' \
                ' p {font-size: 2}\n' \
                ' </style>\n'
        body += '</head>\n<body>' \
            ' <h1>Web Listing Disabled</h1>' \
            ' <p>The owner of this web site has disabled web listing.' \
            ' <p>If you are the owner of this web site, you can enable' \
            ' web listing by setting X-Container-Meta-Web-Listings.</p>'
        if self._index:
            body += '<h1>Index File Not Found</h1>' \
                ' <p>The owner of this web site has set ' \
                ' <b>X-Container-Meta-Web-Index: %s</b>. ' \
                ' However, this file is not found.</p>' % self._index
        body += ' </body>\n</html>\n'
        resp = HTTPNotFound(body=body)(env, self._start_response)
        return self._error_response(resp, env, start_response)

    # Fetch a one-level-deep JSON container listing (delimiter=/).
    tmp_env = make_env(
        env, 'GET', '/%s/%s/%s' % (
            self.version, self.account, self.container),
        self.agent, swift_source='SW')
    tmp_env['QUERY_STRING'] = 'delimiter=/&format=json'
    if prefix:
        tmp_env['QUERY_STRING'] += '&prefix=%s' % quote(prefix)
    else:
        prefix = ''
    resp = self._app_call(tmp_env)
    if not is_success(self._get_status_int()):
        return self._error_response(resp, env, start_response)
    listing = None
    body = ''.join(resp)
    if body:
        listing = json.loads(body)
    if not listing:
        # Empty listing (or empty body) is treated as not found.
        resp = HTTPNotFound()(env, self._start_response)
        return self._error_response(resp, env, start_response)

    headers = {'Content-Type': 'text/html; charset=UTF-8'}
    body = '<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 ' \
        'Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">\n' \
        '<html>\n' \
        ' <head>\n' \
        ' <title>Listing of %s</title>\n' % cgi.escape(label)
    if self._listings_css:
        body += ' <link rel="stylesheet" type="text/css" ' \
            'href="%s" />\n' % (self._build_css_path(prefix))
    else:
        body += ' <style type="text/css">\n' \
            ' h1 {font-size: 1em; font-weight: bold;}\n' \
            ' th {text-align: left; padding: 0px 1em 0px 1em;}\n' \
            ' td {padding: 0px 1em 0px 1em;}\n' \
            ' a {text-decoration: none;}\n' \
            ' </style>\n'
    body += ' </head>\n' \
        ' <body>\n' \
        ' <h1 id="title">Listing of %s</h1>\n' \
        ' <table id="listing">\n' \
        ' <tr id="heading">\n' \
        ' <th class="colname">Name</th>\n' \
        ' <th class="colsize">Size</th>\n' \
        ' <th class="coldate">Date</th>\n' \
        ' </tr>\n' % cgi.escape(label)
    if prefix:
        # Link back to the parent pseudo-directory.
        body += ' <tr id="parent" class="item">\n' \
            ' <td class="colname"><a href="../">../</a></td>\n' \
            ' <td class="colsize"> </td>\n' \
            ' <td class="coldate"> </td>\n' \
            ' </tr>\n'
    # Emit subdirectory rows first ...
    for item in listing:
        if 'subdir' in item:
            subdir = item['subdir'].encode("utf-8")
            if prefix:
                subdir = subdir[len(prefix):]
            body += ' <tr class="item subdir">\n' \
                ' <td class="colname"><a href="%s">%s</a></td>\n' \
                ' <td class="colsize"> </td>\n' \
                ' <td class="coldate"> </td>\n' \
                ' </tr>\n' % \
                (quote(subdir), cgi.escape(subdir))
    # ... then object rows.
    for item in listing:
        if 'name' in item:
            name = item['name'].encode("utf-8")
            if prefix:
                name = name[len(prefix):]
            content_type = item['content_type'].encode("utf-8")
            bytes = human_readable(item['bytes'])
            # Drop fractional seconds and the 'T' separator for display.
            last_modified = (
                cgi.escape(item['last_modified'].encode("utf-8")).
                split('.')[0].replace('T', ' '))
            body += ' <tr class="item %s">\n' \
                ' <td class="colname"><a href="%s">%s</a></td>\n' \
                ' <td class="colsize">%s</td>\n' \
                ' <td class="coldate">%s</td>\n' \
                ' </tr>\n' % \
                (' '.join('type-' + cgi.escape(t.lower(), quote=True)
                          for t in content_type.split('/')),
                 quote(name), cgi.escape(name),
                 bytes, last_modified)
    body += ' </table>\n' \
        ' </body>\n' \
        '</html>\n'
    resp = Response(headers=headers, body=body)
    return resp(env, start_response)
def GETorHEAD_base(self, req, server_type, partition, nodes, path,
                   attempts):
    """
    Base handler for HTTP GET or HEAD requests.

    :param req: swob.Request object
    :param server_type: server type
    :param partition: partition
    :param nodes: nodes
    :param path: path for the request
    :param attempts: number of attempts to try
    :returns: swob.Response object
    """
    statuses = []
    reasons = []
    bodies = []
    sources = []
    # X-Newest forces contacting every node and picking the best copy
    # instead of returning the first good response.
    newest = config_true_value(req.headers.get('x-newest', 'f'))
    nodes = iter(nodes)
    while len(statuses) < attempts:
        try:
            node = nodes.next()
        except StopIteration:
            break
        if self.error_limited(node):
            # Skip nodes that have recently produced too many errors.
            continue
        start_node_timing = time.time()
        try:
            with ConnectionTimeout(self.app.conn_timeout):
                headers = dict(req.headers)
                headers['Connection'] = 'close'
                conn = http_connect(
                    node['ip'], node['port'], node['device'], partition,
                    req.method, path, headers=headers,
                    query_string=req.query_string)
            self.app.set_node_timing(node, time.time() - start_node_timing)
            with Timeout(self.app.node_timeout):
                possible_source = conn.getresponse()
                # See NOTE: swift_conn at top of file about this.
                possible_source.swift_conn = conn
        except (Exception, Timeout):
            self.exception_occurred(
                node, server_type, _('Trying to %(method)s %(path)s') %
                {'method': req.method, 'path': req.path})
            continue
        if self.is_good_source(possible_source):
            # 404 if we know we don't have a synced copy
            if not float(possible_source.getheader('X-PUT-Timestamp', 1)):
                statuses.append(HTTP_NOT_FOUND)
                reasons.append('')
                bodies.append('')
                self.close_swift_conn(possible_source)
            else:
                statuses.append(possible_source.status)
                reasons.append(possible_source.reason)
                bodies.append('')
                sources.append(possible_source)
                if not newest:  # one good source is enough
                    break
        else:
            statuses.append(possible_source.status)
            reasons.append(possible_source.reason)
            bodies.append(possible_source.read())
            if possible_source.status == HTTP_INSUFFICIENT_STORAGE:
                self.error_limit(node)
            elif is_server_error(possible_source.status):
                self.error_occurred(node, _('ERROR %(status)d %(body)s '
                                            'From %(type)s Server') %
                                    {'status': possible_source.status,
                                     'body': bodies[-1][:1024],
                                     'type': server_type})
    if sources:
        # Use the best source (per source_key ordering); close the rest.
        sources.sort(key=source_key)
        source = sources.pop()
        for src in sources:
            self.close_swift_conn(src)
        res = Response(request=req, conditional_response=True)
        if req.method == 'GET' and \
                source.status in (HTTP_OK, HTTP_PARTIAL_CONTENT):
            res.app_iter = self._make_app_iter(node, source)
            # See NOTE: swift_conn at top of file about this.
            res.swift_conn = source.swift_conn
        res.status = source.status
        update_headers(res, source.getheaders())
        if not res.environ:
            res.environ = {}
        res.environ['swift_x_timestamp'] = \
            source.getheader('x-timestamp')
        res.accept_ranges = 'bytes'
        res.content_length = source.getheader('Content-Length')
        if source.getheader('Content-Type'):
            res.charset = None
            res.content_type = source.getheader('Content-Type')
        return res
    # No usable source: synthesize the best response from what we saw.
    return self.best_response(req, statuses, reasons, bodies,
                              '%s %s' % (server_type, req.method))
def _listing(self, env, start_response, prefix=None):
    """
    Sends an HTML object listing to the remote client.

    :param env: The original WSGI environment dict.
    :param start_response: The original WSGI start_response hook.
    :param prefix: Any prefix desired for the container listing.
    """
    if not config_true_value(self._listings):
        # Listings are disabled for this container.
        resp = HTTPNotFound()(env, self._start_response)
        return self._error_response(resp, env, start_response)
    # Fetch a one-level-deep JSON container listing with a pre-authed
    # request so container ACLs don't block the internal GET.
    tmp_env = make_pre_authed_env(
        env, 'GET', '/%s/%s/%s' % (self.version, self.account,
                                   self.container),
        self.agent, swift_source='SW')
    tmp_env['QUERY_STRING'] = 'delimiter=/&format=json'
    if prefix:
        tmp_env['QUERY_STRING'] += '&prefix=%s' % quote(prefix)
    else:
        prefix = ''
    resp = self._app_call(tmp_env)
    if not is_success(self._get_status_int()):
        return self._error_response(resp, env, start_response)
    listing = None
    body = ''.join(resp)
    if body:
        listing = json.loads(body)
    if not listing:
        # Empty listing (or empty body) is treated as not found.
        resp = HTTPNotFound()(env, self._start_response)
        return self._error_response(resp, env, start_response)
    headers = {'Content-Type': 'text/html; charset=UTF-8'}
    body = '<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 ' \
        'Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">\n' \
        '<html>\n' \
        ' <head>\n' \
        ' <title>Listing of %s</title>\n' % \
        cgi.escape(env['PATH_INFO'])
    if self._listings_css:
        body += ' <link rel="stylesheet" type="text/css" ' \
            'href="%s" />\n' % (self._build_css_path(prefix))
    else:
        body += ' <style type="text/css">\n' \
            ' h1 {font-size: 1em; font-weight: bold;}\n' \
            ' th {text-align: left; padding: 0px 1em 0px 1em;}\n' \
            ' td {padding: 0px 1em 0px 1em;}\n' \
            ' a {text-decoration: none;}\n' \
            ' </style>\n'
    body += ' </head>\n' \
        ' <body>\n' \
        ' <h1 id="title">Listing of %s</h1>\n' \
        ' <table id="listing">\n' \
        ' <tr id="heading">\n' \
        ' <th class="colname">Name</th>\n' \
        ' <th class="colsize">Size</th>\n' \
        ' <th class="coldate">Date</th>\n' \
        ' </tr>\n' % \
        cgi.escape(env['PATH_INFO'])
    if prefix:
        # Link back to the parent pseudo-directory.
        body += ' <tr id="parent" class="item">\n' \
            ' <td class="colname"><a href="../">../</a></td>\n' \
            ' <td class="colsize"> </td>\n' \
            ' <td class="coldate"> </td>\n' \
            ' </tr>\n'
    # Emit subdirectory rows first ...
    for item in listing:
        if 'subdir' in item:
            # json.loads yields unicode; encode for quoting/escaping.
            if isinstance(item['subdir'], unicode):
                subdir = item['subdir'].encode('utf-8')
            else:
                subdir = item['subdir']
            if prefix:
                subdir = subdir[len(prefix):]
            body += ' <tr class="item subdir">\n' \
                ' <td class="colname"><a href="%s">%s</a></td>\n' \
                ' <td class="colsize"> </td>\n' \
                ' <td class="coldate"> </td>\n' \
                ' </tr>\n' % \
                (quote(subdir), cgi.escape(subdir))
    # ... then object rows.
    for item in listing:
        if 'name' in item:
            if isinstance(item['name'], unicode):
                name = item['name'].encode('utf-8')
            else:
                name = item['name']
            if prefix:
                name = name[len(prefix):]
            body += ' <tr class="item %s">\n' \
                ' <td class="colname"><a href="%s">%s</a></td>\n' \
                ' <td class="colsize">%s</td>\n' \
                ' <td class="coldate">%s</td>\n' \
                ' </tr>\n' % \
                (' '.join('type-' + cgi.escape(t.lower(), quote=True)
                          for t in item['content_type'].split('/')),
                 quote(name), cgi.escape(name),
                 human_readable(item['bytes']),
                 cgi.escape(item['last_modified']).split('.')[0].
                 replace('T', ' '))
    body += ' </table>\n' \
        ' </body>\n' \
        '</html>\n'
    resp = Response(headers=headers, body=body)
    return resp(env, start_response)
def __call__(self, env, start_response):
    """
    WSGI entry point for the list_endpoints middleware.

    GET requests under the endpoints path are answered with a JSON list
    of backend storage URLs for the referenced account, container, or
    object; all other requests pass through to the wrapped app.
    """
    request = Request(env)
    if not request.path.startswith(self.endpoints_path):
        return self.app(env, start_response)

    if request.method != 'GET':
        return HTTPMethodNotAllowed(
            req=request, headers={"Allow": "GET"})(env, start_response)

    try:
        # Keep the leading '/' of the remaining path for split_path.
        clean_path = request.path[len(self.endpoints_path) - 1:]
        account, container, obj = \
            split_path(clean_path, 1, 3, True)
    except ValueError:
        return HTTPBadRequest('No account specified')(env, start_response)

    if account is not None:
        account = unquote(account)
    if container is not None:
        container = unquote(container)
    if obj is not None:
        obj = unquote(obj)

    if obj is not None:
        # remove 'endpoints' from call to get_container_info
        stripped = request.environ
        if stripped['PATH_INFO'][:len(self.endpoints_path)] == \
                self.endpoints_path:
            stripped['PATH_INFO'] = "/v1/" + \
                stripped['PATH_INFO'][len(self.endpoints_path):]
        container_info = get_container_info(
            stripped, self.app, swift_source='LE')
        # Object endpoints depend on the container's storage policy.
        obj_ring = self.get_object_ring(container_info['storage_policy'])
        partition, nodes = obj_ring.get_nodes(
            account, container, obj)
        endpoint_template = 'http://{ip}:{port}/{device}/{partition}/' + \
                            '{account}/{container}/{obj}'
    elif container is not None:
        partition, nodes = self.container_ring.get_nodes(
            account, container)
        endpoint_template = 'http://{ip}:{port}/{device}/{partition}/' + \
                            '{account}/{container}'
    else:
        partition, nodes = self.account_ring.get_nodes(account)
        endpoint_template = 'http://{ip}:{port}/{device}/{partition}/' + \
                            '{account}'

    endpoints = []
    for node in nodes:
        endpoint = endpoint_template.format(ip=node['ip'],
                                            port=node['port'],
                                            device=node['device'],
                                            partition=partition,
                                            account=quote(account),
                                            container=quote(container or ''),
                                            obj=quote(obj or ''))
        endpoints.append(endpoint)

    return Response(json.dumps(endpoints),
                    content_type='application/json')(env, start_response)
def handle_ratelimit(self, req, account_name, container_name, obj_name):
    """
    Performs rate limiting and account white/black listing. Sleeps
    if necessary. If self.memcache_client is not set, immediately
    returns None.

    :param account_name: account name from path
    :param container_name: container name from path
    :param obj_name: object name from path
    """
    if not self.memcache_client:
        return None

    # Only rate limit a request once, even if this middleware is
    # traversed multiple times in the pipeline.
    if req.environ.get('swift.ratelimit.handled'):
        return None
    req.environ['swift.ratelimit.handled'] = True

    try:
        account_info = get_account_info(req.environ, self.app,
                                        swift_source='RL')
        account_global_ratelimit = \
            account_info.get('sysmeta', {}).get('global-write-ratelimit')
    except ValueError:
        account_global_ratelimit = None

    if account_name in self.ratelimit_whitelist or \
            account_global_ratelimit == 'WHITELIST':
        return None

    if account_name in self.ratelimit_blacklist or \
            account_global_ratelimit == 'BLACKLIST':
        self.logger.error(_('Returning 497 because of blacklisting: %s'),
                          account_name)
        eventlet.sleep(self.BLACK_LIST_SLEEP)
        return Response(status='497 Blacklisted',
                        body='Your account has been blacklisted',
                        request=req)

    for key, max_rate in self.get_ratelimitable_key_tuples(
            req, account_name, container_name=container_name,
            obj_name=obj_name, global_ratelimit=account_global_ratelimit):
        try:
            need_to_sleep = self._get_sleep_time(key, max_rate)
            if self.log_sleep_time_seconds and \
                    need_to_sleep > self.log_sleep_time_seconds:
                self.logger.warning(
                    _("Ratelimit sleep log: %(sleep)s for "
                      "%(account)s/%(container)s/%(object)s"),
                    {'sleep': need_to_sleep, 'account': account_name,
                     'container': container_name, 'object': obj_name})
            if need_to_sleep > 0:
                eventlet.sleep(need_to_sleep)
        except MaxSleepTimeHitError as e:
            # Required sleep exceeds the allowed maximum: reject the
            # request instead of delaying it.
            self.logger.error(
                _('Returning 498 for %(meth)s to %(acc)s/%(cont)s/%(obj)s '
                  '. Ratelimit (Max Sleep) %(e)s'),
                {'meth': req.method, 'acc': account_name,
                 'cont': container_name, 'obj': obj_name, 'e': str(e)})
            error_resp = Response(status='498 Rate Limited',
                                  body='Slow down', request=req)
            return error_resp
    return None
def __call__(self, env, start_response):
    """WSGI entry point: record the call and echo back a bare response."""
    self.calls = self.calls + 1
    response = Response()
    response.environ = env
    return response(env, start_response)
def GET(self, request):
    """Handle HTTP GET requests for the Swift Object Server."""
    device, partition, account, container, obj = \
        self._parse_path(request)
    try:
        disk_file = self._diskfile(device, partition, account, container,
                                   obj, keep_data_fp=True,
                                   iter_hook=sleep)
    except DiskFileDeviceUnavailable:
        return HTTPInsufficientStorage(drive=device, request=request)
    if disk_file.is_deleted() or disk_file.is_expired():
        # If-Match: * on a missing object is a precondition failure
        # (RFC 2616); otherwise plain 404.
        if request.headers.get('if-match') == '*':
            return HTTPPreconditionFailed(request=request)
        else:
            return HTTPNotFound(request=request)
    try:
        file_size = disk_file.get_data_file_size()
    except (DiskFileError, DiskFileNotExist):
        # On-disk size disagrees with metadata: quarantine the object.
        disk_file.quarantine()
        return HTTPNotFound(request=request)
    if request.headers.get('if-match') not in (None, '*') and \
            disk_file.metadata['ETag'] not in request.if_match:
        disk_file.close()
        return HTTPPreconditionFailed(request=request)
    if request.headers.get('if-none-match') is not None:
        if disk_file.metadata['ETag'] in request.if_none_match:
            resp = HTTPNotModified(request=request)
            resp.etag = disk_file.metadata['ETag']
            disk_file.close()
            return resp
    try:
        if_unmodified_since = request.if_unmodified_since
    except (OverflowError, ValueError):
        # catches timestamps before the epoch
        return HTTPPreconditionFailed(request=request)
    if if_unmodified_since and \
            datetime.fromtimestamp(
                float(disk_file.metadata['X-Timestamp']), UTC) > \
            if_unmodified_since:
        disk_file.close()
        return HTTPPreconditionFailed(request=request)
    try:
        if_modified_since = request.if_modified_since
    except (OverflowError, ValueError):
        # catches timestamps before the epoch
        return HTTPPreconditionFailed(request=request)
    if if_modified_since and \
            datetime.fromtimestamp(
                float(disk_file.metadata['X-Timestamp']), UTC) < \
            if_modified_since:
        disk_file.close()
        return HTTPNotModified(request=request)
    response = Response(app_iter=disk_file,
                        request=request, conditional_response=True)
    response.headers['Content-Type'] = disk_file.metadata.get(
        'Content-Type', 'application/octet-stream')
    # Pass through user metadata and any explicitly allowed headers.
    for key, value in disk_file.metadata.iteritems():
        if key.lower().startswith('x-object-meta-') or \
                key.lower() in self.allowed_headers:
            response.headers[key] = value
    response.etag = disk_file.metadata['ETag']
    response.last_modified = float(disk_file.metadata['X-Timestamp'])
    response.content_length = file_size
    # Hint the OS to keep small, cache-eligible objects in page cache.
    if response.content_length < self.keep_cache_size and \
            (self.keep_cache_private or
             ('X-Auth-Token' not in request.headers and
              'X-Storage-Token' not in request.headers)):
        disk_file.keep_cache = True
    if 'Content-Encoding' in disk_file.metadata:
        response.content_encoding = disk_file.metadata['Content-Encoding']
    response.headers['X-Timestamp'] = disk_file.metadata['X-Timestamp']
    return request.get_response(response)
def __call__(self, env, start_response):
    """Fake Swift app for staticweb tests: canned response per path."""
    self.calls += 1
    if env['PATH_INFO'] == '/':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1':
        return Response(
            status='412 Precondition Failed')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a':
        return Response(status='401 Unauthorized')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c1':
        return Response(status='401 Unauthorized')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c2':
        return self.listing(env, start_response,
                            {'x-container-read': '.r:*'})
    elif env['PATH_INFO'] == '/v1/a/c2/one.txt':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c3':
        return self.listing(env, start_response,
                            {'x-container-read': '.r:*',
                             'x-container-meta-web-index': 'index.html',
                             'x-container-meta-web-listings': 't'})
    elif env['PATH_INFO'] == '/v1/a/c3/index.html':
        return Response(status='200 Ok', body='''
<html>
    <body>
        <h1>Test main index.html file.</h1>
        <p>Visit <a href="subdir">subdir</a>.</p>
        <p>Don't visit <a href="subdir2/">subdir2</a> because it doesn't
            really exist.</p>
        <p>Visit <a href="subdir3">subdir3</a>.</p>
        <p>Visit <a href="subdir3/subsubdir">subdir3/subsubdir</a>.</p>
    </body>
</html>
''')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c3b':
        return self.listing(env, start_response,
                            {'x-container-read': '.r:*',
                             'x-container-meta-web-index': 'index.html',
                             'x-container-meta-web-listings': 't'})
    elif env['PATH_INFO'] == '/v1/a/c3b/index.html':
        # Index object exists but carries no body (204).
        resp = Response(status='204 No Content')
        resp.app_iter = iter([])
        return resp(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c3/subdir':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c3/subdir/':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c3/subdir/index.html':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c3/subdir3/subsubdir':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c3/subdir3/subsubdir/':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c3/subdir3/subsubdir/index.html':
        return Response(status='200 Ok', body='index file')(env,
                                                            start_response)
    elif env['PATH_INFO'] == '/v1/a/c3/subdirx/':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c3/subdirx/index.html':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c3/subdiry/':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c3/subdiry/index.html':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c3/subdirz':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c3/subdirz/index.html':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c3/unknown':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c3/unknown/index.html':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c4':
        # Flag used by tests to assert the container GET happened.
        self.get_c4_called = True
        return self.listing(env, start_response,
                            {'x-container-read': '.r:*',
                             'x-container-meta-web-index': 'index.html',
                             'x-container-meta-web-error': 'error.html',
                             'x-container-meta-web-listings': 't',
                             'x-container-meta-web-listings-css':
                                 'listing.css'})
    elif env['PATH_INFO'] == '/v1/a/c4/one.txt':
        return Response(status='200 Ok',
                        headers={'x-object-meta-test': 'value'},
                        body='1')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c4/two.txt':
        return Response(
            status='503 Service Unavailable')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c4/index.html':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c4/subdir/':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c4/subdir/index.html':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c4/unknown':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c4/unknown/index.html':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c4/404error.html':
        return Response(status='200 Ok', body='''
<html>
    <body style="background: #000000; color: #ffaaaa">
        <p>Chrome's 404 fancy-page sucks.</p>
    <body>
<html>
'''.strip())(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c5':
        # NOTE: uses 'x-container-meta-listings' (no 'web-'); looks
        # deliberate for this fixture — confirm against the tests.
        return self.listing(env, start_response,
                            {'x-container-read': '.r:*',
                             'x-container-meta-web-index': 'index.html',
                             'x-container-meta-listings': 't',
                             'x-container-meta-web-error': 'error.html'})
    elif env['PATH_INFO'] == '/v1/a/c5/index.html':
        return Response(
            status='503 Service Unavailable')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c5/503error.html':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c5/unknown':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c5/unknown/index.html':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c5/404error.html':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] == '/v1/a/c6':
        return self.listing(env, start_response,
                            {'x-container-read': '.r:*',
                             'x-container-meta-web-listings': 't'})
    elif env['PATH_INFO'] == '/v1/a/c6/subdir':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] in ('/v1/a/c7', '/v1/a/c7/'):
        return self.listing(env, start_response,
                            {'x-container-read': '.r:*',
                             'x-container-meta-web-listings': 'f'})
    elif env['PATH_INFO'] in ('/v1/a/c8', '/v1/a/c8/'):
        return self.listing(env, start_response,
                            {'x-container-read': '.r:*',
                             'x-container-meta-web-error': 'error.html',
                             'x-container-meta-web-listings': 't',
                             'x-container-meta-web-listings-css':
                                 'http://localhost/stylesheets/listing.css'})
    elif env['PATH_INFO'] == '/v1/a/c8/subdir/':
        return Response(status='404 Not Found')(env, start_response)
    elif env['PATH_INFO'] in ('/v1/a/c9', '/v1/a/c9/'):
        return self.listing(env, start_response,
                            {'x-container-read': '.r:*',
                             'x-container-meta-web-error': 'error.html',
                             'x-container-meta-web-listings': 't',
                             'x-container-meta-web-listings-css':
                                 '/absolute/listing.css'})
    elif env['PATH_INFO'] == '/v1/a/c9/subdir/':
        return Response(status='404 Not Found')(env, start_response)
    else:
        raise Exception('Unknown path %r' % env['PATH_INFO'])
def __call__(self, env, start_response):
    """Minimal WSGI app: always answer with the body 'FAKE APP'."""
    request = Request(env)
    response = Response(request=request, body='FAKE APP')
    return response(env, start_response)
def __call__(self, env, start_response):
    """Fake Swift app for SLO tests: canned responses keyed off the
    request path; DELETE-flavored paths also record (method, path)."""
    self.calls += 1
    if env['PATH_INFO'] == '/':
        return Response(status=200, body='passed')(env, start_response)
    if env['PATH_INFO'].startswith('/test_good/'):
        j, v, a, cont, obj = env['PATH_INFO'].split('/')
        if obj == 'a_2':
            return Response(status=400)(env, start_response)
        cont_len = 100
        if obj == 'small_object':
            cont_len = 10
        headers = {'etag': 'etagoftheobjectsegment',
                   'Content-Length': cont_len}
        if obj == 'slob':
            headers['X-Static-Large-Object'] = 'true'
        return Response(status=200, headers=headers)(env, start_response)
    if env['PATH_INFO'].startswith('/test_good_check/'):
        j, v, a, cont, obj = env['PATH_INFO'].split('/')
        # Object name encodes the expected etag and size: <etag>_<size>.
        etag, size = obj.split('_')
        last_mod = 'Fri, 01 Feb 2012 20:38:36 GMT'
        if obj == 'a_1':
            last_mod = ''
        return Response(status=200,
                        headers={'etag': etag,
                                 'Last-Modified': last_mod,
                                 'Content-Length': size})(env,
                                                          start_response)
    if env['PATH_INFO'].startswith('/test_get/'):
        good_data = json.dumps(
            [{'name': '/c/a_1', 'hash': 'a', 'bytes': '1'},
             {'name': '/d/b_2', 'hash': 'b', 'bytes': '2'}])
        return Response(status=200,
                        headers={'X-Static-Large-Object': 'True',
                                 'Content-Type': 'html;swift_bytes=55'},
                        body=good_data)(env, start_response)
    if env['PATH_INFO'].startswith('/test_get_broke_json/'):
        # Valid manifest truncated to simulate corrupt JSON.
        good_data = json.dumps(
            [{'name': '/c/a_1', 'hash': 'a', 'bytes': '1'},
             {'name': '/d/b_2', 'hash': 'b', 'bytes': '2'}])
        return Response(status=200,
                        headers={'X-Static-Large-Object': 'True'},
                        body=good_data[:-5])(env, start_response)
    if env['PATH_INFO'].startswith('/test_get_bad_json/'):
        # Parses as JSON but lacks required manifest keys.
        bad_data = json.dumps(
            [{'name': '/c/a_1', 'something': 'a', 'bytes': '1'},
             {'name': '/d/b_2', 'bytes': '2'}])
        return Response(status=200,
                        headers={'X-Static-Large-Object': 'True'},
                        body=bad_data)(env, start_response)
    if env['PATH_INFO'].startswith('/test_get_not_slo/'):
        return Response(status=200, body='lalala')(env, start_response)
    if env['PATH_INFO'].startswith('/test_delete_404/'):
        self.req_method_paths.append(
            (env['REQUEST_METHOD'], env['PATH_INFO']))
        return Response(status=404)(env, start_response)
    if env['PATH_INFO'].startswith('/test_delete/'):
        good_data = json.dumps(
            [{'name': '/c/a_1', 'hash': 'a', 'bytes': '1'},
             {'name': '/d/b_2', 'hash': 'b', 'bytes': '2'}])
        self.req_method_paths.append(
            (env['REQUEST_METHOD'], env['PATH_INFO']))
        return Response(status=200,
                        headers={'X-Static-Large-Object': 'True'},
                        body=good_data)(env, start_response)
    if env['PATH_INFO'].startswith('/test_delete_nested/'):
        # Manifest referencing a sub-SLO ('sub_slo': True entry).
        nested_data = json.dumps(
            [{'name': '/b/b_2', 'hash': 'a', 'bytes': '1'},
             {'name': '/c/c_3', 'hash': 'b', 'bytes': '2'}])
        good_data = json.dumps(
            [{'name': '/a/a_1', 'hash': 'a', 'bytes': '1'},
             {'name': '/a/sub_nest', 'hash': 'a', 'sub_slo': True,
              'bytes': len(nested_data)},
             {'name': '/d/d_3', 'hash': 'b', 'bytes': '2'}])
        self.req_method_paths.append(
            (env['REQUEST_METHOD'], env['PATH_INFO']))
        if 'sub_nest' in env['PATH_INFO']:
            return Response(status=200,
                            headers={'X-Static-Large-Object': 'True'},
                            body=nested_data)(env, start_response)
        else:
            return Response(status=200,
                            headers={'X-Static-Large-Object': 'True'},
                            body=good_data)(env, start_response)
    if env['PATH_INFO'].startswith('/test_delete_bad_json/'):
        self.req_method_paths.append(
            (env['REQUEST_METHOD'], env['PATH_INFO']))
        return Response(status=200,
                        headers={'X-Static-Large-Object': 'True'},
                        body='bad json')(env, start_response)
    if env['PATH_INFO'].startswith('/test_delete_bad_man/'):
        self.req_method_paths.append(
            (env['REQUEST_METHOD'], env['PATH_INFO']))
        return Response(status=200, body='')(env, start_response)
    if env['PATH_INFO'].startswith('/test_delete_bad/'):
        good_data = json.dumps(
            [{'name': '/c/a_1', 'hash': 'a', 'bytes': '1'},
             {'name': '/d/b_2', 'hash': 'b', 'bytes': '2'}])
        self.req_method_paths.append(
            (env['REQUEST_METHOD'], env['PATH_INFO']))
        # First segment delete is rejected with 401.
        if env['PATH_INFO'].endswith('/c/a_1'):
            return Response(status=401)(env, start_response)
        return Response(status=200,
                        headers={'X-Static-Large-Object': 'True'},
                        body=good_data)(env, start_response)
def listing(self, env, start_response, headers):
    """Return a canned container listing for the staticweb tests,
    selected by the request path and query string."""
    if env['PATH_INFO'] in ('/v1/a/c3', '/v1/a/c4', '/v1/a/c8',
                            '/v1/a/c9') and \
            env['QUERY_STRING'] == \
            'delimiter=/&format=json&prefix=subdir/':
        headers.update({'X-Container-Object-Count': '11',
                        'X-Container-Bytes-Used': '73741',
                        'X-Container-Read': '.r:*',
                        'Content-Type': 'application/json; charset=utf-8'})
        body = '''
            [{"name":"subdir/1.txt",
              "hash":"5f595114a4b3077edfac792c61ca4fe4", "bytes":20,
              "content_type":"text/plain",
              "last_modified":"2011-03-24T04:27:52.709100"},
             {"name":"subdir/2.txt",
              "hash":"c85c1dcd19cf5cbac84e6043c31bb63e", "bytes":20,
              "content_type":"text/plain",
              "last_modified":"2011-03-24T04:27:52.734140"},
             {"subdir":"subdir3/subsubdir/"}]
        '''.strip()
    elif env['PATH_INFO'] == '/v1/a/c3' and env['QUERY_STRING'] == \
            'delimiter=/&format=json&prefix=subdiry/':
        # Empty listing for a prefix with no matches.
        headers.update({'X-Container-Object-Count': '11',
                        'X-Container-Bytes-Used': '73741',
                        'X-Container-Read': '.r:*',
                        'Content-Type': 'application/json; charset=utf-8'})
        body = '[]'
    elif env['PATH_INFO'] == '/v1/a/c3' and env['QUERY_STRING'] == \
            'limit=1&format=json&delimiter=/&limit=1&prefix=subdirz/':
        headers.update({'X-Container-Object-Count': '11',
                        'X-Container-Bytes-Used': '73741',
                        'X-Container-Read': '.r:*',
                        'Content-Type': 'application/json; charset=utf-8'})
        body = '''
            [{"name":"subdirz/1.txt",
              "hash":"5f595114a4b3077edfac792c61ca4fe4", "bytes":20,
              "content_type":"text/plain",
              "last_modified":"2011-03-24T04:27:52.709100"}]
        '''.strip()
    elif env['PATH_INFO'] == '/v1/a/c6' and env['QUERY_STRING'] == \
            'limit=1&format=json&delimiter=/&limit=1&prefix=subdir/':
        headers.update({'X-Container-Object-Count': '11',
                        'X-Container-Bytes-Used': '73741',
                        'X-Container-Read': '.r:*',
                        'X-Container-Web-Listings': 't',
                        'Content-Type': 'application/json; charset=utf-8'})
        body = '''
            [{"name":"subdir/1.txt",
              "hash":"5f595114a4b3077edfac792c61ca4fe4", "bytes":20,
              "content_type":"text/plain",
              "last_modified":"2011-03-24T04:27:52.709100"}]
        '''.strip()
    elif 'prefix=' in env['QUERY_STRING']:
        # Any other prefixed listing is empty.
        return Response(status='204 No Content')(env, start_response)
    elif 'format=json' in env['QUERY_STRING']:
        # Full JSON listing of the fixture container.
        headers.update({'X-Container-Object-Count': '11',
                        'X-Container-Bytes-Used': '73741',
                        'Content-Type': 'application/json; charset=utf-8'})
        body = '''
            [{"name":"401error.html",
              "hash":"893f8d80692a4d3875b45be8f152ad18", "bytes":110,
              "content_type":"text/html",
              "last_modified":"2011-03-24T04:27:52.713710"},
             {"name":"404error.html",
              "hash":"62dcec9c34ed2b347d94e6ca707aff8c", "bytes":130,
              "content_type":"text/html",
              "last_modified":"2011-03-24T04:27:52.720850"},
             {"name":"index.html",
              "hash":"8b469f2ca117668a5131fe9ee0815421", "bytes":347,
              "content_type":"text/html",
              "last_modified":"2011-03-24T04:27:52.683590"},
             {"name":"listing.css",
              "hash":"7eab5d169f3fcd06a08c130fa10c5236", "bytes":17,
              "content_type":"text/css",
              "last_modified":"2011-03-24T04:27:52.721610"},
             {"name":"one.txt",
              "hash":"73f1dd69bacbf0847cc9cffa3c6b23a1", "bytes":22,
              "content_type":"text/plain",
              "last_modified":"2011-03-24T04:27:52.722270"},
             {"name":"subdir/1.txt",
              "hash":"5f595114a4b3077edfac792c61ca4fe4", "bytes":20,
              "content_type":"text/plain",
              "last_modified":"2011-03-24T04:27:52.709100"},
             {"name":"subdir/2.txt",
              "hash":"c85c1dcd19cf5cbac84e6043c31bb63e", "bytes":20,
              "content_type":"text/plain",
              "last_modified":"2011-03-24T04:27:52.734140"},
             {"name":"subdir/\u2603.txt",
              "hash":"7337d028c093130898d937c319cc9865", "bytes":72981,
              "content_type":"text/plain",
              "last_modified":"2011-03-24T04:27:52.735460"},
             {"name":"subdir2",
              "hash":"d41d8cd98f00b204e9800998ecf8427e", "bytes":0,
              "content_type":"text/directory",
              "last_modified":"2011-03-24T04:27:52.676690"},
             {"name":"subdir3/subsubdir/index.html",
              "hash":"04eea67110f883b1a5c97eb44ccad08c", "bytes":72,
              "content_type":"text/html",
              "last_modified":"2011-03-24T04:27:52.751260"},
             {"name":"two.txt",
              "hash":"10abb84c63a5cff379fdfd6385918833", "bytes":22,
              "content_type":"text/plain",
              "last_modified":"2011-03-24T04:27:52.825110"}]
        '''.strip()
    else:
        # Plain-text listing (one object name per line).
        headers.update({'X-Container-Object-Count': '11',
                        'X-Container-Bytes-Used': '73741',
                        'Content-Type': 'text/plain; charset=utf-8'})
        body = '\n'.join(['401error.html', '404error.html', 'index.html',
                          'listing.css', 'one.txt', 'subdir/1.txt',
                          'subdir/2.txt', u'subdir/\u2603.txt', 'subdir2',
                          'subdir3/subsubdir/index.html', 'two.txt'])
    return Response(status='200 Ok', headers=headers,
                    body=body)(env, start_response)
def GETorHEAD_base(self, req, server_type, ring, partition, path):
    """
    Base handler for HTTP GET or HEAD requests.

    Polls the primary nodes for ``partition`` until a good source is
    found.  With the ``X-Newest`` header set, every node is consulted
    and the best candidate (per ``source_key``) is used; otherwise the
    first good source wins.

    :param req: swob.Request object
    :param server_type: server type
    :param ring: the ring to obtain nodes from
    :param partition: partition
    :param path: path for the request
    :returns: swob.Response object
    """
    # Parallel per-node result accumulators; indexes line up across all
    # four lists so best_response() can correlate them.
    statuses = []
    reasons = []
    bodies = []
    source_headers = []
    sources = []
    newest = config_true_value(req.headers.get('x-newest', 'f'))
    headers = self.generate_request_headers(req, additional=req.headers)
    for node in self.iter_nodes(ring, partition):
        start_node_timing = time.time()
        try:
            with ConnectionTimeout(self.app.conn_timeout):
                conn = http_connect(
                    node['ip'], node['port'], node['device'],
                    partition, req.method, path,
                    headers=headers,
                    query_string=req.query_string)
            # Only the connection setup counts toward node timing.
            self.app.set_node_timing(node, time.time() - start_node_timing)
            with Timeout(self.app.node_timeout):
                possible_source = conn.getresponse()
                # See NOTE: swift_conn at top of file about this.
                possible_source.swift_conn = conn
        except (Exception, Timeout):
            # Connection/timeout failure: log it and move on to the next
            # node in the ring iteration.
            self.exception_occurred(
                node, server_type, _('Trying to %(method)s %(path)s') %
                {'method': req.method, 'path': req.path})
            continue
        if self.is_good_source(possible_source):
            # 404 if we know we don't have a synced copy
            if not float(possible_source.getheader('X-PUT-Timestamp', 1)):
                statuses.append(HTTP_NOT_FOUND)
                reasons.append('')
                bodies.append('')
                source_headers.append('')
                self.close_swift_conn(possible_source)
            else:
                statuses.append(possible_source.status)
                reasons.append(possible_source.reason)
                bodies.append('')
                source_headers.append('')
                sources.append((possible_source, node))
                if not newest:  # one good source is enough
                    break
        else:
            # Bad source: capture status/body for best_response() and
            # apply error accounting to the node.
            statuses.append(possible_source.status)
            reasons.append(possible_source.reason)
            bodies.append(possible_source.read())
            source_headers.append(possible_source.getheaders())
            if possible_source.status == HTTP_INSUFFICIENT_STORAGE:
                self.error_limit(node, _('ERROR Insufficient Storage'))
            elif is_server_error(possible_source.status):
                self.error_occurred(node, _('ERROR %(status)d %(body)s '
                                            'From %(type)s Server') %
                                    {'status': possible_source.status,
                                     'body': bodies[-1][:1024],
                                     'type': server_type})
    res = None
    if sources:
        # Pick the best source (sort puts it last); close the rest.
        sources.sort(key=lambda s: source_key(s[0]))
        source, node = sources.pop()
        for src, _junk in sources:
            self.close_swift_conn(src)
        res = Response(request=req)
        if req.method == 'GET' and \
                source.status in (HTTP_OK, HTTP_PARTIAL_CONTENT):
            # Stream the body from the chosen node.
            res.app_iter = self._make_app_iter(node, source)
            # See NOTE: swift_conn at top of file about this.
            res.swift_conn = source.swift_conn
        res.status = source.status
        update_headers(res, source.getheaders())
        if not res.environ:
            res.environ = {}
        res.environ['swift_x_timestamp'] = \
            source.getheader('x-timestamp')
        res.accept_ranges = 'bytes'
        res.content_length = source.getheader('Content-Length')
        if source.getheader('Content-Type'):
            res.charset = None
            res.content_type = source.getheader('Content-Type')
    if not res:
        # No good source at all: synthesize the best error response from
        # the accumulated per-node results.
        res = self.best_response(req, statuses, reasons, bodies,
                                 '%s %s' % (server_type, req.method),
                                 headers=source_headers)
    # Opportunistically populate the account/container (and object)
    # info caches; split_path raises ValueError when the path doesn't
    # have that many segments, which is simply ignored.
    try:
        (account, container) = split_path(req.path_info, 1, 2)
        _set_info_cache(self.app, req.environ, account, container, res)
    except ValueError:
        pass
    try:
        (account, container, obj) = split_path(req.path_info, 3, 3, True)
        _set_object_info_cache(self.app, req.environ, account, container,
                               obj, res)
    except ValueError:
        pass
    return res
content_type = saxutils.escape(content_type) xml_output.append('<object><name>%s</name><hash>%s</hash>'\ '<bytes>%d</bytes><content_type>%s</content_type>'\ '<last_modified>%s</last_modified></object>' % \ (name, etag, size, content_type, created_at)) container_list = ''.join([ '<?xml version="1.0" encoding="UTF-8"?>\n', '<container name=%s>' % saxutils.quoteattr(container), ''.join(xml_output), '</container>' ]) else: if not container_list: self.logger.timing_since('GET.timing', start_time) return HTTPNoContent(request=req, headers=resp_headers) container_list = '\n'.join(r[0] for r in container_list) + '\n' ret = Response(body=container_list, request=req, headers=resp_headers) ret.content_type = out_content_type ret.charset = 'utf-8' self.logger.timing_since('GET.timing', start_time) return ret @public def REPLICATE(self, req): """ Handle HTTP REPLICATE request (json-encoded RPC calls for replication.) """ start_time = time.time() try: post_args = split_path(unquote(req.path), 3) drive, partition, hash = post_args validate_device_partition(drive, partition)
def REPLICATION(self, request):
    """
    Handle a REPLICATION request by handing it to an ssync receiver.

    :param request: swob.Request object
    :returns: swob.Response whose app_iter streams the receiver's output
    """
    receiver = ssync_receiver.Receiver(self, request)
    return Response(app_iter=receiver())
def DELETE(self, env, start_response):
    """
    Handle DELETE Bucket request.

    With no recognized sub-resource in the query string, the bucket
    itself is deleted (honoring an optional ``versionId``).  Otherwise
    the named sub-resource ('cors', 'lifecycle', 'policy', 'tagging' or
    'website') is cleared by blanking its container metadata via POST.

    :param env: WSGI environment dict
    :param start_response: WSGI start_response callable
    :returns: swob.Response (204 on success, or an S3-style error)
    """
    key_args = set(['cors', 'lifecycle', 'policy', 'tagging', 'website'])

    qs = env.get('QUERY_STRING', '')
    args = urlparse.parse_qs(qs, 1)

    if not key_args & set(args):
        # DELETE a Bucket
        version = args.get('versionId')
        if version:
            vid = version[0]
            # BUGFIX: was 'lastest' (typo), so a client asking for the
            # latest version had its request rewritten to a literal
            # version object named 'latest'.
            if vid.lower() == 'latest':
                pass
            else:
                # Redirect the delete at the archived version object.
                env['PATH_INFO'] = '/v1/AUTH_%s/%s/%s' % (
                    quote(self.account_name),
                    quote(self.version_name(self.container_name)),
                    vid)

        body_iter = self._app_call(env)
        status = self._get_status_int()

        if status != HTTP_NO_CONTENT:
            if status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
                return self.get_err_response('AccessDenied')
            elif status == HTTP_NOT_FOUND:
                return self.get_err_response('NoSuchBucket')
            elif status == HTTP_CONFLICT:
                return self.get_err_response('BucketNotEmpty')
            else:
                return self.get_err_response('InvalidURI')

        resp = Response()
        resp.status = HTTP_NO_CONTENT
        return resp
    else:
        # DELETE specified data
        action = args.keys().pop()
        if action == 'cors':
            # delete cors: blank out all CORS-related container metadata
            env['HTTP_X_CONTAINER_META_ACCESS_CONTROL_ALLOW_ORIGIN'] = ''
            env['HTTP_X_CONTAINER_META_ACCESS_CONTROL_MAX_AGE'] = ''
            env['HTTP_X_CONTAINER_META_ACCESS_CONTROL_EXPOSE_HEADERS'] = ''
            env['HTTP_X_CONTAINER_META_ACCESS_CONTROL_ALLOW_METHOD'] = ''
            env['QUERY_STRING'] = ''
            env['REQUEST_METHOD'] = 'POST'

            body_iter = self._app_call(env)
            status = self._get_status_int()

            if is_success(status):
                resp = Response()
                resp.status = HTTP_NO_CONTENT
                return resp
            elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
                return self.get_err_response('AccessDenied')
            else:
                return self.get_err_response('InvalidURI')
        elif action == 'lifecycle':
            # delete lifecycle: blank out transition/expiration metadata
            env['HTTP_X_CONTAINER_META_TRANS_AT'] = ''
            env['HTTP_X_CONTAINER_META_TRANS_AFTER'] = ''
            env['HTTP_X_CONTAINER_META_TRANS_CLASS'] = ''
            env['HTTP_X_CONTAINER_META_EXPIRATION_AT'] = ''
            env['HTTP_X_CONTAINER_META_EXPIRATION_AFTER'] = ''
            env['HTTP_X_CONTAINER_META_EXPIRATION_PREFIX'] = ''
            env['HTTP_X_CONTAINER_META_EXPIRATION_STATUS'] = ''
            env['REQUEST_METHOD'] = 'POST'
            env['QUERY_STRING'] = ''

            body_iter = self._app_call(env)
            status = self._get_status_int()

            if is_success(status):
                resp = Response()
                resp.status = HTTP_NO_CONTENT
                return resp
            elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
                return self.get_err_response('AccessDenied')
            else:
                return self.get_err_response('InvalidURI')
        elif action == 'policy':
            # delete policy
            env['REQUEST_METHOD'] = 'POST'
            env['QUERY_STRING'] = ''
            env['HTTP_X_CONTAINER_META_POLICY'] = ''

            body_iter = self._app_call(env)
            status = self._get_status_int()

            if is_success(status):
                resp = Response()
                resp.status = HTTP_NO_CONTENT
                return resp
            elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
                return self.get_err_response('AccessDenied')
            else:
                return self.get_err_response('InvalidURI')
        elif action == 'tagging':
            # delete tagging: blank out every existing container meta key
            env2 = copy(env)
            container_info = get_container_info(env2, self.app)
            meta_keys = container_info['meta'].keys()
            for key in meta_keys:
                env['HTTP_X_CONTAINER_META_' +
                    key.replace('-', '_').upper()] = ''
            env['QUERY_STRING'] = ''
            env['REQUEST_METHOD'] = 'POST'

            body_iter = self._app_call(env)
            status = self._get_status_int()

            if is_success(status):
                resp = Response()
                resp.status = HTTP_NO_CONTENT
                return resp
            elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
                return self.get_err_response('AccessDenied')
            else:
                return self.get_err_response('InvalidURI')
        elif action == 'website':
            # delete website: stores the (quoted) request body in the
            # website metadata and answers 200 rather than 204
            body = env['wsgi.input'].read()
            env['REQUEST_METHOD'] = 'POST'
            env['QUERY_STRING'] = ''
            env['HTTP_X_CONTAINER_META_WEBSITE'] = quote(body)

            body_iter = self._app_call(env)
            status = self._get_status_int()

            if is_success(status):
                resp = Response()
                resp.status = HTTP_OK
                return resp
            elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
                return self.get_err_response('AccessDenied')
            else:
                return self.get_err_response('InvalidURI')
        else:
            return self.get_err_response('InvalidURI')
def get_acl(account_name, headers):
    """
    Attempts to construct an S3 ACL based on what is found in the swift
    headers.

    Maps the ``x-container-read`` / ``x-container-write`` referrer ACLs
    onto the closest S3 canned ACL ('private', 'public-read',
    'public-read-write' or 'public-write') and renders the corresponding
    AccessControlPolicy XML.

    :param account_name: account used as both Owner ID and DisplayName
    :param headers: dict of (lower-cased) swift response headers
    :returns: swob.Response whose body is the ACL XML

    BUGFIX: the 'public-read-write' document previously contained two
    sibling <AccessControlList> elements; the S3 AccessControlPolicy
    schema defines a single AccessControlList holding multiple Grants,
    so the WRITE grant is now merged into the one list.
    """
    acl = 'private'  # default to private

    if 'x-container-read' in headers:
        if headers['x-container-read'] == ".r:*" or\
                ".r:*," in headers['x-container-read'] or \
                ",*," in headers['x-container-read']:
            acl = 'public-read'
    if 'x-container-write' in headers:
        if headers['x-container-write'] == ".r:*" or\
                ".r:*," in headers['x-container-write'] or \
                ",*," in headers['x-container-write']:
            if acl == 'public-read':
                acl = 'public-read-write'
            else:
                acl = 'public-write'

    if acl == 'private':
        body = ('<AccessControlPolicy>'
                '<Owner>'
                '<ID>%s</ID>'
                '<DisplayName>%s</DisplayName>'
                '</Owner>'
                '<AccessControlList>'
                '<Grant>'
                '<Grantee xmlns:xsi="http://www.w3.org/2001/'
                'XMLSchema-instance" xsi:type="CanonicalUser">'
                '<ID>%s</ID>'
                '<DisplayName>%s</DisplayName>'
                '</Grantee>'
                '<Permission>FULL_CONTROL</Permission>'
                '</Grant>'
                '</AccessControlList>'
                '</AccessControlPolicy>' %
                (account_name, account_name, account_name, account_name))
    elif acl == 'public-read':
        body = ('<AccessControlPolicy>'
                '<Owner>'
                '<ID>%s</ID>'
                '<DisplayName>%s</DisplayName>'
                '</Owner>'
                '<AccessControlList>'
                '<Grant>'
                '<Grantee xmlns:xsi="http://www.w3.org/2001/'
                'XMLSchema-instance" xsi:type="CanonicalUser">'
                '<ID>%s</ID>'
                '<DisplayName>%s</DisplayName>'
                '</Grantee>'
                '<Permission>FULL_CONTROL</Permission>'
                '</Grant>'
                '<Grant>'
                '<Grantee xmlns:xsi="http://www.w3.org/2001/'
                'XMLSchema-instance" xsi:type="Group">'
                '<URI>http://acs.amazonaws.com/groups/global/AllUsers</URI>'
                '</Grantee>'
                '<Permission>READ</Permission>'
                '</Grant>'
                '</AccessControlList>'
                '</AccessControlPolicy>' %
                (account_name, account_name, account_name, account_name))
    elif acl == 'public-read-write':
        # Owner keeps FULL_CONTROL; AllUsers get READ and WRITE grants,
        # all inside the single AccessControlList the schema allows.
        body = ('<AccessControlPolicy>'
                '<Owner>'
                '<ID>%s</ID>'
                '<DisplayName>%s</DisplayName>'
                '</Owner>'
                '<AccessControlList>'
                '<Grant>'
                '<Grantee xmlns:xsi="http://www.w3.org/2001/'
                'XMLSchema-instance" xsi:type="CanonicalUser">'
                '<ID>%s</ID>'
                '<DisplayName>%s</DisplayName>'
                '</Grantee>'
                '<Permission>FULL_CONTROL</Permission>'
                '</Grant>'
                '<Grant>'
                '<Grantee xmlns:xsi="http://www.w3.org/2001/'
                'XMLSchema-instance" xsi:type="Group">'
                '<URI>http://acs.amazonaws.com/groups/global/AllUsers</URI>'
                '</Grantee>'
                '<Permission>READ</Permission>'
                '</Grant>'
                '<Grant>'
                '<Grantee xmlns:xsi="http://www.w3.org/2001/'
                'XMLSchema-instance" xsi:type="Group">'
                '<URI>http://acs.amazonaws.com/groups/global/AllUsers</URI>'
                '</Grantee>'
                '<Permission>WRITE</Permission>'
                '</Grant>'
                '</AccessControlList>'
                '</AccessControlPolicy>' %
                (account_name, account_name, account_name, account_name))
    else:
        # 'public-write' has no canned S3 equivalent; expose only the
        # owner's FULL_CONTROL grant (same document as 'private').
        body = ('<AccessControlPolicy>'
                '<Owner>'
                '<ID>%s</ID>'
                '<DisplayName>%s</DisplayName>'
                '</Owner>'
                '<AccessControlList>'
                '<Grant>'
                '<Grantee xmlns:xsi="http://www.w3.org/2001/'
                'XMLSchema-instance" xsi:type="CanonicalUser">'
                '<ID>%s</ID>'
                '<DisplayName>%s</DisplayName>'
                '</Grantee>'
                '<Permission>FULL_CONTROL</Permission>'
                '</Grant>'
                '</AccessControlList>'
                '</AccessControlPolicy>' %
                (account_name, account_name, account_name, account_name))
    return Response(body=body, content_type="text/plain")
def __call__(self, env, start_response):
    """
    Fake WSGI app for tests: remember the request and answer with a
    canned 'FAKE APP' body plus this instance's configured headers.
    """
    self.req = Request(env)
    canned = Response(request=self.req, body=b'FAKE APP',
                      headers=self.headers)
    return canned(env, start_response)
def GET(self, env, start_response):
    """
    Handle GET Bucket (List Objects) request.

    Translates the S3 listing query arguments onto a swift container
    GET, then renders the swift JSON listing as S3 ListBucketResult
    XML.  The 'acl', 'versioning', 'location', 'logging' and 'uploads'
    sub-resources are intercepted and answered directly.

    :param env: WSGI environment dict
    :param start_response: WSGI start_response callable
    """
    if 'QUERY_STRING' in env:
        args = dict(urlparse.parse_qsl(env['QUERY_STRING'], 1))
    else:
        args = {}

    # Reject a non-numeric max-keys before it reaches int() below.
    if 'max-keys' in args:
        if args.get('max-keys').isdigit() is False:
            return get_err_response('InvalidArgument')

    if 'uploads' in args:
        # Pass it through, the s3multi upload helper will handle it.
        return self.app(env, start_response)

    # Cap the listing size at MAX_BUCKET_LISTING.
    max_keys = min(int(args.get('max-keys', MAX_BUCKET_LISTING)),
                   MAX_BUCKET_LISTING)

    if 'acl' not in args:
        #acl request sent with format=json etc confuses swift
        # Ask for one extra entry so truncation can be detected below.
        env['QUERY_STRING'] = 'format=json&limit=%s' % (max_keys + 1)
    if 'marker' in args:
        env['QUERY_STRING'] += '&marker=%s' % quote(args['marker'])
    if 'prefix' in args:
        env['QUERY_STRING'] += '&prefix=%s' % quote(args['prefix'])
    if 'delimiter' in args:
        env['QUERY_STRING'] += '&delimiter=%s' % quote(args['delimiter'])
    body_iter = self._app_call(env)
    status = self._get_status_int()
    headers = dict(self._response_headers)

    if is_success(status) and 'acl' in args:
        return get_acl(self.account_name, headers)

    if 'versioning' in args:
        # Just report there is no versioning configured here.
        body = ('<VersioningConfiguration '
                'xmlns="http://s3.amazonaws.com/doc/2006-03-01/"/>')
        return Response(body=body, content_type="text/plain")

    if status != HTTP_OK:
        # Map the swift error status onto an S3-style error document.
        if status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
            return get_err_response('AccessDenied')
        elif status == HTTP_NOT_FOUND:
            return get_err_response('NoSuchBucket')
        else:
            return get_err_response('InvalidURI')

    if 'location' in args:
        body = ('<?xml version="1.0" encoding="UTF-8"?>'
                '<LocationConstraint '
                'xmlns="http://s3.amazonaws.com/doc/2006-03-01/"')
        if self.location == 'US':
            # US region is represented by an empty (self-closed) element.
            body += '/>'
        else:
            body += ('>%s</LocationConstraint>' % self.location)
        return Response(body=body, content_type='application/xml')

    if 'logging' in args:
        # logging disabled
        body = ('<?xml version="1.0" encoding="UTF-8"?>'
                '<BucketLoggingStatus '
                'xmlns="http://doc.s3.amazonaws.com/2006-03-01" />')
        return Response(body=body, content_type='application/xml')

    objects = loads(''.join(list(body_iter)))
    # IsTruncated is true when the extra (max_keys + 1)-th entry that
    # was requested above actually came back; only the first max_keys
    # entries are rendered.  Plain objects become <Contents>, 'subdir'
    # entries become <CommonPrefixes>.
    body = ('<?xml version="1.0" encoding="UTF-8"?>'
            '<ListBucketResult '
            'xmlns="http://s3.amazonaws.com/doc/2006-03-01">'
            '<Prefix>%s</Prefix>'
            '<Marker>%s</Marker>'
            '<Delimiter>%s</Delimiter>'
            '<IsTruncated>%s</IsTruncated>'
            '<MaxKeys>%s</MaxKeys>'
            '<Name>%s</Name>'
            '%s'
            '%s'
            '</ListBucketResult>' %
            (
                xml_escape(args.get('prefix', '')),
                xml_escape(args.get('marker', '')),
                xml_escape(args.get('delimiter', '')),
                'true' if max_keys > 0 and len(objects) == (max_keys + 1)
                else 'false',
                max_keys,
                xml_escape(self.container_name),
                "".join(['<Contents><Key>%s</Key><LastModified>%sZ</LastModif'
                         'ied><ETag>%s</ETag><Size>%s</Size><StorageClass>STA'
                         'NDARD</StorageClass><Owner><ID>%s</ID><DisplayName>'
                         '%s</DisplayName></Owner></Contents>' %
                         (xml_escape(unquote(i['name'])), i['last_modified'],
                          i['hash'], i['bytes'], self.account_name,
                          self.account_name)
                         for i in objects[:max_keys] if 'subdir' not in i]),
                "".join(['<CommonPrefixes><Prefix>%s</Prefix></CommonPrefixes>'
                         % xml_escape(i['subdir'])
                         for i in objects[:max_keys] if 'subdir' in i])))
    return Response(body=body, content_type='application/xml')
def GET(self, req):
    """
    Handle HTTP GET request.

    Returns the container listing in the negotiated content type
    (plain text, JSON or XML) along with container-info and metadata
    response headers.

    :param req: swob.Request object
    :returns: swob response (listing, 404, 412, 507 or 204)
    """
    drive, part, account, container, obj = split_and_validate_path(
        req, 4, 5, True)
    path = get_param(req, 'path')
    prefix = get_param(req, 'prefix')
    delimiter = get_param(req, 'delimiter')
    if delimiter and (len(delimiter) > 1 or ord(delimiter) > 254):
        # delimiters can be made more flexible later
        return HTTPPreconditionFailed(body='Bad delimiter')
    marker = get_param(req, 'marker', '')
    end_marker = get_param(req, 'end_marker')
    limit = constraints.CONTAINER_LISTING_LIMIT
    given_limit = get_param(req, 'limit')
    # A non-digit limit is silently ignored; an over-large one is a 412.
    if given_limit and given_limit.isdigit():
        limit = int(given_limit)
        if limit > constraints.CONTAINER_LISTING_LIMIT:
            return HTTPPreconditionFailed(
                request=req,
                body='Maximum limit is %d'
                % constraints.CONTAINER_LISTING_LIMIT)
    out_content_type = get_listing_content_type(req)
    if self.mount_check and not check_mount(self.root, drive):
        return HTTPInsufficientStorage(drive=drive, request=req)
    # stale_reads_ok: listings tolerate a slightly out-of-date database.
    broker = self._get_container_broker(drive, part, account, container,
                                        pending_timeout=0.1,
                                        stale_reads_ok=True)
    if broker.is_deleted():
        return HTTPNotFound(request=req)
    info = broker.get_info()
    resp_headers = {
        'X-Container-Object-Count': info['object_count'],
        'X-Container-Bytes-Used': info['bytes_used'],
        'X-Timestamp': info['created_at'],
        'X-PUT-Timestamp': info['put_timestamp'],
    }
    # Pass through saved and system/user metadata headers with values.
    for key, (value, timestamp) in broker.metadata.iteritems():
        if value and (key.lower() in self.save_headers or
                      is_sys_or_user_meta('container', key)):
            resp_headers[key] = value
    ret = Response(request=req, headers=resp_headers,
                   content_type=out_content_type, charset='utf-8')
    container_list = broker.list_objects_iter(limit, marker, end_marker,
                                              prefix, delimiter, path)
    if out_content_type == 'application/json':
        ret.body = json.dumps([self.update_data_record(record)
                               for record in container_list])
    elif out_content_type.endswith('/xml'):
        doc = Element('container', name=container.decode('utf-8'))
        for obj in container_list:
            record = self.update_data_record(obj)
            if 'subdir' in record:
                # Delimiter pseudo-directory entry.
                name = record['subdir'].decode('utf-8')
                sub = SubElement(doc, 'subdir', name=name)
                SubElement(sub, 'name').text = name
            else:
                obj_element = SubElement(doc, 'object')
                # Known fields first, in a fixed order; any remaining
                # fields follow sorted by name.
                for field in ["name", "hash", "bytes", "content_type",
                              "last_modified"]:
                    SubElement(obj_element, field).text = str(
                        record.pop(field)).decode('utf-8')
                for field in sorted(record):
                    SubElement(obj_element, field).text = str(
                        record[field]).decode('utf-8')
        # Normalize ElementTree's single-quoted XML declaration.
        ret.body = tostring(doc, encoding='UTF-8').replace(
            "<?xml version='1.0' encoding='UTF-8'?>",
            '<?xml version="1.0" encoding="UTF-8"?>', 1)
    else:
        if not container_list:
            return HTTPNoContent(request=req, headers=resp_headers)
        # Plain text: one object name per line.
        ret.body = '\n'.join(rec[0] for rec in container_list) + '\n'
    return ret
def handle_get_token(self, req):
    """
    Handles the various `request for token and service end point(s)` calls.
    There are various formats to support the various auth servers in the
    past.

    "Active Mode" usage:
        All formats require GSS (Kerberos) authentication.

        GET <auth-prefix>/v1/<act>/auth
        GET <auth-prefix>/auth
        GET <auth-prefix>/v1.0

        On successful authentication, the response will have X-Auth-Token
        and X-Storage-Token set to the token to use with Swift.

    "Passive Mode" usage::

        GET <auth-prefix>/v1/<act>/auth
            X-Auth-User: <act>:<usr>  or  X-Storage-User: <usr>
            X-Auth-Key: <key>         or  X-Storage-Pass: <key>
        GET <auth-prefix>/auth
            X-Auth-User: <act>:<usr>  or  X-Storage-User: <act>:<usr>
            X-Auth-Key: <key>         or  X-Storage-Pass: <key>
        GET <auth-prefix>/v1.0
            X-Auth-User: <act>:<usr>  or  X-Storage-User: <act>:<usr>
            X-Auth-Key: <key>         or  X-Storage-Pass: <key>

        Values should be url encoded, "act%3Ausr" instead of "act:usr" for
        example; however, for backwards compatibility the colon may be
        included unencoded.

        On successful authentication, the response will have X-Auth-Token
        and X-Storage-Token set to the token to use with Swift and
        X-Storage-URL set to the URL to the default Swift cluster to use.

    :param req: The swob.Request to process.
    :returns: swob.Response, 2xx on success with data set as explained
        above.
    """
    # Validate the request info
    try:
        pathsegs = split_path(req.path_info, 1, 3, True)
    except ValueError:
        self.logger.increment('errors')
        return HTTPNotFound(request=req)
    if not ((pathsegs[0] == 'v1' and pathsegs[2] == 'auth')
            or pathsegs[0] in ('auth', 'v1.0')):
        return HTTPBadRequest(request=req)

    # Client is inside the domain
    if self.auth_method == "active":
        return HTTPSeeOther(location=self.ext_authentication_url)

    # Client is outside the domain
    elif self.auth_method == "passive":
        account, user, key = None, None, None
        # Extract user, account and key from request
        if pathsegs[0] == 'v1' and pathsegs[2] == 'auth':
            account = pathsegs[1]
            user = req.headers.get('x-storage-user')
            if not user:
                # Only the x-auth-user form carries an <act>:<usr> pair
                # that must match the account in the path.
                user = unquote(req.headers.get('x-auth-user', ''))
                if user:
                    if ':' not in user:
                        return HTTPUnauthorized(request=req)
                    else:
                        account2, user = user.split(':', 1)
                        if account != account2:
                            return HTTPUnauthorized(request=req)
            key = req.headers.get('x-storage-pass')
            if not key:
                key = unquote(req.headers.get('x-auth-key', ''))
        elif pathsegs[0] in ('auth', 'v1.0'):
            user = unquote(req.headers.get('x-auth-user', ''))
            if not user:
                user = req.headers.get('x-storage-user')
            if user:
                if ':' not in user:
                    return HTTPUnauthorized(request=req)
                else:
                    account, user = user.split(':', 1)
            key = unquote(req.headers.get('x-auth-key', ''))
            if not key:
                key = req.headers.get('x-storage-pass')

        if not (account or user or key):
            # If all are not given, client may be part of the domain
            return HTTPSeeOther(location=self.ext_authentication_url)
        elif None in (key, user, account):
            # If only one or two of them is given, but not all
            return HTTPUnauthorized(request=req)

        # Run kinit on the user
        if self.realm_name and "@" not in user:
            user = user + "@" + self.realm_name
        try:
            ret = run_kinit(user, key)
        except OSError as e:
            # NOTE(review): if the OSError has an errno other than
            # ENOENT, 'ret' is left unbound and the check below raises
            # NameError — confirm whether run_kinit can fail that way.
            if e.errno == errno.ENOENT:
                return HTTPServerError("kinit command not found\n")
        if ret != 0:
            self.logger.warning("Failed: kinit %s", user)
            if ret == -1:
                self.logger.warning("Failed: kinit: Password has probably "
                                    "expired.")
                return HTTPServerError("Kinit is taking too long.\n")
            return HTTPUnauthorized(request=req)
        self.logger.debug("kinit succeeded")

        # Strip the realm before group lookup.
        if "@" in user:
            user = user.split("@")[0]

        # Check if user really belongs to the account
        groups_list = get_groups_from_username(user).strip().split(",")
        user_group = ("%s%s" % (self.reseller_prefix, account)).lower()
        reseller_admin_group = \
            ("%sreseller_admin" % self.reseller_prefix).lower()
        if user_group not in groups_list:
            # Check if user is reseller_admin. If not, return Unauthorized.
            # On AD/IdM server, auth_reseller_admin is a separate group
            if reseller_admin_group not in groups_list:
                return HTTPUnauthorized(request=req)

        mc = cache_from_env(req.environ)
        if not mc:
            raise Exception('Memcache required')
        # Reuse a cached token; mint and cache a fresh one otherwise.
        token, expires, groups = get_auth_data(mc, user)
        if not token:
            token = generate_token()
            expires = time() + self.token_life
            groups = get_groups_from_username(user)
            set_auth_data(mc, user, token, expires, groups)

        headers = {'X-Auth-Token': token,
                   'X-Storage-Token': token}

        if self.debug_headers:
            # NOTE(review): 'X-Debug-Groups:' carries a trailing colon
            # inside the header NAME — looks like a typo; confirm
            # whether any consumer depends on it before fixing.
            headers.update({'X-Debug-Remote-User': user,
                            'X-Debug-Groups:': groups,
                            'X-Debug-Token-Life': self.token_life,
                            'X-Debug-Token-Expires': ctime(expires)})

        resp = Response(request=req, headers=headers)
        resp.headers['X-Storage-Url'] = \
            '%s/v1/%s%s' % (resp.host_url, self.reseller_prefix, account)
        return resp
def __call__(self, env, start_response):
    """
    Fake WSGI app for bulk-middleware tests: count every call, record
    delete paths, and map path prefixes/suffixes to canned statuses.
    Unmatched paths fall through and return None.
    """
    self.calls += 1
    path = env['PATH_INFO']

    def respond(status):
        # Build and immediately invoke a canned WSGI response.
        return Response(status=status)(env, start_response)

    if path.startswith('/unauth/'):
        return respond(401)
    if path.startswith('/create_cont/'):
        return respond('201 Created')
    if path.startswith('/create_cont_fail/'):
        return respond('404 Not Found')
    if path.startswith('/create_obj_unauth/'):
        if path.endswith('/cont'):
            return respond('201 Created')
        return respond(401)
    if path.startswith('/tar_works/'):
        if len(path) > 100:
            return respond('400 Bad Request')
        return respond('201 Created')
    if path.startswith('/delete_works/'):
        self.delete_paths.append(path)
        if len(path) > 100:
            return respond('400 Bad Request')
        if path.endswith('404'):
            return respond('404 Not Found')
        if path.endswith('badutf8'):
            return respond('412 Precondition Failed')
        return respond('204 No Content')
    if path.startswith('/delete_cont_fail/'):
        return respond('409 Conflict')
    if path.startswith('/broke/'):
        return respond('500 Internal Error')
def get_or_head_response(self, req, x_object_manifest,
                         response_headers=None):
    """
    Build the response for a GET or HEAD of a DLO manifest object.

    Resolves the manifest's container/prefix, fetches (the first page
    of) the segment listing, works out Content-Length/Etag and Range
    handling from it, and for GET wires up a rate-limited
    SegmentedIterable as the response body.

    :param req: swob.Request object
    :param x_object_manifest: value of the X-Object-Manifest header,
        i.e. "<container>/<object prefix>"
    :param response_headers: header pairs to base the response on;
        defaults to self._response_headers
    """
    if response_headers is None:
        response_headers = self._response_headers

    container, obj_prefix = x_object_manifest.split('/', 1)
    container = unquote(container)
    obj_prefix = unquote(obj_prefix)

    # manifest might point to a different container
    req.acl = None
    version, account, _junk = req.split_path(2, 3, True)
    error_response, segments = self._get_container_listing(
        req, version, account, container, obj_prefix)
    if error_response:
        return error_response
    # Fewer segments than the listing limit means this one page is the
    # whole listing.
    have_complete_listing = len(segments) < \
        constraints.CONTAINER_LISTING_LIMIT

    first_byte = last_byte = None
    actual_content_length = None
    content_length_for_swob_range = None
    # Only single-range requests get Range handling; multi-range is
    # ignored (full object returned).
    if req.range and len(req.range.ranges) == 1:
        content_length_for_swob_range = sum(o['bytes'] for o in segments)

        # This is a hack to handle suffix byte ranges (e.g. "bytes=-5"),
        # which we can't honor unless we have a complete listing.
        _junk, range_end = req.range.ranges_for_length(float("inf"))[0]

        # If this is all the segments, we know whether or not this
        # range request is satisfiable.
        #
        # Alternately, we may not have all the segments, but this range
        # falls entirely within the first page's segments, so we know
        # that it is satisfiable.
        if (have_complete_listing
                or range_end < content_length_for_swob_range):
            byteranges = req.range.ranges_for_length(
                content_length_for_swob_range)
            if not byteranges:
                headers = {'Accept-Ranges': 'bytes'}
                if have_complete_listing:
                    headers['Content-Range'] = 'bytes */%d' % (
                        content_length_for_swob_range, )
                return HTTPRequestedRangeNotSatisfiable(
                    request=req, headers=headers)
            first_byte, last_byte = byteranges[0]
            # For some reason, swob.Range.ranges_for_length adds 1 to the
            # last byte's position.
            last_byte -= 1
            actual_content_length = last_byte - first_byte + 1
        else:
            # The range may or may not be satisfiable, but we can't tell
            # based on just one page of listing, and we're not going to go
            # get more pages because that would use up too many resources,
            # so we ignore the Range header and return the whole object.
            actual_content_length = None
            content_length_for_swob_range = None
            req.range = None

    # Drop any stale length/range headers; they are recomputed below.
    response_headers = [(h, v) for h, v in response_headers
                        if h.lower() not in ("content-length",
                                             "content-range")]

    if content_length_for_swob_range is not None:
        # Here, we have to give swob a big-enough content length so that
        # it can compute the actual content length based on the Range
        # header. This value will not be visible to the client; swob will
        # substitute its own Content-Length.
        #
        # Note: if the manifest points to at least CONTAINER_LISTING_LIMIT
        # segments, this may be less than the sum of all the segments'
        # sizes. However, it'll still be greater than the last byte in the
        # Range header, so it's good enough for swob.
        response_headers.append(('Content-Length',
                                 str(content_length_for_swob_range)))
    elif have_complete_listing:
        actual_content_length = sum(o['bytes'] for o in segments)
        response_headers.append(('Content-Length',
                                 str(actual_content_length)))

    if have_complete_listing:
        # The DLO Etag is the md5 of the concatenated segment etags;
        # only computable when the whole listing is in hand.
        response_headers = [(h, v) for h, v in response_headers
                            if h.lower() != "etag"]
        etag = md5()
        for seg_dict in segments:
            etag.update(seg_dict['hash'].strip('"'))
        response_headers.append(('Etag', '"%s"' % etag.hexdigest()))

    app_iter = None
    if req.method == 'GET':
        listing_iter = RateLimitedIterator(
            self._segment_listing_iterator(
                req, version, account, container, obj_prefix, segments,
                first_byte=first_byte, last_byte=last_byte),
            self.dlo.rate_limit_segments_per_sec,
            limit_after=self.dlo.rate_limit_after_segment)

        app_iter = SegmentedIterable(
            req, self.dlo.app, listing_iter, ua_suffix="DLO MultipartGET",
            swift_source="DLO", name=req.path, logger=self.logger,
            max_get_time=self.dlo.max_get_time,
            response_body_length=actual_content_length)

        try:
            # Fail fast (409) if the first segment is broken/missing.
            app_iter.validate_first_segment()
        except (SegmentError, ListingIterError):
            return HTTPConflict(request=req)

    resp = Response(request=req, headers=response_headers,
                    conditional_response=True, app_iter=app_iter)
    return resp
def __call__(self, env, start_response):
    """Minimal fake WSGI app: always reply with the body "OK"."""
    canned = Response(body="OK")
    return canned(env, start_response)
def _manifest_get_response(self, req, content_length, response_headers,
                           segments):
    """
    Build the GET response for an SLO manifest.

    Translates the request's byteranges against the known total
    content_length, wraps the segment listing in a rate limiter (only
    small segments count toward the limit), and streams the segments
    via SegmentedIterable.

    :param req: swob.Request object
    :param content_length: total size of the large object, per manifest
    :param response_headers: header pairs for the response
    :param segments: list of segment dicts from the manifest
    """
    if req.range:
        byteranges = [
            # For some reason, swob.Range.ranges_for_length adds 1 to the
            # last byte's position.
            (start, end - 1) for start, end
            in req.range.ranges_for_length(content_length)]
    else:
        byteranges = []

    ver, account, _junk = req.split_path(3, 3, rest_with_last=True)
    plain_listing_iter = self._segment_listing_iterator(
        req, ver, account, segments, byteranges)

    # NOTE: Python 2 tuple-parameter syntax; unpacks the 3-tuples
    # yielded by the listing iterator.
    def is_small_segment((seg_dict, start_byte, end_byte)):
        # A None boundary means "whole segment" on that side.
        start = 0 if start_byte is None else start_byte
        end = int(seg_dict['bytes']) - 1 if end_byte is None else end_byte
        is_small = (end - start + 1) < self.slo.rate_limit_under_size
        return is_small

    ratelimited_listing_iter = RateLimitedIterator(
        plain_listing_iter,
        self.slo.rate_limit_segments_per_sec,
        limit_after=self.slo.rate_limit_after_segment,
        ratelimit_if=is_small_segment)

    # self._segment_listing_iterator gives us 3-tuples of (segment dict,
    # start byte, end byte), but SegmentedIterable wants (obj path, etag,
    # size, start byte, end byte), so we clean that up here
    segment_listing_iter = (
        ("/{ver}/{acc}/{conobj}".format(
            ver=ver, acc=account, conobj=seg_dict['name'].lstrip('/')),
         seg_dict['hash'], int(seg_dict['bytes']),
         start_byte, end_byte)
        for seg_dict, start_byte, end_byte in ratelimited_listing_iter)

    segmented_iter = SegmentedIterable(
        req, self.slo.app, segment_listing_iter,
        name=req.path, logger=self.slo.logger,
        ua_suffix="SLO MultipartGET",
        swift_source="SLO",
        max_get_time=self.slo.max_get_time)

    try:
        # Fail fast if the first segment is broken/missing.
        segmented_iter.validate_first_segment()
    except (ListingIterError, SegmentError):
        # Copy from the SLO explanation in top of this file.
        # If any of the segments from the manifest are not found or
        # their Etag/Content Length no longer match the connection
        # will drop. In this case a 409 Conflict will be logged in
        # the proxy logs and the user will receive incomplete results.
        return HTTPConflict(request=req)

    response = Response(request=req, content_length=content_length,
                        headers=response_headers,
                        conditional_response=True,
                        app_iter=segmented_iter)
    if req.range:
        # A ranged response isn't the whole object, so the manifest's
        # Etag no longer applies.
        response.headers.pop('Etag')
    return response
def handle_get_token(self, req):
    """
    Handles the various `request for token and service end point(s)` calls.
    There are various formats to support the various auth servers in the
    past. Examples::

        GET <auth-prefix>/v1/<act>/auth
            X-Auth-User: <act>:<usr>  or  X-Storage-User: <usr>
            X-Auth-Key: <key>         or  X-Storage-Pass: <key>
        GET <auth-prefix>/auth
            X-Auth-User: <act>:<usr>  or  X-Storage-User: <act>:<usr>
            X-Auth-Key: <key>         or  X-Storage-Pass: <key>
        GET <auth-prefix>/v1.0
            X-Auth-User: <act>:<usr>  or  X-Storage-User: <act>:<usr>
            X-Auth-Key: <key>         or  X-Storage-Pass: <key>

    On successful authentication, the response will have X-Auth-Token and
    X-Storage-Token set to the token to use with Swift and X-Storage-URL
    set to the URL to the default Swift cluster to use.

    :param req: The swob.Request to process.
    :returns: swob.Response, 2xx on success with data set as explained
              above.
    """
    # Validate the request info
    try:
        pathsegs = split_path(req.path_info, 1, 3, True)
    except ValueError:
        self.logger.increment('errors')
        return HTTPNotFound(request=req)
    if pathsegs[0] == 'v1' and pathsegs[2] == 'auth':
        # /v1/<act>/auth form: account comes from the path, so the
        # user header may or may not carry the account prefix.
        account = pathsegs[1]
        user = req.headers.get('x-storage-user')
        if not user:
            user = req.headers.get('x-auth-user')
        if not user or ':' not in user:
            self.logger.increment('token_denied')
            auth = 'Swift realm="%s"' % account
            return HTTPUnauthorized(request=req,
                                    headers={'Www-Authenticate': auth})
        account2, user = user.split(':', 1)
        # The account in the header must agree with the one in the path.
        if account != account2:
            self.logger.increment('token_denied')
            auth = 'Swift realm="%s"' % account
            return HTTPUnauthorized(request=req,
                                    headers={'Www-Authenticate': auth})
        key = req.headers.get('x-storage-pass')
        if not key:
            key = req.headers.get('x-auth-key')
    elif pathsegs[0] in ('auth', 'v1.0'):
        # /auth or /v1.0 form: account is embedded in the user header
        # as <act>:<usr>.
        user = req.headers.get('x-auth-user')
        if not user:
            user = req.headers.get('x-storage-user')
        if not user or ':' not in user:
            self.logger.increment('token_denied')
            auth = 'Swift realm="unknown"'
            return HTTPUnauthorized(request=req,
                                    headers={'Www-Authenticate': auth})
        account, user = user.split(':', 1)
        key = req.headers.get('x-auth-key')
        if not key:
            key = req.headers.get('x-storage-pass')
    else:
        return HTTPBadRequest(request=req)
    if not all((account, user, key)):
        self.logger.increment('token_denied')
        realm = account or 'unknown'
        return HTTPUnauthorized(request=req, headers={'Www-Authenticate':
                                'Swift realm="%s"' % realm})
    # Authenticate user
    account_user = account + ':' + user
    if account_user not in self.users:
        self.logger.increment('token_denied')
        auth = 'Swift realm="%s"' % account
        return HTTPUnauthorized(request=req,
                                headers={'Www-Authenticate': auth})
    if self.users[account_user]['key'] != key:
        self.logger.increment('token_denied')
        auth = 'Swift realm="unknown"'
        return HTTPUnauthorized(request=req,
                                headers={'Www-Authenticate': auth})
    # The account id is the last path component of the configured
    # storage URL for this user.
    account_id = self.users[account_user]['url'].rsplit('/', 1)[-1]
    # Get memcache client
    memcache_client = cache_from_env(req.environ)
    if not memcache_client:
        raise Exception('Memcache required')
    # See if a token already exists and hasn't expired
    token = None
    memcache_user_key = '%s/user/%s' % (self.reseller_prefix, account_user)
    candidate_token = memcache_client.get(memcache_user_key)
    if candidate_token:
        memcache_token_key = \
            '%s/token/%s' % (self.reseller_prefix, candidate_token)
        cached_auth_data = memcache_client.get(memcache_token_key)
        if cached_auth_data:
            expires, old_groups = cached_auth_data
            old_groups = old_groups.split(',')
            new_groups = self._get_user_groups(account, account_user,
                                               account_id)
            # Reuse the cached token only if it is unexpired AND the
            # user's group membership has not changed since it was cut.
            if expires > time() and \
                    set(old_groups) == set(new_groups.split(',')):
                token = candidate_token
    # Create a new token if one didn't exist
    if not token:
        # Generate new token
        token = '%stk%s' % (self.reseller_prefix, uuid4().hex)
        expires = time() + self.token_life
        groups = self._get_user_groups(account, account_user, account_id)
        # Save token
        memcache_token_key = '%s/token/%s' % (self.reseller_prefix, token)
        memcache_client.set(memcache_token_key, (expires, groups),
                            time=float(expires - time()))
        # Record the token with the user info for future use.
        memcache_user_key = \
            '%s/user/%s' % (self.reseller_prefix, account_user)
        memcache_client.set(memcache_user_key, token,
                            time=float(expires - time()))
    resp = Response(request=req, headers={
        'x-auth-token': token, 'x-storage-token': token})
    url = self.users[account_user]['url'].replace('$HOST', resp.host_url)
    if self.storage_url_scheme != 'default':
        # Force the configured scheme onto the storage URL.
        url = self.storage_url_scheme + ':' + url.split(':', 1)[1]
    resp.headers['x-storage-url'] = url
    return resp
def GET(self, env, start_response):
    """
    Handle GET Bucket (List Objects) request

    Dispatches on the query string: with no recognized sub-resource key
    this is an object listing; otherwise the single sub-resource
    (acl/cors/lifecycle/...) is served from container metadata.
    """
    qs = env.get('QUERY_STRING', '')
    # parse_qs maps each key to a LIST of values.
    args = urlparse.parse_qs(qs, 1)
    # Sub-resource keys that switch this from "list objects" to a
    # bucket-configuration read.
    key_args = set(['cors', 'lifecycle', 'policy', 'logging',
                    'notification', 'tagging', 'requestPayment',
                    'versioning', 'versions', 'website', 'location'])

    if not key_args & set(args):
        # GET bucket to list objects
        max_keys = self.MAX_BUCKET_LISTING
        if 'max-keys' in args:
            if args.get('max-keys')[0].isdigit() is False:
                return self.get_err_response('InvalidArgument')
            # Cap the client's request at the configured maximum.
            max_keys = min(int(args.get('max-keys')[0]),
                           self.MAX_BUCKET_LISTING)

        if 'acl' not in args:
            #acl request sent with format=json etc confuses swift
            # Ask for one extra entry so truncation can be detected.
            env['QUERY_STRING'] = 'format=json&limit=%s' % (max_keys + 1)
        # NOTE(review): args values are lists from parse_qs, so
        # quote(args['marker']) quotes a list, not a string — looks like
        # these should be args['marker'][0] etc.; confirm and fix.
        if 'marker' in args:
            env['QUERY_STRING'] += '&marker=%s' % quote(args['marker'])
        if 'prefix' in args:
            env['QUERY_STRING'] += '&prefix=%s' % quote(args['prefix'])
        if 'delimiter' in args:
            env['QUERY_STRING'] += '&delimiter=%s' % quote(
                args['delimiter'])
        body_iter = self._app_call(env)
        if env['REQUEST_METHOD'] == 'HEAD':
            body_iter = ''
        status = self._get_status_int()
        headers = dict(self._response_headers)

        if is_success(status) and 'acl' in args:
            return self.get_acl(self.account_name, headers)

        if 'versioning' in args:
            # Just report there is no versioning configured here.
            body = ('<VersioningConfiguration '
                    'xmlns="http://s3.amazonaws.com/doc/2006-03-01/"/>')
            return Response(body=body, content_type="text/plain")

        if status != HTTP_OK:
            if status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
                return self.get_err_response('AccessDenied')
            elif status == HTTP_NOT_FOUND:
                return self.get_err_response('NoSuchBucket')
            else:
                return self.get_err_response('InvalidURI')

        if 'location' in args:
            body = ('<?xml version="1.0" encoding="UTF-8"?>'
                    '<LocationConstraint '
                    'xmlns="http://s3.amazonaws.com/doc/2006-03-01/"')
            if self.location == 'US':
                # US region is represented by an empty, self-closed tag.
                body += '/>'
            else:
                body += ('>%s</LocationConstraint>' % self.location)
            return Response(body=body, content_type='application/xml')

        if 'logging' in args:
            # logging disabled
            body = ('<?xml version="1.0" encoding="UTF-8"?>'
                    '<BucketLoggingStatus '
                    'xmlns="http://doc.s3.amazonaws.com/2006-03-01" />')
            return Response(body=body, content_type='application/xml')

        # Swift returned a JSON container listing; convert it to S3's
        # ListBucketResult XML.
        objects = loads(''.join(list(body_iter)))
        # NOTE(review): args.get('prefix', '') etc. pass parse_qs LISTS
        # to xml_escape here as well — confirm these should be [0].
        body = ('<?xml version="1.0" encoding="UTF-8"?>'
                '<ListBucketResult '
                'xmlns="http://s3.amazonaws.com/doc/2006-03-01">'
                '<Prefix>%s</Prefix>'
                '<Marker>%s</Marker>'
                '<Delimiter>%s</Delimiter>'
                '<IsTruncated>%s</IsTruncated>'
                '<MaxKeys>%s</MaxKeys>'
                '<Name>%s</Name>'
                '%s'
                '%s'
                '</ListBucketResult>' %
                (
                    xml_escape(args.get('prefix', '')),
                    xml_escape(args.get('marker', '')),
                    xml_escape(args.get('delimiter', '')),
                    # Listing is truncated when the extra (max_keys+1)th
                    # entry came back.
                    'true' if max_keys > 0 and
                    len(objects) == (max_keys + 1) else 'false',
                    max_keys,
                    xml_escape(self.container_name),
                    "".join(['<Contents><Key>%s</Key><LastModified>%sZ</LastModif'
                             'ied><ETag>%s</ETag><Size>%s</Size><StorageClass>STA'
                             'NDARD</StorageClass><Owner><ID>%s</ID><DisplayName>'
                             '%s</DisplayName></Owner></Contents>' %
                             (xml_escape(unquote(i['name'])),
                              i['last_modified'], i['hash'], i['bytes'],
                              self.account_name, self.account_name)
                             for i in objects[:max_keys]
                             if 'subdir' not in i]),
                    "".join(['<CommonPrefixes><Prefix>%s</Prefix></CommonPrefixes>'
                             % xml_escape(i['subdir'])
                             for i in objects[:max_keys]
                             if 'subdir' in i])))
        return Response(body=body, content_type='application/xml')
    else:
        # GET specified data
        #env['REQUEST_METHOD'] = 'HEAD'
        body_iter = self._app_call(env)
        status = self._get_status_int()
        headers = dict(self._response_headers)
        # Python 2: dict.keys() returns a list; take the lone
        # sub-resource name from the query.
        action = args.keys().pop()
        if action == 'acl':
            # get acl
            # get policy
            acl = headers.get('X-Container-Meta-Policy') or ''
            if is_success(status):
                if acl:
                    return Response(status=HTTP_OK,
                                    content_type='application/xml',
                                    body=unquote(acl))
                else:
                    return self.get_err_response('NotSuchPolicy')
            elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
                return self.get_err_response('AccessDenied')
            else:
                return self.get_err_response('InvalidURI')
        elif action == 'cors':
            # get cors
            # Rebuild a CORSConfiguration document from the
            # comma-joined values stored in container metadata.
            _headers = set([
                'X-Container-Meta-Access-Control-Expose-Headers',
                'X-Container-Meta-Access-Control-Allow-Origin',
                'X-Container-Meta-Access-Control-Max-Age',
                'X-Container-Meta-Access-Control-Allow-Method'])
            bodye = etree.Element('CORSConfiguration')
            if _headers & set(headers):
                rule = etree.Element('CORSRule')
                if 'X-Container-Meta-Access-Control-Expose-Headers' in headers:
                    valuel = headers[
                        'X-Container-Meta-Access-Control-Expose-Headers'].split(
                        ',')
                    for i in valuel:
                        eh = self.create_elem('ExposeHeader', i)
                        rule.append(eh)
                if 'X-Container-Meta-Access-Control-Allow-Origin' in headers:
                    valuel = headers[
                        'X-Container-Meta-Access-Control-Allow-Origin'].split(
                        ',')
                    for i in valuel:
                        ao = self.create_elem('AllowedOrigin', i)
                        rule.append(ao)
                if 'X-Container-Meta-Access-Control-Max-Age' in headers:
                    valuel = headers[
                        'X-Container-Meta-Access-Control-Max-Age'].split(
                        ',')
                    for i in valuel:
                        ma = self.create_elem('MaxAgeSeconds', i)
                        rule.append(ma)
                if 'X-Container-Meta-Access-Control-Allow-Method' in headers:
                    valuel = headers[
                        'X-Container-Meta-Access-Control-Allow-Method'].split(
                        ',')
                    for i in valuel:
                        al = self.create_elem('AllowedMethod', i)
                        rule.append(al)
                rule.append(self.create_elem('ID', 'unique_rule'))
                bodye.append(rule)
            else:
                bodye.text = ''
            if is_success(status):
                return Response(status=HTTP_OK,
                                content_type='application/xml',
                                body=self.elem2xmlbody(bodye))
            elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
                return self.get_err_response('AccessDenied')
            else:
                return self.get_err_response('InvalidURI')
        elif action == 'lifecycle':
            # get lifecycle
            # Reconstruct the LifecycleConfiguration from the
            # X-Container-Meta-Expiration-*/Trans-* headers.
            bodye = etree.Element('LifecycleConfiguration')
            if 'X-Container-Meta-Expiration-Status' in headers:
                rule = etree.Element('Rule')
                rule.append(self.create_elem(
                    'Status',
                    headers['X-Container-Meta-Expiration-Status']))
                rule.append(self.create_elem('ID', 'unique_rule'))
                if 'X-Container-Meta-Expiration-Prefix' in headers:
                    rule.append(self.create_elem(
                        'Prefix',
                        headers['X-Container-Meta-Expiration-Prefix']))
                if 'X-Container-Meta-Expiration-At' in headers or \
                        'X-Container-Meta-Expiration-After' in headers:
                    expir = etree.Element('Expiration')
                    if 'X-Container-Meta-Expiration-At' in headers:
                        expir.append(self.create_elem(
                            'Date',
                            headers['X-Container-Meta-Expiration-At']))
                    if 'X-Container-Meta-Expiration-After' in headers:
                        expir.append(self.create_elem(
                            'Days',
                            headers['X-Container-Meta-Expiration-After']))
                    rule.append(expir)
                if 'X-Container-Meta-Trans-Class' in headers:
                    trans = etree.Element('Transition')
                    cls = self.create_elem(
                        'StorageClass',
                        headers['X-Container-Meta-Trans-Class'])
                    trans.append(cls)
                    if 'X-Container-Meta-Trans-At' in headers:
                        trans.append(self.create_elem(
                            'Date',
                            headers['X-Container-Meta-Trans-At']))
                    if 'X-Container-Meta-Trans-After' in headers:
                        trans.append(self.create_elem(
                            'Days',
                            headers['X-Container-Meta-Trans-After']))
                    rule.append(trans)
                bodye.append(rule)
            else:
                bodye.text = ''
            if is_success(status):
                return Response(status=HTTP_OK,
                                content_type='application/xml',
                                body=self.elem2xmlbody(bodye))
            elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
                return self.get_err_response('AccessDenied')
            else:
                return self.get_err_response('InvalidURI')
        elif action == 'policy':
            # get policy
            json = headers.get('X-Container-Meta-Policy') or ''
            if is_success(status):
                if json:
                    return Response(status=HTTP_OK,
                                    content_type='application/json',
                                    body=unquote(json))
                else:
                    return self.get_err_response('NotSuchPolicy')
            elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
                return self.get_err_response('AccessDenied')
            else:
                return self.get_err_response('InvalidURI')
        elif action == 'logging':
            # get logging
            target = headers.get('X-Container-Meta-Logging-Target') or ''
            prefix = headers.get('X-Container-Meta-Logging-Prefix') or ''
            statuse = etree.Element('BucketLoggingStatus')
            if target:
                enabled = etree.Element('LoggingEnabled')
                target_bucket = self.create_elem('TargetBucket', target)
                if prefix:
                    target_prefix = self.create_elem(
                        'TargetPrefix', prefix)
                enabled.append(target_bucket)
                # NOTE(review): target_prefix is appended even when
                # prefix is empty and was never created — possible
                # NameError; confirm intended guard.
                enabled.append(target_prefix)
                statuse.append(enabled)
            else:
                pass  # set text None
            if is_success(status):
                return Response(status=HTTP_OK,
                                content_type='application/xml',
                                body=self.elem2xmlbody(statuse))
            elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
                return self.get_err_response('AccessDenied')
            else:
                return self.get_err_response('InvalidURI')
        elif action == 'notification':
            # get it
            topic = headers.get('X-Container-Meta-Noti-Topic')
            event = headers.get('X-Container-Meta-Noti-Event')
            if is_success(status):
                if topic:
                    # NOTE(review): this builds a TUPLE (format string,
                    # topic, event) — the '%' operator is missing, so
                    # Response gets a tuple as body; confirm and fix.
                    body = (
                        '<WebsiteConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
                        '<NotificationConfiguration> '
                        '<TopicConfiguration>'
                        '<Topic>%s</Topic>'
                        '<Event>%s</Event>'
                        '</TopicConfiguration>'
                        '</NotificationConfiguration>', topic, event)
                    return Response(status=HTTP_OK,
                                    content_type='application/xml',
                                    body=body)
                else:
                    return self.get_err_response('NotSuchWebsite')
            elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
                return self.get_err_response('AccessDenied')
            else:
                return self.get_err_response('InvalidURI')
        elif action == 'tagging':
            # get tagging
            Tagging = etree.Element('Tagging')
            TagSet = etree.Element('TagSet')
            # 'X-Container-Meta-Tag-' is 21 chars; slice off the prefix
            # to recover each tag key.
            meta_keys = [header[21:] for header in headers
                         if header.startswith('X-Container-Meta-Tag-')]
            for key in meta_keys:
                Tag = etree.Element('Tag')
                # The stored value is key+value concatenated (see the
                # matching PUT tagging branch), so split it at len(key).
                keyvalues = headers['X-Container-Meta-Tag-' + key]
                _key = keyvalues[:len(key)]
                _value = keyvalues[len(key):]
                Tag.append(self.create_elem('Key', _key))
                Tag.append(self.create_elem('Value', _value))
                TagSet.append(Tag)
            Tagging.append(TagSet)
            if is_success(status):
                return Response(status=HTTP_OK,
                                content_type='application/xml',
                                body=self.elem2xmlbody(Tagging))
            elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
                return self.get_err_response('AccessDenied')
            else:
                return self.get_err_response('InvalidURI')
        elif action == 'requestPayment':
            # get it
            # default value is BucketOwner
            pay = headers.get('X-Container-Meta-Payment', 'BucketOwner')
            if is_success(status):
                if pay:
                    return Response(status=HTTP_OK,
                                    content_type='application/xml',
                                    body=unquote(pay))
                else:
                    return self.get_err_response('NotSuchWebsite')
            elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
                return self.get_err_response('AccessDenied')
            else:
                return self.get_err_response('InvalidURI')
        elif action == 'versioning':
            versioning = 'Enabled' if 'X-Versions-Location' in headers \
                else 'Suspended'
            bodye = etree.Element('VersioningConfiguration')
            stat = self.create_elem('Status', versioning)
            bodye.append(stat)
            if is_success(status):
                return Response(status=HTTP_OK,
                                content_type='application/xml',
                                body=self.elem2xmlbody(bodye))
            elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
                return self.get_err_response('AccessDenied')
            else:
                return self.get_err_response('InvalidURI')
        elif action == 'website':
            # get website
            website = headers.get('X-Container-Meta-Website')
            fake = (
                '<WebsiteConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
                '<IndexDocument>'
                '<Suffix>index.html</Suffix>'
                '</IndexDocument>'
                '<ErrorDocument>'
                '<Key>SomeErrorDocument.html</Key>'
                '</ErrorDocument>'
                '</WebsiteConfiguration>')
            if is_success(status):
                if website:
                    # return fake data
                    return Response(status=HTTP_OK,
                                    content_type='application/xml',
                                    body=fake)
                else:
                    return self.get_err_response('NotSuchWebsite')
            elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
                return self.get_err_response('AccessDenied')
            else:
                return self.get_err_response('InvalidURI')
        elif action == 'location':
            # Location is hard-coded to 'CN' for this deployment.
            bodye = self.create_elem('LocationConstraint', 'CN')
            return Response(status=HTTP_OK,
                            content_type='application/xml',
                            body=self.elem2xmlbody(bodye))
        elif action == 'versions':
            # get versions container
            path = '/v1/AUTH_%s/%s' % (self.account_name,
                                       self.container_name)
            env = copyenv(env, method='GET', path=path, query_string='')
            body_iter = self._app_call(env)
            status = self._get_status_int()
            # get origin container
            path = '/v1/AUTH_%s/%s' % (
                quote(self.account_name),
                quote(self.version_name(self.container_name)))
            env2 = copyenv(env, method='GET', path=path, query_string='')
            body_iter2 = self._app_call(env2)
            status2 = self._get_status_int()
            last = list(body_iter)
            history = list(body_iter2)
            res = etree.Element('ListVersionsResult')
            bucket = self.create_elem('Name', self.container_name)
            res.append(bucket)
            if last:
                # Plain-text listing: one object name per line.
                last = [i for i in last[0].split('\n') if i]
                for i in last:
                    ver = etree.Element('Version')
                    ver.append(self.create_elem('Key', i))
                    ver.append(self.create_elem('VersionId', 'lastest'))
                    ver.append(self.create_elem('IsLastest', 'true'))
                    res.append(ver)
            if history:
                history = [i for i in history[0].split('\n') if i]
                for i in history:
                    ver = etree.Element('Version')
                    # Archived names look like <3-char prefix><key>/<id>;
                    # strip the prefix and split off the version id.
                    ver.append(self.create_elem('Key',
                                                i.split('/')[0][3:]))
                    ver.append(
                        self.create_elem('VersionId', i.split('/')[1]))
                    ver.append(self.create_elem('IsLastest', 'false'))
                    res.append(ver)
            if is_success(status) and is_success(status2):
                return Response(status=HTTP_OK,
                                content_type='application/xml',
                                body=self.elem2xmlbody(res))
            elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
                return self.get_err_response('AccessDenied')
            else:
                return self.get_err_response('InvalidURI')
        else:
            return self.get_err_response('InvalidURI')
def SSYNC(self, request):
    """
    Handle an SSYNC replication request.

    The receiver object is callable; invoking it yields the streamed
    replication dialog, which becomes the response body.
    """
    receiver = ssync_receiver.Receiver(self, request)
    return Response(app_iter=receiver())
def PUT(self, env, start_response):
    """
    Handle PUT Bucket request

    With no query string this creates the bucket (container); with a
    single sub-resource key (acl/cors/lifecycle/...) it stores that
    configuration as container metadata via a POST.
    """
    # checking params available
    AMZ_ACL = set(['HTTP_X_AMZ_GRANT_READ',
                   'HTTP_X_AMZ_GRANT_WRITE',
                   'HTTP_X_AMZ_GRANT_READ_ACP',
                   'HTTP_X_AMZ_GRANT_WRITE_ACP',
                   'HTTP_X_AMZ_GRANT_FULL_CONTROL'])
    qs = env.get('QUERY_STRING', '')
    args = urlparse.parse_qs(qs, 1)
    if not args:
        # Bucket creation path.
        if not self.validate_bucket_name(self.container_name):
            return self.get_err_response('InvalidBucketName')
        if not self.is_unique(self.container_name):
            return self.get_err_response('BucketAlreadyExists')
        # to create a new one
        if 'HTTP_X_AMZ_ACL' in env:
            # Canned ACL (private, public-read, ...) -> Swift ACLs.
            amz_acl = env['HTTP_X_AMZ_ACL']
            translated_acl = self.swift_acl_translate(canned=amz_acl)
            for header, value in translated_acl:
                env[header] = value
        elif AMZ_ACL & set(env.keys()):
            # Explicit grant headers -> Swift ACLs.
            acld = dict()
            if 'HTTP_X_AMZ_GRANT_READ' in env.keys():
                acld['read'] = self.keyvalue2dict(
                    env['HTTP_X_AMZ_GRANT_READ'])
            if 'HTTP_X_AMZ_GRANT_WRITE' in env.keys():
                acld['write'] = self.keyvalue2dict(
                    env['HTTP_X_AMZ_GRANT_WRITE'])
            if 'HTTP_X_AMZ_GRANT_FULL_CONTROL' in env.keys():
                acld['full'] = self.keyvalue2dict(
                    env['HTTP_X_AMZ_GRANT_FULL_CONTROL'])
            translated_acl = self.swift_acl_translate(acl=acld)
            for header, value in translated_acl:
                env[header] = value
        # modify env put to swift
        body_iter = self._app_call(env)
        status = self._get_status_int()
        if status != HTTP_CREATED:
            if status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
                return self.get_err_response('AccessDenied')
            elif status == HTTP_ACCEPTED:
                # Swift 202 = container already existed.
                return self.get_err_response('BucketAlreadyExists')
            else:
                return self.get_err_response('InvalidURI')
        resp = Response()
        resp.headers['Location'] = self.container_name
        resp.status = HTTP_OK
        return resp
    if len(args) > 1:
        return self.get_err_response('InvalidURI')
    # now args only 1
    action = args.keys().pop()
    if action == 'acl':
        # put acl
        acl = env['wsgi.input'].read()
        env['REQUEST_METHOD'] = 'POST'
        env['QUERY_STRING'] = ''
        env['HTTP_X_CONTAINER_META_ACL'] = quote(acl)
        body_iter = self._app_call(env)
        status = self._get_status_int()
        if is_success(status):
            resp = Response()
            resp.status = HTTP_OK
            return resp
        elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
            return self.get_err_response('AccessDenied')
        else:
            return self.get_err_response('InvalidURI')
    elif action == 'cors':
        # put cors
        # Flatten each CORSRule element list into a comma-joined
        # container-metadata header.
        bodye = self.xmlbody2elem(env['wsgi.input'].read())
        env['HTTP_X_CONTAINER_META_ACCESS_CONTROL_ALLOW_ORIGIN'] = \
            ','.join([i.text for i in bodye.xpath(
                '/CORSConfiguration/CORSRule/AllowedOrigin')])
        env['HTTP_X_CONTAINER_META_ACCESS_CONTROL_MAX_AGE'] = \
            ','.join([i.text for i in bodye.xpath(
                '/CORSConfiguration/CORSRule/MaxAgeSeconds')])
        env['HTTP_X_CONTAINER_META_ACCESS_CONTROL_EXPOSE_HEADERS'] = \
            ','.join([i.text for i in bodye.xpath(
                '/CORSConfiguration/CORSRule/ExposeHeader')])
        env['HTTP_X_CONTAINER_META_ACCESS_CONTROL_ALLOW_METHOD'] = \
            ','.join(i.text for i in bodye.xpath(
                '/CORSConfiguration/CORSRule/AllowedMethod'))
        env['QUERY_STRING'] = ''
        env['REQUEST_METHOD'] = 'POST'
        body_iter = self._app_call(env)
        status = self._get_status_int()
        if is_success(status):
            resp = Response()
            resp.headers['Location'] = self.container_name
            resp.status = HTTP_OK
            return resp
        elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
            return self.get_err_response('AccessDenied')
        else:
            return self.get_err_response('InvalidURI')
    elif action == 'lifecycle':
        # put lifecycle
        # Lifecycle rules conflict with object versioning.
        container_info = get_container_info(env, self.app)
        if container_info['versions']:
            return self.get_err_response('AccessDenied')
        bodye = self.xmlbody2elem(env['wsgi.input'].read())
        tat = bodye.xpath('/LifecycleConfiguration/Rule/Transition/Date')
        env['HTTP_X_CONTAINER_META_TRANS_AT'] = tat[0].text if tat else ''
        tafter = bodye.xpath(
            '/LifecycleConfiguration/Rule/Transition/Days')
        env['HTTP_X_CONTAINER_META_TRANS_AFTER'] = tafter[
            0].text if tafter else ''
        trans = bodye.xpath(
            '/LifecycleConfiguration/Rule/Transition/StorageClass')
        env['HTTP_X_CONTAINER_META_TRANS_CLASS'] = trans[
            0].text if trans else ''
        at = bodye.xpath('/LifecycleConfiguration/Rule/Expiration/Date')
        env['HTTP_X_CONTAINER_META_EXPIRATION_AT'] = at[
            0].text if at else ''
        after = bodye.xpath('/LifecycleConfiguration/Rule/Expiration/Days')
        env['HTTP_X_CONTAINER_META_EXPIRATION_AFTER'] = after[
            0].text if after else ''
        prefix = bodye.xpath('/LifecycleConfiguration/Rule/Prefix')
        env['HTTP_X_CONTAINER_META_EXPIRATION_PREFIX'] = prefix[
            0].text if prefix else ''
        stat = bodye.xpath('/LifecycleConfiguration/Rule/Status')
        env['HTTP_X_CONTAINER_META_EXPIRATION_STATUS'] = stat[
            0].text if stat else ''
        env['REQUEST_METHOD'] = 'POST'
        env['QUERY_STRING'] = ''
        body_iter = self._app_call(env)
        status = self._get_status_int()
        if is_success(status):
            resp = Response()
            resp.status = HTTP_OK
            return resp
        elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
            return self.get_err_response('AccessDenied')
        else:
            return self.get_err_response('InvalidURI')
    elif action == 'policy':
        # put policy
        json = env['wsgi.input'].read()
        env['REQUEST_METHOD'] = 'POST'
        env['QUERY_STRING'] = ''
        env['HTTP_X_CONTAINER_META_POLICY'] = quote(json)
        body_iter = self._app_call(env)
        status = self._get_status_int()
        if is_success(status):
            resp = Response()
            resp.status = HTTP_OK
            return resp
        elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
            return self.get_err_response('AccessDenied')
        else:
            return self.get_err_response('InvalidURI')
    elif action == 'logging':
        # put logging
        env['REQUEST_METHOD'] = 'POST'
        env['QUERY_STRING'] = ''
        bodye = self.xmlbody2elem(env['wsgi.input'].read())
        target = bodye.xpath(
            '/BucketLoggingStatus/LoggingEnabled/TargetBucket')
        if target:
            env['HTTP_X_CONTAINER_META_LOGGING_TARGET'] = target[0].text
            prefix = bodye.xpath(
                '/BucketLoggingStatus/LoggingEnabled/TargetPrefix')
            if prefix:
                env['HTTP_X_CONTAINER_META_LOGGING_PREFIX'] = prefix[
                    0].text
        else:
            # Empty metadata values clear any previous logging config.
            env['HTTP_X_CONTAINER_META_LOGGING_TARGET'] = ''
            env['HTTP_X_CONTAINER_META_LOGGING_PREFIX'] = ''
        body_iter = self._app_call(env)
        status = self._get_status_int()
        if is_success(status):
            resp = Response()
            resp.status = HTTP_OK
            return resp
        elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
            return self.get_err_response('AccessDenied')
        else:
            return self.get_err_response('InvalidURI')
    elif action == 'notification':
        # put it
        bodye = self.xmlbody2elem(env['wsgi.input'].read())
        topic = bodye.xpath(
            '/NotificationConfiguration/TopicConfiguration/Topic')
        event = bodye.xpath(
            '/NotificationConfiguration/TopicConfiguration/Event')
        if not topic or not event:
            return self.get_err_response('InvalidArgument')
        env['REQUEST_METHOD'] = 'POST'
        env['QUERY_STRING'] = ''
        env['HTTP_CONTAINER_META_NOTI_TOPIC'] = topic[0].text
        env['HTTP_CONTAINER_META_NOTI_EVENT'] = event[0].text
        # NOTE(review): 'body' is never assigned in this branch (the
        # raw input was consumed by xmlbody2elem above), so this line
        # raises NameError at runtime — confirm and fix.
        env['HTTP_X_CONTAINER_META_NOTI'] = quote(body)
        body_iter = self._app_call(env)
        status = self._get_status_int()
        if is_success(status):
            resp = Response()
            resp.status = HTTP_OK
            return resp
        elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
            return self.get_err_response('AccessDenied')
        else:
            return self.get_err_response('InvalidURI')
    elif action == 'tagging':
        # put tagging
        bodye = self.xmlbody2elem(env['wsgi.input'].read())
        for tag in bodye.xpath('/Tagging/TagSet/Tag'):
            key = tag.xpath('Key')[0].text
            # Stored as key+value concatenated; the GET tagging branch
            # splits it back apart at len(key).
            value = tag.xpath('Key')[0].text + tag.xpath('Value')[0].text
            env['HTTP_X_CONTAINER_META_TAG_%s' % key.upper()] = value
        env['REQUEST_METHOD'] = 'POST'
        env['QUERY_STRING'] = ''
        body_iter = self._app_call(env)
        status = self._get_status_int()
        if is_success(status):
            resp = Response()
            resp.status = HTTP_NO_CONTENT
            return resp
        elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
            return self.get_err_response('AccessDenied')
        else:
            return self.get_err_response('InvalidURI')
    elif action == 'requestPayment':
        # put it
        bodye = self.xmlbody2elem(env['wsgi.input'].read())
        target = bodye.xpath('/RequestPaymentConfiguration/Payer')
        if not target or target[0].text not in ('BucketOwner',
                                                'Requester'):
            return self.get_err_response('InvalidArgument')
        env['REQUEST_METHOD'] = 'POST'
        env['QUERY_STRING'] = ''
        # NOTE(review): 'body' is never assigned in this branch either
        # — NameError at runtime; confirm and fix.
        env['HTTP_X_CONTAINER_META_PAYMENT'] = quote(body)
        body_iter = self._app_call(env)
        status = self._get_status_int()
        if is_success(status):
            resp = Response()
            resp.status = HTTP_OK
            return resp
        elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
            return self.get_err_response('AccessDenied')
        else:
            return self.get_err_response('InvalidURI')
    elif action == 'versioning':
        bodye = self.xmlbody2elem(env['wsgi.input'].read())
        status = bodye.xpath('/VersioningConfiguration/Status')
        if status:
            status = status[0].text
        env['REQUEST_METHOD'] = 'POST'
        # Point X-Versions-Location at the shadow container when
        # enabling; clear it when suspending.
        env['HTTP_X_VERSIONS_LOCATION'] = self.version_name(
            self.container_name) if status == 'Enabled' else ''
        env['QUERY_STRING'] = ''
        body_iter = self._app_call(env)
        status = self._get_status_int()
        # Also create the shadow container that will hold old versions.
        path = '/v1/AUTH_%s/%s' % (self.account_name,
                                   self.version_name(self.container_name))
        env2 = copyenv(env, method='PUT', path=path, query_string='')
        body_iter2 = self._app_call(env2)
        status2 = self._get_status_int()
        if is_success(status) and is_success(status2):
            resp = Response()
            resp.status = HTTP_OK
            return resp
        elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
            return self.get_err_response('AccessDenied')
        else:
            return self.get_err_response('InvalidURI')
    elif action == 'website':
        # put website
        body = env['wsgi.input'].read()
        env['REQUEST_METHOD'] = 'POST'
        env['QUERY_STRING'] = ''
        env['HTTP_X_CONTAINER_META_WEBSITE'] = quote(body)
        body_iter = self._app_call(env)
        status = self._get_status_int()
        if is_success(status):
            resp = Response()
            resp.status = HTTP_OK
            return resp
        elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
            return self.get_err_response('AccessDenied')
        else:
            return self.get_err_response('InvalidURI')
    else:
        return self.get_err_response('InvalidURI')
def __call__(self, env, start_response):
    """
    Fake WSGI application for tests: returns a canned response chosen
    by the request path's leading component, counting every call.
    """
    self.calls += 1
    path = env['PATH_INFO']
    method = env['REQUEST_METHOD']

    def respond(status):
        # Build and immediately invoke a canned response.
        return Response(status=status)(env, start_response)

    if path.startswith('/unauth/'):
        if path.endswith('/c/f_ok'):
            return respond('204 No Content')
        return respond(401)
    if path.startswith('/create_cont/'):
        if method == 'HEAD':
            return respond('404 Not Found')
        return respond('201 Created')
    if path.startswith('/create_cont_fail/'):
        if method == 'HEAD':
            return respond('403 Forbidden')
        return respond('404 Not Found')
    if path.startswith('/create_obj_unauth/'):
        if path.endswith('/cont'):
            return respond('201 Created')
        return respond(401)
    if path.startswith('/tar_works/'):
        if len(path) > self.max_pathlen:
            return respond('400 Bad Request')
        return respond('201 Created')
    if path.startswith('/tar_works_cont_head_fail/'):
        if method == 'HEAD':
            return respond('404 Not Found')
        if len(path) > 100:
            return respond('400 Bad Request')
        return respond('201 Created')
    if path.startswith('/delete_works/') and method == 'DELETE':
        self.delete_paths.append(path)
        if len(path) > self.max_pathlen:
            return respond('400 Bad Request')
        if path.endswith('404'):
            return respond('404 Not Found')
        if path.endswith('badutf8'):
            return respond('412 Precondition Failed')
        return respond('204 No Content')
    if path.startswith('/delete_cont_fail/'):
        return respond('409 Conflict')
    if path.startswith('/broke/'):
        return respond('500 Internal Error')
    if path.startswith('/delete_cont_success_after_attempts/'):
        # Fail with 409 until the configured number of attempts is
        # reached, then succeed.
        if self.del_cont_cur_call < self.del_cont_total_calls:
            self.del_cont_cur_call += 1
            return respond('409 Conflict')
        return respond('204 No Content')
def _GET_using_cache(self, req):
    """Serve a container GET for shard ranges, preferring cached results.

    Attempts to satisfy the request from swift.infocache / memcache when
    the container's sharding state is 'sharded'; otherwise forwards to
    the backend, and opportunistically caches any complete shard-range
    listing the backend returns for use by later requests.

    :param req: an instance of the incoming request (swob Request --
        presumed from usage of ``.headers``/``.environ``/``.path_qs``).
    :returns: a Response object, either built here from cached shard
        ranges or obtained from ``self._GETorHEAD_from_backend``.
    """
    # It may be possible to fulfil the request from cache: we only reach
    # here if request record_type is 'shard' or 'auto', so if the container
    # state is 'sharded' then look for cached shard ranges. However, if
    # X-Newest is true then we always fetch from the backend servers.
    get_newest = config_true_value(req.headers.get('x-newest', False))
    if get_newest:
        self.app.logger.debug(
            'Skipping shard cache lookup (x-newest) for %s', req.path_qs)
        info = None
    else:
        # consult infocache/memcache for container info first
        info = _get_info_from_caches(self.app, req.environ,
                                     self.account_name,
                                     self.container_name)
    if (info and is_success(info['status']) and
            info.get('sharding_state') == 'sharded'):
        # container is sharded so we may have the shard ranges cached
        headers = headers_from_container_info(info)
        if headers:
            # only use cached values if all required headers available
            infocache = req.environ.setdefault('swift.infocache', {})
            memcache = cache_from_env(req.environ, True)
            cache_key = get_cache_key(self.account_name,
                                      self.container_name,
                                      shard='listing')
            # per-request infocache takes precedence over memcache
            cached_ranges = infocache.get(cache_key)
            if cached_ranges is None and memcache:
                cached_ranges = memcache.get(cache_key)
            if cached_ranges is not None:
                # promote memcache hit into infocache for this request
                infocache[cache_key] = tuple(cached_ranges)
                # shard ranges can be returned from cache
                self.app.logger.debug('Found %d shards in cache for %s',
                                      len(cached_ranges), req.path_qs)
                headers.update({'x-backend-record-type': 'shard',
                                'x-backend-cached-results': 'true'})
                shard_range_body = self._filter_resp_shard_ranges(
                    req, cached_ranges)
                # mimic GetOrHeadHandler.get_working_response...
                # note: server sets charset with content_type but proxy
                # GETorHEAD_base does not, so don't set it here either
                resp = Response(request=req, body=shard_range_body)
                update_headers(resp, headers)
                resp.last_modified = math.ceil(
                    float(headers['x-put-timestamp']))
                resp.environ['swift_x_timestamp'] = headers.get(
                    'x-timestamp')
                resp.accept_ranges = 'bytes'
                resp.content_type = 'application/json'
                return resp

    # The request was not fulfilled from cache so send to the backend
    # server, but instruct the backend server to ignore name constraints in
    # request params if returning shard ranges so that the response can
    # potentially be cached. Only do this if the container state is
    # 'sharded'. We don't attempt to cache shard ranges for a 'sharding'
    # container as they may include the container itself as a 'gap filler'
    # for shard ranges that have not yet cleaved; listings from 'gap
    # filler' shard ranges are likely to become stale as the container
    # continues to cleave objects to its shards and caching them is
    # therefore more likely to result in stale or incomplete listings on
    # subsequent container GETs.
    req.headers['x-backend-override-shard-name-filter'] = 'sharded'
    resp = self._GETorHEAD_from_backend(req)

    sharding_state = resp.headers.get(
        'x-backend-sharding-state', '').lower()
    resp_record_type = resp.headers.get(
        'x-backend-record-type', '').lower()
    complete_listing = config_true_value(resp.headers.pop(
        'x-backend-override-shard-name-filter', False))
    # given that we sent 'x-backend-override-shard-name-filter=sharded' we
    # should only receive back 'x-backend-override-shard-name-filter=true'
    # if the sharding state is 'sharded', but check them both anyway...
    if (resp_record_type == 'shard' and sharding_state == 'sharded' and
            complete_listing):
        # backend returned unfiltered listing state shard ranges so parse
        # them and replace response body with filtered listing
        cache_key = get_cache_key(self.account_name, self.container_name,
                                  shard='listing')
        data = self._parse_listing_response(req, resp)
        backend_shard_ranges = self._parse_shard_ranges(req, data, resp)
        if backend_shard_ranges is not None:
            cached_ranges = [dict(sr) for sr in backend_shard_ranges]
            if resp.headers.get('x-backend-sharding-state') == 'sharded':
                # cache in infocache even if no shard ranges returned; this
                # is unexpected but use that result for this request
                infocache = req.environ.setdefault('swift.infocache', {})
                infocache[cache_key] = tuple(cached_ranges)
                memcache = cache_from_env(req.environ, True)
                if memcache and cached_ranges:
                    # cache in memcache only if shard ranges as expected
                    self.app.logger.debug('Caching %d shards for %s',
                                          len(cached_ranges), req.path_qs)
                    memcache.set(
                        cache_key, cached_ranges,
                        time=self.app.recheck_listing_shard_ranges)
            # filter returned shard ranges according to request constraints
            resp.body = self._filter_resp_shard_ranges(req, cached_ranges)
    return resp