def zip_get_content_response(fp, files, boundary=_boundary):
    """Serve one or more members of a zip archive as an HTTP response.

    :param fp: file-like object containing the zip archive
    :param files: list of member names to serve
    :param boundary: MIME multipart boundary (defaults to module _boundary)
    :returns: a single attachment Response when one file is requested,
              otherwise a multipart/mixed Response with one part per file
    :raises HTTPNotFound: if any requested member is missing from the zip
    :raises HTTPRequestEntityTooLarge: if the combined uncompressed size
        exceeds MAX_CONTENT_SIZE
    """
    def part_with_attachment(fn):
        # Build one attachment part; reads the member from the enclosing
        # zip_file (bound by the `with` below before this is ever called).
        part = Response(headers={
            'Content-Type': 'application/octet-stream',
            'Content-Disposition': 'attachment; filename=%s' %
            fn.encode('utf-8'),
        })
        part.body = zip_file.read(fn)
        return part

    with zipfile.ZipFile(fp) as zip_file:
        try:
            # getinfo raises KeyError for an unknown member name
            total_size = sum(zip_file.getinfo(fn).file_size for fn in files)
        except KeyError:
            raise HTTPNotFound(body='File not found in the zip\r\n')
        if total_size > MAX_CONTENT_SIZE:
            raise HTTPRequestEntityTooLarge()
        if len(files) == 1:
            resp = part_with_attachment(files[0])
        else:
            resp = Response(
                content_type='multipart/mixed; boundary=%s' % boundary)
            body = io.BytesIO()
            for fn in files:
                part_resp = part_with_attachment(fn)
                body.write('\r\n--%s\r\n' % boundary)
                body.write(dump_response(part_resp))
            # closing boundary after the last part
            body.write('\r\n--%s--\r\n' % boundary)
            resp.body = body.getvalue()
    return resp
def best_response(self, req, statuses, reasons, bodies, server_type,
                  etag=None):
    """
    Given a list of responses from several servers, choose the best to
    return to the API.

    :param req: swob.Request object
    :param statuses: list of statuses returned
    :param reasons: list of reasons for each status
    :param bodies: bodies of each response
    :param server_type: type of server the responses came from
    :param etag: etag
    :returns: swob.Response object with the correct status, body, etc. set
    """
    resp = Response(request=req)
    if len(statuses):
        for hundred in (HTTP_OK, HTTP_MULTIPLE_CHOICES, HTTP_BAD_REQUEST):
            hstatuses = [s for s in statuses
                         if hundred <= s < hundred + 100]
            # strict majority of responses in this status class wins
            if len(hstatuses) > len(statuses) / 2:
                status = max(hstatuses)
                status_index = statuses.index(status)
                resp.status = "%s %s" % (status, reasons[status_index])
                resp.body = bodies[status_index]
                if etag:
                    resp.headers["etag"] = etag.strip('"')
                return resp
    # no status class reached a majority: report service unavailable
    self.app.logger.error(_("%(type)s returning 503 for %(statuses)s"),
                          {"type": server_type, "statuses": statuses})
    resp.status = "503 Internal Server Error"
    return resp
def best_response(self, req, statuses, reasons, bodies, server_type,
                  etag=None, headers=None):
    """
    Given a list of responses from several servers, choose the best to
    return to the API.

    :param req: swob.Request object
    :param statuses: list of statuses returned
    :param reasons: list of reasons for each status
    :param bodies: bodies of each response
    :param server_type: type of server the responses came from
    :param etag: etag
    :param headers: headers of each response
    :returns: swob.Response object with the correct status, body, etc. set
    """
    resp = Response(request=req)
    if len(statuses):
        for hundred in (HTTP_OK, HTTP_MULTIPLE_CHOICES, HTTP_BAD_REQUEST):
            hstatuses = \
                [s for s in statuses if hundred <= s < hundred + 100]
            # quorum (not strict majority) of responses in this class wins
            if len(hstatuses) >= quorum_size(len(statuses)):
                status = max(hstatuses)
                status_index = statuses.index(status)
                resp.status = '%s %s' % (status, reasons[status_index])
                resp.body = bodies[status_index]
                if headers:
                    update_headers(resp, headers[status_index])
                if etag:
                    resp.headers['etag'] = etag.strip('"')
                return resp
    # no status class reached quorum: report service unavailable
    self.app.logger.error(_('%(type)s returning 503 for %(statuses)s'),
                          {'type': server_type, 'statuses': statuses})
    resp.status = '503 Internal Server Error'
    return resp
def GET(self, request):
    """Handle HTTP GET requests for the Swift Object Server.

    This variant fetches the object body from an external store named by
    the X-Storage-Container header (format presumably 'provider/container'
    -- TODO confirm against callers) via get_file().
    """
    device, partition, account, container, obj, policy = \
        get_name_and_placement(request, 5, 5, True)
    keep_cache = self.keep_cache_private or (
        'X-Auth-Token' not in request.headers and
        'X-Storage-Token' not in request.headers)
    cloud_container_info = request.headers.get('X-Storage-Container')
    cloud_container = cloud_container_info.strip().split('/')
    obj_iter = get_file(cloud_container[0], cloud_container[1], obj)
    conditional_etag = md5()
    # fixed idiom: compare to None with `is`, not `==`
    if obj_iter is None:
        return HTTPNotFound(request=request)
    response = Response(
        app_iter=obj_iter, request=request,
        conditional_response=True, conditional_etag=conditional_etag)
    response.headers['Content-Type'] = 'application/octet-stream'
    # metadata-derived headers not yet wired up for the cloud path:
    #response.etag = metadata['ETag']
    #response.last_modified = math.ceil(float(file_x_ts))
    #response.content_length = obj_size
    #response.headers['X-Timestamp'] = file_x_ts.normal
    #response.headers['X-Backend-Timestamp'] = file_x_ts.internal
    resp = request.get_response(response)
    return resp
def PUT(self, env, start_response):
    """Handle PUT Bucket request.

    Translates S3-style bucket creation (and ACL setting) into the
    corresponding Swift container operations, mapping Swift status codes
    back to S3 error responses.
    """
    if 'HTTP_X_AMZ_ACL' in env:
        amz_acl = env['HTTP_X_AMZ_ACL']
        # Translate the Amazon ACL to something that can be
        # implemented in Swift, 501 otherwise. Swift uses POST
        # for ACLs, whereas S3 uses PUT.
        del env['HTTP_X_AMZ_ACL']
        if 'QUERY_STRING' in env:
            del env['QUERY_STRING']
        translated_acl = swift_acl_translate(amz_acl)
        if translated_acl == 'Unsupported':
            return get_err_response('Unsupported')
        elif translated_acl == 'InvalidArgument':
            return get_err_response('InvalidArgument')
        for header, acl in translated_acl:
            env[header] = acl
    if 'CONTENT_LENGTH' in env:
        content_length = env['CONTENT_LENGTH']
        try:
            content_length = int(content_length)
        except (ValueError, TypeError):
            return get_err_response('InvalidArgument')
        if content_length < 0:
            return get_err_response('InvalidArgument')
    if 'QUERY_STRING' in env:
        args = dict(urlparse.parse_qsl(env['QUERY_STRING'], 1))
        if 'acl' in args:
            # We very likely have an XML-based ACL request.
            body = env['wsgi.input'].readline().decode()
            translated_acl = swift_acl_translate(body, xml=True)
            if translated_acl == 'Unsupported':
                return get_err_response('Unsupported')
            elif translated_acl == 'InvalidArgument':
                return get_err_response('InvalidArgument')
            for header, acl in translated_acl:
                env[header] = acl
            # Swift applies ACLs via POST on an existing container
            env['REQUEST_METHOD'] = 'POST'
    body_iter = self._app_call(env)
    status = self._get_status_int()
    if status != HTTP_CREATED and status != HTTP_NO_CONTENT:
        if status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
            return get_err_response('AccessDenied')
        elif status == HTTP_ACCEPTED:
            # 202 from Swift means the container already existed
            return get_err_response('BucketAlreadyExists')
        else:
            return get_err_response('InvalidURI')
    resp = Response()
    resp.headers['Location'] = self.container_name
    resp.status = HTTP_OK
    return resp
def page_obj_list(self, req, storage_url, token, template=None): """ """ if template is None: tmpl = self.tmpl path = urlparse(self.del_prefix(req.url)).path if len(path.split('/')) <= 2: path = urlparse(storage_url).path vrs, acc, cont, obj = split_path(path, 1, 4, True) lang = self.get_lang(req) base = self.add_prefix(urlparse(storage_url).path) status = self.token_bank.get(token, None) msg = status.get('msg', '') if status else '' params = req.params_alt() limit = params.get('limit', self.items_per_page) marker = params.get('marker', '') end_marker = params.get('end_marker', '') prefix = params.get('prefix', '') delete_confirm = quote(params.get('delete_confirm', '')) acl_edit = quote(params.get('acl_edit', '')) meta_edit = quote(params.get('meta_edit', '')) # whole container list try: whole_cont_list = self._get_whole_cont_list(storage_url, token) except ClientException, err: resp = Response(charset='utf8') resp.status = err.http_status return resp
def OPTIONS(self, req):
    """
    Base handler for OPTIONS requests

    :param req: swob.Request object
    :returns: swob.Response object
    """
    # Prepare the default response
    headers = {'Allow': ', '.join(self.allowed_methods)}
    resp = Response(status=200, request=req, headers=headers)
    # If this isn't a CORS pre-flight request then return now
    req_origin_value = req.headers.get('Origin', None)
    if not req_origin_value:
        return resp
    # This is a CORS preflight request so check it's allowed
    try:
        container_info = \
            self.container_info(self.account_name, self.container_name,
                                req)
    except AttributeError:
        # This should only happen for requests to the Account. A future
        # change could allow CORS requests to the Account level as well.
        return resp
    cors = container_info.get('cors', {})
    # If the CORS origin isn't allowed return a 401
    if not self.is_origin_allowed(cors, req_origin_value) or (
            req.headers.get('Access-Control-Request-Method') not in
            self.allowed_methods):
        resp.status = HTTP_UNAUTHORIZED
        return resp
    # Allow all headers requested in the request. The CORS
    # specification does leave the door open for this, as mentioned in
    # http://www.w3.org/TR/cors/#resource-preflight-requests
    # Note: Since the list of headers can be unbounded
    # simply returning headers can be enough.
    allow_headers = set()
    if req.headers.get('Access-Control-Request-Headers'):
        allow_headers.update(
            list_from_csv(req.headers['Access-Control-Request-Headers']))
    # Populate the response with the CORS preflight headers
    if cors.get('allow_origin', '').strip() == '*':
        headers['access-control-allow-origin'] = '*'
    else:
        headers['access-control-allow-origin'] = req_origin_value
    if cors.get('max_age') is not None:
        headers['access-control-max-age'] = cors.get('max_age')
    headers['access-control-allow-methods'] = \
        ', '.join(self.allowed_methods)
    if allow_headers:
        headers['access-control-allow-headers'] = ', '.join(allow_headers)
    resp.headers = headers
    return resp
def __call__(self, env, start_response):
    """WSGI entry point: always answer with a fixed 'FAKE APP' body.

    Used as a stub backend; self.headers supplies any extra response
    headers.
    """
    self.req = Request(env)
    resp = Response(request=self.req, body=b'FAKE APP',
                    headers=self.headers)
    # like webob, middlewares in the pipeline may rewrite
    # location header from relative to absolute
    resp.location = resp.absolute_location()
    return resp(env, start_response)
def part_with_attachment(fn):
    """Build a single multipart attachment part for one zip member.

    NOTE(review): reads from a free variable ``zip_file`` — this function
    presumably lives inside a scope where an open ZipFile is bound;
    verify against the enclosing code.

    :param fn: member name within the zip archive
    :returns: Response with octet-stream attachment headers and the
              member's bytes as its body
    """
    part = Response(headers={
        'Content-Type': 'application/octet-stream',
        'Content-Disposition': 'attachment; filename=%s' %
        fn.encode('utf-8'),
    })
    part.body = zip_file.read(fn)
    return part
def get_err_response(self, msg="Unable to process requested file"):
    """Log *msg* and build a 400 Bad Request XML error response.

    :param msg: human-readable error message (also logged)
    :returns: Response with text/xml body carrying the code and message
    """
    self.logger.error(msg)
    resp = Response(content_type="text/xml")
    resp.status = HTTP_BAD_REQUEST
    resp.body = (
        '<?xml version="1.0" encoding="UTF-8"?>\r\n<Error>\r\n  '
        "<Code>%s</Code>\r\n  <Message>%s</Message>\r\n</Error>\r\n"
        % (HTTP_BAD_REQUEST, msg)
    )
    return resp
def HEAD(self, request):
    """Handle HTTP HEAD requests for the Swift Object Server."""
    device, partition, account, container, obj, policy_idx = \
        get_name_and_placement(request, 5, 5, True)
    try:
        disk_file = self.get_diskfile(device, partition, account,
                                      container, obj,
                                      policy_idx=policy_idx)
    except DiskFileDeviceUnavailable:
        return HTTPInsufficientStorage(drive=device, request=request)
    try:
        metadata = disk_file.read_metadata()
    except DiskFileXattrNotSupported:
        return HTTPInsufficientStorage(drive=device, request=request)
    except (DiskFileNotExist, DiskFileQuarantined) as e:
        headers = {}
        if hasattr(e, "timestamp"):
            headers["X-Backend-Timestamp"] = e.timestamp.internal
        return HTTPNotFound(request=request, headers=headers,
                            conditional_response=True)
    response = Response(request=request, conditional_response=True)
    response.headers["Content-Type"] = metadata.get(
        "Content-Type", "application/octet-stream")
    # copy through user/sysmeta and explicitly allowed headers only
    for key, value in metadata.iteritems():
        if is_sys_or_user_meta("object", key) or \
                key.lower() in self.allowed_headers:
            response.headers[key] = value
    response.etag = metadata["ETag"]
    ts = Timestamp(metadata["X-Timestamp"])
    response.last_modified = math.ceil(float(ts))
    # Needed for container sync feature
    response.headers["X-Timestamp"] = ts.normal
    response.headers["X-Backend-Timestamp"] = ts.internal
    response.content_length = int(metadata["Content-Length"])
    try:
        response.content_encoding = metadata["Content-Encoding"]
    except KeyError:
        pass
    return response
def GET(self, request): """Handle HTTP GET requests for the Swift Object Server.""" device, partition, account, container, obj, policy = \ get_name_and_placement(request, 5, 5, True) print 'request',request print 'device, partition, account, container,obj, policy', device,partition,account,container,obj,policy keep_cache = self.keep_cache_private or ( 'X-Auth-Token' not in request.headers and 'X-Storage-Token' not in request.headers) try: disk_file = self.get_diskfile( device, partition, account, container, obj, policy=policy) print 'disk_file',disk_file except DiskFileDeviceUnavailable: return HTTPInsufficientStorage(drive=device, request=request) try: with disk_file.open(): metadata = disk_file.get_metadata() obj_size = int(metadata['Content-Length']) file_x_ts = Timestamp(metadata['X-Timestamp']) keep_cache = (self.keep_cache_private or ('X-Auth-Token' not in request.headers and 'X-Storage-Token' not in request.headers)) conditional_etag = None if 'X-Backend-Etag-Is-At' in request.headers: conditional_etag = metadata.get( request.headers['X-Backend-Etag-Is-At']) response = Response( app_iter=disk_file.reader(keep_cache=keep_cache), request=request, conditional_response=True, conditional_etag=conditional_etag) response.headers['Content-Type'] = metadata.get( 'Content-Type', 'application/octet-stream') for key, value in metadata.iteritems(): if is_sys_or_user_meta('object', key) or \ key.lower() in self.allowed_headers: response.headers[key] = value response.etag = metadata['ETag'] response.last_modified = math.ceil(float(file_x_ts)) response.content_length = obj_size try: response.content_encoding = metadata[ 'Content-Encoding'] except KeyError: pass response.headers['X-Timestamp'] = file_x_ts.normal response.headers['X-Backend-Timestamp'] = file_x_ts.internal resp = request.get_response(response) except DiskFileXattrNotSupported: return HTTPInsufficientStorage(drive=device, request=request) except (DiskFileNotExist, DiskFileQuarantined) as e: headers = {} if 
hasattr(e, 'timestamp'): headers['X-Backend-Timestamp'] = e.timestamp.internal resp = HTTPNotFound(request=request, headers=headers, conditional_response=True) print 'resp',resp return resp
def HEAD(self, request):
    """Handle HTTP HEAD requests for the Swift on File object server"""
    device, partition, account, container, obj, policy = \
        get_name_and_placement(request, 5, 5, True)
    # Get DiskFile
    try:
        disk_file = self.get_diskfile(device, partition, account,
                                      container, obj, policy=policy)
    except DiskFileDeviceUnavailable:
        return HTTPInsufficientStorage(drive=device, request=request)
    # Read DiskFile metadata
    try:
        disk_file.open()
        metadata = disk_file.get_metadata()
    except (DiskFileNotExist, DiskFileQuarantined) as e:
        headers = {}
        if hasattr(e, 'timestamp'):
            headers['X-Backend-Timestamp'] = e.timestamp.internal
        # BUGFIX: keyword was misspelled 'conditional_respose', so the
        # 404 was never treated as a conditional response
        return HTTPNotFound(request=request, headers=headers,
                            conditional_response=True)
    # Create and populate our response
    response = Response(request=request, conditional_response=True)
    response.headers['Content-Type'] = \
        metadata.get('Content-Type', 'application/octet-stream')
    for key, value in metadata.iteritems():
        if is_sys_or_user_meta('object', key) or key.lower() in \
                self.allowed_headers:
            response.headers[key] = value
    response.etag = metadata['ETag']
    ts = Timestamp(metadata['X-Timestamp'])
    response.last_modified = math.ceil(float(ts))
    # Needed for container sync feature
    response.headers['X-Timestamp'] = ts.normal
    response.headers['X-Backend-Timestamp'] = ts.internal
    response.content_length = int(metadata['Content-Length'])
    try:
        response.content_encoding = metadata['Content-Encoding']
    except KeyError:
        pass
    # (HPSS) Inject HPSS xattr metadata into headers
    want_hpss_metadata = request.headers.get('X-HPSS-Get-Metadata',
                                             False)
    if config_true_value(want_hpss_metadata):
        try:
            hpss_headers = disk_file.read_hpss_system_metadata()
            response.headers.update(hpss_headers)
        except SwiftOnFileSystemIOError:
            return HTTPServiceUnavailable(request=request)
    if 'X-Object-Sysmeta-Update-Container' in response.headers:
        self._sof_container_update(request, response)
        response.headers.pop('X-Object-Sysmeta-Update-Container')
    return response
def OPTIONS(self, req):
    """
    Base handler for OPTIONS requests

    :param req: swob.Request object
    :returns: swob.Response object
    """
    # Prepare the default response
    headers = {'Allow': ', '.join(self.allowed_methods)}
    resp = Response(status=200, request=req, headers=headers)
    # If this isn't a CORS pre-flight request then return now
    req_origin_value = req.headers.get('Origin', None)
    if not req_origin_value:
        return resp
    # This is a CORS preflight request so check it's allowed
    try:
        container_info = \
            self.container_info(self.account_name, self.container_name)
    except AttributeError:
        # This should only happen for requests to the Account. A future
        # change could allow CORS requests to the Account level as well.
        return resp
    cors = container_info.get('cors', {})
    # If the CORS origin isn't allowed return a 401
    if not self.is_origin_allowed(cors, req_origin_value) or (
            req.headers.get('Access-Control-Request-Method') not in
            self.allowed_methods):
        resp.status = HTTP_UNAUTHORIZED
        return resp
    # Always allow the x-auth-token header. This ensures
    # clients can always make a request to the resource.
    allow_headers = set()
    if cors.get('allow_headers'):
        # container metadata stores allowed headers space-separated
        allow_headers.update(
            [a.strip() for a in cors['allow_headers'].split(' ')
             if a.strip()])
    allow_headers.add('x-auth-token')
    # Populate the response with the CORS preflight headers
    headers['access-control-allow-origin'] = req_origin_value
    if cors.get('max_age') is not None:
        headers['access-control-max-age'] = cors.get('max_age')
    headers['access-control-allow-methods'] = \
        ', '.join(self.allowed_methods)
    headers['access-control-allow-headers'] = ', '.join(allow_headers)
    resp.headers = headers
    return resp
def get_err_response(code):
    """
    Given an HTTP response code, create a properly formatted xml error
    response

    :param code: error code
    :returns: webob.response object
    """
    # S3 error code -> (HTTP status, message) mapping
    error_table = {
        'AccessDenied':
            (HTTP_FORBIDDEN, 'Access denied'),
        'BucketAlreadyExists':
            (HTTP_CONFLICT, 'The requested bucket name is not available'),
        'BucketNotEmpty':
            (HTTP_CONFLICT, 'The bucket you tried to delete is not empty'),
        'InvalidArgument':
            (HTTP_BAD_REQUEST, 'Invalid Argument'),
        'InvalidBucketName':
            (HTTP_BAD_REQUEST, 'The specified bucket is not valid'),
        'InvalidURI':
            (HTTP_BAD_REQUEST, 'Could not parse the specified URI'),
        'InvalidDigest':
            (HTTP_BAD_REQUEST, 'The Content-MD5 you specified was invalid'),
        'BadDigest':
            (HTTP_BAD_REQUEST,
             'The Content-Length you specified was invalid'),
        'NoSuchBucket':
            (HTTP_NOT_FOUND, 'The specified bucket does not exist'),
        'SignatureDoesNotMatch':
            (HTTP_FORBIDDEN, 'The calculated request signature does not '
             'match your provided one'),
        'RequestTimeTooSkewed':
            (HTTP_FORBIDDEN, 'The difference between the request time and the'
             ' current time is too large'),
        'NoSuchKey':
            (HTTP_NOT_FOUND, 'The resource you requested does not exist'),
        'Unsupported':
            (HTTP_NOT_IMPLEMENTED, 'The feature you requested is not yet'
             ' implemented'),
        'MissingContentLength':
            (HTTP_LENGTH_REQUIRED, 'Length Required'),
        'ServiceUnavailable':
            (HTTP_SERVICE_UNAVAILABLE, 'Please reduce your request rate'),
        'IllegalVersioningConfigurationException':
            (HTTP_BAD_REQUEST,
             'The specified versioning configuration invalid'),
        'MalformedACLError':
            (HTTP_BAD_REQUEST, 'The XML you provided was not well-formed '
             'or did not validate against our published schema'),
    }
    resp = Response(content_type='text/xml')
    resp.status = error_table[code][0]
    resp.body = '<?xml version="1.0" encoding="UTF-8"?>\r\n<Error>\r\n  ' \
        '<Code>%s</Code>\r\n  <Message>%s</Message>\r\n</Error>\r\n' \
        % (code, error_table[code][1])
    return resp
def test_add_acls_impossible_cases(self):
    # For test coverage: verify that defensive coding does defend, in cases
    # that shouldn't arise naturally

    # add_acls should do nothing if REQUEST_METHOD isn't HEAD/GET/PUT/POST
    resp = Response()
    controller = proxy_server.AccountController(self.app, 'a')
    resp.environ['PATH_INFO'] = '/a'
    resp.environ['REQUEST_METHOD'] = 'OPTIONS'
    controller.add_acls_from_sys_metadata(resp)
    self.assertEqual(1, len(resp.headers))  # we always get Content-Type
    self.assertEqual(2, len(resp.environ))
def HEAD(self, request):
    """Handle HTTP HEAD requests for the Swift Object Server."""
    device, partition, account, container, obj = \
        self._parse_path(request)
    try:
        disk_file = self._diskfile(device, partition, account, container,
                                   obj)
    except DiskFileDeviceUnavailable:
        return HTTPInsufficientStorage(drive=device, request=request)
    if disk_file.is_deleted() or disk_file.is_expired():
        return HTTPNotFound(request=request)
    try:
        file_size = disk_file.get_data_file_size()
    except (DiskFileError, DiskFileNotExist):
        # on-disk size mismatch: quarantine the object and 404
        disk_file.quarantine()
        return HTTPNotFound(request=request)
    response = Response(request=request, conditional_response=True)
    response.headers["Content-Type"] = disk_file.metadata.get(
        "Content-Type", "application/octet-stream")
    for key, value in disk_file.metadata.iteritems():
        if key.lower().startswith("x-object-meta-") or \
                key.lower() in self.allowed_headers:
            response.headers[key] = value
    response.etag = disk_file.metadata["ETag"]
    response.last_modified = float(disk_file.metadata["X-Timestamp"])
    # Needed for container sync feature
    response.headers["X-Timestamp"] = disk_file.metadata["X-Timestamp"]
    response.content_length = file_size
    if "Content-Encoding" in disk_file.metadata:
        response.content_encoding = disk_file.metadata["Content-Encoding"]
    return response
def make_object_response(self, req, metadata, stream=None):
    """Build a swob Response from backend object metadata.

    :param req: swob.Request object
    :param metadata: dict of object attributes; user metadata is stored
        under 'user.'-prefixed keys
    :param stream: optional body iterator; when given, content length and
        encoding headers are populated from the metadata
    :returns: swob.Response object
    """
    conditional_etag = None
    if 'X-Backend-Etag-Is-At' in req.headers:
        conditional_etag = metadata.get(
            req.headers['X-Backend-Etag-Is-At'])
    resp = Response(request=req, conditional_response=True,
                    conditional_etag=conditional_etag)
    resp.headers['Content-Type'] = metadata.get(
        'mime-type', 'application/octet-stream')
    for k, v in metadata.iteritems():
        if k.startswith("user."):
            # strip the 'user.' xattr prefix before the header check
            meta = k[5:]
            if is_sys_or_user_meta('object', meta) or \
                    meta.lower() in self.allowed_headers:
                resp.headers[meta] = v
    resp.etag = metadata['hash'].lower()
    ts = Timestamp(metadata['ctime'])
    resp.last_modified = math.ceil(float(ts))
    if stream:
        resp.app_iter = stream
        resp.content_length = int(metadata['length'])
        try:
            resp.content_encoding = metadata['encoding']
        except KeyError:
            pass
    return resp
def HEAD(self, request):
    """Handle HTTP HEAD requests for the Swift Object Server."""
    device, partition, account, container, obj = \
        split_and_validate_path(request, 5, 5, True)
    try:
        disk_file = self.get_diskfile(
            device, partition, account, container, obj)
    except DiskFileDeviceUnavailable:
        return HTTPInsufficientStorage(drive=device, request=request)
    try:
        metadata = disk_file.read_metadata()
    except (DiskFileNotExist, DiskFileQuarantined):
        return HTTPNotFound(request=request, conditional_response=True)
    response = Response(request=request, conditional_response=True)
    response.headers['Content-Type'] = metadata.get(
        'Content-Type', 'application/octet-stream')
    for key, value in metadata.iteritems():
        if is_user_meta('object', key) or \
                key.lower() in self.allowed_headers:
            response.headers[key] = value
    response.etag = metadata['ETag']
    ts = metadata['X-Timestamp']
    response.last_modified = math.ceil(float(ts))
    # Needed for container sync feature
    response.headers['X-Timestamp'] = ts
    response.content_length = int(metadata['Content-Length'])
    try:
        response.content_encoding = metadata['Content-Encoding']
    except KeyError:
        pass
    return response
def HEAD(self, request):
    """Handle HTTP HEAD requests for the Swift Object Server."""
    device, partition, account, container, obj = \
        split_and_validate_path(request, 5, 5, True)
    try:
        disk_file = self._diskfile(device, partition, account, container,
                                   obj)
    except DiskFileDeviceUnavailable:
        return HTTPInsufficientStorage(drive=device, request=request)
    with disk_file.open():
        if disk_file.is_deleted() or disk_file.is_expired():
            return HTTPNotFound(request=request)
        try:
            file_size = disk_file.get_data_file_size()
        except (DiskFileError, DiskFileNotExist):
            # on-disk size mismatch: quarantine the object and 404
            disk_file.quarantine()
            return HTTPNotFound(request=request)
        metadata = disk_file.get_metadata()
    response = Response(request=request, conditional_response=True)
    response.headers['Content-Type'] = metadata.get(
        'Content-Type', 'application/octet-stream')
    for key, value in metadata.iteritems():
        if key.lower().startswith('x-object-meta-') or \
                key.lower() in self.allowed_headers:
            response.headers[key] = value
    response.etag = metadata['ETag']
    response.last_modified = float(metadata['X-Timestamp'])
    # Needed for container sync feature
    response.headers['X-Timestamp'] = metadata['X-Timestamp']
    response.content_length = file_size
    if 'Content-Encoding' in metadata:
        response.content_encoding = metadata['Content-Encoding']
    return response
def create_listing(self, req, out_content_type, info, metadata,
                   container_list, container):
    """Build a container listing response in json, xml or plain text.

    :param req: incoming request (the 'list_meta' param toggles
        per-object metadata in the listing)
    :param out_content_type: negotiated response content type
    :param info: container stats dict (object_count, bytes_used, ...)
    :param metadata: container metadata as {key: (value, timestamp)}
    :param container_list: raw listing records
    :param container: container name (bytes, utf-8 encoded)
    :returns: swob Response (204 for an empty plain-text listing)
    """
    list_meta = get_param(req, 'list_meta', 'f').lower() in TRUE_VALUES
    resp_headers = {
        'X-Container-Object-Count': info['object_count'],
        'X-Container-Bytes-Used': info['bytes_used'],
        'X-Timestamp': info['created_at'],
        'X-PUT-Timestamp': info['put_timestamp'],
    }
    for key, (value, timestamp) in metadata.iteritems():
        if value and (key.lower() in self.save_headers or
                      is_sys_or_user_meta('container', key)):
            resp_headers[key] = value
    ret = Response(request=req, headers=resp_headers,
                   content_type=out_content_type, charset='utf-8')
    if out_content_type == 'application/json':
        ret.body = json.dumps([self.update_data_record(record, list_meta)
                               for record in container_list])
    elif out_content_type.endswith('/xml'):
        doc = Element('container', name=container.decode('utf-8'))
        for obj in container_list:
            record = self.update_data_record(obj, list_meta)
            if 'subdir' in record:
                name = record['subdir'].decode('utf-8')
                sub = SubElement(doc, 'subdir', name=name)
                SubElement(sub, 'name').text = name
            else:
                obj_element = SubElement(doc, 'object')
                # well-known fields first, in a fixed order
                for field in ["name", "hash", "bytes", "content_type",
                              "last_modified"]:
                    SubElement(obj_element, field).text = str(
                        record.pop(field)).decode('utf-8')
                for field in sorted(record):
                    if list_meta and field == 'metadata':
                        meta = SubElement(obj_element, field)
                        for k, v in record[field].iteritems():
                            SubElement(meta, k).text = str(
                                v.decode('utf-8'))
                    else:
                        SubElement(obj_element, field).text = str(
                            record[field]).decode('utf-8')
        # normalize ElementTree's single-quoted XML declaration
        ret.body = tostring(doc, encoding='UTF-8').replace(
            "<?xml version='1.0' encoding='UTF-8'?>",
            '<?xml version="1.0" encoding="UTF-8"?>', 1)
    else:
        if not container_list:
            return HTTPNoContent(request=req, headers=resp_headers)
        ret.body = '\n'.join(rec[0] for rec in container_list) + '\n'
    return ret
def _deny_request(self, code):
    """Build an S3-style XML error response for *code*.

    :param code: one of 'AccessDenied' or 'InvalidURI'
    :returns: Response with the mapped HTTP status and an XML error body
    """
    error_table = {
        'AccessDenied': (401, 'Access denied'),
        'InvalidURI': (400, 'Could not parse the specified URI'),
    }
    resp = Response(content_type='text/xml')
    resp.status = error_table[code][0]
    error_msg = ('<?xml version="1.0" encoding="UTF-8"?>\r\n'
                 '<Error>\r\n  <Code>%s</Code>\r\n  '
                 '<Message>%s</Message>\r\n</Error>\r\n' %
                 (code, error_table[code][1]))
    if six.PY3:
        # Response.body must be bytes on Python 3
        error_msg = error_msg.encode()
    resp.body = error_msg
    return resp
def pass_file(self, req, path, content_type=None):
    """Serve a static file from self.path to the client.

    :param req: incoming request object
    :param path: file path relative to self.path
    :param content_type: explicit content type; when omitted it is
        guessed from the file name
    :returns: Response with the file contents, or HTTPNotFound if the
        file cannot be read
    """
    resp = Response()
    if content_type:
        resp.content_type = content_type
    else:
        (ctype, enc) = guess_type(basename(path))
        resp.content_type = ctype
    resp.charset = None
    try:
        # BUGFIX: was `resp.app_iter = iter(f.read())`, which yielded the
        # body one character per WSGI chunk; also open binary so
        # non-text files are not corrupted on platforms with newline
        # translation.
        with open(join(self.path, path), 'rb') as f:
            resp.body = f.read()
        return resp
    except IOError:
        return HTTPNotFound(request=req)
def GET(self, request):
    """Handle HTTP GET requests for the Swift Object Server."""
    device, partition, account, container, obj = \
        split_and_validate_path(request, 5, 5, True)
    keep_cache = self.keep_cache_private or (
        'X-Auth-Token' not in request.headers and
        'X-Storage-Token' not in request.headers)
    try:
        disk_file = self.get_diskfile(
            device, partition, account, container, obj)
    except DiskFileDeviceUnavailable:
        return HTTPInsufficientStorage(drive=device, request=request)
    try:
        with disk_file.open():
            metadata = disk_file.get_metadata()
            obj_size = int(metadata['Content-Length'])
            file_x_ts = metadata['X-Timestamp']
            file_x_ts_flt = float(file_x_ts)
            try:
                if_unmodified_since = request.if_unmodified_since
            except (OverflowError, ValueError):
                # catches timestamps before the epoch
                return HTTPPreconditionFailed(request=request)
            file_x_ts_utc = datetime.fromtimestamp(file_x_ts_flt, UTC)
            if if_unmodified_since and \
                    file_x_ts_utc > if_unmodified_since:
                return HTTPPreconditionFailed(request=request)
            try:
                if_modified_since = request.if_modified_since
            except (OverflowError, ValueError):
                # catches timestamps before the epoch
                return HTTPPreconditionFailed(request=request)
            if if_modified_since and file_x_ts_utc <= if_modified_since:
                return HTTPNotModified(request=request)
            keep_cache = (self.keep_cache_private or
                          ('X-Auth-Token' not in request.headers and
                           'X-Storage-Token' not in request.headers))
            response = Response(
                app_iter=disk_file.reader(keep_cache=keep_cache),
                request=request, conditional_response=True)
            response.headers['Content-Type'] = metadata.get(
                'Content-Type', 'application/octet-stream')
            for key, value in metadata.iteritems():
                if is_user_meta('object', key) or \
                        key.lower() in self.allowed_headers:
                    response.headers[key] = value
            response.etag = metadata['ETag']
            response.last_modified = math.ceil(file_x_ts_flt)
            response.content_length = obj_size
            try:
                response.content_encoding = metadata['Content-Encoding']
            except KeyError:
                pass
            response.headers['X-Timestamp'] = file_x_ts
            resp = request.get_response(response)
    except (DiskFileNotExist, DiskFileQuarantined):
        resp = HTTPNotFound(request=request, conditional_response=True)
    return resp
class TestResponse(unittest.TestCase):
    """Unit tests for Response cookie handling."""

    def setUp(self):
        self.resp = Response()

    def tearDown(self):
        pass

    def test_set_cookie(self):
        # bare cookie: mirrored into both environ and headers
        self.resp.set_cookie('token', 'XXXXXXXX')
        self.assertEqual('token=XXXXXXXX',
                         self.resp.environ['HTTP_SET_COOKIE'])
        self.assertEqual('token=XXXXXXXX',
                         self.resp.headers['set-cookie'])
        # full attribute set: attributes are rendered sorted by name
        self.resp.set_cookie('token', 'XXXXXXXX', path='/', comment='foo',
                             domain='example.tld', max_age=10, secure=True,
                             version='1', httponly=True)
        self.assertEqual(
            'token=XXXXXXXX; Comment=foo; Domain=example.tld; httponly; '
            'Max-Age=10; Path=/; secure; Version=1',
            self.resp.environ['HTTP_SET_COOKIE'])
def PUT(self):
    """PUT handler on Object

    Handles Vertigo micro-controller trigger assignment/deletion and
    vertigo/link objects; anything else is passed through to the app.
    """
    if self.is_trigger_assignation:
        trigger, micro_controller = self.get_mc_assignation_data()
        try:
            set_microcontroller_object(self, trigger, micro_controller)
            msg = ('Vertigo - Microcontroller "' + micro_controller +
                   '" correctly assigned to the "' + trigger +
                   '" trigger.\n')
        except ValueError as e:
            msg = e.args[0]
        self.logger.info(msg)
        response = Response(body=msg, headers={"etag": ""},
                            request=self.request)
    elif self.is_trigger_deletion:
        trigger, micro_controller = self.get_mc_deletion_data()
        try:
            delete_microcontroller_object(self, trigger, micro_controller)
            msg = ('Vertigo - Microcontroller "' + micro_controller +
                   '" correctly removed from the "' + trigger +
                   '" trigger.\n')
        except ValueError as e:
            msg = e.args[0]
        response = Response(body=msg, headers={"etag": ""},
                            request=self.request)
    elif self.request.headers["Content-Type"] == "vertigo/link":
        response = self.request.get_response(self.app)
        response.headers["Content-Type"] = "vertigo/link"
    else:
        # plain object PUT: pass straight through
        response = self.request.get_response(self.app)
    return response
def get_err_response(code):
    """
    Given an HTTP response code, create a properly formatted xml error
    response

    :param code: error code
    :returns: webob.response object
    """
    # S3 error code -> (HTTP status, message) mapping
    error_table = {
        'AccessDenied': (HTTP_FORBIDDEN, 'Access denied'),
        'BucketAlreadyExists': (
            HTTP_CONFLICT, 'The requested bucket name is not available'),
        'BucketNotEmpty': (
            HTTP_CONFLICT, 'The bucket you tried to delete is not empty'),
        'InvalidArgument': (HTTP_BAD_REQUEST, 'Invalid Argument'),
        'InvalidBucketName': (
            HTTP_BAD_REQUEST, 'The specified bucket is not valid'),
        'InvalidURI': (HTTP_BAD_REQUEST,
                       'Could not parse the specified URI'),
        'InvalidDigest': (
            HTTP_BAD_REQUEST, 'The Content-MD5 you specified was invalid'),
        'BadDigest': (
            HTTP_BAD_REQUEST,
            'The Content-Length you specified was invalid'),
        'EntityTooLarge': (
            HTTP_BAD_REQUEST, 'Your proposed upload exceeds the maximum '
            'allowed object size.'),
        'NoSuchBucket': (
            HTTP_NOT_FOUND, 'The specified bucket does not exist'),
        'SignatureDoesNotMatch': (
            HTTP_FORBIDDEN, 'The calculated request signature does not '
            'match your provided one'),
        'RequestTimeTooSkewed': (
            HTTP_FORBIDDEN, 'The difference between the request time and '
            'the current time is too large'),
        'NoSuchKey': (
            HTTP_NOT_FOUND, 'The resource you requested does not exist'),
        'Unsupported': (
            HTTP_NOT_IMPLEMENTED, 'The feature you requested is not yet '
            'implemented'),
        'MissingContentLength': (HTTP_LENGTH_REQUIRED, 'Length Required'),
        'ServiceUnavailable': (
            HTTP_SERVICE_UNAVAILABLE, 'Please reduce your request rate')}
    resp = Response(content_type='text/xml')
    resp.status = error_table[code][0]
    resp.body = """%s
<Error><Code>%s</Code><Message>%s</Message></Error>
""" % (XML_HEADER, code, error_table[code][1])
    return resp
def DELETE(self, env, start_response):
    """
    Handle DELETE Object request
    """
    self._app_call(env)
    status = self._get_status_int()

    # Success path first: Swift answers 204 for a deleted object.
    if status == HTTP_NO_CONTENT:
        resp = Response()
        resp.status = HTTP_NO_CONTENT
        return resp

    # Map Swift failures onto S3-style error responses.
    if status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):
        return get_err_response('AccessDenied')
    if status == HTTP_NOT_FOUND:
        return get_err_response('NoSuchKey')
    return get_err_response('InvalidURI')
def OPTIONS_base(self, req):
    """
    Base handler for OPTIONS requests

    :param req: swob.Request object
    :returns: swob.Response object (200 with CORS headers for a valid
              preflight, 401 for a rejected one)
    """
    # 'headers' is kept as a local dict and mutated below; it is only
    # pushed onto the response wholesale at the end.
    headers = {'Allow': ', '.join(self.allowed_methods)}
    resp = Response(status=200, request=req, headers=headers)
    req_origin_value = req.headers.get('Origin', None)
    if not req_origin_value:
        # NOT a CORS request
        return resp

    # CORS preflight request
    try:
        container_info = \
            self.container_info(self.account_name, self.container_name)
    except AttributeError:
        # NOTE(review): presumably hit when the controller has no
        # container (e.g. account-level OPTIONS) -- confirm.
        container_info = {}
    cors = container_info.get('cors', {})

    # Union of per-container allowed origins and the proxy-wide list.
    allowed_origins = set()
    if cors.get('allow_origin'):
        allowed_origins.update(cors['allow_origin'].split(' '))
    if self.app.cors_allow_origin:
        allowed_origins.update(self.app.cors_allow_origin)

    # Reject when the origin is not whitelisted (and no wildcard) or the
    # requested method is not one this controller allows.
    if (req_origin_value not in allowed_origins and
            '*' not in allowed_origins) or (
            req.headers.get('Access-Control-Request-Method') not in
            self.allowed_methods):
        resp.status = HTTP_UNAUTHORIZED
        return resp
        # CORS preflight request that isn't valid

    headers['access-control-allow-origin'] = req_origin_value
    if cors.get('max_age', None) is not None:
        headers['access-control-max-age'] = '%d' % cors.get('max_age')
    headers['access-control-allow-methods'] = ', '.join(
        self.allowed_methods)
    if cors.get('allow_headers'):
        headers['access-control-allow-headers'] = cors.get('allow_headers')
    # Re-assign so the accumulated CORS headers land on the response.
    resp.headers = headers
    return resp
def DELETE(self, env, start_response):
    """
    Handle DELETE Bucket request
    """
    self._app_call(env)
    status = self._get_status_int()

    if status != HTTP_NO_CONTENT:
        # Translate Swift failure statuses into S3 error codes.
        error_codes = {
            HTTP_UNAUTHORIZED: 'AccessDenied',
            HTTP_NOT_FOUND: 'NoSuchBucket',
            HTTP_CONFLICT: 'BucketNotEmpty',
        }
        return get_err_response(error_codes.get(status, 'InvalidURI'))

    resp = Response()
    resp.status = HTTP_NO_CONTENT
    return resp
def _handle_sync_request(self, broker, remote_info):
    """
    Update metadata, timestamps, sync points.

    :param broker: local DB broker for the account/container DB
    :param remote_info: replication info dict sent by the remote node
    :returns: swob Response whose body is the JSON-encoded local
              replication info (or HTTPNotFound if the DB is corrupt)
    """
    with self.debug_timing('info'):
        try:
            info = self._get_synced_replication_info(broker, remote_info)
        except (Exception, Timeout) as e:
            # A missing table means the DB file is corrupt: move it
            # aside so replication can recreate it from a peer.
            if 'no such table' in str(e):
                self.logger.error(_("Quarantining DB %s"), broker)
                quarantine_db(broker.db_file, broker.db_type)
                return HTTPNotFound()
            raise
    # TODO(mattoliverau) At this point in the RPC, we have the callers
    # replication info and ours, so it would be cool to be able to make
    # an educated guess here on the size of the incoming replication (maybe
    # average object table row size * difference in ROWIDs or something)
    # and the fallocate_reserve setting so we could return a 507.
    # This would make db fallocate_reserve more or less on par with the
    # object's.
    if remote_info['metadata']:
        with self.debug_timing('update_metadata'):
            broker.update_metadata(remote_info['metadata'])
    # Merge creation/put/delete timestamps when any differ.
    sync_timestamps = ('created_at', 'put_timestamp', 'delete_timestamp')
    if any(info[ts] != remote_info[ts] for ts in sync_timestamps):
        with self.debug_timing('merge_timestamps'):
            broker.merge_timestamps(*(remote_info[ts] for ts in
                                      sync_timestamps))
    with self.debug_timing('get_sync'):
        info['point'] = broker.get_sync(remote_info['id'])
    # Same content hash but an older local sync point: advance our sync
    # point to the remote's so we don't re-sync rows we already have.
    if remote_info['hash'] == info['hash'] and \
            info['point'] < remote_info['point']:
        with self.debug_timing('merge_syncs'):
            translate = {
                'remote_id': 'id',
                'sync_point': 'point',
            }
            data = dict((k, remote_info[v]) for k, v in translate.items())
            broker.merge_syncs([data])
            info['point'] = remote_info['point']
    return Response(json.dumps(info))
def dispatch(self, replicate_args, args):
    """Route a replication RPC to the matching handler method."""
    if not hasattr(args, 'pop'):
        return HTTPBadRequest(body='Invalid object type')
    op = args.pop(0)
    drive, partition, hsh = replicate_args
    if not check_drive(self.root, drive, self.mount_check):
        return Response(status='507 %s is not mounted' % drive)
    db_file = os.path.join(self.root, drive,
                           storage_directory(self.datadir, partition, hsh),
                           hsh + '.db')

    # These two ops manage their own DB files, so they bypass the
    # existence check below.
    special_ops = {
        'rsync_then_merge': self.rsync_then_merge,
        'complete_rsync': self.complete_rsync,
    }
    handler = special_ops.get(op)
    if handler is not None:
        return handler(drive, db_file, args)

    # someone might be about to rsync a db to us,
    # make sure there's a tmp dir to receive it.
    mkdirs(os.path.join(self.root, drive, 'tmp'))
    if not os.path.exists(db_file):
        return HTTPNotFound()
    return getattr(self, op)(self.broker_class(db_file), args)
def test_response_s3api_user_meta_headers(self):
    """
    Swift X-Object-Meta-* headers must be remapped to x-amz-meta-*,
    '=5F' decoded to '_', the unquoted ETag quoted, and standard
    headers canonicalized, when converting to an S3 response.
    """
    resp = Response(
        headers={
            'X-Object-Meta-Foo': 'Bar',
            'X-Object-Meta-Non-\xdcnicode-Value': '\xff',
            'X-Object-Meta-With=5FUnderscore': 'underscored',
            'X-Object-Sysmeta-Baz': 'quux',  # sysmeta: must be dropped
            'Etag': 'unquoted',
            'Content-type': 'text/plain',
            'content-length': '0',
        })
    s3resp = S3Response.from_swift_resp(resp)
    self.assertEqual(
        dict(s3resp.headers), {
            'x-amz-meta-foo': 'Bar',
            'x-amz-meta-non-\xdcnicode-value': '\xff',
            'x-amz-meta-with_underscore': 'underscored',
            'ETag': '"unquoted"',
            'Content-Type': 'text/plain',
            'Content-Length': '0',
        })
def page_login(self, req):
    """ create login page """
    if req.method == 'POST':
        # NOTE(review): broad except -- any failure (bad credentials,
        # unreachable auth server) renders the 'Login Failed' page.
        try:
            username = req.params_alt().get('username')
            password = req.params_alt().get('password')
            (storage_url, token) = get_auth(self.auth_url, username,
                                            password,
                                            auth_version=self.auth_version)
            # Record (or refresh) this token's storage URL and last-use
            # time in the in-memory token bank.
            if self.token_bank.get(token, None):
                self.token_bank[token].update({
                    'url': storage_url,
                    'last': int(time())
                })
            else:
                self.token_bank[token] = {
                    'url': storage_url,
                    'last': int(time())
                }
            # Redirect into the storage listing with a page-size limit.
            resp = HTTPFound(location=self.add_prefix(storage_url) +
                             '?limit=%s' % self.items_per_page)
            resp.set_cookie('_token', token, path=self.page_path,
                            max_age=self.cookie_max_age,
                            secure=self.secure)
            self.memcache_update(token)
            return resp
        except Exception, err:
            lang = self.get_lang(req)
            resp = Response(charset='utf8')
            resp.app_iter = self.tmpl({
                'ptype': 'login',
                'top': self.page_path,
                'title': self.title,
                'lang': lang,
                'message': 'Login Failed'
            })
            return resp
    # NOTE(review): as shown, non-POST requests fall through with no
    # explicit return -- presumably handled by code not visible here;
    # confirm against the full source.
def test_account_acl_headers_translated_for_GET_HEAD(self):
    # Verify that a GET/HEAD which receives X-Account-Sysmeta-Acl-* headers
    # from the account server will remap those headers to X-Account-Acl-*
    hdrs_ext, hdrs_int = self._make_user_and_sys_acl_headers_data()
    controller = proxy_server.AccountController(self.app, 'acct')
    for verb in ('GET', 'HEAD'):
        req = Request.blank('/v1/acct', environ={'swift_owner': True})
        # Stub out the backend call to return the internal sysmeta
        # headers directly.
        controller.GETorHEAD_base = lambda *_: Response(
            headers=hdrs_int, environ={
                'PATH_INFO': '/acct',
                'REQUEST_METHOD': verb,
            })
        method = getattr(controller, verb)
        resp = method(req)
        for header, value in hdrs_ext.items():
            if value:
                self.assertEqual(resp.headers.get(header), value)
            else:
                # blank ACLs should result in no header
                # (assertNotIn replaces the deprecated assert_ alias,
                # which is removed in modern unittest, and gives a
                # clearer failure message)
                self.assertNotIn(header, resp.headers)
def _get_object_from_cache(self, req_resp, app_iter):
    """
    Look up the requested object in the local disk cache.

    :param req_resp: swob request whose PATH_INFO identifies the object
    :param app_iter: original response body iterator (unused here; the
                     caller presumably falls back to it on a cache miss)
    :returns: a Response serving the cached copy, or None on cache miss
    """
    # CHECK IF FILE IS IN CACHE
    if not os.path.exists(CACHE_PATH):
        return None
    object_path = req_resp.environ['PATH_INFO']
    object_id = hashlib.md5(object_path).hexdigest()
    object_id, object_size, object_etag = \
        self.cache.access_cache("GET", object_id)
    if not object_id:
        return None
    self.logger.info('SDS Cache Filter - Object %s in cache' % object_path)
    resp_headers = {
        'content-length': str(object_size),
        'etag': object_etag,
    }
    # Open in binary mode: cached objects are raw bytes, not text.
    cached_object = open(CACHE_PATH + object_id, 'rb')
    return Response(app_iter=cached_object,
                    headers=resp_headers,
                    request=req_resp)
def _process_function_response_onput(self, f_data):
    """ Processes the data returned from the function """
    command = f_data['command']

    if command == 'RE':
        # Request Error: short-circuit with the function's message.
        return Response(body=f_data['message'] + '\n',
                        headers={'etag': ''}, request=self.req)

    if command == 'DW':
        # Data Write from function: body now comes from the function fd.
        self.req.environ['wsgi.input'] = DataFdIter(f_data['fd'])

    if command in ('DW', 'RC'):
        # Both Data Write and Request Continue may carry header updates.
        for key in ('request_headers', 'object_metadata'):
            if key in f_data:
                self.req.headers.update(f_data[key])

    # 'RR' (Request Rewire to another object) is not implemented yet and
    # falls through to normal request execution.

    response = self.req.get_response(self.app)
    if 'response_headers' in f_data:
        response.headers.update(f_data['response_headers'])
    return response
def _process_function_response_onget(self, f_data):
    """ Processes the response from the function """
    if f_data['command'] == 'DW':
        # Data Write from function: the body is replaced by the
        # function's output stream.
        new_fd = f_data['fd']
        self.response.app_iter = DataFdIter(new_fd)
        if 'object_metadata' in f_data:
            self.response.headers.update(f_data['object_metadata'])
        if 'response_headers' in f_data:
            self.response.headers.update(f_data['response_headers'])
        # The stored length/encoding/etag no longer describe the
        # rewritten body, so drop them and blank the Etag.
        if 'Content-Length' in self.response.headers:
            self.response.headers.pop('Content-Length')
        if 'Transfer-Encoding' in self.response.headers:
            self.response.headers.pop('Transfer-Encoding')
        if 'Etag' in self.response.headers:
            self.response.headers['Etag'] = ''
    elif f_data['command'] == 'RC':
        # Request Continue: normal req. execution, headers only.
        if 'object_metadata' in f_data:
            self.response.headers.update(f_data['object_metadata'])
        if 'response_headers' in f_data:
            self.response.headers.update(f_data['response_headers'])
    elif f_data['command'] == 'RR':
        # Request Rewire to another object
        pass  # TODO
    elif f_data['command'] == 'RE':
        # Request Error: replace the response with the error message.
        msg = f_data['message']
        self.response = Response(body=msg + '\n',
                                 headers={'etag': ''},
                                 request=self.req)
def best_response(self, req, statuses, reasons, bodies, server_type,
                  etag=None):
    """
    Given a list of responses from several servers, choose the best to
    return to the API.

    :param req: swob.Request object
    :param statuses: list of statuses returned
    :param reasons: list of reasons for each status
    :param bodies: bodies of each response
    :param server_type: type of server the responses came from
    :param etag: etag
    :returns: swob.Response object with the correct status, body, etc. set
    """
    resp = Response(request=req)
    if statuses:
        # Walk status classes best-first; the first class holding a
        # strict majority of the responses wins.
        for hundred in (HTTP_OK, HTTP_MULTIPLE_CHOICES, HTTP_BAD_REQUEST):
            hstatuses = [s for s in statuses
                         if hundred <= s < hundred + 100]
            if len(hstatuses) > len(statuses) / 2:
                status = max(hstatuses)
                winner = statuses.index(status)
                resp.status = '%s %s' % (status, reasons[winner])
                resp.body = bodies[winner]
                resp.content_type = 'text/html'
                if etag:
                    resp.headers['etag'] = etag.strip('"')
                return resp
    # No majority in any class: report service unavailable.
    self.app.logger.error(_('%(type)s returning 503 for %(statuses)s'),
                          {'type': server_type, 'statuses': statuses})
    resp.status = '503 Internal Server Error'
    return resp
def _process_object_move_and_link(self):
    """
    Moves an object to the destination path and leaves a soft link in
    the original path.

    :returns: swob Response from the copy/link operations, or an error
              Response when source and destination are the same path
    """
    link_path = os.path.join(self.container, self.obj)
    dest_path = self.request.headers['X-Vertigo-Link-To']
    if link_path != dest_path:
        response = self._verify_access(self.container, self.obj)
        # Keep the original object's headers for the link we create.
        headers = response.headers
        # Only COPY when the source is a real object, not already a link.
        if "X-Object-Sysmeta-Vertigo-Link-to" not in response.headers \
                and response.headers['Content-Type'] != 'vertigo/link':
            self.request.method = 'COPY'
            self.request.headers['Destination'] = dest_path
            response = self.request.get_response(self.app)
        if response.is_success:
            response = create_link(self, link_path, dest_path, headers)
    else:
        msg = ("Vertigo - Error: Link path and destination path "
               "cannot be the same.\n")
        response = Response(body=msg, headers={'etag': ''},
                            request=self.request)
    return response
def create_listing(self, req, out_content_type, info, resp_headers,
                   metadata, container_list, container):
    """
    Build a container listing response in the requested format.

    :param req: swob.Request object
    :param out_content_type: negotiated response content type
    :param info: container info dict (unused here)
    :param resp_headers: headers dict for the response; metadata is
                         folded in and X-PUT-Timestamp is read from it
    :param metadata: container metadata as {key: (value, timestamp)}
    :param container_list: listing records from the broker
    :param container: container name (bytes, utf-8 encoded)
    :returns: swob Response (HTTPNoContent for an empty plain listing)
    """
    # Expose saved and user/sys metadata headers on the response.
    for key, (value, timestamp) in metadata.items():
        if value and (key.lower() in self.save_headers or
                      is_sys_or_user_meta('container', key)):
            resp_headers[key] = value
    ret = Response(request=req, headers=resp_headers,
                   content_type=out_content_type, charset='utf-8')
    if out_content_type == 'application/json':
        ret.body = json.dumps(
            [self.update_data_record(record)
             for record in container_list])
    elif out_content_type.endswith('/xml'):
        doc = Element('container', name=container.decode('utf-8'))
        for obj in container_list:
            record = self.update_data_record(obj)
            if 'subdir' in record:
                name = record['subdir'].decode('utf-8')
                sub = SubElement(doc, 'subdir', name=name)
                SubElement(sub, 'name').text = name
            else:
                obj_element = SubElement(doc, 'object')
                # Emit the well-known fields first, in fixed order...
                for field in ["name", "hash", "bytes", "content_type",
                              "last_modified"]:
                    SubElement(obj_element, field).text = str(
                        record.pop(field)).decode('utf-8')
                # ...then any remaining fields, sorted for stability.
                for field in sorted(record):
                    SubElement(obj_element, field).text = str(
                        record[field]).decode('utf-8')
        # Normalize the XML declaration's quoting (single -> double).
        ret.body = tostring(doc, encoding='UTF-8').replace(
            "<?xml version='1.0' encoding='UTF-8'?>",
            '<?xml version="1.0" encoding="UTF-8"?>', 1)
    else:
        # Plain-text listing: one name per line.
        if not container_list:
            return HTTPNoContent(request=req, headers=resp_headers)
        ret.body = '\n'.join(rec[0] for rec in container_list) + '\n'
    ret.last_modified = math.ceil(float(resp_headers['X-PUT-Timestamp']))
    return ret
def get_working_response(self, req):
    """
    Build a client-facing response from the first usable backend source.

    :param req: swob.Request object
    :returns: swob.Response mirroring the backend response, or None when
              no source node answered
    """
    source, node = self._get_source_and_node()
    res = None
    if source:
        res = Response(request=req)
        if req.method == 'GET' and \
                source.status in (HTTP_OK, HTTP_PARTIAL_CONTENT):
            res.app_iter = self._make_app_iter(node, source)
            # See NOTE: swift_conn at top of file about this.
            res.swift_conn = source.swift_conn
        res.status = source.status
        update_headers(res, source.getheaders())
        if not res.environ:
            res.environ = {}
        res.environ['swift_x_timestamp'] = \
            source.getheader('x-timestamp')
        res.accept_ranges = 'bytes'
        res.content_length = source.getheader('Content-Length')
        if source.getheader('Content-Type'):
            # Clear charset so the backend's Content-Type is passed
            # through unmodified.
            res.charset = None
            res.content_type = source.getheader('Content-Type')
    return res
def make_object_response(self, req, metadata, stream=None):
    """
    Build a swob Response for an object from its stored metadata.

    :param req: swob.Request object
    :param metadata: object metadata dict (keys used: deleted, mime_type,
                     properties, hash, version, mtime, length, encoding)
    :param stream: optional data stream that becomes the response body
    :returns: swob.Response object
    """
    # Honor an alternate etag location when the client asked for one.
    conditional_etag = None
    if 'X-Backend-Etag-Is-At' in req.headers:
        conditional_etag = metadata.get(
            req.headers['X-Backend-Etag-Is-At'])
    resp = Response(request=req, conditional_response=True,
                    conditional_etag=conditional_etag)
    if config_true_value(metadata['deleted']):
        resp.headers['Content-Type'] = DELETE_MARKER_CONTENT_TYPE
    else:
        resp.headers['Content-Type'] = metadata.get(
            'mime_type', 'application/octet-stream')
    # Copy through user/sys/transient-sysmeta and whitelisted headers.
    properties = metadata.get('properties')
    if properties:
        for k, v in properties.items():
            if is_sys_or_user_meta('object', k) or \
                    is_object_transient_sysmeta(k) or \
                    k.lower() in self.allowed_headers:
                resp.headers[str(k)] = v
    hash_ = metadata.get('hash')
    if hash_ is not None:
        hash_ = hash_.lower()
    resp.headers['etag'] = hash_
    resp.headers['x-object-sysmeta-version-id'] = metadata['version']
    resp.last_modified = int(metadata['mtime'])
    if stream:
        # Whether we are bothered with ranges or not, we wrap the
        # stream in order to handle exceptions.
        resp.app_iter = StreamRangeIterator(req, stream)
    length_ = metadata.get('length')
    if length_ is not None:
        length_ = int(length_)
    resp.content_length = length_
    resp.content_encoding = metadata.get('encoding')
    resp.accept_ranges = 'bytes'
    return resp
def __call__(self, env, start_response):
    """
    Fake WSGI app for bulk-middleware tests: routes each request by
    PATH_INFO prefix to a canned status and records PUT/DELETE paths.
    """
    self.calls += 1
    if env.get('swift.source') in ('EA', 'BD'):
        assert not env.get('swift.proxy_access_log_made')
    if not six.PY2:
        # Check that it's valid WSGI
        assert all(0 <= ord(c) <= 255 for c in env['PATH_INFO'])
    if env['REQUEST_METHOD'] == 'PUT':
        self.put_paths.append(env['PATH_INFO'])
    if env['PATH_INFO'].startswith('/unauth/'):
        if env['PATH_INFO'].endswith('/c/f_ok'):
            return Response(status='204 No Content')(env, start_response)
        return Response(status=401)(env, start_response)
    if env['PATH_INFO'].startswith('/create_cont/'):
        # Container HEAD misses, PUT succeeds.
        if env['REQUEST_METHOD'] == 'HEAD':
            return Response(status='404 Not Found')(env, start_response)
        return Response(status='201 Created')(env, start_response)
    if env['PATH_INFO'].startswith('/create_cont_fail/'):
        if env['REQUEST_METHOD'] == 'HEAD':
            return Response(status='403 Forbidden')(env, start_response)
        return Response(status='404 Not Found')(env, start_response)
    if env['PATH_INFO'].startswith('/create_obj_unauth/'):
        if env['PATH_INFO'].endswith('/cont'):
            return Response(status='201 Created')(env, start_response)
        return Response(status=401)(env, start_response)
    if env['PATH_INFO'].startswith('/tar_works/'):
        if len(env['PATH_INFO']) > self.max_pathlen:
            return Response(status='400 Bad Request')(env, start_response)
        return Response(status='201 Created')(env, start_response)
    if env['PATH_INFO'].startswith('/tar_works_cont_head_fail/'):
        if env['REQUEST_METHOD'] == 'HEAD':
            return Response(status='404 Not Found')(env, start_response)
        if len(env['PATH_INFO']) > 100:
            return Response(status='400 Bad Request')(env, start_response)
        return Response(status='201 Created')(env, start_response)
    if (env['PATH_INFO'].startswith('/delete_works/') and
            env['REQUEST_METHOD'] == 'DELETE'):
        self.delete_paths.append(env['PATH_INFO'])
        if len(env['PATH_INFO']) > self.max_pathlen:
            return Response(status='400 Bad Request')(env, start_response)
        if env['PATH_INFO'].endswith('404'):
            return Response(status='404 Not Found')(env, start_response)
        if env['PATH_INFO'].endswith('badutf8'):
            return Response(status='412 Precondition Failed')(
                env, start_response)
        return Response(status='204 No Content')(env, start_response)
    if env['PATH_INFO'].startswith('/delete_cont_fail/'):
        return Response(status='409 Conflict')(env, start_response)
    if env['PATH_INFO'].startswith('/broke/'):
        return Response(status='500 Internal Error')(env, start_response)
    if env['PATH_INFO'].startswith('/delete_cont_success_after_attempts/'):
        # Conflict for the first del_cont_total_calls attempts, then 204.
        if self.del_cont_cur_call < self.del_cont_total_calls:
            self.del_cont_cur_call += 1
            return Response(status='409 Conflict')(env, start_response)
        else:
            return Response(status='204 No Content')(env, start_response)
def GET(self, req):
    """
    Handle recon GET requests: dispatch on the check name in the URL
    path and return the collected data as JSON.

    :param req: swob.Request with path /<root>/<check>[/<type>]
    :returns: JSON Response, 404 for an unknown path, or 500 when the
              check returned no content
    """
    root, rcheck, rtype = req.split_path(1, 3, True)
    all_rtypes = ['account', 'container', 'object']
    if rcheck == "mem":
        content = self.get_mem()
    elif rcheck == "load":
        content = self.get_load()
    elif rcheck == "async":
        content = self.get_async_info()
    elif rcheck == 'replication' and rtype in all_rtypes:
        content = self.get_replication_info(rtype)
    elif rcheck == 'replication' and rtype is None:
        # handle old style object replication requests
        content = self.get_replication_info('object')
    elif rcheck == "devices":
        content = self.get_device_info()
    elif rcheck == "updater" and rtype in ['container', 'object']:
        content = self.get_updater_info(rtype)
    elif rcheck == "auditor" and rtype in all_rtypes:
        content = self.get_auditor_info(rtype)
    elif rcheck == "expirer" and rtype == 'object':
        content = self.get_expirer_info(rtype)
    elif rcheck == "mounted":
        content = self.get_mounted()
    elif rcheck == "unmounted":
        content = self.get_unmounted()
    elif rcheck == "diskusage":
        content = self.get_diskusage()
    elif rcheck == "ringmd5":
        content = self.get_ring_md5()
    elif rcheck == "swiftconfmd5":
        content = self.get_swift_conf_md5()
    elif rcheck == "quarantined":
        content = self.get_quarantine_count()
    elif rcheck == "sockstat":
        content = self.get_socket_info()
    elif rcheck == "version":
        content = self.get_version()
    elif rcheck == "driveaudit":
        content = self.get_driveaudit_error()
    elif rcheck == "time":
        content = self.get_time()
    elif rcheck == "sharding":
        content = self.get_sharding_info()
    elif rcheck == "relinker":
        content = self.get_relinker_info()
    else:
        content = "Invalid path: %s" % req.path
        return Response(request=req, status="404 Not Found",
                        body=content, content_type="text/plain")
    if content is not None:
        return Response(request=req, body=json.dumps(content),
                        content_type="application/json")
    else:
        return Response(request=req, status="500 Server Error",
                        body="Internal server error.",
                        content_type="text/plain")
def GET(self, request):
    """Handle HTTP GET requests for the Swift Object Server."""
    device, partition, account, container, obj = \
        split_and_validate_path(request, 5, 5, True)
    try:
        disk_file = self._diskfile(device, partition, account, container,
                                   obj, keep_data_fp=True,
                                   iter_hook=sleep)
    except DiskFileDeviceUnavailable:
        return HTTPInsufficientStorage(drive=device, request=request)
    if disk_file.is_deleted() or disk_file.is_expired():
        # If-Match: * on a missing object is a precondition failure;
        # otherwise a plain 404.
        if request.headers.get('if-match') == '*':
            return HTTPPreconditionFailed(request=request)
        else:
            return HTTPNotFound(request=request)
    try:
        file_size = disk_file.get_data_file_size()
    except (DiskFileError, DiskFileNotExist):
        # On-disk size disagrees with metadata: quarantine the file.
        disk_file.quarantine()
        return HTTPNotFound(request=request)
    # Conditional request handling: If-Match / If-None-Match etags...
    if request.headers.get('if-match') not in (None, '*') and \
            disk_file.metadata['ETag'] not in request.if_match:
        disk_file.close()
        return HTTPPreconditionFailed(request=request)
    if request.headers.get('if-none-match') is not None:
        if disk_file.metadata['ETag'] in request.if_none_match:
            resp = HTTPNotModified(request=request)
            resp.etag = disk_file.metadata['ETag']
            disk_file.close()
            return resp
    # ...and If-(Un)Modified-Since timestamps.
    try:
        if_unmodified_since = request.if_unmodified_since
    except (OverflowError, ValueError):
        # catches timestamps before the epoch
        return HTTPPreconditionFailed(request=request)
    if if_unmodified_since and \
            datetime.fromtimestamp(
                float(disk_file.metadata['X-Timestamp']), UTC) > \
            if_unmodified_since:
        disk_file.close()
        return HTTPPreconditionFailed(request=request)
    try:
        if_modified_since = request.if_modified_since
    except (OverflowError, ValueError):
        # catches timestamps before the epoch
        return HTTPPreconditionFailed(request=request)
    if if_modified_since and \
            datetime.fromtimestamp(
                float(disk_file.metadata['X-Timestamp']), UTC) < \
            if_modified_since:
        disk_file.close()
        return HTTPNotModified(request=request)
    response = Response(app_iter=disk_file, request=request,
                        conditional_response=True)
    response.headers['Content-Type'] = disk_file.metadata.get(
        'Content-Type', 'application/octet-stream')
    # Copy through user metadata and whitelisted headers.
    for key, value in disk_file.metadata.iteritems():
        if key.lower().startswith('x-object-meta-') or \
                key.lower() in self.allowed_headers:
            response.headers[key] = value
    response.etag = disk_file.metadata['ETag']
    response.last_modified = float(disk_file.metadata['X-Timestamp'])
    response.content_length = file_size
    # Small objects may stay in the OS page cache, but only for
    # unauthenticated requests unless keep_cache_private is set.
    if response.content_length < self.keep_cache_size and \
            (self.keep_cache_private or
             ('X-Auth-Token' not in request.headers and
              'X-Storage-Token' not in request.headers)):
        disk_file.keep_cache = True
    if 'Content-Encoding' in disk_file.metadata:
        response.content_encoding = disk_file.metadata['Content-Encoding']
    response.headers['X-Timestamp'] = disk_file.metadata['X-Timestamp']
    return request.get_response(response)
def __call__(self, req):
    """
    WSGI entry point: serve static assets, enforce login via the token
    cookie/memcache, then route to the ajax/action/main page handlers.
    """
    if not self.memcache:
        self.memcache = cache_from_env(req.environ)
    login_path = '%s/%s' % (self.page_path, 'login')
    token = None
    storage_url = None
    # favicon
    if req.path == '/favicon.ico':
        return self.pass_file(req, 'images/favicon.ico',
                              'image/vnd.microsoft.icon')
    # not taylor
    if not req.path.startswith(self.page_path):
        return self.app
    # image
    if req.path.startswith(join(self.page_path, 'image')):
        return self.pass_file(req, join('images', basename(req.path)))
    # css
    if req.path.startswith(join(self.page_path, 'css')):
        return self.pass_file(req, join('css', basename(req.path)))
    # js
    if req.path.startswith(join(self.page_path, 'js')):
        return self.pass_file(req, join('js', basename(req.path)))
    # get token from cookie and query memcache
    token = req.cookies('_token')
    if self.memcache and token:
        cache_val = self.memcache.get('%s_%s' % (self.title, token))
        if cache_val:
            self.token_bank[token] = cache_val
    status = self.token_bank.get(token, None)
    if status:
        storage_url = status.get('url', None)
    # login page
    if req.path == login_path:
        return self.page_login(req)
    if not token or not storage_url:
        return HTTPFound(location=login_path)
    self.token_bank[token].update({'last': time()})
    # clean up token bank
    for tok, val in self.token_bank.items():
        last = val.get('last', 0)
        if (time() - last) >= self.cookie_max_age:
            del (self.token_bank[tok])
    if 'X-PJAX' in req.headers:
        return self.pass_file(req, 'images/test.html', 'text/html')
    # return self.page_cont_list(req, storage_url, token,
    #                            template_name='containers.tmpl')
    # return self.page_obj_list(req, storage_url, token,
    #                           template_name='objectss.tmpl')
    # ajax action
    if '_ajax' in req.params_alt():
        if req.params_alt()['_action'].endswith('_meta_list'):
            status, headers = self.action_routine(req, storage_url,
                                                  token)
            return Response(status=status, body=headers)
        return Response(
            status=self.action_routine(req, storage_url, token))
    # after action
    if '_action' in req.params_alt():
        if req.params_alt()['_action'] == 'logout':
            del self.token_bank[token]
            self.memcache.delete('%s_%s' % (self.title, token))
            return HTTPFound(location=login_path)
        return self.page_after_action(req, storage_url, token)
    # construct main pages
    return self.page_main(req, storage_url, token)
class Taylor(object):
    """ swift embeded easy manipulator """

    def __init__(self, app, conf):
        """
        :param app: downstream WSGI app
        :param conf: middleware configuration dict
        """
        self.app = app
        self.conf = conf
        self.title = conf.get('taylor_title', 'Taylor')
        self.logger = get_logger(conf, log_route='%s' % self.title)
        self.page_path = conf.get('page_path', '/taylor')
        self.auth_url = conf.get('auth_url')
        self.auth_version = int(conf.get('auth_version', 1))
        self.items_per_page = int(conf.get('items_per_page', 5))
        self.cookie_max_age = int(conf.get('cookie_max_age', 3600))
        self.enable_versions = config_true_value(
            conf.get('enable_versions', 'no'))
        self.enable_object_expire = config_true_value(
            conf.get('enable_object_expire', 'no'))
        self.enable_container_sync = config_true_value(
            conf.get('enable_container_sync', 'no'))
        self.delimiter = conf.get('delimiter', '/')
        self.path = abspath(dirname(__file__))
        self.tmpl = TaylorTemplate()
        # token -> {'url': ..., 'last': ..., 'msg': ...}
        self.token_bank = {}
        self.memcache = None
        # Secure cookies only when this middleware is served over TLS.
        self.secure = True if 'key_file' in self.conf and \
            'cert_file' in self.conf else False
        self.logger.info('%s loaded.' % self.title)

    @wsgify
    def __call__(self, req):
        """
        WSGI entry point: serve static assets, enforce login via the
        token cookie/memcache, then route to the page handlers.
        """
        if not self.memcache:
            self.memcache = cache_from_env(req.environ)
        login_path = '%s/%s' % (self.page_path, 'login')
        token = None
        storage_url = None
        # favicon
        if req.path == '/favicon.ico':
            return self.pass_file(req, 'images/favicon.ico',
                                  'image/vnd.microsoft.icon')
        # not taylor
        if not req.path.startswith(self.page_path):
            return self.app
        # image
        if req.path.startswith(join(self.page_path, 'image')):
            return self.pass_file(req, join('images', basename(req.path)))
        # css
        if req.path.startswith(join(self.page_path, 'css')):
            return self.pass_file(req, join('css', basename(req.path)))
        # js
        if req.path.startswith(join(self.page_path, 'js')):
            return self.pass_file(req, join('js', basename(req.path)))
        # get token from cookie and query memcache
        token = req.cookies('_token')
        if self.memcache and token:
            cache_val = self.memcache.get('%s_%s' % (self.title, token))
            if cache_val:
                self.token_bank[token] = cache_val
        status = self.token_bank.get(token, None)
        if status:
            storage_url = status.get('url', None)
        # login page
        if req.path == login_path:
            return self.page_login(req)
        if not token or not storage_url:
            return HTTPFound(location=login_path)
        self.token_bank[token].update({'last': time()})
        # clean up token bank
        for tok, val in self.token_bank.items():
            last = val.get('last', 0)
            if (time() - last) >= self.cookie_max_age:
                del (self.token_bank[tok])
        if 'X-PJAX' in req.headers:
            return self.pass_file(req, 'images/test.html', 'text/html')
        # return self.page_cont_list(req, storage_url, token,
        #                            template_name='containers.tmpl')
        # return self.page_obj_list(req, storage_url, token,
        #                           template_name='objectss.tmpl')
        # ajax action
        if '_ajax' in req.params_alt():
            if req.params_alt()['_action'].endswith('_meta_list'):
                status, headers = self.action_routine(req, storage_url,
                                                      token)
                return Response(status=status, body=headers)
            return Response(
                status=self.action_routine(req, storage_url, token))
        # after action
        if '_action' in req.params_alt():
            if req.params_alt()['_action'] == 'logout':
                del self.token_bank[token]
                self.memcache.delete('%s_%s' % (self.title, token))
                return HTTPFound(location=login_path)
            return self.page_after_action(req, storage_url, token)
        # construct main pages
        return self.page_main(req, storage_url, token)

    def pass_file(self, req, path, content_type=None):
        """ pass a file to client """
        resp = Response()
        if content_type:
            resp.content_type = content_type
        else:
            (ctype, enc) = guess_type(basename(path))
            resp.content_type = ctype
        resp.charset = None
        try:
            with open(join(self.path, path)) as f:
                resp.app_iter = iter(f.read())
            return resp
        except IOError:
            return HTTPNotFound(request=req)

    def page_login(self, req):
        """ create login page """
        if req.method == 'POST':
            # NOTE(review): broad except -- any failure renders the
            # 'Login Failed' page.
            try:
                username = req.params_alt().get('username')
                password = req.params_alt().get('password')
                (storage_url, token) = get_auth(
                    self.auth_url, username, password,
                    auth_version=self.auth_version)
                if self.token_bank.get(token, None):
                    self.token_bank[token].update({
                        'url': storage_url,
                        'last': int(time())
                    })
                else:
                    self.token_bank[token] = {
                        'url': storage_url,
                        'last': int(time())
                    }
                resp = HTTPFound(location=self.add_prefix(storage_url) +
                                 '?limit=%s' % self.items_per_page)
                resp.set_cookie('_token', token, path=self.page_path,
                                max_age=self.cookie_max_age,
                                secure=self.secure)
                self.memcache_update(token)
                return resp
            except Exception, err:
                lang = self.get_lang(req)
                resp = Response(charset='utf8')
                resp.app_iter = self.tmpl({
                    'ptype': 'login',
                    'top': self.page_path,
                    'title': self.title,
                    'lang': lang,
                    'message': 'Login Failed'
                })
                return resp
        # GET: render the login form, showing (and then clearing) any
        # stored one-shot message for this token.
        token = req.cookies('_token')
        status = self.token_bank.get(token, None) if token else None
        lang = self.get_lang(req)
        msg = ''
        if status:
            msg = status.get('msg', '')
        resp = Response(charset='utf8')
        resp.app_iter = self.tmpl({
            'ptype': 'login',
            'top': self.page_path,
            'title': self.title,
            'lang': lang,
            'message': msg
        })
        if msg:
            self.token_bank[token].update({'msg': ''})
            self.memcache_update(token)
        return resp
vrs, acc, cont, obj = split_path(path, 1, 4, True) path_type = len([i for i in [vrs, acc, cont, obj] if i]) if path_type == 2: # account return self.page_cont_list(req, storage_url, token) if path_type == 3: # container return self.page_obj_list(req, storage_url, token) if path_type == 4: # object try: (obj_status, objct) = get_object(storage_url, token, cont, obj) except ClientException, e: resp = Response(charset='utf8') resp.status = e.http_status return resp except err: pass resp = Response() resp.set_cookie('_token', token, path=self.page_path, max_age=self.cookie_max_age, secure=self.secure) resp.status = HTTP_OK resp.headers = obj_status resp.body = objct self.token_bank[token].update({'msg': ''}) self.memcache_update(token) return resp return HTTPFound(location=self.add_prefix(storage_url)) def page_cont_list(self, req, storage_url, token, template=None): """ """
def get_or_head_response(self, req, x_object_manifest,
                         response_headers=None):
    """
    Build the GET/HEAD response for a Dynamic Large Object manifest.

    Lists the segment container named by ``x_object_manifest``
    (``<container>/<object-prefix>``), computes Content-Length/Etag when the
    full segment listing fits in one page, honors a single-range Range
    header when possible, and (for GET) streams the concatenated segments.

    :param req: the incoming swob.Request
    :param x_object_manifest: value of the X-Object-Manifest header,
        ``<container>/<prefix>``, both parts URL-quoted
    :param response_headers: header pairs to base the response on; defaults
        to ``self._response_headers`` (headers from the manifest object)
    :returns: swob.Response (may be an error response)
    """
    if response_headers is None:
        response_headers = self._response_headers
    container, obj_prefix = x_object_manifest.split('/', 1)
    container = unquote(container)
    obj_prefix = unquote(obj_prefix)
    # NOTE(review): assumes the request path is /<version>/<account>/... —
    # only version and account are needed here.
    version, account, _junk = req.split_path(2, 3, True)
    error_response, segments = self._get_container_listing(
        req, version, account, container, obj_prefix)
    if error_response:
        return error_response
    # A short page means we saw every segment; a full page means there may
    # be more segments beyond what we listed.
    have_complete_listing = len(segments) < \
        constraints.CONTAINER_LISTING_LIMIT
    first_byte = last_byte = None
    actual_content_length = None
    content_length_for_swob_range = None
    # Only single-range requests are ranged here; multi-range falls through
    # and returns the whole object.
    if req.range and len(req.range.ranges) == 1:
        content_length_for_swob_range = sum(o['bytes'] for o in segments)
        # This is a hack to handle suffix byte ranges (e.g. "bytes=-5"),
        # which we can't honor unless we have a complete listing.
        _junk, range_end = req.range.ranges_for_length(float("inf"))[0]
        # If this is all the segments, we know whether or not this
        # range request is satisfiable.
        #
        # Alternately, we may not have all the segments, but this range
        # falls entirely within the first page's segments, so we know
        # that it is satisfiable.
        if (have_complete_listing
                or range_end < content_length_for_swob_range):
            byteranges = req.range.ranges_for_length(
                content_length_for_swob_range)
            if not byteranges:
                # Unsatisfiable range; only advertise the total size when
                # we actually know it.
                headers = {'Accept-Ranges': 'bytes'}
                if have_complete_listing:
                    headers['Content-Range'] = 'bytes */%d' % (
                        content_length_for_swob_range, )
                return HTTPRequestedRangeNotSatisfiable(
                    request=req, headers=headers)
            first_byte, last_byte = byteranges[0]
            # For some reason, swob.Range.ranges_for_length adds 1 to the
            # last byte's position.
            last_byte -= 1
            actual_content_length = last_byte - first_byte + 1
        else:
            # The range may or may not be satisfiable, but we can't tell
            # based on just one page of listing, and we're not going to go
            # get more pages because that would use up too many resources,
            # so we ignore the Range header and return the whole object.
            actual_content_length = None
            content_length_for_swob_range = None
            req.range = None
    # Drop stale length/range headers inherited from the manifest object;
    # they will be recomputed below when possible.
    response_headers = [
        (h, v) for h, v in response_headers
        if h.lower() not in ("content-length", "content-range")]
    if content_length_for_swob_range is not None:
        # Here, we have to give swob a big-enough content length so that
        # it can compute the actual content length based on the Range
        # header. This value will not be visible to the client; swob will
        # substitute its own Content-Length.
        #
        # Note: if the manifest points to at least CONTAINER_LISTING_LIMIT
        # segments, this may be less than the sum of all the segments'
        # sizes. However, it'll still be greater than the last byte in the
        # Range header, so it's good enough for swob.
        response_headers.append(
            ('Content-Length', str(content_length_for_swob_range)))
    elif have_complete_listing:
        actual_content_length = sum(o['bytes'] for o in segments)
        response_headers.append(
            ('Content-Length', str(actual_content_length)))
    if have_complete_listing:
        # DLO etag is the MD5 of the concatenated segment etags (quotes
        # stripped), only computable from a complete listing.
        response_headers = [(h, v) for h, v in response_headers
                            if h.lower() != "etag"]
        etag = md5()
        for seg_dict in segments:
            etag.update(seg_dict['hash'].strip('"'))
        response_headers.append(('Etag', '"%s"' % etag.hexdigest()))
    app_iter = None
    if req.method == 'GET':
        # Stream segments, rate-limiting small-segment fetches after the
        # configured threshold.
        listing_iter = RateLimitedIterator(
            self._segment_listing_iterator(
                req, version, account, container, obj_prefix, segments,
                first_byte=first_byte, last_byte=last_byte),
            self.dlo.rate_limit_segments_per_sec,
            limit_after=self.dlo.rate_limit_after_segment)
        app_iter = SegmentedIterable(
            req, self.dlo.app, listing_iter, ua_suffix="DLO MultipartGET",
            swift_source="DLO", name=req.path, logger=self.logger,
            max_get_time=self.dlo.max_get_time,
            response_body_length=actual_content_length)
        try:
            # Fail fast (409) if the first segment is missing or mismatched
            # instead of dropping the connection mid-stream.
            app_iter.validate_first_segment()
        except (SegmentError, ListingIterError):
            return HTTPConflict(request=req)
    resp = Response(request=req, headers=response_headers,
                    conditional_response=True, app_iter=app_iter)
    return resp
def _manifest_get_response(self, req, content_length, response_headers,
                           segments):
    """
    Build the GET response body for a Static Large Object manifest.

    Converts the manifest's segment records into a rate-limited
    SegmentedIterable that streams each referenced object in order,
    honoring any Range header on the request.

    :param req: the incoming swob.Request
    :param content_length: total size of the large object (sum of segments)
    :param response_headers: headers to send on the response
    :param segments: list of segment dicts from the manifest (each with at
        least 'name', 'hash' and 'bytes' keys)
    :returns: swob.Response, or HTTPConflict if the first segment fails
        validation
    """
    if req.range:
        byteranges = [
            # For some reason, swob.Range.ranges_for_length adds 1 to the
            # last byte's position.
            (start, end - 1) for start, end
            in req.range.ranges_for_length(content_length)
        ]
    else:
        byteranges = []

    ver, account, _junk = req.split_path(3, 3, rest_with_last=True)
    plain_listing_iter = self._segment_listing_iterator(
        req, ver, account, segments, byteranges)

    # NOTE: Python 2-only tuple-parameter syntax; the argument is the
    # 3-tuple yielded by the listing iterator.
    def is_small_segment((seg_dict, start_byte, end_byte)):
        # Effective byte span of this (possibly partial) segment fetch;
        # None bounds mean "whole segment".
        start = 0 if start_byte is None else start_byte
        end = int(seg_dict['bytes']) - 1 if end_byte is None else end_byte
        is_small = (end - start + 1) < self.slo.rate_limit_under_size
        return is_small

    # Only small segments are rate-limited; large ones are naturally slow.
    ratelimited_listing_iter = RateLimitedIterator(
        plain_listing_iter,
        self.slo.rate_limit_segments_per_sec,
        limit_after=self.slo.rate_limit_after_segment,
        ratelimit_if=is_small_segment)

    # self._segment_listing_iterator gives us 3-tuples of (segment dict,
    # start byte, end byte), but SegmentedIterable wants (obj path, etag,
    # size, start byte, end byte), so we clean that up here
    segment_listing_iter = (
        ("/{ver}/{acc}/{conobj}".format(
            ver=ver, acc=account, conobj=seg_dict['name'].lstrip('/')),
         seg_dict['hash'], int(seg_dict['bytes']),
         start_byte, end_byte)
        for seg_dict, start_byte, end_byte in ratelimited_listing_iter)

    segmented_iter = SegmentedIterable(
        req, self.slo.app, segment_listing_iter,
        name=req.path, logger=self.slo.logger,
        ua_suffix="SLO MultipartGET",
        swift_source="SLO",
        max_get_time=self.slo.max_get_time)
    try:
        segmented_iter.validate_first_segment()
    except (ListingIterError, SegmentError):
        # Copy from the SLO explanation in top of this file.
        # If any of the segments from the manifest are not found or
        # their Etag/Content Length no longer match the connection
        # will drop. In this case a 409 Conflict will be logged in
        # the proxy logs and the user will receive incomplete results.
        return HTTPConflict(request=req)

    response = Response(request=req, content_length=content_length,
                        headers=response_headers,
                        conditional_response=True,
                        app_iter=segmented_iter)
    if req.range:
        # A ranged body no longer matches the whole-object etag.
        response.headers.pop('Etag')
    return response
def handle_get_token(self, req):
    """
    Handles the various `request for token and service end point(s)` calls.
    There are various formats to support the various auth servers in the
    past. Examples::

        GET <auth-prefix>/v1/<act>/auth
            X-Auth-User: <act>:<usr>  or  X-Storage-User: <usr>
            X-Auth-Key: <key>         or  X-Storage-Pass: <key>
        GET <auth-prefix>/auth
            X-Auth-User: <act>:<usr>  or  X-Storage-User: <act>:<usr>
            X-Auth-Key: <key>         or  X-Storage-Pass: <key>
        GET <auth-prefix>/v1.0
            X-Auth-User: <act>:<usr>  or  X-Storage-User: <act>:<usr>
            X-Auth-Key: <key>         or  X-Storage-Pass: <key>

    On successful authentication, the response will have X-Auth-Token and
    X-Storage-Token set to the token to use with Swift and X-Storage-URL
    set to the URL to the default Swift cluster to use.

    :param req: The swob.Request to process.
    :returns: swob.Response, 2xx on success with data set as explained
        above.
    :raises Exception: if no memcache client is available in the WSGI
        environment (memcache is required for token storage).
    """
    # Validate the request info
    try:
        pathsegs = split_path(req.path_info, 1, 3, True)
    except ValueError:
        self.logger.increment('errors')
        return HTTPNotFound(request=req)
    if pathsegs[0] == 'v1' and pathsegs[2] == 'auth':
        # Old-style: account is in the path, user may omit the account
        # part in the header.
        account = pathsegs[1]
        user = req.headers.get('x-storage-user')
        if not user:
            user = req.headers.get('x-auth-user')
        if not user or ':' not in user:
            self.logger.increment('token_denied')
            auth = 'Swift realm="%s"' % account
            return HTTPUnauthorized(request=req,
                                    headers={'Www-Authenticate': auth})
        account2, user = user.split(':', 1)
        # The account in the header must match the account in the path.
        if account != account2:
            self.logger.increment('token_denied')
            auth = 'Swift realm="%s"' % account
            return HTTPUnauthorized(request=req,
                                    headers={'Www-Authenticate': auth})
        key = req.headers.get('x-storage-pass')
        if not key:
            key = req.headers.get('x-auth-key')
    elif pathsegs[0] in ('auth', 'v1.0'):
        # New-style: account comes from the <act>:<usr> header value.
        user = req.headers.get('x-auth-user')
        if not user:
            user = req.headers.get('x-storage-user')
        if not user or ':' not in user:
            self.logger.increment('token_denied')
            auth = 'Swift realm="unknown"'
            return HTTPUnauthorized(request=req,
                                    headers={'Www-Authenticate': auth})
        account, user = user.split(':', 1)
        key = req.headers.get('x-auth-key')
        if not key:
            key = req.headers.get('x-storage-pass')
    else:
        return HTTPBadRequest(request=req)
    if not all((account, user, key)):
        self.logger.increment('token_denied')
        realm = account or 'unknown'
        return HTTPUnauthorized(request=req,
                                headers={'Www-Authenticate':
                                         'Swift realm="%s"' % realm})
    # Authenticate user
    account_user = account + ':' + user
    if account_user not in self.users:
        self.logger.increment('token_denied')
        auth = 'Swift realm="%s"' % account
        return HTTPUnauthorized(request=req,
                                headers={'Www-Authenticate': auth})
    if self.users[account_user]['key'] != key:
        self.logger.increment('token_denied')
        auth = 'Swift realm="unknown"'
        return HTTPUnauthorized(request=req,
                                headers={'Www-Authenticate': auth})
    # Account id is the last path component of the configured storage URL.
    account_id = self.users[account_user]['url'].rsplit('/', 1)[-1]
    # Get memcache client
    memcache_client = cache_from_env(req.environ)
    if not memcache_client:
        raise Exception('Memcache required')
    # See if a token already exists and hasn't expired
    token = None
    memcache_user_key = '%s/user/%s' % (self.reseller_prefix, account_user)
    candidate_token = memcache_client.get(memcache_user_key)
    if candidate_token:
        memcache_token_key = \
            '%s/token/%s' % (self.reseller_prefix, candidate_token)
        cached_auth_data = memcache_client.get(memcache_token_key)
        if cached_auth_data:
            expires, old_groups = cached_auth_data
            old_groups = old_groups.split(',')
            new_groups = self._get_user_groups(account, account_user,
                                               account_id)
            # Reuse only if still valid AND group membership is unchanged.
            if expires > time() and \
                    set(old_groups) == set(new_groups.split(',')):
                token = candidate_token
    # Create a new token if one didn't exist
    if not token:
        # Generate new token
        token = '%stk%s' % (self.reseller_prefix, uuid4().hex)
        expires = time() + self.token_life
        groups = self._get_user_groups(account, account_user, account_id)
        # Save token
        memcache_token_key = '%s/token/%s' % (self.reseller_prefix, token)
        memcache_client.set(memcache_token_key, (expires, groups),
                            time=float(expires - time()))
        # Record the token with the user info for future use.
        memcache_user_key = \
            '%s/user/%s' % (self.reseller_prefix, account_user)
        memcache_client.set(memcache_user_key, token,
                            time=float(expires - time()))
    resp = Response(request=req, headers={
        'x-auth-token': token, 'x-storage-token': token})
    # Substitute the actual host into the configured URL template, then
    # force the configured scheme if one was set.
    url = self.users[account_user]['url'].replace('$HOST', resp.host_url)
    if self.storage_url_scheme != 'default':
        url = self.storage_url_scheme + ':' + url.split(':', 1)[1]
    resp.headers['x-storage-url'] = url
    return resp
def __call__(self, env, start_response):
    """Scripted fake WSGI app for tests: route each canned path prefix to
    its predetermined response, counting every call made."""
    self.calls += 1
    path = env['PATH_INFO']
    method = env['REQUEST_METHOD']

    def respond(status):
        # Build the scripted response and complete the WSGI call.
        return Response(status=status)(env, start_response)

    if path.startswith('/unauth/'):
        if path.endswith('/c/f_ok'):
            return respond('204 No Content')
        return respond(401)
    if path.startswith('/create_cont/'):
        if method == 'HEAD':
            return respond('404 Not Found')
        return respond('201 Created')
    if path.startswith('/create_cont_fail/'):
        if method == 'HEAD':
            return respond('403 Forbidden')
        return respond('404 Not Found')
    if path.startswith('/create_obj_unauth/'):
        if path.endswith('/cont'):
            return respond('201 Created')
        return respond(401)
    if path.startswith('/tar_works/'):
        if len(path) > self.max_pathlen:
            return respond('400 Bad Request')
        return respond('201 Created')
    if path.startswith('/tar_works_cont_head_fail/'):
        if method == 'HEAD':
            return respond('404 Not Found')
        if len(path) > 100:
            return respond('400 Bad Request')
        return respond('201 Created')
    if path.startswith('/delete_works/') and method == 'DELETE':
        # Record the path before deciding the outcome, so tests can
        # inspect every DELETE attempt.
        self.delete_paths.append(path)
        if len(path) > self.max_pathlen:
            return respond('400 Bad Request')
        if path.endswith('404'):
            return respond('404 Not Found')
        if path.endswith('badutf8'):
            return respond('412 Precondition Failed')
        return respond('204 No Content')
    if path.startswith('/delete_cont_fail/'):
        return respond('409 Conflict')
    if path.startswith('/broke/'):
        return respond('500 Internal Error')
    if path.startswith('/delete_cont_success_after_attempts/'):
        # Fail with 409 for the first del_cont_total_calls attempts,
        # then succeed.
        if self.del_cont_cur_call < self.del_cont_total_calls:
            self.del_cont_cur_call += 1
            return respond('409 Conflict')
        return respond('204 No Content')
def GET(self, request):
    # Experiment toggle for the Content-Based Access Control (CBAC) path;
    # note it is assigned BEFORE the string below, so that string is a
    # plain expression statement, not a real docstring.
    __CBAC__ = True
    """Handle HTTP GET requests for the Swift Object Server."""
    my_debug('testing', '@GET()')
    my_debug('#request headers', request.headers)
    #print my_debug
    device, partition, account, container, obj = \
        split_and_validate_path(request, 5, 5, True)
    # Caching allowed for public objects or when configured private.
    keep_cache = self.keep_cache_private or (
        'X-Auth-Token' not in request.headers and
        'X-Storage-Token' not in request.headers)
    try:
        disk_file = self.get_diskfile(
            device, partition, account, container, obj)
    except DiskFileDeviceUnavailable:
        return HTTPInsufficientStorage(drive=device, request=request)
    my_debug("locals()",locals())
    my_debug("original Diskfile", disk_file)
    try:
        with disk_file.open():
            metadata = disk_file.get_metadata()
            my_debug("metadata", metadata)
            obj_size = int(metadata['Content-Length'])
            file_x_ts = metadata['X-Timestamp']
            #json_policy = metadata['JSON-Policy']
            file_x_ts_flt = float(file_x_ts)
            # Recomputed (identically) inside the open() context.
            keep_cache = (self.keep_cache_private or
                          ('X-Auth-Token' not in request.headers and
                           'X-Storage-Token' not in request.headers))
            ''' if policy file is present & user_label is present apply JSONAC
            Additional thing to have:
            1. Check if policy from metadata is valid json
            2. Check if user_clearance is present is headers. Read keystone info.
            3. Pass JSONPath as headers
            4.
            '''
            if __CBAC__:
                my_debug("----CBAC Starts here-----", "***********")
                # Slurp the whole object into memory so it can be filtered.
                disk_file_iter = disk_file.reader(keep_cache=keep_cache)
                original_data = ""
                for chunk in iter(disk_file_iter):
                    original_data += chunk
                # NOTE(review): assumes keystone-style X-Roles header is
                # always present — missing header raises KeyError.
                userRoles = request.headers['X-Roles']
                # CBAC policy pieces stored as object metadata, if any.
                json_policy = metadata['X-Object-Meta-Jsonpolicy'] \
                    if metadata.has_key('X-Object-Meta-Jsonpolicy') else None
                user_labels = metadata['X-Object-Meta-Userlabels'] \
                    if metadata.has_key('X-Object-Meta-Userlabels') else None
                object_labels = metadata['X-Object-Meta-Objectlabels'] \
                    if metadata.has_key('X-Object-Meta-Objectlabels') else None
                object_labelling = metadata['X-Object-Meta-Jsonlabelling'] \
                    if metadata.has_key('X-Object-Meta-Jsonlabelling') else None
                if json_policy and user_labels and object_labels:
                    cbac_policy = {}
                    cbac_policy['user_labels'] = json.loads(user_labels)
                    cbac_policy['object_labels'] = json.loads(object_labels)
                    cbac_policy['policy'] = json.loads(json_policy)
                #user_clearance = ['manager']
                user_clearance = userRoles
                jsonpath = "/"
                filtered_content = ""
                my_debug("json_policy is", json_policy)
                my_debug("original data is ", original_data)
                #try:
                if json_policy:
                    if json_policy and user_clearance and jsonpath and object_labelling:
                        # Real filter call is disabled; a placeholder body
                        # is substituted instead.
                        '''filtered_content = ContentFilter(content_str=original_data,
                        labeling_policy_str=object_labelling,
                        user_clearance=user_clearance,
                        query=jsonpath,
                        cbac_policy=cbac_policy).apply()'''
                        filtered_content = "testing"
                        my_debug("#filtered content is ", filtered_content)
                    #else:
                        #my_debug("#content_filter not working", True)
                #except Exception as e:
                #else:
                #    my_debug("Exception with content filtering {}".format("e"), True)
                '''end of content -filter '''
                # Persist the filtered content as a temporary object "tmp"
                # in the same container, then re-open it for serving.
                tmp_file = self.tmp_disk_file(
                    request=request, device=device, partition=partition,
                    container=container, obj="tmp", data=filtered_content,
                    account=account)
                tmp_file = self.get_diskfile(
                    device, partition, account, container, "tmp")
                my_debug("tmp_file is", tmp_file)
                try:
                    with tmp_file.open():
                        tmp_metadata = tmp_file.get_metadata()
                        my_debug("with tmp_file.open()",
                                 tmp_file.reader(keep_cache=keep_cache))
                        response = Response(
                            app_iter=tmp_file.reader(keep_cache=keep_cache),
                            request=request, conditional_response=True)
                        response.headers['Content-Type'] = tmp_metadata.get(
                            'Content-Type', 'application/octet-stream')
                        # Length of the FILTERED body, not the original.
                        response.content_length = len(filtered_content)
                        for key, value in tmp_metadata.iteritems():
                            if is_user_meta('object', key) or \
                                    key.lower() in self.allowed_headers:
                                response.headers[key] = value
                        response.etag = tmp_metadata['ETag']
                        #response.last_modified = math.ceil(file_x_ts_flt)
                        #response.content_length = obj_size
                        my_debug("get_response", "test")
                        resp = request.get_response(response)
                        my_debug("response", resp)
                        my_debug("rsponse.__dict__", resp.__dict__)
                except (DiskFileNotExist, DiskFileQuarantined):
                    my_debug("tmp file is not found", "test")
                    resp = HTTPNotFound(request=request,
                                        conditional_response=True)
                my_debug("final resp object", resp)
                return resp
            # Non-CBAC path: serve the object as stock Swift would.
            disk_file.open()
            response = Response(
                app_iter=disk_file.reader(keep_cache=keep_cache),
                # instead of app_iter which reads from the file, content is given in body
                #body="Yes, you are smart!\n",
                request=request, conditional_response=True)
            response.headers['Content-Type'] = metadata.get(
                'Content-Type', 'application/octet-stream')
            for key, value in metadata.iteritems():
                if is_user_meta('object', key) or \
                        key.lower() in self.allowed_headers:
                    response.headers[key] = value
            response.etag = metadata['ETag']
            response.last_modified = math.ceil(file_x_ts_flt)
            response.content_length = obj_size
            #response.content_length = response.headers['Content-Length']
            try:
                response.content_encoding = metadata[
                    'Content-Encoding']
            except KeyError:
                pass
            response.headers['X-Timestamp'] = file_x_ts
            resp = request.get_response(response)
    except (DiskFileNotExist, DiskFileQuarantined):
        resp = HTTPNotFound(request=request, conditional_response=True)
    my_debug("resp object without cbac", resp.__dict__)
    return resp
def __call__(self, env, start_response):
    """Fake WSGI app: count the call, then return an empty response that
    carries the incoming environ on its ``environ`` attribute."""
    self.calls += 1
    response = Response()
    response.environ = env
    return response(env, start_response)
def _listing(self, env, start_response, prefix=None):
    """
    Sends an HTML object listing to the remote client.

    :param env: The original WSGI environment dict.
    :param start_response: The original WSGI start_response hook.
    :param prefix: Any prefix desired for the container listing.
    """
    if not config_true_value(self._listings):
        # Listings disabled: render a static "disabled" page as a 404.
        body = '<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 ' \
               'Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">\n' \
               '<html>\n' \
               '<head>\n' \
               '<title>Listing of %s</title>\n' % cgi.escape(env['PATH_INFO'])
        if self._listings_css:
            body += ' <link rel="stylesheet" type="text/css" ' \
                    'href="%s" />\n' % self._build_css_path(prefix or '')
        else:
            body += ' <style type="text/css">\n' \
                    ' h1 {font-size: 1em; font-weight: bold;}\n' \
                    ' p {font-size: 2}\n' \
                    ' </style>\n'
        body += '</head>\n<body>' \
                ' <h1>Web Listing Disabled</h1>' \
                ' <p>The owner of this web site has disabled web listing.' \
                ' <p>If you are the owner of this web site, you can enable' \
                ' web listing by setting X-Container-Meta-Web-Listings.</p>'
        if self._index:
            body += '<h1>Index File Not Found</h1>' \
                    ' <p>The owner of this web site has set ' \
                    ' <b>X-Container-Meta-Web-Index: %s</b>. ' \
                    ' However, this file is not found.</p>' % self._index
        body += ' </body>\n</html>\n'
        resp = HTTPNotFound(body=body)(env, self._start_response)
        return self._error_response(resp, env, start_response)
    # Fetch a JSON, delimiter-based listing of the container.
    tmp_env = make_env(
        env, 'GET', '/%s/%s/%s' % (
            self.version, self.account, self.container),
        self.agent, swift_source='SW')
    tmp_env['QUERY_STRING'] = 'delimiter=/&format=json'
    if prefix:
        tmp_env['QUERY_STRING'] += '&prefix=%s' % quote(prefix)
    else:
        prefix = ''
    resp = self._app_call(tmp_env)
    if not is_success(self._get_status_int()):
        return self._error_response(resp, env, start_response)
    listing = None
    body = ''.join(resp)
    if body:
        listing = json.loads(body)
    if not listing:
        # Empty listing is treated as "not found".
        resp = HTTPNotFound()(env, self._start_response)
        return self._error_response(resp, env, start_response)
    headers = {'Content-Type': 'text/html; charset=UTF-8'}
    body = '<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 ' \
           'Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">\n' \
           '<html>\n' \
           ' <head>\n' \
           ' <title>Listing of %s</title>\n' % \
           cgi.escape(env['PATH_INFO'])
    if self._listings_css:
        body += ' <link rel="stylesheet" type="text/css" ' \
                'href="%s" />\n' % (self._build_css_path(prefix))
    else:
        body += ' <style type="text/css">\n' \
                ' h1 {font-size: 1em; font-weight: bold;}\n' \
                ' th {text-align: left; padding: 0px 1em 0px 1em;}\n' \
                ' td {padding: 0px 1em 0px 1em;}\n' \
                ' a {text-decoration: none;}\n' \
                ' </style>\n'
    body += ' </head>\n' \
            ' <body>\n' \
            ' <h1 id="title">Listing of %s</h1>\n' \
            ' <table id="listing">\n' \
            ' <tr id="heading">\n' \
            ' <th class="colname">Name</th>\n' \
            ' <th class="colsize">Size</th>\n' \
            ' <th class="coldate">Date</th>\n' \
            ' </tr>\n' % \
            cgi.escape(env['PATH_INFO'])
    if prefix:
        # Parent-directory row when listing a sub-path.
        body += ' <tr id="parent" class="item">\n' \
                ' <td class="colname"><a href="../">../</a></td>\n' \
                ' <td class="colsize"> </td>\n' \
                ' <td class="coldate"> </td>\n' \
                ' </tr>\n'
    # Sub-directories first...
    for item in listing:
        if 'subdir' in item:
            subdir = item['subdir'].encode("utf-8")
            if prefix:
                subdir = subdir[len(prefix):]
            body += ' <tr class="item subdir">\n' \
                    ' <td class="colname"><a href="%s">%s</a></td>\n' \
                    ' <td class="colsize"> </td>\n' \
                    ' <td class="coldate"> </td>\n' \
                    ' </tr>\n' % \
                    (quote(subdir), cgi.escape(subdir))
    # ...then regular objects.
    for item in listing:
        if 'name' in item:
            name = item['name'].encode("utf-8")
            if prefix:
                name = name[len(prefix):]
            content_type = item['content_type'].encode("utf-8")
            bytes = human_readable(item['bytes'])
            # Trim fractional seconds and 'T' separator from ISO timestamp.
            last_modified = (cgi.escape(item['last_modified'].encode(
                "utf-8")).split('.')[0].replace('T', ' '))
            body += ' <tr class="item %s">\n' \
                    ' <td class="colname"><a href="%s">%s</a></td>\n' \
                    ' <td class="colsize">%s</td>\n' \
                    ' <td class="coldate">%s</td>\n' \
                    ' </tr>\n' % \
                    (' '.join('type-' + cgi.escape(t.lower(), quote=True)
                              for t in content_type.split('/')),
                     quote(name), cgi.escape(name), bytes, last_modified)
    body += ' </table>\n' \
            ' </body>\n' \
            '</html>\n'
    resp = Response(headers=headers, body=body)
    return resp(env, start_response)
def __call__(self, req):
    """
    Profile-management entry point (liteauth-style middleware).

    Handles GET/PUT on the profile endpoint: resolves the authenticated
    user, redeems invite codes, enforces the whitelist, and dispatches to
    get_swauth/put_swauth. Everything else is denied.

    :param req: swob.Request to process
    :returns: swob.Response
    """
    if not self.super_admin_key:
        # profile management is disabled
        return self.denied_response(req)
    try:
        (endpoint, _rest) = req.split_path(1, 2, True)
    except ValueError:
        return self.denied_response(req)
    if endpoint == self.profile_path:
        account_id = req.environ.get('REMOTE_USER', '')
        if not account_id:
            return HTTPUnauthorized(request=req)
        # NOTE(review): assumes REMOTE_USER is exactly 'user_id:email' —
        # extra colons would raise ValueError here.
        user_id, user_email = account_id.split(':')
        whitelist_id = get_data_from_url(self.whitelist_url,
                                         self.app,
                                         user_email,
                                         self.logger,
                                         req.environ)
        invite_code = req.headers.get('x-auth-invite-code', None)
        if invite_code and req.method == 'PUT':
            invite_id = get_data_from_url(self.invite_url,
                                          self.app,
                                          invite_code,
                                          self.logger,
                                          req.environ)
            if not invite_id:
                return self.denied_response(req)
            service = None
            if not invite_id.startswith('email:'):
                # Unredeemed invite: mark it as claimed by this email.
                if not store_data_in_url(self.invite_url,
                                         self.app,
                                         invite_code,
                                         'email:%s:%s'
                                         % (user_email, invite_id),
                                         req.environ):
                    return HTTPInternalServerError(request=req)
                service = invite_id
            elif 'email:%s:' % user_email in invite_id:
                # Invite already redeemed by this same email: reuse it.
                service = invite_id.split(':', 3)[2]
            if service and not whitelist_id:
                # Whitelist the user with the service from the invite.
                if not store_data_in_url(self.whitelist_url,
                                         self.app,
                                         user_email,
                                         service,
                                         req.environ):
                    return HTTPInternalServerError(request=req)
                whitelist_id = service
        if not whitelist_id:
            # 402 Payment Required doubles as "not whitelisted" here.
            return Response(request=req, status=402,
                            body='Account not in whitelist')
        if whitelist_id.startswith('service_'):
            # First use: expose the pending service and replace the
            # whitelist marker with the concrete user id.
            req.environ['liteauth.new_service'] = \
                whitelist_id.replace('service_', '', 1)
            if not store_data_in_url(self.whitelist_url,
                                     self.app,
                                     user_email,
                                     user_id,
                                     req.environ):
                return HTTPInternalServerError(request=req)
        if req.method == 'GET':
            return self.get_swauth(req, user_id, user_email)
        elif req.method == 'PUT':
            return self.put_swauth(req, user_id, user_email)
    return self.denied_response(req)
def REPLICATION(self, request):
    """Handle REPLICATION requests by streaming the ssync receiver's
    output as the response body."""
    receiver = ssync_receiver.Receiver(self, request)
    return Response(app_iter=receiver())
def GET(self, req):
    """
    Handle HTTP GET request.

    Returns a container listing in plain-text, JSON or XML form depending
    on the negotiated content type, along with container stat headers.

    :param req: swob.Request to process
    :returns: swob.Response (listing, or an error/empty response)
    """
    drive, part, account, container, obj = split_and_validate_path(
        req, 4, 5, True)
    path = get_param(req, 'path')
    prefix = get_param(req, 'prefix')
    delimiter = get_param(req, 'delimiter')
    if delimiter and (len(delimiter) > 1 or ord(delimiter) > 254):
        # delimiters can be made more flexible later
        return HTTPPreconditionFailed(body='Bad delimiter')
    marker = get_param(req, 'marker', '')
    end_marker = get_param(req, 'end_marker')
    limit = CONTAINER_LISTING_LIMIT
    given_limit = get_param(req, 'limit')
    if given_limit and given_limit.isdigit():
        limit = int(given_limit)
        if limit > CONTAINER_LISTING_LIMIT:
            return HTTPPreconditionFailed(
                request=req,
                body='Maximum limit is %d' % CONTAINER_LISTING_LIMIT)
    out_content_type = get_listing_content_type(req)
    if self.mount_check and not check_mount(self.root, drive):
        return HTTPInsufficientStorage(drive=drive, request=req)
    # Short pending timeout + stale reads: listings tolerate slightly
    # out-of-date data in exchange for availability.
    broker = self._get_container_broker(drive, part, account, container,
                                        pending_timeout=0.1,
                                        stale_reads_ok=True)
    if broker.is_deleted():
        return HTTPNotFound(request=req)
    info = broker.get_info()
    resp_headers = {
        'X-Container-Object-Count': info['object_count'],
        'X-Container-Bytes-Used': info['bytes_used'],
        'X-Timestamp': info['created_at'],
        'X-PUT-Timestamp': info['put_timestamp'],
    }
    # Pass through saved and user/sys metadata headers.
    for key, (value, timestamp) in broker.metadata.iteritems():
        if value and (key.lower() in self.save_headers or
                      is_sys_or_user_meta('container', key)):
            resp_headers[key] = value
    ret = Response(request=req, headers=resp_headers,
                   content_type=out_content_type, charset='utf-8')
    container_list = broker.list_objects_iter(limit, marker, end_marker,
                                              prefix, delimiter, path)
    if out_content_type == 'application/json':
        ret.body = json.dumps(
            [self.update_data_record(record)
             for record in container_list])
    elif out_content_type.endswith('/xml'):
        doc = Element('container', name=container.decode('utf-8'))
        for obj in container_list:
            record = self.update_data_record(obj)
            if 'subdir' in record:
                name = record['subdir'].decode('utf-8')
                sub = SubElement(doc, 'subdir', name=name)
                SubElement(sub, 'name').text = name
            else:
                obj_element = SubElement(doc, 'object')
                # Well-known fields first, in a fixed order...
                for field in ["name", "hash", "bytes", "content_type",
                              "last_modified"]:
                    SubElement(obj_element, field).text = str(
                        record.pop(field)).decode('utf-8')
                # ...then any remaining fields, alphabetically.
                for field in sorted(record):
                    SubElement(obj_element, field).text = str(
                        record[field]).decode('utf-8')
        # Normalize the XML declaration to double quotes.
        ret.body = tostring(doc, encoding='UTF-8').replace(
            "<?xml version='1.0' encoding='UTF-8'?>",
            '<?xml version="1.0" encoding="UTF-8"?>', 1)
    else:
        # Plain text: one object name per line; empty listings are 204.
        if not container_list:
            return HTTPNoContent(request=req, headers=resp_headers)
        ret.body = '\n'.join(rec[0] for rec in container_list) + '\n'
    return ret