def get(self, operation, volume_id_str, file_id_str, *args):
    """
    Handle a read (GET) request against a file.

    Validates the operation name, the hex-encoded file ID, the gateway/volume
    pair, and reader authorization, then runs and benchmarks the requested
    API call.  On any validation failure an HTTP error status is sent and
    the handler returns early.
    """
    # valid operation?  (membership test directly on the dict — no .keys())
    if operation not in MSFileHandler.get_api_calls:
        response_user_error(self, 401)
        return

    # valid file ID?  file_id_str is hex-encoded
    file_id = -1
    try:
        file_id = MSEntry.unserialize_id(int(file_id_str, 16))
    except Exception:
        # narrowed from a bare except: don't swallow SystemExit/KeyboardInterrupt
        response_user_error(self, 400)
        return

    # valid gateway and volume?
    gateway, volume, response_timing = response_begin(self, volume_id_str, fail_if_no_auth_header=False)
    if volume is None:
        # response_begin already generated the error response
        return

    # reader allowed?
    allowed = file_read_auth(gateway, volume)
    if not allowed:
        response_user_error(self, 403)
        return

    # parse CGI arguments
    status, kw = MSFileHandler.parse_cgi(operation, self.request, self.cgi_args)
    if status != 200:
        response_user_error(self, status)
        return

    benchmark_header = MSFileHandler.get_benchmark_headers[operation]
    api_call = MSFileHandler.get_api_calls[operation]
    timing = {}

    # run and benchmark the operation
    try:
        data = benchmark(benchmark_header, timing,
                         lambda: api_call(gateway, volume, file_id, args, kw))
    except storagetypes.RequestDeadlineExceededError:
        # deadline exceeded; bail out quickly (exception object was unused)
        response_user_error(self, 503)
        return
def get(self, operation, volume_id_str, file_id_str, *args):
    """
    Handle a read (GET) request against a file.

    Validates the operation name, the hex-encoded file ID, the gateway/volume
    pair, and reader authorization, then runs and benchmarks the requested
    API call.  On any validation failure an HTTP error status is sent and
    the handler returns early.
    """
    # valid operation?  (membership test directly on the dict — no .keys())
    if operation not in MSFileHandler.get_api_calls:
        response_user_error(self, 401)
        return

    # valid file ID?  file_id_str is hex-encoded
    file_id = -1
    try:
        file_id = MSEntry.unserialize_id(int(file_id_str, 16))
    except Exception:
        # narrowed from a bare except: don't swallow SystemExit/KeyboardInterrupt
        response_user_error(self, 400)
        return

    # valid gateway and volume?
    gateway, volume, response_timing = response_begin(self, volume_id_str, fail_if_no_auth_header=False)
    if volume is None:
        # response_begin already generated the error response
        return

    # reader allowed?
    allowed = file_read_auth(gateway, volume)
    if not allowed:
        response_user_error(self, 403)
        return

    # parse CGI arguments
    status, kw = MSFileHandler.parse_cgi(operation, self.request, self.cgi_args)
    if status != 200:
        response_user_error(self, status)
        return

    benchmark_header = MSFileHandler.get_benchmark_headers[operation]
    api_call = MSFileHandler.get_api_calls[operation]
    timing = {}

    # run and benchmark the operation
    try:
        data = benchmark(benchmark_header, timing,
                         lambda: api_call(gateway, volume, file_id, args, kw))
    except storagetypes.RequestDeadlineExceededError:
        # deadline exceeded; bail out quickly (exception object was unused)
        response_user_error(self, 503)
        return
def post(self, volume_id_str):
    """
    Handle a write (POST) request against a volume.

    Parses a signed update set, authenticates and authorizes the calling
    gateway, validates every update up front, then applies and benchmarks
    each update in order, accumulating per-update return codes in the reply.
    Stops early with 503 on deadline, or records -EREMOTEIO on any other
    failure.
    """
    file_post_start = storagetypes.get_time()

    update_set = file_update_parse(self)
    if update_set is None:
        # malformed data
        response_user_error(self, 202, "%s\n" % (-errno.EINVAL))
        return

    # begin the response
    gateway, volume, response_timing = response_begin(self, volume_id_str)
    if volume is None:
        # response_begin already generated the error response
        return

    allowed = file_update_auth(gateway, volume)
    if not allowed:
        log.error("Failed to authenticate")
        response_user_error(self, 403)
        return

    # verify the message integrity and authenticity
    if not gateway.verify_message(update_set):
        # authentication failure
        response_user_error(self, 401, "Signature verification failed")
        return

    # TODO: rate-limit

    # populate the reply
    reply = file_update_init_response(volume)
    status = 200

    # validate requests before processing them
    for update in update_set.updates:
        if update.type not in MSFileHandler.post_validators:
            logging.error("Unrecognized update %s" % update.type)
            response_user_error(self, 501)
            return

        valid, failure_status = MSFileHandler.post_validators[update.type](gateway, update)
        if not valid:
            log.error("Failed to validate update")
            response_user_error(self, failure_status, "Argument validation failed")
            return

    timing = {}

    # carry out the operation(s), and count them
    num_processed = 0
    types = {}
    for update in update_set.updates:
        # tally update types (dict membership instead of deprecated has_key)
        if update.type not in types:
            types[update.type] = 0
        types[update.type] += 1

        # these are guaranteed to be non-None...
        api_call = MSFileHandler.post_api_calls.get(update.type, None)
        benchmark_header = MSFileHandler.post_benchmark_headers.get(update.type, None)

        # run the API call, but benchmark it too
        try:
            rc = benchmark(benchmark_header, timing,
                           lambda: api_call(reply, gateway, volume, update))
            reply.errors.append(rc)
            num_processed += 1
        except storagetypes.RequestDeadlineExceededError:
            # quickly now...
            response_user_error(self, 503)
            return
        except Exception as e:
            logging.exception(e)
            reply.error = -errno.EREMOTEIO
            break
def get(self, operation, volume_id_str, volume_version_str, cert_version_str, file_id_str, *args):
    """
    Handle a versioned read (GET) request against a file.

    In addition to the basic operation/file-ID/authorization checks, verifies
    that the caller holds the current volume version and cert-bundle version,
    replying 410 (Gone) when either is stale, then runs and benchmarks the
    requested API call.
    """
    # valid operation?  (membership test directly on the dict — no .keys())
    if operation not in MSFileHandler.get_api_calls:
        logging.error("Unrecognized operation '%s'" % operation)
        response_user_error(self, 401)
        return

    # valid file ID?  file_id_str is hex-encoded
    file_id = -1
    try:
        file_id = MSEntry.unserialize_id(int(file_id_str, 16))
    except Exception:
        # narrowed from a bare except: don't swallow SystemExit/KeyboardInterrupt
        response_user_error(self, 400)
        return

    # valid volume ID?
    volume_id = -1
    try:
        volume_id = int(volume_id_str)
    except (TypeError, ValueError):
        response_user_error(self, 400)
        return

    # get gateway, volume, and timing...
    volume, gateway, status, response_timing = response_begin(self, volume_id)
    if volume is None:
        response_user_error(self, status)
        return

    # reader allowed?
    allowed = file_read_auth(gateway, volume)
    if not allowed:
        response_user_error(self, 403)
        return

    # reader has fresh volume cert?
    if volume.version != int(volume_version_str):
        # stale
        log.error("volume.version = %s, volume_version_str = %s" % (volume.version, volume_version_str))
        response_user_error(self, 410, "Stale volume version")
        return

    # reader has fresh cert bundle?
    if volume.cert_bundle is not None and volume.cert_bundle.mtime_sec != int(cert_version_str):
        # stale
        log.error("volume.cert_bundle.mtime_sec = %s, cert_version_str = %s" % (volume.cert_bundle.mtime_sec, cert_version_str))
        response_user_error(self, 410, "Stale gateway version")
        return

    # parse CGI arguments
    status, kw = MSFileHandler.parse_cgi(operation, self.request, self.cgi_args)
    if status != 200:
        response_user_error(self, status)
        return

    benchmark_header = MSFileHandler.get_benchmark_headers[operation]
    api_call = MSFileHandler.get_api_calls[operation]
    timing = {}

    # run and benchmark the operation
    try:
        data = benchmark(benchmark_header, timing,
                         lambda: api_call(gateway, volume, file_id, args, kw))
    except storagetypes.RequestDeadlineExceededError:
        # deadline exceeded; bail out quickly (exception object was unused)
        response_user_error(self, 503)
        return
num_processed = 0 types = {} for request in update_set.requests: if not types.has_key(request.type): types[request.type] = 0 types[request.type] += 1 # these are guaranteed to be non-None... api_call = MSFileHandler.post_api_calls.get( request.type, None ) benchmark_header = MSFileHandler.post_benchmark_headers.get( request.type, None ) # run the API call, but benchmark it too try: rc = benchmark( benchmark_header, timing, lambda: api_call( reply, gateway, volume, request ) ) reply.errors.append( rc ) num_processed += 1 except storagetypes.RequestDeadlineExceededError, de: # quickly now... response_user_error( self, 503 ) return except Exception, e: logging.exception(e) reply.error = -errno.EREMOTEIO break logging.info("Processed %s requests (%s)" % (num_processed, types))
def get(self, operation, volume_id_str, file_id_str, *args):
    """
    Handle a paged read (GET) request against a file.

    Beyond the basic operation/file-ID/authorization checks, requires a
    non-negative 'page_id' CGI argument and honors an optional
    'file_ids_only' flag (nonzero means return only file IDs), then runs
    and benchmarks the requested API call.
    """
    # valid operation?  (membership test directly on the dict — no .keys())
    if operation not in MSFileHandler.get_api_calls:
        response_user_error(self, 401)
        return

    # valid file ID?  file_id_str is hex-encoded
    file_id = -1
    try:
        file_id = MSEntry.unserialize_id(int(file_id_str, 16))
    except Exception:
        # narrowed from a bare except: don't swallow SystemExit/KeyboardInterrupt
        response_user_error(self, 400)
        return

    # valid gateway and volume?
    gateway, volume, response_timing = response_begin(self, volume_id_str, fail_if_no_auth_header=False)
    if volume is None:
        # response_begin already generated the error response
        return

    # reader allowed?
    allowed = file_read_auth(gateway, volume)
    if not allowed:
        response_user_error(self, 403)
        return

    # do we have a requested page for this request?
    page_id = self.request.get('page_id')
    file_ids_only = self.request.get('file_ids_only')

    if page_id is None or len(page_id) == 0:
        log.error("No page ID given")
        response_user_error(self, 400)
        return

    if file_ids_only is None or len(file_ids_only) == 0:
        file_ids_only = 0
    else:
        try:
            file_ids_only = int(file_ids_only)
        except (TypeError, ValueError):
            log.error("Invalid file_ids_only value '%s'" % file_ids_only)
            response_user_error(self, 400)
            return

    # coerce to bool: any nonzero value means "file IDs only"
    file_ids_only = (file_ids_only != 0)

    # validate page_id: must be a non-negative integer.
    # NOTE: raise instead of assert — assert is stripped under python -O,
    # which would have silently skipped the >= 0 check.
    try:
        page_id = int(page_id)
        if page_id < 0:
            raise ValueError("Invalid page ID value")
    except Exception:
        # needs to be a non-negative number
        log.error("Invalid page ID '%s'" % page_id)
        response_user_error(self, 400)
        return

    benchmark_header = MSFileHandler.get_benchmark_headers[operation]
    api_call = MSFileHandler.get_api_calls[operation]
    timing = {}

    # run and benchmark the operation
    try:
        data = benchmark(benchmark_header, timing,
                         lambda: api_call(gateway, volume, file_id, args, page_id, file_ids_only))
    except storagetypes.RequestDeadlineExceededError:
        # deadline exceeded; bail out quickly (exception object was unused)
        response_user_error(self, 503)
        return
def get(self, operation, volume_id_str, volume_version_str, cert_version_str, file_id_str, *args):
    """
    Handle a versioned read (GET) request against a file.

    In addition to the basic operation/file-ID/authorization checks, verifies
    that the caller holds the current volume version and cert-bundle version,
    replying 410 (Gone) when either is stale, then runs and benchmarks the
    requested API call.
    """
    # valid operation?  (membership test directly on the dict — no .keys())
    if operation not in MSFileHandler.get_api_calls:
        logging.error("Unrecognized operation '%s'" % operation)
        response_user_error(self, 401)
        return

    # valid file ID?  file_id_str is hex-encoded
    file_id = -1
    try:
        file_id = MSEntry.unserialize_id(int(file_id_str, 16))
    except Exception:
        # narrowed from a bare except: don't swallow SystemExit/KeyboardInterrupt
        response_user_error(self, 400)
        return

    # valid volume ID?
    volume_id = -1
    try:
        volume_id = int(volume_id_str)
    except (TypeError, ValueError):
        response_user_error(self, 400)
        return

    # get gateway, volume, and timing...
    volume, gateway, status, response_timing = response_begin(self, volume_id)
    if volume is None:
        response_user_error(self, status)
        return

    # reader allowed?
    allowed = file_read_auth(gateway, volume)
    if not allowed:
        response_user_error(self, 403)
        return

    # reader has fresh volume cert?
    if volume.version != int(volume_version_str):
        # stale
        log.error("volume.version = %s, volume_version_str = %s" %
                  (volume.version, volume_version_str))
        response_user_error(self, 410, "Stale volume version")
        return

    # reader has fresh cert bundle?
    if volume.cert_bundle is not None and volume.cert_bundle.mtime_sec != int(cert_version_str):
        # stale
        log.error("volume.cert_bundle.mtime_sec = %s, cert_version_str = %s" %
                  (volume.cert_bundle.mtime_sec, cert_version_str))
        response_user_error(self, 410, "Stale gateway version")
        return

    # parse CGI arguments
    status, kw = MSFileHandler.parse_cgi(operation, self.request, self.cgi_args)
    if status != 200:
        response_user_error(self, status)
        return

    benchmark_header = MSFileHandler.get_benchmark_headers[operation]
    api_call = MSFileHandler.get_api_calls[operation]
    timing = {}

    # run and benchmark the operation
    try:
        data = benchmark(benchmark_header, timing,
                         lambda: api_call(gateway, volume, file_id, args, kw))
    except storagetypes.RequestDeadlineExceededError:
        # deadline exceeded; bail out quickly (exception object was unused)
        response_user_error(self, 503)
        return
for request in update_set.requests: if not types.has_key(request.type): types[request.type] = 0 types[request.type] += 1 # these are guaranteed to be non-None... api_call = MSFileHandler.post_api_calls.get(request.type, None) benchmark_header = MSFileHandler.post_benchmark_headers.get( request.type, None) # run the API call, but benchmark it too try: rc = benchmark( benchmark_header, timing, lambda: api_call(reply, gateway, volume, request)) reply.errors.append(rc) num_processed += 1 except storagetypes.RequestDeadlineExceededError, de: # quickly now... response_user_error(self, 503) return except Exception, e: logging.exception(e) reply.error = -errno.EREMOTEIO break
def post(self, volume_id_str):
    """
    Handle a write (POST) request against a volume.

    Parses a signed update set, authenticates and authorizes the calling
    gateway, validates every update up front, then applies and benchmarks
    each update in order, accumulating per-update return codes in the reply.
    Stops early with 503 on deadline, or records -EREMOTEIO on any other
    failure.
    """
    file_post_start = storagetypes.get_time()

    update_set = file_update_parse(self)
    if update_set is None:
        # malformed data
        response_user_error(self, 202, "%s\n" % (-errno.EINVAL))
        return

    # begin the response
    gateway, volume, response_timing = response_begin(self, volume_id_str)
    if volume is None:
        # response_begin already generated the error response
        return

    allowed = file_update_auth(gateway, volume)
    if not allowed:
        log.error("Failed to authenticate")
        response_user_error(self, 403)
        return

    # verify the message integrity and authenticity
    if not gateway.verify_message(update_set):
        # authentication failure
        response_user_error(self, 401, "Signature verification failed")
        return

    # TODO: rate-limit

    # populate the reply
    reply = file_update_init_response(volume)
    status = 200

    # validate requests before processing them
    for update in update_set.updates:
        if update.type not in MSFileHandler.post_validators:
            logging.error("Unrecognized update %s" % update.type)
            response_user_error(self, 501)
            return

        valid, failure_status = MSFileHandler.post_validators[update.type](gateway, update)
        if not valid:
            log.error("Failed to validate update")
            response_user_error(self, failure_status, "Argument validation failed")
            return

    timing = {}

    # carry out the operation(s), and count them
    num_processed = 0
    types = {}
    for update in update_set.updates:
        # tally update types (dict membership instead of deprecated has_key)
        if update.type not in types:
            types[update.type] = 0
        types[update.type] += 1

        # these are guaranteed to be non-None...
        api_call = MSFileHandler.post_api_calls.get(update.type, None)
        benchmark_header = MSFileHandler.post_benchmark_headers.get(update.type, None)

        # run the API call, but benchmark it too
        try:
            rc = benchmark(benchmark_header, timing,
                           lambda: api_call(reply, gateway, volume, update))
            reply.errors.append(rc)
            num_processed += 1
        except storagetypes.RequestDeadlineExceededError:
            # quickly now...
            response_user_error(self, 503)
            return
        except Exception as e:
            logging.exception(e)
            reply.error = -errno.EREMOTEIO
            break