class RateLimiter(object):

    _RATE_LIMIT_NAMESPACE = 'REST_API_RATE_LIMIT_v' + Config.API_VERSION_MINOR
    SHORT_WINDOW_SIZE = 10  # 10 seconds
    LONG_WINDOW_SIZE = 60 * 60  # 1 hour
    DEFAULT_CALL_WEIGHT = 10

    def __init__(self, r_server=None):
        self.get_auth_token_payload()
        self.set_limits()
        self.set_unique_requester_id()
        self.set_short_window_key()
        self.set_long_window_key()

    def set_limits(self):
        self._auth_key = AuthKey()
        if self.auth_token_payload:
            self._auth_key = AuthKey(
                app_name=self.auth_token_payload['app_name'],
                secret=self.auth_token_payload['secret'],
            )
            self._auth_key.get_loaded_data()
        self.short_window_rate = int(self._auth_key.short_window_rate)
        self.long_window_rate = int(self._auth_key.long_window_rate)

    def set_short_window_key(self):
        # dropping the last digit of the seconds buckets requests
        # into fixed 10-second windows
        self.short_window_key = '|'.join((
            self._RATE_LIMIT_NAMESPACE,
            self.get_remote_addr(),
            self.unique_id,
            time.strftime("%H%M%S")[:-1],
            # r.environ.get('HTTP_USER_AGENT')
        ))

    def set_long_window_key(self):
        # day + hour buckets requests into fixed 1-hour windows
        self.long_window_key = '|'.join((
            self._RATE_LIMIT_NAMESPACE,
            self.get_remote_addr(),
            self.unique_id,
            time.strftime("%d%H"),
            # r.environ.get('HTTP_USER_AGENT')
        ))

    def set_unique_requester_id(self):
        self.unique_id = self._auth_key.id

    def get_auth_token_payload(self):
        self.auth_token_payload = get_token_payload()
        if not self.auth_token_payload:
            if 'app_name' in request.form and \
                    'secret' in request.form:
                self.auth_token_payload = dict(
                    app_name=request.form['app_name'],
                    secret=request.form['secret'])

    @staticmethod
    def get_remote_addr():
        return get_remote_addr()
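
# The after-request hooks further down call increment_call_rate(), which is
# not shown in this section. A minimal sketch of how it could work against
# the window keys built above, assuming a redis-py client stored in
# current_app.extensions['redis-service'] (this is an illustration, not the
# actual implementation):

from flask import current_app


def increment_call_rate(weight, rate_limiter):
    r_server = current_app.extensions['redis-service']
    pipe = r_server.pipeline()
    # bump both counters atomically; refreshing the expiry just ensures the
    # bucket keys clean themselves up once their window has passed
    pipe.incrby(rate_limiter.short_window_key, weight)
    pipe.expire(rate_limiter.short_window_key, RateLimiter.SHORT_WINDOW_SIZE)
    pipe.incrby(rate_limiter.long_window_key, weight)
    pipe.expire(rate_limiter.long_window_key, RateLimiter.LONG_WINDOW_SIZE)
    results = pipe.execute()
    return {'short': int(results[0]), 'long': int(results[2])}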
class GenericTestCase(unittest.TestCase):
    _AUTO_GET_TOKEN = 'auto'

    def setUp(self):
        auth_credentials = {
            'domain': '',
            'reference': '*****@*****.**',
            'app_name': 'api-test',
            'short_window_rate': '10000',
            'secret': 'YNVukca767p49Czt7jOt42U3R6t1FscD',
            'users_allowed': 'true',
            'long_window_rate': '6000000'
        }
        self.auth_key = AuthKey(**auth_credentials)
        self.app = create_app('testing')
        self.app.extensions['redis-user'].hmset(self.auth_key.get_key(),
                                                self.auth_key.__dict__)
        self.app_context = self.app.app_context()
        self.app_context.push()
        self.client = self.app.test_client()
        self.host = 'http://' + self.app_context.url_adapter.get_host('')
        self.token = None
        self.update_token()
        # log = logging.getLogger('dd.datadogpy')
        # log.setLevel(logging.DEBUG)

    def tearDown(self):
        self.app_context.pop()
        # hdel expects the field names as positional arguments, so unpack them
        self.app.extensions['redis-user'].hdel(self.auth_key.get_key(),
                                               *self.auth_key.__dict__.keys())
        self.app.extensions['redis-user'].delete(self.auth_key.get_key())

    def _make_token_request(self, expire=10 * 60):
        return self._make_request(
            '/api/latest/public/auth/request_token',
            data={
                'app_name': self.auth_key.app_name,
                'secret': self.auth_key.secret,
                'uid': str(uuid.uuid4()),
                'password': '******',
                'expiry': expire
            },
            headers=dict(Authorization="Basic Y3R0djpkajhtaXhpamswNGpwZGc="))

    def get_token(self, expire=10 * 60):
        return json.loads(
            self._make_token_request(expire).data.decode('utf-8'))['token']

    def _make_request(self,
                      path,
                      data=None,
                      method="GET",
                      token=None,
                      headers=None,
                      rate_limit_fail=False,
                      **kwargs):
        # use None instead of a shared mutable default argument
        params = dict(method=method)
        params['data'] = data if data is not None else {}
        # params['data']['nocache'] = True
        if headers is not None:
            params['headers'] = headers
        if token is not None:
            if token == self._AUTO_GET_TOKEN:
                self.update_token()
                token = self.token
            if 'headers' not in params:
                params['headers'] = {}
            params['headers']['Auth-Token'] = token
        params.update(**kwargs)
        if not rate_limit_fail:
            # retry until the request is no longer rate limited
            status_code = 429
            while status_code == 429:
                response = self.client.open(path, **params)
                status_code = response.status_code
                if status_code == 429:
                    time.sleep(10)
        else:
            response = self.client.open(path, **params)
        return response

    def update_token(self):
        if self.token:
            token_valid_response = self._make_request(
                '/api/latest/public/auth/validate_token',
                headers={'Auth-Token': self.token})
            if token_valid_response.status_code == 200:
                return
            if token_valid_response.status_code == 419:
                pass
        self.token = self.get_token()
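
# A minimal example of how a concrete test could build on GenericTestCase:
# pass _AUTO_GET_TOKEN so _make_request obtains (and refreshes) an Auth-Token
# automatically before hitting the endpoint. The endpoint path here is
# illustrative, not taken from the code above.

class PingTestCase(GenericTestCase):

    def test_ping(self):
        response = self._make_request('/api/latest/public/utils/ping',
                                      token=self._AUTO_GET_TOKEN)
        self.assertEqual(response.status_code, 200)


if __name__ == '__main__':
    unittest.main()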
def create_app(config_name):
    app = Flask(__name__, static_url_path='')
    # This first loads the configuration from e.g. config['development'],
    # which corresponds to the DevelopmentConfig class in config.py.
    app.config.from_object(config[config_name])
    # Then you can override the values with the contents of the file the
    # OPENTARGETS_API_LOCAL_SETTINGS environment variable points to, e.g.:
    #
    #   $ export OPENTARGETS_API_LOCAL_SETTINGS=/path/to/settings.cfg
    #
    # where settings.cfg looks like:
    #
    #   DEBUG = False
    #   SECRET_KEY = 'foo'
    #
    app.config.from_envvar("OPENTARGETS_API_LOCAL_SETTINGS", silent=True)
    config[config_name].init_app(app)
    api_version = app.config['API_VERSION']
    api_version_minor = app.config['API_VERSION_MINOR']

    app.logger.info('looking for elasticsearch at: %s' %
                    app.config['ELASTICSEARCH_URL'])

    app.extensions['redis-core'] = Redis(app.config['REDIS_SERVER_PATH'],
                                         db=0)  # served data
    app.extensions['redis-service'] = Redis(
        app.config['REDIS_SERVER_PATH'],
        db=1)  # cache, rate limit and internal things
    app.extensions['redis-user'] = Redis(app.config['REDIS_SERVER_PATH'],
                                         db=2)  # user info
    '''setup cache'''
    app.extensions['redis-service'].config_set('save', '')
    app.extensions['redis-service'].config_set('appendonly', 'no')
    icache = InternalCache(app.extensions['redis-service'],
                           str(api_version_minor))
    ip2org = IP2Org(icache)
    if app.config['ELASTICSEARCH_URL']:
        es = Elasticsearch(
            app.config['ELASTICSEARCH_URL'],
            # # sniff before doing anything
            # sniff_on_start=True,
            # # refresh nodes after a node fails to respond
            # sniff_on_connection_fail=True,
            # # and also every 60 seconds
            # sniffer_timeout=60
            timeout=60 * 20,
            maxsize=32,
        )
    else:
        es = None
    '''elasticsearch handlers'''
    app.extensions['esquery'] = esQuery(
        es,
        DataTypes(app),
        DataSourceScoring(app),
        index_data=app.config['ELASTICSEARCH_DATA_INDEX_NAME'],
        index_efo=app.config['ELASTICSEARCH_EFO_LABEL_INDEX_NAME'],
        index_eco=app.config['ELASTICSEARCH_ECO_INDEX_NAME'],
        index_genename=app.config['ELASTICSEARCH_GENE_NAME_INDEX_NAME'],
        index_expression=app.config['ELASTICSEARCH_EXPRESSION_INDEX_NAME'],
        index_reactome=app.config['ELASTICSEARCH_REACTOME_INDEX_NAME'],
        index_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_INDEX_NAME'],
        index_search=app.config['ELASTICSEARCH_DATA_SEARCH_INDEX_NAME'],
        index_relation=app.config['ELASTICSEARCH_DATA_RELATION_INDEX_NAME'],
        docname_data=app.config['ELASTICSEARCH_DATA_DOC_NAME'],
        docname_efo=app.config['ELASTICSEARCH_EFO_LABEL_DOC_NAME'],
        docname_eco=app.config['ELASTICSEARCH_ECO_DOC_NAME'],
        docname_genename=app.config['ELASTICSEARCH_GENE_NAME_DOC_NAME'],
        docname_expression=app.config['ELASTICSEARCH_EXPRESSION_DOC_NAME'],
        docname_reactome=app.config['ELASTICSEARCH_REACTOME_REACTION_DOC_NAME'],
        docname_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_DOC_NAME'],
        docname_search=app.config['ELASTICSEARCH_DATA_SEARCH_DOC_NAME'],
        # docname_search_target=app.config['ELASTICSEARCH_DATA_SEARCH_TARGET_DOC_NAME'],
        # docname_search_disease=app.config['ELASTICSEARCH_DATA_SEARCH_DISEASE_DOC_NAME'],
        docname_relation=app.config['ELASTICSEARCH_DATA_RELATION_DOC_NAME'],
        log_level=app.logger.getEffectiveLevel(),
        cache=icache)
    app.extensions['es_access_store'] = esStore(
        es,
        eventlog_index=app.config['ELASTICSEARCH_LOG_EVENT_INDEX_NAME'],
        ip2org=ip2org,
    )
    '''mixpanel handlers'''
    if Config.MIXPANEL_TOKEN:
        mp = Mixpanel(Config.MIXPANEL_TOKEN, consumer=AsyncBufferedConsumer())
        app.extensions['mixpanel'] = mp
        app.extensions['mp_access_store'] = MixPanelStore(
            mp,
            ip2org=ip2org,
        )
    app.extensions['proxy'] = ProxyHandler(
        allowed_targets=app.config['PROXY_SETTINGS']['allowed_targets'],
        allowed_domains=app.config['PROXY_SETTINGS']['allowed_domains'],
        allowed_request_domains=app.config['PROXY_SETTINGS']['allowed_request_domains'])
    # basepath = app.config['PUBLIC_API_BASE_PATH'] + api_version
    # cors = CORS(app, resources=r'/api/*', allow_headers='Content-Type,Auth-Token')
    '''define cache'''
    # cache = Cache(config={'CACHE_TYPE': 'simple'})
    # cache.init_app(latest_blueprint)
    # latest_blueprint.cache = cache
    # latest_blueprint.extensions['cache'] = cache
    # app.cache = SimpleCache()
    app.cache = FileSystemCache('/tmp/cttv-rest-api-cache',
                                threshold=100000,
                                default_timeout=60 * 60,
                                mode=0o777)  # octal; plain 777 would be a different mode
    '''Set usage limiter'''
    # limiter = Limiter(global_limits=["2000 per hour", "20 per second"])
    # limiter.init_app(app)  # use redis to store limits
    '''Load api keys in redis'''
    rate_limit_file = app.config['USAGE_LIMIT_PATH']
    if not os.path.exists(rate_limit_file):
        rate_limit_file = '../' + rate_limit_file
    csvfile = None
    if Config.GITHUB_AUTH_TOKEN:
        r = requests.get(
            'https://api.github.com/repos/opentargets/rest_api_auth/contents/rate_limit.csv',
            headers={'Authorization': 'token %s' % Config.GITHUB_AUTH_TOKEN,
                     'Accept': 'application/vnd.github.v3.raw'})
        if r.ok:
            csvfile = r.text.split('\n')
            app.logger.info('Retrieved rate limit file from github remote')
        else:
            app.logger.warning('Cannot retrieve rate limit file from remote, SKIPPED!')
    elif os.path.exists(rate_limit_file):
        csvfile = open(rate_limit_file)
        app.logger.info('Using dummy rate limit file')

    if csvfile is None:
        app.logger.error('cannot find rate limit file: %s. RATE LIMIT QUOTA LOAD SKIPPED!'
                         % rate_limit_file)
    else:
        reader = csv.DictReader(csvfile)
        for row in reader:
            auth_key = AuthKey(**row)
            app.extensions['redis-user'].hmset(auth_key.get_key(),
                                               auth_key.__dict__)
        try:
            csvfile.close()
        except AttributeError:
            pass  # the github copy is a list of lines, not a file object
        app.logger.info('successfully loaded rate limit file')

    '''load ip name resolution'''
    ip_resolver = defaultdict(lambda: "PUBLIC")
    ip_list_file = app.config['IP_RESOLVER_LIST_PATH']
    if not os.path.exists(ip_list_file):
        ip_list_file = '../' + ip_list_file
    if os.path.exists(ip_list_file):
        with open(ip_list_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                net = IPNetwork(row['ip'])
                ip_resolver[net] = row['org']
    else:
        app.logger.warning('cannot find IP list for IP resolver. '
                           'All traffic will be logged as PUBLIC')
    app.config['IP_RESOLVER'] = ip_resolver

    '''compress http response'''
    compress = Compress()
    compress.init_app(app)

    latest_blueprint = Blueprint('latest', __name__)
    current_version_blueprint = Blueprint(str(api_version), __name__)
    current_minor_version_blueprint = Blueprint(str(api_version_minor), __name__)

    specpath = '/cttv'
    if app.config['PROFILE']:
        from werkzeug.contrib.profiler import ProfilerMiddleware
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])

    '''set the right prefixes'''
    create_api(latest_blueprint, api_version, specpath)
    create_api(current_version_blueprint, api_version, specpath)
    create_api(current_minor_version_blueprint, api_version_minor, specpath)
    # app.register_blueprint(latest_blueprint, url_prefix='/latest/platform')
    app.register_blueprint(current_version_blueprint,
                           url_prefix='/v' + str(api_version) + '/platform')
    app.register_blueprint(current_minor_version_blueprint,
                           url_prefix='/v' + str(api_version_minor) + '/platform')

    '''serve the static docs'''
    try:
        '''NOTE: this file gets created only at deployment time'''
        openapi_def = yaml.load(open('app/static/openapi.yaml', 'r'))
        app.logger.info('parsing swagger from static/openapi.yaml')
    except IOError:
        '''if we are not deployed, then simply use the template'''
        openapi_def = yaml.load(open('openapi.template.yaml', 'r'))
        app.logger.error('parsing swagger from openapi.template.yaml')
    with open("api-description.md", "r") as f:
        desc = f.read()
    openapi_def['info']['description'] = desc
    openapi_def['basePath'] = '/v%s' % str(api_version)

    @app.route('/v%s/platform/swagger' % str(api_version))
    def serve_swagger(apiversion=api_version):
        return jsonify(openapi_def)

    @app.route('/v%s/platform/docs' % str(api_version))
    def render_redoc(apiversion=api_version):
        return render_template('docs.html', api_version=apiversion)

    '''pre and post-request'''
    @app.before_request
    def before_request():
        g.request_start = datetime.now()

    @app.after_request
    def after(resp):
        try:
            rate_limiter = RateLimiter()
            now = datetime.now()
            took = (now - g.request_start).total_seconds() * 1000
            if took > 500:
                # set the cache to last one hour for each second spent in the request
                cache_time = str(int(3600 * took))
                resp.headers.add('X-Accel-Expires', cache_time)
            took = int(round(took))
            LogApiCallWeight(took)
            # if took < RateLimiter.DEFAULT_CALL_WEIGHT:
            #     took = RateLimiter.DEFAULT_CALL_WEIGHT
            current_values = increment_call_rate(took, rate_limiter)
            now = datetime.now()
            ceil10s = round(ceil_dt_to_future_time(now, 10), 2)
            ceil1h = round(ceil_dt_to_future_time(now, 3600), 2)
            usage_left_10s = rate_limiter.short_window_rate - current_values['short']
            usage_left_1h = rate_limiter.long_window_rate - current_values['long']
            min_ceil = ceil10s
            if usage_left_1h < 0:
                min_ceil = ceil1h
            if (usage_left_10s < 0) or (usage_left_1h < 0):
                resp.headers.add('Retry-After', min_ceil)
            resp.headers.add('X-API-Took', took)
            resp.headers.add('X-Usage-Limit-10s', rate_limiter.short_window_rate)
            resp.headers.add('X-Usage-Limit-1h', rate_limiter.long_window_rate)
            resp.headers.add('X-Usage-Remaining-10s', usage_left_10s)
            resp.headers.add('X-Usage-Remaining-1h', usage_left_1h)
            # resp.headers.add('X-Usage-Limit-Reset-10s', ceil10s)
            # resp.headers.add('X-Usage-Limit-Reset-1h', ceil1h)
            resp.headers.add('Access-Control-Allow-Origin', '*')
            resp.headers.add('Access-Control-Allow-Headers',
                             'Content-Type,Auth-Token')
            resp.headers.add('Access-Control-Allow-Methods',
                             'GET,PUT,POST,DELETE,OPTIONS')
            if do_not_cache(request):  # do not cache in the browser
                resp.headers.add('Cache-Control',
                                 "no-cache, must-revalidate, max-age=0")
            else:
                resp.headers.add('Cache-Control',
                                 "no-transform, public, max-age=%i, s-maxage=%i" %
                                 (took * 1800 / 1000, took * 9000 / 1000))
            return resp
        except Exception as e:
            app.logger.exception('failed request teardown function: %s', str(e))
            return resp

    # Override the HTTP exception handler.
    app.handle_http_exception = get_http_exception_handler(app)
    return app
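
# ceil_dt_to_future_time() is used in the after-request hook to compute the
# Retry-After value, but its definition is not shown in this section. A
# plausible sketch, assuming it returns the seconds remaining until the end
# of the current fixed window of the given size:

import time


def ceil_dt_to_future_time(dt, window_seconds):
    # seconds left until the current `window_seconds`-wide window rolls over
    epoch = time.mktime(dt.timetuple())
    return window_seconds - (epoch % window_seconds)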
def create_app(config_name):
    app = Flask(__name__, static_url_path='')
    app.config.from_object(config[config_name])
    app.config.from_envvar("OPENTARGETS_API_LOCAL_SETTINGS", silent=True)
    config[config_name].init_app(app)
    api_version = app.config['API_VERSION']
    api_version_minor = app.config['API_VERSION_MINOR']

    # log_level = logging.INFO
    # if app.config['DEBUG']:
    #     log_level = logging.DEBUG

    # Flask has a default logger which works well and pushes to stderr.
    # If you want to add different handlers (to file, or logstash, or whatever)
    # you can use code similar to the one below and set the error level accordingly.

    # logHandler = logging.StreamHandler()
    # formatter = jsonlogger.JsonFormatter()
    # logHandler.setFormatter(formatter)
    # logHandler.setLevel(logging.INFO)
    # app.logger.addHandler(logHandler)

    # or for LOGSTASH
    # app.logger.addHandler(logstash.LogstashHandler(app.config['LOGSTASH_HOST'], app.config['LOGSTASH_PORT'], version=1))

    app.logger.info('looking for elasticsearch at: %s' %
                    app.config['ELASTICSEARCH_URL'])
    print('looking for elasticsearch at: %s' % app.config['ELASTICSEARCH_URL'])

    app.extensions['redis-core'] = Redis(app.config['REDIS_SERVER_PATH'],
                                         db=0)  # served data
    app.extensions['redis-service'] = Redis(
        app.config['REDIS_SERVER_PATH'],
        db=1)  # cache, rate limit and internal things
    app.extensions['redis-user'] = Redis(app.config['REDIS_SERVER_PATH'],
                                         db=2)  # user info
    '''setup cache'''
    app.extensions['redis-service'].config_set('save', '')
    app.extensions['redis-service'].config_set('appendonly', 'no')
    icache = InternalCache(app.extensions['redis-service'],
                           str(api_version_minor))
    ip2org = IP2Org(icache)
    es = Elasticsearch(
        app.config['ELASTICSEARCH_URL'],
        # # sniff before doing anything
        # sniff_on_start=True,
        # # refresh nodes after a node fails to respond
        # sniff_on_connection_fail=True,
        # # and also every 60 seconds
        # sniffer_timeout=60
        timeout=60 * 20,
        maxsize=100,
    )
    '''elasticsearch handlers'''
    app.extensions['esquery'] = esQuery(
        es,
        DataTypes(app),
        DataSourceScoring(app),
        index_data=app.config['ELASTICSEARCH_DATA_INDEX_NAME'],
        index_efo=app.config['ELASTICSEARCH_EFO_LABEL_INDEX_NAME'],
        index_eco=app.config['ELASTICSEARCH_ECO_INDEX_NAME'],
        index_genename=app.config['ELASTICSEARCH_GENE_NAME_INDEX_NAME'],
        index_expression=app.config['ELASTICSEARCH_EXPRESSION_INDEX_NAME'],
        index_reactome=app.config['ELASTICSEARCH_REACTOME_INDEX_NAME'],
        index_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_INDEX_NAME'],
        index_search=app.config['ELASTICSEARCH_DATA_SEARCH_INDEX_NAME'],
        index_relation=app.config['ELASTICSEARCH_DATA_RELATION_INDEX_NAME'],
        docname_data=app.config['ELASTICSEARCH_DATA_DOC_NAME'],
        docname_efo=app.config['ELASTICSEARCH_EFO_LABEL_DOC_NAME'],
        docname_eco=app.config['ELASTICSEARCH_ECO_DOC_NAME'],
        docname_genename=app.config['ELASTICSEARCH_GENE_NAME_DOC_NAME'],
        docname_expression=app.config['ELASTICSEARCH_EXPRESSION_DOC_NAME'],
        docname_reactome=app.config['ELASTICSEARCH_REACTOME_REACTION_DOC_NAME'],
        docname_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_DOC_NAME'],
        docname_search=app.config['ELASTICSEARCH_DATA_SEARCH_DOC_NAME'],
        # docname_search_target=app.config['ELASTICSEARCH_DATA_SEARCH_TARGET_DOC_NAME'],
        # docname_search_disease=app.config['ELASTICSEARCH_DATA_SEARCH_DISEASE_DOC_NAME'],
        docname_relation=app.config['ELASTICSEARCH_DATA_RELATION_DOC_NAME'],
        log_level=app.logger.getEffectiveLevel(),
        cache=icache)
    app.extensions['es_access_store'] = esStore(
        es,
        eventlog_index=app.config['ELASTICSEARCH_LOG_EVENT_INDEX_NAME'],
        ip2org=ip2org,
    )
    '''mixpanel handlers'''
    if Config.MIXPANEL_TOKEN:
        mp = Mixpanel(Config.MIXPANEL_TOKEN, consumer=AsyncBufferedConsumer())
        app.extensions['mixpanel'] = mp
        app.extensions['mp_access_store'] = MixPanelStore(
            mp,
            ip2org=ip2org,
        )
    app.extensions['proxy'] = ProxyHandler(
        allowed_targets=app.config['PROXY_SETTINGS']['allowed_targets'],
        allowed_domains=app.config['PROXY_SETTINGS']['allowed_domains'],
        allowed_request_domains=app.config['PROXY_SETTINGS']['allowed_request_domains'])
    basepath = app.config['PUBLIC_API_BASE_PATH'] + api_version
    # cors = CORS(app, resources=r'/api/*', allow_headers='Content-Type,Auth-Token')
    '''define cache'''
    # cache = Cache(config={'CACHE_TYPE': 'simple'})
    # cache.init_app(latest_blueprint)
    # latest_blueprint.cache = cache
    # latest_blueprint.extensions['cache'] = cache
    # app.cache = SimpleCache()
    app.cache = FileSystemCache('/tmp/cttv-rest-api-cache',
                                threshold=100000,
                                default_timeout=60 * 60,
                                mode=0o777)  # octal; plain 777 would be a different mode
    '''Set usage limiter'''
    # limiter = Limiter(global_limits=["2000 per hour", "20 per second"])
    # limiter.init_app(app)  # use redis to store limits
    '''Load api keys in redis'''
    rate_limit_file = app.config['USAGE_LIMIT_PATH']
    if not os.path.exists(rate_limit_file):
        rate_limit_file = '../' + rate_limit_file
    if os.path.exists(rate_limit_file):
        with open(rate_limit_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                auth_key = AuthKey(**row)
                app.extensions['redis-user'].hmset(auth_key.get_key(),
                                                   auth_key.__dict__)
        print('INFO - successfully loaded rate limit file')
    else:
        print('ERROR - cannot find rate limit file')
        app.logger.error(
            'cannot find rate limit file: %s. RATE LIMIT QUOTA LOAD SKIPPED!'
            % rate_limit_file)

    '''load ip name resolution'''
    ip_resolver = defaultdict(lambda: "PUBLIC")
    ip_list_file = app.config['IP_RESOLVER_LIST_PATH']
    if not os.path.exists(ip_list_file):
        ip_list_file = '../' + ip_list_file
    if os.path.exists(ip_list_file):
        with open(ip_list_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                net = IPNetwork(row['ip'])
                ip_resolver[net] = row['org']
    else:
        app.logger.warning('cannot find IP list for IP resolver. '
                           'All traffic will be logged as PUBLIC')
    app.config['IP_RESOLVER'] = ip_resolver

    '''compress http response'''
    compress = Compress()
    compress.init_app(app)

    latest_blueprint = Blueprint('latest', __name__)
    current_version_blueprint = Blueprint(str(api_version), __name__)
    current_minor_version_blueprint = Blueprint(str(api_version_minor), __name__)

    specpath = '/cttv'
    if app.config['PROFILE']:
        from werkzeug.contrib.profiler import ProfilerMiddleware
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])

    create_api(latest_blueprint, api_version, specpath)
    create_api(current_version_blueprint, api_version, specpath)
    create_api(current_minor_version_blueprint, api_version_minor, specpath)

    app.register_blueprint(latest_blueprint, url_prefix='/api/latest')
    app.register_blueprint(current_version_blueprint,
                           url_prefix='/api/' + str(api_version))
    app.register_blueprint(current_minor_version_blueprint,
                           url_prefix='/api/' + str(api_version_minor))

    @app.route('/api-docs/%s' % str(api_version_minor))
    def docs_current_minor_version():
        return redirect('/api/swagger/index.html')

    @app.route('/api-docs/%s' % str(api_version))
    def docs_current_version():
        return redirect('/api/swagger/index.html')

    @app.route('/api-docs')
    def docs():
        return redirect('/api/swagger/index.html')

    def serve_swagger():
        return app.send_static_file('docs/swagger/swagger.yaml')

    @app.route('/api/docs/swagger.yaml')
    def send_swagger():
        return serve_swagger()

    @app.route('/api/latest/docs/swagger.yaml')
    def send_swagger_latest():
        return serve_swagger()

    @app.route('/api/' + str(api_version) + '/docs/swagger.yaml')
    def send_swagger_current_version():
        return serve_swagger()

    @app.before_request
    def before_request():
        g.request_start = datetime.now()

    @app.after_request
    def after(resp):
        rate_limiter = RateLimiter()
        now = datetime.now()
        took = (now - g.request_start).total_seconds() * 1000
        if took > 500:
            # set the cache to last one hour for each second spent in the request
            cache_time = str(int(3600 * took))
            resp.headers.add('X-Accel-Expires', cache_time)
        took = int(round(took))
        LogApiCallWeight(took)
        # if took < RateLimiter.DEFAULT_CALL_WEIGHT:
        #     took = RateLimiter.DEFAULT_CALL_WEIGHT
        current_values = increment_call_rate(took, rate_limiter)
        now = datetime.now()
        ceil10s = round(ceil_dt_to_future_time(now, 10), 2)
        ceil1h = round(ceil_dt_to_future_time(now, 3600), 2)
        usage_left_10s = rate_limiter.short_window_rate - current_values['short']
        usage_left_1h = rate_limiter.long_window_rate - current_values['long']
        min_ceil = ceil10s
        if usage_left_1h < 0:
            min_ceil = ceil1h
        if (usage_left_10s < 0) or (usage_left_1h < 0):
            resp.headers.add('Retry-After', min_ceil)
        resp.headers.add('X-API-Took', took)
        resp.headers.add('X-Usage-Limit-10s', rate_limiter.short_window_rate)
        resp.headers.add('X-Usage-Limit-1h', rate_limiter.long_window_rate)
        resp.headers.add('X-Usage-Remaining-10s', usage_left_10s)
        resp.headers.add('X-Usage-Remaining-1h', usage_left_1h)
        # resp.headers.add('X-Usage-Limit-Reset-10s', ceil10s)
        # resp.headers.add('X-Usage-Limit-Reset-1h', ceil1h)
        resp.headers.add('Access-Control-Allow-Origin', '*')
        resp.headers.add('Access-Control-Allow-Headers',
                         'Content-Type,Auth-Token')
        if do_not_cache(request):  # do not cache in the browser
            resp.headers.add('Cache-Control',
                             "no-cache, must-revalidate, max-age=0")
        else:
            resp.headers.add('Cache-Control',
                             "no-transform, public, max-age=%i, s-maxage=%i" %
                             (took * 1800 / 1000, took * 9000 / 1000))
        return resp

    return app
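
# A typical entry point for the factory above (a sketch; the 'development'
# config name is assumed to be defined in config.py, and host/port are
# illustrative):

if __name__ == '__main__':
    app = create_app('development')
    app.run(host='0.0.0.0', port=8080)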