def __init__(self):
    """Open (or reuse) the configured SQLite database and ensure the
    cache table exists before any cache operation runs."""
    super(SQLiteCache, self).__init__()
    table_name = config.get('sqlite_cache_table', 'beaker_cache')
    self._table = table_name
    # ':memory:' keeps the cache purely in RAM unless a db file path is configured.
    self.sql = MultiThreadOK(config.get('sqlite_dbfile', ':memory:'))
    ddl = ("CREATE TABLE IF NOT EXISTS %s (cache_key varchar(255) "
           "PRIMARY KEY NOT NULL, value text, expires datetime NOT NULL)")
    self.sql.execute(ddl % table_name)
def _throttle_check(*args, **kwargs):
    """Decorator body: run the wrapped handler, then apply rate limiting.

    Only string results are rate limited; any other result type is
    returned untouched without consulting the throttle.  Rate-limit
    headers are attached to the response; when either the hourly or the
    daily quota is exhausted the handler result is discarded and a
    429 response is returned instead.
    """
    res = fn(*args, **kwargs)
    # Fixed: was `type(res) == str`, which let str subclasses bypass
    # throttling entirely; isinstance covers them too.
    if not isinstance(res, str):
        return res
    identifier, auth_type = get_identifier(request)
    # Key-authenticated clients get more generous limits than plain-IP ones.
    if auth_type == 'IP':
        hourly_limit = config.get('throttle_hourly_rate_limit', 60 * 60)
        daily_limit = config.get('throttle_daily_rate_limit', 60 * 60 * 24)
    else:
        hourly_limit = config.get('throttle_key_hourly_rate_limit', 60 * 60 * 2)
        daily_limit = config.get('throttle_key_daily_rate_limit', 60 * 60 * 24 * 2)
    response.headers['X-ChEMBL-Authentication-Type'] = auth_type
    hourly_remaining, daily_remaining = throttle.get_remaining_rates(
        identifier, auth_type)
    response.headers['X-HourlyRateLimit-Limit'] = hourly_limit
    response.headers['X-DailyRateLimit-Limit'] = daily_limit
    response.headers['X-HourlyRateLimit-Remaining'] = hourly_remaining
    response.headers['X-DailyRateLimit-Remaining'] = daily_remaining
    # A remaining count of 0 (or None) in either window blocks the request.
    if not all((hourly_remaining, daily_remaining)):
        response.status = 429
        response.body = 'Too many requests, try again later'
        return response
    throttle.accessed(identifier, auth_type)
    return res
def __init__(self):
    """Load the four throttling rate limits from configuration.

    Key-authenticated clients ('key_*' settings) get higher default
    limits than anonymous per-IP clients.
    """
    cfg = config.get
    self.hourly_rate_limit = cfg('throttle_hourly_rate_limit', 60 * 60)
    self.daily_rate_limit = cfg('throttle_daily_rate_limit', 60 * 60 * 24)
    self.key_hourly_rate_limit = cfg('throttle_key_hourly_rate_limit', 60 * 60 * 2)
    self.key_daily_rate_limit = cfg('throttle_key_daily_rate_limit', 60 * 60 * 24 * 2)
def __init__(self, library, value_not_found_exception):
    """Common initialisation for memcached-backed cache classes.

    Parameters:
        library: the memcached client module to delegate to.
        value_not_found_exception: the exception type the underlying
            library raises for a missing key — ValueError for
            python-memcache, pylibmc.NotFound for pylibmc; cmemcache
            returns None without raising.
    """
    super(BaseMemcachedCache, self).__init__()
    servers_json = config.get('memcached_servers')
    if servers_json:
        # A JSON list of "host:port" strings.
        self._servers = json.loads(servers_json)
    else:
        self._servers = [config.get('memcached_server', '127.0.0.1:11211')]
    self.LibraryValueNotFoundException = value_not_found_exception
    self._lib = library
def main(conf_path=None):
    """Build (and optionally run) the WSGI application.

    When *conf_path* is given, the caller supplies configuration and the
    constructed app object is returned.  Without it, the function runs
    standalone: the config path comes from the command line and a
    development server is started.
    """
    standalone = False
    if conf_path:
        config.load_config(conf_path)
    else:
        standalone = True
        parser = OptionParser()
        parser.add_option("-p", "--config_path", dest="config_path",
                          help="path to config file", default="beaker.conf")
        options, _args = parser.parse_args()
        config.load_config(options.config_path)
    loadApps(json.loads(config.get('installed_apps', '[]')))
    loadPlugins(app, json.loads(config.get('plugins', '[]')))
    server = config.get('server_middleware', 'tornado')
    kwargs = {}
    if server == 'gunicorn':
        # gunicorn needs an explicit worker count; fall back to 4 on bad config.
        try:
            kwargs['workers'] = int(config.get('workers', '4'))
        except Exception as e:
            print(e)
            kwargs['workers'] = 4
    if not standalone:
        return app
    run(app=app,
        host=config.get('bottle_host', 'localhost'),
        port=config.get('bottle_port', '8080'),
        debug=config.get('debug', True),
        server=server,
        **kwargs)
def __init__(self):
    """Read the common cache tuning parameters from configuration.

    Sets: default_timeout (seconds, or None to never expire),
    _max_entries (cull threshold), _cull_frequency (fraction of entries
    dropped when culling), key_prefix, and key_func.
    """
    timeout = config.get('cache_timeout', 300)
    # None is a valid setting (no expiry); anything else must coerce to int.
    if timeout is not None:
        try:
            timeout = int(timeout)
        except (ValueError, TypeError):
            timeout = 300
    self.default_timeout = timeout
    max_entries = config.get('cache_max_entries', 300000)
    try:
        self._max_entries = int(max_entries)
    except (ValueError, TypeError):
        # Fixed: the fallback was 300, inconsistent with the 300000
        # default used when the key is absent; use the same default.
        self._max_entries = 300000
    cull_frequency = config.get('cache_cull_frequency', 3)
    try:
        self._cull_frequency = int(cull_frequency)
    except (ValueError, TypeError):
        self._cull_frequency = 3
    self.key_prefix = config.get('cache_key_prefix', '')
    self.key_func = get_key_func(config.get('cache_key_function', None))
def image2ctabView(img, params):
    """Convert a structure image to a connection table via OSRA.

    *params* may carry the 'jaggy'/'adaptive' flags and an 'unpaper'
    iteration count; the OSRA binary location can be overridden in the
    configuration.
    """
    kwargs = {
        'jaggy': _parseFlag(params.get('jaggy', False)),
        'adaptive': _parseFlag(params.get('adaptive', False)),
        'unpaper': int(params.get('unpaper', 0)),
    }
    # Prefer /usr/local/bin/osra, falling back to the distro location.
    default_osra = '/usr/local/bin/osra'
    if not os.path.exists(default_osra):
        default_osra = '/usr/bin/osra'
    return _image2ctab(img,
                       config.get('osra_binaries_location', default_osra),
                       **kwargs)
def _check_restrictions(*args, **kwargs):
    """Decorator body: reject over-sized requests and enforce the IP
    white/black lists before delegating to the wrapped handler."""
    if config.get('request_max_size') and request.content_length > int(
            config['request_max_size']):
        response.status = 413
        response.body = 'Request size larger than %s bytes' % config[
            'request_max_size']
        return response
    if request.remote_addr:
        ip = IPAddress(request.remote_addr)
        # A configured whitelist takes precedence; the blacklist is only
        # consulted when no whitelist exists.
        if IP_WHITELIST:
            if not any(ip in network for network in IP_WHITELIST):
                response.status = 403
                return response
        elif IP_BLACKLIST:
            if any(ip in network for network in IP_BLACKLIST):
                response.status = 403
                return response
    return fn(*args, **kwargs)
def _caching(*args, **kwargs):
    # Decorator body: serve the wrapped handler's string result from the
    # cache when possible, otherwise invoke it and store the result.
    # Timing/hit headers are added when 'debug' is on.
    start = time.time()
    if not cache:
        # No cache backend configured: always call through.
        from_cache = False
        res = fn(*args, **kwargs)
    else:
        # Cache key = JSON of positional/keyword args + b64 of every query
        # parameter pair + HTTP method + path.
        # NOTE(review): b64encode expects bytes — with str params this looks
        # Python-2 era; confirm it still works on the deployed Python version.
        key = json.dumps(args) + json.dumps(kwargs) + json.dumps([(base64.b64encode(k), base64.b64encode(v)) for k, v in list(request.params.items())]) + \
            request.method + request.path
        if len(request.files):
            # Fold every uploaded file's name and raw content into the key,
            # rewinding each stream so the handler can still read it.
            for file_name, file_content in list(request.files.items()):
                key += file_name + file_content.file.read()
                file_content.file.seek(0)
        else:
            # No uploads: the raw request body distinguishes the call.
            key += request.body.read()
            request.body.seek(0)
        try:
            cached_content, content_type = cache.get(key, (None, None))
        except Exception as e:
            # Best effort: a broken cache backend must not break the request.
            print(e)
            cached_content, content_type = (None, None)
        if cached_content:
            # Cache hit: replay the stored body and its content type.
            if content_type:
                response.headers['Content-Type'] = content_type
            from_cache = True
            res = cached_content
        else:
            from_cache = False
            res = fn(*args, **kwargs)
            # Only plain string responses are cached (streams/dicts are not).
            if type(res) == str:
                content_type = response.headers.get('Content-Type')
                try:
                    cache.set(key, (res, content_type))
                except Exception as e:
                    print(e)
    if config.get('debug', True):
        # Diagnostic headers: whether the cache was hit and total handler time.
        end = time.time()
        response.headers['X-ChEMBL-in-cache'] = from_cache
        response.headers['X-ChEMBL-retrieval-time'] = end - start
    return res
def __init__(self):
    """Raise bottle's in-memory request body limit to the configured
    maximum request size, when one is set."""
    if not config.get('request_max_size'):
        return
    BaseRequest.MEMFILE_MAX = int(config['request_max_size'])
__author__ = 'mnowotka' #----------------------------------------------------------------------------------------------------------------------- import json from bottle import BaseRequest, request, response from netaddr import IPNetwork, IPAddress from beaker import config #----------------------------------------------------------------------------------------------------------------------- IP_WHITELIST = config.get('ip_whitelist') try: if IP_WHITELIST: IP_WHITELIST = [IPNetwork(ip) for ip in json.loads(IP_WHITELIST)] except: pass IP_BLACKLIST = config.get('ip_blacklist') try: if IP_BLACKLIST: IP_BLACKLIST = [IPNetwork(ip) for ip in json.loads(IP_BLACKLIST)] except: pass #----------------------------------------------------------------------------------------------------------------------- class Restrictions(object):
from datetime import datetime import pytz import base64 from bottle import request, response from beaker.throttle import throttle from beaker import config from beaker.utils import import_class DATE_FORMAT = '%y-%m-%d %H%M%S' AES = None secret_key = None try: from Crypto.Cipher import AES from Crypto import Random secret_key = config.get('throttling_secret_key') except ImportError: pass #----------------------------------------------------------------------------------------------------------------------- def verify_key(key): if not (AES or secret_key): return False try: iv = Random.new().read(AES.block_size) aes = AES.new(secret_key, AES.MODE_CBC, iv) now = datetime.utcnow().replace(tzinfo=pytz.utc) decoded_key = base64.standard_b64decode(key) decrypted_key = aes.decrypt(decoded_key)
from beaker import config
from beaker.utils import import_class
from beaker.throttle.backends.base import BaseThrottle

# Instantiate the configured throttle backend at import time; any failure
# leaves `throttle` as None and throttling is skipped.
throttle = None
throttle_class = config.get(
    'throttle_backend',
    'beaker.throttle.backends.cacheThrottle.CacheThrottle')
if throttle_class:
    try:
        candidate = import_class(throttle_class)()
    except ImportError:
        print('Error importing %s' % throttle_class)
    else:
        if isinstance(candidate, BaseThrottle):
            throttle = candidate
        else:
            print(
                "Configured throttle class (%s) is not a BaseThrottle instance, skipping throttling." % throttle_class)
__author__ = 'mnowotka'

from beaker import config
from beaker.utils import import_class
from beaker.cache.backends.base import BaseCache

# Instantiate the configured cache backend at import time; any failure
# leaves `cache` as None and caching is skipped.
cache = None
cache_class = config.get('cache_backend')
if cache_class:
    try:
        candidate = import_class(cache_class)()
    except ImportError:
        print('Error importing %s' % cache_class)
    else:
        if isinstance(candidate, BaseCache):
            cache = candidate
        else:
            print("Configured cache class (%s) is not a BaseCache instance, skipping caching." % cache_class)
def __init__(self):
    """Set up a named in-process cache backed by module-level dicts, so
    that instances configured with the same name share storage."""
    BaseCache.__init__(self)
    instance_name = config.get('cache_loc_mem_instance_name', 'beaker_cache')
    self._cache = _caches.setdefault(instance_name, {})
    self._expire_info = _expire_info.setdefault(instance_name, {})
    self._lock = _locks.setdefault(instance_name, RWLock())
apps = json.loads(config.get('installed_apps', '[]')) plugins = json.loads(config.get('plugins', '[]')) loadApps(apps) loadPlugins(app, plugins) server = config.get('server_middleware', 'tornado') kwargs = {} if server == 'gunicorn': try: kwargs['workers'] = int(config.get('workers', '4')) except Exception as e: print(e) kwargs['workers'] = 4 if standalone: run(app=app, host=config.get('bottle_host', 'localhost'), port=config.get('bottle_port', '8080'), debug=config.get('debug', True), server=server, **kwargs) else: return app if __name__ == "__main__": main() else: apps = json.loads(config.get('installed_apps', '[]')) loadApps(apps) application = app # ----------------------------------------------------------------------------------------------------------------------
def __init__(self):
    """Read all MongoDB connection and behaviour settings from the
    configuration.  Only stores them; no connection is opened here."""
    super(MongoDBCache, self).__init__()
    self._host = config.get('mongo_host', 'localhost')
    self._port = int(config.get('mongo_port', 27017))
    self._database = config.get('mongo_db', 'beaker_cache')
    # Authentication database defaults to the cache database itself.
    self._auth_database = config.get('mongo_auth_db', self._database)
    # Replica-set hosts/name; presumably left unset for a standalone
    # server — TODO confirm against the connection code.
    self._rshosts = config.get('mongo_rshosts')
    self._rsname = config.get('mongo_rsname')
    self._user = config.get('mongo_user', None)
    self._compression = config.get('mongo_compression', False)
    self._server_selection_timeout_ms = config.get(
        'mongo_server_selection_timeout_ms', 30000)
    self._password = config.get('mongo_pass', None)
    self._socket_timeout_ms = config.get('mongo_socket_timeout_ms', None)
    self._max_time_ms = config.get('mongo_max_time_ms', 2000)
    self._connect_timeout_ms = config.get('mongo_connect_timeout_ms', 20000)
    self.compression_level = config.get('mongo_compression_level', 6)
    # Read-preference tag sets are stored as a JSON list in the config.
    self._tag_sets = json.loads(config.get('mongo_tag_sets', '[]'))
    self._read_preference = config.get("mongo_read_preference")
    self._collection = config.get('mongo_collection', 'cache')
    self.log = logging.getLogger(__name__)
__author__ = 'mnowotka'

from collections import OrderedDict
from bottle import response, static_file
from beaker import __version__ as version
import json
from beaker import app, config
from beaker import STATIC_ROOT
from beaker import PARAM_REGEX

# Optional JSON list of method names to hide from the generated docs;
# any problem with the setting degrades to "exclude nothing".
EXCLUDED_METHODS = config.get('excluded_methods')
try:
    if EXCLUDED_METHODS:
        EXCLUDED_METHODS = json.loads(EXCLUDED_METHODS) or []
    else:
        EXCLUDED_METHODS = []
except Exception:
    # Narrowed from a bare except so SystemExit/KeyboardInterrupt are not
    # swallowed at import time.
    EXCLUDED_METHODS = []

# ----------------------------------------------------------------------------------------------------------------------

@app.route('/docs')
def docs():
    """Serve the static API documentation page."""
    return static_file('docs.html', root=STATIC_ROOT)

# ----------------------------------------------------------------------------------------------------------------------
__author__ = 'mnowotka'

#-----------------------------------------------------------------------------------------------------------------------

from datetime import datetime
import pytz
import base64
from beaker import config

DATE_FORMAT = '%y-%m-%d %H%M%S'

AES = None
# Initialised up front so generate_key() cannot hit a NameError when the
# crypto import below fails (the sibling verification module does the same).
secret_key = None
try:
    # Fixed: the previous relative form `from .Crypto.Cipher import AES`
    # pointed inside this package and always raised ImportError, silently
    # disabling key generation.
    from Crypto.Cipher import AES
    secret_key = config.get('throttling_secret_key')
except ImportError:
    pass

#-----------------------------------------------------------------------------------------------------------------------

def generate_key(validity):
    """Return a base64 token encoding the window [now, now + validity].

    Returns None when crypto support or the secret key is unavailable.
    NOTE(review): AES.new() in CBC mode without an explicit IV uses a
    random one that is not embedded in the token — confirm the consumer
    can actually decrypt these tokens.
    """
    if AES and secret_key:
        aes = AES.new(secret_key, AES.MODE_CBC)
        now = datetime.utcnow().replace(tzinfo=pytz.utc)
        valid_from = now
        valid_to = now + validity
        input_string = "%s\t\t%s" % tuple(
            [date.strftime(DATE_FORMAT) for date in (valid_from, valid_to)])
        return base64.standard_b64encode(aes.encrypt(input_string))
# ---------------------------------------------------------------------------------------------------------------------- import base64 import time import json from bottle import request, response from beaker import config from beaker.cache import cache if not cache: print( "Caching plugin enabled but no cache backend configured, cashing will be skipped..." ) if cache and config.get('clear_cache_on_start', False): cache.clear() # ---------------------------------------------------------------------------------------------------------------------- class Caching(object): name = 'caching' api = 2 def apply(self, fn, _): def _caching(*args, **kwargs): start = time.time() if not cache: from_cache = False res = fn(*args, **kwargs)