Example #1
def redis():
    rd = Redis()
    keys = set(rd.keys())
    yield rd
    to_del = [k for k in rd.keys() if k not in keys]
    if to_del:
        rd.delete(*to_del)
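The generator above matches pytest's yield-fixture pattern: keys that exist before the test are snapshotted, the test runs at the yield, and any keys created during the test are deleted on teardown. A minimal usage sketch (the @pytest.fixture decorator and the test function are assumptions, not shown in the original):

import pytest
from redislite import Redis

@pytest.fixture
def redis():
    rd = Redis()                     # embedded server, no external setup
    keys = set(rd.keys())            # snapshot pre-existing keys
    yield rd                         # test body runs here
    to_del = [k for k in rd.keys() if k not in keys]
    if to_del:
        rd.delete(*to_del)           # remove only keys the test created

def test_roundtrip(redis):
    redis.set("k", "v")
    assert redis.get("k") == b"v"    # redis-py returns bytes by default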
Example #2
    def __init__(self, db_path):
        os.makedirs(os.path.dirname(db_path), exist_ok=True)

        try:
            if not getattr(self, 'conn', None):  # avoid AttributeError when conn is unset
                self.conn = Redis(db_path)
        except Exception as exc:
            raise Exception("Unable to create connection with Redis server") from exc
Example #3
    def __init__(self, dbPath):
        """
        Parameters
        ----------
        dbPath : str
            path of the redis DB
        """

        self.conn = Redis(dbPath)
Example #4
def tests():

    b = Banco(lambda: Redis('/tmp/redis.db'))
    b.set(1, 2)
    assert b.get(1) == 2, "failed to retrieve b[1]: %s" % str(b.get(1))
    print("assert b.get(1) == 2", str(b.get(1)))
    b.set(1, 3)
    assert b.get(1) == 3, "failed to retrieve b[1]: %s" % str(b.get(1))
    c = b.save(4)
    assert b.get(c) == 4, "failed to retrieve b[c]: %s" % str(b.get(c))
Example #5
    def __init__(self):
        super().__init__()
        if config.settings.stackl_redis_type == "fake":
            logger.info("Using fake client")

            self.redis = Redis()
        else:
            self.redis = redis.Redis(
                host=config.settings.stackl_redis_host,
                port=config.settings.stackl_redis_port,
                password=config.settings.stackl_redis_password,
                db=0)
Example #6
    def handler(self, sub_command, args):
        if sub_command == "watch":
            # We connect as a consumer to the Redis pub/sub channel here. In
            # production we would not expose Redis directly: it is better to
            # register a client socket connection and push to that socket when
            # a key-update event arrives on Redis. That is fairly complex and
            # out of scope for this assignment.
            with Redis("/tmp/redis/data.db") as conn:
                pubsub = conn.pubsub()
                pubsub.subscribe("key-update")
                while True:
                    data = pubsub.get_message()
                    if data:
                        print(data["data"])
                    sleep(1)
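For reference, redis-py's PubSub can also dispatch messages to a registered handler from a background thread, replacing the manual poll-and-sleep loop; a minimal sketch (the handler wiring is an assumption, not part of the original):

with Redis("/tmp/redis/data.db") as conn:
    pubsub = conn.pubsub()
    # the handler is invoked for each message published on the channel
    pubsub.subscribe(**{"key-update": lambda msg: print(msg["data"])})
    worker = pubsub.run_in_thread(sleep_time=1.0)  # polls get_message() internally
    # ... serve clients here ...
    worker.stop()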
Example #7
    def create_redislite_if_needed(self):
        # If we are not in remote mode, a Redis server should not already be
        # reachable: silently reusing a stray local instance would be too
        # implicit and forces other developers to hunt it down and kill it.
        if not self.remote and self.redis_server_is_up():
            raise RuntimeError(
                "Able to connect to redis when should be creating one!")
        elif self.remote and not self.redis_server_is_up():
            # we asked to use an external redis, but it doesn't exist
            raise RuntimeError("Unable to connect to redis")

        if not self.remote and not self.r_instance:
            self.redis_db_file = tmp.mktemp(suffix='.rdb', dir='/tmp')
            self.r_instance = Redis(dbfilename=self.redis_db_file,
                                    serverconfig={
                                        'save': [],
                                        'maxclients': 10000,
                                        'bind': str(self.host),
                                        'port': str(self.port)
                                    })
Example #8
handlers = {
    UI_CHANNEL: ui_data_handler,
    TTS_CHANNEL: tts_data_handler,
    LEDS_CHANNEL: leds_data_handler
}
channels = list(handlers.keys())


# Main Listener Event Handler
def main_event_handler(self, event):
    print("Logic main event handler called")
    handlers[event['channel']](self.redis, event['data'])


if __name__ == '__main__':
    print("Channel logic started")
    redis_object = Redis(REDIS_DB_FILE)
    listener = RedisListener('Logic Listener', redis_object, channels,
                             main_event_handler)
    listener.start()
    time.sleep(10)
    print('Pushing to ui channel')
    redis_object.publish(UI_CHANNEL, 'Test message!')
    time.sleep(5)
    print('Pushing to TTS channel')
    redis_object.publish(TTS_CHANNEL, 'Test speech message')
    time.sleep(10)
    print('Pushing to ui channel again')
    redis_object.publish(UI_CHANNEL, 'Another test message!')

    # emotions & speech to text need pubsub objects
    # ui & text to speech & leds need to subscribe & publish
Example #9
from redislite import Redis
from redislite import patch

from settings import REDIS_PORT

# Original intention was to use Redis for caching and task queue.
# But I found out that the server has both memcached and gearman installed,
# so this is no longer needed, as it will require installing another server.

# Also, since each process importing this file will try to launch its own
# instance of Redis on the same local port, this will be a problem once worker
# processes are started.

patch.patch_redis()

redis = Redis(serverconfig={'port': REDIS_PORT})
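redislite's documented workaround is to point every process at the same db file: instances sharing a dbfilename attach to one embedded server instead of each binding its own port. A minimal sketch:

from redislite import Redis

writer = Redis('/tmp/shared.db')   # first instance starts the embedded server
reader = Redis('/tmp/shared.db')   # second instance attaches to the same server
writer.set('k', 'v')
assert reader.get('k') == b'v'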
Example #10
def create_app(config_name):
    app = Flask(__name__, static_url_path='')
    app.config.from_object(config[config_name])
    app.config.from_envvar("OPENTARGETS_API_LOCAL_SETTINGS", silent=True)
    config[config_name].init_app(app)
    api_version = app.config['API_VERSION']
    api_version_minor = app.config['API_VERSION_MINOR']

    # log_level = logging.INFO
    # if app.config['DEBUG']:
    #     log_level = logging.DEBUG

    # Flask has a default logger which works well and pushes to stderr
    # if you want to add different handlers (to file, or logstash, or whatever)
    # you can use code similar to the one below and set the error level accordingly.

    # logHandler = logging.StreamHandler()
    # formatter = jsonlogger.JsonFormatter()
    # logHandler.setFormatter(formatter)
    # logHandler.setLevel(logging.INFO)
    # app.logger.addHandler(logHandler)

    # or for LOGSTASH
    # app.logger.addHandler(logstash.LogstashHandler(app.config['LOGSTASH_HOST'], app.config['LOGSTASH_PORT'], version=1))

    app.logger.info('looking for elasticsearch at: %s' %
                    app.config['ELASTICSEARCH_URL'])
    print('looking for elasticsearch at: %s' % app.config['ELASTICSEARCH_URL'])

    app.extensions['redis-core'] = Redis(app.config['REDIS_SERVER_PATH'],
                                         db=0)  #served data
    app.extensions['redis-service'] = Redis(
        app.config['REDIS_SERVER_PATH'],
        db=1)  #cache, rate limit and internal things
    app.extensions['redis-user'] = Redis(app.config['REDIS_SERVER_PATH'],
                                         db=2)  # user info
    '''setup cache'''
    app.extensions['redis-service'].config_set('save', '')
    app.extensions['redis-service'].config_set('appendonly', 'no')
    icache = InternalCache(app.extensions['redis-service'],
                           str(api_version_minor))
    ip2org = IP2Org(icache)
    es = Elasticsearch(
        app.config['ELASTICSEARCH_URL'],
        # # sniff before doing anything
        # sniff_on_start=True,
        # # refresh nodes after a node fails to respond
        # sniff_on_connection_fail=True,
        # # and also every 60 seconds
        # sniffer_timeout=60
        timeout=60 * 20,
        maxsize=100,
    )
    '''elasticsearch handlers'''
    app.extensions['esquery'] = esQuery(
        es,
        DataTypes(app),
        DataSourceScoring(app),
        index_data=app.config['ELASTICSEARCH_DATA_INDEX_NAME'],
        index_efo=app.config['ELASTICSEARCH_EFO_LABEL_INDEX_NAME'],
        index_eco=app.config['ELASTICSEARCH_ECO_INDEX_NAME'],
        index_genename=app.config['ELASTICSEARCH_GENE_NAME_INDEX_NAME'],
        index_expression=app.config['ELASTICSEARCH_EXPRESSION_INDEX_NAME'],
        index_reactome=app.config['ELASTICSEARCH_REACTOME_INDEX_NAME'],
        index_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_INDEX_NAME'],
        index_search=app.config['ELASTICSEARCH_DATA_SEARCH_INDEX_NAME'],
        index_relation=app.config['ELASTICSEARCH_DATA_RELATION_INDEX_NAME'],
        docname_data=app.config['ELASTICSEARCH_DATA_DOC_NAME'],
        docname_efo=app.config['ELASTICSEARCH_EFO_LABEL_DOC_NAME'],
        docname_eco=app.config['ELASTICSEARCH_ECO_DOC_NAME'],
        docname_genename=app.config['ELASTICSEARCH_GENE_NAME_DOC_NAME'],
        docname_expression=app.config['ELASTICSEARCH_EXPRESSION_DOC_NAME'],
        docname_reactome=app.config['ELASTICSEARCH_REACTOME_REACTION_DOC_NAME'],
        docname_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_DOC_NAME'],
        docname_search=app.config['ELASTICSEARCH_DATA_SEARCH_DOC_NAME'],
        # docname_search_target=app.config['ELASTICSEARCH_DATA_SEARCH_TARGET_DOC_NAME'],
        # docname_search_disease=app.config['ELASTICSEARCH_DATA_SEARCH_DISEASE_DOC_NAME'],
        docname_relation=app.config['ELASTICSEARCH_DATA_RELATION_DOC_NAME'],
        log_level=app.logger.getEffectiveLevel(),
        cache=icache)

    app.extensions['es_access_store'] = esStore(
        es,
        eventlog_index=app.config['ELASTICSEARCH_LOG_EVENT_INDEX_NAME'],
        ip2org=ip2org,
    )
    '''mixpanel handlers'''
    if Config.MIXPANEL_TOKEN:
        mp = Mixpanel(Config.MIXPANEL_TOKEN, consumer=AsyncBufferedConsumer())
        app.extensions['mixpanel'] = mp
        app.extensions['mp_access_store'] = MixPanelStore(
            mp,
            ip2org=ip2org,
        )

        app.extensions['proxy'] = ProxyHandler(
            allowed_targets=app.config['PROXY_SETTINGS']['allowed_targets'],
            allowed_domains=app.config['PROXY_SETTINGS']['allowed_domains'],
            allowed_request_domains=app.config['PROXY_SETTINGS']
            ['allowed_request_domains'])

    basepath = app.config['PUBLIC_API_BASE_PATH'] + api_version
    # cors = CORS(app, resources=r'/api/*', allow_headers='Content-Type,Auth-Token')
    ''' define cache'''
    # cache = Cache(config={'CACHE_TYPE': 'simple'})
    # cache.init_app(latest_blueprint)
    # latest_blueprint.cache = cache
    # latest_blueprint.extensions['cache'] = cache
    # app.cache = SimpleCache()
    app.cache = FileSystemCache('/tmp/cttv-rest-api-cache',
                                threshold=100000,
                                default_timeout=60 * 60,
                                mode=0o777)  # file mode is octal
    '''Set usage limiter '''
    # limiter = Limiter(global_limits=["2000 per hour", "20 per second"])
    # limiter.init_app(app)# use redis to store limits
    '''Load api keys in redis'''
    rate_limit_file = app.config['USAGE_LIMIT_PATH']
    if not os.path.exists(rate_limit_file):
        rate_limit_file = '../' + rate_limit_file
    if os.path.exists(rate_limit_file):
        with open(rate_limit_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                auth_key = AuthKey(**row)
                app.extensions['redis-user'].hmset(auth_key.get_key(),
                                                   auth_key.__dict__)
        print('INFO - successfully loaded rate limit file')
    else:
        print('ERROR - cannot find rate limit file')
        app.logger.error(
            'cannot find rate limit file: %s. RATE LIMIT QUOTA LOAD SKIPPED!' %
            rate_limit_file)
    '''load ip name resolution'''
    ip_resolver = defaultdict(lambda: "PUBLIC")
    ip_list_file = app.config['IP_RESOLVER_LIST_PATH']
    if not os.path.exists(ip_list_file):
        ip_list_file = '../' + ip_list_file
    if os.path.exists(ip_list_file):
        with open(ip_list_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                net = IPNetwork(row['ip'])
                ip_resolver[net] = row['org']
    else:
        app.logger.warning(
            'cannot find IP list for IP resolver. All traffic will be logged as PUBLIC'
        )
    app.config['IP_RESOLVER'] = ip_resolver
    '''compress http response'''
    compress = Compress()
    compress.init_app(app)

    latest_blueprint = Blueprint('latest', __name__)
    current_version_blueprint = Blueprint(str(api_version), __name__)
    current_minor_version_blueprint = Blueprint(str(api_version_minor),
                                                __name__)

    specpath = '/cttv'

    if app.config['PROFILE'] == True:
        from werkzeug.contrib.profiler import ProfilerMiddleware
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])

    create_api(latest_blueprint, api_version, specpath)
    create_api(current_version_blueprint, api_version, specpath)
    create_api(current_minor_version_blueprint, api_version_minor, specpath)

    app.register_blueprint(latest_blueprint, url_prefix='/api/latest')
    app.register_blueprint(current_version_blueprint,
                           url_prefix='/api/' + str(api_version))
    app.register_blueprint(current_minor_version_blueprint,
                           url_prefix='/api/' + str(api_version_minor))

    @app.route('/api-docs/%s' % str(api_version_minor))
    def docs_current_minor_version():
        return redirect('/api/swagger/index.html')

    @app.route('/api-docs/%s' % str(api_version))
    def docs_current_version():
        return redirect('/api/swagger/index.html')

    @app.route('/api-docs')
    def docs():
        return redirect('/api/swagger/index.html')

    def serve_swagger():
        return app.send_static_file('docs/swagger/swagger.yaml')

    @app.route('/api/docs/swagger.yaml')
    def send_swagger():
        return serve_swagger()

    @app.route('/api/latest/docs/swagger.yaml')
    def send_swagger_latest():
        return serve_swagger()

    @app.route('/api/' + str(api_version) + '/docs/swagger.yaml')
    def send_swagger_current_version():
        return serve_swagger()

    @app.before_request
    def before_request():
        g.request_start = datetime.now()

    @app.after_request
    def after(resp):
        rate_limiter = RateLimiter()
        now = datetime.now()
        took = (now - g.request_start).total_seconds() * 1000
        if took > 500:
            cache_time = str(
                int(3600 * took)
            )  # cache for one hour for each second spent in the request
            resp.headers.add('X-Accel-Expires', cache_time)
        took = int(round(took))
        LogApiCallWeight(took)
        # if took < RateLimiter.DEFAULT_CALL_WEIGHT:
        #     took = RateLimiter.DEFAULT_CALL_WEIGHT
        current_values = increment_call_rate(took, rate_limiter)
        now = datetime.now()
        ceil10s = round(ceil_dt_to_future_time(now, 10), 2)
        ceil1h = round(ceil_dt_to_future_time(now, 3600), 2)
        usage_left_10s = rate_limiter.short_window_rate - current_values[
            'short']
        usage_left_1h = rate_limiter.long_window_rate - current_values['long']
        min_ceil = ceil10s
        if usage_left_1h < 0:
            min_ceil = ceil1h
        if (usage_left_10s < 0) or (usage_left_1h < 0):
            resp.headers.add('Retry-After', min_ceil)
        resp.headers.add('X-API-Took', took)
        resp.headers.add('X-Usage-Limit-10s', rate_limiter.short_window_rate)
        resp.headers.add('X-Usage-Limit-1h', rate_limiter.long_window_rate)
        resp.headers.add('X-Usage-Remaining-10s', usage_left_10s)
        resp.headers.add('X-Usage-Remaining-1h', usage_left_1h)
        # resp.headers.add('X-Usage-Limit-Reset-10s', ceil10s)
        # resp.headers.add('X-Usage-Limit-Reset-1h', ceil1h)
        resp.headers.add('Access-Control-Allow-Origin', '*')
        resp.headers.add('Access-Control-Allow-Headers',
                         'Content-Type,Auth-Token')
        if do_not_cache(request):  # do not cache in the browser
            resp.headers.add('Cache-Control',
                             "no-cache, must-revalidate, max-age=0")
        else:
            resp.headers.add(
                'Cache-Control',
                "no-transform, public, max-age=%i, s-maxage=%i" %
                (took * 1800 / 1000, took * 9000 / 1000))
        return resp

    return app
Example #11
#!/usr/bin/python3
# encoding: utf-8
# @Time    : 2021/8/24 13:56
# @author  : zza
# @Email   : [email protected]
# @File    : demo_redislite.py
from redislite import Redis

redis_connection = Redis('./redis.db')
redis_connection.keys()               # -> []
redis_connection.set('key', 'value')  # -> True
redis_connection.get('key')           # -> 'value'
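Note: redis-py returns bytes by default, so the final get would actually return b'value'; the plain string shown above requires a decoding client, e.g. via the decode_responses=True flag also used in Example #27:

redis_connection = Redis('./redis.db', decode_responses=True)
redis_connection.get('key')           # -> 'value' as str rather than b'value'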
Example #12
import os
from flask import Flask, render_template, request, redirect
from werkzeug.utils import secure_filename
from html import escape  # cgi.escape was removed in Python 3.8
from flask_dropzone import Dropzone
import PyPDF2
import docx2txt
from tempfile import mkdtemp
from shutil import rmtree
import csv
from redislite import Redis

app = Flask(__name__)
redis_connection = Redis('/tmp/redis.db')

collegeDataset = open('collegeDataset.csv', 'r', encoding='utf-8', newline='')  # csv docs recommend newline=''
collegeDatasetReader = csv.reader(collegeDataset)
collegeData = []
for row in collegeDatasetReader:
    collegeData.append(row)

app.config.update(
    # Flask-Dropzone config:
    DROPZONE_ALLOWED_FILE_CUSTOM=True,
    DROPZONE_ALLOWED_FILE_TYPE='.pdf, .docx, .txt',
    DROPZONE_MAX_FILE_SIZE=3,
    DROPZONE_MAX_FILES=20,
    DROPZONE_UPLOAD_MULTIPLE=True,
)

dropzone = Dropzone(app)
Example #13
File: core.py Project: valency/quickauth
    def __init__(self, dbfile='auth.db'):
        self.conn = Redis(dbfile)
Example #14
def create_app(config_name):
    app = Flask(__name__, static_url_path='')
    # This first loads the configuration from eg. config['development'] which corresponds to the DevelopmentConfig class in the config.py
    app.config.from_object(config[config_name])
    # Then you can override the values with the contents of the file the OPENTARGETS_API_LOCAL_SETTINGS environment variable points to.
    # For eg:
    # $ export OPENTARGETS_API_LOCAL_SETTINGS=/path/to/settings.cfg
    #
    # where settings.cfg looks like:
    #
    # DEBUG = False
    # SECRET_KEY = 'foo'
    #
    app.config.from_envvar("OPENTARGETS_API_LOCAL_SETTINGS", silent=True)

    config[config_name].init_app(app)
    api_version = app.config['API_VERSION']
    api_version_minor = app.config['API_VERSION_MINOR']


    app.logger.info('looking for elasticsearch at: %s' % app.config['ELASTICSEARCH_URL'])


    app.extensions['redis-core'] = Redis(app.config['REDIS_SERVER_PATH'], db=0)  # served data
    app.extensions['redis-service'] = Redis(app.config['REDIS_SERVER_PATH'], db=1)  # cache, rate limit and internal things
    app.extensions['redis-user'] = Redis(app.config['REDIS_SERVER_PATH'], db=2)  # user info
    '''setup cache'''
    app.extensions['redis-service'].config_set('save', '')
    app.extensions['redis-service'].config_set('appendonly', 'no')
    icache = InternalCache(app.extensions['redis-service'],
                           str(api_version_minor))
    ip2org = IP2Org(icache)
    if app.config['ELASTICSEARCH_URL']:
        es = Elasticsearch(app.config['ELASTICSEARCH_URL'],
                           # # sniff before doing anything
                           # sniff_on_start=True,
                           # # refresh nodes after a node fails to respond
                           # sniff_on_connection_fail=True,
                           # # and also every 60 seconds
                           # sniffer_timeout=60
                           timeout=60 * 20,
                           maxsize=32,
                           )
    else:
        es = None
    '''elasticsearch handlers'''
    app.extensions['esquery'] = esQuery(
        es,
        DataTypes(app),
        DataSourceScoring(app),
        index_data=app.config['ELASTICSEARCH_DATA_INDEX_NAME'],
        index_drug=app.config['ELASTICSEARCH_DRUG_INDEX_NAME'],
        index_efo=app.config['ELASTICSEARCH_EFO_LABEL_INDEX_NAME'],
        index_eco=app.config['ELASTICSEARCH_ECO_INDEX_NAME'],
        index_genename=app.config['ELASTICSEARCH_GENE_NAME_INDEX_NAME'],
        index_expression=app.config['ELASTICSEARCH_EXPRESSION_INDEX_NAME'],
        index_reactome=app.config['ELASTICSEARCH_REACTOME_INDEX_NAME'],
        index_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_INDEX_NAME'],
        index_search=app.config['ELASTICSEARCH_DATA_SEARCH_INDEX_NAME'],
        index_relation=app.config['ELASTICSEARCH_DATA_RELATION_INDEX_NAME'],
        docname_data=app.config['ELASTICSEARCH_DATA_DOC_NAME'],
        docname_drug=app.config['ELASTICSEARCH_DRUG_DOC_NAME'],
        docname_efo=app.config['ELASTICSEARCH_EFO_LABEL_DOC_NAME'],
        docname_eco=app.config['ELASTICSEARCH_ECO_DOC_NAME'],
        docname_genename=app.config['ELASTICSEARCH_GENE_NAME_DOC_NAME'],
        docname_expression=app.config['ELASTICSEARCH_EXPRESSION_DOC_NAME'],
        docname_reactome=app.config['ELASTICSEARCH_REACTOME_REACTION_DOC_NAME'],
        docname_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_DOC_NAME'],
        docname_search=app.config['ELASTICSEARCH_DATA_SEARCH_DOC_NAME'],
        # docname_search_target=app.config['ELASTICSEARCH_DATA_SEARCH_TARGET_DOC_NAME'],
        # docname_search_disease=app.config['ELASTICSEARCH_DATA_SEARCH_DISEASE_DOC_NAME'],
        docname_relation=app.config['ELASTICSEARCH_DATA_RELATION_DOC_NAME'],
        log_level=app.logger.getEffectiveLevel(),
        cache=icache
        )

    app.extensions['es_access_store'] = esStore(es,
        eventlog_index=app.config['ELASTICSEARCH_LOG_EVENT_INDEX_NAME'],
        ip2org=ip2org,
        )

    '''mixpanel handlers'''
    if Config.MIXPANEL_TOKEN:
        mp = Mixpanel(Config.MIXPANEL_TOKEN, consumer=AsyncBufferedConsumer())
        app.extensions['mixpanel'] = mp
        app.extensions['mp_access_store'] = MixPanelStore(
            mp,
            ip2org=ip2org,
            )


        app.extensions['proxy'] = ProxyHandler(
            allowed_targets=app.config['PROXY_SETTINGS']['allowed_targets'],
            allowed_domains=app.config['PROXY_SETTINGS']['allowed_domains'],
            allowed_request_domains=app.config['PROXY_SETTINGS']['allowed_request_domains'])

    # basepath = app.config['PUBLIC_API_BASE_PATH']+api_version
    # cors = CORS(app, resources=r'/api/*', allow_headers='Content-Type,Auth-Token')

    ''' define cache'''
    # cache = Cache(config={'CACHE_TYPE': 'simple'})
    # cache.init_app(latest_blueprint)
    # latest_blueprint.cache = cache
    # latest_blueprint.extensions['cache'] = cache
    # app.cache = SimpleCache()
    app.cache = FileSystemCache('/tmp/cttv-rest-api-cache', threshold=100000, default_timeout=60*60, mode=0o777)  # file mode is octal

    '''load ip name resolution'''
    ip_resolver = defaultdict(lambda: "PUBLIC")
    ip_list_file = app.config['IP_RESOLVER_LIST_PATH']
    if not os.path.exists(ip_list_file):
        ip_list_file = '../' + ip_list_file
    if os.path.exists(ip_list_file):
        with open(ip_list_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                net = IPNetwork(row['ip'])
                ip_resolver[net] = row['org']
    else:
        app.logger.warning('cannot find IP list for IP resolver. All traffic will be logged as PUBLIC')
    app.config['IP_RESOLVER'] = ip_resolver



    '''compress http response'''
    compress = Compress()
    compress.init_app(app)

    latest_blueprint = Blueprint('latest', __name__)
    current_version_blueprint = Blueprint(str(api_version), __name__)
    current_minor_version_blueprint = Blueprint(str(api_version_minor), __name__)


    specpath = '/cttv'

    if app.config['PROFILE'] == True:
        from werkzeug.contrib.profiler import ProfilerMiddleware
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])


    '''set the right prefixes'''

    create_api(latest_blueprint, api_version, specpath)
    create_api(current_version_blueprint, api_version, specpath)
    create_api(current_minor_version_blueprint, api_version_minor, specpath)

    # app.register_blueprint(latest_blueprint, url_prefix='/latest/platform')
    app.register_blueprint(current_version_blueprint, url_prefix='/v' + str(api_version) + '/platform')
    app.register_blueprint(current_minor_version_blueprint, url_prefix='/v' + str(api_version_minor) + '/platform')


    '''serve the static docs'''
    with open('app/static/openapi.template.yaml', 'r') as f:
        openapi_def = yaml.safe_load(f)
    app.logger.info('parsing swagger from app/static/openapi.template.yaml')

    #inject the description into the docs
    with open("api-description.md", "r") as f:
        desc = f.read()
    openapi_def['info']['description'] = desc
    openapi_def['basePath'] = '/v%s' % str(api_version)
    @app.route('/v%s/platform/swagger' % str(api_version))
    def serve_swagger(apiversion=api_version):
        return jsonify(openapi_def)

    @app.route('/v%s/platform/docs/swagger-ui' % str(api_version))
    def render_swaggerui(apiversion=api_version):
        return render_template('swaggerui.html', api_version=apiversion)

    '''pre and post-request'''


    @app.before_request
    def before_request():
        g.request_start = datetime.now()
    @app.after_request
    def after(resp):
        try:
            now = datetime.now()
            took = int(round((now - g.request_start).total_seconds()))
            resp.headers.add('Access-Control-Allow-Origin', '*')
            resp.headers.add('Access-Control-Allow-Headers', 'Content-Type,Auth-Token')
            resp.headers.add('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE,OPTIONS')
            if do_not_cache(request):  # do not cache in the browser
                resp.headers.add('Cache-Control', "no-cache, must-revalidate, max-age=0")
            else:
                cache = 30 * 24 * 60 * 60  # cache for thirty days
                resp.headers.add('Cache-Control', "no-transform, max-age=%i" % cache)
            return resp

        except Exception as e:
            app.logger.exception('failed request teardown function: %s', e)
            return resp

    # Override the HTTP exception handler.
    app.handle_http_exception = get_http_exception_handler(app)
    return app
Example #15
def _get_redis(db=JSONDB):
    return Redis(db)
Example #16
class Banco:
    def __init__(self, base):
        self.banco = base()

    def save(self, value):
        key = str(uuid1())
        self.set(key, value)
        return key

    def get(self, key):
        return loads(self.banco.get(key).decode('utf8'))

    def set(self, key, value):
        self.banco.set(key, dumps(value))
        return key


DBT = lambda: Redis('/tmp/redis.db')


def tests():

    b = Banco(DBT)
    b.set(1, 2)
    assert int(b.get(1)) == 2, "failed to retrieve b[1]: %s" % str(b.get(1))
    print("assert b.get(1) == 2", str(b.get(1)))
    b.set(1, 3)
    assert int(b.get(1)) == 3, "failed to retrieve b[1]: %s" % str(b.get(1))
    c = b.save('oi maçã')
    assert b.get(c) == 'oi maçã', "failed to retrieve b[c]: %s" % str(b.get(c))


if __name__ == "__main__":
    tests()
Example #17
NOSE_ARGS = [
    '--with-coverage',
    '--cover-package=general,project,employee',
]

PAGINATION_PER_PAGE = 20

try:
    from .local_settings import *
except ImportError:
    pass

# CELERY

REDIS_DB_PATH = os.path.join(DATA_DIR, 'my_redis.db')
rdb = Redis(REDIS_DB_PATH, serverconfig={'port': '1116'})
REDIS_SOCKET_PATH = 'redis+socket://%s' % (rdb.socket_file, )
BROKER_URL = REDIS_SOCKET_PATH
CELERY_RESULT_BACKEND = REDIS_SOCKET_PATH

CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_IMPORTS = ['general.tasks']
CELERY_TIMEZONE = 'UTC'


# EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'


EMAIL_USE_TLS = True
Example #18
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/

STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
LOGIN_REDIRECT_URL = '/'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'

# Celery-Redis settings
# CELERY_BROKER_URL = 'redis://localhost:6379'

# Create a Redis instance using redislite
REDIS_DB_PATH = '/tmp/redis.rdb'
rdb = Redis(REDIS_DB_PATH)
REDIS_SOCKET_PATH = 'redis+socket://%s' % (rdb.socket_file, )

# Use redislite for the Celery broker
CELERY_BROKER_URL = REDIS_SOCKET_PATH

CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'

CELERY_BEAT_SCHEDULE = {
    'add_unical_pages_for_users': {
        'task': 'wiki.tasks.add_unical_pages_for_users',
        'schedule': 10.0  #crontab(minute = 0, hour='*/1'),
    },
}
Example #19
    def __call__(self):
        redis_instance = Redis('/tmp/redis.db')
        return redis_instance
Example #20
def new_redis_client(host=default_host, port=default_port):
    return Redis(host=host, port=port)
Example #21
def redis():
    redis_connection = Redis("ace.rdb")  # create outside try so the name is always bound
    try:
        yield redis_connection
    finally:
        redis_connection.close()
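A sketch of one way this generator could be consumed, assuming it is wrapped as a context manager with contextlib (it may equally well be registered as a pytest fixture):

from contextlib import contextmanager
from redislite import Redis

@contextmanager
def redis():
    redis_connection = Redis("ace.rdb")
    try:
        yield redis_connection
    finally:
        redis_connection.close()    # always release the embedded server

with redis() as conn:
    conn.set("hits", 1)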
Example #22
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

STATIC_URL = '/static/'
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, "static"),
]
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_TMP = os.path.join(BASE_DIR, 'static')

SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')

os.makedirs(STATIC_TMP, exist_ok=True)
os.makedirs(STATIC_ROOT, exist_ok=True)

LOGIN_REDIRECT_URL = '/reception'
LOGIN_URL = '/admin'

rdb = Redis('/tmp/redis.db', serverconfig={'port': '6379'})

CHANNEL_LAYERS = {
    'default': {
        'BACKEND': 'channels_redis.core.RedisChannelLayer',
        'CONFIG': {
            "hosts": [os.environ.get('REDIS_URL', 'redis://localhost:6379')],
        },
    },
}
Example #23
def create_app(config_name):
    app = Flask(__name__, static_url_path='')
    # This first loads the configuration from eg. config['development'] which corresponds to the DevelopmentConfig class in the config.py
    app.config.from_object(config[config_name])
    # Then you can override the values with the contents of the file the OPENTARGETS_API_LOCAL_SETTINGS environment variable points to.
    # For eg:
    # $ export OPENTARGETS_API_LOCAL_SETTINGS=/path/to/settings.cfg
    #
    # where settings.cfg looks like:
    #
    # DEBUG = False
    # SECRET_KEY = 'foo'
    #
    app.config.from_envvar("OPENTARGETS_API_LOCAL_SETTINGS", silent=True)

    config[config_name].init_app(app)
    api_version = app.config['API_VERSION']
    api_version_minor = app.config['API_VERSION_MINOR']


    app.logger.info('looking for elasticsearch at: %s' % app.config['ELASTICSEARCH_URL'])


    app.extensions['redis-core'] = Redis(app.config['REDIS_SERVER_PATH'], db=0)  # served data
    app.extensions['redis-service'] = Redis(app.config['REDIS_SERVER_PATH'], db=1)  # cache, rate limit and internal things
    app.extensions['redis-user'] = Redis(app.config['REDIS_SERVER_PATH'], db=2)  # user info
    '''setup cache'''
    app.extensions['redis-service'].config_set('save', '')
    app.extensions['redis-service'].config_set('appendonly', 'no')
    icache = InternalCache(app.extensions['redis-service'],
                           str(api_version_minor))
    ip2org = IP2Org(icache)
    if app.config['ELASTICSEARCH_URL']:
        es = Elasticsearch(app.config['ELASTICSEARCH_URL'],
                           # # sniff before doing anything
                           # sniff_on_start=True,
                           # # refresh nodes after a node fails to respond
                           # sniff_on_connection_fail=True,
                           # # and also every 60 seconds
                           # sniffer_timeout=60
                           timeout=60 * 20,
                           maxsize=32,
                           )
    else:
        es = None
    '''elasticsearch handlers'''
    app.extensions['esquery'] = esQuery(es,
                                        DataTypes(app),
                                        DataSourceScoring(app),
                                        index_data=app.config['ELASTICSEARCH_DATA_INDEX_NAME'],
                                        index_efo=app.config['ELASTICSEARCH_EFO_LABEL_INDEX_NAME'],
                                        index_eco=app.config['ELASTICSEARCH_ECO_INDEX_NAME'],
                                        index_genename=app.config['ELASTICSEARCH_GENE_NAME_INDEX_NAME'],
                                        index_expression=app.config['ELASTICSEARCH_EXPRESSION_INDEX_NAME'],
                                        index_reactome=app.config['ELASTICSEARCH_REACTOME_INDEX_NAME'],
                                        index_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_INDEX_NAME'],
                                        index_search=app.config['ELASTICSEARCH_DATA_SEARCH_INDEX_NAME'],
                                        index_relation=app.config['ELASTICSEARCH_DATA_RELATION_INDEX_NAME'],
                                        docname_data=app.config['ELASTICSEARCH_DATA_DOC_NAME'],
                                        docname_efo=app.config['ELASTICSEARCH_EFO_LABEL_DOC_NAME'],
                                        docname_eco=app.config['ELASTICSEARCH_ECO_DOC_NAME'],
                                        docname_genename=app.config['ELASTICSEARCH_GENE_NAME_DOC_NAME'],
                                        docname_expression=app.config['ELASTICSEARCH_EXPRESSION_DOC_NAME'],
                                        docname_reactome=app.config['ELASTICSEARCH_REACTOME_REACTION_DOC_NAME'],
                                        docname_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_DOC_NAME'],
                                        docname_search=app.config['ELASTICSEARCH_DATA_SEARCH_DOC_NAME'],
                                        # docname_search_target=app.config['ELASTICSEARCH_DATA_SEARCH_TARGET_DOC_NAME'],
                                        # docname_search_disease=app.config['ELASTICSEARCH_DATA_SEARCH_DISEASE_DOC_NAME'],
                                        docname_relation=app.config['ELASTICSEARCH_DATA_RELATION_DOC_NAME'],
                                        log_level=app.logger.getEffectiveLevel(),
                                        cache=icache
                                        )

    app.extensions['es_access_store'] = esStore(es,
                                        eventlog_index=app.config['ELASTICSEARCH_LOG_EVENT_INDEX_NAME'],
                                        ip2org=ip2org,
                                        )
    '''mixpanel handlers'''
    if Config.MIXPANEL_TOKEN:
        mp = Mixpanel(Config.MIXPANEL_TOKEN, consumer=AsyncBufferedConsumer())
        app.extensions['mixpanel'] = mp
        app.extensions['mp_access_store'] = MixPanelStore(mp,
                                            ip2org=ip2org,
                                            )


        app.extensions['proxy'] = ProxyHandler(allowed_targets=app.config['PROXY_SETTINGS']['allowed_targets'],
                                               allowed_domains=app.config['PROXY_SETTINGS']['allowed_domains'],
                                               allowed_request_domains=app.config['PROXY_SETTINGS']['allowed_request_domains'])

    # basepath = app.config['PUBLIC_API_BASE_PATH']+api_version
    # cors = CORS(app, resources=r'/api/*', allow_headers='Content-Type,Auth-Token')

    ''' define cache'''
    # cache = Cache(config={'CACHE_TYPE': 'simple'})
    # cache.init_app(latest_blueprint)
    # latest_blueprint.cache = cache
    # latest_blueprint.extensions['cache'] = cache
    # app.cache = SimpleCache()
    app.cache = FileSystemCache('/tmp/cttv-rest-api-cache', threshold=100000, default_timeout=60*60, mode=0o777)  # file mode is octal

    '''Set usage limiter '''
    # limiter = Limiter(global_limits=["2000 per hour", "20 per second"])
    # limiter.init_app(app)# use redis to store limits

    '''Load api keys in redis'''
    rate_limit_file = app.config['USAGE_LIMIT_PATH']
    if not os.path.exists(rate_limit_file):
        rate_limit_file = '../' + rate_limit_file
    csvfile = None
    if Config.GITHUB_AUTH_TOKEN:
        r = requests.get('https://api.github.com/repos/opentargets/rest_api_auth/contents/rate_limit.csv',
                         headers={'Authorization': 'token %s' % Config.GITHUB_AUTH_TOKEN,
                                  'Accept': 'application/vnd.github.v3.raw'})
        if r.ok:
            csvfile = r.text.split('\n')
            app.logger.info('Retrieved rate limit file from github remote')
        else:
            app.logger.warning('Cannot retrieve rate limit file from remote, SKIPPED!')
    elif os.path.exists(rate_limit_file):
        csvfile = open(rate_limit_file)
        app.logger.info('Using dummy rate limit file')

    if csvfile is None:
        app.logger.error('cannot find rate limit file: %s. RATE LIMIT QUOTA LOAD SKIPPED!' % rate_limit_file)
    else:
        reader = csv.DictReader(csvfile)
        for row in reader:
            auth_key = AuthKey(**row)
            app.extensions['redis-user'].hmset(auth_key.get_key(), auth_key.__dict__)
        try:
            csvfile.close()
        except Exception:
            pass  # csvfile may be a list of lines (GitHub path) with no close()
        app.logger.info('successfully loaded rate limit file')


    '''load ip name resolution'''
    ip_resolver = defaultdict(lambda: "PUBLIC")
    ip_list_file = app.config['IP_RESOLVER_LIST_PATH']
    if not os.path.exists(ip_list_file):
        ip_list_file = '../' + ip_list_file
    if os.path.exists(ip_list_file):
        with open(ip_list_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                net = IPNetwork(row['ip'])
                ip_resolver[net] = row['org']
    else:
        app.logger.warning('cannot find IP list for IP resolver. All traffic will be logged as PUBLIC')
    app.config['IP_RESOLVER'] = ip_resolver



    '''compress http response'''
    compress = Compress()
    compress.init_app(app)

    latest_blueprint = Blueprint('latest', __name__)
    current_version_blueprint = Blueprint(str(api_version), __name__)
    current_minor_version_blueprint = Blueprint(str(api_version_minor), __name__)


    specpath = '/cttv'

    if app.config['PROFILE'] == True:
        from werkzeug.contrib.profiler import ProfilerMiddleware
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])


    '''set the right prefixes'''

    create_api(latest_blueprint, api_version, specpath)
    create_api(current_version_blueprint, api_version, specpath)
    create_api(current_minor_version_blueprint, api_version_minor, specpath)

    # app.register_blueprint(latest_blueprint, url_prefix='/latest/platform')
    app.register_blueprint(current_version_blueprint, url_prefix='/v' + str(api_version) + '/platform')
    app.register_blueprint(current_minor_version_blueprint, url_prefix='/v' + str(api_version_minor) + '/platform')


    '''serve the static docs'''
    
    try:
        '''
        NOTE: this file gets created only at deployment time
        '''
        with open('app/static/openapi.yaml', 'r') as f:
            openapi_def = yaml.safe_load(f)
        app.logger.info('parsing swagger from static/openapi.yaml')

    except IOError:
        '''if we are not deployed, then simply use the template'''
        with open('openapi.template.yaml', 'r') as f:
            openapi_def = yaml.safe_load(f)
        app.logger.error('parsing swagger from openapi.template.yaml')

    with open("api-description.md", "r") as f:
        desc = f.read()
    openapi_def['info']['description'] = desc
    openapi_def['basePath'] = '/v%s' % str(api_version)
    @app.route('/v%s/platform/swagger' % str(api_version))
    def serve_swagger(apiversion=api_version):
        return jsonify(openapi_def)


    @app.route('/v%s/platform/docs' % str(api_version))
    def render_redoc(apiversion=api_version):
        return render_template('docs.html', api_version=apiversion)


    '''pre and post-request'''


    @app.before_request
    def before_request():
        g.request_start = datetime.now()
    @app.after_request
    def after(resp):
        try:
            rate_limiter = RateLimiter()
            now = datetime.now()
            took = (now - g.request_start).total_seconds()*1000
            if took > 500:
                cache_time = str(int(3600 * took))  # cache for one hour for each second spent in the request
                resp.headers.add('X-Accel-Expires', cache_time)
            took = int(round(took))
            LogApiCallWeight(took)
            # if took < RateLimiter.DEFAULT_CALL_WEIGHT:
            #     took = RateLimiter.DEFAULT_CALL_WEIGHT
            current_values = increment_call_rate(took, rate_limiter)
            now = datetime.now()
            ceil10s = round(ceil_dt_to_future_time(now, 10), 2)
            ceil1h = round(ceil_dt_to_future_time(now, 3600), 2)
            usage_left_10s = rate_limiter.short_window_rate - current_values['short']
            usage_left_1h = rate_limiter.long_window_rate - current_values['long']
            min_ceil = ceil10s
            if usage_left_1h < 0:
                min_ceil = ceil1h
            if (usage_left_10s < 0) or (usage_left_1h < 0):
                resp.headers.add('Retry-After', min_ceil)
            resp.headers.add('X-API-Took', took)
            resp.headers.add('X-Usage-Limit-10s', rate_limiter.short_window_rate)
            resp.headers.add('X-Usage-Limit-1h', rate_limiter.long_window_rate)
            resp.headers.add('X-Usage-Remaining-10s', usage_left_10s)
            resp.headers.add('X-Usage-Remaining-1h', usage_left_1h)
            # resp.headers.add('X-Usage-Limit-Reset-10s', ceil10s)
            # resp.headers.add('X-Usage-Limit-Reset-1h', ceil1h)
            resp.headers.add('Access-Control-Allow-Origin', '*')
            resp.headers.add('Access-Control-Allow-Headers', 'Content-Type,Auth-Token')
            resp.headers.add('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE,OPTIONS')
            if do_not_cache(request):  # do not cache in the browser
                resp.headers.add('Cache-Control', "no-cache, must-revalidate, max-age=0")
            else:
                resp.headers.add('Cache-Control', "no-transform, public, max-age=%i, s-maxage=%i" % (took * 1800 / 1000, took * 9000 / 1000))
            return resp

        except Exception as e:
            app.logger.exception('failed request teardown function: %s', e)
            return resp



    # Override the HTTP exception handler.
    app.handle_http_exception = get_http_exception_handler(app)
    return app
Example #24
from redislite import Redis
from constants import *

if __name__ == '__main__':
    r = Redis(REDIS_DB_FILE)
Example #25
# ================================================================================

CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'
    }
}

# ================================================================================
# static files (CSS, JavaScript, Images)
#
# References:
#   * https://docs.djangoproject.com/en/1.9/howto/static-files/
# ================================================================================

STATIC_ROOT = os.path.join(BASE_DIR, 'temporary', 'static')

# ================================================================================
# Celery
#
# References:
#   * http://docs.celeryproject.org/en/master/getting-started/index.html
# ================================================================================

CELERY_REDIS_PORT = os.environ.get('CELERY_REDIS_PORT', '8001')
CELERY_REDIS = Redis(os.path.join(BASE_DIR, 'temporary', 'celery.redis'),
                     serverconfig={'port': CELERY_REDIS_PORT})

BROKER_URL = 'redis://127.0.0.1:{}/0'.format(CELERY_REDIS_PORT)
CELERY_RESULT_BACKEND = BROKER_URL
Example #26
__author__ = 'carlo'
import os
from redislite import Redis
# from tinydb.storages import MemoryStorage
from uuid import uuid1
# DBM = lambda :TinyDB(storage=MemoryStorage)

JSONDB = os.path.dirname(__file__) + '/eica.json'

DBF = lambda: Redis(JSONDB)


class Banco:
    def __init__(self, base=DBF):
        self.banco = base()

    def save(self, value):
        key = str(uuid1())
        self.banco.set(key, value)
        return key

    def get(self, key):
        return self.banco.get(key)

    def set(self, key, value):
        self.banco.set(key, value)
        return key


def tests():
Example #27
from redislite import Redis

from utils.url_utils import get_filtered_links

redis_client = Redis(dbfilename="./redis.db", decode_responses=True)


def redis_cleanup(website_full_url):
    """removed invalid entries from redis caches"""
    # remove intersections
    for anchor in redis_client.sinter("new_urls", "processed_urls"):
        redis_client.srem("new_urls", anchor)
        print("Removed processed URL from redis: {}!\n".format(anchor))
    for anchor in redis_client.smembers("new_urls"):
        if len(get_filtered_links([anchor], website_full_url)) < 1:
            redis_client.srem("new_urls", anchor)


if __name__ == '__main__':
    redis_cleanup("https://www.wikipedia.org/")
Example #28
    def process(self):
        logging.basicConfig(
            filename='output_mongo.log',
            format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
            level=logging.INFO)

        r_server = Redis(dbfilename=str(REDISLITE_DB_PATH),
                         serverconfig={
                             'save': [],
                             'maxclients': 10000
                         })
        mongo_client = MongoClient('host:port')
        db = mongo_client['xxx']
        db.authenticate('uname', 'pwd')
        gwas23andme = db['xxxx']

        no_of_workers = NO_OF_WORKERS or multiprocessing.cpu_count()

        mongo_q = RedisQueue(queue_id=UNIQUE_RUN_ID + '|mongo_processor_q',
                             max_size=MAX_CHUNKS * no_of_workers,
                             job_timeout=120)
        transformer_q = RedisQueue(queue_id=UNIQUE_RUN_ID +
                                   '|data_transformer_q',
                                   max_size=MAX_CHUNKS * no_of_workers,
                                   job_timeout=120)
        writer_q = RedisQueue(queue_id=UNIQUE_RUN_ID + '|writer_q',
                              max_size=MAX_CHUNKS * no_of_workers,
                              job_timeout=120)

        workers = [
            MongoWorkerProcess(
                mongo_q,
                r_server.db,
                transformer_q,
            ) for i in range(no_of_workers)
        ]

        for w in workers:
            w.start()

        transformers = [
            DataTransformerProcess(
                transformer_q,
                r_server.db,
                writer_q,
            ) for i in range(no_of_workers)
        ]

        for t in transformers:
            t.start()

        # writers = [FileWriterProcess(writer_q,
        #                               r_server.db
        #                               )for i in range(no_of_workers)]
        #
        # for writer in writers:
        #     writer.start()

        writer = FileWriterProcess(writer_q, r_server.db)
        writer.start()

        logging.info('Getting distinct genes and phenotypes')
        distinct_phenotypes = list(gwas23andme.distinct("source"))
        distinct_genes = list(gwas23andme.distinct("ingenes"))
        logging.info('Start the real deal here!!!! ')
        for phenotype in distinct_phenotypes:
            logging.info('Processing phenotype {} '.format(phenotype))
            logging.info('Total docs for phenotype {} are {}'.format(
                phenotype, gwas23andme.count({"source": phenotype})))

            mongo_q.put((phenotype, 'test'), r_server)
            # for gene in distinct_genes:
            #     mongo_q.put((phenotype,gene),r_server)

        mongo_q.set_submission_finished(r_server=r_server)

        for a in workers:
            a.join()
        for a in transformers:
            a.join()
        writer.join()
Example #29
File: fetch.py Project: avral/mapala
import logging
from multiprocessing import Pool

from redislite import Redis
from piston.steem import Steem
from piston.blockchain import Blockchain
from django.core.management.base import BaseCommand
from django.db.utils import InterfaceError
from django import db

from backend import settings
from apps.blockchains.sync import BaseUpdater

logger = logging.getLogger('mapala.fetch')

pool = Pool(processes=4)
redis = Redis('redis.db')


def get_block(steem, blockchain_name):
    last_block = redis.get('%s_last_block' % blockchain_name)

    if last_block is None:
        # First app fetch
        last_block = Blockchain(steem).get_current_block_num()
        print(last_block, settings.LOCALE)

    return int(last_block)


class Command(BaseCommand):
    def add_arguments(self, parser):
Example #30
from flask import Flask, render_template, request, redirect, send_from_directory, url_for
from redislite import Redis
from functools import wraps
import hashlib
import binascii
import random
import string
import jwt
app = Flask(__name__)
redis = Redis('/tmp/redis.db')


def add_entry(login, password):
    salted_hash = hashlib.pbkdf2_hmac('sha256', bytes(password, 'utf-8'),
                                      bytes(password, 'utf-8'), 50000)
    redis.set(login, binascii.hexlify(salted_hash).decode('utf-8'))
    return True
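A matching verification helper (hypothetical, not part of the original) would recompute the PBKDF2 digest with the same salt and compare it against the stored hex string:

def verify_entry(login, password):
    # NOTE: the snippet above salts the hash with the password itself; a
    # random per-user salt stored alongside the digest would be safer.
    digest = hashlib.pbkdf2_hmac('sha256', bytes(password, 'utf-8'),
                                 bytes(password, 'utf-8'), 50000)
    stored = redis.get(login)        # bytes, e.g. b'ab12...'
    return stored == binascii.hexlify(digest)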


def create_token(login):
    token = ''.join(
        random.choice(string.ascii_uppercase + string.digits)
        for _ in range(45))
    redis.set(token, login)
    return token


def check_auth(username, password):
    return username == 'admin' and password == 'secret'


def requires_auth(f):