Example no. 1
from datetime import datetime
from uuid import uuid1, uuid4

from redislite import Redis  # imports implied by the method bodies below


class QuickAuth:
    def __init__(self, dbfile='auth.db'):
        self.conn = Redis(dbfile)

    def register(self, key=None):
        if key is None:
            key = str(uuid1(clock_seq=int(datetime.now().timestamp())))
        value = str(uuid4())
        if self.conn.set(key, value):
            return {
                'key': key,
                'secret': value
            }
        else:
            raise ValueError('write to database failed: {} = {}'.format(key, value))

    def authorize(self, key, value):
        c = self.conn.get(key)
        if c is not None:
            return c.decode() == value
        return False

    def update(self, key):
        if self.conn.get(key):
            return self.register(key=key)
        else:
            raise ValueError('key is not found: {}'.format(key))
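
A brief usage sketch for the QuickAuth class above (hypothetical database path; register issues a key/secret pair and update rotates the secret for an existing key):

auth = QuickAuth('/tmp/auth.db')
creds = auth.register()                        # {'key': ..., 'secret': ...}
assert auth.authorize(creds['key'], creds['secret'])
rotated = auth.update(creds['key'])            # same key, freshly issued secret
assert not auth.authorize(creds['key'], creds['secret'])
assert auth.authorize(rotated['key'], rotated['secret'])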
Example no. 2
    def __init__(self, db_path):
        os.makedirs(os.path.dirname(db_path), exist_ok=True)

        try:
            if not self.conn:
                self.conn = Redis(db_path)
        except Exception:
            raise Exception("Unable to create connection with Redis server")
Example no. 3
    def __init__(self):
        super().__init__()
        if config.settings.stackl_redis_type == "fake":
            logger.info("Using fake client")

            self.redis = Redis()
        else:
            self.redis = redis.Redis(
                host=config.settings.stackl_redis_host,
                port=config.settings.stackl_redis_port,
                password=config.settings.stackl_redis_password,
                db=0)
Example no. 4
class RedisManager():
    def __init__(self, remote, host, port):
        self.logger = logging.getLogger(__name__)
        self.r_instance = None
        self.remote = remote
        self.host = host
        self.port = port

    def redis_server_is_up(self):
        is_up = False
        try:
            c = new_redis_client(self.host, self.port)
            c.ping()
            is_up = True
            self.logger.debug(
                'detected a redis-server instance running, so attaching to it')
        except redis_ex.ConnectionError:
            self.logger.warning('not detected a redis-server running')
        finally:
            return is_up

    def create_redislite_if_needed(self):
        # Check whether a redis server is already running. This matters when remote mode
        # is disabled but a local redis-server instance is still up, because we are about
        # to start our own redislite instance on the same host and port.
        if not self.remote and self.redis_server_is_up():
            raise RuntimeError(
                "Able to connect to redis when should be creating one!")
        elif self.remote and not self.redis_server_is_up():
            #we asked to use an external redis, but it doesn't exist
            raise RuntimeError("Unable to connect to redis")

        if not self.remote and not self.r_instance:
            self.redis_db_file = tmp.mktemp(suffix='.rdb', dir='/tmp')
            self.r_instance = Redis(dbfilename=self.redis_db_file,
                                    serverconfig={
                                        'save': [],
                                        'maxclients': 10000,
                                        'bind': str(self.host),
                                        'port': str(self.port)
                                    })

    def __enter__(self):
        self.create_redislite_if_needed()
        return self

    def __exit__(self, type, value, traceback):
        if self.r_instance:
            self.r_instance.shutdown()
            self.r_instance = None
            os.remove(self.redis_db_file + '.settings')
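
A hedged usage sketch for the RedisManager above, assuming the imports its methods rely on (logging, os, tempfile as tmp, redislite's Redis, redis.exceptions as redis_ex) and its new_redis_client helper; host and port values are hypothetical:

with RedisManager(remote=False, host='127.0.0.1', port=16379) as manager:
    # __enter__ started an embedded redislite server bound to host:port
    client = new_redis_client(manager.host, manager.port)
    client.set('status', 'ready')
    print(client.get('status'))                # b'ready'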
Example no. 5
    def __init__(self, dbPath):
        """
        Parameters
        ----------
        dbPath : str
            path of the redis DB
        """

        self.conn = Redis(dbPath)
Example no. 6
def tests():

    b = Banco(lambda: Redis('/tmp/redis.db'))
    b.set(1, 2)
    assert b.get(1) == 2, "failed to retrieve b[1]: %s" % str(b.get(1))
    print("assert b.get(1) == 2", str(b.get(1)))
    b.set(1, 3)
    assert b.get(1) == 3, "failed to retrieve b[1]: %s" % str(b.get(1))
    c = b.save(4)
    assert b.get(c) == 4, "failed to retrieve b[c]: %s" % str(b.get(c))
Example no. 7
    def create_redislite_if_needed(self):
        # Check whether a redis server is already running. This matters when remote mode
        # is disabled but a local redis-server instance is still up, because we are about
        # to start our own redislite instance on the same host and port.
        if not self.remote and self.redis_server_is_up():
            raise RuntimeError(
                "Able to connect to redis when should be creating one!")
        elif self.remote and not self.redis_server_is_up():
            #we asked to use an external redis, but it doesn't exist
            raise RuntimeError("Unable to connect to redis")

        if not self.remote and not self.r_instance:
            self.redis_db_file = tmp.mktemp(suffix='.rdb', dir='/tmp')
            self.r_instance = Redis(dbfilename=self.redis_db_file,
                                    serverconfig={
                                        'save': [],
                                        'maxclients': 10000,
                                        'bind': str(self.host),
                                        'port': str(self.port)
                                    })
Example no. 8
 def __init__(
         self, name, serializer=pickle, redis=None, max_queue_length=None,
         **kwargs
 ):
     self.name = name
     self.serializer = serializer
     self.max_queue_length = max_queue_length
     if redis:
         self.__redis = redis
     else:
         self.__redis = Redis(**kwargs)
Example no. 9
 def handler(self, sub_command, args):
     if sub_command == "watch":
         # We are connecting as a consumer to a Redis pub/sub channel here. In production we would not
         # want to expose our Redis directly; it would be better to register the client socket connection
         # and push to that socket when a key-update event arrives on Redis. That seems fairly complex
         # and out of scope for this assignment.
         with Redis("/tmp/redis/data.db") as conn:
             pubsub = conn.pubsub()
             pubsub.subscribe("key-update")
             while True:
                 data = pubsub.get_message()
                 data and print(data["data"])
                 sleep(1)
Example no. 10
def redis():
    rd = Redis()
    keys = set(rd.keys())
    yield rd
    to_del = [k for k in rd.keys() if k not in keys]
    if to_del:
        rd.delete(*to_del)
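
The generator above reads like a pytest fixture that snapshots the keys present before a test and deletes whatever the test added; a hypothetical test using it could look like this:

def test_counter(redis):
    redis.set('hits', 1)
    redis.incr('hits')
    assert int(redis.get('hits')) == 2
    # 'hits' did not exist before the test, so the fixture teardown deletes it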
Example no. 11
class RedisClient:

    conn = ""
    CHANNEL = "key-update"
    pubsub = ""

    def __init__(self, db_path):
        os.makedirs(os.path.dirname(db_path), exist_ok=True)

        try:
            if not self.conn:
                self.conn = Redis(db_path)
        except Exception:
            raise Exception("Unable to create connection with Redis server")

    def get_key(self, key):
        return self.conn.get(key)

    def set_key(self, key, value, publish_update=False):
        if publish_update:
            self.publish_update(message=json.dumps({key: value}))

        return self.conn.set(key, value)

    def remove_key(self, key):
        return self.conn.delete(key)

    def publish_update(self, channel=CHANNEL, message=""):
        self.conn.pubsub()
        self.conn.publish(channel, message)

    def get_update(self, channel=CHANNEL):
        if not self.pubsub:
            self.pubsub = self.conn.pubsub()
            self.pubsub.subscribe(channel)

        return self.pubsub.get_message(ignore_subscribe_messages=True)
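
A minimal sketch of how the RedisClient above might be exercised (hypothetical path and key names; assumes json, os and redislite's Redis are imported as the class implies):

client = RedisClient('/tmp/redis/data.db')
client.get_update()                                # first call subscribes to the "key-update" channel
client.set_key('color', 'blue', publish_update=True)
print(client.get_key('color'))                     # b'blue'
print(client.get_update())                         # the published {"color": "blue"} update, once delivered
client.remove_key('color')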
Example no. 12
def create_app(config_name):
    app = Flask(__name__, static_url_path='')
    # This first loads the configuration from eg. config['development'] which corresponds to the DevelopmentConfig class in the config.py
    app.config.from_object(config[config_name])
    # Then you can override the values with the contents of the file the OPENTARGETS_API_LOCAL_SETTINGS environment variable points to.
    # For eg:
    # $ export OPENTARGETS_API_LOCAL_SETTINGS=/path/to/settings.cfg
    #
    # where settings.cfg looks like:
    #
    # DEBUG = False
    # SECRET_KEY = 'foo'
    #
    app.config.from_envvar("OPENTARGETS_API_LOCAL_SETTINGS", silent=True)

    config[config_name].init_app(app)
    api_version = app.config['API_VERSION']
    api_version_minor = app.config['API_VERSION_MINOR']


    app.logger.info('looking for elasticsearch at: %s' % app.config['ELASTICSEARCH_URL'])


    app.extensions['redis-core'] = Redis(app.config['REDIS_SERVER_PATH'], db=0) #served data
    app.extensions['redis-service'] = Redis(app.config['REDIS_SERVER_PATH'], db=1) #cache, rate limit and internal things
    app.extensions['redis-user'] = Redis(app.config['REDIS_SERVER_PATH'], db=2)# user info
    '''setup cache'''
    app.extensions['redis-service'].config_set('save','')
    app.extensions['redis-service'].config_set('appendonly', 'no')
    icache = InternalCache(app.extensions['redis-service'],
                           str(api_version_minor))
    ip2org = IP2Org(icache)
    if app.config['ELASTICSEARCH_URL']:
        es = Elasticsearch(app.config['ELASTICSEARCH_URL'],
                           # # sniff before doing anything
                           # sniff_on_start=True,
                           # # refresh nodes after a node fails to respond
                           # sniff_on_connection_fail=True,
                           # # and also every 60 seconds
                           # sniffer_timeout=60
                           timeout=60 * 20,
                           maxsize=32,
                           )
    else:
        es = None
    '''elasticsearch handlers'''
    app.extensions['esquery'] = esQuery(es,
                                        DataTypes(app),
                                        DataSourceScoring(app),
                                        index_data=app.config['ELASTICSEARCH_DATA_INDEX_NAME'],
                                        index_efo=app.config['ELASTICSEARCH_EFO_LABEL_INDEX_NAME'],
                                        index_eco=app.config['ELASTICSEARCH_ECO_INDEX_NAME'],
                                        index_genename=app.config['ELASTICSEARCH_GENE_NAME_INDEX_NAME'],
                                        index_expression=app.config['ELASTICSEARCH_EXPRESSION_INDEX_NAME'],
                                        index_reactome=app.config['ELASTICSEARCH_REACTOME_INDEX_NAME'],
                                        index_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_INDEX_NAME'],
                                        index_search=app.config['ELASTICSEARCH_DATA_SEARCH_INDEX_NAME'],
                                        index_relation=app.config['ELASTICSEARCH_DATA_RELATION_INDEX_NAME'],
                                        docname_data=app.config['ELASTICSEARCH_DATA_DOC_NAME'],
                                        docname_efo=app.config['ELASTICSEARCH_EFO_LABEL_DOC_NAME'],
                                        docname_eco=app.config['ELASTICSEARCH_ECO_DOC_NAME'],
                                        docname_genename=app.config['ELASTICSEARCH_GENE_NAME_DOC_NAME'],
                                        docname_expression=app.config['ELASTICSEARCH_EXPRESSION_DOC_NAME'],
                                        docname_reactome=app.config['ELASTICSEARCH_REACTOME_REACTION_DOC_NAME'],
                                        docname_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_DOC_NAME'],
                                        docname_search=app.config['ELASTICSEARCH_DATA_SEARCH_DOC_NAME'],
                                        # docname_search_target=app.config['ELASTICSEARCH_DATA_SEARCH_TARGET_DOC_NAME'],
                                        # docname_search_disease=app.config['ELASTICSEARCH_DATA_SEARCH_DISEASE_DOC_NAME'],
                                        docname_relation=app.config['ELASTICSEARCH_DATA_RELATION_DOC_NAME'],
                                        log_level=app.logger.getEffectiveLevel(),
                                        cache=icache
                                        )

    app.extensions['es_access_store'] = esStore(es,
                                        eventlog_index=app.config['ELASTICSEARCH_LOG_EVENT_INDEX_NAME'],
                                        ip2org=ip2org,
                                        )
    '''mixpanel handlers'''
    if Config.MIXPANEL_TOKEN:
        mp = Mixpanel(Config.MIXPANEL_TOKEN, consumer=AsyncBufferedConsumer())
        app.extensions['mixpanel']= mp
        app.extensions['mp_access_store'] = MixPanelStore(mp,
                                            ip2org=ip2org,
                                            )


        app.extensions['proxy'] = ProxyHandler(allowed_targets=app.config['PROXY_SETTINGS']['allowed_targets'],
                                               allowed_domains=app.config['PROXY_SETTINGS']['allowed_domains'],
                                               allowed_request_domains=app.config['PROXY_SETTINGS']['allowed_request_domains'])

    # basepath = app.config['PUBLIC_API_BASE_PATH']+api_version
    # cors = CORS(app, resources=r'/api/*', allow_headers='Content-Type,Auth-Token')

    ''' define cache'''
    # cache = Cache(config={'CACHE_TYPE': 'simple'})
    # cache.init_app(latest_blueprint)
    # latest_blueprint.cache = cache
    # latest_blueprint.extensions['cache'] = cache
    # app.cache = SimpleCache()
    app.cache = FileSystemCache('/tmp/cttv-rest-api-cache', threshold=100000, default_timeout=60*60, mode=777)

    '''Set usage limiter '''
    # limiter = Limiter(global_limits=["2000 per hour", "20 per second"])
    # limiter.init_app(app)# use redis to store limits

    '''Load api keys in redis'''
    rate_limit_file = app.config['USAGE_LIMIT_PATH']
    if not os.path.exists(rate_limit_file):
        rate_limit_file = '../'+rate_limit_file
    csvfile = None
    if Config.GITHUB_AUTH_TOKEN:
        r = requests.get('https://api.github.com/repos/opentargets/rest_api_auth/contents/rate_limit.csv',
                         headers = {'Authorization': 'token %s'%Config.GITHUB_AUTH_TOKEN,
                                    'Accept': 'application/vnd.github.v3.raw'})
        if r.ok:
            csvfile = r.text.split('\n')
            app.logger.info('Retrieved rate limit file from github remote')
        else:
            app.logger.warning('Cannot retrieve rate limit file from remote, SKIPPED!')
    elif os.path.exists(rate_limit_file):
        csvfile = open(rate_limit_file)
        app.logger.info('Using dummy rate limit file')

    if csvfile is None:
        app.logger.error('cannot find rate limit file: %s. RATE LIMIT QUOTA LOAD SKIPPED!'%rate_limit_file)
    else:
        reader = csv.DictReader(csvfile)
        for row in reader:
            auth_key = AuthKey(**row)
            app.extensions['redis-user'].hmset(auth_key.get_key(), auth_key.__dict__)
        try:
            csvfile.close()
        except:
            pass
        app.logger.info('successfully loaded rate limit file')


    '''load ip name resolution'''
    ip_resolver = defaultdict(lambda: "PUBLIC")
    ip_list_file = app.config['IP_RESOLVER_LIST_PATH']
    if not os.path.exists(ip_list_file):
        ip_list_file = '../' + ip_list_file
    if os.path.exists(ip_list_file):
        with open(ip_list_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                net = IPNetwork(row['ip'])
                ip_resolver[net] = row['org']
    else:
        app.logger.warning('cannot find IP list for IP resolver. All traffic will be logged as PUBLIC')
    app.config['IP_RESOLVER'] = ip_resolver



    '''compress http response'''
    compress = Compress()
    compress.init_app(app)

    latest_blueprint = Blueprint('latest', __name__)
    current_version_blueprint = Blueprint(str(api_version), __name__)
    current_minor_version_blueprint = Blueprint(str(api_version_minor), __name__)


    specpath = '/cttv'

    if app.config['PROFILE'] == True:
        from werkzeug.contrib.profiler import ProfilerMiddleware
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])


    '''set the right prefixes'''

    create_api(latest_blueprint, api_version, specpath)
    create_api(current_version_blueprint, api_version, specpath)
    create_api(current_minor_version_blueprint, api_version_minor, specpath)

    # app.register_blueprint(latest_blueprint, url_prefix='/latest/platform')
    app.register_blueprint(current_version_blueprint, url_prefix='/v'+str(api_version) + '/platform')
    app.register_blueprint(current_minor_version_blueprint, url_prefix='/v'+str(api_version_minor) + '/platform')


    '''serve the static docs'''
    
    try:
        '''
        NOTE: this file gets created only at deployment time
        '''
        openapi_def = yaml.load(open('app/static/openapi.yaml', 'r'))
        app.logger.info('parsing swagger from static/openapi.yaml')

    except IOError:
        '''if we are not deployed, then simply use the template'''
        openapi_def = yaml.load(open('openapi.template.yaml', 'r'))
        app.logger.error('parsing swagger from openapi.template.yaml')

    with open("api-description.md", "r") as f:
        desc = f.read()
    openapi_def['info']['description'] = desc
    openapi_def['basePath'] = '/v%s' % str(api_version)
    @app.route('/v%s/platform/swagger' % str(api_version))
    def serve_swagger(apiversion=api_version):
        return jsonify(openapi_def)


    @app.route('/v%s/platform/docs' % str(api_version))
    def render_redoc(apiversion=api_version):
        return render_template('docs.html',api_version=apiversion)


    '''pre and post-request'''


    @app.before_request
    def before_request():
        g.request_start = datetime.now()
    @app.after_request
    def after(resp):
        try:
            rate_limiter = RateLimiter()
            now = datetime.now()
            took = (now - g.request_start).total_seconds()*1000
            if took > 500:
                cache_time = str(int(3600*took))  # set the cache to last one hour for each second spent in the request
                resp.headers.add('X-Accel-Expires', cache_time)
            took = int(round(took))
            LogApiCallWeight(took)
            # if took < RateLimiter.DEFAULT_CALL_WEIGHT:
            #     took = RateLimiter.DEFAULT_CALL_WEIGHT
            current_values = increment_call_rate(took,rate_limiter)
            now = datetime.now()
            ceil10s=round(ceil_dt_to_future_time(now, 10),2)
            ceil1h=round(ceil_dt_to_future_time(now, 3600),2)
            usage_left_10s = rate_limiter.short_window_rate-current_values['short']
            usage_left_1h = rate_limiter.long_window_rate - current_values['long']
            min_ceil = ceil10s
            if usage_left_1h <0:
                min_ceil = ceil1h
            if (usage_left_10s < 0) or (usage_left_1h <0):
                resp.headers.add('Retry-After', min_ceil)
            resp.headers.add('X-API-Took', took)
            resp.headers.add('X-Usage-Limit-10s', rate_limiter.short_window_rate)
            resp.headers.add('X-Usage-Limit-1h', rate_limiter.long_window_rate)
            resp.headers.add('X-Usage-Remaining-10s', usage_left_10s)
            resp.headers.add('X-Usage-Remaining-1h', usage_left_1h)
            # resp.headers.add('X-Usage-Limit-Reset-10s', ceil10s)
            # resp.headers.add('X-Usage-Limit-Reset-1h', ceil1h)
            resp.headers.add('Access-Control-Allow-Origin', '*')
            resp.headers.add('Access-Control-Allow-Headers','Content-Type,Auth-Token')
            resp.headers.add('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE,OPTIONS')
            if do_not_cache(request):# do not cache in the browser
                resp.headers.add('Cache-Control', "no-cache, must-revalidate, max-age=0")
            else:
                resp.headers.add('Cache-Control', "no-transform, public, max-age=%i, s-maxage=%i"%(took*1800/1000, took*9000/1000))
            return resp

        except Exception as e:
            app.logger.exception('failed request teardown function: %s', e)
            return resp



    # Override the HTTP exception handler.
    app.handle_http_exception = get_http_exception_handler(app)
    return app
Example no. 13
 def __init__(self, dbfile='auth.db'):
     self.conn = Redis(dbfile)
Example no. 14
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/

STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
LOGIN_REDIRECT_URL = '/'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'

#Celery-Redis settings
#CELERY_BROKER_URL = 'redis://localhost:6379'

# Create a Redis instance using redislite
REDIS_DB_PATH = os.path.join('/tmp/redis.rdb')
rdb = Redis(REDIS_DB_PATH)
REDIS_SOCKET_PATH = 'redis+socket://%s' % (rdb.socket_file, )

# Use redislite for the Celery broker
CELERY_BROKER_URL = REDIS_SOCKET_PATH

CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'

CELERY_BEAT_SCHEDULE = {
    'add_unical_pages_for_users': {
        'task': 'wiki.tasks.add_unical_pages_for_users',
        'schedule': 10.0  #crontab(minute = 0, hour='*/1'),
    },
}
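
The beat schedule above points at wiki.tasks.add_unical_pages_for_users, which is not shown; a hypothetical sketch of such a task module, only to illustrate how the schedule, the broker URL and the task connect:

# wiki/tasks.py (sketch only; the real task body is not part of this example)
from celery import shared_task

@shared_task
def add_unical_pages_for_users():
    # periodic work goes here; Celery reaches Redis through the
    # redislite socket configured via CELERY_BROKER_URL above
    pass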
Example no. 15
def redis():
    try:
        redis_connection = Redis("ace.rdb")
        yield redis_connection
    finally:
        redis_connection.close()
Example no. 16
def _get_redis(db=JSONDB):
    return Redis(db)
Example no. 17
import os
from flask import Flask, render_template, request, redirect
from werkzeug.utils import secure_filename
from cgi import escape
from flask_dropzone import Dropzone
import PyPDF2
import docx2txt
from tempfile import mkdtemp
from shutil import rmtree
import csv
from redislite import Redis

app = Flask(__name__)
redis_connection = Redis('/tmp/redis.db')

collegeDataset = open('collegeDataset.csv', 'r', encoding='utf-8', newline='\n')
collegeDatasetReader = csv.reader(collegeDataset)
collegeData = []
for row in collegeDatasetReader:
    collegeData.append(row)

app.config.update(
    # Flask-Dropzone config:
    DROPZONE_ALLOWED_FILE_CUSTOM=True,
    DROPZONE_ALLOWED_FILE_TYPE='.pdf, .docx, .txt',
    DROPZONE_MAX_FILE_SIZE=3,
    DROPZONE_MAX_FILES=20,
    DROPZONE_UPLOAD_MULTIPLE = True,
)

dropzone = Dropzone(app)
Example no. 18
NOSE_ARGS = [
    '--with-coverage',
    '--cover-package=general,project,employee',
]

PAGINATION_PER_PAGE = 20

try:
    from .local_settings import *
except ImportError:
    pass

# CELERY

REDIS_DB_PATH = os.path.join(DATA_DIR,'my_redis.db')
rdb = Redis(REDIS_DB_PATH, serverconfig={'port': '1116'})
REDIS_SOCKET_PATH = 'redis+socket://%s' % (rdb.socket_file, )
BROKER_URL = REDIS_SOCKET_PATH
CELERY_RESULT_BACKEND = REDIS_SOCKET_PATH

CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_IMPORTS = ['general.tasks']
CELERY_TIMEZONE = 'UTC'


# EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'


EMAIL_USE_TLS = True
Example no. 19
STATIC_URL = '/static/'
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, "static"),
]

SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')

BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_TMP = os.path.join(BASE_DIR, 'static')
STATIC_URL = '/static/'

os.makedirs(STATIC_TMP, exist_ok=True)
os.makedirs(STATIC_ROOT, exist_ok=True)

LOGIN_REDIRECT_URL = '/reception'
LOGIN_URL = '/admin'

rdb = Redis('/tmp/redis.db', serverconfig={'port': '6379'})

CHANNEL_LAYERS = {
    'default': {
        'BACKEND': 'channels_redis.core.RedisChannelLayer',
        'CONFIG': {
            "hosts": [os.environ.get('REDIS_URL', 'redis://localhost:6379')],
        },
    },
}
Example no. 20
    def process(self):
        logging.basicConfig(
            filename='output_mongo.log',
            format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
            level=logging.INFO)

        r_server = Redis(dbfilename=str(REDISLITE_DB_PATH),
                         serverconfig={
                             'save': [],
                             'maxclients': 10000
                         })
        mongo_client = MongoClient('host:port')
        db = mongo_client['xxx']
        db.authenticate('uname', 'pwd')
        gwas23andme = db['xxxx']

        no_of_workers = NO_OF_WORKERS or multiprocessing.cpu_count()

        mongo_q = RedisQueue(queue_id=UNIQUE_RUN_ID + '|mongo_processor_q',
                             max_size=MAX_CHUNKS * no_of_workers,
                             job_timeout=120)
        transformer_q = RedisQueue(queue_id=UNIQUE_RUN_ID +
                                   '|data_transformer_q',
                                   max_size=MAX_CHUNKS * no_of_workers,
                                   job_timeout=120)
        writer_q = RedisQueue(queue_id=UNIQUE_RUN_ID + '|writer_q',
                              max_size=MAX_CHUNKS * no_of_workers,
                              job_timeout=120)

        workers = [
            MongoWorkerProcess(
                mongo_q,
                r_server.db,
                transformer_q,
            ) for i in range(no_of_workers)
        ]

        for w in workers:
            w.start()

        transformers = [
            DataTransformerProcess(
                transformer_q,
                r_server.db,
                writer_q,
            ) for i in range(no_of_workers)
        ]

        for t in transformers:
            t.start()

        # writers = [FileWriterProcess(writer_q,
        #                               r_server.db
        #                               )for i in range(no_of_workers)]
        #
        # for writer in writers:
        #     writer.start()

        writer = FileWriterProcess(writer_q, r_server.db)
        writer.start()

        logging.info('Getting distinct genes and phenotypes')
        distinct_phenotypes = list(gwas23andme.distinct("source"))
        distinct_genes = list(gwas23andme.distinct("ingenes"))
        logging.info('Start the real deal here!!!! ')
        for phenotype in distinct_phenotypes:
            logging.info('Processing phenotype {} '.format(phenotype))
            logging.info('Total docs for phenotype {} are {}'.format(
                phenotype, gwas23andme.count({"source": phenotype})))

            mongo_q.put((phenotype, 'test'), r_server)
            # for gene in distinct_genes:
            #     mongo_q.put((phenotype,gene),r_server)

        mongo_q.set_submission_finished(r_server=r_server)

        for a in workers:
            a.join()
        for a in transformers:
            a.join()
        writer.join()
Example no. 21
class Banco:
    def __init__(self, base):
        self.banco = base()

    def save(self, value):
        key = str(uuid1())
        self.set(key, value)
        return key

    def get(self, key):
        return loads(self.banco.get(key).decode('utf8'))

    def set(self, key, value):
        self.banco.set(key, dumps(value))
        return key


DBT = lambda: Redis('/tmp/redis.db')


def tests():

    b = Banco(DBT)
    b.set(1, 2)
    assert int(b.get(1)) == 2, "failed to retrieve b[1]: %s" % str(b.get(1))
    print("assert b.get(1) == 2", str(b.get(1)))
    b.set(1, 3)
    assert int(b.get(1)) == 3, "failed to retrieve b[1]: %s" % str(b.get(1))
    c = b.save('oi maçã')
    assert b.get(c) == 'oi maçã', "failed to retrieve b[c]: %s" % str(b.get(c))


if __name__ == "__main__":
Example no. 22
__author__ = 'carlo'
import os
from redislite import Redis
# from tinydb.storages import MemoryStorage
from uuid import uuid1
# DBM = lambda :TinyDB(storage=MemoryStorage)

JSONDB = os.path.dirname(__file__) + '/eica.json'

DBF = lambda: Redis(JSONDB)


class Banco:
    def __init__(self, base=DBF):
        self.banco = base()

    def save(self, value):
        key = str(uuid1())
        self.banco.set(key, value)
        return key

    def get(self, key):
        return self.banco.get(key)

    def set(self, key, value):
        self.banco.set(key, value)
        return key


def tests():
Example no. 23
def create_app(config_name):
    app = Flask(__name__, static_url_path='')
    # This first loads the configuration from eg. config['development'] which corresponds to the DevelopmentConfig class in the config.py
    app.config.from_object(config[config_name])
    # Then you can override the values with the contents of the file the OPENTARGETS_API_LOCAL_SETTINGS environment variable points to.
    # For eg:
    # $ export OPENTARGETS_API_LOCAL_SETTINGS=/path/to/settings.cfg
    #
    # where settings.cfg looks like:
    #
    # DEBUG = False
    # SECRET_KEY = 'foo'
    #
    app.config.from_envvar("OPENTARGETS_API_LOCAL_SETTINGS", silent=True)

    config[config_name].init_app(app)
    api_version = app.config['API_VERSION']
    api_version_minor = app.config['API_VERSION_MINOR']


    app.logger.info('looking for elasticsearch at: %s' % app.config['ELASTICSEARCH_URL'])


    app.extensions['redis-core'] = Redis(app.config['REDIS_SERVER_PATH'], db=0) #served data
    app.extensions['redis-service'] = Redis(app.config['REDIS_SERVER_PATH'], db=1) #cache, rate limit and internal things
    app.extensions['redis-user'] = Redis(app.config['REDIS_SERVER_PATH'], db=2)# user info
    '''setup cache'''
    app.extensions['redis-service'].config_set('save','')
    app.extensions['redis-service'].config_set('appendonly', 'no')
    icache = InternalCache(app.extensions['redis-service'],
                           str(api_version_minor))
    ip2org = IP2Org(icache)
    if app.config['ELASTICSEARCH_URL']:
        es = Elasticsearch(app.config['ELASTICSEARCH_URL'],
                           # # sniff before doing anything
                           # sniff_on_start=True,
                           # # refresh nodes after a node fails to respond
                           # sniff_on_connection_fail=True,
                           # # and also every 60 seconds
                           # sniffer_timeout=60
                           timeout=60 * 20,
                           maxsize=32,
                           )
    else:
        es = None
    '''elasticsearch handlers'''
    app.extensions['esquery'] = esQuery(
        es,
        DataTypes(app),
        DataSourceScoring(app),
        index_data=app.config['ELASTICSEARCH_DATA_INDEX_NAME'],
        index_drug=app.config['ELASTICSEARCH_DRUG_INDEX_NAME'],
        index_efo=app.config['ELASTICSEARCH_EFO_LABEL_INDEX_NAME'],
        index_eco=app.config['ELASTICSEARCH_ECO_INDEX_NAME'],
        index_genename=app.config['ELASTICSEARCH_GENE_NAME_INDEX_NAME'],
        index_expression=app.config['ELASTICSEARCH_EXPRESSION_INDEX_NAME'],
        index_reactome=app.config['ELASTICSEARCH_REACTOME_INDEX_NAME'],
        index_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_INDEX_NAME'],
        index_search=app.config['ELASTICSEARCH_DATA_SEARCH_INDEX_NAME'],
        index_relation=app.config['ELASTICSEARCH_DATA_RELATION_INDEX_NAME'],
        docname_data=app.config['ELASTICSEARCH_DATA_DOC_NAME'],
        docname_drug=app.config['ELASTICSEARCH_DRUG_DOC_NAME'],
        docname_efo=app.config['ELASTICSEARCH_EFO_LABEL_DOC_NAME'],
        docname_eco=app.config['ELASTICSEARCH_ECO_DOC_NAME'],
        docname_genename=app.config['ELASTICSEARCH_GENE_NAME_DOC_NAME'],
        docname_expression=app.config['ELASTICSEARCH_EXPRESSION_DOC_NAME'],
        docname_reactome=app.config['ELASTICSEARCH_REACTOME_REACTION_DOC_NAME'],
        docname_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_DOC_NAME'],
        docname_search=app.config['ELASTICSEARCH_DATA_SEARCH_DOC_NAME'],
        # docname_search_target=app.config['ELASTICSEARCH_DATA_SEARCH_TARGET_DOC_NAME'],
        # docname_search_disease=app.config['ELASTICSEARCH_DATA_SEARCH_DISEASE_DOC_NAME'],
        docname_relation=app.config['ELASTICSEARCH_DATA_RELATION_DOC_NAME'],
        log_level=app.logger.getEffectiveLevel(),
        cache=icache
        )

    app.extensions['es_access_store'] = esStore(es,
        eventlog_index=app.config['ELASTICSEARCH_LOG_EVENT_INDEX_NAME'],
        ip2org=ip2org,
        )

    '''mixpanel handlers'''
    if Config.MIXPANEL_TOKEN:
        mp = Mixpanel(Config.MIXPANEL_TOKEN, consumer=AsyncBufferedConsumer())
        app.extensions['mixpanel']= mp
        app.extensions['mp_access_store'] = MixPanelStore(
            mp,
            ip2org=ip2org,
            )


        app.extensions['proxy'] = ProxyHandler(
            allowed_targets=app.config['PROXY_SETTINGS']['allowed_targets'],
            allowed_domains=app.config['PROXY_SETTINGS']['allowed_domains'],
            allowed_request_domains=app.config['PROXY_SETTINGS']['allowed_request_domains'])

    # basepath = app.config['PUBLIC_API_BASE_PATH']+api_version
    # cors = CORS(app, resources=r'/api/*', allow_headers='Content-Type,Auth-Token')

    ''' define cache'''
    # cache = Cache(config={'CACHE_TYPE': 'simple'})
    # cache.init_app(latest_blueprint)
    # latest_blueprint.cache = cache
    # latest_blueprint.extensions['cache'] = cache
    # app.cache = SimpleCache()
    app.cache = FileSystemCache('/tmp/cttv-rest-api-cache', threshold=100000, default_timeout=60*60, mode=777)

    '''load ip name resolution'''
    ip_resolver = defaultdict(lambda: "PUBLIC")
    ip_list_file = app.config['IP_RESOLVER_LIST_PATH']
    if not os.path.exists(ip_list_file):
        ip_list_file = '../' + ip_list_file
    if os.path.exists(ip_list_file):
        with open(ip_list_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                net = IPNetwork(row['ip'])
                ip_resolver[net] = row['org']
    else:
        app.logger.warning('cannot find IP list for IP resolver. All traffic will be logged as PUBLIC')
    app.config['IP_RESOLVER'] = ip_resolver



    '''compress http response'''
    compress = Compress()
    compress.init_app(app)

    latest_blueprint = Blueprint('latest', __name__)
    current_version_blueprint = Blueprint(str(api_version), __name__)
    current_minor_version_blueprint = Blueprint(str(api_version_minor), __name__)


    specpath = '/cttv'

    if app.config['PROFILE'] == True:
        from werkzeug.contrib.profiler import ProfilerMiddleware
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])


    '''set the right prefixes'''

    create_api(latest_blueprint, api_version, specpath)
    create_api(current_version_blueprint, api_version, specpath)
    create_api(current_minor_version_blueprint, api_version_minor, specpath)

    # app.register_blueprint(latest_blueprint, url_prefix='/latest/platform')
    app.register_blueprint(current_version_blueprint, url_prefix='/v'+str(api_version) + '/platform')
    app.register_blueprint(current_minor_version_blueprint, url_prefix='/v'+str(api_version_minor) + '/platform')


    '''serve the static docs'''
    openapi_def = yaml.load(open('app/static/openapi.template.yaml', 'r'))
    app.logger.info('parsing swagger from app/static/openapi.template.yaml')

    #inject the description into the docs
    with open("api-description.md", "r") as f:
        desc = f.read()
    openapi_def['info']['description'] = desc
    openapi_def['basePath'] = '/v%s' % str(api_version)
    @app.route('/v%s/platform/swagger' % str(api_version))
    def serve_swagger(apiversion=api_version):
        return jsonify(openapi_def)

    @app.route('/v%s/platform/docs/swagger-ui' % str(api_version))
    def render_swaggerui(apiversion=api_version):
        return render_template('swaggerui.html',api_version=apiversion)

    '''pre and post-request'''


    @app.before_request
    def before_request():
        g.request_start = datetime.now()
    @app.after_request
    def after(resp):
        try:
            now = datetime.now()
            took = int(round((now - g.request_start).total_seconds()))
            resp.headers.add('Access-Control-Allow-Origin', '*')
            resp.headers.add('Access-Control-Allow-Headers','Content-Type,Auth-Token')
            resp.headers.add('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE,OPTIONS')
            if do_not_cache(request):# do not cache in the browser
                resp.headers.add('Cache-Control', "no-cache, must-revalidate, max-age=0")
            else:
                cache = 30 * 24 * 60 * 60  # cache for thirty days
                resp.headers.add('Cache-Control', "no-transform, max-age=%i"%(cache))
            return resp

        except Exception as e:
            app.logger.exception('failed request teardown function: %s', e)
            return resp

    # Override the HTTP exception handler.
    app.handle_http_exception = get_http_exception_handler(app)
    return app
Example no. 24
from redislite import Redis
from redislite import patch

from settings import REDIS_PORT

# Original intention was to use Redis for caching and task queue.
# But I found out that the server has both memcached and gearman installed,
# so this is no longer needed, as it will require installing another server.

# Also, since each process importing this file will try to launch its own
# instance of Redis on the same local port, this will be a problem once worker
# processes are started.

patch.patch_redis()

redis = Redis(serverconfig={'port': REDIS_PORT})
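
A small sketch of what patch.patch_redis() is for: once the patch is applied, consumer code that imports the ordinary redis package is served by the embedded redislite instance instead (key name is hypothetical):

import redis

connection = redis.Redis()            # backed by redislite once patch_redis() has run
connection.set('patched', 'yes')
print(connection.get('patched'))      # b'yes'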
Example no. 25
# ================================================================================

CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'
    }
}

# ================================================================================
# static files (CSS, JavaScript, Images)
#
# References:
#   * https://docs.djangoproject.com/en/1.9/howto/static-files/
# ================================================================================

STATIC_ROOT = os.path.join(BASE_DIR, 'temporary', 'static')

# ================================================================================
# Celery
#
# References:
#   * http://docs.celeryproject.org/en/master/getting-started/index.html
# ================================================================================

CELERY_REDIS_PORT = os.environ.get('CELERY_REDIS_PORT', '8001')
CELERY_REDIS = Redis(os.path.join(BASE_DIR, 'temporary', 'celery.redis'),
                     serverconfig={'port': CELERY_REDIS_PORT})

BROKER_URL = 'redis://127.0.0.1:{}/0'.format(CELERY_REDIS_PORT)
CELERY_RESULT_BACKEND = BROKER_URL
Example no. 26
handlers = {
    UI_CHANNEL: ui_data_handler,
    TTS_CHANNEL: tts_data_handler,
    LEDS_CHANNEL: leds_data_handler
}
channels = list(handlers.keys())


# Main Listener Event Handler
def main_event_handler(self, event):
    print("Logic main event handler called")
    handlers[event['channel']](self.redis, event['data'])


if __name__ == '__main__':
    print("Channel logic started")
    redis_object = Redis(REDIS_DB_FILE)
    listener = RedisListener('Logic Listener', redis_object, channels,
                             main_event_handler)
    listener.start()
    time.sleep(10)
    print('Pushing to ui channel')
    redis_object.publish(UI_CHANNEL, 'Test message!')
    time.sleep(5)
    print('Pushing to TTS channel')
    redis_object.publish(TTS_CHANNEL, 'Test speech message')
    time.sleep(10)
    print('Pushing to ui channel again')
    redis_object.publish(UI_CHANNEL, 'Another test message!')

    # emotions & speech to text need pubsub objects
    # ui & text to speech & leds need to subscribe & publish
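
The RedisListener class is not shown here; a minimal sketch of the pub/sub mechanics it presumably wraps, using redislite directly (file path and channel name are hypothetical):

import time
from redislite import Redis

r = Redis('/tmp/channels.rdb')
pubsub = r.pubsub(ignore_subscribe_messages=True)
pubsub.subscribe('ui')                       # stand-in for UI_CHANNEL
r.publish('ui', 'Test message!')
time.sleep(0.1)                              # give the message a moment to arrive
message = pubsub.get_message()
if message:
    print(message['channel'], message['data'])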
Example no. 27
class HotQueue(object):
    """Simple FIFO message queue stored in a Redis list.

    Parameters
    ----------

    name : str
        name of the queue

    max_queue_length : int
        Maximum length the queue can grow to (the default of None allows the
        queue to grow without any limit).

    serializer : class, module, optional
        the class or module to serialize msgs with, must have
        methods or functions named ``dumps`` and ``loads``,
        `pickle <http://docs.python.org/library/pickle.html>`_ is the default,
        use ``None`` to store messages in plain text (suitable for strings,
        integers, etc)

    redis : redis.Redis, redislite.Redis, optional
        redis connection object, defaults to redislite.Redis with fallback to
        redis.Redis.

    **kwargs
        Additional kwargs to pass to :class:`redislite.Redis`, most commonly
        :attr:`dbfilename`.

    Examples
    --------

    >>> from hotqueue import HotQueue
    >>> queue = HotQueue("myqueue", dbfilename="queue.rdb")

    """

    def __init__(
            self, name, serializer=pickle, redis=None, max_queue_length=None,
            **kwargs
    ):
        self.name = name
        self.serializer = serializer
        self.max_queue_length = max_queue_length
        if redis:
            self.__redis = redis
        else:
            self.__redis = Redis(**kwargs)

    def __len__(self):
        return self.__redis.llen(self.key)

    @property
    def key(self):
        """
        Key in Redis to store the queue

        Returns
        -------
        str
            The name of the key containing the queue in redis.
        """
        return key_for_name(self.name)

    def clear(self):
        """
        Clear the queue of all messages, by deleting the Redis key.
        """
        self.__redis.delete(self.key)

    def consume(self, **kwargs):
        """
        A blocking generator that yields whenever a message is waiting in the
        queue.

        Parameters
        ----------

        **kwargs
            any arguments that :meth:`~hotqueue.HotQueue.get` can
            accept (:attr:`block` will default to ``True`` if not given)

        Yields
        ------
        object
            The deserialized object from the queue.

        Examples
        --------

        >>> queue = HotQueue("example")
        >>> for msg in queue.consume(timeout=1):
        ...     print(msg)
        my message
        another message

        """
        kwargs.setdefault('block', True)
        try:
            while True:
                msg = self.get(**kwargs)
                if msg is None:
                    break
                yield msg
        except KeyboardInterrupt:  # pragma: no cover
            print()
            return

    def get(self, block=False, timeout=None):
        """
        Get a message from the queue.

        Parameters
        ----------

        block : bool
            whether or not to wait until a msg is available in
            the queue before returning; ``False`` by default

        timeout : int
            When using :attr:`block`, if no msg is available
            for :attr:`timeout` in seconds, give up and return

        Returns
        -------
        object
            The deserialized object from the queue.

        Examples
        --------

        >>> queue.get()
        'my message'
        >>> queue.get()
        'another message'

        """
        if block:
            if timeout is None:
                timeout = 0
            msg = self.__redis.blpop(self.key, timeout=timeout)
            if msg is not None:
                msg = msg[1]
        else:
            msg = self.__redis.lpop(self.key)
        if msg is not None and self.serializer is not None:
            msg = self.serializer.loads(msg)

        if isinstance(msg, bytes):
            msg = msg.decode()

        return msg

    def put(self, *msgs):
        """Put one or more messages onto the queue. Example:

        >>> queue.put("my message")
        >>> queue.put("another message")

        To put messages onto the queue in bulk, which can be significantly
        faster if you have a large number of messages:

        >>> queue.put("my message", "another message", "third message")
        """
        if self.serializer is not None:
            msgs = [self.serializer.dumps(m) for m in msgs]
        self.__redis.rpush(self.key, *msgs)
        if self.max_queue_length:
            self.__redis.ltrim(self.key, 0, int(self.max_queue_length) - 1)

    def worker(self, *args, **kwargs):
        """Decorator for using a function as a queue worker. Example:

        >>> @queue.worker(timeout=1)
        ... def printer(msg):
        ...     print(msg)
        >>> printer()
        my message
        another message

        You can also use it without passing any keyword arguments:

        >>> @queue.worker
        ... def printer(msg):
        ...     print(msg)
        >>> printer()
        my message
        another message

        :param kwargs: any arguments that :meth:`~hotqueue.HotQueue.get` can
            accept (:attr:`block` will default to ``True`` if not given)
        """
        def decorator(worker):
            """
            Worker decorator
            :param worker:
            :return:
            """
            @wraps(worker)
            def wrapper(*args):
                """
                Inner wrapper
                :param args:
                :return:
                """
                for msg in self.consume(**kwargs):
                    worker(*args + (msg,))
            return wrapper
        if args:
            return decorator(*args)
        return decorator
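
A short usage sketch for the HotQueue class above (queue name and file path are hypothetical):

queue = HotQueue("emails", dbfilename="/tmp/hotqueue.rdb", max_queue_length=1000)
queue.put({"to": "someone@example.com"}, {"to": "someone.else@example.com"})
print(len(queue))                        # 2

@queue.worker(timeout=1)
def send(message):
    print("sending to", message["to"])

send()                                   # drains the queue, returns once get() times out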
Example no. 28
 def __call__(self):
     redis_instance = Redis('/tmp/redis.db')
     return redis_instance
Example no. 29
from redislite import Redis

from utils.url_utils import get_filtered_links

redis_client = Redis(dbfilename="./redis.db", decode_responses=True)


def redis_cleanup(website_full_url):
    """removed invalid entries from redis caches"""
    # remove intersections
    for anchor in redis_client.sinter("new_urls", "processed_urls"):
        redis_client.srem("new_urls", anchor)
        print("Removed processed URL from redis: {}!\n".format(anchor))
    for anchor in redis_client.smembers("new_urls"):
        if len(get_filtered_links([anchor], website_full_url)) < 1:
            redis_client.srem("new_urls", anchor)


if __name__ == '__main__':
    redis_cleanup("https://www.wikipedia.org/")
    pass
Example no. 30
class RedisStore(DataStore):
    """Implementation of Redis datastore"""
    def __init__(self):
        super().__init__()
        if config.settings.stackl_redis_type == "fake":
            logger.info("Using fake client")

            self.redis = Redis()
        else:
            self.redis = redis.Redis(
                host=config.settings.stackl_redis_host,
                port=config.settings.stackl_redis_port,
                password=config.settings.stackl_redis_password,
                db=0)

    def get(self, **keys):
        """Gets a document from a redis instance"""
        document_key = keys.get("category") + '/' + keys.get(
            "type") + '/' + keys.get("name")
        logger.debug(f"[RedisStore] get on key '{document_key}'")

        redis_value = self.redis.get(document_key)
        if redis_value is None:
            response = self._create_store_response(
                status_code=StatusCode.NOT_FOUND, content={})
        else:
            content = json.loads(redis_value)  # reuse the value fetched above
            response = self._create_store_response(status_code=StatusCode.OK,
                                                   content=content)
        logger.debug(f"[RedisStore] StoreResponse for get: {response}")
        return response

    def get_all(self, category, document_type, wildcard_prefix=""):
        """Gets all documents of a type from a Redis"""
        document_key = f"{category}/{document_type}/{wildcard_prefix}*"
        logger.debug(
            f"[RedisStore] get_all in '{document_key}' for type '{document_type}'"
        )
        content = []
        for key in self.redis.scan_iter(document_key):
            content.append(json.loads(self.redis.get(key)))
        response = self._create_store_response(status_code=StatusCode.OK,
                                               content=content)
        logger.debug(f"[RedisStore] StoreResponse for get: {response}")
        return response

    def get_history(self, category, document_type, name):
        """Gets the snapshots of document from Redis"""
        document_key = category + '/' + document_type + '/' + name
        logger.debug(
            f"[RedisStore] get_history in '{document_key}' for type '{document_type}'"
        )
        content = []
        for key in self.redis.scan_iter(document_key):
            content.append(json.loads(self.redis.get(key)))
        response = self._create_store_response(status_code=StatusCode.OK,
                                               content=content)
        logger.debug(f"[RedisStore] StoreResponse for get: {response}")
        return response

    def put(self, file):
        """Puts a document in Redis"""
        document_key = file.get("category") + '/' + file.get(
            "type") + '/' + file["name"]
        logger.debug(f"[RedisStore] put on '{document_key}' with file {file}")
        self.redis.set(document_key, json.dumps(file))
        response = self._create_store_response(
            status_code=StatusCode.CREATED,
            content=json.loads(self.redis.get(document_key)))
        logger.debug(f"[RedisStore] StoreResponse for put: {response}")
        return response

    def delete(self, **keys):
        """Deletes a document in Redis"""
        document_key = keys.get("category") + '/' + keys.get(
            "type") + '/' + keys.get("name")
        self.redis.delete(document_key)
        response = self._create_store_response(status_code=200, content={})
        logger.debug(f"[RedisStore] StoreResponse for delete: {response}")
        return response
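
A rough usage sketch of the RedisStore API above; heavily hedged, since constructing it depends on the DataStore base class, config.settings and StatusCode, none of which are shown (document fields are hypothetical):

store = RedisStore()     # with stackl_redis_type == "fake" this uses the embedded redislite client
store.put({"category": "items", "type": "stack", "name": "demo", "size": 1})
print(store.get(category="items", type="stack", name="demo"))
print(store.get_all("items", "stack"))
store.delete(category="items", type="stack", name="demo")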
Example no. 31
from multiprocessing import Pool

from redislite import Redis
from piston.steem import Steem
from piston.blockchain import Blockchain
from django.core.management.base import BaseCommand
from django.db.utils import InterfaceError
from django import db

from backend import settings
from apps.blockchains.sync import BaseUpdater

logger = logging.getLogger('mapala.fetch')

pool = Pool(processes=4)
redis = Redis('redis.db')


def get_block(steem, blockchain_name):
    last_block = redis.get('%s_last_block' % blockchain_name)

    if last_block is None:
        # First fetch by the app
        last_block = Blockchain(steem).get_current_block_num()
        print(last_block, settings.LOCALE)

    return int(last_block)


class Command(BaseCommand):
    def add_arguments(self, parser):
Example no. 32
def create_app(config_name):
    app = Flask(__name__, static_url_path='')
    app.config.from_object(config[config_name])
    app.config.from_envvar("OPENTARGETS_API_LOCAL_SETTINGS", silent=True)
    config[config_name].init_app(app)
    api_version = app.config['API_VERSION']
    api_version_minor = app.config['API_VERSION_MINOR']

    # log_level = logging.INFO
    # if app.config['DEBUG']:
    #     log_level = logging.DEBUG

    # Flask has a default logger which works well and pushes to stderr
    # if you want to add different handlers (to file, or logstash, or whatever)
    # you can use code similar to the one below and set the error level accordingly.

    # logHandler = logging.StreamHandler()
    # formatter = jsonlogger.JsonFormatter()
    # logHandler.setFormatter(formatter)
    # loghandler.setLevel(logging.INFO)
    # app.logger.addHandler(logHandler)

    # or for LOGSTASH
    # app.logger.addHandler(logstash.LogstashHandler(app.config['LOGSTASH_HOST'], app.config['LOGSTASH_PORT'], version=1))

    app.logger.info('looking for elasticsearch at: %s' %
                    app.config['ELASTICSEARCH_URL'])
    print('looking for elasticsearch at: %s' % app.config['ELASTICSEARCH_URL'])

    app.extensions['redis-core'] = Redis(app.config['REDIS_SERVER_PATH'],
                                         db=0)  #served data
    app.extensions['redis-service'] = Redis(
        app.config['REDIS_SERVER_PATH'],
        db=1)  #cache, rate limit and internal things
    app.extensions['redis-user'] = Redis(app.config['REDIS_SERVER_PATH'],
                                         db=2)  # user info
    '''setup cache'''
    app.extensions['redis-service'].config_set('save', '')
    app.extensions['redis-service'].config_set('appendonly', 'no')
    icache = InternalCache(app.extensions['redis-service'],
                           str(api_version_minor))
    ip2org = IP2Org(icache)
    es = Elasticsearch(
        app.config['ELASTICSEARCH_URL'],
        # # sniff before doing anything
        # sniff_on_start=True,
        # # refresh nodes after a node fails to respond
        # sniff_on_connection_fail=True,
        # # and also every 60 seconds
        # sniffer_timeout=60
        timeout=60 * 20,
        maxsize=100,
    )
    '''elasticsearch handlers'''
    app.extensions['esquery'] = esQuery(
        es,
        DataTypes(app),
        DataSourceScoring(app),
        index_data=app.config['ELASTICSEARCH_DATA_INDEX_NAME'],
        index_efo=app.config['ELASTICSEARCH_EFO_LABEL_INDEX_NAME'],
        index_eco=app.config['ELASTICSEARCH_ECO_INDEX_NAME'],
        index_genename=app.config['ELASTICSEARCH_GENE_NAME_INDEX_NAME'],
        index_expression=app.config['ELASTICSEARCH_EXPRESSION_INDEX_NAME'],
        index_reactome=app.config['ELASTICSEARCH_REACTOME_INDEX_NAME'],
        index_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_INDEX_NAME'],
        index_search=app.config['ELASTICSEARCH_DATA_SEARCH_INDEX_NAME'],
        index_relation=app.config['ELASTICSEARCH_DATA_RELATION_INDEX_NAME'],
        docname_data=app.config['ELASTICSEARCH_DATA_DOC_NAME'],
        docname_efo=app.config['ELASTICSEARCH_EFO_LABEL_DOC_NAME'],
        docname_eco=app.config['ELASTICSEARCH_ECO_DOC_NAME'],
        docname_genename=app.config['ELASTICSEARCH_GENE_NAME_DOC_NAME'],
        docname_expression=app.config['ELASTICSEARCH_EXPRESSION_DOC_NAME'],
        docname_reactome=app.config['ELASTICSEARCH_REACTOME_REACTION_DOC_NAME'],
        docname_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_DOC_NAME'],
        docname_search=app.config['ELASTICSEARCH_DATA_SEARCH_DOC_NAME'],
        # docname_search_target=app.config['ELASTICSEARCH_DATA_SEARCH_TARGET_DOC_NAME'],
        # docname_search_disease=app.config['ELASTICSEARCH_DATA_SEARCH_DISEASE_DOC_NAME'],
        docname_relation=app.config['ELASTICSEARCH_DATA_RELATION_DOC_NAME'],
        log_level=app.logger.getEffectiveLevel(),
        cache=icache)

    app.extensions['es_access_store'] = esStore(
        es,
        eventlog_index=app.config['ELASTICSEARCH_LOG_EVENT_INDEX_NAME'],
        ip2org=ip2org,
    )
    '''mixpanel handlers'''
    if Config.MIXPANEL_TOKEN:
        mp = Mixpanel(Config.MIXPANEL_TOKEN, consumer=AsyncBufferedConsumer())
        app.extensions['mixpanel'] = mp
        app.extensions['mp_access_store'] = MixPanelStore(
            mp,
            ip2org=ip2org,
        )

        app.extensions['proxy'] = ProxyHandler(
            allowed_targets=app.config['PROXY_SETTINGS']['allowed_targets'],
            allowed_domains=app.config['PROXY_SETTINGS']['allowed_domains'],
            allowed_request_domains=app.config['PROXY_SETTINGS']
            ['allowed_request_domains'])

    basepath = app.config['PUBLIC_API_BASE_PATH'] + api_version
    # cors = CORS(app, resources=r'/api/*', allow_headers='Content-Type,Auth-Token')
    ''' define cache'''
    # cache = Cache(config={'CACHE_TYPE': 'simple'})
    # cache.init_app(latest_blueprint)
    # latest_blueprint.cache = cache
    # latest_blueprint.extensions['cache'] = cache
    # app.cache = SimpleCache()
    app.cache = FileSystemCache('/tmp/cttv-rest-api-cache',
                                threshold=100000,
                                default_timeout=60 * 60,
                                mode=777)
    '''Set usage limiter '''
    # limiter = Limiter(global_limits=["2000 per hour", "20 per second"])
    # limiter.init_app(app)# use redis to store limits
    '''Load api keys in redis'''
    rate_limit_file = app.config['USAGE_LIMIT_PATH']
    if not os.path.exists(rate_limit_file):
        rate_limit_file = '../' + rate_limit_file
    if os.path.exists(rate_limit_file):
        with open(rate_limit_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                auth_key = AuthKey(**row)
                app.extensions['redis-user'].hmset(auth_key.get_key(),
                                                   auth_key.__dict__)
        print('INFO - successfully loaded rate limit file')
    else:
        print('ERROR - cannot find rate limit file')
        app.logger.error(
            'cannot find rate limit file: %s. RATE LIMIT QUOTA LOAD SKIPPED!' %
            rate_limit_file)
    '''load ip name resolution'''
    ip_resolver = defaultdict(lambda: "PUBLIC")
    ip_list_file = app.config['IP_RESOLVER_LIST_PATH']
    if not os.path.exists(ip_list_file):
        ip_list_file = '../' + ip_list_file
    if os.path.exists(ip_list_file):
        with open(ip_list_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                net = IPNetwork(row['ip'])
                ip_resolver[net] = row['org']
    else:
        app.logger.warning(
            'cannot find IP list for IP resolver. All traffic will be logged as PUBLIC'
        )
    app.config['IP_RESOLVER'] = ip_resolver
    '''compress http response'''
    compress = Compress()
    compress.init_app(app)

    latest_blueprint = Blueprint('latest', __name__)
    current_version_blueprint = Blueprint(str(api_version), __name__)
    current_minor_version_blueprint = Blueprint(str(api_version_minor),
                                                __name__)

    specpath = '/cttv'

    if app.config['PROFILE'] == True:
        from werkzeug.contrib.profiler import ProfilerMiddleware
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])

    create_api(latest_blueprint, api_version, specpath)
    create_api(current_version_blueprint, api_version, specpath)
    create_api(current_minor_version_blueprint, api_version_minor, specpath)

    app.register_blueprint(latest_blueprint, url_prefix='/api/latest')
    app.register_blueprint(current_version_blueprint,
                           url_prefix='/api/' + str(api_version))
    app.register_blueprint(current_minor_version_blueprint,
                           url_prefix='/api/' + str(api_version_minor))

    @app.route('/api-docs/%s' % str(api_version_minor))
    def docs_current_minor_version():
        return redirect('/api/swagger/index.html')

    @app.route('/api-docs/%s' % str(api_version))
    def docs_current_version():
        return redirect('/api/swagger/index.html')

    @app.route('/api-docs')
    def docs():
        return redirect('/api/swagger/index.html')

    def serve_swagger():
        return app.send_static_file('docs/swagger/swagger.yaml')

    @app.route('/api/docs/swagger.yaml')
    def send_swagger():
        return serve_swagger()

    @app.route('/api/latest/docs/swagger.yaml')
    def send_swagger_latest():
        return serve_swagger()

    @app.route('/api/' + str(api_version) + '/docs/swagger.yaml')
    def send_swagger_current_version():
        return serve_swagger()

    @app.before_request
    def before_request():
        g.request_start = datetime.now()

    @app.after_request
    def after(resp):
        rate_limiter = RateLimiter()
        now = datetime.now()
        took = (now - g.request_start).total_seconds() * 1000
        if took > 500:
            cache_time = str(
                int(3600 * took)
            )  # set the cache to last one hour for each second spent in the request
            resp.headers.add('X-Accel-Expires', cache_time)
        took = int(round(took))
        LogApiCallWeight(took)
        # if took < RateLimiter.DEFAULT_CALL_WEIGHT:
        #     took = RateLimiter.DEFAULT_CALL_WEIGHT
        current_values = increment_call_rate(took, rate_limiter)
        now = datetime.now()
        ceil10s = round(ceil_dt_to_future_time(now, 10), 2)
        ceil1h = round(ceil_dt_to_future_time(now, 3600), 2)
        usage_left_10s = rate_limiter.short_window_rate - current_values[
            'short']
        usage_left_1h = rate_limiter.long_window_rate - current_values['long']
        min_ceil = ceil10s
        if usage_left_1h < 0:
            min_ceil = ceil1h
        if (usage_left_10s < 0) or (usage_left_1h < 0):
            resp.headers.add('Retry-After', min_ceil)
        resp.headers.add('X-API-Took', took)
        resp.headers.add('X-Usage-Limit-10s', rate_limiter.short_window_rate)
        resp.headers.add('X-Usage-Limit-1h', rate_limiter.long_window_rate)
        resp.headers.add('X-Usage-Remaining-10s', usage_left_10s)
        resp.headers.add('X-Usage-Remaining-1h', usage_left_1h)
        # resp.headers.add('X-Usage-Limit-Reset-10s', ceil10s)
        # resp.headers.add('X-Usage-Limit-Reset-1h', ceil1h)
        resp.headers.add('Access-Control-Allow-Origin', '*')
        resp.headers.add('Access-Control-Allow-Headers',
                         'Content-Type,Auth-Token')
        if do_not_cache(request):  # do not cache in the browser
            resp.headers.add('Cache-Control',
                             "no-cache, must-revalidate, max-age=0")
        else:
            resp.headers.add(
                'Cache-Control',
                "no-transform, public, max-age=%i, s-maxage=%i" %
                (took * 1800 / 1000, took * 9000 / 1000))
        return resp

    return app