Example #1
    @patch.object(AsyncBufferedConsumer, '_sync_flush')
    def test_flushes_after_first_event_if_first_flush_true(self, sync_flush):
        self.consumer = AsyncBufferedConsumer(max_size=self.MAX_SIZE)

        self.send_event()
        self.wait_for_threads()

        sync_flush.assert_called_once_with()
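
For reference, a minimal sketch of the constructor options these tests exercise; the defaults shown are assumptions, so check your installed mixpanel_async version:

import datetime

from mixpanel_async import AsyncBufferedConsumer

consumer = AsyncBufferedConsumer(
    max_size=5,                             # flush an endpoint once it buffers this many events
    flush_first=True,                       # flush immediately after the first event
    flush_after=datetime.timedelta(0, 10),  # flush all endpoints after this much time has passed
)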
Example #2
    @patch.object(AsyncBufferedConsumer, '_sync_flush')
    def test_endpoint_events_get_flushed_instantly_with_max_size_1(
            self, sync_flush):
        self.consumer = AsyncBufferedConsumer(max_size=1, flush_first=False)

        self.send_event()

        self.wait_for_threads()

        sync_flush.assert_called_once_with(endpoint=self.ENDPOINT)
Example #3
    def test_does_not_drop_events(self):
        self.consumer = AsyncBufferedConsumer(flush_first=True)
        send_patch = patch.object(self.consumer._consumer, 'send').start()

        self.send_event()
        self.send_event()

        self.wait_for_threads()

        send_patch.assert_called_once_with(self.ENDPOINT, '[{"test": true}]')
        self.assertEqual(self.consumer._async_buffers[self.ENDPOINT],
                         [self.JSON])
Example #4

    @patch.object(AsyncBufferedConsumer, '_sync_flush')
    def test_flushes_after_first_event_if_first_flush_true(self, sync_flush):
        self.consumer = AsyncBufferedConsumer(max_size=self.MAX_SIZE)

        self.send_event()
        self.wait_for_threads()

        sync_flush.assert_called_once_with()
Example #5

    @patch.object(AsyncBufferedConsumer, '_sync_flush')
    def test_endpoint_events_get_flushed_instantly_with_max_size_1(self, sync_flush):
        self.consumer = AsyncBufferedConsumer(max_size=1, flush_first=False)

        self.send_event()

        self.wait_for_threads()

        sync_flush.assert_called_once_with(endpoint=self.ENDPOINT)
Example #6
# Imports assumed by this snippet (not shown in the original excerpt):
import logging
from typing import List

from mixpanel import Mixpanel
from mixpanel_async import AsyncBufferedConsumer

logger = logging.getLogger(__name__)


def init_mixpannel_clients(mxp_tokens: List[str]) -> List[Mixpanel]:
    """
    Return a list of mixpannel clients.
    """
    projects: List[Mixpanel] = []
    logger.info('Found %s Mixpannel tokens.', len(mxp_tokens))
    for project_token in mxp_tokens:
        mp = Mixpanel(project_token, consumer=AsyncBufferedConsumer())
        projects.append(mp)
    logger.info('%s Mixpanel projects ready to go.', len(projects))
    return projects
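
A hypothetical call site for the helper above; the tokens, user id, and event payload are placeholders:

clients = init_mixpannel_clients(['token-project-a', 'token-project-b'])
for mp in clients:
    mp.track('user-123', 'signup', {'plan': 'free'})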
Example #7

    def test_does_not_drop_events(self):
        self.consumer = AsyncBufferedConsumer(flush_first=True)
        send_patch = patch.object(self.consumer._consumer, 'send').start()

        self.send_event()
        self.send_event()

        self.wait_for_threads()

        send_patch.assert_called_once_with(self.ENDPOINT, '[{"test": true}]')
        self.assertEqual(self.consumer._async_buffers[self.ENDPOINT], [self.JSON])
Example #8
def mixpanel_event(name, username=None, properties=None):
    """
    Takes an event name and a dict of properties and registers the event with
    Mixpanel. If the username is None, it is assumed that it can be found in a
    .siphon file in the current directory.
    """
    # Use AsyncBufferedConsumer to avoid blocking the main thread
    mp = Mixpanel(MIXPANEL_TOKEN, consumer=AsyncBufferedConsumer())
    if not username:
        auth = Auth()
        username = auth.username

    props = {'user_platform': get_platform_name()}
    if properties:
        props.update(properties)

    mp.track(username, name, props)
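
For example, a call might look like this (event name and properties are made up; with username omitted, Auth() resolves it from the local .siphon file):

mixpanel_event('app_opened', username='alice', properties={'version': '1.2.0'})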
Example #9

def _trace_mixpanel(action_name, auth_user, client_user, client_token, kwargs):
    from mixpanel import Mixpanel
    from mixpanel_async import AsyncBufferedConsumer

    some_user_id = auth_user or client_user or client_token
    try:
        if action_name in ["get_server_info", "publish"]:
            mp = Mixpanel(BII_MIXPANEL_TOKEN, consumer=AsyncBufferedConsumer())
            properties = {
                'action': action_name,
                'anonymous': (some_user_id == client_token)
            }
            if action_name == "get_server_info":
                properties["os"] = kwargs["bson_data"]["data"][0]["family"]
                properties["biicode_version"] = kwargs["bson_data"]["data"][1]
            mp.track(some_user_id, BII_API_MIXPANEL_EVENT_NAME, properties)
    except Exception as e:
        logger.warning("Error sending action to mixpanel: %s" % e)
Example #10
    def setUp(self):
        self.consumer = AsyncBufferedConsumer(max_size=self.MAX_SIZE,
                                              flush_first=False)
Example #11
# Module-level imports this suite relies on (a reconstruction; the original
# module header was not part of the excerpt):
import datetime
import json
import time
import unittest

try:
    import thread  # Python 2
except ImportError:
    import _thread as thread  # Python 3

from mock import DEFAULT, patch  # or: from unittest.mock import DEFAULT, patch

import mixpanel
from mixpanel_async import AsyncBufferedConsumer


class AsyncBufferedConsumerTestCase(unittest.TestCase):
    MAX_SIZE = 5
    ENDPOINT = 'people'
    JSON = json.dumps(dict(test=True))

    def setUp(self):
        self.consumer = AsyncBufferedConsumer(max_size=self.MAX_SIZE,
                                              flush_first=False)

    @patch.object(mixpanel.BufferedConsumer, '_flush_endpoint')
    def test_sync_flush_endpoint_calls_buffered_consumer_flush_endpoint(
            self, _flush_endpoint):
        self.send_event()

        self.assertFalse(_flush_endpoint.called)

        self.consumer._sync_flush(endpoint=self.ENDPOINT)

        _flush_endpoint.assert_called_once_with(self.ENDPOINT)

    @patch.object(mixpanel.BufferedConsumer, '_flush_endpoint')
    def test_sync_flush_calls_buffered_consumer_flush_endpoint(
            self, flush_endpoint):
        self.send_event()

        self.assertFalse(flush_endpoint.called)

        self.consumer._sync_flush()

        flush_endpoint.assert_any_call(self.ENDPOINT)

    @patch.object(AsyncBufferedConsumer, '_sync_flush')
    def test_flush_gets_called_in_different_thread_if_async(self, sync_flush):
        main_thread_id = thread.get_ident()
        flush_thread_id = None

        def side_effect(endpoint=None):
            self.assertNotEqual(main_thread_id, thread.get_ident())
            return DEFAULT

        sync_flush.side_effect = side_effect

        self.send_max_events()
        self.wait_for_threads()

    @patch.object(AsyncBufferedConsumer, '_sync_flush')
    def test_flush_gets_called_in_same_thread_if_not_async(self, sync_flush):
        main_thread_id = thread.get_ident()
        flush_thread_id = None

        def side_effect(endpoint=None):
            self.assertEqual(main_thread_id, thread.get_ident())
            return DEFAULT

        sync_flush.side_effect = side_effect

        self.send_event()
        self.consumer.flush(use_async=False)

    @patch.object(AsyncBufferedConsumer, '_sync_flush')
    def test_flushes_after_first_event_if_first_flush_true(self, sync_flush):
        self.consumer = AsyncBufferedConsumer(max_size=self.MAX_SIZE)

        self.send_event()
        self.wait_for_threads()

        sync_flush.assert_called_once_with()

    @patch.object(AsyncBufferedConsumer, '_sync_flush')
    def test_does_not_flush_after_first_event_if_first_flush_false(
            self, sync_flush):
        self.send_event()
        self.wait_for_threads()

        self.assertFalse(sync_flush.called)

    @patch.object(AsyncBufferedConsumer, '_sync_flush')
    def test_endpoints_get_flushed_when_they_hit_max_size(self, sync_flush):
        self.send_event("events")
        self.send_max_events()
        self.wait_for_threads()

        sync_flush.assert_called_once_with(endpoint=self.ENDPOINT)

    @patch.object(AsyncBufferedConsumer, '_sync_flush')
    def test_all_events_get_flushed_after_flush_after(self, sync_flush):
        self.consumer.flush_after = datetime.timedelta(0, .5)

        self.send_event()
        self.assertFalse(sync_flush.called)

        time.sleep(.6)
        self.send_event()

        self.wait_for_threads()

        sync_flush.assert_called_once_with()

    @patch.object(AsyncBufferedConsumer, '_sync_flush')
    def test_endpoint_events_get_flushed_instantly_with_max_size_1(
            self, sync_flush):
        self.consumer = AsyncBufferedConsumer(max_size=1, flush_first=False)

        self.send_event()

        self.wait_for_threads()

        sync_flush.assert_called_once_with(endpoint=self.ENDPOINT)

    def test_does_not_drop_events(self):
        self.consumer = AsyncBufferedConsumer(flush_first=True)
        send_patch = patch.object(self.consumer._consumer, 'send').start()

        self.send_event()
        self.send_event()

        self.wait_for_threads()

        send_patch.assert_called_once_with(self.ENDPOINT, '[{"test": true}]')
        self.assertEqual(self.consumer._async_buffers[self.ENDPOINT],
                         [self.JSON])

    def test_raises_exception_with_bad_endpoint(self):
        with self.assertRaises(mixpanel.MixpanelException):
            self.consumer.send('badendpoint', True)

    def send_event(self, endpoint=None):
        endpoint = endpoint or self.ENDPOINT
        self.consumer.send(endpoint, self.JSON)

    def send_max_events(self):
        for i in range(self.MAX_SIZE):
            self.send_event()

    def wait_for_threads(self):
        if not self.consumer._flush_thread_is_free():
            self.consumer.flushing_thread.join()
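
To run the suite standalone, the standard unittest entry point is enough, given the module-level imports sketched above:

if __name__ == '__main__':
    unittest.main()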
Example #12

    def setUp(self):
        self.consumer = AsyncBufferedConsumer(
            max_size=self.MAX_SIZE,
            flush_first=False
        )
Example #13

class AsyncBufferedConsumerTestCase(unittest.TestCase):
    MAX_SIZE = 5
    ENDPOINT = 'people'
    JSON = json.dumps(dict(test=True))

    def setUp(self):
        self.consumer = AsyncBufferedConsumer(
            max_size=self.MAX_SIZE, 
            flush_first=False
        )

    @patch.object(mixpanel.BufferedConsumer, '_flush_endpoint')
    def test_sync_flush_endpoint_calls_buffered_consumer_flush_endpoint(self, _flush_endpoint):
        self.send_event()

        self.assertFalse(_flush_endpoint.called)

        self.consumer._sync_flush(endpoint=self.ENDPOINT)

        _flush_endpoint.assert_called_once_with(self.ENDPOINT)


    @patch.object(mixpanel.BufferedConsumer, '_flush_endpoint')
    def test_sync_flush_calls_buffered_consumer_flush_endpoint(self, flush_endpoint):
        self.send_event()

        self.assertFalse(flush_endpoint.called)

        self.consumer._sync_flush()

        flush_endpoint.assert_called_with(self.ENDPOINT)


    @patch.object(AsyncBufferedConsumer, '_sync_flush')    
    def test_flush_gets_called_in_different_thread_if_async(self, sync_flush):
        main_thread_id = thread.get_ident()
        flush_thread_id = None

        def side_effect(endpoint=None):
            self.assertNotEqual(main_thread_id, thread.get_ident())
            return DEFAULT

        sync_flush.side_effect = side_effect

        self.send_max_events()
        self.wait_for_threads()

    @patch.object(AsyncBufferedConsumer, '_sync_flush')
    def test_flush_gets_called_in_same_thread_if_not_async(self, sync_flush):
        main_thread_id = thread.get_ident()
        flush_thread_id = None

        def side_effect(endpoint=None):
            self.assertEqual(main_thread_id, thread.get_ident())
            return DEFAULT

        sync_flush.side_effect = side_effect

        self.send_event()
        self.consumer.flush(use_async=False)  # formerly spelled async=False; 'async' is a reserved word in Python 3.7+

    @patch.object(AsyncBufferedConsumer, '_sync_flush')
    def test_flushes_after_first_event_if_first_flush_true(self, sync_flush):
        self.consumer = AsyncBufferedConsumer(max_size=self.MAX_SIZE)

        self.send_event()
        self.wait_for_threads()

        sync_flush.assert_called_once_with()

    @patch.object(AsyncBufferedConsumer, '_sync_flush')
    def test_does_not_flush_after_first_event_if_first_flush_false(self, sync_flush):
        self.send_event()
        self.wait_for_threads()

        self.assertFalse(sync_flush.called)

    @patch.object(AsyncBufferedConsumer, '_sync_flush')
    def test_endpoints_get_flushed_when_they_hit_max_size(self, sync_flush):
        self.send_event("events")
        self.send_max_events()
        self.wait_for_threads()

        sync_flush.assert_called_once_with(endpoint=self.ENDPOINT)

    @patch.object(AsyncBufferedConsumer, '_sync_flush')
    def test_all_events_get_flushed_after_flush_after(self, sync_flush):
        self.consumer.flush_after = datetime.timedelta(0, .5)

        self.send_event()
        self.assertFalse(sync_flush.called)

        time.sleep(.6)
        self.send_event()

        self.wait_for_threads()

        sync_flush.assert_called_once_with()

    @patch.object(AsyncBufferedConsumer, '_sync_flush')
    def test_endpoint_events_get_flushed_instantly_with_max_size_1(self, sync_flush):
        self.consumer = AsyncBufferedConsumer(max_size=1, flush_first=False)

        self.send_event()

        self.wait_for_threads()

        sync_flush.assert_called_once_with(endpoint=self.ENDPOINT)


    def send_event(self, endpoint=None):
        endpoint = endpoint or self.ENDPOINT
        self.consumer.send(endpoint, self.JSON)

    def send_max_events(self):
        for i in range(self.MAX_SIZE):
            self.send_event()

    def wait_for_threads(self):
        if not self.consumer._flush_thread_is_free():
            self.consumer.flushing_thread.join()
Example #14
def create_app(config_name):
    app = Flask(__name__, static_url_path='')
    app.config.from_object(config[config_name])
    app.config.from_envvar("OPENTARGETS_API_LOCAL_SETTINGS", silent=True)
    config[config_name].init_app(app)
    api_version = app.config['API_VERSION']
    api_version_minor = app.config['API_VERSION_MINOR']

    # log_level = logging.INFO
    # if app.config['DEBUG']:
    #     log_level = logging.DEBUG

    # Flask has a default logger which works well and pushes to stderr
    # if you want to add different handlers (to file, or logstash, or whatever)
    # you can use code similar to the one below and set the error level accordingly.

    # logHandler = logging.StreamHandler()
    # formatter = jsonlogger.JsonFormatter()
    # logHandler.setFormatter(formatter)
    # loghandler.setLevel(logging.INFO)
    # app.logger.addHandler(logHandler)

    # or for LOGSTASH
    # app.logger.addHandler(logstash.LogstashHandler(app.config['LOGSTASH_HOST'], app.config['LOGSTASH_PORT'], version=1))

    app.logger.info('looking for elasticsearch at: %s' %
                    app.config['ELASTICSEARCH_URL'])
    print('looking for elasticsearch at: %s' % app.config['ELASTICSEARCH_URL'])

    app.extensions['redis-core'] = Redis(app.config['REDIS_SERVER_PATH'],
                                         db=0)  #served data
    app.extensions['redis-service'] = Redis(
        app.config['REDIS_SERVER_PATH'],
        db=1)  #cache, rate limit and internal things
    app.extensions['redis-user'] = Redis(app.config['REDIS_SERVER_PATH'],
                                         db=2)  # user info
    '''setup cache'''
    app.extensions['redis-service'].config_set('save', '')
    app.extensions['redis-service'].config_set('appendonly', 'no')
    icache = InternalCache(app.extensions['redis-service'],
                           str(api_version_minor))
    ip2org = IP2Org(icache)
    es = Elasticsearch(
        app.config['ELASTICSEARCH_URL'],
        # # sniff before doing anything
        # sniff_on_start=True,
        # # refresh nodes after a node fails to respond
        # sniff_on_connection_fail=True,
        # # and also every 60 seconds
        # sniffer_timeout=60
        timeout=60 * 20,
        maxsize=100,
    )
    '''elasticsearch handlers'''
    app.extensions['esquery'] = esQuery(
        es,
        DataTypes(app),
        DataSourceScoring(app),
        index_data=app.config['ELASTICSEARCH_DATA_INDEX_NAME'],
        index_efo=app.config['ELASTICSEARCH_EFO_LABEL_INDEX_NAME'],
        index_eco=app.config['ELASTICSEARCH_ECO_INDEX_NAME'],
        index_genename=app.config['ELASTICSEARCH_GENE_NAME_INDEX_NAME'],
        index_expression=app.config['ELASTICSEARCH_EXPRESSION_INDEX_NAME'],
        index_reactome=app.config['ELASTICSEARCH_REACTOME_INDEX_NAME'],
        index_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_INDEX_NAME'],
        index_search=app.config['ELASTICSEARCH_DATA_SEARCH_INDEX_NAME'],
        index_relation=app.config['ELASTICSEARCH_DATA_RELATION_INDEX_NAME'],
        docname_data=app.config['ELASTICSEARCH_DATA_DOC_NAME'],
        docname_efo=app.config['ELASTICSEARCH_EFO_LABEL_DOC_NAME'],
        docname_eco=app.config['ELASTICSEARCH_ECO_DOC_NAME'],
        docname_genename=app.config['ELASTICSEARCH_GENE_NAME_DOC_NAME'],
        docname_expression=app.config['ELASTICSEARCH_EXPRESSION_DOC_NAME'],
        docname_reactome=app.config['ELASTICSEARCH_REACTOME_REACTION_DOC_NAME'],
        docname_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_DOC_NAME'],
        docname_search=app.config['ELASTICSEARCH_DATA_SEARCH_DOC_NAME'],
        # docname_search_target=app.config['ELASTICSEARCH_DATA_SEARCH_TARGET_DOC_NAME'],
        # docname_search_disease=app.config['ELASTICSEARCH_DATA_SEARCH_DISEASE_DOC_NAME'],
        docname_relation=app.config['ELASTICSEARCH_DATA_RELATION_DOC_NAME'],
        log_level=app.logger.getEffectiveLevel(),
        cache=icache)

    app.extensions['es_access_store'] = esStore(
        es,
        eventlog_index=app.config['ELASTICSEARCH_LOG_EVENT_INDEX_NAME'],
        ip2org=ip2org,
    )
    '''mixpanel handlers'''
    if Config.MIXPANEL_TOKEN:
        mp = Mixpanel(Config.MIXPANEL_TOKEN, consumer=AsyncBufferedConsumer())
        app.extensions['mixpanel'] = mp
        app.extensions['mp_access_store'] = MixPanelStore(
            mp,
            ip2org=ip2org,
        )

        app.extensions['proxy'] = ProxyHandler(
            allowed_targets=app.config['PROXY_SETTINGS']['allowed_targets'],
            allowed_domains=app.config['PROXY_SETTINGS']['allowed_domains'],
            allowed_request_domains=app.config['PROXY_SETTINGS']['allowed_request_domains'])

    basepath = app.config['PUBLIC_API_BASE_PATH'] + api_version
    # cors = CORS(app, resources=r'/api/*', allow_headers='Content-Type,Auth-Token')
    ''' define cache'''
    # cache = Cache(config={'CACHE_TYPE': 'simple'})
    # cache.init_app(latest_blueprint)
    # latest_blueprint.cache = cache
    # latest_blueprint.extensions['cache'] = cache
    # app.cache = SimpleCache()
    app.cache = FileSystemCache('/tmp/cttv-rest-api-cache',
                                threshold=100000,
                                default_timeout=60 * 60,
                                mode=777)
    '''Set usage limiter '''
    # limiter = Limiter(global_limits=["2000 per hour", "20 per second"])
    # limiter.init_app(app)# use redis to store limits
    '''Load api keys in redis'''
    rate_limit_file = app.config['USAGE_LIMIT_PATH']
    if not os.path.exists(rate_limit_file):
        rate_limit_file = '../' + rate_limit_file
    if os.path.exists(rate_limit_file):
        with open(rate_limit_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                auth_key = AuthKey(**row)
                app.extensions['redis-user'].hmset(auth_key.get_key(),
                                                   auth_key.__dict__)
        print('INFO - successfully loaded rate limit file')
    else:
        print('ERROR - cannot find rate limit file')
        app.logger.error(
            'cannot find rate limit file: %s. RATE LIMIT QUOTA LOAD SKIPPED!' %
            rate_limit_file)
    '''load ip name resolution'''
    ip_resolver = defaultdict(lambda: "PUBLIC")
    ip_list_file = app.config['IP_RESOLVER_LIST_PATH']
    if not os.path.exists(ip_list_file):
        ip_list_file = '../' + ip_list_file
    if os.path.exists(ip_list_file):
        with open(ip_list_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                net = IPNetwork(row['ip'])
                ip_resolver[net] = row['org']
    else:
        app.logger.warning(
            'cannot find IP list for IP resolver. All traffic will be logged as PUBLIC'
        )
    app.config['IP_RESOLVER'] = ip_resolver
    '''compress http response'''
    compress = Compress()
    compress.init_app(app)

    latest_blueprint = Blueprint('latest', __name__)
    current_version_blueprint = Blueprint(str(api_version), __name__)
    current_minor_version_blueprint = Blueprint(str(api_version_minor),
                                                __name__)

    specpath = '/cttv'

    if app.config['PROFILE'] == True:
        from werkzeug.contrib.profiler import ProfilerMiddleware
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])

    create_api(latest_blueprint, api_version, specpath)
    create_api(current_version_blueprint, api_version, specpath)
    create_api(current_minor_version_blueprint, api_version_minor, specpath)

    app.register_blueprint(latest_blueprint, url_prefix='/api/latest')
    app.register_blueprint(current_version_blueprint,
                           url_prefix='/api/' + str(api_version))
    app.register_blueprint(current_minor_version_blueprint,
                           url_prefix='/api/' + str(api_version_minor))

    @app.route('/api-docs/%s' % str(api_version_minor))
    def docs_current_minor_version():
        return redirect('/api/swagger/index.html')

    @app.route('/api-docs/%s' % str(api_version))
    def docs_current_version():
        return redirect('/api/swagger/index.html')

    @app.route('/api-docs')
    def docs():
        return redirect('/api/swagger/index.html')

    def serve_swagger():
        return app.send_static_file('docs/swagger/swagger.yaml')

    @app.route('/api/docs/swagger.yaml')
    def send_swagger():
        return serve_swagger()

    @app.route('/api/latest/docs/swagger.yaml')
    def send_swagger_latest():
        return serve_swagger()

    @app.route('/api/' + str(api_version) + '/docs/swagger.yaml')
    def send_swagger_current_version():
        return serve_swagger()

    @app.before_request
    def before_request():
        g.request_start = datetime.now()

    @app.after_request
    def after(resp):
        rate_limiter = RateLimiter()
        now = datetime.now()
        took = (now - g.request_start).total_seconds() * 1000
        if took > 500:
            cache_time = str(int(3600 * took))  # cache for one hour per second spent in the request
            resp.headers.add('X-Accel-Expires', cache_time)
        took = int(round(took))
        LogApiCallWeight(took)
        # if took < RateLimiter.DEFAULT_CALL_WEIGHT:
        #     took = RateLimiter.DEFAULT_CALL_WEIGHT
        current_values = increment_call_rate(took, rate_limiter)
        now = datetime.now()
        ceil10s = round(ceil_dt_to_future_time(now, 10), 2)
        ceil1h = round(ceil_dt_to_future_time(now, 3600), 2)
        usage_left_10s = rate_limiter.short_window_rate - current_values[
            'short']
        usage_left_1h = rate_limiter.long_window_rate - current_values['long']
        min_ceil = ceil10s
        if usage_left_1h < 0:
            min_ceil = ceil1h
        if (usage_left_10s < 0) or (usage_left_1h < 0):
            resp.headers.add('Retry-After', min_ceil)
        resp.headers.add('X-API-Took', took)
        resp.headers.add('X-Usage-Limit-10s', rate_limiter.short_window_rate)
        resp.headers.add('X-Usage-Limit-1h', rate_limiter.long_window_rate)
        resp.headers.add('X-Usage-Remaining-10s', usage_left_10s)
        resp.headers.add('X-Usage-Remaining-1h', usage_left_1h)
        # resp.headers.add('X-Usage-Limit-Reset-10s', ceil10s)
        # resp.headers.add('X-Usage-Limit-Reset-1h', ceil1h)
        resp.headers.add('Access-Control-Allow-Origin', '*')
        resp.headers.add('Access-Control-Allow-Headers',
                         'Content-Type,Auth-Token')
        if do_not_cache(request):  # do not cache in the browser
            resp.headers.add('Cache-Control',
                             "no-cache, must-revalidate, max-age=0")
        else:
            resp.headers.add(
                'Cache-Control',
                "no-transform, public, max-age=%i, s-maxage=%i" %
                (took * 1800 / 1000, took * 9000 / 1000))
        return resp

    return app
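
A minimal way to boot the factory above during development; the config name and port are assumptions:

app = create_app('development')

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8080)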
Example #15
class LampiApp(App):
    _updated = False
    _updatingUI = False
    _hue = NumericProperty()
    _saturation = NumericProperty()
    _brightness = NumericProperty()
    _preset = NumericProperty()
    lamp_is_on = BooleanProperty()
    _preset_color = NumericProperty()
    _preset_temp = NumericProperty()

    remote_connection = StringProperty("[b]Connected:[/b] No")
    trusted_remotes = StringProperty("[b]Trusted Remotes:[/b] None")

    mp = Mixpanel(MIXPANEL_TOKEN, consumer=AsyncBufferedConsumer())

    def _get_hue(self):
        return self._hue

    def _set_hue(self, value):
        self._hue = value

    def _get_saturation(self):
        return self._saturation

    def _set_saturation(self, value):
        self._saturation = value

    def _get_brightness(self):
        return self._brightness

    def _set_brightness(self, value):
        self._brightness = value

    def _get_preset(self):
        return self._preset

    def _set_preset(self, value):
        self._preset = value

    def _get_preset_color(self):
        return self._preset_color

    def _set_preset_color(self, value):
        self._preset_color = value

    def _get_preset_temp(self):
        return self._preset_temp

    def _set_preset_temp(self, value):
        self._preset_temp = value

    hue = AliasProperty(_get_hue, _set_hue, bind=['_hue'])
    saturation = AliasProperty(_get_saturation,
                               _set_saturation,
                               bind=['_saturation'])
    brightness = AliasProperty(_get_brightness,
                               _set_brightness,
                               bind=['_brightness'])
    preset = AliasProperty(_get_preset, _set_preset, bind=['_preset'])
    preset_color = AliasProperty(_get_preset_color,
                                 _set_preset_color,
                                 bind=['_preset_color'])
    preset_temp = AliasProperty(_get_preset_temp,
                                _set_preset_temp,
                                bind=['_preset_temp'])
    gpio17_pressed = BooleanProperty(False)
    device_associated = BooleanProperty(True)

    def on_start(self):
        self._publish_clock = None
        self.mqtt_broker_bridged = False
        self._associated = True
        self.association_code = None
        self.mqtt = Client(client_id=MQTT_CLIENT_ID)
        self.mqtt.enable_logger()
        self.mqtt.will_set(client_state_topic(MQTT_CLIENT_ID),
                           "0",
                           qos=2,
                           retain=True)
        self.mqtt.on_connect = self.on_connect
        self.mqtt.connect(MQTT_BROKER_HOST,
                          port=MQTT_BROKER_PORT,
                          keepalive=MQTT_BROKER_KEEP_ALIVE_SECS)
        self.mqtt.loop_start()
        self.set_up_GPIO_and_device_status_popup()
        self.associated_status_popup = self._build_associated_status_popup()
        self.associated_status_popup.bind(on_open=self.update_popup_associated)

        self._remote = None
        self._popup_remote = None
        self.pairing_popup = self._build_pairing_popup()

        self._update_remotes_ui()

        self.discoverswitch = self.root.ids.discoverswitch
        self.discoverswitch.bind(active=self.toggle_discovery)

        Clock.schedule_interval(self._poll_associated, 0.1)

    def _build_associated_status_popup(self):
        return Popup(title='Associate your Lamp',
                     content=Label(text='Msg here', font_size='30sp'),
                     size_hint=(1, 1),
                     auto_dismiss=False)

    def _build_pairing_popup(self):
        layout = StackLayout()
        label = Label(
            text=
            'A new remote is attempting\nto connect to your lamp.\n\nWould you like to\nallow it?',
            size_hint=(1, None),
            padding=(4, 4))
        label.bind(size=self._update_textsize)
        deny = Button(text='Deny', size_hint=(0.49, None), height=40)
        allow = Button(text='Trust', size_hint=(0.49, None), height=40)
        allow.on_release = self._allow_remote
        deny.on_release = self._decline_remote
        layout.add_widget(label)
        layout.add_widget(Label(size_hint=(1, None), height=15))
        layout.add_widget(deny)
        layout.add_widget(Label(size_hint=(0.02, None), height=1))
        layout.add_widget(allow)
        return Popup(title='Remote Pairing Request',
                     content=layout,
                     size_hint=(1, 0.68),
                     auto_dismiss=False)

    def _update_textsize(self, instance, value):
        instance.text_size = (value[0], value[1])

    def on_new_remote(self, client, userdata, message):
        isEmpty = message.payload == b''

        if isEmpty:
            self._remote = None
        else:
            remote = json.loads(message.payload.decode('utf-8'))
            self._remote = remote
            self._popup_remote = remote
            if (not remote['allowed']):
                self.pairing_popup.open()

        self._update_remotes_ui()

    def _allow_remote(self):
        print("Pairing allowed for {}".format(self._popup_remote['address']))
        remotes.saveAddress(self._popup_remote['address'])
        self._remote = None
        self._popup_remote = None
        self.pairing_popup.dismiss()
        self._update_remotes_ui()

        # Display confirmation
        conf = Popup(
            title='Remote Trusted',
            content=Label(
                text=
                'You have successfully trusted\nyour remote. Pair it again to\nuse it'
            ),
            size_hint=(1, 0.5),
            auto_dismiss=False)

        conf.open()
        Clock.schedule_once(lambda dt: conf.dismiss(), 3)

    def _decline_remote(self):
        print("Pairing denied for {}".format(self._popup_remote['address']))
        self._popup_remote = None
        self._remote = None
        self.pairing_popup.dismiss()
        self._update_remotes_ui()

    def clear_remotes(self):
        remotes.clear()
        self.mqtt.publish(DISCONNECT_TOPIC, b'')
        self._update_remotes_ui()

    def toggle_discovery(self, instance, value):
        # Send message accordingly
        self.mqtt.publish(DISCOVERY_TOPIC,
                          ("true" if value else "false").encode('utf8'),
                          retain=True)

    def _update_remotes_ui(self):
        savedremotes = remotes._read()
        statustext = "[b]Connected:[/b] False\n\n"

        if (self._remote is not None):
            self.remote_connection = "[b]Connected:[/b] [color=32ff32]{}[/color]".format(
                self._remote['address'])
        else:
            self.remote_connection = "[b]Connected:[/b] [color=ff3232]Not connected[/color]"

        if (len(savedremotes) == 0):
            self.trusted_remotes = "[b]Trusted Remotes:[/b] None"
        else:
            self.trusted_remotes = "[b]Trusted Remotes:[/b]\n" + "\n".join(
                [" • {}".format(addr) for addr in savedremotes])

    def on_hue(self, instance, value):
        if self._updatingUI:
            return
        self._track_ui_event('Slider Change', {
            'slider': 'hue-slider',
            'value': value
        })
        if self._publish_clock is None:
            self._publish_clock = Clock.schedule_once(
                lambda dt: self._update_leds(), 0.01)

    def on_saturation(self, instance, value):
        if self._updatingUI:
            return
        self._track_ui_event('Slider Change', {
            'slider': 'saturation-slider',
            'value': value
        })
        if self._publish_clock is None:
            self._publish_clock = Clock.schedule_once(
                lambda dt: self._update_leds(), 0.01)

    def on_brightness(self, instance, value):
        if self._updatingUI:
            return
        self._track_ui_event('Slider Change', {
            'slider': 'brightness-slider',
            'value': value
        })
        if self._publish_clock is None:
            self._publish_clock = Clock.schedule_once(
                lambda dt: self._update_leds(), 0.01)

    def on_lamp_is_on(self, instance, value):
        if self._updatingUI:
            return
        self._track_ui_event('Toggle Power', {'isOn': value})
        if self._publish_clock is None:
            self._publish_clock = Clock.schedule_once(
                lambda dt: self._update_leds(), 0.01)

    def on_preset_temp(self, instance, value):
        if self._updatingUI:
            return
        self._track_ui_event('Slider Change', {
            'slider': 'preset_hue_slider',
            'value': value
        })

    def _track_ui_event(self, event_name, additional_props=None):  # avoid a mutable default argument
        device_id = lampi.lampi_util.get_device_id()

        event_props = {
            'event_type': 'ui',
            'interface': 'lampi',
            'device_id': device_id
        }
        event_props.update(additional_props or {})

        self.mp.track(device_id, event_name, event_props)

    def on_connect(self, client, userdata, flags, rc):
        self.mqtt.publish(client_state_topic(MQTT_CLIENT_ID),
                          b"1",
                          qos=2,
                          retain=True)
        self.mqtt.message_callback_add(TOPIC_LAMP_CHANGE_NOTIFICATION,
                                       self.receive_new_lamp_state)
        self.mqtt.message_callback_add(broker_bridge_connection_topic(),
                                       self.receive_bridge_connection_status)
        self.mqtt.message_callback_add(TOPIC_LAMP_ASSOCIATED,
                                       self.receive_associated)
        self.mqtt.message_callback_add(NEW_REMOTE_TOPIC, self.on_new_remote)
        self.mqtt.subscribe(broker_bridge_connection_topic(), qos=1)
        self.mqtt.subscribe(TOPIC_LAMP_CHANGE_NOTIFICATION, qos=1)
        self.mqtt.subscribe(TOPIC_LAMP_ASSOCIATED, qos=2)
        self.mqtt.subscribe(NEW_REMOTE_TOPIC, qos=2)

    def _poll_associated(self, dt):
        # this polling loop allows us to synchronize changes from the
        #  MQTT callbacks (which happen in a different thread) to the
        #  Kivy UI
        self.device_associated = self._associated

    def receive_associated(self, client, userdata, message):
        # this is called in MQTT event loop thread
        new_associated = json.loads(message.payload.decode('utf-8'))
        if self._associated != new_associated['associated']:
            if not new_associated['associated']:
                self.association_code = new_associated['code']
            else:
                self.association_code = None
            self._associated = new_associated['associated']

    def on_device_associated(self, instance, value):
        if value:
            self.associated_status_popup.dismiss()
        else:
            self.associated_status_popup.open()

    def update_popup_associated(self, instance):
        code = self.association_code[0:6]
        instance.content.text = ("Please use the\n"
                                 "following code\n"
                                 "to associate\n"
                                 "your device\n"
                                 "on the Web\n{}".format(code))

    def receive_bridge_connection_status(self, client, userdata, message):
        # monitor if the MQTT bridge to our cloud broker is up
        if message.payload == b"1":
            self.mqtt_broker_bridged = True
        else:
            self.mqtt_broker_bridged = False

    def receive_new_lamp_state(self, client, userdata, message):
        new_state = json.loads(message.payload.decode('utf-8'))
        Clock.schedule_once(lambda dt: self._update_ui(new_state), 0.01)

    def _update_ui(self, new_state):
        if self._updated and new_state['client'] == MQTT_CLIENT_ID:
            # ignore updates generated by this client, except the first, to
            #   make sure the UI is synchronized with the lamp_service
            return
        self._updatingUI = True
        try:
            if 'color' in new_state:
                self.hue = new_state['color']['h']
                self.saturation = new_state['color']['s']
            if 'brightness' in new_state:
                self.brightness = new_state['brightness']
            if 'on' in new_state:
                self.lamp_is_on = new_state['on']
        finally:
            self._updatingUI = False

        self._updated = True

    def _update_leds(self):
        msg = {
            'color': {
                'h': self._hue,
                's': self._saturation
            },
            'brightness': self._brightness,
            'on': self.lamp_is_on,
            'client': MQTT_CLIENT_ID
        }
        self.mqtt.publish(TOPIC_SET_LAMP_CONFIG,
                          json.dumps(msg).encode('utf-8'),
                          qos=1)
        self._publish_clock = None

    def set_up_GPIO_and_device_status_popup(self):
        self.pi = pigpio.pi()
        self.pi.set_mode(17, pigpio.INPUT)
        self.pi.set_pull_up_down(17, pigpio.PUD_UP)
        Clock.schedule_interval(self._poll_GPIO, 0.05)
        self.network_status_popup = self._build_network_status_popup()
        self.network_status_popup.bind(on_open=self.update_device_status_popup)

    def _build_network_status_popup(self):
        return Popup(title='Device Status',
                     content=Label(text='IP ADDRESS WILL GO HERE'),
                     size_hint=(1, 1),
                     auto_dismiss=False)

    def update_device_status_popup(self, instance):
        interface = "wlan0"
        ipaddr = lampi.lampi_util.get_ip_address(interface)
        deviceid = lampi.lampi_util.get_device_id()
        msg = ("Version: {}\n"
               "{}: {}\n"
               "DeviceID: {}\n"
               "Broker Bridged: {}\n"
               "Async Analytics").format(LAMPI_APP_VERSION, interface, ipaddr,
                                         deviceid, self.mqtt_broker_bridged)
        instance.content.text = msg

    def on_gpio17_pressed(self, instance, value):
        if value:
            self.network_status_popup.open()
        else:
            self.network_status_popup.dismiss()

    def _poll_GPIO(self, dt):
        # GPIO17 is the rightmost button when looking front of LAMPI
        self.gpio17_pressed = not self.pi.read(17)

    def write_preset(self, num):

        filewrite = {
            "stateList": [
                {
                    "state": {
                        "h": self._preset_color,
                        "s": 1.0,
                        "b": 1.0
                    },
                    "smooth": False,
                    "waitTime": 0,
                    "transitionTime": 0
                },
            ],
            'loop':
            False
        }

        with open(PRESETS[num - 1] + ".json", "w") as f:
            json.dump(filewrite, f)
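
LampiApp is a regular Kivy App subclass, so launching it follows the usual pattern (a sketch, assuming the kv layout file and MQTT broker are in place):

if __name__ == '__main__':
    LampiApp().run()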
Example #16
def create_app(config_name):
    app = Flask(__name__, static_url_path='')
    # This first loads the configuration from e.g. config['development'], which corresponds to the DevelopmentConfig class in config.py.
    app.config.from_object(config[config_name])
    # Then you can override the values with the contents of the file the OPENTARGETS_API_LOCAL_SETTINGS environment variable points to.
    # For example:
    # $ export OPENTARGETS_API_LOCAL_SETTINGS=/path/to/settings.cfg
    #
    # where settings.cfg looks like:
    #
    # DEBUG = False
    # SECRET_KEY = 'foo'
    #
    app.config.from_envvar("OPENTARGETS_API_LOCAL_SETTINGS", silent=True)

    config[config_name].init_app(app)
    api_version = app.config['API_VERSION']
    api_version_minor = app.config['API_VERSION_MINOR']


    app.logger.info('looking for elasticsearch at: %s' % app.config['ELASTICSEARCH_URL'])


    app.extensions['redis-core'] = Redis(app.config['REDIS_SERVER_PATH'], db=0) #served data
    app.extensions['redis-service'] = Redis(app.config['REDIS_SERVER_PATH'], db=1) #cache, rate limit and internal things
    app.extensions['redis-user'] = Redis(app.config['REDIS_SERVER_PATH'], db=2)# user info
    '''setup cache'''
    app.extensions['redis-service'].config_set('save','')
    app.extensions['redis-service'].config_set('appendonly', 'no')
    icache = InternalCache(app.extensions['redis-service'],
                           str(api_version_minor))
    ip2org = IP2Org(icache)
    if app.config['ELASTICSEARCH_URL']:
        es = Elasticsearch(app.config['ELASTICSEARCH_URL'],
                           # # sniff before doing anything
                           # sniff_on_start=True,
                           # # refresh nodes after a node fails to respond
                           # sniff_on_connection_fail=True,
                           # # and also every 60 seconds
                           # sniffer_timeout=60
                           timeout=60 * 20,
                           maxsize=32,
                           )
    else:
        es = None
    '''elasticsearch handlers'''
    app.extensions['esquery'] = esQuery(es,
                                        DataTypes(app),
                                        DataSourceScoring(app),
                                        index_data=app.config['ELASTICSEARCH_DATA_INDEX_NAME'],
                                        index_efo=app.config['ELASTICSEARCH_EFO_LABEL_INDEX_NAME'],
                                        index_eco=app.config['ELASTICSEARCH_ECO_INDEX_NAME'],
                                        index_genename=app.config['ELASTICSEARCH_GENE_NAME_INDEX_NAME'],
                                        index_expression=app.config['ELASTICSEARCH_EXPRESSION_INDEX_NAME'],
                                        index_reactome=app.config['ELASTICSEARCH_REACTOME_INDEX_NAME'],
                                        index_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_INDEX_NAME'],
                                        index_search=app.config['ELASTICSEARCH_DATA_SEARCH_INDEX_NAME'],
                                        index_relation=app.config['ELASTICSEARCH_DATA_RELATION_INDEX_NAME'],
                                        docname_data=app.config['ELASTICSEARCH_DATA_DOC_NAME'],
                                        docname_efo=app.config['ELASTICSEARCH_EFO_LABEL_DOC_NAME'],
                                        docname_eco=app.config['ELASTICSEARCH_ECO_DOC_NAME'],
                                        docname_genename=app.config['ELASTICSEARCH_GENE_NAME_DOC_NAME'],
                                        docname_expression=app.config['ELASTICSEARCH_EXPRESSION_DOC_NAME'],
                                        docname_reactome=app.config['ELASTICSEARCH_REACTOME_REACTION_DOC_NAME'],
                                        docname_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_DOC_NAME'],
                                        docname_search=app.config['ELASTICSEARCH_DATA_SEARCH_DOC_NAME'],
                                        # docname_search_target=app.config['ELASTICSEARCH_DATA_SEARCH_TARGET_DOC_NAME'],
                                        # docname_search_disease=app.config['ELASTICSEARCH_DATA_SEARCH_DISEASE_DOC_NAME'],
                                        docname_relation=app.config['ELASTICSEARCH_DATA_RELATION_DOC_NAME'],
                                        log_level=app.logger.getEffectiveLevel(),
                                        cache=icache
                                        )

    app.extensions['es_access_store'] = esStore(es,
                                        eventlog_index=app.config['ELASTICSEARCH_LOG_EVENT_INDEX_NAME'],
                                        ip2org=ip2org,
                                        )
    '''mixpanel handlers'''
    if Config.MIXPANEL_TOKEN:
        mp = Mixpanel(Config.MIXPANEL_TOKEN, consumer=AsyncBufferedConsumer())
        app.extensions['mixpanel']= mp
        app.extensions['mp_access_store'] = MixPanelStore(mp,
                                            ip2org=ip2org,
                                            )


        app.extensions['proxy'] = ProxyHandler(allowed_targets=app.config['PROXY_SETTINGS']['allowed_targets'],
                                               allowed_domains=app.config['PROXY_SETTINGS']['allowed_domains'],
                                               allowed_request_domains=app.config['PROXY_SETTINGS']['allowed_request_domains'])

    # basepath = app.config['PUBLIC_API_BASE_PATH']+api_version
    # cors = CORS(app, resources=r'/api/*', allow_headers='Content-Type,Auth-Token')

    ''' define cache'''
    # cache = Cache(config={'CACHE_TYPE': 'simple'})
    # cache.init_app(latest_blueprint)
    # latest_blueprint.cache = cache
    # latest_blueprint.extensions['cache'] = cache
    # app.cache = SimpleCache()
    app.cache = FileSystemCache('/tmp/cttv-rest-api-cache', threshold=100000, default_timeout=60*60, mode=777)

    '''Set usage limiter '''
    # limiter = Limiter(global_limits=["2000 per hour", "20 per second"])
    # limiter.init_app(app)# use redis to store limits

    '''Load api keys in redis'''
    rate_limit_file = app.config['USAGE_LIMIT_PATH']
    if not os.path.exists(rate_limit_file):
        rate_limit_file = '../'+rate_limit_file
    csvfile = None
    if Config.GITHUB_AUTH_TOKEN:
        r = requests.get('https://api.github.com/repos/opentargets/rest_api_auth/contents/rate_limit.csv',
                         headers = {'Authorization': 'token %s'%Config.GITHUB_AUTH_TOKEN,
                                    'Accept': 'application/vnd.github.v3.raw'})
        if r.ok:
            csvfile = r.text.split('\n')
            app.logger.info('Retrieved rate limit file from github remote')
        else:
            app.logger.warning('Cannot retrieve rate limit file from remote, SKIPPED!')
    elif os.path.exists(rate_limit_file):
        csvfile = open(rate_limit_file)
        app.logger.info('Using dummy rate limit file')

    if csvfile is None:
        app.logger.error('cannot find rate limit file: %s. RATE LIMIT QUOTA LOAD SKIPPED!'%rate_limit_file)
    else:
        reader = csv.DictReader(csvfile)
        for row in reader:
            auth_key = AuthKey(**row)
            app.extensions['redis-user'].hmset(auth_key.get_key(), auth_key.__dict__)
        try:
            csvfile.close()
        except Exception:
            pass
        app.logger.info('successfully loaded rate limit file')


    '''load ip name resolution'''
    ip_resolver = defaultdict(lambda: "PUBLIC")
    ip_list_file = app.config['IP_RESOLVER_LIST_PATH']
    if not os.path.exists(ip_list_file):
        ip_list_file = '../' + ip_list_file
    if os.path.exists(ip_list_file):
        with open(ip_list_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                net = IPNetwork(row['ip'])
                ip_resolver[net] = row['org']
    else:
        app.logger.warning('cannot find IP list for IP resolver. All traffic will be logged as PUBLIC')
    app.config['IP_RESOLVER'] = ip_resolver



    '''compress http response'''
    compress = Compress()
    compress.init_app(app)

    latest_blueprint = Blueprint('latest', __name__)
    current_version_blueprint = Blueprint(str(api_version), __name__)
    current_minor_version_blueprint = Blueprint(str(api_version_minor), __name__)


    specpath = '/cttv'

    if app.config['PROFILE'] == True:
        from werkzeug.contrib.profiler import ProfilerMiddleware
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])


    '''set the right prefixes'''

    create_api(latest_blueprint, api_version, specpath)
    create_api(current_version_blueprint, api_version, specpath)
    create_api(current_minor_version_blueprint, api_version_minor, specpath)

    # app.register_blueprint(latest_blueprint, url_prefix='/latest/platform')
    app.register_blueprint(current_version_blueprint, url_prefix='/v'+str(api_version) + '/platform')
    app.register_blueprint(current_minor_version_blueprint, url_prefix='/v'+str(api_version_minor) + '/platform')


    '''serve the static docs'''
    
    try:
        '''
        NOTE: this file gets created only at deployment time
        '''
        # Python 2's file() builtin replaced with open(); safe_load avoids
        # executing arbitrary YAML tags.
        with open('app/static/openapi.yaml', 'r') as f:
            openapi_def = yaml.safe_load(f)
        app.logger.info('parsing swagger from static/openapi.yaml')

    except IOError:
        '''if we are not deployed, then simply use the template'''
        with open('openapi.template.yaml', 'r') as f:
            openapi_def = yaml.safe_load(f)
        app.logger.error('parsing swagger from openapi.template.yaml')

    with open("api-description.md", "r") as f:
        desc = f.read()
    openapi_def['info']['description'] = desc
    openapi_def['basePath'] = '/v%s' % str(api_version)
    @app.route('/v%s/platform/swagger' % str(api_version))
    def serve_swagger(apiversion=api_version):
        return jsonify(openapi_def)


    @app.route('/v%s/platform/docs' % str(api_version))
    def render_redoc(apiversion=api_version):
        return render_template('docs.html',api_version=apiversion)


    '''pre and post-request'''


    @app.before_request
    def before_request():
        g.request_start = datetime.now()
    @app.after_request
    def after(resp):
        try:
            rate_limiter = RateLimiter()
            now = datetime.now()
            took = (now - g.request_start).total_seconds()*1000
            if took > 500:
                cache_time = str(int(3600 * took))  # cache for one hour per second spent in the request
                resp.headers.add('X-Accel-Expires', cache_time)
            took = int(round(took))
            LogApiCallWeight(took)
            # if took < RateLimiter.DEFAULT_CALL_WEIGHT:
            #     took = RateLimiter.DEFAULT_CALL_WEIGHT
            current_values = increment_call_rate(took,rate_limiter)
            now = datetime.now()
            ceil10s=round(ceil_dt_to_future_time(now, 10),2)
            ceil1h=round(ceil_dt_to_future_time(now, 3600),2)
            usage_left_10s = rate_limiter.short_window_rate-current_values['short']
            usage_left_1h = rate_limiter.long_window_rate - current_values['long']
            min_ceil = ceil10s
            if usage_left_1h <0:
                min_ceil = ceil1h
            if (usage_left_10s < 0) or (usage_left_1h <0):
                resp.headers.add('Retry-After', min_ceil)
            resp.headers.add('X-API-Took', took)
            resp.headers.add('X-Usage-Limit-10s', rate_limiter.short_window_rate)
            resp.headers.add('X-Usage-Limit-1h', rate_limiter.long_window_rate)
            resp.headers.add('X-Usage-Remaining-10s', usage_left_10s)
            resp.headers.add('X-Usage-Remaining-1h', usage_left_1h)
            # resp.headers.add('X-Usage-Limit-Reset-10s', ceil10s)
            # resp.headers.add('X-Usage-Limit-Reset-1h', ceil1h)
            resp.headers.add('Access-Control-Allow-Origin', '*')
            resp.headers.add('Access-Control-Allow-Headers','Content-Type,Auth-Token')
            resp.headers.add('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE,OPTIONS')
            if do_not_cache(request):# do not cache in the browser
                resp.headers.add('Cache-Control', "no-cache, must-revalidate, max-age=0")
            else:
                resp.headers.add('Cache-Control', "no-transform, public, max-age=%i, s-maxage=%i"%(took*1800/1000, took*9000/1000))
            return resp

        except Exception as e:
            app.logger.exception('failed request teardown function: %s', e)
            return resp



    # Override the HTTP exception handler.
    app.handle_http_exception = get_http_exception_handler(app)
    return app
Example #17
def create_app(config_name):
    app = Flask(__name__, static_url_path='')
    # This first loads the configuration from e.g. config['development'], which corresponds to the DevelopmentConfig class in config.py.
    app.config.from_object(config[config_name])
    # Then you can override the values with the contents of the file the OPENTARGETS_API_LOCAL_SETTINGS environment variable points to.
    # For example:
    # $ export OPENTARGETS_API_LOCAL_SETTINGS=/path/to/settings.cfg
    #
    # where settings.cfg looks like:
    #
    # DEBUG = False
    # SECRET_KEY = 'foo'
    #
    app.config.from_envvar("OPENTARGETS_API_LOCAL_SETTINGS", silent=True)

    config[config_name].init_app(app)
    api_version = app.config['API_VERSION']
    api_version_minor = app.config['API_VERSION_MINOR']


    app.logger.info('looking for elasticsearch at: %s' % app.config['ELASTICSEARCH_URL'])


    app.extensions['redis-core'] = Redis(app.config['REDIS_SERVER_PATH'], db=0) #served data
    app.extensions['redis-service'] = Redis(app.config['REDIS_SERVER_PATH'], db=1) #cache, rate limit and internal things
    app.extensions['redis-user'] = Redis(app.config['REDIS_SERVER_PATH'], db=2)# user info
    '''setup cache'''
    app.extensions['redis-service'].config_set('save','')
    app.extensions['redis-service'].config_set('appendonly', 'no')
    icache = InternalCache(app.extensions['redis-service'],
                           str(api_version_minor))
    ip2org = IP2Org(icache)
    if app.config['ELASTICSEARCH_URL']:
        es = Elasticsearch(app.config['ELASTICSEARCH_URL'],
                           # # sniff before doing anything
                           # sniff_on_start=True,
                           # # refresh nodes after a node fails to respond
                           # sniff_on_connection_fail=True,
                           # # and also every 60 seconds
                           # sniffer_timeout=60
                           timeout=60 * 20,
                           maxsize=32,
                           )
    else:
        es = None
    '''elasticsearch handlers'''
    app.extensions['esquery'] = esQuery(
        es,
        DataTypes(app),
        DataSourceScoring(app),
        index_data=app.config['ELASTICSEARCH_DATA_INDEX_NAME'],
        index_drug=app.config['ELASTICSEARCH_DRUG_INDEX_NAME'],
        index_efo=app.config['ELASTICSEARCH_EFO_LABEL_INDEX_NAME'],
        index_eco=app.config['ELASTICSEARCH_ECO_INDEX_NAME'],
        index_genename=app.config['ELASTICSEARCH_GENE_NAME_INDEX_NAME'],
        index_expression=app.config['ELASTICSEARCH_EXPRESSION_INDEX_NAME'],
        index_reactome=app.config['ELASTICSEARCH_REACTOME_INDEX_NAME'],
        index_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_INDEX_NAME'],
        index_search=app.config['ELASTICSEARCH_DATA_SEARCH_INDEX_NAME'],
        index_relation=app.config['ELASTICSEARCH_DATA_RELATION_INDEX_NAME'],
        docname_data=app.config['ELASTICSEARCH_DATA_DOC_NAME'],
        docname_drug=app.config['ELASTICSEARCH_DRUG_DOC_NAME'],
        docname_efo=app.config['ELASTICSEARCH_EFO_LABEL_DOC_NAME'],
        docname_eco=app.config['ELASTICSEARCH_ECO_DOC_NAME'],
        docname_genename=app.config['ELASTICSEARCH_GENE_NAME_DOC_NAME'],
        docname_expression=app.config['ELASTICSEARCH_EXPRESSION_DOC_NAME'],
        docname_reactome=app.config['ELASTICSEARCH_REACTOME_REACTION_DOC_NAME'],
        docname_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_DOC_NAME'],
        docname_search=app.config['ELASTICSEARCH_DATA_SEARCH_DOC_NAME'],
        # docname_search_target=app.config['ELASTICSEARCH_DATA_SEARCH_TARGET_DOC_NAME'],
        # docname_search_disease=app.config['ELASTICSEARCH_DATA_SEARCH_DISEASE_DOC_NAME'],
        docname_relation=app.config['ELASTICSEARCH_DATA_RELATION_DOC_NAME'],
        log_level=app.logger.getEffectiveLevel(),
        cache=icache
        )

    app.extensions['es_access_store'] = esStore(es,
        eventlog_index=app.config['ELASTICSEARCH_LOG_EVENT_INDEX_NAME'],
        ip2org=ip2org,
        )

    '''mixpanel handlers'''
    if Config.MIXPANEL_TOKEN:
        mp = Mixpanel(Config.MIXPANEL_TOKEN, consumer=AsyncBufferedConsumer())
        app.extensions['mixpanel']= mp
        app.extensions['mp_access_store'] = MixPanelStore(
            mp,
            ip2org=ip2org,
            )


        app.extensions['proxy'] = ProxyHandler(
            allowed_targets=app.config['PROXY_SETTINGS']['allowed_targets'],
            allowed_domains=app.config['PROXY_SETTINGS']['allowed_domains'],
            allowed_request_domains=app.config['PROXY_SETTINGS']['allowed_request_domains'])

    # basepath = app.config['PUBLIC_API_BASE_PATH']+api_version
    # cors = CORS(app, resources=r'/api/*', allow_headers='Content-Type,Auth-Token')

    ''' define cache'''
    # cache = Cache(config={'CACHE_TYPE': 'simple'})
    # cache.init_app(latest_blueprint)
    # latest_blueprint.cache = cache
    # latest_blueprint.extensions['cache'] = cache
    # app.cache = SimpleCache()
    app.cache = FileSystemCache('/tmp/cttv-rest-api-cache', threshold=100000, default_timeout=60*60, mode=777)

    '''load ip name resolution'''
    ip_resolver = defaultdict(lambda: "PUBLIC")
    ip_list_file = app.config['IP_RESOLVER_LIST_PATH']
    if not os.path.exists(ip_list_file):
        ip_list_file = '../' + ip_list_file
    if os.path.exists(ip_list_file):
        with open(ip_list_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                net = IPNetwork(row['ip'])
                ip_resolver[net] = row['org']
    else:
        app.logger.warning('cannot find IP list for IP resolver. All traffic will be logged as PUBLIC')
    app.config['IP_RESOLVER'] = ip_resolver



    '''compress http response'''
    compress = Compress()
    compress.init_app(app)

    latest_blueprint = Blueprint('latest', __name__)
    current_version_blueprint = Blueprint(str(api_version), __name__)
    current_minor_version_blueprint = Blueprint(str(api_version_minor), __name__)


    specpath = '/cttv'

    if app.config['PROFILE'] == True:
        from werkzeug.contrib.profiler import ProfilerMiddleware
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])


    '''set the right prefixes'''

    create_api(latest_blueprint, api_version, specpath)
    create_api(current_version_blueprint, api_version, specpath)
    create_api(current_minor_version_blueprint, api_version_minor, specpath)

    # app.register_blueprint(latest_blueprint, url_prefix='/latest/platform')
    app.register_blueprint(current_version_blueprint, url_prefix='/v'+str(api_version) + '/platform')
    app.register_blueprint(current_minor_version_blueprint, url_prefix='/v'+str(api_version_minor) + '/platform')


    '''serve the static docs'''
    # Python 2's file() builtin replaced with open(); safe_load is the
    # non-executing loader.
    with open('app/static/openapi.template.yaml', 'r') as f:
        openapi_def = yaml.safe_load(f)
    app.logger.info('parsing swagger from app/static/openapi.template.yaml')

    #inject the description into the docs
    with open("api-description.md", "r") as f:
        desc = f.read()
    openapi_def['info']['description'] = desc
    openapi_def['basePath'] = '/v%s' % str(api_version)
    @app.route('/v%s/platform/swagger' % str(api_version))
    def serve_swagger(apiversion=api_version):
        return jsonify(openapi_def)

    @app.route('/v%s/platform/docs/swagger-ui' % str(api_version))
    def render_swaggerui(apiversion=api_version):
        return render_template('swaggerui.html',api_version=apiversion)

    '''pre and post-request'''


    @app.before_request
    def before_request():
        g.request_start = datetime.now()
    @app.after_request
    def after(resp):
        try:
            now = datetime.now()
            took = int(round((now - g.request_start).total_seconds()))
            resp.headers.add('Access-Control-Allow-Origin', '*')
            resp.headers.add('Access-Control-Allow-Headers','Content-Type,Auth-Token')
            resp.headers.add('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE,OPTIONS')
            if do_not_cache(request):# do not cache in the browser
                resp.headers.add('Cache-Control', "no-cache, must-revalidate, max-age=0")
            else:
                cache = 30 * 24 * 60 * 60  # cache for thirty days
                resp.headers.add('Cache-Control', "no-transform, max-age=%i"%(cache))
            return resp

        except Exception as e:
            app.logger.exception('failed request teardown function: %s', e)
            return resp

    # Override the HTTP exception handler.
    app.handle_http_exception = get_http_exception_handler(app)
    return app
Example #18
    def __init__(self):
        self.mp = Mixpanel(self.PROJECT_TOKEN,
                           consumer=AsyncBufferedConsumer())
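
This fragment implies an enclosing class with a PROJECT_TOKEN attribute; a hedged reconstruction of the minimal context it needs, with the class name and token as placeholders:

from mixpanel import Mixpanel
from mixpanel_async import AsyncBufferedConsumer


class AnalyticsClient(object):  # hypothetical name
    PROJECT_TOKEN = 'your-mixpanel-project-token'  # placeholder

    def __init__(self):
        self.mp = Mixpanel(self.PROJECT_TOKEN,
                           consumer=AsyncBufferedConsumer())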