Example #1
def create_app(priority_settings=None):
    
    # Initialising a Flask App
    app = Flask(__name__, static_url_path='')
    heroku = Heroku()
    compress = Compress() 

    # Load configuration from settings file
    app.config.from_object(settings)
    app.config.from_object(priority_settings)

    # Initialise database
    db.init_app(app)

    # Using Heroku as deployment server
    heroku.init_app(app)
    
    # Gzipping responses from the app
    compress.init_app(app)
    
    # Using Flask-Login
    login_manager = LoginManager()
    login_manager.init_app(app)
    login_manager.login_view = 'accounts.login'
    @login_manager.user_loader
    def load_user(user_id):
        return User.query.get(int(user_id))

    # Registering blueprints in an effort to keep the app modular
    app.register_blueprint(index_blueprint)
    app.register_blueprint(todo_blueprint)
    app.register_blueprint(user_blueprint)
    app.register_blueprint(api_todo_blueprint)

    with app.app_context():
        db.create_all()
    return app
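
For context, a factory like this is typically invoked from a small run script. The sketch below is an assumption: the package path `app` and the run settings are illustrative, not taken from the example.

# run.py -- hypothetical entry point for Example #1's factory.
from app import create_app  # assumed module path exposing create_app

app = create_app()

if __name__ == '__main__':
    app.run(debug=True)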
Example #2
def create_app(config_name):
    """
    An "application factory" used to initialise the app object. Configs and
    extensions are loaded here.

    See Flask docs for additional information:
    http://flask.pocoo.org/docs/0.10/patterns/appfactories/

    Parameters
    ----------
    config_name : str
        The configuration to run. Currently should be one of "development"
        or "default".

    Returns
    -------
    app : flask.app.Flask
        Flask application object.
    """
    app = Flask(__name__, static_url_path="/static")
    app.config.from_object(config[config_name])

    bootstrap = Bootstrap()
    compress = Compress()

    bootstrap.init_app(app)
    compress.init_app(app)
    mongo.init_app(app)

    from app.main import main as main_blueprint
    app.register_blueprint(main_blueprint)

    from app.dbapi import dbapi as dbapi_blueprint
    app.register_blueprint(dbapi_blueprint, url_prefix="/api")

    return app
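
The `config` mapping indexed above is not shown; a minimal sketch of what it might look like, with illustrative class names and settings (assumptions, not taken from the example), is:

# config.py -- hypothetical configuration registry for create_app(config_name).
class Config:
    SECRET_KEY = 'change-me'

class DevelopmentConfig(Config):
    DEBUG = True

config = {
    'development': DevelopmentConfig,
    'default': Config,
}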
Example #3
import os

from flask import Flask
from flaskext.mysql import MySQL     # the deprecated flask.ext.* namespace is gone; flask-mysql ships as flaskext.mysql
from flask_compress import Compress  # formerly flask.ext.compress

compress = Compress()
import convert_file
from werkzeug.utils import secure_filename  # newer werkzeug no longer re-exports this from the top-level package
from hurry.filesize import size
# Local helper modules (duplicate imports of datetime and port_manage removed);
# db_details, read below for DB credentials, presumably comes from `from lamp import *`.
import generate_random, generate_hash, send_email, random_string, port_manage, perform_calcy, delete_calc, repeat, single_container, db_manage
import datetime, getdisk, subprocess
import cms, getfile
from lamp import *
import threading

mysql = MySQL()
app = Flask(__name__)
compress.init_app(app)
app.config['MYSQL_DATABASE_USER'] = db_details.db_user
app.config['MYSQL_DATABASE_PASSWORD'] = db_details.db_pass
app.config['MYSQL_DATABASE_DB'] = db_details.db_name
app.config['MYSQL_DATABASE_HOST'] = 'localhost'
app.secret_key = os.urandom(24)
thread = None

mysql.init_app(app)

basedir = os.path.abspath(os.path.dirname(__file__))
user_session = {}
user_data = {}
key = ''
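
The `db_details` object read above is not shown in the snippet; a hypothetical stand-in module with the three attributes it uses could be:

# db_details.py -- assumed credentials module; every value here is illustrative.
db_user = 'app_user'
db_pass = 'app_password'
db_name = 'app_db'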

Example #4
#!/usr/bin/python
# -*- coding: UTF-8 -*-
from flask import Flask, render_template, redirect, request, session
from flask_sqlalchemy import SQLAlchemy  # formerly flask.ext.sqlalchemy
from flask_socketio import SocketIO
from flask_compress import Compress     # formerly flask.ext.compress

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///dayu.db'
app.secret_key = 'secret'
db = SQLAlchemy(app)
compress = Compress()
compress.init_app(app)
socketio = SocketIO(app)

@app.errorhandler(500)
def all_exception_handler(error):
    return redirect('/user/logout')

@app.errorhandler(401)
def no_authorization_handler(error):
    return redirect(session['last_url'])

@app.after_request
def after_any_request(response):
    path = request.path
    if request.method == 'GET' and '/static/' not in path:
        if request.query_string is not None:
            # The original snippet is cut off here. A plausible completion,
            # given the 401 handler above that redirects to session['last_url'],
            # is to remember the last non-static GET URL (an assumption):
            session['last_url'] = request.full_path
    return response
Example #5
def create_app(config_name):
    app = Flask(__name__, static_url_path='')
    # First load the configuration from e.g. config['development'], which corresponds to the DevelopmentConfig class in config.py.
    app.config.from_object(config[config_name])
    # Then you can override those values with the contents of the file that the
    # OPENTARGETS_API_LOCAL_SETTINGS environment variable points to. For example:
    # $ export OPENTARGETS_API_LOCAL_SETTINGS=/path/to/settings.cfg
    #
    # where settings.cfg looks like:
    #
    # DEBUG = False
    # SECRET_KEY = 'foo'
    #
    app.config.from_envvar("OPENTARGETS_API_LOCAL_SETTINGS", silent=True)

    config[config_name].init_app(app)
    api_version = app.config['API_VERSION']
    api_version_minor = app.config['API_VERSION_MINOR']


    app.logger.info('looking for elasticsearch at: %s' % app.config['ELASTICSEARCH_URL'])


    app.extensions['redis-core'] = Redis(app.config['REDIS_SERVER_PATH'], db=0)     # served data
    app.extensions['redis-service'] = Redis(app.config['REDIS_SERVER_PATH'], db=1)  # cache, rate limiting and internal things
    app.extensions['redis-user'] = Redis(app.config['REDIS_SERVER_PATH'], db=2)     # user info
    '''setup cache'''
    app.extensions['redis-service'].config_set('save','')
    app.extensions['redis-service'].config_set('appendonly', 'no')
    icache = InternalCache(app.extensions['redis-service'],
                           str(api_version_minor))
    ip2org = IP2Org(icache)
    if app.config['ELASTICSEARCH_URL']:
        es = Elasticsearch(app.config['ELASTICSEARCH_URL'],
                           # # sniff before doing anything
                           # sniff_on_start=True,
                           # # refresh nodes after a node fails to respond
                           # sniff_on_connection_fail=True,
                           # # and also every 60 seconds
                           # sniffer_timeout=60
                           timeout=60 * 20,
                           maxsize=32,
                           )
    else:
        es = None
    '''elasticsearch handlers'''
    app.extensions['esquery'] = esQuery(es,
                                        DataTypes(app),
                                        DataSourceScoring(app),
                                        index_data=app.config['ELASTICSEARCH_DATA_INDEX_NAME'],
                                        index_efo=app.config['ELASTICSEARCH_EFO_LABEL_INDEX_NAME'],
                                        index_eco=app.config['ELASTICSEARCH_ECO_INDEX_NAME'],
                                        index_genename=app.config['ELASTICSEARCH_GENE_NAME_INDEX_NAME'],
                                        index_expression=app.config['ELASTICSEARCH_EXPRESSION_INDEX_NAME'],
                                        index_reactome=app.config['ELASTICSEARCH_REACTOME_INDEX_NAME'],
                                        index_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_INDEX_NAME'],
                                        index_search=app.config['ELASTICSEARCH_DATA_SEARCH_INDEX_NAME'],
                                        index_relation=app.config['ELASTICSEARCH_DATA_RELATION_INDEX_NAME'],
                                        docname_data=app.config['ELASTICSEARCH_DATA_DOC_NAME'],
                                        docname_efo=app.config['ELASTICSEARCH_EFO_LABEL_DOC_NAME'],
                                        docname_eco=app.config['ELASTICSEARCH_ECO_DOC_NAME'],
                                        docname_genename=app.config['ELASTICSEARCH_GENE_NAME_DOC_NAME'],
                                        docname_expression=app.config['ELASTICSEARCH_EXPRESSION_DOC_NAME'],
                                        docname_reactome=app.config['ELASTICSEARCH_REACTOME_REACTION_DOC_NAME'],
                                        docname_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_DOC_NAME'],
                                        docname_search=app.config['ELASTICSEARCH_DATA_SEARCH_DOC_NAME'],
                                        # docname_search_target=app.config['ELASTICSEARCH_DATA_SEARCH_TARGET_DOC_NAME'],
                                        # docname_search_disease=app.config['ELASTICSEARCH_DATA_SEARCH_DISEASE_DOC_NAME'],
                                        docname_relation=app.config['ELASTICSEARCH_DATA_RELATION_DOC_NAME'],
                                        log_level=app.logger.getEffectiveLevel(),
                                        cache=icache
                                        )

    app.extensions['es_access_store'] = esStore(es,
                                        eventlog_index=app.config['ELASTICSEARCH_LOG_EVENT_INDEX_NAME'],
                                        ip2org=ip2org,
                                        )
    '''mixpanel handlers'''
    if Config.MIXPANEL_TOKEN:
        mp = Mixpanel(Config.MIXPANEL_TOKEN, consumer=AsyncBufferedConsumer())
        app.extensions['mixpanel']= mp
        app.extensions['mp_access_store'] = MixPanelStore(mp,
                                            ip2org=ip2org,
                                            )


        app.extensions['proxy'] = ProxyHandler(allowed_targets=app.config['PROXY_SETTINGS']['allowed_targets'],
                                               allowed_domains=app.config['PROXY_SETTINGS']['allowed_domains'],
                                               allowed_request_domains=app.config['PROXY_SETTINGS']['allowed_request_domains'])

    # basepath = app.config['PUBLIC_API_BASE_PATH']+api_version
    # cors = CORS(app, resources=r'/api/*', allow_headers='Content-Type,Auth-Token')

    ''' define cache'''
    # cache = Cache(config={'CACHE_TYPE': 'simple'})
    # cache.init_app(latest_blueprint)
    # latest_blueprint.cache = cache
    # latest_blueprint.extensions['cache'] = cache
    # app.cache = SimpleCache()
    app.cache = FileSystemCache('/tmp/cttv-rest-api-cache', threshold=100000, default_timeout=60*60, mode=0o777)  # mode must be octal; the original decimal 777 set the wrong permission bits

    '''Set usage limiter '''
    # limiter = Limiter(global_limits=["2000 per hour", "20 per second"])
    # limiter.init_app(app)# use redis to store limits

    '''Load api keys in redis'''
    rate_limit_file = app.config['USAGE_LIMIT_PATH']
    if not os.path.exists(rate_limit_file):
        rate_limit_file = '../'+rate_limit_file
    csvfile = None
    if Config.GITHUB_AUTH_TOKEN:
        r = requests.get('https://api.github.com/repos/opentargets/rest_api_auth/contents/rate_limit.csv',
                         headers = {'Authorization': 'token %s'%Config.GITHUB_AUTH_TOKEN,
                                    'Accept': 'application/vnd.github.v3.raw'})
        if r.ok:
            csvfile = r.text.split('\n')
            app.logger.info('Retrieved rate limit file from github remote')
        else:
            app.logger.warning('Cannot retrieve rate limit file from remote, SKIPPED!')
    elif os.path.exists(rate_limit_file):
        csvfile = open(rate_limit_file)
        app.logger.info('Using dummy rate limit file')

    if csvfile is None:
        app.logger.error('cannot find rate limit file: %s. RATE LIMIT QUOTA LOAD SKIPPED!'%rate_limit_file)
    else:
        reader = csv.DictReader(csvfile)
        for row in reader:
            auth_key = AuthKey(**row)
            app.extensions['redis-user'].hmset(auth_key.get_key(), auth_key.__dict__)
        try:
            csvfile.close()
        except AttributeError:
            pass  # the GitHub code path yields a list of lines, which has no close()
        app.logger.info('successfully loaded rate limit file')


    '''load ip name resolution'''
    ip_resolver = defaultdict(lambda: "PUBLIC")
    ip_list_file = app.config['IP_RESOLVER_LIST_PATH']
    if not os.path.exists(ip_list_file):
        ip_list_file = '../' + ip_list_file
    if os.path.exists(ip_list_file):
        with open(ip_list_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                net = IPNetwork(row['ip'])
                ip_resolver[net] = row['org']
    else:
        app.logger.warning('cannot find IP list for IP resolver. All traffic will be logged as PUBLIC')
    app.config['IP_RESOLVER'] = ip_resolver



    '''compress http response'''
    compress = Compress()
    compress.init_app(app)

    latest_blueprint = Blueprint('latest', __name__)
    current_version_blueprint = Blueprint(str(api_version), __name__)
    current_minor_version_blueprint = Blueprint(str(api_version_minor), __name__)


    specpath = '/cttv'

    if app.config['PROFILE']:
        from werkzeug.contrib.profiler import ProfilerMiddleware
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])


    '''set the right prefixes'''

    create_api(latest_blueprint, api_version, specpath)
    create_api(current_version_blueprint, api_version, specpath)
    create_api(current_minor_version_blueprint, api_version_minor, specpath)

    # app.register_blueprint(latest_blueprint, url_prefix='/latest/platform')
    app.register_blueprint(current_version_blueprint, url_prefix='/v'+str(api_version) + '/platform')
    app.register_blueprint(current_minor_version_blueprint, url_prefix='/v'+str(api_version_minor) + '/platform')


    '''serve the static docs'''
    
    try:
        '''
        NOTE: this file gets created only at deployment time
        '''
        with open('app/static/openapi.yaml') as f:
            openapi_def = yaml.safe_load(f)
        app.logger.info('parsing swagger from static/openapi.yaml')

    except IOError:
        '''if we are not deployed, then simply use the template'''
        with open('openapi.template.yaml') as f:
            openapi_def = yaml.safe_load(f)
        app.logger.error('parsing swagger from openapi.template.yaml')

    with open("api-description.md", "r") as f:
        desc = f.read()
    openapi_def['info']['description'] = desc
    openapi_def['basePath'] = '/v%s' % str(api_version)
    @app.route('/v%s/platform/swagger' % str(api_version))
    def serve_swagger(apiversion=api_version):
        return jsonify(openapi_def)


    @app.route('/v%s/platform/docs' % str(api_version))
    def render_redoc(apiversion=api_version):
        return render_template('docs.html',api_version=apiversion)


    '''pre and post-request'''


    @app.before_request
    def before_request():
        g.request_start = datetime.now()
    @app.after_request
    def after(resp):
        try:
            rate_limiter = RateLimiter()
            now = datetime.now()
            took = (now - g.request_start).total_seconds()*1000
            if took > 500:
                cache_time = str(int(3600*took))  # cache one hour per second spent in the request (note: took is in ms at this point)
                resp.headers.add('X-Accel-Expires', cache_time)
            took = int(round(took))
            LogApiCallWeight(took)
            # if took < RateLimiter.DEFAULT_CALL_WEIGHT:
            #     took = RateLimiter.DEFAULT_CALL_WEIGHT
            current_values = increment_call_rate(took,rate_limiter)
            now = datetime.now()
            ceil10s=round(ceil_dt_to_future_time(now, 10),2)
            ceil1h=round(ceil_dt_to_future_time(now, 3600),2)
            usage_left_10s = rate_limiter.short_window_rate-current_values['short']
            usage_left_1h = rate_limiter.long_window_rate - current_values['long']
            min_ceil = ceil10s
            if usage_left_1h < 0:
                min_ceil = ceil1h
            if (usage_left_10s < 0) or (usage_left_1h < 0):
                resp.headers.add('Retry-After', min_ceil)
            resp.headers.add('X-API-Took', took)
            resp.headers.add('X-Usage-Limit-10s', rate_limiter.short_window_rate)
            resp.headers.add('X-Usage-Limit-1h', rate_limiter.long_window_rate)
            resp.headers.add('X-Usage-Remaining-10s', usage_left_10s)
            resp.headers.add('X-Usage-Remaining-1h', usage_left_1h)
            # resp.headers.add('X-Usage-Limit-Reset-10s', ceil10s)
            # resp.headers.add('X-Usage-Limit-Reset-1h', ceil1h)
            resp.headers.add('Access-Control-Allow-Origin', '*')
            resp.headers.add('Access-Control-Allow-Headers','Content-Type,Auth-Token')
            resp.headers.add('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE,OPTIONS')
            if do_not_cache(request):# do not cache in the browser
                resp.headers.add('Cache-Control', "no-cache, must-revalidate, max-age=0")
            else:
                resp.headers.add('Cache-Control', "no-transform, public, max-age=%i, s-maxage=%i"%(took*1800/1000, took*9000/1000))
            return resp

        except Exception as e:
            app.logger.exception('failed request teardown function: %s', e)
            return resp



    # Override the HTTP exception handler.
    app.handle_http_exception = get_http_exception_handler(app)
    return app
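
`do_not_cache` is called in the after-request hook above but never defined in the example; one plausible sketch, under the assumption that it inspects the request for a no-cache hint, is:

# Hypothetical helper matching the call above; the query parameter and
# header conventions here are assumptions, not part of the original code.
def do_not_cache(request):
    if request.args.get('no_cache') == 'true':
        return True
    return 'no-cache' in request.headers.get('Cache-Control', '')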
Example #6
def make_app():
    cors = CORS(origins=[
        'https://app.communityshare.us:443', # production app
        'http://communityshare.localhost:5000', # local dev angular app
        'http://communityshare.localhost:8000', # local dev elm app
        'https://dmsnell.github.io/cs-elm/', # live elm app
    ])
    compress = Compress()
    webpack = Webpack()
    app = Flask(__name__, template_folder='../static/')
    app.response_class = JsonifyDictResponse

    app.config['SQLALCHEMY_DATABASE_URI'] = config.DB_CONNECTION
    app.config['WEBPACK_ASSETS_URL'] = config.WEBPACK_ASSETS_URL
    app.config['WEBPACK_MANIFEST_PATH'] = config.WEBPACK_MANIFEST_PATH

    cors.init_app(app)
    compress.init_app(app)
    webpack.init_app(app)

    if config.SSL != 'NO_SSL':
        flask_sslify.SSLify(app)
        app.wsgi_app = ReverseProxied(app.wsgi_app)

    register_user_routes(app)
    register_search_routes(app)
    register_conversation_routes(app)
    register_share_routes(app)
    register_survey_routes(app)
    register_email_routes(app)
    register_statistics_routes(app)

    community_share.api.register_routes(app)

    @app.teardown_appcontext
    def close_db_connection(exception):
        store.session.remove()

    @app.errorhandler(BadRequest)
    def handle_bad_request(error):
        return str(error), HTTPStatus.BAD_REQUEST

    app.errorhandler(Unauthorized)(jsonify_with_code(HTTPStatus.UNAUTHORIZED))
    app.errorhandler(Forbidden)(jsonify_with_code(HTTPStatus.FORBIDDEN))
    app.errorhandler(NotFound)(jsonify_with_code(HTTPStatus.NOT_FOUND))
    app.errorhandler(InternalServerError)(jsonify_with_code(HTTPStatus.INTERNAL_SERVER_ERROR))

    @app.route('/static/build/<path:filename>')
    def build_static(filename):
        return send_from_directory(
            app.root_path + '/../static/build/',
            filename,
            cache_timeout=YEAR_IN_SECONDS,
        )

    @app.route('/static/js/<path:filename>')
    def js_static(filename):
        return send_from_directory(app.root_path + '/../static/js/', filename)

    @app.route('/static/fonts/<path:filename>')
    def fonts_static(filename):
        return send_from_directory(app.root_path + '/../static/fonts/', filename)

    @app.route('/static/css/<path:filename>')
    def css_static(filename):
        return send_from_directory(app.root_path + '/../static/css/', filename)

    @app.route('/static/templates/footer.html')
    def footer_template():
        return render_template('templates/footer.html', config=config)

    @app.route('/static/templates/<path:filename>')
    def templates_static(filename):
        return send_from_directory(app.root_path + '/../static/templates/', filename)

    @app.route('/')
    def index():
        logger.debug('rendering index')
        return render_template('index.html', config=config)

    return app
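
`jsonify_with_code` is used above to turn werkzeug exceptions into JSON error responses but is not defined in the example; a minimal sketch of such a factory might be:

from flask import jsonify

# Hypothetical error-handler factory matching the registrations above:
# serialise the exception message as JSON with a fixed status code.
def jsonify_with_code(status_code):
    def handler(error):
        return jsonify(message=str(error)), status_code
    return handler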
Example #7
    def test_delayed_init(self):
        compress = Compress()
        compress.init_app(self.app)
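
In context, this method presumably sits in a unittest.TestCase whose setUp builds self.app; a self-contained version of the test, with an assumed minimal fixture, would read:

import unittest
from flask import Flask
from flask_compress import Compress

class CompressInitTest(unittest.TestCase):
    def setUp(self):
        # A plain app with no extension bound yet (assumed fixture).
        self.app = Flask(__name__)

    def test_delayed_init(self):
        # Create the extension first, bind it to the app afterwards.
        compress = Compress()
        compress.init_app(self.app)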
Example #8
def create_app(config_name):
    app = Flask(__name__, static_url_path='')
    app.config.from_object(config[config_name])
    app.config.from_envvar("OPENTARGETS_API_LOCAL_SETTINGS", silent=True)
    config[config_name].init_app(app)
    api_version = app.config['API_VERSION']
    api_version_minor = app.config['API_VERSION_MINOR']

    # log_level = logging.INFO
    # if app.config['DEBUG']:
    #     log_level = logging.DEBUG

    # Flask has a default logger which works well and pushes to stderr
    # if you want to add different handlers (to file, or logstash, or whatever)
    # you can use code similar to the one below and set the error level accordingly.

    # logHandler = logging.StreamHandler()
    # formatter = jsonlogger.JsonFormatter()
    # logHandler.setFormatter(formatter)
    # logHandler.setLevel(logging.INFO)
    # app.logger.addHandler(logHandler)

    # or for LOGSTASH
    # app.logger.addHandler(logstash.LogstashHandler(app.config['LOGSTASH_HOST'], app.config['LOGSTASH_PORT'], version=1))

    app.logger.info('looking for elasticsearch at: %s' %
                    app.config['ELASTICSEARCH_URL'])
    print('looking for elasticsearch at: %s' % app.config['ELASTICSEARCH_URL'])

    app.extensions['redis-core'] = Redis(app.config['REDIS_SERVER_PATH'],
                                         db=0)  #served data
    app.extensions['redis-service'] = Redis(
        app.config['REDIS_SERVER_PATH'],
        db=1)  #cache, rate limit and internal things
    app.extensions['redis-user'] = Redis(app.config['REDIS_SERVER_PATH'],
                                         db=2)  # user info
    '''setup cache'''
    app.extensions['redis-service'].config_set('save', '')
    app.extensions['redis-service'].config_set('appendonly', 'no')
    icache = InternalCache(app.extensions['redis-service'],
                           str(api_version_minor))
    ip2org = IP2Org(icache)
    es = Elasticsearch(
        app.config['ELASTICSEARCH_URL'],
        # # sniff before doing anything
        # sniff_on_start=True,
        # # refresh nodes after a node fails to respond
        # sniff_on_connection_fail=True,
        # # and also every 60 seconds
        # sniffer_timeout=60
        timeout=60 * 20,
        maxsize=100,
    )
    '''elasticsearch handlers'''
    app.extensions['esquery'] = esQuery(
        es,
        DataTypes(app),
        DataSourceScoring(app),
        index_data=app.config['ELASTICSEARCH_DATA_INDEX_NAME'],
        index_efo=app.config['ELASTICSEARCH_EFO_LABEL_INDEX_NAME'],
        index_eco=app.config['ELASTICSEARCH_ECO_INDEX_NAME'],
        index_genename=app.config['ELASTICSEARCH_GENE_NAME_INDEX_NAME'],
        index_expression=app.config['ELASTICSEARCH_EXPRESSION_INDEX_NAME'],
        index_reactome=app.config['ELASTICSEARCH_REACTOME_INDEX_NAME'],
        index_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_INDEX_NAME'],
        index_search=app.config['ELASTICSEARCH_DATA_SEARCH_INDEX_NAME'],
        index_relation=app.config['ELASTICSEARCH_DATA_RELATION_INDEX_NAME'],
        docname_data=app.config['ELASTICSEARCH_DATA_DOC_NAME'],
        docname_efo=app.config['ELASTICSEARCH_EFO_LABEL_DOC_NAME'],
        docname_eco=app.config['ELASTICSEARCH_ECO_DOC_NAME'],
        docname_genename=app.config['ELASTICSEARCH_GENE_NAME_DOC_NAME'],
        docname_expression=app.config['ELASTICSEARCH_EXPRESSION_DOC_NAME'],
        docname_reactome=app.config['ELASTICSEARCH_REACTOME_REACTION_DOC_NAME'],
        docname_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_DOC_NAME'],
        docname_search=app.config['ELASTICSEARCH_DATA_SEARCH_DOC_NAME'],
        # docname_search_target=app.config['ELASTICSEARCH_DATA_SEARCH_TARGET_DOC_NAME'],
        # docname_search_disease=app.config['ELASTICSEARCH_DATA_SEARCH_DISEASE_DOC_NAME'],
        docname_relation=app.config['ELASTICSEARCH_DATA_RELATION_DOC_NAME'],
        log_level=app.logger.getEffectiveLevel(),
        cache=icache)

    app.extensions['es_access_store'] = esStore(
        es,
        eventlog_index=app.config['ELASTICSEARCH_LOG_EVENT_INDEX_NAME'],
        ip2org=ip2org,
    )
    '''mixpanel handlers'''
    if Config.MIXPANEL_TOKEN:
        mp = Mixpanel(Config.MIXPANEL_TOKEN, consumer=AsyncBufferedConsumer())
        app.extensions['mixpanel'] = mp
        app.extensions['mp_access_store'] = MixPanelStore(
            mp,
            ip2org=ip2org,
        )

        app.extensions['proxy'] = ProxyHandler(
            allowed_targets=app.config['PROXY_SETTINGS']['allowed_targets'],
            allowed_domains=app.config['PROXY_SETTINGS']['allowed_domains'],
            allowed_request_domains=app.config['PROXY_SETTINGS']['allowed_request_domains'])

    basepath = app.config['PUBLIC_API_BASE_PATH'] + api_version
    # cors = CORS(app, resources=r'/api/*', allow_headers='Content-Type,Auth-Token')
    ''' define cache'''
    # cache = Cache(config={'CACHE_TYPE': 'simple'})
    # cache.init_app(latest_blueprint)
    # latest_blueprint.cache = cache
    # latest_blueprint.extensions['cache'] = cache
    # app.cache = SimpleCache()
    app.cache = FileSystemCache('/tmp/cttv-rest-api-cache',
                                threshold=100000,
                                default_timeout=60 * 60,
                                mode=0o777)  # mode must be octal; the original decimal 777 was a bug
    '''Set usage limiter '''
    # limiter = Limiter(global_limits=["2000 per hour", "20 per second"])
    # limiter.init_app(app)# use redis to store limits
    '''Load api keys in redis'''
    rate_limit_file = app.config['USAGE_LIMIT_PATH']
    if not os.path.exists(rate_limit_file):
        rate_limit_file = '../' + rate_limit_file
    if os.path.exists(rate_limit_file):
        with open(rate_limit_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                auth_key = AuthKey(**row)
                app.extensions['redis-user'].hmset(auth_key.get_key(),
                                                   auth_key.__dict__)
        print('INFO - successfully loaded rate limit file')
    else:
        print('ERROR - cannot find rate limit file')
        app.logger.error(
            'cannot find rate limit file: %s. RATE LIMIT QUOTA LOAD SKIPPED!' %
            rate_limit_file)
    '''load ip name resolution'''
    ip_resolver = defaultdict(lambda: "PUBLIC")
    ip_list_file = app.config['IP_RESOLVER_LIST_PATH']
    if not os.path.exists(ip_list_file):
        ip_list_file = '../' + ip_list_file
    if os.path.exists(ip_list_file):
        with open(ip_list_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                net = IPNetwork(row['ip'])
                ip_resolver[net] = row['org']
    else:
        app.logger.warning(
            'cannot find IP list for IP resolver. All traffic will be logged as PUBLIC'
        )
    app.config['IP_RESOLVER'] = ip_resolver
    '''compress http response'''
    compress = Compress()
    compress.init_app(app)

    latest_blueprint = Blueprint('latest', __name__)
    current_version_blueprint = Blueprint(str(api_version), __name__)
    current_minor_version_blueprint = Blueprint(str(api_version_minor),
                                                __name__)

    specpath = '/cttv'

    if app.config['PROFILE']:
        from werkzeug.contrib.profiler import ProfilerMiddleware
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])

    create_api(latest_blueprint, api_version, specpath)
    create_api(current_version_blueprint, api_version, specpath)
    create_api(current_minor_version_blueprint, api_version_minor, specpath)

    app.register_blueprint(latest_blueprint, url_prefix='/api/latest')
    app.register_blueprint(current_version_blueprint,
                           url_prefix='/api/' + str(api_version))
    app.register_blueprint(current_minor_version_blueprint,
                           url_prefix='/api/' + str(api_version_minor))

    @app.route('/api-docs/%s' % str(api_version_minor))
    def docs_current_minor_version():
        return redirect('/api/swagger/index.html')

    @app.route('/api-docs/%s' % str(api_version))
    def docs_current_version():
        return redirect('/api/swagger/index.html')

    @app.route('/api-docs')
    def docs():
        return redirect('/api/swagger/index.html')

    def serve_swagger():
        return app.send_static_file('docs/swagger/swagger.yaml')

    @app.route('/api/docs/swagger.yaml')
    def send_swagger():
        return serve_swagger()

    @app.route('/api/latest/docs/swagger.yaml')
    def send_swagger_latest():
        return serve_swagger()

    @app.route('/api/' + str(api_version) + '/docs/swagger.yaml')
    def send_swagger_current_version():
        return serve_swagger()

    @app.before_request
    def before_request():
        g.request_start = datetime.now()

    @app.after_request
    def after(resp):
        rate_limiter = RateLimiter()
        now = datetime.now()
        took = (now - g.request_start).total_seconds() * 1000
        if took > 500:
            cache_time = str(int(3600 * took))  # cache one hour per second spent in the request (note: took is in ms here)
            resp.headers.add('X-Accel-Expires', cache_time)
        took = int(round(took))
        LogApiCallWeight(took)
        # if took < RateLimiter.DEFAULT_CALL_WEIGHT:
        #     took = RateLimiter.DEFAULT_CALL_WEIGHT
        current_values = increment_call_rate(took, rate_limiter)
        now = datetime.now()
        ceil10s = round(ceil_dt_to_future_time(now, 10), 2)
        ceil1h = round(ceil_dt_to_future_time(now, 3600), 2)
        usage_left_10s = rate_limiter.short_window_rate - current_values['short']
        usage_left_1h = rate_limiter.long_window_rate - current_values['long']
        min_ceil = ceil10s
        if usage_left_1h < 0:
            min_ceil = ceil1h
        if (usage_left_10s < 0) or (usage_left_1h < 0):
            resp.headers.add('Retry-After', min_ceil)
        resp.headers.add('X-API-Took', took)
        resp.headers.add('X-Usage-Limit-10s', rate_limiter.short_window_rate)
        resp.headers.add('X-Usage-Limit-1h', rate_limiter.long_window_rate)
        resp.headers.add('X-Usage-Remaining-10s', usage_left_10s)
        resp.headers.add('X-Usage-Remaining-1h', usage_left_1h)
        # resp.headers.add('X-Usage-Limit-Reset-10s', ceil10s)
        # resp.headers.add('X-Usage-Limit-Reset-1h', ceil1h)
        resp.headers.add('Access-Control-Allow-Origin', '*')
        resp.headers.add('Access-Control-Allow-Headers',
                         'Content-Type,Auth-Token')
        if do_not_cache(request):  # do not cache in the browser
            resp.headers.add('Cache-Control',
                             "no-cache, must-revalidate, max-age=0")
        else:
            resp.headers.add(
                'Cache-Control',
                "no-transform, public, max-age=%i, s-maxage=%i" %
                (took * 1800 / 1000, took * 9000 / 1000))
        return resp

    return app
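
`ceil_dt_to_future_time` feeds the Retry-After header above but is not shown; judging from how it is called, it returns the seconds left until the current rate-limit window (10 s or 3600 s) rolls over. A sketch under that assumption:

# Hypothetical implementation inferred from the call sites above.
def ceil_dt_to_future_time(dt, window_seconds):
    # Seconds remaining until the next multiple of window_seconds.
    return window_seconds - (dt.timestamp() % window_seconds)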
Example #9
app = Flask(__name__)
app.config.from_object(server_config)

app.wsgi_app = StreamConsumingMiddleware(app.wsgi_app)

app.logger.handlers = []

app.logger.addHandler(create_logging_handler(app.config))
app.logger.addHandler(create_console_logger_handler())

app.userstorage = UserStorage()
load_users(app.userstorage)

# Enable GZIP compression
compressor = Compress()
compressor.init_app(app)

# Expose Markdown through the application object
app.markdown = Markdown()

app.storage = RecipeStorage(directory=app.config['RECIPE_DIRECTORY'],
                            backup=True,
                            logger=app.logger.info)

# Jinja context processors
@app.context_processor
def inject_template_variables():
    return dict(base_path=app.config['BASE_PATH'], upload_directory=app.config['UPLOAD_DIRECTORY'])

@app.context_processor
def file_size_context_processor():
    # The original snippet is cut off here. A plausible completion (an
    # assumption, not the original body) injects a human-readable size helper:
    def file_size(num_bytes):
        return '%.1f KiB' % (num_bytes / 1024.0)
    return dict(file_size=file_size)
Example #10
def make_app():
    cors = CORS(origins=[
        'https://app.communityshare.us:443',  # production app
        'http://communityshare.localhost:5000',  # local dev angular app
        'http://communityshare.localhost:8000',  # local dev elm app
        'https://dmsnell.github.io/cs-elm/',  # live elm app
    ])
    compress = Compress()
    webpack = Webpack()
    app = Flask(__name__, template_folder='../static/')

    app.config['SQLALCHEMY_DATABASE_URI'] = config.DB_CONNECTION
    app.config['WEBPACK_ASSETS_URL'] = config.WEBPACK_ASSETS_URL
    app.config['WEBPACK_MANIFEST_PATH'] = config.WEBPACK_MANIFEST_PATH

    cors.init_app(app)
    compress.init_app(app)
    webpack.init_app(app)

    if config.SSL != 'NO_SSL':
        flask_sslify.SSLify(app)
        app.wsgi_app = ReverseProxied(app.wsgi_app)

    register_user_routes(app)
    register_search_routes(app)
    register_conversation_routes(app)
    register_share_routes(app)
    register_survey_routes(app)
    register_email_routes(app)
    register_statistics_routes(app)

    community_share.api.register_routes(app)

    @app.teardown_appcontext
    def close_db_connection(exception):
        store.session.remove()

    @app.errorhandler(BadRequest)
    def handle_bad_request(error):
        return str(error), HTTPStatus.BAD_REQUEST

    @app.route('/static/build/<path:filename>')
    def build_static(filename):
        return send_from_directory(
            app.root_path + '/../static/build/',
            filename,
            cache_timeout=YEAR_IN_SECONDS,
        )

    @app.route('/static/js/<path:filename>')
    def js_static(filename):
        return send_from_directory(app.root_path + '/../static/js/', filename)

    @app.route('/static/fonts/<path:filename>')
    def fonts_static(filename):
        return send_from_directory(app.root_path + '/../static/fonts/',
                                   filename)

    @app.route('/static/css/<path:filename>')
    def css_static(filename):
        return send_from_directory(app.root_path + '/../static/css/', filename)

    @app.route('/static/templates/footer.html')
    def footer_template():
        return render_template('templates/footer.html', config=config)

    @app.route('/static/templates/<path:filename>')
    def templates_static(filename):
        return send_from_directory(app.root_path + '/../static/templates/',
                                   filename)

    @app.route('/')
    def index():
        logger.debug('rendering index')
        return render_template('index.html', config=config)

    return app
Example #11
def serve():
    compress = Compress()
    app = Flask(__name__, static_folder='static')
    compress.init_app(app)

    @app.route('/top_identities.json', methods=['GET', 'POST'])
    @json_api
    def top_identities():
        source = request.args.get('source', 'celebrity')
        emotion = request.args.get('emotion', 'happy')
        key = '%s x %s' % (source, emotion)
        text = redis().hget('top_identities.json', key)
        if not text:
            text = json.dumps(back.top_identities(source, emotion))
            redis().hset('top_identities.json', key, text)
        return text

    @app.route('/top_articles.json', methods=['GET', 'POST'])
    def top_articles():
        source = request.args.get('source', 'celebrity')
        emotion = request.args.get('emotion', 'happy')
        key = '%s x %s' % (source, emotion)
        text = redis().hget('top_articles.json', key)
        if not text:
            text = json.dumps(back.top_articles(source, emotion), indent=4)
            redis().hset('top_articles.json', key, text)
        return text

    # @app.route must be the outermost (topmost) decorator, otherwise the
    # undecorated function is what gets registered with Flask.
    @app.route('/all_articles.json', methods=['GET', 'POST'])
    @report_error
    @json_api
    def all_articles():
        source = request.args.get(
            'source', '20140629_government_politician_nyt')
        model = request.args.get('model', 'vectors_50d')
        text = redis().get('all_articles.json')
        if not text:
            text = redis_call(
                'all_articles.json', {'source': 'hdfs:///%s' % source, 'model': model})
            redis().set('all_articles.json', text)
        return text

    #@report_error
    #@json_api
    @app.route('/all_identities.json', methods=['GET', 'POST'])
    def all_identities():
        source = request.args.get(
            'source', '20140629_government_politician_nyt')
        model = request.args.get('model', 'vectors_50d')
        text = redis_call(
            'all_identities.json', {'source': 'hdfs:///%s' % source, 'model': model})
        response = Response(text, mimetype='application/json')
        response.headers['Access-Control-Allow-Origin'] = '*'
        return response

    # As above, @app.route must come first so the wrapped view is registered.
    @app.route('/confusion.json', methods=['GET', 'POST'])
    @json_api
    @report_error
    def confusion():
        key = 'json'
        text = redis().hget('confusion.json', key)
        if not text:
            text = redis_call(
                'confusion.json', {'source': 'semeval', 'model': 'twitter_vectors_20d'})
            redis().hset('confusion.json', key, text)
        return text

    # Run the API server.
    app.run(host='0.0.0.0', port=int(os.getenv('PORT', '80')),
            debug=True, threaded=True)
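
The `json_api` decorator used in this example is not defined here; a plausible sketch, assuming it wraps the returned JSON text in a Response with the right mimetype and an open CORS header (mirroring what all_identities does by hand), is:

import functools
from flask import Response

def json_api(view):
    # Hypothetical decorator: serve the view's string result as JSON with CORS.
    @functools.wraps(view)
    def wrapper(*args, **kwargs):
        text = view(*args, **kwargs)
        response = Response(text, mimetype='application/json')
        response.headers['Access-Control-Allow-Origin'] = '*'
        return response
    return wrapper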