Example #1
def create_app():
    """创建Flask app"""
    app = Flask(__name__)
    cache = Cache(app, config={'CACHE_TYPE': 'simple'})
    cache.init_app(app)
    # Load config
    config = load_config()
    app.config.from_object(config)

    # CSRF protect
    csrf.init_app(app)

    # Register components
    register_db(app)
    register_routes(app)
    register_jinja(app)
    register_error_handle(app)
    register_logger(app)
    register_uploadsets(app)

    # before every request
    @app.before_request
    def before_request():
        """Do something before request"""
        # Record the user's visit time in redis
        g.user = get_current_user()
        if g.user:
            set_user_active_time(g.user.id)
            g.msg_num = MailBox.query.filter(MailBox.recver_id == g.user.id, MailBox.result == None).count()

    from .utils.devices import Devices

    devices = Devices(app)

    return app
Example #2
 def assertCacheInitedWithoutError(self, cache_type_value):
     config = {
         'CACHE_TYPE': cache_type_value,
         'CACHE_NO_NULL_WARNING': True,
     }
     cache = Cache()
     try:
         cache.init_app(self.app, config=config)
     except Exception:
         self.fail("Can't set CACHE_TYPE to %s" % cache_type_value)
Example #3
 def test_21_redis_url_custom_db(self):
     config = {
         'CACHE_TYPE': 'redis',
         'CACHE_REDIS_URL': 'redis://localhost:6379/2',
     }
     cache = Cache()
     cache.init_app(self.app, config=config)
     rconn = self.app.extensions['cache'][cache] \
                 ._client.connection_pool.get_connection('foo')
     assert rconn.db == 2
Example #4
 def test_20_redis_url_default_db(self):
     config = {
         'CACHE_TYPE': 'redis',
         'CACHE_REDIS_URL': 'redis://localhost:6379',
     }
     cache = Cache()
     cache.init_app(self.app, config=config)
     from werkzeug.contrib.cache import RedisCache
     assert isinstance(self.app.extensions['cache'][cache], RedisCache)
     rconn = self.app.extensions['cache'][cache] \
                 ._client.connection_pool.get_connection('foo')
     assert rconn.db == 0
Example #5
def create_app(name=None):
    app = Flask(name)

    if os.environ.get('PRODUCTION'):
        app.config.from_object(ProductionConfig)
        print "running with ProductionConfig"
    else:
        app.config.from_object(DefaultConfig)
        print "running with DefaultConfig"

    # sentry
    if app.config.get('SENTRY_DSN'):
        sentry = Sentry()
        sentry.init_app(app)
        app.sentry = sentry

    # assets
    assets = Environment(app)
    assets.url = app.static_url_path
    scss_bundle = Bundle('css/*.scss', 'css/*.css',
        filters=['scss', 'cssmin'], depends='css/*.scss', output='css/all.css')
    assets.register('scss_all', scss_bundle)
    js_bundle = Bundle('js/*.js', filters='rjsmin', output='js/all.js')
    assets.register('js_all', js_bundle)
    Compress(app)

    # cache
    if app.config['DEBUG']:
        cache_type = 'null'
    else:
        cache_type = 'simple'

    cache = Cache(config={'CACHE_TYPE': cache_type})
    cache.init_app(app)
    app.cache = cache

    # CDN
    cdn = CDN()
    cdn.init_app(app)

    # workaround flask-assets / flask-cdn integration
    if app.config.get('CDN_HTTPS'):
        cdn_scheme = 'https'
    else:
        cdn_scheme = 'http'
    if app.config.get('FLASK_ASSETS_USE_CDN') and app.config.get('CDN_DOMAIN'):
        app.jinja_env.globals['FLASK_CDN'] = '%s://%s' % (cdn_scheme, app.config['CDN_DOMAIN'])

    return app
Example #6
 def __init__(self, **kwargs):
     """See :meth:`~jsonalchemy.storage.Storage.__init__`."""
     self._cache = kwargs.get('cache', None)
     if self._cache is None:
         from flask.ext.cache import Cache
         self._cache = Cache()
     self._prefix = kwargs.get('model', '')
Example #7
def create_app():
    """创建Flask app"""
    app = Flask(__name__)
    cache = Cache(app,config={'CACHE_TYPE': 'simple'})
    cache.init_app(app)
    # Load config
    config = load_config()
    app.config.from_object(config)

    # CSRF protect
    csrf.init_app(app)

    # if app.debug:
    #     DebugToolbarExtension(app)


    # from .mails import mail
    # mail.init_app(app)

    # Register components
    register_db(app)
    register_routes(app)
    register_jinja(app)
    register_error_handle(app)
    register_logger(app)
    register_uploadsets(app)

    # load chinese name dict from dict.txt once
    # from utils.account import CheckName
    #
    # # Load dict.txt
    # CheckName(app)

    # before every request
    @app.before_request
    def before_request():
        """Do something before request"""
        # Record the user's visit time in redis
        g.user = get_current_user()
        if g.user:
            set_user_active_time(g.user.id)

    from .utils.devices import Devices

    devices = Devices(app)

    return app
Example #8
def compile_cam_images():
    try:
        with current_app.test_request_context():
            print "[+] Starting cam images cache reset..."
            cache = Cache(config={'CACHE_TYPE': 'redis', 'CACHE_REDIS_DB': 0})
            cache.init_app(current_app)

            cam_images = _compile_cam_images()

            if "error" not in cam_images:
                cache.set('cam_images', cam_images, timeout=CACHE_TIMEOUT)
                print "[+] cam images cache reset."
            else:
                print "[-] Error in cache update"
    except Exception as err:
        message = 'compile_cam_images exception: %s' % err.message
        current_app.logger.warning(message)
Example #9
def compile_large_format_files():
    try:
        with current_app.test_request_context():
            print "[+] Starting large format file cache reset..."
            cache = Cache(config={'CACHE_TYPE': 'redis', 'CACHE_REDIS_DB': 0})
            cache.init_app(current_app)

            data = _compile_large_format_files()

            if "error" not in data:
                cache.set('large_format', data, timeout=CACHE_TIMEOUT)
                print "[+] large format files updated."
            else:
                print "[-] Error in large file format update"
    except Exception as err:
        message = 'compile_large_format_files exception: %s' % err.message
        current_app.logger.warning(message)
Example #10
class CachedStore(SessionStore):

    def __init__(self, app, session_cache_config=None, app_cache_config=None):
        SessionStore.__init__(self)
        # check if app cache config exists, otherwise use the same as cache_config
        if app_cache_config and not session_cache_config:
            self._app_cache_client = Cache(config=app_cache_config)
            self._session_cache_client = Cache(config=app_cache_config)
        elif app_cache_config and session_cache_config:
            self._app_cache_client = Cache(config=app_cache_config)
            self._session_cache_client = Cache(config=session_cache_config)
        elif not app_cache_config and session_cache_config:
            self._app_cache_client = Cache(config=session_cache_config)
            self._session_cache_client = Cache(config=session_cache_config)
        else:
            self._app_cache_client = self._session_cache_client = Cache()
        self._app_cache_client.init_app(app)
        #self._session_cache_client.init_app(app)
        # now set the app config to contain the cache
        app.config['_session_cache'] = self._session_cache_client
        app.config['_app_cache'] = self._app_cache_client

    def save(self, session):
        key = self._get_memcache_key(session.sid)
        data = json.dumps(dict(session))
        print "{0}:{1}".format(key, data)
        # Sessions go through the session cache client configured in __init__
        self._session_cache_client.set(key, data)

    def delete(self, session):
        key = self._get_memcache_key(session.sid)
        self._session_cache_client.delete(key)

    def get(self, sid):
        key = self._get_memcache_key(sid)
        data = self._session_cache_client.get(key)
        if data is not None:
            session = json.loads(data)
        else:
            session = {}
        return self.session_class(session, sid, False)

    def _get_memcache_key(self, sid):
        if isinstance(sid, unicode):
            sid = sid.encode('utf-8')
        return sid
Example #11
    def setUp(self):
        app = Flask(__name__)

        app.debug = False
        app.config['CACHE_TYPE'] = 'simple'

        self.cache = Cache(app)

        self.app = app
Example #12
    def setUp(self):
        app = Flask(__name__)

        app.debug = True
        self._set_app_config(app)

        self.cache = Cache(app)

        self.app = app
Example #13
    def setUp(self):
        app = Flask(__name__, template_folder=os.path.dirname(__file__))

        app.debug = True
        self._set_app_config(app)

        self.cache = Cache(app)

        self.app = app
Example #14
def compile_glider_tracks():
    try:
        with current_app.test_request_context():
            print "[+] Starting glider tracks cache reset..."
            cache = Cache(config={'CACHE_TYPE': 'redis', 'CACHE_REDIS_DB': 0})
            cache.init_app(current_app)

            glider_tracks = _compile_glider_tracks(True)

            if "error" not in glider_tracks:
                cache.set('glider_tracks', glider_tracks, timeout=CACHE_TIMEOUT)
                print "[+] Glider tracks cache reset."
            else:
                print "[-] Error in cache update"

    except Exception as err:
        message = 'compile_glider_tracks exception: %s' % err.message
        current_app.logger.warning(message)
Example #15
    def setUp(self):
        from flask import Flask
        self.testapp = Flask('test-app')
        self.app = self.testapp.test_client()
        self.cache = Cache(self.testapp, config={'CACHE_TYPE': 'simple'})

        self.real_requests = utilities.requests
        self.real_cache = location.cache
        utilities.requests = MockRequests()
        location.cache = self.cache
Example #16
 def __init__(self, app, session_cache_config=None, app_cache_config=None):
     SessionStore.__init__(self)
     # check if app cache config exists, otherwise use the same as cache_config
     if app_cache_config and not session_cache_config:
         self._app_cache_client = Cache(config=app_cache_config)
         self._session_cache_client = Cache(config=app_cache_config)
     elif app_cache_config and session_cache_config:
         self._app_cache_client = Cache(config=app_cache_config)
         self._session_cache_client = Cache(config=session_cache_config)
     elif not app_cache_config and session_cache_config:
         self._app_cache_client = Cache(config=session_cache_config)
         self._session_cache_client = Cache(config=session_cache_config)
     else:
         self._app_cache_client = self._session_cache_client = Cache()
     self._app_cache_client.init_app(app)
     #self._session_cache_client.init_app(app)
     # now set the app config to contain the cache
     app.config['_session_cache'] = self._session_cache_client
     app.config['_app_cache'] = self._app_cache_client
Example #17
def compile_bad_assets():
    try:
        with current_app.test_request_context():
            print "[+] Starting bad asset cache reset..."
            cache = Cache(config={'CACHE_TYPE': 'redis', 'CACHE_REDIS_DB': 0})
            cache.init_app(current_app)
            url = current_app.config['UFRAME_ASSETS_URL'] + '/assets'
            payload = requests.get(url)
            if payload.status_code == 200:
                data = payload.json()
                bad_assets = _compile_bad_assets(data)
                if "error" not in bad_assets:
                    cache.set('bad_asset_list', bad_assets, timeout=CACHE_TIMEOUT)
                    print "[+] Bad asset cache reset"
                else:
                    print "[-] Error in cache update"
    except Exception as err:
        message = 'compile_bad_assets exception: %s' % err.message
        current_app.logger.warning(message)
Example #18
def compile_assets():
    try:
        print '\n debug - *** tasks - compile_assets()'
        with current_app.test_request_context():
            print "[+] Starting asset cache reset..."
            cache = Cache(config={'CACHE_TYPE': 'redis', 'CACHE_REDIS_DB': 0})
            cache.init_app(current_app)
            url = current_app.config['UFRAME_ASSETS_URL'] + '/%s' % ('assets')
            payload = requests.get(url)
            if payload.status_code == 200:

                # Cache assets_list
                data = payload.json()
                assets, asset_rds = _compile_assets(data)
                if "error" not in assets:
                    cache.set('asset_list', assets, timeout=CACHE_TIMEOUT)
                    print "[+] Asset list cache reset"

                    # Cache assets_dict (based on success of _compile_assets returning assets)
                    assets_dict = get_assets_dict_from_list(assets)
                    if not assets_dict:
                        message = 'Warning: get_assets_dict_from_list returned empty assets_dict.'
                        print '\n debug -- message: ', message
                        current_app.logger.info(message)
                    if isinstance(assets_dict, dict):
                        cache.set('assets_dict', assets_dict, timeout=CACHE_TIMEOUT)
                        print "[+] Assets dictionary cache reset"
                    else:
                        print "[-] Error in Assets dictionary cache update"
                else:
                    print "[-] Error in asset_list and asset_dict cache update"

                # Cache assets_rd
                if asset_rds:
                    cache.set('asset_rds', asset_rds, timeout=CACHE_TIMEOUT)
                    print "[+] Asset reference designators cache reset..."
                else:
                    print "[-] Error in asset_rds cache update"

            else:
                print "[-] Error in cache update"
    except Exception as err:
        message = 'compile_assets exception: %s' % err.message
        current_app.logger.warning(message)
        raise Exception(message)
Example #19
def compile_c2_toc():
    try:
        c2_toc = {}
        with current_app.test_request_context():
            print "[+] Starting c2 toc cache reset..."
            cache = Cache(config={'CACHE_TYPE': 'redis', 'CACHE_REDIS_DB': 0})
            cache.init_app(current_app)
            try:
                c2_toc = _compile_c2_toc()
            except Exception as err:
                message = 'Error processing compile_c2_toc: %s' % err.message
                current_app.logger.warning(message)

            if c2_toc is not None:
                cache.set('c2_toc', c2_toc, timeout=CACHE_TIMEOUT)
                print "[+] C2 toc cache reset..."
            else:
                print "[-] Error in cache update"

    except Exception as err:
        message = 'compile_c2_toc exception: %s' % err.message
        current_app.logger.warning(message)
Example #20
    def test_06a_memoize(self):
        self.app.config['CACHE_DEFAULT_TIMEOUT'] = 1
        self.cache = Cache(self.app)

        with self.app.test_request_context():
            @self.cache.memoize(50)
            def big_foo(a, b):
                return a+b+random.randrange(0, 100000)

            result = big_foo(5, 2)

            time.sleep(2)

            assert big_foo(5, 2) == result
Example #21
    def __init__(self, app):
        self.app = app

        # configuration
        config = {'CACHE_TYPE': settings.DS_TYPE}
        if settings.DS_TYPE == 'filesystem':
            config['CACHE_DIR'] = settings.DS_FILESYSTEM_DIR
        elif settings.DS_TYPE == 'redis':
            config['CACHE_REDIS_HOST'] = settings.DS_REDIS_HOST
            config['CACHE_REDIS_PORT'] = settings.DS_REDIS_PORT
            config['CACHE_REDIS_PASSWORD'] = settings.DS_REDIS_PASSWORD
            config['CACHE_REDIS_DB'] = settings.DS_REDIS_DB

        self.cache = Cache(app, config=config)
Example #22
def compile_assets_rd():
    try:
        asset_rds = {}
        with current_app.test_request_context():
            print "[+] Starting asset reference designators cache reset..."

            cache = Cache(config={'CACHE_TYPE': 'redis', 'CACHE_REDIS_DB': 0})
            cache.init_app(current_app)
            try:
                asset_rds, _ = _compile_asset_rds()
            except Exception as err:
                message = 'Error processing _compile_asset_rds: %s' % err.message
                current_app.logger.warning(message)

        if asset_rds:
            cache.set('asset_rds', asset_rds, timeout=CACHE_TIMEOUT)
            print "[+] Asset reference designators cache reset..."
        else:
            print "[-] Error in cache update"

    except Exception as err:
        message = 'compile_asset_rds exception: %s' % err.message
        current_app.logger.warning(message)
        raise Exception(message)
Example #23
    def __init__(self, app_config, import_name, static_path=None, static_url_path=None, static_folder='static',
                 template_folder='templates', instance_path=None, instance_relative_config=False):

        from flask.ext.cache import Cache
        from ambry.library import Library
        from ambry.run import get_runconfig

        self._initialized = False
        self.csrf = CsrfProtect()
        self.login_manager = LoginManager()

        super(Application, self).__init__(import_name, static_path, static_url_path, static_folder,
                                          template_folder, instance_path, instance_relative_config)

        self.config.update(app_config)


        l = Library(get_runconfig(), read_only=True, echo=False)

        self.cache = Cache(config={'CACHE_TYPE': 'filesystem', 'CACHE_DIR': l.filesystem.cache('ui')})
        self.cache.init_app(self)
Example #24
def compile_vocabulary():
    try:
        with current_app.test_request_context():
            print "[+] Starting vocabulary cache reset..."
            cache = Cache(config={'CACHE_TYPE': 'redis', 'CACHE_REDIS_DB': 0})
            cache.init_app(current_app)
            url = current_app.config['UFRAME_VOCAB_URL'] + '/vocab'
            payload = requests.get(url)
            if payload.status_code == 200:
                data = payload.json()
                vocab_dict, vocab_codes = compile_vocab(data)
                if "error" not in vocab_dict:
                    cache.set('vocab_dict', vocab_dict, timeout=CACHE_TIMEOUT)
                    cache.set('vocab_codes', vocab_codes, timeout=CACHE_TIMEOUT)
                    print "[+] Vocabulary cache reset"
                else:
                    print "[-] Error in cache update"
    except Exception as err:
        message = 'compile_vocabulary exception: %s' % err.message
        current_app.logger.warning(message)
Example #25
def run():
    """Runs the flask app using the config.ini found in the working dir."""
    import webcronmon
    import webcronmon.config

    config = webcronmon.active_config = webcronmon.config.load()

    webcronmon.app.debug = config.app.debug
    cache = Cache(config=config.cache.as_dictionary)
    cache.init_app(webcronmon.app)

    import webcronmon.api
    webcronmon.api.list_monitors = cache.cached(
        key_prefix='list_monitors',
        timeout=config.api_cache.list_monitors
    )(webcronmon.api.list_monitors)
    webcronmon.api.list_monitor_states = cache.cached(
        key_prefix='list_monitor_states',
        timeout=config.api_cache.list_monitor_states
    )(webcronmon.api.list_monitor_states)
    webcronmon.api.list_monitor_uptimes = cache.cached(
        key_prefix='list_monitor_uptimes',
        timeout=config.api_cache.list_monitor_uptimes
    )(webcronmon.api.list_monitor_uptimes)

    import webcronmon.views
    for route, shown_groups in config.routes:
        view = partial(
            webcronmon.views.show_monitors,
            config=config,
            shown_groups=shown_groups
        )
        webcronmon.app.add_url_rule(route, route.replace('/', '-'), view)

    # force logging through webcronmon
    webcronmon.app._logger = logging.getLogger('webcronmon')
    webcronmon.app.logger_name = 'webcronmon'

    try:
        webcronmon.app.run(
            host=config.app.host, port=config.app.port, debug=config.app.debug,
        )
    except KeyboardInterrupt:
        pass
Example #26
# Application setup


class MyFlask(Flask):
    jinja_options = dict(Flask.jinja_options)
    jinja_options.setdefault(
        'extensions', []).append('i2p2www.extensions.HighlightExtension')


app = application = MyFlask('i2p2www',
                            template_folder=TEMPLATE_DIR,
                            static_url_path='/_static',
                            static_folder=STATIC_DIR)
app.debug = os.environ.get('APP_DEBUG', 'False') == 'True'
babel = Babel(app, default_domain=DEFAULT_GETTEXT_DOMAIN)
cache = Cache(app, config=CACHE_CONFIG)

#################
# Babel selectors


@babel.localeselector
def get_locale():
    # If viewing specs, require English
    if request.path.startswith('/spec'):
        return 'en'
    # If the language is already set from the url, use that
    if hasattr(g, 'lang'):
        return g.lang
    # otherwise try to guess the language from the user accept
    # header the browser transmits. The best match wins.
Example #27
domain_check = 'google'
jsonp_to_json_keys = {'json-in-script': 'json'}
jsonp_header_overrides = {'content-type': 'text/javascript; charset=UTF-8'}

# Make and configure the Flask app
app = Flask(__name__)
if debug_app:
    app.debug = True

# Set up cache
cache_config = {
    'CACHE_TYPE': 'filesystem',
    'CACHE_THRESHOLD': 1000,
    'CACHE_DIR': 'cache'
}
cache = Cache(config=cache_config)
cache.init_app(app, config=cache_config)


# Just a default route
@app.route('/')
@cache.cached(timeout=500)
def index():
    return 'Supported keys: %s' % ', '.join(proxy_keys)


# Proxy route
@app.route('/proxy')
def handle_proxy():
    request_url = request.args.get('url', '')
    request_parsed = urlparse.urlparse(request_url)
Example #28
# Our own modules
###
import geocoder

###
# Globals
###
import CONFIG
from presidents import PRESIDENTS
CACHESIZE = CONFIG.CACHESIZE

app = flask.Flask(__name__)
cache = Cache(app,
              config={
                  "CACHE_TYPE": "filesystem",
                  "CACHE_THRESHOLD": CACHESIZE,
                  "CACHE_DIR": ".flaskCache",
                  "CACHE_DEFAULT_TIMEOUT": 1000
              })

app.secret_key = CONFIG.COOKIE_KEY  # Should allow using session variables


@cache.memoize()  # Cached so we can avoid the 4+ second API lookup time
def geocode_presidents(town):
    """
    Returns a list of all intersections where a numbered street crosses a street
    named after the corresponding president ("1st and Washington", etc.)
    
    Each item in the resulting list is a tuple, with item[0] holding the name
    of the intersection ("1st and Washington"), and item[1] holding a tuple
Example #29
    logging.basicConfig(level=logging.INFO)
    app.debug = False
logging.info("Using database: %s" % elephant_url)




engine = create_engine(elephant_url)

Session.configure(bind=engine)  # once engine is available

Base.metadata.create_all(engine)



cache = Cache(app, config={'CACHE_TYPE': 'simple'})
toolbar = DebugToolbarExtension(app)

cpsession = requests.session()
cpsession.headers.update(cfg.chargepoint_session_headers)
urls = cfg.urls
garage_mapping = cfg.garage_mapping



def do_login(cpuser,cppassword):
    """

    :param cpuser: string
    :param cppassword: string
    :return: :rtype: string
Example #30
from spam import is_spam, is_working_akismet_key
from requests import get
from time import time
from flask.ext.cache import Cache
from recaptcha.client import captcha
from timeout import timeout
from flask import jsonify, request, Response
import anyjson
import helpers

# Initialize login
login_manager = LoginManager()
login_manager.init_app(app)

# Initialize cache
cache = Cache()
cache.init_app(app, config={'CACHE_TYPE': 'simple'})

# Set flags:

check_for_spam = False
if app.config['ENVIRONMENT'] == 'PRODUCTION':
	check_for_spam = True

# Submitting a new request
def new_request(passed_recaptcha = False, data = None):
	user_id = get_user_id()
	if data or request.method == 'POST':
		if not data and not passed_recaptcha:
			data = request.form.copy()
		email = data['request_email']
Example #31
app = Flask(__name__)
app.config.from_object(__name__ + '.ConfigClass')

Compress(app)  # Initialize Flask-Compress
db = SQLAlchemy(app)  # Initialize Flask-SQLAlchemy
mail = Mail(app)  # Initialize Flask-Mail

if not app.debug:
    flask_raygun.Provider(app, app.config['RAYGUN_APIKEY']).attach()

celery = Celery(app.name, broker=app.config['CELERY_BROKER_URL'])
celery.conf.update(app.config)

cache = Cache(app,
              config={
                  'CACHE_TYPE': app.config['CACHE_TYPE'],
                  'CACHE_REDIS_URL': app.config['CACHE_REDIS_URL'],
              })

from app import views, models, forms, tasks
from forms import InviteForm
from app.models import User
from app.views import *

db_adapter = SQLAlchemyAdapter(db, User)  # Register the User model
user_manager = UserManager(  # Initialize Flask-User
    db_adapter,
    app,
    register_form=InviteForm,
    register_view_function=invite)
Example #32
def configure(app):
    config_file = os.environ.get('GRAPHITE_API_CONFIG',
                                 '/etc/graphite-api.yaml')
    if os.path.exists(config_file):
        with open(config_file) as f:
            config = yaml.safe_load(f)
            config['path'] = config_file
    else:
        warnings.warn("Unable to find configuration file at {0}, using "
                      "default config.".format(config_file))
        config = {}

    configure_logging(config)

    for key, value in list(default_conf.items()):
        config.setdefault(key, value)

    app.statsd = None
    if 'statsd' in config:
        try:
            from statsd import StatsClient
        except ImportError:
            warnings.warn("'statsd' is provided in the configuration but "
                          "the statsd client is not installed. Please `pip "
                          "install statsd`.")
        else:
            c = config['statsd']
            app.statsd = StatsClient(c['host'], c.get('port', 8125))

    app.cache = None
    if 'cache' in config:
        try:
            from flask.ext.cache import Cache
        except ImportError:
            warnings.warn("'cache' is provided in the configuration but "
                          "Flask-Cache is not installed. Please `pip install "
                          "Flask-Cache`.")
        else:
            cache_conf = {
                'CACHE_DEFAULT_TIMEOUT': 60,
                'CACHE_KEY_PREFIX': 'graphite-api:'
            }
            for key, value in config['cache'].items():
                cache_conf['CACHE_{0}'.format(key.upper())] = value
            app.cache = Cache(app, config=cache_conf)

    loaded_config = {'functions': {}}
    for functions in config['functions']:
        loaded_config['functions'].update(load_by_path(functions))

    if 'carbon' in config:
        if 'hashing_keyfunc' in config['carbon']:
            config['carbon']['hashing_keyfunc'] = load_by_path(
                config['carbon']['hashing_keyfunc'])
        else:
            config['carbon']['hashing_keyfunc'] = lambda x: x
    loaded_config['carbon'] = config.get('carbon', None)

    finders = []
    for finder in config['finders']:
        finders.append(load_by_path(finder)(config))
    path = config.get('tagdb',
                      {}).get('path') or 'graphite_api.tags.base.DummyTagDB'
    tagdb = get_tagdb(path, config, app.cache)
    loaded_config['store'] = Store(finders, tagdb=tagdb)
    app.config['GRAPHITE'] = loaded_config
    app.config['TIME_ZONE'] = config['time_zone']
    logger.info("configured timezone", timezone=app.config['TIME_ZONE'])

    if 'sentry_dsn' in config:
        try:
            from raven.contrib.flask import Sentry
        except ImportError:
            warnings.warn("'sentry_dsn' is provided in the configuration but "
                          "the sentry client is not installed. Please `pip "
                          "install raven[flask]`.")
        else:
            Sentry(app, dsn=config['sentry_dsn'])

    app.wsgi_app = TrailingSlash(
        CORS(app.wsgi_app, config.get('allowed_origins')))
    if config.get('render_errors', True):
        app.errorhandler(500)(error_handler)
Example #33
from models import *
import json
from elasticsearch import Elasticsearch
from flask.ext.cache import Cache
import redis
init_db()

app = Flask(__name__)


es = Elasticsearch([{'host': 'localhost', 'port': 9200}])

cache = Cache(app, config={
    'CACHE_TYPE': 'redis',
    'CACHE_KEY_PREFIX': 'redfcache',
    'CACHE_REDIS_HOST': '127.0.0.1',
    'CACHE_REDIS_PORT': '6379',
    'CACHE_REDIS_URL': 'redis://127.0.0.1:6379'
    })
@app.route('/alldoctors')
def get_all_doctors():
    doctor_data = {}
    doclist=[]
    for d in Doctor.query.all():
    	print d.name
    	doctor_data = {}
    	clinicList = []
    	doctor_data['name'] = d.name
    	doctor_data['id'] = d.id
    	doctor_data['email'] = d.email
    	doctor_data['recommendations']=d.recommendations
Example #34
from flask.ext.cache import Cache

#error_handler
from error_handler import ExceptionHandler

#watson stuff
from watson_developer_cloud import AlchemyLanguageV1
from watsonapi import WatsonAPI
import json

#custom stuff
from readabilityScore import *
from ConfigParser import SafeConfigParser

app = Flask(__name__)
cache = Cache(config={'CACHE_TYPE': 'null'})
cache.init_app(app)
auth = HTTPBasicAuth()
app.debug = True

# Snippet taken the Flask documentation site: http://flask.pocoo.org/docs/0.10/patterns/fileuploads/
app.config['UPLOAD_FOLDER'] = 'app/uploadFiles/'
app.config['ALLOWED_EXTENSIONS'] = set(['txt', 'bib', 'xls'])

parser = reqparse.RequestParser()
parser.add_argument('person_id', type=str)
parser.add_argument('content', type=str)
parser.add_argument('features', action='append')


@auth.get_password
Example #35
from flask.json import JSONEncoder

db = SQLAlchemy(app)
migrate = Migrate(app, db)

manager = Manager(app)
manager.add_command('db', MigrateCommand)

from user_command import UserCommand

manager.add_command('user', UserCommand)

login_manager = LoginManager()
login_manager.init_app(app)

cache = Cache(app, config={'CACHE_TYPE': 'simple'})

# Customize jsonify


class CustomJSONEncoder(JSONEncoder):
    def default(self, obj):
        try:
            if isinstance(obj, datetime):
                if obj.utcoffset() is not None:
                    obj = obj - obj.utcoffset()
                millis = int(
                    calendar.timegm(obj.timetuple()) * 1000 +
                    obj.microsecond / 1000)
                return millis
            iterable = iter(obj)
Example #36
def create_app():
    app = Flask(__name__)
    app.config.from_object('config')
    wfilehandler = logging.FileHandler('werkzeug.log')
    wfilehandler.setLevel(logging.DEBUG)
    wlog = logging.getLogger('werkzeug')
    wlog.setLevel(logging.DEBUG)
    wlog.addHandler(wfilehandler)
    filehandler = logging.FileHandler('flask.log')
    filehandler.setLevel(logging.DEBUG)
    app.logger.setLevel(logging.DEBUG)
    app.logger.addHandler(filehandler)
    return app


app = create_app()
cache = Cache(app,
              config={
                  'CACHE_TYPE': 'simple',
                  'CACHE_DEFAULT_TIMEOUT': 1000000000
              })

from app import views, models
from database import db


@app.teardown_appcontext
def shutdown_session(exception=None):
    db.remove()
Example #37
import collections
import copy
import geojson
import json
from obspy.imaging.mopad_wrapper import Beach
import io
import inspect
import numpy as np
import os

WEBSERVER_DIRECTORY = os.path.dirname(
    os.path.abspath(inspect.getfile(inspect.currentframe())))
STATIC_DIRECTORY = os.path.join(WEBSERVER_DIRECTORY, "static")

app = flask.Flask("LASIF Webinterface", static_folder=STATIC_DIRECTORY)
cache = Cache()


def make_cache_key(*args, **kwargs):
    path = flask.request.path
    args = str(hash(frozenset(flask.request.args.items())))
    return (path + args).encode('utf-8')


@app.route("/rest/domain.geojson")
def get_domain_geojson():
    """
    Return the domain as GeoJSON multipath.
    """
    domain = app.comm.project.domain
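
The make_cache_key helper above is the kind of callable Flask-Cache accepts as a cached-view key_prefix, so each path-plus-querystring combination gets its own cache entry. A minimal sketch of how it might be wired up, assuming cache.init_app(app) has been called; the route name and timeout here are illustrative, not from the original file:

@app.route("/rest/example.json")
@cache.cached(timeout=60, key_prefix=make_cache_key)  # cache key varies with path + query args
def get_example_json():
    # Hypothetical view used only to illustrate hooking up the key_prefix callable.
    return flask.jsonify({"ok": True})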
Example #38
# -*- coding: UTF-8 -*-
"""
This is a util methods to work with flask-cache.
how-to:

variable = fromcache('<cachekey>') or tocache('<cachekey>', <live-object> )

print variable

"""
from flask.ext.cache import Cache
# The default timeout must be supplied through the cache config (setting an
# attribute on the Cache instance has no effect). 600 seconds = 10 minutes.
cache = Cache(config={'CACHE_DEFAULT_TIMEOUT': 600})

def fromcache(name):
	o = cache.get(name)
	if o:
		return o
	else:
		return False

def tocache(name, obj):
	global cache
	cache.add(name, obj)
	return obj

def removecache(name):
	global cache
	cache.delete(name)
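
A minimal usage sketch for these helpers, assuming the cache has been initialized against the application (cache.init_app(app)) and is used inside an application context; load_settings is a hypothetical expensive loader, not part of the module above:

def load_settings():
    # Stand-in for any slow call worth caching (DB query, remote API, ...).
    return {'theme': 'dark'}

# First call misses the cache and stores the value; later calls return the cached copy.
settings = fromcache('settings') or tocache('settings', load_settings())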
Example #39
from flask import Flask
from flask.ext.cache import Cache

app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')
# Check Configuring Flask-Cache section for more details
cache = Cache(app, config={'CACHE_TYPE': 'memcached'})

from db import init_db

init_db()

from api import views, model
Example #40
import os, json
from urlparse import urlparse, urljoin
from notifications import send_prr_email
from spam import is_spam, is_working_akismet_key
from requests import get
from time import time
from flask.ext.cache import Cache
from recaptcha.client import captcha
from timeout import timeout

# Initialize login
login_manager = LoginManager()
login_manager.init_app(app)

# Initialize cache
cache = Cache()
cache.init_app(app, config={"CACHE_TYPE": "simple"})


# Submitting a new request
def new_request(passed_recaptcha=False, data=None):
    if data or request.method == "POST":
        if not data and not passed_recaptcha:
            data = request.form.copy()
        email = data["request_email"]
        request_text = data["request_text"]
        if request_text == "":
            return render_template("error.html", message="You cannot submit an empty request.")
        if email == "" and "ignore_email" not in data and not passed_recaptcha:
            return render_template("missing_email.html", form=data, user_id=get_user_id())
        if (app.config["ENVIRONMENT"] == "PRODUCTION") and is_spam(request_text) and not passed_recaptcha:
Example #41
from flask.ext.blogging import SQLAStorage, BloggingEngine
from flask.ext.principal import identity_changed, Identity, AnonymousIdentity, identity_loaded, \
    UserNeed, RoleNeed
from flask.ext.cache import Cache

app = Flask(__name__)
app.config["SECRET_KEY"] = "secret"  # for WTF-forms and login
app.config["BLOGGING_URL_PREFIX"] = "/blog"
app.config["BLOGGING_DISQUS_SITENAME"] = "test"
app.config["BLOGGING_SITEURL"] = "http://localhost:8000"
app.config["BLOGGING_SITENAME"] = "My Site"
app.config["BLOGGING_PERMISSIONS"] = False  # Enable blogger permissions'
app.config["CACHE_TYPE"] = "simple"

# create cache
cache = Cache(app)

# extensions
engine = create_engine('sqlite:////tmp/blog.db')
meta = MetaData()
sql_storage = SQLAStorage(engine, metadata=meta)
blog_engine = BloggingEngine(app, sql_storage, cache=cache)
login_manager = LoginManager(app)
meta.create_all(bind=engine)


class User(UserMixin):
    def __init__(self, user_id):
        self.id = user_id

    def get_name(self):
Example #42
from flask import Flask, url_for, request, send_from_directory, redirect
from flask import render_template
from flask.ext.cache import Cache
from jinja2 import FileSystemLoader



cache = Cache(config={'CACHE_TYPE': 'simple'})

app = Flask(__name__)
cache.init_app(app)


@app.route('/')
@cache.cached()
def main():
    return redirect("http://jemyoung.com/about/")


@app.route('/about/')
def about():
    dev = request.args.get('dev', False)
    return render_template('about.html', dev=dev)


@app.route('/blog/')
def blog():
    return redirect("http://blog.jemyoung.com", code=302)

# @app.route('/robots.txt')
# @cache.cached()
Example #43
from flask import Flask
import logging
from flask.ext.cache import Cache
from flask.ext.sqlalchemy import SQLAlchemy

app = Flask('application')

# app.config.from_object('config.ProductionConfig')
app.config.from_object('config.DevelopmentConfig')

db = SQLAlchemy(app)

cache = Cache(app, config={'CACHE_TYPE': 'simple'})
cache.init_app(app)

logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)

import views