Example #1
def create_app(config):
    global app
    app = Flask(
        __name__,
        template_folder='../templates',
        static_folder='../static',
    )
    app.wsgi_app = ProxyFix(app.wsgi_app)
    app.config.update(config)

    app.url_rule_class = GeneratorRule

    app.redis = redis.Redis.from_url(config['REDISCLOUD_URL'])
    app.eventee = {
        'token': config['EVENTEE_TOKEN'],
        'email': config['EVENTEE_EMAIL'],
    }

    from . import views
    from . import login
    from . import login_oauth
    from . import talks
    from . import program
    from . import entrant
    from . import vote
    from . import filters
    from . import service
    from . import workshops
    from . import invoices

    from .admin import admin

    app.register_blueprint(admin, url_prefix='/admin')
    return app
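
# Usage sketch for the factory above. The package name `web` is a placeholder
# (the snippet does not show where create_app lives); only the config keys are
# taken from the code above, the values are illustrative.
from web import create_app

application = create_app({
    'REDISCLOUD_URL': 'redis://localhost:6379/0',
    'EVENTEE_TOKEN': 'changeme',
    'EVENTEE_EMAIL': 'admin@example.com',
})
application.run(debug=True)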
Example #2
def create_app(config):
    global app
    app = Flask(
        __name__,
        template_folder='../templates',
        static_folder='../static',
    )
    app.wsgi_app = ProxyFix(app.wsgi_app)
    app.config.update(config)

    app.url_rule_class = GeneratorRule

    app.redis = redis.Redis.from_url(config['REDISCLOUD_URL'])
    app.eventee = {
        'token': config['EVENTEE_TOKEN'],
        'email': config['EVENTEE_EMAIL'],
    }

    import views
    import login
    import login_oauth
    import talks
    import program
    import entrant
    import vote
    import filters
    import service
    import workshops
    import invoices
    return app
Example #3
def create_app():
    duchess = Flask(__name__, static_url_path='/static',
                    static_folder='assets')
    env = os.getenv('DUCHESS_ENV', 'Development')
    try:
        duchess.config.from_object('config.%s' % env)
    except ImportError:  # Assuming Heroku
        duchess.config.update(
            APP_DIR=os.path.abspath(os.path.dirname(__file__)),
            PROJECT_ROOT=os.path.abspath(
                os.path.join(os.path.dirname(__file__), os.pardir)
            ),
            SECRET_KEY=os.urandom(24),
            DEBUG=bool(os.getenv('DEBUG')),
            REDIS_URL=os.getenv('REDISCLOUD_URL'),
        )
    duchess.redis = Redis(duchess)

    if duchess.debug:
        from flask.ext.debugtoolbar import DebugToolbarExtension
        DebugToolbarExtension(duchess)

        autocompiler.watch_assets('duchess/assets')

    init_views(duchess)
    duchess.register_blueprint(api_router)

    return duchess
Example #4
def create_app(config):
    global app
    app = Flask(
        __name__,
        template_folder='../templates',
        static_folder='../static'
    )
    app.wsgi_app = ProxyFix(app.wsgi_app)
    app.config.update(config)

    app.url_rule_class = GeneratorRule

    app.redis = redis.Redis.from_url(config['REDISCLOUD_URL'])

    import views
    import login
    import login_oauth
    import talks
    import program
    import entrant
    import vote
    import filters
    import presenters_go
    import service
    return app
Example #5
def create_app(app_name='web_app'):
    app = Flask(app_name)
    CORS(app)

    app.config['DEBUG'] = True
    app.register_blueprint(api)
    app.redis = redis.StrictRedis(host='localhost', port=6379, db=0)
    return app
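
# A short test-client sketch for the factory above. The route path and Origin
# value are placeholders; the real routes live on the `api` blueprint, which is
# not shown in the snippet.
app = create_app()
with app.test_client() as client:
    resp = client.get('/', headers={'Origin': 'http://example.com'})
    print(resp.status_code, resp.headers.get('Access-Control-Allow-Origin'))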
Example #6
def create_app(log_level="INFO", config="config.yml"):
    app = Flask(__name__)
    app.secret_key = 'test'
    app.config.from_object(__name__)

    config_vars = yaml.load(open(os.path.join(root, config)))
    # inject all the yaml configs
    app.config.update(config_vars)
    db.init_app(app)
    Migrate(app, db)

    # Setup redis
    redis_config = app.config.get('redis_conn', dict(type='live'))
    typ = redis_config.pop('type')
    if typ == "mock_redis":
        from mockredis import mock_redis_client
        app.redis = mock_redis_client()
    else:
        app.redis = Redis(**redis_config)

    del app.logger.handlers[0]
    app.logger.setLevel(logging.NOTSET)
    log_format = logging.Formatter('%(asctime)s [%(name)s] [%(levelname)s]: %(message)s')
    log_level = getattr(logging, str(log_level), app.config.get('log_level', "INFO"))

    logger = logging.getLogger()
    logger.setLevel(log_level)
    handler = logging.StreamHandler(stream=sys.stdout)
    handler.setFormatter(log_format)
    logger.addHandler(handler)

    # Dynamically add all the filters in the filters.py file
    for name, func in inspect.getmembers(filters, inspect.isfunction):
        app.jinja_env.filters[name] = func

    app.rpc_connection = Proxy(
        "http://{0}:{1}@{2}:{3}/"
        .format(app.config['coinserv']['username'],
                app.config['coinserv']['password'],
                app.config['coinserv']['address'],
                app.config['coinserv']['port'])
        )

    from . import views
    app.register_blueprint(views.main)
    return app
Example #7
def create_app(config):
    global app
    application = Flask(__name__)
    application.secret_key = "jednadvehonzajde"
    application.config.update(config)

    application.redis = redis.Redis()

    app = application

    import views
    import login
    import login_oauth
    import talks
    import filters

    return app
Example #8
def create_app():
    """Return an instance of the main Flask application."""
    app = Flask(package_name)

    # TODO: do some config
    app.redis = StrictRedis()

    from .error import register_error_handler, html_handler
    register_error_handler(app, html_handler)

    from .session import LazyRedisSessionInterface
    app.session_interface = LazyRedisSessionInterface()

    from .views import views
    app.register_blueprint(views)

    return app
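
# A minimal sketch of what a Redis-backed session interface generally looks like.
# LazyRedisSessionInterface itself is not shown in the snippet, so this stand-in
# only illustrates the open_session/save_session pattern, not the project's
# actual implementation.
import json
import uuid

from flask.sessions import SessionInterface, SessionMixin
from werkzeug.datastructures import CallbackDict


class RedisBackedSession(CallbackDict, SessionMixin):
    def __init__(self, initial=None, sid=None):
        def on_update(d):
            d.modified = True
        CallbackDict.__init__(self, initial, on_update)
        self.sid = sid
        self.modified = False


class SketchRedisSessionInterface(SessionInterface):
    def __init__(self, redis_client, prefix='session:'):
        self.redis = redis_client
        self.prefix = prefix

    def open_session(self, app, request):
        sid = request.cookies.get(app.config['SESSION_COOKIE_NAME'])
        if sid:
            stored = self.redis.get(self.prefix + sid)
            if stored is not None:
                return RedisBackedSession(json.loads(stored), sid=sid)
        return RedisBackedSession(sid=uuid.uuid4().hex)

    def save_session(self, app, session, response):
        if session.modified:
            self.redis.set(self.prefix + session.sid, json.dumps(dict(session)))
        response.set_cookie(app.config['SESSION_COOKIE_NAME'], session.sid,
                            httponly=True)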
Example #9
def create_app(setting, debug=False):

    app = Flask(__name__)
    app.config.from_object(setting)
    app.debug = debug
    app.redis = redis.StrictRedis(host=setting.REDIS_HOST, port=setting.REDIS_PORT, db=0)
    Cache.redis = app.redis
    # users routes
    user_view = UserHandler.as_view('user_api')
    app.add_url_rule(
        '/user/',
        view_func=user_view,
        methods=['POST']
    )
    app.add_url_rule(
        '/user/<string:name>',
        view_func=user_view,
        methods=['GET', 'PUT', 'DELETE']
    )
    # urls routes
    url_view = UrlHandler.as_view('url_api')
    app.add_url_rule(
        '/user/<string:name>/url',
        view_func=url_view,
        methods=['POST']
    )
    app.add_url_rule(
        '/user/<string:name>/url/<string:shortened>',
        view_func=url_view,
        methods=['GET', 'PUT', 'DELETE']
    )
    # redirect routes
    url_view = RedirectHandler.as_view('redirect_api')
    app.add_url_rule(
        '/<string:shortened>',
        view_func=url_view,
        methods=['GET']
    )

    db.init_app(app)
    return app
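
# The handlers above (UserHandler, UrlHandler, RedirectHandler) are not shown;
# they are presumably flask.views.MethodView subclasses, since as_view() plus
# per-method dispatch is that pattern. An illustrative stand-in, not the real code:
from flask import jsonify
from flask.views import MethodView


class ExampleUserHandler(MethodView):
    def post(self):
        # would create a user from the request payload
        return jsonify(created=True), 201

    def get(self, name):
        # would look the user up, e.g. via app.redis or the database
        return jsonify(name=name)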
Example #10
def create_app():
    app = Flask(__name__)
    app.config.from_pyfile('settings.cfg')
    app.config.from_pyfile('local_settings.cfg', silent=True)

    app.redis = StrictRedis(db=app.config['REDIS_DB'])

    assets = Environment(app)
    assets.config['stylus_plugins'] = ['nib']
    assets.config['stylus_extra_args'] = ['--inline', '--include', 'static']
    assets.register(
        'css',
        '../styles/normalize.styl',
        '../styles/screen.styl',
        filters='stylus,cssmin', output='gen/screen.css')
    assets.register(
        'pinmeal',
        'vendor/jquery-1.8.2.js',
        'vendor/underscore.js',
        'vendor/backbone.js',
        'vendor/handlebars.runtime-1.0.rc.1.js',
        Bundle(
            '../scripts/spot*.handlebars',
            depends='dummy',  # to work around a webassets caching bug
            filters='handlebars', output='gen/pinmeal-handlebars.js'),
        Bundle(
            '../scripts/pinmeal.coffee',
            filters='coffeescript', output='gen/pinmeal-coffee.js'),
        filters='uglifyjs', output='gen/pinmeal.js')

    from main import main
    app.register_blueprint(main)
    #from pinmeal import pinmeal
    #app.register_blueprint(pinmeal, url_prefix='/pinmeal')

    return app
Example #11
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
from flask.ext.principal import Principal
from redis import Redis


app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
login_manager = LoginManager()
login_manager.init_app(app)
principals = Principal(app)

# init redis and set it as an app internal variable
app.redis = Redis(host=app.config.get('REDIS_HOST', None),
                  port=app.config.get('REDIS_PORT', None),
                  password=app.config.get('REDIS_PASS', None))

from saulify import views, models
Example #12
def create_app(mode, configs=None, log_level=None, **kwargs):
    # Allow configuration information to be specified with environment vars
    env_configs = {}
    for key in os.environ:
        if key.startswith('SIMPLECOIN_CONFIG'):
            env_configs[key] = os.environ[key]

    env_configs = [env_configs[value] for value in sorted(env_configs)]

    configs = ['defaults.toml'] + (env_configs or []) + (configs or [])
    if len(configs) == 1:
        print("Unable to start with only the default config values! {}"
              .format(configs))
        exit(2)

    config_vars = {}
    for config in configs:
        if isinstance(config, basestring):
            if os.path.isabs(config):
                config_path = config
            else:
                config_path = os.path.join(root, config)
            config = open(config_path)

        updates = toml.loads(config.read())
        toml.toml_merge_dict(config_vars, updates)

    # Initialize our flask application
    # =======================================================================
    app = Flask(__name__, static_folder='../static', static_url_path='/static')
    app.jinja_loader = FileSystemLoader(os.path.join(root, 'templates'))

    # Objectizes all configurations
    # =======================================================================
    ConfigChecker(config_vars, app)

    # Setup logging
    # =======================================================================
    del app.logger.handlers[0]
    app.logger.setLevel(logging.NOTSET)
    log_format = logging.Formatter('%(asctime)s [%(name)s] [%(levelname)s]: %(message)s')
    log_level = getattr(logging, str(log_level), app.config['log_level'])

    logger = logging.getLogger()
    logger.setLevel(log_level)
    handler = logging.StreamHandler(stream=sys.stdout)
    handler.setFormatter(log_format)
    logger.addHandler(handler)

    # Handle optionally adding log file writers for each different run mode
    # =======================================================================
    if mode == "manage" and app.config['manage_log_file']:
        hdlr = logging.FileHandler(app.config['manage_log_file'])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)
    if mode == "scheduler" and app.config['scheduler_log_file']:
        hdlr = logging.FileHandler(app.config['scheduler_log_file'])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)
    if mode == "webserver" and app.config['webserver_log_file']:
        hdlr = logging.FileHandler(app.config['webserver_log_file'])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)

    logging.getLogger("gunicorn.access").setLevel(logging.WARN)
    logging.getLogger("requests.packages.urllib3.connectionpool").setLevel(logging.INFO)

    # Add the debug toolbar if we're in debug mode
    # =======================================================================
    if app.config['DEBUG'] and mode == "webserver":
        # Log all stdout and stderr when in debug mode for convenience
        class LoggerWriter:
            def __init__(self, logger, level):
                self.logger = logger
                self.level = level

            def write(self, message):
                if message != '\n':
                    self.logger.log(self.level, message)

        sys.stdout = LoggerWriter(app.logger, logging.DEBUG)
        sys.stderr = LoggerWriter(app.logger, logging.DEBUG)

    # Register the powerpool datastore + Cache
    # =======================================================================
    db.init_app(app)
    babel.init_app(app)
    app.config['BABEL_DEFAULT_LOCALE'] = app.config.get('default_locale')

    def configure_redis(config):
        typ = config.pop('type')
        if typ == "mock_redis":
            from mockredis import mock_redis_client
            return mock_redis_client()
        return Redis(**config)

    cache_config = app.config.get('main_cache', dict(type='live'))
    cache_redis = configure_redis(cache_config)

    ds_config = app.config.get('redis_conn', dict(type='live'))
    ds_redis = configure_redis(ds_config)

    # Take advantage of the fact that werkzeug lets the host kwargs be a Redis
    # compatible object
    cache.init_app(app, config=dict(CACHE_TYPE='redis', CACHE_REDIS_HOST=cache_redis))
    app.redis = ds_redis

    sentry = False
    if app.config.get('sentry'):
        try:
            from raven.contrib.flask import Sentry
            sentry = Sentry()
        except Exception:
            app.logger.error("Unable to initialize sentry!")

    # Helpful global vars
    # =======================================================================
    app.SATOSHI = Decimal('0.00000001')
    app.MAX_DECIMALS = 28

    # Configure app for running manage.py functions
    # =======================================================================
    if mode == "manage" or mode == "webserver":
        # Dynamically add all the filters in the filters.py file
        for name, func in inspect.getmembers(filters, inspect.isfunction):
            app.jinja_env.filters[name] = func

    if mode == "manage":
        # Initialize the migration settings
        Migrate(app, db)
        # Disable for management mode
        if sentry:
            sentry = False

    # Configure app for serving web content
    # =======================================================================
    elif mode == "webserver":
        # try and fetch the git version information
        try:
            output = subprocess.check_output("git show -s --format='%ci %h'",
                                             shell=True).strip().rsplit(" ", 1)
            app.config['hash'] = output[1]
            app.config['revdate'] = output[0]
        # celery won't work with this, so set some default
        except Exception:
            app.config['hash'] = ''
            app.config['revdate'] = ''

        app.logger.info("Starting up SimpleCoin!\n{}".format("=" * 100))

    # Configure app for running scheduler.py functions + instantiate scheduler
    # =======================================================================
    elif mode == "scheduler":
        if sentry and 'SENTRY_NAME' in app.config:
            app.config['SENTRY_NAME'] = app.config['SENTRY_NAME'] + "_scheduler"

        app.logger.info("=" * 80)
        app.logger.info("SimpleCoin cron scheduler starting up...")
        setproctitle.setproctitle("simplecoin_scheduler")

        sched = Scheduler(standalone=True)

        # monkey patch the scheduler to wrap each job call in its own flask
        # context. Kind of sloppy way to pass in the app context...
        Scheduler.app = app
        Scheduler._old_run_job = Scheduler._run_job

        def _run_job(self, *args, **kwargs):
            with self.app.app_context():
                Scheduler._old_run_job(self, *args, **kwargs)
        Scheduler._run_job = _run_job

        stage_tasks = set(["cache_profitability", "leaderboard",
                           "server_status", "update_network",
                           "cache_user_donation", "update_online_workers"])
        for task_config in app.config['tasks']:
            if not task_config.get('enabled', False):
                continue
            if app.config['stage'] and task_config['name'] not in stage_tasks:
                app.logger.debug(
                    "Skipping scheduling {} because in stage mode!"
                    .format(task_config['name']))
                continue

            stripped_config = task_config.copy()
            del stripped_config['enabled']
            task = getattr(sch, task_config['name'])
            sched.add_cron_job(task, **stripped_config)

        app.scheduler = sched

    if sentry:
        sentry.init_app(app, logging=True, level=logging.ERROR)

    # Route registration
    # =======================================================================
    from . import views, models, api, rpc_views
    app.register_blueprint(views.main)
    app.register_blueprint(rpc_views.rpc_views)
    app.register_blueprint(api.api, url_prefix='/api')

    return app
Example #13
def create_app(testing=False, live=False):
    from bhs_api.models import User, Role
    from bhs_api.forms import LoginForm

    app = Flask(__name__)
    app.testing = testing

    # load the config file
    conf = get_conf()
    app.conf = conf
    # Our config - need to move everything here
    app.config['VIDEO_BUCKET_URL'] = "https://storage.googleapis.com/bhs-movies"
    app.config['IMAGE_BUCKET_URL'] = "https://storage.googleapis.com/bhs-flat-pics"

    # Set app config
    app.config['DEBUG'] = True
    app.config['FRONTEND_SERVER'] = conf.frontend_server
    app.config['DEFAULT_NEXT'] = '/mjs'
    # Security Config
    app.config['SECRET_KEY'] = conf.secret_key
    app.config['WTF_CSRF_ENABLED'] = False
    app.config['SECURITY_PASSWORDLESS'] = True
    app.config['SECURITY_EMAIL_SENDER'] = 'BH Databases<support@bh.org.il>'
    app.config['SECURITY_USER_IDENTITY_ATTRIBUTES'] = 'email'
    app.config['SECURITY_EMAIL_SUBJECT_PASSWORDLESS'] = 'Login link for Your Jewish Story'
    app.config['SECURITY_POST_LOGIN_VIEW'] = '/mjs'
    app.config['SECURITY_USER_IDENTITY_ATTRIBUTES'] = ('email', 'username', 'hash')
    # Mail Config
    app.config['MAIL_SERVER'] = conf.mail_server
    app.config['MAIL_PORT'] = conf.mail_port
    # Mail optional username and password
    try:
        app.config['MAIL_USERNAME'] = conf.mail_username
        app.config['MAIL_PASSWORD'] = conf.mail_password
    except AttributeError:
        pass

    # DB Config
    app.config['MONGODB_DB'] = conf.user_db_name
    app.config['MONGODB_HOST'] = conf.user_db_host
    app.config['MONGODB_PORT'] = conf.user_db_port
    # Redis
    app.config['REDIS_HOST'] = conf.redis_host
    app.config['REDIS_PORT'] = conf.redis_port
    app.config['REDIS_PASSWORD'] = getattr(conf, 'redis_password', None)

    # CACHING
    app.config['CACHING_TTL'] = conf.caching_ttl

    app.mail = Mail(app)
    app.db = MongoEngine(app)
    app.user_datastore = MongoEngineUserDatastore(app.db, User, Role)
    app.security = Security(app, app.user_datastore,
                            passwordless_login_form=LoginForm)
    # Create database connection object
    app.client_data_db = pymongo.MongoClient(conf.data_db_host, conf.data_db_port,
                    read_preference=pymongo.ReadPreference.SECONDARY_PREFERRED)
    app.data_db = app.client_data_db[conf.data_db_name]

    # Create the elasticsearch connection
    app.es = elasticsearch.Elasticsearch(conf.elasticsearch_host)
    app.es_data_db_index_name = getattr(conf, "elasticsearch_data_index", app.data_db.name)

    # Add the user's endpoints
    from bhs_api.user import user_endpoints
    app.register_blueprint(user_endpoints)
    # Add the v1 endpoint
    from bhs_api.v1_endpoints import v1_endpoints
    app.register_blueprint(v1_endpoints, url_prefix='/v1')
    # Initialize autodoc - https://github.com/acoomans/flask-autodoc
    #allow CORS
    cors = CORS(app, origins=['*'], headers=['content-type', 'accept',
                                            'authentication-token', 'Authorization'])
    # logging
    if live:
        app.config['PROPAGATE_EXCEPTIONS'] = True
        try:
            fh = logging.FileHandler(conf.log_file)
            formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
            fh.setFormatter(formatter)
            app.logger.addHandler(fh)
        except AttributeError:
            pass

    # redis
    try:
        app.redis = redis.StrictRedis(host=conf.redis_host,
                                      port=conf.redis_port,
                                      password=app.config['REDIS_PASSWORD'],
                                      db=0)
    except AttributeError:
        app.redis = None

    return app, conf
Example #14
# -*- coding: utf-8 -*-
import os
import redis

from flask import Flask
from flask import Response
from flask import json

app = Flask(__name__)
app.redis = redis.StrictRedis(host=os.getenv('WERCKER_REDIS_HOST', 'localhost'),
                              port=6379, db=0)

@app.route("/clouds.json")
def clouds():
  data = app.redis.lrange("clouds", 0, -1)
  resp = Response(json.dumps(data), status=200, mimetype='application/json')
  return resp

if __name__ == "__main__":
  port = int(os.getenv('PORT', 5000))
  app.run(host='0.0.0.0', port=port)
Example #15
# -*- coding: utf-8 -*-
from flask import Flask, current_app, render_template
import redis
import time
import json


REDIS_HOST = '127.0.0.1'
REDIS_PORT = 6379

app = Flask(__name__)
app.debug = True
app.redis = None
app.ds = {
    'name': 'ds-articles-ok'
}

@app.before_first_request
def first():
    current_app.redis = redis.StrictRedis(host=REDIS_HOST,
                                          port=REDIS_PORT)

@app.route('/', methods=['GET'])
def welcome():
    now = int(time.time())
    items = current_app.redis.zrange(current_app.ds['name'], 0, now)

    return render_template('index.html', 
                           keys=[json.loads(item) for item in items])

if __name__ == '__main__':
    # body missing in the original snippet; a plain dev-server start is assumed
    app.run()
Example #16
# Create the Flask-based application
app = Flask('tongbupan')

from werkzeug.contrib.fixers import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app)

app.config.from_object(config)
app.debug = app.config.get('DEBUG', False)

app.secret_key = app.config['SECRET_KEY']
app.selnk_key = app.config["SELNK_KEY"]

assets = Assets(app)

from redis import Redis
app.redis = Redis(app.config['REDIS_HOST'], app.config['REDIS_PORT'],
                  app.config['REDIS_DB'], app.config['REDIS_PASSWORD'])
app.session_interface = RedisSessionInterface(app.redis)

from lib.cache import init_cache
init_cache(app)

from flask.ext.pymongo import PyMongo
app.mongo = PyMongo(app, config_prefix='MONGO')

from lib.tokenstore import RedisTokenStore
RedisTokenStore.setup_store(app)

# Initialize the process-global data model objects
models.setup(app)

Example #17
def create_instance():
    """
    Construct a new Flask instance and return it.
    """
    import os

    app = Flask(__name__)
    app.config.from_object('notifico.default_config')

    if app.config.get('NOTIFICO_ROUTE_STATIC'):
        # We should handle routing for static assets ourselves (handy for
        # small and quick deployments).
        import os.path
        from werkzeug import SharedDataMiddleware

        app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
            '/': os.path.join(os.path.dirname(__file__), 'static')
        })

    if not app.debug:
        # If sentry (http://getsentry.com) is configured for
        # error collection we should use it.
        if app.config.get('SENTRY_DSN'):
            sentry.dsn = app.config.get('SENTRY_DSN')
            sentry.init_app(app)

    # Setup our redis connection (which is already thread safe)
    app.redis = Redis(
        host=app.config['REDIS_HOST'],
        port=app.config['REDIS_PORT'],
        db=app.config['REDIS_DB']
    )
    # Attach Flask-Cache to our application instance. We override
    # the backend configuration settings because we only want one
    # Redis instance.
    cache.init_app(app, config={
        'CACHE_TYPE': 'redis',
        'CACHE_REDIS_HOST': app.redis,
        'CACHE_OPTIONS': {
            'key_prefix': 'cache_'
        }
    })
    # Attach Flask-Mail to our application instance.
    mail.init_app(app)
    # Attach Flask-SQLAlchemy to our application instance.
    db.init_app(app)

    # Update celery's configuration with our application config.
    celery.config_from_object(app.config)

    # Import and register all of our blueprints.
    from notifico.views.account import account
    from notifico.views.public import public
    from notifico.views.projects import projects
    from notifico.views.pimport import pimport
    from notifico.views.admin import admin

    app.register_blueprint(account, url_prefix='/u')
    app.register_blueprint(projects)
    app.register_blueprint(public)
    app.register_blueprint(pimport, url_prefix='/i')
    app.register_blueprint(admin, url_prefix='/_')

    # Register our custom error handlers.
    from notifico.views import errors

    app.error_handler_spec[None][500] = errors.error_500

    # cia.vc XML-RPC kludge.
    from notifico.services.hooks.cia import handler
    handler.connect(app, '/RPC2')

    # Setup some custom Jinja2 filters.
    app.jinja_env.filters['pretty_date'] = pretty.pretty_date
    app.jinja_env.filters['plural'] = pretty.plural
    app.jinja_env.filters['fix_link'] = pretty.fix_link

    return app
Example #18
def create_app(mode, configs=None, log_level=None, **kwargs):
    # Allow configuration information to be specified with environment vars
    env_configs = {}
    for key in os.environ:
        if key.startswith('SIMPLECOIN_CONFIG'):
            env_configs[key] = os.environ[key]

    env_configs = [env_configs[value] for value in sorted(env_configs)]

    configs = ['defaults.toml'] + (env_configs or []) + (configs or [])
    if len(configs) == 1:
        print("Unable to start with only the default config values! {}"
              .format(configs))
        exit(2)

    config_vars = {}
    for config in configs:
        if isinstance(config, basestring):
            if os.path.isabs(config):
                config_path = config
            else:
                config_path = os.path.join(root, config)
            config = open(config_path)

        updates = toml.loads(config.read())
        toml.toml_merge_dict(config_vars, updates)

    # Initialize our flask application
    # =======================================================================
    app = Flask(__name__, static_folder='../static', static_url_path='/static')
    app.jinja_loader = FileSystemLoader(os.path.join(root, 'templates'))

    # Objectizes all configurations
    # =======================================================================
    ConfigChecker(config_vars, app)

    # Setup logging
    # =======================================================================
    del app.logger.handlers[0]
    app.logger.setLevel(logging.NOTSET)
    log_format = logging.Formatter('%(asctime)s [%(name)s] [%(levelname)s]: %(message)s')
    log_level = getattr(logging, str(log_level), app.config['log_level'])

    logger = logging.getLogger()
    logger.setLevel(log_level)
    handler = logging.StreamHandler(stream=sys.stdout)
    handler.setFormatter(log_format)
    logger.addHandler(handler)

    # Handle optionally adding log file writers for each different run mode
    # =======================================================================
    if mode == "manage" and app.config['manage_log_file']:
        hdlr = logging.FileHandler(app.config['manage_log_file'])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)
    if mode == "scheduler" and app.config['scheduler_log_file']:
        hdlr = logging.FileHandler(app.config['scheduler_log_file'])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)
    if mode == "webserver" and app.config['webserver_log_file']:
        hdlr = logging.FileHandler(app.config['webserver_log_file'])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)

    logging.getLogger("gunicorn.access").setLevel(logging.WARN)
    logging.getLogger("requests.packages.urllib3.connectionpool").setLevel(logging.INFO)

    # Add the debug toolbar if we're in debug mode
    # =======================================================================
    if app.config['DEBUG'] and mode == "webserver":
        # Log all stdout and stderr when in debug mode for convenience
        class LoggerWriter:
            def __init__(self, logger, level):
                self.logger = logger
                self.level = level

            def write(self, message):
                if message != '\n':
                    self.logger.log(self.level, message)

        sys.stdout = LoggerWriter(app.logger, logging.DEBUG)
        sys.stderr = LoggerWriter(app.logger, logging.DEBUG)

    # Register the powerpool datastore + Cache
    # =======================================================================
    db.init_app(app)

    def configure_redis(config):
        typ = config.pop('type')
        if typ == "mock_redis":
            from mockredis import mock_redis_client
            return mock_redis_client()
        return Redis(**config)

    cache_config = app.config.get('main_cache', dict(type='live'))
    cache_redis = configure_redis(cache_config)

    ds_config = app.config.get('redis_conn', dict(type='live'))
    ds_redis = configure_redis(ds_config)

    # Take advantage of the fact that werkzeug lets the host kwargs be a Redis
    # compatible object
    cache.init_app(app, config=dict(CACHE_TYPE='redis', CACHE_REDIS_HOST=cache_redis))
    app.redis = ds_redis

    sentry = False
    if app.config.get('sentry'):
        try:
            from raven.contrib.flask import Sentry
            sentry = Sentry()
        except Exception:
            app.logger.error("Unable to initialize sentry!")

    # Helpful global vars
    # =======================================================================
    app.SATOSHI = Decimal('0.00000001')
    app.MAX_DECIMALS = 28

    # Configure app for running manage.py functions
    # =======================================================================
    if mode == "manage" or mode == "webserver":
        # Dynamically add all the filters in the filters.py file
        for name, func in inspect.getmembers(filters, inspect.isfunction):
            app.jinja_env.filters[name] = func

    if mode == "manage":
        # Initialize the migration settings
        Migrate(app, db)
        # Disable for management mode
        if sentry:
            sentry = False

    # Configure app for serving web content
    # =======================================================================
    elif mode == "webserver":
        # try and fetch the git version information
        try:
            output = subprocess.check_output("git show -s --format='%ci %h'",
                                             shell=True).strip().rsplit(" ", 1)
            app.config['hash'] = output[1]
            app.config['revdate'] = output[0]
        # celery won't work with this, so set some default
        except Exception:
            app.config['hash'] = ''
            app.config['revdate'] = ''

        app.logger.info("Starting up SimpleCoin!\n{}".format("=" * 100))

    # Configure app for running scheduler.py functions + instantiate scheduler
    # =======================================================================
    elif mode == "scheduler":
        if sentry and 'SENTRY_NAME' in app.config:
            app.config['SENTRY_NAME'] = app.config['SENTRY_NAME'] + "_scheduler"

        app.logger.info("=" * 80)
        app.logger.info("SimpleCoin cron scheduler starting up...")
        setproctitle.setproctitle("simplecoin_scheduler")

        # Make app accessible from our monkey patched code. Messy....
        ThreadPool.app = app
        sched = Scheduler(standalone=True)
        # monkey patch the thread pool for flask contexts
        ThreadPool._old_run_jobs = ThreadPool._run_jobs
        def _run_jobs(self, core):
            self.app.logger.debug("Starting patched threadpool worker!")
            with self.app.app_context():
                ThreadPool._old_run_jobs(self, core)
        ThreadPool._run_jobs = _run_jobs
        # All these tasks actually change the database, and shouldn't
        # be run by the staging server
        if not app.config.get('stage', False):
            sched.add_cron_job(sch.compress_slices, minute='0,15,30,45',
                               second=35)
            # every minute at 55 seconds after the minute
            sched.add_cron_job(sch.generate_credits, second=55)
            sched.add_cron_job(sch.create_trade_req, args=("sell",), minute=1,
                               hour="0,6,12,18")
            sched.add_cron_job(sch.create_trade_req, args=("buy",), minute=1,
                               hour="0,6,12,18")
            # every minute at 55 seconds after the minute
            sched.add_cron_job(sch.collect_minutes, second=35)
            sched.add_cron_job(sch.collect_ppagent_data, second=40)
            # every five minutes 20 seconds after the minute
            sched.add_cron_job(sch.compress_minute,
                               minute='0,5,10,15,20,25,30,35,40,45,50,55',
                               second=20)
            # every hour 2.5 minutes after the hour
            sched.add_cron_job(sch.compress_five_minute, minute=2, second=30)
            # every minute 2 seconds after the minute
            sched.add_cron_job(sch.update_block_state, second=2)
            # every day
            sched.add_cron_job(sch.update_block_state, hour=0, second=0, minute=3)
        else:
            app.logger.info("Stage mode has been set in the configuration, not "
                            "running scheduled database altering cron tasks")

        sched.add_cron_job(sch.update_online_workers,
                           minute='0,5,10,15,20,25,30,35,40,45,50,55',
                           second=30)
        sched.add_cron_job(sch.cache_user_donation, minute='0,15,30,45',
                           second=15)
        sched.add_cron_job(sch.server_status, second=15)
        # every 15 minutes 2 seconds after the minute
        sched.add_cron_job(sch.leaderboard,
                           minute='0,5,10,15,20,25,30,35,40,45,50,55',
                           second=30)

        app.scheduler = sched

    if sentry:
        sentry.init_app(app, logging=True, level=logging.ERROR)

    # Route registration
    # =======================================================================
    from . import views, models, api, rpc_views
    app.register_blueprint(views.main)
    app.register_blueprint(rpc_views.rpc_views)
    app.register_blueprint(api.api, url_prefix='/api')

    return app
Example #19
def create_instance():
    """
    Construct a new Flask instance and return it.
    """
    import os

    app = Flask(__name__)
    app.config.from_object('notifico.default_config')

    if app.config.get('HANDLE_STATIC'):
        # We should handle routing for static assets ourselves (handy for
        # small and quick deployments).
        import os.path
        from werkzeug import SharedDataMiddleware

        app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
            '/': os.path.join(os.path.dirname(__file__), 'static')
        })

    if not app.debug:
        # If sentry (http://getsentry.com) is configured for
        # error collection we should use it.
        if app.config.get('SENTRY_DSN'):
            sentry.dsn = app.config.get('SENTRY_DSN')
            sentry.init_app(app)

    # Setup our redis connection (which is already thread safe)
    app.redis = Redis(
        host=app.config['REDIS_HOST'],
        port=app.config['REDIS_PORT'],
        db=app.config['REDIS_DB']
    )
    cache.init_app(app, config={
        'CACHE_TYPE': 'redis',
        'CACHE_REDIS_HOST': app.redis,
        'CACHE_OPTIONS': {
            'key_prefix': 'cache_'
        }
    })
    db.init_app(app)

    with app.app_context():
        # Let SQLAlchemy create any missing tables.
        db.create_all()

    # Import and register all of our blueprints.
    from notifico.views.account import account
    from notifico.views.public import public
    from notifico.views.projects import projects
    from notifico.views.pimport import pimport
    from notifico.views.admin import admin

    app.register_blueprint(account, url_prefix='/u')
    app.register_blueprint(projects)
    app.register_blueprint(public)
    app.register_blueprint(pimport, url_prefix='/i')
    app.register_blueprint(admin, url_prefix='/_')

    # cia.vc XML-RPC kludge.
    from notifico.services.hooks.cia import handler
    handler.connect(app, '/RPC2')

    # Setup some custom Jinja2 filters.
    app.jinja_env.filters['pretty_date'] = pretty.pretty_date
    app.jinja_env.filters['plural'] = pretty.plural
    app.jinja_env.filters['fix_link'] = pretty.fix_link

    return app
Example #20
from flask import Flask
#from flask import Response
#from flask import json
import redis
app = Flask(__name__)
app.redis = redis.Redis(host='localhost', port=6379, db=0)
from app import views
Example #21
from werkzeug import secure_filename
import flask
import utils
import json
from hanzi import Hanzi
from flask import Flask, current_app
import redis
import HanziMatcher
import sys

ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'])

app = Flask(__name__)
app.config.from_pyfile('config.py')
print(app.config)
app.redis = redis.StrictRedis(host=app.config['REDIS_HOST'],
                              port=app.config['REDIS_PORT'], db=0)

#print app.hanzi_list
#app.config['LABELED_DATASET_PATH'] = LABELED_DATASET_PATH

 #=  set(map(lambda x:x.decode('utf-8'), os.listdir(app.config['LABELED_DATASET_PATH']) ))
#app.hanzi_list = utils.read_handwriting(os.path.join("./","handwriting.txt"))

#if not app.debug:
#app.debug = True
import logging
from logging import FileHandler


file_handler = FileHandler(app.config['LOG_FILE_PATH'],encoding='utf-8')
#file_handler.setLevel(logging.ERROR)
Example #22
import os
import redis

from flask import Flask
from flask import request, redirect, render_template, url_for
from flask import Response

app = Flask(__name__)
app.redis = redis.StrictRedis(host='db', port=6379, db=0)

# Be super aggressive about saving for the development environment.
# This says save every second if there is at least 1 change.  If you use
# redis in production you'll want to read up on the redis persistence
# model.
app.redis.config_set('save', '1 1')
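# (For reference, the equivalent redis.conf directive is "save 1 1":
# snapshot to disk if at least one key changed within the last second.)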

@app.route('/', methods=['GET', 'POST'])
def main_page():
    if request.method == 'POST':
        app.redis.lpush('entries', request.form['entry'])        
        return redirect(url_for('main_page'))
    else:
        entries = app.redis.lrange('entries', 0, -1)
        return render_template('main.html', entries=entries)

@app.route('/clear', methods=['POST'])
def clear_entries():
    app.redis.ltrim('entries', 1, 0)
    return redirect(url_for('main_page'))

if __name__ == "__main__":
    # body missing in the original snippet; a plain dev-server start is assumed
    app.run()
Example #23
from __future__ import print_function

import os
import sys
import redis

from flask import Flask
from flask import jsonify


app = Flask(__name__)
app.redis = redis.StrictRedis(host=os.getenv('REDIS_HOST', 'localhost'), socket_timeout=5)

@app.route('/set', methods=['POST'])
def set_value():
    try:
        app.redis.incr('total')
    except redis.ConnectionError as e:
        return jsonify({"status": "redis connection error"})
    else:
        return jsonify({"status": "ok"})

@app.route('/get')
def get_value():
    try:
        data = app.redis.get('total')
    except redis.ConnectionError as e:
        return jsonify({"status": "redis connection error"})
    else:
        return jsonify({"status": "ok", "data": int(data)})
Example #24
    Redis will set up a connection pooler automatically, but the nano
    tier of Redis To Go only supports 10 concurrent connections.

    """
    pool = redis.ConnectionPool(host=app.config['REDIS_HOST'],
                                port=app.config['REDIS_PORT'],
                                password=app.config['REDIS_PASSWORD'],
                                db=db,
                                max_connections=max_connections)
    return redis.Redis(connection_pool=pool)


###
### Global Redis connection.
###
app.redis = redis_init()


###
### Hooks.
###


@app.before_request
def force_ssl():
    """Throw an error if the request is not secure."""
    criteria = [
        app.debug,
        request.is_secure,
        request.headers.get('X-Forwarded-Proto', "http") == "https",
    ]
Example #25
from wtforms import widgets
import redis
import os

# These two imports are assumed: the snippet uses Flask and MongoEngine below
# without showing where they come from.
from flask import Flask
from flask_mongoengine import MongoEngine



app = Flask(__name__)

app.config['SECRET_KEY'] = '123456790'
app.config['MONGODB_SETTINGS'] = {'DB': 'taobao',
                                  "HOST": 'localhost',
                                  "PORT": 27017}

app.__rootdir__ = os.getcwd()


# Create models
db = MongoEngine()
db.init_app(app)
#db.connection.admin.authenticate("root", "chenfuzhi")

app.conn = db.connection
app.redis = redis.Redis("localhost", 6379)

__all__ = ["app", "modes", "api", "views", "cqlui", "blacklist"]

from webadmin import *



Example #26
from scrapy.utils.project import get_project_settings

from social_scraper.spiders.twitter import TwitterSpider
from social_scraper.spiders.fb import FacebookSpider

from social_scraper.settings import (API_HOST, API_PORT, API_DEBUG_MODE,
        CELERY_BROKER_URL, CELERY_IMPORTS, CELERY_RESULT_BACKEND,
        CELERYD_MAX_TASKS_PER_CHILD, REDIS_HOST, REDIS_PORT)


# flask app configuration
flask_app = Flask('profile_api')
api = restful.Api(flask_app)

# database setup
flask_app.redis = redis.StrictRedis(REDIS_HOST, port=REDIS_PORT, db=0)

# api output json encoding
output_json.func_globals['settings'] = {'ensure_ascii': False, 'encoding': 'utf8'}

# bind celery to flask app
flask_app.config.update(CELERY_BROKER_URL=CELERY_BROKER_URL,
    CELERY_RESULT_BACKEND=CELERY_RESULT_BACKEND,
    CELERYD_MAX_TASKS_PER_CHILD=CELERYD_MAX_TASKS_PER_CHILD,
    CELERY_IMPORTS=CELERY_IMPORTS,
)
celery = Celery(flask_app.import_name, broker=flask_app.config['CELERY_BROKER_URL'])
celery.conf.update(flask_app.config)

# Tasks
@celery.task(name='tasks.crawl')
Example #27
import os
import redis

from flask import Flask
from flask import request, redirect, render_template, url_for
from flask import Response

app = Flask(__name__)
app.redis = redis.StrictRedis(host='127.0.0.1', port=6379, db=0)

# Be super aggressive about saving for the development environment.
# This says save every second if there is at least 1 change.  If you use
# redis in production you'll want to read up on the redis persistence
# model.
app.redis.config_set('save', '1 1')

@app.route('/', methods=['GET', 'POST'])
def main_page():
    if request.method == 'POST':
        app.redis.lpush('entries', request.form['entry'])        
        return redirect(url_for('main_page'))
    else:
        entries = app.redis.lrange('entries', 0, -1)
        return render_template('main.html', entries=entries)

@app.route('/clear', methods=['POST'])
def clear_entries():
    app.redis.ltrim('entries', 1, 0)
    return redirect(url_for('main_page'))

if __name__ == "__main__":
    # body missing in the original snippet; a plain dev-server start is assumed
    app.run()
Example #28
# -*- coding: utf-8 -*-

import redis
from flask import Flask, g, render_template, current_app
from account.helpers import get_current_user
from account.views import bp_account
from trade.views import bp_trade
from fund.views import bp_fund
from api.views import bp_api
import database

app = Flask(__name__)
app.config.from_object('settings')

app.redis = redis.Redis(
        host=app.config['REDIS_HOST'],
        port=app.config['REDIS_PORT'],
        db=app.config['REDIS_NUM'])

app.db = database.Connection(
        host=app.config['MYSQL_HOST'], database=app.config['MYSQL_DATABASE'],
        user=app.config['MYSQL_USER'], password=app.config['MYSQL_PASSWORD'])

app.register_blueprint(bp_account, url_prefix="/account")
app.register_blueprint(bp_trade, url_prefix="/trade")
app.register_blueprint(bp_fund, url_prefix="/fund")
app.register_blueprint(bp_api, url_prefix="/api")


@app.before_request
def before_request():
    g.user = get_current_user()
Example #29
from room.views import room
from room.events import socketio

REDIS_HOST = os.getenv('REDIS_HOST', 'localhost')
REDIS_PORT = os.getenv('REDIS_PORT', 6379)
REDIS_PASS = os.getenv('REDIS_PASS', None)
REDIS_MASTER = os.getenv('REDIS_MASTER', None)

DEBUG = os.getenv('DEBUG', True)
PORT = int(os.getenv('PORT', '8000'))

app = Flask(__name__)

app.config['SECRET_KEY'] = '4403dac8-370a-4877-8d24-bab0511dc976'
app.config['SESSION_TYPE'] = 'redis'

app.register_blueprint(home)
app.register_blueprint(room)

if REDIS_HOST == 'localhost':
    app.redis = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT, db=0)
else:
    sentinel = Sentinel([(REDIS_HOST, REDIS_PORT)], socket_timeout=0.1, password=REDIS_PASS)
    app.redis = sentinel.master_for(REDIS_MASTER, socket_timeout=0.1)

app.debug = DEBUG

socketio.init_app(app)
if __name__ == '__main__':
    socketio.run(app, port=PORT, host='0.0.0.0')
Example #30
    formatter = logging.Formatter('%(asctime)s - %(process)d - %(name)s - %(module)s:%(lineno)d - %(levelname)s - %(message)s')
    file_handler.setFormatter(formatter)
    stream_handler.setFormatter(formatter)
    app.logger.addHandler(file_handler)
    #app.logger.addHandler(stream_handler)
    app.logger.setLevel(logging.DEBUG)
    app.logger.info('Application Process Started')

from cchecker_web import cchecker_web
app.register_blueprint(cchecker_web, url_prefix='')

import redis
redis_pool = redis.ConnectionPool(host=app.config.get('REDIS_HOST'),
                                  port=app.config.get('REDIS_PORT'),
                                  db=app.config.get('REDIS_DB'))
app.redis = redis.Redis(connection_pool=redis_pool)
redis_connection = app.redis

# rq
from rq import Queue
app.queue = Queue('default', connection=app.redis)

@app.context_processor
def url_process():
    def url_root():
        return url_for('.show_root')
    return {'url_root': url_root}


from cchecker_web.utils import setup_uploads
setup_uploads(app)
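
# A sketch of how the rq queue above is typically used. The dotted task path is
# a placeholder, not a function known to exist in this project; rq resolves such
# strings to importable callables when the job runs:
#
#     job = app.queue.enqueue('cchecker_web.tasks.run_checker', 'some_dataset_id')
#     print(job.id)
#
# A separate worker process drains the same queue, e.g.:
#
#     rq worker default --url redis://localhost:6379/0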