Пример #1
0
def create_app(log_level="INFO", config="config.yml"):
    """Build and wire up the Flask application.

    log_level: name of a logging level ("DEBUG", "INFO", ...).
    config: YAML config file name, resolved relative to `root`.
    """
    app = Flask(__name__)
    app.secret_key = "test"
    app.config.from_object(__name__)

    # BUG FIX: the original always read "config.yml", ignoring the `config`
    # argument; it also leaked the file handle and called yaml.load, which
    # requires an explicit Loader on PyYAML >= 5.  safe_load is the
    # recommended parser for plain config data.
    with open(root + "/" + config) as config_file:
        config_vars = yaml.safe_load(config_file)
    # inject all the yaml configs
    app.config.update(config_vars)
    db.init_app(app)
    Migrate(app, db)

    # Setup redis: a real connection by default, or a mock client for tests
    # when the configured type is "mock_redis".
    redis_config = app.config.get("redis_conn", dict(type="live"))
    typ = redis_config.pop("type")
    if typ == "mock_redis":
        from mockredis import mock_redis_client

        app.redis = mock_redis_client()
    else:
        app.redis = Redis(**redis_config)

    # Replace Flask's default log handler with one writing to stdout.
    del app.logger.handlers[0]
    app.logger.setLevel(logging.NOTSET)
    log_format = logging.Formatter("%(asctime)s [%(name)s] [%(levelname)s]: %(message)s")
    # Fall back to the configured level when `log_level` is not the name of
    # a logging module attribute.
    log_level = getattr(logging, str(log_level), app.config.get("log_level", "INFO"))

    logger = logging.getLogger()
    logger.setLevel(log_level)
    handler = logging.StreamHandler(stream=sys.stdout)
    handler.setFormatter(log_format)
    logger.addHandler(handler)

    # Dynamically add all the filters in the filters.py file
    for name, func in inspect.getmembers(filters, inspect.isfunction):
        app.jinja_env.filters[name] = func

    # RPC connection to the coin daemon, credentials from the config.
    app.rpc_connection = Proxy(
        "http://{0}:{1}@{2}:{3}/".format(
            app.config["coinserv"]["username"],
            app.config["coinserv"]["password"],
            app.config["coinserv"]["address"],
            app.config["coinserv"]["port"],
        )
    )

    from . import views

    app.register_blueprint(views.main)
    return app
Пример #2
0
def create_instance():
    """
    Build and return a fully configured Flask instance.
    """
    import os

    app = Flask(__name__)
    app.config.from_object("notifico.default_config")

    if app.config.get("HANDLE_STATIC"):
        # Serve static assets from this process (handy for small and
        # quick deployments).
        import os.path
        from werkzeug import SharedDataMiddleware

        static_root = os.path.join(os.path.dirname(__file__), "static")
        app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {"/": static_root})

    # Use sentry (http://getsentry.com) for error collection when it is
    # configured and we are not debugging.
    if not app.debug and app.config.get("SENTRY_DSN"):
        sentry.dsn = app.config.get("SENTRY_DSN")
        sentry.init_app(app)

    # One thread-safe redis connection shared by the app and the cache.
    app.redis = Redis(host=app.config["REDIS_HOST"], port=app.config["REDIS_PORT"], db=app.config["REDIS_DB"])
    cache.init_app(
        app,
        config={
            "CACHE_TYPE": "redis",
            "CACHE_REDIS_HOST": app.redis,
            "CACHE_OPTIONS": {"key_prefix": "cache_"},
        },
    )
    db.init_app(app)

    with app.app_context():
        # Let SQLAlchemy create any missing tables.
        db.create_all()

    # Import and register all of our blueprints.
    from notifico.views.account import account
    from notifico.views.public import public
    from notifico.views.projects import projects
    from notifico.views.pimport import pimport
    from notifico.views.admin import admin

    for blueprint, prefix in (
        (account, "/u"),
        (projects, None),
        (public, None),
        (pimport, "/i"),
        (admin, "/_"),
    ):
        app.register_blueprint(blueprint, url_prefix=prefix)

    # cia.vc XML-RPC kludge.
    from notifico.services.hooks.cia import handler

    handler.connect(app, "/RPC2")

    # Custom Jinja2 filters.
    for name in ("pretty_date", "plural", "fix_link"):
        app.jinja_env.filters[name] = getattr(pretty, name)

    return app
Пример #3
0
def create_app(app_name="web_app"):
    """Build a CORS-enabled Flask app wired to a local Redis instance."""
    application = Flask(app_name)
    CORS(application)

    application.config["DEBUG"] = True
    application.register_blueprint(api)
    application.redis = redis.StrictRedis(host="localhost", port=6379, db=0)
    return application
Пример #4
0
def create_app(config):
    """Create the module-global Flask application and its Redis client."""
    global app

    application = Flask(__name__)
    application.secret_key = "jednadvehonzajde"
    application.config.update(config)
    application.redis = redis.Redis()

    # Publish the instance globally *before* importing the view modules:
    # they register routes against `app` as an import side effect.
    app = application

    import views
    import login
    import login_oauth
    import talks
    import filters

    return app
Пример #5
0
def create_app():
    """Return an instance of the main Flask application."""
    app = Flask(package_name)

    # TODO: do some config
    app.redis = StrictRedis()

    from .error import register_error_handler, html_handler
    from .session import LazyRedisSessionInterface
    from .views import views

    register_error_handler(app, html_handler)
    app.session_interface = LazyRedisSessionInterface()
    app.register_blueprint(views)

    return app
Пример #6
0
def create_app(setting, debug=False):
    """Assemble the URL-shortener Flask app from a settings object."""
    app = Flask(__name__)
    app.config.from_object(setting)
    app.debug = debug

    app.redis = redis.StrictRedis(host=setting.REDIS_HOST, port=setting.REDIS_PORT, db=0)
    Cache.redis = app.redis

    # User management endpoints.
    user_view = UserHandler.as_view("user_api")
    app.add_url_rule("/user/", view_func=user_view, methods=["POST"])
    app.add_url_rule("/user/<string:name>", view_func=user_view, methods=["GET", "PUT", "DELETE"])

    # Shortened-URL management endpoints.
    shorten_view = UrlHandler.as_view("url_api")
    app.add_url_rule("/user/<string:name>/url", view_func=shorten_view, methods=["POST"])
    app.add_url_rule(
        "/user/<string:name>/url/<string:shortened>",
        view_func=shorten_view,
        methods=["GET", "PUT", "DELETE"],
    )

    # Public redirect endpoint.
    redirect_view = RedirectHandler.as_view("redirect_api")
    app.add_url_rule("/<string:shortened>", view_func=redirect_view, methods=["GET"])

    db.init_app(app)
    return app
Пример #7
0
def create_app(config):
    """Create the global Flask app (behind a proxy) and load view modules."""
    global app

    app = Flask(__name__, template_folder="../templates", static_folder="../static")
    app.wsgi_app = ProxyFix(app.wsgi_app)
    app.config.update(config)
    app.url_rule_class = GeneratorRule

    app.redis = redis.Redis.from_url(config["REDISCLOUD_URL"])
    app.eventee = {
        "token": config["EVENTEE_TOKEN"],
        "email": config["EVENTEE_EMAIL"],
    }

    # These modules register routes on `app` when imported; keep the order.
    import views
    import login
    import login_oauth
    import talks
    import program
    import entrant
    import vote
    import filters
    import service
    import workshops
    import invoices

    return app
Пример #8
0
import os
import redis

from flask import Flask
from flask import request, redirect, render_template, url_for
from flask import Response

app = Flask(__name__)
app.redis = redis.StrictRedis(host="db", port=6379, db=0)

# Development-friendly persistence: snapshot every second whenever at least
# one key changed. Read up on Redis persistence before using in production.
app.redis.config_set("save", "1 1")


@app.route("/", methods=["GET", "POST"])
def main_page():
    """Show the guestbook; on POST, store the submitted entry first."""
    if request.method != "POST":
        return render_template("main.html", entries=app.redis.lrange("entries", 0, -1))
    app.redis.lpush("entries", request.form["entry"])
    return redirect(url_for("main_page"))


@app.route("/clear", methods=["POST"])
def clear_entries():
    """Drop every stored entry, then return to the main page."""
    # ltrim to an empty range (start > end) empties the list.
    app.redis.ltrim("entries", 1, 0)
    return redirect(url_for("main_page"))
Пример #9
0
def create_instance():
    """
    Construct a new Flask instance and return it.
    """
    import os

    app = Flask(__name__)
    app.config.from_object("notifico.default_config")

    if app.config.get("NOTIFICO_ROUTE_STATIC"):
        # We should handle routing for static assets ourself (handy for
        # small and quick deployments).
        import os.path
        from werkzeug import SharedDataMiddleware

        app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {"/": os.path.join(os.path.dirname(__file__), "static")})

    if not app.debug:
        # If sentry (http://getsentry.com) is configured for
        # error collection we should use it.
        if app.config.get("SENTRY_DSN"):
            sentry.dsn = app.config.get("SENTRY_DSN")
            sentry.init_app(app)

    # Setup our redis connection (which is already thread safe)
    app.redis = Redis(host=app.config["REDIS_HOST"], port=app.config["REDIS_PORT"], db=app.config["REDIS_DB"])
    # Attach Flask-Cache to our application instance. We override
    # the backend configuration settings because we only want one
    # Redis instance.
    cache.init_app(
        app, config={"CACHE_TYPE": "redis", "CACHE_REDIS_HOST": app.redis, "CACHE_OPTIONS": {"key_prefix": "cache_"}}
    )
    # Attach Flask-Mail to our application instance.
    mail.init_app(app)
    # Attach Flask-SQLAlchemy to our application instance.
    db.init_app(app)

    # Update celery's configuration with our application config.
    celery.config_from_object(app.config)

    # Import and register all of our blueprints.
    from notifico.views.account import account
    from notifico.views.public import public
    from notifico.views.projects import projects
    from notifico.views.pimport import pimport
    from notifico.views.admin import admin

    app.register_blueprint(account, url_prefix="/u")
    app.register_blueprint(projects)
    app.register_blueprint(public)
    app.register_blueprint(pimport, url_prefix="/i")
    app.register_blueprint(admin, url_prefix="/_")

    # Register our custom error handlers.
    from notifico.views import errors

    # BUG FIX: the original assigned into app.error_handler_spec[None][500],
    # which depends on Flask internals whose layout changed in later
    # releases; register_error_handler() is the public API for this.
    app.register_error_handler(500, errors.error_500)

    # cia.vc XML-RPC kludge.
    from notifico.services.hooks.cia import handler

    handler.connect(app, "/RPC2")

    # Setup some custom Jinja2 filters.
    app.jinja_env.filters["pretty_date"] = pretty.pretty_date
    app.jinja_env.filters["plural"] = pretty.plural
    app.jinja_env.filters["fix_link"] = pretty.fix_link

    return app
Пример #10
0
from account.views import bp_account
from item.views import bp_item
from timeline.views import bp_timeline
from we.views import bp_we
from sale.views import bp_sale

app = Flask(__name__)
app.config.from_object("settings")


@app.before_request
def before_request():
    # Resolve the logged-in user once per request and stash it on `g`.
    g.user = get_current_user()


app.redis = redis.Redis(host=app.config["REDIS_HOST"], port=app.config["REDIS_PORT"], db=2)

# Register every feature blueprint.
for blueprint in (bp_account, bp_item, bp_timeline, bp_we, bp_sale):
    app.register_blueprint(blueprint)


@app.route("/")
def index():
    """Landing page: logged-in users go straight to the public timeline."""
    if g.user:
        return redirect(url_for("timeline.public"))
    return render_template("index.html")

Пример #11
0
def create_app(mode, config="config.yml", log_level=None, **kwargs):
    """Build the Flask app for one run mode: "manage", "webserver" or
    "scheduler".

    config: YAML file, absolute or relative to the project root.
    kwargs: override values loaded from the config file.
    """

    # Initialize our flask application
    # =======================================================================
    app = Flask(__name__, static_folder="../static", static_url_path="/static")

    # Set our template path and configs
    # =======================================================================
    app.jinja_loader = FileSystemLoader(os.path.join(root, "templates"))
    config_vars = dict(
        manage_log_file="manage.log",
        webserver_log_file="webserver.log",
        scheduler_log_file=None,
        log_level="INFO",
        worker_hashrate_fold=86400,
    )
    if os.path.isabs(config):
        config_path = config
    else:
        config_path = os.path.join(root, config)
    # BUG FIX: the original called yaml.load(open(...)), which leaked the
    # file handle and fails on PyYAML >= 5 without an explicit Loader;
    # safe_load is the recommended parser for plain config data.
    with open(config_path) as config_file:
        config_vars.update(yaml.safe_load(config_file))
    config_vars.update(**kwargs)

    # Objectizes all configurations
    # =======================================================================
    ConfigChecker(config_vars, app)

    # Setup logging
    # =======================================================================
    del app.logger.handlers[0]
    app.logger.setLevel(logging.NOTSET)
    log_format = logging.Formatter("%(asctime)s [%(name)s] [%(levelname)s]: %(message)s")
    # Fall back to the configured level when `log_level` is not the name of
    # a logging module attribute (e.g. when it is None).
    log_level = getattr(logging, str(log_level), app.config["log_level"])

    logger = logging.getLogger()
    logger.setLevel(log_level)
    handler = logging.StreamHandler(stream=sys.stdout)
    handler.setFormatter(log_format)
    logger.addHandler(handler)

    # Handle optionally adding log file writers for each different run mode
    # =======================================================================
    if mode == "manage" and app.config["manage_log_file"]:
        hdlr = logging.FileHandler(app.config["manage_log_file"])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)
    if mode == "scheduler" and app.config["scheduler_log_file"]:
        hdlr = logging.FileHandler(app.config["scheduler_log_file"])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)
    if mode == "webserver" and app.config["webserver_log_file"]:
        hdlr = logging.FileHandler(app.config["webserver_log_file"])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)

    logging.getLogger("gunicorn.access").setLevel(logging.WARN)
    logging.getLogger("requests.packages.urllib3.connectionpool").setLevel(logging.INFO)

    # Add the debug toolbar if we're in debug mode
    # =======================================================================
    if app.config["DEBUG"] and mode == "webserver":
        # Log all stdout and stderr when in debug mode for convenience
        class LoggerWriter:
            def __init__(self, logger, level):
                self.logger = logger
                self.level = level

            def write(self, message):
                if message != "\n":
                    self.logger.log(self.level, message)

        sys.stdout = LoggerWriter(app.logger, logging.DEBUG)
        sys.stderr = LoggerWriter(app.logger, logging.DEBUG)

    # Register the DB + Cache
    # =======================================================================
    db.init_app(app)
    # Redis connection configuration
    cache_config = {"CACHE_TYPE": "redis"}
    cache_config.update(app.config.get("main_cache", {}))
    cache.init_app(app, config=cache_config)
    # Redis connection for persisting application information
    app.redis = Redis(**app.config.get("redis_conn", {}))

    sentry = False
    if app.config.get("sentry"):
        try:
            from raven.contrib.flask import Sentry

            sentry = Sentry()
        except Exception:
            app.logger.error("Unable to initialize sentry!")

    # Helpful global vars
    # =======================================================================
    app.SATOSHI = Decimal("0.00000001")
    app.MAX_DECIMALS = 28

    # Configure app for running manage.py functions
    # =======================================================================
    if mode == "manage":
        # Initialize the migration settings
        Migrate(app, db)
        # Disable for management mode
        if sentry:
            sentry = False

    # Configure app for serving web content
    # =======================================================================
    elif mode == "webserver":
        # try and fetch the git version information
        try:
            output = subprocess.check_output("git show -s --format='%ci %h'", shell=True).strip().rsplit(" ", 1)
            app.config["hash"] = output[1]
            app.config["revdate"] = output[0]
        # celery won't work with this, so set some default
        except Exception:
            app.config["hash"] = ""
            app.config["revdate"] = ""

        # Dynamically add all the filters in the filters.py file
        for name, func in inspect.getmembers(filters, inspect.isfunction):
            app.jinja_env.filters[name] = func

        app.logger.info("Starting up SimpleCoin!\n{}".format("=" * 100))

    # Configure app for running scheduler.py functions + instantiate scheduler
    # =======================================================================
    elif mode == "scheduler":
        if sentry and "SENTRY_NAME" in app.config:
            app.config["SENTRY_NAME"] = app.config["SENTRY_NAME"] + "_scheduler"

        app.logger.info("=" * 80)
        app.logger.info("SimpleCoin cron scheduler starting up...")
        setproctitle.setproctitle("simplecoin_scheduler")

        # Make app accessible from out monkey patched code. Messy....
        ThreadPool.app = app
        sched = Scheduler(standalone=True)
        # monkey patch the thread pool for flask contexts
        ThreadPool._old_run_jobs = ThreadPool._run_jobs

        def _run_jobs(self, core):
            self.app.logger.debug("Starting patched threadpool worker!")
            with self.app.app_context():
                ThreadPool._old_run_jobs(self, core)

        ThreadPool._run_jobs = _run_jobs
        # All these tasks actually change the database, and shouldn't
        # be run by the staging server
        if not app.config.get("stage", False):
            # every minute at 55 seconds after the minute
            sched.add_cron_job(sch.generate_credits, second=55)
            sched.add_cron_job(sch.create_trade_req, args=("sell",), minute=1, hour="0,6,12,18")
            sched.add_cron_job(sch.create_trade_req, args=("buy",), minute=1, hour="0,6,12,18")
            # every minute at 55 seconds after the minute
            sched.add_cron_job(sch.collect_minutes, second=35)
            sched.add_cron_job(sch.collect_ppagent_data, second=40)
            # every five minutes 20 seconds after the minute
            sched.add_cron_job(sch.compress_minute, minute="0,5,10,15,20,25,30,35,40,45,50,55", second=20)
            # every hour 2.5 minutes after the hour
            sched.add_cron_job(sch.compress_five_minute, minute=2, second=30)
            # every minute 2 seconds after the minute
            sched.add_cron_job(sch.update_block_state, second=2)
            # every day
            # NOTE(review): this schedules update_block_state a second time;
            # the "every day" comment suggests a different daily task was
            # intended here — confirm against scheduler.py.
            sched.add_cron_job(sch.update_block_state, hour=0, second=0, minute=3)
        else:
            app.logger.info(
                "Stage mode has been set in the configuration, not " "running scheduled database altering cron tasks"
            )

        sched.add_cron_job(sch.update_online_workers, minute="0,5,10,15,20,25,30,35,40,45,50,55", second=30)
        sched.add_cron_job(sch.cache_user_donation, minute="0,15,30,45", second=15)
        sched.add_cron_job(sch.server_status, second=15)
        # every 15 minutes 2 seconds after the minute
        sched.add_cron_job(sch.leaderboard, minute="0,5,10,15,20,25,30,35,40,45,50,55", second=30)

        app.scheduler = sched

    if sentry:
        sentry.init_app(app, logging=True, level=logging.ERROR)

    # Route registration
    # =======================================================================
    from . import views, models, api, rpc_views

    app.register_blueprint(views.main)
    app.register_blueprint(rpc_views.rpc_views)
    app.register_blueprint(api.api, url_prefix="/api")

    return app
Пример #12
0
#!/usr/bin/env python3.4

from flask import Flask, jsonify
import redis


# Create application
app = Flask(__name__)
app.redis = redis.StrictRedis()


def get_max(number):
    """Return the largest of the first `number` stored pings, or None.

    None is returned when the list is empty or a value is not an int.
    """
    # BUG FIX: the original used a bare `except:`, which also swallows
    # KeyboardInterrupt/SystemExit; catch only the plausible failures.
    try:
        return max(map(int, app.redis.lrange("listofpings", 0, number)))
    except (ValueError, TypeError, redis.RedisError):
        return None


def get_mean(number):
    """Return the integer mean of the first `number` pings, or None."""
    try:
        list_of_int = list(map(int, app.redis.lrange("listofpings", 0, number)))
        return int(sum(list_of_int) / number)
    except (ValueError, TypeError, ZeroDivisionError, redis.RedisError):
        # ZeroDivisionError covers number == 0, matching the old behavior.
        return None


def get_last():
    """Return the most recent ping value, or None when none are stored."""
    try:
        return int(app.redis.lrange("listofpings", 0, 0)[0])
    except (ValueError, TypeError, IndexError, redis.RedisError):
        return None
Пример #13
0
#!/usr/bin/env python2.7

#
import logging
import os
import redis

from flask import Flask, Response, json, render_template, url_for


# Redis keys holding the state of each monitored door sensor.
keys = ["Door_garageentryclosed", "Door_frontclosed"]

app = Flask(__name__, static_url_path="")
app.redis = redis.StrictRedis(host=os.getenv("REDIS_HOST", "localhost"), port=6379, db=0)


@app.route("/doors.json")
def doors():
    """Return the door sensor values as JSON keyed by sensor name."""
    values = app.redis.mget(keys)
    # A single dict() over the zipped pairs suffices; the original wrapped
    # it in a second, redundant dict() call.
    data = dict(zip(keys, values))
    resp = Response(json.dumps(data), status=200, mimetype="application/json")
    return resp


@app.route("/house.json")
def house():
    sensorTypes = {"Door": {}, "Motion": {}, "Doorbell": {}}
    data = {}

    for sensortype in keys:
        newbucket = sensortype.split("_")[0]
import os
import redis

from flask import Flask
from flask import request, redirect, render_template, url_for
from flask import Response

app = Flask(__name__)
app.redis = redis.StrictRedis(host="127.0.0.1", port=6379, db=0)

# Development setting: persist to disk every second when at least one key
# changed. Review the Redis persistence model before production use.
app.redis.config_set("save", "1 1")


@app.route("/", methods=["GET", "POST"])
def main_page():
    """Render the entry list; accept new entries via POST."""
    if request.method == "POST":
        app.redis.lpush("entries", request.form["entry"])
        return redirect(url_for("main_page"))
    stored = app.redis.lrange("entries", 0, -1)
    return render_template("main.html", entries=stored)


@app.route("/clear", methods=["POST"])
def clear_entries():
    """Empty the entry list (ltrim with start > end clears it)."""
    app.redis.ltrim("entries", 1, 0)
    return redirect(url_for("main_page"))
Пример #15
0
from hashlib import md5
from redis import Redis
from rsession import RedisSessionInterface
import types
import config
from assets import Assets
from random import choice
import string


from flask import Flask

# Application bootstrap: all settings come from the local `config` module.
app = Flask("match")
app.config.from_object(config)
# Positional redis-py args are (host, port, db, password).
app.redis = Redis(
    app.config["REDIS_HOST"], app.config["REDIS_PORT"], app.config["REDIS_DB"], app.config["REDIS_PASSWORD"]
)
# Store Flask sessions in the Redis instance above.
app.session_interface = RedisSessionInterface(app.redis)

assets = Assets(app)

import models

# models.setup presumably binds the models to `app` — confirm in models.py.
models.setup(app)

from views import login, home, team, match, user, school

# NOTE(review): `user` and `school` are imported above but never registered
# as blueprints below — confirm whether that is intentional.
app.register_blueprint(login.module)
app.register_blueprint(home.module)
app.register_blueprint(team.module)
app.register_blueprint(match.module)
Пример #16
0
def create_app(mode, configs=None, log_level=None, **kwargs):
    """Build the SimpleCoin Flask app for mode "manage", "webserver" or
    "scheduler".

    configs: list of TOML file paths (or open file objects) layered on top
    of defaults.toml and any SIMPLECOIN_CONFIG* environment variables.
    NOTE(review): `basestring` below means this module targets Python 2.
    """
    # Allow configuration information to be specified with enviroment vars
    env_configs = {}
    for key in os.environ:
        if key.startswith("SIMPLECOIN_CONFIG"):
            env_configs[key] = os.environ[key]

    # Sort by variable name so the layering order is deterministic.
    env_configs = [env_configs[value] for value in sorted(env_configs)]

    configs = ["defaults.toml"] + (env_configs or []) + (configs or [])
    if len(configs) == 1:
        print("Unable to start with only the default config values! {}".format(configs))
        exit(2)

    # Merge every config source, later entries overriding earlier ones.
    config_vars = {}
    for config in configs:
        if isinstance(config, basestring):
            if os.path.isabs(config):
                config_path = config
            else:
                config_path = os.path.join(root, config)
            config = open(config_path)

        updates = toml.loads(config.read())
        toml.toml_merge_dict(config_vars, updates)

    # Initialize our flask application
    # =======================================================================
    app = Flask(__name__, static_folder="../static", static_url_path="/static")
    app.jinja_loader = FileSystemLoader(os.path.join(root, "templates"))

    # Objectizes all configurations
    # =======================================================================
    ConfigChecker(config_vars, app)

    # Setup logging
    # =======================================================================
    del app.logger.handlers[0]
    app.logger.setLevel(logging.NOTSET)
    log_format = logging.Formatter("%(asctime)s [%(name)s] [%(levelname)s]: %(message)s")
    # Fall back to the configured level when `log_level` is not the name of
    # a logging module attribute (e.g. when it is None).
    log_level = getattr(logging, str(log_level), app.config["log_level"])

    logger = logging.getLogger()
    logger.setLevel(log_level)
    handler = logging.StreamHandler(stream=sys.stdout)
    handler.setFormatter(log_format)
    logger.addHandler(handler)

    # Handle optionally adding log file writers for each different run mode
    # =======================================================================
    if mode == "manage" and app.config["manage_log_file"]:
        hdlr = logging.FileHandler(app.config["manage_log_file"])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)
    if mode == "scheduler" and app.config["scheduler_log_file"]:
        hdlr = logging.FileHandler(app.config["scheduler_log_file"])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)
    if mode == "webserver" and app.config["webserver_log_file"]:
        hdlr = logging.FileHandler(app.config["webserver_log_file"])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)

    logging.getLogger("gunicorn.access").setLevel(logging.WARN)
    logging.getLogger("requests.packages.urllib3.connectionpool").setLevel(logging.INFO)

    # Add the debug toolbar if we're in debug mode
    # =======================================================================
    if app.config["DEBUG"] and mode == "webserver":
        # Log all stdout and stderr when in debug mode for convenience
        class LoggerWriter:
            def __init__(self, logger, level):
                self.logger = logger
                self.level = level

            def write(self, message):
                if message != "\n":
                    self.logger.log(self.level, message)

        sys.stdout = LoggerWriter(app.logger, logging.DEBUG)
        sys.stderr = LoggerWriter(app.logger, logging.DEBUG)

    # Register the powerpool datastore + Cache
    # =======================================================================
    db.init_app(app)
    babel.init_app(app)
    app.config["BABEL_DEFAULT_LOCALE"] = app.config.get("default_locale")

    def configure_redis(config):
        """Build a Redis client, or a mock client when type == "mock_redis"."""
        typ = config.pop("type")
        if typ == "mock_redis":
            from mockredis import mock_redis_client

            return mock_redis_client()
        return Redis(**config)

    cache_config = app.config.get("main_cache", dict(type="live"))
    cache_redis = configure_redis(cache_config)

    ds_config = app.config.get("redis_conn", dict(type="live"))
    ds_redis = configure_redis(ds_config)

    # Take advantage of the fact that werkzeug lets the host kwargs be a Redis
    # compatible object
    cache.init_app(app, config=dict(CACHE_TYPE="redis", CACHE_REDIS_HOST=cache_redis))
    app.redis = ds_redis

    sentry = False
    if app.config.get("sentry"):
        try:
            from raven.contrib.flask import Sentry

            sentry = Sentry()
        except Exception:
            app.logger.error("Unable to initialize sentry!")

    # Helpful global vars
    # =======================================================================
    app.SATOSHI = Decimal("0.00000001")
    app.MAX_DECIMALS = 28

    # Configure app for running manage.py functions
    # =======================================================================
    if mode == "manage" or mode == "webserver":
        # Dynamically add all the filters in the filters.py file
        for name, func in inspect.getmembers(filters, inspect.isfunction):
            app.jinja_env.filters[name] = func

    if mode == "manage":
        # Initialize the migration settings
        Migrate(app, db)
        # Disable for management mode
        if sentry:
            sentry = False

    # Configure app for serving web content
    # =======================================================================
    elif mode == "webserver":
        # try and fetch the git version information
        try:
            output = subprocess.check_output("git show -s --format='%ci %h'", shell=True).strip().rsplit(" ", 1)
            app.config["hash"] = output[1]
            app.config["revdate"] = output[0]
        # celery won't work with this, so set some default
        except Exception:
            app.config["hash"] = ""
            app.config["revdate"] = ""

        app.logger.info("Starting up SimpleCoin!\n{}".format("=" * 100))

    # Configure app for running scheduler.py functions + instantiate scheduler
    # =======================================================================
    elif mode == "scheduler":
        if sentry and "SENTRY_NAME" in app.config:
            app.config["SENTRY_NAME"] = app.config["SENTRY_NAME"] + "_scheduler"

        app.logger.info("=" * 80)
        app.logger.info("SimpleCoin cron scheduler starting up...")
        setproctitle.setproctitle("simplecoin_scheduler")

        sched = Scheduler(standalone=True)

        # monkey patch the scheduler to wrap each job call in its own flask
        # context. Kind of sloppy way to pass in the app context...
        Scheduler.app = app
        Scheduler._old_run_job = Scheduler._run_job

        def _run_job(self, *args, **kwargs):
            # Run the original job inside a Flask application context.
            with self.app.app_context():
                Scheduler._old_run_job(self, *args, **kwargs)

        Scheduler._run_job = _run_job

        # Tasks that are safe to run on a staging server (read/cache only).
        stage_tasks = set(
            [
                "cache_profitability",
                "leaderboard",
                "server_status",
                "update_network",
                "cache_user_donation",
                "update_online_workers",
            ]
        )
        for task_config in app.config["tasks"]:
            if not task_config.get("enabled", False):
                continue
            if app.config["stage"] and task_config["name"] not in stage_tasks:
                app.logger.debug("Skipping scheduling {} because in stage mode!".format(task_config["name"]))
                continue

            # The remaining task_config keys become cron kwargs.
            stripped_config = task_config.copy()
            del stripped_config["enabled"]
            task = getattr(sch, task_config["name"])
            sched.add_cron_job(task, **stripped_config)

        app.scheduler = sched

    if sentry:
        sentry.init_app(app, logging=True, level=logging.ERROR)

    # Route registration
    # =======================================================================
    from . import views, models, api, rpc_views

    app.register_blueprint(views.main)
    app.register_blueprint(rpc_views.rpc_views)
    app.register_blueprint(api.api, url_prefix="/api")

    return app
Пример #17
0
from flask import Flask

from flask.ext.mongoengine import MongoEngine
from flask.ext.admin.contrib.mongoengine import ModelView
# FIX: `admin` was imported twice (once alone, once with wtf); the duplicate
# line is removed. NOTE(review): the flask.ext.* namespace is deprecated —
# modern installs import flask_admin / flask_mongoengine directly.
from flask.ext import admin, wtf
from wtforms import widgets
import redis
import os


app = Flask(__name__)

app.config["SECRET_KEY"] = "123456790"
app.config["MONGODB_SETTINGS"] = {"DB": "taobao", "HOST": "localhost", "PORT": 27017}

# Remember the process start directory; presumably used by webadmin views to
# resolve relative paths — TODO confirm.
app.__rootdir__ = os.getcwd()


# Create models
db = MongoEngine()
db.init_app(app)
# db.connection.admin.authenticate("root", "chenfuzhi")

app.conn = db.connection
app.redis = redis.Redis("localhost", 6379)

__all__ = ["app", "modes", "api", "views", "cqlui", "blacklist"]

from webadmin import *
Пример #18
0
    stream_handler.setFormatter(formatter)
    app.logger.addHandler(file_handler)
    # app.logger.addHandler(stream_handler)
    app.logger.setLevel(logging.DEBUG)
    app.logger.info("Application Process Started")

from cchecker_web import cchecker_web

app.register_blueprint(cchecker_web, url_prefix="")

import redis

# One shared connection pool backs every Redis client in this app.
redis_pool = redis.ConnectionPool(
    host=app.config.get("REDIS_HOST"),
    port=app.config.get("REDIS_PORT"),
    db=app.config.get("REDIS_DB"),
)
app.redis = redis.Redis(connection_pool=redis_pool)
redis_connection = app.redis

# Background job queue (RQ) on the same Redis connection.
from rq import Queue

app.queue = Queue("default", connection=app.redis)


@app.context_processor
def url_process():
    """Expose url_root() to every template."""

    def url_root():
        return url_for(".show_root")

    return {"url_root": url_root}
Пример #19
0
    LoginController,
    ResetpasswordController,
    UserController,
    AuthController,
    UserActivationController,
)
import redis
import jinja2

app = Flask(__name__, static_folder="src/static")
# Look templates up in the default folder first, then src/templates.
app.jinja_loader = jinja2.ChoiceLoader([app.jinja_loader, jinja2.FileSystemLoader("src/templates")])


# Redis client used by the controllers.
app.redis = redis.StrictRedis(host="localhost", port=6379, db=0)
app.debug = True

# allow CORS so that localhost:8000 can talk with localhost:9000
CORS(app)

# REST API wiring: every resource lives under /api/.
api = Api(app, prefix="/api/")

for controller, route in (
    (UsersController, "users"),
    (UserController, "users/<string:object_id>"),
    (LoginController, "login"),
    (SignupController, "signup"),
):
    api.add_resource(controller, route)
Пример #20
0
    API_DEBUG_MODE,
    CELERY_BROKER_URL,
    CELERY_IMPORTS,
    CELERY_RESULT_BACKEND,
    CELERYD_MAX_TASKS_PER_CHILD,
    REDIS_HOST,
    REDIS_PORT,
)


# flask app configuration
flask_app = Flask("profile_api")
api = restful.Api(flask_app)

# database setup
flask_app.redis = redis.StrictRedis(REDIS_HOST, port=REDIS_PORT, db=0)

# api output json encoding
# NOTE(review): `func_globals` exists only on Python 2; under Python 3 this
# would need `output_json.__globals__`.
output_json.func_globals["settings"] = {"ensure_ascii": False, "encoding": "utf8"}

# bind celery to flask app
flask_app.config.update(
    CELERY_BROKER_URL=CELERY_BROKER_URL,
    CELERY_RESULT_BACKEND=CELERY_RESULT_BACKEND,
    CELERYD_MAX_TASKS_PER_CHILD=CELERYD_MAX_TASKS_PER_CHILD,
    CELERY_IMPORTS=CELERY_IMPORTS,
)
# Celery shares the Flask config so tasks see the same settings.
celery = Celery(flask_app.import_name, broker=flask_app.config["CELERY_BROKER_URL"])
celery.conf.update(flask_app.config)

# Tasks
Пример #21
0
from home.views import home
from room.views import room
from room.events import socketio

REDIS_HOST = os.getenv("REDIS_HOST", "localhost")
# FIX: environment variables are strings — coerce the port to int so it is
# valid for both redis-py and the Sentinel address tuple.
REDIS_PORT = int(os.getenv("REDIS_PORT", 6379))
REDIS_PASS = os.getenv("REDIS_PASS", None)
REDIS_MASTER = os.getenv("REDIS_MASTER", None)

# BUG FIX: os.getenv returns a string, so any non-empty value — including
# "False" — used to be truthy. Parse it explicitly; the default stays True.
DEBUG = str(os.getenv("DEBUG", True)).lower() not in ("0", "false", "no", "off")
PORT = int(os.getenv("PORT", "8000"))

app = Flask(__name__)

app.config["SECRET_KEY"] = "4403dac8-370a-4877-8d24-bab0511dc976"
app.config["SESSION_TYPE"] = "redis"

app.register_blueprint(home)
app.register_blueprint(room)

if REDIS_HOST == "localhost":
    app.redis = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT, db=0)
else:
    # Behind Sentinel: resolve the current master for REDIS_MASTER.
    sentinel = Sentinel([(REDIS_HOST, REDIS_PORT)], socket_timeout=0.1, password=REDIS_PASS)
    app.redis = sentinel.master_for(REDIS_MASTER, socket_timeout=0.1)

app.debug = DEBUG

socketio.init_app(app)
if __name__ == "__main__":
    socketio.run(app, port=PORT, host="0.0.0.0")