Example #1
File: app.py Project: tonysyu/scrappyr
def _init_db(app):
    db.init_app(app)

    migrate = Migrate()
    migrate.init_app(app, db)

    with app.app_context():
        db.create_all()
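A minimal sketch of how a helper like _init_db is typically called from an application factory; create_app and the config object name are assumptions, not part of the scrappyr snippet:

from flask import Flask

def create_app(config_object='config.Config'):  # hypothetical config path
    app = Flask(__name__)
    app.config.from_object(config_object)
    _init_db(app)  # binds SQLAlchemy and Flask-Migrate, then creates the tables
    return app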
Example #2
File: manage.py Project: fdgogogo/fangs
import sys
import os

from flask import url_for
from flask.ext.migrate import Migrate, MigrateCommand
from flask.ext.script import Manager, Shell, Option
from flask.ext.security import script

dirname = os.path.dirname
sys.path.insert(0, dirname(dirname(os.path.abspath(__file__))))

import backend
import backend.blog.models
import backend.utils.wordpress_importer

migrate = Migrate()
migrate.init_app(backend.app, backend.db)

manager = Manager(backend.app)
manager.add_command('db', MigrateCommand)


class PromotedCreateUserCommand(script.CreateUserCommand):
    """Create a user"""

    option_list = (
        Option('-e', '--email', dest='email', default=None),
        Option('-p', '--password', dest='password', default=None),
        Option('-a', '--active', dest='active', default=''),
    )
Example #3
from flask.ext.migrate import Migrate, MigrateCommand
from flask.ext.script import Manager

from ivadb.factory import create_app
from ivadb.core import db

app = create_app()
Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)

if __name__ == '__main__':
    manager.run()
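With MigrateCommand registered under 'db', the migration workflow is driven from the command line. Assuming the script above is saved as manage.py, the standard Flask-Migrate commands are:

# python manage.py db init      -- create the migrations/ directory (run once)
# python manage.py db migrate   -- autogenerate a revision from model changes
# python manage.py db upgrade   -- apply pending revisions to the database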
Example #4
"""
    notv.extensions
    ===============

    Extensions factory.

    :copyright: Copyright (c) 2015 Andrey Martyanov. All rights reserved.
    :license: MIT, see LICENSE for more details.
"""

from flask.ext.migrate import Migrate
from flask.ext.sqlalchemy import SQLAlchemy
from flask_mailgun import Mailgun

db = SQLAlchemy()
mailgun = Mailgun()
migrate = Migrate()
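The extensions above are created without an app and bound later; a minimal sketch of the deferred init_app wiring, where create_app is an assumption and flask_mailgun is assumed to follow the same init_app convention:

from flask import Flask

def create_app():
    app = Flask(__name__)
    db.init_app(app)
    migrate.init_app(app, db)  # two-step Flask-Migrate initialization
    mailgun.init_app(app)      # assumed to expose init_app like the other extensions
    return app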
Example #5
from lemur import create_app

# Needed to be imported so that SQLAlchemy create_all can find our models
from lemur.users.models import User  # noqa
from lemur.roles.models import Role  # noqa
from lemur.authorities.models import Authority  # noqa
from lemur.certificates.models import Certificate  # noqa
from lemur.destinations.models import Destination  # noqa
from lemur.domains.models import Domain  # noqa
from lemur.notifications.models import Notification  # noqa
from lemur.sources.models import Source  # noqa

manager = Manager(create_app)
manager.add_option('-c', '--config', dest='config')

migrate = Migrate(create_app)

KEY_LENGTH = 40
DEFAULT_CONFIG_PATH = '~/.lemur/lemur.conf.py'
DEFAULT_SETTINGS = 'lemur.conf.server'
SETTINGS_ENVVAR = 'LEMUR_CONF'

CONFIG_TEMPLATE = """
# This is just Python which means you can inherit and tweak settings

import os
_basedir = os.path.abspath(os.path.dirname(__file__))

ADMINS = frozenset([''])

THREADS_PER_PAGE = 8
Example #6
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
from config import SAMPLE_DATABASE_URI
from app import application, db
import sqlite3
import sys
import os.path

migrate = Migrate(application, db)

manager = Manager(application)
manager.add_command('db', MigrateCommand)


@manager.command
def import_db(overwrite=False):
    """
    Import sample data for testing using `import_db` command. This command also contains an
    option `--overwrite`. In case there is already some data in the database, user has
    the option to overwrite the database using `import_db --overwrite` or `import_db -o`.
    """

    tables = [
        'user_system_info', 'successful_installs', 'failed_installs',
        'attempts'
    ]

    if os.path.isfile(SAMPLE_DATABASE_URI):  # Check if the database exists
        con = sqlite3.connect(SAMPLE_DATABASE_URI)
    else:
        print("\nNo database found")
Example #7
def create_app(mode, configs=None, log_level=None, **kwargs):
    # Allow configuration information to be specified with environment vars
    env_configs = {}
    for key in os.environ:
        if key.startswith('SIMPLECOIN_CONFIG'):
            env_configs[key] = os.environ[key]

    env_configs = [env_configs[value] for value in sorted(env_configs)]

    configs = ['defaults.toml'] + (env_configs or []) + (configs or [])
    if len(configs) == 1:
        print("Unable to start with only the default config values! {}".format(
            configs))
        exit(2)

    config_vars = {}
    for config in configs:
        if isinstance(config, basestring):
            if os.path.isabs(config):
                config_path = config
            else:
                config_path = os.path.join(root, config)
            config = open(config_path)

        updates = toml.loads(config.read())
        toml.toml_merge_dict(config_vars, updates)

    # Initialize our flask application
    # =======================================================================
    app = Flask(__name__, static_folder='../static', static_url_path='/static')
    app.jinja_loader = FileSystemLoader(os.path.join(root, 'templates'))

    # Objectizes all configurations
    # =======================================================================
    ConfigChecker(config_vars, app)

    # Setup logging
    # =======================================================================
    del app.logger.handlers[0]
    app.logger.setLevel(logging.NOTSET)
    log_format = logging.Formatter(
        '%(asctime)s [%(name)s] [%(levelname)s]: %(message)s')
    log_level = getattr(logging, str(log_level), app.config['log_level'])

    logger = logging.getLogger()
    logger.setLevel(log_level)
    handler = logging.StreamHandler(stream=sys.stdout)
    handler.setFormatter(log_format)
    logger.addHandler(handler)

    # Handle optionally adding log file writers for each different run mode
    # =======================================================================
    if mode == "manage" and app.config['manage_log_file']:
        hdlr = logging.FileHandler(app.config['manage_log_file'])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)
    if mode == "scheduler" and app.config['scheduler_log_file']:
        hdlr = logging.FileHandler(app.config['scheduler_log_file'])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)
    if mode == "webserver" and app.config['webserver_log_file']:
        hdlr = logging.FileHandler(app.config['webserver_log_file'])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)

    logging.getLogger("gunicorn.access").setLevel(logging.WARN)
    logging.getLogger("requests.packages.urllib3.connectionpool").setLevel(
        logging.INFO)

    # Add the debug toolbar if we're in debug mode
    # =======================================================================
    if app.config['DEBUG'] and mode == "webserver":
        # Log all stdout and stderr when in debug mode for convenience
        class LoggerWriter:
            def __init__(self, logger, level):
                self.logger = logger
                self.level = level

            def write(self, message):
                if message != '\n':
                    self.logger.log(self.level, message)

        sys.stdout = LoggerWriter(app.logger, logging.DEBUG)
        sys.stderr = LoggerWriter(app.logger, logging.DEBUG)

    # Register the powerpool datastore + Cache
    # =======================================================================
    db.init_app(app)
    babel.init_app(app)
    app.config['BABEL_DEFAULT_LOCALE'] = app.config.get('default_locale')

    def configure_redis(config):
        typ = config.pop('type')
        if typ == "mock_redis":
            from mockredis import mock_redis_client
            return mock_redis_client()
        return Redis(**config)

    cache_config = app.config.get('main_cache', dict(type='live'))
    cache_redis = configure_redis(cache_config)

    ds_config = app.config.get('redis_conn', dict(type='live'))
    ds_redis = configure_redis(ds_config)

    # Take advantage of the fact that werkzeug lets the host kwargs be a Redis
    # compatible object
    cache.init_app(app,
                   config=dict(CACHE_TYPE='redis',
                               CACHE_REDIS_HOST=cache_redis))
    app.redis = ds_redis

    sentry = False
    if app.config.get('sentry'):
        try:
            from raven.contrib.flask import Sentry
            sentry = Sentry()
        except Exception:
            app.logger.error("Unable to initialize sentry!")

    # Helpful global vars
    # =======================================================================
    app.SATOSHI = Decimal('0.00000001')
    app.MAX_DECIMALS = 28

    # Configure app for running manage.py functions
    # =======================================================================
    if mode == "manage" or mode == "webserver":
        # Dynamically add all the filters in the filters.py file
        for name, func in inspect.getmembers(filters, inspect.isfunction):
            app.jinja_env.filters[name] = func

    if mode == "manage":
        # Initialize the migration settings
        Migrate(app, db)
        # Disable for management mode
        if sentry:
            sentry = False

    # Configure app for serving web content
    # =======================================================================
    elif mode == "webserver":
        # try and fetch the git version information
        try:
            output = subprocess.check_output("git show -s --format='%ci %h'",
                                             shell=True).strip().rsplit(
                                                 " ", 1)
            app.config['hash'] = output[1]
            app.config['revdate'] = output[0]
        # celery won't work with this, so set some default
        except Exception:
            app.config['hash'] = ''
            app.config['revdate'] = ''

        app.logger.info("Starting up SimpleCoin!\n{}".format("=" * 100))

    # Configure app for running scheduler.py functions + instantiate scheduler
    # =======================================================================
    elif mode == "scheduler":
        if sentry and 'SENTRY_NAME' in app.config:
            app.config[
                'SENTRY_NAME'] = app.config['SENTRY_NAME'] + "_scheduler"

        app.logger.info("=" * 80)
        app.logger.info("SimpleCoin cron scheduler starting up...")
        setproctitle.setproctitle("simplecoin_scheduler")

        sched = Scheduler(standalone=True)

        # monkey patch the scheduler to wrap each job call in its own flask
        # context. Kind of sloppy way to pass in the app context...
        Scheduler.app = app
        Scheduler._old_run_job = Scheduler._run_job

        def _run_job(self, *args, **kwargs):
            with self.app.app_context():
                Scheduler._old_run_job(self, *args, **kwargs)

        Scheduler._run_job = _run_job

        stage_tasks = set([
            "cache_profitability", "leaderboard", "server_status",
            "update_network", "cache_user_donation", "update_online_workers"
        ])
        for task_config in app.config['tasks']:
            if not task_config.get('enabled', False):
                continue
            if app.config['stage'] and task_config['name'] not in stage_tasks:
                app.logger.debug(
                    "Skipping scheduling {} because in stage mode!".format(
                        task_config['name']))
                continue

            stripped_config = task_config.copy()
            del stripped_config['enabled']
            task = getattr(sch, task_config['name'])
            sched.add_cron_job(task, **stripped_config)

        app.scheduler = sched

    if sentry:
        sentry.init_app(app, logging=True, level=logging.ERROR)

    # Route registration
    # =======================================================================
    from . import views, models, api, rpc_views
    app.register_blueprint(views.main)
    app.register_blueprint(rpc_views.rpc_views)
    app.register_blueprint(api.api, url_prefix='/api')

    return app
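A hedged sketch of how a mode-driven factory like this is invoked from the different entry points; the simplecoin module path is an assumption:

from simplecoin import create_app  # module path assumed

manage_app = create_app("manage")        # enables Flask-Migrate, disables Sentry
web_app = create_app("webserver")        # adds jinja filters, logs the startup banner
scheduler_app = create_app("scheduler")  # builds and attaches the cron scheduler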
Example #8
    def __init__(self, *args, **kwargs):
        self.app = Flask(__name__, instance_relative_config=True)
        self.app.config.from_object('config')
        self.app.config.from_pyfile('config.py')
        # config/xxx.py -- scence config
        # app.config.from_envvar('APP_CONFIG_FILE') # APP_CONFIG_FILE defined in start.sh

        db.init_app(self.app)

        from flask.ext.migrate import Migrate, MigrateCommand
        self.migrate = Migrate(self.app, db)

        from flask.ext.script import Manager, Shell
        self.manager = Manager(self.app)

        self.manager.add_command('db', MigrateCommand)

        def make_shell_context():
            from .admin.models import Admin
            from .cart.models import Cart
            from .category.models import Cat1, Cat2
            from .location.models import School, Building
            from .order.models import Order, Order_snapshot
            from .product.models import Product, Product_building, Snapshot
            from .pic.models import File, Promotion
            from .user.models import User

            return dict(
                app=self.app,
                db=db,
                Admin=Admin,
                Cart=Cart,
                Cat1=Cat1,
                Cat2=Cat2,
                School=School,
                Building=Building,
                Order=Order,
                Order_snapshot=Order_snapshot,
                Product=Product,
                Product_building=Product_building,
                Snapshot=Snapshot,
                File=File,
                Promotion=Promotion,
                User=User,
            )

        self.manager.add_command('shell',
                                 Shell(make_context=make_shell_context))
        from .admin import adminbp
        self.app.register_blueprint(
            adminbp,
            url_prefix='/admin',
        )
        from .cart import cartbp
        self.app.register_blueprint(
            cartbp,
            url_prefix='/cart',
        )
        from .category import categorybp
        self.app.register_blueprint(
            categorybp,
            url_prefix='/category',
        )
        from .location import locationbp
        self.app.register_blueprint(
            locationbp,
            url_prefix='/location',
        )
        from .order import orderbp
        self.app.register_blueprint(
            orderbp,
            url_prefix='/order',
        )
        from .product import productbp
        self.app.register_blueprint(
            productbp,
            url_prefix='/product',
        )
        from .user import userbp
        self.app.register_blueprint(
            userbp,
            url_prefix='/user',
        )
        from .pic import picbp
        self.app.register_blueprint(
            picbp,
            url_prefix='/pic',
        )
        from .main import mainbp
        self.app.register_blueprint(mainbp, )
Example #9
    def run(self):
        from api.servers.scripts import add_grafana_dashboads
        add_grafana_dashboads()
        print "Done"


class RecalculateCounters(Command):
    """Recalculates models tags counters"""
    def run(self):
        from api.ml_models.scripts import recalculate_tags_counters
        recalculate_tags_counters()
        print "Done"


manager = Manager(app)
migrate = Migrate(app, app.sql_db)
manager.add_command('clearlocalcache', ClearLocalCache())
manager.add_command('rundynamodb', RunDynamoDB())
manager.add_command('db', MigrateCommand)
manager.add_command("celeryd", Celeryd())
manager.add_command("celeryw", Celeryw())
manager.add_command("flower", Flower())
manager.add_command('test', Test())
manager.add_command('generate_crc', GenerateCrc())
manager.add_command("shell", Shell(make_context=_make_context))
manager.add_command("create_db_tables", CreateDbTables())
manager.add_command("create_dynamodb_tables", CreateDynamoDbTables())
manager.add_command("drop_db_tables", DropDbTables())
manager.add_command("create_image", CreateWorkerImage())
manager.add_command("update_deployed", UpdateDeployed())
manager.add_command("create_grafana", CreateGrafanaDashboards())
Example #10
from flask import Flask, redirect
from flask.ext.appbuilder import SQLA, AppBuilder, IndexView
from flask.ext.appbuilder.baseviews import expose
from flask.ext.migrate import Migrate

APP_DIR = os.path.dirname(__file__)
CONFIG_MODULE = os.environ.get('DASHED_CONFIG', 'dashed.config')

# Logging configuration
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(name)s:%(message)s')
logging.getLogger().setLevel(logging.DEBUG)

app = Flask(__name__)
app.config.from_object(CONFIG_MODULE)
db = SQLA(app)
migrate = Migrate(app, db, directory=APP_DIR + "/migrations")


class MyIndexView(IndexView):
    @expose('/')
    def index(self):
        return redirect('/dashed/featured')


appbuilder = AppBuilder(
    app,
    db.session,
    base_template='dashed/base.html',
    indexview=MyIndexView,
    security_manager_class=app.config.get("CUSTOM_SECURITY_MANAGER"))
Example #11
with open('config/logging-%s.yaml' % env) as f:
    import yaml
    logging.config.dictConfig(yaml.load(f))


db = SQLAlchemy(app, session_options={"autoflush": False})
# Define naming constraints so that Alembic just works
# See http://docs.sqlalchemy.org/en/rel_0_9/core/constraints.html#constraint-naming-conventions
db.metadata.naming_convention = {
    "ix": 'ix_%(column_0_label)s',
    "uq": "%(table_name)s_%(column_0_name)s_key",
    "ck": "ck_%(table_name)s_%(constraint_name)s",
    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    "pk": "pk_%(table_name)s"
}
migrate = Migrate(app, db, transaction_per_migration=True)
csrf = CsrfProtect(app)
mail = Mail(app)
ma = Marshmallow(app)


UPLOAD_PATH = app.config['UPLOAD_PATH']
if not os.path.isdir(UPLOAD_PATH):
    os.mkdir(UPLOAD_PATH)


# override flask mail's send operation to inject some custom headers
original_send = mail.send
def send_email_with_sendgrid(message):
    extra_headers = {
        "filters": {
Example #12
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
import logging
from models import db
from tasks.update_from_sirene import update_from_sirene
from tasks.update_from_bce import update_from_bce
from tasks.seed_database_bridge import seed_database_bridge


server = Flask(__name__)
server.debug = config.DEBUG
server.config['SQLALCHEMY_DATABASE_URI'] = config.DB_URI
db.init_app(server)


migrate = Migrate(server, db)
manager = Manager(server)
manager.add_command('db', MigrateCommand)


@manager.command
def script_update_from_bce():
    update_from_bce()


@manager.command
def script_update_from_sirene():
    update_from_sirene()


@manager.command
Example #13
#!/usr/bin/env python
from flask.ext.script import Manager, Server, Shell
from flask.ext.migrate import Migrate, MigrateCommand
from app import app, db, models, views
import pytest

migrate = Migrate(app, db, directory=app.config['ALEMBIC_MIGRATE_DIR'])


# Wire up the DB for normal server run
class MyServer(Server):
    def run(self, *args, **kwargs):
        db.init_app(app)
        super(MyServer, self).run(*args, **kwargs)


manager = Manager(app)
manager.add_command('db', MigrateCommand)
manager.add_command('runserver', MyServer)


# Run the testsuite instead of the normal server
@manager.option('-v',
                '--verbose',
                '-e',
                '--echo',
                dest='echo',
                default=False,
                action='store_true',
                help="Echo generated SQL to stderr")
def test(echo):
Example #14
import sys
from flask.ext.script import Manager, Command, Option
from flask.ext.migrate import Migrate, MigrateCommand
from sqlalchemy.orm.exc import NoResultFound
from models import db, Library
from app import create_app

app_ = create_app()

app_.config.from_pyfile('config.py')
try:
    app_.config.from_pyfile('local_config.py')
except IOError:
    pass

migrate = Migrate(app_, db)
manager = Manager(app_)


class CreateDatabase(Command):
    """
    Creates the database based on models.py
    """
    @staticmethod
    def run(app=app_):
        """
        Creates the database in the application context
        :return: no return
        """
        with app.app_context():
            db.create_all()
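The snippet cuts off before the command is registered; a plausible continuation, assumed by analogy with the other manage.py examples on this page, would be:

manager.add_command('createdb', CreateDatabase())
manager.add_command('db', MigrateCommand)

if __name__ == '__main__':
    manager.run()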
Example #15
from app.config.config import app, db, server
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
from app.manager.seed import seed

migrate = Migrate(app, db)

migrate.init_app(app, db, directory='../../migrations')

manager = Manager(app)

manager.add_command("runserver", server)

manager.add_command('db', MigrateCommand)

manager.add_command('seed', seed)
Example #16
from flask.ext.migrate import MigrateCommand, Migrate
from flask.ext.script import Manager
from factory import create_app

from app import app
from libs.db import db

migrate = Migrate()
migrate.init_app(app, db)

manager = Manager(app)

manager.add_command('db', MigrateCommand)


if __name__ == '__main__':
    manager.run()
Example #17
                              backupCount=5)
handler.setLevel(logging.INFO)
handler.setFormatter(formatter)
app_instance.logger.addHandler(handler)


# create context for shell access
def _make_context():
    ctx = app_instance.test_request_context()
    ctx.push()
    from app.packages import models
    return {"app": app_instance, "db": app.db, "models": models}


# init flask migrate
migrate = Migrate(app_instance, app.db)

manager = Manager(app_instance)
manager.add_command("runserver", Server())
manager.add_command("shell", Shell(make_context=_make_context,
                                   use_ipython=True))
manager.add_command('db', MigrateCommand)
manager.add_command('load', Load(app_instance, app.db.session))
server = Server(host="0.0.0.0", port=9000)


@manager.command
def sitemap():
    app_instance.config["SERVER_NAME"] = "atom.shortcircuits.io"
    sitemap = Sitemap(app=app_instance)
Example #18
import os

basedir = os.path.abspath(os.path.dirname(__file__))  # Get the current path; used later to build the database file path
app = Flask(__name__)
app.config['SECRET_KEY'] = 'hard to guess string'  # Set the secret key to protect forms against CSRF attacks
# The database URL used by the application must be stored in the Flask config key SQLALCHEMY_DATABASE_URI
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join(
    basedir, 'data.sqlite')  # Point SQLALCHEMY_DATABASE_URI at the SQLite database file
app.config[
    'SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True  # Commit pending database changes automatically at the end of every request
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True  # Not covered in the Flask Web Development book ("dog book"); you have to add this yourself
manager = Manager(app)
bootstrap = Bootstrap(app)
moment = Moment(app)
db = SQLAlchemy(app)
migrate = Migrate(app, db)  # Initialize the app and database for migrations
manager.add_command(
    'db', MigrateCommand)  # Attach the MigrateCommand class to Flask-Script's manager object

app.config['MAIL_SERVER'] = 'smtp.163.com'
app.config['MAIL_PORT'] = 25
app.config['MAIL_USE_TLS'] = True
app.config['MAIL_USERNAME'] = '******'  # os.environ.get(xx)
app.config['MAIL_PASSWORD'] = '******'  # This is the 163.com authorization code, not the actual login password
# MAIL_USERNAME here would be your Gmail address
# MAIL_PASSWORD here would be your Gmail password, i.e. the actual password
# you use when logging in to Gmail, which differs from how Chinese mail providers work
app.config['MAIL_DEBUG'] = True
app.config['FLASKY_MAIL_SUBJECT_PREFIX'] = '163.com'
app.config['FLASKY_MAIL_SENDER'] = '*****@*****.**'
app.config['FLASKY_ADMIN'] = '*****@*****.**'
Example #19
        return

    password = ''
    password_2 = ''
    while not (password and password_2) or password != password_2:
        password = getpass('Password: ')
        password_2 = getpass('Re-enter password: ')

    rank1 = Ranks(rank=1, rankdesc='Good')
    rank2 = Ranks(rank=2, rankdesc='Ok')
    rank3 = Ranks(rank=3, rankdesc='Bad')
    session.add_all([rank1, rank2, rank3])
    session.commit()

class DB(object):
    def __init__(self, metadata):
        self.metadata = metadata
migrate = Migrate(piewhole, DB(Base.metadata))
manager.add_command('db', MigrateCommand)

if __name__ == '__main__':
    logging.info("Application start")
    manager.run()
Example #20
def create_app(config_name="development",
               config_file='/etc/privacyidea/pi.cfg',
               silent=False):
    """
    First the configuration from the config.py is loaded depending on the
    config type like "production" or "development" or "testing".

    Then the environment variable PRIVACYIDEA_CONFIGFILE is checked for a
    config file that contains additional settings, which will overwrite the
    default settings from config.py.

    :param config_name: The config name like "production" or "testing"
    :type config_name: basestring
    :param config_file: The name of a config file to read configuration from
    :type config_file: basestring
    :param silent: If set to True, the additional information is not printed
        to stdout
    :type silent: bool
    :return: The flask application
    :rtype: App object
    """
    if not silent:
        print("The configuration name is: {0!s}".format(config_name))
    if os.environ.get(ENV_KEY):
        config_file = os.environ[ENV_KEY]
    if not silent:
        print(
            "Additional configuration can be read from the file {0!s}".format(
                config_file))
    app = Flask(__name__,
                static_folder="static",
                template_folder="static/templates")
    if config_name:
        app.config.from_object(config[config_name])

    try:
        # Try to load the given config_file.
        # If it does not exist, just ignore it.
        app.config.from_pyfile(config_file, silent=True)
    except IOError:
        sys.stderr.write("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n")
        sys.stderr.write("  WARNING: privacyidea create_app has no access\n")
        sys.stderr.write("  to {0!s}!\n".format(config_file))
        sys.stderr.write("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n")

    # Try to load the file, that was specified in the environment variable
    # PRIVACYIDEA_CONFIG_FILE
    # If this file does not exist, we create an error!
    app.config.from_envvar(ENV_KEY, silent=True)

    app.register_blueprint(validate_blueprint, url_prefix='/validate')
    app.register_blueprint(token_blueprint, url_prefix='/token')
    app.register_blueprint(system_blueprint, url_prefix='/system')
    app.register_blueprint(resolver_blueprint, url_prefix='/resolver')
    app.register_blueprint(realm_blueprint, url_prefix='/realm')
    app.register_blueprint(defaultrealm_blueprint, url_prefix='/defaultrealm')
    app.register_blueprint(policy_blueprint, url_prefix='/policy')
    app.register_blueprint(login_blueprint, url_prefix='/')
    app.register_blueprint(jwtauth, url_prefix='/auth')
    app.register_blueprint(user_blueprint, url_prefix='/user')
    app.register_blueprint(audit_blueprint, url_prefix='/audit')
    app.register_blueprint(machineresolver_blueprint,
                           url_prefix='/machineresolver')
    app.register_blueprint(machine_blueprint, url_prefix='/machine')
    app.register_blueprint(application_blueprint, url_prefix='/application')
    app.register_blueprint(caconnector_blueprint, url_prefix='/caconnector')
    app.register_blueprint(cert_blueprint, url_prefix='/certificate')
    app.register_blueprint(ttype_blueprint, url_prefix='/ttype')
    app.register_blueprint(register_blueprint, url_prefix='/register')
    app.register_blueprint(smtpserver_blueprint, url_prefix='/smtpserver')
    app.register_blueprint(recover_blueprint, url_prefix='/recover')
    app.register_blueprint(radiusserver_blueprint, url_prefix='/radiusserver')
    db.init_app(app)
    migrate = Migrate(app, db)

    try:
        # Try to read logging config from file
        log_config_file = app.config.get("PI_LOGCONFIG",
                                         "/etc/privacyidea/logging.cfg")
        if os.path.isfile(log_config_file):
            logging.config.fileConfig(log_config_file)
            if not silent:
                print("Reading Logging settings from {0!s}".format(
                    log_config_file))
        else:
            raise Exception("The config file specified in PI_LOGCONFIG does "
                            "not exist.")
    except Exception as exx:
        sys.stderr.write("{0!s}\n".format(exx))
        sys.stderr.write("Could not use PI_LOGCONFIG. "
                         "Using PI_LOGLEVEL and PI_LOGFILE.\n")
        level = app.config.get("PI_LOGLEVEL", logging.DEBUG)
        # If there is another logfile in pi.cfg we use this.
        logfile = app.config.get("PI_LOGFILE")
        if logfile:
            sys.stderr.write("Using PI_LOGLEVEL {0!s}.\n".format(level))
            sys.stderr.write("Using PI_LOGFILE {0!s}.\n".format(logfile))
            PI_LOGGING_CONFIG["handlers"]["file"]["filename"] = logfile
            PI_LOGGING_CONFIG["handlers"]["file"]["level"] = level
            PI_LOGGING_CONFIG["loggers"]["privacyidea"]["level"] = level
            logging.config.dictConfig(PI_LOGGING_CONFIG)
        else:
            sys.stderr.write("No PI_LOGFILE found. Using default config.\n")
            logging.config.dictConfig(DEFAULT_LOGGING_CONFIG)

    return app
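A usage sketch of the factory described by the docstring; the import path is an assumption and the config file path is the default from the signature:

from privacyidea.app import create_app  # import path assumed

app = create_app(config_name="production",
                 config_file="/etc/privacyidea/pi.cfg",
                 silent=True)
# hand `app` to a WSGI server, or call app.run() for development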
Example #21
                        d = len(d)

                        db.session.delete(service_user)
                        db.session.commit()
                        current_app.logger.info(
                            'Removed stale user: {} and {} libraries'.format(
                                service_user, d))
                        removal_list.append(service_user)

                    except Exception as error:
                        current_app.logger.info(
                            'Problem with database, could not remove user {}: {}'
                            .format(service_user, error))
                        db.session.rollback()
            current_app.logger.info('Deleted {} stale users: {}'.format(
                len(removal_list), removal_list))


# Set up the alembic migration
migrate = Migrate(app, db, compare_type=True)

# Setup the command line arguments using Flask-Script
manager = Manager(app)
manager.add_command('db', MigrateCommand)
manager.add_command('createdb', CreateDatabase())
manager.add_command('destroydb', DestroyDatabase())
manager.add_command('syncdb', DeleteStaleUsers())

if __name__ == '__main__':
    manager.run()
Example #22
#! /usr/bin/env python

from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand

from photolog import db, photolog

migrate = Migrate(photolog, db)
manager = Manager(photolog)
manager.add_command('db', MigrateCommand)

if __name__ == '__main__':
    manager.run()
Example #23
def create_app():
    Autodoc(app)
    # cal = Calendar()
    babel.init_app(app)

    app.register_blueprint(babel_routes)
    app.register_blueprint(api_v1_routes)
    app.register_blueprint(sitemap_routes)
    Migrate(app, db)

    app.config.from_object(environ.get('APP_CONFIG',
                                       'config.ProductionConfig'))
    db.init_app(app)
    manager = Manager(app)
    manager.add_command('db', MigrateCommand)

    if app.config['CACHING']:
        cache.init_app(app, config={'CACHE_TYPE': 'simple'})
    else:
        cache.init_app(app, config={'CACHE_TYPE': 'null'})

    CORS(app)
    stripe.api_key = 'SomeStripeKey'
    app.secret_key = 'super secret key'
    app.json_encoder = MiniJSONEncoder
    app.config['BABEL_DEFAULT_LOCALE'] = 'en'
    app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False
    app.config['FILE_SYSTEM_STORAGE_FILE_VIEW'] = 'static'

    app.logger.addHandler(logging.StreamHandler(sys.stdout))
    app.logger.setLevel(logging.ERROR)
    app.jinja_env.add_extension('jinja2.ext.do')
    app.jinja_env.add_extension('jinja2.ext.loopcontrols')
    app.jinja_env.undefined = SilentUndefined
    app.jinja_env.filters['operation_name'] = operation_name

    # set up jwt
    app.config['JWT_AUTH_USERNAME_KEY'] = 'email'
    app.config['JWT_EXPIRATION_DELTA'] = timedelta(seconds=24 * 60 * 60)
    app.config['JWT_AUTH_URL_RULE'] = None
    jwt = JWT(app, jwt_authenticate, jwt_identity)

    # setup celery
    app.config['CELERY_BROKER_URL'] = environ.get('REDIS_URL', 'redis://localhost:6379/0')
    app.config['CELERY_RESULT_BACKEND'] = app.config['CELERY_BROKER_URL']

    HTMLMIN(app)
    admin_view = AdminView("Open Event")
    admin_view.init(app)
    admin_view.init_login(app)

    if app.config['TESTING']:
        # Profiling
        app.config['PROFILE'] = True
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])

    # API version 2
    with app.app_context():
        from app.api import api_v2
        app.register_blueprint(api_v2)

    sa.orm.configure_mappers()

    return app, manager, db, jwt
Example #24
File: manage.py Project: azedlee/blogful
    email = input("Email: ")
    if session.query(User).filter_by(email=email).first():
        print("User with that email address already exists")
        return

    password = ""
    password_2 = ""
    while len(password) < 8 or password != password_2:
        password = getpass("Password: "******"Re-enter password: "******"__main__":
    manager.run()
            helpers.set_maintenance(True)
        else:
            helpers.set_maintenance(False)
        sys.exit(0)

    if args.command == CLI_COMMANDS.set_node_schedulable:
        if args.schedulable in ('on', '1'):
            set_schedulable(args.node, True)
        else:
            set_schedulable(args.node, False)
        sys.exit(0)

    app = create_app()
    directory = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                             'kdmigrations')
    migrate = Migrate(app, db, directory)

    # All commands that need app context are follow here:
    with app.app_context():

        if AFTER_RELOAD:
            try:
                os.unlink(settings.UPDATES_RELOAD_LOCK_FILE)
            except OSError:
                pass

            err = do_cycle_updates(args.use_testing)
            post_upgrade(for_successful=not bool(err))
            if not args.local and not bool(err):
                print 'Restarting upgrade script to check next new package...'
                os.execv(__file__, sys.argv)
Example #26
if os.path.exists('.env'):
    print('Importing environment from .env...')
    for line in open('.env'):
        var = line.strip().split('=')
        if len(var) == 2:
            os.environ[var[0]] = var[1]

from app import create_app, db
from app.models import User, Follow, Role, Permission, Post, Comment
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand

app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
migrate = Migrate(app, db)


def make_shell_context():
    return dict(app=app,
                db=db,
                User=User,
                Follow=Follow,
                Role=Role,
                Permission=Permission,
                Post=Post,
                Comment=Comment)


manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
Example #27
def create_app(mode, config='config.yml', log_level=None, **kwargs):

    # Initialize our flask application
    # =======================================================================
    app = Flask(__name__, static_folder='../static', static_url_path='/static')

    # Set our template path and configs
    # =======================================================================
    app.jinja_loader = FileSystemLoader(os.path.join(root, 'templates'))
    config_vars = dict(manage_log_file="manage.log",
                       webserver_log_file="webserver.log",
                       scheduler_log_file=None,
                       log_level='INFO',
                       worker_hashrate_fold=86400)
    if os.path.isabs(config):
        config_path = config
    else:
        config_path = os.path.join(root, config)
    config_vars.update(yaml.load(open(config_path)))
    config_vars.update(**kwargs)

    # Objectizes all configurations
    # =======================================================================
    ConfigChecker(config_vars, app)

    # Setup logging
    # =======================================================================
    del app.logger.handlers[0]
    app.logger.setLevel(logging.NOTSET)
    log_format = logging.Formatter('%(asctime)s [%(name)s] [%(levelname)s]: %(message)s')
    log_level = getattr(logging, str(log_level), app.config['log_level'])

    logger = logging.getLogger()
    logger.setLevel(log_level)
    handler = logging.StreamHandler(stream=sys.stdout)
    handler.setFormatter(log_format)
    logger.addHandler(handler)

    # Handle optionally adding log file writers for each different run mode
    # =======================================================================
    if mode == "manage" and app.config['manage_log_file']:
        hdlr = logging.FileHandler(app.config['manage_log_file'])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)
    if mode == "scheduler" and app.config['scheduler_log_file']:
        hdlr = logging.FileHandler(app.config['scheduler_log_file'])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)
    if mode == "webserver" and app.config['webserver_log_file']:
        hdlr = logging.FileHandler(app.config['webserver_log_file'])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)

    logging.getLogger("gunicorn.access").setLevel(logging.WARN)
    logging.getLogger("requests.packages.urllib3.connectionpool").setLevel(logging.INFO)

    # Add the debug toolbar if we're in debug mode
    # =======================================================================
    if app.config['DEBUG'] and mode == "webserver":
        # Log all stdout and stderr when in debug mode for convenience
        class LoggerWriter:
            def __init__(self, logger, level):
                self.logger = logger
                self.level = level

            def write(self, message):
                if message != '\n':
                    self.logger.log(self.level, message)

        sys.stdout = LoggerWriter(app.logger, logging.DEBUG)
        sys.stderr = LoggerWriter(app.logger, logging.DEBUG)

    # Register the DB + Cache
    # =======================================================================
    db.init_app(app)
    # Redis connection configuration
    cache_config = {'CACHE_TYPE': 'redis'}
    cache_config.update(app.config.get('main_cache', {}))
    cache.init_app(app, config=cache_config)
    # Redis connection for persisting application information
    app.redis = Redis(**app.config.get('redis_conn', {}))

    sentry = False
    if app.config.get('sentry'):
        try:
            from raven.contrib.flask import Sentry
            sentry = Sentry()
        except Exception:
            app.logger.error("Unable to initialize sentry!")

    # Helpful global vars
    # =======================================================================
    app.SATOSHI = Decimal('0.00000001')
    app.MAX_DECIMALS = 28

    # Configure app for running manage.py functions
    # =======================================================================
    if mode == "manage":
        # Initialize the migration settings
        Migrate(app, db)
        # Disable for management mode
        if sentry:
            sentry = False

    # Configure app for serving web content
    # =======================================================================
    elif mode == "webserver":
        # try and fetch the git version information
        try:
            output = subprocess.check_output("git show -s --format='%ci %h'",
                                             shell=True).strip().rsplit(" ", 1)
            app.config['hash'] = output[1]
            app.config['revdate'] = output[0]
        # celery won't work with this, so set some default
        except Exception:
            app.config['hash'] = ''
            app.config['revdate'] = ''

        # Dynamically add all the filters in the filters.py file
        for name, func in inspect.getmembers(filters, inspect.isfunction):
            app.jinja_env.filters[name] = func

        app.logger.info("Starting up SimpleCoin!\n{}".format("=" * 100))

    # Configure app for running scheduler.py functions + instantiate scheduler
    # =======================================================================
    elif mode == "scheduler":
        if sentry and 'SENTRY_NAME' in app.config:
            app.config['SENTRY_NAME'] = app.config['SENTRY_NAME'] + "_scheduler"

        app.logger.info("=" * 80)
        app.logger.info("SimpleCoin cron scheduler starting up...")
        setproctitle.setproctitle("simplecoin_scheduler")

        # Make app accessible from our monkey patched code. Messy....
        ThreadPool.app = app
        sched = Scheduler(standalone=True)
        # monkey patch the thread pool for flask contexts
        ThreadPool._old_run_jobs = ThreadPool._run_jobs
        def _run_jobs(self, core):
            self.app.logger.debug("Starting patched threadpool worker!")
            with self.app.app_context():
                ThreadPool._old_run_jobs(self, core)
        ThreadPool._run_jobs = _run_jobs
        # All these tasks actually change the database, and shouldn't
        # be run by the staging server
        if not app.config.get('stage', False):
            sched.add_cron_job(sch.compress_slices, minute='0,15,30,45',
                               second=35)
            # every minute at 55 seconds after the minute
            sched.add_cron_job(sch.generate_credits, second=55)
            sched.add_cron_job(sch.create_trade_req, args=("sell",), minute=1,
                               hour="0,6,12,18")
            sched.add_cron_job(sch.create_trade_req, args=("buy",), minute=1,
                               hour="0,6,12,18")
            # every minute at 55 seconds after the minute
            sched.add_cron_job(sch.collect_minutes, second=35)
            sched.add_cron_job(sch.collect_ppagent_data, second=40)
            # every five minutes 20 seconds after the minute
            sched.add_cron_job(sch.compress_minute,
                               minute='0,5,10,15,20,25,30,35,40,45,50,55',
                               second=20)
            # every hour 2.5 minutes after the hour
            sched.add_cron_job(sch.compress_five_minute, minute=2, second=30)
            # every minute 2 seconds after the minute
            sched.add_cron_job(sch.update_block_state, second=2)
            # every day
            sched.add_cron_job(sch.update_block_state, hour=0, second=0, minute=3)
        else:
            app.logger.info("Stage mode has been set in the configuration, not "
                            "running scheduled database altering cron tasks")

        sched.add_cron_job(sch.update_online_workers,
                           minute='0,5,10,15,20,25,30,35,40,45,50,55',
                           second=30)
        sched.add_cron_job(sch.cache_user_donation, minute='0,15,30,45',
                           second=15)
        sched.add_cron_job(sch.server_status, second=15)
        # every 15 minutes 2 seconds after the minute
        sched.add_cron_job(sch.leaderboard,
                           minute='0,5,10,15,20,25,30,35,40,45,50,55',
                           second=30)

        app.scheduler = sched

    if sentry:
        sentry.init_app(app, logging=True, level=logging.ERROR)

    # Route registration
    # =======================================================================
    from . import views, models, api, rpc_views
    app.register_blueprint(views.main)
    app.register_blueprint(rpc_views.rpc_views)
    app.register_blueprint(api.api, url_prefix='/api')

    return app
Example #28
File: db.py Project: sindile/bauble.web
 def init_app(self, app):
     app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
     app.config['SQLALCHEMY_DATABASE_URI'] = app.config.get('DATABASE_URL')
     super().init_app(app)
     ma.init_app(app)
     Migrate(app, self)
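The super().init_app call implies this method lives on a SQLAlchemy subclass; a minimal sketch of the assumed surrounding class, with ma taken to be a Flask-Marshmallow instance:

from flask_marshmallow import Marshmallow
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy

ma = Marshmallow()

class DB(SQLAlchemy):
    def init_app(self, app):
        app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
        app.config['SQLALCHEMY_DATABASE_URI'] = app.config.get('DATABASE_URL')
        super().init_app(app)
        ma.init_app(app)
        Migrate(app, self)  # `self` is the db object Flask-Migrate needs

db = DB()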
Example #29
else:
    run_migrations_online()


########NEW FILE########
__FILENAME__ = app
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db'

db = SQLAlchemy(app)
migrate = Migrate(app, db)

manager = Manager(app)
manager.add_command('db', MigrateCommand)

class User(db.Model):
    id = db.Column(db.Integer, primary_key = True)
    name = db.Column(db.String(128))

if __name__ == '__main__':
    manager.run()


########NEW FILE########
__FILENAME__ = app2
from flask import Flask
Example #30
from flask.ext.migrate import Migrate, MigrateCommand

from app.database import db
from app import models
from app import create_app
from flask_s3 import FlaskS3
import flask_s3
import logging, logging.config, yaml
import os

from flask.ext.cors import CORS

app = create_app()

manager = Manager(app)
migrate = Migrate()
cors = CORS(app)
migrate.init_app(app, db, directory="./migrations")
s3 = FlaskS3(app)


def _make_context():
    return dict(app=current_app, db=db, models=models)


server = Server(host="0.0.0.0", port=80)
manager.add_option('-c', '--config', dest='config', required=False)
manager.add_command("shell", Shell(use_ipython=True, make_context=_make_context))
manager.add_command("runserver", server)
manager.add_command('db', MigrateCommand)
Example #31
import os
import datetime

from flask.ext.script import Manager, prompt_bool
from flask.ext.migrate import Migrate, MigrateCommand

from formspree import create_app, app, settings
from formspree.app import redis_store
from formspree.forms.helpers import REDIS_COUNTER_KEY
from formspree.forms.models import Form

forms_app = create_app()
manager = Manager(forms_app)

# add flask-migrate commands
Migrate(forms_app, app.DB)
manager.add_command('db', MigrateCommand)


@manager.command
def run_debug(port=os.getenv('PORT', 5000)):
    '''runs the app with debug flag set to true'''
    forms_app.run(host='0.0.0.0', debug=True, port=int(port))


@manager.option('-H',
                '--host',
                dest='host',
                default=None,
                help='referer hostname')
@manager.option('-e', '--email', dest='email', default=None, help='form email')
Example #32
import os
from datetime import datetime

from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand

from web import app, db
from dbupdate import (update_ratebeer as rb_update, update_pol_beers,
                      update_pol_shops, update_pol_stock, update_adminareas)

manager = Manager(app)

DIR = os.path.dirname(os.path.realpath(__file__))

migrate = Migrate()
migrate.init_app(app, db, directory=DIR + '/alembic')
manager.add_command('db', MigrateCommand)


def log_write(message):
    with open('log.txt', 'a') as logfile:
        logfile.write('%s %s\n' % (message, datetime.now()))


@manager.command
def update_ratebeer():
    print 'Importing data from ratebeer'
    rb_update(app.config.get('SQLALCHEMY_DATABASE_URI', None))


@manager.command
Example #33
from jmilkfansblog.extensions import assets_env
from jmilkfansblog.i18n import _LI
from jmilkfansblog.common import config

CONF = cfg.CONF

LOG = logging.getLogger(__name__)

# Create the app instance via the factory method
app = create_app(config.__name__ + '.Config')

# Init manager object via app object
manager = Manager(app)

# Init migrate object via app and db object
migrate = Migrate(app, models.db)

# Create the new application management commands below
# Start the flask web server
manager.add_command("server", Server(host=CONF.host, port=CONF.server_port))
# Manage database migrate
manager.add_command("db", MigrateCommand)
# Show all URL route mappings
manager.add_command("show-urls", ShowUrls())
# Clean all .pyc and .pyo files
manager.add_command("clean", Clean())
# Pack the static file
manager.add_command('assets', ManageAssets(assets_env))


@manager.shell
Example #34
import os
from app import create_app, db
from app.models import User
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand

basedir = os.path.abspath(os.path.dirname(__file__))

app = create_app('testing')
manager = Manager(app)
migrate = Migrate()
migrate.init_app(app, db, directory=os.path.join(basedir, 'migrations/'))

def make_shell_context():
    return dict(app=app, db=db, User=User)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)

@manager.command
def test():
    import unittest
    tests = unittest.TestLoader().discover('tests') 
    unittest.TextTestRunner(verbosity=3).run(tests)

if __name__ == '__main__': 
    manager.run()
Example #35
def upgradeDB():
    Migrate(app, db)
    with app.app_context():
        flask.ext.migrate.upgrade()
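A hedged modern equivalent of this helper using the flask_migrate import path; the default migrations/ directory is assumed:

from flask_migrate import Migrate, upgrade

def upgrade_db(app, db):
    Migrate(app, db)       # register the extension so Alembic can find the metadata
    with app.app_context():
        upgrade()          # apply all pending revisions up to head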