# NOTE(review): fragment — the RotatingFileHandler(...) call that this first
# line closes is outside the visible chunk.
backupCount=5)
handler.setLevel(logging.INFO)  # file handler records INFO and above
handler.setFormatter(formatter)
app_instance.logger.addHandler(handler)

# create context for shell access
def _make_context():
    """Build the namespace exposed inside the `shell` manager command.

    Pushes a test request context so request-bound extensions are usable
    from the interactive shell.
    """
    ctx = app_instance.test_request_context()
    ctx.push()
    from app.packages import models
    return {"app": app_instance, "db": app.db, "models": models}

# init flask migrate
migrate = Migrate(app_instance, app.db)
manager = Manager(app_instance)
manager.add_command("runserver", Server())
manager.add_command("shell", Shell(make_context=_make_context, use_ipython=True))
manager.add_command('db', MigrateCommand)
manager.add_command('load', Load(app_instance, app.db.session))
# NOTE(review): this Server instance is created but not registered here —
# confirm it is used later in the file.
server = Server(host="0.0.0.0", port=9000)

@manager.command
def sitemap():
    # url_for(_external=True) needs an absolute host name to build the map.
    app_instance.config["SERVER_NAME"] = "atom.shortcircuits.io"
    # NOTE(review): fragment — the rest of this command is outside the
    # visible chunk.
    sitemap = Sitemap(app=app_instance)
# NOTE(review): fragment — the `if mode == ...` branch this elif belongs to
# is outside the visible chunk.
elif mode == 'TESTING':
    app.config.from_object('config.TestingConfig')
elif mode == 'PRODUCTION':
    app.config.from_object('config.ProductionConfig')

db = SQLAlchemy(app)
manager = Manager(app)

# S3 client is only constructed when the deployment stores files on S3.
if app.config['STORAGE_TYPE'] == 's3':
    s3_client = boto3.client('s3',
        aws_access_key_id=app.config['AWS_ACCESS_KEY_ID'],
        aws_secret_access_key=app.config['AWS_SECRET_ACCESS_KEY'],
        region_name=app.config['AWS_REGION']
    )

# Import models so Alembic autogenerate can see every table definition.
from dive.base.db.models import *

# compare_type=True lets autogenerate detect column type changes too.
migrate = Migrate(app, db, compare_type=True)
manager.add_command('db', MigrateCommand)

@manager.command
def fresh_migrations():
    """Remove the migrations directory and Alembic's version table so a
    brand-new initial migration can be generated from scratch."""
    try:
        shutil.rmtree('migrations')
    except OSError as e:
        # Directory may simply not exist yet — deliberate best effort.
        pass
    command = 'DROP TABLE IF EXISTS alembic_version;'
    db.engine.execute(command)

# NOTE(review): fragment — the command this decorator applies to is outside
# the visible chunk.
@manager.command
def run(self):
    """Create the Grafana dashboards defined in api.servers.scripts."""
    from api.servers.scripts import add_grafana_dashboads
    add_grafana_dashboads()
    # FIX: was a Python-2-only `print "Done"` statement; the function-call
    # form prints the identical output on both Python 2 and 3.
    print("Done")


class RecalculateCounters(Command):
    """Recalculates models tags counters"""

    def run(self):
        from api.ml_models.scripts import recalculate_tags_counters
        recalculate_tags_counters()
        # FIX: print statement -> print function (same output, py2/py3 safe).
        print("Done")


# Wire every management command into the Flask-Script manager.
manager = Manager(app)
migrate = Migrate(app, app.sql_db)
manager.add_command('clearlocalcache', ClearLocalCache())
manager.add_command('rundynamodb', RunDynamoDB())
manager.add_command('db', MigrateCommand)
manager.add_command("celeryd", Celeryd())
manager.add_command("celeryw", Celeryw())
manager.add_command("flower", Flower())
manager.add_command('test', Test())
manager.add_command('generate_crc', GenerateCrc())
manager.add_command("shell", Shell(make_context=_make_context))
manager.add_command("create_db_tables", CreateDbTables())
manager.add_command("create_dynamodb_tables", CreateDynamoDbTables())
manager.add_command("drop_db_tables", DropDbTables())
manager.add_command("create_image", CreateWorkerImage())
manager.add_command("update_deployed", UpdateDeployed())
manager.add_command("create_grafana", CreateGrafanaDashboards())
from flask_sqlalchemy import SQLAlchemy
from datetime import datetime
# FIX: the flask.ext.* namespace was deprecated and removed in Flask 1.0;
# import the package directly, consistent with the flask_sqlalchemy import
# above in this same file.
from flask_migrate import Migrate, MigrateCommand

db = SQLAlchemy()
migrate = Migrate()


class User(db.Model):
    """
    from test import db
    db.create_app()
    """
    __tablename__ = "user"

    idx = db.Column(db.Integer, primary_key=True)
    nickname = db.Column(db.String(20), unique=True)
    email = db.Column(db.String(20), unique=True)
    pw = db.Column(db.String(20))
    # default is the callable datetime.now — evaluated at INSERT time,
    # not at class-definition time.
    created = db.Column(db.DateTime, default=datetime.now)

    # no __init__()


class Comment(db.Model):
    __tablename__ = "comment"

    idx = db.Column(db.Integer, primary_key=True)
    text = db.Column(db.String(200))
with open('config/logging-%s.yaml' % env) as f: import yaml logging.config.dictConfig(yaml.load(f)) db = SQLAlchemy(app, session_options={"autoflush": False}) # Define naming constraints so that Alembic just works # See http://docs.sqlalchemy.org/en/rel_0_9/core/constraints.html#constraint-naming-conventions db.metadata.naming_convention = { "ix": 'ix_%(column_0_label)s', "uq": "%(table_name)s_%(column_0_name)s_key", "ck": "ck_%(table_name)s_%(constraint_name)s", "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s", "pk": "pk_%(table_name)s" } migrate = Migrate(app, db, transaction_per_migration=True) csrf = CsrfProtect(app) mail = Mail(app) ma = Marshmallow(app) UPLOAD_PATH = app.config['UPLOAD_PATH'] if not os.path.isdir(UPLOAD_PATH): os.mkdir(UPLOAD_PATH) # override flask mail's send operation to inject some customer headers original_send = mail.send def send_email_with_sendgrid(message): extra_headers = { "filters": {
from flask import Flask, redirect
from flask.ext.appbuilder import SQLA, AppBuilder, IndexView
from flask.ext.appbuilder.baseviews import expose
from flask.ext.migrate import Migrate

APP_DIR = os.path.dirname(__file__)
# Dotted config module path, overridable via the DASHED_CONFIG env var.
CONFIG_MODULE = os.environ.get('DASHED_CONFIG', 'dashed.config')

# Logging configuration
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(name)s:%(message)s')
logging.getLogger().setLevel(logging.DEBUG)

app = Flask(__name__)
app.config.from_object(CONFIG_MODULE)
db = SQLA(app)
# Keep migration scripts next to the application package.
migrate = Migrate(app, db, directory=APP_DIR + "/migrations")


class MyIndexView(IndexView):
    # Landing page: send visitors straight to the featured dashboards.
    @expose('/')
    def index(self):
        return redirect('/dashed/featured')


appbuilder = AppBuilder(
    app, db.session,
    base_template='dashed/base.html',
    indexview=MyIndexView,
    security_manager_class=app.config.get("CUSTOM_SECURITY_MANAGER"))
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
import logging
from models import db
from tasks.update_from_sirene import update_from_sirene
from tasks.update_from_bce import update_from_bce
from tasks.seed_database_bridge import seed_database_bridge

server = Flask(__name__)
server.debug = config.DEBUG
server.config['SQLALCHEMY_DATABASE_URI'] = config.DB_URI
db.init_app(server)
migrate = Migrate(server, db)

manager = Manager(server)
manager.add_command('db', MigrateCommand)


@manager.command
def script_update_from_bce():
    """Run the update_from_bce task from the command line."""
    update_from_bce()


@manager.command
def script_update_from_sirene():
    """Run the update_from_sirene task from the command line."""
    update_from_sirene()


# NOTE(review): fragment — the command this decorator applies to is outside
# the visible chunk.
@manager.command
def __init__(self, *args, **kwargs):
    """Build the Flask application: config, DB, migrations, shell command,
    and one blueprint per feature area."""
    self.app = Flask(__name__, instance_relative_config=True)
    self.app.config.from_object('config')
    self.app.config.from_pyfile('config.py')
    # config/xxx.py -- scene config
    # app.config.from_envvar('APP_CONFIG_FILE')  # APP_CONFIG_FILE defined in start.sh
    db.init_app(self.app)

    from flask.ext.migrate import Migrate, MigrateCommand
    self.migrate = Migrate(self.app, db)

    from flask.ext.script import Manager, Shell
    self.manager = Manager(self.app)
    self.manager.add_command('db', MigrateCommand)

    def make_shell_context():
        # Models are imported lazily so only the shell command pays the cost.
        from .admin.models import Admin
        from .cart.models import Cart
        from .category.models import Cat1, Cat2
        from .location.models import School, Building
        from .order.models import Order, Order_snapshot
        from .product.models import Product, Product_building, Snapshot
        from .pic.models import File, Promotion
        from .user.models import User
        return dict(
            app=self.app, db=db, Admin=Admin, Cart=Cart, Cat1=Cat1,
            Cat2=Cat2, School=School, Building=Building, Order=Order,
            Order_snapshot=Order_snapshot, Product=Product,
            Product_building=Product_building, Snapshot=Snapshot,
            File=File, Promotion=Promotion, User=User,
        )
    self.manager.add_command('shell', Shell(make_context=make_shell_context))

    # Register one blueprint per feature area, each under its own prefix.
    from .admin import adminbp
    self.app.register_blueprint(adminbp, url_prefix='/admin',)
    from .cart import cartbp
    self.app.register_blueprint(cartbp, url_prefix='/cart',)
    from .category import categorybp
    self.app.register_blueprint(categorybp, url_prefix='/category',)
    from .location import locationbp
    self.app.register_blueprint(locationbp, url_prefix='/location',)
    from .order import orderbp
    self.app.register_blueprint(orderbp, url_prefix='/order',)
    from .product import productbp
    self.app.register_blueprint(productbp, url_prefix='/product',)
    from .user import userbp
    self.app.register_blueprint(userbp, url_prefix='/user',)
    from .pic import picbp
    self.app.register_blueprint(picbp, url_prefix='/pic',)
    # main blueprint mounts at the root (no url_prefix).
    from .main import mainbp
    self.app.register_blueprint(mainbp, )
# NOTE(review): fragment — `app`, `DevelopmentConfig`, `Base`, `session`
# and `Entry` are defined outside the visible chunk.
manager = Manager(app)
develcfg = DevelopmentConfig()
SERVER_IP = develcfg.SERVER_IP


class DB(object):
    """Database metadata object"""

    def __init__(self, metadata):
        super(DB, self).__init__()
        # Flask-Migrate only needs an object exposing `.metadata`; this
        # wrapper adapts plain SQLAlchemy metadata to that interface.
        self.metadata = metadata


# Need to find a way to format the postgresql connection string to have two % signs wherever there is one
# Answer: edit it in ./migrations/env.py
migrate = Migrate(app, DB(Base.metadata))
manager.add_command('db', MigrateCommand)


@manager.command
def seed():
    """Insert 25 lorem-ipsum test entries into the database."""
    content = """Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum."""
    for i in range(25):
        entry = Entry(title="Test Entry #{}".format(i), content=content)
        session.add(entry)
    # NOTE(review): original indentation was lost — commit may have been
    # inside the loop; placed after it here (single transaction). Confirm.
    session.commit()


@manager.command
def adduser():
    # NOTE(review): fragment — body continues outside the visible chunk.
#!/usr/bin/env python from flask.ext.script import Manager, Server, Shell from flask.ext.migrate import Migrate, MigrateCommand from app import app, db, models, views import pytest migrate = Migrate(app, db, directory=app.config['ALEMBIC_MIGRATE_DIR']) # Wire up the DB for normal server run class MyServer(Server): def run(self, *args, **kwargs): db.init_app(app) super(MyServer, self).run(*args, **kwargs) manager = Manager(app) manager.add_command('db', MigrateCommand) manager.add_command('runserver', MyServer) # Run the testsuite instead of the normal server @manager.option('-v', '--verbose', '-e', '--echo', dest='echo', default=False, action='store_true', help="Echo generated SQL to stderr") def test(echo):
import datetime
from flask.ext.script import Manager, prompt_bool
from flask.ext.migrate import Migrate, MigrateCommand
from formspree import create_app, app
from formspree.app import redis_store
from formspree.forms.helpers import MONTHLY_COUNTER_KEY
from formspree.forms.models import Form

# NOTE(review): `os` is used below but no `import os` is visible in this
# chunk — confirm it is imported elsewhere in the file.
forms_app = create_app()
manager = Manager(forms_app)

# add flask-migrate commands
Migrate(forms_app, app.DB)
manager.add_command('db', MigrateCommand)


@manager.command
def run_debug(port=os.getenv('PORT', 5000)):
    '''runs the app with debug flag set to true'''
    # NOTE: the default is captured once, at import time, not per call.
    forms_app.run(host='0.0.0.0', debug=True, port=int(port))


# NOTE(review): fragment — the command these options decorate is outside
# the visible chunk.
@manager.option('-H', '--host', dest='host', default=None, help='referer hostname')
@manager.option('-e', '--email', dest='email', default=None, help='form email')
def create_app(mode, config='config.yml', log_level=None, **kwargs):
    """Application factory for SimpleCoin.

    mode selects the run flavor ("manage", "webserver" or "scheduler") and
    controls which log files, sentry settings and scheduler jobs are wired
    up. config is a YAML file path (absolute, or relative to the project
    root); kwargs override individual config values. Returns the
    configured Flask app.
    """
    # Initialize our flask application
    # =======================================================================
    app = Flask(__name__, static_folder='../static', static_url_path='/static')

    # Set our template path and configs
    # =======================================================================
    app.jinja_loader = FileSystemLoader(os.path.join(root, 'templates'))
    config_vars = dict(manage_log_file="manage.log",
                       webserver_log_file="webserver.log",
                       scheduler_log_file=None,
                       log_level='INFO',
                       worker_hashrate_fold=86400)
    if os.path.isabs(config):
        config_path = config
    else:
        config_path = os.path.join(root, config)
    # FIX: the original `yaml.load(open(config_path))` leaked the file
    # handle and used the unsafe full loader; the config is plain data.
    with open(config_path) as config_file:
        config_vars.update(yaml.safe_load(config_file))
    config_vars.update(**kwargs)

    # Objectizes all configurations
    # =======================================================================
    ConfigChecker(config_vars, app)

    # Setup logging
    # =======================================================================
    del app.logger.handlers[0]
    app.logger.setLevel(logging.NOTSET)
    log_format = logging.Formatter(
        '%(asctime)s [%(name)s] [%(levelname)s]: %(message)s')
    # Explicit log_level argument wins; otherwise fall back to the config.
    log_level = getattr(logging, str(log_level), app.config['log_level'])
    logger = logging.getLogger()
    logger.setLevel(log_level)
    handler = logging.StreamHandler(stream=sys.stdout)
    handler.setFormatter(log_format)
    logger.addHandler(handler)

    # Handle optionally adding log file writers for each different run mode
    # =======================================================================
    if mode == "manage" and app.config['manage_log_file']:
        hdlr = logging.FileHandler(app.config['manage_log_file'])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)
    if mode == "scheduler" and app.config['scheduler_log_file']:
        hdlr = logging.FileHandler(app.config['scheduler_log_file'])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)
    if mode == "webserver" and app.config['webserver_log_file']:
        hdlr = logging.FileHandler(app.config['webserver_log_file'])
        hdlr.setFormatter(log_format)
        logger.addHandler(hdlr)

    logging.getLogger("gunicorn.access").setLevel(logging.WARN)
    logging.getLogger("requests.packages.urllib3.connectionpool").setLevel(
        logging.INFO)

    # Add the debug toolbar if we're in debug mode
    # =======================================================================
    if app.config['DEBUG'] and mode == "webserver":
        # Log all stdout and stderr when in debug mode for convenience
        class LoggerWriter:
            def __init__(self, logger, level):
                self.logger = logger
                self.level = level

            def write(self, message):
                if message != '\n':
                    self.logger.log(self.level, message)

        sys.stdout = LoggerWriter(app.logger, logging.DEBUG)
        sys.stderr = LoggerWriter(app.logger, logging.DEBUG)

    # Register the DB + Cache
    # =======================================================================
    db.init_app(app)
    # Redis connection configuration
    cache_config = {'CACHE_TYPE': 'redis'}
    cache_config.update(app.config.get('main_cache', {}))
    cache.init_app(app, config=cache_config)

    # Redis connection for persisting application information
    app.redis = Redis(**app.config.get('redis_conn', {}))

    sentry = False
    if app.config.get('sentry'):
        try:
            from raven.contrib.flask import Sentry
            sentry = Sentry()
        except Exception:
            app.logger.error("Unable to initialize sentry!")

    # Helpful global vars
    # =======================================================================
    app.SATOSHI = Decimal('0.00000001')
    app.MAX_DECIMALS = 28

    # Configure app for running manage.py functions
    # =======================================================================
    if mode == "manage":
        # Initialize the migration settings
        Migrate(app, db)
        # Disable for management mode
        if sentry:
            sentry = False

    # Configure app for serving web content
    # =======================================================================
    elif mode == "webserver":
        # try and fetch the git version information
        try:
            output = subprocess.check_output("git show -s --format='%ci %h'",
                                             shell=True).strip().rsplit(" ", 1)
            app.config['hash'] = output[1]
            app.config['revdate'] = output[0]
        # celery won't work with this, so set some default
        except Exception:
            app.config['hash'] = ''
            app.config['revdate'] = ''

        # Dynamically add all the filters in the filters.py file
        for name, func in inspect.getmembers(filters, inspect.isfunction):
            app.jinja_env.filters[name] = func

        app.logger.info("Starting up SimpleCoin!\n{}".format("=" * 100))

    # Configure app for running scheduler.py functions + instantiate scheduler
    # =======================================================================
    elif mode == "scheduler":
        if sentry and 'SENTRY_NAME' in app.config:
            app.config['SENTRY_NAME'] = app.config['SENTRY_NAME'] + "_scheduler"
        app.logger.info("=" * 80)
        app.logger.info("SimpleCoin cron scheduler starting up...")
        setproctitle.setproctitle("simplecoin_scheduler")

        # Make app accessible from out monkey patched code. Messy....
        ThreadPool.app = app
        sched = Scheduler(standalone=True)

        # monkey patch the thread pool for flask contexts
        ThreadPool._old_run_jobs = ThreadPool._run_jobs

        def _run_jobs(self, core):
            self.app.logger.debug("Starting patched threadpool worker!")
            with self.app.app_context():
                ThreadPool._old_run_jobs(self, core)
        ThreadPool._run_jobs = _run_jobs

        # All these tasks actually change the database, and shouldn't
        # be run by the staging server
        if not app.config.get('stage', False):
            # every minute at 55 seconds after the minute
            sched.add_cron_job(sch.generate_credits, second=55)
            sched.add_cron_job(sch.create_trade_req, args=("sell", ),
                               minute=1, hour="0,6,12,18")
            sched.add_cron_job(sch.create_trade_req, args=("buy", ),
                               minute=1, hour="0,6,12,18")
            # every minute at 55 seconds after the minute
            sched.add_cron_job(sch.collect_minutes, second=35)
            sched.add_cron_job(sch.collect_ppagent_data, second=40)
            # every five minutes 20 seconds after the minute
            sched.add_cron_job(sch.compress_minute,
                               minute='0,5,10,15,20,25,30,35,40,45,50,55',
                               second=20)
            # every hour 2.5 minutes after the hour
            sched.add_cron_job(sch.compress_five_minute, minute=2, second=30)
            # every minute 2 seconds after the minute
            sched.add_cron_job(sch.update_block_state, second=2)
            # every day
            sched.add_cron_job(sch.update_block_state, hour=0, second=0,
                               minute=3)
        else:
            app.logger.info(
                "Stage mode has been set in the configuration, not "
                "running scheduled database altering cron tasks")

        sched.add_cron_job(sch.update_online_workers,
                           minute='0,5,10,15,20,25,30,35,40,45,50,55',
                           second=30)
        sched.add_cron_job(sch.cache_user_donation, minute='0,15,30,45',
                           second=15)
        sched.add_cron_job(sch.server_status, second=15)
        # every 15 minutes 2 seconds after the minute
        sched.add_cron_job(sch.leaderboard,
                           minute='0,5,10,15,20,25,30,35,40,45,50,55',
                           second=30)
        app.scheduler = sched

    if sentry:
        sentry.init_app(app, logging=True, level=logging.ERROR)

    # Route registration
    # =======================================================================
    from . import views, models, api, rpc_views
    app.register_blueprint(views.main)
    app.register_blueprint(rpc_views.rpc_views)
    app.register_blueprint(api.api, url_prefix='/api')

    return app
import sys

from flask.ext.script import Manager, Command, Option
from flask.ext.migrate import Migrate, MigrateCommand
from sqlalchemy.orm.exc import NoResultFound

from models import db, Library
from app import create_app

app_ = create_app()
app_.config.from_pyfile('config.py')
try:
    # Optional machine-specific overrides; absence is not an error.
    app_.config.from_pyfile('local_config.py')
except IOError:
    pass

migrate = Migrate(app_, db)
manager = Manager(app_)


class CreateDatabase(Command):
    """
    Creates the database based on models.py
    """

    @staticmethod
    def run(app=app_):
        """
        Creates the database in the application context
        :return: no return
        """
        with app.app_context():
            db.create_all()
#!/usr/bin/env python """ app ~~~~~ Launch point for the server. """ import logging from flask.ext.script import Manager from flask.ext.migrate import Migrate, MigrateCommand from routes import * from common import app, db from config import Development app.config.from_object(Development) migrate = Migrate(app, db) manager = Manager(app) manager.add_command('db', MigrateCommand) app.logger.addHandler(logging.StreamHandler()) app.logger.setLevel(logging.INFO) if __name__ == '__main__': manager.run()
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand

from config import SQLALCHEMY_DATABASE_URI
from run import application
from app.basemodels import db

# Command-line wiring: expose Flask-Migrate's `db` command group
# (init / migrate / upgrade / ...) through the Flask-Script manager.
manager = Manager(application)
manager.add_command('db', MigrateCommand)
migrate = Migrate(application, db)

if __name__ == '__main__':
    manager.run()
return password = '' password_2 = '' while not (password and password_2) or password != password_2: password = getpass('Password: '******'Re-enter password: '******'Good') rank2 = Ranks(rank=2, rankdesc='Ok') rank3 = Ranks(rank=3, rankdesc='Bad') session.add_all([rank1, rank2, rank3]) session.commit() class DB(object): def __init__(self, metadata): self.metadata = metadata migrate = Migrate(piewhole, DB(Base.metadata)) manager.add_command('db', MigrateCommand) if __name__ == '__main__': logging.info("Application start") manager.run()
def upgradeDB():
    """Apply any pending Alembic migrations to the database.

    Binds Flask-Migrate to the app, then runs the equivalent of
    `manage.py db upgrade` inside an application context.
    """
    Migrate(app, db)
    with app.app_context():
        flask.ext.migrate.upgrade()