# Alembic migration-environment setup (this chunk appears to be part of an
# Alembic env.py — NOTE(review): confirm against the file's full path/name).
# The database host is injected via a Docker-style linked-container env var.
host = os.getenv("POSTGRES_PORT_5432_TCP_ADDR")
# Point Alembic's sqlalchemy.url at the linked Postgres instance.
# NOTE(review): the "*****:*****" user:password part looks like redacted
# credentials from an extraction/sanitization pass — verify the real values.
# NOTE(review): "postgres://" is a legacy dialect name; SQLAlchemy 1.4+
# only accepts "postgresql://" — confirm the pinned SQLAlchemy version.
config.set_section_option("alembic", "sqlalchemy.url",
                          "postgres://*****:*****@%s/bagbunker" % host)

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# target_metadata = None
import marv

# Load format/job plugins so all model tables are registered on the shared
# metadata before autogenerate inspects it.
marv.load_formats()
marv.load_jobs()
# Hand marv's SQLAlchemy MetaData to Alembic for 'autogenerate' support.
target_metadata = marv.db.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.

def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.
from functools import partial
from threading import Thread

from werkzeug import release_local

from marv import create_app, load_formats, load_jobs
from marv.globals import _job_ctx_stack
from marv.listing import populate_listing_cache, trigger_update_listing_entries
from marv.log import loglevel_option
from marv.model import db, Fileset, Jobfile, Jobrun
from marv.storage import Storage
from marv.registry import JOB
from marv._utils import make_async_job, async_job_milker, Done

# Import-time side effect: register all format and job plugins so the rest
# of this module sees a fully populated registry.
load_formats()
load_jobs()


def config_option(key):
    """Build a click-style option callback that stashes *value* under
    ``ctx.params['config'][key]`` (creating the 'config' dict on first use)
    and passes the value through unchanged.

    NOTE(review): ``ctx``/``param``/``value`` match the click callback
    signature — confirm this module uses click for its CLI.
    """
    def callback(ctx, param, value):
        # setdefault creates the shared 'config' dict once, then each
        # callback writes its own key into it.
        ctx.params.setdefault('config', dict())[key] = value
        return value
    return callback


def read_config(path):
    """Parse an INI file at *path* into a nested dict keyed by the
    dot-separated parts of each section name.

    NOTE(review): ``ConfigParser`` and ``reduce`` are not imported in this
    visible chunk (Python 2 idioms) — presumably imported earlier in the
    file; confirm.
    """
    parser = ConfigParser.RawConfigParser()
    # parser.read silently ignores a missing file, so a bad path yields
    # an empty config rather than an error.
    parser.read([path])
    cfg = {}
    for section in parser.sections():
        # Walk/create one nested dict level per dot-separated name part,
        # e.g. section "a.b" yields cfg['a']['b'].
        seccfg = reduce(lambda acc, x: acc.setdefault(x, {}),
                        section.split('.'), cfg)