def _config_init(script_path: str, dsn: str) -> Config:
    """Build an Alembic Config for *script_path* using *dsn* as the DB URL.

    Also sets a timestamped filename template for generated revisions and
    registers ``black`` and ``isort`` as post-write hooks.
    """
    log.info("Alembic config initialization", script_location=script_path)

    cfg = Config()
    cfg.set_main_option("script_location", script_path)
    cfg.set_main_option("sqlalchemy.url", dsn)
    # '%%' escapes are required because ConfigParser interpolates single '%'.
    cfg.set_main_option(
        "file_template",
        "%%(year)d_%%(month).2d_%%(day).2d_"
        "%%(hour).2d%%(minute).2d%%(second).2d_%%(slug)s",
    )

    # Run black and isort over every freshly generated revision file.
    cfg.set_section_option("post_write_hooks", "hooks", "black isort")
    for hook in ("black", "isort"):
        cfg.set_section_option("post_write_hooks", hook + ".type", "console_scripts")
        cfg.set_section_option("post_write_hooks", hook + ".entrypoint", hook)
    return cfg
Beispiel #2
0
def db(request, monkeypatch):
    """Session-wide test database."""
    # Start from a clean slate if a previous run left the file behind.
    if os.path.exists(TESTDB_PATH):
        os.unlink(TESTDB_PATH)

    from wordbook.infra import config
    from wordbook.infra import db

    test_dsn = 'sqlite:///' + TESTDB_PATH
    monkeypatch.setattr(config, 'SQLALCHEMY_ENGINE', test_dsn)

    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    engine = create_engine(
        test_dsn,
        echo=config.SQLALCHEMY_ECHO,
        echo_pool=config.SQLALCHEMY_ECHO_POOL)
    session_factory = sessionmaker(bind=engine)

    def get_session():
        # Hand out sessions bound to the test engine instead of the real one.
        return session_factory()

    monkeypatch.setattr(db, 'get_session', get_session)

    alembic_config = Config('alembic.ini')
    alembic_config.set_section_option(
        'alembic', 'sqlalchemy.url', test_dsn)

    def teardown():
        # Roll the schema back to the base test revision, then drop the file.
        downgrade(alembic_config, '18554c40c9e')
        os.unlink(TESTDB_PATH)

    upgrade(alembic_config, 'head')
    request.addfinalizer(teardown)
    return
Beispiel #3
0
 def alembic_config(self):
     """Return an Alembic Config wired to this instance's database engine."""
     cfg = Config()
     cfg.set_section_option('alembic', 'script_location', 'alembic')
     cfg.set_section_option('alembic', 'sqlalchemy.url', str(self.db.engine.url))
     return cfg
Beispiel #4
0
    def run_alembic_migration(self):
        """ Migrate to latest Alembic revision if not up-to-date.

        Runs the Alembic env script once with a no-op migration function
        purely to inspect the current revision; if the database is not at a
        head revision, performs a real ``upgrade`` to ``head``.
        """
        migration_required = False

        def migrate_if_required(rev, context):
            # BUG FIX: without ``nonlocal`` this assignment created a new
            # local inside the closure, so the outer flag stayed False and
            # the upgrade below could never run.
            nonlocal migration_required
            rev = script.get_revision(rev)
            if not (rev and rev.is_head):
                migration_required = True

            # Return no migration steps: this pass only inspects state.
            return []

        config = Config(os.path.join(os.path.dirname(__file__), 'alembic.ini'))
        config.set_section_option('alembic', 'script_location',
                                  os.path.join(os.path.dirname(__file__), 'migrations'))
        config.set_main_option('sqlalchemy.url',
                               'sqlite:///' + self.filepath)
        script = ScriptDirectory.from_config(config)

        with EnvironmentContext(config, script, fn=migrate_if_required):
            script.run_env()

        if migration_required:
            logging.info('Migrating SQLite database to latest revision')
            alembic.command.upgrade(config, 'head')
        else:
            logging.info('SQLite database is on the latest revision')
Beispiel #5
0
 def _get_alembic_config(self):
     """Build an Alembic Config rooted next to this module, using our DSN."""
     here = os.path.dirname(__file__)
     cfg = Config(os.path.join(here, 'alembic.ini'))
     cfg.set_section_option('alembic', 'script_location',
                            os.path.join(here, 'migrations'))
     cfg.set_main_option('sqlalchemy.url', self.connect_string)
     return cfg
Beispiel #6
0
    def run_alembic_migration(self):
        """ Migrate to latest Alembic revision if not up-to-date.

        Inspects the current revision via a no-op env run, then upgrades to
        ``head`` only when the database is behind.
        """
        migration_required = False

        def migrate_if_required(rev, context):
            # BUG FIX: ``nonlocal`` is required here — the original
            # assignment bound a closure-local variable, leaving the outer
            # flag永False so the upgrade never executed.
            nonlocal migration_required
            rev = script.get_revision(rev)
            if not (rev and rev.is_head):
                migration_required = True

            # No migration steps: this pass is inspection-only.
            return []

        config = Config(os.path.join(os.path.dirname(__file__), 'alembic.ini'))
        config.set_section_option(
            'alembic', 'script_location',
            os.path.join(os.path.dirname(__file__), 'migrations'))
        config.set_main_option('sqlalchemy.url', 'sqlite:///' + self.filepath)
        script = ScriptDirectory.from_config(config)

        with EnvironmentContext(config, script, fn=migrate_if_required):
            script.run_env()

        if migration_required:
            logging.info('Migrating SQLite database to latest revision')
            alembic.command.upgrade(config, 'head')
        else:
            logging.info('SQLite database is on the latest revision')
Beispiel #7
0
def _get_alembic_config():
    """Return an Alembic Config for the ini file next to this module,
    with the database URL taken from project settings."""
    from alembic.config import Config
    from gam.settings import DATABASE_URL

    here = path.dirname(path.abspath(__file__))
    config = Config(path.join(here, 'alembic.ini'))
    config.set_section_option('alembic', 'sqlalchemy.url', DATABASE_URL)
    return config
Beispiel #8
0
def setup(ctx, force):
    """Upgrade the build database schema to the latest Alembic revision."""
    # TODO: change this
    pkg_dir = Path(bcpc_build.__file__).parent.parent
    config_file = pkg_dir.joinpath('alembic.ini').as_posix()
    # TODO(kamidzi): check for file
    alembic_cfg = Config(config_file)
    alembic_cfg.set_section_option('alembic', 'sqlalchemy.url', config.db.url)
    command.upgrade(alembic_cfg, 'head')
def clean_db(reset_db, ckan_config):
    """Reset the CKAN database, then apply this extension's migrations."""
    reset_db()

    cfg = Config(_resolve_alembic_config("spc"))
    # Point the migrations at the same database CKAN is configured to use.
    cfg.set_section_option("alembic", "sqlalchemy.url",
                           ckan_config.get("sqlalchemy.url"))
    command.upgrade(cfg, "head")
Beispiel #10
0
    def setup_tables(cls, engine):
        """Create all model tables and stamp the schema at Alembic head."""
        with engine.connect() as conn:
            trans = conn.begin()
            _Model.metadata.create_all(engine)
            # Record the latest revision so future upgrades start from head.
            cfg = Config()
            for option, value in (("script_location", "alembic"),
                                  ("sqlalchemy.url", str(engine.url))):
                cfg.set_section_option("alembic", option, value)
            command.stamp(cfg, "head")
            trans.commit()
Beispiel #11
0
    def setup_tables(cls, engine):
        """Create the full schema and mark it as the latest Alembic revision."""
        with engine.connect() as conn:
            trans = conn.begin()
            _Model.metadata.create_all(engine)
            # Stamping head makes Alembic treat the fresh schema as current.
            cfg = Config()
            cfg.set_section_option('alembic', 'script_location', 'alembic')
            cfg.set_section_option('alembic', 'sqlalchemy.url',
                                   str(engine.url))
            command.stamp(cfg, "head")
            trans.commit()
Beispiel #12
0
def create_session(database_uri: str, is_test=False) -> Session:
    """Create a SQLAlchemy ``Session`` bound to *database_uri*.

    Side effects: creates all tables (and stamps Alembic head) when the
    ``accounts`` table is missing; outside of tests, asserts the database is
    at the latest migration.

    :param database_uri: SQLAlchemy database URL.
    :param is_test: when True, point Alembic at the test migrations directory
        and skip the pending-migrations check.
    :raises AssertionError: if migrations are pending (non-test only).
    """
    if database_uri.startswith("sqlite"):
        engine = create_engine(database_uri,
                               echo=False,
                               connect_args={"check_same_thread": False})
    else:
        # Force mysql driver to be utf8mb4
        if database_uri.startswith(
                "mysql") and "charset=utf8mb4" not in database_uri:
            # BUG FIX: join with '&' when the URI already carries a query
            # string — a second '?' would produce an invalid URL.
            sep = "&" if "?" in database_uri else "?"
            database_uri += sep + "charset=utf8mb4"

        engine = create_engine(
            database_uri,
            echo=False,
            pool_pre_ping=True,
            pool_recycle=600,
            pool_size=10,
            pool_use_lifo=True,
            poolclass=QueuePool,
        )

    alembic_cfg = Config(
        str((Path(__file__).parent.parent.parent / "alembic.ini").absolute()))

    # Hot-patch migration location if in test environment
    if is_test:
        alembic_cfg.set_section_option(
            "alembic", "script_location",
            str(Path(__file__).parent / "migrations"))

    # Create tables if accounts does not exist
    if not engine.dialect.has_table(engine, "accounts"):
        models.Base.metadata.create_all(engine)
        command.stamp(alembic_cfg, "head")

    if not is_test:
        # Check if there are any pending migrations
        with engine.begin() as con:
            script_dir = script.ScriptDirectory.from_config(alembic_cfg)
            ctx = migration.MigrationContext.configure(con)

            assert set(ctx.get_current_heads()) == set(
                script_dir.get_heads()
            ), 'There are pending migrations, run them via "alembic upgrade heads"'

    # Bind engine and create session
    models.Base.metadata.bind = engine
    session: Session = sessionmaker(bind=engine)()

    return session
Beispiel #13
0
def get_alembic_settings(config_uri):
    """ Get alembic settings from the config file.

    :param config_uri: an .ini config file
    :return: an ``alembic.config.Config`` object
    """
    # Pull the database URL from the pyramid application settings and
    # override whatever the ini file declares.
    settings = get_appsettings(config_uri)
    cfg = Config(config_uri)
    cfg.set_section_option('alembic', 'sqlalchemy.url',
                           settings['sqlalchemy.url'])
    return cfg
Beispiel #14
0
def main(argv=sys.argv):
    """Initialize the database and stamp it at the latest Alembic revision.

    :param argv: CLI arguments; ``argv[1]`` is the config URI, the rest are
        ``key=value`` overrides (``--force`` drops existing tables).
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = load_full_settings(config_uri, options=options)

    engine = engine_from_config(settings, 'sqlalchemy.')
    with engine.begin() as connection:
        initdb(connection, drop_all='--force' in options)

    # generate the Alembic version table and stamp it with the latest revision
    alembic_cfg = Config('alembic.ini')
    # str() is the idiomatic spelling of engine.url.__str__().
    alembic_cfg.set_section_option(
        'alembic', 'sqlalchemy.url', str(engine.url))
    command.stamp(alembic_cfg, 'head')
Beispiel #15
0
def ensure_database(dsn=TEST_DB_DSN):
    """Ensure that a clean database is available for tests."""
    engine = sqlalchemy.create_engine(dsn)
    ini_path = os.path.abspath(
        os.path.join(os.getcwd(), 'templates/alembic.ini'))
    # Capture alembic's output instead of printing it during test setup.
    cfg = Config(ini_path, output_buffer=io.BytesIO())
    cfg.set_section_option('alembic', 'sqlalchemy.url', dsn)
    # Empty loggers section stops alembic from reconfiguring logging.
    cfg.set_section_option('loggers', 'keys', '')
    with engine.begin() as connection:
        cfg.attributes['connection'] = connection
        # Temporarily silence migration log noise.
        logging.disable(logging.ERROR)
        command.upgrade(cfg, 'head')
        logging.disable(logging.NOTSET)
        # Wipe rows children-first so tests start from empty tables.
        for table in reversed(METADATA.sorted_tables):
            connection.execute(table.delete())
    engine.dispose()
Beispiel #16
0
def main(argv=sys.argv):
    """Initialize the database and stamp it at the latest Alembic revision.

    :param argv: CLI arguments; ``argv[1]`` is the config URI, the rest are
        ``key=value`` overrides (``--force`` drops existing tables).
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = load_full_settings(config_uri, options=options)

    engine = engine_from_config(settings, 'sqlalchemy.')
    with engine.begin() as connection:
        initdb(connection, drop_all='--force' in options)

    # generate the Alembic version table and stamp it with the latest revision
    alembic_cfg = Config('alembic.ini')
    # str() is the idiomatic spelling of engine.url.__str__().
    alembic_cfg.set_section_option('alembic', 'sqlalchemy.url',
                                   str(engine.url))
    command.stamp(alembic_cfg, 'head')
Beispiel #17
0
def create_config(app):
    """Programmatically create Alembic config"""
    cfg = Config(stdout=StringIO())
    # Use the app's own template directory instead of Alembic's default.
    cfg.get_template_directory = get_template_directory
    migrations = app.get("migrations_dir") or os.path.join(
        app["cwd"], "migrations")
    cfg.set_main_option("script_location", migrations)
    cfg.config_file_name = os.path.join(migrations, "alembic.ini")
    db = app["db"]
    # Single database section named "default", registered in main options.
    cfg.set_section_option("default", "sqlalchemy.url",
                           str(db.sync_engine.url))
    cfg.set_main_option("databases", "default")
    # create empty logging section to avoid raising errors in env.py
    cfg.set_section_option("logging", "path", "")
    cfg.metadata = dict(default=db.metadata)
    return cfg
Beispiel #18
0
    def setup_engine(cls):
        """Create master/slave databases, build the schema, and stamp head."""
        cls.db_master = _make_db()
        cls.db_slave = _make_db()

        engine = cls.db_master.engine
        with engine.connect() as conn:
            trans = conn.begin()
            _Model.metadata.create_all(engine)
            # Stamp the freshly created schema as the latest revision.
            cfg = Config()
            cfg.set_section_option('alembic', 'script_location', 'alembic')
            cfg.set_section_option('alembic', 'sqlalchemy.url',
                                   str(engine.url))
            command.stamp(cfg, "head")
            trans.commit()
Beispiel #19
0
def create_db_with_alembic(app, request):
    """Bind the Flask app to the DB and bring the schema up via migrations."""
    # bind the app the database instance
    _db.app = app

    cfg = AlembicConfig(_project_root / 'alembic.ini')
    # override settings & run migrations
    cfg.set_section_option('alembic', 'sqlalchemy.url',
                           app.config['SQLALCHEMY_DATABASE_URI'])
    alembic.command.upgrade(cfg, 'head')

    def teardown():
        _db.session.remove()
        _db.drop_all()
        # drop alembic's revisions table, so next test can re-run these
        # migrations
        _db.session.execute('drop table if exists alembic_version')

    request.addfinalizer(teardown)

    return _db
Beispiel #20
0
    def setup_tables(cls, engine):
        """Create the schema, stamp Alembic head, and seed the test API keys."""
        with engine.connect() as conn:
            trans = conn.begin()
            _Model.metadata.create_all(engine)
            # Now stamp the latest alembic version
            cfg = Config()
            for option, value in (('script_location', 'alembic'),
                                  ('sqlalchemy.url', str(engine.url)),
                                  ('sourceless', 'true')):
                cfg.set_section_option('alembic', option, value)
            command.stamp(cfg, 'head')

            # always add a test API key
            keys = ApiKey.__table__
            conn.execute(keys.delete())
            conn.execute(keys.insert().values(
                valid_key='test', allow_fallback=False, allow_locate=True))
            conn.execute(keys.insert().values(
                valid_key='export', allow_fallback=False, allow_locate=False))

            trans.commit()
Beispiel #21
0
    def setup_tables(cls, engine):
        """Create the schema, stamp head, and insert the standard test keys."""
        with engine.connect() as conn:
            trans = conn.begin()
            _Model.metadata.create_all(engine)
            # Mark the new schema as current so migrations start from head.
            cfg = Config()
            cfg.set_section_option('alembic', 'script_location', 'alembic')
            cfg.set_section_option('alembic', 'sqlalchemy.url',
                                   str(engine.url))
            cfg.set_section_option('alembic', 'sourceless', 'true')
            command.stamp(cfg, 'head')

            # always add a test API key
            keys = ApiKey.__table__
            conn.execute(keys.delete())
            conn.execute(keys.insert().values(
                valid_key='test',
                shortname='test',
                allow_fallback=False,
                allow_locate=True,
                log_locate=True,
                log_region=True,
                log_submit=True,
            ))
            conn.execute(keys.insert().values(
                valid_key='export',
                shortname='export',
                allow_fallback=False,
                allow_locate=False,
                log_locate=False,
                log_region=False,
                log_submit=False,
            ))

            trans.commit()
Beispiel #22
0
    def setup_tables(cls, engine):
        """Create the schema, stamp Alembic head, and seed fallback test keys."""
        with engine.connect() as conn:
            trans = conn.begin()
            _Model.metadata.create_all(engine)
            # Record the freshly built schema as the latest revision.
            cfg = Config()
            cfg.set_section_option('alembic', 'script_location', 'alembic')
            cfg.set_section_option('alembic', 'sqlalchemy.url',
                                   str(engine.url))
            cfg.set_section_option('alembic', 'sourceless', 'true')
            command.stamp(cfg, 'head')

            # always add a test API key
            keys = ApiKey.__table__
            conn.execute(keys.delete())
            conn.execute(keys.insert().values(
                valid_key='test',
                allow_fallback=False,
                allow_locate=True,
                fallback_name='fall',
                fallback_url='http://127.0.0.1:9/?api',
                fallback_ratelimit=10,
                fallback_ratelimit_interval=60,
                fallback_cache_expire=60,
            ))
            conn.execute(keys.insert().values(
                valid_key='export',
                allow_fallback=False,
                allow_locate=False,
            ))

            trans.commit()
    def alembic_config(self):
        """Build an Alembic Config from the files under ``self.alembic_path``."""
        cfg = Config(join(self.alembic_path, 'alembic.ini'))
        cfg.set_section_option('alembic', 'version_locations',
                               join(self.alembic_path, 'versions'))
        cfg.set_section_option('alembic', 'script_location',
                               self.alembic_path)
        cfg.set_section_option('alembic', 'sqlalchemy.url',
                               str(self.engine.url))
        return cfg
Beispiel #24
0
    LOCAL_DATABASE = "test"

    app.secret_key = "ME@deepgully"
    app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///%s.db" % LOCAL_DATABASE
    # app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://*****:*****@127.0.0.1:3306/%s'%LOCAL_DATABASE

elif RUNTIME_ENV in ("gae", "gae_dev"):

    app.secret_key = "ME@deepgully+GAE"


# On BAE/local runtimes, prepare an Alembic config that reuses the
# application's configured database URI for running migrations.
if RUNTIME_ENV in ("bae", "local"):
    from alembic.config import Config

    MIGRATE_CFG = Config("alembic.ini")
    MIGRATE_CFG.set_section_option("alembic", "sqlalchemy.url", app.config["SQLALCHEMY_DATABASE_URI"])
    # Expose the migration config through app config for later use.
    app.config["MIGRATE_CFG"] = MIGRATE_CFG


# Site branding / account defaults. The starred values look scrubbed by the
# original author — TODO confirm real values are injected elsewhere.
app.config["SiteTitle"] = "boolog"
app.config["SiteSubTitle"] = ""
app.config["OwnerEmail"] = "*****@*****.**"
app.config["DefaultPassword"] = "******"


######################################
## User
######################################
# NOTE(review): flask.ext.* namespace imports require Flask < 1.0; modern
# code imports flask_login directly.
from flask.ext.login import LoginManager

login_manager = LoginManager()
Beispiel #25
0
    def get_config(self):
        '''
        Programmatically create Alembic config. To determine databases,
        DATASTORE from project's config file is used. To customize Alembic
        use MIGRATIONS in you config file.

        Example:
        MIGRATIONS = {
            'alembic': {
                'script_location': '<path>',
                'databases': '<db_name1>,<db_name2>',
            },
            '<db_name1>': {
                'sqlalchemy.url': 'driver://*****:*****@localhost/dbname',
            },
            '<db_name2>': {
                'sqlalchemy.url': 'driver://*****:*****@localhost/dbname',
            },
            'logging': {
                'path': '<path_to_logging_config>',
            }
        }

        For more information about possible options, please visit Alembic
        documentation:
        https://alembic.readthedocs.org/en/latest/index.html
        '''
        from alembic.config import Config

        # Alembic must use lux's bundled template, so redirect the template
        # lookup at class level (a necessary hack).
        Config.get_template_directory = self.get_lux_template_directory

        # Migrations live in the main project directory by default.
        migration_dir = os.path.join(self.app.meta.path, 'migrations')
        cfg = Config()
        cfg.set_main_option('script_location', migration_dir)

        # One config section per configured database engine.
        odm = self.app.odm()
        databases = []
        for name, engine in odm.keys_engines():
            name = name or 'default'
            databases.append(name)
            cfg.set_section_option(name, 'sqlalchemy.url', str(engine.url))
        cfg.set_main_option("databases", ','.join(databases))

        # Empty logging section keeps env.py from raising.
        cfg.set_section_option('logging', 'path', '')
        # Metadata is required for the `auto` command.
        self.get_metadata(cfg)

        # Project-level MIGRATIONS settings may override anything set above
        # (especially a different migration directory).
        overrides = self.app.config.get('MIGRATIONS')
        if overrides:
            for section in overrides.keys():
                for key, value in overrides[section].items():
                    if section == 'alembic':
                        cfg.set_main_option(key, value)
                    else:
                        cfg.set_section_option(section, key, value)

        return cfg
Beispiel #26
0
DB_DDL_URI = os.environ.get('DB_DDL_URI')


def _default_db_uri(user, pwd, host):
    """Build a MySQL URI from the module-level connection settings.

    Deduplicates the format string that was previously repeated three times.
    """
    return 'mysql+%s://%s:%s@%s:%s/%s' % (
        DB_LIBRARY, user, pwd, host, DB_PORT, DB_NAME)


if not DB_RO_URI:
    DB_RO_URI = _default_db_uri(DB_RO_USER, DB_RO_PWD, DB_RO_HOST)

if not DB_RW_URI:
    DB_RW_URI = _default_db_uri(DB_RW_USER, DB_RW_PWD, DB_RW_HOST)

if not DB_DDL_URI:
    # DDL (schema-changing) connections go through the read-write host.
    DB_DDL_URI = _default_db_uri(DB_DDL_USER, DB_DDL_PWD, DB_RW_HOST)

# Alembic runs with the DDL credentials against the bundled migrations.
ALEMBIC_CFG = AlembicConfig()
ALEMBIC_CFG.set_section_option(
    'alembic', 'script_location', os.path.join(HERE, 'alembic'))
ALEMBIC_CFG.set_section_option(
    'alembic', 'sqlalchemy.url', DB_DDL_URI)

GEOIP_PATH = os.environ.get('GEOIP_PATH')
if not GEOIP_PATH:
    GEOIP_PATH = os.path.join(HERE, 'tests/data/GeoIP2-City-Test.mmdb')

MAP_TOKEN = os.environ.get('MAP_TOKEN')

REDIS_HOST = os.environ.get('REDIS_HOST')
REDIS_PORT = os.environ.get('REDIS_PORT', '6379')
# Use a separate Redis DB during tests to avoid clobbering dev data.
REDIS_DB = '1' if TESTING else '0'
REDIS_URI = os.environ.get('REDIS_URI')
if REDIS_HOST and not REDIS_URI:
    REDIS_URI = 'redis://%s:%s/%s' % (REDIS_HOST, REDIS_PORT, REDIS_DB)
Beispiel #27
0
else:
    print 'alembic_version drop successfully.'

# NOTE(review): Python 2 syntax (print statements) — this fragment predates
# Python 3 and must be run under a py2 interpreter.
# delete `migration` dir if exists.
root_path = get_root_path()
migration_dirname = os.path.join(root_path, 'migration')
try:
    shutil.rmtree(migration_dirname)
except Exception:
    # Best-effort removal: the directory may simply not exist yet.
    print 'migration dir drop failed.'
else:
    print 'migration dir drop successfully.'

# config
# Point Alembic at the project-root ini and override the DB URL from
# application config.
alembic_config = Config(os.path.join(root_path, 'alembic.ini'))
alembic_config.set_section_option('alembic', 'sqlalchemy.url',
                                  app_config.SQL_SERVER_URL)

# init env
command.init(alembic_config, migration_dirname)
# copy migration_env to replace migration/env.py Good.
# The project ships a customized env.py that must replace the generated one.
migration_env_abspath = os.path.abspath(
    os.path.join(root_path, 'lib/serve/migration_env.py'))
to_env_abspath = os.path.abspath(os.path.join(root_path, 'migration/env.py'))
print migration_env_abspath, to_env_abspath
shutil.copyfile(migration_env_abspath, to_env_abspath)

#
# Autogenerate a revision from the current models, then apply it.
command.revision(alembic_config, autogenerate=True)
command.upgrade(alembic_config, "head")

print '[Migrate] finish!\n'
Beispiel #28
0
class SPCCommand(CkanCommand):
    """
    ckanext-spc management commands.

    Usage::
    paster spc [command]

    Commands::
        db-upgrade    Upgrade database to the state of the latest migration
        fix-missed-licenses
        drop-mendeley-publications
        broken_links_report
        spc_find_duplicate
    ...
    """

    summary = __doc__.split('\n')[0]
    usage = __doc__

    parser = paste.script.command.Command.standard_parser(verbose=True)
    parser.add_option(
        '-c',
        '--config',
        dest='config',
        default='../spc.ini',
        help='Config file to use.'
    )
    parser.add_option(
        '-d',
        '--delay',
        type=int,
        default=1,
        help='Delay between pushes to datastore.'
    )
    parser.add_option(
        '-f',
        '--formats',
        type=str,
        default='xls,csv,xlsx',
        help='Delay between pushes to datastore.'
    )

    def command(self):
        """Dispatch to the sub-command named in ``self.args``."""
        self._load_config()
        model.Session.commit()

        alembic_ini = os.path.normpath(
            os.path.join(__file__, '../../../alembic.ini')
        )
        # Migrations run against the database configured for CKAN itself.
        self.alembic_cfg = Config(alembic_ini)
        self.alembic_cfg.set_section_option(
            'alembic', 'sqlalchemy.url', config.get('sqlalchemy.url')
        )

        cmd_name = (self.args[0] if self.args else '').replace('-', '_')
        handler = getattr(self, cmd_name, None)
        # Unknown (or missing) command: show usage text instead of crashing.
        if handler is None:
            return self.usage
        return handler()

    def db_upgrade(self):
        """Apply all pending migrations up to the latest revision."""
        command.upgrade(self.alembic_cfg, 'head')
        return 'Success'

    def db_downgrade(self):
        """Roll every migration back, returning the schema to base."""
        command.downgrade(self.alembic_cfg, 'base')
        return 'Success'

    def create_datastore(self):
        """Push every eligible resource into the datastore, in pages of 20."""
        resources = model.Session.query(model.Resource)
        step = 20
        user = tk.get_action('get_site_user')({'ignore_auth': True})
        for offset in range(0, resources.count(), step):
            for res in resources.offset(offset).limit(step):
                # Skip resources already in the datastore.
                if res.extras.get('datastore_active'):
                    continue
                # Skip formats outside the configured whitelist.
                if not res.format or res.format.lower() not in self.options.formats.split(','):
                    continue

                print('Pushing <{}> into datastore'.format(res.id))
                tk.get_action('datastore_create')(
                    {'ignore_auth': True, 'user': user['name']},
                    {'resource_id': res.id, 'force': True}
                )

                tk.get_action('datapusher_submit')(
                    {'ignore_auth': True, 'user': user['name']},
                    {'resource_id': res.id, 'force': True}
                )
                # Throttle to avoid overwhelming the datastore service.
                sleep(self.options.delay)

        return 'Success'

    def fix_missed_licenses(self):
        """Set 'notspecified' on packages lacking a license and reindex them."""
        q = model.Session.query(model.Package).filter(
            model.Package.license_id.is_(None)
            | (model.Package.license_id == '')
        )
        ids = [pkg.id for pkg in q]
        if not ids:
            print('There are no packages with missed license_id')
            return
        updated = q.update({'license_id': 'notspecified'},
                           synchronize_session=False)
        model.Session.commit()
        print('{} packages were updated:'.format(updated))
        # Rebuild the search index for each touched package.
        for pkg_id in ids:
            search.rebuild(pkg_id)
            print('\t' + pkg_id)

    def update_topic_names(self):
        """Rename legacy thematic-area values to their current titles."""
        renames = (
            ('"Social"', '"Gender and Youth"'),
            ('"Statistics"', '"Official Statistics"'),
        )
        model.repo.new_revision()
        for old_name, new_name in renames:
            extras = model.Session.query(
                model.PackageExtra
            ).filter_by(key='thematic_area_string').filter(
                model.PackageExtra.value != '[]',
                ~model.PackageExtra.value.is_(None),
                model.PackageExtra.value.contains(old_name)
            )
            for extra in extras:
                extra.value = extra.value.replace(old_name, new_name)
        model.repo.commit_and_remove()

    def create_country_orgs(self):
        """Create a '<name>-data' organization per country, skipping existing."""
        site_user = tk.get_action('get_site_user')({'ignore_auth': True}, {})
        for name, title in country_orgs.items():
            org_name = name + '-data'
            already_exists = model.Session.query(model.Group).filter_by(
                name=org_name).count()
            if already_exists:
                continue
            tk.get_action('organization_create')(
                {'ignore_auth': True, 'user': site_user['name']},
                {'name': org_name, 'title': title},
            )

    def drop_mendeley_publications(self):
        """Purge all packages harvested from MENDELEY, 100 per search pass."""
        while True:
            results = tk.get_action('package_search')(
                None, {'q': 'harvest_source:MENDELEY', 'rows': 100}
            )
            # Loop until the search finds nothing left to purge.
            if not results['count']:
                break
            print('{} packages left'.format(results['count']))
            for pkg in results['results']:
                package = model.Package.get(pkg['id'])
                if package:
                    package.purge()
                    print('\tPurged package <{}>'.format(pkg['id']))
            model.Session.commit()
        print('Done')

    def broken_links_report(self):
        """Queue the broken-links report as a background job (2-hour timeout)."""
        jobs.enqueue(broken_links_report, timeout=7200)

    def fix_harvester_duplications(self):
        """Remove duplicated harvest objects for packages that were harvested
        by more than one harvest source.

        The harvest objects belonging to the source *type* given as the first
        CLI argument are dropped (together with their errors); objects from
        the other source(s) are kept.

        Usage::
            paster spc fix_harvester_duplications 'SOURCE_TYPE_TO_DROP' -c /etc/ckan/default/production.ini
            paster spc fix_harvester_duplications 'SPREP' -c /etc/ckan/default/production.ini
        """
        # Prepare HarvestObject to have munged ids for search: guids may use
        # ':' and '.' where the matching package ids use '-'.
        formatted_harvest_objects = model.Session.query(
            harvest_model.HarvestObject,
            _func.replace(_func.replace(harvest_model.HarvestObject.guid, ':', '-'), '.', '-').label('possible_id')
            ).subquery()

        # Find (package, harvest source) pairs, matching a package either by
        # the object's guid, the munged guid, or the object's package_id.
        subquery_packages = model.Session.query(
            model.Package.id.label('pkg_id'),
            harvest_model.HarvestSource.id.label('hs_id'),
            harvest_model.HarvestSource.type.label('hs_type'),
        )\
        .distinct()\
        .join(
            formatted_harvest_objects,
            _or_(
                formatted_harvest_objects.c.guid == model.Package.id,
                formatted_harvest_objects.c.possible_id == model.Package.id,
                formatted_harvest_objects.c.package_id == model.Package.id
            )
        ).join(
            harvest_model.HarvestSource,
            formatted_harvest_objects.c.harvest_source_id == harvest_model.HarvestSource.id
        ).group_by(
            model.Package.id,
            formatted_harvest_objects.c.id,
            harvest_model.HarvestSource.id
        ).subquery()

        # Aggregate per package: how many sources touch it and which ones.
        subquery_count = model.Session.query(
            subquery_packages.c.pkg_id.label('pkg_id'),
            _func.count(subquery_packages.c.pkg_id).label('hs_count'),
            _func.array_agg(subquery_packages.c.hs_id).label('hs_ids'),
            _func.array_agg(subquery_packages.c.hs_type).label('hs_types')
        ).group_by(subquery_packages.c.pkg_id).subquery()

        # A package harvested by more than one source is a duplication.
        q = model.Session.query(
            subquery_count.c.pkg_id,
            subquery_count.c.hs_ids,
            subquery_count.c.hs_types)\
        .filter(subquery_count.c.hs_count > 1)

        res = q.all()

        package_ids = []
        harvest_sources_types = []
        harvest_sources_ids = []
        for pkg_id, hs_ids, hs_types in res:
            package_ids.append(pkg_id)
            # Preserve first-seen order while de-duplicating.
            harvest_sources_types.extend(x for x in hs_types if x not in harvest_sources_types)
            harvest_sources_ids.extend(x for x in hs_ids if x not in harvest_sources_ids)

        try:
            source_type_to_drop = self.args[1]
            if len(res) == 0 or len(harvest_sources_types) < 2:
                raise ValueError
        except IndexError:
            # No CLI argument was given.
            print('Source type to drop is not defined')
            print('paster spc fix_harvester_duplications \'SOURCE_TYPE_TO_DROP\' -c /etc/ckan/default/production.ini')
            return
        except ValueError:
            print('No duplications found')
            return

        print('{} duplications found'.format(len(res)))
        print('Duplications found for source types: {}'.format(', '.join(harvest_sources_types)))
        print('Harvest Sources IDs: {}'.format(', '.join(harvest_sources_ids)))

        # Restrict to harvest objects of the source type being dropped that
        # point at one of the duplicated packages.
        harvest_objects_ids = model.Session.query(formatted_harvest_objects.c.id)\
        .join(
            harvest_model.HarvestSource,
            harvest_model.HarvestSource.id == formatted_harvest_objects.c.harvest_source_id

        ).filter(harvest_model.HarvestSource.type == source_type_to_drop)\
        .join(
            model.Package,
            _or_(
                model.Package.id == formatted_harvest_objects.c.guid,
                model.Package.id == formatted_harvest_objects.c.possible_id,
                model.Package.id == formatted_harvest_objects.c.package_id
            )
        ).filter(model.Package.id.in_(package_ids)).all()

        # Delete Harvest Object Errors first (they reference the objects).
        if harvest_objects_ids:
            model.Session.query(harvest_model.HarvestObjectError)\
            .filter(harvest_model.HarvestObjectError.harvest_object_id.in_(harvest_objects_ids))\
            .delete(synchronize_session='fetch')

            # Delete Harvest Objects
            model.Session.query(harvest_model.HarvestObject)\
            .filter(harvest_model.HarvestObject.id.in_(harvest_objects_ids))\
            .delete(synchronize_session='fetch')

        model.Session.commit()

        # Bug fix: was a Python-2-only `print 'Done'` statement, inconsistent
        # with the print() calls used everywhere else in this method.
        print('Done')

    def update_dataset_coordinates(self):
        """Replace the 'spatial' extra of every dataset whose current value
        contains a given substring.

        Usage::
            paster spc update_dataset_coordinates 'COORDINATES_NEW' 'FIND_WITH_CURRENT' -c /etc/ckan/default/production.ini
        """
        # Bug fix: the original indexed self.args[1]/[2] before checking the
        # argument count, raising IndexError instead of printing the hint in
        # the else branch below.
        if len(self.args) > 2 and self.args[1] and self.args[2]:
            new_coordinates = self.args[1]
            find_with_current = self.args[2]

            q = model.Session.query(model.PackageExtra)\
                .filter(model.PackageExtra.key == 'spatial')
            updated_items = []
            # Substring match is done in Python since 'value' holds raw JSON.
            ds_list = [ds_extra for ds_extra in q.all() if find_with_current in ds_extra.value]
            if len(ds_list):
                print('There are items that match, will start to update')
                for ds in ds_list:
                    q = model.Session.query(model.PackageExtra)\
                        .filter(model.PackageExtra.id == ds.id)
                    # NOTE(review): a Query object is always truthy, so this
                    # guard never skips; kept for behavioral compatibility.
                    if q:
                        q.update({
                            'value': new_coordinates
                            })
                        updated_items.append(ds.package_id)
                model.Session.commit()
                print('{0} items been updated. Here is the list of IDs:{1}'.format(
                    len(updated_items), updated_items))
            else:
                print('No items match found.')
        else:
            print('Please provide two arguments.')

    def spc_user_deletion(self):
        """Delete the users listed in a CSV file (user ID in the first column
        of each row).

        Usage::
            paster spc spc_user_deletion <path/to/file.csv> -c /etc/ckan/default/production.ini
        """
        if self.args and len(self.args) == 2:
            csv_path = self.args[1]  # renamed: `file` shadowed the builtin
            site_user = logic.get_action(u'get_site_user')({u'ignore_auth': True}, {})
            context = {u'user': site_user[u'name']}

            with open(csv_path) as csvfile:
                read = csv.reader(csvfile)
                for row in read:
                    # Robustness: tolerate blank lines in the CSV.
                    if not row:
                        continue
                    usr_id = row[0]
                    user = model.User.get(usr_id)
                    if user:
                        print("Removing {0} user".format(user.name))
                        tk.get_action(u'user_delete')(context, {u'id': usr_id})
                    else:
                        # Bug fix: corrected the garbled message wording
                        # ("no exstis on" -> "does not exist on").
                        print('User with ID "{0}" does not exist on the portal. Skipping...'.format(usr_id))
            print('User deletion finished.')
        else:
            print('Please provide path to the CSV file.')

    def spc_find_detached_datasets(self):
        """Report datasets that have no related harvest object.

        You must provide the creator_user_id.
        Use a comma as a separator to provide multiple ids.
        Writes the report to 'detached_datasets.csv' in the current directory.
        """
        if self.args and len(self.args) == 2:
            creator_user_ids = self.args[1].split(',')
        else:
            error('Please, provide only the creator_user_id(s)')
            # Bug fix: without this return the code fell through and crashed
            # with NameError because creator_user_ids was never assigned
            # (error() does not abort — see spc_del_datasets_from_list).
            return

        # check if the file already exists and ask for rewrite confirmation
        if os.path.isfile('detached_datasets.csv'):
            print('File detached_datasets.csv is already exist.')
            confirm = query_yes_no('Do you want to rewrite it?:', default='no')
            if confirm == 'no':
                error('Command aborted by user')
                return

        obj = harvest_model.HarvestObject
        pkg = model.Package

        # getting all packages that have no related harvest_object:
        # anti-join via LEFT OUTER JOIN + IS NULL on the joined column.
        subquery = model.Session.query(obj.package_id).distinct().subquery()
        detached_pkgs = model.Session.query(pkg) \
            .outerjoin(subquery, pkg.id == subquery.c.package_id) \
            .filter(pkg.creator_user_id.in_(creator_user_ids),
                    subquery.c.package_id.is_(None)) \
            .filter(pkg.type != 'harvest')

        pkg_list = detached_pkgs.all()

        if not pkg_list:
            print('There is no detached datasets at all')
            return

        print('{} detached datasets found'.format(len(pkg_list)))
        print('Creating report .csv file')

        # generating report with detached datasets
        try:
            self._generate_detached_report(pkg_list)
        except csv.Error as e:
            error('Failed. An error occured during writing report: {}'.format(e))

        print('DONE')

    def _generate_detached_report(self, pkg_list):
        """Write the detached datasets into 'detached_datasets.csv'
        (';'-separated), one row per package with creator and org names."""
        report_name = 'detached_datasets.csv'
        columns = (
            'id',
            'title',
            'creator_username',
            'org_name',
            'metadata_created',
            'metadata_modified'
        )
        with open(report_name, 'w') as report_file:
            writer = csv.DictWriter(report_file, fieldnames=columns, delimiter=';')
            writer.writeheader()

            session = model.Session
            for dataset in pkg_list:
                # Resolve the author and owning organization for this package.
                author = session.query(model.User).get(dataset.creator_user_id)
                organization = session.query(model.Group).get(dataset.owner_org)
                row = {
                    'id': dataset.id,
                    'title': dataset.title.strip().encode('utf8'),
                    'creator_username': author.name,
                    'org_name': organization.name,
                    'metadata_created': dataset.metadata_created,
                    'metadata_modified': dataset.metadata_modified,
                }
                writer.writerow(row)

        print('{} file created'.format(report_name))

    def spc_del_datasets_from_list(self):
        """
        Purge all datasets from provided list of IDs.

        Expects a single argument: path to a CSV file whose first column
        (before any ';') holds package IDs; the first row is a header.
        """
        if self.args and len(self.args) == 2:
            csv_path = self.args[1]  # renamed: `file` shadowed the builtin
            site_user = logic.get_action(u'get_site_user')(
                {u'ignore_auth': True}, {})

        else:
            error('Please, provide only a file path to CSV file with package IDs.')
            # Bug fix: without this return execution fell through and crashed
            # with NameError because neither the path nor site_user was set.
            return

        # read all package ids from file
        with open(csv_path) as csvfile:
            reader = csv.reader(csvfile, quoting=csv.QUOTE_NONE)
            next(reader)  # skip headers
            pkg_ids = {row[0].split(';')[0] for row in reader}

        print('Are you sure you want to purge {} packages?'.format(len(pkg_ids)))
        print('This action is irreversible!')
        confirm = query_yes_no('Do you want to proceed?:', default='no')
        if confirm == 'no':
            error('Command aborted by user')
            return

        # The acting-user context is loop-invariant; build it once.
        context = {u'user': site_user[u'name']}
        for idx, pkg_id in enumerate(pkg_ids, start=1):
            print('Purging packages in progress: {}/{}'.format(idx, len(pkg_ids)))
            try:
                tk.get_action('dataset_purge')(context, {'id': pkg_id})
                print('Package {} was purged'.format(pkg_id))
            except logic.NotFound:
                print('Dataset with id {} wasn\'t found. Skipping...'.format(pkg_id))

        print('DONE')
Beispiel #29
0
elif RUNTIME_ENV in ("local", ):
    LOCAL_DATABASE = "chenyangblog"

    app.secret_key = "xxxxxx"
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///%s.db' % LOCAL_DATABASE
    #app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://*****:*****@127.0.0.1:3306/%s' % LOCAL_DATABASE

elif RUNTIME_ENV in ("gae", "gae_dev"):

    app.secret_key = "xxxxxx+GAE"

# Environments backed by a SQL database get an Alembic migration config
# wired to the active SQLAlchemy URL and stashed on the app config.
if RUNTIME_ENV in ("bae", "sae", "local"):
    from alembic.config import Config
    MIGRATE_CFG = Config(os.path.join(BASE_DIR, "alembic.ini"))
    MIGRATE_CFG.set_section_option("alembic", "sqlalchemy.url",
                                   app.config['SQLALCHEMY_DATABASE_URI'])
    app.config['MIGRATE_CFG'] = MIGRATE_CFG

######################################
# User
######################################
from flask.ext.login import LoginManager

login_manager = LoginManager()
login_manager.init_app(app)

#####################################
# Mail
#####################################
# Placeholder: no mail setup implemented for these environments yet.
if RUNTIME_ENV in ("bae", "sae"):
    pass
Beispiel #30
0
class SPCCommand(CkanCommand):
    """
    ckanext-spc management commands.

    Usage::
    paster spc [command]

    Commands::
        db-upgrade    Upgrade database to the state of the latest migration
        fix-missed-licenses
        drop-mendeley-publications
    ...
    """

    summary = __doc__.split('\n')[0]
    usage = __doc__

    # Standard paster option parser; only the -c/--config option is added.
    parser = paste.script.command.Command.standard_parser(verbose=True)
    parser.add_option('-c',
                      '--config',
                      dest='config',
                      default='../spc.ini',
                      help='Config file to use.')

    def command(self):
        """Dispatch to the sub-command named by the first CLI argument.

        Dashes are translated to underscores, so ``db-upgrade`` maps to
        :meth:`db_upgrade`. Unknown or missing commands return the usage
        text. Also prepares ``self.alembic_cfg`` for the db commands.
        """
        self._load_config()
        model.Session.commit()

        # alembic.ini is expected three directories above this module.
        alembic_ini = os.path.normpath(
            os.path.join(__file__, '../../../alembic.ini'))
        self.alembic_cfg = Config(alembic_ini)
        self.alembic_cfg.set_section_option('alembic', 'sqlalchemy.url',
                                            config.get('sqlalchemy.url'))

        cmd_name = (self.args[0] if self.args else '').replace('-', '_')
        cmd = getattr(self, cmd_name, None)
        if cmd is None:
            return self.usage

        return cmd()

    def db_upgrade(self):
        """Upgrade the database to the latest migration ('head')."""
        command.upgrade(self.alembic_cfg, 'head')
        return 'Success'

    def db_downgrade(self):
        """Downgrade the database all the way back to 'base'."""
        command.downgrade(self.alembic_cfg, 'base')
        return 'Success'

    def fix_missed_licenses(self):
        """Set license_id='notspecified' on packages with a NULL or empty
        license_id, then rebuild the search index for each of them."""
        q = model.Session.query(model.Package).filter(
            model.Package.license_id.is_(None)
            | (model.Package.license_id == ''))
        # Collect ids first: the bulk update below skips session sync.
        ids = [pkg.id for pkg in q]
        if not ids:
            print('There are no packages with missed license_id')
            return
        broken_count = q.update({'license_id': 'notspecified'},
                                synchronize_session=False)
        model.Session.commit()
        print('{} packages were updated:'.format(broken_count))
        for id in ids:
            search.rebuild(id)
            print('\t' + id)

    def create_country_orgs(self):
        """Create a '<name>-data' organization for each entry in
        country_orgs, skipping names that already exist."""
        site_user = tk.get_action('get_site_user')({'ignore_auth': True}, {})
        for name, title in country_orgs.items():
            if model.Session.query(model.Group).filter_by(name=name +
                                                          '-data').count():
                continue
            tk.get_action('organization_create')({
                'ignore_auth': True,
                'user': site_user['name']
            }, {
                'name': name + '-data',
                'title': title
            })

    def drop_mendeley_publications(self):
        """Purge all packages harvested from the MENDELEY source, in
        batches of 100.

        NOTE(review): loops until package_search reports zero results; if
        the search index is not kept in sync after purging (or a package
        lookup returns None), this could loop indefinitely — confirm.
        """
        while True:
            results = tk.get_action('package_search')(None, {
                'q': 'harvest_source:MENDELEY',
                'rows': 100
            })
            if not results['count']:
                break
            print('{} packages left'.format(results['count']))
            for pkg in results['results']:
                package = model.Package.get(pkg['id'])
                if package:
                    package.purge()
                    print('\tPurged package <{}>'.format(pkg['id']))
            model.Session.commit()
        print('Done')
Beispiel #31
0
    },
    'Bhutan': {
        'city': False,
        'region_code': 'BT',
        'region_name': 'Bhutan',
        'ip': '67.43.156.1',
        'latitude': 27.5,
        'longitude': 90.5,
        'radius': GEOCODER.region_max_radius('BT'),
        'region_radius': GEOCODER.region_max_radius('BT'),
        'score': 0.9,
    },
}

# Build the Alembic config entirely in code (no ini file): script location,
# database URL and sourceless mode all go into the [alembic] section.
ALEMBIC_CFG = AlembicConfig()
for _key, _value in (
        ('script_location', 'alembic'),
        ('sqlalchemy.url', SQLURI),
        ('sourceless', 'true')):
    ALEMBIC_CFG.set_section_option('alembic', _key, _value)


@pytest.yield_fixture(scope='session', autouse=True)
def package():
    """Session-scoped, auto-used fixture that gevent-monkey-patches the
    test process as early as possible.

    NOTE(review): declared as a yield_fixture but no ``yield`` is visible
    in this snippet — the body may be truncated; confirm against the full
    source file.
    """
    # We do this here as early as possible in tests.
    # We only do it in tests, as the real celery processes should
    # run without the monkey patches applied. The gunicorn arbiter
    # patches its worker processes itself.
    from gevent import monkey
    monkey.patch_all()
Beispiel #32
0
from alembic.config import Config
from flask.ext.assets import ManageAssets

from call_server.app import create_app
from call_server.extensions import assets, db, cache
from call_server import political_data
from call_server.user import User, USER_ADMIN, USER_ACTIVE

# Build the Flask app and the Flask-Script manager that hosts the commands.
app = create_app()
app.db = db
manager = Manager(app)

# NOTE(review): os.path.dirname(__name__) operates on the module *name*,
# not __file__, so the path resolves relative to the current working
# directory — confirm whether __file__ was intended here.
alembic_config = Config(
    os.path.realpath(os.path.dirname(__name__)) + "/alembic.ini")
# let the config override the default db location in production
alembic_config.set_section_option('alembic', 'sqlalchemy.url',
                                  app.config.get('SQLALCHEMY_DATABASE_URI'))

manager.add_command("assets", ManageAssets())


def reset_assets():
    """Reset assets named bundles to {} before running command.
    This command should really be run with TestingConfig context"""
    # Fix: use the print() function form (the original used the
    # Python-2-only statement form); output is identical on Python 2.
    print("resetting assets")
    assets._named_bundles = {}


@manager.command
def runserver(external=None):
    """Run web server for local development and debugging
        pass --external for external routing"""
Beispiel #33
0
def create_db(params):
    """Create a database and login role from connection params, build all
    model tables, stamp the DB with the latest alembic revision, and grant
    the application role access on every schema in SCHEMAS.

    :param params: connection-parameter mapping (passed through
        process_connection_values); must include "database" and "username".
    :return: the created database name.
    """
    params = process_connection_values(params)
    db_name = params["database"]
    username = params["username"]

    # Administrative statements below must run as the master user.
    params["username"] = MASTER_USER
    params["password"] = MASTER_PASSWORD

    master_params = params.copy()
    master_params["database"] = MASTER_DB
    engine = connect(master_params)
    # CREATE DATABASE cannot run inside a transaction block; end it first.
    engine.execute('COMMIT')
    sql = 'CREATE DATABASE "{}";'.format(db_name)
    try:
        engine.execute(sql)
    except Exception as e:
        # Best effort: the database may already exist.
        print sql, e

    # TODO: This will only run for the first time and fail in all other cases.
    # Maybe test before instead?
    sql = 'CREATE ROLE {user} LOGIN PASSWORD "{user}" VALID UNTIL "infinity";'.format(
        user=username)
    try:
        engine.execute(sql)
    except Exception as e:
        # Deliberately ignored: the role likely exists already.
        pass

    engine = connect(params)

    # TODO: Alembic (and sqlalchemy for that matter) don't like schemas. We should
    # figure out a way to add these later
    config = load_flask_config()
    models = config.get("models", [])
    if not models:
        raise Exception("This app has no models defined in config")

    # Import each configured model module and create its tables.
    for model_module_name in models:
        log.info("Building models from %s", model_module_name)
        models = importlib.import_module(model_module_name)
        models.ModelBase.metadata.create_all(engine)

    # stamp the db with the latest alembic upgrade version
    from drift.flaskfactory import _find_app_root
    approot = _find_app_root()
    ini_path = os.path.join(approot, "alembic.ini")
    alembic_cfg = Config(ini_path)
    # Migration scripts live in an "alembic" dir next to alembic.ini.
    script_path = os.path.join(
        os.path.split(os.path.abspath(ini_path))[0], "alembic")
    alembic_cfg.set_main_option("script_location", script_path)
    db_names = alembic_cfg.get_main_option('databases')
    connection_string = format_connection_string(params)
    alembic_cfg.set_section_option(db_names, "sqlalchemy.url",
                                   connection_string)
    command.stamp(alembic_cfg, "head")

    for schema in SCHEMAS:
        # Note that this does not automatically grant on tables added later
        sql = '''
                 GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA "{schema}" TO {user};
                 GRANT USAGE, SELECT, UPDATE ON ALL SEQUENCES IN SCHEMA "{schema}" TO {user};
                 GRANT ALL ON SCHEMA "{schema}" TO {user};'''.format(
            schema=schema, user=username)
        try:
            engine.execute(sql)
        except Exception as e:
            print sql, e

    return db_name
Beispiel #34
0
 def _get_alembic_config(self):
     """Build an alembic Config from the alembic.ini next to this module,
     pointing script_location at the local 'migrations' dir and the
     database URL at self.connect_string."""
     base_dir = os.path.dirname(__file__)
     cfg = Config(os.path.join(base_dir, 'alembic.ini'))
     cfg.set_section_option('alembic', 'script_location',
                            os.path.join(base_dir, 'migrations'))
     cfg.set_main_option('sqlalchemy.url', self.connect_string)
     return cfg
Beispiel #35
0
def alembic_setup(logger):
    """Collect alembic-enabled modules and build an in-code alembic Config.

    Returns a tuple ``(usable_modules, alembic_modules, alembic_config)``
    where alembic_modules/versions include 'papercup' itself plus every
    loaded module exposing an ``alembic`` attribute.
    """
    logger.info("")
    config.init_modules()
    usable_modules = config.config_data['papercup']['modules_by_name']
    alembic_modules = ['papercup']
    alembic_versions = [
        os.path.join(os.path.dirname(__file__), 'migrations', 'versions')
    ]
    for mod_name in config.config_data['papercup']['loaded_modules']:
        if hasattr(usable_modules[mod_name], 'alembic'):
            alembic_modules.append(mod_name)
            alembic_versions.append(usable_modules[mod_name].alembic['versions'])

    from alembic.config import Config
    cfg = Config()
    cfg.set_main_option(
        'script_location',
        os.path.join(os.path.dirname(__file__), 'migrations'))
    cfg.set_main_option('version_locations', ' '.join(alembic_versions))

    # Logging setup expressed as data instead of a long chain of
    # set_section_option calls; values are identical to a standard
    # alembic.ini logging section. '%%' survives configparser interpolation.
    logging_sections = (
        ('loggers', (('keys', 'root,sqlalchemy,alembic'),)),
        ('handlers', (('keys', 'console'),)),
        ('formatters', (('keys', 'generic'),)),
        ('logger_root', (('level', 'WARN'),
                         ('handlers', 'console'),
                         ('qualname', ''))),
        ('logger_sqlalchemy', (('level', 'WARN'),
                               ('handlers', ''),
                               ('qualname', 'sqlalchemy.engine'))),
        ('logger_alembic', (('level', 'INFO'),
                            ('handlers', ''),
                            ('qualname', 'alembic'))),
        ('handler_console', (('class', 'StreamHandler'),
                             ('args', '(sys.stderr,)'),
                             ('level', 'NOTSET'),
                             ('formatter', 'generic'))),
        ('formatter_generic', (('format',
                                '%%(levelname)-5.5s [%%(name)s] %%(message)s'),
                               ('datefmt', '%%H:%%M:%%S'))),
    )
    for section, options in logging_sections:
        for key, value in options:
            cfg.set_section_option(section, key, value)

    #alembic_cfg.attributes['connection'] = connection
    return (usable_modules, alembic_modules, cfg)
def main():
    """Rebuild the database from scratch, stamp it with the latest alembic
    revision, and seed demo data (area, two licenses, one project)."""
    setup_logging('development.ini')
    settings = get_appsettings('development.ini')

    load_local_settings(settings)

    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)

    translation_manager.options.update({
        'locales': settings['available_languages'].split(),
        'get_locale_fallback': True
    })
    configure_mappers()

    # Destructive: drop and recreate every table.
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)

    # then, load the Alembic configuration and generate the
    # version table, "stamping" it with the most recent rev:
    from alembic.config import Config
    from alembic import command
    alembic_cfg = Config("alembic.ini")
    url = settings['sqlalchemy.url']
    alembic_cfg.set_section_option("alembic", "sqlalchemy.url", url)
    command.stamp(alembic_cfg, "head")

    with transaction.manager:
        # Demo area: a polygon over Kathmandu (WGS84 / SRID 4326).
        geometry = '{"type":"Polygon","coordinates":[[[85.31038284301758,27.70731518595052],[85.31089782714842,27.698120147680104],[85.3242015838623,27.69842412827061],[85.323429107666,27.70731518595052],[85.31038284301758,27.70731518595052]]]}'  # noqa
        geometry = geojson.loads(geometry,
                                 object_hook=geojson.GeoJSON.to_instance)
        geometry = shapely.geometry.asShape(geometry)
        geometry = shape.from_shape(geometry, 4326)

        area = Area(
            geometry
        )
        DBSession.add(area)

        license1 = License()
        license1.name = 'NextView'
        license1.description = "This data is licensed for use by the US Government (USG) under the NextView (NV) license and copyrighted by Digital Globe or GeoEye. The NV license allows the USG to share the imagery and Literal Imagery Derived Products (LIDP) with entities outside the USG when that entity is working directly with the USG, for the USG, or in a manner that is directly beneficial to the USG. The party receiving the data can only use the imagery or LIDP for the original purpose or only as otherwise agreed to by the USG. The party receiving the data cannot share the imagery or LIDP with a third party without express permission from the USG. At no time should this imagery or LIDP be used for other than USG-related purposes and must not be used for commercial gain. The copyright information should be maintained at all times. Your acceptance of these license terms is implied by your use."  # noqa
        license1.plain_text = "In other words, you may only use NextView imagery linked from this site for digitizing OpenStreetMap data for humanitarian purposes."  # noqa
        DBSession.add(license1)

        license2 = License()
        license2.name = 'Astrium/UNOSAT'
        license2.description = "UNOSAT allow any INTERNET USER to use the IMAGE to develop DERIVATIVE WORKS provided that the INTERNET USER includes the DERIVATIVE WORKS he/she created in the OpenStreetMap database under CC-BY-SA licence (http://creativecommons.org/licenses/by-sa/2.0/) and/or Open Database licence (ODbL: http://www.opendatacommons.org/licenses/odbl/), with the credit of the corresponding PRODUCT conspicuously displayed and written in full, in order to allow any OpenStreetMap database user to have access to and to use the DERIVATIVE WORKS. Except for the foregoing, the END USER and/or the INTERNET USER shall not be entitled to sell, distribute, assign, dispose of, lease, sublicense or transfer, directly or indirectly, any DERIVATIVE WORKS to any third party."  # noqa
        license2.plain_text = "Astrium GEO-Information Services and UNOSAT are allowing access to this imagery for creating information in OpenStreetMap. Other uses are not allowed."  # noqa
        DBSession.add(license2)

        project = Project(
            'Kathmandu - Map all primary roads and buildings'
        )
        project.area = area
        project.short_description = "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua."  # noqa
        project.description = "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum."  # noqa
        project.instructions = "**The detailed instructions**\n\nLorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat."  # noqa
        project.entities_to_map = "primary roads, buildings"
        project.imagery = "tms[22]:http://hiu-maps.net/hot/1.0.0/kathmandu_flipped/{zoom}/{x}/{y}.png"  # noqa
        project.license = license1
        DBSession.add(project)

        # French translation for the project name.
        with project.force_locale('fr'):
            project.name = "Kathmandu - Cartographier les routes et les bâtiments"  # noqa

        # Pre-generate tiles at zoom level 17.
        project.auto_fill(17)
Beispiel #37
0
def upgrade_databases(local_conf, global_conf):
    """
    the database migration is managed by using alembic

     see the alembic.ini file for configuration options
     remark the database urls in the alembic.ini will be
     replaced with the ones of your linotp.ini

     if the writeback of the alembic.ini should not be done,
     this could be specified in the linotp.ini by the option

      alembic.writeback = False

    :param local_conf: the linotp section of the linotp configuration
    :param global_conf: the whole linotp configuration

    :return: -nothing-
    """

    # Header written back into alembic.ini when writeback is enabled.
    preface = """# This config file is adjusted wrt. the sqlalchemy.urls
# by the websetup.py during
#
#   paster setup-app linotp.ini
#
# Before running alembic manualy, make sure, that the sqlalchemy.url's
# are correct !!
#
# alembic supports you to run the run the database migration by the commands
#
#      alembic upgrade head
# or
#      alembic downgrade -1

"""

    config = local_conf
    here = global_conf.get('here', '')
    alembic_ini = config.get('alembic.ini', "%s/alembic.ini" % here)

    if not os.path.isfile(alembic_ini):
        log.error('No Database migration done as no alembic configuration'
            ' [alembic.ini] could be found!')
        return

    # Collect the configured database URLs (linotp / audit / openid).
    databases = {}
    linotp_url = config.get('sqlalchemy.url', '')
    if linotp_url:
        databases['linotp'] = linotp_url
    audit_url = config.get('linotpAudit.sql.url', '')
    if audit_url:
        # table_prefix is only defined (and only needed) for the audit DB.
        table_prefix = config.get("linotpAudit.sql.table_prefix", "")
        databases['audit'] = audit_url
    openid_url = config.get('linotpOpenID.sql.url', '')
    if openid_url:
        databases['openid'] = openid_url

    # load the alembic configuration
    alembic_cfg = Config(alembic_ini)

    # Override each per-database section with the URLs from linotp.ini.
    for database in databases:
        if database == 'audit':
            alembic_cfg.set_section_option(database, 'table_prefix', table_prefix)
        alembic_cfg.set_section_option(database, 'sqlalchemy.url', databases.get(database))

    alembic_cfg.set_section_option('alembic', 'databases', ','.join(databases.keys()))

    # Optionally persist the adjusted config back to alembic.ini.
    if config.get('alembic.writeback', 'false').lower() == 'true':
        fileConfig = alembic_cfg.file_config
        with open(alembic_ini, 'w') as cfgfile:
            cfgfile.write(preface)
            fileConfig.write(cfgfile)

    try:
        if config.get('alembic.auto_update', 'false').lower() == 'true':
            command.upgrade(alembic_cfg, "head")
    except Exception as exx:
        log.exception('error during upgrade %r' % exx)

    return
Beispiel #38
0
from alembic import command
from alembic.config import Config
from flask.ext.assets import ManageAssets

from call_server.app import create_app
from call_server.extensions import assets, db, cache
from call_server import political_data
from call_server.user import User, USER_ADMIN, USER_ACTIVE

app = create_app()
app.db = db
manager = Manager(app)

alembic_config = Config(os.path.realpath(os.path.dirname(__name__)) + "/alembic.ini")
# let the config override the default db location in production
alembic_config.set_section_option('alembic', 'sqlalchemy.url',
                                  app.config.get('SQLALCHEMY_DATABASE_URI'))

manager.add_command("assets", ManageAssets())


def reset_assets():
    """Reset assets named bundles to {} before running command.
    This command should really be run with TestingConfig context"""
    # Fix: use the print() function form (the original used the
    # Python-2-only statement form); output is identical on Python 2.
    print("resetting assets")
    assets._named_bundles = {}


@manager.command
def runserver(external=None):
    """Run web server for local development and debugging
        pass --external for external routing"""
Beispiel #39
0
def upgrade_databases(local_conf, global_conf):
    """
    the database migration is managed by using alembic

     see the alembic.ini file for configuration options
     remark the database urls in the alembic.ini will be
     replaced with the ones of your linotp.ini

     if the writeback of the alembic.ini should not be done,
     this could be specified in the linotp.ini by the option

      alembic.writeback = False

    :param local_conf: the linotp section of the linotp configuration
    :param global_conf: the whole linotp configuration

    :return: -nothing-
    """

    # Header written back into alembic.ini when writeback is enabled.
    preface = """# This config file is adjusted wrt. the sqlalchemy.urls
# by the websetup.py during
#
#   paster setup-app linotp.ini
#
# Before running alembic manualy, make sure, that the sqlalchemy.url's
# are correct !!
#
# alembic supports you to run the run the database migration by the commands
#
#      alembic upgrade head
# or
#      alembic downgrade -1

"""

    config = local_conf
    here = global_conf.get('here', '')
    alembic_ini = config.get('alembic.ini', "%s/alembic.ini" % here)

    if not os.path.isfile(alembic_ini):
        log.error('No Database migration done as no alembic configuration'
                  ' [alembic.ini] could be found!')
        return

    # Collect the configured database URLs (linotp / audit / openid).
    databases = {}
    linotp_url = config.get('sqlalchemy.url', '')
    if linotp_url:
        databases['linotp'] = linotp_url
    audit_url = config.get('linotpAudit.sql.url', '')
    if audit_url:
        # table_prefix is only defined (and only needed) for the audit DB.
        table_prefix = config.get("linotpAudit.sql.table_prefix", "")
        databases['audit'] = audit_url
    openid_url = config.get('linotpOpenID.sql.url', '')
    if openid_url:
        databases['openid'] = openid_url

    # load the alembic configuration
    alembic_cfg = Config(alembic_ini)

    # Override each per-database section with the URLs from linotp.ini.
    for database in databases:
        if database == 'audit':
            alembic_cfg.set_section_option(database, 'table_prefix',
                                           table_prefix)
        alembic_cfg.set_section_option(database, 'sqlalchemy.url',
                                       databases.get(database))

    alembic_cfg.set_section_option('alembic', 'databases',
                                   ','.join(databases.keys()))

    # Optionally persist the adjusted config back to alembic.ini.
    if config.get('alembic.writeback', 'false').lower() == 'true':
        fileConfig = alembic_cfg.file_config
        with open(alembic_ini, 'w') as cfgfile:
            cfgfile.write(preface)
            fileConfig.write(cfgfile)

    try:
        if config.get('alembic.auto_update', 'false').lower() == 'true':
            command.upgrade(alembic_cfg, "head")
    except Exception as exx:
        log.exception('error during upgrade %r' % exx)

    return
Beispiel #40
0
def create_db(tenant, db_host=None, tier_name=None):
    """Create and bootstrap a tenant database.

    Creates the database and the application role, builds all model
    tables, grants schema privileges, and stamps the database with the
    latest alembic revision.

    :param tenant: tenant name used to construct the database name.
    :param db_host: optional database host override.
    :param tier_name: optional tier name used to construct the database name.
    :return: the name of the database that was created.
    """
    config = load_config()
    service = config['name']
    db_name = construct_db_name(tenant, service, tier_name)

    username = get_db_info()['user']

    # CREATE DATABASE cannot run inside a transaction block; the COMMIT
    # closes the implicit transaction opened by the driver.
    engine = connect(MASTER_DB, db_host)
    engine.execute('COMMIT')
    sql = 'CREATE DATABASE "{}";'.format(db_name)
    try:
        engine.execute(sql)
    except Exception as e:
        print(sql, e)

    # TODO: This will only run for the first time and fail in all other cases.
    # Maybe test before instead?
    # NOTE: PostgreSQL string literals take single quotes; the previous
    # double quotes made the password and "infinity" identifiers, so the
    # statement could never succeed.
    sql = "CREATE ROLE {user} LOGIN PASSWORD '{user}' VALID UNTIL 'infinity';".format(
        user=username)
    try:
        engine.execute(sql)
    except Exception:
        # Best effort: the role typically already exists on re-runs.
        pass

    engine = connect(db_name, db_host)

    # TODO: Alembic (and sqlalchemy for that matter) don't like schemas. We should
    # figure out a way to add these later

    model_modules = config.get("models", [])
    if not model_modules:
        raise Exception("This app has no models defined in config")

    for model_module_name in model_modules:
        log.info("Building models from %s", model_module_name)
        module = importlib.import_module(model_module_name)
        module.ModelBase.metadata.create_all(engine)

    engine = connect(db_name, db_host)
    # NOTE(review): `schemas` is not defined in this function — presumably a
    # module-level constant; confirm before relying on it.
    for schema in schemas:
        # Note that this does not automatically grant on tables added later
        sql = '''
                 GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA "{schema}" TO {user};
                 GRANT USAGE, SELECT, UPDATE ON ALL SEQUENCES IN SCHEMA "{schema}" TO {user};
                 GRANT ALL ON SCHEMA "{schema}" TO {user};'''.format(
            schema=schema, user=username)
        try:
            engine.execute(sql)
        except Exception as e:
            print(sql, e)

    # stamp the db with the latest alembic upgrade version
    ini_path = os.path.join(
        os.path.split(os.environ["drift_CONFIG"])[0], "..", "alembic.ini")
    alembic_cfg = Config(ini_path)
    db_names = alembic_cfg.get_main_option('databases')
    connection_string = 'postgresql://%s:%s@%s/%s' % (username, username,
                                                      db_host, db_name)
    alembic_cfg.set_section_option(db_names, "sqlalchemy.url",
                                   connection_string)
    command.stamp(alembic_cfg, "head")

    # Hand the version table to postgres and let the application role use it.
    sql = '''
    ALTER TABLE alembic_version
      OWNER TO postgres;
    GRANT ALL ON TABLE alembic_version TO postgres;
    GRANT SELECT, UPDATE, INSERT, DELETE ON TABLE alembic_version TO zzp_user;
    '''
    engine.execute(sql)

    return db_name
Beispiel #41
0
 def alembic_config(self):
     """Build an in-memory Alembic config pointing at this instance's engine."""
     cfg = Config()
     options = (
         ("script_location", "alembic"),
         ("sqlalchemy.url", str(self.db.engine.url)),
         ("sourceless", "true"),
     )
     for key, value in options:
         cfg.set_section_option("alembic", key, value)
     return cfg
def alembic_cfg():
    """Load alembic.ini and point its default section at the configured DB URL."""
    from app.environment import env

    cfg = AlembicConfig("alembic.ini")
    db_url = env['DB_URL']
    cfg.set_section_option("alembic", "sqlalchemy.url", db_url)
    return cfg
Beispiel #43
0
    def _create_config(self):
        """Build an Alembic ``Config`` programmatically.

        Databases are discovered from the application's ODM; each engine
        gets its own config section with its ``sqlalchemy.url``.  A
        ``MIGRATIONS`` mapping in the project config, if present, overrides
        the generated options — ``alembic`` entries go to the main section,
        anything else to the named section.  Per-database ``MetaData`` is
        attached on ``cfg.metadata`` for the ``auto`` command.

        See the Alembic documentation for the available options:
        https://alembic.readthedocs.org/en/latest/index.html
        """
        application = self.app
        cfg = Config()
        # Use lux's migration template instead of alembic's bundled one.
        cfg.get_template_directory = lux_template_directory
        migrations_dir = os.path.join(application.meta.path, 'migrations')

        cfg.set_main_option('script_location', migrations_dir)
        cfg.config_file_name = os.path.join(migrations_dir, 'alembic.ini')

        odm = application.odm()
        db_names = []
        # One config section per database engine.
        for db_name, db_engine in odm.keys_engines():
            db_name = db_name or 'default'
            db_names.append(db_name)
            # url = str(db_engine.url).replace('+green', '')
            cfg.set_section_option(db_name, 'sqlalchemy.url',
                                   str(db_engine.url))
        # Main section lists every database name.
        cfg.set_main_option("databases", ','.join(db_names))
        # An empty logging section keeps env.py from raising.
        cfg.set_section_option('logging', 'path', '')

        # Collect per-database metadata required by the `auto` command.
        metadata = {}
        for db_name, db_engine in odm.keys_engines():
            db_name = db_name or 'default'
            meta = MetaData()
            metadata[db_name] = meta
            for table, bound_engine in odm.binds.items():
                if bound_engine == db_engine:
                    table.tometadata(meta)
        cfg.metadata = metadata

        # Project-level MIGRATIONS settings win over the generated defaults.
        overrides = application.config.get('MIGRATIONS')
        if overrides:
            for section, options in overrides.items():
                for key, value in options.items():
                    if section == 'alembic':
                        cfg.set_main_option(key, value)
                    else:
                        cfg.set_section_option(section, key, value)

        return cfg
    DB_RO_URI = (
        'mysql+%s://%s:%s@%s:%s/%s' %
        (DB_LIBRARY, DB_RO_USER, DB_RO_PWD, DB_RO_HOST, DB_PORT, DB_NAME))

# Fall back to a constructed MySQL DSN for the read/write connection when
# no explicit URI was supplied.
if not DB_RW_URI:
    DB_RW_URI = (
        'mysql+%s://%s:%s@%s:%s/%s' %
        (DB_LIBRARY, DB_RW_USER, DB_RW_PWD, DB_RW_HOST, DB_PORT, DB_NAME))

# DDL connections reuse the read/write host (DB_RW_HOST) with DDL
# credentials — NOTE(review): confirm this host reuse is deliberate.
if not DB_DDL_URI:
    DB_DDL_URI = (
        'mysql+%s://%s:%s@%s:%s/%s' %
        (DB_LIBRARY, DB_DDL_USER, DB_DDL_PWD, DB_RW_HOST, DB_PORT, DB_NAME))

# Module-wide Alembic configuration: migration scripts live in the
# 'alembic' directory next to this file and run over the DDL connection.
ALEMBIC_CFG = AlembicConfig()
ALEMBIC_CFG.set_section_option('alembic', 'script_location',
                               os.path.join(HERE, 'alembic'))
ALEMBIC_CFG.set_section_option('alembic', 'sqlalchemy.url', DB_DDL_URI)

# GeoIP database path; defaults to the bundled test city database.
GEOIP_PATH = os.environ.get('GEOIP_PATH')
if not GEOIP_PATH:
    GEOIP_PATH = os.path.join(HERE, 'tests/data/GeoIP2-City-Test.mmdb')

MAP_TOKEN = os.environ.get('MAP_TOKEN')

# Redis connection: an explicit REDIS_URI wins; otherwise build one from
# host/port. Database 1 is used under TESTING, database 0 otherwise.
REDIS_HOST = os.environ.get('REDIS_HOST')
REDIS_PORT = os.environ.get('REDIS_PORT', '6379')
REDIS_DB = '1' if TESTING else '0'
REDIS_URI = os.environ.get('REDIS_URI')
if REDIS_HOST and not REDIS_URI:
    REDIS_URI = 'redis://%s:%s/%s' % (REDIS_HOST, REDIS_PORT, REDIS_DB)
Beispiel #45
0
    def get_config(self):
        """Assemble an Alembic ``Config`` from the project's settings.

        Databases are discovered via the application's ODM; each engine
        gets a config section holding its ``sqlalchemy.url``.  Defaults may
        be overridden through a ``MIGRATIONS`` mapping in the project
        config — ``alembic`` entries become main options, any other section
        is written as-is (this is also how a different migration directory
        can be supplied).

        See the Alembic documentation for the available options:
        https://alembic.readthedocs.org/en/latest/index.html
        """
        from alembic.config import Config

        # We ship a custom template, so point alembic's template lookup at
        # lux's copy by patching the class-level method.
        Config.get_template_directory = self.get_lux_template_directory

        # Migrations live in the main project directory by default.
        migration_dir = os.path.join(os.getcwd(), 'migrations')
        cfg = Config()
        # Where the alembic env will be placed.
        cfg.set_main_option('script_location', migration_dir)

        odm = self.app.odm()
        db_names = []
        # One config section per discovered database engine.
        for db_name, db_engine in odm.keys_engines():
            db_name = db_name or 'default'
            db_names.append(db_name)
            cfg.set_section_option(db_name, 'sqlalchemy.url',
                                   str(db_engine.url))
        # Main section lists every database name.
        cfg.set_main_option("databases", ','.join(db_names))
        # An empty logging section keeps env.py from raising.
        cfg.set_section_option('logging', 'path', '')
        # Attach the metadata required by the `auto` command.
        self.get_metadata(cfg)

        # Project-level MIGRATIONS settings win over the generated defaults.
        overrides = self.app.config.get('MIGRATIONS')
        if overrides:
            for section, options in overrides.items():
                for key, value in options.items():
                    if section == 'alembic':
                        cfg.set_main_option(key, value)
                    else:
                        cfg.set_section_option(section, key, value)

        return cfg