Example #1
    def test_noerr_rev_leaves_open_transaction_transactional_ddl(self):
        a, b, c = self._opened_transaction_fixture()

        with self._patch_environment(
            transactional_ddl=True, transaction_per_migration=False
        ):
            command.upgrade(self.cfg, c)
Example #2
    def install_or_upgrade_db(self, skip_backup=False):
        from domogik.common import sql_schema
        from domogik.common import database
        from sqlalchemy import create_engine, MetaData, Table
        from alembic.config import Config
        from alembic import command

        info("Installing or upgrading the db")
        if not sql_schema.Device.__table__.exists(bind=self._engine):
            sql_schema.metadata.drop_all(self._engine)
            ok("Dropping all existing tables...")
            sql_schema.metadata.create_all(self._engine)
            ok("Creating all tables...")
            with self._db.session_scope():
                self._db.add_default_user_account()
            ok("Creating admin user...")
            command.stamp(self.alembic_cfg, "head")
            ok("Setting db version to head")
        else:
            if not skip_backup:
                ok("Creating backup")
                self.backup_existing_database()
            ok("Upgrading")
            command.upgrade(self.alembic_cfg, "head")
        return 
Example #3
def setupPackage():
    os.environ['MONGO_URI'] = 'mongodb://localhost'
    os.environ['MONGO_DB_NAME'] = 'royal_example'
    os.environ['MONGO_DB_PREFIX'] = ''

    # sqla extension setup.
    global engine

    alembic_config = Config()
    alembic_config.set_main_option('script_location',
                                   'example/ext/sqla/db')
    alembic_config.set_main_option('sqlalchemy.url', mysql_uri)

    engine = create_engine(mysql_uri)

    try:
        command.downgrade(alembic_config, 'base')
    except:
        log.exception("Migration downgrade failed, clearing all tables")
        metadata = MetaData(engine)
        metadata.reflect()
        for table in metadata.tables.values():
            for fk in table.foreign_keys:
                engine.execute(DropConstraint(fk.constraint))
        metadata.drop_all()

    command.upgrade(alembic_config, 'head')
Example #4
 def create_app(self):
     # Use the testing configuration
     self.config = TestingConfig(self.flask_conf)
     alcommand.upgrade(janitoo_config(self.config.SQLALCHEMY_DATABASE_URI), 'heads')
     app = create_app(self.config)
     app.config['LIVESERVER_PORT'] = 8943
     return app
Example #5
def upgrade_db(options):
    """
    Upgrades database to latest (head) revision.
    """
    init_model(config, check_version=False)
    cfg = migrationsutil.create_config()
    command.upgrade(cfg, revision="head")
Example #6
def db_session2(request):
    sql_str = os.environ.get("DB_STRING2", "sqlite://")
    engine = create_engine(sql_str)
    engine.echo = True
    # pyramid way
    maker = sessionmaker(bind=engine)
    Base.metadata.bind = engine
    Base.metadata.drop_all(engine)
    engine.execute("DROP TABLE IF EXISTS alembic_ziggurat_foundations_version")
    if sql_str.startswith("sqlite"):
        # sqlite will not work with alembic
        Base.metadata.create_all(engine)
    else:
        alembic_cfg = Config()
        alembic_cfg.set_main_option(
            "script_location", "ziggurat_foundations:migrations"
        )
        alembic_cfg.set_main_option("sqlalchemy.url", sql_str)
        command.upgrade(alembic_cfg, "head")

    session = maker()

    def teardown():
        session.rollback()
        session.close()

    request.addfinalizer(teardown)

    return session
Example #7
    def __new__(cls):
        if cls.__instance is None:
            i = object.__new__(cls)

            i.SQLEngine = SQLEngine
            i.DataBase = DataBase
            i.Session = Session

            i.connection = SQLEngine.connect()
            i.context = MigrationContext.configure(i.connection)
            i.current_revision = i.context.get_current_revision()
            logger.boot('Database revision: %s', i.current_revision)

            i.config = Config(ALEMBIC_CONFIG)
            i.script = ScriptDirectory.from_config(i.config)
            i.head_revision = i.script.get_current_head()
            if i.current_revision is None or i.current_revision != i.head_revision:
                logger.boot('Upgrading database to version %s.', i.head_revision)
                command.upgrade(i.config, 'head')
                from option import Option
                from log import Log
                session = Session()
                options = session.query(Option).first()
                if options is None:
                    options = Option()
                    session.add(options)
                options.version = i.head_revision
                session.commit()
                i.current_revision = i.head_revision

            cls.__instance = i
            h = SQLAlchemyHandler()
            logger.addHandler(h)
            return cls.__instance
Example #8
 def test_upgrade_subnets_with_ip_policy(self):
     dt = datetime.datetime(1970, 1, 1)
     self.connection.execute(
         self.subnets.insert(),
         dict(id="000", tenant_id="foo", _cidr="192.168.10.0/24",
              ip_policy_id="111"))
     self.connection.execute(
         self.ip_policy.insert(),
         dict(id="111", tenant_id="foo", created_at=dt))
     self.connection.execute(
         self.ip_policy_cidrs.insert(),
         dict(id="221", created_at=dt,
              ip_policy_id="111", cidr="192.168.10.13/32"))
     alembic_command.upgrade(self.config, '552b213c2b8c')
     results = self.connection.execute(
         select([self.ip_policy])).fetchall()
     self.assertEqual(len(results), 1)
     result = results[0]
     self.assertEqual(result["id"], "111")
     self.assertEqual(result["tenant_id"], "foo")
     self.assertEqual(result["created_at"], dt)
     results = self.connection.execute(
         select([self.ip_policy_cidrs])).fetchall()
     self.assertEqual(len(results), 1)
     result = results[0]
     self.assertEqual(result["id"], "221")
     self.assertEqual(result["created_at"], dt)
     self.assertEqual(result["ip_policy_id"], "111")
     self.assertEqual(result["cidr"], "192.168.10.13/32")
     results = self.connection.execute(
         select([self.subnets])).fetchall()
     self.assertEqual(len(results), 1)
     self.assertEqual(results[0]["ip_policy_id"], "111")
Example #9
 def test_upgrade_subnets_no_ip_policy_v6(self):
     self.connection.execute(
         self.subnets.insert(),
         dict(id="000", tenant_id="foo", _cidr="fd00::/64",
              ip_policy_id=None))
     with contextlib.nested(
         mock.patch("oslo_utils.uuidutils"),
         mock.patch("oslo_utils.timeutils")
     ) as (uuid, tu):
         dt = datetime.datetime(1970, 1, 1)
         tu.utcnow.return_value = dt
         uuid.generate_uuid.side_effect = ("666", "667", "668")
         alembic_command.upgrade(self.config, '552b213c2b8c')
     results = self.connection.execute(
         select([self.ip_policy])).fetchall()
     self.assertEqual(len(results), 1)
     result = results[0]
     self.assertEqual(result["id"], "666")
     self.assertEqual(result["tenant_id"], "foo")
     self.assertEqual(result["created_at"], dt)
     results = self.connection.execute(
         select([self.ip_policy_cidrs])).fetchall()
     self.assertEqual(len(results), 2)
     for result in results:
         self.assertIn(result["id"], ("667", "668"))
         self.assertEqual(result["created_at"], dt)
         self.assertEqual(result["ip_policy_id"], "666")
         self.assertIn(result["cidr"],
                       ("fd00::/128",
                        "fd00::ffff:ffff:ffff:ffff/128"))
     self.assertNotEqual(results[0]["cidr"], results[1]["cidr"])
     results = self.connection.execute(
         select([self.subnets])).fetchall()
     self.assertEqual(len(results), 1)
     self.assertEqual(results[0]["ip_policy_id"], "666")
Example #10
    def test_upgrade_multiple_ip_policy_cidrs(self):
        self.connection.execute(
            self.subnets.insert(),
            dict(id="000", _cidr="192.168.10.0/24", ip_policy_id="111"))
        self.connection.execute(
            self.ip_policy_cidrs.insert(),
            dict(id="221", created_at=datetime.date(1970, 1, 1),
                 ip_policy_id="111", cidr="0.0.0.0/24"),
            dict(id="222", created_at=datetime.date(1970, 1, 1),
                 ip_policy_id="111", cidr="192.168.10.255/32"),
            dict(id="223", created_at=datetime.date(1970, 1, 1),
                 ip_policy_id="111", cidr="192.168.10.0/23"))

        with contextlib.nested(
            mock.patch("oslo_utils.uuidutils"),
            mock.patch("oslo_utils.timeutils")
        ) as (uuid, tu):
            tu.utcnow.return_value = datetime.datetime(2004, 2, 14)
            uuid.generate_uuid.return_value = "foo"
            alembic_command.upgrade(self.config, '2748e48cee3a')
            results = self.connection.execute(
                select([self.ip_policy_cidrs])).fetchall()
            self.assertEqual(len(results), 1)
            result = results[0]
            self.assertEqual(result["id"], uuid.generate_uuid.return_value)
            self.assertEqual(result["created_at"], tu.utcnow.return_value)
            self.assertEqual(result["ip_policy_id"], "111")
            self.assertEqual(result["cidr"], "192.168.10.0/24")
Example #11
 def test_upgrade_with_subnets_non_default_ip_policy_cidrs_v6(self):
     self.connection.execute(
         self.subnets.insert(),
         dict(id="000", _cidr="fd00::/64", ip_policy_id="111"))
     self.connection.execute(
         self.ip_policy_cidrs.insert(),
         dict(id="222", created_at=datetime.date(1970, 1, 1),
              ip_policy_id="111", cidr="fd00::3/128"))
     with contextlib.nested(
         mock.patch("oslo_utils.uuidutils"),
         mock.patch("oslo_utils.timeutils")
     ) as (uuid, tu):
         uuid.generate_uuid.side_effect = (1, 2, 3)
         tu.utcnow.return_value = datetime.datetime(1970, 1, 1)
         alembic_command.upgrade(self.config, '45a07fac3d38')
         results = self.connection.execute(
             select([self.ip_policy_cidrs])).fetchall()
         self.assertEqual(len(results), 3)
         default_cidrs = ["fd00::/128", "fd00::3/128",
                          "fd00::ffff:ffff:ffff:ffff/128"]
         for result in results:
             self.assertIn(result["cidr"], default_cidrs)
             self.assertGreaterEqual(int(result["id"]), 1)
             self.assertLessEqual(int(result["id"]), 3)
             self.assertEqual(result["created_at"], tu.utcnow.return_value)
         self.assertNotEqual(results[0]["cidr"], results[1]["cidr"])
         self.assertNotEqual(results[0]["cidr"], results[2]["cidr"])
         self.assertNotEqual(results[1]["cidr"], results[2]["cidr"])
Example #12
def init_database(app):
    settings = None
    try:
        settings = DBSiteSettings.get_by_id(1)

        if not settings or not settings.inited:
            raise Exception("Can not get site settings")

        if settings.version < DBSiteSettings.VERSION:
            raise Exception("Database expired")

    except:
        from alembic import command

        # auto generate alembic version in local
        try:
            command.revision(app.config["MIGRATE_CFG"],
                             "database v%s" % DBSiteSettings.VERSION,
                             True)
        except:
            logging.exception("migrate revision error")

        command.upgrade(app.config["MIGRATE_CFG"], "head")

        if not settings:
            settings = create_default_settings(app)
        else:
            settings.inited = True
            settings.version = DBSiteSettings.VERSION
            settings.save()

    app.config["SiteTitle"] = settings.title
    app.config["SiteSubTitle"] = settings.subtitle
    app.config["OwnerEmail"] = settings.owner
Example #13
 def test_upgrade_with_subnets_non_default_ip_policy_cidrs(self):
     self.connection.execute(
         self.subnets.insert(),
         dict(id="000", _cidr="192.168.10.0/24", ip_policy_id="111"))
     self.connection.execute(
         self.ip_policy_cidrs.insert(),
         dict(id="222", created_at=datetime.date(1970, 1, 1),
              ip_policy_id="111", cidr="192.168.10.13/32"))
     with contextlib.nested(
         mock.patch("neutron.openstack.common.timeutils"),
         mock.patch("neutron.openstack.common.uuidutils")
     ) as (tu, uuid):
         uuid.generate_uuid.side_effect = (1, 2, 3)
         tu.utcnow.return_value = datetime.datetime(1970, 1, 1)
         alembic_command.upgrade(self.config, '45a07fac3d38')
         results = self.connection.execute(
             select([self.ip_policy_cidrs])).fetchall()
         self.assertEqual(len(results), 3)
         default_cidrs = ["192.168.10.0/32", "192.168.10.255/32",
                          "192.168.10.13/32"]
         for result in results:
             self.assertIn(result["cidr"], default_cidrs)
             self.assertGreaterEqual(int(result["id"]), 1)
             self.assertLessEqual(int(result["id"]), 3)
             self.assertEqual(result["created_at"], tu.utcnow.return_value)
         self.assertNotEqual(results[0]["cidr"], results[1]["cidr"])
         self.assertNotEqual(results[0]["cidr"], results[2]["cidr"])
         self.assertNotEqual(results[1]["cidr"], results[2]["cidr"])
Example #14
 def test_offline_distinct_enum_create(self):
     self._distinct_enum_script()
     with capture_context_buffer() as buf:
         command.upgrade(self.cfg, self.rid, sql=True)
     assert "CREATE TYPE pgenum AS ENUM "\
         "('one', 'two', 'three')" in buf.getvalue()
     assert "CREATE TABLE sometable (\n    data pgenum\n)" in buf.getvalue()
Example #15
    def step4(self):
        if self.options.file is None:
            print("")
            print(self.color_bar)
            print("The makefile is missing")
            print("")
            self.print_step(4, intro="Fix it and run again the step 4:")
            exit(1)

        if os.path.isfile("create.diff"):
            os.unlink("create.diff")

        check_call(["make", "-f", self.options.file, "build"])

        command.upgrade(Config("alembic.ini"), "head")
        command.upgrade(Config("alembic_static.ini"), "head")

        if not self.options.windows:
            check_call(self.project.get("cmds", {}).get(
                "apache_graceful",
                ["sudo", "/usr/sbin/apache2ctl", "graceful"]
            ))

        print("")
        print(self.color_bar)
        print("The upgrade is nearly done, now you should:")
        print("- Test your application.")

        if self.options.windows:
            print("You are running on Windows, please restart your Apache server,")
            print("because we can not do that automatically.")

        self.print_step(5)
Example #16
 def test_upgrade_empty(self):
     alembic_command.upgrade(self.config, self.current_revision)
     results = self.connection.execute(
         select([self.ip_addresses_table]).order_by(
             self.ip_addresses_table.c.id)).fetchall()
     expected_results = []
     self.assertEqual(results, expected_results)
Example #17
def upgrade():
    parser = argparse.ArgumentParser(description='Upgrade database schema')
    parser.add_argument('dburl', help='Database URL for SQLAlchemy')
    args = parser.parse_args()
    repositories.sa_pixort_data(url=args.dburl)

    command.upgrade(_get_config(args.dburl), "head")
Example #18
def main():
    log = get_logger()

    argparser = argparse.ArgumentParser('irco-import')
    argparser.add_argument('-v', '--verbose', action='store_true')
    argparser.add_argument('database')

    args = argparser.parse_args()

    sentry.context.merge({
        'tags': {
            'command': 'irco-upgrade',
        },
        'extra': {
            'parsed_arguments': args.__dict__,
        }
    })

    log.info('arguments_parsed', args=args)

    config = Config()
    config.set_main_option('script_location', 'irco:migrations')
    config.set_main_option('sqlalchemy.url', args.database)

    command.upgrade(config, 'head', sql=False, tag=None)
Example #19
    def upgrade_db(self):
        """ Upgrade the database to the head revision with Alembic.

        :return: 0 (OK) or 1 (abnormal termination error)
        """
        config_uri = self.args.config_uri
        force = self.args.force
        settings = get_appsettings(config_uri)
        name = settings["anuket.brand_name"]
        directory = settings["anuket.backup_directory"]
        today = date.today().isoformat()
        filename = "{0}-{1}.sql.bz2".format(name, today)
        path = os.path.join(directory, filename)

        # check if there is a database backup
        isfile = os.path.isfile(path)
        if not isfile and not force:
            print("There is no up to date backup for the database. " "Please use the backup script before upgrading!")
            return 1

        # upgrade the database
        alembic_cfg = get_alembic_settings(config_uri)
        upgrade(alembic_cfg, "head")

        print("Database upgrade done.")
        return 0
Example #20
 def create_checkpoint_table(self, revision='head'):
     from alembic import command, config
     cfg = config.Config(os.path.join(self.migrations_repository, 'alembic.ini'))
     cfg.set_main_option('script_location', self.migrations_repository)
     with self.engine.begin() as connection:
         cfg.attributes['connection'] = connection
         command.upgrade(cfg, revision)
Example #21
    def test_destination_rev_pre_context(self):
        env_file_fixture("""
assert context.get_revision_argument() == '%s'
""" % b)
        command.upgrade(self.cfg, b, sql=True)
        command.stamp(self.cfg, b, sql=True)
        command.downgrade(self.cfg, "%s:%s" % (c, b), sql=True)
Example #22
def do_apply_migrations():
    alembic_conf = config.Config(os.path.join(os.path.dirname(__file__), "alembic.ini"))
    alembic_conf.set_main_option("script_location", "fuel_plugin.ostf_adapter.storage:migrations")
    alembic_conf.set_main_option("sqlalchemy.url", cfg.CONF.adapter.dbpath)

    # apply initial migration
    command.upgrade(alembic_conf, "head")
Example #23
    def step2(self):
        if self.options.file is None:
            print("The makefile is missing")
            exit(1)

        if path.isfile("changelog.diff"):
            unlink("changelog.diff")

        check_call(["make", "-f", self.options.file, "build"])

        command.upgrade(Config("alembic.ini"), "head")
        command.upgrade(Config("alembic_static.ini"), "head")

        if not self.options.windows:
            check_call(["sudo", "/usr/sbin/apache2ctl", "graceful"])

        print("")
        print(self.color_bar)
        print("The upgrade is nearly done, now you should:")
        print("- Test your application.")

        if self.options.windows:
            print("You are running on Windows, please restart your Apache server,")
            print("because we can not do that automatically.")

        self.print_step(3)
Example #24
    def update(self):
        """
        Performs the update

        :returns: The update results
        """

        if self._current_revision != self._newest_revision:
            _log('DBUpdater: starting..')

            try:
                script_directory = ScriptDirectory.from_config(self._config)

                revision_list = []
                for script in script_directory.walk_revisions(self._current_revision, self._newest_revision):
                    if script.revision != self._current_revision:
                        revision_list.append(script.revision)

                for rev in reversed(revision_list):
                    try:
                        _log('Applying database revision: {0}'.format(rev))
                        command.upgrade(self._config, rev)
                    except sqlalchemy.exc.OperationalError, err:
                        if 'already exists' in str(err):
                            _log('Table already exists.. stamping to revision.')
                            self._stamp_database(rev)

            except sqlalchemy.exc.OperationalError, err:
                _log('DBUpdater: failure - {0}'.format(err), logLevel=logging.ERROR)

                return False

            _log('DBUpdater: success')
Example #25
    def test_tag_None(self):
        env_file_fixture("""
context.configure(dialect_name='sqlite')
assert context.get_tag_argument() is None
""")
        command.upgrade(self.cfg, b, sql=True)
        command.downgrade(self.cfg, "%s:%s" % (b, a), sql=True)
Example #26
def init_db():
    connection = SQLEngine.connect()
    context = MigrationContext.configure(connection)
    current_revision = context.get_current_revision()
    logger.boot('Database revision: %s', current_revision)
    if current_revision is None:
        DataBase.metadata.create_all(SQLEngine)

    config = Config(ALEMBIC_CONFIG)
    script = ScriptDirectory.from_config(config)
    head_revision = script.get_current_head()
    if current_revision is None or current_revision != head_revision:
        logger.boot('Upgrading database to version %s.', head_revision)
        command.upgrade(config, 'head')
        from option import Option
        session = Session()
        options = session.query(Option).first()
        if options is None:
            options = Option()
        options.version = head_revision
        session.add(options)
        from pulse import Pulse
        pulse = session.query(Pulse).first()
        if pulse is None:
            pulse = Pulse()
        session.add(pulse)
        session.commit()
Example #27
    def test_tag_cfg_arg(self):
        env_file_fixture("""
context.configure(dialect_name='sqlite', tag='there')
assert context.get_tag_argument() == 'there'
""")
        command.upgrade(self.cfg, b, sql=True, tag='hi')
        command.downgrade(self.cfg, "%s:%s" % (b, a), sql=True, tag='hi')
Example #28
def setup_db():
    """
    Either initialize the database if none yet exists, or migrate as needed
    """

    from alembic.config import Config
    from alembic import command

    with current_app.app_context():
        # Alembic config used by migration or stamping
        alembic_cfg = Config(
            os.path.join(current_app.config["PROJECT_PATH"], "alembic.ini")
        )

        # Database connections
        g.db_session = create_db_session()
        con = g.db_session.connection()

        # Query list of existing tables
        tables = con.execute("show tables").fetchall()
        alembic_table = ('alembic_version',)
        if alembic_table in tables:
            # Latest version has been stamped or we have been upgrading
            logging.info("Database: Migrating")
            command.upgrade(alembic_cfg, "head")
        else:
            # The database needs to be initialized
            logging.info("Database: Initializing")
            init_db()
            command.stamp(alembic_cfg, "head")
Example #29
 def test_encode(self):
     with capture_context_buffer(
         bytes_io=True,
         output_encoding='utf-8'
     ) as buf:
         command.upgrade(self.cfg, self.a, sql=True)
     assert compat.u("« S’il vous plaît…").encode("utf-8") in buf.getvalue()
Example #30
    def upgradeDatabase(
            cls, databaseUrl, resourcesPath =None, localResourcesPath =None, revision ='head',
            config =None
    ):
        if config is None:
            config = cls.getConfig(
                databaseUrl,
                resourcesPath=resourcesPath,
                localResourcesPath=localResourcesPath)

        if config is None:
            return False

        current = cls.getCurrentDatabaseRevision(
            databaseUrl=databaseUrl,
            localResourcesPath=localResourcesPath)

        head = cls.getHeadDatabaseRevision(
            databaseUrl=databaseUrl,
            resourcesPath=resourcesPath,
            config=config)

        if current == head:
            return False

        alembicCmd.upgrade(config=config, revision=revision)
        return True
Example #31
def upgrade(to_version='head', sql_url=None):
    """Upgrade to the specified version."""
    alembic_cfg = init_config(sql_url)
    alembic_command.upgrade(alembic_cfg, to_version)
Example #32
def _upgrade_alembic(engine, config, version):
    # re-use the connection rather than creating a new one
    with engine.begin() as connection:
        config.attributes['connection'] = connection
        alembic_api.upgrade(config, version or 'head')
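
The connection-sharing shown in this example only takes effect if the project's env.py checks config.attributes before building its own engine. What follows is a minimal sketch of that env.py counterpart, assuming the common Alembic cookbook convention; target_metadata and the helper _run_migrations are placeholders to adapt to the actual project, not code taken from the examples above.

# env.py sketch: reuse a connection passed in via config.attributes, if any.
from sqlalchemy import engine_from_config, pool
from alembic import context

config = context.config
target_metadata = None  # placeholder: set to the project's MetaData


def _run_migrations(connection):
    # Configure the migration context against the given connection and run.
    context.configure(connection=connection, target_metadata=target_metadata)
    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    connectable = config.attributes.get("connection", None)
    if connectable is None:
        # No connection was handed in: fall back to building an Engine
        # from the sqlalchemy.url configured in alembic.ini.
        connectable = engine_from_config(
            config.get_section(config.config_ini_section),
            prefix="sqlalchemy.",
            poolclass=pool.NullPool,
        )
        with connectable.connect() as connection:
            _run_migrations(connection)
    else:
        # A live Connection was provided by the caller (as in the example
        # above); use it directly so everything shares one transaction.
        _run_migrations(connectable)


if not context.is_offline_mode():
    run_migrations_online()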
Example #33
 def test_batch_separator_custom(self):
     with capture_context_buffer(mssql_batch_separator="BYE") as buf:
         command.upgrade(self.cfg, self.a, sql=True)
     assert "BYE" in buf.getvalue()
Example #34
 def test_batch_separator_default(self):
     with capture_context_buffer() as buf:
         command.upgrade(self.cfg, self.a, sql=True)
     assert "GO" in buf.getvalue()
Example #35
def alembic_migrate():
    from alembic.config import Config
    from alembic import command

    command.upgrade(Config(settings.ALEMBIC_CONFIG_PATH), "head")
Example #36
def main():
    with app.app_context():
        if daconfig.get('use alembic', True):
            changed = False
            if db.engine.has_table(dbtableprefix + 'userdict'):
                db.session.query(UserDict).filter(
                    db.func.length(UserDict.filename) > 255).delete(
                        synchronize_session=False)
                changed = True
            if db.engine.has_table(dbtableprefix + 'userdictkeys'):
                db.session.query(UserDictKeys).filter(
                    db.func.length(UserDictKeys.filename) > 255).delete(
                        synchronize_session=False)
                changed = True
            if db.engine.has_table(dbtableprefix + 'chatlog'):
                db.session.query(ChatLog).filter(
                    db.func.length(ChatLog.filename) > 255).delete(
                        synchronize_session=False)
                changed = True
            if db.engine.has_table(dbtableprefix + 'uploads'):
                db.session.query(Uploads).filter(
                    db.func.length(Uploads.filename) > 255).delete(
                        synchronize_session=False)
                db.session.query(Uploads).filter(
                    db.func.length(Uploads.yamlfile) > 255).delete(
                        synchronize_session=False)
                changed = True
            if db.engine.has_table(dbtableprefix + 'objectstorage'):
                db.session.query(ObjectStorage).filter(
                    db.func.length(ObjectStorage.key) > 1024).delete(
                        synchronize_session=False)
                changed = True
            if db.engine.has_table(dbtableprefix + 'speaklist'):
                db.session.query(SpeakList).filter(
                    db.func.length(SpeakList.filename) > 255).delete(
                        synchronize_session=False)
                changed = True
            if db.engine.has_table(dbtableprefix + 'shortener'):
                db.session.query(Shortener).filter(
                    db.func.length(Shortener.filename) > 255).delete(
                        synchronize_session=False)
                db.session.query(Shortener).filter(
                    db.func.length(Shortener.key) > 255).delete(
                        synchronize_session=False)
                changed = True
            if db.engine.has_table(dbtableprefix + 'machinelearning'):
                db.session.query(MachineLearning).filter(
                    db.func.length(MachineLearning.key) > 1024).delete(
                        synchronize_session=False)
                db.session.query(MachineLearning).filter(
                    db.func.length(MachineLearning.group_id) > 1024).delete(
                        synchronize_session=False)
                changed = True
            if db.engine.has_table(dbtableprefix + 'globalobjectstorage'):
                db.session.query(GlobalObjectStorage).filter(
                    db.func.length(GlobalObjectStorage.key) > 1024).delete(
                        synchronize_session=False)
                changed = True
            if changed:
                db.session.commit()
            packagedir = pkg_resources.resource_filename(
                pkg_resources.Requirement.parse('docassemble.webapp'),
                'docassemble/webapp')
            if not os.path.isdir(packagedir):
                sys.exit("path for running alembic could not be found")
            from alembic.config import Config
            from alembic import command
            alembic_cfg = Config(os.path.join(packagedir, 'alembic.ini'))
            alembic_cfg.set_main_option("sqlalchemy.url",
                                        alchemy_connection_string())
            alembic_cfg.set_main_option("script_location",
                                        os.path.join(packagedir, 'alembic'))
            if not db.engine.has_table(dbtableprefix + 'alembic_version'):
                sys.stderr.write("Creating alembic stamp\n")
                command.stamp(alembic_cfg, "head")
            if db.engine.has_table(dbtableprefix + 'user'):
                sys.stderr.write("Running alembic upgrade\n")
                command.upgrade(alembic_cfg, "head")
        #db.drop_all()
        try:
            sys.stderr.write("Trying to create tables\n")
            db.create_all()
        except:
            sys.stderr.write(
                "Error trying to create tables; trying a second time.\n")
            try:
                db.create_all()
            except:
                sys.stderr.write(
                    "Error trying to create tables; trying a third time.\n")
                db.create_all()
        populate_tables()
        db.engine.dispose()
Example #37
def _upgrade():
    config = db.alembic_config
    upgrade(config, '213e2a3392f2')  # = init revision
Example #38
def upgrade(directory=None, revision='head', sql=False, tag=None, x_arg=None):
    """Upgrade to a later version"""
    config = current_app.extensions['migrate'].migrate.get_config(directory,
                                                                  x_arg=x_arg)
    command.upgrade(config, revision, sql=sql, tag=tag)
Example #39
def upgrade_databases(local_conf, global_conf):
    """
    the database migration is managed by using alembic

     see the alembic.ini file for configuration options
     remark the database urls in the alembic.ini will be
     replaced with the ones of your linotp.ini

     if the writeback of the alembic.ini should not be done,
     this could be specified in the linotp.ini by the option

      alembic.writeback = False

    :param local_conf: the linotp section of the linotp configuration
    :param global_conf: the whole linotp configuration

    :return: -nothing-
    """

    preface = """# This config file is adjusted wrt. the sqlalchemy.urls
# by the websetup.py during
#
#   paster setup-app linotp.ini
#
# Before running alembic manually, make sure that the sqlalchemy.url's
# are correct !!
#
# alembic lets you run the database migration with the commands
#
#      alembic upgrade head
# or
#      alembic downgrade -1

"""

    config = local_conf
    here = global_conf.get('here', '')
    alembic_ini = config.get('alembic.ini', "%s/alembic.ini" % here)

    if not os.path.isfile(alembic_ini):
        log.error('No Database migration done as no alembic configuration'
                  ' [alembic.ini] could be found!')
        return

    databases = {}
    linotp_url = config.get('sqlalchemy.url', '')
    if linotp_url:
        databases['linotp'] = linotp_url
    audit_url = config.get('linotpAudit.sql.url', '')
    if audit_url:
        table_prefix = config.get("linotpAudit.sql.table_prefix", "")
        databases['audit'] = audit_url
    openid_url = config.get('linotpOpenID.sql.url', '')
    if openid_url:
        databases['openid'] = openid_url

    # load the alembic configuration
    alembic_cfg = Config(alembic_ini)

    for database in databases:
        if database == 'audit':
            alembic_cfg.set_section_option(database, 'table_prefix',
                                           table_prefix)
        alembic_cfg.set_section_option(database, 'sqlalchemy.url',
                                       databases.get(database))

    alembic_cfg.set_section_option('alembic', 'databases',
                                   ','.join(databases.keys()))

    if config.get('alembic.writeback', 'false').lower() == 'true':
        fileConfig = alembic_cfg.file_config
        with open(alembic_ini, 'w') as cfgfile:
            cfgfile.write(preface)
            fileConfig.write(cfgfile)

    try:
        if config.get('alembic.auto_update', 'false').lower() == 'true':
            command.upgrade(alembic_cfg, "head")
    except Exception as exx:
        log.exception('error during upgrade %r' % exx)

    return
Example #40
 def upgrade_schema():
     config = AlembicConfig(str(PACKAGE_DIRECTORY / "alembic.ini"))
     command.upgrade(config, "head")
Example #41
def upgrade(ctx, revision="head", sql=False, tag=None):
    """Upgrade to a later version"""
    command.upgrade(ctx.obj["migrations"], revision, sql=sql, tag=tag)
Example #42
def initdb():
    config = AlembicConfig('alembic.ini')
    command.upgrade(config, 'head')
Example #43
def main(confdir: str = "/etc/cslbot") -> None:
    conf_obj = config.Config(join(confdir, 'config.cfg'))
    command.upgrade(conf_obj, 'head')
Example #44
def upgrade(directory=None, revision='head', sql=False, tag=None, x_arg=None):
    """Upgrade to a later version"""
    config = _get_config(directory, x_arg=x_arg)
    command.upgrade(config, revision, sql=sql, tag=tag)
Example #45
def run_alembic_upgrade(alembic_config, conn, run_id=None, rev="head"):
    alembic_config.attributes["connection"] = conn
    alembic_config.attributes["run_id"] = run_id
    upgrade(alembic_config, rev)
Example #46
def upgrade():
    command.upgrade(config, 'head')
Example #47
def get_migration_data(config, revision):
    """Get migration data in form of a dict.

    :param config: alembic config object
    :type config: alembic.config.Config
    :param revision: revision name
    :type revision: str

    :return: migration data for given revision in form:
        {
            'revision': '123123123',
            'down_revision': '234234234',
            'phases': [
                {
                    'name': 'before-deploy',
                    'steps': [
                        {
                            'type': 'mysql',
                            'script': 'alter table example add column int'
                        }
                    ]
                }
            ]
        }
    """
    config.output_buffer = StringIO()
    script_directory = ScriptDirectory.from_config(config)
    script = script_directory.get_revision(revision)
    phases = frozenset(
        phase.strip()
        for phase in config.get_main_option('phases', '').split())
    script_attrs = get_script_attributes(config, script)
    default_phase = config.get_main_option('default-phase')
    if not default_phase or default_phase not in phases:
        raise RuntimeError(
            "'default-phase' should be configured and should be a member of 'phases'"
        )
    revision_range = ':'.join(
        (script.down_revision, revision)) if script.down_revision else revision
    upgrade(config, revision_range, sql=True)
    output_text = config.output_buffer.getvalue()
    dialect = make_url(
        config.get_main_option('sqlalchemy.url')).get_dialect().name
    phase_texts = PHASE_RE.split(output_text)
    if not script.down_revision and len(phase_texts) > 2:
        phase_texts[1] = phase_texts.pop(0) + phase_texts[1]
    if len(phase_texts) % 2:
        phase_texts.insert(0, default_phase)
    phases = {}
    for phase_name, phase_text in grouper(phase_texts, 2):
        script_texts = SCRIPT_RE.split(phase_text)
        if not script_texts[0]:
            del script_texts[0]
        steps = []
        phases[phase_name] = dict(name=phase_name, steps=steps)
        if len(script_texts) % 2:
            script_texts.insert(0, None)
        for script_name, script_text in grouper(script_texts, 2):
            if script_name:
                steps.append(get_script_data(script_directory, script_name))
            script_text = script_text.strip()
            if script_text:
                steps.append(dict(type=dialect, script=script_text))
    return dict(revision=revision,
                down_revision=script.down_revision,
                attributes=script_attrs,
                phases=phases)
Example #48
def upgrade_database(tag, sql, revision):
    """Upgrades database schema to newest version."""
    alembic_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "alembic.ini")
    alembic_cfg = AlembicConfig(alembic_path)
    alembic_command.upgrade(alembic_cfg, revision, sql=sql, tag=tag)
    click.secho("Success.", fg="green")
Example #49
def initialize(args):
    """
    This command exists to:

    - Prevent the user having to type more than one thing
    - Prevent the user seeing internals like 'manage.py' which we would
      rather people were not messing with on production systems.
    """
    log.info("Loading configuration..")
    config = CalamariConfig()

    # Generate django's SECRET_KEY setting
    # Do this first, otherwise subsequent django ops will raise ImproperlyConfigured.
    # Write into a file instead of directly, so that package upgrades etc won't spuriously
    # prompt for modified config unless it really is modified.
    if not os.path.exists(config.get('calamari_web', 'secret_key_path')):
        chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
        open(config.get('calamari_web', 'secret_key_path'), 'w').write(get_random_string(50, chars))

    # Configure postgres database
    if os.path.exists(POSTGRES_SLS):
        p = subprocess.Popen(["salt-call", "--local", "state.template",
                              POSTGRES_SLS],
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()
        log.debug("Postgres salt stdout: %s" % out)
        log.debug("Postgres salt stderr: %s" % err)
        if p.returncode != 0:
            raise RuntimeError("salt-call for postgres failed with rc={0}".format(p.returncode))
    else:
        # This is the path you take if you're running in a development environment
        log.debug("Skipping postgres configuration, SLS not found")

    # Cthulhu's database
    db_path = config.get('cthulhu', 'db_path')
    engine = create_engine(db_path)
    Base.metadata.reflect(engine)
    alembic_config = AlembicConfig()
    if ALEMBIC_TABLE in Base.metadata.tables:
        log.info("Updating database...")
        # Database already populated, migrate forward
        command.upgrade(alembic_config, "head")
    else:
        log.info("Initializing database...")
        # Blank database, do initial population
        Base.metadata.create_all(engine)
        command.stamp(alembic_config, "head")

    # Django's database
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "calamari_web.settings")
    with quiet():
        execute_from_command_line(["", "syncdb", "--noinput"])

    log.info("Initializing web interface...")
    user_model = get_user_model()

    if args.admin_username and args.admin_password and args.admin_email:
        if not user_model.objects.filter(username=args.admin_username).exists():
            log.info("Creating user '%s'" % args.admin_username)
            user_model.objects.create_superuser(
                username=args.admin_username,
                password=args.admin_password,
                email=args.admin_email
            )
    else:
        if not user_model.objects.all().count():
            # When prompting for details, it's good to let the user know what the account
            # is (especially that's a web UI one, not a linux system one)
            log.info("You will now be prompted for login details for the administrative "
                     "user account.  This is the account you will use to log into the web interface "
                     "once setup is complete.")
            # Prompt for user details
            execute_from_command_line(["", "createsuperuser"])

    # Django's static files
    with quiet():
        execute_from_command_line(["", "collectstatic", "--noinput"])

    # Because we've loaded Django, it will have written log files as
    # this user (probably root).  Fix it so that apache can write them later.
    apache_user = pwd.getpwnam(config.get('calamari_web', 'username'))
    os.chown(config.get('calamari_web', 'log_path'), apache_user.pw_uid, apache_user.pw_gid)

    # Handle SQLite case, otherwise no chown is needed
    if config.get('calamari_web', 'db_engine').endswith("sqlite3"):
        os.chown(config.get('calamari_web', 'db_name'), apache_user.pw_uid, apache_user.pw_gid)

    # Start services, configure to run on boot
    if os.path.exists(SERVICES_SLS):
        log.info("Starting/enabling services...")
        p = subprocess.Popen(["salt-call", "--local", "state.template",
                              SERVICES_SLS],
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()
        log.debug("Services salt stdout: %s" % out)
        log.debug("Services salt stderr: %s" % err)
        if p.returncode != 0:
            raise RuntimeError("salt-call for services failed with rc={0}".format(p.returncode))
    else:
        # This is the path you take if you're running in a development environment
        log.debug("Skipping services configuration")

    # Signal supervisor to restart cthulhu as we have created its database
    log.info("Restarting services...")
    subprocess.call(['supervisorctl', 'restart', 'cthulhu'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    # TODO: optionally generate or install HTTPS certs + hand to apache
    log.info("Complete.")
Example #50
def app():
    os.environ['DISCODE_CONFIG'] = 'discode_server/config/test.py'
    command.upgrade(config.Config('alembic.ini'), 'head')
    app = app_.create_app()
    yield app
    command.downgrade(config.Config('alembic.ini'), 'base')
Example #51
if not os.path.isdir(MIGRATIONS_DIR):
    alecomm.init(config, MIGRATIONS_DIR)

    env_file = open('%senv.py' % MIGRATIONS_DIR, 'r+')
    text = env_file.read()
    text = text.replace('target_metadata=target_metadata',
                        'target_metadata=target_metadata, compare_type=True')
    text = text.replace(
        'target_metadata = None',
        'import models\ntarget_metadata = models.base.metadata')
    env_file.seek(0)
    env_file.write(text)
    env_file.close()

# Makes sure the database is up to date
alecomm.upgrade(config, 'head')

# Check for changes in the database
mc = alemig.MigrationContext.configure(engine.connect())
diff_list = aleauto.compare_metadata(mc, models.base.metadata)

# Update the database
if diff_list:
    alecomm.revision(config, None, autogenerate=True)
    alecomm.upgrade(config, 'head')

# New Session
session = Session()

# endregion
Example #52
 def test_begin_comit(self):
     with capture_context_buffer(transactional_ddl=True) as buf:
         command.upgrade(self.cfg, self.a, sql=True)
     assert "BEGIN TRANSACTION;" in buf.getvalue()
     assert "COMMIT;" in buf.getvalue()
Example #53
 def test_heads_upg(self):
     command.stamp(self.cfg, (self.b2.revision, ))
     command.upgrade(self.cfg, (self.b3.revision))
     with self._assert_lines(['a2', 'b3']):
         command.current(self.cfg)
Example #54
def test_upgrade_downgrade_upgrade():
    command.upgrade(alembic_cfg, 'head')
    command.downgrade(alembic_cfg, '-1')
    command.upgrade(alembic_cfg, 'head')
Example #55
 def db_upgrade(self):
     with self.app_context():
         command.upgrade(self._alembic_config, "head")
     return self
Example #56
def upgrade_db(rev="head"):
    """
    Upgrade DB to specified revision or head
    """
    print(cyan("Running Alembic migrations, upgrading DB to %s" % rev))
    command.upgrade(alembic_cfg, rev)
Example #57
        # show all changes
        if args.history:
            print(50*"-")
            print("revision history: ")
            print(50*"-")
            command.history(alembic_cfg, rev_range=None, verbose=False)
            sys.exit()

        #
        # really migrate
        #
        if args.direction == "up":
            # upgrade
            if args.revision:
                command.upgrade(alembic_cfg, revision=args.revision)
            if args.number == "head":
                command.upgrade(alembic_cfg, "head")
            else:
                command.upgrade(alembic_cfg, "+" + args.number)
        elif args.direction == "down":
            # downgrade
            command.downgrade(alembic_cfg, "-" + args.number)
        else:
            print("Error: ")
            print("You must at least give a direction info up / down to migrate:")
            print(50*"-")
            print(" Change history ")
            print(50*"-")
            command.history(alembic_cfg, rev_range=None, verbose=False)
            sys.exit()
Example #58
def upgrade():
    print("Start Migration")
    command.upgrade(alembic_cfg, "head")
    print("End Migration")
Example #59
 def build(self):
     log.info("Running migrations...")
     alembic_cfg = Config(self.paths.get('frontendini'))
     command.upgrade(alembic_cfg, "head")
     self.touch('sqlite_db')
Example #60
def test_upgrade():
    command.upgrade(alembic_cfg, 'head')
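
For reference, most of the examples above reduce to the same core pattern: build a Config (from an alembic.ini file or purely in code), point it at a script directory and a database URL, and call command.upgrade. Below is a minimal, self-contained sketch of that pattern; the script location and database URL are placeholders rather than values taken from any example above.

from alembic import command
from alembic.config import Config


def run_upgrade(db_url, revision="head"):
    # Build the config entirely in code; no alembic.ini file is required
    # when both options are set explicitly.
    cfg = Config()
    cfg.set_main_option("script_location", "migrations")  # placeholder path
    cfg.set_main_option("sqlalchemy.url", db_url)
    command.upgrade(cfg, revision)


if __name__ == "__main__":
    run_upgrade("sqlite:///example.db")  # placeholder URL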