def forward():
    """Create the comment table and link posts to it via a nullable FK column."""
    models.DB.create_tables([models.Comment])
    fk_field = peewee.ForeignKeyField(models.Comment, null=True,
                                      to_field=models.Comment.id)
    migrator = PostgresqlMigrator(models.DB)
    migrate(migrator.add_column('post', 'comment_id', fk_field))
def forward():
    """Create the author table and add a nullable author FK to post."""
    models.DB.create_tables([models.Author])
    fk_field = peewee.ForeignKeyField(models.Author, null=True,
                                      to_field=models.Author.id)
    migrator = PostgresqlMigrator(models.DB)
    migrate(migrator.add_column('post', 'author_id', fk_field))
def migration_002():
    """Persist path length on each instance so it need not be computed on the fly."""
    migrator = PostgresqlMigrator(db)
    with db.transaction():
        operation = migrator.add_column(Instance._meta.db_table,
                                        Instance.path_length.db_column,
                                        Instance.path_length)
        migrate(operation)
def migration_001():
    """Persist the median time on each instance instead of computing it in SQL."""
    migrator = PostgresqlMigrator(db)
    with db.transaction():
        operation = migrator.add_column(Instance._meta.db_table,
                                        Instance.median_time.db_column,
                                        Instance.median_time)
        migrate(operation)
def forward():
    """Add author name/email columns (empty-string default) to the comment table."""
    author_col = peewee.CharField(max_length=60, default='')
    email_col = peewee.CharField(max_length=60, default='')
    migrator = PostgresqlMigrator(models.DB)
    migrate(
        migrator.add_column('comment', 'comment_author', author_col),
        migrator.add_column('comment', 'comment_email', email_col),
    )
def downgrade():
    """Rename namespaced tables back to their legacy names.

    Each rename is attempted independently so that a single failure (e.g. a
    table already renamed, or missing) does not silently skip the remaining
    renames — previously all three renames shared one try block, so the
    first failure aborted the rest even though the message implied only
    that one table was skipped.
    """
    database_init(database)
    migrator = PostgresqlMigrator(database)
    renames = [
        ("auth_users", "users"),
        ("podcast_podcasts", "podcasts"),
        ("podcast_episodes", "episodes"),
    ]
    for old_name, new_name in renames:
        try:
            migrate(migrator.rename_table(old_name, new_name))
        except Exception as err:
            # Deliberate best-effort: log and continue with the next table.
            print(f"Couldn't rename table: {err}. SKIP")
def forward():
    """Create the comments table and point blogpost at it via a nullable FK."""
    models.DB.create_tables([models.Comments])
    fk_field = peewee.ForeignKeyField(models.Comments, null=True,
                                      to_field=models.Comments.id)
    migrator = PostgresqlMigrator(models.DB)
    migrate(migrator.add_column('blogpost', 'comments', fk_field))
def forward():
    """Create the author table and add a nullable author FK to blogpost."""
    # create_tables can take several models at once; this migration only
    # needs Author.
    models.DB.create_tables([models.Author])
    fk_field = peewee.ForeignKeyField(models.Author, null=True,
                                      to_field=models.Author.id)
    migrator = PostgresqlMigrator(models.DB)
    migrate(migrator.add_column('blogpost', 'author_id', fk_field))
def apply(self, direction):
    """Run migrations 'up' (pending ones, in order) or 'down' (applied ones, newest first).

    Stops at the first migration whose application raises RuntimeError.

    :param direction: either 'up' or 'down'.
    :raises ValueError: for any other direction (previously this fell
        through and crashed later with NameError on the unbound ``ids``).
    """
    applied_migration_ids = set(
        m.migration_id for m in Migration.select().execute())
    pg_migrator = PostgresqlMigrator(self.db)
    if direction == 'up':
        # Pending migrations only, in declared order.
        ids = [m for m in self.migration_ids if m not in applied_migration_ids]
    elif direction == 'down':
        # Applied migrations only, newest first.
        ids = [m for m in self.migration_ids[::-1] if m in applied_migration_ids]
    else:
        raise ValueError("direction must be 'up' or 'down', got %r" % (direction,))
    applied_atleast_one = False
    for m_id in ids:
        applied_atleast_one = True
        fn = getattr(self._load_migration(m_id), direction)
        try:
            self._apply_single_migration(pg_migrator, direction, m_id, fn)
        except RuntimeError:
            break
    if not applied_atleast_one:
        print('Nothing to do.')
def migrate():
    """Create the schema on first run, otherwise upgrade it to LAST_VERSION."""
    database.connect()
    database.create_tables([Version], safe=True)
    try:
        stored = Version.select().get()
    except Version.DoesNotExist:
        # Fresh database: create everything at the current version.
        print('Creating tables')
        database.create_tables([User, MailCode, MailRequest])
        stored = Version(version=LAST_VERSION)
        stored.save()
    if stored.version >= LAST_VERSION:
        return
    print('Upgrading database version {} to version {}'.format(stored.version,
                                                               LAST_VERSION))
    # Pick the migrator flavor from the configured connection URI.
    uri = current_app.config['DATABASE']
    if 'mysql' in uri:
        migrator = MySQLMigrator(database)
    elif 'sqlite' in uri:
        migrator = SqliteMigrator(database)
    else:
        migrator = PostgresqlMigrator(database)
    # TODO: write migrations here
    if stored.version != LAST_VERSION:
        raise ValueError('LAST_VERSION in db.py should be {}'.format(stored.version))
def migrate():
    """Create the schema on first run, otherwise upgrade it to LAST_VERSION."""
    database.create_tables([Version], safe=True)
    try:
        stored = Version.select().get()
    except Version.DoesNotExist:
        # Fresh database: create everything at the current version.
        database.create_tables([User])
        stored = Version(version=LAST_VERSION)
        stored.save()
    if stored.version >= LAST_VERSION:
        return
    # Pick the migrator flavor from the configured connection URI.
    if 'mysql' in config.DATABASE_URI:
        migrator = MySQLMigrator(database)
    elif 'sqlite' in config.DATABASE_URI:
        migrator = SqliteMigrator(database)
    else:
        migrator = PostgresqlMigrator(database)
    # No migrations yet
    logging.info('Migrated the database to version %s', stored.version)
    if stored.version != LAST_VERSION:
        raise ValueError('LAST_VERSION in db.py should be {}'.format(stored.version))
def migrate():
    """Create the schema on first run, otherwise apply stepwise upgrades."""
    database.create_tables([Version], safe=True)
    try:
        stored = Version.select().get()
    except Version.DoesNotExist:
        database.create_tables([User, Task, Telegram])
        stored = Version(version=LAST_VERSION)
        stored.save()
    if stored.version >= LAST_VERSION:
        return
    # Pick the migrator flavor from the configured connection URI.
    if 'mysql' in config.DATABASE_URI:
        migrator = MySQLMigrator(database)
    elif 'sqlite' in config.DATABASE_URI:
        migrator = SqliteMigrator(database)
    else:
        migrator = PostgresqlMigrator(database)
    if stored.version == 0:
        # v0 -> v1: telegram linkage table plus per-user language column.
        database.create_tables([Telegram])
        peewee_migrate(
            migrator.add_column(User._meta.db_table, User.lang.name, User.lang))
        stored.version = 1
        stored.save()
    if stored.version != LAST_VERSION:
        raise ValueError('LAST_VERSION in db.py should be {}'.format(stored.version))
def migrate():
    """Create the schema on first run, otherwise apply stepwise upgrades to LAST_VERSION."""
    database.create_tables([Version], safe=True)
    try:
        v = Version.select().get()
    except Version.DoesNotExist:
        # Fresh database: create all tables directly at the current version.
        database.create_tables([User, Project, Feature, Task])
        v = Version(version=LAST_VERSION)
        v.save()
    if v.version >= LAST_VERSION:
        return
    # Pick the migrator flavor from the configured connection URI.
    if 'mysql' in config.DATABASE_URI:
        migrator = MySQLMigrator(database)
    elif 'sqlite' in config.DATABASE_URI:
        migrator = SqliteMigrator(database)
    else:
        migrator = PostgresqlMigrator(database)
    if v.version == 0:
        # Making a copy of Project.owner field, because it's not nullable
        # and we need to migrate a default value (the first configured admin).
        admin = User.select(User.uid).where(User.uid == list(config.ADMINS)[0]).get()
        owner = ForeignKeyField(User, related_name='projects', to_field=User.uid, default=admin)
        peewee_migrate(
            migrator.add_column(User._meta.db_table, User.admin.db_column, User.admin),
            migrator.add_column(Project._meta.db_table, Project.owner.db_column, owner),
            migrator.add_column(Project._meta.db_table, Project.hidden.db_column, Project.hidden),
            migrator.add_column(Project._meta.db_table, Project.overlays.db_column, Project.overlays),
            migrator.add_column(Task._meta.db_table, Task.skipped.db_column, Task.skipped),
            migrator.drop_column(Project._meta.db_table, 'validated_count'),
        )
        v.version = 1
        v.save()
    if v.version == 1:
        # v1 -> v2: per-project validation/audit flags.
        peewee_migrate(
            migrator.add_column(Project._meta.db_table, Project.validate_modified.db_column, Project.validate_modified),
            migrator.add_column(Project._meta.db_table, Project.audit.db_column, Project.audit),
        )
        v.version = 2
        v.save()
    if v.version != LAST_VERSION:
        raise ValueError('LAST_VERSION in db.py should be {}'.format(v.version))
def __get_migrator(self):
    """Return the playhouse migrator matching the configured database engine.

    :raises ImproperlyConfigured: for engines without migration support.
    """
    engine = self.db.engine
    if isinstance(engine, (peewee.SqliteDatabase, SqliteExtDatabase)):
        return SqliteMigrator(engine)
    if isinstance(engine, peewee.MySQLDatabase):
        return MySQLMigrator(engine)
    if isinstance(engine, peewee.PostgresqlDatabase):
        return PostgresqlMigrator(engine)
    raise ImproperlyConfigured('Database engine doesn\'t support Migrations!')
def init_database_from_uri(db_uri: str) -> peewee.Proxy:
    '''Builds a database connection from a DB URI.

    Supports sqlite and postgres URIs, initializes the module-level proxy and
    migrator, discovers every BaseModel subclass in this package, and safely
    creates their tables. Returns the initialized proxy.
    '''
    global database_migrator
    parsed = parse_uri(db_uri)
    if parsed['protocol'] == 'sqlite':
        database = FKSqliteDatabase(parsed['resource'])
        database_migrator = SqliteMigrator(database)
    elif parsed['protocol'] == 'postgres':
        database = playhouse.postgres_ext.PostgresqlExtDatabase(
            parsed['database'],
            user=parsed['username'],
            password=parsed['password'],
            host=parsed['host'],
            port=parsed['port'],
        )
        database_migrator = PostgresqlMigrator(database)
    else:
        raise ValueError('Unknown DB schema: {}'.format(parsed['protocol']))
    database_proxy.initialize(database)
    database.connect()
    # Import all BaseModels and run create_tables(...)
    tables = []
    for module in __all__:
        mod = import_module('{}.{}'.format(__package__, module))
        for member in dir(mod):
            member_obj = getattr(mod, member)
            if not inspect.isclass(member_obj):
                continue
            # Skip the abstract base itself; only concrete models get tables.
            if member_obj.__name__ == 'BaseModel':
                continue
            if issubclass(member_obj, BaseModel):
                log.debug('Loading database model: %s.%s.%s' % (__package__, module, member))
                tables.append(member_obj)
    log.debug('Ensuring tables are safely created..')
    try:
        # safe=True: existing tables are left untouched.
        database.create_tables(tables, safe=True)
    except Exception:
        # Best-effort: log the failure but still return a usable proxy.
        log.exception('An error occurred while ensuring tables')
    return database_proxy
def init_postgres(url):
    """Bind the global DB proxy to the PostgreSQL server described by *url*.

    Returns the proxy together with a PostgresqlMigrator bound to it.
    """
    global DB
    db_name = url.path.strip("/")
    DB.initialize(PostgresqlDatabase(
        database=db_name,
        user=url.user or None,
        password=url.password or None,
        host=url.host,
        autocommit=bool(url.get('autocommit', True)),
        autorollback=bool(url.get('autorollback', True)),
    ))
    log.info("Database initialized as '%s'. Checking migrations...", db_name)
    return DB, PostgresqlMigrator(DB)
def up(db):
    """Create all tables and seed initial coins, games, achievements, goals
    and the admin profile, all inside one atomic transaction.

    NOTE(review): the snippet was captured with collapsed indentation; the
    nesting below (seeding guarded by the Coin existence check) is the most
    plausible reading — confirm against version control.
    """
    with db.atomic():
        migrator = PostgresqlMigrator(db)
        db.bind(MODELS, bind_refs=False, bind_backrefs=False)
        db.create_tables(MODELS)
        # Only seed when the database is empty (no Coin with id 1 yet).
        if Coin.get_or_none(Coin.id == 1) is None:
            Coin.create(name='Bitcoin', symbol='BTC')
            Coin.create(name='Ethereum', symbol='ETH')
            Coin.create(name='Litecoin', symbol='LTC')
            Coin.create(name='Coin 3', symbol='CO3')
            Coin.create(name='Coin 4', symbol='CO4')
            Coin.create(name='Coin 5', symbol='CO5')
            global_indef = Game.create(name='Global Indefinite', starting_cash=10000.00, shareable_link='INDEF', shareable_code='INDEF', ends_at=None)
            # insert achievements into database
            Achievement.create(name="Win", description="Finish in first place in a private game")
            Achievement.create(name="Double net worth", description="Achieved by doubling your net worth in a game")
            Achievement.create(name="Identity Crisis", description="Change your username")
            # insert goals into database
            Goal.create(name="Entrepreneur", description="Create a private game")
            # Every coin participates in the indefinite global game.
            all_coins = Coin.select()
            for coin in all_coins:
                GameCoin.create(game=global_indef, coin=coin)
            global_timed = Game.create(name='Global Timed', starting_cash=10000.00, shareable_link='TIMED', shareable_code='TIMED', ends_at=datetime.utcnow() + timedelta(minutes=1))  # CHANGEME for devel purposes, making it 1 min for now
            GameCoin.create(game=global_timed, coin=Coin.get())
            # from auth.services import register
            hashed = bcrypt.hashpw("admin".encode(), bcrypt.gensalt()).decode()
            admin = Profile.create(username="******", hashed_password=hashed, is_admin=True)
            # Required so that admin can still view graphs in the landing page
            GameProfile.create(profile=admin, game=global_indef, cash=0.0)
def main(migration_name, db, *args, **kwargs):
    """Run the forward() migration from the named migration module.

    :param migration_name: module name under the migrations package.
    :param db: database flavor string, 'sqlite' or 'postgres'.
    """
    # Create a migrator for the type of database that is being used.
    migrator_factories = {
        'sqlite': SqliteMigrator,
        'postgres': PostgresqlMigrator,
    }
    factory = migrator_factories.get(db)
    if factory is None:
        logger.error("Could not find appropriate migrator for the database.")
        return
    migrator = factory(db_proxy)
    # Import migration module and run forward migration.
    module_name = path_to_migrations + '.' + migration_name
    migration = importlib.import_module(module_name)
    migration.forward(migrator)
def migrate_db(db: PeeweeSession):
    """Apply outstanding schema migrations inside a single transaction."""
    with db.atomic() as txs:
        migrator = PostgresqlMigrator(db)
        try:
            migration = Migration.get()
            if migration.version < 1:
                # everything required to reach schema level 1
                level_1(db, migration, migrator)
            if migration.version < 2:
                # everything required to reach schema level 2
                level_2(db, migration, migrator)
        except ProgrammingError:
            log.exception('Error - Migrations table not found, please run init_db first!')
            txs.rollback()
            sys.exit(1)
def migrate():
    """Create the schema on first run, otherwise apply stepwise upgrades."""
    database.connect()
    database.create_tables([Version], safe=True)
    try:
        stored = Version.select().get()
    except Version.DoesNotExist:
        # Prints are here to mark a change for Ansible.
        print('Creating tables')
        database.create_tables([User, MailCode, MailRequest, ProfileRequest])
        stored = Version(version=LAST_VERSION)
        stored.save()
    if stored.version >= LAST_VERSION:
        return
    print('Upgrading database version {} to version {}'.format(
        stored.version, LAST_VERSION))
    # Pick the migrator flavor from the configured connection URI.
    uri = current_app.config['DATABASE']
    if 'mysql' in uri:
        migrator = MySQLMigrator(database)
    elif 'sqlite' in uri:
        migrator = SqliteMigrator(database)
    else:
        migrator = PostgresqlMigrator(database)
    if stored.version == 0:
        # v0 -> v1: profile-request support.
        database.create_tables([ProfileRequest])
        stored.version = 1
        stored.save()
    # When making further migrations, refer to
    # https://github.com/mapsme/cf_audit/blob/master/www/db.py
    if stored.version != LAST_VERSION:
        raise ValueError('LAST_VERSION in db.py should be {}'.format(
            stored.version))
# NOTE(review): this snippet is truncated — the final migrate(...) call is
# never closed, so the captured file cannot parse. Recover the tail from
# version control before running.
from redash.models import db, Organization, Group
from redash import settings
from playhouse.migrate import PostgresqlMigrator, migrate

if __name__ == '__main__':
    migrator = PostgresqlMigrator(db.database)
    with db.database.transaction():
        # Introduce organizations and create a default org for existing data.
        Organization.create_table()
        default_org = Organization.create(name="Default", slug='default', settings={
            Organization.SETTING_GOOGLE_APPS_DOMAINS: list(settings.GOOGLE_APPS_DOMAIN)
        })
        # Reuse the Group.org field definition, defaulting to the new org,
        # to stamp every multi-tenant table with an org_id column.
        column = Group.org
        column.default = default_org
        migrate(
            migrator.add_column('groups', 'org_id', column),
            migrator.add_column('events', 'org_id', column),
            migrator.add_column('data_sources', 'org_id', column),
            migrator.add_column('users', 'org_id', column),
            migrator.add_column('dashboards', 'org_id', column),
            migrator.add_column('queries', 'org_id', column),
            migrator.add_column('query_results', 'org_id', column),
        )
        # Change the uniqueness constraint on user email to be (org, email):
        migrate(
            migrator.drop_index('users', 'users_email'),
            migrator.add_index('users', ('org_id', 'email'), unique=True)
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)
    # Remove the per-group country list; the transaction makes the drop
    # all-or-nothing.
    with db.database.transaction():
        migrate(migrator.drop_column('groups', 'countries'))
    db.close_db(None)
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)
    with db.database.transaction():
        # Add created_at/updated_at bookkeeping columns across the models.
        migrate(
            migrator.add_column('queries', 'updated_at', models.Query.updated_at),
            migrator.add_column('dashboards', 'updated_at', models.Dashboard.updated_at),
            migrator.add_column('widgets', 'updated_at', models.Widget.updated_at),
            migrator.add_column('users', 'created_at', models.User.created_at),
            migrator.add_column('users', 'updated_at', models.User.updated_at),
            migrator.add_column('visualizations', 'created_at', models.Visualization.created_at),
            migrator.add_column('visualizations', 'updated_at', models.Visualization.updated_at),
        )
        # Backfill updated_at from created_at for pre-existing rows.
        db.database.execute_sql("UPDATE queries SET updated_at = created_at;")
        db.database.execute_sql("UPDATE dashboards SET updated_at = created_at;")
        db.database.execute_sql("UPDATE widgets SET updated_at = created_at;")
    db.close_db(None)
def backward():
    """Drop blogpost.author_id (destroys its data) and remove the author table."""
    migrator = PostgresqlMigrator(models.DB)
    migrate(migrator.drop_column('blogpost', 'author_id'))
    models.Author.drop_table()
from redash.models import db, Query
from playhouse.migrate import PostgresqlMigrator, migrate

if __name__ == '__main__':
    migrator = PostgresqlMigrator(db.database)
    # Add the options column to queries, atomically.
    with db.database.transaction():
        migrate(migrator.add_column('queries', 'options', Query.options))
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)
    # Queries may now exist without a data source.
    with db.database.transaction():
        migrate(migrator.drop_not_null('queries', 'data_source_id'))
    db.close_db(None)
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)
    with db.database.transaction():
        # Add the column as nullable first so existing rows don't violate
        # NOT NULL, backfill, then tighten the constraint.
        column = models.User.api_key
        column.null = True
        migrate(migrator.add_column('users', 'api_key', models.User.api_key))
        # Re-saving each user persists whatever the model populates for the
        # new field (tracked via dirty_fields).
        for user in models.User.select(models.User.id, models.User.api_key):
            user.save(only=user.dirty_fields)
        migrate(migrator.add_not_null('users', 'api_key'))
    db.close_db(None)
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)
    # Add the archive flag to queries, atomically.
    with db.database.transaction():
        migrate(migrator.add_column('queries', 'is_archived',
                                    models.Query.is_archived))
    db.close_db(None)
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models

if __name__ == "__main__":
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)
    with db.database.transaction():
        # Add the column as nullable, then explicitly drop NOT NULL.
        # NOTE(review): the column definition comes from models.Group.countries
        # but is added to the *users* table — confirm that is intended.
        column = models.Group.countries
        column.null = True
        migrate(migrator.add_column("users", "countries", models.Group.countries))
        migrate(migrator.drop_not_null("users", "countries"))
    db.close_db(None)
import peewee
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)
    # Idempotency guard: skip the migration when the column already exists.
    cursor = db.database.execute_sql(
        "SELECT column_name FROM information_schema.columns "
        "WHERE table_name='alerts' and column_name='rearm';")
    if cursor.rowcount > 0:
        # Fixed: the bare Python 2 print statement was a SyntaxError under
        # Python 3; print() behaves identically on both for a single arg.
        print("Column exists. Skipping.")
        exit()
    with db.database.transaction():
        migrate(
            migrator.add_column('alerts', 'rearm', models.Alert.rearm),
        )
    db.close_db(None)
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)
    # Events may now be recorded without an associated user.
    with db.database.transaction():
        migrate(
            migrator.drop_not_null('events', 'user_id')
        )
    # Release the connection, matching every sibling migration script
    # (previously missing here).
    db.close_db(None)
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models

if __name__ == "__main__":
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)
    with db.database.transaction():
        # Track who last modified each query; seed from the query owner.
        migrate(migrator.add_column("queries", "last_modified_by_id",
                                    models.Query.last_modified_by))
        db.database.execute_sql("UPDATE queries SET last_modified_by_id = user_id;")
    db.close_db(None)
# NOTE(review): this snippet is truncated — the final migrate(...) call is
# left open after the rename_column entry. Recover the remainder from
# version control before running.
from collections import defaultdict
from redash.models import db, DataSourceGroup, DataSource, Group, Organization, User
from playhouse.migrate import PostgresqlMigrator, migrate
import peewee

if __name__ == '__main__':
    migrator = PostgresqlMigrator(db.database)
    with db.database.transaction():
        # Add type to groups
        migrate(
            migrator.add_column('groups', 'type', Group.type)
        )
        # Mark the two built-in groups so they can be distinguished from
        # user-created ones.
        for name in ['default', 'admin']:
            group = Group.get(Group.name==name)
            group.type = Group.BUILTIN_GROUP
            group.save()
        # Create association table between data sources and groups
        DataSourceGroup.create_table()
        # add default to existing data source:
        default_org = Organization.get_by_id(1)
        default_group = Group.get(Group.name=="default")
        for ds in DataSource.all(default_org):
            DataSourceGroup.create(data_source=ds, group=default_group)
        # change the groups list on a user object to be an ids list
        migrate(
            migrator.rename_column('users', 'groups', 'old_groups'),
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)
    # Remove the unused parent-user link, atomically.
    with db.database.transaction():
        migrate(migrator.drop_column('users', 'parent_user_id'))
    db.close_db(None)
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)
    # Add the groups column to dashboards, atomically.
    with db.database.transaction():
        migrate(migrator.add_column('dashboards', 'groups',
                                    models.Dashboard.groups))
    db.close_db(None)
def upgrade_db_structure():
    """Upgrade the tables version by version.

    Each step runs in its own transaction, checks the stored db_version,
    applies its schema changes idempotently (column-existence guards), and
    bumps the ServerAttribs 'db_version' marker on success.
    """
    from playhouse.migrate import PostgresqlMigrator, migrate
    migrator = PostgresqlMigrator(wapt_db)
    logger.info('Current DB: %s version: %s' % (wapt_db.connect_kwargs, get_db_version()))

    # from 1.4.1 to 1.4.2: rename blob columns and add the auxiliary tables.
    if get_db_version() < '1.4.2':
        with wapt_db.atomic():
            logger.info('Migrating from %s to %s' % (get_db_version(), '1.4.2'))
            migrate(
                migrator.rename_column(Hosts._meta.name, 'host', 'host_info'),
                migrator.rename_column(Hosts._meta.name, 'wapt', 'wapt_status'),
                migrator.rename_column(Hosts._meta.name, 'update_status', 'last_update_status'),
                migrator.rename_column(Hosts._meta.name, 'softwares', 'installed_softwares'),
                migrator.rename_column(Hosts._meta.name, 'packages', 'installed_packages'),
            )
            HostGroups.create_table(fail_silently=True)
            HostJsonRaw.create_table(fail_silently=True)
            HostWsus.create_table(fail_silently=True)
            (v, created) = ServerAttribs.get_or_create(key='db_version')
            v.value = '1.4.2'
            v.save()

    next_version = '1.4.3'
    if get_db_version() < next_version:
        with wapt_db.atomic():
            logger.info('Migrating from %s to %s' % (get_db_version(), next_version))
            if not [c.name for c in wapt_db.get_columns('hosts') if c.name == 'host_certificate']:
                migrate(
                    migrator.add_column(Hosts._meta.name, 'host_certificate', Hosts.host_certificate),
                )
            (v, created) = ServerAttribs.get_or_create(key='db_version')
            v.value = next_version
            v.save()

    next_version = '1.4.3.1'
    if get_db_version() < next_version:
        with wapt_db.atomic():
            logger.info('Migrating from %s to %s' % (get_db_version(), next_version))
            columns = [c.name for c in wapt_db.get_columns('hosts')]
            opes = []
            if 'last_logged_on_user' not in columns:
                opes.append(migrator.add_column(Hosts._meta.name, 'last_logged_on_user', Hosts.last_logged_on_user))
            if 'installed_sofwares' in columns:
                opes.append(migrator.drop_column(Hosts._meta.name, 'installed_sofwares'))
            # BUGFIX: this guard previously re-tested 'installed_sofwares'
            # (copy/paste), so 'installed_packages' was only dropped when the
            # unrelated misspelled column happened to exist.
            if 'installed_packages' in columns:
                opes.append(migrator.drop_column(Hosts._meta.name, 'installed_packages'))
            migrate(*opes)
            (v, created) = ServerAttribs.get_or_create(key='db_version')
            v.value = next_version
            v.save()

    next_version = '1.4.3.2'
    if get_db_version() < next_version:
        with wapt_db.atomic():
            logger.info('Migrating from %s to %s' % (get_db_version(), next_version))
            # Raw SQL: playhouse has no helper for ALTER COLUMN ... TYPE.
            wapt_db.execute_sql('''\
ALTER TABLE hostsoftwares ALTER COLUMN publisher TYPE character varying(2000),
ALTER COLUMN version TYPE character varying(1000);''')
            (v, created) = ServerAttribs.get_or_create(key='db_version')
            v.value = next_version
            v.save()

    next_version = '1.5.0.4'
    if get_db_version() < next_version:
        with wapt_db.atomic():
            logger.info('Migrating from %s to %s' % (get_db_version(), next_version))
            columns = [c.name for c in wapt_db.get_columns('hosts')]
            opes = []
            if 'server_uuid' not in columns:
                opes.append(migrator.add_column(Hosts._meta.name, 'server_uuid', Hosts.server_uuid))
            migrate(*opes)
            (v, created) = ServerAttribs.get_or_create(key='db_version')
            v.value = next_version
            v.save()

    next_version = '1.5.0.11'
    if get_db_version() < next_version:
        with wapt_db.atomic():
            logger.info('Migrating from %s to %s' % (get_db_version(), next_version))
            HostGroups.create_table(fail_silently=True)
            (v, created) = ServerAttribs.get_or_create(key='db_version')
            v.value = next_version
            v.save()

    next_version = '1.5.1.1'
    if get_db_version() < next_version:
        with wapt_db.atomic():
            logger.info('Migrating from %s to %s' % (get_db_version(), next_version))
            columns = [c.name for c in wapt_db.get_columns('hosts')]
            opes = []
            if 'computer_ad_site' not in columns:
                opes.append(migrator.add_column(Hosts._meta.name, 'computer_ad_site', Hosts.computer_ad_site))
            if 'computer_ad_ou' not in columns:
                opes.append(migrator.add_column(Hosts._meta.name, 'computer_ad_ou', Hosts.computer_ad_ou))
            if 'computer_ad_groups' not in columns:
                opes.append(migrator.add_column(Hosts._meta.name, 'computer_ad_groups', Hosts.computer_ad_groups))
            migrate(*opes)
            (v, created) = ServerAttribs.get_or_create(key='db_version')
            v.value = next_version
            v.save()

    next_version = '1.5.1.3'
    if get_db_version() < next_version:
        with wapt_db.atomic():
            logger.info('Migrating from %s to %s' % (get_db_version(), next_version))
            columns = [c.name for c in wapt_db.get_columns('hosts')]
            opes = []
            if 'registration_auth_user' not in columns:
                opes.append(migrator.add_column(Hosts._meta.name, 'registration_auth_user', Hosts.registration_auth_user))
            migrate(*opes)
            (v, created) = ServerAttribs.get_or_create(key='db_version')
            v.value = next_version
            v.save()

    next_version = '1.5.1.14'
    if get_db_version() < next_version:
        with wapt_db.atomic():
            logger.info('Migrating from %s to %s' % (get_db_version(), next_version))
            columns = [c.name for c in wapt_db.get_columns('hostpackagesstatus')]
            opes = []
            if 'depends' not in columns:
                opes.append(migrator.add_column(HostPackagesStatus._meta.name, 'depends', HostPackagesStatus.depends))
            if 'conflicts' not in columns:
                opes.append(migrator.add_column(HostPackagesStatus._meta.name, 'conflicts', HostPackagesStatus.conflicts))
            migrate(*opes)
            (v, created) = ServerAttribs.get_or_create(key='db_version')
            v.value = next_version
            v.save()

    next_version = '1.5.1.17'
    if get_db_version() < next_version:
        with wapt_db.atomic():
            logger.info('Migrating from %s to %s' % (get_db_version(), next_version))
            # No column changes at this level, only a new history table.
            WsusScan2History.create_table(fail_silently=True)
            (v, created) = ServerAttribs.get_or_create(key='db_version')
            v.value = next_version
            v.save()
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)
    with db.database.transaction():
        # Three-step migration: add nullable column, backfill, add NOT NULL.
        column = models.User.api_key
        column.null = True
        migrate(
            migrator.add_column('users', 'api_key', models.User.api_key),
        )
        for user in models.User.select(models.User.id, models.User.api_key):
            # Persist fields populated by model defaults (dirty_fields).
            user.save(only=user.dirty_fields)
        migrate(
            migrator.add_not_null('users', 'api_key')
        )
    db.close_db(None)
def backward():
    """Drop post.author_id (destroys its data) and remove the author table."""
    migrator = PostgresqlMigrator(models.DB)
    migrate(migrator.drop_column('post', 'author_id'))
    models.Author.drop_table()
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models

if __name__ == "__main__":
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)
    # Remove the per-user country list, atomically.
    with db.database.transaction():
        migrate(migrator.drop_column("users", "countries"))
    db.close_db(None)
from __future__ import print_function
from redash.models import db, Change, AccessPermission, Query, Dashboard
from playhouse.migrate import PostgresqlMigrator, migrate

if __name__ == '__main__':
    # Make sure the change-tracking tables exist before adding the columns.
    if not Change.table_exists():
        Change.create_table()
    if not AccessPermission.table_exists():
        AccessPermission.create_table()
    migrator = PostgresqlMigrator(db.database)
    try:
        migrate(
            migrator.add_column('queries', 'version', Query.version),
            migrator.add_column('dashboards', 'version', Dashboard.version),
        )
    except Exception as ex:
        # Best effort: the columns may already exist from a previous run.
        print("Error while adding version column to queries/dashboards. Maybe it already exists?")
        print(ex)
# NOTE(review): this snippet is truncated — the migrate(...) call is never
# closed. Recover the tail from version control before running.
import os
from redash.models import db, Organization, Group
from redash import settings
from playhouse.migrate import PostgresqlMigrator, migrate

# The following is deprecated and should be defined with the Organization object
GOOGLE_APPS_DOMAIN = settings.set_from_string(os.environ.get("REDASH_GOOGLE_APPS_DOMAIN", ""))

if __name__ == '__main__':
    migrator = PostgresqlMigrator(db.database)
    with db.database.transaction():
        # Introduce organizations and create a default org for existing data.
        Organization.create_table()
        default_org = Organization.create(
            name="Default", slug='default', settings={
                Organization.SETTING_GOOGLE_APPS_DOMAINS: list(GOOGLE_APPS_DOMAIN)
            })
        # Reuse the Group.org field definition, defaulting to the new org,
        # to stamp the multi-tenant tables with an org_id column.
        column = Group.org
        column.default = default_org
        migrate(
            migrator.add_column('groups', 'org_id', column),
            migrator.add_column('events', 'org_id', column),
            migrator.add_column('data_sources', 'org_id', column),
            migrator.add_column('users', 'org_id', column),
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)
    with db.database.transaction():
        # Add the column as nullable, then explicitly drop NOT NULL.
        column = models.User.parent_user_id
        column.null = True
        migrate(migrator.add_column('users', 'parent_user_id',
                                    models.User.parent_user_id))
        migrate(migrator.drop_not_null('users', 'parent_user_id'))
    db.close_db(None)
from playhouse.migrate import migrate, PostgresqlMigrator
from app.models import DATABASE
from app.notes.models import Note

# Empty-string default so existing rows get a value during the ADD COLUMN.
Note.description.default = ''
migrator = PostgresqlMigrator(DATABASE)
migrate(migrator.add_column('TBL_NOTE', Note.description.db_column,
                            Note.description))
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)
    with db.database.transaction():
        # Replace the ttl column with schedule: add, copy positive values, drop.
        migrate(
            migrator.add_column('queries', 'schedule', models.Query.schedule),
        )
        db.database.execute_sql("UPDATE queries SET schedule = ttl WHERE ttl > 0;")
        migrate(
            migrator.drop_column('queries', 'ttl')
        )
    db.close_db(None)
import peewee
from redash.models import db, NotificationDestination, AlertSubscription, Alert, Organization, User
from redash.destinations import get_configuration_schema_for_destination_type
from redash.utils.configuration import ConfigurationContainer
from playhouse.migrate import PostgresqlMigrator, migrate

# Legacy alert-notification settings, read from the environment.
HIPCHAT_API_TOKEN = os.environ.get('REDASH_HIPCHAT_API_TOKEN', None)
HIPCHAT_API_URL = os.environ.get('REDASH_HIPCHAT_API_URL', None)
HIPCHAT_ROOM_ID = os.environ.get('REDASH_HIPCHAT_ROOM_ID', None)
WEBHOOK_ENDPOINT = os.environ.get('REDASH_WEBHOOK_ENDPOINT', None)
WEBHOOK_USERNAME = os.environ.get('REDASH_WEBHOOK_USERNAME', None)
WEBHOOK_PASSWORD = os.environ.get('REDASH_WEBHOOK_PASSWORD', None)

if __name__ == '__main__':
    migrator = PostgresqlMigrator(db.database)
    with db.database.transaction():
        if not NotificationDestination.table_exists():
            NotificationDestination.create_table()
        # Update alert subscription fields.
        migrate(
            migrator.add_column('alert_subscriptions', 'destination_id',
                                AlertSubscription.destination)
        )
    try:
        org = Organization.get_by_slug('default')
        user = User.select().where(
            User.org == org,
            peewee.SQL("%s = ANY(groups)", org.admin_group.id)).get()
    except Exception:
        print("!!! Warning: failed finding default organization or admin user, won't migrate Webhook/HipChat alert subscriptions.")
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models

if __name__ == "__main__":
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)
    # Add the re-arm interval column to alerts, atomically.
    with db.database.transaction():
        migrate(migrator.add_column("alerts", "rearm", models.Alert.rearm))
    db.close_db(None)
import datetime
import decimal
from decimal import Decimal
import json
import peewee
import peewee_async
from playhouse.migrate import PostgresqlMigrator
from application.settings import MAIN_APP_NAME, REFERRAL_CODE_MAP
from application.utils import print_tb, SATOSHIS_IN_BTC

# Shared async database handle, schema migrator and async object manager.
database = peewee_async.PostgresqlDatabase('hackaton')
migrator = PostgresqlMigrator(database=database)
objects = peewee_async.Manager(database)


class BaseModel(peewee.Model):
    """Common base for all models: integer PK plus creation/update stamps."""

    id = peewee.PrimaryKeyField()
    created_at = peewee.DateTimeField(default=datetime.datetime.now)
    # NOTE(review): updated_at only receives its default at INSERT time;
    # nothing here refreshes it on save — confirm whether that is intended.
    updated_at = peewee.DateTimeField(default=datetime.datetime.now)

    @staticmethod
    def default_serializer(obj):
        """``json.dumps(default=...)`` hook: serializes datetimes (ISO 8601),
        Decimals (as strings) and sets (as lists); raises TypeError otherwise."""
        if isinstance(obj, datetime.datetime):
            return obj.isoformat()
        if isinstance(obj, Decimal):
            return str(obj)
        if isinstance(obj, set):
            return list(obj)
        raise TypeError("Unknown type")
from redash.models import db
import peewee
from playhouse.migrate import PostgresqlMigrator, migrate

if __name__ == '__main__':
    migrator = PostgresqlMigrator(db.database)
    with db.database.transaction():
        # Data source names become unique per organization instead of global.
        # Depending on schema history the old rule is a constraint:
        db.database.execute_sql('ALTER TABLE data_sources DROP CONSTRAINT IF EXISTS unique_name')
        # ...or only an index:
        db.database.execute_sql('DROP INDEX IF EXISTS data_sources_name')
        migrate(
            migrator.add_index('data_sources', ('org_id', 'name'), unique=True)
        )
    db.close_db(None)
# NOTE(review): this snippet is truncated — it ends inside
# "if settings.WEBHOOK_USERNAME:". It also uses Python 2 print statements,
# so it cannot run under Python 3 as-is. Recover the tail before use.
import peewee
from redash import settings
from redash.models import db, NotificationDestination, AlertSubscription, Alert, Organization, User
from redash.destinations import get_configuration_schema_for_destination_type
from redash.utils.configuration import ConfigurationContainer
from playhouse.migrate import PostgresqlMigrator, migrate

if __name__ == '__main__':
    migrator = PostgresqlMigrator(db.database)
    with db.database.transaction():
        if not NotificationDestination.table_exists():
            NotificationDestination.create_table()
        # Update alert subscription fields
        migrate(
            migrator.add_column('alert_subscriptions', 'destination_id', AlertSubscription.destination)
        )
    try:
        org = Organization.get_by_slug('default')
        user = User.select().where(User.org==org, peewee.SQL("%s = ANY(groups)", org.admin_group.id)).get()
    except Exception:
        print "!!! Warning: failed finding default organization or admin user, won't migrate Webhook/HipChat alert subscriptions."
        exit()
    if settings.WEBHOOK_ENDPOINT:
        # Have all existing alerts send to webhook if already configured
        schema = get_configuration_schema_for_destination_type('webhook')
        conf = {'url': settings.WEBHOOK_ENDPOINT}
        if settings.WEBHOOK_USERNAME:
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    # Remove the obsolete users.status column inside a single transaction.
    with db.database.transaction():
        migrate(migrator.drop_column('users', 'status'))

    db.close_db(None)
import peewee
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    # Idempotency guard: skip the migration if a previous run already added
    # the column (information_schema lookup instead of trying and failing).
    cursor = db.database.execute_sql(
        "SELECT column_name FROM information_schema.columns WHERE table_name='alerts' and column_name='rearm';"
    )
    if cursor.rowcount > 0:
        # Fixed: was a Python-2-only `print` statement; the call form with a
        # single argument behaves identically on Python 2 and 3.
        print("Column exists. Skipping.")
        exit()

    with db.database.transaction():
        migrate(migrator.add_column('alerts', 'rearm', models.Alert.rearm),
                )

    db.close_db(None)
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    # Drop the unused per-group `tables` column in a single transaction.
    with db.database.transaction():
        migrate(
            migrator.drop_column('groups', 'tables')
        )

    db.close_db(None)
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(
            migrator.add_column('queries', 'is_draft', models.Query.is_draft)
        )
        migrate(
            # Fixed copy-paste defect: the dashboards column was built from
            # models.Query.is_draft; use the Dashboard model's own field so the
            # column definition matches its schema.
            # NOTE(review): assumes models.Dashboard defines is_draft — confirm
            # against redash.models before running.
            migrator.add_column('dashboards', 'is_draft', models.Dashboard.is_draft)
        )

    # Backfill: pre-existing placeholder queries are drafts by definition.
    db.database.execute_sql("UPDATE queries SET is_draft = (name = 'New Query')")
    db.close_db(None)
from redash.models import db
import peewee
from playhouse.migrate import PostgresqlMigrator, migrate

if __name__ == "__main__":
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        # Change the uniqueness constraint on data source name to be (org, name).
        # The old single-column rule may exist under either name depending on how
        # the schema was created; try each candidate until one drops.
        success = False
        for constraint in ["unique_name", "data_sources_name"]:
            try:
                db.database.execute_sql("ALTER TABLE data_sources DROP CONSTRAINT {}".format(constraint))
                success = True
                break
            except peewee.ProgrammingError:
                db.close_db(None)

        if not success:
            # Fixed: were Python-2-only `print` statements; the call form with a
            # single argument behaves identically on Python 2 and 3.
            print("Failed removing uniqueness constraint on data source name.")
            print("Please verify its name in the schema, update the migration and run again.")
            exit()

        migrate(migrator.add_index("data_sources", ("org_id", "name"), unique=True))

    db.close_db(None)
from playhouse.migrate import migrate, PostgresqlMigrator

from app.models import DATABASE
from app.qa.models import Reply

migrator = PostgresqlMigrator(DATABASE)

# Recreate the reply-content column so it picks up the new '' default:
# drop the existing column, then re-add it from the updated field definition.
Reply.content.default = ''

migrate(
    migrator.drop_column('TBL_REPLY', Reply.content.db_column),
    migrator.add_column('TBL_REPLY', Reply.content.db_column, Reply.content))
# NOTE(review): this module is truncated in this chunk — it cuts off at
# `class BaseClass(VersionedModel): class Meta:` with no class body. Code left
# byte-identical pending the full file. Also note `os` and `SqliteDatabase` are
# referenced but their imports are not visible here — presumably above the cut.
from playhouse.db_url import connect from playhouse.migrate import SqliteMigrator, MySQLMigrator, PostgresqlMigrator from playhouse.reflection import Introspector from . import VersionedModel from . import migrate # Setup Database database_url = os.environ.get('DATABASE', None) if database_url: database = connect(database_url) if database_url.startswith('mysql'): migrator = MySQLMigrator.from_database(database) if database_url.startswith('postgres'): migrator = PostgresqlMigrator.from_database(database) if database_url.startswith('sqlite'): migrator = SqliteMigrator.from_database(database) else: database = SqliteDatabase(':memory:') migrator = SqliteMigrator.from_database(database) introspector = Introspector.from_database(database) # Basic example class class BaseClass(VersionedModel): class Meta: