def database_migrate(db, old_ver):
    """Upgrade the database schema from ``old_ver`` to ``db_schema_version``.

    The stored schema version is bumped up front, then each incremental
    migration newer than the installed version is applied in order.
    """
    # Update database schema version (written first; migrations follow).
    Versions.update(val=db_schema_version).where(
        Versions.key == "schema_version").execute()

    log.info("Detected database version %i, updating to %i",
             old_ver, db_schema_version)

    # Pick the migrator matching the configured backend.
    if args.db_type == "mysql":
        migrator = MySQLMigrator(db)
    else:
        migrator = SqliteMigrator(db)

    # No longer necessary, we're doing this at schema 4 as well
    # if old_ver < 1:
    #     db.drop_tables([ScannedLocation])

    if old_ver < 2:
        migrate(migrator.add_column("pokestop", "encounter_id",
                                    CharField(max_length=50, null=True)))

    if old_ver < 3:
        migrate(
            migrator.add_column("pokestop", "active_fort_modifier",
                                CharField(max_length=50, null=True)),
            migrator.drop_column("pokestop", "encounter_id"),
            migrator.drop_column("pokestop", "active_pokemon_id"),
        )

    if old_ver < 4:
        db.drop_tables([ScannedLocation])

    if old_ver < 5:
        # Some pokemon were added before the 595 bug was "fixed".
        # Clean those up for a better UX.
        # Fix: use utcnow() — the later revision of this migration in the
        # project uses UTC here, and a naive local-time now() would delete
        # the wrong 24-hour window.
        query = Pokemon.delete().where(
            Pokemon.disappear_time > (datetime.utcnow() - timedelta(hours=24)))
        query.execute()
def add_index(table):
    """Render the add-index form and create the requested index on POST."""
    request_data = get_request_data()
    indexed_columns = request_data.getlist('indexed_columns')
    unique = bool(request_data.get('unique'))
    columns = dataset.get_columns(table)
    if request.method == 'POST':
        if not indexed_columns:
            # Nothing selected: re-render the form with an error banner.
            flash('One or more columns must be selected.', 'danger')
        else:
            migrate(migrator.add_index(table, indexed_columns, unique))
            flash('Index created successfully.', 'success')
            return redirect(url_for('table_structure', table=table))
    return render_template(
        'add_index.html',
        columns=columns,
        indexed_columns=indexed_columns,
        table=table,
        unique=unique)
def rename_column(table):
    """Render the rename-column form and apply the rename on POST."""
    request_data = get_request_data()
    rename = request_data.get('rename', '')
    rename_to = request_data.get('rename_to', '')
    columns = dataset.get_columns(table)
    column_names = [column.name for column in columns]
    if request.method == 'POST':
        # Source column must exist and the target name must be free.
        valid = rename in column_names and rename_to not in column_names
        if valid:
            migrate(migrator.rename_column(table, rename, rename_to))
            flash('Column "%s" was renamed successfully!' % rename, 'success')
            dataset.update_cache(table)
            return redirect(url_for('table_structure', table=table))
        flash('Column name is required and cannot conflict with an '
              'existing column\'s name.', 'danger')
    return render_template(
        'rename_column.html',
        columns=columns,
        column_names=column_names,
        rename=rename,
        rename_to=rename_to,
        table=table)
def add_column(table):
    """Render the add-column form and add the new column on POST."""
    # Supported SQL type names mapped to their peewee field classes,
    # in the order they should appear in the form's dropdown.
    column_mapping = OrderedDict((
        ('VARCHAR', CharField),
        ('TEXT', TextField),
        ('INTEGER', IntegerField),
        ('REAL', FloatField),
        ('BOOL', BooleanField),
        ('BLOB', BlobField),
        ('DATETIME', DateTimeField),
        ('DATE', DateField),
        ('TIME', TimeField),
        ('DECIMAL', DecimalField)))
    request_data = get_request_data()
    col_type = request_data.get('type')
    name = request_data.get('name', '')
    if request.method == 'POST':
        if not (name and col_type in column_mapping):
            flash('Name and column type are required.', 'danger')
        else:
            # New columns are nullable so existing rows remain valid.
            field = column_mapping[col_type](null=True)
            migrate(migrator.add_column(table, name, field))
            flash('Column "%s" was added successfully!' % name, 'success')
            return redirect(url_for('table_structure', table=table))
    return render_template(
        'add_column.html',
        col_type=col_type,
        column_mapping=column_mapping,
        name=name,
        table=table)
def add_uniquie_basename_index(migrator, db):
    """Ensure a unique index exists on packagefile.basename (idempotent)."""
    try:
        migrate(migrator.add_index('packagefile', ('basename',), True))
    except (OperationalError, ProgrammingError):
        # The index already exists; nothing to do.
        pass
def migrate():
    """Run migrate database tasks.

    Template entry point: list playhouse schema operations inside the
    ``migrate(...)`` call below (an empty call is a no-op).
    """
    # NOTE: the local import deliberately shadows this function's name;
    # inside the body `migrate` is playhouse's operation runner.
    from playhouse.migrate import migrate
    from peewee import CharField, TextField, IntegerField
    migrate(
        # db.migrator.add_column('project', 'shares', IntegerField(default=0)),
    )
def m_004(migrator):
    """ Allow tags to be more than 20 characters """
    # NOTE(review): change_column_type is not part of stock
    # playhouse.migrate (which offers alter_column_type) — presumably a
    # custom migrator that derives the new type from the model field;
    # confirm its signature.
    table_name = models.Tag._meta.name
    updated_column = models.Tag.text
    migrate(
        migrator.change_column_type(table_name, updated_column)
    )
def m_003(migrator):
    """ Update post text field """
    # NOTE(review): change_column_type is not part of stock
    # playhouse.migrate — presumably a custom migrator that derives the
    # new type from the model field; confirm its signature.
    table_name = models.Post._meta.name
    updated_column = models.Post.post_text
    migrate(
        migrator.change_column_type(table_name, updated_column)
    )
def add_url_fileld(migrator, db):
    """Add the packagefile.url column if it does not exist yet.

    Probes with a SELECT: when the column is missing the query raises and
    the column is then added via the migrator.
    """
    try:
        # Cheap probe — fails only when the 'url' column is absent.
        PackageFile.select(PackageFile.url).where(PackageFile.url == None).count()
    except (OperationalError, ProgrammingError):
        # Narrowed from a bare `except:` (matching the sibling helpers) so
        # unrelated failures such as KeyboardInterrupt still surface.
        migrate(
            migrator.add_column('packagefile', 'url', PackageFile.url)
        )
def add_fetched_fileld(migrator, db):
    """Add the packagefile.fetched column if it does not exist yet.

    Probes with a SELECT: when the column is missing the query raises and
    the column is then added via the migrator.
    """
    try:
        # Cheap probe — fails only when the 'fetched' column is absent.
        PackageFile.select(PackageFile.fetched).where(PackageFile.fetched == None).count()
    except (OperationalError, ProgrammingError):
        # Narrowed from a bare `except:` (matching the sibling helpers) so
        # unrelated failures such as KeyboardInterrupt still surface.
        migrate(
            migrator.add_column('packagefile', 'fetched', PackageFile.fetched)
        )
def m_001(migrator):
    """Add a non-null ``language`` column (default 'ukr') to the post table."""
    post_table = models.Post._meta.name
    migrate(
        migrator.add_column(post_table, 'language',
                            CharField(null=False, default='ukr')),
    )
def _migrate_new_columns(self, data):
    """Add a nullable column for every key in *data* missing from the model."""
    new_keys = set(data) - set(self.model_class._meta.fields)
    if not new_keys:
        return
    operations = []
    for key in new_keys:
        # Infer the field type from the sample value; NULLs are allowed so
        # existing rows remain valid after the ALTER.
        field = self._guess_field_type(data[key])(null=True)
        operations.append(
            self.dataset._migrator.add_column(self.name, key, field))
        field.add_to_class(self.model_class, key)
    migrate(*operations)
def m_002(migrator):
    """Add ``slug`` and ``show_on_index`` columns to the post table."""
    post_table = models.Post._meta.name
    migrate(
        migrator.add_column(post_table, 'slug', CharField(default='')),
        migrator.add_column(post_table, 'show_on_index',
                            BooleanField(default=True)),
    )
def create_key_details(migrator):
    """
    Modifies the Key table to add several columns:
      short_name text indexed not null
      visibility text not null
    """
    # NOTE: get_simple_name() is evaluated once here, at field-definition
    # time, so every existing row is backfilled with the same value.
    short_name = peewee.CharField(default=get_simple_name(), index=True,
                                  unique=False, null=False)
    # Visibility is constrained to one of three choices; defaults to "self".
    visibility = peewee.CharField(
        default="self",
        null=False,
        choices=[("self", "self"), ("private", "private"),
                 ("public", "public")]
    )
    migrate(migrator.add_column("key", "short_name", short_name),
            migrator.add_column("key", "visibility", visibility))
def forward(migrator):
    """Move SearchResultContent's content/date data into WebPageContent rows
    and relink SearchResultContent to them via a content_id FK."""
    # Add a placeholder field for storing a link to a WebPageContent object
    migrate(
        migrator.add_column(
            "searchresultcontent",
            "webpagecontent_id",
            ForeignKeyField(WebPageContent, null=True, to_field=WebPageContent.id),
        )
    )
    # Move the data previously in SearchResultContent model into WebPageContent,
    # and link the WebPageContent to the SearchResultContent.
    # Note that because the model for SearchResultContent has already been updated beyond the
    # state of the table, we have to access the 'content' and 'date' fields through the "SQL"
    # class instead of a field on the model. This is also the reason that we mix both
    # Query object methods and raw queries below. The models access the future field names,
    # and the raw queries access the past field names.
    content_records = (
        SearchResultContent.select(SQL("content"), SQL("date"), SearchResult.url, SearchResultContent.id)
        .join(SearchResult)
        .dicts()
    )
    for record in content_records:
        web_page_content = WebPageContent.create(content=record["content"], date=record["date"], url=record["url"])
        # Normally, it's not recommended to directly insert values into queries. But I do
        # it here because I think Postgres and SQLite have two different interpolating strings,
        # so this is one way to write the migration to make it more portable.
        # I also think there is no risk that either of these fields that I insert will
        # be anything other than an integer.
        # Bug fix: the original concatenation was missing the space before
        # "WHERE", producing invalid SQL like "... = 42WHERE id = 7".
        SearchResultContent.raw(
            "UPDATE searchresultcontent SET webpagecontent_id = "
            + str(web_page_content.id)
            + " WHERE id = "
            + str(record["id"])
        ).execute()
    # Drop unnecessary columns from SearchResultContent model
    migrate(
        migrator.drop_column("searchresultcontent", "date"),
        migrator.drop_column("searchresultcontent", "content"),
        migrator.rename_column("searchresultcontent", "webpagecontent_id", "content_id"),
        migrator.drop_not_null("searchresultcontent", "content_id"),
    )
def main():
    # Ensure the core tables exist; peewee raises OperationalError when a
    # table is already present, which is printed and ignored (Python 2).
    for cls in models.Category, models.Feed, models.Entry:
        try:
            cls.create_table()
        except peewee.OperationalError as ex:
            print ex
    migrator = SchemaMigrator(models.DATABASE)
    try:
        # Best-effort: drop the old index if it exists (bare except keeps
        # the first run working when the index is absent) ...
        try:
            migrate(migrator.drop_index('entry_feed_id_published_at'))
        except:
            pass
        # ... then recreate it as a non-unique composite index.
        migrate(migrator.add_index('entry', ('feed_id', 'published_at'), False))
    except peewee.OperationalError as ex:
        print ex
def forward(migrator):
    """Backfill non-unique indexes on fetch/compute counter columns."""
    # This migration may take some time to run: it adds indexes to integer
    # fields that should have been indexed all along, for many models.
    fetch_index_tables = (
        'seed', 'query', 'search', 'webpageversion', 'questionsnapshot',
        'githubproject', 'issue', 'issuecomment', 'issueevent',
    )
    compute_index_tables = ('postsnippet', 'postnpminstallpackage', 'task')
    operations = [
        migrator.add_index(table, ('fetch_index',), False)
        for table in fetch_index_tables
    ]
    operations += [
        migrator.add_index(table, ('compute_index',), False)
        for table in compute_index_tables
    ]
    migrate(*operations)
def drop_column(table):
    """Render the drop-column form and drop the named column on POST."""
    request_data = get_request_data()
    name = request_data.get('name', '')
    columns = dataset.get_columns(table)
    column_names = [column.name for column in columns]
    if request.method == 'POST' and name in column_names:
        migrate(migrator.drop_column(table, name))
        flash('Column "%s" was dropped successfully!' % name, 'success')
        return redirect(url_for('table_structure', table=table))
    if request.method == 'POST':
        # POSTed an unknown/empty column name.
        flash('Name is required.', 'danger')
    return render_template('drop_column.html', columns=columns,
                           column_names=column_names, name=name, table=table)
def bits():
    """ bits, badges, colors, yolo """
    migrator = SqliteMigrator(db)
    # New per-message Twitch metadata columns, added in one batch.
    new_columns = (
        ('bits', IntegerField(default=0)),
        ('badges', CharField(null=True, default=None)),
        ('color', CharField(default="#FFF")),
        ('sub', BooleanField(default=False)),
        ('turbo', BooleanField(default=False)),
        ('mod', BooleanField(default=False)),
    )
    migrate(*[migrator.add_column('messages', name, field)
              for name, field in new_columns])
def addColumns(migrator, newColumnsDict):
    """Add every column described in *newColumnsDict*, table by table.

    newColumnsDict maps table name -> {column name -> {"type": ..., "null": ...}}.
    Unknown types are reported and skipped; add failures are reported too.
    """
    for tableName, columns in newColumnsDict.items():
        for columnName, spec in columns.items():
            columnType = spec["type"]
            isNull = spec["null"]
            if columnType not in dbTypes:
                print(
                    "Data type {} is not a valid datatype.".format(columnType))
                continue
            try:
                print('Adding column "{}" to table "{}".'.format(
                    columnName, tableName))
                mgt.migrate(
                    migrator.add_column(tableName, columnName,
                                        dbTypes[columnType](null=isNull)))
            except (sqlite3OperationalError, peeweeOperationalError):
                print('Error adding column "{}" to table "{}"'.format(
                    columnName, tableName))
    print("Done.")
def run_migrate(*args):
    '''
    running some migration.
    :return:
    '''
    print('Begin migrate ...')
    torcms_migrator = migrate.PostgresqlMigrator(config.DB_CON)
    version_field = migrate.IntegerField(null=False, default=1)
    try:
        migrate.migrate(
            torcms_migrator.add_column('mabgson', 'version', version_field))
    except Exception:
        # Narrowed from a bare `except:`: still best-effort (the column may
        # already exist) but no longer swallows SystemExit/KeyboardInterrupt.
        pass
    print('Migration finished.')
def _drop_index_if_needed(
    table: Type[Model],
    field_instance: Field,
    foreign_key: bool = False,
) -> None:
    """Drop the index backing *field_instance*, tolerating its absence."""
    table_name = table.__name__.lower()
    # Foreign-key columns carry an '_id' suffix in the index name.
    suffix = '_id' if foreign_key else ''
    column_name = f'{table_name}_{field_instance.name}{suffix}'
    migrator = db_config.get_migrator_instance()
    log.info(f"Drop index from '{column_name}' field in '{table_name}'")
    with db_config.database.transaction():
        try:
            migrate(migrator.drop_index(table_name, column_name))
        except (OperationalError, ProgrammingError) as e:
            if 'does not exist' not in str(e):
                raise
            log.info('Index already exists.')
        db_config.database.commit()
def drop_index(table):
    """Render the drop-index form and drop the named index on POST."""
    request_data = get_request_data()
    name = request_data.get('name', '')
    indexes = dataset.get_indexes(table)
    index_names = [index.name for index in indexes]
    if request.method == 'POST':
        if name not in index_names:
            flash('Index name is required.', 'danger')
        else:
            migrate(migrator.drop_index(table, name))
            flash('Index "%s" was dropped successfully!' % name, 'success')
            return redirect(url_for('table_structure', table=table))
    return render_template('drop_index.html', indexes=indexes,
                           index_names=index_names, name=name, table=table)
def _add_not_null_column(
    table: Model,
    column: Field,
    run_before_adding_not_null: Callable[[Model, Field], None] = None,
) -> bool:
    """Add *column* as NOT NULL in two phases: create it nullable, let the
    optional callback backfill values, then drop the NULL constraint.

    Returns True if the column was added, False if it already existed.
    """
    already_exists, table_name, column_name = get_details(table, column)
    log.info(f'Adding {table_name}.{column_name}, if needed.')
    if already_exists:
        log.info(f'Column {column_name} already exists in {table_name}.')
        return False
    migrator = db_config.get_migrator_instance()
    with db_config.database.transaction():
        # Phase 1: add as nullable so existing rows stay valid.
        # NOTE: this mutates the shared Field instance passed by the caller.
        column.null = True
        migrate(migrator.add_column(table_name, column_name, field=column))
        # Phase 2: optional backfill before the NOT NULL constraint lands.
        if callable(run_before_adding_not_null):
            run_before_adding_not_null(table, column)
        migrate(migrator.drop_not_null(table_name, column_name))
    db_config.database.commit()
    return True
def migration_v13(db, migrator: SchemaMigrator):
    """Generalise comments: replace the post FK with (object_type, object_id)."""
    with db.atomic():
        # Step 1: add the new polymorphic reference columns.
        migrate(
            migrator.add_column("comment", "object_type", CharField(null=True)),
            migrator.add_column("comment", "object_id", IntegerField(default=0)),
        )
        # Imported here so the model matches the schema at migration time.
        from src.model.models import Comment
        # Step 2: backfill — every pre-existing comment referenced a post.
        for c in Comment.select():
            c.object_type = "post"
            c.object_id = c.post.id
            c.save()
        # Step 3: drop the old FK column.
        migrate(
            # migrator.drop_foreign_key_constraint("comment", "post"),
            migrator.drop_column("comment", "post_id"),
        )
def database_migrate(db, old_ver):
    """Upgrade the schema from ``old_ver`` to the current db_schema_version,
    applying each incremental migration in order."""
    # Update database schema version
    Versions.update(val=db_schema_version).where(
        Versions.key == 'schema_version').execute()

    log.info("Detected database version %i, updating to %i",
             old_ver, db_schema_version)

    # Perform migrations here; pick the migrator for the configured backend.
    migrator = None
    if args.db_type == 'mysql':
        migrator = MySQLMigrator(db)
    else:
        migrator = SqliteMigrator(db)

    # No longer necessary, we're doing this at schema 4 as well
    # if old_ver < 1:
    #     db.drop_tables([ScannedLocation])

    if old_ver < 2:
        migrate(
            migrator.add_column('pokestop', 'encounter_id',
                                CharField(max_length=50, null=True)))

    if old_ver < 3:
        migrate(
            migrator.add_column('pokestop', 'active_fort_modifier',
                                CharField(max_length=50, null=True)),
            migrator.drop_column('pokestop', 'encounter_id'),
            migrator.drop_column('pokestop', 'active_pokemon_id'))

    if old_ver < 4:
        db.drop_tables([ScannedLocation])

    if old_ver < 5:
        # Some pokemon were added before the 595 bug was "fixed"
        # Clean those up for a better UX
        query = (Pokemon.delete().where(
            Pokemon.disappear_time > (datetime.utcnow() - timedelta(hours=24))))
        query.execute()

    if old_ver < 6:
        # Track when each gym was last scanned.
        migrate(
            migrator.add_column('gym', 'last_scanned',
                                DateTimeField(null=True)),
        )

    if old_ver < 7:
        # Recreate gym descriptions as TEXT instead of the old column type.
        migrate(
            migrator.drop_column('gymdetails', 'description'),
            migrator.add_column('gymdetails', 'description',
                                TextField(null=True, default="")))
def _migration_1(self):
    """Apply migration #1."""
    tables = reflection.introspect(self._db).model_names
    migrator = SqliteMigrator(self._db)
    # Rename the legacy *model tables if both are still present.
    if "habitmodel" in tables and "activitymodel" in tables:
        with self._db.transaction():
            migrate(migrator.rename_table("habitmodel", "habit"),
                    migrator.rename_table("activitymodel", "activity"))
            logger.debug("Migration #1: Renamed habit, activity tables.")
    # Create new tables
    self._db.create_tables([Config, Summary], safe=True)
    logger.debug("Migration #1: Created tables.")
    # Set DB version
    Config.insert(name="version", value="1").on_conflict("replace").execute()
    logger.debug("Migration #1: DB version updated to 1.")
    # Update summaries
    for h in Habit.select():
        activities = Activity.select()\
            .where(Activity.for_habit == h)\
            .order_by(Activity.update_date.asc())
        # Count consecutive-day activities; a gap of more than one day ends
        # the streak.
        # NOTE(review): the ascending order means the streak is counted from
        # the *oldest* activity, not the most recent — confirm intended.
        streak = 0
        if len(activities) != 0:
            last_date = activities[0].update_date
            for a in activities:
                delta = last_date - a.update_date
                if abs(delta.days) > 1:
                    break
                streak += 1
                last_date = a.update_date
        # Update summary for the habit
        s = Summary.get_or_create(for_habit=h, target=0,
                                  target_date=h.created_date)
        s[0].streak = streak
        s[0].save()
    logger.debug("Migration #1: Summary updated for habits.")
    return 0
def front():
    """Add median and hospitalisation tracking columns to coronalog."""
    new_fields = (
        ('median', IntegerField(default=0)),
        ('hospitalized', IntegerField(default=0)),
        ('confirmed_hospitalized', IntegerField(default=0)),
        ('confirmed_hospitalized_icu', IntegerField(default=0)),
        ('confirmed_hospitalized_ventilation', IntegerField(default=0)),
    )
    migrate(*[migrator.add_column("coronalog", name, field)
              for name, field in new_fields])
def _rename_tables():
    """Rename the *person/*custodian/*participant/*release tables to *user
    and fix up index and FK constraint names to match (raw Postgres SQL)."""
    table_rename = {
        'institutionperson': 'institutionuser',
        'instrumentcustodian': 'instrumentuser',
        'projectparticipant': 'projectuser',
        'transactionrelease': 'transactionuser'
    }
    migrator = SchemaMigrator(DB)
    for old_table, new_table in table_rename.items():
        migrate(migrator.rename_table(old_table, new_table))
        # Indexes keep their old table-name prefix after the rename; swap it.
        for index_meta in DB.get_indexes(new_table):
            new_index_name = '{}{}'.format(new_table,
                                           index_meta.name[len(old_table):])
            DB.execute_sql('alter index {} rename to {}'.format(
                index_meta.name, new_index_name))
        # Same for FK constraint names, which follow <table>_<column>_fkey.
        for fkey_meta in DB.get_foreign_keys(new_table):
            old_name = '_'.join([old_table, fkey_meta.column, 'fkey'])
            new_name = '_'.join([new_table, fkey_meta.column, 'fkey'])
            DB.execute_sql('alter table {} rename constraint {} TO {}'.format(
                new_table, old_name, new_name))
def drop_column(table):
    """Render the drop-column form; drop the named column on POST."""
    request_data = get_request_data()
    name = request_data.get('name', '')
    columns = dataset.get_columns(table)
    column_names = [column.name for column in columns]
    if request.method == 'POST':
        if name not in column_names:
            flash('Name is required.', 'danger')
        else:
            migrate(migrator.drop_column(table, name))
            flash('Column "%s" was dropped successfully!' % name, 'success')
            return redirect(url_for('table_structure', table=table))
    return render_template(
        'drop_column.html',
        columns=columns,
        column_names=column_names,
        name=name,
        table=table)
def _drop_column_from_module_if_needed(
    table: Type[Model],
    column_name: str,
) -> bool:
    """Drop *column_name* from *table* if present; return True on drop."""
    table_name = table.__name__.lower()
    existing = {col.name for col in db_config.database.get_columns(table_name)}
    if column_name not in existing:
        print(f'Column {column_name} not exists in {table}')  # noqa: T001
        return False
    print(f'Drop {column_name} field in {table}')  # noqa: T001
    migrator = db_config.get_migrator_instance()
    with db_config.database.transaction():
        migrate(migrator.drop_column(table_name, column_name))
        db_config.database.commit()
    return True
def drop_index(table):
    """Render the drop-index form; drop the named index on POST."""
    request_data = get_request_data()
    name = request_data.get('name', '')
    indexes = dataset.get_indexes(table)
    index_names = [index.name for index in indexes]
    if request.method == 'POST' and name in index_names:
        migrate(migrator.drop_index(table, name))
        flash('Index "%s" was dropped successfully!' % name, 'success')
        return redirect(url_for('table_structure', table=table))
    elif request.method == 'POST':
        flash('Index name is required.', 'danger')
    return render_template(
        'drop_index.html',
        indexes=indexes,
        index_names=index_names,
        name=name,
        table=table)
def update_db_6():
    """
    Update database to v6.
    """
    migrator = SqliteMigrator(db)
    db.create_tables([OfflineCache])
    # New flags for external storages and offline/downloaded books.
    migrate(
        migrator.add_column('storage', 'external', BooleanField(default=False)),
        migrator.add_column('book', 'offline', BooleanField(default=False)),
        migrator.add_column('book', 'downloaded', BooleanField(default=False)))
    Settings.update(version=6).execute()
    # The cache layout changed with this version; throw the old cache away.
    import shutil
    shutil.rmtree(tools.get_cache_dir())
def add_index(table):
    """Render the add-index form; create the index on POST."""
    request_data = get_request_data()
    indexed_columns = request_data.getlist('indexed_columns')
    unique = bool(request_data.get('unique'))
    columns = dataset.get_columns(table)
    if request.method == 'POST' and indexed_columns:
        migrate(migrator.add_index(table, indexed_columns, unique))
        flash('Index created successfully.', 'success')
        return redirect(url_for('table_structure', table=table))
    elif request.method == 'POST':
        flash('One or more columns must be selected.', 'danger')
    return render_template('add_index.html', columns=columns,
                           indexed_columns=indexed_columns, table=table,
                           unique=unique)
def migrate(self):
    """Create and ensure proper schema"""
    now = datetime.datetime.utcnow()
    # need to handle migration #0 - create any missing tables
    self.create_models(ignore_errors=True)
    # load any completed migrations
    query = (DatabaseMigration.select().where(
        DatabaseMigration.migrated_at.is_null(False)))
    completed_migrations = [m.name for m in query]
    for name, operations in self.migrations:
        if name not in completed_migrations:
            # create a migration with this name
            try:
                with self.migrator.database.atomic():
                    m = DatabaseMigration.create(name=name)
            except IntegrityError:
                # Row exists from a previous incomplete run; reuse it.
                m = DatabaseMigration.get(DatabaseMigration.name == name)
            with self.migrator.database.atomic():
                # run the operations
                try:
                    # NOTE(review): playhouse's migrate() expects operations
                    # unpacked (migrate(*operations)); passing a list relies
                    # on `operations` being a single operation object —
                    # confirm the shape of self.migrations entries. The
                    # AttributeError handler below suggests this has failed
                    # before.
                    migrate(operations)
                except ProgrammingError as ex:
                    if 'already exists' in str(ex):
                        logger.info(
                            "Migration %s previous performed, skipped..."
                            % name)
                        pass
                    else:
                        raise
                except AttributeError as ex:
                    print(str(ex))
                    logger.error(str(ex))
            with DatabaseMigration.atomic():
                # NOTE(review): atomic() is normally on the database, not the
                # model — verify DatabaseMigration.atomic exists.
                # set the timestamp
                m.migrated_at = now
                m.save()
def migrate_created_modified(self, table_name: str) -> None:
    """
    Migration adds "standard" CREATED_BY, CREATED_DATE, MODIFIED_BY, and
    MODIFIED_DATE columns
    @param table_name:
    @return:
    """
    migrate_columns = (('CREATED_BY', self.nullable_int_field),
                       ('CREATED_DATE', self.nullable_text_field),
                       ('MODIFIED_BY', self.nullable_int_field),
                       ('MODIFIED_DATE', self.nullable_text_field))
    for column_name, column_type in migrate_columns:
        try:
            migrate(self.migrator.add_column(table_name, column_name,
                                             column_type))
            self._logger.info(f'Added column {column_name} to {table_name}')
        except SQLError:
            # Column already present; skip it silently.
            pass
def forward(migrator):
    """Add an indexed, nullable user_id column to issue and issuecomment."""
    operations = [
        migrator.add_column(
            table,
            'user_id',
            IntegerField(index=True, null=True, default=None),
        )
        for table in ('issue', 'issuecomment')
    ]
    migrate(*operations)
def _init_db_tables(self, database, additional_tables=None):
    # Create the core cache tables (idempotent thanks to safe=True).
    database.create_tables([cache.Config, pim.Content, cache.User,
                            cache.JournalEntity, cache.EntryEntity,
                            cache.UserInfo], safe=True)
    if additional_tables:
        database.create_tables(additional_tables, safe=True)
    # No Config row means the schema predates versioning entirely.
    db_version = cache.Config.get_or_none()
    if db_version is None:
        from playhouse.migrate import SqliteMigrator, migrate
        # Essentially version 0 so do first migration.
        migrator = SqliteMigrator(database)
        try:
            migrate(
                migrator.add_column('journalentity', 'read_only',
                                    cache.JournalEntity.read_only),
            )
        except peewee.OperationalError:
            # A hack because we don't have a db config yet.
            pass
        # Record that we are now at schema version 1.
        cache.Config.insert(db_version=1).execute()
def _migrate(i: int):
    """
    Run migrations.

    Args:
        i: Migration ID
    """
    migrator = SqliteMigrator(database)
    if i <= 0:
        # Migration 0: Add media file ID and editable message ID
        # 2019JAN08
        migrate(*[
            migrator.add_column("msglog", column, getattr(MsgLog, column))
            for column in ("file_id", "media_type", "mime",
                           "master_msg_id_alt")
        ])
    if i <= 1:
        # Migration 1: Add pickle objects to MsgLog and SlaveChatInfo
        # 2019JUL24
        migrate(
            migrator.add_column("msglog", "pickle", MsgLog.pickle),
            migrator.add_column("slavechatinfo", "pickle",
                                SlaveChatInfo.pickle))
    if i <= 2:
        # Migration 2: Add column for group ID to slave chat info table
        # 2019NOV18
        migrate(
            migrator.add_column("slavechatinfo", "slave_chat_group_id",
                                SlaveChatInfo.slave_chat_group_id))
def migrate_database(db, old_ver):
    # Incrementally upgrade the account/event schema to db_schema_version.
    log.info('Detected database version {}, updating to {}...'.format(
        old_ver, db_schema_version))
    migrator = MySQLMigrator(db)

    if old_ver < 2:
        # Normalise VARCHAR column definitions before anything else.
        migrate_varchar_columns(db, Account.username, Account.password,
                                Account.email, Account.system_id,
                                Account.tutorial_state)
        migrate_varchar_columns(db, Event.entity_id, Event.description)
        # Start tracking the schema version in its own table.
        db.create_table(Version)
        InsertQuery(Version, {Version.key: 'schema_version',
                              Version.val: 1}).execute()
        migrate(
            migrator.add_column('account', 'lures',
                                SmallIntegerField(null=True)),
            migrator.rename_column('event', 'type', 'entity_type')
        )

    if old_ver < 3:
        # Track when an account was assigned to a worker.
        migrate(
            migrator.add_column('account', 'assigned_at',
                                DateTimeField(index=True, null=True))
        )

    if old_ver < 4:
        # Track when an account reached level 30.
        migrate(
            migrator.add_column('account', 'reach_lvl30_datetime',
                                DateTimeField(index=True, null=True))
        )

    # Record that the schema is now current.
    Version.update(val=db_schema_version).where(
        Version.key == 'schema_version').execute()
    log.info("Done migrating database.")
def run_migrate(*args):
    '''
    running some migration.
    '''
    print('Begin migrate ...')
    torcms_migrator = migrate.PostgresqlMigrator(config.DB_CON)

    # Each add_column below is best-effort: the column may already exist.
    # The bare `except:` clauses were narrowed to `except Exception:` so
    # KeyboardInterrupt/SystemExit are no longer swallowed.
    memo_field = migrate.TextField(
        null=False,
        default='',
        help_text='Memo',
    )
    try:
        migrate.migrate(
            torcms_migrator.add_column('tabpost', 'memo', memo_field))
    except Exception:
        pass

    desc_field = migrate.CharField(null=False, default='', max_length=255,
                                   help_text='')
    try:
        migrate.migrate(
            torcms_migrator.add_column('tabentity', 'desc', desc_field))
    except Exception:
        pass

    extinfo_field = BinaryJSONField(null=False, default={},
                                    help_text='Extra data in JSON.')
    try:
        migrate.migrate(
            torcms_migrator.add_column('tabmember', 'extinfo', extinfo_field))
    except Exception:
        pass

    par_id_field = migrate.CharField(null=False, default='', max_length=4,
                                     help_text='父类id,对于label,top_id为""')
    try:
        migrate.migrate(
            torcms_migrator.add_column('tabpost2tag', 'par_id', par_id_field))
    except Exception:
        pass

    print('Migration finished.')
def apply():
    """Create the initial user/snippet tables and backfill their columns."""
    database = SqliteDatabase(settings.DATABASE)
    migrator = SqliteMigrator(database)
    with database.transaction():
        database.execute_sql('CREATE TABLE user (pk INTEGER PRIMARY KEY)')
        database.execute_sql('CREATE TABLE snippet (pk INTEGER PRIMARY KEY)')
        # Columns are ALTERed onto existing tables, so they must all be
        # nullable for the statements to succeed.
        nullable_fields = (
            User.created_at, User.updated_at, User.name, User.passhash,
            Snippet.created_at, Snippet.updated_at, Snippet.syntax,
            Snippet.raw, Snippet.html,
        )
        for field in nullable_fields:
            field.null = True
        migrate(
            # user table
            migrator.add_column('user', 'created_at', User.created_at),
            migrator.add_column('user', 'updated_at', User.updated_at),
            migrator.add_column('user', 'name', User.name),
            migrator.add_column('user', 'passhash', User.passhash),
            migrator.add_index('user', ('name', ), True),
            migrator.add_index('user', ('updated_at', ), False),
            # snippet table
            migrator.add_column('snippet', 'created_at', Snippet.created_at),
            migrator.add_column('snippet', 'updated_at', Snippet.updated_at),
            migrator.add_column('snippet', 'author_id', Snippet.author),
            migrator.add_column('snippet', 'name', Snippet.name),
            migrator.add_column('snippet', 'syntax', Snippet.syntax),
            migrator.add_column('snippet', 'raw', Snippet.raw),
            migrator.add_column('snippet', 'html', Snippet.html),
            migrator.add_index('snippet', ('updated_at', ), False),
        )
def migrate_database_schema(old_ver):
    """Upgrade the proxy-database schema from ``old_ver`` to the current
    db_schema_version. Returns True when finished."""
    log.info('Detected database version %i, updating to %i...',
             old_ver, db_schema_version)
    with db:
        # Update database schema version.
        query = (Version.update(val=db_schema_version).where(
            Version.key == 'schema_version'))
        query.execute()

        # Perform migrations here.
        migrator = MySQLMigrator(db)

        if old_ver < 2:
            # Remove hash field unique index.
            migrate(migrator.drop_index('proxy', 'proxy_hash'))
            # Reset hash field in all proxies.
            Proxy.update(hash=1).execute()
            # Modify column type.
            db.execute_sql('ALTER TABLE `proxy` '
                           'CHANGE COLUMN `hash` `hash` INT UNSIGNED NOT NULL;')
            # Re-hash all proxies.
            Proxy.rehash_all()
            # Recreate hash field unique index.
            migrate(migrator.add_index('proxy', ('hash', ), True))

        if old_ver < 3:
            # Add response time field.
            migrate(
                migrator.add_column('proxy', 'latency',
                                    UIntegerField(index=True, null=True)))

    # Always log that we're done.
    log.info('Schema upgrade complete.')
    return True
def add_column(model, column_name, field):
    """Add *column_name* to *model*'s table if it doesn't exist yet.

    Checks information_schema first so re-running the migration is a no-op;
    a failed ALTER is rolled back and logged instead of raised.
    """
    table_name = model._meta.db_table
    if model.table_exists():
        # Parameterised instead of string interpolation: avoids SQL
        # injection and quoting issues with unusual table/column names.
        query = ("SELECT True FROM information_schema.columns "
                 "WHERE table_name=%s and column_name=%s")
        result = db.execute_sql(query, (table_name, column_name)).fetchall()
        if result and result[0] and result[0][0]:
            logger.info('%s column is already in the table %s',
                        column_name, table_name)
        else:
            try:
                migrate(migrator.add_column(table_name, column_name, field))
                logger.info('%s column is added to the table %s',
                            column_name, table_name)
            except Exception as e:
                # Roll back the failed ALTER so the connection stays usable.
                db.rollback()
                logger.error('%s: %s', type(e).__name__, e)
    else:
        logger.error('No table with name %s found', table_name)
def run_migrate(*args):
    '''
    running some migration.
    :return:
    '''
    print('Begin migrate ...')
    torcms_migrator = migrate.PostgresqlMigrator(config.DB_CON)

    # Best-effort schema changes: each may fail if already applied.
    # Bare `except:` narrowed to `except Exception:` so process signals
    # (KeyboardInterrupt/SystemExit) still propagate.
    float_field = migrate.FloatField(null=False, default=5)
    try:
        migrate.migrate(
            torcms_migrator.add_column('g_post', 'rating', float_field))
    except Exception:
        pass

    order = migrate.CharField(null=False, default='', max_length=8)
    try:
        migrate.migrate(torcms_migrator.add_column('g_post', 'order', order))
    except Exception:
        pass

    pid = migrate.CharField(null=False, max_length=4, default='xxxx',
                            help_text='parent id')
    tmpl = migrate.IntegerField(null=False, default=9,
                                help_text='tmplate type')
    try:
        migrate.migrate(torcms_migrator.add_column('g_tag', 'pid', pid))
    except Exception:
        pass
    try:
        migrate.migrate(torcms_migrator.add_column('g_tag', 'tmpl', tmpl))
    except Exception:
        pass

    try:
        migrate.migrate(torcms_migrator.drop_column('g_tag', 'role_mask'))
    except Exception:
        pass

    print('QED')
def _migrate_column_in_table_if_needed(
    table: Type[Model],
    field_instance: Field,
) -> bool:
    """Add *field_instance*'s column to *table* unless already present."""
    column_name = field_instance.name
    table_name = table.__name__.lower()
    existing = {col.name for col in db_config.database.get_columns(table_name)}
    if column_name in existing:
        print(f'Column {column_name} already exists in {table}')  # noqa: T001
        return False
    print(f'Create {column_name} field in {table}')  # noqa: T001
    migrator = db_config.get_migrator_instance()
    with db_config.database.transaction():
        migrate(migrator.add_column(table_name, field_instance.name,
                                    field_instance))
        db_config.database.commit()
    return True
def run_migration(migration_name: str) -> None:
    """
    Args:
        migration_name: Must be the name of a migration class in
            flask_app.database.migrations
    """
    migration_class: T.Type[BaseMigration[pw.SqliteDatabase]] = getattr(
        flask_app.database.migrations, migration_name)
    migration = migration_class()
    needs_migrations = migration.database_needs_migrations(
        database_proxy.obj)  # type: ignore[arg-type]
    if not needs_migrations:
        print("Database doesn't need the migrations.")
        return
    models = migration.get_models_to_create()
    operations = migration.make_migrate_operations(
        database_proxy.obj)  # type: ignore[arg-type]
    # Create the new tables and run all schema operations atomically.
    with database_proxy.atomic():
        database_proxy.create_tables(models)
        migrate(*operations)
    print("Migration done")
def up():
    """Add nearest_city/twitter_place_id columns to reservoir and backfill
    them from reservoirs.json, matched on station_id."""
    migrator = migrate.PostgresqlMigrator(database.get_connector())
    migrate.migrate(
        migrator.add_column('reservoir', 'nearest_city',
                            peewee.CharField(null=True)),
        migrator.add_column('reservoir', 'twitter_place_id',
                            peewee.CharField(null=True)))
    res_list = json.load(open('reservoirs.json'))
    for reservoir in models.Reservoir.select():
        # Use the first JSON entry whose station_id matches, if any.
        for res in res_list:
            if res['station_id'] == reservoir.station_id:
                reservoir.nearest_city = res['nearest_city']
                reservoir.twitter_place_id = res['twitter_place_id']
                reservoir.save()
                break
def update_0_1_to_1_0(cls):
    """Update from 0.1 to 1.0."""
    migrator = SchemaMigrator(DB)
    # New split transaction tables introduced in 1.0.
    TransSIP.create_table()
    TransSAP.create_table()

    class OldTrans(Model):
        """This is the old transactions."""
        # Mirror of the pre-1.0 'transactions' table so its rows can still
        # be read even though the live models have moved on.
        submitter = ForeignKeyField(Users, backref='transactions')
        instrument = ForeignKeyField(Instruments, backref='transactions')
        proposal = ForeignKeyField(Proposals, backref='transactions')
        created = DateTimeField()
        updated = DateTimeField()
        deleted = DateTimeField(null=True)

        class Meta(object):
            """This is the meta class for OldTrans."""
            database = DB
            table_name = 'transactions'

    # Add the new description column to the legacy table.
    migrate(
        migrator.add_column('transactions', 'description',
                            TextField(null=True)))
    # Copy each legacy row into TransSIP, preserving ids.
    for old_trans in OldTrans.select():
        transsip = TransSIP()
        for attr in [
                'submitter', 'instrument', 'proposal', 'created', 'updated',
                'deleted'
        ]:
            setattr(transsip, attr, getattr(old_trans, attr))
        # NOTE(review): this assigns a Transactions *instance* as the id;
        # presumably peewee coerces it to the primary key — confirm.
        setattr(transsip, 'id',
                Transactions.get(Transactions.id == old_trans.id))
        transsip.save(force_insert=True)
    # Drop the now-migrated FK columns from the legacy table.
    migrate(migrator.drop_column('transactions', 'submitter_id'),
            migrator.drop_column('transactions', 'instrument_id'),
            migrator.drop_column('transactions', 'proposal_id'))
def update_schema():
    """Update from 0.1 to 1.0.

    Copies every legacy ``transactions`` row into OldTransSIP (keeping the
    original primary keys), then drops the foreign-key columns that moved.
    """
    migrator = SchemaMigrator(DB)

    # New destination tables must exist before the copy.
    OldTransSIP.create_table()
    OldTransSAP.create_table()

    # Add the new description column to the legacy table.
    migrate(
        migrator.add_column('transactions', 'description',
                            TextField(null=True)))

    copied_fields = ('submitter', 'instrument', 'proposal',
                     'created', 'updated', 'deleted')
    for old_trans in OldTrans.select():
        new_row = OldTransSIP()
        for field_name in copied_fields:
            setattr(new_row, field_name, getattr(old_trans, field_name))
        # Preserve the legacy primary key on the new row.
        new_row.id = Transactions.get(Transactions.id == old_trans.id)
        # force_insert keeps the explicit id instead of autogenerating one.
        new_row.save(force_insert=True)

    # The FK columns now live on OldTransSIP; drop them from transactions.
    migrate(migrator.drop_column('transactions', 'submitter_id'),
            migrator.drop_column('transactions', 'instrument_id'),
            migrator.drop_column('transactions', 'proposal_id'))
def rename_column(table):
    """Flask view: rename a column of *table* via a peewee migration.

    GET renders the rename form; POST validates that the source column
    exists and the target name is free, performs the rename, and
    redirects back to the table-structure page.
    """
    request_data = get_request_data()
    rename = request_data.get('rename', '')
    rename_to = request_data.get('rename_to', '')

    # Current columns, used both for validation and for the template.
    columns = dataset.get_columns(table)
    column_names = [column.name for column in columns]

    if request.method == 'POST':
        # The source column must exist and the target name must not clash.
        if (rename in column_names) and (rename_to not in column_names):
            migrate(migrator.rename_column(table, rename, rename_to))
            flash('Column "%s" was renamed successfully!' % rename, 'success')
            # Refresh the cached introspection data so subsequent requests
            # see the new column name (was missing; without it the cache
            # keeps serving the old column list).
            dataset.update_cache(table)
            return redirect(url_for('table_structure', table=table))
        else:
            flash('Column name is required and cannot conflict with an '
                  'existing column\'s name.', 'danger')

    # GET, or POST that failed validation: render the form.
    return render_template(
        'rename_column.html',
        columns=columns,
        column_names=column_names,
        rename=rename,
        rename_to=rename_to,
        table=table)
def change_license_field_type(migrator, db):
    """Retype ``packageversion.license`` to match the current
    ``PackageVersion.license`` field definition.

    Strategy: rename the existing column aside, add a fresh column with
    the model's field definition, copy the data across in SQL, then drop
    the renamed original. No-op on SQLite, which ignores VARCHAR lengths.
    """
    if isinstance(db, peewee.SqliteDatabase):
        # SQLite has no length constraint on VARCHAR, nothing to change.
        return

    # Best-effort cleanup of a leftover column from an interrupted earlier
    # run. Catch Exception, not a bare except, so SystemExit and
    # KeyboardInterrupt still propagate.
    try:
        migrate(migrator.drop_column('packageversion', 'license_old'))
    except Exception:
        pass

    with db.transaction():
        migrate(
            migrator.rename_column('packageversion', 'license', 'license_old'),
            migrator.add_column("packageversion", 'license',
                                PackageVersion.license),
        )
        # Copy the values into the newly-typed column.
        db.execute_sql("UPDATE packageversion SET license = license_old")
        migrate(migrator.drop_column('packageversion', 'license_old'))
from __future__ import print_function
from redash.models import db, Change, AccessPermission, Query, Dashboard
from playhouse.migrate import PostgresqlMigrator, migrate

if __name__ == '__main__':
    # Create the new tables when they are missing.
    for model in (Change, AccessPermission):
        if not model.table_exists():
            model.create_table()

    migrator = PostgresqlMigrator(db.database)

    # Adding a column that already exists raises; treat that as benign.
    try:
        migrate(
            migrator.add_column('queries', 'version', Query.version),
            migrator.add_column('dashboards', 'version', Dashboard.version)
        )
    except Exception as ex:
        print("Error while adding version column to queries/dashboards. Maybe it already exists?")
        print(ex)
def v1(migrator):
    """Schema v1: add a non-null 'language' column (default "ukr") to the posts table."""
    posts_table = models.Post._meta.name
    language_column = CharField(null=False, default="ukr")
    migrate(migrator.add_column(posts_table, "language", language_column))
def add_gain_interval():
    """Add the non-null 'gain_interval' column (default 300) to moneyconfiguration."""
    gain_interval_field = IntegerField(null=False, default=300)
    migrate(
        migrator.add_column("moneyconfiguration", "gain_interval",
                            gain_interval_field))
# Migration script: introduce notification destinations and attach a
# destination to existing alert subscriptions.
# NOTE(review): Python 2 syntax (`print` statement) — this script will not
# run under Python 3.
# NOTE(review): `peewee` is referenced below but no `import peewee` is
# visible in this chunk — confirm it is imported elsewhere in the file.
# NOTE(review): this chunk appears truncated after the WEBHOOK_USERNAME
# assignment; the remainder of the script is outside this view.
from redash import settings
from redash.models import db, NotificationDestination, AlertSubscription, Alert, Organization, User
from redash.destinations import get_configuration_schema_for_destination_type
from redash.utils.configuration import ConfigurationContainer
from playhouse.migrate import PostgresqlMigrator, migrate

if __name__ == '__main__':
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        # Create the destinations table on first run.
        if not NotificationDestination.table_exists():
            NotificationDestination.create_table()

        # Update alert subscription fields
        migrate(
            migrator.add_column('alert_subscriptions', 'destination_id',
                                AlertSubscription.destination)
        )

        # Find the default org and one of its admins; without them the
        # legacy Webhook/HipChat subscriptions cannot be migrated.
        try:
            org = Organization.get_by_slug('default')
            user = User.select().where(User.org==org, peewee.SQL("%s = ANY(groups)", org.admin_group.id)).get()
        except Exception:
            print "!!! Warning: failed finding default organization or admin user, won't migrate Webhook/HipChat alert subscriptions."
            exit()

        if settings.WEBHOOK_ENDPOINT:
            # Have all existing alerts send to webhook if already configured
            schema = get_configuration_schema_for_destination_type('webhook')

            conf = {'url': settings.WEBHOOK_ENDPOINT}
            if settings.WEBHOOK_USERNAME:
                conf['username'] = settings.WEBHOOK_USERNAME
# Migration: add the serialized `options` column to the queries table.
from redash.models import db, Query
from playhouse.migrate import PostgresqlMigrator, migrate

if __name__ == '__main__':
    migrator = PostgresqlMigrator(db.database)

    # Apply the DDL inside a single transaction.
    with db.database.transaction():
        add_options = migrator.add_column('queries', 'options', Query.options)
        migrate(add_options)
# Migration: remove the unused `countries` column from the users table.
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == "__main__":
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    # Single DDL statement, wrapped in a transaction.
    with db.database.transaction():
        drop_countries = migrator.drop_column("users", "countries")
        migrate(drop_countries)

    db.close_db(None)
# Migration: add created_at/updated_at timestamp columns across the core
# tables, then backfill updated_at from created_at where both exist.
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    # (table, column, model field) triples for every column being added.
    new_columns = (
        ('queries', 'updated_at', models.Query.updated_at),
        ('dashboards', 'updated_at', models.Dashboard.updated_at),
        ('widgets', 'updated_at', models.Widget.updated_at),
        ('users', 'created_at', models.User.created_at),
        ('users', 'updated_at', models.User.updated_at),
        ('visualizations', 'created_at', models.Visualization.created_at),
        ('visualizations', 'updated_at', models.Visualization.updated_at),
    )
    with db.database.transaction():
        migrate(*[migrator.add_column(table, column, field)
                  for table, column, field in new_columns])

    # Backfill: new updated_at columns start equal to created_at.
    db.database.execute_sql("UPDATE queries SET updated_at = created_at;")
    db.database.execute_sql("UPDATE dashboards SET updated_at = created_at;")
    db.database.execute_sql("UPDATE widgets SET updated_at = created_at;")

    db.close_db(None)
# NOTE(review): the two defs below take `self` and read `self.twitter_user`,
# so they appear to be methods of a class whose header (and likely a
# @property decorator on screen_name, matching the one on name) is outside
# this chunk — confirm against the full file before relying on this layout.
def screen_name(self):
    # Delegates to the linked TwitterUser record.
    return self.twitter_user.screen_name

@property
def name(self):
    # Delegates to the linked TwitterUser record.
    return self.twitter_user.name

# Create tables
for t in (TwitterUser, TelegramChat, Tweet, Subscription):
    t.create_table(fail_silently=True)

# Migrate new fields. TODO: think of some better migration mechanism
db = SqliteDatabase('peewee.db', timeout=10)
migrator = SqliteMigrator(db)
operations = [
    migrator.add_column('tweet', 'photo_url', Tweet.photo_url),
    migrator.add_column('twitteruser', 'last_fetched',
                        TwitterUser.last_fetched),
    migrator.add_column('telegramchat', 'twitter_request_token',
                        TelegramChat.twitter_request_token),
    migrator.add_column('telegramchat', 'twitter_token',
                        TelegramChat.twitter_token),
    migrator.add_column('telegramchat', 'twitter_secret',
                        TelegramChat.twitter_secret),
    migrator.add_column('telegramchat', 'timezone_name',
                        TelegramChat.timezone_name),
    migrator.add_column('telegramchat', 'delete_soon',
                        TelegramChat.delete_soon),
]
# Each add_column is applied independently; OperationalError (column
# already exists) is ignored so reruns are harmless.
for op in operations:
    try:
        migrate(op)
    except OperationalError:
        pass