def do_upgrade():
    """Implement your upgrades here."""
    # Dropping the old primary key is not needed: the trick in
    # invenio/base/scripts/database.py already removes it.
    # op.drop_constraint(None, 'collection_field_fieldvalue',
    #                    type_='primary')

    # Add the new "id" column.
    op.add_column(
        'collection_field_fieldvalue',
        db.Column('id', db.MediumInteger(9, unsigned=True), nullable=False))

    # Number every existing row in a deterministic order.
    rows = run_sql(
        """SELECT id_collection, id_field, type, score, score_fieldvalue
           FROM collection_field_fieldvalue AS cff
           ORDER BY cff.id_collection, id_field, type, score,
                    score_fieldvalue""")
    for position, row in enumerate(rows, start=1):
        run_sql(
            """UPDATE collection_field_fieldvalue SET id = %s
               WHERE id_collection = %s AND id_field = %s AND type = %s
                 AND score = %s AND score_fieldvalue = %s """,
            (position,) + tuple(row))

    # Promote "id" to the primary key ...
    op.create_primary_key('pk_collection_field_fieldvalue_id',
                          'collection_field_fieldvalue', ['id'])
    # ... and make it auto-incrementing.
    op.alter_column('collection_field_fieldvalue', 'id',
                    existing_type=db.MediumInteger(9, unsigned=True),
                    existing_nullable=False,
                    autoincrement=True)
def do_upgrade():
    """Implement your upgrades here."""
    # Add the new "id" column.
    op.add_column(
        'collection_field_fieldvalue',
        db.Column('id', db.MediumInteger(9, unsigned=True), nullable=False))

    # Assign sequential ids to all existing rows, in deterministic order.
    records = run_sql(
        """SELECT id_collection, id_field, id_fieldvalue, type, score,
                  score_fieldvalue
           FROM collection_field_fieldvalue AS cff
           ORDER BY cff.id_collection, id_field, id_fieldvalue, type,
                    score, score_fieldvalue""")
    for index, rec in enumerate(records):
        # id_fieldvalue may be NULL, which never matches "= %s" in SQL, so
        # the predicate switches between "= %s" and "IS NULL".  Bind the
        # value as a query parameter instead of interpolating it into the
        # SQL text with %-formatting (safer; lets the driver do quoting).
        if rec[2] is not None:
            run_sql(
                """UPDATE collection_field_fieldvalue SET id = %s
                   WHERE id_collection = %s AND id_field = %s
                     AND type = %s AND score = %s
                     AND score_fieldvalue = %s
                     AND id_fieldvalue = %s """,
                (index + 1, rec[0], rec[1], rec[3], rec[4], rec[5], rec[2]))
        else:
            run_sql(
                """UPDATE collection_field_fieldvalue SET id = %s
                   WHERE id_collection = %s AND id_field = %s
                     AND type = %s AND score = %s
                     AND score_fieldvalue = %s
                     AND id_fieldvalue IS NULL """,
                (index + 1, rec[0], rec[1], rec[3], rec[4], rec[5]))

    # Promote "id" to an auto-incrementing primary key.
    op.create_primary_key('pk_collection_field_fieldvalue_id',
                          'collection_field_fieldvalue', ['id'])
    op.alter_column('collection_field_fieldvalue', 'id',
                    existing_type=db.MediumInteger(9, unsigned=True),
                    existing_nullable=False,
                    autoincrement=True)
def do_upgrade():
    """Implement your upgrades here."""
    # Add the new "id" column.
    op.add_column(
        "collection_field_fieldvalue",
        db.Column("id", db.MediumInteger(9, unsigned=True), nullable=False),
    )
    # Number every existing row deterministically.
    records = run_sql(
        """SELECT id_collection, id_field, id_fieldvalue, type, score,
                  score_fieldvalue
           FROM collection_field_fieldvalue AS cff
           ORDER BY cff.id_collection, id_field, id_fieldvalue, type,
                    score, score_fieldvalue"""
    )
    for index, rec in enumerate(records):
        # NULL never matches "= %s" in SQL, so pick the right predicate.
        # Bind id_fieldvalue as a query parameter instead of %-formatting
        # it into the SQL text (lets the driver handle quoting).
        if rec[2] is None:
            sql = """UPDATE collection_field_fieldvalue SET id = %s
                     WHERE id_collection = %s AND id_field = %s
                       AND type = %s AND score = %s
                       AND score_fieldvalue = %s
                       AND id_fieldvalue IS NULL """
            params = (index + 1, rec[0], rec[1], rec[3], rec[4], rec[5])
        else:
            sql = """UPDATE collection_field_fieldvalue SET id = %s
                     WHERE id_collection = %s AND id_field = %s
                       AND type = %s AND score = %s
                       AND score_fieldvalue = %s
                       AND id_fieldvalue = %s """
            params = (index + 1, rec[0], rec[1], rec[3], rec[4], rec[5],
                      rec[2])
        run_sql(sql, params)
    # Make "id" the auto-incrementing primary key.
    op.create_primary_key("pk_collection_field_fieldvalue_id",
                          "collection_field_fieldvalue", ["id"])
    op.alter_column(
        "collection_field_fieldvalue",
        "id",
        existing_type=db.MediumInteger(9, unsigned=True),
        existing_nullable=False,
        autoincrement=True,
    )
def do_upgrade():
    """Switch seqSTORE.id to an unsigned INT(15) with auto-increment."""
    op.alter_column(
        u'seqSTORE',
        'id',
        autoincrement=True,
        type_=db.Integer(display_width=15, unsigned=True),
    )
def do_upgrade():
    """Widen remoteACCOUNT.extra_data from TEXT to LONGTEXT (nullable)."""
    table, column = u'remoteACCOUNT', 'extra_data'
    op.alter_column(
        table,
        column,
        existing_type=mysql.TEXT(),
        nullable=True,
        type_=mysql.LONGTEXT(),
    )
def do_upgrade():
    """Carry out the upgrade."""
    # Retype facet_collection.id_collection as MEDIUMINT(9) UNSIGNED.
    op.alter_column(
        column_name='id_collection',
        table_name='facet_collection',
        type_=db.MediumInteger(9, unsigned=True),
    )
def do_upgrade():
    """Grow bibrec.additional_info from TEXT to LONGTEXT (nullable)."""
    op.alter_column(
        u'bibrec',
        'additional_info',
        nullable=True,
        existing_type=mysql.TEXT(),
        type_=mysql.LONGTEXT(),
    )
def do_upgrade():
    """Carry out the upgrade."""
    # bibupload_task_id becomes MEDIUMINT(15) UNSIGNED; nullability and
    # the '0' server default are restated unchanged.
    op.alter_column(
        table_name='oaiHARVESTLOG',
        column_name='bibupload_task_id',
        existing_server_default='0',
        existing_nullable=False,
        type_=db.MediumInteger(15, unsigned=True),
    )
def do_upgrade():
    """Implement your upgrades here."""
    # Tighten remoteACCOUNT.extra_data to NOT NULL.
    op.alter_column(
        'remoteACCOUNT',
        'extra_data',
        nullable=False,
        existing_type=mysql.LONGTEXT(),
    )
def do_upgrade():
    """Upgrade implementation."""
    # Rename community.has_logo to logo_ext and retype it as VARCHAR(5).
    op.alter_column(
        table_name="community",
        column_name="has_logo",
        type_=db.String(length=5),
        new_column_name="logo_ext",
    )
def do_upgrade():
    """Implement your upgrades here."""
    # Allow msgMESSAGE.id_user_from to be NULL; type and server default
    # are restated unchanged.
    op.alter_column(
        'msgMESSAGE',
        'id_user_from',
        nullable=True,
        existing_server_default='0',
        existing_type=mysql.INTEGER(display_width=15),
    )
def do_upgrade():
    """Carry out the upgrade."""
    # knwKBRVAL.id_knwKB becomes MEDIUMINT(8) UNSIGNED; nullability and
    # the '0' server default are restated unchanged.
    op.alter_column(
        column_name='id_knwKB',
        table_name='knwKBRVAL',
        existing_nullable=False,
        existing_server_default='0',
        type_=db.MediumInteger(8, unsigned=True),
    )
def do_upgrade():
    """Carry out the upgrade."""
    # Retype oaiHARVESTLOG.bibupload_task_id as MEDIUMINT(15) UNSIGNED.
    change = dict(
        table_name='oaiHARVESTLOG',
        column_name='bibupload_task_id',
        type_=db.MediumInteger(15, unsigned=True),
        existing_nullable=False,
        existing_server_default='0',
    )
    op.alter_column(**change)
def do_upgrade():
    """Make the sequenceid columns unsigned INT(15) on both task tables."""
    for table in (u'schTASK', u'hstTASK'):
        op.alter_column(
            table,
            'sequenceid',
            type_=db.Integer(display_width=15, unsigned=True),
        )
def do_upgrade():
    """Carry out the upgrade."""
    # Both columns live on oauth2TOKEN and remain NOT NULL.
    op.alter_column(
        table_name='oauth2TOKEN',
        column_name='client_id',
        existing_nullable=False,
        type_=db.String(255),
    )
    op.alter_column(
        table_name='oauth2TOKEN',
        column_name='user_id',
        existing_nullable=False,
        type_=db.Integer(15, unsigned=True),
    )
def do_upgrade():
    """ Implement your upgrades here """
    # Retype sequenceid as INT(15) on both task tables.
    for tbl in (u'schTASK', u'hstTASK'):
        op.alter_column(
            tbl,
            'sequenceid',
            type_=db.Integer(display_width=15),
        )
def do_upgrade():
    """Upgrade recipe.

    Adds two new columns (password_salt and password_scheme) and migrates
    emails to password salt.
    """
    op.add_column('user', db.Column('password_salt', db.String(length=255),
                                    nullable=True))
    op.add_column('user', db.Column('password_scheme', db.String(length=50),
                                    nullable=False))
    # Temporary column needed for data migration; renamed to "password"
    # at the end of this recipe.
    op.add_column('user', db.Column('new_password', db.String(length=255)))

    # Migrate emails to password_salt (the legacy scheme used the email
    # address as the AES key, hence it becomes the salt).
    m = db.MetaData(bind=db.engine)
    m.reflect()
    u = m.tables['user']
    conn = db.engine.connect()
    conn.execute(u.update().values(
        password_salt=u.c.email,
        password_scheme='invenio_aes_encrypted_email'
    ))

    # Migrate password blob to password varchar.
    for row in conn.execute(select([u])):
        # NOTE: Empty string passwords were stored as empty strings
        # instead of a hashed version, hence they must be treated
        # differently.
        legacy_pw = row[u.c.password] or mysql_aes_encrypt(row[u.c.email], "")
        # NOTE(review): hashlib.sha256() requires bytes on Python 3 —
        # presumably legacy_pw is a byte blob from the DB; confirm if
        # this recipe is ever run under Python 3.
        stmt = u.update().where(
            u.c.id == row[u.c.id]
        ).values(
            new_password=hashlib.sha256(legacy_pw).hexdigest()
        )
        conn.execute(stmt)

    # Create index on the new scheme column.
    op.create_index(
        op.f('ix_user_password_scheme'), 'user', ['password_scheme'],
        unique=False
    )

    # Drop old database column and rename new.
    op.drop_column('user', 'password')
    op.alter_column(
        'user',
        'new_password',
        new_column_name='password',
        existing_type=mysql.VARCHAR(255),
        existing_nullable=True,
    )
def do_upgrade():
    """Upgrade bibrec.additional_info to LONGTEXT, adding it if needed."""
    try:
        op.alter_column(u'bibrec', 'additional_info',
                        existing_type=mysql.TEXT(),
                        type_=mysql.LONGTEXT(),
                        nullable=True)
    except OperationalError:
        # The ALTER failed — presumably the column does not exist yet,
        # so create it directly as LONGTEXT.
        op.add_column(
            'bibrec',
            sa.Column('additional_info', mysql.LONGTEXT(), nullable=True))
def do_upgrade():
    """Implement your upgrades here."""
    # Replace the surrogate "id" key with a composite primary key.
    op.drop_column('knwKBRVAL', 'id')
    op.create_primary_key('pkey', 'knwKBRVAL', ['m_key', 'id_knwKB'])
    # Remove the empty-string server default from m_key; the type and
    # NOT NULL constraint are restated unchanged.
    op.alter_column(
        'knwKBRVAL',
        'm_key',
        existing_type=sa.String(length=255),
        type_=sa.String(length=255),
        existing_nullable=False,
        nullable=False,
        existing_server_default='',
        server_default=None,
    )
def do_upgrade():
    """Widen bibrec.additional_info to LONGTEXT, creating it on failure."""
    try:
        op.alter_column(
            u'bibrec',
            'additional_info',
            nullable=True,
            existing_type=mysql.TEXT(),
            type_=mysql.LONGTEXT(),
        )
    except OperationalError:
        # ALTER failed — likely the column is missing; add it instead.
        op.add_column('bibrec',
                      sa.Column('additional_info', mysql.LONGTEXT(),
                                nullable=True))
def do_upgrade():
    """Upgrade recipe.

    Adds two new columns (password_salt and password_scheme) and migrates
    emails to password salt.
    """
    op.add_column(
        'user',
        db.Column('password_salt', db.String(length=255), nullable=True))
    op.add_column(
        'user',
        db.Column('password_scheme', db.String(length=50), nullable=False))
    # Temporary column needed for data migration; renamed to "password"
    # at the end of this recipe.
    op.add_column('user', db.Column('new_password', db.String(length=255)))

    # Migrate emails to password_salt (the legacy scheme used the email
    # address as the AES key, hence it becomes the salt).
    m = db.MetaData(bind=db.engine)
    m.reflect()
    u = m.tables['user']
    conn = db.engine.connect()
    conn.execute(
        u.update().values(password_salt=u.c.email,
                          password_scheme='invenio_aes_encrypted_email'))

    # Migrate password blob to password varchar.
    for row in conn.execute(select([u])):
        # NOTE: Empty string passwords were stored as empty strings
        # instead of a hashed version, hence they must be treated
        # differently.
        legacy_pw = row[u.c.password] or mysql_aes_encrypt(row[u.c.email], "")
        # NOTE(review): hashlib.sha256() needs bytes under Python 3 —
        # presumably legacy_pw is a byte blob from the DB; confirm.
        stmt = u.update().where(u.c.id == row[u.c.id]).values(
            new_password=hashlib.sha256(legacy_pw).hexdigest())
        conn.execute(stmt)

    # Create index on the new scheme column.
    op.create_index(op.f('ix_user_password_scheme'), 'user',
                    ['password_scheme'], unique=False)

    # Drop old database column and rename new.
    op.drop_column('user', 'password')
    op.alter_column(
        'user',
        'new_password',
        new_column_name='password',
        existing_type=mysql.VARCHAR(255),
        existing_nullable=True,
    )
def do_upgrade():
    """Carry out the upgrade."""
    # Retype both oauth2TOKEN columns; each stays NOT NULL.
    changes = (
        ('client_id', db.String(255)),
        ('user_id', db.Integer(15, unsigned=True)),
    )
    for column, new_type in changes:
        op.alter_column(
            table_name='oauth2TOKEN',
            column_name=column,
            type_=new_type,
            existing_nullable=False,
        )
def do_upgrade():
    """Implement your upgrades here."""
    # Add bibfmt.kind; if the ADD COLUMN fails, warn instead of aborting
    # (presumably the column already exists).
    try:
        op.add_column(
            'bibfmt',
            sa.Column('kind', sa.String(length=10), server_default='',
                      nullable=False))
    except OperationalError:
        warnings.warn("*** Problem adding column bibfmt.kind. "
                      "Does it already exist? ***")
    # Allow format.last_updated to be NULL; the legacy zero-datetime
    # server default is restated unchanged.
    op.alter_column('format', 'last_updated',
                    nullable=True,
                    existing_type=mysql.DATETIME(),
                    existing_server_default='0000-00-00 00:00:00')
def do_upgrade():
    """Apply schema changes to history, OAI harvest and record tables."""
    # History tables gain a plain INT(15) "id" column.
    for hist_table in ('hstDOCUMENT', 'hstRECORD'):
        op.add_column(
            hist_table,
            db.Column('id', mysql.INTEGER(display_width=15),
                      nullable=False))
    op.alter_column('hstRECORD', 'affected_fields',
                    nullable=True,
                    existing_type=mysql.TEXT())
    # OAI Harvest: drop two columns.
    op.drop_column('oaiHARVEST', u'bibconvertcfgfile')
    op.drop_column('oaiHARVEST', u'bibfilterprogram')
    # xtrJOB: drop last_recid.
    op.drop_column('xtrJOB', u'last_recid')
    # Record: add a JSON additional_info column.
    op.add_column("bibrec", db.Column("additional_info", db.JSON))
def do_upgrade():
    """Implement your upgrades here."""
    m = db.MetaData(bind=db.engine)
    m.reflect()
    u = m.tables['user']
    conn = db.engine.connect()
    # Backfill NULLs with '' before tightening the columns to NOT NULL.
    # Use .is_(None) rather than "== None": it emits the same IS NULL
    # SQL but is the idiomatic SQLAlchemy form (and avoids the E711
    # comparison-to-None lint warning).
    conn.execute(u.update().where(u.c.family_name.is_(None)).values(
        family_name=''))
    conn.execute(u.update().where(u.c.given_names.is_(None)).values(
        given_names=''))
    op.alter_column('user', 'family_name',
                    existing_type=mysql.VARCHAR(length=255),
                    nullable=False, server_default='')
    op.alter_column('user', 'given_names',
                    existing_type=mysql.VARCHAR(length=255),
                    nullable=False, server_default='')
def do_upgrade():
    """ Implement your upgrades here """
    # The pid table becomes pidSTORE and its columns are renamed.
    op.rename_table('pid', 'pidSTORE')
    op.drop_index('idx_object_type_id', 'pidSTORE')
    op.alter_column('pidSTORE', 'type',
                    existing_type=mysql.VARCHAR(length=6),
                    existing_server_default='',
                    nullable=False,
                    new_column_name='pid_type')
    op.alter_column('pidSTORE', 'pid',
                    existing_type=mysql.VARCHAR(length=255),
                    existing_server_default='',
                    nullable=False,
                    new_column_name='pid_value')
    op.alter_column('pidSTORE', 'object_id',
                    existing_type=mysql.VARCHAR(length=255),
                    existing_server_default='',
                    nullable=True,
                    new_column_name='object_value')
    # New mandatory pid_provider column.
    op.add_column(
        'pidSTORE',
        db.Column('pid_provider', db.String(length=255), nullable=False))
def do_upgrade():
    """ Implement your upgrades here """
    op.rename_table('pid', 'pidSTORE')
    op.drop_index('idx_object_type_id', 'pidSTORE')
    # (old name, new name, existing type, new nullability)
    renames = (
        ('type', 'pid_type', mysql.VARCHAR(length=6), False),
        ('pid', 'pid_value', mysql.VARCHAR(length=255), False),
        ('object_id', 'object_value', mysql.VARCHAR(length=255), True),
    )
    for old, new, old_type, is_nullable in renames:
        op.alter_column('pidSTORE', old,
                        new_column_name=new,
                        existing_type=old_type,
                        nullable=is_nullable,
                        existing_server_default='')
    # New mandatory pid_provider column.
    op.add_column(
        'pidSTORE',
        db.Column('pid_provider', db.String(length=255), nullable=False))
def do_upgrade():
    """Implement your upgrades here."""
    # Add the bibfmt.kind column; a failed ADD COLUMN only warns
    # (presumably the column is already present).
    try:
        kind_column = sa.Column('kind', sa.String(length=10),
                                server_default='', nullable=False)
        op.add_column('bibfmt', kind_column)
    except OperationalError:
        warnings.warn(
            "*** Problem adding column bibfmt.kind. Does it already exist? ***")
    # format.last_updated becomes nullable; type and the zero-datetime
    # default are restated unchanged.
    op.alter_column(
        'format',
        'last_updated',
        nullable=True,
        existing_type=mysql.DATETIME(),
        existing_server_default='0000-00-00 00:00:00',
    )
def do_upgrade():
    """Implement your upgrades here."""
    meta = db.MetaData(bind=db.engine)
    meta.reflect()
    users = meta.tables['user']
    conn = db.engine.connect()
    # Replace NULLs with empty strings so the NOT NULL constraint below
    # can be applied.
    conn.execute(users.update().where(
        users.c.family_name.is_(None)).values(family_name=''))
    conn.execute(users.update().where(
        users.c.given_names.is_(None)).values(given_names=''))
    # Tighten both columns to NOT NULL with an empty-string default.
    for column in ('family_name', 'given_names'):
        op.alter_column('user', column,
                        existing_type=mysql.VARCHAR(length=255),
                        nullable=False,
                        server_default='')
def do_upgrade():
    """Apply schema changes to history, OAI harvest and record tables."""
    # History: both tables gain an INT(15) "id" column.
    op.add_column(
        'hstDOCUMENT',
        db.Column('id', mysql.INTEGER(display_width=15), nullable=False))
    op.add_column(
        'hstRECORD',
        db.Column('id', mysql.INTEGER(display_width=15), nullable=False))
    op.alter_column('hstRECORD', 'affected_fields',
                    nullable=True, existing_type=mysql.TEXT())
    # OAI Harvest: remove two columns.
    for dropped in (u'bibconvertcfgfile', u'bibfilterprogram'):
        op.drop_column('oaiHARVEST', dropped)
    # xtrJOB: remove last_recid.
    op.drop_column('xtrJOB', u'last_recid')
    # Record: add a JSON additional_info column.
    op.add_column("bibrec", db.Column("additional_info", db.JSON))
def do_upgrade():
    """Implement your upgrades here."""
    # Ensure oaiHARVEST.workflows exists as a NOT NULL VARCHAR(255) with
    # an empty default; when adding fails, alter the existing column
    # into shape instead.
    try:
        op.add_column(
            'oaiHARVEST',
            sa.Column('workflows', sa.String(length=255),
                      server_default='', nullable=False))
    except OperationalError:
        op.alter_column('oaiHARVEST', 'workflows',
                        existing_type=sa.String(length=255),
                        nullable=False,
                        server_default='')
    # Set default workflow with backwards compatibility for those who
    # have none.
    for object_id, workflows in run_sql(
            "SELECT id, workflows FROM oaiHARVEST"):
        if not workflows:
            run_sql("UPDATE oaiHARVEST set workflows=%s WHERE id=%s",
                    ("oaiharvest_harvest_repositories", str(object_id)))
    # Drop the frequency column, tolerating it already being gone.
    try:
        op.drop_column('oaiHARVEST', 'frequency')
    except OperationalError as err:
        warnings.warn(
            "*** Error removing 'oaiHARVEST.frequency' column: {0} ***".format(
                str(err)
            )
        )
def do_upgrade():
    """Migrate bwl* workflow tables to the base64-pickle column layout.

    NOTE(review): this recipe mutates pickled blobs and renames/drops
    columns in a fixed order; the steps are not independently re-runnable.
    """
    import invenio
    import sys
    import types

    # Placeholder classes so old pickles referencing
    # invenio.bibfield_utils.CoolDict/CoolList can still be unpickled.
    class CoolDict(dict):
        pass

    class CoolList(list):
        pass

    # Fake old non-existing module so cPickle.loads can resolve it.
    m = types.ModuleType('invenio.bibfield_utils')
    m.CoolDict = CoolDict
    m.CoolList = CoolList
    sys.modules['invenio.bibfield_utils'] = m
    invenio.bibfield_utils = m

    # Minimal table definitions (only the columns this recipe touches).
    bwlobject = table(
        'bwlOBJECT',
        column('id', db.Integer(primary_key=True)),
        column('extra_data', db.MutableDict.as_mutable(db.PickleType)),
        column('_extra_data', db.LargeBinary()),
        column('_data', db.LargeBinary()),
    )
    bwlworkflow = table(
        'bwlWORKFLOW',
        column('uuid', db.String(36)),
        column('extra_data', db.MutableDict.as_mutable(db.PickleType)),
        column('_extra_data', db.LargeBinary()),
    )
    bwlobjectlogging = table(
        'bwlOBJECTLOGGING',
        column('id_object', db.Integer()),
        column('id_bibworkflowobject', db.Integer()),
    )
    bwlworkflowlogging = table(
        'bwlWORKFLOWLOGGING',
        column('id_object', db.String()),
        column('id_workflow', db.String()),
    )

    conn = op.get_bind()

    # Object table: add the new binary column, then re-encode each row.
    op.add_column('bwlOBJECT', db.Column(
        '_extra_data', db.LargeBinary(), nullable=False))
    query = select(columns=['id', 'extra_data', '_data'], from_obj=bwlobject)
    for r in conn.execute(query):
        # Decode and re-encode old value (pickle -> base64-pickle).
        value = base64.b64encode(cPickle.dumps(cPickle.loads(r.extra_data)))
        # Ensure data value can be read; transform_data() normalizes the
        # unpickled payload before it is re-encoded.
        data_value = base64.b64encode(cPickle.dumps(
            transform_data(cPickle.loads(base64.b64decode(r._data)))
        ))
        # Update value in table.
        op.execute(
            bwlobject.update().where(bwlobject.c.id == r.id).values(
                _extra_data=value,
                _data=data_value,
            )
        )
    op.drop_column('bwlOBJECT', u'extra_data')
    op.alter_column('bwlOBJECT', 'data_type',
                    existing_type=mysql.VARCHAR(length=50),
                    nullable=True)
    op.alter_column('bwlOBJECT', 'id_workflow',
                    existing_type=mysql.VARCHAR(length=36),
                    nullable=True)

    # Workflow table: same add/re-encode/drop dance for extra_data.
    op.add_column('bwlWORKFLOW', db.Column(
        '_extra_data', db.LargeBinary(), nullable=False))
    query = select(columns=['uuid', 'extra_data'], from_obj=bwlworkflow)
    for r in conn.execute(query):
        # Decode and re-encode old value.
        value = base64.b64encode(cPickle.dumps(cPickle.loads(r.extra_data)))
        # Update value in table.
        op.execute(
            bwlworkflow.update().where(bwlworkflow.c.uuid == r.uuid).values(
                _extra_data=value
            )
        )
    op.drop_column('bwlWORKFLOW', u'extra_data')

    # Object logging: rename id_bibworkflowobject -> id_object by copy,
    # then drop the old and unused columns.
    op.add_column('bwlOBJECTLOGGING', db.Column(
        'id_object', mysql.INTEGER(display_width=255), nullable=False))
    op.execute(
        bwlobjectlogging.update().values({
            bwlobjectlogging.c.id_object:
                bwlobjectlogging.c.id_bibworkflowobject
        })
    )
    op.drop_column('bwlOBJECTLOGGING', u'id_bibworkflowobject')
    op.drop_column('bwlOBJECTLOGGING', u'extra_data')
    op.drop_column('bwlOBJECTLOGGING', u'error_msg')

    # Workflow logging: rename id_workflow -> id_object the same way.
    op.add_column('bwlWORKFLOWLOGGING', db.Column(
        'id_object', db.String(length=255), nullable=False))
    op.execute(
        bwlworkflowlogging.update().values({
            bwlworkflowlogging.c.id_object:
                bwlworkflowlogging.c.id_workflow
        })
    )
    op.drop_column('bwlWORKFLOWLOGGING', u'id_workflow')
    op.drop_column('bwlWORKFLOWLOGGING', u'extra_data')
    op.drop_column('bwlWORKFLOWLOGGING', u'error_msg')
def do_upgrade():
    """Give sbmFORMATEXTENSION and sbmGFILERESULT surrogate primary keys.

    For each table: add an "id" column, number the existing rows in a
    deterministic order, swap the primary key to "id", make it
    auto-incrementing, and index the former key columns.
    """
    # table sbmFORMATEXTENSION
    # add "id" column
    op.add_column('sbmFORMATEXTENSION',
                  db.Column('id', db.Integer(), nullable=False))
    # set all ids, ordered so the numbering is reproducible
    records = run_sql("""SELECT FILE_FORMAT, FILE_EXTENSION FROM """
                      """sbmFORMATEXTENSION AS sbm """
                      """ORDER BY sbm.FILE_FORMAT, sbm.FILE_EXTENSION""")
    for index, rec in enumerate(records):
        run_sql(
            """UPDATE sbmFORMATEXTENSION """
            """SET id = %s """
            """ WHERE FILE_FORMAT = %s AND """
            """ FILE_EXTENSION = %s """,
            (index + 1, rec[0], rec[1]))
    # remove primary key
    try:
        op.drop_constraint(None, 'sbmFORMATEXTENSION', type_='primary')
    except OperationalError:
        # the primary key is already dropped
        warnings.warn("""Primary key of sbmFORMATEXTENSION """
                      """table has been already dropped.""")
    # set id as new primary key
    op.create_primary_key('pk_sbmFORMATEXTENSION_id',
                          'sbmFORMATEXTENSION', ['id'])
    # set id as autoincrement
    op.alter_column('sbmFORMATEXTENSION', 'id',
                    existing_type=db.Integer(),
                    existing_nullable=False,
                    autoincrement=True)
    # create indices (mysql_length caps the indexed prefix of each column)
    op.create_index('sbmformatextension_file_extension_idx',
                    'sbmFORMATEXTENSION',
                    columns=['FILE_EXTENSION'], unique=False,
                    mysql_length=10)
    op.create_index('sbmformatextension_file_format_idx',
                    'sbmFORMATEXTENSION',
                    columns=['FILE_FORMAT'], unique=False,
                    mysql_length=50)

    # table sbmGFILERESULT
    # add "id" column
    op.add_column('sbmGFILERESULT',
                  db.Column('id', db.Integer(), nullable=False))
    # set all ids, ordered so the numbering is reproducible
    records = run_sql("""SELECT FORMAT, RESULT FROM """
                      """sbmGFILERESULT AS sbm """
                      """ORDER BY sbm.FORMAT, sbm.RESULT""")
    for index, rec in enumerate(records):
        run_sql(
            """UPDATE sbmGFILERESULT """
            """SET id = %s """
            """ WHERE FORMAT = %s AND """
            """ RESULT = %s """,
            (index + 1, rec[0], rec[1]))
    # remove primary key
    try:
        op.drop_constraint(None, 'sbmGFILERESULT', type_='primary')
    except OperationalError:
        # the primary key is already dropped
        warnings.warn("""Primary key of sbmGFILERESULT """
                      """table has been already dropped.""")
    # set id as new primary key
    op.create_primary_key('pk_sbmGFILERESULT_id', 'sbmGFILERESULT', ['id'])
    # set id as autoincrement
    op.alter_column('sbmGFILERESULT', 'id',
                    existing_type=db.Integer(),
                    existing_nullable=False,
                    autoincrement=True)
    # create indices
    op.create_index('sbmgfileresult_format_idx', 'sbmGFILERESULT',
                    columns=['FORMAT'], unique=False, mysql_length=50)
    op.create_index('sbmgfileresult_result_idx', 'sbmGFILERESULT',
                    columns=['RESULT'], unique=False, mysql_length=50)
def do_upgrade():
    """Migrate bwl* workflow tables to the base64-pickle column layout.

    NOTE(review): this recipe mutates pickled blobs and renames/drops
    columns in a fixed order; the steps are not independently re-runnable.
    """
    import invenio
    import sys
    import types

    # Placeholder classes so old pickles referencing
    # invenio.bibfield_utils.CoolDict/CoolList can still be unpickled.
    class CoolDict(dict):
        pass

    class CoolList(list):
        pass

    # Fake old non-existing module so cPickle.loads can resolve it.
    m = types.ModuleType('invenio.bibfield_utils')
    m.CoolDict = CoolDict
    m.CoolList = CoolList
    sys.modules['invenio.bibfield_utils'] = m
    invenio.bibfield_utils = m

    # Minimal table definitions (only the columns this recipe touches).
    bwlobject = table(
        'bwlOBJECT',
        column('id', db.Integer(primary_key=True)),
        column('extra_data', db.MutableDict.as_mutable(db.PickleType)),
        column('_extra_data', db.LargeBinary()),
        column('_data', db.LargeBinary()),
    )
    bwlworkflow = table(
        'bwlWORKFLOW',
        column('uuid', db.String(36)),
        column('extra_data', db.MutableDict.as_mutable(db.PickleType)),
        column('_extra_data', db.LargeBinary()),
    )
    bwlobjectlogging = table(
        'bwlOBJECTLOGGING',
        column('id_object', db.Integer()),
        column('id_bibworkflowobject', db.Integer()),
    )
    bwlworkflowlogging = table(
        'bwlWORKFLOWLOGGING',
        column('id_object', db.String()),
        column('id_workflow', db.String()),
    )

    conn = op.get_bind()

    # Object table: add new binary column, then re-encode each row.
    op.add_column('bwlOBJECT',
                  db.Column('_extra_data', db.LargeBinary(),
                            nullable=False))
    query = select(columns=['id', 'extra_data', '_data'], from_obj=bwlobject)
    for r in conn.execute(query):
        # Decode and re-encode old value (pickle -> base64-pickle).
        value = base64.b64encode(cPickle.dumps(cPickle.loads(r.extra_data)))
        # Ensure data value can be read; transform_data() normalizes the
        # unpickled payload before it is re-encoded.
        data_value = base64.b64encode(
            cPickle.dumps(
                transform_data(cPickle.loads(base64.b64decode(r._data)))))
        # Update value in table.
        op.execute(bwlobject.update().where(bwlobject.c.id == r.id).values(
            _extra_data=value,
            _data=data_value,
        ))
    op.drop_column('bwlOBJECT', u'extra_data')
    op.alter_column('bwlOBJECT', 'data_type',
                    existing_type=mysql.VARCHAR(length=50),
                    nullable=True)
    op.alter_column('bwlOBJECT', 'id_workflow',
                    existing_type=mysql.VARCHAR(length=36),
                    nullable=True)

    # Workflow table: same add/re-encode/drop dance for extra_data.
    op.add_column('bwlWORKFLOW',
                  db.Column('_extra_data', db.LargeBinary(),
                            nullable=False))
    query = select(columns=['uuid', 'extra_data'], from_obj=bwlworkflow)
    for r in conn.execute(query):
        # Decode and re-encode old value.
        value = base64.b64encode(cPickle.dumps(cPickle.loads(r.extra_data)))
        # Update value in table.
        op.execute(bwlworkflow.update().where(
            bwlworkflow.c.uuid == r.uuid).values(_extra_data=value))
    op.drop_column('bwlWORKFLOW', u'extra_data')

    # Object logging: rename id_bibworkflowobject -> id_object by copy,
    # then drop the old and unused columns.
    op.add_column(
        'bwlOBJECTLOGGING',
        db.Column('id_object', mysql.INTEGER(display_width=255),
                  nullable=False))
    op.execute(bwlobjectlogging.update().values({
        bwlobjectlogging.c.id_object:
            bwlobjectlogging.c.id_bibworkflowobject
    }))
    op.drop_column('bwlOBJECTLOGGING', u'id_bibworkflowobject')
    op.drop_column('bwlOBJECTLOGGING', u'extra_data')
    op.drop_column('bwlOBJECTLOGGING', u'error_msg')

    # Workflow logging: rename id_workflow -> id_object the same way.
    op.add_column(
        'bwlWORKFLOWLOGGING',
        db.Column('id_object', db.String(length=255), nullable=False))
    op.execute(bwlworkflowlogging.update().values(
        {bwlworkflowlogging.c.id_object: bwlworkflowlogging.c.id_workflow}))
    op.drop_column('bwlWORKFLOWLOGGING', u'id_workflow')
    op.drop_column('bwlWORKFLOWLOGGING', u'extra_data')
    op.drop_column('bwlWORKFLOWLOGGING', u'error_msg')
def do_upgrade():
    """Implement your upgrades here."""
    # remoteACCOUNT.extra_data must always be present from now on.
    table_name = 'remoteACCOUNT'
    op.alter_column(
        table_name,
        'extra_data',
        existing_type=mysql.LONGTEXT(),
        nullable=False,
    )
def do_upgrade():
    """Give sbmFORMATEXTENSION and sbmGFILERESULT surrogate primary keys.

    For each table: add an "id" column, number the existing rows in a
    deterministic order, swap the primary key to "id", make it
    auto-incrementing, and index the former key columns.
    """
    # table sbmFORMATEXTENSION
    # add "id" column
    op.add_column('sbmFORMATEXTENSION',
                  db.Column('id', db.Integer(), nullable=False))
    # set all ids, ordered so the numbering is reproducible
    records = run_sql("""SELECT FILE_FORMAT, FILE_EXTENSION FROM """
                      """sbmFORMATEXTENSION AS sbm """
                      """ORDER BY sbm.FILE_FORMAT, sbm.FILE_EXTENSION""")
    for index, rec in enumerate(records):
        run_sql("""UPDATE sbmFORMATEXTENSION """
                """SET id = %s """
                """ WHERE FILE_FORMAT = %s AND """
                """ FILE_EXTENSION = %s """,
                (index + 1, rec[0], rec[1]))
    # remove primary key
    try:
        op.drop_constraint(None, 'sbmFORMATEXTENSION', type_='primary')
    except OperationalError:
        # the primary key is already dropped
        warnings.warn("""Primary key of sbmFORMATEXTENSION """
                      """table has been already dropped.""")
    # set id as new primary key
    op.create_primary_key('pk_sbmFORMATEXTENSION_id',
                          'sbmFORMATEXTENSION', ['id'])
    # set id as autoincrement
    op.alter_column('sbmFORMATEXTENSION', 'id',
                    existing_type=db.Integer(),
                    existing_nullable=False,
                    autoincrement=True)
    # create indices (mysql_length caps the indexed prefix of each column)
    op.create_index('sbmformatextension_file_extension_idx',
                    'sbmFORMATEXTENSION',
                    columns=['FILE_EXTENSION'], unique=False,
                    mysql_length=10)
    op.create_index('sbmformatextension_file_format_idx',
                    'sbmFORMATEXTENSION',
                    columns=['FILE_FORMAT'], unique=False,
                    mysql_length=50)

    # table sbmGFILERESULT
    # add "id" column
    op.add_column('sbmGFILERESULT',
                  db.Column('id', db.Integer(), nullable=False))
    # set all ids, ordered so the numbering is reproducible
    records = run_sql("""SELECT FORMAT, RESULT FROM """
                      """sbmGFILERESULT AS sbm """
                      """ORDER BY sbm.FORMAT, sbm.RESULT""")
    for index, rec in enumerate(records):
        run_sql("""UPDATE sbmGFILERESULT """
                """SET id = %s """
                """ WHERE FORMAT = %s AND """
                """ RESULT = %s """,
                (index + 1, rec[0], rec[1]))
    # remove primary key
    try:
        op.drop_constraint(None, 'sbmGFILERESULT', type_='primary')
    except OperationalError:
        # the primary key is already dropped
        warnings.warn("""Primary key of sbmGFILERESULT """
                      """table has been already dropped.""")
    # set id as new primary key
    op.create_primary_key('pk_sbmGFILERESULT_id', 'sbmGFILERESULT', ['id'])
    # set id as autoincrement
    op.alter_column('sbmGFILERESULT', 'id',
                    existing_type=db.Integer(),
                    existing_nullable=False,
                    autoincrement=True)
    # create indices
    op.create_index('sbmgfileresult_format_idx', 'sbmGFILERESULT',
                    columns=['FORMAT'], unique=False, mysql_length=50)
    op.create_index('sbmgfileresult_result_idx', 'sbmGFILERESULT',
                    columns=['RESULT'], unique=False, mysql_length=50)
def do_upgrade():
    """Widen remoteACCOUNT.extra_data from TEXT to LONGTEXT (nullable)."""
    change = dict(
        existing_type=mysql.TEXT(),
        type_=mysql.LONGTEXT(),
        nullable=True,
    )
    op.alter_column(u'remoteACCOUNT', 'extra_data', **change)
def do_upgrade():
    """Carry out the upgrade."""
    # facet_collection.id_collection becomes MEDIUMINT(9) UNSIGNED.
    op.alter_column(
        type_=db.MediumInteger(9, unsigned=True),
        table_name='facet_collection',
        column_name='id_collection',
    )
def do_upgrade():
    """Give the sbmCOLLECTION_* link tables surrogate "id" primary keys.

    For each table: add "id", number the rows deterministically, replace
    the composite primary key with "id" (auto-increment), relax the
    former key columns, and index id_father.
    """
    # Table sbmCOLLECTION_sbmCOLLECTION
    # add column "id" in the table
    op.add_column('sbmCOLLECTION_sbmCOLLECTION',
                  db.Column('id', db.Integer(11), nullable=False))
    # set all new ids, ordered so the numbering is reproducible
    records = run_sql("""SELECT id_father, id_son FROM """
                      """sbmCOLLECTION_sbmCOLLECTION AS ssc """
                      """ORDER BY ssc.id_father, ssc.id_son""")
    for index, rec in enumerate(records):
        run_sql("""UPDATE sbmCOLLECTION_sbmCOLLECTION SET id = %s
                   WHERE id_father = %s AND id_son = %s
                """, (index + 1, rec[0], rec[1]))
    # drop primary keys; tolerate them being gone already
    try:
        op.drop_constraint(None, 'sbmCOLLECTION_sbmCOLLECTION',
                           type_='primary')
    except OperationalError:
        # the primary key is already dropped
        warnings.warn("""Primary key of sbmCOLLECTION_sbmCOLLECTION """
                      """table has been already dropped.""")
    # create new primary key with id
    op.create_primary_key('pk_sbmCOLLECTION_sbmCOLLECTION_id',
                          'sbmCOLLECTION_sbmCOLLECTION', ['id'])
    # set id as autoincrement
    op.alter_column('sbmCOLLECTION_sbmCOLLECTION', 'id',
                    existing_type=db.Integer(11),
                    existing_nullable=False,
                    autoincrement=True)
    # fix columns id_father and id_son (drop their server defaults;
    # id_father becomes nullable)
    op.alter_column('sbmCOLLECTION_sbmCOLLECTION', 'id_father',
                    existing_type=db.Integer(11),
                    nullable=True,
                    server_default=None)
    op.alter_column('sbmCOLLECTION_sbmCOLLECTION', 'id_son',
                    existing_type=db.Integer(11),
                    nullable=False,
                    server_default=None)
    op.create_index('id_father', 'sbmCOLLECTION_sbmCOLLECTION',
                    columns=['id_father'])

    # Table sbmCOLLECTION_sbmDOCTYPE
    # add column "id" in the table
    op.add_column('sbmCOLLECTION_sbmDOCTYPE',
                  db.Column('id', db.Integer(11), nullable=False))
    # set all new ids, ordered so the numbering is reproducible
    records = run_sql("""SELECT id_father, id_son FROM
                         sbmCOLLECTION_sbmDOCTYPE AS ssd
                         ORDER BY ssd.id_father, ssd.id_son""")
    for index, rec in enumerate(records):
        run_sql("""UPDATE sbmCOLLECTION_sbmDOCTYPE SET id = %s
                   WHERE id_father = %s AND id_son = %s
                """, (index + 1, rec[0], rec[1]))
    # drop primary keys
    # NOTE(review): unlike the first table, this drop is not wrapped in
    # try/except and passes 'id_father' as the constraint name — confirm
    # that is intentional and matches the actual constraint name.
    op.drop_constraint('id_father', 'sbmCOLLECTION_sbmDOCTYPE',
                       type_='primary')
    # create new primary key with id
    op.create_primary_key('pk_sbmCOLLECTION_sbmDOCTYPE_id',
                          'sbmCOLLECTION_sbmDOCTYPE', ['id'])
    # set id as autoincrement
    op.alter_column('sbmCOLLECTION_sbmDOCTYPE', 'id',
                    existing_type=db.Integer(11),
                    existing_nullable=False,
                    autoincrement=True)
    # fix columns id_father and id_son (drop their server defaults;
    # id_father becomes nullable; id_son here is CHAR(10))
    op.alter_column('sbmCOLLECTION_sbmDOCTYPE', 'id_father',
                    existing_type=db.Integer(11),
                    nullable=True,
                    server_default=None)
    op.alter_column('sbmCOLLECTION_sbmDOCTYPE', 'id_son',
                    existing_type=db.Char(10),
                    nullable=False,
                    server_default=None)
    op.create_index('id_father', 'sbmCOLLECTION_sbmDOCTYPE',
                    columns=['id_father'])