Example #1
def do_upgrade():
    """Implement your upgrades here."""
    # drop primary keys
    # (not necessary, because the trick in invenio/base/scripts/database.py
    # already removes the primary key)
    # op.drop_constraint(None, 'collection_field_fieldvalue',
    #                    type_='primary')

    # add column "id" in the table
    op.add_column('collection_field_fieldvalue',
                  db.Column('id', db.MediumInteger(9, unsigned=True),
                            nullable=False))

    # set all new ids
    records = run_sql("""SELECT id_collection, id_field, type, score,
                      score_fieldvalue
                      FROM collection_field_fieldvalue AS cff
                      ORDER BY cff.id_collection, id_field, type, score,
                      score_fieldvalue""")
    for index, rec in enumerate(records):
        run_sql("""UPDATE collection_field_fieldvalue
                SET id = %s WHERE id_collection = %s AND id_field = %s
                AND type = %s AND score = %s AND score_fieldvalue = %s """,
                (index + 1, rec[0], rec[1], rec[2], rec[3], rec[4]))

    # create new primary key with id
    op.create_primary_key('pk_collection_field_fieldvalue_id',
                          'collection_field_fieldvalue', ['id'])

    # set id as autoincrement
    op.alter_column('collection_field_fieldvalue', 'id',
                    existing_type=db.MediumInteger(9, unsigned=True),
                    existing_nullable=False, autoincrement=True)
Example #2
def do_upgrade():
    """Upgrades."""
    op.add_column(
        u'pages', db.Column('description',
                            db.String(length=200),
                            nullable=True))
    op.create_table('pagesLIST',
                    db.Column('id',
                              db.Integer(15, unsigned=True),
                              nullable=False),
                    db.Column('list_id',
                              db.Integer(15, unsigned=True),
                              nullable=False),
                    db.Column('page_id',
                              db.Integer(15, unsigned=True),
                              nullable=False),
                    db.Column('order',
                              db.Integer(15, unsigned=True),
                              nullable=False),
                    db.PrimaryKeyConstraint('id'),
                    db.ForeignKeyConstraint(
                        ['list_id'],
                        [u'pages.id'],
                    ),
                    db.ForeignKeyConstraint(
                        ['page_id'],
                        [u'pages.id'],
                    ),
                    mysql_charset='utf8',
                    mysql_engine='MyISAM')
Example #3
def do_upgrade():
    """Implement your upgrades here."""
    # add column "id" in the table
    op.add_column("collection_field_fieldvalue", db.Column("id", db.MediumInteger(9, unsigned=True), nullable=False))

    # set all new ids
    records = run_sql(
        """SELECT id_collection, id_field, id_fieldvalue,
                      type, score, score_fieldvalue
                      FROM collection_field_fieldvalue AS cff
                      ORDER BY cff.id_collection, id_field, id_fieldvalue,
                      type, score, score_fieldvalue"""
    )
    for index, rec in enumerate(records):
        sql = """UPDATE collection_field_fieldvalue
                 SET id = %%s
                 WHERE id_collection = %%s AND id_field = %%s
                 AND type = %%s AND score = %%s AND score_fieldvalue = %%s
                 AND id_fieldvalue %s
              """ % (
            "=%s" % (rec[2],) if rec[2] is not None else "is NULL",
        )
        run_sql(sql, (index + 1, rec[0], rec[1], rec[3], rec[4], rec[5]))

    # create new primary key with id
    op.create_primary_key("pk_collection_field_fieldvalue_id", "collection_field_fieldvalue", ["id"])

    # set id as autoincrement
    op.alter_column(
        "collection_field_fieldvalue",
        "id",
        existing_type=db.MediumInteger(9, unsigned=True),
        existing_nullable=False,
        autoincrement=True,
    )
Example #4
def do_upgrade():
    """ Implement your upgrades here  """
    op.rename_table('pid', 'pidSTORE')
    op.drop_index('idx_object_type_id', 'pidSTORE')
    op.alter_column('pidSTORE',
                    'type',
                    new_column_name='pid_type',
                    existing_type=mysql.VARCHAR(length=6),
                    nullable=False,
                    existing_server_default='')
    op.alter_column('pidSTORE',
                    'pid',
                    new_column_name='pid_value',
                    existing_type=mysql.VARCHAR(length=255),
                    nullable=False,
                    existing_server_default='')
    op.alter_column('pidSTORE',
                    'object_id',
                    new_column_name='object_value',
                    existing_type=mysql.VARCHAR(length=255),
                    nullable=True,
                    existing_server_default='')
    op.add_column(
        'pidSTORE',
        db.Column('pid_provider', db.String(length=255), nullable=False))
Example #5
def do_upgrade():
    """Implement your upgrades here."""
    # add column "id" in the table
    op.add_column(
        'collection_field_fieldvalue',
        db.Column('id', db.MediumInteger(9, unsigned=True), nullable=False))

    # set all new ids
    records = run_sql("""SELECT id_collection, id_field, id_fieldvalue,
                      type, score, score_fieldvalue
                      FROM collection_field_fieldvalue AS cff
                      ORDER BY cff.id_collection, id_field, id_fieldvalue,
                      type, score, score_fieldvalue""")
    for index, rec in enumerate(records):
        sql = """UPDATE collection_field_fieldvalue
                 SET id = %%s
                 WHERE id_collection = %%s AND id_field = %%s
                 AND type = %%s AND score = %%s AND score_fieldvalue = %%s
                 AND id_fieldvalue %s
              """ % ('=%s' % (rec[2], ) if rec[2] is not None else 'is NULL', )
        run_sql(sql, (index + 1, rec[0], rec[1], rec[3], rec[4], rec[5]))

    # create new primary key with id
    op.create_primary_key('pk_collection_field_fieldvalue_id',
                          'collection_field_fieldvalue', ['id'])

    # set id as autoincrement
    op.alter_column('collection_field_fieldvalue',
                    'id',
                    existing_type=db.MediumInteger(9, unsigned=True),
                    existing_nullable=False,
                    autoincrement=True)
Example #6
def do_upgrade():
    """ Implement your upgrades here  """
    op.rename_table('pid', 'pidSTORE')
    op.drop_index('idx_object_type_id', 'pidSTORE')
    op.alter_column(
        'pidSTORE', 'type',
        new_column_name='pid_type',
        existing_type=mysql.VARCHAR(length=6),
        nullable=False,
        existing_server_default='')
    op.alter_column(
        'pidSTORE', 'pid',
        new_column_name='pid_value',
        existing_type=mysql.VARCHAR(length=255),
        nullable=False,
        existing_server_default='')
    op.alter_column(
        'pidSTORE', 'object_id',
        new_column_name='object_value',
        existing_type=mysql.VARCHAR(length=255),
        nullable=True,
        existing_server_default='')
    op.add_column(
        'pidSTORE',
        db.Column('pid_provider', db.String(length=255), nullable=False)
    )
Example #7
def do_upgrade():
    try:
        op.alter_column(u'bibrec',
                        'additional_info',
                        existing_type=mysql.TEXT(),
                        type_=mysql.LONGTEXT(),
                        nullable=True)
    except OperationalError:
        op.add_column(
            'bibrec',
            sa.Column('additional_info', mysql.LONGTEXT(), nullable=True))
Example #8
def do_upgrade():
    """Migrate format references."""
    op.add_column('collection_format',
                  db.Column('format', db.String(length=10), nullable=False))
    run_sql('UPDATE collection_format cf JOIN format f ON f.id = cf.id_format '
            'SET cf.format = f.code')
    op.drop_constraint(None, 'collection_format', type_='primary')
    op.create_primary_key(None, 'collection_format',
                          ['id_collection', 'format'])
    op.drop_column('collection_format', 'id_format')
    op.drop_table('formatname')
    op.drop_table('format')
Example #9
def do_upgrade():
    """Migrate format references."""
    op.add_column('collection_format',
                  db.Column('format', db.String(length=10), nullable=False))
    run_sql('UPDATE collection_format cf JOIN format f ON f.id = cf.id_format '
            'SET cf.format = f.code')
    op.drop_constraint(None, 'collection_format', type_='primary')
    op.create_primary_key(None, 'collection_format',
                          ['id_collection', 'format'])
    op.drop_column('collection_format', 'id_format')
    op.drop_table('formatname')
    op.drop_table('format')
Example #10
def do_upgrade():
    try:
        op.alter_column(
            u'bibrec', 'additional_info',
            existing_type=mysql.TEXT(),
            type_=mysql.LONGTEXT(),
            nullable=True
        )
    except OperationalError:
        op.add_column('bibrec',
                      sa.Column('additional_info',
                                mysql.LONGTEXT(),
                                nullable=True))
Example #11
def do_upgrade():
    """Implement your upgrades here."""
    try:
        op.add_column(
            'bibfmt',
            sa.Column('kind',
                      sa.String(length=10),
                      server_default='',
                      nullable=False))
    except OperationalError:
        warnings.warn("*** Problem adding column bibfmt.kind. "
                      "Does it already exist? ***")

    op.alter_column('format',
                    'last_updated',
                    existing_type=mysql.DATETIME(),
                    nullable=True,
                    existing_server_default='0000-00-00 00:00:00')
Example #12
def do_upgrade():
    """Implement your upgrades here."""
    op.add_column('format', db.Column('mime_type',
                  db.String(length=255), unique=True, nullable=True))
    mime_type_dict = dict(
        xm='application/marcxml+xml',
        hm='application/marc',
        recjson='application/json',
        hx='application/x-bibtex',
        xn='application/x-nlm',
    )
    query = "UPDATE format SET mime_type=%s WHERE code=%s"
    for code, mime in mime_type_dict.items():
        params = (mime, code)
        try:
            run_sql(query, params)
        except Exception as e:
            warnings.warn("Failed to execute query {0}: {1}".format(query, e))
Example #13
def do_upgrade():
    """Implement your upgrades here."""
    # modify the database
    op.add_column('knwKB',
                  db.Column('is_api_accessible', db.Boolean(), nullable=False))
    op.add_column(
        'knwKB',
        db.Column('slug', db.String(length=255), nullable=False, default=True))

    # update knwKB table values
    res = run_sql("SELECT name FROM knwKB")
    for record in res:
        name = record[0]
        slug = generate_slug(name)
        run_sql(
            "UPDATE knwKB SET is_api_accessible = 1, slug = %s "
            "WHERE name = %s", (slug, name))

    # define unique constraint
    op.create_unique_constraint(None, 'knwKB', ['slug'])
Example #14
def do_upgrade():
    """Implement your upgrades here."""
    # modify the database
    op.add_column('knwKB',
                  db.Column('is_api_accessible',
                            db.Boolean(), nullable=False))
    op.add_column('knwKB',
                  db.Column('slug',
                            db.String(length=255),
                            nullable=False,
                            default=True))

    # update knwKB table values
    res = run_sql("SELECT name FROM knwKB")
    for record in res:
        name = record[0]
        slug = generate_slug(name)
        run_sql("UPDATE knwKB SET is_api_accessible = 1, slug = %s "
                "WHERE name = %s", (slug, name))

    # define unique constraint
    op.create_unique_constraint(None, 'knwKB', ['slug'])
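
Both knwKB recipes above rely on a generate_slug helper that the original module imports from Invenio. A minimal sketch of what such a helper could look like is shown below; the slugging rules (ASCII folding, lowercase, hyphen-joined words) are assumptions, and the real helper may additionally enforce uniqueness against existing slugs.

import re
import unicodedata


def generate_slug(text, delimiter='-'):
    """Hypothetical slug helper: ASCII-fold, lowercase and join words."""
    if isinstance(text, bytes):
        text = text.decode('utf-8')
    text = unicodedata.normalize('NFKD', text)
    text = text.encode('ascii', 'ignore').decode('ascii')
    return delimiter.join(re.findall(r'[a-z0-9]+', text.lower()))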
Example #15
def do_upgrade():
    """Implement your upgrades here."""
    op.add_column(
        'format',
        db.Column('mime_type',
                  db.String(length=255),
                  unique=True,
                  nullable=True))
    mime_type_dict = dict(
        xm='application/marcxml+xml',
        hm='application/marc',
        recjson='application/json',
        hx='application/x-bibtex',
        xn='application/x-nlm',
    )
    query = "UPDATE format SET mime_type=%s WHERE code=%s"
    for code, mime in mime_type_dict.items():
        params = (mime, code)
        try:
            run_sql(query, params)
        except Exception as e:
            warnings.warn("Failed to execute query {0}: {1}".format(query, e))
Example #16
def do_upgrade():
    """Implement your upgrades here."""
    try:
        op.add_column(
            'bibfmt',
            sa.Column(
                'kind',
                sa.String(length=10),
                server_default='',
                nullable=False
            )
        )
    except OperationalError:
        warnings.warn("*** Problem adding column bibfmt.kind. Does it already exist? ***")

    op.alter_column(
        'format',
        'last_updated',
        existing_type=mysql.DATETIME(),
        nullable=True,
        existing_server_default='0000-00-00 00:00:00'
    )
Example #17
def do_upgrade():
    """Implement your upgrades here."""
    try:
        op.add_column(
            'oaiHARVEST',
            sa.Column(
                'workflows',
                sa.String(length=255),
                server_default='',
                nullable=False
            )
        )

    except OperationalError:
        op.alter_column(
            'oaiHARVEST',
            'workflows',
            existing_type=sa.String(length=255),
            nullable=False,
            server_default=''
        )

    # Set a default workflow for rows that have none (backwards compatibility).
    all_data_objects = run_sql("SELECT id, workflows FROM oaiHARVEST")
    for object_id, workflows in all_data_objects:
        if not workflows:
            run_sql("UPDATE oaiHARVEST set workflows=%s WHERE id=%s",
                    ("oaiharvest_harvest_repositories", str(object_id)))

    try:
        op.drop_column('oaiHARVEST', 'frequency')
    except OperationalError as err:
        warnings.warn(
            "*** Error removing 'oaiHARVEST.frequency' column: {0} ***".format(
                str(err)
            )
        )
Example #18
def do_upgrade():
    """ Implement your upgrades here  """
    op.add_column(u'community', db.Column('fixed_points',
                  db.Integer(display_width=9), nullable=False))
    op.add_column(u'community',
                  db.Column('last_record_accepted', db.DateTime(),
                            nullable=False))
    op.add_column(u'community',
                  db.Column('ranking', db.Integer(display_width=9),
                            nullable=False))
Example #19
def do_upgrade():
    """Upgrade recipe.

    Adds two new columns (password_salt and password_scheme) and migrates
    emails to password salt.
    """
    op.add_column('user', db.Column('password_salt', db.String(length=255),
                                    nullable=True))
    op.add_column('user', db.Column('password_scheme', db.String(length=50),
                                    nullable=False))

    # Temporary column needed for data migration
    op.add_column('user', db.Column('new_password', db.String(length=255)))

    # Migrate emails to password_salt
    m = db.MetaData(bind=db.engine)
    m.reflect()
    u = m.tables['user']

    conn = db.engine.connect()
    conn.execute(u.update().values(
        password_salt=u.c.email,
        password_scheme='invenio_aes_encrypted_email'
    ))

    # Migrate password blob to password varchar.
    for row in conn.execute(select([u])):
        # NOTE: Empty string passwords were stored as empty strings
        # instead of a hashed version, hence they must be treated differently.
        legacy_pw = row[u.c.password] or mysql_aes_encrypt(row[u.c.email], "")

        stmt = u.update().where(
            u.c.id == row[u.c.id]
        ).values(
            new_password=hashlib.sha256(legacy_pw).hexdigest()
        )
        conn.execute(stmt)

    # Create index
    op.create_index(
        op.f('ix_user_password_scheme'),
        'user',
        ['password_scheme'],
        unique=False
    )

    # Drop old database column and rename new.
    op.drop_column('user', 'password')
    op.alter_column(
        'user', 'new_password',
        new_column_name='password',
        existing_type=mysql.VARCHAR(255),
        existing_nullable=True,
    )
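
The password-migration recipe above (duplicated below as Example #21) calls mysql_aes_encrypt, which the original module imports from Invenio's legacy password utilities. A minimal compatible sketch, assuming it mirrors MySQL's default AES_ENCRYPT behaviour (AES-128-ECB with the key XOR-folded into 16 bytes) and that PyCrypto/pycryptodome is available:

from Crypto.Cipher import AES  # assumed dependency, not an Invenio import


def _mysql_aes_key(key):
    # MySQL folds an arbitrary-length key into 16 bytes by cyclic XOR.
    folded = bytearray(16)
    for i, byte in enumerate(bytearray(key)):
        folded[i % 16] ^= byte
    return bytes(folded)


def mysql_aes_encrypt(data, key):
    """Sketch of MySQL's AES_ENCRYPT: AES-128-ECB, PKCS#7-style padding."""
    if not isinstance(data, bytes):
        data = data.encode('utf-8')
    if not isinstance(key, bytes):
        key = key.encode('utf-8')
    pad = 16 - len(data) % 16
    padded = bytes(data) + bytes(bytearray([pad] * pad))
    return AES.new(_mysql_aes_key(key), AES.MODE_ECB).encrypt(padded)

In the recipe the helper is only needed to reproduce the legacy ciphertext of empty-string passwords so that they can be re-hashed with SHA-256 like every other row.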
Example #20
def do_upgrade():
    op.drop_table(u'wtgTAG_usergroup')

    op.add_column('wtgTAG', db.Column(
        'group_access_rights', mysql.INTEGER(display_width=2), nullable=False))

    op.add_column(
        'wtgTAG', db.Column('id_usergroup', mysql.INTEGER(display_width=15),
                            server_default='0', nullable=True))
    op.add_column('wtgTAG_bibrec', db.Column(
        'annotation', db.Text(convert_unicode=True), nullable=True))
Example #21
def do_upgrade():
    """Upgrade recipe.

    Adds two new columns (password_salt and password_scheme) and migrates
    emails to password salt.
    """
    op.add_column(
        'user', db.Column('password_salt',
                          db.String(length=255),
                          nullable=True))
    op.add_column(
        'user',
        db.Column('password_scheme', db.String(length=50), nullable=False))

    # Temporary column needed for data migration
    op.add_column('user', db.Column('new_password', db.String(length=255)))

    # Migrate emails to password_salt
    m = db.MetaData(bind=db.engine)
    m.reflect()
    u = m.tables['user']

    conn = db.engine.connect()
    conn.execute(
        u.update().values(password_salt=u.c.email,
                          password_scheme='invenio_aes_encrypted_email'))

    # Migrate password blob to password varchar.
    for row in conn.execute(select([u])):
        # NOTE: Empty string passwords were stored as empty strings
        # instead of a hashed version, hence they must be treated differently.
        legacy_pw = row[u.c.password] or mysql_aes_encrypt(row[u.c.email], "")

        stmt = u.update().where(u.c.id == row[u.c.id]).values(
            new_password=hashlib.sha256(legacy_pw).hexdigest())
        conn.execute(stmt)

    # Create index
    op.create_index(op.f('ix_user_password_scheme'),
                    'user', ['password_scheme'],
                    unique=False)

    # Drop old database column and rename new.
    op.drop_column('user', 'password')
    op.alter_column(
        'user',
        'new_password',
        new_column_name='password',
        existing_type=mysql.VARCHAR(255),
        existing_nullable=True,
    )
Example #22
def do_upgrade():
    op.drop_table(u'wtgTAG_usergroup')

    op.add_column(
        'wtgTAG',
        db.Column('group_access_rights',
                  mysql.INTEGER(display_width=2),
                  nullable=False))

    op.add_column(
        'wtgTAG',
        db.Column('id_usergroup',
                  mysql.INTEGER(display_width=15),
                  server_default='0',
                  nullable=True))
    op.add_column(
        'wtgTAG_bibrec',
        db.Column('annotation', db.Text(convert_unicode=True), nullable=True))
Example #23
def do_upgrade():
    # History
    op.add_column('hstDOCUMENT', db.Column(
        'id', mysql.INTEGER(display_width=15), nullable=False))
    op.add_column('hstRECORD', db.Column(
        'id', mysql.INTEGER(display_width=15), nullable=False))

    op.alter_column('hstRECORD', 'affected_fields',
                    existing_type=mysql.TEXT(),
                    nullable=True)

    # OAI Harvest
    op.drop_column('oaiHARVEST', u'bibconvertcfgfile')
    op.drop_column('oaiHARVEST', u'bibfilterprogram')

    # xtrJOB
    op.drop_column('xtrJOB', u'last_recid')

    # Record
    op.add_column("bibrec", db.Column("additional_info", db.JSON))
Example #24
def do_upgrade():
    # History
    op.add_column(
        'hstDOCUMENT',
        db.Column('id', mysql.INTEGER(display_width=15), nullable=False))
    op.add_column(
        'hstRECORD',
        db.Column('id', mysql.INTEGER(display_width=15), nullable=False))

    op.alter_column('hstRECORD',
                    'affected_fields',
                    existing_type=mysql.TEXT(),
                    nullable=True)

    # OAI Harvest
    op.drop_column('oaiHARVEST', u'bibconvertcfgfile')
    op.drop_column('oaiHARVEST', u'bibfilterprogram')

    # xtrJOB
    op.drop_column('xtrJOB', u'last_recid')

    # Record
    op.add_column("bibrec", db.Column("additional_info", db.JSON))
Example #25
def do_upgrade():
    """Implement your upgrades here."""
    # Table sbmCOLLECTION_sbmCOLLECTION

    # add column "id" in the table
    op.add_column('sbmCOLLECTION_sbmCOLLECTION',
                  db.Column('id', db.Integer(11), nullable=False))

    # set all new ids
    records = run_sql("""SELECT id_father, id_son FROM """
                      """sbmCOLLECTION_sbmCOLLECTION AS ssc """
                      """ORDER BY ssc.id_father, ssc.id_son""")
    for index, rec in enumerate(records):
        run_sql("""UPDATE sbmCOLLECTION_sbmCOLLECTION
                SET id = %s WHERE id_father = %s AND id_son = %s """,
                (index + 1, rec[0], rec[1]))

    # drop primary keys
    try:
        op.drop_constraint(None, 'sbmCOLLECTION_sbmCOLLECTION',
                           type_='primary')
    except OperationalError:
        # the primary key is already dropped
        warnings.warn("""Primary key of sbmCOLLECTION_sbmCOLLECTION """
                      """table has been already dropped.""")

    # create new primary key with id
    op.create_primary_key('pk_sbmCOLLECTION_sbmCOLLECTION_id',
                          'sbmCOLLECTION_sbmCOLLECTION', ['id'])
    # set id as autoincrement
    op.alter_column('sbmCOLLECTION_sbmCOLLECTION', 'id',
                    existing_type=db.Integer(11),
                    existing_nullable=False, autoincrement=True)
    # fix columns id_father and id_son
    op.alter_column('sbmCOLLECTION_sbmCOLLECTION', 'id_father',
                    existing_type=db.Integer(11),
                    nullable=True, server_default=None)
    op.alter_column('sbmCOLLECTION_sbmCOLLECTION', 'id_son',
                    existing_type=db.Integer(11),
                    nullable=False, server_default=None)
    op.create_index('id_father', 'sbmCOLLECTION_sbmCOLLECTION',
                    columns=['id_father'])

    # Table sbmCOLLECTION_sbmDOCTYPE

    # add column "id" in the table
    op.add_column('sbmCOLLECTION_sbmDOCTYPE',
                  db.Column('id', db.Integer(11), nullable=False))

    # set all new ids
    records = run_sql("""SELECT id_father, id_son
                      FROM sbmCOLLECTION_sbmDOCTYPE AS ssd
                      ORDER BY ssd.id_father, ssd.id_son""")
    for index, rec in enumerate(records):
        run_sql("""UPDATE sbmCOLLECTION_sbmDOCTYPE
                SET id = %s WHERE id_father = %s AND id_son = %s """,
                (index + 1, rec[0], rec[1]))

    # drop primary keys
    op.drop_constraint('id_father', 'sbmCOLLECTION_sbmDOCTYPE', type_='primary')
    # create new primary key with id
    op.create_primary_key('pk_sbmCOLLECTION_sbmDOCTYPE_id',
                          'sbmCOLLECTION_sbmDOCTYPE', ['id'])
    # set id as autoincrement
    op.alter_column('sbmCOLLECTION_sbmDOCTYPE', 'id',
                    existing_type=db.Integer(11),
                    existing_nullable=False, autoincrement=True)
    # fix columns id_father and id_son
    op.alter_column('sbmCOLLECTION_sbmDOCTYPE', 'id_father',
                    existing_type=db.Integer(11),
                    nullable=True, server_default=None)
    op.alter_column('sbmCOLLECTION_sbmDOCTYPE', 'id_son',
                    existing_type=db.Char(10),
                    nullable=False, server_default=None)
    op.create_index('id_father', 'sbmCOLLECTION_sbmDOCTYPE',
                    columns=['id_father'])
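
Several recipes above repeat the same four steps: add an id column, backfill sequential values, promote id to the primary key, and flag it as auto-increment. A hypothetical helper (not part of Invenio) capturing that pattern could look like the sketch below; it assumes the recipe's usual op, db and run_sql imports and that the ordering columns contain no NULLs (the collection_field_fieldvalue recipes handle that case explicitly).

def add_surrogate_id(table, order_by):
    """Add an auto-increment ``id`` primary key to ``table`` (sketch)."""
    cols = ', '.join(order_by)
    where = ' AND '.join('{0} = %s'.format(col) for col in order_by)
    # add column "id" to the table
    op.add_column(table, db.Column('id', db.Integer(), nullable=False))
    # backfill sequential ids in a stable order
    records = run_sql('SELECT {0} FROM {1} ORDER BY {0}'.format(cols, table))
    for index, rec in enumerate(records):
        run_sql('UPDATE {0} SET id = %s WHERE {1}'.format(table, where),
                (index + 1,) + tuple(rec))
    # promote id to the primary key and make it auto-increment
    op.create_primary_key('pk_{0}_id'.format(table), table, ['id'])
    op.alter_column(table, 'id', existing_type=db.Integer(),
                    existing_nullable=False, autoincrement=True)

For instance, the sbmGFILERESULT block in the examples below would reduce to add_surrogate_id('sbmGFILERESULT', ['FORMAT', 'RESULT']) plus its two index creations and the try/except around the primary-key drop.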
Example #26
def do_upgrade():
    """Implement your upgrades here."""
    op.add_column(
        'user', db.Column('family_name', db.String(length=255), nullable=True))
    op.add_column(
        'user', db.Column('given_names', db.String(length=255), nullable=True))
Example #27
def do_upgrade():
    """Implement your upgrades here."""
    # table sbmFORMATEXTENSION

    # add "id" column
    op.add_column('sbmFORMATEXTENSION',
                  db.Column('id', db.Integer(), nullable=False))
    # set all ids
    records = run_sql("""SELECT FILE_FORMAT, FILE_EXTENSION FROM """
                      """sbmFORMATEXTENSION AS sbm """
                      """ORDER BY sbm.FILE_FORMAT, sbm.FILE_EXTENSION""")
    for index, rec in enumerate(records):
        run_sql("""UPDATE sbmFORMATEXTENSION """
                """SET id = %s """
                """ WHERE FILE_FORMAT = %s AND """
                """       FILE_EXTENSION = %s """,
                (index + 1, rec[0], rec[1]))
    # remove primary key
    try:
        op.drop_constraint(None, 'sbmFORMATEXTENSION',
                           type_='primary')
    except OperationalError:
        # the primary key is already dropped
        warnings.warn("""Primary key of sbmFORMATEXTENSION """
                      """table has been already dropped.""")
    # set id as new primary key
    op.create_primary_key('pk_sbmFORMATEXTENSION_id',
                          'sbmFORMATEXTENSION', ['id'])
    # set id as autoincrement
    op.alter_column('sbmFORMATEXTENSION', 'id',
                    existing_type=db.Integer(),
                    existing_nullable=False, autoincrement=True)
    # create indices
    op.create_index('sbmformatextension_file_extension_idx',
                    'sbmFORMATEXTENSION', columns=['FILE_EXTENSION'],
                    unique=False, mysql_length=10)
    op.create_index('sbmformatextension_file_format_idx',
                    'sbmFORMATEXTENSION', columns=['FILE_FORMAT'],
                    unique=False, mysql_length=50)

    # table sbmGFILERESULT

    # add "id" column
    op.add_column('sbmGFILERESULT',
                  db.Column('id', db.Integer(), nullable=False))
    # set all ids
    records = run_sql("""SELECT FORMAT, RESULT FROM """
                      """sbmGFILERESULT AS sbm """
                      """ORDER BY sbm.FORMAT, sbm.RESULT""")
    for index, rec in enumerate(records):
        run_sql("""UPDATE sbmGFILERESULT """
                """SET id = %s """
                """ WHERE FORMAT = %s AND """
                """       RESULT = %s """,
                (index + 1, rec[0], rec[1]))
    # remove primary key
    try:
        op.drop_constraint(None, 'sbmGFILERESULT',
                           type_='primary')
    except OperationalError:
        # the primary key is already dropped
        warnings.warn("""Primary key of sbmGFILERESULT """
                      """table has been already dropped.""")
    # set id as new primary key
    op.create_primary_key('pk_sbmGFILERESULT_id',
                          'sbmGFILERESULT', ['id'])
    # set id as autoincrement
    op.alter_column('sbmGFILERESULT', 'id',
                    existing_type=db.Integer(),
                    existing_nullable=False, autoincrement=True)
    # create indices
    op.create_index('sbmgfileresult_format_idx',
                    'sbmGFILERESULT', columns=['FORMAT'],
                    unique=False, mysql_length=50)
    op.create_index('sbmgfileresult_result_idx',
                    'sbmGFILERESULT', columns=['RESULT'],
                    unique=False, mysql_length=50)
Example #28
def do_upgrade():
    import invenio
    import sys
    import types

    class CoolDict(dict):
        pass

    class CoolList(list):
        pass

    # Fake old non-existing module
    m = types.ModuleType('invenio.bibfield_utils')
    m.CoolDict = CoolDict
    m.CoolList = CoolList
    sys.modules['invenio.bibfield_utils'] = m
    invenio.bibfield_utils = m

    # Minimal table definitions
    bwlobject = table(
        'bwlOBJECT',
        column('id', db.Integer(primary_key=True)),
        column('extra_data', db.MutableDict.as_mutable(db.PickleType)),
        column('_extra_data', db.LargeBinary()),
        column('_data', db.LargeBinary()),
    )

    bwlworkflow = table(
        'bwlWORKFLOW',
        column('uuid', db.String(36)),
        column('extra_data', db.MutableDict.as_mutable(db.PickleType)),
        column('_extra_data', db.LargeBinary()),
    )

    bwlobjectlogging = table(
        'bwlOBJECTLOGGING',
        column('id_object', db.Integer()),
        column('id_bibworkflowobject', db.Integer()),
    )

    bwlworkflowlogging = table(
        'bwlWORKFLOWLOGGING',
        column('id_object', db.String()),
        column('id_workflow', db.String()),
    )

    conn = op.get_bind()

    # Object table
    op.add_column('bwlOBJECT', db.Column(
                  '_extra_data', db.LargeBinary(), nullable=False))

    query = select(columns=['id', 'extra_data', '_data'], from_obj=bwlobject)
    for r in conn.execute(query):
        # Decode and re-encode old value
        value = base64.b64encode(cPickle.dumps(cPickle.loads(r.extra_data)))
        # Ensure data value can be read
        data_value = base64.b64encode(cPickle.dumps(
            transform_data(cPickle.loads(base64.b64decode(r._data)))
        ))

        # Update value in table.
        op.execute(
            bwlobject.update().where(bwlobject.c.id == r.id).values(
                _extra_data=value,
                _data=data_value,
            )
        )

    op.drop_column('bwlOBJECT', u'extra_data')
    op.alter_column('bwlOBJECT', 'data_type',
                    existing_type=mysql.VARCHAR(length=50),
                    nullable=True)
    op.alter_column('bwlOBJECT', 'id_workflow',
                    existing_type=mysql.VARCHAR(length=36),
                    nullable=True)

    # Workflow table
    op.add_column('bwlWORKFLOW', db.Column(
        '_extra_data', db.LargeBinary(), nullable=False))
    query = select(columns=['uuid', 'extra_data'], from_obj=bwlworkflow)
    for r in conn.execute(query):
        # Decode and re-encode old value
        value = base64.b64encode(cPickle.dumps(cPickle.loads(r.extra_data)))
        # Update value in table.
        op.execute(
            bwlworkflow.update().where(bwlworkflow.c.uuid == r.uuid).values(
                _extra_data=value
            )
        )
    op.drop_column('bwlWORKFLOW', u'extra_data')

    # Object logging
    op.add_column('bwlOBJECTLOGGING', db.Column(
        'id_object', mysql.INTEGER(display_width=255), nullable=False))
    op.execute(
        bwlobjectlogging.update().values({
            bwlobjectlogging.c.id_object:
            bwlobjectlogging.c.id_bibworkflowobject
        })
    )
    op.drop_column('bwlOBJECTLOGGING', u'id_bibworkflowobject')
    op.drop_column('bwlOBJECTLOGGING', u'extra_data')
    op.drop_column('bwlOBJECTLOGGING', u'error_msg')

    # Workflow logging
    op.add_column('bwlWORKFLOWLOGGING', db.Column(
        'id_object', db.String(length=255), nullable=False))
    op.execute(
        bwlworkflowlogging.update().values({
            bwlworkflowlogging.c.id_object:
            bwlworkflowlogging.c.id_workflow
        })
    )
    op.drop_column('bwlWORKFLOWLOGGING', u'id_workflow')
    op.drop_column('bwlWORKFLOWLOGGING', u'extra_data')
    op.drop_column('bwlWORKFLOWLOGGING', u'error_msg')
Example #29
def do_upgrade():
    """Implement your upgrades here."""
    # table sbmFORMATEXTENSION

    # add "id" column
    op.add_column('sbmFORMATEXTENSION',
                  db.Column('id', db.Integer(), nullable=False))
    # set all ids
    records = run_sql("""SELECT FILE_FORMAT, FILE_EXTENSION FROM """
                      """sbmFORMATEXTENSION AS sbm """
                      """ORDER BY sbm.FILE_FORMAT, sbm.FILE_EXTENSION""")
    for index, rec in enumerate(records):
        run_sql(
            """UPDATE sbmFORMATEXTENSION """
            """SET id = %s """
            """ WHERE FILE_FORMAT = %s AND """
            """       FILE_EXTENSION = %s """, (index + 1, rec[0], rec[1]))
    # remove primary key
    try:
        op.drop_constraint(None, 'sbmFORMATEXTENSION', type_='primary')
    except OperationalError:
        # the primary key is already dropped
        warnings.warn("""Primary key of sbmFORMATEXTENSION """
                      """table has been already dropped.""")
    # set id as new primary key
    op.create_primary_key('pk_sbmFORMATEXTENSION_id', 'sbmFORMATEXTENSION',
                          ['id'])
    # set id as autoincrement
    op.alter_column('sbmFORMATEXTENSION',
                    'id',
                    existing_type=db.Integer(),
                    existing_nullable=False,
                    autoincrement=True)
    # create indices
    op.create_index('sbmformatextension_file_extension_idx',
                    'sbmFORMATEXTENSION',
                    columns=['FILE_EXTENSION'],
                    unique=False,
                    mysql_length=10)
    op.create_index('sbmformatextension_file_format_idx',
                    'sbmFORMATEXTENSION',
                    columns=['FILE_FORMAT'],
                    unique=False,
                    mysql_length=50)

    # table sbmGFILERESULT

    # add "id" column
    op.add_column('sbmGFILERESULT',
                  db.Column('id', db.Integer(), nullable=False))
    # set all ids
    records = run_sql("""SELECT FORMAT, RESULT FROM """
                      """sbmGFILERESULT AS sbm """
                      """ORDER BY sbm.FORMAT, sbm.RESULT""")
    for index, rec in enumerate(records):
        run_sql(
            """UPDATE sbmGFILERESULT """
            """SET id = %s """
            """ WHERE FORMAT = %s AND """
            """       RESULT = %s """, (index + 1, rec[0], rec[1]))
    # remove primary key
    try:
        op.drop_constraint(None, 'sbmGFILERESULT', type_='primary')
    except OperationalError:
        # the primary key is already dropped
        warnings.warn("""Primary key of sbmGFILERESULT """
                      """table has been already dropped.""")
    # set id as new primary key
    op.create_primary_key('pk_sbmGFILERESULT_id', 'sbmGFILERESULT', ['id'])
    # set id as autoincrement
    op.alter_column('sbmGFILERESULT',
                    'id',
                    existing_type=db.Integer(),
                    existing_nullable=False,
                    autoincrement=True)
    # create indices
    op.create_index('sbmgfileresult_format_idx',
                    'sbmGFILERESULT',
                    columns=['FORMAT'],
                    unique=False,
                    mysql_length=50)
    op.create_index('sbmgfileresult_result_idx',
                    'sbmGFILERESULT',
                    columns=['RESULT'],
                    unique=False,
                    mysql_length=50)
Example #30
def do_upgrade():
    """Implement your upgrades here."""
    op.add_column("user", db.Column("family_name", db.String(length=255), nullable=True))
    op.add_column("user", db.Column("given_names", db.String(length=255), nullable=True))
Example #31
def do_upgrade():
    import invenio
    import sys
    import types

    class CoolDict(dict):
        pass

    class CoolList(list):
        pass

    # Fake old non-existing module
    m = types.ModuleType('invenio.bibfield_utils')
    m.CoolDict = CoolDict
    m.CoolList = CoolList
    sys.modules['invenio.bibfield_utils'] = m
    invenio.bibfield_utils = m

    # Minimal table definitions
    bwlobject = table(
        'bwlOBJECT',
        column('id', db.Integer(primary_key=True)),
        column('extra_data', db.MutableDict.as_mutable(db.PickleType)),
        column('_extra_data', db.LargeBinary()),
        column('_data', db.LargeBinary()),
    )

    bwlworkflow = table(
        'bwlWORKFLOW',
        column('uuid', db.String(36)),
        column('extra_data', db.MutableDict.as_mutable(db.PickleType)),
        column('_extra_data', db.LargeBinary()),
    )

    bwlobjectlogging = table(
        'bwlOBJECTLOGGING',
        column('id_object', db.Integer()),
        column('id_bibworkflowobject', db.Integer()),
    )

    bwlworkflowlogging = table(
        'bwlWORKFLOWLOGGING',
        column('id_object', db.String()),
        column('id_workflow', db.String()),
    )

    conn = op.get_bind()

    # Object table
    op.add_column('bwlOBJECT',
                  db.Column('_extra_data', db.LargeBinary(), nullable=False))

    query = select(columns=['id', 'extra_data', '_data'], from_obj=bwlobject)
    for r in conn.execute(query):
        # Decode and re-encode old value
        value = base64.b64encode(cPickle.dumps(cPickle.loads(r.extra_data)))
        # Ensure data value can be read
        data_value = base64.b64encode(
            cPickle.dumps(
                transform_data(cPickle.loads(base64.b64decode(r._data)))))

        # Update value in table.
        op.execute(bwlobject.update().where(bwlobject.c.id == r.id).values(
            _extra_data=value,
            _data=data_value,
        ))

    op.drop_column('bwlOBJECT', u'extra_data')
    op.alter_column('bwlOBJECT',
                    'data_type',
                    existing_type=mysql.VARCHAR(length=50),
                    nullable=True)
    op.alter_column('bwlOBJECT',
                    'id_workflow',
                    existing_type=mysql.VARCHAR(length=36),
                    nullable=True)

    # Workflow table
    op.add_column('bwlWORKFLOW',
                  db.Column('_extra_data', db.LargeBinary(), nullable=False))
    query = select(columns=['uuid', 'extra_data'], from_obj=bwlworkflow)
    for r in conn.execute(query):
        # Decode and re-encode old value
        value = base64.b64encode(cPickle.dumps(cPickle.loads(r.extra_data)))
        # Update value in table.
        op.execute(bwlworkflow.update().where(
            bwlworkflow.c.uuid == r.uuid).values(_extra_data=value))
    op.drop_column('bwlWORKFLOW', u'extra_data')

    # Object logging
    op.add_column(
        'bwlOBJECTLOGGING',
        db.Column('id_object',
                  mysql.INTEGER(display_width=255),
                  nullable=False))
    op.execute(bwlobjectlogging.update().values({
        bwlobjectlogging.c.id_object:
        bwlobjectlogging.c.id_bibworkflowobject
    }))
    op.drop_column('bwlOBJECTLOGGING', u'id_bibworkflowobject')
    op.drop_column('bwlOBJECTLOGGING', u'extra_data')
    op.drop_column('bwlOBJECTLOGGING', u'error_msg')

    # Workflow logging
    op.add_column(
        'bwlWORKFLOWLOGGING',
        db.Column('id_object', db.String(length=255), nullable=False))
    op.execute(bwlworkflowlogging.update().values(
        {bwlworkflowlogging.c.id_object: bwlworkflowlogging.c.id_workflow}))
    op.drop_column('bwlWORKFLOWLOGGING', u'id_workflow')
    op.drop_column('bwlWORKFLOWLOGGING', u'extra_data')
    op.drop_column('bwlWORKFLOWLOGGING', u'error_msg')