def do_upgrade():
    """Upgrade recipe.

    Adds two new columns (``password_salt`` and ``password_scheme``),
    migrates emails to ``password_salt``, re-hashes the legacy password
    blob into a SHA-256 hex digest held in a temporary varchar column,
    and finally replaces the old ``password`` column with the new one.
    """
    op.add_column('user', db.Column('password_salt', db.String(length=255),
                                    nullable=True))
    op.add_column('user', db.Column('password_scheme', db.String(length=50),
                                    nullable=False))

    # Temporary column needed for data migration; renamed to 'password'
    # at the very end, once the old blob column has been dropped.
    op.add_column('user', db.Column('new_password', db.String(length=255)))

    # Reflect the live 'user' table so data-migration queries can run
    # against the actual schema (including the columns just added).
    m = db.MetaData(bind=db.engine)
    m.reflect()
    u = m.tables['user']

    conn = db.engine.connect()
    try:
        # Migrate emails to password_salt.
        conn.execute(u.update().values(
            password_salt=u.c.email,
            password_scheme='invenio_aes_encrypted_email'
        ))

        # Migrate password blob to password varchar.
        for row in conn.execute(select([u])):
            # NOTE: Empty string passwords were stored as empty strings
            # instead of a hashed version, hence they must be treated
            # differently: re-encrypt the empty password so every row
            # ends up hashed the same way.
            legacy_pw = row[u.c.password] or \
                mysql_aes_encrypt(row[u.c.email], "")

            stmt = u.update().where(
                u.c.id == row[u.c.id]
            ).values(
                new_password=hashlib.sha256(legacy_pw).hexdigest()
            )
            conn.execute(stmt)
    finally:
        # Fix: the connection was previously leaked; always return it
        # to the pool even if a statement above raises.
        conn.close()

    # Create index
    op.create_index(
        op.f('ix_user_password_scheme'),
        'user',
        ['password_scheme'],
        unique=False
    )

    # Drop old database column and rename new.
    op.drop_column('user', 'password')
    op.alter_column(
        'user', 'new_password',
        new_column_name='password',
        existing_type=mysql.VARCHAR(255),
        existing_nullable=True,
    )
# Example #2
def do_upgrade():
    """Rebuild the ``knwKBRVAL`` primary key without the ``id`` column.

    Drops the surrogate ``id`` column, promotes (``m_key``,
    ``id_knwKB``) to the primary key, and removes the server-side
    default on ``m_key`` while leaving its type and NOT NULL
    constraint untouched.
    """
    op.drop_column('knwKBRVAL', 'id')
    op.create_primary_key('pkey', 'knwKBRVAL', ['m_key', 'id_knwKB'])
    # Type and nullability are restated unchanged; only the server
    # default actually changes ('' -> none).
    op.alter_column(
        'knwKBRVAL',
        'm_key',
        existing_type=sa.String(length=255),
        type_=sa.String(length=255),
        existing_server_default='',
        server_default=None,
        existing_nullable=False,
        nullable=False,
    )
# Example #3
def do_upgrade():
    """Migrate format references.

    Replaces the numeric ``id_format`` reference on
    ``collection_format`` with the format's string ``code`` (stored in
    a new ``format`` column), rebuilds the primary key accordingly,
    and drops the now-unused ``format``/``formatname`` tables.
    """
    op.add_column('collection_format',
                  db.Column('format', db.String(length=10), nullable=False))
    # Backfill the new column from format.code before anything is dropped.
    run_sql('UPDATE collection_format cf JOIN format f ON f.id = cf.id_format '
            'SET cf.format = f.code')
    # The old primary key must be dropped first so it can be recreated
    # over the new (id_collection, format) pair.
    op.drop_constraint(None, 'collection_format', type_='primary')
    op.create_primary_key(None, 'collection_format',
                          ['id_collection', 'format'])
    op.drop_column('collection_format', 'id_format')
    # Format tables are obsolete once the code lives on collection_format.
    op.drop_table('formatname')
    op.drop_table('format')
def do_upgrade():
    """Migrate format references."""
    # New string column that will replace the numeric id_format reference.
    op.add_column(
        'collection_format',
        db.Column('format', db.String(length=10), nullable=False))
    # Backfill it from the format table's code column.
    run_sql('UPDATE collection_format cf JOIN format f ON f.id = cf.id_format '
            'SET cf.format = f.code')
    # Rebuild the primary key over (id_collection, format).
    op.drop_constraint(None, 'collection_format', type_='primary')
    op.create_primary_key(
        None, 'collection_format', ['id_collection', 'format'])
    # Drop the numeric reference and the now-unused format tables.
    op.drop_column('collection_format', 'id_format')
    op.drop_table('formatname')
    op.drop_table('format')
# Example #5
def do_upgrade():
    """Upgrade recipe.

    Adds two new columns (password_salt and password_scheme), migrates
    emails to password salt, re-hashes the legacy password blob into a
    SHA-256 hex digest, and swaps the old ``password`` column for the
    new varchar one.
    """
    op.add_column(
        'user', db.Column('password_salt',
                          db.String(length=255),
                          nullable=True))
    op.add_column(
        'user',
        db.Column('password_scheme', db.String(length=50), nullable=False))

    # Temporary column needed for data migration
    op.add_column('user', db.Column('new_password', db.String(length=255)))

    # Reflect the live table so queries see the columns just added.
    m = db.MetaData(bind=db.engine)
    m.reflect()
    u = m.tables['user']

    conn = db.engine.connect()
    try:
        # Migrate emails to password_salt
        conn.execute(
            u.update().values(password_salt=u.c.email,
                              password_scheme='invenio_aes_encrypted_email'))

        # Migrate password blob to password varchar.
        for row in conn.execute(select([u])):
            # NOTE: Empty string passwords were stored as empty strings
            # instead of a hashed version, hence they must be treated
            # differently.
            legacy_pw = row[u.c.password] or mysql_aes_encrypt(
                row[u.c.email], "")

            stmt = u.update().where(u.c.id == row[u.c.id]).values(
                new_password=hashlib.sha256(legacy_pw).hexdigest())
            conn.execute(stmt)
    finally:
        # Fix: connection was previously leaked; always release it.
        conn.close()

    # Create index
    op.create_index(op.f('ix_user_password_scheme'),
                    'user', ['password_scheme'],
                    unique=False)

    # Drop old database column and rename new.
    op.drop_column('user', 'password')
    op.alter_column(
        'user',
        'new_password',
        new_column_name='password',
        existing_type=mysql.VARCHAR(255),
        existing_nullable=True,
    )
def do_upgrade():
    """Add a NOT NULL ``workflows`` column to ``oaiHARVEST``.

    Backfills rows that have no workflow configured with the default
    harvesting workflow, then drops the obsolete ``frequency`` column
    on a best-effort basis.
    """
    # Add the column; if it already exists (e.g. a re-run), tighten the
    # existing column's constraints instead.
    try:
        op.add_column(
            'oaiHARVEST',
            sa.Column(
                'workflows',
                sa.String(length=255),
                server_default='',
                nullable=False
            )
        )

    except OperationalError:
        op.alter_column(
            'oaiHARVEST',
            'workflows',
            existing_type=sa.String(length=255),
            nullable=False,
            server_default=''
        )

    # Set default workflow with backwards compatibility for those who have none.
    for harvest_id, configured in run_sql(
            "SELECT id, workflows FROM oaiHARVEST"):
        if configured:
            continue
        run_sql("UPDATE oaiHARVEST set workflows=%s WHERE id=%s",
                ("oaiharvest_harvest_repositories", str(harvest_id)))

    # Dropping 'frequency' is best effort: warn instead of aborting the
    # upgrade if the column is already gone or cannot be removed.
    try:
        op.drop_column('oaiHARVEST', 'frequency')
    except OperationalError as err:
        warnings.warn(
            "*** Error removing 'oaiHARVEST.frequency' column: {0} ***".format(
                str(err)
            )
        )
def do_upgrade():
    """Apply schema changes to history, OAI harvest, xtrJOB and record
    tables."""
    # History tables gain an integer id column.
    op.add_column(
        'hstDOCUMENT',
        db.Column('id', mysql.INTEGER(display_width=15), nullable=False))
    op.add_column(
        'hstRECORD',
        db.Column('id', mysql.INTEGER(display_width=15), nullable=False))

    # affected_fields becomes optional on history records.
    op.alter_column(
        'hstRECORD', 'affected_fields',
        existing_type=mysql.TEXT(),
        nullable=True)

    # OAI Harvest: drop obsolete bibconvert/bibfilter configuration columns.
    op.drop_column('oaiHARVEST', u'bibconvertcfgfile')
    op.drop_column('oaiHARVEST', u'bibfilterprogram')

    # xtrJOB no longer tracks the last processed record id.
    op.drop_column('xtrJOB', u'last_recid')

    # Records gain a JSON column for additional info.
    op.add_column("bibrec", db.Column("additional_info", db.JSON))
# Example #8
def do_upgrade():
    """Upgrade recipe.

    Adds ``id`` columns to the history tables, relaxes
    ``hstRECORD.affected_fields`` to nullable, drops obsolete OAI
    harvest and ``xtrJOB`` columns, and adds a JSON
    ``additional_info`` column to ``bibrec``.
    """
    # History
    op.add_column(
        'hstDOCUMENT',
        db.Column('id', mysql.INTEGER(display_width=15), nullable=False))
    op.add_column(
        'hstRECORD',
        db.Column('id', mysql.INTEGER(display_width=15), nullable=False))

    # affected_fields may now be empty for a history entry.
    op.alter_column('hstRECORD',
                    'affected_fields',
                    existing_type=mysql.TEXT(),
                    nullable=True)

    # OAI Harvest: these configuration columns are no longer used.
    op.drop_column('oaiHARVEST', u'bibconvertcfgfile')
    op.drop_column('oaiHARVEST', u'bibfilterprogram')

    # xtrJOB
    op.drop_column('xtrJOB', u'last_recid')

    # Record
    op.add_column("bibrec", db.Column("additional_info", db.JSON))
def do_upgrade():
    """Upgrade recipe for the bwl* (workflow engine) tables.

    Re-encodes the pickled ``extra_data`` payloads of ``bwlOBJECT`` and
    ``bwlWORKFLOW`` as base64-wrapped pickles stored in new binary
    ``_extra_data`` columns, passes ``bwlOBJECT._data`` through
    ``transform_data``, and renames the logging tables' reference
    columns to ``id_object``.

    NOTE(review): ``cPickle.loads`` on database content is unsafe for
    untrusted data; acceptable here only because these rows were
    written by this application.
    """
    import invenio
    import sys
    import types

    # Minimal stand-ins for classes referenced by old pickled payloads.
    class CoolDict(dict):
        pass

    class CoolList(list):
        pass

    # Fake old non-existing module so unpickling payloads that
    # reference 'invenio.bibfield_utils' still resolves.
    m = types.ModuleType('invenio.bibfield_utils')
    m.CoolDict = CoolDict
    m.CoolList = CoolList
    sys.modules['invenio.bibfield_utils'] = m
    invenio.bibfield_utils = m

    # Minimal table definitions (lightweight SQLAlchemy constructs; the
    # full models are deliberately not imported in a migration).
    bwlobject = table(
        'bwlOBJECT',
        column('id', db.Integer(primary_key=True)),
        column('extra_data', db.MutableDict.as_mutable(db.PickleType)),
        column('_extra_data', db.LargeBinary()),
        column('_data', db.LargeBinary()),
    )

    bwlworkflow = table(
        'bwlWORKFLOW',
        column('uuid', db.String(36)),
        column('extra_data', db.MutableDict.as_mutable(db.PickleType)),
        column('_extra_data', db.LargeBinary()),
    )

    bwlobjectlogging = table(
        'bwlOBJECTLOGGING',
        column('id_object', db.Integer()),
        column('id_bibworkflowobject', db.Integer()),
    )

    bwlworkflowlogging = table(
        'bwlWORKFLOWLOGGING',
        column('id_object', db.String()),
        column('id_workflow', db.String()),
    )

    conn = op.get_bind()

    # Object table
    op.add_column('bwlOBJECT', db.Column(
                  '_extra_data', db.LargeBinary(), nullable=False))

    query = select(columns=['id', 'extra_data', '_data'], from_obj=bwlobject)
    for r in conn.execute(query):
        # Decode and re-encode old value
        value = base64.b64encode(cPickle.dumps(cPickle.loads(r.extra_data)))
        # Ensure data value can be read
        data_value = base64.b64encode(cPickle.dumps(
            transform_data(cPickle.loads(base64.b64decode(r._data)))
        ))

        # Update value in table.
        op.execute(
            bwlobject.update().where(bwlobject.c.id == r.id).values(
                _extra_data=value,
                _data=data_value,
            )
        )

    # Old pickle column replaced by _extra_data; relax two columns.
    op.drop_column('bwlOBJECT', u'extra_data')
    op.alter_column('bwlOBJECT', 'data_type',
                    existing_type=mysql.VARCHAR(length=50),
                    nullable=True)
    op.alter_column('bwlOBJECT', 'id_workflow',
                    existing_type=mysql.VARCHAR(length=36),
                    nullable=True)

    # Workflow table
    op.add_column('bwlWORKFLOW', db.Column(
        '_extra_data', db.LargeBinary(), nullable=False))
    query = select(columns=['uuid', 'extra_data'], from_obj=bwlworkflow)
    for r in conn.execute(query):
        # Decode and re-encode old value
        value = base64.b64encode(cPickle.dumps(cPickle.loads(r.extra_data)))
        # Update value in table.
        op.execute(
            bwlworkflow.update().where(bwlworkflow.c.uuid == r.uuid).values(
                _extra_data=value
            )
        )
    op.drop_column('bwlWORKFLOW', u'extra_data')

    # Object logging: copy id_bibworkflowobject into the new id_object
    # column, then drop the old column plus unused extra_data/error_msg.
    op.add_column('bwlOBJECTLOGGING', db.Column(
        'id_object', mysql.INTEGER(display_width=255), nullable=False))
    op.execute(
        bwlobjectlogging.update().values({
            bwlobjectlogging.c.id_object:
            bwlobjectlogging.c.id_bibworkflowobject
        })
    )
    op.drop_column('bwlOBJECTLOGGING', u'id_bibworkflowobject')
    op.drop_column('bwlOBJECTLOGGING', u'extra_data')
    op.drop_column('bwlOBJECTLOGGING', u'error_msg')

    # Workflow logging: same rename (id_workflow -> id_object).
    op.add_column('bwlWORKFLOWLOGGING', db.Column(
        'id_object', db.String(length=255), nullable=False))
    op.execute(
        bwlworkflowlogging.update().values({
            bwlworkflowlogging.c.id_object:
            bwlworkflowlogging.c.id_workflow
        })
    )
    op.drop_column('bwlWORKFLOWLOGGING', u'id_workflow')
    op.drop_column('bwlWORKFLOWLOGGING', u'extra_data')
    op.drop_column('bwlWORKFLOWLOGGING', u'error_msg')
# Example #10
def do_upgrade():
    """Upgrade recipe for the bwl* (workflow engine) tables.

    Moves the pickled ``extra_data`` payloads of ``bwlOBJECT`` and
    ``bwlWORKFLOW`` into new binary ``_extra_data`` columns (re-encoded
    as base64-wrapped pickles), runs ``bwlOBJECT._data`` through
    ``transform_data``, and renames the logging tables' reference
    columns to ``id_object``.

    NOTE(review): ``cPickle.loads`` on database content is unsafe for
    untrusted data; acceptable here only because these rows were
    written by this application.
    """
    import invenio
    import sys
    import types

    # Minimal stand-ins for classes referenced by old pickled payloads.
    class CoolDict(dict):
        pass

    class CoolList(list):
        pass

    # Fake old non-existing module so unpickling payloads that
    # reference 'invenio.bibfield_utils' still resolves.
    m = types.ModuleType('invenio.bibfield_utils')
    m.CoolDict = CoolDict
    m.CoolList = CoolList
    sys.modules['invenio.bibfield_utils'] = m
    invenio.bibfield_utils = m

    # Minimal table definitions (lightweight SQLAlchemy constructs; the
    # full models are deliberately not imported in a migration).
    bwlobject = table(
        'bwlOBJECT',
        column('id', db.Integer(primary_key=True)),
        column('extra_data', db.MutableDict.as_mutable(db.PickleType)),
        column('_extra_data', db.LargeBinary()),
        column('_data', db.LargeBinary()),
    )

    bwlworkflow = table(
        'bwlWORKFLOW',
        column('uuid', db.String(36)),
        column('extra_data', db.MutableDict.as_mutable(db.PickleType)),
        column('_extra_data', db.LargeBinary()),
    )

    bwlobjectlogging = table(
        'bwlOBJECTLOGGING',
        column('id_object', db.Integer()),
        column('id_bibworkflowobject', db.Integer()),
    )

    bwlworkflowlogging = table(
        'bwlWORKFLOWLOGGING',
        column('id_object', db.String()),
        column('id_workflow', db.String()),
    )

    conn = op.get_bind()

    # Object table
    op.add_column('bwlOBJECT',
                  db.Column('_extra_data', db.LargeBinary(), nullable=False))

    query = select(columns=['id', 'extra_data', '_data'], from_obj=bwlobject)
    for r in conn.execute(query):
        # Decode and re-encode old value
        value = base64.b64encode(cPickle.dumps(cPickle.loads(r.extra_data)))
        # Ensure data value can be read
        data_value = base64.b64encode(
            cPickle.dumps(
                transform_data(cPickle.loads(base64.b64decode(r._data)))))

        # Update value in table.
        op.execute(bwlobject.update().where(bwlobject.c.id == r.id).values(
            _extra_data=value,
            _data=data_value,
        ))

    # Old pickle column replaced by _extra_data; relax two columns.
    op.drop_column('bwlOBJECT', u'extra_data')
    op.alter_column('bwlOBJECT',
                    'data_type',
                    existing_type=mysql.VARCHAR(length=50),
                    nullable=True)
    op.alter_column('bwlOBJECT',
                    'id_workflow',
                    existing_type=mysql.VARCHAR(length=36),
                    nullable=True)

    # Workflow table
    op.add_column('bwlWORKFLOW',
                  db.Column('_extra_data', db.LargeBinary(), nullable=False))
    query = select(columns=['uuid', 'extra_data'], from_obj=bwlworkflow)
    for r in conn.execute(query):
        # Decode and re-encode old value
        value = base64.b64encode(cPickle.dumps(cPickle.loads(r.extra_data)))
        # Update value in table.
        op.execute(bwlworkflow.update().where(
            bwlworkflow.c.uuid == r.uuid).values(_extra_data=value))
    op.drop_column('bwlWORKFLOW', u'extra_data')

    # Object logging: copy id_bibworkflowobject into the new id_object
    # column, then drop the old column plus unused extra_data/error_msg.
    op.add_column(
        'bwlOBJECTLOGGING',
        db.Column('id_object',
                  mysql.INTEGER(display_width=255),
                  nullable=False))
    op.execute(bwlobjectlogging.update().values({
        bwlobjectlogging.c.id_object:
        bwlobjectlogging.c.id_bibworkflowobject
    }))
    op.drop_column('bwlOBJECTLOGGING', u'id_bibworkflowobject')
    op.drop_column('bwlOBJECTLOGGING', u'extra_data')
    op.drop_column('bwlOBJECTLOGGING', u'error_msg')

    # Workflow logging: same rename (id_workflow -> id_object).
    op.add_column(
        'bwlWORKFLOWLOGGING',
        db.Column('id_object', db.String(length=255), nullable=False))
    op.execute(bwlworkflowlogging.update().values(
        {bwlworkflowlogging.c.id_object: bwlworkflowlogging.c.id_workflow}))
    op.drop_column('bwlWORKFLOWLOGGING', u'id_workflow')
    op.drop_column('bwlWORKFLOWLOGGING', u'extra_data')
    op.drop_column('bwlWORKFLOWLOGGING', u'error_msg')