def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine

    table = meta.tables['DATA_TYPES_GROUPS']
    create_column(COL_RANGES_1, table)
    create_column(COL_RANGES_2, table)

    try:
        ## Iterate DataTypeGroups from previous code versions and try to update the value of the new column.
        previous_groups = dao.get_generic_entity(model.DataTypeGroup, "0", "no_of_ranges")

        for group in previous_groups:

            operation_group = dao.get_operationgroup_by_id(group.fk_operation_group)
            #group.only_numeric_ranges = operation_group.has_only_numeric_ranges

            if operation_group.range3 is not None:
                group.no_of_ranges = 3
            elif operation_group.range2 is not None:
                group.no_of_ranges = 2
            elif operation_group.range1 is not None:
                group.no_of_ranges = 1
            else:
                group.no_of_ranges = 0

            dao.store_entity(group)

    except Exception as excep:
        ## We can live with the column keeping only its default value; do not stop the startup.
        logger = get_logger(__name__)
        logger.exception(excep)
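
The snippets in this listing rely on module-level names (meta, the COL_* column definitions, create_column, SA_SESSIONMAKER, dao, get_logger and so on) that the listing itself omits. Below is a minimal sketch of that context; the import paths and the two column definitions are assumptions inferred from how the names are used here, not code taken from the original migration files.

# Assumed module-level context for the migration snippets in this listing.
# Import paths and column definitions are illustrative guesses, not originals.
from sqlalchemy import Column, Integer, MetaData, Table, Index, ForeignKeyConstraint
from sqlalchemy.sql import text
from migrate.changeset import create_column, drop_column, alter_column

from tvb.basic.logger.builder import get_logger             # assumed TVB logger factory
from tvb.core.entities import model                         # assumed ORM models (DataTypeGroup, ...)
from tvb.core.entities.storage import dao, SA_SESSIONMAKER  # assumed DAO and session factory

meta = MetaData()
LOGGER = get_logger(__name__)

# Illustrative column definitions; each real migration declares the columns it adds or drops.
COL_RANGES_1 = Column('no_of_ranges', Integer, default=0)
COL_RANGES_2 = Column('only_numeric_ranges', Integer, default=0)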
Example #2
def downgrade(migrate_engine):
    """Operations to reverse the above upgrade go here."""
    meta.bind = migrate_engine

    table = meta.tables['USERS']
    create_column(COLUMN_N1, table)
    table = meta.tables['BURST_CONFIGURATIONS']
    create_column(COLUMN_N2, table)
    table = meta.tables['OPERATIONS']
    alter_column(COLUMN_N3_NEW, table=table, name=COLUMN_N3_OLD.name)

    try:
        meta.bind = migrate_engine
        session = SA_SESSIONMAKER()
        session.execute(
            text(
                """UPDATE "DATA_TYPES" SET module='tvb.datatypes.surfaces' WHERE "type" = 'RegionMapping' """
            ))
        session.execute(
            text(
                """UPDATE "DATA_TYPES" SET module='tvb.datatypes.surfaces' WHERE "type" = 'LocalConnectivity' """
            ))
        session.execute(
            text(
                """UPDATE "DATA_TYPES" SET module='tvb.datatypes.surfaces' WHERE "type" = 'Cortex' """
            ))
        session.commit()
        session.close()
    except Exception:
        LOGGER.exception("Cold not update datatypes")
        raise
def upgrade(migrate_engine):
    meta.bind = migrate_engine

    category_column = Column('category_id', Integer)
    movie = Table('movie', meta, category_column)
    create_column(category_column, movie)
    Index('ix_movie_category_id', movie.c.category_id).create()
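
The listing shows no downgrade for this movie/category migration. A plausible reverse, sketched here on the assumption that the upgrade above is the only place the index and column are introduced, drops the index first and then the column:

def downgrade(migrate_engine):
    # Hypothetical reverse of the category_id upgrade above; not part of the
    # original listing. Written as it would appear in its own migration script.
    meta.bind = migrate_engine

    category_column = Column('category_id', Integer)
    movie = Table('movie', meta, category_column)
    Index('ix_movie_category_id', movie.c.category_id).drop()
    drop_column(category_column, movie)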
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine

    table = meta.tables['DATA_TYPES_GROUPS']
    create_column(COL_RANGES_1, table)
    create_column(COL_RANGES_2, table)

    try:
        ## Iterate DataTypeGroups from previous code versions and try to update the value of the new column.
        previous_groups = dao.get_generic_entity(model.DataTypeGroup, "0", "no_of_ranges")

        for group in previous_groups:

            operation_group = dao.get_operationgroup_by_id(group.fk_operation_group)
            #group.only_numeric_ranges = operation_group.has_only_numeric_ranges

            if operation_group.range3 is not None:
                group.no_of_ranges = 3
            elif operation_group.range2 is not None:
                group.no_of_ranges = 2
            elif operation_group.range1 is not None:
                group.no_of_ranges = 1
            else:
                group.no_of_ranges = 0

            dao.store_entity(group)

    except Exception as excep:
        ## We can live with the column keeping only its default value; do not stop the startup.
        logger = get_logger(__name__)
        logger.exception(excep)
        
    session = SA_SESSIONMAKER()
    session.execute(text("""UPDATE "OPERATIONS"
                               SET status = 
                                CASE
                                    WHEN status = 'FINISHED' THEN '4-FINISHED'
                                    WHEN status = 'STARTED' THEN '3-STARTED'
                                    WHEN status = 'CANCELED' THEN '2-CANCELED'
                                    ELSE '1-ERROR'
                                END
                             WHERE status IN ('FINISHED', 'CANCELED', 'STARTED', 'ERROR');"""))
    session.commit()
    session.close()

    try:
        session = SA_SESSIONMAKER()
        # Use isnot(None) so the NULL check runs in SQL instead of being evaluated in Python.
        for sim_state in session.query(SimulationState).filter(SimulationState.fk_datatype_group.isnot(None)).all():
            session.delete(sim_state)
        session.commit()
        session.close()
    except Exception as excep:
        ## It might happen that the SimulationState table has not been created yet, e.g. if the user is on version 1.0.2
        logger = get_logger(__name__)
        logger.exception(excep)
Example #5
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    try:
        meta.bind = migrate_engine
        table = meta.tables['PROJECTS']
        create_column(COL_VERSION, table)

    except Exception:
        logger = get_logger(__name__)
        logger.exception("Cold not create new column required by the update")
        raise
Example #6
def upgrade(migrate_engine):
    meta.bind = migrate_engine

    # Change release, add last_edit and index
    last_edit_column = Column('last_edit', Integer)
    release = Table('release', meta, last_edit_column)

    create_column(last_edit_column, release)
    Index('ix_release_last_edit', release.c.last_edit).create()

    # Change movie last_edit
    last_edit_column = Column('last_edit', Integer)
    movie = Table('movie', meta, last_edit_column)
    Index('ix_movie_last_edit', movie.c.last_edit).create()
Example #7
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine
    table1 = meta.tables['MAPPED_TIME_SERIES_DATA']
    table2 = meta.tables['MAPPED_TIME_SERIES_REGION_DATA']
    table3 = meta.tables['MAPPED_SENSORS_DATA']

    create_column(COL_REG1, table1)
    create_column(COL_REG2, table1)
    create_column(COL_REG3, table2)
    create_column(COL_REG4, table2)
    create_column(COL_SENSORS, table3)

    session = SA_SESSIONMAKER()
    session.execute(
        text(
            """UPDATE "MAPPED_TIME_SERIES_REGION_DATA" tr SET _region_mapping =
                        (SELECT dt.gid
                         FROM "MAPPED_REGION_MAPPING_DATA" rm, "DATA_TYPES" dt
                         WHERE dt.id = rm.id AND tr._connectivity = rm._connectivity);"""
        ))
    # session.execute(text("""UPDATE "MAPPED_TIME_SERIES_REGION_DATA" tr SET _region_mapping_volume =
    #                     (SELECT dt.gid
    #                      FROM "MAPPED_REGION_VOLUME_MAPPING_DATA" rm, "DATA_TYPES" dt
    #                      WHERE dt.id = rm.id AND tr._connectivity = rm._connectivity);"""))
    session.execute(
        text(
            """UPDATE "MAPPED_TIME_SERIES_DATA" ts SET _has_surface_mapping = True
                        WHERE
                            EXISTS (SELECT * FROM "DATA_TYPES" dt
                                    WHERE dt.id=ts.id AND dt.type in ('TimeSeriesSurface', 'TimeSeriesEEG',
                                            'TimeSeriesSEEG', 'TimeSeriesMEG'))
                         OR EXISTS (SELECT * from "MAPPED_TIME_SERIES_REGION_DATA" tr
                                    WHERE tr.id=ts.id AND tr._region_mapping is not NULL);"""
        ))
    session.execute(
        text(
            """UPDATE "MAPPED_TIME_SERIES_DATA" ts SET _has_volume_mapping = True
                        WHERE
                            EXISTS (SELECT * FROM "DATA_TYPES" dt
                                    WHERE dt.id=ts.id AND dt.type in ('TimeSeriesVolume'))
                         OR EXISTS (SELECT * from "MAPPED_TIME_SERIES_REGION_DATA" tr
                                    WHERE tr.id=ts.id AND tr._region_mapping_volume is not NULL);"""
        ))

    session.commit()
    session.close()
Example #8
def downgrade(migrate_engine):
    """Operations to reverse the above upgrade go here."""
    meta.bind = migrate_engine
    table1 = meta.tables['MAPPED_CONNECTIVITY_DATA']

    create_column(COL_OLD, table1)

    session = SA_SESSIONMAKER()
    session.execute(text("UPDATE \"MAPPED_CONNECTIVITY_DATA\" set _unidirectional=_undirected"))
    session.commit()
    session.close()

    drop_column(COL_NEW, table1)
    create_column(COL_NOSE_CORRECTION, table1)
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    try:
        meta.bind = migrate_engine
        table = meta.tables['PROJECTS']
        create_column(COL_VERSION, table)

    except Exception:
        logger = get_logger(__name__)
        logger.exception("Cold not create new column required by the update")
        raise
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    try:
        meta.bind = migrate_engine
        table = meta.tables['MAPPED_CORRELATION_COEFFICIENTS_DATA']
        create_column(COL_LABELS, table)

    except Exception:
        ## This update might fail if people have an older version of TVB
        ## (in which the current datatype does not even exist)
        logger = get_logger(__name__)
        logger.warning("We do not update table MAPPED_CORRELATION_COEFFICIENTS_DATA because it has not been created yet")
Example #11
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    try:
        meta.bind = migrate_engine
        table = meta.tables['MAPPED_CORRELATION_COEFFICIENTS_DATA']
        create_column(COL_LABELS, table)

    except Exception:
        ## This update might fail if people have an older version of TVB
        ## (in which the current datatype does not even exist)
        logger = get_logger(__name__)
        logger.warning("We do not update table MAPPED_CORRELATION_COEFFICIENTS_DATA because it has not been created yet")
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    try:
        meta.bind = migrate_engine
        table = meta.tables['MAPPED_CONNECTIVITY_DATA']
        create_column(COL_NR_OF_CONNECTIONS, table)

        remove_visualizer_references()

    except Exception:
        LOGGER.exception("Cold not create new column required by the update")
        raise
Example #13
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    try:
        meta.bind = migrate_engine
        table = meta.tables['MAPPED_CONNECTIVITY_DATA']
        create_column(COL_NR_OF_CONNECTIONS, table)

        remove_visualizer_references()

    except Exception:
        LOGGER.exception("Cold not create new column required by the update")
        raise
Example #14
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine
    table1 = meta.tables['MAPPED_CONNECTIVITY_DATA']

    create_column(COL_NEW, table1)

    session = SA_SESSIONMAKER()
    session.execute(text("UPDATE \"MAPPED_CONNECTIVITY_DATA\" set _undirected=_unidirectional"))
    session.commit()
    session.close()

    drop_column(COL_OLD, table1)
    drop_column(COL_NOSE_CORRECTION, table1)
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine
    table1 = meta.tables['MAPPED_TIME_SERIES_DATA']
    table2 = meta.tables['MAPPED_TIME_SERIES_REGION_DATA']
    table3 = meta.tables['MAPPED_SENSORS_DATA']

    create_column(COL_REG1, table1)
    create_column(COL_REG2, table1)
    create_column(COL_REG3, table2)
    create_column(COL_REG4, table2)
    create_column(COL_SENSORS, table3)

    logger = get_logger(__name__)
    sqlite = _exec_update("1", logger)
    if not sqlite:
        _exec_update("True", logger)
def upgrade(migrate_engine):
    """
    Alter existing table ALGORITHMS, by moving columns from the old ALGORITHM_GROUPS table.
    """
    meta.bind = migrate_engine
    table_algo = meta.tables["ALGORITHMS"]
    for col in ADD_COLUMNS:
        create_column(col, table_algo)

    session = SA_SESSIONMAKER()
    try:
        session.execute(text("ALTER TABLE \"MAPPED_SIMULATION_STATE\" "
                             "ADD COLUMN _current_state VARYING CHARACTER(255)"))
        session.commit()
    except Exception:
        session.close()
        session = SA_SESSIONMAKER()
        session.execute(text("ALTER TABLE \"MAPPED_SIMULATION_STATE\" "
                             "ADD COLUMN _current_state character varying;"))
        session.commit()
Example #17
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine
    table1 = meta.tables['MAPPED_TIME_SERIES_DATA']
    table2 = meta.tables['MAPPED_TIME_SERIES_REGION_DATA']
    table3 = meta.tables['MAPPED_SENSORS_DATA']

    create_column(COL_REG1, table1)
    create_column(COL_REG2, table1)
    create_column(COL_REG3, table2)
    create_column(COL_REG4, table2)
    create_column(COL_SENSORS, table3)

    logger = get_logger(__name__)
    sqlite = _exec_update("1", logger)
    if not sqlite:
        _exec_update("True", logger)
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine
    table1 = meta.tables['MAPPED_TIME_SERIES_DATA']
    table2 = meta.tables['MAPPED_TIME_SERIES_REGION_DATA']
    table3 = meta.tables['MAPPED_SENSORS_DATA']

    create_column(COL_REG1, table1)
    create_column(COL_REG2, table1)
    create_column(COL_REG3, table2)
    create_column(COL_REG4, table2)
    create_column(COL_SENSORS, table3)

    session = SA_SESSIONMAKER()
    session.execute(text("""UPDATE "MAPPED_TIME_SERIES_REGION_DATA" tr SET _region_mapping =
                        (SELECT dt.gid
                         FROM "MAPPED_REGION_MAPPING_DATA" rm, "DATA_TYPES" dt
                         WHERE dt.id = rm.id AND tr._connectivity = rm._connectivity);"""))
    # session.execute(text("""UPDATE "MAPPED_TIME_SERIES_REGION_DATA" tr SET _region_mapping_volume =
    #                     (SELECT dt.gid
    #                      FROM "MAPPED_REGION_VOLUME_MAPPING_DATA" rm, "DATA_TYPES" dt
    #                      WHERE dt.id = rm.id AND tr._connectivity = rm._connectivity);"""))
    session.execute(text("""UPDATE "MAPPED_TIME_SERIES_DATA" ts SET _has_surface_mapping = True
                        WHERE
                            EXISTS (SELECT * FROM "DATA_TYPES" dt
                                    WHERE dt.id=ts.id AND dt.type in ('TimeSeriesSurface', 'TimeSeriesEEG',
                                            'TimeSeriesSEEG', 'TimeSeriesMEG'))
                         OR EXISTS (SELECT * from "MAPPED_TIME_SERIES_REGION_DATA" tr
                                    WHERE tr.id=ts.id AND tr._region_mapping is not NULL);"""))
    session.execute(text("""UPDATE "MAPPED_TIME_SERIES_DATA" ts SET _has_volume_mapping = True
                        WHERE
                            EXISTS (SELECT * FROM "DATA_TYPES" dt
                                    WHERE dt.id=ts.id AND dt.type in ('TimeSeriesVolume'))
                         OR EXISTS (SELECT * from "MAPPED_TIME_SERIES_REGION_DATA" tr
                                    WHERE tr.id=ts.id AND tr._region_mapping_volume is not NULL);"""))

    session.commit()
    session.close()
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    try:
        meta.bind = migrate_engine
        table1 = meta.tables['MAPPED_SURFACE_DATA']

        create_column(COL_1, table1)
        create_column(COL_2, table1)
        create_column(COL_3, table1)

        try:
            session = SA_SESSIONMAKER()
            session.execute(text("UPDATE \"DATA_TYPES\" SET invalid=1 WHERE exists "
                                 "(SELECT * FROM \"MAPPED_SURFACE_DATA\" WHERE  _number_of_split_slices > 1 "
                                 "and \"DATA_TYPES\".id = \"MAPPED_SURFACE_DATA\".id)"))
            session.commit()
            session.close()
        except ProgrammingError:
            # PostgreSQL
            session = SA_SESSIONMAKER()
            session.execute(text("UPDATE \"DATA_TYPES\" SET invalid=TRUE WHERE exists "
                                 "(SELECT * FROM \"MAPPED_SURFACE_DATA\" WHERE  _number_of_split_slices > 1 "
                                 "and \"DATA_TYPES\".id = \"MAPPED_SURFACE_DATA\".id)"))
            session.commit()
            session.close()

    except Exception:
        logger = get_logger(__name__)
        logger.exception("Cold not create new column required by the update")
        raise
Example #20
def upgrade(migrate_engine):
    """
    Alter existing table ALGORITHMS, by moving columns from the old ALGORITHM_GROUPS table.
    """
    meta.bind = migrate_engine
    table_algo = meta.tables["ALGORITHMS"]
    for col in ADD_COLUMNS:
        create_column(col, table_algo)

    session = SA_SESSIONMAKER()
    try:
        session.execute(
            text("ALTER TABLE \"MAPPED_SIMULATION_STATE\" "
                 "ADD COLUMN _current_state VARYING CHARACTER(255)"))
        session.commit()
    except Exception:
        session.close()
        session = SA_SESSIONMAKER()
        session.execute(
            text("ALTER TABLE \"MAPPED_SIMULATION_STATE\" "
                 "ADD COLUMN _current_state character varying;"))
        session.commit()
def downgrade(migrate_engine):
    """Operations to reverse the above upgrade go here."""
    meta.bind = migrate_engine

    table = meta.tables['USERS']
    create_column(COLUMN_N1, table)
    table = meta.tables['BURST_CONFIGURATIONS']
    create_column(COLUMN_N2, table)
    table = meta.tables['OPERATIONS']
    alter_column(COLUMN_N3_NEW, table=table, name=COLUMN_N3_OLD.name)

    try:
        meta.bind = migrate_engine
        session = SA_SESSIONMAKER()
        session.execute(text("""UPDATE "DATA_TYPES" SET module='tvb.datatypes.surfaces' WHERE "type" = 'RegionMapping' """))
        session.execute(text("""UPDATE "DATA_TYPES" SET module='tvb.datatypes.surfaces' WHERE "type" = 'LocalConnectivity' """))
        session.execute(text("""UPDATE "DATA_TYPES" SET module='tvb.datatypes.surfaces' WHERE "type" = 'Cortex' """))
        session.commit()
        session.close()
    except Exception:
        LOGGER.exception("Cold not update datatypes")
        raise
Example #22
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine

    table1 = meta.tables['BURST_CONFIGURATIONS']
    create_column(COLUMN_BURST, table1)

    table2 = meta.tables['ALGORITHM_CATEGORIES']
    create_column(COLUMN_CATEGORY, table2)

    table3 = meta.tables['ALGORITHM_GROUPS']
    create_column(COLUMN_GROUP, table3)

    table4 = meta.tables['MAPPED_SURFACE_DATA']
    create_column(COLUMN_VALID_SURFACE, table4)
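
No downgrade is shown for this migration; assuming the same COLUMN_* definitions are in scope, a symmetric sketch would simply drop the four columns again:

def downgrade(migrate_engine):
    # Hypothetical reverse of the upgrade above; not part of the original listing.
    meta.bind = migrate_engine

    drop_column(COLUMN_BURST, meta.tables['BURST_CONFIGURATIONS'])
    drop_column(COLUMN_CATEGORY, meta.tables['ALGORITHM_CATEGORIES'])
    drop_column(COLUMN_GROUP, meta.tables['ALGORITHM_GROUPS'])
    drop_column(COLUMN_VALID_SURFACE, meta.tables['MAPPED_SURFACE_DATA'])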
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine

    table1 = meta.tables['BURST_CONFIGURATIONS']
    create_column(COLUMN_BURST, table1)

    table2 = meta.tables['ALGORITHM_CATEGORIES']
    create_column(COLUMN_CATEGORY, table2)

    table3 = meta.tables['ALGORITHM_GROUPS']
    create_column(COLUMN_GROUP, table3)

    table4 = meta.tables['MAPPED_SURFACE_DATA']
    create_column(COLUMN_VALID_SURFACE, table4)
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine

    table = meta.tables['MAPPED_SURFACE_DATA']
    create_column(COLUMN_N1, table)
    create_column(COLUMN_N2, table)
    create_column(COLUMN_N3, table)

    session = SA_SESSIONMAKER()
    session.execute(text("""UPDATE "OPERATIONS" SET status='5-FINISHED' WHERE status = '4-FINISHED' """))
    session.commit()
    session.close()
Example #25
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine

    table = meta.tables['MAPPED_SURFACE_DATA']
    create_column(COLUMN_N1, table)
    create_column(COLUMN_N2, table)
    create_column(COLUMN_N3, table)

    session = SA_SESSIONMAKER()
    session.execute(
        text(
            """UPDATE "OPERATIONS" SET status='5-FINISHED' WHERE status = '4-FINISHED' """
        ))
    session.commit()
    session.close()
Example #26
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    try:
        meta.bind = migrate_engine
        table1 = meta.tables['MAPPED_SURFACE_DATA']

        create_column(COL_1, table1)
        create_column(COL_2, table1)
        create_column(COL_3, table1)

        try:
            session = SA_SESSIONMAKER()
            session.execute(
                text(
                    "UPDATE \"DATA_TYPES\" SET invalid=1 WHERE exists "
                    "(SELECT * FROM \"MAPPED_SURFACE_DATA\" WHERE  _number_of_split_slices > 1 "
                    "and \"DATA_TYPES\".id = \"MAPPED_SURFACE_DATA\".id)"))
            session.commit()
            session.close()
        except ProgrammingError:
            # PostgreSQL
            session = SA_SESSIONMAKER()
            session.execute(
                text(
                    "UPDATE \"DATA_TYPES\" SET invalid=TRUE WHERE exists "
                    "(SELECT * FROM \"MAPPED_SURFACE_DATA\" WHERE  _number_of_split_slices > 1 "
                    "and \"DATA_TYPES\".id = \"MAPPED_SURFACE_DATA\".id)"))
            session.commit()
            session.close()

    except Exception:
        logger = get_logger(__name__)
        logger.exception("Cold not create new column required by the update")
        raise
Example #27
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine

    table = meta.tables['DATA_TYPES_GROUPS']
    create_column(COL_RANGES_1, table)
    create_column(COL_RANGES_2, table)

    try:
        ## Iterate DataTypeGroups from previous code versions and try to update the value of the new column.
        previous_groups = dao.get_generic_entity(model.DataTypeGroup, "0",
                                                 "no_of_ranges")

        for group in previous_groups:

            operation_group = dao.get_operationgroup_by_id(
                group.fk_operation_group)
            #group.only_numeric_ranges = operation_group.has_only_numeric_ranges

            if operation_group.range3 is not None:
                group.no_of_ranges = 3
            elif operation_group.range2 is not None:
                group.no_of_ranges = 2
            elif operation_group.range1 is not None:
                group.no_of_ranges = 1
            else:
                group.no_of_ranges = 0

            dao.store_entity(group)

    except Exception as excep:
        ## We can live with the column keeping only its default value; do not stop the startup.
        logger = get_logger(__name__)
        logger.exception(excep)

    session = SA_SESSIONMAKER()
    session.execute(
        text("""UPDATE "OPERATIONS"
                               SET status = 
                                CASE
                                    WHEN status = 'FINISHED' THEN '4-FINISHED'
                                    WHEN status = 'STARTED' THEN '3-STARTED'
                                    WHEN status = 'CANCELED' THEN '2-CANCELED'
                                    ELSE '1-ERROR'
                                END
                             WHERE status IN ('FINISHED', 'CANCELED', 'STARTED', 'ERROR');"""
             ))
    session.commit()
    session.close()

    try:
        session = SA_SESSIONMAKER()
        # TODO: fix me
        # for sim_state in session.query(SimulationState).filter(SimulationState.fk_datatype_group is not None).all():
        #     session.delete(sim_state)
        session.commit()
        session.close()
    except Exception as excep:
        ## It might happen that the SimulationState table has not been created yet, e.g. if the user is on version 1.0.2
        logger = get_logger(__name__)
        logger.exception(excep)
Example #28
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta = MetaData(bind=migrate_engine)

    table = _prepare_table(meta, 'USER_PREFERENCES')
    table.c.user_id.alter(name='fk_user')

    table = _prepare_table(meta, 'BURST_CONFIGURATIONS')
    table.c.project_id.alter(name='fk_project')

    table = _prepare_table(
        meta,
        'WORKFLOWS',
    )
    table.c.project_id.alter(name='fk_project')
    table.c.burst_id.alter(name='fk_burst')

    table = _prepare_table(meta, 'WORKFLOW_STEPS')
    table.c.workflow_id.alter(name='fk_workflow')
    table.c.algorithm_id.alter(name='fk_algorithm')
    table.c.resulted_op_id.alter(name='fk_operation')

    table = _prepare_table(meta, 'MAPPED_DATATYPE_MEASURE')
    table.c.analyzed_datatype.alter(name='_analyzed_datatype')

    ## Fix Lookup Table mapping.
    table = _prepare_table(meta, 'MAPPED_LOOK_UP_TABLE_DATA')
    create_column(COL_1, table)
    create_column(COL_2, table)
    create_column(COL_3, table)
    create_column(COL_4, table)
    create_column(COL_5, table)
    create_column(COL_6, table)
    create_column(COL_7, table)
    session = SA_SESSIONMAKER()
    session.execute(text('DELETE FROM "MAPPED_LOOK_UP_TABLE_DATA";'))
    session.execute(
        text(
            'insert into "MAPPED_LOOK_UP_TABLE_DATA"(id, _equation, _number_of_values, _invdx, _xmax, _xmin, _df, _dx, _data) '
            'select id, \'\', _number_of_values, _invdx, _xmax, _xmin, _df, _dx, _data from "MAPPED_NERF_TABLE_DATA";'
        ))
    session.execute(
        text(
            'insert into "MAPPED_LOOK_UP_TABLE_DATA"(id, _equation, _number_of_values, _invdx, _xmax, _xmin, _df, _dx, _data) '
            'select id, \'\', _number_of_values, _invdx, _xmax, _xmin, _df, _dx, _data from "MAPPED_PSI_TABLE_DATA";'
        ))
    session.commit()
    session.close()

    table = _prepare_table(meta, 'MAPPED_NERF_TABLE_DATA')
    table.drop()

    table = _prepare_table(meta, 'MAPPED_PSI_TABLE_DATA')
    table.drop()
Example #29
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine
    table1 = meta.tables['MAPPED_TIME_SERIES_DATA']

    create_column(COL_1, table1)
    create_column(COL_2, table1)
    create_column(COL_3, table1)
    create_column(COL_4, table1)
    create_column(COL_5, table1)

    session = SA_SESSIONMAKER()
    try:
        # We have a database that supports renaming columns. This way we preserve the data from the old time series.
        session.execute(
            text("ALTER TABLE \"MAPPED_TIME_SERIES_DATA\" "
                 "RENAME COLUMN _dim_labels to _labels_ordering"))
        session.execute(
            text("ALTER TABLE \"MAPPED_CROSS_CORRELATION_DATA\" "
                 "RENAME COLUMN _dim_labels to _labels_ordering"))
    except sqlalchemy.exc.OperationalError:
        # We have a database like SQLite. Just create a new column; the old data is lost in this case.
        session.execute(
            text("ALTER TABLE \"MAPPED_TIME_SERIES_DATA\" "
                 "ADD COLUMN _labels_ordering VARYING CHARACTER(255)"))
        session.execute(
            text("ALTER TABLE \"MAPPED_CROSS_CORRELATION_DATA\" "
                 "ADD COLUMN _labels_ordering VARYING CHARACTER(255)"))

    session.execute(text("DROP TABLE \"MAPPED_PSI_TABLE_DATA\""))
    session.execute(text("DROP TABLE \"MAPPED_NERF_TABLE_DATA\""))
    session.execute(text("DROP TABLE \"MAPPED_LOOK_UP_TABLES_DATA\""))
    session.commit()
    session.close()

    table2 = meta.tables['MAPPED_CONNECTIVITY_DATA']
    create_column(COL_7, table2)

    for mapping in TABLE_RENAMES:
        session = SA_SESSIONMAKER()
        session.execute(
            text("ALTER TABLE \"%s\" RENAME TO \"%s\"" %
                 (mapping[0], mapping[1])))
        session.commit()
        session.close()
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine
    table1 = meta.tables['MAPPED_TIME_SERIES_DATA']

    create_column(COL_1, table1)
    create_column(COL_2, table1)
    create_column(COL_3, table1)
    create_column(COL_4, table1)
    create_column(COL_5, table1)

    session = SA_SESSIONMAKER()
    try:
        # We have a database that supports renaming columns. This way we preserve the data from the old time series.
        session.execute(text("ALTER TABLE \"MAPPED_TIME_SERIES_DATA\" "
                             "RENAME COLUMN _dim_labels to _labels_ordering"))
        session.execute(text("ALTER TABLE \"MAPPED_CROSS_CORRELATION_DATA\" "
                             "RENAME COLUMN _dim_labels to _labels_ordering"))
    except sqlalchemy.exc.OperationalError:
        # We have a database like SQLite. Just create a new column; the old data is lost in this case.
        session.execute(text("ALTER TABLE \"MAPPED_TIME_SERIES_DATA\" "
                             "ADD COLUMN _labels_ordering VARYING CHARACTER(255)"))
        session.execute(text("ALTER TABLE \"MAPPED_CROSS_CORRELATION_DATA\" "
                             "ADD COLUMN _labels_ordering VARYING CHARACTER(255)"))

    session.execute(text("DROP TABLE \"MAPPED_PSI_TABLE_DATA\""))
    session.execute(text("DROP TABLE \"MAPPED_NERF_TABLE_DATA\""))
    session.execute(text("DROP TABLE \"MAPPED_LOOK_UP_TABLES_DATA\""))
    session.commit()
    session.close()

    table2 = meta.tables['MAPPED_CONNECTIVITY_DATA']
    create_column(COL_7, table2)

    for mapping in TABLE_RENAMES:
        session = SA_SESSIONMAKER()
        session.execute(text("ALTER TABLE \"%s\" RENAME TO \"%s\"" % (mapping[0], mapping[1])))
        session.commit()
        session.close()
Example #31
def upgrade(migrate_engine):
    """
    Alter existing table ALGORITHMS, by moving columns from the old ALGORITHM_GROUPS table.
    """
    meta.bind = migrate_engine
    table_algo = meta.tables["ALGORITHMS"]
    for col in ADD_COLUMNS:
        create_column(col, table_algo)

    session = SA_SESSIONMAKER()
    try:
        session.execute(
            text("ALTER TABLE \"MAPPED_SIMULATION_STATE\" "
                 "ADD COLUMN _current_state VARYING CHARACTER(255)"))
        session.commit()
    except Exception:
        session.close()
        session = SA_SESSIONMAKER()
        session.execute(
            text("ALTER TABLE \"MAPPED_SIMULATION_STATE\" "
                 "ADD COLUMN _current_state character varying;"))
        session.commit()
    finally:
        session.close()

    session = SA_SESSIONMAKER()
    try:
        session.execute(
            text("""UPDATE "ALGORITHMS" SET
            module = (select G.module FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id),
            classname = (select G.classname FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id),
            displayname = (select G.displayname FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id),
            fk_category = (select G.fk_category FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id);"""
                 ))
        session.commit()

        # Delete old columns, no longer needed
        for col in DEL_COLUMNS:
            drop_column(col, table_algo)

        # Create the constraint only after the rows have been populated
        table_algo = meta.tables["ALGORITHMS"]
        fk_constraint = ForeignKeyConstraint(["fk_category"],
                                             ["ALGORITHM_CATEGORIES.id"],
                                             ondelete="CASCADE",
                                             table=table_algo)
        fk_constraint.create()

        # Drop old table
        session = SA_SESSIONMAKER()
        session.execute(text("""DROP TABLE "ALGORITHM_GROUPS";"""))
        session.commit()

    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()

    session = SA_SESSIONMAKER()
    try:
        session.execute(
            text("""ALTER TABLE "MAPPED_CONNECTIVITY_ANNOTATIONS"
                                RENAME TO "MAPPED_CONNECTIVITY_ANNOTATIONS_DATA"; """
                 ))
        session.execute(
            text(
                """ALTER TABLE "MAPPED_DATATYPE_MEASURE" RENAME TO "MAPPED_DATATYPE_MEASURE_DATA"; """
            ))
        session.execute(
            text(
                """ALTER TABLE "MAPPED_SIMULATION_STATE" RENAME TO "MAPPED_SIMULATION_STATE_DATA"; """
            ))
        session.execute(
            text(
                """ALTER TABLE "MAPPED_VALUE_WRAPPER" RENAME TO "MAPPED_VALUE_WRAPPER_DATA"; """
            ))
        session.execute(
            text(
                """ALTER TABLE "MAPPED_PROJECTION_DATA" RENAME TO "MAPPED_PROJECTION_MATRIX_DATA"; """
            ))
        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta = MetaData(bind=migrate_engine)
    
    table = _prepare_table(meta, 'USER_PREFERENCES')
    table.c.user_id.alter(name='fk_user')
    
    table = _prepare_table(meta, 'BURST_CONFIGURATIONS')
    table.c.project_id.alter(name='fk_project')
    
    table = _prepare_table(meta, 'WORKFLOWS',)
    table.c.project_id.alter(name='fk_project')
    table.c.burst_id.alter(name='fk_burst')
    
    table = _prepare_table(meta, 'WORKFLOW_STEPS')
    table.c.workflow_id.alter(name='fk_workflow')
    table.c.algorithm_id.alter(name='fk_algorithm')
    table.c.resulted_op_id.alter(name='fk_operation')
    
    table = _prepare_table(meta, 'MAPPED_DATATYPE_MEASURE')
    table.c.analyzed_datatype.alter(name='_analyzed_datatype')
    
    ## Fix Lookup Table mapping.
    table = _prepare_table(meta, 'MAPPED_LOOK_UP_TABLE_DATA')
    create_column(COL_1, table)
    create_column(COL_2, table)
    create_column(COL_3, table)
    create_column(COL_4, table)
    create_column(COL_5, table)
    create_column(COL_6, table)
    create_column(COL_7, table)
    session = SA_SESSIONMAKER()
    session.execute(text('DELETE FROM "MAPPED_LOOK_UP_TABLE_DATA";'))
    session.execute(text('insert into "MAPPED_LOOK_UP_TABLE_DATA"(id, _equation, _number_of_values, _invdx, _xmax, _xmin, _df, _dx, _data) '
                         'select id, \'\', _number_of_values, _invdx, _xmax, _xmin, _df, _dx, _data from "MAPPED_NERF_TABLE_DATA";'))
    session.execute(text('insert into "MAPPED_LOOK_UP_TABLE_DATA"(id, _equation, _number_of_values, _invdx, _xmax, _xmin, _df, _dx, _data) '
                         'select id, \'\', _number_of_values, _invdx, _xmax, _xmin, _df, _dx, _data from "MAPPED_PSI_TABLE_DATA";'))
    session.commit()
    session.close()
        
    table = _prepare_table(meta, 'MAPPED_NERF_TABLE_DATA')
    table.drop()
    
    table = _prepare_table(meta, 'MAPPED_PSI_TABLE_DATA')
    table.drop()
def upgrade(migrate_engine):
    """
    Alter existing table ALGORITHMS, by moving columns from the old ALGORITHM_GROUPS table.
    """
    meta.bind = migrate_engine
    table_algo = meta.tables["ALGORITHMS"]
    for col in ADD_COLUMNS:
        create_column(col, table_algo)

    session = SA_SESSIONMAKER()
    try:
        session.execute(text("ALTER TABLE \"MAPPED_SIMULATION_STATE\" "
                             "ADD COLUMN _current_state VARYING CHARACTER(255)"))
        session.commit()
    except Exception:
        session.close()
        session = SA_SESSIONMAKER()
        session.execute(text("ALTER TABLE \"MAPPED_SIMULATION_STATE\" "
                             "ADD COLUMN _current_state character varying;"))
        session.commit()
    finally:
        session.close()

    session = SA_SESSIONMAKER()
    try:
        session.execute(text(
            """UPDATE "ALGORITHMS" SET
            module = (select G.module FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id),
            classname = (select G.classname FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id),
            displayname = (select G.displayname FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id),
            fk_category = (select G.fk_category FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id);"""))
        session.commit()

        # Delete old columns, no longer needed
        for col in DEL_COLUMNS:
            drop_column(col, table_algo)

        # Create the constraint only after the rows have been populated
        table_algo = meta.tables["ALGORITHMS"]
        fk_constraint = ForeignKeyConstraint(["fk_category"], ["ALGORITHM_CATEGORIES.id"],
                                             ondelete="CASCADE", table=table_algo)
        fk_constraint.create()

        # Drop old table
        session = SA_SESSIONMAKER()
        session.execute(text("""DROP TABLE "ALGORITHM_GROUPS";"""))
        session.commit()

    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()

    session = SA_SESSIONMAKER()
    try:
        session.execute(text("""ALTER TABLE "MAPPED_CONNECTIVITY_ANNOTATIONS"
                                RENAME TO "MAPPED_CONNECTIVITY_ANNOTATIONS_DATA"; """))
        session.execute(text("""ALTER TABLE "MAPPED_DATATYPE_MEASURE" RENAME TO "MAPPED_DATATYPE_MEASURE_DATA"; """))
        session.execute(text("""ALTER TABLE "MAPPED_SIMULATION_STATE" RENAME TO "MAPPED_SIMULATION_STATE_DATA"; """))
        session.execute(text("""ALTER TABLE "MAPPED_VALUE_WRAPPER" RENAME TO "MAPPED_VALUE_WRAPPER_DATA"; """))
        session.execute(text("""ALTER TABLE "MAPPED_PROJECTION_DATA" RENAME TO "MAPPED_PROJECTION_MATRIX_DATA"; """))
        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()