def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    try:
        meta.bind = migrate_engine
        table1 = meta.tables['MAPPED_SURFACE_DATA']

        create_column(COL_1, table1)
        create_column(COL_2, table1)
        create_column(COL_3, table1)

        try:
            session = SA_SESSIONMAKER()
            session.execute(text("UPDATE \"DATA_TYPES\" SET invalid=1 WHERE exists "
                                 "(SELECT * FROM \"MAPPED_SURFACE_DATA\" WHERE _number_of_split_slices > 1 "
                                 "and \"DATA_TYPES\".id = \"MAPPED_SURFACE_DATA\".id)"))
            session.commit()
            session.close()
        except ProgrammingError:
            # PostgreSQL rejects the integer literal for a boolean column; retry with TRUE
            session = SA_SESSIONMAKER()
            session.execute(text("UPDATE \"DATA_TYPES\" SET invalid=TRUE WHERE exists "
                                 "(SELECT * FROM \"MAPPED_SURFACE_DATA\" WHERE _number_of_split_slices > 1 "
                                 "and \"DATA_TYPES\".id = \"MAPPED_SURFACE_DATA\".id)"))
            session.commit()
            session.close()

    except Exception:
        logger = get_logger(__name__)
        logger.exception("Could not create new column required by the update")
        raise
def _transfer_projection_matrices():
    """
    Previous ProjectionRegionM/EEG objects should be removed,
    and ProjectionSurfaceM/EEG should be transferred into the new DB tables.
    """
    session = SA_SESSIONMAKER()
    LOGGER.info("Transferring Projections Surface ...")
    try:
        # Only after SQLAlchemy finishes initialization does the new table MAPPED_PROJECTION_DATA exist
        session.execute(text("""INSERT into "MAPPED_PROJECTION_DATA" (id, _sources, _sensors, _projection_type)
            SELECT PS.id, PM._sources, PM._sensors, 'projEEG'
            FROM "MAPPED_PROJECTION_SURFACE_EEG_DATA" PS, "MAPPED_PROJECTION_MATRIX_DATA" PM
            WHERE PM.id=PS.id;"""))
        session.execute(text("""INSERT into "MAPPED_PROJECTION_DATA" (id, _sources, _sensors, _projection_type)
            SELECT PS.id, PM._sources, PM._sensors, 'projMEG'
            FROM "MAPPED_PROJECTION_SURFACE_MEG_DATA" PS, "MAPPED_PROJECTION_MATRIX_DATA" PM
            WHERE PM.id=PS.id;"""))
        session.execute(text("""DROP TABLE "MAPPED_PROJECTION_SURFACE_EEG_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_PROJECTION_SURFACE_MEG_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_PROJECTION_MATRIX_DATA";"""))

        LOGGER.info("Removing Projections Region ...")
        session.execute(text("""DELETE from "DATA_TYPES" WHERE type in ('ProjectionRegionEEG', 'ProjectionRegionMEG');"""))
        session.commit()
    except Exception:
        LOGGER.exception("Could not update Projection references")
    finally:
        session.close()
def remove_visualizer_references():
    """
    As we removed an algorithm, remove left-overs.
    """
    LOGGER.info("Starting to remove references towards old viewer ...")
    session = SA_SESSIONMAKER()
    try:
        session.execute(text(
            """DELETE FROM "OPERATIONS"
               WHERE fk_from_algo IN (SELECT A.id FROM "ALGORITHMS" A, "ALGORITHM_GROUPS" AG
                                      WHERE A.fk_algo_group = AG.id
                                      AND module = 'tvb.adapters.visualizers.cross_correlation'
                                      AND classname = 'PearsonCorrelationCoefficientVisualizer');"""))
        session.execute(text(
            """DELETE FROM "WORKFLOW_VIEW_STEPS"
               WHERE fk_algorithm IN (SELECT A.id FROM "ALGORITHMS" A, "ALGORITHM_GROUPS" AG
                                      WHERE A.fk_algo_group = AG.id
                                      AND module = 'tvb.adapters.visualizers.cross_correlation'
                                      AND classname = 'PearsonCorrelationCoefficientVisualizer');"""))
        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()
    LOGGER.info("References removed.")
def reset_database():
    """
    Remove all tables in DB.
    """
    LOGGER.warning("Your Database tables will be deleted.")
    try:
        session = SA_SESSIONMAKER()
        LOGGER.debug("Delete connection initiated.")
        inspector = reflection.Inspector.from_engine(session.connection())
        for table in inspector.get_table_names():
            try:
                LOGGER.debug("Removing: " + table)
                session.execute(text("DROP TABLE \"%s\" CASCADE" % table))
            except Exception:
                # Some dialects (e.g. SQLite) do not accept CASCADE; retry with a plain DROP
                try:
                    session.execute(text("DROP TABLE %s" % table))
                except Exception as excep1:
                    LOGGER.error("Could not drop table %s", table)
                    LOGGER.exception(excep1)
        session.commit()
        LOGGER.info("Database was cleaned up!")
    except Exception as excep:
        LOGGER.warning(excep)
    finally:
        session.close()
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine
    table = meta.tables['DATA_TYPES_GROUPS']
    create_column(COL_RANGES_1, table)
    create_column(COL_RANGES_2, table)

    try:
        ## Iterate DataTypeGroups from previous code-versions and try to update value for the new column.
        previous_groups = dao.get_generic_entity(model.DataTypeGroup, "0", "no_of_ranges")

        for group in previous_groups:
            operation_group = dao.get_operationgroup_by_id(group.fk_operation_group)
            #group.only_numeric_ranges = operation_group.has_only_numeric_ranges
            if operation_group.range3 is not None:
                group.no_of_ranges = 3
            elif operation_group.range2 is not None:
                group.no_of_ranges = 2
            elif operation_group.range1 is not None:
                group.no_of_ranges = 1
            else:
                group.no_of_ranges = 0
            dao.store_entity(group)

    except Exception as excep:
        ## we can live with a column only having default value. We will not stop the startup.
        logger = get_logger(__name__)
        logger.exception(excep)

    session = SA_SESSIONMAKER()
    session.execute(text("""UPDATE "OPERATIONS"
                            SET status =
                                CASE
                                    WHEN status = 'FINISHED' THEN '4-FINISHED'
                                    WHEN status = 'STARTED' THEN '3-STARTED'
                                    WHEN status = 'CANCELED' THEN '2-CANCELED'
                                    ELSE '1-ERROR'
                                END
                            WHERE status IN ('FINISHED', 'CANCELED', 'STARTED', 'ERROR');"""))
    session.commit()
    session.close()

    try:
        session = SA_SESSIONMAKER()
        # Use isnot(None) so the NULL check runs in SQL; a plain "is not None" is evaluated in Python
        # and would match every row.
        for sim_state in session.query(SimulationState).filter(SimulationState.fk_datatype_group.isnot(None)).all():
            session.delete(sim_state)
        session.commit()
        session.close()
    except Exception as excep:
        ## It might happen that SimulationState table is not yet created, e.g. if user has version 1.0.2
        logger = get_logger(__name__)
        logger.exception(excep)
def upgrade(_migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    session = SA_SESSIONMAKER()
    session.execute(text("DROP TABLE \"MAPPED_STRUCTURAL_MRI_DATA\""))
    session.commit()
    session.close()
def initialize_startup():
    """ Force DB tables creation, in case no data is already found. """
    is_db_empty = False
    session = SA_SESSIONMAKER()
    inspector = reflection.Inspector.from_engine(session.connection())
    table_names = inspector.get_table_names()
    if len(table_names) < 1:
        LOGGER.debug("No tables found, DB is probably empty")
        is_db_empty = True
    session.close()

    versions_repo = TvbProfile.current.db.DB_VERSIONING_REPO
    alembic_cfg = Config()
    alembic_cfg.set_main_option('script_location', versions_repo)
    alembic_cfg.set_main_option('sqlalchemy.url', TvbProfile.current.db.DB_URL)

    if is_db_empty:
        LOGGER.info("Initializing Database")
        if os.path.exists(versions_repo):
            shutil.rmtree(versions_repo)

        _update_sql_scripts()
        session = SA_SESSIONMAKER()
        Base.metadata.create_all(bind=session.connection())
        session.commit()
        session.close()
        command.stamp(alembic_cfg, 'head')
        LOGGER.info("Database Default Tables created successfully!")
    else:
        _update_sql_scripts()

        if 'migrate_version' in table_names:
            db_version = session.execute(text("""SELECT version from migrate_version""")).fetchone()[0]
            if db_version == 18:
                command.stamp(alembic_cfg, 'head')
                session.execute(text("""DROP TABLE "migrate_version";"""))
                session.commit()
                return is_db_empty

        if 'alembic_version' in table_names:
            db_version = session.execute(text("""SELECT version_num from alembic_version""")).fetchone()
            if not db_version:
                command.stamp(alembic_cfg, 'head')

        with session.connection() as connection:
            alembic_cfg.attributes['connection'] = connection
            command.upgrade(alembic_cfg, TvbProfile.current.version.DB_STRUCTURE_VERSION)
        LOGGER.info("Database already has some data, will not be re-created!")
    return is_db_empty
def change_algorithm(module, classname, new_module, new_class):
    """
    Change module and classname fields in ALGORITHM_GROUPS table.
    """
    session = SA_SESSIONMAKER()
    try:
        session.execute(text(
            """UPDATE "ALGORITHM_GROUPS"
               SET module = '""" + new_module + """', classname = '""" + new_class +
            """' WHERE module = '""" + module + """' AND classname = '""" + classname + """';"""))
        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()
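# The concatenated SQL above is injection-prone for untrusted inputs. A minimal alternative
# sketch using SQLAlchemy bound parameters (a hypothetical variant, not the original helper):
def change_algorithm_parameterized(module, classname, new_module, new_class):
    """ Same ALGORITHM_GROUPS update as change_algorithm, but with bound parameters. """
    session = SA_SESSIONMAKER()
    try:
        session.execute(text('UPDATE "ALGORITHM_GROUPS" '
                             'SET module = :new_module, classname = :new_class '
                             'WHERE module = :module AND classname = :classname'),
                        {'new_module': new_module, 'new_class': new_class,
                         'module': module, 'classname': classname})
        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()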
def downgrade(migrate_engine):
    """Operations to reverse the above upgrade go here."""
    meta.bind = migrate_engine
    table1 = meta.tables['MAPPED_CONNECTIVITY_DATA']
    create_column(COL_OLD, table1)

    session = SA_SESSIONMAKER()
    session.execute(text("UPDATE \"MAPPED_CONNECTIVITY_DATA\" set _unidirectional=_undirected"))
    session.commit()
    session.close()

    drop_column(COL_NEW, table1)
    create_column(COL_NOSE_CORRECTION, table1)
def downgrade(migrate_engine):
    """Operations to reverse the above upgrade go here."""
    meta.bind = migrate_engine
    table = meta.tables['MAPPED_SURFACE_DATA']
    drop_column(COLUMN_N1, table)
    drop_column(COLUMN_N2, table)
    drop_column(COLUMN_N3, table)

    session = SA_SESSIONMAKER()
    session.execute(text("""UPDATE "OPERATIONS" SET status='4-FINISHED' WHERE status = '5-FINISHED' """))
    session.execute(text("""UPDATE "OPERATIONS" SET status='3-STARTED' WHERE status = '4-PENDING' """))
    session.commit()
    session.close()
def downgrade(_migrate_engine):
    """Operations to reverse the above upgrade go here."""
    try:
        session = SA_SESSIONMAKER()
        session.execute(text("""UPDATE "BURST_CONFIGURATIONS" SET _simulator_configuration =
            REPLACE(REPLACE(_simulator_configuration, "range_1", "first_range"), "range_2", "second_range");"""))
        session.execute(text("""UPDATE "OPERATIONS" SET parameters =
            REPLACE(REPLACE(parameters, "range_1", "first_range"), "range_2", "second_range");"""))
        session.commit()
        session.close()
    except Exception as excep:
        ## This update is not critical. We can run even in case of error at update
        logger = get_logger(__name__)
        logger.exception(excep)
def delete_old_burst_table_after_migration():
    session = SA_SESSIONMAKER()
    try:
        session.execute(text("""DROP TABLE "BURST_CONFIGURATION"; """))
        session.commit()
    except Exception as excep:
        # A plain DROP may fail (e.g. on PostgreSQL, where dependent objects require CASCADE),
        # so retry with a fresh session and an explicit CASCADE.
        session.close()
        session = SA_SESSIONMAKER()
        LOGGER.exception(excep)
        try:
            session.execute(text("""DROP TABLE if exists "BURST_CONFIGURATION" cascade; """))
            session.commit()
        except Exception as excep:
            LOGGER.exception(excep)
    finally:
        session.close()
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine
    table = meta.tables['MAPPED_SURFACE_DATA']
    create_column(COLUMN_N1, table)
    create_column(COLUMN_N2, table)
    create_column(COLUMN_N3, table)

    session = SA_SESSIONMAKER()
    session.execute(text("""UPDATE "OPERATIONS" SET status='5-FINISHED' WHERE status = '4-FINISHED' """))
    session.commit()
    session.close()
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine
    table1 = meta.tables['MAPPED_CONNECTIVITY_DATA']
    create_column(COL_NEW, table1)

    session = SA_SESSIONMAKER()
    session.execute(text("UPDATE \"MAPPED_CONNECTIVITY_DATA\" set _undirected=_unidirectional"))
    session.commit()
    session.close()

    drop_column(COL_OLD, table1)
    drop_column(COL_NOSE_CORRECTION, table1)
def upgrade(_migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    try:
        session = SA_SESSIONMAKER()
        session.execute(text("""UPDATE "BURST_CONFIGURATIONS" SET _simulator_configuration =
            REPLACE(REPLACE(_simulator_configuration, "first_range", "range_1"), "second_range", "range_2");"""))
        session.execute(text("""UPDATE "OPERATIONS" SET parameters =
            REPLACE(REPLACE(parameters, "first_range", "range_1"), "second_range", "range_2");"""))
        session.commit()
        session.close()
    except Exception as excep:
        ## This update is not critical. We can run even in case of error at update
        logger = get_logger(__name__)
        logger.exception(excep)
def downgrade(migrate_engine):
    """Operations to reverse the above upgrade go here."""
    meta.bind = migrate_engine
    table = meta.tables['DATA_TYPES_GROUPS']
    drop_column(COL_RANGES_1, table)
    drop_column(COL_RANGES_2, table)

    session = SA_SESSIONMAKER()
    session.execute(text("""UPDATE "OPERATIONS"
                            SET status =
                                CASE
                                    WHEN status = '4-FINISHED' THEN 'FINISHED'
                                    WHEN status = '3-STARTED' THEN 'STARTED'
                                    WHEN status = '2-CANCELED' THEN 'CANCELED'
                                    ELSE 'ERROR'
                                END
                            WHERE status IN ('4-FINISHED', '2-CANCELED', '3-STARTED', '1-ERROR');"""))
    # Persist the status rewrite and release the session.
    session.commit()
    session.close()
def _exec_update(boolean_value, logger):
    session = SA_SESSIONMAKER()
    try:
        logger.info("Executing Db update script 015...")
        session.execute(text("""UPDATE "MAPPED_TIME_SERIES_REGION_DATA" SET _region_mapping =
            (SELECT dt.gid FROM "MAPPED_REGION_MAPPING_DATA" rm, "DATA_TYPES" dt
             WHERE dt.id = rm.id AND "MAPPED_TIME_SERIES_REGION_DATA"._connectivity = rm._connectivity);"""))
        session.execute(text("""UPDATE "MAPPED_TIME_SERIES_DATA" SET _has_surface_mapping = """ + boolean_value + """
            WHERE EXISTS (SELECT * FROM "DATA_TYPES" dt
                          WHERE dt.id="MAPPED_TIME_SERIES_DATA".id
                          AND dt.type in ('TimeSeriesSurface', 'TimeSeriesEEG', 'TimeSeriesSEEG', 'TimeSeriesMEG'))
               OR EXISTS (SELECT * from "MAPPED_TIME_SERIES_REGION_DATA" tr
                          WHERE tr.id="MAPPED_TIME_SERIES_DATA".id AND tr._region_mapping is not NULL);"""))
        session.execute(text("""UPDATE "MAPPED_TIME_SERIES_DATA" SET _has_volume_mapping = """ + boolean_value + """
            WHERE EXISTS (SELECT * FROM "DATA_TYPES" dt
                          WHERE dt.id="MAPPED_TIME_SERIES_DATA".id AND dt.type in ('TimeSeriesVolume'));"""))
        session.commit()
        logger.info("DB update script 015 committed.")
        return True
    except Exception as excep:
        logger.exception(excep)
        return False
    finally:
        session.close()
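# Hypothetical call sites for _exec_update above: the boolean literal is passed as a string
# because the supported dialects spell booleans differently (mirroring the invalid=1 /
# invalid=TRUE fallback used elsewhere in these scripts):
# _exec_update("1", get_logger(__name__))     # SQLite stores booleans as integers
# _exec_update("TRUE", get_logger(__name__))  # PostgreSQL expects a boolean literal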
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine
    table = meta.tables['USERS']
    drop_column(COLUMN_N1, table)
    table = meta.tables['BURST_CONFIGURATIONS']
    drop_column(COLUMN_N2, table)
    table = meta.tables['OPERATIONS']
    alter_column(COLUMN_N3_OLD, table=table, name=COLUMN_N3_NEW.name)

    try:
        meta.bind = migrate_engine
        session = SA_SESSIONMAKER()
        session.execute(text("""UPDATE "DATA_TYPES" SET module='tvb.datatypes.region_mapping' WHERE "type" = 'RegionMapping' """))
        session.execute(text("""UPDATE "DATA_TYPES" SET module='tvb.datatypes.local_connectivity' WHERE "type" = 'LocalConnectivity' """))
        session.execute(text("""UPDATE "DATA_TYPES" SET module='tvb.datatypes.cortex' WHERE "type" = 'Cortex' """))
        session.commit()
        session.close()
    except Exception:
        LOGGER.exception("Could not update datatypes")
        raise
def downgrade(migrate_engine):
    """Operations to reverse the above upgrade go here."""
    meta.bind = migrate_engine
    table = meta.tables['USERS']
    create_column(COLUMN_N1, table)
    table = meta.tables['BURST_CONFIGURATIONS']
    create_column(COLUMN_N2, table)
    table = meta.tables['OPERATIONS']
    alter_column(COLUMN_N3_NEW, table=table, name=COLUMN_N3_OLD.name)

    try:
        meta.bind = migrate_engine
        session = SA_SESSIONMAKER()
        session.execute(text("""UPDATE "DATA_TYPES" SET module='tvb.datatypes.surfaces' WHERE "type" = 'RegionMapping' """))
        session.execute(text("""UPDATE "DATA_TYPES" SET module='tvb.datatypes.surfaces' WHERE "type" = 'LocalConnectivity' """))
        session.execute(text("""UPDATE "DATA_TYPES" SET module='tvb.datatypes.surfaces' WHERE "type" = 'Cortex' """))
        session.commit()
        session.close()
    except Exception:
        LOGGER.exception("Could not update datatypes")
        raise
def downgrade(migrate_engine):
    """Operations to reverse the above upgrade go here."""
    meta.bind = migrate_engine
    table1 = meta.tables['MAPPED_TIME_SERIES_DATA']
    drop_column(COL_1, table1)
    drop_column(COL_2, table1)
    drop_column(COL_3, table1)
    drop_column(COL_4, table1)

    session = SA_SESSIONMAKER()
    try:
        session.execute(text("ALTER TABLE \"MAPPED_TIME_SERIES_DATA\" "
                             "RENAME COLUMN _labels_ordering to _dim_labels"))
        session.execute(text("ALTER TABLE \"MAPPED_CROSS_CORRELATION_DATA\" "
                             "RENAME COLUMN _labels_ordering to _dim_labels"))
    except sqlalchemy.exc.OperationalError:
        session.execute(text("ALTER TABLE \"MAPPED_TIME_SERIES_DATA\" "
                             "ADD COLUMN _dim_labels VARYING CHARACTER(255)"))
        session.execute(text("ALTER TABLE \"MAPPED_CROSS_CORRELATION_DATA\" "
                             "ADD COLUMN _dim_labels VARYING CHARACTER(255)"))
    session.commit()
    session.close()

    table2 = meta.tables['MAPPED_CONNECTIVITY_DATA']
    drop_column(COL_7, table2)

    for mapping in TABLE_RENAMES:
        session = SA_SESSIONMAKER()
        session.execute(text("ALTER TABLE \"%s\" RENAME TO \"%s\"" % (mapping[1], mapping[0])))
        session.commit()
        session.close()
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta = MetaData(bind=migrate_engine)
    table = _prepare_table(meta, 'USER_PREFERENCES')
    table.c.user_id.alter(name='fk_user')

    table = _prepare_table(meta, 'BURST_CONFIGURATIONS')
    table.c.project_id.alter(name='fk_project')

    table = _prepare_table(meta, 'WORKFLOWS')
    table.c.project_id.alter(name='fk_project')
    table.c.burst_id.alter(name='fk_burst')

    table = _prepare_table(meta, 'WORKFLOW_STEPS')
    table.c.workflow_id.alter(name='fk_workflow')
    table.c.algorithm_id.alter(name='fk_algorithm')
    table.c.resulted_op_id.alter(name='fk_operation')

    table = _prepare_table(meta, 'MAPPED_DATATYPE_MEASURE')
    table.c.analyzed_datatype.alter(name='_analyzed_datatype')

    ## Fix Lookup Table mapping.
    table = _prepare_table(meta, 'MAPPED_LOOK_UP_TABLE_DATA')
    create_column(COL_1, table)
    create_column(COL_2, table)
    create_column(COL_3, table)
    create_column(COL_4, table)
    create_column(COL_5, table)
    create_column(COL_6, table)
    create_column(COL_7, table)

    session = SA_SESSIONMAKER()
    session.execute(text('DELETE FROM "MAPPED_LOOK_UP_TABLE_DATA";'))
    session.execute(text('insert into "MAPPED_LOOK_UP_TABLE_DATA"(id, _equation, _number_of_values, _invdx, _xmax, _xmin, _df, _dx, _data) '
                         'select id, \'\', _number_of_values, _invdx, _xmax, _xmin, _df, _dx, _data from "MAPPED_NERF_TABLE_DATA";'))
    session.execute(text('insert into "MAPPED_LOOK_UP_TABLE_DATA"(id, _equation, _number_of_values, _invdx, _xmax, _xmin, _df, _dx, _data) '
                         'select id, \'\', _number_of_values, _invdx, _xmax, _xmin, _df, _dx, _data from "MAPPED_PSI_TABLE_DATA";'))
    session.commit()
    session.close()

    table = _prepare_table(meta, 'MAPPED_NERF_TABLE_DATA')
    table.drop()
    table = _prepare_table(meta, 'MAPPED_PSI_TABLE_DATA')
    table.drop()
def get_burst_for_migration(burst_id, burst_match_dict, date_format, selected_db):
    """
    This method is supposed to only be used when migrating from version 4 to version 5.
    It finds a BurstConfig in the old format (when it did not inherit from HasTraitsIndex),
    deletes it and returns its parameters.
    """
    session = SA_SESSIONMAKER()
    burst_params = session.execute("""SELECT * FROM "BURST_CONFIGURATION" WHERE id = """ + burst_id).fetchone()
    session.close()

    if burst_params is None:
        return None, False

    burst_params_dict = {'datatypes_number': burst_params['datatypes_number'],
                         'dynamic_ids': burst_params['dynamic_ids'],
                         'range_1': burst_params['range1'],
                         'range_2': burst_params['range2'],
                         'fk_project': burst_params['fk_project'],
                         'name': burst_params['name'],
                         'status': burst_params['status'],
                         'error_message': burst_params['error_message'],
                         'start_time': burst_params['start_time'],
                         'finish_time': burst_params['finish_time'],
                         'fk_simulation': burst_params['fk_simulation'],
                         'fk_operation_group': burst_params['fk_operation_group'],
                         'fk_metric_operation_group': burst_params['fk_metric_operation_group']}

    if selected_db == 'sqlite':
        burst_params_dict['start_time'] = string2date(burst_params_dict['start_time'], date_format=date_format)
        burst_params_dict['finish_time'] = string2date(burst_params_dict['finish_time'], date_format=date_format)

    if burst_id not in burst_match_dict:
        burst_config = BurstConfiguration(burst_params_dict['fk_project'])
        burst_config.datatypes_number = burst_params_dict['datatypes_number']
        burst_config.dynamic_ids = burst_params_dict['dynamic_ids']
        burst_config.error_message = burst_params_dict['error_message']
        burst_config.finish_time = burst_params_dict['finish_time']
        burst_config.fk_metric_operation_group = burst_params_dict['fk_metric_operation_group']
        burst_config.fk_operation_group = burst_params_dict['fk_operation_group']
        burst_config.fk_project = burst_params_dict['fk_project']
        burst_config.fk_simulation = burst_params_dict['fk_simulation']
        burst_config.name = burst_params_dict['name']
        burst_config.range1 = burst_params_dict['range_1']
        burst_config.range2 = burst_params_dict['range_2']
        burst_config.start_time = burst_params_dict['start_time']
        burst_config.status = burst_params_dict['status']
        new_burst = True
    else:
        burst_config = dao.get_burst_by_id(burst_match_dict[burst_id])
        new_burst = False

    return burst_config, new_burst
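# Hypothetical usage during the version 4 -> 5 migration loop (argument values assumed):
# burst_config, is_new = get_burst_for_migration("42", burst_match_dict,
#                                                "%Y-%m-%d %H:%M:%S.%f", "sqlite")
# if is_new:
#     burst_match_dict["42"] = dao.store_entity(burst_config).id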
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine
    table1 = meta.tables['MAPPED_TIME_SERIES_DATA']
    table2 = meta.tables['MAPPED_TIME_SERIES_REGION_DATA']
    table3 = meta.tables['MAPPED_SENSORS_DATA']
    create_column(COL_REG1, table1)
    create_column(COL_REG2, table1)
    create_column(COL_REG3, table2)
    create_column(COL_REG4, table2)
    create_column(COL_SENSORS, table3)

    session = SA_SESSIONMAKER()
    session.execute(text("""UPDATE "MAPPED_TIME_SERIES_REGION_DATA" tr SET _region_mapping =
        (SELECT dt.gid FROM "MAPPED_REGION_MAPPING_DATA" rm, "DATA_TYPES" dt
         WHERE dt.id = rm.id AND tr._connectivity = rm._connectivity);"""))
    # session.execute(text("""UPDATE "MAPPED_TIME_SERIES_REGION_DATA" tr SET _region_mapping_volume =
    #     (SELECT dt.gid FROM "MAPPED_REGION_VOLUME_MAPPING_DATA" rm, "DATA_TYPES" dt
    #      WHERE dt.id = rm.id AND tr._connectivity = rm._connectivity);"""))
    session.execute(text("""UPDATE "MAPPED_TIME_SERIES_DATA" ts SET _has_surface_mapping = True
        WHERE EXISTS (SELECT * FROM "DATA_TYPES" dt
                      WHERE dt.id=ts.id
                      AND dt.type in ('TimeSeriesSurface', 'TimeSeriesEEG', 'TimeSeriesSEEG', 'TimeSeriesMEG'))
           OR EXISTS (SELECT * from "MAPPED_TIME_SERIES_REGION_DATA" tr
                      WHERE tr.id=ts.id AND tr._region_mapping is not NULL);"""))
    session.execute(text("""UPDATE "MAPPED_TIME_SERIES_DATA" ts SET _has_volume_mapping = True
        WHERE EXISTS (SELECT * FROM "DATA_TYPES" dt
                      WHERE dt.id=ts.id AND dt.type in ('TimeSeriesVolume'))
           OR EXISTS (SELECT * from "MAPPED_TIME_SERIES_REGION_DATA" tr
                      WHERE tr.id=ts.id AND tr._region_mapping_volume is not NULL);"""))
    session.commit()
    session.close()
def upgrade(migrate_engine):
    """
    Alter existing table ALGORITHMS, by moving columns from the old ALGORITHM_GROUPS table.
    """
    meta.bind = migrate_engine
    table_algo = meta.tables["ALGORITHMS"]
    for col in ADD_COLUMNS:
        create_column(col, table_algo)

    session = SA_SESSIONMAKER()
    try:
        session.execute(text("ALTER TABLE \"MAPPED_SIMULATION_STATE\" "
                             "ADD COLUMN _current_state VARYING CHARACTER(255)"))
        session.commit()
    except Exception:
        # The SQLite type syntax failed; retry with PostgreSQL's "character varying"
        session.close()
        session = SA_SESSIONMAKER()
        session.execute(text("ALTER TABLE \"MAPPED_SIMULATION_STATE\" "
                             "ADD COLUMN _current_state character varying;"))
        session.commit()
    finally:
        session.close()

    session = SA_SESSIONMAKER()
    try:
        session.execute(text("""UPDATE "ALGORITHMS" SET
            module = (select G.module FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id),
            classname = (select G.classname FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id),
            displayname = (select G.displayname FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id),
            fk_category = (select G.fk_category FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id);"""))
        session.commit()

        # Delete old columns, no longer needed
        for col in DEL_COLUMNS:
            drop_column(col, table_algo)

        # Create constraint only after the rows have been populated
        table_algo = meta.tables["ALGORITHMS"]
        fk_constraint = ForeignKeyConstraint(["fk_category"], ["ALGORITHM_CATEGORIES.id"],
                                             ondelete="CASCADE", table=table_algo)
        fk_constraint.create()

        # Drop old table
        session = SA_SESSIONMAKER()
        session.execute(text("""DROP TABLE "ALGORITHM_GROUPS";"""))
        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()

    session = SA_SESSIONMAKER()
    try:
        session.execute(text("""ALTER TABLE "MAPPED_CONNECTIVITY_ANNOTATIONS" RENAME TO "MAPPED_CONNECTIVITY_ANNOTATIONS_DATA"; """))
        session.execute(text("""ALTER TABLE "MAPPED_DATATYPE_MEASURE" RENAME TO "MAPPED_DATATYPE_MEASURE_DATA"; """))
        session.execute(text("""ALTER TABLE "MAPPED_SIMULATION_STATE" RENAME TO "MAPPED_SIMULATION_STATE_DATA"; """))
        session.execute(text("""ALTER TABLE "MAPPED_VALUE_WRAPPER" RENAME TO "MAPPED_VALUE_WRAPPER_DATA"; """))
        session.execute(text("""ALTER TABLE "MAPPED_PROJECTION_DATA" RENAME TO "MAPPED_PROJECTION_MATRIX_DATA"; """))
        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()
def _adapt_simulation_monitor_params():
    """
    For previous simulations with EEG monitor, adjust the change of input parameters.
    """
    session = SA_SESSIONMAKER()

    param_connectivity = "connectivity"
    param_eeg_proj_old = "monitors_parameters_option_EEG_projection_matrix_data"
    param_eeg_proj_new = "monitors_parameters_option_EEG_projection"
    param_eeg_sensors = "monitors_parameters_option_EEG_sensors"
    param_eeg_rm = "monitors_parameters_option_EEG_region_mapping"

    try:
        all_eeg_ops = session.query(model.Operation).filter(
            model.Operation.parameters.ilike('%"' + param_eeg_proj_old + '"%')).all()
        files_helper = FilesHelper()
        all_bursts = dict()

        for eeg_op in all_eeg_ops:
            try:
                op_params = parse_json_parameters(eeg_op.parameters)
                LOGGER.debug("Updating " + str(op_params))
                old_projection_guid = op_params[param_eeg_proj_old]
                connectivity_guid = op_params[param_connectivity]

                rm = dao.get_generic_entity(RegionMapping, connectivity_guid, "_connectivity")[0]
                dt = dao.get_generic_entity(model.DataType, old_projection_guid, "gid")[0]

                if dt.type == 'ProjectionSurfaceEEG':
                    LOGGER.debug("Previous Prj is surface: " + old_projection_guid)
                    new_projection_guid = old_projection_guid
                else:
                    new_projection_guid = session.execute(text("""SELECT DT.gid
                        FROM "MAPPED_PROJECTION_MATRIX_DATA" PMO, "DATA_TYPES" DTO,
                             "MAPPED_PROJECTION_MATRIX_DATA" PM, "DATA_TYPES" DT
                        WHERE DTO.id=PMO.id and DT.id=PM.id and PM._sensors=PMO._sensors
                              and PM._sources='""" + rm._surface + """'
                              and DTO.gid='""" + old_projection_guid + """';""")).fetchall()[0][0]
                    LOGGER.debug("New Prj is surface: " + str(new_projection_guid))

                sensors_guid = session.execute(text("""SELECT _sensors FROM "MAPPED_PROJECTION_MATRIX_DATA"
                    WHERE id = '""" + str(dt.id) + """';""")).fetchall()[0][0]

                del op_params[param_eeg_proj_old]
                op_params[param_eeg_proj_new] = str(new_projection_guid)
                op_params[param_eeg_sensors] = str(sensors_guid)
                op_params[param_eeg_rm] = str(rm.gid)

                eeg_op.parameters = json.dumps(op_params, cls=MapAsJson.MapAsJsonEncoder)
                LOGGER.debug("New params:" + eeg_op.parameters)
                files_helper.write_operation_metadata(eeg_op)

                burst = dao.get_burst_for_operation_id(eeg_op.id)
                if burst is not None:
                    LOGGER.debug("Updating burst:" + str(burst))
                    burst.prepare_after_load()
                    del burst.simulator_configuration[param_eeg_proj_old]
                    burst.simulator_configuration[param_eeg_proj_new] = {'value': str(new_projection_guid)}
                    burst.simulator_configuration[param_eeg_sensors] = {'value': str(sensors_guid)}
                    burst.simulator_configuration[param_eeg_rm] = {'value': str(rm.gid)}
                    burst._simulator_configuration = json.dumps(burst.simulator_configuration,
                                                                cls=MapAsJson.MapAsJsonEncoder)
                    if burst.id not in all_bursts:
                        all_bursts[burst.id] = burst
            except Exception:
                LOGGER.exception("Could not process " + str(eeg_op))

        session.add_all(all_eeg_ops)
        session.add_all(list(all_bursts.values()))
        session.commit()
    except Exception:
        LOGGER.exception("Could not update Simulation Monitor Params")
    finally:
        session.close()
def upgrade(migrate_engine):
    meta.bind = migrate_engine
    session = SA_SESSIONMAKER()
    try:
        session.execute(text("""ALTER TABLE "BURST_CONFIGURATIONS" RENAME TO "BurstConfiguration"; """))

        # Dropping tables which don't exist in the new version
        session.execute(text("""DROP TABLE "MAPPED_LOOK_UP_TABLE_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_DATATYPE_MEASURE_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_SPATIAL_PATTERN_VOLUME_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_SIMULATION_STATE_DATA";"""))
        session.execute(text("""DROP TABLE "WORKFLOW_STEPS";"""))
        session.execute(text("""DROP TABLE "WORKFLOW_VIEW_STEPS";"""))

        # Dropping tables which will be repopulated from the H5 files
        session.execute(text("""DROP TABLE "MAPPED_COHERENCE_SPECTRUM_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_COMPLEX_COHERENCE_SPECTRUM_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_CONNECTIVITY_ANNOTATIONS_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_CONNECTIVITY_MEASURE_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_CONNECTIVITY_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_CORRELATION_COEFFICIENTS_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_COVARIANCE_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_CROSS_CORRELATION_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_FCD_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_FOURIER_SPECTRUM_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_INDEPENDENT_COMPONENTS_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_LOCAL_CONNECTIVITY_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_PRINCIPAL_COMPONENTS_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_PROJECTION_MATRIX_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_REGION_MAPPING_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_REGION_VOLUME_MAPPING_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_TIME_SERIES_REGION_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_TIME_SERIES_EEG_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_TIME_SERIES_MEG_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_TIME_SERIES_SEEG_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_TIME_SERIES_SURFACE_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_TIME_SERIES_VOLUME_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_SENSORS_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_TRACTS_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_STIMULI_REGION_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_STIMULI_SURFACE_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_STRUCTURAL_MRI_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_SURFACE_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_VALUE_WRAPPER_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_VOLUME_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_WAVELET_COEFFICIENTS_DATA";"""))
        session.execute(text("""DROP TABLE "DATA_TYPES_GROUPS";"""))
        session.execute(text("""DROP TABLE "MAPPED_ARRAY_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_SPATIO_TEMPORAL_PATTERN_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_SPATIAL_PATTERN_DATA";"""))
        session.execute(text("""DROP TABLE "WORKFLOWS";"""))
        session.execute(text("""DROP TABLE "MAPPED_TIME_SERIES_DATA";"""))
        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()

    # MIGRATING USERS #
    users_table = meta.tables['USERS']
    for column in USER_COLUMNS:
        create_column(column, users_table)

    session = SA_SESSIONMAKER()
    try:
        user_ids = eval(str(session.execute(text("""SELECT U.id FROM "USERS" U """)).fetchall()))
        for user_id in user_ids:
            session.execute(text("""UPDATE "USERS" SET display_name = username,
                gid ='""" + uuid.uuid4().hex + """' WHERE id = """ + str(user_id[0])))
        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()

    UniqueConstraint("gid", table=users_table)

    # Migrating BurstConfiguration
    burst_config_table = meta.tables['BurstConfiguration']
    for column in BURST_COLUMNS:
        create_column(column, burst_config_table)

    session = SA_SESSIONMAKER()
    try:
        session.execute(text("""ALTER TABLE "BurstConfiguration" RENAME COLUMN _dynamic_ids TO dynamic_ids"""))
        session.execute(text("""ALTER TABLE "BurstConfiguration" RENAME COLUMN _simulator_configuration TO simulator_gid"""))
        ranges = session.execute(text("""SELECT OG.id, OG.range1, OG.range2 FROM "OPERATION_GROUPS" OG""")).fetchall()
        session.execute(text("""DELETE FROM "BurstConfiguration" WHERE status = 'error'"""))

        ranges_1 = []
        ranges_2 = []
        for r in ranges:
            ranges_1.append(str(r[1]))
            ranges_2.append(str(r[2]))

        new_ranges_1 = migrate_range_params(ranges_1)
        new_ranges_2 = migrate_range_params(ranges_2)
        operation_groups = session.execute(text("""SELECT * FROM "OPERATION_GROUPS" """)).fetchall()

        for op_g in operation_groups:
            op = eval(str(session.execute(text("""SELECT fk_operation_group, parameters, meta_data FROM "OPERATIONS" O
                WHERE O.fk_operation_group = """ + str(op_g[0]))).fetchone()))
            burst_id = eval(op[2])['Burst_Reference']
            if 'time_series' in op[1]:
                session.execute(text("""UPDATE "BurstConfiguration" as B SET fk_metric_operation_group = """
                                     + str(op[0]) + """ WHERE B.id = """ + str(burst_id)))
            else:
                session.execute(text("""UPDATE "BurstConfiguration" as B SET fk_operation_group = """
                                     + str(op[0]) + """ WHERE B.id = """ + str(burst_id)))

        for i in range(len(ranges_1)):
            range1 = str(new_ranges_1[i]).replace('\'', '')
            range2 = str(new_ranges_2[i]).replace('\'', '')
            session.execute(text("""UPDATE "BurstConfiguration" SET range1 = '""" + range1 +
                                 """' WHERE fk_operation_group = """ + str(ranges[i][0])))
            session.execute(text("""UPDATE "OPERATION_GROUPS" SET range1 = '""" + range1 +
                                 """' WHERE id = """ + str(ranges[i][0])))
            if range2 != 'None':
                session.execute(text("""UPDATE "BurstConfiguration" SET range2 = '""" + range2 +
                                     """' WHERE fk_operation_group = """ + str(ranges[i][0])))
                session.execute(text("""UPDATE "OPERATION_GROUPS" SET range2 = '""" + range2 +
                                     """' WHERE id = """ + str(ranges[i][0])))
        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()

    # Drop old column
    drop_column(BURST_DELETED_COLUMN, burst_config_table)

    # Create constraints only after the rows are populated
    fk_burst_config_constraint_1 = ForeignKeyConstraint(["fk_simulation"], ["OPERATIONS.id"],
                                                        table=burst_config_table)
    fk_burst_config_constraint_2 = ForeignKeyConstraint(["fk_operation_group"], ["OPERATION_GROUPS.id"],
                                                        table=burst_config_table)
    fk_burst_config_constraint_3 = ForeignKeyConstraint(["fk_metric_operation_group"], ["OPERATION_GROUPS.id"],
                                                        table=burst_config_table)
    fk_burst_config_constraint_1.create()
    fk_burst_config_constraint_2.create()
    fk_burst_config_constraint_3.create()

    # MIGRATING Operations #
    session = SA_SESSIONMAKER()
    try:
        burst_ref_metadata = session.execute(text("""SELECT id, meta_data FROM "OPERATIONS"
            WHERE meta_data like '%Burst_Reference%' """)).fetchall()
        for metadata in burst_ref_metadata:
            metadata_dict = eval(str(metadata[1]))
            session.execute(text("""UPDATE "OPERATIONS" SET parameters = '""" +
                                 json.dumps(metadata_dict['Burst_Reference']) +
                                 """' WHERE id = """ + str(metadata[0])))

        session.execute(text("""ALTER TABLE "OPERATIONS" RENAME COLUMN parameters TO view_model_gid"""))

        # Name it back to the old name, because we have to keep both tables so we can create BurstConfigurationH5s
        session.execute(text("""ALTER TABLE "BurstConfiguration" RENAME TO "BURST_CONFIGURATION"; """))
        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()

    session = SA_SESSIONMAKER()
    try:
        session.execute(text("""DROP TABLE "ALGORITHMS"; """))
        session.execute(text("""DROP TABLE "ALGORITHM_CATEGORIES"; """))
        session.execute(text("""DROP TABLE "DATA_TYPES"; """))
        session.commit()
    except Exception as excep:
        # If the drops fail, it could mean we are using postgresql
        session.close()
        session = SA_SESSIONMAKER()
        LOGGER.exception(excep)
        try:
            session.execute(text("""DROP TABLE if exists "ALGORITHMS" cascade; """))
            session.execute(text("""DROP TABLE if exists "ALGORITHM_CATEGORIES" cascade; """))
            session.execute(text("""DROP TABLE if exists "DATA_TYPES" cascade; """))
            session.commit()
        except Exception as excep:
            LOGGER.exception(excep)
    finally:
        session.close()

    op_table = meta.tables['OPERATIONS']
    create_column(OP_COLUMN, op_table)
    drop_column(OP_DELETED_COLUMN, op_table)
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine
    table1 = meta.tables['MAPPED_TIME_SERIES_DATA']
    create_column(COL_1, table1)
    create_column(COL_2, table1)
    create_column(COL_3, table1)
    create_column(COL_4, table1)
    create_column(COL_5, table1)

    session = SA_SESSIONMAKER()
    try:
        # We have a database that supports renaming columns. This way we save data from old timeseries.
        session.execute(text("ALTER TABLE \"MAPPED_TIME_SERIES_DATA\" "
                             "RENAME COLUMN _dim_labels to _labels_ordering"))
        session.execute(text("ALTER TABLE \"MAPPED_CROSS_CORRELATION_DATA\" "
                             "RENAME COLUMN _dim_labels to _labels_ordering"))
    except sqlalchemy.exc.OperationalError:
        # We have a database like sqlite. Just create a new column, we're gonna miss old data in this case.
        session.execute(text("ALTER TABLE \"MAPPED_TIME_SERIES_DATA\" "
                             "ADD COLUMN _labels_ordering VARYING CHARACTER(255)"))
        session.execute(text("ALTER TABLE \"MAPPED_CROSS_CORRELATION_DATA\" "
                             "ADD COLUMN _labels_ordering VARYING CHARACTER(255)"))
    session.execute(text("DROP TABLE \"MAPPED_PSI_TABLE_DATA\""))
    session.execute(text("DROP TABLE \"MAPPED_NERF_TABLE_DATA\""))
    session.execute(text("DROP TABLE \"MAPPED_LOOK_UP_TABLES_DATA\""))
    session.commit()
    session.close()

    table2 = meta.tables['MAPPED_CONNECTIVITY_DATA']
    create_column(COL_7, table2)

    for mapping in TABLE_RENAMES:
        session = SA_SESSIONMAKER()
        session.execute(text("ALTER TABLE \"%s\" RENAME TO \"%s\"" % (mapping[0], mapping[1])))
        session.commit()
        session.close()