Example #1
def remove_visualizer_references():
    """
    An algorithm was removed; clean up the left-overs.
    """

    LOGGER.info("Starting to remove references towards old viewer ....")

    session = SA_SESSIONMAKER()
    try:
        session.execute(
            text("""DELETE FROM "OPERATIONS" WHERE fk_from_algo IN
               (SELECT A.id FROM "ALGORITHMS" A, "ALGORITHM_GROUPS" AG
               WHERE  A.fk_algo_group = AG.id AND module = 'tvb.adapters.visualizers.cross_correlation'
                      AND classname = 'PearsonCorrelationCoefficientVisualizer');"""
                 ))

        session.execute(
            text("""DELETE FROM "WORKFLOW_VIEW_STEPS" WHERE fk_algorithm IN
               (SELECT A.id FROM "ALGORITHMS" A, "ALGORITHM_GROUPS" AG
               WHERE  A.fk_algo_group = AG.id AND module = 'tvb.adapters.visualizers.cross_correlation'
                      AND classname = 'PearsonCorrelationCoefficientVisualizer');"""
                 ))
        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()

    LOGGER.info("References removed.")
Example #2
def initialize_startup():
    """ Force DB tables create, in case no data is already found."""
    is_db_empty = False
    session = SA_SESSIONMAKER()
    inspector = reflection.Inspector.from_engine(session.connection())
    if len(inspector.get_table_names()) < 1:
        LOGGER.debug("Database access exception, maybe DB is empty")
        is_db_empty = True
    session.close()

    if is_db_empty:
        LOGGER.info("Initializing Database")
        if os.path.exists(cfg.DB_VERSIONING_REPO):
            shutil.rmtree(cfg.DB_VERSIONING_REPO)
        migratesqlapi.create(cfg.DB_VERSIONING_REPO, os.path.split(cfg.DB_VERSIONING_REPO)[1])
        _update_sql_scripts()
        migratesqlapi.version_control(cfg.DB_URL, cfg.DB_VERSIONING_REPO, version=cfg.DB_CURRENT_VERSION)
        session = SA_SESSIONMAKER()
        model.Base.metadata.create_all(bind=session.connection())
        session.commit()
        session.close()
        LOGGER.info("Database Default Tables created successfully!")
    else:
        _update_sql_scripts()
        migratesqlapi.upgrade(cfg.DB_URL, cfg.DB_VERSIONING_REPO, version=cfg.DB_CURRENT_VERSION)
        LOGGER.info("Database already has some data, will not be re-created!")
    return is_db_empty
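The empty-database test above reduces to SQLAlchemy's inspector API; a standalone sketch, assuming only a reachable engine (the URL is illustrative):

from sqlalchemy import create_engine
from sqlalchemy.engine import reflection

engine = create_engine("sqlite:///tvb-database.db")  # illustrative URL
inspector = reflection.Inspector.from_engine(engine)
is_db_empty = len(inspector.get_table_names()) < 1  # no tables means a fresh DB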
Example #3
def _exec_update(boolean_value, logger):
    session = SA_SESSIONMAKER()
    try:
        logger.info("Executing Db update script 015...")
        session.execute(text("""UPDATE "MAPPED_TIME_SERIES_REGION_DATA" SET _region_mapping =
                        (SELECT dt.gid
                        FROM "MAPPED_REGION_MAPPING_DATA" rm, "DATA_TYPES" dt
                        WHERE dt.id = rm.id AND "MAPPED_TIME_SERIES_REGION_DATA"._connectivity= rm._connectivity);"""))

        session.execute(text("""UPDATE "MAPPED_TIME_SERIES_DATA" SET _has_surface_mapping = """ + boolean_value + """
                            WHERE
                                EXISTS (SELECT * FROM "DATA_TYPES" dt
                                        WHERE dt.id="MAPPED_TIME_SERIES_DATA".id AND dt.type in ('TimeSeriesSurface',
                                                'TimeSeriesEEG', 'TimeSeriesSEEG', 'TimeSeriesMEG'))
                            OR EXISTS (SELECT * from "MAPPED_TIME_SERIES_REGION_DATA" tr
                                    WHERE tr.id="MAPPED_TIME_SERIES_DATA".id AND tr._region_mapping is not NULL);"""))

        session.execute(text("""UPDATE "MAPPED_TIME_SERIES_DATA" SET _has_volume_mapping = """ + boolean_value + """
                            WHERE
                                EXISTS (SELECT * FROM "DATA_TYPES" dt
                                    WHERE dt.id="MAPPED_TIME_SERIES_DATA".id AND dt.type in ('TimeSeriesVolume'));"""))
        session.commit()
        logger.info("DB update script 015 committed.")
        return True

    except Exception as excep:
        logger.exception(excep)
        return False
    finally:
        session.close()
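_exec_update takes the boolean literal as a string because backends spell booleans differently (SQLite stores 1/0, PostgreSQL uses TRUE/FALSE). A hedged sketch of how a caller might pick it; the run_update_015 wrapper and its is_postgres flag are assumptions, not the original call site:

def run_update_015(is_postgres, logger):
    # Choose the literal that the target backend accepts in an UPDATE.
    boolean_value = "TRUE" if is_postgres else "1"
    return _exec_update(boolean_value, logger)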
Example #4
def reset_database():
    """
    Remove all tables in DB.
    """
    LOGGER.warning("Your Database tables will be deleted.")
    try:
        session = SA_SESSIONMAKER()
        LOGGER.debug("Delete connection initiated.")
        inspector = reflection.Inspector.from_engine(session.connection())
        for table in inspector.get_table_names():
            try:
                LOGGER.debug("Removing:" + table)
                session.execute(text("DROP TABLE \"%s\" CASCADE" % table))
            except Exception:
                try:
                    session.execute(text("DROP TABLE %s" % table))
                except Exception as excep1:
                    LOGGER.error("Could no drop table %s", table)
                    LOGGER.exception(excep1)
        session.commit()
        LOGGER.info("Database was cleanup!")
    except Exception as excep:
        LOGGER.warning(excep)
    finally:
        session.close()
Example #5
def remove_visualizer_references():
    """
    An algorithm was removed; clean up the left-overs.
    """

    LOGGER.info("Starting to remove references towards old viewer ....")

    session = SA_SESSIONMAKER()
    try:
        session.execute(text(
            """DELETE FROM "OPERATIONS" WHERE fk_from_algo IN
               (SELECT A.id FROM "ALGORITHMS" A, "ALGORITHM_GROUPS" AG
               WHERE  A.fk_algo_group = AG.id AND module = 'tvb.adapters.visualizers.cross_correlation'
                      AND classname = 'PearsonCorrelationCoefficientVisualizer');"""))

        session.execute(text(
            """DELETE FROM "WORKFLOW_VIEW_STEPS" WHERE fk_algorithm IN
               (SELECT A.id FROM "ALGORITHMS" A, "ALGORITHM_GROUPS" AG
               WHERE  A.fk_algo_group = AG.id AND module = 'tvb.adapters.visualizers.cross_correlation'
                      AND classname = 'PearsonCorrelationCoefficientVisualizer');"""))
        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()

    LOGGER.info("References removed.")
Example #6
def change_algorithm(module, classname, new_module, new_class):
    """
    Change module and classname fields in ALGORITHM_GROUPS table.
    """
    session = SA_SESSIONMAKER()
    try:
        session.execute(text(
            """UPDATE "ALGORITHM_GROUPS"
               SET module = '""" + new_module + """', classname = '""" + new_class + """'
               WHERE module = '""" + module + """' AND classname = '""" + classname + """';"""))
        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()
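The UPDATE above builds its SQL by string concatenation; a hedged equivalent with bound parameters (the function name is illustrative):

def change_algorithm_parameterized(module, classname, new_module, new_class):
    # Hedged sketch: same UPDATE, but quotes in the arguments cannot break
    # or alter the statement because the values are bound, not concatenated.
    session = SA_SESSIONMAKER()
    try:
        session.execute(
            text('UPDATE "ALGORITHM_GROUPS" '
                 'SET module = :new_module, classname = :new_class '
                 'WHERE module = :module AND classname = :classname'),
            {"new_module": new_module, "new_class": new_class,
             "module": module, "classname": classname})
        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()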
Example #7
def _adapt_epileptor_simulations():
    """
    Previous simulations on the EpileptorWithPermittivity model should be converted to use the Epileptor model.
    Because the two models' parameters have different ranges and defaults, we do not translate parameters;
    we only set Epileptor as the model instead of EpileptorPermittivityCoupling and leave the model params at their defaults.
    """
    session = SA_SESSIONMAKER()
    epileptor_old = "EpileptorPermittivityCoupling"
    epileptor_new = "Epileptor"
    param_model = "model"

    try:
        all_ep_ops = session.query(model.Operation).filter(
            model.Operation.parameters.ilike('%"' + epileptor_old + '"%')).all()
        files_helper = FilesHelper()
        all_bursts = dict()

        for ep_op in all_ep_ops:
            try:
                op_params = parse_json_parameters(ep_op.parameters)
                if op_params[param_model] != epileptor_old:
                    LOGGER.debug("Skipping op " + str(op_params[param_model]) + " -- " + str(ep_op))
                    continue

                LOGGER.debug("Updating " + str(op_params))
                op_params[param_model] = epileptor_new
                ep_op.parameters = json.dumps(op_params, cls=MapAsJson.MapAsJsonEncoder)
                LOGGER.debug("New params:" + ep_op.parameters)
                files_helper.write_operation_metadata(ep_op)

                burst = dao.get_burst_for_operation_id(ep_op.id)
                if burst is not None:
                    LOGGER.debug("Updating burst:" + str(burst))
                    burst.prepare_after_load()
                    burst.simulator_configuration[param_model] = {'value': epileptor_new}
                    burst._simulator_configuration = json.dumps(burst.simulator_configuration,
                                                                cls=MapAsJson.MapAsJsonEncoder)
                    if burst.id not in all_bursts:
                        all_bursts[burst.id] = burst

            except Exception:
                LOGGER.exception("Could not process " + str(ep_op))

        session.add_all(all_ep_ops)
        session.add_all(all_bursts.values())
        session.commit()

    except Exception:
        LOGGER.exception("Could not update Simulation Epileptor Params")
    finally:
        session.close()
Example #8
def command_initializer(persist_settings=True, skip_import=False):
    if persist_settings and TvbProfile.is_first_run():
        settings_service = SettingsService()
        settings = {}
        # Save default settings
        for key, setting in settings_service.configurable_keys.items():
            settings[key] = setting['value']
        settings_service.save_settings(**settings)
    TvbProfile.set_profile(TvbProfile.COMMAND_PROFILE)
    # Build new db engine in case DB URL value changed
    new_db_engine = build_db_engine()
    SA_SESSIONMAKER.configure(bind=new_db_engine)

    # Initialize application
    initialize(skip_import)
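A hedged usage sketch for a standalone maintenance script; it assumes nothing beyond the entry point shown above:

if __name__ == "__main__":
    # After this call SA_SESSIONMAKER is bound to the engine for the
    # configured DB URL, so any session created below hits the right DB.
    command_initializer(persist_settings=True, skip_import=False)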
Example #9
def get_burst_for_migration(burst_id, burst_match_dict, date_format, selected_db):
    """
    This method is supposed to only be used when migrating from version 4 to version 5.
    It finds a BurstConfig in the old format (when it did not inherit from HasTraitsIndex), deletes it
    and returns its parameters.
    """
    session = SA_SESSIONMAKER()
    burst_params = session.execute("""SELECT * FROM "BURST_CONFIGURATION" WHERE id = """ + burst_id).fetchone()
    session.close()

    if burst_params is None:
        return None, False

    burst_params_dict = {'datatypes_number': burst_params['datatypes_number'],
                         'dynamic_ids': burst_params['dynamic_ids'], 'range_1': burst_params['range1'],
                         'range_2': burst_params['range2'], 'fk_project': burst_params['fk_project'],
                         'name': burst_params['name'], 'status': burst_params['status'],
                         'error_message': burst_params['error_message'], 'start_time': burst_params['start_time'],
                         'finish_time': burst_params['finish_time'], 'fk_simulation': burst_params['fk_simulation'],
                         'fk_operation_group': burst_params['fk_operation_group'],
                         'fk_metric_operation_group': burst_params['fk_metric_operation_group']}

    if selected_db == 'sqlite':
        burst_params_dict['start_time'] = string2date(burst_params_dict['start_time'], date_format=date_format)
        burst_params_dict['finish_time'] = string2date(burst_params_dict['finish_time'], date_format=date_format)

    if burst_id not in burst_match_dict:
        burst_config = BurstConfiguration(burst_params_dict['fk_project'])
        burst_config.datatypes_number = burst_params_dict['datatypes_number']
        burst_config.dynamic_ids = burst_params_dict['dynamic_ids']
        burst_config.error_message = burst_params_dict['error_message']
        burst_config.finish_time = burst_params_dict['finish_time']
        burst_config.fk_metric_operation_group = burst_params_dict['fk_metric_operation_group']
        burst_config.fk_operation_group = burst_params_dict['fk_operation_group']
        burst_config.fk_project = burst_params_dict['fk_project']
        burst_config.fk_simulation = burst_params_dict['fk_simulation']
        burst_config.name = burst_params_dict['name']
        burst_config.range1 = burst_params_dict['range_1']
        burst_config.range2 = burst_params_dict['range_2']
        burst_config.start_time = burst_params_dict['start_time']
        burst_config.status = burst_params_dict['status']
        new_burst = True
    else:
        burst_config = dao.get_burst_by_id(burst_match_dict[burst_id])
        new_burst = False

    return burst_config, new_burst
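The raw SELECT above concatenates burst_id into the statement; a hedged equivalent with a bound parameter (the helper name is illustrative):

def _fetch_burst_row(burst_id):
    # Hedged sketch: same lookup, with burst_id bound instead of concatenated.
    session = SA_SESSIONMAKER()
    try:
        return session.execute(
            text('SELECT * FROM "BURST_CONFIGURATION" WHERE id = :bid'),
            {"bid": burst_id}).fetchone()
    finally:
        session.close()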
Example #10
def upgrade(_migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    session = SA_SESSIONMAKER()
    session.execute(text("DROP TABLE \"MAPPED_STRUCTURAL_MRI_DATA\""))
    session.commit()
    session.close()
Example #11
    def test_db_mapping(self):
        """ Test DB storage/retrieval of a simple traited attribute"""
        session = SA_SESSIONMAKER()
        model.Base.metadata.create_all(bind=session.connection())
        session.commit()
        session.close()

        # test data
        dikt = {'a': 6}
        tup = ('5', 9.348)
        dtype = numpy.dtype(float)
        json = {'a': 'asdf', 'b': {'23': '687568'}}

        test_inst = MappedTestClass()
        test_inst.dikt = copy.deepcopy(dikt)
        test_inst.tup = copy.deepcopy(tup)
        test_inst.dtype = copy.deepcopy(dtype)
        test_inst.json = copy.deepcopy(json)
        test_inst.set_operation_id(self.operation.id)
        test_inst = dao.store_entity(test_inst)

        test_inst = dao.get_generic_entity(MappedTestClass, test_inst.gid,
                                           'gid')[0]
        self.assertEqual(test_inst.dikt, dikt)
        self.assertEqual(test_inst.tup, tup)
        self.assertEqual(test_inst.dtype, dtype)
        self.assertEqual(test_inst.json, json)
Example #12
def downgrade(migrate_engine):
    """Operations to reverse the above upgrade go here."""
    meta.bind = migrate_engine

    table = meta.tables['MAPPED_SURFACE_DATA']
    drop_column(COLUMN_N1, table)
    drop_column(COLUMN_N2, table)
    drop_column(COLUMN_N3, table)

    session = SA_SESSIONMAKER()
    session.execute(text("""UPDATE "OPERATIONS" SET status='4-FINISHED' WHERE status = '5-FINISHED' """))
    session.execute(text("""UPDATE "OPERATIONS" SET status='3-STARTED' WHERE status = '4-PENDING' """))
    session.commit()
    session.close()
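The COLUMN_N* objects dropped above (like the COL_* names in later examples) are module-level SQLAlchemy Column definitions handed to sqlalchemy-migrate's changeset helpers; a hedged sketch of the pattern, with an illustrative name and type:

from sqlalchemy import Column, Integer
from migrate.changeset.schema import create_column, drop_column

# Illustrative only; the real column names and types live in each migration.
COLUMN_N1 = Column('illustrative_column', Integer, default=0)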
Example #13
def downgrade(_migrate_engine):
    """Operations to reverse the above upgrade go here."""
    try:
        session = SA_SESSIONMAKER()
        session.execute(text("""UPDATE "BURST_CONFIGURATIONS" SET _simulator_configuration =
                                REPLACE(REPLACE(_simulator_configuration, "range_1", "first_range"),
                                                                          "range_2", "second_range");"""))
        session.execute(text("""UPDATE "OPERATIONS" SET parameters =
                                REPLACE(REPLACE(parameters, "range_1", "first_range"), "range_2", "second_range");"""))
        session.commit()
        session.close()
    except Exception as excep:
        ## This update is not critical. We can run even in case of error at update
        logger = get_logger(__name__)
        logger.exception(excep)
Example #14
def downgrade(migrate_engine):
    """Operations to reverse the above upgrade go here."""
    meta.bind = migrate_engine

    table = meta.tables['DATA_TYPES_GROUPS']
    drop_column(COL_RANGES_1, table)
    drop_column(COL_RANGES_2, table)
    
    session = SA_SESSIONMAKER()
    
    session.execute(text("""UPDATE "OPERATIONS"
                               SET status = 
                                CASE
                                    WHEN status = '4-FINISHED' THEN 'FINISHED'
                                    WHEN status = '3-STARTED' THEN 'STARTED'
                                    WHEN status = '2-CANCELED' THEN 'CANCELED'
                                    ELSE 'ERROR'
                                END
                             WHERE status IN ('4-FINISHED', '2-CANCELED', '3-STARTED', '1-ERROR');"""))
    session.commit()
    session.close()
Example #15
    def introspect(self, do_create):
        """
        Introspect a given module to: 
            - create tables for custom DataType;
            - populate adapter algorithms references. 
        """
        self.logger.debug("Introspection into module:" + self.module_name)
        module = __import__(self.module_name, globals(), locals(),
                            ["__init__"])
        try:
            path_adapters = module.ADAPTERS
            self.path_types = module.DATATYPES_PATH
            self.removers_path = module.REMOVERS_PATH
            self.path_portlets = getattr(module, 'PORTLETS_PATH', [])
        except Exception as excep:
            self.logger.warning("Module " + self.module_name +
                                " is not fully introspect compatible!")
            self.logger.warning(str(excep))
            return

        if do_create:
            self.logger.debug("Found Datatypes_Path=" + str(self.path_types))
            # DataTypes only need to be imported for adding to DB tables
            for path in self.path_types:
                self.__get_datatypes(path)

            session = SA_SESSIONMAKER()
            model.Base.metadata.create_all(bind=session.connection())
            session.commit()
            session.close()

            self.logger.debug("Found Adapters_Dict=" + str(path_adapters))
            for category_name in path_adapters:
                category_details = path_adapters[category_name]
                launchable = bool(category_details.get(LAUNCHABLE))
                rawinput = bool(category_details.get(RAWINPUT))
                display = bool(category_details.get(DISPLAYER))
                order_nr = category_details.get(ORDER, 999)
                category_instance = dao.filter_category(
                    category_name, rawinput, display, launchable, order_nr)
                if category_instance is not None:
                    category_instance.last_introspection_check = datetime.datetime.now(
                    )
                    category_instance.removed = False
                else:
                    category_state = category_details.get(STATE, '')
                    category_instance = model.AlgorithmCategory(
                        category_name, launchable, rawinput, display,
                        category_state, order_nr, datetime.datetime.now())
                category_instance = dao.store_entity(category_instance)
                for actual_module in path_adapters[category_name]['modules']:
                    self.__read_adapters(category_instance.id, actual_module)

            for path in self.path_portlets:
                self.__get_portlets(path)
        ### Register Remover instances for current introspected module
        removers.update_dictionary(self.get_removers_dict())
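introspect() reads a fixed set of module-level attributes off the target module; a hedged sketch of that minimal contract (module paths are illustrative, and the exact keys behind LAUNCHABLE/RAWINPUT/DISPLAYER/ORDER/STATE are assumptions):

# A module that introspect() could process (all values illustrative).
ADAPTERS = {
    "Analyze": {
        "modules": ["my_project.adapters.analyzers"],  # scanned by __read_adapters
        # plus optional LAUNCHABLE / RAWINPUT / DISPLAYER / ORDER / STATE keys
    }
}
DATATYPES_PATH = ["my_project.datatypes"]
REMOVERS_PATH = ["my_project.datatype_removers"]
PORTLETS_PATH = ["my_project.portlets"]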
Example #16
def downgrade(migrate_engine):
    """Operations to reverse the above upgrade go here."""
    meta.bind = migrate_engine

    table = meta.tables['MAPPED_SURFACE_DATA']
    drop_column(COLUMN_N1, table)
    drop_column(COLUMN_N2, table)
    drop_column(COLUMN_N3, table)

    session = SA_SESSIONMAKER()
    session.execute(
        text(
            """UPDATE "OPERATIONS" SET status='4-FINISHED' WHERE status = '5-FINISHED' """
        ))
    session.execute(
        text(
            """UPDATE "OPERATIONS" SET status='3-STARTED' WHERE status = '4-PENDING' """
        ))
    session.commit()
    session.close()
Example #17
def downgrade(_migrate_engine):
    """Operations to reverse the above upgrade go here."""
    try:
        session = SA_SESSIONMAKER()
        session.execute(
            text(
                """UPDATE "BURST_CONFIGURATIONS" SET _simulator_configuration =
                                REPLACE(REPLACE(_simulator_configuration, "range_1", "first_range"),
                                                                          "range_2", "second_range");"""
            ))
        session.execute(
            text("""UPDATE "OPERATIONS" SET parameters =
                                REPLACE(REPLACE(parameters, "range_1", "first_range"), "range_2", "second_range");"""
                 ))
        session.commit()
        session.close()
    except Exception as excep:
        ## This update is not critical. We can run even in case of error at update
        logger = get_logger(__name__)
        logger.exception(excep)
Example #18
def downgrade(migrate_engine):
    """Operations to reverse the above upgrade go here."""
    meta.bind = migrate_engine
    table1 = meta.tables['MAPPED_CONNECTIVITY_DATA']

    create_column(COL_OLD, table1)

    session = SA_SESSIONMAKER()
    session.execute(text("UPDATE \"MAPPED_CONNECTIVITY_DATA\" set _unidirectional=_undirected"))
    session.commit()
    session.close()

    drop_column(COL_NEW, table1)
    create_column(COL_NOSE_CORRECTION, table1)
Example #19
    def test_db_mapping(self):
        """ Test DB storage/retrieval of a simple traited attribute"""
        session = SA_SESSIONMAKER()
        model.Base.metadata.create_all(bind=session.connection())
        session.commit()
        session.close()
        
        # test data
        dikt = {'a': 6}
        tup = ('5', 9.348)
        dtype = numpy.dtype(float)
        json = {'a': 'asdf', 'b': {'23': '687568'}}

        test_inst = MappedTestClass()
        test_inst.dikt = copy.deepcopy(dikt)
        test_inst.tup = copy.deepcopy(tup)
        test_inst.dtype = copy.deepcopy(dtype)
        test_inst.json = copy.deepcopy(json)
        test_inst.set_operation_id(self.operation.id)
        test_inst = dao.store_entity(test_inst)

        test_inst = dao.get_generic_entity(MappedTestClass, test_inst.gid, 'gid')[0]
        self.assertEqual(test_inst.dikt, dikt)
        self.assertEqual(test_inst.tup, tup)
        self.assertEqual(test_inst.dtype, dtype)
        self.assertEqual(test_inst.json, json)
Example #20
def upgrade(_migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    try:
        session = SA_SESSIONMAKER()
        session.execute(text("""UPDATE "BURST_CONFIGURATIONS" SET _simulator_configuration =
                                REPLACE(REPLACE(_simulator_configuration, "first_range", "range_1"),
                                                                          "second_range", "range_2");"""))
        session.execute(text("""UPDATE "OPERATIONS" SET parameters =
                                REPLACE(REPLACE(parameters, "first_range", "range_1"), "second_range", "range_2");"""))
        session.commit()
        session.close()
    except Exception as excep:
        ## This update is not critical. We can run even in case of error at update
        logger = get_logger(__name__)
        logger.exception(excep)
Example #21
def change_algorithm(module, classname, new_module, new_class):
    """
    Change module and classname fields in ALGORITHM_GROUPS table.
    """
    session = SA_SESSIONMAKER()
    try:
        session.execute(text(
            """UPDATE "ALGORITHM_GROUPS"
               SET module = '""" + new_module + """', classname = '""" + new_class + """'
               WHERE module = '""" + module + """' AND classname = '""" + classname + """';"""))
        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()
Example #22
def upgrade(_migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    try:
        session = SA_SESSIONMAKER()
        session.execute(
            text(
                """UPDATE "BURST_CONFIGURATIONS" SET _simulator_configuration =
                                REPLACE(REPLACE(_simulator_configuration, "first_range", "range_1"),
                                                                          "second_range", "range_2");"""
            ))
        session.execute(
            text("""UPDATE "OPERATIONS" SET parameters =
                                REPLACE(REPLACE(parameters, "first_range", "range_1"), "second_range", "range_2");"""
                 ))
        session.commit()
        session.close()
    except Exception as excep:
        ## This update is not critical. We can run even in case of error at update
        logger = get_logger(__name__)
        logger.exception(excep)
Example #23
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine
    table1 = meta.tables['MAPPED_CONNECTIVITY_DATA']

    create_column(COL_NEW, table1)

    session = SA_SESSIONMAKER()
    session.execute(text("UPDATE \"MAPPED_CONNECTIVITY_DATA\" set _undirected=_unidirectional"))
    session.commit()
    session.close()

    drop_column(COL_OLD, table1)
    drop_column(COL_NOSE_CORRECTION, table1)
Example #24
def reset_database():
    """
    Remove all tables in DB.
    """
    LOGGER.warning("Your Database tables will be deleted.")
    try:
        session = SA_SESSIONMAKER()
        LOGGER.debug("Delete connection initiated.")
        inspector = reflection.Inspector.from_engine(session.connection())
        for table in inspector.get_table_names():
            try:
                LOGGER.debug("Removing:" + table)
                session.execute(text("DROP TABLE \"%s\" CASCADE" % table))
            except Exception:
                try:
                    session.execute(text("DROP TABLE %s" % table))
                except Exception as excep1:
                    LOGGER.error("Could no drop table %s", table)
                    LOGGER.exception(excep1)
        session.commit()
        LOGGER.info("Database was cleanup!")
Example #25
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine

    table = meta.tables['MAPPED_SURFACE_DATA']
    create_column(COLUMN_N1, table)
    create_column(COLUMN_N2, table)
    create_column(COLUMN_N3, table)

    session = SA_SESSIONMAKER()
    session.execute(
        text(
            """UPDATE "OPERATIONS" SET status='5-FINISHED' WHERE status = '4-FINISHED' """
        ))
    session.commit()
    session.close()
Example #26
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine

    table = meta.tables['MAPPED_SURFACE_DATA']
    create_column(COLUMN_N1, table)
    create_column(COLUMN_N2, table)
    create_column(COLUMN_N3, table)

    session = SA_SESSIONMAKER()
    session.execute(text("""UPDATE "OPERATIONS" SET status='5-FINISHED' WHERE status = '4-FINISHED' """))
    session.commit()
    session.close()
Example #27
    def introspect(self, do_create):
        """
        Introspect a given module to: 
            - create tables for custom DataType;
            - populate adapter algorithms references. 
        """
        self.logger.debug("Introspection into module:" + self.module_name)
        module = __import__(self.module_name, globals(), locals(), ["__init__"])
        try:
            path_adapters = module.ADAPTERS
            self.path_types = module.DATATYPES_PATH
            self.removers_path = module.REMOVERS_PATH
            self.path_portlets = getattr(module, 'PORTLETS_PATH', [])
        except Exception as excep:
            self.logger.warning("Module " + self.module_name + " is not fully introspect compatible!")
            self.logger.warning(str(excep))
            return

        if do_create:
            self.logger.debug("Found Datatypes_Path=" + str(self.path_types))
            # DataTypes only need to be imported for adding to DB tables
            for path in self.path_types:
                self.__get_datatypes(path)

            session = SA_SESSIONMAKER()
            model.Base.metadata.create_all(bind=session.connection())
            session.commit()
            session.close()

            self.logger.debug("Found Adapters_Dict=" + str(path_adapters))
            for category_name in path_adapters:
                category_details = path_adapters[category_name]
                launchable = bool(category_details.get(LAUNCHABLE))
                rawinput = bool(category_details.get(RAWINPUT))
                display = bool(category_details.get(DISPLAYER))
                order_nr = category_details.get(ORDER, 999)
                category_instance = dao.filter_category(category_name, rawinput, display, launchable, order_nr)
                if category_instance is not None:
                    category_instance.last_introspection_check = datetime.datetime.now()
                    category_instance.removed = False
                else:
                    category_state = category_details.get(STATE, '')
                    category_instance = model.AlgorithmCategory(category_name, launchable, rawinput, display,
                                                                category_state, order_nr, datetime.datetime.now())
                category_instance = dao.store_entity(category_instance)
                for actual_module in path_adapters[category_name]['modules']:
                    self.__read_adapters(category_instance.id, actual_module)

            for path in self.path_portlets:
                self.__get_portlets(path)
        ### Register Remover instances for current introspected module
        removers.update_dictionary(self.get_removers_dict())
Example #28
                self.path_portlets = module.PORTLETS_PATH
            else:
                self.path_portlets = []
        except Exception as excep:
            self.logger.warning("Module " + self.module_name +
                                " is not fully introspect compatible!")
            self.logger.warning(str(excep))
            return

        if do_create:
            self.logger.debug("Found Datatypes_Path=" + str(self.path_types))
            # DataTypes only need to be imported for adding to DB tables
            for path in self.path_types:
                self.__get_datatypes(path)

            session = SA_SESSIONMAKER()
            model.Base.metadata.create_all(bind=session.connection())
            session.commit()
            session.close()

            self.logger.debug("Found Adapters_Dict=" + str(path_adapters))
            for category_name in path_adapters:
                category_details = path_adapters[category_name]
                launchable = (LAUNCHABLE in category_details
                              and category_details[LAUNCHABLE])
                rawinput = (RAWINPUT in category_details
                            and category_details[RAWINPUT])
                display = (DISPLAYER in category_details
                           and category_details[DISPLAYER])
                if ORDER in category_details:
                    order_nr = category_details[ORDER]
Example #29
def upgrade(migrate_engine):
    meta.bind = migrate_engine

    session = SA_SESSIONMAKER()

    try:
        session.execute(
            text("""ALTER TABLE "BURST_CONFIGURATIONS"
                                        RENAME TO "BurstConfiguration"; """))

        # Dropping tables which don't exist in the new version
        session.execute(text("""DROP TABLE "MAPPED_LOOK_UP_TABLE_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_DATATYPE_MEASURE_DATA";"""))
        session.execute(
            text("""DROP TABLE "MAPPED_SPATIAL_PATTERN_VOLUME_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_SIMULATION_STATE_DATA";"""))
        session.execute(text("""DROP TABLE "WORKFLOW_STEPS";"""))
        session.execute(text("""DROP TABLE "WORKFLOW_VIEW_STEPS";"""))

        # Dropping tables which will be repopulated from the H5 files
        session.execute(
            text("""DROP TABLE "MAPPED_COHERENCE_SPECTRUM_DATA";"""))
        session.execute(
            text("""DROP TABLE "MAPPED_COMPLEX_COHERENCE_SPECTRUM_DATA";"""))
        session.execute(
            text("""DROP TABLE "MAPPED_CONNECTIVITY_ANNOTATIONS_DATA";"""))
        session.execute(
            text("""DROP TABLE "MAPPED_CONNECTIVITY_MEASURE_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_CONNECTIVITY_DATA";"""))
        session.execute(
            text("""DROP TABLE "MAPPED_CORRELATION_COEFFICIENTS_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_COVARIANCE_DATA";"""))
        session.execute(
            text("""DROP TABLE "MAPPED_CROSS_CORRELATION_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_FCD_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_FOURIER_SPECTRUM_DATA";"""))
        session.execute(
            text("""DROP TABLE "MAPPED_INDEPENDENT_COMPONENTS_DATA";"""))
        session.execute(
            text("""DROP TABLE "MAPPED_LOCAL_CONNECTIVITY_DATA";"""))
        session.execute(
            text("""DROP TABLE "MAPPED_PRINCIPAL_COMPONENTS_DATA";"""))
        session.execute(
            text("""DROP TABLE "MAPPED_PROJECTION_MATRIX_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_REGION_MAPPING_DATA";"""))
        session.execute(
            text("""DROP TABLE "MAPPED_REGION_VOLUME_MAPPING_DATA";"""))
        session.execute(
            text("""DROP TABLE "MAPPED_TIME_SERIES_REGION_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_TIME_SERIES_EEG_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_TIME_SERIES_MEG_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_TIME_SERIES_SEEG_DATA";"""))
        session.execute(
            text("""DROP TABLE "MAPPED_TIME_SERIES_SURFACE_DATA";"""))
        session.execute(
            text("""DROP TABLE "MAPPED_TIME_SERIES_VOLUME_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_SENSORS_DATA" """))
        session.execute(text("""DROP TABLE "MAPPED_TRACTS_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_STIMULI_REGION_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_STIMULI_SURFACE_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_STRUCTURAL_MRI_DATA" """))
        session.execute(text("""DROP TABLE "MAPPED_SURFACE_DATA" """))
        session.execute(text("""DROP TABLE "MAPPED_VALUE_WRAPPER_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_VOLUME_DATA" """))
        session.execute(
            text("""DROP TABLE "MAPPED_WAVELET_COEFFICIENTS_DATA";"""))
        session.execute(text("""DROP TABLE "DATA_TYPES_GROUPS";"""))
        session.execute(text("""DROP TABLE "MAPPED_ARRAY_DATA";"""))
        session.execute(
            text("""DROP TABLE "MAPPED_SPATIO_TEMPORAL_PATTERN_DATA" """))
        session.execute(text("""DROP TABLE "MAPPED_SPATIAL_PATTERN_DATA";"""))
        session.execute(text("""DROP TABLE "WORKFLOWS";"""))
        session.execute(text("""DROP TABLE "MAPPED_TIME_SERIES_DATA";"""))
        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()

    # MIGRATING USERS #
    users_table = meta.tables['USERS']
    for column in USER_COLUMNS:
        create_column(column, users_table)

    session = SA_SESSIONMAKER()
    try:
        user_ids = eval(
            str(
                session.execute(
                    text("""SELECT U.id
                            FROM "USERS" U """)).fetchall()))

        for id in user_ids:
            session.execute(
                text("""UPDATE "USERS" SET display_name = username,
                gid ='""" + uuid.uuid4().hex + """' WHERE id = """ +
                     str(id[0])))
        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()

    UniqueConstraint("gid", table=users_table)

    # Migrating BurstConfiguration
    burst_config_table = meta.tables['BurstConfiguration']
    for column in BURST_COLUMNS:
        create_column(column, burst_config_table)

    session = SA_SESSIONMAKER()
    try:
        session.execute(
            text("""ALTER TABLE "BurstConfiguration"
                                RENAME COLUMN _dynamic_ids TO dynamic_ids"""))
        session.execute(
            text("""ALTER TABLE "BurstConfiguration"
                                RENAME COLUMN _simulator_configuration TO simulator_gid"""
                 ))

        ranges = session.execute(
            text("""SELECT OG.id, OG.range1, OG.range2
                            FROM "OPERATION_GROUPS" OG""")).fetchall()
        session.execute(
            text(
                """DELETE FROM "BurstConfiguration" WHERE status = \'error\'"""
            ))

        ranges_1 = []
        ranges_2 = []

        for r in ranges:
            ranges_1.append(str(r[1]))
            ranges_2.append(str(r[2]))

        new_ranges_1 = migrate_range_params(ranges_1)
        new_ranges_2 = migrate_range_params(ranges_2)
        operation_groups = session.execute(
            text("""SELECT * FROM "OPERATION_GROUPS" """)).fetchall()

        for op_g in operation_groups:
            op = eval(
                str(
                    session.execute(
                        text(
                            """SELECT fk_operation_group, parameters, meta_data 
            FROM "OPERATIONS" O WHERE O.fk_operation_group = """ +
                            str(op_g[0]))).fetchone()))
            burst_id = eval(op[2])['Burst_Reference']

            if 'time_series' in op[1]:
                session.execute(
                    text(
                        """UPDATE "BurstConfiguration" as B SET fk_metric_operation_group = """
                        + str(op[0]) + """ WHERE B.id = """ + str(burst_id)))
            else:
                session.execute(
                    text(
                        """UPDATE "BurstConfiguration" as B SET fk_operation_group = """
                        + str(op[0]) + """ WHERE B.id = """ + str(burst_id)))

        for i in range(len(ranges_1)):
            range1 = str(new_ranges_1[i]).replace('\'', '')
            range2 = str(new_ranges_2[i]).replace('\'', '')

            session.execute(
                text("""UPDATE "BurstConfiguration" SET
                range1 = '""" + range1 + """'
                WHERE fk_operation_group = """ + str(ranges[i][0])))

            session.execute(
                text("""UPDATE "OPERATION_GROUPS" SET
                range1 = '""" + range1 + """'
                WHERE id = """ + str(ranges[i][0])))

            if range2 != 'None':
                session.execute(
                    text("""UPDATE "BurstConfiguration" SET
                    range2 = '""" + range2 + """'
                    WHERE fk_operation_group = """ + str(ranges[i][0])))

                session.execute(
                    text("""UPDATE "OPERATION_GROUPS" SET
                    range2 = '""" + range2 + """'
                    WHERE id = """ + str(ranges[i][0])))

        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()

    # Drop old column
    drop_column(BURST_DELETED_COLUMN, burst_config_table)

    # Create constraints only after the rows are populated
    fk_burst_config_constraint_1 = ForeignKeyConstraint(
        ["fk_simulation"], ["OPERATIONS.id"], table=burst_config_table)
    fk_burst_config_constraint_2 = ForeignKeyConstraint(
        ["fk_operation_group"], ["OPERATION_GROUPS.id"],
        table=burst_config_table)
    fk_burst_config_constraint_3 = ForeignKeyConstraint(
        ["fk_metric_operation_group"], ["OPERATION_GROUPS.id"],
        table=burst_config_table)

    fk_burst_config_constraint_1.create()
    fk_burst_config_constraint_2.create()
    fk_burst_config_constraint_3.create()

    # MIGRATING Operations #
    session = SA_SESSIONMAKER()
    try:
        burst_ref_metadata = session.execute(
            text("""SELECT id, meta_data FROM "OPERATIONS"
                    WHERE meta_data like '%Burst_Reference%' """)).fetchall()

        for metadata in burst_ref_metadata:
            metadata_dict = eval(str(metadata[1]))
            session.execute(
                text("""UPDATE "OPERATIONS" SET parameters = '""" +
                     json.dumps(metadata_dict['Burst_Reference']) +
                     """' WHERE id = """ + str(metadata[0])))

        session.execute(
            text("""ALTER TABLE "OPERATIONS"
                                    RENAME COLUMN parameters TO view_model_gid"""
                 ))

        # Name it back to the old name, because we have to keep both tables so we can create BurstConfigurationH5s
        session.execute(
            text("""ALTER TABLE "BurstConfiguration"
                                                RENAME TO "BURST_CONFIGURATION"; """
                 ))
        session.commit()
    except Exception as excep:
        LOGGER.exception(excep)
    finally:
        session.close()

    session = SA_SESSIONMAKER()
    try:
        session.execute(text("""DROP TABLE "ALGORITHMS"; """))
        session.execute(text("""DROP TABLE "ALGORITHM_CATEGORIES"; """))
        session.execute(text("""DROP TABLE "DATA_TYPES"; """))
        session.commit()
    except Exception as excep:
        # If the drops fail, it could mean we are using postgresql
        session.close()
        session = SA_SESSIONMAKER()
        LOGGER.exception(excep)
        try:
            session.execute(
                text("""DROP TABLE if exists "ALGORITHMS" cascade; """))
            session.execute(
                text(
                    """DROP TABLE if exists "ALGORITHM_CATEGORIES" cascade; """
                ))
            session.execute(
                text("""DROP TABLE if exists "DATA_TYPES" cascade; """))
            session.commit()
        except Exception as excep:
            LOGGER.exception(excep)
    finally:
        session.close()

    op_table = meta.tables['OPERATIONS']
    create_column(OP_COLUMN, op_table)
    drop_column(OP_DELETED_COLUMN, op_table)
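An aside on the eval(str(...fetchall())) pattern used above: result rows are already tuple-like, so the values can be read directly. A hedged sketch (the helper name is illustrative):

def fetch_user_ids(session):
    # Equivalent to the eval()-based read of 'SELECT U.id FROM "USERS" U'.
    rows = session.execute(text('SELECT U.id FROM "USERS" U')).fetchall()
    return [row[0] for row in rows]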
Example #30
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine
    table1 = meta.tables['MAPPED_TIME_SERIES_DATA']
    table2 = meta.tables['MAPPED_TIME_SERIES_REGION_DATA']

    create_column(COL_REG1, table1)
    create_column(COL_REG2, table1)
    create_column(COL_REG3, table2)
    create_column(COL_REG4, table2)

    session = SA_SESSIONMAKER()
    session.execute(
        text(
            """UPDATE "MAPPED_TIME_SERIES_REGION_DATA" tr SET _region_mapping =
                        (SELECT dt.gid
                         FROM "MAPPED_REGION_MAPPING_DATA" rm, "DATA_TYPES" dt
                         WHERE dt.id = rm.id AND tr._connectivity = rm._connectivity);"""
        ))
    # session.execute(text("""UPDATE "MAPPED_TIME_SERIES_REGION_DATA" tr SET _region_mapping_volume =
    #                     (SELECT dt.gid
    #                      FROM "MAPPED_REGION_VOLUME_MAPPING_DATA" rm, "DATA_TYPES" dt
    #                      WHERE dt.id = rm.id AND tr._connectivity = rm._connectivity);"""))
    session.execute(
        text(
            """UPDATE "MAPPED_TIME_SERIES_DATA" ts SET _has_surface_mapping = True
                        WHERE
                            EXISTS (SELECT * FROM "DATA_TYPES" dt
                                    WHERE dt.id=ts.id AND dt.type in ('TimeSeriesSurface', 'TimeSeriesEEG',
                                            'TimeSeriesSEEG', 'TimeSeriesMEG'))
                         OR EXISTS (SELECT * from "MAPPED_TIME_SERIES_REGION_DATA" tr
                                    WHERE tr.id=ts.id AND tr._region_mapping is not NULL);"""
        ))
    session.execute(
        text(
            """UPDATE "MAPPED_TIME_SERIES_DATA" ts SET _has_volume_mapping = True
                        WHERE
                            EXISTS (SELECT * FROM "DATA_TYPES" dt
                                    WHERE dt.id=ts.id AND dt.type in ('TimeSeriesVolume'))
                         OR EXISTS (SELECT * from "MAPPED_TIME_SERIES_REGION_DATA" tr
                                    WHERE tr.id=ts.id AND tr._region_mapping_volume is not NULL);"""
        ))

    session.commit()
    session.close()
Example #31
def downgrade(migrate_engine):
    """Operations to reverse the above upgrade go here."""
    meta.bind = migrate_engine

    table = meta.tables['USERS']
    create_column(COLUMN_N1, table)
    table = meta.tables['BURST_CONFIGURATIONS']
    create_column(COLUMN_N2, table)
    table = meta.tables['OPERATIONS']
    alter_column(COLUMN_N3_NEW, table=table, name=COLUMN_N3_OLD.name)

    try:
        meta.bind = migrate_engine
        session = SA_SESSIONMAKER()
        session.execute(text("""UPDATE "DATA_TYPES" SET module='tvb.datatypes.surfaces' WHERE "type" = 'RegionMapping' """))
        session.execute(text("""UPDATE "DATA_TYPES" SET module='tvb.datatypes.surfaces' WHERE "type" = 'LocalConnectivity' """))
        session.execute(text("""UPDATE "DATA_TYPES" SET module='tvb.datatypes.surfaces' WHERE "type" = 'Cortex' """))
        session.commit()
        session.close()
    except Exception:
        LOGGER.exception("Cold not update datatypes")
        raise
Example #32
def _transfer_projection_matrices():
    """
    Previous ProjectionRegionM/EEG objects should be removed,
    and ProjectionSurfaceM/EEG objects should be transferred into the new DB tables.
    """
    session = SA_SESSIONMAKER()
    LOGGER.info("Transferring Projections Surface ...")

    try:
        # Only after SQLAlchemy finishes initialization does the new table MAPPED_PROJECTION_DATA exist
        session.execute(text("""INSERT into "MAPPED_PROJECTION_DATA" (id, _sources, _sensors, _projection_type)
                            SELECT PS.id, PM._sources, PM._sensors, 'projEEG'
                            FROM "MAPPED_PROJECTION_SURFACE_EEG_DATA" PS, "MAPPED_PROJECTION_MATRIX_DATA" PM
                            WHERE PM.id=PS.id;"""))

        session.execute(text("""INSERT into "MAPPED_PROJECTION_DATA" (id, _sources, _sensors, _projection_type)
                            SELECT PS.id, PM._sources, PM._sensors, 'projMEG'
                            FROM "MAPPED_PROJECTION_SURFACE_MEG_DATA" PS, "MAPPED_PROJECTION_MATRIX_DATA" PM
                            WHERE PM.id=PS.id;"""))

        session.execute(text("""DROP TABLE "MAPPED_PROJECTION_SURFACE_EEG_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_PROJECTION_SURFACE_MEG_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_PROJECTION_MATRIX_DATA";"""))

        LOGGER.info("Removing Projections Region ...")

        session.execute(text("""DELETE from "DATA_TYPES"
                            WHERE type in ('ProjectionRegionEEG', 'ProjectionRegionMEG');"""))
        session.commit()

    except Exception:
        LOGGER.exception("Could not update Projection references")

    finally:
        session.close()
Example #33
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine

    table = meta.tables['DATA_TYPES_GROUPS']
    create_column(COL_RANGES_1, table)
    create_column(COL_RANGES_2, table)

    try:
        ## Iterate DataTypeGroups from previous code-versions and try to update value for the new column.
        previous_groups = dao.get_generic_entity(model.DataTypeGroup, "0",
                                                 "no_of_ranges")

        for group in previous_groups:

            operation_group = dao.get_operationgroup_by_id(
                group.fk_operation_group)
            #group.only_numeric_ranges = operation_group.has_only_numeric_ranges

            if operation_group.range3 is not None:
                group.no_of_ranges = 3
            elif operation_group.range2 is not None:
                group.no_of_ranges = 2
            elif operation_group.range1 is not None:
                group.no_of_ranges = 1
            else:
                group.no_of_ranges = 0

            dao.store_entity(group)

    except Exception as excep:
        ## we can live with a column only having default value. We will not stop the startup.
        logger = get_logger(__name__)
        logger.exception(excep)

    session = SA_SESSIONMAKER()
    session.execute(
        text("""UPDATE "OPERATIONS"
                               SET status = 
                                CASE
                                    WHEN status = 'FINISHED' THEN '4-FINISHED'
                                    WHEN status = 'STARTED' THEN '3-STARTED'
                                    WHEN status = 'CANCELED' THEN '2-CANCELED'
                                    ELSE '1-ERROR'
                                END
                             WHERE status IN ('FINISHED', 'CANCELED', 'STARTED', 'ERROR');"""
             ))
    session.commit()
    session.close()

    try:
        session = SA_SESSIONMAKER()
        # TODO: fix me
        # for sim_state in session.query(SimulationState).filter(SimulationState.fk_datatype_group is not None).all():
        #     session.delete(sim_state)
        session.commit()
        session.close()
    except Exception as excep:
        ## It might happen that SimulationState table is not yet created, e.g. if user has version 1.0.2
        logger = get_logger(__name__)
        logger.exception(excep)
Example #34
def downgrade(migrate_engine):
    """Operations to reverse the above upgrade go here."""
    meta.bind = migrate_engine

    table1 = meta.tables['MAPPED_TIME_SERIES_DATA']
    drop_column(COL_1, table1)
    drop_column(COL_2, table1)
    drop_column(COL_3, table1)
    drop_column(COL_4, table1)

    session = SA_SESSIONMAKER()
    try:
        session.execute(text("ALTER TABLE \"MAPPED_TIME_SERIES_DATA\" "
                             "RENAME COLUMN _labels_ordering to _dim_labels"))
        session.execute(text("ALTER TABLE \"MAPPED_CROSS_CORRELATION_DATA\" "
                             "RENAME COLUMN _labels_ordering to _dim_labels"))

    except sqlalchemy.exc.OperationalError:
        session.execute(text("ALTER TABLE \"MAPPED_TIME_SERIES_DATA\" "
                             "ADD COLUMN _dim_labels VARYING CHARACTER(255)"))
        session.execute(text("ALTER TABLE \"MAPPED_CROSS_CORRELATION_DATA\" "
                             "ADD COLUMN _dim_labels VARYING CHARACTER(255)"))
    session.commit()
    session.close()

    table2 = meta.tables['MAPPED_CONNECTIVITY_DATA']
    drop_column(COL_7, table2)

    for mapping in TABLE_RENAMES:
        session = SA_SESSIONMAKER()
        session.execute(text("ALTER TABLE \"%s\" RENAME TO \"%s\"" % (mapping[1], mapping[0])))
        session.commit()
        session.close()
Example #35
def downgrade(migrate_engine):
    """Operations to reverse the above upgrade go here."""
    meta.bind = migrate_engine

    table = meta.tables['USERS']
    create_column(COLUMN_N1, table)
    table = meta.tables['BURST_CONFIGURATIONS']
    create_column(COLUMN_N2, table)
    table = meta.tables['OPERATIONS']
    alter_column(COLUMN_N3_NEW, table=table, name=COLUMN_N3_OLD.name)

    try:
        meta.bind = migrate_engine
        session = SA_SESSIONMAKER()
        session.execute(text("""UPDATE "DATA_TYPES" SET module='tvb.datatypes.surfaces' WHERE "type" = 'RegionMapping' """))
        session.execute(text("""UPDATE "DATA_TYPES" SET module='tvb.datatypes.surfaces' WHERE "type" = 'LocalConnectivity' """))
        session.execute(text("""UPDATE "DATA_TYPES" SET module='tvb.datatypes.surfaces' WHERE "type" = 'Cortex' """))
        session.commit()
        session.close()
    except Exception:
        LOGGER.exception("Cold not update datatypes")
        raise
Example #36
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine

    table = meta.tables['DATA_TYPES_GROUPS']
    create_column(COL_RANGES_1, table)
    create_column(COL_RANGES_2, table)

    try:
        ## Iterate DataTypeGroups from previous code-versions and try to update value for the new column.
        previous_groups = dao.get_generic_entity(model.DataTypeGroup, "0", "no_of_ranges")

        for group in previous_groups:

            operation_group = dao.get_operationgroup_by_id(group.fk_operation_group)
            #group.only_numeric_ranges = operation_group.has_only_numeric_ranges

            if operation_group.range3 is not None:
                group.no_of_ranges = 3
            elif operation_group.range2 is not None:
                group.no_of_ranges = 2
            elif operation_group.range1 is not None:
                group.no_of_ranges = 1
            else:
                group.no_of_ranges = 0

            dao.store_entity(group)

    except Exception as excep:
        ## we can live with a column only having default value. We will not stop the startup.
        logger = get_logger(__name__)
        logger.exception(excep)
        
    session = SA_SESSIONMAKER()
    session.execute(text("""UPDATE "OPERATIONS"
                               SET status = 
                                CASE
                                    WHEN status = 'FINISHED' THEN '4-FINISHED'
                                    WHEN status = 'STARTED' THEN '3-STARTED'
                                    WHEN status = 'CANCELED' THEN '2-CANCELED'
                                    ELSE '1-ERROR'
                                END
                             WHERE status IN ('FINISHED', 'CANCELED', 'STARTED', 'ERROR');"""))
    session.commit()
    session.close()

    try:
        session = SA_SESSIONMAKER()
        for sim_state in session.query(SimulationState).filter(SimulationState.fk_datatype_group is not None).all():
            session.delete(sim_state)
        session.commit()
        session.close()
    except Exception as excep:
        ## It might happen that SimulationState table is not yet created, e.g. if user has version 1.0.2
        logger = get_logger(__name__)
        logger.exception(excep)
Example #37
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta = MetaData(bind=migrate_engine)
    
    table = _prepare_table(meta, 'USER_PREFERENCES')
    table.c.user_id.alter(name='fk_user')
    
    table = _prepare_table(meta, 'BURST_CONFIGURATIONS')
    table.c.project_id.alter(name='fk_project')
    
    table = _prepare_table(meta, 'WORKFLOWS')
    table.c.project_id.alter(name='fk_project')
    table.c.burst_id.alter(name='fk_burst')
    
    table = _prepare_table(meta, 'WORKFLOW_STEPS')
    table.c.workflow_id.alter(name='fk_workflow')
    table.c.algorithm_id.alter(name='fk_algorithm')
    table.c.resulted_op_id.alter(name='fk_operation')
    
    table = _prepare_table(meta, 'MAPPED_DATATYPE_MEASURE')
    table.c.analyzed_datatype.alter(name='_analyzed_datatype')
    
    ## Fix Lookup Table mapping.
    table = _prepare_table(meta, 'MAPPED_LOOK_UP_TABLE_DATA')
    create_column(COL_1, table)
    create_column(COL_2, table)
    create_column(COL_3, table)
    create_column(COL_4, table)
    create_column(COL_5, table)
    create_column(COL_6, table)
    create_column(COL_7, table)
    session = SA_SESSIONMAKER()
    session.execute(text('DELETE FROM "MAPPED_LOOK_UP_TABLE_DATA";'))
    session.execute(text('insert into "MAPPED_LOOK_UP_TABLE_DATA"(id, _equation, _number_of_values, _invdx, _xmax, _xmin, _df, _dx, _data) '
                         'select id, \'\', _number_of_values, _invdx, _xmax, _xmin, _df, _dx, _data from "MAPPED_NERF_TABLE_DATA";'))
    session.execute(text('insert into "MAPPED_LOOK_UP_TABLE_DATA"(id, _equation, _number_of_values, _invdx, _xmax, _xmin, _df, _dx, _data) '
                         'select id, \'\', _number_of_values, _invdx, _xmax, _xmin, _df, _dx, _data from "MAPPED_PSI_TABLE_DATA";'))
    session.commit()
    session.close()
        
    table = _prepare_table(meta, 'MAPPED_NERF_TABLE_DATA')
    table.drop()
    
    table = _prepare_table(meta, 'MAPPED_PSI_TABLE_DATA')
    table.drop()
Example #38
def initialize_startup():
    """ Force DB tables create, in case no data is already found."""
    is_db_empty = False
    session = SA_SESSIONMAKER()
    inspector = reflection.Inspector.from_engine(session.connection())
    table_names = inspector.get_table_names()
    if len(table_names) < 1:
        LOGGER.debug("Database access exception, maybe DB is empty")
        is_db_empty = True
    session.close()

    versions_repo = TvbProfile.current.db.DB_VERSIONING_REPO
    alembic_cfg = Config()
    alembic_cfg.set_main_option('script_location', versions_repo)
    alembic_cfg.set_main_option('sqlalchemy.url', TvbProfile.current.db.DB_URL)

    if is_db_empty:
        LOGGER.info("Initializing Database")
        if os.path.exists(versions_repo):
            shutil.rmtree(versions_repo)

        _update_sql_scripts()
        session = SA_SESSIONMAKER()
        Base.metadata.create_all(bind=session.connection())
        session.commit()
        session.close()

        command.stamp(alembic_cfg, 'head')
        LOGGER.info("Database Default Tables created successfully!")
    else:
        _update_sql_scripts()

        try:
            db_version = session.execute(
                text("""SELECT version from migrate_version""")).fetchone()[0]

            if db_version == 18:
                command.stamp(alembic_cfg, 'head')
                session.execute(text("""DROP TABLE "migrate_version";"""))

                return is_db_empty

        except SQLAlchemyError:
            pass

        with session.connection() as connection:
            alembic_cfg.attributes['connection'] = connection
            command.upgrade(alembic_cfg,
                            TvbProfile.current.version.DB_STRUCTURE_VERSION)
        LOGGER.info("Database already has some data, will not be re-created!")
    return is_db_empty
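
# A minimal sketch of the two Alembic entry points used above: stamp for a
# freshly created schema, upgrade for an existing one. The script location
# and database URL are placeholders, not TVB's real settings.
from alembic import command
from alembic.config import Config

alembic_cfg = Config()
alembic_cfg.set_main_option('script_location', '/tmp/db_versions')      # hypothetical
alembic_cfg.set_main_option('sqlalchemy.url', 'sqlite:////tmp/tvb.db')  # hypothetical

# Fresh database: tables were just created via metadata.create_all, so only
# record the latest revision without running any migration scripts.
command.stamp(alembic_cfg, 'head')

# Populated database: run every migration script from the current revision
# forward, up to 'head' (or up to a pinned revision such as DB_STRUCTURE_VERSION above).
command.upgrade(alembic_cfg, 'head')
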
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    try:
        meta.bind = migrate_engine
        table1 = meta.tables['MAPPED_SURFACE_DATA']

        create_column(COL_1, table1)
        create_column(COL_2, table1)
        create_column(COL_3, table1)

        try:
            session = SA_SESSIONMAKER()
            session.execute(text("UPDATE \"DATA_TYPES\" SET invalid=1 WHERE exists "
                                 "(SELECT * FROM \"MAPPED_SURFACE_DATA\" WHERE  _number_of_split_slices > 1 "
                                 "and \"DATA_TYPES\".id = \"MAPPED_SURFACE_DATA\".id)"))
            session.commit()
            session.close()
        except ProgrammingError:
            # PostgreSQL
            session = SA_SESSIONMAKER()
            session.execute(text("UPDATE \"DATA_TYPES\" SET invalid=TRUE WHERE exists "
                                 "(SELECT * FROM \"MAPPED_SURFACE_DATA\" WHERE  _number_of_split_slices > 1 "
                                 "and \"DATA_TYPES\".id = \"MAPPED_SURFACE_DATA\".id)"))
            session.commit()
            session.close()

    except Exception:
        logger = get_logger(__name__)
        logger.exception("Cold not create new column required by the update")
        raise
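
# An alternative sketch to the try/except above: pick the boolean literal
# from the dialect up front (SQLite stores booleans as integers, PostgreSQL
# wants TRUE). The helper name is hypothetical, not part of the original migration.
from sqlalchemy import text

def _mark_invalid_split_surfaces(session):
    """Hypothetical helper: flag multi-slice surfaces as invalid, dialect-aware."""
    # session.get_bind() returns the Engine/Connection behind the session;
    # its dialect name is 'sqlite', 'postgresql', etc.
    true_literal = 'TRUE' if session.get_bind().dialect.name == 'postgresql' else '1'
    session.execute(text(
        'UPDATE "DATA_TYPES" SET invalid=' + true_literal + ' WHERE exists '
        '(SELECT * FROM "MAPPED_SURFACE_DATA" WHERE _number_of_split_slices > 1 '
        'and "DATA_TYPES".id = "MAPPED_SURFACE_DATA".id)'))
    session.commit()
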
Example #41
0
def _ensure_datatype_tables_are_created():
    session = SA_SESSIONMAKER()
    Base.metadata.create_all(bind=session.connection())
    session.commit()
    session.close()
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine

    table = meta.tables['USERS']
    drop_column(COLUMN_N1, table)
    table = meta.tables['BURST_CONFIGURATIONS']
    drop_column(COLUMN_N2, table)
    table = meta.tables['OPERATIONS']
    alter_column(COLUMN_N3_OLD, table=table, name=COLUMN_N3_NEW.name)

    try:
        meta.bind = migrate_engine
        session = SA_SESSIONMAKER()
        session.execute(text("""UPDATE "DATA_TYPES" SET module='tvb.datatypes.region_mapping' WHERE "type" = 'RegionMapping' """))
        session.execute(text("""UPDATE "DATA_TYPES" SET module='tvb.datatypes.local_connectivity' WHERE "type" = 'LocalConnectivity' """))
        session.execute(text("""UPDATE "DATA_TYPES" SET module='tvb.datatypes.cortex' WHERE "type" = 'Cortex' """))
        session.commit()
        session.close()

    except Exception:
        LOGGER.exception("Cold not update datatypes")
        raise
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine
    table1 = meta.tables['MAPPED_TIME_SERIES_DATA']
    table2 = meta.tables['MAPPED_TIME_SERIES_REGION_DATA']
    table3 = meta.tables['MAPPED_SENSORS_DATA']

    create_column(COL_REG1, table1)
    create_column(COL_REG2, table1)
    create_column(COL_REG3, table2)
    create_column(COL_REG4, table2)
    create_column(COL_SENSORS, table3)

    session = SA_SESSIONMAKER()
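    # Correlated subquery: for every region time series, copy the GID of the
    # RegionMapping datatype that shares the same _connectivity reference.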
    session.execute(text("""UPDATE "MAPPED_TIME_SERIES_REGION_DATA" tr SET _region_mapping =
                        (SELECT dt.gid
                         FROM "MAPPED_REGION_MAPPING_DATA" rm, "DATA_TYPES" dt
                         WHERE dt.id = rm.id AND tr._connectivity = rm._connectivity);"""))
    # session.execute(text("""UPDATE "MAPPED_TIME_SERIES_REGION_DATA" tr SET _region_mapping_volume =
    #                     (SELECT dt.gid
    #                      FROM "MAPPED_REGION_VOLUME_MAPPING_DATA" rm, "DATA_TYPES" dt
    #                      WHERE dt.id = rm.id AND tr._connectivity = rm._connectivity);"""))
    session.execute(text("""UPDATE "MAPPED_TIME_SERIES_DATA" ts SET _has_surface_mapping = True
                        WHERE
                            EXISTS (SELECT * FROM "DATA_TYPES" dt
                                    WHERE dt.id=ts.id AND dt.type in ('TimeSeriesSurface', 'TimeSeriesEEG',
                                            'TimeSeriesSEEG', 'TimeSeriesMEG'))
                         OR EXISTS (SELECT * from "MAPPED_TIME_SERIES_REGION_DATA" tr
                                    WHERE tr.id=ts.id AND tr._region_mapping is not NULL);"""))
    session.execute(text("""UPDATE "MAPPED_TIME_SERIES_DATA" ts SET _has_volume_mapping = True
                        WHERE
                            EXISTS (SELECT * FROM "DATA_TYPES" dt
                                    WHERE dt.id=ts.id AND dt.type in ('TimeSeriesVolume'))
                         OR EXISTS (SELECT * from "MAPPED_TIME_SERIES_REGION_DATA" tr
                                    WHERE tr.id=ts.id AND tr._region_mapping_volume is not NULL);"""))

    session.commit()
    session.close()
def _transfer_projection_matrices():
    """
    Previous ProjectionRegionM/EEG objects should be removed,
    and ProjectionSurfaceM/EEG objects should be transferred into the new DB tables.
    """
    session = SA_SESSIONMAKER()
    LOGGER.info("Transferring Projections Surface ...")

    try:
        # Only after SQLAlchemy finishes initialization does the new table MAPPED_PROJECTION_DATA exist
        session.execute(text("""INSERT into "MAPPED_PROJECTION_DATA" (id, _sources, _sensors, _projection_type)
                            SELECT PS.id, PM._sources, PM._sensors, 'projEEG'
                            FROM "MAPPED_PROJECTION_SURFACE_EEG_DATA" PS, "MAPPED_PROJECTION_MATRIX_DATA" PM
                            WHERE PM.id=PS.id;"""))

        session.execute(text("""INSERT into "MAPPED_PROJECTION_DATA" (id, _sources, _sensors, _projection_type)
                            SELECT PS.id, PM._sources, PM._sensors, 'projMEG'
                            FROM "MAPPED_PROJECTION_SURFACE_MEG_DATA" PS, "MAPPED_PROJECTION_MATRIX_DATA" PM
                            WHERE PM.id=PS.id;"""))

        session.execute(text("""DROP TABLE "MAPPED_PROJECTION_SURFACE_EEG_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_PROJECTION_SURFACE_MEG_DATA";"""))
        session.execute(text("""DROP TABLE "MAPPED_PROJECTION_MATRIX_DATA";"""))

        LOGGER.info("Removing Projections Region ...")

        session.execute(text("""DELETE from "DATA_TYPES"
                            WHERE type in ('ProjectionRegionEEG', 'ProjectionRegionMEG');"""))
        session.commit()

    except Exception:
        LOGGER.exception("Could not update Projection references")

    finally:
        session.close()
def upgrade(migrate_engine):
    """
    Alter existing table ALGORITHMS, by moving columns from the old ALGORITHM_GROUPS table.
    """
    meta.bind = migrate_engine
    table_algo = meta.tables["ALGORITHMS"]
    for col in ADD_COLUMNS:
        create_column(col, table_algo)

    session = SA_SESSIONMAKER()
    try:
        session.execute(text("ALTER TABLE \"MAPPED_SIMULATION_STATE\" "
                             "ADD COLUMN _current_state VARYING CHARACTER(255)"))
        session.commit()
    except Exception:
        session.close()
        session = SA_SESSIONMAKER()
        session.execute(text("ALTER TABLE \"MAPPED_SIMULATION_STATE\" "
                             "ADD COLUMN _current_state character varying;"))
        session.commit()
    finally:
        session.close()
Example #46
0
def initialize_startup():
    """ Force DB tables create, in case no data is already found."""
    is_db_empty = False
    session = SA_SESSIONMAKER()
    inspector = reflection.Inspector.from_engine(session.connection())
    if len(inspector.get_table_names()) < 1:
        LOGGER.debug("Database access exception, maybe DB is empty")
        is_db_empty = True
    session.close()

    versions_repo = TvbProfile.current.db.DB_VERSIONING_REPO
    if is_db_empty:
        LOGGER.info("Initializing Database")
        if os.path.exists(versions_repo):
            shutil.rmtree(versions_repo)
        migratesqlapi.create(versions_repo, os.path.split(versions_repo)[1])
        _update_sql_scripts()
        migratesqlapi.version_control(
            TvbProfile.current.db.DB_URL,
            versions_repo,
            version=TvbProfile.current.version.DB_STRUCTURE_VERSION)
        session = SA_SESSIONMAKER()
        model.Base.metadata.create_all(bind=session.connection())
        session.commit()
        session.close()
        LOGGER.info("Database Default Tables created successfully!")
    else:
        _update_sql_scripts()
        migratesqlapi.upgrade(
            TvbProfile.current.db.DB_URL,
            versions_repo,
            version=TvbProfile.current.version.DB_STRUCTURE_VERSION)
        LOGGER.info("Database already has some data, will not be re-created!")
    return is_db_empty
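
# A minimal sketch of the sqlalchemy-migrate calls wrapped above; the URL,
# repository path and version number are placeholders, not TVB's settings.
from migrate.versioning import api as migratesqlapi

DB_URL = 'sqlite:////tmp/tvb.db'   # hypothetical
REPO = '/tmp/db_versions'          # hypothetical

# Fresh database: create a repository and mark the DB as already current.
migratesqlapi.create(REPO, 'db_versions')
migratesqlapi.version_control(DB_URL, REPO, version=18)

# Populated database: run the upgrade scripts up to the target version.
migratesqlapi.upgrade(DB_URL, REPO, version=18)
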
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    meta.bind = migrate_engine
    table1 = meta.tables['MAPPED_TIME_SERIES_DATA']

    create_column(COL_1, table1)
    create_column(COL_2, table1)
    create_column(COL_3, table1)
    create_column(COL_4, table1)
    create_column(COL_5, table1)

    session = SA_SESSIONMAKER()
    try:
        # We have a database that supports renaming columns. This way we keep the data of old time series.
        session.execute(text("ALTER TABLE \"MAPPED_TIME_SERIES_DATA\" "
                             "RENAME COLUMN _dim_labels to _labels_ordering"))
        session.execute(text("ALTER TABLE \"MAPPED_CROSS_CORRELATION_DATA\" "
                             "RENAME COLUMN _dim_labels to _labels_ordering"))
    except sqlalchemy.exc.OperationalError:
        # We have a database like SQLite. Just create a new column; the old data is lost in this case.
        session.execute(text("ALTER TABLE \"MAPPED_TIME_SERIES_DATA\" "
                             "ADD COLUMN _labels_ordering VARYING CHARACTER(255)"))
        session.execute(text("ALTER TABLE \"MAPPED_CROSS_CORRELATION_DATA\" "
                             "ADD COLUMN _labels_ordering VARYING CHARACTER(255)"))

    session.execute(text("DROP TABLE \"MAPPED_PSI_TABLE_DATA\""))
    session.execute(text("DROP TABLE \"MAPPED_NERF_TABLE_DATA\""))
    session.execute(text("DROP TABLE \"MAPPED_LOOK_UP_TABLES_DATA\""))
    session.commit()
    session.close()

    table2 = meta.tables['MAPPED_CONNECTIVITY_DATA']
    create_column(COL_7, table2)

    for mapping in TABLE_RENAMES:
        session = SA_SESSIONMAKER()
        session.execute(text("ALTER TABLE \"%s\" RENAME TO \"%s\"" % (mapping[0], mapping[1])))
        session.commit()
        session.close()
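
# TABLE_RENAMES above is a sequence of (old_name, new_name) pairs consumed
# by the loop; the entries shown here are illustrative guesses, not the
# actual TVB list.
TABLE_RENAMES = [
    ("MAPPED_ARRAY_DATA", "MAPPED_ARRAY_WRAPPER_DATA"),
    ("MAPPED_COHERENCE_SPECTRUM_DATA", "MAPPED_COHERENCE_SPECTRUM"),
]
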
Example #48
0
            path_adapters = module.ADAPTERS
            self.path_types = module.DATATYPES_PATH
            self.removers_path = module.REMOVERS_PATH
            self.path_portlets = getattr(module, 'PORTLETS_PATH', [])
        except Exception as excep:
            self.logger.warning("Module " + self.module_name + " is not fully introspection-compatible!")
            self.logger.warning(str(excep))
            return

        if do_create:
            self.logger.debug("Found Datatypes_Path=" + str(self.path_types))
            # DataTypes only need to be imported for adding to DB tables
            for path in self.path_types:
                self.__get_datatypes(path)

            session = SA_SESSIONMAKER()
            model.Base.metadata.create_all(bind=session.connection())
            session.commit()
            session.close()

            self.logger.debug("Found Adapters_Dict=" + str(path_adapters))
            for category_name in path_adapters:
                category_details = path_adapters[category_name]
                launchable = bool(category_details.get(LAUNCHABLE))
                rawinput = bool(category_details.get(RAWINPUT))
                display = bool(category_details.get(DISPLAYER))
                order_nr = category_details.get(ORDER, 999)
                category_instance = dao.filter_category(category_name, rawinput, display, launchable, order_nr)
                if category_instance is not None:
                    category_instance.last_introspection_check = datetime.datetime.now()
                    category_instance.removed = False
    session = SA_SESSIONMAKER()
    try:
        session.execute(text("ALTER TABLE \"MAPPED_SIMULATION_STATE\" "
                             "ADD COLUMN _current_state VARYING CHARACTER(255)"))
        session.commit()
    except Exception:
        session.close()
        session = SA_SESSIONMAKER()
        session.execute(text("ALTER TABLE \"MAPPED_SIMULATION_STATE\" "
                             "ADD COLUMN _current_state character varying;"))
        session.commit()
    finally:
        session.close()

    session = SA_SESSIONMAKER()
    try:
        session.execute(text(
            """UPDATE "ALGORITHMS" SET
            module = (select G.module FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id),
            classname = (select G.classname FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id),
            displayname = (select G.displayname FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id),
            fk_category = (select G.fk_category FROM "ALGORITHM_GROUPS" G WHERE "ALGORITHMS".fk_algo_group=G.id);"""))
        session.commit()

        # Delete old columns, no longer needed
        for col in DEL_COLUMNS:
            drop_column(col, table_algo)

        # Create the constraint only after the rows have been populated
        table_algo = meta.tables["ALGORITHMS"]
                group.no_of_ranges = 3
            elif operation_group.range2 is not None:
                group.no_of_ranges = 2
            elif operation_group.range1 is not None:
                group.no_of_ranges = 1
            else:
                group.no_of_ranges = 0

            dao.store_entity(group)

    except Exception as excep:
        ## We can live with a column holding only the default value; we will not stop the startup.
        logger = get_logger(__name__)
        logger.exception(excep)
        
def _adapt_simulation_monitor_params():
    """
    For previous simulations with an EEG monitor, adjust for the change of input parameters.
    """
    session = SA_SESSIONMAKER()

    param_connectivity = "connectivity"
    param_eeg_proj_old = "monitors_parameters_option_EEG_projection_matrix_data"
    param_eeg_proj_new = "monitors_parameters_option_EEG_projection"
    param_eeg_sensors = "monitors_parameters_option_EEG_sensors"
    param_eeg_rm = "monitors_parameters_option_EEG_region_mapping"

    try:
        all_eeg_ops = session.query(model.Operation).filter(
            model.Operation.parameters.ilike('%"' + param_eeg_proj_old + '"%')).all()
        files_helper = FilesHelper()
        all_bursts = dict()

        for eeg_op in all_eeg_ops:
            try:
                op_params = parse_json_parameters(eeg_op.parameters)
                LOGGER.debug("Updating " + str(op_params))
                old_projection_guid = op_params[param_eeg_proj_old]
                connectivity_guid = op_params[param_connectivity]

                rm = dao.get_generic_entity(RegionMapping, connectivity_guid, "_connectivity")[0]
                dt = dao.get_generic_entity(model.DataType, old_projection_guid, "gid")[0]

                if dt.type == 'ProjectionSurfaceEEG':
                    LOGGER.debug("Previous Prj is surface: " + old_projection_guid)
                    new_projection_guid = old_projection_guid
                else:
                    new_projection_guid = session.execute(text("""SELECT DT.gid
                            FROM "MAPPED_PROJECTION_MATRIX_DATA" PMO, "DATA_TYPES" DTO,
                                 "MAPPED_PROJECTION_MATRIX_DATA" PM, "DATA_TYPES" DT
                            WHERE DTO.id=PMO.id and DT.id=PM.id and PM._sensors=PMO._sensors and
                                  PM._sources='""" + rm._surface + """' and
                                  DTO.gid='""" + old_projection_guid + """';""")).fetchall()[0][0]
                    LOGGER.debug("New Prj is surface: " + str(new_projection_guid))

                sensors_guid = session.execute(text("""SELECT _sensors
                            FROM "MAPPED_PROJECTION_MATRIX_DATA"
                            WHERE id = '""" + str(dt.id) + """';""")).fetchall()[0][0]

                del op_params[param_eeg_proj_old]
                op_params[param_eeg_proj_new] = str(new_projection_guid)
                op_params[param_eeg_sensors] = str(sensors_guid)
                op_params[param_eeg_rm] = str(rm.gid)

                eeg_op.parameters = json.dumps(op_params, cls=MapAsJson.MapAsJsonEncoder)
                LOGGER.debug("New params:" + eeg_op.parameters)
                files_helper.write_operation_metadata(eeg_op)

                burst = dao.get_burst_for_operation_id(eeg_op.id)
                if burst is not None:
                    LOGGER.debug("Updating burst:" + str(burst))
                    burst.prepare_after_load()
                    del burst.simulator_configuration[param_eeg_proj_old]
                    burst.simulator_configuration[param_eeg_proj_new] = {'value': str(new_projection_guid)}
                    burst.simulator_configuration[param_eeg_sensors] = {'value': str(sensors_guid)}
                    burst.simulator_configuration[param_eeg_rm] = {'value': str(rm.gid)}
                    burst._simulator_configuration = json.dumps(burst.simulator_configuration,
                                                                cls=MapAsJson.MapAsJsonEncoder)
                    if burst.id not in all_bursts:
                        all_bursts[burst.id] = burst

            except Exception:
                LOGGER.exception("Could not process " + str(eeg_op))

        session.add_all(all_eeg_ops)
        session.add_all(list(all_bursts.values()))
        session.commit()

    except Exception:
        LOGGER.exception("Could not update Simulation Monitor Params")
    finally:
        session.close()
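
# The projection-GUID lookup above splices rm._surface and the old GUID
# straight into the SQL string. For a one-off migration over trusted data
# that is tolerable; a bound-parameter sketch of the same query follows
# (the helper name is hypothetical, not part of the original migration).
from sqlalchemy import text

def _find_surface_projection_gid(session, surface_gid, old_projection_gid):
    """Hypothetical rewrite: fetch the surface projection GID with bound parameters."""
    return session.execute(
        text("""SELECT DT.gid
                FROM "MAPPED_PROJECTION_MATRIX_DATA" PMO, "DATA_TYPES" DTO,
                     "MAPPED_PROJECTION_MATRIX_DATA" PM, "DATA_TYPES" DT
                WHERE DTO.id = PMO.id AND DT.id = PM.id
                  AND PM._sensors = PMO._sensors
                  AND PM._sources = :surface AND DTO.gid = :old_gid;"""),
        {'surface': surface_gid, 'old_gid': old_projection_gid}).fetchall()[0][0]
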
Example #53
0
def _adapt_epileptor_simulations():
    """
    Previous simulations on the EpileptorWithPermitivity model should be converted to use the Epileptor model.
    As the parameters of the two models have different ranges and defaults, we do not translate parameters:
    we only set Epileptor as the model instead of EpileptorPermittivityCoupling, and leave the model parameters at their defaults.
    """
    session = SA_SESSIONMAKER()
    epileptor_old = "EpileptorPermittivityCoupling"
    epileptor_new = "Epileptor"
    param_model = "model"

    try:
        all_ep_ops = session.query(model.Operation).filter(
            model.Operation.parameters.ilike('%"' + epileptor_old + '"%')).all()
        files_helper = FilesHelper()
        all_bursts = dict()

        for ep_op in all_ep_ops:
            try:
                op_params = parse_json_parameters(ep_op.parameters)
                if op_params[param_model] != epileptor_old:
                    LOGGER.debug("Skipping op " + str(op_params[param_model]) + " -- " + str(ep_op))
                    continue

                LOGGER.debug("Updating " + str(op_params))
                op_params[param_model] = epileptor_new
                ep_op.parameters = json.dumps(op_params, cls=MapAsJson.MapAsJsonEncoder)
                LOGGER.debug("New params:" + ep_op.parameters)
                files_helper.write_operation_metadata(ep_op)

                burst = dao.get_burst_for_operation_id(ep_op.id)
                if burst is not None:
                    LOGGER.debug("Updating burst:" + str(burst))
                    burst.prepare_after_load()
                    burst.simulator_configuration[param_model] = {'value': epileptor_new}
                    burst._simulator_configuration = json.dumps(burst.simulator_configuration,
                                                                cls=MapAsJson.MapAsJsonEncoder)
                    if burst.id not in all_bursts:
                        all_bursts[burst.id] = burst

            except Exception:
                LOGGER.exception("Could not process " + str(ep_op))

        session.add_all(all_ep_ops)
        session.add_all(list(all_bursts.values()))
        session.commit()

    except Exception:
        LOGGER.exception("Could not update Simulation Epileptor Params")
    finally:
        session.close()