def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.

    Adds the two range-count columns to DATA_TYPES_GROUPS, back-fills
    `no_of_ranges` for pre-existing DataTypeGroups, renames operation
    statuses to their sortable prefixed form, and removes SimulationState
    entities attached to a DataTypeGroup.
    """
    meta.bind = migrate_engine
    table = meta.tables['DATA_TYPES_GROUPS']
    create_column(COL_RANGES_1, table)
    create_column(COL_RANGES_2, table)

    try:
        ## Iterate DataTypeGroups from previous code-versions and try to update value for the new column.
        previous_groups = dao.get_generic_entity(model.DataTypeGroup, "0", "no_of_ranges")

        for group in previous_groups:
            operation_group = dao.get_operationgroup_by_id(group.fk_operation_group)
            #group.only_numeric_ranges = operation_group.has_only_numeric_ranges
            ## Count how many range columns are populated on the OperationGroup.
            if operation_group.range3 is not None:
                group.no_of_ranges = 3
            elif operation_group.range2 is not None:
                group.no_of_ranges = 2
            elif operation_group.range1 is not None:
                group.no_of_ranges = 1
            else:
                group.no_of_ranges = 0
            dao.store_entity(group)

    except Exception as excep:
        ## we can live with a column only having default value. We will not stop the startup.
        logger = get_logger(__name__)
        logger.exception(excep)

    ## Prefix operation statuses with a digit so they sort in lifecycle order.
    session = SA_SESSIONMAKER()
    session.execute(text("""UPDATE "OPERATIONS" SET status = CASE WHEN status = 'FINISHED' THEN '4-FINISHED' WHEN status = 'STARTED' THEN '3-STARTED' WHEN status = 'CANCELED' THEN '2-CANCELED' ELSE '1-ERROR' END WHERE status IN ('FINISHED', 'CANCELED', 'STARTED', 'ERROR');"""))
    session.commit()
    session.close()

    try:
        session = SA_SESSIONMAKER()
        ## BUGFIX: the original filter used `SimulationState.fk_datatype_group is not None`,
        ## a Python identity test against the Column object that is always True, so the
        ## query matched (and deleted) EVERY SimulationState row. `isnot(None)` is
        ## translated into SQL `IS NOT NULL`, restricting deletion to states that
        ## actually belong to a DataTypeGroup.
        for sim_state in session.query(SimulationState).filter(
                SimulationState.fk_datatype_group.isnot(None)).all():
            session.delete(sim_state)
        session.commit()
        session.close()
    except Exception as excep:
        ## It might happen that SimulationState table is not yet created, e.g. if user has version 1.0.2
        logger = get_logger(__name__)
        logger.exception(excep)
def _adapt_epileptor_simulations():
    """
    Previous Simulations on EpileptorWithPermitivity model, should be converted to use the Epileptor model.
    As the parameters from the two models are having different ranges and defaults, we do not translate
    parameters, we only set the Epileptor as model instead of EpileptorPermittivityCoupling, and leave
    the model params to defaults.
    """
    session = SA_SESSIONMAKER()
    epileptor_old = "EpileptorPermittivityCoupling"
    epileptor_new = "Epileptor"
    param_model = "model"

    try:
        ## Pre-filter in SQL; the exact match is re-checked on the parsed parameters below.
        all_ep_ops = session.query(model.Operation).filter(
            model.Operation.parameters.ilike('%"' + epileptor_old + '"%')).all()
        files_helper = FilesHelper()
        all_bursts = dict()

        for ep_op in all_ep_ops:
            try:
                op_params = parse_json_parameters(ep_op.parameters)
                if op_params[param_model] != epileptor_old:
                    LOGGER.debug("Skipping op " + str(op_params[param_model]) + " -- " + str(ep_op))
                    continue

                LOGGER.debug("Updating " + str(op_params))
                op_params[param_model] = epileptor_new
                ep_op.parameters = json.dumps(op_params, cls=MapAsJson.MapAsJsonEncoder)
                LOGGER.debug("New params:" + ep_op.parameters)
                ## Keep the on-disk operation metadata in sync with the DB record.
                files_helper.write_operation_metadata(ep_op)

                burst = dao.get_burst_for_operation_id(ep_op.id)
                if burst is not None:
                    LOGGER.debug("Updating burst:" + str(burst))
                    burst.prepare_after_load()
                    burst.simulator_configuration[param_model] = {'value': epileptor_new}
                    burst._simulator_configuration = json.dumps(burst.simulator_configuration,
                                                                cls=MapAsJson.MapAsJsonEncoder)
                    ## BUGFIX: dict.has_key() was removed in Python 3; use the `in` operator.
                    if burst.id not in all_bursts:
                        all_bursts[burst.id] = burst
            except Exception:
                LOGGER.exception("Could not process " + str(ep_op))

        session.add_all(all_ep_ops)
        ## list() so this also works on Python 3, where dict.values() is a view, not a list.
        session.add_all(list(all_bursts.values()))
        session.commit()
    except Exception:
        LOGGER.exception("Could not update Simulation Epileptor Params")
    finally:
        session.close()
def _adapt_epileptor_simulations():
    """
    Previous Simulations on EpileptorWithPermitivity model, should be converted to use the Epileptor model.
    As the parameters from the two models are having different ranges and defaults, we do not translate
    parameters, we only set the Epileptor as model instead of EpileptorPermittivityCoupling, and leave
    the model params to defaults.
    """
    session = SA_SESSIONMAKER()
    epileptor_old = "EpileptorPermittivityCoupling"
    epileptor_new = "Epileptor"
    param_model = "model"

    try:
        ## SQL ilike is only a pre-filter; the parsed parameters are checked exactly below.
        all_ep_ops = session.query(model.Operation).filter(
            model.Operation.parameters.ilike('%"' + epileptor_old + '"%')).all()
        files_helper = FilesHelper()
        all_bursts = dict()

        for ep_op in all_ep_ops:
            try:
                op_params = parse_json_parameters(ep_op.parameters)
                if op_params[param_model] != epileptor_old:
                    LOGGER.debug("Skipping op " + str(op_params[param_model]) + " -- " + str(ep_op))
                    continue

                LOGGER.debug("Updating " + str(op_params))
                op_params[param_model] = epileptor_new
                ep_op.parameters = json.dumps(op_params, cls=MapAsJson.MapAsJsonEncoder)
                LOGGER.debug("New params:" + ep_op.parameters)
                ## Keep the on-disk operation metadata in sync with the DB record.
                files_helper.write_operation_metadata(ep_op)

                burst = dao.get_burst_for_operation_id(ep_op.id)
                if burst is not None:
                    LOGGER.debug("Updating burst:" + str(burst))
                    burst.prepare_after_load()
                    burst.simulator_configuration[param_model] = {'value': epileptor_new}
                    burst._simulator_configuration = json.dumps(burst.simulator_configuration,
                                                                cls=MapAsJson.MapAsJsonEncoder)
                    ## BUGFIX: dict.has_key() was removed in Python 3; use the `in` operator.
                    if burst.id not in all_bursts:
                        all_bursts[burst.id] = burst
            except Exception:
                LOGGER.exception("Could not process " + str(ep_op))

        session.add_all(all_ep_ops)
        ## list() so this also works on Python 3, where dict.values() is a view, not a list.
        session.add_all(list(all_bursts.values()))
        session.commit()
    except Exception:
        LOGGER.exception("Could not update Simulation Epileptor Params")
    finally:
        session.close()
def _adapt_simulation_monitor_params():
    """ For previous simulation with EEG monitor, adjust the change of input parameters. """
    session = SA_SESSIONMAKER()
    ## Old/new parameter keys: the single "projection_matrix_data" input was split into
    ## separate projection, sensors and region-mapping inputs.
    param_connectivity = "connectivity"
    param_eeg_proj_old = "monitors_parameters_option_EEG_projection_matrix_data"
    param_eeg_proj_new = "monitors_parameters_option_EEG_projection"
    param_eeg_sensors = "monitors_parameters_option_EEG_sensors"
    param_eeg_rm = "monitors_parameters_option_EEG_region_mapping"
    try:
        ## Find every operation whose JSON parameters mention the old projection key.
        all_eeg_ops = session.query(model.Operation).filter(
            model.Operation.parameters.ilike('%"' + param_eeg_proj_old + '"%')).all()
        files_helper = FilesHelper()
        ## Bursts keyed by id, so a burst touched by several operations is stored once.
        all_bursts = dict()
        for eeg_op in all_eeg_ops:
            try:
                op_params = parse_json_parameters(eeg_op.parameters)
                LOGGER.debug("Updating " + str(op_params))
                old_projection_guid = op_params[param_eeg_proj_old]
                connectivity_guid = op_params[param_connectivity]
                ## RegionMapping looked up through the operation's connectivity;
                ## assumes exactly one such mapping exists -- [0] raises IndexError otherwise.
                rm = dao.get_generic_entity(RegionMapping, connectivity_guid, "_connectivity")[0]
                dt = dao.get_generic_entity(model.DataType, old_projection_guid, "gid")[0]
                if dt.type == 'ProjectionSurfaceEEG':
                    ## Already a surface projection: keep the same GUID.
                    LOGGER.debug("Previous Prj is surface: " + old_projection_guid)
                    new_projection_guid = old_projection_guid
                else:
                    ## Otherwise find the surface projection sharing the old projection's
                    ## sensors, with the region mapping's surface as source.
                    ## NOTE(review): SQL is built by string concatenation; values come from
                    ## this DB, not user input, but parameterized queries would be safer.
                    new_projection_guid = session.execute(text("""SELECT DT.gid FROM "MAPPED_PROJECTION_MATRIX_DATA" PMO, "DATA_TYPES" DTO, "MAPPED_PROJECTION_MATRIX_DATA" PM, "DATA_TYPES" DT WHERE DTO.id=PMO.id and DT.id=PM.id and PM._sensors=PMO._sensors and PM._sources='""" + rm._surface + """' and DTO.gid='""" + old_projection_guid + """';""")).fetchall()[0][0]
                    LOGGER.debug("New Prj is surface: " + str(new_projection_guid))
                ## The sensors GUID of the projection matrix previously referenced.
                sensors_guid = session.execute(text("""SELECT _sensors FROM "MAPPED_PROJECTION_MATRIX_DATA" WHERE id = '""" + str(dt.id) + """';""")).fetchall()[0][0]
                ## Replace the old single key with the three new ones.
                del op_params[param_eeg_proj_old]
                op_params[param_eeg_proj_new] = str(new_projection_guid)
                op_params[param_eeg_sensors] = str(sensors_guid)
                op_params[param_eeg_rm] = str(rm.gid)
                eeg_op.parameters = json.dumps(op_params, cls=MapAsJson.MapAsJsonEncoder)
                LOGGER.debug("New params:" + eeg_op.parameters)
                ## Keep the on-disk operation metadata in sync with the DB record.
                files_helper.write_operation_metadata(eeg_op)
                ## Apply the same key migration to the burst configuration, if any.
                burst = dao.get_burst_for_operation_id(eeg_op.id)
                if burst is not None:
                    LOGGER.debug("Updating burst:" + str(burst))
                    burst.prepare_after_load()
                    del burst.simulator_configuration[param_eeg_proj_old]
                    burst.simulator_configuration[param_eeg_proj_new] = {'value': str(new_projection_guid)}
                    burst.simulator_configuration[param_eeg_sensors] = {'value': str(sensors_guid)}
                    burst.simulator_configuration[param_eeg_rm] = {'value': str(rm.gid)}
                    burst._simulator_configuration = json.dumps(burst.simulator_configuration, cls=MapAsJson.MapAsJsonEncoder)
                    if burst.id not in all_bursts:
                        all_bursts[burst.id] = burst
            except Exception:
                ## Best-effort per operation: one failure must not abort the whole migration.
                LOGGER.exception("Could not process " + str(eeg_op))
        session.add_all(all_eeg_ops)
        session.add_all(list(all_bursts.values()))
        session.commit()
    except Exception:
        LOGGER.exception("Could not update Simulation Monitor Params")
    finally:
        session.close()
## NOTE(review): these statements appear here at top level, while sibling revisions in
## this file carry the identical code as the tail of upgrade() -- confirm the intended
## scope against the complete original file.
session = SA_SESSIONMAKER()
session.execute(text("""UPDATE "OPERATIONS" SET status = CASE WHEN status = 'FINISHED' THEN '4-FINISHED' WHEN status = 'STARTED' THEN '3-STARTED' WHEN status = 'CANCELED' THEN '2-CANCELED' ELSE '1-ERROR' END WHERE status IN ('FINISHED', 'CANCELED', 'STARTED', 'ERROR');"""))
session.commit()
session.close()

try:
    session = SA_SESSIONMAKER()
    ## BUGFIX: the original filter used `SimulationState.fk_datatype_group is not None`,
    ## a Python identity test on the Column object that is always True, so EVERY
    ## SimulationState row was matched and deleted. `isnot(None)` renders SQL
    ## `IS NOT NULL`, deleting only states attached to a DataTypeGroup.
    for sim_state in session.query(SimulationState).filter(
            SimulationState.fk_datatype_group.isnot(None)).all():
        session.delete(sim_state)
    session.commit()
    session.close()
except Exception as excep:
    ## BUGFIX: `except Exception, excep:` is Python-2-only syntax (a SyntaxError on
    ## Python 3); the `as` form matches the rest of this file.
    ## It might happen that SimulationState table is not yet created, e.g. if user has version 1.0.2
    logger = get_logger(__name__)
    logger.exception(excep)


def downgrade(migrate_engine):
    """Operations to reverse the above upgrade go here."""
    meta.bind = migrate_engine
    table = meta.tables['DATA_TYPES_GROUPS']
    drop_column(COL_RANGES_1, table)
    ## BUGFIX: upgrade() created two columns but only COL_RANGES_1 was being dropped,
    ## leaving COL_RANGES_2 behind after a downgrade.
    drop_column(COL_RANGES_2, table)
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.

    Adds the two range-count columns to DATA_TYPES_GROUPS, back-fills
    `no_of_ranges` for pre-existing DataTypeGroups, renames operation
    statuses to their sortable prefixed form, and removes SimulationState
    entities attached to a DataTypeGroup.
    """
    meta.bind = migrate_engine
    table = meta.tables['DATA_TYPES_GROUPS']
    create_column(COL_RANGES_1, table)
    create_column(COL_RANGES_2, table)

    try:
        ## Iterate DataTypeGroups from previous code-versions and try to update value for the new column.
        previous_groups = dao.get_generic_entity(model.DataTypeGroup, "0", "no_of_ranges")

        for group in previous_groups:
            operation_group = dao.get_operationgroup_by_id(group.fk_operation_group)
            #group.only_numeric_ranges = operation_group.has_only_numeric_ranges
            ## Count how many range columns are populated on the OperationGroup.
            if operation_group.range3 is not None:
                group.no_of_ranges = 3
            elif operation_group.range2 is not None:
                group.no_of_ranges = 2
            elif operation_group.range1 is not None:
                group.no_of_ranges = 1
            else:
                group.no_of_ranges = 0
            dao.store_entity(group)

    except Exception as excep:
        ## we can live with a column only having default value. We will not stop the startup.
        logger = get_logger(__name__)
        logger.exception(excep)

    ## Prefix operation statuses with a digit so they sort in lifecycle order.
    session = SA_SESSIONMAKER()
    session.execute(text("""UPDATE "OPERATIONS" SET status = CASE WHEN status = 'FINISHED' THEN '4-FINISHED' WHEN status = 'STARTED' THEN '3-STARTED' WHEN status = 'CANCELED' THEN '2-CANCELED' ELSE '1-ERROR' END WHERE status IN ('FINISHED', 'CANCELED', 'STARTED', 'ERROR');"""))
    session.commit()
    session.close()

    try:
        session = SA_SESSIONMAKER()
        ## BUGFIX: the original filter used `SimulationState.fk_datatype_group is not None`,
        ## a Python identity test against the Column object that is always True, so the
        ## query matched (and deleted) EVERY SimulationState row. `isnot(None)` is
        ## translated into SQL `IS NOT NULL`, restricting deletion to states that
        ## actually belong to a DataTypeGroup.
        for sim_state in session.query(SimulationState).filter(
                SimulationState.fk_datatype_group.isnot(None)).all():
            session.delete(sim_state)
        session.commit()
        session.close()
    except Exception as excep:
        ## It might happen that SimulationState table is not yet created, e.g. if user has version 1.0.2
        logger = get_logger(__name__)
        logger.exception(excep)
def _adapt_simulation_monitor_params():
    """
    For previous simulation with EEG monitor, adjust the change of input parameters:
    the single "projection_matrix_data" input was split into separate projection,
    sensors and region-mapping inputs, both on operation parameters and on the
    associated burst configurations.
    """
    session = SA_SESSIONMAKER()
    param_connectivity = "connectivity"
    param_eeg_proj_old = "monitors_parameters_option_EEG_projection_matrix_data"
    param_eeg_proj_new = "monitors_parameters_option_EEG_projection"
    param_eeg_sensors = "monitors_parameters_option_EEG_sensors"
    param_eeg_rm = "monitors_parameters_option_EEG_region_mapping"

    try:
        ## Every operation whose JSON parameters mention the old projection key.
        all_eeg_ops = session.query(model.Operation).filter(
            model.Operation.parameters.ilike('%"' + param_eeg_proj_old + '"%')).all()
        files_helper = FilesHelper()
        ## Bursts keyed by id, so a burst touched by several operations is stored once.
        all_bursts = dict()

        for eeg_op in all_eeg_ops:
            try:
                op_params = parse_json_parameters(eeg_op.parameters)
                LOGGER.debug("Updating " + str(op_params))
                old_projection_guid = op_params[param_eeg_proj_old]
                connectivity_guid = op_params[param_connectivity]
                ## Assumes exactly one RegionMapping exists for this connectivity;
                ## [0] raises IndexError otherwise (caught by the per-op handler below).
                rm = dao.get_generic_entity(RegionMapping, connectivity_guid, "_connectivity")[0]
                dt = dao.get_generic_entity(model.DataType, old_projection_guid, "gid")[0]

                if dt.type == 'ProjectionSurfaceEEG':
                    ## Already a surface projection: keep the same GUID.
                    LOGGER.debug("Previous Prj is surface: " + old_projection_guid)
                    new_projection_guid = old_projection_guid
                else:
                    ## Find the surface projection sharing the old projection's sensors,
                    ## with the region mapping's surface as source.
                    ## NOTE(review): SQL built by concatenation; values come from this DB,
                    ## not user input, but bound parameters would be safer.
                    new_projection_guid = session.execute(text("""SELECT DT.gid FROM "MAPPED_PROJECTION_MATRIX_DATA" PMO, "DATA_TYPES" DTO, "MAPPED_PROJECTION_MATRIX_DATA" PM, "DATA_TYPES" DT WHERE DTO.id=PMO.id and DT.id=PM.id and PM._sensors=PMO._sensors and PM._sources='""" + rm._surface + """' and DTO.gid='""" + old_projection_guid + """';""")).fetchall()[0][0]
                    LOGGER.debug("New Prj is surface: " + str(new_projection_guid))

                sensors_guid = session.execute(text("""SELECT _sensors FROM "MAPPED_PROJECTION_MATRIX_DATA" WHERE id = '""" + str(dt.id) + """';""")).fetchall()[0][0]

                ## Replace the old single key with the three new ones.
                del op_params[param_eeg_proj_old]
                op_params[param_eeg_proj_new] = str(new_projection_guid)
                op_params[param_eeg_sensors] = str(sensors_guid)
                op_params[param_eeg_rm] = str(rm.gid)
                eeg_op.parameters = json.dumps(op_params, cls=MapAsJson.MapAsJsonEncoder)
                LOGGER.debug("New params:" + eeg_op.parameters)
                ## Keep the on-disk operation metadata in sync with the DB record.
                files_helper.write_operation_metadata(eeg_op)

                ## Apply the same key migration to the burst configuration, if any.
                burst = dao.get_burst_for_operation_id(eeg_op.id)
                if burst is not None:
                    LOGGER.debug("Updating burst:" + str(burst))
                    burst.prepare_after_load()
                    del burst.simulator_configuration[param_eeg_proj_old]
                    burst.simulator_configuration[param_eeg_proj_new] = {'value': str(new_projection_guid)}
                    burst.simulator_configuration[param_eeg_sensors] = {'value': str(sensors_guid)}
                    burst.simulator_configuration[param_eeg_rm] = {'value': str(rm.gid)}
                    burst._simulator_configuration = json.dumps(burst.simulator_configuration,
                                                                cls=MapAsJson.MapAsJsonEncoder)
                    ## BUGFIX: dict.has_key() was removed in Python 3; use the `in` operator.
                    if burst.id not in all_bursts:
                        all_bursts[burst.id] = burst
            except Exception:
                ## Best-effort per operation: one failure must not abort the whole migration.
                LOGGER.exception("Could not process " + str(eeg_op))

        session.add_all(all_eeg_ops)
        ## list() so this also works on Python 3, where dict.values() is a view, not a list.
        session.add_all(list(all_bursts.values()))
        session.commit()
    except Exception:
        LOGGER.exception("Could not update Simulation Monitor Params")
    finally:
        session.close()