def prepare_mapped_sensors_as_measure_points_params(project_id, sensors, eeg_cap=None):
    """
    Compute sensors positions by mapping them to the ``eeg_cap`` surface
    If ``eeg_cap`` is not specified the mapping will use a default EEGCap DataType in current project.
    If no default EEGCap is found, return sensors as they are (not projected)

    :param project_id: id of the project to search for a default EEGCap in
    :param sensors: sensors datatype whose positions are rendered
    :param eeg_cap: optional cap surface onto which the sensors are projected
    :returns: dictionary to be used in Viewers for rendering measure_points
    :rtype: dict
    """
    if eeg_cap is None:
        # No explicit cap given: fall back to the project's most recent EEGCap
        eeg_cap = dao.try_load_last_entity_of_type(project_id, EEGCap)
    if eeg_cap:
        # Encode the target surface gid as an extra URL segment so the server
        # method 'sensors_to_surface' knows which surface to project onto
        datatype_kwargs = json.dumps({'surface_to_map': eeg_cap.gid})
        sensor_locations = ABCDisplayer.paths2url(sensors, 'sensors_to_surface') + '/' + datatype_kwargs
        sensor_no = sensors.number_of_sensors
        sensor_labels = ABCDisplayer.paths2url(sensors, 'labels')
        return {'urlMeasurePoints': sensor_locations,
                'urlMeasurePointsLabels': sensor_labels,
                'noOfMeasurePoints': sensor_no,
                'minMeasure': 0,
                'maxMeasure': sensor_no,
                'urlMeasure': ''}
    # No cap surface available anywhere: render the raw (unprojected) sensors
    return prepare_sensors_as_measure_points_params(sensors)
def compute_params(self, region_mapping_volume=None, measure=None, data_slice='', background=None):
    # type: (RegionVolumeMappingIndex, DataTypeMatrix, str, StructuralMRIIndex) -> dict
    """Collect every template parameter needed by the region volume mapping viewer.

    :param region_mapping_volume: optional mapping index; resolved via
        ``_ensure_region_mapping_index`` when missing
    :param measure: optional measure overlaid on top of the region mapping
    :param data_slice: slice expression forwarded to the measure computation
    :param background: optional structural MRI index used as anatomical background
    """
    rmv_index = self._ensure_region_mapping_index(region_mapping_volume, measure)
    if measure is None:
        params = self._compute_region_volume_map_params(rmv_index)
    else:
        params = self._compute_measure_params(rmv_index, measure, data_slice)
    volume_index = self.load_entity_by_gid(rmv_index.fk_volume_gid)
    assert isinstance(volume_index, VolumeIndex)
    # Prepend a leading time dimension of 1, as the JS viewer expects 4-D shapes
    shape_with_time = (1,) + rmv_index.parsed_shape
    voxel_region_url = URLGenerator.build_url(self.stored_adapter.id, 'get_voxel_region',
                                              rmv_index.gid, parameter='')
    params.update(volumeShape=json.dumps(shape_with_time),
                  volumeOrigin=volume_index.origin,
                  voxelUnit=volume_index.voxel_unit,
                  voxelSize=volume_index.voxel_size,
                  urlVoxelRegion=voxel_region_url)
    if background is None:
        background = dao.try_load_last_entity_of_type(self.current_project_id, StructuralMRIIndex)
    if background is None:
        # Still nothing to show behind the data: use the empty defaults
        params.update(self.compute_background_params())
    else:
        volume_view_url = URLGenerator.build_url(self.stored_adapter.id, 'get_volume_view',
                                                 background.gid, '')
        params.update(self.compute_background_params(background.array_data_min,
                                                     background.array_data_max,
                                                     volume_view_url))
    return params
def _assert_stored_ddti(self, expected_cnt=1):
    """Check that exactly ``expected_cnt`` DummyDataTypeIndex entities exist and return the newest one."""
    project_id = self.test_project.id
    stored = dao.count_datatypes(project_id, DummyDataTypeIndex)
    assert stored == expected_cnt
    latest = dao.try_load_last_entity_of_type(project_id, DummyDataTypeIndex)
    assert latest.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
    return latest
def prepare_mapped_sensors_as_measure_points_params(project_id, sensors, eeg_cap=None):
    """
    Compute sensor positions by projecting them onto the ``eeg_cap`` surface.
    When ``eeg_cap`` is not given, the most recent EEGCap DataType of the
    project is used instead; when none exists either, the sensors are
    returned as they are (not projected).

    :returns: dictionary to be used in Viewers for rendering measure_points
    :rtype: dict
    """
    if eeg_cap is None:
        eeg_cap = dao.try_load_last_entity_of_type(project_id, EEGCap)
    if not eeg_cap:
        # No cap surface available anywhere: fall back to raw sensor positions
        return prepare_sensors_as_measure_points_params(sensors)
    mapping_kwargs = json.dumps({'surface_to_map': eeg_cap.gid})
    locations_url = ABCDisplayer.paths2url(sensors, 'sensors_to_surface') + '/' + mapping_kwargs
    labels_url = ABCDisplayer.paths2url(sensors, 'labels')
    count = sensors.number_of_sensors
    return dict(urlMeasurePoints=locations_url,
                urlMeasurePointsLabels=labels_url,
                noOfMeasurePoints=count,
                minMeasure=0,
                maxMeasure=count,
                urlMeasure='')
def launch(self, time_series, background=None):
    """Build and render the "Volumetric Time Series" viewer page for ``time_series``."""
    min_value, max_value = time_series.get_min_max_values()
    url_volume_data = self.paths2url(time_series, "get_volume_view", parameter="")
    url_timeseries_data = self.paths2url(time_series, "get_voxel_time_series", parameter="")
    if isinstance(time_series, TimeSeriesVolume):
        volume = time_series.volume
        volume_shape = time_series.read_data_shape()
    else:
        # Region-based series: combine the time length with the mapping volume's shape
        volume = time_series.region_mapping_volume.volume
        volume_shape = [time_series.read_data_shape()[0]]
        volume_shape.extend(time_series.region_mapping_volume.shape)
    state_var_labels = time_series.labels_dimensions.get(time_series.labels_ordering[1], [])
    params = {
        "title": "Volumetric Time Series",
        "ts_title": time_series.title,
        "labelsStateVar": state_var_labels,
        "labelsModes": range(time_series.read_data_shape()[3]),
        "minValue": min_value,
        "maxValue": max_value,
        "urlVolumeData": url_volume_data,
        "urlTimeSeriesData": url_timeseries_data,
        "samplePeriod": time_series.sample_period,
        "samplePeriodUnit": time_series.sample_period_unit,
        "volumeShape": json.dumps(volume_shape),
        "volumeOrigin": json.dumps(volume.origin.tolist()),
        "voxelUnit": volume.voxel_unit,
        "voxelSize": json.dumps(volume.voxel_size.tolist()),
    }
    if background is None:
        background = dao.try_load_last_entity_of_type(self.current_project_id, StructuralMRI)
    params.update(_MappedArrayVolumeBase._compute_background(background))
    return self.build_display_result("time_series_volume/view", params,
                                     pages={"controlPage": "time_series_volume/controls"})
def compute_params(self, region_mapping_volume=None, measure=None, data_slice='', background=None):
    """Assemble the template parameters for the region volume mapping viewer."""
    rmv = self._ensure_region_mapping(region_mapping_volume)
    mapped_volume = rmv.volume
    # The JS viewer expects a 4-D shape, so prepend a time dimension of 1
    shape_with_time = (1, ) + rmv.read_data_shape()
    if measure is None:
        params = self._compute_region_volume_map_params(rmv)
    else:
        params = self._compute_measure_params(rmv, measure, data_slice)
    params.update(
        volumeShape=json.dumps(shape_with_time),
        volumeOrigin=json.dumps(mapped_volume.origin.tolist()),
        voxelUnit=mapped_volume.voxel_unit,
        voxelSize=json.dumps(mapped_volume.voxel_size.tolist()),
        urlVoxelRegion=ABCDisplayer.paths2url(rmv, "get_voxel_region", parameter=""))
    if background is None:
        # Default to the most recent structural MRI of the project, if any
        background = dao.try_load_last_entity_of_type(self.current_project_id, StructuralMRI)
    params.update(self._compute_background(background))
    return params
def launch(self, time_series, projection_surface=None, shell_surface=None):
    """Render the extended brain viewer for ``time_series``, optionally over a projection surface."""
    self.surface = projection_surface
    is_seeg = isinstance(time_series, TimeSeriesSEEG)
    if is_seeg and shell_surface is None:
        # SEEG needs a translucent shelf; default to the project's latest cortical surface
        shell_surface = dao.try_load_last_entity_of_type(self.current_project_id, CorticalSurface)
    params = BrainViewer.compute_parameters(self, time_series, shell_surface)
    params.update(EegMonitor().compute_parameters(time_series, is_extended_view=True))
    params.update(isOneToOneMapping=False, brainViewerTemplate='view.html')
    if is_seeg:
        params['brainViewerTemplate'] = "internal_view.html"
        # Only the shelf face is displayed, so there is no point loading the vertices
        params['urlVertices'] = None
        params['isSEEG'] = True
    return self.build_display_result("brain/extendedview", params,
                                     pages=dict(controlPage="brain/extendedcontrols",
                                                channelsPage="commons/channel_selector.html"))
def _assert_stored_dt2(self, expected_cnt=1):
    """Verify the project holds ``expected_cnt`` Datatype2 entities and return the newest one."""
    project_id = self.test_project.id
    self.assertEqual(expected_cnt, dao.count_datatypes(project_id, Datatype2))
    newest = dao.try_load_last_entity_of_type(project_id, Datatype2)
    self.assertEqual(newest.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
    return newest
def _ensure_region_mapping_index(self, region_mapping_volume): if region_mapping_volume is None: region_mapping_volume = dao.try_load_last_entity_of_type( self.current_project_id, RegionVolumeMappingIndex) if region_mapping_volume is None: raise LaunchException( 'You should have a volume mapping to launch this viewer') return region_mapping_volume
def _params_internal_sensors(self, internal_sensors, shell_surface=None):
    """Prepare viewer parameters for internal sensors and render the page."""
    project_id = self.current_project_id
    if shell_surface is None:
        # Default the shelf to the project's most recent cortical surface
        shell_surface = dao.try_load_last_entity_of_type(project_id, CorticalSurface)
    params = prepare_sensors_as_measure_points_params(internal_sensors)
    params['shelfObject'] = prepare_shell_surface_urls(project_id, shell_surface)
    return self.build_display_result('sensors/sensors_internal', params,
                                     pages={'controlPage': 'sensors/sensors_controls'})
def prepare_shell_surface_urls(project_id, shell_surface=None, preferred_type=FaceSurface):
    """Return a JSON array with the rendering URLs (vertices, normals, triangles) of a shell surface.

    :param shell_surface: explicit surface to use; when None, the latest entity of
        ``preferred_type`` in the project is loaded instead
    :raises Exception: when no surface can be resolved
    """
    if shell_surface is None:
        shell_surface = dao.try_load_last_entity_of_type(project_id, preferred_type)
    if not shell_surface:
        raise Exception('No Face object found in current project.')
    vertices_url, normals_url, _, triangles_url = shell_surface.get_urls_for_rendering()
    return json.dumps([vertices_url, normals_url, triangles_url])
def ensure_background(self, background_index):
    """Return background-volume viewer params, defaulting to the project's latest StructuralMRI index."""
    index = background_index
    if index is None:
        index = dao.try_load_last_entity_of_type(self.current_project_id, StructuralMRIIndex)
    if index is None:
        # Nothing to show behind the data: use the empty defaults
        return _MappedArrayVolumeBase.compute_background_params()
    with h5.h5_file_for_index(index) as mri_h5:
        low, high = mri_h5.get_min_max_values()
    data_url = URLGenerator.build_url(self.stored_adapter.id, 'get_volume_view', index.gid, '')
    return _MappedArrayVolumeBase.compute_background_params(low, high, data_url)
def prepare_shell_surface_urls(project_id, shell_surface=None):
    """Serialize the rendering URLs of a shell (face) surface as a JSON array.

    :raises Exception: when no surface is given and the project has no FaceSurface
    """
    surface = shell_surface
    if surface is None:
        surface = dao.try_load_last_entity_of_type(project_id, FaceSurface)
    if not surface:
        raise Exception('No Face object found in current project.')
    urls = surface.get_urls_for_rendering()
    # urls is (vertices, normals, lines, triangles); the lines entry is unused here
    return json.dumps([urls[0], urls[1], urls[3]])
def _params_internal_sensors(self, internal_sensors, shell_surface=None):
    """Build viewer parameters for internal (depth) sensors and render the page.

    :param internal_sensors: sensors datatype displayed as measure points
    :param shell_surface: optional translucent shelf surface; when missing,
        the latest CorticalSurface of the current project is used instead
    """
    params = prepare_sensors_as_measure_points_params(internal_sensors)
    if shell_surface is None:
        # Fall back to the most recently stored cortical surface of this project
        shell_surface = dao.try_load_last_entity_of_type(
            self.current_project_id, CorticalSurface)
    params['shelfObject'] = prepare_shell_surface_urls(
        self.current_project_id, shell_surface)
    return self.build_display_result(
        'sensors/sensors_internal', params,
        pages={'controlPage': 'sensors/sensors_controls'})
def _ensure_region_mapping_index(self, region_mapping_volume, measure=None): # type: (RegionVolumeMappingIndex, DataTypeMatrix) -> RegionVolumeMappingIndex if region_mapping_volume is None: if measure is not None and hasattr(measure, "fk_connectivity_gid"): region_mapping_volume = dao.get_generic_entity(RegionVolumeMappingIndex, measure.fk_connectivity_gid, 'fk_connectivity_gid') if region_mapping_volume is not None and len(region_mapping_volume): region_mapping_volume = region_mapping_volume[0] else: region_mapping_volume = None else: region_mapping_volume = dao.try_load_last_entity_of_type(self.current_project_id, RegionVolumeMappingIndex) if region_mapping_volume is None: raise LaunchException('You should have a compatible volume mapping to launch this viewer') return region_mapping_volume
def ensure_background(self, background_index):
    """Return background-volume viewer params, defaulting to the project's latest StructuralMRI index.

    :param background_index: optional StructuralMRIIndex; when None the most
        recent one in the current project is loaded, and when none exists the
        empty default background params are returned
    """
    if background_index is None:
        background_index = dao.try_load_last_entity_of_type(
            self.current_project_id, StructuralMRIIndex)
    if background_index is None:
        # No structural MRI anywhere: use the empty defaults
        return _MappedArrayVolumeBase.compute_background_params()
    background_class, background_path = self._load_h5_of_gid(
        background_index.gid)
    background_h5 = background_class(background_path)
    try:
        # try/finally guarantees the H5 file is closed even if reading fails
        min_value, max_value = background_h5.get_min_max_values()
    finally:
        background_h5.close()
    url_volume_data = self.build_url('get_volume_view', background_index.gid, '')
    return _MappedArrayVolumeBase.compute_background_params(
        min_value, max_value, url_volume_data)
def launch(self, time_series, background=None):
    """Display the volumetric time series viewer for ``time_series``."""
    min_value, max_value = time_series.get_min_max_values()
    if isinstance(time_series, TimeSeriesVolume):
        source_volume = time_series.volume
        shape_for_js = time_series.read_data_shape()
    else:
        # Region-based series: time length followed by the mapping volume's shape
        source_volume = time_series.region_mapping_volume.volume
        shape_for_js = [time_series.read_data_shape()[0]]
        shape_for_js.extend(time_series.region_mapping_volume.shape)
    params = dict(title="Volumetric Time Series", ts_title=time_series.title)
    params["labelsStateVar"] = time_series.labels_dimensions.get(time_series.labels_ordering[1], [])
    params["labelsModes"] = range(time_series.read_data_shape()[3])
    params["minValue"] = min_value
    params["maxValue"] = max_value
    params["urlVolumeData"] = self.paths2url(time_series, "get_volume_view", parameter="")
    params["urlTimeSeriesData"] = self.paths2url(time_series, "get_voxel_time_series", parameter="")
    params["samplePeriod"] = time_series.sample_period
    params["samplePeriodUnit"] = time_series.sample_period_unit
    params["volumeShape"] = json.dumps(shape_for_js)
    params["volumeOrigin"] = json.dumps(source_volume.origin.tolist())
    params["voxelUnit"] = source_volume.voxel_unit
    params["voxelSize"] = json.dumps(source_volume.voxel_size.tolist())
    if background is None:
        background = dao.try_load_last_entity_of_type(self.current_project_id, StructuralMRI)
    params.update(_MappedArrayVolumeBase._compute_background(background))
    return self.build_display_result("time_series_volume/view", params,
                                     pages=dict(controlPage="time_series_volume/controls"))
def compute_params(self, region_mapping_volume=None, measure=None, data_slice='', background=None):
    """Assemble template parameters for the region volume mapping viewer (H5-backed).

    :param region_mapping_volume: optional mapping index; resolved to the
        project's latest one when missing
    :param measure: optional measure datatype overlaid on the region mapping
    :param data_slice: slice expression forwarded to the measure computation
    :param background: optional StructuralMRIIndex used as anatomical background
    :raises LaunchException: indirectly, when no region volume mapping exists
    """
    region_mapping_volume = self._ensure_region_mapping_index(
        region_mapping_volume)
    rmv_h5_class, rmv_h5_path = self._load_h5_of_gid(
        region_mapping_volume.gid)
    rmv_h5 = rmv_h5_class(rmv_h5_path)
    # try/finally ensures both H5 files are closed even when an intermediate
    # step raises (the original leaked open handles on error)
    try:
        volume_shape = rmv_h5.array_data.shape
        # Prepend a time dimension of 1, as the JS viewer expects 4-D shapes
        volume_shape = (1, ) + volume_shape
        if measure is None:
            params = self._compute_region_volume_map_params(rmv_h5)
        else:
            params = self._compute_measure_params(rmv_h5, measure, data_slice)
        url_voxel_region = URLGenerator.build_h5_url(region_mapping_volume.gid,
                                                     'get_voxel_region', parameter='')
        volume_gid = rmv_h5.volume.load()
        volume_h5_class, volume_h5_path = self._load_h5_of_gid(volume_gid.hex)
        volume_h5 = volume_h5_class(volume_h5_path)
        try:
            params.update(
                volumeShape=json.dumps(volume_shape),
                volumeOrigin=json.dumps(volume_h5.origin.load().tolist()),
                voxelUnit=volume_h5.voxel_unit.load(),
                voxelSize=json.dumps(volume_h5.voxel_size.load().tolist()),
                urlVoxelRegion=url_voxel_region)
        finally:
            volume_h5.close()
    finally:
        rmv_h5.close()
    if background is None:
        background = dao.try_load_last_entity_of_type(
            self.current_project_id, StructuralMRIIndex)
    params.update(self._compute_background(background))
    return params
def launch(self, time_series, projection_surface=None, shell_surface=None):
    """Render the extended brain viewer page for the given time series."""
    self.surface = projection_surface
    seeg_input = isinstance(time_series, TimeSeriesSEEG)
    if seeg_input and shell_surface is None:
        shell_surface = dao.try_load_last_entity_of_type(self.current_project_id, CorticalSurface)
    params = BrainViewer.compute_parameters(self, time_series, shell_surface)
    monitor_params = EegMonitor().compute_parameters(time_series, is_extended_view=True)
    params.update(monitor_params)
    params['isOneToOneMapping'] = False
    params['brainViewerTemplate'] = "internal_view.html" if seeg_input else 'view.html'
    if seeg_input:
        # Only the shelf face is rendered for SEEG, so the vertices need not be loaded
        params['urlVertices'] = None
        params['isSEEG'] = True
    return self.build_display_result("brain/extendedview", params,
                                     pages={'controlPage': "brain/extendedcontrols",
                                            'channelsPage': "commons/channel_selector.html"})
def _ensure_region_mapping(self, region_mapping_volume): if region_mapping_volume is None: region_mapping_volume = dao.try_load_last_entity_of_type(self.current_project_id, RegionVolumeMapping) if region_mapping_volume is None: raise LaunchException('You should have a volume mapping to launch this viewer') return region_mapping_volume
def _assert_stored_dt2(self, expected_cnt=1):
    """Assert that ``expected_cnt`` Datatype2 entities are stored and return the latest one."""
    stored_count = dao.count_datatypes(self.test_project.id, Datatype2)
    assert stored_count == expected_cnt
    latest = dao.try_load_last_entity_of_type(self.test_project.id, Datatype2)
    assert latest.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
    return latest