def prepare_mapped_sensors_as_measure_points_params(sensors, eeg_cap=None, adapter_id=None):
    """
    Compute sensor positions by projecting them onto the ``eeg_cap`` surface.

    When ``eeg_cap`` is not given, the sensors are returned as they are
    (not projected), via the plain sensor-params builder.

    :returns: dictionary to be used in Viewers for rendering measure_points
    :rtype: dict
    """
    if not eeg_cap:
        return prepare_sensors_as_measure_points_params(sensors)

    mapped_locations_url = URLGenerator.build_url(
        adapter_id, 'sensors_to_surface', sensors.gid,
        parameter='surface_to_map_gid=' + eeg_cap.gid)
    labels_url = URLGenerator.build_h5_url(sensors.gid, 'get_labels')
    sensors_count = sensors.number_of_sensors

    return {'urlMeasurePoints': mapped_locations_url,
            'urlMeasurePointsLabels': labels_url,
            'noOfMeasurePoints': sensors_count,
            'minMeasure': 0,
            'maxMeasure': sensors_count,
            'urlMeasure': ''}
def _get_data_set_urls(self, list_of_timeseries, is_preview=False):
    """
    Build the paged data-retrieval URLs for a list of time series.

    :param list_of_timeseries: time series entities to generate URLs for
    :param is_preview: when True, only the first time series is used and a
        single page of ``preview_page_size`` entries is exposed
    :returns: tuple (base_urls, page_size, total_pages_set, time_set_urls);
        ``time_set_urls`` is a list of lists — one list of page URLs per
        time series
    """
    base_urls = []
    time_set_urls = []
    total_pages_set = []
    if is_preview is False:
        page_size = self.page_size
        for timeseries in list_of_timeseries:
            overall_shape = timeseries.read_data_shape()
            # ceil(total_points / page_size): add one page for any remainder
            total_pages = overall_shape[0] // self.page_size
            if overall_shape[0] % self.page_size > 0:
                total_pages += 1
            timeline_urls = []
            ts_gid = timeseries.gid.load().hex
            for i in range(total_pages):
                # the last page may hold fewer than page_size entries
                current_max_size = min((i + 1) * self.page_size, overall_shape[0]) - i * self.page_size
                params = "current_page=" + str(i) + ";page_size=" + str(self.page_size) + \
                         ";max_size=" + str(current_max_size)
                timeline_urls.append(URLGenerator.build_h5_url(ts_gid, 'read_time_page', parameter=params))
            base_urls.append(URLGenerator.build_base_h5_url(ts_gid))
            time_set_urls.append(timeline_urls)
            total_pages_set.append(total_pages)
    else:
        # preview mode: a single page taken from the first time series only
        ts_gid = list_of_timeseries[0].gid.load().hex
        base_urls.append(URLGenerator.build_base_h5_url(ts_gid))
        total_pages_set.append(1)
        page_size = self.preview_page_size
        params = "current_page=0;page_size=" + str(self.preview_page_size) + ";max_size=" + \
                 str(min(self.preview_page_size, list_of_timeseries[0].read_data_shape()[0]))
        time_set_urls.append([URLGenerator.build_h5_url(ts_gid, 'read_time_page', parameter=params)])
    return base_urls, page_size, total_pages_set, time_set_urls
def get_urls_for_rendering(surface_h5, region_mapping_gid=None):
    """
    Compose URLs from which the JS client can fetch a surface for rendering.

    :returns: tuple (vertices, normals, lines, triangles, region_map_urls);
        the last element is None when no region mapping gid was supplied
    """
    gid = surface_h5.gid.load().hex
    url_vertices, url_triangles, url_normals, url_lines, url_region_map = [], [], [], [], []

    for slice_idx in range(surface_h5.get_number_of_split_slices()):
        slice_param = "slice_number=" + str(slice_idx)
        url_vertices.append(
            URLGenerator.build_h5_url(gid, 'get_vertices_slice', parameter=slice_param, flatten=True))
        url_triangles.append(
            URLGenerator.build_h5_url(gid, 'get_triangles_slice', parameter=slice_param, flatten=True))
        url_lines.append(
            URLGenerator.build_h5_url(gid, 'get_lines_slice', parameter=slice_param, flatten=True))
        url_normals.append(
            URLGenerator.build_h5_url(gid, 'get_vertex_normals_slice', parameter=slice_param, flatten=True))
        if region_mapping_gid is not None:
            # region colors are sliced by the vertex boundaries of this surface slice
            start_idx, end_idx = surface_h5.get_slice_vertex_boundaries(slice_idx)
            boundaries_param = "start_idx=" + str(start_idx) + ";end_idx=" + str(end_idx)
            url_region_map.append(
                URLGenerator.build_h5_url(region_mapping_gid, "get_region_mapping_slice",
                                          flatten=True, parameter=boundaries_param))

    if region_mapping_gid:
        return url_vertices, url_normals, url_lines, url_triangles, url_region_map
    return url_vertices, url_normals, url_lines, url_triangles, None
def launch(self, view_model):
    # type: (TractViewerModel) -> dict
    """Build the rendering parameters for the tract visualizer page."""
    tracts_index = load.load_entity_by_gid(view_model.tracts)
    region_volume_mapping_index = load.load_entity_by_gid(tracts_index.fk_region_volume_map_gid)

    shell_surface_index = None
    if view_model.shell_surface:
        shell_surface_index = self.load_entity_by_gid(view_model.shell_surface)
    # fall back to a FACE surface from the current project when none was chosen
    shell_surface_index = ensure_shell_surface(self.current_project_id, shell_surface_index, FACE)

    params = dict(
        title="Tract Visualizer",
        shellObject=self.prepare_shell_surface_params(shell_surface_index, SurfaceURLGenerator),
        urlTrackStarts=URLGenerator.build_h5_url(tracts_index.gid, 'get_line_starts'),
        urlTrackVertices=URLGenerator.build_binary_datatype_attribute_url(tracts_index.gid, 'get_vertices'))

    connectivity = self.load_traited_by_gid(region_volume_mapping_index.fk_connectivity_gid)
    params.update(self.build_params_for_selectable_connectivity(connectivity))

    return self.build_display_result("tract/tract_view", params,
                                     pages={"controlPage": "tract/tract_viewer_controls"})
def compute_params(self, region_mapping_volume=None, measure=None, data_slice='', background=None):
    # type: (RegionVolumeMappingIndex, DataTypeMatrix, str, StructuralMRIIndex) -> dict
    """
    Assemble viewer parameters for a region volume mapping, optionally
    projecting a measure onto it and adding a background volume.
    """
    region_mapping_volume = self._ensure_region_mapping_index(region_mapping_volume, measure)
    url_voxel_region = URLGenerator.build_url(self.stored_adapter.id, 'get_voxel_region',
                                              region_mapping_volume.gid, parameter='')

    if measure is not None:
        params = self._compute_measure_params(region_mapping_volume, measure, data_slice)
    else:
        params = self._compute_region_volume_map_params(region_mapping_volume)

    volume_index = self.load_entity_by_gid(region_mapping_volume.fk_volume_gid)
    assert isinstance(volume_index, VolumeIndex)

    # prepend a time dimension of size 1 so the shape matches the 4D layout the client expects
    volume_shape = (1,) + region_mapping_volume.parsed_shape
    params.update(volumeShape=json.dumps(volume_shape),
                  volumeOrigin=volume_index.origin,
                  voxelUnit=volume_index.voxel_unit,
                  voxelSize=volume_index.voxel_size,
                  urlVoxelRegion=url_voxel_region)

    if background is None:
        background = dao.try_load_last_entity_of_type(self.current_project_id, StructuralMRIIndex)
    if background is None:
        # still nothing to show behind the map
        params.update(self.compute_background_params())
    else:
        url_volume_data = URLGenerator.build_url(self.stored_adapter.id, 'get_volume_view',
                                                 background.gid, '')
        params.update(self.compute_background_params(background.array_data_min,
                                                     background.array_data_max, url_volume_data))
    return params
def get_urls_for_pick_rendering(surface_h5):
    """
    Compose URLs from which the JS client can fetch a surface used for picking.

    :returns: tuple (vertices, normals, triangles) of per-slice URL lists
    """
    gid = surface_h5.gid.load().hex
    triangle_count = surface_h5.number_of_triangles.load()
    # ceil division: one extra slice for the remainder, if any
    slice_count = triangle_count // SPLIT_PICK_MAX_TRIANGLE
    if triangle_count % SPLIT_PICK_MAX_TRIANGLE > 0:
        slice_count += 1

    vertices, triangles, normals = [], [], []
    for slice_idx in range(slice_count):
        slice_param = "slice_number=" + str(slice_idx)
        vertices.append(URLGenerator.build_h5_url(gid, 'get_pick_vertices_slice',
                                                  parameter=slice_param, flatten=True))
        triangles.append(URLGenerator.build_h5_url(gid, 'get_pick_triangles_slice',
                                                   parameter=slice_param, flatten=True))
        normals.append(URLGenerator.build_h5_url(gid, 'get_pick_vertex_normals_slice',
                                                 parameter=slice_param, flatten=True))
    return vertices, normals, triangles
def launch(self, view_model):
    # type: (ConnectivityAnnotationsViewModel) -> dict
    """Prepare all rendering parameters and display the connectivity annotations viewer."""
    annotations_index = self.load_entity_by_gid(view_model.annotations_index)

    # the connectivity may be given explicitly, or taken from the annotations entity
    if view_model.connectivity_index is None:
        connectivity_index = self.load_entity_by_gid(annotations_index.connectivity_gid)
    else:
        connectivity_index = self.load_entity_by_gid(view_model.connectivity_index)

    # likewise, fall back to any RegionMapping linked to this connectivity
    if view_model.region_mapping_index is None:
        region_map = dao.get_generic_entity(RegionMappingIndex, connectivity_index.gid,
                                            'connectivity_gid')
        if len(region_map) < 1:
            raise LaunchException(
                "Can not launch this viewer unless we have at least a RegionMapping for the current Connectivity!")
        region_mapping_index = region_map[0]
    else:
        region_mapping_index = self.load_entity_by_gid(view_model.region_mapping_index)

    boundary_url = SurfaceURLGenerator.get_url_for_region_boundaries(region_mapping_index.surface_gid,
                                                                     region_mapping_index.gid,
                                                                     self.stored_adapter.id)

    surface_index = self.load_entity_by_gid(region_mapping_index.surface_gid)
    surface_h5 = h5.h5_file_for_index(surface_index)
    assert isinstance(surface_h5, SurfaceH5)
    url_vertices_pick, url_normals_pick, url_triangles_pick = SurfaceURLGenerator.get_urls_for_pick_rendering(
        surface_h5)
    url_vertices, url_normals, _, url_triangles, url_region_map = SurfaceURLGenerator.get_urls_for_rendering(
        surface_h5, region_mapping_index.gid)

    params = dict(title="Connectivity Annotations Visualizer",
                  baseUrl=TvbProfile.current.web.BASE_URL,
                  annotationsTreeUrl=URLGenerator.build_url(self.stored_adapter.id, 'tree_json',
                                                            view_model.annotations_index),
                  urlTriangleToRegion=URLGenerator.build_url(self.stored_adapter.id,
                                                             "get_triangles_mapping",
                                                             region_mapping_index.gid),
                  urlActivationPatterns=URLGenerator.paths2url(view_model.annotations_index,
                                                               "get_activation_patterns"),
                  minValue=0,
                  maxValue=connectivity_index.number_of_regions - 1,
                  urlColors=json.dumps(url_region_map),
                  urlVerticesPick=json.dumps(url_vertices_pick),
                  urlTrianglesPick=json.dumps(url_triangles_pick),
                  urlNormalsPick=json.dumps(url_normals_pick),
                  brainCenter=json.dumps(surface_h5.center()),
                  urlVertices=json.dumps(url_vertices),
                  urlTriangles=json.dumps(url_triangles),
                  urlNormals=json.dumps(url_normals),
                  urlRegionBoundaries=boundary_url)

    return self.build_display_result("annotations/annotations_view", params,
                                     pages={"controlPage": "annotations/annotations_controls"})
def launch(self, view_model):
    # type: (TimeSeriesVolumeVisualiserModel) -> dict
    """Prepare parameters and display the volumetric time series viewer."""
    url_volume_data = URLGenerator.build_url(self.stored_adapter.id, 'get_volume_view',
                                             view_model.time_series, '')
    url_timeseries_data = URLGenerator.build_url(self.stored_adapter.id, 'get_voxel_time_series',
                                                 view_model.time_series, '')

    ts_h5_class, ts_h5_path = self._load_h5_of_gid(view_model.time_series.hex)
    ts_h5 = ts_h5_class(ts_h5_path)
    min_value, max_value = ts_h5.get_min_max_values()
    ts_index = self.load_entity_by_gid(view_model.time_series)

    if isinstance(ts_h5, TimeSeriesVolumeH5):
        # time series stored directly on a volume: its data shape is already volumetric
        volume_h5_class, volume_h5_path = self._load_h5_of_gid(ts_h5.volume.load())
        volume_h5 = volume_h5_class(volume_h5_path)
        volume_shape = ts_h5.data.shape
    else:
        # region-level time series: derive the volumetric shape through the region volume mapping
        rmv_index = self.load_entity_by_gid(ts_h5.region_mapping_volume.load())
        rmv_h5_class, rmv_h5_path = self._load_h5_of_gid(rmv_index.gid)
        rmv_h5 = rmv_h5_class(rmv_h5_path)
        volume_index = self.load_entity_by_gid(rmv_h5.volume.load())
        volume_h5_class, volume_h5_path = self._load_h5_of_gid(volume_index.gid)
        volume_h5 = volume_h5_class(volume_h5_path)
        # time dimension comes from the time series, spatial ones from the mapping
        volume_shape = [ts_h5.data.shape[0]]
        volume_shape.extend(rmv_h5.array_data.shape)
        rmv_h5.close()

    background_index = None
    if view_model.background:
        background_index = self.load_entity_by_gid(view_model.background)

    params = dict(title="Volumetric Time Series",
                  ts_title=ts_h5.title.load(),
                  labelsStateVar=ts_index.get_labels_for_dimension(1),
                  labelsModes=list(range(ts_index.data_length_4d)),
                  minValue=min_value, maxValue=max_value,
                  urlVolumeData=url_volume_data,
                  urlTimeSeriesData=url_timeseries_data,
                  samplePeriod=ts_h5.sample_period.load(),
                  samplePeriodUnit=ts_h5.sample_period_unit.load(),
                  volumeShape=json.dumps(volume_shape),
                  volumeOrigin=json.dumps(volume_h5.origin.load().tolist()),
                  voxelUnit=volume_h5.voxel_unit.load(),
                  voxelSize=json.dumps(volume_h5.voxel_size.load().tolist()))
    params.update(self.ensure_background(background_index))
    volume_h5.close()
    ts_h5.close()

    return self.build_display_result("time_series_volume/view", params,
                                     pages=dict(controlPage="time_series_volume/controls"))
def launch(self, view_model):
    # type: (PCAModel) -> dict
    """Construct the data needed by the PCA viewer and display it."""
    with h5.h5_file_for_gid(view_model.pca) as pca_h5:
        source_gid = pca_h5.source.load()
    with h5.h5_file_for_gid(source_gid) as source_h5:
        labels_data = self.get_space_labels(source_h5)

    viewer_params = dict(
        labels_data=json.dumps(labels_data),
        fractions_update_url=URLGenerator.build_h5_url(view_model.pca, 'read_fractions_data'),
        weights_update_url=URLGenerator.build_h5_url(view_model.pca, 'read_weights_data'))
    return self.build_display_result("pca/view", viewer_params)
def _launch(self, view_model, figsize, preview=False):
    """Common launch logic for the time series viewer (full and preview modes)."""
    time_series_index = self.load_entity_by_gid(view_model.time_series)
    h5_file = h5.h5_file_for_index(time_series_index)
    assert isinstance(h5_file, TimeSeriesH5)

    shape = list(h5_file.read_data_shape())
    ts = h5_file.storage_manager.get_data('time')
    state_variables = time_series_index.get_labels_for_dimension(1)
    labels = self.get_space_labels(h5_file)

    # The first dimension is assumed to be time; cap it for previews.
    if preview:
        shape[0] = min(shape[0], self.MAX_PREVIEW_DATA_LENGTH)

    # Surface results come with no labels; generate a few placeholders,
    # but not all, otherwise the viewer would take ages to load.
    if shape[2] > 0 and not labels:
        labels.extend("Node-" + str(n) for n in range(min(self.MAX_PREVIEW_DATA_LENGTH, shape[2])))

    pars = {'baseURL': URLGenerator.build_base_h5_url(time_series_index.gid),
            'labels': labels,
            'labels_json': json.dumps(labels),
            'ts_title': time_series_index.title,
            'preview': preview,
            'figsize': figsize,
            'shape': repr(shape),
            't0': ts[0],
            'dt': ts[1] - ts[0] if len(ts) > 1 else 1,
            'labelsStateVar': state_variables,
            'labelsModes': list(range(shape[3]))}
    pars.update(self.build_params_for_subselectable_ts(h5_file))
    h5_file.close()

    return self.build_display_result("time_series/view", pars,
                                     pages=dict(controlPage="time_series/control"))
def launch(self, view_model):
    # type: (BaseVolumeVisualizerModel) -> dict
    """Display a static MRI volume."""
    structural_mri = self.load_entity_by_gid(view_model.background)
    assert isinstance(structural_mri, StructuralMRIIndex)

    # prepend a time dimension of size 1, matching the 4D layout the client expects
    volume_shape = (1,) + structural_mri.parsed_shape
    url_volume_data = URLGenerator.build_url(self.stored_adapter.id, 'get_volume_view',
                                             view_model.background, '')

    volume_index = self.load_entity_by_gid(structural_mri.fk_volume_gid)
    assert isinstance(volume_index, VolumeIndex)

    params = dict(title="MRI Volume visualizer",
                  minValue=structural_mri.array_data_min,
                  maxValue=structural_mri.array_data_max,
                  urlVolumeData=url_volume_data,
                  volumeShape=json.dumps(volume_shape),
                  volumeOrigin=volume_index.origin,
                  voxelUnit=volume_index.voxel_unit,
                  voxelSize=volume_index.voxel_size,
                  urlVoxelRegion='',
                  urlBackgroundVolumeData='',
                  minBackgroundValue=structural_mri.array_data_min,
                  maxBackgroundValue=structural_mri.array_data_max)
    return self.build_display_result("time_series_volume/staticView", params,
                                     pages=dict(controlPage="time_series_volume/controls"))
def launch(self, view_model):
    # type: (PearsonCorrelationCoefficientVisualizerModel) -> dict
    """Construct data for visualization and launch it."""
    datatype_h5_class, datatype_h5_path = self._load_h5_of_gid(view_model.datatype.hex)
    with datatype_h5_class(datatype_h5_path) as datatype_h5:
        matrix_shape = datatype_h5.array_data.shape[0:2]
        ts_gid = datatype_h5.source.load()

    ts_index = self.load_entity_by_gid(ts_gid.hex)
    state_list = ts_index.get_labels_for_dimension(1)
    mode_list = list(range(ts_index.data_length_4d))

    ts_h5_class, ts_h5_path = self._load_h5_of_gid(ts_index.gid)
    with ts_h5_class(ts_h5_path) as ts_h5:
        # an empty label list is sent to the client as null
        labels = self.get_space_labels(ts_h5) or None

    pars = dict(matrix_labels=json.dumps(labels),
                matrix_shape=json.dumps(matrix_shape),
                viewer_title='Pearson Edge Bundle',
                url_base=URLGenerator.build_h5_url(view_model.datatype.hex, 'get_correlation_data',
                                                   flatten="True", parameter=''),
                state_variable=0,
                mode=mode_list[0],
                state_list=state_list,
                mode_list=mode_list,
                pearson_min=CorrelationCoefficients.PEARSON_MIN,
                pearson_max=CorrelationCoefficients.PEARSON_MAX,
                thresh=0.5)
    return self.build_display_result("pearson_edge_bundle/view", pars)
def get_url_for_region_boundaries(surface_gid, region_mapping_gid, adapter_id):
    """Return the URL that generates region boundaries for the given surface and region mapping."""
    boundaries_param = 'region_mapping_gid=' + region_mapping_gid
    return URLGenerator.build_url(adapter_id, 'generate_region_boundaries', surface_gid,
                                  parameter=boundaries_param)
def launch(self, view_model):
    """Construct data for visualization and launch it."""
    cc_gid = view_model.datatype.hex
    cc_index = self.load_entity_by_gid(cc_gid)
    assert isinstance(cc_index, CorrelationCoefficientsIndex)
    matrix_shape = cc_index.parsed_shape[0:2]

    ts_index = self.load_entity_by_gid(cc_index.fk_source_gid)
    state_list = ts_index.get_labels_for_dimension(1)
    mode_list = list(range(ts_index.data_length_4d))
    with h5.h5_file_for_index(ts_index) as ts_h5:
        # an empty label list is sent to the client as null
        labels = self.get_space_labels(ts_h5) or None

    pars = dict(matrix_labels=json.dumps([labels, labels]),
                matrix_shape=json.dumps(matrix_shape),
                viewer_title='Cross Correlation Matrix Plot',
                url_base=URLGenerator.build_h5_url(cc_gid, 'get_correlation_data', parameter=''),
                state_variable=state_list[0],
                mode=mode_list[0],
                state_list=state_list,
                mode_list=mode_list,
                pearson_min=CorrelationCoefficients.PEARSON_MIN,
                pearson_max=CorrelationCoefficients.PEARSON_MAX)
    return self.build_display_result("pearson_correlation/view", pars)
def launch(self, datatype):
    """
    Construct data for visualization and launch it.

    :param datatype: entity whose H5 file holds the correlation data and a
        reference to the source time series
    """
    datatype_h5_class, datatype_h5_path = self._load_h5_of_gid(datatype.gid)
    with datatype_h5_class(datatype_h5_path) as datatype_h5:
        matrix_shape = datatype_h5.array_data.shape[0:2]
        ts_gid = datatype_h5.source.load()

    ts_index = self.load_entity_by_gid(ts_gid.hex)
    ts_h5_class, ts_h5_path = self._load_h5_of_gid(ts_index.gid)
    with ts_h5_class(ts_h5_path) as ts_h5:
        labels = ts_h5.get_space_labels()
        # state variables: labels stored for the 2nd data dimension, if any
        state_list = ts_h5.labels_dimensions.load().get(ts_h5.labels_ordering.load()[1], [])
        mode_list = list(range(ts_index.data_length_4d))
        if not labels:
            # an empty label list is sent to the client as null
            labels = None

    pars = dict(matrix_labels=json.dumps([labels, labels]),
                matrix_shape=json.dumps(matrix_shape),
                # fixed typo & capitalization (was 'Cross Corelation Matrix plot'),
                # now consistent with the sibling index-based launcher
                viewer_title='Cross Correlation Matrix Plot',
                url_base=URLGenerator.build_h5_url(datatype.gid, 'get_correlation_data', parameter=''),
                state_variable=state_list[0],
                mode=mode_list[0],
                state_list=state_list,
                mode_list=mode_list,
                pearson_min=CorrelationCoefficients.PEARSON_MIN,
                pearson_max=CorrelationCoefficients.PEARSON_MAX)
    return self.build_display_result("pearson_correlation/view", pars)
def prepare_sensors_as_measure_points_params(sensors):
    """
    Build the URLs from which the client fetches the measure points (sensor
    locations) and their labels.
    """
    sensors_count = sensors.number_of_sensors
    return {
        'urlMeasurePoints': URLGenerator.build_h5_url(sensors.gid, 'get_locations'),
        'urlMeasurePointsLabels': URLGenerator.build_h5_url(sensors.gid, 'get_labels'),
        'noOfMeasurePoints': sensors_count,
        'minMeasure': 0,
        'maxMeasure': sensors_count,
        'urlMeasure': ''
    }
def _compute_measure_params(self, region_mapping_volume, measure, data_slice):
    """
    Prepare the URL and value range used to project ``measure`` onto the
    region volume map.

    :param region_mapping_volume: H5 view of the RegionVolumeMapping
    :param measure: entity holding the measure array
    :param data_slice: slice string selecting a 2D view of the measure; when
        empty, a default slice is derived from the connectivity size
    :returns: dict of viewer parameters (value range, volume-view URL, shape, slice)
    """
    # fix: close the measure H5 file when done (it was previously left open)
    measure_h5_class, measure_h5_path = self._load_h5_of_gid(measure.gid)
    with measure_h5_class(measure_h5_path) as measure_h5:
        min_value, max_value = measure_h5.get_min_max_values()
        measure_shape = measure_h5.array_data.shape

    if not data_slice:
        conn_index = dao.get_datatype_by_gid(region_mapping_volume.connectivity.load().hex)
        data_slice = self.get_default_slice(measure_shape, conn_index.number_of_regions)
        data_slice = slice_str(data_slice)

    url_volume_data = URLGenerator.build_url(self.stored_adapter.id, 'get_mapped_array_volume_view',
                                             region_mapping_volume.gid.load(), parameter='')
    # extra query parameters are appended directly to the generated URL
    url_volume_data += 'mapped_array_gid=' + measure.gid + ';mapped_array_slice=' + data_slice + ';'

    return dict(minValue=min_value, maxValue=max_value, urlVolumeData=url_volume_data,
                measureShape=slice_str(measure_shape), measureSlice=data_slice)
def launch(self, view_model):
    # type: (ImaginaryCoherenceDisplayModel) -> dict
    """Draw the interactive complex-coherence display."""
    self.log.debug("Plot started...")

    input_data_h5_class, input_data_h5_path = self._load_h5_of_gid(view_model.input_data.hex)
    with input_data_h5_class(input_data_h5_path) as input_data_h5:
        source_gid = input_data_h5.source.load()
    source_index = self.load_entity_by_gid(source_gid.hex)

    spectrum_types = input_data_h5_class.spectrum_types
    params = dict(plotName=source_index.type,
                  xAxisName="Frequency [kHz]",
                  yAxisName="CohSpec",
                  available_xScale=["Linear", "Logarithmic"],
                  available_spectrum=json.dumps(spectrum_types),
                  spectrum_list=spectrum_types,
                  xscale="Linear",
                  spectrum=spectrum_types[0],
                  url_base=URLGenerator.build_h5_url(view_model.input_data, 'get_spectrum_data',
                                                     parameter=""),
                  # TODO investigate the static xmin and xmax values
                  xmin=0.02,
                  xmax=0.8)
    return self.build_display_result("complex_coherence/view", params)
def get_url_for_region_boundaries(surface_h5, region_mapping_gid, adapter_id):
    """
    Return the URL that generates region boundaries for the given surface.

    :param surface_h5: opened surface H5 file, from which the surface gid is read
    :param region_mapping_gid: gid (hex) of the RegionMapping to use
    :param adapter_id: id of the stored adapter exposing 'generate_region_boundaries'
    """
    surface_gid = surface_h5.gid.load().hex
    # fix: build_url takes the adapter id first and the entity gid third, as in the
    # gid-based sibling helper — previously the surface gid was passed in the
    # adapter-id position and adapter_id as a stray keyword.
    return URLGenerator.build_url(adapter_id, 'generate_region_boundaries', surface_gid,
                                  parameter='region_mapping_gid=' + region_mapping_gid)
def _compute_region_volume_map_params(self, region_mapping_volume):
    # type: (RegionVolumeMappingIndex) -> dict
    """Prepare the URL and value range used to display the region volume map itself."""
    conn_index = dao.get_datatype_by_gid(region_mapping_volume.fk_connectivity_gid)
    url_volume_data = URLGenerator.build_url(self.stored_adapter.id, 'get_volume_view',
                                             region_mapping_volume.gid, '')
    # displayed values are region indices, ranging up to the connectivity size
    return dict(minValue=0, maxValue=conn_index.number_of_regions, urlVolumeData=url_volume_data)
def ensure_background(self, background_index):
    """
    Return background-display parameters, falling back to the most recent
    StructuralMRI in the current project when no background index was given.
    """
    if background_index is None:
        background_index = dao.try_load_last_entity_of_type(self.current_project_id, StructuralMRIIndex)
    if background_index is None:
        # nothing to display behind the data
        return _MappedArrayVolumeBase.compute_background_params()

    with h5.h5_file_for_index(background_index) as background_h5:
        min_value, max_value = background_h5.get_min_max_values()
    url_volume_data = URLGenerator.build_url(self.stored_adapter.id, 'get_volume_view',
                                             background_index.gid, '')
    return _MappedArrayVolumeBase.compute_background_params(min_value, max_value, url_volume_data)
def _prepare_data_slices(self, time_series_index):
    """
    Prepare data URLs for retrieving slices of the time series activity and
    the time line.

    :returns: (activity_base_url, [timeline_urls]); currently a single
        timeline URL is produced, as the client loads the time axis entirely.
    """
    ts_gid = time_series_index.gid
    activity_base_url = URLGenerator.build_url(self.stored_adapter.id, 'read_data_page_split',
                                               ts_gid, "")
    time_params = "current_page=0;page_size=" + str(time_series_index.data_length_1d)
    time_urls = [SurfaceURLGenerator.build_h5_url(ts_gid, 'read_time_page', parameter=time_params)]
    return activity_base_url, time_urls
def _compute_measure_params(self, rvm_index, measure, data_slice):
    # type: (RegionVolumeMappingIndex, DataTypeMatrix, str) -> dict
    """Prepare the URL that projects ``measure`` onto the region volume map."""
    measure_shape = measure.parsed_shape
    if not data_slice:
        # derive a default 2D slice from the connectivity size
        conn_index = dao.get_datatype_by_gid(rvm_index.fk_connectivity_gid)
        data_slice = slice_str(self.get_default_slice(measure_shape, conn_index.number_of_regions))

    base_url = URLGenerator.build_url(self.stored_adapter.id, 'get_mapped_array_volume_view',
                                      rvm_index.gid, parameter='')
    # extra query parameters are appended directly to the generated URL
    url_volume_data = base_url + 'mapped_array_gid=' + measure.gid + \
                      ';mapped_array_slice=' + data_slice + ';'

    return dict(minValue=measure.array_data_min,
                maxValue=measure.array_data_max,
                urlVolumeData=url_volume_data,
                measureShape=measure.shape,
                measureSlice=data_slice)
def _compute_background(self, background):
    """Compute the background-display parameters for the given background entity (or defaults)."""
    if background is None:
        return self.compute_background_params()

    bg_h5_class, bg_h5_path = self._load_h5_of_gid(background.gid)
    bg_h5 = bg_h5_class(bg_h5_path)
    min_value, max_value = bg_h5.get_min_max_values()
    bg_h5.close()

    url_volume_data = URLGenerator.build_url(self.stored_adapter.id, 'get_volume_view',
                                             background.gid, '')
    return self.compute_background_params(min_value, max_value, url_volume_data)
def launch(self, view_model):
    """Construct data for visualization and launch it."""
    connectivity = self.load_traited_by_gid(view_model.connectivity)
    weights_url = URLGenerator.paths2url(view_model.connectivity,
                                         attribute_name="weights", flatten="True")
    pars = {"labels": json.dumps(connectivity.region_labels.tolist()),
            "url_base": weights_url}
    return self.build_display_result("connectivity_edge_bundle/view", pars)
def launch(self, view_model):
    # type: (FourierSpectrumModel) -> dict
    """Prepare parameters and display the Fourier spectrum viewer."""
    self.log.debug("Plot started...")
    # these partial loads are dangerous for TS and FS instances, but efficient
    fs_input_index = self.load_entity_by_gid(view_model.input_data)
    fourier_spectrum = FourierSpectrum()
    with h5.h5_file_for_index(fs_input_index) as input_h5:
        shape = list(input_h5.array_data.shape)
        fourier_spectrum.segment_length = input_h5.segment_length.load()
        fourier_spectrum.windowing_function = input_h5.windowing_function.load()

    ts_index = self.load_entity_by_gid(fs_input_index.fk_source_gid)
    state_list = ts_index.get_labels_for_dimension(1)
    if len(state_list) == 0:
        # fall back to numeric state-variable indices when no labels are stored
        state_list = list(range(shape[1]))
    fourier_spectrum.source = TimeSeries(sample_period=ts_index.sample_period)
    mode_list = list(range(shape[3]))
    available_scales = ["Linear", "Logarithmic"]

    params = dict(matrix_shape=json.dumps([shape[0], shape[2]]),
                  plotName=ts_index.title,
                  url_base=URLGenerator.build_h5_url(view_model.input_data, "get_fourier_data",
                                                     parameter=""),
                  xAxisName="Frequency [kHz]",
                  yAxisName="Power",
                  available_scales=available_scales,
                  state_list=state_list,
                  mode_list=mode_list,
                  normalize_list=["no", "yes"],
                  normalize="no",
                  state_variable=state_list[0],
                  mode=mode_list[0],
                  xscale=available_scales[0],
                  yscale=available_scales[0],
                  x_values=json.dumps(fourier_spectrum.frequency[slice(shape[0])].tolist()),
                  xmin=fourier_spectrum.freq_step,
                  xmax=fourier_spectrum.max_freq)
    return self.build_display_result("fourier_spectrum/view", params)
def compute_params(self, region_mapping_volume=None, measure=None, data_slice='', background=None):
    """
    Assemble viewer parameters for a region volume mapping, optionally
    projecting a measure onto it and adding a background volume.
    """
    region_mapping_volume = self._ensure_region_mapping_index(region_mapping_volume)

    rmv_h5_class, rmv_h5_path = self._load_h5_of_gid(region_mapping_volume.gid)
    rmv_h5 = rmv_h5_class(rmv_h5_path)
    volume_shape = rmv_h5.array_data.shape
    # prepend a time dimension of size 1
    volume_shape = (1, ) + volume_shape

    if measure is None:
        params = self._compute_region_volume_map_params(rmv_h5)
    else:
        params = self._compute_measure_params(rmv_h5, measure, data_slice)

    url_voxel_region = URLGenerator.build_h5_url(region_mapping_volume.gid, 'get_voxel_region',
                                                 parameter='')

    volume_gid = rmv_h5.volume.load()
    volume_h5_class, volume_g5_path = self._load_h5_of_gid(volume_gid.hex)
    volume_h5 = volume_h5_class(volume_g5_path)

    params.update(volumeShape=json.dumps(volume_shape),
                  volumeOrigin=json.dumps(volume_h5.origin.load().tolist()),
                  voxelUnit=volume_h5.voxel_unit.load(),
                  voxelSize=json.dumps(volume_h5.voxel_size.load().tolist()),
                  urlVoxelRegion=url_voxel_region)

    rmv_h5.close()
    volume_h5.close()

    # when no background was given, try the latest StructuralMRI in the project
    if background is None:
        background = dao.try_load_last_entity_of_type(self.current_project_id, StructuralMRIIndex)
    params.update(self._compute_background(background))

    return params
def launch(self, view_model):
    # type: (VolumeVisualizerModel) -> dict
    """Display a static MRI volume loaded from H5 storage."""
    bg_h5_class, bg_h5_path = self._load_h5_of_gid(view_model.background.hex)
    background_h5 = bg_h5_class(bg_h5_path)
    # prepend a time dimension of size 1
    volume_shape = (1,) + background_h5.array_data.shape
    min_value, max_value = background_h5.get_min_max_values()

    url_volume_data = URLGenerator.build_url(self.stored_adapter.id, 'get_volume_view',
                                             view_model.background, '')

    volume_gid = background_h5.volume.load()
    volume_h5_class, volume_h5_path = self._load_h5_of_gid(volume_gid.hex)
    volume_h5 = volume_h5_class(volume_h5_path)

    params = dict(title="MRI Volume visualizer",
                  minValue=min_value,
                  maxValue=max_value,
                  urlVolumeData=url_volume_data,
                  volumeShape=json.dumps(volume_shape),
                  volumeOrigin=json.dumps(volume_h5.origin.load().tolist()),
                  voxelUnit=volume_h5.voxel_unit.load(),
                  voxelSize=json.dumps(volume_h5.voxel_size.load().tolist()),
                  urlVoxelRegion='',
                  minBackgroundValue=min_value,
                  maxBackgroundValue=max_value,
                  urlBackgroundVolumeData='')
    background_h5.close()
    volume_h5.close()

    return self.build_display_result("time_series_volume/staticView", params,
                                     pages=dict(controlPage="time_series_volume/controls"))