def get_urls_for_pick_rendering(surface_h5):
    """
    Compose URLS for the JS code to retrieve a surface for picking.
    """
    triangle_count = surface_h5.number_of_triangles.load()
    # Ceiling division: one slice per SPLIT_PICK_MAX_TRIANGLE triangles.
    slice_count = -(-triangle_count // SPLIT_PICK_MAX_TRIANGLE)
    gid = surface_h5.gid.load().hex
    slice_params = ["slice_number=" + str(idx) for idx in range(slice_count)]
    vertices = [URLGenerator.build_h5_url(gid, 'get_pick_vertices_slice', parameter=p, flatten=True)
                for p in slice_params]
    triangles = [URLGenerator.build_h5_url(gid, 'get_pick_triangles_slice', parameter=p, flatten=True)
                 for p in slice_params]
    normals = [URLGenerator.build_h5_url(gid, 'get_pick_vertex_normals_slice', parameter=p, flatten=True)
               for p in slice_params]
    return vertices, normals, triangles
def _get_data_set_urls(self, list_of_timeseries, is_preview=False):
    """
    Returns a list of lists. Each list contains the urls to the files
    containing the data for a certain array wrapper.
    """
    base_urls, time_set_urls, total_pages_set = [], [], []
    # NOTE: the original check is identity against False, so any other value
    # (including None) selects the preview branch — preserved here.
    if is_preview is not False:
        # Preview mode: only the first time series, a single page.
        first_ts = list_of_timeseries[0]
        ts_gid = first_ts.gid.load().hex
        page_size = self.preview_page_size
        max_size = min(self.preview_page_size, first_ts.read_data_shape()[0])
        params = f"current_page=0;page_size={self.preview_page_size};max_size={max_size}"
        base_urls.append(URLGenerator.build_base_h5_url(ts_gid))
        total_pages_set.append(1)
        time_set_urls.append([URLGenerator.build_h5_url(ts_gid, 'read_time_page', parameter=params)])
    else:
        page_size = self.page_size
        for timeseries in list_of_timeseries:
            time_length = timeseries.read_data_shape()[0]
            total_pages = time_length // self.page_size
            if time_length % self.page_size > 0:
                total_pages += 1
            ts_gid = timeseries.gid.load().hex
            timeline_urls = []
            for page in range(total_pages):
                # Last page may be shorter than a full page.
                current_max_size = min((page + 1) * self.page_size, time_length) - page * self.page_size
                params = f"current_page={page};page_size={self.page_size};max_size={current_max_size}"
                timeline_urls.append(URLGenerator.build_h5_url(ts_gid, 'read_time_page', parameter=params))
            base_urls.append(URLGenerator.build_base_h5_url(ts_gid))
            time_set_urls.append(timeline_urls)
            total_pages_set.append(total_pages)
    return base_urls, page_size, total_pages_set, time_set_urls
def get_urls_for_rendering(surface_h5, region_mapping_gid=None):
    """
    Compose URLs for the JS code to retrieve a surface from the UI for rendering.
    """
    gid = surface_h5.gid.load().hex
    url_vertices, url_normals, url_lines, url_triangles, url_region_map = [], [], [], [], []
    for slice_idx in range(surface_h5.get_number_of_split_slices()):
        param = "slice_number=" + str(slice_idx)
        url_vertices.append(URLGenerator.build_h5_url(gid, 'get_vertices_slice', parameter=param, flatten=True))
        url_triangles.append(URLGenerator.build_h5_url(gid, 'get_triangles_slice', parameter=param, flatten=True))
        url_lines.append(URLGenerator.build_h5_url(gid, 'get_lines_slice', parameter=param, flatten=True))
        url_normals.append(URLGenerator.build_h5_url(gid, 'get_vertex_normals_slice', parameter=param, flatten=True))
        if region_mapping_gid is not None:
            start_idx, end_idx = surface_h5.get_slice_vertex_boundaries(slice_idx)
            boundary_param = "start_idx=" + str(start_idx) + ";end_idx=" + str(end_idx)
            url_region_map.append(URLGenerator.build_h5_url(region_mapping_gid, "get_region_mapping_slice",
                                                            flatten=True, parameter=boundary_param))
    if region_mapping_gid:
        return url_vertices, url_normals, url_lines, url_triangles, url_region_map
    return url_vertices, url_normals, url_lines, url_triangles, None
def launch(self, view_model):
    # type: (PCAModel) -> dict
    """Construct data for visualization and launch it."""
    pca_gid = view_model.pca
    with h5.h5_file_for_gid(pca_gid) as pca_h5:
        source_gid = pca_h5.source.load()
    with h5.h5_file_for_gid(source_gid) as source_h5:
        labels_data = self.get_space_labels(source_h5)
    params = dict(labels_data=json.dumps(labels_data),
                  fractions_update_url=URLGenerator.build_h5_url(pca_gid, 'read_fractions_data'),
                  weights_update_url=URLGenerator.build_h5_url(pca_gid, 'read_weights_data'))
    return self.build_display_result("pca/view", params)
def launch(self, view_model):
    # type: (ImaginaryCoherenceDisplayModel) -> dict
    """
    Draw interactive display.
    """
    self.log.debug("Plot started...")
    h5_class, h5_path = self._load_h5_of_gid(view_model.input_data.hex)
    with h5_class(h5_path) as input_data_h5:
        source_gid = input_data_h5.source.load()
    source_index = self.load_entity_by_gid(source_gid.hex)
    spectrum_types = h5_class.spectrum_types
    url_base = URLGenerator.build_h5_url(view_model.input_data, 'get_spectrum_data', parameter="")
    params = dict(plotName=source_index.type,
                  xAxisName="Frequency [kHz]",
                  yAxisName="CohSpec",
                  available_xScale=["Linear", "Logarithmic"],
                  available_spectrum=json.dumps(spectrum_types),
                  spectrum_list=spectrum_types,
                  xscale="Linear",
                  spectrum=spectrum_types[0],
                  url_base=url_base,
                  # TODO investigate the static xmin and xmax values
                  xmin=0.02,
                  xmax=0.8)
    return self.build_display_result("complex_coherence/view", params)
def launch(self, view_model):
    # type: (TractViewerModel) -> dict
    """Prepare URLs and parameters for the tract visualizer and launch it."""
    tracts_index = load.load_entity_by_gid(view_model.tracts)
    rvm_index = load.load_entity_by_gid(tracts_index.fk_region_volume_map_gid)
    shell_surface_index = self.load_entity_by_gid(view_model.shell_surface) if view_model.shell_surface else None
    shell_surface_index = ensure_shell_surface(self.current_project_id, shell_surface_index, FACE)
    params = dict(title="Tract Visualizer",
                  shellObject=self.prepare_shell_surface_params(shell_surface_index, SurfaceURLGenerator),
                  urlTrackStarts=URLGenerator.build_h5_url(tracts_index.gid, 'get_line_starts'),
                  urlTrackVertices=URLGenerator.build_binary_datatype_attribute_url(tracts_index.gid,
                                                                                    'get_vertices'))
    connectivity = self.load_traited_by_gid(rvm_index.fk_connectivity_gid)
    params.update(self.build_params_for_selectable_connectivity(connectivity))
    return self.build_display_result("tract/tract_view", params,
                                     pages={"controlPage": "tract/tract_viewer_controls"})
def launch(self, view_model):
    # type: (PearsonCorrelationCoefficientVisualizerModel) -> dict
    """Construct data for visualization and launch it."""
    dt_h5_class, dt_h5_path = self._load_h5_of_gid(view_model.datatype.hex)
    with dt_h5_class(dt_h5_path) as datatype_h5:
        matrix_shape = datatype_h5.array_data.shape[0:2]
        ts_gid = datatype_h5.source.load()
    ts_index = self.load_entity_by_gid(ts_gid.hex)
    state_list = ts_index.get_labels_for_dimension(1)
    mode_list = list(range(ts_index.data_length_4d))
    ts_h5_class, ts_h5_path = self._load_h5_of_gid(ts_index.gid)
    with ts_h5_class(ts_h5_path) as ts_h5:
        # Empty label collections are normalized to None for the JS side.
        labels = self.get_space_labels(ts_h5) or None
    pars = dict(matrix_labels=json.dumps(labels),
                matrix_shape=json.dumps(matrix_shape),
                viewer_title='Pearson Edge Bundle',
                url_base=URLGenerator.build_h5_url(view_model.datatype.hex, 'get_correlation_data',
                                                   flatten="True", parameter=''),
                state_variable=0,
                mode=mode_list[0],
                state_list=state_list,
                mode_list=mode_list,
                pearson_min=CorrelationCoefficients.PEARSON_MIN,
                pearson_max=CorrelationCoefficients.PEARSON_MAX,
                thresh=0.5)
    return self.build_display_result("pearson_edge_bundle/view", pars)
def prepare_mapped_sensors_as_measure_points_params(sensors, eeg_cap=None, adapter_id=None):
    """
    Compute sensors positions by mapping them to the ``eeg_cap`` surface.

    If ``eeg_cap`` is not specified the mapping will use a default EEGCap
    DataType in current project. If no default EEGCap is found, return sensors
    as they are (not projected).

    :returns: dictionary to be used in Viewers for rendering measure_points
    :rtype: dict
    """
    if not eeg_cap:
        # No cap to project onto: fall back to the raw sensor locations.
        return prepare_sensors_as_measure_points_params(sensors)
    mapping_param = 'surface_to_map_gid=' + eeg_cap.gid
    measure_points_no = sensors.number_of_sensors
    return {
        'urlMeasurePoints': URLGenerator.build_url(adapter_id, 'sensors_to_surface', sensors.gid,
                                                   parameter=mapping_param),
        'urlMeasurePointsLabels': URLGenerator.build_h5_url(sensors.gid, 'get_labels'),
        'noOfMeasurePoints': measure_points_no,
        'minMeasure': 0,
        'maxMeasure': measure_points_no,
        'urlMeasure': ''
    }
def prepare_sensors_as_measure_points_params(sensors):
    """
    Returns urls from where to fetch the measure points and their labels.
    """
    measure_points_no = sensors.number_of_sensors
    return {
        'urlMeasurePoints': URLGenerator.build_h5_url(sensors.gid, 'get_locations'),
        'urlMeasurePointsLabels': URLGenerator.build_h5_url(sensors.gid, 'get_labels'),
        'noOfMeasurePoints': measure_points_no,
        'minMeasure': 0,
        'maxMeasure': measure_points_no,
        'urlMeasure': ''
    }
def launch(self, view_model):
    """Construct data for visualization and launch it."""
    cc_gid = view_model.datatype.hex
    cc_index = self.load_entity_by_gid(cc_gid)
    assert isinstance(cc_index, CorrelationCoefficientsIndex)
    matrix_shape = cc_index.parsed_shape[0:2]
    ts_index = self.load_entity_by_gid(cc_index.fk_source_gid)
    state_list = ts_index.get_labels_for_dimension(1)
    mode_list = list(range(ts_index.data_length_4d))
    with h5.h5_file_for_index(ts_index) as ts_h5:
        # Empty label collections are normalized to None for the JS side.
        labels = self.get_space_labels(ts_h5) or None
    pars = dict(matrix_labels=json.dumps([labels, labels]),
                matrix_shape=json.dumps(matrix_shape),
                viewer_title='Cross Correlation Matrix Plot',
                url_base=URLGenerator.build_h5_url(cc_gid, 'get_correlation_data', parameter=''),
                state_variable=state_list[0],
                mode=mode_list[0],
                state_list=state_list,
                mode_list=mode_list,
                pearson_min=CorrelationCoefficients.PEARSON_MIN,
                pearson_max=CorrelationCoefficients.PEARSON_MAX)
    return self.build_display_result("pearson_correlation/view", pars)
def launch(self, datatype):
    """
    Construct data for visualization and launch it.

    :param datatype: cross-correlation datatype whose matrix will be displayed
    :returns: display-result dict for the pearson_correlation template
    """
    datatype_h5_class, datatype_h5_path = self._load_h5_of_gid(datatype.gid)
    with datatype_h5_class(datatype_h5_path) as datatype_h5:
        # Only the first two dimensions (node x node) are rendered.
        matrix_shape = datatype_h5.array_data.shape[0:2]
        ts_gid = datatype_h5.source.load()
    ts_index = self.load_entity_by_gid(ts_gid.hex)
    ts_h5_class, ts_h5_path = self._load_h5_of_gid(ts_index.gid)
    with ts_h5_class(ts_h5_path) as ts_h5:
        labels = ts_h5.get_space_labels()
        # State-variable labels live on the second dimension of the source TS.
        state_list = ts_h5.labels_dimensions.load().get(ts_h5.labels_ordering.load()[1], [])
    mode_list = list(range(ts_index.data_length_4d))
    if not labels:
        labels = None
    pars = dict(matrix_labels=json.dumps([labels, labels]),
                matrix_shape=json.dumps(matrix_shape),
                # Fixed typo ("Corelation") and matched the title used by the sibling viewer.
                viewer_title='Cross Correlation Matrix Plot',
                url_base=URLGenerator.build_h5_url(datatype.gid, 'get_correlation_data', parameter=''),
                state_variable=state_list[0],
                mode=mode_list[0],
                state_list=state_list,
                mode_list=mode_list,
                pearson_min=CorrelationCoefficients.PEARSON_MIN,
                pearson_max=CorrelationCoefficients.PEARSON_MAX)
    return self.build_display_result("pearson_correlation/view", pars)
def compute_params(self, region_mapping_volume=None, measure=None, data_slice='', background=None):
    # type: (RegionVolumeMappingIndex, DataTypeMatrix, str, StructuralMRIIndex) -> dict
    """Assemble the rendering parameters for the volumetric viewer."""
    region_mapping_volume = self._ensure_region_mapping_index(region_mapping_volume, measure)
    if measure is None:
        params = self._compute_region_volume_map_params(region_mapping_volume)
    else:
        params = self._compute_measure_params(region_mapping_volume, measure, data_slice)
    volume_index = self.load_entity_by_gid(region_mapping_volume.fk_volume_gid)
    assert isinstance(volume_index, VolumeIndex)
    # Prepend a leading singleton dimension — presumably the time axis the viewer expects.
    volume_shape = (1,) + region_mapping_volume.parsed_shape
    params.update(volumeShape=json.dumps(volume_shape),
                  volumeOrigin=volume_index.origin,
                  voxelUnit=volume_index.voxel_unit,
                  voxelSize=volume_index.voxel_size,
                  urlVoxelRegion=URLGenerator.build_h5_url(region_mapping_volume.gid, 'get_voxel_region',
                                                           parameter=''))
    if background is None:
        # Fall back to the most recent structural MRI in the current project.
        background = dao.try_load_last_entity_of_type(self.current_project_id, StructuralMRIIndex)
    if background is None:
        # Still nothing available: use the viewer defaults.
        params.update(self.compute_background_params())
    else:
        url_volume_data = URLGenerator.build_url(self.stored_adapter.id, 'get_volume_view', background.gid, '')
        params.update(self.compute_background_params(background.array_data_min, background.array_data_max,
                                                     url_volume_data))
    return params
def launch(self, view_model):
    # type: (FourierSpectrumModel) -> dict
    """Prepare the Fourier-spectrum display parameters and launch the viewer."""
    self.log.debug("Plot started...")
    # these partial loads are dangerous for TS and FS instances, but efficient
    fs_index = self.load_entity_by_gid(view_model.input_data)
    spectrum = FourierSpectrum()
    with h5.h5_file_for_index(fs_index) as fs_h5:
        shape = list(fs_h5.array_data.shape)
        spectrum.segment_length = fs_h5.segment_length.load()
        spectrum.windowing_function = fs_h5.windowing_function.load()
    ts_index = self.load_entity_by_gid(fs_index.fk_source_gid)
    spectrum.source = TimeSeries(sample_period=ts_index.sample_period)
    state_list = ts_index.get_labels_for_dimension(1)
    if len(state_list) == 0:
        # Fall back to numeric indices when the source has no state labels.
        state_list = list(range(shape[1]))
    mode_list = list(range(shape[3]))
    scales = ["Linear", "Logarithmic"]
    params = dict(matrix_shape=json.dumps([shape[0], shape[2]]),
                  plotName=ts_index.title,
                  url_base=URLGenerator.build_h5_url(view_model.input_data, "get_fourier_data", parameter=""),
                  xAxisName="Frequency [kHz]",
                  yAxisName="Power",
                  available_scales=scales,
                  state_list=state_list,
                  mode_list=mode_list,
                  normalize_list=["no", "yes"],
                  normalize="no",
                  state_variable=state_list[0],
                  mode=mode_list[0],
                  xscale=scales[0],
                  yscale=scales[0],
                  x_values=json.dumps(spectrum.frequency[:shape[0]].tolist()),
                  xmin=spectrum.freq_step,
                  xmax=spectrum.max_freq)
    return self.build_display_result("fourier_spectrum/view", params)
def compute_params(self, region_mapping_volume=None, measure=None, data_slice='', background=None):
    """
    Assemble the rendering parameters for the volumetric viewer.

    :param region_mapping_volume: region-volume mapping index (resolved via
        ``_ensure_region_mapping_index`` when None)
    :param measure: optional measure overlaid on the volume
    :param data_slice: slice spec forwarded to the measure computation
    :param background: structural MRI index; when None, the latest one in the
        current project is used (if any)
    :returns: dict of viewer parameters
    """
    region_mapping_volume = self._ensure_region_mapping_index(region_mapping_volume)
    rmv_h5_class, rmv_h5_path = self._load_h5_of_gid(region_mapping_volume.gid)
    # Fix: the H5 files were previously closed manually with no try/finally,
    # leaking the handles on any exception. Use context managers instead
    # (the same classes are used with ``with`` elsewhere in this file).
    with rmv_h5_class(rmv_h5_path) as rmv_h5:
        # Prepend a leading singleton dimension — presumably the time axis the viewer expects.
        volume_shape = (1,) + rmv_h5.array_data.shape
        if measure is None:
            params = self._compute_region_volume_map_params(rmv_h5)
        else:
            params = self._compute_measure_params(rmv_h5, measure, data_slice)
        volume_gid = rmv_h5.volume.load()
    url_voxel_region = URLGenerator.build_h5_url(region_mapping_volume.gid, 'get_voxel_region', parameter='')
    volume_h5_class, volume_h5_path = self._load_h5_of_gid(volume_gid.hex)
    with volume_h5_class(volume_h5_path) as volume_h5:
        params.update(volumeShape=json.dumps(volume_shape),
                      volumeOrigin=json.dumps(volume_h5.origin.load().tolist()),
                      voxelUnit=volume_h5.voxel_unit.load(),
                      voxelSize=json.dumps(volume_h5.voxel_size.load().tolist()),
                      urlVoxelRegion=url_voxel_region)
    if background is None:
        # Fall back to the most recent structural MRI in the current project.
        background = dao.try_load_last_entity_of_type(self.current_project_id, StructuralMRIIndex)
    params.update(self._compute_background(background))
    return params