def compute_data_for_gradient_view(self, local_connectivity_gid, selected_triangle):
    """
    When the user loads an existing local connectivity and picks a vertex from the
    associated surface, this method computes the data needed for drawing the gradient
    view corresponding to that vertex.

    Returns the data needed for drawing the gradient view for the selected vertex,
    JSON-encoded under the 'data' key.
    """
    lconn_index = load_entity_by_gid(local_connectivity_gid)
    triangle_index = int(selected_triangle)

    surface_indx = load_entity_by_gid(lconn_index.fk_surface_gid)
    surface_h5 = h5.h5_file_for_index(surface_indx)
    assert isinstance(surface_h5, SurfaceH5)
    vertex_index = int(surface_h5.triangles[triangle_index][0])

    lconn_h5 = h5.h5_file_for_index(lconn_index)
    assert isinstance(lconn_h5, LocalConnectivityH5)
    lconn_matrix = lconn_h5.matrix.load()
    picked_data = list(lconn_matrix[vertex_index].toarray().squeeze())
    lconn_h5.close()

    result = []
    number_of_split_slices = surface_h5.number_of_split_slices.load()
    if number_of_split_slices <= 1:
        result.append(picked_data)
    else:
        for slice_number in range(number_of_split_slices):
            start_idx, end_idx = surface_h5.get_slice_vertex_boundaries(slice_number)
            result.append(picked_data[start_idx:end_idx])

    surface_h5.close()
    result = {'data': json.dumps(result)}
    return result
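
# Hedged usage sketch (illustrative only, not part of the original source): fetching the
# gradient view data for one picked triangle. `controller` (an instance of the class that
# defines compute_data_for_gradient_view) and `lconn_gid` are assumed to exist; `json`
# is assumed imported, as elsewhere in this module.
def _example_gradient_view_usage(controller, lconn_gid):
    response = controller.compute_data_for_gradient_view(lconn_gid, selected_triangle="42")
    # 'data' holds one list of values per surface slice (or a single list for unsplit surfaces)
    return json.loads(response['data'])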
def launch(self, view_model):
    # type: (SurfaceViewerModel) -> dict
    surface_index = self.load_entity_by_gid(view_model.surface.hex)
    connectivity_measure_index = None
    region_map_index = None

    if view_model.connectivity_measure:
        connectivity_measure_index = self.load_entity_by_gid(view_model.connectivity_measure.hex)
    if view_model.region_map:
        region_map_index = self.load_entity_by_gid(view_model.region_map.hex)

    surface_h5 = h5.h5_file_for_index(surface_index)
    cm_h5 = h5.h5_file_for_index(connectivity_measure_index) if connectivity_measure_index is not None else None
    region_map_gid = region_map_index.gid if region_map_index is not None else None
    connectivity_gid = region_map_index.connectivity_gid if region_map_index is not None else None
    assert isinstance(surface_h5, SurfaceH5)

    params = dict(title=view_model.title, extended_view=False, isOneToOneMapping=False,
                  hasRegionMap=region_map_index is not None)
    params.update(self._compute_surface_params(surface_h5, region_map_gid))
    params.update(self._compute_hemispheric_param(surface_h5))
    params.update(self._compute_measure_points_param(surface_h5, region_map_gid, connectivity_gid))
    params.update(self._compute_measure_param(cm_h5, params['noOfMeasurePoints']))

    surface_h5.close()
    if cm_h5:
        cm_h5.close()

    params['shelfObject'] = None
    shell_surface_index = None
    if view_model.shell_surface:
        shell_surface_index = self.load_entity_by_gid(view_model.shell_surface.hex)

    shell_surface = ensure_shell_surface(self.current_project_id, shell_surface_index)

    if shell_surface:
        shell_h5 = h5.h5_file_for_index(shell_surface)
        assert isinstance(shell_h5, SurfaceH5)
        shell_vertices, shell_normals, _, shell_triangles, _ = SurfaceURLGenerator.get_urls_for_rendering(shell_h5)
        params['shelfObject'] = json.dumps([shell_vertices, shell_normals, shell_triangles])
        shell_h5.close()

    return self.build_display_result("surface/surface_view", params,
                                     pages={"controlPage": "surface/surface_viewer_controls"})
def read_data_page_split(self, time_series_gid, from_idx, to_idx, step=None, specific_slices=None):
    time_series_index = self.load_entity_by_gid(time_series_gid)
    with h5.h5_file_for_index(time_series_index) as time_series_h5:
        assert isinstance(time_series_h5, TimeSeriesH5)
        basic_result = time_series_h5.read_data_page(from_idx, to_idx, step, specific_slices)

    if not isinstance(time_series_index, TimeSeriesSurfaceIndex):
        return basic_result.tolist()

    result = []
    surface_index = self.load_entity_by_gid(time_series_index.fk_surface_gid)
    surface_h5 = h5.h5_file_for_index(surface_index)
    assert isinstance(surface_h5, SurfaceH5)
    number_of_split_slices = surface_h5.number_of_split_slices.load()
    if number_of_split_slices <= 1:
        result.append(basic_result.tolist())
    else:
        # Iterate over the already loaded slice count, not over the H5 accessor itself.
        for slice_number in range(number_of_split_slices):
            start_idx, end_idx = surface_h5.get_slice_vertex_boundaries(slice_number)
            result.append(basic_result[:, start_idx:end_idx].tolist())

    surface_h5.close()
    return result
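
# Hedged usage sketch (assumptions: `retriever` exposes read_data_page_split and `ts_gid`
# points to a stored time series). Shows the two possible result shapes.
def _example_paged_read(retriever, ts_gid):
    page = retriever.read_data_page_split(ts_gid, from_idx=0, to_idx=100)
    # For region-level series `page` is a plain nested list; for surface series it is a
    # list with one entry per surface slice, each sliced along the vertex dimension.
    return page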
def generate_region_boundaries(self, surface_gid, region_mapping_gid):
    """
    Return the full region boundaries, including: vertices, normals and lines indices.
    """
    boundary_vertices = []
    boundary_lines = []
    boundary_normals = []

    surface_index = self.load_entity_by_gid(surface_gid)
    rm_index = self.load_entity_by_gid(region_mapping_gid)

    with h5.h5_file_for_index(rm_index) as rm_h5:
        array_data = rm_h5.array_data[:]

    with h5.h5_file_for_index(surface_index) as surface_h5:
        for slice_idx in range(surface_h5.get_number_of_split_slices()):
            # Generate the boundaries sliced, for the unlikely case where we might overflow the buffer capacity
            slice_triangles = surface_h5.get_triangles_slice(slice_idx)
            slice_vertices = surface_h5.get_vertices_slice(slice_idx)
            slice_normals = surface_h5.get_vertex_normals_slice(slice_idx)
            first_index_in_slice = surface_h5.split_slices.load()[str(slice_idx)][KEY_VERTICES][KEY_START]
            # These will keep track of the vertices / triangles / normals for this slice that have
            # been processed and were found as a part of the boundary
            processed_vertices = []
            processed_triangles = []
            processed_normals = []
            for triangle in slice_triangles:
                triangle += first_index_in_slice
                # Check if there are two points from a triangle that are in separate regions,
                # then send this to further processing that will generate the corresponding
                # region separation lines depending on the 3rd point from the triangle
                rt0, rt1, rt2 = array_data[triangle]
                if rt0 - rt1:
                    reg_idx1, reg_idx2, dangling_idx = 0, 1, 2
                elif rt1 - rt2:
                    reg_idx1, reg_idx2, dangling_idx = 1, 2, 0
                elif rt2 - rt0:
                    reg_idx1, reg_idx2, dangling_idx = 2, 0, 1
                else:
                    continue

                lines_vert, lines_ind, lines_norm = self._process_triangle(
                    triangle, reg_idx1, reg_idx2, dangling_idx,
                    first_index_in_slice, array_data, slice_vertices, slice_normals)
                # Use integer division: the offset counts whole (x, y, z) vertices.
                ind_offset = len(processed_vertices) // 3
                processed_vertices.extend(lines_vert)
                processed_normals.extend(lines_norm)
                processed_triangles.extend([ind + ind_offset for ind in lines_ind])

            boundary_vertices.append(processed_vertices)
            boundary_lines.append(processed_triangles)
            boundary_normals.append(processed_normals)

        return numpy.array([boundary_vertices, boundary_lines, boundary_normals]).tolist()
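
# Minimal sketch of the boundary test above, on toy data (numpy assumed imported):
# a triangle whose vertices fall in two different regions is a boundary triangle.
def _example_boundary_test():
    region_per_vertex = numpy.array([0, 0, 1, 1])  # hypothetical region mapping
    triangle = numpy.array([1, 2, 3])              # vertex indices of one triangle
    rt0, rt1, rt2 = region_per_vertex[triangle]    # regions: 0, 1, 1
    # rt0 - rt1 is non-zero, so vertices 0 and 1 straddle the boundary and
    # vertex 2 is the "dangling" one, exactly as classified in the loop above.
    return (rt0 - rt1) != 0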
def _extract_labels_and_data_matrix(self, datatype_index):
    """
    If datatype has a source attribute of type TimeSeriesRegion
    then the labels of the associated connectivity are returned.
    Else None.
    """
    with h5.h5_file_for_index(datatype_index) as datatype_h5:
        matrix = datatype_h5.array_data[:]

    source_index = self.load_entity_by_gid(datatype_index.source_gid)
    with h5.h5_file_for_index(source_index) as source_h5:
        labels = self.get_space_labels(source_h5)

    return [labels, labels], matrix
def launch(self, view_model):
    """Construct data for visualization and launch it."""
    cc_gid = view_model.datatype.hex
    cc_index = self.load_entity_by_gid(cc_gid)
    assert isinstance(cc_index, CorrelationCoefficientsIndex)
    matrix_shape = cc_index.parsed_shape[0:2]

    ts_gid = cc_index.fk_source_gid
    ts_index = self.load_entity_by_gid(ts_gid)
    state_list = ts_index.get_labels_for_dimension(1)
    mode_list = list(range(ts_index.data_length_4d))
    with h5.h5_file_for_index(ts_index) as ts_h5:
        labels = self.get_space_labels(ts_h5)
        if not labels:
            labels = None

    pars = dict(matrix_labels=json.dumps([labels, labels]),
                matrix_shape=json.dumps(matrix_shape),
                viewer_title='Cross Correlation Matrix Plot',
                url_base=URLGenerator.build_h5_url(cc_gid, 'get_correlation_data', parameter=''),
                state_variable=state_list[0],
                mode=mode_list[0],
                state_list=state_list,
                mode_list=mode_list,
                pearson_min=CorrelationCoefficients.PEARSON_MIN,
                pearson_max=CorrelationCoefficients.PEARSON_MAX)

    return self.build_display_result("pearson_correlation/view", pars)
def launch(self, view_model):
    # type: (TopographicViewerModel) -> dict
    connectivities_idx = []
    measures_ht = []
    for measure in [view_model.data_0, view_model.data_1, view_model.data_2]:
        if measure is not None:
            measure_index = self.load_entity_by_gid(measure)
            measures_ht.append(h5.load_from_index(measure_index))
            conn_index = self.load_entity_by_gid(measure_index.fk_connectivity_gid)
            connectivities_idx.append(conn_index)

    with h5.h5_file_for_index(connectivities_idx[0]) as conn_h5:
        centres = conn_h5.centres.load()
    sensor_locations = TopographyCalculations.normalize_sensors(centres)
    sensor_number = len(sensor_locations)

    arrays = []
    titles = []
    min_vals = []
    max_vals = []
    data_array = []
    data_arrays = []
    for i, measure in enumerate(measures_ht):
        if connectivities_idx[i].number_of_regions != sensor_number:
            raise Exception("All selected measures must be based on the same connectivity!")
        arrays.append(measure.array_data.tolist())
        titles.append(measure.title)
        min_vals.append(measure.array_data.min())
        max_vals.append(measure.array_data.max())

    color_bar_min = min(min_vals)
    color_bar_max = max(max_vals)

    for i, array_data in enumerate(arrays):
        try:
            data_array = TopographyCalculations.compute_topography_data(array_data, sensor_locations)

            # We always access the first element because only one connectivity can be used at one time
            first_label = h5.load_from_index(connectivities_idx[0]).hemispheres[0]
            if first_label:
                data_array = numpy.rot90(data_array, k=1, axes=(0, 1))
            else:
                data_array = numpy.rot90(data_array, k=-1, axes=(0, 1))
            if numpy.any(numpy.isnan(array_data)):
                titles[i] = titles[i] + " - Topography contains nan"
            if not numpy.any(array_data):
                titles[i] = titles[i] + " - Topography data is all zeros"
            data_arrays.append(ABCDisplayer.dump_with_precision(data_array.flat))
        except KeyError as err:
            self.log.exception(err)
            raise LaunchException("The measure points location is not compatible with this viewer!", err)

    params = dict(matrix_datas=data_arrays,
                  matrix_shape=json.dumps(data_array.squeeze().shape),
                  titles=titles,
                  vmin=color_bar_min,
                  vmax=color_bar_max)
    return self.build_display_result("topographic/view", params,
                                     pages={"controlPage": "topographic/controls"})
def _store_imported_datatypes_in_db(self, project, all_datatypes):
    # type: (Project, dict) -> int
    sorted_dts = sorted(all_datatypes.items(),
                        key=lambda dt_item: dt_item[1].create_date or datetime.now())
    count = 0
    for dt_path, datatype in sorted_dts:
        datatype_already_in_tvb = dao.get_datatype_by_gid(datatype.gid)
        if not datatype_already_in_tvb:
            self.store_datatype(datatype, dt_path)
            count += 1
        else:
            AlgorithmService.create_link([datatype_already_in_tvb.id], project.id)

        file_path = h5.h5_file_for_index(datatype).path
        h5_class = H5File.h5_class_from_file(file_path)
        reference_list = h5_class(file_path).gather_references()

        for _, reference_gid in reference_list:
            if not reference_gid:
                continue

            ref_index = dao.get_datatype_by_gid(reference_gid.hex)
            if ref_index is None:
                os.remove(file_path)
                dao.remove_entity(datatype.__class__, datatype.id)
                raise MissingReferenceException(
                    'Imported file depends on datatypes that do not exist. Please upload those first!')

    return count
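
# Hedged sketch of the ordering used above: datatypes are stored oldest-first so that
# referenced entities exist before their dependents. `datetime` is assumed imported,
# as in the method above; `_Dt` is a hypothetical stand-in for a datatype.
def _example_import_order():
    class _Dt:
        def __init__(self, create_date):
            self.create_date = create_date

    items = {'b.h5': _Dt(datetime(2021, 5, 1)), 'a.h5': _Dt(None)}
    # Entries without a create_date sort as "now", i.e. last.
    return sorted(items.items(), key=lambda it: it[1].create_date or datetime.now())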
def build():
    ts_index = time_series_factory()
    ts_h5 = h5_file_for_index(ts_index)
    ts = TimeSeries()
    ts_h5.load_into(ts)

    data_shape = ts.data.shape

    result_shape = (data_shape[2], data_shape[2], data_shape[1], data_shape[3])
    result = numpy.zeros(result_shape)

    for mode in range(data_shape[3]):
        for var in range(data_shape[1]):
            data = ts_h5.data[:, var, :, mode]
            # Remove the temporal mean of every node before computing the covariance.
            data = data - data.mean(axis=0)[numpy.newaxis, :]
            result[:, :, var, mode] = numpy.cov(data.T)
    # Close only after all slices have been read.
    ts_h5.close()

    covariance = Covariance(source=ts, array_data=result)

    op = operation_factory()

    covariance_db = CovarianceIndex()
    covariance_db.fk_from_operation = op.id
    covariance_db.fill_from_has_traits(covariance)

    covariance_h5_path = h5.path_for_stored_index(covariance_db)
    # Store the covariance datatype itself, in its own H5 type.
    with CovarianceH5(covariance_h5_path) as f:
        f.store(covariance)

    session.add(covariance_db)
    session.commit()
    return covariance_db
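
# Toy numpy check of the covariance step above (illustrative): for data of shape
# (time, nodes), demeaning over time and calling numpy.cov on the transpose yields
# a (nodes, nodes) covariance matrix.
def _example_node_covariance():
    data = numpy.random.randn(100, 4)              # 100 time points, 4 nodes
    data = data - data.mean(axis=0)[numpy.newaxis, :]
    cov = numpy.cov(data.T)
    assert cov.shape == (4, 4)
    return cov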
def _launch(self, view_model, figsize, preview=False):
    time_series_index = self.load_entity_by_gid(view_model.time_series)
    h5_file = h5.h5_file_for_index(time_series_index)
    assert isinstance(h5_file, TimeSeriesH5)
    shape = list(h5_file.read_data_shape())
    ts = h5_file.storage_manager.get_data('time')
    state_variables = time_series_index.get_labels_for_dimension(1)
    labels = self.get_space_labels(h5_file)

    # Assume that the first dimension is the time since that is the case so far
    if preview and shape[0] > self.MAX_PREVIEW_DATA_LENGTH:
        shape[0] = self.MAX_PREVIEW_DATA_LENGTH

    # When surface-result, the labels will be empty, so fill some of them,
    # but not all, otherwise the viewer will take ages to load.
    if shape[2] > 0 and len(labels) == 0:
        for n in range(min(self.MAX_PREVIEW_DATA_LENGTH, shape[2])):
            labels.append("Node-" + str(n))

    pars = {'baseURL': URLGenerator.build_base_h5_url(time_series_index.gid),
            'labels': labels,
            'labels_json': json.dumps(labels),
            'ts_title': time_series_index.title,
            'preview': preview,
            'figsize': figsize,
            'shape': repr(shape),
            't0': ts[0],
            'dt': ts[1] - ts[0] if len(ts) > 1 else 1,
            'labelsStateVar': state_variables,
            'labelsModes': list(range(shape[3]))
            }
    pars.update(self.build_params_for_subselectable_ts(h5_file))

    h5_file.close()
    return self.build_display_result("time_series/view", pars,
                                     pages=dict(controlPage="time_series/control"))
def launch(self, view_model):
    # type: (PearsonCorrelationCoefficientVisualizerModel) -> dict
    """Construct data for visualization and launch it."""
    with h5.h5_file_for_gid(view_model.datatype) as datatype_h5:
        matrix_shape = datatype_h5.array_data.shape[0:2]
        ts_gid = datatype_h5.source.load()

    ts_index = self.load_entity_by_gid(ts_gid)
    state_list = ts_index.get_labels_for_dimension(1)
    mode_list = list(range(ts_index.data_length_4d))
    with h5.h5_file_for_index(ts_index) as ts_h5:
        labels = self.get_space_labels(ts_h5)
        if not labels:
            labels = None

    pars = dict(matrix_labels=json.dumps(labels),
                matrix_shape=json.dumps(matrix_shape),
                viewer_title='Pearson Edge Bundle',
                url_base=URLGenerator.build_h5_url(view_model.datatype.hex, 'get_correlation_data',
                                                   flatten="True", parameter=''),
                state_variable=0,
                mode=mode_list[0],
                state_list=state_list,
                mode_list=mode_list,
                pearson_min=CorrelationCoefficients.PEARSON_MIN,
                pearson_max=CorrelationCoefficients.PEARSON_MAX,
                thresh=0.5
                )
    return self.build_display_result("pearson_edge_bundle/view", pars)
def display_surface(surface_gid, region_mapping_gid=None):
    """
    Generates the HTML for displaying the surface with the given ID.
    """
    surface = ABCAdapter.load_entity_by_gid(surface_gid)
    if surface is None:
        raise MissingDataException(SpatioTemporalController.MSG_MISSING_SURFACE + "!!")

    common.add2session(PARAM_SURFACE, surface_gid)
    surface_h5 = h5.h5_file_for_index(surface)
    url_vertices_pick, url_normals_pick, url_triangles_pick = SurfaceURLGenerator.get_urls_for_pick_rendering(
        surface_h5)
    url_vertices, url_normals, _, url_triangles, _ = SurfaceURLGenerator.get_urls_for_rendering(
        surface_h5, region_mapping_gid)
    # Read the center while the H5 file is still open.
    brain_center = json.dumps(surface_h5.center())
    surface_h5.close()

    return {
        'urlVerticesPick': json.dumps(url_vertices_pick),
        'urlTrianglesPick': json.dumps(url_triangles_pick),
        'urlNormalsPick': json.dumps(url_normals_pick),
        'urlVertices': json.dumps(url_vertices),
        'urlTriangles': json.dumps(url_triangles),
        'urlNormals': json.dumps(url_normals),
        'brainCenter': brain_center
    }
def compute_2d_view(self, dtm_index, slice_s):
    # type: (DataTypeMatrix, str) -> (numpy.array, str, bool)
    """
    Create a 2d view of the matrix using the suggested slice.
    If the given slice is invalid or fails to produce a 2d array,
    the default is used, which selects the first 2 dimensions.
    If the matrix is complex, the real part is shown.

    :param dtm_index: main input. It can have more than 2 dimensions.
    :param slice_s: a string representation of a slice
    :return: (a 2d array, the slice used to make it, is_default_returned)
    """
    default = (slice(None), slice(None)) + tuple(0 for _ in range(dtm_index.ndim - 2))  # [:, :, 0, 0, 0, 0 etc]
    slice_used = default

    try:
        if slice_s is not None and slice_s != "":
            slice_used = parse_slice(slice_s)
    except ValueError:  # if the slice could not be parsed
        self.log.warning("failed to parse the slice")

    try:
        with h5.h5_file_for_index(dtm_index) as h5_file:
            result_2d = h5_file.array_data[slice_used]
            result_2d = result_2d.astype(float)
        if result_2d.ndim > 2:  # the slice did not produce a 2d array, treat as error
            raise ValueError(str(dtm_index.shape))
    except (ValueError, IndexError, TypeError):  # if the slice failed to produce a 2d array
        self.log.warning("failed to produce a 2d array")
        return self.compute_2d_view(dtm_index, "")

    return result_2d, slice_str(slice_used), slice_used == default
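
# Sketch of the default-slice construction above, on a plain numpy array (numpy assumed
# imported): for an N-dim array, keep the first two axes whole and pin the rest to index 0.
def _example_default_slice():
    data = numpy.arange(2 * 3 * 4 * 5).reshape((2, 3, 4, 5))
    default = (slice(None), slice(None)) + tuple(0 for _ in range(data.ndim - 2))
    view = data[default]  # equivalent to data[:, :, 0, 0]
    assert view.shape == (2, 3)
    return view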
def launch(self, view_model):
    # type: (ICAModel) -> dict
    """Construct data for visualization and launch it."""
    ica_gid = view_model.datatype
    ica_index = self.load_entity_by_gid(ica_gid)

    slice_given = slice_str((slice(None), slice(None),
                             slice(view_model.i_svar), slice(view_model.i_mode)))
    if view_model.i_svar < 0 or view_model.i_svar >= ica_index.parsed_shape[2]:
        view_model.i_svar = 0
    if view_model.i_mode < 0 or view_model.i_mode >= ica_index.parsed_shape[3]:
        view_model.i_mode = 0
    slice_used = slice_str((slice(None), slice(None),
                            slice(view_model.i_svar), slice(view_model.i_mode)))

    with h5.h5_file_for_index(ica_index) as h5_file:
        unmixing_matrix = h5_file.unmixing_matrix[..., view_model.i_svar, view_model.i_mode]
        prewhitening_matrix = h5_file.prewhitening_matrix[..., view_model.i_svar, view_model.i_mode]
    Cinv = unmixing_matrix.dot(prewhitening_matrix)

    # Report both the selected state variable and the selected mode in the title.
    title = 'ICA region contribution -- (Ellipsis, %d, %d)' % (view_model.i_svar, view_model.i_mode)
    labels = self.extract_source_labels(ica_index)
    pars = self.compute_params(ica_index, Cinv, title, [labels, labels],
                               slice_given, slice_used, slice_given != slice_used)
    return self.build_display_result("matrix/svg_view", pars)
def launch(self, view_model):
    # type: (ConnectivityViewerModel) -> dict
    """
    Given the input connectivity data and the surface data,
    build the HTML response to be displayed.
    """
    connectivity, colors, rays = self._load_input_data(view_model)

    global_params, global_pages = self._compute_connectivity_global_params(connectivity)
    if view_model.surface_data is not None:
        surface_index = load_entity_by_gid(view_model.surface_data.hex)
        surface_h5 = h5.h5_file_for_index(surface_index)
        url_vertices, url_normals, _, url_triangles, _ = SurfaceURLGenerator.get_urls_for_rendering(surface_h5)
        surface_h5.close()
    else:
        url_vertices, url_normals, url_triangles = [], [], []

    global_params["urlVertices"] = json.dumps(url_vertices)
    global_params["urlTriangles"] = json.dumps(url_triangles)
    global_params["urlNormals"] = json.dumps(url_normals)
    global_params['isSingleMode'] = False

    result_params, result_pages = Connectivity2DViewer().compute_parameters(
        connectivity, colors, rays, view_model.step)
    result_params.update(global_params)
    result_pages.update(global_pages)
    _params, _pages = Connectivity3DViewer().compute_parameters(connectivity, colors, rays)
    result_params.update(_params)
    result_pages.update(_pages)

    return self.build_display_result("connectivity/main_connectivity", result_params, result_pages)
def launch(self, view_model):
    # type: (PearsonCorrelationCoefficientAdapterModel) -> [CorrelationCoefficientsIndex]
    """
    Launch algorithm and build results.
    Compute the node-pairwise pearson correlation coefficient of the given input 4D TimeSeries datatype.
    The result will contain values between -1 and 1, inclusive.

    :param view_model: the ViewModel keeping the input time-series GID and the physical
                       time interval (t_start, t_end) for the analysis
    :returns: the correlation coefficient for the given time series
    :rtype: `CorrelationCoefficientsIndex`
    """
    with h5.h5_file_for_index(self.input_time_series_index) as ts_h5:
        ts_labels_ordering = ts_h5.labels_ordering.load()
        result = self._compute_correlation_coefficients(ts_h5, view_model.t_start, view_model.t_end)

    if isinstance(self.input_time_series_index,
                  (TimeSeriesEEGIndex, TimeSeriesMEGIndex, TimeSeriesSEEGIndex)):
        labels_ordering = ["Sensor", "Sensor", "1", "1"]
    else:
        labels_ordering = list(CorrelationCoefficients.labels_ordering.default)
        labels_ordering[0] = ts_labels_ordering[2]
        labels_ordering[1] = ts_labels_ordering[2]

    corr_coef = CorrelationCoefficients()
    corr_coef.array_data = result
    corr_coef.source = TimeSeries(gid=view_model.time_series)
    corr_coef.labels_ordering = labels_ordering

    return h5.store_complete(corr_coef, self.storage_path)
def populate_surface_fields(self, time_series_index):
    """
    To be overwritten for populating fields: one_to_one_map/connectivity/region_map/surface fields
    """
    self.one_to_one_map = isinstance(time_series_index, TimeSeriesSurfaceIndex)

    if self.one_to_one_map:
        self.PAGE_SIZE /= 10
        surface_gid = time_series_index.fk_surface_gid
        surface_index = dao.get_datatype_by_gid(surface_gid)
        region_map_indexes = dao.get_generic_entity(RegionMappingIndex, surface_gid, 'fk_surface_gid')
        if len(region_map_indexes) < 1:
            region_map_index = None
            connectivity_index = None
        else:
            region_map_index = region_map_indexes[0]
            connectivity_index = dao.get_datatype_by_gid(region_map_index.fk_connectivity_gid)
    else:
        connectivity_index = dao.get_datatype_by_gid(time_series_index.fk_connectivity_gid)

        if time_series_index.fk_region_mapping_gid:
            region_map_index = dao.get_datatype_by_gid(time_series_index.fk_region_mapping_gid)
        else:
            region_map_indexes = dao.get_generic_entity(RegionMappingIndex, connectivity_index.gid,
                                                        'fk_connectivity_gid')
            region_map_index = region_map_indexes[0]

        surface_index = dao.get_datatype_by_gid(region_map_index.fk_surface_gid)

    self.connectivity_index = connectivity_index
    self.region_map_gid = None if region_map_index is None else region_map_index.gid
    self.surface_gid = None if surface_index is None else surface_index.gid
    self.surface_h5 = None if surface_index is None else h5.h5_file_for_index(surface_index)
def launch(self, time_series, n_components=None):
    """
    Launch algorithm and build results.
    """
    # --------- Prepare an IndependentComponents object for result ---------##
    ica_index = IndependentComponentsIndex()
    ica_index.source_gid = time_series.gid

    time_series_h5 = h5.h5_file_for_index(time_series)

    result_path = h5.path_for(self.storage_path, IndependentComponentsH5, ica_index.gid)
    ica_h5 = IndependentComponentsH5(path=result_path)
    ica_h5.gid.store(uuid.UUID(ica_index.gid))
    ica_h5.source.store(time_series_h5.gid.load())
    ica_h5.n_components.store(self.algorithm.n_components)

    # ------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
    input_shape = time_series_h5.data.shape
    node_slice = [slice(input_shape[0]), None, slice(input_shape[2]), slice(input_shape[3])]

    # ---------- Iterate over slices and compose final result ------------##
    small_ts = TimeSeries()
    for var in range(input_shape[1]):
        node_slice[1] = slice(var, var + 1)
        small_ts.data = time_series_h5.read_data_slice(tuple(node_slice))
        self.algorithm.time_series = small_ts
        partial_ica = self.algorithm.evaluate()
        ica_h5.write_data_slice(partial_ica)
    ica_h5.close()
    time_series_h5.close()

    return ica_index
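
# Sketch of the per-variable slicing pattern above, on a plain numpy array (assumption:
# 4D data laid out as time x state-variables x nodes x modes; numpy assumed imported).
# Setting position 1 of the slice list selects one state variable at a time while
# keeping it as a length-1 axis.
def _example_slice_iteration():
    data = numpy.random.randn(10, 3, 5, 2)
    node_slice = [slice(data.shape[0]), None, slice(data.shape[2]), slice(data.shape[3])]
    for var in range(data.shape[1]):
        node_slice[1] = slice(var, var + 1)
        chunk = data[tuple(node_slice)]
        assert chunk.shape == (10, 1, 5, 2)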
def launch(self, surface, region_map=None, connectivity_measure=None, shell_surface=None,
           title="Surface Visualizer"):
    surface_h5 = h5.h5_file_for_index(surface)
    cm_h5 = h5.h5_file_for_index(connectivity_measure) if connectivity_measure is not None else None
    region_map_gid = region_map.gid if region_map is not None else None
    connectivity_gid = region_map.connectivity_gid if region_map is not None else None
    assert isinstance(surface_h5, SurfaceH5)

    params = dict(title=title, extended_view=False, isOneToOneMapping=False,
                  hasRegionMap=region_map is not None)
    params.update(self._compute_surface_params(surface_h5, region_map_gid))
    params.update(self._compute_hemispheric_param(surface_h5))
    params.update(self._compute_measure_points_param(surface_h5, region_map_gid, connectivity_gid))
    params.update(self._compute_measure_param(cm_h5, params['noOfMeasurePoints']))

    surface_h5.close()
    if cm_h5:
        cm_h5.close()

    params['shelfObject'] = None
    shell_surface = ensure_shell_surface(self.current_project_id, shell_surface)

    if shell_surface:
        shell_h5 = h5.h5_file_for_index(shell_surface)
        assert isinstance(shell_h5, SurfaceH5)
        shell_vertices, shell_normals, _, shell_triangles, _ = SurfaceURLGenerator.get_urls_for_rendering(shell_h5)
        params['shelfObject'] = json.dumps([shell_vertices, shell_normals, shell_triangles])
        shell_h5.close()

    return self.build_display_result("surface/surface_view", params,
                                     pages={"controlPage": "surface/surface_viewer_controls"})
def launch(self, input_data, surface_data=None, colors=None, rays=None, step=None):
    """
    Given the input connectivity data and the surface data,
    build the HTML response to be displayed.

    :param input_data: index towards the `Connectivity` object which will be displayed
    :type input_data: `ConnectivityIndex`
    :param surface_data: if provided, it is displayed as a shadow to give an idea of the connectivity \
                         position relative to the full brain cortical surface
    :type surface_data: `SurfaceIndex`
    :param colors: used to establish a colormap for the nodes displayed in 2D Connectivity viewers
    :type colors: `ConnectivityMeasureIndex`
    :param rays: used to establish the size of the spheres representing each node in 3D Nodes viewer
    :type rays: `ConnectivityMeasureIndex`
    :param step: a threshold applied to the 2D Connectivity Viewers to differentiate 2 types of nodes; \
                 the ones with a value greater than this will be displayed as red discs, instead of yellow
    :type step: float
    """
    connectivity = h5.load_from_index(input_data)
    assert isinstance(connectivity, Connectivity)

    if colors:
        colors_dt = h5.load_from_index(colors)
    else:
        colors_dt = None
    if rays:
        rays_dt = h5.load_from_index(rays)
    else:
        rays_dt = None

    global_params, global_pages = self._compute_connectivity_global_params(connectivity)
    if surface_data is not None:
        surface_h5 = h5.h5_file_for_index(surface_data)
        url_vertices, url_normals, _, url_triangles, _ = SurfaceURLGenerator.get_urls_for_rendering(surface_h5)
        surface_h5.close()
    else:
        url_vertices, url_normals, url_triangles = [], [], []

    global_params["urlVertices"] = json.dumps(url_vertices)
    global_params["urlTriangles"] = json.dumps(url_triangles)
    global_params["urlNormals"] = json.dumps(url_normals)
    global_params['isSingleMode'] = False

    result_params, result_pages = Connectivity2DViewer().compute_parameters(
        connectivity, colors_dt, rays_dt, step)
    result_params.update(global_params)
    result_pages.update(global_pages)
    _params, _pages = Connectivity3DViewer().compute_parameters(connectivity, colors_dt, rays_dt)
    result_params.update(_params)
    result_pages.update(_pages)

    return self.build_display_result("connectivity/main_connectivity", result_params, result_pages)
def launch(self, view_model):
    # type: (NodeComplexCoherenceModel) -> [ComplexCoherenceSpectrumIndex]
    """
    Launch algorithm and build results.

    :returns: the `ComplexCoherenceSpectrum` built with the given time-series
    """
    # ------- Prepare a ComplexCoherenceSpectrum object for result -------##
    complex_coherence_spectrum_index = ComplexCoherenceSpectrumIndex()
    time_series_h5 = h5.h5_file_for_index(self.input_time_series_index)

    dest_path = h5.path_for(self.storage_path, ComplexCoherenceSpectrumH5,
                            complex_coherence_spectrum_index.gid)
    spectra_h5 = ComplexCoherenceSpectrumH5(dest_path)
    spectra_h5.gid.store(uuid.UUID(complex_coherence_spectrum_index.gid))
    spectra_h5.source.store(time_series_h5.gid.load())

    # ------------------- NOTE: Assumes 4D TimeSeries. -------------------##
    input_shape = time_series_h5.data.shape
    node_slice = [slice(input_shape[0]), slice(input_shape[1]),
                  slice(input_shape[2]), slice(input_shape[3])]

    # ---------- Iterate over slices and compose final result ------------##
    small_ts = TimeSeries()
    small_ts.sample_period = time_series_h5.sample_period.load()
    small_ts.sample_period_unit = time_series_h5.sample_period_unit.load()
    small_ts.data = time_series_h5.read_data_slice(tuple(node_slice))
    self.algorithm.time_series = small_ts

    partial_result = self.algorithm.evaluate()
    self.log.debug("got partial_result")
    self.log.debug("partial segment_length is %s" % (str(partial_result.segment_length)))
    self.log.debug("partial epoch_length is %s" % (str(partial_result.epoch_length)))
    self.log.debug("partial windowing_function is %s" % (str(partial_result.windowing_function)))
    # LOG.debug("partial frequency vector is %s" % (str(partial_result.frequency)))

    spectra_h5.write_data_slice(partial_result)
    spectra_h5.segment_length.store(partial_result.segment_length)
    spectra_h5.epoch_length.store(partial_result.epoch_length)
    spectra_h5.windowing_function.store(partial_result.windowing_function)
    # spectra.frequency = partial_result.frequency
    spectra_h5.close()
    time_series_h5.close()

    complex_coherence_spectrum_index.fk_source_gid = self.input_time_series_index.gid
    complex_coherence_spectrum_index.epoch_length = partial_result.epoch_length
    complex_coherence_spectrum_index.segment_length = partial_result.segment_length
    complex_coherence_spectrum_index.windowing_function = partial_result.windowing_function
    complex_coherence_spectrum_index.frequency_step = partial_result.freq_step
    complex_coherence_spectrum_index.max_frequency = partial_result.max_freq

    return complex_coherence_spectrum_index
def launch(self, view_model):
    # type: (CrossCorrelateAdapterModel) -> [CrossCorrelationIndex]
    """
    Launch algorithm and build results.
    Compute the node-pairwise cross-correlation of the source 4D TimeSeries represented by the index given as input.

    Return a CrossCorrelationIndex. Create a CrossCorrelationH5 that contains the cross-correlation
    sequences for all possible combinations of the nodes.

    See: http://www.scipy.org/doc/api_docs/SciPy.signal.signaltools.html#correlate

    :param view_model: the ViewModel keeping the time-series index for which the correlation should be computed
    :returns: the cross correlation index for the given time series
    :rtype: `CrossCorrelationIndex`
    """
    # --------- Prepare CrossCorrelationIndex and CrossCorrelationH5 objects for result ------------##
    cross_corr_index = CrossCorrelationIndex()
    cross_corr_h5_path = h5.path_for(self.storage_path, CrossCorrelationH5, cross_corr_index.gid)
    cross_corr_h5 = CrossCorrelationH5(cross_corr_h5_path)

    node_slice = [slice(self.input_shape[0]), None, slice(self.input_shape[2]), slice(self.input_shape[3])]

    # ---------- Iterate over slices and compose final result ------------##
    small_ts = TimeSeries()
    with h5.h5_file_for_index(self.input_time_series_index) as ts_h5:
        small_ts.sample_period = ts_h5.sample_period.load()
        partial_cross_corr = None
        labels_ordering = ts_h5.labels_ordering.load()
        for var in range(self.input_shape[1]):
            node_slice[1] = slice(var, var + 1)
            small_ts.data = ts_h5.read_data_slice(tuple(node_slice))
            partial_cross_corr = self._compute_cross_correlation(small_ts, ts_h5)
            cross_corr_h5.write_data_slice(partial_cross_corr)

    ts_array_metadata = cross_corr_h5.array_data.get_cached_metadata()
    cross_corr_h5.time.store(partial_cross_corr.time)
    cross_corr_labels_ordering = list(partial_cross_corr.labels_ordering)
    cross_corr_labels_ordering[1] = labels_ordering[2]
    cross_corr_labels_ordering[2] = labels_ordering[2]
    cross_corr_h5.labels_ordering.store(json.dumps(tuple(cross_corr_labels_ordering)))
    cross_corr_h5.source.store(uuid.UUID(self.input_time_series_index.gid))
    cross_corr_h5.gid.store(uuid.UUID(cross_corr_index.gid))

    cross_corr_index.source_gid = self.input_time_series_index.gid
    cross_corr_index.labels_ordering = cross_corr_h5.labels_ordering.load()
    cross_corr_index.type = type(cross_corr_index).__name__
    cross_corr_index.array_data_min = ts_array_metadata.min
    cross_corr_index.array_data_max = ts_array_metadata.max
    cross_corr_index.array_data_mean = ts_array_metadata.mean

    cross_corr_h5.close()
    return cross_corr_index
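
# Sketch of the labels_ordering rewrite above, on illustrative label lists (these
# particular label values are assumptions, not taken from the source): the node
# dimension label of the source time series (position 2) replaces both node
# dimensions of the cross-correlation result.
def _example_labels_reorder():
    ts_labels = ["Time", "State Variable", "Region", "Mode"]
    cc_labels = ["Offsets", "Node", "Node", "State Variable", "Mode"]
    cc_labels[1] = ts_labels[2]
    cc_labels[2] = ts_labels[2]
    return cc_labels  # ['Offsets', 'Region', 'Region', 'State Variable', 'Mode']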
def extract_source_labels(self, datatype_matrix):
    # type: (DataTypeMatrix) -> list
    if hasattr(datatype_matrix, "fk_connectivity_gid"):
        conn_idx = self.load_entity_by_gid(datatype_matrix.fk_connectivity_gid)
        with h5.h5_file_for_index(conn_idx) as conn_h5:
            labels = list(conn_h5.region_labels.load())
        return labels

    if hasattr(datatype_matrix, "fk_source_gid"):
        source_index = self.load_entity_by_gid(datatype_matrix.fk_source_gid)
        with h5.h5_file_for_index(source_index) as source_h5:
            labels = self.get_space_labels(source_h5)
        return labels

    return None
def compute_parameters(self, time_series, shell_surface=None):
    """
    Create the required parameter dictionary for the HTML/JS viewer.

    :rtype: `dict`
    :raises Exception: when
                * number of measure points exceeds the maximum allowed
                * a Face object cannot be found in database
    """
    self.populate_surface_fields(time_series)

    url_vertices, url_normals, url_lines, url_triangles, url_region_map = SurfaceURLGenerator.get_urls_for_rendering(
        self.surface_h5, self.region_map_gid)
    hemisphere_chunk_mask = self.surface_h5.get_slices_to_hemisphere_mask()

    params = self.retrieve_measure_points_params(time_series)

    if not self.one_to_one_map and self.measure_points_no > MAX_MEASURE_POINTS_LENGTH:
        raise Exception("Max number of measure points " + str(MAX_MEASURE_POINTS_LENGTH) + " exceeded.")

    time_series_h5 = h5.h5_file_for_index(time_series)
    assert isinstance(time_series_h5, TimeSeriesH5)
    base_adapter_url, time_urls = self._prepare_data_slices(time_series)
    min_val, max_val = time_series_h5.get_min_max_values()
    legend_labels = self._compute_legend_labels(min_val, max_val)

    state_variables = time_series.get_labels_for_dimension(1)

    if self.surface_gid and self.region_map_gid:
        boundary_url = SurfaceURLGenerator.get_url_for_region_boundaries(self.surface_gid, self.region_map_gid,
                                                                         self.stored_adapter.id)
    else:
        boundary_url = ''

    shell_surface = ensure_shell_surface(self.current_project_id, shell_surface)
    params.update(dict(title="Cerebral Activity: " + time_series.title,
                       isOneToOneMapping=self.one_to_one_map,
                       urlVertices=json.dumps(url_vertices),
                       urlTriangles=json.dumps(url_triangles),
                       urlLines=json.dumps(url_lines),
                       urlNormals=json.dumps(url_normals),
                       urlRegionMap=json.dumps(url_region_map),
                       base_adapter_url=base_adapter_url,
                       time=json.dumps(time_urls),
                       minActivity=min_val,
                       maxActivity=max_val,
                       legendLabels=legend_labels,
                       labelsStateVar=state_variables,
                       labelsModes=list(range(time_series.data_length_4d)),
                       extended_view=False,
                       shellObject=self.prepare_shell_surface_params(shell_surface, SurfaceURLGenerator),
                       biHemispheric=self.surface_h5.bi_hemispheric.load(),
                       hemisphereChunkMask=json.dumps(hemisphere_chunk_mask),
                       pageSize=self.PAGE_SIZE,
                       urlRegionBoundaries=boundary_url,
                       measurePointsLabels=self.get_space_labels(time_series_h5),
                       measurePointsTitle=time_series.title))

    params.update(self.build_params_for_subselectable_ts(time_series_h5))

    time_series_h5.close()
    if self.surface_h5:
        self.surface_h5.close()

    return params
def launch(self, datatype, slice=''):
    with h5.h5_file_for_index(datatype) as h5_file:
        matrix = h5_file.array_data.load()
    matrix2d, _, _ = compute_2d_view(matrix, slice)
    title = datatype.display_name + " matrix plot"

    pars = self.compute_params(matrix, title, slice)
    return self.build_display_result("matrix/svg_view", pars)
def launch(self, view_model):
    # type: (ConnectivityAnnotationsViewModel) -> dict
    annotations_index = self.load_entity_by_gid(view_model.annotations_index)

    if view_model.connectivity_index is None:
        connectivity_index = self.load_entity_by_gid(annotations_index.connectivity_gid)
    else:
        connectivity_index = self.load_entity_by_gid(view_model.connectivity_index)

    if view_model.region_mapping_index is None:
        region_map = dao.get_generic_entity(RegionMappingIndex, connectivity_index.gid, 'connectivity_gid')
        if len(region_map) < 1:
            raise LaunchException(
                "Can not launch this viewer unless we have at least a RegionMapping for the current Connectivity!")
        region_mapping_index = region_map[0]
    else:
        region_mapping_index = self.load_entity_by_gid(view_model.region_mapping_index)

    boundary_url = SurfaceURLGenerator.get_url_for_region_boundaries(region_mapping_index.surface_gid,
                                                                     region_mapping_index.gid,
                                                                     self.stored_adapter.id)

    surface_index = self.load_entity_by_gid(region_mapping_index.surface_gid)
    surface_h5 = h5.h5_file_for_index(surface_index)
    assert isinstance(surface_h5, SurfaceH5)
    url_vertices_pick, url_normals_pick, url_triangles_pick = SurfaceURLGenerator.get_urls_for_pick_rendering(
        surface_h5)
    url_vertices, url_normals, _, url_triangles, url_region_map = SurfaceURLGenerator.get_urls_for_rendering(
        surface_h5, region_mapping_index.gid)
    # Read the center before releasing the H5 file.
    brain_center = json.dumps(surface_h5.center())
    surface_h5.close()

    params = dict(title="Connectivity Annotations Visualizer",
                  baseUrl=TvbProfile.current.web.BASE_URL,
                  annotationsTreeUrl=URLGenerator.build_url(self.stored_adapter.id, 'tree_json',
                                                            view_model.annotations_index),
                  urlTriangleToRegion=URLGenerator.build_url(self.stored_adapter.id, "get_triangles_mapping",
                                                             region_mapping_index.gid),
                  urlActivationPatterns=URLGenerator.paths2url(view_model.annotations_index,
                                                               "get_activation_patterns"),
                  minValue=0,
                  maxValue=connectivity_index.number_of_regions - 1,
                  urlColors=json.dumps(url_region_map),
                  urlVerticesPick=json.dumps(url_vertices_pick),
                  urlTrianglesPick=json.dumps(url_triangles_pick),
                  urlNormalsPick=json.dumps(url_normals_pick),
                  brainCenter=brain_center,
                  urlVertices=json.dumps(url_vertices),
                  urlTriangles=json.dumps(url_triangles),
                  urlNormals=json.dumps(url_normals),
                  urlRegionBoundaries=boundary_url)

    return self.build_display_result("annotations/annotations_view", params,
                                     pages={"controlPage": "annotations/annotations_controls"})
def launch(self, view_model):
    """
    Launch algorithm and build results.
    """
    # --------- Prepare a WaveletCoefficients object for result ----------##
    frequencies_array = numpy.array([])
    if self.algorithm.frequencies is not None:
        frequencies_array = self.algorithm.frequencies.to_array()

    time_series_h5 = h5.h5_file_for_index(self.input_time_series_index)
    assert isinstance(time_series_h5, TimeSeriesH5)

    wavelet_index = WaveletCoefficientsIndex()
    dest_path = h5.path_for(self.storage_path, WaveletCoefficientsH5, wavelet_index.gid)

    wavelet_h5 = WaveletCoefficientsH5(path=dest_path)
    wavelet_h5.gid.store(uuid.UUID(wavelet_index.gid))
    wavelet_h5.source.store(time_series_h5.gid.load())
    wavelet_h5.mother.store(self.algorithm.mother)
    wavelet_h5.q_ratio.store(self.algorithm.q_ratio)
    wavelet_h5.sample_period.store(self.algorithm.sample_period)
    wavelet_h5.frequencies.store(frequencies_array)
    wavelet_h5.normalisation.store(self.algorithm.normalisation)

    # ------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
    node_slice = [slice(self.input_shape[0]), slice(self.input_shape[1]), None, slice(self.input_shape[3])]

    # ---------- Iterate over slices and compose final result ------------##
    small_ts = TimeSeries()
    small_ts.sample_period = time_series_h5.sample_period.load()
    small_ts.sample_period_unit = time_series_h5.sample_period_unit.load()
    for node in range(self.input_shape[2]):
        node_slice[2] = slice(node, node + 1)
        small_ts.data = time_series_h5.read_data_slice(tuple(node_slice))
        self.algorithm.time_series = small_ts
        partial_wavelet = self.algorithm.evaluate()
        wavelet_h5.write_data_slice(partial_wavelet)

    wavelet_h5.close()
    time_series_h5.close()

    wavelet_index.fk_source_gid = self.input_time_series_index.gid
    wavelet_index.mother = self.algorithm.mother
    wavelet_index.normalisation = self.algorithm.normalisation
    wavelet_index.q_ratio = self.algorithm.q_ratio
    wavelet_index.sample_period = self.algorithm.sample_period
    wavelet_index.number_of_scales = frequencies_array.shape[0]
    wavelet_index.frequencies_min, wavelet_index.frequencies_max, _ = from_ndarray(frequencies_array)

    return wavelet_index
def launch(self, view_model):
    # type: (FCDAdapterModel) -> [FcdIndex, ConnectivityMeasureIndex]
    """
    Launch algorithm and build results.

    :param view_model: the ViewModel keeping the algorithm inputs
    :return: the fcd index for the computed fcd matrix on the given time-series, with that sw and that sp
    """
    with h5.h5_file_for_index(self.input_time_series_index) as ts_h5:
        [fcd, fcd_segmented, eigvect_dict, eigval_dict] = self._compute_fcd_matrix(ts_h5)
        connectivity_gid = ts_h5.connectivity.load()
        connectivity = self.load_traited_by_gid(connectivity_gid)

    result = []  # list to store: fcd index, fcd_segmented index (eventually), and connectivity measure indexes

    # Create an index for the computed fcd.
    fcd_index = FcdIndex()
    fcd_h5_path = h5.path_for(self.storage_path, FcdH5, fcd_index.gid)
    with FcdH5(fcd_h5_path) as fcd_h5:
        self._populate_fcd_h5(fcd_h5, fcd, fcd_index.gid, self.input_time_series_index.gid,
                              view_model.sw, view_model.sp)
        self._populate_fcd_index(fcd_index, self.input_time_series_index.gid, fcd_h5)
    result.append(fcd_index)

    # The segmented FCD is stored only when the computation marked it with the 1.1 sentinel value.
    if np.amax(fcd_segmented) == 1.1:
        result_fcd_segmented_index = FcdIndex()
        result_fcd_segmented_h5_path = h5.path_for(self.storage_path, FcdH5, result_fcd_segmented_index.gid)
        with FcdH5(result_fcd_segmented_h5_path) as result_fcd_segmented_h5:
            self._populate_fcd_h5(result_fcd_segmented_h5, fcd_segmented, result_fcd_segmented_index.gid,
                                  self.input_time_series_index.gid, view_model.sw, view_model.sp)
            self._populate_fcd_index(result_fcd_segmented_index, self.input_time_series_index.gid,
                                     result_fcd_segmented_h5)
        result.append(result_fcd_segmented_index)

    for mode in eigvect_dict.keys():
        for var in eigvect_dict[mode].keys():
            for ep in eigvect_dict[mode][var].keys():
                for eig in range(3):
                    cm_data = eigvect_dict[mode][var][ep][eig]
                    measure = ConnectivityMeasure()
                    measure.connectivity = connectivity
                    measure.array_data = cm_data
                    measure.title = "Epoch # %d, eigenvalue = %s, variable = %s, " \
                                    "mode = %s." % (ep, eigval_dict[mode][var][ep][eig], var, mode)
                    cm_index = h5.store_complete(measure, self.storage_path)
                    result.append(cm_index)
    return result
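
# Hedged sketch of the sentinel check above: a toy segmented matrix is "kept" only when
# its maximum equals the 1.1 marker assumed to be written by the upstream
# _compute_fcd_matrix step (that contract is an assumption, not confirmed here).
def _example_segmented_kept():
    fcd_segmented = np.array([[0.2, 1.1], [1.1, 0.3]])
    return np.amax(fcd_segmented) == 1.1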
def launch(self, view_model):
    # type: (NodeCoherenceModel) -> [CoherenceSpectrumIndex]
    """
    Launch algorithm and build results.
    """
    # --------- Prepare a CoherenceSpectrum object for result ------------##
    coherence_spectrum_index = CoherenceSpectrumIndex()
    time_series_h5 = h5.h5_file_for_index(self.input_time_series_index)

    dest_path = h5.path_for(self.storage_path, CoherenceSpectrumH5, coherence_spectrum_index.gid)
    coherence_h5 = CoherenceSpectrumH5(dest_path)
    coherence_h5.gid.store(uuid.UUID(coherence_spectrum_index.gid))
    coherence_h5.source.store(view_model.time_series)
    coherence_h5.nfft.store(self.algorithm.nfft)

    # ------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
    input_shape = time_series_h5.data.shape
    node_slice = [slice(input_shape[0]), None, slice(input_shape[2]), slice(input_shape[3])]

    # ---------- Iterate over slices and compose final result ------------##
    small_ts = TimeSeries()
    small_ts.sample_period = time_series_h5.sample_period.load()
    small_ts.sample_period_unit = time_series_h5.sample_period_unit.load()
    partial_coh = None
    for var in range(input_shape[1]):
        node_slice[1] = slice(var, var + 1)
        small_ts.data = time_series_h5.read_data_slice(tuple(node_slice))
        self.algorithm.time_series = small_ts
        partial_coh = self.algorithm.evaluate()
        coherence_h5.write_data_slice(partial_coh)
    coherence_h5.frequency.store(partial_coh.frequency)
    array_metadata = coherence_h5.array_data.get_cached_metadata()
    freq_metadata = coherence_h5.frequency.get_cached_metadata()
    # Capture the array shape before closing the H5 file.
    array_data_shape = coherence_h5.array_data.shape
    coherence_h5.close()
    time_series_h5.close()

    coherence_spectrum_index.array_data_min = array_metadata.min
    coherence_spectrum_index.array_data_max = array_metadata.max
    coherence_spectrum_index.array_data_mean = array_metadata.mean
    coherence_spectrum_index.array_has_complex = array_metadata.has_complex
    coherence_spectrum_index.array_is_finite = array_metadata.is_finite
    coherence_spectrum_index.shape = json.dumps(array_data_shape)
    coherence_spectrum_index.ndim = len(array_data_shape)
    coherence_spectrum_index.fk_source_gid = self.input_time_series_index.gid
    coherence_spectrum_index.nfft = partial_coh.nfft
    coherence_spectrum_index.frequencies_min = freq_metadata.min
    coherence_spectrum_index.frequencies_max = freq_metadata.max
    coherence_spectrum_index.subtype = CoherenceSpectrum.__name__

    return coherence_spectrum_index
def get(self, datatype_gid):
    """
    Given a GID, download the full H5 data file.
    """
    index = ABCAdapter.load_entity_by_gid(datatype_gid)
    if index is None:
        raise InvalidIdentifierException(INVALID_DATATYPE_GID_MESSAGE.format(datatype_gid))

    h5_file = h5_file_for_index(index)
    # Use os.path.basename instead of splitting on '\\', which only works for Windows paths.
    file_name = os.path.basename(h5_file.path)
    return flask.send_file(h5_file.path, as_attachment=True, attachment_filename=file_name)
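
# Quick check of the basename fix above (stdlib only): os.path.basename delegates to the
# platform-specific module, and each handles its own separator, unlike splitting on a
# hard-coded backslash.
def _example_attachment_name():
    import ntpath, posixpath
    assert ntpath.basename('C:\\tvb\\datatype.h5') == 'datatype.h5'
    assert posixpath.basename('/tvb/datatype.h5') == 'datatype.h5'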