def launch(self, data_file, dataset_name, connectivity):
    """
    Execute import operations: read the MATLAB file and build one ConnectivityMeasure per row.
    """
    try:
        data = self.read_matlab_data(data_file, dataset_name)
        measurement_count, node_count = data.shape
        if node_count != connectivity.number_of_regions:
            raise LaunchException('The measurements are for %s nodes but the selected connectivity'
                                  ' contains %s nodes' % (node_count, connectivity.number_of_regions))

        measures = []
        for i in range(measurement_count):
            measure = ConnectivityMeasure(storage_path=self.storage_path,
                                          connectivity=connectivity,
                                          array_data=data[i, :])
            measure.user_tag_2 = "nr.-%d" % (i + 1)
            measure.user_tag_3 = "conn_%d" % node_count
            measures.append(measure)
        return measures
    except ParseException as excep:
        logger = get_logger(__name__)
        logger.exception(excep)
        raise LaunchException(excep)
def launch(self, time_series, sw, sp):
    """
    Launch algorithm and build results.

    :param time_series: the input time-series for which the FCD matrix should be computed
    :param sw: length of the sliding window
    :param sp: spanning time: distance between two consecutive sliding windows
    :returns: the fcd matrix for the given time-series, with that sw and that sp
    :rtype: `Fcd`, `ConnectivityMeasure`
    """
    result = []  # where fcd, fcd_segmented (eventually), and connectivity measures will be stored
    [fcd, fcd_segmented, eigvect_dict, eigval_dict, Connectivity] = self.algorithm.evaluate()

    # Create a Fcd dataType object.
    result_fcd = Fcd(storage_path=self.storage_path, source=time_series, sw=sw, sp=sp)
    result_fcd.array_data = fcd
    result.append(result_fcd)

    # 1.1 is the marker value written into fcd_segmented; if it appears, segmentation
    # actually happened and the segmented matrix is stored as well.
    if np.amax(fcd_segmented) == 1.1:
        result_fcd_segmented = Fcd(storage_path=self.storage_path, source=time_series, sw=sw, sp=sp)
        result_fcd_segmented.array_data = fcd_segmented
        result.append(result_fcd_segmented)

    for mode in eigvect_dict.keys():
        for var in eigvect_dict[mode].keys():
            for ep in eigvect_dict[mode][var].keys():
                for eig in range(3):
                    result_eig = ConnectivityMeasure(storage_path=self.storage_path)
                    result_eig.connectivity = Connectivity
                    result_eig.array_data = eigvect_dict[mode][var][ep][eig]
                    result_eig.title = "Epoch # %d, \n eigenvalue = %s,\n variable = %s,\n mode = %s." \
                                       % (ep, eigval_dict[mode][var][ep][eig], var, mode)
                    result.append(result_eig)
    return result
def launch(self, view_model):
    # type: (ConnectivityMeasureImporterModel) -> [ConnectivityMeasureIndex]
    """
    Execute import operations: read the MATLAB file and store one ConnectivityMeasure per row.
    """
    try:
        data = self.read_matlab_data(view_model.data_file, view_model.dataset_name)
        measurement_count, node_count = data.shape
        connectivity = self.load_traited_by_gid(view_model.connectivity)

        if node_count != connectivity.number_of_regions:
            raise LaunchException('The measurements are for %s nodes but the selected connectivity'
                                  ' contains %s nodes' % (node_count, connectivity.number_of_regions))

        measures = []
        self.generic_attributes.user_tag_2 = "conn_%d" % node_count
        for i in range(measurement_count):
            cm_data = data[i, :]
            measure = ConnectivityMeasure()
            measure.array_data = cm_data
            measure.connectivity = connectivity
            measure.title = "Measure %d for Connectivity with %d nodes." % ((i + 1), node_count)

            cm_idx = h5.store_complete(measure, self.storage_path)
            measures.append(cm_idx)
        return measures
    except ParseException as excep:
        logger = get_logger(__name__)
        logger.exception(excep)
        raise LaunchException(excep)
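# A minimal usage sketch for the importer above, not taken from the source: the view
# model fields mirror the attributes accessed in launch(); the dataset name "M", the
# `importer` instance and `connectivity_index` are assumptions for illustration only.
view_model = ConnectivityMeasureImporterModel()
view_model.data_file = "mantini_networks.mat"      # a .mat file holding a (measures x nodes) matrix
view_model.dataset_name = "M"                      # hypothetical name of the matrix inside the .mat file
view_model.connectivity = connectivity_index.gid   # GID of an already imported Connectivity
measure_indexes = importer.launch(view_model)      # returns one ConnectivityMeasureIndex per row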
def launch(self, view_model):
    # type: (FCDAdapterModel) -> [FcdIndex, ConnectivityMeasureIndex]
    """
    Launch algorithm and build results.

    :param view_model: the ViewModel keeping the algorithm inputs
    :return: the fcd index for the computed fcd matrix on the given time-series, with that sw and that sp
    """
    with h5.h5_file_for_index(self.input_time_series_index) as ts_h5:
        [fcd, fcd_segmented, eigvect_dict, eigval_dict] = self._compute_fcd_matrix(ts_h5)
        connectivity_gid = ts_h5.connectivity.load()
        connectivity = self.load_traited_by_gid(connectivity_gid)

    result = []  # list to store: fcd index, fcd_segmented index (eventually), and connectivity measure indexes

    # Create an index for the computed fcd.
    fcd_index = FcdIndex()
    fcd_h5_path = h5.path_for(self.storage_path, FcdH5, fcd_index.gid)
    with FcdH5(fcd_h5_path) as fcd_h5:
        self._populate_fcd_h5(fcd_h5, fcd, fcd_index.gid, self.input_time_series_index.gid,
                              view_model.sw, view_model.sp)
        self._populate_fcd_index(fcd_index, self.input_time_series_index.gid, fcd_h5)
    result.append(fcd_index)

    if np.amax(fcd_segmented) == 1.1:
        result_fcd_segmented_index = FcdIndex()
        result_fcd_segmented_h5_path = h5.path_for(self.storage_path, FcdH5, result_fcd_segmented_index.gid)
        with FcdH5(result_fcd_segmented_h5_path) as result_fcd_segmented_h5:
            self._populate_fcd_h5(result_fcd_segmented_h5, fcd_segmented, result_fcd_segmented_index.gid,
                                  self.input_time_series_index.gid, view_model.sw, view_model.sp)
            self._populate_fcd_index(result_fcd_segmented_index, self.input_time_series_index.gid,
                                     result_fcd_segmented_h5)
        result.append(result_fcd_segmented_index)

    for mode in eigvect_dict.keys():
        for var in eigvect_dict[mode].keys():
            for ep in eigvect_dict[mode][var].keys():
                for eig in range(3):
                    cm_data = eigvect_dict[mode][var][ep][eig]
                    measure = ConnectivityMeasure()
                    measure.connectivity = connectivity
                    measure.array_data = cm_data
                    measure.title = "Epoch # %d, eigenvalue = %s, variable = %s, " \
                                    "mode = %s." % (ep, eigval_dict[mode][var][ep][eig], var, mode)
                    cm_index = h5.store_complete(measure, self.storage_path)
                    result.append(cm_index)
    return result
def create_connectivity_measure(self, connectivity):
    """
    :returns: persisted entity ConnectivityMeasure
    """
    operation, _, storage_path = self.__create_operation()
    conn_measure = ConnectivityMeasure(storage_path=storage_path)
    conn_measure.connectivity = connectivity
    adapter_instance = StoreAdapter([conn_measure])
    OperationService().initiate_prelaunch(operation, adapter_instance, {})
    return conn_measure
def test_happy_flow(self):
    self.assertEqual(0, TestFactory.get_entity_count(self.test_project, ConnectivityMeasure()))
    self._import('mantini_networks.mat')
    self.assertEqual(6, TestFactory.get_entity_count(self.test_project, ConnectivityMeasure()))
def _create_measure(conn, op, op_dir, project_id):
    conn_measure = ConnectivityMeasure()
    conn_measure.connectivity = h5.load_from_index(conn)
    # numpy.array(...) of a scalar yields a 0-d array; it is used here only as placeholder measurement data
    conn_measure.array_data = numpy.array(conn.number_of_regions)

    conn_measure_db = h5.store_complete(conn_measure, op_dir)
    conn_measure_db.fk_from_operation = op.id
    dao.store_entity(conn_measure_db)

    count = dao.count_datatypes(project_id, DataTypeMatrix)
    return count
def build_connectivity_measure(self, result, key, connectivity, title="", label_x="", label_y=""):
    measure = ConnectivityMeasure()
    measure.array_data = result[key]
    measure.connectivity = connectivity
    measure.title = title
    measure.label_x = label_x
    measure.label_y = label_y
    return h5.store_complete(measure, self.storage_path)
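# A short usage sketch, not from the source: inside an analyzer adapter's launch(),
# each node-level array produced by the algorithm can be stored through the helper
# above; the result dict, its 'node_strength' key and the title are illustrative only.
cm_index = self.build_connectivity_measure(result, 'node_strength', connectivity,
                                           title="Node strength for %d regions" % connectivity.number_of_regions,
                                           label_x="Regions")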
def build_connectivity_measure(self, result, key, connectivity, title="", label_x="", label_y=""):
    measure = ConnectivityMeasure(storage_path=self.storage_path)
    measure.array_data = result[key]
    measure.connectivity = connectivity
    measure.title = title
    measure.label_x = label_x
    measure.label_y = label_y
    return measure
def build_connectivity_measure(self, result, key, connectivity, title="", label_x="", label_y=""):
    # TODO H5
    measure = ConnectivityMeasure()
    measure.array_data = result[key]
    measure.connectivity = connectivity
    measure.title = title
    measure.label_x = label_x
    measure.label_y = label_y
    return measure
def test_happy_flow(self):
    assert 0 == TestFactory.get_entity_count(self.test_project, ConnectivityMeasure())
    self._import('mantini_networks.mat')
    assert 6 == TestFactory.get_entity_count(self.test_project, ConnectivityMeasure())