def _store_float_array(array_data, subject_name, operation_id):
    """Wrap the given numpy data into a MappedArray entity and persist it in the DB."""
    entity = MappedArray(user_tag_1=subject_name)
    entity.set_operation_id(operation_id)
    entity.array_data = array_data
    # Generic DataType metadata expected by the storage layer.
    for attr_name, attr_value in (("type", "MappedArray"),
                                  ("module", "tvb.datatypes.arrays"),
                                  ("subject", subject_name),
                                  ("state", "RAW")):
        setattr(entity, attr_name, attr_value)
    dao.store_entity(entity)
def test_read_write_arrays(self):
    """
    Test the filter function when retrieving dataTypes with a filter
    after a column from a class specific table (e.g. DATA_arraywrapper).
    """
    flat_data = numpy.array(range(16))
    # The same 16 values stored as 1D, 2D, 3D and 4D arrays.
    target_shapes = [flat_data.shape, (2, 8), (2, 2, 4), (2, 2, 2, 2)]
    storage_path = self.flow_service.file_helper.get_project_folder(self.operation.project,
                                                                    str(self.operation.id))
    for idx, shape in enumerate(target_shapes):
        entity = MappedArray(title="dim_" + str(idx + 1), d_type="MappedArray",
                             storage_path=storage_path, module="tvb.datatypes.arrays",
                             subject="John Doe", state="RAW", operation_id=self.operation.id)
        entity.array_data = flat_data.reshape(shape)
        stored = dao.store_entity(entity)
        # Release the in-memory numpy payload once persisted.
        stored.array_data = None
    retrieved = self.flow_service.get_available_datatypes(self.test_project.id,
                                                          "tvb.datatypes.arrays.MappedArray")[0]
    self.assertEqual(len(retrieved), 4, "Found " + str(len(retrieved)))
    for idx, shape in enumerate(target_shapes):
        ## retrieved entries come back in the opposite order than the insert order
        stored_dt = dao.get_generic_entity(MappedArray, retrieved[3 - idx][2], 'gid')[0]
        self.assertEqual(stored_dt.length_1d, shape[0])
        if idx > 0:
            self.assertEqual(stored_dt.length_2d, shape[1])
        self.assertTrue(numpy.equal(stored_dt.array_data, flat_data.reshape(shape)).all(),
                        str(idx + 1) + "D Data not read correctly")
        stored_dt.array_data = None
        ### Check that meta-data are also written for Array attributes.
        # For range(16): min=0, max=15, mean=7.5.
        metadata = stored_dt.get_metadata('array_data')
        for meta_key, meta_value in [(stored_dt.METADATA_ARRAY_MAX, 15),
                                     (stored_dt.METADATA_ARRAY_MIN, 0),
                                     (stored_dt.METADATA_ARRAY_MEAN, 7.5)]:
            self.assertTrue(meta_key in metadata)
            self.assertEqual(metadata[meta_key], meta_value)
def launch(self, input_data=None):
    """
    Build and return a fixed 5x3x3 MappedArray, so the framework persists it in the DB.
    """
    self.launch_param = input_data
    result = MappedArray()
    result.storage_path = self.storage_path
    # 45 consecutive integers (1..45) arranged as a 5x3x3 volume.
    result.array_data = numpy.arange(1, 46).reshape((5, 3, 3))
    result.type = "MappedArray"
    result.module = "tvb.datatypes.arrays"
    result.subject = "John Doe"
    result.state = "RAW"
    return result
def test_read_write_arrays(self):
    """
    Test the filter function when retrieving dataTypes with a filter
    after a column from a class specific table (e.g. DATA_arraywrapper).
    """
    # 16 consecutive integers, reshaped below into 1D..4D variants.
    test_array = numpy.array(range(16))
    shapes = [test_array.shape, (2, 8), (2, 2, 4), (2, 2, 2, 2)]
    storage_path = self.flow_service.file_helper.get_project_folder(self.operation.project, str(self.operation.id))
    for i in range(4):
        # Persist one MappedArray per dimensionality, all under the same operation.
        datatype_inst = MappedArray(title="dim_" + str(i + 1), d_type="MappedArray", storage_path=storage_path, module="tvb.datatypes.arrays", subject="John Doe", state="RAW", operation_id=self.operation.id)
        datatype_inst.array_data = test_array.reshape(shapes[i])
        result = dao.store_entity(datatype_inst)
        # Drop the in-memory reference to the numpy payload after it is stored.
        result.array_data = None
    inserted_data = self.flow_service.get_available_datatypes(self.test_project.id, "tvb.datatypes.arrays.MappedArray")[0]
    self.assertEqual(len(inserted_data), 4, "Found " + str(len(inserted_data)))
    for i in range(4):
        ## inserted_data will be retrieved in the opposite order than the insert order
        actual_datatype = dao.get_generic_entity(MappedArray, inserted_data[3 - i][2], 'gid')[0]
        # length_1d / length_2d are the per-dimension columns on the class-specific table.
        self.assertEqual(actual_datatype.length_1d, shapes[i][0])
        if i > 0:
            self.assertEqual(actual_datatype.length_2d, shapes[i][1])
        expected_arr = test_array.reshape(shapes[i])
        self.assertTrue(numpy.equal(actual_datatype.array_data, expected_arr).all(), str(i + 1) + "D Data not read correctly")
        actual_datatype.array_data = None
        ### Check that meta-data are also written for Array attributes.
        # For range(16): max=15, min=0, mean=7.5.
        metadata = actual_datatype.get_metadata('array_data')
        self.assertTrue(actual_datatype.METADATA_ARRAY_MAX in metadata)
        self.assertEqual(metadata[actual_datatype.METADATA_ARRAY_MAX], 15)
        self.assertTrue(actual_datatype.METADATA_ARRAY_MIN in metadata)
        self.assertEqual(metadata[actual_datatype.METADATA_ARRAY_MIN], 0)
        self.assertTrue(actual_datatype.METADATA_ARRAY_MEAN in metadata)
        self.assertEqual(metadata[actual_datatype.METADATA_ARRAY_MEAN], 7.5)
def gettemplatefordimensionselect(self, entity_gid=None, select_name="", reset_session='False', parameters_prefix="dimensions", required_dimension=1, expected_shape="", operations=""):
    """
    Returns the HTML which contains the selects components which allows the user
    to reduce the dimension of a multi-dimensional array.

    We try to obtain the aggregation_functions from the entity, which is a list of lists.
    For each dimension should be a list with the supported aggregation functions. We
    create a DICT for each of those lists. The key will be the name of the function and
    the value will be its label.

    entity_gid
        the GID of the entity for which is displayed the component

    select_name
        the name of the parent select. The select in which
        is displayed the entity with the given GID

    parameters_prefix
        a string which will be used for computing the names of the component

    required_dimension
        the expected dimension for the resulted array

    expected_shape and operations
        used for applying conditions on the resulted array
        e.g.: If the resulted array is a 3D array and we want that the length of the second
        dimension to be smaller then 512 then the expected_shape and operations should be:
        ``expected_shape=x,512,x`` and ``operations='x,<,x``
    """
    # Skeleton of the values consumed by the HTML template; filled in progressively below.
    template_params = {"select_name": "", "data": [], "parameters_prefix": parameters_prefix, "array_shape": "", "required_dimension": required_dimension, "currentDim": "", "required_dim_msg": "", "expected_shape": expected_shape, "operations": operations}
    #if reload => populate the selected values
    session_dict = self.context.get_current_default()
    # Default dimension selection when there is no (or a reset) session.
    # NOTE(review): the meaning of {1: [0], 3: [0]} is defined by parse_selected_items /
    # _compute_current_dimension — presumably "index 0 selected on dims 1 and 3"; confirm there.
    dimensions = {1: [0], 3: [0]}
    selected_agg_functions = {}
    if not string2bool(str(reset_session)) and session_dict is not None:
        # Keep only the session keys belonging to this select/prefix pair,
        # then let MappedArray decode them into selections and agg functions.
        starts_with_str = select_name + "_" + parameters_prefix + "_"
        ui_sel_items = dict((k, v) for k, v in session_dict.items() if k.startswith(starts_with_str))
        dimensions, selected_agg_functions, required_dimension, _ = MappedArray().parse_selected_items(ui_sel_items)
    template_params["selected_items"] = dimensions
    template_params["selected_functions"] = selected_agg_functions
    aggregation_functions = []
    # NOTE(review): double underscore in 'accepted__aggregation_functions' looks like a typo,
    # but the helper is defined elsewhere — confirm the actual method name before renaming.
    default_agg_functions = self.accepted__aggregation_functions()
    # Fallback labels when the entity defines no dimensions_labels.
    labels_set = ["Time", "Channel", "Line"]
    if entity_gid is not None:
        actual_entity = ABCAdapter.load_entity_by_gid(entity_gid)
        if hasattr(actual_entity, 'shape'):
            array_shape = actual_entity.shape
            # Shape after applying the current selections/aggregations, plus the
            # resulting dimensionality shown to the user.
            new_shape, current_dim = self._compute_current_dimension(list(array_shape), dimensions, selected_agg_functions)
            if required_dimension is not None and current_dim != int(required_dimension):
                template_params["required_dim_msg"] = "Please select a " + str(required_dimension) + "D array"
            if not current_dim:
                # All dimensions reduced away: a scalar result.
                template_params["currentDim"] = "1 element"
            else:
                template_params["currentDim"] = str(current_dim) + "D array"
            template_params["array_shape"] = json.dumps(new_shape)
            if hasattr(actual_entity, 'dimensions_labels') and actual_entity.dimensions_labels is not None:
                labels_set = actual_entity.dimensions_labels
                #make sure there exists labels for each dimension
                while len(labels_set) < len(array_shape):
                    labels_set.append("Undefined")
            # Use the entity's per-dimension aggregation functions only when it declares
            # exactly one list per dimension; otherwise fall back to the defaults everywhere.
            if (hasattr(actual_entity, 'aggregation_functions') and actual_entity.aggregation_functions is not None and len(actual_entity.aggregation_functions) == len(array_shape)):
                #will be a list of lists of aggregation functions
                defined_functions = actual_entity.aggregation_functions
                for function in defined_functions:
                    if not len(function):
                        aggregation_functions.append({})
                    else:
                        # Map each declared function name to its display label.
                        func_dict = {}
                        for function_key in function:
                            func_dict[function_key] = default_agg_functions[function_key]
                        aggregation_functions.append(func_dict)
            else:
                for _ in array_shape:
                    aggregation_functions.append(default_agg_functions)
            # One [labels, values, agg_functions] triple per dimension; each value encodes
            # "<entity_gid>_<dimension>_<index>" for the client-side selects.
            result = []
            for i, shape in enumerate(array_shape):
                labels = []
                values = []
                for j in xrange(shape):
                    labels.append(labels_set[i] + " " + str(j))
                    values.append(entity_gid + "_" + str(i) + "_" + str(j))
                result.append([labels, values, aggregation_functions[i]])
            template_params["select_name"] = select_name
            template_params["data"] = result
            return template_params
    # No entity (or entity without a shape): return the mostly-empty defaults.
    return template_params