Example No. 1
    def read_datatype_attribute(self, entity_gid, dataset_name, flatten=False, datatype_kwargs='null', **kwargs):
        """
        Retrieve from a given DataType a property or a method result.

        :param entity_gid: GID of the DataType entity
        :param dataset_name: name of the DataType property/method
        :param flatten: whether the result should be flattened before being returned (useful mainly
            for WebGL data, e.g. vertices/triangles); ignored if the attribute is not an ndarray
        :param datatype_kwargs: if passed, a dictionary of the form {'name': 'gid'}; for each such
            pair, a load_entity is performed and kwargs is updated with the result
        :param kwargs: extra parameters to be passed when dataset_name is a method
        :returns: JSON-serializable representation of the attribute

        """
        self.logger.debug("Starting to read HDF5: " + entity_gid + "/" + dataset_name + "/" + str(kwargs))
        entity = ABCAdapter.load_entity_by_gid(entity_gid)
        datatype_kwargs = json.loads(datatype_kwargs)
        if datatype_kwargs:
            for key, value in datatype_kwargs.iteritems():
                kwargs[key] = ABCAdapter.load_entity_by_gid(value)
        dataset = getattr(entity, dataset_name)
        if not kwargs:
            # why the deep copy?
            result = copy.deepcopy(dataset)
        else:
            result = dataset(**kwargs)

        if isinstance(result, numpy.ndarray):
            # for ndarrays honor the flatten kwarg and convert to lists as ndarrs are not json-able
            if flatten is True or flatten == "True":
                result = result.flatten()
            return result.tolist()
        else:
            return result
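
The ndarray branch above exists because NumPy arrays are not JSON-serializable as-is; flatten() plus tolist() turns them into plain Python lists. A minimal standalone sketch of that conversion, using hypothetical data and no TVB imports:

    import json
    import numpy

    vertices = numpy.arange(6.0).reshape(2, 3)   # hypothetical WebGL-style data
    # json.dumps(vertices) would raise TypeError: ndarray is not JSON serializable
    flattened = vertices.flatten().tolist()      # [0.0, 1.0, 2.0, 3.0, 4.0, 5.0]
    print(json.dumps(flattened))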
 def read_datatype_attribute(self, entity_gid, dataset_name, flatten=False, datatype_kwargs='null', **kwargs):
     """
     Retrieve from a given DataType a property or a method result.
      :returns: the NumPy array converted to a JSON-serializable list
      :param entity_gid: GID of the DataType entity
      :param dataset_name: name of the DataType property/method
      :param flatten: whether the result should be flattened before being returned (useful mainly
          for WebGL data, e.g. vertices/triangles)
      :param datatype_kwargs: if passed, a dictionary of the form {'name': 'gid'}; for each such
          pair, a load_entity is performed and kwargs is updated with the result
      :param kwargs: extra parameters to be passed when dataset_name is a method
     """
     try:
         self.logger.debug("Starting to read HDF5: " + entity_gid + "/" + dataset_name + "/" + str(kwargs))
         entity = ABCAdapter.load_entity_by_gid(entity_gid)
         if kwargs is None:
             kwargs = {}
         datatype_kwargs = json.loads(datatype_kwargs)
         if datatype_kwargs is not None:
             for key in datatype_kwargs:
                 kwargs[key] = ABCAdapter.load_entity_by_gid(datatype_kwargs[key])
         if len(kwargs) < 1:
             numpy_array = copy.deepcopy(getattr(entity, dataset_name))
         else:
              numpy_array = getattr(entity, dataset_name)(**kwargs)
         if (flatten is True) or (flatten == "True"):
             numpy_array = numpy_array.flatten()
         return numpy_array.tolist()
     except Exception, excep:
         self.logger.error("Could not retrieve complex entity field:" + str(entity_gid) + "/" + str(dataset_name))
         self.logger.exception(excep)
def cdata2local_connectivity(local_connectivity_data, meta, storage_path, expected_length=0):
    """
    From a CData entry in CFF, create LocalConnectivity entity.
    """
    ##### expected_length = cortex.region_mapping.shape[0]
    tmpdir = os.path.join(gettempdir(), local_connectivity_data.parent_cfile.get_unique_cff_name())
    LOG.debug("Using temporary folder for Local Connectivity import: " + tmpdir)
    _zipfile = ZipFile(local_connectivity_data.parent_cfile.src, 'r', ZIP_DEFLATED)
    local_connectivity_path = _zipfile.extract(local_connectivity_data.src, tmpdir)
    
    gid = dao.get_last_data_with_uid(meta[constants.KEY_SURFACE_UID], surfaces.CorticalSurface)
    surface_data = ABCAdapter.load_entity_by_gid(gid)
    
    local_connectivity = surfaces.LocalConnectivity()
    local_connectivity.storage_path = storage_path 
    local_connectivity_data = read_matlab_data(local_connectivity_path, constants.DATA_NAME_LOCAL_CONN)
    
    if local_connectivity_data.shape[0] < expected_length:
        padding = sparse.csc_matrix((local_connectivity_data.shape[0],
                                    expected_length - local_connectivity_data.shape[0]))
        local_connectivity_data = sparse.hstack([local_connectivity_data, padding])
            
        padding = sparse.csc_matrix((expected_length - local_connectivity_data.shape[0],
                                     local_connectivity_data.shape[1]))
        local_connectivity_data = sparse.vstack([local_connectivity_data, padding])
    
    local_connectivity.equation = None
    local_connectivity.matrix = local_connectivity_data        
    local_connectivity.surface = surface_data
    
    uid = meta[constants.KEY_UID] if constants.KEY_UID in meta else None
    if os.path.isdir(tmpdir):
        shutil.rmtree(tmpdir)
    return local_connectivity, uid
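
The padding branch above squares the sparse matrix up to expected_length in two steps: columns first, then rows. A minimal sketch of the same idea with hypothetical sizes (only numpy and scipy.sparse needed):

    import numpy
    from scipy import sparse

    data = sparse.csc_matrix(numpy.ones((3, 3)))   # hypothetical 3x3 local connectivity
    expected_length = 5
    if data.shape[0] < expected_length:
        # pad columns: (3, 3) -> (3, 5)
        data = sparse.hstack([data, sparse.csc_matrix((data.shape[0], expected_length - data.shape[0]))])
        # pad rows: (3, 5) -> (5, 5)
        data = sparse.vstack([data, sparse.csc_matrix((expected_length - data.shape[0], data.shape[1]))])
    assert data.shape == (expected_length, expected_length)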
 def step_2(self, **kwargs):
     """
     Generate the html for the second step of the local connectivity page.
      :param kwargs: not actually used, but parameters are still submitted from the UI since we
             reuse the same JS function for this. TODO: do this in a smarter way
     """
     context = common.get_from_session(KEY_LCONN_CONTEXT)
     left_side_interface = self.get_select_existent_entities('Load Local Connectivity:', LocalConnectivity,
                                                             context.selected_entity)
     template_specification = dict(title="Surface - Local Connectivity")
     template_specification['mainContent'] = 'spatial/local_connectivity_step2_main'
     template_specification['existentEntitiesInputList'] = left_side_interface
     template_specification['loadExistentEntityUrl'] = LOAD_EXISTING_URL
     template_specification['resetToDefaultUrl'] = RELOAD_DEFAULT_PAGE_URL
     template_specification['next_step_url'] = '/spatial/localconnectivity/step_1'
     msg, _ = common.get_message_from_session()
     template_specification['displayedMessage'] = msg
     context = common.get_from_session(KEY_LCONN_CONTEXT)
     if context.selected_entity is not None:
         selected_local_conn = ABCAdapter.load_entity_by_gid(context.selected_entity)
         template_specification.update(self.display_surface(selected_local_conn.surface.gid))
         template_specification['no_local_connectivity'] = False
         min_value, max_value = selected_local_conn.get_min_max_values()
         template_specification['minValue'] = min_value
         template_specification['maxValue'] = max_value
     else:
         template_specification['no_local_connectivity'] = True
     template_specification[common.KEY_PARAMETERS_CONFIG] = False
     return self.fill_default_attributes(template_specification)
    def load_region_stimulus(self, region_stimulus_gid, from_step=None):
        """
        Loads the interface for the selected region stimulus.
        """
        selected_region_stimulus = ABCAdapter.load_entity_by_gid(region_stimulus_gid)
        temporal_eq = selected_region_stimulus.temporal
        spatial_eq = selected_region_stimulus.spatial
        connectivity = selected_region_stimulus.connectivity
        weights = selected_region_stimulus.weight

        temporal_eq_type = temporal_eq.__class__.__name__
        spatial_eq_type = spatial_eq.__class__.__name__
        default_dict = {'temporal': temporal_eq_type, 'spatial': spatial_eq_type,
                        'connectivity': connectivity.gid, 'weight': json.dumps(weights)}
        for param in temporal_eq.parameters:
            prepared_name = 'temporal_parameters_option_' + str(temporal_eq_type)
            prepared_name = prepared_name + '_parameters_parameters_' + str(param)
            default_dict[prepared_name] = str(temporal_eq.parameters[param])
        for param in spatial_eq.parameters:
            prepared_name = 'spatial_parameters_option_' + str(spatial_eq_type) + '_parameters_parameters_' + str(param)
            default_dict[prepared_name] = str(spatial_eq.parameters[param])

        input_list = self.get_creator_and_interface(REGION_STIMULUS_CREATOR_MODULE,
                                                    REGION_STIMULUS_CREATOR_CLASS, StimuliRegion())[1]
        input_list = InputTreeManager.fill_defaults(input_list, default_dict)
        context = common.get_from_session(KEY_REGION_CONTEXT)
        context.reset()
        context.update_from_interface(input_list)
        context.equation_kwargs[DataTypeMetaData.KEY_TAG_1] = selected_region_stimulus.user_tag_1
        context.set_active_stimulus(region_stimulus_gid)

        return self.do_step(from_step)
    def _read_datatype_attribute(self, entity_gid, dataset_name, datatype_kwargs='null', **kwargs):
        self.logger.debug("Starting to read HDF5: " + entity_gid + "/" + dataset_name + "/" + str(kwargs))
        entity = ABCAdapter.load_entity_by_gid(entity_gid)

        datatype_kwargs = json.loads(datatype_kwargs)
        if datatype_kwargs:
            for key, value in datatype_kwargs.iteritems():
                kwargs[key] = ABCAdapter.load_entity_by_gid(value)

        result = getattr(entity, dataset_name)
        if callable(result):
            if kwargs:
                result = result(**kwargs)
            else:
                result = result()
        return result
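
The helper above relies on getattr plus a callable check to treat plain attributes and methods uniformly. A toy illustration with a hypothetical class:

    class Entity(object):
        vertices = [1, 2, 3]                 # plain attribute

        def scaled(self, factor=1):          # method accepting kwargs
            return [v * factor for v in self.vertices]

    entity = Entity()
    for name, kw in [('vertices', {}), ('scaled', {'factor': 2})]:
        attr = getattr(entity, name)
        print(attr(**kw) if callable(attr) else attr)   # [1, 2, 3] then [2, 4, 6]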
Example No. 7
 def _create_value_wrapper(self):
     """Persist ValueWrapper"""
     value_ = ValueWrapper(data_value=5.0, data_name="my_value")
     self._store_entity(value_, "ValueWrapper", "tvb.datatypes.mapped_values")
     valuew = self.get_all_entities(ValueWrapper)
     self.assertEqual(1, len(valuew), "Should be one value wrapper")
     return ABCAdapter.load_entity_by_gid(valuew[0].gid)
def cdata2eeg_mapping(eeg_mapping_data, meta, storage_path, expected_shape=0):
    """
    Currently not used
    """
    tmpdir = os.path.join(gettempdir(), eeg_mapping_data.parent_cfile.get_unique_cff_name())
    LOG.debug("Using temporary folder for EEG_Mapping import: " + tmpdir)
    _zipfile = ZipFile(eeg_mapping_data.parent_cfile.src, 'r', ZIP_DEFLATED)
    eeg_projection_path = _zipfile.extract(eeg_mapping_data.src, tmpdir)
    eeg_projection_data = read_matlab_data(eeg_projection_path, constants.DATA_NAME_PROJECTION)
    if eeg_projection_data.shape[1] < expected_shape:
        padding = numpy.zeros((eeg_projection_data.shape[0], expected_shape - eeg_projection_data.shape[1]))
        eeg_projection_data = numpy.hstack((eeg_projection_data, padding))
        
    gid = dao.get_last_data_with_uid(meta[constants.KEY_SURFACE_UID], surfaces.CorticalSurface)
    surface_data = ABCAdapter.load_entity_by_gid(gid)
    
    projection_matrix = projections.ProjectionSurfaceEEG(storage_path=storage_path)
    projection_matrix.projection_data = eeg_projection_data
    projection_matrix.sources = surface_data
    projection_matrix.sensors = None
    ### TODO if we decide to use this method, we will need to find a manner to fill the sensors.
    return projection_matrix
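
The column padding above can be reproduced in isolation; a short sketch with hypothetical dimensions:

    import numpy

    projection = numpy.random.rand(62, 1000)   # hypothetical sensors-by-vertices matrix
    expected_shape = 1024
    if projection.shape[1] < expected_shape:
        padding = numpy.zeros((projection.shape[0], expected_shape - projection.shape[1]))
        projection = numpy.hstack((projection, padding))
    assert projection.shape == (62, 1024)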
 def _create_value_wrapper(self):
     """Persist ValueWrapper"""
     value_ = ValueWrapper(data_value=5.0, data_name="my_value")
     self._store_entity(value_, "ValueWrapper", "tvb.datatypes.mapped_values")
     valuew = self.flow_service.get_available_datatypes(self.test_project.id,
                                                        "tvb.datatypes.mapped_values.ValueWrapper")[0]
     self.assertEqual(len(valuew), 1, "Should be only one value wrapper")
     return ABCAdapter.load_entity_by_gid(valuew[0][2])
    def test_import_export(self):
        """
        Test the import/export mechanism for a project structure.
        The project contains the following data types: Connectivity, Surface, MappedArray and ValueWrapper.
        """
        result = self.get_all_datatypes()
        expected_results = {}
        for one_data in result:
            expected_results[one_data.gid] = (one_data.module, one_data.type)
        
        #create an array mapped in DB
        data = {'param_1': 'some value'}
        OperationService().initiate_prelaunch(self.operation, self.adapter_instance, {}, **data)
        inserted = self.flow_service.get_available_datatypes(self.test_project.id,
                                                             "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(1, inserted, "Problems when inserting data")
        
        #create a value wrapper
        value_wrapper = self._create_value_wrapper()
        count_operations = dao.get_filtered_operations(self.test_project.id, None, is_count=True)
        self.assertEqual(2, count_operations, "Invalid ops number before export!")

        # Export project as ZIP
        self.zip_path = ExportManager().export_project(self.test_project)
        self.assertTrue(self.zip_path is not None, "Exported file is none")
        
        # Remove the original project
        self.project_service.remove_project(self.test_project.id)
        result, lng_ = self.project_service.retrieve_projects_for_user(self.test_user.id)
        self.assertEqual(0, len(result), "Project Not removed!")
        self.assertEqual(0, lng_, "Project Not removed!")
        
        # Now try to import again project
        self.import_service.import_project_structure(self.zip_path, self.test_user.id)
        result = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertEqual(len(result), 1, "There should be only one project.")
        self.assertEqual(result[0].name, "GeneratedProject", "The project name is not correct.")
        self.assertEqual(result[0].description, "test_desc", "The project description is not correct.")
        self.test_project = result[0]
        
        count_operations = dao.get_filtered_operations(self.test_project.id, None, is_count=True)
        
        #1 op. - import cff; 2 op. - save the array wrapper;
        self.assertEqual(2, count_operations, "Invalid ops number after export and import !")
        for gid in expected_results:
            datatype = dao.get_datatype_by_gid(gid)
            self.assertEqual(datatype.module, expected_results[gid][0], 'DataTypes not imported correctly')
            self.assertEqual(datatype.type, expected_results[gid][1], 'DataTypes not imported correctly')
        #check the value wrapper
        new_val = self.flow_service.get_available_datatypes(self.test_project.id, 
                                                            "tvb.datatypes.mapped_values.ValueWrapper")[0]
        self.assertEqual(1, len(new_val), "One !=" + str(len(new_val)))
        new_val = ABCAdapter.load_entity_by_gid(new_val[0][2])
        self.assertEqual(value_wrapper.data_value, new_val.data_value, "Data value incorrect")
        self.assertEqual(value_wrapper.data_type, new_val.data_type, "Data type incorrect")
        self.assertEqual(value_wrapper.data_name, new_val.data_name, "Data name incorrect")
Example No. 11
    def get_entity(project, expected_data, filters=None):
        """
        Return the first entity with class given by `expected_data`

        :param expected_data: specifies the class whose entity is returned
        """
        data_types = FlowService().get_available_datatypes(project.id,
                                                           expected_data.module + "." + expected_data.type, filters)[0]
        entity = ABCAdapter.load_entity_by_gid(data_types[0][2])
        return entity
    def display_connectivity(connectivity_gid):
        """
        Generates the html for displaying the connectivity matrix.
        """
        connectivity = ABCAdapter.load_entity_by_gid(connectivity_gid)
        connectivity_viewer_params = ConnectivityViewer.get_connectivity_parameters(connectivity)

        template_specification = dict()
        template_specification['isSingleMode'] = True
        template_specification.update(connectivity_viewer_params)
        return template_specification
 def _is_compatible(self, algorithm, datatype_group_gid):
     """
     Check if PSE view filters are compatible with current DataType.
     :param algorithm: Algorithm instance to get filters from it.
     :param datatype_group_gid: Current DataTypeGroup to validate against.
     :returns: True when DataTypeGroup can be displayed with current algorithm, False when incompatible.
     """
     datatype_group = ABCAdapter.load_entity_by_gid(datatype_group_gid)
     filter_chain = FilterChain.from_json(algorithm.datatype_filter)
     if datatype_group and (not filter_chain or filter_chain.get_python_filter_equivalent(datatype_group)):
         return True
     return False
    def test_h5_import(self):
        """
            This method tests import of TVB data in h5 format. Single data type / import
        """
        self._import(self.h5_file_path)

        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           self.datatype.module + "." + self.datatype.type)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type.")

        data_type_entity = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(data_type_entity is not None, "Datatype should not be none")
        self.assertEqual(self.datatype.gid, data_type_entity.gid, "Imported datatype should have the same gid")
Example No. 15
    def downloaddata(self, data_gid, export_module):
        """ Export the data to a default path of TVB_STORAGE/PROJECTS/project_name """
        current_prj = common.get_current_project()
        # Load data by GID
        entity = ABCAdapter.load_entity_by_gid(data_gid)
        # Do real export
        export_mng = ExportManager()
        file_name, file_path, delete_file = export_mng.export_data(entity, export_module, current_prj)
        if delete_file:
            # We force parent folder deletion because export process generated it.
            self.mark_file_for_delete(file_path, True)

        self.logger.debug("Data exported in file: " + str(file_path))
        return serve_file(file_path, "application/x-download", "attachment", file_name)
def cdata2region_mapping(region_mapping_data, meta, storage_path):
    """
    From a CData entry in CFF, create RegionMapping entity.
    """
    tmpdir = os.path.join(gettempdir(), region_mapping_data.parent_cfile.get_unique_cff_name())
    LOG.debug("Using temporary folder for Region_Mapping import: " + tmpdir)
    _zipfile = ZipFile(region_mapping_data.parent_cfile.src, 'r', ZIP_DEFLATED)
    region_mapping_path = _zipfile.extract(region_mapping_data.src, tmpdir)
    
    gid = dao.get_last_data_with_uid(meta[constants.KEY_SURFACE_UID], surfaces.CorticalSurface)
    surface_data = ABCAdapter.load_entity_by_gid(gid)
    
    gid = dao.get_last_data_with_uid(meta[constants.KEY_CONNECTIVITY_UID], Connectivity)
    connectivity = ABCAdapter.load_entity_by_gid(gid)
    
    region_mapping = surfaces.RegionMapping(storage_path=storage_path)
    region_mapping.array_data = read_list_data(region_mapping_path, dtype=numpy.int32)
    region_mapping.connectivity = connectivity
    region_mapping.surface = surface_data
    uid = meta[constants.KEY_UID] if constants.KEY_UID in meta else None
    
    if os.path.isdir(tmpdir):
        shutil.rmtree(tmpdir)
    return region_mapping, uid
    def _get_entity(self, expected_data, filters=None):
        """
        Checks that there is exactly one datatype with the required specifications and returns it.

        :param expected_data: a class whose entity is to be returned
        :param filters: optional, the returned entity will also have the required filters
        :return: an object of class `expected_data`
        """
        dt_full_name = expected_data.__module__ + "." + expected_data.__name__
        data_types = FlowService().get_available_datatypes(self.test_project.id, dt_full_name, filters)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type:" + str(expected_data.type))
        
        entity = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(entity is not None, "Instance should not be none")
        return entity
    def _importSurface(self, import_file_path=None):
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.zip_surface_importer', 'ZIPSurfaceImporter')
        args = {'uploaded': import_file_path, 'surface_type': OUTER_SKULL,
                'zero_based_triangles': True,
                DataTypeMetaData.KEY_SUBJECT: "John"}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(self.test_project.id, SkullSkin)[0]
        assert 1 == len(data_types), "Project should contain only one data type."

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert surface is not None, "Surface should not be None"
        return surface
Example No. 19
    def _importSurface(self, import_file_path=None):
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.obj_importer', 'ObjSurfaceImporter')

        args = {'data_file': import_file_path,
                "surface_type": FACE,
                DataTypeMetaData.KEY_SUBJECT: "John"}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(self.test_project.id, FaceSurface)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type.")

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(surface is not None, "Surface should not be None")
        return surface
    def display_surface(surface_gid):
        """
        Generates the HTML for displaying the surface with the given ID.
        """
        surface = ABCAdapter.load_entity_by_gid(surface_gid)
        common.add2session(PARAM_SURFACE, surface_gid)
        url_vertices_pick, url_normals_pick, url_triangles_pick = surface.get_urls_for_pick_rendering()
        url_vertices, url_normals, _, url_triangles = surface.get_urls_for_rendering()

        return {
            'urlVerticesPick': json.dumps(url_vertices_pick),
            'urlTrianglesPick': json.dumps(url_triangles_pick),
            'urlNormalsPick': json.dumps(url_normals_pick),
            'urlVertices': json.dumps(url_vertices),
            'urlTriangles': json.dumps(url_triangles),
            'urlNormals': json.dumps(url_normals),
            'brainCenter': json.dumps(surface.center())
        }
Example No. 21
    def _get_effective_data_type(self, data):
        """
        This method returns the data type for the provided data.
        - If the current data is a simple data type, it is returned as-is.
        - If it is a data type group, the first element is returned; one element is
          enough since all group elements have the same type.
        """
        # first check if current data is a DataTypeGroup
        if self.is_data_a_group(data):
            data_types = ProjectService.get_datatypes_from_datatype_group(data.id)

            if data_types is not None and len(data_types) > 0:
                # Since all objects in a group are the same type it's enough
                return ABCAdapter.load_entity_by_gid(data_types[0].gid)
            else:
                return None
        else:
            return data
Example No. 22
    def test_remove_used_surface(self):
        """
        Tries to remove a used surface
        """
        mapping, mapping_count = self.flow_service.get_available_datatypes(self.test_project.id, RegionMapping)
        self.assertEqual(1, mapping_count, "There should be one Mapping.")
        mapping_gid = mapping[0][2]
        mapping = ABCAdapter.load_entity_by_gid(mapping_gid)
        surface = dao.get_datatype_by_gid(mapping.surface.gid)
        self.assertEqual(surface.gid, mapping.surface.gid, "The surfaces should have the same GID")
        try:
            self.project_service.remove_datatype(self.test_project.id, surface.gid)
            self.fail("The surface should still be used by a RegionMapping " + str(surface.gid))
        except RemoveDataTypeException:
            #OK, do nothing
            pass

        res = dao.get_datatype_by_gid(surface.gid)
        self.assertEqual(surface.id, res.id, "A used surface was deleted")
 def step_2(self):
     """
     Generate the required template dictionary for the second step.
     """
     context = common.get_from_session(KEY_REGION_CONTEXT)
     selected_stimulus_gid = context.selected_stimulus
     left_side_interface = self.get_select_existent_entities('Load Region Stimulus:',
                                                             StimuliRegion, selected_stimulus_gid)
     template_specification = dict(title="Spatio temporal - Region stimulus")
     template_specification['mainContent'] = 'spatial/stimulus_region_step2_main'
     template_specification['next_step_url'] = '/spatial/stimulus/region/step_2_submit'
     template_specification['existentEntitiesInputList'] = left_side_interface
     default_weights = context.get_weights()
     if len(default_weights) == 0:
         selected_connectivity = ABCAdapter.load_entity_by_gid(context.get_session_connectivity())
         default_weights = StimuliRegion.get_default_weights(selected_connectivity.number_of_regions)
     template_specification['node_weights'] = json.dumps(default_weights)
     template_specification[common.KEY_PARAMETERS_CONFIG] = False
     template_specification.update(self.display_connectivity(context.get_session_connectivity()))
     return self.fill_default_attributes(template_specification)
Example No. 24
    def _import(self, import_file_path=None, expected_result_class=StructuralMRI, connectivity=None):
        """
        This method is used for importing data in NIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance 
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.nifti_importer', 'NIFTIImporter')
        args = {'data_file': import_file_path, DataTypeMetaData.KEY_SUBJECT: "bla bla",
                'apply_corrections': True, 'connectivity': connectivity}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        dts, count = dao.get_values_of_datatype(self.test_project.id, expected_result_class, None)
        self.assertEqual(1, count, "Project should contain only one data type.")

        result = ABCAdapter.load_entity_by_gid(dts[0][2])
        self.assertTrue(result is not None, "Result should not be none")
        return result
Example No. 25
    def _get_all_data_types_arr(self, data):
        """
        This method builds an array with all data types to be processed later.
        - If the current data is a simple data type, it is added to the array.
        - If it is a data type group, all its children are loaded and added to the array.
        """
        # first check if current data is a DataTypeGroup
        if self.is_data_a_group(data):
            data_types = ProjectService.get_datatypes_from_datatype_group(data.id)

            result = []
            if data_types is not None and len(data_types) > 0:
                for data_type in data_types:
                    entity = ABCAdapter.load_entity_by_gid(data_type.gid)
                    result.append(entity)

            return result

        else:
            return [data]
Example No. 26
    def display_surface(surface_gid):
        """
        Generates the HTML for displaying the surface with the given ID.
        """
        surface = ABCAdapter.load_entity_by_gid(surface_gid)
        common.add2session(PARAM_SURFACE, surface_gid)
        url_vertices_pick, url_normals_pick, url_triangles_pick = surface.get_urls_for_pick_rendering()
        url_vertices, url_normals, _, url_triangles = surface.get_urls_for_rendering()

        return {
            'urlVerticesPick': json.dumps(url_vertices_pick),
            'urlTrianglesPick': json.dumps(url_triangles_pick),
            'urlNormalsPick': json.dumps(url_normals_pick),
            'urlVertices': json.dumps(url_vertices),
            'urlTriangles': json.dumps(url_triangles),
            'urlNormals': json.dumps(url_normals),
            'brainCenter': json.dumps(surface.center())
        }
    def display_surface(surface_gid):
        """
        Generates the HTML for displaying the surface with the given ID.
        """
        surface = ABCAdapter.load_entity_by_gid(surface_gid)
        base.add2session(PARAM_SURFACE, surface_gid)
        url_vertices_pick, url_normals_pick, url_triangles_pick = surface.get_urls_for_pick_rendering()
        url_vertices, url_normals, url_triangles, alphas, alphas_indices = surface.get_urls_for_rendering(True, None)

        template_specification = dict()
        template_specification['urlVerticesPick'] = json.dumps(url_vertices_pick)
        template_specification['urlTrianglesPick'] = json.dumps(url_triangles_pick)
        template_specification['urlNormalsPick'] = json.dumps(url_normals_pick)
        template_specification['urlVertices'] = json.dumps(url_vertices)
        template_specification['urlTriangles'] = json.dumps(url_triangles)
        template_specification['urlNormals'] = json.dumps(url_normals)
        template_specification['alphas'] = json.dumps(alphas)
        template_specification['alphas_indices'] = json.dumps(alphas_indices)
        template_specification['brainCenter'] = json.dumps(surface.center())
        return template_specification
Example No. 28
    def _import(self,
                import_file_path=None,
                expected_result_class=StructuralMRIIndex,
                connectivity=None):
        """
        This method is used for importing data in NIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.nifti_importer', 'NIFTIImporter')

        form = NIFTIImporterForm()
        form.fill_from_post({
            '_data_file': Part(import_file_path, HeaderMap({}), ''),
            '_apply_corrections': 'True',
            '_connectivity': connectivity,
            '_mappings_file': Part(self.TXT_FILE, HeaderMap({}), ''),
            '_Data_Subject': 'bla bla'
        })
        form.data_file.data = import_file_path
        form.mappings_file.data = self.TXT_FILE
        importer.submit_form(form)

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **form.get_dict())

        dts, count = dao.get_values_of_datatype(self.test_project.id, expected_result_class, None)
        assert 1 == count, "Project should contain only one data type."

        result = ABCAdapter.load_entity_by_gid(dts[0][2])
        assert result is not None, "Result should not be none"
        return result
    def setUp(self):
        """
        Reset the database before each test;
        creates a test user, a test project, a connectivity;
        sets context model parameters and a Generic2dOscillator as a default model
        """
        self.flow_service = FlowService()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.default_model = models_module.Generic2dOscillator()

        all_connectivities = self.flow_service.get_available_datatypes(
            self.test_project.id, Connectivity)
        self.connectivity = ABCAdapter.load_entity_by_gid(
            all_connectivities[0][2])
        self.connectivity.number_of_regions = 74
        self.context_model_param = ContextModelParameters(
            self.connectivity, self.default_model)
Example No. 30
    def _review_operation_inputs(self, operation_gid):
        """
        :returns: A list of DataTypes that are used as input parameters for the specified operation,
                 and a dictionary with all operation parameters that differ from the default ones.
        """
        operation = dao.get_operation_by_gid(operation_gid)
        parameters = json.loads(operation.parameters)
        try:
            adapter = ABCAdapter.build_adapter(operation.algorithm)
            return adapter.review_operation_inputs(parameters)

        except Exception:
            self.logger.exception("Could not load details for operation %s" % operation_gid)
            inputs_datatypes = []
            changed_parameters = dict(Warning="Algorithm changed dramatically. We can not offer more details")
            for submit_param in parameters.values():
                self.logger.debug("Searching DT by GID %s" % submit_param)
                datatype = ABCAdapter.load_entity_by_gid(str(submit_param))
                if datatype is not None:
                    inputs_datatypes.append(datatype)
            return inputs_datatypes, changed_parameters
Example No. 31
    def _get_all_data_types_arr(self, data):
        """
        This method builds an array with all data types to be processed later.
        - If the current data is a simple data type, it is added to the array.
        - If it is a data type group, all its children are loaded and added to the array.
        """
        # first check if current data is a DataTypeGroup
        if self.is_data_a_group(data):
            data_types = ProjectService.get_datatypes_from_datatype_group(
                data.id)

            result = []
            if data_types is not None and len(data_types) > 0:
                for data_type in data_types:
                    entity = ABCAdapter.load_entity_by_gid(data_type.gid)
                    result.append(entity)

            return result

        else:
            return [data]
Example No. 32
    def _review_operation_inputs(self, operation_gid):
        """
        :returns: A list of DataTypes that are used as input parameters for the specified operation,
                 and a dictionary with all operation parameters that differ from the default ones.
        """
        operation = dao.get_operation_by_gid(operation_gid)
        parameters = json.loads(operation.parameters)
        try:
            adapter = ABCAdapter.build_adapter(operation.algorithm)
            return adapter.review_operation_inputs(parameters)

        except IntrospectionException:
            self.logger.warning("Could not find adapter class for operation %s" % operation_gid)
            inputs_datatypes = []
            changed_parameters = dict(Warning="Algorithm was Removed. We can not offer more details")
            for submit_param in parameters.values():
                self.logger.debug("Searching DT by GID %s" % submit_param)
                datatype = ABCAdapter.load_entity_by_gid(str(submit_param))
                if datatype is not None:
                    inputs_datatypes.append(datatype)
            return inputs_datatypes, changed_parameters
Example No. 33
    def import_projection_matrix(user, project, file_path, sensors_gid, surface_gid):
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.projection_matrix_importer',
            'ProjectionMatrixSurfaceEEGImporter')

        form = ProjectionMatrixImporterForm()

        form.fill_from_post({
            'projection_file': Part(file_path, HeaderMap({}), ''),
            'dataset_name': 'ProjectionMatrix',
            'sensors': sensors_gid,
            'surface': surface_gid,
            'Data_Subject': 'John Doe'
        })
        form.projection_file.data = file_path
        view_model = form.get_view_model()()
        form.fill_trait(view_model)
        importer.submit_form(form)

        FlowService().fire_operation(importer,
                                     user,
                                     project.id,
                                     view_model=view_model)

        data_types = FlowService().get_available_datatypes(project.id, ProjectionMatrixIndex)[0]
        assert 1 == len(data_types), "Project should contain only one data type = Projection Matrix."

        projection_matrix = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert projection_matrix is not None, "Projection Matrix instance should not be none"

        return projection_matrix
 def view_stimulus(self, focal_points):
     """
      Just create the stimulus to view the actual data; don't store it in the DB.
      Hold the entity in the session without the surface, so the next time the
      data is needed it can be taken from there.
     """
     try:
         context = base.get_from_session(KEY_SURFACE_CONTEXT)
         context.set_focal_points(focal_points)
         kwargs = copy.deepcopy(context.equation_kwargs)
         surface_stimulus_creator = self.get_creator_and_interface(SURFACE_STIMULUS_CREATOR_MODULE,
                                                                   SURFACE_STIMULUS_CREATOR_CLASS,
                                                                   StimuliSurface())[0]
         min_time = float(kwargs.get('min_tmp_x', 0))
         max_time = float(kwargs.get('max_tmp_x', 100))
         kwargs = surface_stimulus_creator.prepare_ui_inputs(kwargs)
         stimulus = surface_stimulus_creator.launch(**kwargs)
         surface_gid = base.get_from_session(PARAM_SURFACE)
         surface = ABCAdapter.load_entity_by_gid(surface_gid)
         stimulus.surface = surface
         stimulus.configure_space()
         time = numpy.arange(min_time, max_time, 1)
         time = time[numpy.newaxis, :]
         stimulus.configure_time(time)
         data = []
         max_value = numpy.max(stimulus())
         min_value = numpy.min(stimulus())
         for i in range(min(CHUNK_SIZE, stimulus.temporal_pattern.shape[1])):
             step_data = stimulus(i).tolist()
             data.append(step_data)
         stimulus.surface = surface.gid
         base.add2session(KEY_STIMULUS, stimulus)
         result = {'status': 'ok', 'max': max_value, 'min': min_value,
                   'data': data, "time_min": min_time, "time_max": max_time, "chunk_size": CHUNK_SIZE}
         return result
     except (NameError, ValueError, SyntaxError):
         return {'status': 'error',
                 'errorMsg': "Could not generate stimulus data. Some of the parameters hold invalid characters."}
     except Exception, ex:
         return {'allSeries': 'error', 'errorMsg': ex.message}
Example No. 35
    def import_surface_gifti(user, project, path):
        """
        This method is used for importing data in GIFTI format
        :param path: absolute path of the file to be imported
        """

        # Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.gifti_surface_importer',
            'GIFTISurfaceImporter')

        form = GIFTISurfaceImporterForm()
        form.fill_from_post({
            'file_type': form.get_view_model().KEY_OPTION_READ_METADATA,
            'data_file': Part(path, HeaderMap({}), ''),
            'data_file_part2': Part('', HeaderMap({}), ''),
            'should_center': 'False',
            'Data_Subject': 'John Doe',
        })
        form.data_file.data = path
        view_model = form.get_view_model()()
        form.fill_trait(view_model)
        importer.submit_form(form)

        # Launch import Operation
        FlowService().fire_operation(importer,
                                     user,
                                     project.id,
                                     view_model=view_model)

        surface = CorticalSurface
        data_types = FlowService().get_available_datatypes(
            project.id, surface.__module__ + "." + surface.__name__)[0]
        assert 1 == len(data_types), "Project should contain only one data type."

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert surface is not None, "Surface should not be none"

        return surface
Example No. 36
 def view_stimulus(self, focal_points):
     """
      Just create the stimulus to view the actual data; don't store it in the DB.
      Hold the entity in the session without the surface, so the next time the
      data is needed it can be taken from there.
     """
     try:
         context = common.get_from_session(KEY_SURFACE_CONTEXT)
         context.set_focal_points(focal_points)
         kwargs = copy.deepcopy(context.equation_kwargs)
         surface_stimulus_creator = self.get_creator_and_interface(SURFACE_STIMULUS_CREATOR_MODULE,
                                                                   SURFACE_STIMULUS_CREATOR_CLASS,
                                                                   StimuliSurface())[0]
         min_time = float(kwargs.get('min_tmp_x', 0))
         max_time = float(kwargs.get('max_tmp_x', 100))
         kwargs = surface_stimulus_creator.prepare_ui_inputs(kwargs)
         stimulus = surface_stimulus_creator.launch(**kwargs)
         surface_gid = common.get_from_session(PARAM_SURFACE)
         surface = ABCAdapter.load_entity_by_gid(surface_gid)
         stimulus.surface = surface
         stimulus.configure_space()
         time = numpy.arange(min_time, max_time, 1)
         time = time[numpy.newaxis, :]
         stimulus.configure_time(time)
         data = []
         max_value = numpy.max(stimulus())
         min_value = numpy.min(stimulus())
         for i in range(min(CHUNK_SIZE, stimulus.temporal_pattern.shape[1])):
             step_data = stimulus(i).tolist()
             data.append(step_data)
         stimulus.surface = surface.gid
         common.add2session(KEY_STIMULUS, stimulus)
         result = {'status': 'ok', 'max': max_value, 'min': min_value,
                   'data': data, "time_min": min_time, "time_max": max_time, "chunk_size": CHUNK_SIZE}
         return result
     except (NameError, ValueError, SyntaxError):
         return {'status': 'error',
                 'errorMsg': "Could not generate stimulus data. Some of the parameters hold invalid characters."}
     except Exception, ex:
         return {'allSeries': 'error', 'errorMsg': ex.message}
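
The numpy.newaxis step above reshapes the 1-D time vector into the 2-D layout the stimulus pattern expects; in isolation, with the same hypothetical 0-100 range:

    import numpy

    time = numpy.arange(0.0, 100.0, 1.0)   # shape (100,)
    time = time[numpy.newaxis, :]          # shape (1, 100): one temporal row
    print(time.shape)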
    def get_template_from_context(self):
        """
        Return the parameters for the local connectivity in case one is stored in context. Load the entity
        and use it to populate the defaults from the interface accordingly.
        """
        context = common.get_from_session(KEY_LCONN_CONTEXT)
        selected_local_conn = ABCAdapter.load_entity_by_gid(context.selected_entity)
        cutoff = selected_local_conn.cutoff
        equation = selected_local_conn.equation
        surface = selected_local_conn.surface

        default_dict = {"surface": surface.gid, "cutoff": cutoff}
        if equation is not None:
            equation_type = equation.__class__.__name__
            default_dict["equation"] = equation_type
            for param in equation.parameters:
                prepared_name = "equation_parameters_option_" + str(equation_type)
                prepared_name = prepared_name + "_parameters_parameters_" + str(param)
                default_dict[prepared_name] = equation.parameters[param]
        else:
            msg = "There is no equation specified for this local connectivity. "
            msg += "The default equation is displayed into the spatial field."
            self.logger.warning(msg)
            common.set_info_message(msg)

        default_dict[DataTypeMetaData.KEY_TAG_1] = selected_local_conn.user_tag_1

        input_list = self.get_creator_and_interface(
            LOCAL_CONN_CREATOR_MODULE, LOCAL_CONN_CREATOR_CLASS, LocalConnectivity(), lock_midpoint_for_eq=[1]
        )[1]
        input_list = self._add_extra_fields_to_interface(input_list)
        input_list = ABCAdapter.fill_defaults(input_list, default_dict)

        template_specification = {
            "inputList": input_list,
            common.KEY_PARAMETERS_CONFIG: False,
            "equationViewerUrl": "/spatial/localconnectivity/get_equation_chart",
            "equationsPrefixes": json.dumps(self.plotted_equations_prefixes),
        }
        return template_specification
    def _import(self, import_file_path, sensors_type, expected_data):
        """
        This method is used for importing sensors
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance 
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.sensors_importer', 'Sensors_Importer')

        args = {'sensors_file': import_file_path, 'sensors_type': sensors_type}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           expected_data.module + "." + expected_data.type)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type = Sensors.")

        time_series = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(time_series is not None, "Sensors instance should not be none")

        return time_series
Example No. 39
    def _import(self, import_file_path, sensors_type, expected_data):
        """
        This method is used for importing sensors
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance 
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.sensors_importer', 'Sensors_Importer')

        args = {'sensors_file': import_file_path, 'sensors_type': sensors_type}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           expected_data.module + "." + expected_data.type)[0]
        assert 1 == len(data_types), "Project should contain only one data type = Sensors."

        time_series = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert time_series is not None, "Sensors instance should not be none"

        return time_series
    def display_surface(surface_gid, region_mapping_gid=None):
        """
        Generates the HTML for displaying the surface with the given ID.
        """
        surface = ABCAdapter.load_entity_by_gid(surface_gid)
        common.add2session(PARAM_SURFACE, surface_gid)
        surface_h5 = h5.h5_file_for_index(surface)
        url_vertices_pick, url_normals_pick, url_triangles_pick = SurfaceURLGenerator.get_urls_for_pick_rendering(
            surface_h5)
        url_vertices, url_normals, _, url_triangles, _ = SurfaceURLGenerator.get_urls_for_rendering(
            surface_h5, region_mapping_gid)
        surface_h5.close()

        return {
            'urlVerticesPick': json.dumps(url_vertices_pick),
            'urlTrianglesPick': json.dumps(url_triangles_pick),
            'urlNormalsPick': json.dumps(url_normals_pick),
            'urlVertices': json.dumps(url_vertices),
            'urlTriangles': json.dumps(url_triangles),
            'urlNormals': json.dumps(url_normals),
            'brainCenter': json.dumps(surface_h5.center())
        }
Example No. 41
 def _create_value_wrapper(test_user, test_project=None):
     """
     Creates a ValueWrapper dataType, and the associated parent Operation.
     This is also used in ProjectStructureTest.
     """
     if test_project is None:
         test_project = TestFactory.create_project(test_user, 'test_proj')
     operation = TestFactory.create_operation(test_user=test_user, test_project=test_project)
     value_wrapper = ValueWrapper(data_value=5.0, data_name="my_value")
     value_wrapper.type = "ValueWrapper"
     value_wrapper.module = "tvb.datatypes.mapped_values"
     value_wrapper.subject = "John Doe"
     value_wrapper.state = "RAW_STATE"
     value_wrapper.set_operation_id(operation.id)
     adapter_instance = StoreAdapter([value_wrapper])
     OperationService().initiate_prelaunch(operation, adapter_instance, {})
     all_value_wrappers = FlowService().get_available_datatypes(test_project.id,
                                                                "tvb.datatypes.mapped_values.ValueWrapper")[0]
     if len(all_value_wrappers) != 1:
         raise Exception("Should be only one value wrapper.")
     result_vw = ABCAdapter.load_entity_by_gid(all_value_wrappers[0][2])
     return test_project, result_vw.gid, operation.gid
Example No. 42
 def _create_value_wrapper(test_user, test_project=None):
     """
     Creates a ValueWrapper dataType, and the associated parent Operation.
     This is also used in ProjectStructureTest.
     """
     if test_project is None:
         test_project = TestFactory.create_project(test_user, 'test_proj')
     operation = TestFactory.create_operation(test_user=test_user, test_project=test_project)
     value_wrapper = ValueWrapper(data_value=5.0, data_name="my_value")
     value_wrapper.type = "ValueWrapper"
     value_wrapper.module = "tvb.datatypes.mapped_values"
     value_wrapper.subject = "John Doe"
     value_wrapper.state = "RAW_STATE"
     value_wrapper.set_operation_id(operation.id)
     adapter_instance = StoreAdapter([value_wrapper])
     OperationService().initiate_prelaunch(operation, adapter_instance, {})
     all_value_wrappers = FlowService().get_available_datatypes(test_project.id,
                                                                "tvb.datatypes.mapped_values.ValueWrapper")[0]
     if len(all_value_wrappers) != 1:
         raise Exception("Should be only one value wrapper.")
     result_vw = ABCAdapter.load_entity_by_gid(all_value_wrappers[0][2])
     return test_project, result_vw.gid, operation.gid
Example No. 43
    def set_connectivity(self, **data):
        session_stored_simulator = common.get_from_session(common.KEY_SIMULATOR_CONFIG)
        is_simulator_copy = common.get_from_session(common.KEY_IS_SIMULATOR_COPY)
        is_simulator_load = common.get_from_session(common.KEY_IS_SIMULATOR_LOAD)
        form = SimulatorAdapterForm()

        if cherrypy.request.method == 'POST':
            is_simulator_copy = False
            form.fill_from_post(data)

            connectivity_index_gid = form.connectivity.value
            conduction_speed = form.conduction_speed.value
            coupling = form.coupling.value

            connectivity_index = ABCAdapter.load_entity_by_gid(connectivity_index_gid)
            connectivity = h5.load_from_index(connectivity_index)

            # TODO: handle this cases in a better manner
            session_stored_simulator.connectivity = connectivity
            session_stored_simulator.conduction_speed = conduction_speed
            session_stored_simulator.coupling = coupling()

        next_form = get_form_for_coupling(type(session_stored_simulator.coupling))()
        self.range_parameters.coupling_parameters = next_form.get_range_parameters()
        next_form.fill_from_trait(session_stored_simulator.coupling)

        dict_to_render = copy.deepcopy(self.dict_to_render)
        dict_to_render[self.FORM_KEY] = next_form
        dict_to_render[self.ACTION_KEY] = '/burst/set_coupling_params'
        dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_connectivity'
        dict_to_render[self.IS_COPY] = is_simulator_copy
        dict_to_render[self.IS_LOAD] = is_simulator_load
        return dict_to_render
Example No. 44
    def step_2(self):
        """
        Generate the required template dictionary for the second step.
        """
        current_region_stimulus = common.get_from_session(KEY_REGION_STIMULUS)
        region_stim_selector_form = StimulusRegionSelectorForm(common.get_current_project().id)
        region_stim_selector_form.region_stimulus.data = current_region_stimulus.gid.hex
        region_stim_selector_form.display_name.data = common.get_from_session(KEY_REGION_STIMULUS_NAME)

        template_specification = dict(title="Spatio temporal - Region stimulus")
        template_specification['mainContent'] = 'spatial/stimulus_region_step2_main'
        template_specification['next_step_url'] = '/spatial/stimulus/region/step_2_submit'
        template_specification['regionStimSelectorForm'] = self.render_adapter_form(region_stim_selector_form)

        default_weights = current_region_stimulus.weight
        if len(default_weights) == 0:
            selected_connectivity = ABCAdapter.load_entity_by_gid(current_region_stimulus.connectivity.hex)
            if selected_connectivity is None:
                common.set_error_message(self.MSG_MISSING_CONNECTIVITY)
                default_weights = numpy.array([])
            else:
                default_weights = StimuliRegion.get_default_weights(selected_connectivity.number_of_regions)

        template_specification['node_weights'] = json.dumps(default_weights.tolist())
        template_specification[common.KEY_PARAMETERS_CONFIG] = False
        template_specification.update(self.display_connectivity(current_region_stimulus.connectivity.hex))
        return self.fill_default_attributes(template_specification)
Example No. 45
    def import_surface_zip(user, project, zip_path, surface_type, zero_based='True'):
        # Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.zip_surface_importer',
            'ZIPSurfaceImporter')

        form = ZIPSurfaceImporterForm()
        form.fill_from_post({
            'uploaded': Part(zip_path, HeaderMap({}), ''),
            'zero_based_triangles': zero_based,
            'should_center': 'True',
            'surface_type': surface_type,
            'Data_Subject': 'John Doe'
        })
        form.uploaded.data = zip_path
        view_model = form.get_view_model()()
        form.fill_trait(view_model)
        importer.submit_form(form)

        # Launch import Operation
        FlowService().fire_operation(importer, user, project.id, view_model=view_model)

        data_types = FlowService().get_available_datatypes(project.id, SurfaceIndex)[0]
        assert 1 == len(data_types), "Project should contain only one data type."

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        surface.user_tag_3 = ''
        assert surface is not None, "Surface should not be None"
        return surface
Example No. 46
    def _importSurface(self, import_file_path=None):
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.obj_importer', 'ObjSurfaceImporter')

        args = {
            'data_file': import_file_path,
            "surface_type": FACE,
            DataTypeMetaData.KEY_SUBJECT: "John"
        }

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(
            self.test_project.id, FaceSurface)[0]
        self.assertEqual(1, len(data_types),
                         "Project should contain only one data type.")

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(surface is not None, "Surface should not be None")
        return surface
Example #47
    def compute_data_for_gradient_view(self, local_connectivity_gid, selected_triangle):
        """
        When the user loads an existent local connectivity and he picks a vertex from the used surface, this
        method computes the data needed for drawing a gradient view corresponding to that vertex.

        Returns a json which contains the data needed for drawing a gradient view for the selected vertex.
        """
        selected_local_conn = ABCAdapter.load_entity_by_gid(local_connectivity_gid)
        surface = selected_local_conn.surface
        triangle_index = int(selected_triangle)
        vertex_index = int(surface.triangles[triangle_index][0])
        picked_data = list(selected_local_conn.matrix[vertex_index].toarray().squeeze())

        result = []
        if surface.number_of_split_slices <= 1:
            result.append(picked_data)
        else:
            for slice_number in range(surface.number_of_split_slices):
                start_idx, end_idx = surface.get_slice_vertex_boundaries(slice_number)
                result.append(picked_data[start_idx:end_idx])

        result = {'data': json.dumps(result)}
        return result
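The slice handling above only partitions one flat per-vertex array by per-slice vertex boundaries; a self-contained sketch of that partitioning (the boundary pairs below are made up, the real ones come from surface.get_slice_vertex_boundaries):

    # Standalone sketch of the per-slice partitioning; boundaries are illustrative.
    picked_data = list(range(10))                 # one value per vertex
    slice_boundaries = [(0, 4), (4, 7), (7, 10)]  # (start_idx, end_idx) per slice
    result = [picked_data[start:end] for start, end in slice_boundaries]
    assert result == [[0, 1, 2, 3], [4, 5, 6], [7, 8, 9]]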
Example #48
    def import_surface_obj(user, project, obj_path, surface_type):
        # Retrieve Adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.obj_importer', 'ObjSurfaceImporter')

        form = ObjSurfaceImporterForm()
        form.fill_from_post({'_data_file': Part(obj_path, HeaderMap({}), ''),
                             '_surface_type': surface_type,
                             '_Data_Subject': 'John Doe'
                             })
        form.data_file.data = obj_path
        view_model = form.get_view_model()()
        form.fill_trait(view_model)
        importer.submit_form(form)

        # Launch import Operation
        FlowService().fire_operation(importer, user, project.id, view_model=view_model)

        data_types = FlowService().get_available_datatypes(project.id, SurfaceIndex)[0]
        assert 1 == len(data_types), "Project should contain only one data type."

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert surface is not None, "Surface should not be None"
        return surface
Example #49
    def test_remove_used_surface(self):
        """
        Tries to remove an used surface
        """
        mapping, mapping_count = self.flow_service.get_available_datatypes(
            self.test_project.id, RegionMapping)
        assert 1 == mapping_count, "There should be one Mapping."
        mapping_gid = mapping[0][2]
        mapping = ABCAdapter.load_entity_by_gid(mapping_gid)
        surface = dao.get_datatype_by_gid(mapping.surface.gid)
        assert surface.gid == mapping.surface.gid, "The surfaces should have the same GID"
        try:
            self.project_service.remove_datatype(self.test_project.id,
                                                 surface.gid)
            raise AssertionError("Removal should have failed; the surface is still used by a RegionMapping: " +
                                 str(surface.gid))
        except RemoveDataTypeException:
            #OK, do nothing
            pass

        res = dao.get_datatype_by_gid(surface.gid)
        assert surface.id == res.id, "A used surface was deleted"
Example #50
    def _importSurface(self, import_file_path=None):
        """
        This method is used for importing data in GIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance 
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.gifti_surface_importer', 'GIFTISurfaceImporter')

        args = {'data_file': import_file_path, DataTypeMetaData.KEY_SUBJECT: ""}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        surface = CorticalSurface()
        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           surface.module + "." + surface.type)[0]
        assert 1 == len(data_types), "Project should contain only one data type."

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert surface is not None, "Surface should not be None"

        return surface
Example #51
    def get_template_from_context(self):
        """
        Return the parameters for the local connectivity in case one is stored in context. Load the entity
        and use it to populate the defaults from the interface accordingly.
        """
        context = common.get_from_session(KEY_LCONN_CONTEXT)
        selected_local_conn = ABCAdapter.load_entity_by_gid(context.selected_entity)
        cutoff = selected_local_conn.cutoff
        equation = selected_local_conn.equation
        surface = selected_local_conn.surface

        default_dict = {'surface': surface.gid, 'cutoff': cutoff}
        if equation is not None:
            equation_type = equation.__class__.__name__
            default_dict['equation'] = equation_type
            for param in equation.parameters:
                prepared_name = 'equation_parameters_option_' + str(equation_type)
                prepared_name = prepared_name + '_parameters_parameters_' + str(param)
                default_dict[prepared_name] = equation.parameters[param]
        else:
            msg = "There is no equation specified for this local connectivity. "
            msg += "The default equation is displayed into the spatial field."
            self.logger.warning(msg)
            common.set_info_message(msg)

        default_dict[DataTypeMetaData.KEY_TAG_1] = selected_local_conn.user_tag_1

        input_list = self.get_creator_and_interface(LOCAL_CONN_CREATOR_MODULE,
                                                    LOCAL_CONN_CREATOR_CLASS, LocalConnectivity(),
                                                    lock_midpoint_for_eq=[1])[1]
        input_list = self._add_extra_fields_to_interface(input_list)
        input_list = InputTreeManager.fill_defaults(input_list, default_dict)

        template_specification = {'inputList': input_list, common.KEY_PARAMETERS_CONFIG: False,
                                  'equationViewerUrl': '/spatial/localconnectivity/get_equation_chart',
                                  'equationsPrefixes': json.dumps(self.plotted_equations_prefixes)}
        return template_specification
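The prepared_name strings built above are plain concatenations of fixed prefixes with the equation type and parameter name; with a hypothetical Gaussian equation and a sigma parameter the result would be:

    # Name-building sketch; 'Gaussian' and 'sigma' are illustrative values only.
    equation_type, param = 'Gaussian', 'sigma'
    prepared_name = ('equation_parameters_option_' + str(equation_type) +
                     '_parameters_parameters_' + str(param))
    assert prepared_name == 'equation_parameters_option_Gaussian_parameters_parameters_sigma'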
Example #52
def _review_operation_inputs_for_adapter_model(form_fields, form_model,
                                               view_model):
    changed_attr = {}
    inputs_datatypes = []

    for field in form_fields:

        if not hasattr(view_model, field.name):
            continue
        attr_vm = getattr(view_model, field.name)
        if type(field) == TraitUploadField:
            attr_vm = os.path.basename(attr_vm)

        if isinstance(field, TraitDataTypeSelectField):
            data_type = None
            if attr_vm:
                data_type = ABCAdapter.load_entity_by_gid(attr_vm)
                changed_attr[field.label] = data_type.display_name if data_type else "None"
            inputs_datatypes.append(data_type)
        else:
            attr_default = None
            if hasattr(form_model, field.name):
                attr_default = getattr(form_model, field.name)
            if attr_vm != attr_default:
                if isinstance(attr_vm, (float, int, str)):
                    changed_attr[field.label] = attr_vm
                elif isinstance(attr_vm, (tuple, list)):
                    changed_attr[field.label] = ', '.join(str(sub_attr) for sub_attr in attr_vm)
                else:
                    # All HasTraits instances will show as being different than default, even if the same!! Is this ok?
                    changed_attr[field.label] = str(attr_vm)

    return inputs_datatypes, changed_attr
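The non-datatype branch boils down to diffing the submitted view model against the form defaults; a minimal standalone sketch of that idea, with made-up classes and field names:

# Minimal diff-against-defaults sketch; classes and fields are hypothetical.
class FormDefaults:
    noise_seed = 42
    title = 'untitled'

class SubmittedModel:
    noise_seed = 42
    title = 'my simulation'

changed_attr = {name: getattr(SubmittedModel, name)
                for name in ('noise_seed', 'title')
                if getattr(SubmittedModel, name) != getattr(FormDefaults, name, None)}
assert changed_attr == {'title': 'my simulation'}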
Example #53
    def set_stimulus(self, **data):
        session_stored_simulator = common.get_from_session(common.KEY_SIMULATOR_CONFIG)
        is_simulator_copy = common.get_from_session(common.KEY_IS_SIMULATOR_COPY)
        is_simulator_load = common.get_from_session(common.KEY_IS_SIMULATOR_LOAD)

        if cherrypy.request.method == 'POST':
            is_simulator_copy = False
            stimuli_fragment = SimulatorStimulusFragment(
                '',
                common.get_current_project().id,
                session_stored_simulator.is_surface_simulation)
            stimuli_fragment.fill_from_post(data)
            stimulus_gid = stimuli_fragment.stimulus.value
            if stimulus_gid is not None:
                stimulus_index = ABCAdapter.load_entity_by_gid(stimulus_gid)
                stimulus = h5.load_from_index(stimulus_index)
                session_stored_simulator.stimulus = stimulus

        model_fragment = SimulatorModelFragment('', common.get_current_project().id)
        model_fragment.fill_from_trait(session_stored_simulator)

        dict_to_render = copy.deepcopy(self.dict_to_render)
        dict_to_render[self.FORM_KEY] = model_fragment
        dict_to_render[self.ACTION_KEY] = '/burst/set_model'
        dict_to_render[self.PREVIOUS_ACTION_KEY] = '/burst/set_stimulus'
        dict_to_render[self.IS_MODEL_FRAGMENT_KEY] = True
        dict_to_render[self.IS_COPY] = is_simulator_copy
        dict_to_render[self.IS_LOAD] = is_simulator_load
        dict_to_render[self.IS_SURFACE_SIMULATION_KEY] = session_stored_simulator.is_surface_simulation
        return dict_to_render
Example #54
    def _import(self, import_file_path=None):
        """
        This method is used for importing data in NIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance 
        group = dao.find_group('tvb.adapters.uploaders.nifti_importer', 'NIFTIImporter')
        importer = ABCAdapter.build_adapter(group)
        args = {'data_file': import_file_path, DataTypeMetaData.KEY_SUBJECT: "bla bla",
                'apply_corrections': False, 'connectivity': None}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        time_series = TimeSeries()
        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           time_series.module + "." + time_series.type)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type.")

        time_series = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(time_series is not None, "TimeSeries should not be none")

        return time_series
Example #55
    def test_import_nii_without_time_dimension(self):
        """
        This method tests import of a NIFTI file.
        """
        structural_mri_index = self._import(self.NII_FILE)
        assert "T1" == structural_mri_index.weighting

        structural_mri = h5.load_from_index(structural_mri_index)

        data_shape = structural_mri.array_data.shape
        assert 3 == len(data_shape)
        assert 64 == data_shape[0]
        assert 64 == data_shape[1]
        assert 10 == data_shape[2]

        volume_index = ABCAdapter.load_entity_by_gid(
            structural_mri_index.volume_gid)
        assert volume_index is not None

        volume = h5.load_from_index(volume_index)

        assert numpy.equal(self.DEFAULT_ORIGIN, volume.origin).all()
        assert numpy.equal([3.0, 3.0, 3.0], volume.voxel_size).all()
        assert self.UNKNOWN_STR == volume.voxel_unit
Example #56
    def step_2(self, **kwargs):
        """
        Generate the html for the second step of the local connectivity page.
        :param kwargs: not actually used, but parameters are still submitted from the UI
            since we just use the same js function for this.
        """
        current_lconn = common.get_from_session(KEY_LCONN)
        left_side_form = LocalConnectivitySelectorForm(project_id=common.get_current_project().id)
        if current_lconn is not None:
            left_side_form.existentEntitiesSelect.data = current_lconn.gid.hex
        template_specification = dict(title="Surface - Local Connectivity")
        template_specification['mainContent'] = 'spatial/local_connectivity_step2_main'
        template_specification['existentEntitiesInputList'] = self.render_adapter_form(left_side_form)
        template_specification['loadExistentEntityUrl'] = LOAD_EXISTING_URL
        template_specification['resetToDefaultUrl'] = RELOAD_DEFAULT_PAGE_URL
        template_specification['next_step_url'] = '/spatial/localconnectivity/step_1'
        msg, _ = common.get_message_from_session()
        template_specification['displayedMessage'] = msg
        if current_lconn is not None:
            selected_local_conn = ABCAdapter.load_entity_by_gid(current_lconn.gid.hex)
            template_specification.update(self.display_surface(selected_local_conn.fk_surface_gid))
            template_specification['no_local_connectivity'] = False
            template_specification['minValue'] = selected_local_conn.matrix_non_zero_min
            template_specification['maxValue'] = selected_local_conn.matrix_non_zero_max
        else:
            template_specification['no_local_connectivity'] = True
        template_specification[common.KEY_PARAMETERS_CONFIG] = False
        return self.fill_default_attributes(template_specification)
Example #57
    def _importSurface(self, import_file_path=None):
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.zip_surface_importer',
            'ZIPSurfaceImporter')
        args = {
            'uploaded': import_file_path,
            'surface_type': OUTER_SKULL,
            'zero_based_triangles': True,
            DataTypeMetaData.KEY_SUBJECT: "John"
        }

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(
            self.test_project.id, SkullSkin)[0]
        assert 1 == len(data_types), "Project should contain only one data type."

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert surface is not None, "Surface should not be None"
        return surface
Example #58
    def load_surface_stimulus(self, surface_stimulus_gid, from_step):
        """
        Loads the interface for the selected surface stimulus.
        """
        context = base.get_from_session(KEY_SURFACE_CONTEXT)
        selected_surface_stimulus = ABCAdapter.load_entity_by_gid(surface_stimulus_gid)
        temporal_eq = selected_surface_stimulus.temporal
        spatial_eq = selected_surface_stimulus.spatial
        surface = selected_surface_stimulus.surface
        focal_points_surface = selected_surface_stimulus.focal_points_surface
        focal_points_triangles = selected_surface_stimulus.focal_points_triangles

        temporal_eq_type = temporal_eq.__class__.__name__
        spatial_eq_type = spatial_eq.__class__.__name__
        default_dict = {'temporal': temporal_eq_type, 'spatial': spatial_eq_type,
                        'surface': surface.gid, 'focal_points_surface': json.dumps(focal_points_surface),
                        'focal_points_triangles': json.dumps(focal_points_triangles)}
        for param in temporal_eq.parameters:
            prepared_name = 'temporal_parameters_option_' + str(temporal_eq_type)
            prepared_name = prepared_name + '_parameters_parameters_' + str(param)
            default_dict[prepared_name] = str(temporal_eq.parameters[param])
        for param in spatial_eq.parameters:
            prepared_name = 'spatial_parameters_option_' + str(spatial_eq_type)
            prepared_name = prepared_name + '_parameters_parameters_' + str(param)
            default_dict[prepared_name] = str(spatial_eq.parameters[param])

        default_dict[DataTypeMetaData.KEY_TAG_1] = selected_surface_stimulus.user_tag_1

        input_list = self.get_creator_and_interface(SURFACE_STIMULUS_CREATOR_MODULE,
                                                    SURFACE_STIMULUS_CREATOR_CLASS, StimuliSurface(),
                                                    lock_midpoint_for_eq=[1])[1]
        input_list = ABCAdapter.fill_defaults(input_list, default_dict)
        context.reset()
        context.update_from_interface(input_list)
        context.equation_kwargs[DataTypeMetaData.KEY_TAG_1] = selected_surface_stimulus.user_tag_1
        context.set_active_stimulus(surface_stimulus_gid)
        return self.do_step(from_step)
Example #59
    def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
        """
        Return a Tuple with Project entity and Operations for current Project.
        :param project_id: Current Project Identifier
        :param applied_filters: Filters to apply on Operations
        :param current_page: Number for current page in operations
        """
        selected_project = self.find_project(project_id)
        total_filtered = self.count_filtered_operations(project_id, applied_filters)
        pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
        total_ops_nr = self.count_filtered_operations(project_id)

        start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
        current_ops = dao.get_filtered_operations(project_id, applied_filters, start_idx, OPERATIONS_PAGE_SIZE)
        if current_ops is None:
            return selected_project, 0, [], 0

        operations = []
        view_categ_id = dao.get_visualisers_categories()[0].id
        for one_op in current_ops:
            try:
                result = {}
                if one_op[0] != one_op[1]:
                    result["id"] = str(one_op[0]) + "-" + str(one_op[1])
                else:
                    result["id"] = str(one_op[0])
                burst = dao.get_burst_for_operation_id(one_op[0])
                result["burst_name"] = burst.name if burst else '-'
                result["count"] = one_op[2]
                result["gid"] = one_op[13]
                if one_op[3]:
                    try:
                        operation_group = dao.get_generic_entity(OperationGroup, one_op[3])[0]
                        result["group"] = operation_group.name
                        result["group"] = result["group"].replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                        result["datatype_group_gid"] = datatype_group.gid
                        result["gid"] = operation_group.gid
                        ## Filter only viewers for current DataTypeGroup entity:
                        result["view_groups"] = FlowService().get_visualizers_for_group(datatype_group.gid)
                    except Exception:
                        self.logger.exception("We will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["user"] = dao.get_user_by_id(one_op[5])
                if type(one_op[6]) is str:
                    result["create"] = string2date(str(one_op[6]))
                else:
                    result["create"] = one_op[6]
                if type(one_op[7]) is str:
                    result["start"] = string2date(str(one_op[7]))
                else:
                    result["start"] = one_op[7]
                if type(one_op[8]) is str:
                    result["complete"] = string2date(str(one_op[8]))
                else:
                    result["complete"] = one_op[8]

                if result["complete"] is not None and result["start"] is not None:
                    result["duration"] = format_timedelta(result["complete"] - result["start"])
                result["status"] = one_op[9]
                result["additional"] = one_op[10]
                result["visible"] = True if one_op[11] > 0 else False
                result['operation_tag'] = one_op[12]
                result['figures'] = None
                if not result['group']:
                    datatype_results = dao.get_results_for_operation(result['id'])
                    result['results'] = []
                    for dt in datatype_results:
                        dt_loaded = ABCAdapter.load_entity_by_gid(dt.gid)
                        if dt_loaded:
                            result['results'].append(dt_loaded)
                        else:
                            self.logger.warning("Could not retrieve datatype %s" % str(dt))

                    operation_figures = dao.get_figures_for_operation(result['id'])

                    # Compute the full path to the figure / image on disk
                    for figure in operation_figures:
                        figures_folder = self.structure_helper.get_images_folder(figure.project.name)
                        figure_full_path = os.path.join(figures_folder, figure.file_path)
                        # Compute the path available from browser
                        figure.figure_path = utils.path2url_part(figure_full_path)

                    result['figures'] = operation_figures
                else:
                    result['results'] = None
                operations.append(result)
            except Exception:
                ## We got an exception when processing one Operation Row. We will continue with the rest of the rows.
                self.logger.exception("Could not prepare operation for display:" + str(one_op))
        return selected_project, total_ops_nr, operations, pages_no
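The pages_no computation above is ceiling division spelled out with floor division plus a remainder check; with hypothetical counts the equivalence looks like this:

    # Ceiling-division sketch: 45 filtered operations at 20 per page -> 3 pages.
    OPERATIONS_PAGE_SIZE = 20
    total_filtered = 45
    pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
    assert pages_no == (total_filtered + OPERATIONS_PAGE_SIZE - 1) // OPERATIONS_PAGE_SIZE == 3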
Example #60
    def test_import_export(self):
        """
        Test the import/export mechanism for a project structure.
        The project contains the following data types: Connectivity, Surface, MappedArray and ValueWrapper.
        """
        result = self.get_all_datatypes()
        expected_results = {}
        for one_data in result:
            expected_results[one_data.gid] = (one_data.module, one_data.type)

        #create an array mapped in DB
        data = {'param_1': 'some value'}
        OperationService().initiate_prelaunch(self.operation,
                                              self.adapter_instance, {},
                                              **data)
        inserted = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray")[1]
        assert 1 == inserted, "Problems when inserting data"

        #create a value wrapper
        value_wrapper = self._create_value_wrapper()
        count_operations = dao.get_filtered_operations(self.test_project.id,
                                                       None,
                                                       is_count=True)
        assert 2 == count_operations, "Invalid ops number before export!"

        # Export project as ZIP
        self.zip_path = ExportManager().export_project(self.test_project)
        assert self.zip_path is not None, "Exported file is none"

        # Remove the original project
        self.project_service.remove_project(self.test_project.id)
        result, lng_ = self.project_service.retrieve_projects_for_user(
            self.test_user.id)
        assert 0 == len(result), "Project Not removed!"
        assert 0 == lng_, "Project Not removed!"

        # Now try to import again project
        self.import_service.import_project_structure(self.zip_path,
                                                     self.test_user.id)
        result = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        assert len(result) == 1, "There should be only one project."
        assert result[0].name == "GeneratedProject", "The project name is not correct."
        assert result[0].description == "test_desc", "The project description is not correct."
        self.test_project = result[0]

        count_operations = dao.get_filtered_operations(self.test_project.id,
                                                       None,
                                                       is_count=True)

        #1 op. - import cff; 2 op. - save the array wrapper;
        assert 2 == count_operations, "Invalid ops number after export and import !"
        for gid in expected_results:
            datatype = dao.get_datatype_by_gid(gid)
            assert datatype.module == expected_results[gid][0], 'DataTypes not imported correctly'
            assert datatype.type == expected_results[gid][1], 'DataTypes not imported correctly'
        #check the value wrapper
        new_val = self.flow_service.get_available_datatypes(
            self.test_project.id,
            "tvb.datatypes.mapped_values.ValueWrapper")[0]
        assert 1 == len(new_val), "Expected exactly one ValueWrapper, got " + str(len(new_val))
        new_val = ABCAdapter.load_entity_by_gid(new_val[0][2])
        assert value_wrapper.data_value == new_val.data_value, "Data value incorrect"
        assert value_wrapper.data_type == new_val.data_type, "Data type incorrect"
        assert value_wrapper.data_name == new_val.data_name, "Data name incorrect"