Example #1
0
def cdata2region_mapping(region_mapping_data, meta, storage_path):
    """
    From a CData entry in CFF, create a RegionMapping entity.

    :param region_mapping_data: CData element; its `src` names the member of \
        the parent CFF zip archive that holds the mapping array
    :param meta: metadata dict; read for KEY_SURFACE_UID, KEY_CONNECTIVITY_UID \
        and (optionally) KEY_UID
    :param storage_path: folder where the new entity will store its data
    :return: tuple (RegionMapping instance, uid or None)
    """
    tmpdir = os.path.join(
        gettempdir(), region_mapping_data.parent_cfile.get_unique_cff_name())
    LOG.debug("Using temporary folder for Region_Mapping import: " + tmpdir)
    _zipfile = ZipFile(region_mapping_data.parent_cfile.src, 'r', ZIP_DEFLATED)
    try:
        region_mapping_path = _zipfile.extract(region_mapping_data.src, tmpdir)
    finally:
        # Close the archive handle even if extraction fails (was leaked before).
        _zipfile.close()

    gid = dao.get_last_data_with_uid(meta[constants.KEY_SURFACE_UID],
                                     surfaces.CorticalSurface)
    surface_data = ABCAdapter.load_entity_by_gid(gid)

    gid = dao.get_last_data_with_uid(meta[constants.KEY_CONNECTIVITY_UID],
                                     Connectivity)
    connectivity = ABCAdapter.load_entity_by_gid(gid)

    region_mapping = surfaces.RegionMapping(storage_path=storage_path)
    region_mapping.array_data = read_list_data(region_mapping_path,
                                               dtype=numpy.int32)
    region_mapping.connectivity = connectivity
    region_mapping.surface = surface_data
    uid = meta[constants.KEY_UID] if constants.KEY_UID in meta else None

    # Remove the temporary extraction folder.
    if os.path.isdir(tmpdir):
        shutil.rmtree(tmpdir)
    return region_mapping, uid
def cdata2eeg_mapping(eeg_mapping_data, meta, storage_path, expected_shape=0):
    """
    From a CData entry in CFF, create a ProjectionSurfaceEEG entity.
    Currently not used.

    :param eeg_mapping_data: CData element; its `src` names the member of the \
        parent CFF zip archive holding the projection matrix (MATLAB format)
    :param meta: metadata dict; read for KEY_SURFACE_UID
    :param storage_path: folder where the new entity will store its data
    :param expected_shape: minimum column count; the matrix is zero-padded \
        on the right up to this width
    :return: ProjectionSurfaceEEG instance (with `sensors` left None)
    """
    tmpdir = os.path.join(gettempdir(), eeg_mapping_data.parent_cfile.get_unique_cff_name())
    LOG.debug("Using temporary folder for EEG_Mapping import: " + tmpdir)
    _zipfile = ZipFile(eeg_mapping_data.parent_cfile.src, 'r', ZIP_DEFLATED)
    try:
        eeg_projection_path = _zipfile.extract(eeg_mapping_data.src, tmpdir)
    finally:
        # Close the archive handle even if extraction fails (was leaked before).
        _zipfile.close()
    eeg_projection_data = read_matlab_data(eeg_projection_path, constants.DATA_NAME_PROJECTION)
    if eeg_projection_data.shape[1] < expected_shape:
        # Zero-pad missing columns on the right.
        padding = numpy.zeros((eeg_projection_data.shape[0],
                               expected_shape - eeg_projection_data.shape[1]))
        eeg_projection_data = numpy.hstack((eeg_projection_data, padding))

    gid = dao.get_last_data_with_uid(meta[constants.KEY_SURFACE_UID], surfaces.CorticalSurface)
    surface_data = ABCAdapter.load_entity_by_gid(gid)

    projection_matrix = projections.ProjectionSurfaceEEG(storage_path=storage_path)
    projection_matrix.projection_data = eeg_projection_data
    projection_matrix.sources = surface_data
    projection_matrix.sensors = None
    ### TODO if we decide to use this method, we will need to find a manner to fill the sensors.
    # Remove the temporary extraction folder, consistent with the sibling importers
    # (the original left it behind).
    if os.path.isdir(tmpdir):
        shutil.rmtree(tmpdir)
    return projection_matrix
 
 
    

    
def cdata2local_connectivity(local_connectivity_data, meta, storage_path, expected_length=0):
    """
    From a CData entry in CFF, create a LocalConnectivity entity.

    :param local_connectivity_data: CData element; its `src` names the member \
        of the parent CFF zip archive holding the sparse matrix (MATLAB format)
    :param meta: metadata dict; read for KEY_SURFACE_UID and optionally KEY_UID
    :param storage_path: folder where the new entity will store its data
    :param expected_length: zero-pad the matrix up to this (square) size
    :return: tuple (LocalConnectivity instance, uid or None)
    """
    ##### expected_length = cortex.region_mapping.shape[0]
    tmpdir = os.path.join(gettempdir(), local_connectivity_data.parent_cfile.get_unique_cff_name())
    LOG.debug("Using temporary folder for Local Connectivity import: " + tmpdir)
    _zipfile = ZipFile(local_connectivity_data.parent_cfile.src, 'r', ZIP_DEFLATED)
    try:
        local_connectivity_path = _zipfile.extract(local_connectivity_data.src, tmpdir)
    finally:
        # Close the archive handle even if extraction fails (was leaked before).
        _zipfile.close()

    gid = dao.get_last_data_with_uid(meta[constants.KEY_SURFACE_UID], surfaces.CorticalSurface)
    surface_data = ABCAdapter.load_entity_by_gid(gid)

    local_connectivity = surfaces.LocalConnectivity()
    local_connectivity.storage_path = storage_path
    local_connectivity_data = read_matlab_data(local_connectivity_path, constants.DATA_NAME_LOCAL_CONN)

    if local_connectivity_data.shape[0] < expected_length:
        # Pad columns up to expected_length. NOTE(review): the original used
        # shape[0] for the padding width, which is only correct for square
        # matrices; shape[1] is the proper column axis.
        padding = sparse.csc_matrix((local_connectivity_data.shape[0],
                                     expected_length - local_connectivity_data.shape[1]))
        local_connectivity_data = sparse.hstack([local_connectivity_data, padding])

        # Pad rows up to expected_length (shape[0] is unchanged by hstack).
        padding = sparse.csc_matrix((expected_length - local_connectivity_data.shape[0],
                                     local_connectivity_data.shape[1]))
        local_connectivity_data = sparse.vstack([local_connectivity_data, padding])

    local_connectivity.equation = None
    local_connectivity.matrix = local_connectivity_data
    local_connectivity.surface = surface_data

    uid = meta[constants.KEY_UID] if constants.KEY_UID in meta else None
    if os.path.isdir(tmpdir):
        shutil.rmtree(tmpdir)
    return local_connectivity, uid
Example #4
0
def cdata2eeg_mapping(eeg_mapping_data, meta, storage_path, expected_shape=0):
    """
    From a CData entry in CFF, create a ProjectionSurfaceEEG entity.
    Currently not used.

    :param eeg_mapping_data: CData element; its `src` names the member of the \
        parent CFF zip archive holding the projection matrix (MATLAB format)
    :param meta: metadata dict; read for KEY_SURFACE_UID
    :param storage_path: folder where the new entity will store its data
    :param expected_shape: minimum column count; the matrix is zero-padded \
        on the right up to this width
    :return: ProjectionSurfaceEEG instance (with `sensors` left None)
    """
    tmpdir = os.path.join(gettempdir(),
                          eeg_mapping_data.parent_cfile.get_unique_cff_name())
    LOG.debug("Using temporary folder for EEG_Mapping import: " + tmpdir)
    _zipfile = ZipFile(eeg_mapping_data.parent_cfile.src, 'r', ZIP_DEFLATED)
    try:
        eeg_projection_path = _zipfile.extract(eeg_mapping_data.src, tmpdir)
    finally:
        # Close the archive handle even if extraction fails (was leaked before).
        _zipfile.close()
    eeg_projection_data = read_matlab_data(eeg_projection_path,
                                           constants.DATA_NAME_PROJECTION)
    if eeg_projection_data.shape[1] < expected_shape:
        # Zero-pad missing columns on the right.
        padding = numpy.zeros((eeg_projection_data.shape[0],
                               expected_shape - eeg_projection_data.shape[1]))
        eeg_projection_data = numpy.hstack((eeg_projection_data, padding))

    gid = dao.get_last_data_with_uid(meta[constants.KEY_SURFACE_UID],
                                     surfaces.CorticalSurface)
    surface_data = ABCAdapter.load_entity_by_gid(gid)

    projection_matrix = projections.ProjectionSurfaceEEG(
        storage_path=storage_path)
    projection_matrix.projection_data = eeg_projection_data
    projection_matrix.sources = surface_data
    projection_matrix.sensors = None
    ### TODO if we decide to use this method, we will need to find a manner to fill the sensors.
    # Remove the temporary extraction folder, consistent with the sibling importers
    # (the original left it behind).
    if os.path.isdir(tmpdir):
        shutil.rmtree(tmpdir)
    return projection_matrix
def cdata2region_mapping(region_mapping_data, meta, storage_path):
    """
    From a CData entry in CFF, create a RegionMapping entity.

    :param region_mapping_data: CData element; its `src` names the member of \
        the parent CFF zip archive that holds the mapping array
    :param meta: metadata dict; read for KEY_SURFACE_UID, KEY_CONNECTIVITY_UID \
        and (optionally) KEY_UID
    :param storage_path: folder where the new entity will store its data
    :return: tuple (RegionMapping instance, uid or None)
    """
    tmpdir = os.path.join(gettempdir(), region_mapping_data.parent_cfile.get_unique_cff_name())
    LOG.debug("Using temporary folder for Region_Mapping import: " + tmpdir)
    _zipfile = ZipFile(region_mapping_data.parent_cfile.src, 'r', ZIP_DEFLATED)
    try:
        region_mapping_path = _zipfile.extract(region_mapping_data.src, tmpdir)
    finally:
        # Close the archive handle even if extraction fails (was leaked before).
        _zipfile.close()

    gid = dao.get_last_data_with_uid(meta[constants.KEY_SURFACE_UID], surfaces.CorticalSurface)
    surface_data = ABCAdapter.load_entity_by_gid(gid)

    gid = dao.get_last_data_with_uid(meta[constants.KEY_CONNECTIVITY_UID], Connectivity)
    connectivity = ABCAdapter.load_entity_by_gid(gid)

    region_mapping = surfaces.RegionMapping(storage_path=storage_path)
    region_mapping.array_data = read_list_data(region_mapping_path, dtype=numpy.int32)
    region_mapping.connectivity = connectivity
    region_mapping.surface = surface_data
    uid = meta[constants.KEY_UID] if constants.KEY_UID in meta else None

    # Remove the temporary extraction folder.
    if os.path.isdir(tmpdir):
        shutil.rmtree(tmpdir)
    return region_mapping, uid
Example #6
0
def cdata2local_connectivity(local_connectivity_data,
                             meta,
                             storage_path,
                             expected_length=0):
    """
    From a CData entry in CFF, create a LocalConnectivity entity.

    :param local_connectivity_data: CData element; its `src` names the member \
        of the parent CFF zip archive holding the sparse matrix (MATLAB format)
    :param meta: metadata dict; read for KEY_SURFACE_UID and optionally KEY_UID
    :param storage_path: folder where the new entity will store its data
    :param expected_length: zero-pad the matrix up to this (square) size
    :return: tuple (LocalConnectivity instance, uid or None)
    """
    ##### expected_length = cortex.region_mapping.shape[0]
    tmpdir = os.path.join(
        gettempdir(),
        local_connectivity_data.parent_cfile.get_unique_cff_name())
    LOG.debug("Using temporary folder for Local Connectivity import: " +
              tmpdir)
    _zipfile = ZipFile(local_connectivity_data.parent_cfile.src, 'r',
                       ZIP_DEFLATED)
    try:
        local_connectivity_path = _zipfile.extract(local_connectivity_data.src,
                                                   tmpdir)
    finally:
        # Close the archive handle even if extraction fails (was leaked before).
        _zipfile.close()

    gid = dao.get_last_data_with_uid(meta[constants.KEY_SURFACE_UID],
                                     surfaces.CorticalSurface)
    surface_data = ABCAdapter.load_entity_by_gid(gid)

    local_connectivity = surfaces.LocalConnectivity()
    local_connectivity.storage_path = storage_path
    local_connectivity_data = read_matlab_data(local_connectivity_path,
                                               constants.DATA_NAME_LOCAL_CONN)

    if local_connectivity_data.shape[0] < expected_length:
        # Pad columns up to expected_length. NOTE(review): the original used
        # shape[0] for the padding width, which is only correct for square
        # matrices; shape[1] is the proper column axis.
        padding = sparse.csc_matrix(
            (local_connectivity_data.shape[0],
             expected_length - local_connectivity_data.shape[1]))
        local_connectivity_data = sparse.hstack(
            [local_connectivity_data, padding])

        # Pad rows up to expected_length (shape[0] is unchanged by hstack).
        padding = sparse.csc_matrix(
            (expected_length - local_connectivity_data.shape[0],
             local_connectivity_data.shape[1]))
        local_connectivity_data = sparse.vstack(
            [local_connectivity_data, padding])

    local_connectivity.equation = None
    local_connectivity.matrix = local_connectivity_data
    local_connectivity.surface = surface_data

    uid = meta[constants.KEY_UID] if constants.KEY_UID in meta else None
    if os.path.isdir(tmpdir):
        shutil.rmtree(tmpdir)
    return local_connectivity, uid
Example #7
0
    def _prepare_custom_parameter(self, arg):
        """ Overwrite to prepare specific parameters. """
        value = self.arguments[arg]
        # Plain values pass through untouched.
        if not (isinstance(value, dict) and ATT_UID in value):
            return value
        # The argument is a UID reference: find the type declared for this
        # argument in the adapter's input tree (default to generic DataType),
        # then load the matching entity from the DB.
        entity_type = model.DataType
        for attribute in self.callable_object.get_input_tree():
            if attribute[ATT_NAME] == arg:
                entity_type = attribute[ATT_TYPE]
                break
        return dao.get_last_data_with_uid(value[ATT_UID], entity_type)
Example #8
0
    def _prepare_custom_parameter(self, arg):
        """ Overwrite to prepare specific parameters. """
        result = self.arguments[arg]

        if isinstance(result, dict) and ATT_UID in result:
            uid = result[ATT_UID]
            # Resolve the type declared for this argument in the adapter's
            # input tree; fall back to the generic DataType when absent.
            declared_type = next(
                (definition[ATT_TYPE]
                 for definition in self.callable_object.get_input_tree()
                 if definition[ATT_NAME] == arg),
                model.DataType)
            # Retrieve the entity of the resolved type from the DB.
            result = dao.get_last_data_with_uid(uid, declared_type)
        return result
Example #9
0
 def test_handle_event(self):
     """
     Test a defined handler for the store project method.
     """
     event_handlers.read_events([os.path.dirname(__file__)])
     project_data = dict(name="test_project",
                         description="test_description",
                         users=[])
     existing_projects = dao.get_projects_for_user(self.test_user.id)
     self.assertEqual(len(existing_projects), 0, "Database reset probably failed!")
     stored_project = self.project_service.store_project(self.test_user, True,
                                                         None, **project_data)
     # Operations will start asynchronously; Give them time.
     time.sleep(1)
     gid = dao.get_last_data_with_uid("test_uid")
     self.assertTrue(gid is not None, "Nothing was stored in database!")
     datatype = dao.get_datatype_by_gid(gid)
     self.assertEqual(datatype.type, "Datatype1", "Wrong data stored!")
     self.project_service._remove_project_node_files(stored_project.id, gid)
Example #10
0
 def test_handle_event(self):
     """
     Test a defined handler for the store project method.
     """
     eventhandler.read_events([os.path.dirname(__file__)])
     project_data = dict(name="test_project",
                         description="test_description",
                         users=[])
     existing_projects = dao.get_projects_for_user(self.test_user.id)
     self.assertEqual(len(existing_projects), 0,
                      "Database reset probably failed!")
     stored_project = self.project_service.store_project(
         self.test_user, True, None, **project_data)
     # Operations will start asynchronously; Give them time.
     time.sleep(1)
     gid = dao.get_last_data_with_uid("test_uid")
     self.assertTrue(gid is not None, "Nothing was stored in database!")
     datatype = dao.get_datatype_by_gid(gid)
     self.assertEqual(datatype.type, "Datatype1", "Wrong data stored!")
     self.project_service._remove_project_node_files(stored_project.id, gid)