Example no. 1
    def launch(self, weights, tracts, input_data):
        """
        Execute import operations: process the weights and tracts csv files, then use
        the reference connectivity passed as input_data for the rest of the attributes.

        :param weights: csv file containing the weights measures
        :param tracts:  csv file containing the tracts measures
        :param input_data: a reference connectivity with the additional attributes

        :raises LaunchException: when the number of nodes in CSV files doesn't match the one in the connectivity
        """
        dti_service = DTIPipelineService()
        dti_service._process_csv_file(weights, dti_service.WEIGHTS_FILE)
        dti_service._process_csv_file(tracts, dti_service.TRACT_FILE)
        weights_matrix = read_list_data(os.path.join(os.path.dirname(weights), dti_service.WEIGHTS_FILE))
        tract_matrix = read_list_data(os.path.join(os.path.dirname(tracts), dti_service.TRACT_FILE))
        FilesHelper.remove_files([os.path.join(os.path.dirname(weights), dti_service.WEIGHTS_FILE), 
                                  os.path.join(os.path.dirname(tracts), dti_service.TRACT_FILE)])

        if weights_matrix.shape[0] != input_data.orientations.shape[0]:
            raise LaunchException("The csv files define %s nodes but the connectivity you selected as reference "
                                  "has only %s nodes." % (weights_matrix.shape[0], input_data.orientations.shape[0]))
        result = Connectivity()
        result.storage_path = self.storage_path
        result.nose_correction = input_data.nose_correction
        result.centres = input_data.centres
        result.region_labels = input_data.region_labels
        result.weights = weights_matrix
        result.tract_lengths = tract_matrix
        result.orientations = input_data.orientations
        result.areas = input_data.areas
        result.cortical = input_data.cortical
        result.hemispheres = input_data.hemispheres
        return result
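The crucial invariant in the example above is that the CSV-derived matrices must agree with the reference connectivity on the number of nodes. A standalone sketch of that check, using numpy.loadtxt as a stand-in for TVB's read_list_data (an assumption; the real helper may differ):

import numpy

def check_node_count(weights_path, reference_orientations):
    # numpy.loadtxt stands in for read_list_data here (assumption).
    weights_matrix = numpy.loadtxt(weights_path)
    n_csv = weights_matrix.shape[0]
    n_ref = reference_orientations.shape[0]
    if n_csv != n_ref:
        # Mirrors the LaunchException raised by launch() above.
        raise ValueError("The csv files define %s nodes but the reference "
                         "connectivity has only %s nodes." % (n_csv, n_ref))
    return weights_matrix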
Example no. 3
    def launch(self, sensors_file, sensors_type):
        """
        Creates required sensors from the uploaded file.

        :param sensors_file: the file containing sensor data
        :param sensors_type: a string from "EEG Sensors", "MEG sensors", "Internal Sensors"

        :returns: a list of sensors instances of the specified type

        :raises LaunchException: when
                    * no sensors_file specified
                    * sensors_type is invalid (not one of the mentioned options)
                    * sensors_type is "MEG sensors" and no orientation is specified
        """
        if sensors_file is None:
            raise LaunchException(
                "Please select sensors file which contains data to import")
        sensors_inst = None

        self.logger.debug("Create sensors instance")
        if sensors_type == self.EEG_SENSORS:
            sensors_inst = SensorsEEG()
        elif sensors_type == self.MEG_SENSORS:
            sensors_inst = SensorsMEG()
        elif sensors_type == self.INTERNAL_SENSORS:
            sensors_inst = SensorsInternal()
        else:
            exception_str = "Could not determine sensors type (selected option %s)" % sensors_type
            raise LaunchException(exception_str)

        sensors_inst.storage_path = self.storage_path

        sensors_inst.locations = read_list_data(sensors_file,
                                                usecols=[1, 2, 3])
        sensors_inst.labels = read_list_data(sensors_file,
                                             dtype=numpy.str,
                                             usecols=[0])

        if isinstance(sensors_inst, SensorsMEG):
            try:
                sensors_inst.orientations = read_list_data(sensors_file,
                                                           usecols=[4, 5, 6])
            except IndexError:
                raise LaunchException(
                    "Uploaded file does not contains sensors orientation.")

        self.logger.debug("Sensors instance ready to be stored")

        return [sensors_inst]
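The column layout the importer expects can be inferred from the usecols arguments: one whitespace-separated row per sensor, label in column 0, x/y/z in columns 1-3, and, for MEG only, orientations in columns 4-6. A minimal sketch that writes and re-reads such a file with plain numpy (numpy.loadtxt as a stand-in for read_list_data; file name and values are made up):

import numpy

# Two made-up EEG sensors: label, then x/y/z coordinates.
with open("eeg_sensors.txt", "w") as f:
    f.write("Fp1 -27.0 83.0 -3.0\n")
    f.write("Fp2  27.0 83.0 -3.0\n")

# Plain numpy mirrors the read_list_data calls above.
locations = numpy.loadtxt("eeg_sensors.txt", usecols=(1, 2, 3))
labels = numpy.loadtxt("eeg_sensors.txt", dtype=str, usecols=(0,))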
Example no. 4
def cdata2region_mapping(region_mapping_data, meta, storage_path):
    """
    From a CData entry in CFF, create RegionMapping entity.
    """
    tmpdir = os.path.join(
        gettempdir(), region_mapping_data.parent_cfile.get_unique_cff_name())
    LOG.debug("Using temporary folder for Region_Mapping import: " + tmpdir)
    _zipfile = ZipFile(region_mapping_data.parent_cfile.src, 'r', ZIP_DEFLATED)
    region_mapping_path = _zipfile.extract(region_mapping_data.src, tmpdir)

    gid = dao.get_last_data_with_uid(meta[constants.KEY_SURFACE_UID],
                                     surfaces.CorticalSurface)
    surface_data = ABCAdapter.load_entity_by_gid(gid)

    gid = dao.get_last_data_with_uid(meta[constants.KEY_CONNECTIVITY_UID],
                                     Connectivity)
    connectivity = ABCAdapter.load_entity_by_gid(gid)

    region_mapping = surfaces.RegionMapping(storage_path=storage_path)
    region_mapping.array_data = read_list_data(region_mapping_path,
                                               dtype=numpy.int32)
    region_mapping.connectivity = connectivity
    region_mapping.surface = surface_data
    uid = meta[constants.KEY_UID] if constants.KEY_UID in meta else None

    if os.path.isdir(tmpdir):
        shutil.rmtree(tmpdir)
    return region_mapping, uid
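The function above follows an unpack-read-cleanup pattern: extract one member of the CFF archive into a temporary folder, read it, then remove the folder. A generic, self-contained sketch of that pattern (names are illustrative, not TVB's), with a try/finally so the folder is removed even when parsing fails:

import shutil
from tempfile import mkdtemp
from zipfile import ZipFile

def read_zip_member(zip_path, member, reader):
    # Extract a single member into a throwaway folder, parse it, always clean up.
    tmpdir = mkdtemp(prefix="cff_import_")
    try:
        with ZipFile(zip_path, "r") as archive:
            extracted_path = archive.extract(member, tmpdir)
        return reader(extracted_path)
    finally:
        shutil.rmtree(tmpdir, ignore_errors=True)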
Example no. 6
    def launch(self, sensors_file, sensors_type):
        """
        Creates required sensors from the uploaded file.
        """
        if sensors_file is None:
            raise LaunchException(
                "Please select sensors file which contains data to import")
        sensors_inst = None

        self.logger.debug("Create sensors instance")
        if sensors_type == self.EEG_SENSORS:
            sensors_inst = SensorsEEG()
        elif sensors_type == self.MEG_SENSORS:
            sensors_inst = SensorsMEG()
        elif sensors_type == self.INTERNAL_SENSORS:
            sensors_inst = SensorsInternal()
        else:
            exception_str = "Could not determine sensors type (selected option %s)" % sensors_type
            raise LaunchException(exception_str)

        sensors_inst.storage_path = self.storage_path

        sensors_inst.locations = read_list_data(sensors_file,
                                                usecols=[1, 2, 3])
        sensors_inst.labels = read_list_data(sensors_file,
                                             dtype=numpy.str,
                                             usecols=[0])

        if isinstance(sensors_inst, SensorsMEG):
            try:
                sensors_inst.orientations = read_list_data(sensors_file,
                                                           usecols=[4, 5, 6])
            except IndexError:
                raise LaunchException(
                    "Uploaded file does not contains sensors orientation.")

        self.logger.debug("Sensors instance ready to be stored")

        return [sensors_inst]
Example no. 7
    def test_process_csv(self):
        """
        Test that a CSV generated on the server is correctly processed.
        """

        folder = self.helper.get_project_folder(self.test_project, "TEMP")

        for file_name in [self.FILE_1, self.FILE_2, self.FILE_3, self.FILE_4]:

            intermediate_file = os.path.join(folder,
                                             os.path.split(file_name)[1])
            self.helper.copy_file(file_name, intermediate_file)
            result_file = 'weights.txt' if 'Capacity' in file_name else 'tracts.txt'
            result_file = os.path.join(folder, result_file)
            self.service._process_csv_file(intermediate_file, result_file)
            matrix = read_list_data(result_file)
            self.assertEqual(96, len(matrix))
            self.assertEqual(96, len(matrix[0]))
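A self-contained analogue of the assertion in this test: whatever the CSV processing does, the resulting text file must parse back into a square 96 x 96 matrix (the file name and data below are made up):

import numpy

matrix = numpy.random.rand(96, 96)
numpy.savetxt("weights.txt", matrix)          # stand-in for the processed output
loaded = numpy.loadtxt("weights.txt")
assert loaded.shape == (96, 96)               # mirrors the two assertEqual calls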
Example no. 8
    def read_data(self, file_name=None, matlab_data_name=None, dtype=numpy.float64,
                  skiprows=0, usecols=None, field=None, lazy_load=False):
        """
        Read from the given file and optional sub-entry (Matlab variable name or HDF5 field).
        """
        if TVBSettings.TRAITS_CONFIGURATION.use_storage:
            # Files are read only in library mode; skip when the storage-based profile is active.
            return None

        if field is not None:
            self.references[field] = {self.KEY_PARAMETERS: {'file_name': file_name,
                                                            'matlab_data_name': matlab_data_name,
                                                            'dtype': dtype,
                                                            'skiprows': skiprows,
                                                            'usecols': usecols,
                                                            'field': field},
                                      self.KEY_METHOD: 'read_data'}
        if lazy_load:
            ## Do not read now, just keep the reference. It will be used on "reload" later.
            return None
        
        # When reloading from an HDF5 (h5py) file we need to skip the file_name
        # check, since a non-h5py read always passes a file name, which would
        # overwrite self.file_name (the .h5 file in this reload case).
        if self.file_name is not None and self.file_name.endswith('.h5'):
            if H5PY_SUPPORT:
                full_path = os.path.join(self.folder_path, self.file_name)
                return self._read_h5py(full_path, field)
            else:
                self.logger.warning("You need h5py properly installed in order to load from a HDF5 file.")

        if file_name is None:
            file_name = self.file_name
        full_path = os.path.join(self.folder_path, file_name)
        self.logger.debug("Starting to read from: " + str(full_path))

        # Try to read NumPy
        if full_path.endswith('.txt') or full_path.endswith('.txt.bz2'):
            return read_list_data(full_path, dtype=dtype, skiprows=skiprows, usecols=usecols)
        if full_path.endswith('.npz'):
            return numpy.load(full_path)

        # Try to read Matlab format
        return self._read_matlab(full_path, matlab_data_name)
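The field/lazy_load bookkeeping above is the interesting part: when lazy_load is set, nothing is read and only the call parameters are remembered, so a later "reload" can replay them. A stripped-down sketch of that mechanism (not TVB's class; names simplified):

import os
import numpy

class LazyReader:
    KEY_PARAMETERS = "parameters"
    KEY_METHOD = "method_name"

    def __init__(self, folder_path):
        self.folder_path = folder_path
        self.references = {}

    def read_data(self, file_name, field=None, lazy_load=False):
        if field is not None:
            # Remember how to re-read this field on a later "reload".
            self.references[field] = {self.KEY_PARAMETERS: {"file_name": file_name},
                                      self.KEY_METHOD: "read_data"}
        if lazy_load:
            return None                     # defer the actual read
        return numpy.loadtxt(os.path.join(self.folder_path, file_name))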
Example no. 10
    def __init__(self, remote_machine=None, remote_user=None):
        """
        :param remote_machine: IP for the remote machine
        :param remote_user: Username valid on remote_machine. No further password should be needed for connecting.
        """
        self.logger = get_logger(self.__class__.__module__)
        self.remote_machine = remote_machine
        self.remote_user = remote_user
        self.flow_service = FlowService()
        self.file_handler = FilesHelper()

        folder_default_data = os.path.dirname(demo_root.__file__)
        file_order = os.path.join(folder_default_data, self.FILE_NODES_ORDER)
        self.expected_nodes_order = read_list_data(file_order, dtype=numpy.int32, usecols=[0])

        zip_path = os.path.join(folder_default_data, self.CONNECTIVITY_DEFAULT)
        if not (os.path.exists(zip_path) and os.path.isfile(zip_path)):
            raise ConnectException("Could not find default Connectivity for the pipeline! " + str(zip_path))
        self.default_connectivity_zip_path = zip_path
Example no. 13
    def launch(self, uploaded, rotate_x=0, rotate_y=0, rotate_z=0):
        """
        Execute import operations: unpack ZIP and build Connectivity object as result.

        :param uploaded: an archive containing the Connectivity data to be imported

        :returns: `Connectivity`

        :raises LaunchException: when `uploaded` is empty or nonexistent
        :raises Exception: when
                    * weights or tracts matrix is invalid (negative values, wrong shape)
                    * any of the orientation, areas, cortical or hemisphere vectors \
                      has a size different from the expected number of nodes
        """
        if uploaded is None:
            raise LaunchException("Please select ZIP file which contains data to import")
        
        files = FilesHelper().unpack_zip(uploaded, self.storage_path)
        
        weights_matrix = None
        centres = None
        labels_vector = None
        tract_matrix = None
        orientation = None
        areas = None
        cortical_vector = None
        hemisphere_vector = None
        
        for file_name in files:
            if file_name.lower().find(self.WEIGHT_TOKEN) >= 0:
                weights_matrix = read_list_data(file_name)
                continue
            if file_name.lower().find(self.POSITION_TOKEN) >= 0:
                centres = read_list_data(file_name, skiprows=1, usecols=[1, 2, 3])
                labels_vector = read_list_data(file_name, dtype=numpy.str, skiprows=1, usecols=[0])
                continue
            if file_name.lower().find(self.TRACT_TOKEN) >= 0:
                tract_matrix = read_list_data(file_name)
                continue
            if file_name.lower().find(self.ORIENTATION_TOKEN) >= 0:
                orientation = read_list_data(file_name)
                continue
            if file_name.lower().find(self.AREA_TOKEN) >= 0:
                areas = read_list_data(file_name)
                continue
            if file_name.lower().find(self.CORTICAL_INFO) >= 0:
                cortical_vector = read_list_data(file_name, dtype=numpy.bool)
                continue
            if file_name.lower().find(self.HEMISPHERE_INFO) >= 0:
                hemisphere_vector = read_list_data(file_name, dtype=numpy.bool)
                continue
        ### Clean remaining text-files.
        FilesHelper.remove_files(files, True)
        
        result = Connectivity()
        result.storage_path = self.storage_path
        result.nose_correction = [rotate_x, rotate_y, rotate_z]
        
        ### Fill positions
        if centres is None:
            raise Exception("Positions for Connectivity Regions are required! "
                            "We expect a file *position* inside the uploaded ZIP.")
        expected_number_of_nodes = len(centres)
        if expected_number_of_nodes < 2:
            raise Exception("A connectivity with at least 2 nodes is expected")
        result.centres = centres
        if labels_vector is not None:
            result.region_labels = labels_vector
            
        ### Fill and check weights
        if weights_matrix is not None:
            if numpy.any([x < 0 for x in weights_matrix.flatten()]):
                raise Exception("Negative values are not accepted in weights matrix! "
                                "Please check your file, and use values >= 0")
            if weights_matrix.shape != (expected_number_of_nodes, expected_number_of_nodes):
                raise Exception("Unexpected shape for weights matrix! "
                                "Should be %d x %d " % (expected_number_of_nodes, expected_number_of_nodes))
            result.weights = weights_matrix
            
        ### Fill and check tracts    
        if tract_matrix is not None:
            if numpy.any([x < 0 for x in tract_matrix.flatten()]):
                raise Exception("Negative values are not accepted in tracts matrix! "
                                "Please check your file, and use values >= 0")
            if tract_matrix.shape != (expected_number_of_nodes, expected_number_of_nodes):
                raise Exception("Unexpected shape for tracts matrix! "
                                "Should be %d x %d " % (expected_number_of_nodes, expected_number_of_nodes))
            result.tract_lengths = tract_matrix
        
        
        if orientation is not None:
            if len(orientation) != expected_number_of_nodes:
                raise Exception("Invalid size for vector orientation. "
                                "Expected the same as region-centers number %d" % expected_number_of_nodes)
            result.orientations = orientation
            
        if areas is not None:
            if len(areas) != expected_number_of_nodes:
                raise Exception("Invalid size for vector areas. "
                                "Expected the same as region-centers number %d" % expected_number_of_nodes)
            result.areas = areas
            
        if cortical_vector is not None:
            if len(cortical_vector) != expected_number_of_nodes:
                raise Exception("Invalid size for vector cortical. "
                                "Expected the same as region-centers number %d" % expected_number_of_nodes)
            result.cortical = cortical_vector
            
        if hemisphere_vector is not None:
            if len(hemisphere_vector) != expected_number_of_nodes:
                raise Exception("Invalid size for vector hemispheres. "
                                "Expected the same as region-centers number %d" % expected_number_of_nodes)
            result.hemispheres = hemisphere_vector
        return result
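What the uploaded ZIP must contain follows from the token matching in the loop: one text file per connectivity attribute, recognized by a substring of its lower-cased name, with the position file carrying a one-line header (it is read with skiprows=1). A sketch that builds such an archive; the concrete file names and token values are assumptions, only the matching-by-substring behaviour comes from the code above:

import numpy
from zipfile import ZipFile

n = 4                                         # toy connectivity with 4 regions
numpy.savetxt("weights.txt", numpy.abs(numpy.random.rand(n, n)))   # must be >= 0
numpy.savetxt("tracts.txt", numpy.abs(numpy.random.rand(n, n)))    # must be >= 0
with open("positions.txt", "w") as f:
    f.write("label x y z\n")                  # header row, skipped via skiprows=1
    for i in range(n):
        f.write("region_%d %f %f %f\n" % (i, float(i), 0.0, 0.0))

with ZipFile("connectivity.zip", "w") as archive:
    for name in ("weights.txt", "tracts.txt", "positions.txt"):
        archive.write(name)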
Example no. 16
    def launch(self, mapping_file, surface, connectivity):
        """
        Creates region mapping from uploaded data.

        :param mapping_file: an archive containing data for mapping surface to connectivity

        :raises LaunchException: when
                    * a parameter is None or missing
                    * archive has more than one file
                    * uploaded files are empty
                    * number of vertices in imported file is different from the number of surface vertices
                    * imported file has negative values
                    * imported file has regions which are not in connectivity
        """
        if mapping_file is None:
            raise LaunchException(
                "Please select mappings file which contains data to import")
        if surface is None:
            raise LaunchException("No surface selected. Please initiate " +
                                  "upload again and select a brain surface.")
        if connectivity is None:
            raise LaunchException(
                "No connectivity selected. Please initiate " +
                "upload again and select one.")

        self.logger.debug("Reading mappings from uploaded file")
        array_data = None
        if zipfile.is_zipfile(mapping_file):
            tmp_folder = tempfile.mkdtemp(prefix='region_mapping_zip_',
                                          dir=cfg.TVB_TEMP_FOLDER)
            try:
                files = FilesHelper().unpack_zip(mapping_file, tmp_folder)
                if len(files) > 1:
                    raise LaunchException(
                        "Please upload a ZIP file containing only one file.")
                array_data = read_list_data(files[0], dtype=numpy.int32)
            finally:
                if os.path.exists(tmp_folder):
                    shutil.rmtree(tmp_folder)
        else:
            array_data = read_list_data(mapping_file, dtype=numpy.int32)

        # Now we do some checks before building final RegionMapping
        if array_data is None or len(array_data) == 0:
            raise LaunchException("Uploaded file does not contains any data." +
                                  " Please initiate upload with another file.")

        # Check if we have a mapping for each surface vertex.
        if len(array_data) != surface.number_of_vertices:
            msg = ("Imported file contains a different number of values " +
                   "than the number of surface vertices. " +
                   "Imported: %d values while surface has: %d vertices.")
            msg = msg % (len(array_data), surface.number_of_vertices)
            raise LaunchException(msg)

        # Now check if the values from imported file correspond to connectivity regions
        if array_data.min() < 0:
            raise LaunchException(
                "Imported file contains negative values. Please fix problem and re-import file"
            )

        if array_data.max() >= connectivity.number_of_regions:
            msg = (
                "Imported file contains invalid regions. Found region: %d while selected "
                + "connectivity has: %d regions defined (0 based).")
            msg = msg % (array_data.max(), connectivity.number_of_regions)
            raise LaunchException(msg)

        self.logger.debug("Creating RegionMapping instance")
        region_mapping_inst = RegionMapping()
        region_mapping_inst.storage_path = self.storage_path
        region_mapping_inst.set_operation_id(self.operation_id)
        region_mapping_inst.surface = surface
        region_mapping_inst.connectivity = connectivity

        if array_data is not None:
            region_mapping_inst.array_data = array_data

        return [region_mapping_inst]
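The validation rules above stand on their own: the mapping must provide one non-negative region index per surface vertex, and every index must fall inside the connectivity's region range. A standalone sketch of those checks (plain ValueError instead of LaunchException):

import numpy

def validate_region_mapping(array_data, number_of_vertices, number_of_regions):
    if array_data is None or len(array_data) == 0:
        raise ValueError("Uploaded file does not contain any data.")
    if len(array_data) != number_of_vertices:
        raise ValueError("Imported: %d values while surface has: %d vertices."
                         % (len(array_data), number_of_vertices))
    if array_data.min() < 0:
        raise ValueError("Imported file contains negative values.")
    if array_data.max() >= number_of_regions:
        raise ValueError("Found region: %d while connectivity has: %d regions "
                         "defined (0 based)." % (array_data.max(), number_of_regions))

# A mapping of 10 vertices onto a 76-region connectivity passes the checks.
validate_region_mapping(numpy.zeros(10, dtype=numpy.int32), 10, 76)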