Example No. 1
    def launch(self, data_file, surface=None):
        """
        Execute import operations:
        """
        if surface is None:
            raise LaunchException(
                "No surface selected. Please initiate upload again and select a brain surface."
            )

        parser = GIFTIParser(self.storage_path, self.operation_id)
        try:
            time_series = parser.parse(data_file)
            ts_data_shape = time_series.read_data_shape()

            if surface.number_of_vertices != ts_data_shape[1]:
                msg = "Imported time series doesn't have values for all surface vertices. Surface has %d vertices " \
                      "while time series has %d values." % (surface.number_of_vertices, ts_data_shape[1])
                raise LaunchException(msg)
            else:
                time_series.surface = surface

            return [time_series]

        except ParseException as excep:
            logger = get_logger(__name__)
            logger.exception(excep)
            raise LaunchException(excep)
Example No. 2
    def launch(self, data_file, surface=None):
        """
        Execute import operations:
        """
        if surface is None:
            raise LaunchException("No surface selected. Please initiate upload again and select a brain surface.")
            
        parser = GIFTIParser(self.storage_path, self.operation_id)
        try:
            time_series = parser.parse(data_file)
            ts_data_shape = time_series.read_data_shape()

            if surface.number_of_vertices != ts_data_shape[1]:
                msg = "Imported time series doesn't have values for all surface vertices. Surface has %d vertices " \
                      "while time series has %d values." % (surface.number_of_vertices, ts_data_shape[1])
                raise LaunchException(msg)
            else:
                time_series.surface = surface

            return [time_series]

        except ParseException as excep:
            logger = get_logger(__name__)
            logger.exception(excep)
            raise LaunchException(excep)
Example No. 3
    def launch(self,
               file_type,
               data_file,
               data_file_part2,
               should_center=False):
        """
        Execute import operations:
        """
        parser = GIFTIParser(self.storage_path, self.operation_id)
        try:
            surface = parser.parse(data_file,
                                   data_file_part2,
                                   file_type,
                                   should_center=should_center)
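            # normals computation is the expensive part of the import; the tests below parse only,
            # precisely to avoid this roughly nine-minute step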
            surface.compute_triangle_normals()
            surface.compute_vertex_normals()
            validation_result = surface.validate()

            if validation_result.warnings:
                self.add_operation_additional_info(validation_result.summary())
            self.generic_attributes.user_tag_1 = surface.surface_type
            surface_idx = h5.store_complete(surface, self.storage_path)
            return [surface_idx]
        except ParseException as excep:
            logger = get_logger(__name__)
            logger.exception(excep)
            raise LaunchException(excep)
Example No. 4
    def _parse_connectome_surfaces(self, connectome_surface, warning_message,
                                   should_center):
        """
        Parse data from a CSurface object and save it in our internal Surface DataTypes
        """
        surfaces, processed_files = [], []
        parser = GIFTIParser(self.storage_path, self.operation_id)

        for c_surface in connectome_surface:
            if c_surface.src in processed_files:
                continue

            try:
                # create a meaningful but unique temporary path to extract
                tmpdir = os.path.join(
                    gettempdir(), c_surface.parent_cfile.get_unique_cff_name())
                self.log.debug("Extracting %s[%s] into %s ..." %
                               (c_surface.src, c_surface.name, tmpdir))
                _zipfile = ZipFile(c_surface.parent_cfile.src, 'r',
                                   ZIP_DEFLATED)
                gifti_file_1 = _zipfile.extract(c_surface.src, tmpdir)

                gifti_file_2 = None
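                # a surface may be split across two GIFTI files (e.g. one per hemisphere),
                # so look for a matching pair file before parsing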
                surface_name, pair_surface = self._find_pair_file(
                    c_surface, connectome_surface)
                if pair_surface:
                    self.log.debug(
                        "Extracting pair %s[%s] into %s ..." %
                        (pair_surface.src, pair_surface.name, tmpdir))
                    gifti_file_2 = _zipfile.extract(pair_surface.src, tmpdir)

                surface_type = self._guess_surface_type(c_surface.src.lower())
                self.logger.info("We will import surface %s as type %s" %
                                 (c_surface.src, surface_type))
                surface = parser.parse(gifti_file_1, gifti_file_2,
                                       surface_type, should_center)
                surface.user_tag_1 = surface_name

                validation_result = surface.validate()
                if validation_result.warnings:
                    warning_message += validation_result.summary() + "\n"

                surfaces.append(surface)

                if pair_surface:
                    processed_files.append(pair_surface.src)
                processed_files.append(c_surface.src)

                if os.path.exists(tmpdir):
                    shutil.rmtree(tmpdir)

            except ParseException:
                self.logger.exception("Could not import a Surface entity.")
                warning_message += "Problem when importing Surfaces!! \n"
            except OSError:
                self.log.exception("Could not clean up temporary file(s).")

        return surfaces
Example No. 5
    def launch(self, view_model):
        # type: (GIFTITimeSeriesImporterModel) -> [TimeSeriesSurfaceIndex]
        """
        Execute import operations:
        """
        if view_model.surface is None:
            raise LaunchException(
                "No surface selected. Please initiate upload again and select a brain surface."
            )

        parser = GIFTIParser(self.storage_path, self.operation_id)
        try:
            partial_time_series, gifti_data_arrays = parser.parse(
                view_model.data_file)

            ts_idx = TimeSeriesSurfaceIndex()
            ts_h5_path = h5.path_for(self.storage_path, TimeSeriesSurfaceH5,
                                     ts_idx.gid)

            ts_h5 = TimeSeriesSurfaceH5(ts_h5_path)
            # todo : make sure that write_time_slice is not required here
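            # each GIFTI data array carries the vertex values of one time point,
            # written to the H5 file as a single data slice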
            for data_array in gifti_data_arrays:
                ts_h5.write_data_slice([data_array.data])

            ts_h5.store(partial_time_series,
                        scalars_only=True,
                        store_references=False)
            ts_h5.gid.store(uuid.UUID(ts_idx.gid))

            ts_data_shape = ts_h5.read_data_shape()
            surface = self.load_entity_by_gid(view_model.surface)
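            # the second dimension of the stored data must cover every vertex of the selected surface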
            if surface.number_of_vertices != ts_data_shape[1]:
                msg = "Imported time series doesn't have values for all surface vertices. Surface has %d vertices " \
                      "while time series has %d values." % (surface.number_of_vertices, ts_data_shape[1])
                raise LaunchException(msg)
            else:
                ts_h5.surface.store(uuid.UUID(surface.gid))
                ts_idx.fk_surface_gid = surface.gid
            ts_h5.close()

            ts_idx.sample_period_unit = partial_time_series.sample_period_unit
            ts_idx.sample_period = partial_time_series.sample_period
            ts_idx.sample_rate = partial_time_series.sample_rate
            ts_idx.labels_ordering = json.dumps(
                partial_time_series.labels_ordering)
            ts_idx.labels_dimensions = json.dumps(
                partial_time_series.labels_dimensions)
            ts_idx.data_ndim = len(ts_data_shape)
            ts_idx.data_length_1d, ts_idx.data_length_2d, ts_idx.data_length_3d, ts_idx.data_length_4d = prepare_array_shape_meta(
                ts_data_shape)

            return [ts_idx]

        except ParseException as excep:
            logger = get_logger(__name__)
            logger.exception(excep)
            raise LaunchException(excep)
Example No. 6
    def test_import_surface_gifti_data(self, operation_factory):
        """
            This method tests the import of a surface from a GIFTI file.
            !!! Important: We changed this test to execute only the GIFTI parse,
                because storing the surface takes too long (~9 min) since
                normals need to be calculated.
        """
        operation_id = operation_factory().id

        parser = GIFTIParser(operation_id)
        surface = parser.parse(self.GIFTI_SURFACE_FILE)

        assert 131342 == len(surface.vertices)
        assert 262680 == len(surface.triangles)
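
The examples mix two parser constructors: the adapter code builds GIFTIParser(self.storage_path, self.operation_id), while the newer tests pass only an operation id. Below is a minimal standalone sketch of direct parsing with the newer signature, as in the test above; the import path and the file name are assumptions, not taken from the examples.

    # Sketch only: the module path and the file name are assumed placeholders.
    from tvb.adapters.uploaders.gifti.parser import GIFTIParser

    operation_id = 1                                    # placeholder operation id
    parser = GIFTIParser(operation_id)                  # newer signature: operation id only
    surface = parser.parse("cortex.surf.gii")           # any GIFTI surface file
    print(len(surface.vertices), len(surface.triangles))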
Example No. 7
    def _parse_connectome_surfaces(self, connectome_surface, warning_message, should_center):
        """
        Parse data from a CSurface object and save it in our internal Surface DataTypes
        """
        surfaces, processed_files = [], []
        parser = GIFTIParser(self.storage_path, self.operation_id)

        for c_surface in connectome_surface:
            if c_surface.src in processed_files:
                continue

            try:
                # create a meaningful but unique temporary path to extract
                tmpdir = os.path.join(gettempdir(), c_surface.parent_cfile.get_unique_cff_name())
                self.log.debug("Extracting %s[%s] into %s ..." % (c_surface.src, c_surface.name, tmpdir))
                _zipfile = ZipFile(c_surface.parent_cfile.src, 'r', ZIP_DEFLATED)
                gifti_file_1 = _zipfile.extract(c_surface.src, tmpdir)

                gifti_file_2 = None
                surface_name, pair_surface = self._find_pair_file(c_surface, connectome_surface)
                if pair_surface:
                    self.log.debug("Extracting pair %s[%s] into %s ..." % (pair_surface.src, pair_surface.name, tmpdir))
                    gifti_file_2 = _zipfile.extract(pair_surface.src, tmpdir)

                surface_type = self._guess_surface_type(c_surface.src.lower())
                self.logger.info("We will import surface %s as type %s" % (c_surface.src, surface_type))
                surface = parser.parse(gifti_file_1, gifti_file_2, surface_type, should_center)
                surface.user_tag_1 = surface_name

                validation_result = surface.validate()
                if validation_result.warnings:
                    warning_message += validation_result.summary() + "\n"

                surfaces.append(surface)

                if pair_surface:
                    processed_files.append(pair_surface.src)
                processed_files.append(c_surface.src)

                if os.path.exists(tmpdir):
                    shutil.rmtree(tmpdir)

            except ParseException:
                self.logger.exception("Could not import a Surface entity.")
                warning_message += "Problem when importing Surfaces!! \n"
            except OSError:
                self.log.exception("Could not clean up temporary file(s).")

        return surfaces
Example No. 8
    def test_import_surface_gifti_data(self):
        """
            This method tests the import of a surface from a GIFTI file.
            !!! Important: We changed this test to execute only the GIFTI parse,
                because storing the surface takes too long (~9 min) since
                normals need to be calculated.
        """
        operation_id = self.datatypeFactory.get_operation().id
        storage_path = FilesHelper().get_operation_folder(self.test_project.name, operation_id)

        parser = GIFTIParser(storage_path, operation_id)
        surface = parser.parse(self.GIFTI_SURFACE_FILE)

        assert 131342 == len(surface.vertices)
        assert 262680 == len(surface.triangles)
Example No. 9
    def test_import_timeseries_gifti_data(self, operation_factory):
        """
        This method tests the import of a time series from a GIFTI file.
        !!! Important: We changed this test to execute only the GIFTI parse,
            because storing the surface takes too long (~9 min) since
            normals need to be calculated.
        """
        operation_id = operation_factory().id

        parser = GIFTIParser(operation_id)
        time_series = parser.parse(self.GIFTI_TIME_SERIES_FILE)
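        # the newer parser returns a (partial time series, data arrays) tuple, as used in Example No. 5 above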

        data_shape = time_series[1]

        assert 135 == len(data_shape)
        assert 143479 == data_shape[0].dims[0]
Example No. 10
    def launch(self, file_type, data_file, data_file_part2, should_center=False):
        """
        Execute import operations:
        """
        parser = GIFTIParser(self.storage_path, self.operation_id)
        try:
            surface = parser.parse(data_file, data_file_part2, file_type, should_center=should_center)
            validation_result = surface.validate()

            if validation_result.warnings:
                self.add_operation_additional_info(validation_result.summary())

            return [surface]             
        except ParseException as excep:
            logger = get_logger(__name__)
            logger.exception(excep)
            raise LaunchException(excep)
Example No. 11
    def test_import_timeseries_gifti_data(self):
        """
        This method tests the import of a time series from a GIFTI file.
        !!! Important: We changed this test to execute only the GIFTI parse,
            because storing the surface takes too long (~9 min) since
            normals need to be calculated.
        """
        operation_id = self.datatypeFactory.get_operation().id
        storage_path = FilesHelper().get_operation_folder(self.test_project.name, operation_id)

        parser = GIFTIParser(storage_path, operation_id)
        time_series = parser.parse(self.GIFTI_TIME_SERIES_FILE)

        data_shape = time_series.read_data_shape()

        assert 135 == data_shape[0]
        assert 143479 == data_shape[1]
Example No. 12
    def launch(self, view_model):
        # type: (GIFTISurfaceImporterModel) -> [SurfaceIndex]
        """
        Execute import operations:
        """
        parser = GIFTIParser(self.storage_path, self.operation_id)
        try:
            surface = parser.parse(view_model.data_file, view_model.data_file_part2, view_model.file_type,
                                   should_center=view_model.should_center)
            surface.compute_triangle_normals()
            surface.compute_vertex_normals()
            validation_result = surface.validate()

            if validation_result.warnings:
                self.add_operation_additional_info(validation_result.summary())
            surface_idx = h5.store_complete(surface, self.storage_path)
            return [surface_idx]
        except ParseException as excep:
            logger = get_logger(__name__)
            logger.exception(excep)
            raise LaunchException(excep)
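
For the view-model based importers (Examples No. 5 and No. 12), the inputs arrive on a view model instead of being passed as launch() arguments. The sketch below fills such a model before the framework runs the import; the attribute names follow the fields read in Example No. 12, while the import path and file values are assumptions.

    # Sketch only: attribute names follow Example No. 12; the import path and
    # the file values are assumed placeholders, not confirmed API.
    from tvb.adapters.uploaders.gifti_surface_importer import GIFTISurfaceImporterModel

    view_model = GIFTISurfaceImporterModel()
    view_model.file_type = "Cortical Surface"       # placeholder surface type label
    view_model.data_file = "lh.cortex.gii"          # placeholder path to the first GIFTI file
    view_model.data_file_part2 = "rh.cortex.gii"    # optional second file (may be None)
    view_model.should_center = False
    # the framework then calls the importer's launch(view_model), as shown in Example No. 12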