Example #1
    def _export_h5n5(self, compress=False):
        self.progressSignal(0)

        # Create and open the hdf5/n5 file
        export_components = PathComponents(self.ExportPath.value)
        try:
            with OpStreamingH5N5Reader.get_h5_n5_file(export_components.externalPath, mode="a") as h5N5File:
                # Create a temporary operator to do the work for us
                opH5N5Writer = OpH5N5WriterBigDataset(parent=self)
                with contextlib.suppress(KeyError):
                    del h5N5File[export_components.internalPath]
                try:
                    opH5N5Writer.CompressionEnabled.setValue(compress)
                    opH5N5Writer.h5N5File.setValue(h5N5File)
                    opH5N5Writer.h5N5Path.setValue(export_components.internalPath)
                    opH5N5Writer.Image.connect(self.Input)

                    # The H5 Writer provides its own progress signal, so just connect ours to it.
                    opH5N5Writer.progressSignal.subscribe(self.progressSignal)

                    # Perform the export and block for it in THIS THREAD.
                    opH5N5Writer.WriteImage[:].wait()
                finally:
                    opH5N5Writer.cleanUp()
                    self.progressSignal(100)
        except IOError as ex:
            import sys

            msg = "\nException raised when attempting to export to {}: {}\n".format(
                export_components.externalPath, str(ex))
            sys.stderr.write(msg)
            raise
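
Note on Example #1: this variant opens the target container in append mode ("a") and removes any stale dataset at the internal path before writing, so repeated exports to the same file overwrite the dataset in place. Below is a minimal sketch of the same overwrite pattern using plain h5py; the file and dataset names are made up for illustration.

import contextlib

import h5py
import numpy

# open (or create) the container in append mode, drop any stale dataset at the
# internal path, then write the new data -- mirrors the delete-then-write above
with h5py.File("example_export.h5", "a") as f:  # hypothetical file name
    with contextlib.suppress(KeyError):
        del f["volume/data"]  # ignore if it does not exist yet
    f.create_dataset("volume/data", data=numpy.zeros((4, 5), dtype=numpy.float32))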
Example #3
    def setup_method(self, method):
        self.graph = Graph()
        self.testFileDir = tempfile.TemporaryDirectory()
        self.testDataH5FileName = self.testFileDir.name + "/test.h5"
        self.testDataN5FileName = self.testFileDir.name + "/test.n5"
        self.h5_op = OpStreamingH5N5Reader(graph=self.graph)
        self.n5_op = OpStreamingH5N5Reader(graph=self.graph)

        self.h5File = OpStreamingH5N5Reader.get_h5_n5_file(self.testDataH5FileName)
        self.n5File = OpStreamingH5N5Reader.get_h5_n5_file(self.testDataN5FileName)
        self.h5File.create_group("volume")
        self.n5File.create_group("volume")

        # Create a test dataset
        datashape = (1, 2, 3, 4, 5)
        self.data = numpy.indices(datashape).sum(0).astype(numpy.float32)
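
The fixture above opens one HDF5 and one N5 handle, but the snippet does not show the matching cleanup. A plausible pytest-style counterpart is sketched below; it assumes both handles returned by get_h5_n5_file expose close(), which the snippet itself does not confirm.

    def teardown_method(self, method):
        # assumption: both handles support close(); the N5 data itself lives on disk
        # as a directory tree inside the temporary directory
        self.h5File.close()
        self.n5File.close()
        self.testFileDir.cleanup()  # remove the temporary directory and its contents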
Example #4
    def _applyPattern(self):
        globStrings = self.patternEdit.text()
        H5EXTS = OpStreamingH5N5SequenceReaderM.H5EXTS
        N5EXTS = OpStreamingH5N5SequenceReaderM.N5EXTS
        filenames = []
        # see if some glob strings include HDF5 and/or N5 files
        globStrings = globStrings.split(os.path.pathsep)
        pcs = [PathComponents(x) for x in globStrings]
        is_h5_n5 = [x.extension in (H5EXTS + N5EXTS) for x in pcs]

        h5GlobStrings = os.path.pathsep.join([x for x, y in zip(globStrings, is_h5_n5) if y is True])
        globStrings = os.path.pathsep.join([x for x, y in zip(globStrings, is_h5_n5) if y is False])

        filenames.extend(OpStackLoader.expandGlobStrings(globStrings))

        try:
            OpStreamingH5N5SequenceReaderS.checkGlobString(h5GlobStrings)
            # OK, if nothing raised there is a single h5 file in h5GlobStrings:
            pathComponents = PathComponents(h5GlobStrings.split(os.path.pathsep)[0])
            h5file = OpStreamingH5N5Reader.get_h5_n5_file(pathComponents.externalPath, mode="r")
            filenames.extend(
                "{}/{}".format(pathComponents.externalPath, internal)
                for internal in OpStreamingH5N5SequenceReaderS.expandGlobStrings(h5file, h5GlobStrings)
            )
        except (
                OpStreamingH5N5SequenceReaderS.WrongFileTypeError,
                OpStreamingH5N5SequenceReaderS.NotTheSameFileError,
                OpStreamingH5N5SequenceReaderS.NoInternalPlaceholderError,
                OpStreamingH5N5SequenceReaderS.ExternalPlaceholderError,
        ):
            pass

        try:
            OpStreamingH5N5SequenceReaderM.checkGlobString(h5GlobStrings)
            filenames.extend(
                "{}/{}".format(external, internal)
                for external, internal in zip(*OpStreamingH5N5SequenceReaderM.expandGlobStrings(h5GlobStrings))
            )
        except (
                OpStreamingH5N5SequenceReaderM.WrongFileTypeError,
                OpStreamingH5N5SequenceReaderM.SameFileError,
                OpStreamingH5N5SequenceReaderM.NoExternalPlaceholderError,
                OpStreamingH5N5SequenceReaderM.InternalPlaceholderError,
        ):
            pass
        self._updateFileList(filenames)
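
The core of _applyPattern is partitioning the os.pathsep-separated glob strings into HDF5/N5 patterns (handled by the sequence readers) and everything else (handled by OpStackLoader). The standalone sketch below reproduces only that partitioning step with plain splitext instead of PathComponents, so it ignores internal dataset paths; the extension set is an assumed stand-in for H5EXTS + N5EXTS.

import os

H5_N5_EXTS = {".h5", ".hdf5", ".n5"}  # assumed stand-in for H5EXTS + N5EXTS

def split_patterns(pattern_text):
    """Partition an os.pathsep-joined glob string into (h5/n5 globs, other globs)."""
    globs = pattern_text.split(os.path.pathsep)
    is_h5_n5 = [os.path.splitext(g)[1].lower() in H5_N5_EXTS for g in globs]
    h5n5_globs = os.path.pathsep.join(g for g, hit in zip(globs, is_h5_n5) if hit)
    other_globs = os.path.pathsep.join(g for g, hit in zip(globs, is_h5_n5) if not hit)
    return h5n5_globs, other_globs

# e.g. one HDF5 file pattern and one PNG stack pattern
print(split_patterns(os.path.pathsep.join(["volume_*.h5", "frames_*.png"])))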
Example #5
    def _h5N5FindCommonInternal(h5N5Files):
        """
        Tries to find common internal path (containing data)

        Method is used, when a directory is selected and the internal path is,
        thus, unclear.

        Args:
            h5Files or hń5Files (list of strings): h5 or n5 files to be globbed internally

        Returns:
            list of internal paths
        """
        h5 = OpStreamingH5N5Reader.get_h5_n5_file(h5N5Files[0], mode='r')
        internal_paths = set([x['name'] for x in lsH5N5(h5, minShape=2)])
        h5.close()
        for h5N5File in h5N5Files[1:]:
            h5 = OpStreamingH5N5Reader.get_h5_n5_file(h5N5File, 'r')
            # get all datasets with at least a 2D shape
            tmp = set([x['name'] for x in lsH5N5(h5, minShape=2)])
            internal_paths = internal_paths.intersection(tmp)
            h5.close()

        return list(internal_paths)
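
_h5N5FindCommonInternal builds the set of at-least-2D dataset paths for the first file and intersects it with the corresponding set of every further file. A hedged, HDF5-only sketch of the same idea follows, with a small visititems walker standing in for lsH5N5(h5, minShape=2); all names are illustrative.

import h5py

def dataset_paths(h5file, min_ndim=2):
    """Collect internal paths of datasets with at least `min_ndim` dimensions."""
    paths = []

    def visit(name, obj):
        if isinstance(obj, h5py.Dataset) and len(obj.shape) >= min_ndim:
            paths.append(name)

    h5file.visititems(visit)
    return paths

def find_common_internal(filenames):
    """HDF5-only stand-in for _h5N5FindCommonInternal."""
    common = None
    for fname in filenames:
        with h5py.File(fname, "r") as f:
            paths = set(dataset_paths(f))
        common = paths if common is None else common & paths
    return sorted(common or [])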
Example #8
    def _export_h5n5(self, compress=False):
        self.progressSignal(0)

        # Create and open the hdf5/n5 file
        export_components = PathComponents(self.ExportPath.value)
        try:
            if os.path.isdir(export_components.externalPath):  # externalPath leads to a n5 file
                shutil.rmtree(export_components.externalPath)  # n5 is stored as a directory structure
            else:
                os.remove(export_components.externalPath)
        except OSError as ex:
            # It's okay if the file isn't there.
            if ex.errno != 2:
                raise
        try:
            with OpStreamingH5N5Reader.get_h5_n5_file(export_components.externalPath, "w") as h5N5File:
                # Create a temporary operator to do the work for us
                opH5N5Writer = OpH5N5WriterBigDataset(parent=self)
                try:
                    opH5N5Writer.CompressionEnabled.setValue(compress)
                    opH5N5Writer.h5N5File.setValue(h5N5File)
                    opH5N5Writer.h5N5Path.setValue(export_components.internalPath)
                    opH5N5Writer.Image.connect(self.Input)

                    # The H5 Writer provides its own progress signal, so just connect ours to it.
                    opH5N5Writer.progressSignal.subscribe(self.progressSignal)

                    # Perform the export and block for it in THIS THREAD.
                    opH5N5Writer.WriteImage[:].wait()
                finally:
                    opH5N5Writer.cleanUp()
                    self.progressSignal(100)
        except IOError as ex:
            import sys

            msg = "\nException raised when attempting to export to {}: {}\n".format(
                export_components.externalPath, str(ex))
            sys.stderr.write(msg)
            raise
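
Unlike Example #1, this exporter deletes any previous export target first (an N5 export is a whole directory tree, an HDF5 export is a single file) and then reopens the container with mode "w". The standalone sketch below shows that pre-clean step with errno.ENOENT spelled out instead of the bare 2; the helper name is made up.

import errno
import os
import shutil

def remove_export_target(path):
    """Delete a previous export at `path`, whether it is an N5 directory or an HDF5 file."""
    try:
        if os.path.isdir(path):       # N5 is stored as a directory structure
            shutil.rmtree(path)
        else:
            os.remove(path)
    except OSError as ex:
        if ex.errno != errno.ENOENT:  # it's fine if nothing was there
            raise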
Example #10
    def _findInternalStacks(h5N5File):
        """
        Tries to find common internal path (containing data)

        Method is used, when a directory is selected and the internal path is,
        thus, unclear.

        Args:
            h5file or n5file (list of strings): h5 or n5 files to be globbed internally

        Returns:
            list of internal stacks
        """
        pathComponents = PathComponents(h5N5File)
        if pathComponents.extension in (OpStreamingH5N5SequenceReaderM.H5EXTS + OpStreamingH5N5SequenceReaderM.N5EXTS):
            # get all internal paths
            with OpStreamingH5N5Reader.get_h5_n5_file(h5N5File, mode='r') as h5:
                internal_paths = lsH5N5(h5, minShape=2)
            return [x['name'] for x in internal_paths]
Example #12
    def _attemptOpenAsH5N5(self, filePath):
        # Check for an hdf5 or n5 extension
        pathComponents = PathComponents(filePath)
        ext = pathComponents.extension
        if ext[1:] not in OpInputDataReader.h5_n5_Exts:
            return [], None

        externalPath = pathComponents.externalPath
        internalPath = pathComponents.internalPath

        if not os.path.exists(externalPath):
            raise OpInputDataReader.DatasetReadError("Input file does not exist: " + externalPath)

        # Open the h5/n5 file in read-only mode
        try:
            h5N5File = OpStreamingH5N5Reader.get_h5_n5_file(externalPath, "r")
        except OpInputDataReader.DatasetReadError:
            raise
        except Exception as e:
            msg = "Unable to open H5/N5 File: {}\n{}".format(externalPath, str(e))
            raise OpInputDataReader.DatasetReadError(msg) from e
        else:
            if not internalPath:
                possible_internal_paths = lsH5N5(h5N5File)
                if len(possible_internal_paths) == 1:
                    internalPath = possible_internal_paths[0]["name"]
                elif len(possible_internal_paths) == 0:
                    h5N5File.close()
                    msg = "H5/N5 file contains no datasets: {}".format(externalPath)
                    raise OpInputDataReader.DatasetReadError(msg)
                else:
                    h5N5File.close()
                    msg = (
                        "When using hdf5/n5, you must append the internal path of the "
                        "dataset to your filename, e.g. myfile.h5/volume/data. "
                        "No internal path provided for dataset in file: {}".format(externalPath)
                    )
                    raise OpInputDataReader.DatasetReadError(msg)
            try:
                compression_setting = h5N5File[internalPath].compression
            except Exception as e:
                h5N5File.close()
                msg = "Error reading H5/N5 File: {}\n{}".format(externalPath, e)
                raise OpInputDataReader.DatasetReadError(msg) from e

            # If the h5 dataset is compressed, we'll have better performance
            #  with a multi-process hdf5 access object.
            # (Otherwise, single-process is faster.)
            allow_multiprocess_hdf5 = (
                "LAZYFLOW_MULTIPROCESS_HDF5" in os.environ and os.environ["LAZYFLOW_MULTIPROCESS_HDF5"] != ""
            )
            if compression_setting is not None and allow_multiprocess_hdf5 and isinstance(h5N5File, h5py.File):
                h5N5File.close()
                h5N5File = MultiProcessHdf5File(externalPath, "r")

        self._file = h5N5File

        h5N5Reader = OpStreamingH5N5Reader(parent=self)
        h5N5Reader.H5N5File.setValue(h5N5File)

        try:
            h5N5Reader.InternalPath.setValue(internalPath)
        except OpStreamingH5N5Reader.DatasetReadError as e:
            msg = "Error reading H5/N5 File: {}\n{}".format(externalPath, e.msg)
            raise OpInputDataReader.DatasetReadError(msg) from e

        return ([h5N5Reader], h5N5Reader.OutputImage)
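
Example #12 encodes the convention that dataset locations look like myfile.h5/volume/data: if the user gives no internal path, the file is accepted only when it contains exactly one dataset. The sketch below reproduces that disambiguation for plain HDF5 with h5py; the helper name and messages are made up.

import h5py

def pick_single_dataset(external_path):
    """Return the internal path of the only dataset in an HDF5 file (hypothetical helper)."""
    names = []

    def visit(name, obj):
        if isinstance(obj, h5py.Dataset):
            names.append(name)

    with h5py.File(external_path, "r") as f:
        f.visititems(visit)
    if len(names) == 1:
        return names[0]
    if not names:
        raise ValueError("File contains no datasets: {}".format(external_path))
    raise ValueError(
        "Multiple datasets found; append the internal path to the filename, "
        "e.g. {}/{}".format(external_path, names[0])
    )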