def _find_180deg_proj(self, sample_dirname: Path):
    """Return the first 180-degree projection image found in the sibling
    '180deg' directory of the sample directory, or "" when none exists.

    :param sample_dirname: Directory in which the sample images were found
    """
    expected_path = sample_dirname / '..' / '180deg'
    try:
        found = get_file_names(expected_path.absolute(), self.image_format)
    except RuntimeError:
        logger.info(f"Could not find 180 degree projection in {expected_path}")
        return ""
    return found[0]
def load_stack(file_path: str, progress: Optional[Progress] = None) -> Images:
    """Load the whole image stack that *file_path* belongs to.

    The directory, extension and filename prefix are all derived from the
    single given file, then every sibling that matches is loaded.

    :param file_path: Path of any one file belonging to the stack
    :param progress: Optional progress reporting instance
    :return: The loaded sample images
    """
    directory = os.path.dirname(file_path)
    image_format = get_file_extension(file_path)
    prefix = get_prefix(file_path)
    file_names = get_file_names(path=directory, img_format=image_format, prefix=prefix)  # type: ignore
    dataset = load(file_names=file_names, progress=progress)
    return dataset.sample
def load_data(self, file_path: Optional[str]) -> Tuple[Optional[np.ndarray], Optional[List[str]]]:
    """Load every image sharing directory/prefix/format with *file_path*.

    :param file_path: Path of one file from the stack; falsy values skip loading
    :return: (loaded data, list of file names), or (None, None) if no path given
    """
    if not file_path:
        return None, None
    dirname = os.path.dirname(file_path)
    file_names = get_file_names(dirname, self.img_format, get_prefix(file_path))
    return self.load_files(file_names), file_names
def _find_log(self, dirname: Path, log_name: str):
    """Look one directory above *dirname* for a txt log file whose name
    starts with *log_name*; return its path, or "" when nothing matches.

    :param dirname: The directory in which the sample images were found
    :param log_name: The log name is typically the directory name of the sample
    """
    expected_path = dirname / '..'
    try:
        log_files = get_file_names(expected_path.absolute(), "txt", prefix=log_name)
    except RuntimeError:
        logger.info(f"Could not find a log file for {log_name} in {dirname}")
        return ""
    return log_files[0]
def load_data(self, file_path) -> Tuple[Optional[np.ndarray], Optional[List[str]], Optional[str]]:
    """Load all images sharing directory/prefix/format with *file_path*
    into a shared-memory array.

    :param file_path: Path of one file from the stack; falsy values skip loading
    :return: (loaded data, file names, shared memory name), or a triple of
             None when no path was given
    """
    if not file_path:
        return None, None, None
    file_names = get_file_names(os.path.dirname(file_path), self.img_format, get_prefix(file_path))
    # name of the shared memory segment is derived from the first file
    memory_file_name = pu.create_shared_name(file_names[0])
    data = self.load_files(file_names, memory_file_name)
    return data, file_names, memory_file_name
def test_get_file_names(self):
    """Searching with the short 'tif' extension must also match '.tiff' files."""
    # Arrange: create a single file carrying the long '.tiff' extension
    tiff_filename = os.path.join(self.output_directory, 'test.tiff')
    with open(tiff_filename, 'wb') as handle:
        handle.write(b'\0')

    # Act: search using the short 'tif' extension
    found_files = utility.get_file_names(self.output_directory, 'tif')

    # Assert: the extension variants are treated as equivalent
    self.assertEqual([tiff_filename], found_files)
def _find_images_in_same_directory(self, sample_dirname: Path, type: str, suffix: str) -> Optional[List[str]]: prefix_list = [f"*{type}", f"*{type.lower()}", f"*{type}_{suffix}", f"*{type.lower()}_{suffix}"] for prefix in prefix_list: try: if suffix != "After": return get_file_names(sample_dirname.absolute(), self.image_format, prefix=prefix) except RuntimeError: logger.info(f"Could not find {prefix} files in {sample_dirname.absolute()}") return None
def _find_log(dirname: Path, log_name: str) -> str:
    """Look one directory above *dirname* for a txt log file whose name
    starts with *log_name*.

    :param dirname: The directory in which the sample images were found
    :param log_name: The log name is typically the directory name of the sample
    :return: Path of the first matching log file, or "" when none exists
    """
    parent = dirname / '..'
    try:
        candidates = get_file_names(parent.absolute(), "txt", prefix=log_name)
    except RuntimeError:
        logger.info(f"Could not find a log file for {log_name} in {dirname}")
        return ""
    return candidates[0]
def _find_images(self, sample_dirname: Path, type: str) -> List[str]:
    """Find images of *type* (e.g. Flat/Dark): first by prefix inside the
    sample directory itself, then in conventionally-named sibling
    directories one level above. Returns [] when nothing is found.

    :param sample_dirname: Directory in which the sample images were found
    :param type: Image type used for prefix/directory-name matching
    """
    # same folder
    try:
        return get_file_names(sample_dirname.absolute(), self.image_format, prefix=f"*{type}")
    except RuntimeError:
        logger.info(f"Could not find {type} files in {sample_dirname.absolute()}")

    # look into different directories 1 level above
    for d in (f"{type.lower()}", type, f"{type}_After", f"{type}_Before"):
        expected_folder_path = sample_dirname / ".." / d
        try:
            return get_file_names(expected_folder_path.absolute(), self.image_format)
        except RuntimeError:
            logger.info(f"Could not find {type} files in {expected_folder_path.absolute()}")
    return []
def read_in_shape(input_path,
                  in_prefix='',
                  in_format=DEFAULT_IO_FILE_FORMAT,
                  data_dtype=np.float32) -> Tuple[Tuple[int, int, int], bool]:
    """Determine the shape of the full dataset without loading it all.

    Only the first image is loaded (indices [0, 1, 1]); its 2D shape is
    combined with the number of found files to build the 3D shape.

    :return: ((number of images, height, width), whether data is sinograms)
    """
    input_file_names = get_file_names(input_path, in_format, in_prefix)
    dataset = load(input_path,
                   in_prefix=in_prefix,
                   in_format=in_format,
                   dtype=data_dtype,
                   indices=[0, 1, 1],
                   file_names=input_file_names)
    images = dataset.sample

    # construct and return the new shape
    shape = (len(input_file_names), ) + images.data[0].shape
    images.free_memory()
    # NOTE(review): is_sinograms is read after free_memory() as in the
    # original — presumably a plain flag that survives freeing; confirm
    return shape, images.is_sinograms
def _find_images(self, sample_dirname: Path, type: str, suffix: str, look_without_suffix=False) -> List[str]:
    """Find images of *type*/*suffix*: first by prefix inside the sample
    directory itself, then in conventionally-named sibling directories one
    level above. Returns [] when nothing is found.

    :param sample_dirname: Directory in which the sample images were found
    :param type: Image type, e.g. "Flat" or "Dark"
    :param suffix: Suffix variant such as "Before" or "After"
    :param look_without_suffix: Also try sibling directories named after
                                the bare type, without the suffix
    """
    # same folder
    in_place = self._find_images_in_same_directory(sample_dirname, type, suffix)
    if in_place is not None:
        return in_place

    # look into different directories 1 level above
    candidate_dirs = [f"{type} {suffix}", f"{type.lower()} {suffix}", f"{type}_{suffix}", f"{type.lower()}_{suffix}"]
    if look_without_suffix:
        candidate_dirs += [f"{type.lower()}", type]
    for d in candidate_dirs:
        expected_folder_path = sample_dirname / ".." / d
        try:
            return get_file_names(expected_folder_path.absolute(), self.image_format)
        except RuntimeError:
            logger.info(f"Could not find {self.image_format} files in {expected_folder_path.absolute()}")
    return []
def read_in_file_information(input_path,
                             in_prefix='',
                             in_format=DEFAULT_IO_FILE_FORMAT,
                             data_dtype=np.float32) -> FileInformation:
    """Collect file names, dataset shape and sinogram flag for a directory
    of images, loading only the first image (indices [0, 1, 1]) to probe
    the 2D dimensions.

    :return: FileInformation with filenames, 3D shape, and sinograms flag
    """
    input_file_names = get_file_names(input_path, in_format, in_prefix)
    dataset = load(input_path,
                   in_prefix=in_prefix,
                   in_format=in_format,
                   dtype=data_dtype,
                   indices=[0, 1, 1],
                   file_names=input_file_names)
    images = dataset.sample

    # construct and return the new shape
    shape = (len(input_file_names), ) + images.data[0].shape
    images.free_memory()
    # NOTE(review): is_sinograms is read after free_memory() as in the
    # original — presumably a plain flag that survives freeing; confirm
    return FileInformation(filenames=input_file_names, shape=shape, sinograms=images.is_sinograms)
def load(input_path=None,
         input_path_flat=None,
         input_path_dark=None,
         in_prefix='',
         in_format=DEFAULT_IO_FILE_FORMAT,
         dtype=np.float32,
         file_names=None,
         indices=None,
         progress=None) -> Dataset:
    """
    Loads a stack, including sample, white and dark images.

    :param input_path: Path for the input data folder
    :param input_path_flat: Optional: Path for the input Flat images folder
    :param input_path_dark: Optional: Path for the input Dark images folder
    :param in_prefix: Optional: Prefix for loaded files
    :param in_format: Default:'tiff', format for the input images
    :param dtype: Default:np.float32, data type for the input images
    :param file_names: Use provided file names for loading
    :param indices: Specify which indices are loaded from the found files.
                    This **DOES NOT** check for the number in the image
                    filename, but removes all indices from the filenames
                    list that are not selected
    :param progress: The progress reporting instance
    :raises ValueError: when the format is unsupported or indices has
                        fewer than 3 elements
    :raises NotImplementedError: for the 'nxs' format (stack loading TODO)
    :return: a tuple with shape 3: (sample, flat, dark), if no flat and dark
             were loaded, they will be None
    """
    if in_format not in supported_formats():
        raise ValueError("Image format {0} not supported!".format(in_format))

    # indices must describe a full [start, stop, step] slice
    if indices and len(indices) < 3:
        raise ValueError("Indices at this point MUST have 3 elements: [start, stop, step]!")

    # caller-provided file names take precedence over a directory scan
    if not file_names:
        input_file_names = get_file_names(input_path, in_format, in_prefix)
    else:
        input_file_names = file_names

    if in_format in ['nxs']:
        raise NotImplementedError("TODO this needs to be adapted to the new changes")
        # pass only the first filename as we only expect a stack
        # input_file = input_file_names[0]
        # images = stack_loader.execute(_nxsread, input_file, dtype, "NXS Load", indices, progress)
    else:
        # fits/fit have their own reader; everything else goes through imread
        if in_format in ['fits', 'fit']:
            load_func = _fitsread
        else:
            load_func = _imread

        dataset = img_loader.execute(load_func, input_file_names, input_path_flat, input_path_dark,
                                     in_format, dtype, indices, progress)

    # Search for and load metadata file
    # essential=False: absence of a json metadata file is not an error
    metadata_found_filenames = get_file_names(input_path, 'json', in_prefix, essential=False)

    metadata_filename = metadata_found_filenames[0] if metadata_found_filenames else None
    if metadata_filename:
        with open(metadata_filename) as f:
            dataset.sample.load_metadata(f)
            LOG.debug('Loaded metadata from: {}'.format(metadata_filename))
    else:
        LOG.debug('No metadata file found')

    return dataset