def _read_raw(path_: Path, volume_: Volume):
    """Read a raw binary volume from disk via pymicro's ``HST_read``.

    Parameters
    ----------
    path_:
        Path of the raw data file to read.
    volume_:
        Volume object whose ``metadata`` provides the dtype and dimensions
        needed to interpret the raw file.

    Returns
    -------
    The array returned by ``file_utils.HST_read`` (from pymicro).
    """
    return file_utils.HST_read(
        str(path_),  # it doesn't accept paths...
        # pre-loaded kwargs
        autoparse_filename=False,  # the file names are not properly formatted
        # BUG FIX: the original body read the global `volume` instead of the
        # `volume_` parameter, so the function ignored its own argument.
        data_type=volume_.metadata.dtype,
        dims=volume_.metadata.dimensions,
        verbose=True,
    )
# Resolve and validate the volume this run operates on.
volume = Volume.with_check(
    name=args.volume_name,
    version=args.volume_version,
)
logger.info(f"volume\n{dict2str(asdict(volume))}")

# The model and the volume labels must agree on the number of classes.
assert volume.nclasses == model_nclasses, f"{model_nclasses=} {volume.nclasses=}"

logger.info("Loading data from disk.")

# Data: read the raw array, then scale it into the normalized range.
raw_data = file_utils.HST_read(
    str(volume.data_path),  # it doesn't accept paths...
    autoparse_filename=False,  # the file names are not properly formatted
    data_type=volume.metadata.dtype,
    dims=volume.metadata.dimensions,
    verbose=False,
)
voldata = raw_data / volume.normalization_factor
logger.debug(f"{voldata.shape=}")

# Carve the train/val sub-volumes out of the full normalized array.
voldata_train = volume[args.partition_train].get_volume_partition(voldata)
voldata_val = volume[args.partition_val].get_volume_partition(voldata)
logger.debug(f"{voldata_train.shape=}")
logger.debug(f"{voldata_val.shape=}")

# The full array is no longer needed once the partitions exist; free it
# to lower peak memory usage.
del voldata
logger.info("done") # ##### data logger.info(f"Loading data from disk at file: {volume.data_path.name}") logger.debug(f"{volume.data_path=}") normalization_factor = volume_sequence.NORMALIZE_FACTORS[ volume.metadata.dtype] logger.debug(f"{normalization_factor=}") data_volume = file_utils.HST_read( str(volume.data_path), # it doesn't accept paths... autoparse_filename=False, # the file names are not properly formatted data_type=volume.metadata.dtype, dims=volume.metadata.dimensions, verbose=True, ) / normalization_factor # normalize logger.debug(f"{data_volume.shape=}") if partition is not None: logger.info(f"Cutting data with {partition.alias=}") logger.debug(f"{partition=}") data_volume = partition.get_volume_partition(data_volume) else: logger.debug(f"No partition. The whole volume will be processed.")