def abspath(root, relpath):
    """Resolve *relpath* against *root* and return the absolute path as a string.

    If *root* is an existing directory, *relpath* is joined directly onto it;
    otherwise *root* is treated as a file path and *relpath* is joined onto
    its parent directory.
    """
    from pathlib import Path
    base = Path(root)
    anchor = base if base.is_dir() else base.parent
    return str((anchor / relpath).absolute())
def export_imagej_rois(fname, polygons, set_position=True, subpixel=True, compression=ZIP_DEFLATED):
    """Write polygons as ImageJ ROI entries into a '<fname>.zip' archive.

    polygons assumed to be a list of arrays with shape (id,2,c); a single
    ndarray is wrapped and treated as a one-element list. Each entry is
    named '<group>_<index>.roi' with 1-based, zero-padded numbering.
    """
    if isinstance(polygons, np.ndarray):
        polygons = (polygons,)
    target = Path(fname)
    # strip a trailing '.zip' suffix; it is re-added below
    if target.suffix == '.zip':
        target = target.with_suffix('')
    with ZipFile(str(target) + '.zip', mode='w', compression=compression) as roizip:
        for pos, polygroup in enumerate(polygons, start=1):
            for i, poly in enumerate(polygroup, start=1):
                entry_name = '{pos:03d}_{i:03d}.roi'.format(pos=pos, i=i)
                roi_bytes = polyroi_bytearray(poly[1], poly[0],
                                              pos=(pos if set_position else None),
                                              subpixel=subpixel)
                roizip.writestr(entry_name, roi_bytes)
def __init__(self, config, name=None, basedir='.'):
    """See class docstring"""
    # --- argument validation (config may be None -> load model from disk) ---
    if not (config is None or isinstance(config, DenoiSegConfig)):
        _raise(ValueError('Invalid configuration: %s' % str(config)))
    if config is not None and not config.is_valid():
        invalid_attr = config.is_valid(True)[1]
        raise ValueError('Invalid configuration attributes: ' + ', '.join(invalid_attr))
    # either a config or a basedir must be given
    if config is None and basedir is None:
        _raise(ValueError())
    if not (name is None or isinstance(name, string_types)):
        _raise(ValueError())
    if not (basedir is None or isinstance(basedir, (string_types, Path))):
        _raise(ValueError())

    # --- state ---
    self.config = config
    if name is not None:
        self.name = name
    else:
        # default model name: current timestamp
        self.name = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S.%f")
    self.basedir = None if basedir is None else Path(basedir)

    if config is not None:
        # config was provided -> update before it is saved to disk
        self._update_and_check_config()
    self._set_logdir()
    if config is None:
        # config was loaded from disk -> update it after loading
        self._update_and_check_config()
    self._model_prepared = False
    self.keras_model = self._build()
    if config is None:
        self._find_and_load_weights()
    self.alpha = K.variable(value=1, dtype='float32')
def __init__(self, config, name=None, basedir='.'):
    """See class docstring."""
    # --- argument validation (config may be None -> load model from disk) ---
    if not (config is None or isinstance(config, self._config_class)):
        _raise(ValueError("Invalid configuration of type '%s', was expecting type '%s'." % (
            type(config).__name__, self._config_class.__name__)))
    if config is not None and not config.is_valid():
        invalid_attr = config.is_valid(True)[1]
        raise ValueError('Invalid configuration attributes: ' + ', '.join(invalid_attr))
    if config is None and basedir is None:
        _raise(ValueError("No config provided and cannot be loaded from disk since basedir=None."))
    name_ok = name is None or (isinstance(name, string_types) and len(name) > 0)
    if not name_ok:
        _raise(ValueError("No valid name: '%s'" % str(name)))
    if not (basedir is None or isinstance(basedir, (string_types, Path))):
        _raise(ValueError("No valid basedir: '%s'" % str(basedir)))

    # --- state ---
    self.config = config
    if name is not None:
        self.name = name
    else:
        # default model name: current timestamp
        self.name = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S.%f")
    self.basedir = None if basedir is None else Path(basedir)

    if config is not None:
        # config was provided -> update before it is saved to disk
        self._update_and_check_config()
    self._set_logdir()
    if config is None:
        # config was loaded from disk -> update it after loading
        self._update_and_check_config()
    self._model_prepared = False
    self.keras_model = self._build()
    if config is None:
        self._find_and_load_weights()
def export_imagej_rois(fname, polygons, set_position=True, compression=ZIP_DEFLATED):
    """Write polygons as ImageJ ROI entries into a '<fname>.zip' archive.

    polygons assumed to be a list/array of arrays with shape (id,x,y)

    Parameters
    ----------
    fname : str or Path
        Output archive path; a trailing '.zip' suffix is stripped and re-added.
    polygons : iterable
        One group of polygon arrays per frame/position.
    set_position : bool
        If True, store the 1-based group index as the ROI position.
    compression : int
        Compression mode passed to ZipFile (default: ZIP_DEFLATED).
    """
    fname = Path(fname)
    if fname.suffix == '.zip':
        # Bug fix: the previous `Path(fname.stem)` discarded the parent
        # directory, writing e.g. 'dir/out.zip' into the CWD as 'out.zip'.
        # Strip only the suffix, keeping the directory part.
        fname = fname.with_suffix('')
    with ZipFile(str(fname) + '.zip', mode='w', compression=compression) as roizip:
        for pos, polygroup in enumerate(polygons, start=1):
            for i, poly in enumerate(polygroup, start=1):
                roi = polyroi_bytearray(poly[1], poly[0], pos=(pos if set_position else None))
                roizip.writestr('{pos:03d}_{i:03d}.roi'.format(pos=pos, i=i), roi)
def __init__(self, config, name=None, basedir='.'):
    """See class docstring"""
    # --- argument validation (config may be None -> load model from disk) ---
    if not (config is None or isinstance(config, N2VConfig)):
        _raise(ValueError('Invalid configuration: %s' % str(config)))
    if config is not None and not config.is_valid():
        invalid_attr = config.is_valid(True)[1]
        raise ValueError('Invalid configuration attributes: ' + ', '.join(invalid_attr))
    if config is None and basedir is None:
        _raise(ValueError())
    if not (name is None or isinstance(name, string_types)):
        _raise(ValueError())
    if not (basedir is None or isinstance(basedir, (string_types, Path))):
        _raise(ValueError())

    # --- state ---
    self.config = config
    if name is not None:
        self.name = name
    else:
        # default model name: current timestamp
        self.name = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S.%f")
    self.basedir = None if basedir is None else Path(basedir)
    self._set_logdir()
    self._model_prepared = False
    self.keras_model = self._build()
    if config is None:
        self._find_and_load_weights()
    else:
        # force non-probabilistic output, overriding any provided setting
        config.probabilistic = False
def export_TF(self, name, description, authors, test_img, axes, patch_shape, fname=None):
    """Export the trained model as a BioImage Model Zoo zip archive.

    The archive contains the TensorFlow SavedModel, a 'model.yaml'
    metadata file, and a test input/output image pair.

    name: String
        Name of the model.
    description: String
        A short description of the model e.g. on what data it was trained.
    authors: String
        Comma separated list of author names.
    test_img: array
        Example input image; stored as 'testinput.tif' and used to
        compute 'testoutput.tif' via self.predict.
    axes: String
        Axes string of test_img (must contain 'X' and 'Y'; 'C' and 'Z'
        are handled if present).
    patch_shape: The shape of the patches used in model.train().
    fname: str or Path, optional
        Output archive path; defaults to '<logdir>/export.bioimage.io.zip'.
    """
    if fname is None:
        fname = self.logdir / 'export.bioimage.io.zip'
    else:
        fname = Path(fname)

    # Spatial dimensionality of the input (channel axis does not count).
    input_n_dims = len(test_img.shape)
    if 'C' in axes:
        input_n_dims -= 1
    # NOTE(review): `assert` is stripped under `python -O`; consider raising
    # ValueError for these input checks instead.
    assert input_n_dims == self.config.n_dim, 'Input and network dimensions do not match.'
    assert test_img.shape[axes.index('X')] == test_img.shape[axes.index(
        'Y')], 'X and Y dimensions are not of same length.'

    test_output = self.predict(test_img, axes)
    # Extract central slice of Z-Stack (move Z to the front first if needed)
    if 'Z' in axes:
        z_dim = axes.index('Z')
        if z_dim != 0:
            test_output = np.moveaxis(test_output, z_dim, 0)
        test_output = test_output[int(test_output.shape[0] / 2)]

    # CSBDeep Export: write the SavedModel plus CSBDeep metadata into fname
    meta = {
        'type': self.__class__.__name__,
        'version': package_version,
        'probabilistic': self.config.probabilistic,
        'axes': self.config.axes,
        'axes_div_by': self._axes_div_by(self.config.axes),
        'tile_overlap': self._axes_tile_overlap(self.config.axes)
    }
    export_SavedModel(self.keras_model, str(fname), meta=meta)
    # CSBDeep Export Done

    # Replace : with - (':' is not desirable in the exported model name)
    name = name.replace(':', ' -')
    yml_dict = self.get_yml_dict(name, description, authors, test_img, axes, patch_shape=patch_shape)
    yml_file = self.logdir / 'model.yaml'
    # NOTE(review): the note below says default_flow_style must be TRUE, but
    # the code sets it to False — confirm which flow style is intended.
    '''default_flow_style must be set to TRUE in order for the output to display arrays as [x,y,z]'''
    yaml = YAML(typ='rt')
    yaml.default_flow_style = False
    with open(yml_file, 'w') as outfile:
        yaml.dump(yml_dict, outfile)

    # Write the test image pair next to the yaml, then append all three
    # files to the archive created by export_SavedModel above.
    input_file = self.logdir / 'testinput.tif'
    output_file = self.logdir / 'testoutput.tif'
    imsave(input_file, test_img)
    imsave(output_file, test_output)
    with ZipFile(fname, 'a') as myzip:
        myzip.write(yml_file, arcname=os.path.basename(yml_file))
        myzip.write(input_file, arcname=os.path.basename(input_file))
        myzip.write(output_file, arcname=os.path.basename(output_file))

    print("\nModel exported in BioImage ModelZoo format:\n%s" % str(fname.resolve()))
def main():
    """Command-line entry point: restore all matching TIFF images in an
    input directory with a pretrained CARE model and write the results
    to an output directory."""
    # bail out when run interactively (no __file__ defined)
    if not ('__file__' in locals() or '__file__' in globals()):
        print('running interactively, exiting.')
        sys.exit(0)

    # parse arguments
    parser, args = parse_args()
    args_dict = vars(args)

    # exit and show help if no arguments provided at all
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(0)

    # check for required arguments manually (because of argparse issue)
    required = ('--input-dir', '--input-axes', '--norm-pmin', '--norm-pmax', '--model-basedir', '--model-name', '--output-dir')
    for r in required:
        dest = r[2:].replace('-', '_')  # '--input-dir' -> argparse dest 'input_dir'
        if args_dict[dest] is None:
            parser.print_usage(file=sys.stderr)
            print("%s: error: the following arguments are required: %s" % (parser.prog, r), file=sys.stderr)
            sys.exit(1)

    # show effective arguments (including defaults)
    if not args.quiet:
        print('Arguments')
        print('---------')
        pprint(args_dict)
        print()
        sys.stdout.flush()

    # logging function (no-op when quiet; tqdm.write keeps progress bar intact)
    log = (lambda *a, **k: None) if args.quiet else tqdm.write

    # get list of input files and exit if there are none
    file_list = list(Path(args.input_dir).glob(args.input_pattern))
    if len(file_list) == 0:
        log("No files to process in '%s' with pattern '%s'." % (args.input_dir, args.input_pattern))
        sys.exit(0)

    # delay heavy imports until after checking that all required arguments are provided
    from tifffile import imread, imsave
    from csbdeep.utils.tf import keras_import
    K = keras_import('backend')
    from csbdeep.models import CARE
    from csbdeep.data import PercentileNormalizer
    sys.stdout.flush()
    sys.stderr.flush()

    # limit gpu memory (only when explicitly requested)
    if args.gpu_memory_limit is not None:
        from csbdeep.utils.tf import limit_gpu_memory
        limit_gpu_memory(args.gpu_memory_limit)

    # create CARE model and load weights, create normalizer
    K.clear_session()
    model = CARE(config=None, name=args.model_name, basedir=args.model_basedir)
    if args.model_weights is not None:
        print("Loading network weights from '%s'." % args.model_weights)
        model.load_weights(args.model_weights)
    normalizer = PercentileNormalizer(pmin=args.norm_pmin, pmax=args.norm_pmax, do_after=args.norm_undo)

    # a single-element n_tiles list is unwrapped to a scalar
    n_tiles = args.n_tiles
    if n_tiles is not None and len(n_tiles) == 1:
        n_tiles = n_tiles[0]

    processed = []

    # process all files (progress bar disabled when quiet or tiled prediction prints its own)
    for file_in in tqdm(file_list, disable=args.quiet or (n_tiles is not None and np.prod(n_tiles) > 1)):
        # construct output file name from the user-supplied template
        file_out = Path(args.output_dir) / args.output_name.format(
            file_path=str(file_in.relative_to(args.input_dir).parent),
            file_name=file_in.stem, file_ext=file_in.suffix,
            model_name=args.model_name,
            model_weights=Path(args.model_weights).stem if args.model_weights is not None else None)

        # checks: only tiff input/output supported
        (file_in.suffix.lower() in ('.tif', '.tiff') and
         file_out.suffix.lower() in ('.tif', '.tiff')) or _raise(ValueError('only tiff files supported.'))

        # load and predict restored image
        img = imread(str(file_in))
        restored = model.predict(img, axes=args.input_axes, normalizer=normalizer, n_tiles=n_tiles)

        # restored image could be multi-channel even if input image is not
        axes_out = axes_check_and_normalize(args.input_axes)
        if restored.ndim > img.ndim:
            assert restored.ndim == img.ndim + 1
            assert 'C' not in axes_out
            axes_out += 'C'

        # convert data type (if necessary)
        restored = restored.astype(np.dtype(args.output_dtype), copy=False)

        # save to disk (skipped in dry-run mode)
        if not args.dry_run:
            file_out.parent.mkdir(parents=True, exist_ok=True)
            if args.imagej_tiff:
                save_tiff_imagej_compatible(str(file_out), restored, axes_out)
            else:
                imsave(str(file_out), restored)

        processed.append((file_in, file_out))

    # print summary of processed files
    if not args.quiet:
        sys.stdout.flush()
        sys.stderr.flush()
        n_processed = len(processed)
        len_processed = len(str(n_processed))
        log('Finished processing %d %s' % (n_processed, 'files' if n_processed > 1 else 'file'))
        log('-' * (26 + len_processed if n_processed > 1 else 26))
        for i, (file_in, file_out) in enumerate(processed):
            # pad in/out paths to the same width for aligned output
            len_file = max(len(str(file_in)), len(str(file_out)))
            log(('{:>%d}. in : {:>%d}' % (len_processed, len_file)).format(1 + i, str(file_in)))
            log(('{:>%d} out: {:>%d}' % (len_processed, len_file)).format('', str(file_out)))