def prepare_annotation_files(slicing=None, orientation=None, directory=None,
                             postfix=None, annotation_file=None,
                             reference_file=None, distance_to_surface_file=None,
                             overwrite=False, verbose=False):
    """Crop the annotation, reference and distance files to match the data.

    Arguments
    ---------
    slicing : tuple or None
      The slice specification after reorienting.
    orientation : tuple, str or None.
      The orientation specification. Strings can be 'left' or 'right', for
      the two hemispheres.
    directory : str or None
      The target directory. If None, use ClearMap resources folder.
    postfix : str or None
      Use this postfix for the cropped annotation file. If None an automatic
      label is chosen.
    annotation_file : str or None
      The annotation file to use.
    reference_file : str or None
      The reference file to use.
    distance_to_surface_file : str or None
      The distance file to use.
    overwrite : bool
      If True, overwrite existing files.
    verbose : bool
      If True, print progress information.

    Returns
    -------
    annotation_file : str
      The cropped annotation file.
    reference_file : str
      The cropped reference file.
    distance_to_surface_file : str
      The distance cropped file.
    """
    # Fall back to the module-level default atlas files.
    if annotation_file is None:
        annotation_file = default_annotation_file
    if reference_file is None:
        reference_file = default_reference_file
    if distance_to_surface_file is None:
        distance_to_surface_file = default_distance_to_surface_file

    files = [annotation_file, reference_file, distance_to_surface_file]
    results = []
    for f in files:
        if f is None:
            results.append(None)
            continue

        # Target filename encodes orientation/slicing/postfix.
        fn = format_annotation_filename(f, orientation=orientation,
                                        slicing=slicing, postfix=postfix,
                                        directory=directory)
        if verbose:
            print('Preparing: %r' % fn)

        # Reuse a previously cropped file unless asked to overwrite.
        if not overwrite and io.is_file(fn):
            results.append(fn)
            continue

        if not io.is_file(f):
            raise ValueError('Cannot find annotation file: %s' % f)

        s = io.as_source(f)
        if verbose:
            print('Preparing: from source %r' % s)
        data = np.array(s.array)

        if orientation is not None:
            # Permute the axes according to the orientation specification.
            per = res.orientation_to_permuation(orientation)
            data = data.transpose(per)

            # Reverse axes whose orientation entry is negative.
            reslice = False
            sl = [slice(None)] * data.ndim
            for d, o in enumerate(orientation):
                if o < 0:
                    sl[d] = slice(None, None, -1)
                    reslice = True
            if reslice:
                data = data[tuple(sl)]

        if slicing is not None:
            data = data[slicing]

        io.write(fn, data)
        results.append(fn)

    return results
def info(self, tile_axes = None, check_extensions = True):
    """Print an overview of the files managed by this object.

    For tiled (tagged) file expressions the tiling extent is summarized;
    plain files are listed individually.

    Arguments
    ---------
    tile_axes : list of str or None
      Names of the tag axes to report tiling over; if None, use all tag
      names found in each expression.
    check_extensions : bool
      If True, scan the file system for all extensions actually present;
      otherwise use the registered default extension only.

    Raises
    ------
    ValueError
      If tile_axes do not match the tag names of an expression.
    """
    # Header line plus a '%<width>s' format aligning all file-type keys.
    s = self.__str__() + '\n';
    l = np.max([len(k) for k in self.file_type_to_name]);
    l = '%' + '%d' % l + 's';
    for k,v in self.file_type_to_name.items():
        if len(te.Expression(v).tags) > 0:
            # Tagged expression: possibly several extensions on disk.
            if check_extensions:
                files = self.file_list(k, extension='*');
                extensions = [io.file_extension(f) for f in files];
                extensions = np.unique(extensions);
                #print(extensions)
            else:
                extensions = [self.extension(k)];
            if len(extensions) == 0:
                s += l % k + ': no file\n';
            else:
                # kk holds the key label; it is printed only for the first
                # extension, then blanked for continuation lines.
                kk = k;
                for extension in extensions:
                    expression = te.Expression(self.filename(k, extension=extension));
                    tag_names = expression.tag_names();
                    if tile_axes is None:
                        tile_axes_ = tag_names;
                    else:
                        tile_axes_ = tile_axes;
                    # Validate that tile_axes_ and the expression tags agree
                    # in both directions.
                    for n in tile_axes_:
                        if not n in tag_names:
                            raise ValueError('The expression does not have the named pattern %s' % n);
                    for n in tag_names:
                        if not n in tile_axes_:
                            raise ValueError('The expression has the named pattern %s that is not in tile_axes=%r' % (n, tile_axes_));
                    #construct tiling
                    files = io.file_list(expression);
                    if len(files) > 0:
                        # Extract the tag values of every file and report the
                        # per-axis min/max tile positions.
                        tile_positions = [expression.values(f) for f in files];
                        tile_positions = [tuple(tv[n] for n in tile_axes_) for tv in tile_positions];
                        tile_lower = tuple(np.min(tile_positions, axis = 0));
                        tile_upper = tuple(np.max(tile_positions, axis = 0));
                        tag_names = tuple(tag_names);
                        if kk is not None:
                            s += (l % kk) + ': '
                            kk = None;
                        else:
                            s += (l % '') + ' '
                        # Paths are shown relative to self.directory.
                        s+= ('%s {%d files, %r: %r -> %r}' % (expression.string()[len(self.directory)+1:], len(files), tag_names, tile_lower, tile_upper)) + '\n';
        else:
            # Plain (untagged) file: list the base file and any postfixed
            # variants found on disk.
            fname = self.filename(k);
            files = [];
            if io.is_file(fname):
                files += [fname];
            fname = self.filename(k, postfix = '*');
            files += io.file_list(fname);
            if len(files) > 0:
                files = [f[len(self.directory)+1:] for f in files]
                s += l % k + ': ' + files[0] + '\n'
                for f in files[1:]:
                    s += l % '' + ' ' + f + '\n'
            else:
                s += l % k + ': no file\n';
    print(s);
def write(sink, source, slicing=None, overwrite=True, blocks=None,
          processes=None, verbose=False):
    """Write a large array to disk in parallel.

    Arguments
    ---------
    sink : str or Source
      The sink on disk to write to.
    source : array or Source
      The data to write to disk.
    slicing : slicing or None
      Optional slicing for the sink to write to.
    overwrite : bool
      If True, create new file if the source specifications do not match.
    blocks : int or None
      Number of blocks to split array into for parallel processing.
    processes : None or int
      Number of processes, if None use number of cpus.
    verbose : bool
      Print info about the file to be loaded.

    Returns
    -------
    sink : Source class
      The sink to which the source was written.

    Raises
    ------
    ValueError
      If the sink specification is invalid, a slice is requested on a
      non-existent file, or an existing file mismatches the source and
      overwrite is False.
    """
    processes, timer, blocks = initialize_processing(
        processes=processes, verbose=verbose, function='write',
        blocks=blocks, return_blocks=True)

    # Flatten the source to a 1d buffer for the parallel C writer.
    source, source_buffer, source_order = initialize_source(
        source, as_1d=True, return_order=True)

    # Resolve the sink location; a plain string is treated as a path.
    try:
        sink = io.as_source(sink)
        location = sink.location
    except Exception:
        if isinstance(sink, str):
            location = sink
            sink = None
        else:
            raise ValueError('Sink is not a valid writable sink specification!')
    if location is None:
        raise ValueError('Sink is not a valid writable sink specification!')

    if slicing is not None:
        # Writing into a slice requires the file to exist already.
        if not io.is_file(location):
            raise ValueError('Cannot write a slice to a non-existent sink %s!'
                             % location)
        sink = slc.Slice(source=sink, slicing=slicing)
    else:
        if io.is_file(location):
            mode = None
            # Recreate the file if its layout mismatches the source.
            if (sink.shape != source.shape
                    or sink.dtype != source.dtype
                    or sink.order != source_order):
                if overwrite:
                    mode = 'w+'
                else:
                    # BUG FIX: format argument was missing; the message now
                    # actually includes the offending file name.
                    raise ValueError(
                        'Sink file %s exists but does not match source!'
                        % location)
            sink_shape = source.shape
            sink_dtype = source.dtype
            sink_order = source.order
            sink = None
        else:
            sink_shape = None
            sink_dtype = None
            sink_order = None
            mode = None
        sink = initialize_sink(sink=sink, location=location, shape=sink_shape,
                               dtype=sink_dtype, order=sink_order, mode=mode,
                               source=source, return_buffer=False)

    sink_order, sink_offset = sink.order, sink.offset
    if sink_order not in ['C', 'F']:
        raise NotImplementedError(
            'Cannot read in parallel from non-contigous source!')
    if source_order != sink_order:
        raise RuntimeError('Order of source %r and sink %r do not match!'
                           % (source_order, sink_order))

    #print(source_buffer.shape, location, sink_offset, blocks, processes)
    code.write(source_buffer, location.encode(), offset=sink_offset,
               blocks=blocks, processes=processes)

    finalize_processing(verbose=verbose, function='write', timer=timer)
    return sink
def skeletonize(source, sink=None, points=None, method='PK12i', steps=None,
                in_place=False, verbose=True, **kwargs):
    """Skeletonize 3d binary arrays.

    Arguments
    ---------
    source : array or source
      Binary image to skeletonize.
    sink : sink specification
      Optional sink.
    points : array or None
      Optional point list of the foreground points in the binary.
    method : str
      'PK12' or faster index version 'PK12i'.
    steps : int or None
      Number of maximal iteration steps. If None, maximal thinning.
    in_place : bool
      If True, the skeletonization is done directly on the input array.
    verbose : bool
      If True, print timing information.
    kwargs
      Additional arguments passed to the skeletonization routine.

    Returns
    -------
    skeleton : Source
      The skeletonized array.

    Raises
    ------
    RuntimeError
      If method is not 'PK12' or 'PK12i'.
    """
    if verbose:
        timer = tmr.Timer()

    # Obtain a writable buffer; copy unless thinning in place is requested.
    if not in_place and io.is_file(source):
        binary_buffer = ap.read(source).as_buffer()
    else:
        binary, binary_buffer = ap.initialize_source(source)
        if not in_place:
            binary_buffer = np.array(binary_buffer)

    if method == 'PK12':
        result = PK12.skeletonize(binary_buffer, points=points, steps=steps,
                                  verbose=verbose, **kwargs)
    elif method == 'PK12i':
        result = PK12.skeletonize_index(binary_buffer, points=points,
                                        steps=steps, verbose=verbose, **kwargs)
    else:
        # BUG FIX: corrected misspelled error message ('Skeletonizaton').
        raise RuntimeError('Skeletonization method %r is not valid!' % method)

    if verbose:
        timer.print_elapsed_time(head='Skeletonization')

    # Route the result to the requested sink type.
    if sink is None:
        sink = ap.io.as_source(result)
    elif isinstance(sink, str):
        sink = ap.write(sink, result)
    else:
        sink = io.write(sink, result)
    return sink