Example #1
def rank(source,
         sink=None,
         function=rnk.median,
         resample=None,
         verbose=False,
         out=sys.stdout,
         **kwargs):
    """Rank filter inbetween reshaping."""

    timer = tmr.Timer()

    sink, sink_buffer = ap.initialize_sink(sink=sink, source=source, order='F')

    if resample:
        interpolation = cv2.INTER_NEAREST
        new_shape = np.round(np.array(sink.shape, dtype=float) *
                             resample).astype(int)
        new_shape[2] = sink.shape[2]
        data = np.zeros(tuple(new_shape), order='F', dtype=source.dtype)
        new_shape = tuple(new_shape[1::-1])
        for z in range(source.shape[2]):
            data[:, :, z] = cv2.resize(src=source[:, :, z],
                                       dsize=new_shape,
                                       interpolation=interpolation)
        #print data.shape, data.dtype
        out.write(timer.elapsed_time(head='Rank filter: Resampling') + '\n')
    else:
        data = source

    #keys = inspect.getargspec(function).args;
    #kwargs = { k : v for k,v in kwargs.iteritems() if k in keys};

    data = function(data, **kwargs)

    out.write(
        timer.elapsed_time(head='Rank filter: %s' % function.__name__) + '\n')

    if resample:
        #interpolation = cv2.INTER_LINEAR;
        interpolation = cv2.INTER_AREA
        for z in range(sink.shape[2]):
            sink_buffer[:, :, z] = cv2.resize(src=data[:, :, z],
                                              dsize=sink.shape[1::-1],
                                              interpolation=interpolation)
        out.write(timer.elapsed_time(head='Rank filter: Upsampling') + '\n')
    else:
        sink_buffer[:] = data

    return sink
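
A minimal usage sketch (not part of the original source): it assumes a plain 3D numpy array is accepted as source and that the default rnk.median filter takes a selem structuring element forwarded via **kwargs; both assumptions are noted in the comments.

# Hypothetical usage of rank(); the 'selem' keyword is assumed to be accepted
# by rnk.median and is forwarded through **kwargs.
import numpy as np

stack = (np.random.rand(128, 128, 16) * 255).astype('uint8')  # synthetic 3D stack
selem = np.ones((3, 3, 3), dtype=bool)                         # small structuring element (assumed keyword)
filtered = rank(stack,
                function=rnk.median,   # rank filter to apply
                resample=0.5,          # downsample x/y by 2 before filtering, upsample after
                selem=selem)
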
Example #2
def clip(source, sink = None, clip_min = None, clip_max = None, clip_norm = None, processes = None, verbose = False):
  """Clip and normalize data.

  Arguments
  ---------
  source : array
      Input source.
  sink : array, dtype or None
      Output sink or output data type; if None, a new array is allocated.
  clip_min : number
      Minimal number to clip source data to.
  clip_max : number
      Maximal number to clip source data to.
  clip_norm : number
      Normalization constant.

  Returns
  -------
  sink : array
      Clipped output.
  """
  processes, timer = ap.initialize_processing(verbose=verbose, processes=processes, function='clip');
  
  source, source_buffer = ap.initialize_source(source);

  if source.ndim != 3:
    raise ValueError('Source assumed to be 3d, found %dd!' % source.ndim);
  
  if clip_min is None:
    clip_min = ap.io.min_value(source);
  
  if clip_max is None:
    clip_max = ap.io.max_value(source);
  
  if clip_norm is None:
    clip_norm = clip_max - clip_min;

  sink, sink_buffer = ap.initialize_sink(sink = sink, source = source);
                                            
  code.clip(source_buffer, sink_buffer, clip_min, clip_max, clip_norm, processes);
  
  return sink;
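
A short usage sketch (illustrative, not from the original code); the clip range (400, 60000) is the typical vasculature value quoted in the binarize docstring further below, and a plain numpy array is assumed to be a valid source.

# Hypothetical usage of clip(): clip a raw stack into [400, 60000] and
# normalize by the clip range; a new array is allocated because sink is None.
import numpy as np

raw = np.random.randint(0, 2**16, size=(256, 256, 32), dtype='uint16')
clipped = clip(raw, clip_min=400, clip_max=60000, processes=4, verbose=True)
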
Example #3
def detect_cells(
        source,
        sink=None,
        cell_detection_parameter=default_cell_detection_parameter,
        processing_parameter=default_cell_detection_processing_parameter):
    """Cell detection pipeline.
  
  Arguments
  ---------
  source : source specification
    The source of the stitched raw data.
  sink : sink specification or None
    The sink to write the result to. If None, an array is returned.
  cell_detection_parameter : dict
    Parameter for the binarization. See below for details.
  processing_parameter : dict
    Parameter for the parallel processing. 
    See :func:`ClearMap.ParallelProcessing.BlockProcessing.process` for 
    description of all the parameters.
  verbose : bool
    If True, print progress output.
  
  Returns
  -------
  sink : Source
    The result of the cell detection.
  
  Notes
  -----
  Effectively this function performs the following steps:
    * illumination correction via :func:`~ClearMap.ImageProcessing.IlluminationCorrection.correct_illumination`
    * background removal
    * difference of Gaussians (DoG) filter
    * maxima detection via :func:`~ClearMap.Analysis.Measurements.MaximaDetection.find_extended_maxima`
    * cell shape detection via :func:`~ClearMap.Analysis.Measurements.ShapeDetection.detect_shape`
    * cell intensity and size measurements via: :func:`~ClearMap.ImageProcessing.Measurements.ShapeDetection.find_intensity`,
      :func:`~ClearMap.ImageProcessing.Measurements.ShapeDetection.find_size`. 

  
  The parameters for each step are passed as sub-dictionaries to the 
    cell_detection_parameter dictionary.
  
  * If None is passed for one of the steps this step is skipped.
  
  * Each step also has an additional parameter 'save' that enables saving of 
    the result of that step to a file to inspect the pipeline.
  
  
  Illumination correction
  -----------------------
  illumination_correction : dict or None
    Illumination correction step parameter.

    flatfield : array or str 
      The flat field estimate for the image planes.
    
    background : array or None
      A background level to assume for the flatfield correction.
    
    scaling : float, 'max', 'mean' or None
      Optional scaling after the flat field correction.
    
    save : str or None
      Save the result of this step to the specified file if not None.
          
  See also :func:`ClearMap.ImageProcessing.IlluminationCorrection.correct_illumination`
  
  
  Background removal
  ------------------
  background_correction : dict or None
    Background removal step parameter.

    shape : tuple
      The shape of the structuring element used to estimate the background.
      This should be larger than the typical cell size.
    
    form : str
      The form of the structuring element (e.g. 'Disk').
        
    save : str or None
      Save the result of this step to the specified file if not None.
  
  Equalization
  ------------
  equalization : dict or None
    Equalization step parameter.
    See also :func:`ClearMap.ImageProcessing.LocalStatistics.local_percentile`
    
    percentile : tuple
      The lower and upper percentiles used to estimate the equalization.
      The lower percentile is used for normalization, the upper to limit the
      maximal boost to a maximal intensity above this percentile.
    
    max_value : float
      The maximal intensity value in the equalized image.
    
    selem : tuple
      The structural element size to estimate the percentiles. 
      Should be larger than the largest vessels.
    
    spacing : tuple
      The spacing used to move the structural elements.
      Larger spacings speed up processing but become locally less precise.
        
    interpolate : int
      The order of the interpolation used in constructing the full 
      background estimate in case a non-trivial spacing is used.
      
    save : str or None
      Save the result of this step to the specified file if not None.
  
  
  DoG Filter
  ----------
  dog_filter : dict or None
    Difference of Gaussian filter step parameter.

    shape : tuple
      The shape of the filter.
      This should be near the typical cell size.
      
    sigma : tuple or None
       The std of the inner Gaussian.
       If None, determined automatically from shape.
    
    sigma2 : tuple or None
       The std of the outer Gaussian.
       If None, determined automatically from shape.
    
    save : str or None
      Save the result of this step to the specified file if not None.
  
  
  Maxima detection
  ----------------
  maxima_detection : dict or None
    Extended maxima detection step parameter.

    h_max : float or None
      The 'height' for the extended maxima.
      If None, simple local maxima detection is used.

    shape : tuple
      The shape of the structural element for extended maxima detection.
      This should be near the typical cell size.
    
    threshold : float or None
      Only maxima above this threshold are detected. If None, all maxima
      are detected.
      
    valid : bool
      If True, only detect cell centers in the valid range of the blocks with
      overlap.
    
    save : str or None
      Save the result of this step to the specified file if not None.
  
  
  Shape detection
  ---------------
  shape_detection : dict or None
    Shape detection step parameter.

    threshold : float
      Cell shape is expanded from maxima if pixels are above this threshold
      and not closer to another maximum.
    
    save : str or None
      Save the result of this step to the specified file if not None.
  
  
  Intensity detection
  -------------------
  intensity_detection : dict or None
    Intensity detection step parameter.

    method : {'max', 'min', 'mean', 'sum'}
      The method to use to measure the intensity of a cell.
      
    shape : tuple or None
      If no cell shapes are detected a disk of this shape is used to measure
      the cell intensity.
    
    save : str or None
      Save the result of this step to the specified file if not None.
  
  References
  ----------
  [1] Renier, Adams, Kirst, Wu et al., "Mapping of Brain Activity by Automated Volume Analysis of Immediate Early Genes.", Cell 165, 1789 (2016)
  [2] Kirst et al., "Mapping the Fine-Scale Organization and Plasticity of the Brain Vasculature", Cell 180, 780 (2020)
  """

    #initialize sink
    shape = io.shape(source)
    order = io.order(source)

    for key in cell_detection_parameter.keys():
        par = cell_detection_parameter[key]
        if isinstance(par, dict):
            filename = par.get('save', None)
            if filename:
                ap.initialize_sink(filename,
                                   shape=shape,
                                   order=order,
                                   dtype='float')

    cell_detection_parameter.update(
        verbose=processing_parameter.get('verbose', False))

    results, blocks = bp.process(detect_cells_block,
                                 source,
                                 sink=None,
                                 function_type='block',
                                 return_result=True,
                                 return_blocks=True,
                                 parameter=cell_detection_parameter,
                                 **processing_parameter)

    #merge results
    results = np.vstack([np.hstack(r) for r in results])

    #create column headers
    header = ['x', 'y', 'z']
    dtypes = [int, int, int]
    if cell_detection_parameter['shape_detection'] is not None:
        header += ['size']
        dtypes += [int]
    measures = cell_detection_parameter['intensity_detection']['measure']
    header += measures
    dtypes += [float] * len(measures)

    dt = {
        'names': header,
        'formats': dtypes
    }
    cells = np.zeros(len(results), dtype=dt)
    for i, h in enumerate(header):
        cells[h] = results[:, i]

    #save results
    return io.write(sink, cells)
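
The function above expects cell_detection_parameter to carry one sub-dictionary per step; below is a hedged sketch of such a dictionary following the structure documented in the docstring. The concrete numbers and the 'source' measure name are illustrative placeholders, not library defaults.

# Illustrative cell_detection_parameter: None skips a step, 'save' would write
# that step's intermediate result to file.  Values are placeholders.
cell_detection_parameter = dict(
    illumination_correction=None,                      # skip this step
    background_correction=dict(shape=(10, 10),         # larger than a typical cell
                               form='Disk',
                               save=None),
    equalization=None,
    dog_filter=dict(shape=(6, 6, 6), sigma=None, sigma2=None, save=None),
    maxima_detection=dict(h_max=None, shape=(5, 5, 5),
                          threshold=0, valid=True, save=None),
    shape_detection=dict(threshold=700, save=None),
    intensity_detection=dict(method='max', shape=(3, 3, 3),
                             measure=['source']),      # extra columns in the cell table
)

cells = detect_cells('stitched.npy', sink='cells.npy',
                     cell_detection_parameter=cell_detection_parameter)
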
Example #4
def average(source,
            sink=None,
            shape=None,
            dtype=None,
            weights=None,
            indices=None,
            kernel=None,
            return_counts=False,
            processes=None,
            verbose=False):
    """Averages a list of points into an volumetric image array.
  
  Arguments
  ---------
  source : str, array or Source
    Source of points as an nxd array of coordinates.
  sink : str, array or None
    The sink for the devolved image; if None, an array is returned.
  shape : tuple, str or None
    Shape of the final devolved data. If None, determine from points.
    If str, determine shape from the source at the specified location.
  dtype : dtype or None
    Optional data type of the sink.
  weights : array or None
    Weight array of length n for each point. If None, use uniform weights.  
  indices : array 
    The relative indices to the center to devolve over as nxd array.
  kernel : array
    Optional kernel weights for each index in indices.
  processes : int or None
    Number of processes to use.
  verbose : bool
    If True, print progress info.                        
 
  Returns
  -------
  sink : str, array
    Volumetric data of devolved point data.
  """
    processes, timer = ap.initialize_processing(processes=processes,
                                                verbose=verbose,
                                                function='devolve')

    #points, points_buffer = ap.initialize_source(points);
    points_buffer = io.read(source)
    if points_buffer.ndim == 1:
        points_buffer = points_buffer[:, None]

    if sink is None and shape is None:
        if points_buffer.ndim > 1:
            shape = tuple(
                int(math.ceil(points_buffer[:, d].max()))
                for d in range(points_buffer.shape[1]))
        else:
            shape = (int(math.ceil(points_buffer[:].max())), )
    elif isinstance(shape, str):
        shape = io.shape(shape)

    if sink is None and dtype is None:
        if weights is not None:
            dtype = io.dtype(weights)
        elif kernel is not None:
            kernel = np.asarray(kernel)
            dtype = kernel.dtype
        else:
            dtype = int

    sink, sink_buffer, sink_shape, sink_strides = ap.initialize_sink(
        sink=sink,
        shape=shape,
        dtype=dtype,
        return_shape=True,
        return_strides=True,
        as_1d=True)

    #TODO: initialize properly
    counts = np.zeros(sink_shape, dtype=int, order=sink.order)
    counts_buffer = counts.reshape(-1, order='A')
    #print(counts.shape, counts_buffer.shape)

    if indices is None:
        return sink
    indices = np.asarray(indices, dtype=int)
    if indices.ndim == 1:
        indices = indices[:, None]

    if kernel is not None:
        kernel = np.asarray(kernel, dtype=float)

    #print(kernel);
    #print(weights)
    #return;

    code.average(points_buffer, weights, indices, sink_buffer, sink_shape,
                 sink_strides, counts_buffer, processes)
    #  if weights is None:
    #    if kernel is None:
    #      code.devolve_uniform(points_buffer, indices, sink_buffer, sink_shape, sink_strides, processes);
    #    else:
    #      code.devolve_uniform_kernel(points_buffer, indices, kernel, sink_buffer, sink_shape, sink_strides, processes);
    #  else:
    #    if kernel is None:
    #      code.devolve_weights(points_buffer, weights, indices, sink_buffer, sink_shape, sink_strides, processes);
    #    else:
    #      code.devolve_weights_kernel(points_buffer, weights, indices, kernel, sink_buffer, sink_shape, sink_strides, processes);
    #TODO: move to code
    good = counts_buffer > 0
    sink_buffer[good] /= counts_buffer[good]

    ap.finalize_processing(verbose=verbose, function='devolve', timer=timer)

    if return_counts:
        return sink, counts
    else:
        return sink
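
A small usage sketch (illustrative): two points are spread over a three-voxel cross along x and averaged where their contributions overlap; a plain numpy array is assumed to be accepted by io.read.

# Hypothetical usage of average().
import numpy as np

points  = np.array([[10, 10, 5], [12, 10, 5]], dtype=float)    # n x d point coordinates
weights = np.array([1.0, 3.0])                                 # one weight per point
indices = np.array([[0, 0, 0], [1, 0, 0], [-1, 0, 0]])         # offsets devolved around each point

img, counts = average(points, shape=(32, 32, 16),
                      weights=weights, indices=indices,
                      return_counts=True, processes=1, verbose=True)
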
Example #5
def postprocess(source, sink = None, postprocessing_parameter = default_postprocessing_parameter, processing_parameter = default_postprocessing_processing_parameter, processes = None, verbose = True):
  """Postprocess a binarized image.
  
  Arguments
  ---------
  source : source specification
    The binary source.
  sink : sink specification or None
    The sink to write the postprocessed result to. 
    If None, an array is returned.
  postprocessing_parameter : dict
    Parameter for the postprocessing.
  processing_parameter : dict
    Parameter for the parallel processing.
  verbose : bool
    If True, print progress output.
  
  Returns
  -------
  sink : Source
    The result of the postprocessing.
    
  Notes
  -----
  * The postprocessing pipeline is composed of several steps. The parameters
    for each step are passed as sub-dictionaries to the 
    postprocessing_parameter dictionary.
  
  * If None is passed for one of the steps the step is skipped.
    
  Smoothing
  ---------
  smooth : dict or None
    Smoothing step parameter. See
    :func:`ClearMap.ImageProcessing.Binary.Smoothing.smooth_by_configuration`

    iterations : int
      Number of smoothing iterations.
      
      For the vasculature a typical value is 6.
  
  Filling
  -------
  fill : bool or None
    If True, fill holes in the binary data.
  """
  
  source = io.as_source(source);  
  sink   = ap.initialize_sink(sink, shape=source.shape, dtype=source.dtype, order=source.order, return_buffer=False);
  
  if verbose:
    timer = tmr.Timer();
    print('Binary post processing: initialized.');
  
  postprocessing_parameter = postprocessing_parameter.copy();
  parameter_smooth = postprocessing_parameter.pop('smooth', None);
  parameter_fill   = postprocessing_parameter.pop('fill', None);
  #print(parameter_smooth, parameter_fill)
  
  #smoothing
  save = None;
  if parameter_smooth:
    #initialize temporary files if needed
    if parameter_fill:
      save = parameter_smooth.pop('save', None);
      temporary_filename = save; 
      if temporary_filename is None:
        temporary_filename = postprocessing_parameter['temporary_filename'];
      if temporary_filename is None:
        temporary_filename = tmpf.mktemp(prefix='TubeMap_Vasculature_postprocessing', suffix='.npy');
      sink_smooth   = ap.initialize_sink(temporary_filename, shape=source.shape, dtype=source.dtype, order=source.order, return_buffer=False);
    else:
      sink_smooth = sink;
    
    #run smoothing
    source_fill = bs.smooth_by_configuration(source, sink=sink_smooth, processing_parameter=processing_parameter, processes=processes, verbose=verbose, **parameter_smooth);
  
  else:
    source_fill = source;
  
  if parameter_fill:
    sink = bf.fill(source_fill, sink=sink, processes=processes, verbose=verbose);
    
    if parameter_smooth and save is None:
      io.delete_file(temporary_filename);
  else:
    sink = source_fill;
  
  if verbose:
    timer.print_elapsed_time('Binary post processing');
  
  gc.collect()
  return sink;
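
A sketch of the postprocessing_parameter structure consumed above: a 'smooth' sub-dictionary, a 'fill' flag, and the 'temporary_filename' key looked up when both steps run. The six smoothing iterations are the typical vasculature value quoted in the docstring; everything else is illustrative.

# Illustrative postprocessing_parameter for postprocess().
postprocessing_parameter = dict(
    smooth=dict(iterations=6, save=None),   # smoothing by configuration
    fill=True,                              # fill holes after smoothing
    temporary_filename=None,                # let the code create a temporary .npy file
)

postprocess('binary.npy', sink='binary_postprocessed.npy',
            postprocessing_parameter=postprocessing_parameter,
            processes=None, verbose=True)
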
Example #6
def binarize(source, sink = None, binarization_parameter = default_binarization_parameter, processing_parameter = default_binarization_processing_parameter):
  """Multi-path binarization of iDISCO+ cleared vasculature data.
  
  Arguments
  ---------
  source : source specification
    The source of the stitched raw data.
  sink : sink specification or None
    The sink to write the result to. If None, an array is returned.
  binarization_parameter : dict
    Parameter for the binarization. See below for details.
  processing_parameter : dict
    Parameter for the parallel processing. 
    See :func:`ClearMap.ParallelProcessing.BlockProcessing.process` for 
    description of all the parameters.
  verbose : bool
    If True, print progress output.
  
  Returns
  -------
  sink : Source
    The result of the binarization.
  
  Notes
  -----
  * The binarization pipeline is composed of several steps. The parameters for
    each step are passed as sub-dictionaries to the binarization_parameter 
    dictionary.
  
  * If None is passed for one of the steps this step is skipped.
  
  * Each step also has an additional parameter 'save' that enables saving of 
    the result of that step to a file to inspect the pipeline.
    
  General parameter
  -----------------
  binary_status : str or None
    File name to save the information about which part of the multi-path 
    binarization contributed to the final result.
    
  max_bin : int
    Number of intensity levels to use for the data after preprocessing.
    Higher values will increase the intensity resolution but slow down
    processing. 
    
    For the vasculature a typical value is 2**12.

  Clipping
  --------
  clip : dict or None
    Clipping and mask generation step parameter.

    clip_range : tuple 
      The range to clip the raw data as (lowest, highest).
      Voxels above lowest define the foreground mask used 
      in the following steps.
      
      For the vasculature a typical value is (400,60000). 
      
    save : str or None
      Save the result of this step to the specified file if not None.
          
  See also :mod:`ClearMap.ImageProcessing.Clipping.Clipping`
      
  Lightsheet correction
  ---------------------  
  lightsheet : dict or None
    Lightsheet correction step parameter.
   
    percentile : float
      Percentile in [0,1] used to estimate the lightsheet artifact.
      
      For the vasculature a typical value is 0.25.
      
    lightsheet : dict
      Parameter for the lightsheet artifact percentile estimation. 
      See :func:`ClearMap.ImageProcessing.LightsheetCorrection.correct_lightsheet`
      for list of all parameters. The crucial parameter is
      
      selem : tuple
        The structural element shape used to estimate the stripe artifact.
        It should match the typical length, width, and depth of the artifact 
        in the data.
        
        For the vasculature a typical value is (150,1,1).
    
    background : dict
      Parameter for the background estimation in the light sheet correction. 
      See :func:`ClearMap.ImageProcessing.LightsheetCorrection.correct_lightsheet`
      for list of all parameters. The crucial parameters are
      
      selem : tuple
        The structural element shape used to estimate the background.
        It should be bigger than the largest vessels.
        
        For the vasculature a typical value is (200,200,1).

      spacing : tuple
        The spacing to use to estimate the background. Larger spacings speed up
        processing but yield less local estimates.
        
        For the vasculature a typical value is (25,25,1)
        
      step : tuple
        This parameter enables subsampling from the entire array defined by
        the structuring element using steps larger than a single voxel.
        
        For the vasculature a typical value is (2,2,1).
        
      interpolate : int
        The order of the interpolation used in constructing the full 
        background estimate in case a non-trivial spacing is used.
        
        For the vasculature a typical value is 1.
        
    lightsheet_vs_background : float
      The background is multiplied by this weight before comparing to the
      lightsheet artifact estimate. 
      
      For the vasculature a typical value is 2.
    
    save : str or None
      Save the result of this step to the specified file if not None.

  Median filter
  -------------
  median : dict or None
    Median correction step parameter.
    See :func:`ClearMap.ImageProcessing.Filter.Rank.median` for all parameter.
    The important parameters are

    selem : tuple
      The structural element size for the median filter.
      
      For the vasculature a typical value is (3,3,3).
    
    save : str or None
      Save the result of this step to the specified file if not None.  
  
  Pseudo Deconvolution
  --------------------
  deconvolve : dict
    The deconvolution step parameter.
    
    sigma : float
      The std of a Gaussian filter applied to the high intensity pixel image.
      The number should reflect the scale of the halo effect seen around high
      intensity structures.
      
      For the vasculature a typical value is 10.
    
    save : str or None
      Save the result of this step to the specified file if not None.   
      
    threshold : float 
      Voxels above this threshold will be added to the binarization result
      in the multi-path binarization.
      
      For the vasculature a typical value is 750.
  
  Adaptive Thresholding
  ---------------------
  adaptive : dict or None
    Adaptive thresholding step parameter.
    A local ISODATA threshold is estimated.
    See also :mod:`ClearMap.ImageProcessing.LocalStatistics`.
    
    selem : tuple
      The structural element size to estimate the percentiles. 
      Should be larger than the largest vessels.
      
      For the vasculature a typical value is (200,200,5).
    
    spacing : tuple
      The spacing used to move the structural elements.
      Larger spacings speed up processing but become locally less precise.
               
      For the vasculature a typical value is (50,50,5)
        
    interpolate : int
      The order of the interpolation used in constructing the full 
      background estimate in case a non-trivial spacing is used.
      
      For the vasculature a typical value is 1.
      
    save : str or None
      Save the result of this step to the specified file if not None.   


  Equalization
  ------------
  equalize : dict or None
    Equalization step parameter.
    See also :func:`ClearMap.ImageProcessing.LocalStatistics.local_percentile`
    
    percentile : tuple
      The lower and upper percentiles used to estimate the equalization.
      The lower percentile is used for normalization, the upper to limit the
      maximal boost to a maximal intensity above this percentile.
    
      For the vasculature a typical value is (0.4, 0.975).
    
    max_value : float
      The maximal intensity value in the equalized image.
               
      For the vasculature a typical value is 1.5. 
    
    selem : tuple
      The structural element size to estimate the percentiles. 
      Should be larger than the largest vessels.
      
      For the vasculature a typical value is (200,200,5).
    
    spacing : tuple
      The spacing used to move the structural elements.
      Larger spacings speed up processing but become locally less precise.
               
      For the vasculature a typical value is (50,50,5)
        
    interpolate : int
      The order of the interpolation used in constructing the full 
      background estimate in case a non-trivial spacing is used.
      
      For the vasculature a typical value is 1.
      
    save : str or None
      Save the result of this step to the specified file if not None.   
      
    threshold : float 
      Voxels above this threshold will be added to the binarization result
      in the multi-path binarization.
      
      For the vasculature a typical value is 1.1.

  Tube filter
  -----------
  vesselize : dict
    The tube filter step parameter.
    
    background : dict or None
      Parameters to correct for local background. See 
      :func:`ClearMap.ImageProcessing.Filter.Rank.percentile`.
      If None, no background correction is done before the tube filter.
      
      selem : tuple
        The structural element specification to estimate the percentiles. 
        Should be larger than the largest vessels intended to be 
        boosted by the tube filter.
        
        For the vasculature a typical value is ('disk', (30,30,1)) .

      percentile : float
        Percentile in [0,1] used to estimate the background.

        For the vasculature a typical value is 0.5.
        
    tubness : dict
      Parameters used for the tube filter. See 
      :func:`ClearMap.ImageProcessing.Differentiation.Hessian.lambda123`.
      
      sigma : float
        The scale of the vessels to boost in the filter.
        
        For the vasculature a typical value is 1.0.
  
    save : str or None
      Save the result of this step to the specified file if not None.   
      
    threshold : float 
      Voxels above this threshold will be added to the binarization result
      in the multi-path binarization.
      
      For the vasculature a typical value is 120.
  
  Binary filling
  --------------
  fill : dict or None
    If not None, apply binary filling to the binarized result.

  For the vasculature this step is set to None and done globally 
  in the postprocessing step.
  
  Binary smoothing
  ----------------
  smooth : dict or None
    The smoothing parameter passed to 
    :func:`ClearMap.ImageProcessing.Binary.Smoothing.smooth_by_configuration`.
  
  For the vasculature this step is set to None and done globally 
  in the postprocessing step.
  
  References
  ----------
  [1] C. Kirst et al., "Mapping the Fine-Scale Organization and Plasticity of the Brain Vasculature", Cell 180, 780 (2020)
  """
    
  #initialize sink
  shape = io.shape(source);
  order = io.order(source);
  sink, sink_buffer = ap.initialize_sink(sink=sink, shape=shape, order=order, dtype=bool); #, memory='shared');
  
  #initialize additional output sinks  
  binary_status = binarization_parameter.get('binary_status', None);
  if binary_status:
    ap.initialize_sink(binary_status, source=sink, shape=shape, order=order, dtype='uint16');

  for key in binarization_parameter.keys():
    par = binarization_parameter[key];
    if isinstance(par, dict):
      filename = par.get('save', None);
      if filename:
        ap.initialize_sink(filename, shape=shape, order=order, dtype='float');
        
  binarization_parameter.update(verbose=processing_parameter.get('verbose', False));
  
  bp.process(binarize_block, source, sink, function_type='block', parameter=binarization_parameter, **processing_parameter)                   
  
  return sink;                
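
A hedged sketch of a binarization_parameter dictionary assembled from the "typical vasculature" values quoted in the docstring above; key spellings follow the docstring, and the concrete values are illustrative rather than validated defaults.

# Illustrative binarization_parameter; None skips a step, 'save' writes the
# intermediate result of that step to file.
binarization_parameter = dict(
    binary_status=None,
    max_bin=2**12,
    clip=dict(clip_range=(400, 60000), save=None),
    lightsheet=dict(percentile=0.25,
                    lightsheet=dict(selem=(150, 1, 1)),
                    background=dict(selem=(200, 200, 1), spacing=(25, 25, 1),
                                    step=(2, 2, 1), interpolate=1),
                    lightsheet_vs_background=2,
                    save=None),
    median=dict(selem=(3, 3, 3), save=None),
    deconvolve=dict(sigma=10, threshold=750, save=None),
    adaptive=dict(selem=(200, 200, 5), spacing=(50, 50, 5), interpolate=1, save=None),
    equalize=dict(percentile=(0.4, 0.975), max_value=1.5,
                  selem=(200, 200, 5), spacing=(50, 50, 5),
                  interpolate=1, threshold=1.1, save=None),
    vesselize=dict(background=dict(selem=('disk', (30, 30, 1)), percentile=0.5),
                   tubness=dict(sigma=1.0),
                   threshold=120, save=None),
    fill=None,     # done globally in the postprocessing step
    smooth=None,   # done globally in the postprocessing step
)

binary = binarize('stitched.npy', sink='binary.npy',
                  binarization_parameter=binarization_parameter)
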
Example #7
def fill_vessels(source, sink, 
                 resample = None, threshold = 0.5,
                 network = None, dtype = 'float16', cuda = None,
                 processing_parameter = None,
                 verbose = False):
  """Fill hollow tubes via a neural network.
  
  Arguments
  ---------
  source : str or Source
    The binary data source to fill hollow tubes in.
  sink : str or Source.
    The binary sink to write data to. The sink is created if it does not exist.
  resample : int or None
    If int, downsample the data by this factor, apply network and upsample.
  threshold : float or None
    Apply a threshold to the result of the CNN. If None, the probability of
    being foreground is returned.
  network : str, Model or None
    The network specification. If None, the default trained network is used.
  dtype : str
    The dtype to use for the network. See 
    :func:`ClearMap.ImageProcessing.MachineLearning.Torch.to` for details.
  cuda : bool or None
    If True, use gpu processing. If None, automatically detect gpu.
  processing_parameter : dict or None
    Parameter to use for block processing.
  verbose : bool
    If True, print progress.
  
  Returns
  -------
  network : Model
    The neural network model.
  """
  if verbose:
    timer = tmr.Timer();

  #cuda
  if cuda is None:
    cuda = torch.cuda.is_available();
    
  #initialize network
  network = vessel_filling_network(network=network, dtype=dtype, cuda=cuda);
  if not cuda:  #some functions only work as float on CPU
    network = network.float();
  if verbose:
    timer.print_elapsed_time('Vessel filling: neural network initialized')
    print(network);
    print('Vessel filling: using %s' % (('gpu' if cuda else 'cpu'),))
  
  #initialize source
  source = io.as_source(source);
 
  if verbose:
    timer.print_elapsed_time('Vessel filling: source loaded');
    
  #initialize sink
  if threshold:
    sink_dtype = bool;
  else:
    sink_dtype = dtype;
  sink, sink_shape = ap.initialize_sink(sink=sink, shape=source.shape, dtype=sink_dtype, order=source.order, return_buffer=False, return_shape=True);
  
  #resampling
  if resample is not None:
    maxpool = torch.nn.MaxPool3d(kernel_size=resample)
    upsample = torch.nn.Upsample(mode="trilinear", scale_factor=resample, align_corners=False);
    
    if cuda:
      maxpool = maxpool.cuda();
      upsample = upsample.cuda();
      if dtype is not None:
        maxpool  = tor.to(maxpool, dtype);
        upsample = tor.to(upsample, dtype);
    else:
      maxpool = maxpool.float();
      upsample = upsample.float();
  
  #processing
  if processing_parameter is None:
    processing_parameter = default_fill_vessels_processing_parameter
  if processing_parameter:
    processing_parameter = processing_parameter.copy();
    processing_parameter.update(optimization=False);
    if 'size_max' not in processing_parameter or processing_parameter['size_max'] is None:
      processing_parameter['size_max'] = np.max(source.shape);
    if 'size_min' not in processing_parameter:
      processing_parameter['size_min'] = None;    
    blocks = bp.split_into_blocks(source, **processing_parameter);  
  else:
    blocks = [source];
  
  #process blocks
  for block in blocks:
    if verbose:
      timer_block = tmr.Timer();
      print('Vessel filling: processing block %s' % (block.info()));
    
    #load data
    data = np.array(block.array);
    if data.dtype == bool:
      data = data.astype('uint8');
    data = torch.unsqueeze(torch.from_numpy(data), 0)
    if cuda:
      data = tor.to(data, dtype=dtype);
      data = data.cuda();
    else:
      data = data.float();
    if verbose:
      print('Vessel filling: loaded data: %r' % (tuple(data.shape),));
    
    #downsample
    if resample:        
      result = maxpool(data);
    else:
      result = data; 
    result = torch.unsqueeze(result, 1);  
    if verbose:
      print('Vessel filling: resampled data: %r' % (tuple(result.shape),));
    
    #fill
    result = network(result);
    if verbose:
      print('Vessel filling: network %r' % (tuple(result.shape),));
    
    #upsample
    if resample:
      result = upsample(result)
    if verbose:
      print('Vessel filling: upsampled %r' % (tuple(result.shape),));
      
    #write data
    sink_slicing = block.slicing;
    result_shape = result.shape;
    result_slicing = tuple(slice(None,min(ss, rs)) for ss,rs in zip(sink_shape, result_shape[2:]));
    data_slicing = (0,) + tuple(slice(None, s.stop) for s in result_slicing);
    sink_slicing = bp.blk.slc.sliced_slicing(result_slicing, sink_slicing, sink_shape);  
    result_slicing = (0,0) + result_slicing;

    #print('result', result.shape, result_slicing, 'data', data.shape, data_slicing, 'sink', sink_shape, sink_slicing)
    
    if threshold:
      sink_prev = torch.from_numpy(np.asarray(sink[sink_slicing], dtype='uint8'));
    else:
      sink_prev = torch.from_numpy(sink[sink_slicing]);
    
    if cuda:
      sink_prev = sink_prev.cuda();
      sink_prev = tor.to(sink_prev, dtype=dtype);
    else:
      sink_prev  = sink_prev.float();

    #print('slices:', result[result_slicing].shape, data[data_slicing].shape, sink_prev.shape)
    
    result = torch.max(torch.max(result[result_slicing], data[data_slicing]), sink_prev);
    if threshold:
      result = result >= threshold;
    if verbose:
      print('Vessel filling: thresholded %r' % (tuple(result.shape),));
    
    if cuda:
      sink[sink_slicing] = result.data.cpu();
    else:
      sink[sink_slicing] = result.data;
    
    if verbose:
      print('Vessel filling: result written to %r' % (sink_slicing,));

    del data, result, sink_prev;
    gc.collect();
    
    if verbose:
      timer_block.print_elapsed_time('Vessel filling: processing block %s' % (block.info()));
   
  if verbose:
    timer.print_elapsed_time('Vessel filling');
  
  return sink;
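
A short usage sketch for fill_vessels() (file names and values are placeholders); the default pretrained network and block-processing parameters are used and the GPU is auto-detected.

# Hypothetical usage of fill_vessels().
filled = fill_vessels('binary_postprocessed.npy', 'binary_filled.npy',
                      resample=2,          # maxpool by 2, run the network, then upsample
                      threshold=0.5,       # binarize the network output
                      network=None,        # default trained network
                      dtype='float16',
                      cuda=None,           # auto-detect GPU
                      verbose=True)
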
Example #8
def index_from_binary(source,
                      sink=None,
                      method='shared',
                      dtype='uint32',
                      processes=None,
                      verbose=False):
    """Calculate the local 3x3x3 configuration in a binary source.
  
  Note
  ----
  The configuration kernel is separable and convolution with it 
  is calculated via a sequence of 1d convolutions.
  """
    processes, timer = ap.initialize_processing(processes=processes,
                                                verbose=verbose,
                                                function='index_from_binary')

    #determine configuration
    source, source_buffer, source_shape, source_strides, source_order = ap.initialize_source(
        source,
        as_1d=True,
        return_shape=True,
        return_strides=True,
        return_order=True)
    ndim = len(source_shape)

    buffer_dtype = np.result_type(source_buffer.dtype, 'uint32')

    delete_files = []
    if source_order == 'C':
        axis_range = range(ndim - 1, -1, -1)
        axis_last = 0
    else:
        axis_range = range(ndim)
        axis_last = ndim - 1
    for axis in axis_range:
        if axis == axis_last:
            sink, sink_buffer, sink_shape, sink_strides = ap.initialize_sink(
                sink=sink,
                as_1d=True,
                source=source,
                dtype=dtype,
                return_shape=True,
                return_strides=True)
        else:
            if method == 'shared':
                _, sink_buffer, sink_shape, sink_strides = ap.initialize_sink(
                    sink=None,
                    as_1d=True,
                    shape=source_shape,
                    dtype=buffer_dtype,
                    order=source_order,
                    return_shape=True,
                    return_strides=True)
            else:
                location = tempfile.mktemp() + '.npy'
                _, sink_buffer, sink_shape, sink_strides = ap.initialize_sink(
                    sink=location,
                    as_1d=True,
                    shape=tuple(source_shape),
                    dtype=buffer_dtype,
                    order=source_order,
                    return_shape=True,
                    return_strides=True)
                delete_files.append(location)

        kernel = index_kernel(axis=axis, dtype=float)

        #print(source_buffer.dtype, source_buffer.shape, source_shape, source_strides, axis, sink_buffer.shape, sink_buffer.dtype, sink_strides, kernel.dtype)
        ap.code.correlate_1d(source_buffer, source_shape, source_strides,
                             sink_buffer, sink_shape, sink_strides, kernel,
                             axis, processes)
        source_buffer = sink_buffer

    for f in delete_files:
        io.delete_file(f)

    ap.finalize_processing(verbose=verbose,
                           function='index_from_binary',
                           timer=timer)

    return sink
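
A brief usage sketch (illustrative): each voxel of the result encodes its local 3x3x3 binary neighborhood as a single integer; a plain boolean numpy array is assumed to be accepted by ap.initialize_source.

# Hypothetical usage of index_from_binary().
import numpy as np

binary = np.zeros((64, 64, 64), dtype=bool, order='F')
binary[20:40, 20:40, 20:40] = True     # a solid cube as a test object

index = index_from_binary(binary, sink=None, method='shared',
                          dtype='uint32', processes=4, verbose=True)
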