Example #1
def test():
    import numpy as np
    import ClearMap.IO.IO as io
    import ClearMap.Analysis.Measurements.MeasureRadius as mr

    data = 10 - np.abs(10 - np.arange(0, 21))
    search = mr.search_indices_sphere(radius=[10, 10, 10])
    print(search)

    points = np.array([10])
    d, i = mr.measure_radius(data,
                             points,
                             fraction=0.75,
                             max_radius=10,
                             scale=2,
                             verbose=True,
                             processes=4,
                             return_indices=True)

    data = np.random.rand(*(30, 40, 50))
    io.write('data.npy', data)

    points = np.array([np.random.randint(0, s, size=10) for s in data.shape]).T
    d, i = mr.measure_radius(data,
                             points,
                             value=0.5,
                             max_radius=10,
                             scale=2,
                             verbose=True,
                             processes=4,
                             return_indices=True)

    data = np.zeros((30, 40, 50), dtype=int)
    data[10:20, 15:25, 10:20] = 1
    data[15, 20, 15] = 2

    import ClearMap.Visualization.Plot3d as p3d
    p3d.plot(data)

    points = np.array([[15, 20, 15], [4, 4, 4]])
    d, i = mr.measure_radius(data,
                             points,
                             value=0.0,
                             max_radius=10,
                             scale=None,
                             verbose=True,
                             processes=None,
                             return_indices=True)
Example #2
def write_color_annotation(filename, annotation_file=None):
    """Creates a rgb image from the atlas color data.
  
  Arguments
  ---------
  filename : str
    The name of the color palette file.
  annotation_file : str
    File name of the atals annotation. 
  
  Returns
  -------
  filename : str
    The name of the file to which the color atlas was written.
  """
    #load atlas and convert to order
    if annotation_file is None:
        annotation_file = annotation.annotation_file
    atlas = np.array(io.read(annotation_file), dtype=int)
    atlas = convert_label(atlas, key='id', value='order', method='map')

    #apply color map
    cm = color_map(alpha=False, as_int=True)
    atlas = cm[atlas]

    return io.write(filename, atlas)
Example #3
def filter_cells(source, sink, thresholds):
    """Filter a array of detected cells according to the thresholds.
  
  Arguments
  ---------
  source : str, array or Source
    The source for the cell data.
  sink : str, array or Source
    The sink for the results.
  thresholds : dict
    Dictionary of the form {name : threshold}, where name refers to a column
    in the cell data and threshold is either None, a float giving a minimal
    threshold, or a tuple (min, max) in which min and max can each be None
    or a minimal/maximal threshold value.
  
  Returns
  -------
  sink : str, array or Source
    The thresholded cell data.
  """
    source = io.as_source(source)

    ids = np.ones(source.shape[0], dtype=bool)
    for k, t in thresholds.items():
        if t:
            if not isinstance(t, (tuple, list)):
                t = (t, None)
            if t[0] is not None:
                ids = np.logical_and(ids, t[0] <= source[k])
            if t[1] is not None:
                ids = np.logical_and(ids, t[1] > source[k])
    cells_filtered = source[ids]

    return io.write(sink, cells_filtered)
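
A minimal usage sketch of the thresholds format described above, assuming filter_cells from this example is in scope; the column names, values and file names are hypothetical:

import numpy as np
import ClearMap.IO.IO as io

# hypothetical structured cell table with 'size' and 'intensity' columns
dt = {'names': ['x', 'y', 'z', 'size', 'intensity'],
      'formats': [int, int, int, int, float]}
cells = np.zeros(5, dtype=dt)
cells['size'] = [5, 20, 35, 50, 80]
cells['intensity'] = [10.0, 150.0, 30.0, 400.0, 90.0]
io.write('cells_raw.npy', cells)

# keep cells with size >= 20 and 50 <= intensity < 300
thresholds = {'size': 20, 'intensity': (50, 300)}
filter_cells('cells_raw.npy', 'cells_filtered.npy', thresholds)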
Example #4
def deformation_distance(deformation_field, sink = None, scale = None):
  """Compute the distance field from a deformation vector field.
  
  Arguments
  ---------
  deformation_field : str or array
    Source of the deformation field determined by :func:`deformation_field`.
  sink : str or None
    Image sink to save the deformation field to.
  scale : tuple or None
    Scale factor for each dimension. If None, (1, 1, 1) is used.
      
  Returns
  -------
  deformation_distance : array or str
    Array or file name of the deformation distance data.
  """
  
  deformation_field = io.read(deformation_field);
  
  df = np.square(deformation_field);
  if scale is not None:
      for i in range(3):
          df[:,:,:,i] = df[:,:,:,i] * (scale[i] * scale[i]);
  df = np.sqrt(np.sum(df, axis = 3));
  
  return io.write(sink, df);
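
A pure-NumPy sketch of the same computation on a toy deformation field, to illustrate the scaled Euclidean norm; the shapes and scale values are made up:

import numpy as np

# toy (2, 2, 2, 3) deformation vector field and an anisotropic voxel size
field = np.random.rand(2, 2, 2, 3)
scale = (1.0, 1.0, 2.5)

df = np.square(field)
for i in range(3):
    df[..., i] *= scale[i] * scale[i]
distance = np.sqrt(np.sum(df, axis=3))  # per-voxel displacement length
print(distance.shape)  # (2, 2, 2)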
Example #5
def create_debug(self, ftype, slicing, debug=None, **kwargs):
    if debug is None:
        debug = self.debug
    if debug is None:
        debug = 'debug'
    # temporarily clear the debug prefix to resolve the original file name
    self.debug = None
    source = io.as_source(self.filename(ftype, **kwargs))
    # restore the prefix and write the sliced data to the debug file
    self.debug = debug
    return io.write(self.filename(ftype, **kwargs),
                    np.asarray(source[slicing], order='F'))
Example #6
def save(location, view, transparent=None, *args, **kwargs):
    """Save the current view to a file."""
    canvas = get_view(view).canvas
    img = canvas.render(*args, **kwargs)
    if transparent is not None:
        t = np.logical_and(img[:, :, 0] >= transparent,
                           img[:, :, 1] >= transparent)
        t = np.logical_and(img[:, :, 2] >= transparent, t)
        img[:, :, 3][t] = 0
        img = img.transpose([0, 1, 2])
    else:
        img = img[:, :, :3].T
    return io.write(location, img)
Example #7
    # Add brain region ID to transformed cell array
    cells_to_save = np.hstack(
        (coordinates_aligned_to_atlas, size_intensity, cell_regions))
    header = ['x', 'y', 'z', 'size', 'intensity', 'region']
    dtypes = [int, int, int, int, float, int]
    dt = {'names': header, 'formats': dtypes}
    output_array = np.zeros(len(cells_to_save), dtype=dt)
    for i, h in enumerate(header):
        output_array[h] = cells_to_save[:, i]
    # Remove cells that are outside the atlas
    output_array = np.delete(output_array, np.argwhere(cell_regions == 0))

    # Save registered cells to cells_transformed_to_atlas.npy
    savename = ws.filename('cells', postfix='transformed_to_atlas')
    io.write(savename, output_array)
    print(f'Saving registered cell detection results to: {savename}')
    print()

    # Filter cells and save to cells_transformed_to_atlas_filtered.npy
    thresholds_file = '/jukebox/witten/Chris/data/clearmap2/utilities/cell_detection_filter.p'
    with open(thresholds_file, 'rb') as f:
        thresholds_dict = pickle.load(f)
    minsize_thresh = str(thresholds_dict['size'][0])
    postfix_filtered = f"transformed_to_atlas_filtered_{minsize_thresh}px"
    cells_filtered = cells.filter_cells(
        source=ws.filename('cells', postfix='transformed_to_atlas'),
        sink=ws.filename('cells', postfix=postfix_filtered),
        thresholds=thresholds_dict)
    print(
        f"Saving filtered cell detection results to: {ws.filename('cells', postfix=postfix_filtered)}"
    )
Example #8
def detect_cells(
        source,
        sink=None,
        cell_detection_parameter=default_cell_detection_parameter,
        processing_parameter=default_cell_detection_processing_parameter):
    """Cell detection pipeline.
  
  Arguments
  ---------
  source : source specification
    The source of the stitched raw data.
  sink : sink specification or None
    The sink to write the result to. If None, an array is returned.
  cell_detection_parameter : dict
    Parameter for the binarization. See below for details.
  processing_parameter : dict
    Parameters for the parallel processing.
    See :func:`ClearMap.ParallelProcessing.BlockProcessing.process` for a
    description of all the parameters.
  verbose : bool
    If True, print progress output.
  
  Returns
  -------
  sink : Source
    The result of the cell detection.
  
  Notes
  -----
  Effectively this function performs the following steps:
    * illumination correction via :func:`~ClearMap.ImageProcessing.IlluminationCorrection.correct_illumination`
    * background removal
    * difference of Gaussians (DoG) filter
    * maxima detection via :func:`~ClearMap.Analysis.Measurements.MaximaDetection.find_extended_maxima`
    * cell shape detection via :func:`~ClearMap.Analysis.Measurements.ShapeDetection.detect_shape`
    * cell intensity and size measurements via: :func:`~ClearMap.ImageProcessing.Measurements.ShapeDetection.find_intensity`,
      :func:`~ClearMap.ImageProcessing.Measurements.ShapeDetection.find_size`. 

  
  The parameters for each step are passed as sub-dictionaries to the 
    cell_detection_parameter dictionary.
  
  * If None is passed for one of the steps, that step is skipped.
  
  * Each step also has an additional parameter 'save' that enables saving of 
    the result of that step to a file to inspect the pipeline.
  
  
  Illumination correction
  -----------------------
  illumination_correction : dict or None
    Illumination correction step parameter.

    flatfield : array or str 
      The flat field estimate for the image planes.
    
    background : array or None
      A background level to assume for the flatfield correction.
    
    scaling : float, 'max', 'mean' or None
      Optional scaling after the flat field correction.
    
    save : str or None
      Save the result of this step to the specified file if not None.
          
  See also :func:`ClearMap.ImageProcessing.IlluminationCorrection.correct_illumination`
  
  
  Background removal
  ------------------
  background_correction : dict or None
    Background removal step parameter.

    shape : tuple
      The shape of the structure element used to estimate the background.
      This should be larger than the typical cell size.
    
    form : str
      The form of the structure element (e.g. 'Disk').
        
    save : str or None
      Save the result of this step to the specified file if not None.
  
  Equalization
  ------------
  equalization : dict or None
    Equalization step parameter.
    See also :func:`ClearMap.ImageProcessing.LocalStatistics.local_percentile`
    
    percentile : tuple
      The lower and upper percentiles used to estimate the equalization.
      The lower percentile is used for normalization, the upper one to limit
      the maximal boost to intensities above this percentile.
    
    max_value : float
      The maximal intensity value in the equalized image.
    
    selem : tuple
      The structural element size to estimate the percentiles. 
      Should be larger than the larger vessels.
    
    spacing : tuple
      The spacing used to move the structural elements.
      Larger spacings speed up processing but become locally less precise.
        
    interpolate : int
      The order of the interpolation used in constructing the full 
      background estimate in case a non-trivial spacing is used.
      
    save : str or None
      Save the result of this step to the specified file if not None.
  
  
  DoG Filter
  ----------
  dog_filter : dict or None
    Difference of Gaussian filter step parameter.

    shape : tuple
      The shape of the filter.
      This should be near the typical cell size.
      
    sigma : tuple or None
       The std of the inner Gaussian.
       If None, determined automatically from shape.
     
    sigma2 : tuple or None
       The std of the outer Gaussian.
       If None, determined automatically from shape.
    
    save : str or None
      Save the result of this step to the specified file if not None.
  
  
  Maxima detection
  ----------------
  maxima_detection : dict or None
    Extended maxima detection step parameter.

    h_max : float or None
      The 'height' for the extended maxima.
      If None, simple local maxima detection is used.

    shape : tuple
      The shape of the structural element for extended maxima detection.
      This should be near the typical cell size.
    
    threshold : float or None
      Only maxima above this threshold are detected. If None, all maxima
      are detected.
      
    valid : bool
      If True, only detect cell centers in the valid range of the blocks with
      overlap.
    
    save : str or None
      Save the result of this step to the specified file if not None.
  
  
  Shape detection
  ---------------
  shape_detection : dict or None
    Shape detection step parameter.

    threshold : float
      Cell shape is expanded from maxima if pixels are above this threshold
      and not closer to another maximum.
    
    save : str or None
      Save the result of this step to the specified file if not None.
  
  
  Intensity detection
  -------------------
  intensity_detection : dict or None
    Intensity detection step parameter.

    method : {'max', 'min', 'mean', 'sum'}
      The method to use to measure the intensity of a cell.
      
    shape : tuple or None
      If no cell shapes are detected a disk of this shape is used to measure
      the cell intensity.
    
    save : str or None
      Save the result of this step to the specified file if not None.
  
  References
  ----------
  [1] Renier, Adams, Kirst, Wu et al., "Mapping of Brain Activity by Automated Volume Analysis of Immediate Early Genes.", Cell 165, 1789 (2016)
  [2] Kirst et al., "Mapping the Fine-Scale Organization and Plasticity of the Brain Vasculature", Cell 180, 780 (2020)
  """

    #initialize sink
    shape = io.shape(source)
    order = io.order(source)

    for key in cell_detection_parameter.keys():
        par = cell_detection_parameter[key]
        if isinstance(par, dict):
            filename = par.get('save', None)
            if filename:
                ap.initialize_sink(filename,
                                   shape=shape,
                                   order=order,
                                   dtype='float')

    cell_detection_parameter.update(
        verbose=processing_parameter.get('verbose', False))

    results, blocks = bp.process(detect_cells_block,
                                 source,
                                 sink=None,
                                 function_type='block',
                                 return_result=True,
                                 return_blocks=True,
                                 parameter=cell_detection_parameter,
                                 **processing_parameter)

    #merge results
    results = np.vstack([np.hstack(r) for r in results])

    #create column headers
    header = ['x', 'y', 'z']
    dtypes = [int, int, int]
    if cell_detection_parameter['shape_detection'] is not None:
        header += ['size']
        dtypes += [int]
    measures = cell_detection_parameter['intensity_detection']['measure']
    header += measures
    dtypes += [float] * len(measures)

    dt = {
        'names': header,
        'formats': dtypes
    }
    cells = np.zeros(len(results), dtype=dt)
    for i, h in enumerate(header):
        cells[h] = results[:, i]

    #save results
    return io.write(sink, cells)
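
A hedged sketch of the two parameter dictionaries, using only the keys documented above; the numeric values are placeholders rather than recommended settings, the 'measure' list mirrors its use in the code above, and the processing_parameter keys are assumptions about typical block-processing options:

cell_detection_parameter = dict(
    illumination_correction=None,      # skip this step
    background_correction=dict(shape=(10, 10), form='Disk', save=None),
    equalization=None,
    dog_filter=dict(shape=(6, 6, 6), sigma=None, sigma2=None, save=None),
    maxima_detection=dict(h_max=None, shape=5, threshold=None,
                          valid=True, save=None),
    shape_detection=dict(threshold=700, save=None),
    intensity_detection=dict(method='max', shape=3,
                             measure=['source']),  # hypothetical measure name
)

processing_parameter = dict(processes=4, verbose=True)  # assumed options

detected = detect_cells('stitched.npy',             # placeholder source
                        sink='cells_raw.npy',       # placeholder sink
                        cell_detection_parameter=cell_detection_parameter,
                        processing_parameter=processing_parameter)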
Example #9
def transform_points(source, sink = None, transform_parameter_file = None, transform_directory = None, indices = False, result_directory = None, temp_file = None, binary = True):
  """Transform coordinates math:`x` via elastix estimated transformation to :math:`T(x)`.

  Arguments
  ---------
  source : str
    Source of the points.
  sink : str or None
    Sink for transformed points.
  transform_parameter_file : str or None
    Parameter file for the primary transformation. 
    If None, the file is determined from the transform_directory.
  transform_directory : str or None
    Result directory of elastix alignment.
    If None the transform_parameter_file has to be given.
  indices : bool
    If True use points as pixel coordinates otherwise spatial coordinates.
  result_directory : str or None
    Elastic result directory.
  temp_file : str or None
    Optional file name for the elastix point file.
      
  Returns
  -------
  points : array or str
    Array or file name of transformed points.
      
  Note
  ----
  The transformation is from the fixed image coordinates to the moving 
  image coordinates.
  """   
  check_elastix_initialized();    

  # input point file
  if temp_file is None:
    if binary:
      temp_file = os.path.join(tempfile.gettempdir(), 'elastix_input.bin');
    else:
      temp_file = os.path.join(tempfile.gettempdir(), 'elastix_input.txt');
  
  delete_point_file = None;
  if isinstance(source, str):
    if len(source) > 3 and source[-3:] in ['txt', 'bin']:
      if source[-3:] == 'txt':
        binary = False; 
      if source[-3:] == 'bin':
        binary = True;
      pointfile = source;
    else:
      points = io.read(source);
      pointfile = temp_file;
      delete_point_file = temp_file;
      write_points(pointfile, points, indices = indices, binary = binary);
  elif isinstance(source, np.ndarray):
    pointfile = temp_file;
    delete_point_file = temp_file;
    write_points(pointfile, source, indices = indices, binary = binary);
  else:
    raise RuntimeError('transform_points: source not string or array!');
  #print(pointfile)
  
  # result directory
  if result_directory is None:
    outdirname = os.path.join(tempfile.gettempdir(), 'elastix_output');
    delete_result_directory = outdirname;
  else:
    outdirname = result_directory;
    delete_result_directory = None;
      
  if not os.path.exists(outdirname):
    os.makedirs(outdirname);
  
  #transform
  transform_parameter_dir, transform_parameter_file = transform_directory_and_file(transform_parameter_file = transform_parameter_file, transform_directory = transform_directory);
  set_path_transform_files(transform_parameter_dir);
  
  #run transformix   
  cmd = '%s -def %s -out %s -tp %s' % (transformix_binary, pointfile, outdirname, transform_parameter_file);
  print(cmd)
  
  res = os.system(cmd);
  
  if res != 0:
    raise RuntimeError('failed executing ' + cmd);
  
  # read data and clean up
  if delete_point_file is not None:
    os.remove(delete_point_file);
  
  #read data / file 
  if sink == []: # return sink as file name
    if binary:
      return os.path.join(outdirname, 'outputpoints.bin')
    else:
      return os.path.join(outdirname, 'outputpoints.txt')
  
  else:
    if binary:
      transpoints = read_points(os.path.join(outdirname, 'outputpoints.bin'), indices = indices, binary = True);
    else:
      transpoints = read_points(os.path.join(outdirname, 'outputpoints.txt'), indices = indices, binary = False); 
    
    if delete_result_directory is not None:
      shutil.rmtree(delete_result_directory);
  
  return io.write(sink, transpoints);
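
A minimal usage sketch for the function above, assuming an elastix alignment has already produced a transform directory; the paths are placeholders:

import numpy as np

# points in fixed-image coordinates, shape (n, 3)
pts = np.array([[10.0, 20.0, 30.0],
                [40.0, 50.0, 60.0]])

transformed = transform_points(pts,
                               sink=None,                                      # return the array directly
                               transform_directory='/path/to/elastix_output',  # placeholder
                               indices=False,
                               binary=False)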
Example #10
def transform(source, sink = [], transform_parameter_file = None, transform_directory = None, result_directory = None):
  """Transform a raw data set to reference using the elastix alignment results.
  
  Arguments
  ---------
  source : str or array
    Image source to be transformed.
  sink : str, [] or None
    Image sink to save transformed image to. If [] return the default name 
    of the data file generated by transformix.
  transform_parameter_file : str or None
    Parameter file for the primary transformation. 
    If None, the file is determined from the transform_directory.
  transform_directory : str or None
    Result directory of elastix alignment. 
    If None the transform_parameter_file has to be given.
  result_directory : str or None
    The directory for the transformix results.
      
  Returns
  -------
  transformed : array or str
    Array or file name of the transformed data.
      
  Note
  ----
  If the map determined by elastix is
  :math:`T: \\mathrm{fixed} \\rightarrow \\mathrm{moving}`, 
  transformix on data works as :math:`T^{-1}(\\mathrm{data})`.
  """
  check_elastix_initialized();  
  
  # image
  source = io.as_source(source);
  if isinstance(source, io.tif.Source):
    imgname = source.location;
    delete_image = None;
  else:
    imgname = os.path.join(tempfile.gettempdir(), 'elastix_input.tif');
    io.write(imgname, source);  # write the array to a temporary tif for transformix
    delete_image = imgname;

  # result directory
  delete_result_directory = None;
  if result_directory is None:
    resultdirname = os.path.join(tempfile.gettempdir(), 'elastix_output');
    delete_result_directory = resultdirname;
  else:
    resultdirname = result_directory;
     
  if not os.path.exists(resultdirname):
    os.makedirs(resultdirname);
  
  # transformation parameters
  transform_parameter_dir, transform_parameter_file = transform_directory_and_file(transform_parameter_file = transform_parameter_file, transform_directory = transform_directory);
  
  set_path_transform_files(transform_parameter_dir);
 
  #transformix -in inputImage.ext -out outputDirectory -tp TransformParameters.txx
  cmd = '%s -in %s -out %s -tp %s' % (transformix_binary, imgname, resultdirname, transform_parameter_file);
  
  res = os.system(cmd);
  
  if res != 0:
    raise RuntimeError('transform_data: failed executing: ' + cmd);
  
  # read data and clean up
  if delete_image is not None:
      os.remove(delete_image);
  
  if sink == []:
    return result_data_file(resultdirname);
  elif sink is None:
    resultfile = result_data_file(resultdirname);
    result = io.read(resultfile);
  elif isinstance(sink, str):
    resultfile = result_data_file(resultdirname);
    result = io.convert(resultfile, sink);
  else:
    raise RuntimeError('transform_data: sink not valid!');
    
  if delete_result_directory is not None:
    shutil.rmtree(delete_result_directory);
  
  return result;
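
The corresponding usage sketch for image data; the file names and the transform directory are placeholders:

transformed = transform('resampled.tif',                # placeholder source image
                        sink='aligned_to_atlas.tif',    # placeholder output file
                        transform_directory='/path/to/elastix_output')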
Example #11
def resample_points_inverse(source, sink = None, resample_source = None, resample_sink = None,
                            orientation = None, source_shape = None, sink_shape = None, 
                            source_resolution = None, sink_resolution = None, **args):
  """Resample points from original coordiantes to resampled ones.
  
  Arguments
  ---------
  source : str or array
    Points to be resampled inversely.
  sink : str or None
    Sink for the inversely resampled points.
  resample_source : str, array or None
    Optional source as in :func:`resample`.
  resample_sink: str, array or None
    Optional sink used in :func:`resample`.
  orientation : tuple
    Orientation as specified in :func:`resample`.
  source_shape : tuple or None
    Optional value of source_shape as in :func:`resample`.
  source_resolution : tuple or None
    Optional value of source_resolution as in :func:`resample`.
  sink_resolution : tuple or None
    Optional value of sink_resolution as in :func:`resample`.
      
  Returns
  -------
  resampled : array or str
    Sink for the inversely resampled point coordinates.

  Notes
  -----
  * The resampling of points here corresponds to the inverse resampling of 
    an image in :func:`resample`, i.e. to :func:`resample_inverse`.
  * The arguments should be passed exactly as in :func:`resample` except source
    and sink, which point to the point sources. 
    Use resample_source and resample_sink to pass the source and sink values
    used in :func:`resample`.
  """
  #orientation
  orientation = format_orientation(orientation);
  
  #original source info
  if source_shape is None:
    if source_resolution is None and resample_source is None:
      raise ValueError('Either source_shape, source_resolution or resample_source must be given!')
    if resample_source is not None:
      source_shape = io.shape(resample_source);

  #original sink info
  if sink_shape is None and sink_resolution is None: 
    if resample_sink is None:
      sink_shape = io.shape(source);
    else:
      sink_shape = io.shape(resample_sink);
  
  source_shape, sink_shape, source_resolution, sink_resolution = \
      resample_shape(source_shape=source_shape, sink_shape=sink_shape, 
                     source_resolution=source_resolution, sink_resolution=sink_resolution, 
                     orientation=orientation);
  
  sink_shape_in_source_orientation = orient_shape(sink_shape, orientation, inverse=True);
  
  resample_factor = [float(t)/float(s) for s,t in zip(source_shape, sink_shape_in_source_orientation)];
  
  points = io.read(source);
 
  # reorient points
  if orientation is not None:
    #reverse axes
    reslice = False;
    slicing = [slice(None)] * len(source_shape);
    for d,o in enumerate(orientation):
      if o < 0:
        slicing[d] = slice(None, None, -1);
        reslice = True;
    if reslice:
      points = points[slicing];

    #permute
    per = orientation_to_permuation(orientation);
    points = points.transpose(per);
  
  points = points[:] / resample_factor;
  
  return io.write(sink, points);     
Example #12
def resample(source, sink = None, orientation = None, 
             sink_shape = None, source_resolution = None, sink_resolution = None, 
             interpolation = 'linear', axes_order = None, method = 'shared',
             processes = None, verbose = True):
  """Resample data of source in new shape/resolution and orientation.
  
  Arguments
  ---------
  source : str or array
    The source to be resampled.
  sink : str or None
    The sink for the resampled image.
  orientation : tuple or None
    The orientation specified by permutation and change in sign of (1,2,3).
  sink_shape : tuple or None
    The target shape of the resampled sink.
  source_resolution : tuple or None
    The resolution of the source (in length per pixel).
  sink_resolution : tuple or None
    The resolution of the resampled source (in length per pixel).
  interpolation : str 
    The method to use for interpolating to the resampled array.
  axes_order : str, list of tuples of int or None
    The axes pairs along which to resample the data at each step.
    If None, this is determined automatically. For a FileList source, 
    the first tuple should point to axes not indicating files.
    If 'size', the axis order is determined automatically to maximally reduce 
    the size of the array in each resampling step.
    If 'order', the axis order is chosen automatically to optimize io speed.
  method : 'shared' or 'memmap'
    Method to handle intermediate resampling results. If 'shared' use shared 
    memory, otherwise use a memory map on disk.
  processes : int, None or 'serial'
    Number of processes to use for parallel resampling; if None, use the 
    maximal number of processes available; if 'serial', process in serial.
  verbose : bool
    If True, display progress information.
  
  Returns
  -------
  sink : array or str
    The data or filename of resampled sink.

  Notes
  -----
  * Resolutions are assumed to be given for the axes of the intrinsic 
    orientation of the data and reference (as when viewed by ImageJ).
  * Orientation: permutation of 1,2,3 with potential sign, indicating which 
    axes map onto the reference axes; a negative sign indicates reversal 
    of that particular axis.
  * Only a minimal set of information to determine the resampling parameters 
    has to be given, e.g. source_shape and sink_shape.
  * The resampling is done by iterating two dimensional resampling steps.
  """
  #TODO: write full nd resampling routine extending cv2 lib.
  if verbose:
    timer = tmr.Timer();
  
  source = io.as_source(source);
  source_shape = source.shape;
  ndim = len(source_shape);
  dtype = source.dtype;
  order = source.order;
  
  orientation = format_orientation(orientation);
  
  source_shape, sink_shape, source_resolution, sink_resolution = \
     resample_shape(source_shape=source_shape, sink_shape=sink_shape, 
                    source_resolution=source_resolution, sink_resolution=sink_resolution, 
                    orientation=orientation);
  
  sink_shape_in_source_orientation = orient_shape(sink_shape, orientation, inverse=True);
                                   
  interpolation = _interpolation_to_cv2(interpolation);                                   

  if not isinstance(processes, int) and processes != 'serial':
    processes = io.mp.cpu_count();
  
  #determine order of resampling
  axes_order, shape_order = _axes_order(axes_order, source, sink_shape_in_source_orientation, order=order);
  #print(axes_order, shape_order) 
  
  if len(axes_order) == 0:
    if verbose:
      print('resampling: no resampling necessary, source has same size as sink!');
    if sink != source:
      return io.write(sink, source);
    else:
      return source;
  
  #resample
  n_steps = len(axes_order);
  last_source = source;
  delete_files = [];
  for step, axes, shape in zip(range(n_steps), axes_order, shape_order):
    if step == n_steps-1 and orientation is None:
      resampled = io.initialize(source=sink, shape=sink_shape, dtype=dtype, as_source=True); 
    else:
      if method == 'shared':
        resampled = io.sma.create(shape, dtype=dtype, order=order, as_source=True);
      else:
        location = tempfile.mktemp() + '.npy';
        resampled = io.mmp.create(location, shape=shape, dtype=dtype, order=order, as_source=True);
        delete_files.append(location);
    #print(resampled)

    #indices for non-resampled axes
    indices = tuple([range(s) for d,s in enumerate(shape) if d not in axes]);
    indices = [i for i in itertools.product(*indices)];
    n_indices = len(indices);
    
    #resample step
    last_source_virtual = last_source.as_virtual();
    resampled_virtual = resampled.as_virtual();
    _resample = ft.partial(_resample_2d, source=last_source_virtual, sink=resampled_virtual, axes=axes, shape=shape, 
                                         interpolation=interpolation, n_indices=n_indices, verbose=verbose)                       
    
    if processes == 'serial': 
      for index in indices:
        _resample(index=index);
    else:
      #print(processes);
      with concurrent.futures.ProcessPoolExecutor(processes) as executor:
        executor.map(_resample, indices);
        
    last_source = resampled;
  
  #fix orientation
  if orientation is not None:
    #permute
    per = orientation_to_permuation(orientation);
    resampled = resampled.transpose(per);

    #reverse axes
    reslice = False;
    slicing = [slice(None)] * ndim;
    for d,o in enumerate(orientation):
      if o < 0:
        slicing[d] = slice(None, None, -1);
        reslice = True;
    if reslice:
      resampled = resampled[slicing];
      
    if verbose:
      print("resample: re-oriented shape %r!" % (resampled.shape,))
  
    sink = io.write(sink, resampled);
  else: 
    sink = resampled;
  
  for f in delete_files:
      io.delete_file(f);
  
  if verbose:
    timer.print_elapsed_time('Resampling')
    
  return sink;
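
A minimal usage sketch based on the argument descriptions above; the resolutions, orientation and file names are placeholders for a typical raw-data-to-atlas downsampling:

resampled = resample('stitched.npy',                       # placeholder source
                     sink='resampled_25um.tif',            # placeholder sink
                     source_resolution=(4.06, 4.06, 3.0),  # length per pixel of the raw data
                     sink_resolution=(25.0, 25.0, 25.0),   # target resolution
                     orientation=(1, -2, 3),               # permutation/sign of (1, 2, 3)
                     interpolation='linear',
                     processes=None,
                     verbose=True)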
Example #13
def prepare_annotation_files(slicing=None,
                             orientation=None,
                             directory=None,
                             postfix=None,
                             annotation_file=None,
                             reference_file=None,
                             distance_to_surface_file=None,
                             overwrite=False,
                             verbose=False):
    """Crop the annotation, reference and distance files to match the data.
  
  Arguments
  ---------
  slicing : tuple or None
    The slice specification after reorienting.
  orientation : tuple, str or None.
    The orientation specification. Strings can be 'left' or 'right', for the
    two hemispheres.
  directory : str or None
    The target directory. If None, use ClearMap resources folder.
  postfix : str or None
    Use this postfix for the cropped annotation file. If None, an automatic 
    label is chosen.
  annotation_file : str or None
    The annotation file to use.
  reference_file : str or None
    The reference file to use.
  distance_to_surface_file : str or None
    The distance file to use.
  overwrite : bool
    If True, overwrite existing files.
    
  Returns
  -------
  annotation_file : str
    The cropped annotation file.
  reference_file : str
    The cropped reference file.
  distance_to_surface_file : str
    The cropped distance file.
  """
    if annotation_file is None:
        annotation_file = default_annotation_file
    if reference_file is None:
        reference_file = default_reference_file
    if distance_to_surface_file is None:
        distance_to_surface_file = default_distance_to_surface_file

    files = [annotation_file, reference_file, distance_to_surface_file]

    results = []
    for f in files:
        if f is not None:
            fn = format_annotation_filename(f,
                                            orientation=orientation,
                                            slicing=slicing,
                                            postfix=postfix,
                                            directory=directory)
            if verbose:
                print('Preparing: %r' % fn)

            if not overwrite and io.is_file(fn):
                results.append(fn)
                continue

            if not io.is_file(f):
                raise ValueError('Cannot find annotation file: %s' % f)

            s = io.as_source(f)
            if verbose:
                print('Preparing: from source %r' % s)

            data = np.array(s.array)

            if orientation is not None:
                #permute
                per = res.orientation_to_permuation(orientation)
                data = data.transpose(per)

                #reverse axes
                reslice = False
                sl = [slice(None)] * data.ndim
                for d, o in enumerate(orientation):
                    if o < 0:
                        sl[d] = slice(None, None, -1)
                        reslice = True
                if reslice:
                    data = data[tuple(sl)]

            if slicing is not None:
                data = data[slicing]
            io.write(fn, data)
            results.append(fn)
        else:
            results.append(None)

    return results
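
A usage sketch matching the arguments above; the slicing tuple is a made-up crop applied after reorienting, and the orientation is a placeholder:

annotation_file, reference_file, distance_file = prepare_annotation_files(
    slicing=(slice(None), slice(None), slice(0, 256)),  # hypothetical crop
    orientation=(1, -2, 3),
    overwrite=False,
    verbose=True)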
Example #14
def flatfield_line_from_regression(source, sink = None, positions = None, method = 'polynomial', reverse = None, return_function = False, verbose = False):
  """Create flat field line fit from a list of positions and intensities.
      
  Arguments
  ---------
  source : str, array or Source
    Intensities as (n,)-vector or (n,m)-array of m intensity measurements
    at n points along an axis.
  sink : str, array, Source or None
    Sink for the result.
  positions : array, 'source' or None
    The positions of the source points. If None, linearly increasing
    positions with equal spacing are assumed. If 'source', take the positions
    from the first column of the source array.
  method : 'Gaussian' or 'Polynomial'
    function type for the fit.
  reverse : bool
    Reverse the line fit after fitting.
  return_function : bool
    If True, also return the fitted function.
  verbose : bool
    Print and plot information for the fit.
      
  Returns
  -------
  fit : array
    Fitted intensities on points.
  fit_function : function
    Fitted function.
      
  Note
  ----
  The fit is either assumed to be a 'Gaussian':
  
  .. math::
      I(x) = a \\exp(- (x - x_0)^2 / (2 \\sigma)) + b
      
  or to follow an order 6 radial 'Polynomial':
      
  .. math::
      I(x) = a + b (x - x_0)^2 + c (x - x_0)^4 + d (x - x_0)^6
  """
  source = io.as_source(source);
  
  # split source
  if source.ndim == 1:
    y = np.atleast_2d(source.array);
  elif source.ndim == 2:
    if positions == 'source':
      positions = source[:,0];
      y = source[:,1:-1];
    else:
      y = source.array;
  else:
    raise RuntimeError('flatfield_line_from_regression: input data not a line or array of x,i data');
  
  if positions is None:
    positions = np.arange(source.shape[0])
  
  #calculate mean of the intensity measurements
  x = positions;
  ym = np.mean(y, axis = 1);

  if verbose > 1:
    plt.figure()
    for i in range(1,source.shape[1]):
      plt.plot(x, source[:,i]);
    plt.plot(x, ym, 'k');
  
  if method.lower() == 'polynomial':
    ## fit r^6
    mean = sum(ym * x)/sum(ym)

    def f(x,m,a,b,c,d):
      return a + b * (x-m)**2 + c * (x-m)**4 + d * (x-m)**6;
    
    popt, pcov = curve_fit(f, x, ym, p0 = (mean, 1, 1, 1, .1));
    m = popt[0]; a = popt[1]; b = popt[2];
    c = popt[3]; d = popt[4];     
    
    def fopt(x):
      return f(x, m = m, a = a, b = b, c = c, d = d);        
    
    if verbose:
      print("polynomial fit: %f + %f (x- %f)^2 + %f (x- %f)^4 + %f (x- %f)^6" % (a, b, m, c, m, d, m));
  
  else: 
    ## Gaussian fit       
    mean  = sum(ym * x)/sum(ym)
    sigma = sum(ym * (x-mean)**2)/(sum(ym))
    
    def f(x, a, m, s, b):
      return a * np.exp(- (x - m)**2 / 2 / s) + b;
      
    popt, pcov = curve_fit(f, x, ym, p0 = (1000, mean, sigma, 400));
    a = popt[0]; m = popt[1]; s = popt[2]; b = popt[3];
    
    def fopt(x):
      return f(x, a=a, m=m, s=s, b=b);
    
    if verbose:
      print("Gaussian fit: %f exp(- (x- %f)^2 / (2 %f)) + %f" % (a, m, s, b));
   
  fit = fopt(x);
  if reverse:
    fit = fit[::-1];
  
  if verbose > 1:
    plt.plot(x, fit);
    plt.title('flatfield_line_from_regression')
  
  result = io.write(sink, fit);
  if return_function:
    result = (result, fopt)
  return result;
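
A small self-contained sketch that fits a synthetic Gaussian profile with the function above; the profile parameters and noise level are arbitrary:

import numpy as np

# 5 noisy measurements of a Gaussian intensity profile along 200 positions
x = np.arange(200)
profile = 1000.0 * np.exp(-(x - 100.0) ** 2 / (2.0 * 40.0)) + 400.0
measurements = np.array([profile + 20.0 * np.random.randn(200)
                         for _ in range(5)]).T   # shape (200, 5)

fit, fit_function = flatfield_line_from_regression(measurements,
                                                   method='Gaussian',
                                                   return_function=True,
                                                   verbose=True)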
Example #15
    result_dir = os.path.join(dst_dir, 'cells_blocks')
    block_result_list = []
    print()
    print('Merging block results into a single data file...')
    sys.stdout.flush()
    for block in blocks:
        block_index = block.index[-1]
        print(f"Working on block {block_index}")
        block_savename = os.path.join(result_dir,
                                      f'cells_block{block_index}.p')
        with open(block_savename, 'rb') as pkl:
            block_result = pickle.load(pkl)
            block_result_list.append(block_result)
            header = ['x', 'y', 'z']
            dtypes = [int, int, int]
            if cell_detection_parameter['shape_detection'] is not None:
                header += ['size']
                dtypes += [int]
            measures = cell_detection_parameter['intensity_detection'][
                'measure']
            header += measures
            dtypes += [float] * len(measures)
    final_results = np.vstack([np.hstack(r) for r in block_result_list])
    dt = {'names': header, 'formats': dtypes}
    cells_allblocks = np.zeros(len(final_results), dtype=dt)
    for i, h in enumerate(header):
        cells_allblocks[h] = final_results[:, i]
    savename = ws.filename('cells', postfix='raw')
    io.write(savename, cells_allblocks)
    print(f'Saved merged raw cell detection results to: {savename}')
Example #16
def skeletonize(source,
                sink=None,
                points=None,
                method='PK12i',
                steps=None,
                in_place=False,
                verbose=True,
                **kwargs):
    """Skeletonize 3d binary arrays.
  
  Arguments
  ---------
  source : array or source 
    Binary image to skeletonize.
  sink : sink specification
    Optional sink.
  points : array or None
    Optional point list of the foreground points in the binary.
  method : str
    'PK12' or faster index version 'PK12i'.
  steps : int or None
    Number of maximal iteration steps. If None, maximal thinning.
  in_place : bool
    If True, the skeletonization is done directly on the input array.
  verbose : bool
    If True, print progress information.
    
  Returns
  -------
  skeleton : Source
    The skeletonized array.
  """
    if verbose:
        timer = tmr.Timer()

    if not in_place and io.is_file(source):
        binary_buffer = ap.read(source).as_buffer()
    else:
        binary, binary_buffer = ap.initialize_source(source)
        if not in_place:
            binary_buffer = np.array(binary_buffer)

    if method == 'PK12':
        result = PK12.skeletonize(binary_buffer,
                                  points=points,
                                  steps=steps,
                                  verbose=verbose,
                                  **kwargs)
    elif method == 'PK12i':
        result = PK12.skeletonize_index(binary_buffer,
                                        points=points,
                                        steps=steps,
                                        verbose=verbose,
                                        **kwargs)
    else:
        raise RuntimeError('Skeletonization method %r is not valid!' % method)

    if verbose:
        timer.print_elapsed_time(head='Skeletonization')

    if sink is None:
        sink = ap.io.as_source(result)
    elif isinstance(sink, str):
        sink = ap.write(sink, result)
    else:
        sink = io.write(sink, result)
    return sink
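
A minimal sketch on a synthetic binary volume, assuming the function accepts an in-memory binary array as the in_place branch above suggests; the bar dimensions are arbitrary:

import numpy as np

# synthetic binary volume containing a solid bar along the first axis
binary = np.zeros((50, 50, 50), dtype=bool)
binary[5:45, 20:30, 20:30] = True

skeleton = skeletonize(binary, sink=None, method='PK12i', verbose=True)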
Example #17
        # merge blocks
        list_of_blocks = os.listdir(os.path.join(directory, 'final_blocks'))
        block_result_list = []
        for block_file_base in list_of_blocks:
            block_file = os.path.join(directory, 'final_blocks',
                                      block_file_base)
            with open(block_file, 'rb') as file_to_load:
                block_result = pickle.load(file_to_load)
                block_result_list.append(block_result)
        final_results = np.vstack([np.hstack(r) for r in block_result_list
                                   ])  # merges results into a single array
        header = ['x', 'y', 'z']
        dtypes = [int, int, int]
        if cell_detection_parameter['shape_detection'] is not None:
            header += ['size']
            dtypes += [int]
        measures = cell_detection_parameter['intensity_detection']['measure']
        header += measures
        dtypes += [float] * len(measures)

        dt = {
            'names': header,
            'formats': dtypes
        }
        cells_out = np.zeros(len(final_results), dtype=dt)
        for i, h in enumerate(header):
            cells_out[h] = final_results[:, i]
        savename = ws.filename('cells', postfix='raw')
        io.write(savename, cells_out)