Example #1
def rotatedView(img, angle, enlarge=True, extend=Views.extendBorder):
  """ Return a rotated view of the image, around the Z axis,
      with an expanded (or reduced) interval view so that all pixels are exactly included.

      img: a RandomAccessibleInterval
      angle: in degrees
  """
  cx = img.dimension(0) / 2.0
  cy = img.dimension(1) / 2.0
  toCenter = AffineTransform2D()
  toCenter.translate(-cx, -cy)
  rotation = AffineTransform2D()
  # Step 1: place origin of rotation at the center of the image
  rotation.preConcatenate(toCenter)
  # Step 2: rotate around the Z axis
  rotation.rotate(radians(angle))
  # Step 3: undo translation to the center
  rotation.preConcatenate(toCenter.inverse())
  rotated = RV.transform(Views.interpolate(extend(img),
                                           NLinearInterpolatorFactory()), rotation)
  if enlarge:
    # Bounds:
    bounds = repeat((sys.maxint, 0)) # initial values to compare against:
                                     # sys.maxint for the min, 0 for the max
    transformed = zeros(2, 'f')
    for corner in product(*zip(repeat(0), Intervals.maxAsLongArray(img))):
      rotation.apply(corner, transformed)
      bounds = [(min(vmin, int(floor(v))), max(vmax, int(ceil(v))))
                for (vmin, vmax), v in zip(bounds, transformed)]
    minC, maxC = map(list, zip(*bounds)) # transpose list of 2 pairs
                                         # into 2 lists of 2 values
    imgRot = Views.zeroMin(Views.interval(rotated, minC, maxC))
  else:
    imgRot = Views.interval(rotated, img)
  return imgRot
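# Minimal usage sketch (an assumption, not part of the listing): requires ij.IJ
# and ImageJFunctions (here as IL) in addition to the imports used above,
# and an image currently open in ImageJ.
imp = IJ.getImage()
img = IL.wrap(imp)  # ImagePlus -> ImgLib2 RandomAccessibleInterval
IL.wrap(rotatedView(img, 30.0), "rotated 30 degrees").show()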
Example #2
def transformedView(img, transform, interval=None):
    """ """
    imgE = Views.extendZero(img)
    imgI = Views.interpolate(imgE, NLinearInterpolatorFactory())
    imgT = RealViews.transform(imgI, transform)
    if interval:
        return Views.interval(imgT, interval)
    else:
        return Views.interval(
            imgT, [0, 0, 0],
            [img.dimension(d) - 1 for d in xrange(img.numDimensions())])
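# Minimal usage sketch (an assumption): shift a 3D image by (10, 5, 0) pixels.
# Translation3D (net.imglib2.realtransform) and IL (ImageJFunctions) are assumed
# imported; img is a 3D RandomAccessibleInterval, e.g. IL.wrap(IJ.getImage()).
shifted = transformedView(img, Translation3D(10.0, 5.0, 0.0))
IL.wrap(shifted, "shifted").show()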
 def create(self, index):
     cell_dimensions = [
         self.grid.cellDimension(0),
         self.grid.cellDimension(1)
     ]
     n_cols = self.grid.imgDimension(0) / cell_dimensions[0]
     x0 = (index % n_cols) * cell_dimensions[0]
     y0 = (index / n_cols) * cell_dimensions[1]
     index += 1  # 1-based slice indices in ij.ImageStack
     if index < 1 or index > self.stack.size():
         # Return blank image: a ByteAccess that always returns 255
         return Cell(
             cell_dimensions, [x0, y0],
             type('ConstantValue', (ByteAccess, ), {
                 'getValue': lambda self, index: 255
             })())
     else:
         # ImageJ stack slice indices are 1-based
         img = IL.wrap(ImagePlus("", self.stack.getProcessor(index)))
         # Create extended image with the padding color value
         imgE = Views.extendValue(img, self.t.copy())
         # A view that includes the padding between slices
         minC = [-self.cell_padding for d in xrange(img.numDimensions())]
         maxC = [
             img.dimension(d) - 1 + self.cell_padding
             for d in xrange(img.numDimensions())
         ]
         imgP = Views.interval(imgE, minC, maxC)
         return Cell(cell_dimensions, [x0, y0],
                     ProxyByteAccess(imgP, self.grid))
def translatedView(img, matrix):
  imgE = Views.extendZero(img)
  imgI = Views.interpolate(imgE, NLinearInterpolatorFactory())
  # In negative: the inverse
  t = Translation3D(-matrix[3], -matrix[7], -matrix[11])
  imgT = RealViews.transform(imgI, t)
  return Views.interval(imgT, [0, 0, 0],
                        [img.dimension(d) - 1 for d in xrange(3)]) # max is inclusive
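# Minimal sketch (an assumption): a flat, row-major 3x4 matrix whose translation
# terms sit at indices 3, 7 and 11; translatedView applies the opposite shift.
matrix = [1.0, 0.0, 0.0, 10.0,
          0.0, 1.0, 0.0,  5.0,
          0.0, 0.0, 1.0,  2.0]
shifted = translatedView(img, matrix)  # img: a 3D RandomAccessibleInterval (assumed)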
Example #5
    def asNormalizedUnsignedByteArrayImg(interval, invert, blockRadius, n_bins,
                                         slope, matrices, copy_threads, index,
                                         imp):
        sp = imp.getProcessor()  # ShortProcessor
        sp.setRoi(interval.min(0), interval.min(1),
                  interval.max(0) - interval.min(0) + 1,
                  interval.max(1) - interval.min(1) + 1)
        sp = sp.crop()
        if invert:
            sp.invert()
        CLAHE.run(
            ImagePlus("", sp), blockRadius, n_bins, slope, None
        )  # far lower memory requirements than NormalizeLocalContrast, and faster.
        minimum, maximum = autoAdjust(sp)

        # Transform and convert image to 8-bit, mapping to display range
        img = ArrayImgs.unsignedShorts(
            sp.getPixels(), [sp.getWidth(), sp.getHeight()])
        sp = None
        affine = AffineTransform2D()
        affine.set(matrices[index])
        imgI = Views.interpolate(Views.extendZero(img),
                                 NLinearInterpolatorFactory())
        imgA = RealViews.transform(imgI, affine)
        imgT = Views.zeroMin(Views.interval(imgA, img))
        imgMinMax = convert(imgT, RealUnsignedByteConverter(minimum, maximum),
                            UnsignedByteType)
        aimg = ArrayImgs.unsignedBytes(Intervals.dimensionsAsLongArray(img))
        ImgUtil.copy(ImgView.wrap(imgMinMax, aimg.factory()), aimg,
                     copy_threads)
        img = imgI = imgA = imgT = imgMinMax = None
        return aimg
Example #6
def test(iraf):

    # Test dimensions: should be the same as the one input image
    print "Dimensions:", Intervals.dimensionsAsLongArray(iraf)

    # Test Cursor
    c = iraf.cursor()
    pos = zeros(2, 'l')
    while c.hasNext():
        c.fwd()
        c.localize(pos)
        print "Cursor:", pos, "::", c.get()

    # Test RandomAccess
    ra = iraf.randomAccess()
    c = iraf.cursor()
    while c.hasNext():
        c.fwd()
        ra.setPosition(c)
        c.localize(pos)
        print "RandomAccess:", pos, "::", ra.get()

    # Test source img: should be untouched
    c = img.cursor()
    while c.hasNext():
        print "source:", c.next()

    # Test interval view: the middle 2x2 square
    v = Views.interval(iraf, [1, 1], [2, 2])
    IL.wrap(v, "+2 view").show()
def translate_single_stack_using_imglib2(imp, dx, dy, dz):
  # wrap into a float imglib2 and translate
  #   conversion into float is necessary due to "overflow of n-linear interpolation due to accuracy limits of unsigned bytes"
  #   see: https://github.com/fiji/fiji/issues/136#issuecomment-173831951
  img = ImagePlusImgs.from(imp.duplicate())
  extended = Views.extendBorder(img)
  converted = Converters.convert(extended, RealFloatSamplerConverter())
  interpolant = Views.interpolate(converted, NLinearInterpolatorFactory())
  
  # translate
  if imp.getNDimensions()==3:
    transformed = RealViews.affine(interpolant, Translation3D(dx, dy, dz))
  elif imp.getNDimensions()==2:
    transformed = RealViews.affine(interpolant, Translation2D(dx, dy))
  else:
    IJ.log("Can only work on 2D or 3D stacks")
    return None
  
  cropped = Views.interval(transformed, img)
  # wrap back into bit depth of input image and return
  bd = imp.getBitDepth()
  if bd==8:
    return(ImageJFunctions.wrapUnsignedByte(cropped,"imglib2"))
  elif bd == 16:
    return(ImageJFunctions.wrapUnsignedShort(cropped,"imglib2"))
  elif bd == 32:
    return(ImageJFunctions.wrapFloat(cropped,"imglib2"))
  else:
    return None    
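# Minimal usage sketch (an assumption): translate the currently open stack by
# (5, 3, 2) pixels and show the result; IJ is assumed imported from ij.
imp = IJ.getImage()
translated = translate_single_stack_using_imglib2(imp, 5.0, 3.0, 2.0)
if translated is not None:
  translated.show()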
Example #8
def translate_single_stack_using_imglib2(imp, dx, dy, dz):
  # wrap into a float imglib2 and translate
  #   conversion into float is necessary due to "overflow of n-linear interpolation due to accuracy limits of unsigned bytes"
  #   see: https://github.com/fiji/fiji/issues/136#issuecomment-173831951
  img = ImagePlusImgs.from(imp.duplicate())
  extended = Views.extendZero(img)
  converted = Converters.convert(extended, RealFloatSamplerConverter())
  interpolant = Views.interpolate(converted, NLinearInterpolatorFactory())
  
  # translate
  if imp.getNDimensions()==3:
    transformed = RealViews.affine(interpolant, Translation3D(dx, dy, dz))
  elif imp.getNDimensions()==2:
    transformed = RealViews.affine(interpolant, Translation2D(dx, dy))
  else:
    IJ.log("Can only work on 2D or 3D stacks")
    return None
  
  cropped = Views.interval(transformed, img)
  # wrap back into bit depth of input image and return
  bd = imp.getBitDepth()
  if bd==8:
    return(ImageJFunctions.wrapUnsignedByte(cropped,"imglib2"))
  elif bd == 16:
    return(ImageJFunctions.wrapUnsignedShort(cropped,"imglib2"))
  elif bd == 32:
    return(ImageJFunctions.wrapFloat(cropped,"imglib2"))
  else:
    return None    
Example #9
def findEdgePixels(img):
  edge_pix = []
  zero = img.firstElement().createVariable()
  zero.setZero()
  imgE = Views.extendValue(img, zero)
  pos = zeros(img.numDimensions(), 'l')
  inc = partial(operator.add, 1)
  dec = partial(operator.add, -1)
  cursor = img.cursor()
  while cursor.hasNext():
    t = cursor.next()
    # A pixel is on the edge of the binary mask
    # if it has a non-zero value
    if 0 == t.getByte():
      continue
    # ... and its immediate neighbors ...
    cursor.localize(pos)
    minimum = array(imap(dec, pos), 'l') # map(dec, pos) also works, but is slower
    maximum = array(imap(inc, pos), 'l') # map(inc, pos) also works, but is slower
    neighborhood = Views.interval(imgE, minimum, maximum)
    # ... have at least one zero value:
    # Good performance: the "if x in <iterable>" approach stops upon finding the first x    
    if 0 in imap(UnsignedByteType.getByte, neighborhood):
      edge_pix.append(RealPoint(array(list(pos), 'f')))
  return edge_pix
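# Minimal usage sketch (an assumption): find the edge pixels of an 8-bit binary
# mask currently open in ImageJ; IL (ImageJFunctions) and IJ are assumed imported.
mask = IL.wrap(IJ.getImage())  # expects 0/255 unsigned byte values
edge_points = findEdgePixels(mask)
print "Edge pixels found:", len(edge_points)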
Example #10
def viewTransformed(img, affine):
    imgE = Views.extendZero(img)
    imgI = Views.interpolate(imgE, NLinearInterpolatorFactory())
    imgT = RealViews.transform(imgI, affine)
    minC = [0, 0, 0]
    maxC = [img.dimension(d) - 1 for d in xrange(img.numDimensions())]
    imgB = Views.interval(imgT, minC, maxC)
    return imgB
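# Minimal usage sketch (an assumption): view a 3D image with its content shifted
# by (20, 10, 0) pixels; AffineTransform3D (net.imglib2.realtransform) and a 3D
# RandomAccessibleInterval named img are assumed.
affine = AffineTransform3D()
affine.set(1.0, 0.0, 0.0, 20.0,
           0.0, 1.0, 0.0, 10.0,
           0.0, 0.0, 1.0,  0.0)
moved = viewTransformed(img, affine)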
Example #11
def peakData():
    """ A generator function that returns all peaks and their pixel sum, one by one. """
    for peak in peaks:
        peak.localize(p)
        minCoords, maxCoords = centerAt(p, minC, maxC)
        fov = Views.interval(img, minCoords, maxCoords)
        s = sum(t.getInteger() for t in fov)
        yield p, s
Example #12
def roi(mask, image):
	# Convert ROI from R^n to Z^n.
	#discreteROI = Views.raster(Masks.toRealRandomAccessible(mask))
	# Apply finite bounds to the discrete ROI.
	boundedDiscreteROI = Views.interval(mask, image)
	# Create an iterable version of the finite discrete ROI.
	iterableROI = Regions.iterable(boundedDiscreteROI)
	return iterableROI
def transformedView(img, matrix):
  imgE = Views.extendZero(img)
  imgI = Views.interpolate(imgE, NLinearInterpolatorFactory())
  aff = AffineTransform3D()
  aff.set(*matrix)
  aff = aff.inverse()
  imgT = RealViews.transform(imgI, aff)
  return Views.interval(imgT, [0, 0, 0],
                        [img.dimension(d) - 1 for d in xrange(3)]) # max is inclusive
def getViewFromImp(imp, r=None):
    # r is a java.awt.rectangle
    im = IL.wrapByte(imp)
    if r is None:
        r = Rectangle(0, 0, imp.getWidth(), imp.getHeight())
    v = Views.zeroMin(
        Views.interval(im, [r.x, r.y],
                       [r.x + r.width - 1, r.y + r.height - 1]))
    return v
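# Minimal usage sketch (an assumption): take a zero-min view of the 100x100
# region starting at (50, 50) of the current 8-bit image; Rectangle is
# java.awt.Rectangle and IL is ImageJFunctions (both assumed imported).
v = getViewFromImp(IJ.getImage(), Rectangle(50, 50, 100, 100))
IL.wrap(v, "cropped view").show()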
Example #15
 def updatePixels(self):
     # Copy interval into pixels
     view = Views.interval(
         Views.extendZero(Views.hyperSlice(self.img3D, 2, self.indexZ)),
         self.interval2D)
     aimg = ArrayImgs.floats(
         self.getPixels(),
         [self.interval2D.dimension(0),
          self.interval2D.dimension(1)])
     ImgUtil.copy(view, aimg)
Example #16
 def get(self, path):
   transform = self.transformsDict[path]
   img = self.loader.get(path)
   imgE = Views.extendZero(img)
   imgI = Views.interpolate(imgE, NLinearInterpolatorFactory())
   imgT = RealViews.transform(imgI, transform)
   minC = self.roi[0] if self.roi else [0] * img.numDimensions()
   maxC = self.roi[1] if self.roi else [img.dimension(d) -1 for d in xrange(img.numDimensions())]
   imgO = Views.zeroMin(Views.interval(imgT, minC, maxC))
   return ImgView.wrap(imgO, img.factory()) if self.asImg else imgO
def getFFTFromView(v, extension, extSize, paddedDimensions, fftSize):
    FFTMethods.dimensionsRealToComplexFast(extSize, paddedDimensions, fftSize)
    fft = ArrayImgFactory(ComplexFloatType()).create(fftSize,
                                                     ComplexFloatType())
    FFT.realToComplex(
        Views.interval(
            PhaseCorrelation2Util.extendImageByFactor(v, extension),
            FFTMethods.paddingIntervalCentered(
                v, FinalInterval(paddedDimensions))), fft, exe)
    return fft
Example #18
 def projectMax(img, minC, maxC, reduce_max):
     # Collapse the last dimension of the interval into a composite view
     imgC = Views.collapseReal(Views.interval(img, minC, maxC))
     imgA = ArrayImgs.unsignedShorts(
         Intervals.dimensionsAsLongArray(imgC))
     ImgUtil.copy(
         ImgView.wrap(
             convert(imgC, reduce_max.newInstance(), imglibtype),
             img.factory()), imgA)
     return imgA
Example #19
def viewTransformed(img, matrix):
    affine = AffineTransform3D()
    affine.set(*matrix)
    # It's a forward transform: invert
    affine = affine.inverse()
    imgE = Views.extendZero(img)
    imgI = Views.interpolate(imgE, NLinearInterpolatorFactory())
    imgT = RealViews.transform(imgI, affine)
    # Same dimensions
    imgB = Views.interval(imgT, img)
    return imgB
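# Minimal usage sketch (an assumption): a flat, row-major 3x4 matrix describing
# a translation by (10, 5, 2); the function treats it as a forward transform and
# inverts it before rendering, keeping the original dimensions.
matrix = [1.0, 0.0, 0.0, 10.0,
          0.0, 1.0, 0.0,  5.0,
          0.0, 0.0, 1.0,  2.0]
moved = viewTransformed(img, matrix)  # img: a 3D RandomAccessibleInterval (assumed)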
Example #20
 def makeCell(self, index):
     self.preloadCells(index)  # preload others in the background
     img = self.loadImg(self.filepaths[index])
     affine = AffineTransform2D()
     affine.set(self.matrices[index])
     imgI = Views.interpolate(Views.extendZero(img),
                              NLinearInterpolatorFactory())
     imgA = RealViews.transform(imgI, affine)
     imgT = Views.zeroMin(Views.interval(imgA, self.interval))
     aimg = img.factory().create(self.interval)
     ImgUtil.copy(ImgView.wrap(imgT, aimg.factory()), aimg)
     return Cell(self.cell_dimensions, [0, 0, index], aimg.update(None))
Example #21
def intoSlice(img, xOffset, yOffset):
    stack_slice = ArrayImgFactory(
        img.randomAccess().get().createVariable()).create(
            [canvas_width, canvas_height])
    target = Views.interval(
        stack_slice, [xOffset, yOffset],
        [xOffset + img.dimension(0) - 1, yOffset + img.dimension(1) - 1])
    c1 = target.cursor()
    c2 = img.cursor()
    while c1.hasNext():
        c1.next().set(c2.next())

    return stack_slice
Example #22
def scale3D(img, x=1.0, y=1.0, z=1.0):
  scale3d = AffineTransform3D()
  scale3d.set(x, 0, 0, 0,
              0, y, 0, 0,
              0, 0, z, 0)
  imgE = Views.extendZero(img)
  imgI = Views.interpolate(imgE, NLinearInterpolatorFactory())
  imgT = RealViews.transform(imgI, scale3d)
  # dimensions
  minC = [0, 0, 0]
  maxC = [int(img.dimension(d) * k + 0.5) -1 for d, k in enumerate([x, y, z])]
  imgB = Views.interval(imgT, minC, maxC)
  return imgB
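# Minimal usage sketch (an assumption): downscale a 3D image to half its size in
# X and Y while keeping Z, then show it; IL is ImageJFunctions (assumed imported).
small = scale3D(img, x=0.5, y=0.5, z=1.0)
IL.wrap(small, "half size").show()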
def scale(img, calibration):
    scale3d = AffineTransform3D()
    scale3d.set(calibration[0], 0, 0, 0, 0, calibration[1], 0, 0, 0, 0,
                calibration[2], 0)
    imgE = Views.extendZero(img)
    imgI = Views.interpolate(imgE, NLinearInterpolatorFactory())
    imgT = RealViews.transform(imgI, scale3d)
    # dimensions
    minC = [0, 0, 0]
    maxC = [
        int(img.dimension(d) * cal) - 1 for d, cal in enumerate(calibration)
    ]
    imgB = Views.interval(imgT, minC, maxC)
    return imgB
Example #24
 def crop(event):
   global cropped, cropped_imp
   coords = [int(float(tf.getText())) for tf in textfields]
   minC = [max(0, c) for c in coords[0:3]]
   maxC = [min(d -1, c) for d, c in izip(Intervals.dimensionsAsLongArray(images[0]), coords[3:6])]
   storeRoi(minC, maxC)
   print "ROI min and max coordinates"
   print minC
   print maxC
   cropped = [Views.zeroMin(Views.interval(img, minC, maxC)) for img in images]
   cropped_imp = showAsStack(cropped, title="cropped")
   cropped_imp.setDisplayRange(imp.getDisplayRangeMin(), imp.getDisplayRangeMax())
   if cropContinuationFn:
     cropContinuationFn(images, minC, maxC, cropped, cropped_imp)
Example #25
 def keyPressed(self, ke):
   keyCode = ke.getKeyCode()
   if ke.isControlDown() and keyCode in Navigator.moves:
     d, sign = Navigator.moves[keyCode]
     inc = 200 if ke.isShiftDown() else 20
     mins[d] += sign * inc
     maxs[d] += sign * inc
     # Replace source with shifted cropped volume
     fsource.set(stack, Views.zeroMin(Views.interval(imgE, FinalInterval(mins, maxs))))
     imp.updateVirtualSlice()
     return
   # Else, pass the event onto other listeners
   for kl in kls:
     kl.keyPressed(ke)
def populateInstances(instances, synth_imgs, class_index, mins, maxs):
    # Populate the training data: create the filter bank for each feature image
    # by reading values from the interval defined by mins and maxs
    target = ArrayImgs.floats([width, height])
    interval = FinalInterval(mins, maxs)
    n_samples = Intervals.numElements(interval)
    for img in synth_imgs:
        vectors = [zeros(len(attributes), 'd') for _ in xrange(n_samples)]
        for k, op in enumerate(filterBank(img, sumType=DoubleType())):
            imgOp = compute(op).into(target)
            for i, v in enumerate(Views.interval(imgOp, interval)):
                vectors[i][k] = v.getRealDouble()
        for vector in vectors:
            vector[-1] = class_index
            instances.add(DenseInstance(1.0, vector))
Example #27
def pyramidAreaAveraging(img,
                         top_level,
                         min_width=32,
                         sumType=UnsignedLongType,
                         mathType=UnsignedLongType,
                         converter=Util.genericIntegerTypeConverter()):
  """ Return a list of image views, one per scale level of the image pyramid,
      except for level zero (the first image) which is the provided img.
      All images are of the same type as the source img.
      Based on an integral image for fast computation.
  """

  img_type = img.randomAccess().get().createVariable()
  
  # Create an integral image in longs
  alg = IntegralImg(img, sumType(), converter)
  alg.process()
  integralImg = alg.getResult()

  # Create an image pyramid as views, with ImgMath and imglib2,
  # which amounts to scale area averaging sped up by the integral image
  # and generated on demand whenever each pyramid level is read.
  width = img.dimension(0)
  imgE = Views.extendBorder(integralImg)
  blockSide = 1
  level_index = 1
  # Corners for level 1: a box of 2x2
  corners = [[0, 0], [1, 0], [0, 1], [1, 1]]
  pyramid = [img]

  while width > min_width and level_index <= top_level:
    blockSide *= 2
    width /= 2
    # Scale the corner coordinates to make the block larger
    cs = [[c * blockSide for c in corner] for corner in corners]
    blockRead = div(block(imgE, cs), pow(blockSide, 2)) # the op
    # a RandomAccessibleInterval view of the op, computed with mathType but viewed as the image's own pixel type
    view = blockRead.view(mathType(), img_type.createVariable())
    # Views.subsample by 2 will turn a 512-pixel width to a 257 width,
    # so crop to proper interval 256
    level = Views.interval(Views.subsample(view, blockSide),
                           [0] * img.numDimensions(), # min
                           [img.dimension(d) / blockSide -1
                            for d in xrange(img.numDimensions())]) # max
    pyramid.append(level)
    level_index += 1 # for next iteration

  return pyramid
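# Minimal usage sketch (an assumption): build a pyramid of the currently open
# 8-bit image up to level 3 and show each level; IL is ImageJFunctions and IJ
# is ij.IJ (both assumed imported).
img = IL.wrap(IJ.getImage())
for i, level in enumerate(pyramidAreaAveraging(img, top_level=3)):
  IL.wrap(level, "pyramid level %i" % i).show()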
Example #28
    def asNormalizedUnsignedByteArrayImg(interval, invert, blockRadius, n_bins,
                                         slope, matrices, index, imp):
        sp = imp.getProcessor()  # ShortProcessor
        # Crop to interval if needed
        x = interval.min(0)
        y = interval.min(1)
        width = interval.max(0) - interval.min(0) + 1
        height = interval.max(1) - interval.min(1) + 1
        if 0 != x or 0 != y or sp.getWidth() != width or sp.getHeight() != height:
            sp.setRoi(x, y, width, height)
            sp = sp.crop()

        if invert:
            sp.invert()

        CLAHE.run(
            ImagePlus("", sp), blockRadius, n_bins, slope, None
        )  # far lower memory requirements than NormalizeLocalContrast, and faster.
        minimum, maximum = autoAdjust(sp)

        # Transform and convert image to 8-bit, mapping to display range
        img = ArrayImgs.unsignedShorts(
            sp.getPixels(), [sp.getWidth(), sp.getHeight()])
        sp = None
        imp = None
        # Must use linear interpolation for subpixel precision
        affine = AffineTransform2D()
        affine.set(matrices[index])
        imgI = Views.interpolate(Views.extendZero(img),
                                 NLinearInterpolatorFactory())
        imgA = RealViews.transform(imgI, affine)
        imgT = Views.zeroMin(Views.interval(imgA, img))
        # Convert to 8-bit
        imgMinMax = convert2(imgT,
                             RealUnsignedByteConverter(minimum, maximum),
                             UnsignedByteType,
                             randomAccessible=False)  # use IterableInterval
        aimg = ArrayImgs.unsignedBytes(Intervals.dimensionsAsLongArray(img))
        # ImgUtil copies multi-threaded, which is not appropriate here as there are many other images being copied too
        #ImgUtil.copy(ImgView.wrap(imgMinMax, aimg.factory()), aimg)

        # Single-threaded copy
        copier = createBiConsumerTypeSet(UnsignedByteType)
        LoopBuilder.setImages(imgMinMax, aimg).forEachPixel(copier)

        img = imgI = imgA = imgMinMax = imgT = None
        return aimg
def viewTransformed(img, calibration, affine):
  """ View img transformed to isotropy (via the calibration)
      and transformed by the affine. """
  scale3d = AffineTransform3D()
  scale3d.set(calibration[0], 0, 0, 0,
              0, calibration[1], 0, 0,
              0, 0, calibration[2], 0)
  transform = affine.copy()
  transform.concatenate(scale3d)
  imgE = Views.extendZero(img)
  imgI = Views.interpolate(imgE, NLinearInterpolatorFactory())
  imgT = RealViews.transform(imgI, transform)
  # dimensions
  minC = [0, 0, 0]
  maxC = [int(img.dimension(d) * cal) -1 for d, cal in enumerate(calibration)]
  imgB = Views.interval(imgT, minC, maxC)
  return imgB
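# Minimal usage sketch (an assumption): view an anisotropic stack, whose Z step
# is 5x the XY pixel size, in isotropic space without any further transform;
# img is a 3D RandomAccessibleInterval (assumed).
iso = viewTransformed(img, [1.0, 1.0, 5.0], AffineTransform3D())  # identity affine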
 def get(self, path):
     img = self.klb.readFull(path)
     imgE = Views.extendZero(img)
     imgI = Views.interpolate(imgE, NLinearInterpolatorFactory())
     affine = AffineTransform3D()
     affine.set(self.transforms[path])
     affine = affine.inverse()  # it's a forward transform: must invert
     affine.concatenate(scale3d)  # calibrated space: isotropic
     imgT = RealViews.transform(imgI, affine)
     minC = [0, 0, 0]
     maxC = [
         int(img.dimension(d) * cal) - 1
         for d, cal in enumerate(calibration)
     ]
     imgB = Views.interval(imgT, minC, maxC)
     # View a RandomAccessibleInterval as an Img, required by Load.lazyStack
     return ImgView.wrap(imgB, img.factory())
def twoStep(index=0):
    # The current way:
    img = klb.readFull(filepaths[index])  # klb_loader.get(filepaths[index])
    imgE = Views.extendZero(img)
    imgI = Views.interpolate(imgE, NLinearInterpolatorFactory())
    imgT = RealViews.transform(imgI, cmIsotropicTransforms[index])
    imgB = Views.zeroMin(Views.interval(imgT, roi[0],
                                        roi[1]))  # bounded: crop with ROI
    imgBA = ArrayImgs.unsignedShorts(Intervals.dimensionsAsLongArray(imgB))
    ImgUtil.copy(ImgView.wrap(imgB, imgBA.factory()), imgBA)
    imgP = prepareImgForDeconvolution(
        imgBA,
        affine3D(fineTransformsPostROICrop[index]).inverse(),
        FinalInterval([0, 0, 0], [imgB.dimension(d) - 1 for d in xrange(3)]))
    # Copy transformed view into ArrayImg for best performance in deconvolution
    imgA = ArrayImgs.floats(Intervals.dimensionsAsLongArray(imgP))
    ImgUtil.copy(ImgView.wrap(imgP, imgA.factory()), imgA)
    IL.wrap(imgA, "two step").show()
def translate_using_imglib2(imp, dx, dy, dz):
  print "imp channels",imp.getNChannels()
  # todo:
  # if multiple channels use Duplicator to translate each channel individually
  ## wrap
  # http://javadoc.imagej.net/ImgLib2/net/imglib2/img/imageplus/ImagePlusImg.html
  img = ImagePlusImgs.from(imp.duplicate())
  print "dimensions:",img.numDimensions()
  print img.getChannels()
  ## prepare image
  print "img",img
  extended = Views.extendBorder(img)
  #print "extended",extended
  #print "extended",extended.dimension(1)
  dims = zeros(4, 'l')
  img.dimensions(dims)
  print "dims",dims
  converted = Converters.convert(extended, RealFloatSamplerConverter())
  composite = Views.collapseReal(converted, imp.getNChannels())
  print "composite",composite
  interpolant = Views.interpolate(composite, NLinearInterpolatorFactory())
  #print "interpolant",interpolant
  transformed = RealViews.affine(interpolant, Translation3D(dx, dy, dz))
  print "transformed", transformed
  cropped = Views.interval(transformed, img)
  print "cropped.numDimensions()", cropped.numDimensions()
  print "cropped",cropped
  ## wrap back and return
  bd = imp.getBitDepth()
  # maybe simply wrap works?
  if bd==8:
    return(ImageJFunctions.wrapUnsignedByte(cropped,"imglib2"))
  elif bd == 16:
    return(ImageJFunctions.wrapUnsignedShort(cropped,"imglib2"))
  elif bd == 32:
    return(ImageJFunctions.wrapFloat(cropped,"imglib2"))
  else:
    return None    
        resultFileName = '%s/result.tif' % home.rstrip('/')
        imp = ImageJFunctions.wrap( result, 'result' )
        IJ.saveAsTiff(imp.duplicate(), resultFileName)

        relativeResult = result.copy()
        c = relativeResult.cursor()
        while c.hasNext():
            c.fwd()
            cur = c.get()
            val = cur.get()
            cur.set( val - c.getDoublePosition( 2 ) )

        relativeResultFileName = '%s/relativeResult.tif' % home.rstrip('/')
        imp = ImageJFunctions.wrap( relativeResult, 'relative result' )
        IJ.saveAsTiff(imp.duplicate(), relativeResultFileName)

        ratio = [ wrappedImage.dimension( 0 )*1.0/result.dimension( 0 ), wrappedImage.dimension( 1 )*1.0/result.dimension( 1 ) ]
        shift = [ 0.0, 0.0 ]
        lutField = SingleDimensionLUTGrid(3, 3, result, 2, ratio, shift )

        transformed = Views.interval( Views.raster( RealViews.transformReal( Views.interpolate( Views.extendBorder( wrappedImage ), NLinearInterpolatorFactory() ), lutField ) ), wrappedImage )
        imp = ImageJFunctions.wrap( transformed, 'transformed' )
        transformedFileName = '%s/transformed.tif' % home.rstrip('/')
        IJ.saveAsTiff( imp.duplicate(), transformedFileName )
        
        # result = inference.estimateZCoordinates( 0, 0, startingCoordinates, matrixTracker, options )

             
 
    threads = []
    upper = start
    while upper < stop:
        correlationRange = int(c)
        lower            = max( 0, upper - overlap )
        upper            = lower + interval
        if upper + step >= stop:
            upper = min( stop, upper + step )
        
        home = root.rstrip('/') + '/range=%d_%s/lower=%d_upper=%d'
        home = home % ( correlationRange, timestamp, lower, upper )
        make_sure_path_exists( home.rstrip('/') + '/' )

        options.comparisonRange = int(c)

        subStrip = ConvertedRandomAccessibleInterval( Views.interval( wholeStrip, [long(0), long(lower)], [long(wholeStrip.dimension(0)-1), long(upper-1)] ),  RealDoubleConverter(), DoubleType() )


        gitCommitInfoFile = '%s/commitHash' % home.rstrip('/')
        #with open( gitCommitInfoFile, 'w' ) as f:
        #    f.write( '%s\n' % utility.gitcommit.getCommit( thickness_estimation_repo_dir ) )

        gitDiffFile = '%s/gitDiff' % home.rstrip('/')
        #with open( gitDiffFile, 'w' ) as f:
        #    f.write( '%s\n' % utility.gitcommit.getDiff( thickness_estimation_repo_dir ) )


        optionsFile = '%s/options' % home.rstrip('/')
        with open( optionsFile, 'w' ) as f:
            f.write( '%s\n' % options.toString() )