def _preprocess_extend_single(im, extend, low, high, cut, rcoef, bigshape):
    im = utils.extend_by(im, extend)
    im = utils.imfilter(im, low, high, cut)
    if rcoef != 1:
        im = resample(im, rcoef)

    # Make the shape of images the same
    bg = np.zeros(bigshape, dtype=im.dtype) + utils.get_borderval(im, 5)
    im = utils.embed_to(bg, im)
    return im
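# Why the canvas is prefilled with utils.get_borderval rather than zeros:
# embedding a small image into a zero background creates hard edges that
# contaminate the spectrum. A rough standalone illustration of the same idea
# in plain numpy (the helper name and values here are hypothetical, not part
# of imreg_dft):
import numpy as np

def _embed_centered_sketch(canvas, im):
    # Center `im` inside `canvas`; assumes canvas is at least as large.
    y0 = (canvas.shape[0] - im.shape[0]) // 2
    x0 = (canvas.shape[1] - im.shape[1]) // 2
    out = canvas.copy()
    out[y0:y0 + im.shape[0], x0:x0 + im.shape[1]] = im
    return out

im = np.random.random((20, 30)) + 5.0
bg = np.zeros((32, 40)) + im[0].mean()  # crude stand-in for get_borderval(im, 5)
embedded = _embed_centered_sketch(bg, im)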
def _preprocess_extend(ims, extend, low, high, cut, rcoef):
    ims = [utils.extend_by(img, extend) for img in ims]
    bigshape = np.array([img.shape for img in ims]).max(0)

    ims = filter_images(ims, low, high, cut)
    if rcoef != 1:
        ims = [resample(img, rcoef) for img in ims]
        # Keep the target shape integral; in-place int *= float raises in numpy
        bigshape = (bigshape * rcoef).round().astype(int)

    # Make the shape of images the same
    bgs = [np.zeros(bigshape) + utils.get_borderval(img, 5) for img in ims]
    ims = [utils.embed_to(bg, img) for bg, img in zip(bgs, ims)]
    return ims
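# A hedged usage sketch for _preprocess_extend: two differently-sized inputs
# come out extended, filtered and embedded into one common shape. The None
# values for low/high/cut (i.e. no filtering) are an assumption made only to
# keep the sketch self-contained; they are forwarded to utils.imfilter via
# filter_images.
import numpy as np

im0 = np.random.random((30, 40))
im1 = np.random.random((36, 32))
pre = _preprocess_extend([im0, im1], extend=8,
                         low=None, high=None, cut=None, rcoef=1)
assert pre[0].shape == pre[1].shape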
def process_images(ims, opts, tosa=None):
    # lazy import so no imports before run() is really called
    import numpy as np
    from imreg_dft import utils
    from imreg_dft import imreg

    ims = [utils.extend_by(img, opts["extend"]) for img in ims]
    bigshape = np.array([img.shape for img in ims]).max(0)

    ims = filter_images(ims, opts["low"], opts["high"], opts["cut"])
    rcoef = opts["resample"]
    if rcoef != 1:
        ims = [resample(img, rcoef) for img in ims]
        # Keep the target shape integral; in-place int *= float raises in numpy
        bigshape = (bigshape * rcoef).round().astype(int)

    # Make the shape of images the same
    ims = [utils.embed_to(np.zeros(bigshape) + utils.get_borderval(img, 5), img)
           for img in ims]
    resdict = imreg.similarity(
        ims[0], ims[1], opts["iters"], opts["order"], opts["constraints"],
        opts["filter_pcorr"], opts["exponent"])

    im2 = resdict.pop("timg")

    # It seems that the resampling simply scales the translation
    resdict["tvec"] /= rcoef
    ty, tx = resdict["tvec"]
    resdict["tx"] = tx
    resdict["ty"] = ty
    resdict["imgs"] = ims
    tform = resdict

    if tosa is not None:
        tosa[:] = ird.transform_img_dict(tosa, tform)

    if rcoef != 1:
        ims = [resample(img, 1.0 / rcoef) for img in ims]
        im2 = resample(im2, 1.0 / rcoef)
        resdict["Dt"] /= rcoef

    resdict["unextended"] = [utils.unextend_by(img, opts["extend"])
                             for img in ims + [im2]]
    return resdict
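# A sketch of driving process_images directly (normally the CLI assembles
# `opts` from command-line flags). Only the keys the function actually reads
# are filled in; the neutral values (no filtering, no resampling, no
# constraints) are assumptions made for illustration.
import numpy as np

opts = {
    "extend": 0, "low": None, "high": None, "cut": None,
    "resample": 1, "iters": 1, "order": 1, "constraints": None,
    "filter_pcorr": 0, "exponent": "inf",
}
template = np.random.random((64, 64))
subject = np.roll(template, 3, axis=1)  # a known 3 px shift along x
res = process_images([template, subject], opts)
# The recovered shift should have magnitude about 3 px; its sign depends
# on imreg_dft's translation convention.
print(res["tx"], res["ty"])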
def testExtend(self):
    what = np.random.random((20, 11))
    whaty = what.shape[0]
    what[:] += np.arange(whaty, dtype=float)[:, np.newaxis] * 5 / whaty
    dftscore0 = self._dftscore(what)
    dsts = (2, 3, 4)
    for dst in dsts:
        ext = utils.extend_by(what, dst)

        # Bigger extension distance should mean a better (lower) "DFT score"
        dftscore = self._dftscore(ext)
        self.assertLess(dftscore, dftscore0 * 1.1)
        dftscore0 = dftscore

        undone = utils.unextend_by(ext, dst)
        self.assertEqual(what.shape, undone.shape)
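# The test above relies on a self._dftscore helper that is not shown in this
# section. A plausible reconstruction, assuming the score weights spectral
# power by squared distance from the DC term, so ringing caused by hard image
# edges raises the score and utils.extend_by should lower it:
def _dftscore(self, arr):
    dft = np.fft.fft2(arr)
    yfreqs = np.fft.fftfreq(arr.shape[0])[:, np.newaxis]
    xfreqs = np.fft.fftfreq(arr.shape[1])[np.newaxis, :]
    weight = xfreqs ** 2 + yfreqs ** 2  # grows towards high frequencies
    # Mean weighted power; lower means fewer edge artifacts
    return (np.abs(dft) ** 2 * weight).sum() / arr.size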