def _indexed_operation(self, im, indices, which):
    """
    Apply either a forward or adjoint transformation to `im`, depending on the value
    of the `which` parameter.

    :param im: The incoming Image object on which to apply the forward or adjoint transformation.
    :param indices: The indices of the transformations to apply.
    :param which: The attribute indicating the function handle to obtain from underlying `Xform` objects.
        Typically either 'forward' or 'adjoint'.
    :return: An Image object as a result of applying the forward or adjoint transformation to `im`.
    """
    # Ensure that we will be able to apply all transformers to the image
    assert self.n_indices >= im.n_images, \
        f'Can process Image object of max depth {self.n_indices}. Got {im.n_images}.'

    im_data = np.empty_like(im.asnumpy())

    # For each individual transformation
    for i, xform in enumerate(self.unique_xforms):
        # Get the indices corresponding to that transformation
        idx = np.flatnonzero(self.indices == i)
        # For the incoming Image object, find out which transformation indices are applicable
        idx = np.intersect1d(idx, indices)
        # For the transformation indices we found, find the indices in the Image object that we'll use
        im_data_indices = np.flatnonzero(np.isin(indices, idx))
        # Apply the transformation to the selected indices in the Image object
        if len(im_data_indices) > 0:
            fn_handle = getattr(xform, which)
            im_data[:, :, im_data_indices] = fn_handle(Image(im[:, :, im_data_indices])).asnumpy()

    return Image(im_data)
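# A minimal, self-contained sketch (plain NumPy only, not part of the library) of the
# index-mapping pattern used in `_indexed_operation` above: map the requested global
# `indices` back to positions within the incoming stack for one transformation.
# All values below are hypothetical toy data.
import numpy as np

xform_assignment = np.array([0, 1, 0, 1, 0])   # hypothetical per-image transformation ids
requested = np.array([1, 2, 4])                # hypothetical subset of images requested

# Global indices handled by transformation 0
idx = np.flatnonzero(xform_assignment == 0)    # -> [0, 2, 4]
# Restrict to the requested indices
idx = np.intersect1d(idx, requested)           # -> [2, 4]
# Positions of those indices within the requested stack
stack_positions = np.flatnonzero(np.isin(requested, idx))  # -> [1, 2]
print(stack_positions)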
def background_subtract_2d(self, signal, background_p1, max_col):
    """
    Subtract background from estimated power spectrum

    :param signal: Estimated power spectrum
    :param background_p1: 1-D background estimation
    :param max_col: Internal variable, returned as the second parameter from opt1d.
    :return: 2-tuple of Image instances (estimated PSD without noise, estimated noise background).
    """
    signal = signal.asnumpy()

    N = signal.shape[1]
    grid = grid_2d(N, normalized=False, dtype=self.dtype)
    radii = np.sqrt((grid["x"] / 2) ** 2 + (grid["y"] / 2) ** 2).T

    background = np.zeros(signal.shape, dtype=self.dtype)
    for r in range(max_col + 2, background_p1.shape[1]):
        background[:, (r < radii) & (radii <= r + 1)] = background_p1[max_col, r]

    mask = radii <= max_col + 2
    background[:, mask] = signal[:, mask]

    signal = signal - background
    signal = np.maximum(0, signal)

    return Image(signal), Image(background)
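# A minimal sketch (plain NumPy, not the library's grid_2d) of the radial annulus fill used
# in `background_subtract_2d` above: every pixel whose radius falls in (r, r + 1] receives the
# corresponding value of a 1-D radial background estimate. Values are hypothetical.
import numpy as np

N = 8
x, y = np.meshgrid(np.arange(N) - N // 2, np.arange(N) - N // 2)
radii = np.sqrt(x ** 2 + y ** 2)

background_1d = np.arange(N, dtype=float)  # hypothetical 1-D background estimate
background = np.zeros((N, N))
for r in range(N - 1):
    # fill the annulus of pixels whose radius lies in (r, r + 1]
    background[(r < radii) & (radii <= r + 1)] = background_1d[r]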
class ImageTestCase(TestCase):
    def setUp(self):
        # numpy array for top-level functions that directly expect it
        self.im_np = misc.face(gray=True).astype('float64')[:768, :768][:, :, np.newaxis]
        # Independent Image object for testing Image methods
        self.im = Image(misc.face(gray=True).astype('float64')[:768, :768])

    def tearDown(self):
        pass

    def testImShift(self):
        # Ensure that the two separate im_translate functions we have return the same thing

        # A single shift applied to all images
        shifts = np.array([100, 200])

        im = self.im.shift(shifts)
        im1 = _im_translate(self.im_np, shifts.reshape(1, 2))
        # Note the difference in the concept of shifts for _im_translate2 - negative sign/transpose
        im2 = _im_translate2(self.im_np, -shifts.reshape(2, 1))

        # Pure numpy 'shifting'
        # 'Shifting' an Image corresponds to a 'roll' of a numpy array - again, note the negated signs and the axes
        im3 = np.roll(self.im.asnumpy()[:, :, 0], -shifts, axis=(0, 1))

        self.assertTrue(np.allclose(im.asnumpy(), im1))
        self.assertTrue(np.allclose(im1, im2))
        self.assertTrue(np.allclose(im1[:, :, 0], im3))

    def testArrayImageSource(self):
        # An Image can be wrapped in an ArrayImageSource when we need to deal with ImageSource objects.
        src = ArrayImageSource(self.im)
        im = src.images(start=0, num=np.inf)
        self.assertTrue(np.allclose(im.asnumpy(), self.im_np))
def output(
    self,
    classes,
    classes_refl,
    rot,
    shifts=None,
    coefs=None,
):
    """
    Return class averages.

    :param classes: class indices (referring to src). (n_img, n_nbor)
    :param classes_refl: Bool representing whether to reflect image in `classes`
    :param rot: Array of in-plane rotation angles (radians) of image in `classes`
    :param shifts: Optional array of shifts for image in `classes`.
    :param coefs: Optional Fourier Bessel coefs (avoids recomputing).
    :return: Stack of Synthetic Class Average images as Image instance.
    """

    logger.info(f"Select {self.n_classes} Classes from Nearest Neighbors")
    # generate indices for random sample (can do something smart with corr later).
    # For testing just take the first n_classes so it matches earlier plots for manual comparison.
    # This is assumed to be reasonably random.
    selection = np.arange(self.n_classes)

    imgs = self.src.images(0, self.src.n)
    fb_avgs = np.empty((self.n_classes, self.fb_basis.count), dtype=self.src.dtype)

    for i in tqdm(range(self.n_classes)):
        j = selection[i]
        # Get the neighbors
        neighbors_ids = classes[j]

        # Get coefs in Fourier Bessel Basis if not provided as an argument.
        if coefs is None:
            neighbors_imgs = Image(imgs[neighbors_ids])
            if shifts is not None:
                neighbors_imgs.shift(shifts[i])
            neighbors_coefs = self.fb_basis.evaluate_t(neighbors_imgs)
        else:
            neighbors_coefs = coefs[neighbors_ids]
            if shifts is not None:
                neighbors_coefs = self.fb_basis.shift(neighbors_coefs, shifts[i])

        # Rotate in Fourier Bessel
        neighbors_coefs = self.fb_basis.rotate(neighbors_coefs, rot[j], classes_refl[j])

        # Averaging in FB
        fb_avgs[i] = np.mean(neighbors_coefs, axis=0)

    # Now we convert the averaged images from FB to Cartesian.
    return ArrayImageSource(self.fb_basis.evaluate(fb_avgs))
def setUp(self):
    # numpy array for top-level functions that directly expect it
    self.im_np = misc.face(gray=True).astype(np.float64)[np.newaxis, :768, :768]
    # Independent Image object for testing Image methods
    self.im = Image(misc.face(gray=True).astype(np.float64)[:768, :768])

    # Construct a simple stack of Images
    self.n = 3
    self.ims_np = np.empty((3, *self.im_np.shape[1:]), dtype=self.im_np.dtype)
    for i in range(self.n):
        self.ims_np[i] = self.im_np * (i + 1) / float(self.n)

    # Independent Image stack object for testing Image methods
    self.ims = Image(self.ims_np)
def src_backward(self, mean_vol, noise_variance, shrink_method=None):
    """
    Apply adjoint mapping to source

    :return: The sum of the outer products of the mean-subtracted images in `src`,
        corrected by the expected noise contribution and expressed as coefficients of `basis`.
    """
    covar_b = np.zeros((self.L, self.L, self.L, self.L, self.L, self.L), dtype=self.dtype)

    for i in range(0, self.n, self.batch_size):
        im = self.src.images(i, self.batch_size)
        batch_n = im.n_images
        im_centered = im - self.src.vol_forward(mean_vol, i, self.batch_size)

        im_centered_b = np.zeros((batch_n, self.L, self.L, self.L), dtype=self.dtype)
        for j in range(batch_n):
            im_centered_b[j] = self.src.im_backward(Image(im_centered[j]), i + j)
        im_centered_b = Volume(im_centered_b).to_vec()

        covar_b += vecmat_to_volmat(im_centered_b.T @ im_centered_b) / self.n

    covar_b_coeff = self.basis.mat_evaluate_t(covar_b)

    return self._shrink(covar_b_coeff, noise_variance, shrink_method)
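# A minimal sketch (plain NumPy, toy data) of the identity relied on in `src_backward` above:
# the sum of the outer products of a batch of vectors equals B.T @ B, where each row of B is
# one vectorized, mean-subtracted image.
import numpy as np

rng = np.random.default_rng(0)
B = rng.standard_normal((5, 3))                 # 5 hypothetical vectorized images
sum_outer = sum(np.outer(b, b) for b in B)      # accumulate the outer products
assert np.allclose(sum_outer, B.T @ B)          # same result as a single matrix product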
def projections(self, start=0, num=np.inf, indices=None):
    """
    Return projections of generated volumes, without applying filters/shifts/amplitudes/noise

    :param start: start index (0-indexed) of the start image to return
    :param num: Number of images to return. If `np.inf` (default), all remaining images are returned.
    :param indices: A numpy array of image indices. If specified, start and num are ignored.
    :return: An Image instance.
    """
    if indices is None:
        indices = np.arange(start, min(start + num, self.n))

    im = np.zeros(
        (len(indices), self._original_L, self._original_L), dtype=self.dtype
    )

    states = self.states[indices]
    unique_states = np.unique(states)
    for k in unique_states:
        idx_k = np.where(states == k)[0]
        rot = self.rots[indices[idx_k], :, :]

        im_k = self.vols.project(vol_idx=k - 1, rot_matrices=rot)
        im[idx_k, :, :] = im_k.asnumpy()

    return Image(im)
def _images(self, start=0, num=np.inf, indices=None, batch_size=512):
    """
    Internal function to return a set of images after denoising

    :param start: The inclusive start index from which to return images.
    :param num: The maximum number of images to return.
    :param indices: The indices of images to return.
    :return: an `Image` object after denoising.
    """
    # start and end (and indices) refer to the indices in the DenoisedImageSource
    # that are being denoised and returned in batches
    if indices is None:
        indices = np.arange(start, min(start + num, self.n))
    else:
        start = indices.min()
    end = indices.max()

    nimgs = len(indices)
    im = np.empty((nimgs, self.L, self.L))

    for batch_start in range(start, end + 1, batch_size):
        imgs_denoised = self.denoiser.images(batch_start, batch_size)
        batch_end = min(batch_start + batch_size, end + 1)
        # we subtract start here to correct for any offset in the indices
        im[batch_start - start:batch_end - start] = imgs_denoised.asnumpy()

    logger.info(f"Loading {nimgs} images complete")

    return Image(im)
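# A minimal sketch (toy values, not the library API) of the batch-offset arithmetic in
# `_images` above: batch indices are global, while the output array begins at `start`,
# so each write is shifted left by `start`. Names and values are hypothetical.
import numpy as np

start, end, batch_size = 10, 25, 8
source = np.arange(100)                 # hypothetical full data set, indexed globally
out = np.empty(end + 1 - start)

for batch_start in range(start, end + 1, batch_size):
    batch_end = min(batch_start + batch_size, end + 1)
    # subtract `start` to convert global batch indices to output positions
    out[batch_start - start:batch_end - start] = source[batch_start:batch_end]

assert np.array_equal(out, source[start:end + 1])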
def testPolarBasis2DAdjoint(self):
    # The evaluate function should be the adjoint operator of evaluate_t.
    # Namely, if A = evaluate, B = evaluate_t, and B = A^t, we will have
    # (y, A*x) = (A^t*y, x) = (B*y, x)
    x = randn(self.basis.count, seed=self.seed).astype(self.dtype)

    x = m_reshape(x, (self.basis.nrad, self.basis.ntheta))
    x = (1 / 2 * x[:, :self.basis.ntheta // 2]
         + 1 / 2 * x[:, :self.basis.ntheta // 2].conj())
    x = np.concatenate((x, x.conj()), axis=1)
    x = m_reshape(x, (self.basis.nrad * self.basis.ntheta,))

    x_t = self.basis.evaluate(x).asnumpy()

    y = randn(np.prod(self.basis.sz), seed=self.seed).astype(self.dtype)
    y_t = self.basis.evaluate_t(
        Image(m_reshape(y, self.basis.sz)[np.newaxis, :]))  # RCOPT

    lhs = np.dot(y, m_reshape(x_t, (np.prod(self.basis.sz),)))
    rhs = np.real(np.dot(y_t, x))
    logging.debug(
        f"lhs: {lhs} rhs: {rhs} absdiff: {np.abs(lhs - rhs)} atol: {utest_tolerance(self.dtype)}"
    )
    self.assertTrue(np.isclose(lhs, rhs, atol=utest_tolerance(self.dtype)))
def setUp(self):
    self.dtype = np.float32

    # Test Volume
    v = Volume(
        np.load(os.path.join(DATA_DIR, "clean70SRibosome_vol.npy")).astype(self.dtype)
    ).downsample(32)

    # Create Sim object.
    # Creates 10 projections so there is something to feed FSPCABasis.
    self.src = Simulation(L=v.resolution, n=10, vols=v, dtype=v.dtype)

    # Original projection image to transform.
    self.orig_img = self.src.images(0, 1)

    # Rotate 90 degrees in cartesian coordinates using third party tool.
    self.rt90_img = Image(np.rot90(self.orig_img.asnumpy(), axes=(1, 2)))

    # Prepare a Fourier Bessel Basis
    self.basis = FFBBasis2D((self.orig_img.res,) * 2, dtype=self.dtype)
    self.v1 = self.basis.evaluate_t(self.orig_img)
    self.v2 = self.basis.evaluate_t(self.rt90_img)
    # These should _not_ be equal or the test is pointless.
    self.assertFalse(np.allclose(self.v1, self.v2))

    # Prepare a FSPCA Basis too.
    self.fspca_basis = FSPCABasis(self.src, self.basis)
def evaluate(self, v):
    """
    Evaluate coefficients in standard 2D coordinate basis from those in polar Fourier basis

    :param v: A coefficient vector (or an array of coefficient vectors) in the polar Fourier
        basis to be evaluated. The last dimension must equal `self.count`.
    :return x: Image instance in standard 2D coordinate basis with resolution of `self.sz`.
    """
    if self.dtype != real_type(v.dtype):
        msg = (f"Input data type, {v.dtype}, is not consistent with"
               f" type defined in the class {self.dtype}.")
        logger.error(msg)
        raise TypeError(msg)

    v = v.reshape(-1, self.ntheta, self.nrad)

    nimgs = v.shape[0]

    half_size = self.ntheta // 2

    # For a real image, samples at angles theta and theta + pi are conjugates,
    # so the two half-planes can be folded together before the adjoint NUFFT.
    v = v[:, :half_size, :] + v[:, half_size:, :].conj()

    v = v.reshape(nimgs, self.nrad * half_size)

    x = anufft(v, self.freqs, self.sz, real=True)

    return Image(x)
def _images(self, start=0, num=np.inf, indices=None, batch_size=512):
    """
    Internal function to return a set of images after denoising

    :param start: The inclusive start index from which to return images.
    :param num: The maximum number of images to return.
    :param indices: The indices of images to return.
    :return: an `Image` object after denoising.
    """
    if indices is None:
        indices = np.arange(start, min(start + num, self.n))
    else:
        start = indices.min()
    end = indices.max()

    nimgs = len(indices)
    im = np.empty((nimgs, self.L, self.L))

    for istart in range(start, end + 1, batch_size):
        imgs_denoised = self.denoiser.images(istart, batch_size)
        iend = min(istart + batch_size, end + 1)
        # subtract `start` so batch positions are relative to the output array
        im[istart - start:iend - start] = imgs_denoised.data

    logger.info(f"Loading {nimgs} images complete")

    return Image(im)
def images(self, istart=0, batch_size=512):
    """
    Obtain a batch of 2D images after denoising by the Cov2D method

    :param istart: the index of the starting image
    :param batch_size: The batch size for processing images
    :return: an `Image` object with denoised images
    """
    src = self.src

    # Denoise one batch of 2D images using the SPCAs from the rotationally invariant covariance matrix
    img_start = istart
    img_end = min(istart + batch_size, src.n)
    imgs_noise = src.images(img_start, batch_size)
    coeffs_noise = self.basis.evaluate_t(imgs_noise.data)
    logger.info(
        f'Estimating Cov2D coefficients for images from {img_start} to {img_end - 1}'
    )
    coeffs_estim = self.cov2d.get_cwf_coeffs(
        coeffs_noise,
        self.cov2d.ctf_fb,
        self.cov2d.ctf_idx[img_start:img_end],
        mean_coeff=self.mean_est,
        covar_coeff=self.covar_est,
        noise_var=self.var_noise,
    )

    # Convert Fourier-Bessel coefficients back into 2D images
    logger.info('Converting Cov2D coefficients back to 2D images')
    imgs_estim = self.basis.evaluate(coeffs_estim)
    imgs_denoised = Image(imgs_estim)

    return imgs_denoised
def eval_filters(self, im_orig, start=0, num=np.inf, indices=None):
    if not isinstance(im_orig, Image):
        logger.warning(
            f"eval_filters passed {type(im_orig)} instead of Image instance"
        )
        # for now just convert it, so the copy below operates on an Image
        im_orig = Image(im_orig)

    im = im_orig.copy()

    if indices is None:
        indices = np.arange(start, min(start + num, self.n))

    for i, filt in enumerate(self.unique_filters):
        idx_k = np.where(self.filter_indices[indices] == i)[0]
        if len(idx_k) > 0:
            im[idx_k] = Image(im[idx_k]).filter(filt).asnumpy()

    return im
def _forward(self, im, indices):
    im = im.copy()

    for i, idx in enumerate(indices):
        # Note: The following random seed behavior is directly taken from MATLAB Cov3D code.
        random_seed = self.seed + 191 * (idx + 1)
        im_s = randn(2 * im.res, 2 * im.res, seed=random_seed)
        im_s = Image(im_s).filter(self.noise_filter)[:, :, 0]
        im[:, :, i] += im_s[:im.res, :im.res]

    return im
def eval_filters(self, im_orig, start=0, num=np.inf, indices=None):
    im = im_orig.copy()

    if indices is None:
        indices = np.arange(start, min(start + num, self.n))

    unique_filters = set(self.filters)
    for f in unique_filters:
        idx_k = np.where(self.filters[indices] == f)[0]
        if len(idx_k) > 0:
            im[:, :, idx_k] = Image(im[:, :, idx_k]).filter(f).asnumpy()

    return im
def setUp(self):
    with importlib_resources.path(tests.saved_test_data, "sample_data_model.star") as path:
        self.starfile = StarFile(path)

    # Independent Image object for testing Image source methods
    L = 768
    self.im = Image(misc.face(gray=True).astype("float64")[:L, :L])
    self.img_src = ArrayImageSource(self.im)

    # We also want to flex the stack logic.
    self.n = 21
    im_stack = np.broadcast_to(self.im.data, (self.n, L, L))
    # make each image methodically different
    im_stack = np.multiply(im_stack, np.arange(self.n)[:, None, None])
    self.im_stack = Image(im_stack)
    self.img_src_stack = ArrayImageSource(self.im_stack)

    # Create a tmpdir object for this test instance
    self._tmpdir = tempfile.TemporaryDirectory()
    # Get the directory from the name attribute of the instance
    self.tmpdir = self._tmpdir.name
def vol_forward(self, vol, start, num):
    """
    Apply forward image model to volume

    :param vol: A volume of size L-by-L-by-L.
    :param start: Start index of image to consider
    :param num: Number of images to consider
    :return: The images obtained from the volume by projecting, applying CTFs,
        translating, and multiplying by the amplitude.
    """
    all_idx = np.arange(start, min(start + num, self.n))
    im = vol_project(vol, self.rots[all_idx, :, :])
    im = self.eval_filters(im, start, num)
    im = Image(im).shift(self.offsets[all_idx, :])
    im *= np.broadcast_to(self.amplitudes[all_idx], (self.L, self.L, len(all_idx)))

    return im
def testFBBasis2DEvaluate_t(self):
    v = np.load(os.path.join(DATA_DIR, "fbbasis_coefficients_8_8.npy")).T  # RCOPT

    # While FB can accept arrays, it is preferable to pass FB2D and FFB2D Image instances.
    img = Image(v.astype(self.dtype))
    result = self.basis.evaluate_t(img)

    self.assertTrue(
        np.allclose(
            result,
            [
                0.10761825, 0.12291151, 0.00836345, -0.0619454, -0.0483326,
                0.01053718, 0.03977641, 0.03420101, -0.0060131, -0.02970658,
                -0.0151334, -0.00017575, -0.03987446, -0.00257069, -0.0006621,
                -0.00975174, 0.00108047, 0.00072022, 0.00753342, 0.00604493,
                0.00024362, -0.01711248, -0.01387371, 0.00112805, 0.02407385,
                0.00376325, 0.00081128, 0.00951368, -0.00557536, 0.01087579,
                0.00255393, -0.00525156, -0.00839695, 0.00802198,
            ],
            atol=utest_tolerance(self.dtype),
        )
    )
def testRotate(self):
    # Low res (8x8) had problems;
    # better with odd (7x7), but still not good.
    # We'll use a higher res test image.
    # fh = np.load(os.path.join(DATA_DIR, 'ffbbasis2d_xcoeff_in_8_8.npy'))[:7,:7]

    # Use a real data volume to generate a clean test image.
    v = Volume(
        np.load(os.path.join(DATA_DIR, "clean70SRibosome_vol.npy")).astype(np.float64))
    src = Simulation(L=v.resolution, n=1, vols=v, dtype=v.dtype)

    # Extract, this is the original image to transform.
    x1 = src.images(0, 1)

    # Rotate 90 degrees in cartesian coordinates.
    x2 = Image(np.rot90(x1.asnumpy(), axes=(1, 2)))

    # Express in an FB basis
    basis = FFBBasis2D((x1.res,) * 2, dtype=x1.dtype)
    v1 = basis.evaluate_t(x1)
    v2 = basis.evaluate_t(x2)
    v3 = basis.evaluate_t(x1)
    v4 = basis.evaluate_t(x1)

    # Reflect in the FB basis space
    v4 = basis.rotate(v1, 0, refl=[True])

    # Rotate in the FB basis space
    v3 = basis.rotate(v1, 2 * np.pi)
    v1 = basis.rotate(v1, -np.pi / 2)

    # Evaluate back into cartesian
    y1 = basis.evaluate(v1)
    y2 = basis.evaluate(v2)
    y3 = basis.evaluate(v3)
    y4 = basis.evaluate(v4)

    # Rotate 90
    self.assertTrue(np.allclose(y1[0], y2[0], atol=1e-4))

    # 2*pi Identity
    self.assertTrue(np.allclose(x1[0], y3[0], atol=utest_tolerance(self.dtype)))

    # Refl (flipped using flipud)
    self.assertTrue(np.allclose(np.flipud(x1[0]), y4[0], atol=1e-4))
def _images(self, start=0, num=np.inf, indices=None):
    if indices is None:
        indices = np.arange(start, min(start + num, self.n))
    else:
        start = indices.min()
    logger.info(f"Loading {len(indices)} images from STAR file")

    def load_single_mrcs(filepath, df):
        arr = mrcfile.open(filepath).data
        # if the stack only contains one image, arr will have shape (resolution, resolution)
        # the code below reshapes it to (1, resolution, resolution)
        if len(arr.shape) == 2:
            arr = arr.reshape((1,) + arr.shape)
        data = arr[df["__mrc_index"] - 1, :, :]

        return df.index, data

    n_workers = self.n_workers
    if n_workers < 0:
        n_workers = cpu_count() - 1

    df = self._metadata.loc[indices]
    im = np.empty(
        (len(indices), self._original_resolution, self._original_resolution),
        dtype=self.dtype,
    )

    groups = df.groupby("__mrc_filepath")
    n_workers = min(n_workers, len(groups))

    with futures.ThreadPoolExecutor(n_workers) as executor:
        to_do = []
        for filepath, _df in groups:
            future = executor.submit(load_single_mrcs, filepath, _df)
            to_do.append(future)

        for future in futures.as_completed(to_do):
            data_indices, data = future.result()
            im[data_indices - start] = data

    logger.info(f"Loading {len(indices)} images complete")

    return Image(im)
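# A minimal sketch (toy arrays, no mrcfile/pandas) of the gather/scatter in `_images` above:
# STAR metadata stores 1-based positions within each .mrcs stack, so 1 is subtracted before
# fancy-indexing, and results land at `data_indices - start` in the output. All values are
# hypothetical.
import numpy as np

stack = np.arange(5 * 4 * 4).reshape(5, 4, 4)   # hypothetical .mrcs stack of 5 images
mrc_index = np.array([3, 1, 5])                  # hypothetical 1-based indices from metadata
data = stack[mrc_index - 1]                      # convert to 0-based and gather

start = 7
data_indices = np.array([7, 8, 9])               # hypothetical global source indices
im = np.empty((3, 4, 4))
im[data_indices - start] = data                  # scatter relative to the requested start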
def images(self, start, num, *args, **kwargs):
    """
    Return images from this ImageSource as an Image object.

    :param start: The inclusive start index from which to return images.
    :param num: The maximum number of images to return.
    :param args: Any additional positional arguments to pass on to the `ImageSource`'s underlying `_images` method.
    :param kwargs: Any additional keyword arguments to pass on to the `ImageSource`'s underlying `_images` method.
    :return: an `Image` object.
    """
    indices = np.arange(start, min(start + num, self.n))
    if self._im is not None:
        logger.info('Loading images from cache')
        im = Image(self._im[:, :, indices])
    else:
        im = self._images(indices=indices, *args, **kwargs)
        im = self.generation_pipeline.forward(im, indices=indices)

    logger.info(f'Loaded {len(indices)} images')
    return im
def estimate_psd(self, blocks, tapers_1d):
    """
    Estimate the power spectrum of the micrograph using the multi-taper method

    :param blocks: 3-D NumPy array containing windows extracted from the micrograph in the preprocess function.
    :param tapers_1d: NumPy array of data tapers.
    :return: Image containing the estimated power spectrum.
    """
    num_1d_tapers = tapers_1d.shape[-1]
    tapers_1d = tapers_1d.astype(complex_type(self.dtype), copy=False)

    blocks_mt = np.zeros(blocks[0, :, :].shape, dtype=self.dtype)

    blocks_tapered = np.zeros(blocks[0, :, :].shape, dtype=complex_type(self.dtype))

    taper_2d = np.zeros((blocks.shape[1], blocks.shape[2]), dtype=complex_type(self.dtype))

    for ax1 in range(num_1d_tapers):
        for ax2 in range(num_1d_tapers):
            np.matmul(
                tapers_1d[:, ax1, np.newaxis],
                tapers_1d[:, ax2, np.newaxis].T,
                out=taper_2d,
            )

            for m in range(blocks.shape[0]):
                np.multiply(blocks[m, :, :], taper_2d, out=blocks_tapered)

                blocks_mt_post_fft = fft.fftn(blocks_tapered, axes=(-2, -1))

                blocks_mt += abs2(blocks_mt_post_fft)

    blocks_mt /= blocks.shape[0] ** 2
    blocks_mt /= tapers_1d.shape[0] ** 2

    amplitude_spectrum = fft.fftshift(
        blocks_mt
    )  # max difference 10^-13, max relative difference 10^-14

    return Image(amplitude_spectrum)
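# A minimal sketch (plain NumPy, sine tapers as a stand-in for the library's tapers, and a
# simplified normalization) of the multitaper accumulation in `estimate_psd` above: each 2-D
# taper is the outer product of two 1-D tapers, every block is tapered, and the squared FFT
# magnitudes are averaged. All names and values are hypothetical.
import numpy as np

rng = np.random.default_rng(0)
blocks = rng.standard_normal((4, 16, 16))        # hypothetical micrograph windows
t = np.arange(16)
tapers_1d = np.stack(
    [np.sin(np.pi * (k + 1) * (t + 0.5) / 16) for k in range(2)], axis=1
)

psd = np.zeros((16, 16))
for a in range(tapers_1d.shape[1]):
    for b in range(tapers_1d.shape[1]):
        taper_2d = np.outer(tapers_1d[:, a], tapers_1d[:, b])
        for block in blocks:
            psd += np.abs(np.fft.fftn(block * taper_2d, axes=(-2, -1))) ** 2

# simple average over blocks and taper pairs (not the library's exact normalization)
psd /= blocks.shape[0] * tapers_1d.shape[1] ** 2
psd = np.fft.fftshift(psd)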
def __init__(self, im, metadata=None, angles=None):
    """
    Initialize from an `Image` object

    :param im: An `Image` or Numpy array object representing image data served up by this `ImageSource`.
        In the case of a Numpy array, attempts to create an `Image` object.
    :param metadata: A Dataframe of metadata information corresponding to this ImageSource's images
    :param angles: Optional n-by-3 array of rotation angles corresponding to `im`.
    """
    if not isinstance(im, Image):
        logger.info("Attempting to create an Image object from Numpy array.")
        try:
            im = Image(im)
        except Exception as e:
            raise RuntimeError(
                "Creating Image object from Numpy array failed."
                f" Original error: {str(e)}")

    super().__init__(
        L=im.res, n=im.n_images, dtype=im.dtype, metadata=metadata, memory=None)

    self._cached_im = im

    # Create filter indices; these are required to pass unharmed through filter eval code
    # that is potentially called by other methods later.
    self.filter_indices = np.zeros(self.n)
    self.unique_filters = [IdentityFilter()]

    # Optionally populate angles/rotations.
    if angles is not None:
        if angles.shape != (self.n, 3):
            raise ValueError(f"Angles should be shape {(self.n, 3)}")
        # This will populate `_rotations`,
        # which is exposed by properties `angles` and `rots`.
        self.angles = angles
def projections(self, start=0, num=np.inf, indices=None):
    """
    Return projections of generated volumes, without applying filters/shifts/amplitudes/noise

    :param start: start index (0-indexed) of the start image to return
    :param num: Number of images to return. If `np.inf` (default), all remaining images are returned.
    :param indices: A numpy array of image indices. If specified, start and num are ignored.
    :return: An Image instance backed by an array of shape (L, L, num), L being the size of each image.
    """
    if indices is None:
        indices = np.arange(start, min(start + num, self.n))

    im = np.zeros((self.L, self.L, len(indices)))

    states = self.states[indices]
    unique_states = np.unique(states)
    for k in unique_states:
        vol_k = self.vols[:, :, :, k - 1]
        idx_k = np.where(states == k)[0]
        rot = self.rots[indices[idx_k], :, :]

        im_k = vol_project(vol_k, rot)
        im[:, :, idx_k] = im_k

    return Image(im)
def _read(self):
    with mrcfile.open(self.filepath, permissive=self.permissive) as mrc:
        im = mrc.data
        if im.dtype != self.dtype:
            logger.info(f"Micrograph read casting {self.filepath}"
                        f" data to {self.dtype} from {im.dtype}.")
            im = im.astype(self.dtype)

    # NOTE: For multiple mrc files, mrcfile returns an ndarray with
    #   shape (n_images, height, width).

    # Discard outer pixels
    im = im[
        ...,
        self.margin_top:-self.margin_bottom if self.margin_bottom is not None else None,
        self.margin_left:-self.margin_right if self.margin_right is not None else None,
    ]

    if self.square:
        side_length = min(im.shape[-2], im.shape[-1])
        im = im[..., :side_length, :side_length]

    if self.shrink_factor is not None:
        size = tuple((np.array(im.shape) / config.apple.mrc_shrink_factor).astype(int))
        im = np.array(PILImage.fromarray(im).resize(size, PILImage.BICUBIC))

    if self.gauss_filter_size is not None:
        im = signal.correlate(
            im,
            Micrograph.gaussian_filter(self.gauss_filter_size, self.gauss_filter_sigma),
            "same",
        )

    self.im = Image(im)
    self.shape = self.im.shape
def setUp(self):
    self.dtype = np.float32
    self.resolution = 8

    self.n = 1024

    # Generate a stack of images
    self.sim = sim = Simulation(
        n=self.n,
        L=self.resolution,
        unique_filters=[IdentityFilter()],
        seed=0,
        dtype=self.dtype,
        # We'll use random angles
        offsets=np.zeros((self.n, 2)),  # No offsets
        amplitudes=np.ones((self.n)),  # Constant amplitudes
    )

    # Expose images as numpy array.
    self.ims_np = sim.images(0, sim.n).asnumpy()
    self.im = Image(self.ims_np)

    # Vol estimation requires a 3D basis
    self.basis = FBBasis3D((self.resolution,) * 3, dtype=self.dtype)
def _images(self, start=0, num=np.inf, indices=None):
    if indices is None:
        indices = np.arange(start, min(start + num, self.n))
    else:
        start = indices.min()
    logger.info(f'Loading {len(indices)} images from STAR file')

    def load_single_mrcs(filepath, df):
        arr = mrcfile.open(filepath).data
        data = arr[df['__mrc_index'] - 1, :, :].T

        return df.index, data

    n_workers = self.n_workers
    if n_workers < 0:
        n_workers = cpu_count() - 1

    df = self._metadata.loc[indices]
    im = np.empty((self._original_resolution, self._original_resolution, len(indices)))

    groups = df.groupby('__mrc_filepath')
    n_workers = min(n_workers, len(groups))

    with futures.ThreadPoolExecutor(n_workers) as executor:
        to_do = []
        for filepath, _df in groups:
            future = executor.submit(load_single_mrcs, filepath, _df)
            to_do.append(future)

        for future in futures.as_completed(to_do):
            data_indices, data = future.result()
            im[:, :, data_indices - start] = data

    logger.info(f'Loading {len(indices)} images complete')

    return Image(im)
def _images(self, start=0, num=np.inf, indices=None):
    if indices is None:
        indices = np.arange(start, min(start + num, self.n))
    return Image(self.im[:, :, indices])
def setUp(self):
    # numpy array for top-level functions that directly expect it
    self.im_np = misc.face(gray=True).astype('float64')[:768, :768][:, :, np.newaxis]

    # Independent Image object for testing Image methods
    self.im = Image(misc.face(gray=True).astype('float64')[:768, :768])