def _zinger_removal(self, zinger_level=1000, median_width=3, num_cores=None, chunk_size=None, overwrite=True):
    """Remove zinger (outlier) pixels from projection, white and dark stacks.

    Parameters
    ----------
    zinger_level : int
        Intensity threshold forwarded to ``zinger_removal``.
    median_width : int
        Median-filter width forwarded to ``zinger_removal``.
    num_cores, chunk_size : optional
        Parallelization controls passed through to ``distribute_jobs``.
    overwrite : bool
        When True, store results on ``self``; otherwise return them.

    Returns
    -------
    tuple of arrays or None
        ``(data, data_white, data_dark)`` when ``overwrite`` is False.
    """
    filter_args = (zinger_level, median_width)
    projection_axis = 0

    # Apply the same filter to each of the three stacks along axis 0.
    cleaned = [
        distribute_jobs(stack, zinger_removal, filter_args,
                        projection_axis, num_cores, chunk_size)
        for stack in (self.data, self.data_white, self.data_dark)
    ]

    self.logger.debug("zinger_removal: zinger_level: " + str(zinger_level))
    self.logger.debug("zinger_removal: median_width: " + str(median_width))
    self.logger.info("zinger_removal [ok]")

    if not overwrite:
        return cleaned[0], cleaned[1], cleaned[2]
    self.data, self.data_white, self.data_dark = cleaned
def _zinger_removal(xtomo, zinger_level=1000, median_width=3, num_cores=None, chunk_size=None, overwrite=True):
    """Suppress zinger artifacts in the projection, white and dark stacks.

    Pixels flagged by ``zinger_removal`` (threshold ``zinger_level``,
    median width ``median_width``) are replaced; work is parallelized via
    ``distribute_jobs`` along the projection axis.  When ``overwrite`` is
    False the three filtered arrays are returned instead of stored.
    """
    def _clean(stack):
        # Run the zinger filter over one stack along axis 0 (projections).
        return distribute_jobs(stack, zinger_removal,
                               (zinger_level, median_width),
                               0, num_cores, chunk_size)

    new_data = _clean(xtomo.data)
    new_white = _clean(xtomo.data_white)
    new_dark = _clean(xtomo.data_dark)

    xtomo.logger.debug("zinger_removal: zinger_level: " + str(zinger_level))
    xtomo.logger.debug("zinger_removal: median_width: " + str(median_width))
    xtomo.logger.info("zinger_removal [ok]")

    if overwrite:
        xtomo.data = new_data
        xtomo.data_white = new_white
        xtomo.data_dark = new_dark
    else:
        return new_data, new_white, new_dark
def _stripe_removal(self, level=None, wname='db5', sigma=2, padding=False, num_cores=None, chunk_size=None, overwrite=True):
    """Remove stripe artifacts from sinograms with a wavelet-based filter.

    Parameters
    ----------
    level : int or None
        Wavelet decomposition level; when None, the deepest level allowed
        by the largest data dimension is used.
    wname : str
        Wavelet name forwarded to ``stripe_removal``.
    sigma, padding
        Forwarded to ``stripe_removal``.
    overwrite : bool
        When True, replace ``self.data``; otherwise return the result.
    """
    if level is None:
        # Deepest decomposition level the data size supports.
        level = int(np.ceil(np.log2(np.max(self.data.shape))))

    slice_axis = 1
    result = distribute_jobs(self.data, stripe_removal,
                             (level, wname, sigma, padding),
                             slice_axis, num_cores, chunk_size)

    for key, val in (("level", level), ("wname", wname),
                     ("sigma", sigma), ("padding", padding)):
        self.logger.debug("stripe_removal: " + key + ": " + str(val))
    self.logger.info("stripe_removal [ok]")

    if not overwrite:
        return result
    self.data = result
def _phase_retrieval(self, pixel_size=1e-4, dist=50, energy=20, alpha=1e-4, padding=True, num_cores=None, chunk_size=None, overwrite=True):
    """Run phase retrieval on each projection in parallel.

    All physical parameters (``pixel_size``, ``dist``, ``energy``,
    ``alpha``, ``padding``) are forwarded unchanged to the
    ``phase_retrieval`` worker; ``distribute_jobs`` splits the work along
    the projection axis.  Returns the result when ``overwrite`` is False,
    otherwise stores it on ``self.data``.
    """
    retrieval_args = (pixel_size, dist, energy, alpha, padding)
    result = distribute_jobs(self.data, phase_retrieval, retrieval_args,
                             0, num_cores, chunk_size)

    for key, val in (("pixel_size", pixel_size), ("dist", dist),
                     ("energy", energy), ("alpha", alpha),
                     ("padding", padding)):
        self.logger.debug("phase_retrieval: " + key + ": " + str(val))
    self.logger.info("phase_retrieval [ok]")

    if not overwrite:
        return result
    self.data = result
def _normalize(self, cutoff=None, negvals=1, num_cores=None, chunk_size=None, overwrite=True):
    """Normalize projections with the averaged white and dark fields.

    The per-pixel means of ``self.data_white`` and ``self.data_dark``
    (over the acquisition axis) are computed once and forwarded to the
    ``normalize`` worker together with ``cutoff`` and ``negvals``.
    """
    flat_field = self.data_white.mean(axis=0)
    dark_field = self.data_dark.mean(axis=0)

    normalized = distribute_jobs(self.data, normalize,
                                 (flat_field, dark_field, cutoff, negvals),
                                 0, num_cores, chunk_size)

    self.logger.debug("normalize: cutoff: " + str(cutoff))
    self.logger.debug("normalize: negvals: " + str(negvals))
    self.logger.info("normalize [ok]")

    if overwrite:
        self.data = normalized
    else:
        return normalized
def _stripe_removal(self, level=None, wname="db5", sigma=2, padding=False, num_cores=None, chunk_size=None, overwrite=True):
    """Wavelet-based stripe (ring artifact) removal, parallel over slices.

    When ``level`` is None the highest usable decomposition level is
    derived from the largest data dimension.  Results replace
    ``self.data`` unless ``overwrite`` is False.
    """
    chosen_level = level
    if chosen_level is None:
        # Find the highest level possible for this data size.
        largest_dim = np.max(self.data.shape)
        chosen_level = int(np.ceil(np.log2(largest_dim)))

    filtered = distribute_jobs(self.data, stripe_removal,
                               (chosen_level, wname, sigma, padding),
                               1, num_cores, chunk_size)

    self.logger.debug("stripe_removal: level: " + str(chosen_level))
    self.logger.debug("stripe_removal: wname: " + str(wname))
    self.logger.debug("stripe_removal: sigma: " + str(sigma))
    self.logger.debug("stripe_removal: padding: " + str(padding))
    self.logger.info("stripe_removal [ok]")

    if not overwrite:
        return filtered
    self.data = filtered
def _threshold_segment(self, cutoff=None, num_cores=None, chunk_size=None, overwrite=True):
    """Segment the reconstructed volume by thresholding.

    The reconstruction is first rescaled to the [0, 1] range, then
    ``threshold_segment`` is applied slice-by-slice via ``distribute_jobs``.

    NOTE(review): ``cutoff`` is accepted and logged but never forwarded to
    the worker (``_args`` is empty) — presumably it should be passed
    through; confirm against ``threshold_segment``'s signature.
    """
    # Normalize data first.
    data = self.data_recon - self.data_recon.min()
    data /= data.max()
    # Distribute jobs.
    _func = threshold_segment
    _args = ()
    _axis = 0  # Slice axis
    data_recon = distribute_jobs(data, _func, _args, _axis, num_cores, chunk_size)
    # Update provenance.
    self.logger.debug("threshold_segment: cutoff: " + str(cutoff))
    self.logger.info("threshold_segment [ok]")
    # Update returned values.
    if overwrite:
        self.data_recon = data_recon
    else:
        return data_recon
def _adaptive_segment(self, block_size=256, offset=0, num_cores=None, chunk_size=None, overwrite=True):
    """Adaptive (local) threshold segmentation of the reconstruction.

    The reconstruction is rescaled to [0, 1] first; ``block_size`` and
    ``offset`` are forwarded to the ``adaptive_segment`` worker, which is
    applied slice-by-slice via ``distribute_jobs``.
    """
    # Rescale the reconstruction into [0, 1] before segmenting.
    scaled = self.data_recon - self.data_recon.min()
    scaled /= scaled.max()

    segmented = distribute_jobs(scaled, adaptive_segment,
                                (block_size, offset),
                                0, num_cores, chunk_size)

    self.logger.debug("adaptive_segment: block_size: " + str(block_size))
    self.logger.debug("adaptive_segment: offset: " + str(offset))
    self.logger.info("adaptive_segment [ok]")

    if not overwrite:
        return segmented
    self.data_recon = segmented
def _region_segment(self, low=None, high=None, num_cores=None, chunk_size=None, overwrite=True):
    """Region-based segmentation of the reconstructed slices.

    After rescaling the reconstruction to [0, 1], the ``region_segment``
    worker receives the ``low``/``high`` bounds and is applied
    slice-by-slice via ``distribute_jobs``.
    """
    # Bring the reconstruction into the [0, 1] range first.
    scaled = self.data_recon - self.data_recon.min()
    scaled /= scaled.max()

    segmented = distribute_jobs(scaled, region_segment, (low, high),
                                0, num_cores, chunk_size)

    self.logger.debug("region_segment: low: " + str(low))
    self.logger.debug("region_segment: high: " + str(high))
    self.logger.info("region_segment [ok]")

    if overwrite:
        self.data_recon = segmented
    else:
        return segmented
def _median_filter(self, size=5, axis=1, num_cores=None, chunk_size=None, overwrite=True):
    """Median-filter every channel of the 4D data stack.

    Each channel (axis 0 of ``self.data``) is filtered independently with
    the ``median_filter`` worker; ``size`` is clamped to at least 1 and
    ``axis`` selects both the filter axis and the job-splitting axis.
    """
    # Filter width must be at least 1.
    if size < 1:
        size = 1

    filtered = np.zeros_like(self.data)
    # Process one channel at a time; enumerate walks axis 0.
    for ch, stack in enumerate(self.data):
        filtered[ch] = distribute_jobs(stack, median_filter, (size, axis),
                                       axis, num_cores, chunk_size)

    self.logger.debug("median_filter: size: " + str(size))
    self.logger.debug("median_filter: axis: " + str(axis))
    self.logger.info("median_filter [ok]")

    if not overwrite:
        return filtered
    self.data = filtered
def _median_filter(self, size=5, axis=1, num_cores=None, chunk_size=None, overwrite=True):
    """Apply a per-channel median filter to the 4D data array.

    ``size`` (clamped to >= 1) and ``axis`` are forwarded to the
    ``median_filter`` worker; channels along axis 0 are processed one by
    one and the work within each channel is split by ``distribute_jobs``.
    """
    kernel = size if size >= 1 else 1

    out = np.zeros_like(self.data)
    n_channels = self.data.shape[0]
    channel = 0
    while channel < n_channels:
        out[channel] = distribute_jobs(self.data[channel], median_filter,
                                       (kernel, axis), axis,
                                       num_cores, chunk_size)
        channel += 1

    self.logger.debug("median_filter: size: " + str(kernel))
    self.logger.debug("median_filter: axis: " + str(axis))
    self.logger.info("median_filter [ok]")

    if overwrite:
        self.data = out
    else:
        return out
def _zinger_removal(self, zinger_level=10000, median_width=3, num_cores=None, chunk_size=None, overwrite=True):
    """Remove zinger pixels from every channel of the 4D data stack.

    Each channel (axis 0) is cleaned independently; within a channel the
    ``zinger_removal`` worker runs along the projection axis via
    ``distribute_jobs``.
    """
    cleaned = np.zeros_like(self.data)
    # enumerate walks channels along axis 0.
    for ch, stack in enumerate(self.data):
        cleaned[ch] = distribute_jobs(stack, zinger_removal,
                                      (zinger_level, median_width),
                                      0, num_cores, chunk_size)

    self.logger.debug("zinger_removal: zinger_level: " + str(zinger_level))
    self.logger.debug("zinger_removal: median_width: " + str(median_width))
    self.logger.info("zinger_removal [ok]")

    if not overwrite:
        return cleaned
    self.data = cleaned
def _align_projections(self, align_to_channel=None, method='rotation_and_scale_invariant_phase_correlation', output_gifs=False, output_filename='/tmp/projections.gif', overwrite=True):
    """Compute projection alignment on a reference stack, then apply it per channel.

    The alignment translations are computed once — on the selected channel
    or on the sum over all channels — and then re-applied to every channel
    of the raw data.

    NOTE(review): ``align_to_channel`` is tested for truthiness, so
    channel 0 falls through to the sum-over-channels branch — confirm
    that is intended.
    """
    # Reference stack used to *compute* the alignment.
    if align_to_channel:
        data = self.data[align_to_channel, :, :, :]
    else:
        data = np.sum(self.data, axis=0)
    if output_gifs:
        # Keep a handle on the pre-cleanup stack for the comparison GIF.
        unaligned_data = data
    # Zinger removal
    data = distribute_jobs(data, zinger_removal, (10000, 3), 0, None, None)
    # Edge detection filter
    if method not in ['least_squares_fit']:
        # Sobel gradient magnitude followed by a small median filter,
        # applied in place per projection.
        for i in range(data.shape[0]):
            data[i, :, :] = np.hypot(spn.sobel(data[i, :, :], 0),
                                     spn.sobel(data[i, :, :], 1))
            data[i, :, :] = spn.median_filter(data[i, :, :], 3)
    data, translations = align_projections(data, method=method,
                                           theta=self.theta)
    if output_gifs:
        to_gif([unaligned_data, data], output_filename=output_filename)
        self.logger.debug(
            'projection alignment gifs written: {:s}'.format(output_filename))
    # Re-apply the computed translations to each channel of the raw data;
    # the per-call `shifts` value is intentionally unused here.
    data = np.zeros_like(self.data)
    for channel in range(self.data.shape[0]):
        data[channel, :, :, :], shifts = align_projections(
            self.data[channel, :, :, :],
            compute_alignment=False,
            alignment_translations=translations)
    # Update log.
    self.logger.debug("aligned projections using: {:s}".format(method))
    self.logger.info("aligned_projections[ok]")
    # Update returned values.
    if overwrite:
        self.data = data
        self.alignment_translations = translations
    else:
        return data, translations
def _remove_background(self, num_cores=None, chunk_size=None, overwrite=True):
    """Strip the background from each reconstructed slice.

    The argument-less ``remove_background`` worker is applied
    slice-by-slice (axis 0) via ``distribute_jobs``.  The result replaces
    ``self.data_recon`` unless ``overwrite`` is False, in which case it
    is returned.
    """
    cleaned = distribute_jobs(self.data_recon, remove_background, (),
                              0, num_cores, chunk_size)

    self.logger.info("remove_background [ok]")

    if not overwrite:
        return cleaned
    self.data_recon = cleaned
def _remove_background(self, num_cores=None, chunk_size=None, overwrite=True):
    """Remove the background from the reconstructed volume, slice by slice.

    Work is split along axis 0 by ``distribute_jobs``; the worker takes
    no extra arguments.  Stores the result on ``self.data_recon`` when
    ``overwrite`` is True, otherwise returns it.
    """
    worker = remove_background
    worker_args = ()
    slice_axis = 0

    result = distribute_jobs(self.data_recon, worker, worker_args,
                             slice_axis, num_cores, chunk_size)

    self.logger.info("remove_background [ok]")

    if overwrite:
        self.data_recon = result
    else:
        return result
def _align_projections(self, align_to_channel=None, method='rotation_and_scale_invariant_phase_correlation', output_gifs=False, output_filename='/tmp/projections.gif', overwrite=True):
    """Align projections using one reference stack, then shift all channels.

    Translations are estimated once (on ``align_to_channel`` or, when that
    is falsy, on the channel sum) and then applied to every channel.

    NOTE(review): because ``align_to_channel`` is checked for truthiness,
    passing channel 0 selects the sum-over-channels path — verify intent.
    """
    if align_to_channel:
        data = self.data[align_to_channel, :, :, :]
    else:
        data = np.sum(self.data, axis=0)
    if output_gifs:
        # Saved before cleanup so the GIF shows the raw reference stack.
        unaligned_data = data
    # Zinger removal
    data = distribute_jobs(data, zinger_removal, (10000, 3), 0, None, None)
    # Edge detection filter
    if method not in ['least_squares_fit']:
        # In-place Sobel gradient magnitude + 3x3 median per projection.
        for i in range(data.shape[0]):
            data[i, :, :] = np.hypot(spn.sobel(data[i, :, :], 0),
                                     spn.sobel(data[i, :, :], 1))
            data[i, :, :] = spn.median_filter(data[i, :, :], 3)
    data, translations = align_projections(data, method=method,
                                           theta=self.theta)
    if output_gifs:
        to_gif([unaligned_data, data], output_filename=output_filename)
        self.logger.debug(
            'projection alignment gifs written: {:s}'.format(output_filename))
    # Apply the computed translations channel by channel; `shifts` from
    # each call is discarded (the shared `translations` is kept).
    data = np.zeros_like(self.data)
    for channel in range(self.data.shape[0]):
        data[channel, :, :, :], shifts = align_projections(
            self.data[channel, :, :, :],
            compute_alignment=False,
            alignment_translations=translations)
    # Update log.
    self.logger.debug("aligned projections using: {:s}".format(method))
    self.logger.info("aligned_projections[ok]")
    # Update returned values.
    if overwrite:
        self.data = data
        self.alignment_translations = translations
    else:
        return data, translations
def _threshold_segment(self, cutoff=None, num_cores=None, chunk_size=None, overwrite=True):
    """Threshold-based segmentation of the reconstructed slices.

    The reconstruction is rescaled to [0, 1] and then passed slice-by-slice
    to the ``threshold_segment`` worker via ``distribute_jobs``.
    """
    # Rescale the reconstruction into [0, 1] before segmenting.
    scaled = self.data_recon - self.data_recon.min()
    scaled /= scaled.max()

    # NOTE(review): cutoff is only logged, not forwarded to the worker —
    # this mirrors the original behavior; verify against threshold_segment.
    segmented = distribute_jobs(scaled, threshold_segment, (),
                                0, num_cores, chunk_size)

    self.logger.debug("threshold_segment: cutoff: " + str(cutoff))
    self.logger.info("threshold_segment [ok]")

    if not overwrite:
        return segmented
    self.data_recon = segmented
def _median_filter(xtomo, size=5, num_cores=None, chunk_size=None, overwrite=True):
    """Apply a median filter of width ``size`` to the data, slice by slice.

    Parameters
    ----------
    size : int
        Median-filter width; values below 1 are clamped to 1.
    num_cores, chunk_size : optional
        Parallelization controls forwarded to ``distribute_jobs``.
    overwrite : bool
        When True, store the result on ``xtomo.data``; otherwise return it.

    Bug fix: ``_args`` was ``(size)`` — a parenthesized int, not a tuple.
    Every other wrapper here hands ``distribute_jobs`` a tuple of worker
    arguments, so the one-element tuple ``(size,)`` is used instead.
    """
    # Check input: filter width must be at least 1.
    if size < 1:
        size = 1
    # Distribute jobs.
    _func = median_filter
    _args = (size,)  # one-element tuple, not a bare int
    _axis = 1  # Slice axis
    data = distribute_jobs(xtomo.data, _func, _args, _axis, num_cores, chunk_size)
    # Update log.
    xtomo.logger.debug("median_filter: size: " + str(size))
    xtomo.logger.info("median_filter [ok]")
    # Update returned values.
    if overwrite:
        xtomo.data = data
    else:
        return data
def _zinger_removal(self, zinger_level=10000, median_width=3, num_cores=None, chunk_size=None, overwrite=True):
    """Channel-wise zinger removal for the 4D data array.

    For each channel along axis 0, the ``zinger_removal`` worker
    (threshold ``zinger_level``, median width ``median_width``) is run
    along the projection axis via ``distribute_jobs``.
    """
    worker_args = (zinger_level, median_width)
    out = np.zeros_like(self.data)

    n_channels = self.data.shape[0]
    channel = 0
    while channel < n_channels:
        out[channel] = distribute_jobs(self.data[channel], zinger_removal,
                                       worker_args, 0,
                                       num_cores, chunk_size)
        channel += 1

    self.logger.debug("zinger_removal: zinger_level: " + str(zinger_level))
    self.logger.debug("zinger_removal: median_width: " + str(median_width))
    self.logger.info("zinger_removal [ok]")

    if overwrite:
        self.data = out
    else:
        return out
def _normalize(self, cutoff=None, negvals=1, num_cores=None, chunk_size=None, overwrite=True):
    """Flat/dark-field normalization of the projection data.

    Averages the white and dark stacks over their first axis, then runs
    the ``normalize`` worker over the projections via ``distribute_jobs``.
    ``cutoff`` and ``negvals`` are passed through to the worker.
    """
    # Per-pixel reference fields, averaged over the acquisition axis.
    flat_field = np.mean(self.data_white, axis=0)
    dark_field = np.mean(self.data_dark, axis=0)

    normalized = distribute_jobs(self.data, normalize,
                                 (flat_field, dark_field, cutoff, negvals),
                                 0, num_cores, chunk_size)

    for name, value in (("cutoff", cutoff), ("negvals", negvals)):
        self.logger.debug("normalize: " + name + ": " + str(value))
    self.logger.info("normalize [ok]")

    if not overwrite:
        return normalized
    self.data = normalized
def _region_segment(self, low=None, high=None, num_cores=None, chunk_size=None, overwrite=True):
    """Segment the reconstruction by region with ``low``/``high`` bounds.

    The volume is rescaled to [0, 1] and the ``region_segment`` worker is
    run slice-by-slice through ``distribute_jobs``.
    """
    # Normalize data first: shift to zero, scale to unit maximum.
    shifted = self.data_recon - self.data_recon.min()
    shifted /= shifted.max()

    result = distribute_jobs(shifted, region_segment, (low, high),
                             0, num_cores, chunk_size)

    for name, value in (("low", low), ("high", high)):
        self.logger.debug("region_segment: " + name + ": " + str(value))
    self.logger.info("region_segment [ok]")

    if not overwrite:
        return result
    self.data_recon = result
def _adaptive_segment(self, block_size=256, offset=0, num_cores=None, chunk_size=None, overwrite=True):
    """Locally-adaptive segmentation of the reconstructed volume.

    Rescales the reconstruction to [0, 1] and applies the
    ``adaptive_segment`` worker (parameters ``block_size``, ``offset``)
    slice-by-slice through ``distribute_jobs``.
    """
    # Normalize data first.
    rescaled = self.data_recon - self.data_recon.min()
    rescaled /= rescaled.max()

    worker_args = (block_size, offset)
    result = distribute_jobs(rescaled, adaptive_segment, worker_args,
                             0, num_cores, chunk_size)

    for name, value in (("block_size", block_size), ("offset", offset)):
        self.logger.debug("adaptive_segment: " + name + ": " + str(value))
    self.logger.info("adaptive_segment [ok]")

    if overwrite:
        self.data_recon = result
    else:
        return result
def _phase_retrieval(self, pixel_size=1e-4, dist=50, energy=20, alpha=1e-4, padding=True, num_cores=None, chunk_size=None, overwrite=True):
    """Phase retrieval over the projection stack.

    Forwards the experiment parameters to the ``phase_retrieval`` worker
    and splits the projections across workers with ``distribute_jobs``.
    Stores the result on ``self.data`` when ``overwrite`` is True.
    """
    result = distribute_jobs(
        self.data,
        phase_retrieval,
        (pixel_size, dist, energy, alpha, padding),
        0,  # projection axis
        num_cores,
        chunk_size,
    )

    self.logger.debug("phase_retrieval: pixel_size: " + str(pixel_size))
    self.logger.debug("phase_retrieval: dist: " + str(dist))
    self.logger.debug("phase_retrieval: energy: " + str(energy))
    self.logger.debug("phase_retrieval: alpha: " + str(alpha))
    self.logger.debug("phase_retrieval: padding: " + str(padding))
    self.logger.info("phase_retrieval [ok]")

    if overwrite:
        self.data = result
    else:
        return result
def _stripe_removal2(self, nblocks=0, alpha=1.5, num_cores=None, chunk_size=None, overwrite=True):
    """Second stripe-removal variant, applied slice-by-slice.

    ``nblocks`` and ``alpha`` are forwarded unchanged to the
    ``stripe_removal2`` worker; ``distribute_jobs`` splits work along the
    slice axis.  Returns the result when ``overwrite`` is False.
    """
    slice_axis = 1
    filtered = distribute_jobs(self.data, stripe_removal2,
                               (nblocks, alpha),
                               slice_axis, num_cores, chunk_size)

    self.logger.debug("stripe_removal2: nblocks: " + str(nblocks))
    self.logger.debug("stripe_removal2: alpha: " + str(alpha))
    self.logger.info("stripe_removal2 [ok]")

    if not overwrite:
        return filtered
    self.data = filtered