import numpy as np

def test_camera_correction_and_sigma_calc():
    import warpdrive as wd

    # gen_image is assumed to be the synthetic-frame helper defined elsewhere in this test module
    _, _, im = gen_image()
    im = im.astype(np.float32)

    darkmap = 1 * np.ones_like(im)
    varmap = np.ones_like(im)
    flatmap = np.ones_like(im)
    eperadu = np.float32(0.41)
    noise_factor = np.float32(1.0)
    em_gain = np.float32(1.0)

    # CPU reference: dark-subtract, flatfield, and convert ADU -> e-
    corrected = (im - darkmap) * flatmap * eperadu
    # CPU reference for the per-pixel noise standard deviation, clamping the signal term at 1 e-
    sigma = np.sqrt(varmap
                    + noise_factor * noise_factor * em_gain * np.max(np.stack([corrected, np.ones_like(im)]), axis=0)
                    + em_gain * em_gain / eperadu)

    _warpdrive = wd.detector()
    _warpdrive.allocate_memory(np.shape(im))
    _warpdrive.prepare_maps(darkmap, varmap, flatmap, eperadu, noise_factor, em_gain)
    _warpdrive.prepare_frame(im)

    # pull the corrected frame back from the GPU and compare with the CPU reference
    wd.cuda.memcpy_dtoh_async(_warpdrive.data, _warpdrive.data_gpu, stream=_warpdrive.main_stream_r)
    _warpdrive.main_stream_r.synchronize()
    np.testing.assert_array_equal(_warpdrive.data, corrected)

    # pull the noise-sigma map back from the GPU and compare with the CPU reference
    wd.cuda.memcpy_dtoh_async(_warpdrive.data, _warpdrive.noise_sigma_gpu, stream=_warpdrive.main_stream_r)
    _warpdrive.main_stream_r.synchronize()
    np.testing.assert_array_equal(_warpdrive.data, sigma)
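# For reference, the expected sigma computed in the test above follows a per-pixel camera noise
# model: read-noise variance, plus a shot-noise term (scaled by the excess noise factor and EM
# gain, with the signal clamped at 1 e-), plus a gain-dependent conversion term. The helper
# below is only an illustrative restatement of that expression; the name and signature are not
# part of the test module.
import numpy as np

def expected_noise_sigma(signal_e, readnoise_var, noise_factor, em_gain, eperadu):
    """sigma = sqrt(var + NF^2 * G * max(signal_e, 1) + G^2 / eperadu), everything in electrons."""
    return np.sqrt(readnoise_var
                   + noise_factor ** 2 * em_gain * np.maximum(signal_e, 1.0)
                   + em_gain ** 2 / eperadu)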
def refresh_warpdrive(self, cameraMaps):
    try:
        import warpdrive
    except ImportError:
        print("GPU fitting available on-request for academic use. "
              "Please contact David Baddeley or Joerg Bewersdorf.")
        raise ImportError(missing_warpdrive_msg)

    global _warpdrive  # One instance for each process, re-used for subsequent fits.

    # get darkmap [ADU]
    self.darkmap = cameraMaps.getDarkMap(self.metadata)
    if np.isscalar(self.darkmap):
        self.darkmap = self.darkmap * np.ones_like(self.data)

    # get flatmap [unitless]
    self.flatmap = cameraMaps.getFlatfieldMap(self.metadata)
    if np.isscalar(self.flatmap):
        self.flatmap = self.flatmap * np.ones_like(self.data)

    # get varmap [e-^2]
    self.varmap = cameraMaps.getVarianceMap(self.metadata)
    if np.isscalar(self.varmap):
        if self.varmap == 0:
            self.varmap = np.ones_like(self.data)
            logger.error('Variance map not found and read noise defaulted to 0; changing to 1 to avoid x/0.')
        self.varmap = self.varmap * np.ones_like(self.data)

    if isinstance(self.background, np.ndarray):  # flatfielding is done on CPU-calculated backgrounds
        # fixme - do we change this control flow in remfitbuf by doing our own sigma calc?
        self.background = self.background / self.flatmap  # no unit conversion here, still in [ADU]
    else:
        # if self.background is a buffer, the background is already on the GPU and has not been flatfielded
        pass

    # Account for any changes we need to make in memory allocation on the GPU
    if not _warpdrive:  # initialize a new detector object for this process
        guess_psf_sigma_pix = self.metadata.getOrDefault('Analysis.GuessPSFSigmaPix',
                                                         600 / 2.8 / (self.metadata['voxelsize.x'] * 1e3))
        small_filter_size = self.metadata.getEntry('Analysis.DetectionFilterSize')
        large_filter_size = 2 * small_filter_size
        _warpdrive = warpdrive.detector(small_filter_size, large_filter_size, guess_psf_sigma_pix)
        _warpdrive.allocate_memory(np.shape(self.data))
        _warpdrive.prepare_maps(self.darkmap, self.varmap, self.flatmap,
                                self.metadata['Camera.ElectronsPerCount'],
                                self.metadata['Camera.NoiseFactor'],
                                self.metadata['Camera.TrueEMGain'])
    elif _warpdrive.varmap.shape == self.varmap.shape:
        # same frame size - check whether the maps correspond to a different camera region
        topLeft = np.array_equal(self.varmap[:20, :20], _warpdrive.varmap[:20, :20])
        botRight = np.array_equal(self.varmap[-20:, -20:], _warpdrive.varmap[-20:, -20:])
        if not (topLeft or botRight):
            _warpdrive.prepare_maps(self.darkmap, self.varmap, self.flatmap,
                                    self.metadata['Camera.ElectronsPerCount'],
                                    self.metadata['Camera.NoiseFactor'],
                                    self.metadata['Camera.TrueEMGain'])
    else:
        # data is a different shape - we know that we need to re-allocate and re-prepare the maps
        _warpdrive.allocate_memory(np.shape(self.data))
        _warpdrive.prepare_maps(self.darkmap, self.varmap, self.flatmap,
                                self.metadata['Camera.ElectronsPerCount'],
                                self.metadata['Camera.NoiseFactor'],
                                self.metadata['Camera.TrueEMGain'])
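# Both versions of refresh_warpdrive begin by normalising the camera calibration maps: scalar
# values returned by cameraMaps are broadcast to full-frame arrays so the GPU kernels always
# see per-pixel maps, with a zero read-noise variance bumped to 1 to avoid a later divide-by-zero.
# A minimal sketch of that normalisation (the function name and signature are illustrative only):
import numpy as np

def _as_full_frame_map(value, frame, guard_zero_variance=False):
    """Broadcast a scalar calibration value to a map matching `frame`; pass arrays through."""
    if np.isscalar(value):
        if guard_zero_variance and value == 0:
            # zero read-noise variance would later divide by zero, so fall back to 1 e-^2
            value = 1.0
        return value * np.ones_like(frame)
    return value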
def refresh_warpdrive(self, cameraMaps):
    try:
        import warpdrive
    except ImportError:
        print("GPU fitting available on-request for academic use. "
              "Please contact David Baddeley or Joerg Bewersdorf.")
        raise ImportError(missing_warpdrive_msg)

    global _warpdrive  # One instance for each process, re-used for subsequent fits.

    # get darkmap [ADU]
    self.darkmap = cameraMaps.getDarkMap(self.metadata)
    if np.isscalar(self.darkmap):
        self.darkmap = self.darkmap * np.ones_like(self.data)

    # get flatmap [unitless]
    self.flatmap = cameraMaps.getFlatfieldMap(self.metadata)
    if np.isscalar(self.flatmap):
        self.flatmap = self.flatmap * np.ones_like(self.data)

    # get varmap [e-^2]
    self.varmap = cameraMaps.getVarianceMap(self.metadata)
    if np.isscalar(self.varmap):
        if self.varmap == 0:
            self.varmap = np.ones_like(self.data)
            logger.error('Variance map not found and read noise defaulted to 0; changing to 1 to avoid x/0.')
        self.varmap = self.varmap * np.ones_like(self.data)

    if isinstance(self.background, np.ndarray):  # flatfielding is done on CPU-calculated backgrounds
        # fixme - do we change this control flow in remfitbuf by doing our own sigma calc?
        self.background = self.background / self.flatmap  # no unit conversion here, still in [ADU]
    else:
        # if self.background is a buffer, the background is already on the GPU and has not been flatfielded
        pass

    # Account for any changes we need to make to the detector instance
    small_filter_size = self.metadata.getEntry('Analysis.DetectionFilterSize')
    guess_psf_sigma_pix = self.metadata.getOrDefault('Analysis.GuessPSFSigmaPix',
                                                     600 / 2.8 / self.metadata.voxelsize_nm.x)

    if not _warpdrive:  # if we don't have a detector, make one and return
        _warpdrive = warpdrive.detector(small_filter_size, 2 * small_filter_size, guess_psf_sigma_pix)
        _warpdrive.allocate_memory(np.shape(self.data))
        _warpdrive.prepare_maps(self.darkmap, self.varmap, self.flatmap,
                                self.metadata['Camera.ElectronsPerCount'],
                                self.metadata['Camera.NoiseFactor'],
                                self.metadata['Camera.TrueEMGain'])
        return

    need_maps_filtered, need_mem_allocated = False, False
    # check if our filter sizes are the same
    if small_filter_size != _warpdrive.small_filter_size:
        _warpdrive.set_filter_kernels(small_filter_size, 2 * small_filter_size)
        # need to rerun map filters
        need_maps_filtered = True
    # check if the data is coming from a different camera region
    if _warpdrive.varmap.shape != self.varmap.shape:
        need_mem_allocated, need_maps_filtered = True, True
    else:
        # check if both corners are the same
        top_left = np.array_equal(self.varmap[:20, :20], _warpdrive.varmap[:20, :20])
        bot_right = np.array_equal(self.varmap[-20:, -20:], _warpdrive.varmap[-20:, -20:])
        if not (top_left or bot_right):
            need_maps_filtered = True

    if need_mem_allocated:
        _warpdrive.allocate_memory(np.shape(self.data))
    if need_maps_filtered:
        _warpdrive.prepare_maps(self.darkmap, self.varmap, self.flatmap,
                                self.metadata['Camera.ElectronsPerCount'],
                                self.metadata['Camera.NoiseFactor'],
                                self.metadata['Camera.TrueEMGain'])
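# The refactored refresh logic above reduces to two independent decisions: reallocate GPU buffers
# when the frame shape changes, and re-filter the camera maps when either the detection filter size
# or the camera ROI changes. A standalone sketch of that decision follows; the helper name is
# illustrative, and unlike the real method it does not mutate the cached detector (the real code
# also swaps the filter kernels in place when the filter size changes).
import numpy as np

def _refresh_decision(new_varmap, cached_varmap, new_filter_size, cached_filter_size):
    """Return (need_mem_allocated, need_maps_filtered) for a cached detector instance."""
    need_mem_allocated, need_maps_filtered = False, False
    if new_filter_size != cached_filter_size:
        need_maps_filtered = True  # filter kernels changed, so the filtered maps are stale
    if new_varmap.shape != cached_varmap.shape:
        need_mem_allocated, need_maps_filtered = True, True  # different frame size
    else:
        # same shape: compare two 20x20 corners to detect a shifted camera ROI
        top_left = np.array_equal(new_varmap[:20, :20], cached_varmap[:20, :20])
        bot_right = np.array_equal(new_varmap[-20:, -20:], cached_varmap[-20:, -20:])
        if not (top_left or bot_right):
            need_maps_filtered = True
    return need_mem_allocated, need_maps_filtered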