def P_map(self, wavelength, stop_wavelength=None):
    """Total polarization P = sqrt(Q**2 + U**2 + V**2) as a 2D NDCube (coord1, coord2)"""
    Q = self.Q_map(wavelength, stop_wavelength=stop_wavelength)
    U = self.U_map(wavelength, stop_wavelength=stop_wavelength)
    V = self.V_map(wavelength, stop_wavelength=stop_wavelength)
    P = np.sqrt(Q.data**2 + U.data**2 + V.data**2)
    return ndcube.NDCube(P, Q.wcs)
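# A minimal usage sketch: `maps` stands in for an instance of the class providing
# Q_map/U_map/V_map, and the wavelength value is illustrative, not part of the code above.
import astropy.units as u

P = maps.P_map(1074.7 * u.nm)
print(P.data.shape)  # 2D total-polarization map over (coord1, coord2)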
def fit(self, *args, **kwargs):
    r"""
    Apply inversion procedure to data.

    Returns
    -------
    dem : `~ndcube.NDCube`
        Differential emission measure as a function of temperature.
        The temperature axis is evenly spaced in :math:`\log{T}`.
        The number of dimensions depends on the input data.
    """
    dem, uncertainty = self._model(*args, **kwargs)
    wcs = self._make_dem_wcs()
    meta = self._make_dem_meta()
    # NOTE: Bug in NDData that does not allow passing a quantity as uncertainty
    uncertainty = uncertainty.value if isinstance(uncertainty, u.Quantity) else uncertainty
    return ndcube.NDCube(dem, wcs, meta=meta, uncertainty=uncertainty,
                         missing_axis=self.data[0].missing_axis)
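# A hedged usage sketch: `MyInversion` is an illustrative Model subclass and its
# constructor arguments (data sequence, temperature response kernels, temperature
# bin edges) are assumptions, not part of the code above.
model = MyInversion(data_sequence, kernels, temperature_bin_edges)
dem = model.fit()  # ndcube.NDCube of DEM with a temperature axis evenly spaced in log T
print(dem.data.shape)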
def quantity_1d_to_sequence(intensity, wavelength: u.angstrom, uncertainty=None, meta=None):
    """
    Transform a 1D `~astropy.units.Quantity` of intensities to a single-axis
    `~ndcube.NDCubeSequence`.

    This is a function for easily converting a 1D array of intensity values into a
    1D `~ndcube.NDCubeSequence` that can be passed to `sunkit_dem.Model`.

    Parameters
    ----------
    intensity : `~astropy.units.Quantity`
    wavelength : `~astropy.units.Quantity`
    uncertainty : `~astropy.units.Quantity`, optional
        Uncertainties on intensities.
    meta : `dict` or `dict`-like, optional
    """
    if uncertainty is not None:
        # Raise an error if intensities and uncertainties have incompatible units
        _ = intensity.to(uncertainty.unit)
    cubes = []
    for j, (i, w) in enumerate(zip(intensity, wavelength)):
        wcs = {
            'CTYPE1': 'wavelength',
            'CUNIT1': w.unit.to_string(),
            'CDELT1': 1,
            'CRPIX1': 1,
            'CRVAL1': w.value,
            'NAXIS1': 1,
        }
        cubes.append(
            ndcube.NDCube(
                i[np.newaxis],
                WCS(wcs),
                meta=meta,
                uncertainty=uncertainty.value[j, np.newaxis] if uncertainty is not None else None,
                extra_coords=[('wavelength', 0, [w.value])],
            )
        )
    return ndcube.NDCubeSequence(cubes, common_axis=0)
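# A brief usage sketch for quantity_1d_to_sequence; the intensity/uncertainty units
# and values below are illustrative.
import astropy.units as u
import numpy as np

intensity = np.array([10.0, 20.0, 15.0]) * u.ct
wavelength = np.array([171.0, 193.0, 211.0]) * u.angstrom
uncertainty = np.array([1.0, 2.0, 1.5]) * u.ct

seq = quantity_1d_to_sequence(intensity, wavelength, uncertainty=uncertainty)
# seq is an ndcube.NDCubeSequence with one single-element cube per wavelength.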
def from_aia_level1(
        cls,
        lev1: 'level_1.Level_1',
        aia_path: typ.Optional[pathlib.Path] = None,
        line: str = 'ov',
) -> 'Level_3':
    """
    Create a Level_3 object through a linear co-alignment of ESIS Level-1 to AIA 304.

    NOTE: This contains hard-coded variables that only pertain to the 2019 ESIS flight
    and will need to be made more general for future launches. Including a rough FOV
    and pointing when choosing an AIA cutout should do the trick.
    """
    if line not in ('ov', 'hei'):
        print('Assuming Spectral Line is O V')
        line = 'ov'

    aia_304 = aia.AIA.from_time_range(
        time_start=lev1.time[0, 0] - 10 * u.s,
        time_end=lev1.time[-1, 0] + 10 * u.s,
        download_path=aia_path,
        channels=[304] * u.AA,
        user_email='*****@*****.**',
    )

    cropped_imgs, initial_cropping, lev1 = lev1_prep(lev1, line)

    pad_pix = 400
    initial_pad = ((pad_pix, pad_pix), (pad_pix, pad_pix))
    cropped_imgs = np.pad(cropped_imgs, ((0, 0), (0, 0)) + initial_pad)

    # correct for most of detector tilt and anamorphic distortion
    theta = 16
    scale = 1 / np.cos(np.radians(theta))

    # Relevant portion of the AIA FOV for ESIS 2019. Once we have a rough pointing in
    # Level2, update this to grab the area around the pointing keyword for alignment speed.
    slice1 = slice(1380, 2650)
    slice2 = slice(1435, 2705)
    pos = (slice1, slice2)

    camera = np.array([0, 1, 2, 3])
    sequence = np.arange(cropped_imgs.shape[0])[:-1]

    lev_3_transforms = []
    lev_3_data = np.empty(
        (sequence.shape[0], camera.shape[0],
         slice1.stop - slice1.start, slice2.stop - slice2.start))

    guess = np.array(
        [[1 / (scale * .79), 1 / .79, 0, 0, 0, 0, -(22.5 + 45 * j)] for j in camera])

    for n, i in enumerate(sequence):
        transform_per_camera = []
        for m, j in enumerate(camera):
            start = time.time()
            print('Fit in Progress: Camera = ', j, ' Sequence = ', i)

            td = aia_304.time - lev1.time[i, 0]  # should be the same for every camera
            best_im = np.abs(td.sec).argmin()
            aia_im = aia_304.intensity[best_im, 0, ...]
            esis_im = cropped_imgs[i, j, ...]

            guess_cam = guess[m]
            bounds = [(guess_cam[0] * .98, guess_cam[0] * 1.02),
                      (guess_cam[1] * .98, guess_cam[1] * 1.02),
                      (0, .01), (0, .01),
                      (0, 1), (0, 1),
                      (-2 + guess_cam[6], 2 + guess_cam[6])]
            # fit = scipy.optimize.differential_evolution(img_align.affine_alignment_quality, bounds,
            #                                             args=(esis_im, aia_im[pos]), workers=4)
            fit = scipy.optimize.minimize(
                img_align.affine_alignment_quality, guess_cam,
                args=(esis_im, aia_im[pos]), bounds=bounds)
            guess[m] = fit.x
            print('Cross-Correlation = ', fit.fun)
            print('Transform = ', fit.x)

            origin = np.array(esis_im.shape) // 2
            esis_im = img_align.modified_affine(esis_im, fit.x, origin)
            # fig, ax = plt.subplots()
            # ax.imshow(esis_im, vmax=np.percentile(esis_im, 99.99))
            # plt.show()

            aia_cc = np.empty_like(aia_im)
            aia_cc[pos] = aia_im[pos]
            cc = img_align.normalized_cc(aia_cc, esis_im)
            trans = np.unravel_index(cc.argmax(), cc.shape)
            trans = (-esis_im.shape[0] // 2 + trans[0],
                     -esis_im.shape[1] // 2 + trans[1])

            aia_shp = aia_im.shape
            big_esis = np.empty(aia_shp)
            big_esis[0:esis_im.shape[0], 0:esis_im.shape[1]] = esis_im

            # move based on cc
            big_esis = np.roll(big_esis, trans, (0, 1))
            lev_3_data[n, m, ...] = big_esis[pos]
            # fig, ax = plt.subplots()
            # ax.imshow(lev_3_data[n, m, ...], vmin=0, vmax=np.percentile(lev_3_data[n, m, ...], 99))

            transform_per_camera.append(
                img_align.ImageTransform(fit.x, origin, img_align.modified_affine,
                                         initial_cropping, initial_pad, aia_shp, trans, pos))
            print('Fit Duration = ', time.time() - start)
        lev_3_transforms.append(transform_per_camera)
        # plt.show()

    aia_wcs = aia_304.wcs[0, 0].slice(pos)
    date_obs = lev1.time[sequence[0], 0]
    time_delta = lev1.time[sequence[1], 0] - date_obs

    # Axes 3 and 4 of the WCS object are camera and sequence respectively, to match the
    # Level-1 ndarray. For future runs of ESIS with additional cameras, CRVAL3 will
    # require modification.
    lev_3_header = dict([
        ('NAXIS1', aia_wcs._naxis[0]),
        ('NAXIS2', aia_wcs._naxis[1]),
        ('NAXIS3', camera.shape[0]),
        ('NAXIS4', sequence.shape[0]),
        ('DATEOBS', str(date_obs)),
        ('DATEREF', str(date_obs)),
        ('MJDREF', date_obs.mjd),
        ('CTYPE1', aia_wcs.wcs.ctype[0]),
        ('CTYPE2', aia_wcs.wcs.ctype[1]),
        ('CTYPE3', 'CAMERA_ID'),
        ('CTYPE4', 'UTC'),
        ('CRVAL1', aia_wcs.wcs.crval[0]),
        ('CRVAL2', aia_wcs.wcs.crval[1]),
        ('CRVAL3', 1),
        ('CRVAL4', time_delta.sec),
        ('CRPIX1', aia_wcs.wcs.crpix[0]),
        ('CRPIX2', aia_wcs.wcs.crpix[1]),
        ('CRPIX3', 0),
        ('CRPIX4', 1),
        ('CUNIT1', str(aia_wcs.wcs.cunit[0])),
        ('CUNIT2', str(aia_wcs.wcs.cunit[1])),
        ('CUNIT3', 'pix'),
        ('CUNIT4', 's'),
        ('CDELT1', aia_wcs.wcs.cdelt[0]),
        ('CDELT2', aia_wcs.wcs.cdelt[1]),
        ('CDELT3', 1),
        ('CDELT4', time_delta.sec),
    ])
    lev_3_wcs = wcs.WCS(lev_3_header)

    meta = dict([
        ("Description",
         "Level_3 was formed via a linear co-alignment of ESIS Level-1 and AIA 304"),
    ])
    lev_3_ndcube = ndcube.NDCube(lev_3_data, lev_3_wcs, meta=meta)

    lev_3_transform_cube = img_align.TransformCube(lev_3_transforms)
    if line == 'ov':
        lev_3_transform_cube.to_pickle(ov_Level3_transforms)
        transform_path = ov_Level3_transforms
    if line == 'hei':
        lev_3_transform_cube.to_pickle(hei_transforms)
        transform_path = hei_transforms

    return cls(observation=lev_3_ndcube,
               transformation_objects=transform_path,
               lev1_sequences=sequence,
               lev1_cameras=camera,
               spectral_line_name=line)
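# A hedged usage sketch: `lev1` is assumed to be an existing level_1.Level_1 instance
# and the AIA cache directory is hypothetical.
import pathlib

lev3 = Level_3.from_aia_level1(lev1, aia_path=pathlib.Path('aia_cache'), line='ov')
print(lev3.observation.data.shape)  # (sequence, camera, coord1, coord2)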
def _magnetic_map(self, magnetic_ix):
    """Return a 2D NDCube (coord1, coord2) for a given magnetic parameter"""
    newcube = ndcube.NDCube(self.data, self.wcs)[magnetic_ix, :, :]
    return newcube
def theta_map(self, wavelength, stop_wavelength=None):
    """Linear polarization angle theta = 0.5 arctan(U/Q) as a 2D NDCube (coord1, coord2)"""
    Q = self.Q_map(wavelength, stop_wavelength=stop_wavelength)
    U = self.U_map(wavelength, stop_wavelength=stop_wavelength)
    # Factor of 0.5 matches the definition in the docstring; arctan2 preserves the quadrant.
    theta = 0.5 * np.arctan2(U.data, Q.data)
    return ndcube.NDCube(np.degrees(theta) * u.degree, Q.wcs)
def L_map(self, wavelength, stop_wavelength=None):
    """Linear polarization L = sqrt(Q**2 + U**2) as a 2D NDCube (coord1, coord2)"""
    Q = self.Q_map(wavelength, stop_wavelength=stop_wavelength)
    U = self.U_map(wavelength, stop_wavelength=stop_wavelength)
    L = np.sqrt(Q.data**2 + U.data**2)
    return ndcube.NDCube(L, Q.wcs)
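# A brief usage sketch: same illustrative `maps` instance and wavelength as in the
# P_map example above; both names are assumptions.
import astropy.units as u

L = maps.L_map(1074.7 * u.nm)          # linear polarization magnitude
theta = maps.theta_map(1074.7 * u.nm)  # polarization angle in degrees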