def persistence(mc, func=np.max, image_normalize=True): """ Parameters ---------- mc : sunpy.map.MapCube A sunpy mapcube object Returns ------- sunpy.map.MapCube A mapcube containing the persistence transform of the input mapcube. The value normalization function used in plotting the data is changed, prettifying movies of resultant mapcube. """ # Get the persistence transform new_datacube = persistence_dc(mc.as_array(), func=func) # Create a list containing the data for the new map object new_mc = [] for i, m in enumerate(mc): new_map = Map(new_datacube[:, :, i], m.meta) new_map.plot_settings = deepcopy(m.plot_settings) new_mc.append(new_map) # Create the new mapcube and return if image_normalize: return movie_normalization(Map(new_mc, cube=True)) else: return Map(new_mc, cube=True)
def test_rsun_missing(): """Tests output if 'rsun' is missing""" euvi_no_rsun = Map(fitspath) euvi_no_rsun.meta['rsun'] = None r = euvi_no_rsun.observer_coordinate.radius with pytest.warns(SunpyUserWarning, match='Missing metadata for solar angular radius'): assert euvi_no_rsun.rsun_obs == sun._angular_radius(constants.radius, r)
def mapcube_simple_replace(mc, simple_replace_nans=True,
                           simple_replace_negative_values=True,
                           simple_replace_zero_values=True,
                           nans_replacement_value=1.0,
                           negatives_replacement_value=1.0,
                           zeroes_replacement_value=1.0):
    """
    Return a version of the input mapcube that has been cleaned in a very
    simple way.

    :param mc: input mapcube
    :param simple_replace_nans: if True, replace NaN pixel values
    :param simple_replace_negative_values: if True, replace negative pixel values
    :param simple_replace_zero_values: if True, replace zero pixel values
    :param nans_replacement_value: value used to replace NaNs
    :param negatives_replacement_value: value used to replace negative values
    :param zeroes_replacement_value: value used to replace zero values
    :return: a cleaned mapcube
    """
    # Get all the data from the mapcube layer by layer
    new_mapcube = []
    for i, m in enumerate(mc):
        data = m.data

        # Apply the simple replacements as necessary
        if simple_replace_nans:
            data = data_simple_replace_nans(data, replacement_value=nans_replacement_value)

        if simple_replace_negative_values:
            data = data_simple_replace_negative_values(data, replacement_value=negatives_replacement_value)

        if simple_replace_zero_values:
            data = data_simple_replace_zero_values(data, replacement_value=zeroes_replacement_value)

        new_mapcube.append(Map(data, m.meta))

    return Map(new_mapcube, cube=True)
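# The three data_simple_replace_* helpers called above are not defined in this
# collection.  A minimal sketch of what they are assumed to do (element-wise
# replacement on a plain numpy array); the names and signatures come from the
# calls above, the bodies are illustrative only.
import numpy as np


def data_simple_replace_nans(data, replacement_value=1.0):
    """Replace NaN pixels with a fixed value (assumed behaviour)."""
    data = np.array(data, copy=True)
    data[np.isnan(data)] = replacement_value
    return data


def data_simple_replace_negative_values(data, replacement_value=1.0):
    """Replace negative pixels with a fixed value (assumed behaviour)."""
    data = np.array(data, copy=True)
    data[data < 0] = replacement_value
    return data


def data_simple_replace_zero_values(data, replacement_value=1.0):
    """Replace zero-valued pixels with a fixed value (assumed behaviour)."""
    data = np.array(data, copy=True)
    data[data == 0] = replacement_value
    return data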
def make_slope_map(emcube, temperature_lower_bound=None, em_threshold=None):
    """
    Fit the emission measure distribution in every pixel.

    The fit is computed between `temperature_lower_bound` and the temperature
    at which the EM is maximum.

    Parameters
    ----------
    emcube : `EMCube`
        Emission measure map as a function of space and temperature
    temperature_lower_bound : `~astropy.units.Quantity`, optional
        Lower bound of the temperature range used in the fit. If not given,
        the fit starts at the first temperature bin.
    em_threshold : `~astropy.units.Quantity`, optional
        If the total EM in a pixel is below this, no slope is calculated

    Returns
    -------
    slope_map : `~sunpy.map.GenericMap`
    rsquared_map : `~sunpy.map.GenericMap`
    """
    if em_threshold is None:
        em_threshold = u.Quantity(1e25, u.cm**(-5))
    i_valid = np.where(u.Quantity(emcube.total_emission.data,
                                  emcube[0].meta['bunit']) > em_threshold)
    em_valid = np.log10(emcube.as_array()[i_valid])
    em_valid[np.logical_or(np.isinf(em_valid), np.isnan(em_valid))] = 0.0
    i_peak = em_valid.argmax(axis=1)
    log_temperature_bin_centers = np.log10(emcube.temperature_bin_centers.value)
    if temperature_lower_bound is None:
        i_lower = 0
    else:
        i_lower = np.fabs(emcube.temperature_bin_centers
                          - temperature_lower_bound).argmin()
    slopes, rsquared = [], []
    for emv, ip in zip(em_valid, i_peak):
        t_fit = log_temperature_bin_centers[i_lower:ip]
        if t_fit.size < 3:
            warnings.warn('Fit should be over 3 or more bins in temperature.')
        if t_fit.size == 0:
            slopes.append(np.nan)
            rsquared.append(0.)
            continue
        em_fit = emv[i_lower:ip]
        w = np.where(em_fit > 0, 1, 0)
        coeff, rss, _, _, _ = np.polyfit(t_fit, em_fit, 1, full=True, w=w)
        rss = 1 if rss.size == 0 else rss[0]
        _, rss_flat, _, _, _ = np.polyfit(t_fit, em_fit, 0, full=True, w=w)
        rss_flat = 1 if rss_flat.size == 0 else rss_flat[0]
        slopes.append(coeff[0])
        rsquared.append(1 - rss / rss_flat)
    slopes_data = np.zeros(emcube.total_emission.data.shape)
    slopes_data[i_valid] = slopes
    rsquared_data = np.zeros(emcube.total_emission.data.shape)
    rsquared_data[i_valid] = rsquared
    return Map(slopes_data, emcube[0].meta), Map(rsquared_data, emcube[0].meta)
def running_difference(mc, offset=1, use_offset_for_meta='mean', image_normalize=True): """ Calculate the running difference of a mapcube. Parameters ---------- mc : sunpy.map.MapCube A sunpy mapcube object offset : [ int ] Calculate the running difference between map 'i + offset' and image 'i'. use_offset_for_meta : {'ahead', 'behind', 'mean'} Which meta header to use in layer 'i' in the returned mapcube, either from map 'i + offset' (when set to 'ahead') and image 'i' (when set to 'behind'). When set to 'mean', the ahead meta object is copied, with the observation date replaced with the mean of the ahead and behind observation dates. image_normalize : bool If true, return the mapcube with the same image normalization applied to all maps in the mapcube. Returns ------- sunpy.map.MapCube A mapcube containing the running difference of the input mapcube. The value normalization function used in plotting the data is changed, prettifying movies of resultant mapcube. """ # Create a list containing the data for the new map object new_mc = [] for i in range(0, len(mc.maps) - offset): new_data = mc[i + offset].data - mc[i].data if use_offset_for_meta == 'ahead': new_meta = mc[i + offset].meta plot_settings = mc[i + offset].plot_settings elif use_offset_for_meta == 'behind': new_meta = mc[i].meta plot_settings = mc[i].plot_settings elif use_offset_for_meta == 'mean': new_meta = deepcopy(mc[i + offset].meta) new_meta['date_obs'] = _mean_time([parse_time(mc[i + offset].date), parse_time(mc[i].date)]) plot_settings = mc[i + offset].plot_settings else: raise ValueError('The value of the keyword "use_offset_for_meta" has not been recognized.') # Update the plot scaling. The default here attempts to produce decent # looking images new_map = Map(new_data, new_meta) new_map.plot_settings = plot_settings new_mc.append(new_map) # Create the new mapcube and return if image_normalize: return movie_normalization(Map(new_mc, cube=True), stretch=LinearStretch()) else: return Map(new_mc, cube=True)
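# A minimal usage sketch for the running_difference defined above, assuming the
# old sunpy MapCube interface (Map(..., cube=True)) used throughout this
# collection, and that the helpers it relies on (movie_normalization,
# _mean_time) are importable from the same module.  The file pattern is a
# placeholder.
import glob

from sunpy.map import Map

aia_files = sorted(glob.glob('aia_171_*.fits'))  # placeholder file pattern
mc = Map(aia_files, cube=True)

# Difference each map with the map two steps ahead, stamping each output layer
# with the mean of the two observation times.
rdiff = running_difference(mc, offset=2, use_offset_for_meta='mean')
rdiff[0].peek()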
def aia171_test_mc(aia171_test_map, aia171_test_map_layer, aia171_test_mc_pixel_displacements): # Create a map that has been shifted a known amount. d1 = sp_shift(aia171_test_map_layer, aia171_test_mc_pixel_displacements) m1 = Map((d1, aia171_test_map.meta)) # Create the mapsequence return Map([aia171_test_map, m1], sequence=True)
def prepare_test_data(file_format): pytest.importorskip('sunpy', minversion='2.1.0') from sunpy.map import Map from sunpy.coordinates.ephemeris import get_body_heliographic_stonyhurst if file_format == 'fits': map_aia = Map(os.path.join(DATA, 'aia_171_level1.fits')) data = map_aia.data wcs = map_aia.wcs date = map_aia.date target_wcs = wcs.deepcopy() elif file_format == 'asdf': pytest.importorskip('astropy', minversion='4.0') pytest.importorskip('gwcs', minversion='0.12') asdf = pytest.importorskip('asdf') aia = asdf.open(os.path.join(DATA, 'aia_171_level1.asdf')) data = aia['data'][...] wcs = aia['wcs'] date = wcs.output_frame.reference_frame.obstime target_wcs = Map(os.path.join(DATA, 'aia_171_level1.fits')).wcs.deepcopy() else: raise ValueError('file_format should be fits or asdf') # Reproject to an observer on Venus target_wcs.wcs.cdelt = ([24, 24]*u.arcsec).to(u.deg) target_wcs.wcs.crpix = [64, 64] venus = get_body_heliographic_stonyhurst('venus', date) target_wcs.wcs.aux.hgln_obs = venus.lon.to_value(u.deg) target_wcs.wcs.aux.hglt_obs = venus.lat.to_value(u.deg) target_wcs.wcs.aux.dsun_obs = venus.radius.to_value(u.m) return data, wcs, target_wcs
def modifyData(self, data_model: DataModel) -> DataModel: plot_settings = data_model.plot_preferences plot_settings["show_colorbar"] = self._ui.color_bar.isChecked() plot_settings["show_limb"] = self._ui.limb.isChecked() plot_settings["draw_grid"] = self._ui.grid.isChecked() plot_settings["mask"] = self._ui.mask.isChecked() plot_settings["wcs_grid"] = self._ui.wcs_grid.isChecked() plot_settings["annotate"] = self._ui.annotate.isChecked() if self._ui.mask.isChecked(): s_map = data_model.map data_model.map = Map(s_map.data, s_map.meta, mask=self._createMask(s_map)) else: s_map = data_model.map data_model.map = Map(s_map.data, s_map.meta) if self._ui.contours.isChecked(): strings = self._ui.contours_list.text().split(" ") levels = [int(s) for s in set(strings) if s != ""] plot_settings["contours"] = levels else: plot_settings["contours"] = False return data_model
def plot_aia_channels(aia, time: u.s, root_dir, corners=None, figsize=None, norm=None, fontsize=14, **kwargs): """ Plot maps of the EUV channels of AIA for a given timestep Parameters ---------- aia : `synthesizAR.instruments.InstrumentSDOAIA` time : `astropy.Quantity` root_dir : `str` figsize : `tuple`, optional """ if figsize is None: figsize = (15, 10) if norm is None: norm = matplotlib.colors.SymLogNorm(1e-6, vmin=1, vmax=5e3) with h5py.File(aia.counts_file, 'r') as hf: reference_time = u.Quantity(hf['time'], hf['time'].attrs['unit']) i_time = np.where(reference_time == time)[0][0] fig_format = os.path.join(root_dir, f'{aia.name}', '{}', f'map_t{i_time:06d}.fits') fig = plt.figure(figsize=figsize) plt.subplots_adjust(wspace=0., hspace=0., top=0.95) ims = {} for i, channel in enumerate(aia.channels): tmp = Map(fig_format.format(channel['name'])) if corners is not None: blc = SkyCoord(*corners[0], frame=tmp.coordinate_frame) trc = SkyCoord(*corners[1], frame=tmp.coordinate_frame) tmp = tmp.submap(blc, trc) ax = fig.add_subplot(2, 3, i + 1, projection=tmp) ims[channel['name']] = tmp.plot(annotate=False, title=False, norm=norm) lon, lat = ax.coords lon.grid(alpha=0) lat.grid(alpha=0) if i % 3 == 0: lat.set_axislabel(r'solar-y [arcsec]', fontsize=fontsize) else: lat.set_ticks_visible(False) lat.set_ticklabel_visible(False) if i > 2: lon.set_axislabel(r'solar-x [arcsec]', fontsize=fontsize) else: lon.set_ticks_visible(False) lon.set_ticklabel_visible(False) ax.text(0.1 * tmp.dimensions.x.value, 0.9 * tmp.dimensions.y.value, r'${}$ $\mathrm{{\mathring{{A}}}}$'.format(channel['name']), color='w', fontsize=fontsize) fig.suptitle(r'$t={:.0f}$ {}'.format(time.value, time.unit.to_string()), fontsize=fontsize) if kwargs.get('use_with_animation', False): return fig, ims
def persistence(mc, func=np.max): """ Parameters ---------- mc : sunpy.map.MapCube A sunpy mapcube object Returns ------- sunpy.map.MapCube A mapcube containing the persistence transform of the input mapcube. The value normalization function used in plotting the data is changed, prettifying movies of resultant mapcube. """ # Get the persistence transform new_datacube = persistence_dc(mc.as_array(), func=func) # Create a list containing the data for the new map object new_mc = [] for i, m in enumerate(mc): new_map = Map(new_datacube[:, :, i], m.meta) new_map.plot_settings = deepcopy(m.plot_settings) new_mc.append(new_map) # Create the new mapcube and return return Map(new_mc, cube=True)
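# persistence_dc is called above but not defined in this collection.  The
# persistence transform replaces each pixel with the extreme value (by default
# the maximum) that the pixel has reached up to that time.  A minimal sketch,
# assuming the datacube is ordered (ny, nx, nt) as produced by
# MapCube.as_array():
import numpy as np


def persistence_dc(datacube, func=np.max):
    """Running extremum of a (ny, nx, nt) datacube along the time axis."""
    new_datacube = np.zeros_like(datacube)
    new_datacube[:, :, 0] = datacube[:, :, 0]
    for t in range(1, datacube.shape[2]):
        # Compare the current layer with the persistence of the previous one.
        stacked = np.stack([new_datacube[:, :, t - 1], datacube[:, :, t]], axis=2)
        new_datacube[:, :, t] = func(stacked, axis=2)
    return new_datacube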
def submap(mc, range_a, range_b, **kwargs): """ Parameters ---------- mc : sunpy.map.MapCube A sunpy mapcube object range_a : list range_b : list Returns ------- sunpy.map.MapCube A mapcube containing maps that have had the map submap method applied to each layer. """ nmc = len(mc) if (len(range_a) == nmc) and (len(range_b) == nmc): ra = range_a rb = range_b elif (len(range_a) == 1) and (len(range_b) == 1): ra = [range_a for i in range(0, nmc)] rb = [range_b for i in range(0, nmc)] else: raise ValueError('Both input ranges must be either of size 1 or size ' 'equal to the number of maps in the mapcube') # Storage for the returned maps maps = [] for im, m in enumerate(mc): maps.append(Map.submap(m, ra[im], rb[im], **kwargs)) # Create the new mapcube and return return Map(maps, cube=True)
def accumulate(mc, accum, normalize=True):
    """
    Parameters
    ----------
    mc : sunpy.map.MapCube
        A sunpy mapcube object
    accum : int
        Number of consecutive maps summed to form each output layer.
    normalize : bool
        If True, divide each map by its exposure time before summing.

    Returns
    -------
    sunpy.map.MapCube
        A summed mapcube in the map layer (time) direction.
    """
    # counter for number of maps.
    j = 0
    # storage for the returned maps
    maps = []
    nmaps = len(mc)
    while j + accum <= nmaps:
        i = 0
        these_map_times = []
        while i < accum:
            this_map = mc[i + j]
            these_map_times.append(parse_time(this_map.date))
            if normalize:
                normalization = this_map.exposure_time
            else:
                normalization = 1.0
            if i == 0:
                # Emission rate
                m = this_map.data / normalization
            else:
                # Emission rate
                m += this_map.data / normalization
            i += 1
        j += accum
        # Make a copy of the meta header and set the exposure time to accum,
        # indicating that 'n' normalized exposures were used.
        new_meta = deepcopy(this_map.meta)
        new_meta['exptime'] = np.float64(accum)
        # Set the observation time to the latest of the times used to form
        # the map.
        new_meta['date_obs'] = _max_time(these_map_times)
        # Create the map list that will be used to make the mapcube
        new_map = Map(m, new_meta)
        new_map.plot_settings = deepcopy(this_map.plot_settings)
        maps.append(new_map)
    # Create the new mapcube and return
    return Map(maps, cube=True)
def accumulate(mc, accum, normalize=True): """ Parameters ---------- mc : sunpy.map.MapCube A sunpy mapcube object accum : normalize : Returns ------- sunpy.map.MapCube A summed mapcube in the map layer (time) direction. """ # counter for number of maps. j = 0 # storage for the returned maps maps = [] nmaps = len(mc) while j + accum <= nmaps: i = 0 these_map_times = [] while i < accum: this_map = mc[i + j] these_map_times.append(parse_time(this_map.date)) if normalize: normalization = this_map.exposure_time else: normalization = 1.0 if i == 0: # Emission rate m = this_map.data / normalization else: # Emission rate m += this_map.data / normalization i += 1 j += accum # Make a copy of the meta header and set the exposure time to accum, # indicating that 'n' normalized exposures were used. new_meta = deepcopy(this_map.meta) new_meta['exptime'] = np.float64(accum) # Set the observation time to the average of the times used to form # the map. new_meta['date_obs'] = _mean_time(these_map_times) # Create the map list that will be used to make the mapcube new_map = Map(m, new_meta) new_map.plot_settings = deepcopy(this_map.plot_settings) maps.append(new_map) # Create the new mapcube and return return Map(maps, cube=True)
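# Usage sketch for the accumulate function above (the variant that stamps each
# output layer with the mean observation time): sum a mapcube three maps at a
# time, normalising each contribution by its exposure time.  File paths are
# placeholders and the old Map(..., cube=True) interface is assumed.
import glob

from sunpy.map import Map

cube = Map(sorted(glob.glob('aia_171_*.fits')), cube=True)  # placeholder files
summed = accumulate(cube, accum=3, normalize=True)
print(len(summed))  # roughly one output layer per three input maps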
def test2(): aia = Map(sunpy.AIA_171_IMAGE) fig = plt.figure() ax = plt.subplot(111) aia.plot() plt.colorbar() aia.draw_limb() plt.show()
def test_rsun_missing(): """Tests output if 'rsun' is missing""" euvi_no_rsun = Map(fitspath) euvi_no_rsun.meta['rsun'] = None with pytest.warns(SunpyUserWarning, match='Missing metadata for solar radius'): assert euvi_no_rsun.rsun_obs.value == sun.angular_radius( euvi.date).to('arcsec').value
def create_tempmap(date, n_params=1, data_dir=home + 'SDO_data/',
                   maps_dir=home + 'temperature_maps/'):
    wlens = ['94', '131', '171', '193', '211', '335']
    t0 = 5.6
    images = []
    #imdates = {}
    print('Finding data for {}.'.format(date.date()))
    # Loop through wavelengths
    for wl, wlen in enumerate(wlens):
        #print 'Finding {}A data...'.format(wlen),
        fits_dir = data_dir + '{}/{:%Y/%m/%d}/'.format(wlen, date)
        filename = fits_dir + 'aia*{0}*{1:%Y?%m?%d}?{1:%H?%M}*lev1?fits'.format(
            wlen, date)
        temp_im = Map(filename)
        # Download data if not enough found
        client = vso.VSOClient()
        if temp_im == []:
            print('File not found. Downloading from VSO...')
            # Wavelength value for query needs to be an astropy Quantity
            wquant = u.Quantity(value=int(wlen), unit='Angstrom')
            qr = client.query(
                vso.attrs.Time(date,  # - dt.timedelta(seconds=6),
                               date + dt.timedelta(seconds=12)),  # 6)),
                vso.attrs.Wave(wquant, wquant),
                vso.attrs.Instrument('aia'),
                vso.attrs.Provider('JSOC'))
            res = client.get(qr, path=fits_dir + '{file}', site='NSO').wait()
            temp_im = Map(res)
        if temp_im == []:
            print('Downloading failed.')
            print(res, len(qr), qr)
            return np.zeros((512, 512)), None, None
        if isinstance(temp_im, list):
            temp_im = temp_im[0]
        # TODO: save out level 1.5 data so it can be loaded quickly.
        temp_im = aiaprep(temp_im)
        # Can probably increase speed a bit by making this * (1.0/exp_time)
        temp_im.data = temp_im.data / temp_im.exposure_time
        images.append(temp_im)
        #imdates[wlen] = temp_im.date

    normim = images[2].data.copy()
    # Normalise images to 171A
    print('Normalising images')
    for i in range(len(wlens)):
        images[i].data = images[i].data / normim

    # Produce temperature map
    if n_params == 1:
        tempmap = find_temp(images, t0)  # , force_temp_scan=True)
    else:
        #tempmap = find_temp_3params(images, t0)
        pass

    return tempmap
def plot_observations(self, obsdate: str, mdi_map: Map = None): """ Plots the Active Regions for a given observation on the MDI map corresponding to that observation. Parameters ---------- obsdate : str The observation time and date. mdi_map : Map, optional The MDI map corresponding to the given observation, If None, the Map will be downloaded first. By default None. Examples -------- >>> from pythia.seo import Sunspotter >>> sunspotter = Sunspotter() >>> obsdate = '2000-01-01 12:47:02' >>> sunspotter.plot_observations(obsdate) """ obsdate = self.get_nearest_observation(obsdate) if mdi_map is None: mdi_map = self.get_mdi_fulldisk_map(obsdate) hek_result = self.get_observations_from_hek(obsdate) bottom_left_x = hek_result['boundbox_c1ll'] bottom_left_y = hek_result['boundbox_c2ll'] top_right_x = hek_result['boundbox_c1ur'] top_right_y = hek_result['boundbox_c2ur'] number_of_observations = len(hek_result) bottom_left_coords = SkyCoord( [(bottom_left_x[i], bottom_left_y[i]) * u.arcsec for i in range(number_of_observations)], frame=mdi_map.coordinate_frame) top_right_coords = SkyCoord( [(top_right_x[i], top_right_y[i]) * u.arcsec for i in range(number_of_observations)], frame=mdi_map.coordinate_frame) fig = plt.figure(figsize=(12, 10), dpi=100) mdi_map.plot() for i in range(number_of_observations): mdi_map.draw_rectangle(bottom_left_coords[i], top_right=top_right_coords[i], color='b', label="Active Regions") hek_legend, = plt.plot([], color='b', label="Active Regions") plt.legend(handles=[hek_legend]) plt.show()
def downloadData(self): results = Fido.search(self.attrsTime, self.instrument) closest = results["gong"][0] print(closest) self.file = Fido.fetch(closest, path=self.path) # Fetching only one gongmap = Map(self.file) self.map = Map(gongmap.data - np.mean(gongmap.data), gongmap.meta) return self.map
def make_comp(fitsfile): briggs = Map(fitsfile) briggs = icrs_to_helio.icrs_to_helio(briggs) briggs.plot_settings['cmap'] = 'viridis' lmax = (briggs.data).max() levels = lmax * np.arange(0.5, 1.1, 0.05) comp_map = sunpy.map.Map(aiamap, briggs, composite=True) comp_map.set_levels(index=1, levels=levels) return comp_map
def test_reproject_roundtrip(file_format): # Test the reprojection with solar data, which ensures that the masking of # pixels based on round-tripping works correctly. Using asdf is not just # about testing a different format but making sure that GWCS works. # The observer handling changed in 2.1. pytest.importorskip('sunpy', minversion='2.1.0') from sunpy.map import Map from sunpy.coordinates.ephemeris import get_body_heliographic_stonyhurst if file_format == 'fits': map_aia = Map(get_pkg_data_filename('data/aia_171_level1.fits', package='reproject.tests')) data = map_aia.data wcs = map_aia.wcs date = map_aia.date target_wcs = wcs.deepcopy() elif file_format == 'asdf': pytest.importorskip('astropy', minversion='4.0') pytest.importorskip('gwcs', minversion='0.12') asdf = pytest.importorskip('asdf') aia = asdf.open( get_pkg_data_filename('data/aia_171_level1.asdf', package='reproject.tests')) data = aia['data'][...] wcs = aia['wcs'] date = wcs.output_frame.reference_frame.obstime target_wcs = Map( get_pkg_data_filename('data/aia_171_level1.fits', package='reproject.tests')).wcs.deepcopy() else: raise ValueError('file_format should be fits or asdf') # Reproject to an observer on Venus target_wcs.wcs.cdelt = ([24, 24]*u.arcsec).to(u.deg) target_wcs.wcs.crpix = [64, 64] venus = get_body_heliographic_stonyhurst('venus', date) target_wcs.wcs.aux.hgln_obs = venus.lon.to_value(u.deg) target_wcs.wcs.aux.hglt_obs = venus.lat.to_value(u.deg) target_wcs.wcs.aux.dsun_obs = venus.radius.to_value(u.m) output, footprint = reproject_interp((data, wcs), target_wcs, (128, 128)) header_out = target_wcs.to_header() # ASTROPY_LT_40: astropy v4.0 introduced new default header keywords, # once we support only astropy 4.0 and later we can update the reference # data files and remove this section. for key in ('CRLN_OBS', 'CRLT_OBS', 'DSUN_OBS', 'HGLN_OBS', 'HGLT_OBS', 'MJDREFF', 'MJDREFI', 'MJDREF', 'MJD-OBS', 'RSUN_REF'): header_out.pop(key, None) header_out['DATE-OBS'] = header_out['DATE-OBS'].replace('T', ' ') return array_footprint_to_hdulist(output, footprint, header_out)
def running_difference(mc, offset=1, use_offset_for_meta='ahead'): """ Calculate the running difference of a mapcube. Parameters ---------- mc : sunpy.map.MapCube A sunpy mapcube object offset : [ int ] Calculate the running difference between map 'i + offset' and image 'i'. use_offset_for_meta : {'ahead', 'behind', 'mean'} Which meta header to use in layer 'i' in the returned mapcube, either from map 'i + offset' (when set to 'ahead') and image 'i' (when set to 'behind'). When set to 'mean', the ahead meta object is copied, with the observation date replaced with the mean of the ahead and behind observation dates. Returns ------- sunpy.map.MapCube A mapcube containing the running difference of the input mapcube. The value normalization function used in plotting the data is changed, prettifying movies of resultant mapcube. """ # Create a list containing the data for the new map object new_mc = [] for i in range(0, len(mc.maps) - offset): new_data = mc[i + offset].data - mc[i].data if use_offset_for_meta == 'ahead': new_meta = mc[i + offset].meta plot_settings = mc[i + offset].plot_settings elif use_offset_for_meta == 'behind': new_meta = mc[i].meta plot_settings = mc[i].plot_settings elif use_offset_for_meta == 'mean': new_meta = deepcopy(mc[i + offset].meta) new_meta['date_obs'] = _mean_time([parse_time(mc[i + offset].date), parse_time(mc[i].date)]) plot_settings = mc[i + offset].plot_settings else: raise ValueError('The value of the keyword "use_offset_for_meta" has not been recognized.') # Update the plot scaling. The default here attempts to produce decent # looking images new_map = Map(new_data, new_meta) new_map.plot_settings = plot_settings new_mc.append(new_map) # Create the new mapcube and return return Map(new_mc, cube=True)
def load_start_of_day_map(dtime): dtime = start_of_day(dtime) mappath = map_path(dtime) if not mappath.exists(): download_start_of_day_map(dtime) print(f'Loading AIA map for {dtime}') try: ret = Map(str(mappath)) ret.meta['rsun_ref'] = sunpy.sun.constants.radius.to_value(u.m) return ret except OSError as e: raise RuntimeError(f'No AIA map available for {dtime}') from e
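# start_of_day and map_path (and download_start_of_day_map) are helpers assumed
# by the loader above but not included in this collection.  A plausible sketch
# of the behaviour it relies on: truncate the timestamp to midnight, and build
# a per-day FITS path under a hypothetical local cache directory.
from datetime import datetime
from pathlib import Path


def start_of_day(dtime):
    """Truncate a datetime to 00:00:00 on the same day (assumed behaviour)."""
    return datetime(dtime.year, dtime.month, dtime.day)


def map_path(dtime):
    """Local cache path for the start-of-day AIA map (hypothetical layout)."""
    return Path('aia_maps') / f'aia_synoptic_{dtime:%Y%m%d}.fits'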
def create_figure(unix, wavelen=131): utc_start = stix_datetime.unix2datetime(unix).strftime('%Y-%m-%dT%H:%M:%S') utc_end = stix_datetime.unix2datetime(unix + 60).strftime('%Y-%m-%dT%H:%M:%S') #print(utc_start, utc_end) sdo_query = Fido.search(a.Time(utc_start, utc_end), a.Instrument('AIA'), a.Wavelength(wavelen * u.angstrom)) sdo_res = Fido.fetch(sdo_query[0], progress=False, path='/tmp/') sdo = Map(sdo_res[0]) fig = plt.figure(figsize=(6, 6), dpi=100) ax = fig.add_subplot(projection=sdo) sdo.plot(clip_interval=[1, 100] * u.percent, axes=ax) return fig
def base_difference(mc, base=0, fraction=False): """ Calculate the base difference of a mapcube. Parameters ---------- mc : sunpy.map.MapCube A sunpy mapcube object base : int, sunpy.map.Map If base is an integer, this is understood as an index to the input mapcube. Differences are calculated relative to the map at index 'base'. If base is a sunpy map, then differences are calculated relative to that map fraction : boolean If False, then absolute changes relative to the base map are returned. If True, then fractional changes relative to the base map are returned Returns ------- sunpy.map.MapCube A mapcube containing base difference of the input mapcube. The value normalization function used in plotting the data is changed, prettifying movies of resultant mapcube. """ if not(isinstance(base, GenericMap)): base_data = mc[base].data else: base_data = base.data if base_data.shape != mc[0].data.shape: raise ValueError('Base map does not have the same shape as the maps in the input mapcube.') # Fractional changes or absolute changes if fraction: relative = base_data else: relative = 1.0 # Create a list containing the data for the new map object new_mc = [] for m in mc: new_data = (m.data - base_data) / relative new_mc.append(Map(new_data, m.meta)) # Create the new mapcube and return return Map(new_mc, cube=True)
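# Usage sketch for base_difference: express every layer as a fractional change
# relative to the first map in the cube.  The mapcube construction assumes the
# old Map(..., cube=True) interface; file paths are placeholders.
import glob

from sunpy.map import Map

cube = Map(sorted(glob.glob('aia_193_*.fits')), cube=True)  # placeholder files
fractional = base_difference(cube, base=0, fraction=True)
fractional[1].peek()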
def save(self): date = sunpy.time.parse_time(self.date) if not path.exists(self.maps_dir): makedirs(self.maps_dir) fname = path.join(self.maps_dir, '{:%Y-%m-%dT%H_%M_%S}.fits'.format(date)) alldata = np.zeros((self.shape[0], self.shape[1], self.n_params+1)) alldata[..., 0] = self.data if self.n_params != 1: fname = fname.replace('.fits', '_full.fits') alldata[..., 1] = self.dem_width alldata[..., 2] = self.emission_measure alldata[..., -1] = self.goodness_of_fit outmap = Map(alldata, self.meta.copy()) outmap.save(fname, clobber=True)
def download_helioviewer(date, observatory, instrument, detector): file = hv.download_jp2(date, observatory=observatory, instrument=instrument, detector=detector) f = Map(file) if observatory == 'SOHO': # add observer location information: soho = get_horizons_coord('SOHO', f.date) f.meta['HGLN_OBS'] = soho.lon.to('deg').value f.meta['HGLT_OBS'] = soho.lat.to('deg').value f.meta['DSUN_OBS'] = soho.radius.to('m').value return f
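# download_helioviewer relies on a module-level Helioviewer client called hv.
# A usage sketch, assuming hv is a sunpy HelioviewerClient and that
# get_horizons_coord is imported from sunpy.coordinates in the same module:
from sunpy.net.helioviewer import HelioviewerClient

hv = HelioviewerClient()
lasco_c2 = download_helioviewer('2011/06/07 06:33:00', 'SOHO', 'LASCO', 'C2')
print(lasco_c2.meta['dsun_obs'])  # observer distance filled in for SOHO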
def get_properties(cls, header): """Parses RHESSI image header""" properties = Map.get_properties(header) properties.update({ "date": parse_time(header.get('date_obs')), "detector": header.get('telescop'), "instrument": header.get('telescop'), "measurement": [header.get('energy_l'), header.get('energy_h')], "observatory": "SDO", "name": "RHESSI %d - %d keV" % (header.get('energy_l'), header.get('energy_h')), "cmap": cm.get_cmap('rhessi'), "exposure_time": (parse_time(header.get('date_end')) - parse_time(header.get('date_obs'))).seconds, "coordinate_system": { 'x': 'HPLN-TAN', 'y': 'HPLT-TAN' } }) return properties
def get_properties(cls, header): """Parses SXT image header""" properties = Map.get_properties(header) # 2012/11/07 - the SXT headers do not have a value of the distance from # the spacecraft to the center of the Sun. The FITS keyword 'DSUN_OBS' # appears to refer to the observed diameter of the Sun. Until such # time as that is calculated and properly included in the file, we will # use the value of 1 AU as a standard. properties['dsun']= constants.au wavelnth = header.get('wavelnth') if wavelnth == 'Al.1': wavelnth = 'Al01' if wavelnth.lower() == 'open': wavelnth = 'white light' properties.update({ "detector": "SXT", "instrument": "SXT", "observatory": "Yohkoh", "name": "SXT %s" % wavelnth, "nickname": "SXT", "cmap": cm.get_cmap(name='yohkohsxt' + wavelnth[0:2].lower()) }) return properties
def convolve(sunpy_map, oversample_psf=1): """Convolve the FOXSI psf with an input map Parameters ---------- sunpy_map : `~sunpy.map.GenericMap` An input map. oversample_psf : int The number of subpixels to average over to produce a more accurate PSF Returns ------- sunpy_map : `~sunpy.map.GenericMap` The map convolved with the FOXSI psf. """ this_psf = psf(0 * u.arcmin, 0 * u.arcmin, scale=sunpy_map.scale.x, oversample=oversample_psf) smoothed_data = astropy_convolve(sunpy_map.data, this_psf) meta = sunpy_map.meta.copy() meta['telescop'] = 'FOXSI-SMEX' result = Map((smoothed_data, meta)) return result
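# Usage sketch for the convolve function above: smooth a sample AIA map with
# the FOXSI PSF.  The psf() helper used inside convolve is assumed to be
# defined elsewhere in the FOXSI simulation package this function comes from,
# and the scale.x attribute implies an older sunpy release.
import sunpy.data.sample
from sunpy.map import Map

aia = Map(sunpy.data.sample.AIA_171_IMAGE)
foxsi_view = convolve(aia, oversample_psf=2)
foxsi_view.peek()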
def test_to_sunpy_map(self, m, n, pos, pixel): pos = pos * unit.arcsec pixel = pixel * unit.arcsec u = generate_uv(m, pixel[0]) v = generate_uv(n, pixel[1]) u, v = np.meshgrid(u, v) uv = np.array([u, v]).reshape(2, m * n) / unit.arcsec header = { 'crval1': pos[0].value, 'crval2': pos[1].value, 'cdelt1': pixel[0].value, 'cdelt2': pixel[1].value } data = Gaussian2DKernel(stddev=2, x_size=n, y_size=m).array mp = Map((data, header)) vis = Visibility.from_map(mp, uv) res = vis.to_map((m, n), pixel_size=pixel) # assert np.allclose(res.data, data) assert res.reference_coordinate.Tx == pos[0] assert res.reference_coordinate.Ty == pos[1] assert res.scale.axis1 == pixel[0] / unit.pix assert res.scale.axis2 == pixel[1] / unit.pix assert res.dimensions.x == m * unit.pix assert res.dimensions.y == n * unit.pix
def get_properties(cls, header): """Parses SXT image header""" properties = Map.get_properties(header) # 2012/12/19 - the SXT headers do not have a value of the distance from # the spacecraft to the center of the Sun. The FITS keyword 'DSUN_OBS' # appears to refer to the observed diameter of the Sun. Until such # time as that is calculated and properly included in the file, we will # use simple trigonometry to calculate the distance of the center of # the Sun from the spacecraft. Note that the small angle approximation # is used, and the solar radius stored in SXT FITS files is in arcseconds. properties["dsun"] = constants.au yohkoh_solar_r = header.get("solar_r", None) if yohkoh_solar_r == None: properties["dsun"] = constants.au else: properties["dsun"] = constants.radius / (np.deg2rad(yohkoh_solar_r / 3600.0)) wavelnth = header.get("wavelnth") if wavelnth == "Al.1": wavelnth = "Al01" if wavelnth.lower() == "open": wavelnth = "white light" properties.update( { "detector": "SXT", "instrument": "SXT", "observatory": "Yohkoh", "name": "SXT %s" % wavelnth, "nickname": "SXT", "cmap": cm.get_cmap(name="yohkohsxt" + wavelnth[0:2].lower()), } ) return properties
def get_CHMap_stats(map_path): '''Return all the requested stats in a SPoCA CHMap''' # Open the FITS file hdus = fits.open(map_path) # Create a sunpy Map for converting the pixel coordinates image_hdu = hdus[image_hdu_name] map = Map(image_hdu.data, image_hdu.header) # Get regions by id regions_hdu = hdus[region_hdu_name] regions = {region['ID']: region for region in regions_hdu.data} # Get region stats by id region_stats = { region_stat['ID']: region_stat for region_stat in hdus[region_stats_hdu_name].data } # Create the stats list stats_list = list() for id, region in regions.items(): stats_list.append(get_stats(map, region, region_stats[id])) return stats_list
def __new__(cls, *args, **kwargs): """Creates a new Map instance""" maps = [] data = [] headers = [] # convert input to maps for item in args: if isinstance(item, Map): maps.append(item) else: maps.append(Map.read(item)) # sort data sortby = kwargs.get("sortby", "date") if hasattr(cls, '_sort_by_%s' % sortby): maps.sort(key=getattr(cls, '_sort_by_%s' % sortby)()) # create data cube for map_ in maps: data.append(np.array(map_)) headers.append(map_._original_header) obj = np.asarray(data).view(cls) obj._headers = headers return obj
def get_properties(cls, header): """Parses XRT image header""" properties = Map.get_properties(header) # XRT uses DATE_OBS, not date-obs. properties["date"] = parse_time(header.get('date_obs', None)) #TODO: proper exception handling here - report to the user that there is # an unexpected value fw1 = header.get('EC_FW1_') if not(fw1.lower() in [x.lower() for x in cls.filter_wheel1_measurements]): pass fw2 = header.get('EC_FW2_') if not(fw2.lower() in [x.lower() for x in cls.filter_wheel2_measurements]): pass # All images get the same color table - IDL Red temperature (loadct, 3) properties.update({ "detector": "XRT", "instrument": "XRT", "observatory": "Hinode", "name": "XRT %s-%s " % (fw1.replace('_', ' '), fw2.replace('_', ' ')), "nickname": "XRT", "cmap": cm.get_cmap(name='hinodexrt') }) return properties
def process_med_int(fle): """ Processes 1 image and extracts the median intensity on the disk normalized for exposure time. Args: fle (str): image file name Returns: median intensity of the solar disk normalized for exptime """ amap = Map(fle) amap = aiaprep(amap) data = amap.data date = amap.date hdr = getFitsHdr(fle) exp_time = hdr['exptime'] r_pix = hdr['rsun_obs'] / hdr['cdelt1'] # radius of the sun in pixels disk_mask = get_disk_mask(data.shape, r_pix) disk_data = np.ma.array(data, mask=disk_mask) med_int = np.ma.median(disk_data) # np.median doesn't support masking return med_int / exp_time
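# get_disk_mask is called above but not defined in this collection.  A minimal
# sketch of the assumed behaviour: a boolean mask that is True outside the
# solar disk (so np.ma.array hides the off-disk pixels), given the image shape
# and the solar radius in pixels.
import numpy as np


def get_disk_mask(shape, r_pix):
    """True outside a disk of radius r_pix centred on the image (assumed)."""
    yy, xx = np.indices(shape)
    cy, cx = (shape[0] - 1) / 2.0, (shape[1] - 1) / 2.0
    return (xx - cx) ** 2 + (yy - cy) ** 2 > r_pix ** 2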
def detect(self, channel, i_time, header, bins, bin_range): """ For a given timestep, map the intensity along the loop to the 3D field and return the Hi-C data product. Parameters ---------- channel : `dict` i_time : `int` header : `~sunpy.util.metadata.MetaDict` bins : `~synthesizAR.util.SpatialPair` bin_range : `~synthesizAR.util.SpatialPair` Returns ------- AIA data product : `~sunpy.map.Map` """ with h5py.File(self.counts_file, 'r') as hf: weights = np.array(hf[channel['name']][i_time, :]) units = u.Unit(get_keys(hf[channel['name']].attrs, ('unit','units'))) hpc_coordinates = self.total_coordinates dz = np.diff(bin_range.z)[0].cgs / bins.z * (1. * u.pixel) visible = is_visible(hpc_coordinates, self.observer_coordinate) hist, _, _ = np.histogram2d(hpc_coordinates.Tx.value, hpc_coordinates.Ty.value, bins=(bins.x.value, bins.y.value), range=(bin_range.x.value, bin_range.y.value), weights=visible * weights * dz.value) header['bunit'] = (units * dz.unit).to_string() counts = gaussian_filter(hist.T, (channel['gaussian_width']['y'].value, channel['gaussian_width']['x'].value)) return Map(counts.astype(np.float32), header)
def test_read_file(): """ Tests the reading of the complete JP2 file and its conversion into a SunPy map. """ map_ = Map(AIA_193_JP2) assert isinstance(map_, GenericMap)
def add_map(self, input_, zorder=None, alpha=1, levels=False): """Adds a map to the CompositeMap Parameters ---------- input_ : {sunpy.map, string} Map instance or filepath to map to be added zorder : int The index to use when determining where the map should lie along the z-axis; maps with higher z-orders appear above maps with lower z-orders. alpha : float Opacity at which the map should be displayed. An alpha value of 0 results in a fully transparent image while an alpha value of 1 results in a fully opaque image. Values between result in semi- transparent images. """ if zorder is None: zorder = max([m.zorder for m in self._maps]) + 10 m = Map.read(input_) m.zorder = zorder m.alpha = alpha m.levels = levels self._maps.append(m)
def add_noise(params, wave_maps, verbose=False): """ Adds simulated noise to a list of maps """ wave_maps_noise = [] for current_wave_map in wave_maps: if verbose: print(" * Adding noise to map at " + str(current_wave_map.date)) noise = noise_random(params, current_wave_map.data.shape) struct = noise_structure(params, current_wave_map.data.shape) noisy_wave_map = Map(current_wave_map.data + noise + struct, current_wave_map.meta) noisy_wave_map.plot_settings = deepcopy(current_wave_map.plot_settings) wave_maps_noise.append(noisy_wave_map) return Map(wave_maps_noise, cube=True)
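# noise_random and noise_structure are assumed helpers that are not included
# in this collection.  A rough sketch of the kind of terms they are expected
# to return (a pixel-to-pixel random component and a smooth structured
# component); the 'params' keys below are invented purely for illustration.
import numpy as np
from scipy.ndimage import gaussian_filter


def noise_random(params, shape):
    """Uncorrelated Gaussian noise term (hypothetical params keys)."""
    return np.random.normal(0.0, params.get('noise_sigma', 1.0), shape)


def noise_structure(params, shape):
    """Smooth, spatially correlated noise term (hypothetical params keys)."""
    raw = np.random.normal(0.0, params.get('struct_sigma', 1.0), shape)
    return gaussian_filter(raw, sigma=params.get('struct_scale', 5.0))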
def clean(params, wave_maps, verbose=False): """ Cleans a list of maps """ wave_maps_clean = [] for current_wave_map in wave_maps: if verbose: print(" * Cleaning map at "+str(current_wave_map.date)) data = np.asarray(current_wave_map.data) if params.get("clean_nans"): data[np.isnan(data)] = 0. cleaned_wave_map = Map(data, current_wave_map.meta) # cleaned_wave_map.name = current_wave_map.name cleaned_wave_map.meta['date-obs'] = current_wave_map.date cleaned_wave_map.plot_settings = deepcopy(current_wave_map.plot_settings) wave_maps_clean.append(cleaned_wave_map) return Map(wave_maps_clean, cube=True)
def __getitem__(self, key):
    """Overriding indexing operation"""
    if self.ndim == 3 and isinstance(key, int):
        data = np.ndarray.__getitem__(self, key)
        header = self._headers[key]
        for cls in Map.__subclasses__():
            if cls.is_datasource_for(header):
                return cls(data, header)
    else:
        return np.ndarray.__getitem__(self, key)
def get_properties(cls, header): """Parses EUVI image header""" properties = Map.get_properties(header) properties.update({ "date": parse_time(header.get('date-obs',header.get('date_obs'))), "detector": "EUVI", "instrument": "SECCHI", "observatory": header.get('obsrvtry'), "cmap": cm.get_cmap('sohoeit%d' % header.get('wavelnth')), "nickname": "EUVI-" + header.get('obsrvtry')[-1] }) return properties
def get_properties(cls, header): """Parses AIA image header""" properties = Map.get_properties(header) properties.update({ "detector": "AIA", "instrument": "AIA", "observatory": "SDO", "nickname": "AIA", "cmap": cm.get_cmap('sdoaia%d' % header.get('wavelnth')), "processing_level": header.get('LVL_NUM') }) return properties
def get_properties(cls, header): """Parses SWAP image header""" properties = Map.get_properties(header) properties.update({ "detector": "SWAP", "instrument": "SWAP", "observatory": "PROBA2", "name": "SWAP %s" % header.get('wavelnth'), "nickname": "SWAP", "cmap": cm.get_cmap(name='sdoaia171') }) return properties
def get_properties(cls, header): """Parses LASCO image header""" properties = Map.get_properties(header) datestr = "%sT%s" % (header.get('date_obs'), header.get('time_obs')) properties.update({ "date": parse_time(datestr), "measurement": "white-light", "name": "LASCO %s" % header.get('detector'), "nickname": "LASCO-%s" % header.get('detector'), "cmap": cm.get_cmap('soholasco%s' % properties['detector'][1]) }) return properties
def get_properties(cls, header): """Parses AIA image header""" properties = Map.get_properties(header) properties.update( { "detector": "AIA", "instrument": "AIA", "observatory": "SDO", "nickname": "AIA", "cmap": cm.get_cmap("sdoaia%d" % header.get("wavelnth")), } ) return properties
def calculate_em(self, wlen='171', dz=100, model=False):
    """
    Calculate an approximation of the coronal EmissionMeasure using a given
    TemperatureMap object and a particular AIA channel.

    Parameters
    ----------
    tmap : CoronaTemps.temperature.TemperatureMap
        A TemperatureMap instance containing coronal temperature data
    wlen : {'94' | '131' | '171' | '193' | '211' | '335'}
        AIA wavelength used to approximate the emission measure. '171', '193'
        and '211' are most likely to provide reliable results. Use of other
        channels is not recommended.
    """
    # Load the appropriate temperature response function
    tresp = read('/imaps/holly/home/ajl7/CoronaTemps/aia_tresp')
    resp = tresp['resp{}'.format(wlen)]

    # Get some information from the TemperatureMap and set up filenames, etc
    tempdata = self.data.copy()
    tempdata[np.isnan(tempdata)] = 0.0
    date = sunpy.time.parse_time(self.date)
    if not model:
        data_dir = self.data_dir
        fits_dir = path.join(data_dir, '{:%Y/%m/%d}/{}'.format(date, wlen))
        filename = path.join(fits_dir,
                             '*{0:%Y?%m?%d}?{0:%H?%M}*fits'.format(date))
        if wlen == '94':
            filename = filename.replace('94', '094')

        # Load and appropriately process AIA data
        filelist = glob.glob(filename)
        if filelist == []:
            print('AIA data not found :(')
            return
        aiamap = Map(filename)
        aiamap.data /= aiamap.exposure_time
        aiamap = aiaprep(aiamap)
        aiamap = aiamap.submap(self.xrange, self.yrange)
    else:
        fname = '/imaps/holly/home/ajl7/CoronaTemps/data/synthetic/{}/model.fits'.format(wlen)
        if wlen == '94':
            fname = fname.replace('94', '094')
        aiamap = Map(fname)

    # Create new Map and put EM values in it
    emmap = Map(self.data.copy(), self.meta.copy())
    indices = np.round((tempdata - 4.0) / 0.05).astype(int)
    indices[indices < 0] = 0
    indices[indices > 100] = 100
    #print emmap.shape, indices.shape, tempdata.shape, aiamap.shape, resp.shape
    emmap.data = np.log10(aiamap.data / resp[indices])
    #emmap.data = aiamap.data / resp[indices]

    emmapcubehelix = _cm.cubehelix(s=2.8, r=-0.7, h=1.4, gamma=1.0)
    cm.register_cmap(name='emhelix', data=emmapcubehelix)
    emmap.cmap = cm.get_cmap('emhelix')

    return emmap
def get_properties(cls, header): """Parses HMI image header""" properties = Map.get_properties(header) measurement = header['content'].split(" ")[0].lower() properties.update({ "detector": "HMI", "instrument": "HMI", "measurement": measurement, "observatory": "SDO", "name": "HMI %s" % measurement, "nickname": "HMI" }) return properties
def get_properties(cls, header): """Parses COR image header""" properties = Map.get_properties(header) # @TODO: Deal with invalid values for exptime. E.g. STEREO-B COR2 # on 2012/03/20 has -1 for some images. properties.update({ "date": parse_time(header.get('date_obs')), "detector": header.get('detector'), "instrument": "SECCHI", "observatory": header.get('obsrvtry'), "measurement": "white-light", "name": "SECCHI %s" % header.get('detector'), "nickname": "%s-%s" % (header.get('detector'), header.get('obsrvtry')[-1]) }) return properties
def __new__(cls, red, green, blue, **kwargs): headers = [] data = np.zeros((red.shape[0], red.shape[1], 3), dtype=np.uint8) # convert input to maps for i, item in enumerate([red, green, blue]): if isinstance(item, Map): map_ = item else: map_ = Map.read(item) data[:, :, i] = map_ headers.append(map_.get_header(original=True)) obj = np.asarray(data).view(cls) obj._headers = headers return obj
def get_properties(cls, header): """Parses LASCO image header""" properties = Map.get_properties(header) datestr = "%sT%s" % ( header.get("date-obs", header.get("date_obs")), header.get("time-obs", header.get("time_obs")), ) properties.update( { "date": parse_time(datestr), "measurement": "white-light", "name": "LASCO %s" % header.get("detector"), "nickname": "LASCO-%s" % header.get("detector"), "cmap": cm.get_cmap("soholasco%s" % properties["detector"][1]), } ) return properties
def get_properties(cls, header): """Parses EIT image header""" properties = Map.get_properties(header) # Solar radius in arc-seconds at 1 au # @TODO: use sunpy.sun instead radius_1au = 959.644 scale = header.get("cdelt1") properties.update({ "date": parse_time(header.get('date_obs')), "detector": "EIT", "dsun": ((radius_1au / (properties['rsun_arcseconds'] * scale)) * constants.au), "name": "EIT %s" % header.get('wavelnth'), "nickname": "EIT", "cmap": cm.get_cmap('sohoeit%d' % header.get('wavelnth')) }) return properties
def get_properties(cls, header): """Parses EIT image header""" properties = Map.get_properties(header) # Solar radius in arc-seconds at 1 au radius_1au = sun.angular_size(header.get('date_obs')) scale = header.get("cdelt1") # EIT solar radius is expressed in number of EIT pixels solar_r = header.get("solar_r") properties.update({ "date": parse_time(header.get('date_obs')), "detector": "EIT", "rsun_arcseconds": solar_r * scale, "dsun": ((radius_1au / (solar_r * scale)) * constants.au), "name": "EIT %s" % header.get('wavelnth'), "nickname": "EIT", "cmap": cm.get_cmap('sohoeit%d' % header.get('wavelnth')) }) return properties
def get_properties(cls, header): """Parses MDI image header""" properties = Map.get_properties(header) # MDI sometimes has an "60" in seconds field datestr = header.get("date-obs", header.get("date_obs")) if datestr[17:19] == "60": datestr = datestr[:17] + "30" + datestr[19:] rsun = header.get("radius") # Solar radius in arc-seconds at 1 au # previous value radius_1au = 959.644 radius_1au = constants.average_angular_size # MDI images may have radius = 0.0 if not rsun: dsun = constants.au else: scale = header.get("cdelt1") dsun = (radius_1au / (rsun * scale)) * constants.au # Determine measurement dpcobsr = header.get("dpc_obsr") meas = "magnetogram" if dpcobsr.find("Mag") != -1 else "continuum" properties.update( { "date": parse_time(datestr), "detector": "MDI", "measurement": meas, "dsun": dsun, "name": "MDI %s" % meas, "nickname": "MDI", } ) return properties
def __init__(self, *args): self._maps = [] # Default alpha and zorder values alphas = [1] * len(args) zorders = range(0, 10 * len(args), 10) levels = [False] * len(args) # Parse input Maps/filepaths for i, item in enumerate(args): # Parse map if isinstance(item, Map): m = item else: m = Map.read(item) # Set z-order and alpha values for the map m.zorder = zorders[i] m.alpha = alphas[i] m.levels = levels[i] # Add map self._maps.append(m)