def test_mismatched_spectral_axes_parameters():
    spec = Spectrum1D(spectral_axis=SpectralCoord(np.linspace(0, 50, 50) * u.AA,
                                                  radial_velocity=u.Quantity(100.0, "km/s")),
                      flux=np.random.randn(50) * u.Jy)
    spec1 = Spectrum1D(spectral_axis=SpectralCoord(np.linspace(20, 60, 50) * u.AA,
                                                   radial_velocity=u.Quantity(200.0, "km/s")),
                       flux=np.random.randn(50) * u.Jy)

    with pytest.raises(ValueError) as e_info:
        SpectrumCollection.from_spectra([spec, spec1])

def spectralcoord_from_redshift(redshift):
    # Nested helper: `self`, `observer`, and `target` come from the enclosing scope.
    if isinstance(redshift, SpectralCoord):
        return redshift
    return SpectralCoord((redshift + 1) * self.wcs.restwav,
                         unit=u.m, observer=observer, target=target)

def spectralcoord_from_beta(beta):
    return SpectralCoord(beta * C_SI,
                         unit=u.m / u.s,
                         doppler_convention='relativistic',
                         doppler_rest=self.wcs.restwav * u.m,
                         observer=observer, target=target)

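# The two helpers above build SpectralCoord objects from a redshift and from beta (v/c)
# using the WCS rest wavelength. A minimal, self-contained sketch of the same doppler
# machinery (not from the source; the rest wavelength below is only an illustrative
# value): a velocity-valued SpectralCoord converts to wavelength through its stored
# doppler_rest and doppler_convention.
import astropy.units as u
from astropy.coordinates import SpectralCoord

rest_wav = 0.2110611405 * u.m  # approximate HI 21 cm rest wavelength (illustrative)
vel = SpectralCoord(300 * u.km / u.s,
                    doppler_rest=rest_wav,
                    doppler_convention='relativistic')
wav = vel.to(u.m)  # velocity -> wavelength via the stored rest value and convention
print(wav)
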
def test_create_collection_from_spectrum1D():
    spec = Spectrum1D(spectral_axis=SpectralCoord(np.linspace(0, 50, 50) * u.AA,
                                                  redshift=0.1),
                      flux=np.random.randn(50) * u.Jy,
                      uncertainty=StdDevUncertainty(np.random.sample(50), unit='Jy'))
    spec1 = Spectrum1D(spectral_axis=SpectralCoord(np.linspace(20, 60, 50) * u.AA,
                                                   redshift=0.1),
                       flux=np.random.randn(50) * u.Jy,
                       uncertainty=StdDevUncertainty(np.random.sample(50), unit='Jy'))

    spec_coll = SpectrumCollection.from_spectra([spec, spec1])

    assert spec_coll.ndim == 1
    assert spec_coll.shape == (2, )
    assert spec_coll.nspectral == 50
    assert isinstance(spec_coll.flux, u.Quantity)
    assert isinstance(spec_coll.spectral_axis, SpectralCoord)
    assert spec.spectral_axis.unit == spec_coll.spectral_axis.unit
    assert spec.flux.unit == spec_coll.flux.unit
    assert_allclose(spec_coll.spectral_axis.redshift.value, 0.1)

def test_create_with_spectral_coord():
    spectral_coord = SpectralCoord(np.arange(5100, 5150) * u.AA,
                                   radial_velocity=u.Quantity(1000.0, "km/s"))
    flux = np.random.randn(50) * u.Jy
    spec = Spectrum1D(spectral_axis=spectral_coord, flux=flux)

    assert spec.radial_velocity == u.Quantity(1000.0, "km/s")
    assert isinstance(spec.spectral_axis, SpectralCoord)
    assert spec.spectral_axis.size == 50

def _change_frame(ixds_block, observer, target, frequency, frame):
    """
    This function will be called using xr.map_blocks.

    References:
    http://xarray.pydata.org/en/stable/generated/xarray.map_blocks.html
    https://xarray-contrib.github.io/xarray-tutorial/scipy-tutorial/06_xarray_and_dask.html#map_blocks
    """
    if ("velocity" in ixds_block.coords) and ("Velocity" in ixds_block.attrs["units_map"]):
        # Update the target to use the velocity coordinate value for this block.
        # Since this is inside map_blocks, the only velo/chan available is the one we want to use.
        chan_vel = ixds_block.velocity.values * units.Unit(
            ixds_block.attrs["units_map"]["Velocity"])
        # Assignment via `SkyCoord(target, radial_velocity=chan_vel)` doesn't seem to work, see
        # https://docs.astropy.org/en/stable/coordinates/velocities.html#adding-velocities-to-existing-frame-objects
        # https://docs.astropy.org/en/stable/coordinates/representations.html#attaching-differential-objects-to-representation-objects
        target = target.data.with_differentials({'s': RadialDifferential(chan_vel)})

    for xda in ixds_block.data_vars:
        # We don't want to transform boolean data arrays,
        # nor arrays that have no chan dimension.
        if ixds_block[xda].dtype != bool and "chan" in ixds_block[xda].dims:
            # ixds_block[xda].values is the computed ndarray for this block
            sc = SpectralCoord(
                ixds_block[xda].values,
                unit=ixds_block.attrs["units_map"]["Frequency"],
                observer=observer,
                target=target,
                doppler_rest=frequency,
                doppler_convention=ixds_block.attrs["velocity__type"],
            )
            # see https://docs.astropy.org/en/stable/coordinates/spectralcoord.html#specifying-an-observer-and-a-target-explicitly
            new_sc = sc.with_observer_stationary_relative_to(frame)
            ixds_block[xda] = new_sc.values

    return ixds_block

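# A minimal sketch (assumed, not from the source) of how a per-block helper like
# _change_frame is typically driven through xarray's map_blocks. `ixds`, `observer`,
# `target`, and `rest_frequency`, as well as the 'lsrk' frame string, are placeholders
# assumed to be defined elsewhere.
import xarray as xr

converted = xr.map_blocks(
    _change_frame,                                    # applied once per Dask block
    ixds,                                             # chunked xarray.Dataset (assumed)
    args=(observer, target, rest_frequency, 'lsrk'),  # extra positional args after the block
)
result = converted.compute()                          # triggers the per-block frame change
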
def test_scalar_spectralcoord(tmpdir):
    sc = SpectralCoord(565 * u.nm)
    tree = dict(spectralcoord=sc)

    def check(asdffile):
        assert isinstance(asdffile['spectralcoord'], SpectralCoord)
        assert_quantity_allclose(asdffile['spectralcoord'].quantity, 565 * u.nm)

    assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check)

def test_crop_mismatch_class(ndcube_4d_ln_lt_l_t):
    """Test crop raising when a bounding-box coordinate class is incompatible with the cube WCS."""
    cube = ndcube_4d_ln_lt_l_t
    intervals = cube.wcs.array_index_to_world([1, 2], [0, 1], [0, 1], [0, 2])
    intervals[0] = SpectralCoord([3e-11, 4.5e-11], unit=u.m)
    lower_corner = [coord[0] for coord in intervals]
    upper_corner = [coord[-1] for coord in intervals]
    with pytest.raises(
            TypeError,
            match=r"<class .*.SpectralCoord'> of component 0 in point 0 is "
                  r"incompatible with WCS component time"):
        cube.crop(lower_corner, upper_corner)

def test_vector_spectralcoord(tmpdir):
    sc = SpectralCoord([100, 200, 300] * u.GHz)
    tree = dict(spectralcoord=sc)

    def check(asdffile):
        assert isinstance(asdffile['spectralcoord'], SpectralCoord)
        assert_quantity_allclose(asdffile['spectralcoord'].quantity, [100, 200, 300] * u.GHz)

    assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check,
                          tree_match_func=assert_quantity_allclose)

def test_create_from_spectral_coord(observer, target):
    """
    Checks that parameters are correctly copied from the SpectralCoord object
    to the SpectralAxis object.
    """
    spec_coord = SpectralCoord([100, 200, 300] * u.nm, observer=observer,
                               target=target, doppler_convention='optical',
                               doppler_rest=6000 * u.AA)
    spec_axis = SpectralAxis(spec_coord)

    assert spec_coord.observer == spec_axis.observer
    assert spec_coord.target == spec_axis.target
    assert spec_coord.radial_velocity == spec_axis.radial_velocity
    assert spec_coord.doppler_convention == spec_axis.doppler_convention
    assert spec_coord.doppler_rest == spec_axis.doppler_rest

def test_spectralcoord_with_obstarget(tmpdir):
    sc = SpectralCoord(10 * u.GHz,
                       observer=ICRS(1 * u.km, 2 * u.km, 3 * u.km,
                                     representation_type='cartesian'),
                       target=Galactic(10 * u.deg, 20 * u.deg, distance=30 * u.pc))
    tree = dict(spectralcoord=sc)

    def check(asdffile):
        assert isinstance(asdffile['spectralcoord'], SpectralCoord)
        assert_quantity_allclose(asdffile['spectralcoord'].quantity, 10 * u.GHz)
        assert isinstance(asdffile['spectralcoord'].observer, ICRS)
        assert isinstance(asdffile['spectralcoord'].target, Galactic)

    assert_roundtrip_tree(tree, tmpdir, asdf_check_func=check)

def __init__(self, flux, spectral_axis=None, wcs=None, uncertainty=None,
             mask=None, meta=None):
    # Check for quantity
    if not isinstance(flux, u.Quantity):
        raise u.UnitsError("Flux must be a `Quantity`.")

    if spectral_axis is not None:
        if not isinstance(spectral_axis, u.Quantity):
            raise u.UnitsError("Spectral axis must be a `Quantity`.")
        spectral_axis = SpectralCoord(spectral_axis)

        # Ensure that the input values are the same shape
        if not (flux.shape == spectral_axis.shape):
            raise ValueError("Shape of all data elements must be the same.")

    if uncertainty is not None and uncertainty.array.shape != flux.shape:
        raise ValueError("Uncertainty must be the same shape as flux and "
                         "spectral axis.")

    if mask is not None and mask.shape != flux.shape:
        raise ValueError("Mask must be the same shape as flux and "
                         "spectral axis.")

    # Convert uncertainties to standard deviations if not already defined
    # to be of some type
    if uncertainty is not None and not isinstance(uncertainty, NDUncertainty):
        # If the uncertainties are not provided a unit, raise a warning
        # and use the flux units
        if not isinstance(uncertainty, u.Quantity):
            warnings.warn("No unit associated with uncertainty, assuming "
                          f"flux units of '{flux.unit}'.")
            uncertainty = u.Quantity(uncertainty, unit=flux.unit)

        uncertainty = StdDevUncertainty(uncertainty)

    self._flux = flux
    self._spectral_axis = spectral_axis
    self._wcs = wcs
    self._uncertainty = uncertainty
    self._mask = mask
    self._meta = meta

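# Sketch (assumed, not from the source) of the uncertainty coercion performed by the
# constructor above: a bare array with no unit is promoted to a Quantity in the flux
# unit, then wrapped in StdDevUncertainty.
import numpy as np
import astropy.units as u
from astropy.nddata import StdDevUncertainty

flux = np.random.randn(50) * u.Jy
raw_unc = np.abs(np.random.randn(50))                # no unit attached
unc = StdDevUncertainty(u.Quantity(raw_unc, unit=flux.unit))
print(unc.unit)                                      # Jy, inherited from the flux
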
# Module-level dependencies used throughout plot()
import os
import time
import warnings

import numpy as np


def plot(obs_parameters='', n=0, m=0, f_rest=0, slope_correction=False, dB=False,
         vlsr=False, meta=False, avg_ylim=[0,0], cal_ylim=[0,0], rfi=[], xlim=[0,0],
         ylim=[0,0], dm=0, obs_file='observation.dat', cal_file='', waterfall_fits='',
         spectra_csv='', power_csv='', plot_file='plot.png'):
    '''
    Process, analyze and plot data.

    Args:
        obs_parameters: dict. Observation parameters (identical to parameters used to acquire data)
            dev_args: string. Device arguments (gr-osmosdr)
            rf_gain: float. RF gain
            if_gain: float. IF gain
            bb_gain: float. Baseband gain
            frequency: float. Center frequency [Hz]
            bandwidth: float. Instantaneous bandwidth [Hz]
            channels: int. Number of frequency channels (FFT size)
            t_sample: float. Integration time per FFT sample
            duration: float. Total observing duration [sec]
            loc: string. Latitude, longitude, and elevation of observation (float, separated by spaces)
            ra_dec: string. Right ascension and declination of observation target (float, separated by space)
            az_alt: string. Azimuth and altitude of observation target (float, separated by space; takes precedence over ra_dec)
        n: int. Median filter factor (spectrum)
        m: int. Median filter factor (time series)
        f_rest: float. Spectral line reference frequency used for radial velocity (Doppler shift) calculations [Hz]
        slope_correction: bool. Correct slope in poorly-calibrated spectra using linear regression
        dB: bool. Display data in decibel scaling
        vlsr: bool. Display graph in VLSR frame of reference
        meta: bool. Display header with date, time, and target
        rfi: list. Blank frequency channels contaminated with RFI ([low_frequency, high_frequency]) [Hz]
        avg_ylim: list. Averaged plot y-axis limits ([low, high])
        cal_ylim: list. Calibrated plot y-axis limits ([low, high])
        xlim: list. x-axis limits ([low_frequency, high_frequency]) [Hz]
        ylim: list. y-axis limits ([start_time, end_time]) [s]
        dm: float. Dispersion measure for dedispersion [pc/cm^3]
        obs_file: string. Input observation filename (generated with virgo.observe)
        cal_file: string. Input calibration filename (generated with virgo.observe)
        waterfall_fits: string. Output FITS filename
        spectra_csv: string. Output CSV filename (spectra)
        power_csv: string. Output CSV filename (time series)
        plot_file: string. Output plot filename
    '''
    import matplotlib
    matplotlib.use('Agg')  # Try commenting this line if you run into display/rendering errors
    import matplotlib.pyplot as plt
    from matplotlib.gridspec import GridSpec

    plt.rcParams['legend.fontsize'] = 14
    plt.rcParams['axes.labelsize'] = 14
    plt.rcParams['axes.titlesize'] = 18
    plt.rcParams['xtick.labelsize'] = 12
    plt.rcParams['ytick.labelsize'] = 12

    def decibel(x):
        if dB:
            return 10.0*np.log10(x)
        return x

    def shift(phase_num, n_rows):
        waterfall[:, phase_num] = np.roll(waterfall[:, phase_num], -n_rows)

    def SNR(spectrum, mask=np.array([])):
        '''Signal-to-Noise Ratio estimator, with optional masking.
        If mask not given, then all channels will be used to estimate noise
        (will drastically underestimate S:N - not robust to outliers!)'''
        if mask.size == 0:
            mask = np.zeros_like(spectrum)

        noise = np.nanstd((spectrum[2:]-spectrum[:-2])[mask[1:-1] == 0])/np.sqrt(2)
        background = np.nanmean(spectrum[mask == 0])

        return (spectrum-background)/noise

    def best_fit(power):
        '''Compute best Gaussian fit'''
        avg = np.nanmean(power)
        var = np.var(power)

        gaussian_fit_x = np.linspace(np.min(power), np.max(power), 100)
        gaussian_fit_y = 1.0/np.sqrt(2*np.pi*var)*np.exp(-0.5*(gaussian_fit_x-avg)**2/var)

        return [gaussian_fit_x, gaussian_fit_y]

    # Load observation parameters from dictionary argument/header file
    if obs_parameters != '':
        frequency = obs_parameters['frequency']
        bandwidth = obs_parameters['bandwidth']
        channels = obs_parameters['channels']
        t_sample = obs_parameters['t_sample']
        loc = obs_parameters['loc']
        ra_dec = obs_parameters['ra_dec']
        az_alt = obs_parameters['az_alt']
    else:
        header_file = '.'.join(obs_file.split('.')[:-1])+'.header'

        warnings.warn('No observation parameters passed. Attempting to load from header file ('+header_file+')...')

        with open(header_file, 'r') as f:
            headers = [parameter.rstrip('\n') for parameter in f.readlines()]

        for i in range(len(headers)):
            if 'mjd' in headers[i]:
                mjd = float(headers[i].strip().split('=')[1])
            elif 'frequency' in headers[i]:
                frequency = float(headers[i].strip().split('=')[1])
            elif 'bandwidth' in headers[i]:
                bandwidth = float(headers[i].strip().split('=')[1])
            elif 'channels' in headers[i]:
                channels = int(headers[i].strip().split('=')[1])
            elif 't_sample' in headers[i]:
                t_sample = float(headers[i].strip().split('=')[1])
            elif 'loc' in headers[i]:
                loc = tuple(map(float, headers[i].strip().split('=')[1].split(' ')))
            elif 'ra_dec' in headers[i]:
                ra_dec = tuple(map(str, headers[i].split('=')[1].split(' ')))
            elif 'az_alt' in headers[i]:
                az_alt = tuple(map(float, headers[i].split('=')[1].split(' ')))

    # Transform frequency axis limits to MHz
    xlim = [x / 1e6 for x in xlim]

    # Transform to VLSR
    if vlsr:
        from astropy import units as u
        from astropy.coordinates import SpectralCoord, EarthLocation, SkyCoord
        from astropy.time import Time

        obs_location = EarthLocation.from_geodetic(loc[0], loc[1], loc[2])
        obs_time = obs_location.get_itrs(obstime=Time(str(mjd), format='mjd', scale='utc'))
        if az_alt != '':
            obs_coord = SkyCoord(az=az_alt[0]*u.degree, alt=az_alt[1]*u.degree, frame='altaz',
                                 location=obs_location,
                                 obstime=Time(str(mjd), format='mjd', scale='utc'))
            obs_coord = obs_coord.icrs
            print(obs_coord)
        else:
            obs_coord = SkyCoord(ra=ra_dec[0]*u.degree, dec=ra_dec[1]*u.degree, frame='icrs')

        # Transform center frequency
        frequency = SpectralCoord(frequency * u.MHz, observer=obs_time, target=obs_coord)
        frequency = frequency.with_observer_stationary_relative_to('lsrk')
        frequency = frequency.quantity.value

    # Define Radial Velocity axis limits
    left_velocity_edge = -299792.458*(bandwidth-2*frequency+2*f_rest)/(bandwidth-2*frequency)
    right_velocity_edge = 299792.458*(-bandwidth-2*frequency+2*f_rest)/(bandwidth+2*frequency)

    # Transform sampling time to number of bins
    bins = int(t_sample*bandwidth/channels)

    # Load observation & calibration data
    offset = 1
    waterfall = offset*np.fromfile(obs_file, dtype='float32').reshape(-1, channels)/bins

    # Delete first 3 rows (potentially containing outlier samples)
    waterfall = waterfall[3:, :]

    # Mask RFI-contaminated channels
    if rfi != []:
        for j in range(len(rfi)):
            # Frequency to channel transformation
            current_rfi = rfi[j]
            rfi_lo = channels*(current_rfi[0] - (frequency - bandwidth/2))/bandwidth
            rfi_hi = channels*(current_rfi[1] - (frequency - bandwidth/2))/bandwidth

            # Blank channels
            for i in range(int(rfi_lo), int(rfi_hi)):
                waterfall[:, i] = np.nan

    if cal_file != '':
        waterfall_cal = offset*np.fromfile(cal_file, dtype='float32').reshape(-1, channels)/bins

        # Delete first 3 rows (potentially containing outlier samples)
        waterfall_cal = waterfall_cal[3:, :]

        # Mask RFI-contaminated channels
        if rfi != []:
            for j in range(len(rfi)):
                # Frequency to channel transformation
                current_rfi = rfi[j]
                rfi_lo = channels*(current_rfi[0] - (frequency - bandwidth/2))/bandwidth
                rfi_hi = channels*(current_rfi[1] - (frequency - bandwidth/2))/bandwidth

                # Blank channels
                for i in range(int(rfi_lo), int(rfi_hi)):
                    waterfall_cal[:, i] = np.nan

    # Compute average spectra
    with warnings.catch_warnings():
        warnings.filterwarnings(action='ignore', message='Mean of empty slice')
        avg_spectrum = decibel(np.nanmean(waterfall, axis=0))
        if cal_file != '':
            avg_spectrum_cal = decibel(np.nanmean(waterfall_cal, axis=0))

    # Number of sub-integrations
    subs = waterfall.shape[0]

    # Compute Time axis
    t = t_sample*np.arange(subs)

    # Compute Frequency axis; convert Hz to MHz
    frequency = np.linspace(frequency-0.5*bandwidth, frequency+0.5*bandwidth,
                            channels, endpoint=False)*1e-6

    # Perform de-dispersion
    if dm != 0:
        deltaF = float(np.max(frequency)-np.min(frequency))/subs
        f_start = np.min(frequency)
        for t_bin in range(subs):
            f_chan = f_start+t_bin*deltaF
            deltaT = 4149*dm*((1/(f_chan**2))-(1/(np.max(frequency)**2)))
            n = int((float(deltaT)/(float(1)/channels)))
            shift(t_bin, n)

    # Define array for Time Series plot
    power = decibel(np.nanmean(waterfall, axis=1))

    # Apply Mask
    mask = np.zeros_like(avg_spectrum)
    mask[np.logical_and(frequency > f_rest*1e-6-0.2, frequency < f_rest*1e-6+0.8)] = 1  # Margins OK for galactic HI

    # Define text offset for axvline text label
    text_offset = 0

    # Calibrate Spectrum
    if cal_file != '':
        if dB:
            spectrum = 10**((avg_spectrum-avg_spectrum_cal)/10)
        else:
            spectrum = avg_spectrum/avg_spectrum_cal

        spectrum = SNR(spectrum, mask)
        if slope_correction:
            idx = np.isfinite(frequency) & np.isfinite(spectrum)
            fit = np.polyfit(frequency[idx], spectrum[idx], 1)
            ang_coeff = fit[0]
            intercept = fit[1]
            fit_eq = ang_coeff*frequency + intercept
            spectrum = SNR(spectrum-fit_eq, mask)

        # Mitigate RFI (Frequency Domain)
        if n != 0:
            spectrum_clean = SNR(spectrum.copy(), mask)
            for i in range(0, int(channels)):
                spectrum_clean[i] = np.nanmedian(spectrum_clean[i:i+n])

        # Apply position offset for Spectral Line label
        text_offset = 60

    # Mitigate RFI (Time Domain)
    if m != 0:
        power_clean = power.copy()
        for i in range(0, int(subs)):
            power_clean[i] = np.nanmedian(power_clean[i:i+m])

    # Write Waterfall to file (FITS)
    if waterfall_fits != '':
        from astropy.io import fits

        # Load data
        hdu = fits.PrimaryHDU(waterfall)

        # Prepare FITS headers
        hdu.header['NAXIS'] = 2
        hdu.header['NAXIS1'] = channels
        hdu.header['NAXIS2'] = subs
        hdu.header['CRPIX1'] = channels/2
        hdu.header['CRPIX2'] = subs/2
        hdu.header['CRVAL1'] = frequency[int(channels/2)]
        hdu.header['CRVAL2'] = t[int(subs/2)]
        hdu.header['CDELT1'] = bandwidth*1e-6/channels
        hdu.header['CDELT2'] = t_sample
        hdu.header['CTYPE1'] = 'Frequency (MHz)'
        hdu.header['CTYPE2'] = 'Relative Time (s)'
        try:
            hdu.header['MJD-OBS'] = mjd
        except NameError:
            warnings.warn('Observation MJD could not be found and will not be part of the FITS header.')
            pass

        # Delete pre-existing FITS file
        try:
            os.remove(waterfall_fits)
        except OSError:
            pass

        # Write to file
        hdu.writeto(waterfall_fits)

    # Write Spectra to file (csv)
    if spectra_csv != '':
        if cal_file != '':
            np.savetxt(spectra_csv,
                       np.concatenate((frequency.reshape(channels, 1),
                                       avg_spectrum.reshape(channels, 1),
                                       avg_spectrum_cal.reshape(channels, 1),
                                       spectrum.reshape(channels, 1)), axis=1),
                       delimiter=',', fmt='%1.6f')
        else:
            np.savetxt(spectra_csv,
                       np.concatenate((frequency.reshape(channels, 1),
                                       avg_spectrum.reshape(channels, 1)), axis=1),
                       delimiter=',', fmt='%1.6f')

    # Write Time Series to file (csv)
    if power_csv != '':
        np.savetxt(power_csv,
                   np.concatenate((t.reshape(subs, 1),
                                   power.reshape(subs, 1)), axis=1),
                   delimiter=',', fmt='%1.6f')

    # Initialize plot
    if cal_file != '':
        fig = plt.figure(figsize=(27, 15))
        gs = GridSpec(2, 3)
    else:
        fig = plt.figure(figsize=(21, 15))
        gs = GridSpec(2, 2)

    if meta:
        from astropy.coordinates import get_constellation
        epoch = (mjd - 40587) * 86400.0
        meta_title = 'Date and Time: ' + time.strftime('%Y-%m-%d %H:%M:%S %Z', time.localtime(epoch)) + ' '
        meta_title += 'Target: ' + obs_coord.to_string('hmsdms', precision=0) + ' in ' + get_constellation(obs_coord) + '\n'
        plt.suptitle(meta_title, fontsize=18)

    # Plot Average Spectrum
    ax1 = fig.add_subplot(gs[0, 0])
    ax1.plot(frequency, avg_spectrum)
    if xlim == [0,0]:
        ax1.set_xlim(np.min(frequency), np.max(frequency))
    else:
        ax1.set_xlim(xlim[0], xlim[1])
    ax1.ticklabel_format(useOffset=False)
    ax1.set_xlabel('Frequency (MHz)')
    if avg_ylim != [0,0]:
        ax1.set_ylim(avg_ylim[0], avg_ylim[1])
    if dB:
        ax1.set_ylabel('Relative Power (dB)')
    else:
        ax1.set_ylabel('Relative Power')
    if vlsr:
        cal_title = r'$Average\ Spectrum\ (V_{LSR})$'
    else:
        cal_title = 'Average Spectrum'
    if f_rest != 0:
        cal_title += '\n'
    ax1.set_title(cal_title)
    ax1.grid()

    if xlim == [0,0] and f_rest != 0:
        # Add secondary axis for Radial Velocity
        ax1_secondary = ax1.twiny()
        ax1_secondary.set_xlabel('Radial Velocity (km/s)', labelpad=5)
        ax1_secondary.axvline(x=0, color='brown', linestyle='--', linewidth=2, zorder=0)
        ax1_secondary.annotate('Spectral Line\nRest Frequency', xy=(460-text_offset, 5),
                               xycoords='axes points', size=14, ha='left', va='bottom',
                               color='brown')
        ax1_secondary.set_xlim(left_velocity_edge, right_velocity_edge)
        ax1_secondary.tick_params(axis='x', direction='in', pad=-22)

    # Plot Calibrated Spectrum
    if cal_file != '':
        ax2 = fig.add_subplot(gs[0, 1])
        ax2.plot(frequency, spectrum, label='Raw Spectrum')
        if n != 0:
            ax2.plot(frequency, spectrum_clean, color='orangered', label='Median (n = '+str(n)+')')
        if cal_ylim != [0,0]:
            ax2.set_ylim(cal_ylim[0], cal_ylim[1])
        else:
            ax2.set_ylim()
        if xlim == [0,0]:
            ax2.set_xlim(np.min(frequency), np.max(frequency))
        else:
            ax2.set_xlim(xlim[0], xlim[1])
        ax2.ticklabel_format(useOffset=False)
        ax2.set_xlabel('Frequency (MHz)')
        ax2.set_ylabel('Signal-to-Noise Ratio (S/N)')
        if vlsr:
            cal_title = r'$Calibrated\ Spectrum\ (V_{LSR})$' + '\n'
        else:
            cal_title = 'Calibrated Spectrum\n'
        if f_rest != 0:
            ax2.set_title(cal_title)
        else:
            ax2.set_title('Calibrated Spectrum')
        if n != 0:
            if f_rest != 0:
                ax2.legend(bbox_to_anchor=(0.002, 0.96), loc='upper left')
            else:
                ax2.legend(loc='upper left')

        if xlim == [0,0] and f_rest != 0:
            # Add secondary axis for Radial Velocity
            ax2_secondary = ax2.twiny()
            ax2_secondary.set_xlabel('Radial Velocity (km/s)', labelpad=5)
            ax2_secondary.axvline(x=0, color='brown', linestyle='--', linewidth=2, zorder=0)
            ax2_secondary.annotate('Spectral Line\nRest Frequency', xy=(400, 5),
                                   xycoords='axes points', size=14, ha='left', va='bottom',
                                   color='brown')
            ax2_secondary.set_xlim(left_velocity_edge, right_velocity_edge)
            ax2_secondary.tick_params(axis='x', direction='in', pad=-22)
        ax2.grid()

    # Plot Dynamic Spectrum
    if cal_file != '':
        ax3 = fig.add_subplot(gs[0, 2])
    else:
        ax3 = fig.add_subplot(gs[0, 1])
    ax3.imshow(decibel(waterfall), origin='lower', interpolation='None', aspect='auto',
               extent=[np.min(frequency), np.max(frequency), np.min(t), np.max(t)])
    if xlim == [0,0] and ylim != [0,0]:
        ax3.set_ylim(ylim[0], ylim[1])
    elif xlim != [0,0] and ylim == [0,0]:
        ax3.set_xlim(xlim[0], xlim[1])
    elif xlim != [0,0] and ylim != [0,0]:
        ax3.set_xlim(xlim[0], xlim[1])
        ax3.set_ylim(ylim[0], ylim[1])
    ax3.ticklabel_format(useOffset=False)
    ax3.set_xlabel('Frequency (MHz)')
    ax3.set_ylabel('Relative Time (s)')
    ax3.set_title('Dynamic Spectrum (Waterfall)')

    # Adjust Subplot Width Ratio
    if cal_file != '':
        gs = GridSpec(2, 3, width_ratios=[16.5, 1, 1])
    else:
        gs = GridSpec(2, 2, width_ratios=[7.6, 1])

    # Plot Time Series (Power vs Time)
    ax4 = fig.add_subplot(gs[1, 0])
    ax4.plot(t, power, label='Raw Time Series')
    if m != 0:
        ax4.plot(t, power_clean, color='orangered', label='Median (n = '+str(m)+')')
    ax4.set_ylim()
    if ylim == [0,0]:
        ax4.set_xlim(0, np.max(t))
    else:
        ax4.set_xlim(ylim[0], ylim[1])
    ax4.set_xlabel('Relative Time (s)')
    if dB:
        ax4.set_ylabel('Relative Power (dB)')
    else:
        ax4.set_ylabel('Relative Power')
    ax4.set_title('Average Power vs Time')
    if m != 0:
        ax4.legend(bbox_to_anchor=(1, 1), loc='upper right')
    ax4.grid()

    # Plot Total Power Distribution
    if cal_file != '':
        gs = GridSpec(2, 3, width_ratios=[7.83, 1.5, -0.325])
    else:
        gs = GridSpec(2, 2, width_ratios=[8.8, 1.5])

    ax5 = fig.add_subplot(gs[1, 1])
    ax5.hist(power, np.max([int(np.size(power)/50), 10]), density=1, alpha=0.5,
             color='royalblue', orientation='horizontal', zorder=10)
    ax5.plot(best_fit(power)[1], best_fit(power)[0], '--', color='blue',
             label='Best fit (Raw)', zorder=20)
    if m != 0:
        ax5.hist(power_clean, np.max([int(np.size(power_clean)/50), 10]), density=1, alpha=0.5,
                 color='orangered', orientation='horizontal', zorder=10)
        ax5.plot(best_fit(power_clean)[1], best_fit(power_clean)[0], '--', color='red',
                 label='Best fit (Median)', zorder=20)
    ax5.set_xlim()
    ax5.set_ylim()
    ax5.get_shared_x_axes().join(ax5, ax4)
    ax5.set_yticklabels([])
    ax5.set_xlabel('Probability Density')
    ax5.set_title('Total Power Distribution')
    ax5.legend(bbox_to_anchor=(1, 1), loc='upper right')
    ax5.grid()

    # Save plots to file
    plt.tight_layout()
    plt.savefig(plot_file)
    plt.clf()

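# Example call (hedged; every file name and parameter value below is illustrative, not
# from the source), mirroring the signature of plot() and the obs_parameters keys it reads.
obs_parameters = {
    'frequency': 1420e6,        # Hz
    'bandwidth': 2.4e6,         # Hz
    'channels': 2048,
    't_sample': 1,              # s
    'loc': '',                  # only used when vlsr=True
    'ra_dec': '',               # only used when vlsr=True and az_alt is empty
    'az_alt': ''
}

plot(obs_parameters=obs_parameters, n=20, m=35, f_rest=1420.4057517667e6, dB=True,
     obs_file='observation.dat', cal_file='calibration.dat',
     spectra_csv='spectrum.csv', plot_file='plot.png')
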
def from_spectra(cls, spectra):
    """
    Create a spectrum collection from a set of individual
    :class:`specutils.Spectrum1D` objects.

    Parameters
    ----------
    spectra : list, ndarray
        A list of :class:`~specutils.Spectrum1D` objects to be held in the
        collection. Currently the spectral_axis parameters (e.g. observer,
        radial_velocity) must be the same for each spectrum.
    """
    # Enforce that the shape of each item must be the same
    if not all((x.shape == spectra[0].shape for x in spectra)):
        raise ValueError("Shape of all elements must be the same.")

    # Compose multi-dimensional ndarrays for each property
    flux = u.Quantity([spec.flux for spec in spectra])

    # Check that the spectral parameters are the same for each input
    # spectral_axis and create the multi-dimensional SpectralCoord
    sa = [x.spectral_axis for x in spectra]
    if (not all(x.radial_velocity == sa[0].radial_velocity for x in sa) or
            not all(x.target == sa[0].target for x in sa) or
            not all(x.observer == sa[0].observer for x in sa) or
            not all(x.doppler_convention == sa[0].doppler_convention for x in sa) or
            not all(x.doppler_rest == sa[0].doppler_rest for x in sa)):
        raise ValueError("All input spectral_axis SpectralCoord "
                         "objects must have the same parameters.")

    spectral_axis = SpectralCoord(sa,
                                  radial_velocity=sa[0].radial_velocity,
                                  doppler_rest=sa[0].doppler_rest,
                                  doppler_convention=sa[0].doppler_convention,
                                  observer=sa[0].observer,
                                  target=sa[0].target)

    # Check that either all spectra have associated uncertainties, or that
    # none of them do. If only some do, log an error and ignore the
    # uncertainties.
    if (not all((x.uncertainty is None for x in spectra)) and
            any((x.uncertainty is not None for x in spectra)) and
            all((x.uncertainty.uncertainty_type ==
                 spectra[0].uncertainty.uncertainty_type
                 for x in spectra))):
        quncs = u.Quantity([spec.uncertainty.quantity for spec in spectra])
        uncertainty = spectra[0].uncertainty.__class__(quncs)
    else:
        uncertainty = None
        warnings.warn("Not all spectra have associated uncertainties of "
                      "the same type, skipping uncertainties.")

    # Check that either all spectra have associated masks, or that
    # none of them do. If only some do, log an error and ignore the masks.
    if (not all((x.mask is None for x in spectra)) and
            any((x.mask is not None for x in spectra))):
        mask = np.array([spec.mask for spec in spectra])
    else:
        mask = None
        warnings.warn("Not all spectra have associated masks, "
                      "skipping masks.")

    # Store the wcs and meta as lists
    wcs = [spec.wcs for spec in spectra]
    meta = [spec.meta for spec in spectra]

    return cls(flux=flux, spectral_axis=spectral_axis,
               uncertainty=uncertainty, wcs=wcs, mask=mask, meta=meta)

def test_spectral_1d(header_spectral_1d, ctype1, observer):
    # This is a regression test for issues that happened with 1-d WCS
    # where the target is not defined but observer is.

    header = header_spectral_1d.copy()
    header['CTYPE1'] = ctype1
    header['CRVAL1'] = 0.1
    header['CDELT1'] = 0.001
    if ctype1[0] == 'V':
        header['CUNIT1'] = 'm s-1'
    else:
        header['CUNIT1'] = ''
    header['RESTWAV'] = 1.420405752E+09
    header['MJD-OBS'] = 55197

    if observer:
        header['OBSGEO-L'] = 144.2
        header['OBSGEO-B'] = -20.2
        header['OBSGEO-H'] = 0.
        header['SPECSYS'] = 'BARYCENT'

    with warnings.catch_warnings():
        warnings.simplefilter('ignore', FITSFixedWarning)
        wcs = WCS(header)

    # First ensure that transformations round-trip
    spectralcoord = wcs.pixel_to_world(31)
    assert isinstance(spectralcoord, SpectralCoord)
    assert spectralcoord.target is None
    assert (spectralcoord.observer is not None) is observer

    if observer:
        expected_message = 'No target defined on SpectralCoord'
    else:
        expected_message = 'No observer defined on WCS'

    with pytest.warns(AstropyUserWarning, match=expected_message):
        pix = wcs.world_to_pixel(spectralcoord)
    assert_allclose(pix, [31], rtol=1e-6)

    # Also make sure that we can convert a SpectralCoord on which the observer
    # is not defined but the target is.
    with pytest.warns(AstropyUserWarning, match='No velocity defined on frame'):
        spectralcoord_no_obs = SpectralCoord(
            spectralcoord.quantity,
            doppler_rest=spectralcoord.doppler_rest,
            doppler_convention=spectralcoord.doppler_convention,
            target=ICRS(10 * u.deg, 20 * u.deg, distance=1 * u.kpc))

    if observer:
        expected_message = 'No observer defined on SpectralCoord'
    else:
        expected_message = 'No observer defined on WCS'

    with pytest.warns(AstropyUserWarning, match=expected_message):
        pix2 = wcs.world_to_pixel(spectralcoord_no_obs)
    assert_allclose(pix2, [31], rtol=1e-6)

    # And finally check case when both observer and target are defined on the
    # SpectralCoord
    with pytest.warns(AstropyUserWarning, match='No velocity defined on frame'):
        spectralcoord_no_obs = SpectralCoord(
            spectralcoord.quantity,
            doppler_rest=spectralcoord.doppler_rest,
            doppler_convention=spectralcoord.doppler_convention,
            observer=ICRS(10 * u.deg, 20 * u.deg, distance=0 * u.kpc),
            target=ICRS(10 * u.deg, 20 * u.deg, distance=1 * u.kpc))

    if observer:
        pix3 = wcs.world_to_pixel(spectralcoord_no_obs)
    else:
        with pytest.warns(AstropyUserWarning, match='No observer defined on WCS'):
            pix3 = wcs.world_to_pixel(spectralcoord_no_obs)
    assert_allclose(pix3, [31], rtol=1e-6)

def reframe(mxds, vis, mode='channel', nchan=None, start=0, width=1,
            interpolation='linear', phasecenter=None, restfreq=None,
            outframe=None, veltype='radio'):
    """
    Transform channel labels and visibilities to a spectral reference frame
    which is appropriate for analysis, e.g. from TOPO to LSRK or to correct
    for doppler shifts throughout the time of observation

    Parameters
    ----------
    mxds : xarray.core.dataset.Dataset
        input multi-xarray Dataset with global data
    vis : str
        visibility partition in the mxds to use
    nchan : int
        number of channels in output spw. None=all
    start : int
        first input channel to use
    width : int
        number of input channels to average
    interpolation : str
        spectral interpolation method
    phasecenter : int
        image phase center position or field index
    restfreq : float
        rest frequency
    outframe : str
        output frame, None=keep input frame
    veltype : str
        velocity definition

    Returns
    -------
    xarray.core.dataset.Dataset
        New output multi-xarray Dataset with global data
    """
    import xarray
    import datetime
    import numpy as np
    from astropy import units as u
    from astropy.time import Time
    from astropy.coordinates import EarthLocation, SpectralCoord, SkyCoord

    xds = mxds.attrs[vis]
    fields = xds.FIELD_ID.values.clip(0).flatten()
    sources = mxds.FIELD.sel(field_id=fields).source_id.values  # [[xds.FIELD_ID.values.clip(0)]]
    unique_sources = np.unique(sources)
    # directions = mxds.SOURCE.DIRECTION.where(mxds.SOURCE.source_id
    targets = SkyCoord(directions[..., 0], directions[..., 1], unit='rad')

    # location = EarthLocation.of_site(input_xds['OBS_TELESCOPE_NAME']).get_itrs(obstime=Time(reference_time))
    # location = EarthLocation(input_xds['ANT_POSITION'].mean()).get_itrs(obstime=Time(reference_time))
    location = EarthLocation.of_site('ALMA')
    alma = location.get_itrs(obstime=Time(xds.time.values))

    time = _reference_time(global_xds)
    place = _reference_location(global_xds, reference_time)
    source = _target_location(global_xds, ddi)

    # epoch lookup or assume J2000
    # target_frame = 'FK5'

    aspc = SpectralCoord(input_array,
                         unit=u.Hz,
                         observer=place,
                         target=source)
                         # doppler_reference=img_xds.attrs['spectral__reference'],
                         # doppler_convention=img_xds.attrs['velocity__type'])

    output_xds = xarray.apply_ufunc(change_frame, place, source,
                                    vis_xds.DATA.chunk({'chan': -1}),
                                    input_core_dims=[['chan']],
                                    dask='parallelized',
                                    output_dtypes=[vis_xds.DATA.dtype])

    # update some properties of global_xds after conversion?
    # ouptut_xds.compute()
    return output_xds

def test_crop_reduces_dimensionality(ndcube_4d_ln_lt_l_t):
    cube = ndcube_4d_ln_lt_l_t
    point = (None, SpectralCoord([3e-11], unit=u.m), None)
    expected = cube[:, :, 0, :]
    output = cube.crop(point)
    helpers.assert_cubes_equal(output, expected)

def test_crop_1d_independent(ndcube_4d_ln_lt_l_t):
    cube_1d = ndcube_4d_ln_lt_l_t[0, 0, :, 0]
    wl_range = SpectralCoord([3e-11, 4.5e-11], unit=u.m)
    expected = cube_1d[0:2]
    output = cube_1d.crop([wl_range[0]], [wl_range[-1]])
    helpers.assert_cubes_equal(output, expected)

def spectralcoord_from_value(value):
    return SpectralCoord(value, observer=observer, target=target, **kwargs)

def spectralcoord_from_value(value):
    if isinstance(value, SpectralCoord):
        return value
    return SpectralCoord(value, observer=observer, target=target, **kwargs)
