def calcul(self, _):
    # Clear the results display
    self.resultat.Clear()

    # Main computation
    etoile_cible, observatoire, date_obs, criteres = self.lecture_saisies()
    altaz = AltAz(obstime=date_obs, location=observatoire,
                  pressure=101300 * u.Pa, temperature=10 * u.deg_C,
                  relative_humidity=60 * u.pct)
    etoile_cible['altaz'] = etoile_cible['equat'].transform_to(altaz)
    self.resultat.AppendText(
        'Cible : {0} ({1}), altitude={2}, azimuth={3}, masse d\'air={4:.2f}\n'
        .format(
            etoile_cible['equat'].to_string(style='hmsdms', precision=0, fields=2),
            get_constellation(etoile_cible['equat'], short_name=True),
            etoile_cible['altaz'].alt.to_string(sep='dms', fields=1),
            etoile_cible['altaz'].az.to_string(sep='dms', fields=1),
            etoile_cible['altaz'].secz))
    self.resultat.AppendText(
        'Observatoire : lat={0}, lon={1}, alt={2:.0f}, t={3}\n'.format(
            observatoire.lat.to_string(fields=2, precision=0),
            observatoire.lon.to_string(fields=2, precision=0),
            observatoire.height,
            str(date_obs).replace('.000', '') + ' UTC'))
    # if etoile_cible['altaz'].alt < 0.0:
    #     self.boite_erreur('Etoile cible sous l\'horizon')
    #     return
    self.resultat.Update()

    # Plot air mass and altitude as a function of time
    self.sortie_graphique(etoile_cible, date_obs, observatoire)
    return
def constellations_in_area(
    min_longitude, max_longitude, min_latitude, max_latitude, nsamples=1000
):
    """Generates a list of all constellations that overlap with the given area.

    Uses a simple Monte Carlo strategy.
    """
    constellations = {}
    sum_weight = 0
    for i in range(nsamples):
        # Generate a random longitude and latitude pair inside the bounding box
        longitude = min_longitude + (max_longitude - min_longitude) * random.random()
        latitude = min_latitude + (max_latitude - min_latitude) * random.random()

        # Retrieve the short name of the constellation for that coordinate
        constellation = get_constellation(
            SkyCoordDeg(longitude, latitude), short_name=True
        )

        # Use the cosine of the latitude as weight, so that high-latitude points do not dominate
        weight = math.cos(math.radians(latitude))

        # Update the constellation dict and sum_weight
        if constellation not in constellations:
            constellations[constellation] = weight
        else:
            constellations[constellation] += weight
        sum_weight += weight

    # Sort the constellation dict by weight and return the constellation names
    # in order of decreasing estimated area
    res = sorted([(v / sum_weight, k) for k, v in constellations.items()], reverse=True)
    return [x[1] for x in res]
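# A minimal usage sketch of the function above. It assumes `random`, `math`, and
# astropy's `get_constellation` are imported as in the snippet; `SkyCoordDeg` is the
# module's degree-based SkyCoord helper, re-created here (as an assumption) so the
# sketch is self-contained.
from astropy import units as u
from astropy.coordinates import SkyCoord, get_constellation
import math
import random

def SkyCoordDeg(lon, lat):
    # Assumed stand-in for the module's helper: an ICRS coordinate built from degrees.
    return SkyCoord(ra=lon * u.deg, dec=lat * u.deg, frame="icrs")

# Constellations overlapping a field near the north celestial pole (RA 0-20 deg, Dec 70-90 deg).
print(constellations_in_area(0, 20, 70, 90, nsamples=500))
# e.g. short names such as 'UMi', 'Cep', 'Cas'; order and content vary with the sampled points.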
def get_random_object(max_star_fraction=0.1):
    """
    Choose random RA/DEC and select nearest SIMBAD source,
    biasing against boring-looking stars.

    Returns a human-readable txt string and object RA/DEC.
    """
    customSimbad = Simbad()
    customSimbad.add_votable_fields('otype(V)', 'coo(d)')
    coo = coord.SkyCoord(random()*360, random()*180-90, unit='deg')
    results = customSimbad.query_region(coo, radius='1 deg')
    for res in results:
        obj_name = ' '.join(res['MAIN_ID'].split())
        obj_type = res['OTYPE_V']
        # Skip most plain stars so more interesting object types are favoured
        if 'star' in obj_type.lower() and random() > max_star_fraction:
            continue
        a_an = 'an' if obj_type[0].upper() in ('X', 'A', 'E', 'I', 'O', 'U') else 'a'
        obj_coo = coord.SkyCoord(res['RA_d'], res['DEC_d'], unit='deg')
        constellation = coord.get_constellation(obj_coo)
        greeting = greetings[randint(0, len(greetings)-1)]
        txt_str = greeting + " %s is %s %s in the constellation %s. More: %s%s" % \
            (obj_name, a_an, obj_type, constellation, more_url, urlencode({'Ident': obj_name}))
        return obj_name, txt_str, res['RA_d'], res['DEC_d']
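# A hedged usage sketch for get_random_object(). The function relies on module-level
# `greetings` and `more_url` globals that are not shown in the snippet; the values
# below are hypothetical placeholders, and the imports mirror what the function uses.
from random import random, randint
from urllib.parse import urlencode
from astropy import coordinates as coord
from astroquery.simbad import Simbad

greetings = ["Hello!", "Greetings!"]                       # hypothetical
more_url = "http://simbad.u-strasbg.fr/simbad/sim-id?"     # hypothetical

name, txt, ra_d, dec_d = get_random_object()
print(txt)  # e.g. "Hello! <name> is a <type> in the constellation <constellation>. More: <URL>"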
def get_data(self):
    """
    Update and store the list of tonight's constellations, based on the user's location.
    Uses a matrix of points on the sky to retrieve the constellations they are located in.
    """
    self.constellations = list(set(get_constellation(self.dome)))
    self.constellations.sort()

    return self.constellations
def print_results(tic=12350, simbad_search=False, data_search=False):

    target = Target(tic)
    catalogData = target.query()[0]

    catalogData['ra'] = catalogData['ra'].round(5)
    catalogData['dec'] = catalogData['dec'].round(5)
    catalogData['eclong'] = catalogData['eclong'].round(5)
    catalogData['eclat'] = catalogData['eclat'].round(5)
    catalogData['pmRA'] = catalogData['pmRA'].round(2)
    catalogData['pmDEC'] = catalogData['pmDEC'].round(2)
    catalogData['Tmag'] = catalogData['Tmag'].round(2)
    catalogData['Vmag'] = catalogData['Vmag'].round(2)
    catalogData['Kmag'] = catalogData['Kmag'].round(2)

    print(catalogData[['ID', 'ra', 'dec', 'pmRA', 'pmDEC', 'eclong', 'eclat',
                       'Tmag', 'Vmag', 'Kmag', 'Teff', 'rad', 'mass', 'd',
                       ]])

    if simbad_search:
        skobj = SkyCoord(ra=catalogData['ra'] * u.degree,
                         dec=catalogData['dec'] * u.degree,
                         frame='icrs')

        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            customSimbad = Simbad()
            customSimbad.add_votable_fields(
                'ra(2;A;ICRS;J2000;2000)', 'dec(2;D;ICRS;J2000;2000)')
            customSimbad.remove_votable_fields('coordinates')

            # try different search radii, be fast if possible
            for i in [5, 10, 20]:
                result_table = customSimbad.query_region(
                    skobj, radius=i * u.arcsec)
                if result_table is None:
                    continue
                else:
                    break

        if result_table is None:
            logger.warning("No Simbad target resolved")
        else:
            print()
            print('Target name: {}'.format(result_table['MAIN_ID'][0]))
            print("The target is in constellation {}".format(get_constellation(skobj)))

    if data_search:
        obs_sectors = target.get_obs()
        obs2, obsffi, obs20 = obs_sectors

        print(f'FFI data at MAST for sectors: {sorted(list(set(obsffi)))}')
        print(f'2-min data at MAST for sectors: {sorted(list(set(obs2)))}')
        print(f'20-s data at MAST for sectors: {sorted(list(set(obs20)))}')
        print()
def getConstellation(self):
    """
    Return the IAU constellation containing this object's right ascension and declination.
    """
    ra = self.getRightAscension()
    dec = self.getDeclination()
    ra2 = ra.split(" ")
    dec2 = dec.split(" ")
    ra = ra2[0] + "h" + ra2[1] + "m" + ra2[2] + "s"
    dec = dec2[0] + "d" + dec2[1] + "m" + dec2[2] + "s"
    constellation = SkyCoord(ra, dec, frame='icrs')
    return get_constellation(constellation)
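# Design note: if the stored RA/Dec strings are space-separated sexagesimal values,
# the same result can usually be obtained without rebuilding "h/m/s" strings, because
# SkyCoord accepts explicit units. A minimal sketch with hypothetical coordinate
# strings in the same format as getRightAscension()/getDeclination():
from astropy import units as u
from astropy.coordinates import SkyCoord, get_constellation

ra, dec = "5 34 31.9", "22 00 52"   # hypothetical example values (RA in hours, Dec in degrees)
coords = SkyCoord(ra, dec, unit=(u.hourangle, u.deg), frame="icrs")
print(get_constellation(coords))    # -> "Taurus" for these example coordinates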
def show_SDSS_fcoords(obj_coords_lst):
    try:
        # Get object RA & DEC
        obj_ra = obj_coords_lst[0]
        obj_dec = obj_coords_lst[1]

        # Get coords string
        coords = ' '.join(convert_to_deg(obj_ra, obj_dec))

        # Get SDSS cutout
        get_SDDS_image(convert_to_deg(obj_ra, obj_dec))

        # Get constellation
        constellation = coord.get_constellation(SkyCoord(coords, frame='icrs', unit=(u.deg)))

        # Generate result string.
        object_string = "Showing <b>DSS</b> image for:\n○ RA: {} \n○ DEC: {} \n \nIn the constellation of <b>{}</b>.".format(obj_ra, obj_dec, constellation)

        return True, object_string
    except Exception:
        return False, "Coordinates could not be found."
def find_object_coords_fname(object_name):
    # Query Simbad for the object.
    result_table = Simbad.query_object(object_name)
    try:
        # Extract RA & DEC
        obj_ra = result_table['RA'][0].replace(" ", ":")
        obj_dec = result_table['DEC'][0].replace(" ", ":")

        # Generate coords string
        coords = ' '.join(convert_to_deg(obj_ra, obj_dec))

        # Get image for coords
        get_SDDS_image(convert_to_deg(obj_ra, obj_dec))

        # Get constellation for coords
        constellation = coord.get_constellation(SkyCoord(coords, frame='icrs', unit=(u.deg)))

        # Generate string with RA, DEC, and constellation
        object_at_string = "Object <b>{}</b> is at:\n○ RA: {} \n○ DEC: {} \n \nIn the constellation of <b>{}</b>.".format(object_name, obj_ra, obj_dec, constellation)

        return True, object_at_string
    except Exception:
        return False, "Object could not be found."
def print_results(tic=12350):
    print_str = ""
    try:
        tic = int(tic)
    except ValueError:
        return (None, "Not a valid TIC number")

    target = Target(tic)
    catalogData = target.query().to_pandas()

    catalogData['ra'] = catalogData['ra'].round(5)
    catalogData['dec'] = catalogData['dec'].round(5)
    catalogData['eclong'] = catalogData['eclong'].round(5)
    catalogData['eclat'] = catalogData['eclat'].round(5)
    catalogData['pmRA'] = catalogData['pmRA'].round(2)
    catalogData['pmDEC'] = catalogData['pmDEC'].round(2)
    catalogData['Tmag'] = catalogData['Tmag'].round(2)
    catalogData['Vmag'] = catalogData['Vmag'].round(2)
    catalogData['Kmag'] = catalogData['Kmag'].round(2)

    output_table = catalogData[[
        'ID', 'ra', 'dec', 'pmRA', 'pmDEC', 'eclong', 'eclat',
        'Tmag', 'Vmag', 'Kmag', 'Teff', 'rad', 'mass', 'd',
    ]].iloc[0:1]

    skobj = SkyCoord(ra=catalogData['ra'] * u.degree,
                     dec=catalogData['dec'] * u.degree,
                     frame='icrs')

    with warnings.catch_warnings():
        warnings.simplefilter('ignore')
        customSimbad = Simbad()
        customSimbad.add_votable_fields('ra(2;A;ICRS;J2000;2000)',
                                        'dec(2;D;ICRS;J2000;2000)')
        customSimbad.remove_votable_fields('coordinates')

        # try different search radii, be fast if possible
        for i in [5, 10, 20]:
            result_table = customSimbad.query_region(skobj, radius=i * u.arcsec)
            if result_table is None:
                continue
            else:
                break

    if result_table is None:
        print_str += "\n\n"
        print_str += "No Simbad target resolved\n\n"
    else:
        # print_str += "Target name: {}\n\n".format(
        #     result_table['MAIN_ID'][0])
        # print_str += "Target name: <a href=\"{1}{2}\">{0}</a>\n\n".format(
        #     result_table['MAIN_ID'][0],
        #     "http://simbad.u-strasbg.fr/simbad/sim-id?",
        #     urlencode({"Ident": result_table['MAIN_ID'][0]}))
        print_str += "Target name: [{0}]({1}{2})\n\n".format(
            result_table['MAIN_ID'][0],
            "http://simbad.u-strasbg.fr/simbad/sim-id?",
            urlencode({"Ident": result_table['MAIN_ID'][0]}))
        print_str += "The target is in constellation {}\n\n".format(
            get_constellation(skobj)[0])

    obs_sectors = target.get_obs()
    obs2, obsffi, obs20 = obs_sectors

    print_str += 'FFI data at MAST for sectors: {}\n\n'.format(
        str(sorted(list(set(obsffi)))).replace("[", r"\[").replace("]", r"\]"))
    print_str += '2-min data at MAST for sectors: {}\n\n'.format(
        str(sorted(list(set(obs2)))).replace("[", r"\[").replace("]", r"\]"))
    print_str += '20-s data at MAST for sectors: {}\n\n'.format(
        str(sorted(list(set(obs20)))).replace("[", r"\[").replace("]", r"\]"))

    return output_table, print_str
def __init__(self, params, print_df=True, print_help=False):
    """
    Get some info about a star from some other info.

    Args (unpacked from params):
        stellar_type (str): What kind of star it is (i.e. G2V)
        position (tuple of strs): RA, dec values in a format understood
                                  by astropy.coordinates.Angle
        parallax (float): parallax angle, in arcsecs
        proper_motion (tuple of floats): proper motion in RA/dec, in mas/year.
        rv (float): radial velocity, in km/s
    """
    stellar_type, position, parallax, proper_motion, v_radial = params
    self.init_params = params
    self.stellar_type = stellar_type
    self.proper_motion = proper_motion   # [mas/year, mas/year]
    self.distance = 1 / parallax         # parsecs
    self.parallax = parallax             # arcsecs
    self.position = position             # [hms, dms]
    self.v_radial = v_radial             # km/s
    self.galactic_coords = radec_to_galactic(self.position)  # degrees

    # Proper motion, described in Cartesian components
    self.pm_dec = self.proper_motion[1]
    # No cos(dec) scaling here: the RA component is assumed to already be
    # mu_alpha* = mu_alpha * cos(dec), in mas/year
    self.pm_ra = self.proper_motion[0]

    # Proper motion, described in angular components
    self.pm_mag = np.sqrt(self.pm_ra**2 + self.pm_dec**2)  # mas/year
    # PA = angle east of north; arctan2 keeps the correct quadrant
    self.pm_posang = round(np.arctan2(self.pm_ra, self.pm_dec), 4)  # radians

    # v_t [km/s] = 4.74 * mu [arcsec/yr] * d [pc]; pm_mag is in mas/yr, hence the /1000
    self.v_transverse = 4.74 * (self.pm_mag / 1000) * self.distance  # km/s
    # Space velocity is the third leg of the v_trans/v_rad triangle.
    self.v_space = np.sqrt(self.v_transverse**2 + self.v_radial**2)

    star_obj = SkyCoord(Angle(position[0]), Angle(position[1]), frame='icrs')
    self.constellation = get_constellation(star_obj)

    self.d_from_GC = self.distance_to_galactic_center()  # parsecs
    self.closer = self.d_from_GC < d_sun_GC

    d = [{'Name': 'Stellar Type', 'Value': self.stellar_type, 'units': 'N/A'},
         {'Name': 'Distance', 'Value': self.distance, 'units': 'parsec'},
         {'Name': 'Parallax', 'Value': self.parallax, 'units': 'arcsecs'},
         {'Name': 'Position', 'Value': self.position, 'units': '[hms, dms]'},
         {'Name': 'Galactic Coordinates', 'Value': self.galactic_coords, 'units': 'degrees'},
         {'Name': 'Proper Motion (RA)', 'Value': self.pm_ra, 'units': 'mas/year'},
         {'Name': 'Proper Motion (Dec)', 'Value': self.pm_dec, 'units': 'mas/year'},
         {'Name': 'Proper Motion Magnitude', 'Value': self.pm_mag, 'units': 'mas/year'},
         {'Name': 'Proper Motion Position Angle', 'Value': self.pm_posang, 'units': 'radians'},
         {'Name': 'Radial Velocity', 'Value': self.v_radial, 'units': 'km/s'},
         {'Name': 'Transverse Velocity', 'Value': self.v_transverse, 'units': 'km/s'},
         {'Name': 'Space Velocity', 'Value': self.v_space, 'units': 'km/s'},
         {'Name': 'Host Constellation', 'Value': self.constellation, 'units': 'N/A'},
         {'Name': 'Distance from Galactic Center', 'Value': self.d_from_GC, 'units': 'parsecs'},
         {'Name': 'Closer than Sun to GC?', 'Value': self.closer, 'units': 'N/A'}]

    self.full_param_df = pd.DataFrame(d)

    if print_help:
        print(getdoc(self), '\n\n')

    if print_df:
        print(self.full_param_df)
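# A small worked check of the velocity relations used above, with hypothetical values
# (labelled as such); it only restates the 4.74 transverse-velocity formula and the
# Pythagorean combination with the radial velocity.
import numpy as np

parallax = 0.1                        # arcsec (hypothetical) -> d = 10 pc
distance = 1 / parallax               # 10 pc
pm_mag_mas = 500.0                    # mas/yr (hypothetical)
pm_mag_arcsec = pm_mag_mas / 1000.0   # 0.5 arcsec/yr

# v_t [km/s] = 4.74 * mu [arcsec/yr] * d [pc]
v_transverse = 4.74 * pm_mag_arcsec * distance
print(round(v_transverse, 2))         # 23.7 km/s

v_radial = 10.0                       # km/s (hypothetical)
v_space = np.sqrt(v_transverse**2 + v_radial**2)
print(round(v_space, 1))              # ~25.7 km/s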
def calcul(self, _):
    # Clear the results display
    self.resultat.Clear()

    # Main computation
    etoile_cible, observatoire, date_obs, criteres = self.lecture_saisies()
    altaz = AltAz(obstime=date_obs, location=observatoire,
                  pressure=101300 * u.Pa, temperature=10 * u.deg_C,
                  relative_humidity=60 * u.pct)
    etoile_cible['altaz'] = etoile_cible['equat'].transform_to(altaz)
    self.resultat.AppendText(
        'Cible : {0} ({1}), altitude={2}, azimuth={3}, masse d\'air={4:.2f}\n'
        .format(
            etoile_cible['equat'].to_string(style='hmsdms', precision=0, fields=2),
            get_constellation(etoile_cible['equat'], short_name=True),
            etoile_cible['altaz'].alt.to_string(sep='dms', fields=1),
            etoile_cible['altaz'].az.to_string(sep='dms', fields=1),
            etoile_cible['altaz'].secz))
    self.resultat.AppendText(
        'Observatoire : lat={0}, lon={1}, alt={2:.0f}, t={3}\n'.format(
            observatoire.lat.to_string(fields=2, precision=0),
            observatoire.lon.to_string(fields=2, precision=0),
            observatoire.height,
            str(date_obs).replace('.000', '') + ' UTC'))
    if etoile_cible['altaz'].alt < 0.0:
        self.boite_erreur('Etoile cible sous l\'horizon')
        return
    etoiles = self.generation_liste(etoile_cible, altaz, criteres)
    if not etoiles:
        self.boite_erreur(
            'Erreur dans le fichier de la base de données ou dans son traitement'
        )
        return

    # Select the closest stars
    selection = []
    for num, etoile in enumerate(etoiles):
        if etoile is not None \
                and etoile['distance'] <= criteres['sep_max'] \
                and float(etoile['EB-V']) < criteres['ecart_bv_max']:
            selection.append(etoile)

    # Sort by altitude difference:
    selection = sorted(selection,
                       key=lambda item: math.fabs(item['dhauteur'].degree))

    # Output the results
    self.resultat.AppendText(
        '\n\nNum Nom\tSép.\tMagV\tCoordonnées\tH. Dif haut\tB-V\tDif B-V\tType\tM. Air\tMiles\n'
    )
    ligne = 1
    for etoile in selection:
        if etoile['Sp'] in type_pickles:
            self.resultat.SetForegroundColour(couleur_resultat_pickles)
        if etoile['Miles']:
            self.resultat.SetForegroundColour(couleur_resultat_miles)
        self.resultat.AppendText(
            self.formatage_sortie_etoile(ligne, etoile))
        self.resultat.SetForegroundColour(
            couleur_resultat_defaut)  # For the next line
        ligne += 1
    self.resultat.Update()

    # Plot air mass and altitude as a function of time
    self.affichage_masse_air(etoile_cible, selection, date_obs, observatoire)
    return
def plot(obs_parameters='', n=0, m=0, f_rest=0, slope_correction=False, dB=False, vlsr=False,
         meta=False, avg_ylim=[0,0], cal_ylim=[0,0], rfi=[], xlim=[0,0], ylim=[0,0], dm=0,
         obs_file='observation.dat', cal_file='', waterfall_fits='', spectra_csv='',
         power_csv='', plot_file='plot.png'):
    '''
    Process, analyze and plot data.

    Args:
        obs_parameters: dict. Observation parameters (identical to parameters used to acquire data)
            dev_args: string. Device arguments (gr-osmosdr)
            rf_gain: float. RF gain
            if_gain: float. IF gain
            bb_gain: float. Baseband gain
            frequency: float. Center frequency [Hz]
            bandwidth: float. Instantaneous bandwidth [Hz]
            channels: int: Number of frequency channels (FFT size)
            t_sample: float: Integration time per FFT sample
            duration: float: Total observing duration [sec]
            loc: string: latitude, longitude, and elevation of observation (float, separated by spaces)
            ra_dec: string: right ascension and declination of observation target (float, separated by space)
            az_alt: string: azimuth and altitude of observation target (float, separated by space; takes precedence over ra_dec)
        n: int. Median filter factor (spectrum)
        m: int. Median filter factor (time series)
        f_rest: float. Spectral line reference frequency used for radial velocity (Doppler shift) calculations [Hz]
        slope_correction: bool. Correct slope in poorly-calibrated spectra using linear regression
        dB: bool. Display data in decibel scaling
        vlsr: bool. Display graph in VLSR frame of reference
        meta: bool. Display header with date, time, and target
        rfi: list. Blank frequency channels contaminated with RFI ([low_frequency, high_frequency]) [Hz]
        avg_ylim: list. Averaged plot y-axis limits ([low, high])
        cal_ylim: list. Calibrated plot y-axis limits ([low, high])
        xlim: list. x-axis limits ([low_frequency, high_frequency]) [Hz]
        ylim: list. y-axis limits ([start_time, end_time]) [s]
        dm: float. Dispersion measure for dedispersion [pc/cm^3]
        obs_file: string. Input observation filename (generated with virgo.observe)
        cal_file: string. Input calibration filename (generated with virgo.observe)
        waterfall_fits: string. Output FITS filename
        spectra_csv: string. Output CSV filename (spectra)
        power_csv: string. Output CSV filename (time series)
        plot_file: string. Output plot filename
    '''
    import matplotlib
    matplotlib.use('Agg')  # Try commenting this line if you run into display/rendering errors
    import matplotlib.pyplot as plt
    from matplotlib.gridspec import GridSpec

    plt.rcParams['legend.fontsize'] = 14
    plt.rcParams['axes.labelsize'] = 14
    plt.rcParams['axes.titlesize'] = 18
    plt.rcParams['xtick.labelsize'] = 12
    plt.rcParams['ytick.labelsize'] = 12

    def decibel(x):
        if dB:
            return 10.0*np.log10(x)
        return x

    def shift(phase_num, n_rows):
        waterfall[:, phase_num] = np.roll(waterfall[:, phase_num], -n_rows)

    def SNR(spectrum, mask=np.array([])):
        '''Signal-to-Noise Ratio estimator, with optional masking.
        If mask not given, then all channels will be used to estimate noise
        (will drastically underestimate S:N - not robust to outliers!)'''
        if mask.size == 0:
            mask = np.zeros_like(spectrum)

        noise = np.nanstd((spectrum[2:]-spectrum[:-2])[mask[1:-1] == 0])/np.sqrt(2)
        background = np.nanmean(spectrum[mask == 0])

        return (spectrum-background)/noise

    def best_fit(power):
        '''Compute best Gaussian fit'''
        avg = np.nanmean(power)
        var = np.var(power)

        gaussian_fit_x = np.linspace(np.min(power), np.max(power), 100)
        gaussian_fit_y = 1.0/np.sqrt(2*np.pi*var)*np.exp(-0.5*(gaussian_fit_x-avg)**2/var)

        return [gaussian_fit_x, gaussian_fit_y]

    # Load observation parameters from dictionary argument/header file
    if obs_parameters != '':
        frequency = obs_parameters['frequency']
        bandwidth = obs_parameters['bandwidth']
        channels = obs_parameters['channels']
        t_sample = obs_parameters['t_sample']
        loc = obs_parameters['loc']
        ra_dec = obs_parameters['ra_dec']
        az_alt = obs_parameters['az_alt']
    else:
        header_file = '.'.join(obs_file.split('.')[:-1])+'.header'

        warnings.warn('No observation parameters passed. Attempting to load from header file ('+header_file+')...')

        with open(header_file, 'r') as f:
            headers = [parameter.rstrip('\n') for parameter in f.readlines()]

        for i in range(len(headers)):
            if 'mjd' in headers[i]:
                mjd = float(headers[i].strip().split('=')[1])
            elif 'frequency' in headers[i]:
                frequency = float(headers[i].strip().split('=')[1])
            elif 'bandwidth' in headers[i]:
                bandwidth = float(headers[i].strip().split('=')[1])
            elif 'channels' in headers[i]:
                channels = int(headers[i].strip().split('=')[1])
            elif 't_sample' in headers[i]:
                t_sample = float(headers[i].strip().split('=')[1])
            elif 'loc' in headers[i]:
                loc = tuple(map(float, headers[i].strip().split('=')[1].split(' ')))
            elif 'ra_dec' in headers[i]:
                ra_dec = tuple(map(str, headers[i].split('=')[1].split(' ')))
            elif 'az_alt' in headers[i]:
                az_alt = tuple(map(float, headers[i].split('=')[1].split(' ')))

    # Transform frequency axis limits to MHz
    xlim = [x / 1e6 for x in xlim]

    # Transform to VLSR
    if vlsr:
        from astropy import units as u
        from astropy.coordinates import SpectralCoord, EarthLocation, SkyCoord
        from astropy.time import Time

        obs_location = EarthLocation.from_geodetic(loc[0], loc[1], loc[2])
        obs_time = obs_location.get_itrs(obstime=Time(str(mjd), format='mjd', scale='utc'))
        if az_alt != '':
            obs_coord = SkyCoord(az=az_alt[0]*u.degree, alt=az_alt[1]*u.degree, frame='altaz',
                                 location=obs_location,
                                 obstime=Time(str(mjd), format='mjd', scale='utc'))
            obs_coord = obs_coord.icrs
            print(obs_coord)
        else:
            obs_coord = SkyCoord(ra=ra_dec[0]*u.degree, dec=ra_dec[1]*u.degree, frame='icrs')

        # Transform center frequency
        frequency = SpectralCoord(frequency * u.MHz, observer=obs_time, target=obs_coord)
        frequency = frequency.with_observer_stationary_relative_to('lsrk')
        frequency = frequency.quantity.value

    # Define Radial Velocity axis limits
    left_velocity_edge = -299792.458*(bandwidth-2*frequency+2*f_rest)/(bandwidth-2*frequency)
    right_velocity_edge = 299792.458*(-bandwidth-2*frequency+2*f_rest)/(bandwidth+2*frequency)

    # Transform sampling time to number of bins
    bins = int(t_sample*bandwidth/channels)

    # Load observation & calibration data
    offset = 1
    waterfall = offset*np.fromfile(obs_file, dtype='float32').reshape(-1, channels)/bins

    # Delete first 3 rows (potentially containing outlier samples)
    waterfall = waterfall[3:, :]

    # Mask RFI-contaminated channels
    if rfi != []:
        for j in range(len(rfi)):
            # Frequency to channel transformation
            current_rfi = rfi[j]
            rfi_lo = channels*(current_rfi[0] - (frequency - bandwidth/2))/bandwidth
            rfi_hi = channels*(current_rfi[1] - (frequency - bandwidth/2))/bandwidth

            # Blank channels
            for i in range(int(rfi_lo), int(rfi_hi)):
                waterfall[:, i] = np.nan

    if cal_file != '':
        waterfall_cal = offset*np.fromfile(cal_file, dtype='float32').reshape(-1, channels)/bins

        # Delete first 3 rows (potentially containing outlier samples)
        waterfall_cal = waterfall_cal[3:, :]

        # Mask RFI-contaminated channels
        if rfi != []:
            for j in range(len(rfi)):
                # Frequency to channel transformation
                current_rfi = rfi[j]
                rfi_lo = channels*(current_rfi[0] - (frequency - bandwidth/2))/bandwidth
                rfi_hi = channels*(current_rfi[1] - (frequency - bandwidth/2))/bandwidth

                # Blank channels
                for i in range(int(rfi_lo), int(rfi_hi)):
                    waterfall_cal[:, i] = np.nan

    # Compute average spectra
    with warnings.catch_warnings():
        warnings.filterwarnings(action='ignore', message='Mean of empty slice')
        avg_spectrum = decibel(np.nanmean(waterfall, axis=0))
        if cal_file != '':
            avg_spectrum_cal = decibel(np.nanmean(waterfall_cal, axis=0))

    # Number of sub-integrations
    subs = waterfall.shape[0]

    # Compute Time axis
    t = t_sample*np.arange(subs)

    # Compute Frequency axis; convert Hz to MHz
    frequency = np.linspace(frequency-0.5*bandwidth, frequency+0.5*bandwidth,
                            channels, endpoint=False)*1e-6

    # Perform de-dispersion
    if dm != 0:
        deltaF = float(np.max(frequency)-np.min(frequency))/subs
        f_start = np.min(frequency)
        for t_bin in range(subs):
            f_chan = f_start+t_bin*deltaF
            deltaT = 4149*dm*((1/(f_chan**2))-(1/(np.max(frequency)**2)))
            n = int((float(deltaT)/(float(1)/channels)))
            shift(t_bin, n)

    # Define array for Time Series plot
    power = decibel(np.nanmean(waterfall, axis=1))

    # Apply Mask
    mask = np.zeros_like(avg_spectrum)
    mask[np.logical_and(frequency > f_rest*1e-6-0.2, frequency < f_rest*1e-6+0.8)] = 1  # Margins OK for galactic HI

    # Define text offset for axvline text label
    text_offset = 0

    # Calibrate Spectrum
    if cal_file != '':
        if dB:
            spectrum = 10**((avg_spectrum-avg_spectrum_cal)/10)
        else:
            spectrum = avg_spectrum/avg_spectrum_cal

        spectrum = SNR(spectrum, mask)
        if slope_correction:
            idx = np.isfinite(frequency) & np.isfinite(spectrum)
            fit = np.polyfit(frequency[idx], spectrum[idx], 1)
            ang_coeff = fit[0]
            intercept = fit[1]
            fit_eq = ang_coeff*frequency + intercept
            spectrum = SNR(spectrum-fit_eq, mask)

        # Mitigate RFI (Frequency Domain)
        if n != 0:
            spectrum_clean = SNR(spectrum.copy(), mask)
            for i in range(0, int(channels)):
                spectrum_clean[i] = np.nanmedian(spectrum_clean[i:i+n])

        # Apply position offset for Spectral Line label
        text_offset = 60

    # Mitigate RFI (Time Domain)
    if m != 0:
        power_clean = power.copy()
        for i in range(0, int(subs)):
            power_clean[i] = np.nanmedian(power_clean[i:i+m])

    # Write Waterfall to file (FITS)
    if waterfall_fits != '':
        from astropy.io import fits

        # Load data
        hdu = fits.PrimaryHDU(waterfall)

        # Prepare FITS headers
        hdu.header['NAXIS'] = 2
        hdu.header['NAXIS1'] = channels
        hdu.header['NAXIS2'] = subs
        hdu.header['CRPIX1'] = channels/2
        hdu.header['CRPIX2'] = subs/2
        hdu.header['CRVAL1'] = frequency[int(channels/2)]
        hdu.header['CRVAL2'] = t[int(subs/2)]
        hdu.header['CDELT1'] = bandwidth*1e-6/channels
        hdu.header['CDELT2'] = t_sample
        hdu.header['CTYPE1'] = 'Frequency (MHz)'
        hdu.header['CTYPE2'] = 'Relative Time (s)'
        try:
            hdu.header['MJD-OBS'] = mjd
        except NameError:
            warnings.warn('Observation MJD could not be found and will not be part of the FITS header.')
            pass

        # Delete pre-existing FITS file
        try:
            os.remove(waterfall_fits)
        except OSError:
            pass

        # Write to file
        hdu.writeto(waterfall_fits)

    # Write Spectra to file (csv)
    if spectra_csv != '':
        if cal_file != '':
            np.savetxt(spectra_csv,
                       np.concatenate((frequency.reshape(channels, 1),
                                       avg_spectrum.reshape(channels, 1),
                                       avg_spectrum_cal.reshape(channels, 1),
                                       spectrum.reshape(channels, 1)), axis=1),
                       delimiter=',', fmt='%1.6f')
        else:
            np.savetxt(spectra_csv,
                       np.concatenate((frequency.reshape(channels, 1),
                                       avg_spectrum.reshape(channels, 1)), axis=1),
                       delimiter=',', fmt='%1.6f')

    # Write Time Series to file (csv)
    if power_csv != '':
        np.savetxt(power_csv,
                   np.concatenate((t.reshape(subs, 1),
                                   power.reshape(subs, 1)), axis=1),
                   delimiter=',', fmt='%1.6f')

    # Initialize plot
    if cal_file != '':
        fig = plt.figure(figsize=(27, 15))
        gs = GridSpec(2, 3)
    else:
        fig = plt.figure(figsize=(21, 15))
        gs = GridSpec(2, 2)

    if meta:
        from astropy.coordinates import get_constellation

        epoch = (mjd - 40587) * 86400.0
        meta_title = 'Date and Time: ' + time.strftime('%Y-%m-%d %H:%M:%S %Z', time.localtime(epoch)) + ' '
        meta_title += 'Target: ' + obs_coord.to_string('hmsdms', precision=0) + ' in ' + get_constellation(obs_coord) + '\n'
        plt.suptitle(meta_title, fontsize=18)

    # Plot Average Spectrum
    ax1 = fig.add_subplot(gs[0, 0])
    ax1.plot(frequency, avg_spectrum)
    if xlim == [0,0]:
        ax1.set_xlim(np.min(frequency), np.max(frequency))
    else:
        ax1.set_xlim(xlim[0], xlim[1])
    ax1.ticklabel_format(useOffset=False)
    ax1.set_xlabel('Frequency (MHz)')
    if avg_ylim != [0,0]:
        ax1.set_ylim(avg_ylim[0], avg_ylim[1])
    if dB:
        ax1.set_ylabel('Relative Power (dB)')
    else:
        ax1.set_ylabel('Relative Power')
    if vlsr:
        cal_title = r'$Average\ Spectrum\ (V_{LSR})$'
    else:
        cal_title = 'Average Spectrum'
    if f_rest != 0:
        cal_title += '\n'
    ax1.set_title(cal_title)
    ax1.grid()

    if xlim == [0,0] and f_rest != 0:
        # Add secondary axis for Radial Velocity
        ax1_secondary = ax1.twiny()
        ax1_secondary.set_xlabel('Radial Velocity (km/s)', labelpad=5)
        ax1_secondary.axvline(x=0, color='brown', linestyle='--', linewidth=2, zorder=0)
        ax1_secondary.annotate('Spectral Line\nRest Frequency', xy=(460-text_offset, 5),
                               xycoords='axes points', size=14, ha='left', va='bottom', color='brown')
        ax1_secondary.set_xlim(left_velocity_edge, right_velocity_edge)
        ax1_secondary.tick_params(axis='x', direction='in', pad=-22)

    # Plot Calibrated Spectrum
    if cal_file != '':
        ax2 = fig.add_subplot(gs[0, 1])
        ax2.plot(frequency, spectrum, label='Raw Spectrum')
        if n != 0:
            ax2.plot(frequency, spectrum_clean, color='orangered', label='Median (n = '+str(n)+')')
        if cal_ylim != [0,0]:
            ax2.set_ylim(cal_ylim[0], cal_ylim[1])
        else:
            ax2.set_ylim()
        if xlim == [0,0]:
            ax2.set_xlim(np.min(frequency), np.max(frequency))
        else:
            ax2.set_xlim(xlim[0], xlim[1])
        ax2.ticklabel_format(useOffset=False)
        ax2.set_xlabel('Frequency (MHz)')
        ax2.set_ylabel('Signal-to-Noise Ratio (S/N)')
        if vlsr:
            cal_title = r'$Calibrated\ Spectrum\ (V_{LSR})$' + '\n'
        else:
            cal_title = 'Calibrated Spectrum\n'
        if f_rest != 0:
            ax2.set_title(cal_title)
        else:
            ax2.set_title('Calibrated Spectrum')
        if n != 0:
            if f_rest != 0:
                ax2.legend(bbox_to_anchor=(0.002, 0.96), loc='upper left')
            else:
                ax2.legend(loc='upper left')

        if xlim == [0,0] and f_rest != 0:
            # Add secondary axis for Radial Velocity
            ax2_secondary = ax2.twiny()
            ax2_secondary.set_xlabel('Radial Velocity (km/s)', labelpad=5)
            ax2_secondary.axvline(x=0, color='brown', linestyle='--', linewidth=2, zorder=0)
            ax2_secondary.annotate('Spectral Line\nRest Frequency', xy=(400, 5),
                                   xycoords='axes points', size=14, ha='left', va='bottom', color='brown')
            ax2_secondary.set_xlim(left_velocity_edge, right_velocity_edge)
            ax2_secondary.tick_params(axis='x', direction='in', pad=-22)
        ax2.grid()

    # Plot Dynamic Spectrum
    if cal_file != '':
        ax3 = fig.add_subplot(gs[0, 2])
    else:
        ax3 = fig.add_subplot(gs[0, 1])
    ax3.imshow(decibel(waterfall), origin='lower', interpolation='None', aspect='auto',
               extent=[np.min(frequency), np.max(frequency), np.min(t), np.max(t)])
    if xlim == [0,0] and ylim != [0,0]:
        ax3.set_ylim(ylim[0], ylim[1])
    elif xlim != [0,0] and ylim == [0,0]:
        ax3.set_xlim(xlim[0], xlim[1])
    elif xlim != [0,0] and ylim != [0,0]:
        ax3.set_xlim(xlim[0], xlim[1])
        ax3.set_ylim(ylim[0], ylim[1])

    ax3.ticklabel_format(useOffset=False)
    ax3.set_xlabel('Frequency (MHz)')
    ax3.set_ylabel('Relative Time (s)')
    ax3.set_title('Dynamic Spectrum (Waterfall)')

    # Adjust Subplot Width Ratio
    if cal_file != '':
        gs = GridSpec(2, 3, width_ratios=[16.5, 1, 1])
    else:
        gs = GridSpec(2, 2, width_ratios=[7.6, 1])

    # Plot Time Series (Power vs Time)
    ax4 = fig.add_subplot(gs[1, 0])
    ax4.plot(t, power, label='Raw Time Series')
    if m != 0:
        ax4.plot(t, power_clean, color='orangered', label='Median (n = '+str(m)+')')
    ax4.set_ylim()
    if ylim == [0,0]:
        ax4.set_xlim(0, np.max(t))
    else:
        ax4.set_xlim(ylim[0], ylim[1])
    ax4.set_xlabel('Relative Time (s)')
    if dB:
        ax4.set_ylabel('Relative Power (dB)')
    else:
        ax4.set_ylabel('Relative Power')
    ax4.set_title('Average Power vs Time')
    if m != 0:
        ax4.legend(bbox_to_anchor=(1, 1), loc='upper right')
    ax4.grid()

    # Plot Total Power Distribution
    if cal_file != '':
        gs = GridSpec(2, 3, width_ratios=[7.83, 1.5, -0.325])
    else:
        gs = GridSpec(2, 2, width_ratios=[8.8, 1.5])
    ax5 = fig.add_subplot(gs[1, 1])

    ax5.hist(power, np.max([int(np.size(power)/50), 10]), density=1, alpha=0.5,
             color='royalblue', orientation='horizontal', zorder=10)
    ax5.plot(best_fit(power)[1], best_fit(power)[0], '--', color='blue',
             label='Best fit (Raw)', zorder=20)
    if m != 0:
        ax5.hist(power_clean, np.max([int(np.size(power_clean)/50), 10]), density=1, alpha=0.5,
                 color='orangered', orientation='horizontal', zorder=10)
        ax5.plot(best_fit(power_clean)[1], best_fit(power_clean)[0], '--', color='red',
                 label='Best fit (Median)', zorder=20)
    ax5.set_xlim()
    ax5.set_ylim()
    ax5.get_shared_x_axes().join(ax5, ax4)
    ax5.set_yticklabels([])
    ax5.set_xlabel('Probability Density')
    ax5.set_title('Total Power Distribution')
    ax5.legend(bbox_to_anchor=(1, 1), loc='upper right')
    ax5.grid()

    # Save plots to file
    plt.tight_layout()
    plt.savefig(plot_file)
    plt.clf()
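# A minimal usage sketch of the plot() function above. The observation-parameter dict
# mirrors the keys listed in the docstring; all values and file names here are
# hypothetical examples, and the .dat files are assumed to have been recorded
# beforehand (per the docstring, with virgo.observe).
obs = {
    'dev_args': '',
    'rf_gain': 30,
    'if_gain': 25,
    'bb_gain': 18,
    'frequency': 1420e6,     # Hz
    'bandwidth': 2.4e6,      # Hz
    'channels': 2048,
    't_sample': 1,           # s
    'duration': 60,          # s
    'loc': '',
    'ra_dec': '',
    'az_alt': ''
}

plot(obs_parameters=obs, f_rest=1420.4057517667e6, dB=True,
     obs_file='observation.dat', cal_file='calibration.dat',
     plot_file='plot.png')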
def get_const(const):
    c = SkyCoord(const, unit=(u.hourangle, u.deg))
    A = get_constellation(c, short_name=True)
    return const_reader(A)