def get_projected_velocity(ra, dec, jd, obs_lat=nch.lat, obs_lon=nch.lon,
                           obs_alt=nch.alt, epoch=2451545.):
    '''Compute the projected velocity of the telescope towards the target,
    i.e. the barycentric correction.

    Parameters
    ----------
    ra, dec : float
        degrees, the RA/Dec of the target
    jd : float
        Julian date (UTC) of the observation
    obs_lat : float
        degrees, latitude of observatory, default=nch.lat
    obs_lon : float
        degrees, longitude of observatory, default=nch.lon
    obs_alt : float
        meters, altitude of observatory, default=nch.alt
    epoch : float
        Julian-date epoch of the ra/dec coordinates;
        default=2451545. is J2000

    Returns
    -------
    v : float
        m/s, barycenter-corrected radial velocity, see Wright & Eastman (2014)
    '''
    jd_utc = astropy.time.Time(jd, format='jd', scale='utc')
    proper_motion_ra = 0.   # proper motion in RA, mas/yr
    proper_motion_dec = 0.  # proper motion in Dec, mas/yr
    parallax = 0.           # parallax of target, mas
    rv = 0.                 # radial velocity of target, m/s
    zmeas = 0.              # measured redshift of spectrum
    ephemeris = 'de430'     # ephemeris from jplephem, ~100 MB download on first use
    v, warn, flag = barycorrpy.get_BC_vel(JDUTC=jd_utc, ra=ra, dec=dec,
                                          lat=obs_lat, longi=obs_lon,
                                          alt=obs_alt,
                                          pmra=proper_motion_ra,
                                          pmdec=proper_motion_dec,
                                          px=parallax, rv=rv, zmeas=zmeas,
                                          epoch=epoch, ephemeris=ephemeris,
                                          leap_update=False)
    return v
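# For reference, the same correction can be obtained without the wrapper.
# A minimal sketch with illustrative coordinates (CTIO-like site values);
# barycorrpy downloads the ephemeris and leap-second files on first use.
import barycorrpy

v, warn, flag = barycorrpy.get_BC_vel(JDUTC=2458000.5,           # JD (UTC)
                                      ra=26.0214, dec=-15.9395,  # deg
                                      lat=-30.1693, longi=-70.8068,  # deg
                                      alt=2241.9,                # m
                                      zmeas=0.0, ephemeris='de430',
                                      leap_update=False)
print(v[0])   # barycentric correction in m/s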
def get_bc_from_gaia_coords(ra, dec, pmra, pmdec, px, jd, rvabs=0):
    """
    Wrapper routine for using barycorrpy with Gaia DR2 coordinates.
    """
    # use 2015.5 as the coordinate epoch (Gaia DR2), expressed as a JD
    epoch = 2457206.375

    bc = barycorrpy.get_BC_vel(JDUTC=jd, ra=ra, dec=dec, pmra=pmra,
                               pmdec=pmdec, px=px, rv=rvabs * 1e3,
                               epoch=epoch, obsname='AAO', ephemeris='de430')

    if len(bc[0]) == 1:
        return bc[0][0]
    else:
        return bc[0]
def get_drift_rate(file):
    # open the file
    obs = Waterfall(file)
    # get the time of obs + make an array reaching 15 min into the future
    jdate = obs.header['tstart'] + 2400000.5   # MJD -> JD
    JDUTC = np.linspace(jdate, jdate + (60.0 * 15.0 / 86400.), num=100)
    # get the pointing of the obs
    c = SkyCoord(obs.header['src_raj'], obs.header['src_dej'])
    s = c.to_string('decimal')
    ra_probe, dec_probe = [float(string) for string in s.split()]
    # other needed params
    obsname = 'GBT'
    epoch = 2451545.0
    rv = 0.0
    zmeas = 0.0
    ephemeris = 'https://naif.jpl.nasa.gov/pub/naif/generic_kernels/spk/planets/a_old_versions/de405.bsp'
    # get the BC vel
    baryvel = get_BC_vel(JDUTC=JDUTC, ra=ra_probe, dec=dec_probe,
                         obsname=obsname, rv=rv, zmeas=zmeas, epoch=epoch,
                         ephemeris=ephemeris, leap_update=True)
    # take the derivative of velocity to get acceleration (i.e., drift)
    diffT = np.diff(JDUTC) * 86400.0   # days -> seconds
    diffV = np.diff(baryvel[0])
    drift = diffV / diffT
    # convert m/s^2 to Hz/s and take the max drift calculated
    drift *= (obs.container.f_stop * 1e6 / 3e8)   # TODO check SIGN!!!
    return np.max(np.abs(drift))
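# The conversion in the last step above is just the Doppler relation
# drift = a * f / c. A standalone arithmetic sketch with illustrative
# numbers (1.42 GHz observing frequency, 0.02 m/s^2 line-of-sight
# acceleration); the function itself uses obs.container.f_stop and rounds
# c to 3e8 m/s.
f_obs_hz = 1.42e9            # observing frequency, Hz
c = 299792458.0              # speed of light, m/s
accel = 0.02                 # barycentric acceleration, m/s^2
drift_hz_per_s = accel * f_obs_hz / c
print(drift_hz_per_s)        # ~0.095 Hz/s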
def test_hip_id_astropy_obs(self):
    JDUTC = Time(2458000, format='jd', scale='utc')
    result = get_BC_vel(JDUTC=JDUTC, hip_id=8102, obsname='CTIO',
                        ephemeris='de430')
    self.assertTrue(np.isclose(a=result[0], b=15403.9608, atol=1e-2, rtol=0))
def test_hip_id(self):
    # Also accepts float input for JDUTC. Verify scale and format.
    JDUTC = 2458000
    result = get_BC_vel(JDUTC=JDUTC, hip_id=8102, lat=-30.169283,
                        longi=-70.806789, alt=2241.9, ephemeris='de430',
                        zmeas=0.0)
    self.assertTrue(np.isclose(a=result[0], b=15403.9508, atol=1e-2, rtol=0))
def test_SolarBC(self):
    JDUTC = 2458000
    result6 = get_BC_vel(JDUTC=JDUTC, lat=-30.169138888, longi=-70.805888,
                         alt=2379.5, zmeas=0.0, SolSystemTarget='Sun')
    self.assertTrue(np.isclose(a=result6[0], b=819.4474, atol=1e-2, rtol=0))
def test_stellar_predictive(self):
    JDUTC = 2458000
    result5 = get_BC_vel(JDUTC=JDUTC, hip_id=8102, lat=-30.169283,
                         longi=-70.806789, alt=2241.9, ephemeris='de430',
                         zmeas=0.0, predictive=True)
    self.assertTrue(np.isclose(a=result5[0], b=-15403.15938, atol=1e-2,
                               rtol=0))
def etiennes_code(ra, dec, epoch, lat, longi, alt, pmra, pmdec, px, rv,
                  mjdate):
    from barycorrpy import get_BC_vel
    from barycorrpy import utc_tdb

    obsname = ''
    zmeas = 0.0
    ephemeris = 'https://naif.jpl.nasa.gov/pub/naif/generic_kernels/spk/planets/a_old_versions/de405.bsp'
    JDUTC = Time(mjdate, format='mjd', scale='utc')
    bjd = utc_tdb.JDUTC_to_BJDTDB(JDUTC, ra=ra, dec=dec, obsname=obsname,
                                  lat=lat, longi=longi, alt=alt, pmra=pmra,
                                  pmdec=pmdec, px=px, rv=rv, epoch=epoch,
                                  ephemeris=ephemeris, leap_update=True)
    results = get_BC_vel(JDUTC=JDUTC, ra=ra, dec=dec, obsname=obsname,
                         lat=lat, longi=longi, alt=alt, pmra=pmra,
                         pmdec=pmdec, px=px, rv=rv, zmeas=zmeas, epoch=epoch,
                         ephemeris=ephemeris, leap_update=True)
    # BERV in km/s, BJD_TDB, and a placeholder third return value
    return results[0][0] / 1000, bjd[0][0], np.nan
def getBarycentricCorrection(times, starname, obsname, verbose=False,
                             **kwargs):
    bc = []
    if verbose:
        seq = tqdm(times, desc='Collecting Barycentric velocity')
    else:
        seq = times
    for time in seq:
        JDUTC = Time(time, format='jd', scale='utc')
        output = barycorrpy.get_BC_vel(JDUTC, starname=starname,
                                       obsname=obsname)
        bc.append(output[0][0])
    bc = np.array(bc)
    # negate the BC so that applying it moves the RVs into the target frame
    bc = -bc
    return bc
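# An illustrative call of the wrapper above; barycorrpy resolves both the
# star name and the observatory name internally, so only the epochs are
# needed here. Target and site are placeholders, not values from the source.
import numpy as np

times = 2458000.0 + np.arange(3) * 0.01   # JDs (UTC), illustrative
bc = getBarycentricCorrection(times, starname='HD 189733', obsname='CTIO',
                              verbose=True)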
def test_inputs(self):
    ra = 26.0213645867
    dec = -15.9395557246
    obsname = ''
    lat = -30.169283
    longi = -70.806789
    alt = 2241.9
    epoch = 2451545.0
    pmra = -1721.05
    pmdec = 854.16
    px = 273.96
    rv = 0.0
    zmeas = 0.0
    # Can also enter JDUTC as float instead of an Astropy Time object
    JDUTC = [2458000, 2458000.00001, 2458000.00002]
    ephemeris = 'https://naif.jpl.nasa.gov/pub/naif/generic_kernels/spk/planets/a_old_versions/de405.bsp'
    result = get_BC_vel(JDUTC=JDUTC, ra=ra, dec=dec, obsname=obsname,
                        lat=lat, longi=longi, alt=alt, pmra=pmra,
                        pmdec=pmdec, px=px, rv=rv, zmeas=zmeas, epoch=epoch,
                        ephemeris=ephemeris, leap_update=True)
    self.assertTrue(np.allclose([result[0][0], result[0][1], result[0][2]],
                                [15407.4860, 15407.4723, 15407.4586],
                                atol=1e-2, rtol=0))
def getbc(ra, dec, jd, obs='APO'):
    """
    Get barycentric correction using barycorrpy from Eastman & Wright,
    as implemented by Kanodia & Wright, RNAAS 2, 1
    """
    if obs == 'APO':
        longitude = 360. - (105. + 49. / 60. + 13. / 3600.)
        latitude = 32. + 46. / 60. + 49. / 3600.
        altitude = 2788.
    elif obs == 'LCO':
        longitude = 360. - (70. + 41. / 60. + 33.36 / 3600.)
        latitude = -1 * (29. + 0. / 60. + 52.56 / 3600.)
        altitude = 2380.
    else:
        raise ValueError('Unknown observatory: {}'.format(obs))
    out = barycorrpy.get_BC_vel(JDUTC=jd, ra=ra, dec=dec, longi=longitude,
                                lat=latitude, alt=altitude, leap_update=False)
    return out[0]
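# A hypothetical call of the wrapper above (coordinates in degrees and a JD
# in UTC; the numbers are illustrative only):
bc = getbc(ra=217.42895, dec=-62.67948, jd=2458000.5, obs='LCO')
print(bc[0])   # m/s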
def get_barycentric_correction(
        fn, rvabs=None,
        obs_path='/Users/christoph/OneDrive - UNSW/observations/'):
    """
    Wrapper routine for using barycorrpy with Gaia DR2 coordinates.
    """
    # use 2015.5 as the coordinate epoch (Gaia DR2), expressed as a JD
    epoch = 2457206.375

    # get UT obs start time (the FITS header has 2,400,000.5 subtracted!!!)
    utmjd = pyfits.getval(fn, 'UTMJD') + 2.4e6 + 0.5
    # add half the exposure time in days
    texp = pyfits.getval(fn, 'ELAPSED')
    utmjd = utmjd + (texp / 2.) / 86400.

    # read in Gaia DR2 IDs (allow_pickle is needed for dict archives in
    # numpy >= 1.16.3)
    gaia_dict = np.load(obs_path + 'gaiadr2_id_dict.npy',
                        allow_pickle=True).item()

    # check what kind of target it is
    targ_raw = pyfits.getval(fn, 'OBJECT')
    targ = targ_raw.split('+')[0]
    typ = targ[-3:]

    # Blaise's targets start with BKT or BKTRM
    if targ[:3] == 'BKT':
        if targ[:5] == 'BKTRM':
            targ = targ[5:]
        elif targ[:6] == 'BKTSec':
            targ = targ[6:]
        else:
            targ = targ[3:]

    # sometimes the name of the PI is appended to the target name
    if targ.split('_')[-1].lower() in ['bouma', 'dragomir', 'shporer']:
        namelen = len(targ.split('_')[-1])
        targ = targ[:-namelen - 1]
        typ = targ[-3:]

    try:
        # for TOIs
        if (typ == '.01') or (typ == '.02') or (targ[:3] in ['TOI', 'TIC']):
            if len(targ) <= 10:
                if targ[:3] in ['TOI', 'TIC']:
                    gaia_dr2_id = gaia_dict[
                        'TOI' + targ[3:3 + len(targ.split('.')[0])]]['gaia_dr2_id']
                else:
                    gaia_dr2_id = gaia_dict[
                        'TOI' + targ[:len(targ.split('.')[0])]]['gaia_dr2_id']
            else:
                gaia_dr2_id = gaia_dict[targ]['gaia_dr2_id']
        # for other targets
        else:
            if targ.lower() in ['gj674', 'gl87', 'gl480.1', 'proxima',
                                'kelt-15b', 'wasp-54b', 'gj514', 'gj526',
                                'gj699', 'gj3192', 'gj3193']:
                gaia_dr2_id = gaia_dict[targ]['gaia_dr2_id']
            elif targ.lower() == 'gj87':
                gaia_dr2_id = gaia_dict['Gl87']['gaia_dr2_id']
            elif targ.lower() in ['zetapic', 'zeta pic']:
                gaia_dr2_id = gaia_dict['zetaPic']['gaia_dr2_id']
            elif targ.lower() in ['ekeri', 'ek eri']:
                gaia_dr2_id = gaia_dict['EKEri']['gaia_dr2_id']
            elif targ.lower() in ['ksihya', 'ksi hya', 'ksihya_new']:
                gaia_dr2_id = gaia_dict['ksiHya']['gaia_dr2_id']
            elif targ.lower()[:2] in ['hd', 'hr', 'as']:
                gaia_dr2_id = gaia_dict[targ]['gaia_dr2_id']
            else:
                gaia_dr2_id = gaia_dict['HD' + targ]['gaia_dr2_id']
    except KeyError:
        print('WARNING: could not find Gaia DR2 ID for target: ', targ)
        return np.nan

    # query Gaia DR2 for the astrometry of this source
    q = Gaia.launch_job('SELECT * FROM gaiadr2.gaia_source WHERE source_id = '
                        + str(gaia_dr2_id))
    gaia_data = q.results

    # some targets don't have an RV from Gaia
    if rvabs is None:
        rvabs = gaia_data['radial_velocity']
        if np.isnan(rvabs.data.data)[0]:
            rvabs = 0.

    bc = barycorrpy.get_BC_vel(JDUTC=utmjd, ra=gaia_data['ra'],
                               dec=gaia_data['dec'], pmra=gaia_data['pmra'],
                               pmdec=gaia_data['pmdec'],
                               px=gaia_data['parallax'], rv=rvabs * 1e3,
                               epoch=epoch, obsname='AAO', ephemeris='de430')
    try:
        final_bc = bc[0][0][0]
    except (IndexError, TypeError):
        final_bc = bc[0][0]
    return final_bc
def get_barycentric_correction(
        fn, rvabs=None,
        obs_path='/Users/christoph/OneDrive - UNSW/observations/'):
    """
    Wrapper routine for using barycorrpy with Gaia DR2 coordinates.
    """
    # use 2015.5 as the coordinate epoch (Gaia DR2), expressed as a JD
    epoch = 2457206.375

    # get UT obs start time (the FITS header has 2,400,000.5 subtracted!!!)
    utmjd = pyfits.getval(fn, 'UTMJD') + 2.4e6 + 0.5
    # add half the exposure time in days
    texp = pyfits.getval(fn, 'ELAPSED')
    utmjd = utmjd + (texp / 2.) / 86400.

    # read in Gaia DR2 IDs
    gaia_dict = np.load(obs_path + 'gaiadr2_id_dict.npy',
                        allow_pickle=True).item()

    # check what kind of target it is
    targ_raw = pyfits.getval(fn, 'OBJECT')
    targ = targ_raw.split('+')[0]
    typ = targ[-3:]

    try:
        # for TOIs
        if (typ == '.01') or (targ[:3] in ['TOI', 'TIC']):
            if targ[:3] in ['TOI', 'TIC']:
                gaia_dr2_id = gaia_dict['TOI' + targ[3:6]]['gaia_dr2_id']
            else:
                gaia_dr2_id = gaia_dict['TOI' + targ[:3]]['gaia_dr2_id']
        # for other targets
        else:
            # note: compare against lower-case names, since targ is lowered
            if targ.lower() in ['gj674', 'gl87', 'proxima', 'kelt-15b',
                                'wasp-54b']:
                gaia_dr2_id = gaia_dict[targ]['gaia_dr2_id']
            elif targ.lower() == 'gj87':
                gaia_dr2_id = gaia_dict['Gl87']['gaia_dr2_id']
            elif targ.lower()[:2] == 'hd':
                gaia_dr2_id = gaia_dict[targ]['gaia_dr2_id']
            else:
                gaia_dr2_id = gaia_dict['HD' + targ]['gaia_dr2_id']
    except KeyError:
        print('WARNING: could not find Gaia DR2 ID for target: ', targ)
        return np.nan

    # query Gaia DR2 for the astrometry of this source
    q = Gaia.launch_job('SELECT * FROM gaiadr2.gaia_source WHERE source_id = '
                        + str(gaia_dr2_id))
    gaia_data = q.results

    # some targets don't have an RV from Gaia
    if rvabs is None:
        rvabs = gaia_data['radial_velocity']
        if np.isnan(rvabs.data.data)[0]:
            rvabs = 0.

    bc = barycorrpy.get_BC_vel(JDUTC=utmjd, ra=gaia_data['ra'],
                               dec=gaia_data['dec'], pmra=gaia_data['pmra'],
                               pmdec=gaia_data['pmdec'],
                               px=gaia_data['parallax'], rv=rvabs * 1e3,
                               epoch=epoch, obsname='AAO', ephemeris='de430')
    return bc[0][0][0]
def bjdbrv(jd_utc, ra=None, dec=None, obsname=None, lat=0., lon=0.,
           elevation=None, pmra=0., pmdec=0., parallax=0., rv=0., zmeas=0.,
           epoch=2451545.0, tbase=0., leap_update=False, **kwargs):
    """
    Wrapper to barycorrpy.py and utc2bjd. Computes the barycentric velocity
    correction and the barycentric Julian date in one call.
    Keyword obsname refers to observatory.pro in the IDL Astronomy User Library
    See also: http://astroutils.astronomy.ohio-state.edu/exofast/barycorr.html

    :param jd_utc: Julian date (UTC)
    :param ra: RA (J2000) [deg]
    :param dec: Dec (J2000) [deg]
    :param obsname: Observatory name (overrides coordinates if set)
    :param lat: Observatory latitude [deg]
    :param lon: Observatory longitude (E) [+/-360 deg]
    :param elevation: Observatory elevation [m]
    :param pmra: Proper motion (RA*cos(Dec)) [mas/yr]
    :param pmdec: Proper motion (Dec) [mas/yr]
    :param parallax: Parallax [mas]
    :param rv: Radial velocity (within 100 km/s) [m/s]
    :param zmeas: Measured redshift
    :param epoch: Epoch (default 2451545.0, J2000)
    :param tbase: Baseline subtracted from times (default 0.0)
    :return: BJD_TDB and barycentric correction for zmeas

    Example:
    --------
    >>> from brv_we14py import bjdbrv
    >>> print(bjdbrv(2457395.24563, 4.585590721, 44.02195596, 'ca'))
    (2457395.247062386, -23684.54364462639)
    """
    if obsname == "McDonald Observatory":
        # same as used in SERVAL
        lat = 30.6814
        lon = -104.0147
        elevation = 2025.

    # Barycentric Julian Date, adapted from
    # http://docs.astropy.org/en/stable/time/#barycentric-and-heliocentric-light-travel-time-corrections
    targ = coord.SkyCoord(ra, dec, unit=(u.deg, u.deg), frame='icrs')
    loc = coord.EarthLocation.from_geodetic(lon, lat, height=elevation)
    JDUTC = Time(jd_utc, format='jd', scale='utc')
    ltt_bary = JDUTC.light_travel_time(targ, location=loc)
    bjd = JDUTC.tdb + ltt_bary

    if leap_update is False:
        print('WARNING: LEAP UPDATE=FALSE')

    brv, warning, status = barycorrpy.get_BC_vel(JDUTC, ra=ra, dec=dec,
                                                 epoch=epoch, pmra=pmra,
                                                 pmdec=pmdec, px=parallax,
                                                 lat=lat, longi=lon,
                                                 alt=elevation,
                                                 leap_update=leap_update,
                                                 **kwargs)
    return bjd.value, brv[0]
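# For comparison, barycorrpy can also compute BJD_TDB directly (the route
# taken by etiennes_code above) instead of going through astropy's
# light_travel_time. A sketch using the docstring's example coordinates and
# Calar-Alto-like site values (illustrative, not a drop-in test):
from astropy.time import Time
from barycorrpy import utc_tdb

JDUTC = Time(2457395.24563, format='jd', scale='utc')
bjd_tdb, warning, status = utc_tdb.JDUTC_to_BJDTDB(
    JDUTC, ra=4.585590721, dec=44.02195596,
    lat=37.2236, longi=-2.5463, alt=2168.)
print(bjd_tdb[0])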
w_bervs = wobble_res['bervs'][()]

# make own (proper) barycentric corrections
w_RVs_barycorr = np.zeros(len(w_dates))
# disabled pickle cache:
'''
try:
    f = open(i[0] + "_" + i[2] + kt + "_bervs_unweight.pickle", "rb")
    w_RVs_barycorr = pickle.load(f)
    f.close()
except FileNotFoundError:
'''
if True:
    for n in tqdm(range(len(w_RVs_barycorr))):
        w_RVs_barycorr[n] = bary.get_BC_vel(w_dates[n], starname=i[0],
                                            lat=_lat, longi=_lon,
                                            alt=_elevation,
                                            zmeas=w_RVs[n] / lightvel)[0]
    f = open(i[0] + "_" + i[2] + kt + "_bervs_unweight.pickle", "wb")
    pickle.dump(w_RVs_barycorr, f)
    f.close()

w_RVs_barycorr_ivar = np.zeros(len(w_dates))
# disabled pickle cache:
'''
try:
    f = open(i[0] + "_" + i[2] + kt + "_bervs_ivar.pickle", "rb")
    w_RVs_barycorr_ivar = pickle.load(f)
    f.close()
except FileNotFoundError:
'''
if True:
def __init__(self, wobble_file, serval_dir, carmenes_object_ID,
             bary_starname, load_bary=True, archive=True,
             bary_archive=os.path.dirname(os.path.abspath(__file__)) + "/" +
             "../results/bary_archive/",
             correct_w_for_drift=False, correct_drift=True, correct_NZP=True,
             correct_SA=True):
    self.wobble_file = wobble_file
    self.bary_starname = bary_starname
    if archive == True:
        os.makedirs(bary_archive, exist_ok=True)

    # Import wobble
    wobble_res = h5py.File(wobble_file, 'r')
    w_dates = wobble_res['dates'][()]
    w_dates_utc = wobble_res['dates_utc'][()]
    self.w_orders = w_orders = wobble_res['orders'][()]
    w_epochs = wobble_res['epochs'][()]
    w_RVs = wobble_res['star_time_rvs'][()]
    w_RVs_er = wobble_res['star_time_sigmas'][()]
    w_bervs = wobble_res['bervs'][()]

    # orderwise RVs
    N_ord = len(w_orders)   # should be equivalent to ["R"]
    k = np.arange(N_ord)
    keys_orderRVs = []
    for j in k:
        keys_orderRVs.append('order' + str(j))
    # initialize arrays
    w_order_RVs = np.zeros((N_ord, len(w_dates)))
    w_order_RVs_ivars = np.zeros((N_ord, len(w_dates)))
    w_order_RV_scatter = np.zeros(len(w_dates))
    for j, k in enumerate(keys_orderRVs):
        temp = wobble_res[k]                        # load order dataset
        tempRVs = temp['star_rvs'][()]              # RVs for this order at all epochs
        tempRVs_ivars = temp['star_ivars_rvs'][()]  # inverse variances of the RVs
        w_order_RVs[j, :] = tempRVs                 # fill values into rows of array
        w_order_RVs_ivars[j, :] = tempRVs_ivars
        if np.any(tempRVs_ivars < 0.0) == True:
            # we may want to drop these orders as they may represent
            # *maxima* in optimization
            print("Found negative ivars.")
    for j in range(len(w_dates)):
        w_order_RV_scatter[j] = np.nanstd(w_order_RVs[:, j])

    # barycentric correction: load barycorrected RVs from the pickle archive
    # to save time on recalculating them
    # TODO issue in next line
    w_file_basename = os.path.splitext(os.path.basename(wobble_file))[0]
    filename_RVs_barycorr = bary_archive + w_file_basename + "_barycorr.pkl"
    filename_order_RVs_barycorr = (bary_archive + w_file_basename +
                                   "_barycorr_orders.pkl")
    if load_bary:
        try:
            with open(filename_RVs_barycorr, "rb") as f:
                w_RVs_barycorr = pickle.load(f)
            with open(filename_order_RVs_barycorr, "rb") as f:
                w_order_RVs_barycorr = pickle.load(f)
        except FileNotFoundError:
            print("RV file not found, recalculating barycentric corrections")
            load_bary = False
    if not load_bary:
        w_RVs_barycorr = np.zeros(len(w_dates))
        for n in tqdm(range(len(w_RVs_barycorr))):
            w_RVs_barycorr[n] = bary.get_BC_vel(
                w_dates_utc[n], starname=bary_starname, lat=_lat, longi=_lon,
                alt=_elevation, zmeas=w_RVs[n] / lightvel,
                leap_update=False   # HACK barycorrpy issue 27
            )[0]
        # copy() so that the uncorrected order RVs are not overwritten
        w_order_RVs_barycorr = w_order_RVs.copy()
        for order in tqdm(range(len(w_order_RVs))):
            for ep in tqdm(range(len(w_order_RVs[order]))):
                w_order_RVs_barycorr[order, ep] = bary.get_BC_vel(
                    w_dates_utc[ep], starname=bary_starname, lat=_lat,
                    longi=_lon, alt=_elevation,
                    zmeas=w_order_RVs[order, ep] / lightvel,
                    leap_update=False   # HACK barycorrpy issue 27
                )[0]
        if archive:
            with open(filename_RVs_barycorr, "wb") as f:
                pickle.dump(w_RVs_barycorr, f)
            with open(filename_order_RVs_barycorr, "wb") as f:
                pickle.dump(w_order_RVs_barycorr, f)

    # Import SERVAL
    # TODO make some of these that aren't strictly necessary for corrections
    # optional; replace [i] with carmenes_object_ID
    ser_avcn = np.loadtxt(serval_dir + carmenes_object_ID + "/" +
                          carmenes_object_ID + ".avcn.dat")
    # read in also the info file, from which we get SNR and airmass
    ser_info = np.genfromtxt(serval_dir + carmenes_object_ID + "/" +
                             carmenes_object_ID + ".info.cvs", delimiter=";")
    ser_addinfo = np.zeros((len(ser_avcn), 2))   # initialize array
    for n in range(len(ser_avcn)):
        # ser_info[:, 1] is BJD and so is ser_avcn[n, 0]; this matches the
        # entries closest to each other
        ind_jd = np.where(np.abs(ser_info[:, 1] - ser_avcn[n, 0]) ==
                          np.nanmin(np.abs(ser_info[:, 1] -
                                           ser_avcn[n, 0])))[0][0]
        ser_addinfo[n, 0] = ser_info[ind_jd, 3]   # SNR
        ser_addinfo[n, 1] = ser_info[ind_jd, 8]   # airmass

    # Import SERVAL orderwise RVs
    ser_rvo = np.loadtxt(serval_dir + carmenes_object_ID + "/" +
                         carmenes_object_ID + ".rvo.dat")
    ser_rvo_err = np.loadtxt(serval_dir + carmenes_object_ID + "/" +
                             carmenes_object_ID + ".rvo.daterr")

    # remove entries with NaN in drift
    ind_finitedrift = np.isfinite(ser_avcn[:, 3])
    ser_avcn = ser_avcn[ind_finitedrift]
    ser_rvo = ser_rvo[ind_finitedrift]
    ser_rvo_err = ser_rvo_err[ind_finitedrift]

    # match the observations by their JDs -> start with the wobble date and
    # find the SERVAL entry with the lowest time difference
    indices_serval = []
    indices_wobble = []
    for n in range(len(w_dates)):
        ind_jd = np.where(np.abs(ser_avcn[:, 0] - w_dates[n]) ==
                          np.nanmin(np.abs(ser_avcn[:, 0] -
                                           w_dates[n])))[0][0]
        if (ser_avcn[ind_jd, 0] - w_dates[n]) * 24 * 60 < 20.:
            # only take matches closer than 20 minutes
            indices_serval.append(ind_jd)
            indices_wobble.append(n)

    # HACK throw out NaN wobble errors
    bool_isnan = np.isnan(w_RVs_er[indices_wobble])
    indices_serval = [indices_serval[i] for i in range(len(indices_serval))
                      if not bool_isnan[i]]
    indices_wobble = [indices_wobble[i] for i in range(len(indices_wobble))
                      if not bool_isnan[i]]

    # apply to all arrays and write to the object
    self.ser_avcn = ser_avcn[indices_serval]
    self.ser_rvo = ser_rvo[indices_serval]
    self.ser_rvo_err = ser_rvo_err[indices_serval]
    self.ser_addinfo = ser_addinfo[indices_serval]
    # wobble
    self.w_dates = w_dates[indices_wobble]
    self.w_dates_utc = w_dates_utc[indices_wobble]
    self.w_epochs = w_epochs[indices_wobble]
    self.w_RVs_barycorr = w_RVs_barycorr[indices_wobble]
    self.w_RVs = w_RVs[indices_wobble]
    self.w_RVs_er = w_RVs_er[indices_wobble]
    self.w_bervs = w_bervs[indices_wobble]
    self.w_order_RVs = w_order_RVs[:, indices_wobble]
    self.w_order_RVs_barycorr = w_order_RVs_barycorr[:, indices_wobble]
    self.w_order_RV_scatter = w_order_RV_scatter[indices_wobble]
def compare_results(file_list, parameter_change_list, bary_starname,
                    orbital_parameters, objects, servaldir):
    """
    Compares result files to find the best RV scatter around the literature
    fit and returns the change in parameter that yielded the best results.

    file_list : list of `str`
        list containing the file paths of the files to be compared
    parameter_change_list : list of `int`
        list containing the parameter exponent shifts used to create the
        files in file_list
    orbital_parameters : list of `float`
        orbital_parameters = [K, P, e, omega, T0]; parameters of the
        Keplerian fit to be used as the "true" baseline
    """
    sigma_list = np.zeros(len(file_list)) + 100   # 100 is a fudge factor
    plots = True
    if plots:
        # HACK go up 2 directories to the loop directory
        rec_loop_directory, key_name = os.path.split(
            os.path.split(file_list[0])[0])
        plot_directory = rec_loop_directory + "/compare_plots"
        os.makedirs(plot_directory, exist_ok=True)
        pp = PdfPages(plot_directory + "/" + key_name + ".pdf")
        fig = plt.figure(figsize=(15, 9), dpi=200)
        mpl.rc('font', size=16)
        plt.clf()
        fig.clf()
        ax1 = plt.gca()

    # assumes the order of file_list and parameter_change_list are matched
    # (maybe extract from the file name?)
    for f, fil in enumerate(file_list):
        wobble_res = h5py.File(fil, 'r')
        w_dates = wobble_res['dates'][()]
        w_dates_utc = wobble_res['dates_utc'][()]
        w_RVs = wobble_res['star_time_rvs'][()]
        w_RVs_original = w_RVs
        w_RVs_er = wobble_res['star_time_sigmas'][()]

        # barycentric correction for the original wobble RVs
        from scipy.constants import codata
        lightvel = codata.value('speed of light in vacuum')
        # CAHA coordinates for barycorr
        _lat = 37.2236
        _lon = -2.54625
        _elevation = 2168.
        w_RVs_original_barycorr = np.zeros(len(w_dates))
        for n in tqdm(range(len(w_RVs_original_barycorr))):
            w_RVs_original_barycorr[n] = bary.get_BC_vel(
                w_dates_utc[n], starname=bary_starname, lat=_lat, longi=_lon,
                alt=_elevation, zmeas=w_RVs_original[n] / lightvel)[0]

        # SERVAL correction: read in SERVAL
        ser_rvc = np.loadtxt(servaldir + objects[1] + "/" + objects[1] +
                             ".rvc.dat")
        # remove entries with NaN in drift
        ind_finitedrift = np.isfinite(ser_rvc[:, 3])
        ser_rvc = ser_rvc[ind_finitedrift]
        ser_corr = -ser_rvc[:, 8] - ser_rvc[:, 3]

        # match wobble and SERVAL
        indices_serval = []
        indices_wobble = []
        for n in range(len(w_dates)):
            ind_jd = np.where(np.abs(ser_rvc[:, 0] - w_dates[n]) ==
                              np.nanmin(np.abs(ser_rvc[:, 0] -
                                               w_dates[n])))[0][0]
            if (ser_rvc[ind_jd, 0] - w_dates[n]) * 24 * 60 < 20.:
                # only take matches closer than 20 minutes
                indices_serval.append(ind_jd)
                indices_wobble.append(n)
        print("#serval_ind:" + str(len(indices_serval)),
              "#wobble_ind:" + str(len(indices_wobble)))

        # now set up all the data according to the indices
        ser_rvc = ser_rvc[indices_serval]
        ser_corr = ser_corr[indices_serval]
        w_dates = w_dates[indices_wobble]
        w_dates_utc = w_dates_utc[indices_wobble]
        w_RVs_original_barycorr = (w_RVs_original_barycorr[indices_wobble] +
                                   ser_corr)
        w_RVs_er = w_RVs_er[indices_wobble]

        def fit_func(t, T0_offset):
            return rv.radial_velocity(t, orbital_parameters[0],
                                      orbital_parameters[1],
                                      orbital_parameters[2],
                                      orbital_parameters[3],
                                      orbital_parameters[4] + T0_offset)

        # fit to wobble
        xdata = w_dates
        ydata = w_RVs_original_barycorr - np.nanmean(w_RVs_original_barycorr)
        popt, pcov = sp.optimize.curve_fit(fit_func, xdata, ydata,
                                           sigma=w_RVs_er,
                                           absolute_sigma=True)
        print("T0_offset Wobble = ", popt)
        T0_offset = popt[0]

        # maybe make these weighted (this may not be a good idea if the
        # residuals are not strongly correlated with the errors, as with
        # wobble results)
        sigma_wob = np.nanstd(sigma_clip(
            w_RVs_original_barycorr - np.nanmean(w_RVs_original_barycorr) -
            fit_func(w_dates, T0_offset), sigma=5))
        sigma_list[f] = sigma_wob
        sigma_wob_noclip = np.nanstd(w_RVs_original_barycorr -
                                     np.nanmean(w_RVs_original_barycorr) -
                                     fit_func(w_dates, T0_offset))

        if plots:
            # fit to SERVAL
            xdata = ser_rvc[:, 0]
            ydata = ser_rvc[:, 1] - np.nanmean(ser_rvc[:, 1])
            popt_s, pcov_s = sp.optimize.curve_fit(fit_func, xdata, ydata,
                                                   sigma=ser_rvc[:, 2],
                                                   absolute_sigma=True)
            print("T0_offset Serval = ", popt_s)
            T0_offset_s = popt_s[0]
            sigma_ser = np.nanstd(sigma_clip(
                ser_rvc[:, 1] - np.nanmean(ser_rvc[:, 1]) -
                fit_func(ser_rvc[:, 0], T0_offset_s), sigma=5))
            sigma_ser_noclip = np.nanstd(ser_rvc[:, 1] -
                                         np.nanmean(ser_rvc[:, 1]) -
                                         fit_func(ser_rvc[:, 0], T0_offset_s))

            xlst = np.linspace(w_dates[0],
                               w_dates[0] + orbital_parameters[1] * 0.99999,
                               num=100)
            ylst = [rv.radial_velocity(t, orbital_parameters[0],
                                       orbital_parameters[1],
                                       orbital_parameters[2],
                                       orbital_parameters[3],
                                       orbital_parameters[4] + T0_offset)
                    for t in xlst]
            # sort by xlst modulo the period
            pltlst = [[xlst[j], ylst[j]] for j in range(len(xlst))]

            def mod_sort(elem):
                return elem[0] % orbital_parameters[1]

            pltlst = sorted(pltlst, key=mod_sort)
            pltlst = np.asarray(pltlst)
            pltlst = [pltlst[:, 0], pltlst[:, 1]]

            ax1.plot(pltlst[0] % orbital_parameters[1], pltlst[1], "r-",
                     label="literature orbit (Wobble T0_offset)")
            ax1.errorbar(
                (w_dates) % orbital_parameters[1],
                (w_RVs_original_barycorr -
                 np.nanmean(w_RVs_original_barycorr)),
                yerr=w_RVs_er, fmt="x",
                label="Wobble_Corr, clipped_sigma = {0:.3f}, noclip = {1:.3f} "
                      .format(sigma_wob, sigma_wob_noclip))
            ax1.errorbar(
                (ser_rvc[:, 0]) % orbital_parameters[1],
                ser_rvc[:, 1] - np.nanmean(ser_rvc[:, 1]),
                yerr=ser_rvc[:, 2], fmt="x",
                label="SERVAL_Corr, clipped_sigma = {0:.3f}, noclip = {1:.3f}"
                      .format(sigma_ser, sigma_ser_noclip),
                color="C2")
            ax1.set_ylabel("RVs [m/s]")
            ax1.set_xlabel('jd')
            # add the parameter change to the title
            title_pre = os.path.split(os.path.split(fil)[0])[1]
            plt.title(title_pre + ", Phased (" + str(orbital_parameters[1]) +
                      "d) RVs for " + objects[0] + " (" + objects[2] + ") " +
                      " - " + objects[1] + ";")
            plt.grid(True)
            plt.tight_layout()
            plt.legend(shadow=True)
            plt.savefig(pp, format='pdf')
            plt.clf()
            fig.clf()
            ax1 = plt.gca()

    if plots:
        # TODO make the progress plots not crudely placed inside this function
        plt.close(fig)
        pp.close()

    best_index = np.argmin(sigma_list)
    return parameter_change_list[best_index]
def read_data_from_fits(filelist, arm='vis', starname=None):
    names = pd.read_csv('name_conversion_list.csv')
    name_dict = dict(zip(names['#Karmn'], names['Name']))
    # input: a list of filenames
    N = len(filelist)   # number of epochs
    M, R = dimensions(arm)
    data = [np.zeros((N, M)) for r in range(R)]
    ivars = [np.zeros((N, M)) for r in range(R)]
    xs = [np.zeros((N, M)) for r in range(R)]
    empty = np.array([], dtype=int)
    pipeline_rvs, pipeline_sigmas, dates, bervs, airms, drifts, dates_utc = \
        np.zeros(N), np.zeros(N), np.zeros(N), np.zeros(N), np.zeros(N), \
        np.zeros(N), np.zeros(N)
    for n, f in enumerate(filelist):
        sp = fits.open(f)
        try:
            pipeline_rvs[n] = sp[0].header['HIERARCH CARACAL SERVAL RV'] * 1.e3    # m/s
            pipeline_sigmas[n] = sp[0].header['HIERARCH CARACAL SERVAL E_RV'] * 1.e3  # m/s
        except KeyError:
            pipeline_rvs[n] = 0
            pipeline_sigmas[n] = 0
        try:
            drifts[n] = sp[0].header['HIERARCH CARACAL DRIFT FP RV']
        except KeyError:
            print("WARNING: {0} Drift missing. Skipping this one.".format(f))
            empty = np.append(empty, n)
            continue
        if not starname:
            starname = name_dict[sp[0].header['OBJECT']]
        jd_start = Time(sp[0].header['DATE-OBS'])
        jd_mid = jd_start.jd + sp[0].header['HIERARCH CARACAL TMEAN'] * 1 / (24 * 60 * 60)
        dates_utc[n] = jd_mid
        # for nir, ignore all dates before 2017 (recommended by Adrian)
        date = bary.JDUTC_to_BJDTDB(jd_mid, starname)[0]
        if date >= 2457754.5:   # 1 JAN 2017
            dates[n] = date
        else:
            if arm == "vis":
                dates[n] = date
            elif arm == "nir":
                print("Date is before 2017 for NIR measurement. "
                      "Skipping this one.")
                empty = np.append(empty, n)
                continue
            else:
                print("{} not recognized. valid options are: \"vis\" or"
                      " \"nir\"".format(arm))
                return
        bervs[n] = bary.get_BC_vel(jd_mid, starname=starname, lat=_lat,
                                   longi=_lon, alt=_elevation)[0]   # m/s
        airms[n] = sp[0].header['AIRMASS']
        try:
            wave = sp['WAVE'].data
            spec = sp['SPEC'].data
            sig = sp['SIG'].data
        except Exception as e:
            print('{} Skipping file {}.'.format(e, f))
            empty = np.append(empty, n)
            continue
        # save stuff
        for r in range(R):
            data[r][n, :] = spec[r, :]
            ivars[r][n, :] = 1 / sig[r, :]**2
            # drift-corrected wavelengths, per pixel
            for l in range(len(data[r][n, :])):
                lambda_drifts = lambda_drift(wave[r, l], drifts[n])
                xs[r][n, l] = wave[r, l] - lambda_drifts
    # delete data with missing attributes
    for r in range(R):
        data[r] = np.delete(data[r], empty, axis=0)
        ivars[r] = np.delete(ivars[r], empty, axis=0)
        xs[r] = np.delete(xs[r], empty, axis=0)
    pipeline_rvs = np.delete(pipeline_rvs, empty)
    pipeline_sigmas = np.delete(pipeline_sigmas, empty)
    dates = np.delete(dates, empty)
    bervs = np.delete(bervs, empty)
    airms = np.delete(airms, empty)
    drifts = np.delete(drifts, empty)
    dates_utc = np.delete(dates_utc, empty)
    # re-introduce BERVs to HARPS results:
    # pipeline_rvs -= bervs
    # pipeline_rvs -= np.mean(pipeline_rvs)
    return (data, ivars, xs, pipeline_rvs, pipeline_sigmas, dates, bervs,
            airms, drifts, dates_utc)
        w_order_RVs = pickle.load(f)
        w_order_RVs_barycorr = w_order_RVs
        f.close()
except FileNotFoundError:
    print("RV file not found, recalculating barycentric corrections")
    load_rvs = False

if not load_rvs:
    if 'bary_starname' in locals():
        for n in tqdm(range(len(w_RVs_barycorr))):
            w_RVs_barycorr[n] = bary.get_BC_vel(w_dates_utc[n],
                                                starname=bary_starname,
                                                lat=_lat, longi=_lon,
                                                alt=_elevation,
                                                zmeas=w_RVs[n] / lightvel)[0]
        w_RVs_barycorr_ivar = np.zeros(len(w_dates))
        for n in tqdm(range(len(w_RVs_barycorr_ivar))):
            w_RVs_barycorr_ivar[n] = bary.get_BC_vel(
                w_dates_utc[n], starname=bary_starname, lat=_lat, longi=_lon,
                alt=_elevation, zmeas=w_RVs_own[n] / lightvel)[0]
def use_barycorrpy(p, t, **kwargs):
    """
    Calculate the BERV using barycorrpy

    - kwargs must include:
        ra: float, right ascension in degrees
        dec: float, declination in degrees
        pmra: float, the proper motion in right ascension in mas/yr
              (optional); if not set this is set to 0.0 mas/yr
        pmde: float, the proper motion in declination in mas/yr (optional);
              if not set this is set to 0.0 mas/yr
        plx: float, the parallax in mas (optional); if not set this is set
             to 0.0 mas
        epoch: float, the epoch in Julian date, i.e.
               epoch = astropy.time.Time(2000.0, format='decimalyear').jd
        long: float, the longitude of the observatory (degrees), west is
              defined as negative
        lat: float, the latitude of the observatory (degrees)
        alt: float, the altitude in meters

    - kwargs that are optional:
        return_all: bool, if True returns all BERVs within the time range,
                    if False returns the BERV at time "t"
        timerange: numpy array, if not provided uses np.arange(0, 365, 1.5);
                   the array of days to add on to "t", i.e. the default
                   BERVs calculated are: t + [0, 1.5, 3.0, ..., 363, 364.5];
                   this is used for the maximum BERV returned and for
                   return_all (where all BERVs are returned)

    :param p: param dict or None
    :param t: astropy.time.Time object, the time to use
              e.g. astropy.time.Time('2019-01-01 15:00:00', format='iso')
              e.g. astropy.time.Time(2451544.5, format='jd')
              e.g. astropy.time.Time(51544.0, format='mjd')
    :param kwargs: keyword arguments passed to the BERV calculator

    :returns: berv - barycentric correction [km/s],
              bjd - the barycentric Julian date,
              maxberv - the maximum BERV (if timerange not defined)
    """
    # get variables from kwargs
    return_all = kwargs.get('return_all', False)
    timerange = kwargs.get('timerange', np.arange(0., 365., 1.5))
    kwargs['plx'] = kwargs.get('plx', 0.0)
    kwargs['pmde'] = kwargs.get('pmde', 0.0)
    kwargs['pmra'] = kwargs.get('pmra', 0.0)
    # need to import barycorrpy, which requires online files (astropy iers),
    # therefore provide a way to set the offline version first
    # noinspection PyBroadException
    try:
        # file at: http://maia.usno.navy.mil/ser7/finals2000A.all
        from astropy.utils import iers
        # get package name and relative path
        package = spirouConfig.Constants.PACKAGE()
        iers_dir = spirouConfig.Constants.ASTROPY_IERS_DIR()
        # get absolute folder path from package and relfolder
        absfolder = spirouConfig.GetAbsFolderPath(package, iers_dir)
        # get file name
        file_a = os.path.basename(iers.iers.IERS_A_FILE)
        path_a = os.path.join(absfolder, file_a)
        # set table
        iers.IERS.iers_table = iers.IERS_A.open(path_a)
        import barycorrpy
    except Exception as _:
        emsg1 = 'For method="new" must have barycorrpy installed '
        emsg2 = '\ti.e. ">>> pip install barycorrpy"'
        WLOG(p, 'warning', [emsg1, emsg2])
        raise ImportError(emsg1 + '\n' + emsg2)
    # get the Julian UTC dates for the observation and for obs + 1 year
    jdutc = list(t.jd + timerange)
    # construct lock filename
    lfilename = os.path.join(p['DRS_DATA_REDUC'], 'BERV_lockfile')
    # add a wait for parallelisation
    lock, lfile = spirouFITS.check_fits_lock_file(p, lfilename)
    # get args
    bkwargs = dict(ra=kwargs['ra'], dec=kwargs['dec'], epoch=kwargs['epoch'],
                   px=kwargs['plx'], pmra=kwargs['pmra'],
                   pmdec=kwargs['pmde'], lat=kwargs['lat'],
                   longi=kwargs['long'], alt=kwargs['alt'])
    # calculate barycorrpy
    try:
        bresults1 = barycorrpy.get_BC_vel(JDUTC=jdutc, zmeas=0.0, **bkwargs)
    except Exception as e:
        # close lock
        spirouFITS.close_fits_lock_file(p, lock, lfile, lfilename)
        # re-raise exception to catch later
        emsg1 = 'Error in barycorrpy. Error {0}: {1}'.format(type(e), e)
        emsg2 = 'Parameters were: time={0}'.format(jdutc[0])
        for kwarg in kwargs:
            emsg2 += ' {0}={1}'.format(kwarg, kwargs[kwarg])
        WLOG(p, 'error', [emsg1, emsg2])
        bresults1 = None
    # end wait for parallelisation
    spirouFITS.close_fits_lock_file(p, lock, lfile, lfilename)
    # convert JDUTC to BJDTDB
    bresults2 = barycorrpy.utc_tdb.JDUTC_to_BJDTDB(jdutc, **bkwargs)
    if return_all:
        out = [bresults1[0] / 1000.0, bresults2[0],
               np.max(abs(bresults1[0] / 1000.0))]
        return out
    else:
        # get berv
        berv2 = bresults1[0][0] / 1000.0
        # get bjd
        bjd2 = bresults2[0][0]
        # work out the maximum barycentric correction
        bervmax2 = np.max(abs(bresults1[0] / 1000.))
        # return results
        return berv2, bjd2, bervmax2
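# The kwargs contract of use_barycorrpy, spelled out as a standalone
# dictionary. All numbers are placeholders; only the epoch line follows the
# docstring's own recipe exactly.
import numpy as np
from astropy.time import Time

kwargs = dict(
    ra=69.7, dec=-29.6,                          # target coordinates, deg
    pmra=0.0, pmde=0.0, plx=0.0,                 # mas/yr, mas/yr, mas
    epoch=Time(2000.0, format='decimalyear').jd,
    long=-70.73, lat=-29.26, alt=2400.0,         # deg (W negative), deg, m
    timerange=np.arange(0., 365., 1.5),          # days added to t
    return_all=False,
)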
def compare_results(file_list, parameter_change_list, bary_starname,
                    orbital_parameters_mult, objects, servaldir,
                    serval_T0_offset):
    """
    Compares result files to find the best RV scatter around the literature
    fit and returns the change in parameter that yielded the best results.

    file_list : list of `str`
        list containing the file paths of the files to be compared
    parameter_change_list : list of `int`
        list containing the parameter exponent shifts used to create the
        files in file_list
    orbital_parameters_mult : list of list of `float`
        one set of Keplerian parameters per planet (cf.
        rv.radial_velocity_M0); the fit is used as the "true" baseline
    """
    # HACK select only the first planet for backward compatibility
    # (multi-planet support is unfinished)
    orbital_parameters = orbital_parameters_mult[0]
    sigma_list = np.zeros(len(file_list)) + 100   # 100 is a fudge factor
    plots = True
    if plots:
        # HACK go up 2 directories to the loop directory
        rec_loop_directory, key_name = os.path.split(
            os.path.split(file_list[0])[0])
        plot_directory = rec_loop_directory + "/compare_plots"
        os.makedirs(plot_directory, exist_ok=True)
        pp = PdfPages(plot_directory + "/" + key_name + ".pdf")
        fig = plt.figure(figsize=(15, 9), dpi=200)
        mpl.rc('font', size=16)
        plt.clf()
        fig.clf()
        ax1 = plt.gca()

    # assumes the order of file_list and parameter_change_list are matched
    # (maybe extract from the file name?)
    for f, fil in enumerate(file_list):
        wobble_res = h5py.File(fil, 'r')
        w_dates = wobble_res['dates'][()]
        w_dates_utc = wobble_res['dates_utc'][()]
        w_RVs = wobble_res['star_time_rvs'][()]
        w_RVs_original = w_RVs
        w_RVs_er = wobble_res['star_time_sigmas'][()]

        # barycentric correction for the original wobble RVs
        from scipy.constants import codata
        lightvel = codata.value('speed of light in vacuum')
        # CAHA coordinates for barycorr
        _lat = 37.2236
        _lon = -2.54625
        _elevation = 2168.
        w_RVs_original_barycorr = np.zeros(len(w_dates))
        for n in tqdm(range(len(w_RVs_original_barycorr))):
            w_RVs_original_barycorr[n] = bary.get_BC_vel(
                w_dates_utc[n], starname=bary_starname, lat=_lat, longi=_lon,
                alt=_elevation, zmeas=w_RVs_original[n] / lightvel)[0]

        # SERVAL correction: read in SERVAL (use avcn instead of rvc)
        ser_avcn = np.loadtxt(servaldir + objects[1] + "/" + objects[1] +
                              ".avcn.dat")
        # HACK keep the old variable name so the code below still works
        ser_rvc = ser_avcn
        # remove entries with NaN in drift
        ind_finitedrift = np.isfinite(ser_rvc[:, 3])
        ser_rvc = ser_rvc[ind_finitedrift]
        # avcn columns: 8 = SA drift, 3 = drift, 9 = NZP
        ser_corr = -ser_rvc[:, 8] - ser_rvc[:, 3] - ser_rvc[:, 9]
        # optionally remove the drift correction if it has been performed
        # during data file generation
        correct_w_for_drift = False
        if correct_w_for_drift == False:
            ser_corr_wob = ser_corr + ser_rvc[:, 3]

        # match wobble and SERVAL
        indices_serval = []
        indices_wobble = []
        for n in range(len(w_dates)):
            ind_jd = np.where(np.abs(ser_rvc[:, 0] - w_dates[n]) ==
                              np.nanmin(np.abs(ser_rvc[:, 0] -
                                               w_dates[n])))[0][0]
            if (ser_rvc[ind_jd, 0] - w_dates[n]) * 24 * 60 < 20.:
                # only take matches closer than 20 minutes
                indices_serval.append(ind_jd)
                indices_wobble.append(n)
        print("#serval_ind:" + str(len(indices_serval)),
              "#wobble_ind:" + str(len(indices_wobble)))

        # now set up all the data according to the indices
        ser_rvc = ser_rvc[indices_serval]
        ser_corr = ser_corr[indices_serval]
        ser_corr_wob = ser_corr_wob[indices_serval]
        w_dates = w_dates[indices_wobble]
        w_dates_utc = w_dates_utc[indices_wobble]
        w_RVs_original_barycorr = (w_RVs_original_barycorr[indices_wobble] +
                                   ser_corr_wob)
        w_RVs_er = w_RVs_er[indices_wobble]

        # NOTE imported from compare_ws 25.oct.2019; an untested attempt at
        # multi-planet systems was left here commented out (WONT WORK yet):
        # def keplarian_rv_mult(t):
        #     total_rv = 0
        #     for parameters in orbital_parameters_mult:
        #         total_rv = total_rv + rv.radial_velocity_M0(
        #             t, parameters[0], parameters[1], parameters[2],
        #             parameters[3], parameters[4], parameters[5])
        #     return total_rv
        # def fit_func_mult(t, T0_offset):
        #     return keplarian_rv_mult(t + T0_offset)

        # for single-planet work, e.g. phase-synchronized data
        def keplarian_rv(t):
            parameters = orbital_parameters_mult[0]
            return rv.radial_velocity_M0(t, parameters[0], parameters[1],
                                         parameters[2], parameters[3],
                                         parameters[4], parameters[5])

        def fit_func(t, T0_offset):
            return keplarian_rv(t + T0_offset)

        # EDIT 30.07.2019 catch OptimizeWarning as an error and use the
        # SERVAL-fitted T0_offset instead
        with warnings.catch_warnings():
            warnings.simplefilter("error", OptimizeWarning)
            try:
                # fit to wobble
                xdata = w_dates
                ydata = (w_RVs_original_barycorr -
                         np.nanmean(w_RVs_original_barycorr))
                popt, pcov = sp.optimize.curve_fit(fit_func, xdata, ydata,
                                                   sigma=w_RVs_er,
                                                   absolute_sigma=True,
                                                   p0=serval_T0_offset)
                print("T0_offset Wobble = ", popt)
                T0_offset = popt[0]
                T0_source = "Wobble"
            except OptimizeWarning:
                # fall back to the SERVAL-fitted T0_offset on fit errors;
                # note: breaks if this throws again -> do once globally and
                # import the result
                T0_offset = serval_T0_offset
                T0_source = "SERVAL (wob_fit_err)"
                print("T0_offset Serval (wobble_fit_err) = ", T0_offset)

        # maybe make these weighted (this may not be a good idea if the
        # residuals are not strongly correlated with the errors, as with
        # wobble results)
        sigma_wob = np.nanstd(sigma_clip(
            w_RVs_original_barycorr - np.nanmean(w_RVs_original_barycorr) -
            fit_func(w_dates, T0_offset), sigma=5))
        sigma_list[f] = sigma_wob
        sigma_wob_noclip = np.nanstd(w_RVs_original_barycorr -
                                     np.nanmean(w_RVs_original_barycorr) -
                                     fit_func(w_dates, T0_offset))

        if plots:
            # fit to SERVAL
            xdata = ser_rvc[:, 0]
            ydata = ser_rvc[:, 1] - np.nanmean(ser_rvc[:, 1])
            popt_s, pcov_s = sp.optimize.curve_fit(fit_func, xdata, ydata,
                                                   sigma=ser_rvc[:, 2],
                                                   absolute_sigma=True,
                                                   p0=serval_T0_offset)
            print("T0_offset Serval = ", popt_s)
            T0_offset_s = popt_s[0]
            sigma_ser = np.nanstd(sigma_clip(
                ser_rvc[:, 1] - np.nanmean(ser_rvc[:, 1]) -
                fit_func(ser_rvc[:, 0], T0_offset_s), sigma=5))
            sigma_ser_noclip = np.nanstd(ser_rvc[:, 1] -
                                         np.nanmean(ser_rvc[:, 1]) -
                                         fit_func(ser_rvc[:, 0], T0_offset_s))
            sigma_wob_Soffset = np.nanstd(sigma_clip(
                w_RVs_original_barycorr -
                np.nanmean(w_RVs_original_barycorr) -
                fit_func(w_dates, T0_offset_s), sigma=5))
            sigma_list[f] = sigma_wob
            sigma_wob_noclip_Soffset = np.nanstd(
                w_RVs_original_barycorr -
                np.nanmean(w_RVs_original_barycorr) -
                fit_func(w_dates, T0_offset_s))

            xlst = np.linspace(w_dates[0],
                               w_dates[0] + orbital_parameters[1] * 0.99999,
                               num=100)
            ylst = [fit_func(t, T0_offset) for t in xlst]
            # sort by xlst modulo the period
            pltlst = [[xlst[j], ylst[j]] for j in range(len(xlst))]

            def mod_sort(elem):
                return elem[0] % orbital_parameters[1]

            pltlst = sorted(pltlst, key=mod_sort)
            pltlst = np.asarray(pltlst)
            pltlst = [pltlst[:, 0], pltlst[:, 1]]

            ax1.plot(pltlst[0] % orbital_parameters[1], pltlst[1], "r-",
                     label="literature orbit (" + T0_source + " T0_offset)")
            ax1.errorbar(
                (w_dates) % orbital_parameters[1],
                (w_RVs_original_barycorr -
                 np.nanmean(w_RVs_original_barycorr)),
                yerr=w_RVs_er, fmt="x",
                label="Wobble_Corr, clipped_sigma = {0:.3f}, noclip = {1:.3f} "
                      .format(sigma_wob, sigma_wob_noclip))
            ax1.errorbar(
                (ser_rvc[:, 0]) % orbital_parameters[1],
                ser_rvc[:, 1] - np.nanmean(ser_rvc[:, 1]),
                yerr=ser_rvc[:, 2], fmt="x",
                label="SERVAL_Corr, clipped_sigma = {0:.3f}, noclip = {1:.3f}"
                      .format(sigma_ser, sigma_ser_noclip),
                color="C2")
            ax1.plot([], [], ' ',
                     label="Wobble_Corr_SERVAL_fit, clipped_sigma = {0:.3f}, "
                           "noclip = {1:.3f} ".format(
                               sigma_wob_Soffset, sigma_wob_noclip_Soffset))
            ax1.set_ylabel("RVs [m/s]")
            ax1.set_xlabel('jd')
            # add the parameter change to the title
            title_pre = os.path.split(os.path.split(fil)[0])[1]
            plt.title(title_pre + ", Phased (" + str(orbital_parameters[1]) +
                      "d) RVs for " + objects[0] + " (" + objects[2] + ") " +
                      " - " + objects[1] + ";")
            plt.grid(True)
            plt.tight_layout()
            plt.legend(shadow=True)
            plt.savefig(pp, format='pdf')
            plt.clf()
            fig.clf()
            ax1 = plt.gca()

    if plots:
        # TODO make the progress plots not crudely placed inside this function
        plt.close(fig)
        pp.close()

    best_index = np.argmin(sigma_list)
    return parameter_change_list[best_index]
def compare_results(file_list, parameter_change_list, bary_starname,
                    orbital_parameters, objects, servaldir):
    """
    Compares result files to find the best RV scatter around the literature
    fit and returns the change in parameter that yielded the best results.

    file_list : list of `str`
        list containing the file paths of the files to be compared
    parameter_change_list : list of `int`
        list containing the parameter exponent shifts used to create the
        files in file_list
    orbital_parameters : list of `float`
        orbital_parameters = [K, P, e, omega, T0]; parameters of the
        Keplerian fit to be used as the "true" baseline
    """
    sigma_list = np.zeros(len(file_list)) + 100   # 100 is a fudge factor
    # assumes the order of file_list and parameter_change_list are matched
    # (maybe extract from the file name?)
    for f, fil in enumerate(file_list):
        wobble_res = h5py.File(fil, 'r')
        w_dates = wobble_res['dates'][()]
        w_dates_utc = wobble_res['dates_utc'][()]
        w_RVs = wobble_res['star_time_rvs'][()]
        w_RVs_original = w_RVs
        w_RVs_er = wobble_res['star_time_sigmas'][()]

        # barycentric correction for the original wobble RVs
        from scipy.constants import codata
        lightvel = codata.value('speed of light in vacuum')
        # CAHA coordinates for barycorr
        _lat = 37.2236
        _lon = -2.54625
        _elevation = 2168.
        w_RVs_original_barycorr = np.zeros(len(w_dates))
        for n in tqdm(range(len(w_RVs_original_barycorr))):
            w_RVs_original_barycorr[n] = bary.get_BC_vel(
                w_dates_utc[n], starname=bary_starname, lat=_lat, longi=_lon,
                alt=_elevation, zmeas=w_RVs_original[n] / lightvel)[0]

        # SERVAL correction: read in SERVAL
        ser_rvc = np.loadtxt(servaldir + objects[1] + "/" + objects[1] +
                             ".rvc.dat")
        # remove entries with NaN in drift
        ind_finitedrift = np.isfinite(ser_rvc[:, 3])
        ser_rvc = ser_rvc[ind_finitedrift]
        ser_corr = -ser_rvc[:, 8] - ser_rvc[:, 3]

        # match wobble and SERVAL
        indices_serval = []
        indices_wobble = []
        for n in range(len(w_dates)):
            ind_jd = np.where(np.abs(ser_rvc[:, 0] - w_dates[n]) ==
                              np.nanmin(np.abs(ser_rvc[:, 0] -
                                               w_dates[n])))[0][0]
            if (ser_rvc[ind_jd, 0] - w_dates[n]) * 24 * 60 < 20.:
                # only take matches closer than 20 minutes
                indices_serval.append(ind_jd)
                indices_wobble.append(n)
        print("#serval_ind:" + str(len(indices_serval)),
              "#wobble_ind:" + str(len(indices_wobble)))

        # now set up all the data according to the indices
        ser_rvc = ser_rvc[indices_serval]
        ser_corr = ser_corr[indices_serval]
        w_dates = w_dates[indices_wobble]
        w_dates_utc = w_dates_utc[indices_wobble]
        w_RVs_original_barycorr = (w_RVs_original_barycorr[indices_wobble] +
                                   ser_corr)
        w_RVs_er = w_RVs_er[indices_wobble]

        def fit_func(t, T0_offset):
            return rv.radial_velocity(t, orbital_parameters[0],
                                      orbital_parameters[1],
                                      orbital_parameters[2],
                                      orbital_parameters[3],
                                      orbital_parameters[4] + T0_offset)

        # fit to wobble
        xdata = w_dates
        ydata = w_RVs_original_barycorr - np.nanmean(w_RVs_original_barycorr)
        popt, pcov = sp.optimize.curve_fit(fit_func, xdata, ydata,
                                           sigma=w_RVs_er,
                                           absolute_sigma=True)
        print("T0_offset Wobble = ", popt)
        T0_offset = popt[0]

        # maybe make these weighted (this may not be a good idea if the
        # residuals are not strongly correlated with the errors, as with
        # wobble results)
        sigma_wob = np.nanstd(sigma_clip(
            w_RVs_original_barycorr - np.nanmean(w_RVs_original_barycorr) -
            fit_func(w_dates, T0_offset), sigma=5))
        sigma_list[f] = sigma_wob
        # TODO include some nice progress plots

    best_index = np.argmin(sigma_list)
    return parameter_change_list[best_index]
def process_science_images(imglist, P_id, mask=None, sampling_size=25,
                           slit_height=25, gain=[1., 1., 1., 1.], MB=None,
                           ronmask=None, MD=None, scalable=False,
                           saveall=False, path=None, ext_method='optimal',
                           from_indices=True, timit=False):
    """
    Process all science images. This includes:

    (1) bias and dark subtraction
    (2) cosmic ray removal
    (3) background extraction and estimation
    (4) flat-fielding (ie removal of pixel-to-pixel sensitivity variations)
    =============================
    (5) extraction of stripes
    (6) extraction of 1-dim spectra
    (7) get relative intensities of different fibres
    (8) wavelength solution
    (9) barycentric correction
    """
    if timit:
        start_time = time.time()

    #####################################
    ### (1) bias and dark subtraction ###
    #####################################

    # if the darks have a different exposure time than the science images,
    # then we need to re-scale the master dark
    texp = pyfits.getval(imglist[0], 'exptime')

    # if INPUT arrays are not given, read them from default files
    if path is None:
        print('WARNING: output file directory not provided!!!')
        print('Using same directory as input file...')
        dum = imglist[0].split('/')
        path = imglist[0][0:-len(dum[-1])]
    if MB is None:
        # no need to fix orientation, this is already a processed file [ADU]
        MB = pyfits.getdata(path + 'master_bias.fits')
    if ronmask is None:
        # no need to fix orientation, this is already a processed file [e-]
        ronmask = pyfits.getdata(path + 'read_noise_mask.fits')
    if MD is None:
        if scalable:
            # no need to fix orientation, this is already a processed file [e-]
            MD = pyfits.getdata(path + 'master_dark_scalable.fits', 0)
            # err_MD = pyfits.getdata(path + 'master_dark_scalable.fits', 1)
        else:
            # no need to fix orientation, this is already a processed file [e-]
            MD = pyfits.getdata(path + 'master_dark_t' +
                                str(int(np.round(texp, 0))) + '.fits', 0)
            # err_MD = pyfits.getdata(path + 'master_dark_t' +
            #                         str(int(np.round(texp, 0))) + '.fits', 1)

    if not from_indices:
        ron_stripes = extract_stripes(ronmask, P_id, return_indices=False,
                                      slit_height=slit_height,
                                      savefiles=False, timit=True)

    for filename in imglist:
        # do some housekeeping with filenames
        dum = filename.split('/')
        dum2 = dum[-1].split('.')
        obsname = dum2[0]

        # (1) call routine that does all the bias and dark correction stuff
        #     with proper error treatment
        img = correct_for_bias_and_dark_from_filename(filename, MB, MD,
                                                      gain=gain,
                                                      scalable=False,
                                                      savefile=saveall,
                                                      path=path,
                                                      timit=True)   # [e-]
        # err = np.sqrt(img + ronmask*ronmask)   # [e-]
        # TEMPFIX:
        err_img = np.sqrt(np.clip(img, 0, None) + ronmask * ronmask)  # [e-]

        # (2) remove cosmic rays (ERRORS REMAIN UNCHANGED)
        cosmic_cleaned_img = remove_cosmics(img, ronmask, obsname, path,
                                            Flim=3.0, siglim=5.0, maxiter=1,
                                            savemask=True, savefile=True,
                                            save_err=False, verbose=True,
                                            timit=True)   # [e-]
        # adjust errors?

        # (3) fit and remove background (ERRORS REMAIN UNCHANGED)
        bg_corrected_img = remove_background(cosmic_cleaned_img, P_id,
                                             obsname, path, degpol=5,
                                             slit_height=slit_height,
                                             save_bg=True, savefile=True,
                                             save_err=False,
                                             exclude_top_and_bottom=True,
                                             verbose=True, timit=True)  # [e-]
        # adjust errors?

        # (4) remove pixel-to-pixel sensitivity variations (2-dim)
        # TEMPFIX
        final_img = bg_corrected_img.copy()   # [e-]
        # adjust errors?

        # (5) extract stripes
        stripes, stripe_indices = extract_stripes(final_img, P_id,
                                                  return_indices=True,
                                                  slit_height=slit_height,
                                                  savefiles=True,
                                                  obsname=obsname, path=path,
                                                  timit=True)
        if not from_indices:
            err_stripes = extract_stripes(err_img, P_id,
                                          return_indices=False,
                                          slit_height=slit_height,
                                          savefiles=True,
                                          obsname=obsname + '_err',
                                          path=path, timit=True)

        # (6) perform extraction of 1-dim spectrum
        if from_indices:
            pix, flux, err = extract_spectrum_from_indices(
                final_img, err_img, stripe_indices, method=ext_method,
                slit_height=slit_height, RON=ronmask, savefile=True,
                filetype='fits', obsname=obsname, path=path, timit=True)
        else:
            pix2, flux2, err2 = extract_spectrum(
                stripes, err_stripes=err_stripes, ron_stripes=ron_stripes,
                method=ext_method, slit_height=slit_height, RON=ronmask,
                savefile=False, filetype='fits', obsname=obsname, path=path,
                timit=True)

        # (7) get relative intensities of different fibres
        if from_indices:
            relints = get_relints_from_indices(P_id, final_img, err_img,
                                               stripe_indices, mask=mask,
                                               sampling_size=sampling_size,
                                               slit_height=slit_height,
                                               return_full=False, timit=True)
        else:
            relints = get_relints(P_id, stripes, err_stripes, mask=mask,
                                  sampling_size=sampling_size,
                                  slit_height=slit_height,
                                  return_full=False, timit=True)

        # (8) get wavelength solution
        # XXXXX

        # (9) get barycentric correction
        # NOTE: JDUTC is not defined in this snippet; it has to be derived
        # from the exposure time stamps
        lat, long, alt = get_obs_coords_from_header(filename)
        bc = barycorrpy.get_BC_vel(JDUTC=JDUTC, hip_id=8102, lat=lat,
                                   longi=long, alt=float(alt),
                                   ephemeris='de430', zmeas=0.0)
        # bc = barycorrpy.get_BC_vel(JDUTC=JDUTC, hip_id=8102, lat=-31.2755,
        #                            longi=149.0673, alt=1165.0,
        #                            ephemeris='de430', zmeas=0.0)
        # bc = barycorrpy.get_BC_vel(JDUTC=JDUTC, hip_id=8102, obsname='AAO',
        #                            ephemeris='de430')

        # now append relints, wl-solution, and barycorr to the extracted
        # FITS file header
        outfn = path + obsname + '_extracted.fits'
        if os.path.isfile(outfn):
            # relative fibre intensities
            dum = append_relints_to_FITS(relints, outfn, nfib=19)
            # wavelength solution
            # pyfits.setval(fn, 'RELINT' + str(i + 1).zfill(2),
            #               value=relints[i], comment='fibre #' +
            #               str(fibnums[i]) + ' - ' + fibinfo[i] + ' fibre')
            # barycentric correction
            pyfits.setval(outfn, 'BARYCORR', value=np.array(bc[0])[0],
                          comment='barycentric correction [m/s]')

    if timit:
        print('Total time elapsed: ' +
              str(np.round(time.time() - start_time, 1)) + ' seconds')
    return
def bjdbrv(jd_utc, ra, dec, obsname=None, lat=0., lon=0., elevation=None,
           pmra=0., pmdec=0., parallax=0., rv=0., zmeas=0.,
           epoch=2451545.0, tbase=0., **kwargs):
    """
    Wrapper to barycorrpy.py and utc2bjd. Computes the barycentric velocity
    correction and the barycentric Julian date in one call.
    Keyword obsname refers to observatory.pro in the IDL Astronomy User Library
    See also: http://astroutils.astronomy.ohio-state.edu/exofast/barycorr.html

    :param jd_utc: Julian date (UTC)
    :param ra: RA (J2000) [deg]
    :param dec: Dec (J2000) [deg]
    :param obsname: Observatory name (overrides coordinates if set)
    :param lat: Observatory latitude [deg]
    :param lon: Observatory longitude (E) [+/-360 deg]
    :param elevation: Observatory elevation [m]
    :param pmra: Proper motion (RA*cos(Dec)) [mas/yr]
    :param pmdec: Proper motion (Dec) [mas/yr]
    :param parallax: Parallax [mas]
    :param rv: Radial velocity (within 100 km/s) [m/s]
    :param zmeas: Measured redshift
    :param epoch: Epoch (default 2451545.0, J2000)
    :param tbase: Baseline subtracted from times (default 0.0)
    :return: BJD_TDB and barycentric correction for zmeas

    Example:
    --------
    >>> from brv_we14py import bjdbrv
    >>> print(bjdbrv(2457395.24563, 4.585590721, 44.02195596, 'ca'))
    (2457395.247062386, -23684.54364462639)
    """
    # translation obsname_idl -> obsname_py
    if obsname == 'ca':
        lat = 37.2236
        lon = -2.5463
        elevation = 2168.
    if obsname == 'eso':
        # obsname = 'lasilla'
        lat = -29.2584
        lon = -70.7345
        elevation = 2400.
    if obsname == 'lapalma':
        lat = 28.754000
        lon = -17.88905555
        elevation = 2387.2

    # Barycentric Julian Date, adapted from
    # http://docs.astropy.org/en/stable/time/#barycentric-and-heliocentric-light-travel-time-corrections
    targ = coord.SkyCoord(ra, dec, unit=(u.deg, u.deg), frame='icrs')
    loc = coord.EarthLocation.from_geodetic(lon, lat, height=elevation)
    JDUTC = Time(jd_utc, format='jd', scale='utc')
    if JDUTC.isscalar:
        ltt_bary = JDUTC.light_travel_time(targ, location=loc)
        bjd = JDUTC.tdb + ltt_bary
    else:
        # Does not work vectorised with numpy 1.14:
        # *** TypeError: For this input type lists must contain either
        #     int or Ellipsis
        # https://github.com/astropy/astropy/issues/7051
        bjd = [(jdutc.tdb + jdutc.light_travel_time(targ, location=loc)).value
               for jdutc in JDUTC]

    brv, warning_and_error, status = barycorrpy.get_BC_vel(
        JDUTC, ra=ra, dec=dec, epoch=epoch, pmra=pmra, pmdec=pmdec,
        px=parallax, lat=lat, longi=lon, alt=elevation, **kwargs)

    return (bjd.value, brv[0]) if JDUTC.isscalar else (bjd, brv)
def read_data_from_fits(filelist, arm='vis', starname = None, serval_dir = None, nzp_shift = True, drift_shift = True): names = pd.read_csv(os.path.dirname(os.path.abspath(__file__)) + '/carmenes_aux_files/name_conversion_list.csv') name_dict = dict(zip(names['#Karmn'], names['Name'])) # input : a list of filenames N = len(filelist) # number of epochs M, R = dimensions(arm) data = [np.zeros((N, M)) for r in range(R)] ivars = [np.zeros((N, M)) for r in range(R)] xs = [np.zeros((N, M)) for r in range(R)] empty = np.array([], dtype=int) pipeline_rvs, pipeline_sigmas, dates, bervs, airms, drifts, dates_utc, total_drifts = np.zeros(N), np.zeros(N), np.zeros(N), np.zeros(N), np.zeros(N), np.zeros(N) ,np.zeros(N), np.zeros(N) #file headers sometimes list object not by Carmenes ID but instead with catalogue names (e.g. GJ436 obs 81 lists Ross905 istead of J11421+267. FIX: sp = fits.open(filelist[0]) carmenes_object_ID_master = str(sp[0].header['OBJECT']).strip() #ID in header has extra space in front of it print("Object_ID: ", carmenes_object_ID_master) for n, f in enumerate(tqdm(filelist)): sp = fits.open(f) if nzp_shift == True: if not serval_dir: raise Exception("no serval directory supplied. Cannot correct for NZP") #include NZP by adding them to drifts before correction else: #nzp_shift = True carmenes_object_ID = str(sp[0].header['OBJECT']).strip() #ID in header has extra space in front of it if carmenes_object_ID != carmenes_object_ID_master: print() print("mismatched object ID: " ,carmenes_object_ID) print("n, f:", n, f) print() #use master instead: carmenes_object_ID = carmenes_object_ID_master ser_avcn = np.loadtxt(serval_dir+ carmenes_object_ID +"/"+ carmenes_object_ID +".avcn.dat") nzp = ser_avcn[:,9] try: pipeline_rvs[n] = sp[0].header['HIERARCH CARACAL SERVAL RV'] * 1.e3 # m/s pipeline_sigmas[n] = sp[0].header['HIERARCH CARACAL SERVAL E_RV'] * 1.e3 # m/s except KeyError: pipeline_rvs[n] = 0 pipeline_sigmas[n] = 0 try: drifts[n] = sp[0].header['HIERARCH CARACAL DRIFT FP RV'] except KeyError: print("WARNING: {0} Drift missing. Skipping this one.".format(f)) empty = np.append(empty, n) continue if not starname: starname = name_dict[sp[0].header['OBJECT']] jd_start = Time(sp[0].header['DATE-OBS']) jd_mid = jd_start.jd + sp[0].header['HIERARCH CARACAL TMEAN'] * 1/(24*60*60) dates_utc[n] = jd_mid # for nir ignore all dates before 2016. recommended by Adrian #print("before bary") date = bary.JDUTC_to_BJDTDB(jd_mid, starname, leap_update = False #HACK barycorrpy issue 27 )[0] #print("after bary") #if date >=2457754.5:#1 JAN 2017 for cutting problematic nir measurements if date >=2000000.0: #HACK to get GJ436 NIR plot dates[n] = date else: if arm == "vis": dates[n] = date elif arm == "nir": print("Date is before 2017 for NIR measurement. Skipping this one.") empty = np.append(empty, n) continue else: print("{} not recognized. 
valid options are: \"vis\" or" " \"nir\"".format(arm)) return bervs[n] = bary.get_BC_vel(jd_mid, starname=starname, lat=_lat, longi=_lon, alt=_elevation, leap_update = False #HACK barycorrpy issue 27 )[0] # m/s airms[n] = sp[0].header['AIRMASS'] try: wave = sp['WAVE'].data spec = sp['SPEC'].data sig = sp['SIG'].data except Exception as e: print('{} Skipping file {}.'.format(e, f)) empty = np.append(empty, n) continue total_drifts[n] = drifts[n] if nzp_shift: #match only the nth date #match the observation by the JDs -> start with wobble date and find the one with the lowest timediff from serval indices_serval = [] indices_wobble = [] #for n in range(len(dates)): ind_jd = np.where(np.abs(ser_avcn[:,0]- dates[n]) == np.nanmin(np.abs(ser_avcn[:,0]- dates[n])))[0][0] if (ser_avcn[ind_jd,0]-dates[n])*24*60<20.: #only takes matches closer than 20 minutes indices_serval.append(ind_jd) indices_wobble.append(n) # add NZP to drift corrections that match dates in SERVAL: total_drifts[n] = total_drifts[n] + nzp[indices_serval] #print("totals:", total_drifts) # save stuff for r in range(R): data[r][n, :] = spec[r, :] ivars[r][n, :] = 1 / sig[r, :]**2 if drift_shift == False: xs[r][n, :] = wave[r, :] # replaced with drfit corrected version else: for l in range(len(data[r][n,:])): lambda_drifts = lambda_drift(total_drifts[n], wave[r, l]) xs[r][n, l] = wave[r, l] - lambda_drifts # delete data with missing attributes: for r in range(R): data[r] = np.delete(data[r], empty, axis=0) ivars[r] = np.delete(ivars[r], empty, axis=0) xs[r] = np.delete(xs[r], empty, axis=0) pipeline_rvs = np.delete(pipeline_rvs, empty) pipeline_sigmas = np.delete(pipeline_sigmas, empty) dates = np.delete(dates, empty) bervs = np.delete(bervs, empty) airms = np.delete(airms, empty) drifts = np.delete(drifts, empty) dates_utc= np.delete(dates_utc, empty) # re-introduce BERVs to HARPS results: # pipeline_rvs -= bervs # pipeline_rvs -= np.mean(pipeline_rvs) return data, ivars, xs, pipeline_rvs, pipeline_sigmas, dates, bervs, airms, drifts, dates_utc
from barycorrpy import get_BC_vel filename_id = new_colnames.index("filename") MJDOBS_id = new_colnames.index("MJD-OBS") try: bary_rv_id = new_colnames.index("barycenter rv") except: new_colnames.append("barycenter rv") new_list_data = [item + [ np.nan, ] for item in new_list_data] bary_rv_id = new_colnames.index("barycenter rv") for k, item in enumerate(new_list_data): MJDOBS = float(item[MJDOBS_id]) result = get_BC_vel(MJDOBS + 2400000.5, hip_id=116805, obsname="Keck Observatory", ephemeris="de430") new_list_data[k][bary_rv_id] = result[0][0] if 0: # add filename if 0: filename_id = new_colnames.index("filename") ifs_filter_id = new_colnames.index("IFS filter") filelist = [item[filename_id] for item in new_list_data] filelist.sort() seqid = 0 imid = 0 pastnum = 0 for k, filename in enumerate(filelist): # if "Hbb" in new_list_data[k][ifs_filter_id]: # print("[\"{0}\",,],".format(filename) )
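Since get_BC_vel accepts an array of JDUTC values (as in the drift-rate example earlier), the per-row loop above could also be replaced by a single vectorised call. A minimal sketch with illustrative MJD values and the same HIP id and observatory name:

import numpy as np
from barycorrpy import get_BC_vel

mjds = np.array([58000.1, 58001.2, 58002.3])   # illustrative MJD-OBS values
# MJD -> JD(UTC) conversion adds 2400000.5, as in the loop above
vels, warnings, status = get_BC_vel(JDUTC=mjds + 2400000.5,
                                    hip_id=116805,
                                    obsname="Keck Observatory",
                                    ephemeris="de430")
# vels holds one barycentric velocity [m/s] per input epoch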
def read_data_from_fits(filelist, arm='vis', starname=None): names = pd.read_csv('name_conversion_list.csv') name_dict = dict(zip(names['#Karmn'], names['Name'])) # input : a list of filenames N = len(filelist) # number of epochs M, R = dimensions(arm) data = [np.zeros((N, M)) for r in range(R)] ivars = [np.zeros((N, M)) for r in range(R)] xs = [np.zeros((N, M)) for r in range(R)] empty = np.array([], dtype=int) pipeline_rvs, pipeline_sigmas, dates, bervs, airms, drifts = np.zeros( N), np.zeros(N), np.zeros(N), np.zeros(N), np.zeros(N), np.zeros(N) for n, f in enumerate(filelist): sp = fits.open(f) try: pipeline_rvs[ n] = sp[0].header['HIERARCH CARACAL SERVAL RV'] * 1.e3 # m/s pipeline_sigmas[ n] = sp[0].header['HIERARCH CARACAL SERVAL E_RV'] * 1.e3 # m/s except KeyError: pipeline_rvs[n] = 0 pipeline_sigmas[n] = 0 try: drifts[n] = sp[0].header['HIERARCH CARACAL DRIFT FP RV'] except KeyError: print("WARNING: {0} Drift missing. Skipping this one.".format(f)) empty = np.append(empty, n) continue if not starname: starname = name_dict[sp[0].header['OBJECT']] jd_start = Time(sp[0].header['DATE-OBS']) jd_mid = jd_start.jd + sp[0].header['HIERARCH CARACAL TMEAN'] * 1 / ( 24 * 60 * 60) dates[n] = bary.JDUTC_to_BJDTDB(jd_mid, starname)[0] bervs[n] = bary.get_BC_vel( jd_mid, starname=starname, lat=_lat, longi=_lon, alt=_elevation)[0] # m/s # why not use dates[n]? airms[n] = sp[0].header['AIRMASS'] try: wave = sp['WAVE'].data spec = sp['SPEC'].data sig = sp['SIG'].data except Exception as e: print('{} Skipping file {}.'.format(e, f)) empty = np.append(empty, n) continue # save stuff for r in range(R): data[r][n, :] = spec[r, :] ivars[r][n, :] = 1 / sig[r, :]**2 xs[r][n, :] = wave[r, :] # delete data with missing attributes: for r in range(R): data[r] = np.delete(data[r], empty, axis=0) ivars[r] = np.delete(ivars[r], empty, axis=0) xs[r] = np.delete(xs[r], empty, axis=0) pipeline_rvs = np.delete(pipeline_rvs, empty) pipeline_sigmas = np.delete(pipeline_sigmas, empty) dates = np.delete(dates, empty) bervs = np.delete(bervs, empty) airms = np.delete(airms, empty) drifts = np.delete(drifts, empty) # re-introduce BERVs to HARPS results: # pipeline_rvs -= bervs # pipeline_rvs -= np.mean(pipeline_rvs) return data, ivars, xs, pipeline_rvs, pipeline_sigmas, dates, bervs, airms, drifts
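Both CARMENES readers pass module-level _lat, _lon and _elevation constants to bary.get_BC_vel without defining them in this excerpt. A short sketch of those constants, assuming they refer to the Calar Alto site whose coordinates also appear in the 'ca' branch of the bjdbrv wrapper above:

# assumed module-level observatory constants for the CARMENES readers
# (Calar Alto values, matching the 'ca' branch of bjdbrv above)
_lat = 37.2236        # latitude [deg]
_lon = -2.5463        # longitude, east positive [deg]
_elevation = 2168.    # altitude [m]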
def old_get_barycentric_correction(fn, starname='tauceti', h=0.01, w=0.01): # wrapper routine for using barycorrpy with Gaia DR2 coordinates # use 2015.5 as an epoch (Gaia DR2) epoch = 2457206.375 # get UT obs start time utmjd = pyfits.getval( fn, 'UTMJD' ) + 2.4e6 + 0.5 # the fits header has 2,400,000.5 subtracted!!!!! # add half the exposure time in days texp = pyfits.getval(fn, 'ELAPSED') utmjd = utmjd + (texp / 2.) / 86400. # ra = pyfits.getval(fn, 'MEANRA') # dec = pyfits.getval(fn, 'MEANDEC') if starname.lower() == 'tauceti': gaia_dr2_id = 2452378776434276992 ra = 26.00930287666994 dec = -15.933798650941204 rv = -16.68e3 h = 0.01 w = 0.01 elif starname.lower() == 'toi129': ra = 0.187097 dec = -54.830506 rv = 21.04070239e3 h = 0.005 w = 0.005 elif starname.lower() == 'gj674': gaia_dr2_id = 5951824121022278144 rv = -2.73 else: fu = 1 assert fu != 1, 'ERROR: need to implement that first...' coord = SkyCoord(ra=ra, dec=dec, unit=(u.degree, u.degree), frame='icrs') width = u.Quantity(w, u.deg) height = u.Quantity(h, u.deg) # gaia_data = Gaia.query_object_async(coordinate=coord, width=width, height=height) q = Gaia.launch_job_async( 'SELECT * FROM gaiadr2.gaia_source WHERE source_id = ' + str(gaia_dr2_id)) gaia_data = q.results bc = barycorrpy.get_BC_vel(JDUTC=utmjd, ra=gaia_data['ra'], dec=gaia_data['dec'], pmra=gaia_data['pmra'], pmdec=gaia_data['pmdec'], px=gaia_data['parallax'], rv=rv, epoch=epoch, obsname='AAO', ephemeris='de430') # bc = barycorrpy.get_BC_vel(JDUTC=utmjd, ra=ra, dec=dec, pmra=gaia_data['pmra'], pmdec=gaia_data['pmdec'], # px=gaia_data['parallax'], rv=gaia_data['radial_velocity']*1e3, obsname='AAO', ephemeris='de430') # bc = barycorrpy.get_BC_vel(JDUTC=utmjd, ra=ra, dec=dec, pmra=pmra, pmdec=pmdec, # px=px, rv=rv, obsname='AAO', ephemeris='de430') return bc[0][0]
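The Gaia astrometry in the routine above comes straight from an ADQL job. A minimal sketch of that lookup factored into a reusable helper, assuming astroquery's Job.get_results() accessor and returning plain floats; the function name and the cast to float are illustrative choices, not part of the original code:

from astroquery.gaia import Gaia

def gaia_dr2_astrometry(source_id):
    """Fetch ra, dec, pmra, pmdec and parallax for one Gaia DR2 source
    and return them as plain floats for use with barycorrpy.get_BC_vel."""
    job = Gaia.launch_job('SELECT ra, dec, pmra, pmdec, parallax '
                          'FROM gaiadr2.gaia_source '
                          'WHERE source_id = {}'.format(source_id))
    row = job.get_results()[0]
    return (float(row['ra']), float(row['dec']), float(row['pmra']),
            float(row['pmdec']), float(row['parallax']))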
def main_script_for_sarah(date='20190722'): # Gaia DR2 ID dictionary for Zucker July 2019 targets hipnum_dict = { '10144': 7588, '121263': 68002, '175191': 92855, 'HD 206739 (std)': 107337 } gaia_dict = { '105435': 6126469654573573888, '118716': 6065752767077605504, '120324': 6109087784487667712, '132058': 5908509891187794176, '143018': 6235406071206201600, '157246': 5922299343254265088, '209952': 6560604777053880704, 'HD 140283 (std)': 6268770373590148224, 'HE 0015+0048': 2547143725127991168, 'HE 0107-5240': 4927204800008334464, 'CS 29504-006': 5010164739030492032, 'CS 22958-042': 4718427642340545408, 'HE 1133-0555': 3593627144045992832, 'HE 1249-3121': 6183795820024064256, 'HE 1310-0536': 3635533208672382592, 'CS 22877-001': 3621673727165280384, 'HE 1327-2326': 6194815228636688768, 'G64-12': 3662741860852094208, 'G64-37': 3643857920443831168, 'HE 1410+0213': 3667206118578976896, 'BD-18 5550': 6867802519062194560, 'BPS CS 30314-067': 6779790049231492096, 'CS 29498-043': 6788448668941293952, 'HE 2139-5432 ': 6461736966363075200, 'BPS CS 29502-092': 2629500925618285952, 'HE 2302-2154a': 2398202677437168384, 'CD-24 17504': 2383484851010749568, 'HE 2318-1621': 2406023396270909440, 'HE 2319-5228': 6501398446721935744, '6182748015506372480': 6182748015506372480, '6192933650707925376': 6192933650707925376, '6192500855443308160': 6192500855443308160, '6194706681927050496': 6194706681927050496, '6190169375397005824': 6190169375397005824, '6190736590253462784': 6190736590253462784, '151008003501121': 2558459589561967232, '141031003601274': 2977723336242924544, '140311007101309': 5363629792898912512, '150408004101222': 5398144047005910656, '170130004601208': 3698111844248492160, '170506003901265': 6140829138994504960, '160403004701275': 3616785740848955776, '140310004701055': 3673146848623371264, '160520004901236': 5818849184718555392, '170711003001241': 4377886454310583168, '140711001901267': 5809854183164908928, '170615004401258': 6702907209758894848, '170912002401113': 6888748417431916928, '160724002601324': 1733472307022576384, '140810004201231': 6579952677010742272, '171106002401258': 2668887906026528000, '140812004401091': 6397474768030945152, '140810005301179': 6406537325120547456, '140805004201070': 6381051156688800896, '170711005801135': 6485376840021854848 } # assign wl-solutions to stellar spectra by linear interpolation between a library of fibThXe dispsols path = '/Volumes/BERGRAID/data/veloce/reduced/' + date + '/' air_wl_list = glob.glob(path + 'fibth_dispsols/' + '*air*.fits') air_wl_list.sort() vac_wl_list = glob.glob(path + 'fibth_dispsols/' + '*vac*.fits') vac_wl_list.sort() fibth_obsnames = [fn.split('_air_')[0][-10:] for fn in air_wl_list] # arc_list = glob.glob(path + 'calibs/' + 'ARC*optimal*.fits') used_fibth_list = [ path + 'calibs/' + 'ARC - ThAr_' + obsname + '_optimal3a_extracted.fits' for obsname in fibth_obsnames ] stellar_list = glob.glob(path + 'stellar_only/' + '*optimal*.fits') stellar_list.sort() # stellar_list_quick = glob.glob(path + 'stellar_only/' + '*quick*.fits') # stellar_list_quick.sort() t_calibs = np.array([ pyfits.getval(fn, 'UTMJD') + 0.5 * pyfits.getval(fn, 'ELAPSED') / 86400. for fn in used_fibth_list ]) # t_stellar = [pyfits.getval(fn, 'UTMJD') + 0.5*pyfits.getval(fn, 'ELAPSED')/86400. 
for fn in stellar_list] ### STEP 1: create w (39 x 26 x 4112) wavelength solution for every stellar observation by linearly interpolating between wl-solutions of surrounding fibre ThXe exposures # loop over all stellar observations for i, file in enumerate(stellar_list): if i == 0: print('STEP 1: wavelength solutions') print(str(i + 1) + '/' + str(len(stellar_list))) # get observation midpoint in time tobs = pyfits.getval( file, 'UTMJD') + 0.5 * pyfits.getval(file, 'ELAPSED') / 86400. # find the indices of the ARC files bracketing the stellar observations above = np.argmax( t_calibs > tobs) # first occurrence where t_calibs are larger than tobs below = above - 1 # get obstimes and wl solutions for these ARC exposures t1 = t_calibs[below] t2 = t_calibs[above] wl1 = pyfits.getdata(air_wl_list[below]) wl2 = pyfits.getdata(air_wl_list[above]) # do a linear interpolation to find the wl-solution at t=tobs wl = interpolate_dispsols(wl1, wl2, t1, t2, tobs) # append this wavelength solution to the extracted spectrum FITS files pyfits.append(file, wl, clobber=True) ### STEP 2: append barycentric correction!?!?!? # loop over all stellar observations for i, file in enumerate(stellar_list): if i == 0: print print('STEP 3: appending barycentric correction') print(str(i + 1) + '/' + str(len(stellar_list))) # get object name object = pyfits.getval(file, 'OBJECT').split('+')[0] # get observation midpoint in time (in JD) jd = pyfits.getval(file, 'UTMJD') + 0.5 * pyfits.getval( file, 'ELAPSED') / 86400. + 2.4e6 + 0.5 # get Gaia DR2 ID from object if object in gaia_dict.keys(): gaia_dr2_id = gaia_dict[object] # get barycentric correction from Gaia DR2 ID and obstime try: bc = get_bc_from_gaia(gaia_dr2_id, jd)[0] except: bc = get_bc_from_gaia(gaia_dr2_id, jd) else: hipnum = hipnum_dict[object] bc = barycorrpy.get_BC_vel(JDUTC=jd, hip_id=hipnum, obsname='AAO', ephemeris='de430')[0][0] bc = np.round(bc, 2) assert not np.isnan( bc ), 'ERROR: could not calculate barycentric correction for ' + file print('barycentric correction for object ' + object + ' : ' + str(bc) + ' m/s') # write the barycentric correction into the FITS header of both the quick-extracted and the optimal-extracted reduced spectrum files pyfits.setval(file, 'BARYCORR', value=bc, comment='barycentric velocity correction [m/s]') ### STEP 3: combine the flux in all fibres for each exposure (by going to a common wl-grid (by default the one for the central fibre) and get median sky spectrum # loop over all stellar observations for i, file in enumerate(stellar_list): if i == 0: print print('STEP 2: combining fibres') print(str(i + 1) + '/' + str(len(stellar_list))) # read in extracted spectrum file f = pyfits.getdata(file, 0) err = pyfits.getdata(file, 1) wl = pyfits.getdata(file, 2) h = pyfits.getheader(file, 0) h_err = pyfits.getheader(file, 1) # combine the stellar fibres comb_f, comb_err, ref_wl = combine_fibres(f, err, wl, osf=5, fibs='stellar') # combine sky fibres (4 if LC was on, 5 otherwise), then take the median h = pyfits.getheader(file) assert 'LCNEXP' in h.keys( ), 'ERROR: not the latest version of the FITS headers !!! (from May 2019 onwards)' if ('LCEXP' in h.keys()) or ( 'LCMNEXP' in h.keys() ): # this indicates the LFC actually was actually exposed (either automatically or manually) comb_f_sky, comb_err_sky, ref_wl_sky = median_fibres( f, err, wl, osf=5, fibs=[1, 2, 22, 23] ) # we don't want to use the sky fibre right next to the LFC if the LFC was on!!! 
else: comb_f_sky, comb_err_sky, ref_wl_sky = median_fibres(f, err, wl, osf=5, fibs='sky') # save to new FITS file outpath = path + 'fibres_combined/' fname = file.split('/')[-1] new_fn = outpath + fname.split('.')[0] + '_stellar_fibres_combined.fits' pyfits.writeto(new_fn, comb_f, h, clobber=True) pyfits.append(new_fn, comb_err, h_err, clobber=True) pyfits.append(new_fn, ref_wl, clobber=True) sky_fn = outpath + fname.split('.')[0] + '_median_sky.fits' pyfits.writeto(sky_fn, comb_f_sky, h, clobber=True) pyfits.append(sky_fn, comb_err_sky, h_err, clobber=True) pyfits.append(sky_fn, ref_wl_sky, clobber=True) ### STEP 4: combine all single-shot exposures for each target, do the sky subtraction, and weight the barycentric correction # first we need to make a new list for the combined-fibre spectra fc_stellar_list = glob.glob(path + 'fibres_combined/' + '*optimal*stellar*.fits') fc_stellar_list.sort() sky_list = glob.glob(path + 'fibres_combined/' + '*optimal*sky*.fits') sky_list.sort() object_list = [ pyfits.getval(file, 'OBJECT').split('+')[0] for file in fc_stellar_list ] # loop over all stellar observations for i, (file, skyfile) in enumerate(zip(fc_stellar_list, sky_list)): if i == 0: print() print('STEP 4: combining single-shot exposures') print(str(i + 1) + '/' + str(len(fc_stellar_list))) # get headers h = pyfits.getheader(file, 0) h_err = pyfits.getheader(file, 1) # get object name object = pyfits.getval(file, 'OBJECT').split('+')[0] # make a list that keeps a record of which observations feed into the combined final one used_obsnames = [(fn.split('/')[-1]).split('_')[1] for fn, obj in zip(fc_stellar_list, object_list) if obj == object] # add this information to the fits headers h['N_EXP'] = (len(used_obsnames), 'number of single-shot exposures') h_err['N_EXP'] = (len(used_obsnames), 'number of single-shot exposures') for j in range(len(used_obsnames)): h['EXP_' + str(j + 1)] = (used_obsnames[j], 'name of single-shot exposure') h_err['EXP_' + str(j + 1)] = (used_obsnames[j], 'name of single-shot exposure') # make lists containing the (sky-subtracted) flux, error, and wl-arrays for the fibre-combined optimal extracted spectra f_list = [ pyfits.getdata(fn, 0) - 19 * pyfits.getdata(skyfn, 0) for fn, skyfn, obj in zip(fc_stellar_list, sky_list, object_list) if obj == object ] err_list = [ np.sqrt(pyfits.getdata(fn, 1)**2 + 19 * pyfits.getdata(skyfn, 1)**2) for fn, skyfn, obj in zip(fc_stellar_list, sky_list, object_list) if obj == object ] wl_list = [ pyfits.getdata(fn, 2) for fn, obj in zip(fc_stellar_list, object_list) if obj == object ] # combine the single-shot exposures comb_f, comb_err, ref_wl = combine_exposures(f_list, err_list, wl_list, osf=5, remove_cosmics=True, thresh=7, low_thresh=3, debug_level=0, timit=False) # make lists of the barycentric correction and exposure time for every single-shot exposure bc_list = [ pyfits.getval(fn, 'BARYCORR') for fn, obj in zip(fc_stellar_list, object_list) if obj == object ] texp_list = [ pyfits.getval(fn, 'ELAPSED') for fn, obj in zip(fc_stellar_list, object_list) if obj == object ] # now assign weights based on exposure time and get weighted mean for b.c. 
(that only works well if the seeing was roughly constant and there were no clouds, as it should really be FLUX-weighted) wm_bc = np.average(bc_list, weights=texp_list) # save to new FITS file(s) outpath = path + 'final_combined_spectra/' new_fn = outpath + object + '_final_combined.fits' pyfits.writeto(new_fn, comb_f, h, clobber=True) pyfits.append(new_fn, comb_err, h_err, clobber=True) pyfits.append(new_fn, ref_wl, clobber=True) # write the barycentric correction into the FITS header of both the quick-extracted and the optimal-extracted reduced spectrum files pyfits.setval(new_fn, 'BARYCORR', value=wm_bc, comment='barycentric velocity correction [m/s]') return
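The comment above concedes that the barycentric correction should really be flux-weighted rather than exposure-time-weighted. A hedged sketch of that alternative, assuming the extracted flux arrays of the single-shot exposures are available so their total counts can serve as weights (helper name and interface are illustrative):

import numpy as np

def flux_weighted_bc(bc_list, flux_list):
    """bc_list: per-exposure barycentric corrections [m/s];
    flux_list: the corresponding extracted flux arrays.
    Weight each correction by the total counts recorded in that exposure."""
    weights = np.array([np.nansum(f) for f in flux_list])
    return np.average(bc_list, weights=weights)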
def newbervmain(p, ra, dec, equinox, year, month, day, hour, obs_long, obs_lat, obs_alt, pmra, pmde, plx=None, method='new'): if plx is None: plx = 0.0 # if method is off return zeros if method == 'off': WLOG(p, 'warning', 'BERV not calculated.') return np.nan, np.nan, np.nan # estimate method using helcorr from pyastronomy if method == 'estimate': tstr = '{0} {1}'.format(p['DATE-OBS'], p['UTC-OBS']) t = Time(tstr, scale='utc') # add exposure time tdelta = TimeDelta(((p['EXPTIME']) / 2.) * uu.s) t1 = t + tdelta # storage for bervs bervs, bjds = [], [] # loop around every 1.5 days in a year for dayit in np.arange(0., 365., 1.5): # get julien date for this day iteration jdi = t1.jd + dayit # calculate estimate of berv bargs = [obs_long, obs_lat, obs_alt, ra, dec, jdi] berv, bjd = spirouBERVest.helcorr(*bargs) # append to lists bervs.append(berv) bjds.append(bjd) # convert lists to numpy arrays bervs = np.array(bervs) bjds = np.array(bjds) # get berv berv2 = bervs[0] # bjd2 = bresults2[0].jd bjd2 = bjds[0] # work ou the maximum barycentric correction bervmax2 = np.max(abs(bervs)) # return results return berv2, bjd2, bervmax2 # calculation method using barycorrpy if method == 'new': # calculate JD time (as Astropy.Time object) tstr = '{0} {1}'.format(p['DATE-OBS'], p['UTC-OBS']) t = Time(tstr, scale='utc') # add exposure time tdelta = TimeDelta(((p['EXPTIME']) / 2.) * uu.s) t1 = t + tdelta # --------------------------------------------------------------------- # get reset directory location # get package name and relative path package = spirouConfig.Constants.PACKAGE() relfolder = spirouConfig.Constants.BARYCORRPY_DIR() # get absolute folder path from package and relfolder absfolder = spirouConfig.GetAbsFolderPath(package, relfolder) # get barycorrpy folder data_folder = os.path.join(absfolder, '') # --------------------------------------------------------------------- # need to import barycorrpy which required online files (astropy iers) # therefore provide a way to set offline version first # noinspection PyBroadException try: # file at: http://maia.usno.navy.mil/ser7/finals2000A.all from astropy.utils import iers # get package name and relative path package = spirouConfig.Constants.PACKAGE() iers_dir = spirouConfig.Constants.ASTROPY_IERS_DIR() # get absolute folder path from package and relfolder absfolder = spirouConfig.GetAbsFolderPath(package, iers_dir) # get file name file_a = os.path.basename(iers.iers.IERS_A_FILE) path_a = os.path.join(absfolder, file_a) # set table iers.IERS.iers_table = iers.IERS_A.open(path_a) import barycorrpy except Exception as _: emsg1 = 'For method="new" must have barcorrpy installed ' emsg2 = '\ti.e. 
">>> pip install barycorrpy' WLOG(p, 'warning', [emsg1, emsg2]) raise ImportError(emsg1 + '\n' + emsg2) # set up the barycorr arguments bkwargs = dict(ra=ra, dec=dec, epoch=equinox, pmra=pmra, pmdec=pmde, px=plx, rv=0.0, lat=obs_lat, longi=obs_long * -1, alt=obs_alt * 1000., leap_dir=data_folder) print(bkwargs) # get the julien UTC date for observation and obs + 1 year jdutc = list(t1.jd + np.arange(0., 365., 1.5)) # construct lock filename lfilename = os.path.join(p['DRS_DATA_REDUC'], 'BERV_lockfile') # add a wait for parallelisation lock, lfile = spirouFITS.check_fits_lock_file(p, lfilename) # calculate barycorrpy try: bresults1 = barycorrpy.get_BC_vel(JDUTC=jdutc, zmeas=0.0, **bkwargs) except Exception as e: # close lock spirouFITS.close_fits_lock_file(p, lock, lfile, lfilename) # re-raise exception to catch later raise e # end wait for parallelisation spirouFITS.close_fits_lock_file(p, lock, lfile, lfilename) # convert JDUTC to BJDTDB bresults2 = barycorrpy.utc_tdb.JDUTC_to_BJDTDB(t1, **bkwargs) # get berv berv2 = bresults1[0][0] / 1000.0 # get bjd bjd2 = bresults2[0][0] # work ou the maximum barycentric correction bervmax2 = np.max(abs(bresults1[0] / 1000.)) # return results return berv2, bjd2, bervmax2