def add_extinction(df, mode):
    """Add K-band extinction columns to a catalog of stars.

    Args:
        df (pandas.DataFrame): must have the following columns:
            - ra (decimal degrees)
            - dec (decimal degrees)
            - gaia2_sparallax (mas)
        mode (str): which dust model to use
            - bayestar2017 (Green et al. 2018)
            - bayestar2015 (Green et al. 2015)

    Returns:
        pandas.DataFrame: with the following columns added
            - ext_ak: extinction in K (map median * R_K)
            - ext_ak_err: error on extinction in K including R_K and E(B-V)
            - ext_ak_err_map: formal error from the map percentiles alone
            - ext_ebv: map value at the 50th percentile
            - ext_ebv_err: half of the 16th-84th percentile spread

    Raises:
        ValueError: if mode is not a recognized dust model.
    """
    # Distance from parallax (mas -> pc); clip so noisy/negative parallaxes
    # do not produce negative or absurd distances.
    dist = np.clip(np.array(1 / df['gaia2_sparallax'] * 1000), 0, 100000000) * u.pc
    coords = SkyCoord(ra=np.array(df.ra) * u.degree,
                      dec=np.array(df.dec) * u.degree,
                      distance=dist, frame='icrs')

    rk_frac_err = 0.3  # Fractional uncertainty in R_K
    if mode == 'bayestar2017':
        bayestar = BayestarWebQuery(version='bayestar2017')
        rk = 0.224  # A_K / E(B-V)
    elif mode == 'bayestar2015':
        # BUG FIX: this branch previously queried the bayestar2017 map.
        bayestar = BayestarWebQuery(version='bayestar2015')
        rk = 0.310  # A_K / E(B-V)
    else:
        # BUG FIX: an unknown mode previously fell through to an
        # UnboundLocalError on `bayestar`; fail with a clear message instead.
        raise ValueError("unknown dust model: %r" % (mode,))

    ebv = bayestar(coords, mode='percentile', pct=[16., 50., 84.])
    ak = rk * ebv

    # Symmetrized formal A_K error from the 16th/84th percentiles.
    ak_err_map = 0.5 * (ak[:, 2] - ak[:, 0])
    # Combine fractional map error and fractional R_K error in quadrature.
    ak_err = ak[:, 1] * np.sqrt((ak_err_map / ak[:, 1])**2 + rk_frac_err**2)

    df['ext_ak'] = ak[:, 1]
    df['ext_ak_err'] = ak_err
    df['ext_ak_err_map'] = ak_err_map
    df['ext_ebv'] = ebv[:, 1]
    df['ext_ebv_err'] = 0.5 * (ebv[:, 2] - ebv[:, 0])
    return pd.DataFrame(df)
def test_deredden_web():
    """Ensure dereddening works when a BayestarWebQuery instance is passed."""
    from dustmaps.bayestar import BayestarWebQuery

    result = survey.get_scale_height_data(track='SgN',
                                          deredden=BayestarWebQuery())
    # At least some dereddened intensities must be finite numbers.
    assert np.any(~np.isnan(result["INTEN_DERED"]))
def query_dustmap_w_percentiles(d, ra, dec, dustmap='bayestar2019'):
    '''Query 3D dustmaps. Cite Green (2018) if you use them.

    This func queries the Bayestar2019 dust map remotely in default mode.
    (The web interface takes the same arguments as the local interface.)

    Parameters:
    -----------
    d : 1d array with astropy units
        distance along the line of sight
    ra : 1d array with astropy units
        RA in ICRS
    dec : 1d array with astropy units
        Declination in ICRS
    dustmap : str
        map version string passed to BayestarWebQuery

    Return:
    ---------
    ext : 1-d array of floats
        Extinction value from given dustmap (median percentile)
    ext_err : 1-d array of floats
        uncertainty on ext, half the 16th-84th percentile spread
    flags : 1-d array of ints (bitmask)
        1 : uncertainties are symmetric below 0.10 (acceptable)
        2 : uncertainties are symmetric below 0.05 (low)
        4 : map returned NaN for this sight line
        8 : target distance outside the map's reliable range
        16 : map fit did not converge
    '''
    with warnings.catch_warnings():
        # silence warnings from astropy
        warnings.filterwarnings('ignore', category=RuntimeWarning, append=True)
        warnings.filterwarnings('ignore', category=AstropyDeprecationWarning,
                                append=True)

        # Query dustmaps from Bayestar:
        coords = SkyCoord(ra, dec, distance=d, frame='icrs')
        q = BayestarWebQuery(version=dustmap)
        # BUG FIX: percentiles were [36, 50, 84]; the symmetric error
        # estimate below requires the 16th/84th percentiles about the median.
        E, quality = q(coords, mode='percentile', pct=[16, 50, 84],
                       return_flags=True)

        # Make output numpy-ish
        E = np.array(E)
        Efl = np.nan_to_num(E)

        # Asymmetry of the percentile spread about the median.
        asym = np.abs(2 * Efl[:, 1] - Efl[:, 0] - Efl[:, 2])
        # BUG FIX: the two threshold flags previously overwrote each other,
        # discarding the 0.1-threshold bit; accumulate them as independent bits.
        flags = (asym < 0.1).astype(int) * 1    # acceptable uncertainty
        flags += (asym < 0.05).astype(int) * 2  # low uncertainty
        flags[np.isnan(E[:, 1])] += 4           # map failed to compute anything
        flags[~quality["reliable_dist"]] += 8   # too close or too far target
        flags[~quality["converged"]] += 16      # algorithm did not converge

        # define extinction value and uncertainty as spread of percentiles:
        ext = E[:, 1]
        ext_err = (E[:, 2] - E[:, 0]) / 2.

    return ext, ext_err, flags
def query_dustmodel_coords(ra, dec):
    """Query the bayestar2017 reddening map along a single sight line.

    Returns a one-row DataFrame holding the input coordinates plus one
    'av_<distance>' column per map distance sample (distance in pc,
    rounded to 6 decimals in the column name).
    """
    redden_map = BayestarWebQuery(version='bayestar2017')
    sight_line = SkyCoord(ra * units.deg, dec * units.deg, frame='icrs')
    redden_values = redden_map(sight_line, mode='best')
    del redden_map  # To clear the map from memory

    # Distance samples of the bayestar2017 map, converted to pc.
    distanceSamples = np.array([
        0.06309573, 0.07943284, 0.1, 0.12589255, 0.15848933, 0.19952627,
        0.25118864, 0.31622776, 0.3981072, 0.50118726, 0.6309574, 0.7943282,
        1., 1.2589258, 1.5848933, 1.9952621, 2.511887, 3.1622777, 3.981073,
        5.011873, 6.3095727, 7.943284, 10., 12.589258, 15.848933, 19.952621,
        25.11887, 31.622776, 39.81073, 50.11873, 63.095726]) * 1000.

    dust_df = pd.DataFrame({'ra': [ra], 'dec': [dec]})
    # One column per map distance sample, keyed by that distance.
    for index, redden in enumerate(redden_values):
        dust_df['av_' + str(round(distanceSamples[index], 6))] = redden
    return dust_df
def get_Av(self):
    """Query the online bayestar2017 map for extinction toward each target.

    Uses self.ra / self.dec (degrees) and self.oo (parallax in mas,
    converted to a distance of 1000/oo pc) in frame self.frame.

    Returns:
        Median map value per target, or the scalar 0. as a fallback when
        the web query fails (e.g. no network connection).
    """
    # Call the Bayestar catalogue
    bayestar = BayestarWebQuery(version='bayestar2017')
    coords = SkyCoord(self.ra.values * units.deg,
                      self.dec.values * units.deg,
                      distance=(1000 / self.oo.values) * units.pc,
                      frame=self.frame)

    # Find the extinction coefficient
    try:
        Av = bayestar(coords, mode='median')
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallows
        # KeyboardInterrupt/SystemExit; catch Exception instead.
        print('The Av values cant be downloaded for some reason.')
        print('No Av values for this star. Set Av to 0. for now.')
        Av = 0.
    return Av
def test_pandas_dataframe():
    """Ensure returning a pandas dataframe works for dereddened and raw data."""
    from dustmaps.bayestar import BayestarWebQuery

    _, raw_df = survey.get_scale_height_data(track='SgN',
                                             deredden=False,
                                             return_pandas_dataframe=True)
    _, dered_df = survey.get_scale_height_data(track='SgN',
                                               deredden=BayestarWebQuery(),
                                               return_pandas_dataframe=True)
    # Raw intensities must be identical regardless of the deredden option.
    assert np.allclose(raw_df["INTEN"], dered_df["INTEN"], equal_nan=True)
def Av_bayes(l, b, para):
    """Query the bayestar2017 map along Galactic sight lines.

    Parameters are pandas Series: l, b in degrees (Galactic frame) and
    para, the parallax in mas.

    Returns:
        (ebv, eebv): the map median scaled by 2.742, and the standard
        deviation over the map's posterior samples (unscaled).
    """
    distance = (1 / (1e-3 * para)).values  # pc, from parallax in mas
    gal_l = l.values
    gal_b = b.values
    coords = SkyCoord(gal_l * units.deg, gal_b * units.deg,
                      distance=distance * units.pc, frame='galactic')

    bayestar = BayestarWebQuery(version='bayestar2017')
    # Spread of the posterior samples serves as the uncertainty.
    eebv = np.std(bayestar(coords, mode='samples'), axis=1)
    # NOTE(review): the median is scaled by 2.742 but the sample-spread
    # uncertainty is not — confirm this asymmetry is intentional.
    ebv = 2.742 * bayestar(coords, mode='median')
    return ebv, eebv
def test_deredden():
    """Ensure different deredden keyword formats work."""
    from dustmaps.bayestar import BayestarWebQuery

    try:
        data = survey.get_scale_height_data(track='SgN', deredden=True)
    except OSError:
        # deredden=True requires a locally downloaded map; its absence
        # raising OSError is an acceptable outcome here.
        assert True
    else:
        # Fall back to exercising the explicit-query-object form.
        data = survey.get_scale_height_data(track='SgN',
                                            deredden=BayestarWebQuery())
        assert True
def test_longitude_step_size():
    """Ensure the step_size keyword works with and without astropy units."""
    from dustmaps.bayestar import BayestarWebQuery
    import astropy.units as u

    # Unit-ful mask width with a plain-number step size.
    _, df_plain, _ = survey.get_scale_height_data(
        track="SgN", deredden=False, return_pandas_dataframe=True,
        longitude_mask_width=5 * u.deg, step_size=1)

    # Plain-number mask width with a unit-ful step size.
    _, df_units, masks = survey.get_scale_height_data(
        track='SgN', deredden=BayestarWebQuery(),
        return_pandas_dataframe=True,
        longitude_mask_width=5, step_size=1 * u.deg)

    assert np.allclose(df_units["INTEN"][masks[0]],
                       df_plain["INTEN"][masks[0]], equal_nan=True)
def get_b17_ebv(self):
    '''Send a request to the online Bayestar catalogue for the extinction
    coefficients of all the targets.

    Uses self.ra / self.dec (degrees) and self.r (distance in pc) in
    frame self.frame.

    Returns:
        tuple: (b17, ebv) where b17 is the raw bayestar2017 map value
        (or an array of ones as a fallback when the query fails) and ebv
        is E(B-V) via the Green et al. 2018 conversion, 0.88 * b17.
    '''
    # Call the Bayestar catalogue
    bayestar = BayestarWebQuery(version='bayestar2017')
    coords = SkyCoord(self.ra.values * units.deg,
                      self.dec.values * units.deg,
                      distance=(self.r.values) * units.pc,
                      frame=self.frame)

    # Find the extinction coefficient
    try:
        b17 = bayestar(coords, mode='median')
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallows
        # KeyboardInterrupt/SystemExit; catch Exception instead.
        print('The Av values cant be downloaded for some reason.')
        # BUG FIX: the message previously claimed the fallback was 0 while
        # the code sets ones; keep the ones fallback and say so.
        print('No Av values for this star. Set Av to 1. for now.')
        b17 = np.ones(self.ra.shape)

    # Convert the Bayestar 17 values to E(B-V) values using the
    # Green et al. 2018 conversion.
    ebv = 0.88 * b17
    return b17, ebv
idlbd[i][1] = float(idlbd[i][1]) idlbd[i][2] = float(idlbd[i][2]) idlbd[i][3] = float(idlbd[i][3]) return idlbd data_filename = 'full_HRD_100k_for_example_idlbd' num_rows_in_data_file = 100000 f = open(data_filename+'.txt','r') f.readline() num_param_red = 1 resultarray = np.zeros((100000,num_param_red)) bayestar = BayestarWebQuery(version='bayestar2017') fout = open(data_filename+'_SFD_b17.txt','w') num_of_sets = (num_rows_in_data_file - 1) // 100000 + 1 for numset in range(num_of_sets): numrows = 100000 if numset == num_of_sets - 1: numrows = (num_rows_in_data_file - 1) % 100000 + 1 resultarray = np.zeros((numrows,num_param_red)) source_id = [] workarray = np.array(readdata(numrows)) l = workarray[:,1] * units.deg b = workarray[:,2] * units.deg d = workarray[:,3] * units.pc
"""Import tasks for the dust maps. """ import re import numpy as np from astropy.coordinates import SkyCoord as coord import astropy.units as un from dustmaps.bayestar import BayestarWebQuery from dustmaps.sfd import SFDWebQuery bayestar = BayestarWebQuery() sfd = SFDWebQuery() from astrocats.catalog.utils import is_number, pbar, single_spaces, uniq_cdl from ..faststars import FASTSTARS from ..utils import name_clean def do_dust(catalog): task_str = catalog.get_current_task_str() # Set preferred names, calculate some columns based on imported data, # sanitize some fields keys = list(catalog.entries.keys()) for oname in pbar(keys, task_str): # Some events may be merged in cleanup process, skip them if # non-existent. try: name = catalog.add_entry(oname) except Exception: catalog.log.warning(
b = np.linspace(-2.09, -2.17, 70) d = np.linspace(0, 7, 10) #making a grid of coords so we can see the extinction curve at every grid poit L, B = np.meshgrid(l, b) L = L.flatten() B = B.flatten() newL, newB, newd = [], [], [] for i in range(len(L)): newL.append([L[i]] * len(d)) newB.append([B[i]] * len(d)) newd.append(d) #querying extinction curves for every co-ordinate covering Cas A from the webserver of the Bayestar2019 dust map bayestar = BayestarWebQuery() # 'bayestar2019' is the default coords = SkyCoord(newL * units.deg, newB * units.deg, distance=newd * units.kpc, frame='galactic') E = bayestar(coords, mode=sightline_type) if sightline_type == 'samples': sightlines = open("sightlines-aroundcasa-panstarsmap-all", "w") for i in range(len(E)): for j in E[i].transpose(): sightlines.write(str(L[i]) + " " + str(B[i]) + "\n") for k in range(len(j)):
from dustmaps.bayestar import BayestarWebQuery
from astropy.io import ascii
from astropy.table import Table
import sys
#------------------------------------------------
# Optional second CLI argument selects the Bayestar map version.
if len(sys.argv) > 2:
    bayestar_version = sys.argv[2]
else:
    bayestar_version = 'bayestar2019'
# bversion -- shorthand for the version, like b15/b17/b19
bversion = 'b' + bayestar_version[-2:]
bayestar = BayestarWebQuery(version=bayestar_version)
# data_file must contain 4 columns:
# id, l, b, d,
# where id is the star number, l is the Galactic longitude,
# b is the Galactic latitude, and d is the distance to the star
if len(sys.argv) > 1:
    datafileName = sys.argv[1]
else:
    datafileName = input('Enter the name of data file: ')
# one of the mode types produced by Bayestar must be chosen:
# 'random_sample', 'random_sample_per_pix', 'samples',
# 'median', 'mean', 'best' or 'percentile' + a pct specification.
# mode determines how the returned extinction values reflect
# the probabilistic nature of the 3D extinction map