def test_get_fitdata():
    """Test ExtData.get_fitdata: wavelengths keep units, extinctions/uncertainties do not."""
    data_path = pkg_resources.resource_filename("measure_extinction", "data/")

    # read in the observed data of the stars
    redstar = StarData("hd229238.dat", path=data_path)
    compstar = StarData("hd204172.dat", path=data_path)

    # calculate the extinction curve
    ext = ExtData()
    ext.calc_elx(redstar, compstar)
    # once wavelength units saved, update FITS file and use this line instead
    # of the 4 lines above
    # ext = ExtData(filename=data_path + "hd283809_hd064802_ext.fits")

    wave, y, unc = ext.get_fitdata(
        ["BAND", "IUE"], remove_uvwind_region=True, remove_lya_region=True
    )

    # fitting routines often cannot handle units, make sure none are present
    # (the original looped over ext.waves.keys() without using the loop
    # variable, repeating these identical asserts once per key; check once)
    assert isinstance(wave, u.Quantity)
    assert not isinstance(y, u.Quantity)
    assert not isinstance(unc, u.Quantity)
def SNR_ext(data_path, plot_path, starpair_list, plot=False):
    """
    - Calculate the median SNR of the extinction curves in certain wavelength regions
    - Plot the SNR of the extinction curves if requested

    Parameters
    ----------
    data_path : string
        Path to the data files

    plot_path : string
        Path to save the plots

    starpair_list : list of strings
        List of star pairs for which to calculate (and plot) the SNR, in the format "reddenedstarname_comparisonstarname" (no spaces)

    plot : boolean [default=False]
        Whether or not to plot the SNR vs. wavelength for every curve

    Returns
    -------
    - Median SNRs in certain wavelength regions (printed)
    - Plots of the SNR vs. wavelength (if requested)
    """
    # wavelength regions (in micron) over which to take the median SNR;
    # loop-invariant, so defined once outside the starpair loop
    ranges = [
        (0.79, 2.54),
        (2.85, 4.05),
        (4.55, 5.5),
    ]
    meds = np.zeros((3, len(starpair_list)))
    for j, starpair in enumerate(starpair_list):
        # obtain the extinction curve data
        extdata = ExtData("%s%s_ext.fits" % (data_path, starpair.lower()))

        # transform the curve from E(lambda-V) to A(lambda)/A(V)
        extdata.trans_elv_alav()

        # obtain flat arrays
        waves, exts, uncs = extdata.get_fitdata(["SpeX_SXD", "SpeX_LXD"])

        # calculate the median SNR in certain wavelength regions
        SNR = exts / uncs
        # "wrange" rather than "range" to avoid shadowing the builtin
        for i, wrange in enumerate(ranges):
            mask = (waves.value > wrange[0]) & (waves.value < wrange[1])
            meds[i][j] = np.median(np.abs(SNR[mask]))

        # plot SNR vs. wavelength if requested
        if plot:
            fig, ax = plt.subplots()
            ax.scatter(waves, SNR, s=1)
            plt.savefig(plot_path + starpair + "_SNR.pdf")
            # close the figure so figures don't accumulate over the loop
            plt.close(fig)

    print(ranges[0], np.nanmin(meds[0]), np.nanmax(meds[0]))
    print(ranges[1], np.nanmin(meds[1]), np.nanmax(meds[1]))
    print(ranges[2], np.nanmin(meds[2]), np.nanmax(meds[2]))
def fit_spex_ext(
    starpair,
    path,
    functype="pow",
    dense=False,
    profile="drude_asym",
    exclude=None,
    bootstrap=False,
    fixed=False,
):
    """
    Fit the observed SpeX NIR extinction curve

    Parameters
    ----------
    starpair : string
        Name of the star pair for which to fit the extinction curve, in the format "reddenedstarname_comparisonstarname" (no spaces), or "average" to fit the average extinction curve

    path : string
        Path to the data files

    functype : string [default="pow"]
        Fitting function type ("pow" for powerlaw or "pol" for polynomial)

    dense : boolean [default=False]
        Whether or not to fit the features around 3 and 3.4 micron

    profile : string [default="drude_asym"]
        Profile to use for the features if dense = True (options are "gauss", "drude", "lorentz", "gauss_asym", "drude_asym", "lorentz_asym")

    exclude : list of tuples [default=None]
        list of tuples (min,max) with wavelength regions (in micron) that need to be excluded from the fitting, e.g. [(0.8,1.2),(2.2,5)]

    bootstrap : boolean [default=False]
        Whether or not to do a quick bootstrap fitting to get more realistic uncertainties on the fitting results

    fixed : boolean [default=False]
        Whether or not to add a fixed feature around 3 micron (for diffuse sightlines)

    Returns
    -------
    Updates extdata.model["type", "waves", "exts", "residuals", "chi2", "params"] and extdata.columns["AV"] with the fitting results:
        - type: string with the type of model (e.g. "pow_elx_Drude")
        - waves: np.ndarray with the SpeX wavelengths
        - exts: np.ndarray with the fitted model to the extinction curve at "waves" wavelengths
        - residuals: np.ndarray with the residuals, i.e. data-fit, at "waves" wavelengths
        - chi2 : float with the chi square of the fitting
        - params: list with output Parameter objects
    """
    # retrieve the SpeX data to be fitted, and sort the curve from short to long wavelengths
    filename = "%s%s_ext.fits" % (path, starpair.lower())
    if fixed:
        # fixed-ice fits are stored in a separate "..._ice.fits" file
        filename = filename.replace(".", "_ice.")
    extdata = ExtData(filename)
    (waves, exts, exts_unc) = extdata.get_fitdata(["SpeX_SXD", "SpeX_LXD"])
    indx = np.argsort(waves)
    waves = waves[indx].value
    exts = exts[indx]
    exts_unc = exts_unc[indx]

    # exclude wavelength regions if requested
    if exclude:
        mask = np.full_like(waves, False, dtype=bool)
        for region in exclude:
            # "+=" on a boolean mask is an elementwise OR of the regions
            mask += (waves > region[0]) & (waves < region[1])
        waves = waves[~mask]
        exts = exts[~mask]
        exts_unc = exts_unc[~mask]

    # get a quick estimate of A(V)
    if extdata.type == "elx":
        extdata.calc_AV()
        AV_guess = extdata.columns["AV"]
    else:
        AV_guess = None

    # convert to A(lambda)/A(1 micron)
    # ind1 = np.abs(waves - 1).argmin()
    # exts = exts / exts[ind1]
    # exts_unc = exts_unc / exts[ind1]

    # obtain the function to fit
    # without LXD data the 3/3.4 micron feature region is not covered,
    # so feature fitting (dense/fixed) is disabled
    if "SpeX_LXD" not in extdata.waves.keys():
        dense = False
        fixed = False
    func = fit_function(
        dattype=extdata.type,
        functype=functype,
        dense=dense,
        profile=profile,
        AV_guess=AV_guess,
        fixed=fixed,
    )

    # for dense sightlines, add more weight to the feature region
    weights = 1 / exts_unc
    if dense:
        mask_ice = (waves > 2.88) & (waves < 3.19)
        mask_tail = (waves > 3.4) & (waves < 4)
        weights[mask_ice + mask_tail] *= 2

    # use the Levenberg-Marquardt algorithm to fit the data with the model
    fit = LevMarLSQFitter()
    fit_result_lev = fit(func, waves, exts, weights=weights, maxiter=10000)

    # set up the backend to save the samples for the emcee runs
    emcee_samples_file = path + "Fitting_results/" + starpair + "_emcee_samples.h5"

    # do the fitting again, with MCMC, using the results from the first fitting as input
    fit2 = EmceeFitter(nsteps=10000, burnfrac=0.1, save_samples=emcee_samples_file)

    # add parameter bounds
    for param in fit_result_lev.param_names:
        if "amplitude" in param:
            getattr(fit_result_lev, param).bounds = (0, 2)
        elif "alpha" in param:
            getattr(fit_result_lev, param).bounds = (0, 4)
        elif "Av" in param:
            getattr(fit_result_lev, param).bounds = (0, 10)

    fit_result_mcmc = fit2(fit_result_lev, waves, exts, weights=weights)

    # create standard MCMC plots
    fit2.plot_emcee_results(
        fit_result_mcmc, filebase=path + "Fitting_results/" + starpair
    )

    # choose the fit result to save
    fit_result = fit_result_mcmc
    # fit_result = fit_result_lev
    print(fit_result)

    # determine the wavelengths at which to evaluate and save the fitted model
    # curve: all SpeX wavelengths, sorted from short to long (to avoid problems
    # with overlap between SXD and LXD), and shortest and longest wavelength
    # should have data
    if "SpeX_LXD" not in extdata.waves.keys():
        full_waves = extdata.waves["SpeX_SXD"].value
        full_npts = extdata.npts["SpeX_SXD"]
    else:
        full_waves = np.concatenate(
            (extdata.waves["SpeX_SXD"].value, extdata.waves["SpeX_LXD"].value)
        )
        full_npts = np.concatenate((extdata.npts["SpeX_SXD"], extdata.npts["SpeX_LXD"]))
    # sort the wavelengths
    indxs_sort = np.argsort(full_waves)
    full_waves = full_waves[indxs_sort]
    full_npts = full_npts[indxs_sort]
    # cut the wavelength region
    indxs = np.logical_and(full_waves >= np.min(waves), full_waves <= np.max(waves))
    full_waves = full_waves[indxs]
    full_npts = full_npts[indxs]

    # calculate the residuals and put them in an array of the same length as
    # "full_waves" for plotting
    residuals = exts - fit_result(waves)
    # NOTE(review): full_like inherits full_npts' dtype — assumes npts is a
    # float array, otherwise np.nan cannot be stored; confirm in ExtData
    full_res = np.full_like(full_npts, np.nan)
    if exclude:
        mask = np.full_like(full_waves, False, dtype=bool)
        for region in exclude:
            mask += (full_waves > region[0]) & (full_waves < region[1])
        # "*" on boolean masks is an elementwise AND: good points outside excluded regions
        full_res[(full_npts > 0) * ~mask] = residuals
    else:
        full_res[(full_npts > 0)] = residuals

    # bootstrap to get more realistic uncertainties on the parameter results
    if bootstrap:
        red_star = StarData(extdata.red_file, path=path, use_corfac=True)
        comp_star = StarData(extdata.comp_file, path=path, use_corfac=True)
        # index [1] of get_band_mag is the magnitude uncertainty
        red_V_unc = red_star.data["BAND"].get_band_mag("V")[1]
        comp_V_unc = comp_star.data["BAND"].get_band_mag("V")[1]
        unc_V = np.sqrt(red_V_unc ** 2 + comp_V_unc ** 2)
        # refit with the curve shifted down/up by the V-band uncertainty
        fit_result_mcmc_low = fit2(fit_result_lev, waves, exts - unc_V, weights=weights)
        fit_result_mcmc_high = fit2(
            fit_result_lev, waves, exts + unc_V, weights=weights
        )

    # save the fitting results to the fits file
    if dense:
        functype += "_" + profile
    extdata.model["type"] = functype + "_" + extdata.type
    extdata.model["waves"] = full_waves
    extdata.model["exts"] = fit_result(full_waves)
    extdata.model["residuals"] = full_res
    extdata.model["chi2"] = np.sum((residuals / exts_unc) ** 2)
    print("Chi2", extdata.model["chi2"])
    extdata.model["params"] = []
    for param in fit_result.param_names:
        # update the uncertainties when bootstrapping
        if bootstrap:
            min_val = min(
                getattr(fit_result_mcmc, param).value,
                getattr(fit_result_mcmc_low, param).value,
                getattr(fit_result_mcmc_high, param).value,
            )
            max_val = max(
                getattr(fit_result_mcmc, param).value,
                getattr(fit_result_mcmc_low, param).value,
                getattr(fit_result_mcmc_high, param).value,
            )
            # half the spread of the three fits, added in quadrature as a systematic term
            sys_unc = (max_val - min_val) / 2
            getattr(fit_result, param).unc_minus = np.sqrt(
                getattr(fit_result, param).unc_minus ** 2 + sys_unc ** 2
            )
            getattr(fit_result, param).unc_plus = np.sqrt(
                getattr(fit_result, param).unc_plus ** 2 + sys_unc ** 2
            )
        extdata.model["params"].append(getattr(fit_result, param))

        # save the column information (A(V), E(B-V) and R(V))
        if "Av" in param:
            extdata.columns["AV"] = (
                getattr(fit_result, param).value,
                getattr(fit_result, param).unc_minus,
                getattr(fit_result, param).unc_plus,
            )
            # calculate the distribution of R(V) and 1/R(V) from the
            # distributions of A(V) and E(B-V)
            # NOTE(review): "unc" here is presumably astropy.uncertainty — confirm imports
            nsamples = getattr(fit_result, param).posterior.n_samples
            av_dist = unc.normal(
                extdata.columns["AV"][0],
                std=(extdata.columns["AV"][1] + extdata.columns["AV"][2]) / 2,
                n_samples=nsamples,
            )
            # B band taken as the band closest to 0.438 micron
            b_indx = np.abs(extdata.waves["BAND"] - 0.438 * u.micron).argmin()
            ebv_dist = unc.normal(
                extdata.exts["BAND"][b_indx],
                std=extdata.uncs["BAND"][b_indx],
                n_samples=nsamples,
            )
            # 16th/50th/84th percentiles -> (median, -unc, +unc) triples
            ebv_per = ebv_dist.pdf_percentiles([16.0, 50.0, 84.0])
            extdata.columns["EBV"] = (
                ebv_per[1],
                ebv_per[1] - ebv_per[0],
                ebv_per[2] - ebv_per[1],
            )
            rv_dist = av_dist / ebv_dist
            rv_per = rv_dist.pdf_percentiles([16.0, 50.0, 84.0])
            extdata.columns["RV"] = (
                rv_per[1],
                rv_per[1] - rv_per[0],
                rv_per[2] - rv_per[1],
            )
            inv_rv_dist = ebv_dist / av_dist
            inv_rv_per = inv_rv_dist.pdf_percentiles([16.0, 50.0, 84.0])
            extdata.columns["IRV"] = (
                inv_rv_per[1],
                inv_rv_per[1] - inv_rv_per[0],
                inv_rv_per[2] - inv_rv_per[1],
            )

    print(extdata.columns)

    # save the fits file
    extdata.save(filename)

    # print information about the ice feature
    if fixed:
        # params[3] is the ice-feature amplitude in the fixed-feature model
        # — TODO confirm parameter ordering against fit_function
        print(
            "Ice feature strength: ",
            extdata.model["params"][3].value,
            extdata.model["params"][3].unc_minus,
            extdata.model["params"][3].unc_plus,
        )
def fit_features_ext(starpair, path):
    """
    Fit the extinction features separately with different profiles

    Parameters
    ----------
    starpair : string
        Name of the star pair for which to fit the extinction features, in the format "reddenedstarname_comparisonstarname" (no spaces)

    path : string
        Path to the data files

    Returns
    -------
    waves : np.ndarray
        Numpy array with wavelengths

    exts_sub : np.ndarray
        Numpy array with continuum subtracted extinctions

    results : list
        List with the fitted models for different profiles
    """
    # fit the continuum first, with the feature region excluded from the fit
    fit_spex_ext(starpair, path, exclude=[(2.8, 3.6)])

    # read the SpeX extinction curve and order it from short to long wavelengths
    extdata = ExtData("%s%s_ext.fits" % (path, starpair.lower()))
    (waves, exts, exts_unc) = extdata.get_fitdata(["SpeX_SXD", "SpeX_LXD"])
    order = np.argsort(waves)
    waves = waves[order].value
    exts = exts[order]
    exts_unc = exts_unc[order]

    # remove the fitted (powerlaw) continuum and keep only the feature region
    params = extdata.model["params"]
    continuum = params[0] * params[3] * waves ** (-params[2]) - params[3]
    exts_sub = exts - continuum
    in_region = (waves >= 2.8) & (waves <= 3.6)
    waves = waves[in_region]
    exts_sub = exts_sub[in_region]
    exts_unc = exts_unc[in_region]

    # candidate models: two components each, centered near 3 and 3.4 micron
    # (for the Gaussians: stddev = FWHM / (2 sqrt(2 ln 2)))
    Gaussian_asym = custom_model(gauss_asymmetric)
    Drude_asym = custom_model(drude_asymmetric)
    Lorentzian_asym = custom_model(lorentz_asymmetric)
    profiles = [
        # symmetric profiles
        Gaussian1D(mean=3, stddev=0.13) + Gaussian1D(mean=3.4, stddev=0.06),
        Drude1D(x_0=3, fwhm=0.3) + Drude1D(x_0=3.4, fwhm=0.15),
        Lorentz1D(x_0=3, fwhm=0.3) + Lorentz1D(x_0=3.4, fwhm=0.15),
        # asymmetric counterparts
        Gaussian_asym(x_o=3, gamma_o=0.3) + Gaussian_asym(x_o=3.4, gamma_o=0.15),
        Drude_asym(x_o=3, gamma_o=0.3) + Drude_asym(x_o=3.4, gamma_o=0.15),
        Lorentzian_asym(x_o=3, gamma_o=0.3) + Lorentzian_asym(x_o=3.4, gamma_o=0.15),
    ]

    # fit every candidate profile to the continuum-subtracted data
    fitter = LevMarLSQFitter()
    results = []
    for candidate in profiles:
        fitted = fitter(candidate, waves, exts_sub, weights=1 / exts_unc, maxiter=10000)
        results.append(fitted)
        print(fitted)
        print("Chi2", np.sum(((exts_sub - fitted(waves)) / exts_unc) ** 2))

    return waves, exts_sub, results
help="save figure as a png file", action="store_true") parser.add_argument("--pdf", help="save figure as a pdf file", action="store_true") args = parser.parse_args() # get a saved extnction curve file = args.extfile # file = '/home/kgordon/Python_git/spitzer_mir_ext/fits/hd147889_hd064802_ext.fits' ofile = file.replace(".fits", "_P92.fits") extdata = ExtData(filename=file) # get an observed extinction curve to fit (wave, y, y_unc) = extdata.get_fitdata(["BAND", "IRS"], remove_uvwind_region=True, remove_lya_region=True, remove_irsblue=True) # ["BAND", "IUE", "IRS"], remove_uvwind_region=True, remove_lya_region=True # remove data affected by Ly-alpha absorption/emission gindxs = wave > (1.0 / 8.0) * u.micron wave = wave[gindxs] y = y[gindxs] y_unc = y_unc[gindxs] # remove units as fitting routines often cannot take numbers with units x = wave.to(1.0 / u.micron, equivalencies=u.spectral()).value # determine the initial guess at the A(V) values # just use the average at wavelengths > 5 # limit as lambda -> inf, E(lamda-V) -> -A(V) (indxs, ) = np.where(1.0 / x > 5.0)
parser.add_argument("--path", help="path for the extinction curves") args = parser.parse_args() if args.path: locpath = args.path + "/" else: locpath = "" file = args.file ofile = file.replace(".fits", "_POWLAW2DRUDE.fits") # read in the observed E(l-V) or A(l)/A(V) extinction curve obsext = ExtData(filename=locpath + file) # get an observed extinction curve to fit (wave, y, y_unc) = obsext.get_fitdata(["BAND", "IRS"]) # remove units as fitting routines often cannot take numbers with units x = wave.to(1.0 / u.micron, equivalencies=u.spectral()).value if obsext.type == "elx": # determine the initial guess at the A(V) values # just use the average at wavelengths > 5 # limit as lambda -> inf, E(lamda-V) -> -A(V) (indxs, ) = np.where(1.0 / x > 5.0) av_guess = -1.0 * np.average(y[indxs]) if not np.isfinite(av_guess): av_guess = 1.0 g21_init = G21() | AxAvToExv(Av=av_guess) g21_asym_init = G21_drude_asym() | AxAvToExv(Av=av_guess)
def table_inv_rv_dep(outpath, table_waves, fit_slopes, fit_intercepts, fit_stds, norm="V"):
    """
    Create tables with the slopes, intercepts and standard deviations at wavelengths "table_waves", and the measured and fitted average extinction curve

    Parameters
    ----------
    outpath : string
        Path to save the table

    table_waves : list
        List with wavelengths to be included in the table

    fit_slopes : tuple
        The interpolated spline for the slopes

    fit_intercepts : astropy model
        The fitted model for the intercepts

    fit_stds : tuple
        The interpolated spline for the standard deviations

    norm : string [default="V"]
        Band or wavelength for the normalization

    Returns
    -------
    Tables of the R(V)-dependent relationship at wavelengths "table_waves":
        - in aastex format for the paper
        - in ascii format
    """
    # obtain the slopes, intercepts and standard deviations at the table wavelengths
    table_slopes = interpolate.splev(table_waves, fit_slopes)
    table_intercepts = fit_intercepts(table_waves)
    table_stds = interpolate.splev(table_waves, fit_stds)

    # obtain the measured average extinction curve
    # NOTE(review): "inpath" is not a parameter of this function — presumably
    # a module-level global or a bug (should it be "outpath"?); confirm
    average = ExtData(inpath + "average_ext.fits")
    (ave_waves, exts, exts_unc) = average.get_fitdata(["SpeX_SXD", "SpeX_LXD"])
    indx = np.argsort(ave_waves)
    ave_waves = ave_waves[indx].value
    exts = exts[indx]
    exts_unc = exts_unc[indx]

    # create wavelength bins and calculate the binned median extinction and uncertainty
    # NOTE(review): "table_waves + 0.025" broadcasts — assumes table_waves is
    # an ndarray (a plain list would raise); bins are 0.05 micron wide
    bin_edges = np.insert(table_waves + 0.025, 0, table_waves[0] - 0.025)
    meds, edges, indices = stats.binned_statistic(
        ave_waves,
        (exts, exts_unc),
        statistic="median",
        bins=bin_edges,
    )

    # obtain the fitted average extinction curve (powerlaw: amplitude * wave**-index)
    ave_fit = average.model["params"][0] * table_waves ** (-average.model["params"][2])

    # obtain the measured average extinction in a few photometric bands
    bands = ["J", "H", "K", "WISE1", "L", "IRAC1"]
    band_waves = [1.22, 1.63, 2.19, 3.35, 3.45, 3.52]
    band_ave = get_phot(ave_waves, exts, bands)
    band_ave_unc = get_phot(ave_waves, exts_unc, bands)

    # obtain the fitted average extinction in a few photometric bands
    all_waves = np.arange(0.8, 4.05, 0.001)
    ave_fit_all = average.model["params"][0] * all_waves ** (-average.model["params"][2])
    band_ave_fit = get_phot(all_waves, ave_fit_all, bands)

    # obtain the slopes, intercepts and standard deviations in a few photometric bands
    # (slopes are negated here and negated back below when building the table)
    band_slopes = get_phot(all_waves, -interpolate.splev(all_waves, fit_slopes), bands)
    band_intercepts = get_phot(all_waves, fit_intercepts(all_waves), bands)
    band_stds = get_phot(all_waves, interpolate.splev(all_waves, fit_stds), bands)

    # create the table: photometric bands first, then the table wavelengths
    table = Table(
        [
            np.concatenate((band_waves, table_waves)),
            np.concatenate((band_ave, meds[0])),
            np.concatenate((band_ave_unc, meds[1])),
            np.concatenate((band_ave_fit, ave_fit)),
            np.concatenate((band_intercepts, table_intercepts)),
            np.concatenate((-band_slopes, table_slopes)),
            np.concatenate((band_stds, table_stds)),
        ],
        names=(
            "wavelength[micron]",
            "ave",
            "ave_unc",
            "ave_fit",
            "intercept",
            "slope",
            "std",
        ),
    )

    # save it in ascii format
    table.write(
        outpath + "inv_RV_dep" + str(norm) + ".txt",
        format="ascii.commented_header",
        overwrite=True,
    )

    # save it in aastex format
    table.write(
        outpath + "inv_RV_dep" + str(norm) + ".tex",
        format="aastex",
        names=(
            r"$\lambda\ [\micron]$",
            r"$\frac{A(\lambda)}{A(V)}$",
            "unc",
            "fit",
            r"$a(\lambda$)",
            r"$b(\lambda$)",
            r"$\sigma(\lambda)$",
        ),
        formats={
            r"$\lambda\ [\micron]$": "{:.2f}",
            r"$\frac{A(\lambda)}{A(V)}$": "{:.3f}",
            "unc": "{:.3f}",
            "fit": "{:.3f}",
            r"$a(\lambda$)": "{:.3f}",
            r"$b(\lambda$)": "{:.3f}",
            r"$\sigma(\lambda)$": "{:.3f}",
        },
        latexdict={
            "col_align": "c|ccc|ccc",
            "tabletype": "deluxetable",
            "caption": r"Average diffuse Milky Way extinction curve and parameters of the linear relationship between extinction $A(\lambda)/A(V)$ and $1/R(V)$. \label{tab:RV_dep}",
        },
        fill_values=[("nan", r"\nodata")],
        overwrite=True,
    )
def get_data(inpath, starpair_list_diff, starpair_list_dense, norm="V"):
    """
    Obtain the required data for all stars in the star pair lists:
        - A(lambda)/A(V)
        - 1/R(V)
        - A(V)

    Parameters
    ----------
    inpath : string
        Path to the input data files

    starpair_list_diff : list of strings
        List of diffuse star pairs to include in the fitting, in the format "reddenedstarname_comparisonstarname" (no spaces)

    starpair_list_dense : list of strings
        List of dense star pairs to include in the fitting, in the format "reddenedstarname_comparisonstarname" (no spaces)

    norm : string [default="V"]
        Band or wavelength for the normalization

    Returns
    -------
    1/R(V) with uncertainties, A(V) with uncertainties, A(lambda)/A(V) with uncertainties, wavelengths, boolean for dense/diffuse
    """
    starpair_list = starpair_list_diff + starpair_list_dense
    inv_RVs = np.zeros((len(starpair_list), 3))
    AVs = np.zeros((len(starpair_list), 3))

    # determine the wavelengths at which to retrieve the extinction data
    extdata_model = ExtData("%s%s_ext.fits" % (inpath, starpair_list[0].lower()))
    waves = np.sort(
        np.concatenate(
            (
                extdata_model.waves["SpeX_SXD"].value,
                extdata_model.waves["SpeX_LXD"].value,
            )
        )
    )
    alavs = np.full((len(waves), len(starpair_list)), np.nan)
    alav_uncs = np.full((len(waves), len(starpair_list)), np.nan)
    dense_bool = np.full(len(starpair_list), False)

    # retrieve the information for all stars
    for i, starpair in enumerate(starpair_list):
        # retrieve 1/R(V) and A(V) (with uncertainties)
        extdata = ExtData("%s%s_ext.fits" % (inpath, starpair.lower()))
        inv_RVs[i] = np.array(extdata.columns["IRV"])
        AVs[i] = np.array(extdata.columns["AV"])

        # transform the curve from E(lambda-V) to A(lambda)/A(V)
        extdata.trans_elv_alav()

        # get the good data in flat arrays
        (flat_waves, flat_exts, flat_exts_unc) = extdata.get_fitdata(
            ["SpeX_SXD", "SpeX_LXD"]
        )

        # convert extinction from A(lambda)/A(V) to A(lambda)/A(norm) if norm is not "V"
        if norm != "V":
            ind1 = np.abs(flat_waves.value - norm).argmin()
            # save the normalization factor BEFORE normalizing: the original
            # code divided the uncertainties by flat_exts[ind1] AFTER the
            # extinctions were normalized (i.e. by exactly 1), which left the
            # uncertainties unnormalized
            norm_val = flat_exts[ind1]
            flat_exts = flat_exts / norm_val
            flat_exts_unc = flat_exts_unc / norm_val

        # retrieve A(lambda)/A(V) at all wavelengths
        for j, wave in enumerate(waves):
            if wave in flat_waves.value:
                alavs[j][i] = flat_exts[flat_waves.value == wave]
                alav_uncs[j][i] = flat_exts_unc[flat_waves.value == wave]

        # flag the dense sightlines
        # (was "if starpair in dense:" — "dense" is not defined anywhere in
        # this function; the starpair_list_dense parameter is clearly intended)
        if starpair in starpair_list_dense:
            dense_bool[i] = True
    return inv_RVs, AVs, alavs, alav_uncs, waves, dense_bool
help="save figure as a pdf file", action="store_true") args = parser.parse_args() # get a saved extnction curve file = args.extfile # file = '/home/kgordon/Python_git/spitzer_mir_ext/fits/hd147889_hd064802_ext.fits' ofile = file.replace(".fits", "_FM90.fits") ext = ExtData(filename=file) if ext.type == "elx": ext.trans_elv_alav(av=float(ext.columns["AV"][0])) wave, y, y_unc = ext.get_fitdata( ["IUE"], remove_uvwind_region=True, remove_lya_region=True, ) x = 1.0 / wave.value # remove points above x = 8.0 gvals = x < 8.0 x = x[gvals] y = y[gvals] y_unc = y_unc[gvals] # initialize the model fm90_init = FM90() fm90_init.C1.bounds = (-2.0, 3.0) fm90_init.C2.bounds = (-0.1, 1.0)
def plot_multi_extinction(
    starpair_list,
    path,
    alax=False,
    average=False,
    extmodels=False,
    fitmodel=False,
    HI_lines=False,
    range=None,
    spread=False,
    exclude=None,
    log=False,
    text_offsets=None,
    text_angles=None,
    pdf=False,
):
    """
    Plot the extinction curves of multiple stars in the same plot

    Parameters
    ----------
    starpair_list : list of strings
        List of star pairs for which to plot the extinction curve, in the format "reddenedstarname_comparisonstarname" (no spaces)

    path : string
        Path to the data files

    alax : boolean [default=False]
        Whether or not to plot A(lambda)/A(X) instead of E(lambda-X)

    average : boolean [default=False]
        Whether or not to plot the average extinction curve

    extmodels: boolean [default=False]
        Whether or not to overplot Milky Way extinction curve models

    fitmodel: boolean [default=False]
        Whether or not to overplot a fitted model

    HI_lines : boolean [default=False]
        Whether or not to indicate the HI-lines in the plot

    range : list of 2 floats [default=None]
        Wavelength range to be plotted (in micron) - [min,max]

    spread : boolean [default=False]
        Whether or not to spread the extinction curves out by adding a vertical offset to each curve

    exclude : list of strings [default=None]
        List of data type(s) to exclude from the plot (e.g., IRS)

    log : boolean [default=False]
        Whether or not to plot the wavelengths on a log-scale

    text_offsets : list of floats [default=None]
        List of the same length as starpair_list with offsets for the annotated text

    text_angles : list of integers [default=None]
        List of the same length as starpair_list with rotation angles for the annotated text

    pdf : boolean [default=False]
        Whether or not to save the figure as a pdf file

    Returns
    -------
    Figure with extinction curves of multiple stars
    """
    # replace the None sentinels (the original used mutable default arguments
    # "[]", a Python anti-pattern; None defaults are backward-compatible)
    if exclude is None:
        exclude = []

    # plotting setup for easier to read plots
    fontsize = 18
    font = {"size": fontsize}
    plt.rc("font", **font)
    plt.rc("lines", linewidth=1)
    plt.rc("axes", linewidth=2)
    plt.rc("xtick.major", width=2, size=10)
    plt.rc("xtick.minor", width=1, size=5)
    plt.rc("ytick.major", width=2, size=10)
    plt.rc("ytick.minor", width=1, size=5)
    plt.rc("axes.formatter", min_exponent=2)

    # create the plot
    fig, ax = plt.subplots(figsize=(15, len(starpair_list) * 1.25))
    colors = plt.get_cmap("tab10")

    # set default text offsets and angles (an empty list also means "defaults",
    # as in the original)
    if text_offsets is None or len(text_offsets) == 0:
        text_offsets = np.full(len(starpair_list), 0.2)
    if text_angles is None or len(text_angles) == 0:
        text_angles = np.full(len(starpair_list), 10)

    for i, starpair in enumerate(starpair_list):
        # read in the extinction curve data
        extdata = ExtData("%s%s_ext.fits" % (path, starpair.lower()))

        # spread out the curves if requested
        if spread:
            yoffset = 0.25 * i
        else:
            yoffset = 0.0

        # determine where to add the name of the star
        # find the shortest plotted wavelength
        # NOTE(review): waves[-1] assumes get_fitdata returns wavelengths
        # sorted from long to short — confirm
        (waves, exts, ext_uncs) = extdata.get_fitdata(extdata.waves.keys() - exclude)
        if range is not None:
            waves = waves[waves.value >= range[0]]
        min_wave = waves[-1]
        # find out which data type corresponds with this wavelength
        # NOTE(review): ann_key stays unbound if no data type matches — the
        # plot call below would then raise NameError
        for data_type in extdata.waves.keys():
            if data_type in exclude:
                continue
            used_waves = extdata.waves[data_type][extdata.npts[data_type] > 0]
            if min_wave in used_waves:
                ann_key = data_type
        ann_range = [min_wave, min_wave] * u.micron

        # plot the extinction curve
        extdata.plot(
            ax,
            color=colors(i % 10),
            alpha=0.7,
            alax=alax,
            exclude=exclude,
            yoffset=yoffset,
            annotate_key=ann_key,
            annotate_wave_range=ann_range,
            annotate_text=extdata.red_file.split(".")[0].upper(),
            annotate_yoffset=text_offsets[i],
            annotate_rotation=text_angles[i],
            annotate_color=colors(i % 10),
        )

        # overplot a fitted model if requested
        if fitmodel:
            plot_fitmodel(extdata, yoffset=yoffset)

        # overplot Milky Way extinction curve models if requested
        if extmodels:
            if alax:
                plot_extmodels(extdata, alax)
            else:
                warnings.warn(
                    "Overplotting Milky Way extinction curve models on a figure with multiple observed extinction curves in E(lambda-V) units is disabled, because the model curves in these units are different for every star, and would overload the plot. Please, do one of the following if you want to overplot Milky Way extinction curve models: 1) Use the flag --alax to plot ALL curves in A(lambda)/A(V) units, OR 2) Plot all curves separately by removing the flag --onefig.",
                    stacklevel=2,
                )

    # plot the average extinction curve if requested
    if average:
        plot_average(
            path,
            ax=ax,
            extmodels=extmodels,
            fitmodel=fitmodel,
            exclude=exclude,
            spread=spread,
            annotate_key=ann_key,
            annotate_wave_range=ann_range,
        )

    # define the output name
    outname = "all_ext_%s.pdf" % (extdata.type)

    # plot HI-lines if requested
    if HI_lines:
        plot_HI(path, ax)

    # zoom in on a specific region if requested
    if range is not None:
        zoom(ax, range)
        outname = outname.replace(".pdf", "_zoom.pdf")

    # finish configuring the plot
    if log:
        ax.set_xscale("log")
    ax.set_xlabel(r"$\lambda$ [$\mu m$]", fontsize=1.5 * fontsize)
    ylabel = extdata._get_ext_ytitle(ytype=extdata.type)
    if spread:
        ylabel += " + offset"
    ax.set_ylabel(ylabel, fontsize=1.5 * fontsize)

    # show the figure or save it to a pdf file
    if pdf:
        fig.savefig(path + outname, bbox_inches="tight")
    else:
        plt.show()

    # return the figure and axes for additional manipulations
    return fig, ax
plt.rc("axes", linewidth=2) plt.rc("xtick.major", width=2) plt.rc("xtick.minor", width=2) plt.rc("ytick.major", width=2) plt.rc("ytick.minor", width=2) fig, tax = plt.subplots( ncols=2, nrows=2, figsize=(14, 6), gridspec_kw={"height_ratios": [3, 1]} ) # filename = "hd029647_hd034759_ext.fits" # filename = "hd029647_hd042560_ext.fits" filename = "hd283809_hd003360_ext.fits" ext = ExtData(filename) (wave, y, y_unc) = ext.get_fitdata(["SpeX_SXD", "SpeX_LXD"]) # remove units as fitting routines often cannot take numbers with units x = wave.to(u.micron).value gvals = (0.6 < x) & (x < 6.0) # print(y_unc[gvals]) # gvals = np.logical_or(x < 3.18, x > 3.4) weights = 1.0 / (y_unc[gvals]) # weights = np.full((len(x)), 0.1) # weight ice feature # weights[(2.9 < x) & (x < 3.2)] *= 5 # weights[(3.2 < x) & (x < 3.4)] /= 5 # weights[(2.3 < x) & (x < 2.4)] *= 5 # weights[(3.35 < x) & (x < 3.45)] *= 5 ax = tax[0, 0] ext.plot(ax)