def ReadFile(fname, blaze=None):
    """
    Read an echelle spectrum from a FITS file and return its orders,
    bluest order first, with a low-order continuum fit attached to each.

    Parameters
    ----------
    fname : str
        Path to the science FITS file. HelperFunctions.ReadFits is tried
        first without error extensions; a ValueError triggers a retry
        with errors=2 (files with a different extension layout).
    blaze : str or None
        Path to a blaze FITS file. Only used (and only required) for
        late-enough data; see the cutoff below.

    Returns
    -------
    list
        The orders, each with .y blaze-corrected (when applicable) and a
        .cont continuum from FittingUtilities.Continuum.
    """
    try:
        orders = HelperFunctions.ReadFits(fname)
    except ValueError:
        # Retry for files whose FITS layout carries error arrays.
        orders = HelperFunctions.ReadFits(fname, errors=2)
    orders = orders[::-1]  # Reverse order so the bluest order is first

    # Later data need blaze correction. The cutoff 50400 is decoded from
    # the filename (characters 2:7 of the basename).
    # NOTE(review): presumably a run/frame counter — confirm against the
    # instrument's file-naming convention.
    if int(os.path.split(fname)[1][2:7]) > 50400 and blaze is not None:
        print("\tBlaze correcting!")
        try:
            blaze_orders = HelperFunctions.ReadFits(blaze)
        except ValueError:
            blaze_orders = HelperFunctions.ReadFits(blaze, errors=2)
        blaze_orders = blaze_orders[::-1]  # Same ordering as the science orders
        blazecorrect = True
    else:
        blazecorrect = False

    for i, order in enumerate(orders):
        if blazecorrect:
            # Divide out the (mean-normalized) blaze shape, order by order.
            # NOTE(review): assumes blaze_orders has at least as many
            # orders as the science frame — verify for this instrument.
            b = blaze_orders[i].y / blaze_orders[i].y.mean()
            order.y /= b
        order.cont = FittingUtilities.Continuum(order.x, order.y, fitorder=2,
                                                lowreject=2, highreject=5)
        orders[i] = order.copy()
    return orders
print "\n***************************\nFitting order %i: " % (i) order = orders[i] fitter.AdjustValue({ "wavestart": order.x[0] - 20.0, "waveend": order.x[-1] + 20.0, "o2": 0.0, "h2o": humidity, "resolution": resolution }) fitpars = [ fitter.const_pars[j] for j in range(len(fitter.parnames)) if fitter.fitting[j] ] order.cont = FittingUtilities.Continuum(order.x, order.y, fitorder=3, lowreject=1.5, highreject=10) fitter.ImportData(order) fitter.resolution_fit_mode = "gauss" fitter.fit_source = False fitter.fit_primary = False model = fitter.GenerateModel(fitpars, separate_source=False, return_resolution=False) # Find the best scale factor model.cont = np.ones(model.size()) lines = FittingUtilities.FindLines(model, tol=0.95).astype(int) if len(lines) > 5: scale = np.median(
def get_ccfs(T=4000, vsini=5, logg=4.5, metal=0.5, hdf_file='Cross_correlations/CCF.hdf5',
             xgrid=None, addmode='simple'):
    """
    Get the cross-correlation functions for the given parameters, for all stars.

    Parameters
    ----------
    T, vsini, logg, metal : numbers
        Template parameters; only HDF5 datasets whose attributes match all
        four (plus `addmode`) are used.
    hdf_file : str
        Path to the HDF5 file holding the cross-correlation datasets.
    xgrid : array-like or None
        Velocity grid to evaluate the CCFs on. Defaults to
        np.arange(-400, 400, 1). (Kept as None to avoid a module-level
        mutable default.)
    addmode : str
        Order-combination mode attribute to match.

    Returns
    -------
    (np.ndarray, list)
        Array of normalized, centroid-shifted CCFs and the matching list
        of file names.

    Side effects
    ------------
    For T > 6000 the measured RV shifts are pickled to 'rvs.pkl'; for
    T <= 6000 the shifts are read back from that same file, so a hot-
    template run must happen first.
    """
    if xgrid is None:
        xgrid = np.arange(-400, 400, 1)
    ccfs = []
    filenames = []
    if T > 6000:
        rv_shift = {}
    else:
        # Use a context manager so the pickle file is always closed
        # (the original leaked the handle from open()).
        with open('rvs.pkl') as rv_in:
            rv_shift = pickle.load(rv_in)
    with h5py.File(hdf_file) as f:
        starname = 'psi1 Dra A'
        date_list = f[starname].keys()
        for date in date_list:
            datasets = f[starname][date].keys()
            for ds_name in datasets:
                ds = f[starname][date][ds_name]
                if (ds.attrs['T'] == T and ds.attrs['vsini'] == vsini and ds.attrs['logg'] == logg
                        and ds.attrs['[Fe/H]'] == metal and ds.attrs['addmode'] == addmode):
                    vel, corr = ds.value
                    # Flip the velocity axis sign before interpolating.
                    ccf = spline(vel[::-1] * -1, corr[::-1])
                    fname = ds.attrs['fname']
                    vbary = get_rv_correction(fname)
                    # Remove the smooth continuum from the barycenter-corrected CCF.
                    cont = FittingUtilities.Continuum(xgrid, ccf(xgrid - vbary), fitorder=2,
                                                      lowreject=5, highreject=2.5)
                    normed_ccf = ccf(xgrid - vbary) - cont
                    if T <= 6000:
                        # Cool templates: take the centroid measured by a
                        # previous hot-template run.
                        centroid = rv_shift[fname]
                        top = 0.0
                        amp = max(normed_ccf)
                    else:
                        # Hot templates: measure the centroid from a Gaussian fit.
                        gauss_pars = fit_gaussian(xgrid, normed_ccf)
                        centroid = gauss_pars[2]
                        amp = gauss_pars[1]
                        top = gauss_pars[0]
                        # NOTE(review): this unconditionally overrides the fitted
                        # amplitude — looks deliberate (fixed scaling) but verify.
                        amp = 0.5
                    print(centroid, fname)
                    # Re-evaluate shifted to the centroid, re-fit the continuum,
                    # and scale the CCF to a common amplitude of 0.5.
                    cont = FittingUtilities.Continuum(xgrid, ccf(xgrid - vbary + centroid),
                                                      fitorder=2, lowreject=5, highreject=2.5)
                    normed_ccf = (ccf(xgrid - vbary + centroid) - cont) * 0.5 / abs(amp)
                    filenames.append(fname)
                    ccfs.append(normed_ccf)
                    rv_shift[fname] = centroid
    if T > 6000:
        # Save the measured shifts for later cool-template runs; context
        # manager guarantees the file is flushed and closed.
        with open('rvs.pkl', 'w') as rv_out:
            pickle.dump(rv_shift, rv_out)
    return np.array(ccfs), filenames