########################
### Start of program ###
########################

### Set file_path
# Command-line arguments: base data directory and the FITS file name to analyse.
file_path = sys.argv[1]
file_name = sys.argv[2]
# Last "_"-separated token of the file name.  str.split() replaces the
# deprecated string.split() module function (identical behaviour, and the
# module function no longer exists in Python 3).
file_name_floor = file_name.split("_")[-1]
file_path_temp = file_path + "temp/"
file_path_reduced = file_path + "reduced/"

# Read the target name from the primary FITS header.
# NOTE(review): this HDU list is never closed in the visible fragment.
fits = pyfits.open(file_path + file_name_floor)
candidate = fits[0].header["OBJECT"]

### Find teff of target
teff,logg = functions.estimate_teff_logg(file_path,file_name_floor,candidate,"true","true",5500,4.5)

# Only targets hotter than 3800 K are matched against RV standards
# (presumably no suitable templates exist below this -- TODO confirm).
if teff >= 3800:

    ################################################
    ### Open file_path and find all RV_standards ###
    ################################################

    RV_list = open(file_path_temp + "RV_Standard_list","w")

    os.chdir(file_path)
    file_list = glob.glob("*.fits")

    # Scan every FITS file in the directory for RV standards; the loop
    # body continues past the end of this chunk (fragment ends mid-loop).
    for file_name in file_list:
        hdulist = pyfits.open(file_name)
        notes = hdulist[0].header["NOTES"]
### Find the region within which to flux normalise flux_normalise_w1 = eval(functions.read_param_file("NORM_REGION_w1")) flux_normalise_w2 = eval(functions.read_param_file("NORM_REGION_w2")) ### Find initial estimate of properties hdulist = pyfits.open(file_path + file_name) object_name = hdulist[0].header["OBJECT"] hdulist.close() print "Analysing ",object_name hsmso_connect = functions.read_config_file("HSMSO_CONNECT") hscand_connect = functions.read_config_file("HSCAND_CONNECT") default_teff = float(functions.read_config_file("TEFF_ESTIMATE")) default_logg = float(functions.read_config_file("LOGG_ESTIMATE")) teff_ini,logg_ini = functions.estimate_teff_logg(object_name,hsmso_connect,hscand_connect,default_teff,default_logg) feh_ini = 0.0 print "Initial estimate of teff, logg: ",str(teff_ini),str(logg_ini) ### Change directory to reduced/ program_dir = os.getcwd() + "/" #Save the current working directory os.chdir(file_path_reduced) #Change to ../reduced/ dir ### Load in spectra flux_spectrum = functions.read_ascii("fluxcal_" + file_name + ".dat") flux_spectrum = functions.read_table(flux_spectrum) flux_spectrum = transpose(array(flux_spectrum)) flux_spectrum = spectype_functions.normalise(flux_spectrum,flux_normalise_w1,flux_normalise_w2) norm_spectrum = functions.read_ascii("norm_" + file_name + ".dat")
# NOTE(review): this fragment begins mid-structure -- the if/break below
# belong inside an enclosing loop over aperture indices i (not visible in
# this chunk).  Picks the first aperture carrying the maximum weight;
# apertures are numbered from 1, hence i+1.
    if aperture_weights[i] == max(aperture_weights):
        best_aperture = i+1
        break

print "best aperture is ", best_aperture

### Estimate teff and logg
# Target name from the normalised-spectrum FITS header.
hdulist = pyfits.open(file_path_reduced + "normspec_"+file_name)
object_name = hdulist[0].header['OBJECT']
hdulist.close()

# Database-connection flags and fallback stellar parameters from config.
hsmso_connect = functions.read_config_file("HSMSO_CONNECT")
hscand_connect = functions.read_config_file("HSCAND_CONNECT")
default_teff = float(functions.read_config_file("TEFF_ESTIMATE"))
default_logg = float(functions.read_config_file("LOGG_ESTIMATE"))

teff,logg = functions.estimate_teff_logg(file_path,file_name,object_name,hsmso_connect,hscand_connect,default_teff,default_logg)

### Change directory to reduced/
os.chdir(file_path_reduced) #Change to ../reduced/ dir

###########################
### Determine synthetic ###
###########################

### Check if synthetic template exists
### If not, use the closest one in logg
# Template file name encodes Teff, logg and [Fe/H] (fixed at 0.0 here);
# synthetic_library is defined outside this chunk -- presumably the
# template directory path (TODO confirm).
synthetic_template = "template_" + str(teff) + "_" + str(logg) + "_0.0.fits"
if os.path.exists(synthetic_library + synthetic_template):
    print "Using synthetic template " + synthetic_template
else:
    # Fallback branch continues past the end of this chunk.
    print "no template exists, using closest matching template"