# Remove anything without a vsini
new_file_list = []
new_prim_vsini = []
for vsini, fname in zip(prim_vsini, fileList):
    if vsini is not None:
        new_file_list.append(fname)
        new_prim_vsini.append(vsini)
    else:
        print('No vsini found for file {}'.format(fname))

GenericSearch.slow_companion_search(new_file_list, new_prim_vsini,
                                    hdf5_file=hdf5_filename,
                                    extensions=True,
                                    resolution=None,
                                    trimsize=trimsize,
                                    modeldir=modeldir,
                                    badregions=badregions,
                                    interp_regions=interp_regions,
                                    # metal_values=(0, -0.5, 0.5),
                                    metal_values=(0.5,),
                                    # vsini_values=(1, 5.0, 10.0, 20.0, 30.0),
                                    logg_values=(4.5,),
                                    Tvalues=range(8800, 9000, 100),
                                    vsini_values=(1, 5, 10, 20, 30),
                                    observatory='McDonald',
                                    debug=False,
                                    vbary_correct=True,
                                    addmode='all',
                                    output_mode='hdf5')
def Analyze(fileList, primary_vsini,
            badregions=[], interp_regions=[],
            extensions=True,
            resolution=None,
            trimsize=1,
            vsini_values=(10,),
            Tvalues=range(3000, 6100, 100),
            metal_values=(0.0,),
            logg_values=(4.5,),
            max_vsini=None,
            hdf5_file=StellarModel.HDF5_FILE,
            addmode="ML",
            output_mode="hdf5",
            output_file="Sensitivity.hdf5",
            vel_list=range(-400, 450, 50),
            tolerance=5.0,
            rerun=False,
            debug=False):
    """
    Run a sensitivity analysis using the same methodology as
    GenericSearch.companion_search. Most of the parameters are the same,
    with the exception of the ones listed below:

    Parameters:
    ===========
    - max_vsini:      float
                      The maximum vsini (in km/s) that we search. If it is given and
                      less than any of the vsini_values, then the model we correlate
                      against has this vsini. For example, if one of the vsini_values
                      is 150 km/s and max_vsini is 40 km/s, a 150 km/s model is added
                      to the data, but a 40 km/s model is used to correlate against
                      the result.

    - vel_list:       list of floats
                      The radial velocities at which to add the model to the data.
                      This provides several independent(-ish) tests of the sensitivity.

    - tolerance:      float
                      How close the highest CCF peak needs to be to the correct
                      velocity to count as a detection.

    - rerun:          boolean
                      If output_mode='hdf5', check whether the current parameters
                      have already been checked before running.
    """
    model_list = StellarModel.GetModelList(type="hdf5",
                                           hdf5_file=hdf5_file,
                                           temperature=Tvalues,
                                           metal=metal_values,
                                           logg=logg_values)
    modeldict, processed = StellarModel.MakeModelDicts(model_list, type="hdf5", hdf5_file=hdf5_file,
                                                       vsini_values=vsini_values, vac2air=True, logspace=True)

    get_weights = True if addmode.lower() == "weighted" else False
    MS = SpectralTypeRelations.MainSequence()

    # Do the cross-correlation
    datadict = defaultdict(list)
    alpha = 0.0
    for temp in sorted(modeldict.keys()):
        for gravity in sorted(modeldict[temp].keys()):
            for metallicity in sorted(modeldict[temp][gravity].keys()):
                for vsini_sec in vsini_values:
                    if debug:
                        logging.info("T: {}, logg: {}, [Fe/H]: {}, vsini: {}".format(temp, gravity,
                                                                                     metallicity, vsini_sec))

                    # Broaden the model that gets added to the data
                    model = modeldict[temp][gravity][metallicity][alpha][vsini_sec].copy()
                    broadened = Broaden.RotBroad(model, vsini_sec * units.km.to(units.cm), linear=True)
                    if resolution is not None:
                        broadened = FittingUtilities.ReduceResolutionFFT(broadened, resolution)

                    # The model we correlate against is capped at max_vsini (see docstring)
                    if max_vsini is not None and max_vsini < vsini_sec:
                        search_model = Broaden.RotBroad(model, max_vsini * units.km.to(units.cm), linear=True)
                        if resolution is not None:
                            search_model = FittingUtilities.ReduceResolutionFFT(search_model, resolution)
                    else:
                        search_model = broadened.copy()

                    # Make an interpolator function, normalizing by a scaled blackbody continuum
                    bb_flux = blackbody_lambda(broadened.x * units.nm, temp)
                    idx = np.where(broadened.x > 700)[0]
                    s = np.median(broadened.y[idx] / bb_flux[idx])
                    broadened.cont = bb_flux * s
                    modelfcn = interp(broadened.x, broadened.y / broadened.cont)

                    for i, (fname, vsini_prim) in enumerate(zip(fileList, primary_vsini)):
                        # Read in the data, only processing each file once
                        process_data = False if fname in datadict else True
                        if process_data:
                            orders_original = HelperFunctions.ReadExtensionFits(fname)
                            orders_original = GenericSearch.Process_Data(orders_original, badregions=badregions,
                                                                         interp_regions=[], trimsize=trimsize,
                                                                         vsini=None, reject_outliers=False,
                                                                         logspacing=False)
                            datadict[fname] = orders_original
                        else:
                            orders_original = datadict[fname]

                        header = fits.getheader(fname)
                        starname = header["OBJECT"]
                        date = header["DATE-OBS"].split("T")[0]

                        components = get_companions(starname)
                        print(components)
                        primary_temp = components["temperature"]
                        primary_radius = components["radius"]
                        primary_mass = components["mass"]
                        secondary_spt = MS.GetSpectralType("temperature", temp)[0]
                        secondary_radius = MS.Interpolate("radius", secondary_spt)
                        secondary_mass = MS.Interpolate("mass", secondary_spt)

                        for rv in vel_list:
                            # Check if these parameters already exist in the output file
                            params = {"velocity": rv, "primary_temps": primary_temp, "secondary_temp": temp,
                                      "object": starname, "date": date, "primary_vsini": vsini_prim,
                                      "secondary_vsini": vsini_sec, "primary_masses": primary_mass,
                                      "secondary_mass": secondary_mass, "logg": gravity,
                                      "[Fe/H]": metallicity, "addmode": addmode}
                            if output_mode == "hdf5" and not rerun and check_existence(output_file, params):
                                continue

                            # Make a copy of the data orders
                            orders = [order.copy() for order in orders_original]

                            for ordernum, order in enumerate(orders):
                                # Get the flux ratio from blackbodies scaled by the stellar radii
                                prim_flux = 0.0
                                for ptemp, pR in zip(primary_temp, primary_radius):
                                    prim_flux += blackbody_lambda(order.x * units.nm, ptemp).cgs.value * pR
                                sec_flux = blackbody_lambda(order.x * units.nm, temp).cgs.value * secondary_radius
                                scale = sec_flux / prim_flux

                                # Add the shifted, scaled model to the data
                                model_segment = (modelfcn(order.x * (1.0 - rv / lightspeed)) - 1.0) * scale
                                order.y += model_segment * order.cont

                                orders[ordernum] = order

                            # Process the data and model
                            orders = GenericSearch.Process_Data(orders, badregions=[],
                                                                interp_regions=interp_regions,
                                                                extensions=extensions, trimsize=0,
                                                                vsini=vsini_prim, logspacing=True,
                                                                reject_outliers=True)
                            model_orders = GenericSearch.process_model(search_model.copy(), orders,
                                                                       vsini_model=vsini_sec,
                                                                       vsini_primary=vsini_prim,
                                                                       debug=debug, logspace=False)

                            # Do the correlation
                            corr = Correlate.Correlate(orders, model_orders, addmode=addmode,
                                                       outputdir="Sensitivity/", get_weights=get_weights,
                                                       prim_teff=max(primary_temp), debug=debug)
                            if debug:
                                corr, ccf_orders = corr

                            # Determine if we found the companion, and output
                            check_detection(corr, params, mode="hdf5", tol=tolerance, hdf5_file=output_file)

                    # Delete the model. We don't need it anymore, and it just takes up ram.
                    modeldict[temp][gravity][metallicity][alpha][vsini_sec] = []

    return
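# Minimal usage sketch for Analyze(), kept separate from the function above.
# This is illustrative only: the command-line file handling, the use of
# StarData.get_vsini for the primary vsini values, and the specific parameter
# choices are assumptions, not part of the original pipeline.
if __name__ == "__main__":
    import sys
    import StarData

    # Treat every command-line argument ending in .fits as an input spectrum
    fileList = [f for f in sys.argv[1:] if f.endswith(".fits")]
    prim_vsini = StarData.get_vsini(fileList)

    Analyze(fileList, prim_vsini,
            extensions=True,
            resolution=None,
            trimsize=1,
            Tvalues=range(3000, 6100, 100),
            metal_values=(0.0,),
            logg_values=(4.5,),
            vsini_values=(1, 5, 10, 20, 30),
            max_vsini=40.0,
            vel_list=range(-400, 450, 50),
            tolerance=5.0,
            addmode="ML",
            output_mode="hdf5",
            output_file="Sensitivity.hdf5")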
if 1:
    fileList.append(arg)

# Get the primary star vsini values
prim_vsini = StarData.get_vsini(fileList)

# Use this one for the real data search
GenericSearch.slow_companion_search(fileList, prim_vsini,
                                    hdf5_file=hdf5_filename,
                                    extensions=True,
                                    resolution=None,
                                    trimsize=trimsize,
                                    modeldir=modeldir,
                                    badregions=badregions,
                                    metal_values=(0.0, -0.5, 0.5),
                                    vsini_values=(1, 5.0, 10.0, 20.0, 30),
                                    Tvalues=range(9000, 12000, 100),
                                    # Tvalues=[5300, ],
                                    # metal_values=[-0.5, ],
                                    # vsini_values=[5, ],
                                    observatory='CTIO',
                                    debug=False,
                                    vbary_correct=True,
                                    addmode='all',
                                    output_mode='hdf5')

"""
# Use this one for the synthetic binary search
GenericSearch.slow_companion_search(fileList, prim_vsini,
                                    hdf5_file='/media/ExtraSpace/PhoenixGrid/CHIRON_Grid.hdf5',
                                    extensions=True,
                                    resolution=None,
if 1:
    fileList.append(arg)

# Get the primary star vsini values
prim_vsini = [None for _ in fileList]

# Use this one for the real data search
# Tvalues = range(7000, 10000, 200) + range(10000, 30000, 400)
Tvalues = range(7000, 10000, 250) + range(10000, 30000, 500)
GenericSearch.slow_companion_search(fileList, prim_vsini,
                                    hdf5_file=hdf5_filename,
                                    extensions=True,
                                    resolution=None,
                                    trimsize=trimsize,
                                    modeldir=modeldir,
                                    badregions=badregions,
                                    metal_values=(0.0,),
                                    logg_values=(3.5, 4.0, 4.5,),
                                    vsini_values=range(75, 300, 25),
                                    # logg_values=(4.5,),
                                    # vsini_values=(250,),
                                    # Tvalues=(9250,),
                                    Tvalues=Tvalues,
                                    observatory='McDonald',
                                    debug=False,
                                    reject_outliers=False,
                                    vbary_correct=True,
                                    addmode='all',
                                    output_mode='hdf5',
                                    output_file='CCF_primary_total.hdf5')