# FIXME: sample rate could be a command line option; template duration and data
# duration should be determined from chirp time
sample_rate = 4096  # sample rate in Hz
template_duration = 128  # template duration in seconds
template_length = sample_rate * template_duration  # template length in samples
data_duration = 512  # data duration in seconds
data_length = sample_rate * data_duration  # data length in samples

# Open output file: an empty LIGO_LW XML document to be filled in below.
out_xmldoc = ligolw.Document()
out_xmldoc.appendChild(ligolw.LIGO_LW())

# Write process metadata to output file.
process = command.register_to_xmldoc(
    out_xmldoc, parser, opts, ifos=opts.detector, comment="Little hope!")

# Add search summary to output file. The [0, 2e9] GPS segment is a
# conventional "all time" placeholder, not a real analysis segment.
all_time = segments.segment(
    [glue.lal.LIGOTimeGPS(0), glue.lal.LIGOTimeGPS(2e9)])
search_summary_table = lsctables.New(lsctables.SearchSummaryTable)
out_xmldoc.childNodes[0].appendChild(search_summary_table)
summary = ligolw_search_summary.append_search_summary(
    out_xmldoc, process, inseg=all_time, outseg=all_time)

# Read template bank file.
progress.update(-1, 'reading ' + opts.template_bank.name)
xmldoc, _ = ligolw_utils.load_fileobj(
    opts.template_bank,
    contenthandler=ligolw_bayestar.LSCTablesContentHandler)

# Determine the low frequency cutoff from the template bank file.
template_bank_f_low = ligolw_bayestar.get_template_bank_f_low(xmldoc)
from lalinference.bayestar import timing

# Other imports.
import numpy as np

progress = ProgressBar()

# Open output file: an empty LIGO_LW XML document to be filled in below.
progress.update(-1, 'setting up output document')
out_xmldoc = ligolw.Document()
out_xmldoc.appendChild(ligolw.LIGO_LW())

# Write process metadata to output file.
process = command.register_to_xmldoc(
    out_xmldoc, parser, opts, ifos=opts.detector,
    comment="Simulated coincidences")

# Add search summary to output file. The [0, 2e9] GPS segment is a
# conventional "all time" placeholder, not a real analysis segment.
all_time = segments.segment(
    [glue.lal.LIGOTimeGPS(0), glue.lal.LIGOTimeGPS(2e9)])
search_summary_table = lsctables.New(lsctables.SearchSummaryTable)
out_xmldoc.childNodes[0].appendChild(search_summary_table)
summary = ligolw_search_summary.append_search_summary(
    out_xmldoc, process, inseg=all_time, outseg=all_time)

# Read PSDs.
try: f_low, = f_lows except ValueError: raise ValueError( "sim_inspiral:f_lower columns are not unique, got values: " + ' '.join(f_lows)) else: f_low = opts.low_frequency_cutoff # Open output file. out_xmldoc = ligolw.Document() out_xmldoc.appendChild(ligolw.LIGO_LW()) # Write process metadata to output file. Masquerade as lalapps_tmpltbank and # encode low frequency cutoff in command line arguments. process = command.register_to_xmldoc( out_xmldoc, parser, opts, ifos="H1", comment="Exact-match template bank") # Record low-frequency cutoff in the SearchSummVars table. search_summvars_table = lsctables.New(lsctables.SearchSummVarsTable) out_xmldoc.childNodes[0].appendChild(search_summvars_table) search_summvars = lsctables.SearchSummVars() search_summvars.search_summvar_id = search_summvars_table.get_next_id() search_summvars.process_id = process.process_id search_summvars.name = "low-frequency cutoff" search_summvars.string = None search_summvars.value = f_low search_summvars_table.append(search_summvars) # Create a SnglInspiral table and initialize its row ID counter. sngl_inspiral_table = lsctables.New(lsctables.SnglInspiralTable) out_xmldoc.childNodes[0].appendChild(sngl_inspiral_table)
series = lal.CreateREAL8FrequencySeries( psd_name, 0, 0, opts.df, lal.SecondUnit, n) if '(double f) -> double' in func.__doc__: series.data.data = vectorize_swig_psd_func( psd_name_prefix + psd_name)(f) else: func(series, 0.0) # Find indices of first and last nonzero samples. nonzero = np.flatnonzero(series.data.data) # FIXME: int cast seems to be needed on old versions of Numpy first_nonzero = int(nonzero[0]) last_nonzero = int(nonzero[-1]) # Truncate series = lal.CutREAL8FrequencySeries( series, first_nonzero, last_nonzero - first_nonzero + 1) series.f0 = first_nonzero * series.deltaF series.name = psd_name series.data.data *= scale psds[detector] = series xmldoc = lal.series.make_psd_xmldoc(psds) command.register_to_xmldoc(xmldoc, parser, opts) with glue.ligolw.utils.SignalsTrap(): glue.ligolw.utils.write_fileobj( xmldoc, opts.output, gz=(os.path.splitext(opts.output.name)[-1] == ".gz"))
from lalinspiral.sbank.tau0tau3 import m1m2_to_mchirp import lalsimulation # BAYESTAR imports. from lalinference.bayestar import filter # Other imports. import numpy as np import scipy.linalg # Open output file. xmldoc = ligolw.Document() xmldoc.appendChild(ligolw.LIGO_LW()) # Write process metadata to output file. process = command.register_to_xmldoc(xmldoc, parser, opts) # Create a SnglInspiral table and initialize its row ID counter. sngl_inspiral_table = lsctables.New(lsctables.SnglInspiralTable) xmldoc.childNodes[0].appendChild(sngl_inspiral_table) sngl_inspiral_table.set_next_id(lsctables.SnglInspiralID(0)) f_low = 10. f_high = 2048. df = 0.1 initial_mchirp = m1m2_to_mchirp(opts.initial_mass1, opts.initial_mass2) initial_mtotal = opts.initial_mass1 + opts.initial_mass2 initial_eta = opts.initial_mass1 * opts.initial_mass2 / initial_mtotal**2 initial_chi = 0. initial_chirp_times = lalsimulation.SimInspiralTaylorF2RedSpinChirpTimesFromMchirpEtaChi(
except ValueError: raise ValueError( "sim_inspiral:f_lower columns are not unique, got values: " + ' '.join(f_lows)) else: f_low = opts.low_frequency_cutoff # Open output file. out_xmldoc = ligolw.Document() out_xmldoc.appendChild(ligolw.LIGO_LW()) # Write process metadata to output file. Masquerade as lalapps_tmpltbank and # encode low frequency cutoff in command line arguments. process = command.register_to_xmldoc(out_xmldoc, parser, opts, ifos="H1", comment="Exact-match template bank") # Record low-frequency cutoff in the SearchSummVars table. search_summvars_table = lsctables.New(lsctables.SearchSummVarsTable) out_xmldoc.childNodes[0].appendChild(search_summvars_table) search_summvars = lsctables.SearchSummVars() search_summvars.search_summvar_id = search_summvars_table.get_next_id() search_summvars.process_id = process.process_id search_summvars.name = "low-frequency cutoff" search_summvars.string = None search_summvars.value = f_low search_summvars_table.append(search_summvars) # Create a SnglInspiral table and initialize its row ID counter.
from lalinference.bayestar import timing

# Other imports.
import numpy as np

progress = ProgressBar()

# Open output file: an empty LIGO_LW XML document to be filled in below.
progress.update(-1, 'setting up output document')
out_xmldoc = ligolw.Document()
out_xmldoc.appendChild(ligolw.LIGO_LW())

# Write process metadata to output file.
process = command.register_to_xmldoc(
    out_xmldoc, parser, opts, ifos=opts.detector,
    comment="Simulated coincidences")

# Add search summary to output file. The [0, 2e9] GPS segment is a
# conventional "all time" placeholder, not a real analysis segment.
all_time = segments.segment(
    [glue.lal.LIGOTimeGPS(0), glue.lal.LIGOTimeGPS(2e9)])
search_summary_table = lsctables.New(lsctables.SearchSummaryTable)
out_xmldoc.childNodes[0].appendChild(search_summary_table)
summary = ligolw_search_summary.append_search_summary(
    out_xmldoc, process, inseg=all_time, outseg=all_time)

# Read reference PSDs from the file named on the command line.
progress.update(-1, 'reading ' + opts.reference_psd.name)
xmldoc, _ = ligolw_utils.load_fileobj(
    opts.reference_psd, contenthandler=lal.series.PSDContentHandler)
psds = lal.series.read_psd_xmldoc(xmldoc)