def write_output_url(self, outdir, row_number=None, counts=1,
                     root_name="gstlal_inspiral_bank_SNRs"):
    """Write the LIGO_LW xmldoc to disk.

    Args:
        outdir (str): The output directory.
        row_number (int, default=None): The row number of the SNR to be
            output. Default=None outputs all rows.
        counts (int, default=1): The number of rows to output, starting
            from row_number.
        root_name (str, default="gstlal_inspiral_bank_SNRs"): The root name
            of the xml document.

    Return:
        xmldoc: The file object representing the xmldoc.

    """
    assert counts >= 1, "Number of rows must be greater than or equal to 1."

    for instrument, bank_SNRs in self.bank_snrs_dict.items():
        # create root
        xmldoc = ligolw.Document()
        root = xmldoc.appendChild(ligolw.LIGO_LW())
        root.Name = root_name

        # add SNR and autocorrelation branches
        for bank_SNR in bank_SNRs:
            branch = root.appendChild(ligolw.LIGO_LW())
            branch.Name = "bank_SNR"
            branch.appendChild(
                ligolw_param.Param.from_pyvalue('bank_id', bank_SNR.bank_id))
            self._append_content(branch, bank_SNR, instrument,
                                 row_number=row_number, counts=counts)

        if row_number is not None and len(bank_SNRs) == 1 and counts == 1:
            outname = "%s-%s_bank_SNR_%d_%d-%d-%d.xml.gz" % (
                instrument, bank_SNRs[0].method, bank_SNRs[0].bank_number,
                row_number, bank_SNRs[0].start, bank_SNRs[0].duration)
        else:
            outname = "%s-%s_bank_SNR_%s_%s-%d-%d.xml.gz" % (
                instrument, bank_SNRs[0].method, "ALL", "ALL",
                bank_SNRs[0].start, bank_SNRs[0].duration)

        write_url(xmldoc, os.path.join(outdir, outname), verbose=self.verbose)

    return xmldoc
def gen_likelihood_control(coinc_params_distributions, seglists,
                           name=u"lalapps_burca_tailor", comment=u""):
    xmldoc = ligolw.Document()
    node = xmldoc.appendChild(ligolw.LIGO_LW())

    process = ligolw_process.register_to_xmldoc(
        xmldoc, program=process_program_name, paramdict={},
        version=__version__, cvs_repository="lscsoft",
        cvs_entry_time=__date__, comment=comment)
    coinc_params_distributions.process_id = process.process_id
    ligolw_search_summary.append_search_summary(
        xmldoc, process, ifos=seglists.keys(),
        inseg=seglists.extent_all(), outseg=seglists.extent_all())

    node.appendChild(coinc_params_distributions.to_xml(name))

    ligolw_process.set_process_end_time(process)

    return xmldoc
def write_bank(filename, banks, verbose=False):
    """Write template bank to LIGO_LW xml file."""
    xmldoc = ligolw.Document()
    head = xmldoc.appendChild(ligolw.LIGO_LW())
    head.Name = u"gstlal_template_bank"

    for bank in banks:
        cloned_table = bank.sngl_inspiral_table.copy()
        cloned_table.extend(bank.sngl_inspiral_table)
        head.appendChild(cloned_table)

        head.appendChild(
            ligolw_param.Param.from_pyvalue('template_bank_filename',
                                            bank.template_bank_filename))
        head.appendChild(
            ligolw_param.Param.from_pyvalue('sample_rate', bank.sample_rate))
        head.appendChild(
            ligolw_param.Param.from_pyvalue('bank_id', bank.bank_id))
        head.appendChild(ligolw_array.Array.build('templates', bank.templates))
        head.appendChild(
            ligolw_array.Array.build('autocorrelation_bank',
                                     bank.autocorrelation_bank))
        head.appendChild(
            ligolw_array.Array.build('autocorrelation_mask',
                                     bank.autocorrelation_mask))
        head.appendChild(
            ligolw_array.Array.build('sigmasq', numpy.array(bank.sigmasq)))

    ligolw_utils.write_filename(xmldoc, filename,
                                gz=filename.endswith('.gz'), verbose=verbose)
def __getitem__(self, coinc_event_id):
    newxmldoc = ligolw.Document()
    ligolw_elem = newxmldoc.appendChild(ligolw.LIGO_LW())

    # when making these, we can't use .copy() method of Table
    # instances because we need to ensure we have a Table
    # subclass, not a DBTable subclass
    new_process_table = ligolw_elem.appendChild(
        lsctables.New(lsctables.ProcessTable,
                      self.process_table.columnnamesreal))
    new_process_params_table = ligolw_elem.appendChild(
        lsctables.New(lsctables.ProcessParamsTable,
                      self.process_params_table.columnnamesreal))
    new_sngl_inspiral_table = ligolw_elem.appendChild(
        lsctables.New(lsctables.SnglInspiralTable,
                      self.sngl_inspiral_table.columnnamesreal))
    new_coinc_def_table = ligolw_elem.appendChild(
        lsctables.New(lsctables.CoincDefTable,
                      self.coinc_def_table.columnnamesreal))
    new_coinc_event_table = ligolw_elem.appendChild(
        lsctables.New(lsctables.CoincTable,
                      self.coinc_event_table.columnnamesreal))
    new_coinc_inspiral_table = ligolw_elem.appendChild(
        lsctables.New(lsctables.CoincInspiralTable,
                      self.coinc_inspiral_table.columnnamesreal))
    new_coinc_event_map_table = ligolw_elem.appendChild(
        lsctables.New(lsctables.CoincMapTable,
                      self.coinc_event_map_table.columnnamesreal))
    new_time_slide_table = ligolw_elem.appendChild(
        lsctables.New(lsctables.TimeSlideTable,
                      self.time_slide_table.columnnamesreal))

    new_coinc_def_table.append(self.coinc_def)
    coincevent = self.coinc_event_index[coinc_event_id]
    new_time_slide_table.extend(self.time_slide_index[coincevent.time_slide_id])
    new_sngl_inspiral_table.extend(self.sngl_inspiral_index[coinc_event_id])
    new_coinc_event_table.append(coincevent)
    new_coinc_event_map_table.extend(self.coinc_event_maps_index[coinc_event_id])
    new_coinc_inspiral_table.append(self.coinc_inspiral_index[coinc_event_id])

    for process_id in (set(new_sngl_inspiral_table.getColumnByName("process_id"))
                       | set(new_coinc_event_table.getColumnByName("process_id"))
                       | set(new_time_slide_table.getColumnByName("process_id"))):
        # process row is required
        new_process_table.append(self.process_index[process_id])
        try:
            new_process_params_table.extend(self.process_params_index[process_id])
        except KeyError:
            # process_params rows are optional
            pass

    return newxmldoc
def to_xml(self, name):
    xml = ligolw.LIGO_LW(
        {u"Name": u"%s:%s" % (name, self.ligo_lw_name_suffix)})
    xml.appendChild(self.numerator.to_xml("numerator"))
    xml.appendChild(self.denominator.to_xml("denominator"))
    xml.appendChild(self.candidates.to_xml("candidates"))
    return xml
def to_xml(self, name):
    """
    Serialize this RankingStat object to an XML fragment and
    return the root element of the resulting XML tree.
    """
    xml = ligolw.LIGO_LW(
        {u"Name": u"%s:%s" % (name, self.ligo_lw_name_suffix)})
    xml.appendChild(self.numerator.to_xml("numerator"))
    xml.appendChild(self.denominator.to_xml("denominator"))
    xml.appendChild(self.zerolag.to_xml("zerolag"))
    return xml
def to_xml(self, name=u"string_cusp"):
    # do not allow ourselves to be written to disk without our
    # PDFs' internal normalization metadata being up to date
    self.noise_lr_lnpdf.normalize()
    self.signal_lr_lnpdf.normalize()
    self.candidates_lr_lnpdf.normalize()

    xml = ligolw.LIGO_LW({u"Name": u"%s:%s" % (name, self.ligo_lw_name_suffix)})
    xml.appendChild(self.noise_lr_lnpdf.to_xml(u"noise_lr_lnpdf"))
    xml.appendChild(self.signal_lr_lnpdf.to_xml(u"signal_lr_lnpdf"))
    xml.appendChild(self.candidates_lr_lnpdf.to_xml(u"candidates_lr_lnpdf"))
    xml.appendChild(ligolw_param.Param.from_pyvalue(
        u"segments",
        ",".join(segmentsUtils.to_range_strings(self.segments))))
    return xml
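# Example (new): the to_xml methods above return a LIGO_LW fragment, not a
# complete document.  A minimal sketch of how such a fragment is typically
# wrapped and written to disk; `rankingstat` is a hypothetical instance of a
# class providing the to_xml method above, and the output filename is a
# placeholder.
from ligo.lw import ligolw
from ligo.lw import utils as ligolw_utils

xmldoc = ligolw.Document()
xmldoc.appendChild(ligolw.LIGO_LW()).appendChild(rankingstat.to_xml(u"string_cusp"))
ligolw_utils.write_filename(xmldoc, "rankingstat.xml.gz", verbose=True)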
def new_doc(comment=None, **kwargs):
    doc = ligolw.Document()
    doc.appendChild(ligolw.LIGO_LW())
    process = ligolw_process.register_to_xmldoc(
        doc,
        program=u"lalapps_gen_timeslides",
        paramdict=kwargs,
        version=__version__,
        cvs_repository=u"lscsoft",
        cvs_entry_time=__date__,
        comment=comment)
    doc.childNodes[0].appendChild(lsctables.New(lsctables.TimeSlideTable))
    return doc, process
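# Example (new): a short usage sketch for new_doc() above.  It assumes the
# surrounding module defines __version__ and __date__ as used above; the
# comment string and keyword arguments are placeholders recorded in the
# process_params table.
doc, process = new_doc(comment=u"example time slides", instruments=u"H1,L1")
time_slide_table = lsctables.TimeSlideTable.get_table(doc)
# ... populate time_slide_table with offset dictionaries here ...
ligolw_process.set_process_end_time(process)
ligolw_utils.write_filename(doc, "time_slides.xml.gz")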
def setUp(self):
    available_detectors = get_available_detectors()
    available_detectors = [a[0] for a in available_detectors]
    self.assertTrue('H1' in available_detectors)
    self.assertTrue('L1' in available_detectors)
    self.assertTrue('V1' in available_detectors)
    self.detectors = [Detector(d) for d in ['H1', 'L1', 'V1']]
    self.sample_rate = 4096.
    self.earth_time = lal.REARTH_SI / lal.C_SI

    # create a few random injections
    self.injections = []
    start_time = float(lal.GPSTimeNow())
    taper_choices = ('TAPER_NONE', 'TAPER_START', 'TAPER_END',
                     'TAPER_STARTEND')
    for i, taper in zip(range(20), itertools.cycle(taper_choices)):
        inj = MyInjection()
        inj.end_time = start_time + 40000 * i + \
            numpy.random.normal(scale=3600)
        random = numpy.random.uniform
        inj.mass1 = random(low=1., high=20.)
        inj.mass2 = random(low=1., high=20.)
        inj.distance = random(low=0.9, high=1.1) * 1e6 * lal.PC_SI
        inj.latitude = numpy.arccos(random(low=-1, high=1))
        inj.longitude = random(low=0, high=2 * lal.PI)
        inj.inclination = numpy.arccos(random(low=-1, high=1))
        inj.polarization = random(low=0, high=2 * lal.PI)
        inj.taper = taper
        self.injections.append(inj)

    # create LIGOLW document
    xmldoc = ligolw.Document()
    xmldoc.appendChild(ligolw.LIGO_LW())

    # create sim inspiral table, link it to document and fill it
    sim_table = lsctables.New(lsctables.SimInspiralTable)
    xmldoc.childNodes[-1].appendChild(sim_table)
    for i in range(len(self.injections)):
        row = sim_table.RowType()
        self.injections[i].fill_sim_inspiral_row(row)
        row.process_id = 0
        row.simulation_id = i
        sim_table.append(row)

    # write document to temp file
    self.inj_file = tempfile.NamedTemporaryFile(suffix='.xml')
    ligolw_utils.write_fileobj(xmldoc, self.inj_file)
def make_psd_xmldoc(psddict, xmldoc=None):
    """Add a set of PSDs to a LIGOLW XML document.

    If the document is not given, a new one is created first.
    """
    xmldoc = ligolw.Document() if xmldoc is None else xmldoc.childNodes[0]

    # the PSDs must be children of a LIGO_LW with name "psd"
    root_name = 'psd'
    Attributes = ligolw.sax.xmlreader.AttributesImpl
    lw = xmldoc.appendChild(ligolw.LIGO_LW(Attributes({'Name': root_name})))

    for instrument, psd in psddict.items():
        xmlseries = _build_series(psd, ('Frequency,Real', 'Frequency'), None,
                                  'deltaF', 's^-1')
        fs = lw.appendChild(xmlseries)
        fs.appendChild(LIGOLWParam.from_pyvalue('instrument', instrument))
    return xmldoc
def _append_content(self, branch, bank_SNR, instrument, row_number=None,
                    counts=1):
    """For internal use only."""
    slicing = slice(None, None, None) if row_number is None else slice(
        row_number, row_number + counts, 1)

    for template_id, autocorrelation, snr in zip(
            bank_SNR.template_id[slicing],
            bank_SNR.bank.autocorrelation_bank[slicing],
            bank_SNR[slicing]):
        # retrieve row number
        row_number = int(snr.name.split("_")[1])

        tmp_branch = branch.appendChild(ligolw.LIGO_LW())
        tmp_branch.Name = "SNR_and_Autocorrelation"
        tmp_branch.appendChild(
            ligolw_param.Param.from_pyvalue('template_id', template_id))

        # append timeseries and templates autocorrelation
        if snr.data.data.dtype == numpy.float32:
            tseries = tmp_branch.appendChild(
                lal.series.build_REAL4TimeSeries(snr))
        elif snr.data.data.dtype == numpy.float64:
            tseries = tmp_branch.appendChild(
                lal.series.build_REAL8TimeSeries(snr))
        elif snr.data.data.dtype == numpy.complex64:
            tseries = tmp_branch.appendChild(
                lal.series.build_COMPLEX8TimeSeries(snr))
        elif snr.data.data.dtype == numpy.complex128:
            tseries = tmp_branch.appendChild(
                lal.series.build_COMPLEX16TimeSeries(snr))
        else:
            raise ValueError("unsupported type : %s" % snr.data.data.dtype)

        # append autocorrelation_bank
        tmp_branch.appendChild(
            ligolw_array.Array.build('autocorrelation_bank', autocorrelation))

    return branch
def to_xml(self, name):
    # do not allow ourselves to be written to disk without our
    # PDFs' internal normalization metadata being up to date
    self.noise_lr_lnpdf.normalize()
    self.signal_lr_lnpdf.normalize()
    self.zero_lag_lr_lnpdf.normalize()

    xml = ligolw.LIGO_LW(
        {u"Name": u"%s:%s" % (name, self.ligo_lw_name_suffix)})
    xml.appendChild(self.noise_lr_lnpdf.to_xml(u"noise_lr_lnpdf"))
    xml.appendChild(self.signal_lr_lnpdf.to_xml(u"signal_lr_lnpdf"))
    xml.appendChild(self.zero_lag_lr_lnpdf.to_xml(u"zero_lag_lr_lnpdf"))
    xml.appendChild(
        ligolw_param.Param.from_pyvalue(
            u"segments",
            ",".join(segmentsUtils.to_range_strings(self.segments))))
    xml.appendChild(
        ligolw_param.Param.from_pyvalue(
            u"template_ids",
            ",".join("%d" % template_id
                     for template_id in sorted(self.template_ids))))
    return xml
def _build_series(series, dim_names, comment, delta_name, delta_unit):
    Attributes = ligolw.sax.xmlreader.AttributesImpl
    elem = ligolw.LIGO_LW(
        Attributes({'Name': str(series.__class__.__name__)}))
    if comment is not None:
        elem.appendChild(ligolw.Comment()).pcdata = comment
    elem.appendChild(ligolw.Time.from_gps(series.epoch, 'epoch'))
    elem.appendChild(LIGOLWParam.from_pyvalue('f0', series.f0, unit='s^-1'))

    delta = getattr(series, delta_name)
    if numpy.iscomplexobj(series.data.data):
        data = numpy.row_stack((numpy.arange(len(series.data.data)) * delta,
                                series.data.data.real,
                                series.data.data.imag))
    else:
        data = numpy.row_stack((numpy.arange(len(series.data.data)) * delta,
                                series.data.data))

    a = LIGOLWArray.build(series.name, data, dim_names=dim_names)
    a.Unit = str(series.sampleUnits)
    dim0 = a.getElementsByTagName(ligolw.Dim.tagName)[0]
    dim0.Unit = delta_unit
    dim0.Start = series.f0
    dim0.Scale = delta
    elem.appendChild(a)
    return elem
def make_psd_xmldoc(psddict, xmldoc=None, root_name=u"psd"):
    """
    Construct an XML document tree representation of a dictionary of
    frequency series objects containing PSDs.  See also read_psd_xmldoc()
    for a function to parse the resulting XML documents.

    If xmldoc is None (the default), then a new XML document is created and
    the PSD dictionary added to it inside a LIGO_LW element.  If xmldoc is
    not None then the PSD dictionary is appended to the children of that
    element inside a new LIGO_LW element.  In both cases, the LIGO_LW
    element's Name attribute is set to root_name.  This will be looked for
    by read_psd_xmldoc() when parsing the PSD document.
    """
    if xmldoc is None:
        xmldoc = ligolw.Document()
    lw = xmldoc.appendChild(
        ligolw.LIGO_LW(Attributes({u"Name": root_name})))
    for instrument, psd in psddict.items():
        fs = lw.appendChild(build_REAL8FrequencySeries(psd))
        if instrument is not None:
            fs.appendChild(
                ligolw_param.Param.from_pyvalue(u"instrument", instrument))
    return xmldoc
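# Example (new): a minimal usage sketch for make_psd_xmldoc() above.  The PSD
# values, series length and output filename are placeholders; it assumes each
# PSD is supplied as a lal REAL8FrequencySeries.
import lal
import numpy
from ligo.lw import utils as ligolw_utils

psd = lal.CreateREAL8FrequencySeries(
    "psd", lal.LIGOTimeGPS(0), 0.0, 0.25,
    lal.StrainUnit**2 / lal.HertzUnit, 4096)
psd.data.data = numpy.full(psd.data.length, 1e-46)

xmldoc = make_psd_xmldoc({"H1": psd})
ligolw_utils.write_filename(xmldoc, "H1-REFERENCE_PSD.xml.gz")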
def write(filename, samples, write_params=None, static_args=None):
    """Writes the injection samples to the given xml.

    Parameters
    ----------
    filename : str
        The name of the file to write to.
    samples : io.FieldArray
        FieldArray of parameters.
    write_params : list, optional
        Only write the given parameter names. All given names must be keys
        in ``samples``. Default is to write all parameters in ``samples``.
    static_args : dict, optional
        Dictionary mapping static parameter names to values. These are
        written to the ``attrs``.
    """
    xmldoc = ligolw.Document()
    xmldoc.appendChild(ligolw.LIGO_LW())
    simtable = lsctables.New(lsctables.SimInspiralTable)
    xmldoc.childNodes[0].appendChild(simtable)
    if static_args is None:
        static_args = {}
    if write_params is None:
        write_params = samples.fieldnames
    for ii in range(samples.size):
        sim = lsctables.SimInspiral()
        # initialize all elements to None
        for col in sim.__slots__:
            setattr(sim, col, None)
        for field in write_params:
            data = samples[ii][field]
            set_sim_data(sim, field, data)
        # set any static args
        for (field, value) in static_args.items():
            set_sim_data(sim, field, value)
        simtable.append(sim)
    ligolw_utils.write_filename(xmldoc, filename, gz=filename.endswith('gz'))
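# Example (new): a usage sketch for write() above, assuming `samples` is a
# pycbc.io.FieldArray of injection parameters already constructed elsewhere.
# The parameter names, static arguments and filename are placeholders.
write("injections.xml.gz", samples,
      write_params=['mass1', 'mass2', 'distance', 'tc'],
      static_args={'approximant': 'IMRPhenomD', 'f_lower': 20.0})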
def _build_series(series, dim_names, comment, delta_name, delta_unit):
    elem = ligolw.LIGO_LW(
        Attributes({u"Name": six.text_type(series.__class__.__name__)}))
    if comment is not None:
        elem.appendChild(ligolw.Comment()).pcdata = comment
    elem.appendChild(ligolw.Time.from_gps(series.epoch, u"epoch"))
    elem.appendChild(
        ligolw_param.Param.from_pyvalue(u"f0", series.f0, unit=u"s^-1"))

    delta = getattr(series, delta_name)
    if np.iscomplexobj(series.data.data):
        data = np.row_stack((np.arange(len(series.data.data)) * delta,
                             series.data.data.real,
                             series.data.data.imag))
    else:
        data = np.row_stack((np.arange(len(series.data.data)) * delta,
                             series.data.data))

    a = ligolw_array.Array.build(series.name, data, dim_names=dim_names)
    a.Unit = str(series.sampleUnits)
    dim0 = a.getElementsByTagName(ligolw.Dim.tagName)[0]
    dim0.Unit = delta_unit
    dim0.Start = series.f0
    dim0.Scale = delta
    elem.appendChild(a)
    return elem
def main(args=None): from ligo.lw import lsctables from ligo.lw import utils as ligolw_utils from ligo.lw import ligolw import lal.series from scipy import stats p = parser() args = p.parse_args(args) xmldoc = ligolw.Document() xmlroot = xmldoc.appendChild(ligolw.LIGO_LW()) process = register_to_xmldoc(xmldoc, p, args) gwcosmo = GWCosmo( cosmology.default_cosmology.get_cosmology_from_string(args.cosmology)) ns_mass_min = 1.0 ns_mass_max = 2.0 bh_mass_min = 5.0 bh_mass_max = 50.0 ns_astro_spin_min = -0.05 ns_astro_spin_max = +0.05 ns_astro_mass_dist = stats.norm(1.33, 0.09) ns_astro_spin_dist = stats.uniform(ns_astro_spin_min, ns_astro_spin_max - ns_astro_spin_min) ns_broad_spin_min = -0.4 ns_broad_spin_max = +0.4 ns_broad_mass_dist = stats.uniform(ns_mass_min, ns_mass_max - ns_mass_min) ns_broad_spin_dist = stats.uniform(ns_broad_spin_min, ns_broad_spin_max - ns_broad_spin_min) bh_astro_spin_min = -0.99 bh_astro_spin_max = +0.99 bh_astro_mass_dist = stats.pareto(b=1.3) bh_astro_spin_dist = stats.uniform(bh_astro_spin_min, bh_astro_spin_max - bh_astro_spin_min) bh_broad_spin_min = -0.99 bh_broad_spin_max = +0.99 bh_broad_mass_dist = stats.reciprocal(bh_mass_min, bh_mass_max) bh_broad_spin_dist = stats.uniform(bh_broad_spin_min, bh_broad_spin_max - bh_broad_spin_min) if args.distribution.startswith('bns_'): m1_min = m2_min = ns_mass_min m1_max = m2_max = ns_mass_max if args.distribution.endswith('_astro'): x1_min = x2_min = ns_astro_spin_min x1_max = x2_max = ns_astro_spin_max m1_dist = m2_dist = ns_astro_mass_dist x1_dist = x2_dist = ns_astro_spin_dist elif args.distribution.endswith('_broad'): x1_min = x2_min = ns_broad_spin_min x1_max = x2_max = ns_broad_spin_max m1_dist = m2_dist = ns_broad_mass_dist x1_dist = x2_dist = ns_broad_spin_dist else: # pragma: no cover assert_not_reached() elif args.distribution.startswith('nsbh_'): m1_min = bh_mass_min m1_max = bh_mass_max m2_min = ns_mass_min m2_max = ns_mass_max if args.distribution.endswith('_astro'): x1_min = bh_astro_spin_min x1_max = bh_astro_spin_max x2_min = ns_astro_spin_min x2_max = ns_astro_spin_max m1_dist = bh_astro_mass_dist m2_dist = ns_astro_mass_dist x1_dist = bh_astro_spin_dist x2_dist = ns_astro_spin_dist elif args.distribution.endswith('_broad'): x1_min = bh_broad_spin_min x1_max = bh_broad_spin_max x2_min = ns_broad_spin_min x2_max = ns_broad_spin_max m1_dist = bh_broad_mass_dist m2_dist = ns_broad_mass_dist x1_dist = bh_broad_spin_dist x2_dist = ns_broad_spin_dist else: # pragma: no cover assert_not_reached() elif args.distribution.startswith('bbh_'): m1_min = m2_min = bh_mass_min m1_max = m2_max = bh_mass_max if args.distribution.endswith('_astro'): x1_min = x2_min = bh_astro_spin_min x1_max = x2_max = bh_astro_spin_max m1_dist = m2_dist = bh_astro_mass_dist x1_dist = x2_dist = bh_astro_spin_dist elif args.distribution.endswith('_broad'): x1_min = x2_min = bh_broad_spin_min x1_max = x2_max = bh_broad_spin_max m1_dist = m2_dist = bh_broad_mass_dist x1_dist = x2_dist = bh_broad_spin_dist else: # pragma: no cover assert_not_reached() else: # pragma: no cover assert_not_reached() dists = (m1_dist, m2_dist, x1_dist, x2_dist) # Read PSDs psds = list( lal.series.read_psd_xmldoc( ligolw_utils.load_fileobj( args.reference_psd, contenthandler=lal.series.PSDContentHandler)).values()) # Construct mass1, mass2, spin1z, spin2z grid. 
m1 = np.geomspace(m1_min, m1_max, 10) m2 = np.geomspace(m2_min, m2_max, 10) x1 = np.linspace(x1_min, x1_max, 10) x2 = np.linspace(x2_min, x2_max, 10) params = m1, m2, x1, x2 # Calculate the maximum distance on the grid. max_z = gwcosmo.get_max_z(psds, args.waveform, args.f_low, args.min_snr, m1, m2, x1, x2, jobs=args.jobs) if args.max_distance is not None: new_max_z = cosmology.z_at_value(gwcosmo.cosmo.luminosity_distance, args.max_distance * units.Mpc) max_z[max_z > new_max_z] = new_max_z max_distance = gwcosmo.sensitive_distance(max_z).to_value(units.Mpc) # Find piecewise constant approximate upper bound on distance. max_distance = cell_max(max_distance) # Calculate V * T in each grid cell cdfs = [dist.cdf(param) for param, dist in zip(params, dists)] cdf_los = [cdf[:-1] for cdf in cdfs] cdfs = [np.diff(cdf) for cdf in cdfs] probs = np.prod(np.meshgrid(*cdfs, indexing='ij'), axis=0) probs /= probs.sum() probs *= 4 / 3 * np.pi * max_distance**3 volume = probs.sum() probs /= volume probs = probs.ravel() volumetric_rate = args.nsamples / volume * units.year**-1 * units.Mpc**-3 # Draw random grid cells dist = stats.rv_discrete(values=(np.arange(len(probs)), probs)) indices = np.unravel_index(dist.rvs(size=args.nsamples), max_distance.shape) # Draw random intrinsic params from each cell cols = {} cols['mass1'], cols['mass2'], cols['spin1z'], cols['spin2z'] = [ dist.ppf(stats.uniform(cdf_lo[i], cdf[i]).rvs(size=args.nsamples)) for i, dist, cdf_lo, cdf in zip(indices, dists, cdf_los, cdfs) ] # Swap binary components as needed to ensure that mass1 >= mass2. # Note that the .copy() is important. # See https://github.com/numpy/numpy/issues/14428 swap = cols['mass1'] < cols['mass2'] cols['mass1'][swap], cols['mass2'][swap] = \ cols['mass2'][swap].copy(), cols['mass1'][swap].copy() cols['spin1z'][swap], cols['spin2z'][swap] = \ cols['spin2z'][swap].copy(), cols['spin1z'][swap].copy() # Draw random extrinsic parameters cols['distance'] = stats.powerlaw( a=3, scale=max_distance[indices]).rvs(size=args.nsamples) cols['longitude'] = stats.uniform(0, 2 * np.pi).rvs(size=args.nsamples) cols['latitude'] = np.arcsin(stats.uniform(-1, 2).rvs(size=args.nsamples)) cols['inclination'] = np.arccos( stats.uniform(-1, 2).rvs(size=args.nsamples)) cols['polarization'] = stats.uniform(0, 2 * np.pi).rvs(size=args.nsamples) cols['coa_phase'] = stats.uniform(-np.pi, 2 * np.pi).rvs(size=args.nsamples) cols['time_geocent'] = stats.uniform(1e9, units.year.to( units.second)).rvs(size=args.nsamples) # Convert from sensitive distance to redshift and comoving distance. # FIXME: Replace this brute-force lookup table with a solver. z = np.linspace(0, max_z.max(), 10000) ds = gwcosmo.sensitive_distance(z).to_value(units.Mpc) dc = gwcosmo.cosmo.comoving_distance(z).to_value(units.Mpc) z_for_ds = interp1d(ds, z, kind='cubic', assume_sorted=True) dc_for_ds = interp1d(ds, dc, kind='cubic', assume_sorted=True) zp1 = 1 + z_for_ds(cols['distance']) cols['distance'] = dc_for_ds(cols['distance']) # Apply redshift factor to convert from comoving distance and source frame # masses to luminosity distance and observer frame masses. 
for key in ['distance', 'mass1', 'mass2']: cols[key] *= zp1 # Populate sim_inspiral table sims = xmlroot.appendChild(lsctables.New(lsctables.SimInspiralTable)) for row in zip(*cols.values()): sims.appendRow(**dict(dict.fromkeys(sims.validcolumns, None), process_id=process.process_id, simulation_id=sims.get_next_id(), waveform=args.waveform, f_lower=args.f_low, **dict(zip(cols.keys(), row)))) # Record process end time. process.comment = str(volumetric_rate) process.set_end_time_now() # Write output file. write_fileobj(xmldoc, args.output)
def create_bank_xml(flow, fhigh, band, duration, level=0, ndof=1,
                    frequency_overlap=0, detector=None,
                    units=utils.EXCESSPOWER_UNIT_SCALE['Hz']):
    """
    Create a bank of sngl_burst XML entries. This file is then used by the
    trigger generator to do trigger generation. Takes in the frequency
    parameters and filter duration and returns a ligolw entity containing a
    sngl_burst Table, which can be saved to a file.
    """
    xmldoc = ligolw.Document()
    xmldoc.appendChild(ligolw.LIGO_LW())
    bank = lsctables.New(lsctables.SnglBurstTable, [
        "peak_time_ns", "start_time_ns", "stop_time_ns", "process_id", "ifo",
        "peak_time", "start_time", "stop_time", "duration", "time_lag",
        "peak_frequency", "search", "central_freq", "channel", "amplitude",
        "snr", "confidence", "chisq", "chisq_dof", "flow", "fhigh",
        "bandwidth", "tfvolume", "hrss", "event_id"
    ])
    bank.sync_next_id()

    # The first frequency band actually begins at flow, so we offset the
    # central frequency accordingly
    if level == 0:
        # Hann windows
        edge = band / 2
        cfreq = flow + band
    else:
        # Tukey windows
        edge = band / 2**(level + 1)
        # The sin^2 tapering comes from the Hann windows, so we need to know
        # how far they extend to account for the overlap at the ends
        cfreq = flow + edge + (band / 2)

    while cfreq + edge + band / 2 <= fhigh:
        row = bank.RowType()
        row.search = u"gstlal_excesspower"
        row.duration = duration * ndof
        row.bandwidth = band
        row.peak_frequency = cfreq
        row.central_freq = cfreq
        # This actually marks the 50 % overlap point
        row.flow = cfreq - band / 2.0
        # This actually marks the 50 % overlap point
        row.fhigh = cfreq + band / 2.0
        row.ifo = detector
        row.chisq_dof = 2 * band * row.duration
        row.duration *= units

        # Stuff that doesn't matter, yet
        row.peak_time_ns = 0
        row.peak_time = 0
        row.start_time_ns = 0
        row.start_time = 0
        row.stop_time_ns = 0
        row.stop_time = 0
        row.tfvolume = 0
        row.time_lag = 0
        row.amplitude = 0
        row.hrss = 0
        row.snr = 0
        row.chisq = 0
        row.confidence = 0
        row.event_id = bank.get_next_id()
        row.channel = "awesome full of GW channel"
        row.process_id = ilwd.ilwdchar(u"process:process_id:0")
        bank.append(row)

        #cfreq += band #band is half the full width of the window, so this is 50% overlap
        cfreq += band * (1 - frequency_overlap)

    xmldoc.childNodes[0].appendChild(bank)
    return xmldoc
def write_bank(filename, banks, psd_input, cliplefts=None, cliprights=None, verbose=False): """Write SVD banks to a LIGO_LW xml file.""" # Create new document xmldoc = ligolw.Document() lw = xmldoc.appendChild(ligolw.LIGO_LW()) for bank, clipleft, clipright in zip(banks, cliplefts, cliprights): # set up root for this sub bank root = lw.appendChild( ligolw.LIGO_LW(Attributes({u"Name": u"gstlal_svd_bank_Bank"}))) # FIXME FIXME FIXME move this clipping stuff to the Bank class # set the right clipping index clipright = len(bank.sngl_inspiral_table) - clipright # Apply clipping option to sngl inspiral table # put the bank table into the output document new_sngl_table = bank.sngl_inspiral_table.copy() for row in bank.sngl_inspiral_table[clipleft:clipright]: # FIXME need a proper id column row.Gamma1 = int(bank.bank_id.split("_")[0]) new_sngl_table.append(row) # put the possibly clipped table into the file root.appendChild(new_sngl_table) # Add root-level scalar params root.appendChild( ligolw_param.Param.from_pyvalue('filter_length', bank.filter_length)) root.appendChild( ligolw_param.Param.from_pyvalue('gate_threshold', bank.gate_threshold)) root.appendChild( ligolw_param.Param.from_pyvalue('logname', bank.logname or "")) root.appendChild( ligolw_param.Param.from_pyvalue('snr_threshold', bank.snr_threshold)) root.appendChild( ligolw_param.Param.from_pyvalue('template_bank_filename', bank.template_bank_filename)) root.appendChild( ligolw_param.Param.from_pyvalue('bank_id', bank.bank_id)) root.appendChild( ligolw_param.Param.from_pyvalue('new_deltaf', bank.newdeltaF)) root.appendChild( ligolw_param.Param.from_pyvalue('working_f_low', bank.working_f_low)) root.appendChild(ligolw_param.Param.from_pyvalue('f_low', bank.f_low)) root.appendChild( ligolw_param.Param.from_pyvalue('sample_rate_max', int(bank.sample_rate_max))) root.appendChild( ligolw_param.Param.from_pyvalue('gstlal_fir_whiten', os.environ['GSTLAL_FIR_WHITEN'])) # apply clipping to autocorrelations and sigmasq bank.autocorrelation_bank = bank.autocorrelation_bank[ clipleft:clipright, :] bank.autocorrelation_mask = bank.autocorrelation_mask[ clipleft:clipright, :] bank.sigmasq = bank.sigmasq[clipleft:clipright] # Add root-level arrays # FIXME: ligolw format now supports complex-valued data root.appendChild( ligolw_array.Array.build('autocorrelation_bank_real', bank.autocorrelation_bank.real)) root.appendChild( ligolw_array.Array.build('autocorrelation_bank_imag', bank.autocorrelation_bank.imag)) root.appendChild( ligolw_array.Array.build('autocorrelation_mask', bank.autocorrelation_mask)) root.appendChild( ligolw_array.Array.build('sigmasq', numpy.array(bank.sigmasq))) # Write bank fragments for i, frag in enumerate(bank.bank_fragments): # Start new bank fragment container el = root.appendChild(ligolw.LIGO_LW()) # Apply clipping option if frag.mix_matrix is not None: frag.mix_matrix = frag.mix_matrix[:, clipleft * 2:clipright * 2] frag.chifacs = frag.chifacs[clipleft * 2:clipright * 2] # Add scalar params el.appendChild( ligolw_param.Param.from_pyvalue('rate', int(frag.rate))) el.appendChild(ligolw_param.Param.from_pyvalue( 'start', frag.start)) el.appendChild(ligolw_param.Param.from_pyvalue('end', frag.end)) # Add arrays el.appendChild(ligolw_array.Array.build('chifacs', frag.chifacs)) if frag.mix_matrix is not None: el.appendChild( ligolw_array.Array.build('mix_matrix', frag.mix_matrix)) el.appendChild( ligolw_array.Array.build('orthogonal_template_bank', frag.orthogonal_template_bank)) if frag.singular_values is not None: 
el.appendChild( ligolw_array.Array.build('singular_values', frag.singular_values)) if frag.sum_of_squares_weights is not None: el.appendChild( ligolw_array.Array.build('sum_of_squares_weights', frag.sum_of_squares_weights)) # put a copy of the processed PSD file in # FIXME in principle this could be different for each bank included in # this file, but we only put one here psd = psd_input[bank.sngl_inspiral_table[0].ifo] lal.series.make_psd_xmldoc({bank.sngl_inspiral_table[0].ifo: psd}, lw) # Write to file ligolw_utils.write_filename(xmldoc, filename, gz=filename.endswith('.gz'), verbose=verbose)
def write_simplified_sngl_inspiral_table(m1, m2, s1x, s1y, s1z, s2x, s2y, s2z,
                                         instrument, approximant,
                                         filename=None):
    """Write a simplified sngl_inspiral_table containing only one template.

    Args:
        m1 (float): mass1.
        m2 (float): mass2.
        s1x (float): spin 1 x-component.
        s1y (float): spin 1 y-component.
        s1z (float): spin 1 z-component.
        s2x (float): spin 2 x-component.
        s2y (float): spin 2 y-component.
        s2z (float): spin 2 z-component.
        instrument (str): The instrument for the template.
        approximant (str): The approximant used to simulate the waveform.
        filename (str, default=None): The output filename.

    Return:
        The file object representing the xmldoc.

    """
    # Check that the approximant is valid
    templates.gstlal_valid_approximant(approximant)

    xmldoc = ligolw.Document()
    root = xmldoc.appendChild(ligolw.LIGO_LW())

    table = lsctables.New(lsctables.SnglInspiralTable)
    rows = table.RowType()

    # set all slots to impossible/dummy value
    for t, c in zip(table.columntypes, table.columnnames):
        if t == u"real_4" or t == u"real_8":
            rows.__setattr__(c, 0)
        elif t == u"int_4s" or t == u"int_8s":
            rows.__setattr__(c, 0)
        elif t == u"lstring":
            rows.__setattr__(c, "")
        else:
            rows.__setattr__(c, None)

    rows.mass1 = m1
    rows.mass2 = m2
    rows.mtotal = m1 + m2
    rows.mchirp = (m1 * m2)**0.6 / (m1 + m2)**0.2
    rows.spin1x = s1x
    rows.spin1y = s1y
    rows.spin1z = s1z
    rows.spin2x = s2x
    rows.spin2y = s2y
    rows.spin2z = s2z
    rows.ifo = instrument

    table.append(rows)
    root.appendChild(table)

    #FIXME: do something better than this
    root.appendChild(
        ligolw_param.Param.from_pyvalue("approximant", approximant))

    if filename is not None:
        ligolw_utils.write_filename(xmldoc, filename,
                                    gz=filename.endswith("gz"))

    return xmldoc
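# Example (new): a short usage sketch for the helper above.  The masses,
# spins and filename are placeholders, and it assumes "IMRPhenomD" passes
# templates.gstlal_valid_approximant().
xmldoc = write_simplified_sngl_inspiral_table(
    1.4, 1.4,        # m1, m2 (solar masses)
    0., 0., 0.,      # spin 1 x, y, z
    0., 0., 0.,      # spin 2 x, y, z
    "H1", "IMRPhenomD",
    filename="single_template.xml.gz")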
def to_xml(self, name):
    xml = ligolw.LIGO_LW({u"Name": u"%s:triggerrates" % name})
    for key, value in self.items():
        xml.appendChild(value.to_xml(key))
    return xml
def make_exttrig_file(cp, ifos, sci_seg, out_dir):
    '''
    Make an ExtTrig xml file containing information on the external trigger

    Parameters
    ----------
    cp : pycbc.workflow.configuration.WorkflowConfigParser object
        The parsed configuration options of a pycbc.workflow.core.Workflow.
    ifos : str
        String containing the analysis interferometer IDs.
    sci_seg : ligo.segments.segment
        The science segment for the analysis run.
    out_dir : str
        The output directory, destination for xml file.

    Returns
    -------
    xml_file : pycbc.workflow.File object
        The xml file with external trigger information.
    '''
    # Initialise objects
    xmldoc = ligolw.Document()
    xmldoc.appendChild(ligolw.LIGO_LW())
    tbl = lsctables.New(lsctables.ExtTriggersTable)
    cols = tbl.validcolumns
    xmldoc.childNodes[-1].appendChild(tbl)
    row = tbl.appendRow()

    # Add known attributes for this GRB
    setattr(row, "event_ra", float(cp.get("workflow", "ra")))
    setattr(row, "event_dec", float(cp.get("workflow", "dec")))
    setattr(row, "start_time", int(cp.get("workflow", "trigger-time")))
    setattr(row, "event_number_grb", str(cp.get("workflow", "trigger-name")))

    # Fill in all remaining columns with dummy defaults
    for entry in cols.keys():
        if hasattr(row, entry):
            continue
        if cols[entry] in ['real_4', 'real_8']:
            setattr(row, entry, 0.)
        elif cols[entry] in ['int_4s', 'int_8s']:
            setattr(row, entry, 0)
        elif cols[entry] == 'lstring':
            setattr(row, entry, '')
        elif entry == 'process_id':
            row.process_id = 0
        elif entry == 'event_id':
            row.event_id = 0
        else:
            raise ValueError("Column %s not recognized" % entry)

    # Save file
    xml_file_name = "triggerGRB%s.xml" % str(cp.get("workflow",
                                                    "trigger-name"))
    xml_file_path = os.path.join(out_dir, xml_file_name)
    utils.write_filename(xmldoc, xml_file_path)
    xml_file_url = urljoin("file:", pathname2url(xml_file_path))
    xml_file = File(ifos, xml_file_name, sci_seg, file_url=xml_file_url)
    xml_file.add_pfn(xml_file_url, site="local")

    return xml_file
def __init__(self, ifos, coinc_results, **kwargs): """Initialize a ligolw xml representation of a zerolag trigger for upload from pycbc live to gracedb. Parameters ---------- ifos: list of strs A list of the ifos participating in this trigger. coinc_results: dict of values A dictionary of values. The format is defined in pycbc/events/coinc.py and matches the on disk representation in the hdf file for this time. psds: dict of FrequencySeries Dictionary providing PSD estimates for all involved detectors. low_frequency_cutoff: float Minimum valid frequency for the PSD estimates. high_frequency_cutoff: float, optional Maximum frequency considered for the PSD estimates. Default None. followup_data: dict of dicts, optional Dictionary providing SNR time series for each detector, to be used in sky localization with BAYESTAR. The format should be `followup_data['H1']['snr_series']`. More detectors can be present than given in `ifos`. If so, the extra detectors will only be used for sky localization. channel_names: dict of strings, optional Strain channel names for each detector. Will be recorded in the sngl_inspiral table. mc_area_args: dict of dicts, optional Dictionary providing arguments to be used in source probability estimation with pycbc/mchirp_area.py """ self.template_id = coinc_results['foreground/%s/template_id' % ifos[0]] self.coinc_results = coinc_results self.ifos = ifos # remember if this should be marked as HWINJ self.is_hardware_injection = ('HWINJ' in coinc_results and coinc_results['HWINJ']) # Check if we need to apply a time offset (this may be permerger) self.time_offset = 0 rtoff = 'foreground/{}/time_offset'.format(ifos[0]) if rtoff in coinc_results: self.time_offset = coinc_results[rtoff] if 'followup_data' in kwargs: fud = kwargs['followup_data'] assert len({fud[ifo]['snr_series'].delta_t for ifo in fud}) == 1, \ "delta_t for all ifos do not match" self.snr_series = {ifo: fud[ifo]['snr_series'] for ifo in fud} usable_ifos = fud.keys() followup_ifos = list(set(usable_ifos) - set(ifos)) for ifo in self.snr_series: self.snr_series[ifo].start_time += self.time_offset else: self.snr_series = None usable_ifos = ifos followup_ifos = [] # Set up the bare structure of the xml document outdoc = ligolw.Document() outdoc.appendChild(ligolw.LIGO_LW()) # FIXME is it safe (in terms of downstream operations) to let # `program_name` default to the actual script name? proc_id = create_process_table(outdoc, program_name='pycbc', detectors=usable_ifos).process_id # Set up coinc_definer table coinc_def_table = lsctables.New(lsctables.CoincDefTable) coinc_def_id = lsctables.CoincDefID(0) coinc_def_row = lsctables.CoincDef() coinc_def_row.search = "inspiral" coinc_def_row.description = "sngl_inspiral<-->sngl_inspiral coincs" coinc_def_row.coinc_def_id = coinc_def_id coinc_def_row.search_coinc_type = 0 coinc_def_table.append(coinc_def_row) outdoc.childNodes[0].appendChild(coinc_def_table) # Set up coinc inspiral and coinc event tables coinc_id = lsctables.CoincID(0) coinc_event_table = lsctables.New(lsctables.CoincTable) coinc_event_row = lsctables.Coinc() coinc_event_row.coinc_def_id = coinc_def_id coinc_event_row.nevents = len(usable_ifos) coinc_event_row.instruments = ','.join(usable_ifos) coinc_event_row.time_slide_id = lsctables.TimeSlideID(0) coinc_event_row.process_id = proc_id coinc_event_row.coinc_event_id = coinc_id coinc_event_row.likelihood = 0. 
coinc_event_table.append(coinc_event_row) outdoc.childNodes[0].appendChild(coinc_event_table) # Set up sngls sngl_inspiral_table = lsctables.New(lsctables.SnglInspiralTable) coinc_event_map_table = lsctables.New(lsctables.CoincMapTable) sngl_populated = None network_snrsq = 0 for sngl_id, ifo in enumerate(usable_ifos): sngl = return_empty_sngl(nones=True) sngl.event_id = lsctables.SnglInspiralID(sngl_id) sngl.process_id = proc_id sngl.ifo = ifo names = [ n.split('/')[-1] for n in coinc_results if 'foreground/%s' % ifo in n ] for name in names: val = coinc_results['foreground/%s/%s' % (ifo, name)] if name == 'end_time': val += self.time_offset sngl.end = lal.LIGOTimeGPS(val) else: try: setattr(sngl, name, val) except AttributeError: pass if sngl.mass1 and sngl.mass2: sngl.mtotal, sngl.eta = pnutils.mass1_mass2_to_mtotal_eta( sngl.mass1, sngl.mass2) sngl.mchirp, _ = pnutils.mass1_mass2_to_mchirp_eta( sngl.mass1, sngl.mass2) sngl_populated = sngl if sngl.snr: sngl.eff_distance = (sngl.sigmasq)**0.5 / sngl.snr network_snrsq += sngl.snr**2.0 if 'channel_names' in kwargs and ifo in kwargs['channel_names']: sngl.channel = kwargs['channel_names'][ifo] sngl_inspiral_table.append(sngl) # Set up coinc_map entry coinc_map_row = lsctables.CoincMap() coinc_map_row.table_name = 'sngl_inspiral' coinc_map_row.coinc_event_id = coinc_id coinc_map_row.event_id = sngl.event_id coinc_event_map_table.append(coinc_map_row) if self.snr_series is not None: snr_series_to_xml(self.snr_series[ifo], outdoc, sngl.event_id) # set merger time to the average of the ifo peaks self.merger_time = numpy.mean([ coinc_results['foreground/{}/end_time'.format(ifo)] for ifo in ifos ]) + self.time_offset # for subthreshold detectors, respect BAYESTAR's assumptions and checks bayestar_check_fields = ('mass1 mass2 mtotal mchirp eta spin1x ' 'spin1y spin1z spin2x spin2y spin2z').split() for sngl in sngl_inspiral_table: if sngl.ifo in followup_ifos: for bcf in bayestar_check_fields: setattr(sngl, bcf, getattr(sngl_populated, bcf)) sngl.end = lal.LIGOTimeGPS(self.merger_time) outdoc.childNodes[0].appendChild(coinc_event_map_table) outdoc.childNodes[0].appendChild(sngl_inspiral_table) # Set up the coinc inspiral table coinc_inspiral_table = lsctables.New(lsctables.CoincInspiralTable) coinc_inspiral_row = lsctables.CoincInspiral() # This seems to be used as FAP, which should not be in gracedb coinc_inspiral_row.false_alarm_rate = 0 coinc_inspiral_row.minimum_duration = 0. 
coinc_inspiral_row.instruments = tuple(usable_ifos) coinc_inspiral_row.coinc_event_id = coinc_id coinc_inspiral_row.mchirp = sngl_populated.mchirp coinc_inspiral_row.mass = sngl_populated.mtotal coinc_inspiral_row.end_time = sngl_populated.end_time coinc_inspiral_row.end_time_ns = sngl_populated.end_time_ns coinc_inspiral_row.snr = network_snrsq**0.5 far = 1.0 / (lal.YRJUL_SI * coinc_results['foreground/ifar']) coinc_inspiral_row.combined_far = far coinc_inspiral_table.append(coinc_inspiral_row) outdoc.childNodes[0].appendChild(coinc_inspiral_table) # append the PSDs self.psds = kwargs['psds'] psds_lal = {} for ifo in self.psds: psd = self.psds[ifo] kmin = int(kwargs['low_frequency_cutoff'] / psd.delta_f) fseries = lal.CreateREAL8FrequencySeries( "psd", psd.epoch, kwargs['low_frequency_cutoff'], psd.delta_f, lal.StrainUnit**2 / lal.HertzUnit, len(psd) - kmin) fseries.data.data = psd.numpy()[kmin:] / pycbc.DYN_RANGE_FAC**2.0 psds_lal[ifo] = fseries make_psd_xmldoc(psds_lal, outdoc) # source probabilities estimation if 'mc_area_args' in kwargs: eff_distances = [sngl.eff_distance for sngl in sngl_inspiral_table] probabilities = calc_probabilities(coinc_inspiral_row.mchirp, coinc_inspiral_row.snr, min(eff_distances), kwargs['mc_area_args']) self.probabilities = probabilities else: self.probabilities = None self.outdoc = outdoc self.time = sngl_populated.end
def output_sngl_inspiral_table(outputFile, tempBank, metricParams, ethincaParams, programName="", optDict = None, outdoc=None): """ Function that converts the information produced by the various PyCBC bank generation codes into a valid LIGOLW XML file containing a sngl_inspiral table and outputs to file. Parameters ----------- outputFile : string Name of the file that the bank will be written to tempBank : iterable Each entry in the tempBank iterable should be a sequence of [mass1,mass2,spin1z,spin2z] in that order. metricParams : metricParameters instance Structure holding all the options for construction of the metric and the eigenvalues, eigenvectors and covariance matrix needed to manipulate the space. ethincaParams: {ethincaParameters instance, None} Structure holding options relevant to the ethinca metric computation including the upper frequency cutoff to be used for filtering. NOTE: The computation is currently only valid for non-spinning systems and uses the TaylorF2 approximant. programName (key-word-argument) : string Name of the executable that has been run optDict (key-word argument) : dictionary Dictionary of the command line arguments passed to the program outdoc (key-word argument) : ligolw xml document If given add template bank to this representation of a xml document and write to disk. If not given create a new document. """ if optDict is None: optDict = {} if outdoc is None: outdoc = ligolw.Document() outdoc.appendChild(ligolw.LIGO_LW()) # get IFO to put in search summary table ifos = [] if 'channel_name' in optDict.keys(): if optDict['channel_name'] is not None: ifos = [optDict['channel_name'][0:2]] proc = create_process_table( outdoc, program_name=programName, detectors=ifos, options=optDict ) proc_id = proc.process_id sngl_inspiral_table = convert_to_sngl_inspiral_table(tempBank, proc_id) # Calculate Gamma components if needed if ethincaParams is not None: if ethincaParams.doEthinca: for sngl in sngl_inspiral_table: # Set tau_0 and tau_3 values needed for the calculation of # ethinca metric distances (sngl.tau0,sngl.tau3) = pnutils.mass1_mass2_to_tau0_tau3( sngl.mass1, sngl.mass2, metricParams.f0) fMax_theor, GammaVals = calculate_ethinca_metric_comps( metricParams, ethincaParams, sngl.mass1, sngl.mass2, spin1z=sngl.spin1z, spin2z=sngl.spin2z, full_ethinca=ethincaParams.full_ethinca) # assign the upper frequency cutoff and Gamma0-5 values sngl.f_final = fMax_theor for i in range(len(GammaVals)): setattr(sngl, "Gamma"+str(i), GammaVals[i]) # If Gamma metric components are not wanted, assign f_final from an # upper frequency cutoff specified in ethincaParams elif ethincaParams.cutoff is not None: for sngl in sngl_inspiral_table: sngl.f_final = pnutils.frequency_cutoff_from_name( ethincaParams.cutoff, sngl.mass1, sngl.mass2, sngl.spin1z, sngl.spin2z) # set per-template low-frequency cutoff if 'f_low_column' in optDict and 'f_low' in optDict and \ optDict['f_low_column'] is not None: for sngl in sngl_inspiral_table: setattr(sngl, optDict['f_low_column'], optDict['f_low']) outdoc.childNodes[0].appendChild(sngl_inspiral_table) # get times to put in search summary table start_time = 0 end_time = 0 if 'gps_start_time' in optDict.keys() and 'gps_end_time' in optDict.keys(): start_time = optDict['gps_start_time'] end_time = optDict['gps_end_time'] # make search summary table search_summary_table = lsctables.New(lsctables.SearchSummaryTable) search_summary = return_search_summary( start_time, end_time, len(sngl_inspiral_table), ifos ) search_summary_table.append(search_summary) 
outdoc.childNodes[0].appendChild(search_summary_table) # write the xml doc to disk ligolw_utils.write_filename(outdoc, outputFile)
injections['latitude'] = samples['latitude']
injections['inclination'] = inclination
injections['coa_phase'] = samples['phi_orb']
injections['polarization'] = samples['polarization']
injections['spin1x'] = s1x
injections['spin1y'] = s1y
injections['spin1z'] = s1z
injections['spin2x'] = s2x
injections['spin2y'] = s2y
injections['spin2z'] = s2z
injections['amp_order'] = [opts.amporder for i in range(N)]
injections['numrel_data'] = ["" for _ in range(N)]

# Create a new XML document
xmldoc = ligolw.Document()
xmldoc.appendChild(ligolw.LIGO_LW())
proc = ligo.lw.utils.process.register_to_xmldoc(xmldoc, sys.argv[0], {})
sim_table = lsctables.New(lsctables.SimInspiralTable)
xmldoc.childNodes[0].appendChild(sim_table)

# Add empty rows to the sim_inspiral table
for inj in range(N):
    row = sim_table.RowType()
    for slot in row.__slots__:
        setattr(row, slot, 0)
    sim_table.append(row)

# Fill in IDs
for i, row in enumerate(sim_table):
    row.process_id = proc.process_id
    row.simulation_id = sim_table.get_next_id()
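# Example (new): the fragment above stops after assigning IDs.  A hedged
# sketch of the remaining steps -- copying the sampled columns into the table
# rows and writing the document -- assuming `injections` behaves like a
# mapping from column name to a length-N sequence of values; the output
# filename is a placeholder.
from ligo.lw import utils as ligolw_utils

for i, row in enumerate(sim_table):
    for name in injections.keys():
        setattr(row, name, injections[name][i])

# finish the process entry and write the document to disk
proc.set_end_time_now()
ligolw_utils.write_filename(xmldoc, "injections.xml.gz")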
def main(args=None): p = parser() opts = p.parse_args(args) # LIGO-LW XML imports. from ligo.lw import ligolw from ligo.lw.param import Param from ligo.lw.utils.search_summary import append_search_summary from ligo.lw import utils as ligolw_utils from ligo.lw.lsctables import ( New, CoincDefTable, CoincID, CoincInspiralTable, CoincMapTable, CoincTable, ProcessParamsTable, ProcessTable, SimInspiralTable, SnglInspiralTable, TimeSlideTable) # glue, LAL and pylal imports. from ligo import segments import lal import lal.series import lalsimulation from lalinspiral.inspinjfind import InspiralSCExactCoincDef from lalinspiral.thinca import InspiralCoincDef from tqdm import tqdm # BAYESTAR imports. from ..io.events.ligolw import ContentHandler from ..bayestar import filter from ..util.progress import progress_map # Read PSDs. xmldoc = ligolw_utils.load_fileobj( opts.reference_psd, contenthandler=lal.series.PSDContentHandler) psds = lal.series.read_psd_xmldoc(xmldoc, root_name=None) psds = { key: filter.InterpolatedPSD(filter.abscissa(psd), psd.data.data) for key, psd in psds.items() if psd is not None} psds = [psds[ifo] for ifo in opts.detector] # Extract simulation table from injection file. inj_xmldoc = ligolw_utils.load_fileobj( opts.input, contenthandler=ContentHandler) orig_sim_inspiral_table = SimInspiralTable.get_table(inj_xmldoc) # Prune injections that are outside distance limits. orig_sim_inspiral_table[:] = [ row for row in orig_sim_inspiral_table if opts.min_distance <= row.distance <= opts.max_distance] # Open output file. xmldoc = ligolw.Document() xmlroot = xmldoc.appendChild(ligolw.LIGO_LW()) # Create tables. Process and ProcessParams tables are copied from the # injection file. coinc_def_table = xmlroot.appendChild(New(CoincDefTable)) coinc_inspiral_table = xmlroot.appendChild(New(CoincInspiralTable)) coinc_map_table = xmlroot.appendChild(New(CoincMapTable)) coinc_table = xmlroot.appendChild(New(CoincTable)) xmlroot.appendChild(ProcessParamsTable.get_table(inj_xmldoc)) xmlroot.appendChild(ProcessTable.get_table(inj_xmldoc)) sim_inspiral_table = xmlroot.appendChild(New(SimInspiralTable)) sngl_inspiral_table = xmlroot.appendChild(New(SnglInspiralTable)) time_slide_table = xmlroot.appendChild(New(TimeSlideTable)) # Write process metadata to output file. process = register_to_xmldoc( xmldoc, p, opts, instruments=opts.detector, comment="Simulated coincidences") # Add search summary to output file. all_time = segments.segment([lal.LIGOTimeGPS(0), lal.LIGOTimeGPS(2e9)]) append_search_summary(xmldoc, process, inseg=all_time, outseg=all_time) # Create a time slide entry. Needed for coinc_event rows. time_slide_id = time_slide_table.get_time_slide_id( {ifo: 0 for ifo in opts.detector}, create_new=process) # Populate CoincDef table. inspiral_coinc_def = copy.copy(InspiralCoincDef) inspiral_coinc_def.coinc_def_id = coinc_def_table.get_next_id() coinc_def_table.append(inspiral_coinc_def) found_coinc_def = copy.copy(InspiralSCExactCoincDef) found_coinc_def.coinc_def_id = coinc_def_table.get_next_id() coinc_def_table.append(found_coinc_def) # Precompute values that are common to all simulations. detectors = [lalsimulation.DetectorPrefixToLALDetector(ifo) for ifo in opts.detector] responses = [det.response for det in detectors] locations = [det.location for det in detectors] if opts.jobs != 1: from .. 
import omp omp.num_threads = 1 # disable OpenMP parallelism func = functools.partial(simulate, psds=psds, responses=responses, locations=locations, measurement_error=opts.measurement_error, f_low=opts.f_low, f_high=opts.f_high, waveform=opts.waveform) # Make sure that each thread gets a different random number state. # We start by drawing a random integer s in the main thread, and # then the i'th subprocess will seed itself with the integer i + s. # # The seed must be an unsigned 32-bit integer, so if there are n # threads, then s must be drawn from the interval [0, 2**32 - n). # # Note that *we* are thread 0, so there are a total of # n=1+len(sim_inspiral_table) threads. seed = np.random.randint(0, 2 ** 32 - len(sim_inspiral_table) - 1) np.random.seed(seed) with tqdm(desc='accepted') as progress: for sim_inspiral, simulation in zip( orig_sim_inspiral_table, progress_map( func, np.arange(len(orig_sim_inspiral_table)) + seed + 1, orig_sim_inspiral_table, jobs=opts.jobs)): sngl_inspirals = [] used_snr_series = [] net_snr = 0.0 count_triggers = 0 # Loop over individual detectors and create SnglInspiral entries. for ifo, (horizon, abs_snr, arg_snr, toa, series) \ in zip(opts.detector, simulation): if np.random.uniform() > opts.duty_cycle: continue elif abs_snr >= opts.snr_threshold: # If SNR < threshold, then the injection is not found. # Skip it. count_triggers += 1 net_snr += np.square(abs_snr) elif not opts.keep_subthreshold: continue # Create SnglInspiral entry. used_snr_series.append(series) sngl_inspirals.append( sngl_inspiral_table.RowType(**dict( dict.fromkeys(sngl_inspiral_table.validcolumns, None), process_id=process.process_id, ifo=ifo, mass1=sim_inspiral.mass1, mass2=sim_inspiral.mass2, spin1x=sim_inspiral.spin1x, spin1y=sim_inspiral.spin1y, spin1z=sim_inspiral.spin1z, spin2x=sim_inspiral.spin2x, spin2y=sim_inspiral.spin2y, spin2z=sim_inspiral.spin2z, end=toa, snr=abs_snr, coa_phase=arg_snr, f_final=opts.f_high, eff_distance=horizon / abs_snr))) net_snr = np.sqrt(net_snr) # If too few triggers were found, then skip this event. if count_triggers < opts.min_triggers: continue # If network SNR < threshold, then the injection is not found. # Skip it. if net_snr < opts.net_snr_threshold: continue # Add Coinc table entry. coinc = coinc_table.appendRow( coinc_event_id=coinc_table.get_next_id(), process_id=process.process_id, coinc_def_id=inspiral_coinc_def.coinc_def_id, time_slide_id=time_slide_id, insts=opts.detector, nevents=len(opts.detector), likelihood=None) # Add CoincInspiral table entry. coinc_inspiral_table.appendRow( coinc_event_id=coinc.coinc_event_id, instruments=[ sngl_inspiral.ifo for sngl_inspiral in sngl_inspirals], end=lal.LIGOTimeGPS(1e-9 * np.mean([ sngl_inspiral.end.ns() for sngl_inspiral in sngl_inspirals if sngl_inspiral.end is not None])), mass=sim_inspiral.mass1 + sim_inspiral.mass2, mchirp=sim_inspiral.mchirp, combined_far=0.0, # Not provided false_alarm_rate=0.0, # Not provided minimum_duration=None, # Not provided snr=net_snr) # Record all sngl_inspiral records and associate them with coincs. for sngl_inspiral, series in zip(sngl_inspirals, used_snr_series): # Give this sngl_inspiral record an id and add it to the table. sngl_inspiral.event_id = sngl_inspiral_table.get_next_id() sngl_inspiral_table.append(sngl_inspiral) if opts.enable_snr_series: elem = lal.series.build_COMPLEX8TimeSeries(series) elem.appendChild( Param.from_pyvalue('event_id', sngl_inspiral.event_id)) xmlroot.appendChild(elem) # Add CoincMap entry. 
coinc_map_table.appendRow( coinc_event_id=coinc.coinc_event_id, table_name=sngl_inspiral_table.tableName, event_id=sngl_inspiral.event_id) # Record injection if not opts.preserve_ids: sim_inspiral.simulation_id = sim_inspiral_table.get_next_id() sim_inspiral_table.append(sim_inspiral) progress.update() # Record coincidence associating injections with events. for i, sim_inspiral in enumerate(sim_inspiral_table): coinc = coinc_table.appendRow( coinc_event_id=coinc_table.get_next_id(), process_id=process.process_id, coinc_def_id=found_coinc_def.coinc_def_id, time_slide_id=time_slide_id, instruments=None, nevents=None, likelihood=None) coinc_map_table.appendRow( coinc_event_id=coinc.coinc_event_id, table_name=sim_inspiral_table.tableName, event_id=sim_inspiral.simulation_id) coinc_map_table.appendRow( coinc_event_id=coinc.coinc_event_id, table_name=coinc_table.tableName, event_id=CoincID(i)) # Record process end time. process.set_end_time_now() # Write output file. write_fileobj(xmldoc, opts.output)