def append_process(xmldoc, cluster_algorithm, comment):
    """
    Record this program in xmldoc's process/process_params tables.

    The clustering algorithm name is stored as a process parameter so
    the provenance of the clustered triggers can be recovered later.
    Returns the newly-created process row.
    """
    params = {"cluster_algorithm": cluster_algorithm}
    return ligolw_process.register_to_xmldoc(
        xmldoc,
        program = process_program_name,
        paramdict = params,
        version = __version__,
        cvs_repository = u"lscsoft",
        cvs_entry_time = __date__,
        comment = comment
    )
def gen_likelihood_control(coinc_params_distributions, seglists, name = u"ligolw_burca_tailor", comment = u""):
    """
    Wrap coinc_params_distributions in a LIGO Light-Weight XML document.

    Registers this program in the document's process table, appends a
    search summary covering the full extent of seglists, serialises the
    distributions under the given element name, stamps the process end
    time and returns the finished document.

    Parameters
    ----------
    coinc_params_distributions :
        Object providing a .to_xml(name) method and a .process_id slot.
    seglists :
        Segment list dictionary; .keys() gives the instruments and
        .extent_all() the overall analysed interval.
    name : unicode
        Name under which the distributions are serialised.
    comment : unicode
        Comment recorded in the process table.

    Returns
    -------
    ligolw.Document
    """
    xmldoc = ligolw.Document()
    node = xmldoc.appendChild(ligolw.LIGO_LW())
    process = ligolw_process.register_to_xmldoc(
        xmldoc,
        program = process_program_name,
        paramdict = {},
        version = __version__,
        # unicode literal for consistency with the other
        # register_to_xmldoc() calls in this file
        cvs_repository = u"lscsoft",
        cvs_entry_time = __date__,
        comment = comment
    )
    coinc_params_distributions.process_id = process.process_id
    ligolw_search_summary.append_search_summary(
        xmldoc,
        process,
        ifos = seglists.keys(),
        inseg = seglists.extent_all(),
        outseg = seglists.extent_all()
    )
    node.appendChild(coinc_params_distributions.to_xml(name))
    ligolw_process.set_process_end_time(process)
    return xmldoc
def save_xml(self, filename):
    """
    Save the MDC set as an XML SimBurstTable.

    Parameters
    ----------
    filename : str
        The location to save the xml file. The output is gzipped, so
        ending it with a ".gz" would stick with convention.
    """
    xmldoc = ligolw.Document()
    lw = xmldoc.appendChild(ligolw.LIGO_LW())
    sim = lsctables.New(self.table_type)
    lw.appendChild(sim)
    # This needs to be given the proper metadata once the package has the
    # maturity to write something sensible.
    for waveform in self.waveforms:
        procrow = process.register_to_xmldoc(
            xmldoc,
            "minke_burst_mdc+{}".format(minke.__version__),
            {})  # waveform.params)
        try:
            # Preferred path: let the waveform build its own table row.
            waveform_row = waveform._row(sim)
            waveform_row.process_id = procrow.process_id
        # FIX: was a bare "except:", which also swallowed SystemExit and
        # KeyboardInterrupt; only genuine errors should trigger the
        # manual row-construction fallback below.
        except Exception:
            # Fallback: populate a row column-by-column from the
            # waveform's params dict, attributes, or zero.
            row = sim.RowType()
            for a in self.table_type.validcolumns.keys():
                if a in waveform.params.keys():
                    setattr(row, a, waveform.params[a])
                else:
                    if not hasattr(waveform, a):
                        setattr(row, a, 0)
                    else:
                        setattr(row, a, getattr(waveform, a))
            row.waveform = waveform.waveform
            if self.table_type == lsctables.SimBurstTable:
                # Fill in the time
                row.set_time_geocent(GPS(float(waveform.time)))
                # Get the sky locations
                row.ra, row.dec, row.psi = waveform.ra, waveform.dec, waveform.psi
            row.simulation_id = waveform.simulation_id
            row.waveform_number = random.randint(0, int(2**32) - 1)
            ### !! This needs to be updated.
            row.process_id = "process:process_id:0"  # procrow.process_id
            waveform_row = row
        sim.append(waveform_row)
        #del waveform_row
    # Write out the xml and gzip it.
    utils.write_filename(xmldoc, filename, gz=True)
def append_process(xmldoc, match_algorithm, comment):
    """
    Convenience wrapper for adding process metadata to the document.

    The match algorithm name is recorded as a process parameter;
    the new process row is returned to the caller.
    """
    paramdict = {"match_algorithm": match_algorithm}
    return ligolw_process.register_to_xmldoc(
        xmldoc,
        program = process_program_name,
        paramdict = paramdict,
        version = __version__,
        cvs_repository = u"lscsoft",
        cvs_entry_time = __date__,
        comment = comment
    )
def new_doc(comment = None, **kwargs):
    """
    Construct a fresh LIGO_LW document for time-slide output.

    Registers the ligolw_tisi program in the document's process table
    (recording the keyword arguments as process parameters) and appends
    an empty time_slide table.

    Returns
    -------
    (doc, process) : the new document and its process row.
    """
    doc = ligolw.Document()
    root = doc.appendChild(ligolw.LIGO_LW())
    process = ligolw_process.register_to_xmldoc(
        doc,
        program = u"ligolw_tisi",
        paramdict = kwargs,
        version = __version__,
        cvs_repository = u"lscsoft",
        cvs_entry_time = __date__,
        comment = comment
    )
    root.appendChild(lsctables.New(lsctables.TimeSlideTable))
    return doc, process
def save_xml(self, filename):
    """
    Save the MDC set as an XML SimBurstTable.

    Parameters
    ----------
    filename : str
        The location to save the xml file. The output is gzipped, so
        ending it with a ".gz" would stick with convention.
    """
    document = ligolw.Document()
    container = document.appendChild(ligolw.LIGO_LW())
    sim_table = lsctables.New(lsctables.SimBurstTable)
    container.appendChild(sim_table)
    # This needs to be given the proper metadata once the package has the
    # maturity to write something sensible.
    for waveform in self.waveforms:
        procrow = process.register_to_xmldoc(document, "minke_burst_mdc", {})  # waveform.params)
        row = waveform._row(sim_table)
        row.process_id = procrow.process_id
        sim_table.append(row)
    # Write out the xml and gzip it.
    utils.write_filename(document, filename, gz=True)
def setup_analysislogging(workflow, segs_list, insps, args, output_dir,
                          program_name="workflow", tags=None):
    """
    This module sets up the analysis logging xml file that contains the
    following information:

    * Command line arguments that the code was run with
    * Segment list of times marked as SCIENCE
    * Segment list of times marked as SCIENCE and "OK" ie. not CAT_1 vetoed
    * Segment list of times marked as SCIENCE_OK and present on the cluster
    * The times that will be analysed by the matched-filter jobs

    Parameters
    -----------
    workflow : pycbc.workflow.core.Workflow
        The Workflow instance.
    segs_list : pycbc.workflow.core.FileList
        A list of Files containing the information needed to generate the
        segments above. For segments generated at run time the associated
        segmentlist is a property of this object.
    insps : pycbc.workflow.core.FileList
        The output files from the matched-filtering module. Used to identify
        what times have been analysed in this workflow.
    output_dir : path
        Directory to output any files to.
    program_name : string (optional, default = "workflow")
        The program name to stick in the process/process_params tables.
    tags : list (optional, default = None)
        If given restrict to considering inspiral and segment files that
        are tagged with all tags in this list.

    Returns
    --------
    pycbc.workflow.core.FileList
        One-element list holding the summary XML File.
    """
    logging.info("Entering analysis logging module.")
    make_analysis_dir(output_dir)

    # FIX: avoid the shared mutable default argument ([]) by defaulting
    # to None and creating a fresh list here.
    if tags is None:
        tags = []

    # Construct the summary XML file
    outdoc = ligolw.Document()
    outdoc.appendChild(ligolw.LIGO_LW())

    # Add process and process_params tables
    proc_id = process.register_to_xmldoc(outdoc, program_name,
                                         vars(args)).process_id

    # Now add the various segment lists to this file
    summ_segs = segmentlist([workflow.analysis_time])

    # If tags is given filter by tags
    if tags:
        for tag in tags:
            segs_list = segs_list.find_output_with_tag(tag)
            insps = insps.find_output_with_tag(tag)

    for ifo in workflow.ifos:
        # Lets get the segment lists we need
        seg_ifo_files = segs_list.find_output_with_ifo(ifo)

        # SCIENCE
        sci_seg_file = seg_ifo_files.find_output_with_tag('SCIENCE')
        if len(sci_seg_file) == 1:
            sci_seg_file = sci_seg_file[0]
            # FIX: this was a stray Python-2 "print" debugging statement;
            # route it through the logging machinery used elsewhere here.
            logging.debug("Science segment dict keys: %s",
                          sci_seg_file.segment_dict.keys())
            sci_segs = sci_seg_file.segment_dict['%s:%s' % (ifo, 'RESULT')]
            sci_def_id = segmentdb_utils.add_to_segment_definer(outdoc,
                    proc_id, ifo, "CBC_WORKFLOW_SCIENCE", 0)
            segmentdb_utils.add_to_segment(outdoc, proc_id, sci_def_id,
                                           sci_segs)
            segmentdb_utils.add_to_segment_summary(outdoc, proc_id,
                    sci_def_id, summ_segs, comment='')
        elif sci_seg_file:
            # FIXME: While the segment module is still fractured (#127) this
            # may not work. Please update when #127 is resolved
            pass
            #err_msg = "Got %d files matching %s and %s. Expected 1 or 0." \
            #          % (len(sci_seg_file), ifo, 'SCIENCE')
            #raise ValueError(err_msg)

        # SCIENCE_OK
        sci_ok_seg_file = seg_ifo_files.find_output_with_tag('SCIENCE_OK')
        if len(sci_ok_seg_file) == 1:
            sci_ok_seg_file = sci_ok_seg_file[0]
            sci_ok_segs = \
                sci_ok_seg_file.segment_dict['%s:%s' % (ifo, 'SCIENCE_OK')]
            sci_ok_def_id = segmentdb_utils.add_to_segment_definer(outdoc,
                    proc_id, ifo, "CBC_WORKFLOW_SCIENCE_OK", 0)
            segmentdb_utils.add_to_segment(outdoc, proc_id, sci_ok_def_id,
                                           sci_ok_segs)
            segmentdb_utils.add_to_segment_summary(outdoc, proc_id,
                    sci_ok_def_id, summ_segs, comment='')
        elif sci_ok_seg_file:
            # FIXME: While the segment module is still fractured (#127) this
            # may not work. Please update when #127 is resolved
            pass
            #err_msg = "Got %d files matching %s and %s. Expected 1 or 0." \
            #          % (len(sci_ok_seg_file), ifo, 'SCIENCE_OK')
            #raise ValueError(err_msg)

        # SCIENCE_AVAILABLE
        sci_available_seg_file = seg_ifo_files.find_output_with_tag(
            'SCIENCE_AVAILABLE')
        if len(sci_available_seg_file) == 1:
            sci_available_seg_file = sci_available_seg_file[0]
            sci_available_segs = sci_available_seg_file.segment_dict
            sci_available_segs = \
                sci_available_segs['%s:%s' % (ifo, 'SCIENCE_AVAILABLE')]
            sci_available_def_id = segmentdb_utils.add_to_segment_definer(
                outdoc, proc_id, ifo, "CBC_WORKFLOW_SCIENCE_AVAILABLE", 0)
            segmentdb_utils.add_to_segment(outdoc, proc_id,
                    sci_available_def_id, sci_available_segs)
            segmentdb_utils.add_to_segment_summary(outdoc, proc_id,
                    sci_available_def_id, summ_segs, comment='')
        elif sci_available_seg_file:
            # FIXME: While the segment module is still fractured (#127) this
            # may not work. Please update when #127 is resolved
            pass
            #err_msg = "Got %d files matching %s and %s. Expected 1 or 0." \
            #          % (len(sci_available_seg_file), ifo, 'SCIENCE_AVAILABLE')
            #raise ValueError(err_msg)

        # ANALYSABLE - This one needs to come from inspiral outs
        ifo_insps = insps.find_output_with_ifo(ifo)
        analysable_segs = ifo_insps.get_times_covered_by_files()
        analysable_def_id = segmentdb_utils.add_to_segment_definer(outdoc,
                proc_id, ifo, "CBC_WORKFLOW_ANALYSABLE", 0)
        segmentdb_utils.add_to_segment(outdoc, proc_id, analysable_def_id,
                                       analysable_segs)
        segmentdb_utils.add_to_segment_summary(outdoc, proc_id,
                analysable_def_id, summ_segs, comment='')

    summ_file = File(workflow.ifos, "WORKFLOW_SUMMARY",
                     workflow.analysis_time, extension=".xml",
                     directory=output_dir)
    summ_file.PFN(summ_file.storage_path, site='local')
    utils.write_filename(outdoc, summ_file.storage_path)

    return FileList([summ_file])
xmldoc = ligolw.Document() xmldoc.appendChild(ligolw.LIGO_LW()) lsctables.SnglInspiralTable.RowType = SnglInspiralTable tbl = lsctables.New(lsctables.SnglInspiralTable) xmldoc.childNodes[-1].appendChild(tbl) # initialize random seed np.random.mtrand.seed(opts.seed) # # prepare process table with information about the current program # opts_dict = dict((k, v) for k, v in opts.__dict__.iteritems() if v is not False and v is not None) process = ligolw_process.register_to_xmldoc(xmldoc, "lalapps_cbc_sbank", opts_dict, version="no version", cvs_repository="sbank", cvs_entry_time=strftime('%Y/%m/%d %H:%M:%S')) # # populate params dictionary to be passed to the generators # if opts.trial_waveforms: trialdoc = utils.load_filename(opts.trial_waveforms, contenthandler=ContentHandler, gz=opts.trial_waveforms.endswith('.gz')) trial_sngls = lsctables.SnglInspiralTable.get_table(trialdoc) proposal = [tmplt_class.from_sngl(t, bank=bank) for t in trial_sngls] else: params = {'mass1': (opts.mass1_min, opts.mass1_max), 'mass2': (opts.mass2_min, opts.mass2_max),
del CMAP["mass2"] CMAP[("mass1", "mass2")] = ("mass1", "mass2") ar = numpy.random.random((len(CMAP), 10)) samp_dict = dict(zip(CMAP, ar)) ar = samp_dict[("mass1", "mass2")] samp_dict[("mass1", "mass2")] = numpy.array([ar, ar]) del CMAP[("mass1", "mass2")] CMAP["mass1"] = "mass1" CMAP["mass2"] = "mass2" samp_dict["samp_n"] = numpy.array(range(0, 10)) CMAP["sample_n"] = "sample_n" xmldoc = ligolw.Document() xmldoc.appendChild(ligolw.LIGO_LW()) process.register_to_xmldoc(xmldoc, sys.argv[0], {}) append_samples_to_xmldoc(xmldoc, samp_dict) def gaussian(x, mu=0, std=1): return 1 / numpy.sqrt(numpy.pi * 2) / std * numpy.exp( -(x - mu)**2 / 2 / std**2) m1m, m2m = 1.4, 1.5 m1, m2 = numpy.random.random(2000).reshape(2, 1000) * 1.0 + 1.0 loglikes = [ gaussian(m1i, m1m) * gaussian(m2i, m2m) for m1i, m2i in zip(m1, m2) ] #loglikelihood = - 7.5**2/2 #append_likelihood_result_to_xmldoc(xmldoc, loglikelihood, **{"mass1": 1.4, "mass2": 1.4, "ifos": "H1,L1,V1"}) for m1i, m2i, loglikelihood in zip(m1, m2, loglikes):
def to_coinc_xml_object(self, file_name):
    """Dump this object's coincident triggers to a LIGO_LW XML file.

    Builds sngl_inspiral, coinc_event, coinc_inspiral, coinc_event_map,
    coinc_definer, time_slide and search_summary tables from the HDF
    trigger/bank files held on self, then writes them to file_name.
    """
    # FIXME: This function will only work with two ifos!!
    outdoc = ligolw.Document()
    outdoc.appendChild(ligolw.LIGO_LW())
    ifos = [ifo for ifo in self.sngl_files.keys()]
    proc_id = ligolw_process.register_to_xmldoc(
        outdoc, 'pycbc', {}, ifos=ifos, comment='',
        version=pycbc_version.git_hash,
        cvs_repository='pycbc/' + pycbc_version.git_branch,
        cvs_entry_time=pycbc_version.date).process_id

    # Search summary covers the full span of the coincident segments.
    search_summ_table = lsctables.New(lsctables.SearchSummaryTable)
    coinc_h5file = self.coinc_file.h5file
    start_time = coinc_h5file['segments']['coinc']['start'][:].min()
    end_time = coinc_h5file['segments']['coinc']['end'][:].max()
    num_trigs = len(self.sort_arr)
    search_summary = return_search_summary(start_time, end_time,
                                           num_trigs, ifos)
    search_summ_table.append(search_summary)
    outdoc.childNodes[0].appendChild(search_summ_table)

    sngl_inspiral_table = lsctables.New(lsctables.SnglInspiralTable)
    coinc_def_table = lsctables.New(lsctables.CoincDefTable)
    coinc_event_table = lsctables.New(lsctables.CoincTable)
    coinc_inspiral_table = lsctables.New(lsctables.CoincInspiralTable)
    coinc_event_map_table = lsctables.New(lsctables.CoincMapTable)
    time_slide_table = lsctables.New(lsctables.TimeSlideTable)

    # Set up time_slide table: a single zero-lag slide shared by all ifos.
    time_slide_id = lsctables.TimeSlideID(0)
    for ifo in ifos:
        time_slide_row = lsctables.TimeSlide()
        time_slide_row.instrument = ifo
        time_slide_row.time_slide_id = time_slide_id
        time_slide_row.offset = 0
        time_slide_row.process_id = proc_id
        time_slide_table.append(time_slide_row)

    # Set up coinc_definer table
    coinc_def_id = lsctables.CoincDefID(0)
    coinc_def_row = lsctables.CoincDef()
    coinc_def_row.search = "inspiral"
    coinc_def_row.description = "sngl_inspiral<-->sngl_inspiral coincidences"
    coinc_def_row.coinc_def_id = coinc_def_id
    coinc_def_row.search_coinc_type = 0
    coinc_def_table.append(coinc_def_row)

    # Pre-load per-template, per-coinc and per-single columns from the
    # HDF files so the main loop below is just indexing.
    bank_col_names = ['mass1', 'mass2', 'spin1z', 'spin2z']
    bank_col_vals = {}
    for name in bank_col_names:
        bank_col_vals[name] = self.get_bankfile_array(name)

    coinc_event_names = ['ifar', 'time1', 'fap', 'stat']
    coinc_event_vals = {}
    for name in coinc_event_names:
        coinc_event_vals[name] = self.get_coincfile_array(name)

    sngl_col_names = [
        'snr', 'chisq', 'chisq_dof', 'bank_chisq', 'bank_chisq_dof',
        'cont_chisq', 'cont_chisq_dof', 'end_time', 'template_duration',
        'coa_phase', 'sigmasq'
    ]
    sngl_col_vals = {}
    for name in sngl_col_names:
        sngl_col_vals[name] = self.get_snglfile_array_dict(name)

    for idx in xrange(len(self.sort_arr)):
        # Set up IDs and mapping values
        coinc_id = lsctables.CoincID(idx)

        # Set up sngls
        # FIXME: As two-ifo is hardcoded loop over all ifos
        sngl_combined_mchirp = 0
        sngl_combined_mtot = 0
        for ifo in ifos:
            sngl_id = self.trig_id[ifo][idx]
            event_id = lsctables.SnglInspiralID(sngl_id)
            sngl = return_empty_sngl()
            sngl.event_id = event_id
            sngl.ifo = ifo
            for name in sngl_col_names:
                val = sngl_col_vals[name][ifo][idx]
                if name == 'end_time':
                    sngl.set_end(LIGOTimeGPS(val))
                else:
                    setattr(sngl, name, val)
            for name in bank_col_names:
                val = bank_col_vals[name][idx]
                setattr(sngl, name, val)
            # Derive the mass columns the table schema expects.
            sngl.mtotal, sngl.eta = pnutils.mass1_mass2_to_mtotal_eta(
                sngl.mass1, sngl.mass2)
            sngl.mchirp, _ = pnutils.mass1_mass2_to_mchirp_eta(
                sngl.mass1, sngl.mass2)
            sngl.eff_distance = (sngl.sigmasq)**0.5 / sngl.snr
            sngl_combined_mchirp += sngl.mchirp
            sngl_combined_mtot += sngl.mtotal
            sngl_inspiral_table.append(sngl)

            # Set up coinc_map entry
            coinc_map_row = lsctables.CoincMap()
            coinc_map_row.table_name = 'sngl_inspiral'
            coinc_map_row.coinc_event_id = coinc_id
            coinc_map_row.event_id = event_id
            coinc_event_map_table.append(coinc_map_row)

        # Arithmetic-mean combination over the participating detectors.
        sngl_combined_mchirp = sngl_combined_mchirp / len(ifos)
        sngl_combined_mtot = sngl_combined_mtot / len(ifos)

        # Set up coinc inspiral and coinc event tables
        coinc_event_row = lsctables.Coinc()
        coinc_inspiral_row = lsctables.CoincInspiral()
        coinc_event_row.coinc_def_id = coinc_def_id
        coinc_event_row.nevents = len(ifos)
        coinc_event_row.instruments = ','.join(ifos)
        coinc_inspiral_row.set_ifos(ifos)
        coinc_event_row.time_slide_id = time_slide_id
        coinc_event_row.process_id = proc_id
        coinc_event_row.coinc_event_id = coinc_id
        coinc_inspiral_row.coinc_event_id = coinc_id
        coinc_inspiral_row.mchirp = sngl_combined_mchirp
        coinc_inspiral_row.mass = sngl_combined_mtot
        coinc_inspiral_row.set_end(
            LIGOTimeGPS(coinc_event_vals['time1'][idx]))
        coinc_inspiral_row.snr = coinc_event_vals['stat'][idx]
        coinc_inspiral_row.false_alarm_rate = coinc_event_vals['fap'][idx]
        # combined_far is the reciprocal of the IFAR (stored in years).
        coinc_inspiral_row.combined_far = 1. / coinc_event_vals['ifar'][idx]
        # Transform to Hz
        coinc_inspiral_row.combined_far = \
            coinc_inspiral_row.combined_far / YRJUL_SI
        coinc_event_row.likelihood = 0.
        coinc_inspiral_row.minimum_duration = 0.
        coinc_event_table.append(coinc_event_row)
        coinc_inspiral_table.append(coinc_inspiral_row)

    outdoc.childNodes[0].appendChild(coinc_def_table)
    outdoc.childNodes[0].appendChild(coinc_event_table)
    outdoc.childNodes[0].appendChild(coinc_event_map_table)
    outdoc.childNodes[0].appendChild(time_slide_table)
    outdoc.childNodes[0].appendChild(coinc_inspiral_table)
    outdoc.childNodes[0].appendChild(sngl_inspiral_table)

    ligolw_utils.write_filename(outdoc, file_name)
def __init__(self, ifos, coinc_results, **kwargs):
    """Initialize a ligolw xml representation of a zerolag trigger
    for upload from pycbc live to gracedb.

    Parameters
    ----------
    ifos: list of strs
        A list of the ifos participating in this trigger.
    coinc_results: dict of values
        A dictionary of values. The format is defined in
        pycbc/events/coinc.py and matches the on disk representation
        in the hdf file for this time.
    psds: dict of FrequencySeries
        Dictionary providing PSD estimates for all involved detectors.
    low_frequency_cutoff: float
        Minimum valid frequency for the PSD estimates.
    high_frequency_cutoff: float, optional
        Maximum frequency considered for the PSD estimates. Default None.
    followup_data: dict of dicts, optional
        Dictionary providing SNR time series for each detector,
        to be used in sky localization with BAYESTAR. The format should
        be `followup_data['H1']['snr_series']`. More detectors can be
        present than given in `ifos`. If so, the extra detectors will
        only be used for sky localization.
    channel_names: dict of strings, optional
        Strain channel names for each detector.
        Will be recorded in the sngl_inspiral table.
    mc_area_args: dict of dicts, optional
        Dictionary providing arguments to be used in source probability
        estimation with pycbc/mchirp_area.py
    """
    self.template_id = coinc_results['foreground/%s/template_id' % ifos[0]]
    self.coinc_results = coinc_results
    self.ifos = ifos

    # remember if this should be marked as HWINJ
    self.is_hardware_injection = ('HWINJ' in coinc_results
                                  and coinc_results['HWINJ'])

    # Check if we need to apply a time offset (this may be permerger)
    self.time_offset = 0
    rtoff = 'foreground/{}/time_offset'.format(ifos[0])
    if rtoff in coinc_results:
        self.time_offset = coinc_results[rtoff]

    if 'followup_data' in kwargs:
        fud = kwargs['followup_data']
        # All SNR series must share a sample interval.
        assert len({fud[ifo]['snr_series'].delta_t for ifo in fud}) == 1, \
                "delta_t for all ifos do not match"
        self.snr_series = {ifo: fud[ifo]['snr_series'] for ifo in fud}
        usable_ifos = fud.keys()
        # Detectors present only for localization, not in the coinc.
        followup_ifos = list(set(usable_ifos) - set(ifos))
        for ifo in self.snr_series:
            self.snr_series[ifo].start_time += self.time_offset
    else:
        self.snr_series = None
        usable_ifos = ifos
        followup_ifos = []

    # Set up the bare structure of the xml document
    outdoc = ligolw.Document()
    outdoc.appendChild(ligolw.LIGO_LW())

    proc_id = ligolw_process.register_to_xmldoc(
        outdoc, 'pycbc', {}, ifos=usable_ifos, comment='',
        version=pycbc_version.version,
        cvs_repository='pycbc/'+pycbc_version.git_branch,
        cvs_entry_time=pycbc_version.date).process_id

    # Set up coinc_definer table
    coinc_def_table = lsctables.New(lsctables.CoincDefTable)
    coinc_def_id = lsctables.CoincDefID(0)
    coinc_def_row = lsctables.CoincDef()
    coinc_def_row.search = "inspiral"
    coinc_def_row.description = "sngl_inspiral<-->sngl_inspiral coincs"
    coinc_def_row.coinc_def_id = coinc_def_id
    coinc_def_row.search_coinc_type = 0
    coinc_def_table.append(coinc_def_row)
    outdoc.childNodes[0].appendChild(coinc_def_table)

    # Set up coinc inspiral and coinc event tables
    coinc_id = lsctables.CoincID(0)
    coinc_event_table = lsctables.New(lsctables.CoincTable)
    coinc_event_row = lsctables.Coinc()
    coinc_event_row.coinc_def_id = coinc_def_id
    coinc_event_row.nevents = len(usable_ifos)
    coinc_event_row.instruments = ','.join(usable_ifos)
    coinc_event_row.time_slide_id = lsctables.TimeSlideID(0)
    coinc_event_row.process_id = proc_id
    coinc_event_row.coinc_event_id = coinc_id
    coinc_event_row.likelihood = 0.
    coinc_event_table.append(coinc_event_row)
    outdoc.childNodes[0].appendChild(coinc_event_table)

    # Set up sngls
    sngl_inspiral_table = lsctables.New(lsctables.SnglInspiralTable)
    coinc_event_map_table = lsctables.New(lsctables.CoincMapTable)

    # sngl_populated ends up holding the last sngl with valid masses;
    # its values seed the followup-ifo rows below.
    sngl_populated = None
    network_snrsq = 0
    for sngl_id, ifo in enumerate(usable_ifos):
        sngl = return_empty_sngl(nones=True)
        sngl.event_id = lsctables.SnglInspiralID(sngl_id)
        sngl.process_id = proc_id
        sngl.ifo = ifo
        # Copy every foreground/<ifo>/* entry into the row.
        names = [n.split('/')[-1] for n in coinc_results
                 if 'foreground/%s' % ifo in n]
        for name in names:
            val = coinc_results['foreground/%s/%s' % (ifo, name)]
            if name == 'end_time':
                val += self.time_offset
                sngl.set_end(lal.LIGOTimeGPS(val))
            else:
                try:
                    setattr(sngl, name, val)
                except AttributeError:
                    # not a sngl_inspiral column; skip it
                    pass
        if sngl.mass1 and sngl.mass2:
            sngl.mtotal, sngl.eta = pnutils.mass1_mass2_to_mtotal_eta(
                    sngl.mass1, sngl.mass2)
            sngl.mchirp, _ = pnutils.mass1_mass2_to_mchirp_eta(
                    sngl.mass1, sngl.mass2)
            sngl_populated = sngl
        if sngl.snr:
            sngl.eff_distance = (sngl.sigmasq)**0.5 / sngl.snr
            network_snrsq += sngl.snr ** 2.0
        if 'channel_names' in kwargs and ifo in kwargs['channel_names']:
            sngl.channel = kwargs['channel_names'][ifo]
        sngl_inspiral_table.append(sngl)

        # Set up coinc_map entry
        coinc_map_row = lsctables.CoincMap()
        coinc_map_row.table_name = 'sngl_inspiral'
        coinc_map_row.coinc_event_id = coinc_id
        coinc_map_row.event_id = sngl.event_id
        coinc_event_map_table.append(coinc_map_row)

        if self.snr_series is not None:
            snr_series_to_xml(self.snr_series[ifo], outdoc, sngl.event_id)

    # set merger time to the average of the ifo peaks
    self.merger_time = numpy.mean(
            [coinc_results['foreground/{}/end_time'.format(ifo)]
             for ifo in ifos]) + self.time_offset

    # for subthreshold detectors, respect BAYESTAR's assumptions and checks
    bayestar_check_fields = ('mass1 mass2 mtotal mchirp eta spin1x '
                             'spin1y spin1z spin2x spin2y spin2z').split()
    for sngl in sngl_inspiral_table:
        if sngl.ifo in followup_ifos:
            for bcf in bayestar_check_fields:
                setattr(sngl, bcf, getattr(sngl_populated, bcf))
            sngl.set_end(lal.LIGOTimeGPS(self.merger_time))

    outdoc.childNodes[0].appendChild(coinc_event_map_table)
    outdoc.childNodes[0].appendChild(sngl_inspiral_table)

    # Set up the coinc inspiral table
    coinc_inspiral_table = lsctables.New(lsctables.CoincInspiralTable)
    coinc_inspiral_row = lsctables.CoincInspiral()
    # This seems to be used as FAP, which should not be in gracedb
    coinc_inspiral_row.false_alarm_rate = 0
    coinc_inspiral_row.minimum_duration = 0.
    coinc_inspiral_row.instruments = tuple(usable_ifos)
    coinc_inspiral_row.coinc_event_id = coinc_id
    coinc_inspiral_row.mchirp = sngl_populated.mchirp
    coinc_inspiral_row.mass = sngl_populated.mtotal
    coinc_inspiral_row.end_time = sngl_populated.end_time
    coinc_inspiral_row.end_time_ns = sngl_populated.end_time_ns
    # Quadrature-sum network SNR.
    coinc_inspiral_row.snr = network_snrsq ** 0.5
    # Convert IFAR (years) to a FAR in Hz.
    far = 1.0 / (lal.YRJUL_SI * coinc_results['foreground/ifar'])
    coinc_inspiral_row.combined_far = far
    coinc_inspiral_table.append(coinc_inspiral_row)
    outdoc.childNodes[0].appendChild(coinc_inspiral_table)

    # append the PSDs
    self.psds = kwargs['psds']
    psds_lal = {}
    for ifo in self.psds:
        psd = self.psds[ifo]
        # Trim the PSD below the low-frequency cutoff before export.
        kmin = int(kwargs['low_frequency_cutoff'] / psd.delta_f)
        fseries = lal.CreateREAL8FrequencySeries(
            "psd", psd.epoch, kwargs['low_frequency_cutoff'], psd.delta_f,
            lal.StrainUnit**2 / lal.HertzUnit, len(psd) - kmin)
        # Undo the dynamic-range scaling applied internally by pycbc.
        fseries.data.data = psd.numpy()[kmin:] / pycbc.DYN_RANGE_FAC ** 2.0
        psds_lal[ifo] = fseries
    make_psd_xmldoc(psds_lal, outdoc)

    # source probabilities estimation
    if 'mc_area_args' in kwargs:
        eff_distances = [sngl.eff_distance for sngl in sngl_inspiral_table]
        probabilities = calc_probabilities(coinc_inspiral_row.mchirp,
                                           coinc_inspiral_row.snr,
                                           min(eff_distances),
                                           kwargs['mc_area_args'])
        self.probabilities = probabilities
    else:
        self.probabilities = None

    self.outdoc = outdoc
    self.time = sngl_populated.get_end()
return options, filenames options, filenames = parse_command_line() opts_dict = dict((k, v) for k, v in options.__dict__.iteritems() if v is not False and v is not None) for fname in filenames: xmldoc=utils.load_filename(fname, gz=fname.endswith(".gz"), verbose = options.verbose, contenthandler=ContentHandler) sngl_inspiral_table=lsctables.SnglInspiralTable.get_table(xmldoc) process_params_table = lsctables.ProcessParamsTable.get_table(xmldoc) # Prepare process table with information about the current program process = ligolw_process.register_to_xmldoc(xmldoc, "lalapps_cbc_sbank_splitter", opts_dict, version="no version", cvs_repository="sbank", cvs_entry_time=strftime('%Y/%m/%d %H:%M:%S')) # sort template rows sngl_inspiral_table.sort(key=attrgetter(options.sort_by)) sngl_inspiral_table_split = lsctables.table.new_from_template(sngl_inspiral_table) sngl_inspiral_table.parentNode.replaceChild(sngl_inspiral_table_split, sngl_inspiral_table) if len(sngl_inspiral_table) < options.nbanks: raise ValueError, "Not enough templates to create the requested number of subbanks." # override/read instrument column if options.instrument: for row in sngl_inspiral_table: row.ifo = options.instrument else:
def register_to_xmldoc(xmldoc, parser, opts, **kwargs):
    """
    Record the running program and its parsed command-line options in
    xmldoc's process/process_params tables.

    Open file objects among the option values are replaced by their
    file names so the parameters remain serialisable. Returns the new
    process row.
    """
    from glue.ligolw.utils import process
    paramdict = {}
    for key, value in opts.__dict__.items():
        # store the filename rather than the file handle
        paramdict[key] = value.name if hasattr(value, 'read') else value
    return process.register_to_xmldoc(xmldoc, parser.prog, paramdict)
def __init__(self, ifos, coinc_results, **kwargs):
    """Initialize a ligolw xml representation of a zerolag trigger
    for upload from pycbc live to gracedb.

    Parameters
    ----------
    ifos: list of strs
        A list of the ifos pariticipating in this trigger
    coinc_results: dict of values
        A dictionary of values. The format is defined in
        pycbc/events/coinc.py and matches the on disk representation
        in the hdf file for this time.
    """
    self.ifos = ifos
    if 'followup_ifos' in kwargs and kwargs['followup_ifos'] is not None:
        self.followup_ifos = kwargs['followup_ifos']
    else:
        self.followup_ifos = []
    self.template_id = coinc_results['foreground/%s/template_id'
                                     % self.ifos[0]]

    # remember if this should be marked as HWINJ
    self.is_hardware_injection = ('HWINJ' in coinc_results)

    # Set up the bare structure of the xml document
    outdoc = ligolw.Document()
    outdoc.appendChild(ligolw.LIGO_LW())

    proc_id = ligolw_process.register_to_xmldoc(
        outdoc, 'pycbc', {}, ifos=ifos, comment='',
        version=pycbc_version.git_hash,
        cvs_repository='pycbc/' + pycbc_version.git_branch,
        cvs_entry_time=pycbc_version.date).process_id

    # Set up coinc_definer table
    coinc_def_table = lsctables.New(lsctables.CoincDefTable)
    coinc_def_id = lsctables.CoincDefID(0)
    coinc_def_row = lsctables.CoincDef()
    coinc_def_row.search = "inspiral"
    coinc_def_row.description = "sngl_inspiral<-->sngl_inspiral coincs"
    coinc_def_row.coinc_def_id = coinc_def_id
    coinc_def_row.search_coinc_type = 0
    coinc_def_table.append(coinc_def_row)
    outdoc.childNodes[0].appendChild(coinc_def_table)

    # Set up coinc inspiral and coinc event tables
    coinc_id = lsctables.CoincID(0)
    coinc_event_table = lsctables.New(lsctables.CoincTable)
    coinc_event_row = lsctables.Coinc()
    coinc_event_row.coinc_def_id = coinc_def_id
    coinc_event_row.nevents = len(ifos)
    coinc_event_row.instruments = ','.join(ifos)
    coinc_event_row.time_slide_id = lsctables.TimeSlideID(0)
    coinc_event_row.process_id = proc_id
    coinc_event_row.coinc_event_id = coinc_id
    coinc_event_row.likelihood = 0.
    coinc_event_table.append(coinc_event_row)
    outdoc.childNodes[0].appendChild(coinc_event_table)

    # Set up sngls
    sngl_inspiral_table = lsctables.New(lsctables.SnglInspiralTable)
    coinc_event_map_table = lsctables.New(lsctables.CoincMapTable)

    # Map ifo -> event_id so SNR series can be attached per-row later.
    sngl_event_id_map = {}
    # sngl_populated ends up holding the last sngl with valid masses;
    # its values seed the followup-ifo rows below.
    sngl_populated = None
    for sngl_id, ifo in enumerate(ifos + self.followup_ifos):
        sngl = return_empty_sngl(nones=True)
        sngl.event_id = lsctables.SnglInspiralID(sngl_id)
        sngl_event_id_map[ifo] = sngl.event_id
        sngl.process_id = proc_id
        sngl.ifo = ifo
        # Copy every foreground/<ifo>/* entry into the row.
        names = [
            n.split('/')[-1] for n in coinc_results
            if 'foreground/%s' % ifo in n
        ]
        for name in names:
            val = coinc_results['foreground/%s/%s' % (ifo, name)]
            if name == 'end_time':
                sngl.set_end(lal.LIGOTimeGPS(val))
            else:
                try:
                    setattr(sngl, name, val)
                except AttributeError:
                    # not a sngl_inspiral column; skip it
                    pass
        if sngl.mass1 and sngl.mass2:
            sngl.mtotal, sngl.eta = pnutils.mass1_mass2_to_mtotal_eta(
                    sngl.mass1, sngl.mass2)
            sngl.mchirp, _ = pnutils.mass1_mass2_to_mchirp_eta(
                    sngl.mass1, sngl.mass2)
            sngl_populated = sngl
        if sngl.snr:
            sngl.eff_distance = (sngl.sigmasq)**0.5 / sngl.snr
        sngl_inspiral_table.append(sngl)

        # Set up coinc_map entry
        coinc_map_row = lsctables.CoincMap()
        coinc_map_row.table_name = 'sngl_inspiral'
        coinc_map_row.coinc_event_id = coinc_id
        coinc_map_row.event_id = sngl.event_id
        coinc_event_map_table.append(coinc_map_row)

    # for subthreshold detectors, respect BAYESTAR's assumptions and checks
    bayestar_check_fields = ('mass1 mass2 mtotal mchirp eta spin1x '
                             'spin1y spin1z spin2x spin2y spin2z').split()
    subthreshold_sngl_time = numpy.mean(
        [coinc_results['foreground/%s/end_time' % ifo] for ifo in ifos])
    for sngl in sngl_inspiral_table:
        if sngl.ifo in self.followup_ifos:
            for bcf in bayestar_check_fields:
                setattr(sngl, bcf, getattr(sngl_populated, bcf))
            sngl.set_end(lal.LIGOTimeGPS(subthreshold_sngl_time))

    outdoc.childNodes[0].appendChild(coinc_event_map_table)
    outdoc.childNodes[0].appendChild(sngl_inspiral_table)

    # Set up the coinc inspiral table
    coinc_inspiral_table = lsctables.New(lsctables.CoincInspiralTable)
    coinc_inspiral_row = lsctables.CoincInspiral()
    # This seems to be used as FAP, which should not be in gracedb
    coinc_inspiral_row.false_alarm_rate = 0
    coinc_inspiral_row.minimum_duration = 0.
    coinc_inspiral_row.set_ifos(ifos)
    coinc_inspiral_row.coinc_event_id = coinc_id
    coinc_inspiral_row.mchirp = sngl_populated.mchirp
    coinc_inspiral_row.mass = sngl_populated.mtotal
    coinc_inspiral_row.end_time = sngl_populated.end_time
    coinc_inspiral_row.end_time_ns = sngl_populated.end_time_ns
    coinc_inspiral_row.snr = coinc_results['foreground/stat']
    # Convert IFAR (years) to a FAR in Hz.
    far = 1.0 / (lal.YRJUL_SI * coinc_results['foreground/ifar'])
    coinc_inspiral_row.combined_far = far
    coinc_inspiral_table.append(coinc_inspiral_row)
    outdoc.childNodes[0].appendChild(coinc_inspiral_table)

    self.outdoc = outdoc
    self.time = sngl_populated.get_end()

    # compute SNR time series
    self.upload_snr_series = kwargs['upload_snr_series']
    if self.upload_snr_series:
        htilde = kwargs['bank'][self.template_id]
        self.snr_series = {}
        self.snr_series_psd = {}
        for ifo in self.ifos + self.followup_ifos:
            if ifo in ifos:
                trig_time = coinc_results['foreground/%s/end_time' % ifo]
            else:
                # followup ifos have no trigger; use the mean coinc time
                trig_time = subthreshold_sngl_time
            self.snr_series[ifo], self.snr_series_psd[ifo] = \
                compute_followup_snr_series(
                    kwargs['data_readers'][ifo], htilde, trig_time)
            snr_series_to_xml(self.snr_series[ifo], outdoc,
                              sngl_event_id_map[ifo])
# # Main # # ============================================================================= # options, filenames = parse_command_line() if options.verbose: print >> sys.stderr, "time-frequency tiles have %g degrees of freedom" % ( 2 * options.delta_t * options.delta_f) xmldoc = ligolw.Document() xmldoc.appendChild(ligolw.LIGO_LW()) process = ligolw_process.register_to_xmldoc(xmldoc, u"lalapps_binj_pic", options.options_dict, version=git_version.version) time_slide_table = xmldoc.childNodes[-1].appendChild( lsctables.TimeSlideTable.get_table( ligolw_utils.load_filename(options.time_slide_xml, verbose=options.verbose, contenthandler=LIGOLWContentHandler))) time_slide_id = time_slide_table[0].time_slide_id if options.verbose: print >> sys.stderr, "associating injections with time slide ID \"%s\": %s" % ( time_slide_id, time_slide_table.as_dict()[time_slide_id]) for row in time_slide_table: row.process_id = process.process_id sim_burst_tbl = xmldoc.childNodes[-1].appendChild( lsctables.New(lsctables.SimBurstTable, [ "process_id", "simulation_id", "time_slide_id", "waveform",
def excess_power(
        ts_data,                       # Time series from magnetic field data
        band=None,                     # Channel bandwidth
        channel_name='channel-name',   # Channel name
        fmin=0,                        # Lowest frequency of the filter bank
        fmax=None,                     # Highest frequency of the filter bank
        impulse=False,                 # Replace data with an impulse (debug)
        make_plot=True,                # Condition to produce plots
        max_duration=None,             # Maximum duration of the tile
        nchans=256,                    # Total number of channels
        psd_estimation='median-mean',  # Average method
        psd_segment_length=60,         # Length of each segment in seconds
        psd_segment_stride=30,         # Separation between 2 consecutive segments in seconds
        station='station-name',        # Station name
        tile_fap=1e-7,                 # Tile false alarm probability threshold in Gaussian noise
        verbose=True,                  # Print details
        window_fraction=0,             # Whitening window fraction
        wtype='tukey'):                # Whitening window type, can be tukey or hann
    '''
    Perform excess-power search analysis on magnetic field data.

    This method will produce a bunch of time-frequency plots for every
    tile duration and bandwidth analysed as well as an XML file
    (SnglBurstTable) identifying all the triggers found in the selected
    data within the user-defined time range.

    Parameters
    ----------
    ts_data : TimeSeries
        Time series from magnetic field data
    band : float
        Channel bandwidth; mutually optional with `nchans` (at least one
        must be given)
    channel_name : str
        Channel name, used to derive the IFO prefix for the output file
    fmin : float
        Lowest frequency of the filter bank
    fmax : float
        Highest frequency of the filter bank; defaults to the Nyquist
        frequency when not given
    impulse : bool
        Replace each analysis block with a unit impulse and use a flat
        PSD (debugging aid)
    make_plot : bool
        Condition to produce plots
    max_duration : float
        Maximum duration of the tile; bounds the degrees of freedom
    nchans : int
        Total number of channels
    psd_estimation : str
        Average method for Welch PSD estimation
    psd_segment_length : float
        Length of each segment in seconds
    psd_segment_stride : float
        Separation between 2 consecutive segments in seconds
    station : str
        Station name recorded in the output triggers
    tile_fap : float
        Tile false alarm probability threshold in Gaussian noise
    verbose : bool
        Print details
    window_fraction : float
        Whitening window fraction
    wtype : str
        Whitening window type, can be 'tukey' or 'hann'

    Examples
    --------
    The program can be ran as an executable by using the ``excesspower``
    command line as follows::

      excesspower --station "mainz01" \\
                  --start-time "2017-04-15-17-1" \\
                  --end-time "2017-04-15-18" \\
                  --rep "/Users/vincent/ASTRO/data/GNOME/GNOMEDrive/gnome/serverdata/" \\
                  --resample 512 \\
                  --verbose
    '''
    # Determine sampling rate based on extracted time series
    sample_rate = ts_data.sample_rate
    # Check if tile maximum frequency is not defined
    if fmax is None or fmax > sample_rate / 2.:
        # Set the tile maximum frequency equal to the Nyquist frequency
        # (i.e. half the sampling rate)
        fmax = sample_rate / 2.0
    # Check whether or not tile bandwidth and channel are defined
    if band is None and nchans is None:
        # Exit program with error message
        exit("Either bandwidth or number of channels must be specified...")
    else:
        # Check if tile maximum frequency larger than its minimum frequency
        assert fmax >= fmin
        # Define spectral band of data
        data_band = fmax - fmin
        # Check whether tile bandwidth or channel is defined
        if band is not None:
            # Define number of possible filter bands
            nchans = int(data_band / band)
        elif nchans is not None:
            # Define filter bandwidth
            band = data_band / nchans
            nchans -= 1
        # Check if number of channels is superior than unity
        assert nchans > 1
    # Print segment information
    if verbose: print '|- Estimating PSD from segments of',
    if verbose: print '%.2f s, with %.2f s stride...' % (psd_segment_length,
                                                         psd_segment_stride)
    # Convert time series as array of float
    data = ts_data.astype(numpy.float64)
    # Define segment length for PSD estimation in sample unit
    seg_len = int(psd_segment_length * sample_rate)
    # Define separation between consecutive segments in sample unit
    seg_stride = int(psd_segment_stride * sample_rate)
    # Minimum frequency of detectable signal in a segment
    delta_f = 1. / psd_segment_length
    # Calculate PSD length counting the zero frequency element
    fd_len = fmax / delta_f + 1
    # Calculate the overall PSD from individual PSD segments
    if impulse:
        # Produce flat data
        flat_data = numpy.ones(int(fd_len)) * 2. / fd_len
        # Create PSD frequency series
        fd_psd = types.FrequencySeries(flat_data, 1. / psd_segment_length,
                                       ts_data.start_time)
    else:
        # Create overall PSD using Welch's method
        fd_psd = psd.welch(data,
                           avg_method=psd_estimation,
                           seg_len=seg_len,
                           seg_stride=seg_stride)
    if make_plot:
        # Plot the power spectral density
        plot_spectrum(fd_psd)
    # We need this for the SWIG functions
    lal_psd = fd_psd.lal()
    # Create whitening window
    if verbose: print "|- Whitening window and spectral correlation..."
    if wtype == 'hann':
        window = lal.CreateHannREAL8Window(seg_len)
    elif wtype == 'tukey':
        window = lal.CreateTukeyREAL8Window(seg_len, window_fraction)
    else:
        raise ValueError("Can't handle window type %s" % wtype)
    # Create FFT plan
    fft_plan = lal.CreateForwardREAL8FFTPlan(len(window.data.data), 1)
    # Perform two point spectral correlation
    spec_corr = lal.REAL8WindowTwoPointSpectralCorrelation(window, fft_plan)
    # Determine length of individual filters
    filter_length = int(2 * band / fd_psd.delta_f) + 1
    # Initialise filter bank
    if verbose:
        print "|- Create bank of %i filters of %i Hz bandwidth..." % (
            nchans, filter_length)
    # Initialise array to store filter's frequency series and metadata
    lal_filters = []
    # Initialise array to store filter's time series
    fdb = []
    # Loop over the channels
    for i in range(nchans):
        # Define central position of the filter
        freq = fmin + band / 2 + i * band
        # Create excess power filter
        lal_filter = lalburst.CreateExcessPowerFilter(freq, band, lal_psd,
                                                      spec_corr)
        # Testing spectral correlation on filter
        #print lalburst.ExcessPowerFilterInnerProduct(lal_filter, lal_filter, spec_corr, None)
        # Append entire filter structure
        lal_filters.append(lal_filter)
        # Append filter's spectrum
        fdb.append(FrequencySeries.from_lal(lal_filter))
        #print fdb[0].frequencies
        #print fdb[0]
    if make_plot:
        # Plot filter bank
        plot_bank(fdb)
    # Convert filter bank from frequency to time domain
    if verbose:
        print "|- Convert all the frequency domain to the time domain..."
    tdb = []
    # Loop for each filter's spectrum
    for fdt in fdb:
        # Zero-pad the spectrum up to its starting frequency so the
        # inverse FFT reproduces the filter at the right frequencies
        zero_padded = numpy.zeros(int((fdt.f0 / fdt.df).value) + len(fdt))
        st = int((fdt.f0 / fdt.df).value)
        zero_padded[st:st + len(fdt)] = numpy.real_if_close(fdt.value)
        n_freq = int(sample_rate / 2 / fdt.df.value) * 2
        tdt = numpy.fft.irfft(zero_padded, n_freq) * math.sqrt(sample_rate)
        tdt = numpy.roll(tdt, len(tdt) / 2)
        tdt = TimeSeries(tdt,
                         name="",
                         epoch=fdt.epoch,
                         sample_rate=sample_rate)
        tdb.append(tdt)
    # Plot time series filter
    plot_filters(tdb, fmin, band)
    # Computer whitened inner products of input filters with themselves
    #white_filter_ip = numpy.array([lalburst.ExcessPowerFilterInnerProduct(f, f, spec_corr, None) for f in lal_filters])
    # Computer unwhitened inner products of input filters with themselves
    #unwhite_filter_ip = numpy.array([lalburst.ExcessPowerFilterInnerProduct(f, f, spec_corr, lal_psd) for f in lal_filters])
    # Computer whitened filter inner products between input adjacent filters
    #white_ss_ip = numpy.array([lalburst.ExcessPowerFilterInnerProduct(f1, f2, spec_corr, None) for f1, f2 in zip(lal_filters[:-1], lal_filters[1:])])
    # Computer unwhitened filter inner products between input adjacent filters
    #unwhite_ss_ip = numpy.array([lalburst.ExcessPowerFilterInnerProduct(f1, f2, spec_corr, lal_psd) for f1, f2 in zip(lal_filters[:-1], lal_filters[1:])])
    # Check filter's bandwidth is equal to user defined channel bandwidth
    min_band = (len(lal_filters[0].data.data) - 1) * lal_filters[0].deltaF / 2
    assert min_band == band
    # Create an event list where all the triggers will be stored
    event_list = lsctables.New(lsctables.SnglBurstTable, [
        'start_time', 'start_time_ns', 'peak_time', 'peak_time_ns',
        'duration', 'bandwidth', 'central_freq', 'chisq_dof', 'confidence',
        'snr', 'amplitude', 'channel', 'ifo', 'process_id', 'event_id',
        'search', 'stop_time', 'stop_time_ns'
    ])
    # Create repositories to save TF and time series plots
    os.system('mkdir -p segments/time-frequency')
    os.system('mkdir -p segments/time-series')
    # Define time edges
    t_idx_min, t_idx_max = 0, seg_len
    # Loop over each segment
    while t_idx_max <= len(ts_data):
        # Define first and last timestamps of the block
        start_time = ts_data.start_time + t_idx_min / float(
            ts_data.sample_rate)
        end_time = ts_data.start_time + t_idx_max / float(ts_data.sample_rate)
        if verbose:
            print "\n|- Analyzing block %i to %i (%.2f percent)" % (
                start_time, end_time, 100 * float(t_idx_max) / len(ts_data))
        # Debug for impulse response
        if impulse:
            for i in range(t_idx_min, t_idx_max):
                ts_data[i] = 1000. if i == (t_idx_max + t_idx_min) / 2 else 0.
        # Model a whitened time series for the block
        tmp_ts_data = types.TimeSeries(ts_data[t_idx_min:t_idx_max] *
                                       window.data.data,
                                       delta_t=1. / ts_data.sample_rate,
                                       epoch=start_time)
        # Save time series in relevant repository
        os.system('mkdir -p segments/%i-%i' % (start_time, end_time))
        if make_plot:
            # Plot time series
            plot_ts(tmp_ts_data,
                    fname='segments/time-series/%i-%i.png' %
                    (start_time, end_time))
        # Convert times series to frequency series
        fs_data = tmp_ts_data.to_frequencyseries()
        if verbose:
            print "|- Frequency series data has variance: %s" % fs_data.data.std(
            )**2
        # Whitening (FIXME: Whiten the filters, not the data)
        fs_data.data /= numpy.sqrt(fd_psd) / numpy.sqrt(2 * fd_psd.delta_f)
        if verbose:
            print "|- Whitened frequency series data has variance: %s" % fs_data.data.std(
            )**2
        if verbose: print "|- Create time-frequency plane for current block"
        # Return the complex snr, along with its associated normalization of the template,
        # matched filtered against the data
        #filter.matched_filter_core(types.FrequencySeries(tmp_filter_bank,delta_f=fd_psd.delta_f),
        #                           fs_data,h_norm=1,psd=fd_psd,low_frequency_cutoff=lal_filters[0].f0,
        #                           high_frequency_cutoff=lal_filters[0].f0+2*band)
        if verbose: print "|- Filtering all %d channels...\n" % nchans,
        # Initialise 2D zero array
        tmp_filter_bank = numpy.zeros(len(fd_psd), dtype=numpy.complex128)
        # Initialise 2D zero array for time-frequency map
        tf_map = numpy.zeros((nchans, seg_len), dtype=numpy.complex128)
        # Loop over all the channels
        for i in range(nchans):
            # Reset filter bank series
            tmp_filter_bank *= 0.0
            # Index of starting frequency
            f1 = int(lal_filters[i].f0 / fd_psd.delta_f)
            # Index of last frequency bin
            f2 = int((lal_filters[i].f0 + 2 * band) / fd_psd.delta_f) + 1
            # (FIXME: Why is there a factor of 2 here?)
            tmp_filter_bank[f1:f2] = lal_filters[i].data.data * 2
            # Define the template to filter the frequency series with
            template = types.FrequencySeries(tmp_filter_bank,
                                             delta_f=fd_psd.delta_f,
                                             copy=False)
            # Create filtered series
            filtered_series = filter.matched_filter_core(
                template,
                fs_data,
                h_norm=None,
                psd=None,
                low_frequency_cutoff=lal_filters[i].f0,
                high_frequency_cutoff=lal_filters[i].f0 + 2 * band)
            # Include filtered series in the map
            tf_map[i, :] = filtered_series[0].numpy()
        if make_plot:
            # Plot spectrogram
            plot_spectrogram(numpy.abs(tf_map).T,
                             dt=tmp_ts_data.delta_t,
                             df=band,
                             ymax=ts_data.sample_rate / 2.,
                             t0=start_time,
                             t1=end_time,
                             fname='segments/time-frequency/%i-%i.png' %
                             (start_time, end_time))
            plot_tiles_ts(numpy.abs(tf_map),
                          2,
                          1,
                          sample_rate=ts_data.sample_rate,
                          t0=start_time,
                          t1=end_time,
                          fname='segments/%i-%i/ts.png' %
                          (start_time, end_time))
            #plot_tiles_tf(numpy.abs(tf_map),2,1,ymax=ts_data.sample_rate/2,
            #              sample_rate=ts_data.sample_rate,t0=start_time,t1=end_time,
            #              fname='segments/%i-%i/tf.png'%(start_time,end_time))
        # Loop through powers of 2 up to number of channels
        for nc_sum in range(0, int(math.log(nchans, 2)))[::-1]:
            # Calculate total number of summed channels
            nc_sum = 2**nc_sum
            if verbose:
                print "\n\t|- Contructing tiles containing %d narrow band channels" % nc_sum
            # Compute full bandwidth of virtual channel
            df = band * nc_sum
            # Compute minimal signal's duration in virtual channel
            dt = 1.0 / (2 * df)
            # Compute under sampling rate
            us_rate = int(round(dt / ts_data.delta_t))
            if verbose:
                print "\t|- Undersampling rate for this level: %f" % (
                    ts_data.sample_rate / us_rate)
            if verbose: print "\t|- Calculating tiles..."
            # Clip the boundaries to remove window corruption
            clip_samples = int(psd_segment_length * window_fraction *
                               ts_data.sample_rate / 2)
            # Undersample narrow band channel's time series
            # Apply clipping condition because [0:-0] does not give the full array
            tf_map_temp = tf_map[:,clip_samples:-clip_samples:us_rate] \
                if clip_samples > 0 else tf_map[:,::us_rate]
            # Initialise final tile time-frequency map
            tiles = numpy.zeros(((nchans + 1) / nc_sum, tf_map_temp.shape[1]))
            # Loop over tile index
            for i in xrange(len(tiles)):
                # Sum all inner narrow band channels
                ts_tile = numpy.absolute(tf_map_temp[nc_sum * i:nc_sum *
                                                     (i + 1)].sum(axis=0))
                # Define index of last narrow band channel for given tile
                n = (i + 1) * nc_sum - 1
                n = n - 1 if n == len(lal_filters) else n
                # Computer whitened inner products of each input filter with itself
                mu_sq = nc_sum * lalburst.ExcessPowerFilterInnerProduct(
                    lal_filters[n], lal_filters[n], spec_corr, None)
                #kmax = nc_sum-1 if n==len(lal_filters) else nc_sum-2
                # Loop over the inner narrow band channels
                for k in xrange(0, nc_sum - 1):
                    # Computer whitened filter inner products between input adjacent filters
                    mu_sq += 2 * lalburst.ExcessPowerFilterInnerProduct(
                        lal_filters[n - k], lal_filters[n - 1 - k], spec_corr,
                        None)
                # Normalise tile's time series
                tiles[i] = ts_tile.real**2 / mu_sq
            if verbose: print "\t|- TF-plane is %dx%s samples" % tiles.shape
            if verbose:
                print "\t|- Tile energy mean %f, var %f" % (numpy.mean(tiles),
                                                            numpy.var(tiles))
            # Define maximum number of degrees of freedom and check it larger or equal to 2
            max_dof = 32 if max_duration == None else int(max_duration / dt)
            assert max_dof >= 2
            # Loop through multiple degrees of freedom
            for j in [2**l for l in xrange(0, int(math.log(max_dof, 2)))]:
                # Duration is fixed by the NDOF and bandwidth
                duration = j * dt
                if verbose: print "\n\t\t|- Summing DOF = %d ..." % (2 * j)
                if verbose:
                    print "\t\t|- Explore signal duration of %f s..." % duration
                # Construct filter
                sum_filter = numpy.array([1, 0] * (j - 1) + [1])
                # Calculate length of filtered time series
                tlen = tiles.shape[1] - sum_filter.shape[0] + 1
                # Initialise filtered time series array
                dof_tiles = numpy.zeros((tiles.shape[0], tlen))
                # Loop over tiles
                for f in range(tiles.shape[0]):
                    # Sum and drop correlate tiles
                    dof_tiles[f] = fftconvolve(tiles[f], sum_filter, 'valid')
                if verbose:
                    print "\t\t|- Summed tile energy mean: %f" % (
                        numpy.mean(dof_tiles))
                if verbose:
                    print "\t\t|- Variance tile energy: %f" % (
                        numpy.var(dof_tiles))
                if make_plot:
                    plot_spectrogram(
                        dof_tiles.T,
                        dt,
                        df,
                        ymax=ts_data.sample_rate / 2,
                        t0=start_time,
                        t1=end_time,
                        fname='segments/%i-%i/%02ichans_%02idof.png' %
                        (start_time, end_time, nc_sum, 2 * j))
                    plot_tiles_ts(
                        dof_tiles,
                        2 * j,
                        df,
                        sample_rate=ts_data.sample_rate / us_rate,
                        t0=start_time,
                        t1=end_time,
                        fname='segments/%i-%i/%02ichans_%02idof_ts.png' %
                        (start_time, end_time, nc_sum, 2 * j))
                    plot_tiles_tf(
                        dof_tiles,
                        2 * j,
                        df,
                        ymax=ts_data.sample_rate / 2,
                        sample_rate=ts_data.sample_rate / us_rate,
                        t0=start_time,
                        t1=end_time,
                        fname='segments/%i-%i/%02ichans_%02idof_tf.png' %
                        (start_time, end_time, nc_sum, 2 * j))
                # Chi-squared threshold corresponding to the requested
                # tile false-alarm probability
                threshold = scipy.stats.chi2.isf(tile_fap, j)
                if verbose:
                    print "\t\t|- Threshold for this level: %f" % threshold
                spant, spanf = dof_tiles.shape[1] * dt, dof_tiles.shape[0] * df
                if verbose:
                    print "\t\t|- Processing %.2fx%.2f time-frequency map." % (
                        spant, spanf)
                # Since we clip the data, the start time needs to be adjusted accordingly
                window_offset_epoch = fs_data.epoch + psd_segment_length * window_fraction / 2
                window_offset_epoch = LIGOTimeGPS(float(window_offset_epoch))
                for i, j in zip(*numpy.where(dof_tiles > threshold)):
                    event = event_list.RowType()
                    # The points are summed forward in time and thus a `summed point' is the
                    # sum of the previous N points. If this point is above threshold, it
                    # corresponds to a tile which spans the previous N points. However, the
                    # 0th point (due to the convolution specifier 'valid') is actually
                    # already a duration from the start time. All of this means, the +
                    # duration and the - duration cancels, and the tile 'start' is, by
                    # definition, the start of the time frequency map if j = 0
                    # FIXME: I think this needs a + dt/2 to center the tile properly
                    event.set_start(window_offset_epoch + float(j * dt))
                    event.set_stop(window_offset_epoch + float(j * dt) +
                                   duration)
                    event.set_peak(event.get_start() + duration / 2)
                    event.central_freq = lal_filters[
                        0].f0 + band / 2 + i * df + 0.5 * df
                    event.duration = duration
                    event.bandwidth = df
                    event.chisq_dof = 2 * duration * df
                    event.snr = math.sqrt(dof_tiles[i, j] / event.chisq_dof -
                                          1)
                    # FIXME: Magic number 0.62 should be determine empircally
                    event.confidence = -lal.LogChisqCCDF(
                        event.snr * 0.62, event.chisq_dof * 0.62)
                    # Amplitude left as None here; filled in by the hrss
                    # measurement pass below
                    event.amplitude = None
                    event.process_id = None
                    event.event_id = event_list.get_next_id()
                    event_list.append(event)
                # Second pass: measure hrss amplitude for events not yet
                # assigned one (walked in reverse so the newest come first)
                for event in event_list[::-1]:
                    if event.amplitude != None:
                        continue
                    etime_min_idx = float(event.get_start()) - float(
                        fs_data.epoch)
                    etime_min_idx = int(etime_min_idx / tmp_ts_data.delta_t)
                    etime_max_idx = float(event.get_start()) - float(
                        fs_data.epoch) + event.duration
                    etime_max_idx = int(etime_max_idx / tmp_ts_data.delta_t)
                    # (band / 2) to account for sin^2 wings from finest filters
                    flow_idx = int((event.central_freq - event.bandwidth / 2 -
                                    (df / 2) - fmin) / df)
                    fhigh_idx = int((event.central_freq + event.bandwidth / 2 +
                                     (df / 2) - fmin) / df)
                    # TODO: Check that the undersampling rate is always commensurate
                    # with the indexing: that is to say that
                    # mod(etime_min_idx, us_rate) == 0 always
                    z_j_b = tf_map[flow_idx:fhigh_idx,
                                   etime_min_idx:etime_max_idx:us_rate]
                    # FIXME: Deal with negative hrss^2 -- e.g. remove the event
                    # NOTE(review): unwhite_filter_ip / unwhite_ss_ip /
                    # white_ss_ip are only defined in the commented-out code
                    # above — confirm they are provided elsewhere, otherwise
                    # this always hits NameError/ValueError
                    try:
                        event.amplitude = measure_hrss(
                            z_j_b, unwhite_filter_ip[flow_idx:fhigh_idx],
                            unwhite_ss_ip[flow_idx:fhigh_idx - 1],
                            white_ss_ip[flow_idx:fhigh_idx - 1],
                            fd_psd.delta_f, tmp_ts_data.delta_t,
                            len(lal_filters[0].data.data), event.chisq_dof)
                    except ValueError:
                        event.amplitude = 0
                if verbose:
                    print "\t\t|- Total number of events: %d" % len(event_list)
        # Advance both block edges by the non-overlapping part of the window
        t_idx_min += int(seg_len * (1 - window_fraction))
        t_idx_max += int(seg_len * (1 - window_fraction))
    # Write the collected triggers out as a LIGO_LW XML document
    setname = "MagneticFields"
    __program__ = 'pyburst_excesspower_gnome'
    start_time = LIGOTimeGPS(int(ts_data.start_time))
    end_time = LIGOTimeGPS(int(ts_data.end_time))
    inseg = segment(start_time, end_time)
    xmldoc = ligolw.Document()
    xmldoc.appendChild(ligolw.LIGO_LW())
    ifo = channel_name.split(":")[0]
    straindict = psd.insert_psd_option_group.__dict__
    proc_row = register_to_xmldoc(xmldoc,
                                  __program__,
                                  straindict,
                                  ifos=[ifo],
                                  version=git_version.id,
                                  cvs_repository=git_version.branch,
                                  cvs_entry_time=git_version.date)
    dt_stride = psd_segment_length
    sample_rate = ts_data.sample_rate
    # Amount to overlap successive blocks so as not to lose data
    window_overlap_samples = window_fraction * sample_rate
    outseg = inseg.contract(window_fraction * dt_stride / 2)
    # With a given dt_stride, we cannot process the remainder of this data
    remainder = math.fmod(abs(outseg), dt_stride * (1 - window_fraction))
    # ...so make an accounting of it
    outseg = segment(outseg[0], outseg[1] - remainder)
    ss = append_search_summary(xmldoc,
                               proc_row,
                               ifos=(station, ),
                               inseg=inseg,
                               outseg=outseg)
    # Stamp every trigger with the process metadata and channel identity
    for sb in event_list:
        sb.process_id = proc_row.process_id
        sb.search = proc_row.program
        sb.ifo, sb.channel = station, setname
    xmldoc.childNodes[0].appendChild(event_list)
    ifostr = ifo if isinstance(ifo, str) else "".join(ifo)
    st_rnd, end_rnd = int(math.floor(inseg[0])), int(math.ceil(inseg[1]))
    dur = end_rnd - st_rnd
    fname = "%s-excesspower-%d-%d.xml.gz" % (ifostr, st_rnd, dur)
    utils.write_filename(xmldoc, fname, gz=fname.endswith("gz"))
    plot_triggers(fname)
def to_coinc_xml_object(self, file_name):
    """Dump the stored coincident triggers to a LIGO_LW XML file.

    Builds a full coincidence document (process, search summary,
    sngl_inspiral, coinc_definer, coinc_event, coinc_event_map,
    time_slide and coinc_inspiral tables) from the HDF5 coinc/single
    trigger files held on this object and writes it to ``file_name``.

    Parameters
    ----------
    file_name : str
        Path of the output XML file.
    """
    # FIXME: This function will only work with two ifos!!
    outdoc = ligolw.Document()
    outdoc.appendChild(ligolw.LIGO_LW())
    ifos = [ifo for ifo in self.sngl_files.keys()]
    proc_id = ligolw_process.register_to_xmldoc(
        outdoc,
        "pycbc", {},
        ifos=ifos,
        comment="",
        version=pycbc_version.git_hash,
        cvs_repository="pycbc/" + pycbc_version.git_branch,
        cvs_entry_time=pycbc_version.date).process_id

    # Search summary spans the union of all coincident segments
    search_summ_table = lsctables.New(lsctables.SearchSummaryTable)
    coinc_h5file = self.coinc_file.h5file
    start_time = coinc_h5file["segments"]["coinc"]["start"][:].min()
    end_time = coinc_h5file["segments"]["coinc"]["end"][:].max()
    num_trigs = len(self.sort_arr)
    search_summary = return_search_summary(start_time, end_time, num_trigs,
                                           ifos)
    search_summ_table.append(search_summary)
    outdoc.childNodes[0].appendChild(search_summ_table)

    sngl_inspiral_table = lsctables.New(lsctables.SnglInspiralTable)
    coinc_def_table = lsctables.New(lsctables.CoincDefTable)
    coinc_event_table = lsctables.New(lsctables.CoincTable)
    coinc_inspiral_table = lsctables.New(lsctables.CoincInspiralTable)
    coinc_event_map_table = lsctables.New(lsctables.CoincMapTable)
    time_slide_table = lsctables.New(lsctables.TimeSlideTable)

    # Set up time_slide table: zero-lag (offset 0) for every ifo
    time_slide_id = lsctables.TimeSlideID(0)
    for ifo in ifos:
        time_slide_row = lsctables.TimeSlide()
        time_slide_row.instrument = ifo
        time_slide_row.time_slide_id = time_slide_id
        time_slide_row.offset = 0
        time_slide_row.process_id = proc_id
        time_slide_table.append(time_slide_row)

    # Set up coinc_definer table
    coinc_def_id = lsctables.CoincDefID(0)
    coinc_def_row = lsctables.CoincDef()
    coinc_def_row.search = "inspiral"
    coinc_def_row.description = "sngl_inspiral-sngl_inspiral coincidences"
    coinc_def_row.coinc_def_id = coinc_def_id
    coinc_def_row.search_coinc_type = 0
    coinc_def_table.append(coinc_def_row)

    # Pre-fetch the per-template, per-coinc and per-single columns once
    bank_col_names = ["mass1", "mass2", "spin1z", "spin2z"]
    bank_col_vals = {}
    for name in bank_col_names:
        bank_col_vals[name] = self.get_bankfile_array(name)

    coinc_event_names = ["ifar", "time1", "fap", "stat"]
    coinc_event_vals = {}
    for name in coinc_event_names:
        coinc_event_vals[name] = self.get_coincfile_array(name)

    sngl_col_names = [
        "snr",
        "chisq",
        "chisq_dof",
        "bank_chisq",
        "bank_chisq_dof",
        "cont_chisq",
        "cont_chisq_dof",
        "end_time",
        "template_duration",
        "coa_phase",
        "sigmasq",
    ]
    sngl_col_vals = {}
    for name in sngl_col_names:
        sngl_col_vals[name] = self.get_snglfile_array_dict(name)

    for idx, coinc_idx in enumerate(self.sort_arr):
        # Set up IDs and mapping values
        curr_tmplt_id = self.template_id[idx]
        coinc_id = lsctables.CoincID(idx)

        # Set up sngls
        # FIXME: As two-ifo is hardcoded loop over all ifos
        sngl_combined_mchirp = 0
        sngl_combined_mtot = 0
        for ifo in ifos:
            sngl_id = self.trig_id[ifo][idx]
            event_id = lsctables.SnglInspiralID(sngl_id)
            sngl = return_empty_sngl()
            sngl.event_id = event_id
            sngl.ifo = ifo
            curr_sngl_file = self.sngl_files[ifo].h5file[ifo]
            for name in sngl_col_names:
                val = sngl_col_vals[name][ifo][idx]
                setattr(sngl, name, val)
            for name in bank_col_names:
                val = bank_col_vals[name][idx]
                setattr(sngl, name, val)
            # Derived mass/distance quantities from the template masses
            sngl.mtotal, sngl.eta = pnutils.mass1_mass2_to_mtotal_eta(
                sngl.mass1, sngl.mass2)
            sngl.mchirp, junk = pnutils.mass1_mass2_to_mchirp_eta(
                sngl.mass1, sngl.mass2)
            sngl.eff_distance = (sngl.sigmasq) ** 0.5 / sngl.snr
            sngl_combined_mchirp += sngl.mchirp
            sngl_combined_mtot += sngl.mtotal
            sngl_inspiral_table.append(sngl)

            # Set up coinc_map entry
            coinc_map_row = lsctables.CoincMap()
            coinc_map_row.table_name = "sngl_inspiral"
            coinc_map_row.coinc_event_id = coinc_id
            coinc_map_row.event_id = event_id
            coinc_event_map_table.append(coinc_map_row)

        # Average over participating detectors
        sngl_combined_mchirp = sngl_combined_mchirp / len(ifos)
        sngl_combined_mtot = sngl_combined_mtot / len(ifos)

        # Set up coinc inspiral and coinc event tables
        coinc_event_row = lsctables.Coinc()
        coinc_inspiral_row = lsctables.CoincInspiral()
        coinc_event_row.coinc_def_id = coinc_def_id
        coinc_event_row.nevents = len(ifos)
        coinc_event_row.instruments = ",".join(ifos)
        coinc_inspiral_row.set_ifos(ifos)
        coinc_event_row.time_slide_id = time_slide_id
        coinc_event_row.process_id = proc_id
        coinc_event_row.coinc_event_id = coinc_id
        coinc_inspiral_row.coinc_event_id = coinc_id
        coinc_inspiral_row.mchirp = sngl_combined_mchirp
        coinc_inspiral_row.mass = sngl_combined_mtot
        coinc_inspiral_row.set_end(
            LIGOTimeGPS(coinc_event_vals["time1"][idx]))
        coinc_inspiral_row.snr = coinc_event_vals["stat"][idx]
        coinc_inspiral_row.false_alarm_rate = coinc_event_vals["fap"][idx]
        # IFAR is stored in years; convert combined FAR to Hz
        coinc_inspiral_row.combined_far = 1.0 / coinc_event_vals["ifar"][idx]
        # Transform to Hz
        coinc_inspiral_row.combined_far = \
            coinc_inspiral_row.combined_far / YRJUL_SI
        coinc_event_row.likelihood = 0.0
        coinc_inspiral_row.minimum_duration = 0.0
        coinc_event_table.append(coinc_event_row)
        coinc_inspiral_table.append(coinc_inspiral_row)

    outdoc.childNodes[0].appendChild(coinc_def_table)
    outdoc.childNodes[0].appendChild(coinc_event_table)
    outdoc.childNodes[0].appendChild(coinc_event_map_table)
    outdoc.childNodes[0].appendChild(time_slide_table)
    outdoc.childNodes[0].appendChild(coinc_inspiral_table)
    outdoc.childNodes[0].appendChild(sngl_inspiral_table)

    ligolw_utils.write_filename(outdoc, file_name)
# Validate required command-line options before doing any work
if not opts.output_file:
    print >>sys.stderr, "Must specify an output file"
    sys.exit(1)
if not opts.veto_file:
    print >>sys.stderr, "Must specify a veto file"
    sys.exit(1)

# Collect the loudest (new-SNR-cut) triggers surviving the veto file,
# together with the single-detector livetime they were collected over
trigs, livetime = get_loud_trigs(fList, opts.veto_file, opts.new_snr_cut)

# Build the output LIGO_LW document and record this process's metadata
output_doc = ligolw.Document()
output_doc.appendChild(ligolw.LIGO_LW())
proc_id = process.register_to_xmldoc(output_doc,
                                     "get_loud_trigs",
                                     opts.__dict__,
                                     comment="",
                                     ifos=[""],
                                     version=git_version.id,
                                     cvs_repository=git_version.branch,
                                     cvs_entry_time=git_version.date).process_id

# Record the livetime as a search_summvars entry so downstream tools can
# normalise trigger rates
vartable = lsctables.New(lsctables.SearchSummVarsTable)
var = lsctables.SearchSummVars()
var.process_id = proc_id
var.value = livetime
var.name = "livetime"
var.string = "Single detector trigger collection time in seconds."
var.search_summvar_id = vartable.next_id
vartable.append(var)
output_doc.childNodes[0].appendChild(vartable)
output_doc.childNodes[0].appendChild(trigs)
# Gzip the output only when the chosen filename asks for it
utils.write_filename(output_doc, opts.output_file,
                     gz=opts.output_file.endswith("gz"))
# FIXME: sample rate could be a command line option; template duration and data # duration should be determined from chirp time sample_rate = 4096 # sample rate in Hz template_duration = 128 # template duration in seconds template_length = sample_rate * template_duration # template length in samples data_duration = 512 # data duration in seconds data_length = sample_rate * data_duration # data length in samples # Open output file. out_xmldoc = ligolw.Document() out_xmldoc.appendChild(ligolw.LIGO_LW()) # Write process metadata to output file. process = ligolw_process.register_to_xmldoc(out_xmldoc, parser.get_prog_name(), opts.__dict__, ifos=opts.detector, comment="Little hope!") # Add search summary to output file. all_time = segments.segment([glue.lal.LIGOTimeGPS(0), glue.lal.LIGOTimeGPS(2e9)]) search_summary_table = lsctables.New(lsctables.SearchSummaryTable) out_xmldoc.childNodes[0].appendChild(search_summary_table) summary = ligolw_search_summary.append_search_summary(out_xmldoc, process, inseg=all_time, outseg=all_time) # Read template bank file. progress.update(-1, 'reading ' + opts.template_bank) xmldoc = ligolw_utils.load_filename( opts.template_bank, contenthandler=ligolw_bayestar.LSCTablesContentHandler) # Determine the low frequency cutoff from the template bank file. template_bank_f_low = ligolw_bayestar.get_temlate_bank_f_low(xmldoc)
#======================== # iterate through classifiers -> generate segments #======================== ### set up xml document from glue.ligolw import ligolw from glue.ligolw import utils as ligolw_utils from glue.ligolw import lsctables from glue.ligolw.utils import process xmldoc = ligolw.Document() xml_element = ligolw.LIGO_LW() xmldoc.appendChild( xml_element ) proc_id = process.register_to_xmldoc( xmldoc, __prog__, opts.__dict__, version=__version__ ).process_id segdef = lsctables.New( lsctables.SegmentDefTable, columns=["process_id", "segment_def_id", "ifos", "name", "version", "comment"] ) segsum = lsctables.New( lsctables.SegmentSumTable, columns=["process_id", "segment_sum_id", "start_time", "start_time_ns", "end_time", "end_time_ns", "comment", "segment_def_id"] ) segtab = lsctables.New( lsctables.SegmentTable, columns=["process_id", "segment_def_id", "segment_id", "start_time", "start_time_ns", "end_time", "end_time_ns"] ) xml_element.appendChild( segdef ) xml_element.appendChild( segsum ) xml_element.appendChild( segtab ) ### iterate through classifiers for classifier in opts.classifier: logger.info('Begin: generating segments for %s'%classifier) faps = fapsD[classifier] logger.info(' found %d files'%(len(faps)))
def __init__(self, ifos, coinc_results, **kwargs):
    """Initialize a ligolw xml representation of a zerolag trigger
    for upload from pycbc live to gracedb.

    Parameters
    ----------
    ifos: list of strs
        A list of the ifos participating in this trigger.
    coinc_results: dict of values
        A dictionary of values. The format is defined in
        pycbc/events/coinc.py and matches the on disk representation
        in the hdf file for this time.
    psds: dict of FrequencySeries
        Dictionary providing PSD estimates for all involved detectors.
    low_frequency_cutoff: float
        Minimum valid frequency for the PSD estimates.
    followup_data: dict of dicts, optional
        Dictionary providing SNR time series for each detector,
        to be used in sky localization with BAYESTAR. The format should
        be `followup_data['H1']['snr_series']`. More detectors can be
        present than given in `ifos`. If so, the extra detectors will
        only be used for sky localization.
    channel_names: dict of strings, optional
        Strain channel names for each detector.
        Will be recorded in the sngl_inspiral table.
    """
    # Template is identified via the first ifo's foreground entry.
    self.template_id = coinc_results['foreground/%s/template_id' % ifos[0]]

    # remember if this should be marked as HWINJ
    self.is_hardware_injection = ('HWINJ' in coinc_results
                                  and coinc_results['HWINJ'])

    if 'followup_data' in kwargs:
        fud = kwargs['followup_data']
        # All SNR series must share one sample spacing; mixed delta_t
        # would make the later sky-localization inputs inconsistent.
        assert len({fud[ifo]['snr_series'].delta_t for ifo in fud}) == 1, \
                "delta_t for all ifos do not match"
        self.snr_series = {ifo: fud[ifo]['snr_series'] for ifo in fud}
        # Detectors with followup data may exceed the triggering ifos;
        # the extras are "followup" (subthreshold) detectors.
        usable_ifos = fud.keys()
        followup_ifos = list(set(usable_ifos) - set(ifos))
    else:
        self.snr_series = None
        usable_ifos = ifos
        followup_ifos = []

    # Set up the bare structure of the xml document
    outdoc = ligolw.Document()
    outdoc.appendChild(ligolw.LIGO_LW())

    proc_id = ligolw_process.register_to_xmldoc(
        outdoc, 'pycbc', {}, ifos=usable_ifos, comment='',
        version=pycbc_version.git_hash,
        cvs_repository='pycbc/'+pycbc_version.git_branch,
        cvs_entry_time=pycbc_version.date).process_id

    # Set up coinc_definer table
    coinc_def_table = lsctables.New(lsctables.CoincDefTable)
    coinc_def_id = lsctables.CoincDefID(0)
    coinc_def_row = lsctables.CoincDef()
    coinc_def_row.search = "inspiral"
    coinc_def_row.description = "sngl_inspiral<-->sngl_inspiral coincs"
    coinc_def_row.coinc_def_id = coinc_def_id
    coinc_def_row.search_coinc_type = 0
    coinc_def_table.append(coinc_def_row)
    outdoc.childNodes[0].appendChild(coinc_def_table)

    # Set up coinc inspiral and coinc event tables
    coinc_id = lsctables.CoincID(0)
    coinc_event_table = lsctables.New(lsctables.CoincTable)
    coinc_event_row = lsctables.Coinc()
    coinc_event_row.coinc_def_id = coinc_def_id
    coinc_event_row.nevents = len(usable_ifos)
    coinc_event_row.instruments = ','.join(usable_ifos)
    coinc_event_row.time_slide_id = lsctables.TimeSlideID(0)
    coinc_event_row.process_id = proc_id
    coinc_event_row.coinc_event_id = coinc_id
    coinc_event_row.likelihood = 0.
    coinc_event_table.append(coinc_event_row)
    outdoc.childNodes[0].appendChild(coinc_event_table)

    # Set up sngls: one sngl_inspiral row per usable detector, populated
    # from the flat 'foreground/<ifo>/<field>' keys in coinc_results.
    sngl_inspiral_table = lsctables.New(lsctables.SnglInspiralTable)
    coinc_event_map_table = lsctables.New(lsctables.CoincMapTable)

    # sngl_populated tracks the last row with valid masses; it is the
    # template reference used below for subthreshold detectors.
    sngl_populated = None
    network_snrsq = 0
    for sngl_id, ifo in enumerate(usable_ifos):
        sngl = return_empty_sngl(nones=True)
        sngl.event_id = lsctables.SnglInspiralID(sngl_id)
        sngl.process_id = proc_id
        sngl.ifo = ifo
        names = [n.split('/')[-1] for n in coinc_results
                 if 'foreground/%s' % ifo in n]
        for name in names:
            val = coinc_results['foreground/%s/%s' % (ifo, name)]
            if name == 'end_time':
                sngl.set_end(lal.LIGOTimeGPS(val))
            else:
                # Unknown keys simply don't map to sngl columns; skip them.
                try:
                    setattr(sngl, name, val)
                except AttributeError:
                    pass
        if sngl.mass1 and sngl.mass2:
            # Derive total mass / eta / chirp mass from component masses.
            sngl.mtotal, sngl.eta = pnutils.mass1_mass2_to_mtotal_eta(
                    sngl.mass1, sngl.mass2)
            sngl.mchirp, _ = pnutils.mass1_mass2_to_mchirp_eta(
                    sngl.mass1, sngl.mass2)
            sngl_populated = sngl
        if sngl.snr:
            sngl.eff_distance = (sngl.sigmasq)**0.5 / sngl.snr
            network_snrsq += sngl.snr ** 2.0
        if 'channel_names' in kwargs and ifo in kwargs['channel_names']:
            sngl.channel = kwargs['channel_names'][ifo]
        sngl_inspiral_table.append(sngl)

        # Set up coinc_map entry linking this sngl to the coinc event.
        coinc_map_row = lsctables.CoincMap()
        coinc_map_row.table_name = 'sngl_inspiral'
        coinc_map_row.coinc_event_id = coinc_id
        coinc_map_row.event_id = sngl.event_id
        coinc_event_map_table.append(coinc_map_row)

        if self.snr_series is not None:
            snr_series_to_xml(self.snr_series[ifo], outdoc, sngl.event_id)

    # for subthreshold detectors, respect BAYESTAR's assumptions and checks:
    # copy the template parameters from a populated (triggered) row and set
    # the end time to the mean end time of the triggered detectors.
    bayestar_check_fields = ('mass1 mass2 mtotal mchirp eta spin1x '
                             'spin1y spin1z spin2x spin2y spin2z').split()
    subthreshold_sngl_time = numpy.mean(
            [coinc_results['foreground/{}/end_time'.format(ifo)]
             for ifo in ifos])
    for sngl in sngl_inspiral_table:
        if sngl.ifo in followup_ifos:
            for bcf in bayestar_check_fields:
                setattr(sngl, bcf, getattr(sngl_populated, bcf))
            sngl.set_end(lal.LIGOTimeGPS(subthreshold_sngl_time))

    outdoc.childNodes[0].appendChild(coinc_event_map_table)
    outdoc.childNodes[0].appendChild(sngl_inspiral_table)

    # Set up the coinc inspiral table
    coinc_inspiral_table = lsctables.New(lsctables.CoincInspiralTable)
    coinc_inspiral_row = lsctables.CoincInspiral()
    # This seems to be used as FAP, which should not be in gracedb
    coinc_inspiral_row.false_alarm_rate = 0
    coinc_inspiral_row.minimum_duration = 0.
    coinc_inspiral_row.set_ifos(usable_ifos)
    coinc_inspiral_row.coinc_event_id = coinc_id
    coinc_inspiral_row.mchirp = sngl_populated.mchirp
    coinc_inspiral_row.mass = sngl_populated.mtotal
    coinc_inspiral_row.end_time = sngl_populated.end_time
    coinc_inspiral_row.end_time_ns = sngl_populated.end_time_ns
    # Network SNR is the quadrature sum of the per-detector SNRs.
    coinc_inspiral_row.snr = network_snrsq ** 0.5
    # Convert IFAR (in Julian years) to a FAR in Hz.
    far = 1.0 / (lal.YRJUL_SI * coinc_results['foreground/ifar'])
    coinc_inspiral_row.combined_far = far
    coinc_inspiral_table.append(coinc_inspiral_row)
    outdoc.childNodes[0].appendChild(coinc_inspiral_table)

    # append the PSDs, truncated below the low frequency cutoff and with
    # the dynamic-range scaling removed.
    self.psds = kwargs['psds']
    psds_lal = {}
    for ifo in self.psds:
        psd = self.psds[ifo]
        kmin = int(kwargs['low_frequency_cutoff'] / psd.delta_f)
        fseries = lal.CreateREAL8FrequencySeries(
                "psd", psd.epoch, kwargs['low_frequency_cutoff'],
                psd.delta_f, lal.StrainUnit**2 / lal.HertzUnit,
                len(psd) - kmin)
        fseries.data.data = psd.numpy()[kmin:] / pycbc.DYN_RANGE_FAC ** 2.0
        psds_lal[ifo] = fseries
    make_psd_xmldoc(psds_lal, outdoc)

    self.outdoc = outdoc
    self.time = sngl_populated.get_end()
workflow, scienceSegs, dfDir, segsList) # This is needed to know what times will be analysed by daily ahope # Template bank stuff banks = _workflow.setup_tmpltbank_workflow(workflow, scienceSegs, datafinds, dfDir) # Do matched-filtering insps = _workflow.setup_matchedfltr_workflow(workflow, scienceSegs, datafinds, banks, dfDir) # Now construct the summary XML file outdoc = ligolw.Document() outdoc.appendChild(ligolw.LIGO_LW()) # FIXME: PROGRAM NAME and dictionary of opts should be variables defined up above proc_id = ligolw_process.register_to_xmldoc(outdoc, 'dayhopetest', vars(args)).process_id for ifo in workflow.ifos: # Lets get the segment lists we need segIfoFiles = segsList.find_output_with_ifo(ifo) # SCIENCE sciSegFile = segIfoFiles.find_output_with_tag('SCIENCE') assert (len(sciSegFile) == 1) sciSegFile = sciSegFile[0] sciSegs = sciSegFile.segmentList # SCIENCE_OK sciokSegFile = segIfoFiles.find_output_with_tag('SCIENCE_OK') assert (len(sciokSegFile) == 1) sciokSegFile = sciokSegFile[0] sciokSegs = sciokSegFile.segmentList # SCIENCE_AVAILABLE sciavailableSegFile = segIfoFiles.find_output_with_tag('SCIENCE_AVAILABLE')
# BAYESTAR imports. from lalinference.bayestar import ligolw as ligolw_bayestar from lalinference.bayestar import timing # Other imports. import numpy as np import scipy.linalg # Open output file. xmldoc = ligolw.Document() xmldoc.appendChild(ligolw.LIGO_LW()) # Write process metadata to output file. process = ligolw_process.register_to_xmldoc(xmldoc, parser.get_prog_name(), opts.__dict__) # Create a SnglInspiral table and initialize its row ID counter. sngl_inspiral_table = lsctables.New(lsctables.SnglInspiralTable) xmldoc.childNodes[0].appendChild(sngl_inspiral_table) sngl_inspiral_table.set_next_id(lsctables.SnglInspiralID(0)) f_low = 10. f_high = 2048. df = 0.1 initial_mchirp = lalinspiral.sbank.tau0tau3.m1m2_to_mchirp(opts.initial_mass1, opts.initial_mass2) initial_eta = opts.initial_mass1 * opts.initial_mass2 / (opts.initial_mass1 + opts.initial_mass2)**2 initial_chi = 0. initial_chirp_times = lalsimulation.SimInspiralTaylorF2RedSpinChirpTimesFromMchirpEtaChi(initial_mchirp, initial_eta, initial_chi, f_low) initial_theta0_theta3 = initial_chirp_times[:2]
if options.verbose: print >>sys.stderr, "warning: %s already processed," % (filename or "stdin"), if not options.force: if options.verbose: print >>sys.stderr, "skipping" continue if options.verbose: print >>sys.stderr, "continuing by --force" # # Add an entry to the process table. # process = ligolw_process.register_to_xmldoc(xmldoc, process_program_name, paramdict, comment = options.comment, version = __version__, cvs_repository = u"lscsoft", cvs_entry_time = __date__ ) # # Run coincidence algorithm. # burca.burca( xmldoc = xmldoc, process_id = process.process_id, EventListType = EventListType, CoincTables = CoincTables, coinc_definer_row = CoincDef, thresholds = options.threshold, ntuple_comparefunc = ntuple_comparefunc,
def register_to_xmldoc(xmldoc, parser, opts, **kwargs):
    """Register this program in the process table of ``xmldoc``.

    Parameters
    ----------
    xmldoc : glue.ligolw.ligolw.Document
        Document whose process/process_params tables are updated.
    parser : argparse.ArgumentParser
        Parser whose ``prog`` attribute names the program.
    opts : argparse.Namespace
        Parsed command line options; open file arguments are recorded by
        their ``.name`` instead of the file object itself.
    **kwargs
        Extra keyword arguments forwarded to
        ``glue.ligolw.utils.process.register_to_xmldoc`` (e.g. ``ifos``,
        ``comment``).

    Returns
    -------
    The new row of the process table.
    """
    from glue.ligolw.utils import process
    # File-like option values (anything with a .read attribute) are not
    # serializable; record their path/name instead.
    params = {key: value.name if hasattr(value, 'read') else value
              for key, value in opts.__dict__.items()}
    # BUG FIX: **kwargs was accepted but silently dropped; forward it so
    # callers' ifos/comment/etc. actually reach the process table (matches
    # the sibling implementation that sanitizes via a helper).
    return process.register_to_xmldoc(xmldoc, parser.prog, params, **kwargs)
f_low, = f_lows except ValueError: raise ValueError( "sim_inspiral:f_lower columns are not unique, got values: " + ' '.join(f_lows)) else: f_low = opts.f_low # Open output file. out_xmldoc = ligolw.Document() out_xmldoc.appendChild(ligolw.LIGO_LW()) # Write process metadata to output file. Masquerade as lalapps_tmpltbank and # encode low frequency cutoff in command line arguments. process = ligolw_process.register_to_xmldoc(out_xmldoc, "tmpltbank", dict(opts.__dict__, low_frequency_cutoff=f_low), ifos="H1", comment="Exact-match template bank") # Record low-frequency cutoff in the SearchSummVars table. search_summvars_table = lsctables.New(lsctables.SearchSummVarsTable) out_xmldoc.childNodes[0].appendChild(search_summvars_table) search_summvars = lsctables.SearchSummVars() search_summvars.search_summvar_id = search_summvars_table.get_next_id() search_summvars.process_id = process.process_id search_summvars.name = "low-frequency cutoff" search_summvars.string = None search_summvars.value = f_low search_summvars_table.append(search_summvars) # Create a SnglInspiral table and initialize its row ID counter. sngl_inspiral_table = lsctables.New(lsctables.SnglInspiralTable)
# In the interest of not blocking later DAGs for completion in an # uberdag, this is now dependent on the SQL step pos_plot_node.add_parent(sql_node) dag_name = opts.output_name dag.set_dag_file(dag_name) dag.write_concrete_dag() if opts.write_script: dag.write_script() print("Created a DAG named %s\n" % dag_name) print("This will run %i instances of %s in parallel\n" % (len(tmplt_bnk), ile_sub_name)) # FIXME: Adjust name on command line if use_bayespe_postproc: ppdag_name = "posterior_pp" ppdag.set_dag_file(ppdag_name) ppdag.add_maxjobs_category("ANALYSIS", MAXJOBS["ANALYSIS"]) ppdag.add_maxjobs_category("POST", MAXJOBS["POST"]) ppdag.write_concrete_dag() if opts.write_script: ppdag.write_script() print("Created a postprocessing DAG named %s\n" % ppdag_name) xmldoc = ligolw.Document() xmldoc.appendChild(ligolw.LIGO_LW()) process.register_to_xmldoc(xmldoc, sys.argv[0], opts.__dict__) utils.write_filename(xmldoc, opts.output_name + ".xml.gz", gz=True)
input_table = lsctables.SimInspiralTable.get_table(indoc) inputtabletype = lsctables.SimInspiralTable # #print tabletype length = len(input_table) else: print ("Waning: No input table given to append to, will construct one from scratch") inputtabletype = lsctables.SimInspiralTable #raise IOError("Please give a table to add the information about NR waveforms to.") # Re-write the input table files # create a blank xml document and add the process id outdoc = ligolw.Document() outdoc.appendChild(ligolw.LIGO_LW()) proc_id = ligolw_process.register_to_xmldoc(outdoc, PROGRAM_NAME, options.__dict__, ifos=["G1"], version=git_version.id, cvs_repository=git_version.branch, cvs_entry_time=git_version.date).process_id out_table = lsctables.New(inputtabletype,columns=['mass1','mass2','mchirp','eta','spin1x','spin1y','spin1z','spin2x','spin2y','spin2z','inclination','polarization','latitude','longitude','bandpass','alpha','alpha1','alpha2','process_id','waveform','numrel_data','numrel_mode_min','numrel_mode_max','t_end_time','f_lower']) outdoc.childNodes[0].appendChild(out_table) if options.input_catalog is not None: # Fill in the INPUT table for point in input_table: if not os.path.exists( point.numrel_data ): print "waveform file %s for %s does NOT exist! " % \ (point.numrel_data, point.waveform) if options.force_file_exists: print "(SKIPPING)\n" continue
fmm = interp1d( qmt, qmm ) mismatches.append( [qmt, qmm, fmm] ) dicfmm = {} for idx in range(len(mass_ratios)): dicfmm.update({mass_ratios[idx]:mismatches[idx][2]}) ################### Process the bank amd write it to disk ############ subfile_name = options.output_bank if options.verbose: print "Writing substitute-bank file %s" % subfile_name out_subbank_doc = ligolw.Document() out_subbank_doc.appendChild(ligolw.LIGO_LW()) out_proc_id = ligolw_process.register_to_xmldoc(out_subbank_doc, PROGRAM_NAME, options.__dict__, comment=options.comment, version=git_version.id, cvs_repository=git_version.branch, cvs_entry_time=git_version.date).process_id # Use exactly the same columns as in the input bank table tmp0 = in_bank_table[0] cols_list = tmp0.__slots__ cols = [] for tmpc in cols_list: if hasattr(tmp0, tmpc): cols.append( tmpc ) if options.table == 'sim': out_subbank_table = lsctables.New(lsctables.SimInspiralTable,columns=cols) else: out_subbank_table = lsctables.New(lsctables.SnglInspiralTable,columns=cols)
(options, args) = parser.parse_args() ############################################################################## # Sanity check of input arguments if not options.ihope_cache: raise ValueError, "An ihope-cache file is required." if not options.config_file: raise ValueError, "A config-file is required." if not options.log_path: raise ValueError, "A log-path is required." ############################################################################## # Create log file logdoc = ligolw.Document() logdoc.appendChild(ligolw.LIGO_LW()) proc_id = process.register_to_xmldoc(logdoc, __prog__, options.__dict__, version = git_version.id) ############################################################################## # parse the ini file and initialize cp = pipeline.DeepCopyableConfigParser() cp.read(options.config_file) tmp_space = cp.get('pipeline', 'node-tmp-dir') experiment_start = options.gps_start_time experiment_duration = str(int(options.gps_end_time) - int(options.gps_start_time)) try: do_cats = cp.get('pipeline', 'pipedown-cats') do_cats = do_cats.split(',') except:
def setup_analysislogging(workflow, segs_list, insps, args, output_dir,
                          program_name="workflow", tags=None):
    """
    This module sets up the analysis logging xml file that contains the
    following information:

    * Command line arguments that the code was run with
    * Segment list of times marked as SCIENCE
    * Segment list of times marked as SCIENCE and "OK" ie. not CAT_1 vetoed
    * Segment list of times marked as SCIENCE_OK and present on the cluster
    * The times that will be analysed by the matched-filter jobs

    Parameters
    -----------
    workflow : pycbc.workflow.core.Workflow
        The Workflow instance.
    segs_list : pycbc.workflow.core.FileList
        A list of Files containing the information needed to generate the
        segments above. For segments generated at run time the associated
        segmentlist is a property of this object.
    insps : pycbc.workflow.core.FileList
        The output files from the matched-filtering module. Used to identify
        what times have been analysed in this workflow.
    args : argparse.Namespace
        Parsed command line options, recorded in the process_params table.
    output_dir : path
        Directory to output any files to.
    program_name : string (optional, default = "workflow")
        The program name to stick in the process/process_params tables.
    tags : list (optional, default = None)
        If given restrict to considering inspiral and segment files that
        are tagged with all tags in this list.

    Returns
    --------
    pycbc.workflow.core.FileList
        A one-element list holding the summary XML File.
    """
    # BUG FIX: default was a mutable list (tags=[]); normalize here instead.
    if tags is None:
        tags = []
    logging.info("Entering analysis logging module.")
    make_analysis_dir(output_dir)

    # Construct the summary XML file
    outdoc = ligolw.Document()
    outdoc.appendChild(ligolw.LIGO_LW())

    # Add process and process_params tables
    proc_id = process.register_to_xmldoc(outdoc, program_name,
                                         vars(args)).process_id

    # Now add the various segment lists to this file
    summ_segs = segmentlist([workflow.analysis_time])

    # If tags is given filter by tags
    if tags:
        for tag in tags:
            segs_list = segs_list.find_output_with_tag(tag)
            insps = insps.find_output_with_tag(tag)

    for ifo in workflow.ifos:
        # Lets get the segment lists we need
        seg_ifo_files = segs_list.find_output_with_ifo(ifo)

        # SCIENCE
        sci_seg_file = seg_ifo_files.find_output_with_tag('SCIENCE')
        if len(sci_seg_file) == 1:
            sci_seg_file = sci_seg_file[0]
            sci_segs = sci_seg_file.segmentList
            sci_def_id = segmentdb_utils.add_to_segment_definer(outdoc,
                                   proc_id, ifo, "CBC_WORKFLOW_SCIENCE", 0)
            segmentdb_utils.add_to_segment(outdoc, proc_id, sci_def_id,
                                           sci_segs)
            segmentdb_utils.add_to_segment_summary(outdoc, proc_id,
                                   sci_def_id, summ_segs, comment='')
        elif sci_seg_file:
            # FIXME: While the segment module is still fractured (#127) this
            # may not work. Please update when #127 is resolved
            pass
            #err_msg = "Got %d files matching %s and %s. Expected 1 or 0." \
            #          %(len(sci_seg_file), ifo, 'SCIENCE')
            #raise ValueError(err_msg)

        # SCIENCE_OK
        sci_ok_seg_file = seg_ifo_files.find_output_with_tag('SCIENCE_OK')
        if len(sci_ok_seg_file) == 1:
            sci_ok_seg_file = sci_ok_seg_file[0]
            sci_ok_segs = sci_ok_seg_file.segmentList
            sci_ok_def_id = segmentdb_utils.add_to_segment_definer(outdoc,
                                   proc_id, ifo, "CBC_WORKFLOW_SCIENCE_OK", 0)
            segmentdb_utils.add_to_segment(outdoc, proc_id, sci_ok_def_id,
                                           sci_ok_segs)
            segmentdb_utils.add_to_segment_summary(outdoc, proc_id,
                                   sci_ok_def_id, summ_segs, comment='')
        elif sci_ok_seg_file:
            # FIXME: While the segment module is still fractured (#127) this
            # may not work. Please update when #127 is resolved
            pass
            #err_msg = "Got %d files matching %s and %s. Expected 1 or 0." \
            #          %(len(sci_ok_seg_file), ifo, 'SCIENCE_OK')
            #raise ValueError(err_msg)

        # SCIENCE_AVAILABLE
        sci_available_seg_file = seg_ifo_files.find_output_with_tag(
                                                         'SCIENCE_AVAILABLE')
        if len(sci_available_seg_file) == 1:
            sci_available_seg_file = sci_available_seg_file[0]
            sci_available_segs = sci_available_seg_file.segmentList
            sci_available_def_id = segmentdb_utils.add_to_segment_definer(
                  outdoc, proc_id, ifo, "CBC_WORKFLOW_SCIENCE_AVAILABLE", 0)
            segmentdb_utils.add_to_segment(outdoc, proc_id,
                                   sci_available_def_id, sci_available_segs)
            segmentdb_utils.add_to_segment_summary(outdoc, proc_id,
                                   sci_available_def_id, summ_segs, comment='')
        elif sci_available_seg_file:
            # FIXME: While the segment module is still fractured (#127) this
            # may not work. Please update when #127 is resolved
            pass
            #err_msg = "Got %d files matching %s and %s. Expected 1 or 0." \
            #          %(len(sci_available_seg_file), ifo, 'SCIENCE_AVAILABLE')
            #raise ValueError(err_msg)

        # ANALYSABLE - This one needs to come from inspiral outs
        ifo_insps = insps.find_output_with_ifo(ifo)
        analysable_segs = ifo_insps.get_times_covered_by_files()
        analysable_def_id = segmentdb_utils.add_to_segment_definer(outdoc,
                                   proc_id, ifo, "CBC_WORKFLOW_ANALYSABLE", 0)
        segmentdb_utils.add_to_segment(outdoc, proc_id, analysable_def_id,
                                       analysable_segs)
        segmentdb_utils.add_to_segment_summary(outdoc, proc_id,
                                   analysable_def_id, summ_segs, comment='')

    summ_file = File(workflow.ifos, "WORKFLOW_SUMMARY",
                     workflow.analysis_time, extension=".xml",
                     directory=output_dir)
    summ_file.PFN(summ_file.storage_path, site='local')
    utils.write_filename(outdoc, summ_file.storage_path)

    return FileList([summ_file])
def to_coinc_xml_object(self, file_name):
    """Write this object's coincident triggers to ``file_name`` as a
    LIGO_LW XML document containing search_summary, time_slide,
    coinc_definer, coinc_event, coinc_event_map, coinc_inspiral and
    sngl_inspiral tables.
    """
    outdoc = ligolw.Document()
    outdoc.appendChild(ligolw.LIGO_LW())

    ifos = list(self.sngl_files.keys())
    proc_id = ligolw_process.register_to_xmldoc(outdoc, 'pycbc',
                  {}, ifos=ifos, comment='',
                  version=pycbc_version.git_hash,
                  cvs_repository='pycbc/'+pycbc_version.git_branch,
                  cvs_entry_time=pycbc_version.date).process_id

    # Search summary: time span from the coincident segments if present,
    # otherwise the min/max over all per-combination segment groups.
    search_summ_table = lsctables.New(lsctables.SearchSummaryTable)
    coinc_h5file = self.coinc_file.h5file
    try:
        start_time = coinc_h5file['segments']['coinc']['start'][:].min()
        end_time = coinc_h5file['segments']['coinc']['end'][:].max()
    except KeyError:
        start_times = []
        end_times = []
        for ifo_comb in coinc_h5file['segments']:
            if ifo_comb == 'foreground_veto':
                continue
            seg_group = coinc_h5file['segments'][ifo_comb]
            start_times.append(seg_group['start'][:].min())
            end_times.append(seg_group['end'][:].max())
        start_time = min(start_times)
        end_time = max(end_times)
    num_trigs = len(self.sort_arr)
    search_summary = return_search_summary(start_time, end_time,
                                           num_trigs, ifos)
    search_summ_table.append(search_summary)
    outdoc.childNodes[0].appendChild(search_summ_table)

    sngl_inspiral_table = lsctables.New(lsctables.SnglInspiralTable)
    coinc_def_table = lsctables.New(lsctables.CoincDefTable)
    coinc_event_table = lsctables.New(lsctables.CoincTable)
    coinc_inspiral_table = lsctables.New(lsctables.CoincInspiralTable)
    coinc_event_map_table = lsctables.New(lsctables.CoincMapTable)
    time_slide_table = lsctables.New(lsctables.TimeSlideTable)

    # Set up time_slide table: a single zero-lag slide shared by all ifos.
    time_slide_id = lsctables.TimeSlideID(0)
    for ifo in ifos:
        time_slide_row = lsctables.TimeSlide()
        time_slide_row.instrument = ifo
        time_slide_row.time_slide_id = time_slide_id
        time_slide_row.offset = 0
        time_slide_row.process_id = proc_id
        time_slide_table.append(time_slide_row)

    # Set up coinc_definer table
    coinc_def_id = lsctables.CoincDefID(0)
    coinc_def_row = lsctables.CoincDef()
    coinc_def_row.search = "inspiral"
    coinc_def_row.description = \
        "sngl_inspiral<-->sngl_inspiral coincidences"
    coinc_def_row.coinc_def_id = coinc_def_id
    coinc_def_row.search_coinc_type = 0
    coinc_def_table.append(coinc_def_row)

    # Pre-fetch the per-template bank columns, per-coincidence statistics
    # and per-detector single-trigger columns used below.
    bank_col_names = ['mass1', 'mass2', 'spin1z', 'spin2z']
    bank_col_vals = {}
    for name in bank_col_names:
        bank_col_vals[name] = self.get_bankfile_array(name)

    coinc_event_names = ['ifar', 'time', 'fap', 'stat']
    coinc_event_vals = {}
    for name in coinc_event_names:
        if name == 'time':
            coinc_event_vals[name] = self.get_end_time()
        else:
            coinc_event_vals[name] = self.get_coincfile_array(name)

    sngl_col_names = ['snr', 'chisq', 'chisq_dof', 'bank_chisq',
                      'bank_chisq_dof', 'cont_chisq', 'cont_chisq_dof',
                      'end_time', 'template_duration', 'coa_phase',
                      'sigmasq']
    sngl_col_vals = {}
    for name in sngl_col_names:
        sngl_col_vals[name] = self.get_snglfile_array_dict(name)

    sngl_event_count = 0
    for idx in range(len(self.sort_arr)):
        # Set up IDs and mapping values
        coinc_id = lsctables.CoincID(idx)

        # Set up sngls
        sngl_mchirps = []
        sngl_mtots = []
        net_snrsq = 0
        triggered_ifos = []
        for ifo in ifos:
            # If this ifo is not participating in this coincidence then
            # ignore it and move on.
            # NOTE: [ifo][1] looks like a participation flag and [ifo][0]
            # the value array — confirm against get_snglfile_array_dict.
            if not sngl_col_vals['snr'][ifo][1][idx]:
                continue
            triggered_ifos += [ifo]
            event_id = lsctables.SnglInspiralID(sngl_event_count)
            sngl_event_count += 1
            sngl = return_empty_sngl()
            sngl.event_id = event_id
            sngl.ifo = ifo
            net_snrsq += sngl_col_vals['snr'][ifo][0][idx]**2
            for name in sngl_col_names:
                val = sngl_col_vals[name][ifo][0][idx]
                if name == 'end_time':
                    sngl.set_end(LIGOTimeGPS(val))
                else:
                    setattr(sngl, name, val)
            for name in bank_col_names:
                val = bank_col_vals[name][idx]
                setattr(sngl, name, val)
            # Derived mass quantities from the bank component masses.
            sngl.mtotal, sngl.eta = pnutils.mass1_mass2_to_mtotal_eta(
                    sngl.mass1, sngl.mass2)
            sngl.mchirp, _ = pnutils.mass1_mass2_to_mchirp_eta(
                    sngl.mass1, sngl.mass2)
            sngl.eff_distance = (sngl.sigmasq)**0.5 / sngl.snr
            # If exact match is not used, then take mean
            # masses from the single triggers
            sngl_mchirps += [sngl.mchirp]
            sngl_mtots += [sngl.mtotal]

            sngl_inspiral_table.append(sngl)

            # Set up coinc_map entry
            coinc_map_row = lsctables.CoincMap()
            coinc_map_row.table_name = 'sngl_inspiral'
            coinc_map_row.coinc_event_id = coinc_id
            coinc_map_row.event_id = event_id
            coinc_event_map_table.append(coinc_map_row)

        sngl_combined_mchirp = np.mean(sngl_mchirps)
        sngl_combined_mtot = np.mean(sngl_mtots)

        # Set up coinc inspiral and coinc event tables
        coinc_event_row = lsctables.Coinc()
        coinc_inspiral_row = lsctables.CoincInspiral()
        coinc_event_row.coinc_def_id = coinc_def_id
        coinc_event_row.nevents = len(triggered_ifos)
        coinc_event_row.instruments = ','.join(triggered_ifos)
        coinc_inspiral_row.set_ifos(triggered_ifos)
        coinc_event_row.time_slide_id = time_slide_id
        coinc_event_row.process_id = proc_id
        coinc_event_row.coinc_event_id = coinc_id
        coinc_inspiral_row.coinc_event_id = coinc_id
        coinc_inspiral_row.mchirp = sngl_combined_mchirp
        coinc_inspiral_row.mass = sngl_combined_mtot
        coinc_inspiral_row.set_end(
                LIGOTimeGPS(coinc_event_vals['time'][idx]))
        coinc_inspiral_row.snr = net_snrsq**0.5
        coinc_inspiral_row.false_alarm_rate = \
                coinc_event_vals['fap'][idx]
        coinc_inspiral_row.combined_far = \
                1./coinc_event_vals['ifar'][idx]
        # Transform to Hz
        coinc_inspiral_row.combined_far = \
                coinc_inspiral_row.combined_far / YRJUL_SI
        coinc_event_row.likelihood = coinc_event_vals['stat'][idx]
        coinc_inspiral_row.minimum_duration = 0.
        coinc_event_table.append(coinc_event_row)
        coinc_inspiral_table.append(coinc_inspiral_row)

    outdoc.childNodes[0].appendChild(coinc_def_table)
    outdoc.childNodes[0].appendChild(coinc_event_table)
    outdoc.childNodes[0].appendChild(coinc_event_map_table)
    outdoc.childNodes[0].appendChild(time_slide_table)
    outdoc.childNodes[0].appendChild(coinc_inspiral_table)
    outdoc.childNodes[0].appendChild(sngl_inspiral_table)

    ligolw_utils.write_filename(outdoc, file_name)
# Exercise the sample-appending machinery with a joint ("mass1", "mass2")
# column: temporarily collapse the two mass keys into one tuple key.
del CMAP["mass2"]
CMAP[("mass1", "mass2")] = ("mass1", "mass2")
ar = numpy.random.random((len(CMAP), 10))
samp_dict = dict(zip(CMAP, ar))
# Duplicate the row so the joint key maps to a 2xN array (one row per mass).
ar = samp_dict[("mass1", "mass2")]
samp_dict[("mass1", "mass2")] = numpy.array([ar, ar])
# Restore the per-mass keys and add a sample index column.
del CMAP[("mass1", "mass2")]
CMAP["mass1"] = "mass1"
CMAP["mass2"] = "mass2"
samp_dict["samp_n"] = numpy.array(range(0,10))
CMAP["sample_n"] = "sample_n"

xmldoc = ligolw.Document()
xmldoc.appendChild(ligolw.LIGO_LW())
process.register_to_xmldoc(xmldoc, sys.argv[0], {})
append_samples_to_xmldoc(xmldoc, samp_dict)

def gaussian(x, mu=0, std=1):
    """Normal probability density at x with mean mu and width std."""
    return 1/numpy.sqrt(numpy.pi*2)/std * numpy.exp(-(x-mu)**2/2/std**2)

# Random (m1, m2) draws in [1, 2) and their likelihoods.
# NOTE(review): the values stored in `loglikes` are products of Gaussian
# densities, i.e. likelihoods, not log-likelihoods — confirm whether
# append_likelihood_result_to_xmldoc expects logs.
m1m, m2m = 1.4, 1.5
m1, m2 = numpy.random.random(2000).reshape(2,1000)*1.0+1.0
loglikes = [gaussian(m1i, m1m)*gaussian(m2i, m2m)
            for m1i, m2i in zip(m1, m2)]

#loglikelihood = - 7.5**2/2
#append_likelihood_result_to_xmldoc(xmldoc, loglikelihood, **{"mass1": 1.4, "mass2": 1.4, "ifos": "H1,L1,V1"})
for m1i, m2i, loglikelihood in zip(m1, m2, loglikes):
    append_likelihood_result_to_xmldoc(xmldoc, loglikelihood,
                                       **{"mass1": m1i, "mass2": m2i})

from glue.ligolw import utils
def register_to_xmldoc(xmldoc, parser, opts, **kwargs):
    """Record this program in the process table of ``xmldoc``.

    Each command line option value is passed through
    ``_sanitize_arg_value_for_xmldoc`` before being written to the
    process_params table; any extra keyword arguments are forwarded
    unchanged to ``glue.ligolw.utils.process.register_to_xmldoc``.
    """
    from glue.ligolw.utils import process

    # Build the sanitized parameter dictionary entry by entry.
    params = {}
    for name, raw_value in opts.__dict__.items():
        params[name] = _sanitize_arg_value_for_xmldoc(raw_value)

    return process.register_to_xmldoc(xmldoc, parser.prog, params, **kwargs)
# Main # # ============================================================================= # options, filenames = parse_command_line() if options.verbose: print >>sys.stderr, "time-frequency tiles have %g degrees of freedom" % (2 * options.delta_t * options.delta_f) xmldoc = ligolw.Document() xmldoc.appendChild(ligolw.LIGO_LW()) process = ligolw_process.register_to_xmldoc(xmldoc, u"lalapps_binj_pic", options.options_dict, version = git_version.version) time_slide_table = xmldoc.childNodes[-1].appendChild(lsctables.TimeSlideTable.get_table(ligolw_utils.load_filename(options.time_slide_xml, verbose = options.verbose, contenthandler = LIGOLWContentHandler))) time_slide_id = time_slide_table[0].time_slide_id if options.verbose: print >>sys.stderr, "associating injections with time slide ID \"%s\": %s" % (time_slide_id, time_slide_table.as_dict()[time_slide_id]) for row in time_slide_table: row.process_id = process.process_id sim_burst_tbl = xmldoc.childNodes[-1].appendChild(lsctables.New(lsctables.SimBurstTable, ["process_id", "simulation_id", "time_slide_id", "waveform", "waveform_number", "ra", "dec", "psi", "time_geocent_gps", "time_geocent_gps_ns", "duration", "frequency", "bandwidth", "egw_over_rsquared", "pol_ellipse_angle", "pol_ellipse_e"])) for filename in filenames: if options.verbose: print >>sys.stderr, "loading %s ..." % filename img = Image.open(filename) width, height = img.size
file=sys.stderr) if not options.force: if options.verbose: print("skipping", file=sys.stderr) continue if options.verbose: print("continuing by --force", file=sys.stderr) # # Add an entry to the process table. # process = ligolw_process.register_to_xmldoc(xmldoc, process_program_name, process_params, comment=options.comment, version=__version__, cvs_repository=u"lscsoft", cvs_entry_time=__date__) # # LAL writes all triggers with event_id = 0. Detect this and barf # on it. # tbl = lsctables.SnglInspiralTable.get_table(xmldoc) assert len(set(tbl.getColumnByName("event_id"))) == len( tbl), "degenerate sngl_inspiral event_id detected" # # Extract veto segments if present.
def output_sngl_inspiral_table(outputFile, tempBank, metricParams,
                               ethincaParams, programName="", optDict = None,
                               outdoc=None, **kwargs):
    """
    Function that converts the information produced by the various pyCBC bank
    generation codes into a valid LIGOLW xml file containing a sngl_inspiral
    table and outputs to file.

    Parameters
    -----------
    outputFile : string
        Name of the file that the bank will be written to
    tempBank : iterable
        Each entry in the tempBank iterable should be a sequence of
        [mass1,mass2,spin1z,spin2z] in that order.
    metricParams : metricParameters instance
        Structure holding all the options for construction of the metric
        and the eigenvalues, eigenvectors and covariance matrix
        needed to manipulate the space.
    ethincaParams: {ethincaParameters instance, None}
        Structure holding options relevant to the ethinca metric computation
        including the upper frequency cutoff to be used for filtering.
        NOTE: The computation is currently only valid for non-spinning systems
        and uses the TaylorF2 approximant.
    programName (key-word-argument) : string
        Name of the executable that has been run
    optDict (key-word argument) : dictionary
        Dictionary of the command line arguments passed to the program
    outdoc (key-word argument) : ligolw xml document
        If given add template bank to this representation of a xml document
        and write to disk. If not given create a new document.
    kwargs : key-word arguments
        All other key word arguments will be passed directly to
        ligolw_process.register_to_xmldoc
    """
    if optDict is None:
        optDict = {}
    if outdoc is None:
        outdoc = ligolw.Document()
        outdoc.appendChild(ligolw.LIGO_LW())

    # get IFO to put in search summary table (first two characters of the
    # channel name, e.g. "H1" from "H1:GDS-CALIB_STRAIN")
    ifos = []
    if 'channel_name' in optDict.keys():
        if optDict['channel_name'] is not None:
            ifos = [optDict['channel_name'][0:2]]

    proc_id = ligolw_process.register_to_xmldoc(outdoc, programName, optDict,
                                                ifos=ifos,
                                                **kwargs).process_id
    sngl_inspiral_table = convert_to_sngl_inspiral_table(tempBank, proc_id)

    # Calculate Gamma components if needed
    if ethincaParams is not None:
        if ethincaParams.doEthinca:
            for sngl in sngl_inspiral_table:
                # Set tau_0 and tau_3 values needed for the calculation of
                # ethinca metric distances
                (sngl.tau0, sngl.tau3) = pnutils.mass1_mass2_to_tau0_tau3(
                    sngl.mass1, sngl.mass2, metricParams.f0)
                fMax_theor, GammaVals = calculate_ethinca_metric_comps(
                    metricParams, ethincaParams,
                    sngl.mass1, sngl.mass2, spin1z=sngl.spin1z,
                    spin2z=sngl.spin2z,
                    full_ethinca=ethincaParams.full_ethinca)
                # assign the upper frequency cutoff and Gamma0-5 values
                sngl.f_final = fMax_theor
                # BUGFIX: use range() instead of the Python-2-only xrange(),
                # matching the other copy of this function in the codebase.
                for i in range(len(GammaVals)):
                    setattr(sngl, "Gamma" + str(i), GammaVals[i])
        # If Gamma metric components are not wanted, assign f_final from an
        # upper frequency cutoff specified in ethincaParams
        elif ethincaParams.cutoff is not None:
            for sngl in sngl_inspiral_table:
                sngl.f_final = pnutils.frequency_cutoff_from_name(
                    ethincaParams.cutoff, sngl.mass1, sngl.mass2,
                    sngl.spin1z, sngl.spin2z)

    # set per-template low-frequency cutoff
    if 'f_low_column' in optDict and 'f_low' in optDict and \
            optDict['f_low_column'] is not None:
        for sngl in sngl_inspiral_table:
            setattr(sngl, optDict['f_low_column'], optDict['f_low'])

    outdoc.childNodes[0].appendChild(sngl_inspiral_table)

    # get times to put in search summary table; default to [0, 0] when the
    # GPS boundaries were not supplied on the command line
    start_time = 0
    end_time = 0
    if 'gps_start_time' in optDict.keys() and 'gps_end_time' in optDict.keys():
        start_time = optDict['gps_start_time']
        end_time = optDict['gps_end_time']

    # make search summary table
    search_summary_table = lsctables.New(lsctables.SearchSummaryTable)
    search_summary = return_search_summary(start_time, end_time,
                                           len(sngl_inspiral_table), ifos,
                                           **kwargs)
    search_summary_table.append(search_summary)
    outdoc.childNodes[0].appendChild(search_summary_table)

    # write the xml doc to disk.  (An unused ProcessTable lookup that used to
    # sit here was dead code and has been removed.)
    ligolw_utils.write_filename(outdoc, outputFile,
                                gz=outputFile.endswith('.gz'))
else: raise IOError("Could not split mchirp into bins suitably") if options.verbose: logging.info("Mchirp bin edges chosen are: {}".format(mc_bins_edges)) sys.stdout.flush() for i in range(len(mc_bins_edges) - 1): # create a blank xml document and add points that fall within the i'th bin outdoc = ligolw.Document() outdoc.appendChild(ligolw.LIGO_LW()) new_inspiral_table = lsctables.New(tabletype, columns=template_bank_table.columnnames) outdoc.childNodes[0].appendChild(new_inspiral_table) out_proc_id = ligolw_process.register_to_xmldoc( outdoc, PROGRAM_NAME, options.__dict__, comment=options.comment).process_id for p in template_bank_table: if p.mchirp >= mc_bins_edges[i] and p.mchirp < mc_bins_edges[i + 1]: p.process_id = out_proc_id new_inspiral_table.append(p) if options.verbose: logging.info("\t {} templates in sub-bank {}.".format(\ len(new_inspiral_table), i)) # write the xml doc to disk proctable = table.get_table(outdoc, lsctables.ProcessTable.tableName) proctable[0].end_time = gpstime.GpsSecondsFromPyUTC(time.time()) outname = options.named + '%06d.xml' % i
# Clear the statistics book-keeping object. # distributions = stringutils.StringCoincParamsDistributions() segs = segments.segmentlistdict() # # Start output document # xmldoc = ligolw.Document() xmldoc.appendChild(ligolw.LIGO_LW()) process = ligolw_process.register_to_xmldoc(xmldoc, program = u"lalapps_string_meas_likelihood", paramdict = paramdict, version = __version__, cvs_repository = "lscsoft", cvs_entry_time = __date__, comment = u"") distributions.process_id = process.process_id # # Iterate over files # for n, filename in enumerate(filenames): # # Open the database file. # if options.verbose: print >>sys.stderr, "%d/%d: %s" % (n + 1, len(filenames), filename)
dfDir, segsList) # This is needed to know what times will be analysed by daily ahope # Template bank stuff banks = _workflow.setup_tmpltbank_workflow(workflow, scienceSegs, datafinds, dfDir) # Do matched-filtering insps = _workflow.setup_matchedfltr_workflow(workflow, scienceSegs, datafinds, banks, dfDir) # Now construct the summary XML file outdoc = ligolw.Document() outdoc.appendChild(ligolw.LIGO_LW()) # FIXME: PROGRAM NAME and dictionary of opts should be variables defined up above proc_id = ligolw_process.register_to_xmldoc(outdoc, 'dayhopetest', vars(args) ).process_id for ifo in workflow.ifos: # Lets get the segment lists we need segIfoFiles = segsList.find_output_with_ifo(ifo) # SCIENCE sciSegFile = segIfoFiles.find_output_with_tag('SCIENCE') assert(len(sciSegFile) == 1) sciSegFile = sciSegFile[0] sciSegs = sciSegFile.segmentList # SCIENCE_OK sciokSegFile = segIfoFiles.find_output_with_tag('SCIENCE_OK') assert(len(sciokSegFile) == 1) sciokSegFile = sciokSegFile[0] sciokSegs = sciokSegFile.segmentList # SCIENCE_AVAILABLE sciavailableSegFile = segIfoFiles.find_output_with_tag('SCIENCE_AVAILABLE')
lnL[i] = factored_likelihood.factored_log_likelihood_time_marginalized(tvals, P, rholms_intp, rholms, cross_terms, det_epochs, opts.l_max,interpolate=opts.interpolate_time) i+=1 return numpy.exp(lnL) res, var, neff, dict_return = sampler.integrate(likelihood_function, *unpinned_params, **pinned_params) print " lnLmarg is ", numpy.log(res), " with expected relative error ", numpy.sqrt(var)/res print " note neff is ", neff # FIXME: This is turning into a mess if opts.output_file: xmldoc = ligolw.Document() xmldoc.appendChild(ligolw.LIGO_LW()) process.register_to_xmldoc(xmldoc, sys.argv[0], opts.__dict__) result_dict = {"mass1": opts.mass1, "mass2": opts.mass2, "event_duration": numpy.sqrt(var)/res, "ttotal": sampler.ntotal} if opts.spin1z is not None or sngl_inspiral_table: result_dict["spin1z"] = opts.spin1z or 0.0 if opts.spin2z is not None or sngl_inspiral_table: result_dict["spin2z"] = opts.spin2z or 0.0 if opts.eff_lambda is not None: result_dict["psi0"] = opts.eff_lambda if opts.deff_lambda is not None: result_dict["psi3"] = opts.deff_lambda xmlutils.append_likelihood_result_to_xmldoc(xmldoc, numpy.log(res), neff=neff, **result_dict) utils.write_filename(xmldoc, opts.output_file, gz=opts.output_file.endswith(".gz")) if opts.save_samples: samples = sampler._rvs samples["distance"] = samples["distance"]
# iterate through classifiers -> generate segments #======================== ### set up xml document from glue.ligolw import ligolw from glue.ligolw import utils as ligolw_utils from glue.ligolw import lsctables from glue.ligolw.utils import process xmldoc = ligolw.Document() xml_element = ligolw.LIGO_LW() xmldoc.appendChild(xml_element) proc_id = process.register_to_xmldoc(xmldoc, __prog__, opts.__dict__, version=__version__).process_id segdef = lsctables.New(lsctables.SegmentDefTable, columns=[ "process_id", "segment_def_id", "ifos", "name", "version", "comment" ]) segsum = lsctables.New(lsctables.SegmentSumTable, columns=[ "process_id", "segment_sum_id", "start_time", "start_time_ns", "end_time", "end_time_ns", "comment", "segment_def_id" ]) segtab = lsctables.New(lsctables.SegmentTable, columns=[
import lalsimulation # Other imports. import numpy as np from scipy import linalg # BAYESTAR imports. from lalinference import bayestar.ligolw # Read input file. xmldoc = ligolw_utils.load_filename(, infilename, contenthandler=bayestar.ligolw.LSCTablesContentHandler) # Write process metadata to output file. process = ligolw_process.register_to_xmldoc(xmldoc, parser.get_prog_name(), opts.__dict__) # Determine the low frequency cutoff from the template bank file. f_low = bayestar.ligolw.get_template_bank_f_low(xmldoc) # Get the SnglInspiral table. sngl_inspiral_table = ligolw_table.get_table(xmldoc, lsctables.SnglInspiralTable.tableName) # Determine central values of intrinsic parameters. mchirp0 = lalinspiral.sbank.tau0tau3.m1m2_to_mchirp(opts.mass1, opts.mass2) eta0 = opts.mass1 * opts.mass2 / np.square(opts.mass1 + opts.mass2) chi0 = 0. # Transform to chirp times. thetas_0 = lalsimulation.SimInspiralTaylorF2RedSpinChirpTimesFromMchirpEtaChi(
# # FIXME: don't hard-code instruments distributions = stringutils.StringCoincParamsDistributions(["H1", "L1", "V1"]) segs = segments.segmentlistdict() # # Start output document # xmldoc = ligolw.Document() xmldoc.appendChild(ligolw.LIGO_LW()) process = ligolw_process.register_to_xmldoc( xmldoc, program=u"lalapps_string_meas_likelihood", paramdict=paramdict, version=__version__, cvs_repository="lscsoft", cvs_entry_time=__date__, comment=u"") # # Iterate over files # for n, filename in enumerate(filenames): # # Open the database file. # if options.verbose: print >> sys.stderr, "%d/%d: %s" % (n + 1, len(filenames), filename)
from lalinference.bayestar import filter from lalinference.bayestar import timing # Other imports. import numpy as np progress = pylal.progress.ProgressBar() # Open output file. progress.update(-1, 'setting up output document') out_xmldoc = ligolw.Document() out_xmldoc.appendChild(ligolw.LIGO_LW()) # Write process metadata to output file. process = ligolw_process.register_to_xmldoc(out_xmldoc, parser.get_prog_name(), opts.__dict__, ifos=opts.detector, comment="Simulated coincidences") # Add search summary to output file. all_time = segments.segment( [glue.lal.LIGOTimeGPS(0), glue.lal.LIGOTimeGPS(2e9)]) search_summary_table = lsctables.New(lsctables.SearchSummaryTable) out_xmldoc.childNodes[0].appendChild(search_summary_table) summary = ligolw_search_summary.append_search_summary(out_xmldoc, process, inseg=all_time, outseg=all_time) # Read PSDs. progress.update(-1, 'reading ' + opts.reference_psd) xmldoc = ligolw_utils.load_filename( opts.reference_psd, contenthandler=lal.series.PSDContentHandler) psds = lal.series.read_psd_xmldoc(xmldoc) psds = dict(
def __init__(self, ifos, coinc_results):
    """Initialize a ligolw xml representation of a zerolag trigger
    for upload from pycbc live to gracedb.

    Parameters
    ----------
    ifos: list of strs
        A list of the ifos participating in this trigger.
    coinc_results: dict of values
        A dictionary of values. The format is defined in
        pycbc/events/coinc.py and matches the on disk representation
        in the hdf file for this time.
    """
    # remember if this should be marked as HWINJ
    self.is_hardware_injection = False
    if 'HWINJ' in coinc_results:
        self.is_hardware_injection = True

    # Set up the bare structure of the xml document
    outdoc = ligolw.Document()
    outdoc.appendChild(ligolw.LIGO_LW())

    # Register this program in the process table; its process_id ties every
    # row below back to this invocation.
    proc_id = ligolw_process.register_to_xmldoc(
        outdoc, 'pycbc', {}, ifos=ifos, comment='',
        version=pycbc_version.git_hash,
        cvs_repository='pycbc/'+pycbc_version.git_branch,
        cvs_entry_time=pycbc_version.date).process_id

    # Set up coinc_definer table
    coinc_def_table = lsctables.New(lsctables.CoincDefTable)
    coinc_def_id = lsctables.CoincDefID(0)
    coinc_def_row = lsctables.CoincDef()
    coinc_def_row.search = "inspiral"
    coinc_def_row.description = "sngl_inspiral<-->sngl_inspiral coincs"
    coinc_def_row.coinc_def_id = coinc_def_id
    coinc_def_row.search_coinc_type = 0
    coinc_def_table.append(coinc_def_row)
    outdoc.childNodes[0].appendChild(coinc_def_table)

    # Set up coinc inspiral and coinc event tables.  A single zerolag
    # coincidence is represented, so all IDs are fixed at 0.
    coinc_id = lsctables.CoincID(0)
    coinc_event_table = lsctables.New(lsctables.CoincTable)
    coinc_event_row = lsctables.Coinc()
    coinc_event_row.coinc_def_id = coinc_def_id
    coinc_event_row.nevents = len(ifos)
    coinc_event_row.instruments = ','.join(ifos)
    coinc_event_row.time_slide_id = lsctables.TimeSlideID(0)
    coinc_event_row.process_id = proc_id
    coinc_event_row.coinc_event_id = coinc_id
    coinc_event_row.likelihood = 0.
    coinc_event_table.append(coinc_event_row)
    outdoc.childNodes[0].appendChild(coinc_event_table)

    # Set up sngls: one sngl_inspiral row per ifo, filled from the
    # 'foreground/<ifo>/<column>' entries of coinc_results.
    sngl_inspiral_table = lsctables.New(lsctables.SnglInspiralTable)
    coinc_event_map_table = lsctables.New(lsctables.CoincMapTable)

    sngl_id = 0
    for ifo in ifos:
        names = [n.split('/')[-1] for n in coinc_results
                 if 'foreground/%s' % ifo in n]
        sngl_id += 1
        sngl = return_empty_sngl()
        sngl.event_id = lsctables.SnglInspiralID(sngl_id)
        sngl.ifo = ifo
        for name in names:
            val = coinc_results['foreground/%s/%s' % (ifo, name)]
            if name == 'end_time':
                sngl.set_end(LIGOTimeGPS(val))
            else:
                # silently skip keys that are not sngl_inspiral columns
                try:
                    setattr(sngl, name, val)
                except AttributeError:
                    pass
        # derived mass columns computed from mass1/mass2
        sngl.mtotal, sngl.eta = pnutils.mass1_mass2_to_mtotal_eta(
            sngl.mass1, sngl.mass2)
        sngl.mchirp, _ = pnutils.mass1_mass2_to_mchirp_eta(
            sngl.mass1, sngl.mass2)
        sngl.eff_distance = (sngl.sigmasq)**0.5 / sngl.snr
        sngl_inspiral_table.append(sngl)

        # Set up coinc_map entry linking this sngl row to the coinc event
        coinc_map_row = lsctables.CoincMap()
        coinc_map_row.table_name = 'sngl_inspiral'
        coinc_map_row.coinc_event_id = coinc_id
        coinc_map_row.event_id = sngl.event_id
        coinc_event_map_table.append(coinc_map_row)

    outdoc.childNodes[0].appendChild(coinc_event_map_table)
    outdoc.childNodes[0].appendChild(sngl_inspiral_table)

    # Set up the coinc inspiral table
    coinc_inspiral_table = lsctables.New(lsctables.CoincInspiralTable)
    coinc_inspiral_row = lsctables.CoincInspiral()
    # This seems to be used as FAP, which should not be in gracedb
    coinc_inspiral_row.false_alarm_rate = 0
    coinc_inspiral_row.minimum_duration = 0.
    coinc_inspiral_row.set_ifos(ifos)
    coinc_inspiral_row.coinc_event_id = coinc_id
    # NOTE(review): `sngl` below is the last row from the loop above, so
    # mchirp/mass/end_time are taken from the final ifo only -- presumably
    # the template (and hence masses) is identical across ifos; confirm.
    coinc_inspiral_row.mchirp = sngl.mchirp
    coinc_inspiral_row.mass = sngl.mtotal
    coinc_inspiral_row.end_time = sngl.end_time
    coinc_inspiral_row.end_time_ns = sngl.end_time_ns
    coinc_inspiral_row.snr = coinc_results['foreground/stat']
    # convert IFAR (in Julian years) to a combined FAR in Hz
    far = 1.0 / (YRJUL_SI * coinc_results['foreground/ifar'])
    coinc_inspiral_row.combined_far = far
    coinc_inspiral_table.append(coinc_inspiral_row)
    outdoc.childNodes[0].appendChild(coinc_inspiral_table)

    self.outdoc = outdoc
def to_coinc_xml_object(self, file_name):
    """Write this object's coincident triggers to ``file_name`` as a LIGOLW
    XML document containing sngl_inspiral, coinc_* and time_slide tables.
    """
    # FIXME: This function will only work with two ifos!!

    outdoc = ligolw.Document()
    outdoc.appendChild(ligolw.LIGO_LW())

    ifos = [ifo for ifo in self.sngl_files.keys()]
    proc_id = ligolw_process.register_to_xmldoc(outdoc, 'pycbc', {},
        ifos=ifos, comment='', version=pycbc_version.git_hash,
        cvs_repository='pycbc/'+pycbc_version.git_branch,
        cvs_entry_time=pycbc_version.date).process_id

    # Search summary: overall analyzed span taken from the coinc segments
    # stored in the HDF file, number of events from the sort array.
    search_summ_table = lsctables.New(lsctables.SearchSummaryTable)
    coinc_h5file = self.coinc_file.h5file
    start_time = coinc_h5file['segments']['coinc']['start'][:].min()
    end_time = coinc_h5file['segments']['coinc']['end'][:].max()
    num_trigs = len(self.sort_arr)
    search_summary = return_search_summary(start_time, end_time,
                                           num_trigs, ifos)
    search_summ_table.append(search_summary)
    outdoc.childNodes[0].appendChild(search_summ_table)

    sngl_inspiral_table = lsctables.New(lsctables.SnglInspiralTable)
    coinc_def_table = lsctables.New(lsctables.CoincDefTable)
    coinc_event_table = lsctables.New(lsctables.CoincTable)
    coinc_inspiral_table = lsctables.New(lsctables.CoincInspiralTable)
    coinc_event_map_table = lsctables.New(lsctables.CoincMapTable)
    time_slide_table = lsctables.New(lsctables.TimeSlideTable)

    # Set up time_slide table: a single zerolag slide (offset 0 for all ifos)
    time_slide_id = lsctables.TimeSlideID(0)
    for ifo in ifos:
        time_slide_row = lsctables.TimeSlide()
        time_slide_row.instrument = ifo
        time_slide_row.time_slide_id = time_slide_id
        time_slide_row.offset = 0
        time_slide_row.process_id = proc_id
        time_slide_table.append(time_slide_row)

    # Set up coinc_definer table
    coinc_def_id = lsctables.CoincDefID(0)
    coinc_def_row = lsctables.CoincDef()
    coinc_def_row.search = "inspiral"
    coinc_def_row.description = "sngl_inspiral<-->sngl_inspiral coincidences"
    coinc_def_row.coinc_def_id = coinc_def_id
    coinc_def_row.search_coinc_type = 0
    coinc_def_table.append(coinc_def_row)

    # Pre-fetch the per-template, per-coinc and per-sngl column arrays
    bank_col_names = ['mass1', 'mass2', 'spin1z', 'spin2z']
    bank_col_vals = {}
    for name in bank_col_names:
        bank_col_vals[name] = self.get_bankfile_array(name)

    coinc_event_names = ['ifar', 'time1', 'fap', 'stat']
    coinc_event_vals = {}
    for name in coinc_event_names:
        coinc_event_vals[name] = self.get_coincfile_array(name)

    sngl_col_names = ['snr', 'chisq', 'chisq_dof', 'bank_chisq',
                      'bank_chisq_dof', 'cont_chisq', 'cont_chisq_dof',
                      'end_time', 'template_duration', 'coa_phase',
                      'sigmasq']
    sngl_col_vals = {}
    for name in sngl_col_names:
        sngl_col_vals[name] = self.get_snglfile_array_dict(name)

    # NOTE(review): xrange is Python-2 only; if this module targets
    # Python 3 this should be range -- confirm against the module header.
    for idx in xrange(len(self.sort_arr)):
        # Set up IDs and mapping values
        coinc_id = lsctables.CoincID(idx)

        # Set up sngls
        # FIXME: As two-ifo is hardcoded loop over all ifos
        sngl_combined_mchirp = 0
        sngl_combined_mtot = 0
        for ifo in ifos:
            sngl_id = self.trig_id[ifo][idx]
            event_id = lsctables.SnglInspiralID(sngl_id)
            sngl = return_empty_sngl()
            sngl.event_id = event_id
            sngl.ifo = ifo
            for name in sngl_col_names:
                val = sngl_col_vals[name][ifo][idx]
                if name == 'end_time':
                    sngl.set_end(LIGOTimeGPS(val))
                else:
                    setattr(sngl, name, val)
            for name in bank_col_names:
                val = bank_col_vals[name][idx]
                setattr(sngl, name, val)
            # derived mass columns computed from mass1/mass2
            sngl.mtotal, sngl.eta = pnutils.mass1_mass2_to_mtotal_eta(
                sngl.mass1, sngl.mass2)
            sngl.mchirp, _ = pnutils.mass1_mass2_to_mchirp_eta(
                sngl.mass1, sngl.mass2)
            sngl.eff_distance = (sngl.sigmasq)**0.5 / sngl.snr
            sngl_combined_mchirp += sngl.mchirp
            sngl_combined_mtot += sngl.mtotal
            sngl_inspiral_table.append(sngl)

            # Set up coinc_map entry linking this sngl row to the coinc
            coinc_map_row = lsctables.CoincMap()
            coinc_map_row.table_name = 'sngl_inspiral'
            coinc_map_row.coinc_event_id = coinc_id
            coinc_map_row.event_id = event_id
            coinc_event_map_table.append(coinc_map_row)

        # mean mchirp/mtotal over the participating ifos
        sngl_combined_mchirp = sngl_combined_mchirp / len(ifos)
        sngl_combined_mtot = sngl_combined_mtot / len(ifos)

        # Set up coinc inspiral and coinc event tables
        coinc_event_row = lsctables.Coinc()
        coinc_inspiral_row = lsctables.CoincInspiral()
        coinc_event_row.coinc_def_id = coinc_def_id
        coinc_event_row.nevents = len(ifos)
        coinc_event_row.instruments = ','.join(ifos)
        coinc_inspiral_row.set_ifos(ifos)
        coinc_event_row.time_slide_id = time_slide_id
        coinc_event_row.process_id = proc_id
        coinc_event_row.coinc_event_id = coinc_id
        coinc_inspiral_row.coinc_event_id = coinc_id
        coinc_inspiral_row.mchirp = sngl_combined_mchirp
        coinc_inspiral_row.mass = sngl_combined_mtot
        coinc_inspiral_row.set_end(\
            LIGOTimeGPS(coinc_event_vals['time1'][idx]))
        coinc_inspiral_row.snr = coinc_event_vals['stat'][idx]
        coinc_inspiral_row.false_alarm_rate = coinc_event_vals['fap'][idx]
        coinc_inspiral_row.combined_far = 1./coinc_event_vals['ifar'][idx]
        # Transform to Hz
        coinc_inspiral_row.combined_far = \
            coinc_inspiral_row.combined_far / YRJUL_SI
        coinc_event_row.likelihood = 0.
        coinc_inspiral_row.minimum_duration = 0.
        coinc_event_table.append(coinc_event_row)
        coinc_inspiral_table.append(coinc_inspiral_row)

    outdoc.childNodes[0].appendChild(coinc_def_table)
    outdoc.childNodes[0].appendChild(coinc_event_table)
    outdoc.childNodes[0].appendChild(coinc_event_map_table)
    outdoc.childNodes[0].appendChild(time_slide_table)
    outdoc.childNodes[0].appendChild(coinc_inspiral_table)
    outdoc.childNodes[0].appendChild(sngl_inspiral_table)

    ligolw_utils.write_filename(outdoc, file_name)
# Dump data: p(<x) with open(fnameBase + "-pp-data.dat", 'w') as f: for key in ['ra', 'dec', 'tref', 'phi', 'incl', 'psi', 'dist', 'lnL']: f.write(key + " " + str(ppdata[key][0]) + ' ' + str(ppdata[key][1]) + '\n') # Save the outputs in CP's format, for comparison. NOT YET ACTIVE CODE -- xmlutils has a bug on master (lacking terms in dictionary) if True: # opts.points_file_base: print("==== Exporting to xml: <base>.xml.gz =====") xmldoc = ligolw.Document() xmldoc.appendChild(ligolw.LIGO_LW()) opts.NR_template_param = "" # process.register_to_xmldoc(xmldoc, sys.argv[0], opts.__dict__) # the process-params generation has not been reliable process.register_to_xmldoc( xmldoc, sys.argv[0], {}) # the process-params generation has not been reliable samples = {} samples["distance"] = ret[:, -3 - 1] #retNiceIndexed['dist'] samples["t_ref"] = ret[:, -3 - 5] #retNiceIndexed['tref'] samples["polarization"] = ret[:, -3 - 1] #retNiceIndexed['psi'] samples["coa_phase"] = ret[:, -3 - 4] #retNiceIndexed['phi'] samples["latitude"] = retNiceIndexed['dec'] samples["longitude"] = retNiceIndexed['ra'] samples["inclination"] = retNiceIndexed['incl'] samples["loglikelihood"] = ret[:, -1] #alpha1 samples["joint_prior"] = ret[:, -3] #alpha2 samples["joint_s_prior"] = ret[:, -2] #alpha3 # samples = sampler._rvs # samples["distance"] = samples["distance"] # samples["polarization"] = samples["psi"]
def output_sngl_inspiral_table(outputFile, tempBank, metricParams,
                               ethincaParams, programName="", optDict = None,
                               outdoc=None, **kwargs):
    """
    Write a pyCBC template bank to ``outputFile`` as a LIGOLW XML document
    containing a sngl_inspiral table (plus process and search_summary
    metadata).

    Parameters
    -----------
    outputFile : string
        Path the bank is written to (gzipped when it ends in '.gz').
    tempBank : iterable
        Sequences of [mass1, mass2, spin1z, spin2z], one per template.
    metricParams : metricParameters instance
        Metric construction options plus the eigenvalues, eigenvectors and
        covariance matrix needed to manipulate the space.
    ethincaParams: {ethincaParameters instance, None}
        Options for the ethinca metric computation, including the upper
        frequency cutoff used for filtering.  NOTE: only valid for
        non-spinning systems with the TaylorF2 approximant.
    programName (key-word-argument) : string
        Name of the executable that has been run.
    optDict (key-word argument) : dictionary
        Command-line arguments passed to the program.
    outdoc (key-word argument) : ligolw xml document
        Existing document to append the bank to; a fresh one is created
        when omitted.
    kwargs : key-word arguments
        Forwarded verbatim to ligolw_process.register_to_xmldoc.
    """
    optDict = {} if optDict is None else optDict

    if outdoc is None:
        outdoc = ligolw.Document()
        outdoc.appendChild(ligolw.LIGO_LW())

    # IFO for the search summary table: first two characters of the channel
    # name, when one was supplied.
    ifos = []
    channel = optDict.get('channel_name')
    if channel is not None:
        ifos = [channel[0:2]]

    proc_id = ligolw_process.register_to_xmldoc(outdoc, programName, optDict,
                                                ifos=ifos,
                                                **kwargs).process_id
    sngl_table = convert_to_sngl_inspiral_table(tempBank, proc_id)

    if ethincaParams is not None:
        if ethincaParams.doEthinca:
            # Attach tau0/tau3, the theoretical upper frequency cutoff and
            # the ethinca metric components Gamma0-Gamma5 to each template.
            for row in sngl_table:
                row.tau0, row.tau3 = pnutils.mass1_mass2_to_tau0_tau3(
                    row.mass1, row.mass2, metricParams.f0)
                fMax_theor, GammaVals = calculate_ethinca_metric_comps(
                    metricParams, ethincaParams, row.mass1, row.mass2,
                    spin1z=row.spin1z, spin2z=row.spin2z,
                    full_ethinca=ethincaParams.full_ethinca)
                row.f_final = fMax_theor
                for gamma_idx, gamma in enumerate(GammaVals):
                    setattr(row, "Gamma" + str(gamma_idx), gamma)
        elif ethincaParams.cutoff is not None:
            # Metric components not wanted: only fill f_final from the
            # named frequency cutoff.
            for row in sngl_table:
                row.f_final = pnutils.frequency_cutoff_from_name(
                    ethincaParams.cutoff, row.mass1, row.mass2,
                    row.spin1z, row.spin2z)

    # Optional per-template low-frequency cutoff column.
    if 'f_low_column' in optDict and 'f_low' in optDict and \
            optDict['f_low_column'] is not None:
        for row in sngl_table:
            setattr(row, optDict['f_low_column'], optDict['f_low'])

    outdoc.childNodes[0].appendChild(sngl_table)

    # Search summary span: [gps_start_time, gps_end_time] when both were
    # given on the command line, otherwise [0, 0].
    if 'gps_start_time' in optDict and 'gps_end_time' in optDict:
        start_time = optDict['gps_start_time']
        end_time = optDict['gps_end_time']
    else:
        start_time = 0
        end_time = 0

    summary_table = lsctables.New(lsctables.SearchSummaryTable)
    summary_table.append(return_search_summary(
        start_time, end_time, len(sngl_table), ifos, **kwargs))
    outdoc.childNodes[0].appendChild(summary_table)

    # Serialize to disk, gzipping when the filename asks for it.
    ligolw_utils.write_filename(outdoc, outputFile,
                                gz=outputFile.endswith('.gz'))
def __init__(self, ifos, coinc_results, **kwargs):
    """Initialize a ligolw xml representation of a zerolag trigger
    for upload from pycbc live to gracedb.

    Parameters
    ----------
    ifos: list of strs
        A list of the ifos participating in this trigger.
    coinc_results: dict of values
        A dictionary of values. The format is defined in
        pycbc/events/coinc.py and matches the on disk representation
        in the hdf file for this time.
    """
    self.ifos = ifos
    # template_id is read from the first ifo; presumably the same template
    # triggered in every ifo -- TODO confirm against the caller.
    self.template_id = coinc_results['foreground/%s/template_id'
                                     % self.ifos[0]]

    # remember if this should be marked as HWINJ
    self.is_hardware_injection = False
    if 'HWINJ' in coinc_results:
        self.is_hardware_injection = True

    # Set up the bare structure of the xml document
    outdoc = ligolw.Document()
    outdoc.appendChild(ligolw.LIGO_LW())

    # Register this program in the process table; its process_id ties every
    # row below back to this invocation.
    proc_id = ligolw_process.register_to_xmldoc(
        outdoc, 'pycbc', {}, ifos=ifos, comment='',
        version=pycbc_version.git_hash,
        cvs_repository='pycbc/' + pycbc_version.git_branch,
        cvs_entry_time=pycbc_version.date).process_id

    # Set up coinc_definer table
    coinc_def_table = lsctables.New(lsctables.CoincDefTable)
    coinc_def_id = lsctables.CoincDefID(0)
    coinc_def_row = lsctables.CoincDef()
    coinc_def_row.search = "inspiral"
    coinc_def_row.description = "sngl_inspiral<-->sngl_inspiral coincs"
    coinc_def_row.coinc_def_id = coinc_def_id
    coinc_def_row.search_coinc_type = 0
    coinc_def_table.append(coinc_def_row)
    outdoc.childNodes[0].appendChild(coinc_def_table)

    # Set up coinc inspiral and coinc event tables; a single zerolag
    # coincidence is represented, so all IDs are fixed at 0.
    coinc_id = lsctables.CoincID(0)
    coinc_event_table = lsctables.New(lsctables.CoincTable)
    coinc_event_row = lsctables.Coinc()
    coinc_event_row.coinc_def_id = coinc_def_id
    coinc_event_row.nevents = len(ifos)
    coinc_event_row.instruments = ','.join(ifos)
    coinc_event_row.time_slide_id = lsctables.TimeSlideID(0)
    coinc_event_row.process_id = proc_id
    coinc_event_row.coinc_event_id = coinc_id
    coinc_event_row.likelihood = 0.
    coinc_event_table.append(coinc_event_row)
    outdoc.childNodes[0].appendChild(coinc_event_table)

    # Set up sngls: one sngl_inspiral row per ifo, filled from the
    # 'foreground/<ifo>/<column>' entries of coinc_results.
    sngl_inspiral_table = lsctables.New(lsctables.SnglInspiralTable)
    coinc_event_map_table = lsctables.New(lsctables.CoincMapTable)

    sngl_id = 0
    # event_id per ifo, needed later to tag each SNR time series
    sngl_event_id_map = {}
    for ifo in ifos:
        names = [
            n.split('/')[-1] for n in coinc_results
            if 'foreground/%s' % ifo in n
        ]
        sngl_id += 1
        sngl = return_empty_sngl()
        sngl.event_id = lsctables.SnglInspiralID(sngl_id)
        sngl_event_id_map[ifo] = sngl.event_id
        sngl.ifo = ifo
        for name in names:
            val = coinc_results['foreground/%s/%s' % (ifo, name)]
            if name == 'end_time':
                sngl.set_end(lal.LIGOTimeGPS(val))
            else:
                # silently skip keys that are not sngl_inspiral columns
                try:
                    setattr(sngl, name, val)
                except AttributeError:
                    pass
        # derived mass columns computed from mass1/mass2
        sngl.mtotal, sngl.eta = pnutils.mass1_mass2_to_mtotal_eta(
            sngl.mass1, sngl.mass2)
        sngl.mchirp, _ = pnutils.mass1_mass2_to_mchirp_eta(
            sngl.mass1, sngl.mass2)
        sngl.eff_distance = (sngl.sigmasq)**0.5 / sngl.snr
        sngl_inspiral_table.append(sngl)

        # Set up coinc_map entry linking this sngl row to the coinc event
        coinc_map_row = lsctables.CoincMap()
        coinc_map_row.table_name = 'sngl_inspiral'
        coinc_map_row.coinc_event_id = coinc_id
        coinc_map_row.event_id = sngl.event_id
        coinc_event_map_table.append(coinc_map_row)

    outdoc.childNodes[0].appendChild(coinc_event_map_table)
    outdoc.childNodes[0].appendChild(sngl_inspiral_table)

    # Set up the coinc inspiral table
    coinc_inspiral_table = lsctables.New(lsctables.CoincInspiralTable)
    coinc_inspiral_row = lsctables.CoincInspiral()
    # This seems to be used as FAP, which should not be in gracedb
    coinc_inspiral_row.false_alarm_rate = 0
    coinc_inspiral_row.minimum_duration = 0.
    coinc_inspiral_row.set_ifos(ifos)
    coinc_inspiral_row.coinc_event_id = coinc_id
    # NOTE(review): `sngl` below is the last row from the loop above, so
    # mchirp/mass/end_time come from the final ifo only -- presumably
    # identical template across ifos; confirm.
    coinc_inspiral_row.mchirp = sngl.mchirp
    coinc_inspiral_row.mass = sngl.mtotal
    coinc_inspiral_row.end_time = sngl.end_time
    coinc_inspiral_row.end_time_ns = sngl.end_time_ns
    coinc_inspiral_row.snr = coinc_results['foreground/stat']
    # convert IFAR (in Julian years) to a combined FAR in Hz
    far = 1.0 / (lal.YRJUL_SI * coinc_results['foreground/ifar'])
    coinc_inspiral_row.combined_far = far
    coinc_inspiral_table.append(coinc_inspiral_row)
    outdoc.childNodes[0].appendChild(coinc_inspiral_table)

    self.outdoc = outdoc
    self.time = sngl.get_end()

    # compute SNR time series
    self.upload_snr_series = kwargs['upload_snr_series']
    if self.upload_snr_series:
        data_readers = kwargs['data_readers']
        bank = kwargs['bank']
        htilde = bank[self.template_id]
        self.snr_series = {}
        self.snr_series_psd = {}
        for ifo in self.ifos:
            # matched filter of the template against the overwhitened data
            stilde = data_readers[ifo].overwhitened_data(htilde.delta_f)
            norm = 4.0 * htilde.delta_f / (htilde.sigmasq(stilde.psd)**0.5)
            qtilde = zeros((len(htilde) - 1) * 2, dtype=htilde.dtype)
            correlate(htilde, stilde, qtilde)
            snr = qtilde * 0
            ifft(qtilde, snr)

            # keep only the valid (non-padding) part of the filter output
            valid_end = int(len(qtilde) - data_readers[ifo].trim_padding)
            valid_start = int(valid_end - data_readers[ifo].blocksize
                              * data_readers[ifo].sample_rate)
            seg = slice(valid_start, valid_end)
            snr = snr[seg]
            snr *= norm
            delta_t = 1.0 / data_readers[ifo].sample_rate
            start = data_readers[ifo].start_time
            snr = TimeSeries(snr, delta_t=delta_t, epoch=start)
            self.snr_series[ifo] = snr
            self.snr_series_psd[ifo] = stilde.psd

            # store the on-source slice of the series into the XML doc;
            # window length is the light travel time across an Earth
            # radius (REARTH_SI / C_SI), centred on the trigger end time
            snr_onsource_time = coinc_results['foreground/%s/end_time'
                                              % ifo] - snr.start_time
            snr_onsource_dur = lal.REARTH_SI / lal.C_SI
            onsource_idx = round(snr_onsource_time * snr.sample_rate)
            onsource_start = onsource_idx - int(
                snr.sample_rate * snr_onsource_dur / 2)
            onsource_end = onsource_idx + int(
                snr.sample_rate * snr_onsource_dur / 2)
            onsource_slice = slice(onsource_start, onsource_end + 1)
            snr_lal = snr[onsource_slice].lal()
            snr_lal.name = 'snr'
            snr_lal.sampleUnits = ''
            snr_xml = _build_series(snr_lal,
                                    (u'Time', u'Time,Real,Imaginary'),
                                    None, 'deltaT', 's')
            snr_node = outdoc.childNodes[-1].appendChild(snr_xml)
            # tag the series with the event_id of the matching sngl row
            eid_param = ligolw_param.new_param(
                u'event_id', u'ilwd:char',
                unicode(sngl_event_id_map[ifo]))
            snr_node.appendChild(eid_param)