def make_psd_xmldoc(psddict):
    """Build a LIGO_LW XML document holding one PSD frequency series per
    instrument.

    Parameters
    ----------
    psddict : dict
        Maps instrument name -> PSD frequency series (whatever type
        ``_build_series`` accepts).

    Returns
    -------
    ligolw.Document
        Document with a root ``LIGO_LW`` element named "psd"; each child
        series carries an "instrument" param identifying its detector.
    """
    Attributes = ligolw.sax.xmlreader.AttributesImpl
    xmldoc = ligolw.Document()
    # Root LIGO_LW element, named "psd", under which all series are placed.
    root = xmldoc.appendChild(
        ligolw.LIGO_LW(Attributes({u"Name": u"psd"})))
    for instrument, psd in psddict.items():
        series_elem = root.appendChild(
            _build_series(psd, (u"Frequency,Real", u"Frequency"),
                          None, 'deltaF', 's^-1'))
        # Tag the series with the instrument it belongs to.
        series_elem.appendChild(
            ligolw_param.from_pyvalue(u"instrument", instrument))
    return xmldoc
def run_segment_operation(outdoc, filenames, segments, use_segment_table,
                          operation, result_name = 'RESULT', preserve = True):
    """
    Performs an operation (intersect or union) across a set of segments.
    That is, given a set of files each with segment definers DMT-FLAG1,
    DMT-FLAG2 etc and a list of segments DMT-FLAG1,DMT-FLAG1 this returns

    RESULT = (table 1's DMT-FLAG1 union table 2's DMT-FLAG1 union ...)
             operation
             (table 1's DMT-FLAG2 union table 2's DMT-FLAG2 union ...)
             operation
    etc

    Parameters
    ----------
    outdoc : ligolw.Document
        Output document; must already contain a process table whose first
        row supplies the process_id for the new rows.
    filenames : list of str
        XML files to load segments from.
    segments : str
        Comma-separated segment definer names to combine.
    use_segment_table : bool
        If True write result rows to the segment table, otherwise to the
        segment_summary table.
    operation : one of INTERSECT, UNION, DIFF (module-level constants)
    result_name : str
        Name given to the new segment definer (default 'RESULT').
    preserve : bool
        If True, merge the input files into ``outdoc`` as well.

    Returns
    -------
    (outdoc, livetime) : the document and abs() of the resulting list.
    """
    proc_id = table.get_table(outdoc, lsctables.ProcessTable.tableName)[0].process_id

    # Merge inputs either into the output document (preserve) or a scratch one.
    if preserve:
        indoc = ligolw_add.ligolw_add(outdoc, filenames)
    else:
        indoc = ligolw_add.ligolw_add(ligolw.Document(), filenames)

    # Start with a segment covering all of time, then
    # intersect with each of the fields of interest
    keys = segments.split(',')

    if operation == INTERSECT:
        # Identity for intersection: (-inf, +inf).
        sgmntlist = pycbc_glue.segments.segmentlist(
            [pycbc_glue.segments.segment(
                -pycbc_glue.segments.infinity(),
                pycbc_glue.segments.infinity())])
        for key in keys:
            sgmntlist &= find_segments(indoc, key, use_segment_table)
    elif operation == UNION:
        # Identity for union: the empty list.
        sgmntlist = pycbc_glue.segments.segmentlist([])
        for key in keys:
            sgmntlist |= find_segments(indoc, key, use_segment_table)
    elif operation == DIFF:
        # First key is the base; all later keys are subtracted from it.
        sgmntlist = find_segments(indoc, keys[0], use_segment_table)
        for key in keys[1:]:
            sgmntlist -= find_segments(indoc, key, use_segment_table)
    else:
        raise NameError(
            "%s is not a known operation (intersect, union or diff)" %
            operation)

    # Add a segment definer and segments
    seg_def_id = add_to_segment_definer(outdoc, proc_id, '', result_name, 1)

    if use_segment_table:
        add_to_segment(outdoc, proc_id, seg_def_id, sgmntlist)
    else:
        add_to_segment_summary(outdoc, proc_id, seg_def_id, sgmntlist)

    return outdoc, abs(sgmntlist)
def setUp(self):
    """Create random injections, write them to a temporary sim_inspiral
    XML file, and store fixtures used by the test cases."""
    # Detectors and timing constants used by the tests.
    self.detectors = [Detector(d) for d in ['H1', 'L1', 'V1']]
    self.sample_rate = 4096.
    # Light travel time across the Earth's radius, in seconds.
    self.earth_time = lal.REARTH_SI / lal.C_SI

    # create a few random injections
    self.injections = []
    start_time = float(lal.GPSTimeNow())
    taper_choices = ('TAPER_NONE', 'TAPER_START', 'TAPER_END',
                     'TAPER_STARTEND')
    # 20 injections, cycling through the four taper options.
    for i, taper in zip(xrange(20), itertools.cycle(taper_choices)):
        inj = MyInjection()
        # Spread end times ~40000 s apart with some jitter.
        inj.end_time = start_time + 40000 * i + \
                numpy.random.normal(scale=3600)
        random = numpy.random.uniform
        inj.mass1 = random(low=1., high=20.)
        inj.mass2 = random(low=1., high=20.)
        inj.distance = random(low=0.9, high=1.1) * 1e6 * lal.PC_SI
        # arccos of a uniform variate gives isotropic sky/orientation angles.
        inj.latitude = numpy.arccos(random(low=-1, high=1))
        inj.longitude = random(low=0, high=2 * lal.PI)
        inj.inclination = numpy.arccos(random(low=-1, high=1))
        inj.polarization = random(low=0, high=2 * lal.PI)
        inj.taper = taper
        self.injections.append(inj)

    # create LIGOLW document
    xmldoc = ligolw.Document()
    xmldoc.appendChild(ligolw.LIGO_LW())

    # create sim inspiral table, link it to document and fill it
    sim_table = lsctables.New(lsctables.SimInspiralTable)
    xmldoc.childNodes[-1].appendChild(sim_table)
    for i in xrange(len(self.injections)):
        row = sim_table.RowType()
        self.injections[i].fill_sim_inspiral_row(row)
        row.process_id = 'process:process_id:0'
        row.simulation_id = 'sim_inspiral:simulation_id:%d' % i
        sim_table.append(row)

    # write document to temp file
    self.inj_file = tempfile.NamedTemporaryFile(suffix='.xml')
    utils.write_fileobj(xmldoc, self.inj_file)
def output_sngl_inspiral_table(outputFile, tempBank, metricParams,
                               ethincaParams, programName="", optDict=None,
                               outdoc=None, **kwargs):
    """
    Function that converts the information produced by the various pyCBC bank
    generation codes into a valid LIGOLW xml file containing a sngl_inspiral
    table and outputs to file.

    Parameters
    -----------
    outputFile : string
        Name of the file that the bank will be written to
    tempBank : iterable
        Each entry in the tempBank iterable should be a sequence of
        [mass1,mass2,spin1z,spin2z] in that order.
    metricParams : metricParameters instance
        Structure holding all the options for construction of the metric
        and the eigenvalues, eigenvectors and covariance matrix
        needed to manipulate the space.
    ethincaParams: {ethincaParameters instance, None}
        Structure holding options relevant to the ethinca metric computation
        including the upper frequency cutoff to be used for filtering.
        NOTE: The computation is currently only valid for non-spinning systems
        and uses the TaylorF2 approximant.
    programName (key-word-argument) : string
        Name of the executable that has been run
    optDict (key-word argument) : dictionary
        Dictionary of the command line arguments passed to the program
    outdoc (key-word argument) : ligolw xml document
        If given add template bank to this representation of a xml document
        and write to disk. If not given create a new document.
    kwargs : key-word arguments
        All other key word arguments will be passed directly to
        ligolw_process.register_to_xmldoc
    """
    if optDict is None:
        optDict = {}
    if outdoc is None:
        outdoc = ligolw.Document()
        outdoc.appendChild(ligolw.LIGO_LW())

    # get IFO to put in search summary table
    ifos = []
    if 'channel_name' in optDict.keys():
        if optDict['channel_name'] is not None:
            # Channel names look like "H1:...", so the first two chars
            # identify the interferometer.
            ifos = [optDict['channel_name'][0:2]]

    proc_id = ligolw_process.register_to_xmldoc(outdoc, programName, optDict,
                                                ifos=ifos,
                                                **kwargs).process_id
    sngl_inspiral_table = convert_to_sngl_inspiral_table(tempBank, proc_id)

    # Calculate Gamma components if needed
    if ethincaParams is not None:
        if ethincaParams.doEthinca:
            for sngl in sngl_inspiral_table:
                # Set tau_0 and tau_3 values needed for the calculation of
                # ethinca metric distances
                (sngl.tau0, sngl.tau3) = pnutils.mass1_mass2_to_tau0_tau3(
                    sngl.mass1, sngl.mass2, metricParams.f0)
                fMax_theor, GammaVals = calculate_ethinca_metric_comps(
                    metricParams, ethincaParams, sngl.mass1, sngl.mass2,
                    spin1z=sngl.spin1z, spin2z=sngl.spin2z,
                    full_ethinca=ethincaParams.full_ethinca)
                # assign the upper frequency cutoff and Gamma0-5 values
                sngl.f_final = fMax_theor
                for i in xrange(len(GammaVals)):
                    setattr(sngl, "Gamma" + str(i), GammaVals[i])
        # If Gamma metric components are not wanted, assign f_final from an
        # upper frequency cutoff specified in ethincaParams
        elif ethincaParams.cutoff is not None:
            for sngl in sngl_inspiral_table:
                sngl.f_final = pnutils.frequency_cutoff_from_name(
                    ethincaParams.cutoff, sngl.mass1, sngl.mass2,
                    sngl.spin1z, sngl.spin2z)

    # set per-template low-frequency cutoff
    if 'f_low_column' in optDict and 'f_low' in optDict and \
            optDict['f_low_column'] is not None:
        for sngl in sngl_inspiral_table:
            setattr(sngl, optDict['f_low_column'], optDict['f_low'])

    outdoc.childNodes[0].appendChild(sngl_inspiral_table)

    # get times to put in search summary table
    start_time = 0
    end_time = 0
    if 'gps_start_time' in optDict.keys() and 'gps_end_time' in optDict.keys():
        start_time = optDict['gps_start_time']
        end_time = optDict['gps_end_time']

    # make search summary table
    search_summary_table = lsctables.New(lsctables.SearchSummaryTable)
    search_summary = return_search_summary(start_time, end_time,
                                           len(sngl_inspiral_table), ifos,
                                           **kwargs)
    search_summary_table.append(search_summary)
    outdoc.childNodes[0].appendChild(search_summary_table)

    # write the xml doc to disk
    # (the previous version fetched the process table here into an unused
    # local; that dead statement has been removed)
    ligolw_utils.write_filename(outdoc, outputFile,
                                gz=outputFile.endswith('.gz'))
def __init__(self, ifos, coinc_results, **kwargs):
    """Initialize a ligolw xml representation of a zerolag trigger
    for upload from pycbc live to gracedb.

    Parameters
    ----------
    ifos: list of strs
        A list of the ifos pariticipating in this trigger
    coinc_results: dict of values
        A dictionary of values. The format is defined in
        pycbc/events/coinc.py and matches the on disk representation
        in the hdf file for this time.
    kwargs:
        Recognized keys (from call sites visible here): 'followup_ifos'
        (list), 'upload_snr_series' (truthy flag), 'bank' (indexable by
        template id), 'data_readers' (dict keyed by ifo).
    """
    self.ifos = ifos
    followup_ifos = kwargs.get('followup_ifos') or []

    # Template id is assumed identical across ifos; read it from the first.
    self.template_id = \
        coinc_results['foreground/%s/template_id' % self.ifos[0]]

    # remember if this should be marked as HWINJ
    self.is_hardware_injection = ('HWINJ' in coinc_results)

    # Set up the bare structure of the xml document
    outdoc = ligolw.Document()
    outdoc.appendChild(ligolw.LIGO_LW())

    proc_id = ligolw_process.register_to_xmldoc(
        outdoc, 'pycbc', {}, ifos=ifos, comment='',
        version=pycbc_version.git_hash,
        cvs_repository='pycbc/' + pycbc_version.git_branch,
        cvs_entry_time=pycbc_version.date).process_id

    # Set up coinc_definer table
    coinc_def_table = lsctables.New(lsctables.CoincDefTable)
    coinc_def_id = lsctables.CoincDefID(0)
    coinc_def_row = lsctables.CoincDef()
    coinc_def_row.search = "inspiral"
    coinc_def_row.description = "sngl_inspiral<-->sngl_inspiral coincs"
    coinc_def_row.coinc_def_id = coinc_def_id
    coinc_def_row.search_coinc_type = 0
    coinc_def_table.append(coinc_def_row)
    outdoc.childNodes[0].appendChild(coinc_def_table)

    # Set up coinc inspiral and coinc event tables
    coinc_id = lsctables.CoincID(0)
    coinc_event_table = lsctables.New(lsctables.CoincTable)
    coinc_event_row = lsctables.Coinc()
    coinc_event_row.coinc_def_id = coinc_def_id
    coinc_event_row.nevents = len(ifos)
    coinc_event_row.instruments = ','.join(ifos)
    coinc_event_row.time_slide_id = lsctables.TimeSlideID(0)
    coinc_event_row.process_id = proc_id
    coinc_event_row.coinc_event_id = coinc_id
    coinc_event_row.likelihood = 0.
    coinc_event_table.append(coinc_event_row)
    outdoc.childNodes[0].appendChild(coinc_event_table)

    # compute SNR time series
    # Mean end time over the coincident ifos; used as the reference time
    # for followup (subthreshold) detectors.
    subthreshold_sngl_time = numpy.mean(
        [coinc_results['foreground/%s/end_time' % ifo] for ifo in ifos])

    self.upload_snr_series = kwargs.get('upload_snr_series')
    if self.upload_snr_series:
        self.snr_series = {}
        self.snr_series_psd = {}
        htilde = kwargs['bank'][self.template_id]
        for ifo in ifos + followup_ifos:
            if ifo in ifos:
                trig_time = coinc_results['foreground/%s/end_time' % ifo]
            else:
                trig_time = subthreshold_sngl_time

            # NOTE we only check the state/DQ of followup IFOs here.
            # IFOs producing the coincidence are assumed to also
            # produce valid SNR series.
            snr_series, snr_series_psd = compute_followup_snr_series(
                kwargs['data_readers'][ifo], htilde, trig_time,
                check_state=(ifo in followup_ifos))

            if snr_series is not None:
                self.snr_series[ifo] = snr_series
                self.snr_series_psd[ifo] = snr_series_psd

    # Set up sngls
    sngl_inspiral_table = lsctables.New(lsctables.SnglInspiralTable)
    coinc_event_map_table = lsctables.New(lsctables.CoincMapTable)

    # sngl_populated ends up as the last sngl that had both masses set;
    # it is later used as the source of mass/spin fields and end time.
    sngl_populated = None
    for sngl_id, ifo in enumerate(ifos + followup_ifos):
        if self.upload_snr_series and ifo not in self.snr_series:
            # SNR series could not be computed, so skip this
            continue
        sngl = return_empty_sngl(nones=True)
        sngl.event_id = lsctables.SnglInspiralID(sngl_id)
        sngl.process_id = proc_id
        sngl.ifo = ifo
        # Copy every 'foreground/<ifo>/<name>' entry onto the sngl row.
        names = [n.split('/')[-1] for n in coinc_results
                 if 'foreground/%s' % ifo in n]
        for name in names:
            val = coinc_results['foreground/%s/%s' % (ifo, name)]
            if name == 'end_time':
                sngl.set_end(lal.LIGOTimeGPS(val))
            else:
                try:
                    setattr(sngl, name, val)
                except AttributeError:
                    # Values with no matching sngl column are ignored.
                    pass
        if sngl.mass1 and sngl.mass2:
            sngl.mtotal, sngl.eta = pnutils.mass1_mass2_to_mtotal_eta(
                sngl.mass1, sngl.mass2)
            sngl.mchirp, _ = pnutils.mass1_mass2_to_mchirp_eta(
                sngl.mass1, sngl.mass2)
            sngl_populated = sngl
        if sngl.snr:
            sngl.eff_distance = (sngl.sigmasq)**0.5 / sngl.snr
        sngl_inspiral_table.append(sngl)

        # Set up coinc_map entry
        coinc_map_row = lsctables.CoincMap()
        coinc_map_row.table_name = 'sngl_inspiral'
        coinc_map_row.coinc_event_id = coinc_id
        coinc_map_row.event_id = sngl.event_id
        coinc_event_map_table.append(coinc_map_row)

        if self.upload_snr_series and ifo in self.snr_series:
            snr_series_to_xml(self.snr_series[ifo], outdoc, sngl.event_id)

    # for subthreshold detectors, respect BAYESTAR's assumptions and checks
    bayestar_check_fields = ('mass1 mass2 mtotal mchirp eta spin1x '
                             'spin1y spin1z spin2x spin2y spin2z').split()
    for sngl in sngl_inspiral_table:
        if sngl.ifo in followup_ifos:
            for bcf in bayestar_check_fields:
                setattr(sngl, bcf, getattr(sngl_populated, bcf))
            sngl.set_end(lal.LIGOTimeGPS(subthreshold_sngl_time))

    outdoc.childNodes[0].appendChild(coinc_event_map_table)
    outdoc.childNodes[0].appendChild(sngl_inspiral_table)

    # Set up the coinc inspiral table
    coinc_inspiral_table = lsctables.New(lsctables.CoincInspiralTable)
    coinc_inspiral_row = lsctables.CoincInspiral()
    # This seems to be used as FAP, which should not be in gracedb
    coinc_inspiral_row.false_alarm_rate = 0
    coinc_inspiral_row.minimum_duration = 0.
    coinc_inspiral_row.set_ifos(ifos)
    coinc_inspiral_row.coinc_event_id = coinc_id
    coinc_inspiral_row.mchirp = sngl_populated.mchirp
    coinc_inspiral_row.mass = sngl_populated.mtotal
    coinc_inspiral_row.end_time = sngl_populated.end_time
    coinc_inspiral_row.end_time_ns = sngl_populated.end_time_ns
    coinc_inspiral_row.snr = coinc_results['foreground/stat']
    # Convert IFAR (years) to a combined FAR in Hz.
    far = 1.0 / (lal.YRJUL_SI * coinc_results['foreground/ifar'])
    coinc_inspiral_row.combined_far = far
    coinc_inspiral_table.append(coinc_inspiral_row)
    outdoc.childNodes[0].appendChild(coinc_inspiral_table)

    self.outdoc = outdoc
    self.time = sngl_populated.get_end()
def to_coinc_xml_object(self, file_name):
    """Export the coincident triggers held by this object to a LIGOLW XML
    coinc file at *file_name*, building sngl_inspiral, coinc_* and
    time_slide tables from the HDF5-backed arrays."""
    # FIXME: This function will only work with two ifos!!
    outdoc = ligolw.Document()
    outdoc.appendChild(ligolw.LIGO_LW())

    ifos = [ifo for ifo in self.sngl_files.keys()]
    proc_id = ligolw_process.register_to_xmldoc(
        outdoc, 'pycbc', {}, ifos=ifos, comment='',
        version=pycbc_version.git_hash,
        cvs_repository='pycbc/' + pycbc_version.git_branch,
        cvs_entry_time=pycbc_version.date).process_id

    # Search summary covers the full span of the coinc segments.
    search_summ_table = lsctables.New(lsctables.SearchSummaryTable)
    coinc_h5file = self.coinc_file.h5file
    start_time = coinc_h5file['segments']['coinc']['start'][:].min()
    end_time = coinc_h5file['segments']['coinc']['end'][:].max()
    num_trigs = len(self.sort_arr)
    search_summary = return_search_summary(start_time, end_time,
                                           num_trigs, ifos)
    search_summ_table.append(search_summary)
    outdoc.childNodes[0].appendChild(search_summ_table)

    sngl_inspiral_table = lsctables.New(lsctables.SnglInspiralTable)
    coinc_def_table = lsctables.New(lsctables.CoincDefTable)
    coinc_event_table = lsctables.New(lsctables.CoincTable)
    coinc_inspiral_table = lsctables.New(lsctables.CoincInspiralTable)
    coinc_event_map_table = lsctables.New(lsctables.CoincMapTable)
    time_slide_table = lsctables.New(lsctables.TimeSlideTable)

    # Set up time_slide table
    # Zerolag only: every instrument gets offset 0 under one slide id.
    time_slide_id = lsctables.TimeSlideID(0)
    for ifo in ifos:
        time_slide_row = lsctables.TimeSlide()
        time_slide_row.instrument = ifo
        time_slide_row.time_slide_id = time_slide_id
        time_slide_row.offset = 0
        time_slide_row.process_id = proc_id
        time_slide_table.append(time_slide_row)

    # Set up coinc_definer table
    coinc_def_id = lsctables.CoincDefID(0)
    coinc_def_row = lsctables.CoincDef()
    coinc_def_row.search = "inspiral"
    coinc_def_row.description = "sngl_inspiral<-->sngl_inspiral coincidences"
    coinc_def_row.coinc_def_id = coinc_def_id
    coinc_def_row.search_coinc_type = 0
    coinc_def_table.append(coinc_def_row)

    # Pre-fetch all per-template, per-coinc and per-sngl column arrays.
    bank_col_names = ['mass1', 'mass2', 'spin1z', 'spin2z']
    bank_col_vals = {}
    for name in bank_col_names:
        bank_col_vals[name] = self.get_bankfile_array(name)

    coinc_event_names = ['ifar', 'time1', 'fap', 'stat']
    coinc_event_vals = {}
    for name in coinc_event_names:
        coinc_event_vals[name] = self.get_coincfile_array(name)

    sngl_col_names = ['snr', 'chisq', 'chisq_dof', 'bank_chisq',
                      'bank_chisq_dof', 'cont_chisq', 'cont_chisq_dof',
                      'end_time', 'template_duration', 'coa_phase',
                      'sigmasq']
    sngl_col_vals = {}
    for name in sngl_col_names:
        sngl_col_vals[name] = self.get_snglfile_array_dict(name)

    for idx in xrange(len(self.sort_arr)):
        # Set up IDs and mapping values
        coinc_id = lsctables.CoincID(idx)

        # Set up sngls
        # FIXME: As two-ifo is hardcoded loop over all ifos
        sngl_combined_mchirp = 0
        sngl_combined_mtot = 0
        for ifo in ifos:
            sngl_id = self.trig_id[ifo][idx]
            event_id = lsctables.SnglInspiralID(sngl_id)
            sngl = return_empty_sngl()
            sngl.event_id = event_id
            sngl.ifo = ifo
            for name in sngl_col_names:
                val = sngl_col_vals[name][ifo][idx]
                if name == 'end_time':
                    sngl.set_end(LIGOTimeGPS(val))
                else:
                    setattr(sngl, name, val)
            for name in bank_col_names:
                val = bank_col_vals[name][idx]
                setattr(sngl, name, val)
            # Derived mass quantities and effective distance.
            sngl.mtotal, sngl.eta = pnutils.mass1_mass2_to_mtotal_eta(
                sngl.mass1, sngl.mass2)
            sngl.mchirp, _ = pnutils.mass1_mass2_to_mchirp_eta(
                sngl.mass1, sngl.mass2)
            sngl.eff_distance = (sngl.sigmasq)**0.5 / sngl.snr
            sngl_combined_mchirp += sngl.mchirp
            sngl_combined_mtot += sngl.mtotal

            sngl_inspiral_table.append(sngl)

            # Set up coinc_map entry
            coinc_map_row = lsctables.CoincMap()
            coinc_map_row.table_name = 'sngl_inspiral'
            coinc_map_row.coinc_event_id = coinc_id
            coinc_map_row.event_id = event_id
            coinc_event_map_table.append(coinc_map_row)

        # Simple average of the per-ifo values.
        sngl_combined_mchirp = sngl_combined_mchirp / len(ifos)
        sngl_combined_mtot = sngl_combined_mtot / len(ifos)

        # Set up coinc inspiral and coinc event tables
        coinc_event_row = lsctables.Coinc()
        coinc_inspiral_row = lsctables.CoincInspiral()
        coinc_event_row.coinc_def_id = coinc_def_id
        coinc_event_row.nevents = len(ifos)
        coinc_event_row.instruments = ','.join(ifos)
        coinc_inspiral_row.set_ifos(ifos)
        coinc_event_row.time_slide_id = time_slide_id
        coinc_event_row.process_id = proc_id
        coinc_event_row.coinc_event_id = coinc_id
        coinc_inspiral_row.coinc_event_id = coinc_id
        coinc_inspiral_row.mchirp = sngl_combined_mchirp
        coinc_inspiral_row.mass = sngl_combined_mtot
        coinc_inspiral_row.set_end(\
            LIGOTimeGPS(coinc_event_vals['time1'][idx]))
        coinc_inspiral_row.snr = coinc_event_vals['stat'][idx]
        coinc_inspiral_row.false_alarm_rate = coinc_event_vals['fap'][idx]
        # IFAR is stored in years; invert then convert to Hz below.
        coinc_inspiral_row.combined_far = 1. / coinc_event_vals['ifar'][idx]
        # Transform to Hz
        coinc_inspiral_row.combined_far = \
            coinc_inspiral_row.combined_far / YRJUL_SI
        coinc_event_row.likelihood = 0.
        coinc_inspiral_row.minimum_duration = 0.
        coinc_event_table.append(coinc_event_row)
        coinc_inspiral_table.append(coinc_inspiral_row)

    outdoc.childNodes[0].appendChild(coinc_def_table)
    outdoc.childNodes[0].appendChild(coinc_event_table)
    outdoc.childNodes[0].appendChild(coinc_event_map_table)
    outdoc.childNodes[0].appendChild(time_slide_table)
    outdoc.childNodes[0].appendChild(coinc_inspiral_table)
    outdoc.childNodes[0].appendChild(sngl_inspiral_table)

    ligolw_utils.write_filename(outdoc, file_name)
def run_file_operation(outdoc, filenames, use_segment_table, operation,
                       preserve = True):
    """
    Performs an operation (intersect or union) across a set of files.
    That is, given a set of files each with segment definers DMT-FLAG1,
    DMT-FLAG2 etc the result is a file where

    DMT-FLAG1 = (file 1's DMT-FLAG1 operation file 2's DMT-FLAG1 operation ...)
    DMT-FLAG2 = (file 1's DMT-FLAG2 operation file 2's DMT-FLAG2 operation ...)

    etc
    """
    proc_id = table.get_table(outdoc, lsctables.ProcessTable.tableName)[0].process_id

    # load up the files into individual documents
    xmldocs = [ligolw_add.ligolw_add(ligolw.Document(), [fname])
               for fname in filenames]

    # Get the list of dinstinct segment_definers across all docs
    segment_definers = {}

    def register_definer(seg_def):
        # Record each (ifos, name, version) triple once; the dict is used
        # as an ordered-by-hash set of keys.
        key = (seg_def.ifos, seg_def.name, seg_def.version)
        segment_definers[key] = True
        return key

    for xmldoc in xmldocs:
        seg_def_table = table.get_table(xmldoc,
                                        lsctables.SegmentDefTable.tableName)
        # NOTE(review): relies on Python 2's eager map() for its side
        # effect of calling register_definer; under Python 3 map() is lazy
        # and this would register nothing — confirm the target runtime.
        map (register_definer, seg_def_table)

    # For each unique segment definer, find the intersection
    for ifo, name, version in segment_definers:
        if operation == INTERSECT:
            # If I were feeling especially functional-ist I'd write this
            # with reduce()
            result = pycbc_glue.segments.segmentlist(
                [pycbc_glue.segments.segment(
                    -pycbc_glue.segments.infinity(),
                    pycbc_glue.segments.infinity())])
            for xmldoc in xmldocs:
                result &= find_segments(xmldoc,
                                        '%s:%s:%d' % (ifo, name, version),
                                        use_segment_table)
        elif operation == UNION:
            result = pycbc_glue.segments.segmentlist([])
            for xmldoc in xmldocs:
                result |= find_segments(xmldoc,
                                        '%s:%s:%d' % (ifo, name, version),
                                        use_segment_table)
        elif operation == DIFF:
            result = find_segments(xmldocs[0],
                                   '%s:%s:%d' % (ifo, name, version),
                                   use_segment_table)
            for xmldoc in xmldocs[1:]:
                result -= find_segments(xmldoc,
                                        '%s:%s:%d' % (ifo, name, version),
                                        use_segment_table)
        else:
            raise NameError(
                "%s is not a known operation (intersect, union or diff)" %
                operation)

        # Add a segment definer for the result
        seg_def_id = add_to_segment_definer(outdoc, proc_id, ifo, name,
                                            version)

        # Add the segments
        if use_segment_table:
            add_to_segment(outdoc, proc_id, seg_def_id, result)
        else:
            add_to_segment_summary(outdoc, proc_id, seg_def_id, result)

    # If we're preserving, also load up everything into the output document.
    if preserve:
        # Add them to the output document
        map(lambda x: outdoc.appendChild(x.childNodes[0]), xmldocs)

        # Merge the ligolw elements and tables
        ligolw_add.merge_ligolws(outdoc)
        ligolw_add.merge_compatible_tables(outdoc)

    # NOTE(review): 'result' is the value from the *last* loop iteration
    # and is unbound if no segment definers were found — verify callers
    # never pass an empty input set.
    return outdoc, abs(result)
def make_exttrig_file(cp, ifos, sci_seg, out_dir):
    '''
    Make an ExtTrig xml file containing information on the external trigger

    Parameters
    ----------
    cp : pycbc.workflow.configuration.WorkflowConfigParser object
    The parsed configuration options of a pycbc.workflow.core.Workflow.

    ifos : str
    String containing the analysis interferometer IDs.

    sci_seg : glue.segments.segment
    The science segment for the analysis run.

    out_dir : str
    The output directory, destination for xml file.

    Returns
    -------
    xml_file : pycbc.workflow.File object
    The xml file with external trigger information.

    '''
    # Build an empty document holding a single ExtTriggers table row.
    grb_doc = ligolw.Document()
    grb_doc.appendChild(ligolw.LIGO_LW())
    ext_table = lsctables.New(lsctables.ExtTriggersTable)
    valid_cols = ext_table.validcolumns
    grb_doc.childNodes[-1].appendChild(ext_table)
    grb_row = ext_table.appendRow()

    # Known attributes for this GRB, read from the [workflow] section.
    grb_row.event_ra = float(cp.get("workflow", "ra"))
    grb_row.event_dec = float(cp.get("workflow", "dec"))
    grb_row.start_time = int(cp.get("workflow", "trigger-time"))
    grb_row.event_number_grb = str(cp.get("workflow", "trigger-name"))

    # Give every column not yet set a type-appropriate placeholder value.
    for col_name, col_type in valid_cols.items():
        if hasattr(grb_row, col_name):
            continue
        if col_type in ('real_4', 'real_8'):
            setattr(grb_row, col_name, 0.)
        elif col_type == 'int_4s':
            setattr(grb_row, col_name, 0)
        elif col_type == 'lstring':
            setattr(grb_row, col_name, '')
        elif col_name == 'process_id':
            grb_row.process_id = ilwd.ilwdchar(
                "external_trigger:process_id:0")
        elif col_name == 'event_id':
            grb_row.event_id = ilwd.ilwdchar("external_trigger:event_id:0")
        else:
            print("Column %s not recognized" % (col_name,), file=sys.stderr)
            raise ValueError

    # Serialize the document and wrap it in a workflow File object.
    xml_file_name = "triggerGRB%s.xml" % str(cp.get("workflow",
                                                    "trigger-name"))
    xml_file_path = os.path.join(out_dir, xml_file_name)
    utils.write_filename(grb_doc, xml_file_path)
    xml_file_url = urlparse.urljoin("file:",
                                    urllib.pathname2url(xml_file_path))
    xml_file = File(ifos, xml_file_name, sci_seg, file_url=xml_file_url)
    xml_file.PFN(xml_file_url, site="local")
    return xml_file
print "RUNNING DATAFIND" datafinds, scienceSegs = _workflow.setup_datafind_workflow( workflow, scienceSegs, dfDir, segsList) # This is needed to know what times will be analysed by daily ahope # Template bank stuff banks = _workflow.setup_tmpltbank_workflow(workflow, scienceSegs, datafinds, dfDir) # Do matched-filtering insps = _workflow.setup_matchedfltr_workflow(workflow, scienceSegs, datafinds, banks, dfDir) # Now construct the summary XML file outdoc = ligolw.Document() outdoc.appendChild(ligolw.LIGO_LW()) # FIXME: PROGRAM NAME and dictionary of opts should be variables defined up above proc_id = ligolw_process.register_to_xmldoc(outdoc, 'dayhopetest', vars(args)).process_id for ifo in workflow.ifos: # Lets get the segment lists we need segIfoFiles = segsList.find_output_with_ifo(ifo) # SCIENCE sciSegFile = segIfoFiles.find_output_with_tag('SCIENCE') assert (len(sciSegFile) == 1) sciSegFile = sciSegFile[0] sciSegs = sciSegFile.segmentList # SCIENCE_OK sciokSegFile = segIfoFiles.find_output_with_tag('SCIENCE_OK') assert (len(sciokSegFile) == 1)