def save(self, filename):
    """Write this trigger to GraceDB-compatible XML format

    Parameters
    ----------
    filename: str
        Name of file to write to disk.
    """
    ligolw_utils.write_filename(self.outdoc, filename)
def output_sngl_inspiral_table(outputFile, tempBank, metricParams,
                               ethincaParams, programName="", optDict=None,
                               outdoc=None, **kwargs):
    """
    Function that converts the information produced by the various PyCBC bank
    generation codes into a valid LIGOLW XML file containing a sngl_inspiral
    table and outputs to file.

    Parameters
    ----------
    outputFile : string
        Name of the file that the bank will be written to
    tempBank : iterable
        Each entry in the tempBank iterable should be a sequence of
        [mass1, mass2, spin1z, spin2z] in that order.
    metricParams : metricParameters instance
        Structure holding all the options for construction of the metric
        and the eigenvalues, eigenvectors and covariance matrix
        needed to manipulate the space.
    ethincaParams : {ethincaParameters instance, None}
        Structure holding options relevant to the ethinca metric computation
        including the upper frequency cutoff to be used for filtering.
        NOTE: The computation is currently only valid for non-spinning systems
        and uses the TaylorF2 approximant.
    programName (keyword argument) : string
        Name of the executable that has been run
    optDict (keyword argument) : dictionary
        Dictionary of the command line arguments passed to the program
    outdoc (keyword argument) : ligolw xml document
        If given, add the template bank to this representation of an XML
        document and write it to disk. If not given, create a new document.
    kwargs : keyword arguments
        All other keyword arguments are passed directly to
        ligolw_process.register_to_xmldoc
    """
    if optDict is None:
        optDict = {}
    if outdoc is None:
        outdoc = ligolw.Document()
        outdoc.appendChild(ligolw.LIGO_LW())

    # get IFO to put in search summary table
    ifos = []
    if 'channel_name' in optDict.keys():
        if optDict['channel_name'] is not None:
            ifos = [optDict['channel_name'][0:2]]

    proc_id = ligolw_process.register_to_xmldoc(outdoc, programName, optDict,
                                                ifos=ifos, **kwargs).process_id
    sngl_inspiral_table = convert_to_sngl_inspiral_table(tempBank, proc_id)

    # Calculate Gamma components if needed
    if ethincaParams is not None:
        if ethincaParams.doEthinca:
            for sngl in sngl_inspiral_table:
                # Set tau_0 and tau_3 values needed for the calculation of
                # ethinca metric distances
                (sngl.tau0, sngl.tau3) = pnutils.mass1_mass2_to_tau0_tau3(
                    sngl.mass1, sngl.mass2, metricParams.f0)
                fMax_theor, GammaVals = calculate_ethinca_metric_comps(
                    metricParams, ethincaParams, sngl.mass1, sngl.mass2,
                    spin1z=sngl.spin1z, spin2z=sngl.spin2z,
                    full_ethinca=ethincaParams.full_ethinca)
                # assign the upper frequency cutoff and Gamma0-5 values
                sngl.f_final = fMax_theor
                for i in range(len(GammaVals)):
                    setattr(sngl, "Gamma" + str(i), GammaVals[i])
        # If Gamma metric components are not wanted, assign f_final from an
        # upper frequency cutoff specified in ethincaParams
        elif ethincaParams.cutoff is not None:
            for sngl in sngl_inspiral_table:
                sngl.f_final = pnutils.frequency_cutoff_from_name(
                    ethincaParams.cutoff, sngl.mass1, sngl.mass2,
                    sngl.spin1z, sngl.spin2z)

    # set per-template low-frequency cutoff
    if 'f_low_column' in optDict and 'f_low' in optDict and \
            optDict['f_low_column'] is not None:
        for sngl in sngl_inspiral_table:
            setattr(sngl, optDict['f_low_column'], optDict['f_low'])

    outdoc.childNodes[0].appendChild(sngl_inspiral_table)

    # get times to put in search summary table
    start_time = 0
    end_time = 0
    if 'gps_start_time' in optDict.keys() and 'gps_end_time' in optDict.keys():
        start_time = optDict['gps_start_time']
        end_time = optDict['gps_end_time']

    # make search summary table
    search_summary_table = lsctables.New(lsctables.SearchSummaryTable)
    search_summary = return_search_summary(
        start_time, end_time, len(sngl_inspiral_table), ifos, **kwargs)
    search_summary_table.append(search_summary)
    outdoc.childNodes[0].appendChild(search_summary_table)

    # write the xml doc to disk
    ligolw_utils.write_filename(outdoc, outputFile,
                                gz=outputFile.endswith('.gz'))
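# A minimal usage sketch for the function above. It assumes the function is
# importable from pycbc.tmpltbank, as in the PyCBC source tree; the bank
# entries and GPS times below are illustrative. Passing ethincaParams=None
# skips the Gamma computation, so metricParams is never dereferenced and may
# also be None here.
from pycbc.tmpltbank import output_sngl_inspiral_table

bank = [(1.4, 1.4, 0., 0.), (10., 1.4, 0.3, 0.)]  # m1, m2, s1z, s2z
output_sngl_inspiral_table('BANK.xml.gz', bank, None, None,
                           programName='example_bank_writer',
                           optDict={'gps_start_time': 1000000000,
                                    'gps_end_time': 1000002048})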
def to_coinc_xml_object(self, file_name):
    # FIXME: This function will only work with two ifos!!
    outdoc = ligolw.Document()
    outdoc.appendChild(ligolw.LIGO_LW())

    ifos = list(self.sngl_files.keys())
    proc_id = ligolw_process.register_to_xmldoc(
        outdoc, 'pycbc', {}, ifos=ifos, comment='',
        version=pycbc_version.git_hash,
        cvs_repository='pycbc/' + pycbc_version.git_branch,
        cvs_entry_time=pycbc_version.date).process_id

    search_summ_table = lsctables.New(lsctables.SearchSummaryTable)
    coinc_h5file = self.coinc_file.h5file
    start_time = coinc_h5file['segments']['coinc']['start'][:].min()
    end_time = coinc_h5file['segments']['coinc']['end'][:].max()
    num_trigs = len(self.sort_arr)
    search_summary = return_search_summary(
        start_time, end_time, num_trigs, ifos)
    search_summ_table.append(search_summary)
    outdoc.childNodes[0].appendChild(search_summ_table)

    sngl_inspiral_table = lsctables.New(lsctables.SnglInspiralTable)
    coinc_def_table = lsctables.New(lsctables.CoincDefTable)
    coinc_event_table = lsctables.New(lsctables.CoincTable)
    coinc_inspiral_table = lsctables.New(lsctables.CoincInspiralTable)
    coinc_event_map_table = lsctables.New(lsctables.CoincMapTable)
    time_slide_table = lsctables.New(lsctables.TimeSlideTable)

    # Set up time_slide table
    time_slide_id = lsctables.TimeSlideID(0)
    for ifo in ifos:
        time_slide_row = lsctables.TimeSlide()
        time_slide_row.instrument = ifo
        time_slide_row.time_slide_id = time_slide_id
        time_slide_row.offset = 0
        time_slide_row.process_id = proc_id
        time_slide_table.append(time_slide_row)

    # Set up coinc_definer table
    coinc_def_id = lsctables.CoincDefID(0)
    coinc_def_row = lsctables.CoincDef()
    coinc_def_row.search = "inspiral"
    coinc_def_row.description = "sngl_inspiral<-->sngl_inspiral coincidences"
    coinc_def_row.coinc_def_id = coinc_def_id
    coinc_def_row.search_coinc_type = 0
    coinc_def_table.append(coinc_def_row)

    bank_col_names = ['mass1', 'mass2', 'spin1z', 'spin2z']
    bank_col_vals = {}
    for name in bank_col_names:
        bank_col_vals[name] = self.get_bankfile_array(name)

    coinc_event_names = ['ifar', 'time1', 'fap', 'stat']
    coinc_event_vals = {}
    for name in coinc_event_names:
        coinc_event_vals[name] = self.get_coincfile_array(name)

    sngl_col_names = ['snr', 'chisq', 'chisq_dof', 'bank_chisq',
                      'bank_chisq_dof', 'cont_chisq', 'cont_chisq_dof',
                      'end_time', 'template_duration', 'coa_phase',
                      'sigmasq']
    sngl_col_vals = {}
    for name in sngl_col_names:
        sngl_col_vals[name] = self.get_snglfile_array_dict(name)

    for idx in range(len(self.sort_arr)):
        # Set up IDs and mapping values
        coinc_id = lsctables.CoincID(idx)

        # Set up sngls
        # FIXME: As two-ifo is hardcoded loop over all ifos
        sngl_combined_mchirp = 0
        sngl_combined_mtot = 0
        for ifo in ifos:
            sngl_id = self.trig_id[ifo][idx]
            event_id = lsctables.SnglInspiralID(sngl_id)
            sngl = return_empty_sngl()
            sngl.event_id = event_id
            sngl.ifo = ifo
            for name in sngl_col_names:
                val = sngl_col_vals[name][ifo][idx]
                if name == 'end_time':
                    sngl.set_end(LIGOTimeGPS(val))
                else:
                    setattr(sngl, name, val)
            for name in bank_col_names:
                val = bank_col_vals[name][idx]
                setattr(sngl, name, val)
            sngl.mtotal, sngl.eta = pnutils.mass1_mass2_to_mtotal_eta(
                sngl.mass1, sngl.mass2)
            sngl.mchirp, _ = pnutils.mass1_mass2_to_mchirp_eta(
                sngl.mass1, sngl.mass2)
            sngl.eff_distance = (sngl.sigmasq)**0.5 / sngl.snr
            sngl_combined_mchirp += sngl.mchirp
            sngl_combined_mtot += sngl.mtotal
            sngl_inspiral_table.append(sngl)

            # Set up coinc_map entry
            coinc_map_row = lsctables.CoincMap()
            coinc_map_row.table_name = 'sngl_inspiral'
            coinc_map_row.coinc_event_id = coinc_id
            coinc_map_row.event_id = event_id
            coinc_event_map_table.append(coinc_map_row)

        sngl_combined_mchirp = sngl_combined_mchirp / len(ifos)
        sngl_combined_mtot = sngl_combined_mtot / len(ifos)

        # Set up coinc inspiral and coinc event tables
        coinc_event_row = lsctables.Coinc()
        coinc_inspiral_row = lsctables.CoincInspiral()
        coinc_event_row.coinc_def_id = coinc_def_id
        coinc_event_row.nevents = len(ifos)
        coinc_event_row.instruments = ','.join(ifos)
        coinc_inspiral_row.set_ifos(ifos)
        coinc_event_row.time_slide_id = time_slide_id
        coinc_event_row.process_id = proc_id
        coinc_event_row.coinc_event_id = coinc_id
        coinc_inspiral_row.coinc_event_id = coinc_id
        coinc_inspiral_row.mchirp = sngl_combined_mchirp
        coinc_inspiral_row.mass = sngl_combined_mtot
        coinc_inspiral_row.set_end(
            LIGOTimeGPS(coinc_event_vals['time1'][idx]))
        coinc_inspiral_row.snr = coinc_event_vals['stat'][idx]
        coinc_inspiral_row.false_alarm_rate = coinc_event_vals['fap'][idx]
        coinc_inspiral_row.combined_far = 1. / coinc_event_vals['ifar'][idx]
        # Transform from yr^-1 to Hz
        coinc_inspiral_row.combined_far = \
            coinc_inspiral_row.combined_far / YRJUL_SI
        coinc_event_row.likelihood = 0.
        coinc_inspiral_row.minimum_duration = 0.
        coinc_event_table.append(coinc_event_row)
        coinc_inspiral_table.append(coinc_inspiral_row)

    outdoc.childNodes[0].appendChild(coinc_def_table)
    outdoc.childNodes[0].appendChild(coinc_event_table)
    outdoc.childNodes[0].appendChild(coinc_event_map_table)
    outdoc.childNodes[0].appendChild(time_slide_table)
    outdoc.childNodes[0].appendChild(coinc_inspiral_table)
    outdoc.childNodes[0].appendChild(sngl_inspiral_table)

    ligolw_utils.write_filename(outdoc, file_name)
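# Illustrative call pattern only: in the PyCBC source this method lives on
# the HDF foreground-trigger helper class, so `fore` below stands for an
# already-constructed instance of that class (an assumption, not shown in
# this section).
#
#     fore.to_coinc_xml_object('H1L1-FOREGROUND_COINC.xml')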
def make_exttrig_file(cp, ifos, sci_seg, out_dir):
    '''
    Make an ExtTrig xml file containing information on the external trigger

    Parameters
    ----------
    cp : pycbc.workflow.configuration.WorkflowConfigParser object
        The parsed configuration options of a pycbc.workflow.core.Workflow.
    ifos : str
        String containing the analysis interferometer IDs.
    sci_seg : glue.segments.segment
        The science segment for the analysis run.
    out_dir : str
        The output directory, destination for xml file.

    Returns
    -------
    xml_file : pycbc.workflow.File object
        The xml file with external trigger information.
    '''
    # Initialise objects
    xmldoc = ligolw.Document()
    xmldoc.appendChild(ligolw.LIGO_LW())
    tbl = lsctables.New(lsctables.ExtTriggersTable)
    cols = tbl.validcolumns
    xmldoc.childNodes[-1].appendChild(tbl)
    row = tbl.appendRow()

    # Add known attributes for this GRB
    setattr(row, "event_ra", float(cp.get("workflow", "ra")))
    setattr(row, "event_dec", float(cp.get("workflow", "dec")))
    setattr(row, "start_time", int(cp.get("workflow", "trigger-time")))
    setattr(row, "event_number_grb", str(cp.get("workflow", "trigger-name")))

    # Fill in the remaining, empty columns of this row
    for entry in cols.keys():
        if not hasattr(row, entry):
            if cols[entry] in ['real_4', 'real_8']:
                setattr(row, entry, 0.)
            elif cols[entry] == 'int_4s':
                setattr(row, entry, 0)
            elif cols[entry] == 'lstring':
                setattr(row, entry, '')
            elif entry == 'process_id':
                row.process_id = ilwd.ilwdchar("external_trigger:process_id:0")
            elif entry == 'event_id':
                row.event_id = ilwd.ilwdchar("external_trigger:event_id:0")
            else:
                print("Column %s not recognized" % entry, file=sys.stderr)
                raise ValueError

    # Save file
    xml_file_name = "triggerGRB%s.xml" % str(cp.get("workflow",
                                                    "trigger-name"))
    xml_file_path = os.path.join(out_dir, xml_file_name)
    utils.write_filename(xmldoc, xml_file_path)
    xml_file_url = urlparse.urljoin("file:",
                                    urllib.pathname2url(xml_file_path))
    xml_file = File(ifos, xml_file_name, sci_seg, file_url=xml_file_url)
    xml_file.PFN(xml_file_url, site="local")

    return xml_file
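# Hedged usage sketch: `cp` must be a WorkflowConfigParser whose [workflow]
# section defines ra, dec, trigger-time and trigger-name, exactly the keys
# the function reads above. The segment boundaries and directory name are
# illustrative.
#
#     sci_seg = segments.segment(1126051217, 1126056217)
#     xml_file = make_exttrig_file(cp, 'H1L1', sci_seg, 'output/')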
# And add these to the output file
# Start with the segment summary
summSegs = segments.segmentlist([workflow.analysis_time])
sci_def_id = segmentdb_utils.add_to_segment_definer(
    outdoc, proc_id, ifo, "CBC_DAYHOPE_SCIENCE", 0)
sciok_def_id = segmentdb_utils.add_to_segment_definer(
    outdoc, proc_id, ifo, "CBC_DAYHOPE_SCIENCE_OK", 0)
sciavailable_def_id = segmentdb_utils.add_to_segment_definer(
    outdoc, proc_id, ifo, "CBC_DAYHOPE_SCIENCE_AVAILABLE", 0)
analysable_def_id = segmentdb_utils.add_to_segment_definer(
    outdoc, proc_id, ifo, "CBC_DAYHOPE_ANALYSABLE", 0)

segmentdb_utils.add_to_segment(outdoc, proc_id, sci_def_id, sciSegs)
segmentdb_utils.add_to_segment(outdoc, proc_id, sciok_def_id, sciokSegs)
segmentdb_utils.add_to_segment(outdoc, proc_id, sciavailable_def_id,
                               sciavailableSegs)
segmentdb_utils.add_to_segment(outdoc, proc_id, analysable_def_id,
                               analysableSegs)

segmentdb_utils.add_to_segment_summary(outdoc, proc_id, sci_def_id,
                                       summSegs, comment='')
segmentdb_utils.add_to_segment_summary(outdoc, proc_id, sciok_def_id,
                                       summSegs, comment='')
segmentdb_utils.add_to_segment_summary(outdoc, proc_id,
                                       sciavailable_def_id, summSegs,
                                       comment='')
segmentdb_utils.add_to_segment_summary(outdoc, proc_id, analysable_def_id,
                                       summSegs, comment='')

ligolw_utils.write_filename(outdoc, "SUMMARY.xml")
def save(self, path, group=None, ifo='P1'):
    """
    Save frequency series to a Numpy .npy, HDF5, LIGOLW XML, or text file.
    The first column contains the sample frequencies, the second contains
    the values. In the case of a complex frequency series saved as text,
    the imaginary part is written as a third column. When using hdf format,
    the data is stored as a single vector, along with relevant attributes.

    Parameters
    ----------
    path: string
        Destination file path. Must end with .hdf, .npy, .txt, .xml or
        .xml.gz.
    group: string
        Additional name for internal storage use. Ex. hdf storage uses
        this as the key value.
    ifo: string
        Instrument name used to label the series when writing XML.

    Raises
    ------
    ValueError
        If path does not end in one of the supported extensions.
    """
    ext = _os.path.splitext(path)[1]
    if ext == '.npy':
        output = _numpy.vstack((self.sample_frequencies.numpy(),
                                self.numpy())).T
        _numpy.save(path, output)
    elif ext == '.txt':
        if self.kind == 'real':
            output = _numpy.vstack((self.sample_frequencies.numpy(),
                                    self.numpy())).T
        elif self.kind == 'complex':
            output = _numpy.vstack((self.sample_frequencies.numpy(),
                                    self.numpy().real,
                                    self.numpy().imag)).T
        _numpy.savetxt(path, output)
    elif ext == '.xml' or path.endswith('.xml.gz'):
        from pycbc.io.live import make_psd_xmldoc
        from pycbc.ligolw import utils
        if self.kind != 'real':
            raise ValueError('XML only supports real frequency series')
        output = self.lal()
        # When writing in this format we must *not* have the 0 values at
        # frequencies less than flow. To resolve this we set the first
        # non-zero value < flow.
        data_lal = output.data.data
        first_idx = _numpy.argmax(data_lal > 0)
        if not first_idx == 0:
            data_lal[:first_idx] = data_lal[first_idx]
        psddict = {ifo: output}
        utils.write_filename(make_psd_xmldoc(psddict), path,
                             gz=path.endswith(".gz"))
    elif ext == '.hdf':
        key = 'data' if group is None else group
        # open in append mode so existing groups in the file are kept
        with h5py.File(path, 'a') as f:
            ds = f.create_dataset(key, data=self.numpy(),
                                  compression='gzip', compression_opts=9,
                                  shuffle=True)
            ds.attrs['epoch'] = float(self.epoch)
            ds.attrs['delta_f'] = float(self.delta_f)
    else:
        raise ValueError('Path must end with .npy, .txt, .xml, .xml.gz '
                         'or .hdf')
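# A short, runnable sketch of the save() method above, assuming a working
# PyCBC installation (pycbc.types.FrequencySeries is the class this method
# belongs to in the source tree). The series contents and file names are
# illustrative.
import numpy
from pycbc.types import FrequencySeries

series = FrequencySeries(numpy.ones(1024), delta_f=0.25, epoch=0)
series.save('series.txt')              # two-column ASCII: frequency, value
series.save('series.hdf', group='H1')  # vector stored under the 'H1' key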
def upload(self, fname, psds, low_frequency_cutoff, testing=True,
           extra_strings=None):
    """Upload this trigger to GraceDB

    Parameters
    ----------
    fname: str
        The name to give the xml file associated with this trigger
    psds: dict of pycbc.types.FrequencySeries
        An ifo-keyed dictionary of psds to be uploaded in association
        with this trigger.
    low_frequency_cutoff: float
        The low frequency cutoff of the psds.
    testing: bool
        Switch to determine if the upload should be sent to gracedb as a
        test trigger (True) or a production trigger (False).
    extra_strings: list of str, optional
        Additional log messages to post to GraceDB with the event.
    """
    from ligo.gracedb.rest import GraceDb

    # first of all, make sure the event and PSDs are saved on disk
    # as GraceDB operations can fail later
    self.save(fname)

    psds_lal = {}
    for ifo in psds:
        psd = psds[ifo]
        kmin = int(low_frequency_cutoff / psd.delta_f)
        fseries = lal.CreateREAL8FrequencySeries(
            "psd", psd.epoch, low_frequency_cutoff, psd.delta_f,
            lal.StrainUnit**2 / lal.HertzUnit, len(psd) - kmin)
        fseries.data.data = psd.numpy()[kmin:] / pycbc.DYN_RANGE_FAC ** 2.0
        psds_lal[ifo] = fseries
    psd_xmldoc = make_psd_xmldoc(psds_lal)
    psd_xml_path = os.path.splitext(fname)[0] + '-psd.xml.gz'
    ligolw_utils.write_filename(psd_xmldoc, psd_xml_path, gz=True)

    if self.upload_snr_series:
        snr_series_fname = os.path.splitext(fname)[0] + '.hdf'
        for ifo in self.snr_series:
            self.snr_series[ifo].save(snr_series_fname,
                                      group='%s/snr' % ifo)
            self.snr_series_psd[ifo].save(snr_series_fname,
                                          group='%s/psd' % ifo)

    # try connecting to GraceDB
    try:
        gracedb = GraceDb(self.gracedb_server) \
                if self.gracedb_server is not None else GraceDb()
    except Exception as exc:
        logging.error('Cannot connect to GraceDB')
        logging.error(str(exc))
        logging.error('Carrying on, but event %s will NOT be uploaded!',
                      fname)
        return None

    # create GraceDB event
    group = 'Test' if testing else 'CBC'
    try:
        r = gracedb.createEvent(group, "pycbc", fname, "AllSky").json()
    except Exception as exc:
        logging.error('Cannot create GraceDB event')
        logging.error(str(exc))
        logging.error('Carrying on, but event %s will NOT be uploaded!',
                      fname)
        return None
    logging.info("Uploaded event %s", r["graceid"])

    if self.is_hardware_injection:
        try:
            gracedb.writeLabel(r['graceid'], 'INJ')
        except Exception as exc:
            logging.error("Cannot tag event %s as an injection",
                          r["graceid"])
            logging.error(str(exc))
        logging.info("Tagging event %s as an injection", r["graceid"])

    # upload PSDs
    try:
        gracedb.writeLog(r["graceid"],
                         "PyCBC PSD estimate from the time of event",
                         "psd.xml.gz", open(psd_xml_path, "rb").read(),
                         "psd").json()
    except Exception as exc:
        logging.error("Cannot upload PSDs for event %s", r["graceid"])
        logging.error(str(exc))
    logging.info("Uploaded PSDs for event %s", r["graceid"])

    # add other tags and comments
    try:
        gracedb.writeLog(
            r["graceid"],
            "Using PyCBC code hash %s" % pycbc_version.git_hash).json()
        extra_strings = [] if extra_strings is None else extra_strings
        for text in extra_strings:
            gracedb.writeLog(r["graceid"], text).json()
    except Exception as exc:
        logging.error("Cannot write comments for event %s", r["graceid"])
        logging.error(str(exc))

    # upload SNR series in HDF format
    if self.upload_snr_series:
        try:
            gracedb.writeFile(r['graceid'], snr_series_fname)
        except Exception as exc:
            logging.error("Cannot upload HDF SNR series for event %s",
                          r["graceid"])
            logging.error(str(exc))

    return r['graceid']
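# Hedged usage sketch: `event` stands for an instance of the event class
# that defines upload() above, and `psds` for an ifo-keyed dict of PyCBC
# FrequencySeries PSDs; both are assumptions, not defined in this section.
#
#     graceid = event.upload('candidate.xml.gz', psds,
#                            low_frequency_cutoff=20.0, testing=True)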