def lalburst_sb_to_glue_sb(sb_in, desired_columns):
    """Translate a lalburst SnglBurst structure into a glue SnglBurst row.

    Only the attributes named in ``desired_columns`` are copied; GPS times
    are split into integer seconds + nanoseconds, IDs are wrapped as ilwd
    chars, and attributes missing from the input are silently skipped.
    """
    out = lsctables.SnglBurstTable.RowType()
    for column in desired_columns:
        if column == "start_time":
            # Split the float GPS time into seconds and nanoseconds.
            gps = float(sb_in.start_time)
            out.start_time = int(gps)
            out.start_time_ns = int(1e9 * (gps - int(gps)))
        elif column == "peak_time":
            gps = float(sb_in.peak_time)
            out.peak_time = int(gps)
            out.peak_time_ns = int(1e9 * (gps - int(gps)))
        elif column == "process_id":
            out.process_id = ilwd.ilwdchar("process:process_id:%d" % getattr(sb_in, column))
        elif column == "event_id":
            out.event_id = ilwd.ilwdchar("sngl_burst:sngl_burst_id:%d" % getattr(sb_in, column))
        else:
            # Plain attribute copy; ignore columns the input lacks.
            try:
                setattr(out, column, getattr(sb_in, column))
            except AttributeError:
                pass
    return out
def return_empty_sngl():
    """
    Build a SnglInspiral object with every valid column populated by a
    false-y default ('' for strings, 0/0. for ints/floats), so that tables
    carrying columns you do not care about can still be written out.

    NOTE: process_id and event_id are also given index-0 placeholder
    values; most applications should overwrite them.

    Returns
    -------
    lsctables.SnglInspiral
        The "empty" SnglInspiral object.
    """
    sngl = lsctables.SnglInspiral()
    for name, coltype in lsctables.SnglInspiralTable.validcolumns.items():
        if coltype in ('real_4', 'real_8'):
            setattr(sngl, name, 0.)
        elif coltype == 'int_4s':
            setattr(sngl, name, 0)
        elif coltype == 'lstring':
            setattr(sngl, name, '')
        elif name == 'process_id':
            sngl.process_id = ilwd.ilwdchar("process:process_id:0")
        elif name == 'event_id':
            sngl.event_id = ilwd.ilwdchar("sngl_inspiral:event_id:0")
        else:
            raise ValueError("Column %s not recognized" % (name))
    return sngl
def return_empty_sngl():
    """
    Create a SnglInspiral object whose columns are all populated with
    values that test False (strings -> '', ints/floats -> 0). This avoids
    errors when creating a table containing columns you don't care about,
    but which still need populating.

    NOTE: process_id and event_id are likewise set to placeholder index-0
    IDs; set them to their correct values for most applications.

    Returns
    -------
    lsctables.SnglInspiral
        The "empty" SnglInspiral object.
    """
    # Map each plain column type to its false-y default value.
    defaults = {'real_4': 0., 'real_8': 0., 'int_4s': 0, 'lstring': ''}
    sngl = lsctables.SnglInspiral()
    cols = lsctables.SnglInspiralTable.validcolumns
    for entry in cols.keys():
        coltype = cols[entry]
        if coltype in defaults:
            setattr(sngl, entry, defaults[coltype])
        elif entry == 'process_id':
            sngl.process_id = ilwd.ilwdchar("process:process_id:0")
        elif entry == 'event_id':
            sngl.event_id = ilwd.ilwdchar("sngl_inspiral:event_id:0")
        else:
            raise ValueError("Column %s not recognized" % (entry))
    return sngl
def create_empty_row(obj):
    """Return a sim_inspiral row whose columns carry defaults of 0.0 for
    floats, 0 for ints, '' for strings, and index-0 ilwd IDs for the
    process_id and simulation_id columns.
    """
    # check if sim_inspiral or sngl_inspiral
    row = lsctables.SimInspiral()
    # populate columns with default values
    for entry, coltype in lsctables.SimInspiralTable.validcolumns.items():
        if coltype == 'real_4' or coltype == 'real_8':
            setattr(row, entry, 0.)
        elif coltype == 'int_4s':
            setattr(row, entry, 0)
        elif coltype == 'lstring':
            setattr(row, entry, '')
        elif entry == 'process_id':
            row.process_id = ilwd.ilwdchar("sim_inspiral:process_id:0")
        elif entry == 'simulation_id':
            row.simulation_id = ilwd.ilwdchar("sim_inspiral:simulation_id:0")
        else:
            raise ValueError("Column %s not recognized." % (entry))
    return row
def create_empty_row(obj):
    """Create an empty sim_inspiral row with default column values:
    0.0 for a float, 0 for an int, '' for a string. The ilwd columns
    are given IDs with index 0.
    """
    # check if sim_inspiral or sngl_inspiral
    row = lsctables.SimInspiral()
    cols = lsctables.SimInspiralTable.validcolumns
    # Map each LIGO_LW column type to its false-y default.
    type_defaults = {'real_4': 0., 'real_8': 0., 'int_4s': 0, 'lstring': ''}
    for entry in cols.keys():
        if cols[entry] in type_defaults:
            setattr(row, entry, type_defaults[cols[entry]])
        elif entry == 'process_id':
            row.process_id = ilwd.ilwdchar("sim_inspiral:process_id:0")
        elif entry == 'simulation_id':
            row.simulation_id = ilwd.ilwdchar("sim_inspiral:simulation_id:0")
        else:
            raise ValueError("Column %s not recognized." % (entry))
    return row
def write_coinc_tables( vetotrigs, xmldoc, refchannel, twind, time_slide_id=None):
    """
    Write a set of coinc tables for this round. We only write coincidences
    for coincs with refchannel.
    Note: This is probably gonna be slow... aaaand that's why we implemented
    the real algorithm in C.

    Parameters
    ----------
    vetotrigs : iterable of SnglBurst rows to scan for coincidences
    xmldoc : ligolw.Document the tables are appended to (also returned)
    refchannel : str, name of the reference channel
    twind : float, full width of the coincidence window in seconds
    time_slide_id : existing time slide ID, or None to insert a
        zero-offset slide.
    """
    # Retrieve the laldetchar-hveto process row for its process_id.
    process = [p for p in table.get_table(xmldoc, lsctables.ProcessTable.tableName)
               if p.program == "laldetchar-hveto"][0]
    process_id = process.process_id

    # Insert a time slide ID. It's not yet really necessary
    if time_slide_id is None:
        timeslidetable = lsctables.New(lsctables.TimeSlideTable)
        # BUGFIX: instantiate the row class -- the original assigned the
        # class object itself, mutated it, and appended it to the table.
        time_slide = timeslidetable.RowType()
        time_slide.process_id = process_id
        time_slide.time_slide_id = time_slide_id = ilwd.ilwdchar("time_slide:time_slide_id:0")
        time_slide.instrument = opt.instrument
        time_slide.offset = 0.0
        timeslidetable.append(time_slide)
        xmldoc.childNodes[0].appendChild(timeslidetable)

    # Set up coinc tables
    coinc_def = HVetoBBCoincDef
    coincdeftable = lsctables.New(lsctables.CoincDefTable)
    coinc_def.coinc_def_id = coinc_def_id = coincdeftable.get_next_id()
    coincdeftable.append(coinc_def)
    xmldoc.childNodes[0].appendChild(coincdeftable)

    coinc_def = HVetoCoincTables(xmldoc)
    # Pre-compute the window around each reference-channel trigger peak.
    reftrigs = [(segment(sb.get_peak() - twind / 2.0, sb.get_peak() + twind / 2.0), sb)
                for sb in vetotrigs if sb.channel == refchannel]
    for vt in vetotrigs:
        if vt.channel == refchannel:
            continue
        for (s, t) in reftrigs:
            if vt.get_peak() in s:
                coinc_def.append_coinc(process_id, time_slide_id, coinc_def_id, (t, vt))
    return xmldoc
def write_to_xml(cells, intr_prms, fvals=None, fname=None, verbose=False):
    """
    Write a set of cells, with dimensions corresponding to intr_prms, to an
    XML file as sim_inspiral rows.

    Parameters
    ----------
    cells : iterable of cell objects exposing a ``_center`` sequence
    intr_prms : sequence of sim_inspiral column names, one per dimension
    fvals : optional sequence stored per-row in the alpha1 column
    fname : optional output filename; a default is generated when None
    verbose : passed through to the XML writer
    """
    xmldoc = ligolw.Document()
    xmldoc.appendChild(ligolw.LIGO_LW())
    procrow = process.append_process(xmldoc, program=sys.argv[0])
    procid = procrow.process_id
    process.append_process_params(xmldoc, procrow, process.process_params_from_dict(opts.__dict__))

    rows = ["simulation_id", "process_id", "numrel_data"] + list(intr_prms)
    if fvals is not None:
        rows.append("alpha1")
    sim_insp_tbl = lsctables.New(lsctables.SimInspiralTable, rows)
    for itr, intr_prm in enumerate(cells):
        sim_insp = sim_insp_tbl.RowType()
        # FIXME: Need better IDs
        sim_insp.numrel_data = "INTR_SET_%d" % itr
        sim_insp.simulation_id = ilwd.ilwdchar("sim_inspiral:sim_inspiral_id:%d" % itr)
        sim_insp.process_id = procid
        # BUGFIX: test against None (matching the column declaration above)
        # so the alpha1 column is handled consistently even when fvals is
        # an empty or otherwise false-y sequence.
        if fvals is not None:
            sim_insp.alpha1 = fvals[itr]
        for p, v in zip(intr_prms, intr_prm._center):
            setattr(sim_insp, p, v)
        sim_insp_tbl.append(sim_insp)
    xmldoc.childNodes[0].appendChild(sim_insp_tbl)

    if fname is None:
        channel_name = ["H=H", "L=L"]
        ifos = "".join([o.split("=")[0][0] for o in channel_name])
        #start = int(event_time)
        start = 0
        fname = "%s-MASS_POINTS-%d-1.xml.gz" % (ifos, start)
    utils.write_filename(xmldoc, fname, gz=True, verbose=verbose)
def _row(self, sim=None, slide_id=1):
    """
    Produce a simburst table row for this waveform.

    Parameters
    ----------
    sim : table
       The table which the row should be made for.
       If this is left empty the table is assumed to be a
       sim_burst_table.

    slide_id : int
       The timeslide id. Defaults to 1.
    """
    if not sim:
        sim = self.sim
    row = sim.RowType()
    for a in lsctables.SimBurstTable.validcolumns.keys():
        setattr(row, a, self.params[a])
    row.waveform = self.waveform
    # Fill in the time
    row.set_time_geocent(GPS(float(self.time)))
    # Get the sky locations
    row.ra, row.dec, row.psi = self.sky_dist()
    row.simulation_id = sim.get_next_id()
    row.waveform_number = random.randint(0, int(2**32) - 1)
    # BUGFIX: wrap the id in an ilwdchar like time_slide_id below; the
    # original assigned a bare string, which is not a valid ilwd ID.
    # FIXME: this should come from the actual process row (procrow).
    row.process_id = ilwd.ilwdchar("process:process_id:0")
    row.time_slide_id = ilwd.ilwdchar("time_slide:time_slide_id:%d" % slide_id)
    return row
def test_to_table_type_ilwd():
    """An ilwdchar whose column doesn't match the target must be rejected."""
    from glue.ligolw.ilwd import ilwdchar
    from glue.ligolw.lsctables import SnglBurstTable
    # Renamed from `ilwd` to avoid shadowing the module name used elsewhere.
    pid = ilwdchar('process:process_id:0')
    with pytest.raises(ValueError) as exc:
        io_ligolw.to_table_type(pid, SnglBurstTable, 'event_id')
    assert str(exc.value) == ("ilwdchar 'process:process_id:0' doesn't "
                              "match column 'event_id'")
def test_to_table_type_glue_ligolw(value, name, result):
    """Converted values keep the expected type and compare equal."""
    from glue.ligolw.lsctables import SnglBurstTable
    from glue.ligolw.ilwd import ilwdchar
    from glue.ligolw._ilwd import ilwdchar as IlwdChar
    converted = io_ligolw.to_table_type(value, SnglBurstTable, name)
    # ilwd outputs are compared against an ilwd-wrapped expectation.
    expected = ilwdchar(result) if isinstance(converted, IlwdChar) else result
    assert isinstance(converted, type(expected))
    assert converted == expected
def test_to_table_type(value, name, result):
    """Converted values keep the expected type and compare equal."""
    from glue.ligolw.lsctables import SnglBurstTable
    from glue.ligolw.ilwd import ilwdchar
    from glue.ligolw._ilwd import ilwdchar as IlwdChar
    out = io_ligolw.to_table_type(value, SnglBurstTable, name)
    if isinstance(out, IlwdChar):
        # ilwd outputs are compared against an ilwd-wrapped expectation.
        result = ilwdchar(result)
    assert type(out) is type(result) or isinstance(out, type(result))
    assert out == result
def return_search_summary(start_time=0, end_time=0, nevents=0, ifos=None, **kwargs):
    """
    Build a SearchSummary object whose columns are all populated with
    false-y defaults ('' for strings, 0/0. for ints/floats), then fill in
    ifos / nevents / in-out time ranges from the supplied arguments.

    NOTE: process_id is given an index-0 placeholder value; most
    applications should overwrite it.

    Returns
    -------
    lsctables.SearchSummary
        The SearchSummary object.
    """
    if ifos is None:
        ifos = []

    # create an empty search summary
    search_summary = lsctables.SearchSummary()
    for entry, coltype in lsctables.SearchSummaryTable.validcolumns.items():
        if coltype in ('real_4', 'real_8'):
            setattr(search_summary, entry, 0.)
        elif coltype == 'int_4s':
            setattr(search_summary, entry, 0)
        elif coltype == 'lstring':
            setattr(search_summary, entry, '')
        elif entry == 'process_id':
            search_summary.process_id = ilwd.ilwdchar("process:process_id:0")
        else:
            raise ValueError("Column %s not recognized" % (entry))

    # fill in columns
    if ifos:
        search_summary.ifos = ','.join(ifos)
    if nevents:
        search_summary.nevents = nevents
    if start_time and end_time:
        # Split each float GPS time into integer seconds + nanoseconds;
        # the in/out ranges are identical here.
        for prefix in ('in', 'out'):
            setattr(search_summary, '%s_start_time' % prefix, int(start_time))
            setattr(search_summary, '%s_start_time_ns' % prefix, int(start_time % 1 * 1e9))
            setattr(search_summary, '%s_end_time' % prefix, int(end_time))
            setattr(search_summary, '%s_end_time_ns' % prefix, int(end_time % 1 * 1e9))
    return search_summary
def db_to_samples(db_fname, tbltype, cols):
    """
    Pull samples from db_fname and return a list of namedtuples that
    resemble rows from an XML table.

    Parameters
    ----------
    db_fname : str, path to the sqlite3 database
    tbltype : lsctables.SimInspiralTable or lsctables.SnglInspiralTable
    cols : sequence of column names to select

    Raises
    ------
    ValueError if tbltype is not a supported table type.
    """
    # BUGFIX: copy before appending so the caller's list is not mutated
    # (a reused list would accumulate duplicate geocent_end_time_ns
    # entries and crash the namedtuple construction below).
    cols = list(cols)
    if "geocent_end_time" in cols:
        cols.append("geocent_end_time_ns")

    # FIXME: Get columns from db
    #if cols is None:
        #colsspec = "*"
    #else:
    colsspec = ", ".join(cols)

    if tbltype == lsctables.SimInspiralTable:
        sql = """select %s from sim_inspiral""" % colsspec
    elif tbltype == lsctables.SnglInspiralTable:
        sql = """select %s from sngl_inspiral""" % colsspec
    else:
        raise ValueError("Don't know SQL for table %s" % tbltype.tableName)

    Sample = namedtuple("Sample", cols)

    samples = []
    # BUGFIX: connect outside the try so a failed connect does not leave
    # `connection` unbound when the finally clause runs.
    connection = sqlite3.connect(db_fname)
    try:
        connection.row_factory = sqlite3.Row
        for row in connection.execute(sql):
            # FIXME: UGH!
            res = dict(zip(cols, row))
            # Recombine the split GPS time into a float.
            if "geocent_end_time" in res:
                res["geocent_end_time"] += res["geocent_end_time_ns"] * 1e-9
            if "simulation_id" in res:
                res["simulation_id"] = ilwd.ilwdchar(res["simulation_id"])
            if "process_id" in res:
                res["process_id"] = ilwd.ilwdchar(res["process_id"])
            samples.append(Sample(**res))
    finally:
        connection.close()
    return samples
def db_to_samples(db_fname, tbltype, cols):
    """
    Pull samples from db_fname and return objects (namedtuples) that
    resemble rows from an XML table.

    Parameters
    ----------
    db_fname : str, sqlite3 database path
    tbltype : lsctables.SimInspiralTable or lsctables.SnglInspiralTable
    cols : sequence of column names to select

    Raises
    ------
    ValueError if tbltype is not a supported table type.
    """
    # BUGFIX: work on a copy; appending to the caller's list meant a
    # second call with the same list duplicated geocent_end_time_ns and
    # broke the namedtuple below.
    cols = list(cols)
    if "geocent_end_time" in cols:
        cols.append("geocent_end_time_ns")

    # FIXME: Get columns from db
    #if cols is None:
        #colsspec = "*"
    #else:
    colsspec = ", ".join(cols)

    if tbltype == lsctables.SimInspiralTable:
        sql = """select %s from sim_inspiral""" % colsspec
    elif tbltype == lsctables.SnglInspiralTable:
        sql = """select %s from sngl_inspiral""" % colsspec
    else:
        raise ValueError("Don't know SQL for table %s" % tbltype.tableName)

    Sample = namedtuple("Sample", cols)

    samples = []
    # BUGFIX: open the connection before the try block; otherwise a failed
    # connect raised NameError from the finally clause.
    connection = sqlite3.connect(db_fname)
    try:
        connection.row_factory = sqlite3.Row
        for row in connection.execute(sql):
            # FIXME: UGH!
            res = dict(zip(cols, row))
            # Recombine seconds + nanoseconds into a float GPS time.
            if "geocent_end_time" in res:
                res["geocent_end_time"] += res["geocent_end_time_ns"]*1e-9
            if "simulation_id" in res:
                res["simulation_id"] = ilwd.ilwdchar(res["simulation_id"])
            if "process_id" in res:
                res["process_id"] = ilwd.ilwdchar(res["process_id"])
            samples.append(Sample(**res))
    finally:
        connection.close()
    return samples
def _to_ilwd(value, tablename, colname):
    """Coerce ``value`` to an ilwdchar for ``tablename.colname``.

    An existing ilwdchar is validated against the column name and passed
    through; an int is wrapped via the table/column ilwd class; anything
    else is parsed by the generic ``ilwdchar`` constructor.
    """
    from glue.ligolw.ilwd import ilwdchar, get_ilwdchar_class
    from glue.ligolw._ilwd import ilwdchar as IlwdChar

    if isinstance(value, IlwdChar):
        if value.column_name != colname:
            raise ValueError("ilwdchar '{0!s}' doesn't match column "
                             "{1!r}".format(value, colname))
        return value
    if isinstance(value, int):
        return get_ilwdchar_class(tablename, colname)(value)
    return ilwdchar(value)
def return_search_summary(start_time=0, end_time=0, nevents=0, ifos=None, **kwargs):
    """
    Create a SearchSummary object with every valid column set to a value
    that tests False ('' for strings, 0/0. for numbers), then populate
    ifos, nevents and the in/out time ranges from the given arguments.

    NOTE: the process_id is set to a placeholder index-0 value; set it to
    the correct value for most applications.

    Returns
    -------
    lsctables.SearchSummary
        The "empty" SearchSummary object.
    """
    ifos = ifos if ifos is not None else []

    # create an empty search summary
    search_summary = lsctables.SearchSummary()
    cols = lsctables.SearchSummaryTable.validcolumns
    # False-y default per plain column type.
    empty_value = {'real_4': 0., 'real_8': 0., 'int_4s': 0, 'lstring': ''}
    for entry in cols.keys():
        coltype = cols[entry]
        if coltype in empty_value:
            setattr(search_summary, entry, empty_value[coltype])
        elif entry == 'process_id':
            search_summary.process_id = ilwd.ilwdchar("process:process_id:0")
        else:
            raise ValueError("Column %s not recognized" % (entry))

    # fill in columns
    if len(ifos):
        search_summary.ifos = ','.join(ifos)
    if nevents:
        search_summary.nevents = nevents
    if start_time and end_time:
        # Identical in/out ranges, split into seconds + nanoseconds.
        start_sec, start_ns = int(start_time), int(start_time % 1 * 1e9)
        end_sec, end_ns = int(end_time), int(end_time % 1 * 1e9)
        search_summary.in_start_time = start_sec
        search_summary.in_start_time_ns = start_ns
        search_summary.in_end_time = end_sec
        search_summary.in_end_time_ns = end_ns
        search_summary.out_start_time = start_sec
        search_summary.out_start_time_ns = start_ns
        search_summary.out_end_time = end_sec
        search_summary.out_end_time_ns = end_ns
    return search_summary
def write_to_xml(cells, intr_prms, pin_prms=None, fvals=None, fname=None, verbose=False):
    """
    Write a set of cells, with dimensions corresponding to intr_prms, to an
    XML file as sim_inspiral rows.

    Parameters
    ----------
    cells : iterable of cell objects exposing a ``_center`` sequence
    intr_prms : sequence of intrinsic parameter (column) names
    pin_prms : optional dict of pinned parameter name -> value
    fvals : optional sequence stored per-row in the alpha1 column
    fname : optional output filename; a default is generated when None
    verbose : passed through to the XML writer
    """
    # BUGFIX: avoid a shared mutable default argument for pin_prms.
    if pin_prms is None:
        pin_prms = {}
    xmldoc = ligolw.Document()
    xmldoc.appendChild(ligolw.LIGO_LW())
    procrow = process.append_process(xmldoc, program=sys.argv[0])
    procid = procrow.process_id
    process.append_process_params(
        xmldoc, procrow, process.process_params_from_dict(opts.__dict__))

    rows = ["simulation_id", "process_id", "numrel_data"]
    # Override eff_lambda to with psi0, its shoehorn column
    # BUGFIX: rewrite a local copy rather than mutating the caller's list.
    intr_prms = list(intr_prms)
    if "eff_lambda" in intr_prms:
        intr_prms[intr_prms.index("eff_lambda")] = "psi0"
    if "deff_lambda" in intr_prms:
        intr_prms[intr_prms.index("deff_lambda")] = "psi3"
    rows += list(intr_prms)
    rows += list(pin_prms)
    if fvals is not None:
        rows.append("alpha1")
    sim_insp_tbl = lsctables.New(lsctables.SimInspiralTable, rows)
    for itr, intr_prm in enumerate(cells):
        sim_insp = sim_insp_tbl.RowType()
        # FIXME: Need better IDs
        sim_insp.numrel_data = "INTR_SET_%d" % itr
        sim_insp.simulation_id = ilwd.ilwdchar(
            "sim_inspiral:sim_inspiral_id:%d" % itr)
        sim_insp.process_id = procid
        # BUGFIX: test against None, matching the column declaration above.
        if fvals is not None:
            sim_insp.alpha1 = fvals[itr]
        for p, v in zip(intr_prms, intr_prm._center):
            setattr(sim_insp, p, v)
        for p, v in pin_prms.iteritems():
            setattr(sim_insp, p, v)
        sim_insp_tbl.append(sim_insp)
    xmldoc.childNodes[0].appendChild(sim_insp_tbl)

    if fname is None:
        channel_name = ["H=H", "L=L"]
        ifos = "".join([o.split("=")[0][0] for o in channel_name])
        #start = int(event_time)
        start = 0
        fname = "%s-MASS_POINTS-%d-1.xml.gz" % (ifos, start)
    utils.write_filename(xmldoc, fname, gz=True, verbose=verbose)
def write_coinc_tables(vetotrigs, xmldoc, refchannel, twind, time_slide_id=None):
    """
    Write a set of coinc tables for this round. We only write coincidences
    for coincs with refchannel.
    Note: This is probably gonna be slow... aaaand that's why we implemented
    the real algorithm in C.

    Parameters
    ----------
    vetotrigs : iterable of SnglBurst rows to scan for coincidences
    xmldoc : ligolw.Document the tables are appended to (also returned)
    refchannel : str, name of the reference channel
    twind : float, full coincidence-window width in seconds
    time_slide_id : existing time slide ID, or None to insert a
        zero-offset slide.
    """
    # Retrieve the laldetchar-hveto process row for its process_id.
    process = [
        p for p in table.get_table(xmldoc, lsctables.ProcessTable.tableName)
        if p.program == "laldetchar-hveto"
    ][0]
    process_id = process.process_id

    # Insert a time slide ID. It's not yet really necessary
    if time_slide_id is None:
        from pylal import ligolw_tisi
        timeslidetable = lsctables.New(lsctables.TimeSlideTable)
        # BUGFIX: call RowType() -- the original stored the row *class*,
        # set attributes on it, and appended the class to the table.
        time_slide = timeslidetable.RowType()
        time_slide.process_id = process_id
        time_slide.time_slide_id = time_slide_id = ilwd.ilwdchar(
            "time_slide:time_slide_id:0")
        time_slide.instrument = opt.instrument
        time_slide.offset = 0.0
        timeslidetable.append(time_slide)
        xmldoc.childNodes[0].appendChild(timeslidetable)

    # Set up coinc tables
    coinc_def = HVetoBBCoincDef
    coincdeftable = lsctables.New(lsctables.CoincDefTable)
    coinc_def.coinc_def_id = coinc_def_id = coincdeftable.get_next_id()
    coincdeftable.append(coinc_def)
    xmldoc.childNodes[0].appendChild(coincdeftable)

    coinc_def = HVetoCoincTables(xmldoc)
    # Window of width twind centred on each reference trigger's peak.
    reftrigs = [(segment(sb.get_peak() - twind / 2.0,
                         sb.get_peak() + twind / 2.0), sb)
                for sb in vetotrigs if sb.channel == refchannel]
    for vt in vetotrigs:
        if vt.channel == refchannel:
            continue
        for (s, t) in reftrigs:
            if vt.get_peak() in s:
                coinc_def.append_coinc(process_id, time_slide_id,
                                       coinc_def_id, (t, vt))
    return xmldoc
def get_time_slides(connection):
    """
    Query the database for the IDs and offsets of all time slides, and
    return two dictionaries one containing the all-zero time slides and
    the other containing the not-all-zero time slides.

    Returns
    -------
    (zero_lag_time_slides, background_time_slides) : tuple of dicts
        Each maps an ilwd time_slide_id to its offsetvector.
    """
    zero_lag_time_slides = {}
    background_time_slides = {}
    cursor = connection.cursor().execute("""
SELECT
        time_slide_id,
        instrument,
        offset
FROM
        time_slide
ORDER BY
        time_slide_id
    """)
    # Group ordered rows by time slide ID.  Index the row instead of the
    # original Py2-only tuple-unpacking lambda.
    for time_slide_id, rows in itertools.groupby(cursor, lambda row: ilwd.ilwdchar(row[0])):
        offset_vector = offsetvector.offsetvector(
            (instrument, offset) for _, instrument, offset in rows)
        if any(offset_vector.values()):
            background_time_slides[time_slide_id] = offset_vector
        else:
            zero_lag_time_slides[time_slide_id] = offset_vector
    # BUGFIX: the original built both dicts but fell off the end without
    # returning them, despite the docstring's promise.
    return zero_lag_time_slides, background_time_slides
def make_exttrig_file(cp, ifos, sci_seg, out_dir):
    '''
    Make an ExtTrig xml file containing information on the external trigger

    Parameters
    ----------
    cp : pycbc.workflow.configuration.WorkflowConfigParser object
    The parsed configuration options of a pycbc.workflow.core.Workflow.

    ifos : str
    String containing the analysis interferometer IDs.

    sci_seg : glue.segments.segment
    The science segment for the analysis run.

    out_dir : str
    The output directory, destination for xml file.

    Returns
    -------
    xml_file : pycbc.workflow.File object
    The xml file with external trigger information.
    '''
    # Initialise objects
    xmldoc = ligolw.Document()
    xmldoc.appendChild(ligolw.LIGO_LW())
    tbl = lsctables.New(lsctables.ExtTriggersTable)
    cols = tbl.validcolumns
    xmldoc.childNodes[-1].appendChild(tbl)
    row = tbl.appendRow()

    # Add known attributes for this GRB
    setattr(row, "event_ra", float(cp.get("workflow", "ra")))
    setattr(row, "event_dec", float(cp.get("workflow", "dec")))
    setattr(row, "start_time", int(cp.get("workflow", "trigger-time")))
    setattr(row, "event_number_grb", str(cp.get("workflow", "trigger-name")))

    # Fill in all empty rows
    for entry in cols.keys():
        if not hasattr(row, entry):
            if cols[entry] in ['real_4','real_8']:
                setattr(row,entry,0.)
            elif cols[entry] == 'int_4s':
                setattr(row,entry,0)
            elif cols[entry] == 'lstring':
                setattr(row,entry,'')
            elif entry == 'process_id':
                row.process_id = ilwd.ilwdchar("external_trigger:process_id:0")
            elif entry == 'event_id':
                row.event_id = ilwd.ilwdchar("external_trigger:event_id:0")
            else:
                # BUGFIX: attach the message to the exception instead of
                # printing to stderr and raising a bare ValueError, matching
                # the other table-filling helpers in this module.
                raise ValueError("Column %s not recognized" % (entry))

    # Save file
    xml_file_name = "triggerGRB%s.xml" % str(cp.get("workflow", "trigger-name"))
    xml_file_path = os.path.join(out_dir, xml_file_name)
    utils.write_filename(xmldoc, xml_file_path)
    xml_file_url = urlparse.urljoin("file:", urllib.pathname2url(xml_file_path))
    xml_file = File(ifos, xml_file_name, sci_seg, file_url=xml_file_url)
    xml_file.PFN(xml_file_url, site="local")
    return xml_file
# Create a new XML document holding an (initially empty) sim_inspiral table
xmldoc = ligolw.Document()
xmldoc.appendChild(ligolw.LIGO_LW())
sim_table = lsctables.New(lsctables.SimInspiralTable)
xmldoc.childNodes[0].appendChild(sim_table)

# Append N zero-filled rows to the sim_inspiral table
for _ in range(N):
    blank = sim_table.RowType()
    for slot in blank.__slots__:
        setattr(blank, slot, 0)
    sim_table.append(blank)

# Assign process and simulation IDs
for idx, row in enumerate(sim_table):
    row.process_id = ilwd.ilwdchar("process:process_id:{0:d}".format(idx))
    row.simulation_id = ilwd.ilwdchar(
        "sim_inspiral:simulation_id:{0:d}".format(ids[idx]))

# Copy the injection parameters into the rows, column by column
for field in injections.dtype.names:
    for row, val in zip(sim_table, injections[field]):
        setattr(row, field, val)

# Write the document to disk
with open(opts.output, 'w') as output_file:
    xmldoc.write(output_file)
def get_sim_hash(N=1, num_digits=10):
    """Return an ilwd id whose middle field is a unique random hex tag."""
    tag = DA.get_unique_hex_tag(N=N, num_digits=num_digits)
    return ilwd.ilwdchar(":%s:0" % tag)
def assign_id(row, i):
    """Set ``row.simulation_id`` to a sim_inspiral ilwd id with index ``i``."""
    sim_id = "sim_inspiral_table:sim_inspiral:%d" % i
    row.simulation_id = ilwd.ilwdchar(sim_id)
# Progress report for this proposal round (Python 2 print statements).
print "\nbank size: %d\t\tproposed: %d\trejection rate: %.6f / (%.6f)" % (len(bank), nprop, 1 - float(len(ks)) / float(sum(ks)), 1 - 1. / opts.convergence_threshold)
print >> sys.stdout, "accepted:\t\t", tmplt
if matcher is not None:
    print >> sys.stdout, "max match (%.4f):\t" % match, matcher
k = 0
# Add to single inspiral table. Do not store templates that
# were in the original bank, only store the additions.
if not hasattr(tmplt, 'is_seed_point'):
    if opts.output_filename.endswith(('.xml', '.xml.gz')):
        row = tmplt.to_sngl()
        # Event ids must be unique, or the table isn't valid,
        # SQL needs this
        row.event_id = ilwd.ilwdchar('sngl_inspiral:event_id:%d' % (len(bank), ))
        # If we figure out how to use metaio's SnglInspiralTable the
        # following change then defines the event_id
        #curr_id = EventIDColumn()
        #curr_id.id = len(bank)
        #curr_id.snglInspiralTable = row
        #row.event_id = curr_id
        row.ifo = opts.instrument
        row.process_id = process.process_id
        tbl.append(row)
    if opts.output_filename.endswith(('.hdf', '.h5', '.hdf5')):
        # HDF output: tbl is a numpy storage array here, grown by append.
        row = tmplt.to_storage_arr()
        if len(tbl) == 0:
            tbl = row
        else:
            tbl = np.append(tbl, row)
# Build a LIGO_LW document holding a sim_inspiral table of mass points.
xmldoc = ligolw.Document()
xmldoc.appendChild(ligolw.LIGO_LW())
procrow = process.append_process(xmldoc, program=sys.argv[0])
procid = procrow.process_id
process.append_process_params(
    xmldoc, procrow, process.process_params_from_dict(opts.__dict__))

sim_insp_tbl = lsctables.New(lsctables.SimInspiralTable, [
    "simulation_id", "process_id", "numrel_data",
    "mass1", "mass2", "psi0", "psi3"
])
# One row per (m1, m2) grid point per sampled lambda value; psi0/psi3
# shoehorn the tidal parameters (eff_lambda / delta_eff_lambda).
for itr, (m1, m2) in enumerate(m1m2_grid):
    for l1 in np.linspace(common_cl.param_limits["lam_tilde"][0],
                          common_cl.param_limits["lam_tilde"][1], Nlam):
        sim_insp = sim_insp_tbl.RowType()
        sim_insp.numrel_data = "MASS_SET_%d" % itr
        sim_insp.simulation_id = ilwd.ilwdchar(
            "sim_inspiral:sim_inspiral_id:%d" % itr)
        sim_insp.process_id = procid
        sim_insp.mass1, sim_insp.mass2 = m1, m2
        # Command-line overrides win; otherwise use the sampled lambda
        # (and 0 for delta lambda).
        sim_insp.psi0, sim_insp.psi3 = opts.eff_lambda or l1, opts.delta_eff_lambda or 0
        sim_insp_tbl.append(sim_insp)
xmldoc.childNodes[0].appendChild(sim_insp_tbl)
# Derive the output filename from the channel IFO letters and event time.
if opts.channel_name:
    ifos = "".join([o.split("=")[0][0] for o in opts.channel_name])
else:
    ifos = "HLV"
start = int(event_time)
fname = "%s-MASS_POINTS-%d-1.xml.gz" % (ifos, start)
utils.write_filename(xmldoc, fname, gz=True)
# Set up PSD for metric computation; calling into pylal, so need pylal types psd = REAL8FrequencySeries(name="psd", f0=0., deltaF=1., data=get_PSD(1., opts.flow, 1570., noise_model)) # insert our rows # Replace row with C datatype; nice side effect: initializes elements to 0 or "" lsctables.SnglInspiralTable.RowType = SnglInspiralTable tbl = lsctables.New(lsctables.SnglInspiralTable) xmldoc.childNodes[-1].appendChild(tbl) for idx, template in enumerate(bank): # Do not store templates that were in the original bank, only store the # additions. if hasattr(template, 'is_seed_point'): continue row = template.to_sngl() # Event ids must be unique, or the table isn't valid, SQL needs this row.event_id = ilwd.ilwdchar('sngl_inspiral:event_id:%d' %(idx,)) row.ifo = opts.instrument row.process_id = process.process_id row.Gamma0, row.Gamma1, row.Gamma2, row.Gamma3, row.Gamma4, row.Gamma5,\ row.Gamma6, row.Gamma7, row.Gamma8, row.Gamma9 = \ compute_metric(opts.flow, 1570., 4, row.tau0, row.tau3, psd) tbl.append(row) # write out the document ligolw_process.set_process_end_time(process) # FIXME output naming conventions to match IHOPE/tmpltbank break sbank_pipe if opts.user_tag: fout = "%s-SBANK_%s-%d-%d.xml.gz" % (opts.instrument, opts.user_tag, opts.gps_start_time, opts.gps_end_time-opts.gps_start_time) else: fout = "%s-SBANK-%d-%d.xml.gz" % (opts.instrument, opts.gps_start_time, opts.gps_end_time-opts.gps_start_time)
def write_coincidences(connection, map_label, search, process_id, verbose = False):
    """
    Writes injection<->event coincidences to the coinc_event_map table.

    Parameters
    ----------
    connection : sqlite3.Connection to the database being updated
    map_label : str, description stored in the new coinc_definer entry
    search : str, search name for the new coinc_definer entry
    process_id : ilwd process id attributed to the new coinc_event rows
    verbose : bool, print progress messages to stdout
    """
    # for all the maps, see if there is another coincidence
    if verbose:
        print >> sys.stdout, "Getting mapped sngls belonging to a coincident event..."
    connection.create_aggregate("ag_cat", 1, sqlutils.aggregate_concatenate)
    connection.create_function("issubset", 2, strlst_is_subset)
    # Build a temp table pairing each injection with any coinc_event whose
    # member event list is a superset of the injection's mapped events.
    sqlquery = '''
    CREATE INDEX finj_simid_idx ON found_inj (sim_id);
    CREATE INDEX finj_eid_idx ON found_inj (event_id);
    CREATE TEMP TABLE coinc_inj AS
        SELECT
            found_inj.sim_id AS sid,
            found_inj.event_id AS evid,
            coinc_event_map.coinc_event_id AS ceid
        FROM found_inj
        JOIN coinc_event_map ON (coinc_event_map.event_id == evid)
        WHERE issubset(
            (SELECT ag_cat(c.event_id) FROM coinc_event_map AS c
             WHERE c.coinc_event_id == ceid
             GROUP BY c.coinc_event_id ORDER BY c.event_id ASC),
            (SELECT ag_cat(b.event_id) FROM found_inj AS b
             WHERE b.sim_id == sid
             GROUP BY b.sim_id ORDER BY b.event_id ASC));
    CREATE INDEX cij_eid_idx ON coinc_inj (evid);
    '''
    connection.cursor().executescript(sqlquery)

    # get the sim_coincs
    sqlquery = "SELECT DISTINCT sid, ceid FROM coinc_inj"
    # BUGFIX: the SELECT returns (sid, ceid) in that order; the original
    # unpacked the rows as (ceid, sim_id), swapping the two IDs.
    sim_coincs = [(ilwd.ilwdchar(sim_id), ilwd.ilwdchar(ceid))
                  for sim_id, ceid in connection.cursor().execute(sqlquery).fetchall()]

    # get the sim_sngls: injections whose events belong to no coincidence
    sqlquery = "SELECT sim_id, event_id FROM found_inj WHERE event_id NOT IN (SELECT DISTINCT evid FROM coinc_inj)"
    sim_sngls = [(ilwd.ilwdchar(sim_id), ilwd.ilwdchar(eid))
                 for sim_id, eid in connection.cursor().execute(sqlquery).fetchall()]

    # create a new coinc_def id for this map label, if it already doesn't exist
    coinc_def_id = sqlutils.write_newstyle_coinc_def_entry(connection, map_label, search=search)

    # get the time_slide id
    # XXX: NOTE: We are assuming that all simulation entries have the same time_slide id
    sqlquery = 'SELECT DISTINCT time_slide_id FROM experiment_summary WHERE datatype LIKE "simulation%"'
    time_slide_id = connection.cursor().execute(sqlquery).fetchall()
    if len(time_slide_id) > 1:
        raise ValueError("more than one time_slide_id found for the simulation datatype")
    elif len(time_slide_id) == 0:
        raise ValueError("no time_slide_id found for the simulation datatype")
    time_slide_id = ilwd.ilwdchar(time_slide_id.pop()[0])

    # write the number of new entries needed for the sim_coincs to the coinc_event table
    if verbose:
        print >> sys.stdout, "Adding injection maps to coinc_event table..."
    new_ceids = sqlutils.add_coinc_event_entries(connection, process_id, coinc_def_id, time_slide_id, len(sim_coincs))

    # add these new entries to coinc_event_map table
    if verbose:
        print >> sys.stdout, "Adding injection-coinc_event maps to coinc_event_map table..."
    sqlquery = 'INSERT INTO coinc_event_map (coinc_event_id, table_name, event_id) VALUES (?,?,?)'
    connection.cursor().executemany(sqlquery,
        [(str(ceid), sim_id.table_name, str(sim_id))
         for ceid, (sim_id, _) in zip(new_ceids, sim_coincs)])
    connection.cursor().executemany(sqlquery,
        [(str(ceid), coinc_ceid.table_name, str(coinc_ceid))
         for ceid, (_, coinc_ceid) in zip(new_ceids, sim_coincs)])

    # ditto for the sim-sngls
    if verbose:
        print >> sys.stdout, "Adding injection-sngl maps to coinc_event_map table..."
    new_ceids = sqlutils.add_coinc_event_entries(connection, process_id, coinc_def_id, time_slide_id, len(sim_sngls))
    connection.cursor().executemany(sqlquery,
        [(str(ceid), sim_id.table_name, str(sim_id))
         for ceid, (sim_id, _) in zip(new_ceids, sim_sngls)])
    connection.cursor().executemany(sqlquery,
        [(str(ceid), eid.table_name, str(eid))
         for ceid, (_, eid) in zip(new_ceids, sim_sngls)])

    # update the number of events in the coinc_event table
    if verbose:
        print >> sys.stdout, "Updating coinc_event nevents column..."
    sqlutils.update_coinctab_nevents(connection)
def get_sim_hash(N=1, num_digits=10):
    """Return an ilwd id whose middle field is a unique random hex tag."""
    tag = get_unique_hex_tag(N=N, num_digits=num_digits)
    return ilwd.ilwdchar(":{}:0".format(tag))
def make_exttrig_file(cp, ifos, sci_seg, out_dir):
    '''
    Make an ExtTrig xml file containing information on the external trigger

    Parameters
    ----------
    cp : pycbc.workflow.configuration.WorkflowConfigParser object
    The parsed configuration options of a pycbc.workflow.core.Workflow.

    ifos : str
    String containing the analysis interferometer IDs.

    sci_seg : glue.segments.segment
    The science segment for the analysis run.

    out_dir : str
    The output directory, destination for xml file.

    Returns
    -------
    xml_file : pycbc.workflow.File object
    The xml file with external trigger information.

    Raises
    ------
    ValueError
    If a table column has an unrecognized type.
    '''
    # Initialise objects
    xmldoc = ligolw.Document()
    xmldoc.appendChild(ligolw.LIGO_LW())
    tbl = lsctables.New(lsctables.ExtTriggersTable)
    cols = tbl.validcolumns
    xmldoc.childNodes[-1].appendChild(tbl)
    row = tbl.appendRow()

    # Add known attributes for this GRB
    setattr(row, "event_ra", float(cp.get("workflow", "ra")))
    setattr(row, "event_dec", float(cp.get("workflow", "dec")))
    setattr(row, "start_time", int(cp.get("workflow", "trigger-time")))
    setattr(row, "event_number_grb", str(cp.get("workflow", "trigger-name")))

    # Fill in all empty rows
    for entry in cols.keys():
        if not hasattr(row, entry):
            if cols[entry] in ['real_4', 'real_8']:
                setattr(row, entry, 0.)
            elif cols[entry] == 'int_4s':
                setattr(row, entry, 0)
            elif cols[entry] == 'lstring':
                setattr(row, entry, '')
            elif entry == 'process_id':
                row.process_id = ilwd.ilwdchar("external_trigger:process_id:0")
            elif entry == 'event_id':
                row.event_id = ilwd.ilwdchar("external_trigger:event_id:0")
            else:
                # Carry the offending column name in the exception itself
                # (consistent with return_empty_sngl) instead of printing to
                # stderr and raising a message-less ValueError.
                raise ValueError("Column %s not recognized" % (entry))

    # Save file
    xml_file_name = "triggerGRB%s.xml" % str(cp.get("workflow", "trigger-name"))
    xml_file_path = os.path.join(out_dir, xml_file_name)
    utils.write_filename(xmldoc, xml_file_path)
    xml_file_url = urlparse.urljoin("file:", urllib.pathname2url(xml_file_path))
    xml_file = File(ifos, xml_file_name, sci_seg, file_url=xml_file_url)
    xml_file.PFN(xml_file_url, site="local")
    return xml_file
# NOTE(review): this chunk appears to begin mid-`if` -- the guard that
# detects an empty glob result is not visible here; the two statements
# below look like that guard's body (bail out when there is nothing to do).
print "No .trg files found. Nothing to convert."
sys.exit(0)

# Convert every KleineWelle .trg file in the directory given as argv[1]
# into a LIGO_LW XML file containing a sngl_burst table.
for f in glob.glob("%s/*.trg" % sys.argv[1]):
    #print f
    xmldoc = Document()
    xmldoc.appendChild( LIGO_LW() )
    # only these columns are written to the output sngl_burst table
    sbt = lsctables.New(lsctables.SnglBurstTable,
        ["ifo", "peak_time", "peak_time_ns", "event_id", "process_id",
         "start_time", "start_time_ns", "confidence", "chisq", "chisq_dof",
         "amplitude", "duration", "search", "central_freq", "channel",
         "snr", "bandwidth"])
    # file names look like the example below: first "_"-token is the ifo,
    # the middle tokens joined back together form the channel name
    #H1_TCS-ITMY_PD_ISS_OUT_AC_1_1024.xml
    fspl = os.path.basename(f).split("_")
    ifo = fspl[0]
    channel = "_".join( fspl[1:-2] )
    sbt += fromkwfile( f, ifo=ifo, channel=channel,
        columns = ["duration", "start_time", "peak_time", "central_freq",
                   "bandwidth", "snr", "confidence"] )
    for i, sb in enumerate(sbt):
        sb.search = "KleineWelle"
        sb.process_id = ilwd.ilwdchar("process:process_id:0")
        # event ids are numbered sequentially within each file
        sb.event_id = ilwd.ilwdchar("sngl_burst:event_id:%d"% i )
        #sb.confidence = 0
        # columns not supplied by the .trg file are zeroed out
        sb.chisq_dof = 0
        sb.chisq = 0
        sb.amplitude = 0
    xmldoc.childNodes[0].appendChild( sbt )
    # output file name: same path with "trg" replaced by "xml"
    write_filename( xmldoc, re.sub( "trg", "xml", f ) )
    #write_fileobj( xmldoc, sys.stdout )
# NOTE(review): this fragment runs inside an enclosing proposal loop that is
# not visible here; `match`, `tmplt`, `k`, `ks`, `bank`, `matcher`, `tbl`,
# `psd`, `process`, `xmldoc`, `fout` and `status_format` come from that scope.
# Accept the proposed template when its best match against the current bank
# is below the minimum-match threshold.
if match < opts.match_min:
    bank.insort(tmplt)
    # record how many proposals it took to get this acceptance
    ks.append(k)
    if opts.verbose:
        print "\nbank size: %d\t\tproposed: %d\trejection rate: %.6f / (%.6f)" % (len(bank), k, 1 - float(len(ks))/float(sum(ks)), 1 - 1./opts.convergence_threshold )
        print >>sys.stdout, "accepted:\t\t", status_format % tmplt.params
        if matcher is not None:
            print >>sys.stdout, "max match (%.4f):\t" % match, status_format % matcher.params
    # reset the proposal counter for the next acceptance
    k = 0

    # Add to single inspiral table. Do not store templates that
    # were in the original bank, only store the additions.
    if not hasattr(tmplt, 'is_seed_point'):
        row = tmplt.to_sngl()
        # Event ids must be unique, or the table isn't valid, SQL needs this
        row.event_id = ilwd.ilwdchar('sngl_inspiral:event_id:%d' %(len(bank),))
        row.ifo = opts.instrument
        row.process_id = process.process_id
        # store the metric components alongside the template parameters
        row.Gamma0, row.Gamma1, row.Gamma2, row.Gamma3, row.Gamma4, row.Gamma5,\
            row.Gamma6, row.Gamma7, row.Gamma8, row.Gamma9 = \
            compute_metric(opts.flow, 1570., 4, row.tau0, row.tau3, psd)
        tbl.append(row)
    # periodically checkpoint the growing bank to disk
    if opts.checkpoint and not len(bank) % opts.checkpoint:
        checkpoint_save(xmldoc, fout, process)

# clear the proposal template if caching is not enabled
if not opts.cache_waveforms:
    tmplt.clear()
# NOTE(review): `input_table`, `outcols`, `outputtabletype`, `new_row`,
# `out_table`, `proc_id`, `outdoc` and `options` are defined outside this
# fragment; this loop copies each input row into a row of the output table
# type, keeping only the columns both tables share.
for idx, tmplt in enumerate(input_table):
    if options.verbose and idx % 1000 == 0:
        print(" .. copying row %d" % idx, file=sys.stderr)
    newp = new_row(outputtabletype)
    # First copy over all columns that can be possibly copied over
    for col in outcols:
        if col in tmplt.__slots__:
            newp.__setattr__(col, tmplt.__getattribute__(col))
        else:
            continue
    # Now create an identifier ID
    if outputtabletype == lsctables.SnglInspiralTable:
        # event ids are numbered by the row's position in the input table
        newp.event_id = ilwd.ilwdchar("sngl_inspiral:event_id:%d" % idx)
        if options.verbose and idx % 1000 == 0:
            print(newp.event_id)
    newp.__setattr__('process_id', proc_id)
    # Add the point to the output table
    out_table.append(newp)

# write the xml doc to disk
proctable = lsctables.ProcessTable.get_table(outdoc)
proctable[0].end_time = gpstime.GpsSecondsFromPyUTC(time.time())

# ensure the output file name carries an .xml extension
outname = options.output_catalog
if '.xml' not in outname:
    outname = outname + '.xml'
# NOTE(review): `injections`, `q`, `samples` and `N` come from earlier code
# not visible in this chunk; this fragment fills the injections record and
# prepares a LIGO_LW document with a zero-offset time_slide table.
injections['q'] = q
# prefer an explicit hrss column; fall back to exponentiating loghrss
try:
    injections['hrss'] = samples['hrss']
except:
    injections['hrss'] = np.exp(samples['loghrss'])
injections['ra'] = samples['ra']
injections['dec'] = samples['dec']
injections['psi'] = samples['psi']

# Create a new XML document
xmldoc = ligolw.Document()
xmldoc.appendChild(ligolw.LIGO_LW())

#create timeslide table and set offsets to 0
timeslide_table = lsctables.New(lsctables.TimeSlideTable)
# NOTE(review): this assigns process_id on the Process *class* rather than
# an instance -- presumably append_offsetvector only reads the id attribute;
# confirm against lsctables.
p = lsctables.Process
p.process_id = ilwd.ilwdchar("process:process_id:{0:d}".format(0))
timeslide_table.append_offsetvector({ 'H1': 0, 'V1': 0, 'L1': 0, 'H2': 0 }, p)

sim_table = lsctables.New(lsctables.SimBurstTable)
xmldoc.childNodes[0].appendChild(timeslide_table)
xmldoc.childNodes[0].appendChild(sim_table)

# Add empty rows to the sim_inspiral table
# NOTE(review): the chunk is cut off here -- the body of this inner loop
# continues outside this view.
for inj in range(N):
    row = sim_table.RowType()
    for slot in row.__slots__:
def ReadMultiInspiralTimeSlidesFromFiles(fileList,generate_output_tables=False):
    """
    Read time-slid multiInspiral tables from a list of files

    The per-file time_slide ids are remapped onto a single global list of
    unique offset dictionaries, and the analysed segments are grouped by
    that remapped slide index.

    @param fileList: list of input files
    @param generate_output_tables: if True, also build and return merged
        time_slide, segment and time_slide_segment_map tables
    """
    if not fileList:
        return multiInspiralTable(), None

    multis = None
    timeSlides = []
    segmentDict = {}
    for thisFile in fileList:
        doc = utils.load_filename(thisFile,
            gz=(thisFile or "stdin").endswith(".gz"),
            contenthandler = lsctables.use_in(ligolw.LIGOLWContentHandler))
        # Extract the time slide table
        timeSlideTable = lsctables.TimeSlideTable.get_table(doc)
        slideMapping = {}
        currSlides = {}
        # NOTE: I think some of this is duplicated in the glue definition of the
        # time slide table. Probably should move over to that
        # collect this file's slides as {id: {instrument: offset}}
        for slide in timeSlideTable:
            currID = int(slide.time_slide_id)
            if currID not in currSlides.keys():
                currSlides[currID] = {}
                currSlides[currID][slide.instrument] = slide.offset
            elif slide.instrument not in currSlides[currID].keys():
                currSlides[currID][slide.instrument] = slide.offset

        for slideID,offsetDict in currSlides.items():
            try:
                # Is the slide already in the list and where?
                offsetIndex = timeSlides.index(offsetDict)
                slideMapping[slideID] = offsetIndex
            except ValueError:
                # If not then add it
                timeSlides.append(offsetDict)
                slideMapping[slideID] = len(timeSlides) - 1

        # Get the mapping table
        segmentMap = {}
        timeSlideMapTable = lsctables.TimeSlideSegmentMapTable.get_table(doc)
        for entry in timeSlideMapTable:
            segmentMap[int(entry.segment_def_id)] = int(entry.time_slide_id)

        # Extract the segment table
        segmentTable = lsctables.SegmentTable.get_table(doc)
        for entry in segmentTable:
            currSlidId = segmentMap[int(entry.segment_def_id)]
            currSeg = entry.get()
            # group segments by the remapped (global) slide index
            if not segmentDict.has_key(slideMapping[currSlidId]):
                segmentDict[slideMapping[currSlidId]] = segments.segmentlist()
            segmentDict[slideMapping[currSlidId]].append(currSeg)
            segmentDict[slideMapping[currSlidId]].coalesce()

        # extract the multi inspiral table
        try:
            multiInspiralTable = lsctables.MultiInspiralTable.get_table(doc)
            # Remap the time slide IDs
            for multi in multiInspiralTable:
                newID = slideMapping[int(multi.time_slide_id)]
                multi.time_slide_id = ilwd.ilwdchar(\
                    "time_slide:time_slide_id:%d" % (newID))
            if multis:
                multis.extend(multiInspiralTable)
            else:
                multis = multiInspiralTable
#          except: multiInspiralTable = None
        except:
            raise

    if not generate_output_tables:
        return multis,timeSlides,segmentDict
    else:
        # Make a new time slide table
        timeSlideTab = lsctables.New(lsctables.TimeSlideTable)
        for slideID,offsetDict in enumerate(timeSlides):
            for instrument in offsetDict.keys():
                currTimeSlide = lsctables.TimeSlide()
                currTimeSlide.instrument = instrument
                currTimeSlide.offset = offsetDict[instrument]
                currTimeSlide.time_slide_id = ilwd.ilwdchar(\
                    "time_slide:time_slide_id:%d" % (slideID))
                currTimeSlide.process_id = ilwd.ilwdchar(\
                    "process:process_id:%d" % (0))
                timeSlideTab.append(currTimeSlide)

        # Make a new mapping table
        timeSlideSegMapTab = lsctables.New(lsctables.TimeSlideSegmentMapTable)
        for i in range(len(timeSlides)):
            currMapEntry = lsctables.TimeSlideSegmentMap()
            currMapEntry.time_slide_id = ilwd.ilwdchar(\
                "time_slide:time_slide_id:%d" % (i))
            currMapEntry.segment_def_id = ilwd.ilwdchar(\
                "segment_def:segment_def_id:%d" % (i))
            timeSlideSegMapTab.append(currMapEntry)

        # Make a new segment table
        newSegmentTable = lsctables.New(lsctables.SegmentTable)
        segmentIDCount = 0
        for i in range(len(timeSlides)):
            currSegList = segmentDict[i]
            for seg in currSegList:
                currSegment = lsctables.Segment()
                currSegment.segment_id = ilwd.ilwdchar(\
                    "segment:segment_id:%d" %(segmentIDCount))
                segmentIDCount += 1
                currSegment.segment_def_id = ilwd.ilwdchar(\
                    "segment_def:segment_def_id:%d" % (i))
                currSegment.process_id = ilwd.ilwdchar(\
                    "process:process_id:%d" % (0))
                currSegment.set(seg)
                # sentinel values for database-only columns
                currSegment.creator_db = -1
                currSegment.segment_def_cdb = -1
                newSegmentTable.append(currSegment)

        return multis,timeSlides,segmentDict,timeSlideTab,newSegmentTable,\
            timeSlideSegMapTab
def write_coincidences(connection, map_label, search, process_id, verbose=False):
    """
    Writes coincidences to coinc_event_map table.

    For every mapped injection in ``found_inj``, finds any coincident event
    its mapped single events belong to, then inserts injection<->coinc and
    injection<->sngl maps into ``coinc_event_map``.

    @param connection: sqlite3 connection (database must contain a
        ``found_inj`` table with sim_id/event_id columns)
    @param map_label: label for the new coinc_definer entry
    @param search: search name for the coinc_definer entry
    @param process_id: process_id for the new coinc_event rows
    @param verbose: if True, print progress to stdout
    """
    # for all the maps, see if there is another coincidence
    if verbose:
        print >> sys.stdout, "Getting mapped sngls belonging to a coincident event..."
    # SQL helpers: ag_cat concatenates event_ids per group; issubset tests
    # whether one concatenated id list is contained in the other
    connection.create_aggregate("ag_cat", 1, sqlutils.aggregate_concatenate)
    connection.create_function("issubset", 2, strlst_is_subset)
    sqlquery = '''
        CREATE INDEX finj_simid_idx ON found_inj (sim_id);
        CREATE INDEX finj_eid_idx ON found_inj (event_id);
        CREATE TEMP TABLE coinc_inj AS
            SELECT
                found_inj.sim_id AS sid,
                found_inj.event_id AS evid,
                coinc_event_map.coinc_event_id AS ceid
            FROM
                found_inj
            JOIN
                coinc_event_map ON (
                    coinc_event_map.event_id == evid )
            WHERE
                issubset(
                    ( SELECT ag_cat(c.event_id)
                      FROM coinc_event_map AS c
                      WHERE c.coinc_event_id == ceid
                      GROUP BY c.coinc_event_id
                      ORDER BY c.event_id ASC),
                    ( SELECT ag_cat(b.event_id)
                      FROM found_inj AS b
                      WHERE b.sim_id == sid
                      GROUP BY b.sim_id
                      ORDER BY b.event_id ASC) );
        CREATE INDEX cij_eid_idx ON coinc_inj (evid);
        '''
    connection.cursor().executescript(sqlquery)
    # get the sim_coincs
    sqlquery = "SELECT DISTINCT sid, ceid FROM coinc_inj"
    # NOTE(review): the query selects (sid, ceid) but the rows are unpacked
    # as (ceid, sim_id) -- names look swapped vs. the column order; confirm
    # the intended (sim_id, coinc_event_id) pairing.
    sim_coincs = [
        (ilwd.ilwdchar(sim_id), ilwd.ilwdchar(ceid))
        for ceid, sim_id in connection.cursor().execute(sqlquery).fetchall()
    ]
    # get the sim_sngls: mapped events not belonging to any coincidence
    sqlquery = "SELECT sim_id, event_id FROM found_inj WHERE event_id NOT IN (SELECT DISTINCT evid FROM coinc_inj)"
    sim_sngls = [
        (ilwd.ilwdchar(sim_id), ilwd.ilwdchar(eid))
        for sim_id, eid in connection.cursor().execute(sqlquery).fetchall()
    ]
    # create a new coinc_def id for this map label, if it already doesn't exist
    coinc_def_id = sqlutils.write_newstyle_coinc_def_entry(connection, map_label, search=search)
    # get the time_slide id
    # XXX: NOTE: We are assuming that all simulation entries have the same time_slide id
    sqlquery = 'SELECT DISTINCT time_slide_id FROM experiment_summary WHERE datatype LIKE "simulation%"'
    time_slide_id = connection.cursor().execute(sqlquery).fetchall()
    if len(time_slide_id) > 1:
        raise ValueError, "more than one time_slide_id found for the simulation datatype"
    elif len(time_slide_id) == 0:
        raise ValueError, "no time_slide_id found for the simulation datatype"
    time_slide_id = ilwd.ilwdchar(time_slide_id.pop()[0])
    # write the number of new entries needed for the sim_coincs to the coinc_event table
    if verbose:
        print >> sys.stdout, "Adding injection maps to coinc_event table..."
    new_ceids = sqlutils.add_coinc_event_entries(connection, process_id, coinc_def_id, time_slide_id, len(sim_coincs))
    # add these new entries to coinc_event_map table
    if verbose:
        print >> sys.stdout, "Adding injection-coinc_event maps to coinc_event_map table..."
    sqlquery = 'INSERT INTO coinc_event_map (coinc_event_id, table_name, event_id) VALUES (?,?,?)'
    # one insert maps the new coinc to the sim id, the other to the old coinc
    connection.cursor().executemany(
        sqlquery,
        [(str(ceid), sim_id.table_name, str(sim_id))
         for ceid, (sim_id, _) in zip(new_ceids, sim_coincs)])
    connection.cursor().executemany(
        sqlquery,
        [(str(ceid), coinc_ceid.table_name, str(coinc_ceid))
         for ceid, (_, coinc_ceid) in zip(new_ceids, sim_coincs)])
    # ditto for the sim-sngls
    if verbose:
        print >> sys.stdout, "Adding injection-sngl maps to coinc_event_map table..."
    new_ceids = sqlutils.add_coinc_event_entries(connection, process_id, coinc_def_id, time_slide_id, len(sim_sngls))
    connection.cursor().executemany(
        sqlquery,
        [(str(ceid), sim_id.table_name, str(sim_id))
         for ceid, (sim_id, _) in zip(new_ceids, sim_sngls)])
    connection.cursor().executemany(
        sqlquery,
        [(str(ceid), eid.table_name, str(eid))
         for ceid, (_, eid) in zip(new_ceids, sim_sngls)])
    # update the number of events in the coinc_event table
    if verbose:
        print >> sys.stdout, "Updating coinc_event nevents column..."
    sqlutils.update_coinctab_nevents(connection)
# NOTE(review): `injections` (a numpy structured array), `opts`, `N` and
# `ids` come from earlier code not visible in this chunk. This fragment
# writes a sim_inspiral table populated from `injections` to opts.output.
injections['amp_order'] = [opts.amporder for i in range(N)]
injections['numrel_data'] = [ "" for _ in range(N)]

# Create a new XML document
xmldoc = ligolw.Document()
xmldoc.appendChild(ligolw.LIGO_LW())
sim_table = lsctables.New(lsctables.SimInspiralTable)
xmldoc.childNodes[0].appendChild(sim_table)

# Add empty rows to the sim_inspiral table
for inj in range(N):
    row = sim_table.RowType()
    # zero every column so nothing is left unpopulated
    for slot in row.__slots__:
        setattr(row, slot, 0)
    sim_table.append(row)

# Fill in IDs
for i,row in enumerate(sim_table):
    row.process_id = ilwd.ilwdchar("process:process_id:{0:d}".format(i))
    row.simulation_id = ilwd.ilwdchar("sim_inspiral:simulation_id:{0:d}".format(ids[i]))

# Fill rows
# copy each structured-array field into the matching row attribute
for field in injections.dtype.names:
    vals = injections[field]
    for row, val in zip(sim_table, vals):
        setattr(row, field, val)

# Write file
output_file = open(opts.output, 'w')
xmldoc.write(output_file)
output_file.close()
# NOTE(review): `injections`, `q`, `samples` and `N` are defined before this
# chunk; it fills the injections record and builds a LIGO_LW document with a
# zero-offset time slide table and an (initially empty) sim_burst table.
injections['q'] = q
# prefer an explicit hrss column; fall back to exponentiating loghrss
try:
    injections['hrss'] = samples['hrss']
except:
    injections['hrss'] = np.exp(samples['loghrss'])
injections['ra'] = samples['ra']
injections['dec'] = samples['dec']
injections['psi'] = samples['psi']

# Create a new XML document
xmldoc = ligolw.Document()
xmldoc.appendChild(ligolw.LIGO_LW())

#create timeslide table and set offsets to 0
timeslide_table = lsctables.New(lsctables.TimeSlideTable)
# NOTE(review): process_id is set on the Process *class*, not an instance --
# presumably append_offsetvector only reads the id attribute; confirm.
p=lsctables.Process
p.process_id=ilwd.ilwdchar("process:process_id:{0:d}".format(0))
timeslide_table.append_offsetvector({'H1':0,'V1':0,'L1':0,'H2':0},p)

sim_table = lsctables.New(lsctables.SimBurstTable)
xmldoc.childNodes[0].appendChild(timeslide_table)
xmldoc.childNodes[0].appendChild(sim_table)

# Add empty rows to the sim_inspiral table
for inj in xrange(N):
    row = sim_table.RowType()
    # zero every column so nothing is left unpopulated
    for slot in row.__slots__:
        setattr(row, slot, 0)
    sim_table.append(row)

# Fill in IDs
# NOTE(review): the chunk is cut off after the first statement of this loop;
# the remaining id assignments continue outside this view.
for i,row in enumerate(sim_table):
    row.process_id = ilwd.ilwdchar("process:process_id:{0:d}".format(i))
def ReadMultiInspiralTimeSlidesFromFiles(fileList, generate_output_tables=False):
    """
    Read time-slid multiInspiral tables from a list of files

    Per-file time_slide ids are remapped onto one global list of unique
    offset dictionaries; analysed segments are grouped by the remapped
    slide index.

    @param fileList: list of input files
    @param generate_output_tables: if True, also build and return merged
        time_slide, segment and time_slide_segment_map tables
    """
    if not fileList:
        return multiInspiralTable(), None

    multis = None
    timeSlides = []
    segmentDict = {}
    for thisFile in fileList:
        doc = utils.load_filename(thisFile,
            gz=(thisFile or "stdin").endswith(".gz"),
            contenthandler=lsctables.use_in(
                ligolw.LIGOLWContentHandler))
        # Extract the time slide table
        timeSlideTable = table.get_table(doc,
            lsctables.TimeSlideTable.tableName)
        slideMapping = {}
        currSlides = {}
        # NOTE: I think some of this is duplicated in the glue definition of the
        # time slide table. Probably should move over to that
        # collect this file's slides as {id: {instrument: offset}}
        for slide in timeSlideTable:
            currID = int(slide.time_slide_id)
            if currID not in currSlides.keys():
                currSlides[currID] = {}
                currSlides[currID][slide.instrument] = slide.offset
            elif slide.instrument not in currSlides[currID].keys():
                currSlides[currID][slide.instrument] = slide.offset

        for slideID, offsetDict in currSlides.items():
            try:
                # Is the slide already in the list and where?
                offsetIndex = timeSlides.index(offsetDict)
                slideMapping[slideID] = offsetIndex
            except ValueError:
                # If not then add it
                timeSlides.append(offsetDict)
                slideMapping[slideID] = len(timeSlides) - 1

        # Get the mapping table
        segmentMap = {}
        timeSlideMapTable = table.get_table(
            doc, lsctables.TimeSlideSegmentMapTable.tableName)
        for entry in timeSlideMapTable:
            segmentMap[int(entry.segment_def_id)] = int(entry.time_slide_id)

        # Extract the segment table
        segmentTable = table.get_table(doc, lsctables.SegmentTable.tableName)
        for entry in segmentTable:
            currSlidId = segmentMap[int(entry.segment_def_id)]
            currSeg = entry.get()
            # group segments by the remapped (global) slide index
            if not segmentDict.has_key(slideMapping[currSlidId]):
                segmentDict[slideMapping[currSlidId]] = segments.segmentlist()
            segmentDict[slideMapping[currSlidId]].append(currSeg)
            segmentDict[slideMapping[currSlidId]].coalesce()

        # extract the multi inspiral table
        try:
            multiInspiralTable = table.get_table(
                doc, lsctables.MultiInspiralTable.tableName)
            # Remap the time slide IDs
            for multi in multiInspiralTable:
                newID = slideMapping[int(multi.time_slide_id)]
                multi.time_slide_id = ilwd.ilwdchar(\
                    "time_slide:time_slide_id:%d" % (newID))
            if multis:
                multis.extend(multiInspiralTable)
            else:
                multis = multiInspiralTable
#          except: multiInspiralTable = None
        except:
            raise

    if not generate_output_tables:
        return multis, timeSlides, segmentDict
    else:
        # Make a new time slide table
        timeSlideTab = lsctables.New(lsctables.TimeSlideTable)
        for slideID, offsetDict in enumerate(timeSlides):
            for instrument in offsetDict.keys():
                currTimeSlide = lsctables.TimeSlide()
                currTimeSlide.instrument = instrument
                currTimeSlide.offset = offsetDict[instrument]
                currTimeSlide.time_slide_id = ilwd.ilwdchar(\
                    "time_slide:time_slide_id:%d" % (slideID))
                currTimeSlide.process_id = ilwd.ilwdchar(\
                    "process:process_id:%d" % (0))
                timeSlideTab.append(currTimeSlide)

        # Make a new mapping table
        timeSlideSegMapTab = lsctables.New(lsctables.TimeSlideSegmentMapTable)
        for i in range(len(timeSlides)):
            currMapEntry = lsctables.TimeSlideSegmentMap()
            currMapEntry.time_slide_id = ilwd.ilwdchar(\
                "time_slide:time_slide_id:%d" % (i))
            currMapEntry.segment_def_id = ilwd.ilwdchar(\
                "segment_def:segment_def_id:%d" % (i))
            timeSlideSegMapTab.append(currMapEntry)

        # Make a new segment table
        newSegmentTable = lsctables.New(lsctables.SegmentTable)
        segmentIDCount = 0
        for i in range(len(timeSlides)):
            currSegList = segmentDict[i]
            for seg in currSegList:
                currSegment = lsctables.Segment()
                currSegment.segment_id = ilwd.ilwdchar(\
                    "segment:segment_id:%d" %(segmentIDCount))
                segmentIDCount += 1
                currSegment.segment_def_id = ilwd.ilwdchar(\
                    "segment_def:segment_def_id:%d" % (i))
                currSegment.process_id = ilwd.ilwdchar(\
                    "process:process_id:%d" % (0))
                currSegment.set(seg)
                # sentinel values for database-only columns
                currSegment.creator_db = -1
                currSegment.segment_def_cdb = -1
                newSegmentTable.append(currSegment)

        return multis,timeSlides,segmentDict,timeSlideTab,newSegmentTable,\
            timeSlideSegMapTab
# NOTE(review): this chunk begins mid-call -- the function being invoked
# (everything before "(xmldoc, ...") is outside this view; it appears to
# register an "nr_catalog" process entry and keep its process_id as
# `proc_id`. Confirm against the full file.
    (xmldoc, "nr_catalog", args.__dict__, comment="",
     version=lalapps.git_version.version,
     cvs_repository='lalsuite/' + lalapps.git_version.branch,
     cvs_entry_time=lalapps.git_version.date).process_id

sim_table = lsctables.New(lsctables.SimInspiralTable)

inj_list = args.inputs

# build one sim_inspiral row per NR injection HDF5 file
for count, inj in enumerate(inj_list):
    curr_sim = lsctables.SimInspiral()
    # Add the empty columns
    fill_missing_columns(curr_sim)
    # Set id columns
    curr_sim.process_id = proc_id
    curr_sim.simulation_id = ilwd.ilwdchar("sim_inspiral:simulation_id:%d"\
        %(count))
    curr_sim.numrel_data = inj
    f = h5py.File(inj, 'r')
    curr_sim.eta = f.attrs['eta']
    # snap near-equal-mass etas onto exactly 0.25
    if curr_sim.eta > 0.25 and curr_sim.eta < 0.2501:
        curr_sim.eta = 0.25
    # Populate spins columns with spins in LAL frame! Need to be
    # transformed from NR frame
    curr_sim.f_lower = f.attrs['f_lower_at_1MSUN']
    f.close()
    # mtotal is factored out when defining the spins in this case.
    mtotal = 1.0
    spins = SimInspiralNRWaveformGetSpinsFromHDF5File\
        (curr_sim.f_lower / mtotal, mtotal, inj)
    # NOTE(review): the chunk is cut off after these two assignments; the
    # remaining spin components are set outside this view.
    curr_sim.spin1x = spins[0]
    curr_sim.spin1y = spins[1]
# Convert to m1, m2 m1m2_grid = np.array([lsu.m1m2(cart_grid[i][0], cart_grid[i][1]) for i in xrange(len(cart_grid))]) m1m2_grid /= lal.MSUN_SI if opts.mass_points_xml: xmldoc = ligolw.Document() xmldoc.appendChild(ligolw.LIGO_LW()) procrow = process.append_process(xmldoc, program=sys.argv[0]) procid = procrow.process_id process.append_process_params(xmldoc, procrow, process.process_params_from_dict(opts.__dict__)) sim_insp_tbl = lsctables.New(lsctables.SimInspiralTable, ["simulation_id", "process_id", "numrel_data", "mass1", "mass2", "psi0", "psi3"]) for itr, (m1, m2) in enumerate(m1m2_grid): for l1 in np.linspace(common_cl.param_limits["lam_tilde"][0], common_cl.param_limits["lam_tilde"][1], Nlam): sim_insp = sim_insp_tbl.RowType() sim_insp.numrel_data = "MASS_SET_%d" % itr sim_insp.simulation_id = ilwd.ilwdchar("sim_inspiral:sim_inspiral_id:%d" % itr) sim_insp.process_id = procid sim_insp.mass1, sim_insp.mass2 = m1, m2 sim_insp.psi0, sim_insp.psi3 = opts.eff_lambda or l1, opts.delta_eff_lambda or 0 sim_insp_tbl.append(sim_insp) xmldoc.childNodes[0].appendChild(sim_insp_tbl) if opts.channel_name: ifos = "".join([o.split("=")[0][0] for o in opts.channel_name]) else: ifos = "HLV" start = int(event_time) fname = "%s-MASS_POINTS-%d-1.xml.gz" % (ifos, start) utils.write_filename(xmldoc, fname, gz=True)
# NOTE(review): `f` (the input .trg file path) is bound by an enclosing loop
# not visible in this chunk; this fragment converts one KleineWelle .trg
# file into a LIGO_LW XML sngl_burst table.
xmldoc = Document()
xmldoc.appendChild(LIGO_LW())
# only the listed columns are written to the output table
sbt = lsctables.New(lsctables.SnglBurstTable, [
    "ifo", "peak_time", "peak_time_ns", "event_id", "process_id",
    "start_time", "start_time_ns", "confidence", "chisq", "chisq_dof",
    "amplitude", "duration", "search", "central_freq", "channel", "snr",
    "bandwidth"
])
# file names look like the example below: first "_"-token is the ifo,
# middle tokens joined back together form the channel name
#H1_TCS-ITMY_PD_ISS_OUT_AC_1_1024.xml
fspl = os.path.basename(f).split("_")
ifo = fspl[0]
channel = "_".join(fspl[1:-2])
sbt += fromkwfile(f,
                  ifo=ifo,
                  channel=channel,
                  columns=[
                      "duration", "start_time", "peak_time", "central_freq",
                      "bandwidth", "snr", "confidence"
                  ])
for i, sb in enumerate(sbt):
    sb.search = "KleineWelle"
    sb.process_id = ilwd.ilwdchar("process:process_id:0")
    # event ids are numbered sequentially within each file
    sb.event_id = ilwd.ilwdchar("sngl_burst:event_id:%d" % i)
    #sb.confidence = 0
    # columns not supplied by the .trg file are zeroed out
    sb.chisq_dof = 0
    sb.chisq = 0
    sb.amplitude = 0
xmldoc.childNodes[0].appendChild(sbt)
# output file name: same path with "trg" replaced by "xml"
write_filename(xmldoc, re.sub("trg", "xml", f))
#write_fileobj( xmldoc, sys.stdout )