# NOTE(review): this fragment begins mid-function -- the code that opened
# tables_err_file and launched `proc` (the idq-gdb-glitch-tables subprocess)
# is not visible in this chunk; verify against the full source.
tables_err_file.close()
    exit_status = proc.wait()  # block until the subprocess finishes

    if exit_status != 0:
        # subprocess failed: warn, and (unless uploads are disabled)
        # record the failure on the GraceDB event
        logger.info("    WARNING: idq-gdb-glitch-tables failed for " +
                    classifier)
        if not options.skip_gracedb_upload:
            gracedb.writeLog(gdb_id,
                             message="FAILED: iDQ glitch tables for " +
                             classifier + " at " + ifo,
                             tagname=idq.tagnames)
        gch_xml = None  # no glitch xml available on failure
    else:
        logger.info("    Done: idq-gdb-glitch-tables for " + classifier + ".")
        # reconstruct the filename the subprocess is expected to have written
        gch_xml = idq.gdb_xml(idq_gdb_main_dir, classifier, ifo,
                              "_%s" % usertag, gps_start,
                              gps_end - gps_start)  ### compute name

    ### run idq-gdb-timeseries for each classifier
    logger.info("    Begin: executing idq-gdb-timeseries for " + classifier +
                " ...")

    # assemble the idq-gdb-timeseries command line
    timeseries_cmd = "%s -c %s -s %.4f -e %.4f --gps %f -g %s -C %s --plotting-gps-start %.4f --plotting-gps-end %.4f" % (
        timeseries_script, options.config_file, gps_start, gps_end,
        event_gps_time, gdb_id, classifier, plotting_gps_start,
        plotting_gps_end)

    # propagate relevant command-line flags to the child process
    if options.verbose:
        timeseries_cmd += " -v"
    if options.skip_gracedb_upload:
        timeseries_cmd += " --skip-gracedb-upload"
# --- Example 2 --- (non-code scraper residue: original marker "Exemplo n.º 2" and a stray "0")
# form two open segments using start and stop times
# (everything before opts.start and everything after opts.end)
seglist = segments.segmentlist()
seglist.append(segments.segment([-segments.infinity(), lal.LIGOTimeGPS(opts.start)]))
seglist.append(segments.segment([lal.LIGOTimeGPS(opts.end), segments.infinity()]))

# delete glitch events that fall inside of these segments,
# i.e. keep only events within [opts.start, opts.end]
idq_tables_dbutils.delete_glitch_events_in_segmentlist(connection, cursor, seglist)


###############################################################################
# ## save merged xmldoc
###############################################################################
# earlier hand-rolled filename template, kept for reference:
#merged_xmldoc_filename = '%s/%s_idq_%s_glitch_%s%d-%d.xml' % (
#    opts.output_dir,
#    opts.ifo,
#    opts.classifier,
#    opts.tag,
#    int(opts.start),
#    int(opts.end - opts.start)
#    )
# build the output filename via the shared iDQ naming helper instead
merged_xmldoc_filename = idq.gdb_xml(opts.output_dir, opts.classifier, opts.ifo, opts.tag, int(opts.start), int(opts.end-opts.start))

if opts.verbose:
    print 'saving ' + merged_xmldoc_filename
# extract the database into an xml file and write it to disk
ligolw_sqlite.extract(connection, merged_xmldoc_filename , verbose = opts.verbose) # extract database into xml file
connection.close()
if not opts.skip_gracedb_upload:
    # write log message to gracedb and upload the merged xml file
    gracedb.writeLog(opts.gracedb_id, message="iDQ glitch tables " + opts.ifo + ":", filename=merged_xmldoc_filename)
    # NOTE(review): from here on this branch switches from `opts` to `options`
    # and references names (tables_out, tables_cmd, classifier, cwd, ...) that
    # are not defined above -- it looks like two different scripts were spliced
    # together at this point; verify against the original sources.
    tables_out_file = open(tables_out, 'a')
    tables_err_file = open(tables_err, 'a')
    # run idq-gdb-glitch-tables, appending its stdout/stderr to the log files
    proc = subprocess.Popen(tables_cmd.split(), stdout=tables_out_file, stderr=tables_err_file, cwd=cwd)
    tables_out_file.close()
    tables_err_file.close()
    exit_status = proc.wait() # block until the subprocess finishes

    if exit_status != 0:
        # subprocess failed: warn and record the failure on the GraceDB event
        logger.info("    WARNING: idq-gdb-glitch-tables failed for " + classifier)
        if not options.skip_gracedb_upload:
            gracedb.writeLog(gdb_id, message="FAILED: iDQ glitch tables for " + classifier + " at " + ifo)
        gch_xml = None  # no glitch xml available on failure
    else:
        logger.info("    Done: idq-gdb-glitch-tables for " + classifier + ".")
        # reconstruct the filename the subprocess is expected to have written
        gch_xml = idq.gdb_xml(idq_gdb_main_dir, classifier, ifo, "_%s"%usertag, gps_start, gps_end-gps_start) ### compute name

    ### run idq-gdb-timeseries for each classifier
    logger.info("    Begin: executing idq-gdb-timeseries for " + classifier + " ...")

    # assemble the idq-gdb-timeseries command line
    timeseries_cmd = "%s -c %s -s %.4f -e %.4f --gps %f -g %s -C %s --plotting-gps-start %.4f --plotting-gps-end %.4f"%(timeseries_script, options.config_file, gps_start, gps_end, event_gps_time, gdb_id, classifier, plotting_gps_start, plotting_gps_end)

    # propagate relevant command-line flags to the child process
    if options.verbose:
        timeseries_cmd += " -v"
    if options.skip_gracedb_upload:
        timeseries_cmd += " --skip-gracedb-upload"

#    if gch_xml: ### currently broken, uncomment when fixed!
#        timeseries_cmd += " --gch-xml %s"%gch_xml

#    if cln_xml: ### never built!
# Purge glitch events lying inside the boundary segments (outside the
# requested [start, end] analysis window).
idq_tables_dbutils.delete_glitch_events_in_segmentlist(connection, cursor, seglist)

###############################################################################
# ## save merged xmldoc
###############################################################################
# Output filename follows the shared iDQ GraceDB naming convention.
merged_path = idq.gdb_xml(gdbdir, opts.classifier, ifo, tag, int(opts.start), int(opts.end - opts.start))

if opts.verbose:
    print('saving ' + merged_path)

# Serialize the database contents into the xml document on disk,
# then release the database connection.
ligolw_sqlite.extract(connection, merged_path, verbose=opts.verbose)
connection.close()

if not opts.skip_gracedb_upload:
    # Log to gracedb, attaching the merged glitch-table document.
    gracedb.writeLog(opts.gracedb_id, message="iDQ glitch tables " + ifo + ":", filename=merged_path)
# NOTE(review): `seglist` here extends the segmentlist created earlier in the
# file rather than starting a fresh one -- confirm the duplicate boundary
# segments are intended.
seglist.append(segments.segment([-segments.infinity(), lal.LIGOTimeGPS(opts.start)]))
seglist.append(segments.segment([lal.LIGOTimeGPS(opts.end), segments.infinity()]))

# delete glitch events that fall inside of these segments
idq_tables_dbutils.delete_glitch_events_in_segmentlist(connection, cursor, seglist)


###############################################################################
# ## save merged xmldoc
###############################################################################
# earlier hand-rolled filename template, kept for reference:
# merged_xmldoc_filename = '%s/%s_idq_%s_glitch_%s%d-%d.xml' % (
#    opts.output_dir,
#    opts.ifo,
#    opts.classifier,
#    opts.tag,
#    int(opts.start),
#    int(opts.end - opts.start)
#    )
# build the output filename via the shared iDQ naming helper instead
merged_xmldoc_filename = idq.gdb_xml(
    opts.output_dir, opts.classifier, opts.ifo, opts.tag, int(opts.start), int(opts.end - opts.start)
)

if opts.verbose:
    print "saving " + merged_xmldoc_filename
# extract the database into an xml file and write it to disk
ligolw_sqlite.extract(connection, merged_xmldoc_filename, verbose=opts.verbose)  # extract database into xml file
connection.close()
if not opts.skip_gracedb_upload:
    # write log message to gracedb and upload the merged xml file
    gracedb.writeLog(opts.gracedb_id, message="iDQ glitch tables " + opts.ifo + ":", filename=merged_xmldoc_filename)
# Build two half-open segments covering everything OUTSIDE the requested
# [opts.start, opts.end] analysis window.
boundary_segs = segments.segmentlist()
boundary_segs.append(segments.segment([-segments.infinity(), lal.LIGOTimeGPS(opts.start)]))
boundary_segs.append(segments.segment([lal.LIGOTimeGPS(opts.end), segments.infinity()]))

# Drop glitch events that fall within those boundary segments, keeping only
# events inside the analysis window.
idq_tables_dbutils.delete_glitch_events_in_segmentlist(connection, cursor, boundary_segs)


###############################################################################
# ## save merged xmldoc
###############################################################################
# Output filename follows the shared iDQ GraceDB naming convention.
merged_doc_path = idq.gdb_xml(gdbdir, opts.classifier, ifo, tag, int(opts.start), int(opts.end - opts.start))

if opts.verbose:
    print('saving ' + merged_doc_path)

# Serialize the database contents into the xml document on disk, then
# release the database connection.
ligolw_sqlite.extract(connection, merged_doc_path, verbose=opts.verbose)
connection.close()

if not opts.skip_gracedb_upload:
    # Log to gracedb, attaching the merged glitch-table document.
    gracedb.writeLog(opts.gracedb_id, message="iDQ glitch tables " + ifo + ":", filename=merged_doc_path)