Example #1
    ### split (fap, fapUL) tuples into parallel lists of timeseries
    fUL_timeseries = [f[1] for f in f_timeseries]
    f_timeseries = [f[0] for f in f_timeseries]

    ### write combined data to disk
    if opts.verbose:
        print "writing combined fap frames to disk"
    for t, ts, tS in zip(f_times, f_timeseries, fUL_timeseries):
        truth = (opts.plotting_gps_start <= t) * (t <= opts.plotting_gps_end)
        t = t[truth]
        ts = ts[truth]
        tS = tS[truth]

        start = int(t[0])
        dt = t[1] - t[0]
        dur = int(len(t) * dt)
        fapfr = idq.gdb_timeseriesgwf(gdbdir, opts.classifier, ifo,
                                      "_fap%s" % filetag, start, dur)
        if opts.verbose:
            print "    %s" % fapfr
        idq.timeseries2frame(fapfr, {
            fap_channame: ts,
            fapUL_channame: tS
        }, t[0], dt)
        if not opts.skip_gracedb_upload:
            message = "iDQ fap timeseries for %s at %s within [%d, %d] :" % (
                opts.classifier, ifo, start, start + dur)
            if opts.verbose:
                print "    %s" % message
            gracedb.writeLog(opts.gracedb_id,
                             message=message,
                             filename=fapfr,
                             tagname=idq.tagnames)  #+['data_quality'] )
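The idq and gracedb calls above belong to the iDQ pipeline and the GraceDB client and need a live analysis directory, so they are not reproduced here. The short sketch below, using only numpy with made-up GPS times and a random stand-in for the FAP series, reproduces just the window selection shown in the example: the boolean mask restricting the samples to the plotting window and the derivation of start, dt and dur used when the frame is written.

import numpy as np

# hypothetical stand-ins: uniformly sampled GPS times and a fake FAP series
t = np.arange(1000000000, 1000000064, 1.0)      # 64 s of 1 Hz samples
ts = np.random.random(len(t))                   # stand-in FAP values
plotting_gps_start, plotting_gps_end = 1000000010, 1000000050

# boolean mask; equivalent to the (a <= t) * (t <= b) product in the example
truth = (plotting_gps_start <= t) & (t <= plotting_gps_end)
t = t[truth]
ts = ts[truth]

# frame metadata derived exactly as in the example
start = int(t[0])
dt = t[1] - t[0]
dur = int(len(t) * dt)
print("kept %d samples spanning [%d, %d)" % (len(ts), start, start + dur))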
Example #2
    #        opts.classifier,
    #        opts.tag,
    #        int(_start),
    #        int(_dur))

    merged_rank_filename = idq.gdb_timeseries(opts.output_dir, opts.classifier,
                                              opts.ifo, "_rank%s" % opts.tag,
                                              int(_start), int(_dur))

    if opts.verbose:
        print "\twriting " + merged_rank_filename
    np.save(event.gzopen(merged_rank_filename, 'w'), ts)
    merged_rank_filenames.append(merged_rank_filename)

    rankfr = idq.gdb_timeseriesgwf(opts.output_dir, opts.classifier,
                                   opts.ifo, "_rank%s" % opts.tag, int(_start),
                                   int(_dur))
    if opts.verbose:
        print "\twriting " + rankfr
    # dt for the frame: segment duration spread over len(t)-1 sample intervals
    idq.timeseries2frame(rankfr, {rank_channame: ts}, _start,
                         _dur / (len(t) - 1))
    merged_rank_frames.append(rankfr)

    # generate and write summary statistics
    (r_min, r_max, r_mean, r_stdv) = idq.stats_ts(ts)
    if r_max > max_rank:
        max_rank = r_max
        max_rank_segNo = segNo
    rank_summaries.append([
        _start,
        _end,
    fUL_timeseries = [f[1] for f in f_timeseries]
    f_timeseries = [f[0] for f in f_timeseries]

    ### write combined data to disk
    if opts.verbose:
        print "writing combined fap frames to disk"
    for t, ts, tS in zip(f_times, f_timeseries, fUL_timeseries):
        truth = (opts.plotting_gps_start <= t) * (t <= opts.plotting_gps_end)
        t = t[truth]
        ts = ts[truth]
        tS = tS[truth]

        start = int(t[0])
        dt = t[1] - t[0]
        dur = int(len(t) * dt)
        fapfr = idq.gdb_timeseriesgwf(gdbdir, opts.classifier, ifo,
                                      "_fap%s" % filetag, start, dur)
        if opts.verbose:
            print "    %s" % fapfr
        idq.timeseries2frame(fapfr, {
            fap_channame: ts,
            fapUL_channame: tS
        }, t[0], dt)
        if not opts.skip_gracedb_upload:
            message = "iDQ fap timeseries for %s at %s within [%d, %d] :" % (
                opts.classifier, ifo, start, start + dur)
            if opts.verbose:
                print "    %s" % message
            gracedb.writeLog(opts.gracedb_id,
                             message=message,
                             filename=fapfr)  #, tagname=['data_quality'] )

    ### post min-fap value
    if opts.verbose:
        print "finding minimum FAP observed within [%.3f, %.3f]"%(opts.start, opts.end)
    min_fap = 1.0
    for (t, ts) in zip(f_times, f_timeseries):
        # ensure time series only fall within desired range
#    merged_rank_filename = '%s/%s_idq_%s_rank_%s%d-%d.npy.gz' % (
#        opts.output_dir,
#        opts.ifo,
#        opts.classifier,
#        opts.tag,
#        int(_start),
#        int(_dur))

    merged_rank_filename = idq.gdb_timeseries(opts.output_dir, opts.classifier,
                                              opts.ifo, "_rank%s" % opts.tag,
                                              int(_start), int(_dur))

    if opts.verbose:
        print "\twriting " + merged_rank_filename
    np.save(event.gzopen(merged_rank_filename, 'w'), ts)
    merged_rank_filenames.append(merged_rank_filename)

    rankfr = idq.gdb_timeseriesgwf(opts.output_dir, opts.classifier,
                                   opts.ifo, "_rank%s" % opts.tag, int(_start),
                                   int(_dur))
    if opts.verbose:
        print "\twriting " + rankfr
    idq.timeseries2frame(rankfr, {rank_channame: ts}, _start,
                         _dur / (len(t) - 1))
    merged_rank_frames.append(rankfr)

    # generate and write summary statistics
    (r_min, r_max, r_mean, r_stdv) = idq.stats_ts(ts)
    if r_max > max_rank:
        max_rank = r_max
        max_rank_segNo = segNo
    rank_summaries.append([
        _start,
        _end,
        _dur / (len(t) - 1),
        r_min,