### assumes the laldetchar-era iDQ module layout for these imports
from laldetchar.idq import idq
from laldetchar.idq import event

def check_calibration(segs, times, timeseries, FAPthrs):
    """
    checks the pipeline's calibration at each "FAPthr in FAPthrs"

    this may be sped up with a call to timeseries_to_livetime() instead of idq.timeseries_to_segments() -> event.livetime
    however, we currently use some segment logic and it is not clear that we can avoid actually generating segments
    """
    idq_livetime = event.livetime(segs)

    segments = []
    deadtimes = []
    statedFAPs = []
    errs = []
    for FAPthr in FAPthrs:
        SEGS = []
        max_statedFAP = 0.0
        for (t, ts) in zip(times, timeseries):
            (_segs, _min_ts) = idq.timeseries_to_segments(
                t, -ts, -FAPthr)  # we want FAP <= FAPthr <--> -FAP >= -FAPthr
            SEGS += list(_segs)  ### coerce to a list in case the return type of timeseries_to_segments changes...
            if _min_ts is not None:
                statedFAP = -_min_ts
                if max_statedFAP < statedFAP:
                    max_statedFAP = statedFAP

        SEGS = event.andsegments([SEGS, segs])
        segments.append(SEGS)

        SEGS_livetime = event.livetime(SEGS)

        if not idq_livetime:
            if SEGS_livetime:
                raise ValueError(
                    "something is weird with segments... idq_livetime is zero but SEGS_livetime is not"
                )
            else:
                deadtime = 0.0
        else:
            deadtime = 1.0 * SEGS_livetime / idq_livetime
            if deadtime > 1.0:
                raise ValueError(
                    "deadtime > 1.0, something is weird...\n  SEGS_livetime = %f\n  idq_livetime = %f"
                    % (SEGS_livetime, idq_livetime))
        deadtimes.append(deadtime)

        statedFAPs.append(max_statedFAP)

        ### fractional disagreement between the observed deadtime and the
        ### largest FAP the pipeline actually stated; zero for perfect calibration
        if max_statedFAP > 0:
            err = deadtime / max_statedFAP - 1
        elif deadtime:  ### time was flagged even though no FAP was stated
            err = 1
        else:
            err = 0
        errs.append(err)

    return segments, deadtimes, statedFAPs, errs
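
### usage sketch (not part of the original source): exercising check_calibration
### on synthetic inputs.  The sampled-array layout expected by
### idq.timeseries_to_segments is an assumption here.
import numpy as np

rate = 256                                   # samples per second (assumed)
t = np.arange(0, 64.0, 1.0 / rate)           # 64 s of timestamps
fap = np.random.uniform(0, 1, size=len(t))   # fake, perfectly calibrated FAP stream
segs = [[0.0, 64.0]]                         # analyzable segments
FAPthrs = [1e-3, 1e-2, 1e-1]

segments, deadtimes, statedFAPs, errs = check_calibration(segs, [t], [fap], FAPthrs)
for FAPthr, deadtime, statedFAP, err in zip(FAPthrs, deadtimes, statedFAPs, errs):
    ### a well-calibrated pipeline yields deadtime ~ statedFAP, i.e. err ~ 0
    print('FAPthr=%.1e deadtime=%.3e statedFAP=%.3e err=%+.3f' % (FAPthr, deadtime, statedFAP, err))
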
### compute total time covered
#T = event.livetime( [idq.extract_start_stop(fap, suffix='.gwf') for fap in faps] )*1.0
T = event.livetime(idqsegs) * 1.0

### combine timeseries and generate segments
if opts.verbose:
    print "generating segments from %d fap files" % (len(faps))
segs = dict((fapThr, [[], 1.0]) for fapThr in opts.FAPthr)
t, ts = idq.combine_gwf(faps, [fap_channame])
for t, ts in zip(t, ts):

    t, ts = idq.timeseries_in_segments(t, ts, idqsegs)

    for fapThr in opts.FAPthr:
        s, minFAP = idq.timeseries_to_segments(
            t, -ts, -fapThr)  # we want FAP <= FAPthr <--> -FAP >= -FAPthr
        s = event.andsegments(
            [s, idqsegs]
        )  ### necessary because of how timeseries_to_segments may interact with timeseries_in_segments

        segs[fapThr][0] += s
        if minFAP is not None:
            segs[fapThr][1] = min(segs[fapThr][1], -minFAP)
if opts.verbose:
    print "computing associated deadtimes"
dt = [event.livetime(segs[fapThr][0]) / T for fapThr in opts.FAPthr]
maxFAP = [segs[fapThr][1] for fapThr in opts.FAPthr]

### write json for calibration check
jsonfilename = idq.gdb_calib_json(gdbdir, ifo, opts.classifier, filetag,
                                  opts.start, opts.end - opts.start)
if opts.verbose:
    print "  %s" % jsonfilename
file_obj = open(jsonfilename, "w")
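
### what the original writes into file_obj is not shown in this snippet; a
### minimal sketch of a plausible payload (key names here are hypothetical,
### not the actual GraceDb schema):
import json

report = {
    'FAPthr'  : opts.FAPthr,  # thresholds tested
    'deadtime': dt,           # fraction of idqsegs livetime flagged at each threshold
    'maxFAP'  : maxFAP,       # stated-FAP extrema recorded per threshold
}
file_obj.write(json.dumps(report))
file_obj.close()
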
            ### record this stretch of analyzed time in the segment_summary table
            segsum_row = lsctables.SegmentSum()

            segsum_row.segment_sum_id = segsum.get_next_id()

            segsum_row.comment = ''
            segsum_row.process_id = proc_id
            segsum_row.segment_def_id = segdef_id
            segsum_row.start_time    = start_time
            segsum_row.start_time_ns = start_time_ns
            segsum_row.end_time      = end_time
            segsum_row.end_time_ns   = end_time_ns

            segsum.append( segsum_row )

            ### generate segments for this threshold
            segs, min_TS = idq.timeseries_to_segments(T[:], -TS[:], -FAPthr)

            if opts.right_padding != 0: ### transform all segments from [start, end] -> [start, end+right_padding]
                logger.info('      moving right edge of segments: end->end+%.6f'%(opts.right_padding))
                for ind, (s, e) in enumerate(segs):
                    segs[ind] = [s, e + opts.right_padding]

            if opts.left_padding != 0: ### transform all segments from [start, end] -> [start-left_padding, end]
                logger.info('      moving left edge of segments: start->start-%.6f' % (opts.left_padding))
                for ind, (s, e) in enumerate(segs):
                    segs[ind] = [s - opts.left_padding, e]

            if (opts.right_padding != 0) or (opts.left_padding != 0): ### clean up segments
                logger.info('      ensuring segments still make sense after moving the edges')
                good_segs = []
                for (s, e) in segs:
                    if s < e:  ### completion assumed: keep only segments that retain positive length
                        good_segs.append([s, e])
                segs = good_segs
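
### illustrative sketch (not from the original source): why the cleanup above is
### needed.  Padding the edges can leave zero- or negative-length segments and
### can make neighbors overlap; a filter-and-merge pass restores a disjoint list.
def clean_segments(segs):
    """drop non-positive-length segments, then merge overlapping neighbors"""
    segs = sorted([s, e] for (s, e) in segs if s < e)
    merged = []
    for s, e in segs:
        if merged and s <= merged[-1][1]:  # overlaps or touches the previous segment
            merged[-1][1] = max(merged[-1][1], e)
        else:
            merged.append([s, e])
    return merged

### [1.0, 2.0] padded right by +1.5 now overlaps [3.0, 4.0]; [5.0, 4.5] was inverted
print(clean_segments([[1.0, 3.5], [3.0, 4.0], [5.0, 4.5]]))  # -> [[1.0, 4.0]]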