Example #1
            else:
                logger.warning(
                    'WARNING: not enough samples to trust calibration. skipping calibration update for %s'
                    % classifier)

        elif opts.mode == "npy":
            ### write list of rank files to cache file
            cache = idq.cache(output_dir, classifier, "_rankcache%s" % usertag)
            logger.info('writing list of rank files to %s' % cache)
            f = open(cache, 'w')
            for rank in ranksD[classifier]:
                print >> f, rank
            f.close()

            logger.info(
                '  analyzing rank timeseries to obtain mapping from rank->fap')

            ### load in timeseries
            _times, timeseries = idq.combine_ts(ranksD[classifier], n=1)

            times = []
            ranks = []
            for t, ts in zip(_times, timeseries):
                _t, _ts = idq.timeseries_in_segments(t, ts, idqsegs)
                if len(_ts):
                    times.append(_t)
                    ranks.append(_ts)

            ### need to compute deadsecs for every rank in r -> function call (probably within calibration module)!
            crank = []
            for _r in r:

                dsec = 0
                for t, ts in zip(times, ranks):
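
The excerpt breaks off inside the loop that the TODO comment describes: for every candidate rank threshold in r, count the "dead" seconds during which the classifier's rank timeseries sits at or above that threshold. A minimal, self-contained sketch of that bookkeeping is given below; the function name deadsecs_for_thresholds and the sample spacing dt are illustrative assumptions, not part of the iDQ API.

import numpy as np

def deadsecs_for_thresholds(times, ranks, thresholds, dt=1.0):
    """Count, for each threshold, the seconds during which the rank
    timeseries is at or above that threshold.

    times      : list of 1-D arrays of sample times (one per contiguous segment)
    ranks      : list of 1-D arrays of rank values, aligned with times
    thresholds : iterable of rank thresholds (the r array in the excerpt)
    dt         : assumed sample spacing in seconds (hypothetical)
    """
    crank = []
    for _r in thresholds:
        dsec = 0.0
        for t, ts in zip(times, ranks):
            # every sample at or above the threshold contributes dt seconds
            dsec += dt * np.sum(np.asarray(ts) >= _r)
        crank.append(dsec)
    return np.array(crank)

# toy usage: two contiguous 1 Hz segments
times = [np.arange(0, 10), np.arange(20, 30)]
ranks = [np.linspace(0.0, 1.0, 10), np.linspace(0.0, 0.5, 10)]
print(deadsecs_for_thresholds(times, ranks, thresholds=[0.1, 0.5, 0.9]))
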
Example #3
f_ax.set_yscale('log')  # this may be fragile if fap=0 for all points in the plot. That's super rare, so maybe we don't have to worry about it?

r_ax.set_title(opts.ifo)

#=================================================
# RANK
#=================================================
if opts.verbose:
    print "reading rank timeseries from:"
    for filename in rank_filenames:
        print '\t' + filename

# merge time-series
if opts.verbose:
    print "merging rank timeseries"
(r_times, r_timeseries) = idq.combine_ts(rank_filenames)

# for each bit of continuous data:
#   add to plot
#   write merged timeseries file
#   generate and write summary statistics
if opts.verbose:
    print "plotting and summarizing rank timeseries"

merged_rank_filenames = []
merged_rank_frames = []
rank_summaries = []
max_rank = -np.infty
max_rank_segNo = 0
segNo = 0
end = opts.plotting_gps_start
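
After this setup, the script (not shown in the excerpt) walks over each contiguous stretch returned by idq.combine_ts, adds it to the plot, writes a merged timeseries file, and records summary statistics, tracking the maximum rank and the segment in which it occurred. A rough sketch of just the summary bookkeeping is below; the dictionary fields are illustrative and not the script's actual output format.

import numpy as np

def summarize_rank_segments(r_times, r_timeseries):
    """Build per-segment summaries and track the global maximum rank.

    r_times      : list of 1-D time arrays, one per contiguous segment
    r_timeseries : matching list of 1-D rank arrays
    """
    rank_summaries = []
    max_rank = -np.inf
    max_rank_segNo = 0
    for segNo, (t, ts) in enumerate(zip(r_times, r_timeseries)):
        seg_max = float(np.max(ts))
        rank_summaries.append({
            'segNo': segNo,               # illustrative field names
            'start': float(t[0]),
            'stop': float(t[-1]),
            'min_rank': float(np.min(ts)),
            'max_rank': seg_max,
            'mean_rank': float(np.mean(ts)),
        })
        if seg_max > max_rank:
            max_rank = seg_max
            max_rank_segNo = segNo
    return rank_summaries, max_rank, max_rank_segNo

# toy usage with two fake segments
summaries, max_rank, segNo = summarize_rank_segments(
    [np.arange(0, 10), np.arange(100, 110)],
    [np.random.rand(10), np.random.rand(10)])
print(summaries[segNo])
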
Example #5
        elif (opts.mode == "npy") or (opts.mode == "gwf"):
            ### write list of rank files to cache file
            cache = idq.cache(output_dir, classifier, "_rankcache%s" % usertag)
            logger.info('writing list of rank files to %s' % cache)
            f = open(cache, 'w')
            for rank in ranksD[classifier]:
                print >> f, rank
            f.close()

            logger.info(
                '  analyzing rank timeseries to obtain mapping from rank->fap')

            ### load in timeseries
            if opts.mode == "npy":
                _times, timeseries = idq.combine_ts(ranksD[classifier], n=1)
            else:  ### opts.mode=="gwf"
                _times, timeseries = idq.combine_gwf(
                    ranksD[classifier], [channameD[classifier]['rank']])

            times = []
            ranks = []
            for t, ts in zip(_times, timeseries):
                _t, _ts = idq.timeseries_in_segments(t, ts, idqsegs)
                if len(_ts):
                    times.append(_t)
                    ranks.append(_ts)

            ### need to compute deadsecs for every rank in r -> function call (probably within calibration module)!
            crank = []
            for _r in r:
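
Once crank has been filled with the dead seconds at or above each rank threshold, the "mapping from rank->fap" that the log message refers to amounts to dividing each count by the total analyzed livetime. The sketch below assumes idqsegs is a list of [start, stop] GPS pairs and that FAP is estimated as dead time over livetime; the helper name rank_to_fap is made up for illustration.

import numpy as np

def rank_to_fap(r, crank, idqsegs):
    """Map each rank threshold in r to an estimated false-alarm probability.

    r       : array of rank thresholds
    crank   : dead seconds at or above each threshold (same length as r)
    idqsegs : list of [start, stop] GPS segments defining the analyzed livetime
    """
    livetime = float(sum(stop - start for start, stop in idqsegs))
    if livetime <= 0:
        raise ValueError('no livetime in idqsegs; cannot calibrate')
    fap = np.asarray(crank, dtype=float) / livetime
    return np.clip(fap, 0.0, 1.0)

# toy usage: 100 s of livetime, dead time falling off with rank
r = np.linspace(0.0, 1.0, 5)
crank = [100.0, 60.0, 25.0, 5.0, 0.0]
print(rank_to_fap(r, crank, idqsegs=[[0, 50], [50, 100]]))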