Example #1
def test_write_segments_ascii(self):
    for ncol in [2, 4]:
        with NamedTemporaryFile(suffix='.txt', delete=False) as f:
            segments.write_ascii(f.name, TEST_SEGMENTS, ncol=ncol)
            f.delete = True
            a = SegmentList.read(f.name, gpstype=float, strict=False)
            self.assertEqual(a, TEST_SEGMENTS_2)
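
The fixtures above come from the surrounding test module; a minimal sketch of what they presumably look like (illustrative values, not the project's real data):

# Hypothetical fixtures for the test above (illustrative values only)
from gwpy.segments import Segment, SegmentList

TEST_SEGMENTS = SegmentList([Segment(0, 1), Segment(1.5, 2.5)])
# the same segments, expected back after the ASCII round trip
TEST_SEGMENTS_2 = SegmentList([Segment(0.0, 1.0), Segment(1.5, 2.5)])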
Example #2
def test_write_segments_ascii(ncol, tmpdir):
    outdir = str(tmpdir)
    out = os.path.join(outdir, 'test.txt')
    segments.write_ascii(out, TEST_SEGMENTS, ncol=ncol)
    a = SegmentList.read(out, gpstype=float, strict=False)
    assert a == TEST_SEGMENTS_2
    # clean up
    shutil.rmtree(outdir, ignore_errors=True)
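
For ncol to be injected, this pytest function is presumably parametrized somewhere above it; a sketch of the decorator it assumes (values inferred from the unittest variant in Example #1):

import pytest

@pytest.mark.parametrize('ncol', [2, 4])
def test_write_segments_ascii(ncol, tmpdir):
    ...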
Example #3
def read_segmentlist(total, skip=True, **kwargs):
    '''Read cached segment lists, or run the data-quality checks.
    '''
    if skip:
        log.debug('Skipping segment checks')
        total  = SegmentList.read('./segmentlist/total.txt')
        none   = SegmentList.read('./segmentlist/nodata.txt')
        good   = SegmentList.read('./segmentlist/available.txt')
        lack   = SegmentList.read('./segmentlist/lackofdata.txt')
        glitch = SegmentList.read('./segmentlist/glitch.txt')
    else:
        good, none = check_nodata(total, skip=True, **kwargs)
        good, lack, glitch = check_baddata(good, **kwargs)
        log.debug('Checking done. Close.')
        exit()
    if len(total) - len(none) - len(lack) - len(glitch) != len(good):
        log.debug('SegmentListError!')
        raise ValueError('Mismatched SegmentLists!')
    return good, none, lack, glitch
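
The final check asserts that none, lack, glitch and good partition total segment-for-segment; a tiny self-contained illustration of that invariant (all segments made up):

# Illustration of the length invariant checked above (made-up segments)
from gwpy.segments import Segment, SegmentList

total = SegmentList([Segment(0, 10), Segment(20, 30),
                     Segment(40, 50), Segment(60, 70)])
none = SegmentList([Segment(0, 10)])     # no frame files at all
lack = SegmentList([Segment(20, 30)])    # partial data
glitch = SegmentList([Segment(40, 50)])  # glitchy data
good = SegmentList([Segment(60, 70)])    # clean, usable data
assert len(total) - len(none) - len(lack) - len(glitch) == len(good)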
Example #4
def grab_time_triggers(glob_wildcard):
    time_segs = SegmentList([])
    start_time_utc = tconvert(args.gps_start_time)
    for filename in glob.glob(glob_wildcard):
        data = SegmentList.read(filename)
        print('grabbing trigger file: ' + filename)
        time_segs += data
        # print(time_segs)
        start_time_utc += datetime.timedelta(days=1)
    return time_segs
Example #5
def grab_time_triggers(wildcard, start, end):
    """Retrieve triggers from a given GPS time range
    """
    time_segs = SegmentList([])
    start_time_utc = tconvert(start)
    for filename in glob.glob(wildcard):
        data = SegmentList.read(filename)
        LOGGER.info(' '.join(['grabbing trigger file:', filename]))
        start_end_seg = Segment(start, end)
        c = data & SegmentList([start_end_seg])
        time_segs += c
        start_time_utc += datetime.timedelta(days=1)
    return time_segs
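
A hedged usage sketch for this version (the wildcard and GPS times are placeholders):

# Hypothetical invocation; adjust the glob pattern and GPS times as needed
vetoed = grab_time_triggers(
    '/home/detchar/triggers/*VETO_SEGS_ROUND*.txt',
    1126051217,  # GPS start
    1126137617,  # GPS end
)
print('collected {} veto segments'.format(len(vetoed)))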
Example #6
def check_nodata(segmentlist,
                 prefix='./data',
                 write=True,
                 skip=False,
                 **kwargs):
    '''Split a segment list into data-present and no-data segments.
    '''
    from gwpy.segments import SegmentList
    if not skip:
        log.debug('Find segments')
        # find unchecked segments
        exists = iofunc.existance(segmentlist, ftype='gwf')
        not_checked = [
            segmentlist[i] for i, exist in enumerate(exists) if not exist
        ]
        log.debug('{0}(/{1}) are not checked'.format(len(not_checked),
                                                     len(segmentlist)))
        n = len(not_checked)
        #ans = [_check_nodata(segment,**kwargs)[1] for segment in not_checked]
        ans = [
            _check_nodata(segment,
                          headder='{0:04d}(/{1:04d})'.format(i, n),
                          **kwargs)[1] for i, segment in enumerate(not_checked)
        ]
        # nodata segments
        nodata = SegmentList(
            [not_checked[i] for i, _ans in enumerate(ans) if 'NoData' in _ans])
    else:
        nodata = SegmentList.read('./segmentlist/nodata.txt')

    # exist segments
    exist = diff(segmentlist, nodata)

    if len(exist) == 0:
        log.debug('No data exist...')
        raise ValueError('No data error.')

    if write:
        exist.write('./segmentlist/exist.txt')
        nodata.write('./segmentlist/nodata.txt')
        log.debug('./segmentlist/exist.txt Saved')
        log.debug('./segmentlist/nodata.txt Saved')

    log.debug('{0} segments have data'.format(len(exist)))
    return exist, nodata
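
The diff helper used above is not shown; given gwpy's set-like segment arithmetic it is presumably a coalesced difference, something like:

# A guess at the missing helper: the part of segmentlist not covered by nodata
from gwpy.segments import SegmentList

def diff(segmentlist, nodata):
    return (SegmentList(segmentlist) - SegmentList(nodata)).coalesce()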
Example #7
def main(args=None):
    """Run the hveto-trace command-line tool
    """
    parser = create_parser()
    args = parser.parse_args(args=args)

    # initialize variables
    time = float(args.trigger_time)
    segment = None

    # initialize logger
    logger = cli.logger(name=PROG.split('python -m ').pop(),
                        level=args.loglevel)
    logger.debug('Running in verbose mode')
    logger.debug('Search directory: {}'.format(args.directory))

    try:  # read veto segment statistics
        segment_stats = json.load(
            open(os.path.join(args.directory, 'summary-stats.json')))
    except IOError:
        logger.critical("'summary-stats.json' was not found "
                        "in the input directory")
        raise

    # loop over and log results to output
    for (i, cround) in enumerate(segment_stats['rounds']):
        seg_files = filter(lambda f_name: '.txt' in f_name,
                           cround[u'files'][u'VETO_SEGS'])
        for f in seg_files:
            segments = SegmentList.read(os.path.join(args.directory, f))
            if time in segments:
                segment = segments[segments.find(time)]
                logger.info('Trigger time {0} was vetoed in round {1} by '
                            'segment {2}'.format(time, (i + 1), segment))
                logger.debug('Round winner: {}'.format(cround['name']))
                logger.debug('Significance: {}'.format(cround['significance']))
                logger.debug('SNR: {}'.format(cround['snr']))
                logger.debug('Window: {}'.format(cround['window']))

    if segment is None:
        # if we got here, the signal was not vetoed
        logger.info('Trigger time {} was not vetoed'.format(time))
Example #8
def main(args=None):
    """Run the trace tool
    """
    parser = create_parser()
    args = parser.parse_args(args=args)
    directory = args.directory

    logger = cli.logger(name='hveto.trace', level=args.loglevel)
    logger.debug('Running in verbose mode')
    logger.debug('Search directory: %s' % directory)

    trigger_time = float(args.trigger_time)
    if directory[-1] != '/':
        directory += '/'

    try:
        segment_stats = json.load(open('%ssummary-stats.json' % directory))
    except IOError:
        logger.error("'summary-stats.json' was not found "
                     "in the input directory")
        sys.exit(0)

    for i, cround in enumerate(segment_stats['rounds']):
        seg_files = filter(lambda f_name: '.txt' in f_name,
                           cround[u'files'][u'VETO_SEGS'])
        for f in seg_files:
            segments = SegmentList.read(os.path.join(directory, f))
            for segment in segments:
                if segment[0] <= trigger_time <= segment[1]:
                    logger.info('Signal was vetoed in round %d by '
                                'segment %s' % ((i + 1), segment))
                    logger.debug('Winner: %s' % cround['name'])
                    logger.debug('Significance: %s' % cround['significance'])
                    logger.debug('SNR: %s' % cround['snr'])
                    logger.debug('Window: %s' % cround['window'])
                    sys.exit(0)

    logger.info('Signal was not vetoed.')
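
Both trace tools consume the same summary-stats.json; from the lookups above it must look roughly like this (field names taken from the code, values invented):

# Approximate shape of summary-stats.json as read above (values invented)
summary_stats = {
    "rounds": [
        {
            "name": "H1:PEM-CS_MAG_EBAY_SUSRACK_Y_DQ",
            "significance": 123.4,
            "snr": 8.0,
            "window": 0.2,
            "files": {
                "VETO_SEGS": [
                    "H1-HVETO_VETO_SEGS_ROUND_1-1126051217-86400.txt",
                ],
            },
        },
    ],
}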
Example #9
def test_write_segments_ascii(ncol):
    with NamedTemporaryFile(suffix='.txt', delete=False) as tmp:
        segments.write_ascii(tmp.name, TEST_SEGMENTS, ncol=ncol)
        tmp.delete = True
        a = SegmentList.read(tmp.name, gpstype=float, strict=False)
        assert a == TEST_SEGMENTS_2
Example #10
#! /usr/bin/env python

from __future__ import (division, print_function)

import sys

from gwpy.segments import SegmentList
from gwpy.table.lsctables import SnglInspiralTable

import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt

import numpy as np

bbh_file = sys.argv[1]

trigs = SnglInspiralTable.read(bbh_file)
segs = SegmentList.read('L1_ER7_segments.txt')
trigs = trigs.vetoed(segs)

plot = trigs.plot('time', 'snr', edgecolor='none')#, epoch=1117378816)
#plot.set_xlim(1117378816, 1117378816+(24*3600*11.0))
plot.set_ylabel('SNR')
plot.set_yscale('log', nonposy='clip')
plot.set_title('BBH triggers during the ER7 run')
plot.savefig('H1_BBH_SNR.png')


Example #11
def test_read_segwizard(self):
    active = SegmentList.read(SEGWIZ, coalesce=False)
    self.assertTrue(active == ACTIVE,
                    'SegmentList.read(segwizard) mismatch:\n\n%s\n\n%s'
                    % (ACTIVE, active))
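
A hedged sketch of the fixtures this test assumes: SEGWIZ is the path to a SegWizard-format file and ACTIVE the SegmentList its rows encode (contents illustrative):

# Hypothetical fixtures; SegWizard rows are "index start stop duration"
from gwpy.segments import Segment, SegmentList

SEGWIZ = 'segwizard.txt'   # containing, e.g.:
                           #   0 1 10 9
                           #   1 20 30 10
ACTIVE = SegmentList([Segment(1, 10), Segment(20, 30)])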
Example #12
    def process(self, config=GWSummConfigParser(), **kwargs):

        # set params
        self.rounds = None

        if not os.path.isdir(self.directory):
            self.rounds = None
            return

        # get some basic info
        ifo = config.get('DEFAULT', 'ifo')

        # read the configuration
        d = os.path.realpath(self.directory).rstrip('/')
        self.conf = dict()
        confs = glob(os.path.join(d, '%s-HVETO_CONF-*-*.txt' % ifo))
        if len(confs) != 1:
            self.rounds = 'FAIL'
            return
        conffile = confs[0]
        try:
            with open(conffile) as f:
                self.conf = dict()
                lines = f.readlines()[3:]
                for line in lines:
                    try:
                        key, val = line.split(': ', 1)
                        self.conf[key.strip()] = eval(val)
                    except (ValueError, SyntaxError, NameError):
                        pass
        except IOError:
            self.rounds = 'FAIL'
            return
        else:
            etg = self.conf.pop('AUXtype', None)
            if 'DEfnm' in self.conf:
                name = re_quote.sub('', self.conf['DEfnm'])
                self.primary = '%s:%s' % (ifo, name)
                if 'DEtype' in self.conf:
                    hetg = re_quote.sub('', self.conf['DEtype'])
                    if re.search(r'_%s\Z' % hetg, self.primary, re.I):
                        self.primary = self.primary[:-len(hetg)-1]
            else:
                self.primary = None

        # find the segments
        try:
            ce = CacheEntry.from_T050017(conffile)
        except ValueError:
            start = int(self.span[0])
            duration = int(abs(self.span))
            span = self.span
        else:
            start = int(ce.segment[0])
            duration = int(abs(ce.segment))
            span = ce.segment
        try:
            statefile = self.conf['dqfnm']
        except KeyError:
            statefile = '%s-HVETO_DQ_SEGS-%d-%d.txt' % (ifo, start, duration)
        if not os.path.isfile(os.path.join(self.directory, statefile)):
            self.rounds = 'NOSEGMENTS'
            return

        # find the results table
        resultsfile = os.path.join(self.directory, 'summary_stats.txt')
        if not os.path.isfile(resultsfile):
            self.rounds = 'FAIL'
            return

        # determine the Hveto state
        cache = Cache([CacheEntry.from_T050017(
                           os.path.join(self.directory, statefile))])
        segments = SegmentList.read(cache)
        globalv.SEGMENTS[self.states[0].definition] = DataQualityFlag(
            self.states[0].definition, known=[span], active=segments)
        self.finalize_states(config=config, query=False)

        # read results file
        self.rounds = []
        with open(resultsfile, 'r') as f:
            for line in f.readlines():
                self.rounds.append(dict(zip(self.summaryrows,
                                            line.split(' ')[1:])))
                # fix channel name
                c = '%s:%s' % (ifo, self.rounds[-1]['Winning channel'])
                if etg and re.search(r'_%s\Z' % etg, c, re.I):
                    c = c.rsplit('_', 1)[0]
                self.rounds[-1]['Winning channel'] = c

        # read starting triggers
        rawfile = ('%s-HVETO_RAW_TRIGS_ROUND_0-%d-%d.txt'
                   % (ifo, start, duration))
        cache = Cache([CacheEntry.from_T050017(
                           os.path.join(self.directory, rawfile))])
        get_triggers('%s:hveto_start' % ifo, 'hveto', [self.span],
                     config=config, cache=cache, return_=False)
        get_triggers('%s:hveto_vetoed_all' % ifo, 'hveto', [self.span],
                     config=config, cache=Cache(), return_=False)

        for r in range(1, len(self.rounds) + 1):
            # read round veto triggers
            rawfile = ('%s-HVETO_VETOED_TRIGS_ROUND_%d-%d-%d.txt'
                       % (ifo, r, start, duration))
            cache = Cache([CacheEntry.from_T050017(
                               os.path.join(self.directory, rawfile))])
            trigs = get_triggers('%s:hveto_vetoed_round %d' % (ifo, r), 'hveto',
                                 [self.span], config=config, cache=cache)
            add_triggers(trigs, '%s:hveto_vetoed_all,hveto' % ifo,
                         segments=SegmentList([self.span]))
            # read round veto segments
            segfile = ('%s-HVETO_VETO_SEGS_ROUND_%d-%d-%d.txt'
                       % (ifo, r, start, duration))
            cache = Cache([CacheEntry.from_T050017(
                               os.path.join(self.directory, segfile))])
            get_segments('%s:hveto_veto_segs_round_%d' % (ifo, r), [self.span],
                         config=config, cache=cache, return_=False)

        for plot in self.plots:
            if isinstance(plot, HvetoSegmentSummaryPlot):
                plot.find_flags()

        kwargs['trigcache'] = Cache()
        kwargs['segmentcache'] = Cache()
        super(HvetoTab, self).process(config=config, **kwargs)

        # find some plots
        for plot in ['OVERAL_HISTOGRAM', 'OVERAL_EFF_DT'][::-1]:
            filename = (
                '%s-HVETO_%s-%d-%d.png' % (ifo, plot, start, duration))
            plotfile = os.path.join(self.directory, filename)
            if os.path.isfile(plotfile):
                p = SummaryPlot(os.path.join(self.url, filename), new=False)
                p.state = self.states[0]
                self.plots.insert(0, p)

        # delete data from archive
        del globalv.SEGMENTS[self.states[0].definition]
        for row in range(1, len(self.rounds) + 1):
            del globalv.SEGMENTS['%s:hveto_veto_segs_round_%s' % (ifo, row)]
Example #13
def read_segments(source, coltype=int):
    return SegmentList.read(
        source,
        gpstype=coltype,
        format="segwizard",
    )
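
A usage sketch, assuming a SegWizard-format file on disk (the filename is a placeholder):

# Hypothetical call; 'science_segments.txt' stands in for a real file
segs = read_segments('science_segments.txt', coltype=float)
print(segs)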
Example #14
pattern_trigs_hveto = os.path.join(args.directory_path, '{}{:02}',
                                   '{}{:02}{:02}', '*86400-DARM',
                                   '*VETO_SEGS_ROUND*.txt')

start_time_utc = tconvert(args.gps_start_time)
end_time_utc = tconvert(args.gps_end_time)
triggers = SegmentList([])
while start_time_utc < end_time_utc:
    day = start_time_utc.day
    month = start_time_utc.month
    year = start_time_utc.year

    wildcard_trigs_hveto = pattern_trigs_hveto.format(year, month,
                                                      year, month, day)
    # grab the trigger files
    for filename in glob.glob(wildcard_trigs_hveto):
        # load the triggers in
        data = SegmentList.read(filename)
        print(data)
        triggers += data
    start_time_utc += datetime.timedelta(days=1)

# triggers.coalesce()
# keep only triggers within the requested GPS range
start_end_seg = Segment(args.gps_start_time, args.gps_end_time)
triggers = triggers & SegmentList([start_end_seg])
# print(triggers)

triggers.write("total_hveto_trigs.txt")

# SEGMENT HANDLING: begin for loop that loops over the range of all days/months/years
Example #15
#"IMC-REFL_DC_OUT_DQ",
#"ALS-X_REFL_ERR_OUT_DQ",
#"PEM-CS_MAG_EBAY_SUSRACK_Y_DQ",
#"ASC-Y_TR_B_NSUM_OUT_DQ",
#"ASC-AS_B_RF45_Q_PIT_OUT_DQ",
#"SUS-OMC_M1_ISIWIT_T_DQ",
#"PSL-ISS_AOM_DRIVER_MON_OUT_DQ",
#"LSC-PRCL_OUT_DQ"]

ifo = sys.argv[1]
bbhdir = sys.argv[2]
bbhfile = glob.glob(os.path.join(bbhdir, ifo + '*.xml.gz'))[0]
omiccachedir = sys.argv[3]

# Read in the segment file
segments = SegmentList.read('/home/albert.wandui/detchar'
                            '/ER7/jul13/%s_ER7_segments.txt' % ifo)

# Read in the BBH triggers
bbh_trigs = SnglInspiralTable.read(bbhfile)
# We only want the triggers in the given segments
bbh_trigs = bbh_trigs.vetoed(segments)
#bbh_trigs.sort(key=lambda x: x.end_time + x.end_time_ns * 1.0e-9)

print "Read in all the BBH triggers!!!\n"
print "Let's start working on the Omicron triggers...\n"
# ---------------------------------------------------------------------------- #

# Read in all the Omicron caches
# Also get an idea of the speed of the code when reading from cache file vs
# letting vet get the data itself
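
The closing comment points at the next step; a hedged sketch of reading one channel's Omicron triggers from a LAL cache with the same era's APIs (path and channel name are placeholders):

# Hypothetical continuation: load Omicron triggers for one channel via a cache
from glue.lal import Cache
from gwpy.table.lsctables import SnglBurstTable

with open(os.path.join(omiccachedir, 'H1_ASC-AS_B_RF45_Q_PIT_OUT_DQ.lcf')) as f:
    cache = Cache.fromfile(f)
omic_trigs = SnglBurstTable.read(cache)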
Example #16
#
#! coding:utf-8
import numpy as np
import matplotlib.pyplot as plt
from astropy import units as u

from gwpy.timeseries import TimeSeriesDict, TimeSeries
from gwpy.segments import SegmentList
from gwpy.time import tconvert
from gwpy.detector import Channel

ok = SegmentList.read('segments_locked.txt')

segnum = 0
start = ok[segnum][0]
end = ok[segnum][1]

chname = 'K1:GIF-X_STRAIN_OUT16'
gif_x = TimeSeries.read('./segment_{0}/{1}.gwf'.format(segnum, chname),
                        chname,
                        verbose=True,
                        nproc=2) * 1e6 * 3000  # disp
chname = 'K1:PEM-SEIS_IXV_GND_X_OUT16'
ixv_x = TimeSeries.read('./segment_{0}/{1}.gwf'.format(segnum, chname),
                        chname,
                        verbose=True,
                        nproc=2)
chname = 'K1:PEM-SEIS_EXV_GND_X_OUT16'
exv_x = TimeSeries.read('./segment_{0}/{1}.gwf'.format(segnum, chname),
                        chname,
                        verbose=True,
Example #17
    def process(self, config=GWSummConfigParser(), **kwargs):

        # set params
        self.rounds = None

        if not os.path.isdir(self.directory):
            self.rounds = None
            return

        # get some basic info
        ifo = config.get('DEFAULT', 'ifo')

        # read the configuration
        d = os.path.realpath(self.directory).rstrip('/')
        self.conf = dict()
        confs = glob(os.path.join(d, '%s-HVETO_CONF-*-*.txt' % ifo))
        if len(confs) != 1:
            self.rounds = 'FAIL'
            return
        conffile = confs[0]
        try:
            with open(conffile) as f:
                self.conf = dict()
                lines = f.readlines()[3:]
                for line in lines:
                    try:
                        key, val = line.split(': ', 1)
                        self.conf[key.strip()] = eval(val)
                    except (ValueError, SyntaxError, NameError):
                        pass
        except IOError:
            self.rounds = 'FAIL'
            return
        else:
            etg = self.conf.pop('AUXtype', None)
            if 'DEfnm' in self.conf:
                name = re_quote.sub('', self.conf['DEfnm'])
                self.primary = '%s:%s' % (ifo, name)
                if 'DEtype' in self.conf:
                    hetg = re_quote.sub('', self.conf['DEtype'])
                    if re.search(r'_%s\Z' % hetg, self.primary, re.I):
                        self.primary = self.primary[:-len(hetg) - 1]
            else:
                self.primary = None

        # find the segments
        try:
            ce = CacheEntry.from_T050017(conffile)
        except ValueError:
            start = int(self.span[0])
            duration = int(abs(self.span))
            span = self.span
        else:
            start = int(ce.segment[0])
            duration = int(abs(ce.segment))
            span = ce.segment
        try:
            statefile = self.conf['dqfnm']
        except KeyError:
            statefile = '%s-HVETO_DQ_SEGS-%d-%d.txt' % (ifo, start, duration)
        if not os.path.isfile(os.path.join(self.directory, statefile)):
            self.rounds = 'NOSEGMENTS'
            return

        # find the results table
        resultsfile = os.path.join(self.directory, 'summary_stats.txt')
        if not os.path.isfile(resultsfile):
            self.rounds = 'FAIL'
            return

        # determine the Hveto state
        cache = Cache(
            [CacheEntry.from_T050017(os.path.join(self.directory, statefile))])
        segments = SegmentList.read(cache)
        globalv.SEGMENTS[self.states[0].definition] = DataQualityFlag(
            self.states[0].definition, known=[span], active=segments)
        self.finalize_states(config=config, query=False)

        # read results file
        self.rounds = []
        with open(resultsfile, 'r') as f:
            for line in f.readlines():
                self.rounds.append(
                    dict(zip(self.summaryrows,
                             line.split(' ')[1:])))
                # fix channel name
                c = '%s:%s' % (ifo, self.rounds[-1]['Winning channel'])
                if etg and re.search(r'_%s\Z' % etg, c, re.I):
                    c = c.rsplit('_', 1)[0]
                self.rounds[-1]['Winning channel'] = c

        # read starting triggers
        rawfile = ('%s-HVETO_RAW_TRIGS_ROUND_0-%d-%d.txt' %
                   (ifo, start, duration))
        cache = Cache(
            [CacheEntry.from_T050017(os.path.join(self.directory, rawfile))])
        get_triggers('%s:hveto_start' % ifo,
                     'hveto', [self.span],
                     config=config,
                     cache=cache,
                     tablename='sngl_burst',
                     return_=False)

        get_triggers('%s:hveto_vetoed_all' % ifo,
                     'hveto', [self.span],
                     config=config,
                     cache=Cache(),
                     tablename='sngl_burst')
        for r in range(1, len(self.rounds) + 1):
            # read round veto triggers
            rawfile = ('%s-HVETO_VETOED_TRIGS_ROUND_%d-%d-%d.txt' %
                       (ifo, r, start, duration))
            cache = Cache([
                CacheEntry.from_T050017(os.path.join(self.directory, rawfile))
            ])
            trigs = get_triggers('%s:hveto_vetoed_round %d' % (ifo, r),
                                 'hveto', [self.span],
                                 config=config,
                                 cache=cache,
                                 tablename='sngl_burst')
            globalv.TRIGGERS['%s:hveto_vetoed_all,hveto' % ifo].extend(trigs)
            # read round veto segments
            segfile = ('%s-HVETO_VETO_SEGS_ROUND_%d-%d-%d.txt' %
                       (ifo, r, start, duration))
            cache = Cache([
                CacheEntry.from_T050017(os.path.join(self.directory, segfile))
            ])
            get_segments('%s:hveto_veto_segs_round_%d' % (ifo, r), [self.span],
                         config=config,
                         cache=cache,
                         return_=False)

        for plot in self.plots:
            if isinstance(plot, HvetoSegmentSummaryPlot):
                plot.find_flags()

        kwargs['trigcache'] = Cache()
        kwargs['segmentcache'] = Cache()
        super(HvetoTab, self).process(config=config, **kwargs)

        # find some plots
        for plot in ['OVERAL_HISTOGRAM', 'OVERAL_EFF_DT'][::-1]:
            filename = ('%s-HVETO_%s-%d-%d.png' % (ifo, plot, start, duration))
            plotfile = os.path.join(self.directory, filename)
            if os.path.isfile(plotfile):
                p = SummaryPlot(os.path.join(self.url, filename), new=False)
                p.state = self.states[0]
                self.plots.insert(0, p)

        # delete data from archive
        del globalv.SEGMENTS[self.states[0].definition]
        for row in range(1, len(self.rounds) + 1):
            del globalv.SEGMENTS['%s:hveto_veto_segs_round_%s' % (ifo, row)]
Example #18
def load_segs():
    segs = SegmentList.read('L1_ER7_segments.txt')
    return segs
Example #19
def main(args=None):
    use('agg')
    rcParams.update({
        'figure.subplot.bottom': 0.15,
        'figure.subplot.left': 0.1,
        'figure.subplot.right': 0.83,
        'figure.subplot.top': 0.93,
        'figure.subplot.hspace': 0.25,
        'axes.labelsize': 20,
        'grid.color': 'gray',
    })
    grid = GridSpec(2, 1)

    logger = log.Logger('omicron-status')

    try:
        omicronversion = str(get_omicron_version())
    except KeyError:
        omicronversion = 'Unknown'
        logger.warning("Omicron version unknown")
    else:
        logger.info("Found omicron version: %s" % omicronversion)

    parser = create_parser()
    args = parser.parse_args(args=args)

    if args.ifo is None:
        parser.error("Cannot determine IFO prefix from system, "
                     "please pass --ifo on the command line")

    group = args.group

    logger.info("Checking status for %r group" % group)

    archive = args.archive_directory
    proddir = args.production_directory.with_name(
        args.production_directory.name.format(group=args.group), )
    outdir = args.output_directory
    outdir.mkdir(exist_ok=True, parents=True)
    tag = args.latency_archive_tag.format(group=args.group)

    filetypes = ['h5', 'xml.gz', 'root']

    logger.debug("Set output directory to %s" % outdir)
    logger.debug(
        "Will process the following filetypes: {}".format(
            ", ".join(filetypes), ), )

    # -- parse configuration file and get parameters --------------------------

    cp = configparser.ConfigParser()
    ok = cp.read(args.config_file)
    if args.config_file not in ok:
        raise IOError(
            "Failed to read configuration file %r" % args.config_file, )
    logger.info("Configuration read")

    # validate
    if not cp.has_section(group):
        raise configparser.NoSectionError(group)

    # get parameters
    obs = args.ifo[0]
    frametype = cp.get(group, 'frametype')
    padding = cp.getint(group, 'overlap-duration') / 2.
    mingap = cp.getint(group, 'chunk-duration')

    channels = args.channel
    if not channels:
        channels = [
            c.split()[0]
            for c in cp.get(group, 'channels').strip('\n').split('\n')
        ]
    channels.sort()
    logger.debug("Found %d channels" % len(channels))

    start = args.gps_start_time
    end = args.gps_end_time
    if end == NOW:
        end -= padding

    if args.state_flag:
        stateflag = args.state_flag
        statepad = tuple(map(float, args.state_pad.split(',')))
    else:
        try:
            stateflag = cp.get(group, 'state-flag')
        except configparser.NoOptionError:
            stateflag = None
        else:
            try:
                statepad = tuple(
                    map(
                        float,
                        cp.get(group, 'state-padding').split(','),
                    ))
            except configparser.NoOptionError:
                statepad = (0, 0)
    if stateflag:
        logger.debug("Parsed state flag: %r" % stateflag)
        logger.debug("Parsed state padding: %s" % repr(statepad))
    logger.info("Processing %d-%d" % (start, end))

    # -- define nagios JSON printer -------------------------------------------

    def print_nagios_json(code, message, outfile, tag='status', **extras):
        out = {
            'created_gps': NOW,
            'status_intervals': [
                {
                    'start_sec': 0,
                    'end_sec': args.unknown,
                    'num_status': code,
                    'txt_status': message
                },
                {
                    'start_sec': args.unknown,
                    'num_status': 3,
                    'txt_status': 'Omicron %s check is not running' % tag
                },
            ],
            'author': {
                'name': 'Duncan Macleod',
                'email': '*****@*****.**',
            },
            'omicron': {
                'version': omicronversion,
                'group': group,
                'channels': ' '.join(channels),
                'frametype': frametype,
                'state-flag': stateflag,
            },
            'pyomicron': {
                'version': __version__,
            },
        }
        out.update(extras)
        with open(outfile, 'w') as f:
            f.write(json.dumps(out))
        logger.debug("nagios info written to %s" % outfile)

    # -- get condor status ------------------------------------------------

    if not args.skip_condor:
        # connect to scheduler
        try:
            schedd = htcondor.Schedd()
        except RuntimeError as e:
            logger.warning("Caught %s: %s" % (type(e).__name__, e))
            logger.info("Failed to connect to HTCondor scheduler, cannot "
                        "determine condor status for %s" % group)
            schedd = None

    if not args.skip_condor and schedd:
        logger.info("-- Checking condor status --")

        # get DAG status
        jsonfp = outdir / "nagios-condor-{}.json".format(group)
        okstates = ['Running', 'Idle', 'Completed']
        try:
            # check manager status
            qstr = 'OmicronManager == "{}" && Owner == "{}"'.format(
                group,
                args.user,
            )
            try:
                jobs = schedd.query(qstr, ['JobStatus'])
            except IOError as e:
                warnings.warn("Caught IOError: %s [retrying...]" % str(e))
                sleep(2)
                jobs = schedd.query(qstr, ['JobStatus'])
            logger.debug(
                "Found {} jobs for query {!r}".format(len(jobs), qstr), )
            if len(jobs) > 1:
                raise RuntimeError(
                    "Multiple OmicronManager jobs found for %r" % group)
            elif len(jobs) == 0:
                raise RuntimeError(
                    "No OmicronManager job found for %r" % group, )
            status = condor.JOB_STATUS[jobs[0]['JobStatus']]
            if status not in okstates:
                raise RuntimeError("OmicronManager status for %r: %r" %
                                   (group, status))
            logger.debug("Manager status is %r" % status)
            # check node status
            qstr = 'OmicronProcess == "{}" && Owner == "{}"'.format(
                group,
                args.user,
            )
            jobs = schedd.query(qstr, ['JobStatus', 'ClusterId'])
            logger.debug(
                "Found {} jobs for query {!r}".format(len(jobs), qstr), )
            for job in jobs:
                status = condor.JOB_STATUS[job['JobStatus']]
                if status not in okstates:
                    raise RuntimeError("Omicron node %s (%r) is %r" %
                                       (job['ClusterId'], group, status))
        except RuntimeError as e:
            print_nagios_json(2, str(e), jsonfp, tag='condor')
            logger.warning("Failed to determine condor status: %r" % str(e))
        except IOError as e:
            logger.warning("Caught %s: %s" % (type(e).__name__, e))
            logger.info("Failed to connect to HTCondor scheduler, cannot "
                        "determine condor status for %s" % group)
        else:
            print_nagios_json(
                0,
                "Condor processing for %r is OK" % group,
                jsonfp,
                tag='condor',
            )
            logger.info("Condor processing is OK")

    if not args.skip_job_duration:
        # get job duration history
        plot = Plot(figsize=[12, 3])
        plot.subplots_adjust(bottom=.22, top=.87)
        ax = plot.gca(xscale="auto-gps")
        times, jobdur = condor.get_job_duration_history_shell('OmicronProcess',
                                                              group,
                                                              maxjobs=5000)
        logger.debug("Recovered duration history for %d omicron.exe jobs" %
                     len(times))
        line = ax.plot([0], [1], label='Omicron.exe')[0]
        ax.plot(times,
                jobdur,
                linestyle=' ',
                marker='.',
                color=line.get_color())
        times, jobdur = condor.get_job_duration_history_shell(
            'OmicronPostProcess', group, maxjobs=5000)
        logger.debug("Recovered duration history for %d post-processing jobs" %
                     len(times))
        line = ax.plot([0], [1], label='Post-processing')[0]
        ax.plot(times,
                jobdur,
                linestyle=' ',
                marker='.',
                color=line.get_color())
        ax.legend(loc='upper left',
                  borderaxespad=0,
                  bbox_to_anchor=(1.01, 1),
                  handlelength=1)
        ax.set_xlim(args.gps_start_time, args.gps_end_time)
        ax.set_epoch(ax.get_xlim()[1])
        ax.set_yscale('log')
        ax.set_title('Omicron job durations for %r' % group)
        ax.set_ylabel('Job duration [seconds]')
        ax.xaxis.labelpad = 5
        png = str(outdir / "nagios-condor-{}.png".format(group))
        plot.save(png)
        plot.close()
        logger.debug("Saved condor plot to %s" % png)

    if args.skip_file_checks:
        sys.exit(0)

    # -- get file latency and archive completeness ----------------------------

    logger.info("-- Checking file archive --")

    # get frame segments
    segs = segments.get_frame_segments(obs, frametype, start, end)

    # get state segments
    if stateflag is not None:
        segs &= segments.query_state_segments(
            stateflag,
            start,
            end,
            pad=statepad,
        )

    try:
        end = segs[-1][1]
    except IndexError:
        pass

    # apply inwards padding to generate resolvable segments
    for i in range(len(segs) - 1, -1, -1):
        # if segment is shorter than padding, ignore it completely
        if abs(segs[i]) <= padding * 2:
            del segs[i]
        # otherwise apply padding to generate trigger segment
        else:
            segs[i] = segs[i].contract(padding)
    logger.debug("Found %d seconds of analysable time" % abs(segs))

    # load archive latency
    latencyfile = outdir / "nagios-latency-{}.h5".format(tag)
    times = dict((c, dict((ft, None) for ft in filetypes)) for c in channels)
    ldata = dict((c, dict((ft, None) for ft in filetypes)) for c in channels)
    try:
        with h5py.File(latencyfile, 'r') as h5file:
            for c in channels:
                for ft in filetypes:
                    try:
                        times[c][ft] = h5file[c]['time'][ft][:]
                        ldata[c][ft] = h5file[c]['latency'][ft][:]
                    except KeyError:
                        times[c][ft] = numpy.ndarray((0, ))
                        ldata[c][ft] = numpy.ndarray((0, ))
    except OSError as exc:  # file not found, or is corrupt
        warnings.warn("failed to load latency data from {}: {}".format(
            latencyfile,
            str(exc),
        ))
        for c in channels:
            for ft in filetypes:
                if not times[c].get(ft):
                    times[c][ft] = numpy.ndarray((0, ))
                    ldata[c][ft] = numpy.ndarray((0, ))
    else:
        logger.debug("Parsed latency data from %s" % latencyfile)

    # load acknowledged gaps
    acksegfile = str(outdir / "acknowledged-gaps-{}.txt".format(tag))
    try:
        acknowledged = SegmentList.read(acksegfile,
                                        gpstype=float,
                                        format="segwizard")
    except IOError:  # no file
        acknowledged = SegmentList()
    else:
        logger.debug(
            "Read %d segments from %s" % (len(acknowledged), acksegfile), )
        acknowledged.coalesce()

    # build legend for segments
    leg = OrderedDict()
    leg['Analysable'] = SegmentRectangle(
        [0, 1],
        0,
        facecolor='lightgray',
        edgecolor='gray',
    )
    leg['Available'] = SegmentRectangle(
        [0, 1],
        0,
        facecolor='lightgreen',
        edgecolor='green',
    )
    leg['Missing'] = SegmentRectangle(
        [0, 1],
        0,
        facecolor='red',
        edgecolor='darkred',
    )
    leg['Unresolvable'] = SegmentRectangle(
        [0, 1],
        0,
        facecolor='magenta',
        edgecolor='purple',
    )
    leg['Overlapping'] = SegmentRectangle(
        [0, 1],
        0,
        facecolor='yellow',
        edgecolor='orange',
    )
    leg['Pending'] = SegmentRectangle(
        [0, 1],
        0,
        facecolor='lightskyblue',
        edgecolor='blue',
    )
    leg['Acknowledged'] = SegmentRectangle(
        [0, 1],
        0,
        facecolor='sandybrown',
        edgecolor='brown',
    )

    logger.debug("Checking archive for each channel...")

    # find files
    latency = {}
    gaps = {}
    overlap = {}
    pending = {}
    plots = {}
    for c in channels:
        # create data storage
        latency[c] = {}
        gaps[c] = {}
        overlap[c] = {}
        pending[c] = {}

        # create figure
        plot = Plot(figsize=[12, 5])
        lax = plot.add_subplot(grid[0, 0], xscale="auto-gps")
        sax = plot.add_subplot(grid[1, 0], sharex=lax, projection='segments')
        colors = ['lightblue', 'dodgerblue', 'black']

        for y, ft in enumerate(filetypes):
            # find files
            cache = io.find_omicron_files(c, start, end, archive, ext=ft)
            cpend = sieve_cache(io.find_pending_files(c, proddir, ext=ft),
                                segment=Segment(start, end))
            # get available segments
            avail = segments.cache_segments(cache)
            found = avail & segs
            pending[c][ft] = segments.cache_segments(cpend) & segs
            # remove gaps at the end that represent latency
            try:
                latency[c][ft] = abs(segs & type(
                    segs)([type(segs[0])(found[-1][1], segs[-1][1])])) / 3600.
            except IndexError:
                latency[c][ft] = 0
                processed = segs
            else:
                processed = segs & type(segs)(
                    [type(segs[0])(start, found[-1][1])])
            gaps[c][ft] = type(found)()
            lost = type(found)()
            for s in processed - found:
                if abs(s) < mingap and s in list(segs):
                    lost.append(s)
                else:
                    gaps[c][ft].append(s)
            # remove acknowledged gaps
            ack = gaps[c][ft] & acknowledged
            gaps[c][ft] -= acknowledged
            # print warnings
            if abs(gaps[c][ft]):
                warnings.warn("Gaps found in %s files for %s:\n%s" %
                              (c, ft, gaps[c][ft]))
            overlap[c][ft] = segments.cache_overlaps(cache)
            if abs(overlap[c][ft]):
                warnings.warn("Overlap found in %s files for %s:\n%s" %
                              (c, ft, overlap[c][ft]))

            # append archive
            times[c][ft] = numpy.concatenate((times[c][ft][-99999:], [NOW]))
            ldata[c][ft] = numpy.concatenate(
                (ldata[c][ft][-99999:], [latency[c][ft]]))

            # plot
            line = lax.plot(
                times[c][ft],
                ldata[c][ft],
                label=ft,
                color=colors[y],
            )[0]
            lax.plot(times[c][ft],
                     ldata[c][ft],
                     marker='.',
                     linestyle=' ',
                     color=line.get_color())
            sax.plot_segmentlist(segs,
                                 y=y,
                                 label=ft,
                                 alpha=.5,
                                 facecolor=leg['Analysable'].get_facecolor(),
                                 edgecolor=leg['Analysable'].get_edgecolor())
            sax.plot_segmentlist(pending[c][ft],
                                 y=y,
                                 facecolor=leg['Pending'].get_facecolor(),
                                 edgecolor=leg['Pending'].get_edgecolor())
            sax.plot_segmentlist(avail,
                                 y=y,
                                 label=ft,
                                 alpha=.2,
                                 height=.1,
                                 facecolor=leg['Available'].get_facecolor(),
                                 edgecolor=leg['Available'].get_edgecolor())
            sax.plot_segmentlist(found,
                                 y=y,
                                 label=ft,
                                 alpha=.5,
                                 facecolor=leg['Available'].get_facecolor(),
                                 edgecolor=leg['Available'].get_edgecolor())
            sax.plot_segmentlist(lost,
                                 y=y,
                                 facecolor=leg['Unresolvable'].get_facecolor(),
                                 edgecolor=leg['Unresolvable'].get_edgecolor())
            sax.plot_segmentlist(gaps[c][ft],
                                 y=y,
                                 facecolor=leg['Missing'].get_facecolor(),
                                 edgecolor=leg['Missing'].get_edgecolor())
            sax.plot_segmentlist(overlap[c][ft],
                                 y=y,
                                 facecolor=leg['Overlapping'].get_facecolor(),
                                 edgecolor=leg['Overlapping'].get_edgecolor())
            sax.plot_segmentlist(ack,
                                 y=y,
                                 facecolor=leg['Acknowledged'].get_facecolor(),
                                 edgecolor=leg['Acknowledged'].get_edgecolor())

        # finalise plot
        lax.axhline(args.warning / 3600.,
                    color=(1.0, 0.7, 0.0),
                    linestyle='--',
                    linewidth=2,
                    label='Warning',
                    zorder=-1)
        lax.axhline(args.error / 3600.,
                    color='red',
                    linestyle='--',
                    linewidth=2,
                    label='Critical',
                    zorder=-1)
        lax.set_title('Omicron status: {}'.format(c))
        lax.set_ylim(0, args.error / 1800.)
        lax.set_ylabel('Latency [hours]')
        lax.legend(loc='upper left',
                   bbox_to_anchor=(1.01, 1),
                   borderaxespad=0,
                   handlelength=2,
                   fontsize=12.4)
        lax.set_xlabel(' ')
        for ax in plot.axes:
            ax.set_xlim(args.gps_start_time, args.gps_end_time)
            ax.set_epoch(ax.get_xlim()[1])
        sax.xaxis.labelpad = 5
        sax.set_ylim(-.5, len(filetypes) - .5)
        sax.legend(leg.values(),
                   leg.keys(),
                   handlelength=1,
                   fontsize=12.4,
                   loc='lower left',
                   bbox_to_anchor=(1.01, 0),
                   borderaxespad=0)
        plots[c] = png = outdir / "nagios-latency-{}.png".format(
            c.replace(':', '-'), )
        plot.save(png)
        plot.close()
        logger.debug("    %s" % c)

    # update latency and write archive
    h5file = h5py.File(latencyfile, 'w')
    for c in channels:
        g = h5file.create_group(c)
        for name, d in zip(['time', 'latency'], [times[c], ldata[c]]):
            g2 = g.create_group(name)
            for ft in filetypes:
                g2.create_dataset(ft, data=d[ft], compression='gzip')
    h5file.close()
    logger.debug("Stored latency data as HDF in %s" % latencyfile)

    # write nagios output for files
    status = []
    for segset, tag in zip([gaps, overlap], ['gaps', 'overlap']):
        chans = [(c, segset[c]) for c in segset
                 if abs(reduce(operator.or_, segset[c].values()))]
        jsonfp = outdir / "nagios-{}-{}.json".format(tag, group)
        status.append((tag, jsonfp))
        if chans:
            gapstr = '\n'.join('%s: %s' % c for c in chans)
            code = 1
            message = ("%s found in Omicron files for group %r\n%s" %
                       (tag.title(), group, gapstr))
        else:
            code = 0
            message = ("No %s found in Omicron files for group %r" %
                       (tag, group))
        print_nagios_json(code, message, jsonfp, tag=tag, **{tag: dict(chans)})

    # write group JSON
    jsonfp = outdir / "nagios-latency-{}.json".format(group)
    status.append(('latency', jsonfp))
    code = 0
    message = 'No channels have high latency for group %r' % group
    ldict = dict((c, max(latency[c].values())) for c in latency)
    for x, dt in zip([2, 1], [args.error, args.warning]):
        dh = dt / 3600.
        chans = [c for c in ldict if ldict[c] >= dh]
        if chans:
            code = x
            message = (
                "%d channels found with high latency (above %s seconds)" %
                (len(chans), dt))
            break
    print_nagios_json(code, message, jsonfp, tag='latency', latency=ldict)

    # auto-detect 'standard' JSON files
    for tag, name in zip(
        ['condor', 'omicron-online'],
        ['condor', 'processing'],
    ):
        f = outdir / "nagios-{}-{}.json".format(tag, group)
        if f.is_file():
            status.insert(0, (name, f))

    # write HTML summary
    if args.html:
        page = markup.page()
        page.init(
            title="%s Omicron Online status" % group,
            css=[
                ('//maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/'
                 'bootstrap.min.css'),
                ('//cdnjs.cloudflare.com/ajax/libs/fancybox/2.1.5/'
                 'jquery.fancybox.min.css'),
            ],
            script=[
                '//code.jquery.com/jquery-1.11.2.min.js',
                ('//maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/'
                 'bootstrap.min.js'),
                ('//cdnjs.cloudflare.com/ajax/libs/fancybox/2.1.5/'
                 'jquery.fancybox.min.js'),
            ],
        )
        page.div(class_='container')
        # write header
        page.div(class_='page-header')
        page.h1('Omicron Online status: %s' % group)
        page.div.close()  # page-header
        # write summary
        page.div(id_='json')
        page.h2("Processing status")
        for tag, f in status:
            jf = f.name
            page.a("%s status" % tag.title(),
                   href=jf,
                   role='button',
                   target="_blank",
                   id_="nagios-%s" % tag,
                   class_='btn btn-default json-status')
        page.p(style="padding-top: 5px;")
        page.small(
            "Hover over button for explanation, click to open JSON file", )
        page.p.close()
        page.div.close()  # id=json
        # show plots
        page.div(id_='plots')
        page.h2("Channel details")
        page.div(class_='row')
        for channel in sorted(channels):
            png = plots[channel].name
            page.div(class_="col-sm-6 col-md-4")
            page.div(class_="panel panel-default")
            page.div(class_='panel-heading')
            page.h3(channel, class_='panel-title', style="font-size: 14px;")
            page.div.close()  # panel-heading
            page.div(class_='panel-body')
            page.a(href=png,
                   target="_blank",
                   class_="fancybox",
                   rel="channel-status-img")
            page.img(src=png, class_='img-responsive')
            page.a.close()
            page.div.close()  # panel-body
            page.div.close()  # panel
            page.div.close()  # col
        page.div.close()  # row
        page.div.close()  # id=plots

        # dump parameters
        page.div(id_="parameters")
        page.h2("Parameters")
        for key, val in cp.items(group):
            page.p()
            page.strong("%s:" % key)
            page.add(val)
            page.p.close()
        page.div.close()  # id=parameters

        # finish and close
        page.div.close()  # container
        page.script("""
        function setStatus(data, id) {
            var txt = data.status_intervals[0].txt_status.split("\\n")[0];
            $("#"+id).attr("title", txt);
            var stat = data.status_intervals[0].num_status;
            if (stat == 0) {
                $("#"+id).addClass("btn-success"); }
            else if (stat == 1) {
                $("#"+id).addClass("btn-warning"); }
            else if (stat == 2){
                $("#"+id).addClass("btn-danger"); }
        }

        $(document).ready(function() {
            $(".json-status").each(function() {
                var jsonf = $(this).attr("href");
                var id = $(this).attr("id");
                $.getJSON(jsonf, function(data) { setStatus(data, id); });
            });

            $(".fancybox").fancybox({nextEffect: 'none', prevEffect: 'none'});
        });""",
                    type="text/javascript")
        with (outdir / "index.html").open("w") as f:
            f.write(str(page))
        logger.debug("HTML summary written to %s" % f.name)