Example #1
0
File: trace.py  Project: gwdetchar/hveto
def main(args=None):
    """Run the hveto-trace command-line tool
    """
    parser = create_parser()
    args = parser.parse_args(args=args)

    # trigger time to search for, and the (possible) vetoing segment
    trigger_time = float(args.trigger_time)
    segment = None

    # set up logging
    logger = cli.logger(name=PROG.split('python -m ').pop(),
                        level=args.loglevel)
    logger.debug('Running in verbose mode')
    logger.debug('Search directory: {}'.format(args.directory))

    # read the veto-segment statistics written by hveto
    stats_path = os.path.join(args.directory, 'summary-stats.json')
    try:
        segment_stats = json.load(open(stats_path))
    except IOError:
        logger.critical("'summary-stats.json' was not found "
                        "in the input directory")
        raise

    # scan every round's veto segment files for one containing the trigger
    for round_num, round_info in enumerate(segment_stats['rounds'], start=1):
        txt_files = (name for name in round_info[u'files'][u'VETO_SEGS']
                     if '.txt' in name)
        for fname in txt_files:
            segments = SegmentList.read(os.path.join(args.directory, fname))
            if trigger_time in segments:
                segment = segments[segments.find(trigger_time)]
                logger.info('Trigger time {0} was vetoed in round {1} by '
                            'segment {2}'.format(trigger_time, round_num,
                                                 segment))
                logger.debug('Round winner: {}'.format(round_info['name']))
                logger.debug(
                    'Significance: {}'.format(round_info['significance']))
                logger.debug('SNR: {}'.format(round_info['snr']))
                logger.debug('Window: {}'.format(round_info['window']))

    if segment is None:
        # no veto segment in any round contained the trigger time
        logger.info('Trigger time {} was not vetoed'.format(trigger_time))
Example #2
0
def main(args=None):
    """Run the trace tool

    Searches the hveto output directory for a veto segment containing
    the requested trigger time, logging the first match found.  Exits
    non-zero if ``summary-stats.json`` cannot be read.
    """
    parser = create_parser()
    args = parser.parse_args(args=args)
    directory = args.directory

    logger = cli.logger(name='hveto.trace', level=args.loglevel)
    logger.debug('Running in verbose mode')
    logger.debug('Search directory: %s' % directory)

    trigger_time = float(args.trigger_time)

    try:
        # use os.path.join rather than manual trailing-slash handling,
        # which crashed with IndexError on an empty directory string
        segment_stats = json.load(
            open(os.path.join(directory, 'summary-stats.json')))
    except IOError:
        logger.error("'summary-stats.json' was not found "
                     "in the input directory")
        # a missing stats file is a failure, so exit non-zero
        # (previously this exited 0, falsely signalling success)
        sys.exit(1)

    for i, cround in enumerate(segment_stats['rounds']):
        seg_files = filter(lambda f_name: '.txt' in f_name,
                           cround[u'files'][u'VETO_SEGS'])
        for f in seg_files:
            segments = SegmentList.read(os.path.join(directory, f))
            for segment in segments:
                if segment[0] <= trigger_time <= segment[1]:
                    # report the first vetoing segment and stop
                    logger.info('Signal was vetoed in round %d by '
                                'segment %s' % ((i + 1), segment))
                    logger.debug('Winner: %s' % cround['name'])
                    logger.debug('Significance: %s' % cround['significance'])
                    logger.debug('SNR: %s' % cround['snr'])
                    logger.debug('Window: %s' % cround['window'])
                    sys.exit(0)

    logger.info('Signal was not vetoed.')
Example #3
0
from .. import (__version__, config)
from ..triggers import (
    get_triggers,
    find_auxiliary_channels,
    find_trigger_files,
)
from ..utils import write_lal_cache

__author__ = 'Duncan Macleod <*****@*****.**>'

# interferometer prefix, taken from the environment (None if unset)
IFO = os.getenv('IFO')

# set up logger
# program name: use the module path when invoked as `python -m ...`
# (sys.argv[0] ends with '.py'), otherwise the executable's basename
PROG = ('python -m hveto.cli.cache_events'
        if sys.argv[0].endswith('.py') else os.path.basename(sys.argv[0]))
LOGGER = cli.logger(name=PROG.split('python -m ').pop())

# -- parse command line -------------------------------------------------------


def _abs_path(p):
    return Path(p).expanduser().resolve()


def create_parser():
    """Create a command-line parser for this entry point
    """
    parser = cli.create_parser(
        prog=PROG,
        description=__doc__,
        version=__version__,
Example #4
0
File: batch.py  Project: eagoetz/gwsumm
def main(args=None):
    """Build and write a Condor DAG of batch-mode ``gw_summary`` jobs

    Parses the command line, copies all INI configuration files (and,
    for non-local universes, X509/Kerberos credentials) into the output
    directory, then writes the Condor submit, DAG and shell-script
    files describing one ``gw_summary`` data job per configuration
    file, plus an optional local HTML-wrapper job that the data jobs
    depend on.

    Parameters
    ----------
    args : `list` of `str`, optional
        command-line arguments to parse, defaults to ``sys.argv[1:]``
    """
    parser = create_parser()
    args = parser.parse_args(args=args)

    # initialize logger
    logger = cli.logger(
        name=PROG.split('python -m ').pop(),
        level='DEBUG' if args.verbose else 'INFO',
    )

    # check time options: at most one selection mode may be given,
    # except that --gps-start-time/--gps-end-time form a valid pair
    N = sum([
        args.day is not None, args.month is not None, args.gps_start_time
        is not None, args.gps_end_time is not None
    ])
    if N > 1 and not (args.gps_start_time and args.gps_end_time):
        # parser.error() raises SystemExit itself, so the previous
        # 'raise parser.error(...)' never actually re-raised anything
        parser.error("Please give only one of --day, --month, or "
                     "--gps-start-time and --gps-end-time.")

    # normalise every configuration-file path to an absolute path
    for (i, cf) in enumerate(args.config_file):
        args.config_file[i] = ','.join(map(os.path.abspath, cf.split(',')))
    args.global_config = list(
        map(
            os.path.abspath,
            [fp for csv in args.global_config for fp in csv.split(',')],
        ))

    # -- build workflow directories -----------------

    # move to output directory
    indir = os.getcwd()
    mkdir(args.output_dir)
    os.chdir(args.output_dir)
    outdir = os.curdir

    # set node log path, and condor log path
    logdir = os.path.join(outdir, 'logs')
    htclogdir = args.log_dir or logdir
    mkdir(logdir, htclogdir)

    # set config directory and copy config files
    etcdir = os.path.join(outdir, 'etc')
    mkdir(etcdir)

    # copy global and per-job configs, skipping files already in place
    for (i, fp) in enumerate(args.global_config):
        inicopy = os.path.join(etcdir, os.path.basename(fp))
        if not os.path.isfile(inicopy) or not os.path.samefile(fp, inicopy):
            shutil.copyfile(fp, inicopy)
        args.global_config[i] = os.path.abspath(inicopy)
    for (i, csv) in enumerate(args.config_file):
        inicopy = []
        for fp in csv.split(','):
            fp2 = os.path.join(etcdir, os.path.basename(fp))
            if not os.path.isfile(fp2) or not os.path.samefile(fp, fp2):
                shutil.copyfile(fp, fp2)
            inicopy.append(os.path.abspath(fp2))
        args.config_file[i] = ','.join(inicopy)
    logger.debug("Copied all INI configuration files to %s" % etcdir)

    # -- configure X509 and kerberos for condor -----

    if args.universe != 'local':
        # copy X509 grid certificate into local location
        (x509cert, _) = find_credential()
        x509copy = os.path.join(etcdir, os.path.basename(x509cert))
        shutil.copyfile(x509cert, x509copy)

        # rerun kerberos with new path
        krb5cc = os.path.abspath(os.path.join(etcdir, 'krb5cc.krb5'))
        gwkerberos.kinit(krb5ccname=krb5cc)
        logger.debug("Configured Condor and Kerberos "
                     "for NFS-shared credentials")

    # -- build DAG ----------------------------------

    dag = pipeline.CondorDAG(os.path.join(htclogdir, '%s.log' % args.file_tag))
    dag.set_dag_file(os.path.join(outdir, args.file_tag))

    universe = args.universe

    # -- parse condor commands ----------------------

    # parse user-supplied 'key=value' condor commands into a dict
    condorcmds = {}
    if args.condor_timeout:
        # remove jobs stuck in their current state for too long
        condorcmds['periodic_remove'] = (
            'CurrentTime-EnteredCurrentStatus > %d' %
            (3600 * args.condor_timeout))
    for cmd_ in args.condor_command:
        (key, value) = cmd_.split('=', 1)
        condorcmds[key.rstrip().lower()] = value.strip()

    if args.universe != 'local':
        # add X509 and Kerberos credential paths to the job environment
        for (env_, val_) in zip(['X509_USER_PROXY', 'KRB5CCNAME'],
                                [os.path.abspath(x509copy), krb5cc]):
            condorenv = '%s=%s' % (env_, val_)
            if ('environment' in condorcmds
                    and env_ not in condorcmds['environment']):
                condorcmds['environment'] += ';%s' % condorenv
            elif 'environment' not in condorcmds:
                condorcmds['environment'] = condorenv

    # -- build individual gw_summary jobs -----------

    globalconfig = ','.join(args.global_config)

    jobs = []
    if not args.skip_html_wrapper:
        # the HTML wrapper always runs in the local universe
        htmljob = GWSummaryJob('local',
                               subdir=outdir,
                               logdir=logdir,
                               tag='%s_local' % args.file_tag,
                               **condorcmds)
        jobs.append(htmljob)
    if not args.html_wrapper_only:
        datajob = GWSummaryJob(universe,
                               subdir=outdir,
                               logdir=logdir,
                               tag=args.file_tag,
                               **condorcmds)
        jobs.append(datajob)

    # add common command-line options
    for job in jobs:
        if args.day:
            job.set_command('day')
            job.add_arg(args.day)
        elif args.week:
            job.set_command('week')
            job.add_arg(args.week)
        elif args.month:
            job.set_command('month')
            job.add_arg(args.month)
        elif args.year:
            job.set_command('year')
            job.add_arg(args.year)
        elif args.gps_start_time or args.gps_end_time:
            job.set_command('gps')
            job.add_arg(str(args.gps_start_time))
            job.add_arg(str(args.gps_end_time))
        else:
            job.set_command('day')
        if args.nds is True:
            job.add_opt('nds')
        if args.single_process:
            job.add_opt('single-process')
        elif args.multi_process is not None:
            job.add_opt('multi-process', args.multi_process)
        if args.verbose:
            job.add_opt('verbose')
        if args.ifo:
            job.add_opt('ifo', args.ifo)
        job.add_opt('on-segdb-error', args.on_segdb_error)
        job.add_opt('on-datafind-error', args.on_datafind_error)
        job.add_opt('output-dir', outdir)
        for (opt, fplist) in zip(
            ['--data-cache', '--event-cache', '--segment-cache'],
            [args.data_cache, args.event_cache, args.segment_cache]):
            if fplist:
                job.add_arg('%s %s' % (opt, (' %s ' % opt).join(fplist)))
        if args.no_htaccess:
            job.add_opt('no-htaccess')

    # make surrounding HTML first
    if not args.skip_html_wrapper:
        htmljob.add_opt('html-only', '')
        htmljob.add_opt('config-file',
                        ','.join([globalconfig] + args.config_file).strip(','))

        htmlnode = GWSummaryDAGNode(htmljob)
        for configfile in args.config_file:
            # register each config file individually; the original passed
            # the whole list here, leaving the loop variable unused
            htmlnode.add_input_file(configfile)
        htmlnode.set_category('gw_summary')
        dag.add_node(htmlnode)
        logger.debug(" -- Configured HTML htmlnode job")

    # create node for each config file
    if not args.html_wrapper_only:
        # add html opts
        datajob.add_opt('no-html', '')
        if args.archive:
            datajob.add_condor_cmd('+SummaryNodeType', '"$(macroarchive)"')
        # configure each data node
        for (i, configfile) in enumerate(args.config_file):
            node = GWSummaryDAGNode(datajob)
            node.add_var_arg('--config-file %s' %
                             ','.join([globalconfig, configfile]).strip(','))
            if args.archive:
                jobtag = os.path.splitext(os.path.basename(configfile))[0]
                archivetag = jobtag.upper().replace('-', '_')
                # drop the leading '<IFO>_' prefix from the archive tag
                if args.ifo and archivetag.startswith(
                        '%s_' % args.ifo.upper()):
                    archivetag = archivetag[3:]
                node.add_var_opt('archive', archivetag)
            for cf in configfile.split(','):
                node.add_input_file(cf)
            node.set_category('gw_summary')
            try:
                node.set_priority(args.priority[i])
            except IndexError:
                # fewer priorities given than config files: default to 0
                node.set_priority(0)
            node.set_retry(1)
            if not args.skip_html_wrapper:
                node.add_parent(htmlnode)
            dag.add_node(node)
            logger.debug(" -- Configured job for config %s" % configfile)

    if args.maxjobs:
        dag.add_maxjobs_category('gw_summary', args.maxjobs)

    # -- finish up ----------------------------------

    dag.write_sub_files()
    dag.write_dag()
    dag.write_script()
    logger.debug("Setup complete, DAG written to: {}".format(
        os.path.abspath(dag.get_dag_file())))

    # return to original directory
    os.chdir(indir)
Example #5
0
from hveto.segments import (write_ascii as write_ascii_segments,
                            read_veto_definer_file)
from hveto.triggers import (get_triggers, find_auxiliary_channels)

# set matplotlib backend: Agg is non-interactive, so plotting
# works without a display (must be set before backend-dependent imports)
from matplotlib import use

use("Agg")

# backend-dependent imports
from gwdetchar.plot import texify  # noqa: E402
from hveto import plot  # noqa: E402

# interferometer prefix, taken from the environment (None if unset)
IFO = os.getenv('IFO')
# wall-clock start time of this job
JOBSTART = time.time()
# module-level logger for the hveto tool
LOGGER = cli.logger(name='hveto')

__author__ = 'Duncan Macleod <*****@*****.**>'
__credits__ = ('Joshua Smith <*****@*****.**>, '
               'Alex Urban <*****@*****.**>')

# -- parse command line -------------------------------------------------------


def _abs_path(p):
    return os.path.abspath(os.path.expanduser(p))


def _find_max_significance(channels):
    """Utility to find hveto maximum significance with multiprocessing
    """
Example #6
0
from gwdetchar import cli

from .. import (__version__, config)
from ..triggers import (
    get_triggers,
    find_auxiliary_channels,
    find_trigger_files,
)
from ..utils import write_lal_cache

__author__ = 'Duncan Macleod <*****@*****.**>'

# interferometer prefix, taken from the environment (None if unset)
IFO = os.getenv('IFO')

# module-level logger for this tool
LOGGER = cli.logger(name='hveto.cache_events')


# -- parse command line -------------------------------------------------------

def _abs_path(p):
    return Path(p).expanduser().resolve()


def create_parser():
    """Create a command-line parser for this entry point
    """
    parser = cli.create_parser(
        description=__doc__,
        version=__version__,
    )