Example #1
import contextlib

import lal


@contextlib.contextmanager
def lal_ndebug():
    """Temporarily disable lal error messages, except for memory errors."""
    mask = ~(lal.LALERRORBIT | lal.LALWARNINGBIT | lal.LALINFOBIT
             | lal.LALTRACEBIT)
    old_level = lal.GetDebugLevel()
    lal.ClobberDebugLevel(old_level & mask)
    try:
        yield
    finally:
        lal.ClobberDebugLevel(old_level)
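
A minimal usage sketch, assuming the decorated version above and an installed lal module: LAL's error, warning, info, and trace messages are suppressed inside the with block, and the previous debug level is restored on exit.

import lal

before = lal.GetDebugLevel()
with lal_ndebug():
    # Only memory-related diagnostics are reported in here.
    quiet_level = lal.GetDebugLevel()
after = lal.GetDebugLevel()
assert after == before  # the original debug level is restored on exit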
Example #2
def main(args=None):
    opts = parser().parse_args(args)

    import logging
    log = logging.getLogger('BAYESTAR')

    # BAYESTAR imports.
    from .. import omp
    from ..io import fits, events
    from ..bayestar import localize

    # Other imports.
    import os
    from collections import OrderedDict
    import numpy as np
    import subprocess
    import sys

    # Squelch annoying and uninformative LAL log messages.
    import lal
    lal.ClobberDebugLevel(lal.LALNDEBUG)

    log.info('Using %d OpenMP thread(s)', omp.num_threads)

    # Read coinc file.
    log.info(
        '%s:reading input files', ','.join(file.name for file in opts.input))
    event_source = events.open(*opts.input, sample=opts.pycbc_sample)

    if opts.disable_detector:
        event_source = events.detector_disabled.open(
            event_source, opts.disable_detector)

    mkpath(opts.output)

    if opts.condor_submit:
        if opts.seed is not None:
            raise NotImplementedError(
                '--seed does not yet work with --condor-submit')
        if opts.coinc_event_id:
            raise ValueError(
                'must not set --coinc-event-id with --condor-submit')
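        # Self-submit to HTCondor: pipe a submit description into
        # condor_submit's stdin, re-running this script once per coincidence
        # event ID with --coinc-event-id set by the queue statement below.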
        with subprocess.Popen(['condor_submit'],
                              # FIXME: use text=True instead in Python >= 3.7
                              encoding=sys.stdin.encoding,
                              stdin=subprocess.PIPE) as proc:
            f = proc.stdin
            print('''
                  accounting_group = ligo.dev.o3.cbc.pe.bayestar
                  on_exit_remove = (ExitBySignal == False) && (ExitCode == 0)
                  on_exit_hold = (ExitBySignal == True) || (ExitCode != 0)
                  on_exit_hold_reason = (ExitBySignal == True \
                    ? strcat("The job exited with signal ", ExitSignal) \
                    : strcat("The job exited with code ", ExitCode))
                  request_memory = 1000 MB
                  universe = vanilla
                  getenv = true
                  executable = /usr/bin/env
                  JobBatchName = BAYESTAR
                  environment = "OMP_NUM_THREADS=1"
                  ''', file=f)
            print('error =', os.path.join(opts.output, '$(cid).err'), file=f)
            print('log =', os.path.join(opts.output, '$(cid).log'), file=f)
            print('arguments = "',
                  *(arg for arg in sys.argv if arg != '--condor-submit'),
                  '--coinc-event-id $(cid)"', file=f)
            print('queue cid in', *event_source, file=f)
        sys.exit(proc.returncode)

    if opts.coinc_event_id:
        event_source = OrderedDict(
            (key, event_source[key]) for key in opts.coinc_event_id)

    count_sky_maps_failed = 0

    # Loop over all sngl_inspiral <-> sngl_inspiral coincs.
    for int_coinc_event_id, event in event_source.items():
        coinc_event_id = 'coinc_event:coinc_event_id:{}'.format(
            int_coinc_event_id)

        # Loop over sky localization methods
        log.info('%s:computing sky map', coinc_event_id)
        if opts.chain_dump:
            chain_dump = '%s.hdf5' % int_coinc_event_id
        else:
            chain_dump = None
        try:
            sky_map = localize(
                event, opts.waveform, opts.f_low,
                np.deg2rad(opts.min_inclination),
                np.deg2rad(opts.max_inclination),
                opts.min_distance,
                opts.max_distance, opts.prior_distance_power,
                opts.cosmology, mcmc=opts.mcmc, chain_dump=chain_dump,
                enable_snr_series=opts.enable_snr_series,
                f_high_truncate=opts.f_high_truncate)
            sky_map.meta['objid'] = coinc_event_id
        except (ArithmeticError, ValueError):
            log.exception('%s:sky localization failed', coinc_event_id)
            count_sky_maps_failed += 1
            if not opts.keep_going:
                raise
        else:
            log.info('%s:saving sky map', coinc_event_id)
            filename = '%d.fits' % int_coinc_event_id
            fits.write_sky_map(
                os.path.join(opts.output, filename), sky_map, nest=True)

    if count_sky_maps_failed > 0:
        raise RuntimeError("{0} sky map{1} did not converge".format(
            count_sky_maps_failed, 's' if count_sky_maps_failed > 1 else ''))
Example #3
def main(args=None):
    opts = parser().parse_args(args)

    import logging
    log = logging.getLogger('BAYESTAR')

    # BAYESTAR imports.
    from ..io import events, hdf5
    from ..bayestar import condition, condition_prior, ez_emcee, log_post

    # Other imports.
    from astropy.table import Table
    import numpy as np
    import os
    from collections import OrderedDict
    import subprocess
    import sys

    # Squelch annoying and uninformative LAL log messages.
    import lal
    lal.ClobberDebugLevel(lal.LALNDEBUG)

    # Read coinc file.
    log.info('%s:reading input files',
             ','.join(file.name for file in opts.input))
    event_source = events.open(*opts.input, sample=opts.pycbc_sample)

    mkpath(opts.output)

    if opts.condor_submit:
        if opts.seed is not None:
            raise NotImplementedError(
                '--seed does not yet work with --condor-submit')
        if opts.coinc_event_id:
            raise ValueError(
                'must not set --coinc-event-id with --condor-submit')
        with subprocess.Popen(
            ['condor_submit'],
                # FIXME: use text=True instead in Python >= 3.7
                encoding=sys.stdin.encoding,
                stdin=subprocess.PIPE) as proc:
            f = proc.stdin
            print('''
                  accounting_group = ligo.dev.o3.cbc.pe.bayestar
                  on_exit_remove = (ExitBySignal == False) && (ExitCode == 0)
                  on_exit_hold = (ExitBySignal == True) || (ExitCode != 0)
                  on_exit_hold_reason = (ExitBySignal == True \
                    ? strcat("The job exited with signal ", ExitSignal) \
                    : strcat("The job exited with code ", ExitCode))
                  request_memory = 1000 MB
                  universe = vanilla
                  getenv = true
                  executable = /usr/bin/env
                  JobBatchName = BAYESTAR
                  environment = "OMP_NUM_THREADS=1"
                  ''',
                  file=f)
            print('error =', os.path.join(opts.output, '$(cid).err'), file=f)
            print('log =', os.path.join(opts.output, '$(cid).log'), file=f)
            print('arguments = "',
                  *(arg for arg in sys.argv if arg != '--condor-submit'),
                  '--coinc-event-id $(cid)"',
                  file=f)
            print('queue cid in', *event_source, file=f)
        sys.exit(proc.returncode)

    if opts.coinc_event_id:
        event_source = OrderedDict(
            (key, event_source[key]) for key in opts.coinc_event_id)

    # Loop over all sngl_inspiral <-> sngl_inspiral coincs.
    for int_coinc_event_id, event in event_source.items():
        coinc_event_id = 'coinc_event:coinc_event_id:{}'.format(
            int_coinc_event_id)

        log.info('%s:preparing', coinc_event_id)

        epoch, sample_rate, epochs, snrs, responses, locations, horizons = \
            condition(event, waveform=opts.waveform, f_low=opts.f_low,
                      enable_snr_series=opts.enable_snr_series,
                      f_high_truncate=opts.f_high_truncate)

        min_distance, max_distance, prior_distance_power, cosmology = \
            condition_prior(horizons, opts.min_distance, opts.max_distance,
                            opts.prior_distance_power, opts.cosmology)

        gmst = lal.GreenwichMeanSiderealTime(epoch)

        max_abs_t = 2 * snrs.data.shape[1] / sample_rate
        xmin = [0, -1, min_distance, -1, 0, 0]
        xmax = [2 * np.pi, 1, max_distance, 1, 2 * np.pi, 2 * max_abs_t]
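        # Declination is sampled as sin(dec) and inclination as u = cos(incl)
        # so that a flat prior on the transformed variables is isotropic.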
        names = 'ra dec distance inclination twopsi time'.split()
        transformed_names = 'ra sin_dec distance u twopsi time'.split()
        forward_transforms = [
            identity, np.sin, identity, np.cos, identity, identity
        ]
        reverse_transforms = [
            identity, np.arcsin, identity, np.arccos, identity, identity
        ]
        kwargs = dict(min_distance=min_distance,
                      max_distance=max_distance,
                      prior_distance_power=prior_distance_power,
                      cosmology=cosmology,
                      gmst=gmst,
                      sample_rate=sample_rate,
                      epochs=epochs,
                      snrs=snrs,
                      responses=responses,
                      locations=locations,
                      horizons=horizons)

        # Fix parameters
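        # Iterate in reverse index order so that deleting entry i does not
        # shift the positions of entries that have not been visited yet.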
        for i, key in reversed(list(enumerate(['ra', 'dec', 'distance']))):
            value = getattr(opts, key)
            if value is None:
                continue

            if key in ['ra', 'dec']:
                # FIXME: figure out a more elegant way to address different
                # units in command line arguments and posterior samples
                value = np.deg2rad(value)

            kwargs[transformed_names[i]] = forward_transforms[i](value)
            del (xmin[i], xmax[i], names[i], transformed_names[i],
                 forward_transforms[i], reverse_transforms[i])

        log.info('%s:sampling', coinc_event_id)

        # Run MCMC
        chain = ez_emcee(log_post, xmin, xmax, kwargs=kwargs, vectorize=True)

        # Transform back from sin_dec to dec and cos_inclination to inclination
        for i, func in enumerate(reverse_transforms):
            chain[:, i] = func(chain[:, i])

        # Create Astropy table
        chain = Table(rows=chain, names=names)

        log.info('%s:saving posterior samples', coinc_event_id)

        hdf5.write_samples(chain,
                           os.path.join(opts.output,
                                        '{}.hdf5'.format(int_coinc_event_id)),
                           path='/bayestar/posterior_samples',
                           overwrite=True)
Example #4
import logging
log = logging.getLogger('BAYESTAR')

# BAYESTAR imports.
from lalinference.io import fits, events
from lalinference.bayestar.sky_map import localize

# Other imports.
import os
from collections import OrderedDict
import sys
import six

# Squelch annoying and uninformative LAL log messages.
import lal
lal.ClobberDebugLevel(lal.LALNDEBUG)

# Read coinc file.
log.info('%s:reading input files', ','.join(file.name for file in opts.input))
event_source = events.open(*opts.input, sample=opts.pycbc_sample)

command.mkpath(opts.output)

if opts.condor_submit:
    if opts.coinc_event_id:
        raise ValueError('must not set --coinc-event-id with --condor-submit')
    cmd = [
        'condor_submit', 'accounting_group=ligo.dev.o3.cbc.pe.bayestar',
        'on_exit_remove = (ExitBySignal == False) && (ExitCode == 0)',
        'on_exit_hold = (ExitBySignal == True) || (ExitCode != 0)',
    'on_exit_hold_reason = (ExitBySignal == True ? strcat("The job exited with signal ", ExitSignal) : strcat("The job exited with code ", ExitCode))',
Example #5

def main(args=None):
    opts = parser().parse_args(args)

    import logging
    import os
    import re
    import sys
    import tempfile
    import urllib.parse
    from ..bayestar import localize, rasterize
    from ..io import fits
    from ..io import events
    from .. import omp
    from ..util.file import rename
    import ligo.gracedb.logging
    import ligo.gracedb.rest
    import numpy as np

    # Squelch annoying and uninformative LAL log messages.
    import lal
    lal.ClobberDebugLevel(lal.LALNDEBUG)

    log = logging.getLogger('BAYESTAR')

    log.info('Using %d OpenMP thread(s)', omp.num_threads)

    # If no GraceDB IDs were specified on the command line, then read them
    # from stdin line-by-line.
    graceids = opts.graceid if opts.graceid else iterlines(sys.stdin)

    # Fire up a GraceDb client
    # FIXME: Mimic the behavior of the GraceDb command line client, where the
    # environment variable GRACEDB_SERVICE_URL overrides the default service
    # URL. It would be nice to get this behavior into the gracedb package
    # itself.
    gracedb = ligo.gracedb.rest.GraceDb(
        os.environ.get('GRACEDB_SERVICE_URL',
                       ligo.gracedb.rest.DEFAULT_SERVICE_URL))

    # Determine the base URL for event pages.
    scheme, netloc, *_ = urllib.parse.urlparse(gracedb._service_url)
    base_url = urllib.parse.urlunparse((scheme, netloc, 'events', '', '', ''))

    if opts.chain_dump:
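        # Name the chain dump after the FITS output file, swapping the
        # .fits or .fits.gz extension for .hdf5.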
        chain_dump = re.sub(r'.fits(.gz)?$', r'.hdf5', opts.output)
    else:
        chain_dump = None

    tags = ("sky_loc", )
    if not opts.no_tag:
        tags += ("lvem", )

    event_source = events.gracedb.open(graceids, gracedb)

    if opts.disable_detector:
        event_source = events.detector_disabled.open(event_source,
                                                     opts.disable_detector)

    for graceid in event_source.keys():

        try:
            event = event_source[graceid]
        except:  # noqa: E722
            log.exception('failed to read event %s from GraceDB', graceid)
            continue

        # Send log messages to GraceDb too
        if not opts.dry_run:
            handler = ligo.gracedb.logging.GraceDbLogHandler(gracedb, graceid)
            handler.setLevel(logging.INFO)
            logging.root.addHandler(handler)

        # A little bit of Cylon humor
        log.info('by your command...')

        try:
            # perform sky localization
            log.info("starting sky localization")
            sky_map = localize(event,
                               opts.waveform,
                               opts.f_low,
                               np.deg2rad(opts.min_inclination),
                               np.deg2rad(opts.max_inclination),
                               opts.min_distance,
                               opts.max_distance,
                               opts.prior_distance_power,
                               opts.cosmology,
                               mcmc=opts.mcmc,
                               chain_dump=chain_dump,
                               enable_snr_series=opts.enable_snr_series,
                               f_high_truncate=opts.f_high_truncate)
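            # Flatten the multi-order sky map to a single HEALPix resolution
            # unless multi-resolution output was requested.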
            if not opts.enable_multiresolution:
                sky_map = rasterize(sky_map)
            sky_map.meta['objid'] = str(graceid)
            sky_map.meta['url'] = '{}/{}'.format(base_url, graceid)
            log.info("sky localization complete")

            # upload FITS file
            with tempfile.TemporaryDirectory() as fitsdir:
                fitspath = os.path.join(fitsdir, opts.output)
                fits.write_sky_map(fitspath, sky_map, nest=True)
                log.debug('wrote FITS file: %s', opts.output)
                if opts.dry_run:
                    rename(fitspath, os.path.join('.', opts.output))
                else:
                    gracedb.writeLog(graceid,
                                     "BAYESTAR rapid sky localization ready",
                                     filename=fitspath,
                                     tagname=tags)
                log.debug('uploaded FITS file')
        except KeyboardInterrupt:
            # Produce log message and then exit if we receive SIGINT (ctrl-C).
            log.exception("sky localization failed")
            raise
        except:  # noqa: E722
            # Produce log message for any otherwise uncaught exception.
            # Unless we are in dry-run mode, keep going.
            log.exception("sky localization failed")
            if opts.dry_run:
                # Then re-raise the exception if we are in dry-run mode
                raise

        if not opts.dry_run:
            # Remove old log handler
            logging.root.removeHandler(handler)
            del handler