Example #1
def get_segment_definer_comments(xml_file, include_version=True):
    """Returns a dict with the comment column as the value for each segment"""

    from pycbc.io.ligolw import LIGOLWContentHandler as h

    # read segment definer table
    xmldoc = ligolw_utils.load_fileobj(xml_file,
                                       compress='auto',
                                       contenthandler=h)
    seg_def_table = lsctables.SegmentDefTable.get_table(xmldoc)

    # put comment column into a dict
    comment_dict = {}
    for seg_def in seg_def_table:
        if include_version:
            full_channel_name = ':'.join(
                [str(seg_def.ifos),
                 str(seg_def.name),
                 str(seg_def.version)])
        else:
            full_channel_name = ':'.join(
                [str(seg_def.ifos), str(seg_def.name)])

        comment_dict[full_channel_name] = seg_def.comment

    return comment_dict
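A minimal usage sketch for the function above (the segments file name is hypothetical; the function expects an open binary file object for a LIGO_LW segments XML file):

# Hypothetical usage of get_segment_definer_comments
with open("H1L1-SEGMENTS-1187000000-4096.xml", "rb") as xml_file:
    comments = get_segment_definer_comments(xml_file, include_version=True)
for channel, comment in comments.items():
    print(channel, "->", comment)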
Example #2
def psd_from_event(gid,
                   outdir=".",
                   save=False,
                   filename="psd.xml.gz",
                   verbose=False):
    """Get the psd.xml.gz given a gracedb event id.

	Args:
		gid (str): The gracedb event id.
		outdir (str, default="."): The output directory.
		filename (str, default="psd.xml.gz"): The output filename.
		save (bool, default=False): True if want to save the file, False otherwise.
		verbose (bool, default=False): Be verbose.

	Returns:
		A dictionary of complex LAL Series key by instrument.

	"""
    gracedb_client = gracedb.GraceDb()
    psd_fileobj = lvalert_helper.get_filename(gracedb_client, gid, filename)
    xmldoc = ligolw_utils.load_fileobj(
        psd_fileobj, contenthandler=lal.series.PSDContentHandler)
    if save:
        if verbose:
            sys.stderr.write("saving psd file to %s ...\n" %
                             os.path.join(outdir, filename))
        ligolw_utils.write_filename(xmldoc,
                                    os.path.join(outdir, filename),
                                    gz=filename.endswith("gz"))
    return lal.series.read_psd_xmldoc(xmldoc)
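A usage sketch for the function above, assuming network access to GraceDB and a hypothetical event id "G123456"; the returned dictionary maps instrument names to LAL frequency series:

# Hypothetical usage of psd_from_event
psds = psd_from_event("G123456", outdir=".", save=True, verbose=True)
for ifo, series in psds.items():
    print(ifo, series.deltaF, len(series.data.data))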
Example #3
def from_xml(filename, length, delta_f, low_freq_cutoff, ifo_string=None,
             root_name='psd'):
    """Read an ASCII file containing one-sided ASD or PSD  data and generate
    a frequency series with the corresponding PSD. The ASD or PSD data is
    interpolated in order to match the desired resolution of the
    generated frequency series.

    Parameters
    ----------
    filename : string
        Path to a two-column ASCII file. The first column must contain
        the frequency (positive frequencies only) and the second column
        must contain the amplitude density OR power spectral density.
    length : int
        Length of the frequency series in samples.
    delta_f : float
        Frequency resolution of the frequency series in Herz.
    low_freq_cutoff : float
        Frequencies below this value are set to zero.
    ifo_string : string
        Use the PSD in the file's PSD dictionary with this ifo string.
        If not given and only one PSD present in the file return that, if not
        given and multiple (or zero) PSDs present an exception will be raised.
    root_name : string (default='psd')
        If given use this as the root name for the PSD XML file. If this means
        nothing to you, then it is probably safe to ignore this option.

    Returns
    -------
    psd : FrequencySeries
        The generated frequency series.

    """
    import lal.series
    from ligo.lw import utils as ligolw_utils

    with open(filename, 'rb') as fp:
        ct_handler = lal.series.PSDContentHandler
        xml_doc = ligolw_utils.load_fileobj(fp, compress='auto',
                                            contenthandler=ct_handler)
        psd_dict = lal.series.read_psd_xmldoc(xml_doc, root_name=root_name)

    if ifo_string is not None:
        psd_freq_series = psd_dict[ifo_string]
    elif len(psd_dict.keys()) == 1:
        psd_freq_series = psd_dict[tuple(psd_dict.keys())[0]]
    else:
        err_msg = "No ifo string given and input XML file contains not "
        err_msg += "exactly one PSD. Specify which PSD you want to use."
        raise ValueError(err_msg)

    noise_data = psd_freq_series.data.data[:]
    freq_data = numpy.arange(len(noise_data)) * psd_freq_series.deltaF

    return from_numpy_arrays(freq_data, noise_data, length, delta_f,
                             low_freq_cutoff)
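A usage sketch for the function above (the file name and parameter values are illustrative only):

# Hypothetical usage of from_xml
psd = from_xml("H1L1V1-REFERENCE_PSD.xml.gz", length=2049, delta_f=0.25,
               low_freq_cutoff=20.0, ifo_string="H1")
print(len(psd), psd.delta_f)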
Example #4
def test_pick_coinc():
    coinc = pick_coinc()
    xmldoc = utils.load_fileobj(io.BytesIO(coinc),
                                contenthandler=ContentHandler)

    coinc_inspiral_table = lsctables.CoincInspiralTable.get_table(xmldoc)

    assert len(coinc_inspiral_table) == 1
    coinc_inspiral, = coinc_inspiral_table
    assert coinc_inspiral.end <= mock_now()
Example #5
def _jitter_snr(coinc_bytes):
    coinc_xml = io.BytesIO(coinc_bytes)
    xmldoc = utils.load_fileobj(coinc_xml, contenthandler=ContentHandler)

    coinc_inspiral_table = lsctables.CoincInspiralTable.get_table(xmldoc)

    # Add a tiny amount of jitter in SNR so that uploads have random
    # preferred event precedence.
    for row in coinc_inspiral_table:
        row.snr += random.gauss(0, 1e-9)

    coinc_xml = io.BytesIO()
    utils.write_fileobj(xmldoc, coinc_xml)
    return coinc_xml.getvalue()
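A round-trip sketch for the helper above, assuming coinc_bytes already holds the contents of a coinc.xml file:

# Hypothetical round trip through _jitter_snr
jittered = _jitter_snr(coinc_bytes)
xmldoc = utils.load_fileobj(io.BytesIO(jittered), contenthandler=ContentHandler)
print(len(lsctables.CoincInspiralTable.get_table(xmldoc)))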
Example #6
def main():
    import os
    import sys
    import json
    import numpy as np
    import pandas as pd
    from ligo.lw import utils as ligolw_utils
    from ligo.lw.lsctables import SimInspiralTable
    from ligo.lw.lsctables import SnglInspiralTable
    from ligo.skymap.io.events.ligolw import ContentHandler

    p = parser()
    args = p.parse_args()

    if args.inj:
        file_obj = open(args.inj, 'rb')
    elif args.coinc:
        file_obj = open(args.coinc, 'rb')
    else:
        print('Must supply either injection xml file or coinc xml file')
        sys.exit(1)

    xmldoc = ligolw_utils.load_fileobj(file_obj, contenthandler=ContentHandler)
    if args.inj:
        inspiral_table = SimInspiralTable.get_table(xmldoc)
    else:
        inspiral_table = SnglInspiralTable.get_table(xmldoc)

    df = pd.DataFrame()
    json_dict = {}
    for column in args.cols:
        values_obj = inspiral_table.getColumnByName(column)
        values = values_obj.asarray()
        df[column] = values
        json_dict[column] = values.tolist()
    df.index += 1

    print(df)
    if args.inj:
        jsonfilename = args.inj.split(".")[0] + ".json"
    elif args.coinc:
        jsonfilename = args.coinc.split(".")[0] + ".json"
    if args.out:
        jsonfilename = args.out
    with open(jsonfilename, 'w') as f:
        json.dump(json_dict, f, indent=2, sort_keys=True)
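A command-line sketch for the script above; the flag names are inferred from the argument attributes used in main() and the file names are hypothetical:

# e.g. python extract_columns.py --inj injections.xml --cols mass1 mass2 distance
if __name__ == '__main__':
    main()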
Example #7
def open_xmldoc(fobj, **kwargs):
    """Try and open an existing LIGO_LW-format file, or create a new Document

    Parameters
    ----------
    fobj : `str`, `file`
        file path or open file object to read

    **kwargs
        other keyword arguments to pass to
        :func:`~ligo.lw.utils.load_filename`, or
        :func:`~ligo.lw.utils.load_fileobj` as appropriate

    Returns
    --------
    xmldoc : :class:`~ligo.lw.ligolw.Document`
        either the `Document` as parsed from an existing file, or a new, empty
        `Document`
    """
    from ligo.lw.ligolw import (Document, LIGOLWContentHandler)
    from ligo.lw.lsctables import use_in
    from ligo.lw.utils import (load_filename, load_fileobj)

    use_in(kwargs.setdefault('contenthandler', LIGOLWContentHandler))

    try:  # try and load existing file
        if isinstance(fobj, str):
            return load_filename(fobj, **kwargs)
        if isinstance(fobj, FILE_LIKE):
            return load_fileobj(fobj, **kwargs)[0]
    except (OSError, IOError):  # or just create a new Document
        return Document()
    except LigolwElementError as exc:
        if LIGO_LW_COMPAT_ERROR.search(str(exc)):
            try:
                return open_xmldoc(fobj, ilwdchar_compat=True, **kwargs)
            except Exception:  # for any reason, raise original
                pass
        raise
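A usage sketch for open_xmldoc (the file name is hypothetical); it returns a parsed Document when the file exists and a new empty Document otherwise:

# Hypothetical usage of open_xmldoc
xmldoc = open_xmldoc("events.xml")
print(xmldoc.childNodes)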
Example #8
def open_xmldoc(fobj, **kwargs):
    """Try and open an existing LIGO_LW-format file, or create a new Document

    Parameters
    ----------
    fobj : `str`, `file`
        file path or open file object to read

    **kwargs
        other keyword arguments to pass to
        :func:`~ligo.lw.utils.load_filename`, or
        :func:`~ligo.lw.utils.load_fileobj` as appropriate

    Returns
    --------
    xmldoc : :class:`~ligo.lw.ligolw.Document`
        either the `Document` as parsed from an existing file, or a new, empty
        `Document`
    """
    from ligo.lw.ligolw import (Document, LIGOLWContentHandler)
    from ligo.lw.lsctables import use_in
    from ligo.lw.utils import (load_filename, load_fileobj)

    use_in(kwargs.setdefault('contenthandler', LIGOLWContentHandler))

    try:  # try and load existing file
        if isinstance(fobj, string_types):
            return load_filename(fobj, **kwargs)
        if isinstance(fobj, FILE_LIKE):
            return load_fileobj(fobj, **kwargs)[0]
    except (OSError, IOError):  # or just create a new Document
        return Document()
    except LigolwElementError as exc:
        if LIGO_LW_COMPAT_ERROR.search(str(exc)):
            try:
                return open_xmldoc(fobj, ilwdchar_compat=True, **kwargs)
            except Exception:  # for any reason, raise original
                pass
        raise
Example #9
def _read_xml(f, fallbackpath=None):
    if f is None:
        doc = filename = None
    elif isinstance(f, Element):
        doc = f
        filename = ''
    elif isinstance(f, str):
        try:
            doc = load_filename(f, contenthandler=ContentHandler)
        except IOError as e:
            if e.errno == errno.ENOENT and fallbackpath and \
                    not os.path.isabs(f):
                f = os.path.join(fallbackpath, f)
                doc = load_filename(f, contenthandler=ContentHandler)
            else:
                raise
        filename = f
    else:
        doc = load_fileobj(f, contenthandler=ContentHandler)
        try:
            filename = f.name
        except AttributeError:
            filename = ''
    return doc, filename
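A usage sketch for _read_xml, which accepts a path, an open file object, an already-parsed Element, or None (the path and fallback directory are hypothetical):

# Hypothetical usage of _read_xml
doc, filename = _read_xml("coinc.xml", fallbackpath="/path/to/events")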
Example #10
def main(args=None):
    from ligo.lw import lsctables
    from ligo.lw import utils as ligolw_utils
    from ligo.lw import ligolw
    import lal.series
    from scipy import stats

    p = parser()
    args = p.parse_args(args)

    xmldoc = ligolw.Document()
    xmlroot = xmldoc.appendChild(ligolw.LIGO_LW())
    process = register_to_xmldoc(xmldoc, p, args)

    gwcosmo = GWCosmo(
        cosmology.default_cosmology.get_cosmology_from_string(args.cosmology))

    ns_mass_min = 1.0
    ns_mass_max = 2.0
    bh_mass_min = 5.0
    bh_mass_max = 50.0

    ns_astro_spin_min = -0.05
    ns_astro_spin_max = +0.05
    ns_astro_mass_dist = stats.norm(1.33, 0.09)
    ns_astro_spin_dist = stats.uniform(ns_astro_spin_min,
                                       ns_astro_spin_max - ns_astro_spin_min)

    ns_broad_spin_min = -0.4
    ns_broad_spin_max = +0.4
    ns_broad_mass_dist = stats.uniform(ns_mass_min, ns_mass_max - ns_mass_min)
    ns_broad_spin_dist = stats.uniform(ns_broad_spin_min,
                                       ns_broad_spin_max - ns_broad_spin_min)

    bh_astro_spin_min = -0.99
    bh_astro_spin_max = +0.99
    bh_astro_mass_dist = stats.pareto(b=1.3)
    bh_astro_spin_dist = stats.uniform(bh_astro_spin_min,
                                       bh_astro_spin_max - bh_astro_spin_min)

    bh_broad_spin_min = -0.99
    bh_broad_spin_max = +0.99
    bh_broad_mass_dist = stats.reciprocal(bh_mass_min, bh_mass_max)
    bh_broad_spin_dist = stats.uniform(bh_broad_spin_min,
                                       bh_broad_spin_max - bh_broad_spin_min)

    if args.distribution.startswith('bns_'):
        m1_min = m2_min = ns_mass_min
        m1_max = m2_max = ns_mass_max
        if args.distribution.endswith('_astro'):
            x1_min = x2_min = ns_astro_spin_min
            x1_max = x2_max = ns_astro_spin_max
            m1_dist = m2_dist = ns_astro_mass_dist
            x1_dist = x2_dist = ns_astro_spin_dist
        elif args.distribution.endswith('_broad'):
            x1_min = x2_min = ns_broad_spin_min
            x1_max = x2_max = ns_broad_spin_max
            m1_dist = m2_dist = ns_broad_mass_dist
            x1_dist = x2_dist = ns_broad_spin_dist
        else:  # pragma: no cover
            assert_not_reached()
    elif args.distribution.startswith('nsbh_'):
        m1_min = bh_mass_min
        m1_max = bh_mass_max
        m2_min = ns_mass_min
        m2_max = ns_mass_max
        if args.distribution.endswith('_astro'):
            x1_min = bh_astro_spin_min
            x1_max = bh_astro_spin_max
            x2_min = ns_astro_spin_min
            x2_max = ns_astro_spin_max
            m1_dist = bh_astro_mass_dist
            m2_dist = ns_astro_mass_dist
            x1_dist = bh_astro_spin_dist
            x2_dist = ns_astro_spin_dist
        elif args.distribution.endswith('_broad'):
            x1_min = bh_broad_spin_min
            x1_max = bh_broad_spin_max
            x2_min = ns_broad_spin_min
            x2_max = ns_broad_spin_max
            m1_dist = bh_broad_mass_dist
            m2_dist = ns_broad_mass_dist
            x1_dist = bh_broad_spin_dist
            x2_dist = ns_broad_spin_dist
        else:  # pragma: no cover
            assert_not_reached()
    elif args.distribution.startswith('bbh_'):
        m1_min = m2_min = bh_mass_min
        m1_max = m2_max = bh_mass_max
        if args.distribution.endswith('_astro'):
            x1_min = x2_min = bh_astro_spin_min
            x1_max = x2_max = bh_astro_spin_max
            m1_dist = m2_dist = bh_astro_mass_dist
            x1_dist = x2_dist = bh_astro_spin_dist
        elif args.distribution.endswith('_broad'):
            x1_min = x2_min = bh_broad_spin_min
            x1_max = x2_max = bh_broad_spin_max
            m1_dist = m2_dist = bh_broad_mass_dist
            x1_dist = x2_dist = bh_broad_spin_dist
        else:  # pragma: no cover
            assert_not_reached()
    else:  # pragma: no cover
        assert_not_reached()

    dists = (m1_dist, m2_dist, x1_dist, x2_dist)

    # Read PSDs
    psds = list(
        lal.series.read_psd_xmldoc(
            ligolw_utils.load_fileobj(
                args.reference_psd,
                contenthandler=lal.series.PSDContentHandler)).values())

    # Construct mass1, mass2, spin1z, spin2z grid.
    m1 = np.geomspace(m1_min, m1_max, 10)
    m2 = np.geomspace(m2_min, m2_max, 10)
    x1 = np.linspace(x1_min, x1_max, 10)
    x2 = np.linspace(x2_min, x2_max, 10)
    params = m1, m2, x1, x2

    # Calculate the maximum distance on the grid.
    max_z = gwcosmo.get_max_z(psds,
                              args.waveform,
                              args.f_low,
                              args.min_snr,
                              m1,
                              m2,
                              x1,
                              x2,
                              jobs=args.jobs)
    if args.max_distance is not None:
        new_max_z = cosmology.z_at_value(gwcosmo.cosmo.luminosity_distance,
                                         args.max_distance * units.Mpc)
        max_z[max_z > new_max_z] = new_max_z
    max_distance = gwcosmo.sensitive_distance(max_z).to_value(units.Mpc)

    # Find piecewise constant approximate upper bound on distance.
    max_distance = cell_max(max_distance)

    # Calculate V * T in each grid cell
    cdfs = [dist.cdf(param) for param, dist in zip(params, dists)]
    cdf_los = [cdf[:-1] for cdf in cdfs]
    cdfs = [np.diff(cdf) for cdf in cdfs]
    probs = np.prod(np.meshgrid(*cdfs, indexing='ij'), axis=0)
    probs /= probs.sum()
    probs *= 4 / 3 * np.pi * max_distance**3
    volume = probs.sum()
    probs /= volume
    probs = probs.ravel()

    volumetric_rate = args.nsamples / volume * units.year**-1 * units.Mpc**-3

    # Draw random grid cells
    dist = stats.rv_discrete(values=(np.arange(len(probs)), probs))
    indices = np.unravel_index(dist.rvs(size=args.nsamples),
                               max_distance.shape)

    # Draw random intrinsic params from each cell
    cols = {}
    cols['mass1'], cols['mass2'], cols['spin1z'], cols['spin2z'] = [
        dist.ppf(stats.uniform(cdf_lo[i], cdf[i]).rvs(size=args.nsamples))
        for i, dist, cdf_lo, cdf in zip(indices, dists, cdf_los, cdfs)
    ]

    # Swap binary components as needed to ensure that mass1 >= mass2.
    # Note that the .copy() is important.
    # See https://github.com/numpy/numpy/issues/14428
    swap = cols['mass1'] < cols['mass2']
    cols['mass1'][swap], cols['mass2'][swap] = \
        cols['mass2'][swap].copy(), cols['mass1'][swap].copy()
    cols['spin1z'][swap], cols['spin2z'][swap] = \
        cols['spin2z'][swap].copy(), cols['spin1z'][swap].copy()

    # Draw random extrinsic parameters
    cols['distance'] = stats.powerlaw(
        a=3, scale=max_distance[indices]).rvs(size=args.nsamples)
    cols['longitude'] = stats.uniform(0, 2 * np.pi).rvs(size=args.nsamples)
    cols['latitude'] = np.arcsin(stats.uniform(-1, 2).rvs(size=args.nsamples))
    cols['inclination'] = np.arccos(
        stats.uniform(-1, 2).rvs(size=args.nsamples))
    cols['polarization'] = stats.uniform(0, 2 * np.pi).rvs(size=args.nsamples)
    cols['coa_phase'] = stats.uniform(-np.pi,
                                      2 * np.pi).rvs(size=args.nsamples)
    cols['time_geocent'] = stats.uniform(1e9, units.year.to(
        units.second)).rvs(size=args.nsamples)

    # Convert from sensitive distance to redshift and comoving distance.
    # FIXME: Replace this brute-force lookup table with a solver.
    z = np.linspace(0, max_z.max(), 10000)
    ds = gwcosmo.sensitive_distance(z).to_value(units.Mpc)
    dc = gwcosmo.cosmo.comoving_distance(z).to_value(units.Mpc)
    z_for_ds = interp1d(ds, z, kind='cubic', assume_sorted=True)
    dc_for_ds = interp1d(ds, dc, kind='cubic', assume_sorted=True)
    zp1 = 1 + z_for_ds(cols['distance'])
    cols['distance'] = dc_for_ds(cols['distance'])

    # Apply redshift factor to convert from comoving distance and source frame
    # masses to luminosity distance and observer frame masses.
    for key in ['distance', 'mass1', 'mass2']:
        cols[key] *= zp1

    # Populate sim_inspiral table
    sims = xmlroot.appendChild(lsctables.New(lsctables.SimInspiralTable))
    for row in zip(*cols.values()):
        sims.appendRow(**dict(dict.fromkeys(sims.validcolumns, None),
                              process_id=process.process_id,
                              simulation_id=sims.get_next_id(),
                              waveform=args.waveform,
                              f_lower=args.f_low,
                              **dict(zip(cols.keys(), row))))

    # Record process end time.
    process.comment = str(volumetric_rate)
    process.set_end_time_now()

    # Write output file.
    write_fileobj(xmldoc, args.output)
Example #11
parser.add_argument('-c',
                    '--cols',
                    nargs='+',
                    help='columns of parameters to be read',
                    required=True)
args = parser.parse_args()

if args.inj:
    file_obj = open(args.inj, 'rb')
elif args.coinc:
    file_obj = open(args.coinc, 'rb')
else:
    print('Must supply either injection xml file or coinc xml file')
    sys.exit(1)

xmldoc = ligolw_utils.load_fileobj(file_obj, contenthandler=ContentHandler)
if args.inj:
    inspiral_table = SimInspiralTable.get_table(xmldoc)
else:
    inspiral_table = SnglInspiralTable.get_table(xmldoc)

df = pd.DataFrame()
json_dict = {}
for column in args.cols:
    values = inspiral_table.get_column(column)
    df[column] = values
    json_dict[column] = values.tolist()
df.index += 1

print(df)
if args.inj:
    jsonfilename = args.inj.split(".")[0] + ".json"
elif args.coinc:
    jsonfilename = args.coinc.split(".")[0] + ".json"
if args.out:
    jsonfilename = args.out
with open(jsonfilename, 'w') as f:
    json.dump(json_dict, f, indent=2, sort_keys=True)
Example #12
def pick_coinc():
    """Pick a coincidence from the "First Two Years" paper."""
    with resources.open_binary(data_first2years, 'gstlal.xml.gz') as f:
        xmldoc = utils.load_fileobj(f, contenthandler=ContentHandler)
    root, = xmldoc.childNodes

    # Remove unneeded tables
    for name in (
            'filter',  # lsctables.FilterTable removed from ligo.lw
            lsctables.SegmentTable.tableName,
            lsctables.SegmentDefTable.tableName,
            lsctables.SimInspiralTable.tableName,
            lsctables.SummValueTable.tableName,
            lsctables.SearchSummVarsTable.tableName):
        root.removeChild(ligo.lw.table.get_table(xmldoc, name))

    coinc_inspiral_table = table = lsctables.CoincInspiralTable.get_table(
        xmldoc)

    # Determine the event with the most recent sidereal time
    gps_time_now = lal.GPSTimeNow()
    gmsts = np.asarray([lal.GreenwichMeanSiderealTime(_.end) for _ in table])
    gmst_now = lal.GreenwichMeanSiderealTime(gps_time_now)
    div, rem = divmod(gmst_now - gmsts, 2 * np.pi)
    i = np.argmin(rem)
    new_gmst = div[i] * 2 * np.pi + gmsts[i]
    old_time = table[i].end
    new_time = lal.LIGOTimeGPS()
    result = lal.GreenwichMeanSiderealTimeToGPS(new_gmst, new_time)
    result.disown()
    del result
    delta_t = new_time - old_time
    target_coinc_event_id = int(table[i].coinc_event_id)

    # Remove unneeded rows
    table[:] = [
        row for row in table
        if int(row.coinc_event_id) == target_coinc_event_id
    ]
    target_end_time = table[0].end

    coinc_table = table = lsctables.CoincTable.get_table(xmldoc)
    table[:] = [
        row for row in table
        if int(row.coinc_event_id) == target_coinc_event_id
    ]

    table = lsctables.CoincMapTable.get_table(xmldoc)
    table[:] = [
        row for row in table
        if int(row.coinc_event_id) == target_coinc_event_id
    ]
    target_sngl_inspirals = frozenset(row.event_id for row in table)

    sngl_inspiral_table = table = lsctables.SnglInspiralTable.get_table(xmldoc)
    table[:] = [row for row in table if row.event_id in target_sngl_inspirals]

    table = lsctables.ProcessTable.get_table(xmldoc)
    table[:] = [row for row in table if row.program == 'gstlal_inspiral']
    target_process_ids = frozenset(row.process_id for row in table)

    table = lsctables.SearchSummaryTable.get_table(xmldoc)
    table[:] = [
        row for row in table if target_end_time in row.out_segment
        and row.process_id in target_process_ids
    ]
    target_process_ids = frozenset(row.process_id for row in table)

    table = lsctables.ProcessTable.get_table(xmldoc)
    table[:] = [row for row in table if row.process_id in target_process_ids]

    table = lsctables.ProcessParamsTable.get_table(xmldoc)
    table[:] = [row for row in table if row.process_id in target_process_ids]

    # Shift event times
    for row in coinc_inspiral_table:
        row.end += delta_t
    for row in sngl_inspiral_table:
        row.end += delta_t
        row.end_time_gmst = lal.GreenwichMeanSiderealTime(row.end)

    # The old version of gstlal that was used to produce the "First Two Years"
    # data set stored the likelihood in the coinc_event.likelihood column, but
    # newer versions store the *natural log* of the likelihood there. The
    # p_astro calculation requires this to be the log likelihood.
    for row in coinc_table:
        row.likelihood = np.log(row.likelihood)

    # Gstlal stores the template's SVD bank index in the Gamma1 column.
    # Fill this in so that we can calculate p_astro
    # (see :mod:`gwcelery.tasks.p_astro_gstlal`).
    for row in sngl_inspiral_table:
        row.Gamma1 = 16

    coinc_xml = io.BytesIO()
    utils.write_fileobj(xmldoc, coinc_xml)
    return coinc_xml.getvalue()
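A usage sketch for pick_coinc, which returns the bytes of a LIGO_LW XML document containing a single time-shifted coincidence:

# Hypothetical usage of pick_coinc
coinc_bytes = pick_coinc()
xmldoc = utils.load_fileobj(io.BytesIO(coinc_bytes),
                            contenthandler=ContentHandler)
print(len(lsctables.CoincInspiralTable.get_table(xmldoc)))  # expected: 1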
Example #13
def localize(coinc_psd,
             graceid,
             filename='bayestar.fits.gz',
             disabled_detectors=None):
    """Generate a rapid sky localization using
    :mod:`BAYESTAR <ligo.skymap.bayestar>`.

    Parameters
    ----------
    coinc_psd : tuple
        Tuple consisting of the byte contents of the input event's
        ``coinc.xml`` and ``psd.xml.gz`` files.
    graceid : str
        The GraceDB ID, used for FITS metadata and recording log messages
        to GraceDB.
    filename : str, optional
        The name of the FITS file.
    disabled_detectors : list, optional
        List of detectors to disable.

    Returns
    -------
    bytes
        The byte contents of the finished FITS file.

    Notes
    -----
    This task is adapted from the command-line tool
    :doc:`bayestar-localize-lvalert
    <ligo.skymap:tool/bayestar_localize_lvalert>`.

    It should execute in a special queue for computationally intensive,
    multithreaded, OpenMP tasks.

    """
    # Determine the base URL for event pages.
    scheme, netloc, *_ = urllib.parse.urlparse(gracedb.client.url)
    base_url = urllib.parse.urlunparse((scheme, netloc, 'events', '', '', ''))

    try:
        # A little bit of Cylon humor
        log.info('by your command...')

        # Combine coinc.xml and psd.xml.gz into one XML document
        doc = None
        for filecontents in coinc_psd:
            doc = load_fileobj(io.BytesIO(filecontents),
                               xmldoc=doc,
                               contenthandler=events.ligolw.ContentHandler)
        merge_ligolws(doc)

        # Parse event
        event_source = events.ligolw.open(doc, psd_file=doc, coinc_def=None)
        if disabled_detectors:
            event_source = events.detector_disabled.open(
                event_source, disabled_detectors)
        event, = event_source.values()

        # Run BAYESTAR
        log.info('starting sky localization')
        # FIXME: the low frequency cutoff should not be hardcoded.
        # It should be provided in the coinc.xml file.
        skymap = _bayestar.localize(event, f_low=15.0)
        skymap.meta['objid'] = str(graceid)
        skymap.meta['url'] = '{}/{}'.format(base_url, graceid)
        log.info('sky localization complete')

        with io.BytesIO() as f:
            fits.write_sky_map(f, skymap, moc=True)
            return f.getvalue()
    except events.DetectorDisabledError:
        raise Ignore()
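A usage sketch for the task above, assuming coinc_bytes and psd_bytes hold the contents of coinc.xml and psd.xml.gz, and that "G123456" is a hypothetical GraceDB id:

# Hypothetical usage of localize
fits_bytes = localize((coinc_bytes, psd_bytes), "G123456",
                      disabled_detectors=None)
with open("bayestar.fits.gz", "wb") as f:
    f.write(fits_bytes)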
Example #14
def setup_roq(cp):
    """
    Generates cp objects with the different ROQs applied
    """
    use_roq = False
    if cp.has_option('paths', 'roq_b_matrix_directory') or cp.has_option(
            'paths', 'computeroqweights'):
        if not cp.has_option('analysis', 'roq'):
            print(
                "Warning: If you are attempting to enable ROQ by specifying "
                "roq_b_matrix_directory or computeroqweights, please use "
                "analysis.roq in your config file in future. Enabling ROQ."
            )
            cp.set('analysis', 'roq', 'True')
    if not cp.getboolean('analysis', 'roq'):
        yield cp
        return
    path = cp.get('paths', 'roq_b_matrix_directory')
    if not os.path.isdir(path):
        print("The ROQ directory %s does not seem to exist\n" % path)
        sys.exit(1)
    use_roq = True
    roq_paths = os.listdir(path)
    roq_params = {}
    roq_force_flow = None

    if cp.has_option('lalinference', 'roq_force_flow'):
        roq_force_flow = cp.getfloat('lalinference', 'roq_force_flow')
        print("WARNING: Forcing the f_low to ", str(roq_force_flow), "Hz")
        print(
            "WARNING: Overwriting user choice of flow, srate, seglen, and (mc_min, mc_max and q-min) or (mass1_min, mass1_max, mass2_min, mass2_max)"
        )

    def key(item):  # to order the ROQ bases
        return float(item[1]['seglen'])

    coinc_xml_obj = None
    row = None

    # Get file object of coinc.xml
    if opts.gid is not None:
        from ligo.gracedb.rest import GraceDb
        gid = opts.gid
        cwd = os.getcwd()
        if cp.has_option('analysis', 'service-url'):
            client = GraceDb(cp.get('analysis', 'service-url'))
        else:
            client = GraceDb()
        coinc_xml_obj = ligolw_utils.load_fileobj(
            client.files(gid, "coinc.xml"),
            contenthandler=lsctables.use_in(ligolw.LIGOLWContentHandler))[0]
    elif cp.has_option('input', 'coinc-xml'):
        coinc_xml_obj = ligolw_utils.load_fileobj(
            open(cp.get('input', 'coinc-xml'), "rb"),
            contenthandler=lsctables.use_in(ligolw.LIGOLWContentHandler))[0]

    # Get sim_inspiral from injection file
    if cp.has_option('input', 'injection-file'):
        print(
            "Only 0-th event in the XML table will be considered while running with ROQ\n"
        )
        row = lsctables.SimInspiralTable.get_table(
            ligolw_utils.load_filename(cp.get('input', 'injection-file'),
                                       contenthandler=lsctables.use_in(
                                           ligolw.LIGOLWContentHandler)))[0]

    roq_bounds = pipe_utils.Query_ROQ_Bounds_Type(path, roq_paths)
    if roq_bounds == 'chirp_mass_q':
        print('ROQ has bounds in chirp mass and mass-ratio')
        mc_priors, trigger_mchirp = pipe_utils.get_roq_mchirp_priors(
            path,
            roq_paths,
            roq_params,
            key,
            coinc_xml_obj=coinc_xml_obj,
            sim_inspiral=row)
    elif roq_bounds == 'component_mass':
        print('ROQ has bounds in component masses')
        # get component mass bounds, then compute the chirp mass that can be safely covered
        # further below we pass along the component mass bounds to the sampler, not the tighter chirp-mass, q bounds
        m1_priors, m2_priors, trigger_mchirp = pipe_utils.get_roq_component_mass_priors(
            path,
            roq_paths,
            roq_params,
            key,
            coinc_xml_obj=coinc_xml_obj,
            sim_inspiral=row)
        mc_priors = {}
        for (roq, m1_prior), (roq2, m2_prior) in zip(m1_priors.items(),
                                                     m2_priors.items()):
            mc_priors[roq] = sorted([
                pipe_utils.mchirp_from_components(m1_prior[1], m2_prior[0]),
                pipe_utils.mchirp_from_components(m1_prior[0], m2_prior[1])
            ])

    if cp.has_option('lalinference', 'trigger_mchirp'):
        trigger_mchirp = float(cp.get('lalinference', 'trigger_mchirp'))
    roq_mass_freq_scale_factor = pipe_utils.get_roq_mass_freq_scale_factor(
        mc_priors, trigger_mchirp, roq_force_flow)
    if roq_mass_freq_scale_factor != 1.:
        print(
            'WARNING: Rescaling ROQ basis, please ensure it is allowed with the model used.'
        )

    # If the true chirp mass is unknown, add variations over the mass bins
    if opts.gid is not None or (opts.injections is not None or cp.has_option(
            'input', 'injection-file')) or cp.has_option(
                'lalinference', 'trigger_mchirp') or cp.has_option(
                    'input', 'coinc-xml'):

        for mc_prior in mc_priors:
            mc_priors[mc_prior] = np.array(mc_priors[mc_prior])
        # find mass bin containing the trigger
        candidate_roq_paths = \
            [roq for roq in roq_paths
             if mc_priors[roq][0]*roq_mass_freq_scale_factor
             <= trigger_mchirp <= mc_priors[roq][1]*roq_mass_freq_scale_factor]
        if candidate_roq_paths:
            margins = np.array([
                min(
                    mc_priors[roq][1] * roq_mass_freq_scale_factor -
                    trigger_mchirp, trigger_mchirp -
                    mc_priors[roq][0] * roq_mass_freq_scale_factor)
                for roq in candidate_roq_paths
            ])
            best_path = candidate_roq_paths[np.argmax(margins)]
            roq_paths = [best_path]
            print(
                'Prior in Mchirp will be [{}, {}] to contain the trigger Mchirp {}.'
                .format(mc_priors[best_path][0] * roq_mass_freq_scale_factor,
                        mc_priors[best_path][1] * roq_mass_freq_scale_factor,
                        trigger_mchirp))
        else:
            print("No roq mass bins containing the trigger")
            raise
    else:
        for mc_prior in mc_priors:
            mc_priors[mc_prior] = np.array(
                mc_priors[mc_prior]) * roq_mass_freq_scale_factor

    # write the master configparser
    cur_basedir = cp.get('paths', 'basedir')
    masterpath = os.path.join(cur_basedir, 'config.ini')
    with open(masterpath, 'w') as cpfile:
        cp.write(cpfile)

    for roq in roq_paths:
        this_cp = configparser.ConfigParser()
        this_cp.optionxform = str
        this_cp.read(masterpath)
        basedir = this_cp.get('paths', 'basedir')
        for dirs in 'basedir', 'daglogdir', 'webdir':
            val = this_cp.get('paths', dirs)
            newval = os.path.join(val, roq)
            mkdirs(newval)
            this_cp.set('paths', dirs, newval)
        this_cp.set(
            'paths', 'roq_b_matrix_directory',
            os.path.join(cp.get('paths', 'roq_b_matrix_directory'), roq))
        flow = roq_params[roq]['flow'] / roq_mass_freq_scale_factor
        srate = 2. * roq_params[roq]['fhigh'] / roq_mass_freq_scale_factor
        #if srate > 8192:
        #    srate = 8192

        seglen = roq_params[roq]['seglen'] * roq_mass_freq_scale_factor
        # params.dat uses the convention q>1 so our q_min is the inverse of their qmax
        this_cp.set('engine', 'srate', str(srate))
        this_cp.set('engine', 'seglen', str(seglen))
        if this_cp.has_option('lalinference', 'flow'):
            tmp = this_cp.get('lalinference', 'flow')
            tmp = eval(tmp)
            ifos = tmp.keys()
        else:
            tmp = {}
            ifos = eval(this_cp.get('analysis', 'ifos'))
        for i in ifos:
            tmp[i] = flow
            this_cp.set('lalinference', 'flow', str(tmp))
        if roq_bounds == 'chirp_mass_q':
            mc_min = mc_priors[roq][0] * roq_mass_freq_scale_factor
            mc_max = mc_priors[roq][1] * roq_mass_freq_scale_factor
            # params.dat uses the convention q>1 so our q_min is the inverse of their qmax
            q_min = 1. / float(roq_params[roq]['qmax'])
            this_cp.set('engine', 'chirpmass-min', str(mc_min))
            this_cp.set('engine', 'chirpmass-max', str(mc_max))
            this_cp.set('engine', 'q-min', str(q_min))
            this_cp.set(
                'engine', 'comp-min',
                str(
                    max(
                        roq_params[roq]['compmin'] *
                        roq_mass_freq_scale_factor,
                        mc_min * pow(1 + q_min, 1. / 5.) *
                        pow(q_min, 2. / 5.))))
            this_cp.set(
                'engine', 'comp-max',
                str(mc_max * pow(1 + q_min, 1. / 5.) * pow(q_min, -3. / 5.)))
        elif roq_bounds == 'component_mass':
            m1_min = m1_priors[roq][0]
            m1_max = m1_priors[roq][1]
            m2_min = m2_priors[roq][0]
            m2_max = m2_priors[roq][1]
            this_cp.set('engine', 'mass1-min', str(m1_min))
            this_cp.set('engine', 'mass1-max', str(m1_max))
            this_cp.set('engine', 'mass2-min', str(m2_min))
            this_cp.set('engine', 'mass2-max', str(m2_max))
        yield this_cp
    return
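A usage sketch for the generator above, assuming cp is an already-populated ConfigParser with the sections referenced in setup_roq:

# Hypothetical usage of setup_roq
for roq_cp in setup_roq(cp):
    print(roq_cp.get('paths', 'basedir'))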
Example #15
def main(args=None):
    p = parser()
    opts = p.parse_args(args)

    # LIGO-LW XML imports.
    from ligo.lw import ligolw
    from ligo.lw.param import Param
    from ligo.lw.utils.search_summary import append_search_summary
    from ligo.lw import utils as ligolw_utils
    from ligo.lw.lsctables import (
        New, CoincDefTable, CoincID, CoincInspiralTable, CoincMapTable,
        CoincTable, ProcessParamsTable, ProcessTable, SimInspiralTable,
        SnglInspiralTable, TimeSlideTable)

    # glue, LAL and pylal imports.
    from ligo import segments
    import lal
    import lal.series
    import lalsimulation
    from lalinspiral.inspinjfind import InspiralSCExactCoincDef
    from lalinspiral.thinca import InspiralCoincDef
    from tqdm import tqdm

    # BAYESTAR imports.
    from ..io.events.ligolw import ContentHandler
    from ..bayestar import filter
    from ..util.progress import progress_map

    # Read PSDs.
    xmldoc = ligolw_utils.load_fileobj(
        opts.reference_psd, contenthandler=lal.series.PSDContentHandler)
    psds = lal.series.read_psd_xmldoc(xmldoc, root_name=None)
    psds = {
        key: filter.InterpolatedPSD(filter.abscissa(psd), psd.data.data)
        for key, psd in psds.items() if psd is not None}
    psds = [psds[ifo] for ifo in opts.detector]

    # Extract simulation table from injection file.
    inj_xmldoc = ligolw_utils.load_fileobj(
        opts.input, contenthandler=ContentHandler)
    orig_sim_inspiral_table = SimInspiralTable.get_table(inj_xmldoc)

    # Prune injections that are outside distance limits.
    orig_sim_inspiral_table[:] = [
        row for row in orig_sim_inspiral_table
        if opts.min_distance <= row.distance <= opts.max_distance]

    # Open output file.
    xmldoc = ligolw.Document()
    xmlroot = xmldoc.appendChild(ligolw.LIGO_LW())

    # Create tables. Process and ProcessParams tables are copied from the
    # injection file.
    coinc_def_table = xmlroot.appendChild(New(CoincDefTable))
    coinc_inspiral_table = xmlroot.appendChild(New(CoincInspiralTable))
    coinc_map_table = xmlroot.appendChild(New(CoincMapTable))
    coinc_table = xmlroot.appendChild(New(CoincTable))
    xmlroot.appendChild(ProcessParamsTable.get_table(inj_xmldoc))
    xmlroot.appendChild(ProcessTable.get_table(inj_xmldoc))
    sim_inspiral_table = xmlroot.appendChild(New(SimInspiralTable))
    sngl_inspiral_table = xmlroot.appendChild(New(SnglInspiralTable))
    time_slide_table = xmlroot.appendChild(New(TimeSlideTable))

    # Write process metadata to output file.
    process = register_to_xmldoc(
        xmldoc, p, opts, instruments=opts.detector,
        comment="Simulated coincidences")

    # Add search summary to output file.
    all_time = segments.segment([lal.LIGOTimeGPS(0), lal.LIGOTimeGPS(2e9)])
    append_search_summary(xmldoc, process, inseg=all_time, outseg=all_time)

    # Create a time slide entry.  Needed for coinc_event rows.
    time_slide_id = time_slide_table.get_time_slide_id(
        {ifo: 0 for ifo in opts.detector}, create_new=process)

    # Populate CoincDef table.
    inspiral_coinc_def = copy.copy(InspiralCoincDef)
    inspiral_coinc_def.coinc_def_id = coinc_def_table.get_next_id()
    coinc_def_table.append(inspiral_coinc_def)
    found_coinc_def = copy.copy(InspiralSCExactCoincDef)
    found_coinc_def.coinc_def_id = coinc_def_table.get_next_id()
    coinc_def_table.append(found_coinc_def)

    # Precompute values that are common to all simulations.
    detectors = [lalsimulation.DetectorPrefixToLALDetector(ifo)
                 for ifo in opts.detector]
    responses = [det.response for det in detectors]
    locations = [det.location for det in detectors]

    if opts.jobs != 1:
        from .. import omp
        omp.num_threads = 1  # disable OpenMP parallelism

    func = functools.partial(simulate, psds=psds,
                             responses=responses, locations=locations,
                             measurement_error=opts.measurement_error,
                             f_low=opts.f_low, f_high=opts.f_high,
                             waveform=opts.waveform)

    # Make sure that each thread gets a different random number state.
    # We start by drawing a random integer s in the main thread, and
    # then the i'th subprocess will seed itself with the integer i + s.
    #
    # The seed must be an unsigned 32-bit integer, so if there are n
    # threads, then s must be drawn from the interval [0, 2**32 - n).
    #
    # Note that *we* are thread 0, so there are a total of
    # n=1+len(sim_inspiral_table) threads.
    seed = np.random.randint(0, 2 ** 32 - len(sim_inspiral_table) - 1)
    np.random.seed(seed)

    with tqdm(desc='accepted') as progress:
        for sim_inspiral, simulation in zip(
                orig_sim_inspiral_table,
                progress_map(
                    func,
                    np.arange(len(orig_sim_inspiral_table)) + seed + 1,
                    orig_sim_inspiral_table, jobs=opts.jobs)):

            sngl_inspirals = []
            used_snr_series = []
            net_snr = 0.0
            count_triggers = 0

            # Loop over individual detectors and create SnglInspiral entries.
            for ifo, (horizon, abs_snr, arg_snr, toa, series) \
                    in zip(opts.detector, simulation):

                if np.random.uniform() > opts.duty_cycle:
                    continue
                elif abs_snr >= opts.snr_threshold:
                    # If SNR < threshold, then the injection is not found.
                    # Skip it.
                    count_triggers += 1
                    net_snr += np.square(abs_snr)
                elif not opts.keep_subthreshold:
                    continue

                # Create SnglInspiral entry.
                used_snr_series.append(series)
                sngl_inspirals.append(
                    sngl_inspiral_table.RowType(**dict(
                        dict.fromkeys(sngl_inspiral_table.validcolumns, None),
                        process_id=process.process_id,
                        ifo=ifo,
                        mass1=sim_inspiral.mass1,
                        mass2=sim_inspiral.mass2,
                        spin1x=sim_inspiral.spin1x,
                        spin1y=sim_inspiral.spin1y,
                        spin1z=sim_inspiral.spin1z,
                        spin2x=sim_inspiral.spin2x,
                        spin2y=sim_inspiral.spin2y,
                        spin2z=sim_inspiral.spin2z,
                        end=toa,
                        snr=abs_snr,
                        coa_phase=arg_snr,
                        f_final=opts.f_high,
                        eff_distance=horizon / abs_snr)))

            net_snr = np.sqrt(net_snr)

            # If too few triggers were found, then skip this event.
            if count_triggers < opts.min_triggers:
                continue

            # If network SNR < threshold, then the injection is not found.
            # Skip it.
            if net_snr < opts.net_snr_threshold:
                continue

            # Add Coinc table entry.
            coinc = coinc_table.appendRow(
                coinc_event_id=coinc_table.get_next_id(),
                process_id=process.process_id,
                coinc_def_id=inspiral_coinc_def.coinc_def_id,
                time_slide_id=time_slide_id,
                insts=opts.detector,
                nevents=len(opts.detector),
                likelihood=None)

            # Add CoincInspiral table entry.
            coinc_inspiral_table.appendRow(
                coinc_event_id=coinc.coinc_event_id,
                instruments=[
                    sngl_inspiral.ifo for sngl_inspiral in sngl_inspirals],
                end=lal.LIGOTimeGPS(1e-9 * np.mean([
                    sngl_inspiral.end.ns()
                    for sngl_inspiral in sngl_inspirals
                    if sngl_inspiral.end is not None])),
                mass=sim_inspiral.mass1 + sim_inspiral.mass2,
                mchirp=sim_inspiral.mchirp,
                combined_far=0.0,  # Not provided
                false_alarm_rate=0.0,  # Not provided
                minimum_duration=None,  # Not provided
                snr=net_snr)

            # Record all sngl_inspiral records and associate them with coincs.
            for sngl_inspiral, series in zip(sngl_inspirals, used_snr_series):
                # Give this sngl_inspiral record an id and add it to the table.
                sngl_inspiral.event_id = sngl_inspiral_table.get_next_id()
                sngl_inspiral_table.append(sngl_inspiral)

                if opts.enable_snr_series:
                    elem = lal.series.build_COMPLEX8TimeSeries(series)
                    elem.appendChild(
                        Param.from_pyvalue('event_id', sngl_inspiral.event_id))
                    xmlroot.appendChild(elem)

                # Add CoincMap entry.
                coinc_map_table.appendRow(
                    coinc_event_id=coinc.coinc_event_id,
                    table_name=sngl_inspiral_table.tableName,
                    event_id=sngl_inspiral.event_id)

            # Record injection
            if not opts.preserve_ids:
                sim_inspiral.simulation_id = sim_inspiral_table.get_next_id()
            sim_inspiral_table.append(sim_inspiral)

            progress.update()

    # Record coincidence associating injections with events.
    for i, sim_inspiral in enumerate(sim_inspiral_table):
        coinc = coinc_table.appendRow(
            coinc_event_id=coinc_table.get_next_id(),
            process_id=process.process_id,
            coinc_def_id=found_coinc_def.coinc_def_id,
            time_slide_id=time_slide_id,
            instruments=None,
            nevents=None,
            likelihood=None)
        coinc_map_table.appendRow(
            coinc_event_id=coinc.coinc_event_id,
            table_name=sim_inspiral_table.tableName,
            event_id=sim_inspiral.simulation_id)
        coinc_map_table.appendRow(
            coinc_event_id=coinc.coinc_event_id,
            table_name=coinc_table.tableName,
            event_id=CoincID(i))

    # Record process end time.
    process.set_end_time_now()

    # Write output file.
    write_fileobj(xmldoc, opts.output)