Example #1
def time_slides_vacuum(time_slides, verbose=False):
    """
	Given a dictionary mapping time slide IDs to instrument-->offset
	mappings, for example as returned by the as_dict() method of the
	TimeSlideTable class in glue.ligolw.lsctables or by the
	load_time_slides() function in this module, construct and return a
	mapping indicating time slide equivalences.  This can be used to
	delete redundant time slides from a time slide table, and then also
	used via the applyKeyMapping() method of glue.ligolw.table.Table
	instances to update cross references (for example in the
	coinc_event table).

	Example:

	>>> slides = {"time_slide_id:0": {"H1": 0, "H2": 0},
	... "time_slide_id:1": {"H1": 10, "H2": 10}, "time_slide_id:2": {"H1":
	... 0, "H2": 10}}
	>>> time_slides_vacuum(slides)
	{'time_slide_id:1': 'time_slide_id:0'}

	indicating that time_slide_id:1 describes a time slide that is
	equivalent to time_slide_id:0.  The calling code could use this
	information to delete time_slide_id:1 from the time_slide table,
	and replace references to that ID in other tables with references
	to time_slide_id:0.
	"""
    # convert offsets to deltas
    time_slides = dict((time_slide_id, offsetvect.deltas)
                       for time_slide_id, offsetvect in time_slides.items())
    if verbose:
        progressbar = ProgressBar(max=len(time_slides))
    else:
        progressbar = None
    # old --> new mapping
    mapping = {}
    # while there are time slide offset dictionaries remaining
    while time_slides:
        # pick an ID/offset dictionary pair at random
        id1, deltas1 = time_slides.popitem()
        # for every other ID/offset dictionary pair in the time
        # slides
        ids_to_delete = []
        for id2, deltas2 in time_slides.items():
            # if the relative offset dictionaries are
            # equivalent record in the old --> new mapping
            if deltas2 == deltas1:
                mapping[id2] = id1
                ids_to_delete.append(id2)
        for id2 in ids_to_delete:
            time_slides.pop(id2)
        if progressbar is not None:
            progressbar.update(progressbar.max - len(time_slides))
    # done
    del progressbar
    return mapping
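
# A minimal usage sketch (assumptions: glue.offsetvector is importable, and
# the mapping values are offsetvector instances, whose .deltas property the
# function relies on; the docstring's plain dicts are illustrative only).
from glue.offsetvector import offsetvector

slides = {
    "time_slide_id:0": offsetvector({"H1": 0, "H2": 0}),
    "time_slide_id:1": offsetvector({"H1": 10, "H2": 10}),  # same deltas as id:0
    "time_slide_id:2": offsetvector({"H1": 0, "H2": 10}),
}
# id:0 and id:1 differ only by a common shift of every instrument, so their
# inter-instrument deltas are equal and one of the two is redundant.  Which
# of the two IDs survives depends on dict iteration order.
print(time_slides_vacuum(slides))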
Example #2
def time_slides_vacuum(time_slides, verbose = False):
	"""
	Given a dictionary mapping time slide IDs to instrument-->offset
	mappings, for example as returned by the as_dict() method of the
	TimeSlideTable class in glue.ligolw.lsctables or by the
	load_time_slides() function in this module, construct and return a
	mapping indicating time slide equivalences.  This can be used to
	delete redundant time slides from a time slide table, and then also
	used via the applyKeyMapping() method of glue.ligolw.table.Table
	instances to update cross references (for example in the
	coinc_event table).

	Example:

	>>> slides = {"time_slide_id:0": {"H1": 0, "H2": 0},
	... "time_slide_id:1": {"H1": 10, "H2": 10}, "time_slide_id:2": {"H1":
	... 0, "H2": 10}}
	>>> time_slides_vacuum(slides)
	{'time_slide_id:1': 'time_slide_id:0'}

	indicating that time_slide_id:1 describes a time slide that is
	equivalent to time_slide_id:0.  The calling code could use this
	information to delete time_slide_id:1 from the time_slide table,
	and replace references to that ID in other tables with references
	to time_slide_id:0.
	"""
	# convert offsets to deltas
	time_slides = dict((time_slide_id, offsetvect.deltas) for time_slide_id, offsetvect in time_slides.items())
	if verbose:
		progressbar = ProgressBar(max = len(time_slides))
	else:
		progressbar = None
	# old --> new mapping
	mapping = {}
	# while there are time slide offset dictionaries remaining
	while time_slides:
		# pick an ID/offset dictionary pair at random
		id1, deltas1 = time_slides.popitem()
		# for every other ID/offset dictionary pair in the time
		# slides
		ids_to_delete = []
		for id2, deltas2 in time_slides.items():
			# if the relative offset dictionaries are
			# equivalent record in the old --> new mapping
			if deltas2 == deltas1:
				mapping[id2] = id1
				ids_to_delete.append(id2)
		for id2 in ids_to_delete:
			time_slides.pop(id2)
		if progressbar is not None:
			progressbar.update(progressbar.max - len(time_slides))
	# done
	del progressbar
	return mapping
Example #3
opts = parser.parse_args()

# Imports.
import matplotlib
matplotlib.use('agg')
from matplotlib import pyplot as plt
from matplotlib import rcParams
import os
from distutils.dir_util import mkpath
import numpy as np
from glue.text_progress_bar import ProgressBar
from lalinference import plot

# Create progress bar.
pb = ProgressBar()
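# (An update value of -1 lies below the bar's range; here it serves to redraw
# the bar with new status text before any measurable progress exists.)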
pb.update(-1, 'reading data')

# Read in all of the datasets listed as positional command line arguments.
datasets_ = [
    np.recfromtxt(file, names=True, usemask=True) for file in opts.input
]
dataset_names = [os.path.splitext(file.name)[0] for file in opts.input]

# For each of the quantities that we are going to plot, find their range
# over all of the datasets.
combined = np.concatenate([dataset['searched_area'] for dataset in datasets_])
min_searched_area = np.min(combined)
max_searched_area = np.max(combined)
have_offset = all('offset' in dataset.dtype.names for dataset in datasets_)
have_runtime = all('runtime' in dataset.dtype.names for dataset in datasets_)
have_searched_prob_distance = all(
Example #4
parser.add_argument(
    '--projection', type=int, choices=list(range(4)), default=0,
    help='Plot one specific projection [default: plot all projections]')
parser.add_argument(
    'input', metavar='INPUT.fits[.gz]', type=argparse.FileType('rb'),
    default='-', nargs='?', help='Input FITS file [default: stdin]')
parser.add_argument(
    '--align-to', metavar='SKYMAP.fits[.gz]', type=argparse.FileType('rb'),
    help='Align to the principal axes of this sky map [default: input sky map]')
parser.set_defaults(figure_width='3.5', figure_height='3.5')
opts = parser.parse_args()

# Create progress bar.
from glue.text_progress_bar import ProgressBar
progress = ProgressBar()
progress.update(-1, 'Starting up')

# Late imports
from matplotlib import pyplot as plt
from matplotlib import gridspec
from matplotlib import transforms
from lalinference import io
from lalinference.plot import marker
from lalinference.bayestar.distance import (
    principal_axes, volume_render, marginal_pdf, marginal_ppf)
import healpy as hp
import numpy as np
import scipy.stats

# Read input, determine input resolution.
progress.update(-1, 'Loading FITS file')
Example #5
opts = parser.parse_args()

# Imports.
import matplotlib
matplotlib.use('agg')
from matplotlib import pyplot as plt
from matplotlib import rcParams
import os
from distutils.dir_util import mkpath
import numpy as np
from glue.text_progress_bar import ProgressBar
from lalinference import plot

# Create progress bar.
pb = ProgressBar()
pb.update(-1, 'reading data')

# Read in all of the datasets listed as positional command line arguments.
datasets_ = [np.recfromtxt(file, names=True, usemask=True) for file in opts.input]
dataset_names = [os.path.splitext(file.name)[0] for file in opts.input]

# For each of the quantities that we are going to plot, find their range
# over all of the datasets.
combined = np.concatenate([dataset['searched_area'] for dataset in datasets_])
min_searched_area = np.min(combined)
max_searched_area = np.max(combined)
combined = np.concatenate([dataset['offset'] for dataset in datasets_])
min_offset = np.min(combined)
max_offset = np.max(combined)
combined = np.concatenate([dataset['runtime'] for dataset in datasets_])
if np.any(np.isfinite(combined)):
Example #6
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import healpy as hp
import lal
from lalinference import fits
from lalinference import plot
from glue.text_progress_bar import ProgressBar


fig = plt.figure(figsize=(opts.figure_width, opts.figure_height), frameon=False)
ax = plt.subplot(111, projection='mollweide')
ax.cla()
ax.grid()

progress = ProgressBar()

progress.update(-1, 'obtaining filenames of sky maps')
fitsfilenames = tuple(command.chainglob(args))

progress.max = len(fitsfilenames)

matplotlib.rc('path', simplify=True, simplify_threshold=1)

for count_records, fitsfilename in enumerate(fitsfilenames):
    progress.update(count_records, fitsfilename)
    skymap, metadata = fits.read_sky_map(fitsfilename, nest=None)
    nside = hp.npix2nside(len(skymap))
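    # lal.GreenwichMeanSiderealTime returns an unwrapped angle in radians;
    # reduce it modulo 2*pi before using it as a longitude offset.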
    gmst = lal.GreenwichMeanSiderealTime(metadata['gps_time']) % (2*np.pi)

    indices = np.argsort(-skymap)
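    # skymap[indices] lists the pixel probabilities in decreasing order, so
    # the cumulative sum assigns each pixel the smallest credible level (in
    # percent) whose credible region contains it.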
    region = np.empty(skymap.shape)
    region[indices] = 100 * np.cumsum(skymap[indices])
Example #7
parser.add_argument(
    '--projection', type=int, choices=list(range(4)), default=0,
    help='Plot one specific projection [default: plot all projections]')
parser.add_argument(
    'input', metavar='INPUT.fits[.gz]', type=argparse.FileType('rb'),
    default='-', nargs='?', help='Input FITS file [default: stdin]')
parser.add_argument(
    '--align-to', metavar='SKYMAP.fits[.gz]', type=argparse.FileType('rb'),
    help='Align to the principal axes of this sky map [default: input sky map]')
parser.set_defaults(figure_width='3.5', figure_height='3.5')
opts = parser.parse_args()

# Create progress bar.
from glue.text_progress_bar import ProgressBar
progress = ProgressBar()
progress.update(-1, 'Starting up')

# Late imports
from matplotlib import pyplot as plt
from matplotlib import gridspec
from matplotlib import transforms
from lalinference.io import fits
from lalinference.plot import marker
from lalinference.bayestar.distance import (
    principal_axes, volume_render, marginal_pdf)
import healpy as hp
import numpy as np
import scipy.stats

# Read input, determine input resolution.
progress.update(-1, 'Loading FITS file')
Example #8
    ret = [coinc_event_id, simulation_id, far, snr, searched_area, searched_prob, offset, runtime, distmean, diststd] + contour_areas + area_probs
    if modes:
        ret += [searched_modes] + contour_modes
    return ret


if __name__ == '__main__':
    from glue.text_progress_bar import ProgressBar
    progress = ProgressBar()

    db = opts.db
    contours = opts.contour
    modes = opts.modes
    areas = opts.area

    progress.update(-1, 'spawning workers')
    if opts.jobs == 1:
        from six.moves import map
    else:
        try:
            from emcee.interruptible_pool import InterruptiblePool as Pool
        except ImportError:
            from multiprocessing import Pool
        map = Pool(
            opts.jobs, startup,
            (command.sqlite_get_filename(db), contours, modes, areas)
            ).imap_unordered

    progress.update(-1, 'obtaining filenames of sky maps')
    fitsfilenames = tuple(command.chainglob(opts.fitsfileglobs))
Example #9
progress = ProgressBar()

progress.max = len(opts.fitsfilenames)

matplotlib.rc('path', simplify=True, simplify_threshold=1)

if opts.colormap is None:
    colors = ['k'] * len(opts.fitsfilenames)
else:
    colors = matplotlib.cm.get_cmap(opts.colormap)
    colors = colors(np.linspace(0, 1, len(opts.fitsfilenames)))
for count_records, (color,
                    fitsfilename) in enumerate(zip(colors,
                                                   opts.fitsfilenames)):
    progress.update(count_records, fitsfilename)
    skymap, metadata = fits.read_sky_map(fitsfilename, nest=None)
    nside = hp.npix2nside(len(skymap))
    gmst = lal.GreenwichMeanSiderealTime(metadata['gps_time']) % (2 * np.pi)

    indices = np.argsort(-skymap)
    region = np.empty(skymap.shape)
    region[indices] = 100 * np.cumsum(skymap[indices])
    plot.healpix_contour(region,
                         nest=metadata['nest'],
                         dlon=-gmst,
                         colors=[color],
                         linewidths=0.5,
                         levels=[opts.contour],
                         alpha=opts.alpha)
Example #10
          + contour_areas + area_probs + contour_dists + contour_vols
    if modes:
        ret += [searched_modes] + contour_modes
    return ret


if __name__ == '__main__':
    from glue.text_progress_bar import ProgressBar
    progress = ProgressBar()

    db = opts.db
    contours = opts.contour
    modes = opts.modes
    areas = opts.area

    progress.update(-1, 'spawning workers')
    if opts.jobs == 1:
        from six.moves import map
    else:
        try:
            from emcee.interruptible_pool import InterruptiblePool as Pool
        except ImportError:
            from multiprocessing import Pool
        map = Pool(
            opts.jobs, startup,
            (command.sqlite_get_filename(db), contours, modes, areas)
            ).imap

    colnames = (
        ['coinc_event_id', 'simulation_id', 'far', 'snr', 'searched_area',
         'searched_prob', 'searched_prob_dist', 'searched_vol',
Example #11
import lalsimulation
from glue.text_progress_bar import ProgressBar

# BAYESTAR imports.
from lalinference.bayestar import ligolw as ligolw_bayestar
from lalinference.bayestar import filter
from lalinference.bayestar import timing

# Other imports.
import numpy as np

# LIGO-LW imports (assumed; the header of this snippet is truncated above).
import glue.lal
from glue import segments
from glue.ligolw import ligolw
from glue.ligolw import lsctables
from glue.ligolw.utils import search_summary as ligolw_search_summary
from lalinference.bayestar import command


progress = ProgressBar()

# Open output file.
progress.update(-1, 'setting up output document')
out_xmldoc = ligolw.Document()
out_xmldoc.appendChild(ligolw.LIGO_LW())

# Write process metadata to output file.
process = command.register_to_xmldoc(
    out_xmldoc, parser, opts, ifos=opts.detector,
    comment="Simulated coincidences")

# Add search summary to output file.
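# (The [0, 2e9) s GPS segment spans essentially all time, so the whole
# timeline is marked as analyzed.)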
all_time = segments.segment(
    [glue.lal.LIGOTimeGPS(0), glue.lal.LIGOTimeGPS(2e9)])
search_summary_table = lsctables.New(lsctables.SearchSummaryTable)
out_xmldoc.childNodes[0].appendChild(search_summary_table)
summary = ligolw_search_summary.append_search_summary(out_xmldoc, process,
    inseg=all_time, outseg=all_time)
Example #12
        snr = float("nan")
    if far is None:
        far = float("nan")

    ret = [coinc_event_id, simulation_id, far, snr, searched_area, searched_prob, offset, runtime] + contour_areas
    if modes:
        ret += [searched_modes] + contour_modes
    return ret


if __name__ == "__main__":
    from glue.text_progress_bar import ProgressBar

    progress = ProgressBar()

    progress.update(-1, "spawning {0} workers".format(opts.jobs))
    startupargs = (dbfilename, opts.contour, opts.modes)
    if opts.jobs == 1:
        from itertools import imap
    else:
        import multiprocessing

        imap = multiprocessing.Pool(opts.jobs, startup, startupargs).imap_unordered
    startup(*startupargs)

    progress.update(-1, "obtaining filenames of sky maps")
    fitsfilenames = tuple(command.chainglob(fitsfileglobs))

    colnames = [
        "coinc_event_id",
        "simulation_id",
Example #13
    if snr is None:
        snr = float('nan')
    if far is None:
        far = float('nan')

    ret = [coinc_event_id, simulation_id, far, snr, searched_area, searched_prob, offset, runtime] + contour_areas + area_probs
    if modes:
        ret += [searched_modes] + contour_modes
    return ret


if __name__ == '__main__':
    from glue.text_progress_bar import ProgressBar
    progress = ProgressBar()

    progress.update(-1, 'spawning {0} workers'.format(opts.jobs))
    startupargs = (dbfilename, opts.contour, opts.modes, opts.area)
    if opts.jobs == 1:
        from itertools import imap
    else:
        import multiprocessing
        imap = multiprocessing.Pool(opts.jobs, startup, startupargs).imap_unordered
    startup(*startupargs)

    progress.update(-1, 'obtaining filenames of sky maps')
    fitsfilenames = tuple(command.chainglob(fitsfileglobs))

    colnames = ['coinc_event_id', 'simulation_id', 'far', 'snr', 'searched_area',
        'searched_prob', 'offset', 'runtime'] + ["area({0:g})".format(p)
        for p in contours] + ["prob({0:g})".format(a) for a in areas]
    if modes:
Example #14
    ] + contour_areas + area_probs
    if modes:
        ret += [searched_modes] + contour_modes
    return ret


if __name__ == '__main__':
    from glue.text_progress_bar import ProgressBar
    progress = ProgressBar()

    db = opts.db
    contours = opts.contour
    modes = opts.modes
    areas = opts.area

    progress.update(-1, 'spawning workers')
    if opts.jobs == 1:
        from six.moves import map
    else:
        try:
            from emcee.interruptible_pool import InterruptiblePool as Pool
        except ImportError:
            from multiprocessing import Pool
        map = Pool(opts.jobs, startup, (command.sqlite_get_filename(db),
                                        contours, modes, areas)).imap_unordered

    progress.update(-1, 'obtaining filenames of sky maps')
    fitsfilenames = tuple(command.chainglob(opts.fitsfileglobs))

    colnames = ([
        'coinc_event_id', 'simulation_id', 'far', 'snr', 'searched_area',
Example #15
def main(args=None):
    opts = parser().parse_args(args)

    # Create progress bar.
    from glue.text_progress_bar import ProgressBar
    progress = ProgressBar()
    progress.update(-1, 'Starting up')

    # Late imports
    from matplotlib import pyplot as plt
    from matplotlib import gridspec
    from matplotlib import transforms
    from .. import io
    from ..plot import marker
    from ..distance import (parameters_to_marginal_moments, principal_axes,
                            volume_render, marginal_pdf)
    import healpy as hp
    import numpy as np
    import scipy.stats
    import seaborn

    # Read input, determine input resolution.
    progress.update(-1, 'Loading FITS file')
    (prob, mu, sigma, norm), metadata = io.read_sky_map(opts.input.name,
                                                        distances=True)
    npix = len(prob)
    nside = hp.npix2nside(npix)

    progress.update(-1, 'Preparing projection')

    if opts.align_to is None or opts.input.name == opts.align_to.name:
        prob2, mu2, sigma2, norm2 = prob, mu, sigma, norm
    else:
        (prob2, mu2, sigma2, norm2), _ = io.read_sky_map(opts.align_to.name,
                                                         distances=True)
    if opts.max_distance is None:
        mean, std = parameters_to_marginal_moments(prob2, mu2, sigma2)
        max_distance = mean + 2.5 * std
    else:
        max_distance = opts.max_distance
    rot = np.ascontiguousarray(principal_axes(prob2, mu2, sigma2))

    if opts.chain:
        chain = io.read_samples(opts.chain.name)
        chain = np.dot(rot.T,
                       (hp.ang2vec(0.5 * np.pi - chain['dec'], chain['ra']) *
                        np.atleast_2d(chain['dist']).T).T)

    fig = plt.figure(frameon=False)
    n = 1 if opts.projection else 2
    gs = gridspec.GridSpec(n,
                           n,
                           left=0.01,
                           right=0.99,
                           bottom=0.01,
                           top=0.99,
                           wspace=0.05,
                           hspace=0.05)

    imgwidth = int(opts.dpi * opts.figure_width / n)
    s = np.linspace(-max_distance, max_distance, imgwidth)
    xx, yy = np.meshgrid(s, s)

    # Color palette for markers
    colors = seaborn.color_palette(n_colors=len(opts.radecdist) + 1)

    truth_marker = marker.reticle(inner=0.5 * np.sqrt(2),
                                  outer=1.5 * np.sqrt(2),
                                  angle=45)

    for iface, (axis0, axis1, (sp0, sp1)) in enumerate((
        (1, 0, [0, 0]),
        (0, 2, [1, 1]),
        (1, 2, [1, 0]),
    )):

        if opts.projection and opts.projection != iface + 1:
            continue

        progress.update(text='Plotting projection {0}'.format(iface + 1))

        # Marginalize onto the given face
        density = volume_render(xx.ravel(), yy.ravel(), max_distance, axis0,
                                axis1, rot, False, prob, mu, sigma,
                                norm).reshape(xx.shape)

        # Plot heat map
        ax = fig.add_subplot(gs[0, 0] if opts.projection else gs[sp0, sp1],
                             aspect=1)
        ax.imshow(
            density,
            origin='lower',
            extent=[-max_distance, max_distance, -max_distance, max_distance],
            cmap=opts.colormap)

        # Add contours if requested
        if opts.contour:
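            # Convert the rendered density to cumulative probability in
            # percent (most probable pixels first) so that the requested
            # contour levels bound credible regions.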
            flattened_density = density.ravel()
            indices = np.argsort(flattened_density)[::-1]
            cumsum = np.empty_like(flattened_density)
            cs = np.cumsum(flattened_density[indices])
            cumsum[indices] = cs / cs[-1] * 100
            cumsum = np.reshape(cumsum, density.shape)
            u, v = np.meshgrid(s, s)
            contourset = ax.contour(u,
                                    v,
                                    cumsum,
                                    levels=opts.contour,
                                    linewidths=0.5)

        # Mark locations
        for (ra, dec, dist), color in zip(opts.radecdist, colors[1:]):
            theta = 0.5 * np.pi - np.deg2rad(dec)
            phi = np.deg2rad(ra)
            xyz = np.dot(rot.T, hp.ang2vec(theta, phi) * dist)
            ax.plot(xyz[axis0],
                    xyz[axis1],
                    marker=truth_marker,
                    markeredgecolor=color,
                    markerfacecolor='none',
                    markeredgewidth=1)

        # Plot chain
        if opts.chain:
            ax.plot(chain[axis0], chain[axis1], '.k', markersize=0.5)

        # Hide axes ticks
        ax.set_xticks([])
        ax.set_yticks([])

        # Set axis limits
        ax.set_xlim([-max_distance, max_distance])
        ax.set_ylim([-max_distance, max_distance])

        # Mark origin (Earth)
        ax.plot([0], [0],
                marker=marker.earth,
                markersize=5,
                markerfacecolor='none',
                markeredgecolor='black',
                markeredgewidth=0.75)

        if iface == 2:
            ax.invert_xaxis()

    # Add contour labels if contours requested
    if opts.contour:
        ax.clabel(contourset, fmt='%d%%', fontsize=7)

    if not opts.projection:
        # Add scale bar, 1/4 width of the plot
        ax.plot([0.0625, 0.3125], [0.0625, 0.0625],
                color='black',
                linewidth=1,
                transform=ax.transAxes)
        ax.text(0.0625,
                0.0625,
                '{0:d} Mpc'.format(int(np.round(0.5 * max_distance))),
                fontsize=8,
                transform=ax.transAxes,
                verticalalignment='bottom')

        # Create marginal distance plot.
        progress.update(-1, 'Plotting distance')
        gs1 = gridspec.GridSpecFromSubplotSpec(5, 5, gs[0, 1])
        ax = fig.add_subplot(gs1[1:-1, 1:-1])

        # Plot marginal distance distribution, integrated over the whole sky.
        d = np.linspace(0, max_distance)
        ax.fill_between(d,
                        marginal_pdf(d, prob, mu, sigma, norm),
                        alpha=0.5,
                        color=colors[0])

        # Plot conditional distance distribution at true position
        # and mark true distance.
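        # (BAYESTAR's distance ansatz: within a pixel, the distance posterior
        # is proportional to d**2 times a Gaussian in d with that pixel's
        # location mu and scale sigma, scaled by its normalization norm.)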
        for (ra, dec, dist), color in zip(opts.radecdist, colors[1:]):
            theta = 0.5 * np.pi - np.deg2rad(dec)
            phi = np.deg2rad(ra)
            ipix = hp.ang2pix(nside, theta, phi)
            ax.fill_between(d,
                            scipy.stats.norm(mu[ipix], sigma[ipix]).pdf(d) *
                            norm[ipix] * np.square(d),
                            alpha=0.5,
                            color=color)
            ax.axvline(dist, color='black', linewidth=0.5)
            ax.plot([dist], [-0.15],
                    marker=truth_marker,
                    markeredgecolor=color,
                    markerfacecolor='none',
                    markeredgewidth=1,
                    clip_on=False,
                    transform=transforms.blended_transform_factory(
                        ax.transData, ax.transAxes))

        # Scale axes
        ax.set_xticks([0, max_distance])
        ax.set_xticklabels(
            ['0', "{0:d}\nMpc".format(int(np.round(max_distance)))],
            fontsize=9)
        ax.set_yticks([])
        ax.set_xlim(0, max_distance)
        ax.set_ylim(0, ax.get_ylim()[1])

        if opts.annotate:
            text = []
            try:
                objid = metadata['objid']
            except KeyError:
                pass
            else:
                text.append('event ID: {}'.format(objid))
            try:
                distmean = metadata['distmean']
                diststd = metadata['diststd']
            except KeyError:
                pass
            else:
                text.append(u'distance: {}±{} Mpc'.format(
                    int(np.round(distmean)), int(np.round(diststd))))
            ax.text(0,
                    1,
                    '\n'.join(text),
                    transform=ax.transAxes,
                    fontsize=7,
                    ha='left',
                    va='bottom',
                    clip_on=False)

    progress.update(-1, 'Saving')
    opts.output()
Example #16
# Imports.
import matplotlib
matplotlib.use('agg')
from matplotlib import pyplot as plt
from matplotlib import rcParams
import scipy.stats
import os
import subprocess
import numpy as np
from glue.text_progress_bar import ProgressBar
import lalinference.plot

# Create progress bar.
pb = ProgressBar()
pb.update(-1, 'reading data')

# Read in all of the datasets listed as positional command line arguments.
datasets_ = [np.recfromtxt(arg, names=True, usemask=True) for arg in args]
dataset_names = [os.path.splitext(arg)[0] for arg in args]

# For each of the quantities that we are going to plot, find their range
# over all of the datasets.
combined = np.concatenate([dataset['searched_area'] for dataset in datasets_])
min_searched_area = np.min(combined)
max_searched_area = np.max(combined)
combined = np.concatenate([dataset['offset'] for dataset in datasets_])
min_offset = np.min(combined)
max_offset = np.max(combined)
combined = np.concatenate([dataset['runtime'] for dataset in datasets_])
if np.any(np.isfinite(combined)):
Example #17
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import healpy as hp
import lal
from lalinference.io import fits
from lalinference import plot
from glue.text_progress_bar import ProgressBar

fig = plt.figure(frameon=False)
ax = plt.axes(projection='mollweide')
ax.grid()

progress = ProgressBar()

progress.update(-1, 'obtaining filenames of sky maps')
fitsfilenames = tuple(command.chainglob(opts.fitsfileglobs))

progress.max = len(fitsfilenames)

matplotlib.rc('path', simplify=True, simplify_threshold=1)

if opts.colormap is None:
    colors = ['k'] * len(fitsfilenames)
else:
    colors = matplotlib.cm.get_cmap(opts.colormap)
    colors = colors(np.linspace(0, 1, len(fitsfilenames)))
for count_records, (color,
                    fitsfilename) in enumerate(zip(colors, fitsfilenames)):
    progress.update(count_records, fitsfilename)
    skymap, metadata = fits.read_sky_map(fitsfilename, nest=None)
Example #18
import lalsimulation
from lalinspiral.thinca import InspiralCoincDef
from glue.text_progress_bar import ProgressBar

# BAYESTAR imports.
from lalinference.bayestar import ligolw as ligolw_bayestar
from lalinference.bayestar import filter
from lalinference.bayestar import timing

# Other imports.
import numpy as np

# LIGO-LW imports (assumed; the header of this snippet is truncated above).
import glue.lal
from glue import segments
from glue.ligolw import ligolw
from glue.ligolw import lsctables
from lalinference.bayestar import command

progress = ProgressBar()

# Open output file.
progress.update(-1, 'setting up output document')
out_xmldoc = ligolw.Document()
out_xmldoc.appendChild(ligolw.LIGO_LW())

# Write process metadata to output file.
process = command.register_to_xmldoc(out_xmldoc,
                                     parser,
                                     opts,
                                     ifos=opts.detector,
                                     comment="Simulated coincidences")

# Add search summary to output file.
all_time = segments.segment(
    [glue.lal.LIGOTimeGPS(0),
     glue.lal.LIGOTimeGPS(2e9)])
search_summary_table = lsctables.New(lsctables.SearchSummaryTable)
Example #19
opts = parser.parse_args()

# Imports.
import matplotlib
matplotlib.use('agg')
from matplotlib import pyplot as plt
from matplotlib import rcParams
import os
from distutils.dir_util import mkpath
import numpy as np
from glue.text_progress_bar import ProgressBar
from lalinference import plot

# Create progress bar.
pb = ProgressBar()
pb.update(-1, 'reading data')

# Read in all of the datasets listed as positional command line arguments.
datasets_ = [np.recfromtxt(file, names=True, usemask=True) for file in opts.input]
dataset_names = [os.path.splitext(file.name)[0] for file in opts.input]

# For each of the quantities that we are going to plot, find their range
# over all of the datasets.
combined = np.concatenate([dataset['searched_area'] for dataset in datasets_])
min_searched_area = np.min(combined)
max_searched_area = np.max(combined)
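# Only plot the optional quantities whose columns are present in every
# dataset.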
have_offset = all('offset' in dataset.dtype.names for dataset in datasets_)
have_runtime = all('runtime' in dataset.dtype.names for dataset in datasets_)
have_searched_prob_dist = all('searched_prob_dist' in dataset.dtype.names for dataset in datasets_)
have_searched_prob_vol = all('searched_prob_vol' in dataset.dtype.names for dataset in datasets_)
if have_offset:
Example #20
          + contour_areas + area_probs + contour_dists + contour_vols
    if modes:
        ret += [searched_modes] + contour_modes
    return ret


if __name__ == '__main__':
    from glue.text_progress_bar import ProgressBar
    progress = ProgressBar()

    db = opts.db
    contours = opts.contour
    modes = opts.modes
    areas = opts.area

    progress.update(-1, 'spawning workers')
    if opts.jobs == 1:
        from six.moves import map
    else:
        try:
            from emcee.interruptible_pool import InterruptiblePool as Pool
        except ImportError:
            from multiprocessing import Pool
        map = Pool(
            opts.jobs, startup,
            (command.sqlite_get_filename(db), contours, modes, areas)).imap

    colnames = ([
        'coinc_event_id', 'simulation_id', 'far', 'snr', 'searched_area',
        'searched_prob', 'searched_prob_dist', 'searched_vol',
        'searched_prob_vol', 'offset', 'runtime', 'distmean', 'diststd',
Example #21
opts = parser.parse_args()

# Imports.
import matplotlib
matplotlib.use('agg')
from matplotlib import pyplot as plt
from matplotlib import rcParams
import os
from distutils.dir_util import mkpath
import numpy as np
from glue.text_progress_bar import ProgressBar
from lalinference import plot

# Create progress bar.
pb = ProgressBar()
pb.update(-1, 'reading data')

# Read in all of the datasets listed as positional command line arguments.
datasets_ = [
    np.recfromtxt(file, names=True, usemask=True) for file in opts.input
]
dataset_names = [os.path.splitext(file.name)[0] for file in opts.input]

# For each of the quantities that we are going to plot, find their range
# over all of the datasets.
combined = np.concatenate([dataset['searched_area'] for dataset in datasets_])
min_searched_area = np.min(combined)
max_searched_area = np.max(combined)
have_offset = all('offset' in dataset.dtype.names for dataset in datasets_)
have_runtime = all('runtime' in dataset.dtype.names for dataset in datasets_)
have_searched_prob_dist = all('searched_prob_dist' in dataset.dtype.names
Example #22
def cluster_events(events,
                   testfunc,
                   clusterfunc,
                   sortfunc=None,
                   bailoutfunc=None,
                   verbose=False):
    """
	Cluster the events in an event list.  testfunc will be passed a
	pair of events in random order, and must return 0 (or False) if
	they should be clustered.  clusterfunc will be passed a pair of
	events in random order, and must return an event that is the
	"cluster" of the two.  clusterfunc is free to return a new events,
	or modify one or the other of its parameters in place and return
	it.

	If sortfunc and bailoutfunc are both not None (if one is provided
	the other must be as well), the events will be sorted into
	"increasing" order using sortfunc as a comparison operator, and
	then only pairs of events for which bailoutfunc returns 0 (or
	False) will be considered for clustering.

	The return value is True if the events in the event list were
	modified, and False if they were not (although their order might
	have changed).
	"""
    # changed indicates if the event list has changed
    changed = False
    while True:
        if verbose:
            progress = ProgressBar("clustering %d events" % len(events),
                                   max=len(events))
            progress.show()
        else:
            progress = None

        if sortfunc is not None:
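            # NOTE: Python 2 style comparison sort; under Python 3 this would
            # be events.sort(key=functools.cmp_to_key(sortfunc)).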
            events.sort(sortfunc)

        # outer_did_cluster indicates if the event list changes on
        # this pass
        outer_did_cluster = False
        i = 0
        while i < len(events):
            if progress is not None:
                progress.update(i)
            if events[i] is not None:
                # inner_did_cluster indicates if events[i]
                # has changed
                inner_did_cluster = False
                for j, event_j in enumerate(events[i + 1:], 1):
                    if event_j is not None:
                        if not testfunc(events[i], event_j):
                            events[i] = clusterfunc(events[i], event_j)
                            events[i + j] = None
                            inner_did_cluster = True
                        elif (sortfunc is not None) and bailoutfunc(
                                events[i], event_j):
                            break
                if inner_did_cluster:
                    outer_did_cluster = True
                    # don't advance until events[i]
                    # stops changing
                    continue
            # events[i] has not changed
            i += 1
        del progress
        # repeat until we do a pass without the listing changing
        if not outer_did_cluster:
            break
        iterutils.inplace_filter(lambda event: event is not None, events)
        changed = True
    return changed
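
# A minimal usage sketch with hypothetical test/cluster functions (not part
# of the original module).  cluster_events itself also expects ProgressBar
# and glue.iterutils (for inplace_filter) to be in scope where it is defined.
def testfunc(a, b):
    # zero/False means "cluster these two events"
    return abs(a - b) > 0.5

def clusterfunc(a, b):
    # merge a pair of events into their midpoint
    return 0.5 * (a + b)

events = [1.0, 1.2, 5.0, 5.1, 9.0]
changed = cluster_events(events, testfunc, clusterfunc)
print(changed, events)  # True [1.1, 5.05, 9.0]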
Example #23
def cluster_events(events, testfunc, clusterfunc, sortfunc = None, bailoutfunc = None, verbose = False):
	"""
	Cluster the events in an event list.  testfunc will be passed a
	pair of events in random order, and must return 0 (or False) if
	they should be clustered.  clusterfunc will be passed a pair of
	events in random order, and must return an event that is the
	"cluster" of the two.  clusterfunc is free to return a new events,
	or modify one or the other of its parameters in place and return
	it.

	If sortfunc and bailoutfunc are both not None (if one is provided
	the other must be as well), the events will be sorted into
	"increasing" order using sortfunc as a comparison operator, and
	then only pairs of events for which bailoutfunc returns 0 (or
	False) will be considered for clustering.

	The return value is True if the events in the event list were
	modified, and False if they were not (although their order might
	have changed).
	"""
	# changed indicates if the event list has changed
	changed = False
	while True:
		if verbose:
			progress = ProgressBar("clustering %d events" % len(events), max = len(events))
			progress.show()
		else:
			progress = None

		if sortfunc is not None:
			events.sort(sortfunc)

		# outer_did_cluster indicates if the event list changes on
		# this pass
		outer_did_cluster = False
		i = 0
		while i < len(events):
			if progress is not None:
				progress.update(i)
			if events[i] is not None:
				# inner_did_cluster indicates if events[i]
				# has changed
				inner_did_cluster = False
				for j, event_j in enumerate(events[i + 1:], 1):
					if event_j is not None:
						if not testfunc(events[i], event_j):
							events[i] = clusterfunc(events[i], event_j)
							events[i + j] = None
							inner_did_cluster = True
						elif (sortfunc is not None) and bailoutfunc(events[i], event_j):
							break
				if inner_did_cluster:
					outer_did_cluster = True
					# don't advance until events[i]
					# stops changing
					continue
			# events[i] has not changed
			i += 1
		del progress
		# repeat until we do a pass without the listing changing
		if not outer_did_cluster:
			break
		iterutils.inplace_filter(lambda event: event is not None, events)
		changed = True
	return changed