def snr_series(self):
    value = self._snr_series
    if self._invert_phases and value is not None:
        value = lal.CutCOMPLEX8TimeSeries(value, 0, len(value.data.data))
        value.data.data = value.data.data.conj()
    return value
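
# Hedged sketch (numpy-only, illustrative; not from the code above): the
# conjugation performed by snr_series() when _invert_phases is set flips the
# sign of the phase of each complex SNR sample while leaving its magnitude
# unchanged. `np` is assumed to be imported as elsewhere in these examples.
def demo_phase_inversion():
    snr = 8.0 * np.exp(1j * np.array([0.3, -1.2, 2.5]))   # |SNR| = 8 samples
    inverted = snr.conj()
    assert np.allclose(np.abs(inverted), np.abs(snr))
    assert np.allclose(np.angle(inverted), -np.angle(snr))
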
def localize(event,
             waveform='o2-uberbank',
             f_low=30.0,
             min_distance=None,
             max_distance=None,
             prior_distance_power=None,
             cosmology=False,
             method='toa_phoa_snr',
             nside=-1,
             chain_dump=None,
             enable_snr_series=True,
             f_high_truncate=0.95):
    """Convenience function to produce a sky map from LIGO-LW rows. Note that
    min_distance and max_distance should be in Mpc.

    Returns a 'NESTED' ordering HEALPix image as a Numpy array.
    """
    frame = inspect.currentframe()
    argstr = inspect.formatargvalues(*inspect.getargvalues(frame))
    start_time = lal.GPSTimeNow()

    singles = event.singles
    if not enable_snr_series:
        singles = [single for single in singles if single.snr is not None]

    ifos = [single.detector for single in singles]

    # Extract SNRs from table.
    snrs = np.ma.asarray([
        np.ma.masked if single.snr is None else single.snr
        for single in singles
    ])

    # Look up physical parameters for detector.
    detectors = [
        lalsimulation.DetectorPrefixToLALDetector(str(ifo)) for ifo in ifos
    ]
    responses = np.asarray([det.response for det in detectors])
    locations = np.asarray([det.location for det in detectors])

    # Power spectra for each detector.
    psds = [single.psd for single in singles]
    psds = [
        timing.InterpolatedPSD(filter.abscissa(psd),
                               psd.data.data,
                               f_high_truncate=f_high_truncate) for psd in psds
    ]

    log.debug('calculating templates')
    H = filter.sngl_inspiral_psd(waveform, f_min=f_low, **event.template_args)

    log.debug('calculating noise PSDs')
    HS = [filter.signal_psd_series(H, S) for S in psds]

    # Signal models for each detector.
    log.debug('calculating Fisher matrix elements')
    signal_models = [timing.SignalModel(_) for _ in HS]

    # Get SNR=1 horizon distances for each detector.
    horizons = np.asarray([
        signal_model.get_horizon_distance() for signal_model in signal_models
    ])

    weights = np.ma.asarray([
        1 / np.square(signal_model.get_crb_toa_uncert(snr))
        for signal_model, snr in zip(signal_models, snrs)
    ])

    # Center detector array.
    locations -= np.sum(locations * weights.reshape(-1, 1),
                        axis=0) / np.sum(weights)

    if cosmology:
        log.warning('Enabling cosmological prior. '
                    'This feature is UNREVIEWED.')

    if enable_snr_series:
        log.warning('Enabling input of SNR time series. '
                    'This feature is UNREVIEWED.')
        snr_series = [single.snr_series for single in singles]
        if all(s is None for s in snr_series):
            snr_series = None
    else:
        snr_series = None

    # Maximum barycentered arrival time error:
    # |distance from array barycenter to furthest detector| / c + 5 ms.
    # For LHO+LLO, this is 15.0 ms.
    # For an arbitrary terrestrial detector network, the maximum is 26.3 ms.
    max_abs_t = np.max(np.sqrt(np.sum(np.square(locations / lal.C_SI),
                                      axis=1))) + 0.005

    if snr_series is None:
        log.warning(
            "No SNR time series found, so we are creating a zero-noise "
            "SNR time series from the whitened template's autocorrelation "
            "sequence. The sky localization uncertainty may be "
            "underestimated.")

        acors, sample_rates = zip(
            *[filter.autocorrelation(_, max_abs_t) for _ in HS])
        sample_rate = sample_rates[0]
        deltaT = 1 / sample_rate
        nsamples = len(acors[0])
        assert all(sample_rate == _ for _ in sample_rates)
        assert all(nsamples == len(_) for _ in acors)
        nsamples = nsamples * 2 - 1

        snr_series = []
        for acor, single in zip(acors, singles):
            series = lal.CreateCOMPLEX8TimeSeries('fake SNR', 0, 0, deltaT,
                                                  lal.StrainUnit, nsamples)
            series.epoch = single.time - 0.5 * (nsamples - 1) * deltaT
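            # Mirror the one-sided autocorrelation about zero lag using the
            # Hermitian symmetry R(-tau) = conj(R(tau)), giving a two-sided
            # sequence of length 2 * n - 1.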
            acor = np.concatenate((np.conj(acor[:0:-1]), acor))
            series.data.data = single.snr * filter.exp_i(single.phase) * acor
            snr_series.append(series)

    # Ensure that all of the SNR time series have the same sample rate.
    # FIXME: for now, the Python wrapper expects all of the SNR time series to
    # also be the same length.
    deltaT = snr_series[0].deltaT
    sample_rate = 1 / deltaT
    if any(deltaT != series.deltaT for series in snr_series):
        raise ValueError('BAYESTAR does not yet support SNR time series with '
                         'mixed sample rates')

    # Ensure that all of the SNR time series have odd lengths.
    if any(len(series.data.data) % 2 == 0 for series in snr_series):
        raise ValueError('SNR time series must have odd lengths')

    # Trim time series to the desired length.
    max_abs_n = int(np.ceil(max_abs_t * sample_rate))
    desired_length = 2 * max_abs_n - 1
    for i, series in enumerate(snr_series):
        length = len(series.data.data)
        if length > desired_length:
            snr_series[i] = lal.CutCOMPLEX8TimeSeries(
                series, length // 2 + 1 - max_abs_n, desired_length)

    # FIXME: for now, the Python wrapper expects all of the SNR time series to
    # also be the same length.
    nsamples = len(snr_series[0].data.data)
    if any(nsamples != len(series.data.data) for series in snr_series):
        raise ValueError('BAYESTAR does not yet support SNR time series of '
                         'mixed lengths')

    # Perform a sanity check that the middle sample of each SNR time series
    # matches the corresponding sngl_inspiral record. Relax the valid interval
    # slightly, from +/- 0.5 deltaT to +/- 0.6 deltaT, to allow for floating
    # point roundoff error.
    for single, series in zip(singles, snr_series):
        if np.abs(0.5 * (nsamples - 1) * series.deltaT +
                  float(series.epoch - single.time)) >= 0.6 * deltaT:
            raise ValueError('BAYESTAR expects the SNR time series to be '
                             'centered on the single-detector trigger times')

    # Extract the TOAs in GPS nanoseconds from the SNR time series, assuming
    # that the trigger happened in the middle.
    toas_ns = [
        series.epoch.ns() + 1e9 * 0.5 *
        (len(series.data.data) - 1) * series.deltaT for series in snr_series
    ]

    # Collect all of the SNR series in one array.
    snr_series = np.vstack([series.data.data for series in snr_series])

    # Center times of arrival and compute GMST at mean arrival time.
    # Pre-center in integer nanoseconds to preserve precision of
    # initial datatype.
    epoch = sum(toas_ns) // len(toas_ns)
    toas = 1e-9 * (np.asarray(toas_ns) - epoch)
    # FIXME: np.average does not support masked arrays prior to NumPy 1.13.0.
    # Replace this with np.average once NumPy 1.13.0 can be required.
    mean_toa = np.sum(toas * weights) / np.sum(weights)
    toas -= mean_toa
    epoch += int(np.round(1e9 * mean_toa))
    epoch = lal.LIGOTimeGPS(0, int(epoch))
    gmst = lal.GreenwichMeanSiderealTime(epoch)

    # Translate SNR time series back to time of first sample.
    toas -= 0.5 * (nsamples - 1) * deltaT

    # If minimum distance is not specified, then default to 0 Mpc.
    if min_distance is None:
        min_distance = 0

    # If maximum distance is not specified, then default to the SNR=4
    # horizon distance of the most sensitive detector.
    if max_distance is None:
        max_distance = max(horizons) / 4

    # If prior_distance_power is not specified, then default to 2
    # (p(r) ~ r^2, uniform in volume).
    if prior_distance_power is None:
        prior_distance_power = 2

    # Raise an exception if 0 Mpc is the minimum effective distance and the
    # prior is of the form r**k for k<0
    if min_distance == 0 and prior_distance_power < 0:
        raise ValueError(
            ('Prior is a power law r^k with k={}, '
             'undefined at min_distance=0').format(prior_distance_power))

    # Time and run sky localization.
    log.debug('starting computationally-intensive section')
    if method == 'toa_phoa_snr':
        skymap, log_bci, log_bsn = _sky_map.toa_phoa_snr(
            min_distance, max_distance, prior_distance_power, cosmology, gmst,
            sample_rate, toas, snr_series, responses, locations, horizons)
        skymap = Table(skymap)
        skymap.meta['log_bci'] = log_bci
        skymap.meta['log_bsn'] = log_bsn
    elif method == 'toa_phoa_snr_mcmc':
        skymap = localize_emcee(
            logl=_sky_map.log_likelihood_toa_phoa_snr,
            loglargs=(gmst, sample_rate, toas, snr_series, responses,
                      locations, horizons),
            logp=toa_phoa_snr_log_prior,
            logpargs=(min_distance, max_distance, prior_distance_power,
                      max_abs_t),
            xmin=[0, -1, min_distance, -1, 0, 0],
            xmax=[2 * np.pi, 1, max_distance, 1, 2 * np.pi, 2 * max_abs_t],
            nside=nside,
            chain_dump=chain_dump)
    else:
        raise ValueError('Unrecognized method: %s' % method)

    # Convert distance moments to parameters
    distmean = skymap.columns.pop('DISTMEAN')
    diststd = skymap.columns.pop('DISTSTD')
    skymap['DISTMU'], skymap['DISTSIGMA'], skymap['DISTNORM'] = \
        distance.moments_to_parameters(distmean, diststd)

    # Add marginal distance moments
    good = np.isfinite(distmean) & np.isfinite(diststd)
    prob = (moc.uniq2pixarea(skymap['UNIQ']) * skymap['PROBDENSITY'])[good]
    distmean = distmean[good]
    diststd = diststd[good]
    rbar = (prob * distmean).sum()
    r2bar = (prob * (np.square(diststd) + np.square(distmean))).sum()
    skymap.meta['distmean'] = rbar
    skymap.meta['diststd'] = np.sqrt(r2bar - np.square(rbar))

    log.debug('finished computationally-intensive section')
    end_time = lal.GPSTimeNow()

    # Fill in metadata and return.
    program, _ = os.path.splitext(os.path.basename(sys.argv[0]))
    skymap.meta['creator'] = 'BAYESTAR'
    skymap.meta['origin'] = 'LIGO/Virgo'
    skymap.meta['vcs_info'] = vcs_info
    skymap.meta['gps_time'] = float(epoch)
    skymap.meta['runtime'] = float(end_time - start_time)
    skymap.meta['instruments'] = {single.detector for single in singles}
    skymap.meta['gps_creation_time'] = end_time
    skymap.meta['history'] = [
        '', 'Generated by calling the following Python function:',
        '{}.{}{}'.format(__name__, frame.f_code.co_name, argstr), '',
        'This was the command line that started the program:',
        ' '.join([program] + sys.argv[1:])
    ]

    return skymap
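
# Hedged sketch (illustrative helper, not part of the example above): one way
# to turn the multi-order Table returned by localize() into a credible-region
# area, using only numpy and the `moc` module already used in this code.
def credible_area(skymap, level=0.9):
    """Area (sr) of the smallest sky region that contains `level` of the
    posterior probability."""
    pixarea = moc.uniq2pixarea(skymap['UNIQ'])
    prob = pixarea * skymap['PROBDENSITY']
    order = np.argsort(-skymap['PROBDENSITY'])        # densest pixels first
    cum = np.cumsum(prob[order])
    npix = np.searchsorted(cum, level) + 1            # pixels needed
    return pixarea[order][:npix].sum()

# For example (assuming an `event` object as expected by localize):
#     area_sr = credible_area(localize(event), level=0.9)
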
Example #3
def condition(event,
              waveform='o2-uberbank',
              f_low=30.0,
              enable_snr_series=True,
              f_high_truncate=0.95):
    """Condition an event for sky localization.

    Computes per-detector horizon distances (Mpc), detector responses and
    barycentered locations (light-seconds), times of arrival (seconds,
    relative to the returned epoch), and SNR time series, in the form
    consumed by the sky localization kernel.
    """
    if len(event.singles) == 0:
        raise ValueError('Cannot localize an event with zero detectors.')

    singles = event.singles
    if not enable_snr_series:
        singles = [single for single in singles if single.snr is not None]

    ifos = [single.detector for single in singles]

    # Extract SNRs from table.
    snrs = np.ma.asarray([
        np.ma.masked if single.snr is None else single.snr
        for single in singles
    ])

    # Look up physical parameters for detector.
    detectors = [
        lalsimulation.DetectorPrefixToLALDetector(str(ifo)) for ifo in ifos
    ]
    responses = np.asarray([det.response for det in detectors])
    locations = np.asarray([det.location for det in detectors]) / lal.C_SI

    # Power spectra for each detector.
    psds = [single.psd for single in singles]
    psds = [
        filter.InterpolatedPSD(filter.abscissa(psd),
                               psd.data.data,
                               f_high_truncate=f_high_truncate) for psd in psds
    ]

    log.debug('calculating templates')
    H = filter.sngl_inspiral_psd(waveform, f_min=f_low, **event.template_args)

    log.debug('calculating noise PSDs')
    HS = [filter.signal_psd_series(H, S) for S in psds]

    # Signal models for each detector.
    log.debug('calculating Fisher matrix elements')
    signal_models = [filter.SignalModel(_) for _ in HS]

    # Get SNR=1 horizon distances for each detector.
    horizons = np.asarray([
        signal_model.get_horizon_distance() for signal_model in signal_models
    ])

    weights = np.ma.asarray([
        1 / np.square(signal_model.get_crb_toa_uncert(snr))
        for signal_model, snr in zip(signal_models, snrs)
    ])

    # Center detector array.
    locations -= (np.sum(locations * weights.reshape(-1, 1), axis=0) /
                  np.sum(weights))

    if enable_snr_series:
        snr_series = [single.snr_series for single in singles]
        if all(s is None for s in snr_series):
            snr_series = None
    else:
        snr_series = None

    # Maximum barycentered arrival time error:
    # |distance from array barycenter to furthest detector| / c + 5 ms.
    # For LHO+LLO, this is 15.0 ms.
    # For an arbitrary terrestrial detector network, the maximum is 26.3 ms.
    max_abs_t = np.max(np.sqrt(np.sum(np.square(locations), axis=1))) + 0.005

    if snr_series is None:
        log.warning("No SNR time series found, so we are creating a "
                    "zero-noise SNR time series from the whitened template's "
                    "autocorrelation sequence. The sky localization "
                    "uncertainty may be underestimated.")

        acors, sample_rates = zip(
            *[filter.autocorrelation(_, max_abs_t) for _ in HS])
        sample_rate = sample_rates[0]
        deltaT = 1 / sample_rate
        nsamples = len(acors[0])
        assert all(sample_rate == _ for _ in sample_rates)
        assert all(nsamples == len(_) for _ in acors)
        nsamples = nsamples * 2 - 1

        snr_series = []
        for acor, single in zip(acors, singles):
            series = lal.CreateCOMPLEX8TimeSeries('fake SNR', 0, 0, deltaT,
                                                  lal.StrainUnit, nsamples)
            series.epoch = single.time - 0.5 * (nsamples - 1) * deltaT
            acor = np.concatenate((np.conj(acor[:0:-1]), acor))
            series.data.data = single.snr * filter.exp_i(single.phase) * acor
            snr_series.append(series)

    # Ensure that all of the SNR time series have the same sample rate.
    # FIXME: for now, the Python wrapper expects all of the SNR time series to
    # also be the same length.
    deltaT = snr_series[0].deltaT
    sample_rate = 1 / deltaT
    if any(deltaT != series.deltaT for series in snr_series):
        raise ValueError('BAYESTAR does not yet support SNR time series with '
                         'mixed sample rates')

    # Ensure that all of the SNR time series have odd lengths.
    if any(len(series.data.data) % 2 == 0 for series in snr_series):
        raise ValueError('SNR time series must have odd lengths')

    # Trim time series to the desired length.
    max_abs_n = int(np.ceil(max_abs_t * sample_rate))
    desired_length = 2 * max_abs_n - 1
    for i, series in enumerate(snr_series):
        length = len(series.data.data)
        if length > desired_length:
            snr_series[i] = lal.CutCOMPLEX8TimeSeries(
                series, length // 2 + 1 - max_abs_n, desired_length)

    # FIXME: for now, the Python wrapper expects all of the SNR time series to
    # also be the same length.
    nsamples = len(snr_series[0].data.data)
    if any(nsamples != len(series.data.data) for series in snr_series):
        raise ValueError('BAYESTAR does not yet support SNR time series of '
                         'mixed lengths')

    # Perform a sanity check that the middle sample of each SNR time series
    # matches the corresponding sngl_inspiral record to within one sample
    # (plus the smallest representable LIGOTimeGPS difference of 1 nanosecond).
    for ifo, single, series in zip(ifos, singles, snr_series):
        shift = np.abs(0.5 * (nsamples - 1) * series.deltaT +
                       float(series.epoch - single.time))
        if shift >= deltaT + 1e-8:
            raise ValueError('BAYESTAR expects the SNR time series to be '
                             'centered on the single-detector trigger times, '
                             'but {} was off by {} s'.format(ifo, shift))

    # Extract the TOAs in GPS nanoseconds from the SNR time series, assuming
    # that the trigger happened in the middle.
    toas_ns = [
        series.epoch.ns() + 1e9 * 0.5 *
        (len(series.data.data) - 1) * series.deltaT for series in snr_series
    ]

    # Collect all of the SNR series in one array.
    snr_series = np.vstack([series.data.data for series in snr_series])

    # Center times of arrival and compute GMST at mean arrival time.
    # Pre-center in integer nanoseconds to preserve precision of
    # initial datatype.
    epoch = sum(toas_ns) // len(toas_ns)
    toas = 1e-9 * (np.asarray(toas_ns) - epoch)
    mean_toa = np.average(toas, weights=weights)
    toas -= mean_toa
    epoch += int(np.round(1e9 * mean_toa))
    epoch = lal.LIGOTimeGPS(0, int(epoch))

    # Translate SNR time series back to time of first sample.
    toas -= 0.5 * (nsamples - 1) * deltaT

    return epoch, sample_rate, toas, snr_series, responses, locations, horizons
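
# Hedged usage sketch (illustrative): the tuple returned by condition() lines
# up with the per-detector inputs of the localization kernel as called in the
# first example above; GMST is derived from the returned epoch in the same
# way as in localize().
def conditioned_kernel_args(event):
    (epoch, sample_rate, toas, snr_series,
     responses, locations, horizons) = condition(event)
    gmst = lal.GreenwichMeanSiderealTime(epoch)
    # toas are in seconds relative to `epoch`, referenced to the first SNR
    # sample; locations are barycentered detector positions in light-seconds.
    return (gmst, sample_rate, toas, snr_series,
            responses, locations, horizons)
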
Example #4
def ligolw_sky_map(sngl_inspirals,
                   waveform,
                   f_low,
                   min_distance=None,
                   max_distance=None,
                   prior_distance_power=None,
                   method="toa_phoa_snr",
                   psds=None,
                   nside=-1,
                   chain_dump=None,
                   phase_convention='antifindchirp',
                   snr_series=None,
                   enable_snr_series=False):
    """Convenience function to produce a sky map from LIGO-LW rows. Note that
    min_distance and max_distance should be in Mpc.

    Returns a multi-order sky map as an astropy Table, with HEALPix 'NESTED'
    UNIQ pixel indices.
    """

    # Ensure that sngl_inspiral is either a single template or a list of
    # identical templates
    for key in 'mass1 mass2 spin1x spin1y spin1z spin2x spin2y spin2z'.split():
        if hasattr(sngl_inspirals[0], key):
            value = getattr(sngl_inspirals[0], key)
            if any(value != getattr(_, key) for _ in sngl_inspirals):
                raise ValueError(
                    '{0} field is not the same for all detectors'.format(key))

    ifos = [sngl_inspiral.ifo for sngl_inspiral in sngl_inspirals]

    # Extract SNRs from table.
    snrs = np.ma.asarray([
        np.ma.masked if sngl_inspiral.snr is None else sngl_inspiral.snr
        for sngl_inspiral in sngl_inspirals
    ])

    # Look up physical parameters for detector.
    detectors = [
        lalsimulation.DetectorPrefixToLALDetector(str(ifo)) for ifo in ifos
    ]
    responses = np.asarray([det.response for det in detectors])
    locations = np.asarray([det.location for det in detectors])

    # Power spectra for each detector.
    if psds is None:
        psds = [timing.get_noise_psd_func(ifo) for ifo in ifos]

    log.debug('calculating templates')
    H = filter.sngl_inspiral_psd(sngl_inspirals[0], waveform, f_min=f_low)

    log.debug('calculating noise PSDs')
    HS = [filter.signal_psd_series(H, S) for S in psds]

    # Signal models for each detector.
    log.debug('calculating Fisher matrix elements')
    signal_models = [timing.SignalModel(_) for _ in HS]

    # Get SNR=1 horizon distances for each detector.
    horizons = np.asarray([
        signal_model.get_horizon_distance() for signal_model in signal_models
    ])

    weights = np.ma.asarray([
        1 / np.square(signal_model.get_crb_toa_uncert(snr))
        for signal_model, snr in zip(signal_models, snrs)
    ])

    # Center detector array.
    locations -= np.sum(locations * weights.reshape(-1, 1),
                        axis=0) / np.sum(weights)

    if enable_snr_series:
        log.warning(
            'Enabling input of SNR time series. This feature is UNREVIEWED.')
    else:
        snr_series = None

    # Maximum barycentered arrival time error:
    # |distance from array barycenter to furthest detector| / c + 5 ms.
    # For LHO+LLO, this is 15.0 ms.
    # For an arbitrary terrestrial detector network, the maximum is 26.3 ms.
    max_abs_t = np.max(np.sqrt(np.sum(np.square(locations / lal.C_SI),
                                      axis=1))) + 0.005

    if snr_series is None:
        log.warning(
            "No SNR time series found, so we are creating a zero-noise "
            "SNR time series from the whitened template's autocorrelation "
            "sequence. The sky localization uncertainty may be "
            "underestimated.")

        acors, sample_rates = zip(
            *[filter.autocorrelation(_, max_abs_t) for _ in HS])
        sample_rate = sample_rates[0]
        deltaT = 1 / sample_rate
        nsamples = len(acors[0])
        assert all(sample_rate == _ for _ in sample_rates)
        assert all(nsamples == len(_) for _ in acors)
        nsamples = nsamples * 2 - 1

        snr_series = []
        for acor, sngl in zip(acors, sngl_inspirals):
            series = lal.CreateCOMPLEX8TimeSeries('fake SNR', 0, 0, deltaT,
                                                  lal.StrainUnit, nsamples)
            series.epoch = sngl.end - 0.5 * (nsamples - 1) * deltaT
            acor = np.concatenate((np.conj(acor[:0:-1]), acor))
            if phase_convention.lower() == 'antifindchirp':
                # The matched filter phase convention does NOT affect the
                # template autocorrelation sequence; however it DOES affect
                # the maximum-likelihood phase estimate AND the SNR time series.
                # So if we are going to apply the anti-findchirp phase
                # correction later, we'll have to apply a complex conjugate to
                # the autocorrelation sequence to cancel it here.
                acor = np.conj(acor)
            series.data.data = sngl.snr * filter.exp_i(sngl.coa_phase) * acor
            snr_series.append(series)

    # Ensure that all of the SNR time series have the same sample rate.
    # FIXME: for now, the Python wrapper expects all of the SNR time series to
    # also be the same length.
    deltaT = snr_series[0].deltaT
    sample_rate = 1 / deltaT
    if any(deltaT != series.deltaT for series in snr_series):
        raise ValueError('BAYESTAR does not yet support SNR time series with '
                         'mixed sample rates')

    # Ensure that all of the SNR time series have odd lengths.
    if any(len(series.data.data) % 2 == 0 for series in snr_series):
        raise ValueError('SNR time series must have odd lengths')

    # Trim time series to the desired length.
    max_abs_n = int(np.ceil(max_abs_t * sample_rate))
    desired_length = 2 * max_abs_n - 1
    for i, series in enumerate(snr_series):
        length = len(series.data.data)
        if length > desired_length:
            snr_series[i] = lal.CutCOMPLEX8TimeSeries(
                series, length // 2 + 1 - max_abs_n, desired_length)

    # FIXME: for now, the Python wrapper expects all of the SNR time series to
    # also be the same length.
    nsamples = len(snr_series[0].data.data)
    if any(nsamples != len(series.data.data) for series in snr_series):
        raise ValueError('BAYESTAR does not yet support SNR time series of '
                         'mixed lengths')

    # Perform a sanity check that the middle sample of each SNR time series
    # matches the corresponding sngl_inspiral record.
    for sngl_inspiral, series in zip(sngl_inspirals, snr_series):
        if np.abs(0.5 * (nsamples - 1) * series.deltaT +
                  float(series.epoch - sngl_inspiral.end)) >= 0.5 * deltaT:
            raise ValueError('BAYESTAR expects the SNR time series to be '
                             'centered on the sngl_inspiral end times')

    # Extract the TOAs in GPS nanoseconds from the SNR time series, assuming
    # that the trigger happened in the middle.
    toas_ns = [
        series.epoch.ns() + 1e9 * 0.5 *
        (len(series.data.data) - 1) * series.deltaT for series in snr_series
    ]

    # Collect all of the SNR series in one array.
    snr_series = np.vstack([series.data.data for series in snr_series])

    # Fudge factor for excess estimation error in gstlal_inspiral.
    fudge = 0.83
    snr_series *= fudge

    # If using 'findchirp' phase convention rather than gstlal/mbta,
    # then flip signs of phases.
    if phase_convention.lower() == 'antifindchirp':
        log.warning('Using anti-FINDCHIRP phase convention; inverting phases. '
                    'This is currently the default and it is appropriate for '
                    'gstlal and MBTA but not pycbc as of observing run 1 '
                    '("O1"). The default setting is likely to change in the '
                    'future.')
        snr_series = np.conj(snr_series)

    # Center times of arrival and compute GMST at mean arrival time.
    # Pre-center in integer nanoseconds to preserve precision of
    # initial datatype.
    epoch = sum(toas_ns) // len(toas_ns)
    toas = 1e-9 * (np.asarray(toas_ns) - epoch)
    # FIXME: np.average does not support masked arrays prior to NumPy 1.13.0.
    # Replace this with np.average once NumPy 1.13.0 can be required.
    mean_toa = np.sum(toas * weights) / np.sum(weights)
    toas -= mean_toa
    epoch += int(np.round(1e9 * mean_toa))
    epoch = lal.LIGOTimeGPS(0, int(epoch))
    gmst = lal.GreenwichMeanSiderealTime(epoch)

    # Translate SNR time series back to time of first sample.
    toas -= 0.5 * (nsamples - 1) * deltaT

    # If minimum distance is not specified, then default to 0 Mpc.
    if min_distance is None:
        min_distance = 0

    # If maximum distance is not specified, then default to the SNR=4
    # horizon distance of the most sensitive detector.
    if max_distance is None:
        max_distance = max(horizons) / 4

    # If prior_distance_power is not specified, then default to 2
    # (p(r) ~ r^2, uniform in volume).
    if prior_distance_power is None:
        prior_distance_power = 2

    # Raise an exception if 0 Mpc is the minimum effective distance and the
    # prior is of the form r**k for k<0
    if min_distance == 0 and prior_distance_power < 0:
        raise ValueError(
            ("Prior is a power law r^k with k={}, " +
             "undefined at min_distance=0").format(prior_distance_power))

    # Rescale distances to horizon distance of most sensitive detector.
    max_horizon = np.max(horizons)
    horizons /= max_horizon
    min_distance /= max_horizon
    max_distance /= max_horizon

    # Time and run sky localization.
    log.debug('starting computationally-intensive section')
    start_time = lal.GPSTimeNow()
    if method == "toa_phoa_snr":
        skymap = Table(
            _sky_map.toa_phoa_snr(min_distance, max_distance,
                                  prior_distance_power, gmst, sample_rate,
                                  toas, snr_series, responses, locations,
                                  horizons))
    elif method == "toa_phoa_snr_mcmc":
        skymap = emcee_sky_map(
            logl=_sky_map.log_likelihood_toa_phoa_snr,
            loglargs=(gmst, sample_rate, toas, snr_series, responses,
                      locations, horizons),
            logp=toa_phoa_snr_log_prior,
            logpargs=(min_distance, max_distance, prior_distance_power,
                      max_abs_t),
            xmin=[0, -1, min_distance, -1, 0, 0],
            xmax=[2 * np.pi, 1, max_distance, 1, 2 * np.pi, 2 * max_abs_t],
            nside=nside,
            chain_dump=chain_dump,
            max_horizon=max_horizon * fudge)
    else:
        raise ValueError("Unrecognized method: %s" % method)

    # Convert distance moments to parameters
    distmean = skymap.columns.pop('DISTMEAN')
    diststd = skymap.columns.pop('DISTSTD')
    skymap['DISTMU'], skymap['DISTSIGMA'], skymap['DISTNORM'] = \
        distance.moments_to_parameters(distmean, diststd)

    # Add marginal distance moments
    good = np.isfinite(distmean) & np.isfinite(diststd)
    prob = (moc.uniq2pixarea(skymap['UNIQ']) * skymap['PROBDENSITY'])[good]
    distmean = distmean[good]
    diststd = diststd[good]
    rbar = (prob * distmean).sum()
    r2bar = (prob * (np.square(diststd) + np.square(distmean))).sum()
    skymap.meta['distmean'] = rbar
    skymap.meta['diststd'] = np.sqrt(r2bar - np.square(rbar))

    # Rescale
    rescale = max_horizon * fudge
    skymap['DISTMU'] *= rescale
    skymap['DISTSIGMA'] *= rescale
    skymap.meta['distmean'] *= rescale
    skymap.meta['diststd'] *= rescale
    skymap['DISTNORM'] /= np.square(rescale)

    end_time = lal.GPSTimeNow()
    log.debug('finished computationally-intensive section')

    # Fill in metadata and return.
    skymap.meta['creator'] = 'BAYESTAR'
    skymap.meta['origin'] = 'LIGO/Virgo'
    skymap.meta['gps_time'] = float(epoch)
    skymap.meta['runtime'] = float(end_time - start_time)
    skymap.meta['instruments'] = {
        sngl_inspiral.ifo
        for sngl_inspiral in sngl_inspirals
    }
    skymap.meta['gps_creation_time'] = end_time

    return skymap
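
# Hedged sketch (pure numpy, illustrative): the rescaling block near the end
# of ligolw_sky_map relies on the conditional distance ansatz
# p(r) = DISTNORM * normpdf(r; DISTMU, DISTSIGMA) * r**2. If every distance is
# multiplied by a factor k, normalization is preserved by mapping
# DISTMU -> k * DISTMU, DISTSIGMA -> k * DISTSIGMA and
# DISTNORM -> DISTNORM / k**2, which is what the code above does with
# k = max_horizon * fudge.
def rescale_distance_params(distmu, distsigma, distnorm, k):
    """Rescale the distance ansatz parameters when distances change by k."""
    return k * distmu, k * distsigma, distnorm / np.square(k)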