Example #1
import theano
import theano.tensor as tt

from starry_process import StarryProcess


def get_functions(marginalize_over_inclination=False, seed=42):
    """Return compiled theano functions for the GP log likelihood and for drawing flux samples."""
    def _lnlike(r, a, b, c, n, i, p, t, flux, data_cov):
        gp = StarryProcess(
            r=r,
            a=a,
            b=b,
            c=c,
            n=n,
            marginalize_over_inclination=marginalize_over_inclination,
            normalized=False,
        )
        return gp.log_likelihood(t, flux, data_cov, p=p, i=i)

    def _sample(r, a, b, c, n, i, p, t):
        gp = StarryProcess(
            r=r,
            a=a,
            b=b,
            c=c,
            n=n,
            marginalize_over_inclination=marginalize_over_inclination,
            normalized=False,
        )
        gp.random.seed(seed)
        return tt.reshape(gp.sample(t, p=p, i=i), (-1,))

    # Compile the likelihood function
    inputs = [tt.dscalar() for _ in range(7)]
    inputs += [tt.dvector(), tt.dvector(), tt.dscalar()]
    lnlike = theano.function(
        inputs, _lnlike(*inputs), on_unused_input="ignore"
    )

    # Compile the sampling function
    inputs = [tt.dscalar() for _ in range(7)]
    inputs += [tt.dvector()]
    sample = theano.function(
        inputs, _sample(*inputs), on_unused_input="ignore"
    )

    return lnlike, sample
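Once compiled, both functions take plain NumPy inputs: seven scalar hyperparameters (r, a, b, c, n, i, p), a time grid, and, for the likelihood, the observed flux and its data variance. A minimal usage sketch follows; the hyperparameter values below are illustrative placeholders, not library defaults.

import numpy as np

lnlike, sample = get_functions()

# Time grid and placeholder hyperparameters (spot radius, latitude params a and b,
# contrast, number of spots, inclination in degrees, rotation period)
t = np.linspace(0, 1, 300)
r, a, b, c, n, i, p = 20.0, 0.4, 0.3, 0.1, 10.0, 60.0, 1.0

flux = sample(r, a, b, c, n, i, p, t)            # draw one synthetic light curve
ll = lnlike(r, a, b, c, n, i, p, t, flux, 1e-6)  # log likelihood with data variance 1e-6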
Example #2
    def compile(self):
        """Compile the theano function that draws spherical harmonic map samples from the GP."""
        # Compile the GP
        print("Compiling. This may take up to one minute...")
        r = tt.dscalar()
        a = tt.dscalar()
        b = tt.dscalar()
        c = tt.dscalar()
        n = tt.dscalar()
        self.gp = StarryProcess(ydeg=self.ydeg, r=r, a=a, b=b, c=c, n=n)
        self.gp.random.seed(238)
        self.sample_function = theano.function(
            [r, a, b, c, n],
            [self.gp.sample_ylm(nsamples=self.nmaps)],
            no_default_updates=True,
        )
        self._compiled = True
        print("Done!")
Example #3
def test_jacobian():
    """Check that sampling (a, b) with the log-Jacobian correction yields an
    approximately uniform distribution in the transformed (mu, sigma) latitude parameters."""
    # Compile the Jacobian
    _a = tt.dscalar()
    _b = tt.dscalar()
    log_jac = theano.function([_a, _b], StarryProcess(a=_a, b=_b).log_jac())

    # Log probability
    def log_prob(p):
        if np.any(p < 0):
            return -np.inf
        elif np.any(p > 1):
            return -np.inf
        else:
            return log_jac(*p)

    # Run the sampler
    ndim, nwalkers, nsteps = 2, 50, 10000
    p0 = np.random.random(size=(nwalkers, ndim))
    sampler = emcee.EnsembleSampler(nwalkers, ndim, log_prob)
    sampler.run_mcmc(p0, nsteps)

    # Transform to latitude params
    a, b = sampler.chain.T.reshape(2, -1)
    mu, sigma = beta2gauss(a, b)

    # Compute the 2d histogram
    m1, m2 = 0, 80
    s1, s2 = 0, 45
    hist, _, _ = np.histogram2d(mu, sigma, range=((m1, m2), (s1, s2)))
    hist /= np.max(hist)

    # Check that the variation is less than 10% across the domain
    std = 1.4826 * mad(hist.flatten())
    mean = np.mean(hist.flatten())
    assert std / mean < 0.1
Example #4
def test_profile_marg(gradient, profile, ydeg=15, npts=1000):
    """Time or profile the inclination-marginalized log likelihood (and, optionally, its gradient)."""
    # Free parameters
    r = tt.dscalar()
    a = tt.dscalar()
    b = tt.dscalar()
    c = tt.dscalar()
    n = tt.dscalar()
    p = tt.dscalar()
    t = tt.dvector()
    flux = tt.dvector()
    data_cov = tt.dscalar()

    # Compute the mean and covariance
    gp = StarryProcess(
        r=r, a=a, b=b, c=c, n=n, marginalize_over_inclination=True
    )

    # Compile the function
    if gradient:
        g = lambda f, x: tt.grad(f, x)
    else:
        g = lambda f, x: f
    func = theano.function(
        [r, a, b, c, n, p, t, flux, data_cov],
        [
            g(gp.log_likelihood(t, flux, data_cov, p=p), a)
        ],  # wrt a for definiteness
        profile=profile,
    )

    # Build numerical inputs (these NumPy values shadow the symbolic variables above)
    t = np.linspace(0, 1, npts)
    flux = np.random.randn(npts)
    data_cov = 1.0

    run = lambda: func(
        defaults["r"],
        defaults["a"],
        defaults["b"],
        defaults["c"],
        defaults["n"],
        defaults["p"],
        t,
        flux,
        data_cov,
    )

    if profile:

        # Profile the full function
        run()
        print(func.profile.summary())

    else:

        # Time the execution
        number = 100
        time = timeit.timeit(run, number=number) / number
        print("time elapsed: {:.4f} s".format(time))
        if (gradient and time > 0.2) or (not gradient and time > 0.1):
            warnings.warn("too slow! ({:.4f} s)".format(time))
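Both modes can be exercised directly, provided the module-level defaults dictionary assumed by the function is in scope:

test_profile_marg(gradient=False, profile=False)  # time the likelihood and warn if it is slow
test_profile_marg(gradient=True, profile=True)    # print theano's profiling summary for the gradient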
Example #5
def plot_latitude_pdf(results, **kwargs):
    """
    Plot posterior draws from the latitude hyperdistribution.

    """
    # Get kwargs
    kwargs = update_with_defaults(**kwargs)
    plot_kwargs = kwargs["plot"]
    gen_kwargs = kwargs["generate"]
    mu_true = gen_kwargs["latitude"]["mu"]
    sigma_true = gen_kwargs["latitude"]["sigma"]
    nlat_pts = plot_kwargs["nlat_pts"]
    nlat_samples = plot_kwargs["nlat_samples"]

    # Resample to equal weight
    samples = np.array(results.samples)
    try:
        weights = np.exp(results["logwt"] - results["logz"][-1])
    except Exception:
        weights = results["weights"]
    samples = dyfunc.resample_equal(samples, weights)

    # Function to compute the pdf for a draw
    _draw_pdf = lambda x, a, b: StarryProcess(a=a, b=b).latitude.pdf(x)
    _x = tt.dvector()
    _a = tt.dscalar()
    _b = tt.dscalar()

    # Compile the pdf function
    draw_pdf = theano.function([_x, _a, _b], _draw_pdf(_x, _a, _b))

    # The true pdf
    x = np.linspace(-89.9, 89.9, nlat_pts)
    if np.isfinite(sigma_true):
        pdf_true = 0.5 * (Normal.pdf(x, mu_true, sigma_true) +
                          Normal.pdf(x, -mu_true, sigma_true))
    else:
        # Isotropic (special case)
        pdf_true = 0.5 * np.cos(x * np.pi / 180) * np.pi / 180

    # Draw sample pdfs
    pdf = np.empty((nlat_samples, nlat_pts))
    for k in range(nlat_samples):
        idx = np.random.randint(len(samples))
        pdf[k] = draw_pdf(x, samples[idx, 1], samples[idx, 2])

    # Plot
    fig, ax = plt.subplots(1)
    for k in range(nlat_samples):
        ax.plot(x, pdf[k], "C0-", lw=1, alpha=0.05, zorder=-1)
    ax.plot(x, pdf_true, "C1-", label="truth")
    # Invisible curve whose only purpose is to add a legend entry for the samples
    ax.plot(x, np.nan * x, "C0-", label="samples")
    ax.legend(loc="upper right")
    ax.set_xlim(-90, 90)
    xticks = [-90, -75, -60, -45, -30, -15, 0, 15, 30, 45, 60, 75, 90]
    ax.set_xticks(xticks)
    ax.set_xticklabels([r"{:d}$^\circ$".format(xt) for xt in xticks])
    ax.set_xlabel("latitude", fontsize=16)
    ax.set_ylabel("probability", fontsize=16)
    # Constrain the y limits so a few sharply peaked draws don't dominate the plot
    mx1 = np.max(pdf_true)
    mx2 = np.sort(pdf.flatten())[int(0.9 * len(pdf.flatten()))]
    mx = max(2.0 * mx1, 1.2 * mx2)
    ax.set_ylim(-0.1 * mx, mx)
    ax.set_rasterization_zorder(1)
    return fig