Example #1
    def aic(self, uvdata, average_freq=True):
        """
        Returns AIC for current model and given instance of ``UVData`` class.

        :param uvdata:
            Instance of ``UVData`` class.
        :param average_freq: (optional)
            Boolean. Average over frequency when calculating AIC? (default: ``True``)

        :return:
            Value of the AIC criterion (the lower, the better).
        """
        from stats import LnLikelihood
        lnlik = LnLikelihood(uvdata, self, average_freq=average_freq)
        # AIC = 2*k - 2*ln(L), with k the number of model parameters
        return -2. * lnlik(self.p) + 2. * self.size
Example #2
    def bic(self, uvdata, average_freq=True):
        """
        Returns BIC for current model and given instance of ``UVData`` class.

        :param uvdata:
            Instance of ``UVData`` class.
        :param average_freq: (optional)
            Boolean. Average over frequency when calculating BIC? (default: ``True``)

        :return:
            Value of the BIC criterion (the lower, the better).
        """
        from stats import LnLikelihood
        lnlik = LnLikelihood(uvdata, self, average_freq=average_freq)
        # Number of data points: two (Re & Im) per usable visibility
        sample_size = 2 * uvdata.n_usable_visibilities_difmap(
            stokes=self.stokes)
        if average_freq:
            sample_size /= uvdata.nif
        # BIC = k*ln(n) - 2*ln(L), with k the number of model parameters
        return -2. * lnlik(self.p) + self.size * np.log(sample_size)
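
A minimal usage sketch (``my_uv.fits``, ``mdl_a`` and ``mdl_b`` below are hypothetical placeholders for a uv-fits file and two fitted ``Model`` instances):

uvdata = UVData("my_uv.fits")
for name, mdl in (("A", mdl_a), ("B", mdl_b)):
    print(name, "AIC:", mdl.aic(uvdata), "BIC:", mdl.bic(uvdata))
# The model with the smaller criterion value is preferred; BIC penalizes extra
# parameters more strongly when the number of visibilities is large.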
Example #3
def fit_model_with_nestle(uv_fits, model_file, components_priors, outdir=None,
                          stokes='I', **nestle_kwargs):
    """
    :param uv_fits:
        Path to uv-fits file with self-calibrated visibilities.
    :param model_file:
        Path to file with difmap model.
    :param components_priors:
        Component priors given as ppfs, with the component closest to the
        phase center going first. Iterable of dicts with parameter names as
        keys and ``(callable, args, kwargs,)`` tuples as values, where
        ``args`` & ``kwargs`` are additional arguments to the callable. Each
        callable is called as ``callable(u, *args, **kwargs)`` and should map
        a uniform sample ``u`` from [0, 1] to a parameter value, i.e. act as
        the prior's ppf.

        Example of a prior on a single component:
            {'flux': (scipy.stats.uniform.ppf, [0., 10.], dict(),),
             'bmaj': (scipy.stats.uniform.ppf, [0, 5.], dict(),),
             'e': (scipy.stats.beta.ppf, [alpha, beta], dict(),)}
        The first key results in calling ``scipy.stats.uniform.ppf(u, 0., 10.)``
        to draw the ``flux`` parameter from its prior.
    :param outdir: (optional)
        Directory to output results. If ``None`` then use cwd. (default:
        ``None``)
    :param stokes: (optional)
        Stokes parameter of the model. (default: ``'I'``)
    :param nestle_kwargs: (optional)
        Any arguments passed to ``nestle.sample`` function.

    :return:
        Results of running ``nestle.sample`` on that model.
    """
    if outdir is None:
        outdir = os.getcwd()

    mdl_file = model_file
    uv_data = UVData(uv_fits)
    mdl_dir, mdl_fname = os.path.split(mdl_file)
    comps = import_difmap_model(mdl_fname, mdl_dir)

    # Sort components by distance from phase center
    comps = sorted(comps, key=lambda x: np.sqrt(x.p[1]**2 + x.p[2]**2))

    ppfs = list()
    labels = list()
    for component_prior in components_priors:
        for comp_name in ('flux', 'x', 'y', 'bmaj', 'e', 'bpa'):
            try:
                ppfs.append(_function_wrapper(*component_prior[comp_name]))
                labels.append(comp_name)
            except KeyError:
                pass

    for ppf in ppfs:
        print(ppf.args)

    hypercube = hypercube_partial(ppfs)

    # Create model
    mdl = Model(stokes=stokes)
    # Add components to model
    mdl.add_components(*comps)
    loglike = LnLikelihood(uv_data, mdl)
    time0 = time.time()
    result = nestle.sample(loglikelihood=loglike, prior_transform=hypercube,
                           ndim=mdl.size, npoints=50, method='multi',
                           callback=nestle.print_progress, **nestle_kwargs)
    print("Time spent : {}".format(time.time()-time0))
    samples = nestle.resample_equal(result.samples, result.weights)
    # Save re-weighted samples from posterior to specified ``outdir``
    # directory
    np.savetxt(os.path.join(outdir, 'samples.txt'), samples)
    fig = corner.corner(samples, show_titles=True, labels=labels,
                        quantiles=[0.16, 0.5, 0.84], title_fmt='.3f')
    # Save corner plot of posterior samples to specified ``outdir``
    # directory
    fig.savefig(os.path.join(outdir, "corner.png"), bbox_inches='tight',
                dpi=200)

    return result
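
A hedged usage sketch (the uv-fits path and model file are hypothetical; the priors follow the form documented in the docstring above):

import scipy.stats
components_priors = [
    # One circular Gaussian: flux ~ U(0, 10), x, y ~ U(-5, 5), bmaj ~ U(0, 5)
    {'flux': (scipy.stats.uniform.ppf, [0., 10.], dict()),
     'x': (scipy.stats.uniform.ppf, [-5., 10.], dict()),
     'y': (scipy.stats.uniform.ppf, [-5., 10.], dict()),
     'bmaj': (scipy.stats.uniform.ppf, [0., 5.], dict())}
]
result = fit_model_with_nestle("selfcal.fits", "difmap.mdl", components_priors)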
Example #4
# Pixel coordinate grids centred on the image centre
y, z = np.meshgrid(np.arange(imsize[0]), np.arange(imsize[1]))
y = y - imsize[0] / 2. + 0.5
z = z - imsize[1] / 2. + 0.5
# Convert pixel offsets to mas and then to radians
y_mas = y * mas_in_pix
z_mas = z * mas_in_pix
y_rad = mas_to_rad * y_mas
z_rad = mas_to_rad * z_mas
# Zero negative/unphysical pixels and keep only the brightest ~10% of the rest
image[image < 0] = 0
image[image > 10.0] = 0
image[image < np.percentile(image[image > 0].ravel(), 90)] = 0
icomp = ModelImageComponent(image, y_rad[0, :], z_rad[:, 0])
model = Model(stokes='I')
model.add_component(icomp)
uv = uvdata_core.uv

lnlik = LnLikelihood(uvdata_core, model, average_freq=True, amp_only=False)

import emcee


def lnprior(p):
    # Flat log-prior with hard bounds on the model parameters:
    # returns -inf outside the allowed ranges and 0.0 inside.
    if not 1.0 < p[0] < 5.0:
        return -np.inf
    if not -20 < p[1] < 20:
        return -np.inf
    if not -20 < p[2] < 20:
        return -np.inf
    if not 0.1 < p[3] < 2.0:
        return -np.inf
    if not 0.0 < p[4] < 2*np.pi:
        return -np.inf
    return 0.0
Example #5
        raise Exception("Unknown type of component!")

cube0 = np.array(cube0)
cube1 = np.array(cube1)


def hypercube(cube, ndim, nparams):
    # Map the unit hypercube onto the prior ranges: scale each coordinate by
    # cube0[i] and shift it by cube1[i] (MultiNest-style prior transform)
    for i in range(ndim):
        cube[i] = cube0[i] * cube[i] + cube1[i]


# Create model
mdl = Model(stokes=stokes)
# Add components to model
mdl.add_components(*comps)
loglike = LnLikelihood(uv_data, mdl)


def show(filepath):
    if os.name == 'mac' or platform == 'darwin':
        subprocess.call(('open', filepath))
    elif os.name == 'nt' or platform == 'win32':
        os.startfile(filepath)
    elif platform.startswith('linux'):
        subprocess.call(('xdg-open', filepath))


def myloglike(cube, ndim, nparams):
    return loglike(cube)
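
The ``(cube, ndim, nparams)`` signatures of ``hypercube`` and ``myloglike`` match the callbacks expected by PyMultiNest; a hedged sketch of running the sampler (the ``pymultinest`` call and output basename are assumptions, not part of the original snippet):

import pymultinest

os.makedirs('chains', exist_ok=True)
pymultinest.run(myloglike, hypercube, len(cube0),
                outputfiles_basename='chains/fit_',
                resume=False, verbose=True)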

Example #6
def fit_model_with_ptmcmc(uv_fits, mdl_file, outdir=None, nburnin=1000,
                          nproduction=10000, nwalkers=50,
                          samples_file=None, stokes='I', use_weights=False,
                          ntemps=10, thin=10):

    if outdir is None:
        outdir = os.getcwd()

    # Initialize ``UVData`` instance
    uvdata = UVData(uv_fits)

    # Load difmap model
    mdl_dir, mdl_fname = os.path.split(mdl_file)
    comps = import_difmap_model(mdl_fname, mdl_dir)
    # Sort components by distance from phase center
    comps = sorted(comps, key=lambda x: np.sqrt(x.p[1]**2 + x.p[2]**2))

    # Cycle for components, add prior and calculate std for initial position of
    # walkers: 3% of flux for flux, 1% of size for position, 3% of size for
    # size, 0.01 for e, 0.01 for bpa
    p0_dict = dict()
    for comp in comps:
        print(comp)
        if isinstance(comp, EGComponent):
            # flux_high = 2 * comp.p[0]
            flux_high = 10. * comp.p[0]
            # bmaj_high = 4 * comp.p[3]
            bmaj_high = 4.
            if comp.size == 6:
                comp.add_prior(flux=(sp.stats.uniform.logpdf, [0., flux_high], dict(),),
                               bmaj=(sp.stats.uniform.logpdf, [0, bmaj_high], dict(),),
                               e=(sp.stats.uniform.logpdf, [0, 1.], dict(),),
                               bpa=(sp.stats.uniform.logpdf, [0, np.pi], dict(),))
                p0_dict[comp] = [0.03 * comp.p[0],
                                 0.01 * comp.p[3],
                                 0.01 * comp.p[3],
                                 0.03 * comp.p[3],
                                 0.01,
                                 0.01]
            elif comp.size == 4:
                # flux_high = 2 * comp.p[0]
                flux_high = 10. * comp.p[0]
                # bmaj_high = 4 * comp.p[3]
                bmaj_high = 4.
                comp.add_prior(flux=(sp.stats.uniform.logpdf, [0., flux_high], dict(),),
                               bmaj=(sp.stats.uniform.logpdf, [0, bmaj_high], dict(),))
                p0_dict[comp] = [0.03 * comp.p[0],
                                 0.01 * comp.p[3],
                                 0.01 * comp.p[3],
                                 0.03 * comp.p[3]]
            else:
                raise Exception("Gauss component should have size 4 or 6!")
        elif isinstance(comp, DeltaComponent):
            flux_high = 5 * comp.p[0]
            comp.add_prior(flux=(sp.stats.uniform.logpdf, [0., flux_high], dict(),))
            p0_dict[comp] = [0.03 * comp.p[0],
                             0.01,
                             0.01]
        else:
            raise Exception("Unknown type of component!")

    # Construct labels for corner and truth values (of difmap models)
    labels = list()
    truths = list()
    for comp in comps:
        truths.extend(comp.p)
        if isinstance(comp, EGComponent):
            if comp.size == 6:
                labels.extend([r'$flux$', r'$x$', r'$y$', r'$bmaj$', r'$e$', r'$bpa$'])
            elif comp.size == 4:
                labels.extend([r'$flux$', r'$x$', r'$y$', r'$bmaj$'])
            else:
                raise Exception("Gauss component should have size 4 or 6!")
        elif isinstance(comp, DeltaComponent):
            labels.extend([r'$flux$', r'$x$', r'$y$'])
        else:
            raise Exception("Unknown type of component!")

    # Create model
    mdl = Model(stokes=stokes)
    # Add components to model
    mdl.add_components(*comps)

    # Create likelihood for data & model
    lnlik = LnLikelihood(uvdata, mdl, use_weights=use_weights,)
    lnpr = LnPrior(mdl)
    ndim = mdl.size

    # Initialize pool of walkers
    p_std = list()
    for comp in comps:
        p_std.extend(p0_dict[comp])
    print("Initial std of parameters: {}".format(p_std))
    p0 = emcee.utils.sample_ball(mdl.p, p_std,
                                 size=ntemps*nwalkers).reshape((ntemps,
                                                                nwalkers, ndim))
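    # Geometric ladder of inverse temperatures, beta_i = 2**(-i/2): each chain
    # is a factor sqrt(2) "hotter" than the previous one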
    betas = np.exp(np.linspace(0, -(ntemps - 1) * 0.5 * np.log(2), ntemps))
    # Initialize sampler
    ptsampler = emcee.PTSampler(ntemps, nwalkers, ndim, lnlik, lnpr,
                                betas=betas)

    # Burning in
    print("Burn-in")
    for p, lnprob, lnlike in ptsampler.sample(p0, iterations=nburnin):
        pass
    print("Acceptance fraction for burn-in:", ptsampler.acceptance_fraction)
    ptsampler.reset()

    print("Production")
    for p, lnprob, lnlike in ptsampler.sample(p, lnprob0=lnprob, lnlike0=lnlike,
                                              iterations=nproduction,
                                              thin=thin):
        pass
    print("Acceptance fraction for production:", ptsampler.acceptance_fraction)

    # Plot corner
    fig, axes = plt.subplots(nrows=ndim, ncols=ndim)
    fig.set_size_inches(14.5, 14.5)

    # Choose fontsize
    if len(comps) <= 2:
        fontsize = 16
    elif 2 < len(comps) <= 4:
        fontsize = 13
    else:
        fontsize = 11

    # Use zero-temperature chain
    samples = ptsampler.flatchain[0, :, :]

    corner.corner(samples, fig=fig, labels=labels,
                  truths=truths, show_titles=True,
                  title_kwargs={'fontsize': fontsize},
                  quantiles=[0.16, 0.5, 0.84],
                  label_kwargs={'fontsize': fontsize}, title_fmt=".3f")
    fig.savefig(os.path.join(outdir, 'corner_mcmc_x.png'), bbox_inches='tight',
                dpi=200)
    if not samples_file:
        samples_file = 'mcmc_samples.txt'
    print("Saving thinned samples to {}...".format(samples_file))
    np.savetxt(samples_file, samples)
    return ptsampler
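
A hedged usage sketch (paths and sampler settings are hypothetical placeholders; the output directory is assumed to exist):

sampler = fit_model_with_ptmcmc("selfcal.fits", "difmap.mdl",
                                outdir="ptmcmc_fit", nburnin=500,
                                nproduction=5000, nwalkers=100,
                                ntemps=5, thin=10)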