Code example #1
File: nest.py  Project: aasensio/UltraNest
import time

import numpy
from numpy import exp, log
import matplotlib.pyplot as plt

from nested_sampling.nested_integrator import nested_integrator
from nested_sampling.nested_sampler import NestedSampler
from nested_sampling.samplers.rejection import RejectionConstrainer
from nested_sampling.samplers.friends import FriendsConstrainer
from nested_sampling.postprocess import equal_weighted_posterior
# OptimizeConstrainer, GalileanConstrainer, MCMCConstrainer, GaussProposal,
# MultiScaleProposal and marginal_plots are assumed to be importable from the
# corresponding nested_sampling submodules (their paths are not shown in this excerpt).

def run_nested(**config):
	ndim = config['ndim']
	
	def priortransform(u):
		assert len(u) == ndim, u
		return u
	if 'seed' in config:
		numpy.random.seed(config['seed'])

	# can use these directly
	loglikelihood = config['loglikelihood']
	nlive_points = config['nlive_points']
	method = config['draw_method']
	if method.startswith('naive'):
		constrainer = RejectionConstrainer()
	elif method.startswith('maxfriends'): # maximum distance
		constrainer = FriendsConstrainer(rebuild_every=nlive_points, radial=False, force_shrink=config['force_shrink'], verbose=False)
	elif method.startswith('radfriends'): # radial distance
		constrainer = FriendsConstrainer(rebuild_every=nlive_points, radial=True, metric = 'euclidean', jackknife=config['jackknife'], force_shrink=config['force_shrink'], verbose=False)
	elif method.startswith('supfriends'): # supreme distance
		constrainer = FriendsConstrainer(rebuild_every=nlive_points, radial=True, metric = 'chebyshev', jackknife=config['jackknife'], force_shrink=config['force_shrink'], verbose=False)
	elif method.startswith('optimize'):
		constrainer = OptimizeConstrainer()
	elif method.startswith('galilean'):
		velocity_scale = config['velocity_scale']
		constrainer = GalileanConstrainer(nlive_points = nlive_points, ndim = ndim, velocity_scale = velocity_scale)
	elif method.startswith('mcmc'):
		adapt = config['adapt']
		scale = config['scale']
		if config['proposer'] == 'gauss':
			proposer = GaussProposal(adapt=adapt, scale = scale)
		elif config['proposer'] == 'multiscale':
			proposer = MultiScaleProposal(adapt=adapt, scale=scale)
		constrainer = MCMCConstrainer(proposer = proposer)
	else:
		raise NotImplementedError('draw_method "%s" not implemented' % method)
	print('configuring NestedSampler')
	starttime = time.time()
	sampler = NestedSampler(nlive_points = nlive_points, 
		priortransform=priortransform, loglikelihood=loglikelihood, 
		draw_constrained = constrainer.draw_constrained, ndim=ndim)
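	# hand the constrainer a back-reference to the sampler (e.g. so it can see the live points)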
	constrainer.sampler = sampler
	print('running nested_integrator to tolerance 0.5')
	assert config['integrator'] == 'normal', config['integrator']
	result = nested_integrator(tolerance=0.5, sampler=sampler, max_samples=2000000)
	endtime = time.time()
	if hasattr(constrainer, 'stats'):
		constrainer.stats()

	output_basename = config['output_basename']
	
	if config.get('seed', 0) == 0:
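		# drawn samples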
		x = numpy.array([x for _, x, _ in sampler.samples])
		y = exp([l for _, _, l in sampler.samples])
		plt.plot(x[:,0], y, 'x', color='blue', ms=1)
		plt.savefig(output_basename + 'nested_samples.pdf', bbox_inches='tight')
		plt.close()

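		# L vs V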
		L = numpy.array([L for _, _, L, _ in result['weights']])
		width = numpy.array([w for _, _, _, w in result['weights']])
		plt.plot(width, L, 'x-', color='blue', ms=1, label='Z=%.2f (%.2f)' % (
			result['logZ'], log(exp(L + width).sum())))
		fromleft = exp(L + width)[::-1].cumsum()
		fromleft /= fromleft.max()
		mask = (fromleft < 0.99)[::-1]
		if mask.any():
			i = width[mask].argmax()
			plt.ylim(L.max() - log(1000), L.max())
			plt.fill_between(width[mask], L[mask], L.max() - log(1000), color='grey', alpha=0.3)
		plt.xlabel('prior mass')
		plt.ylabel('likelihood')
		plt.legend(loc='best')
		plt.savefig(output_basename + 'nested_integral.pdf', bbox_inches='tight')
		plt.close()
	
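		# posteriors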
		posterioru, posteriorx = equal_weighted_posterior(result['weights'])
		plt.figure(figsize=(ndim*2, ndim*2))
		marginal_plots(weights=result['weights'], ndim=ndim)
		plt.savefig(output_basename + 'posterior.pdf', bbox_inches='tight')
		plt.close()

	return dict(
		Z_computed = float(result['logZ']),
		Z_computed_err = float(result['logZerr']),
		niterations = result['niterations'],
		duration = endtime - starttime,
	)
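
A minimal sketch of how this run_nested might be invoked (not part of nest.py; the Gaussian loglikelihood and the configuration values below are illustrative, chosen only to satisfy the keys the function reads above):

import numpy

def loglikelihood(x):
	# toy Gaussian blob centred at 0.5 in each dimension
	return -0.5 * numpy.sum(((x - 0.5) / 0.1)**2)

result = run_nested(ndim=2, nlive_points=400, draw_method='radfriends',
	jackknife=False, force_shrink=False, loglikelihood=loglikelihood,
	integrator='normal', output_basename='demo_', seed=1)
print(result['Z_computed'], '+/-', result['Z_computed_err'])
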
Code example #2
import numpy
from numpy import exp
import matplotlib.pyplot as plt

from nested_sampling.nested_integrator import nested_integrator
from nested_sampling.nested_sampler import NestedSampler
from nested_sampling.samplers.rejection import RejectionConstrainer
from nested_sampling.samplers.friends import FriendsConstrainer
import nested_sampling.postprocess as post

# priortransform and loglikelihood are assumed to be defined earlier in the
# source file; they are not part of this excerpt.
#constrainer = RejectionConstrainer()
constrainer = FriendsConstrainer(radial=True, metric='euclidean', jackknife=True)
sampler = NestedSampler(nlive_points = 400, 
	priortransform=priortransform, loglikelihood=loglikelihood, 
	draw_constrained = constrainer.draw_constrained, ndim=2)
constrainer.sampler = sampler
results = nested_integrator(tolerance=0.1, sampler=sampler)

# add contours?

usamples, xsamples = post.equal_weighted_posterior(results['weights'])

u, x, L, width = zip(*results['weights'])
x, y = numpy.array(x).T
weight = numpy.add(L, width)
#plt.plot(xsamples[:,0], xsamples[:,1], '.', color='green', alpha=0.1)
#plt.hexbin(x, y, exp(weight - weight.max()), gridsize=40, cmap=plt.cm.RdBu_r, 
#	vmin=0, vmax=1)

#x, y = numpy.array(xsamples).T
#plt.hexbin(x, y, gridsize=40, cmap=plt.cm.RdBu_r, vmax=len(x)/(40.), vmin=0)

# create contours using the lowest values, always summing up until 1%, 10%, 50%
# is contained
z = exp(weight - weight.max()).cumsum()
z /= z.max()
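
A hedged sketch of the contour construction described by the comment above (the threshold extraction itself is not in the original snippet): sort the samples by posterior weight, accumulate the smallest ones, and read off the likelihood values at which 1%, 10% and 50% of the mass has been swallowed; those values can then serve as contour levels.

order = numpy.argsort(weight)               # smallest posterior weights first
zsort = exp(weight[order] - weight.max()).cumsum()
zsort /= zsort.max()
Lsort = numpy.asarray(L)[order]
levels = [Lsort[numpy.searchsorted(zsort, f)] for f in (0.01, 0.10, 0.50)]
#plt.tricontour(x, y, numpy.asarray(L), levels=sorted(levels), colors='k')
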
Code example #3
File: lighthouse.py  Project: mattpitkin/UltraNest
import numpy
from numpy import exp
import matplotlib.pyplot as plt

from nested_sampling.nested_integrator import nested_integrator
from nested_sampling.nested_sampler import NestedSampler
from nested_sampling.samplers.friends import FriendsConstrainer
import nested_sampling.postprocess as post

# priortransform and loglikelihood are assumed to be defined earlier in
# lighthouse.py; they are not part of this excerpt.
#constrainer = RejectionConstrainer()
constrainer = FriendsConstrainer(radial=True,
                                 metric='euclidean',
                                 jackknife=True)
sampler = NestedSampler(nlive_points=400,
                        priortransform=priortransform,
                        loglikelihood=loglikelihood,
                        draw_constrained=constrainer.draw_constrained,
                        ndim=2)
constrainer.sampler = sampler
results = nested_integrator(tolerance=0.5, sampler=sampler)

# add contours?

usamples, xsamples = post.equal_weighted_posterior(results['weights'])

u, x, L, width = list(zip(*results['weights']))
x, y = numpy.array(x).T
weight = numpy.add(L, width)
#plt.plot(xsamples[:,0], xsamples[:,1], '.', color='green', alpha=0.1)
#plt.hexbin(x, y, exp(weight - weight.max()), gridsize=40, cmap=plt.cm.RdBu_r,
#	vmin=0, vmax=1)

#x, y = numpy.array(xsamples).T
#plt.hexbin(x, y, gridsize=40, cmap=plt.cm.RdBu_r, vmax=len(x)/(40.), vmin=0)

# create contours using the lowest values, always summing up until 1%, 10%, 50%
# is contained
z = exp(weight - weight.max()).cumsum()
z /= z.max()
Code example #4
import time

import numpy
from numpy import exp, log
import matplotlib.pyplot as plt

import nested_sampling.samplers.hybrid
from nested_sampling.nested_integrator import nested_integrator
from nested_sampling.nested_sampler import NestedSampler
from nested_sampling.samplers.rejection import RejectionConstrainer
from nested_sampling.samplers.friends import FriendsConstrainer
from nested_sampling.postprocess import equal_weighted_posterior
# The remaining constrainers, proposals, termination criteria and marginal_plots
# used below are assumed to be importable from the corresponding nested_sampling
# submodules (their paths are not shown in this excerpt).


def run_nested(**config):
    ndim = config['ndim']

    def priortransform(u):
        assert len(u) == ndim, u
        return u

    if 'seed' in config:
        numpy.random.seed(config['seed'])

    print('Configuring for %s, with seed=%s ...' %
          (config.get('output_basename'), config.get('seed')))
    # can use these directly
    loglikelihood = config['loglikelihood']
    nlive_points = config['nlive_points']
    method = config['draw_method']
    if method.startswith('naive'):
        constrainer = RejectionConstrainer()
    elif method.startswith('maxfriends'):  # maximum distance
        constrainer = FriendsConstrainer(rebuild_every=nlive_points,
                                         radial=False,
                                         force_shrink=config['force_shrink'],
                                         verbose=False)
    elif method.startswith('radfriends2'):  # radial distance
        constrainer = FriendsConstrainer2(rebuild_every=nlive_points,
                                          radial=True,
                                          metric='euclidean',
                                          jackknife=config['jackknife'],
                                          force_shrink=config['force_shrink'],
                                          verbose=False)
    elif method.startswith('supfriends2'):  # supreme distance
        constrainer = FriendsConstrainer2(rebuild_every=nlive_points,
                                          radial=True,
                                          metric='chebyshev',
                                          jackknife=config['jackknife'],
                                          force_shrink=config['force_shrink'],
                                          verbose=False)
    elif method.startswith('radfriends'):  # radial distance
        constrainer = FriendsConstrainer(
            rebuild_every=nlive_points,
            radial=True,
            metric='euclidean',
            jackknife=config['jackknife'],
            force_shrink=config['force_shrink'],
            keep_phantom_points=config.get('keep_phantom_points', False),
            optimize_phantom_points=config.get('optimize_phantom_points',
                                               False),
            verbose=False)
    elif method.startswith('mlfriends'):  # metric-learning distance
        constrainer = MetricLearningFriendsConstrainer(
            metriclearner=config['metriclearner'],
            keep_phantom_points=config.get('keep_phantom_points', False),
            optimize_phantom_points=config.get('optimize_phantom_points',
                                               False),
            force_shrink=config['force_shrink'],
            rebuild_every=config.get('rebuild_every', nlive_points),
            verbose=False)
    elif method.startswith('hradfriends'):  # radial distance
        friends_filter = FriendsConstrainer(
            rebuild_every=nlive_points,
            radial=True,
            metric='euclidean',
            jackknife=config['jackknife'],
            force_shrink=config['force_shrink'],
            keep_phantom_points=config.get('keep_phantom_points', False),
            optimize_phantom_points=config.get('optimize_phantom_points',
                                               False),
            verbose=False)
        if config['proposer'] == 'gauss':
            proposer = nested_sampling.samplers.hybrid.FilteredGaussProposal(
                adapt=True, scale=0.1)
        elif config['proposer'] == 'svargauss':
            proposer = nested_sampling.samplers.hybrid.FilteredSVarGaussProposal(
                adapt=True, scale=0.1)
        elif config['proposer'] == 'mahgauss':
            proposer = nested_sampling.samplers.hybrid.FilteredMahalanobisGaussProposal(
                adapt=True, scale=0.1)
        elif config['proposer'] == 'harm':
            proposer = nested_sampling.samplers.hybrid.FilteredUnitHARMProposal(
                adapt=False, scale=1)
        elif config['proposer'] == 'mahharm':
            proposer = nested_sampling.samplers.hybrid.FilteredMahalanobisHARMProposal(
                adapt=False, scale=1)
        elif config['proposer'] == 'ptharm':
            proposer = nested_sampling.samplers.hybrid.FilteredPointHARMProposal(
                adapt=False, scale=10)
        elif config['proposer'] == 'ess':
            proposer = nested_sampling.samplers.hybrid.FilteredEllipticalSliceProposal(
            )
        else:
            assert False, config['proposer']
        if config['nsteps'] < 0:
            filtered_mcmc = nested_sampling.samplers.hybrid.FilteredVarlengthMCMCConstrainer(
                proposer=proposer, nsteps_initial=-config['nsteps'])
        else:
            filtered_mcmc = nested_sampling.samplers.hybrid.FilteredMCMCConstrainer(
                proposer=proposer,
                nsteps=config['nsteps'],
                nminaccepts=config.get('nminaccepts', 0))
        constrainer = nested_sampling.samplers.hybrid.HybridFriendsConstrainer(
            friends_filter,
            filtered_mcmc,
            switchover_efficiency=config.get('switchover_efficiency', 0))
    elif method.startswith('hmlfriends'):  # radial distance
        friends_filter = MetricLearningFriendsConstrainer(
            rebuild_every=nlive_points,
            metriclearner=config['metriclearner'],
            keep_phantom_points=config.get('keep_phantom_points', False),
            optimize_phantom_points=config.get('optimize_phantom_points',
                                               False),
            force_shrink=config['force_shrink'],
            verbose=False)
        if config['proposer'] == 'gauss':
            proposer = nested_sampling.samplers.hybrid.FilteredGaussProposal(
                adapt=True, scale=0.1)
        elif config['proposer'] == 'harm':
            proposer = nested_sampling.samplers.hybrid.FilteredUnitHARMProposal(
                adapt=False, scale=1)
        elif config['proposer'] == 'mahharm':
            proposer = nested_sampling.samplers.hybrid.FilteredMahalanobisHARMProposal(
                adapt=False, scale=1)
        elif config['proposer'] == 'ptharm':
            proposer = nested_sampling.samplers.hybrid.FilteredPointHARMProposal(
                adapt=False, scale=10)
        elif config['proposer'] == 'diffptharm':
            proposer = nested_sampling.samplers.hybrid.FilteredDeltaPointHARMProposal(
                adapt=False, scale=10)
        elif config['proposer'] == 'ess':
            proposer = nested_sampling.samplers.hybrid.FilteredEllipticalSliceProposal(
            )
        else:
            assert False, config['proposer']
        if config['nsteps'] < 0:
            filtered_mcmc = nested_sampling.samplers.hybrid.FilteredVarlengthMCMCConstrainer(
                proposer=proposer, nsteps_initial=-config['nsteps'])
        else:
            filtered_mcmc = nested_sampling.samplers.hybrid.FilteredMCMCConstrainer(
                proposer=proposer,
                nsteps=config['nsteps'],
                nminaccepts=config.get('nminaccepts', 0))
        constrainer = nested_sampling.samplers.hybrid.HybridMLFriendsConstrainer(
            friends_filter,
            filtered_mcmc,
            switchover_efficiency=config.get('switchover_efficiency', 0),
            unfiltered=config.get('unfiltered', False))
    elif method.startswith('hmultiellipsoid'):  # multi-ellipsoid
        if config['proposer'] == 'gauss':
            proposer = nested_sampling.samplers.hybrid.FilteredGaussProposal(
                adapt=True, scale=0.1)
        elif config['proposer'] == 'svargauss':
            proposer = nested_sampling.samplers.hybrid.FilteredSVarGaussProposal(
                adapt=True, scale=0.1)
        elif config['proposer'] == 'mahgauss':
            proposer = nested_sampling.samplers.hybrid.FilteredMahalanobisGaussProposal(
                adapt=True, scale=0.1)
        elif config['proposer'] == 'harm':
            proposer = nested_sampling.samplers.hybrid.FilteredUnitHARMProposal(
                adapt=False, scale=1)
        elif config['proposer'] == 'mahharm':
            proposer = nested_sampling.samplers.hybrid.FilteredMahalanobisHARMProposal(
                adapt=False, scale=1)
        elif config['proposer'] == 'ptharm':
            proposer = nested_sampling.samplers.hybrid.FilteredPointHARMProposal(
                adapt=False, scale=10)
        elif config['proposer'] == 'diffptharm':
            proposer = nested_sampling.samplers.hybrid.FilteredDeltaPointHARMProposal(
                adapt=False, scale=10)
        elif config['proposer'] == 'ess':
            proposer = nested_sampling.samplers.hybrid.FilteredEllipticalSliceProposal(
            )
        else:
            assert False, config['proposer']
        if config['nsteps'] < 0:
            filtered_mcmc = nested_sampling.samplers.hybrid.FilteredVarlengthMCMCConstrainer(
                proposer=proposer, nsteps_initial=-config['nsteps'])
        else:
            filtered_mcmc = nested_sampling.samplers.hybrid.FilteredMCMCConstrainer(
                proposer=proposer,
                nsteps=config['nsteps'],
                nminaccepts=config.get('nminaccepts', 0))
        constrainer = nested_sampling.samplers.hybrid.HybridMultiEllipsoidConstrainer(
            filtered_mcmc,
            enlarge=config.get('enlarge', 1.2),
            switchover_efficiency=config.get('switchover_efficiency', 0))
    elif method.startswith('hmlmultiellipsoid'):  # multi-ellipsoid
        if config['proposer'] == 'gauss':
            proposer = nested_sampling.samplers.hybrid.FilteredGaussProposal(
                adapt=True, scale=0.1)
        elif config['proposer'] == 'svargauss':
            proposer = nested_sampling.samplers.hybrid.FilteredSVarGaussProposal(
                adapt=True, scale=0.1)
        elif config['proposer'] == 'mahgauss':
            proposer = nested_sampling.samplers.hybrid.FilteredMahalanobisGaussProposal(
                adapt=True, scale=0.1)
        elif config['proposer'] == 'harm':
            proposer = nested_sampling.samplers.hybrid.FilteredUnitHARMProposal(
                adapt=False, scale=1)
        elif config['proposer'] == 'mahharm':
            proposer = nested_sampling.samplers.hybrid.FilteredMahalanobisHARMProposal(
                adapt=False, scale=1)
        elif config['proposer'] == 'ptharm':
            proposer = nested_sampling.samplers.hybrid.FilteredPointHARMProposal(
                adapt=False, scale=10)
        elif config['proposer'] == 'diffptharm':
            proposer = nested_sampling.samplers.hybrid.FilteredDeltaPointHARMProposal(
                adapt=False, scale=10)
        elif config['proposer'] == 'ess':
            proposer = nested_sampling.samplers.hybrid.FilteredEllipticalSliceProposal(
            )
        else:
            assert False, config['proposer']
        if config['nsteps'] < 0:
            filtered_mcmc = nested_sampling.samplers.hybrid.FilteredVarlengthMCMCConstrainer(
                proposer=proposer, nsteps_initial=-config['nsteps'])
        else:
            filtered_mcmc = nested_sampling.samplers.hybrid.FilteredMCMCConstrainer(
                proposer=proposer,
                nsteps=config['nsteps'],
                nminaccepts=config.get('nminaccepts', 0))
        constrainer = nested_sampling.samplers.hybrid.HybridMLMultiEllipsoidConstrainer(
            filtered_mcmc,
            metriclearner=config['metriclearner'],
            switchover_efficiency=config.get('switchover_efficiency', 0),
            enlarge=config.get('enlarge', 1.2),
            bs_enabled=config.get('bs_enabled', False),
        )
    elif method.startswith('supfriends'):  # supreme distance
        constrainer = FriendsConstrainer(rebuild_every=nlive_points,
                                         radial=True,
                                         metric='chebyshev',
                                         jackknife=config['jackknife'],
                                         force_shrink=config['force_shrink'],
                                         verbose=False)
    # These two do not work, because distances computed at a later time, after
    # an update, would be rescaled based on the new points; we would need to
    # store the metric at update time.
    #elif method.startswith('sradfriends'):
    #	constrainer = FriendsConstrainer(rebuild_every=nlive_points, radial=True, metric = 'seuclidean', jackknife=config['jackknife'], force_shrink=config['force_shrink'], verbose=False)
    #elif method.startswith('mahfriends'):
    #	constrainer = FriendsConstrainer(rebuild_every=nlive_points, radial=True, metric = 'mahalanobis', jackknife=config['jackknife'], force_shrink=config['force_shrink'], verbose=False)
    elif method.startswith('optimize'):
        constrainer = OptimizeConstrainer()
    elif method.startswith('ellipsoid'):
        constrainer = EllipsoidConstrainer()
    elif method.startswith('multiellipsoid'):
        constrainer = MultiEllipsoidConstrainer()
    elif method.startswith('galilean'):
        velocity_scale = config['velocity_scale']
        constrainer = GalileanConstrainer(nlive_points=nlive_points,
                                          ndim=ndim,
                                          velocity_scale=velocity_scale)
    elif method.startswith('mcmc'):
        adapt = config['adapt']
        scale = config['scale']
        if config['proposer'] == 'gauss':
            proposer = GaussProposal(adapt=adapt, scale=scale)
        elif config['proposer'] == 'multiscale':
            proposer = MultiScaleProposal(adapt=adapt, scale=scale)
        constrainer = MCMCConstrainer(proposer=proposer,
                                      nsteps=config['nsteps'],
                                      nminaccepts=config.get('nminaccepts', 0))
    else:
        raise NotImplementedError('draw_method "%s" not implemented' % method)

    print('configuring TerminationCriterion')
    if config.get('unlimited_sampling', False):
        max_samples = None
    else:
        max_samples = 2000000

    if config['integrator'] == 'normal':
        termination = TerminationCriterion(tolerance=0.5)
    elif config['integrator'] == 'normal-max':
        termination = MaxErrorCriterion(tolerance=0.5)
    elif config['integrator'] == 'normal-verysmall':
        termination = TerminationCriterion(tolerance=0.5,
                                           maxRemainderFraction=0.001)
    elif config['integrator'] == 'normal-bs':
        termination = BootstrappedCriterion(tolerance=0.5)
        #result = nested_integrator(tolerance=0.5, sampler=sampler, max_samples=max_samples, need_small_remainder=False, need_robust_remainder_error=True)
    elif config['integrator'] == 'normal+bs2':
        termination = BootstrappedCriterion(tolerance=0.5,
                                            maxRemainderFraction=0.5)
    elif config['integrator'] == 'normal+bs3':
        termination = BootstrappedCriterion(tolerance=0.5,
                                            maxRemainderFraction=1 / 3.)
    elif config['integrator'] == 'normal+bs10':
        termination = BootstrappedCriterion(tolerance=0.5,
                                            maxRemainderFraction=1 / 10.)
    elif config['integrator'] == 'normal-rbs3':
        termination = RememberingBootstrappedCriterion(tolerance=0.5,
                                                       memory_length=3)
    elif config['integrator'] == 'normal-rbs5':
        termination = RememberingBootstrappedCriterion(tolerance=0.5,
                                                       memory_length=5)
    elif config['integrator'] == 'normal+rbs32':
        termination = RememberingBootstrappedCriterion(
            tolerance=0.5, memory_length=3, maxRemainderFraction=0.5)
    elif config['integrator'] == 'normal-dbs11':
        termination = DecliningBootstrappedCriterion(
            tolerance=0.5, required_decrease=1., required_decrease_scatter=1.)
    elif config['integrator'] == 'normal-dbs22':
        termination = DecliningBootstrappedCriterion(
            tolerance=0.5,
            required_decrease=0.5,
            required_decrease_scatter=0.5)
    #elif config['integrator'] == 'normal-dbs31':
    #	termination = DecliningBootstrappedCriterion(tolerance=0.5, required_decrease=1./3., required_decrease_scatter=1.)
    elif config['integrator'] == 'normal-dbs33':
        termination = DecliningBootstrappedCriterion(
            tolerance=0.5,
            required_decrease=1. / 3.,
            required_decrease_scatter=1. / 3.)
    elif config['integrator'] == 'normal-dbs03':
        termination = DecliningBootstrappedCriterion(
            tolerance=0.5,
            required_decrease=0.,
            required_decrease_scatter=1. / 3.)
    elif config['integrator'] == 'normal-dbs01':
        termination = DecliningBootstrappedCriterion(
            tolerance=0.5, required_decrease=0., required_decrease_scatter=1.)
    elif config['integrator'] == 'normal-dbs10':
        termination = DecliningBootstrappedCriterion(
            tolerance=0.5, required_decrease=1., required_decrease_scatter=0.)
    elif config['integrator'] == 'normal-nbs':
        termination = NoisyBootstrappedCriterion(tolerance=0.5)
    elif config['integrator'] == 'normal-cnbs':
        termination = NoisyBootstrappedCriterion(tolerance=0.5,
                                                 conservative=True)
    elif config['integrator'] == 'normal-ndbs10':
        termination = NoiseDetectingBootstrappedCriterion(
            tolerance=0.5, maxNoisyRemainder=0.1)
    elif config['integrator'] == 'normal-ndbs100':
        termination = NoiseDetectingBootstrappedCriterion(
            tolerance=0.5, maxNoisyRemainder=0.01)
    else:
        assert config['integrator'] == 'normal', config['integrator']
    # only record for the first seed
    termination.plot = config.get('seed', 0) == 0

    print('configuring NestedSampler')
    starttime = time.time()
    if hasattr(constrainer, 'get_Lmax'):
        constrainer_get_Lmax = constrainer.get_Lmax
    else:
        constrainer_get_Lmax = None
    sampler = NestedSampler(nlive_points=nlive_points,
                            priortransform=priortransform,
                            loglikelihood=loglikelihood,
                            draw_constrained=constrainer.draw_constrained,
                            ndim=ndim,
                            constrainer_get_Lmax=constrainer_get_Lmax)
    constrainer.sampler = sampler
    print('running nested_integrator to tolerance 0.5')
    result = nested_integrator(sampler=sampler,
                               max_samples=max_samples,
                               terminationcriterion=termination)

    endtime = time.time()
    if hasattr(constrainer, 'stats'):
        constrainer.stats()

    output_basename = config['output_basename']
    #numpy.savetxt(output_basename + 'convergencetests.txt.gz', result['convergence_tests'])

    if config.get('seed', 0) == 0:
        # drawn samples
        print('plotting drawn samples...')
        x = numpy.array([x for _, x, _ in sampler.samples])
        y = exp([l for _, _, l in sampler.samples])
        plt.plot(x[:, 0], y, 'x', color='blue', ms=1)
        plt.savefig(output_basename + 'nested_samples.pdf',
                    bbox_inches='tight')
        plt.close()

        # L vs V
        print('plotting V-L...')
        L = numpy.array([L for _, _, L, _ in result['weights']])
        width = numpy.array([w for _, _, _, w in result['weights']])
        plt.plot(width,
                 L,
                 'x-',
                 color='blue',
                 ms=1,
                 label='Z=%.2f (%.2f)' %
                 (result['logZ'], log(exp(L + width).sum())))
        fromleft = exp(L + width)[::-1].cumsum()
        fromleft /= fromleft.max()
        mask = (fromleft < 0.99)[::-1]
        if mask.any():
            i = width[mask].argmax()
            plt.ylim(L.max() - log(1000), L.max())
            plt.fill_between(width[mask],
                             L[mask],
                             L.max() - log(1000),
                             color='grey',
                             alpha=0.3)
        plt.xlabel('prior mass')
        plt.ylabel('likelihood')
        plt.legend(loc='best')
        plt.savefig(output_basename + 'nested_integral.pdf',
                    bbox_inches='tight')
        plt.close()

        # posteriors
        print('plotting posteriors...')
        posterioru, posteriorx = equal_weighted_posterior(result['weights'])
        plt.figure(figsize=(ndim * 2, ndim * 2))
        marginal_plots(weights=result['weights'], ndim=ndim)
        plt.savefig(output_basename + 'posterior.pdf', bbox_inches='tight')
        plt.close()

        # plot convergence history
        print('plotting Z history...')
        plt.figure()
        plt.plot(termination.plotdata['normalZ'], label='NS')
        plt.plot(termination.plotdata['remainderZ'], label='remainder')
        plt.plot(termination.plotdata['totalZ'], label='total')
        hi = max(termination.plotdata['totalZ'])
        plt.ylim(hi - 10, hi + 0.1)
        plt.legend(loc='best', prop=dict(size=8))
        plt.savefig(output_basename + 'convergence_Z.pdf', bbox_inches='tight')
        plt.close()

        print('plotting convergence history...')
        plt.figure()
        plt.plot(termination.plotdata['normalZerr'], label='NS')
        plt.plot(termination.plotdata['remainderZerr'], label='remainder')
        plt.plot(termination.plotdata['totalZerr'], label='total')
        if 'memory_sigma' in termination.plotdata:
            plt.plot(termination.plotdata['memory_sigma'],
                     label='memory_sigma')
        if 'classic_totalZerr' in termination.plotdata:
            plt.plot(termination.plotdata['classic_totalZerr'],
                     label='classic_totalZerr')
        plt.ylim(0, 2)
        plt.legend(loc='best', prop=dict(size=8))
        plt.savefig(output_basename + 'convergence_Zerr.pdf',
                    bbox_inches='tight')
        plt.close()

    return dict(
        Z_computed=float(result['logZ']),
        Z_computed_err=float(result['logZerr']),
        niterations=result['niterations'],
        duration=endtime - starttime,
    )
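
A comparable hedged sketch for this later version, exercising the plain MCMC draw method together with the bootstrapped termination criterion (again with illustrative values, covering only keys the function actually reads):

import numpy

def loglikelihood(x):
    # toy Gaussian blob centred at 0.5 in each dimension
    return -0.5 * numpy.sum(((x - 0.5) / 0.1)**2)

result = run_nested(ndim=2, nlive_points=400, draw_method='mcmc',
                    proposer='gauss', adapt=True, scale=0.1, nsteps=20,
                    loglikelihood=loglikelihood, integrator='normal-bs',
                    output_basename='demo_', seed=1)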