def test_different_parameters():
    """Plotting should cope with differing parameter subsets.

    Exercises 1d plotting on the full parameter list and 2d plotting on a
    subset, the full list, and an asymmetric (rows != columns) combination.
    """
    np.random.seed(3)
    full = ['x0', 'x1', 'x2', 'x3', 'x4']
    subset = ['x0', 'x1', 'x2']
    samples = NestedSamples(root='./tests/example_data/pc')

    fig, axes = make_1d_axes(full)
    samples.plot_1d(axes)

    # Same 2d-plot sequence as before: subset, full, then mixed rows/columns.
    for spec in (subset, full, [full, subset]):
        fig, axes = make_2d_axes(spec)
        samples.plot_2d(axes)

    plt.close('all')
def test_masking():
    """Boolean-masked samples should plot cleanly in 1d and 2d.

    Runs every available plot type (including 'fastkde' when installed)
    over the subset of samples with x0 > 0.
    """
    samples = NestedSamples(root="./tests/example_data/pc")
    masked = samples[samples['x0'] > 0]

    kinds = ['kde', 'hist']
    if 'fastkde' in sys.modules:
        kinds.append('fastkde')

    for kind in kinds:
        fig, axes = make_1d_axes(['x0', 'x1', 'x2'])
        masked.plot_1d(axes=axes, plot_type=kind)

    # 2d plotting additionally supports scatter in the lower triangle.
    for kind in kinds + ['scatter']:
        fig, axes = make_2d_axes(['x0', 'x1', 'x2'], upper=False)
        masked.plot_2d(axes=axes, types=dict(lower=kind, diagonal='hist'))
def test_plot_1d_colours():
    """Each sample set must keep one consistent colour across all 1d axes.

    Plots three datasets (gd, pc, mn) on shared axes for every available
    plot type, then collects the legend colour of each labelled artist and
    asserts each dataset used exactly one colour across all panels.
    """
    np.random.seed(3)
    gd = MCMCSamples(root="./tests/example_data/gd")
    gd.drop(columns='x3', inplace=True)
    pc = NestedSamples(root="./tests/example_data/pc")
    pc.drop(columns='x4', inplace=True)
    mn = NestedSamples(root="./tests/example_data/mn")
    mn.drop(columns='x2', inplace=True)

    plot_types = ['kde', 'hist']
    if 'astropy' in sys.modules:
        plot_types += ['astropyhist']
    if 'fastkde' in sys.modules:
        plot_types += ['fastkde']

    for plot_type in plot_types:
        fig = plt.figure()
        fig, axes = make_1d_axes(['x0', 'x1', 'x2', 'x3', 'x4'], fig=fig)
        gd.plot_1d(axes, plot_type=plot_type, label="gd")
        pc.plot_1d(axes, plot_type=plot_type, label="pc")
        mn.plot_1d(axes, plot_type=plot_type, label="mn")

        gd_colors = []
        pc_colors = []
        mn_colors = []
        # FIX: Series.iteritems() was deprecated and removed in pandas 2.0;
        # items() is the supported spelling with identical behaviour.
        for x, ax in axes.items():
            handles, labels = ax.get_legend_handles_labels()
            for handle, label in zip(handles, labels):
                # Histogram legend entries are Rectangles (use facecolor);
                # other entries expose get_color() directly.
                if isinstance(handle, Rectangle):
                    color = to_hex(handle.get_facecolor())
                else:
                    color = handle.get_color()
                if label == 'gd':
                    gd_colors.append(color)
                elif label == 'pc':
                    pc_colors.append(color)
                elif label == 'mn':
                    mn_colors.append(color)

        assert len(set(gd_colors)) == 1
        assert len(set(mn_colors)) == 1
        assert len(set(pc_colors)) == 1
    plt.close("all")
ns_output[:6]

#| The evidence, KL divergence and Bayesian model dimensionality, with their corresponding errors, are:

for x in ns_output:
    print('%10s = %9.2f +/- %4.2f' % (x, ns_output[x].mean(), ns_output[x].std()))

#| Since ``ns_output`` is a set of ``MCMCSamples``, it may be plotted as usual.
#| Here we illustrate slightly more fine-grained control of the axes construction
#| (demanding three columns)

from anesthetic import make_1d_axes
fig, axes = make_1d_axes(['logZ', 'D', 'd'], ncols=3, tex=ns_output.tex)
ns_output.plot_1d(axes)

#| We can also inspect the correlation between these inferences:

ns_output.plot_2d(['logZ', 'D'])

#| Here is a comparison of the base and NS output

# Derive Omega_b from omegabh2 and the (dimensionless) Hubble parameter h.
h = nested['H0'] / 100
nested['omegab'] = nested['omegabh2'] / h**2
# FIX: use a raw string — '\O' is not a recognised escape sequence, so the
# non-raw form emits a SyntaxWarning/DeprecationWarning on modern Python.
# The resulting string value is unchanged.
nested.tex['omegab'] = r'$\Omega_b$'
fig, axes = mcmc.plot_2d(['sigma8', 'omegab'])
nested.plot_2d(axes=axes)