Example #1
    def PlotCredibleIntervals(self):
        """Plots empirical-coverage credible intervals ("weather plot") for each order of interest."""

        try:
            # kernel for the fit: RBF correlations plus a fixed white-noise nugget
            self.kernel_fit = (RBF(length_scale=self.ls)
                               + WhiteKernel(noise_level=self.nugget, noise_level_bounds='fixed'))

            # truncation GP for the order-by-order convergence pattern
            self.gp_trunc = gm.TruncationGP(kernel=self.kernel_fit, ref=self.ref,
                                            ratio=self.ratio, center=self.center,
                                            disp=self.disp, df=self.df, scale=self.scale)
            self.gp_trunc.fit(self.X[self.x_train_mask], y=self.data[self.x_train_mask],
                              orders=self.orders_array)

            # extracts the truncation error for each x-value
            self.norm_trunc_cov = self.gp_trunc.cov(self.X[self.x_valid_mask], start=0, end=0)
            self.norm_residuals = ((self.data_true[self.x_valid_mask, None]
                                    - self.data[self.x_valid_mask])
                                   / (self.ratio**(self.orders_array + 1) / np.sqrt(1 - self.ratio**2)))
            self.gr_dgn_trunc = gm.GraphicalDiagnostic(self.norm_residuals,
                                                       mean=np.zeros(self.x[self.x_valid_mask].shape[0]),
                                                       cov=self.norm_trunc_cov, colors=self.colors,
                                                       gray=gray, black=softblack)

            fig, ax = plt.subplots(figsize=(3.4, 3.2))

            # plots one empirical-coverage curve per order
            for i, n in enumerate(self.orders_array):
                norm_residuals_alt = (self.data_true[self.x_valid_mask]
                                      - self.data[self.x_valid_mask][:, i])
                norm_trunc_cov_alt = self.gp_trunc.cov(
                    self.X[self.x_valid_mask], start=n + 1)
                gr_dgn_trunc_alt = gm.GraphicalDiagnostic(
                    norm_residuals_alt, mean=np.zeros(self.x[self.x_valid_mask].shape[0]),
                    cov=norm_trunc_cov_alt, colors=[self.colors[i]], gray=gray, black=softblack)
                gr_dgn_trunc_alt.credible_interval(
                    np.linspace(1e-5, 1, 100),
                    band_perc=[0.68, 0.95],
                    ax=ax,
                    title=None,
                    xlabel=r'Credible Interval ($100\alpha\%$)',
                    ylabel=r'Empirical Coverage ($\%$)')
            ax.set_xticks([0, 0.2, 0.4, 0.6, 0.8, 1])
            ax.set_xticklabels([0, 20, 40, 60, 80, 100])
            ax.set_yticks([0, 0.2, 0.4, 0.6, 0.8, 1])
            ax.set_yticklabels([0, 20, 40, 60, 80, 100])
            fig.tight_layout()

        except Exception as e:
            print("The credible intervals could not be calculated at one or more orders:", e)
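
The methods in these examples rely on several module-level names that are defined outside the snippets (gm, RBF, WhiteKernel, gray, softblack, edgewidth, text_bbox, offset_xlabel). Below is one plausible setup sketch in the style of the gsum tutorials; the specific constant values and the offset_xlabel helper are assumptions for illustration, not taken from the original source.

import math

import numpy as np
import matplotlib.pyplot as plt
import gsum as gm  # BUQEYE convergence-analysis package
from sklearn.gaussian_process.kernels import RBF, WhiteKernel

# plotting constants referenced by the methods (assumed values)
softblack = 'k'    # near-black used for reference curves and text
gray = '0.7'       # light gray passed to GraphicalDiagnostic
edgewidth = 0.6    # linewidth for the edges of the error bands
text_bbox = dict(boxstyle='round', fc=(1, 1, 1, 0.6), ec=softblack, lw=0.8)

def offset_xlabel(ax):
    # assumed helper: blanks the x tick labels so the xlabel sits close to the axis
    ax.set_xticks([0])
    ax.set_xticklabels(labels=[0], fontdict=dict(color='w'))
    ax.tick_params(axis='x', length=0)
    return ax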
Example #2
    def PlotPC(self):
        """Plots the pivoted Cholesky diagnostic for the observable coefficients."""

        try:
            # kernel for the GP fit: RBF correlations plus a fixed white-noise nugget
            self.kernel_fit = (RBF(length_scale=self.ls)
                               + WhiteKernel(noise_level=self.nugget, noise_level_bounds='fixed'))

            # fits the GP to the coefficients and extracts error bars
            self.gp_diagnostic = gm.ConjugateGaussianProcess(
                kernel=self.kernel_fit, center=self.center, disp=self.disp,
                df=self.df, scale=self.scale, n_restarts_optimizer=2, random_state=32)
            self.gp_diagnostic.fit(self.X[self.x_train_mask],
                                   self.coeffs[self.x_train_mask])
            self.pred, self.std = self.gp_diagnostic.predict(self.X, return_std=True)
            self.underlying_std = np.sqrt(self.gp_diagnostic.cov_factor_)

            # extracts underlying covariance matrix and calculates the diagnostics
            self.mean_underlying = self.gp_diagnostic.mean(
                self.X[self.x_valid_mask])
            self.cov_underlying = self.gp_diagnostic.cov(
                self.X[self.x_valid_mask])
            self.gdgn = gm.GraphicalDiagnostic(self.coeffs[self.x_valid_mask],
                                               self.mean_underlying, self.cov_underlying,
                                               colors=self.colors, gray=gray, black=softblack)

            # plots the pivoted Cholesky decomposition
            # ("text.latex.preview" has been removed from recent Matplotlib releases;
            # drop it there and keep only "text.usetex")
            with plt.rc_context({"text.usetex": True, "text.latex.preview": True}):
                fig, ax = plt.subplots(figsize=(3.2, 3.2))
                self.gdgn.pivoted_cholesky_errors(ax=ax, title=None)
                ax.set_xticks([2, 4, 6, 8, 10, 12])
                ax.set_xticks([1, 3, 5, 7, 9, 11], minor=True)
                ax.set_yticks([-2, -1, 0, 1, 2])
                ax.text(0.04, 0.967, r'$\mathrm{D}_{\mathrm{PC}}$', bbox=text_bbox,
                        transform=ax.transAxes, va='top', ha='left')
                fig.tight_layout()
                plt.show()
        except Exception as e:
            print("The pivoted Cholesky decomposition could not be calculated at one or more orders:", e)
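
To see what pivoted_cholesky_errors shows in isolation, one can feed GraphicalDiagnostic synthetic curves drawn from a known covariance: if the covariance is the true one, the decorrelated residuals should scatter like independent standard normals, mostly within about ±2. Everything below (grid, kernel, colors) is invented for illustration and is independent of the class above.

import numpy as np
import matplotlib.pyplot as plt
import gsum as gm
from sklearn.gaussian_process.kernels import RBF

x = np.linspace(0, 1, 20)
X = x[:, None]
cov = RBF(length_scale=0.3)(X) + 1e-8 * np.eye(len(x))  # hypothetical "true" covariance

# three fake coefficient curves drawn from that covariance
rng = np.random.default_rng(0)
samples = rng.multivariate_normal(np.zeros(len(x)), cov, size=3).T

gdgn = gm.GraphicalDiagnostic(samples, mean=np.zeros(len(x)), cov=cov,
                              colors=['C0', 'C1', 'C2'], gray='0.7', black='k')
fig, ax = plt.subplots(figsize=(3.2, 3.2))
gdgn.pivoted_cholesky_errors(ax=ax, title=None)
plt.show()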
Example #3
    def PlotMD(self, plot_type='box'):
        """Plots the squared Mahalanobis distance as either a box-and-whisker plot or a histogram."""
        try:
            # kernel for the GP fit: RBF correlations plus a fixed white-noise nugget
            self.kernel_fit = (RBF(length_scale=self.ls)
                               + WhiteKernel(noise_level=self.nugget, noise_level_bounds='fixed'))

            # fits the GP to the coefficients and extracts error bars
            self.gp_diagnostic = gm.ConjugateGaussianProcess(
                kernel=self.kernel_fit, center=self.center, disp=self.disp,
                df=self.df, scale=self.scale, n_restarts_optimizer=2, random_state=32)
            self.gp_diagnostic.fit(self.X[self.x_train_mask],
                                   self.coeffs[self.x_train_mask])
            self.pred, self.std = self.gp_diagnostic.predict(self.X, return_std=True)
            self.underlying_std = np.sqrt(self.gp_diagnostic.cov_factor_)

            # extracts underlying covariance matrix and calculates the diagnostics
            self.mean_underlying = self.gp_diagnostic.mean(
                self.X[self.x_valid_mask])
            self.cov_underlying = self.gp_diagnostic.cov(
                self.X[self.x_valid_mask])
            self.gdgn = gm.GraphicalDiagnostic(self.coeffs[self.x_valid_mask],
                                               self.mean_underlying, self.cov_underlying,
                                               colors=self.colors, gray=gray, black=softblack)

            # plots the squared Mahalanobis distance
            if plot_type == 'box':
                fig, ax = plt.subplots(figsize=(1.5, 3.0))
                ax = self.gdgn.md_squared(type=plot_type, trim=False, title=None,
                                          xlabel=r'$\mathrm{D}_{\mathrm{MD}}^2$')
            elif plot_type == 'hist':
                fig, ax = plt.subplots(figsize=(9, 3.2))
                ax = self.gdgn.md_squared(type=plot_type, title=None,
                                          xlabel=r'$\mathrm{D}_{\mathrm{MD}}^2$')
                ax.set_ylim(0, 25)
            else:
                return 0

            offset_xlabel(ax)
#             fig.tight_layout()
        except Exception as e:
            print("The Mahalanobis distance could not be calculated at one or more orders:", e)
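
The statistic that md_squared summarizes is the squared Mahalanobis distance D_MD^2 = (y - mu)^T K^{-1} (y - mu); for a consistent model it should be comparable to a chi-squared variate with N degrees of freedom, N being the number of validation points. A minimal hand-computed check with a made-up grid and kernel (not part of the class above):

import numpy as np
from scipy import stats
from sklearn.gaussian_process.kernels import RBF

x = np.linspace(0, 1, 20)
cov = RBF(length_scale=0.3)(x[:, None]) + 1e-8 * np.eye(len(x))  # hypothetical covariance
y = np.random.default_rng(1).multivariate_normal(np.zeros(len(x)), cov)  # one fake curve

md_sq = y @ np.linalg.solve(cov, y)  # (y - mean)^T K^{-1} (y - mean) with mean = 0
print(md_sq, stats.chi2(df=len(x)).interval(0.68))  # should usually land near or inside the 68% band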
Example #4
    def PlotTruncations(self):
        """Plots the data summed to each order of interest."""

        try:
            # kernel for the fit: RBF correlations plus a fixed white-noise nugget
            self.kernel_fit = (RBF(length_scale=self.ls)
                               + WhiteKernel(noise_level=self.nugget, noise_level_bounds='fixed'))

            # fits the truncation GP to the data given the training mask
            self.gp_trunc = gm.TruncationGP(kernel=self.kernel_fit, ref=self.ref,
                                            ratio=self.ratio, center=self.center,
                                            disp=self.disp, df=self.df, scale=self.scale)
            self.gp_trunc.fit(self.X[self.x_train_mask], y=self.data[self.x_train_mask],
                              orders=self.orders_array)

            # extracts the truncation error for each x-value
            self.norm_trunc_cov = self.gp_trunc.cov(self.X[self.x_valid_mask], start=0, end=0)
            self.norm_residuals = ((self.data_true[self.x_valid_mask, None]
                                    - self.data[self.x_valid_mask])
                                   / (self.ratio**(self.orders_array + 1) / np.sqrt(1 - self.ratio**2)))
            self.gr_dgn_trunc = gm.GraphicalDiagnostic(self.norm_residuals,
                                                       mean=np.zeros(self.x[self.x_valid_mask].shape[0]),
                                                       cov=self.norm_trunc_cov, colors=self.colors,
                                                       gray=gray, black=softblack)

            fig, axes = plt.subplots(math.ceil(self.n_orders / 2), 2, sharex=True, sharey=True,
                                     figsize=(3.9, 3.2))

            # plots each order's curve with its 2-sigma truncation-error band
            for i, n in enumerate(self.orders_array):
                _, std_trunc = self.gp_trunc.predict(self.X, order=n, return_std=True,
                                                     kind='trunc')

                # the order-n curve and band appear in its own panel and every higher-order panel
                for j in range(i, self.n_orders):
                    ax = axes.ravel()[j]
                    ax.plot(self.x, self.data[:, i], zorder=i - 5, c=self.colors[i])
                    ax.fill_between(self.x, self.data[:, i] + 2 * std_trunc,
                                    self.data[:, i] - 2 * std_trunc, zorder=i - 5,
                                    facecolor=self.light_colors[i], edgecolor=self.colors[i],
                                    lw=edgewidth)
                ax = axes.ravel()[i]
                ax.plot(self.x, self.data_true, color=softblack, lw=1, ls='--')
                ax.set_xticks([0.25, 0.5, 0.75])
                ax.set_xticks(self.x[self.x_valid_mask], minor=True)
                ax.set_xticklabels([0.25, 0.5, 0.75])
                ax.set_yticks([0, 10, 20])
                ax.set_yticks([-10, 0, 10, 20, 30], minor=True)
                ax.set_ylim(-15, 37)

            axes[1, 0].set_xlabel(r'$x$')
            axes[1, 1].set_xlabel(r'$x$')
            fig.tight_layout(h_pad=0.3, w_pad=0.3)

        except Exception as e:
            print("The truncation error curves could not be calculated at one or more orders:", e)
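
All four methods assume that the instance attributes (self.x, self.X, self.data, self.data_true, self.coeffs, self.orders_array, the train/valid masks, the colors, and the GP hyperparameters) have already been set up by constructor code that is not part of these examples. A hypothetical driver is sketched below; the class name ConvergenceAnalysis and its constructor signature are placeholders, not the original API.

# Hypothetical usage -- "ConvergenceAnalysis" and its constructor arguments are invented;
# substitute whatever class these methods actually belong to.
analysis = ConvergenceAnalysis(x, data, data_true, orders,
                               ratio=0.5, ls=0.25, nugget=1e-10)

analysis.PlotMD(plot_type='box')      # squared Mahalanobis distance
analysis.PlotPC()                     # pivoted Cholesky residuals
analysis.PlotCredibleIntervals()      # empirical coverage ("weather") plot
analysis.PlotTruncations()            # order-by-order curves with truncation bands
plt.show()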