def plotDatas(RegObs, plotcov=False, plotNormal=False, num=1):
    """Plot the regression data: inputs with separator (left), output means (right).

    Parameters
    ----------
    RegObs : observations object exposing ``plot(ax, ...)`` and ``plotOutputs(ax)``
    plotcov : bool, overlay the input covariances when True
    plotNormal : bool, overlay the normal to the separator when True
    num : int, matplotlib figure number
    """
    fig = plt.figure(num, figsize=set_size(twoColumns=True))

    ax = fig.add_subplot(121)
    RegObs.plot(ax, plotcov=plotcov, plotNormal=plotNormal)
    ax.set_aspect('equal')
    ax.set_title('inputs X with separator (blue) and covariances (red)')

    ax = fig.add_subplot(122)
    RegObs.plotOutputs(ax)
    # Fixed: "ouputs" typo and missing backslashes — without them mathtext
    # rendered the literal words "sigma"/"theta" instead of Greek letters.
    ax.set_title(r'outputs means $\sigma(x_i^T\theta)$')
def XP_2D(sigma0, mu0, N, s, c, seed, name, num, nbLevels=0):
    """Run the 2-D logistic-regression experiment and save the figures.

    Generates synthetic observations, fits the reference posterior plus the
    Kalman-type approximations (EKF, QKF, RVGA implicit/explicit, Laplace),
    then draws the inputs alongside the KL divergence (figure ``num``) and
    the covariance ellipsoids (figure ``num + 1``). Figures are written
    under ./outputs/ with the suffix ``name``.
    """
    d = 2

    # --- generate the data -------------------------------------------------
    RegObs = LogisticRegObservations(s, N, d, c, seed,
                                     scale=1, rotate=True, normalize=True)
    y, X = RegObs.datas
    targets = y.reshape(N, )

    # --- run the filters ---------------------------------------------------
    theta0 = mu0 * np.ones([d, 1]) / math.sqrt(d)
    Cov0 = np.identity(d) * sigma0**2

    # reference posterior
    posterior = PosteriorLogReg(theta0, Cov0).fit(X, targets)

    # Kalman-type approximations, all started from the same prior
    ekf = EKFLogReg(theta0, Cov0).fit(X, targets)
    qkf = QKFLogReg(theta0, Cov0).fit(X, targets)
    rvga = RVGALogReg(theta0, Cov0).fit(X, targets)
    rvgae = RVGALogRegExplicite(theta0, Cov0).fit(X, targets)
    lap = LaplaceLogisticRegression(theta0, Cov0).fit(X, targets)

    # --- figure 1: data + KL divergence ------------------------------------
    fig = plt.figure(num, figsize=set_size(twoColumns=True))

    ax = fig.add_subplot(121)
    ax.set_title('Distributions of inputs')
    RegObs.plot(ax, plotcov=False, plotNormal=False)
    ax.set_xlabel(
        r'$\sigma_0=${0}, $||\mu_0||={1:.1f}$ , N={2}, d={3}, s={4:.1f}, c={5}'
        .format(sigma0, mu0, N, d, s, c))

    ax = fig.add_subplot(122)
    ax.set_title('KL divergence', loc='left')
    plotKL(ax, rvga, rvgae, ekf, qkf, lap, posterior, seed)
    ax.set_ylabel('KL error')

    fig.legend(loc="upper right", ncol=2)
    plt.savefig('./outputs/KL_2d_' + name)

    # --- figure 2: covariance ellipsoids -----------------------------------
    num = num + 1
    plotCov(rvga, rvgae, ekf, qkf, lap, posterior, num, nbLevels)
    plt.savefig('./outputs/Cov_2d_' + name)
def plotCov(rvga, rvgae, ekf, qkf, lap, posterior, num=1, nbLevels=0):
    """Plot the covariance ellipsoids of each filter in side-by-side panels.

    One subplot per algorithm (RVGA implicit/explicit, EKF, QKF); the
    Laplace ellipsoid and, when provided, the true posterior are overlaid
    in every panel.

    Parameters
    ----------
    rvga, rvgae, ekf, qkf, lap : fitted models exposing ``plotEllipsoid``
    posterior : true posterior with a ``plot(ax, showMleMap=...)`` method,
        or None to omit it (e.g. when d > 2 and it cannot be drawn)
    num : int, matplotlib figure number
    nbLevels : int, number of ellipsoid levels forwarded to the filters
    """
    print('Plot covariances ... ')
    fig = plt.figure(num, figsize=set_size(twoColumns=True))

    panels = [(rvga, 'RVGA (implicit)'),
              (rvgae, 'RVGA (explicit)'),
              (ekf, 'EKF'),
              (qkf, 'QKF')]
    for i, (algo, title) in enumerate(panels):
        # Only the first panel contributes legend labels, as in the
        # original hand-unrolled version.
        labelize = (i == 0)
        ax = fig.add_subplot(1, 4, i + 1)
        algo.plotEllipsoid(ax, nbLevels=nbLevels, labelize=labelize)
        lap.plotEllipsoid(ax, labelize=labelize)
        if posterior is not None:
            posterior.plot(ax, showMleMap=False)
        ax.set_title(title)

    fig.legend(loc="lower center", ncol=4)
def plotPredictionMap(RegObs, rvga, rvgae, ekf, qkf, lap, num=3):
    """Plot the prediction map of each fitted algorithm with the data overlaid.

    Parameters
    ----------
    RegObs : observations object exposing ``plot(ax)``
    rvga, rvgae, ekf, qkf, lap : fitted models exposing ``plotPredictionMap``
    num : int, matplotlib figure number
    """
    print('Compute prediction maps ... ')
    fig = plt.figure(num, figsize=set_size(twoColumns=True))

    panels = [(rvga, 'RVGA'),
              (rvgae, 'RVGA-exp'),
              (ekf, 'EKF'),
              (qkf, 'QKF'),
              (lap, 'Laplace')]
    for i, (algo, title) in enumerate(panels):
        ax = fig.add_subplot(1, 5, i + 1)
        ax.set_title(title)
        algo.plotPredictionMap(ax)
        RegObs.plot(ax)
# NOTE(review): this run of statements relies on names bound earlier in the
# file (theta0, Cov0, X, y, N, d, RegObs, posterior, seed) — presumably the
# tail of an experiment/main section; confirm against the enclosing scope.

# Fit every Kalman-type approximation from the same prior (theta0, Cov0).
ekf = EKFLogReg(theta0, Cov0).fit(X, y.reshape(N, ))
qkf = QKFLogReg(theta0, Cov0).fit(X, y.reshape(N, ))
rvga = RVGALogReg(theta0, Cov0).fit(X, y.reshape(N, ))
rvgae = RVGALogRegExplicite(theta0, Cov0).fit(X, y.reshape(N, ))
lap = LaplaceLogisticRegression(theta0, Cov0).fit(X, y.reshape(N, ))

plotDatas(RegObs, plotcov=True, plotNormal=True, num=1)

# The exact posterior is only overlaid in 2-D; otherwise None skips it.
if d == 2:
    plotCov(rvga, rvgae, ekf, qkf, lap, posterior, num=2, nbLevels=4)
else:
    plotCov(rvga, rvgae, ekf, qkf, lap, None, num=2, nbLevels=4)
plt.suptitle('covariances over iterations')
plt.savefig('./outputs/Cov_outputs')

# KL divergence of each approximation vs the true posterior over iterations.
fig = plt.figure(3, figsize=set_size(twoColumns=False))
ax = fig.add_subplot(111)
plotKL(ax, rvga, rvgae, ekf, qkf, lap, posterior, seed)
ax.set_title('Evolution of the KL divergence with iterations')
plt.legend(loc='upper right')
fig.text(0.5, 0.04, 'number of iterations \n ({} pass x {} samples)'.format(1, N), ha='center')

plotPredictionMap(RegObs, rvga, rvgae, ekf, qkf, lap, num=4)
plt.savefig('./outputs/Map_outputs')
plt.suptitle('outputs probabilities')
plt.show()
num = num + 2 XP_2D(10, 0, N, 10, 1, 1, 's10', num, nbLevels=4) num = num + 2 # XP in High Dim # TEST HD1 sensitivity to dimension with Sharp prior sigma0=1 if 'HD1' in Test: sigma0 = 1 mu0 = 0 d_list = [30, 70, 100] N = 500 s = 2 c = 0 seed = 10 fig = plt.figure(num, figsize=set_size(ratio=0.5)) num = num + 1 ax1 = fig.add_subplot(131) XP_HighDim(np.array([ax1]), sigma0, mu0, N, d_list[0], s, c, seed, label=True) ax1.set_title('d={}'.format(d_list[0])) ax2 = fig.add_subplot(132) XP_HighDim(np.array([ax2]), sigma0,