Gaussians. """ # new figure pl.figure() # Gaussian parameters params = fit[0] # plot the histogram plot_bars(H.T, xloc=bin_left, width=bin_width, yerr='std') # show the Gaussians x = np.linspace(0, 1, 100) # first gaussian pl.plot(x, params[0] * norm.pdf(x, params[1], params[2]), "r-", zorder=2) pl.axvline(params[1], color='r', linestyle='--', alpha=0.6) # second gaussian pl.plot(x, params[3] * norm.pdf(x, params[4], params[5]), "b-", zorder=3) pl.axvline(params[4], color='b', linestyle='--', alpha=0.6) # dual gaussian pl.plot(x, dual_gaussian(x, *params), "k--", alpha=0.5, zorder=1) pl.xlim(0, 1) pl.ylim(ymin=0) pl.title('Dual Gaussian fit of searchlight accuracies') if cfg.getboolean('examples', 'interactive', True): # show the cool figures pl.show() """
Gaussians. """ # new figure pl.figure() # Gaussian parameters params = fit[0] # plot the histogram plot_bars(H.T, xloc=bin_left, width=bin_width, yerr='std') # show the Gaussians x = np.linspace(0, 1, 100) # first gaussian pl.plot(x, params[0] * norm.pdf(x, params[1], params[2]), "r-", zorder=2) pl.axvline(params[1], color='r', linestyle='--', alpha=0.6) # second gaussian pl.plot(x, params[3] * norm.pdf(x, params[4], params[5]), "b-", zorder=3) pl.axvline(params[4], color='b', linestyle='--', alpha=0.6) # dual gaussian pl.plot(x, dual_gaussian(x, *params), "k--", alpha=0.5, zorder=1) pl.xlim(0, 1) pl.ylim(ymin=0) pl.title('Dual Gaussian fit of searchlight accuracies') if cfg.getboolean('examples', 'interactive', True): # show the cool figures pl.show()
def plot_proj_dir(p):
    """Plot two projection directions as lines from the origin.

    Parameters
    ----------
    p : array-like, at least 2x2
        Row ``p[0]`` is drawn in yellow, row ``p[1]`` in black; each row
        gives the (x, y) endpoint of a line starting at the origin.
    """
    # FIX: dropped `hold=True` -- it was the default behavior anyway and
    # the `hold` kwarg was deprecated and then removed in matplotlib 3.0,
    # where passing it raises an error.
    pl.plot([0, p[0, 0]], [0, p[0, 1]], linewidth=3, color='y')
    pl.plot([0, p[1, 0]], [0, p[1, 1]], linewidth=3, color='k')
def plot_proj_dir(p):
    """Plot two projection directions as lines from the origin.

    Parameters
    ----------
    p : array-like, at least 2x2
        Each row gives the (x, y) endpoint of a line from the origin;
        the first row is drawn in yellow, the second in black.
    """
    # FIX: the `hold=True` kwarg was removed -- it merely restated the
    # default behavior and was dropped from matplotlib in 3.0, where it
    # raises an error.
    for row, color in ((0, 'y'), (1, 'k')):
        pl.plot([0, p[row, 0]], [0, p[row, 1]], linewidth=3, color=color)
result = kernel.compute(data)

# In the following we draw some 2D functions at random from the
# distribution N(0, kernel) defined by each available kernel and plot
# them.  These plots show the flexibility of a given kernel (with
# default parameters) when doing interpolation.  The choice of a kernel
# defines a prior probability over the function space used for
# regression/classification with GPR/GPC.
#
# FIXES: Python-2-only `print k` statement replaced by `print(k)`
# (valid on both Python 2 and 3); manual counter replaced by
# `enumerate`; dead commented-out code removed.
for count, k in enumerate(kernel_dictionary, 1):
    pl.subplot(3, 4, count)
    # Regular evaluation grid in [-1, 1) as a column vector.
    X = np.arange(-1, 1, .02)
    X = X[:, np.newaxis]
    ker = kernel_dictionary[k]()
    ker.compute(X, X)
    print(k)
    K = np.asarray(ker)
    # Draw 10 random functions from N(0, K) and plot them.
    for i in range(10):
        f = np.random.multivariate_normal(np.zeros(X.shape[0]), K)
        pl.plot(X[:, 0], f, "b-")
    pl.title(k)
    pl.axis('tight')

if cfg.getboolean('examples', 'interactive', True):
    # show all the cool figures
    pl.show()