                   noise_std=0.5),
    ]

ndatasets = len(datasets)
nmappers = len(mappers.keys())

pl.figure(figsize=(8, 8))
fig = 1

for ds in datasets:
    for mname, mapper in mappers.iteritems():
        mapper.train(ds)

        dproj = mapper.forward(ds.samples)
        mproj = mapper.proj
        pl.subplot(ndatasets, nmappers, fig)
        if fig <= 3:
            pl.title(mname)
        pl.axis('equal')

        pl.scatter(ds.samples[:, 0] - center[0],
                   ds.samples[:, 1] - center[1],
                   s=30, c=(ds.sa.targets) * 200)
        plot_proj_dir(mproj)
        fig += 1

if cfg.getboolean('examples', 'interactive', True):
    pl.show()

"""
Output of the example:
"""
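To see what the mappers' projection directions amount to without the PyMVPA machinery, here is a minimal NumPy/matplotlib sketch of the same kind of plot for a single PCA-style projection. The synthetic data, the scaling of the drawn line, and all variable names are illustrative assumptions and not part of the example above.

# Minimal sketch of the projection plots above; the data and names here are
# illustrative assumptions, independent of the PyMVPA example.
import numpy as np
import pylab as pl

rng = np.random.RandomState(0)
# Correlated 2D Gaussian cloud, already centered at the origin.
samples = rng.multivariate_normal([0, 0], [[3.0, 1.5], [1.5, 1.0]], size=300)

# Leading principal direction via SVD of the centered samples -- this plays
# the role of mapper.proj for a one-component PCA-like mapper.
_, _, vt = np.linalg.svd(samples - samples.mean(axis=0), full_matrices=False)
direction = vt[0]

pl.scatter(samples[:, 0], samples[:, 1], s=30)
# Draw the projection direction through the origin, scaled for visibility.
scale = 3 * samples.std()
pl.plot([-scale * direction[0], scale * direction[0]],
        [-scale * direction[1], scale * direction[1]], 'r-', linewidth=2)
pl.axis('equal')
pl.show()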
    (Matern_5_2Kernel, {}),
    (RationalQuadraticKernel, {}),
    ):
    kernel = kernel_class(**kernel_args)
    print kernel
    result = kernel.compute(data)

# In the following we draw some functions at random from the
# distribution N(0, kernel) defined by each available kernel and
# plot them.  These plots show the flexibility of a given kernel
# (with default parameters) when doing interpolation.  The choice
# of a kernel defines a prior probability over the function space
# used for regression/classification with GPR/GPC.
count = 1
for k in kernel_dictionary.keys():
    pl.subplot(3, 4, count)
    # X = np.random.rand(size)*12.0-6.0
    # X.sort()
    X = np.arange(-1, 1, .02)
    X = X[:, np.newaxis]
    ker = kernel_dictionary[k]()
    ker.compute(X, X)
    print k
    K = np.asarray(ker)
    for i in range(10):
        f = np.random.multivariate_normal(np.zeros(X.shape[0]), K)
        pl.plot(X[:, 0], f, "b-")
    pl.title(k)
    pl.axis('tight')
    count += 1
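The key step above is the draw from N(0, K) with np.random.multivariate_normal. The sketch below repeats it without PyMVPA's kernel classes, building a squared-exponential kernel matrix by hand; the length scale and jitter term are assumed values chosen only for illustration.

# Self-contained sketch: sample functions from a GP prior defined by a
# squared-exponential kernel.  Length scale and jitter are assumed values.
import numpy as np
import pylab as pl

X = np.arange(-1, 1, .02)
length_scale = 0.3                        # assumed hyperparameter
# Squared-exponential kernel matrix K[i, j] = exp(-(x_i - x_j)^2 / (2 l^2))
diff = X[:, np.newaxis] - X[np.newaxis, :]
K = np.exp(-0.5 * (diff / length_scale) ** 2)
K += 1e-8 * np.eye(len(X))                # jitter for numerical stability

# Draw 10 functions from N(0, K) and plot them, as in the loop above.
for i in range(10):
    f = np.random.multivariate_normal(np.zeros(len(X)), K)
    pl.plot(X, f, "b-")
pl.title("squared exponential (length scale %.1f)" % length_scale)
pl.axis('tight')
pl.show()

Smaller length scales give wigglier sampled functions, which is the kind of flexibility the grid of subplots produced by the loop above is meant to visualize.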