def callback(params):
        print("Log likelihood {}, Squared Error {}".format(-objective(params),squared_error(params,X,y,n_samples)))
        
        # Show posterior marginals.
        if dimensions[0] == 1:
            plot_xs = np.reshape(np.linspace(-5, 5, 300), (300,1))
            plot_deep_gp(ax_end_to_end, params, plot_xs)
            deep_map = create_deep_map(params)
            if dimensions == [1,1]:
                ax_end_to_end.plot(np.ndarray.flatten(deep_map[0][0]['x0']),deep_map[0][0]['y0'], 'ro')
            elif dimensions == [1,1,1]:
                plot_single_gp(ax_x_to_h,params,0,0,plot_xs)
                ax_x_to_h.set_title("Inputs to hiddens, pesudo data in red")

                plot_single_gp(ax_h_to_y,params,1,0,plot_xs)
                ax_h_to_y.set_title("Hiddens to outputs, pesudo data in red")
            elif dimensions == [1,1,1,1]:
                plot_single_gp(ax_x_to_h, params,0,0, plot_xs)
                ax_x_to_h.set_title("Inputs to Hidden 1, pesudo data in red")

                plot_single_gp(ax_h_to_h2, params,1,0,plot_xs)
                ax_h_to_h2.set_title("Hidden 1 to Hidden 2, pesudo data in red")

                plot_single_gp(ax_h2_to_y, params,2,0, plot_xs)
                ax_h2_to_y.set_title("Hidden 2 to Outputs, pesudo data in red")
        elif dimensions[0] == 2:
            plot_xs = np.array([np.array([a,b]) for a in np.linspace(-1,1,40) for b in np.linspace(-1,1,40)])
            plot_deep_gp_2d(ax, params, plot_xs)
        plt.draw()
        plt.pause(1.0/60.0)
    def plot_deep_gp_2d(ax,params,plot_xs):
        ax.cla()
        rs = npr.RandomState(0)

        sampled_means_and_covs = [sample_mean_cov_from_deep_gp(params, plot_xs) for i in range(n_samples)]
        sampled_means, sampled_covs = zip(*sampled_means_and_covs)
        avg_pred_mean = np.mean(sampled_means, axis = 0)
        avg_pred_cov = np.mean(sampled_covs, axis = 0)
        #print("X*",avg_pred_mean)
        #rint("X*",plot_xs[0:4])

        #sampled_means_and_covs_orig = [sample_mean_cov_from_deep_gp(params, X) for i in xrange(n_samples)]
        #sampled_means_orig, sampled_covs_orig = zip(*sampled_means_and_covs_orig)
        #avg_pred_mean_orig = np.mean(sampled_means_orig, axis = 0)
        #print("Orignal Xs",avg_pred_mean_orig)

        X0 = params[5:5+num_pseudo_params*2].reshape(num_pseudo_params,2)
        y0 = params[5+num_pseudo_params*2:5+num_pseudo_params*3]
        #ax.scatter(X0[:,0],X0[:,1],c = y0)

        avg_pred_mean = avg_pred_mean.reshape(40,40)
        ax.contourf(np.linspace(-1,1,40),np.linspace(-1,1,40), avg_pred_mean)
        ax.scatter(X[:,0],X[:,1],c=y)
        ax.set_xticks([])
        ax.set_yticks([])
        ax.set_title("Full Deep GP")
Example #3
def build_toy_dataset(D=1, n_data=20, noise_std=0.1):
    rs = npr.RandomState(0)
    inputs  = np.concatenate([np.linspace(0, 3, num=n_data//2),
                              np.linspace(6, 8, num=n_data//2)])
    targets = (np.cos(inputs) + rs.randn(n_data) * noise_std) / 2.0
    inputs = (inputs - 4.0) / 2.0
    inputs  = inputs.reshape((len(inputs), D))
    return inputs, targets
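A minimal usage sketch for the toy-dataset builder above; the plotting code is hypothetical and only assumes Matplotlib is available alongside the function:

import matplotlib.pyplot as plt

inputs, targets = build_toy_dataset(D=1, n_data=20, noise_std=0.1)
print(inputs.shape, targets.shape)  # (20, 1) and (20,)

# The two disjoint input clusters of noisy, rescaled cosine observations.
plt.plot(inputs.ravel(), targets, 'kx')
plt.xlabel('x (rescaled)')
plt.ylabel('y')
plt.show()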
Example #4
def make_pinwheel_data(num_spokes=5, points_per_spoke=40, rate=1.0, noise_std=0.005):
    """Make synthetic data in the shape of a pinwheel."""
    spoke_angles = np.linspace(0, 2 * np.pi, num_spokes + 1)[:-1]
    rs = npr.RandomState(0)
    x = np.linspace(0.1, 1, points_per_spoke)
    xs = np.concatenate([x * np.cos(angle + x * rate) + noise_std * rs.randn(len(x)) for angle in spoke_angles])
    ys = np.concatenate([x * np.sin(angle + x * rate) + noise_std * rs.randn(len(x)) for angle in spoke_angles])
    return np.concatenate([np.expand_dims(xs, 1), np.expand_dims(ys, 1)], axis=1)
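A short usage sketch for the pinwheel generator above, assuming only Matplotlib in addition to the function itself:

import matplotlib.pyplot as plt

# Each row of the returned array is an (x, y) point on one of the spokes.
data = make_pinwheel_data(num_spokes=5, points_per_spoke=40)
plt.scatter(data[:, 0], data[:, 1], s=8)
plt.axis('equal')
plt.show()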
Example #5
def build_toy_dataset(n_data=80, noise_std=0.1):
    rs = npr.RandomState(0)
    inputs  = np.concatenate([np.linspace(0, 3, num=n_data//2),
                              np.linspace(6, 8, num=n_data//2)])
    targets = np.cos(inputs) + rs.randn(n_data) * noise_std
    inputs = (inputs - 4.0) / 2.0
    inputs  = inputs[:, np.newaxis]
    targets = targets[:, np.newaxis] / 2.0
    return inputs, targets
 def init():
     offset = 2.0
     #if optimum[0] < np.inf:
     #    xmin = min(results['ADAM'][0][0], optimum[0]) - offset
     #    xmax = max(results['ADAM'][0][0], optimum[0]) + offset
     #else:
     xmin = domain[0, 0]
     xmax = domain[0, 1]
     #if optimum[1] < np.inf:
     #    ymin = min(results['ADAM'][1][0], optimum[1]) - offset
     #    ymax = max(results['ADAM'][1][0], optimum[1]) + offset
     #else:
     ymin = domain[1, 0]
     ymax = domain[1, 1]
     x = np.arange(xmin, xmax, 0.01)
     y = np.arange(ymin, ymax, 0.01)
     X, Y = np.meshgrid(x, y)
     Z = np.zeros(np.shape(Y))
     for a, _ in np.ndenumerate(Y):
         Z[a] = func(X[a], Y[a])
     level = fdict['level']
     if level is None:
         level = np.linspace(Z.min(), Z.max(), 20)
     else:
         if level[0] == 'normal':
             level = np.linspace(Z.min(), Z.max(), level[1])
         elif level[0] == 'log':
             level = np.logspace(np.log10(Z.min()), np.log10(Z.max()), level[1])
     CF = ax[0].contour(X,Y,Z, levels=level)
     #plt.colorbar(CF, orientation='horizontal', format='%.2f')
     ax[0].grid()
     ax[0].plot(results['ADAM'][0][0], results['ADAM'][1][0], 
         'h', markersize=15, color = '0.75')
     if optimum[0] < np.inf and optimum[1] < np.inf:
         ax[0].plot(optimum[0], optimum[1], '*', markersize=40, 
             markeredgewidth = 2, alpha = 0.5, color = '0.75')
     ax[0].legend(loc='upper center', ncol=3, bbox_to_anchor=(0.5, 1.15))
     
     ax[1].plot(0, results['ADAM'][2][0], 'o')
     ax[1].axis([0, T, -0.5, max_err + 0.5])
     ax[1].set_xlabel('num. iteration')
     ax[1].set_ylabel('loss')
     
     line1.set_data([], [])
     line2.set_data([], [])
     line3.set_data([], [])
     line4.set_data([], [])
     line5.set_data([], [])
     
     err1.set_data([], [])
     err2.set_data([], [])
     err3.set_data([], [])
     err4.set_data([], [])
     err5.set_data([], [])
     
     return line1, line2, line3, line4, line5, \
         err1, err2, err3, err4, err5, 
Example #7
 def plot_isocontours(ax, func, xlimits=[-2, 2], ylimits=[-4, 2], numticks=101):
     x = np.linspace(*xlimits, num=numticks)
     y = np.linspace(*ylimits, num=numticks)
     X, Y = np.meshgrid(x, y)
     zs = func(np.concatenate([np.atleast_2d(X.ravel()), np.atleast_2d(Y.ravel())]).T)
     Z = zs.reshape(X.shape)
     ax.contour(X, Y, Z)
     ax.set_yticks([])
     ax.set_xticks([])
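A minimal usage sketch for plot_isocontours; the isotropic Gaussian density below is a hypothetical target chosen only for illustration (the function expects a callable mapping an (N, 2) array of points to N density values):

import numpy as np
import matplotlib.pyplot as plt

def gaussian_density(points):
    # Row-wise standard bivariate normal density.
    return np.exp(-0.5 * np.sum(points**2, axis=1)) / (2 * np.pi)

fig, ax = plt.subplots()
plot_isocontours(ax, gaussian_density)
plt.show()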
Example #8
def make_pinwheel_data(num_classes, num_per_class, rate=2.0, noise_std=0.001):
    spoke_angles = np.linspace(0, 2*np.pi, num_classes+1)[:-1]

    rs = npr.RandomState(0)
    x = np.linspace(0.1, 1, num_per_class)
    xs = np.concatenate([rate *x * np.cos(angle + x * rate) + noise_std * rs.randn(num_per_class)
                         for angle in spoke_angles])
    ys = np.concatenate([rate *x * np.sin(angle + x * rate) + noise_std * rs.randn(num_per_class)
                         for angle in spoke_angles])
    return np.concatenate([np.expand_dims(xs, 1), np.expand_dims(ys,1)], axis=1)
def build_toy_dataset(n_data=40, noise_std=0.1):
    D = 1
    rs = npr.RandomState(0)
    inputs  = np.concatenate([np.linspace(0, 2, num=n_data//2),
                              np.linspace(6, 8, num=n_data//2)])
    targets = np.cos(inputs) + rs.randn(n_data) * noise_std
    inputs = (inputs - 5.0) / 4.0
    inputs  = inputs.reshape((len(inputs), D))
    targets = targets.reshape((len(targets), D))
    return inputs, targets
    def callback(params):
        print("Log likelihood {}".format(-objective(params)))
        plt.cla()
        pred_mean, pred_cov = predict(params, X, y, plot_xs)
        pred_mean = pred_mean.reshape(40, 40)
        ax.contourf(np.linspace(-1, 1, 40), np.linspace(-1, 1, 40), pred_mean)
        ax.scatter(X[:, 0], X[:, 1], c=y)
        ax.set_xticks([])
        ax.set_yticks([])
        plt.draw()
        plt.pause(1.0/60.0)
Example #11
def plot_error_surface(loss_fun, params, ax=None):
    if ax is None:
        fig = plt.figure()
        ax = fig.add_subplot(111)
    w0s = np.linspace(-2*params[0], 2*params[0], 10)
    w1s = np.linspace(-2*params[1], 2*params[1], 10)
    w0_grid, w1_grid = np.meshgrid(w0s, w1s)
    lossvec = np.vectorize(loss_fun)
    z = lossvec(w0_grid, w1_grid)
    cs = ax.contour(w0s, w1s, z)
    ax.clabel(cs)
    ax.plot(params[0], params[1], 'rx', markersize=14)
    return ax
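A small usage sketch for the error-surface plotter above; the quadratic loss and parameter values are hypothetical stand-ins:

import numpy as np
import matplotlib.pyplot as plt

# Hypothetical loss surface with its minimum at (1.0, 0.5).
loss_fun = lambda w0, w1: (w0 - 1.0)**2 + (w1 - 0.5)**2

plot_error_surface(loss_fun, np.array([1.0, 0.5]))
plt.show()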
Example #12
def plot_trace(ps, ttl):
    x = np.linspace(-5, 5, 100)
    y = np.linspace(-5, 5, 100)
    X, Y = np.meshgrid(x, y)
    Z = rosen(np.vstack([X.ravel(), Y.ravel()])).reshape((100,100))
    ps = np.array(ps)
    plt.figure(figsize=(12,4))
    plt.subplot(121)
    plt.contour(X, Y, Z, np.arange(10)**5)
    plt.plot(ps[:, 0], ps[:, 1], '-o')
    plt.plot(1, 1, 'r*', markersize=12) # global minimum
    plt.subplot(122)
    plt.semilogy(range(len(ps)), rosen(ps.T))
    plt.title(ttl)
Example #13
def plot_error_surface(xtrain, ytrain, model, ax=None):
    params = model.params
    if ax is None:
        fig = plt.figure()
        ax = fig.add_subplot(111)
    w0s = np.linspace(-2*params[0], 2*params[0], 10)
    w1s = np.linspace(-2*params[1], 2*params[1], 10)
    w0_grid, w1_grid = np.meshgrid(w0s, w1s)
    def loss(w0, w1):
        return model.objective([w0, w1], xtrain, ytrain)
    lossvec = np.vectorize(loss)
    z = lossvec(w0_grid, w1_grid)
    cs = ax.contour(w0s, w1s, z)
    ax.clabel(cs)
    ax.plot(params[0], params[1], 'rx', markersize=14)
Example #14
 def plot_gmm(params, ax, num_points=100):
     angles = np.expand_dims(np.linspace(0, 2*np.pi, num_points), 1)
     xs, ys = np.cos(angles), np.sin(angles)
     circle_pts = np.concatenate([xs, ys], axis=1) * 2.0
     for log_proportion, mean, chol in zip(*unpack_params(params)):
         cur_pts = mean + np.dot(circle_pts, chol)
         ax.plot(cur_pts[:, 0], cur_pts[:, 1], '-')
Example #15
def equal_size_cv(n, num_chunks):
    bs = list(map(int, np.linspace(0, n, num_chunks)))
    ans = []
    for (l, r) in zip(bs[:-1], bs[1:]):
        mid = (l + r) // 2
        ans.append([list(range(l, mid)), list(range(mid, r))])
        ans.append([list(range(mid, r)), list(range(l, mid))])
    return ans
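A quick illustration of the output, assuming the Python 3 version above: each chunk of indices is split into two halves that swap train/test roles.

folds = equal_size_cv(n=8, num_chunks=3)
for train_idx, test_idx in folds:
    print(train_idx, test_idx)
# [0, 1] [2, 3]
# [2, 3] [0, 1]
# [4, 5] [6, 7]
# [6, 7] [4, 5]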
Example #16
def plot_true_posterior():
    true_posterior_contour_levels = [0.01, 0.2, 1.0, 10.0]

    x = np.linspace(*xlimits, num=200)
    y = np.linspace(*ylimits, num=200)
    X, Y = np.meshgrid(x, y)

    fig = plt.figure(0); fig.clf()
    fig.set_size_inches((5,4))
    ax = fig.add_subplot(111)
    zs = np.array([nllfun(np.concatenate(([x],[y]))) for x,y in zip(np.ravel(X), np.ravel(Y))])
    Z = zs.reshape(X.shape)
    plt.contour(X, Y, np.exp(-Z), true_posterior_contour_levels, colors='k')
    ax.set_yticks([])
    ax.set_xticks([])
    return ax
Example #17
    def callback(params):
        print("Log likelihood {}".format(-objective(params)))
        plt.cla()
        print(params)
        # Show posterior marginals.
        plot_xs = np.reshape(np.linspace(-7, 7, 300), (300,1))
        pred_mean, pred_cov = predict(params, X, y, plot_xs)
        marg_std = np.sqrt(np.diag(pred_cov))
        ax.plot(plot_xs, pred_mean, 'b')
        ax.fill(np.concatenate([plot_xs, plot_xs[::-1]]),
                np.concatenate([pred_mean - 1.96 * marg_std,
                               (pred_mean + 1.96 * marg_std)[::-1]]),
                alpha=.15, fc='Blue', ec='None')

        # Show samples from posterior.
        rs = npr.RandomState(0)
        sampled_funcs = rs.multivariate_normal(pred_mean, pred_cov, size=10)
        ax.plot(plot_xs, sampled_funcs.T)

        ax.plot(X, y, 'kx')
        ax.set_ylim([-1.5, 1.5])
        ax.set_xticks([])
        ax.set_yticks([])
        plt.draw()
        plt.pause(1.0/60.0)
Example #18
def PyLQR_TrajCtrl_TrackingTest():
    n_pnts = 200
    x_coord = np.linspace(0.0, 2*np.pi, n_pnts)
    y_coord = np.sin(x_coord)
    #concatenate to have trajectory
    ref_traj = np.array([x_coord, y_coord]).T
    weight_mats = [ np.eye(ref_traj.shape[1])*100 ]

    #draw reference trajectory
    fig = plt.figure()
    ax = fig.add_subplot(111)
    # ax.hold(True) is obsolete; Matplotlib >= 2.0 always overlays successive plots
    ax.plot(ref_traj[:, 0], ref_traj[:, 1], '.-k', linewidth=3.5)
    ax.plot([ref_traj[0, 0]], [ref_traj[0, 1]], '*k', markersize=16)

    lqr_traj_ctrl = PyLQR_TrajCtrl(use_autograd=True)
    lqr_traj_ctrl.build_ilqr_tracking_solver(ref_traj, weight_mats)

    n_queries = 5

    for i in range(n_queries):
        #start from a perturbed point
        x0 = ref_traj[0, :] + np.random.rand(2) * 2 - 1
        syn_traj = lqr_traj_ctrl.synthesize_trajectory(x0)
        #plot it
        ax.plot(syn_traj[:, 0], syn_traj[:, 1], linewidth=3.5)

    plt.show()
    return
Example #19
def build_sigmoid_dataset():
    def sigmoid(x):
        return 1.0 / (1 + np.exp(-x))
    n_data = 500
    X = np.linspace(-6, 6, n_data)
    y = sigmoid(X) + .3*np.random.randn(len(X))
    X = X.reshape((len(X),1))
    return X,y
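A brief usage sketch of the sigmoid dataset above; only Matplotlib is assumed in addition to the function:

import matplotlib.pyplot as plt

X, y = build_sigmoid_dataset()
print(X.shape, y.shape)  # (500, 1) and (500,)
plt.plot(X.ravel(), y, 'k.', markersize=3)
plt.show()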
    def callback(params, t, g):

        # log_weights = params[:10] - logsumexp(params[:10])
        print("Iteration {} lower bound {}".format(t, -objective(params, t)))
        # print (np.exp(log_weights))

        mean = params[0]
        log_std = params[1]

        print ('mean', mean)
        print ('std', np.exp(log_std))

        plt.cla()
        target_distribution = lambda x: np.exp(log_density(x))
        var_distribution    = lambda x: np.exp(variational_log_density(params, x))
        plot_isocontours(ax, target_distribution)
        plot_isocontours(ax, var_distribution, cmap=plt.cm.bone)
        ax.set_autoscale_on(False)


        # Plot the z0 density
        var_distribution0 = lambda x: np.exp(diag_gaussian_log_density(x, mean, log_std))
        plot_isocontours(ax, var_distribution0)

        for transform in params[2]:

            xlimits=[-6, 6]
            w = transform[1]
            b = transform[2]
            x = np.linspace(*xlimits, num=101)
            plt.plot(x, (-w[0]*x - b)/w[1], '-')
            
            u = transform[0]
            plt.plot(x, (-u[0]*x)/u[1], '-')

        # Plot variational samples
        samples = variational_sampler(params)
        plt.plot(samples[:, 0], samples[:, 1], 'x')

        # #Plot q0 variational samples
        # rs = npr.RandomState(0)
        # samples = sample_diag_gaussian(mean, log_std, n_samples, rs) 
        # plt.plot(samples[:, 0], samples[:, 1], 'x')


        plt.draw()
        plt.pause(1.0/30.0)
Example #21
File: gmm.py Project: mattjj/svae
 def plot_ellipse(ax, alpha, mean, cov, line=None):
     t = np.linspace(0, 2*np.pi, 100) % (2*np.pi)
     circle = np.vstack((np.sin(t), np.cos(t)))
     ellipse = 2.*np.dot(np.linalg.cholesky(cov), circle) + mean[:,None]
     if line:
         line.set_data(ellipse)
         line.set_alpha(alpha)
     else:
         ax.plot(ellipse[0], ellipse[1], alpha=alpha, linestyle='-', linewidth=2)
def job_scf_gwgrid(sample_source, tr, te, r, J):
    rand_state = np.random.get_state()
    np.random.seed(r+92856)

    d = tr.dim()
    T_randn = np.random.randn(J, d)
    np.random.set_state(rand_state)

    # grid search to determine the initial gwidth
    mean_sd = tr.mean_std()
    scales = 2.0**np.linspace(-4, 4, 20)
    list_gwidth = np.hstack( (mean_sd*scales*(d**0.5), 2**np.linspace(-10, 10, 20) ))
    list_gwidth.sort()
    besti, powers = tst.SmoothCFTest.grid_search_gwidth(tr, T_randn,
            list_gwidth, alpha)
    # initialize with the best width from the grid search
    best_width = list_gwidth[besti]
    scf_gwgrid = tst.SmoothCFTest(T_randn, best_width, alpha)
    return scf_gwgrid.perform_test(te)
Example #23
    def generate_random_star(self):
        """
        Generate a random star (not yet fully implemented)
        Args:
            None
        
        Returns:
            None
        """

        theta = np.linspace(0, np.pi, num=40)[:, None]
        phi = np.linspace(-np.pi, np.pi, num=40)

        pix = hp.ang2pix(self.NSIDE, theta, phi)
        healpix_map = np.zeros(hp.nside2npix(self.NSIDE), dtype=np.double)

        healpix_map[pix] = np.random.randn(40,40)

        stop()
Example #24
def initParams(num):

    mat = np.random.randn(m, m)
    return dict({num + 'z': np.reshape(np.linspace(0.0, 1.0, num=m), (m, 1)),
            num + 'u_mean': np.random.randn(m, 1),
            num + 'u_cov_fac': mat @ mat.T,
            num + 'h_mean': np.random.randn(n, 1),
            num + 'h_cov_fac': np.random.randn(n, 1),
            num + 'kernel_noise': np.ones((1, 1)),
            num + 'kernel_lenscale': np.ones((1, 1)),
            num + 'function_noise': np.ones((1, 1))})
Example #25
def make_data_linreg_1d(N=21, linear=True):
    xtrain = np.linspace(0, 20, N)
    sigma2 = 2
    w_true = np.array([-1.5, 1/9.])
    if linear:
        fun = lambda x: w_true[0] + w_true[1]*x
    else:
        fun = lambda x: w_true[0] + w_true[1]*np.sin(x)
    noise = np.random.normal(0, 1, xtrain.shape) * np.sqrt(sigma2)
    ytrain = fun(xtrain) + noise    
    return xtrain, ytrain, w_true
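A minimal usage sketch for the 1-D regression data builder above; the plotting is hypothetical and only assumes Matplotlib:

import matplotlib.pyplot as plt

xtrain, ytrain, w_true = make_data_linreg_1d(N=21, linear=True)

# Overlay the noisy observations and the noise-free linear trend.
plt.plot(xtrain, ytrain, 'o', label='data')
plt.plot(xtrain, w_true[0] + w_true[1] * xtrain, 'r-', label='true function')
plt.legend()
plt.show()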
    def plot_deep_gp_2d(ax,params,plot_xs):
        ax.cla()

        sampled_means_and_covs = [sample_mean_cov_from_deep_gp(params, plot_xs) for i in range(n_samples)]
        sampled_means, sampled_covs = zip(*sampled_means_and_covs)
        avg_pred_mean = np.mean(sampled_means, axis = 0)
        avg_pred_cov = np.mean(sampled_covs, axis = 0)
        
        if dimensions[1] == 1:
            deep_map = create_deep_map(params)
            x0 = deep_map[0][0]['x0']
            y0 = deep_map[0][0]['y0']
            ax.scatter(x0[:,0],x0[:,1],c = y0)

        avg_pred_mean = avg_pred_mean.reshape(40,40)
        ax.contourf(np.linspace(-1,1,40),np.linspace(-1,1,40), avg_pred_mean)
        ax.scatter(X[:,0],X[:,1],c=y)
        ax.set_xticks([])
        ax.set_yticks([])
        ax.set_title("Full Deep GP")
Example #27
    def callback(params, t, g):
        print("Iteration {} log likelihood {}".format(t, -objective(params, t)))

        # Plot data and functions.
        plt.cla()
        ax.plot(inputs.ravel(), targets.ravel(), 'bx', ms=12)
        plot_inputs = np.reshape(np.linspace(-7, 7, num=300), (300,1))
        outputs = nn_predict(params, plot_inputs)
        ax.plot(plot_inputs, outputs, 'r', lw=3)
        ax.set_ylim([-1, 1])
        plt.draw()
        plt.pause(1.0/60.0)
Example #28
def plot_data_and_pred(x, y, model, draw_verticals=True):
    x_range = np.linspace(np.min(x), np.max(x), 100)
    yhat_range = model.predict(x_range)
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.plot(x, y, 'o', label='observed')
    ax.plot(x_range, yhat_range, 'r-', label='predicted')
    if draw_verticals:  # draw a line from each observed value to its prediction
        yhat_sparse = model.predict(x)
        for x0, y0, yhat0 in zip(x, y, yhat_sparse):
            ax.plot([x0, x0],[y0, yhat0],'k-')
    plt.legend() #[line_pred, line_true], ['predicted', 'true'])
Example #29
def PyLQR_TrajCtrl_GeneralTest():
    #build RBF basis
    rbf_basis = np.array([
        [-1.0, -1.0],
        [-1.0, 1.0],
        [1.0, -1.0],
        [1.0, 1.0]
        ])
    gamma = 1
    T = 100
    R = 1e-5
    # rbf_funcs = [lambda x, u, t, aux: np.exp(-gamma*np.linalg.norm(x[0:2]-basis)**2) + .01*np.linalg.norm(u)**2 for basis in rbf_basis]
    rbf_funcs = [
    lambda x, u, t, aux: -np.exp(-gamma*np.linalg.norm(x[0:2]-rbf_basis[0])**2) + R*np.linalg.norm(u)**2,
    lambda x, u, t, aux: -np.exp(-gamma*np.linalg.norm(x[0:2]-rbf_basis[1])**2) + R*np.linalg.norm(u)**2,
    lambda x, u, t, aux: -np.exp(-gamma*np.linalg.norm(x[0:2]-rbf_basis[2])**2) + R*np.linalg.norm(u)**2,
    lambda x, u, t, aux: -np.exp(-gamma*np.linalg.norm(x[0:2]-rbf_basis[3])**2) + R*np.linalg.norm(u)**2
    ]

    weights = np.array([.75, .5, .25, 1.])
    weights = weights / (np.sum(weights) + 1e-6)

    cost_func = lambda x, u, t, aux: np.sum(weights * np.array([basis_func(x, u, t, aux) for basis_func in rbf_funcs]))

    lqr_traj_ctrl = PyLQR_TrajCtrl(use_autograd=True)
    lqr_traj_ctrl.build_ilqr_general_solver(cost_func, n_dims=rbf_basis.shape[1], T=T)

    n_eval_pnts = 50
    coords = np.linspace(-2.5, 2.5, n_eval_pnts)
    xv, yv = np.meshgrid(coords, coords)

    z = [[cost_func(np.array([xv[i, j], yv[i, j]]), np.zeros(2), None, None) for j in range(yv.shape[1])] for i in range(len(xv))]

    fig = plt.figure()
    ax = fig.add_subplot(111)
    # ax.hold(True) is obsolete; Matplotlib >= 2.0 always overlays successive plots
    ax.contour(xv, yv, z)
    
    n_queries = 5
    u_array = np.random.rand(2, T-1).T * 2 - 1
    
    for i in range(n_queries):
        #start from a perturbed point
        x0 = np.random.rand(2) * 4 - 2
        syn_traj = lqr_traj_ctrl.synthesize_trajectory(x0, u_array)
        #plot it
        ax.plot([x0[0]], [x0[1]], 'k*', markersize=12.0)
        ax.plot(syn_traj[:, 0], syn_traj[:, 1], linewidth=3.5)

    plt.show()

    return
Example #30
def make_pinwheel_data(radial_std, tangential_std, num_classes, num_per_class, rate):
    rads = np.linspace(0, 2*np.pi, num_classes, endpoint=False)

    features = npr.randn(num_classes*num_per_class, 2) \
        * np.array([radial_std, tangential_std])
    features[:,0] += 1.
    labels = np.repeat(np.arange(num_classes), num_per_class)

    angles = rads[labels] + rate * np.exp(features[:,0])
    rotations = np.stack([np.cos(angles), -np.sin(angles), np.sin(angles), np.cos(angles)])
    rotations = np.reshape(rotations.T, (-1, 2, 2))

    return 10*npr.permutation(np.einsum('ti,tij->tj', features, rotations))
Example #31
def main():
    #====== Setup =======
    n_iters, n_samples, d = 2500, 2000, 2
    init_vals = np.random.rand(n_samples, d) * 5.0

    logprob = make_logprob()
    allsamps = []

    #====== Tests =======

    t = dt.datetime.now()
    print('running 2d tests ...')
    samps = langevin(logprob,
                     copy(init_vals),
                     num_iters=n_iters,
                     num_samples=n_samples,
                     step_size=0.05)
    print('done langevin in', dt.datetime.now() - t, '\n')
    allsamps.append(samps)

    t = dt.datetime.now()
    samps = MALA(logprob,
                 copy(init_vals),
                 num_iters=n_iters,
                 num_samples=n_samples,
                 step_size=0.05)
    print('done MALA in', dt.datetime.now() - t, '\n')
    allsamps.append(samps)

    t = dt.datetime.now()
    samps = RK_langevin(logprob,
                        copy(init_vals),
                        num_iters=n_iters,
                        num_samples=n_samples,
                        step_size=0.01)
    print('done langevin_RK in', dt.datetime.now() - t, '\n')
    allsamps.append(samps)

    t = dt.datetime.now()
    samps = RWMH(logprob,
                 copy(init_vals),
                 num_iters=n_iters,
                 num_samples=n_samples,
                 sigma=0.5)
    print('done RW MH in', dt.datetime.now() - t, '\n')
    allsamps.append(samps)

    t = dt.datetime.now()
    samps = HMC(logprob,
                copy(init_vals),
                num_iters=n_iters // 5,
                num_samples=n_samples,
                step_size=0.05,
                num_leap_iters=5)
    print('done HMC in', dt.datetime.now() - t, '\n')
    allsamps.append(samps)

    #====== Plotting =======

    pts = np.linspace(-7, 7, 1000)
    X, Y = np.meshgrid(pts, pts)
    pos = np.empty(X.shape + (2, ))
    pos[:, :, 0] = X
    pos[:, :, 1] = Y
    Z = np.exp(logprob(pos))

    f, axes = plt.subplots(2, 3)
    names = ['langevin', 'MALA', 'langevin_RK', 'RW MH', 'HMC']
    for i, (name, samps) in enumerate(zip(names, allsamps)):

        row = i // 3
        col = i % 3
        ax = axes[row, col]

        ax.contour(X, Y, Z)
        ax.hist2d(samps[:, 0], samps[:, 1], alpha=0.5, bins=25)
        ax.set_title(name)

    axes[1, 2].hist2d(init_vals[:, 0], init_vals[:, 1], bins=25)
    axes[1, 2].set_title('Initial samples.')

    plt.show()
Example #32
    bounds = np.array([[-5, 10], [0, 15]])
if True:
    target_function = hyper_ellipsoid_function
    x = np.random.random((2, N)) * 10 + np.array([[-5], [5]])
    bounds = np.array([[-5, 5], [-5, 5]])

y = target_function(x)

kernel = gp.kernels.Matern()
model_gp = gp.GaussianProcessRegressor(kernel=kernel,
                                       alpha=1e-5,
                                       n_restarts_optimizer=10,
                                       normalize_y=True)

# plot the surface
lambdas = np.linspace(-5, 10, 100)
gammas = np.linspace(0, 15, 100)

# We need the cartesian combination of these two vectors
param_grid = np.array([[C, gamma] for gamma in gammas for C in lambdas])
real_loss = [barnin_function(params[:, np.newaxis]) for params in param_grid]
# The maximum is at:
print(param_grid[np.array(real_loss).argmin(), :])

C, G = np.meshgrid(lambdas, gammas)
plt.figure()
cp = plt.contourf(C, G, np.array(real_loss).reshape(C.shape))
plt.colorbar(cp)
plt.savefig('surface_grid.png')
#plt.show()
plt.clf()
Example #33
                        help='Whether or not to save an animated GIF')
    parser.add_argument('--save_tikz',
                        action='store_true',
                        help='Whether or not to save a tikz plot for LaTeX')
    args = parser.parse_args()

    # args = argparse.Namespace()
    #
    # args.animate = True
    # args.nsteps = 120
    # args.good_init = True
    # args.lr = 0.25
    # args.save_gif = True
    # args.save_tikz = True

    xx = np.linspace(-1, 1, 64)
    yy = np.linspace(-3, 1.5, 64)

    x, y = np.meshgrid(xx, yy)
    z = f(x, y)

    if args.good_init:
        x_init = 0.2
        y_init = -0.1
    else:
        x_init = -0.3
        y_init = 0.4

    levelsf = MaxNLocator(nbins=20).tick_values(z.min(), z.max())
    levels = MaxNLocator(nbins=20).tick_values(z.min(), z.max())
Example #34
        psy_t = psy_trial(xi, net_out)
        gradient_of_trial = psy_grad(xi, net_out)
        second_gradient_of_trial = psy_grad2(xi, net_out)

        func = f(xi, psy_t, gradient_of_trial)
        err_sqr = (second_gradient_of_trial - func)**2
        loss_sum += err_sqr

    return loss_sum - 0.1 * np.dot(W[0].flatten(), W[0].flatten()) \
        - 0.1 * np.dot(W[1].flatten(), W[1].flatten())


#%%

x_space = np.linspace(0, 2, nx)
y_space = psy_analytic(x_space)

W = [ran.randn(1, 20), ran.randn(20, 1), ran.randn(20), ran.randn(1)]
lmb = 0.001

for i in range(1000):
    loss_grad = grad(loss_function)(W, x_space)

    W[0] = W[0] - lmb * loss_grad[0]
    W[1] = W[1] - lmb * loss_grad[1]
    W[2] = W[2] - lmb * loss_grad[2]
    W[3] = W[3] - lmb * loss_grad[3]
#%%
print(loss_function(W, x_space))
print(W)
Example #35
    def plot_fit_and_feature_space(self, w, model, feat, **kwargs):
        # construct figure
        fig, axs = plt.subplots(1, 3, figsize=(9, 4))

        # create subplot with 2 panels
        gs = gridspec.GridSpec(1, 2, width_ratios=[1, 1])
        ax1 = plt.subplot(gs[0])
        ax2 = plt.subplot(gs[1])

        view = [20, 20]
        if 'view' in kwargs:
            view = kwargs['view']

        ##### plot left panel in original space ####
        # scatter points
        xmin, xmax, ymin, ymax = self.scatter_pts_2d(self.x, ax1)

        # clean up panel
        ax1.set_xlim([xmin, xmax])
        ax1.set_ylim([ymin, ymax])

        # label axes
        ax1.set_xlabel(r'$x$', fontsize=16)
        ax1.set_ylabel(r'$y$', rotation=0, fontsize=16, labelpad=10)

        # create fit
        s = np.linspace(xmin, xmax, 300)[np.newaxis, :]

        normalizer = lambda a: a
        if 'normalizer' in kwargs:
            normalizer = kwargs['normalizer']

        t = model(normalizer(s), w)

        ax1.plot(s.flatten(), t.flatten(), linewidth=4, c='k', zorder=0)
        ax1.plot(s.flatten(), t.flatten(), linewidth=2, c='lime', zorder=0)

        #### plot fit in transformed feature space #####
        # check if feature transform has internal parameters
        x_transformed = 0
        sig = signature(feat)
        if len(sig.parameters) == 2:
            if np.shape(w)[1] == 1:
                x_transformed = feat(normalizer(self.x), w)
            else:
                x_transformed = feat(normalizer(self.x), w[0])
        else:
            x_transformed = feat(normalizer(self.x))

        # two dimensional transformed feature space
        if x_transformed.shape[0] == 1:
            s = np.linspace(xmin, xmax, 300)[np.newaxis, :]

            # scatter points
            xmin, xmax, ymin, ymax = self.scatter_pts_2d(x_transformed, ax2)

            # produce plot
            s2 = copy.deepcopy(s)
            if len(sig.parameters) == 2:
                if np.shape(w)[1] == 1:
                    s2 = feat(normalizer(s), w)
                else:
                    s2 = feat(normalizer(s), w[0])
            else:
                s2 = feat(normalizer(s))
            t = model(normalizer(s), w)

            ax2.plot(s2.flatten(), t.flatten(), linewidth=4, c='k', zorder=0)
            ax2.plot(s2.flatten(),
                     t.flatten(),
                     linewidth=2,
                     c='lime',
                     zorder=0)

            # label axes
            ax2.set_xlabel(r'$f\left(x,\mathbf{w}^{\star}\right)$',
                           fontsize=16)
            ax2.set_ylabel(r'$y$', rotation=0, fontsize=16, labelpad=10)

        # three dimensional transformed feature space
        if x_transformed.shape[0] == 2:
            # create panel
            ax2 = plt.subplot(gs[1], projection='3d')
            s = np.linspace(xmin, xmax, 100)[np.newaxis, :]

            # plot data in 3d
            xmin, xmax, xmin1, xmax1, ymin, ymax = self.scatter_3d_points(
                x_transformed, ax2)

            # create and plot fit
            s2 = copy.deepcopy(s)
            if len(sig.parameters) == 2:
                s2 = feat(normalizer(s), w[0])
            else:
                s2 = feat(normalizer(s))

            # reshape for plotting
            a = s2[0, :]
            b = s2[1, :]
            a = np.linspace(xmin, xmax, 100)
            b = np.linspace(xmin1, xmax1, 100)
            a, b = np.meshgrid(a, b)

            # first feature coordinate evaluated over the grid
            a.shape = (1, np.size(s)**2)
            f1 = feat(normalizer(a))[0, :]

            # second feature coordinate evaluated over the grid
            b.shape = (1, np.size(s)**2)
            f2 = feat(normalizer(b))[1, :]

            # tack a 1 onto the top of each input point all at once
            c = np.vstack((a, b))
            o = np.ones((1, np.shape(c)[1]))
            c = np.vstack((o, c))
            r = (np.dot(c.T, w))

            # various
            a.shape = (np.size(s), np.size(s))
            b.shape = (np.size(s), np.size(s))
            r.shape = (np.size(s), np.size(s))
            ax2.plot_surface(a,
                             b,
                             r,
                             alpha=0.1,
                             color='lime',
                             rstride=15,
                             cstride=15,
                             linewidth=0.5,
                             edgecolor='k')
            ax2.set_xlim([np.min(a), np.max(a)])
            ax2.set_ylim([np.min(b), np.max(b)])

            # label axes
            #self.move_axis_left(ax2)
            ax2.set_xlabel(r'$f_1(x)$', fontsize=12, labelpad=5)
            ax2.set_ylabel(r'$f_2(x)$', rotation=0, fontsize=12, labelpad=5)
            ax2.set_zlabel(r'$y$', rotation=0, fontsize=12, labelpad=0)
            self.move_axis_left(ax2)
            ax2.xaxis.set_major_formatter(FormatStrFormatter('%.1f'))
            ax2.yaxis.set_major_formatter(FormatStrFormatter('%.1f'))
            ax2.view_init(view[0], view[1])
def get_Ap(nb_sampled_frequencies, passband_edge, stopband_edge, order_length):
    sampled_frequencies = np.linspace(passband_edge, stopband_edge, nb_sampled_frequencies)
    App = np.array([get_cl(w, order_length) for w in sampled_frequencies], dtype=np.float64)
    Anp = np.array([-np.array(get_cl(w, order_length)) for w in sampled_frequencies], dtype=np.float64)
    return np.concatenate((App, Anp))
Example #37
import autograd
from autograd import numpy as np

from defaults import pennylane as qml, BaseTest

from pennylane.qnode import _flatten, unflatten, QNode, QuantumFunctionError
from pennylane.plugins.default_qubit import CNOT, Rotx, Roty, Rotz, I, Y, Z
from pennylane._device import DeviceError


def expZ(state):
    return np.abs(state[0])**2 - np.abs(state[1])**2


thetas = np.linspace(-2 * np.pi, 2 * np.pi, 7)

a = np.linspace(-1, 1, 64)
a_shapes = [(64, ), (64, 1), (32, 2), (16, 4), (8, 8), (16, 2, 2),
            (8, 2, 2, 2), (4, 2, 2, 2, 2), (2, 2, 2, 2, 2, 2)]

b = np.linspace(-1., 1., 8)
b_shapes = [(8, ), (8, 1), (4, 2), (2, 2, 2), (2, 1, 2, 1, 2)]


class BasicTest(BaseTest):
    """Qnode basic tests.
    """
    def setUp(self):
        self.dev1 = qml.device('default.qubit', wires=1)
        self.dev2 = qml.device('default.qubit', wires=2)
Example #38
 def fun(x, y):
     return np.linspace(x, y, num)