Example 1
def swiss_roll(optimize=True, verbose=1, plot=True, N=1000, num_inducing=25, Q=4, sigma=.2):
    import GPy
    import numpy as _np
    from pods.datasets import swiss_roll_generated
    from GPy.models import BayesianGPLVM

    data = swiss_roll_generated(num_samples=N, sigma=sigma)
    Y = data['Y']
    Y -= Y.mean()
    Y /= Y.std()

    t = data['t']
    c = data['colors']

    try:
        # Initialize the latent space with a 2D Isomap embedding of the data,
        # padded with random values up to Q dimensions.
        from sklearn.manifold import Isomap
        iso = Isomap().fit(Y)
        X = iso.embedding_
        if Q > 2:
            X = _np.hstack((X, _np.random.randn(N, Q - 2)))
    except ImportError:
        # Fall back to a purely random initialization if sklearn is unavailable.
        X = _np.random.randn(N, Q)

    if plot:
        import matplotlib.pyplot as plt
        from mpl_toolkits.mplot3d import Axes3D  # unused import, but needed to register the '3d' projection
        fig = plt.figure("Swiss Roll Data")
        ax = fig.add_subplot(121, projection='3d')
        ax.scatter(*Y.T, c=c)
        ax.set_title("Swiss Roll")

        ax = fig.add_subplot(122)
        ax.scatter(*X.T[:2], c=c)
        ax.set_title("BGPLVM init")

    # Initialize the variational variances around `var`, clipped so that every
    # entry stays strictly positive, and pick random inducing inputs from X.
    var = .5
    S = (var * _np.ones_like(X) + _np.clip(_np.random.randn(N, Q) * var ** 2,
                                         - (1 - var),
                                         (1 - var))) + .001
    Z = _np.random.permutation(X)[:num_inducing]

    kernel = GPy.kern.RBF(Q, ARD=True) + GPy.kern.Bias(Q, _np.exp(-2)) + GPy.kern.White(Q, _np.exp(-2))

    m = BayesianGPLVM(Y, Q, X=X, X_variance=S, num_inducing=num_inducing, Z=Z, kernel=kernel)
    m.data_colors = c
    m.data_t = t

    if optimize:
        m.optimize('bfgs', messages=verbose, max_iters=2e3)

    if plot:
        fig = plt.figure('fitted')
        ax = fig.add_subplot(111)
        # Plot the two latent dimensions with the highest ARD input sensitivity.
        s = m.input_sensitivity().argsort()[::-1][:2]
        ax.scatter(*m.X.mean.T[s], c=c)

    return m
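
A minimal usage sketch for the example above (assuming GPy, pods and numpy are installed; the quick settings below are illustrative and not part of the original snippet):

if __name__ == '__main__':
    # Small, fast run: few points, no optimization, no plotting.
    m = swiss_roll(optimize=False, plot=False, N=200, num_inducing=15)
    print(m)                      # GPy parameter summary of the BayesianGPLVM
    print(m.input_sensitivity())  # ARD relevance of each latent dimension
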
Example 2
def swiss_roll(optimize=True,
               verbose=1,
               plot=True,
               N=1000,
               num_inducing=25,
               Q=4,
               sigma=.2):
    import GPy
    import numpy as _np
    from pods.datasets import swiss_roll_generated
    from GPy.models import BayesianGPLVM

    data = swiss_roll_generated(num_samples=N, sigma=sigma)
    Y = data['Y']
    Y -= Y.mean()
    Y /= Y.std()

    t = data['t']
    c = data['colors']

    try:
        # Initialize the latent space with a 2D Isomap embedding of the data,
        # padded with random values up to Q dimensions.
        from sklearn.manifold import Isomap
        iso = Isomap().fit(Y)
        X = iso.embedding_
        if Q > 2:
            X = _np.hstack((X, _np.random.randn(N, Q - 2)))
    except ImportError:
        # Fall back to a purely random initialization if sklearn is unavailable.
        X = _np.random.randn(N, Q)

    if plot:
        import matplotlib.pyplot as plt
        from mpl_toolkits.mplot3d import Axes3D  # unused import, but needed to register the '3d' projection
        fig = plt.figure("Swiss Roll Data")
        ax = fig.add_subplot(121, projection='3d')
        ax.scatter(*Y.T, c=c)
        ax.set_title("Swiss Roll")

        ax = fig.add_subplot(122)
        ax.scatter(*X.T[:2], c=c)
        ax.set_title("BGPLVM init")

    # Initialize the variational variances around `var`, clipped so that every
    # entry stays strictly positive, and pick random inducing inputs from X.
    var = .5
    S = (var * _np.ones_like(X) +
         _np.clip(_np.random.randn(N, Q) * var**2, -(1 - var),
                  (1 - var))) + .001
    Z = _np.random.permutation(X)[:num_inducing]

    kernel = GPy.kern.RBF(Q, ARD=True) + GPy.kern.Bias(
        Q, _np.exp(-2)) + GPy.kern.White(Q, _np.exp(-2))

    m = BayesianGPLVM(Y,
                      Q,
                      X=X,
                      X_variance=S,
                      num_inducing=num_inducing,
                      Z=Z,
                      kernel=kernel)
    m.data_colors = c
    m.data_t = t

    if optimize:
        m.optimize('bfgs', messages=verbose, max_iters=2e3)

    if plot:
        fig = plt.figure('fitted')
        ax = fig.add_subplot(111)
        # Plot the two latent dimensions with the highest ARD input sensitivity.
        s = m.input_sensitivity().argsort()[::-1][:2]
        ax.scatter(*m.X.mean.T[s], c=c)

    return m
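
Both examples initialize the variational variances with the same clipped-noise expression. Its output range can be checked in isolation; this is just a plain numpy sketch of that expression, not part of either original snippet:

import numpy as np

N, Q, var = 1000, 4, .5
S = (var * np.ones((N, Q)) +
     np.clip(np.random.randn(N, Q) * var ** 2, -(1 - var), (1 - var))) + .001
print(S.min(), S.max())  # every entry lies in [0.001, 1.001], i.e. strictly positive variances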