Example #1
def swiss_roll(optimize=True, verbose=1, plot=True, N=1000, num_inducing=15, Q=4, sigma=.2):
    import numpy as _np
    import GPy
    from GPy.util.datasets import swiss_roll_generated
    from GPy.models import BayesianGPLVM

    data = swiss_roll_generated(num_samples=N, sigma=sigma)
    Y = data['Y']
    Y -= Y.mean()
    Y /= Y.std()

    t = data['t']
    c = data['colors']

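    # initialize the latent space with an Isomap embedding if scikit-learn is available,
    # otherwise fall back to a random initialization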
    try:
        from sklearn.manifold import Isomap
        iso = Isomap().fit(Y)
        X = iso.embedding_
        if Q > 2:
            X = _np.hstack((X, _np.random.randn(N, Q - 2)))
    except ImportError:
        X = _np.random.randn(N, Q)

    if plot:
        import matplotlib.pyplot as plt
        from mpl_toolkits.mplot3d import Axes3D  # @UnusedImport
        fig = plt.figure("Swiss Roll Data")
        ax = fig.add_subplot(121, projection='3d')
        ax.scatter(*Y.T, c=c)
        ax.set_title("Swiss Roll")

        ax = fig.add_subplot(122)
        ax.scatter(*X.T[:2], c=c)
        ax.set_title("BGPLVM init")

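    # variational variance of the latent points: centred on `var`, plus small clipped
    # noise and a .001 offset to keep it strictly positive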
    var = .5
    S = (var * _np.ones_like(X) + _np.clip(_np.random.randn(N, Q) * var ** 2,
                                         - (1 - var),
                                         (1 - var))) + .001
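    # inducing inputs Z: a random subset of num_inducing initialized latent points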
    Z = _np.random.permutation(X)[:num_inducing]

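    # ARD RBF kernel plus bias and white-noise terms; ARD lets irrelevant latent dimensions be switched off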
    kernel = GPy.kern.rbf(Q, ARD=True) + GPy.kern.bias(Q, _np.exp(-2)) + GPy.kern.white(Q, _np.exp(-2))

    m = BayesianGPLVM(Y, Q, X=X, X_variance=S, num_inducing=num_inducing, Z=Z, kernel=kernel)
    m.data_colors = c
    m.data_t = t
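    # start the noise variance at 1% of the (standardized) data variance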
    m['noise_variance'] = Y.var() / 100.

    if optimize:
        m.optimize('scg', messages=verbose, max_iters=2e3)

    if plot:
        fig = plt.figure('fitted')
        ax = fig.add_subplot(111)
        s = m.input_sensitivity().argsort()[::-1][:2]
        ax.scatter(*m.X.T[s], c=c)

    return m
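A minimal usage sketch for the example above (assuming GPy, numpy and matplotlib are installed; scikit-learn is optional and only used for the Isomap initialization; the argument values are illustrative):

m = swiss_roll(optimize=True, verbose=1, plot=True, N=500, num_inducing=15, Q=4)
print(m)                      # parameter table of the fitted BayesianGPLVM
print(m.input_sensitivity())  # ARD sensitivities of the latent dimensions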
Example #2
def swiss_roll(optimize=True, N=1000, num_inducing=15, Q=4, sigma=.2, plot=False):
    import numpy as np
    import GPy
    from GPy.models import BayesianGPLVM
    from GPy.util.datasets import swiss_roll_generated
    from GPy.core.transformations import logexp_clipped  # not used below

    data = swiss_roll_generated(N=N, sigma=sigma)
    Y = data['Y']
    Y -= Y.mean()
    Y /= Y.std()

    t = data['t']
    c = data['colors']

    try:
        from sklearn.manifold import Isomap
        iso = Isomap().fit(Y)
        X = iso.embedding_
        if Q > 2:
            X = np.hstack((X, np.random.randn(N, Q - 2)))
    except ImportError:
        X = np.random.randn(N, Q)

    if plot:
        from mpl_toolkits import mplot3d
        import pylab
        fig = pylab.figure("Swiss Roll Data")
        ax = fig.add_subplot(121, projection='3d')
        ax.scatter(*Y.T, c=c)
        ax.set_title("Swiss Roll")

        ax = fig.add_subplot(122)
        ax.scatter(*X.T[:2], c=c)
        ax.set_title("Initialization")
    var = .5
    S = (var * np.ones_like(X) + np.clip(np.random.randn(N, Q) * var ** 2,
                                         - (1 - var),
                                         (1 - var))) + .001
    Z = np.random.permutation(X)[:num_inducing]

    kernel = GPy.kern.rbf(Q, ARD=True) + GPy.kern.bias(Q, np.exp(-2)) + GPy.kern.white(Q, np.exp(-2))

    m = BayesianGPLVM(Y, Q, X=X, X_variance=S, num_inducing=num_inducing, Z=Z, kernel=kernel)
    m.data_colors = c
    m.data_t = t

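    # set initial values for the kernel hyperparameters and the noise before optimizing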
    m['rbf_lengthscale'] = 1. # X.var(0).max() / X.var(0)
    m['noise_variance'] = Y.var() / 100.
    m['bias_variance'] = 0.05

    if optimize:
        m.optimize('scg', messages=1)
    return m
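This variant sets the RBF lengthscale and bias variance by hand before optimizing and does not plot the fitted model. A minimal calling sketch (GPy and numpy assumed installed; the max_iters value is illustrative and follows the optimize call in Example #1):

m = swiss_roll(optimize=False, N=500, Q=4, plot=False)
m.optimize('scg', messages=1, max_iters=500)  # optimize separately with a custom iteration budget
print(m)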