import string

import numpy as np
import theano


def load_letters(which_letter=None):
    datadict = np.load('data/letters.npz')
    images, labels = datadict['images'], datadict['labels']

    # Optionally keep only the images of one lowercase letter.
    if which_letter is not None:
        images = images[labels == string.ascii_lowercase.index(which_letter)]

    return theano.shared(floatX(images), borrow=True), labels
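These snippets rely on a few names defined elsewhere in the repository they were taken from: the cast helper floatX, shared_zeros_like and concat (used by adam below), and the symbolic random stream srng (used by vlb). A minimal sketch of plausible definitions, following the usual Theano conventions rather than the original source, is:

import theano.tensor as T  # used by the vlb and adam snippets below
from theano.tensor.shared_randomstreams import RandomStreams


def floatX(x):
    # Cast to Theano's configured float dtype (numpy and theano imported above).
    return np.asarray(x, dtype=theano.config.floatX)


def shared_zeros_like(p):
    # Shared variable of zeros with the same shape and dtype as a shared parameter.
    return theano.shared(np.zeros_like(p.get_value()), borrow=True)


def concat(lists):
    # Flatten an iterable of update lists into a single list.
    return [item for lst in lists for item in lst]


srng = RandomStreams(seed=0)  # symbolic RNG; the seed value is arbitrary here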
def load_mice(N, permute=True, addnoise=True):
    data = np.load("data/images_for_vae.npy").astype(theano.config.floatX)
    if permute:
        # Shuffle the images along the first axis.
        data = np.random.permutation(data)
    # Flatten each image to a vector and keep the first N examples.
    data = data.reshape(data.shape[0], -1)[:N]
    data /= data.max()
    if addnoise:
        # Small Gaussian jitter so pixel values aren't exactly 0 or 1.
        data += 1e-3 * np.random.normal(size=data.shape)
    return theano.shared(floatX(data), borrow=True)
        def vlb(X, N, M, L):
            # Monte Carlo estimate of the variational lower bound for a
            # minibatch of M rows of X, averaged over L samples of z.
            # srng, z_dim, encode, decode, loglike, and kl_to_prior come
            # from the enclosing scope.
            def sample_z(mu, log_sigmasq):
                # Reparameterization trick: z = mu + sigma * eps, eps ~ N(0, I).
                eps = srng.normal((M, z_dim), dtype=theano.config.floatX)
                return mu + T.exp(0.5 * log_sigmasq) * eps

            mu, log_sigmasq = encode(X)
            logpxz = sum(loglike(X, decode(sample_z(mu, log_sigmasq)))
                         for l in range(L)) / floatX(L)

            minibatch_val = -kl_to_prior(mu, log_sigmasq) + logpxz

            return minibatch_val / M  # NOTE: multiply by N for overall vlb
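Assuming loglike and kl_to_prior each sum over the M rows of the minibatch, the quantity this function returns is the usual Monte Carlo estimate of the per-datapoint evidence lower bound,

\frac{1}{M}\sum_{i=1}^{M}\left[\frac{1}{L}\sum_{l=1}^{L}\log p_\theta\big(x_i \mid z_i^{(l)}\big) - \mathrm{KL}\big(q_\phi(z \mid x_i)\,\big\|\,p(z)\big)\right],
\qquad z_i^{(l)} = \mu_i + \exp\!\big(\tfrac{1}{2}\log\sigma_i^2\big)\,\epsilon^{(l)},\quad \epsilon^{(l)} \sim \mathcal{N}(0, I),

which is why the NOTE above says to multiply by N to recover a bound on the full dataset.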
    def adam(cost, params):
        # Adam updates (Kingma & Ba); alpha, beta_1, beta_2, and epsilon
        # are hyperparameters from the enclosing scope.
        grads = T.grad(cost=cost, wrt=params)
        ms = [shared_zeros_like(p) for p in params]
        vs = [shared_zeros_like(p) for p in params]
        t = theano.shared(floatX(1))

        def make_update(p, g, m, v, t):
            m_new = beta_1 * m + (1. - beta_1) * g      # first moment estimate
            v_new = beta_2 * v + (1. - beta_2) * g**2   # second moment estimate
            mhat = m_new / (1. - beta_1**t)             # bias-corrected moments
            vhat = v_new / (1. - beta_2**t)
            p_new = p - alpha * mhat / (T.sqrt(vhat) + epsilon)
            return [(m, m_new), (v, v_new), (p, p_new)]

        return [(t, t + 1)] + concat(
            make_update(p, g, m, v, t)
            for p, g, m, v in zip(params, grads, ms, vs))
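A sketch of how vlb and adam are typically combined into a Theano training step (assumed usage, not taken from the original source): data is a shared dataset like the loaders above return, params is the list of model parameters, and N, M, L are the dataset size, minibatch size, and sample count.

X = T.matrix('X')
index = T.lscalar('index')
cost = -vlb(X, N, M, L)                 # minimize the negative bound
train_step = theano.function(
    [index], cost,
    updates=adam(cost, params),
    givens={X: data[index * M:(index + 1) * M]})

for epoch in range(10):
    for i in range(N // M):
        train_step(i)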
import pickle


def load_pendulum(N, permute=True, addnoise=True):
    # The pickle file must be opened in binary mode.
    with open('data/pendulous.pkl', 'rb') as infile:
        images = pickle.load(infile).astype(theano.config.floatX)

    if permute:
        images = np.random.permutation(images)

    images = images[:N]

    # Rescale pixel values to [0, 1].
    images -= images.min()
    images /= images.max()

    if addnoise:
        images += 1e-2 * np.random.normal(size=images.shape)

    # Flatten each image into a vector.
    images = np.reshape(images, (images.shape[0], -1))

    return theano.shared(floatX(images), borrow=True)
def init_tensor(shape, name=None):
    # Small random Gaussian initialization wrapped in a Theano shared variable.
    return theano.shared(floatX(1e-2 * np.random.normal(size=shape)),
                         borrow=True,
                         name=name)
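A brief usage sketch (the layer sizes and the symbolic input X are hypothetical): helpers like this create the weight and bias parameters that adam then updates.

X = T.matrix('X')
W = init_tensor((784, 200), name='W')   # hypothetical layer shape
b = init_tensor((200,), name='b')
hidden = T.tanh(T.dot(X, W) + b)        # one dense layer on the symbolic input
params = [W, b]                         # what adam(cost, params) would update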