Example #1
 def __call__(self, shape, name=None):
     # Orthogonal initialization (Saxe et al., 2013): factor a random
     # Gaussian matrix with SVD and keep an orthonormal factor.
     flat_shape = (shape[0], np.prod(shape[1:]))
     a = np_rng.normal(0.0, 1.0, flat_shape)
     u, _, v = np.linalg.svd(a, full_matrices=False)
     q = u if u.shape == flat_shape else v  # pick the factor with the correct shape
     q = q.reshape(shape)
     return sharedX(self.scale * q[:shape[0], :shape[1]], name=name)
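Assuming this method belongs to an initializer class along the lines of dcgan_code's Orthogonal, with a scale attribute set in __init__ (the constructor below is a hypothetical sketch, not shown in the snippet), usage would look like:

winit = Orthogonal(scale=1.1)          # hypothetical constructor
w_fc = winit((256, 512), name='w_fc')  # dense weight matrix
w_conv = winit((64, 32, 5, 5), name='w_conv')  # conv filters, flattened to 2D internally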
Example #2
 def __call__(self, shape):
     # He initialization (He et al., 2015): zero-mean Gaussian with
     # std sqrt(2 / fan_in) for dense (2D) and convolutional (4D) weights.
     if len(shape) == 2:
         scale = np.sqrt(2. / shape[0])
     elif len(shape) == 4:
         scale = np.sqrt(2. / np.prod(shape[1:]))
     else:
         raise NotImplementedError
     return sharedX(np_rng.normal(size=shape, scale=scale))
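For a 4D convolutional weight of shape (n_out, n_in, kh, kw), the fan-in is n_in * kh * kw, which is exactly what np.prod(shape[1:]) computes. A quick sanity check of the resulting scale, with an illustrative shape:

shape = (64, 32, 5, 5)        # illustrative conv weight shape
fan_in = np.prod(shape[1:])   # 32 * 5 * 5 = 800
scale = np.sqrt(2. / fan_in)  # sqrt(2 / 800) = 0.05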
Example #3
 def init(self):
     # Batch-norm style parameters: g (gain) and b (bias) are learned;
     # u (running mean), s (running second moment) and n (update count)
     # are accumulated statistics, not trained by the optimizer.
     naxes = len(self.out_shape)
     if naxes == 2 or naxes == 4:
         dim = self.out_shape[1]   # feature / channel axis
     elif naxes == 3:
         dim = self.out_shape[-1]  # last axis for sequence data
     else:
         raise NotImplementedError
     self.g = inits.Constant(c=1.)(dim)
     self.b = inits.Constant(c=0.)(dim)
     self.u = inits.Constant(c=0.)(dim)
     self.s = inits.Constant(c=0.)(dim)
     self.n = sharedX(0.)
     self.params = [self.g, self.b]
     self.other_params = [self.u, self.s, self.n]
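A minimal sketch of how these parameters are typically consumed at inference time, assuming T is theano.tensor and that u and s hold averaged batch statistics (the actual statistic updates live elsewhere in the layer):

 def op(self, x, eps=1e-8):
     # hypothetical inference path using the parameters initialized above
     x_hat = (x - self.u) / T.sqrt(self.s + eps)  # normalize with running stats
     return self.g * x_hat + self.b               # learned scale and shift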
Example #4
 def __call__(self, vocab, name=None):
     # Load the pretrained word2vec GoogleNews vocabulary and embeddings.
     w2v_vocab = joblib.load(os.path.join(self.data_dir, '3m_w2v_gn_vocab.jl'))
     w2v_embed = joblib.load(os.path.join(self.data_dir, '3m_w2v_gn.jl'))
     # Map each lowercased word to all of its indices in the w2v vocab.
     mapping = {}
     for i, w in enumerate(w2v_vocab):
         w = w.lower()
         if w in mapping:
             mapping[w].append(i)
         else:
             mapping[w] = [i]
     # For every word in our vocab that exists in word2vec, remember both
     # its own index and the smallest matching word2vec index.
     widxs = []
     w2vidxs = []
     for i, w in enumerate(vocab):
         w = w.replace('`', "'")  # normalize backticks to apostrophes
         if w in mapping:
             w2vi = min(mapping[w])
             w2vidxs.append(w2vi)
             widxs.append(i)
     # Rows for unmatched words stay zero; matched rows get the halved
     # pretrained vectors.
     w = np.zeros((len(vocab), w2v_embed.shape[1]))
     w[widxs, :] = w2v_embed[w2vidxs, :] / 2.
     return sharedX(w, name=name)
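Assuming the surrounding class stores data_dir in its constructor (the class name W2VEmbed below is illustrative, not from the snippet), a call returns a (len(vocab), embedding_dim) shared variable with zero rows for words missing from word2vec:

embed_init = W2VEmbed(data_dir='data/')  # hypothetical class name
E = embed_init(['the', 'cat', 'sat'], name='embeddings')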
Example #5
 def __call__(self, shape):
     # Identity matrix scaled by self.scale; assumes a square 2D shape.
     return sharedX(np.identity(shape[0]) * self.scale)
Example #6
 def __call__(self, shape):
     # Constant initialization: fill the tensor with self.c.
     return sharedX(np.ones(shape) * self.c)
Example #7
 def __call__(self, shape, name=None):
     r = np_rng.normal(loc=0, scale=0.01, size=shape)
     # Rescale so the whole matrix has Frobenius norm sqrt(shape[1]).
     r = r / np.sqrt(np.sum(r**2)) * np.sqrt(shape[1])
     return sharedX(r, name=name)
Example #8
 def __call__(self, shape, name=None):
     # Gaussian initialization with configurable mean (loc) and std (scale).
     return sharedX(np_rng.normal(loc=self.loc, scale=self.scale, size=shape),
                    name=name)
Example #9
 def __call__(self, shape):
     # Uniform initialization on [-scale, scale].
     return sharedX(
         np_rng.uniform(low=-self.scale, high=self.scale, size=shape))
Example #10
# binary cross-entropy costs: the discriminator should label real samples 1
# and generated samples 0, while the generator tries to make the
# discriminator label generated samples 1
d_cost_real = bce(p_real, T.ones(p_real.shape)).mean()
d_cost_gen = bce(p_gen, T.zeros(p_gen.shape)).mean()
g_cost_d = bce(p_gen, T.ones(p_gen.shape)).mean()

# total discriminator and generator costs
d_cost = d_cost_real + d_cost_gen
g_cost = g_cost_d

# bundle every cost term so the compiled functions can report all of them
cost = [g_cost, d_cost, d_cost_real, d_cost_gen]

# update the weights with Adam (Kingma & Ba, https://arxiv.org/pdf/1412.6980)
lr = 0.001
lrt = sharedX(lr)
d_updater = updates.Adam(lr=lrt)
g_updater = updates.Adam(lr=lrt)

d_updates = d_updater(d_params, d_cost)
g_updates = g_updater(g_params, g_cost)
all_updates = d_updates + g_updates  # combined list; avoids shadowing the updates module

# compile the training, sampling, scoring, and cost functions
_train_g = theano.function([X, Z], cost, updates=g_updates)
_train_d = theano.function([X, Z], cost, updates=d_updates)
_train_both = theano.function([X, Z], cost, updates=all_updates)
_gen = theano.function([Z], gen)
_score = theano.function([X], p_real)
_cost = theano.function([X, Z], cost)
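With the functions compiled, a minimal training loop might look like the following sketch; iter_minibatches, n_epochs, batch_size, and nz (the noise dimension) are assumptions that do not appear in the snippet above:

for epoch in range(n_epochs):
    for xmb in iter_minibatches(X_train, batch_size):       # assumed helper
        zmb = np_rng.uniform(-1., 1., size=(len(xmb), nz))  # noise batch
        costs = _train_both(xmb, zmb)  # [g_cost, d_cost, d_cost_real, d_cost_gen]
samples = _gen(np_rng.uniform(-1., 1., size=(16, nz)))      # draw generator samples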