def test_perceptron(self):
    """MLP forward pass yields the expected output dims for single and batched inputs."""
    params = dy.ParameterCollection()
    init = OrthogonalInitializer
    # Single-example input: output should be a 5-vector with batch size 1.
    net = MLP(params, [10, 8, 5], init=init)
    inp = dy.random_normal((10,))
    out = net(inp, True)
    assert out.dim() == ((5,), 1)
    # Batched input with dropout enabled: batch dimension must be preserved.
    net_batched = MLP(params, [10, 8, 5], p=0.5, init=init)
    inp = dy.random_normal((10,), batch_size=5)
    out = net_batched(inp, True)
    assert out.dim() == ((5,), 5)
def test_linear(self):
    """Linear layer produces the right output shape; a bias-free all-ones 10->1 layer sums its input."""
    params = dy.ParameterCollection()
    # Shape check with an orthogonal init and a bias term.
    layer = Linear(params, in_dim=10, out_dim=5, bias=True, init=OrthogonalInitializer)
    inp = dy.random_normal((10, ))
    out = layer(inp)
    assert out.dim() == ((5, ), 1)
    # With weights fixed to 1 and no bias, the output equals the sum of the input.
    ones_init = dy.ConstInitializer(1)
    summer = Linear(params, in_dim=10, out_dim=1, bias=False, init=ones_init)
    inp = dy.random_normal((10, ))
    out = summer(inp)
    assert math.fabs(np.sum(out.npvalue()) - np.sum(inp.npvalue())) < 1e-6
def reparameterize(self, mu, logvar):
    """Sample z = mu + sigma * eps via the reparameterization trick.

    During training, draws eps ~ N(0, 1) so gradients flow through mu and
    logvar; at evaluation time, deterministically returns the mean.
    """
    if not self.training:
        return mu
    sigma = dy.exp(0.5 * logvar)
    # Noise matches sigma's (non-batch) dimension.
    noise = dy.random_normal(dim=sigma.dim()[0], mean=0.0, stddev=1.0)
    return mu + dy.cmult(noise, sigma)
def reparameterize(mu, logvar):
    """Draw z ~ N(mu, exp(logvar)) with the reparameterization trick.

    Samples standard normal noise of mu's (non-batch) length and scales it
    by the standard deviation derived from the log-variance.
    """
    length = mu.dim()[0][0]
    noise = dy.random_normal(length)
    sigma = dy.exp(0.5 * logvar)
    return dy.cmult(sigma, noise) + mu
def test_DeepBiRNNBuilder(self):
    """A 2-layer deep BiLSTM over a length-10 sequence keeps sequence length and doubles hidden size."""
    params = dy.ParameterCollection()
    encoder = DeepBiRNNBuilder(params, 2, 50, 20, orthonormal_VanillaLSTMBuilder)
    sequence = [dy.random_normal((50, )) for _ in range(10)]
    states = encoder(sequence, p_x=0.33, p_h=0.33, train=True)
    # One output per input step; forward + backward halves concatenate to 40 dims.
    assert len(states) == 10
    assert states[0].dim() == ((40, ), 1)
def reparameterize(self, mu, logvar):
    """Return a reparameterized sample z = mu + sigma * eps, eps ~ N(0, 1).

    Keeps mu and logvar differentiable by pushing all randomness into eps.
    """
    length = mu.dim()[0][0]
    noise = dy.random_normal(length)
    sigma = dy.exp(0.5 * logvar)
    return dy.cmult(sigma, noise) + mu
def reparameterize(mu, log_sigma_squared):
    """Reparameterization trick: sample mu + sigma * eps with eps ~ N(0, 1).

    sigma is recovered from the log-variance as exp(0.5 * log_sigma_squared).
    """
    length = mu.dim()[0][0]
    eps = dy.random_normal(length)
    sigma = dy.exp(0.5 * log_sigma_squared)
    return mu + dy.cmult(sigma, eps)