Example #1
def __call__(self, x):
    x = F.relu(self.linear(x))
    x = F.reshape(x, (-1,) + self.to_shape)  # reshape to (-1, C, H, W)
    x = F.relu(self.deconv(x))
    x = self.conv(x)
    x = F.sigmoid(x)
    return x
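The `(-1,) + self.to_shape` reshape turns each flat feature vector back into a `(C, H, W)` feature map before the deconvolution, with `-1` letting the batch size fall out automatically. A minimal shape check, using a hypothetical `to_shape` of `(8, 4, 4)`:

import numpy as np
from dezero import Variable
import dezero.functions as F

to_shape = (8, 4, 4)  # hypothetical (C, H, W); the real value is set on the model
h = Variable(np.random.rand(2, 8 * 4 * 4).astype(np.float32))  # batch of 2 flat vectors
print(F.reshape(h, (-1,) + to_shape).shape)  # (2, 8, 4, 4)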
Example #2
    def forward(self, x):
        x = F.relu(self.conv1_1(x))
        x = F.relu(self.conv1_2(x))
        x = F.pooling(x, 2, 2)

        x = F.relu(self.conv2_1(x))
        x = F.relu(self.conv2_2(x))
        x = F.pooling(x, 2, 2)

        x = F.relu(self.conv3_1(x))
        x = F.relu(self.conv3_2(x))
        x = F.relu(self.conv3_3(x))
        x = F.pooling(x, 2, 2)

        x = F.relu(self.conv4_1(x))
        x = F.relu(self.conv4_2(x))
        x = F.relu(self.conv4_3(x))
        x = F.pooling(x, 2, 2)

        x = F.relu(self.conv5_1(x))
        x = F.relu(self.conv5_2(x))
        x = F.relu(self.conv5_3(x))
        x = F.pooling(x, 2, 2)

        x = F.reshape(x, (x.shape[0], -1))

        x = F.dropout(F.relu(self.fc6(x)))
        x = F.dropout(F.relu(self.fc7(x)))
        x = self.fc8(x)

        return x
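The reshape before `fc6` flattens whatever the five pooling stages leave over. A quick sanity check of the spatial sizes, assuming the standard VGG16 input of 224x224 and 512 channels out of `conv5_3`:

h = w = 224
for _ in range(5):  # five 2x2 poolings with stride 2
    h, w = h // 2, w // 2
print(h, w, 512 * h * w)  # 7 7 25088 -> fc6 receives vectors of length 25088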
Example #3
    def test_reshape(self):
        x = Variable(np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]))
        y = reshape(x, (6, ))
        y.backward()
        assert_array_equal(y.data, np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0]))
        assert_array_equal(x.grad.data,
                           np.array([[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]]))

    def test_backward(self):
        x = Variable(np.array([[1, 2, 3], [4, 5, 6]]))
        y = reshape(x, (6, ))
        y.backward(retain_grad=True)
        self.assertEqual(y.grad.shape, (6, ))
        self.assertEqual(x.grad.shape, x.shape)
        self.assertTrue((x.grad.data == np.ones((2, 3))).all())

    def test_reshape(self):
        x = Variable(np.array([[1, 2, 3], [4, 5, 6]]))
        y = F.reshape(x, (6, ))
        y.backward(retain_grad=True)

        self.assertEqual(y.shape, (6, ))
        self.assertTrue(np.allclose(y.data, np.array([1, 2, 3, 4, 5, 6])))
        self.assertTrue(
            np.allclose(x.grad.data, np.array([[1, 1, 1], [1, 1, 1]])))
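All three tests expect an all-ones gradient because reshape is shape-only: `backward()` seeds the output gradient with ones, and the backward pass merely reshapes that seed to `x`'s shape. The same check in plain NumPy:

import numpy as np

gy = np.ones(6)        # the default seed gradient used by y.backward()
gx = gy.reshape(2, 3)  # what the reshape backward effectively does
print(gx)              # [[1. 1. 1.] [1. 1. 1.]]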
Example #6
def forward(self, x):
    x = F.relu(self.conv1(x))  # (OH, OW)=(28, 28)
    x = F.pooling(x, 2, 2)  # (OH, OW)=(14, 14)
    #x = F.relu(self.conv2(x))
    #x = F.pooling(x, 2, 2)
    x = F.reshape(x, (x.shape[0], -1))  # flatten: (N, C, 14, 14) -> (N, C*14*14)
    x = F.dropout(F.relu(self.fc3(x)))
    #x = F.dropout(F.relu(self.fc4(x)))
    x = self.fc5(x)
    return x
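Because the flatten keeps the batch axis, `fc3` ends up seeing vectors of length C*14*14, where C is `conv1`'s output channel count (not shown in the excerpt). For a hypothetical C of 30:

C, H, W = 30, 14, 14  # hypothetical channel count; H and W follow from the comments above
print(C * H * W)  # 5880 -> the per-sample vector length reaching fc3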
Example #7
def forward(self, x):
    x = F.relu(self.conv1_1(x))  # (OH, OW)=(28, 28)
    x = F.relu(self.conv1_2(x))  # (OH, OW)=(28, 28)
    x = F.pooling(x, 2, 2)  # (OH, OW)=(14, 14)
    x = F.relu(self.conv2_1(x))  # (OH, OW)=(14, 14)
    x = F.relu(self.conv2_2(x))  # (OH, OW)=(14, 14)
    x = F.pooling(x, 2, 2)  # (OH, OW)=(7, 7)
    x = F.reshape(x, (x.shape[0], -1))  # flatten: (N, C, 7, 7) -> (N, C*7*7)
    x = F.dropout(F.relu(self.fc3(x)))
    x = self.fc4(x)
    return x
Example #8
import os, sys

sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import numpy as np
from dezero import Variable
import dezero.functions as F

x = Variable(np.array([[0, 1, 2], [3, 4, 5]]))
y = F.reshape(x, (6, ))
# or y = x.reshape(6)
y.backward(retain_grad=True)
print(y.grad)

x = Variable(np.array([[1, 2, 3], [4, 5, 6]]))
y = F.transpose(x)
# or y = x.T
y.backward()
print(x.grad)
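Both prints show all-ones gradients: `retain_grad=True` keeps the seed gradient on `y` (six ones), and for the transpose the ones flowing back are simply transposed again, leaving `x.grad` as a 2x3 array of ones.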
Example #9
import numpy as np

import dezero
import dezero.functions as F
import dezero.layers as L
from dezero import DataLoader
from dezero.models import Sequential
from dezero.optimizers import Adam

use_gpu = dezero.cuda.gpu_enable
max_epoch = 5
batch_size = 128
hidden_size = 62

fc_channel, fc_height, fc_width = 128, 7, 7

gen = Sequential(L.Linear(1024), L.BatchNorm(), F.relu,
                 L.Linear(fc_channel * fc_height * fc_width), L.BatchNorm(),
                 F.relu,
                 lambda x: F.reshape(x, (-1, fc_channel, fc_height, fc_width)),
                 L.Deconv2d(fc_channel // 2, kernel_size=4, stride=2, pad=1),
                 L.BatchNorm(), F.relu,
                 L.Deconv2d(1, kernel_size=4, stride=2, pad=1), F.sigmoid)
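# Shape walk-through (assuming MNIST-sized targets): the lambda reshapes the
# fc output to (N, 128, 7, 7); each 4x4 / stride-2 / pad-1 deconv doubles H
# and W (7 -> 14 -> 28), so the generator emits (N, 1, 28, 28) images.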

dis = Sequential(L.Conv2d(64, kernel_size=4, stride=2, pad=1), F.leaky_relu,
                 L.Conv2d(128, kernel_size=4, stride=2, pad=1),
                 L.BatchNorm(), F.leaky_relu, F.flatten, L.Linear(1024),
                 L.BatchNorm(), F.leaky_relu, L.Linear(1), F.sigmoid)


def init_weight(dis, gen, hidden_size):
    # Input dummy data to initialize weights
    batch_size = 1
    z = np.random.rand(batch_size, hidden_size)
    fake_images = gen(z)
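    # The excerpt is truncated here. Presumably the dummy batch is also fed
    # through the discriminator so its lazily-sized layers get their shapes;
    # the call below is an assumption, not part of the excerpt:
    dis(fake_images)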
Example #10
import dezero.functions as F


def _global_average_pooling_2d(x):
    N, C, H, W = x.shape
    h = F.average_pooling(x, (H, W), stride=1)  # one window over the full map: (N, C, 1, 1)
    h = F.reshape(h, (N, C))  # squeeze the 1x1 spatial dims
    return h
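Average-pooling with a kernel that covers the whole H x W map leaves one value per channel, and the reshape squeezes the result from (N, C, 1, 1) down to (N, C). A quick check with hypothetical sizes:

import numpy as np
from dezero import Variable

x = Variable(np.random.rand(2, 16, 8, 8).astype(np.float32))
print(_global_average_pooling_2d(x).shape)  # (2, 16)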
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import numpy as np
from dezero import Variable
import dezero.functions as F

x = Variable(np.array([[0, 1, 2], [3, 4, 5]]))
y = F.reshape(x, (6, ))  # y = x.reshape(6)
y.backward(retain_grad=True)
print(x.grad)

x = Variable(np.array([[1, 2, 3], [4, 5, 6]]))
y = F.transpose(x)  # y = x.T
y.backward()
print(x.grad)
    def test_forward(self):
        x = Variable(np.array([[1, 2, 3], [4, 5, 6]]))
        y = reshape(x, (6, ))
        self.assertEqual(y.shape, (6, ))

    def test_reshape_backward(self):
        x = Variable(np.array([[1, 2, 3], [4, 5, 6]]))
        y = F.reshape(x, (6, ))
        y.backward(create_graph=False)

        assert x.grad.shape == (2, 3)

    def test_reshape_forward(self):
        x = Variable(np.array([[1, 2, 3], [4, 5, 6]]))
        y = F.reshape(x, (6, ))
        assert y.shape == (6, )