def __init__(self, input, params=None, rng=np.random.RandomState(), zsize=100):
    # Generator: project a zsize-dim latent vector to a 4x4x1024 volume,
    # then upsample through four conv layers (4 -> 8 -> 16 -> 32 -> 64)
    # while shrinking channels (1024 -> 512 -> 256 -> 128 -> 3).
    self.input = input
    h_input = input
    # Fully connected projection of z, reshaped to (batch, 1024, 4, 4).
    h = FullyConnected(input=h_input, n_in=zsize, n_out=4 * 4 * 1024,
                       W=params[0] if params is not None else None,
                       b=params[1] if params is not None else None,
                       rng=rng)
    h_out = relu(batchnorm(h.output.reshape((input.shape[0], 1024, 4, 4))))
    # ConvLayer positional args (inferred from the sizes used here):
    # input, in_size, out_size, in_channels, out_channels.
    conv1 = ConvLayer(h_out, 4, 8, 1024, 512, rng=rng,
                      W=params[2] if params is not None else None)
    conv1_out = relu(batchnorm(conv1.output))
    conv2 = ConvLayer(conv1_out, 8, 16, 512, 256, rng=rng,
                      W=params[3] if params is not None else None)
    conv2_out = relu(batchnorm(conv2.output))
    conv3 = ConvLayer(conv2_out, 16, 32, 256, 128, rng=rng,
                      W=params[4] if params is not None else None)
    conv3_out = relu(batchnorm(conv3.output))
    conv4 = ConvLayer(conv3_out, 32, 64, 128, 3, rng=rng,
                      W=params[5] if params is not None else None)
    # tanh squashes the 3-channel output image into [-1, 1].
    conv4_out = T.tanh(conv4.output)
    self.output = conv4_out
    self.params = (h.params + conv1.params + conv2.params +
                   conv3.params + conv4.params)
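As a quick sanity check, the generator can be compiled into a Theano function and fed uniform noise. This is a minimal sketch, not code from the listing above: it assumes the `__init__` belongs to a class named `Generator`, and that z is drawn from U(-1, 1) as in the DCGAN paper.

import numpy as np
import theano
import theano.tensor as T

z = T.matrix('z')  # symbolic batch of latent vectors, (batch, zsize)
G = Generator(z)   # hypothetical class name wrapping the __init__ above
generate = theano.function([z], G.output)

# zsize defaults to 100 above; DCGAN samples z uniformly from [-1, 1].
z_batch = np.random.uniform(-1.0, 1.0,
                            size=(16, 100)).astype(theano.config.floatX)
samples = generate(z_batch)  # shape (16, 3, 64, 64), in [-1, 1] from the tanh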
def __init__(self, input, params=None, rng=np.random.RandomState()):
    # Discriminator: downsample a (batch, 3, 64, 64) image through four
    # conv layers (64 -> 32 -> 16 -> 8 -> 4) while growing channels
    # (3 -> 128 -> 256 -> 512 -> 1024), then score it with a single
    # fully connected output unit.
    self.input = input
    conv1 = ConvLayer(input, 64, 32, 3, 128, rng=rng,
                      W=params[0] if params is not None else None)
    # Per DCGAN, no batchnorm on the discriminator's first layer.
    conv1_out = lrelu(conv1.output)
    conv2 = ConvLayer(conv1_out, 32, 16, 128, 256, rng=rng,
                      W=params[1] if params is not None else None)
    conv2_out = lrelu(batchnorm(conv2.output))
    conv3 = ConvLayer(conv2_out, 16, 8, 256, 512, rng=rng,
                      W=params[2] if params is not None else None)
    conv3_out = lrelu(batchnorm(conv3.output))
    conv4 = ConvLayer(conv3_out, 8, 4, 512, 1024, rng=rng,
                      W=params[3] if params is not None else None)
    conv4_out = lrelu(batchnorm(conv4.output))
    # Flatten to (batch, 1024 * 4 * 4) and map to a single real/fake score.
    h_input = conv4_out.flatten(2)
    h = FullyConnected(input=h_input, n_in=1024 * 4 * 4, n_out=1,
                       W=params[4] if params is not None else None,
                       b=params[5] if params is not None else None,
                       rng=rng)
    self.output = h.output
    self.params = (conv1.params + conv2.params + conv3.params +
                   conv4.params + h.params)
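The two networks plug together through weight sharing: the discriminator is built once on real images and once on the generator's output, with the second copy reusing the first copy's parameter list via the `params` argument. The sketch below shows the standard GAN losses; it assumes class names `Generator`/`Discriminator` for the two `__init__` methods above, that `ConvLayer.params` is `[W]` and `FullyConnected.params` is `[W, b]` (so `D_real.params` lines up with the `params` indexing), and that a sigmoid is applied externally to the discriminator's linear score.

import theano.tensor as T

z = T.matrix('z')   # latent batch, (batch, 100)
x = T.tensor4('x')  # real images, (batch, 3, 64, 64)

G = Generator(z)                                         # assumed class name
D_real = Discriminator(x)                                # fresh parameters
D_fake = Discriminator(G.output, params=D_real.params)   # shared weights

# n_out=1 above yields a linear score, so squash it with a sigmoid here.
p_real = T.nnet.sigmoid(D_real.output)
p_fake = T.nnet.sigmoid(D_fake.output)

# Standard GAN objectives: the discriminator minimizes d_loss, while the
# generator minimizes the non-saturating g_loss = -mean(log D(G(z))).
d_loss = -T.mean(T.log(p_real) + T.log(1.0 - p_fake))
g_loss = -T.mean(T.log(p_fake))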