Example #1
    def forward(self, state):
        """Build an actor (policy) network that maps states to actions."""
        x = F.relu(self.fc1(state))
        x = F.relu(self.fc2(x))
        x = torch.tanh(self.fc3(x))  # torch.tanh keeps outputs in [-1, 1] (F.tanh is deprecated)

        return x
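For context, here is a minimal sketch of the module this method could belong to; only fc1, fc2, and fc3 are given by the example, so the class name and layer widths below are assumptions:

import torch
import torch.nn as nn
import torch.nn.functional as F

class Actor(nn.Module):
    """Deterministic policy network: state -> action in [-1, 1]."""
    def __init__(self, state_size, action_size, fc1_units=400, fc2_units=300):
        super().__init__()
        self.fc1 = nn.Linear(state_size, fc1_units)  # hidden widths are assumptions
        self.fc2 = nn.Linear(fc1_units, fc2_units)
        self.fc3 = nn.Linear(fc2_units, action_size)

Combined with the forward above, Actor(state_size=33, action_size=4)(torch.randn(1, 33)) would return a (1, 4) tensor of actions squashed into [-1, 1] by the tanh.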
Example #2
    def forward(self, state, action):
        """Build a critic (value) network that maps
        (state, action) pairs -> Q-values."""
        x = F.relu(self.fc1(state))
        x = F.relu(self.fc2(torch.cat([x, action], dim=1)))  # fold the action into the mapping
        x = self.fc3(x)  # no final activation: Q-values may be negative

        return x
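A matching constructor sketch for the critic; note that because the forward concatenates the action after the first layer, fc2 must accept fc1_units + action_size inputs (all widths here are assumptions):

import torch.nn as nn

class Critic(nn.Module):
    """State-action value network: (state, action) -> Q(s, a)."""
    def __init__(self, state_size, action_size, fc1_units=400, fc2_units=300):
        super().__init__()
        self.fc1 = nn.Linear(state_size, fc1_units)
        # the action joins the features after fc1, so widen fc2 accordingly
        self.fc2 = nn.Linear(fc1_units + action_size, fc2_units)
        self.fc3 = nn.Linear(fc2_units, 1)  # one scalar Q-value per pair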
Example #3
    def forward(self, x):
        h = F.relu(self.bn1(x), inplace=True)
        h = self.conv1(h)
        h = F.relu(self.bn2(h), inplace=True)
        h = F.dropout(h, p=self.drop_rate, training=self.training)  # disable dropout at eval time
        h = self.conv2(h)
        y = F.relu(h + self.shortcut(x))

        return y
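This forward implements a pre-activation residual block (BN -> ReLU -> conv, twice, with dropout in between). A constructor consistent with the attribute names above might look as follows; the channel counts, stride handling, and 1x1 shortcut are assumptions:

import torch.nn as nn

class PreActBlock(nn.Module):
    def __init__(self, in_ch, out_ch, stride=1, drop_rate=0.3):
        super().__init__()
        self.drop_rate = drop_rate
        self.bn1 = nn.BatchNorm2d(in_ch)
        self.conv1 = nn.Conv2d(in_ch, out_ch, 3, stride=stride, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(out_ch)
        self.conv2 = nn.Conv2d(out_ch, out_ch, 3, padding=1, bias=False)
        # project the input when shapes differ so the residual addition is valid
        self.shortcut = (nn.Conv2d(in_ch, out_ch, 1, stride=stride, bias=False)
                         if stride != 1 or in_ch != out_ch else nn.Identity())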
Example #4
    def forward(self, x):
        # max-pool over a 2x2 window after each convolution
        x = F.max_pool2d(F.relu(self.conv1(x)), (2, 2))
        x = F.max_pool2d(F.relu(self.conv2(x)), 2)  # a plain int works for square windows
        x = x.view(-1, self.num_flat_features(x))
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        x = self.fc3(x)
        return x
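The call to self.num_flat_features is a helper not shown in this example; a common implementation (as in the classic PyTorch tutorial this network resembles) multiplies out every dimension except the batch dimension:

    def num_flat_features(self, x):
        size = x.size()[1:]  # all dimensions except the batch dimension
        num_features = 1
        for s in size:
            num_features *= s
        return num_features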
Example #5
    def forward(self, x):
        h = self.conv1(x)
        h = F.relu(self.bn1(h), inplace=True)
        h = self.pool1(h)
        h = self.block0(h)
        for block in self.block1:
            h = block(h)
        h = self.conv2(h)
        for block in self.block2:
            h = block(h)
        h = self.conv3(h)
        for block in self.block3:
            h = block(h)
        h = self.conv4(h)
        for block in self.block4:
            h = block(h)
        h = self.avg_pool(h)
        h = torch.flatten(h, 1)  # flatten the pooled (N, C, 1, 1) maps before the linear layer
        h = self.fc1(h)
        h = torch.relu(h)
        h = self.out(h)
        y = torch.log_softmax(h, dim=1)

        return y
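Two details worth noting: the for-loops require self.block1 through self.block4 to be nn.ModuleList (or nn.Sequential) instances so their parameters are registered, and because the network returns log-probabilities via torch.log_softmax, it pairs with nn.NLLLoss rather than nn.CrossEntropyLoss (which applies log-softmax itself). A self-contained illustration of that loss pairing, with made-up shapes:

import torch
import torch.nn as nn

criterion = nn.NLLLoss()  # expects log-probabilities as input
log_probs = torch.randn(8, 10).log_softmax(dim=1)  # stand-in for model output: batch of 8, 10 classes
targets = torch.randint(0, 10, (8,))
loss = criterion(log_probs, targets)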
Example #6
    def encode(self, X, layer_number=-1):
        """Return the pre-activation output of the layer at layer_number."""
        if layer_number < 0:
            layer_number += len(self.lin)  # support negative indices, as with lists
        for i in range(layer_number):
            X = F.relu(self.lin[i](X))
        return self.lin[layer_number](X)
Example #7
    def forward(self, X):
        if self.encoder is not None:
            X = self.encoder(X)
        for layer in self.lin:
            X = F.relu(layer(X))
        return self.out(X)
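Both methods index and iterate over self.lin, so it should be an nn.ModuleList of linear layers. A plausible constructor, with the class name, layer widths, and optional encoder all assumptions:

import torch.nn as nn

class MLP(nn.Module):  # hypothetical name
    def __init__(self, in_dim, hidden_dims, out_dim, encoder=None):
        super().__init__()
        self.encoder = encoder  # optional feature extractor applied in forward
        dims = [in_dim] + list(hidden_dims)
        # ModuleList registers the layers and supports both indexing and iteration
        self.lin = nn.ModuleList(
            nn.Linear(dims[i], dims[i + 1]) for i in range(len(dims) - 1)
        )
        self.out = nn.Linear(dims[-1], out_dim)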
Example #8
    def forward(self, input, hidden):
        output = self.embedding(input).view(1, 1, -1)  # (seq_len=1, batch=1, hidden_size)
        output = F.relu(output)
        output, hidden = self.gru(output, hidden)
        output = self.softmax(self.out(output[0]))  # scores for the next output token
        return output, hidden
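This is the decoder step of a GRU sequence-to-sequence model; the .view(1, 1, -1) reshapes a single token's embedding to the (seq_len, batch, features) layout nn.GRU expects. A companion constructor consistent with the attributes used above (sizes assumed, and self.softmax taken to be a LogSoftmax since the result typically feeds an NLL loss):

import torch.nn as nn

class DecoderRNN(nn.Module):
    def __init__(self, hidden_size, output_size):
        super().__init__()
        self.embedding = nn.Embedding(output_size, hidden_size)
        self.gru = nn.GRU(hidden_size, hidden_size)
        self.out = nn.Linear(hidden_size, output_size)
        self.softmax = nn.LogSoftmax(dim=1)  # log-probabilities over the output vocabulary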