def __init__(self, features_dim: int):
    """Build a small conv net that maps a 1-channel image to a feature vector.

    Args:
        features_dim: size of the output tensor
    """
    # Zero-argument super() (Python 3 idiom, matching the other ctors in
    # this file) — the old super(SimpleConv, self) form breaks silently
    # if the class is renamed.
    super().__init__()
    self._net = nn.Sequential(
        nn.Conv2d(1, 32, 3, 1),
        nn.ReLU(),
        nn.Conv2d(32, 64, 3, 1),
        nn.ReLU(),
        nn.MaxPool2d(2),
        Flatten(),
        # 9216 = 64 * 12 * 12 — assumes 28x28 (MNIST-like) input;
        # TODO(review): confirm input size with callers.
        nn.Linear(9216, 128),
        nn.ReLU(),
        nn.Linear(128, features_dim),
        Normalize(),
    )
def __init__(self, out_features: int, normalize: bool = True):
    """Construct the convolutional encoder.

    Args:
        out_features: size of the output tensor
        normalize: if True, append a Normalize() layer after the
            final linear projection
    """
    super().__init__()
    # Optional tail: a Normalize() layer, present only when requested.
    tail = (Normalize(),) if normalize else ()
    self._net = nn.Sequential(
        nn.Conv2d(1, 32, 3, 1),
        nn.ReLU(),
        nn.Conv2d(32, 64, 3, 1),
        nn.ReLU(),
        nn.MaxPool2d(2),
        Flatten(),
        # 9216 = 64 * 12 * 12 — presumably sized for 28x28 input;
        # TODO(review): confirm with callers.
        nn.Linear(9216, 128),
        nn.ReLU(),
        nn.Linear(128, out_features),
        *tail,
    )
def __init__(self, num_hidden1=128, num_hidden2=64):
    """Build a two-stage network: a conv feature extractor followed by an MLP head.

    Args:
        num_hidden1: size of the first hidden representation
        num_hidden2: size of the second hidden representation
    """
    super().__init__()
    # Stage 1: convolutional feature extraction, flattened at the end.
    convolutional = [
        nn.Conv2d(1, 32, 3, 1),
        nn.ReLU(),
        nn.Conv2d(32, 64, 3, 1),
        nn.ReLU(),
        nn.MaxPool2d(2),
        Flatten(),
    ]
    # Stage 2: linear head with normalized output.
    # 9216 = 64 * 12 * 12 — presumably sized for 28x28 input;
    # TODO(review): confirm with callers.
    fully_connected = [
        nn.Linear(9216, num_hidden1),
        nn.ReLU(),
        nn.Linear(num_hidden1, num_hidden2),
        Normalize(),
    ]
    self.conv_net = nn.Sequential(*convolutional)
    self.linear_net = nn.Sequential(*fully_connected)
    # Expose the full pipeline under the same name as the sibling classes.
    self._net = nn.Sequential(self.conv_net, self.linear_net)