def __init__(self):
    super(MyConv, self).__init__()
    self.conv = nn.Sequential(
        exnn.Conv3d(20, 30, 5, 5),
        exnn.Conv3d(30, 60, 5, 5),
        exnn.Flatten(),
    )
def __init__(self, classes):
    super(TeacherNet, self).__init__()
    self.classes = classes
    # Feature extractor: five conv blocks with batch norm and ReLU,
    # interleaved with four stride-2 max poolings.
    self.conv = nn.Sequential(
        nn.Conv2d(in_channels=3, out_channels=96, kernel_size=3, stride=1, padding=1),
        nn.BatchNorm2d(96),
        nn.ReLU(),
        nn.MaxPool2d(kernel_size=3, stride=2),
        nn.Conv2d(in_channels=96, out_channels=256, kernel_size=3, stride=1, padding=1),
        nn.BatchNorm2d(256),
        nn.ReLU(),
        nn.MaxPool2d(kernel_size=3, stride=2),
        nn.Conv2d(in_channels=256, out_channels=384, kernel_size=3, stride=1, padding=1),
        nn.BatchNorm2d(384),
        nn.ReLU(),
        nn.Conv2d(in_channels=384, out_channels=384, kernel_size=3, stride=1, padding=1),
        nn.BatchNorm2d(384),
        nn.ReLU(),
        nn.MaxPool2d(3, 2),
        nn.Conv2d(in_channels=384, out_channels=256, kernel_size=3, stride=1, padding=1),
        nn.BatchNorm2d(256),
        nn.ReLU(),
        nn.MaxPool2d(3, 2))
    # Classifier head: global average pooling, flatten, then a linear layer.
    self.fc = nn.Sequential(exnn.GlobalAvgPool2d(),
                            exnn.Flatten(),
                            nn.Linear(256, self.classes))
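A minimal usage sketch, under two assumptions not shown in the snippet: that TeacherNet also defines forward(x) = self.fc(self.conv(x)), and that inputs are 3-channel images of at least 32x32 so the four stride-2 poolings do not collapse the feature map below 1x1.

import torch

# Hypothetical usage; forward(...) is assumed to be self.fc(self.conv(x)).
model = TeacherNet(classes=10)
x = torch.randn(8, 3, 32, 32)        # batch of 8 RGB images, 32x32
logits = model(x)                    # expected shape: (8, 10)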
def __init__(self, out_channels, activation='relu'):
    super(FlattenLinear, self).__init__()
    self.out_channels = out_channels
    self.linear = exnn.Linear(self.out_channels)
    self.flatten = exnn.Flatten()
    if activation == 'relu':
        self.activation = nn.ReLU()
    else:
        self.activation = nn.Identity()
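A sketch of the corresponding forward pass, which is not part of the snippet above; it assumes the intended composition is flatten, then the lazily-sized linear layer, then the chosen activation.

# Hypothetical forward for FlattenLinear (assumption, not from the snippet).
def forward(self, x):
    x = self.flatten(x)              # (N, *) -> (N, features)
    x = self.linear(x)               # exnn.Linear infers in_features at first call
    return self.activation(x)        # ReLU or Identity, per the constructor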
def __init__(self):
    super(Net, self).__init__()
    self.linear = nn.Sequential(
        exnn.Flatten(),
        nn.Linear(784, 50),
        nn.ReLU(),
        nn.Dropout(),
        nn.Linear(50, 10),
        nn.LogSoftmax(dim=1)
    )
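A hypothetical usage example, assuming Net exposes a forward(x) that applies self.linear and that inputs are MNIST-style (N, 1, 28, 28) tensors, which Flatten reshapes to (N, 784).

import torch

# Hypothetical usage; forward(...) is assumed to be self.linear(x).
model = Net()
x = torch.randn(64, 1, 28, 28)       # 28 * 28 = 784 features after flattening
log_probs = model(x)                 # expected shape: (64, 10), log-probabilities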
def construct(self, idx):
    # Work on a fresh copy of the layer template so the one-hot marker and
    # hyperparameter values set below do not leak between calls.
    _layer_dict = copy.deepcopy(self.layer_dict)
    (module_name, params) = self.layer[idx]
    _layer_dict[module_name] = 1
    for k, v in params.items():
        _layer_dict[k] = v
    vec = list(_layer_dict.values())
    if module_name == 'conv2d':
        return conv2d(**params), vec
    elif module_name == 'linear':
        return FlattenLinear(**params), vec
    elif module_name == 'identity':
        return exnn.Flatten(), vec
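A hedged usage sketch; `builder` is a placeholder for whatever object owns construct, layer_dict, and layer, since its class is not shown above.

# Hypothetical usage of construct(); `builder` is a placeholder name.
module, vec = builder.construct(idx=0)
# `module` is the instantiated layer (conv2d, FlattenLinear, or exnn.Flatten);
# `vec` is a flat encoding: a 1 marking the chosen layer type plus its
# hyperparameter values, in the key order of layer_dict.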
def __init__(self, seq_len=21, out_channels=64):
    super(Tox21Embedding, self).__init__()
    self.embd = nn.Embedding(seq_len, out_channels)
    self.pool = GlobalAvgPool1d()
    self.flatten = exnn.Flatten()
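A sketch of a matching forward pass, not shown above; it assumes token-index inputs of shape (N, L) and that GlobalAvgPool1d pools over the last axis, so the embedding output is transposed from (N, L, C) to (N, C, L) first.

# Hypothetical forward for Tox21Embedding (assumption, not from the snippet).
def forward(self, x):
    x = self.embd(x)                 # (N, L) -> (N, L, C)
    x = x.transpose(1, 2)            # (N, L, C) -> (N, C, L) for 1-d pooling
    x = self.pool(x)                 # global average over the sequence axis
    return self.flatten(x)           # -> (N, C)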
def __init__(self, out_channels):
    super(FlattenLinear, self).__init__()
    self.out_channels = out_channels
    self.linear = exnn.Linear(self.out_channels)
    self.flatten = exnn.Flatten()
def _get_flatten():
    return exnn.Flatten()