示例#1
0
 def __init__(self):
     """Two stacked exnn 3-D convolutions followed by a flatten layer."""
     super(MyConv, self).__init__()
     # Channel progression 20 -> 30 -> 60; the trailing (5, 5) arguments are
     # exnn.Conv3d's positional parameters -- semantics defined by exnn.
     stages = [
         exnn.Conv3d(20, 30, 5, 5),
         exnn.Conv3d(30, 60, 5, 5),
         exnn.Flatten(),
     ]
     self.conv = nn.Sequential(*stages)
示例#2
0
 def __init__(self, classes):
     """AlexNet-style CNN teacher: five conv stages, then a linear classifier.

     Args:
         classes: number of output classes for the final linear layer.
     """
     super(TeacherNet, self).__init__()
     self.classes = classes

     def _cbr(c_in, c_out):
         # One 3x3 same-padding convolution followed by batch-norm and ReLU.
         return [
             nn.Conv2d(in_channels=c_in, out_channels=c_out,
                       kernel_size=3, stride=1, padding=1),
             nn.BatchNorm2d(c_out),
             nn.ReLU(),
         ]

     # Build the feature extractor as a flat list so the stage structure is
     # explicit; module creation order matches the original exactly.
     features = []
     features += _cbr(3, 96)
     features.append(nn.MaxPool2d(kernel_size=3, stride=2))
     features += _cbr(96, 256)
     features.append(nn.MaxPool2d(kernel_size=3, stride=2))
     features += _cbr(256, 384)
     features += _cbr(384, 384)
     features.append(nn.MaxPool2d(3, 2))
     features += _cbr(384, 256)
     features.append(nn.MaxPool2d(3, 2))
     self.conv = nn.Sequential(*features)

     # Global average pooling keeps the head input-size agnostic before the
     # 256 -> classes linear classifier.
     self.fc = nn.Sequential(exnn.GlobalAvgPool2d(), exnn.Flatten(),
                             nn.Linear(256, self.classes))
示例#3
0
 def __init__(self, out_channels, activation='relu'):
     """Flatten followed by a linear projection with optional ReLU.

     Args:
         out_channels: output width handed to exnn.Linear.
         activation: 'relu' selects nn.ReLU(); any other value deliberately
             falls back to the no-op nn.Identity().
     """
     super(FlattenLinear, self).__init__()
     self.out_channels = out_channels
     self.linear = exnn.Linear(self.out_channels)
     self.flatten = exnn.Flatten()
     self.activation = nn.ReLU() if activation == 'relu' else nn.Identity()
示例#4
0
 def __init__(self):
     """Small MLP classifier emitting log-probabilities over 10 classes."""
     super(Net, self).__init__()
     # Flatten the input (784 features, presumably 28x28 images -- confirm
     # against callers), then a single hidden layer with dropout.
     pipeline = [
         exnn.Flatten(),
         nn.Linear(784, 50),
         nn.ReLU(),
         nn.Dropout(),
         nn.Linear(50, 10),
         nn.LogSoftmax(dim=1),
     ]
     self.linear = nn.Sequential(*pipeline)
示例#5
0
 def construct(self, idx):
     """Instantiate the layer at *idx* and return it with its encoding vector.

     Args:
         idx: index into self.layer; each entry is (module_name, params).

     Returns:
         (module, vec) for the known module names 'conv2d', 'linear' and
         'identity'. NOTE(review): an unrecognized name falls off the end and
         returns a bare None (not a tuple) -- confirm callers expect this.
     """
     encoding = copy.deepcopy(self.layer_dict)
     name, kwargs = self.layer[idx]
     encoding[name] = 1        # one-hot mark of the chosen layer type
     encoding.update(kwargs)   # record its hyper-parameters in the encoding
     vec = list(encoding.values())
     if name == 'conv2d':
         return conv2d(**kwargs), vec
     if name == 'linear':
         return FlattenLinear(**kwargs), vec
     if name == 'identity':
         return exnn.Flatten(), vec
示例#6
0
 def __init__(self, seq_len=21, out_channels=64):
     """Embed token indices, average-pool over the sequence, and flatten.

     Args:
         seq_len: vocabulary/index size handed to nn.Embedding as
             num_embeddings (presumably the Tox21 token count -- confirm).
         out_channels: embedding dimension per token.
     """
     super(Tox21Embedding, self).__init__()
     self.embd = nn.Embedding(num_embeddings=seq_len,
                              embedding_dim=out_channels)
     self.pool = GlobalAvgPool1d()
     self.flatten = exnn.Flatten()
示例#7
0
 def __init__(self, out_channels):
     """Flatten the input, then project it to *out_channels* features.

     Args:
         out_channels: output width handed to exnn.Linear.
     """
     super(FlattenLinear, self).__init__()
     self.out_channels = out_channels
     self.linear = exnn.Linear(out_channels)
     self.flatten = exnn.Flatten()
示例#8
0
 def _get_flatten():
     """Return a freshly constructed exnn.Flatten module."""
     flatten_module = exnn.Flatten()
     return flatten_module