Code example #1
    def __init__(self):
        super().__init__()
        # ModuleList registers the layers as submodules (a plain Python
        # list would hide their parameters from the optimizer)
        self.sequential = nn.ModuleList([
            nn.Linear(784, 300),
            nn.ReLU(),
            nn.Linear(300, 300),
            nn.ReLU(),
            nn.Linear(300, 10)
        ])
        # intermediate activations recorded during the forward pass
        self.activation = []
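This example keeps its layers in a ModuleList and holds an empty self.activation list, which suggests the forward pass applies the layers one by one and records the intermediate outputs. A minimal, self-contained sketch of that pattern follows; the class name MLP, the flattening of a 28x28 input to 784 features, and recording only the ReLU outputs are assumptions, not part of the original snippet.

import torch
import torch.nn as nn

class MLP(nn.Module):  # hypothetical wrapper for code example #1
    def __init__(self):
        super().__init__()
        self.sequential = nn.ModuleList([
            nn.Linear(784, 300), nn.ReLU(),
            nn.Linear(300, 300), nn.ReLU(),
            nn.Linear(300, 10)
        ])
        self.activation = []

    def forward(self, x):
        x = x.view(x.size(0), -1)      # assumed (batch, 1, 28, 28) -> (batch, 784)
        self.activation = []           # reset the record on every call
        for layer in self.sequential:
            x = layer(x)
            if isinstance(layer, nn.ReLU):
                self.activation.append(x.detach())  # keep each hidden activation
        return x

logits = MLP()(torch.randn(8, 1, 28, 28))  # -> shape (8, 10)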
Code example #2
    def __init__(self):
        super().__init__()
        # ModuleList registers the layers as submodules (a plain Python
        # list would hide their parameters from the optimizer)
        self.sequential = nn.ModuleList([
            nn.Conv2d(1, 16, 3),
            nn.ReLU(),
            nn.MaxPool2d(2),
            nn.Conv2d(16, 32, 3),
            nn.ReLU(),
            nn.MaxPool2d(2),
            nn.Conv2d(32, 64, 3),
            nn.ReLU(),
            nn.MaxPool2d(2),
            nn.Conv2d(64, 128, 3),
            nn.ReLU(),
            nn.MaxPool2d(2),
            nn.Flatten(),
            nn.Linear(1 * 1 * 128, 10)
        ])
        # intermediate activations recorded during the forward pass
        self.activation = []
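The final nn.Linear(1 * 1 * 128, 10) implies the conv/pool stack reduces the input to a 1x1 map with 128 channels, which only happens for certain input resolutions; the snippet does not state one, so the quick shape check below assumes a single-channel 48x48 input as one size for which the arithmetic works out.

import torch
import torch.nn as nn

# assumed input resolution: 1 x 48 x 48 (any side length from 46 to 61
# also collapses to 1 x 1 here); the original snippet does not state it
stack = nn.Sequential(
    nn.Conv2d(1, 16, 3), nn.ReLU(), nn.MaxPool2d(2),
    nn.Conv2d(16, 32, 3), nn.ReLU(), nn.MaxPool2d(2),
    nn.Conv2d(32, 64, 3), nn.ReLU(), nn.MaxPool2d(2),
    nn.Conv2d(64, 128, 3), nn.ReLU(), nn.MaxPool2d(2),
)
x = torch.randn(1, 1, 48, 48)
print(stack(x).shape)             # torch.Size([1, 128, 1, 1])
print(stack(x).flatten(1).shape)  # torch.Size([1, 128]) == 1 * 1 * 128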
Code example #3
    def __init__(self):
        super().__init__()
        self.CNNSequential = SeqModule([
            # convolution 1 + ReLU
            nn.Conv2d(1, 4, 3),
            nn.ReLU(),
            # convolution 2 + ReLU + pooling
            nn.Conv2d(4, 4, 3),
            nn.ReLU(),
            nn.MaxPool2d(2),
            # convolution 3 + ReLU
            nn.Conv2d(4, 8, 3),
            nn.ReLU(),
            # convolution 4 + ReLU + pooling
            nn.Conv2d(8, 8, 3),
            nn.ReLU(),
            nn.MaxPool2d(2),
        ])
        # up to the 5th conv layer
        self.CNN2Sequential = SeqModule([
            nn.Conv2d(8, 16, 3),
            nn.ReLU(),
        ])
        ############
        # Concatenate
        ############
        self.afterCNN = SeqModule([
            nn.MaxPool2d(2, padding=1),
            # same as reshape
            nn.Flatten(),
        ])

        self.lastLayer = SeqModule([
            nn.Linear(384, 128),
            nn.ReLU(),
            nn.Linear(128, 10)
        ])
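Both this example and the next one group their layers with a SeqModule container that is not defined in these snippets. A minimal stand-in that would make them runnable is sketched below; its behaviour (apply the layers in order, keeping only the first element when a layer such as nn.RNN returns a tuple) is an assumption, not the original definition.

import torch.nn as nn

class SeqModule(nn.Module):
    """Hypothetical stand-in for the SeqModule container used above:
    applies the given layers in order, like nn.Sequential."""
    def __init__(self, layers):
        super().__init__()
        self.layers = nn.ModuleList(layers)

    def forward(self, x):
        for layer in self.layers:
            x = layer(x)
            if isinstance(x, tuple):   # e.g. nn.RNN returns (output, h_n)
                x = x[0]
        return x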
Code example #4
    def __init__(self, embed, dropout=False):
        super().__init__()
        self.CNNSequential = SeqModule([
            # convolution 1 + ReLU
            nn.Conv2d(3, 16, 3),
            nn.ReLU(),
            # convolution 2 + ReLU + pooling
            nn.Conv2d(16, 16, 3),
            nn.ReLU(),
            nn.MaxPool2d(2),
            # convolution 3 + ReLU + pooling
            nn.Conv2d(16, 32, 3),
            nn.ReLU(),
            nn.MaxPool2d(2),
            # convolution 4 + ReLU + pooling
            nn.Conv2d(32, 32, 3),
            nn.ReLU(),
            nn.MaxPool2d(2),
        ])
        # up to the 5th conv layer
        self.CNN2Sequential = SeqModule([
            nn.Conv2d(32, 64, 3),
            nn.ReLU(),
        ])
        ############
        # Concatenate
        ############
        self.afterCNN = SeqModule([
            nn.MaxPool2d(2),
            # same as reshape
            nn.Flatten(),
            # nn.Dropout takes a drop probability, not a flag; 0.5 is assumed
            nn.Dropout(0.5 if dropout else 0.0),
            nn.Linear(1536, 256),
            nn.Tanh()
        ])

        self.Dropout = nn.Dropout(0.5 if dropout else 0.0)
        self.RNN = SeqModule([
            nn.RNN(embed, 256),
            nn.Tanh(),
        ])
        ############
        # ADD in the middle
        ############

        self.lastLayer = SeqModule([
            nn.Linear(256, 256),
            nn.ReLU(),
            nn.Linear(256, 1665)
        ])
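The "ADD in the middle" marker, together with the matching 256-dimensional outputs of afterCNN and the RNN branch, suggests the image feature and the per-step RNN output are fused by element-wise addition before lastLayer. The sketch below shows only that fusion step on dummy tensors; the batch-first layout, the sequence length, and broadcasting the image feature over time steps are assumptions, not the original author's code.

import torch
import torch.nn as nn

batch, seq_len = 4, 12
img_feat = torch.randn(batch, 256)          # output of afterCNN (assumed shape)
rnn_out = torch.randn(batch, seq_len, 256)  # output of the RNN branch (assumed batch-first)

# "ADD in the middle": broadcast the image feature over every time step
fused = rnn_out + img_feat.unsqueeze(1)     # (batch, seq_len, 256)

last_layer = nn.Sequential(nn.Linear(256, 256), nn.ReLU(), nn.Linear(256, 1665))
logits = last_layer(fused)                  # (batch, seq_len, 1665) per-step scores
print(logits.shape)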