Example #1
    def __init__(self, num_classes=10, num_channels=1):
        super(AlexNet, self).__init__()
        self.num_classes = num_classes
        self.input_size = 224
        self.dimension = 4
        self.conv1 = layer.Conv2d(num_channels, 64, 11, stride=4, padding=2)
        self.conv2 = layer.Conv2d(64, 192, 5, padding=2)
        self.conv3 = layer.Conv2d(192, 384, 3, padding=1)
        self.conv4 = layer.Conv2d(384, 256, 3, padding=1)
        self.conv5 = layer.Conv2d(256, 256, 3, padding=1)
        self.linear1 = layer.Linear(4096)
        self.linear2 = layer.Linear(4096)
        self.linear3 = layer.Linear(num_classes)
        self.pooling1 = layer.MaxPool2d(2, 2, padding=0)
        self.pooling2 = layer.MaxPool2d(2, 2, padding=0)
        self.pooling3 = layer.MaxPool2d(2, 2, padding=0)
        self.avg_pooling1 = layer.AvgPool2d(3, 2, padding=0)
        self.relu1 = layer.ReLU()
        self.relu2 = layer.ReLU()
        self.relu3 = layer.ReLU()
        self.relu4 = layer.ReLU()
        self.relu5 = layer.ReLU()
        self.relu6 = layer.ReLU()
        self.relu7 = layer.ReLU()
        self.flatten = layer.Flatten()
        self.dropout1 = layer.Dropout()
        self.dropout2 = layer.Dropout()
        self.softmax_cross_entropy = layer.SoftMaxCrossEntropy()
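
For orientation, here is a minimal sketch of how a forward pass could chain the layers declared above, assuming the class extends SINGA's model.Model and each layer object is callable; it is illustrative, not necessarily the project's exact forward method:

    def forward(self, x):
        # feature extractor: five convolution stages with ReLU and pooling
        y = self.pooling1(self.relu1(self.conv1(x)))
        y = self.pooling2(self.relu2(self.conv2(y)))
        y = self.relu3(self.conv3(y))
        y = self.relu4(self.conv4(y))
        y = self.pooling3(self.relu5(self.conv5(y)))
        y = self.avg_pooling1(y)
        # classifier head: dropout plus two hidden linear layers and the output layer
        y = self.flatten(y)
        y = self.relu6(self.linear1(self.dropout1(y)))
        y = self.relu7(self.linear2(self.dropout2(y)))
        return self.linear3(y)  # logits; softmax_cross_entropy is applied during training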
Example #2
    def __init__(self):
        self.conv1 = layer.Conv2d(1, 20, 5, padding=0)
        self.conv2 = layer.Conv2d(20, 50, 5, padding=0)
        self.linear1 = layer.Linear(4 * 4 * 50, 500)
        self.linear2 = layer.Linear(500, 10)
        self.pooling1 = layer.MaxPool2d(2, 2, padding=0)
        self.pooling2 = layer.MaxPool2d(2, 2, padding=0)
        self.relu1 = layer.ReLU()
        self.relu2 = layer.ReLU()
        self.relu3 = layer.ReLU()
        self.flatten = layer.Flatten()
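
The 4 * 4 * 50 input size of linear1 follows from standard convolution and pooling shape arithmetic, assuming 28x28 single-channel inputs (MNIST-sized images); a quick self-contained check:

    def conv_out(size, kernel, stride=1, padding=0):
        return (size + 2 * padding - kernel) // stride + 1

    s = conv_out(28, 5)   # conv1, no padding: 28 -> 24
    s = s // 2            # pooling1 (2x2, stride 2): 24 -> 12
    s = conv_out(s, 5)    # conv2, no padding: 12 -> 8
    s = s // 2            # pooling2 (2x2, stride 2): 8 -> 4
    assert s * s * 50 == 800  # the 4 * 4 * 50 features fed into linear1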
Example #3
File: cnn.py Project: zlheui/singa
    def __init__(self, num_classes=10, num_channels=1):
        super(CNN, self).__init__()
        self.num_classes = num_classes
        self.input_size = 28
        self.dimension = 4
        self.conv1 = layer.Conv2d(num_channels, 20, 5, padding=0, activation="RELU")
        self.conv2 = layer.Conv2d(20, 50, 5, padding=0, activation="RELU")
        self.linear1 = layer.Linear(500)
        self.linear2 = layer.Linear(num_classes)
        self.pooling1 = layer.MaxPool2d(2, 2, padding=0)
        self.pooling2 = layer.MaxPool2d(2, 2, padding=0)
        self.relu = layer.ReLU()
        self.flatten = layer.Flatten()
        self.softmax_cross_entropy = layer.SoftMaxCrossEntropy()
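
A hedged usage sketch for this model, assuming SINGA's model API (callable layers, set_optimizer, compile against a placeholder tensor) and assuming the class's forward method, not shown in this excerpt, chains the layers above; the device, batch size, and learning rate are illustrative only:

    from singa import device, opt, tensor

    dev = device.get_default_device()          # CPU; device.create_cuda_gpu() if built with CUDA
    net = CNN(num_classes=10, num_channels=1)
    net.set_optimizer(opt.SGD(lr=0.01))

    x = tensor.Tensor((16, 1, 28, 28), dev)    # placeholder batch of 28x28 images
    x.gaussian(0.0, 1.0)
    net.compile([x], is_train=True, use_graph=False, sequential=False)
    logits = net.forward(x)                    # shape (16, num_classes)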
Example #4
    def __init__(self, block, layers, num_classes=10, num_channels=3):
        self.inplanes = 64
        super(ResNet, self).__init__()
        self.num_classes = num_classes
        self.input_size = 224
        self.dimension = 4
        self.conv1 = layer.Conv2d(num_channels,
                                  64,
                                  7,
                                  stride=2,
                                  padding=3,
                                  bias=False)
        self.bn1 = layer.BatchNorm2d(64)
        self.relu = layer.ReLU()
        self.maxpool = layer.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1, layers1 = self._make_layer(block, 64, layers[0])
        self.layer2, layers2 = self._make_layer(block, 128, layers[1], stride=2)
        self.layer3, layers3 = self._make_layer(block, 256, layers[2], stride=2)
        self.layer4, layers4 = self._make_layer(block, 512, layers[3], stride=2)
        self.avgpool = layer.AvgPool2d(7, stride=1)
        self.flatten = layer.Flatten()
        self.fc = layer.Linear(num_classes)
        self.softmax_cross_entropy = layer.SoftMaxCrossEntropy()

        self.register_layers(*layers1, *layers2, *layers3, *layers4)
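
_make_layer is not shown in this excerpt; one plausible shape for it, consistent with the tuple unpacked above (a callable stage plus the layer list handed to register_layers) and assuming the block classes expose an expansion factor as in typical ResNet implementations (residual downsampling is omitted from this sketch):

    def _make_layer(self, block, planes, num_blocks, stride=1):
        # The first block may change stride and width; the rest keep the shape.
        blocks = [block(self.inplanes, planes, stride)]
        self.inplanes = planes * block.expansion
        for _ in range(1, num_blocks):
            blocks.append(block(self.inplanes, planes))

        def forward(x):
            for b in blocks:
                x = b(x)
            return x

        # Returning the raw list as well lets __init__ register the blocks
        # so their parameters are tracked.
        return forward, blocks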
Example #5
    def __init__(self,
                 hidden_size,
                 q_seq,
                 a_seq,
                 num_layers=1,
                 bidirectional=True,
                 return_sequences=True):
        super(QAModel_maxpooling, self).__init__()
        self.hidden_size = hidden_size
        self.lstm_q = layer.CudnnRNN(hidden_size=hidden_size,
                                     bidirectional=bidirectional,
                                     return_sequences=return_sequences)
        self.lstm_a = layer.CudnnRNN(hidden_size=hidden_size,
                                     bidirectional=bidirectional,
                                     return_sequences=return_sequences)
        self.q_pool = layer.MaxPool2d((q_seq, 1))
        self.a_pool = layer.MaxPool2d((a_seq, 1))
Example #6
    def __init__(self,
                 in_filters,
                 out_filters,
                 reps,
                 strides=1,
                 padding=0,
                 start_with_relu=True,
                 grow_first=True):
        super(Block, self).__init__()

        if out_filters != in_filters or strides != 1:
            self.skip = layer.Conv2d(in_filters,
                                     out_filters,
                                     1,
                                     stride=strides,
                                     padding=padding,
                                     bias=False)
            self.skipbn = layer.BatchNorm2d(out_filters)
        else:
            self.skip = None

        self.layers = []

        filters = in_filters
        if grow_first:
            self.layers.append(layer.ReLU())
            self.layers.append(
                layer.SeparableConv2d(in_filters,
                                      out_filters,
                                      3,
                                      stride=1,
                                      padding=1,
                                      bias=False))
            self.layers.append(layer.BatchNorm2d(out_filters))
            filters = out_filters

        for i in range(reps - 1):
            self.layers.append(layer.ReLU())
            self.layers.append(
                layer.SeparableConv2d(filters,
                                      filters,
                                      3,
                                      stride=1,
                                      padding=1,
                                      bias=False))
            self.layers.append(layer.BatchNorm2d(filters))

        if not grow_first:
            self.layers.append(layer.ReLU())
            self.layers.append(
                layer.SeparableConv2d(in_filters,
                                      out_filters,
                                      3,
                                      stride=1,
                                      padding=1,
                                      bias=False))
            self.layers.append(layer.BatchNorm2d(out_filters))

        if not start_with_relu:
            self.layers = self.layers[1:]
        else:
            self.layers[0] = layer.ReLU()

        if strides != 1:
            self.layers.append(layer.MaxPool2d(3, strides, padding + 1))

        self.register_layers(*self.layers)

        self.add = layer.Add()
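
A minimal sketch of the residual combination such a Block could implement, assuming self.layers forms the main separable-convolution branch and self.skip/self.skipbn the projection shortcut; illustrative rather than the project's exact code:

    def forward(self, x):
        y = x
        for l in self.layers:          # main branch: ReLU / SeparableConv2d / BatchNorm2d (+ optional pool)
            y = l(y)
        if self.skip is not None:      # projection shortcut when channels or stride change
            residual = self.skipbn(self.skip(x))
        else:                          # identity shortcut otherwise
            residual = x
        return self.add(y, residual)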
Example #7
    def __init__(self, num_classes=10, num_channels=3):
        """ Constructor
        Args:
            num_classes: number of classes
        """
        super(Xception, self).__init__()
        self.num_classes = num_classes
        self.input_size = 299
        self.dimension = 4

        self.conv1 = layer.Conv2d(num_channels, 32, 3, 2, 0, bias=False)
        self.bn1 = layer.BatchNorm2d(32)
        self.relu1 = layer.ReLU()

        self.conv2 = layer.Conv2d(32, 64, 3, 1, 1, bias=False)
        self.bn2 = layer.BatchNorm2d(64)
        self.relu2 = layer.ReLU()
        # do relu here

        self.block1 = Block(64,
                            128,
                            2,
                            2,
                            padding=0,
                            start_with_relu=False,
                            grow_first=True)
        self.block2 = Block(128,
                            256,
                            2,
                            2,
                            padding=0,
                            start_with_relu=True,
                            grow_first=True)
        self.block3 = Block(256,
                            728,
                            2,
                            2,
                            padding=0,
                            start_with_relu=True,
                            grow_first=True)

        self.block4 = Block(728,
                            728,
                            3,
                            1,
                            start_with_relu=True,
                            grow_first=True)
        self.block5 = Block(728,
                            728,
                            3,
                            1,
                            start_with_relu=True,
                            grow_first=True)
        self.block6 = Block(728,
                            728,
                            3,
                            1,
                            start_with_relu=True,
                            grow_first=True)
        self.block7 = Block(728,
                            728,
                            3,
                            1,
                            start_with_relu=True,
                            grow_first=True)

        self.block8 = Block(728,
                            728,
                            3,
                            1,
                            start_with_relu=True,
                            grow_first=True)
        self.block9 = Block(728,
                            728,
                            3,
                            1,
                            start_with_relu=True,
                            grow_first=True)
        self.block10 = Block(728,
                             728,
                             3,
                             1,
                             start_with_relu=True,
                             grow_first=True)
        self.block11 = Block(728,
                             728,
                             3,
                             1,
                             start_with_relu=True,
                             grow_first=True)

        self.block12 = Block(728,
                             1024,
                             2,
                             2,
                             start_with_relu=True,
                             grow_first=False)

        self.conv3 = layer.SeparableConv2d(1024, 1536, 3, 1, 1)
        self.bn3 = layer.BatchNorm2d(1536)
        self.relu3 = layer.ReLU()

        # do relu here
        self.conv4 = layer.SeparableConv2d(1536, 2048, 3, 1, 1)
        self.bn4 = layer.BatchNorm2d(2048)

        self.relu4 = layer.ReLU()
        self.globalpooling = layer.MaxPool2d(10, 1)
        self.flatten = layer.Flatten()
        self.fc = layer.Linear(num_classes)

        self.softmax_cross_entropy = layer.SoftMaxCrossEntropy()
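
The 10x10 window of globalpooling matches the spatial size this stack produces for 299x299 inputs (input_size = 299): conv1 and blocks 1, 2, 3 and 12 each halve the map, while conv2, conv3, conv4 and the middle blocks preserve it. A quick check of that arithmetic:

    def out_size(size, kernel, stride, padding=0):
        return (size + 2 * padding - kernel) // stride + 1

    s = out_size(299, 3, 2)        # conv1: 299 -> 149; conv2 (stride 1, padding 1) keeps 149
    for _ in range(4):             # blocks 1, 2, 3 and 12 each end in MaxPool2d(3, stride 2, padding 1)
        s = out_size(s, 3, 2, 1)   # 149 -> 75 -> 38 -> 19 -> 10
    assert s == 10                 # so MaxPool2d(10, 1) reduces the map to 1x1 before Flatten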