Example #1
    def __init__(self, num_classes=10, num_channels=1):
        super(AlexNet, self).__init__()
        self.num_classes = num_classes
        self.input_size = 224
        self.dimension = 4
        self.conv1 = layer.Conv2d(num_channels, 64, 11, stride=4, padding=2)
        self.conv2 = layer.Conv2d(64, 192, 5, padding=2)
        self.conv3 = layer.Conv2d(192, 384, 3, padding=1)
        self.conv4 = layer.Conv2d(384, 256, 3, padding=1)
        self.conv5 = layer.Conv2d(256, 256, 3, padding=1)
        self.linear1 = layer.Linear(4096)
        self.linear2 = layer.Linear(4096)
        self.linear3 = layer.Linear(num_classes)
        self.pooling1 = layer.MaxPool2d(2, 2, padding=0)
        self.pooling2 = layer.MaxPool2d(2, 2, padding=0)
        self.pooling3 = layer.MaxPool2d(2, 2, padding=0)
        self.avg_pooling1 = layer.AvgPool2d(3, 2, padding=0)
        self.relu1 = layer.ReLU()
        self.relu2 = layer.ReLU()
        self.relu3 = layer.ReLU()
        self.relu4 = layer.ReLU()
        self.relu5 = layer.ReLU()
        self.relu6 = layer.ReLU()
        self.relu7 = layer.ReLU()
        self.flatten = layer.Flatten()
        self.dropout1 = layer.Dropout()
        self.dropout2 = layer.Dropout()
        self.softmax_cross_entropy = layer.SoftMaxCrossEntropy()
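
For orientation, here is a forward pass that chains the layers declared above in the usual AlexNet order. This is a sketch, not the original author's code: it only uses the attributes defined in this constructor, and the exact ordering (in particular where the dropout and average-pooling layers sit) is an assumption.

    # Sketch only: a plausible forward() over the layers declared above.
    def forward(self, x):
        y = self.conv1(x)
        y = self.relu1(y)
        y = self.pooling1(y)
        y = self.conv2(y)
        y = self.relu2(y)
        y = self.pooling2(y)
        y = self.conv3(y)
        y = self.relu3(y)
        y = self.conv4(y)
        y = self.relu4(y)
        y = self.conv5(y)
        y = self.relu5(y)
        y = self.pooling3(y)
        y = self.avg_pooling1(y)
        y = self.flatten(y)
        y = self.dropout1(y)
        y = self.linear1(y)
        y = self.relu6(y)
        y = self.dropout2(y)
        y = self.linear2(y)
        y = self.relu7(y)
        y = self.linear3(y)
        return y
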
Example #2
    def __init__(self):
        self.conv1 = layer.Conv2d(1, 20, 5, padding=0)
        self.conv2 = layer.Conv2d(20, 50, 5, padding=0)
        self.linear1 = layer.Linear(4 * 4 * 50, 500)
        self.linear2 = layer.Linear(500, 10)
        self.pooling1 = layer.MaxPool2d(2, 2, padding=0)
        self.pooling2 = layer.MaxPool2d(2, 2, padding=0)
        self.relu1 = layer.ReLU()
        self.relu2 = layer.ReLU()
        self.relu3 = layer.ReLU()
        self.flatten = layer.Flatten()
Example #3
    def __init__(self, num_classes=10, num_channels=1):
        super(CNN, self).__init__()
        self.num_classes = num_classes
        self.input_size = 28
        self.dimension = 4
        self.conv1 = layer.Conv2d(num_channels, 20, 5, padding=0, activation="RELU")
        self.conv2 = layer.Conv2d(20, 50, 5, padding=0, activation="RELU")
        self.linear1 = layer.Linear(500)
        self.linear2 = layer.Linear(num_classes)
        self.pooling1 = layer.MaxPool2d(2, 2, padding=0)
        self.pooling2 = layer.MaxPool2d(2, 2, padding=0)
        self.relu = layer.ReLU()
        self.flatten = layer.Flatten()
        self.softmax_cross_entropy = layer.SoftMaxCrossEntropy()
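
This constructor fuses the non-linearity into conv1 and conv2 via the activation="RELU" arguments and declares a SoftMaxCrossEntropy layer for training. The sketch below shows how such a model is typically wired up; the forward ordering is inferred from the declared layers, and the training step assumes an optimizer has already been attached to the model and can be called directly on the loss, as in SINGA's bundled CNN example.

    # Sketch only: plausible forward() and train_one_batch() for this model.
    def forward(self, x):
        y = self.conv1(x)      # RELU fused via activation="RELU"
        y = self.pooling1(y)
        y = self.conv2(y)      # RELU fused via activation="RELU"
        y = self.pooling2(y)
        y = self.flatten(y)
        y = self.linear1(y)
        y = self.relu(y)
        y = self.linear2(y)
        return y

    def train_one_batch(self, x, y):
        out = self.forward(x)
        loss = self.softmax_cross_entropy(out, y)
        self.optimizer(loss)   # assumed: optimizer attached beforehand and callable on the loss
        return out, loss
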
Example #4
    def _make_layer(self, block, planes, blocks, stride=1):
        # Projection shortcut (1x1 conv + BN) for the residual branch whenever
        # the stride or the channel count changes between stages.
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            conv = layer.Conv2d(
                self.inplanes,
                planes * block.expansion,
                1,
                stride=stride,
                bias=False,
            )
            bn = layer.BatchNorm2d(planes * block.expansion)

            def _downsample(x):
                return bn(conv(x))

            downsample = _downsample

        # The first block may change stride/channels; the remaining blocks keep them.
        layers = []
        layers.append(block(self.inplanes, planes, stride, downsample))
        self.inplanes = planes * block.expansion
        for i in range(1, blocks):
            layers.append(block(self.inplanes, planes))

        def forward(x):
            for layer in layers:
                x = layer(x)
            return x

        # Return both the stage as a callable and the raw layer list so the
        # caller can register the layers (see Example #5).
        return forward, layers
Example #5
    def __init__(self, block, layers, num_classes=10, num_channels=3):
        self.inplanes = 64
        super(ResNet, self).__init__()
        self.num_classes = num_classes
        self.input_size = 224
        self.dimension = 4
        self.conv1 = layer.Conv2d(num_channels,
                                  64,
                                  7,
                                  stride=2,
                                  padding=3,
                                  bias=False)
        self.bn1 = layer.BatchNorm2d(64)
        self.relu = layer.ReLU()
        self.maxpool = layer.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1, layers1 = self._make_layer(block, 64, layers[0])
        self.layer2, layers2 = self._make_layer(block, 128, layers[1], stride=2)
        self.layer3, layers3 = self._make_layer(block, 256, layers[2], stride=2)
        self.layer4, layers4 = self._make_layer(block, 512, layers[3], stride=2)
        self.avgpool = layer.AvgPool2d(7, stride=1)
        self.flatten = layer.Flatten()
        self.fc = layer.Linear(num_classes)
        self.softmax_cross_entropy = layer.SoftMaxCrossEntropy()

        self.register_layers(*layers1, *layers2, *layers3, *layers4)
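
A forward pass for this constructor would chain the stem, the four stages produced by _make_layer (see Example #4, which returns each stage as a callable closure), and the classifier head. The ordering below is a sketch inferred from the declared layers, not code taken from the original source.

    # Sketch only: a plausible forward() for the layers declared above.
    def forward(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        x = self.avgpool(x)
        x = self.flatten(x)
        x = self.fc(x)
        return x
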
Example #6
def conv3x3(in_planes, out_planes, stride=1):
    """3x3 convolution with padding"""
    return layer.Conv2d(
        in_planes,
        out_planes,
        3,
        stride=stride,
        padding=1,
        bias=False,
    )
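
Usage is straightforward: the helper just builds a 3x3, padding-1 Conv2d, so residual blocks call it wherever they need such a convolution. The snippet below is illustrative only; the variable names are hypothetical.

# Illustrative usage sketch (hypothetical names):
conv_a = conv3x3(64, 64)             # stride 1, keeps the spatial size
conv_b = conv3x3(64, 128, stride=2)  # stride 2, halves the spatial size
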
Example #7
    def __init__(self,
                 onnx_model,
                 num_classes=10,
                 image_size=224,
                 num_channels=3):
        super(MyModel, self).__init__(onnx_model)
        self.dimension = 4
        self.num_classes = num_classes
        self.input_size = image_size
        self.num_channels = num_channels
        self.conv = layer.Conv2d(1280, 10, 1, padding=0)
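
This model wraps an imported ONNX graph and adds a 1x1 convolution head with 10 output channels. A matching forward pass would run the wrapped graph first and then apply the new head; the sketch below assumes the base class is SINGA's ONNX wrapper (sonnx.SONNXModel) whose forward() returns the backbone outputs as a list, which is an assumption rather than something shown in this excerpt.

    # Sketch only, under the assumptions stated above.
    def forward(self, *x):
        y = super(MyModel, self).forward(*x)   # assumed: run the imported ONNX backbone
        return self.conv(y[0])                 # 1x1 conv head for 10 classes
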
Example #8
    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(Bottleneck, self).__init__()
        self.conv1 = layer.Conv2d(inplanes, planes, 1, bias=False)
        self.bn1 = layer.BatchNorm2d(planes)
        self.relu1 = layer.ReLU()
        self.conv2 = layer.Conv2d(planes,
                                  planes,
                                  3,
                                  stride=stride,
                                  padding=1,
                                  bias=False)
        self.bn2 = layer.BatchNorm2d(planes)
        self.relu2 = layer.ReLU()
        self.conv3 = layer.Conv2d(planes,
                                  planes * self.expansion,
                                  1,
                                  bias=False)
        self.bn3 = layer.BatchNorm2d(planes * self.expansion)

        self.add = layer.Add()
        self.relu3 = layer.ReLU()

        self.downsample = downsample
        self.stride = stride
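
A forward pass for this bottleneck would apply the three conv/BN stages, optionally project the input through the downsample branch, and merge the two paths with the Add layer declared above. The ordering below is a sketch inferred from the declared layers.

    # Sketch only: a plausible forward() with the residual connection.
    def forward(self, x):
        residual = x

        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu1(out)

        out = self.conv2(out)
        out = self.bn2(out)
        out = self.relu2(out)

        out = self.conv3(out)
        out = self.bn3(out)

        if self.downsample is not None:
            residual = self.downsample(x)

        out = self.add(out, residual)
        out = self.relu3(out)
        return out
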
Example #9
    def __init__(self):
        super(MyModel, self).__init__()
        self.conv1 = layer.Conv2d(2, 2)
        self.bn1 = layer.BatchNorm2d(2)
        self.doublelinear1 = DoubleLinear(2, 4, 2)
        self.optimizer = opt.SGD()
Example #10
    def __init__(self,
                 in_filters,
                 out_filters,
                 reps,
                 strides=1,
                 padding=0,
                 start_with_relu=True,
                 grow_first=True):
        super(Block, self).__init__()

        if out_filters != in_filters or strides != 1:
            self.skip = layer.Conv2d(in_filters,
                                     out_filters,
                                     1,
                                     stride=strides,
                                     padding=padding,
                                     bias=False)
            self.skipbn = layer.BatchNorm2d(out_filters)
        else:
            self.skip = None

        self.layers = []

        filters = in_filters
        if grow_first:
            self.layers.append(layer.ReLU())
            self.layers.append(
                layer.SeparableConv2d(in_filters,
                                      out_filters,
                                      3,
                                      stride=1,
                                      padding=1,
                                      bias=False))
            self.layers.append(layer.BatchNorm2d(out_filters))
            filters = out_filters

        for i in range(reps - 1):
            self.layers.append(layer.ReLU())
            self.layers.append(
                layer.SeparableConv2d(filters,
                                      filters,
                                      3,
                                      stride=1,
                                      padding=1,
                                      bias=False))
            self.layers.append(layer.BatchNorm2d(filters))

        if not grow_first:
            self.layers.append(layer.ReLU())
            self.layers.append(
                layer.SeparableConv2d(in_filters,
                                      out_filters,
                                      3,
                                      stride=1,
                                      padding=1,
                                      bias=False))
            self.layers.append(layer.BatchNorm2d(out_filters))

        if not start_with_relu:
            self.layers = self.layers[1:]
        else:
            self.layers[0] = layer.ReLU()

        if strides != 1:
            self.layers.append(layer.MaxPool2d(3, strides, padding + 1))

        self.register_layers(*self.layers)

        self.add = layer.Add()
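
The block keeps its main path in self.layers (registered so their parameters are tracked) and its shortcut in self.skip/self.skipbn. A forward pass would run the list sequentially and merge it with the shortcut through the Add layer; the sketch below is inferred from the declared attributes rather than copied from the original source.

    # Sketch only: a plausible forward() for this block.
    def forward(self, x):
        y = x
        for lyr in self.layers:
            y = lyr(y)

        if self.skip is not None:
            skip = self.skip(x)
            skip = self.skipbn(skip)
        else:
            skip = x

        return self.add(y, skip)
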
Example #11
    def __init__(self, num_classes=10, num_channels=3):
        """ Constructor
        Args:
            num_classes: number of classes
        """
        super(Xception, self).__init__()
        self.num_classes = num_classes
        self.input_size = 299
        self.dimension = 4

        self.conv1 = layer.Conv2d(num_channels, 32, 3, 2, 0, bias=False)
        self.bn1 = layer.BatchNorm2d(32)
        self.relu1 = layer.ReLU()

        self.conv2 = layer.Conv2d(32, 64, 3, 1, 1, bias=False)
        self.bn2 = layer.BatchNorm2d(64)
        self.relu2 = layer.ReLU()
        # do relu here

        self.block1 = Block(64,
                            128,
                            2,
                            2,
                            padding=0,
                            start_with_relu=False,
                            grow_first=True)
        self.block2 = Block(128,
                            256,
                            2,
                            2,
                            padding=0,
                            start_with_relu=True,
                            grow_first=True)
        self.block3 = Block(256,
                            728,
                            2,
                            2,
                            padding=0,
                            start_with_relu=True,
                            grow_first=True)

        self.block4 = Block(728,
                            728,
                            3,
                            1,
                            start_with_relu=True,
                            grow_first=True)
        self.block5 = Block(728,
                            728,
                            3,
                            1,
                            start_with_relu=True,
                            grow_first=True)
        self.block6 = Block(728,
                            728,
                            3,
                            1,
                            start_with_relu=True,
                            grow_first=True)
        self.block7 = Block(728,
                            728,
                            3,
                            1,
                            start_with_relu=True,
                            grow_first=True)

        self.block8 = Block(728,
                            728,
                            3,
                            1,
                            start_with_relu=True,
                            grow_first=True)
        self.block9 = Block(728,
                            728,
                            3,
                            1,
                            start_with_relu=True,
                            grow_first=True)
        self.block10 = Block(728,
                             728,
                             3,
                             1,
                             start_with_relu=True,
                             grow_first=True)
        self.block11 = Block(728,
                             728,
                             3,
                             1,
                             start_with_relu=True,
                             grow_first=True)

        self.block12 = Block(728,
                             1024,
                             2,
                             2,
                             start_with_relu=True,
                             grow_first=False)

        self.conv3 = layer.SeparableConv2d(1024, 1536, 3, 1, 1)
        self.bn3 = layer.BatchNorm2d(1536)
        self.relu3 = layer.ReLU()

        # do relu here
        self.conv4 = layer.SeparableConv2d(1536, 2048, 3, 1, 1)
        self.bn4 = layer.BatchNorm2d(2048)

        self.relu4 = layer.ReLU()
        self.globalpooling = layer.MaxPool2d(10, 1)
        self.flatten = layer.Flatten()
        self.fc = layer.Linear(num_classes)

        self.softmax_cross_entropy = layer.SoftMaxCrossEntropy()
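
A forward pass for this constructor would run the two stem convolutions, the twelve Block modules (Example #10), the two separable-conv stages, and finally the pooling/classifier head. The sketch below only wires up the attributes declared above; the exact ordering is an assumption.

    # Sketch only: a plausible forward() for the layers declared above.
    def forward(self, x):
        y = self.relu1(self.bn1(self.conv1(x)))
        y = self.relu2(self.bn2(self.conv2(y)))

        for blk in (self.block1, self.block2, self.block3, self.block4,
                    self.block5, self.block6, self.block7, self.block8,
                    self.block9, self.block10, self.block11, self.block12):
            y = blk(y)

        y = self.relu3(self.bn3(self.conv3(y)))
        y = self.relu4(self.bn4(self.conv4(y)))

        y = self.globalpooling(y)
        y = self.flatten(y)
        return self.fc(y)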