# LeNet-style convolutional network built from change-of-basis (COB) layers.
def __init__(self):
    super(Net, self).__init__()
    self.conv1 = Conv2dCOB(1, 6, 5)
    self.conv2 = Conv2dCOB(6, 16, 5)
    self.fc1 = LinearCOB(16 * 20 * 20, 120)  # 16 channels on 20x20 maps after two 5x5 valid convs
    self.fc2 = LinearCOB(120, 84)
    self.fc3 = LinearCOB(84, 10)
    self.flatten = FlattenCOB()
    self.relu1 = ReLUCOB()
    self.relu2 = ReLUCOB()
    self.tanhcob = TanhCOB()
    self.relu3 = ReLUCOB()
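
# A hypothetical shape check for Net (forward() is not part of this excerpt,
# so the layers are chained manually; the 1x28x28 MNIST-like input size is
# inferred from fc1's 16 * 20 * 20 input features, everything else below is
# an assumption):
import torch

net = Net()
x = torch.randn(1, 1, 28, 28)
h = net.relu2(net.conv2(net.relu1(net.conv1(x))))  # 1x28x28 -> 6x24x24 -> 16x20x20
assert net.flatten(h).shape == (1, 16 * 20 * 20)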

# Small residual network: two skip connections implemented with Add() modules.
def __init__(self):
    super(ResidualNet2, self).__init__()
    self.conv1 = Conv2dCOB(in_channels=1, out_channels=3, kernel_size=3, padding=1)
    self.conv2 = Conv2dCOB(in_channels=3, out_channels=3, kernel_size=3, padding=1)
    self.conv3 = Conv2dCOB(in_channels=3, out_channels=3, kernel_size=3, padding=1)
    self.conv4 = Conv2dCOB(in_channels=3, out_channels=3, kernel_size=3)  # no padding: 28x28 -> 26x26
    self.relu1 = ReLUCOB()
    self.relu2 = ReLUCOB()
    self.relu3 = ReLUCOB()
    self.relu4 = ReLUCOB()
    self.add1 = Add()
    self.add2 = Add()
    self.flatten = FlattenCOB()
    self.fc1 = LinearCOB(2028, 10)  # 2028 = 3 * 26 * 26
    self.relu5 = ReLUCOB()
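
# A plausible forward pass for ResidualNet2, inferred from the layer names;
# the exact wiring (notably where relu5 sits) is an assumption, not taken
# from the source:
def forward(self, x):
    x1 = self.relu1(self.conv1(x))      # 1x28x28 -> 3x28x28
    x2 = self.relu2(self.conv2(x1))
    x2 = self.add1(x1, x2)              # first skip connection
    x3 = self.relu3(self.conv3(x2))
    x3 = self.relu5(self.add2(x2, x3))  # second skip connection
    x4 = self.relu4(self.conv4(x3))     # 3x28x28 -> 3x26x26
    return self.fc1(self.flatten(x4))   # 3 * 26 * 26 = 2028 -> 10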

# Densely-connected variant: conv1 and conv2 outputs are concatenated along
# the channel dimension before conv3 (hence its 6 input channels).
def __init__(self, in_channels=3):
    super(DenseNet, self).__init__()
    self.conv1 = Conv2dCOB(in_channels=in_channels, out_channels=3, kernel_size=3, padding=1)
    self.conv2 = Conv2dCOB(in_channels=3, out_channels=3, kernel_size=3, padding=1)
    self.conv3 = Conv2dCOB(in_channels=6, out_channels=3, kernel_size=3, padding=1)
    self.conv4 = Conv2dCOB(in_channels=3, out_channels=3, kernel_size=3, padding=1)
    self.relu1 = ReLUCOB()
    self.relu2 = ReLUCOB()
    self.relu3 = ReLUCOB()
    self.relu4 = ReLUCOB()
    self.concat1 = Concat()
    self.flatten = FlattenCOB()
    # conv4 always outputs 3 channels on 32x32 feature maps, so the classifier
    # input is 3 * 32 * 32 regardless of in_channels (the original
    # `in_channels * 32 * 32` only coincided with this for the default in_channels=3).
    self.fc1 = LinearCOB(3 * 32 * 32, 10)
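
# A plausible forward pass for DenseNet above (an assumption inferred from the
# layer shapes; Concat is assumed to join its inputs along the channel
# dimension, which matches conv3's 6 input channels):
def forward(self, x):
    x1 = self.relu1(self.conv1(x))     # -> 3x32x32
    x2 = self.relu2(self.conv2(x1))    # -> 3x32x32
    x3 = self.concat1(x1, x2)          # channel concat -> 6x32x32
    x3 = self.relu3(self.conv3(x3))    # 6 -> 3 channels
    x4 = self.relu4(self.conv4(x3))    # -> 3x32x32
    return self.fc1(self.flatten(x4))  # 3 * 32 * 32 -> 10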

# Generic MLP builder: a stack of LinearCOB layers with a configurable
# activation. Relies on functools.reduce and operator.mul being imported
# at module level.
def __init__(self, input_shape, num_classes,
             hidden_layers=(500, 500, 500, 500, 500), activation="relu"):
    super().__init__()
    # Create the input and hidden layers
    layers_dim = (reduce(mul, input_shape),) + tuple(hidden_layers)
    layers = []
    for idx in range(len(hidden_layers)):
        layers.append(LinearCOB(layers_dim[idx], layers_dim[idx + 1]))
        if activation == 'elu':
            layers.append(ELUCOB())
        elif activation == 'leakyrelu':
            layers.append(LeakyReLUCOB())
        elif activation == 'tanh':
            layers.append(TanhCOB())
        else:
            layers.append(ReLUCOB())
    self.net = torch.nn.Sequential(FlattenCOB(),
                                   *layers,
                                   LinearCOB(hidden_layers[-1], num_classes))
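
# Hypothetical usage of the builder above (the enclosing class name "MLPCOB"
# and these sizes are illustrative assumptions, not taken from the source):
mlp = MLPCOB(input_shape=(1, 28, 28), num_classes=10,
             hidden_layers=(256, 128), activation='tanh')
# -> FlattenCOB, LinearCOB(784, 256), TanhCOB, LinearCOB(256, 128), TanhCOB,
#    LinearCOB(128, 10)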

def __init__(self, growth_rate, block_config, num_init_features, num_classes,
             bn_size=4, drop_rate=0, input_channels=3, memory_efficient=False):
    super(DenseNetCOB, self).__init__()

    # First convolution
    self.features = nn.Sequential(OrderedDict([
        ('conv0', Conv2dCOB(input_channels, num_init_features,
                            kernel_size=7, stride=2, padding=3, bias=False)),
        ('norm0', BatchNorm2dCOB(num_init_features)),
        ('relu0', ReLUCOB(inplace=True)),
        ('pool0', MaxPool2dCOB(kernel_size=3, stride=2, padding=1)),
    ]))

    # Each denseblock
    num_features = num_init_features
    for i, num_layers in enumerate(block_config):
        block = _DenseBlockCOB(num_layers=num_layers,
                               num_input_features=num_features,
                               bn_size=bn_size,
                               growth_rate=growth_rate,
                               drop_rate=drop_rate,
                               memory_efficient=memory_efficient)
        self.features.add_module('denseblock%d' % (i + 1), block)
        num_features = num_features + num_layers * growth_rate
        if i != len(block_config) - 1:
            trans = _TransitionCOB(num_input_features=num_features,
                                   num_output_features=num_features // 2)
            self.features.add_module('transition%d' % (i + 1), trans)
            num_features = num_features // 2

    # Final batch norm
    self.features.add_module('norm5', BatchNorm2dCOB(num_features))

    # Flatten layer
    self.relu = ReLUCOB(inplace=True)
    self.adaptive_avg_pool2d = AdaptiveAvgPool2dCOB((1, 1))
    self.flatten = FlattenCOB()

    # Linear layer
    self.classifier = LinearCOB(num_features, num_classes)

    # Official init from torch repo.
    for m in self.modules():
        if isinstance(m, Conv2dCOB):
            nn.init.kaiming_normal_(m.weight)
        elif isinstance(m, BatchNorm2dCOB):
            nn.init.constant_(m.weight, 1)
            nn.init.constant_(m.bias, 0)
        elif isinstance(m, LinearCOB):
            nn.init.constant_(m.bias, 0)
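
# Hypothetical instantiation using torchvision's densenet121 defaults
# (growth_rate=32, block_config=(6, 12, 24, 16), num_init_features=64);
# whether the surrounding code uses these exact values is an assumption:
densenet = DenseNetCOB(growth_rate=32, block_config=(6, 12, 24, 16),
                       num_init_features=64, num_classes=1000)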

def __init__(self):
    super().__init__()
    self.net = torch.nn.Sequential(FlattenCOB(),
                                   LinearCOB(784, 128),
                                   BatchNorm1dCOB(128),
                                   ReLUCOB(),
                                   LinearCOB(128, 10))
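
# A hypothetical smoke test for the model above (its class name is not shown
# in this excerpt; "model" stands for an instance of it). BatchNorm1dCOB needs
# a batch size > 1 in training mode, hence the batch of 2:
import torch
out = model.net(torch.randn(2, 1, 28, 28))  # flatten 784 -> 128 -> 10
assert out.shape == (2, 10)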

def __init__(self, block, layers, num_classes, zero_init_residual=False,
             groups=1, width_per_group=64, replace_stride_with_dilation=None,
             norm_layer=None, input_channels=3, for_dataset=None):
    super(ResNetCOB, self).__init__()
    if norm_layer is None:
        norm_layer = BatchNorm2dCOB
    self._norm_layer = norm_layer

    self.inplanes = 64
    self.dilation = 1
    if replace_stride_with_dilation is None:
        # each element in the tuple indicates if we should replace
        # the 2x2 stride with a dilated convolution instead
        replace_stride_with_dilation = [False, False, False]
    if len(replace_stride_with_dilation) != 3:
        raise ValueError("replace_stride_with_dilation should be None "
                         "or a 3-element tuple, got {}".format(replace_stride_with_dilation))
    self.groups = groups
    self.base_width = width_per_group
    self.conv1 = Conv2dCOB(input_channels, self.inplanes, kernel_size=7,
                           stride=2, padding=3, bias=False)
    if for_dataset == "cifar":
        # CIFAR10: kernel_size 7 -> 3, stride 2 -> 1, padding 3 -> 1
        self.conv1 = Conv2dCOB(input_channels, self.inplanes, kernel_size=3,
                               stride=1, padding=1, bias=False)
    self.bn1 = norm_layer(self.inplanes)
    self.relu = ReLUCOB(inplace=True)
    self.maxpool = MaxPool2dCOB(kernel_size=3, stride=2, padding=1)
    self.layer1 = self._make_layer(block, 64, layers[0])
    self.layer2 = self._make_layer(block, 128, layers[1], stride=2,
                                   dilate=replace_stride_with_dilation[0])
    self.layer3 = self._make_layer(block, 256, layers[2], stride=2,
                                   dilate=replace_stride_with_dilation[1])
    self.layer4 = self._make_layer(block, 512, layers[3], stride=2,
                                   dilate=replace_stride_with_dilation[2])
    self.avgpool = AdaptiveAvgPool2dCOB((1, 1))
    self.fc = LinearCOB(512 * block.expansion, num_classes)
    self.flatten = FlattenCOB()

    for m in self.modules():
        if isinstance(m, Conv2dCOB):
            nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
        elif isinstance(m, (BatchNorm2dCOB, nn.GroupNorm)):
            nn.init.constant_(m.weight, 1)
            nn.init.constant_(m.bias, 0)

    # Zero-initialize the last BN in each residual branch, so that the
    # residual branch starts with zeros and each residual block behaves
    # like an identity. This improves the model by 0.2~0.3% according to
    # https://arxiv.org/abs/1706.02677
    if zero_init_residual:
        for m in self.modules():
            if isinstance(m, BottleneckCOB):
                nn.init.constant_(m.bn3.weight, 0)
            elif isinstance(m, BasicBlockCOB):
                nn.init.constant_(m.bn2.weight, 0)
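
# Hypothetical ResNet-18-style instantiation: BasicBlockCOB is referenced in
# the zero-init branch above, and [2, 2, 2, 2] mirrors torchvision's resnet18
# layer counts (that this configuration is used here is an assumption):
resnet = ResNetCOB(BasicBlockCOB, [2, 2, 2, 2], num_classes=10,
                   for_dataset="cifar")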