def __init__(self, in_channels, out_channels, downsample):
    """Shuffle-unit style test block: 1x1 compress -> depthwise -> 1x1 expand,
    plus an optional downsampling shortcut branch.

    :param in_channels: channels entering the unit
    :param out_channels: channels produced by the unit (split across branches)
    :param downsample: when True, stride-2 convs halve the spatial size and a
        shortcut branch (dw_conv4 / expand_conv5) is created
    """
    super().__init__()
    self.downsample = downsample
    # Each branch carries half of the output channels.
    half_channels = out_channels // 2
    compress_in = in_channels if self.downsample else half_channels
    self.compress_conv1 = create_conv(compress_in, half_channels, 1, 1, -2)
    main_stride = 2 if self.downsample else 1
    self.dw_conv2 = create_depthwise_conv(half_channels, 3, 2, -2, padding=1, stride=main_stride)
    self.expand_conv3 = create_conv(half_channels, half_channels, 1, 1, -2)
    if downsample:
        # Shortcut branch matching the main path's spatial reduction.
        self.dw_conv4 = create_depthwise_conv(in_channels, 3, 2, -2, padding=1, stride=2)
        self.expand_conv5 = create_conv(in_channels, half_channels, 1, 1, -2)
    self.activ = nn.ReLU(inplace=True)
def __init__(self, use_last_conv=True):
    """Small conv stack whose trailing 1x1 conv can be toggled off.

    :param use_last_conv: when True, a final 32 -> 1 conv is attached
    """
    super().__init__()
    self.use_last_conv = use_last_conv
    self.first_conv = create_conv(1, 8, 1)
    self.branch_conv = create_conv(8, 32, 2, stride=2)
    if use_last_conv:
        self.last_conv = create_conv(32, 1, 1)
def __init__(self):
    """Branching conv model with per-channel weight offsets and BatchNorm
    layers whose bias parameters are overwritten with ones."""
    super().__init__()
    self.conv1 = create_conv(1, 16, 1, 1, -2)
    # Ramp the weights so each output channel is distinguishable.
    for ch in range(16):
        self.conv1.weight.data[ch] += ch
    self.conv2 = create_conv(16, 16, 1, 2, -2)
    self.conv3 = create_conv(16, 16, 1, 2, -2)
    for ch in range(16):
        self.conv2.weight.data[ch] += ch
        self.conv3.weight.data[ch] += ch
    self.relu = nn.ReLU()
    self.conv4 = create_conv(32, 16, 1, 10, 0)
    # Reverse ramp so conv4's channel ordering differs from the others.
    for ch in range(16):
        self.conv4.weight.data[ch] += 16 - ch
    self.conv5 = create_conv(48, 16, 1, 10, 0)
    self.bn = nn.BatchNorm2d(16)
    self.bn.bias = torch.nn.Parameter(torch.ones(16))
    self.bn1 = nn.BatchNorm2d(32)
    self.bn1.bias = torch.nn.Parameter(torch.ones(32))
    self.bn2 = nn.BatchNorm2d(48)
    self.bn2.bias = torch.nn.Parameter(torch.ones(48))
def __init__(self):
    """Three parallel 1->3 convs with distinct weight inits, followed by
    two 3->1 convs (one with a negative weight init)."""
    super().__init__()
    self.conv1 = create_conv(1, 3, 2, 1, -2)
    self.conv2 = create_conv(1, 3, 2, 2, -2)
    self.conv3 = create_conv(1, 3, 2, 3, -2)
    self.relu = nn.ReLU()
    self.conv4 = create_conv(3, 1, 3, 10, 0)
    self.conv5 = create_conv(3, 1, 3, -10, 0)
def __init__(self):
    """Padded 3x3 conv chain (1->8->8->8), two 8->1 heads, and a linear tail."""
    super().__init__()
    self.conv1 = create_conv(1, 8, 3, 1, -2, padding=1)
    self.conv2 = create_conv(8, 8, 3, 2, -2, padding=1)
    self.conv3 = create_conv(8, 8, 3, 3, -2, padding=1)
    self.conv4 = create_conv(8, 1, 3, 10, 0, padding=1)
    self.conv5 = create_conv(8, 1, 3, -10, 0, padding=1)
    self.linear = nn.Linear(64, 10)
    self.relu = nn.ReLU()
def __init__(self):
    """Two convs with channel-ramped weights, each followed by a GroupNorm:
    gn1 has one group per channel (instance-norm-like), gn2 has two groups."""
    super().__init__()
    self.conv1 = create_conv(1, 16, 1, 1, -2)
    for ch in range(16):
        self.conv1.weight.data[ch] += ch
    self.gn1 = nn.GroupNorm(16, 16)  # one group per channel: Instance Normalization
    self.conv2 = create_conv(16, 16, 1, 1, -2)
    for ch in range(16):
        self.conv2.weight.data[ch] += ch
    self.gn2 = nn.GroupNorm(2, 16)  # two groups: Group Normalization
def __init__(self):
    """Wide conv chain (1->512->1024->1024) with channel-ramped weights and a
    2x2 max-pool."""
    super().__init__()
    self.conv1 = create_conv(1, 512, 1, 1, 1)
    for ch in range(512):
        self.conv1.weight.data[ch] += ch
    self.conv2 = create_conv(512, 1024, 3, 1, 1)
    self.conv3 = create_conv(1024, 1024, 1, 1, 1)
    for ch in range(1024):
        self.conv2.weight.data[ch] += ch
        self.conv3.weight.data[ch] += ch
    self.maxpool = nn.MaxPool2d(2)
def __init__(self):
    """Conv chain 1->16->16->1 with ramped weights, ReLU, and a linear head."""
    super().__init__()
    self.conv1 = create_conv(1, 16, 1, 1, -2)
    for ch in range(16):
        self.conv1.weight.data[ch] += ch
    self.conv2 = create_conv(16, 16, 1, 1, -2)
    for ch in range(16):
        self.conv2.weight.data[ch] += ch
    self.conv3 = create_conv(16, 1, 1, 1, -2)
    self.relu = nn.ReLU()
    self.fc = nn.Linear(64, 3)
def __init__(self):
    """One of each conv flavor: plain, depthwise, strided (downsampling),
    and grouped."""
    super().__init__()
    # Usual conv
    self.conv1 = create_conv(1, 32, 2, 9, -2)
    self.relu = nn.ReLU()
    # Depthwise conv (groups == channels)
    self.conv2 = nn.Conv2d(32, 32, 1, groups=32)
    # Downsample conv (stride 2)
    self.conv3 = create_conv(32, 32, 3, -10, 0, stride=2)
    # Group conv (8 groups)
    self.conv4 = nn.Conv2d(32, 16, 1, groups=8)
def __init__(self):
    """Two parallel 512->1024 convs feeding a 1024->1024 conv, all with
    channel-ramped weights."""
    super().__init__()
    self.conv1 = create_conv(1, 512, 1, 1, 1)
    for ch in range(512):
        self.conv1.weight.data[ch] += ch
    self.conv2 = create_conv(512, 1024, 1, 1, 1)
    self.conv3 = create_conv(512, 1024, 1, 1, 1)
    for ch in range(1024):
        self.conv2.weight.data[ch] += ch
        self.conv3.weight.data[ch] += ch
    self.conv4 = create_conv(1024, 1024, 1, 1, 1)
    for ch in range(1024):
        self.conv4.weight.data[ch] += ch
def __init__(self):
    """Parallel 512->1024 convs into a 1024-group grouped conv (1024->2048)
    and a 2048->512 tail, with channel-ramped weights."""
    super().__init__()
    self.conv1 = create_conv(1, 512, 1, 1, 1)
    self.conv4 = create_conv(2048, 512, 2, 1, 1)
    for ch in range(512):
        self.conv1.weight.data[ch] += ch
        self.conv4.weight.data[ch] += ch
    self.conv2 = create_conv(512, 1024, 3, 1, 1)
    self.conv3 = create_conv(512, 1024, 3, 1, 1)
    self.depthwise_conv = create_grouped_conv(1024, 2048, 5, 1024, 1, 1)
    # NOTE: the ramp touches only the first 1024 of the grouped conv's
    # 2048 output channels (loop bound matches conv2/conv3, not 2048).
    for ch in range(1024):
        self.conv2.weight.data[ch] += ch
        self.conv3.weight.data[ch] += ch
        self.depthwise_conv.weight.data[ch] += ch
def __init__(self, in_out_ch=((1, 3), (3, 5), (5, 7), (7, 10))):
    """Symmetric encoder/decoder Sequential: four conv-BN-ReLU stages going
    down the channel ladder, then four going back up with reversed channel
    pairs.

    :param in_out_ch: (in, out) channel pairs for the four encoder stages;
        the decoder reuses them reversed
    """
    super().__init__()
    # (kernel_size, weight_init, bias_init) per encoder stage, in order.
    down_params = ((2, -1, -2), (5, 1, 1), (1, 2, 2), (9, -1, 0))
    # Same for the decoder stages (channel pairs are consumed reversed).
    up_params = ((3, 0, 1), (1, -1, 9), (2, 10, 1), (1, 1, 1))
    layers = []
    for (in_ch, out_ch), (kernel, w_init, b_init) in zip(in_out_ch, down_params):
        layers.append(create_conv(in_ch, out_ch, kernel, w_init, b_init))
        layers.append(nn.BatchNorm2d(out_ch))
        layers.append(nn.ReLU())
    for (out_ch, in_ch), (kernel, w_init, b_init) in zip(reversed(in_out_ch), up_params):
        layers.append(create_conv(in_ch, out_ch, kernel, w_init, b_init))
        layers.append(nn.BatchNorm2d(out_ch))
        layers.append(nn.ReLU())
    self.features = nn.Sequential(*layers)
def __init__(self):
    """1->16 conv feeding two parallel 16->32 convs and a 32->16 tail,
    with channel-ramped weights."""
    super().__init__()
    self.conv1 = create_conv(1, 16, 2, 1, -2)
    for ch in range(16):
        self.conv1.weight.data[ch] += ch
    self.conv2 = create_conv(16, 32, 2, 2, -2)
    self.conv3 = create_conv(16, 32, 2, 2, -2)
    for ch in range(32):
        self.conv2.weight.data[ch] += ch
        self.conv3.weight.data[ch] += ch
    self.relu = nn.ReLU()
    self.conv4 = create_conv(32, 16, 3, 10, 0)
    for ch in range(16):
        self.conv4.weight.data[ch] += ch
def __init__(self):
    """Conv/BN/depthwise pipeline with a transpose-conv upsampling stage and
    a final 64->1 conv; weights are channel-ramped throughout."""
    super().__init__()
    self.conv1 = create_conv(1, 16, 2, 0, 1)
    for ch in range(16):
        self.conv1.weight.data[ch] += ch
    self.bn1 = nn.BatchNorm2d(16)
    self.relu = nn.ReLU()
    self.conv_depthwise = create_depthwise_conv(16, 3, 0, 1)
    for ch in range(16):
        self.conv_depthwise.weight.data[ch] += ch
    self.conv2 = create_conv(16, 32, 3, 20, 0)
    for ch in range(32):
        self.conv2.weight.data[ch] += ch
    self.bn2 = nn.BatchNorm2d(32)
    self.up = create_transpose_conv(32, 64, 3, 3, 1, 2)
    # Transpose-conv weights are indexed [in, out, ...]: ramp the output
    # channels of the first input channel only.
    for ch in range(64):
        self.up.weight.data[0][ch] += ch
    self.conv3 = create_conv(64, 1, 5, 5, 1)
def __init__(self):
    """Plain conv -> grouped conv -> depthwise conv -> linear head, with
    channel-ramped weights."""
    super().__init__()
    self.conv1 = create_conv(1, 512, 1, 1, 1)
    for ch in range(512):
        self.conv1.weight.data[ch] += ch
    self.conv2 = create_grouped_conv(512, 128, 3, 4, 1, 1)
    self.conv3 = create_depthwise_conv(128, 3, 1, 1)
    for ch in range(128):
        self.conv2.weight.data[ch] += ch
        self.conv3.weight.data[ch] += ch
    self.fc = nn.Linear(2048, 128)
def __init__(self, repeat_seq_of_shared_convs=False, additional_last_shared_layers=False):
    """Model with shared conv layers for mask-propagation tests.

    :param repeat_seq_of_shared_convs: when True, the shared conv sequence is
        applied twice (num_iter_shared_convs == 2)
    :param additional_last_shared_layers: when True, extra shared layers are
        used at the end (consumed by the forward pass elsewhere)
    """
    super().__init__()
    self.num_iter_shared_convs = 2 if repeat_seq_of_shared_convs else 1
    self.last_shared_layers = additional_last_shared_layers
    self.conv1 = create_conv(2, 16, 1, 1, -2)
    for ch in range(16):
        self.conv1.weight.data[ch] += ch
    self.conv2 = create_conv(2, 16, 1, 1, -2)
    for ch in range(16):
        self.conv2.weight.data[ch] += ch
    # conv3 keeps its default weights on purpose, to check difference masks.
    self.conv3 = create_conv(2, 16, 1, 1, -2)
    self.conv4 = create_conv(16, 16, 1, 1, -2)
    for ch in range(16):
        self.conv4.weight.data[ch] += ch
    self.conv5 = create_conv(16, 16, 1, 1, -2)
    for ch in range(16):
        self.conv5.weight.data[ch] += ch
def __init__(self):
    """Minimal two-conv fixture: a 1->8 head and a 32->1 tail."""
    super().__init__()
    self.first_conv = create_conv(1, 8, 1)
    self.last_conv = create_conv(32, 1, 1)
def __init__(self):
    """Two small 2x2 convs chained 1->2->2."""
    super().__init__()
    self.conv1 = create_conv(1, 2, 2, 2)
    self.conv2 = create_conv(2, 2, 2, 2)
def __init__(self):
    """Single conv plus a small linear layer."""
    super().__init__()
    self.conv1 = create_conv(1, 1, 2, -1, -2)
    self.fc = nn.Linear(3, 6)
def __init__(self):
    """Conv stem feeding a downsampling TestShuffleUnit."""
    super().__init__()
    self.conv = create_conv(1, 16, 1, 1, -2)
    self.unit1 = TestShuffleUnit(16, 32, True)
def __init__(self):
    """One 1x1 conv and a 64->1 linear layer."""
    super().__init__()
    self.conv = create_conv(1, 1, 1)
    self.linear = nn.Linear(64, 1)
def __init__(self):
    """Builds conv2 from instance-level conv settings.

    NOTE(review): self.in_channels / out_channels / kernel_size / weight_init /
    bias_init are read but never set here — presumably class attributes or set
    by a base class; confirm against the enclosing class definition.
    """
    super().__init__()
    self.conv2 = create_conv(
        self.in_channels,
        self.out_channels,
        self.kernel_size,
        self.weight_init,
        self.bias_init,
    )
def __init__(self):
    """Two-conv fixture: 1->2 then 2->1 with a negative weight init."""
    super().__init__()
    self.conv1 = create_conv(1, 2, 2, 9, -2)
    self.conv2 = create_conv(2, 1, 3, -10, 0)