def test_layer_choice(self):
    """Exercise LayerChoice construction, item mutation and deletion.

    Runs two rounds (mutation, then deletion), each against both supported
    constructor forms: a plain list (auto-numbered names) and an OrderedDict
    (explicit names).
    """
    for round_idx in range(2):
        for dict_mode in range(2):
            named = dict_mode > 0
            if named:
                # Named candidates via OrderedDict.
                layer_choice = LayerChoice(OrderedDict([
                    ("conv1", nn.Conv2d(3, 3, 3)),
                    ("conv2", nn.Conv2d(3, 5, 3)),
                    ("conv3", nn.Conv2d(3, 6, 3)),
                ]))
            else:
                # Positional candidates via list.
                layer_choice = LayerChoice([nn.Conv2d(3, 3, 3),
                                            nn.Conv2d(3, 5, 3),
                                            nn.Conv2d(3, 6, 3)])
            if round_idx == 0:
                # Round 0: in-place replacement by index, slicing, and
                # (for the named form) replacement by key.
                self.assertEqual(len(layer_choice.choices), 3)
                layer_choice[1] = nn.Conv2d(3, 4, 3)
                self.assertEqual(layer_choice[1].out_channels, 4)
                self.assertEqual(len(layer_choice[0:2]), 2)
                if named:
                    layer_choice["conv3"] = nn.Conv2d(3, 7, 3)
                    self.assertEqual(layer_choice[-1].out_channels, 7)
            else:
                # Round 1: deletion by index keeps remaining names intact;
                # the named form also supports deletion by key.
                del layer_choice[1]
                self.assertEqual(len(layer_choice), 2)
                self.assertEqual(len(list(layer_choice)), 2)
                self.assertEqual(layer_choice.names,
                                 ["conv1", "conv3"] if named else ["0", "2"])
                if named:
                    del layer_choice["conv1"]
                    self.assertEqual(len(layer_choice), 1)
def __init__(self):
    """Small CIFAR-style CNN with two searchable conv kernel sizes."""
    super(Net, self).__init__()
    # Each LayerChoice picks between a 3x3 and a 5x5 kernel; padding is
    # chosen so both candidates preserve spatial resolution.
    conv1_candidates = [nn.Conv2d(3, 6, 3, padding=1),
                        nn.Conv2d(3, 6, 5, padding=2)]
    conv2_candidates = [nn.Conv2d(6, 16, 3, padding=1),
                        nn.Conv2d(6, 16, 5, padding=2)]
    self.conv1 = LayerChoice(conv1_candidates)
    self.pool = nn.MaxPool2d(2, 2)
    self.conv2 = LayerChoice(conv2_candidates)
    self.conv3 = nn.Conv2d(16, 16, 1)
    # Searchable skip connection (single candidate, may be dropped).
    self.skipconnect = InputChoice(n_candidates=1)
    self.bn = nn.BatchNorm2d(16)
    self.gap = nn.AdaptiveAvgPool2d(4)
    # Classifier head: 16 channels pooled to 4x4, then three linear layers.
    self.fc1 = nn.Linear(16 * 4 * 4, 120)
    self.fc2 = nn.Linear(120, 84)
    self.fc3 = nn.Linear(84, 10)
def __init__(self, test_case):
    """CNN with two searchable convolutions; conv2 also returns its choice mask."""
    super().__init__()
    # Kept so methods of this module can make assertions against the
    # hosting test case.
    self.test_case = test_case
    self.conv1 = LayerChoice([nn.Conv2d(3, 6, 3, padding=1),
                              nn.Conv2d(3, 6, 5, padding=2)])
    self.pool = nn.MaxPool2d(2, 2)
    # return_mask=True makes this choice yield (output, mask) pairs.
    self.conv2 = LayerChoice([nn.Conv2d(6, 16, 3, padding=1),
                              nn.Conv2d(6, 16, 5, padding=2)],
                             return_mask=True)
    self.conv3 = nn.Conv2d(16, 16, 1)
    self.bn = nn.BatchNorm2d(16)
    self.gap = nn.AdaptiveAvgPool2d(1)
    self.fc = nn.Linear(16, 10)
def __init__(self, hidden_size):
    """MNIST-style search space: two searchable convs plus a searchable skip.

    Args:
        hidden_size: width of the first fully-connected layer.
    """
    super(Net, self).__init__()
    # First conv: choose 5x5 vs 3x3 kernel (keys identify the choices
    # in the search space).
    self.conv1 = LayerChoice([nn.Conv2d(1, 20, 5, 1),
                              nn.Conv2d(1, 20, 3, 1)],
                             key='first_conv')
    # Middle conv: choose 3x3 vs 5x5, padded to preserve resolution.
    self.mid_conv = LayerChoice([nn.Conv2d(20, 20, 3, 1, padding=1),
                                 nn.Conv2d(20, 20, 5, 1, padding=2)],
                                key='mid_conv')
    self.conv2 = nn.Conv2d(20, 50, 5, 1)
    self.fc1 = nn.Linear(4 * 4 * 50, hidden_size)
    self.fc2 = nn.Linear(hidden_size, 10)
    # Searchable skip connection bypassing mid_conv: pick one of two inputs.
    self.input_switch = InputChoice(n_candidates=2, n_chosen=1, key='skip')
def __init__(self, cell_name, prev_labels, channels):
    """Searchable cell: pick one predecessor input and one op to apply to it.

    Args:
        cell_name: unique prefix used to key this cell's mutables.
        prev_labels: labels of candidate predecessor cells to choose from.
        channels: channel count preserved by every candidate op.
    """
    super().__init__(cell_name)
    # Choose exactly one input among the predecessors; also expose the mask.
    self.input_choice = InputChoice(choose_from=prev_labels,
                                    n_chosen=1,
                                    return_mask=True,
                                    key=cell_name + "_input")
    # Five candidate ops, all shape-preserving at stride 1.
    candidate_ops = [
        nn.Conv2d(channels, channels, 3, padding=1),
        nn.Conv2d(channels, channels, 5, padding=2),
        nn.MaxPool2d(3, stride=1, padding=1),
        nn.AvgPool2d(3, stride=1, padding=1),
        nn.Identity(),
    ]
    self.op_choice = LayerChoice(candidate_ops, key=cell_name + "_op")
def __init__(self, cell_id, C_in, C_out, stride, bn_affine=True, bn_momentum=0.1,
             bn_track_running_stats=True):
    """Build a densely connected NAS-Bench-201 cell of 4 nodes.

    Every edge (src, dst) with src < dst is a LayerChoice over the five
    standard candidate operations. Only the cell's first edge position
    (layer_idx == 0) applies the requested stride; all later edges use
    stride 1.

    Args:
        cell_id: identifier of this cell within the network.
        C_in: input channel count.
        C_out: output channel count.
        stride: stride applied by the first edge position.
        bn_affine / bn_momentum / bn_track_running_stats: BatchNorm options
            forwarded to every candidate op.
    """
    super(NASBench201Cell, self).__init__()
    self.NUM_NODES = 4
    self.layers = nn.ModuleList()

    def candidate_ops(layer_idx):
        # Fresh modules on every call, so each edge owns its own parameters.
        edge_stride = stride if layer_idx == 0 else 1
        return OrderedDict([
            ("none", Zero(C_in, C_out, stride)),
            ("avg_pool_3x3", Pooling(C_in, C_out, edge_stride, bn_affine,
                                     bn_momentum, bn_track_running_stats)),
            ("conv_3x3", ReLUConvBN(C_in, C_out, 3, edge_stride, 1, 1, bn_affine,
                                    bn_momentum, bn_track_running_stats)),
            ("conv_1x1", ReLUConvBN(C_in, C_out, 1, edge_stride, 0, 1, bn_affine,
                                    bn_momentum, bn_track_running_stats)),
            ("skip_connect",
             nn.Identity() if stride == 1 and C_in == C_out
             else FactorizedReduce(C_in, C_out, edge_stride, bn_affine,
                                   bn_momentum, bn_track_running_stats)),
        ])

    # Node dst receives one searchable edge from every earlier node src.
    for dst in range(self.NUM_NODES):
        edges = nn.ModuleList()
        for src in range(dst):
            edges.append(LayerChoice(candidate_ops(src),
                                     key="%d_%d" % (src, dst),
                                     reduction="mean"))
        self.layers.append(edges)

    self.in_dim = C_in
    self.out_dim = C_out
    self.cell_id = cell_id
def __init__(self):
    """Text classifier: fixed BiGRU encoder plus a searchable BiRNN depth."""
    super(ToxicClassifierModel, self).__init__()
    # Fixed bidirectional GRU over 300-dim embeddings.
    self.BiGRU = nn.GRU(300, hidden_size=LSTM_UNITS, bidirectional=True,
                        num_layers=1)
    # Searchable follow-up RNN: identical except for depth (1 vs 2 layers).
    rnn_candidates = [
        nn.RNN(input_size=2 * LSTM_UNITS, hidden_size=LSTM_UNITS,
               bidirectional=True, num_layers=depth)
        for depth in (1, 2)
    ]
    self.BiRNN = LayerChoice(rnn_candidates)
    # Dense head ending in 6 output logits.
    self.hidden1 = nn.Linear(DENSE_HIDDEN_UNITS, DENSE_HIDDEN_UNITS)
    self.hidden2 = nn.Linear(DENSE_HIDDEN_UNITS, DENSE_HIDDEN_UNITS)
    self.hidden3 = nn.Linear(DENSE_HIDDEN_UNITS, 6)
    self.vectors = FastText()
    # Two searchable single-candidate skip connections.
    self.skipconnect1 = InputChoice(n_candidates=1)
    self.skipconnect2 = InputChoice(n_candidates=1)
def __init__(self, test_case):
    """Minimal searchable net built from two MutableOp candidates."""
    super().__init__()
    # Kept so this module can assert against the hosting test case.
    self.test_case = test_case
    candidates = [MutableOp(3), MutableOp(5)]
    self.conv1 = LayerChoice(candidates)
    self.gap = nn.AdaptiveAvgPool2d(1)
    self.fc1 = nn.Linear(120, 10)