def __init__(self, num_classes=31, bottle_neck=True):
    """Build the DeepMEDA network.

    Args:
        num_classes (int): number of target classes (default 31, the
            Office-31 benchmark size).
        bottle_neck (bool): if True, project the 2048-d ResNet-50
            features through a 256-d bottleneck before classification;
            otherwise classify the 2048-d features directly.
            NOTE(review): the original read ``bottle_neck`` as a free
            (undefined-in-view) name; it is now a defaulted parameter,
            which is backward-compatible for existing callers.
    """
    super(DeepMEDA, self).__init__()
    # ResNet-50 backbone; True -> load pretrained weights.
    self.feature_layers = ResNet.resnet50(True)
    # Marginal MMD loss module used for distribution alignment.
    self.mmd_marginal = mmd.MMD_loss()
    # Remember the configuration so other methods can branch on it.
    self.bottle_neck = bottle_neck
    if bottle_neck:
        # 2048-d features -> 256-d bottleneck -> classifier head.
        self.bottle = nn.Linear(2048, 256)
        self.cls_fc = nn.Linear(256, num_classes)
    else:
        self.cls_fc = nn.Linear(2048, num_classes)
def adapt_loss(self, X, Y, adapt_loss):
    """Compute adaptation loss, currently we support mmd and coral

    Arguments:
        X {tensor} -- source matrix
        Y {tensor} -- target matrix
        adapt_loss {string} -- loss type, 'mmd' or 'coral'. You can add your own loss

    Returns:
        [tensor] -- adaptation loss tensor
    """
    if adapt_loss == 'mmd':
        # Fresh MMD module per call, applied to the two feature matrices.
        return mmd.MMD_loss()(X, Y)
    if adapt_loss == 'coral':
        return CORAL(X, Y)
    # Unrecognised loss type: contribute nothing to the objective.
    return 0
# Carve the training subset out of the shuffled index permutation.
train_index = permutation[0:num_train]
train_data = all_train_X[train_index, :]
train_label = all_train_y[train_index, :]

# Wrap the training arrays as autograd-enabled tensors.
x1 = Variable(torch.FloatTensor(train_data))
y1 = Variable(torch.FloatTensor(train_label))
m1, n1 = train_data.shape

# Regression network: n1 input features -> 128 hidden units -> 1 output,
# trained with MSE plus an MMD domain-alignment term.
net = Net(n_feature=n1, n_hidden=128, n_out=1)
loss_func = torch.nn.MSELoss()
optimizer = torch.optim.Adam(net.parameters(), lr=0.001, weight_decay=0.01)
mmd_loss = mmd.MMD_loss()

epochs = 300
for i in range(epochs):
    prediction = net(x1)[0]
    # Objective = regression error + MMD between the hidden features of
    # the full dataset and those of the training subset.
    loss = loss_func(prediction, y1) + mmd_loss(net(x_all)[1], net(x1)[1])
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    print(i + 1, "loss:", loss.data.item())

# Test-set tensors for the evaluation that follows this chunk.
x_test1 = torch.FloatTensor(test_data)
y_test1 = torch.FloatTensor(test_label)