Example #1
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)


if __name__ == '__main__':
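    # Character-level FastText run: the char_*_input.npy files hold the
    # character-token inputs, while label.npy is shared with the word-level
    # runs in the later examples.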
    # seed = 1
    model_fn = lambda: FastText(1024, 1024)
    model_name = 'fasttext_char'
    train_data = np.load('../../data/char_train_input.npy')
    train_label = np.load('../../data/label.npy')
    test_data = np.load('../../data/char_test_input.npy')
    # hold_out_test(model_fn, model_name, train_data, train_label, test_data, lr=1e-4)
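    # cross_validation_bagging presumably trains one model per CV fold and
    # averages (bags) the fold models' test predictions; the patience kwarg
    # suggests early stopping after 20 epochs without validation improvement.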
    cross_validation_bagging(model_fn,
                             model_name,
                             train_data,
                             train_label,
                             test_data,
                             lr=1e-4,
                             patience=20,
                             seed=2)

    # seed = 1
    # model_fn = lambda: Fast_Attention_Text(1024, 1024)
    # model_name = 'fast_attention_text'
    # train_data = np.load('../../data/train_input.npy')
    # train_label = np.load('../../data/label.npy')
    # test_data = np.load('../../data/test_input.npy')
    # hold_out_test(model_fn, model_name, train_data, train_label, test_data)
    # cross_validation_bagging(model_fn, model_name, train_data, train_label, test_data, batch_size=32, lr=5e-4,
    #                          seed=seed)
Example #2
            elif isinstance(m, nn.Linear):
                nn.init.xavier_normal_(m.weight)
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)


if __name__ == '__main__':
    # seed = 1
    # model_fn = lambda: FastText(1024, 1024)
    # model_name = 'new_fasttext'
    # train_data = np.load('../../data/train_input.npy')
    # train_label = np.load('../../data/label.npy')
    # test_data = np.load('../../data/test_input.npy')
    # cross_validation_bagging(model_fn, model_name, train_data, train_label, test_data, seed=seed)

    seed = 1
    model_fn = lambda: Fast_Attention_Text(1024, 1024)
    model_name = 'fast_attention_text'
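    # Word-level inputs (train_input.npy) this time, paired with the
    # attention variant of the FastText model.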
    train_data = np.load('../../data/train_input.npy')
    train_label = np.load('../../data/label.npy')
    test_data = np.load('../../data/test_input.npy')
    # hold_out_test(model_fn, model_name, train_data, train_label, test_data)
    cross_validation_bagging(model_fn,
                             model_name,
                             train_data,
                             train_label,
                             test_data,
                             batch_size=32,
                             lr=5e-4,
                             seed=seed)
Example #3
        return out

    def _initialize_weights(self):
        # Layer-type-specific initialization: Kaiming for convolutions,
        # constants for batch norm, Xavier for linear layers, and orthogonal
        # weights with zero biases for the LSTM.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out')
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.Linear):
                nn.init.xavier_uniform_(m.weight)
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.LSTM):
                for name, param in m.named_parameters():
                    if 'weight' in name:
                        nn.init.orthogonal_(param)
                    elif 'bias' in name:
                        nn.init.constant_(param, 0.0)


if __name__ == '__main__':
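    # Pooled_BiLSTM run: note the much smaller model dimensions (40, 128)
    # than the (1024, 1024) FastText runs, and the default bagging settings.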
    model_fn = lambda: Pooled_BiLSTM(40, 128)
    model_name = 'pooled_bilstm'
    train_data = np.load('../../data/train_input.npy')
    train_label = np.load('../../data/label.npy')
    test_data = np.load('../../data/test_input.npy')
    cross_validation_bagging(model_fn, model_name, train_data, train_label,
                             test_data)
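
# The cross_validation_bagging helper used throughout these examples is
# imported from elsewhere in the repo and is not shown here. Below is a
# minimal sketch of what such a routine typically does: k-fold training plus
# averaging ("bagging") of the per-fold test predictions. Everything in it
# (fold count, loss, optimizer, plain epoch loop in place of early stopping)
# is an illustrative assumption, not the repo's actual implementation.
import numpy as np
import torch
import torch.nn as nn
from sklearn.model_selection import KFold


def cross_validation_bagging_sketch(model_fn, model_name, train_data,
                                    train_label, test_data, n_splits=5,
                                    batch_size=64, lr=1e-4, epochs=10, seed=1):
    # model_name would drive checkpoint paths in the real helper; unused here.
    torch.manual_seed(seed)
    test_x = torch.as_tensor(test_data)  # dtype inherited from the .npy array
    fold_preds = []
    kf = KFold(n_splits=n_splits, shuffle=True, random_state=seed)
    for tr_idx, _val_idx in kf.split(train_data):
        model = model_fn()  # fresh model per fold, as model_fn suggests
        optimizer = torch.optim.Adam(model.parameters(), lr=lr)
        criterion = nn.CrossEntropyLoss()  # assumes class-index labels
        x = torch.as_tensor(train_data[tr_idx])
        y = torch.as_tensor(train_label[tr_idx], dtype=torch.long)
        for _epoch in range(epochs):
            model.train()
            for start in range(0, len(x), batch_size):
                optimizer.zero_grad()
                loss = criterion(model(x[start:start + batch_size]),
                                 y[start:start + batch_size])
                loss.backward()
                optimizer.step()
        model.eval()
        with torch.no_grad():
            fold_preds.append(torch.softmax(model(test_x), dim=1).numpy())
    # Bagging step: average the softmax outputs of the k fold models.
    return np.mean(fold_preds, axis=0)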