def test_bert_glue_const() -> None:
    """Smoke-test the BERT GLUE PyTorch example with the const searcher.

    Caps the run at 200 batches and launches a single-trial experiment.
    """
    example_dir = conf.nlp_examples_path("bert_glue_pytorch")
    config = conf.load_config(conf.nlp_examples_path("bert_glue_pytorch/const.yaml"))
    # Keep the test short: limit training to 200 batches.
    config = conf.set_max_length(config, {"batches": 200})
    exp.run_basic_test_with_temp_config(config, example_dir, 1)
def test_word_language_transformer_distributed() -> None:
    """Smoke-test the word-language-model example (Transformer variant) distributed.

    Loads the distributed launch config, caps training at 200 batches,
    selects the Transformer model class, and runs one trial.
    """
    config = conf.load_config(conf.nlp_examples_path("word_language_model/distributed.yaml"))
    config = conf.set_max_length(config, {"batches": 200})
    # NOTE: the previous `config = config.copy()` was removed — it was a
    # shallow copy that did not protect the nested `hyperparameters` dict
    # and was redundant since `load_config` already returns a fresh object.
    config["hyperparameters"]["model_cls"] = "Transformer"
    exp.run_basic_test_with_temp_config(config, conf.nlp_examples_path("word_language_model"), 1)
def test_word_language_lstm_const() -> None:
    """Smoke-test the word-language-model example (LSTM variant) with const searcher.

    Caps training at 200 batches, selects the untied LSTM model, and runs
    one trial.
    """
    # BUG FIX: this const test previously loaded distributed.yaml (copy-paste
    # from the distributed test); the sibling const tests in this file load
    # the const.yaml config for their example.
    config = conf.load_config(conf.nlp_examples_path("word_language_model/const.yaml"))
    config = conf.set_max_length(config, {"batches": 200})
    # The ineffective shallow `config.copy()` was dropped: it did not protect
    # the nested `hyperparameters` dict and `load_config` already returns a
    # fresh object.
    config["hyperparameters"]["model_cls"] = "LSTM"
    config["hyperparameters"]["tied"] = False
    exp.run_basic_test_with_temp_config(config, conf.nlp_examples_path("word_language_model"), 1)
def test_text_classification_tf_keras_distributed() -> None:
    """Smoke-test the tf.keras text-classification example with distributed launch.

    Caps the run at 200 batches and launches a single-trial experiment.
    """
    example_dir = conf.nlp_examples_path("text_classification_tf_keras")
    config = conf.load_config(
        conf.nlp_examples_path("text_classification_tf_keras/distributed.yaml")
    )
    # Keep the test short: limit training to 200 batches.
    config = conf.set_max_length(config, {"batches": 200})
    exp.run_basic_test_with_temp_config(config, example_dir, 1)