コード例 #1 (Code example #1)
    def test_run_pl_glue(self):
        """Smoke-test the PyTorch-Lightning GLUE example end to end on the MRPC fixture."""
        # Mirror the script's log output to stdout so failures are visible in CI logs.
        logger.addHandler(logging.StreamHandler(sys.stdout))

        output_dir = self.get_auto_remove_tmp_dir()
        argv = [
            "run_pl_glue.py",
            "--model_name_or_path",
            "bert-base-cased",
            "--data_dir",
            "./tests/fixtures/tests_samples/MRPC/",
            "--output_dir",
            output_dir,
            "--task",
            "mrpc",
            "--do_train",
            "--do_predict",
            "--train_batch_size=32",
            "--learning_rate=1e-4",
            "--num_train_epochs=1",
            "--seed=42",
            "--max_seq_length=128",
        ]
        # Use a GPU when one is present; fp16 additionally requires apex.
        if torch.cuda.is_available():
            argv.append("--gpus=1")
        if is_cuda_and_apex_available():
            argv.append("--fp16")

        with patch.object(sys, "argv", argv):
            result = run_pl_glue.main()
            # For now only check that the script runs to completion with a minimal accuracy.
            self.assertGreater(result["acc"], 0.25)
コード例 #2 (Code example #2)
    def test_run_pl_glue(self):
        """Smoke-test the PyTorch-Lightning GLUE example end to end on the MRPC fixture."""
        # Mirror the script's log output to stdout so failures are visible in CI logs.
        logger.addHandler(logging.StreamHandler(sys.stdout))

        argv = [
            "run_pl_glue.py",
            "--model_name_or_path",
            "bert-base-cased",
            "--data_dir",
            "./tests/fixtures/tests_samples/MRPC/",
            "--task",
            "mrpc",
            "--do_train",
            "--do_predict",
            "--output_dir",
            "./tests/fixtures/tests_samples/temp_dir",
            "--train_batch_size=32",
            "--learning_rate=1e-4",
            "--num_train_epochs=1",
            "--seed=42",
            "--max_seq_length=128",
        ]

        # Use a single GPU with mixed precision when CUDA is available.
        # NOTE(review): --fp16 is gated only on CUDA here, not on apex availability — verify
        # this is intended for the environments this test runs in.
        if torch.cuda.is_available():
            argv.extend(["--fp16", "--gpus=1"])

        with patch.object(sys, "argv", argv):
            result = run_pl_glue.main()
            # For now only check that the script runs to completion with a minimal accuracy.
            self.assertGreater(result["acc"], 0.25)
            #
            # TODO: this fails on CI - doesn't get acc/f1>=0.75:
            #
            #     # remove all the various *loss* attributes
            #     result = {k: v for k, v in result.items() if "loss" not in k}
            #     for k, v in result.items():
            #         self.assertGreaterEqual(v, 0.75, f"({k})")
            #
        clean_test_dir()