Code example #1 (score: 0)
    def _distributed_train_model(self, opt):
        """
        Run multiprocessing distributed training with ``opt`` and return reports.

        Defaults ``model_file``/``dict_file`` into a temporary directory (so the
        run cleans up after itself) unless the caller supplied them, pre-builds
        the dictionary, then launches training.

        :param opt: dict of option overrides fed to the training arg parser.
        :return: ``(valid, test)`` report tuple from ``launch_and_train``.
        """
        with testing_utils.tempdir() as tmpdir:
            if 'model_file' not in opt:
                opt['model_file'] = os.path.join(tmpdir, 'model')
            if 'dict_file' not in opt:
                opt['dict_file'] = os.path.join(tmpdir, 'model.dict')

            parser = mp_train.setup_args()
            popt = _forced_parse(parser, opt)

            # we need a prebuilt dictionary before spawning distributed workers
            build_dict.build_dict(popt)

            # 31337 is a fixed distributed-init port; presumably chosen to avoid
            # clashes with other tests -- confirm if tests can run in parallel.
            valid, test = mp_train.launch_and_train(popt, 31337)

        return (valid, test)
Code example #2 (score: 0)
    def _distributed_train_model(self, **overrides):
        """
        Run multiprocessing distributed training and return its reports.

        Merges ``overrides`` on top of ``self.base_config``, defaults
        ``model_file``/``dict_file`` into a temporary directory unless already
        set, pre-builds the dictionary, then launches training.

        :param overrides: keyword option overrides applied over the base config.
        :return: ``(valid, test)`` report tuple from ``launch_and_train``.
        """
        opt = {**self.base_config, **overrides}
        with testing_utils.tempdir() as tmpdir:
            if 'model_file' not in opt:
                opt['model_file'] = os.path.join(tmpdir, 'model')
            if 'dict_file' not in opt:
                opt['dict_file'] = os.path.join(tmpdir, 'model.dict')

            parser = mp_train.setup_args()
            popt = parser.parse_kwargs(**opt)

            # we need a prebuilt dictionary before spawning distributed workers
            build_dict.build_dict(popt)

            valid, test = mp_train.launch_and_train(popt)

        return (valid, test)
Code example #3 (score: 0)
File: test_gpt2.py — Project: donshen/ParlAI
    def _distributed_train_model(self, opt):
        """
        Run multiprocessing distributed training with ``opt`` and return reports.

        Defaults ``model_file``/``dict_file`` into a temporary directory unless
        the caller supplied them, pre-builds the dictionary, launches training,
        and tears down the torch.distributed process group afterwards.

        :param opt: dict of option overrides fed to the training arg parser.
        :return: ``(valid, test)`` report tuple from ``launch_and_train``.
        """
        with testing_utils.tempdir() as tmpdir:
            if 'model_file' not in opt:
                opt['model_file'] = os.path.join(tmpdir, 'model')
            if 'dict_file' not in opt:
                opt['dict_file'] = os.path.join(tmpdir, 'model.dict')

            parser = mp_train.setup_args()
            # TODO: Kill this after dictionaries build correctly
            popt = self._forced_parse(parser, opt)

            # we need a prebuilt dictionary before spawning distributed workers
            build_dict.build_dict(popt)

            # 31338 is a fixed distributed-init port; presumably distinct from
            # the 31337 used elsewhere to avoid clashes -- confirm.
            valid, test = mp_train.launch_and_train(popt, 31338)
            # Tear down the process group so later tests can re-init distributed.
            dist.destroy_process_group()

        return (valid, test)
Code example #4 (score: 0)
    def _distributed_train_model(self, opt):
        """
        Run multiprocessing distributed training with ``opt``.

        Defaults ``model_file``/``dict_file`` into a temporary directory unless
        the caller supplied them, pre-builds the dictionary, then launches
        training while capturing stdout/stderr.

        :param opt: dict of option overrides fed to the training arg parser.
        :return: ``(captured_output, valid, test)`` tuple.
        """
        # we have to delay our import to here, because the set_spawn_method call
        # inside multiprocessing_train will break the multithreading tests, even
        # when we skip the test.
        import parlai.scripts.multiprocessing_train as mp_train

        with testing_utils.capture_output() as output:
            with testing_utils.tempdir() as tmpdir:
                if 'model_file' not in opt:
                    opt['model_file'] = os.path.join(tmpdir, 'model')
                if 'dict_file' not in opt:
                    opt['dict_file'] = os.path.join(tmpdir, 'model.dict')

                parser = mp_train.setup_args()
                popt = _forced_parse(parser, opt)

                # we need a prebuilt dictionary before spawning workers
                build_dict.build_dict(popt)

                # 31337 is a fixed distributed-init port; presumably chosen to
                # avoid clashes with other tests -- confirm.
                valid, test = mp_train.launch_and_train(popt, 31337)

        return (output.getvalue(), valid, test)
Code example #5 (score: 0)
    "compute_tokenized_bleu":
    True,
    # CL Training (For debugging)
    "ref_model_update_freq":
    30,
    "pretrain_steps":
    30,
    "ref_model_file":
    os.path.join(
        PARLAI_HOME,
        'models/contrastive_learning/seq2seq/baseline_seq2seq/gpu-154-36-v100_GPU0/personachat_extend'
    )
}

if __name__ == '__main__':
    parser = setup_args()
    parser = add_cl_cmdline_args(parser)

    parser.set_defaults(**DEFAULT_PARAMS)
    parser.set_defaults(**DEFAULT_OVERRIDE)
    parser.set_defaults(**OVERRIDE)

    parser.set_defaults(
        task='personachat_extend',
        model='parlai.agents.contrastive_learning.seq2seq:CLSeq2seqAgent',
        model_file=os.path.join(
            PARLAI_HOME, 'models/contrastive_learning/tmp/personachat_extend'),
        hiddensize=256,
        attention='general',
        attention_time='post',
        numlayers=2,