def setup_task(cls, args, **kwargs):
    """Set up the translation task from parsed command-line args.

    Prepares the language list, per-language dictionaries, and training flag
    via MultilingualDatasetManager, verifies that every language shares one
    dictionary (the only configuration this task supports), and constructs
    the task instance.

    Args:
        args: parsed argument namespace forwarded to the dataset manager
            and the task constructor.
        **kwargs: extra options passed through to
            ``MultilingualDatasetManager.prepare``.

    Returns:
        An instance of ``cls`` initialized with the prepared languages,
        dictionaries, and training flag.

    Raises:
        AssertionError: if two languages were given different dictionaries.
    """
    langs, dicts, training = MultilingualDatasetManager.prepare(
        cls.load_dictionary, args, **kwargs
    )
    # All languages must share a single dictionary; compare each one
    # against the first dictionary seen.
    dict0 = None
    for _, lang_dict in dicts.items():
        if dict0 is None:
            dict0 = lang_dict
        else:
            # Fixed typo/grammar in the original message
            # ("Diffrent dictionary are specified").
            assert dict0 == lang_dict, (
                "Different dictionaries are specified for different languages; "
                "TranslationMultiSimpleEpochTask only supports one shared dictionary across all languages"
            )
    return cls(args, langs, dicts, training)
def setup_task(cls, args, **kwargs):
    """Set up the NNI-tuned translation task from parsed command-line args.

    Identical to the base ``setup_task`` except that, before constructing
    the task, it applies performance knobs supplied by NNI hyperparameter
    search (thread counts and cuDNN settings) from ``args``.

    Args:
        args: parsed argument namespace; must carry the NNI tuning fields
            ``intra``, ``inter``, ``benchmark``, and ``allow_tf32`` in
            addition to the usual task options.
        **kwargs: extra options passed through to
            ``MultilingualDatasetManager.prepare``.

    Returns:
        An instance of ``cls`` initialized with the prepared languages,
        dictionaries, and training flag.

    Raises:
        AssertionError: if two languages were given different dictionaries.
    """
    langs, dicts, training = MultilingualDatasetManager.prepare(
        cls.load_dictionary, args, **kwargs
    )
    # All languages must share a single dictionary; compare each one
    # against the first dictionary seen.
    dict0 = None
    for _, lang_dict in dicts.items():
        if dict0 is None:
            dict0 = lang_dict
        else:
            # Fixed typo/grammar in the original message
            # ("Diffrent dictionary are specified").
            assert dict0 == lang_dict, (
                "Different dictionaries are specified for different languages; "
                "TranslationMultiSimpleEpochNNITask only supports one shared dictionary across all languages"
            )
    # ### NNI modification ###
    # Apply tuner-chosen runtime settings. NOTE(review):
    # torch.set_num_interop_threads may only be called once, before any
    # inter-op parallel work starts — assumed safe here; confirm call order.
    torch.set_num_threads(int(args.intra))
    torch.set_num_interop_threads(int(args.inter))
    torch.backends.cudnn.benchmark = bool(args.benchmark)
    torch.backends.cudnn.allow_tf32 = bool(args.allow_tf32)
    return cls(args, langs, dicts, training)
def setup_task(cls, args, **kwargs):
    """Construct the task from parsed command-line args.

    Delegates dataset preparation to ``MultilingualDatasetManager.prepare``
    and builds the task instance from its results.

    Args:
        args: parsed argument namespace forwarded to the dataset manager
            and the task constructor.
        **kwargs: extra options passed through to the dataset manager.

    Returns:
        An instance of ``cls`` initialized with the prepared languages,
        dictionaries, and training flag.
    """
    prepared = MultilingualDatasetManager.prepare(
        cls.load_dictionary, args, **kwargs
    )
    langs, dicts, training = prepared
    return cls(args, langs, dicts, training)