def _callback_wrapper(list_callback, run_context, callback_type):
    """
    Reset the auto parallel context for a callback of model training.

    Raises:
        ValueError: If the callback type keyword is not recognized.
    """
    _callback_func_map = {
        "begin": list_callback.begin,
        "epoch_begin": list_callback.epoch_begin,
        "step_begin": list_callback.step_begin,
        "step_end": list_callback.step_end,
        "epoch_end": list_callback.epoch_end,
        "end": list_callback.end
    }

    if callback_type not in _callback_func_map:
        raise ValueError("Callback type keyword '%s' is not recognized!" % callback_type)
    func = _callback_func_map[callback_type]

    # At the start of training, snapshot the current auto parallel context so it
    # can be restored around every subsequent callback invocation.
    if callback_type == "begin":
        _reset_checkpoint_auto_parallel_context()
        _checkpoint_auto_parallel_context()

    global _parallel_mode
    if _parallel_mode == "stand_alone":
        func(run_context)
        return

    # In parallel modes, run the callback under the default (stand-alone) context
    # and restore the training context afterwards.
    _reset_auto_parallel_context()
    func(run_context)
    _restore_auto_parallel_context()
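# A minimal, self-contained sketch (not the MindSpore implementation itself)
# of how the keyword-to-method dispatch in _callback_wrapper works. The
# LoggingCallback class and the driver loop below are hypothetical stand-ins
# for a real callback list and training loop; the parallel-context
# bookkeeping is intentionally omitted.

class LoggingCallback:
    """Stand-in callback exposing one method per training phase."""

    def begin(self, run_context):
        print("train begin:", run_context)

    def epoch_begin(self, run_context):
        print("epoch begin:", run_context)

    def step_begin(self, run_context):
        print("step begin:", run_context)

    def step_end(self, run_context):
        print("step end:", run_context)

    def epoch_end(self, run_context):
        print("epoch end:", run_context)

    def end(self, run_context):
        print("train end:", run_context)


def _dispatch(cb, run_context, callback_type):
    """Route a callback_type keyword to the matching callback method."""
    func_map = {
        "begin": cb.begin,
        "epoch_begin": cb.epoch_begin,
        "step_begin": cb.step_begin,
        "step_end": cb.step_end,
        "epoch_end": cb.epoch_end,
        "end": cb.end,
    }
    if callback_type not in func_map:
        raise ValueError("Callback type keyword '%s' is not recognized!" % callback_type)
    func_map[callback_type](run_context)


if __name__ == "__main__":
    cb = LoggingCallback()
    ctx = {"cur_epoch_num": 1, "cur_step_num": 1}
    for phase in ("begin", "epoch_begin", "step_begin",
                  "step_end", "epoch_end", "end"):
        _dispatch(cb, ctx, phase)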
def reset_auto_parallel_context():
    """
    Reset auto parallel context attributes to the default values:

    - device_num: 1.
    - global_rank: 0.
    - gradients_mean: False.
    - gradient_fp32_sync: True.
    - parallel_mode: 'stand_alone'.
    - auto_parallel_search_mode: 'dynamic_programming'.
    - parameter_broadcast: False.
    - strategy_ckpt_load_file: ''.
    - strategy_ckpt_save_file: ''.
    - full_batch: False.
    - enable_parallel_optimizer: False.
    """
    _reset_auto_parallel_context()
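# Hedged usage sketch: configure a data-parallel context, train, then restore
# the documented defaults with reset_auto_parallel_context(). This assumes
# MindSpore is installed; set_auto_parallel_context and the argument names
# device_num, parallel_mode and gradients_mean come from the same context
# module, but treat the exact signature as an assumption if your version differs.
from mindspore import context

context.set_auto_parallel_context(device_num=8,
                                  parallel_mode="data_parallel",
                                  gradients_mean=True)

# ... build the network, wrap it in a Model, and run training here ...

# Return every attribute (device_num, global_rank, gradients_mean, ...) to the
# defaults listed in the docstring above.
context.reset_auto_parallel_context()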