def set_config(model_path):
    """Build a Paddle ``AnalysisConfig`` for the model stored at *model_path*.

    If a combined ``__model__`` file exists in the directory, the config is
    loaded from the directory itself; otherwise separate ``model``/``params``
    files are assumed. Analysis-pass settings (IR optimization, MKL-DNN,
    thread count) are applied only when ``test_args.use_analysis`` is set.

    :param model_path: directory containing the saved inference model
    :return: the configured ``AnalysisConfig``
    """
    combined_model = os.path.join(model_path, '__model__')
    if os.path.exists(combined_model):
        cfg = AnalysisConfig(model_path)
    else:
        cfg = AnalysisConfig(model_path + '/model', model_path + '/params')

    if test_args.use_analysis:
        cfg.switch_ir_optim(True)
        cfg.enable_mkldnn()
        cfg.set_mkldnn_cache_capacity(test_args.mkldnn_cache_capacity)
        cfg.set_cpu_math_library_num_threads(test_args.num_threads)
        return cfg

    # NOTE(review): to_native_config() returns a NativeConfig, but the result
    # is discarded and the AnalysisConfig is returned — confirm this is intended.
    cfg.to_native_config()
    return cfg
Example #2
0
 def __load_inference_model(self, model_path, use_gpu):
     """Create a Paddle predictor for the saved model under *model_path*.

     Expects ``model`` and ``params`` files inside *model_path*. When
     *use_gpu* is true, GPU execution is enabled with a 1024 MB initial
     memory pool (after ``check_cuda`` validates availability); otherwise
     the predictor runs on CPU with MKL-DNN enabled.

     :param model_path: directory holding the ``model`` and ``params`` files
     :param use_gpu: whether to run inference on GPU
     :return: a predictor created by ``create_paddle_predictor``
     """
     check_cuda(use_gpu)
     cfg = AnalysisConfig(model_path + "/" + "model", model_path + "/" + "params")
     if not use_gpu:
         cfg.disable_gpu()
         cfg.enable_mkldnn()
     else:
         cfg.enable_use_gpu(1024)
     # NOTE(review): converting to a NativeConfig may drop AnalysisConfig-only
     # settings such as MKL-DNN — confirm against the Paddle version in use.
     return create_paddle_predictor(cfg.to_native_config())