import os
import pickle

# `Program` and `load_persistables_if_exist` are expected to be available in
# this module's scope (imported from the surrounding framework/io layer).


def load_inference_model(dirname, executor):
    """
    Load the inference model from a directory.

    :param dirname: directory path
    :param executor: executor that loads the inference model
    :return: [program, feed_var_names, fetch_vars]
             program: the program, built especially for inference.
             feed_var_names: names of the variables that need to be fed data.
             fetch_vars: variables from which we can get inference results.
    """
    if not os.path.isdir(dirname):
        raise ValueError("There is no directory named '%s'" % dirname)

    # The serialized program and the feed/fetch variable names are pickled
    # together into a single "__model__" file by the matching save routine.
    model_file_name = dirname + "/__model__"
    with open(model_file_name, "rb") as f:
        model = pickle.load(f)

    program_desc_str = model["program_desc_str"]
    feed_var_names = model["feed_var_names"]
    fetch_var_names = model["fetch_var_names"]

    # Rebuild the Program from its serialized ProgramDesc and load any
    # persistable parameters saved alongside it.
    program = Program.parse_from_string(program_desc_str)
    load_persistables_if_exist(executor, dirname, program)
    fetch_vars = [program.global_block().var(name) for name in fetch_var_names]

    return [program, feed_var_names, fetch_vars]
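For reference, a minimal usage sketch of the returned triple, assuming a fluid-style executor `exe` has already been created and that the model takes a single 784-wide float input; the directory path, the `image_data` array, and its shape are illustrative assumptions, not taken from the code above.

import numpy as np

# Hypothetical input; the shape must match the model's feed variable.
image_data = np.random.random((1, 784)).astype("float32")

# "./inference_model" is an illustrative path; `exe` is an assumed,
# already-constructed executor.
[program, feed_var_names, fetch_vars] = load_inference_model(
    "./inference_model", exe)

results = exe.run(program,
                  feed={feed_var_names[0]: image_data},
                  fetch_list=fetch_vars)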
def test_parse_program_from_string(self):
    # Round-trip a Program through ProgramDesc serialization and make sure
    # the restored program matches the original.
    prog = Program()
    x = prog.global_block().create_var(
        name='X', shape=[1000, 784], dtype='float32')
    y = prog.global_block().create_var(
        name='Y', shape=[784, 100], dtype='float32')
    out = prog.global_block().create_var(name='Out', dtype='float32')
    prog.global_block().append_op(
        type="mul", inputs={'X': [x],
                            'Y': [y]}, outputs={'Out': [out]})

    binary_str = prog.desc.serialize_to_string()
    prog_restored = Program.parse_from_string(binary_str)

    print(prog)
    print(prog_restored)