def test_program_clone_with_parameter(self):
    """Cloning a program must also copy the parameters of its global block."""
    # NOTE(review): this method is defined twice in this file; the later
    # definition shadows this one inside the enclosing class body.
    main_program = Program()
    startup_program = Program()
    with program_guard(main_program, startup_program):
        data_in = layers.data(name='x', shape=[784], dtype='float32')
        hidden = layers.fc(input=data_in, size=100)
        layers.fc(input=hidden, size=100)
    cloned = main_program.clone()
    # The clone's block 0 must still hold the fc-layer parameters.
    self.assertNotEqual(0, len(cloned.blocks[0].all_parameters()))
def test_program_clone_with_parameter(self):
    """Parameters created under a program_guard survive Program.clone()."""
    # NOTE(review): verbatim duplicate of an earlier method with the same
    # name; in a class body this later definition wins.
    main_program = Program()
    startup_program = Program()
    with program_guard(main_program, startup_program):
        inp = layers.data(name='x', shape=[784], dtype='float32')
        first_fc = layers.fc(input=inp, size=100)
        layers.fc(input=first_fc, size=100)
    copied = main_program.clone()
    params = copied.blocks[0].all_parameters()
    self.assertNotEqual(0, len(params))
def test_program_clone(self):
    """Build a program holding a single mul op and print it and its clone."""
    prog = Program()
    block = prog.global_block()
    x = block.create_var(name='X', shape=[1000, 784], dtype='float32')
    y = block.create_var(name='Y', shape=[784, 100], dtype='float32')
    out = block.create_var(name='Out', dtype='float32')
    block.append_op(
        type="mul", inputs={'X': [x], 'Y': [y]}, outputs={'Out': [out]})
    # FIXME(yuyang18): We manual compare the output string, since the order
    # of variable could be changed.
    print(prog)
    print(prog.clone())
def test_program_clone(self):
    """Create X*Y=Out in a fresh program, then print both the original and a clone."""
    # NOTE(review): duplicate of an earlier method with the same name; the
    # later definition shadows the earlier one in the class body.
    prog = Program()
    gblock = prog.global_block()
    x = gblock.create_var(name='X', shape=[1000, 784], dtype='float32')
    y = gblock.create_var(name='Y', shape=[784, 100], dtype='float32')
    out = gblock.create_var(name='Out', dtype='float32')
    gblock.append_op(
        type="mul",
        inputs={'X': [x], 'Y': [y]},
        outputs={'Out': [out]})
    # FIXME(yuyang18): We manual compare the output string, since the order
    # of variable could be changed.
    print(prog)
    print(prog.clone())
def test_fit_line_inference_model(self):
    """End-to-end check of save/load_inference_model: train a tiny linear
    regressor, save it in both separated and unified formats, then reload
    each and verify the reloaded programs reproduce the training program's
    loss on the same batch.
    """
    MODEL_DIR = "./tmp/inference_model"
    UNI_MODEL_DIR = "./tmp/inference_model1"
    init_program = Program()
    program = Program()
    # Build y_predict = fc(x) with a squared-error loss, optimized by SGD.
    with program_guard(program, init_program):
        x = layers.data(name='x', shape=[2], dtype='float32')
        y = layers.data(name='y', shape=[1], dtype='float32')
        y_predict = layers.fc(input=x, size=1, act=None)
        cost = layers.square_error_cost(input=y_predict, label=y)
        avg_cost = layers.mean(cost)
        sgd_optimizer = optimizer.SGDOptimizer(learning_rate=0.001)
        sgd_optimizer.minimize(avg_cost, init_program)
    place = core.CPUPlace()
    exe = executor.Executor(place)
    # Run the startup program once to initialize parameters.
    exe.run(init_program, feed={}, fetch_list=[])
    # Train for 100 iterations on a fixed 4-sample batch.
    for i in six.moves.xrange(100):
        tensor_x = np.array([[1, 1], [1, 2], [3, 4],
                             [5, 2]]).astype("float32")
        tensor_y = np.array([[-2], [-3], [-7], [-7]]).astype("float32")
        exe.run(program,
                feed={'x': tensor_x,
                      'y': tensor_y},
                fetch_list=[avg_cost])
    # Separated model and unified model
    save_inference_model(MODEL_DIR, ["x", "y"], [avg_cost], exe, program)
    save_inference_model(UNI_MODEL_DIR, ["x", "y"], [avg_cost], exe, program,
                         'model', 'params')
    main_program = program.clone()._prune_with_input(
        feeded_var_names=["x", "y"], targets=[avg_cost])
    # Serialize persistable variables (parameters) to a string for the
    # unified-format reload below.
    params_str = save_persistables(exe, None, main_program, None)
    # Reference loss from the training program; tensor_x/tensor_y here are
    # the values left over from the final loop iteration above.
    expected = exe.run(program,
                       feed={'x': tensor_x,
                             'y': tensor_y},
                       fetch_list=[avg_cost])[0]
    six.moves.reload_module(executor)  # reload to build a new scope
    # model_0: separated format loaded from a directory.
    model_0 = InferModel(load_inference_model(MODEL_DIR, exe))
    with open(os.path.join(UNI_MODEL_DIR, 'model'), "rb") as f:
        model_str = f.read()
    # model_1: unified format loaded from in-memory strings.
    model_1 = InferModel(
        load_inference_model(None, exe, model_str, params_str))
    for model in [model_0, model_1]:
        outs = exe.run(model.program,
                       feed={
                           model.feed_var_names[0]: tensor_x,
                           model.feed_var_names[1]: tensor_y
                       },
                       fetch_list=model.fetch_vars)
        actual = outs[0]
        self.assertEqual(model.feed_var_names, ["x", "y"])
        self.assertEqual(len(model.fetch_vars), 1)
        print("fetch %s" % str(model.fetch_vars[0]))
        # Both reloaded models must reproduce the training-program loss.
        self.assertEqual(expected, actual)
    # Passing a model string without params must raise ValueError.
    self.assertRaises(ValueError, fluid.io.load_inference_model, None, exe,
                      model_str, None)
class GraphWrapper(object):
    """
    A wrapper of paddle.fluid.framework.IrGraph with some special functions
    for the paddle slim framework.

    Args:
        program(framework.Program): The program to wrap. A new empty
            Program is created when this is None.
        in_nodes(dict): A dict to indicate the input nodes of the graph.
            The key is user-defined and human-readable name.
            The value is the name of Variable.
        out_nodes(dict): A dict to indicate the output nodes of the graph.
            The key is user-defined and human-readable name.
            The value is the name of Variable.
    """

    def __init__(self, program=None, in_nodes=None, out_nodes=None):
        """
        """
        super(GraphWrapper, self).__init__()
        # NOTE(review): this instance attribute shadows the program() method
        # defined below, so `wrapper.program` yields the Program object.
        self.program = Program() if program is None else program
        self.persistables = {}
        self.teacher_persistables = {}
        # Index persistable variables (parameters, etc.) by name.
        for var in self.program.list_vars():
            if var.persistable:
                self.persistables[var.name] = var
        self.compiled_graph = None
        # Fixed: the defaults were mutable lists ([]); None sentinels are
        # used instead. Behavior is unchanged for all existing callers.
        in_nodes = [] if in_nodes is None else in_nodes
        out_nodes = [] if out_nodes is None else out_nodes
        self.in_nodes = OrderedDict(in_nodes)
        self.out_nodes = OrderedDict(out_nodes)
        self._attrs = OrderedDict()

    def all_parameters(self):
        """
        Get all the parameters in this graph.

        Returns:
            list<VarWrapper>: A list of VarWrapper instances.
        """
        params = []
        for block in self.program.blocks:
            for param in block.all_parameters():
                params.append(VarWrapper(param, self))
        return params

    def is_parameter(self, var):
        """
        Whether the given variable is parameter.

        Args:
            var(VarWrapper): The given variable.
        """
        return isinstance(var._var, Parameter)

    def is_persistable(self, var):
        """
        Whether the given variable is persistable.

        Args:
            var(VarWrapper): The given variable.
        """
        return var._var.persistable

    def ops(self):
        """
        Return all operator nodes included in the graph as a list of
        OpWrapper instances.
        """
        ops = []
        for block in self.program.blocks:
            for op in block.ops:
                ops.append(OpWrapper(op, self))
        return ops

    def vars(self):
        """
        Get all the variables as a list of VarWrapper instances.
        """
        return [VarWrapper(var, self) for var in self.program.list_vars()]

    def var(self, name):
        """
        Get the variable by variable name.

        Returns:
            VarWrapper: The wrapped variable, or None when no block
            contains a variable with the given name.
        """
        for block in self.program.blocks:
            if block.has_var(name):
                return VarWrapper(block.var(name), self)
        return None

    def clone(self, for_test=False):
        """
        Clone a new graph from current graph.

        Returns:
            (GraphWrapper): The wrapper of a new graph.
        """
        return GraphWrapper(
            self.program.clone(for_test),
            copy.deepcopy(self.in_nodes), copy.deepcopy(self.out_nodes))

    def program(self):
        """
        Get the program in current wrapper.

        NOTE(review): this method is unreachable through normal attribute
        access because __init__ assigns an instance attribute with the same
        name; it is kept only for interface compatibility.
        """
        return self.program

    def pre_ops(self, op):
        """
        Get all the previous operators of target operator.

        Args:
            op(OpWrapper): Target operator.

        Returns:
            list<OpWrapper>: A list of operators.
        """
        ops = []
        for p in self.ops():
            for in_var in op.all_inputs():
                if in_var in p.all_outputs():
                    ops.append(p)
        return ops

    def next_ops(self, op):
        """
        Get all the next operators of target operator.

        Args:
            op(OpWrapper): Target operator.

        Returns:
            list<OpWrapper>: A list of operators.
        """
        ops = []
        for p in self.ops():
            for out_var in op.all_outputs():
                if out_var in p.all_inputs():
                    ops.append(p)
        return ops

    def get_param_by_op(self, op):
        """
        Get the parameters used by target operator.

        Raises:
            AssertionError: When op has no Parameter inputs.
        """
        assert isinstance(op, OpWrapper)
        params = []
        for var in op.all_inputs():
            if isinstance(var._var, Parameter):
                params.append(var)
        assert len(params) > 0
        return params

    def numel_params(self):
        """
        Get the number of elements in all parameters.
        """
        ret = 0
        for param in self.all_parameters():
            # Fixed: np.product is a deprecated alias removed in NumPy 2.0;
            # np.prod is the documented, equivalent function.
            ret += np.prod(param.shape())
        return ret

    def infer_shape(self):
        """
        Update the groups of convolution layer according to current filters.
        It is used after loading pruned parameters from file.
        """
        for op in self.ops():
            if op.type() != 'conditional_block':
                op._op.desc.infer_shape(op._op.block.desc)