Example #1
 def gen_new_layer(self, matches, parameters, graph):
     """Replace the matched decomposed-PReLU subgraph with an NHWC->NCHW
     transpose, a channel-wise paddle.nn.PReLU bound to the matched
     parameter, and a transpose back to NHWC."""
     layer_id_list = list(matches.keys())
     layer_id_list.sort(key=int)
     for layer_id, layer in matches.items():
         if layer.kernel == "paddle.nn.ReLU":
             input_name = layer.inputs["x"]
         if layer.kernel == "self.create_parameter":
             param_name = layer.outputs[0]
         if layer.kernel == "paddle.add":
             output_name = layer.outputs[0]
     transpose0 = PaddleLayer(
         id=layer_id_list[-1] + "_1",
         kernel="paddle.transpose",
         inputs={"x": input_name},
         outputs=["{}_transpose_for_prelu".format(input_name)],
         perm=[0, 3, 1, 2])
     prelu_name = "merge_prelu{}".format(self.prelu_index)
     self.prelu_index += 1
     param = parameters[param_name]
     c = param.shape[0]
     prelu = PaddleLayer(
         id=layer_id_list[-1] + "_2",
         kernel="paddle.nn.PReLU",
         inputs={"input": "{}_transpose_for_prelu".format(input_name)},
         outputs=[prelu_name, "{}_prelu".format(input_name)],
         num_parameters=c,
         weight_attr=string(param_name))
     transpose1 = PaddleLayer(id=layer_id_list[-1] + "_3",
                              kernel="paddle.transpose",
                              inputs={"x": "{}_prelu".format(input_name)},
                              outputs=[output_name],
                              perm=[0, 2, 3, 1])
     return [transpose0, prelu, transpose1], layer_id_list[-1]
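The ReLU/parameter/add kernels matched above are assumed to be the usual exported decomposition of PReLU, prelu(x) = relu(x) + alpha * (x - |x|) / 2, which is why the fuser can replace them with a channel-wise paddle.nn.PReLU wrapped in NHWC<->NCHW transposes (PReLU applies its per-channel slope along axis 1). A small NumPy check of that algebraic identity, independent of the matcher:

    import numpy as np

    rng = np.random.default_rng(0)
    x = rng.standard_normal((2, 5, 5, 4)).astype("float32")   # NHWC, as matched
    alpha = rng.random(4).astype("float32")                   # one slope per channel

    # Decomposed form: relu(x) + alpha * (x - |x|) / 2; alpha broadcasts over the last (channel) axis
    y_decomposed = np.maximum(x, 0) + alpha * (x - np.abs(x)) * 0.5

    # What a channel-wise PReLU computes: max(0, x) + alpha * min(0, x)
    y_prelu = np.maximum(x, 0) + alpha * np.minimum(x, 0)

    assert np.allclose(y_decomposed, y_prelu, atol=1e-6)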
Example #2
 def gen_new_layer(self, matches, parameters, graph):
     """Fold the matched decomposed batch-norm kernels (paddle.full, add,
     rsqrt, multiply, subtract) into transpose -> paddle.nn.BatchNorm
     (is_test=True) -> transpose, binding the matched gamma/beta/mean/var
     parameters to the new layer."""
     layer_id_list = list(matches.keys())
     layer_id_list.sort(key=int)
     for layer_id, layer in matches.items():
         if layer.kernel == "paddle.full":
             full_layer = layer
             out_layer_id = graph.edges_out[layer_id][0]
             if matches[out_layer_id].kernel == "paddle.add":
                 var_layer_id = graph.edges_in[out_layer_id][0]
                 var_layer = matches[var_layer_id]
         if layer.kernel == "paddle.rsqrt":
             out_layer_id = graph.edges_out[layer_id][0]
             if matches[out_layer_id].kernel == "paddle.multiply":
                 gamma_layer_id = graph.edges_in[out_layer_id][1]
                 gamma_layer = matches[gamma_layer_id]
         if layer.kernel == "paddle.subtract":
             in_layer_id = graph.edges_in[layer_id][0]
             beta_layer = matches[in_layer_id]
             in_layer_id = graph.edges_in[layer_id][1]
             in_layer_id = graph.edges_in[in_layer_id][0]
             mean_layer = matches[in_layer_id]
             out_layer_id = graph.edges_out[layer_id][0]
             add_layer = matches[out_layer_id]
         if layer.kernel == "paddle.multiply":
             in_layer_id = graph.edges_in[layer_id][1]
             mul_layer = matches[in_layer_id]
             if mul_layer.kernel == "paddle.multiply":
                 in_layer_id = graph.edges_in[layer_id][0]
                 if in_layer_id not in matches:
                     input_name = layer.inputs["x"]
     transpose0 = PaddleLayer(
         id=layer_id_list[-1] + "_1",
         kernel="paddle.transpose",
         inputs={"x": input_name},
         outputs=["{}_transpose_for_bn".format(input_name)],
         perm=[0, 3, 1, 2])
     bn_name = "merge_bn{}".format(self.bn_index)
     self.bn_index += 1
     params = parameters[gamma_layer.outputs[0]]
     c = params.shape[0]
     bn = PaddleLayer(
         id=layer_id_list[-1] + "_2",
         kernel="paddle.nn.BatchNorm",
         inputs={"input": "{}_transpose_for_bn".format(input_name)},
         outputs=[bn_name, "{}_bn".format(input_name)],
         num_channels=c,
         epsilon=full_layer.attrs["fill_value"],
         param_attr=string(gamma_layer.outputs[0]),
         bias_attr=string(beta_layer.outputs[0]),
         moving_mean_name=string(mean_layer.outputs[0]),
         moving_variance_name=string(var_layer.outputs[0]),
         is_test=True)
     transpose1 = PaddleLayer(id=layer_id_list[-1] + "_3",
                              kernel="paddle.transpose",
                              inputs={"x": "{}_bn".format(input_name)},
                              outputs=add_layer.outputs,
                              perm=[0, 2, 3, 1])
     return [transpose0, bn, transpose1], layer_id_list[-1]
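The paddle.full / add / rsqrt / multiply / subtract chain matched here corresponds to the decomposed inference-mode batch norm: scale = gamma * rsqrt(var + eps), then y = x * scale + (beta - mean * scale). That is algebraically what paddle.nn.BatchNorm computes with is_test=True, which justifies the fusion. A quick NumPy check of the identity:

    import numpy as np

    rng = np.random.default_rng(0)
    c = 4
    x = rng.standard_normal((2, c)).astype("float32")
    gamma, beta = rng.standard_normal(c), rng.standard_normal(c)
    mean, var = rng.standard_normal(c), rng.random(c) + 0.1
    eps = 1e-5

    # Decomposed subgraph: scale = gamma * rsqrt(var + eps), y = x * scale + (beta - mean * scale)
    scale = gamma / np.sqrt(var + eps)
    y_decomposed = x * scale + (beta - mean * scale)

    # Inference-mode batch norm with the same parameters
    y_bn = gamma * (x - mean) / np.sqrt(var + eps) + beta

    assert np.allclose(y_decomposed, y_bn, atol=1e-5)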
Example #3
 def gen_new_layer(self, parameters, matches):
     """Collapse the matched subgraph into a single paddle.nn.BatchNorm2D
     layer, re-registering the two matched parameters under the new
     layer's .weight and .bias names."""
     layers_id = list(matches.keys())
     layer = matches[layers_id[0]]
     layer_inputs = layer.inputs
     bn_name = layer.outputs[0]
     layer_attrs = layer.attrs
     layer_attrs.pop("weight_attr")
     layer_attrs.pop("bias_attr")
     layer = matches[layers_id[-1]]
     layer_outputs = [bn_name] + layer.outputs
     layer = matches[layers_id[1]]
     data0_name = layer.outputs[0]
     data0_numpy = parameters.pop(data0_name)
     parameters["{}.weight".format(layer_outputs[0])] = data0_numpy
     layer = matches[layers_id[3]]
     data1_name = layer.outputs[0]
     data1_numpy = parameters.pop(data1_name)
     parameters["{}.bias".format(layer_outputs[0])] = data1_numpy
     new_layer = PaddleLayer(
         layers_id[0],
         "paddle.nn.BatchNorm2D",
         inputs=layer_inputs,
         outputs=layer_outputs,
         **layer_attrs)
     return new_layer
Example #4
 def gen_new_layer(self, parameters, matches):
     """Fuse the matched fully-connected subgraph into paddle.nn.Linear,
     transposing the matched weight to (in_features, out_features) and
     squeezing the matched bias."""
     layers_id = list(matches.keys())
     if len(layers_id) == 5:
         layer = matches[layers_id[2]]
     else:
         layer = matches[layers_id[-1]]
     input_name = layer.inputs["x"]
     scope_name = layer.scope_name
     layer = matches[layers_id[-1]]
     output_name = layer.outputs[0]
     layer = matches[layers_id[0]]
     weight_name = layer.outputs[0]
     layer = matches[layers_id[-2]]
     bias_name = layer.outputs[0]
     attrs = dict()
     attrs["in_features"] = parameters[weight_name].shape[1]
     attrs["out_features"] = parameters[weight_name].shape[0]
     linear_name = "linear{}".format(self.linear_index)
     self.linear_index += 1
     parameters["{}.weight".format(
         linear_name)] = parameters[weight_name].transpose((1, 0))
     parameters["{}.bias".format(linear_name)] = np.squeeze(
         parameters[bias_name])
     new_layer = PaddleLayer(layers_id[0],
                             "paddle.nn.Linear",
                             inputs={"input": input_name},
                             outputs=[linear_name, output_name],
                             scope_name=scope_name,
                             **attrs)
     return new_layer
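The final transpose reflects a weight-layout difference: the matched parameter is stored as (out_features, in_features), as the attrs above read off its shape, while paddle.nn.Linear keeps its weight as (in_features, out_features). Assuming the matched subgraph computes x @ W^T + b, a toy NumPy sketch of why the transposed parameter reproduces the same output:

    import numpy as np

    rng = np.random.default_rng(0)
    x = rng.standard_normal((2, 3))                     # batch of 2, in_features = 3
    w = rng.standard_normal((5, 3))                     # matched layout: (out_features, in_features)
    b = rng.standard_normal((1, 5))                     # bias that np.squeeze flattens to (5,)

    y_original = x @ w.T + np.squeeze(b)                # assumed matched computation: x @ W^T + b
    y_linear = x @ w.transpose((1, 0)) + np.squeeze(b)  # Linear convention: x @ W + b with W of shape (in, out)

    assert np.allclose(y_original, y_linear)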
Example #5
    def merge_node(self, sub_layers_list, attrs_table, module_name):
        """Generate a Module class from the first group of repeated
        sub-layers, then replace every occurrence in the graph with a
        'module' layer carrying only the attributes that differ from the
        defaults in attrs_table."""
        sub_layers = sub_layers_list[0]
        diff_attrs_column = self.analyze_attrs_table(attrs_table)
        sub_layers, _, _ = rename_layers(sub_layers)
        code_str = gen_layer_code(self.pd_graph,
                                  sub_layers,
                                  module_name,
                                  different_attrs=diff_attrs_column)
        self.codes.append(code_str)
        for index, sub_layers in enumerate(sub_layers_list):
            inputs, outputs = get_inputs_outputs(self.pd_graph, sub_layers)
            inputs_dict = dict()
            for i, input in enumerate(inputs):
                inputs_dict["input_{}".format(i)] = input
            mn = module_name.lower()
            outputs = ["{}_{}".format(mn, index)] + outputs
            node_name = "{}_{}".format(module_name, index)
            diff_attrs = dict()
            for column, element in diff_attrs_column.items():
                current_element = attrs_table.get(column).loc[node_name]
                if current_element != element:
                    diff_attrs[column] = current_element
            new_layer = PaddleLayer(id=list(sub_layers.keys())[-1],
                                    kernel="module",
                                    inputs=inputs_dict,
                                    outputs=outputs,
                                    module=module_name,
                                    **diff_attrs)

            _, nn_param_nodes, _ = rename_layers(sub_layers, self.param_tree)
            param_node = PamareterNode(old_name=outputs[0])
            for node in nn_param_nodes:
                param_node.add_child(node)
            self.param_tree.add_node(param_node)

            for i, (layer_id, layer) in enumerate(sub_layers.items()):
                if i == len(sub_layers) - 1:
                    self.pd_graph.layers[layer_id] = new_layer
                else:
                    if len(layer_id.split(".")) > 1:
                        continue
                    self.pd_graph.layers.pop(layer_id)

            self.pd_graph.build()
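merge_node treats attrs_table as a pandas-style table indexed by node name with one column per layer attribute, and analyze_attrs_table (not shown) is assumed to return a default value per differing column; only attributes deviating from those defaults are forwarded to each module instance. A toy illustration of that lookup, with hypothetical column names and values:

    import pandas as pd

    # Rows are module instances, columns are layer attributes.
    attrs_table = pd.DataFrame({"kernel_size": [3, 3, 3], "stride": [1, 2, 1]},
                               index=["Block_0", "Block_1", "Block_2"])
    diff_attrs_column = {"stride": 1}                # assumed shape of analyze_attrs_table's result

    node_name = "Block_1"
    diff_attrs = dict()
    for column, element in diff_attrs_column.items():
        current_element = attrs_table.get(column).loc[node_name]
        if current_element != element:
            diff_attrs[column] = current_element     # -> {"stride": 2}, forwarded as **diff_attrs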
Example #6
 def gen_new_layer(self, parameters, matches):
     """Fuse the matched pooling subgraph into a single
     paddle.nn.functional.adaptive_avg_pool2d layer; if the last matched
     layer is not an adaptive average pool, return a copy of it unchanged."""
     layers_id = list(matches.keys())
     if matches[layers_id[
             -1]].kernel == "paddle.nn.functional.adaptive_avg_pool2d":
         layer = matches[layers_id[11]]
         pool_size = layer.attrs["list"]
         layer = matches[layers_id[0]]
         input_name = layer.inputs["input"]
         layer = matches[layers_id[-1]]
         output_name = layer.outputs[0]
         attrs = dict()
         attrs["output_size"] = pool_size
         new_layer = PaddleLayer(layers_id[0],
                                 "paddle.nn.functional.adaptive_avg_pool2d",
                                 inputs={"x": input_name},
                                 outputs=[output_name],
                                 **attrs)
     else:
         new_layer = copy.deepcopy(matches[layers_id[-1]])
     return new_layer
Example #7
    def merge_node(self, sub_layers_list, attrs_table, node_name2sub_layers,
                   module_name):
        """ 将一个scope的节点合成一个Module(Class),并将对应的Class代码
            放到code字符串中。
        """
        def get_node_name(sub_layers):
            for k, v in node_name2sub_layers.items():
                if v == sub_layers:
                    node_name = k
                    break
            return node_name

        sub_layers = sub_layers_list[0]
        node_name = get_node_name(sub_layers)

        sub_layers, _, _ = rename_layers(sub_layers)
        diff_attrs_column = self.analyze_attrs_table(attrs_table)
        if module_name is None:
            module_name = node_name.replace("/",
                                            "_")  #node_name.split("/")[-1]
            module_name = module_name[0].upper() + module_name[1:]
        if module_name in self.module_name2count:
            module_name = module_name + "_0"
        code_str = gen_layer_code(self.pd_graph,
                                  sub_layers,
                                  module_name,
                                  different_attrs=diff_attrs_column)

        self.codes.append(code_str)
        for sub_layers in sub_layers_list:
            inputs, outputs = get_inputs_outputs(self.pd_graph, sub_layers)
            inputs_dict = dict()
            for i, input in enumerate(inputs):
                inputs_dict["input_{}".format(i)] = input
            if module_name in self.module_name2count:
                self.module_name2count[module_name] += 1
            else:
                self.module_name2count[module_name] = 0
            if module_name.lower() in NN_KERNEL_NAME.values():
                mn = module_name.lower() + "__"
            else:
                mn = module_name.lower()
            outputs = [
                "{}/{}".format(mn, self.module_name2count[module_name])
            ] + outputs
            node_name = get_node_name(sub_layers)
            diff_attrs = dict()
            for column in diff_attrs_column:
                diff_attrs[column] = attrs_table.get(column).loc[node_name]

            node_name_seg = node_name.split(SEPARATOR_IN_SCOPE)
            node_name_seg[-1] = module_name.lower()
            new_node_name = SEPARATOR_IN_SCOPE.join(node_name_seg)
            new_layer = PaddleLayer(id=list(sub_layers.keys())[-1],
                                    kernel="module",
                                    inputs=inputs_dict,
                                    outputs=outputs,
                                    scope_name=new_node_name,
                                    module=module_name,
                                    **diff_attrs)

            _, nn_param_nodes, _ = rename_layers(sub_layers, self.param_tree)
            param_node = PamareterNode(old_name=outputs[0])
            for node in nn_param_nodes:
                param_node.add_child(node)
            self.param_tree.add_node(param_node)

            for i, (layer_id, layer) in enumerate(sub_layers.items()):
                if i == len(sub_layers) - 1:
                    self.pd_graph.layers[layer_id] = new_layer
                else:
                    self.pd_graph.layers.pop(layer_id)

            self.pd_graph.build()
            self[node_name].data = new_layer