Ejemplo n.º 1
0
 def forward(self, input, use_cudnn=True, use_mkldnn=True):
     """Append a conv2d op (plus an elementwise_add when ``self.bias`` is
     set) to the current block and return the output variable.

     Args:
         input: the convolution input variable (created by a caller).
         use_cudnn (bool): forwarded to the conv2d op's ``use_cudnn`` attr.
         use_mkldnn (bool): forwarded to the conv2d op's ``use_mkldnn`` attr.

     Returns:
         The conv output variable, or the bias-added variable when a bias
         is configured.
     """
     main_block = self.memory.main_program.current_block()
     conv_op_desc = main_block.desc.append_op()
     conv_out = main_block.create_var(name="%s_out" % self.base_name,
                                      dtype='float32')
     conv_op = Operator(block=main_block,
                        desc=conv_op_desc,
                        type='conv2d',
                        inputs={
                            'Input': input,
                            'Filter': self.main_conv_weight
                        },
                        outputs={'Output': conv_out},
                        attrs={
                            'strides': self.stride,
                            'paddings': self.padding,
                            'dilations': self.dilation,
                            'groups': self.groups,
                            'use_cudnn': use_cudnn,
                            'use_mkldnn': use_mkldnn
                        })
     main_block.ops.append(conv_op)
     if self.bias:
         # add bias
         final_out_name = "%s_%d_out" % (self.base_name, self.call_count)
         final_out_var = main_block.create_var(name=final_out_name,
                                               shape=conv_out.shape,
                                               dtype='float32')
         add_op_desc = main_block.desc.append_op()
         add_op = Operator(block=main_block,
                           desc=add_op_desc,
                           type='elementwise_add',
                           inputs={
                               'X': [conv_out],
                               'Y': [self.bias]
                           },
                           outputs={'Out': [final_out_var]},
                           attrs={'axis': 1})
         main_block.ops.append(add_op)
         self.memory.add_var(final_out_var)
         # Fix: the early return used to skip the counter increment, so
         # every bias call produced the same final_out_name.
         self.call_count += 1
         return final_out_var
     self.call_count += 1
     return conv_out
Ejemplo n.º 2
0
 def forward(self, input):
     """Append a mul op (input x ``self.main_weight``) and, when
     ``self.bias`` is set, an elementwise_add with ``self.main_bias``.

     Args:
         input: the input variable of this fully-connected layer.

     Returns:
         The layer's output variable (bias-added when bias is set).
     """
     main_block = self.memory.main_program.current_block()
     tmp_out_name = "%s_tmp_%d_out" % (self.base_name, self.call_count)
     tmp_out_var = main_block.create_var(name=tmp_out_name,
                                         shape=[-1, self.output_dim],
                                         dtype='float32')
     self.memory.add_var(tmp_out_var)
     mul_op_desc = main_block.desc.append_op()
     mul_op = Operator(block=main_block,
                       desc=mul_op_desc,
                       type='mul',
                       inputs={
                           'X': [input],
                           'Y': [self.main_weight]
                       },
                       outputs={'Out': [tmp_out_var]},
                       attrs={
                           'x_num_col_dims': 1,
                           'y_num_col_dims': 1
                       })
     main_block.ops.append(mul_op)
     if self.bias:
         final_out_name = "%s_%d_out" % (self.base_name, self.call_count)
         final_out_var = main_block.create_var(name=final_out_name,
                                               shape=[-1, self.output_dim],
                                               dtype='float32')
         self.memory.add_var(final_out_var)
         add_op_desc = main_block.desc.append_op()
         add_op = Operator(block=main_block,
                           desc=add_op_desc,
                           type='elementwise_add',
                           inputs={
                               'X': [tmp_out_var],
                               'Y': [self.main_bias]
                           },
                           outputs={'Out': [final_out_var]},
                           attrs={'axis': 1})
         main_block.ops.append(add_op)
         # Fix: the early return used to skip the counter increment, so
         # repeated bias calls collided on the same variable names.
         self.call_count += 1
         return final_out_var
     self.call_count += 1
     return tmp_out_var
Ejemplo n.º 3
0
 def forward(self, input):
     """Append a relu op over ``input`` to the current block.

     Returns:
         The freshly created output variable of the relu op.
     """
     block = self.memory.main_program.current_block()
     result_name = "%s_%d_out" % (self.base_name, self.call_count)
     result_var = block.create_var(name=result_name, dtype='float32')
     self.memory.add_var(result_var)
     op_desc = block.desc.append_op()
     relu_operator = Operator(block=block,
                              desc=op_desc,
                              type='relu',
                              inputs={'X': input},
                              outputs={'Out': result_var})
     block.ops.append(relu_operator)
     self.call_count += 1
     return result_var
Ejemplo n.º 4
0
 def forward(self, input1, input2):
     """Append an elementwise_add of the two input variables.

     Returns:
         The variable holding input1 + input2.
     """
     block = self.memory.main_program.current_block()
     sum_var = block.create_var(name="%s_out" % self.base_name,
                                dtype='float32')
     add_desc = block.desc.append_op()
     add_operator = Operator(block=block,
                             desc=add_desc,
                             type='elementwise_add',
                             inputs={
                                 'X': [input1],
                                 'Y': [input2]
                             },
                             outputs={'Out': [sum_var]})
     block.ops.append(add_operator)
     return sum_var
Ejemplo n.º 5
0
 def forward(self, softmax, label):
     """Append a cross_entropy op over (softmax, label).

     Args:
         softmax: the predicted probability variable.
         label: the ground-truth label variable.

     Returns:
         The loss variable produced by the op.
     """
     loss_name = "%s_loss" % self.base_name
     # Fix: dropped the unused `start_block` local (startup program was
     # fetched but never used).
     main_block = self.memory.main_program.current_block()
     loss_var = main_block.create_var(name=loss_name,
                                      dtype='float32')
     loss_desc = main_block.desc.append_op()
     loss_op = Operator(block=main_block,
                        desc=loss_desc,
                        type='cross_entropy',
                        inputs={'X':[softmax],
                                'Label':[label]},
                        outputs={'Y':[loss_var]},
                        attrs={"soft_label":False})
     self.memory.add_var(loss_var)
     main_block.ops.append(loss_op)
     return loss_var
Ejemplo n.º 6
0
 def forward(self, input, use_cudnn=False):
     """Append a softmax op over ``input`` and return its output variable.

     Args:
         input: the logits variable to normalize.
         use_cudnn (bool): forwarded to the op's ``use_cudnn`` attr.
     """
     out_name = "%s_%d_out" % (self.base_name, self.call_count)
     # Fix: dropped the unused `start_block` local (startup program was
     # fetched but never used).
     main_block = self.memory.main_program.current_block()
     out_var = main_block.create_var(name=out_name,
                                      dtype='float32')
     softmax_desc = main_block.desc.append_op()
     softmax_op = Operator(block=main_block,
                           desc=softmax_desc,
                           type='softmax',
                           inputs={'X':[input]},
                           outputs={'Out':[out_var]},
                           attrs={"use_cudnn":use_cudnn})
     self.memory.add_var(out_var)
     main_block.ops.append(softmax_op)
     self.call_count += 1
     return out_var
Ejemplo n.º 7
0
 def forward(self, input):
     """Insert a batch_norm op for ``input`` and return its primary output.

     All five op outputs (Y, running mean/variance, saved batch
     statistics) are registered with the layer memory.
     """
     block = self.memory.main_program.current_block()
     op_desc = block.desc.append_op()
     bn_out = block.create_var(name="%s_bn_out" %
                               self.base_name,
                               dtype='float32')
     # The layer's own mean/variance vars double as the op's running
     # MeanOut/VarianceOut outputs.
     mean_out = self.mean
     variance_out = self.variance
     saved_mean = block.create_var(name="%s_mean_out" % self.base_name,
                                   dtype='float32')
     saved_variance = block.create_var(name="%s_var_out" %
                                       self.base_name,
                                       dtype='float32')
     bn_op = Operator(block=block,
                      desc=op_desc,
                      type="batch_norm",
                      inputs={
                          "X": input,
                          "Scale": self.main_scale,
                          "Bias": self.main_bias,
                          "Mean": self.main_mean,
                          "Variance": self.main_variance
                      },
                      outputs={
                          "Y": bn_out,
                          "MeanOut": mean_out,
                          "VarianceOut": variance_out,
                          "SavedMean": saved_mean,
                          "SavedVariance": saved_variance
                      },
                      attrs={
                          "momentum": 0.9,
                          "epsilon": 1e-5,
                          "is_test": False,
                          "use_mkldnn": False,
                          "fuse_with_relu": False
                      })
     for out_var in (bn_out, mean_out, variance_out, saved_mean,
                     saved_variance):
         self.memory.add_var(out_var)
     block.ops.append(bn_op)
     self.call_count += 1
     return bn_out
Ejemplo n.º 8
0
    def forward(self,
                input,
                pool_type='max',
                pool_size=-1,
                pool_stride=1,
                pool_padding=0,
                use_cudnn=True,
                ceil_mode=False,
                use_mkldnn=False,
                global_pooling=False):
        """Append a pool2d op to the current block and return its output.

        Args:
            input: the variable to pool.
            pool_type (str): 'max' or 'avg'.
            pool_size: kernel size; must be set (!= -1) unless
                global_pooling is True.
            pool_stride / pool_padding: stride and padding, scalar or pair.
            use_cudnn / ceil_mode / use_mkldnn / global_pooling: forwarded
                to the matching pool2d attrs.

        Raises:
            ValueError: for an unknown pool_type, or when pool_size is
                missing while global_pooling is False.
        """
        if pool_type not in ["max", "avg"]:
            # Fix: the message was passed with a comma (two exception args)
            # instead of %-formatting, so '%s' was never substituted.
            raise ValueError(
                "Unknown pool_type: '%s'. It can only be 'max' or 'avg'." %
                str(pool_type))

        if global_pooling is False and pool_size == -1:
            raise ValueError(
                "When the global_pooling is False, pool_size must be passed "
                "and be a valid value. Received pool_size: " + str(pool_size))
        main_block = self.memory.main_program.current_block()
        out_name = "%s_%d_out" % (self.base_name, self.call_count)
        pool_out = main_block.create_var(name=out_name, dtype='float32')
        self.memory.add_var(pool_out)
        pool2d_op_desc = main_block.desc.append_op()
        pool2d_op = Operator(block=main_block,
                             desc=pool2d_op_desc,
                             type='pool2d',
                             inputs={'X': input},
                             outputs={'Out': pool_out},
                             attrs={
                                 "pooling_type": pool_type,
                                 "ksize": _pair(pool_size),
                                 "global_pooling": global_pooling,
                                 "strides": _pair(pool_stride),
                                 "paddings": _pair(pool_padding),
                                 "use_cudnn": use_cudnn,
                                 "ceil_mode": ceil_mode,
                                 "use_mkldnn": use_mkldnn
                             })
        main_block.ops.append(pool2d_op)
        self.call_count += 1
        return pool_out
Ejemplo n.º 9
0
 def forward(self, input, dim=None, keep_dim=False):
     """Append a reduce_mean op over ``input`` and return its output var.

     Args:
         input: variable to reduce.
         dim: dims to reduce over; None means reduce over all dims.
         keep_dim (bool): keep reduced dims with length 1.
     """
     out_name = "%s_%d_out" % (self.base_name, self.call_count)
     # Fix: dropped the unused `start_block` local (startup program was
     # fetched but never used).
     main_block = self.memory.main_program.current_block()
     out_var = main_block.create_var(name=out_name, dtype='float32')
     mean_desc = main_block.desc.append_op()
     mean_op = Operator(block=main_block,
                        desc=mean_desc,
                        type='reduce_mean',
                        inputs={'X': input},
                        outputs={'Out': out_var},
                        attrs={
                            # Fix: compare with None by identity (PEP 8),
                            # and drop the redundant True/False ternary.
                            'dim': dim if dim is not None else [0],
                            'keep_dim': keep_dim,
                            'reduce_all': dim is None
                        })
     self.memory.add_var(out_var)
     main_block.ops.append(mean_op)
     self.call_count += 1
     return out_var
Ejemplo n.º 10
0
 def forward(self, logits, label):
     """Append a softmax_with_cross_entropy op and return the loss var.

     The intermediate softmax output is also created and registered with
     the layer memory.

     Args:
         logits: unnormalized prediction variable.
         label: ground-truth label variable.
     """
     softmax_out_name = "%s_softmax" % self.base_name
     loss_name = "%s_loss" % self.base_name
     # Fix: dropped the unused `start_block` local (startup program was
     # fetched but never used).
     main_block = self.memory.main_program.current_block()
     softmax_out_var = main_block.create_var(name=softmax_out_name,
                                             dtype='float32')
     loss_var = main_block.create_var(name=loss_name,
                                      dtype='float32')
     loss_desc = main_block.desc.append_op()
     loss_op = Operator(block=main_block,
                        desc=loss_desc,
                        type='softmax_with_cross_entropy',
                        inputs={'Logits': logits,
                                'Label': label},
                        outputs={'Softmax': softmax_out_var,
                                 'Loss': loss_var},
                        attrs={'soft_label': False})
     self.memory.add_var(loss_var)
     self.memory.add_var(softmax_out_var)
     main_block.ops.append(loss_op)
     return loss_var
Ejemplo n.º 11
0
from paddle.fluid import Program, Operator

# Build a minimal program containing a single elementwise abs op,
# then dump its textual representation.
my_program = Program()
cur_block = my_program.current_block()

abs_input_var = cur_block.create_var(name='abs_input',
                                     dtype='float32',
                                     shape=[-1, 32, 32])
abs_output_var = cur_block.create_var(name='abs_output',
                                      dtype='float32',
                                      shape=[-1, 32, 32])

abs_op_desc = cur_block.desc.append_op()
abs_op = Operator(block=cur_block,
                  desc=abs_op_desc,
                  type='abs',
                  inputs={'X': [abs_input_var]},
                  outputs={'Out': [abs_output_var]})

print(my_program)
Ejemplo n.º 12
0
# Hand-build a fully-connected layer: create the W/b parameters and append
# the mul op computing x * W.
# NOTE(review): `cur_block`, `x_var`, `Wx_var` and `xavier_init` are
# presumably defined earlier in the original snippet -- not visible here.
const_init = ConstantInitializer(value=0.0)
W_var = cur_block.create_parameter(name='fc_W',
                                   dtype='float32',
                                   shape=[128, 64],
                                   initializer=xavier_init)
b_var = cur_block.create_parameter(name='fc_b',
                                   dtype='float32',
                                   shape=[64],
                                   initializer=const_init)
# mul flattens X/Y to matrices using one column dim on each side.
mul_op_desc = cur_block.desc.append_op()
mul_op = Operator(block=cur_block,
                  desc=mul_op_desc,
                  type='mul',
                  inputs={
                      'X': x_var,
                      'Y': W_var
                  },
                  outputs={'Out': Wx_var},
                  attrs={
                      "x_num_col_dims": 1,
                      "y_num_col_dims": 1
                  })

add_op_desc = cur_block.desc.append_op()
add_op = Operator(block=cur_block,
                  desc=add_op_desc,
                  type='elementwise_add',
                  inputs={
                      'X': [Wx_var],
                      'Y': [b_var]
                  },
                  outputs={'Out': [y_var]},