# Converter pass: lower a fully-connected (MatMul/Linear) op to a Caffe
# InnerProduct layer. `cp` is the compiled Caffe protobuf module (caffe_pb2)
# and LinearOpr is the converter's IR linear op; both are imported elsewhere
# in the original module.
def _fully_connected(opr, context):
    # The weight must be a compile-time constant so it can be emitted as a blob.
    assert opr.inp_tensors[1].np_data is not None
    param_W = opr.inp_tensors[1].np_data
    assert not opr.transpose_a
    # Caffe stores InnerProduct weights as (num_output, num_input), i.e. it
    # computes y = x @ W^T, so transpose unless the op already uses W^T.
    if not opr.transpose_b:
        param_W = param_W.T
    blobs = [context.gen_blob_proto(param_W)]
    bias_term = False
    if isinstance(opr, LinearOpr) and opr.has_bias:
        bias_term = True
        # The bias blob is flattened to a 1-D vector.
        blobs.append(
            context.gen_blob_proto(opr.inp_tensors[2].np_data.reshape(-1))
        )
    param = cp.InnerProductParameter(
        bias_term=bias_term, num_output=opr.out_tensors[0].shape[1]
    )
    bottom = [context.get_blob_name(opr.inp_tensors[0])]
    top = [context.set_blob_name(opr.out_tensors[0], opr.out_tensors[0].name)]
    context.add_layer(
        cp.LayerParameter(
            name=opr.out_tensors[0].name,
            type="InnerProduct",
            bottom=bottom,
            top=top,
            inner_product_param=param,
            blobs=blobs,
        )
    )
from caffe.proto import caffe_pb2 as pb


def fc_param(self, num_output, weight_filler='xavier',
             bias_filler='constant', has_bias=True):
    # Populate inner_product_param on a wrapped InnerProduct layer.
    if self.type != 'InnerProduct':
        raise TypeError(
            'the layer type must be InnerProduct if you want to set fc param')
    fc_param = pb.InnerProductParameter()
    fc_param.num_output = num_output
    fc_param.weight_filler.type = weight_filler
    fc_param.bias_term = has_bias
    if has_bias:
        # bias_filler only matters when the layer actually has a bias term.
        fc_param.bias_filler.type = bias_filler
    self.param.inner_product_param.CopyFrom(fc_param)
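# A minimal, hypothetical usage sketch for fc_param: the Layer wrapper below
# (exposing .type and a .param LayerParameter, with fc_param bound as a
# method) is an assumption for illustration, not part of the original code.
class Layer:
    def __init__(self, name, type):
        self.type = type
        self.param = pb.LayerParameter(name=name, type=type)

    fc_param = fc_param  # bind the module-level function above as a method


fc1 = Layer('fc1', 'InnerProduct')
fc1.fc_param(num_output=256)
print(fc1.param)  # prints the layer in protobuf text format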
from caffe.proto import caffe_pb2


def create_inner_product_layer(input, out_name, output_number):
    # Build a single-layer NetParameter containing one InnerProduct layer.
    inner_product_layer = caffe_pb2.LayerParameter(
        name=out_name,
        type="InnerProduct",
        bottom=[input],
        top=[out_name],
        param=[
            # lr/decay multipliers for the weights and the bias, respectively.
            caffe_pb2.ParamSpec(lr_mult=1.0, decay_mult=1.0),
            caffe_pb2.ParamSpec(lr_mult=2.0, decay_mult=0.0),
        ],
        inner_product_param=caffe_pb2.InnerProductParameter(
            num_output=output_number,
            bias_term=False,
            weight_filler=caffe_pb2.FillerParameter(type="xavier"),
            # Note: bias_filler and the second ParamSpec are ignored while
            # bias_term is False.
            bias_filler=caffe_pb2.FillerParameter(type="constant", value=0.0),
        ),
    )
    net = caffe_pb2.NetParameter()
    net.layer.extend([inner_product_layer])
    return net
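# A minimal, hypothetical usage sketch: build the one-layer net and dump it as
# a prototxt (the "data"/"fc1" names and the output size of 10 are
# illustrative assumptions).
if __name__ == "__main__":
    net = create_inner_product_layer("data", "fc1", output_number=10)
    # str() on a protobuf message yields the text format that Caffe reads.
    with open("fc.prototxt", "w") as f:
        f.write(str(net))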