示例#1
0
def loadCaffeModel(net_path, model_path):
    """Load a Caffe network definition (prototxt) and its trained weights.

    Parameters
    ----------
    net_path : path to the text-format .prototxt network definition.
    model_path : path to the binary .caffemodel weights file.

    Returns
    -------
    (net, model) : two caffe_pb2.NetParameter messages — the parsed
        definition and the deserialized weights.
    """
    # Parse the text-format prototxt into a NetParameter message.
    net = caffe_pb2.NetParameter()
    with open(net_path) as f:  # 'with' closes the handle even if Merge raises
        text_format.Merge(f.read(), net)

    # Deserialize the binary caffemodel into a second NetParameter.
    model = caffe_pb2.NetParameter()
    with open(model_path, 'rb') as f:
        model.ParseFromString(f.read())

    print("1.caffe模型加载完成")
    print(model)
    return net, model
示例#2
0
文件: net.py 项目: zqplgl/pcnn
    def load_weights_from_caffemodel(self, weight_file):
        """Load layer parameters from a binary .caffemodel file.

        Layers present in the caffemodel but not registered in this net
        are skipped with a warning on stderr; parameters are copied only
        for layers carrying exactly two blobs (weights and bias).

        Parameters
        ----------
        weight_file : path to the binary .caffemodel file.
        """
        net_parameter = caffe_pb2.NetParameter()
        # 'with' guarantees the handle is closed (the original leaked it).
        with open(weight_file, "rb") as f:
            net_parameter.ParseFromString(f.read())

        for layer in net_parameter.layer:
            if layer.name not in self.__layer_names:
                sys.stderr.write("ignore layer %s\n" % layer.name)
                continue

            # Look up the matching in-memory layer by name.
            l = self.__layers[self.__layer_names.index(layer.name)]

            # Only weight+bias layers carry loadable parameters.
            if len(layer.blobs) == 2:
                sys.stderr.write("loading parameter from %s\n" % layer.name)
                l.load_parameter(layer.blobs)
示例#3
0
文件: net.py 项目: zqplgl/pcnn
    def __init__(self, prototxt=None, parameter=None):
        """Build a net from a prototxt path or a pre-parsed NetParameter.

        Parameters
        ----------
        prototxt : optional path to a text-format .prototxt; when given it
            takes precedence and `parameter` is parsed from it.
        parameter : optional pre-built caffe_pb2.NetParameter.
        """
        if prototxt:
            parameter = caffe_pb2.NetParameter()
            # 'with' closes the handle even if Merge raises (original leaked it).
            with open(prototxt) as f:
                text_format.Merge(f.read(), parameter)

        # Best-effort diagnostic only — original deliberately does not raise.
        if not parameter:
            sys.stderr.write("error init\n")

        # Layer types this implementation knows how to build.
        self.__register_layers = [
            "Accuracy", "Convolution", "Data", "InnerProduct", "Pooling",
            "ReLU", "Softmax"
        ]
        self.__parameter = parameter
        self.__blobs = {}        # blob name -> data
        self.__layers = []       # constructed layer objects
        self.__layer_names = []  # parallel list of layer names

        self.__init_from_parameter()
示例#4
0
        assert len(blobs[
            self._bottoms[0]].shape) > 1, "%s error input blobs" % self._name
        bottom_blob = blobs[self._bottoms[0]].reshape(
            (blobs[self._bottoms[0]].shape[0], -1))
        top_blob = blobs[self._tops[0]]

        assert bottom_blob.shape[1] == self._w.shape[
            1], "%s error input blobs" % self._name

        top_shape = self.__generate_top_shape(bottom_blob.shape)
        if top_blob.shape != top_shape:
            blobs[self._tops[0]] = np.zeros(top_shape, dtype=np.float32)
            sys.stderr.write("reshape blob %s from %s-->%s\n" %
                             (self._tops[0], top_blob.shape, top_shape))
            top_blob = blobs[self._tops[0]]

        top_blob[...] = np.dot(bottom_blob, self._w.transpose()) + self._b

        sys.stderr.write("%s forward successfully\n" % self._name)


if __name__ == "__main__":
    # Smoke test: parse the LeNet prototxt and build a layer from its
    # first Convolution definition.
    prototxt = "/home/zqp/github/caffe/examples/mnist/lenet_train_test.prototxt"
    net = caffe_pb2.NetParameter()
    # 'with' ensures the prototxt handle is closed (original left it open).
    with open(prototxt) as f:
        text_format.Merge(f.read(), net)

    # First Convolution layer in the net definition.
    parameter = [layer for layer in net.layer
                 if layer.type == "Convolution"][0]

    conv = InnerProductLayer(parameter)
示例#5
0
def caffe2shadow(model_root, meta_net_info, copy_params=False):
    """Convert one or more Caffe deploy models into a single Shadow net.

    Parameters
    ----------
    model_root : directory holding '<name>.prototxt' / '<name>.caffemodel'
        for every entry in meta_net_info['model_name'].
    meta_net_info : dict with 'model_name' (list of model base names) and
        'network' (per-model dicts with 'num_class', 'arg', 'out_blob').
    copy_params : when True, also parse each binary .caffemodel and copy
        its weights into the Shadow net.

    Returns
    -------
    The populated Shadow() network object.
    """
    # Dispatch table: Caffe layer type -> converter function. Types that
    # share a converter (ReLU/PReLU, Convolution/DepthwiseConvolution)
    # map to the same callable.
    converters = {
        'ReLU': convert_activate,
        'PReLU': convert_activate,
        'BatchNorm': convert_batch_norm,
        'Bias': convert_bias,
        'Concat': convert_concat,
        'InnerProduct': convert_connected,
        'Convolution': convert_conv,
        'DepthwiseConvolution': convert_conv,
        'Eltwise': convert_eltwise,
        'Flatten': convert_flatten,
        'LRN': convert_lrn,
        'Normalize': convert_normalize,
        'Permute': convert_permute,
        'Pooling': convert_pooling,
        'PriorBox': convert_prior_box,
        'PSROIPooling': convert_psroi_pooling,
        'Python': convert_python,
        'Reshape': convert_reshape,
        'ROIPooling': convert_roi_pooling,
        'Scale': convert_scale,
        'Softmax': convert_softmax,
    }

    caffe_deploys, caffe_models = [], []
    for model_name in meta_net_info['model_name']:
        deploy_file = model_root + '/' + model_name + '.prototxt'
        deploy_model = model_root + '/' + model_name + '.caffemodel'

        # Text-format prototxt -> NetParameter.
        caffe_deploy = caffe_pb2.NetParameter()
        with open(deploy_file, 'r') as caffe_file:
            text_format.Merge(caffe_file.read(), caffe_deploy)
        caffe_deploys.append(caffe_deploy)
        # Binary weights are only parsed when they will be copied.
        if copy_params:
            caffe_model = caffe_pb2.NetParameter()
            with open(deploy_model, 'rb') as caffe_file:
                caffe_model.ParseFromString(caffe_file.read())
            caffe_models.append(caffe_model)

    shadow_net = Shadow()

    for n, caffe_deploy in enumerate(caffe_deploys):
        net_info = meta_net_info['network'][n]

        shadow_net.set_net(n)
        shadow_net.set_net_name(caffe_deploy.name)
        shadow_net.set_net_num_class(net_info['num_class'])
        shadow_net.set_net_arg(net_info['arg'])
        shadow_net.set_net_out_blob(net_info['out_blob'])

        # Input layers are handled separately; conversion of regular
        # layers starts at the returned index.
        start_layer = convert_input(caffe_deploy, net_info, shadow_net)

        for caffe_layer in caffe_deploy.layer[start_layer:]:
            layer_type = caffe_layer.type
            convert = converters.get(layer_type)
            if convert is not None:
                convert(caffe_layer, shadow_net)
            else:
                print('Layer type: ' + layer_type + ' is not recognized!')

        if copy_params:
            copy_weights(caffe_models[n], shadow_net)

    return shadow_net
示例#6
0
def caffe2tf(input_net_proto_file, input_shape, phase=None):
    """caffe2tf.
    
    Parameters
    ----------
    input_net_proto_file : caffe net proto file
    input_shape: [batch, in_height, in_width, in_channels]
        input x dimension.   
    phase : {caffe_pb2.TRAIN, caffe_pb2.TEST, None} optional
        Include layers from this network phase.  If None, include all layers.
        (the default is None)

    Returns
    -------
    tensorflow tensor
    """

    # Load the Caffe network definition (text-format prototxt).
    netparam = pb.NetParameter()

    with open(input_net_proto_file, 'r') as fp:
        str_def = fp.read()
        caffe_net = tformat.Parse(str_def, netparam)

    # Input placeholder for the network.
    # NOTE(review): 'x' is created but never consumed below — presumably
    # the per-layer tensor construction that uses it is missing; verify.
    x = tf.placeholder(tf.float32, input_shape)

    # Maps blob name -> tensorflow tensor for converted layers.
    # NOTE(review): nothing below ever inserts into this dict, so the
    # bottom-blob existence check will fail for every non-input layer —
    # confirm intended flow.
    layer_tensors = {}

    # Iterate over all layers of the parsed net.
    # Reference: function get_pydot_graph@caffe\python\caffe\draw.py
    for layer in caffe_net.layer:

        creator = None

        # Phase filtering: keep a layer only if its include/exclude rules
        # admit the requested phase (mirrors Caffe's own filter logic).
        if phase is not None:
            included = False

            # A layer with no include rules is included by default.
            if len(layer.include) == 0:
                included = True

            if len(layer.include) > 0 and len(layer.exclude) > 0:
                raise ValueError('layer ' + layer.name + ' has both include '
                                 'and exclude specified.')

            for layer_phase in layer.include:
                included = included or layer_phase.phase == phase
            for layer_phase in layer.exclude:
                included = included and not layer_phase.phase == phase

            if not included:
                continue

        # Input layers (no bottom blobs) are skipped here.
        if len(layer.bottom) == 0:
            continue

        # check0: a creator must be registered for this layer type.
        creator = get_register_creator(layer.type)
        if (creator is None):
            raise ValueError('layer type %s has no register creator' %
                             layer.type)

        # check1: every bottom blob must already have a tensor.
        # NOTE(review): elements of 'layer.bottom' are strings in Caffe's
        # proto, yet '.name' is accessed on them — looks inconsistent;
        # confirm against the proto definition.
        for bottom_blob in layer.bottom:
            if (not dict_has_key(layer_tensors, bottom_blob.name)):
                raise ValueError( 'layer named:%s bottom %s is None,please check flow!!'% \
                                 (layer.name,bottom_blob.name) )

        # NOTE(review): the remainder of this loop builds pydot graph nodes
        # and edges and appears pasted from caffe's draw.py. 'rankdir',
        # 'pydot_nodes', 'pydot_edges', 'label_edges' and the *_STYLE
        # constants are not defined in this function — verify intent.
        node_label = get_layer_label(layer, rankdir)
        node_name = "%s_%s" % (layer.name, layer.type)
        if (len(layer.bottom) == 1 and len(layer.top) == 1
                and layer.bottom[0] == layer.top[0]):
            # We have an in-place neuron layer.
            pydot_nodes[node_name] = pydot.Node(node_label,
                                                **NEURON_LAYER_STYLE)
        else:
            layer_style = LAYER_STYLE_DEFAULT
            layer_style['fillcolor'] = choose_color_by_layertype(layer.type)
            pydot_nodes[node_name] = pydot.Node(node_label, **layer_style)
        for bottom_blob in layer.bottom:
            pydot_nodes[bottom_blob + '_blob'] = pydot.Node(
                '%s' % bottom_blob, **BLOB_STYLE)
            edge_label = '""'
            pydot_edges.append({
                'src': bottom_blob + '_blob',
                'dst': node_name,
                'label': edge_label
            })
        for top_blob in layer.top:
            pydot_nodes[top_blob + '_blob'] = pydot.Node('%s' % (top_blob))
            if label_edges:
                edge_label = get_edge_label(layer)
            else:
                edge_label = '""'
            pydot_edges.append({
                'src': node_name,
                'dst': top_blob + '_blob',
                'label': edge_label
            })

    # NOTE(review): 'net_def' is never assigned in this function and the
    # docstring promises a tensorflow tensor — verify the intended return.
    return net_def
示例#7
0
def convert_caffe(network, net_info, model_root, model_name, copy_params):
    """Convert a single Caffe model (prototxt + optional caffemodel) into
    the given network object.

    Parameters
    ----------
    network : target network builder (set_net_name / set_net_arg_dict / ...).
    net_info : dict carrying at least an 'arg' entry for network args.
    model_root : directory holding '<model_name>.prototxt' and
        '<model_name>.caffemodel'.
    model_name : base name of the model files; also used as the net name.
    copy_params : when True, also parse the binary .caffemodel and copy
        its weights into `network`.
    """
    # Dispatch table: Caffe layer type -> converter function. Several
    # types share a converter (ReLU/PReLU/Sigmoid are all activations,
    # Convolution/DepthwiseConvolution both use convert_conv).
    converters = {
        'PReLU': convert_activate,
        'ReLU': convert_activate,
        'Sigmoid': convert_activate,
        'Axpy': convert_axpy,
        'BatchNorm': convert_batch_norm,
        'Bias': convert_bias,
        'Concat': convert_concat,
        'InnerProduct': convert_connected,
        'Convolution': convert_conv,
        'DepthwiseConvolution': convert_conv,
        'DetectionOutput': convert_decode_box,
        'Deconvolution': convert_deconv,
        'Eltwise': convert_eltwise,
        'Flatten': convert_flatten,
        'LRN': convert_lrn,
        'Normalize': convert_normalize,
        'Permute': convert_permute,
        'Pooling': convert_pooling,
        'PriorBox': convert_prior_box,
        'PSROIPooling': convert_psroi_pooling,
        'Python': convert_python,
        'Reshape': convert_reshape,
        'ROIPooling': convert_roi_pooling,
        'Scale': convert_scale,
        'Slice': convert_slice,
        'Softmax': convert_softmax,
    }

    deploy_file = model_root + '/' + model_name + '.prototxt'
    deploy_model = model_root + '/' + model_name + '.caffemodel'

    # Text-format prototxt -> NetParameter.
    caffe_deploy = caffe_pb2.NetParameter()
    with open(deploy_file, 'r') as caffe_file:
        text_format.Merge(caffe_file.read(), caffe_deploy)

    network.set_net_name(model_name)
    network.set_net_arg_dict(net_info['arg'])

    # Input layers are converted separately; regular layer conversion
    # starts at the returned index.
    start_layer = convert_input(caffe_deploy, net_info, network)

    for caffe_layer in caffe_deploy.layer[start_layer:]:
        layer_type = caffe_layer.type
        convert = converters.get(layer_type)
        if convert is not None:
            convert(caffe_layer, network)
        else:
            print('Layer type: ' + layer_type + ' is not recognized!')

    if copy_params:
        caffe_model = caffe_pb2.NetParameter()
        with open(deploy_model, 'rb') as caffe_file:
            caffe_model.ParseFromString(caffe_file.read())
        copy_weights(caffe_model, network)
示例#8
0
def load_proto(proto_name):
    """Parse a text-format Caffe prototxt into a NetParameter message.

    Parameters
    ----------
    proto_name : path to the .prototxt file.

    Returns
    -------
    caffe_pb2.NetParameter populated from the file.
    """
    proto = caffe_pb2.NetParameter()
    # 'with' closes the handle even if parsing raises (the original
    # manual open/close leaked it on error).
    with open(proto_name, 'r') as f:
        text_format.Merge(f.read(), proto)
    return proto
示例#9
0
    weight_arr[under_threshold] = 0
    count = np.sum(under_threshold)
    return weight_arr


# Per-layer pruning ratios: fraction of each layer's weights (smallest by
# magnitude) to zero out.
ratio = {
    "conv1": 0.75,
    "conv2": 0.75,
    "conv3": 0.75,
    "fc6": 0.90,
    "fc7": 0.90,
    "fc8": 0.90
}

# Deserialize the binary caffemodel given as the first CLI argument.
# NOTE(review): Python 2 print statements below — this script cannot run
# under Python 3 as written.
model_pb = caffe_pb2.NetParameter()
f = open(sys.argv[1], "rb")
model_pb.ParseFromString(f.read())
# NOTE(review): 'f' is never closed — consider a 'with' block.

# Legacy V1 'layers' field (not 'layer') — presumably targets old-format
# caffemodels; confirm against the models being pruned.
layers = model_pb.layers

for i in layers:
    if (i.name in ratio.keys()):
        print "layer name: ", i.name
        print "width: ", i.blobs[0].width
        print "height: ", i.blobs[0].height
        # Flatten the first blob (the weights) to a float array.
        temp = np.array(i.blobs[0].data, dtype=float)
        nnz_before = np.sum(temp != 0)  # nonzero count before pruning

        # Magnitude threshold achieving the requested ratio, then zero
        # out every weight below it.
        boundary = read_boundary_value_with_ratio(temp, ratio[i.name])
        temp = prune_dense(temp, name=i.name, thresh=boundary)
示例#10
0
def main():
    """Parse CLI args, load a Caffe net prototxt and render it to an image."""
    args = parse_args()
    net = caffe_pb2.NetParameter()
    # 'with' ensures the prototxt handle is closed (original left it open).
    with open(args.input_net_proto_file) as f:
        # allow_unknown_extension lets prototxts with custom extensions parse.
        text_format.Merge(f.read(), net, allow_unknown_extension=True)
    print('Drawing net to %s' % args.output_image_file)
    draw.draw_net_to_file(net, args.output_image_file, args.rankdir)