Example #1
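This constructor parses a .caffemodel file into a caffe_pb.NetParameter message and dispatches each layer to its setup method via _type_to_method, falling back to the legacy v1 layers field (_oldname_to_method) when the new layer field is empty.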
    def __init__(self, model_path):
        if not available:
            msg = 'CaffeFunction is only supported on protobuf>=3 in Python3'
            raise RuntimeError(msg)

        super(CaffeFunction, self).__init__()

        net = caffe_pb.NetParameter()
        with open(model_path, 'rb') as model_file:
            net.MergeFromString(model_file.read())

        self.forwards = {}
        self.split_map = {}
        self.layers = []

        if net.layer:
            for layer in net.layer:
                meth = _type_to_method.get(layer.type)
                if meth:
                    meth(self, layer)
                else:
                    warnings.warn(
                        'Skip the layer "%s", since CaffeFunction does not '
                        'support %s layer' % (layer.name, layer.type))
        else:  # v1 format
            for layer in net.layers:
                meth = _oldname_to_method.get(layer.type)
                if meth:
                    meth(self, layer)
                else:
                    warnings.warn(
                        'Skip the layer "%s", since CaffeFunction does not '
                        'support it' % layer.name)
Example #2
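This __call__ method exports a dumped function graph to Caffe format: it optionally writes a prototxt with a single Input layer describing the data shape, serializes the layer parameters into a NetParameter binary when a caffemodel path is set, and additionally writes a text dump of the message in debug mode.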
    def __call__(self, name, inputs, outputs):
        dumped_list = _dump_graph(outputs)
        f = None
        net = None
        if self.caffemodel is not None:
            net = caffe_pb.NetParameter()
        try:
            if self.prototxt is not None:
                f = open(self.prototxt, 'wt')
                f.write('name: "{}"\n'.format(name))
                assert len(inputs) == 1
                f.write('layer {\n'
                        '  name: "data"\n'
                        '  type: "Input"\n'
                        '  top: "data"\n'
                        '  input_param { shape: {')
                for i in inputs[0].shape:
                    f.write(' dim: ' + str(i))
                f.write(' } }\n} \n')
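            # Dump every function object of the traced graph as a Caffe layer
            # into the prototxt file and/or the NetParameter message.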
            for i in dumped_list:
                self.dump_function_object(i, f, net)
        finally:
            if f is not None:
                f.close()

        if net is not None:
            with open(self.caffemodel, 'wb') as f:
                f.write(net.SerializeToString())
            if self.debug:
                import google.protobuf.text_format
                with open(self.caffemodel + ".txt", 'w') as f:
                    f.write(google.protobuf.text_format.MessageToString(net))
Example #3
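This helper loads an SSD caffemodel, finds its Normalize layer (conv4_3_norm), and copies the scale blob into the norm4 link of the target pose SSD model.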
def copy_ssd_norm_layer(caffe_ssd_path, pose_ssd_model):
    net = caffe_pb.NetParameter()
    with open(caffe_ssd_path, 'rb') as model_file:
        net.MergeFromString(model_file.read())

    def get_normalize_layer(net):
        for layer in net.layer:
            if layer.type == 'Normalize':
                return layer

    norm_layer = get_normalize_layer(net)
    pose_ssd_model.ssd_net.norm4.scale.data = numpy.array(
        norm_layer.blobs[0].data)
    print("Copy layer: dst({}) / src({})".format("ssd_net.norm4",
                                                 "conv4_3_norm"))
Example #4
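This constructor reads a caffemodel and sets up a timer for every layer type that has a known implementation path in _layer_impl_path_dict, printing a notice for unimplemented layer types; both the current layer field and the v1 layers format are handled.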
    def __init__(self, caffemodel):
        self.timer_hooks = []
        net = caffe_pb.NetParameter()
        with open(caffemodel, 'rb') as model_file:
            net.MergeFromString(model_file.read())

        if net.layer:
            for layer in net.layer:
                layer_impl_path = _layer_impl_path_dict.get(layer.type)
                if layer_impl_path:
                    _setup_timer_by_layer(layer.type, layer_impl_path)
                else:
                    print('unimplemented layer timer: %s' % layer.type)
        else:  # v1 format
            for layer in net.layers:
                layer_type = _v1_to_new_name_dict[layer.type]
                layer_impl_path = _layer_impl_path_dict.get(layer_type)
                if layer_impl_path:
                    self._setup_timer_by_layer(layer.name, layer_impl_path)
                else:
                    print('unimplemented layer timer: %s' % layer.type)
Example #5
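This method copies Convolution and Deconvolution parameters (weights and, if bias_term is set, biases) from a caffemodel into the corresponding links of the model, splitting the weight blob across groups for grouped convolutions and checking kernel size, stride, padding, and channel counts against the target links.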
    def load_caffe_model(self, model_path):
        net = caffe_pb.NetParameter()
        with open(model_path, 'rb') as model_file:
            net.MergeFromString(model_file.read())
        print("Loaded")
        if net.layer:
            for layer in net.layer:
                if layer.type == 'Convolution':
                    print(layer.name, layer.type)
                    obj = self.get_layer(layer.name)
                    blobs = layer.blobs
                    param = layer.convolution_param
                    num = _get_num(blobs[0])
                    channels = _get_channels(blobs[0])

                    assert obj.ksize == _get_ksize(param)
                    assert obj.stride[0] == _get_stride(param)
                    assert obj.pad[0] == _get_pad(param)
                    assert obj.W.data.shape[1] == channels
                    assert obj.out_channels == num

                    n_in = obj.W.data.shape[1]
                    n_out = obj.out_channels
                    part_size = len(blobs[0].data) // param.group

                    obj.W.data[...] = 0
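                    # Copy each group's filters from the flat Caffe blob into
                    # the matching (out_slice, in_slice) block of W.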
                    for i in six.moves.range(param.group):
                        in_slice = slice(i * n_in // param.group,
                                         (i + 1) * n_in // param.group)
                        out_slice = slice(i * n_out // param.group,
                                          (i + 1) * n_out // param.group)
                        w = obj.W.data[out_slice, in_slice]
                        data = np.array(blobs[0].data[i * part_size:(i + 1) *
                                                      part_size])
                        w[:] = data.reshape(w.shape)

                    if param.bias_term:
                        obj.b.data[:] = blobs[1].data

                elif layer.type == 'Deconvolution':
                    print(layer.name, layer.type)
                    obj = self.get_layer(layer.name)
                    blobs = layer.blobs
                    param = layer.convolution_param

                    num = _get_num(blobs[0])
                    channels = _get_channels(blobs[0])

                    assert obj.ksize == _get_ksize(param)
                    assert obj.stride[0] == _get_stride(param)
                    assert obj.pad[0] == _get_pad(param)
                    assert obj.W.data.shape[0] == num
                    assert obj.out_channels == channels
                    part_size = len(blobs[0].data)  # no group splitting for deconvolution
                    obj.W.data[...] = 0
                    obj.W.data = np.array(blobs[0].data[0:part_size]).reshape(
                        obj.W.data.shape)

                    if param.bias_term:
                        obj.b.data[:] = blobs[1].data