Example #1
    def to_proto(self):
        """Serialize to the proto.

        Returns
        -------
        LayerParameter
            The ``LayerParameter`` protocol buffer.

        """
        proto = caffe_pb2.LayerParameter()
        proto.CopyFrom(self._proto)
        for blob in self._blobs:
            value = blob['data'].get_value()
            if str(value.dtype) == 'float32':
                blob_proto = caffe_pb2.BlobProto(
                    data=value.flatten(),
                    shape=caffe_pb2.BlobShape(dim=value.shape))
            elif str(value.dtype) == 'float64':
                blob_proto = caffe_pb2.BlobProto(
                    double_data=value.flatten(),
                    shape=caffe_pb2.BlobShape(dim=value.shape))
            else:
                raise ValueError('Either float32 or float64 blob is required.')
            proto.blobs.extend([blob_proto])
        return proto
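
The dtype dispatch is the core of this method: float32 values are stored in the ``data`` field, float64 values in ``double_data``, and anything else is rejected. A minimal standalone sketch of that branch, using only NumPy (the function name here is illustrative, not part of the original API):

import numpy as np

def pick_blob_field(value):
    # Mirrors the branch above: float32 -> 'data', float64 -> 'double_data'.
    if str(value.dtype) == 'float32':
        return 'data'
    elif str(value.dtype) == 'float64':
        return 'double_data'
    raise ValueError('Either float32 or float64 blob is required.')

value = np.zeros((2, 3), dtype='float32')
assert pick_blob_field(value) == 'data'
assert pick_blob_field(value.astype('float64')) == 'double_data'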
Example #2
    def _to_proto(self, layers, names, autonames):
        if self in layers:
            return  # this layer has already been serialized
        bottom_names = []
        for inp in self.inputs:
            # Serialize inputs first so their top blob names are available.
            inp._to_proto(layers, names, autonames)
            bottom_names.append(layers[inp.fn].top[inp.n])
        layer = caffe_pb2.LayerParameter()
        layer.type = self.type_name
        layer.bottom.extend(bottom_names)

        if self.in_place:
            layer.top.extend(layer.bottom)
        else:
            for top in self.tops:
                layer.top.append(self._get_top_name(top, names, autonames))
        layer.name = self._get_name(names, autonames)

        for k, v in self.params.items():
            # special case to handle generic *params
            if k.endswith('param'):
                assign_proto(layer, k, v)
            else:
                try:
                    assign_proto(getattr(
                        layer, _param_names[self.type_name] + '_param'), k, v)
                except (AttributeError, KeyError):
                    assign_proto(layer, k, v)

        layers[self] = layer
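
The method is a memoized post-order traversal: each layer serializes its inputs before itself, and the ``layers`` dict guarantees that a node feeding several consumers is emitted exactly once. A caffe-independent sketch of the same pattern (``Node`` and the dict layout are hypothetical stand-ins):

class Node:
    def __init__(self, name, type_name, inputs=()):
        self.name, self.type_name, self.inputs = name, type_name, list(inputs)

def to_proto_dag(node, layers):
    # Post-order traversal with memoization, as in _to_proto above.
    if node in layers:
        return  # already emitted via another consumer
    for inp in node.inputs:
        to_proto_dag(inp, layers)  # dependencies come first
    layers[node] = {'name': node.name, 'type': node.type_name,
                    'bottom': [inp.name for inp in node.inputs]}

data = Node('data', 'Input')
conv = Node('conv1', 'Convolution', [data])
relu = Node('relu1', 'ReLU', [conv])
layers = {}
to_proto_dag(relu, layers)
assert [l['name'] for l in layers.values()] == ['data', 'conv1', 'relu1']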
Example #3
    def __init__(self, *args):
        """Create a ``Net``.

        Parameters
        ----------
        net_file : str
            The path of the text proto file that defines the network.
        param_file : str, optional
            The path of the binary proto file to load parameters from.
        phase : {'TRAIN', 'TEST'}, optional
            The optional phase tag.

        """
        if len(args) == 2:
            (net_file, self._phase), param_file = args, None
        elif len(args) == 3:
            net_file, param_file, self._phase = args
        else:
            raise ValueError('Expected 2 or 3 args.')
        self._blobs = {}
        self._layers = []
        self._layer_blobs = []
        self._losses = []
        self._params = []
        self._blob_dict = None
        self._param_dict = None
        self._input_list = None
        self._output_list = None
        # Parse the network file
        with open(net_file, 'r') as f:
            self._proto = text_format.Parse(f.read(), caffe_pb2.NetParameter())
        # Construct the layer class from proto
        for layer_param in self._proto.layer:
            if not self._filter_layer(layer_param):
                continue
            cls = getattr(layer_factory, layer_param.type)
            with context.name_scope(layer_param.name):
                self._layers.append(cls(layer_param))
        # Prepare for the legacy net inputs
        if len(self._proto.input) > 0:
            layer_param = caffe_pb2.LayerParameter(
                name='data',
                type='Input',
                top=self._proto.input,
                input_param=caffe_pb2.InputParameter(
                    shape=self._proto.input_shape))
            cls = getattr(layer_factory, layer_param.type)
            with context.name_scope(layer_param.name):
                self._layers.insert(0, cls(layer_param))
        # Call layers sequentially to get outputs
        self._setup()
        # Collect losses and parameters
        for layer in self._proto.layer:
            if not self._filter_layer(layer):
                continue
            self._collect_losses_and_params(layer)
        # Load the pre-trained weights if necessary
        if param_file is not None:
            self.copy_from(param_file)
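
Hypothetical usage of the two accepted call signatures (file names are placeholders, not from the original source):

# Two-argument form: network definition plus phase.
net = Net('deploy.prototxt', 'TEST')
# Three-argument form: network definition, pre-trained weights, phase.
net = Net('train.prototxt', 'weights.caffemodel', 'TRAIN')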
Example #4
def param_name_dict():
    """Find out the correspondence between layer names and parameter names."""
    layer = caffe_pb2.LayerParameter()
    # Get all parameter names (typically underscore case) and corresponding
    # type names (typically camel case), which contain the layer names
    # (note that not all parameters correspond to layers, but we'll ignore that).
    param_names = [f.name for f in layer.DESCRIPTOR.fields if f.name.endswith('_param')]
    param_type_names = [type(getattr(layer, s)).__name__ for s in param_names]
    # Strip the final '_param' or 'Parameter'.
    param_names = [s[:-len('_param')] for s in param_names]
    param_type_names = [s[:-len('Parameter')] for s in param_type_names]
    return dict(zip(param_type_names, param_names))
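
For instance, ``LayerParameter`` declares a ``convolution_param`` field of type ``ConvolutionParameter``, so the stripping above yields the entry mapping ``'Convolution'`` to ``'convolution'``. The string handling in isolation:

param_name = 'convolution_param'[:-len('_param')]        # 'convolution'
type_name = 'ConvolutionParameter'[:-len('Parameter')]   # 'Convolution'
assert (type_name, param_name) == ('Convolution', 'convolution')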
Example #5
    def __init__(self, network_file, phase='TEST', weights=None):
        """Create a ``Net``.

        Parameters
        ----------
        network_file : str
            The path of the text proto file that defines the network.
        phase : str, optional, default='TEST'
            The execution phase.
        weights : str, optional
            The path of the binary proto file to load weights from.

        """
        # Parse the network file.
        with open(network_file, 'r') as f:
            self._proto = google.protobuf.text_format.Parse(
                f.read(), caffe_pb2.NetParameter())
        self._phase = phase
        self._layers = []
        self._learnable_blobs = []
        self._net_blobs = dict()
        self._net_outputs = set()
        # Construct the layers from proto.
        layer_names = []
        for layer_param in self._proto.layer:
            if not self._filter_layer(layer_param):
                continue
            try:
                layer_index = layer_names.index(layer_param.name)
                call_layer = self._layers[layer_index]
            except ValueError:
                call_layer = None
                layer_names.append(layer_param.name)
            cls = getattr(layer_factory, layer_param.type)
            self._layers.append(cls(layer_param))
            self._layers[-1]._call_layer = call_layer
        # Add an input layer for the legacy inputs.
        if len(self._proto.input) > 0:
            layer_param = caffe_pb2.LayerParameter(
                name='data',
                type='Input',
                top=self._proto.input,
                input_param=caffe_pb2.InputParameter(
                    shape=self._proto.input_shape))
            cls = getattr(layer_factory, layer_param.type)
            with context.name_scope(layer_param.name):
                self._layers.insert(0, cls(layer_param))
        # Connect layers to get outputs.
        self._init()
        # Load the pre-trained weights if necessary.
        if weights is not None:
            self.copy_from(weights)
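
Hypothetical usage (paths are placeholders); since the constructor delegates to ``copy_from``, weights can equivalently be loaded after construction:

net = Net('deploy.prototxt', phase='TEST', weights='model.caffemodel')
# Equivalent two-step form:
net = Net('deploy.prototxt', phase='TEST')
net.copy_from('model.caffemodel')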