Example #1
0
 def TranslateModel(cls, caffe_net, pretrained_net,
                    net_state=None):
   """Translates a new-style Caffe NetParameter into a Caffe2 NetDef.

   Args:
     caffe_net: a caffe_pb2.NetParameter whose layers live in the new-style
       'layer' field.
     pretrained_net: a Caffe net holding the trained weight blobs.
     net_state: optional caffe_pb2.NetState used to decide which layers to
       include; a fresh NetState is built when omitted. (The original used
       a mutable default argument, sharing one proto across all calls.)

   Returns:
     A (net, net_params) pair: the translated caffe2_pb2.NetDef and a
     caffe2_pb2.TensorProtos holding the pretrained parameter tensors.

   Raises:
     ValueError: if the net uses the deprecated 'layers' field only, or a
       layer name matches more than one pretrained layer.
   """
   # Build the default per call to avoid the mutable-default pitfall.
   net_state = caffe_pb2.NetState() if net_state is None else net_state
   net = caffe2_pb2.NetDef()
   net.name = caffe_net.name
   net_params = caffe2_pb2.TensorProtos()
   if len(caffe_net.layer) == 0:
     raise ValueError('I think something is wrong. This translation script '
                      'only accepts new style layers that are stored in the '
                      'layer field.')
   for layer in caffe_net.layer:
     if not _ShouldInclude(net_state, layer):
       # print() function form: the original Python 2 print statements are
       # syntax errors under Python 3.
       print('Current net state does not need layer', layer.name)
       continue
     print('Translate layer', layer.name)
     # Get the pretrained layer with the same name, checking both the
     # new-style 'layer' field and the legacy 'layers' field.
     pretrained_layers = (
         [l for l in pretrained_net.layer if l.name == layer.name] +
         [l for l in pretrained_net.layers if l.name == layer.name])
     if len(pretrained_layers) > 1:
       raise ValueError('huh? more than one pretrained layer of one name?')
     elif len(pretrained_layers) == 1:
       pretrained_blobs = [utils.CaffeBlobToNumpyArray(blob)
                           for blob in pretrained_layers[0].blobs]
     else:
       # No pretrained layer for the given layer name. We'll just pass no
       # parameter blobs.
       pretrained_blobs = []
     operators, params = cls.TranslateLayer(layer, pretrained_blobs)
     net.op.extend(operators)
     net_params.protos.extend(params)
   return net, net_params
Example #2
0
 def TranslateModel(
     cls,
     caffe_net,
     pretrained_net,
     is_test=False,
     net_state=None,
     remove_legacy_pad=False,
     input_dims=None
 ):
      """Convert a new-style Caffe net into a (NetDef, TensorProtos) pair.

      Each Caffe layer is translated in order; pretrained blobs whose
      layer name matches are converted to numpy arrays and handed to the
      per-layer translator. Legacy Caffe padding can optionally be
      stripped at the end when input_dims are known.
      """
      if net_state is None:
          net_state = caffe_pb2.NetState()
      net = caffe2_pb2.NetDef()
      net.name = caffe_net.name
      net_params = caffe2_pb2.TensorProtos()
      # Old-style nets keep layers in the deprecated 'layers' field;
      # reject those outright.
      if len(caffe_net.layers) > 0:
          raise ValueError(
              'I think something is wrong. This translation script '
              'only accepts new style layers that are stored in the '
              'layer field.'
          )
      if not input_dims:
          input_dims = _GetInputDims(caffe_net)
      for layer in caffe_net.layer:
          if not _ShouldInclude(net_state, layer):
              log.info('Current net state does not need layer {}'
                          .format(layer.name))
              continue
          log.info('Translate layer {}'.format(layer.name))
          # Find the pretrained layer by name, looking in both the
          # new-style 'layer' field and the legacy 'layers' field.
          matches = [l for l in pretrained_net.layer if l.name == layer.name]
          matches += [l for l in pretrained_net.layers if l.name == layer.name]
          if len(matches) > 1:
              raise ValueError(
                  'huh? more than one pretrained layer of one name?')
          if len(matches) == 1:
              pretrained_blobs = [
                  utils.CaffeBlobToNumpyArray(b) for b in matches[0].blobs
              ]
          else:
              # No pretrained layer for the given layer name; hand the
              # translator an empty parameter list.
              pretrained_blobs = []
          operators, params = cls.TranslateLayer(
              layer, pretrained_blobs, is_test, net=net,
              net_params=net_params, input_dims=input_dims)
          net.op.extend(operators)
          net_params.protos.extend(params)
      if remove_legacy_pad:
          assert input_dims, \
                 'Please specify input_dims to remove legacy_pad'
          net = _RemoveLegacyPad(net, net_params, input_dims)
      return net, net_params
Example #3
0
    def TranslateModel(
        cls,
        caffe_net,
        pretrained_net,
        is_test=False,
        input_mean=None,
        net_state=None,
    ):
        """Translate a new-style Caffe net to Caffe2, fusing BatchNorm+Scale.

        Args:
            caffe_net: caffe_pb2.NetParameter using the new 'layer' field.
            pretrained_net: Caffe net carrying the trained weight blobs.
            is_test: forwarded to TranslateLayer (test-mode translation).
            input_mean: optional path to a serialized BlobProto mean image;
                when given, a Sub op subtracting 'mean_' from 'data_' is
                prepended (the input blob is assumed to be named "data").
            net_state: optional caffe_pb2.NetState selecting layers.

        Returns:
            A (net, net_params) pair of caffe2_pb2.NetDef and TensorProtos.

        Raises:
            ValueError: on an empty 'layer' field or duplicate pretrained
                layer names.
        """
        net_state = caffe_pb2.NetState() if net_state is None else net_state
        net = caffe2_pb2.NetDef()
        net.name = caffe_net.name
        net_params = caffe2_pb2.TensorProtos()
        if len(caffe_net.layer) == 0:
            raise ValueError(
                'I think something is wrong. This translation script '
                'only accepts new style layers that are stored in the '
                'layer field.')
        if input_mean:
            caffenet_mean = caffe_pb2.BlobProto()
            # Close the mean file deterministically (the original leaked
            # the handle via open(...).read()).
            with open(input_mean, 'rb') as mean_file:
                caffenet_mean.ParseFromString(mean_file.read())
            mean_ = utils.CaffeBlobToNumpyArray(caffenet_mean)
            mean_tensor = utils.NumpyArrayToCaffe2Tensor(mean_, 'mean_')
            net_params.protos.extend([mean_tensor])
            mean_op = caffe2_pb2.OperatorDef()
            mean_op.type = 'Sub'
            mean_op.input.extend(['data_', 'mean_'])
            # Assume that input blob's name is "data"
            mean_op.output.extend(['data'])
            net.op.extend([mean_op])
        i = 0
        while i < len(caffe_net.layer):
            layer = caffe_net.layer[i]
            if not _ShouldInclude(net_state, layer):
                log.info('Current net state does not need layer {}'.format(
                    layer.name))
                # BUG FIX: advance the index before skipping. The original
                # 'continue' never incremented i, so any excluded layer
                # made this loop spin forever.
                i += 1
                continue
            log.info('Translate layer {}'.format(layer.name))
            # Indices of same-named pretrained layers, checked in both the
            # new-style 'layer' field and the legacy 'layers' field.
            # (range replaces the Python-2-only xrange.)
            pretrained_layers_index = (
                [l for l in range(len(pretrained_net.layer))
                 if pretrained_net.layer[l].name == layer.name] +
                [l for l in range(len(pretrained_net.layers))
                 if pretrained_net.layers[l].name == layer.name])
            is_bn = False
            if len(pretrained_layers_index) > 1:
                raise ValueError(
                    'huh? more than one pretrained layer of one name?')
            elif len(pretrained_layers_index) == 1:
                # NOTE(review): the index may originate from the legacy
                # 'layers' field above, yet only 'layer' is indexed here —
                # confirm legacy nets never appear as pretrained_net.
                idx = pretrained_layers_index[0]
                if pretrained_net.layer[idx].type == "BatchNorm":
                    # A Scale layer should follow BatchNorm layer
                    # according to paper https://arxiv.org/abs/1502.03167.
                    assert pretrained_net.layer[idx + 1].type == "Scale"
                    # Hand the translator the BatchNorm blobs followed by
                    # the Scale blobs so both get fused into one op.
                    pretrained_blobs = (
                        [utils.CaffeBlobToNumpyArray(blob)
                         for blob in pretrained_net.layer[idx].blobs] +
                        [utils.CaffeBlobToNumpyArray(blob)
                         for blob in pretrained_net.layer[idx + 1].blobs])
                    is_bn = True
                else:
                    pretrained_blobs = [
                        utils.CaffeBlobToNumpyArray(blob)
                        for blob in pretrained_net.layer[idx].blobs
                    ]
            else:
                # No pretrained layer for the given layer name. We'll just
                # pass no parameter blobs.
                pretrained_blobs = []

            operators, params = cls.TranslateLayer(layer,
                                                   pretrained_blobs, is_test)
            net.op.extend(operators)
            net_params.protos.extend(params)
            # A fused BatchNorm consumed the following Scale layer as well.
            i += 2 if is_bn else 1
        return net, net_params
Example #4
0
    def TranslateModel(cls,
                       caffe_net,
                       pretrained_net,
                       is_test=False,
                       net_state=None,
                       remove_legacy_pad=False,
                       input_dims=None):
        """Translate a new-style Caffe net into a Caffe2 NetDef plus params.

        Unlike the strict translator, duplicate pretrained layer names are
        tolerated: parameters are transferred only when the duplicates have
        unique types and exactly one matches this layer's type; otherwise
        no blobs are transferred for that layer.

        Args:
            caffe_net: caffe_pb2.NetParameter using the new 'layer' field.
            pretrained_net: Caffe net carrying the trained weight blobs.
            is_test: forwarded to TranslateLayer.
            net_state: optional caffe_pb2.NetState selecting layers.
            remove_legacy_pad: strip Caffe legacy padding when True
                (requires input_dims).
            input_dims: input dimensions; inferred from caffe_net if empty.

        Returns:
            A (net, net_params) pair of caffe2_pb2.NetDef and TensorProtos.

        Raises:
            ValueError: if layers are stored in the deprecated 'layers'
                field.
        """
        net_state = caffe_pb2.NetState() if net_state is None else net_state
        net = caffe2_pb2.NetDef()
        net.name = caffe_net.name
        net_params = caffe2_pb2.TensorProtos()
        if len(caffe_net.layers) > 0:
            raise ValueError(
                'I think something is wrong. This translation script '
                'only accepts new style layers that are stored in the '
                'layer field.')
        if not input_dims:
            input_dims = _GetInputDims(caffe_net)
        for layer in caffe_net.layer:
            if not _ShouldInclude(net_state, layer):
                log.info('Current net state does not need layer {}'.format(
                    layer.name))
                continue
            log.info('Translate layer {}'.format(layer.name))
            # Get pretrained one
            pretrained_layers = (
                [l for l in pretrained_net.layer if l.name == layer.name] +
                [l for l in pretrained_net.layers if l.name == layer.name])
            if len(pretrained_layers) > 1:
                print('>>> huh? more than one pretrained layer of one name?')
                print(
                    '>>> Assuming these layers have no trainable parameters (e.g relu or pooling)'
                )

                print([(pt.name, pt.type) for pt in pretrained_layers])
                pt_types = [pt.type for pt in pretrained_layers]
                # BUG FIX: always define pretrained_blobs on this path. The
                # original left it unbound when types were unique but none
                # matched layer.type, raising UnboundLocalError below.
                pretrained_blobs = []
                if len(set(pt_types)) == len(pt_types):
                    print(
                        '>>> But, just in case, try to match layer types since types are unique. If not, do not transfer params.'
                    )
                    for pt in pretrained_layers:
                        if pt.type == layer.type:
                            # print() function form: the original Python 2
                            # print statements break under Python 3.
                            print('  Found matching type {}'.format(layer.type))
                            print('  Setting pretrained blobs')
                            pretrained_blobs = [
                                utils.CaffeBlobToNumpyArray(blob)
                                for blob in pt.blobs
                            ]
                else:
                    print('>>> Setting pretrained blobs = []')
            elif len(pretrained_layers) == 1:
                pretrained_blobs = [
                    utils.CaffeBlobToNumpyArray(blob)
                    for blob in pretrained_layers[0].blobs
                ]
            else:
                # No pretrained layer for the given layer name. We'll just
                # pass no parameter blobs.
                pretrained_blobs = []

            operators, params = cls.TranslateLayer(layer,
                                                   pretrained_blobs,
                                                   is_test,
                                                   net=net,
                                                   net_params=net_params,
                                                   input_dims=input_dims)

            net.op.extend(operators)
            net_params.protos.extend(params)

        if remove_legacy_pad:
            assert input_dims, \
                   'Please specify input_dims to remove legacy_pad'
            net = _RemoveLegacyPad(net, net_params, input_dims)

        return net, net_params