Ejemplo n.º 1
0
def setUpModule():
    """Translate the reference CaffeNet model and run it once, globally.

    Loads the deploy prototxt (text proto) and the pretrained caffemodel
    (binary proto), translates them to a Caffe2 net with and without
    legacy-pad removal, feeds the translated parameters plus the dumped
    input data into the workspace, and runs the net once.
    """
    # We will do all the computation stuff in the global space.
    caffenet = caffe_pb2.NetParameter()
    caffenet_pretrained = caffe_pb2.NetParameter()
    # The deploy file is a text-format proto.
    with open('data/testdata/caffe_translator/deploy.prototxt') as fid:
        text_format.Merge(fid.read(), caffenet)
    # The .caffemodel is a serialized binary proto: it must be opened in
    # 'rb' mode — text mode would corrupt or fail to decode it on Python 3.
    with open(
            'data/testdata/caffe_translator/bvlc_reference_caffenet.caffemodel',
            'rb') as fid:
        caffenet_pretrained.ParseFromString(fid.read())
    for remove_legacy_pad in [True, False]:
        net, pretrained_params = caffe_translator.TranslateModel(
            caffenet,
            caffenet_pretrained,
            is_test=True,
            remove_legacy_pad=remove_legacy_pad)
        # Dump the translated net in text form for inspection.
        with open(
                'data/testdata/caffe_translator/'
                'bvlc_reference_caffenet.translatedmodel', 'w') as fid:
            fid.write(str(net))
        for param in pretrained_params.protos:
            workspace.FeedBlob(param.name,
                               utils.Caffe2TensorToNumpyArray(param))
        # Let's also feed in the data from the Caffe test code.
        data = np.load('data/testdata/caffe_translator/data_dump.npy').astype(
            np.float32)
        workspace.FeedBlob('data', data)
        # Actually running the test.
        workspace.RunNetOnce(net.SerializeToString())
Ejemplo n.º 2
0
def load_and_convert_caffe_model(prototxt_file_name, caffemodel_file_name):
    """Load a Caffe model and translate it to a Caffe2 net.

    Args:
        prototxt_file_name: path to the network definition (text proto).
        caffemodel_file_name: path to the pretrained weights (binary proto).

    Returns:
        A (net, pretrained_weights) pair: the translated Caffe2 net and its
        parameter protos (including the custom-translated BatchNorm weights).
    """
    caffenet = caffe_pb2.NetParameter()
    caffenet_weights = caffe_pb2.NetParameter()
    # The prototxt is a text-format proto; the caffemodel is a binary proto
    # and must be read in 'rb' mode (text mode breaks on Python 3).
    with open(prototxt_file_name) as fid:
        text_format.Merge(fid.read(), caffenet)
    with open(caffemodel_file_name, 'rb') as fid:
        caffenet_weights.ParseFromString(fid.read())
    # C2 conv layers currently require biases, but they are optional in C1.
    # Add zeros as biases if they are missing.
    add_missing_biases(caffenet_weights)
    # We only care about getting parameters, so remove layers w/o parameters
    remove_layers_without_parameters(caffenet, caffenet_weights)
    # BatchNorm is not implemented in the translator *and* we need to fold Scale
    # layers into the new C2 SpatialBN op, hence we remove the batch norm layers
    # and apply custom translations code
    bn_weights = remove_spatial_bn_layers(caffenet, caffenet_weights)
    # Set num, channel, height and width for blobs that use shape.dim instead
    normalize_shape(caffenet_weights)
    # Translate the rest of the model
    net, pretrained_weights = caffe_translator.TranslateModel(
        caffenet, caffenet_weights)
    pretrained_weights.protos.extend(bn_weights)
    return net, pretrained_weights
Ejemplo n.º 3
0
            'Pass in any argument to have the test run for you.')
        sys.exit(0)
    if not os.path.exists('data/testdata/caffe_translator'):
        print('No testdata existing for the caffe translator test. Exiting.')
        sys.exit(0)
    # We will do all the computation stuff in the global space.
    caffenet = caffe_pb2.NetParameter()
    caffenet_pretrained = caffe_pb2.NetParameter()
    text_format.Merge(
        open('data/testdata/caffe_translator/deploy.prototxt').read(),
        caffenet)
    caffenet_pretrained.ParseFromString(
        open(
            'data/testdata/caffe_translator/bvlc_reference_caffenet.caffemodel'
        ).read())
    net, pretrained_params = caffe_translator.TranslateModel(
        caffenet, caffenet_pretrained, is_test=True)
    with open(
            'data/testdata/caffe_translator/'
            'bvlc_reference_caffenet.translatedmodel', 'w') as fid:
        fid.write(str(net))
    for param in pretrained_params.protos:
        workspace.FeedBlob(param.name, utils.Caffe2TensorToNumpyArray(param))
    # Let's also feed in the data from the Caffe test code.
    data = np.load('data/testdata/caffe_translator/data_dump.npy').astype(
        np.float32)
    workspace.FeedBlob('data', data)
    # Actually running the test.
    workspace.RunNetOnce(net.SerializeToString())
    unittest.main()
Ejemplo n.º 4
0
            'Pass in any argument to have the test run for you.')
        sys.exit(0)
    if not os.path.exists('data/testdata/caffe_translator'):
        print 'No testdata existing for the caffe translator test. Exiting.'
        sys.exit(0)
    # We will do all the computation stuff in the global space.
    caffenet = caffe_pb2.NetParameter()
    caffenet_pretrained = caffe_pb2.NetParameter()
    text_format.Merge(
        open('data/testdata/caffe_translator/deploy.prototxt').read(),
        caffenet)
    caffenet_pretrained.ParseFromString(
        open(
            'data/testdata/caffe_translator/bvlc_reference_caffenet.caffemodel'
        ).read())
    net, pretrained_params = caffe_translator.TranslateModel(
        caffenet, caffenet_pretrained)
    caffe_translator.DeleteDropout(net)
    with open(
            'data/testdata/caffe_translator/bvlc_reference_caffenet.translatedmodel',
            'w') as fid:
        fid.write(str(net))
    for param in pretrained_params.protos:
        workspace.FeedBlob(param.name, utils.Caffe2TensorToNumpyArray(param))
    # Let's also feed in the data from the Caffe test code.
    data = np.load('data/testdata/caffe_translator/data_dump.npy').astype(
        np.float32)
    workspace.FeedBlob('data', data)
    # Actually running the test.
    workspace.RunNetOnce(net.SerializeToString())
    unittest.main()