Example 1
def setUpModule():
    # We will do all the computation stuff in the global space.
    caffenet = caffe_pb2.NetParameter()
    caffenet_pretrained = caffe_pb2.NetParameter()
    text_format.Merge(
        open('data/testdata/caffe_translator/deploy.prototxt').read(),
        caffenet)
    caffenet_pretrained.ParseFromString(
        open(
            'data/testdata/caffe_translator/bvlc_reference_caffenet.caffemodel',
            'rb').read())
    for remove_legacy_pad in [True, False]:
        net, pretrained_params = caffe_translator.TranslateModel(
            caffenet,
            caffenet_pretrained,
            is_test=True,
            remove_legacy_pad=remove_legacy_pad)
        with open(
                'data/testdata/caffe_translator/'
                'bvlc_reference_caffenet.translatedmodel', 'w') as fid:
            fid.write(str(net))
        for param in pretrained_params.protos:
            workspace.FeedBlob(param.name,
                               utils.Caffe2TensorToNumpyArray(param))
        # Let's also feed in the data from the Caffe test code.
        data = np.load('data/testdata/caffe_translator/data_dump.npy').astype(
            np.float32)
        workspace.FeedBlob('data', data)
        # Actually running the test.
        workspace.RunNetOnce(net.SerializeToString())
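The test above pushes the pretrained parameters and the dumped input data into the workspace and then runs the translated net once; a natural follow-up is to fetch the prediction. A minimal sketch, assuming the final softmax blob keeps its Caffe name 'prob' (as in the BVLC reference CaffeNet deploy prototxt):

from caffe2.python import workspace

# 'prob' is an assumption: the usual name of the final softmax blob in the
# BVLC reference CaffeNet deploy prototxt.
prob = workspace.FetchBlob('prob')
print(prob.shape)            # e.g. (N, 1000) class probabilities
print(prob.argmax(axis=1))   # predicted class index per input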
Example 2
def FeedBlob(name, arr, device_option=None):
    """Feeds a blob into the workspace.

    Inputs:
      name: the name of the blob.
      arr: either a TensorProto object or a numpy array object to be fed into
          the workspace.
      device_option (optional): the device option to feed the data with.
    Returns:
      True or False, stating whether the feed is successful.
    """
    if type(arr) is caffe2_pb2.TensorProto:
        arr = utils.Caffe2TensorToNumpyArray(arr)
    if type(arr) is np.ndarray and arr.dtype.kind in 'SU':
        # Plain NumPy strings are weird, let's use objects instead
        arr = arr.astype(np.object)

    if device_option is None:
        device_option = scope.CurrentDeviceScope()

    if device_option and device_option.device_type == caffe2_pb2.CUDA:
        if arr.dtype == np.dtype('float64'):
            logger.warning(
                "CUDA operators do not support 64-bit doubles, " +
                "please use arr.astype(np.float32) or np.int32 for ints." +
                " Blob: {}".format(name) + " type: {}".format(str(arr.dtype)))

    name = StringifyBlobName(name)
    if device_option is not None:
        return C.feed_blob(name, arr, StringifyProto(device_option))
    else:
        return C.feed_blob(name, arr)
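A minimal usage sketch (blob names, shapes, and values here are arbitrary): feed a NumPy array, read it back with FetchBlob, then feed again with an explicit device option.

import numpy as np
from caffe2.proto import caffe2_pb2
from caffe2.python import workspace

x = np.random.rand(4, 3).astype(np.float32)
workspace.FeedBlob('x', x)                     # uses the current device scope
print(workspace.FetchBlob('x').shape)          # -> (4, 3)

# Explicit device option (CPU here; caffe2_pb2.CUDA would target a GPU).
opt = caffe2_pb2.DeviceOption(device_type=caffe2_pb2.CPU)
workspace.FeedBlob('y', x, device_option=opt)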
Example 3
def pickle_weights(out_file_name, weights):
    blobs = {
        normalize_resnet_name(blob.name): utils.Caffe2TensorToNumpyArray(blob)
        for blob in weights.protos
    }
    save_object(blobs, out_file_name)
    print('Wrote blobs:')
    print(sorted(blobs.keys()))
Example 4
def pickle_weights(out_file_name, weights):
    blobs = {
        normalize_resnet_name(blob.name): utils.Caffe2TensorToNumpyArray(blob)
        for blob in weights.protos
    }
    with open(out_file_name, 'wb') as f:
        pickle.dump(blobs, f, protocol=pickle.HIGHEST_PROTOCOL)
    print('Wrote blobs:')
    print(sorted(blobs.keys()))
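For completeness, a minimal sketch of reading such a pickle back; the keys are the normalized blob names and the values are plain NumPy arrays (out_file_name as above).

import pickle

with open(out_file_name, 'rb') as f:
    blobs = pickle.load(f)
for name, arr in sorted(blobs.items()):
    print(name, arr.shape, arr.dtype)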
Example 5
def _RemoveLegacyPad(net, net_params, input_dims):
    legacy_pad_ops = []
    for i in range(len(net.op)):
        op_def = net.op[i]
        if re.match(r'^(Conv|ConvTranspose|MaxPool|AveragePool)(\dD)?$',
                    op_def.type):
            for arg in op_def.arg:
                if arg.name == 'legacy_pad':
                    legacy_pad_ops.append(i)
                    break
    if legacy_pad_ops:
        n, c, h, w = input_dims
        dummy_input = np.random.randn(n, c, h, w).astype(np.float32)
        dim_map = _GetLegacyDims(net, net_params, dummy_input, legacy_pad_ops)

        # Running with the legacy pad argument removed
        # compare the dimensions and adjust pad argument when necessary
        current = workspace.CurrentWorkspace()
        workspace.SwitchWorkspace("legacypad-removed", True)

        external_input = net.op[0].input[0]
        workspace.FeedBlob(external_input, dummy_input)
        for param in net_params.protos:
            workspace.FeedBlob(param.name,
                               utils.Caffe2TensorToNumpyArray(param))

        for i in range(len(net.op)):
            op_def = net.op[i]
            if i in legacy_pad_ops:
                arg_map = {}
                for arg in op_def.arg:
                    arg_map[arg.name] = arg
                pads = _GetLegacyPadArgs(op_def, arg_map)
                # remove legacy pad arg
                for j in range(len(op_def.arg)):
                    arg = op_def.arg[j]
                    if arg.name == 'legacy_pad':
                        del op_def.arg[j]
                        break
                output = op_def.output[0]
                # use a new name to avoid the interference with inplace
                nonlegacy_output = output + '_nonlegacy'
                op_def.output[0] = nonlegacy_output
                workspace.RunOperatorOnce(op_def)
                blob_nonlegacy = workspace.FetchBlob(nonlegacy_output)
                # reset output name
                op_def.output[0] = output

                dim1 = dim_map[i]
                dim2 = blob_nonlegacy.shape
                _AdjustDims(op_def, arg_map, pads, dim1, dim2)

            workspace.RunOperatorOnce(op_def)
        workspace.SwitchWorkspace(current)
    return net
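To make the detection step above concrete, here is a tiny self-contained sketch: it builds a toy pooling OperatorDef carrying a legacy_pad argument (the value 3 is arbitrary) and applies the same type regex and argument scan used in _RemoveLegacyPad.

import re
from caffe2.proto import caffe2_pb2

op = caffe2_pb2.OperatorDef()
op.type = 'MaxPool'
arg = op.arg.add()
arg.name = 'legacy_pad'
arg.i = 3

matches_type = re.match(
    r'^(Conv|ConvTranspose|MaxPool|AveragePool)(\dD)?$', op.type) is not None
has_legacy_pad = any(a.name == 'legacy_pad' for a in op.arg)
print(matches_type and has_legacy_pad)  # -> True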
Example 6
def _GetBlobDimMap(net, net_params, dummy_input):
    dim_map = {}
    ws = workspace.C.Workspace()
    for param in net_params.protos:
        ws.create_blob(param.name) \
          .feed(utils.Caffe2TensorToNumpyArray(param))
    external_input = net.op[0].input[0]
    ws.create_blob(external_input).feed(dummy_input)
    # Get dimensions with legacy pad
    for i in range(len(net.op)):
        op_def = net.op[i]
        ws._run_operator(op_def.SerializeToString())
        for output in op_def.output:
            blob = ws.fetch_blob(output)
            dim_map[output] = blob.shape
    return dim_map
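A brief driver sketch for the helper above, assuming net and pretrained_params come from a TranslateModel call as in Example 1 and that the network expects NCHW input of shape (1, 3, 227, 227) (an assumption for a CaffeNet-style model).

import numpy as np

dummy_input = np.random.randn(1, 3, 227, 227).astype(np.float32)
dim_map = _GetBlobDimMap(net, pretrained_params, dummy_input)
for blob_name, shape in sorted(dim_map.items()):
    print(blob_name, shape)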
Example 7
def FeedBlob(name, arr, device_option=None):
    """Feeds a blob into the workspace.

  Inputs:
    name: the name of the blob.
    arr: either a TensorProto object or a numpy array object to be fed into the
        workspace.
    device_option (optional): the device option to feed the data with.
  Returns:
    True or False, stating whether the feed is successful.
  """
    if type(arr) is caffe2_pb2.TensorProto:
        arr = utils.Caffe2TensorToNumpyArray(arr)
    if device_option is not None:
        return cc_FeedBlob(name, arr, StringfyProto(device_option))
    else:
        return cc_FeedBlob(name, arr)
Example 8
def _GetLegacyDims(net, net_params, dummy_input, legacy_pad_ops):
    dim_map = {}
    ws = workspace.C.Workspace()
    for param in net_params.protos:
        ws.create_blob(param.name) \
            .feed(utils.Caffe2TensorToNumpyArray(param))
    external_input = net.op[0].input[0]
    ws.create_blob(external_input).feed(dummy_input)
    # Get dimensions with legacy pad
    for i in range(len(net.op)):
        op_def = net.op[i]
        ws._run_operator(op_def.SerializeToString())
        if i in legacy_pad_ops:
            output = op_def.output[0]
            blob_legacy = ws.fetch_blob(output)
            dim_map[i] = blob_legacy.shape
    return dim_map
Example 9
def _GetLegacyDims(net, net_params, dummy_input, legacy_pad_ops):
    dim_map = {}
    current = workspace.CurrentWorkspace()
    workspace.SwitchWorkspace('legacypad', True)
    for param in net_params.protos:
        workspace.FeedBlob(param.name, utils.Caffe2TensorToNumpyArray(param))
    external_input = net.op[0].input[0]
    workspace.FeedBlob(external_input, dummy_input)
    # Get dimensions with legacy pad
    for i in range(len(net.op)):
        op_def = net.op[i]
        workspace.RunOperatorOnce(op_def)
        if i in legacy_pad_ops:
            output = op_def.output[0]
            blob_legacy = workspace.FetchBlob(output)
            dim_map[i] = blob_legacy.shape
    workspace.SwitchWorkspace(current)
    return dim_map
Example 10
def _Workspace_feed_blob(ws, name, arr, device_option=None):
    if type(arr) is caffe2_pb2.TensorProto:
        arr = utils.Caffe2TensorToNumpyArray(arr)
    if type(arr) is np.ndarray and arr.dtype.kind in 'SU':
        # Plain NumPy strings are weird, let's use objects instead
        arr = arr.astype(np.object)

    if device_option is None:
        device_option = scope.CurrentDeviceScope()

    if device_option and device_option.device_type == caffe2_pb2.CUDA:
        if arr.dtype == np.dtype('float64'):
            logger.warning(
                "CUDA operators do not support 64-bit doubles, " +
                "please use arr.astype(np.float32) or np.int32 for ints." +
                " Blob: {}".format(name) + " type: {}".format(str(arr.dtype)))

    name = StringifyBlobName(name)
    print("device option is:")
    print(device_option)
    return ws.create_blob(name).feed(arr)
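A quick sketch of exercising this helper directly on a private pybind workspace (the blob name and array are arbitrary); create_blob/feed and fetch_blob are the same pybind methods used in the other examples here, and _Workspace_feed_blob is assumed to be in scope together with its module-level imports.

import numpy as np
from caffe2.python import workspace

ws = workspace.C.Workspace()
_Workspace_feed_blob(ws, 'x', np.ones((2, 3), dtype=np.float32))
print(ws.fetch_blob('x').shape)  # -> (2, 3)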
Example 11
def FeedBlob(name, arr, device_option=None):
    """Feeds a blob into the workspace.

    Inputs:
      name: the name of the blob.
      arr: either a TensorProto object or a numpy array object to be fed into
          the workspace.
      device_option (optional): the device option to feed the data with.
    Returns:
      True or False, stating whether the feed is successful.
    """
    if type(arr) is caffe2_pb2.TensorProto:
        arr = utils.Caffe2TensorToNumpyArray(arr)
    if type(arr) is np.ndarray and arr.dtype.kind == 'S':
        # Plain NumPy strings are weird, let's use objects instead
        arr = arr.astype(np.object)
    name = StringifyBlobName(name)
    if device_option is not None:
        return C.feed_blob(name, arr, StringfyProto(device_option))
    elif scope.DEVICESCOPE is not None:
        return C.feed_blob(name, arr, StringfyProto(scope.DEVICESCOPE))
    else:
        return C.feed_blob(name, arr)
Example 12
    '--init_net=/data/local/tmp/super_resolution_mobile_init.pb '  # mobile init_net
    '--net=/data/local/tmp/super_resolution_mobile_predict.pb '  # mobile predict_net
    '--input=9 '  # name of our input image blob
    '--input_file=/data/local/tmp/input.blobproto '  # serialized input image
    '--output_folder=/data/local/tmp '  # destination folder for saving mobile output
    '--output=27,9 '  # output blobs we are interested in
    '--iter=1 '  # number of net iterations to execute
    '--caffe2_log_level=0 ')

# get the model output from adb and save to a file
os.system('adb pull /data/local/tmp/27 ./output.blobproto')

# We can recover the output content and post-process it using the same steps as we followed earlier
blob_proto = caffe2_pb2.BlobProto()
blob_proto.ParseFromString(open('./output.blobproto', 'rb').read())
img_out = utils.Caffe2TensorToNumpyArray(blob_proto.tensor)
img_out_y = Image.fromarray(np.uint8((img_out[0, 0]).clip(0, 255)), mode='L')
final_img = Image.merge("YCbCr", [
    img_out_y,
    img_cb.resize(img_out_y.size, Image.BICUBIC),
    img_cr.resize(img_out_y.size, Image.BICUBIC),
]).convert("RGB")
final_img.save("./_static/img/cat_superres_mobile.jpg")

######################################################################
# Now, you can compare the image ``cat_superres.jpg`` (model output from
# pure caffe2 backend execution) and ``cat_superres_mobile.jpg`` (model
# output from mobile execution) and see that both images look the same. If
# they do not look the same, something went wrong with the execution on
# mobile; in that case, please contact the Caffe2 community. You should
# expect the output image to look like the following:
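Beyond eyeballing the two JPEGs, a quick numeric check is possible. The sketch below assumes the pure-Caffe2 output was saved as ./_static/img/cat_superres.jpg next to the mobile one; small differences are expected from JPEG compression.

import numpy as np
from PIL import Image

img_pure = np.asarray(Image.open('./_static/img/cat_superres.jpg'), dtype=np.int16)
img_mobile = np.asarray(Image.open('./_static/img/cat_superres_mobile.jpg'), dtype=np.int16)
print('max abs pixel difference:', np.abs(img_pure - img_mobile).max())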
Example 13
    caffenet = caffe_pb2.NetParameter()
    caffenet_pretrained = caffe_pb2.NetParameter()
    input_proto = args.prototext
    input_caffemodel = args.caffemodel
    output_init_net = args.init_net
    output_predict_net = args.predict_net

    text_format.Merge(open(input_proto).read(), caffenet)
    caffenet_pretrained.ParseFromString(open(input_caffemodel, 'rb').read())
    net, pretrained_params = TranslateModel(caffenet,
                                            caffenet_pretrained,
                                            is_test=True)

    # Assume there is one input and one output
    external_input = net.op[0].input[0]
    external_output = net.op[-1].output[0]

    net.external_input.extend([external_input])
    net.external_input.extend(
        [param.name for param in pretrained_params.protos])
    net.external_output.extend([external_output])
    init_net = ConvertTensorProtosToInitNet(pretrained_params, external_input)

    for param in pretrained_params.protos:
        workspace.FeedBlob(param.name, utils.Caffe2TensorToNumpyArray(param))
    with open(output_predict_net, 'wb') as f:
        f.write(net.SerializeToString())
    with open(output_init_net, 'wb') as f:
        f.write(init_net.SerializeToString())
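As a follow-up, a minimal smoke test (sketch) that runs the two generated nets end to end; the input shape (1, 3, 227, 227) is an assumption for a CaffeNet-style model and should be adjusted to whatever the deploy prototxt declares.

    # Run the init net to create the parameter blobs, feed a random input,
    # then run the predict net and inspect the output blob.
    workspace.ResetWorkspace()
    with open(output_init_net, 'rb') as f:
        workspace.RunNetOnce(f.read())
    workspace.FeedBlob(external_input,
                       np.random.rand(1, 3, 227, 227).astype(np.float32))
    with open(output_predict_net, 'rb') as f:
        workspace.RunNetOnce(f.read())
    print(workspace.FetchBlob(external_output).shape)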