Code example #1
    def analyze(self, named_features):
        print("==================== Model parameters =========================")
        previous_workspace = workspace.CurrentWorkspace()
        workspace.SwitchWorkspace(self._workspace_id)

        for parameter in self._parameters:
            parameter_value = workspace.FetchBlob(parameter)
            print()
            print("Parameter {}:".format(parameter))
            print(parameter_value)
            print()
            print()

        print()
        print("==================== Output ============================")
        for _ in range(3):
            score = self.predict(named_features)
            print(score)
        print()

        print("==================== Input =========================")
        for name, value in six.iteritems(named_features):
            print("Feature {}: {}".format(name, value))

        print()
        print("==================== Normalized Input =========================")
        for name in named_features:
            norm_blob_value = workspace.FetchBlob(name + "_preprocessed")
            print("Normalized Feature {}: {}".format(name, norm_blob_value))

        workspace.SwitchWorkspace(previous_workspace)
Code example #2
    def testCreateWorkspace(self):
        workspaces = workspace.Workspaces()
        self.assertEqual(len(workspaces), 1)
        self.assertEqual(workspaces[0], "default")
        self.net = core.Net("test-net")
        self.net.ConstantFill([], "testblob", shape=[1, 2, 3, 4], value=1.0)
        self.assertEqual(
            workspace.RunNetOnce(self.net.Proto().SerializeToString()), True)
        self.assertEqual(workspace.HasBlob("testblob"), True)
        self.assertEqual(workspace.SwitchWorkspace("test", True), True)
        self.assertEqual(workspace.HasBlob("testblob"), False)
        self.assertEqual(workspace.SwitchWorkspace("default"), True)
        self.assertEqual(workspace.HasBlob("testblob"), True)

        try:
            # The following should raise an error.
            workspace.SwitchWorkspace("non-existing")
            # so this should never happen.
            self.assertEqual(True, False)
        except RuntimeError:
            pass

        workspaces = workspace.Workspaces()
        self.assertEqual(len(workspaces), 2)
        workspaces.sort()
        self.assertEqual(workspaces[0], "default")
        self.assertEqual(workspaces[1], "test")
Code example #3
    def test_convolution_sum_relu_fusion(self, stride, pad, kernel, size,
                                         input_channels, output_channels,
                                         batch_size, use_bias, group, gc, dc):
        conv = core.CreateOperator(
            "Conv", ["X0", "w0", "b0"] if use_bias else ["X0", "w0"], ["Y0"],
            stride=stride,
            pad=pad,
            kernel=kernel,
            group=group,
            device_option=dc[0])
        sum = core.CreateOperator("Sum", ["S0", "Y0"], ["S0"],
                                  device_option=dc[0])
        relu = core.CreateOperator("Relu", ["S0"], ["S0"], device_option=dc[0])
        conv_fusion = core.CreateOperator(
            "ConvFusion",
            ["X1", "w1", "b1", "S1"] if use_bias else ["X1", "w1", "S1"],
            ["S1"],
            stride=stride,
            pad=pad,
            kernel=kernel,
            group=group,
            fusion_type=3,
            device_option=dc[1])
        X = np.random.rand(batch_size, input_channels * group, size,
                           size).astype(np.float32) - 0.5
        w = np.random.rand(
                output_channels * group, input_channels, kernel, kernel) \
            .astype(np.float32) - 0.5
        b = np.random.rand(output_channels * group).astype(np.float32) - 0.5

        old_ws_name = workspace.CurrentWorkspace()
        workspace.SwitchWorkspace("_device_check_", True)
        workspace.FeedBlob('X0', X, dc[0])
        workspace.FeedBlob('w0', w, dc[0])
        workspace.FeedBlob('b0', b, dc[0])
        workspace.RunOperatorOnce(conv)
        Y0 = workspace.FetchBlob('Y0')
        S = np.random.rand(*Y0.shape).astype(np.float32) - 0.5
        workspace.FeedBlob('S0', S, dc[0])
        workspace.RunOperatorOnce(sum)
        workspace.RunOperatorOnce(relu)
        S0 = workspace.FetchBlob('S0')

        workspace.ResetWorkspace()
        workspace.FeedBlob('X1', X, dc[1])
        workspace.FeedBlob('w1', w, dc[1])
        workspace.FeedBlob('b1', b, dc[1])
        workspace.FeedBlob('S1', S, dc[1])
        workspace.RunOperatorOnce(conv_fusion)
        S1 = workspace.FetchBlob('S1')

        if not np.allclose(S0, S1, atol=0.01, rtol=0.01):
            print(S1.flatten())
            print(S0.flatten())
            print(np.max(np.abs(S1 - S0)))
            self.assertTrue(False)
        workspace.SwitchWorkspace(old_ws_name)
Code example #4
    def CheckSimple(self,
                    op,
                    inputs,
                    outputs_to_check,
                    input_device_options=None):
        """Checks the operator with different device implementations.

        Inputs:
          op: the operator to be checked.
          inputs: the input data in numpy arrays.
          outputs_to_check: the outputs to check between devices.
          input_device_options: a mapping from input name to a device to use
            (instead of self._device_options)
        Outputs:
          boolean: True if it passes, False if it does not pass.
        """
        op = copy.deepcopy(op)
        # Entering the checker workspace
        old_ws_name = workspace.CurrentWorkspace()
        results = []
        workspace.SwitchWorkspace("_device_check_", True)
        for i, device_option in enumerate(self._device_options):
            op.device_option.CopyFrom(device_option)
            _input_device_options = input_device_options or \
                InferOpBlobDevicesAsDict(op)[0]
            print(_input_device_options)
            for i, arr in enumerate(inputs):
                workspace.FeedBlob(
                    op.input[i], np.array(arr),
                    _input_device_options.get(op.input[i], device_option))
            workspace.RunOperatorOnce(op)
            results.append([
                workspace.FetchBlob(op.output[idx]) for idx in outputs_to_check
            ])
            # Everything is done, reset the workspace.
            workspace.ResetWorkspace()
        # After running on all devices, check correctness
        success = True
        for i in range(1, len(self._device_options)):
            for j in range(len(outputs_to_check)):
                x = results[i][j]
                y = results[0][j]
                if not np.allclose(
                        x, y, atol=self._threshold, rtol=self._threshold):
                    print('Failure in checking device option {}'
                          ' and output {}. The outputs are:'.format(
                              i, op.output[outputs_to_check[j]]))
                    print(x.flatten())
                    print(y.flatten())
                    print(np.max(np.abs(x - y)))
                    success = False
                # else:
                #     print ('Passed device pair (0, %d), %s %s' %
                #            (i, outputs_to_check[j], y.shape))
        workspace.SwitchWorkspace(old_ws_name)
        return success
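Since the docstring above only describes the arguments, here is a minimal usage sketch. It assumes this method belongs to caffe2's `device_checker.DeviceChecker`, constructed from a threshold and a list of `DeviceOption`s; the operator, the data, and the GPU branch (including `caffe2_pb2.CUDA`) are assumptions for illustration, not part of the snippet above.

# Usage sketch (assumptions noted above): check a Relu op across the available devices.
import numpy as np
from caffe2.proto import caffe2_pb2
from caffe2.python import core, device_checker, workspace

device_options = [caffe2_pb2.DeviceOption()]  # a default DeviceOption means CPU
if workspace.has_gpu_support:
    # On a CUDA build, also compare against GPU 0.
    device_options.append(core.DeviceOption(caffe2_pb2.CUDA, 0))

checker = device_checker.DeviceChecker(0.01, device_options)
op = core.CreateOperator("Relu", ["X"], ["Y"])
X = np.random.rand(4, 8).astype(np.float32) - 0.5
# outputs_to_check is a list of output indices; here we compare output 0.
assert checker.CheckSimple(op, [X], [0])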
Code example #5
def _RemoveLegacyPad(net, net_params, input_dims):
    legacy_pad_ops = []
    for i in range(len(net.op)):
        op_def = net.op[i]
        if re.match(r'^(Conv|ConvTranspose|MaxPool|AveragePool)(\dD)?$',
                    op_def.type):
            for arg in op_def.arg:
                if arg.name == 'legacy_pad':
                    legacy_pad_ops.append(i)
                    break
    if legacy_pad_ops:
        n, c, h, w = input_dims
        dummy_input = np.random.randn(n, c, h, w).astype(np.float32)
        dim_map = _GetLegacyDims(net, net_params, dummy_input, legacy_pad_ops)

        # Running with the legacy pad argument removed
        # compare the dimensions and adjust pad argument when necessary
        current = workspace.CurrentWorkspace()
        workspace.SwitchWorkspace("legacypad-removed", True)

        external_input = net.op[0].input[0]
        workspace.FeedBlob(external_input, dummy_input)
        for param in net_params.protos:
            workspace.FeedBlob(param.name,
                               utils.Caffe2TensorToNumpyArray(param))

        for i in range(len(net.op)):
            op_def = net.op[i]
            if i in legacy_pad_ops:
                arg_map = {}
                for arg in op_def.arg:
                    arg_map[arg.name] = arg
                pads = _GetLegacyPadArgs(op_def, arg_map)
                # remove legacy pad arg
                for j in range(len(op_def.arg)):
                    arg = op_def.arg[j]
                    if arg.name == 'legacy_pad':
                        del op_def.arg[j]
                        break
                output = op_def.output[0]
                # use a new name to avoid the interference with inplace
                nonlegacy_output = output + '_nonlegacy'
                op_def.output[0] = nonlegacy_output
                workspace.RunOperatorOnce(op_def)
                blob_nonlegacy = workspace.FetchBlob(nonlegacy_output)
                # reset output name
                op_def.output[0] = output

                dim1 = dim_map[i]
                dim2 = blob_nonlegacy.shape
                _AdjustDims(op_def, arg_map, pads, dim1, dim2)

            workspace.RunOperatorOnce(op_def)
        workspace.SwitchWorkspace(current)
    return net
Code example #6
File: rl_predictor.py, Project: keithmgould/BlueWhale
    def save(self, db_path, db_type):
        """ Saves network to db

        :param db_path see save_to_db
        :param db_type see save_to_db
        """
        previous_workspace = workspace.CurrentWorkspace()
        workspace.SwitchWorkspace(self._workspace_id)
        meta = self.get_predictor_export_meta()
        for parameter in self._parameters:
            if np.any(np.isnan(workspace.FetchBlob(parameter))):
                logger.info("WARNING: parameter {} is nan".format(parameter))
        save_to_db(db_type, db_path, meta)
        workspace.SwitchWorkspace(previous_workspace)
Code example #7
 def predict(self, states, actions):
     """ Returns values for each state
     :param states states as feature -> value dict
     """
     previous_workspace = workspace.CurrentWorkspace()
     workspace.SwitchWorkspace(self._workspace_id)
     for name, value in states.items():
         workspace.FeedBlob(name, np.atleast_1d(value).astype(np.float32))
     for name, value in actions.items():
         workspace.FeedBlob(name, np.atleast_1d(value).astype(np.float32))
     workspace.RunNet(self._net)
     result = {'Q': workspace.FetchBlob(self._output_blobs[0])}
     workspace.SwitchWorkspace(previous_workspace)
     return result
Code example #8
    def relu_test(self, inputs, gc, dc, seed):
        np.random.seed(seed)
        inputs = np.random.rand(1).astype(np.float32)
        X = inputs[0]
        # First dimension is the batch size
        print(X.shape)
        pred_net = caffe2_pb2.NetDef()
        pred_net.name = "pred"
        pred_net.external_input.extend(["X"])
        pred_net.external_output.append("Y")
        pred_net.op.add().CopyFrom(
            core.CreateOperator(
                "Relu",
                ["X"],
                ["Y"]
            )
        )
        pred_net_ref = caffe2_pb2.NetDef()
        pred_net_ref.name = "ref"
        pred_net_ref.external_input.extend(["X"])
        pred_net_ref.external_output.append("Y_ref")
        pred_net_ref.op.add().CopyFrom(
            core.CreateOperator(
                "ReluFakeFp16",
                ["X"],
                ["Y_ref"],
            )
        )

        shape_hints = {"X": X.shape}
        pred_net_onnxified = onnxifi_caffe2_net(pred_net,
                                                shape_hints,
                                                debug=True,
                                                adjust_batch=True,
                                                use_onnx=False)
        print(pred_net_onnxified)
        num_onnxified_ops = sum(
            1 if o.type == "Onnxifi" else 0 for o in pred_net_onnxified.op)
        np.testing.assert_equal(num_onnxified_ops, 1)
        workspace.SwitchWorkspace("glow_test_ws", True)
        workspace.FeedBlob("X", X)

        workspace.CreateNet(pred_net_ref)
        workspace.CreateNet(pred_net_onnxified)
        workspace.FeedBlob("X", X)
        # Run caffe2 net
        workspace.RunNet(pred_net_ref.name)
        Y_c2 = workspace.FetchBlob("Y_ref")

        # Run Glow net
        workspace.RunNet(pred_net_onnxified.name)
        Y_glow = workspace.FetchBlob("Y")

        # Results should be identical since we are comparing with the C2 emulation
        if not np.allclose(Y_c2, Y_glow):
            diff = np.abs((Y_glow - Y_c2) / (Y_c2 + kEpsilon))
            print_test_debug_info("Relu", {
                "seed":seed, "X": X,
                "Y_glow": Y_glow, "Y_c2": Y_c2, "diff": diff})
            assert(0)
Code example #9
File: __init__.py, Project: twwhatever/pytext
def create_predictor(
    config: PyTextConfig,
    model_file: Optional[str] = None,
    db_type: str = CAFFE2_DB_TYPE,
    task: Optional[NewTask] = None,
) -> Predictor:
    """
    Create a simple prediction API from a training config and an exported caffe2
    model file. This model file should be created by calling export on a trained
    model snapshot.
    """
    workspace_id = str(uuid.uuid4())
    workspace.SwitchWorkspace(workspace_id, True)
    predict_net = predictor_exporter.prepare_prediction_net(
        filename=model_file or config.export_caffe2_path, db_type=db_type
    )

    new_task = task or NewTask.from_config(config.task)
    input_tensorizers = {
        name: tensorizer
        for name, tensorizer in new_task.data.tensorizers.items()
        if tensorizer.is_input
    }
    return lambda input: _predict(
        workspace_id, predict_net, new_task.model, input_tensorizers, input
    )
Code example #10
    def load(cls, db_path, db_type):
        """ Creates DiscreteActionPredictor by loading from a database

        :param db_path see load_from_db
        :param db_type see load_from_db
        """
        previous_workspace = workspace.CurrentWorkspace()
        workspace_id = str(uuid.uuid4())
        workspace.SwitchWorkspace(workspace_id, True)
        net = prepare_prediction_net(db_path, db_type)
        meta = load_from_db(db_path, db_type)
        inputs = GetBlobs(meta, predictor_constants.INPUTS_BLOB_TYPE)
        outputs = GetBlobs(meta, predictor_constants.OUTPUTS_BLOB_TYPE)
        parameters = GetBlobs(meta, predictor_constants.PARAMETERS_BLOB_TYPE)
        workspace.SwitchWorkspace(previous_workspace)
        return cls(net, inputs, outputs, parameters, workspace_id)
Code example #11
File: main.py, Project: ForrestHeiYing/tutorials
def workspace_test():

    print("current blobs in the workspace : {}".format(workspace.Blobs))  # 查看workspace里面所有的blobs
    print("Workspace has blob 'X' ?: {}".format(workspace.HasBlob("X")))  # 判断是否有blob X

    X = np.random.randn(2, 3).astype(np.float32)
    print("Generated X from numpy: \n{}".format(X))
    workspace.FeedBlob("X", X)  # 将 blob 传入 workspace

    print("current blobs in the workspace:{}".format(workspace.Blobs()))
    print("Workspace has blob 'X' ?{}".format(workspace.HasBlob("X")))
    print("Fethched X:\n{}".format(workspace.FetchBlob("X")))   # 从workspace里面读取blob

    # Check that the two arrays are equal; raises an AssertionError if they are not
    np.testing.assert_array_equal(X, workspace.FetchBlob("X"))
    # print("a=", np.testing.assert_array_equal(X, workspace.FetchBlob("X")))

    print("current workspace: {}".format(workspace.CurrentWorkspace()))  # 查看当前workspace
    print("current blobs in the workspace: {}".format(workspace.Blobs()))

    # The second parameter 'True' indicates that if 'gutentag' does not exist, create one
    workspace.SwitchWorkspace("gutentag", True)  # switch the workspace.

    print("After Switch Workspace ................")
    print("current workspace:{}".format(workspace.CurrentWorkspace()))
    print("current blobs in the workspace:{}".format(workspace.Blobs()))
Code example #12
def _predict(workspace_id, feature_config, predict_net, featurizer, input):
    workspace.SwitchWorkspace(workspace_id)
    features = featurizer.featurize(InputRecord(**input))
    if feature_config.word_feat:
        for blob_name in feature_config.word_feat.export_input_names:
            converted_blob_name = convert_caffe2_blob_name(blob_name)
            workspace.blobs[converted_blob_name] = np.array([features.tokens],
                                                            dtype=str)
        workspace.blobs["tokens_lens"] = np.array([len(features.tokens)],
                                                  dtype=np.int_)
    if feature_config.dict_feat:
        dict_feats, weights, lens = feature_config.dict_feat.export_input_names
        converted_dict_blob_name = convert_caffe2_blob_name(dict_feats)
        workspace.blobs[converted_dict_blob_name] = np.array(
            [features.gazetteer_feats], dtype=str)
        workspace.blobs[weights] = np.array([features.gazetteer_feat_weights],
                                            dtype=np.float32)
        workspace.blobs[lens] = np.array(features.gazetteer_feat_lengths,
                                         dtype=np.int_)

    if feature_config.char_feat:
        for blob_name in feature_config.char_feat.export_input_names:
            converted_blob_name = convert_caffe2_blob_name(blob_name)
            workspace.blobs[converted_blob_name] = np.array(
                [features.characters], dtype=str)

    workspace.RunNet(predict_net)
    return {
        str(blob): workspace.blobs[blob][0]
        for blob in predict_net.external_outputs
    }
Code example #13
File: __init__.py, Project: twwhatever/pytext
def _predict(workspace_id, predict_net, model, tensorizers, input):
    workspace.SwitchWorkspace(workspace_id)
    tensor_dict = {
        name: tensorizer.prepare_input(input)
        for name, tensorizer in tensorizers.items()
    }
    model_inputs = model.arrange_model_inputs(tensor_dict)
    flat_model_inputs = []
    for model_input in model_inputs:
        if isinstance(model_input, tuple):
            flat_model_inputs.extend(model_input)
        else:
            flat_model_inputs.append(model_input)
    model_inputs = flat_model_inputs
    model_input_names = model.get_export_input_names(tensorizers)
    vocab_to_export = model.vocab_to_export(tensorizers)
    for blob_name, model_input in zip(model_input_names, model_inputs):
        converted_blob_name = blob_name
        dtype = np.float32
        if blob_name in vocab_to_export:
            converted_blob_name = convert_caffe2_blob_name(blob_name)
            dtype = str

        workspace.blobs[converted_blob_name] = np.array([model_input], dtype=dtype)
    workspace.RunNet(predict_net)
    return {
        str(blob): workspace.blobs[blob][0] for blob in predict_net.external_outputs
    }
Code example #14
def create_predictor(config: PyTextConfig,
                     model_file: Optional[str] = None) -> Predictor:
    """
    Create a simple prediction API from a training config and an exported caffe2
    model file. This model file should be created by calling export on a trained
    model snapshot.
    """
    workspace_id = str(uuid.uuid4())
    workspace.SwitchWorkspace(workspace_id, True)
    predict_net = predictor_exporter.prepare_prediction_net(
        filename=model_file or config.export_caffe2_path,
        db_type=CAFFE2_DB_TYPE)

    supportedInputTensorizers = [
        FloatListTensorizer,
        GazetteerTensorizer,
        TokenTensorizer,
    ]
    new_task = NewTask.from_config(config.task)
    input_tensorizers = {
        name: tensorizer
        for name, tensorizer in new_task.data.tensorizers.items() if any(
            isinstance(tensorizer, t) for t in supportedInputTensorizers)
    }

    return lambda input: _predict(workspace_id, predict_net, new_task.model,
                                  input_tensorizers, input)
Code example #15
File: shared.py, Project: ashnair1/detectron2
    def __enter__(self):
        self.org_ws = workspace.CurrentWorkspace()
        if self.ws_name is not None:
            workspace.SwitchWorkspace(self.ws_name, True)
        if self.is_reset:
            workspace.ResetWorkspace()

        return workspace
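The snippet above is only the `__enter__` half of a workspace-scoping context manager. Below is a minimal sketch of what the full class and its use might look like; the class name `ScopedWS`, the constructor arguments, and the `__exit__` behavior are assumptions pieced together from this `__enter__` and from the exit handler in Code example #20, not a verbatim copy of the project's code.

import numpy as np
from caffe2.python import workspace


class ScopedWS(object):
    """Sketch (assumed shape): temporarily switch into ws_name, then restore the old workspace."""

    def __init__(self, ws_name, is_reset=False, is_cleanup=False):
        self.ws_name = ws_name
        self.is_reset = is_reset
        self.is_cleanup = is_cleanup
        self.org_ws = ""

    def __enter__(self):
        self.org_ws = workspace.CurrentWorkspace()
        if self.ws_name is not None:
            workspace.SwitchWorkspace(self.ws_name, True)
        if self.is_reset:
            workspace.ResetWorkspace()
        return workspace

    def __exit__(self, exc_type, exc_value, traceback):
        if self.is_cleanup:
            workspace.ResetWorkspace()
        # Restore whatever workspace was current before entering.
        workspace.SwitchWorkspace(self.org_ws, True)


# Usage sketch: everything fed inside the block stays in "_temp_ws_".
with ScopedWS("_temp_ws_", is_reset=True) as ws:
    ws.FeedBlob("tmp", np.zeros((2, 3), dtype=np.float32))
    print(ws.Blobs())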
Code example #16
 def predict(self, states):
     """ Returns values for each state
     :param states states as feature -> value dict
     """
     previous_workspace = workspace.CurrentWorkspace()
     workspace.SwitchWorkspace(self._workspace_id)
     for input_blob in states:
         workspace.FeedBlob(
             input_blob,
             np.atleast_1d(states[input_blob]).astype(np.float32))
     workspace.RunNet(self._net)
     result = {
         output: workspace.FetchBlob(output)
         for output in self._output_blobs
     }
     workspace.SwitchWorkspace(previous_workspace)
     return result
Code example #17
    def _test_unary_op(self, opname, value):
        workspace.ResetWorkspace()
        n = 1
        m = 10001
        X = np.linspace(-value, value, num=m, dtype=np.float32)
        pred_net = caffe2_pb2.NetDef()
        pred_net.name = "pred"
        pred_net.external_input.append("X")
        pred_net.external_output.append("Y")
        pred_net.op.add().CopyFrom(
            core.CreateOperator(
                opname,
                ['X'],
                ['Y'])
        )
        ref_net = caffe2_pb2.NetDef()
        ref_net.name = "ref"
        ref_net.external_input.append("X")
        ref_net.external_output.append("Y")
        ref_net.op.add().CopyFrom(
            core.CreateOperator(
                opname + 'FakeFp16NNPI',
                ['X'],
                ['Y'])
        )
        print("REF NET = {}".format(ref_net))

        shape_hints = {"X": (n, m)}
        pred_net_onnxified = onnxifi_caffe2_net(pred_net,
                                                shape_hints,
                                                debug=True,
                                                adjust_batch=False,
                                                use_onnx=False)
        num_onnxified_ops = sum(
            1 if o.type == "Onnxifi" else 0 for o in pred_net_onnxified.op)
        np.testing.assert_equal(num_onnxified_ops, 1)
        workspace.SwitchWorkspace("glow_test_ws", True)
        workspace.FeedBlob("X", X)
        workspace.CreateNet(ref_net)
        workspace.CreateNet(pred_net_onnxified)
        # Run Glow net
        workspace.RunNet(pred_net_onnxified.name)
        Y_glow = workspace.FetchBlob('Y')
        # Run caffe2 reference net
        workspace.RunNet(ref_net.name)
        Y_c2 = workspace.FetchBlob('Y')

        if not np.allclose(Y_c2, Y_glow):
            diff = np.abs(Y_c2 - Y_glow)
            np.save('/tmp/' + opname + 'diff', diff)
            np.save('/tmp/' + opname + 'result', Y_c2)
            print_test_debug_info(opname, {
                "X": X,
                "Y_c2": Y_c2,
                "Y_glow": Y_glow,
                "diff": diff
            })
            assert(0)
Code example #18
def _GetLegacyDims(net, net_params, dummy_input, legacy_pad_ops):
    dim_map = {}
    current = workspace.CurrentWorkspace()
    workspace.SwitchWorkspace('legacypad', True)
    for param in net_params.protos:
        workspace.FeedBlob(param.name, utils.Caffe2TensorToNumpyArray(param))
    external_input = net.op[0].input[0]
    workspace.FeedBlob(external_input, dummy_input)
    # Get dimensions with legacy pad
    for i in range(len(net.op)):
        op_def = net.op[i]
        workspace.RunOperatorOnce(op_def)
        if i in legacy_pad_ops:
            output = op_def.output[0]
            blob_legacy = workspace.FetchBlob(output)
            dim_map[i] = blob_legacy.shape
    workspace.SwitchWorkspace(current)
    return dim_map
Code example #19
 def CheckNet(self, net, inputs=None, blobs_to_check=None, ignore=None):
     """Checks a network by inspecting all of its intermediate results, and
     see if things match.
     """
     if inputs is None:
         inputs = {}
     if ignore is None:
         ignore = set()
     old_ws_name = workspace.CurrentWorkspace()
     results = []
     if blobs_to_check is None:
         blobs_to_check = sum([list(op.output) for op in net.op], [])
     blobs_to_check = [b for b in blobs_to_check if b not in ignore]
     workspace.SwitchWorkspace("_device_check_", True)
     for device_option in self._device_options:
         for name, arr in viewitems(inputs):
             # print 'feeding', name
             workspace.FeedBlob(name, arr, device_option)
         for op in net.op:
             op.device_option.CopyFrom(device_option)
         workspace.RunNetOnce(net)
         results.append(
             [workspace.FetchBlob(name) for name in blobs_to_check])
     # After running on all devices, check correctness
     success = True
     for i in range(1, len(results)):
         for j in range(len(blobs_to_check)):
             x = results[i][j]
             y = results[0][j]
             if not np.allclose(
                     x, y, atol=self._threshold, rtol=self._threshold):
                 print('Failure in checking device option {}'
                       ' and output {}. The outputs are:'.format(
                           i, blobs_to_check[j]))
                 print(x.flatten())
                 print(y.flatten())
                 print(np.max(np.abs(x - y)))
                 success = False
             # else:
             #     print ('Passed device pair (%d, %d), %s %s: %s' %
             #            (i, j, blobs_to_check[j], y.shape,
             #             str(y.flatten())))
     workspace.SwitchWorkspace(old_ws_name)
     return success
Code example #20
 def __exit__(self, exc_type, exc_value, traceback):
     w = self.workspace_stack.pop()
     # Strictly speaking, create_if_missing here is unnecessary, since a user
     # is not supposed to be allowed to destruct a workspace while we're in
     # it.  However, empirically, it has been observed that during abnormal
     # shutdown, Caffe2 deletes its default workspace fairly early in the
     # final calls to destructors.  In this case, we may attempt to exit
     # to a default workspace which no longer exists.  create_if_missing=True
     # will (harmlessly) recreate the workspace before we finally quit.)
     workspace.SwitchWorkspace(w, create_if_missing=True)
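For context, the `__exit__` above pops a workspace name that a matching `__enter__` must have pushed. A minimal sketch of that counterpart is below; the attribute name `ws_name` is an assumption for illustration, not taken from the original class.

 def __enter__(self):
     # Remember the current workspace so __exit__ above can switch back to it,
     # then enter the target workspace, creating it on first use.
     self.workspace_stack.append(workspace.CurrentWorkspace())
     workspace.SwitchWorkspace(self.ws_name, create_if_missing=True)
     return self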
Code example #21
    def save(self, db_path, db_type):
        """ Saves network to db

        :param db_path see save_to_db
        :param db_type see save_to_db
        """
        previous_workspace = workspace.CurrentWorkspace()
        workspace.SwitchWorkspace(self._workspace_id)
        meta = self.get_predictor_export_meta()
        for parameter in self._parameters:
            parameter_data = workspace.FetchBlob(parameter)
            logger.info("DATA TYPE " + parameter_data.dtype.kind)
            if parameter_data.dtype.kind in {'U', 'S', 'O'}:
                continue  # Don't bother checking string blobs for nan
            logger.info("Checking parameter {} for nan".format(parameter))
            if np.any(np.isnan(parameter_data)):
                logger.info("WARNING: parameter {} is nan".format(parameter))
        save_to_db(db_type, db_path, meta)
        workspace.SwitchWorkspace(previous_workspace)
Code example #22
    def test_depthwise_convolution(self, batch_size, gc, dc):
        op = core.CreateOperator("Conv", ["X", "w", "b"], ["Y"],
                                 stride=1,
                                 pad=0,
                                 kernel=1,
                                 group=4,
                                 device_option=dc[0])
        op1 = core.CreateOperator("Conv", ["X", "w", "b"], ["Y"],
                                  stride=1,
                                  pad=0,
                                  kernel=1,
                                  group=4,
                                  device_option=dc[1])
        X = np.random.rand(batch_size, 544, 14, 14).astype(np.float32)
        w = np.random.rand(544, 136, 1, 1).astype(np.float32)
        b = np.random.rand(544).astype(np.float32)

        workspace.SwitchWorkspace("_device_check_", True)
        workspace.FeedBlob('X', X, dc[0])
        workspace.FeedBlob('w', w, dc[0])
        workspace.FeedBlob('b', b, dc[0])
        workspace.RunOperatorOnce(op)
        Y0 = workspace.FetchBlob('Y')

        workspace.ResetWorkspace()
        workspace.FeedBlob('X', X, dc[1])
        workspace.FeedBlob('w', w, dc[1])
        workspace.FeedBlob('b', b, dc[1])
        net = core.Net("net")
        old_net = caffe2_pb2.NetDef()
        old_net.op.extend([op1])
        net.Proto().CopyFrom(old_net)
        optimizeForMKLDNN(net)
        workspace.RunOperatorOnce(net.Proto().op[0])
        Y1 = workspace.FetchBlob('Y')

        if not np.allclose(Y0, Y1, atol=0.01, rtol=0.01):
            print(Y1.flatten())
            print(Y0.flatten())
            print(np.max(np.abs(Y1 - Y0)))
            self.assertTrue(False)

        workspace.ResetWorkspace()
        workspace.FeedBlob('X', X, dc[1])
        workspace.FeedBlob('w', w, dc[1])
        workspace.FeedBlob('b', b, dc[1])
        workspace.RunOperatorOnce(op1)
        Y2 = workspace.FetchBlob('Y')

        if not np.allclose(Y0, Y2, atol=0.01, rtol=0.01):
            print(Y2.flatten())
            print(Y0.flatten())
            print(np.max(np.abs(Y2 - Y0)))
            self.assertTrue(False)
Code example #23
    def predict(self, examples):
        """ Returns values for each state
        :param examples A list of feature -> value dict examples
        """
        previous_workspace = workspace.CurrentWorkspace()
        workspace.SwitchWorkspace(self._workspace_id)
        workspace.FeedBlob(
            'input/float_features.lengths',
            np.array([len(e) for e in examples], dtype=np.int32))
        workspace.FeedBlob(
            'input/float_features.keys',
            np.array([list(e.keys()) for e in examples],
                     dtype=np.int32).flatten())
        workspace.FeedBlob(
            'input/float_features.values',
            np.array([list(e.values()) for e in examples],
                     dtype=np.float32).flatten())
        workspace.RunNet(self._net)

        output_lengths = workspace.FetchBlob(
            'output/string_weighted_multi_categorical_features.values.lengths')
        output_names = workspace.FetchBlob(
            'output/string_weighted_multi_categorical_features.values.keys')
        output_values = workspace.FetchBlob(
            'output/string_weighted_multi_categorical_features.values.values')

        results = []

        cursor = 0
        for length in output_lengths:
            cursor_begin = cursor
            cursor_end = cursor_begin + length
            cursor = cursor_end

            result = {}
            for x in range(cursor_begin, cursor_end):
                result[output_names[x].decode("utf-8")] = output_values[x]
            results.append(result)

        workspace.SwitchWorkspace(previous_workspace)
        return results
Code example #24
        def reference(input, initial_input):
            global_ws_name = workspace.CurrentWorkspace()
            input_all = workspace.blobs[input_blob]

            workspace.SwitchWorkspace("ref", create_if_missing=True)
            workspace.blobs[input_blob] = input
            workspace.blobs[output_t_prev] = initial_input.reshape(n, d)
            res_all = np.zeros(shape=input.shape, dtype=np.float32)

            for t_cur in range(T):
                workspace.blobs[input_t] = input_all[t_cur]
                workspace.RunNetOnce(step.net)
                result_t = workspace.blobs[output_t]
                workspace.blobs[output_t_prev] = result_t
                res_all[t_cur] = result_t

            workspace.SwitchWorkspace(global_ws_name)

            shape = list(input.shape)
            shape[0] = 1
            return (res_all, res_all[-1].reshape(shape))
Code example #25
  def CheckSimple(self, op, inputs, outputs_to_check):
    """Checks the operator with different device implementations.

    Inputs:
      op: the operator to be checked.
      inputs: the input data in numpy arrays.
      outputs_to_check: the outputs to check between devices.
    Outputs:
      boolean: True if it passes, False if it does not pass.
    """
    # Entering the checker workspace
    old_ws_name = workspace.CurrentWorkspace()
    results = []
    workspace.SwitchWorkspace("_device_check_", True)
    for i, device_option in enumerate(self._device_options):
      for i, arr in enumerate(inputs):
        workspace.FeedBlob(op.input[i], arr, device_option)
      op.device_option.CopyFrom(device_option)
      workspace.RunOperatorOnce(op)
      results.append(
          [workspace.FetchBlob(op.output[idx]) for idx in outputs_to_check])
      # Everything is done, reset the workspace.
      workspace.ResetWorkspace()
    # After running on all devices, check correctness
    success = True
    for i in range(1, len(self._device_options)):
      for j in range(len(outputs_to_check)):
        x = results[i][j]
        y = results[0][j]
        if np.any(np.abs(x - y) > self._threshold):
          print('Failure in checking device option {}'
                ' and output {}. The outputs are:'.format(i, op.output[j]))
          print(x.flatten())
          print(y.flatten())
          success = False
        #else:
        #  print ('Passed device pair (0, %d), %s %s' %
        #         (i, outputs_to_check[j], y.shape))
    workspace.SwitchWorkspace(old_ws_name)
    return success
Code example #26
    def test_logit(self, seed):
        np.random.seed(seed)
        workspace.ResetWorkspace()
        n = 1
        m = 15361
        X = np.linspace(0, 1, num=m, dtype=np.float32)

        pred_net = caffe2_pb2.NetDef()
        pred_net.name = "pred"
        pred_net.external_input.append("X")
        pred_net.external_output.append("Y")
        pred_net.op.add().CopyFrom(
            core.CreateOperator('Logit', ['X'], ['Y'], eps=1e-6))
        ref_net = caffe2_pb2.NetDef()
        ref_net.name = "ref"
        ref_net.external_input.append("X")
        ref_net.external_output.append("Y")
        ref_net.op.add().CopyFrom(
            core.CreateOperator('LogitFakeFp16NNPI', ['X'], ['Y'], eps=1e-6))
        print("REF NET = {}".format(ref_net))

        shape_hints = {"X": (n, m)}
        pred_net_onnxified = onnxifi_caffe2_net(pred_net,
                                                shape_hints,
                                                debug=True,
                                                adjust_batch=False,
                                                use_onnx=False)
        num_onnxified_ops = sum(1 if o.type == "Onnxifi" else 0
                                for o in pred_net_onnxified.op)
        np.testing.assert_equal(num_onnxified_ops, 1)
        workspace.SwitchWorkspace("glow_test_ws", True)
        workspace.FeedBlob("X", X)
        workspace.CreateNet(ref_net)
        workspace.CreateNet(pred_net_onnxified)
        # Run Glow net
        workspace.RunNet(pred_net_onnxified.name)
        Y_glow = workspace.FetchBlob('Y')
        # Run caffe2 reference net
        workspace.RunNet(ref_net.name)
        Y_c2 = workspace.FetchBlob('Y')

        diff = np.abs(Y_c2 - Y_glow)
        if np.nanmax(diff) > 9e-3:
            np.save('/tmp/logit_diff', diff)
            np.save('/tmp/logit_result', Y_c2)
            print_test_debug_info('Logit', {
                "X": X,
                "Y_c2": Y_c2,
                "Y_glow": Y_glow,
                "diff": diff
            })
            assert (0)
Code example #27
    def test_clip(self, seed):
        np.random.seed(seed)
        m, n, k = 8, 8, 8
        dtype = np.float32
        pred_net = caffe2_pb2.NetDef()
        pred_net.name = "pred"
        pred_net.external_input.extend(["X", "W0", "b0", "W1", "b1"])
        pred_net.external_output.append("Y")
        pred_net.op.add().CopyFrom(
            core.CreateOperator(
                "FC",
                ["X", "W0", "b0"],
                ["X1"],
            )
        )
        pred_net.op.add().CopyFrom(
            core.CreateOperator(
                "FC",
                ["X1", "W1", "b1"],
                ["Y"],
            )
        )
        workspace.GlobalInit(
            ['caffe2', '--caffe2_log_level=0', '--glow_global_fp16=1',
             '--glow_clip_fp16'])
        workspace.SwitchWorkspace("glow_test_ws", True)
        workspace.ResetWorkspace()
        W0 = np.full((n, k), 65536.0, dtype)
        b0 = np.random.randint(low=1, high=3, size=(n)).astype(dtype)
        W1 = np.random.randint(low=1, high=3, size=(n, k)).astype(dtype)
        b1 = np.random.randint(low=1, high=3, size=(n)).astype(dtype)
        workspace.FeedBlob("W0", W0)
        workspace.FeedBlob("b0", b0)
        workspace.FeedBlob("W1", W1)
        workspace.FeedBlob("b1", b1)

        pred_net_onnxified = onnxifi_caffe2_net(
            pred_net,
            {"X": (m, k)},
            debug=True,
            adjust_batch=False,
            use_onnx=False
        )

        X = np.random.randint(low=1, high=3, size=(m, k)).astype(dtype)
        workspace.FeedBlob("X", X)
        workspace.CreateNet(pred_net_onnxified)

        workspace.RunNet(pred_net_onnxified.name)
        Y_glow = workspace.FetchBlob("Y")
        np.testing.assert_allclose(Y_glow, np.full((m, n), 65504.0, dtype))
Code example #28
    def test_nhwc2nchw(self, n, c, h, w, gc, dc):
        op0 = core.CreateOperator(
            "NCHW2NHWC",
            ["X"],
            ["Y"],
        )
        op1 = core.CreateOperator(
            "NHWC2NCHW",
            ["Y"],
            ["Z"],
        )

        X = np.random.rand(n, c, h, w).astype(np.float32) - 0.5

        old_ws_name = workspace.CurrentWorkspace()
        workspace.SwitchWorkspace("_device_check_", True)
        workspace.FeedBlob('X', X, dc[0])
        op0.device_option.CopyFrom(dc[0])
        op1.device_option.CopyFrom(dc[0])
        workspace.RunOperatorOnce(op0)
        workspace.RunOperatorOnce(op1)
        Z0 = workspace.FetchBlob("Z")

        workspace.ResetWorkspace()
        workspace.FeedBlob('X', X, dc[1])
        op0.device_option.CopyFrom(dc[1])
        op1.device_option.CopyFrom(dc[1])
        workspace.RunOperatorOnce(op0)
        workspace.RunOperatorOnce(op1)
        Z1 = workspace.FetchBlob("Z")

        if not np.allclose(Z0, Z1, atol=0.01, rtol=0.01):
            print(Z1.flatten())
            print(Z0.flatten())
            print(np.max(np.abs(Z1 - Z0)))
            self.assertTrue(False)

        workspace.SwitchWorkspace(old_ws_name)
Code example #29
def create_predictor(config, model_file=None):
    workspace_id = str(uuid.uuid4())
    workspace.SwitchWorkspace(workspace_id, True)
    predict_net = predictor_exporter.prepare_prediction_net(
        filename=model_file or config.export_caffe2_path, db_type=CAFFE2_DB_TYPE
    )

    task = config.task
    feature_config = task.features
    featurizer = create_featurizer(task.featurizer, feature_config)

    return lambda input: _predict(
        workspace_id, feature_config, predict_net, featurizer, input
    )
Code example #30
def _predict(workspace_id, predict_net, model, tensorizers, input):
    workspace.SwitchWorkspace(workspace_id)
    tensor_dict = {
        name: tensorizer.prepare_input(input)
        for name, tensorizer in tensorizers.items()
    }
    model_inputs = model.arrange_model_inputs(tensor_dict)
    model_input_names = model.get_export_input_names(tensorizers)
    for blob_name, model_input in zip(model_input_names, model_inputs):
        converted_blob_name = convert_caffe2_blob_name(blob_name)
        workspace.blobs[converted_blob_name] = np.array([model_input], dtype=str)
    workspace.RunNet(predict_net)
    return {
        str(blob): workspace.blobs[blob][0] for blob in predict_net.external_outputs
    }