Example #1
    def check_optimizer(self, optimizer):
        self.assertTrue(optimizer.get_auxiliary_parameters().shared)
        self.assertTrue(optimizer.get_auxiliary_parameters().local)
        self.assertTrue(workspace.HasBlob("optimizer_iteration"))
        iteration_tensor = workspace.FetchBlob("optimizer_iteration")
        np.testing.assert_allclose(np.array([2000]),
                                   iteration_tensor,
                                   atol=1e-5)
        for param in optimizer.get_auxiliary_parameters().shared:
            workspace.FetchBlob(param)
        for param in optimizer.get_auxiliary_parameters().local:
            workspace.FetchBlob(param)
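
Note: every example on this page relies on the same FeedBlob/HasBlob/FetchBlob round trip. A minimal standalone sketch (the blob name "x" is invented for illustration):

import numpy as np
from caffe2.python import workspace

workspace.FeedBlob("x", np.array([2000], dtype=np.int64))  # create or overwrite a blob
assert workspace.HasBlob("x")      # HasBlob checks existence by name
print(workspace.FetchBlob("x"))    # FetchBlob returns the blob as a numpy array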
Example #2
    def test_create_map(self):
        dtypes = [core.DataType.INT32, core.DataType.INT64]
        for key_dtype, value_dtype in itertools.product(dtypes, dtypes):
            op = core.CreateOperator(
                'CreateMap',
                [],
                ['map'],
                key_dtype=key_dtype,
                value_dtype=value_dtype,
            )
            workspace.RunOperatorOnce(op)
            self.assertTrue(workspace.HasBlob('map'))
Example #3
def prune_init_net(workspace, config, device_opt, experiment):
    # fetch all bn blobs
    bn_names = []
    bn_scales = []
    bn_bias = []
    for blob in workspace.Blobs():
        if blob.endswith('_bn_s'):
            name = blob[:-2]
            bn_names.append(name)
            bn_scales.append(workspace.FetchBlob(blob))
            # bias must be with scale for 'spatial_bn'
            assert (workspace.HasBlob(name + '_b'))
            bn_bias.append(workspace.FetchBlob(name + '_b'))

    # compute global threshold
    total_channels = 0
    for bs in bn_scales:
        total_channels += bs.shape[0]

    index = 0
    bn = np.zeros(total_channels)
    for bs in bn_scales:
        size = bs.shape[0]
        bn[index:(index + size)] = np.abs(bs)
        index += size

    sorted_bn = np.sort(bn)
    threshold_index = int(total_channels * config['solver']['percent'])
    threshold = sorted_bn[threshold_index]

    # zero out pruned channel
    pruned = 0
    pruned_bn_scales = []
    pruned_bn_bias = []
    for bname, bs, bb in zip(bn_names, bn_scales, bn_bias):
        # get mask with np.abs()!
        bs_abs = np.abs(bs)
        mask = (bs_abs > threshold).astype(np.float32)
        pruned += mask.shape[0] - np.sum(mask)
        # pruning
        pruned_bn_scales.append(bs * mask)
        pruned_bn_bias.append(bb * mask)
        experiment.add_log("bn layer name: {} \t layer total channels: {} \t remaining"\
              "channels: {} \t pruned rate: {:.2f}%".format(
                  bname, mask.shape[0], np.sum(mask),
                  100 * float(mask.shape[0] - np.sum(mask)) / mask.shape[0]))

    # feed blob back into workspace
    for bname, pruned_bs, pruned_bb in zip(bn_names, pruned_bn_scales,
                                           pruned_bn_bias):
        workspace.FeedBlob(bname + '_s', pruned_bs, device_option=device_opt)
        workspace.FeedBlob(bname + '_b', pruned_bb, device_option=device_opt)
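
The global-threshold step above sorts the absolute BN scales of all layers and takes the value at the configured percentile, so a channel survives only if its |scale| beats that global cutoff. A hedged numpy sketch with toy values (not part of the original code):

import numpy as np

scales = [np.array([0.9, 0.1, 0.4]), np.array([0.05, 0.7])]   # two BN layers
bn = np.abs(np.concatenate(scales))
threshold = np.sort(bn)[int(len(bn) * 0.5)]                   # percent = 0.5 -> 0.4
masks = [(np.abs(s) > threshold).astype(np.float32) for s in scales]
print(masks)  # [array([1., 0., 0.]), array([0., 1.])]: 3 of 5 channels pruned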
Example #4
    def test_int8_gen_quant_params_op(self, n, m, k, quantization_kind,
                                      preserve_sparsity, rnd_seed, gc, dc):
        assert n > 0, "Zero samples in the input data"
        X_min = 0 if preserve_sparsity else -77
        X_max = X_min + 255
        np.random.seed(rnd_seed)
        X = np.round(np.random.rand(n, m, k) * (X_max - X_min) + X_min).astype(
            np.float32)
        # Calculate X_qparam
        hist, bin_edges = np.histogram(X.flatten(), bins=2048)
        X_qparam = dnnlowp_pybind11.ChooseStaticQuantizationParams(
            np.min(X), np.max(X), hist, preserve_sparsity, 8,
            quantization_kind)

        # Build a net to generate X's qparam using the Int8GenQuantParams op
        workspace.FeedBlob("X", X, device_option=gc)
        dnnlowp_pybind11.CreateInt8QuantSchemeBlob("quant_scheme",
                                                   quantization_kind,
                                                   preserve_sparsity)
        assert workspace.HasBlob(
            "quant_scheme"
        ), "Failed to create the quant_scheme blob in current workspace"

        gen_quant_params_net = core.Net("gen_quant_params")
        gen_quant_params_op = core.CreateOperator(
            "Int8GenQuantParams",
            ["X", "quant_scheme"],
            ["quant_param"],
            device_option=gc,
        )
        gen_quant_params_net.Proto().op.extend([gen_quant_params_op])
        assert workspace.RunNetOnce(
            gen_quant_params_net), "Failed to run the gen_quant_params net"
        scale, zero_point = dnnlowp_pybind11.ObserveInt8QuantParamsBlob(
            "quant_param")
        shapes, types = workspace.InferShapesAndTypes([gen_quant_params_net],
                                                      blob_dimensions={
                                                          "X": [n, m, k],
                                                          "quant_scheme": [1]
                                                      },
                                                      blob_types={
                                                          "X":
                                                          core.DataType.FLOAT,
                                                          "quant_scheme":
                                                          core.DataType.STRING
                                                      })
        self.assertEqual(shapes["quant_param"], [1])
        self.assertEqual(types["quant_param"], core.DataType.FLOAT)

        np.testing.assert_equal(scale, X_qparam.scale)
        np.testing.assert_equal(zero_point, X_qparam.zero_point)
Example #5
def caffe2_net_reference(init_net, predict_net, inputs):
    with _caffe2_workspace():
        if init_net:
            workspace.RunNetOnce(init_net)
        if isinstance(inputs, dict):
            for key, value in inputs.items():
                workspace.FeedBlob(key, value)
        else:
            uninitialized = filter(lambda x: not workspace.HasBlob(x),
                                   predict_net.external_input)
            for key, value in zip(uninitialized, inputs):
                workspace.FeedBlob(key, value)
        workspace.RunNetOnce(predict_net)
        return dict((name, workspace.FetchBlob(name))
                    for name in predict_net.external_output)
Example #6
    def saveFile(self, tmp_folder, db_name, db_type, start_blob_id):
        dtypes = [np.float16, np.float32, np.float64, np.bool_, np.int8,
                  np.int16, np.int32, np.int64, np.uint8, np.uint16]
        arrays = [np.random.permutation(6).reshape(2, 3).astype(T)
                  for T in dtypes]

        for i, arr in enumerate(arrays):
            self.assertTrue(workspace.FeedBlob(str(i + start_blob_id), arr))
            self.assertTrue(workspace.HasBlob(str(i + start_blob_id)))

        # Saves the blobs to a local db.
        tmp_file = os.path.join(tmp_folder, db_name)
        op = core.CreateOperator(
            "Save",
            [str(i + start_blob_id) for i in range(len(arrays))], [],
            absolute_path=1,
            db=tmp_file, db_type=db_type)
        workspace.RunOperatorOnce(op)
        return tmp_file, arrays
Example #7
def c2_native_run_net(init_net, predict_net, inputs):
    with Workspace():
        if init_net:
            workspace.RunNetOnce(init_net)

        if isinstance(inputs, dict):
            for key, value in inputs.items():
                workspace.FeedBlob(key, value)
        else:
            uninitialized = [
                input_name for input_name in predict_net.external_input
                if not workspace.HasBlob(input_name)
            ]
            assert len(uninitialized) == len(inputs)
            for key, value in zip(uninitialized, inputs):
                workspace.FeedBlob(key, value)

        workspace.RunNetOnce(predict_net)

        output_names = predict_net.external_output
        output_values = [workspace.FetchBlob(name) for name in output_names]
        return namedtupledict('Outputs', output_names)(*output_values)
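
A hedged usage sketch for c2_native_run_net; the one-op net and blob names are invented for illustration:

import numpy as np
from caffe2.python import core

# Trivial predict net computing y = relu(x).
net = core.Net("pred")
x = net.AddExternalInput("x")
y = net.Relu(x, "y")
net.AddExternalOutput(y)

# A positional list is zipped against the uninitialized external inputs.
outputs = c2_native_run_net(None, net.Proto(),
                            [np.random.randn(2, 3).astype(np.float32)])
print(outputs.y)  # outputs is a namedtuple keyed by external_output names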
Example #8
def onnxifi_caffe2_net(
        pred_net,
        input_shapes,
        infer_shapes=False,
        max_batch_size=1,
        max_seq_size=1,
        debug=False,
        use_onnx=True):
    """
    Transform the caffe2_net by collapsing ONNXIFI-runnable nodes into Onnxifi c2 ops
    """
    # Inject a fake input tensor to help populate the shape if we
    # do not do shape inference
    shape_hints = {}
    external_inputs = []
    if not infer_shapes:
        for k, v in input_shapes.items():
            need_input_tensor = True
            if workspace.HasBlob(k):
                itensor = workspace.FetchBlob(k)
                if itensor.shape == v:
                    need_input_tensor = False
            if need_input_tensor:
                workspace.FeedBlob(k, np.random.randn(*v).astype(np.float32))
                external_inputs.append(k)

    for k, v in input_shapes.items():
        shape_hints[k] = v
    pred_net_str = C.onnxifi(pred_net.SerializeToString(),
                             external_inputs,
                             shape_hints,
                             infer_shapes,
                             max_batch_size,
                             max_seq_size,
                             debug,
                             use_onnx)
    pred_net_cut = caffe2_pb2.NetDef()
    pred_net_cut.ParseFromString(pred_net_str)
    return pred_net_cut
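
A hedged usage sketch (the input name and shape are assumptions; pred_net would be a caffe2_pb2.NetDef loaded elsewhere):

# Collapse the ONNXIFI-runnable part of pred_net, hinting the shape of the
# hypothetical input blob "data".
cut_net = onnxifi_caffe2_net(pred_net,
                             {"data": (1, 3, 224, 224)},
                             infer_shapes=False,
                             max_batch_size=1,
                             use_onnx=False)
workspace.CreateNet(cut_net)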
Example #9
    def testRepeatedArgs(self):
        dtypes = [np.float16, np.float32, np.float64, np.bool_, np.int8,
                  np.int16, np.int32, np.int64, np.uint8, np.uint16]
        arrays = [np.random.permutation(6).reshape(2, 3).astype(T)
                  for T in dtypes]

        for i, arr in enumerate(arrays):
            self.assertTrue(workspace.FeedBlob(str(i), arr))
            self.assertTrue(workspace.HasBlob(str(i)))

        # Saves the blobs to a local db.
        tmp_folder = tempfile.mkdtemp()
        op = core.CreateOperator(
            "Save",
            [str(i) for i in range(len(arrays))] * 2, [],
            absolute_path=1,
            db=os.path.join(tmp_folder, "db"), db_type=self._db_type)
        with self.assertRaises(RuntimeError):
            workspace.RunOperatorOnce(op)
        try:
            shutil.rmtree(tmp_folder)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
Example #10
    def test_onnx_while_fibb(
            self, condition, max_trip_count, save_scopes, seed, gc, dc):
        np.random.seed(seed)

        # Create body net
        body_net = caffe2_pb2.NetDef()
        # Two loop carried dependencies: first and second
        body_net.external_input.extend(['i', 'cond', 'first', 'second'])
        body_net.external_output.extend(['cond_new', 'second', 'third', 'third'])
        add_op = core.CreateOperator(
            'Add',
            ['first', 'second'],
            ['third'],
        )
        print3 = core.CreateOperator(
            'Print',
            ['third'],
            [],
        )
        limit_const = core.CreateOperator(
            'ConstantFill',
            [],
            ['limit_const'],
            shape=[1],
            dtype=caffe2_pb2.TensorProto.FLOAT,
            value=100.0,
        )
        cond = core.CreateOperator(
            'LT',
            ['third', 'limit_const'],
            ['cond_new'],
        )
        body_net.op.extend([add_op, print3, limit_const, cond])

        while_op = core.CreateOperator(
            'ONNXWhile',
            ['max_trip_count', 'condition', 'first_init', 'second_init'],
            ['first_a', 'second_a', 'third_a'],
            body=body_net,
            has_cond=True,
            has_trip_count=True,
            save_scopes=save_scopes,
        )

        condition_arr = np.array(condition).astype(np.bool_)
        max_trip_count_arr = np.array(max_trip_count).astype(np.int64)
        first_init = np.array([1]).astype(np.float32)
        second_init = np.array([1]).astype(np.float32)

        def ref(max_trip_count, condition, first_init, second_init):
            first = 1
            second = 1
            results = []
            if condition:
                for _ in range(max_trip_count):
                    third = first + second
                    first = second
                    second = third
                    results.append(third)
                    if third > 100:
                        break
            return (first, second, np.array(results).astype(np.float32))

        self.assertReferenceChecks(
            gc,
            while_op,
            [max_trip_count_arr, condition_arr, first_init, second_init],
            ref,
        )
        self.assertFalse(workspace.HasBlob("cond_new"))
Example #11
    def test_meta_net_def_net_runs(self):
        for param, value in viewitems(self.params):
            workspace.FeedBlob(param, value)

        extra_init_net = core.Net('extra_init')
        extra_init_net.ConstantFill('data', 'data', value=1.0)

        global_init_net = core.Net('global_init')
        global_init_net.ConstantFill(
            [],
            'global_init_blob',
            value=1.0,
            shape=[1, 5],
            dtype=core.DataType.FLOAT
        )
        pem = pe.PredictorExportMeta(
            predict_net=self.predictor_export_meta.predict_net,
            parameters=self.predictor_export_meta.parameters,
            inputs=self.predictor_export_meta.inputs,
            outputs=self.predictor_export_meta.outputs,
            shapes=self.predictor_export_meta.shapes,
            extra_init_net=extra_init_net,
            global_init_net=global_init_net,
            net_type='dag',
        )

        db_type = 'minidb'
        db_file = tempfile.NamedTemporaryFile(
            delete=False, suffix=".{}".format(db_type))
        pe.save_to_db(
            db_type=db_type,
            db_destination=db_file.name,
            predictor_export_meta=pem)

        workspace.ResetWorkspace()

        meta_net_def = pe.load_from_db(
            db_type=db_type,
            filename=db_file.name,
        )

        self.assertTrue("data" not in workspace.Blobs())
        self.assertTrue("y" not in workspace.Blobs())

        init_net = pred_utils.GetNet(meta_net_def, pc.PREDICT_INIT_NET_TYPE)

        # 0-fills external blobs and runs extra_init_net
        workspace.RunNetOnce(init_net)

        self.assertTrue("data" in workspace.Blobs())
        self.assertTrue("y" in workspace.Blobs())

        print(workspace.FetchBlob("data"))
        np.testing.assert_array_equal(
            workspace.FetchBlob("data"), np.ones(shape=(1, 5)))
        np.testing.assert_array_equal(
            workspace.FetchBlob("y"), np.zeros(shape=(1, 10)))

        self.assertTrue("global_init_blob" not in workspace.Blobs())
        # Load parameters from DB
        global_init_net = pred_utils.GetNet(meta_net_def,
                                            pc.GLOBAL_INIT_NET_TYPE)
        workspace.RunNetOnce(global_init_net)

        # make sure the extra global_init_net is running
        self.assertTrue(workspace.HasBlob('global_init_blob'))
        np.testing.assert_array_equal(
            workspace.FetchBlob("global_init_blob"), np.ones(shape=(1, 5)))

        # Run the net with a reshaped input and verify we are
        # producing good numbers (with our custom implementation)
        workspace.FeedBlob("data", np.random.randn(2, 5).astype(np.float32))
        predict_net = pred_utils.GetNet(meta_net_def, pc.PREDICT_NET_TYPE)
        self.assertEqual(predict_net.type, 'dag')
        workspace.RunNetOnce(predict_net)
        np.testing.assert_array_almost_equal(
            workspace.FetchBlob("y"),
            workspace.FetchBlob("data").dot(self.params["y_w"].T) +
            self.params["y_b"])
Example #12
    def __init__(self):
        self.classnames = [
            "background", "person", "crutches", "walking_frame", "wheelchair",
            "push_wheelchair"
        ]

        detectron_ops_lib = net_helper.get_detectron_ops_lib()
        dyndep.InitOpsLibrary(detectron_ops_lib)

        model_path = rospy.get_param("~model_path")
        self.fixed_frame = rospy.get_param('~fixed_frame', 'odom')
        self.tracking = rospy.get_param('~tracking', True)
        self.filter_detections = rospy.get_param('~filter_inside_boxes', True)
        self.inside_box_ratio = rospy.get_param('~inside_box_ratio', 0.8)
        camera_topic = rospy.get_param('~camera_topic',
                                       '/camera/color/image_raw')
        camera_info_topic = rospy.get_param('~camera_info_topic',
                                            '/camera/color/camera_info')

        self.net = caffe2_pb2.NetDef()
        with open(os.path.join(model_path, "model.pb"), "rb") as f:
            self.net.ParseFromString(f.read())

        self.init_net = caffe2_pb2.NetDef()
        with open(os.path.join(model_path, "model_init.pb"), "rb") as f:
            self.init_net.ParseFromString(f.read())

        workspace.ResetWorkspace()
        workspace.RunNetOnce(self.init_net)
        for op in self.net.op:
            for blob_in in op.input:
                if not workspace.HasBlob(blob_in):
                    workspace.CreateBlob(blob_in)
        workspace.CreateNet(self.net)

        # initialize subscribers
        rospy.Subscriber(camera_topic,
                         Image,
                         self.image_callback,
                         queue_size=1)
        rospy.Subscriber(camera_info_topic,
                         CameraInfo,
                         self.cam_info_callback,
                         queue_size=1)

        # image queues
        self.last_received_image = None  # set from image topic
        self.last_processed_image = None  # set from image topic
        self.new_image = False

        self.cam_calib = None  # set from camera info
        self.camera_frame = None  # set from camera info

        bridge = CvBridge()
        self.publisher = Publisher(self.classnames, bridge)
        observation_model = np.loadtxt(os.path.join(model_path,
                                                    "observation_model.txt"),
                                       delimiter=',')
        ekf_sensor_noise = np.loadtxt(os.path.join(model_path, "meas_cov.txt"),
                                      delimiter=',')
        self.tracker = Tracker(ekf_sensor_noise,
                               observation_model,
                               use_hmm=True)
        self.tfl = tf.TransformListener()
        self.image_handler = ImageHandler(bridge, 540, 960)
        Server(TrackingParamsConfig, self.reconfigure_callback)
        thresholds = {}
        with open(os.path.join(model_path, "AP_thresholds.txt")) as f:
            for line in f:
                (key, val) = line.split(',')
                thresholds[key] = float(val)
        self.cla_thresholds = thresholds