Example #1
def SaveModel(args, train_model, epoch):
    prefix = "[]_{}".format(train_model._device_prefix, train_model._devices[0])
    predictor_export_meta = pred_exp.PredictorExportMeta(
        predict_net=train_model.net.Proto(),
        parameters=data_parallel_model.GetCheckpointParams(train_model),
        inputs=[prefix + "/data"],
        outputs=[prefix + "/softmax"],
        shapes={
            prefix + "/softmax": (1, args.num_labels),
            prefix + "/data": (args.num_channels, args.image_size, args.image_size)
        }
    )

    # save the train_model for the current epoch
    model_path = "%s/%s_%d.mdl" % (
        args.file_store_path,
        args.save_model_name,
        epoch,
    )

    # set db_type to be "minidb" instead of "log_file_db", which breaks
    # the serialization in save_to_db. Need to switch back to log_file_db
    # after migration
    pred_exp.save_to_db(
        db_type="minidb",
        db_destination=model_path,
        predictor_export_meta=predictor_export_meta,
    )
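
A model saved this way can be read back with the same module; a minimal load-back sketch, assuming the epoch passed to SaveModel was 0 and that pred_exp is caffe2.python.predictor.predictor_exporter:

from caffe2.python import workspace
from caffe2.python.predictor import predictor_exporter as pred_exp

# path produced by SaveModel above (epoch 0 assumed)
model_path = "%s/%s_%d.mdl" % (args.file_store_path, args.save_model_name, 0)
# prepare_prediction_net runs the stored init nets and returns the predict net
predict_net = pred_exp.prepare_prediction_net(model_path, "minidb")
# feed the "<prefix>/data" input blob, then run:
# workspace.FeedBlob(prefix + "/data", input_array)
# workspace.RunNet(predict_net)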
Example #2
def SaveModel(args, train_model, epoch, use_ideep):
    prefix = "[]_{}".format(train_model._device_prefix,
                            train_model._devices[0])
    predictor_export_meta = pred_exp.PredictorExportMeta(
        predict_net=train_model.net.Proto(),
        parameters=data_parallel_model.GetCheckpointParams(train_model),
        inputs=[prefix + "/data"],
        outputs=[prefix + "/softmax"],
        shapes={
            prefix + "/softmax": (1, args.num_labels),
            prefix + "/data":
            (args.num_channels, args.image_size, args.image_size)
        })

    # save the train_model for the current epoch
    model_path = "%s/%s_%d.mdl" % (
        args.file_store_path,
        args.save_model_name,
        epoch,
    )

    # set db_type to be "minidb" instead of "log_file_db", which breaks
    # the serialization in save_to_db. Need to switch back to log_file_db
    # after migration
    pred_exp.save_to_db(db_type="minidb",
                        db_destination=model_path,
                        predictor_export_meta=predictor_export_meta,
                        use_ideep=use_ideep)
Example #3
def SaveModel(args, train_model, epoch):
    prefix = "gpu_{}".format(train_model._devices[0])
    predictor_export_meta = pred_exp.PredictorExportMeta(
        predict_net=train_model.net.Proto(),
        parameters=GetCheckpointParams(train_model),
        inputs=[prefix + "/data"],
        outputs=[prefix + "/softmax"],
        shapes={
            prefix + "/softmax": (1, args.num_labels),
            prefix + "/data":
            (args.num_channels,
             args.clip_length_of if args.input_type else args.clip_length_rgb,
             args.crop_size, args.crop_size)
        })

    # save the train_model for the current epoch
    model_path = "%s/%s_%d.mdl" % (
        args.file_store_path,
        args.model_name,
        epoch,
    )

    # save the model
    pred_exp.save_to_db(
        db_type='minidb',
        db_destination=model_path,
        predictor_export_meta=predictor_export_meta,
    )
Example #4
def save_pytorch_model_and_load_c2_net(model):
    pem, ws = model.get_predictor_export_meta_and_workspace()
    with tempfile.TemporaryDirectory() as tmpdirname:
        db_path, db_type = os.path.join(tmpdirname, "model"), "minidb"
        with ws._ctx:
            save_to_db(db_type, db_path, pem)
        net = prepare_prediction_net(db_path, db_type)
    return net
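
The net returned above is ready to run once its input blobs are fed; a hedged usage sketch, where the blob names and input data are placeholders:

import numpy as np
from caffe2.python import workspace

net = save_pytorch_model_and_load_c2_net(model)
# "input"/"output" are hypothetical; use the model's real blob names
workspace.FeedBlob("input", np.zeros((1, 4), dtype=np.float32))
workspace.RunNet(net)
result = workspace.FetchBlob("output")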
Example #5
    def test_db_fails_without_params(self):
        with self.assertRaises(Exception):
            for db_type in ["minidb"]:
                db_file = tempfile.NamedTemporaryFile(
                    delete=False, suffix=".{}".format(db_type))
                pe.save_to_db(db_type=db_type,
                              db_destination=db_file.name,
                              predictor_export_meta=self.predictor_export_meta)
Example #6
def save_caffe2_rep_to_db(
    caffe2_backend_rep,
    output_path,
    input_names,
    output_names,
    num_workers,
):
    # netdef external_input includes internally produced blobs
    actual_external_inputs = set()
    produced = set()
    for operator in caffe2_backend_rep.predict_net.op:
        for blob in operator.input:
            if blob not in produced:
                actual_external_inputs.add(blob)
        for blob in operator.output:
            produced.add(blob)
    for blob in output_names:
        if blob not in produced:
            actual_external_inputs.add(blob)

    param_names = [
        blob for blob in actual_external_inputs
        if blob not in input_names
    ]

    init_net = core.Net(caffe2_backend_rep.init_net)
    predict_net = core.Net(caffe2_backend_rep.predict_net)

    # predictor_exporter requires disjoint params, inputs and outputs
    for i, param in enumerate(param_names):
        if param in output_names:
            saved_name = param + '_PARAM'
            init_net.Copy(param, saved_name)
            predict_net.Copy(saved_name, param)
            param_names[i] = saved_name

    output_shapes = {}
    for blob in output_names:
        output_shapes[blob] = (0,)

    # Required because of https://github.com/pytorch/pytorch/pull/6456/files
    with caffe2_backend_rep.workspace._ctx:
        workspace.RunNetOnce(init_net)
        predictor_export_meta = predictor_exporter.PredictorExportMeta(
            predict_net=predict_net,
            parameters=param_names,
            inputs=input_names,
            outputs=output_names,
            shapes=output_shapes,
            net_type='dag',
            num_workers=num_workers,
        )
        predictor_exporter.save_to_db(
            db_type='minidb',
            db_destination=output_path,
            predictor_export_meta=predictor_export_meta,
        )
    logger.info('Caffe2 predictor net saved as: {}'.format(output_path))
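
One plausible call site, assuming the backend rep comes from caffe2.python.onnx.backend.prepare; the file name, blob names, and worker count here are illustrative:

import onnx
import caffe2.python.onnx.backend as onnx_caffe2_backend

onnx_model = onnx.load("model.onnx")  # hypothetical ONNX file
rep = onnx_caffe2_backend.prepare(onnx_model)
save_caffe2_rep_to_db(
    caffe2_backend_rep=rep,
    output_path="/tmp/model.minidb",
    input_names=["input"],
    output_names=["output"],
    num_workers=4,
)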
Example #7
    def test_db_fails_without_params(self):
        with self.assertRaises(Exception):
            for db_type in ["minidb"]:
                db_file = tempfile.NamedTemporaryFile(
                    delete=False, suffix=".{}".format(db_type))
                pe.save_to_db(
                    db_type=db_type,
                    db_destination=db_file.name,
                    predictor_export_meta=self.predictor_export_meta)
Example #8
    def save(self, db_path, db_type):
        """ Saves network to db

        :param db_path: see save_to_db
        :param db_type: see save_to_db
        """
        previous_workspace = workspace.CurrentWorkspace()
        workspace.SwitchWorkspace(self._workspace_id)
        meta = self.get_predictor_export_meta()
        for parameter in self._parameters:
            if np.any(np.isnan(workspace.FetchBlob(parameter))):
                logger.info("WARNING: parameter {} is nan".format(parameter))
        save_to_db(db_type, db_path, meta)
        workspace.SwitchWorkspace(previous_workspace)
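
A hypothetical call on an owning instance (the predictor name is illustrative):

predictor.save("/tmp/model.minidb", "minidb")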
Example #9
    def save(self, db_path, db_type):
        """ Saves network to db

        :param db_path: see save_to_db
        :param db_type: see save_to_db
        """
        meta = self.get_predictor_export_meta()
        for parameter in self._parameters:
            parameter_data = workspace.FetchBlob(parameter)
            if parameter_data.dtype.kind in {"U", "S", "O"}:
                continue  # Don't bother checking string blobs for nan
            if np.any(np.isnan(parameter_data)):
                logger.info("WARNING: parameter {} is nan".format(parameter))
        save_to_db(db_type, db_path, meta)
Example #10
    def save(self, db_path, db_type):
        """ Saves network to db

        :param db_path: see save_to_db
        :param db_type: see save_to_db
        """
        meta = self.get_predictor_export_meta()
        for parameter in self._parameters:
            parameter_data = workspace.FetchBlob(parameter)
            if parameter_data.dtype.kind in {"U", "S", "O"}:
                continue  # Don't bother checking string blobs for nan
            if np.any(np.isnan(parameter_data)):
                logger.info("WARNING: parameter {} is nan".format(parameter))
        save_to_db(db_type, db_path, meta)
Example #11
    def _dummy_model_copy(self, model_name, path):
        net = core.Net(model_name)
        C2.set_net(net)
        inp = 'Input'
        output = 'Output'
        workspace.FeedBlob(inp, np.array([1.0]))
        workspace.FeedBlob(output, np.array([1.0]))
        net.Copy([inp], [output])
        meta = PredictorExportMeta(
            net,
            [],
            [inp],
            [output],
        )
        save_to_db('minidb', path, meta)
Example #12
def save_sum_deterministic_policy(model_names, path, db_type):
    net = core.Net("DeterministicPolicy")
    C2.set_net(net)
    output = "ActionProbabilities"
    workspace.FeedBlob(output, np.array([1.0]))
    model_outputs = []
    for model in model_names:
        model_output = "{}_Output".format(model)
        workspace.FeedBlob(model_output, np.array([[1.0]], dtype=np.float32))
        model_outputs.append(model_output)
    max_action = C2.FlattenToVec(C2.ArgMax(C2.Transpose(C2.Sum(*model_outputs))))
    one_blob = C2.NextBlob("one")
    workspace.FeedBlob(one_blob, np.array([1.0], dtype=np.float32))
    C2.net().SparseToDense([max_action, one_blob, model_outputs[0]], [output])
    meta = PredictorExportMeta(net, [one_blob], model_outputs, [output])
    save_to_db(db_type, path, meta)
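
A minimal invocation sketch with hypothetical model names; the function itself feeds the "<name>_Output" blobs it combines:

save_sum_deterministic_policy(["model_a", "model_b"], "/tmp/policy.minidb", "minidb")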
Example #13
def check_save_load(
    self,
    model,
    expected_num_params,
    expected_num_inputs,
    expected_num_outputs,
    check_equality=True,
):
    pem, ws = model.get_predictor_export_meta_and_workspace()
    self.assertEqual(expected_num_params, len(pem.parameters))
    for p in pem.parameters:
        self.assertTrue(ws.HasBlob(p))
    self.assertEqual(expected_num_inputs, len(pem.inputs))
    self.assertEqual(expected_num_outputs, len(pem.outputs))

    input_prototype = model.input_prototype()

    with tempfile.TemporaryDirectory() as tmpdirname:
        db_path = os.path.join(tmpdirname, "model")
        logger.info("DB path: ", db_path)
        db_type = "minidb"
        with ws._ctx:
            save_to_db(db_type, db_path, pem)

        # Load the model from DB file and run it
        net = prepare_prediction_net(db_path, db_type)

        input_tensors = _flatten_named_tuple(input_prototype)
        input_names = model.input_blob_names()
        self.assertEqual(len(input_tensors), len(input_names))

        for name, tensor in zip(input_names, input_tensors):
            workspace.FeedBlob(name, tensor.numpy())

        workspace.RunNet(net)

        output_arrays = [
            workspace.FetchBlob(b) for b in model.output_blob_names()
        ]
        output = model(input_prototype)
        output_tensors = _flatten_named_tuple(output)
        self.assertEqual(len(output_arrays), len(output_tensors))
        if check_equality:
            for a, t in zip(output_arrays, output_tensors):
                # FIXME: PyTorch and Caffe2 have slightly different operator implementations;
                # assert_array_equal would fail in some cases :(
                npt.assert_allclose(t.detach().numpy(), a, atol=1e-6)
Example #14
def export_nets_to_predictor_file(c2_prepared,
                                  input_names,
                                  output_names,
                                  predictor_path,
                                  extra_params=None):
    # netdef external_input includes internally produced blobs
    actual_external_inputs = set()
    produced = set()
    for operator in c2_prepared.predict_net.op:
        for blob in operator.input:
            if blob not in produced:
                actual_external_inputs.add(blob)
        for blob in operator.output:
            produced.add(blob)
    for blob in output_names:
        if blob not in produced:
            actual_external_inputs.add(blob)

    param_names = [
        blob for blob in actual_external_inputs
        if blob not in input_names and blob not in output_names
    ]
    if extra_params is not None:
        param_names += extra_params

    init_net = core.Net(c2_prepared.init_net)
    predict_net = core.Net(c2_prepared.predict_net)

    # Required because of https://github.com/pytorch/pytorch/pull/6456/files
    with c2_prepared.workspace._ctx:
        workspace.RunNetOnce(init_net)
        predictor_export_meta = pe.PredictorExportMeta(
            predict_net=predict_net,
            parameters=param_names,
            inputs=input_names,
            outputs=output_names,
            shapes={x: ()
                    for x in input_names + output_names},
            net_type="simple",
        )
        pe.save_to_db(
            db_type=CAFFE2_DB_TYPE,
            db_destination=predictor_path,
            predictor_export_meta=predictor_export_meta,
        )
Example #15
    def save(self, db_path, db_type):
        """ Saves network to db

        :param db_path: see save_to_db
        :param db_type: see save_to_db
        """
        previous_workspace = workspace.CurrentWorkspace()
        workspace.SwitchWorkspace(self._workspace_id)
        meta = self.get_predictor_export_meta()
        for parameter in self._parameters:
            parameter_data = workspace.FetchBlob(parameter)
            logger.info("DATA TYPE " + parameter_data.dtype.kind)
            if parameter_data.dtype.kind in {'U', 'S', 'O'}:
                continue  # Don't bother checking string blobs for nan
            logger.info("Checking parameter {} for nan".format(parameter))
            if np.any(np.isnan(parameter_data)):
                logger.info("WARNING: parameter {} is nan".format(parameter))
        save_to_db(db_type, db_path, meta)
        workspace.SwitchWorkspace(previous_workspace)
Example #16
def check_save_load(
    self, model, expected_num_params, expected_num_inputs, expected_num_outputs
):
    pem, ws = model.get_predictor_export_meta_and_workspace()
    self.assertEqual(expected_num_params, len(pem.parameters))
    for p in pem.parameters:
        self.assertTrue(ws.HasBlob(p))
    self.assertEqual(expected_num_inputs, len(pem.inputs))
    self.assertEqual(expected_num_outputs, len(pem.outputs))

    input_prototype = model.input_prototype()

    with tempfile.TemporaryDirectory() as tmpdirname:
        db_path = os.path.join(tmpdirname, "model")
        logger.info("DB path: ", db_path)
        db_type = "minidb"
        with ws._ctx:
            save_to_db(db_type, db_path, pem)

        # Load the model from DB file and run it
        net = prepare_prediction_net(db_path, db_type)

        input_tensors = _flatten_named_tuple(input_prototype)
        input_names = model.input_blob_names()
        self.assertEqual(len(input_tensors), len(input_names))

        for name, tensor in zip(input_names, input_tensors):
            workspace.FeedBlob(name, tensor.numpy())

        workspace.RunNet(net)

        output_arrays = [workspace.FetchBlob(b) for b in model.output_blob_names()]
        output = model(input_prototype)
        output_tensors = _flatten_named_tuple(output)
        self.assertEqual(len(output_arrays), len(output_tensors))
        for a, t in zip(output_arrays, output_tensors):
            # FIXME: PyTorch and Caffe2 have slightly different operator implementations;
            # assert_array_equal would fail in some cases :(
            npt.assert_allclose(t.detach().numpy(), a, atol=1e-6)
Example #17
def SaveModel(train_model, save_dir, epoch):
    predictor_export_meta = pred_exp.PredictorExportMeta(
        predict_net=train_model.net.Proto(),
        parameters=GetCheckpointParams(train_model),
        inputs=['data_uint8'],
        outputs=['softmax'],
        shapes={
            'data': (1, 9600),
            'softmax': (1, 40)
        })

    model_path = '%s/%s_%d.mdl' % (
        save_dir,
        train_model.net.Proto().name,
        epoch,
    )

    pred_exp.save_to_db(
        db_type='minidb',
        db_destination=model_path,
        predictor_export_meta=predictor_export_meta,
    )
Example #18
    def test_get_predictor_export_meta_and_workspace(self):
        model = Model()
        pem, ws = model.get_predictor_export_meta_and_workspace()
        self.assertEqual(3, len(pem.parameters))  # 2 params + 1 const
        for p in pem.parameters:
            self.assertTrue(ws.HasBlob(p))
        self.assertEqual(2, len(pem.inputs))
        self.assertEqual(4, len(pem.outputs))

        input_prototype = model.input_prototype()

        with tempfile.TemporaryDirectory() as tmpdirname:
            db_path = os.path.join(tmpdirname, "model")
            logger.info("DB path: ", db_path)
            db_type = "minidb"
            with ws._ctx:
                save_to_db(db_type, db_path, pem)

            # Load the model from DB file and run it
            net = prepare_prediction_net(db_path, db_type)

            state_features = input_prototype.state.float_features.numpy()
            action_features = input_prototype.action.float_features.numpy()
            workspace.FeedBlob("state:float_features", state_features)
            workspace.FeedBlob("action:float_features", action_features)
            workspace.RunNet(net)
            net_sum = workspace.FetchBlob("sum")
            net_mul = workspace.FetchBlob("mul")
            net_plus_one = workspace.FetchBlob("plus_one")
            net_linear = workspace.FetchBlob("linear")

            model_sum, model_mul, model_plus_one, model_linear = model(
                input_prototype)

            npt.assert_array_equal(model_sum.numpy(), net_sum)
            npt.assert_array_equal(model_mul.numpy(), net_mul)
            npt.assert_array_equal(model_plus_one.numpy(), net_plus_one)
            npt.assert_array_equal(model_linear.detach().numpy(), net_linear)
Example #19
    def test_load_device_scope(self):
        for param, value in self.params.items():
            workspace.FeedBlob(param, value)

        pem = pe.PredictorExportMeta(
            predict_net=self.predictor_export_meta.predict_net,
            parameters=self.predictor_export_meta.parameters,
            inputs=self.predictor_export_meta.inputs,
            outputs=self.predictor_export_meta.outputs,
            shapes=self.predictor_export_meta.shapes,
            net_type='dag',
        )

        db_type = 'minidb'
        db_file = tempfile.NamedTemporaryFile(delete=False,
                                              suffix=".{}".format(db_type))
        pe.save_to_db(db_type=db_type,
                      db_destination=db_file.name,
                      predictor_export_meta=pem)

        workspace.ResetWorkspace()
        with core.DeviceScope(core.DeviceOption(caffe2_pb2.CPU, 1)):
            meta_net_def = pe.load_from_db(
                db_type=db_type,
                filename=db_file.name,
            )

        init_net = core.Net(
            pred_utils.GetNet(meta_net_def, pc.GLOBAL_INIT_NET_TYPE))
        predict_init_net = core.Net(
            pred_utils.GetNet(meta_net_def, pc.PREDICT_INIT_NET_TYPE))

        # check device options
        for op in list(init_net.Proto().op) + list(
                predict_init_net.Proto().op):
            self.assertEqual(1, op.device_option.device_id)
            self.assertEqual(caffe2_pb2.CPU, op.device_option.device_type)
Example #20
    def test_load_device_scope(self):
        for param, value in self.params.items():
            workspace.FeedBlob(param, value)

        pem = pe.PredictorExportMeta(
            predict_net=self.predictor_export_meta.predict_net,
            parameters=self.predictor_export_meta.parameters,
            inputs=self.predictor_export_meta.inputs,
            outputs=self.predictor_export_meta.outputs,
            shapes=self.predictor_export_meta.shapes,
            net_type='dag',
        )

        db_type = 'minidb'
        db_file = tempfile.NamedTemporaryFile(
            delete=False, suffix=".{}".format(db_type))
        pe.save_to_db(
            db_type=db_type,
            db_destination=db_file.name,
            predictor_export_meta=pem)

        workspace.ResetWorkspace()
        with core.DeviceScope(core.DeviceOption(caffe2_pb2.CPU, 1)):
            meta_net_def = pe.load_from_db(
                db_type=db_type,
                filename=db_file.name,
            )

        init_net = core.Net(pred_utils.GetNet(meta_net_def,
                            pc.GLOBAL_INIT_NET_TYPE))
        predict_init_net = core.Net(pred_utils.GetNet(
            meta_net_def, pc.PREDICT_INIT_NET_TYPE))

        # check device options
        for op in list(init_net.Proto().op) + list(predict_init_net.Proto().op):
            self.assertEqual(1, op.device_option.cuda_gpu_id)
            self.assertEqual(caffe2_pb2.CPU, op.device_option.device_type)
Example #21
    def test_meta_net_def_net_runs(self):
        for param, value in viewitems(self.params):
            workspace.FeedBlob(param, value)

        extra_init_net = core.Net('extra_init')
        extra_init_net.ConstantFill('data', 'data', value=1.0)
        pem = pe.PredictorExportMeta(
            predict_net=self.predictor_export_meta.predict_net,
            parameters=self.predictor_export_meta.parameters,
            inputs=self.predictor_export_meta.inputs,
            outputs=self.predictor_export_meta.outputs,
            shapes=self.predictor_export_meta.shapes,
            extra_init_net=extra_init_net,
            net_type='dag',
        )

        db_type = 'minidb'
        db_file = tempfile.NamedTemporaryFile(delete=False,
                                              suffix=".{}".format(db_type))
        pe.save_to_db(db_type=db_type,
                      db_destination=db_file.name,
                      predictor_export_meta=pem)

        workspace.ResetWorkspace()

        meta_net_def = pe.load_from_db(
            db_type=db_type,
            filename=db_file.name,
        )

        self.assertTrue("data" not in workspace.Blobs())
        self.assertTrue("y" not in workspace.Blobs())

        init_net = pred_utils.GetNet(meta_net_def, pc.PREDICT_INIT_NET_TYPE)

        # 0-fills external input blobs and runs extra_init_net
        workspace.RunNetOnce(init_net)

        self.assertTrue("data" in workspace.Blobs())
        self.assertTrue("y" in workspace.Blobs())

        print(workspace.FetchBlob("data"))
        np.testing.assert_array_equal(workspace.FetchBlob("data"),
                                      np.ones(shape=(1, 5)))
        np.testing.assert_array_equal(workspace.FetchBlob("y"),
                                      np.zeros(shape=(1, 10)))

        # Load parameters from DB
        global_init_net = pred_utils.GetNet(meta_net_def,
                                            pc.GLOBAL_INIT_NET_TYPE)
        workspace.RunNetOnce(global_init_net)

        # Run the net with a reshaped input and verify we are
        # producing good numbers (with our custom implementation)
        workspace.FeedBlob("data", np.random.randn(2, 5).astype(np.float32))
        predict_net = pred_utils.GetNet(meta_net_def, pc.PREDICT_NET_TYPE)
        self.assertEqual(predict_net.type, 'dag')
        workspace.RunNetOnce(predict_net)
        np.testing.assert_array_almost_equal(
            workspace.FetchBlob("y"),
            workspace.FetchBlob("data").dot(self.params["y_w"].T) +
            self.params["y_b"])
Example #22
    def save(self, db_path, db_type):
        # The workspace here is expected to be the Workspace class from ONNX
        with self.ws._ctx:
            save_to_db(db_type, db_path, self.pem)
Example #23
    def test_get_predictor_export_meta_and_workspace_with_feature_extractor(
            self):
        model = Model()

        state_normalization_parameters = {
            i: NormalizationParameters(feature_type=CONTINUOUS)
            for i in range(1, 5)
        }
        action_normalization_parameters = {
            i: NormalizationParameters(feature_type=CONTINUOUS)
            for i in range(5, 9)
        }

        extractor = PredictorFeatureExtractor(
            state_normalization_parameters=state_normalization_parameters,
            action_normalization_parameters=action_normalization_parameters,
            normalize=False,
        )

        pem, ws = model.get_predictor_export_meta_and_workspace(
            feature_extractor=extractor)
        # model has 2 params + 1 const. extractor has 1 const.
        self.assertEqual(4, len(pem.parameters))
        for p in pem.parameters:
            self.assertTrue(ws.HasBlob(p))
        self.assertEqual(3, len(pem.inputs))
        self.assertEqual(4, len(pem.outputs))

        input_prototype = model.input_prototype()

        with tempfile.TemporaryDirectory() as tmpdirname:
            db_path = os.path.join(tmpdirname, "model")
            logger.info("DB path: ", db_path)
            db_type = "minidb"
            with ws._ctx:
                save_to_db(db_type, db_path, pem)

            # Load the model from DB file and run it
            net = prepare_prediction_net(db_path, db_type)

            state_features = input_prototype.state.float_features
            action_features = input_prototype.action.float_features
            float_features_values = (torch.cat(
                (state_features, action_features), dim=1).reshape(-1).numpy())
            float_features_keys = np.arange(1, 9)
            float_features_lengths = np.array([8], dtype=np.int32)

            workspace.FeedBlob("input/float_features.keys",
                               float_features_keys)
            workspace.FeedBlob("input/float_features.values",
                               float_features_values)
            workspace.FeedBlob("input/float_features.lengths",
                               float_features_lengths)

            workspace.RunNet(net)
            net_sum = workspace.FetchBlob("sum")
            net_mul = workspace.FetchBlob("mul")
            net_plus_one = workspace.FetchBlob("plus_one")
            net_linear = workspace.FetchBlob("linear")

            model_sum, model_mul, model_plus_one, model_linear = model(
                input_prototype)

            npt.assert_array_equal(model_sum.numpy(), net_sum)
            npt.assert_array_equal(model_mul.numpy(), net_mul)
            npt.assert_array_equal(model_plus_one.numpy(), net_plus_one)
            npt.assert_allclose(model_linear.detach().numpy(),
                                net_linear,
                                rtol=1e-4)
Example #24
    def test_get_predictor_export_meta_and_workspace_full(self):
        model = Model()

        state_normalization_parameters = {
            i: NormalizationParameters(feature_type=CONTINUOUS)
            for i in range(1, 5)
        }
        action_normalization_parameters = {
            i: NormalizationParameters(feature_type=CONTINUOUS)
            for i in range(5, 9)
        }

        extractor = PredictorFeatureExtractor(
            state_normalization_parameters=state_normalization_parameters,
            action_normalization_parameters=action_normalization_parameters,
            normalize=False,
        )
        output_transformer = TestOutputTransformer()

        pem, ws = model.get_predictor_export_meta_and_workspace(
            feature_extractor=extractor, output_transformer=output_transformer)
        # model has 2 params + 1 const. extractor has 1 const. output_transformer has 1 const.
        self.assertEqual(5, len(pem.parameters))
        for p in pem.parameters:
            self.assertTrue(ws.HasBlob(p))
        self.assertEqual(3, len(pem.inputs))
        self.assertEqual(5, len(pem.outputs))
        self.assertEqual(
            {
                "output/string_weighted_multi_categorical_features.lengths",
                "output/string_weighted_multi_categorical_features.keys",
                "output/string_weighted_multi_categorical_features.values.lengths",
                "output/string_weighted_multi_categorical_features.values.keys",
                "output/string_weighted_multi_categorical_features.values.values",
            },
            set(pem.outputs),
        )

        input_prototype = model.input_prototype()

        with tempfile.TemporaryDirectory() as tmpdirname:
            db_path = os.path.join(tmpdirname, "model")
            logger.info("DB path: {}".format(db_path))
            db_type = "minidb"
            with ws._ctx:
                save_to_db(db_type, db_path, pem)

            # Load the model from DB file and run it
            net = prepare_prediction_net(db_path, db_type)

            state_features = input_prototype.state.float_features
            action_features = input_prototype.action.float_features
            float_features_values = (torch.cat(
                (state_features, action_features), dim=1).reshape(-1).numpy())
            float_features_keys = np.arange(1, 9)
            float_features_lengths = np.array([8], dtype=np.int32)

            workspace.FeedBlob("input/float_features.keys",
                               float_features_keys)
            workspace.FeedBlob("input/float_features.values",
                               float_features_values)
            workspace.FeedBlob("input/float_features.lengths",
                               float_features_lengths)

            workspace.RunNet(net)

            model_sum, model_mul, model_plus_one, model_linear = model(
                input_prototype)

            lengths = workspace.FetchBlob(
                "output/string_weighted_multi_categorical_features.lengths")
            keys = workspace.FetchBlob(
                "output/string_weighted_multi_categorical_features.keys")
            values_lengths = workspace.FetchBlob(
                "output/string_weighted_multi_categorical_features.values.lengths"
            )
            values_keys = workspace.FetchBlob(
                "output/string_weighted_multi_categorical_features.values.keys"
            )
            values_values = workspace.FetchBlob(
                "output/string_weighted_multi_categorical_features.values.values"
            )

            N = 1
            npt.assert_array_equal(np.ones(N, dtype=np.int32), lengths)
            npt.assert_array_equal(np.zeros(N, dtype=np.int64), keys)
            npt.assert_array_equal([1] * N, values_lengths)
            npt.assert_array_equal(np.array([b"TestAction"], dtype=np.object),
                                   values_keys)
            npt.assert_array_equal(model_linear.detach().numpy().reshape(-1),
                                   values_values)
Example #25
# Let's save the deploy model with the trained weights and biases to a file. 

# In[18]:


# construct the model to be exported
# the inputs/outputs of the model are manually specified.
pe_meta = pe.PredictorExportMeta(
    predict_net=deploy_model.net.Proto(),
    parameters=[str(b) for b in deploy_model.params], 
    inputs=["data"],
    outputs=["softmax"],
)

# save the model to a file. Use minidb as the file format
pe.save_to_db("minidb", os.path.join(root_folder, "mnist_model.minidb"), pe_meta)
print("The deploy model is saved to: " + root_folder + "/mnist_model.minidb")


# Now we can load the model back and run the prediction to verify it works.

# In[19]:


# we retrieve the last input data and use it in our prediction test before we scratch the workspace
blob = workspace.FetchBlob("data")
pyplot.figure()
_ = visualize.NCHW.ShowMultiple(blob)

# reset the workspace, to make sure the model is actually loaded
workspace.ResetWorkspace(root_folder)
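
The load-and-verify step announced above would then look roughly like the pattern in Examples #28 and #31:

# load the predict net back from the minidb file and rerun the saved batch
predict_net = pe.prepare_prediction_net(
    os.path.join(root_folder, "mnist_model.minidb"), "minidb")
workspace.FeedBlob("data", blob)
workspace.RunNetOnce(predict_net)
softmax = workspace.FetchBlob("softmax")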
Example #26
    def save(self, db_path, db_type):
        # The workspace here is expected to be the Workspace class from ONNX
        with self.ws._ctx:
            save_to_db(db_type, db_path, self.pem)
Example #27
    def test_get_predictor_export_meta_and_workspace_with_feature_extractor(self):
        model = Model()

        state_normalization_parameters = {
            i: NormalizationParameters(feature_type=CONTINUOUS) for i in range(1, 5)
        }
        action_normalization_parameters = {
            i: NormalizationParameters(feature_type=CONTINUOUS) for i in range(5, 9)
        }

        extractor = PredictorFeatureExtractor(
            state_normalization_parameters=state_normalization_parameters,
            action_normalization_parameters=action_normalization_parameters,
            normalize=False,
        )

        pem, ws = model.get_predictor_export_meta_and_workspace(
            feature_extractor=extractor
        )
        # model has 2 params + 1 const. extractor has 1 const.
        self.assertEqual(4, len(pem.parameters))
        for p in pem.parameters:
            self.assertTrue(ws.HasBlob(p))
        self.assertEqual(3, len(pem.inputs))
        self.assertEqual(4, len(pem.outputs))

        input_prototype = model.input_prototype()

        with tempfile.TemporaryDirectory() as tmpdirname:
            db_path = os.path.join(tmpdirname, "model")
            logger.info("DB path: ", db_path)
            db_type = "minidb"
            with ws._ctx:
                save_to_db(db_type, db_path, pem)

            # Load the model from DB file and run it
            net = prepare_prediction_net(db_path, db_type)

            state_features = input_prototype.state.float_features
            action_features = input_prototype.action.float_features
            float_features_values = (
                torch.cat((state_features, action_features), dim=1).reshape(-1).numpy()
            )
            float_features_keys = np.arange(1, 9)
            float_features_lengths = np.array([8], dtype=np.int32)

            workspace.FeedBlob("input/float_features.keys", float_features_keys)
            workspace.FeedBlob("input/float_features.values", float_features_values)
            workspace.FeedBlob("input/float_features.lengths", float_features_lengths)

            workspace.RunNet(net)
            net_sum = workspace.FetchBlob("sum")
            net_mul = workspace.FetchBlob("mul")
            net_plus_one = workspace.FetchBlob("plus_one")
            net_linear = workspace.FetchBlob("linear")

            model_sum, model_mul, model_plus_one, model_linear = model(input_prototype)

            npt.assert_array_equal(model_sum.numpy(), net_sum)
            npt.assert_array_equal(model_mul.numpy(), net_mul)
            npt.assert_array_equal(model_plus_one.numpy(), net_plus_one)
            npt.assert_allclose(model_linear.detach().numpy(), net_linear, rtol=1e-4)
Example #28
test_accuracy = np.zeros(100)
for i in range(100):
    # Run a forward pass of the net on the current batch
    workspace.RunNet(test_model.net)
    # Collect the batch accuracy from the workspace
    test_accuracy[i] = workspace.FetchBlob('accuracy')

pe_meta = pe.PredictorExportMeta(
    predict_net=deploy_model.net.Proto(),
    parameters=[str(b) for b in deploy_model.params],
    inputs=["data"],
    outputs=["softmax"],
)

# save the model to a file. Use minidb as the file format
pe.save_to_db("minidb", os.path.join(sys.argv[2], "mnist_model.minidb"),
              pe_meta)
print("Deploy model saved to: " + sys.argv[2] + "/mnist_model.minidb")

# Grab and display the last data batch used before we scratch the workspace. This is purely for our convenience...
blob = workspace.FetchBlob("data")

# reset the workspace, to make sure the model is actually loaded
workspace.ResetWorkspace(sys.argv[2])

# verify that all blobs from training are destroyed.
print("The blobs in the workspace after reset: {}".format(workspace.Blobs()))

# load the predict net
predict_net = pe.prepare_prediction_net(
    os.path.join(sys.argv[2], "mnist_model.minidb"), "minidb")
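
Running the loaded net then mirrors Example #31; a short sketch reusing the batch fetched above:

workspace.FeedBlob("data", blob)
workspace.RunNetOnce(predict_net)
softmax = workspace.FetchBlob("softmax")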
Example #29
    def test_get_predictor_export_meta_and_workspace_full(self):
        model = Model()

        state_normalization_parameters = {
            i: NormalizationParameters(feature_type=CONTINUOUS) for i in range(1, 5)
        }
        action_normalization_parameters = {
            i: NormalizationParameters(feature_type=CONTINUOUS) for i in range(5, 9)
        }

        extractor = PredictorFeatureExtractor(
            state_normalization_parameters=state_normalization_parameters,
            action_normalization_parameters=action_normalization_parameters,
            normalize=False,
        )
        output_transformer = TestOutputTransformer()

        pem, ws = model.get_predictor_export_meta_and_workspace(
            feature_extractor=extractor, output_transformer=output_transformer
        )
        # model has 2 params + 1 const. extractor has 1 const. output_transformer has 1 const.
        self.assertEqual(5, len(pem.parameters))
        for p in pem.parameters:
            self.assertTrue(ws.HasBlob(p))
        self.assertEqual(3, len(pem.inputs))
        self.assertEqual(5, len(pem.outputs))
        self.assertEqual(
            {
                "output/string_weighted_multi_categorical_features.lengths",
                "output/string_weighted_multi_categorical_features.keys",
                "output/string_weighted_multi_categorical_features.values.lengths",
                "output/string_weighted_multi_categorical_features.values.keys",
                "output/string_weighted_multi_categorical_features.values.values",
            },
            set(pem.outputs),
        )

        input_prototype = model.input_prototype()

        with tempfile.TemporaryDirectory() as tmpdirname:
            db_path = os.path.join(tmpdirname, "model")
            logger.info("DB path: {}".format(db_path))
            db_type = "minidb"
            with ws._ctx:
                save_to_db(db_type, db_path, pem)

            # Load the model from DB file and run it
            net = prepare_prediction_net(db_path, db_type)

            state_features = input_prototype.state.float_features
            action_features = input_prototype.action.float_features
            float_features_values = (
                torch.cat((state_features, action_features), dim=1).reshape(-1).numpy()
            )
            float_features_keys = np.arange(1, 9)
            float_features_lengths = np.array([8], dtype=np.int32)

            workspace.FeedBlob("input/float_features.keys", float_features_keys)
            workspace.FeedBlob("input/float_features.values", float_features_values)
            workspace.FeedBlob("input/float_features.lengths", float_features_lengths)

            workspace.RunNet(net)

            model_sum, model_mul, model_plus_one, model_linear = model(input_prototype)

            lengths = workspace.FetchBlob(
                "output/string_weighted_multi_categorical_features.lengths"
            )
            keys = workspace.FetchBlob(
                "output/string_weighted_multi_categorical_features.keys"
            )
            values_lengths = workspace.FetchBlob(
                "output/string_weighted_multi_categorical_features.values.lengths"
            )
            values_keys = workspace.FetchBlob(
                "output/string_weighted_multi_categorical_features.values.keys"
            )
            values_values = workspace.FetchBlob(
                "output/string_weighted_multi_categorical_features.values.values"
            )

            N = 1
            npt.assert_array_equal(np.ones(N, dtype=np.int32), lengths)
            npt.assert_array_equal(np.zeros(N, dtype=np.int64), keys)
            npt.assert_array_equal([1] * N, values_lengths)
            npt.assert_array_equal(
                np.array([b"TestAction"], dtype=np.object), values_keys
            )
            npt.assert_array_equal(
                model_linear.detach().numpy().reshape(-1), values_values
            )
Example #30
pyplot.figure()
softmax = workspace.FetchBlob('softmax')
_ = pyplot.plot(softmax[0], 'ro')
pyplot.title('Prediction for the first image')
pyplot.figure()
softmax = workspace.FetchBlob('softmax')
_ = pyplot.plot(softmax[1], 'ro')
pyplot.title('Prediction for the second image')
pyplot.figure()
softmax = workspace.FetchBlob('softmax')
_ = pyplot.plot(softmax[2], 'ro')
pyplot.title('Prediction for the third image')
pyplot.show()

# SAVE THE MODEL TO A FILE
'''
# construct the model to be exported
# the inputs/outputs of the model are manually specified.
pe_meta = pe.PredictorExportMeta(
    predict_net=deploy_model.net.Proto(),
    parameters=[str(b) for b in deploy_model.params],
    inputs=["data"],
    outputs=["softmax"],
)

# save the model to a file. Use minidb as the file format
pe.save_to_db("minidb", os.path.join(root_folder, "mnist_model.minidb"), pe_meta)
print("The deploy model is saved to: " + root_folder + "/mnist_model.minidb")
'''

# Get the model
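
Getting the model back would presumably follow the same pattern as Example #31 (a sketch only, since the save block above is commented out):

# feed an input batch first, e.g. workspace.FeedBlob("data", blob)
predict_net = pe.prepare_prediction_net(
    os.path.join(root_folder, "mnist_model.minidb"), "minidb")
workspace.RunNetOnce(predict_net)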
Example #31
    if i % 25 == 0:
        print("Iter: {}, loss: {}, accuracy: {}".format(
            i, loss[i], accuracy[i]))
plt.plot(loss, 'b')
plt.plot(accuracy, 'r')
plt.title("Summary of Training Run")
plt.xlabel("Iteration")
plt.legend(("Loss", "Accuracy"), loc="upper right")
plt.show()
pe_meta = pe.PredictorExportMeta(
    predict_net=deploy_model.net.Proto(),
    parameters=[str(b) for b in deploy_model.params],
    inputs=["data"],
    outputs=["softmax"],
)
pe.save_to_db("minidb", os.path.join(root_folder, "mnist_model.minidb"),
              pe_meta)
print("Deploy model saved to:" + root_folder + "/mnist_model.minidb")

blob = workspace.FetchBlob("data")
plt.figure()
plt.title("Batch of Testing Data")
_ = visualize.NCHW.ShowMultiple(blob)

workspace.ResetWorkspace(root_folder)
print("The blobs in the workspace after reset: {}".format(workspace.Blobs()))
predict_net = pe.prepare_prediction_net(
    os.path.join(root_folder, "mnist_model.minidb"), "minidb")
print("The blobs in the workspace after loading the model: {}".format(
    workspace.Blobs()))
workspace.FeedBlob("data", blob)
workspace.RunNetOnce(predict_net)
Example #32
    def test_meta_net_def_net_runs(self):
        for param, value in viewitems(self.params):
            workspace.FeedBlob(param, value)

        extra_init_net = core.Net('extra_init')
        extra_init_net.ConstantFill('data', 'data', value=1.0)
        pem = pe.PredictorExportMeta(
            predict_net=self.predictor_export_meta.predict_net,
            parameters=self.predictor_export_meta.parameters,
            inputs=self.predictor_export_meta.inputs,
            outputs=self.predictor_export_meta.outputs,
            shapes=self.predictor_export_meta.shapes,
            extra_init_net=extra_init_net,
            net_type='dag',
        )

        db_type = 'minidb'
        db_file = tempfile.NamedTemporaryFile(
            delete=False, suffix=".{}".format(db_type))
        pe.save_to_db(
            db_type=db_type,
            db_destination=db_file.name,
            predictor_export_meta=pem)

        workspace.ResetWorkspace()

        meta_net_def = pe.load_from_db(
            db_type=db_type,
            filename=db_file.name,
        )

        self.assertTrue("data" not in workspace.Blobs())
        self.assertTrue("y" not in workspace.Blobs())

        init_net = pred_utils.GetNet(meta_net_def, pc.PREDICT_INIT_NET_TYPE)

        # 0-fills external input blobs and runs extra_init_net
        workspace.RunNetOnce(init_net)

        self.assertTrue("data" in workspace.Blobs())
        self.assertTrue("y" in workspace.Blobs())

        print(workspace.FetchBlob("data"))
        np.testing.assert_array_equal(
            workspace.FetchBlob("data"), np.ones(shape=(1, 5)))
        np.testing.assert_array_equal(
            workspace.FetchBlob("y"), np.zeros(shape=(1, 10)))

        # Load parameters from DB
        global_init_net = pred_utils.GetNet(meta_net_def,
                                            pc.GLOBAL_INIT_NET_TYPE)
        workspace.RunNetOnce(global_init_net)

        # Run the net with a reshaped input and verify we are
        # producing good numbers (with our custom implementation)
        workspace.FeedBlob("data", np.random.randn(2, 5).astype(np.float32))
        predict_net = pred_utils.GetNet(meta_net_def, pc.PREDICT_NET_TYPE)
        self.assertEqual(predict_net.type, 'dag')
        workspace.RunNetOnce(predict_net)
        np.testing.assert_array_almost_equal(
            workspace.FetchBlob("y"),
            workspace.FetchBlob("data").dot(self.params["y_w"].T) +
            self.params["y_b"])