# Score the test set with the ONNX session built in the previous
# section ("sess", "label_name" and "input_name" are defined above
# this chunk).  onnxruntime expects float32 tensors, hence the cast.
pred_onx = sess.run([label_name],
                    {input_name: X_test.astype(numpy.float32)})[0]
print(pred_onx)

###############################################
# With DMatrix
# ++++++++++++
#
# Huge datasets cannot be handled with the scikit-learn API.
# DMatrix must be used. Let's see how to convert the trained
# model.

# Wrap the training data in XGBoost's native DMatrix; the Booster API
# (rather than the scikit-learn wrapper) is what handles huge datasets.
dtrain = DMatrix(X_train, label=y_train)

# Multi-class objective over the 3 target classes.
param = {'objective': 'multi:softmax', 'num_class': 3}
bst = train_xgb(param, dtrain, 10)

# ONNX input signature: float tensor with a dynamic batch dimension
# and 4 features.
initial_type = [('float_input', FloatTensorType([None, 4]))]
onx = convert_xgboost(bst, initial_types=initial_type)

# onnxruntime >= 1.9 requires the execution providers to be named
# explicitly when creating a session; CPUExecutionProvider keeps the
# example portable (the kwarg is accepted by older versions too).
sess = rt.InferenceSession(onx.SerializeToString(),
                           providers=['CPUExecutionProvider'])
input_name = sess.get_inputs()[0].name
label_name = sess.get_outputs()[0].name
pred_onx = sess.run([label_name],
                    {input_name: X_test.astype(numpy.float32)})[0]
print(pred_onx)

##################################
# Display the ONNX graph
# ++++++++++++++++++++++
#
# Esempio n. 2  (scraper artifact: separator between concatenated examples)
# 0
        # Wrap the full train/test matrices as DMatrix so the trained
        # Booster can score them below.
        xgb_alltrain = DMatrix(
            X_train,
            label=y_train,
            feature_names=predictors,
            feature_types=feature_types)

        # Test matrix carries no labels: inference only.
        xgb_alltest = DMatrix(
            X_test,
            feature_names=predictors,
            feature_types=feature_types)

        print('\t[XGBoost ] training...')
        # Train with early stopping against the held-out fold.
        # "xgb_train" / "xgb_val" are built earlier, outside this view;
        # presumably they are the DMatrix views of the current fold split.
        xgb_model = train_xgb(
            xgb_params,
            xgb_train,
            num_boost_round=15000,
            evals=[(xgb_val, 'valid_1')],
            verbose_eval=200,
            early_stopping_rounds=100,
            )

        # NOTE(review): if train_xgb is xgboost.train, then xgb_model is a
        # Booster and Booster.predict expects a DMatrix -- passing the raw
        # numpy slice X_train[val_idx, :] would raise a TypeError.  Confirm
        # whether this should be DMatrix(X_train[val_idx, :]).
        y_pred = xgb_model.predict(X_train[val_idx, :])
        score = roc_auc_score(y_train[val_idx], y_pred)
        #print('\t[XGBoost ] best iteration: \033[92m%i\033[0m' % xgb_model.best_iteration)
        print('\t[XGBoost ] oof ROC-AUC is: \033[92m%.4f\033[0m' % score)

        # Accumulate whole-set predictions and the per-fold score.
        y_train_xgb.append(xgb_model.predict(xgb_alltrain))
        y_pred_xgb.append(xgb_model.predict(xgb_alltest))
        xgb_scores.append(score)

        # Per-feature split counts (get_fscore) for importance analysis.
        xgb_feature_importances.append(
            xgb_model.get_fscore())