Example #1
    def evaluate(self, preds, labels=None):

        # Labels are required to compute the metrics
        if labels is None:
            print("No labels passed, cannot perform evaluation.")
            return

        if self.model is None:
            print("No model trained, cannot perform evaluation.")

        else:
            # Declaring the class containing the metrics
            cm = CoMe(preds, labels)

            # Evaluating
            prauc = cm.compute_prauc()
            rce = cm.compute_rce()
            # Confusion matrix
            conf = cm.confMatrix()
            # Prediction stats
            max_pred, min_pred, avg = cm.computeStatistics()

            return prauc, rce, conf, max_pred, min_pred, avg
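All of these snippets delegate the metric computation to a CoMe helper class that is not shown here. The following is only a minimal sketch of what such a class could look like, assuming binary labels and probability predictions, scikit-learn for the curve and loss computations, and a constant-rate predictor as the RCE baseline (that baseline choice is an assumption):

import numpy as np
from sklearn.metrics import precision_recall_curve, auc, log_loss, confusion_matrix

class CoMe:
    """Sketch of a metrics helper matching the calls used above (assumed API)."""

    def __init__(self, preds, labels):
        self.preds = np.asarray(preds, dtype=np.float64)
        self.labels = np.asarray(labels, dtype=np.int32)

    def compute_prauc(self):
        # Area under the precision-recall curve
        prec, rec, _ = precision_recall_curve(self.labels, self.preds)
        return auc(rec, prec)

    def compute_rce(self):
        # Relative cross entropy w.r.t. a constant predictor that always
        # outputs the positive rate of the labels (assumption)
        cross_entropy = log_loss(self.labels, self.preds)
        positive_rate = self.labels.mean()
        baseline_ce = log_loss(self.labels, np.full_like(self.preds, positive_rate))
        return (1.0 - cross_entropy / baseline_ce) * 100.0

    def confMatrix(self, threshold=0.5):
        # Confusion matrix after thresholding the probabilities
        return confusion_matrix(self.labels, self.preds > threshold)

    def computeStatistics(self):
        # Basic statistics of the raw predictions
        return self.preds.max(), self.preds.min(), self.preds.mean()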
Example #2
    def evaluate(self, pool_tst=None):
        if pool_tst is None:
            print("No dataset provided, cannot perform evaluation.")
            return
        if self.model is None:
            print("No model trained yet.")
        else:
            # Making predictions on the test Pool
            Y_pred = self.get_prediction(pool_tst)

            # Declaring the class containing the metrics
            Y_test = np.array(pool_tst.get_label()).astype(np.int32)
            cm = CoMe(Y_pred, Y_test)

            # Evaluating
            prauc = cm.compute_prauc()
            rce = cm.compute_rce()
            # Confusion matrix
            conf = cm.confMatrix()
            # Prediction stats
            max_pred, min_pred, avg = cm.computeStatistics()

            return prauc, rce, conf, max_pred, min_pred, avg
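This variant evaluates a CatBoost model on a Pool. A standalone sketch of the same flow on toy data, assuming a CoMe class like the one sketched above; the data, model settings, and the use of predict_proba are placeholders, not the wrapper's actual internals:

import numpy as np
from catboost import CatBoostClassifier, Pool

# Toy data and a quickly trained classifier (placeholders)
X = np.random.rand(200, 5)
y = np.random.randint(0, 2, 200)

model = CatBoostClassifier(iterations=50, verbose=False)
model.fit(X[:150], y[:150])

pool_tst = Pool(data=X[150:], label=y[150:])
Y_pred = model.predict_proba(pool_tst)[:, 1]                # positive-class probabilities
Y_test = np.array(pool_tst.get_label()).astype(np.int32)    # labels stored in the Pool

cm = CoMe(Y_pred, Y_test)
print(cm.compute_prauc(), cm.compute_rce())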
Example #3
    def evaluate(self, X_tst=None, Y_tst=None):
        Y_pred = None

        # Tries to load X and Y if not directly passed
        if (X_tst is None) or (Y_tst is None):
            X_tst, Y_tst = Data.get_dataset_xgb_default_test()
            print("Test set loaded from file.")
        if (self.sround_model is None) and (self.batch_model is None):
            print("No model trained yet.")
        else:
            # Selecting the appropriate model for the evaluation
            # according to the initial declaration (batch/single round)
            if self.batch is False:
                model = self.sround_model
            else:
                model = self.batch_model

            # Making predictions
            Y_pred = self.get_prediction(X_tst)

            # Declaring the class containing the metrics
            cm = CoMe(Y_pred, Y_tst)

            # Evaluating
            scores = cm.compute_multiclass()

            return scores
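This multiclass variant relies on self.get_prediction(X_tst), which presumably wraps the features in an xgb.DMatrix and calls the booster's predict, returning per-class probabilities. A minimal, self-contained sketch of that prediction step on a toy multiclass booster; the objective, class count, and data here are illustrative assumptions:

import numpy as np
import xgboost as xgb

# Toy multiclass setup (placeholders)
X_train = np.random.rand(200, 5)
y_train = np.random.randint(0, 3, 200)
d_train = xgb.DMatrix(X_train, label=y_train)
booster = xgb.train({"objective": "multi:softprob", "num_class": 3},
                    d_train, num_boost_round=10)

def make_predictions(booster, X_tst):
    # Wrap the raw features in a DMatrix and predict class probabilities
    d_test = xgb.DMatrix(X_tst)
    return booster.predict(d_test)

probs = make_predictions(booster, np.random.rand(10, 5))
print(probs.shape)   # (10, 3): one probability per class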
Example #4
    def evaluate(self, X_tst=None, Y_tst=None):
        Y_pred = None

        # Tries to load X and Y if not directly passed
        if (X_tst is None) or (Y_tst is None):
            X_tst, Y_tst = Data.get_dataset_xgb_default_test()
            print("Test set loaded from file.")
        Y_tst = np.array(Y_tst[Y_tst.columns[0]].astype(float))

        if (self.sround_model is None) and (not os.path.exists(
                self.previous_model_path)):
            print("No model trained yet.")
        else:
            # Selecting the appropriate model for the evaluation
            # according to the initial declaration (batch/single round)
            model = self.get_model()

            # Making predictions
            Y_pred = self.get_prediction(X_tst)

            # Declaring the class containing the metrics
            cm = CoMe(Y_pred, Y_tst)

            # Evaluating
            prauc = cm.compute_prauc()
            rce = cm.compute_rce()
            # Confusion matrix
            conf = confMatrix(Y_tst, Y_pred)
            # Prediction stats
            max_pred = max(Y_pred)
            min_pred = min(Y_pred)
            avg = np.mean(Y_pred)

            return prauc, rce, conf, max_pred, min_pred, avg
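Unlike the other variants, this one computes the confusion matrix and the prediction statistics outside of CoMe, via a standalone confMatrix(Y_tst, Y_pred) helper that is not shown. A minimal sketch of such a helper, assuming probability predictions binarized at a 0.5 threshold (the threshold is an assumption):

import numpy as np
from sklearn.metrics import confusion_matrix

def confMatrix(y_true, y_pred, threshold=0.5):
    # Binarize the predicted probabilities, then compute the confusion matrix
    y_true = np.asarray(y_true).astype(int)
    y_hat = (np.asarray(y_pred) > threshold).astype(int)
    return confusion_matrix(y_true, y_hat)

print(confMatrix([0, 1, 1, 0], [0.2, 0.9, 0.4, 0.1]))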
Example #5
    def evaluate(self, dmat_test=None):
        # A labelled test DMatrix is required for evaluation
        if dmat_test is None:
            print("No matrix passed, cannot perform evaluation.")
            return

        if self.model is None:
            print("No model trained, cannot perform evaluation.")

        else:
            # Retrieving the predictions
            Y_pred = self.get_prediction(dmat_test)

            # Declaring the class containing the metrics
            cm = CoMe(Y_pred, dmat_test.get_label())

            # Evaluating
            prauc = cm.compute_prauc()
            rce = cm.compute_rce()
            # Confusion matrix
            conf = cm.confMatrix()
            # Prediction stats
            max_pred, min_pred, avg = cm.computeStatistics()

            return prauc, rce, conf, max_pred, min_pred, avg
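This last variant expects an xgboost DMatrix that already carries the labels, since it reads them back with dmat_test.get_label(). A hypothetical way to build such a matrix; the wrapper instance name xgb_wrapper and the toy data are placeholders:

import numpy as np
import xgboost as xgb

# Placeholder test data
X_test = np.random.rand(100, 10)
y_test = np.random.randint(0, 2, 100)

# Labels are attached to the DMatrix so that get_label() works inside evaluate()
dmat_test = xgb.DMatrix(X_test, label=y_test)
print(dmat_test.get_label()[:5])   # stored as float32

# prauc, rce, conf, max_pred, min_pred, avg = xgb_wrapper.evaluate(dmat_test)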