Example #1
import numpy as np
# `auc` and `log` are helpers from the surrounding module (not shown here).


def train_skmodel(model, average_n_predictions=None, binarize=False, **kwargs):
    train_x = model.train_x
    valid_x = model.valid_x

    if model.pca:
        train_x = model.pca.fit_transform(train_x)
        valid_x = model.pca.transform(valid_x)

    model.c.fit(train_x, model.train_y)
    preds = model.c.predict(valid_x)
    if average_n_predictions:
        # majority vote over consecutive groups of `average_n_predictions` predictions
        correct = 0
        incorrect = 0
        for y in np.unique(model.valid_y):
            y_preds = preds[np.where(model.valid_y == y)]
            for i in range(0, len(y_preds), average_n_predictions):
                if sum(y_preds[i:(i + average_n_predictions)] == y) > (average_n_predictions / 2):
                    correct += 1
                else:
                    incorrect += 1
        acc = correct / (correct + incorrect)
    else:
        acc = sum(np.equal(preds, model.valid_y)) / len(preds)

    _auc = auc(model.valid_y[:len(preds)], preds, pos_label=1)
    if binarize:
        # threshold raw predictions at their median before scoring accuracy
        binary_preds = np.greater_equal(preds, np.median(preds))
        acc = sum(np.equal(binary_preds, model.valid_y)) / len(binary_preds)
    print("validation auc: {auc}".format(auc=_auc))
    log('epoch 0, validation accuracy {acc:.2%}'.format(acc=acc), True)
    return acc, 0, model.serialize()
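The `auc` function called throughout these examples, with signature `auc(y_true, y_score, pos_label=1)`, is a project helper that is not shown on this page. A minimal sketch of what it might look like, assuming it wraps scikit-learn's `roc_curve` and `auc`:

from sklearn import metrics


def auc(y_true, y_score, pos_label=1):
    # Hypothetical stand-in for the project's `auc` helper: ROC AUC of the scores.
    fpr, tpr, _ = metrics.roc_curve(y_true, y_score, pos_label=pos_label)
    return metrics.auc(fpr, tpr)

Here `pos_label` tells `roc_curve` which label value to treat as the positive class when the labels are not already 0/1.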
Example #2
def evaluate(self, idxs, pred):
    '''Scores the predictions for a given set of rows.

    Args:
        idxs (int[]): the indices of the rows to be evaluated
        pred (float[]): the predictions for the labels of those rows
    Returns:
        float: an evaluation score (the higher the better)
    '''
    # _correct_y is int-casted; go to the owner op (the int-cast) to get the shared
    # variable as its first input and read its value without copying it out
    _y = self._correct_y.owner.inputs[0].get_value(borrow=True)[idxs]
    return auc(_y[:len(pred)], pred, pos_label=1)
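The comment about the int-cast is a Theano detail: `_correct_y` is presumably built by casting a shared variable, so `_correct_y.owner` is the cast node and `inputs[0]` is the underlying shared variable, whose buffer can be read without a copy via `get_value(borrow=True)`. A small sketch of that assumption (the variable names here are hypothetical):

import numpy as np
import theano
import theano.tensor as T

# hypothetical setup mirroring how _correct_y might be constructed
labels = theano.shared(np.array([0, 1, 1, 0], dtype='int8'), name='correct_y')
correct_y = T.cast(labels, 'int32')  # the int-cast referred to in the comment

# the cast node's first input is the original shared variable; borrow=True reads
# its value without copying the underlying array
assert correct_y.owner.inputs[0] is labels
y_values = correct_y.owner.inputs[0].get_value(borrow=True)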
Example #3
def valid_eval(idxs, pred):
    # read the labels for these rows from the shared variable behind the int-cast
    _y = correct_y.owner.inputs[0].get_value(borrow=True)[idxs]
    return auc(_y, pred, pos_label=1)
Example #4
def evaluate(self, idxs, pred):
    # score the predictions for the given rows against the stored labels
    y = self._ys.owner.inputs[0].get_value(borrow=True)[idxs]
    return auc(y[:len(pred)], pred, pos_label=1)
Example #5
def auc(self, input_vec, true_y, pos_label=1):
    """Returns the AUC score for binary classification."""
    # take the predicted probability of the positive class and score it against true_y
    res = self.infer(input_vec)[:, pos_label]
    return auc(true_y, res, pos_label=pos_label)
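This method assumes `infer` returns a probability matrix of shape (n_samples, n_classes), and it scores only the positive-class column. A toy illustration of that indexing, using scikit-learn's `roc_auc_score` as a stand-in for the project's `auc` helper:

import numpy as np
from sklearn.metrics import roc_auc_score

# stand-in for self.infer(input_vec): one row of class probabilities per sample
probs = np.array([[0.9, 0.1],
                  [0.2, 0.8],
                  [0.7, 0.3],
                  [0.1, 0.9]])
true_y = np.array([0, 1, 0, 1])

pos_label = 1
score = roc_auc_score(true_y, probs[:, pos_label])  # AUC of the positive-class column
print(score)  # 1.0 on this toy data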