예제 #1
0
파일: neuralnet.py 프로젝트: stachon/binet
 def predict(self, X):
     """Predict targets for X according to the configured output layer type.

     softmax -> class index of the strongest activation,
     sigmoid -> boolean per-unit threshold at 0.5,
     anything else -> the raw transformed activations.
     """
     activations = self.transform(X)
     if self.output == "softmax":
         # pick the class with the highest activation per row
         return op.argmax(activations, 1)
     if self.output == "sigmoid":
         # binarize each output unit independently
         return activations > 0.5
     # linear / unknown output types: hand back the activations unchanged
     return activations
예제 #2
0
 def predict(self, X):
     """Turn network outputs for X into predictions based on self.output."""
     out = self.transform(X)
     # the output types are mutually exclusive, so check order is irrelevant
     if self.output == "sigmoid":
         return out > 0.5          # per-unit thresholding
     elif self.output == "softmax":
         return op.argmax(out, 1)  # most likely class per sample
     else:
         return out                # e.g. linear output: raw values
예제 #3
0
파일: neuralnet.py 프로젝트: stachon/binet
    def _get_score(self, target, pred):
        """Score *pred* against *target*; higher values are better.

        Follows the sklearn convention that larger scores indicate a
        better model.
        """
        if self.output == "softmax":
            # collapse one-hot encodings to class-index vectors before comparing
            def _to_labels(a):
                if len(a.shape) != 1 and a.shape[1] != 1:
                    return op.to_cpu(op.argmax(a, 1))
                return a

            target = _to_labels(target)
            pred = _to_labels(pred)
            # classification accuracy
            return float(op.to_cpu(op.mean(pred == target)))
        elif self.output == "sigmoid":
            # Note: this is meant for multitask learning, but for e.g.
            # using sigmoid+squarederror as multiclass problem, this will
            # give the wrong result!
            return op.to_cpu(op.mean(target == (pred > 0.5)))
        elif self.output == "linear":
            # negated mean squared error, so higher is still better
            return -op.to_cpu(op.mean((target - pred)**2))
        else:
            raise NotImplementedError()
예제 #4
0
    def _get_score(self, target, pred):
        """Calculate prediction quality; higher is better (sklearn convention)."""
        out_type = self.output
        if out_type == "linear":
            # MSE is negated so that larger return values mean a better fit
            return -op.to_cpu(op.mean((target - pred) ** 2))
        if out_type == "sigmoid":
            # Note: this is meant for multitask learning, but for e.g.
            # using sigmoid+squarederror as multiclass problem, this will
            # give the wrong result!
            return op.to_cpu(op.mean(target == (pred > 0.5)))
        if out_type == "softmax":
            # reduce one-hot matrices to label vectors before measuring accuracy
            if len(target.shape) != 1 and target.shape[1] != 1:
                target = op.to_cpu(op.argmax(target, 1))
            if len(pred.shape) != 1 and pred.shape[1] != 1:
                pred = op.to_cpu(op.argmax(pred, 1))
            acc = op.to_cpu(op.mean(pred == target))
            return float(acc)
        raise NotImplementedError()