Example #1
def one_hot_cross_entropy(y_true, y_pred, with_logit=True):
    # softmax() is assumed to be defined alongside this helper
    if with_logit:
        y_pred = softmax(y_pred)

    # Clip predictions away from 0 to keep the logarithm finite
    y_pred = R.clip(y_pred, R.epsilon(), R.div(R.Scalar(1), R.epsilon()))
    N = y_pred.shape[0]
    # Cross-entropy: -sum(y_true * log(y_pred + 1e-9)) / N
    loss = R.div(R.elemul(R.Scalar(-1), R.sum(R.elemul(y_true, R.natlog(R.add(y_pred, 1e-9))))), R.Scalar(N))

    return loss
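A minimal usage sketch, under a few assumptions not shown in the example: that `ravop` is imported as `R` (the exact import path may differ between versions), that `softmax` is defined next to the loss, and that results are fetched with the same polling pattern used in Example #4.

import numpy as np
import ravop.core as R  # assumed import path; adjust to your ravop installation

# One-hot targets and raw logits for a 2-sample, 3-class problem
y_true = R.Tensor(np.array([[0, 1, 0], [1, 0, 0]]))
logits = R.Tensor(np.array([[0.2, 2.1, -0.3], [1.5, 0.1, 0.4]]))

loss = one_hot_cross_entropy(y_true, logits, with_logit=True)
while loss.status != 'computed':  # ops are resolved asynchronously
    pass
print(loss())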
Example #2
def huber(y_true, y_pred, d):
    if not isinstance(y_true, R.Tensor):
        y_true = R.Tensor(y_true)
    if not isinstance(y_pred, R.Tensor):
        y_pred = R.Tensor(y_pred)

    d = R.Scalar(d)
    x = R.sub(y_true, y_pred)

    # Quadratic branch: 0.5 * x^2 when the residual is within delta
    if R.abs(x) <= d:
        return R.elemul(R.Scalar(0.5), R.elemul(x, x))

    # Linear branch: 0.5 * d^2 + d * (|x| - d) for large residuals
    return R.add(R.elemul(R.Scalar(0.5), R.mul(d, d)), R.elemul(d, R.sub(R.abs(x), d)))
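A hypothetical call, assuming `ravop` is imported as `R` as above. Raw lists are wrapped into `R.Tensor` inside `huber`, and `d` is passed as a plain number; the polling pattern again mirrors Example #4.

residual_loss = huber([3.0], [2.5], d=1.0)
while residual_loss.status != 'computed':  # wait for the asynchronous op
    pass
print(residual_loss())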
Example #3
def tanh(x):
    """
    Tanh activation function: (e^x - e^-x) / (e^x + e^-x)
    """
    return R.div(R.sub(R.exp(x), R.exp(R.mul(R.minus_one(), x))),
                 R.add(R.exp(x), R.exp(R.mul(R.minus_one(), x))))
Example #4
    def fit(self, X, y):
        # Linear SVM trained with sub-gradient descent on the hinge loss
        n_samples, n_features = X.shape
        y_ = y
        # y_ = R.where(y <= 0, -1, 1)
        self.w = R.Tensor(np.zeros(n_features))
        self.b = R.Scalar(0)

        for epoch in range(self.n_iters):
            print("Epoch:", epoch)
            for idx, x_i in enumerate(X):
                x_i = R.Tensor(x_i)
                y_i = R.Tensor([y_[idx]])
                # Functional margin y_i * (w . x_i - b)
                val = y_i * (R.dot(x_i, self.w) - self.b)
                condition = R.greater_equal(val, R.Scalar(1))
                # Ops are computed asynchronously; wait for the comparison result
                while condition.status != 'computed':
                    pass
                if condition():
                    # Margin satisfied: only the regularizer contributes to the update
                    self.w = self.w - self.lr * (R.Scalar(2) * self.lambda_param * self.w)
                else:
                    # Margin violated: apply regularizer plus hinge-loss gradient
                    self.w = self.w - self.lr * (R.Scalar(2) * self.lambda_param * self.w - R.mul(x_i, y_i))
                    self.b = self.b - (self.lr * y_i)
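A hypothetical usage sketch: the surrounding class is not shown in this example, so the name `SVM` and its constructor arguments (`lr`, `lambda_param`, `n_iters`) are placeholders for whatever class defines this `fit` method.

import numpy as np

X = np.array([[2.0, 3.0], [1.0, 1.5], [-1.0, -2.0], [-2.0, -1.0]])
y = np.array([1, 1, -1, -1])  # labels already in {-1, +1}

clf = SVM(lr=0.001, lambda_param=0.01, n_iters=10)  # hypothetical class name/signature
clf.fit(X, y)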