def __init__(self, x_points, y_points, theta):
    self.raw_X = x_points
    self.raw_y = y_points
    self.m = R.Scalar(self.raw_y.shape[0])
    self.X = R.Tensor(self.raw_X.tolist())
    self.y = R.Tensor(self.raw_y.tolist())
    self.theta = R.Tensor(theta.tolist())
Example #2
    def grow_tree(self, X, y, depth=0):
        pop_per_class = R.Tensor([])
        for c in range(self.num_classes):
            pop_per_class = pop_per_class.concat(
                R.sum(R.equal(y, R.Scalar(c))).expand_dims())
        predicted_class = R.argmax(pop_per_class)
        node = Node(predicted_class=predicted_class, depth=depth)
        node.samples = R.shape(y).gather(R.Scalar(0))

        if depth < self.max_depth:
            # col, threshold = self.find_split(X, y)
            col, threshold = 0, R.Tensor([12.895])
            decision = R.Scalar(col).logical_and(threshold)
            # busy-wait until the split decision has been evaluated
            while decision.status != "computed":
                pass
            if decision.output == 1:
                indices_left = X.transpose().gather(
                    R.Scalar(col)).less(threshold)
                X_left, y_left = X.gather(indices_left), y.gather(indices_left)
                indices_right = X.transpose().gather(
                    R.Scalar(col)).greater_equal(threshold)
                X_right, y_right = X.gather(indices_right), y.gather(
                    indices_right)
                node.feature_index = col
                node.threshold = threshold
                node.left = self.grow_tree(X_left, y_left, depth + 1)
                node.left.leftbranch = True
                node.right = self.grow_tree(X_right, y_right, depth + 1)
                node.right.rightbranch = True
        return node
Example #3
    def predict(self, X):
        n_q = len(X)
        self._X = R.Tensor(X)
        d_list = self.__eucledian_distance(self._X)
        # print(d_list)
        fe = d_list.foreach(operation='sort')
        sl = fe.foreach(operation='slice', begin=0, size=self.k)
        while sl.status != "computed":
            pass
        pred = R.Tensor([], name="prediction")
        for i in range(n_q):
            row = R.gather(d_list, R.Tensor([i])).reshape(shape=[self.n])

            values = sl.gather(R.Tensor([i])).reshape(shape=[self.k])
            while values.status != 'computed':
                pass
            ind = R.find_indices(row, values)
            while ind.status != 'computed':
                pass
            ind = ind.foreach(operation='slice', begin=0, size=1)
            y_neighbours = R.gather(self.Y, ind).reshape(shape=[self.k])
            while y_neighbours.status != 'computed':
                pass
            pred = pred.concat(R.mean(y_neighbours).expand_dims(axis=0))
            while pred.status != 'computed':
                pass
            print(pred)

        while pred.status != 'computed':
            pass
        self._label = pred
        return pred
Example #4
def Logcosh(y_true, y_pred):
    """ not completed """

    if not isinstance(y_true, R.Tensor):
        y_true = R.Tensor(y_true)
    if not isinstance(y_pred, R.Tensor):
        y_pred = R.Tensor(y_pred)
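
Logcosh above is left unfinished in the source. Below is a minimal sketch of one way it could be completed, using log(cosh(e)) = log((exp(e) + exp(-e)) / 2). The helper name logcosh_sketch is made up here, and R.exp is an assumption (only natlog appears in these snippets), so treat this as illustrative rather than the library's own implementation.

def logcosh_sketch(y_true, y_pred):
    """Hypothetical log-cosh loss: mean(log(cosh(y_pred - y_true)))."""
    if not isinstance(y_true, R.Tensor):
        y_true = R.Tensor(y_true)
    if not isinstance(y_pred, R.Tensor):
        y_pred = R.Tensor(y_pred)

    e = R.sub(y_pred, y_true)
    # cosh(e) = (exp(e) + exp(-e)) / 2 -- assumes R exposes an exp op
    cosh_e = R.div(R.add(R.exp(e), R.exp(R.multiply(R.Scalar(-1), e))),
                   R.Scalar(2))
    return R.mean(R.natlog(cosh_e))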
Example #5
    def fit(self, X, y):
        if self.fit_intercept:
            X = self.__add_intercept(X)
        self.leny = len(y)
        Y = R.Tensor(y)
        # weights initialization
        self.theta = R.Tensor([0] * self.x_shape1)

        for i in range(self.num_iter):
            h = self.__sigmoid(X.dot(self.theta))
            while h.status != 'computed':
                pass
            w = X.transpose()
            while w.status != 'computed':
                pass
            self.theta = self.theta.sub(
                self.lr.multiply((w.dot(h.sub(Y)).div(R.Scalar(self.leny)))))
            while self.theta.status != 'computed':
                pass
            loss = self.__loss(self.__sigmoid(X.dot(self.theta)), Y)
            while loss.status != 'computed':
                pass
            if self.verbose:
                self.losses.append(loss)
            print('Iteration : ', i)
Example #6
def f1_score(true_labels, pred_labels):
    if not isinstance(true_labels, R.Tensor):
        true_labels = R.Tensor(true_labels)
    if not isinstance(pred_labels, R.Tensor):
        pred_labels = R.Tensor(pred_labels)
    pre = precision(true_labels, pred_labels)
    rec = recall(true_labels, pred_labels)
    return R.div(R.multiply(R.Scalar(2), R.multiply(pre, rec)),
                 R.add(pre, rec))
Example #7
def accuracy(y_true, y_pred):
    if not isinstance(y_true, (R.Tensor, R.Op)):
        y_true = R.Tensor(y_true)
    if not isinstance(y_pred, (R.Tensor, R.Op)):
        y_pred = R.Tensor(y_pred)

    return R.div(R.sum(R.equal(y_pred, y_true)), y_pred.shape_())
Example #8
def KL_div_loss(y_true, y_pred, d):
    if not isinstance(y_true, R.Tensor):
        y_true = R.Tensor(y_true)
    if not isinstance(y_pred, R.Tensor):
        y_pred = R.Tensor(y_pred)

    y_pred = R.clip(y_pred, R.epsilon(), R.sub(R.Scalar(1), R.epsilon()))

    return R.elemul(y_true, R.natlog(R.div(y_true, y_pred)))
Example #9
def Poisson_loss(y_true, y_pred):
    if not isinstance(y_true, R.Tensor):
        y_true = R.Tensor(y_true)
    if not isinstance(y_pred, R.Tensor):
        y_pred = R.Tensor(y_pred)

    y_pred = R.clip(y_pred, R.epsilon(), R.sub(R.Scalar(1), R.epsilon()))

    return R.sub(y_pred, R.elemul(y_true, R.natlog(y_pred)))
def accuracy(y_true, y_pred):

    if not isinstance(y_true, R.Tensor):
        y_true = R.Tensor(y_true)
    if not isinstance(y_pred, R.Tensor):
        y_pred = R.Tensor(y_pred)

    accuracy = R.div(R.sum(R.equal(y_pred, y_true)), y_pred.shape_())
    return accuracy
Example #11
def mean_squared_error(y_true, y_pred):
    """
    Mean Squared Error
    """
    if not isinstance(y_true, R.Tensor):
        y_true = R.Tensor(y_true)
    if not isinstance(y_pred, R.Tensor):
        y_pred = R.Tensor(y_pred)

    return R.mean(R.pow(R.sub(y_true, y_pred), R.Scalar(2)))
Example #12
def root_mean_squared_error(y_true, y_pred):
    """
    Root Mean Squared Error
    """
    if not isinstance(y_true, R.Tensor):
        y_true = R.Tensor(y_true)
    if not isinstance(y_pred, R.Tensor):
        y_pred = R.Tensor(y_pred)

    return R.pow(mean_squared_error(y_true, y_pred), R.Scalar(0.5))
Example #13
def mean_absolute_error(y_true, y_pred):
    """
    Mean Absolute Error
    """
    if not isinstance(y_true, R.Tensor):
        y_true = R.Tensor(y_true)
    if not isinstance(y_pred, R.Tensor):
        y_pred = R.Tensor(y_pred)

    return R.mean(R.abs(R.sub(y_pred, y_true)))
Example #14
def mean_squared_log_error(y_true, y_pred):
    """
    Mean Squared Log Error
    """
    if not isinstance(y_true, R.Tensor):
        y_true = R.Tensor(y_true)
    if not isinstance(y_pred, R.Tensor):
        y_pred = R.Tensor(y_pred)

    return R.mean(R.pow(R.sub(R.natlog(R.add(y_true, R.one())), R.natlog(R.add(y_pred, R.one()))), R.Scalar(2)))
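
As a quick hand check of what the error metrics above compute: with y_true = [1, 2, 3] and y_pred = [1, 2, 5] the squared errors are [0, 0, 4], so the mean squared error is 4/3 ≈ 1.33 and the root mean squared error is its square root ≈ 1.15. A small sketch comparing the ops against plain NumPy (the NumPy reference is an addition for illustration, not part of these snippets):

import numpy as np

y_true = [1.0, 2.0, 3.0]
y_pred = [1.0, 2.0, 5.0]

# plain-NumPy reference values for the hand check above
mse_ref = np.mean((np.array(y_pred) - np.array(y_true)) ** 2)   # 1.333...
rmse_ref = np.sqrt(mse_ref)                                      # 1.154...

# the R ops are lazy, so poll status before reading the result
mse = mean_squared_error(y_true, y_pred)
while mse.status != 'computed':
    pass
print(mse.output, mse_ref, rmse_ref)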
Example #15
    def fit(self, X, y):
        X = R.Tensor(X)
        y = R.Tensor(y)
        num_classes = y.unique().shape_().gather(R.Scalar(0))
        num_features = X.shape_().gather(R.Scalar(1))
        # wait for both shape lookups before reading their outputs
        while num_classes.status != 'computed':
            pass
        while num_features.status != 'computed':
            pass
        self.num_classes = int(num_classes.output)
        self.num_features = int(num_features.output)
        self.tree = self.grow_tree(X, y)
Example #16
def categorical_hinge(y_true, y_pred):
    if not isinstance(y_true, R.Tensor):
        y_true = R.Tensor(y_true)
    if not isinstance(y_pred, R.Tensor):
        y_pred = R.Tensor(y_pred)

    # categorical hinge: neg = max((1 - y_true) * y_pred), pos = sum(y_true * y_pred)
    neg = R.max(R.elemul(R.sub(R.Scalar(1), y_true), y_pred))
    pos = R.sum(R.elemul(y_true, y_pred))
    # loss = max(neg - pos + 1, 0)
    loss = R.max(
        R.add(R.sub(neg, pos), R.Scalar(1)).expand_dims().concat(R.Tensor([0])))

    return loss
def out_pred(y_true, y_pred, per_label=False, mode=None):

    if not isinstance(y_true, R.Tensor):
        y_true = R.Tensor(y_true)
    if not isinstance(y_pred, R.Tensor):
        y_pred = R.Tensor(y_pred)

    # wait for the labels so the set of classes can be enumerated
    while y_true.status != 'computed':
        pass

    for i in sorted(set(y_true.output)):
        # per-class confusion-matrix counts
        TP = R.sum(R.equal(y_pred, R.Scalar(i)).logical_and(R.equal(y_true, R.Scalar(i))))
        # TN assumes R exposes not_equal alongside equal/less/greater_equal
        TN = R.sum(R.not_equal(y_pred, R.Scalar(i)).logical_and(
            R.not_equal(y_true, R.Scalar(i))))
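
out_pred above stops after the TP/TN counts. The sketch below shows how per-class precision and recall could be assembled from the same counting pattern; the function name, the R.not_equal op, and the epsilon guard against division by zero are assumptions added here, not part of the original snippet.

def per_class_precision_recall(y_true, y_pred):
    if not isinstance(y_true, R.Tensor):
        y_true = R.Tensor(y_true)
    if not isinstance(y_pred, R.Tensor):
        y_pred = R.Tensor(y_pred)

    # wait for the labels so the classes can be enumerated
    while y_true.status != 'computed':
        pass

    scores = {}
    for i in sorted(set(y_true.output)):
        tp = R.sum(R.equal(y_pred, R.Scalar(i)).logical_and(R.equal(y_true, R.Scalar(i))))
        fp = R.sum(R.equal(y_pred, R.Scalar(i)).logical_and(R.not_equal(y_true, R.Scalar(i))))
        fn = R.sum(R.not_equal(y_pred, R.Scalar(i)).logical_and(R.equal(y_true, R.Scalar(i))))
        # precision = TP / (TP + FP), recall = TP / (TP + FN)
        precision = R.div(tp, R.add(R.add(tp, fp), R.epsilon()))
        recall = R.div(tp, R.add(R.add(tp, fn), R.epsilon()))
        scores[i] = (precision, recall)
    return scores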
def r2_score(y_true, y_pred):

    if not isinstance(y_true, R.Tensor):
        y_true = R.Tensor(y_true)
    if not isinstance(y_pred, R.Tensor):
        y_pred = R.Tensor(y_pred)

    scalar1 = R.Scalar(1)

    SS_res = R.sum(R.square(R.sub(y_true, y_pred)))
    SS_tot = R.sum(R.square(R.sub(y_true, R.mean(y_true))))

    return R.sub(scalar1, R.div(SS_res, R.add(SS_tot, R.epsilon())))
    def score(self, X, y, name="r2"):
        g.graph_id = None
        if not isinstance(X, R.Tensor):
            X = R.Tensor(X)
        if not isinstance(y, R.Tensor):
            y = R.Tensor(y)

        y_pred = self.predict(X)
        y_true = y

        if name == "r2":
            return metrics.r2_score(y_true, y_pred)
        else:
            return None
Example #20
def huber(y_true, y_pred, d):
    if not isinstance(y_true, R.Tensor):
        y_true = R.Tensor(y_true)
    if not isinstance(y_pred, R.Tensor):
        y_pred = R.Tensor(y_pred)

    delta = R.Scalar(d)
    x = R.sub(y_true, y_pred)
    abs_x = R.abs(x)
    # the Python-level branch needs a concrete value, so wait for |x|;
    # as in the original, this assumes a scalar (single-element) residual
    while abs_x.status != 'computed':
        pass

    if abs_x.output <= d:
        # quadratic region: 0.5 * x^2
        return R.elemul(R.Scalar(0.5), R.elemul(x, x))
    # linear region: d * (|x| - 0.5 * d)
    return R.elemul(delta, R.sub(abs_x, R.elemul(R.Scalar(0.5), delta)))
Example #21
    def preprocess(self, X_train, y_train):
        x_ = X_train.tolist()
        y_ = y_train.tolist()
        y = []
        for element in y_:
            t = []
            for e in element:
                t.append(int(e))
            y.append(R.Tensor(t))

        x = []
        for element in x_:
            x.append(R.Tensor(element).reshape(shape=[1, self.input_dims]))

        return x, y
def AUCROC(y_true, y_pred):

    '''
    not completed
    '''

    if not isinstance(y_true, R.Tensor):
        y_true = R.Tensor(y_true)
    if not isinstance(y_pred, R.Tensor):
        y_pred = R.Tensor(y_pred)

    # wait for the labels so the set of classes can be enumerated
    while y_true.status != 'computed':
        pass

    for i in sorted(set(y_true.output)):
        TP = R.sum(R.equal(y_pred, R.Scalar(i)).logical_and(R.equal(y_true, R.Scalar(i))))
        # TN assumes R exposes not_equal alongside equal/less/greater_equal
        TN = R.sum(R.not_equal(y_pred, R.Scalar(i)).logical_and(
            R.not_equal(y_true, R.Scalar(i))))
Example #23
    def grow_tree(self, X, y, depth=0):
        pop_per_class = R.Tensor([])
        for c in range(self.num_classes):
            pop_per_class = pop_per_class.concat(
                R.sum(R.equal(y, R.Scalar(c))).expand_dims())
        predicted_class = R.argmax(pop_per_class)
        while predicted_class.status != "computed":
            pass

        node = Node(predicted_class=predicted_class.output, depth=depth)
        node.samples = R.shape(y).gather(R.Scalar(0))
        if depth < self.max_depth:
            col, threshold = self.find_split(X, y)
            while threshold.status != "computed":
                pass
            z = X.shape_()
            z1 = y.shape_()
            while z1.status != "computed":
                pass
            if col is not None and threshold.output != [None]:
                indices_left = X.transpose().gather(
                    R.Scalar(col)).less(threshold)
                X_left = X.gather(
                    R.find_indices(indices_left, R.Tensor(
                        [1])).reshape(shape=R.sum(indices_left).expand_dims()))
                y_left = y.gather(
                    R.find_indices(indices_left, R.Tensor(
                        [1])).reshape(shape=R.sum(indices_left).expand_dims()))

                indices_right = X.transpose().gather(
                    R.Scalar(col)).greater_equal(threshold)
                X_right = X.gather(
                    R.find_indices(indices_right, R.Tensor([
                        1
                    ])).reshape(shape=R.sum(indices_right).expand_dims()))
                y_right = y.gather(
                    R.find_indices(indices_right, R.Tensor([
                        1
                    ])).reshape(shape=R.sum(indices_right).expand_dims()))
                node.feature_index = col
                node.threshold = threshold

                node.left = self.grow_tree(X_left, y_left, depth + 1)
                node.left.leftbranch = True
                node.right = self.grow_tree(X_right, y_right, depth + 1)
                node.right.rightbranch = True
        return node
Example #24
def r2_score(y_true, y_pred):
    if not isinstance(y_true, (R.Tensor, R.Op)):
        y_true = R.Tensor(y_true, name="y_true")

    if not isinstance(y_pred, (R.Tensor, R.Op)):
        y_pred = R.Tensor(y_pred, name="y_pred")

    print(type(y_true), type(y_pred))

    scalar1 = R.Scalar(1)

    SS_res = R.sum(R.square(R.sub(y_pred, y_true)), name="ss_res")
    SS_tot = R.sum(R.square(R.sub(y_true, R.mean(y_true))), name="ss_tot")

    return R.sub(scalar1, R.div(SS_res, SS_tot), name="r2_score")
Example #25
    def predict(self, X_test):
        x_test = X_test.tolist()
        test = R.Tensor(x_test).reshape(shape=[1, self.input_dims])
        lastout = self.f_forward(test, self.w1, self.w2)
        while lastout.status != 'computed':
            pass
        # index of the largest output activation is the predicted class
        temp = list(lastout()[0])
        k = temp.index(max(temp))
        return k
Example #26
def standardize(x):
    """
    Standardize an array
    """
    if not isinstance(x, R.Tensor):
        x = R.Tensor(x)

    mean = R.mean(x)
    std = R.std(x)

    return R.div(R.sub(x, mean), std)
Example #27
def pearson_correlation(x, y):
    """
    Calculate linear correlation(pearson correlation)
    """

    if not isinstance(x, R.Tensor):
        x = R.Tensor(x)
    if not isinstance(y, R.Tensor):
        y = R.Tensor(y)

    a = R.sum(R.square(x))
    b = R.sum(R.square(y))

    # number of samples, read from the computed shape of x
    n = x.shape_().gather(R.Scalar(0))
    while n.status != 'computed':
        pass
    n = int(n.output)

    return R.div(
        R.sub(R.multiply(R.Scalar(n), R.sum(R.multiply(x, y))),
              R.multiply(R.sum(x), R.sum(y))),
        R.multiply(
            R.square_root(R.sub(R.multiply(R.Scalar(n), a), R.square(R.sum(x)))),
            R.square_root(R.sub(R.multiply(R.Scalar(n), b), R.square(R.sum(y))))))
Example #28
def z_score(x, axis=None):
    if not isinstance(x, R.Tensor):
        x = R.Tensor(x)

    if axis is not None:
        mean = R.mean(x, axis=axis)
        std = R.std(x, axis=axis)
    else:
        mean = R.mean(x)
        std = R.std(x)

    return R.div(R.sub(x, mean), std)
Example #29
def normalize(x):
    """
    Normalize an array
    """
    if not isinstance(x, R.Tensor):
        x = R.Tensor(x)

    # wait for x so its shape can be inspected before validating it
    while x.status != 'computed':
        pass
    if len(x.output.shape) > 1:
        raise Exception("Unsupported input type")

    x_max = R.max(x)
    x_min = R.min(x)

    return R.div(R.sub(x, x_min), R.sub(x_max, x_min))
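
For a quick contrast between the scalings above: normalize is a min-max scaling that maps the smallest value to 0 and the largest to 1, while standardize and z_score center the data on its mean and divide by its standard deviation. A small NumPy reference check of the two behaviors (NumPy is used here only for the hand check, it is not part of these snippets):

import numpy as np

x = np.array([2.0, 4.0, 6.0, 8.0])

# min-max scaling, as in normalize: [0.0, 0.333..., 0.666..., 1.0]
minmax = (x - x.min()) / (x.max() - x.min())

# z-score scaling, as in standardize / z_score: mean 0, standard deviation 1
zscore = (x - x.mean()) / x.std()

print(minmax, zscore)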
Example #30
    def fit(self, X, k=3, iter=10):
        self.points = R.Tensor(X)
        self.k = k
        self.centroids = self.initialize_centroids()
        inform_server()
        self.label = self.closest_centroids(self.centroids)
        self.update_centroids()

        for i in range(iter):
            print('iteration', i)
            self.update_centroids()
            self.label = self.closest_centroids(self.centroids)
            inform_server()
        while self.label.status != "computed":
            pass