Example #1
def main():
    nn = NNDist()
    save = False
    load = False

    lr = 0.001
    lb = 0.001
    epoch = 1000
    record_period = 4

    x, y = DataUtil.gen_spiral(50, 3, 3, 2.5)

    if not load:
        nn.build([x.shape[1], 6, 6, 6, y.shape[1]])
        nn.optimizer = "Adam"
        nn.preview()
        nn.fit(x, y, lr=lr, lb=lb, verbose=1, record_period=record_period,
               epoch=epoch, batch_size=128, train_only=True,
               animation_params={"show": True, "mp4": True, "period": record_period})
        if save:
            nn.save()
        nn.visualize2d(x, y)
        nn.draw_results()
    else:
        nn.load()
        nn.preview()
        nn.evaluate(x, y)

    nn.show_timing_log()
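
Every example on this page builds its training set with DataUtil.gen_spiral. For readers without the original Util module, the following is a minimal, hypothetical stand-in whose signature (size, n, n_class, scale, one_hot) is only inferred from the calls shown on this page; the real DataUtil.gen_spiral may differ in its noise model and scaling.

import numpy as np

def gen_spiral(size=50, n=7, n_class=7, scale=4, one_hot=True):
    # Generate size*n 2-d points on n spiral arms; arm i is labelled i % n_class.
    xs = np.zeros((size * n, 2), dtype=np.float32)
    ys = np.zeros(size * n, dtype=np.int8)
    for i in range(n):
        ix = range(size * i, size * (i + 1))
        r = np.linspace(0.0, 1.0, size + 1)[1:]                       # radius
        t = np.linspace(2 * i * np.pi / n, 2 * (i + scale) * np.pi / n, size)
        t += np.random.random(size) * 0.1                             # angular jitter
        xs[ix] = np.c_[r * np.sin(t), r * np.cos(t)]
        ys[ix] = i % n_class
    if not one_hot:
        return xs, ys
    return xs, np.eye(n_class, dtype=np.int8)[ys]                     # one-hot labels

# Under these assumptions, gen_spiral(120, 7, 7, 4) would return x of shape (840, 2)
# and y of shape (840, 7), matching the shapes noted in Example #12 below.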
Example #2
def main():
    nn = NNDist()
    save = False
    load = False

    lr = 0.001
    lb = 0.001
    epoch = 1000
    record_period = 4

    x, y = DataUtil.gen_spiral(50, 3, 3, 2.5)

    if not load:
        nn.build([x.shape[1], 6, 6, 6, y.shape[1]])
        nn.optimizer = "Adam"
        nn.preview()
        nn.fit(x, y, lr=lr, lb=lb, verbose=1, record_period=record_period,
               epoch=epoch, batch_size=128, train_only=True,
               animation_params={"show": True, "mp4": False, "period": record_period})
        if save:
            nn.save()
        nn.visualize2d(x, y)
        nn.draw_results()
    else:
        nn.load()
        nn.preview()
        nn.evaluate(x, y)

    nn.show_timing_log()
Example #3
def main():

    nn = NNDist()
    epoch = 1000

    timing = Timing(enabled=True)
    timing_level = 1
    nn.feed_timing(timing)

    x, y = DataUtil.gen_spiral(100)

    nn.add(ReLU((x.shape[1], 24)))
    nn.add(ReLU((24, )))
    nn.add(Softmax((y.shape[1], )))

    nn.fit(x,
           y,
           epoch=epoch,
           verbose=2,
           metrics=["acc", "f1_score"],
           train_rate=0.8)
    nn.draw_logs()
    nn.visualize_2d(x, y)

    timing.show_timing_log(timing_level)
Example #4
def main():

    nn = NNDist()
    epoch = 1000

    x, y = DataUtil.gen_spiral(100)

    nn.add(ReLU((x.shape[1], 24)))
    nn.add(ReLU((24,)))
    nn.add(CrossEntropy((y.shape[1],)))

    nn.fit(x, y, epoch=epoch, verbose=2, metrics=["acc", "f1_score"], train_rate=0.8)
    nn.draw_logs()
    nn.visualize_2d(x, y)
Example #5
def main():

    nn = NN()
    epoch = 1000

    x, y = DataUtil.gen_spiral(120, 7, 7, 4)

    nn.add(ReLU((x.shape[1], 24)))
    nn.add(ReLU((24, )))
    nn.add(CostLayer((y.shape[1], ), "CrossEntropy"))

    # nn.disable_timing()
    nn.fit(x, y, epoch=epoch, train_rate=0.8, metrics=["acc"])
    nn.evaluate(x, y)
    nn.visualize2d(x, y)
    nn.show_timing_log()
    nn.draw_logs()
Example #6
def main():

    nn = NN()
    epoch = 1000

    x, y = DataUtil.gen_spiral(120, 7, 7, 4)

    nn.add(ReLU((x.shape[1], 24)))
    nn.add(ReLU((24, )))
    nn.add(CostLayer((y.shape[1],), "CrossEntropy"))

    # nn.disable_timing()
    nn.fit(x, y, epoch=epoch, train_rate=0.8, metrics=["acc"])
    nn.evaluate(x, y)
    nn.visualize2d(x, y)
    nn.show_timing_log()
    nn.draw_logs()
Example #7
def main():

    nn = NNDist()
    epoch = 1000

    x, y = DataUtil.gen_spiral(100)

    nn.add(ReLU((x.shape[1], 24)))
    nn.add(ReLU((24, )))
    nn.add(CrossEntropy((y.shape[1], )))

    nn.fit(x,
           y,
           epoch=epoch,
           verbose=2,
           metrics=["acc", "f1_score"],
           train_rate=0.8)
    nn.draw_logs()
    nn.visualize_2d(x, y)
Example #8
def main():

    nn = NNDist()
    epoch = 1000

    timing = Timing(enabled=True)
    timing_level = 1
    nn.feed_timing(timing)

    x, y = DataUtil.gen_spiral(100)

    nn.add(ReLU((x.shape[1], 24)))
    nn.add(ReLU((24,)))
    nn.add(Softmax((y.shape[1],)))

    nn.fit(x, y, epoch=epoch, verbose=2, metrics=["acc", "f1_score"], train_rate=0.8)
    nn.draw_logs()
    nn.visualize_2d(x, y)

    timing.show_timing_log(timing_level)
Example #9
def main():

    nn = NNDist()
    epoch = 1000

    timing = Timing(enabled=True)
    timing_level = 1
    nn.feed_timing(timing)

    x, y = DataUtil.gen_spiral(100)

    nn.add(ReLU((x.shape[1], 24)))
    nn.add(ReLU((24, )))
    nn.add(Softmax((y.shape[1], )))

    nn.fit(x, y, epoch=epoch)
    nn.visualize_2d(x, y)
    nn.evaluate(x, y)

    timing.show_timing_log(timing_level)
Example #10
def main():

    nn = NNDist()
    epoch = 1000

    timing = Timing(enabled=True)
    timing_level = 1
    nn.feed_timing(timing)

    x, y = DataUtil.gen_spiral(100)

    nn.add(ReLU((x.shape[1], 24)))
    nn.add(ReLU((24,)))
    nn.add(Softmax((y.shape[1],)))

    nn.fit(x, y, epoch=epoch)
    nn.visualize_2d(x, y)
    nn.evaluate(x, y)

    timing.show_timing_log(timing_level)
Example #11
def main():
    nn = NNDist()
    save = False
    load = False

    lr = 0.001
    lb = 0.001
    epoch = 1000

    timing = Timing(enabled=True)
    timing_level = 1

    x, y = DataUtil.gen_spiral(50, 3, 3, 2.5)

    if not load:
        nn.build([x.shape[1], 6, 6, 6, y.shape[1]])
        nn.optimizer = "Adam"
        nn.preview()
        nn.feed_timing(timing)
        nn.fit(x,
               y,
               lr=lr,
               lb=lb,
               verbose=1,
               record_period=4,
               epoch=epoch,
               batch_size=128,
               train_only=True,
               draw_detailed_network=True,
               make_mp4=True,
               show_animation=True)
        if save:
            nn.save()
        nn.draw_results()
        nn.visualize2d()
    else:
        nn.load()
        nn.preview()
        nn.evaluate(x, y)

    timing.show_timing_log(timing_level)
Example #12
def main():
    """
    模型的重点:
    1.构建网络时w,b的初始化:输入层,隐层..,输出层(损失层)
    w,b是层与层之间连接的参数,我们把参数的分层比较桥,
    第一个桥的w的维度是数据的(特征维度,第一个隐层的神经元个数),b的维度就是(第一个隐层神经元个数)
    第二个桥的w的维度是(第一个隐层神经元的个数,第二个隐层的神经元的个数)
    ...
    最后一个桥是连接隐层和输出的,其维度是(最后一个隐层的神经元个数,输出层的类别)
    2.前向传导把所有激活值保存下来
    
    3.反向传播的梯度求解
    第一步有别与其他步,
    第一步调用CostLayers的bp_first进行bp算法的第一步得到损失层的输出对输入的梯度delta[-1]
    
    4.w,b的更新
    最后一步有别与前面的步骤,在更新w0的时候, W_(i-1)' = v^T_(i-1) * delta_(i)中,v用的是输入x_batch
   
    5.模型的优化
    采用了Adam的优化器,效果最稳定高效,在tf中也可以用这个
    
    6.模型的预测
    实际上是通过训练好的w,b一层层计算 X * W + b,并取最后一层输出作为预测值,然后取预测值中的最大值下标得到预测标签y^,
    最后通过y,y^求准确率
    """
    nn = NN()
    epoch = 1000
    # x: (840, 2); y: (840, 7), one-hot labels where [1,0,0,0,0,0,0] means class 0,
    # [0,1,0,0,0,0,0] means class 1, and so on
    x, y = DataUtil.gen_spiral(120, 7, 7, 4)
    nn.add(ReLU((x.shape[1], 24)))
    nn.add(ReLU((24, )))
    nn.add(CostLayer((y.shape[1], ), "CrossEntropy"))

    nn.fit(x, y, epoch=epoch, train_rate=0.8, metrics=["acc", "f1_score"])
    nn.evaluate(x, y)
    nn.visualize2d(x, y)
    nn.show_timing_log()
    nn.draw_logs()
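
The docstring in this example outlines the whole training procedure: per-"bridge" weight shapes, a forward pass that caches every activation, a backward pass whose first step takes delta[-1] from the cost layer, and updates in which the first weight matrix uses the input batch itself as its activation. The sketch below illustrates points 1-4 and 6 with plain NumPy and vanilla gradient descent (not the Adam optimizer mentioned in point 5); the names build_params and train_step are illustrative only and are not part of the NN class.

import numpy as np

def build_params(dims):
    # 1. One (fan_in, fan_out) weight matrix and one (fan_out,) bias per "bridge".
    ws = [np.random.randn(i, o) / np.sqrt(i) for i, o in zip(dims[:-1], dims[1:])]
    bs = [np.zeros(o) for o in dims[1:]]
    return ws, bs

def train_step(ws, bs, x_batch, y_batch, lr=0.01):
    # 2. Forward pass: cache every activation, starting from the input batch.
    activations = [x_batch]
    for w, b in zip(ws[:-1], bs[:-1]):
        activations.append(np.maximum(0.0, activations[-1].dot(w) + b))    # ReLU
    scores = activations[-1].dot(ws[-1]) + bs[-1]
    exps = np.exp(scores - scores.max(axis=1, keepdims=True))
    probs = exps / exps.sum(axis=1, keepdims=True)                         # softmax

    # 3. Backward pass: the first step is special, delta[-1] comes from the cost layer
    #    (softmax + cross-entropy gives probs - y directly).
    deltas = [(probs - y_batch) / len(x_batch)]
    for i in range(len(ws) - 1, 0, -1):
        deltas.append(deltas[-1].dot(ws[i].T) * (activations[i] > 0))      # ReLU gradient
    deltas.reverse()

    # 4. Update: each bridge uses v^T_(i) * delta_(i); for the first bridge, v is x_batch.
    for i, (v, delta) in enumerate(zip(activations, deltas)):
        ws[i] -= lr * v.T.dot(delta)
        bs[i] -= lr * delta.sum(axis=0)

    # 6. Prediction: the arg-max over the last layer's output.
    return probs.argmax(axis=1)

# Usage with the hypothetical gen_spiral sketch from Example #1:
#   x, y = gen_spiral(120, 7, 7, 4)                      # x: (840, 2), y: (840, 7)
#   ws, bs = build_params([x.shape[1], 24, 24, y.shape[1]])
#   for _ in range(1000):
#       pred = train_step(ws, bs, x, y)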
Example #13
def vis_test():
    nn = NNDist()
    epoch = 1000
    record_period = 4
    make_mp4 = True

    timing = Timing(enabled=True)
    timing_level = 1
    x, y = DataUtil.gen_spiral(50, 3, 3, 2.5)
    nn.build([x.shape[1], 6, 6, 6, y.shape[1]])
    nn.optimizer = "Adam"
    nn.preview()
    nn.feed_timing(timing)
    nn.fit(x,
           y,
           verbose=1,
           record_period=record_period,
           epoch=epoch,
           train_only=True,
           draw_detailed_network=True,
           make_mp4=make_mp4,
           show_animation=True)
    nn.draw_results()
    timing.show_timing_log(timing_level)
Example #14
        self._model_built = True
        if net is None:
            net = self._tfx
        current_dimension = net.shape[1].value
        if self.activations is None:
            self.activations = [None] * len(self.hidden_units)
        elif isinstance(self.activations, str):
            self.activations = [self.activations] * len(self.hidden_units)
        else:
            self.activations = self.activations
        for i, n_unit in enumerate(self.hidden_units):
            net = self._fully_connected_linear(net,
                                               [current_dimension, n_unit], i)
            net = self._build_layer(i, net)
            current_dimension = n_unit
        appendix = "_final_projection"
        fc_shape = self.hidden_units[-1] if self.hidden_units else current_dimension
        self._output = self._fully_connected_linear(net,
                                                    [fc_shape, self.n_class],
                                                    appendix)


if __name__ == '__main__':
    from Util.Util import DataUtil

    x_train, y_train = DataUtil.gen_spiral(size=100, one_hot=False)
    x_test, y_test = DataUtil.gen_spiral(size=10, one_hot=False)

    nn = Basic(x_train, y_train, x_test, y_test).fit(snapshot_ratio=0)
Example #15
if __name__ == '__main__':
    # _x, _y = gen_random()
    # test(_x, _y, algorithm="RF", epoch=1)
    # test(_x, _y, algorithm="RF", epoch=10)
    # test(_x, _y, algorithm="RF", epoch=50)
    # test(_x, _y, algorithm="SKRandomForest")
    # test(_x, _y, epoch=1)
    # test(_x, _y, epoch=1)
    # test(_x, _y, epoch=10)
    # _x, _y = gen_xor()
    # test(_x, _y, algorithm="RF", epoch=1)
    # test(_x, _y, algorithm="RF", epoch=10)
    # test(_x, _y, algorithm="RF", epoch=1000)
    # test(_x, _y, algorithm="SKAdaBoost")
    _x, _y = DataUtil.gen_spiral(size=20, n=4, n_class=2, one_hot=False)
    _y[_y == 0] = -1
    # test(_x, _y, clf="SKTree", epoch=10)
    # test(_x, _y, clf="SKTree", epoch=1000)
    # test(_x, _y, algorithm="RF", epoch=10)
    test(_x, _y, algorithm="RF", epoch=30, n_cores=4)
    test(_x, _y, algorithm="SKAdaBoost")

    train_num = 6000
    (x_train, y_train), (x_test, y_test), *_ = DataUtil.get_dataset(
        "mushroom", "../_Data/mushroom.txt", n_train=train_num, quantize=True, tar_idx=0)
Example #16
        err = -y_batch * (x_batch.dot(self._alpha) + self._b) * sample_weight_batch
        mask = err >= 0  # type: np.ndarray
        if not np.any(mask):
            self._model_grads = [None, None]
        else:
            delta = -y_batch[mask] * sample_weight_batch[mask]
            self._model_grads = [
                np.sum(delta[..., None] * x_batch[mask], axis=0),
                np.sum(delta)
            ]
        return np.sum(err[mask])


if __name__ == '__main__':
    # xs, ys = DataUtil.gen_two_clusters(center=5, dis=1, scale=2, one_hot=False)
    xs, ys = DataUtil.gen_spiral(20, 4, 2, 2, one_hot=False)
    # xs, ys = DataUtil.gen_xor(one_hot=False)
    ys[ys == 0] = -1

    animation_params = {
        "show": False, "mp4": False, "period": 50,
        "dense": 400, "draw_background": True
    }

    kp = KP(animation_params=animation_params)
    kp.fit(xs, ys, kernel="poly", p=12, epoch=200)
    kp.evaluate(xs, ys)
    kp.visualize2d(xs, ys, dense=400)

    kp = GDKP(animation_params=animation_params)
    kp.fit(xs, ys, kernel="poly", p=12, epoch=10000)
Example #17
        err = -y_batch * (x_batch.dot(self._alpha) + self._b) * sample_weight_batch
        mask = err >= 0  # type: np.ndarray
        if not np.any(mask):
            self._model_grads = [None, None]
        else:
            delta = -y_batch[mask] * sample_weight_batch[mask]
            self._model_grads = [
                np.sum(delta[..., None] * x_batch[mask], axis=0),
                np.sum(delta)
            ]
        return np.sum(err[mask])


if __name__ == '__main__':
    # xs, ys = DataUtil.gen_two_clusters(center=5, dis=1, scale=2, one_hot=False)
    xs, ys = DataUtil.gen_spiral(20, 4, 2, 2, one_hot=False)
    # xs, ys = DataUtil.gen_xor(one_hot=False)
    ys[ys == 0] = -1

    animation_params = {
        "show": False,
        "mp4": False,
        "period": 50,
        "dense": 400,
        "draw_background": True
    }

    kp = KP(animation_params=animation_params)
    kp.fit(xs, ys, kernel="poly", p=12, epoch=200)
    kp.evaluate(xs, ys)
    kp.visualize2d(xs, ys, dense=400)
Example #18
def main():

    # x, y = DataUtil.gen_xor(100, one_hot=False)
    x, y = DataUtil.gen_spiral(20, 4, 2, 2, one_hot=False)
    # x, y = DataUtil.gen_two_clusters(n_dim=2, one_hot=False)
    y[y == 0] = -1

    animation_params = {
        "show": False,
        "mp4": False,
        "period": 50,
        "dense": 400,
        "draw_background": True
    }

    svm = SVM(animation_params=animation_params)
    svm.fit(x, y, kernel="poly", p=12, epoch=600)
    svm.evaluate(x, y)
    svm.visualize2d(x, y, padding=0.1, dense=400, emphasize=svm["alpha"] > 0)

    svm = GDSVM(animation_params=animation_params)
    svm.fit(x, y, kernel="poly", p=12, epoch=10000)
    svm.evaluate(x, y)
    svm.visualize2d(x, y, padding=0.1, dense=400, emphasize=svm["alpha"] > 0)

    if TorchSVM is not None:
        svm = TorchSVM(animation_params=animation_params)
        svm.fit(x, y, kernel="poly", p=12)
        svm.evaluate(x, y)
        svm.visualize2d(x,
                        y,
                        padding=0.1,
                        dense=400,
                        emphasize=svm["alpha"] > 0)

    svm = TFSVM()
    svm.fit(x, y)
    svm.evaluate(x, y)
    svm.visualize2d(x, y, padding=0.1, dense=400)

    svm = SKSVM()
    # svm = SKSVM(kernel="poly", degree=12)
    svm.fit(x, y)
    svm.evaluate(x, y)
    svm.visualize2d(x, y, padding=0.1, dense=400, emphasize=svm.support_)

    (x_train, y_train), (x_test, y_test), *_ = DataUtil.get_dataset(
        "mushroom", "../_Data/mushroom.txt", n_train=100, quantize=True, tar_idx=0)
    y_train[y_train == 0] = -1
    y_test[y_test == 0] = -1

    svm = SKSVM()
    svm.fit(x_train, y_train)
    svm.evaluate(x_train, y_train)
    svm.evaluate(x_test, y_test)

    svm = TFSVM()
    svm.fit(x_train, y_train)
    svm.evaluate(x_train, y_train)
    svm.evaluate(x_test, y_test)

    if TorchSVM is not None:
        svm = TorchSVM()
        svm.fit(x_train, y_train)
        svm.evaluate(x_train, y_train)
        svm.evaluate(x_test, y_test)

        svm = TorchSVM()
        logs = [
            log[0] for log in svm.fit(x_train,
                                      y_train,
                                      metrics=["acc"],
                                      x_test=x_test,
                                      y_test=y_test)
        ]
        svm.evaluate(x_train, y_train)
        svm.evaluate(x_test, y_test)

        plt.figure()
        plt.title(svm.title)
        plt.plot(range(len(logs)), logs)
        plt.show()

    svm = SVM()
    logs = [
        log[0] for log in svm.fit(
            x_train, y_train, metrics=["acc"], x_test=x_test, y_test=y_test)
    ]
    svm.evaluate(x_train, y_train)
    svm.evaluate(x_test, y_test)

    plt.figure()
    plt.title(svm.title)
    plt.plot(range(len(logs)), logs)
    plt.show()

    svm = GDSVM()
    logs = [
        log[0] for log in svm.fit(
            x_train, y_train, metrics=["acc"], x_test=x_test, y_test=y_test)
    ]
    svm.evaluate(x_train, y_train)
    svm.evaluate(x_test, y_test)

    plt.figure()
    plt.title(svm.title)
    plt.plot(range(len(logs)), logs)
    plt.show()

    svm.show_timing_log()
Example #19
def main():

    # x, y = DataUtil.gen_xor(100, one_hot=False)
    x, y = DataUtil.gen_spiral(20, 4, 2, 2, one_hot=False)
    # x, y = DataUtil.gen_two_clusters(n_dim=2, one_hot=False)
    y[y == 0] = -1
    #
    # svm = SKSVM()
    # # svm = SKSVM(kernel="poly", degree=12)
    # svm.fit(x, y)
    # svm.evaluate(x, y)
    # svm.visualize2d(x, y, padding=0.1, dense=400, emphasize=svm.support_)
    #
    # svm = TFSVM()
    # svm.fit(x, y, lr=0.0001)
    # svm.evaluate(x, y)
    # svm.visualize2d(x, y, padding=0.1, dense=400)
    #
    svm = SVM()
    svm.fit(x, y, kernel="poly", p=12)
    # _logs = [_log[0] for _log in svm.fit(x, y, metrics=["acc"])]
    svm.evaluate(x, y)
    svm.visualize2d(x, y, padding=0.1, dense=400, emphasize=svm["alpha"] > 0)

    (x_train, y_train), (x_test, y_test), *_ = DataUtil.get_dataset(
        "mushroom", "../_Data/mushroom.txt", train_num=100, quantize=True, tar_idx=0)
    y_train[y_train == 0] = -1
    y_test[y_test == 0] = -1

    svm = SKSVM()
    svm.fit(x_train, y_train)
    svm.evaluate(x_train, y_train)
    svm.evaluate(x_test, y_test)

    svm = TFSVM()
    _logs = [
        _log[0] for _log in svm.fit(
            x_train, y_train, metrics=["acc"], x_test=x_test, y_test=y_test)
    ]
    svm.evaluate(x_train, y_train)
    svm.evaluate(x_test, y_test)

    plt.figure()
    plt.title(svm.title)
    plt.plot(range(len(_logs)), _logs)
    plt.show()

    svm = SVM()
    _logs = [
        _log[0] for _log in svm.fit(
            x_train, y_train, metrics=["acc"], x_test=x_test, y_test=y_test)
    ]
    svm.evaluate(x_train, y_train)
    svm.evaluate(x_test, y_test)

    plt.figure()
    plt.title(svm.title)
    plt.plot(range(len(_logs)), _logs)
    plt.show()

    svm.show_timing_log()
Example #20
if __name__ == '__main__':
    # _x, _y = gen_random()
    # test(_x, _y, algorithm="RF", epoch=1)
    # test(_x, _y, algorithm="RF", epoch=10)
    # test(_x, _y, algorithm="RF", epoch=50)
    # test(_x, _y, algorithm="SKRandomForest")
    # test(_x, _y, epoch=1)
    # test(_x, _y, epoch=1)
    # test(_x, _y, epoch=10)
    # _x, _y = gen_xor()
    # test(_x, _y, algorithm="RF", epoch=1)
    # test(_x, _y, algorithm="RF", epoch=10)
    # test(_x, _y, algorithm="RF", epoch=1000)
    # test(_x, _y, algorithm="SKAdaBoost")
    _x, _y = DataUtil.gen_spiral(size=20, n=4, n_class=2, one_hot=False)
    _y[_y == 0] = -1
    # test(_x, _y, clf="SKTree", epoch=10)
    # test(_x, _y, clf="SKTree", epoch=1000)
    # test(_x, _y, algorithm="RF", epoch=10)
    test(_x, _y, algorithm="RF", epoch=30, n_cores=4)
    test(_x, _y, algorithm="SKAdaBoost")

    train_num = 6000
    (x_train, y_train), (x_test, y_test), *_ = DataUtil.get_dataset(
        "mushroom", "../_Data/mushroom.txt", n_train=train_num, quantize=True, tar_idx=0)
    y_train[y_train == 0] = -1
    y_test[y_test == 0] = -1

    cv_test(x_train, y_train, x_test, y_test, clf="MNB", epoch=1)
    cv_test(x_train, y_train, x_test, y_test, clf="MNB", epoch=5)
Example #21
def main():

    # x, y = DataUtil.gen_xor(100, one_hot=False)
    x, y = DataUtil.gen_spiral(20, 4, 2, 2, one_hot=False)
    # x, y = DataUtil.gen_two_clusters(n_dim=2, one_hot=False)
    y[y == 0] = -1

    animation_params = {
        "show": False, "mp4": False, "period": 50,
        "dense": 400, "draw_background": True
    }

    svm = SVM(animation_params=animation_params)
    svm.fit(x, y, kernel="poly", p=12, epoch=600)
    svm.evaluate(x, y)
    svm.visualize2d(x, y, padding=0.1, dense=400, emphasize=svm["alpha"] > 0)

    svm = GDSVM(animation_params=animation_params)
    svm.fit(x, y, kernel="poly", p=12, epoch=10000)
    svm.evaluate(x, y)
    svm.visualize2d(x, y, padding=0.1, dense=400, emphasize=svm["alpha"] > 0)

    if TorchSVM is not None:
        svm = TorchSVM(animation_params=animation_params)
        svm.fit(x, y, kernel="poly", p=12)
        svm.evaluate(x, y)
        svm.visualize2d(x, y, padding=0.1, dense=400, emphasize=svm["alpha"] > 0)

    svm = TFSVM()
    svm.fit(x, y)
    svm.evaluate(x, y)
    svm.visualize2d(x, y, padding=0.1, dense=400)

    svm = SKSVM()
    # svm = SKSVM(kernel="poly", degree=12)
    svm.fit(x, y)
    svm.evaluate(x, y)
    svm.visualize2d(x, y, padding=0.1, dense=400, emphasize=svm.support_)

    (x_train, y_train), (x_test, y_test), *_ = DataUtil.get_dataset(
        "mushroom", "../_Data/mushroom.txt", n_train=100, quantize=True, tar_idx=0)
    y_train[y_train == 0] = -1
    y_test[y_test == 0] = -1

    svm = SKSVM()
    svm.fit(x_train, y_train)
    svm.evaluate(x_train, y_train)
    svm.evaluate(x_test, y_test)

    svm = TFSVM()
    svm.fit(x_train, y_train)
    svm.evaluate(x_train, y_train)
    svm.evaluate(x_test, y_test)

    if TorchSVM is not None:
        svm = TorchSVM()
        svm.fit(x_train, y_train)
        svm.evaluate(x_train, y_train)
        svm.evaluate(x_test, y_test)

        svm = TorchSVM()
        logs = [log[0] for log in svm.fit(
            x_train, y_train, metrics=["acc"], x_test=x_test, y_test=y_test
        )]
        svm.evaluate(x_train, y_train)
        svm.evaluate(x_test, y_test)

        plt.figure()
        plt.title(svm.title)
        plt.plot(range(len(logs)), logs)
        plt.show()

    svm = SVM()
    logs = [log[0] for log in svm.fit(
        x_train, y_train, metrics=["acc"], x_test=x_test, y_test=y_test
    )]
    svm.evaluate(x_train, y_train)
    svm.evaluate(x_test, y_test)

    plt.figure()
    plt.title(svm.title)
    plt.plot(range(len(logs)), logs)
    plt.show()

    svm = GDSVM()
    logs = [log[0] for log in svm.fit(
        x_train, y_train, metrics=["acc"], x_test=x_test, y_test=y_test
    )]
    svm.evaluate(x_train, y_train)
    svm.evaluate(x_test, y_test)

    plt.figure()
    plt.title(svm.title)
    plt.plot(range(len(logs)), logs)
    plt.show()

    svm.show_timing_log()