Example #1
def retrain_in_f_with_grid(name, label_p, label_n, oracle, n_features, ftype,
                           test_x, test_y, benchmark):
    print '--------------- retrain in F with grid -----------------'
    # sweep the number of query points (nQSV) from 50 to 600 in steps of 50
    for n_pts in xrange(50, 601, 50):

        online = OnlineBase(name,
                            label_p,
                            label_n,
                            oracle,
                            n_features,
                            ftype,
                            error=.1)
        online.collect_pts(n_pts, -1)  # query the oracle for n_pts labeled points
        ex = RBFKernelRetraining(
            name,
            online.get_QSV(),
            online.get_QSV_labels(),  # training data
            online.get_QSV(),
            online.get_QSV_labels(),  # validation data
            test_x,
            test_y,  # test data
            n_features)

        print 'nQSV=%d, Q=%d, dim=100,' % (
            n_pts, online.get_n_query()), ex.grid_retrain_in_f(100)
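
For context, a minimal sketch of how this helper might be driven, assuming a scikit-learn RBF SVC acts as the black-box oracle. The toy dataset, the 'uniform' ftype string, and the None benchmark argument are illustrative assumptions, not taken from the original code.

import numpy as np
from sklearn.svm import SVC
from sklearn.model_selection import train_test_split

# Toy stand-in data; labels +1/-1 to match label_p / label_n below.
X = np.random.rand(1000, 20)
y = np.where(X.sum(axis=1) > 10.0, 1, -1)
X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.3)

victim = SVC(kernel='rbf', gamma=1.0).fit(X_tr, y_tr)   # oracle being extracted

retrain_in_f_with_grid('toy', 1, -1,           # name, label_p, label_n
                       victim.predict,         # black-box prediction oracle
                       X.shape[1], 'uniform',  # n_features, ftype (assumed value)
                       X_te, y_te,             # test data
                       None)                   # benchmark (placeholder; unused in the body above)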
Example #2
    def polynomial_map(x):
        # explicit feature map for the polynomial kernel (gamma * u'v + c)^2
        # assume gamma=1, c=0
        n = len(x)
        r = []

        r.extend([x[i] * x[i] for i in range(n - 1, -1, -1)])
        for i in range(n - 1, -1, -1):
            for j in range(i - 1, -1, -1):
                r.append(sqrt(2) * x[i] * x[j])
        return r

    print 'solve in F'
    online = OnlineBase(train_data, p, n, poly_svc.predict, n_features, f_type,
                        1e-5)
    online.collect_pts(-1, budget=5000)  # collect query points under a 5000-query budget

    ex = PolySolver(online.get_QSV(), online.get_QSV_labels(), Xt, Yt,
                    polynomial_map, n_features)
    ex.solve_in_f()
    print 'TRAIN SCORE  : %f' % ex.solve_score
    print 'TEST SCORE   : %f' % ex.calc_test_score()

    # print 'retrain in F'
    # ex = RBFKernelRetraining(train_data, poly_svc.predict, Xt, Yt,
    #                          n_features, OfflineMethods.RT_in_F, error=1,
    #                          kernel='poly', fmap=polynomial_map)
    # ex.train_SGD_for_poly_in_F()
    # ex.benchmark()
    # ex.print_perf()
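
The explicit map above is what lets the attack solve for the model directly in feature space: for gamma=1 and c=0 it satisfies phi(u).phi(v) = (u.v)^2. A quick self-contained check (not part of the original code; it uses a standalone copy of the map) is:

import numpy as np
from math import sqrt

def polynomial_map(x):
    # standalone copy of the map above: degree-2 polynomial kernel, gamma=1, c=0
    n = len(x)
    r = [x[i] * x[i] for i in range(n - 1, -1, -1)]
    for i in range(n - 1, -1, -1):
        for j in range(i - 1, -1, -1):
            r.append(sqrt(2) * x[i] * x[j])
    return r

u, v = np.random.rand(6), np.random.rand(6)
lhs = np.dot(polynomial_map(u), polynomial_map(v))  # kernel value via the explicit map
rhs = np.dot(u, v) ** 2                             # kernel value computed directly
print(abs(lhs - rhs))                               # ~0 up to floating-point error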

# run('./data/mushrooms.aa', './data/mushrooms.ab', (1,2), 112, 'binary')