Example no. 1: cutting-plane training of a binary HCRF with OneSlackSSVM
def binary_cp(n_train=100,
              C=1,
              dataset=1,
              max_iter=1000,
              n_inference_iter=5,
              check_dual_every=10,
              test_samples=100,
              inference_method='gco'):
    # save parameters as meta
    meta_data = locals()

    logger = logging.getLogger(__name__)

    # binary CRF model: 2 states, 10 unary features, one edge feature
    crf = HCRF(n_states=2,
               n_features=10,
               n_edge_features=1,
               alpha=1,
               inference_method=inference_method,
               n_iter=n_inference_iter)
    # one-slack cutting-plane SSVM learner wrapping the CRF
    clf = OneSlackSSVM(crf, verbose=2, n_jobs=4, max_iter=max_iter, C=C)

    x_train, y_train, x_test, y_test = load_binary_syntetic(dataset, n_train)

    logger.info('start training')

    # time the fit; the scorer callbacks record train/test scores during training
    start = time()
    clf.fit(x_train,
            y_train,
            train_scorer=lambda w: compute_score(
                crf, w, x_train, y_train, invert=False),
            test_scorer=lambda w: compute_score(
                crf, w, x_test, y_test, invert=False))
    stop = time()

    train_score = clf.score(x_train, y_train)
    test_score = clf.score(x_test, y_test)
    time_elapsed = stop - start

    logger.info('============================================================')
    logger.info('Score on train set: %f', train_score)
    logger.info('Score on test set: %f', test_score)
    logger.info('Elapsed time: %f s', time_elapsed)

    exp_data = {}

    exp_data['train_scores'] = clf.train_scores
    exp_data['test_scores'] = clf.test_scores

    meta_data['dataset_name'] = 'syntetic binary'
    meta_data['annotation_type'] = 'full'
    meta_data['label_type'] = 'full'
    meta_data['trainer'] = 'cutting plane'
    meta_data['train_score'] = train_score
    meta_data['test_score'] = test_score
    meta_data['time_elapsed'] = time_elapsed

    return ExperimentResult(exp_data, meta_data)
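
The ExperimentResult container returned above is not defined in these snippets; only its constructor signature (exp_data, meta_data) can be inferred from the calls. A minimal stand-in, purely illustrative and not the project's actual class, could look like this:

class ExperimentResult(object):
    # Illustrative stand-in only: the real class is not shown in these examples.
    def __init__(self, data, meta):
        self.data = data    # recorded scores, curves, weights, timestamps
        self.meta = meta    # experiment parameters and final train/test scores
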
Example no. 2: training a binary HCRF with the Over trainer (logged as 'komodakis' in the metadata)
def binary_over(n_train=100, C=1, dataset=1,
                max_iter=100, verbose=1,
                test_samples=100, check_every=10,
                test_method='gco', test_n_iter=5, relaxed_test=False):
    # save parameters as meta
    meta_data = locals()

    logger = logging.getLogger(__name__)

    # CRF used only for scoring; the Over trainer below learns w directly
    crf = HCRF(n_states=2, n_features=10, n_edge_features=1, alpha=1,
               inference_method=test_method, n_iter=test_n_iter)
    # the Over trainer (recorded as 'komodakis' in the metadata below)
    trainer = Over(n_states=2, n_features=10, n_edge_features=1,
                   C=C, max_iter=max_iter, verbose=verbose, check_every=check_every)

    x_train, y_train, x_test, y_test = \
        load_binary_syntetic(dataset, n_train)
    # evaluate only on the first test_samples test examples
    x_test = x_test[:test_samples]
    y_test = y_test[:test_samples]

    logger.info('start training')

    start = time()
    trainer.fit(x_train, y_train,
                train_scorer=lambda w: compute_score(crf, w, x_train, y_train, invert=True, relaxed=relaxed_test),
                test_scorer=lambda w: compute_score(crf, w, x_test, y_test, invert=True, relaxed=relaxed_test))
    stop = time()
    time_elapsed = stop - start

    logger.info('testing')

    test_score = compute_score(crf, trainer.w, x_test, y_test, invert=True, relaxed=relaxed_test)
    train_score = compute_score(crf, trainer.w, x_train, y_train, invert=True, relaxed=relaxed_test)

    logger.info('========================================')
    logger.info('train score: %f', train_score)
    logger.info('test score: %f', test_score)

    exp_data = {}

    exp_data['timestamps'] = trainer.timestamps
    exp_data['objective'] = trainer.objective_curve
    exp_data['w'] = trainer.w
    exp_data['train_scores'] = trainer.train_score
    exp_data['test_scores'] = trainer.test_score
    exp_data['w_history'] = trainer.w_history

    meta_data['dataset_name'] = 'syntetic binary'
    meta_data['annotation_type'] = 'full'
    meta_data['label_type'] = 'full'
    meta_data['trainer'] = 'komodakis'
    meta_data['train_score'] = train_score
    meta_data['test_score'] = test_score
    meta_data['time_elapsed'] = time_elapsed

    return ExperimentResult(exp_data, meta_data)
Example no. 3: Frank-Wolfe training of a binary HCRF with FrankWolfeSSVM
def binary_full_fw(n_train=100, C=1, dataset=1,
                   max_iter=1000, n_inference_iter=5,
                   check_dual_every=10, test_samples=100,
                   inference_method='gco'):
    # save parameters as meta
    meta_data = locals()

    logger = logging.getLogger(__name__)

    crf = HCRF(n_states=2, n_features=10, n_edge_features=1, alpha=1,
               inference_method=inference_method, n_iter=n_inference_iter)
    # Frank-Wolfe SSVM learner; check_dual_every controls how often the
    # stopping criterion (duality gap) is evaluated
    clf = FrankWolfeSSVM(crf, verbose=2, n_jobs=1, check_dual_every=check_dual_every,
                         max_iter=max_iter, C=C)

    x_train, y_train, x_test, y_test = \
        load_binary_syntetic(dataset, n_train)

    logger.info('start training')

    start = time()
    # a held-out subset is passed in so that test scores are tracked during training
    clf.fit(x_train, y_train, Xtest=x_test[:test_samples], Ytest=y_test[:test_samples])
    stop = time()

    train_score = clf.score(x_train, y_train)
    test_score = clf.score(x_test, y_test)
    time_elapsed = stop - start

    logger.info('============================================================')
    logger.info('Score on train set: %f', train_score)
    logger.info('Score on test set: %f', test_score)
    logger.info('Elapsed time: %f s', time_elapsed)

    exp_data = {}

    exp_data['timestamps'] = clf.timestamps_
    exp_data['primal_objective'] = clf.primal_objective_curve_
    exp_data['objective'] = clf.objective_curve_
    exp_data['w_history'] = clf.w_history
    exp_data['test_scores'] = clf.test_scores
    exp_data['train_scores'] = clf.train_scores
    exp_data['w'] = clf.w

    meta_data['dataset_name'] = 'syntetic binary'
    meta_data['annotation_type'] = 'full'
    meta_data['label_type'] = 'full'
    meta_data['trainer'] = 'frank-wolfe'
    meta_data['train_score'] = train_score
    meta_data['test_score'] = test_score
    meta_data['time_elapsed'] = time_elapsed

    return ExperimentResult(exp_data, meta_data)
Example no. 4: the same binary_cp experiment as Example no. 1, with a more compact argument layout
def binary_cp(n_train=100, C=1, dataset=1,
              max_iter=1000, n_inference_iter=5,
              check_dual_every=10, test_samples=100,
              inference_method='gco'):
    # save parameters as meta
    meta_data = locals()

    logger = logging.getLogger(__name__)

    crf = HCRF(n_states=2, n_features=10, n_edge_features=1, alpha=1,
               inference_method=inference_method, n_iter=n_inference_iter)
    clf = OneSlackSSVM(crf, verbose=2, n_jobs=4,
                       max_iter=max_iter, C=C)

    x_train, y_train, x_test, y_test = load_binary_syntetic(dataset, n_train)

    logger.info('start training')

    start = time()
    clf.fit(x_train, y_train,
            train_scorer=lambda w: compute_score(crf, w, x_train, y_train, invert=False),
            test_scorer=lambda w: compute_score(crf, w, x_test, y_test, invert=False))
    stop = time()

    train_score = clf.score(x_train, y_train)
    test_score = clf.score(x_test, y_test)
    time_elapsed = stop - start

    logger.info('============================================================')
    logger.info('Score on train set: %f', train_score)
    logger.info('Score on test set: %f', test_score)
    logger.info('Elapsed time: %f s', time_elapsed)

    exp_data = {}

    exp_data['train_scores'] = clf.train_scores
    exp_data['test_scores'] = clf.test_scores

    meta_data['dataset_name'] = 'syntetic binary'
    meta_data['annotation_type'] = 'full'
    meta_data['label_type'] = 'full'
    meta_data['trainer'] = 'cutting plane'
    meta_data['train_score'] = train_score
    meta_data['test_score'] = test_score
    meta_data['time_elapsed'] = time_elapsed

    return ExperimentResult(exp_data, meta_data)
Example no. 5: the same binary_over experiment as Example no. 2, reformatted with one argument per line
def binary_over(n_train=100,
                C=1,
                dataset=1,
                max_iter=100,
                verbose=1,
                test_samples=100,
                check_every=10,
                test_method='gco',
                test_n_iter=5,
                relaxed_test=False):
    # save parameters as meta
    meta_data = locals()

    logger = logging.getLogger(__name__)

    crf = HCRF(n_states=2,
               n_features=10,
               n_edge_features=1,
               alpha=1,
               inference_method=test_method,
               n_iter=test_n_iter)
    trainer = Over(n_states=2,
                   n_features=10,
                   n_edge_features=1,
                   C=C,
                   max_iter=max_iter,
                   verbose=verbose,
                   check_every=check_every)

    x_train, y_train, x_test, y_test = \
        load_binary_syntetic(dataset, n_train)
    x_test = x_test[:test_samples]
    y_test = y_test[:test_samples]

    logger.info('start training')

    start = time()
    trainer.fit(
        x_train,
        y_train,
        train_scorer=lambda w: compute_score(
            crf, w, x_train, y_train, invert=True, relaxed=relaxed_test),
        test_scorer=lambda w: compute_score(
            crf, w, x_test, y_test, invert=True, relaxed=relaxed_test))
    stop = time()
    time_elapsed = stop - start

    logger.info('testing')

    test_score = compute_score(crf,
                               trainer.w,
                               x_test,
                               y_test,
                               invert=True,
                               relaxed=relaxed_test)
    train_score = compute_score(crf,
                                trainer.w,
                                x_train,
                                y_train,
                                invert=True,
                                relaxed=relaxed_test)

    logger.info('========================================')
    logger.info('train score: %f', train_score)
    logger.info('test score: %f', test_score)

    exp_data = {}

    exp_data['timestamps'] = trainer.timestamps
    exp_data['objective'] = trainer.objective_curve
    exp_data['w'] = trainer.w
    exp_data['train_scores'] = trainer.train_score
    exp_data['test_scores'] = trainer.test_score
    exp_data['w_history'] = trainer.w_history

    meta_data['dataset_name'] = 'syntetic binary'
    meta_data['annotation_type'] = 'full'
    meta_data['label_type'] = 'full'
    meta_data['trainer'] = 'komodakis'
    meta_data['train_score'] = train_score
    meta_data['test_score'] = test_score
    meta_data['time_elapsed'] = time_elapsed

    return ExperimentResult(exp_data, meta_data)
Example no. 6: the same binary_full_fw experiment as Example no. 3, reformatted with one argument per line
def binary_full_fw(n_train=100,
                   C=1,
                   dataset=1,
                   max_iter=1000,
                   n_inference_iter=5,
                   check_dual_every=10,
                   test_samples=100,
                   inference_method='gco'):
    # save parameters as meta
    meta_data = locals()

    logger = logging.getLogger(__name__)

    crf = HCRF(n_states=2,
               n_features=10,
               n_edge_features=1,
               alpha=1,
               inference_method=inference_method,
               n_iter=n_inference_iter)
    clf = FrankWolfeSSVM(crf,
                         verbose=2,
                         n_jobs=1,
                         check_dual_every=check_dual_every,
                         max_iter=max_iter,
                         C=C)

    x_train, y_train, x_test, y_test = \
        load_binary_syntetic(dataset, n_train)

    logger.info('start training')

    start = time()
    clf.fit(x_train,
            y_train,
            Xtest=x_test[:test_samples],
            Ytest=y_test[:test_samples])
    stop = time()

    train_score = clf.score(x_train, y_train)
    test_score = clf.score(x_test, y_test)
    time_elapsed = stop - start

    logger.info('============================================================')
    logger.info('Score on train set: %f', train_score)
    logger.info('Score on test set: %f', test_score)
    logger.info('Elapsed time: %f s', time_elapsed)

    exp_data = {}

    exp_data['timestamps'] = clf.timestamps_
    exp_data['primal_objective'] = clf.primal_objective_curve_
    exp_data['objective'] = clf.objective_curve_
    exp_data['w_history'] = clf.w_history
    exp_data['test_scores'] = clf.test_scores
    exp_data['train_scores'] = clf.train_scores
    exp_data['w'] = clf.w

    meta_data['dataset_name'] = 'syntetic binary'
    meta_data['annotation_type'] = 'full'
    meta_data['label_type'] = 'full'
    meta_data['trainer'] = 'frank-wolfe'
    meta_data['train_score'] = train_score
    meta_data['test_score'] = test_score
    meta_data['time_elapsed'] = time_elapsed

    return ExperimentResult(exp_data, meta_data)
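
A hypothetical driver, not part of the original examples: it assumes the three experiment functions above are defined in the same module and that logging only needs a basic configuration; the parameter values are arbitrary.

import logging

if __name__ == '__main__':
    # show the loggers' info messages on the console
    logging.basicConfig(level=logging.INFO)

    # run one experiment per trainer on the same synthetic dataset;
    # each call returns an ExperimentResult, and persisting it is left
    # to the surrounding framework
    results = [
        binary_cp(n_train=100, C=0.1, dataset=1),
        binary_over(n_train=100, C=0.1, dataset=1),
        binary_full_fw(n_train=100, C=0.1, dataset=1),
    ]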