Example #1
# Assumed project-level imports (not shown in the original snippet):
# from sklearn.preprocessing import normalize
# import utils, run_detectors  # plus a module-level `classifiers` list
def karate_results(embeddings, names, n_reps, train_size):
    """Run node-classification experiments on the Zachary karate club data and
    write macro/micro F1 scores and p-values to CSV."""
    deepwalk_path = '../../local_resources/zachary_karate/size8_walks1_len10.emd'

    y_path = '../../local_resources/zachary_karate/y.p'
    x_path = '../../local_resources/zachary_karate/X.p'

    # target is only needed for the commented-out deepwalk comparison below
    target = utils.read_target(y_path)

    x, y = utils.read_data(x_path, y_path, threshold=0)

    # label for the raw-feature (logistic regression) baseline
    names.append(['logistics'])

    # optional deepwalk comparison:
    # x_deepwalk = utils.read_embedding(deepwalk_path, target)
    # all_features = np.concatenate((x.toarray(), x_deepwalk), axis=1)

    # feature sets under test: the supplied embeddings plus column-normalised raw features
    X = embeddings + [normalize(x, axis=0)]

    results = []
    for features, name in zip(X, names):
        tmp = run_detectors.run_experiments(features, y, name, classifiers,
                                            n_reps, train_size)
        results.append(tmp)
    all_results = utils.merge_results(results, n_reps)
    results, tests = utils.stats_test(all_results)

    # use a single timestamp so all four output files share the same suffix
    timestamp = utils.get_timestamp()
    tests[0].to_csv('../../results/karate/tf_macro_pvalues' + timestamp + '.csv')
    tests[1].to_csv('../../results/karate/tf_micro_pvalues' + timestamp + '.csv')
    print('macro', results[0])
    print('micro', results[1])
    macro_path = '../../results/karate/tf_macro' + timestamp + '.csv'
    micro_path = '../../results/karate/tf_micro' + timestamp + '.csv'
    results[0].to_csv(macro_path, index=True)
    results[1].to_csv(micro_path, index=True)
    return results
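
A minimal calling sketch, assuming the surrounding module already provides `utils`, `run_detectors` and the `classifiers` list, and that each embedding is an (n_nodes x d) NumPy array; the embedding dimension, `n_reps` and `train_size` values are illustrative only.

import numpy as np

n_nodes, dim = 34, 2  # the karate club graph has 34 nodes; the dimension is illustrative
embeddings = [np.random.rand(n_nodes, dim) for _ in range(2)]
names = [['embedding_run_1'], ['embedding_run_2']]  # one label list per embedding
results = karate_results(embeddings, names, n_reps=10, train_size=4)
# results[0] holds the macro-F1 table, results[1] the micro-F1 table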
Example #2
def run_embedding_array(embeddings, names, n_reps, train_size):
    """
    Embeddings show significant run-to-run variation, so many embeddings generated
    with the same parameters must be compared to assess quality.
    :param embeddings: a list of embedding matrices to evaluate
    :param names: a list of labels, one per embedding
    :param n_reps: number of experiment repetitions
    :param train_size: training-set size passed through to run_experiments
    :return: A tuple of pandas DataFrames (macro, micro)
    """
    y_path = 'local_resources/zachary_karate/y.p'
    x_path = 'local_resources/zachary_karate/X.p'

    x, y = utils.read_data(x_path, y_path, threshold=0)

    results = []
    for features, name in zip(embeddings, names):
        tmp = run_detectors.run_experiments(features, y, name, classifiers, n_reps, train_size)
        results.append(tmp)

    # note: unlike karate_results above, merge_results is called here without n_reps
    all_results = utils.merge_results(results)

    return all_results
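
A sketch of how several embeddings trained with identical parameters might be passed in, under the same assumptions about project-level modules; `train_embedding` is a hypothetical stand-in for whatever routine produces each embedding, and the argument values are illustrative.

embeddings, names = [], []
for i in range(5):
    emb = train_embedding(seed=i)  # hypothetical: retrain with the same hyperparameters
    embeddings.append(emb)
    names.append(['run_{}'.format(i)])
all_results = run_embedding_array(embeddings, names, n_reps=10, train_size=4)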