Example #1
def run_experiment(k, N, dataset=DEFAULT_DATASET, theta=DEFAULT_THETA,
                   dfnc_window=DEFAULT_WINDOW, m=DEFAULT_M, n=DEFAULT_N,
                   metrics=METRIC_NAMES,
                   methods=METHOD_NAMES, **kwargs):
    """
        Run an experiment with a particular choice of
            1. Data set
            2. Data Parameters k, n, theta, dfnc_window, m, n
            3. metric
            4. method
            5. Method parameters passed in kwargs
    """
    X = get_dataset(N, dataset=dataset, theta=theta,
                    dfnc_window=dfnc_window, m=m, n=n)
    res = {method: run_method(X, k, method=method, **kwargs)
           for method in methods}
    measures = {
        res[r]['name']: {
            metric: evaluate_metric(res[r]['X'], res[r]['cluster_labels'], metric)
            for metric in metrics
        }
        for r in res
    }
    return measures, res
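
A minimal usage sketch of run_experiment, relying on the module-level defaults (DEFAULT_DATASET, METRIC_NAMES, METHOD_NAMES, and the other DEFAULT_* values) that the signature above references; the specific k and N values here are illustrative, not taken from the original example.

measures, results = run_experiment(k=2, N=100)
for method_name, scores in measures.items():
    # scores maps each metric name to its evaluate_metric value
    print(method_name, scores)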
Example #2
                local.gradient_step(remote_optimizer, remote_centroids)

        # Check the stopping condition "locally" at the aggregator
        # - returns false if converged
        remote_check, delta = local.check_stopping(remote_centroids, previous,
                                                   epsilon)
        if verbose:
            print("Multi-Shot %s ; iter : %d delta : %f" %
                  (optimization, num_iter, delta))
        not_converged = remote_check
        tracked_delta.append(delta)
        num_iter += 1

    # Compute the final clustering "locally" at the aggregator
    cluster_labels = [
        clusters for node in nodes
        for clusters in local.compute_clustering(node, remote_centroids)
    ]
    return {
        'centroids': remote_centroids,
        'cluster_labels': cluster_labels,
        'X': X,
        'delta': tracked_delta,
        'num_iter': num_iter,
        'name': 'multishot_%s' % optimization
    }


if __name__ == '__main__':
    w = main(get_dataset(100, theta=[[-1, 0.1], [1, 0.1]], m=1, n=2), 2)
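    # Hypothetical follow-up (not part of the original example): assuming
    # main() is the multi-shot routine whose tail is shown above, the
    # returned dict carries the keys built in its return statement.
    print(w['name'])       # 'multishot_<optimization>'
    print(w['num_iter'])   # iterations run before the stopping check passed
    print(w['delta'])      # per-iteration deltas reported by check_stopping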