def main(out_path, values_path, neighbors=8, workers=0):
    """Run TTT state-clustering experiment(s)."""

    # load the value function
    with specmine.openz(values_path) as values_file:
        values = pickle.load(values_file)

    # convert states to their vector representations
    states_adict = specmine.tictac.load_adjacency_dict()
    states = list(states_adict)

    logger.info("converting states to their vector representation")

    # index each state by row, and stack raw feature vectors into an N x D array
    affinity_index = dict(zip(states, xrange(len(states))))
    vectors_ND = numpy.array(map(raw_state_features, states))

    # build the affinity graph
    affinity_NN = specmine.discovery.affinity_graph(vectors_ND, neighbors)
    (gameplay_NN, gameplay_index) = specmine.discovery.adjacency_dict_to_matrix(states_adict)

    def yield_jobs():
        for B in numpy.r_[0:400:64j].astype(int):
            yield (run_random_features, [B, vectors_ND, affinity_index, values])
            yield (run_graph_features, ["gameplay", B, vectors_ND, gameplay_NN, gameplay_index, values])
            yield (run_graph_features, ["affinity", B, vectors_ND, affinity_NN, affinity_index, values])

    with open(out_path, "wb") as out_file:
        writer = csv.writer(out_file)

        writer.writerow(["map_name", "features", "score_mean", "score_variance"])

        condor.do_or_distribute(yield_jobs(), workers, lambda _, r: writer.writerow(r))
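# Side note (not part of the original script): numpy.r_[0:400:64j] above uses
# the imaginary "step" to mean "64 evenly spaced points from 0 to 400,
# endpoint included", so the grid of feature counts B it yields is exactly the
# numpy.linspace grid. A standalone check, assuming only plain numpy:
import numpy

feature_counts = numpy.linspace(0, 400, 64).astype(int)

assert (feature_counts == numpy.r_[0:400:64j].astype(int)).all()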
def main(workers=20):
    """Run the TTT feature-number TD evaluation experiment."""

    print 'creating domain and opponent'

    pickle_path = specmine.util.static_path("ttt_states.pickle.gz")
    with specmine.util.openz(pickle_path) as pickle_file:
        adj_dict = pickle.load(pickle_file)

    adj_matrix, index = specmine.discovery.adjacency_dict_to_matrix(adj_dict)

    out_path = specmine.util.static_path('feature_number_test_td.csv')
    w = csv.writer(open(out_path, 'wb'))
    w.writerow(['method', 'features', 'reward_mean', 'reward_variance'])

    def yield_jobs():
        for k in numpy.array(range(1, 11)) * 50:  # feature counts: 50, 100, ..., 500
            yield (run_laplacian_evaluation, [k, adj_matrix, index])
            yield (run_random_evaluation, [k, adj_matrix, index])

    condor.do_or_distribute(yield_jobs(), workers, lambda _, r: w.writerow(r))
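# Sketch of the calling convention both mains rely on: from the calls above,
# condor.do_or_distribute appears to take an iterable of (function, args) job
# pairs, run each job (locally when workers is 0, otherwise on workers), and
# hand the job and its return value to the callback. This serial stand-in is
# an assumption inferred from those calls, not the condor implementation; the
# run_* job functions themselves live elsewhere in the project.
def run_jobs_serially(jobs, callback):
    """Run (function, args) jobs one by one, as with workers == 0."""

    for (function, args) in jobs:
        result = function(*args)

        # each result is one CSV row, e.g. ['method', features, mean, variance]
        callback((function, args), result)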