Exemplo n.º 1
0
def store_results_csv(list_of_results, filename):
    '''
	Store evaluation results as a csv file.

	args:
		list_of_results: list of result objects; each must provide to_dict().
			The fieldnames are taken from the first element's dict keys.
		filename: target path; gdo.check_filepath splits it into directory
			and base name, and ".csv" is appended.
	'''
    logging.debug("Store evaluation results to csv file: " + filename)
    [path, filename] = gdo.check_filepath(filename)

    # Guard against an empty result list: there is no first element to
    # derive the csv header from, so write nothing instead of crashing.
    if not list_of_results:
        logging.debug("No results to store; skipping csv export")
        return

    # newline='' is required by the csv module to avoid blank rows on
    # platforms with \r\n line endings.
    with open(path + filename + ".csv", 'w', newline='') as csvfile:
        csvwriter = csv.DictWriter(
            csvfile, fieldnames=list_of_results[0].to_dict().keys())
        csvwriter.writeheader()
        for r in list_of_results:
            csvwriter.writerow(r.to_dict())
Exemplo n.º 2
0
def load_stats_from_file(datadir):
    '''
	Read the stats.json file located in datadir and return its parsed
	contents, or None when the resolved directory does not exist.
	'''
    path, filename = gdo.check_filepath(datadir + "/stats.json")

    # Missing directory: silently return None for now.
    ## TO DO: raise error
    if not os.path.isdir(path):
        return

    # Ensure the filename carries the .json extension.
    if ".json" not in filename:
        filename += ".json"

    with open(path + filename) as jsonfile:
        return json.load(jsonfile)
Exemplo n.º 3
0
def run_subset_of_experiments(algo, randomized, repetitions, reduce_graph,
                              timelimit, datadir, filename, result_filename):
    '''
	Run a specified algorithm on all graphs of a single dataset-file
	'''
    list_of_graphs = gdo.load_graphs_from_json(datadir + "/input/" + filename)
    # One experiment per graph in the input file.
    results = [
        run_single_experiment(
            EvalData(algo, graphdata, randomized, repetitions, reduce_graph,
                     timelimit)) for graphdata in list_of_graphs
    ]
    # Persist the collected results in both supported formats.
    store_results_json(results, datadir + "/results/" + result_filename)
    store_results_csv(results, datadir + "/results/" + result_filename)
Exemplo n.º 4
0
def load_evaldata_from_json(basedir, filename):
    '''
	Loads the Evaldata from a specific file.

	args:
		basedir: base directory; results are read from basedir/results/
			and the matching graph inputs from basedir/input/.
		filename: result file name; ".json" is appended when missing.

	returns:
		A list of em.EvalData objects, one per record in the result file.
	'''
    graphdataset = []
    evaldataset = []
    filepath = basedir + "/results/" + filename
    if not "json" in filepath:
        filepath += ".json"
    with open(filepath, "r") as jsonfile:
        dataset = json.load(jsonfile)
        for data in dataset:
            # Graph id is the input_id with its file extension stripped.
            graph_id = re.split(r'\.', data["input_id"])[0]
            # Lazy-load the graph dataset once, on the first record:
            # the graph file name is the input_id minus its last
            # underscore-separated component, plus ".json".
            if graphdataset == []:
                graphdatafile = "_".join(
                    re.split(r'_', data["input_id"])[:-1]) + ".json"
                graphdataset = gdo.load_graphs_from_json(basedir + "/input/" +
                                                         graphdatafile)
            # Find the graph matching this record's id (extension-less
            # comparison; gd.id is normalized in place).
            graphdata = None
            for gd in graphdataset:
                gd.id = re.split(r'\.', gd.id)[0]
                if gd.id == graph_id:
                    graphdata = gd
                    break
            # Backfill defaults for fields absent in legacy result files.
            if "reduce_graph" not in data:
                data["reduce_graph"] = True
            if "timelimit" not in data:
                data["timelimit"] = -1
            if "randomized" not in data:
                data["randomized"] = False
            if "repetitions" not in data:
                data["repetitions"] = 1
            # Records without an "algo" field get a generic EvalData.
            if "algo" in data:
                evaldata = em.EvalData(data["algo"], graphdata,
                                       data["randomized"], data["repetitions"],
                                       data["reduce_graph"], data["timelimit"])
            else:
                evaldata = em.EvalData("generic", graphdata)
            evaldata.set_results(data["output"], data["running_time"])
            # Optional aggregate statistics (only present for randomized runs,
            # presumably — confirm against the writer side).
            if "output mean" in data:
                evaldata.out_mean = data["output mean"]
            if "output variance" in data:
                evaldata.out_var = data["output variance"]
            evaldataset.append(evaldata)
    return evaldataset
Exemplo n.º 5
0
MAX_CLIQUE_SIZE = 4
# Log the construction parameters. NUM_NODES, PROB_EDGES, NUM_EDGES_PLANAR
# and MAX_DEGREE are defined earlier in this file (not visible in this chunk).
logging.info("Parameters for graph construction:")
logging.info("NUM_NODES: " + str(NUM_NODES))
logging.info("PROB_EDGES: " + str(PROB_EDGES))
logging.info("NUM_EDGES_PLANAR: " + str(NUM_EDGES_PLANAR))
logging.info("MAX_DEGREE: " + str(MAX_DEGREE))
logging.info("MAX_CLIQUE_SIZE: " + str(MAX_CLIQUE_SIZE))

logging.info("Start tests")
print("Start tests")

# ======= Test file operations =======
# Round-trip check: write a test graph to json, then read it back.
if DO_TEST_FIO:
    logging.info("======= TEST FILE OPERATIONS =======")
    print("TEST FILE OPERATIONS")
    gdo.write_graphs_to_json([GRAPH_TEST], TEST_FILEPATH)
    gdo.load_graphs_from_json(TEST_FILEPATH)

if DO_TEST_GGEN:
    # ======= Test graph construction algorithms =======
    logging.info("======= TEST GRAPH CONSTRUCTION ALGORITHMS =======")
    print("TEST GRAPH CONSTRUCTION ALGORITHMS")
    gg = gca.GraphGenerator()

    # ===== Connected Erdös-Renyi-Graph =====
    # Build a connected ER graph and log its basic size statistics.
    logging.info("===== TEST CONNECTED ER-GRAPH =====")
    print("TEST CONNECTED ER-GRAPH")
    connected_er = gg.construct_connected_er(NUM_NODES, PROB_EDGES)
    logging.debug("Constructed graph:")
    logging.debug("number of nodes: " + str(len(connected_er.nodes())))
    logging.debug("number of edges: " + str(len(connected_er.edges())))
Exemplo n.º 6
0
def store_results_json(list_of_results, filename):
    '''
	Serialize evaluation results to a json file using the project's
	custom encoder; ".json" is appended to the resolved filename.
	'''
    logging.debug("Store evaluation results to json file: " + filename)
    path, filename = gdo.check_filepath(filename)
    target = path + filename + ".json"

    with open(target, 'w') as jsonfile:
        json.dump(list_of_results, jsonfile, cls=meta.My_JSON_Encoder)
Exemplo n.º 7
0
def run_build_all(forcenew=False):
    '''
	Construct the full graph set for every configured graph class.

	args:
		forcenew: accepted for interface symmetry with run_eval_all;
			currently unused. NOTE(review): presumably it should be
			forwarded to construct_full_set_graphs — confirm.
	'''
    # Loop variable renamed from `set`, which shadowed the builtin.
    for graph_class in gs.GRAPH_CLASSES:
        gdo.construct_full_set_graphs(graph_class)
Exemplo n.º 8
0
    # Dispatch on the command-line `mode`. The variables referenced here
    # (mode, dataset, algo_code, forcenew, threaded, num_iter, reduced,
    # timelimit, data_dir, randomized) are bound earlier in the enclosing
    # scope, outside this chunk.
    if mode == "test":
        # Running the test suite is triggered by importing the module.
        import tests

    elif mode == "evalall":
        run_eval_all(forcenew)

    elif mode == "buildall":
        run_build_all(forcenew)

    elif (mode == "undefined" or dataset == "undefined"
          or (mode == "eval" and algo_code == None)):
        # Required parameters missing: report and show usage.
        print("Error! Missing parameters!")
        printhelp()

    elif mode == "build":
        gdo.construct_full_set_graphs(dataset, threaded=threaded)

    elif mode == "eval":
        algo = ALGORITHMS[algo_code]
        # Algorithm codes containing "_R" denote randomized variants.
        if "_R" in algo_code:
            randomized = True

        em.run_set_of_experiments(algo,
                                  data_dir,
                                  randomized=randomized,
                                  repetitions=num_iter,
                                  threaded=threaded,
                                  reduce_graph=reduced,
                                  timelimit=timelimit,
                                  force_new_data=forcenew)
Exemplo n.º 9
0
def load_data(graphclass="general",
              density_class="dense",
              n=None,
              p=None,
              rel_m=None,
              d=None,
              c=None,
              algocode=None,
              randomized=False,
              rand_repetitions=None,
              reduced=False,
              axis="OUTPUT",
              keep_nulls=False,
              cutoff_at_timelimit=False):
    '''
	loads all data from the evaldata-database that is conform to the specified parameters.

	args:
		n, p, rel_m, d, c: restrictions on the subclass of graphs.
			Only restrictions that are not "None" are considered.
		axis: "OUTPUT" or "TIME", defines which data to load
		keep_nulls: if False, null-entries are removed from the data before returning
		cutoff_at_timelimit : if True, evaldata that terminated with exceeded timelimit is considered as not terminated

	returns:
		nested dict data[n][p][rel_m][d][c][density_class] holding the
		per-file axis data loaded via load_axis_data_from_file.

	raises:
		gdo.ParameterMissingException on invalid or inconsistent parameters.
	'''
    logging.debug("sm.load_data")

    # --- parameter validation -------------------------------------------
    if graphclass not in gs.GRAPH_CLASSES:
        raise gdo.ParameterMissingException("Wrong parameter: graphclass: " +
                                            graphclass)

    if density_class not in ["dense", "sparse"]:
        raise gdo.ParameterMissingException(
            "Wrong parameter: density_class: " + density_class)

    #if p == None and rel_m == None:
    #	raise gdo.ParameterMissingException("Missing parameters in initialization: p or rel_m")

    if density_class == "dense" and not graphclass == "general":
        raise gdo.ParameterMissingException(
            "Incompatible parameters: graphclass: not general and density_class: dense"
        )

    if graphclass == "maxdeg" and d is None:
        raise gdo.ParameterMissingException(
            "Missing parameters in initialization: d")

    if graphclass == "maxclique" and c is None:
        raise gdo.ParameterMissingException(
            "Missing parameters in initialization: c")

    if algocode not in gs.BASE_ALGO_CODES:
        # str() so a None algocode yields the error message instead of a
        # TypeError from string concatenation.
        raise gdo.ParameterMissingException("Wrong parameter: algocode: " +
                                            str(algocode))

    if randomized and rand_repetitions is None:
        raise gdo.ParameterMissingException(
            "Missing parameters in initialization: rand_repetitions")

    base_dir = "data/eval/random_" + graphclass + "/results"

    # --- build the option lists: an explicit value restricts the sweep,
    # None means "all configured settings", and -1 marks a dimension that
    # does not apply to this graph/density class. --------------------------
    if n is None:
        options_for_n = gs.GRAPH_SIZES
    else:
        options_for_n = [n]

    if density_class == "dense":
        if p is None:
            options_for_p = gs.GRAPH_DENSITIY_P
        else:
            options_for_p = [p]
    else:
        options_for_p = [-1]

    if density_class == "sparse":
        if rel_m is None:
            options_for_relm = gs.SPARSE_DENSITY_RELM
        else:
            options_for_relm = [rel_m]
    else:
        options_for_relm = [-1]

    if graphclass == "maxdeg":
        if d is None:
            options_for_d = gs.MAXDEGREE_SETTINGS
        else:
            options_for_d = [d]
    else:
        options_for_d = [-1]

    if graphclass == "maxclique":
        if c is None:
            options_for_c = gs.MAXCLIQUE_SETTINGS
        else:
            options_for_c = [c]
    else:
        options_for_c = [-1]

    # --- sweep all parameter combinations and load each result file ------
    data = {}
    for n in options_for_n:
        if n not in data:
            data[n] = {}
        for p in options_for_p:
            if p not in data[n]:
                data[n][p] = {}
            for rel_m in options_for_relm:
                if rel_m not in data[n][p]:
                    data[n][p][rel_m] = {}
                for d in options_for_d:
                    if d not in data[n][p][rel_m]:
                        data[n][p][rel_m][d] = {}
                    for c in options_for_c:
                        if c not in data[n][p][rel_m][d]:
                            data[n][p][rel_m][d][c] = {}
                        # Reconstruct the graph file base name exactly as
                        # the writer produced it.
                        if density_class == "dense":
                            p_as_string = "{0:.2f}".format(p)
                            graph_base_filename = "dense_n" + str(
                                n) + "_p" + p_as_string
                        elif density_class == "sparse":
                            graph_base_filename = "sparse_n" + str(
                                n) + "_relm" + str(rel_m)
                        if graphclass == "maxdeg":
                            graph_base_filename += "_d" + str(d)
                        if graphclass == "maxclique":
                            graph_base_filename += "_c" + str(c)
                        # Dots (e.g. from "0.50") are stripped in filenames.
                        graph_filename = re.sub(r'\.', '', graph_base_filename)
                        # Encode the run options into the algo code:
                        # _R<k> randomized with k repetitions, _B baseline
                        # (non-reduced), _X placeholder otherwise.
                        extended_algo_code = algocode
                        if randomized:
                            extended_algo_code += "_R" + str(rand_repetitions)
                        else:
                            extended_algo_code += "_X"
                        if not reduced:
                            extended_algo_code += "_B"
                        else:
                            extended_algo_code += "_X"

                        evaldata_filename = "results_triangulate_" + extended_algo_code + "_" + graph_filename
                        #print (evaldata_filename)

                        filepath = base_dir + "/" + evaldata_filename + ".json"
                        data[n][p][rel_m][d][c][
                            density_class] = load_axis_data_from_file(
                                filepath, axis, keep_nulls,
                                cutoff_at_timelimit)
    return data