#!/usr/bin/env python3
from os.path import exists, join
from time import time
from sys import exit
from subprocess import getstatusoutput
from common import WORKSPACE_ROOT, DATASET_ROOT, LOG_ROOT, get_all_dataset_path, parse_all_metadata

# Locations of the Groute checkout and the compiled BFS benchmark binary.
GROUTE_PATH = join(WORKSPACE_ROOT, "groute")
GROUTE_BUILD_PATH = join(GROUTE_PATH, "build")
BFS_BINARY_PATH = join(GROUTE_BUILD_PATH, "bfs")

# Fail fast if the benchmark has not been built yet; include the path that
# was checked so the user knows where the binary is expected to live.
if not exists(BFS_BINARY_PATH):
    exit("Could not find BFS binary at %s" % BFS_BINARY_PATH)

for data_dirname, metadata in parse_all_metadata().items():
    link = next((x for x in metadata["links"] if "undirected" in x and x.endswith("gr")), None)
    data_filename = link.split("/")[-1]
    data_path = join(join(DATASET_ROOT, data_dirname), data_filename)

    if not exists(data_path):
        exit("Could not found file %s" % data_path)

    # required parameter
    is_sparse = bool(metadata["sparse"])
    source_node = metadata["source_node"]
    prio_delta = metadata["Groute_bfs_prio_delta_fused"]

    timestamp = str(int(time()))
    log_path = join(LOG_ROOT, "Groute_bfs_%s_%s.log" % (data_dirname, timestamp))
    cmd = "%s" \
          " -graphfile=%s" \
# ---------------------------------------------------------------------------
# Example #2 (second script). NOTE(review): the first script above is
# truncated mid-statement — its `cmd = ...` string continuation was cut off
# by the paste/extraction that produced this file; recover it from the
# original source before running.
# ---------------------------------------------------------------------------
#!/usr/bin/env python3
from os import listdir, system
from os.path import exists, join, isdir, isfile
from sys import exit
from common import DATASET_ROOT, parse_all_metadata

# Fail fast when the dataset directory is missing — there is nothing to
# check against or download into. Include the path for easier diagnosis.
if not exists(DATASET_ROOT):
    exit("Cannot find dataset path: %s" % DATASET_ROOT)

res = parse_all_metadata()

failed = []  # links whose download did not complete successfully

# For every dataset directory, download each linked file unless it is
# already present on disk.
for data_dirname, metadata in res.items():
    # Invariant for the inner loop: all files of one dataset share a dir.
    data_dirpath = join(DATASET_ROOT, data_dirname)

    for link in metadata["links"]:
        data_filename = link.split("/")[-1]
        data_path = join(data_dirpath, data_filename)

        if exists(data_path):
            print("%s exists" % data_filename)
            continue

        # `wget -P` creates the target directory if needed. NOTE(review):
        # the link/path are interpolated unquoted into a shell command;
        # acceptable only because metadata links are project-controlled.
        # A non-zero exit status means the download failed — record it
        # instead of silently claiming success at the end.
        status = system('wget -P %s %s' % (data_dirpath, link))
        if status != 0:
            failed.append(link)

if failed:
    exit("Failed to download %d file(s): %s" % (len(failed), ", ".join(failed)))

print("All dataset downloaded")