Example #1
from copy import deepcopy
from pyFTS.benchmarks import Util as bUtil  # assumed import; matches the bUtil alias used throughout these examples

def process_interval_jobs(dataset, tag, job, conn):
    """
    Extract information from a dictionary with interval benchmark results and save it to a database

    :param dataset: the benchmark dataset name
    :param tag: alias for the benchmark group being executed
    :param job: a dictionary with the benchmark results
    :param conn: a connection to an SQLite database
    :return:
    """

    data = bUtil.process_common_data(dataset, tag, 'interval', job)

    # One row per metric: copy the common columns, append the metric name and value, then insert
    sharpness = deepcopy(data)
    sharpness.extend(["sharpness", job["sharpness"]])
    bUtil.insert_benchmark(sharpness, conn)
    resolution = deepcopy(data)
    resolution.extend(["resolution", job["resolution"]])
    bUtil.insert_benchmark(resolution, conn)
    coverage = deepcopy(data)
    coverage.extend(["coverage", job["coverage"]])
    bUtil.insert_benchmark(coverage, conn)
    time = deepcopy(data)
    time.extend(["time", job["time"]])
    bUtil.insert_benchmark(time, conn)
    Q05 = deepcopy(data)
    Q05.extend(["Q05", job["Q05"]])
    bUtil.insert_benchmark(Q05, conn)
    Q25 = deepcopy(data)
    Q25.extend(["Q25", job["Q25"]])
    bUtil.insert_benchmark(Q25, conn)
    Q75 = deepcopy(data)
    Q75.extend(["Q75", job["Q75"]])
    bUtil.insert_benchmark(Q75, conn)
    Q95 = deepcopy(data)
    Q95.extend(["Q95", job["Q95"]])
    bUtil.insert_benchmark(Q95, conn)
    W05 = deepcopy(data)
    W05.extend(["winkler05", job["winkler05"]])
    bUtil.insert_benchmark(W05, conn)
    W25 = deepcopy(data)
    W25.extend(["winkler25", job["winkler25"]])
    bUtil.insert_benchmark(W25, conn)
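The ten metric blocks above repeat a single pattern: copy the common columns, append the metric name and its value, insert one row. The same writes can be expressed as a loop; a behavior-preserving sketch (the function name is hypothetical, bUtil as imported above):

from copy import deepcopy

def process_interval_jobs_compact(dataset, tag, job, conn):
    # Illustrative rewrite of process_interval_jobs; same rows, same order
    data = bUtil.process_common_data(dataset, tag, 'interval', job)
    for metric in ["sharpness", "resolution", "coverage", "time",
                   "Q05", "Q25", "Q75", "Q95", "winkler05", "winkler25"]:
        row = deepcopy(data)               # fresh copy of the common columns
        row.extend([metric, job[metric]])  # metric name and its value
        bUtil.insert_benchmark(row, conn)  # one row per metric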
Example #2
def process_point_jobs(dataset, tag, job, conn):
    """
    Extract information from a dictionary with point benchmark results and save it to a database

    :param dataset: the benchmark dataset name
    :param tag: alias for the benchmark group being executed
    :param job: a dictionary with the benchmark results
    :param conn: a connection to an SQLite database
    :return:
    """

    data = bUtil.process_common_data(dataset, tag, 'point', job)

    rmse = deepcopy(data)
    rmse.extend(["rmse", job["rmse"]])
    bUtil.insert_benchmark(rmse, conn)
    smape = deepcopy(data)
    smape.extend(["smape", job["smape"]])
    bUtil.insert_benchmark(smape, conn)
    u = deepcopy(data)
    u.extend(["u", job["u"]])
    bUtil.insert_benchmark(u, conn)
    time = deepcopy(data)
    time.extend(["time", job["time"]])
    bUtil.insert_benchmark(time, conn)
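A minimal usage sketch for the function above, assuming the benchmark table already exists with the schema insert_benchmark expects; the database name, dataset, tag, and metric values are invented for illustration, and a real job dict would also carry the model metadata read by process_common_data:

import sqlite3

conn = sqlite3.connect("benchmarks.db")  # assumed: schema created beforehand by the benchmark utilities
job = {"rmse": 0.42, "smape": 3.17, "u": 0.95, "time": 1.8}  # illustrative metric values only
process_point_jobs("TAIEX", "sliding_window", job, conn)
conn.commit()
conn.close()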
Example #3
def process_probabilistic_jobs(dataset, tag, job, conn):
    """
    Extract information from a dictionary with probabilistic benchmark results and save it to a database

    :param dataset: the benchmark dataset name
    :param tag: alias for the benchmark group being executed
    :param job: a dictionary with the benchmark results
    :param conn: a connection to an SQLite database
    :return:
    """

    data = bUtil.process_common_data(dataset, tag, 'density', job)

    crps = deepcopy(data)
    crps.extend(["crps", job["CRPS"]])
    bUtil.insert_benchmark(crps, conn)
    time = deepcopy(data)
    time.extend(["time", job["time"]])
    bUtil.insert_benchmark(time, conn)
    brier = deepcopy(data)
    brier.extend(["brier", job["brier"]])
    bUtil.insert_benchmark(brier, conn)
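Note the key casing above: the row is stored under the name "crps" while the value is read from job["CRPS"]. When a job dictionary may be partially populated, a guarded variant skips missing metrics instead of raising KeyError; a sketch under that assumption (the function name is hypothetical, bUtil as imported above):

from copy import deepcopy

def process_probabilistic_jobs_safe(dataset, tag, job, conn):
    # Illustrative variant: tolerate metrics absent from the job dict
    data = bUtil.process_common_data(dataset, tag, 'density', job)
    for name, key in [("crps", "CRPS"), ("time", "time"), ("brier", "brier")]:
        if key in job:
            row = deepcopy(data)
            row.extend([name, job[key]])
            bUtil.insert_benchmark(row, conn)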