Example #1
import argparse
import os

import config_io  # project-local config helper
import sqlite_helper_object  # project-local sqlite wrapper


def main():
    """Adds trials to the db file again when the normal script messes up."""

    parser = argparse.ArgumentParser()
    parser.add_argument("--db_file", type=str, required=True)
    parser.add_argument("--trial_dir", type=str, required=True)
    args = parser.parse_args()

    db = sqlite_helper_object.SQLiteHelperObject(args.db_file)
    db.connect()
    for dirname, subdirs, files in os.walk(args.trial_dir):
        if "latency_throughput" in subdirs and "logs" in subdirs:
            csv = os.path.join(dirname, "results.csv")
            # pick the .ini config file that sits alongside the results
            ini_file = next(f for f in files if f.endswith(".ini"))

            params = config_io.read_config_from_file(os.path.join(dirname, ini_file))

            db.insert_csv_data_into_sqlite_table("trials_table", csv, None,
                                                 logs_dir="jennlost",
                                                 cockroach_commit=params["cockroach_commit"],
                                                 server_nodes=params["num_warm_nodes"],
                                                 disabled_cores=params["disable_cores"],
                                                 keyspace=params["keyspace"],
                                                 read_percent=params["read_percent"],
                                                 n_keys_per_statement=params["n_keys_per_statement"],
                                                 skews=params["skews"])
    db.close()

    return 0
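Example #1 leans on the shape of os.walk's output: each iteration yields a (dirname, subdirs, files) triple describing one directory under the root. A minimal, self-contained illustration of that contract:

import os
import tempfile

root = tempfile.mkdtemp()
os.makedirs(os.path.join(root, "trial_0", "latency_throughput"))
os.makedirs(os.path.join(root, "trial_0", "logs"))

for dirname, subdirs, files in os.walk(root):
    # Each triple describes one directory: its path, its immediate
    # subdirectories, and its immediate files.
    print(dirname, subdirs, files)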
Example #2
def main():
    import argparse
    import datetime
    import os

    import config_io  # project-local config helper
    import constants  # project-local constants module

    parser = argparse.ArgumentParser()
    parser.add_argument("ini_file")
    parser.add_argument("lt_ini_file")
    parser.add_argument("--log_dir", type=str, default=constants.SCRATCH_DIR)
    args = parser.parse_args()

    config = config_io.read_config_from_file(args.ini_file)
    lt_config = config_io.read_config_from_file(args.lt_ini_file)

    unique_suffix = datetime.datetime.now().strftime("%Y%m%d_%H%M%S_%f")
    log_dir = os.path.join(args.log_dir, "lt_{}".format(unique_suffix))
    os.makedirs(log_dir, exist_ok=True)
    run(config, lt_config, log_dir)
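The %Y%m%d_%H%M%S_%f format gives a sortable, microsecond-resolution suffix, so concurrent runs land in distinct log directories. The same pattern in isolation (tempfile.gettempdir() stands in for SCRATCH_DIR here):

import datetime
import os
import tempfile

# Sortable, microsecond-resolution suffix, e.g. "20240131_142501_003417".
suffix = datetime.datetime.now().strftime("%Y%m%d_%H%M%S_%f")
log_dir = os.path.join(tempfile.gettempdir(), "lt_{}".format(suffix))
os.makedirs(log_dir, exist_ok=True)
print(log_dir)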
Example #3
def main():
    import argparse
    import datetime
    import os

    import config_io  # project-local config helper
    import constants  # project-local constants module

    parser = argparse.ArgumentParser()
    parser.add_argument("ini_file")
    parser.add_argument("concurrency", type=int)
    parser.add_argument("--log_dir", type=str, default=constants.SCRATCH_DIR)
    args = parser.parse_args()

    config = config_io.read_config_from_file(args.ini_file)
    config["concurrency"] = args.concurrency
    unique_suffix = datetime.datetime.now().strftime("%Y%m%d_%H%M%S_%f")
    log_dir = os.path.join(args.log_dir,
                           "run_single_trial_{0}".format(unique_suffix))
    os.makedirs(log_dir, exist_ok=True)

    run(config, log_dir)
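config_io is project-local, so its exact behavior isn't shown in these examples. If read_config_from_file wraps Python's standard configparser, a minimal stand-in might look like the sketch below; the single section name "config" is an assumption.

import configparser

def read_config_from_file(path):
    # Hypothetical stand-in for the project's config_io helper: parse an
    # .ini file and return one section as a plain dict.
    parser = configparser.ConfigParser()
    parser.read(path)
    # Assumes a single section named "config"; the real helper may differ.
    return dict(parser["config"])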
Example #4
def main():
    cfg_lt_pairs = process_and_setup_configs(DB_DIR, CONFIG_OBJ_LIST)
    # db_connector = setup_sqlite_db(DB_DIR)

    for cfg_fpath, lt_fpath in cfg_lt_pairs:

        # generate config object
        cfg = generate_configs.generate_configs_from_files_and_add_fields(
            cfg_fpath)

        # generate lt_config objects that match those config objects
        lt_cfg = config_io.read_config_from_file(lt_fpath)

        # make directory in which trial will be run
        logs_dir = generate_dir_name(cfg[constants.CONFIG_FPATH_KEY], DB_DIR)
        os.makedirs(logs_dir, exist_ok=True)

        # copy over config into directory
        system_utils.call("cp {0} {1}".format(cfg[constants.CONFIG_FPATH_KEY],
                                              logs_dir))

        # generate latency throughput trials
        cfg = adjust_cfg(cfg)
        lt_fpath_csv = latency_throughput.run(
            cfg, lt_cfg, logs_dir, run_func=run_single_trial_wrapper)

        # run trial
        cfg["concurrency"] = latency_throughput.find_optimal_concurrency(
            lt_fpath_csv)
        cfg = adjust_cfg(cfg)
        results_fpath_csv = run_single_trial_wrapper(cfg, logs_dir)

        # insert results into the sqlite db
        # NOTE: this insertion method isn't correct, so it stays disabled:
        # insert_into_sqlite_db(db_connector,
        #                       results_fpath_csv,
        #                       logs_dir,
        #                       cfg)

    return 0
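One portability note on the cp shell-out in the loop above: it only works where a POSIX cp binary exists. A behavior-equivalent sketch using only the standard library (copy_config_into_logs_dir is a hypothetical name):

import os
import shutil

def copy_config_into_logs_dir(cfg_fpath, logs_dir):
    # Portable replacement for: system_utils.call("cp {0} {1}".format(...))
    os.makedirs(logs_dir, exist_ok=True)
    shutil.copy(cfg_fpath, logs_dir)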
Example #5
def main():
    # parse the arguments needed when recovering from a previous failed run
    parser = argparse.ArgumentParser()
    parser.add_argument("--recovery_mode", action="store_true")
    parser.add_argument("--recovery_file")
    parser.add_argument("--db_dir")
    args = parser.parse_args()
    assert_args_are_correct(args)

    db_dir = args.db_dir if args.recovery_mode else DB_DIR
    files_to_process = (args.recovery_file if args.recovery_mode else
                        os.path.join(db_dir, "configs_to_process.csv"))

    if not args.recovery_mode:
        # create the database and table
        os.makedirs(db_dir, exist_ok=True)

        # populate configs to process
        for cfg_obj, lt_fpath in CONFIG_OBJ_LIST:
            cfg_fpath_list = cfg_obj.generate_all_config_files()
            data = [{
                constants.CONFIG_FPATH_KEY: cfg_fpath,
                "lt_fpath": lt_fpath
            } for cfg_fpath in cfg_fpath_list]
            csv_utils.append_data_to_file(data, files_to_process)

    # truncate the failed-configs file so it only holds failures from this round
    failed_configs_csv = os.path.join(db_dir, "failed_configs.csv")
    open(failed_configs_csv, "w").close()

    # connect to db
    db = sqlite_helper_object.SQLiteHelperObject(
        os.path.join(db_dir, "trials.db"))
    db.connect()
    _, cfg_lt_tuples = csv_utils.read_in_data_as_tuples(files_to_process,
                                                        has_header=False)

    for cfg_fpath, lt_fpath in cfg_lt_tuples:

        # generate config object
        cfg = generate_configs.generate_configs_from_files_and_add_fields(
            cfg_fpath)

        # generate lt_config objects that match those config objects
        lt_cfg = config_io.read_config_from_file(lt_fpath)

        # try:  (disabled; the matching except block is commented out below)
        # make directory in which trial will be run
        logs_dir = generate_dir_name(cfg[constants.CONFIG_FPATH_KEY], db_dir)
        os.makedirs(logs_dir, exist_ok=True)

        # copy over config into directory
        system_utils.call("cp {0} {1}".format(cfg[constants.CONFIG_FPATH_KEY],
                                              logs_dir))

        # generate latency throughput trials
        lt_fpath_csv = latency_throughput.run(cfg, lt_cfg, logs_dir)

        # run trial
        cfg["concurrency"] = latency_throughput.find_optimal_concurrency(
            lt_fpath_csv)
        results_fpath_csv = run_single_data_point.run(cfg, logs_dir)

        # insert into sqlite db
        # TODO get the actual commit hash, not the branch
        db.insert_csv_data_into_sqlite_table(
            "trials_table",
            results_fpath_csv,
            None,
            logs_dir=logs_dir,
            cockroach_commit=cfg["cockroach_commit"],
            server_nodes=cfg["num_warm_nodes"],
            disabled_cores=cfg["disable_cores"],
            keyspace=cfg["keyspace"],
            read_percent=cfg["read_percent"],
            n_keys_per_statement=cfg["n_keys_per_statement"],
            skews=cfg["skews"])

        # except BaseException as e:
        #     print("Config {0} failed to run, continue with other configs. e:[{1}]"
        #           .format(cfg[constants.CONFIG_FPATH_KEY], e))
        #     csv_utils.append_data_to_file(
        #         [{constants.CONFIG_FPATH_KEY: cfg[constants.CONFIG_FPATH_KEY],
        #           "lt_fpath": lt_fpath}],
        #         failed_configs_csv)

    db.close()
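sqlite_helper_object is project-local and its insert_csv_data_into_sqlite_table implementation isn't shown. Assuming it appends each row of the results CSV, extended with the extra keyword columns, into an existing table, a rough standard-library stand-in could be:

import csv
import sqlite3

def insert_csv_data_into_sqlite_table(db_path, table, csv_fpath, **extra_cols):
    # Hypothetical stand-in for the project's sqlite helper: append every
    # CSV row, extended with the extra keyword columns, to `table`.
    # NOTE: table and column names are interpolated directly, so this is
    # only safe for trusted, programmatic inputs.
    with sqlite3.connect(db_path) as conn, open(csv_fpath, newline="") as f:
        for row in csv.DictReader(f):
            row.update(extra_cols)
            columns = ", ".join(row)
            placeholders = ", ".join("?" for _ in row)
            conn.execute(
                "INSERT INTO {0} ({1}) VALUES ({2})".format(
                    table, columns, placeholders),
                list(row.values()))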
def generate_configs_from_files_and_add_fields(file):
    # Read the config and remember which file it came from for bookkeeping.
    cfg = config_io.read_config_from_file(file)
    cfg["config_fpath"] = file
    return cfg
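csv_utils is likewise project-local. Assuming append_data_to_file appends a list of dicts as CSV rows and writes a header only when the file is new, a plausible stand-in is:

import csv
import os

def append_data_to_file(rows, fpath):
    # Hypothetical stand-in for the project's csv_utils helper.
    if not rows:
        return
    write_header = not os.path.exists(fpath) or os.path.getsize(fpath) == 0
    with open(fpath, "a", newline="") as f:
        writer = csv.DictWriter(f, fieldnames=list(rows[0].keys()))
        if write_header:
            writer.writeheader()
        writer.writerows(rows)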