Example #1
def _record_scenario(scenario_id, runtime):
    """Updates execute and scenario list on server after simulation.

    :param str scenario_id: scenario index.
    :param int runtime: runtime of simulation in seconds.
    """

    # Update status in ExecuteList.csv on server
    insert_in_file(const.EXECUTE_LIST, scenario_id, "status", "finished")

    hours, minutes, seconds = sec2hms(runtime)
    insert_in_file(const.SCENARIO_LIST, scenario_id, "runtime",
                   "%d:%02d" % (hours, minutes))
Example #2
def main(args):
    # Get scenario info if using PowerSimData
    if args.scenario_id:
        scenario_args = get_scenario(args.scenario_id)

        args.start_date = scenario_args[0]
        args.end_date = scenario_args[1]
        args.interval = scenario_args[2]
        args.input_dir = scenario_args[3]
        args.execute_dir = scenario_args[4]

        # Update status in ExecuteList.csv on server
        insert_in_file(const.EXECUTE_LIST, args.scenario_id, "status",
                       "running")

    # Check to make sure all necessary arguments are there
    # (start_date, end_date, interval, input_dir)
    if not (args.start_date and args.end_date and args.interval
            and args.input_dir):
        err_str = ("The following arguments are required: "
                   "start-date, end-date, interval, input-dir")
        raise WrongNumberOfArguments(err_str)

    launcher = GurobiLauncher(
        args.start_date,
        args.end_date,
        args.interval,
        args.input_dir,
    )
    runtime = launcher.launch_scenario(args.execute_dir, args.threads)

    # If using PowerSimData, record the runtime
    if args.scenario_id:
        _record_scenario(args.scenario_id, runtime)
        args.matlab_dir = const.INPUT_DIR
        args.output_dir = const.OUTPUT_DIR

    if args.extract_data:
        if not args.execute_dir:
            args.execute_dir = os.path.join(args.input_dir, "output")

        extract_scenario(
            args.execute_dir,
            args.start_date,
            args.end_date,
            scenario_id=args.scenario_id,
            output_dir=args.output_dir,
            mat_dir=args.matlab_dir,
            keep_mat=args.keep_matlab,
        )
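
To illustrate what main() expects when PowerSimData is not used, here is a hypothetical direct invocation; the attribute names mirror those accessed above, and all paths, dates, and values are placeholders rather than the project's real CLI defaults:

import argparse

args = argparse.Namespace(
    scenario_id=None,                  # skip the PowerSimData branch
    start_date="2016-01-01 00:00:00",  # placeholder date format
    end_date="2016-01-07 23:00:00",
    interval=24,
    input_dir="/path/to/input",
    execute_dir=None,
    threads=None,
    extract_data=False,
)
main(args)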
Example #3
def test_insert_in_file():
    shape = (10, 100)
    table = pd.DataFrame({
        c: np.random.randint(0, 1000, size=shape[1])
        for c in string.ascii_lowercase[:shape[0]]
    })
    table.index.name = "id"

    filename = os.path.join(
        pathlib.Path(__file__).parent.absolute(), "test_insert_in_file.csv")

    table.to_csv(filename)
    cell = (np.random.choice(table.index), np.random.choice(table.columns))
    try:
        insert_in_file(filename, cell[0], cell[1], table.loc[cell])
        assert table.equals(pd.read_csv(filename, index_col=0))
    finally:
        for f in glob.glob(filename + "*"):
            os.remove(f)
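
The test only checks that writing back a value already present leaves the CSV unchanged, which suggests insert_in_file reads the file, overwrites a single cell, and writes the table back. A minimal sketch of that behavior, under that assumption (the project's actual implementation may do more, e.g. locking or backups):

import pandas as pd

def insert_in_file_sketch(filename, row_id, column, value):
    # Hypothetical stand-in for insert_in_file: load the CSV with its
    # "id" index, overwrite one cell, and write the table back in place.
    table = pd.read_csv(filename, index_col=0)
    table.loc[row_id, column] = value
    table.to_csv(filename)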
Example #4
    # If using PowerSimData, record the runtime
    if args.scenario_id:
        _record_scenario(args.scenario_id, runtime)
        args.matlab_dir = const.INPUT_DIR
        args.output_dir = const.OUTPUT_DIR

    if args.extract_data:
        if not args.execute_dir:
            args.execute_dir = os.path.join(args.input_dir, "output")

        extract_scenario(
            args.execute_dir,
            args.start_date,
            args.end_date,
            scenario_id=args.scenario_id,
            output_dir=args.output_dir,
            mat_dir=args.matlab_dir,
            keep_mat=args.keep_matlab,
        )


if __name__ == "__main__":
    args = parser.parse_call_args()
    try:
        main(args)
    except Exception as ex:
        print(ex)  # sent to redirected stdout/stderr
        if args.scenario_id:
            insert_in_file(const.EXECUTE_LIST, args.scenario_id, "status",
                           "failed")
Example #5
def extract_scenario(
    execute_dir,
    start_date,
    end_date,
    scenario_id=None,
    output_dir=None,
    mat_dir=None,
    freq="H",
    keep_mat=True,
):
    """Extracts data and save data as pickle files to the output directory

    :param str execute_dir: directory containing all of the result.mat files from REISE.jl
    :param str start_date: the start date of the simulation run
    :param str end_date: the end date of the simulation run
    :param str scenario_id: optional identifier for the scenario, used to label output files
    :param str output_dir: optional directory in which to store the outputs. defaults to the execute_dir
    :param str mat_dir: optional directory in which to store the converted grid.mat file. defaults to the execute_dir
    :param bool keep_mat: optional parameter to keep the large result*.mat files after the data has been extracted. Defaults to True.
    """

    # If output or input dir were not specified, default to the execute_dir
    output_dir = output_dir or execute_dir
    mat_dir = mat_dir or execute_dir

    # Copy input.mat from REISE.jl and convert to .mat v7 for scipy compatibility
    converted_mat_path = copy_input(execute_dir, mat_dir, scenario_id)

    # Extract outputs, infeasibilities, cost
    mat_results = glob.glob(os.path.join(execute_dir, "result_*.mat"))
    mat_results = sorted(mat_results, key=result_num)

    outputs, infeasibilities, cost = extract_data(mat_results)

    # Write log file with costs for each result*.mat file
    build_log(mat_results, cost, output_dir, scenario_id)

    # Update outputs with date indices from the copied input.mat file
    matfile = loadmat(converted_mat_path, squeeze_me=True, struct_as_record=False)
    _update_outputs_labels(outputs, start_date, end_date, freq, matfile)

    # Save pickles
    pkl_path = _get_pkl_path(output_dir, scenario_id)

    for k, v in outputs.items():
        v.to_pickle(pkl_path(k.upper()))

    # Calculate and save averaged congestion
    calculate_averaged_congestion(outputs["congl"], outputs["congu"]).to_pickle(
        pkl_path("AVERAGED_CONG")
    )

    if scenario_id:
        # Record infeasibilities
        insert_in_file(
            const.SCENARIO_LIST,
            scenario_id,
            "infeasibilities",
            "_".join(infeasibilities),
        )

        # Update execute and scenario list
        insert_in_file(const.EXECUTE_LIST, scenario_id, "status", "extracted")
        insert_in_file(const.SCENARIO_LIST, scenario_id, "state", "analyze")

    if not keep_mat:
        print("deleting matfiles")
        for matfile in mat_results:
            os.remove(matfile)
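
A hypothetical standalone call, with placeholder paths and dates; without a scenario_id, the ExecuteList.csv/ScenarioList.csv updates above are skipped:

extract_scenario(
    "/path/to/execute_dir",        # directory holding the result_*.mat files
    "2016-01-01 00:00:00",         # placeholder start date
    "2016-01-07 23:00:00",         # placeholder end date
    output_dir="/path/to/output",  # pickle files are written here
    keep_mat=False,                # delete the large .mat files afterwards
)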