Example #1
def run_tasks(
    args,
    configs,
):
    """
    Runs the specified set of tasks (configs)
    """
    start = datetime.now()
    num_failed = 0

    jobs = create_jobs(args, configs)

    run_dirs = {}
    for config in configs:
        task_dir = find_task_dir(config)
        task_run_dir = get_next_run_dir(task_dir)
        run_dirs[config.task_name] = task_run_dir

    # We could potentially support other 'run' systems (e.g. a cluster),
    # rather than just the local machine
    if args.system == "local":
        assert args.j > 0, "Invalid number of processors"

        if args.run:
            num_failed = run_parallel(args, jobs, run_dirs)
            print("Elapsed time: {}".format(
                format_elapsed_time(datetime.now() - start)))

        if args.parse:
            start = datetime.now()
            print("\nParsing test results...")
            if len(args.list_file) > 0:
                print("scripts/parse_vtr_task.py -l {}".format(
                    args.list_file[0]))
            parse_tasks(configs, jobs)
            print("Elapsed time: {}".format(
                format_elapsed_time(datetime.now() - start)))

        if args.create_golden:
            create_golden_results_for_tasks(configs)

        if args.check_golden:
            num_failed += check_golden_results_for_tasks(configs)

        if args.calc_geomean:
            summarize_qor(configs)
            calc_geomean(args, configs)
    # This option generates a shell script (vtr_flow.sh) for each
    # architecture/circuit/script_params combination. The generated scripts
    # can then be submitted to a large cluster.
    elif args.system == "scripts":
        for _, value in run_dirs.items():
            Path(value).mkdir(parents=True)
        run_scripts = create_run_scripts(jobs, run_dirs)
        for script in run_scripts:
            print(script)
    else:
        raise VtrError(
            "Unrecognized run system {system}".format(system=args.system))
    return num_failed
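
A minimal usage sketch for run_tasks(); the argparse flag names and the load_task_configs() helper below are illustrative assumptions, not the tool's actual command-line interface:

import argparse
import sys

def main():
    # Hypothetical driver that assembles the namespace run_tasks() reads from.
    parser = argparse.ArgumentParser(description="Run a set of VTR tasks")
    parser.add_argument("list_file", nargs="*", help="task list file(s)")
    parser.add_argument("-j", type=int, default=1, help="number of parallel processors")
    parser.add_argument("-system", default="local", choices=["local", "scripts"])
    parser.add_argument("-run", action="store_true")
    parser.add_argument("-parse", action="store_true")
    parser.add_argument("-create_golden", action="store_true")
    parser.add_argument("-check_golden", action="store_true")
    parser.add_argument("-calc_geomean", action="store_true")
    args = parser.parse_args()

    configs = load_task_configs(args.list_file)  # assumed helper, not shown in these examples
    sys.exit(run_tasks(args, configs))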
Example #2
def find_latest_run_dir(config):
    """ Find the latest run directory for given configuration """
    task_dir = find_task_dir(config)

    run_dir = get_latest_run_dir(task_dir)

    if not run_dir:
        raise InspectError(
            "Failed to find run directory for task '{}' in '{}'".format(config.task_name, task_dir)
        )

    assert Path(run_dir).is_dir()

    return run_dir
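
find_latest_run_dir() delegates the actual lookup to get_latest_run_dir(), which is not shown in these examples. A minimal sketch of such a helper, under the assumption that run directories follow a runNNN naming convention:

import re
from pathlib import Path

def get_latest_run_dir_sketch(task_dir):
    # Assumption: run directories inside task_dir are named run001, run002, ...
    run_dirs = [d for d in Path(task_dir).iterdir()
                if d.is_dir() and re.fullmatch(r"run\d+", d.name)]
    if not run_dirs:
        return None
    # Return the directory with the highest numeric suffix as the latest run.
    return str(max(run_dirs, key=lambda d: int(d.name[3:])))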
Example #3
def parse_task(config, config_jobs, flow_metrics_basename=FIRST_PARSE_FILE):
    """
    Parse a single task run.

    This generates a file parse_results.txt in the task's working directory,
    which is an amalgam of the parse_results.txt files produced by each job (flow invocation)
    """
    run_dir = find_latest_run_dir(config)

    # Record max widths for pretty printing
    max_arch_len = len("architecture")
    max_circuit_len = len("circuit")
    for job in config_jobs:
        work_dir = job.work_dir(run_dir)
        if job.parse_command():
            parse_filepath = str(PurePath(work_dir) / flow_metrics_basename)
            with open(parse_filepath, "w+") as parse_file:
                with redirect_stdout(parse_file):
                    parse_vtr_flow(job.parse_command())
        if job.second_parse_command():
            parse_filepath = str(PurePath(work_dir) / SECOND_PARSE_FILE)
            with open(parse_filepath, "w+") as parse_file:
                with redirect_stdout(parse_file):
                    parse_vtr_flow(job.second_parse_command())
        if job.qor_parse_command():
            parse_filepath = str(PurePath(work_dir) / QOR_PARSE_FILE)
            with open(parse_filepath, "w+") as parse_file:
                with redirect_stdout(parse_file):
                    parse_vtr_flow(job.qor_parse_command())
        max_arch_len = max(max_arch_len, len(job.arch()))
        max_circuit_len = max(max_circuit_len, len(job.circuit()))
    parse_files(config_jobs, run_dir, flow_metrics_basename)

    if config.second_parse_file:
        parse_files(config_jobs, run_dir, SECOND_PARSE_FILE)

    if config.qor_parse_file:
        parse_files(config_jobs, run_dir, QOR_PARSE_FILE)
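
The three parse blocks in parse_task() repeat the same write-and-redirect pattern; a hypothetical helper (not part of the original module) could factor it out:

from contextlib import redirect_stdout
from pathlib import PurePath

def run_parse_to_file(work_dir, basename, parse_command):
    # Hypothetical helper: run parse_vtr_flow() with stdout captured into a metrics file.
    parse_filepath = str(PurePath(work_dir) / basename)
    with open(parse_filepath, "w+") as parse_file:
        with redirect_stdout(parse_file):
            parse_vtr_flow(parse_command)

parse_task() could then call run_parse_to_file(work_dir, flow_metrics_basename, job.parse_command()), and likewise for the second and QoR parse commands.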
Example #4
def create_jobs(args,
                configs,
                longest_name=0,
                longest_arch_circuit=0,
                after_run=False):
    """
    Create the jobs to be executed depending on the configs.
    """
    jobs = []
    for config in configs:
        for arch, circuit in itertools.product(config.archs, config.circuits):
            golden_results = load_parse_results(
                str(
                    PurePath(
                        config.config_dir).joinpath("golden_results.txt")))
            abs_arch_filepath = resolve_vtr_source_file(
                config, arch, config.arch_dir)
            abs_circuit_filepath = resolve_vtr_source_file(
                config, circuit, config.circuit_dir)
            work_dir = str(PurePath(arch).joinpath(circuit))

            run_dir = (str(
                Path(get_latest_run_dir(find_task_dir(config))) /
                work_dir) if after_run else str(
                    Path(get_next_run_dir(find_task_dir(config))) / work_dir))

            # Collect any extra script params from the config file
            cmd = [abs_circuit_filepath, abs_arch_filepath]

            # Check if additional architectural data files are present
            if config.additional_files_list_add:
                for additional_file in config.additional_files_list_add:
                    flag, file_name = additional_file.split(',')

                    cmd += [flag]
                    cmd += [
                        resolve_vtr_source_file(config, file_name,
                                                config.arch_dir)
                    ]

            if hasattr(args, "show_failures") and args.show_failures:
                cmd += ["-show_failures"]
            cmd += config.script_params if config.script_params else []
            cmd += config.script_params_common if config.script_params_common else []
            cmd += (args.shared_script_params
                    if hasattr(args, "shared_script_params")
                    and args.shared_script_params else [])

            # Apply any special config based parameters
            if config.cmos_tech_behavior:
                cmd += [
                    "-cmos_tech",
                    resolve_vtr_source_file(config, config.cmos_tech_behavior,
                                            "tech"),
                ]

            cmd += ([
                "--fix_pins",
                resolve_vtr_source_file(config, config.pad_file)
            ] if config.pad_file else [])

            if config.sdc_dir:
                sdc_name = "{}.sdc".format(Path(circuit).stem)
                sdc_file = resolve_vtr_source_file(config, sdc_name,
                                                   config.sdc_dir)

                cmd += ["-sdc_file", "{}".format(sdc_file)]

            if config.place_constr_dir:
                place_constr_name = "{}.place".format(Path(circuit).stem)
                place_constr_file = resolve_vtr_source_file(
                    config, place_constr_name, config.place_constr_dir)

                cmd += ["--fix_clusters", "{}".format(place_constr_file)]

            parse_cmd = None
            second_parse_cmd = None
            qor_parse_command = None
            if config.parse_file:
                parse_cmd = [
                    resolve_vtr_source_file(
                        config,
                        config.parse_file,
                        str(PurePath("parse").joinpath("parse_config")),
                    )
                ]

            if config.second_parse_file:
                second_parse_cmd = [
                    resolve_vtr_source_file(
                        config,
                        config.second_parse_file,
                        str(PurePath("parse").joinpath("parse_config")),
                    )
                ]

            if config.qor_parse_file:
                qor_parse_command = [
                    resolve_vtr_source_file(
                        config,
                        config.qor_parse_file,
                        str(PurePath("parse").joinpath("qor_config")),
                    )
                ]
            # Only pass -verbose to the sub-script when verbosity is high,
            # to keep the amount of output reasonable
            if hasattr(args, "verbosity") and args.verbosity > 1:
                cmd += ["-verbose"]
            if config.script_params_list_add:
                for value in config.script_params_list_add:
                    jobs.append(
                        create_job(
                            args,
                            config,
                            circuit,
                            arch,
                            value,
                            cmd,
                            parse_cmd,
                            second_parse_cmd,
                            qor_parse_command,
                            work_dir,
                            run_dir,
                            longest_name,
                            longest_arch_circuit,
                            golden_results,
                        ))
            else:
                jobs.append(
                    create_job(
                        args,
                        config,
                        circuit,
                        arch,
                        None,
                        cmd,
                        parse_cmd,
                        second_parse_cmd,
                        qor_parse_command,
                        work_dir,
                        run_dir,
                        longest_name,
                        longest_arch_circuit,
                        golden_results,
                    ))

    return jobs
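
A brief usage sketch: passing after_run=True makes each job's run_dir resolve to the latest existing run directory rather than the next one (see the run_dir expression above), which is useful when working with results from a run that has already completed. The args and configs objects are assumed to be set up as in Example #1:

jobs = create_jobs(args, configs, after_run=True)
for job in jobs:
    # arch()/circuit() accessors as used in parse_task() above
    print(job.arch(), job.circuit())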