Example #1
def ot_test_pgm_group(bms, N=1, test_len=[16]):
    argparser = opentuner.default_argparser()
    with open("ot_test_pgm_group.txt", "w") as fout:
        fout.write(
            "Benchmark | Cycle Counts | Algorithm Runtime (s)| Sample Sizes | Passes \n"
        )

        for length in test_len:
            envs = []
            i = 0
            for pgm, path in bms:
                env_config = {
                    'pgm': pgm,
                    'pgm_files': path,
                    'run_dir': 'run_' + pgm.replace(".c", ""),
                    'normalize': False,
                    'orig_and_normalize': False,
                    'log_obs_reward': False,
                    'verbose': False,
                    'shrink': True,
                }

                envs.append(Env(env_config))
                i = i + 1
            begin = time.time()
            cycles, passes, sample_size, _ = GccFlagsTuner.main(
                envs, argparser.parse_args())
            end = time.time()
            print("Best individuals are: {}".format(passes))
            print("Cycles: {}".format(timings[0]))
            compile_time = end - begin
            print("Compile Time: %d" % (int(compile_time)))
            fout.write("{}|{}|{}|{}|{}\n".format("test_pgm_group", cycles,
                                                 compile_time, sample_size,
                                                 passes))
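A minimal hypothetical invocation of the helper above might look as follows; the benchmark tuples are placeholders, not values taken from the original code.

bms = [("adpcm.c", ["adpcm.c"]), ("gsm.c", ["gsm.c"])]  # placeholder (program, program files) pairs
ot_test_pgm_group(bms, N=1, test_len=[16])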
Example #2
def ot_test_pgm(bms, test_len=[16]):
    argparser = opentuner.default_argparser()
    #argparser.add_argument('source', help='source file to compile')
    # from gym_hls.envs.chstone_bm import get_chstone, get_others, get_all9
    #bms = get_all9()
    #bms = bms[0:1]
    # for i, bm in enumerate(bms):
    #   pgm, path = bm
    #   env_configs = {}
    #   env_configs['pgm'] = pgm
    #   env_configs['pgm_dir'] = path
    #   env_configs['run_dir'] = 'run_'+str(i)
    #   env_configs['verbose'] = True
    #   env_configs['log_results'] = True

    for i, bm in enumerate(bms):
        pgm, files = bm
        env_configs = {}
        env_configs['pgm'] = pgm
        env_configs['pgm_files'] = files
        env_configs['run_dir'] = 'run_' + pgm.replace(".c", "")
        #env_configs['feature_type'] = 'act_hist'
        env_configs['verbose'] = True
        env_configs['log_results'] = True

        print("Tune for {}".format(pgm))
        GccFlagsTuner.main(env_configs, argparser.parse_args())
Example #3
def opentuner_ga(
    env: ClientServiceCompilerEnv,
    optimization_target: OptimizationTarget,
    search_time_seconds: int,
    seed: int,
    max_copies_of_pass: int = 4,
    population: int = 200,
    tournament: int = 5,
    mutate: int = 2,
    sharing: int = 1,
    **kwargs,
) -> None:
    """Optimize an environment using opentuner.

    OpenTuner is an extensible framework for program autotuning:

        https://opentuner.org/
    """
    cache_dir = transient_cache_path("llvm_autotuning")
    cache_dir.mkdir(exist_ok=True, parents=True)
    with tempfile.TemporaryDirectory(dir=cache_dir,
                                     prefix="opentuner-") as tmpdir:
        argparser = ot.default_argparser()
        args = argparser.parse_args(args=[
            f"--stop-after={search_time_seconds}",
            f"--database={tmpdir}/opentuner.db",
            "--no-dups",
            "--technique=custom",
            f"--seed={seed}",
            "--parallelism=1",
        ])
        ot.search.technique.register(
            BinaryGA(
                population=population,
                tournament=tournament,
                mutate=mutate,
                sharing=sharing,
                name="custom",
            ))
        manipulator = LlvmOptFlagsTuner(
            args,
            target=optimization_target,
            benchmark=env.benchmark,
            max_copies_of_pass=max_copies_of_pass,
        )
        tuner = TuningRunMain(manipulator, args)
        tuner.main()

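        # Minimal stand-ins for opentuner's DesiredResult / Configuration
        # objects: they wrap the best configuration found so it can be
        # replayed through manipulator.run() and then applied to the
        # environment below.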
        class DesiredResult:
            def __init__(self, configuration) -> None:
                self.configuration = configuration

        class Configuration:
            def __init__(self, data) -> None:
                self.data = data

        wrapped = DesiredResult(Configuration(manipulator.best_config))
        manipulator.run(wrapped, None, None)
        env.reset()
        env.multistep(manipulator.serialize_actions(manipulator.best_config))
Example #4
    def train(self, data):
        ModelTuner.run_id = self.run_id + "-train"
        ModelTuner.data = data
        tuner = ModelTuner()
        args = ["--no-dups"]
        if self.dry:
            args.append("--stop-after=10")
        tuner.main(opentuner.default_argparser().parse_args(args))

        # Copy the final config into the results folder for this model
        curr_config_path = os.path.join(tuner.model.config_path, "config.json")
        desired_config_path = os.path.join(self.config_path, "config.json")
        if not os.path.isdir(self.config_path):
            os.makedirs(self.config_path)
        shutil.move(curr_config_path, desired_config_path)

        # Delete training results and config
        shutil.rmtree(tuner.model.results_path)
        shutil.rmtree(tuner.model.config_path)
        os.rmdir(os.path.dirname(tuner.model.config_path))
        os.rmdir(os.path.dirname(tuner.model.results_path))

        # Clean opentuner cache
        top_level_dir = os.path.dirname(os.path.dirname(__file__))
        os.remove(os.path.join(top_level_dir, "opentuner.log"))
        shutil.rmtree(os.path.join(top_level_dir, "opentuner.db"))
Example #5
def str2bool(v):
    argparser = opentuner.default_argparser()
    if isinstance(v, bool):
        return v
    if v.lower() in ('yes', 'true', 't', 'y', '1'):
        return True
    elif v.lower() in ('no', 'false', 'f', 'n', '0'):
        return False
    else:
        # requires `import argparse` at module scope
        raise argparse.ArgumentTypeError('Boolean value expected.')
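A converter like this is normally wired into the parser through argparse's type hook; the flag name below is illustrative only.

parser = opentuner.default_argparser()
parser.add_argument('--use-cache', type=str2bool, default=False)  # hypothetical flag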
Example #6
    def test(self):

        #default parameter:
        # --bail_threshold=500 --database=None --display_frequency=10 \
        # --generate_bandit_technique=False --label=None --list_techniques=False \
        # --machine_class=None --no_dups=False --parallel_compile=False \
        # --parallelism=4 --pipelining=0 --print_params=False \
        # --print_search_space_size=False --quiet=False --results_log=None \
        # --results_log_details=None --seed_configuration=[] --stop_after=None \
        # --technique=None --test_limit=5000

        args = opentuner.default_argparser().parse_args()
        args.no_dups = True

        args.stop_after = gl.opentuner_parameter[0][1]
        # args.print_params = True


        GccFlagsTuner.main(args)
Example #7
    return manipulator

  def run(self, desired_result, input, limit):
    """
    Compile and run a given configuration then
    return performance
    """
    cfg = desired_result.configuration.data

    gcc_cmd = 'g++ apps/raytracer.cpp -o ./tmp.bin'
    gcc_cmd += ' -O{0}'.format(cfg['opt_level'])
    for flag in GCC_FLAGS:
      if cfg[flag] == 'on':
        gcc_cmd += ' -f{0}'.format(flag)
      elif cfg[flag] == 'off':
        gcc_cmd += ' -fno-{0}'.format(flag)
    for param, min, max in GCC_PARAMS:
      gcc_cmd += ' --param {0}={1}'.format(
        param, cfg[param])

    compile_result = self.call_program(gcc_cmd)
    assert compile_result['returncode'] == 0

    run_result = self.call_program('./tmp.bin')
    assert run_result['returncode'] == 0
    return Result(time=run_result['time'])

if __name__ == '__main__':
  argparser = opentuner.default_argparser()
  GccFlagsTuner.main(argparser.parse_args())
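The snippet above opens with the tail of a manipulator() method. Below is a sketch of how that method conventionally looks in OpenTuner's GCC flags tutorial, assuming GCC_FLAGS and GCC_PARAMS are the same flag and parameter lists consumed by run(); it is illustrative, not the original file's definition.

  # Assumes: from opentuner import ConfigurationManipulator, EnumParameter, IntegerParameter
  def manipulator(self):
    # Search space: the -O level, on/off/default for every -f flag,
    # and an integer range for every --param used in run().
    manipulator = ConfigurationManipulator()
    manipulator.add_parameter(IntegerParameter('opt_level', 0, 3))
    for flag in GCC_FLAGS:
      manipulator.add_parameter(EnumParameter(flag, ['on', 'off', 'default']))
    for param, lo, hi in GCC_PARAMS:
      manipulator.add_parameter(IntegerParameter(param, lo, hi))
    return manipulator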
Example #8
            raise RuntimeError("Execution failed for \"" + run_cmd + "\"!")

    def save_final_config(self, configuration):
        """
        called at the end of tuning
        """
        filename = ("transpose-" + str(max(self.args.begin, 0)) + "_" +
                    str(max(self.args.end, 0)) + "_" +
                    str(max(self.args.nruns, 1)) +
                    time.strftime("-%Y%m%d-%H%M%S") + ".json")
        print("Optimal block size written to " + filename + ": ",
              configuration.data)
        self.manipulator().save_to_file(configuration.data, filename)


if __name__ == "__main__":
    argparser = opentuner.default_argparser()
    argparser.add_argument("begin",
                           type=int,
                           default=1024,
                           help="Begin of the range")
    argparser.add_argument("end",
                           type=int,
                           default=2048,
                           help="End of the range")
    argparser.add_argument("nruns",
                           type=int,
                           default=100,
                           help="Number of runs")
    TransposeTune.main(argparser.parse_args())
Example #9
import argparse

gl.spark_parameter = {
    'early-inlining-insns': ['IntegerParameter', '0-1000'],
    'align-functions': ['EnumParameter', 'on|off|default'],
    'align-jumps': ['EnumParameter', 'on|off|default'],
    'align-labels': ['EnumParameter', 'on|off|default'],
    'align-loops': ['EnumParameter', 'on|off|default'],
    'asynchronous-unwind-tables': ['EnumParameter', 'on|off|default'],
    'branch-count-reg': ['EnumParameter', 'on|off|default'],
    'branch-probabilities': ['EnumParameter', 'on|off|default'],
}

#default parameter:
# --bail_threshold=500 --database=None --display_frequency=10 \
# --generate_bandit_technique=False --label=None --list_techniques=False \
# --machine_class=None --no_dups=False --parallel_compile=False \
# --parallelism=4 --pipelining=0 --print_params=False \
# --print_search_space_size=False --quiet=False --results_log=None \
# --results_log_details=None --seed_configuration=[] --stop_after=None \
# --technique=None --test_limit=5000

args = opentuner.default_argparser().parse_args()
args.no_dups = True
args.print_params = True
args.display_frequency = 1
args.stop_after = 500
GccFlagsTuner.main(args)