def solve(self, job):
     logging.debug('Starting opentuner')
     failed_jobs_threshold = self.jobs_limit * _FAILED_JOBS_COEF
     manipulator = ConfigurationManipulator()
     for var in job.optimization_job.task_variables:
         if var.HasField('continuous_variable'):
             cont_var = var.continuous_variable
             param = FloatParameter(var.name, cont_var.l, cont_var.r)
         else:
             int_var = var.integer_variable
             param = IntegerParameter(var.name, int_var.l, int_var.r)
         manipulator.add_parameter(param)
     parser = argparse.ArgumentParser(parents=opentuner.argparsers())
     args = parser.parse_args([])
     args.parallelism = 4
     args.no_dups = True
     interface = DefaultMeasurementInterface(args=args,
                                             manipulator=manipulator,
                                             project_name=job.job_id)
     api = TuningRunManager(interface, args)
     jobs = []
     current_value = None
     failed_jobs = 0
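     # Main tuning loop: keep up to self.jobs_limit evaluations in flight and
     # stop once failed_jobs_threshold consecutive results fail to improve on
     # the best value seen so far, or the optimization job asks to terminate.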
     while failed_jobs < failed_jobs_threshold and not self._check_for_termination(
             job):
         remaining_jobs = []
         for job_id, desired_result in jobs:
             res = self._get_evaluation_job_result(job_id)
             if res is not None:
                 if current_value is None or current_value > res + _THRESHOLD_EPS:
                     failed_jobs = 0
                 else:
                     failed_jobs += 1
                 result = Result(time=res)
                 api.report_result(desired_result, result)
             else:
                 remaining_jobs.append((job_id, desired_result))
         jobs = remaining_jobs
         while len(jobs) < self.jobs_limit:
             desired_result = api.get_next_desired_result()
             if desired_result is None:
                 break
             job_id = self._start_evaluation_job(
                 job, desired_result.configuration.data)
             if job_id is None:
                 api.report_result(desired_result, Result(time=math.inf))
             else:
                 jobs.append((job_id, desired_result))
         if not jobs:
             break
         r = api.get_best_result()
         if r is not None:
             current_value = r.time
             logging.debug('Opentuner current state: %s %s', r.time,
                           api.get_best_configuration())
         time.sleep(5)
     res = api.get_best_result().time
     vars = api.get_best_configuration()
     api.finish()
     return res, vars
Example #2
def main():
    parser = argparse.ArgumentParser(parents=opentuner.argparsers())
    args = parser.parse_args()
    manipulator = ConfigurationManipulator()
    manipulator.add_parameter(IntegerParameter('x', -200, 200))
    interface = DefaultMeasurementInterface(args=args,
                                            manipulator=manipulator,
                                            project_name='examples',
                                            program_name='api_test',
                                            program_version='0.1')
    api = TuningRunManager(interface, args)
    for x in range(500):
        desired_result = api.get_next_desired_result()
        if desired_result is None:
            # The search space for this example is very small, so sometimes
            # the techniques have trouble finding a config that hasn't already
            # been tested.  Change this to a continue to make it try again.
            break
        cfg = desired_result.configuration.data
        result = Result(time=test_func(cfg))
        api.report_result(desired_result, result)

    best_cfg = api.get_best_configuration()
    api.finish()
    print('best x found was', best_cfg['x'])
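The snippet relies on a test_func that is not defined here. A minimal stand-in (the name and the quadratic objective below are assumptions for illustration, not part of the original example) could be:

def test_func(cfg):
    # Toy objective: minimize (x - 10)^2, so the tuner should converge near x = 10.
    x = cfg['x']
    return (x - 10) ** 2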
Example #3
    def run_session(self):
        params_set = dict(self.planner_config[self.planner].items())
        manipulator = self._load_search_space(params_set)
        interface = DefaultMeasurementInterface(args=self.args,
                                                manipulator=manipulator)
        # Using OpenTuner API
        # https://github.com/jansel/opentuner/blob/master/examples/py_api/api_example.py
        api = TuningRunManager(interface, self.args)

        self.n_trial = 0  # Reset the trial counter to zero for each planner
        start_time = timer()
        params = {'planner': self.planner, 'start_time': start_time}
        if self.MAX_RUNTIME != 'None':
            params['end_time'] = timer() + self.MAX_RUNTIME
            print('\n')
            rospy.loginfo('Executing %s on %s for %d secs', self.MODE,
                          params['planner'], self.MAX_RUNTIME)
        else:
            print('\n')
            rospy.loginfo('Executing %s on %s for %d trials', self.MODE,
                          params['planner'], self.MAX_TRIALS)

        for _ in range(self.MAX_TRIALS):
            desired_result = api.get_next_desired_result()
            params_set = desired_result.configuration.data
            params['params_set'] = params_set

            run_result = self._opentuner_obj(params)
            result = Result(time=run_result['loss'])
            api.report_result(desired_result, result)
Example #4
def create_benchmark_api(benchmark):
    args = argparser.parse_args()
    args.benchmark = benchmark
    args.database = "{}_{}{}".format(
        os.path.splitext(args.database)[0], benchmark,
        os.path.splitext(args.database)[1])
    interface = ReproBLASTuner(args)
    api = TuningRunManager(interface, args)
    return api
Example #5
def create_test_tuning_run(db):
    parser = argparse.ArgumentParser(parents=opentuner.argparsers())
    args = parser.parse_args()
    args.database = db
    manipulator = ConfigurationManipulator()
    manipulator.add_parameter(IntegerParameter('x', -200, 200))
    interface = DefaultMeasurementInterface(args=args,
                                            manipulator=manipulator,
                                            project_name='examples',
                                            program_name='api_test',
                                            program_version='0.1')
    api = TuningRunManager(interface, args)
    return api
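The manager returned above still has to be driven by the caller; a hedged sketch of such a loop (the database path, trial count, and objective function are placeholders):

api = create_test_tuning_run('opentuner_test.db')
for _ in range(100):
    desired_result = api.get_next_desired_result()
    if desired_result is None:
        break  # the search technique has nothing new to propose
    cfg = desired_result.configuration.data
    api.report_result(desired_result, Result(time=objective(cfg)))  # objective() is a placeholder
best_cfg = api.get_best_configuration()
api.finish()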
Example #6
    def __init__(self, *ot_args, **ot_kwargs):
        """
        Creates communication queues and spawns a thread
        to run the tuning logic.
        """
        super(OpenTunerDriver, self).__init__()
        self._best_config = None
        interface = CtreeMeasurementInterface(self, *ot_args, **ot_kwargs)
        arg_parser = argparse.ArgumentParser(parents=opentuner.argparsers())
        config_args = CONFIG.get("opentuner", "args").split()
        tuner_args = arg_parser.parse_args(config_args)
        self.manager = TuningRunManager(interface, tuner_args)
        self._converged = False
Example #7
    def __init__(self, train_queries, test_queries, experiment_config,
                 result_dir, system_environment):
        """
        Args:
            train_queries (list of SQLQuery): training queries
            test_queries (list of SQLQuery): test queries
            experiment_config (dict): experiment parameters
            result_dir (str): directory for result output
            system_environment (PostgresSystemEnvironment): encapsulates environment
        """
        self.logger = logging.getLogger(__name__)
        self.logger.info('Setting up OpenTuner...')

        self.train_queries = train_queries
        self.test_queries = test_queries
        self.experiment_config = experiment_config

        self.max_size = experiment_config['max_size']
        self.size_weight = experiment_config['size_weight']
        self.max_runtime = experiment_config['max_runtime']
        self.runtime_weight = experiment_config['runtime_weight']
        self.reward_penalty = experiment_config['reward_penalty']

        self.system_environment = system_environment

        self.result_dir = result_dir

        # 2nd search space
        self.n_idxs = experiment_config['n_queries_per_episode']
        self.n_cols_per_idx = 3  # TODO hardcode

        self.tbls = experiment_config['tables']
        self.n_idxs_per_tbl = int(self.n_idxs / len(self.tbls))

        # maps tbls to mapping of tbl's column indices to tbl's columns, per search space representation
        self.tbl_2_col_idx_2_col = {}
        for tbl in self.tbls:
            self.tbl_2_col_idx_2_col[tbl] = {}
            for col_idx, col in enumerate(tpch_table_columns[tbl].keys()):
                # col_idx + 1 because 0 is reserved as a no-op
                self.tbl_2_col_idx_2_col[tbl][col_idx + 1] = col

        # api
        sys.argv = [sys.argv[0]]  # opentuner expects own args
        parser = argparse.ArgumentParser(parents=opentuner.argparsers())
        args = parser.parse_args()
        manipulator = self.build_search_space()
        interface = DefaultMeasurementInterface(args=args,
                                                manipulator=manipulator)
        self.api = TuningRunManager(interface, args)
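build_search_space is not included in this snippet. Based on the attributes set above (n_idxs index slots, n_cols_per_idx column slots per index, and the 0-as-noop column encoding), one plausible sketch is the following; the actual method may differ:

    def build_search_space(self):
        # One integer parameter per (index slot, column slot); 0 means "no column".
        manipulator = ConfigurationManipulator()
        for i in range(self.n_idxs):
            tbl = self.tbls[i % len(self.tbls)]
            n_cols = len(self.tbl_2_col_idx_2_col[tbl])
            for j in range(self.n_cols_per_idx):
                manipulator.add_parameter(
                    IntegerParameter('idx_{}_col_{}'.format(i, j), 0, n_cols))
        return manipulator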
Example #8
    def __init__(self,
                 api_config,
                 techniques=DEFAULT_TECHNIQUES,
                 n_suggestions=1):
        """Build wrapper class to use opentuner optimizer in benchmark.

        Parameters
        ----------
        api_config : dict-like of dict-like
            Configuration of the optimization variables. See API description.

        techniques : iterable of strings
            A list or tuple of techniques to use in opentuner. If the list
            has only one technique, then that technique will be used. If the
            list has multiple techniques a bandit over those techniques
            will be used.

        n_suggestions : int
            Default number of suggestions to be made in parallel.
        """
        AbstractOptimizer.__init__(self, api_config)

        # Opentuner requires DesiredResult to reference suggestion when making
        # its observation. x_to_dr maps the dict suggestion to DesiredResult.
        self.x_to_dr = {}
        # Keep last suggested x and repeat it whenever opentuner gives up.
        self.dummy_suggest = None
        """Setting up the arguments for opentuner. You can see all possible
        arguments using:
        ```
        >>> import opentuner
        >>> opentuner.default_argparser().parse_args(['-h'])
        ```
        We only change a few arguments (other arguments are set to defaults):
        * database = MEMORY_ONLY_DB: to use an in-memory sqlite database
        * parallelism = n_suggestions: num of suggestions to give in parallel
        * technique = techniques: a list of techniques to be used by opentuner
        * print_params = False: to avoid opentuner from exiting after printing
            param spaces
        """
        args = Namespace(
            bail_threshold=500,
            database=MEMORY_ONLY_DB,
            display_frequency=10,
            generate_bandit_technique=False,
            label=None,
            list_techniques=False,
            machine_class=None,
            no_dups=False,
            parallel_compile=False,
            parallelism=n_suggestions,
            pipelining=0,
            print_params=False,
            print_search_space_size=False,
            quiet=False,
            results_log=None,
            results_log_details=None,
            seed_configuration=[],
            stop_after=None,
            technique=techniques,
            test_limit=5000,
        )

        # Setup some dummy classes required by opentuner to actually run.
        manipulator = OpentunerOptimizer.build_manipulator(api_config)
        interface = DMI(args=args, manipulator=manipulator)
        self.api = TuningRunManager(interface, args)
Example #9
def tuning_loop():
    report_delay = 5
    last_time = time.time()
    start_time = last_time
    parser = argparse.ArgumentParser(parents=opentuner.argparsers())

    parser.add_argument("--processes",
                        type=int,
                        help="Number of Python threads available.")
    parser.add_argument(
        "--no-wait",
        action="store_true",
        help="Do not wait for requested results to generate more requests.")

    args = parser.parse_args()
    pool = ThreadPool(args.processes)
    manipulator = ConfigurationManipulator()

    for name in legup_parameters.parameters:
        parameter_type = legup_parameters.parameter_type(name)
        values = legup_parameters.parameter_values(name)
        if parameter_type == int:
            manipulator.add_parameter(
                IntegerParameter(name, values[0], values[1]))
        elif parameter_type == bool:
            manipulator.add_parameter(BooleanParameter(name))
        elif parameter_type == Enum:
            manipulator.add_parameter(EnumParameter(name, values))
        else:
            print("ERROR: Unsupported parameter type for \"{0}\"".format(name))

    interface = DefaultMeasurementInterface(args=args,
                                            manipulator=manipulator,
                                            project_name='HLS-FPGAs',
                                            program_name='legup-tuner',
                                            program_version='0.0.1')

    manager = TuningRunManager(interface, args)

    current_time = time.time()
    computing_results = []
    computed_results = []
    desired_results = manager.get_desired_results()

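    # Tune until the --stop-after budget is exhausted. With --no-wait, results
    # are collected asynchronously via apply_async; otherwise the loop blocks
    # on map_async until the current batch of configurations has been measured.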
    while current_time - start_time < args.stop_after:
        if args.no_wait:
            if len(desired_results) != 0 or len(computing_results) != 0:
                for desired_result in desired_results:
                    computing_results.append([
                        desired_result,
                        pool.apply_async(get_wallclock_time,
                                         (desired_result.configuration.data, ))
                    ])

                for result in computing_results:
                    if result[1].ready() and result[0] not in computed_results:
                        cost = result[1].get()
                        manager.report_result(result[0], Result(time=cost))
                        computed_results.append(result)

                for result in computed_results:
                    if result in computing_results:
                        computing_results.remove(result)

                computed_results = []
        else:
            if len(desired_results) != 0:
                cfgs = [dr.configuration.data for dr in desired_results]
                results = pool.map_async(get_wallclock_time,
                                         cfgs).get(timeout=None)

                for dr, result in zip(desired_results, results):
                    manager.report_result(dr, Result(time=result))

        desired_results = manager.get_desired_results()

        current_time = time.time()

        if (current_time - last_time) >= report_delay:
            log_intermediate(current_time - start_time, manager)
            last_time = current_time

    current_time = time.time()
    log_intermediate(current_time - start_time, manager)

    save_final_configuration(manager.get_best_configuration())
    manager.finish()
Example #10
import argparse

def get_next_desired_result():
    global desired_result
    desired_result = api.get_next_desired_result()
    while desired_result is None:
        desired_result = api.get_next_desired_result()
    return desired_result.configuration.data

def report_result(runtime):
    api.report_result(desired_result, Result(time=runtime))

def finish():
    api.finish()

parser = argparse.ArgumentParser(parents=opentuner.argparsers())
args = parser.parse_args()

manipulator = ConfigurationManipulator()
:::parameters:::
interface = DefaultMeasurementInterface(args=args,
                                        manipulator=manipulator,
                                        project_name='atf_library',
                                        program_name='atf_library',
                                        program_version='0.1')


api = TuningRunManager(interface, args)

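The module-level helpers above are meant to be driven from the host side. A minimal, hypothetical driver (run_configuration and the trial count are placeholders, not part of the original template):

for _ in range(100):
    cfg = get_next_desired_result()    # dict of parameter values to try
    runtime = run_configuration(cfg)   # placeholder: measure this configuration
    report_result(runtime)
best_cfg = api.get_best_configuration()
finish()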
Example #11
def tuning_loop():
    report_delay = 30
    last_time = time.time()
    start_time = last_time
    iterations = 5
    parser = argparse.ArgumentParser(parents=opentuner.argparsers())

    parser.add_argument("--processes",
                        type=int,
                        help="Number of Python threads available.")
    parser.add_argument(
        "--no-wait",
        action="store_true",
        help="Do not wait for requested results to generate more requests.")
    parser.add_argument("--application", type=str, help="Application name.")
    parser.add_argument("--verilog-file",
                        type=str,
                        help="Verilog file for the application.")

    args = parser.parse_args()
    pool = ThreadPool(args.processes)
    manipulator = ConfigurationManipulator()

    global application
    global verilog_file
    global application_path
    global container_path
    global host_path
    global image_name
    global script_name

    global tuning_init

    application = args.application
    verilog_file = args.verilog_file
    application_path = "/root/legup_src/legup-4.0/examples/chstone/{0}".format(
        application)
    container_path = "/root/legup_src/legup-4.0/examples/chstone/{0}/tuner".format(
        application)
    host_path = "/home/bruelp/legup-tuner/post_place_and_route/py"
    image_name = "legup_quartus"
    script_name = "measure.sh"

    print(application, container_path, application_path)

    for name in legup_parameters.parameters:
        parameter_type = legup_parameters.parameter_type(name)
        values = legup_parameters.parameter_values(name)
        if parameter_type == int:
            manipulator.add_parameter(
                IntegerParameter(name, values[0], values[1]))
        elif parameter_type == bool:
            manipulator.add_parameter(BooleanParameter(name))
        elif parameter_type == Enum:
            manipulator.add_parameter(EnumParameter(name, values))
        else:
            print("ERROR: Unsupported parameter type for \"{0}\"".format(name))

    interface = DefaultMeasurementInterface(args=args,
                                            manipulator=manipulator,
                                            project_name='HLS-FPGAs',
                                            program_name='legup-tuner',
                                            program_version='0.0.1')

    manager = TuningRunManager(interface, args)

    current_time = time.time()
    computing_results = []
    computed_results = []
    desired_results = manager.get_desired_results()

    while current_time - start_time < args.stop_after:
        if args.no_wait:
            if len(desired_results) != 0 or len(computing_results) != 0:
                for desired_result in desired_results:
                    computing_results.append([
                        desired_result,
                        pool.apply_async(get_wallclock_time,
                                         (desired_result.configuration.data, ))
                    ])

                for result in computing_results:
                    if result[1].ready() and result[0] not in computed_results:
                        cost = result[1].get()
                        manager.report_result(result[0], Result(time=cost))
                        computed_results.append(result)

                for result in computed_results:
                    if result in computing_results:
                        computing_results.remove(result)

                computed_results = []
        else:
            if len(desired_results) != 0:
                cfgs = [dr.configuration.data for dr in desired_results]
                results = pool.map_async(get_wallclock_time,
                                         cfgs).get(timeout=None)

                for dr, result in zip(desired_results, results):
                    manager.report_result(
                        dr,
                        Result(time=result['value'],
                               cycles=result['cycles'],
                               fmax=result['fmax'],
                               LU=result['lu'],
                               pins=result['pins'],
                               regs=result['regs'],
                               block=result['block'],
                               ram=result['ram'],
                               dsp=result['dsp']))

        desired_results = manager.get_desired_results()

        current_time = time.time()

        if (current_time - last_time) >= report_delay:
            log_intermediate(current_time - start_time, manager)
            last_time = current_time

    current_time = time.time()
    log_intermediate(current_time - start_time, manager)

    save_final_configuration(manager.get_best_configuration())
    manager.finish()