Ejemplo n.º 1
0
def getOptimizer(optimizer_config):
    """Construct an optimizer from a config dict.

    Args:
      optimizer_config(dict): configuration for the optimizer. Expected to
        carry a 'type' key ('bayesopt' or 'grid') plus the type-specific
        numeric settings ('n_init'/'n_iter' or 'num_configs_per_param').

    Returns:
      An optimizer instance, or None if the config is invalid or the type
      is unrecognized. A None config yields a default BayesianOptimizer.
    """
    # No config at all -> fall back to a small default Bayesian optimizer.
    if optimizer_config is None:
        return BayesianOptimizer(n_init=2, n_iter=2)
    optimizer_type = optimizer_config.get('type')
    if optimizer_type == 'bayesopt':
        try:
            # int() raises TypeError for a missing (None) value and
            # ValueError for a non-numeric string; both mean a bad config.
            n_init = int(optimizer_config.get('n_init'))
            n_iter = int(optimizer_config.get('n_iter'))
            return BayesianOptimizer(n_init=n_init, n_iter=n_iter)
        except (TypeError, ValueError):
            return None
    elif optimizer_type == 'grid':
        try:
            num_configs_per_param = int(
                optimizer_config.get('num_configs_per_param'))
            return GridSearch(num_configs_per_param=num_configs_per_param)
        except (TypeError, ValueError):
            return None
    # Unknown optimizer type: make the implicit fall-through explicit.
    return None
Ejemplo n.º 2
0
def _optional_cast(value, caster):
    # Apply *caster* to *value*, passing None through unchanged — all the
    # optimizer settings below are optional and default to None.
    return None if value is None else caster(value)


def getOptimizer(optimizer_config):
    # TODO: add support to more optimizer
    """Construct an optimizer from a config dict.

    Args:
      optimizer_config(dict): configuration for the optimizer. Expected to
        carry a 'type' key ('grid', 'bayesopt', 'random' or 'coordinate');
        the remaining keys are type-specific settings, all optional.

    Returns:
      An optimizer instance, or None if the config is invalid or the type
      is unrecognized. A None config yields a default BayesianOptimizer.
    """
    if optimizer_config is None:
        return BayesianOptimizer(n_init=2, n_iter=2)
    optimizer_type = optimizer_config.get('type')

    if optimizer_type == 'grid':
        try:
            # list() raises TypeError when the value is missing (None) or
            # not iterable; either way the config is invalid.
            num_configs_per_param = list(
                optimizer_config.get('num_configs_per_param'))
            return GridSearch(num_configs_per_param=num_configs_per_param)
        except (TypeError, ValueError):
            return None

    # Settings shared by every iterative optimizer below.
    n_iter = _optional_cast(get_from_dic(optimizer_config, 'n_iter'), int)
    budget = _optional_cast(get_from_dic(optimizer_config, 'budget'), float)
    converge_thres = _optional_cast(
        get_from_dic(optimizer_config, 'converge_thres'), float)
    converge_steps = _optional_cast(
        get_from_dic(optimizer_config, 'converge_steps'), int)

    if optimizer_type == 'bayesopt':
        n_init = _optional_cast(get_from_dic(optimizer_config, 'n_init'), int)
        alpha = _optional_cast(get_from_dic(optimizer_config, 'alpha'), float)
        kappa = _optional_cast(get_from_dic(optimizer_config, 'kappa'), float)
        try:
            return BayesianOptimizer(n_init=n_init,
                                     n_iter=n_iter,
                                     alpha=alpha,
                                     kappa=kappa,
                                     budget=budget,
                                     converge_thres=converge_thres,
                                     converge_steps=converge_steps)
        except (TypeError, ValueError):
            return None
    elif optimizer_type == 'random':
        random_seed = _optional_cast(
            get_from_dic(optimizer_config, 'random_seed'), int)
        try:
            return RandomSearch(n_iter=n_iter,
                                random_seed=random_seed,
                                budget=budget,
                                converge_thres=converge_thres,
                                converge_steps=converge_steps)
        except (TypeError, ValueError):
            return None
    elif optimizer_type == 'coordinate':
        random_seed = _optional_cast(
            get_from_dic(optimizer_config, 'random_seed'), int)
        try:
            return CoordinateSearch(n_iter=n_iter,
                                    random_seed=random_seed,
                                    budget=budget,
                                    converge_thres=converge_thres,
                                    converge_steps=converge_steps)
        except (TypeError, ValueError):
            return None
    # Unknown optimizer type: make the implicit fall-through explicit.
    return None
Ejemplo n.º 3
0
    'sqlite',
    '',
    '',
    '',
    'liteTest',
)

# need to define these DB related parameters
# NOTE(review): DB_USER, DB_PASSWORD, DB_HOST and DB_NAME must be defined
# earlier in the file — they are not visible in this chunk.
AWSRDS_storage = RelationalDB('postgresql', DB_USER, DB_PASSWORD, DB_HOST,
                              DB_NAME)

# define optimizer
# Bayesian optimizer: 2 initialization points, 1 iteration, no budget or
# convergence-based stopping (all None). NOTE(review): alpha/kappa/utility
# semantics presumed from names — confirm against BayesianOptimizer.
bayesian_optimizer = BayesianOptimizer(n_init=2,
                                       n_iter=1,
                                       alpha=1e-3,
                                       kappa=2.5,
                                       utility='ucb',
                                       budget=None,
                                       converge_thres=None,
                                       converge_steps=None)

# search on 2*2*2 grid
grid_optimizer = GridSearch([2, 2, 2])

# Random search for 10 iterations; unseeded, no stopping criteria.
random_optimizer = RandomSearch(n_iter=10,
                                random_seed=None,
                                budget=None,
                                converge_thres=None,
                                converge_steps=None)

coordinate_optimizer = CoordinateSearch(n_init=1,
    n_iter=20,
Ejemplo n.º 4
0
    command_template_string=command_template_string,
    # we use LocalCompute here b/c we don't want to launch jobs on EC2 like the server does
    compute=LocalCompute(max_threads=8))

# when run on the server, this doesn't change - we always connect to an AWS RDS postgres database
# When running locally you can just use a sqlite database like below. The last argument is the database name
# so you could test a blank slate by just changing the name or deleting the old liteTest.db file.
# NOTE(review): the empty-string arguments appear to be unused
# user/password/host placeholders for sqlite — confirm against RelationalDB.
storage = RelationalDB(
    'sqlite',
    '',
    '',
    '',
    'liteTest',
)

# when run on server, this is determined by optimizer the user POSTs
optimizer = BayesianOptimizer(n_init=2, n_iter=1, alpha=1e-3)
li = [2, 2, 2]
# Alternative optimizers kept here (commented) for quick local swapping.
#optimizer = GridSearch(li)
#optimizer = RandomSearch(n_iter=10)
#optimizer = CoordinateSearch(n_iter=20)
# this is what runs it all
# NOTE(review): experiment_inst and command_template_string are defined
# earlier in the file — not visible in this chunk.
po = ParslRunner(obj_func=getattr(paropt.runner.parsl, "timeCmd"),
                 obj_func_params={'timeout': 15},
                 optimizer=optimizer,
                 storage=storage,
                 experiment=experiment_inst,
                 logs_root_dir='./myTestLogs')

po.run()
Ejemplo n.º 5
0
def setupAWS():
    """Run one tiny paropt experiment on EC2 to initialize parsl's AWS VPC state.

    Side effects: launches an EC2 job via ParslRunner, creates an ephemeral
    sqlite database, prints the run result, and copies the generated
    awsproviderstate.json into $CONTAINER_STATE_FILE_DIR.

    Raises:
      Exception: if the CONTAINER_STATE_FILE_DIR env var is not set.
    """
    # launch a small parsl job on AWS to initialize parsl's AWS VPC stuff
    # If run successfully, it will create the awsproviderstate.json file on host in paropt-service/config/
    # Needs to be run each time the AWS credentials are changed for the server
    # Intended to be used with a `docker run ...` command before running production server
    import os

    import paropt
    from paropt.runner import ParslRunner
    from paropt.storage import RelationalDB
    from paropt.optimizer import BayesianOptimizer, GridSearch
    from paropt.runner.parsl import timeCommand
    from paropt.storage.entities import Parameter, PARAMETER_TYPE_INT, Experiment, LocalCompute, EC2Compute

    # Fail fast if we have nowhere to copy the state file at the end.
    container_state_file_dir = os.getenv("CONTAINER_STATE_FILE_DIR")
    if not container_state_file_dir:
        raise Exception(
            "Missing required env var CONTAINER_STATE_FILE_DIR which is used for copying awsproviderstate.json to host"
        )

    paropt.setConsoleLogger()

    # Trivial parameterized command: just sleep for ${myParam} seconds.
    command_template_string = """
    #! /bin/bash

    sleep ${myParam}
    """

    # Minimal experiment with a single integer parameter in [0, 10].
    experiment_inst = Experiment(
        tool_name='tmptool',
        parameters=[
            Parameter(name="myParam",
                      type=PARAMETER_TYPE_INT,
                      minimum=0,
                      maximum=10),
        ],
        command_template_string=command_template_string,
        compute=EC2Compute(
            type='ec2',
            instance_model=
            "c4.large",  # using c5 b/c previously had trouble with t2 spot instances
            instance_family="c4",
            ami=
            "ami-0257427d05c8c18ac"  # parsl base ami - preinstalled apt packages
        ))

    # use an ephemeral database
    storage = RelationalDB(
        'sqlite',
        '',
        '',
        '',
        'tmpSqliteDB',
    )

    # run simple bayes opt
    # Smallest possible run: one init point, one iteration.
    bayesian_optimizer = BayesianOptimizer(
        n_init=1,
        n_iter=1,
    )

    po = ParslRunner(parsl_app=timeCommand,
                     optimizer=bayesian_optimizer,
                     storage=storage,
                     experiment=experiment_inst,
                     logs_root_dir='/var/log/paropt')

    po.run(debug=True)
    po.cleanup()

    # print result
    print(po.run_result)

    # move the awsproviderstate file into expected directory
    # NOTE(review): assumes parsl wrote awsproviderstate.json to the current
    # working directory — confirm against parsl's AWSProvider behavior.
    from shutil import copyfile
    copyfile("awsproviderstate.json",
             f'{container_state_file_dir}/awsproviderstate.json')