示例#1
0
    def new_tasks(self, extra):
        """Build and return the initial list of tasks for this session.

        Configures a dedicated DEBUG-level logger, creates a
        ``DifferentialEvolutionParallel`` solver over the box
        ``[-2, 2]**vec_dimension``, seeds its population, and wraps it
        in a single ``GridOptimizer`` task.

        :param extra: dict of extra keyword arguments, forwarded to the
            ``GridOptimizer`` constructor.
        :return: list containing one ``gc3libs.drivers.GridOptimizer``.
        """
        import logging
        log = logging.getLogger('gc3.gc3libs.EvolutionaryAlgorithm')
        log.setLevel(logging.DEBUG)
        log.propagate = 0
        import gc3libs
        # Only attach handlers once: this method may be called again on
        # the same (module-level) logger, and re-adding handlers would
        # duplicate every log line and truncate the log file anew.
        if not log.handlers:
            stream_handler = logging.StreamHandler()
            stream_handler.setLevel(logging.DEBUG)
            log_file_name = os.path.join(os.getcwd(), 'EvolutionaryAlgorithm.log')
            file_handler = logging.FileHandler(log_file_name, mode='w')
            file_handler.setLevel(logging.DEBUG)
            log.addHandler(stream_handler)
            log.addHandler(file_handler)

        # Population size reduced to 5 for testing purposes.
        # nlc needs to be a picklable function:
        # http://docs.python.org/2/library/pickle.html#what-can-be-pickled-and-unpickled
        de_solver = DifferentialEvolutionParallel(
            dim=vec_dimension,
            lower_bds=[-2] * vec_dimension,
            upper_bds=[2] * vec_dimension,
            pop_size=5,
            de_step_size=0.85,
            prob_crossover=1.,
            itermax=200,
            dx_conv_crit=None,
            y_conv_crit=None,
            de_strategy='DE_rand',
            logger=log,
        )

        # Placeholder for resuming from a previously saved population;
        # currently always empty, so a fresh initial sample is drawn.
        initial_pop = []
        if not initial_pop:
            de_solver.new_pop = de_solver.draw_initial_sample()
        else:
            de_solver.new_pop = initial_pop

        # Collect constructor arguments for the optimization driver task.
        path_to_stage_dir = os.getcwd()
        jobname = 'geometries'
        kwargs = extra.copy()
        kwargs['path_to_stage_dir'] = path_to_stage_dir
        kwargs['opt_algorithm'] = de_solver
        kwargs['task_constructor'] = task_constructor_geometries
        kwargs['target_fun'] = compute_target_geometries

        return [gc3libs.drivers.GridOptimizer(jobname=jobname, **kwargs)]
示例#2
0
    def new_tasks(self, extra):
        """Build and return the initial list of tasks for this session.

        Configures a dedicated DEBUG-level logger, creates a
        ``DifferentialEvolutionParallel`` solver over the box
        ``[-2, 2]**vec_dimension``, seeds its population, and wraps it
        in a single ``GridOptimizer`` task.

        :param extra: dict of extra keyword arguments, forwarded to the
            ``GridOptimizer`` constructor.
        :return: list containing one ``gc3libs.drivers.GridOptimizer``.
        """
        import logging
        log = logging.getLogger('gc3.gc3libs.EvolutionaryAlgorithm')
        log.setLevel(logging.DEBUG)
        log.propagate = 0
        import gc3libs
        # Only attach handlers once: this method may be called again on
        # the same (module-level) logger, and re-adding handlers would
        # duplicate every log line and truncate the log file anew.
        if not log.handlers:
            stream_handler = logging.StreamHandler()
            stream_handler.setLevel(logging.DEBUG)
            log_file_name = os.path.join(os.getcwd(), 'EvolutionaryAlgorithm.log')
            file_handler = logging.FileHandler(log_file_name, mode='w')
            file_handler.setLevel(logging.DEBUG)
            log.addHandler(stream_handler)
            log.addHandler(file_handler)

        # Population size reduced to 5 for testing purposes.
        # nlc needs to be a picklable function:
        # http://docs.python.org/2/library/pickle.html#what-can-be-pickled-and-unpickled
        de_solver = DifferentialEvolutionParallel(
            dim=vec_dimension,
            lower_bds=[-2] * vec_dimension,
            upper_bds=[2] * vec_dimension,
            pop_size=5,
            de_step_size=0.85,
            prob_crossover=1.,
            itermax=200,
            dx_conv_crit=None,
            y_conv_crit=None,
            de_strategy='DE_rand',
            logger=log,
        )

        # Placeholder for resuming from a previously saved population;
        # currently always empty, so a fresh initial sample is drawn.
        initial_pop = []
        if not initial_pop:
            de_solver.new_pop = de_solver.draw_initial_sample()
        else:
            de_solver.new_pop = initial_pop

        # Collect constructor arguments for the optimization driver task.
        path_to_stage_dir = os.getcwd()
        jobname = 'geometries'
        kwargs = extra.copy()
        kwargs['path_to_stage_dir'] = path_to_stage_dir
        kwargs['opt_algorithm'] = de_solver
        kwargs['task_constructor'] = task_constructor_geometries
        kwargs['target_fun'] = compute_target_geometries

        return [gc3libs.drivers.GridOptimizer(jobname=jobname, **kwargs)]
def calibrate_forwardPremium():
  """
  Driver script to calibrate forwardPremium EX and sigmaX parameters.
  It uses DifferentialEvolutionParallel as an implementation of
  Ken Price's differential evolution
  algorithm: [[http://www1.icsi.berkeley.edu/~storn/code.html]].
  """
  global LOGGER
  dim = 2  # the population will be composed of 2 parameters to optimize: [ EX, sigmaX ]
  lower_bounds = [0.5,0.001]  # respectively for [ EX, sigmaX ]
  upper_bounds = [1,0.01]  # respectively for [ EX, sigmaX ]
  y_conv_crit = 0.98  # convergence threshold; stop when the evaluated output function < y_conv_crit

  # Define constraints: one nonlinear constraint per (EX, sigmaX) pair.
  ev_constr = nlcOne4eachPair(lower_bounds, upper_bounds)

  global POPULATION_SIZE
  opt = DifferentialEvolutionParallel(
    dim = dim,          # number of parameters of the objective function
    lower_bds = lower_bounds,
    upper_bds = upper_bounds,
    pop_size = POPULATION_SIZE,     # number of population members
    de_step_size = 0.85,# DE-stepsize ex [0, 2]
    prob_crossover = 1, # crossover probability constant ex [0, 1]
    itermax = 20,      # maximum number of iterations (generations)
    x_conv_crit = None, # stop when variation among x's is < this
    y_conv_crit = y_conv_crit, # stop when ofunc < y_conv_crit
    de_strategy = 'DE_local_to_best',
    nlc = ev_constr # pass constraints object
    )

  # Initialise population using the arguments passed to the
  # DifferentialEvolutionParallel initialization
  opt.new_pop = opt.draw_initial_sample()
  LOGGER.info("Initial sample drawn: " + ', '.join(map(str, opt.new_pop)) )

  # This is where the population gets evaluated;
  # it is part of the initialization step.
  newVals = forwardPremium(opt.new_pop)

  # Update iteration count
  opt.cur_iter += 1

  # Update population and evaluate convergence
  opt.update_population(opt.new_pop, newVals)

  while not opt.has_converged():
    LOGGER.info("*********************************************************************")
    LOGGER.info("Optimization has not converged after performing iteration [" + str(opt.cur_iter) + "].")
    # Generate a new candidate population and re-evolve members that
    # violate the nonlinear constraints.
    opt.new_pop = opt.enforce_constr_re_evolve(opt.modify(opt.pop))

    # Update iteration count
    opt.cur_iter += 1

    # This is where the population gets evaluated;
    # this step is iterated until the population converges.
    newVals = forwardPremium(opt.new_pop)

    # Update population and evaluate convergence
    opt.update_population(opt.new_pop, newVals)

  # Once iteration has terminated, extract 'best' which should represent
  # the element in *all* populations that leads to the closest match to the
  # empirical value.
  EX_best, sigmaX_best = opt.best

  result_msg = ("Optimization converged after [%d] steps. EX_best: %f, sigmaX_best: %f"
                % (opt.cur_iter, EX_best, sigmaX_best))
  LOGGER.info(result_msg)

  # Write the result file; the context manager guarantees the handle is
  # closed even if write() raises (the original left it open on error).
  with open('/home/lsci/result_output', 'w') as result_file:
    result_file.write(result_msg)
示例#4
0
def calibrate_forwardPremium():
    """
    Driver script to calibrate forwardPremium EX and sigmaX parameters.
    It uses DifferentialEvolutionParallel as an implementation of
    Ken Price's differential evolution
    algorithm: [[http://www1.icsi.berkeley.edu/~storn/code.html]].
    """

    dim = 2  # the population will be composed of 2 parameters to optimize: [ EX, sigmaX ]
    lower_bounds = [0.5,0.001]  # respectively for [ EX, sigmaX ]
    upper_bounds = [1,0.01]  # respectively for [ EX, sigmaX ]
    y_conv_crit = 0.98  # convergence threshold; stop when the evaluated output function < y_conv_crit

    # Define constraints: one nonlinear constraint per (EX, sigmaX) pair.
    ev_constr = nlcOne4eachPair(lower_bounds, upper_bounds)

    opt = DifferentialEvolutionParallel(
        dim = dim,          # number of parameters of the objective function
        lower_bds = lower_bounds,
        upper_bds = upper_bounds,
        pop_size = POPULATION_SIZE,   # number of population members
        de_step_size = 0.85,# DE-stepsize ex [0, 2]
        prob_crossover = 1, # crossover probability constant ex [0, 1]
        itermax = 20,      # maximum number of iterations (generations)
        x_conv_crit = None, # stop when variation among x's is < this
        y_conv_crit = y_conv_crit, # stop when ofunc < y_conv_crit
        de_strategy = 'DE_local_to_best',
        nlc = ev_constr # pass constraints object
      )

    # Restore solver state saved by a previous run, if any; a fresh run
    # has nothing to load.  Catch only Exception so KeyboardInterrupt /
    # SystemExit still propagate (the original bare `except:` swallowed
    # everything).
    try:
        LocalState.load("driver", opt)
    except Exception:
        print('Nothing to be loaded...')

    # Jobs: create and manage population
    pop = getJobs()

    if not pop:  # empty: first run, seed a fresh population
        # Initialise population using the arguments passed to the
        # DifferentialEvolutionParallel initialization
        opt.new_pop = opt.draw_initial_sample()

        putJobs(pop2Jobs(opt))

    else:  # jobs exist; proceed only once all of them are finished
        finished = True
        for job in pop:
            finished &= job.finished

        if finished:
            # Update population and evaluate convergence.  Jobs that
            # failed (result is None) are penalized with PENALTY_VALUE.
            newVals = []
            opt.new_pop = np.zeros( (POPULATION_SIZE, dim) )
            for k, job in enumerate(pop):
                newVals.append(job.result if job.result is not None else PENALTY_VALUE)
                opt.new_pop[k, :] = (job.paraEA, job.paraSigma)

            # Update iteration count
            opt.cur_iter += 1

            opt.update_population(opt.new_pop, newVals)

            if not opt.has_converged():
                # Generate new population and enforce constraints
                opt.new_pop = opt.enforce_constr_re_evolve(opt.modify(opt.pop))

                # Push and run again!
                putJobs(pop2Jobs(opt))

            else:
                # Once iteration has terminated, extract `best` which
                # should represent the element in *all* populations that
                # leads to the closest match to the empirical value.
                EX_best, sigmaX_best = opt.best

                print("Calibration converged after [%d] steps. EX_best: %f, sigmaX_best: %f" % (opt.cur_iter, EX_best, sigmaX_best))
                # TODO: Cleanup
                sys.exit()


    # VM's: create and manage dispatchers
    vms = getVMs()

    if not vms:  # no running instances yet: create the worker nodes
        print("[+] No running EC2 instances found, creating %d" % N_NODES)
        nodes = fp_ec2_create_vms(N_NODES, pubkey_file='/home/tklauser/.ssh/id_rsa.pub')
        vms = []
        for node in nodes:
            vm = { 'ip' : node.public_ips[0], 'vmtype' : 'Amazon', 'dateUpdate' : str(datetime.datetime.now()) }
            vms.append(vm)
        putVMs(vms)
    else:
        pass  # TODO manage VMs

    # Then, we could also run the forwardPremium binary here; single-script solution

    # Persist solver state so the next invocation resumes where we left off.
    LocalState.save("driver", opt)
示例#5
0
def calibrate_forwardPremium():
    """
    Driver script to calibrate forwardPremium EX and sigmaX parameters.
    It uses DifferentialEvolutionParallel as an implementation of
    Ken Price's differential evolution
    algorithm: [[http://www1.icsi.berkeley.edu/~storn/code.html]].
    """

    dim = 2  # the population will be composed of 2 parameters to optimize: [ EX, sigmaX ]
    lower_bounds = [0.5,0.001]  # respectively for [ EX, sigmaX ]
    upper_bounds = [1,0.01]  # respectively for [ EX, sigmaX ]
    y_conv_crit = 0.98  # convergence threshold; stop when the evaluated output function < y_conv_crit

    # Define constraints: one nonlinear constraint per (EX, sigmaX) pair.
    ev_constr = nlcOne4eachPair(lower_bounds, upper_bounds)

    opt = DifferentialEvolutionParallel(
        dim = dim,          # number of parameters of the objective function
        lower_bds = lower_bounds,
        upper_bds = upper_bounds,
        pop_size = POPULATION_SIZE,   # number of population members
        de_step_size = 0.85,# DE-stepsize ex [0, 2]
        prob_crossover = 1, # crossover probability constant ex [0, 1]
        itermax = 20,      # maximum number of iterations (generations)
        x_conv_crit = None, # stop when variation among x's is < this
        y_conv_crit = y_conv_crit, # stop when ofunc < y_conv_crit
        de_strategy = 'DE_local_to_best',
        nlc = ev_constr # pass constraints object
      )


    # Jobs: create and manage population
    pop = getJobs()

    if not pop:  # empty: first run, seed a fresh population
        # Initialise population using the arguments passed to the
        # DifferentialEvolutionParallel initialization
        opt.new_pop = opt.draw_initial_sample()

        putJobs(pop2Jobs(opt.new_pop))

    else:  # jobs exist; proceed only once all of them are finished
        finished = True
        for job in pop:
            finished &= job.finished

        if finished:
            # Update population and evaluate convergence.  Jobs that
            # failed (result is None) are penalized with PENALTY_VALUE.
            newVals = []
            opt.new_pop = np.zeros( (POPULATION_SIZE, dim) )
            for k, job in enumerate(pop):
                newVals.append(job.result if job.result is not None else PENALTY_VALUE)
                opt.new_pop[k, :] = (job.paraEA, job.paraSigma)

            # Update iteration count
            opt.cur_iter += 1  # TODO: get from db
            # HACK(review): solver state is not persisted between runs,
            # so these fields are reset by hand before update_population().
            opt.bestval = PENALTY_VALUE+1 #!!!
            opt.vals = newVals #!!!
            opt.pop = opt.new_pop #!!!

            opt.update_population(opt.new_pop, newVals)

            if not opt.has_converged():
                # Generate new population and enforce constraints
                opt.new_pop = opt.enforce_constr_re_evolve(opt.modify(opt.pop))

                # Push and run again!
                putJobs(pop2Jobs(opt.new_pop))

            else:
                # Once iteration has terminated, extract `best` which
                # should represent the element in *all* populations that
                # leads to the closest match to the empirical value.
                EX_best, sigmaX_best = opt.best

                print("Calibration converged after [%d] steps. EX_best: %f, sigmaX_best: %f" % (opt.cur_iter, EX_best, sigmaX_best))
                sys.exit()


    # VM's: create and manage dispatchers
    vms = getVMs()

    if not vms:  # no running instances yet: create the worker nodes
        createVMs(POPULATION_SIZE)  # TODO create VMs
    else:
        pass  # TODO manage VMs