Example #1
def harmony_search(
        func, 
        bounds: np.ndarray, 
        n_harmonies: int, 
        hm_considering_rate: float, 
        bandwidth: float, 
        pitch_adjusting_rate: float, 
        iters: int,
        starting_xs: [np.ndarray] = [],
        post_processing_func = None):
    # hm == harmony memory
    n_harmonies = max(n_harmonies, len(starting_xs))
    seen = set()
    hm = numpyize([[np.random.uniform(bounds[0][i], bounds[1][i]) for i in range(len(bounds[0]))] for _ in range(n_harmonies)])
    for i in range(len(starting_xs)):
        assert len(starting_xs[i]) == len(bounds[0])
        harmony = np.array(starting_xs[i])
        for z in range(len(bounds[0])):
            harmony[z] = max(bounds[0][z], min(bounds[1][z], harmony[z]))
        tpl = tuple(harmony)
        if tpl not in seen:
            hm[i] = harmony
        seen.add(tpl)
    print('evaluating initial harmonies...')
    hm_evals = numpyize([func(h) for h in hm])

    print('best harmony')
    print(round_values(denumpyize(hm[hm_evals.argmin()]), 5), f'{hm_evals.min():.8f}')
    if post_processing_func is not None:
        post_processing_func(hm[hm_evals.argmin()])
    print('starting search...')
    worst_eval_i = hm_evals.argmax()
    for itr in range(iters):
        new_harmony = np.zeros(len(bounds[0]))
        for note_i in range(len(bounds[0])):
            if np.random.random() < hm_considering_rate:
                new_note = hm[np.random.randint(0, len(hm))][note_i]
                if np.random.random() < pitch_adjusting_rate:
                    new_note = new_note + bandwidth * (np.random.random() - 0.5) * abs(bounds[0][note_i] - bounds[1][note_i])
                    new_note = max(bounds[0][note_i], min(bounds[1][note_i], new_note))
            else:
                new_note = np.random.uniform(bounds[0][note_i], bounds[1][note_i])
            new_harmony[note_i] = new_note
        h_eval = func(new_harmony)
        if h_eval < hm_evals[worst_eval_i]:
            hm[worst_eval_i] = new_harmony
            hm_evals[worst_eval_i] = h_eval
            worst_eval_i = hm_evals.argmax()
            print('improved harmony')
            print(round_values(denumpyize(new_harmony), 5), f'{h_eval:.8f}')
        print('best harmony')
        print(round_values(denumpyize(hm[hm_evals.argmin()]), 5), f'{hm_evals.min():.8f}')
        print('iteration', itr, 'of', iters)
        if post_processing_func is not None:
            post_processing_func(hm[hm_evals.argmin()])
    return hm[hm_evals.argmin()]
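
A minimal usage sketch for harmony_search, minimizing a toy sphere objective. It assumes the sketch sits in the same module as harmony_search; the stand-ins for the project helpers it relies on (numpyize, denumpyize, round_values) and the sphere objective are assumptions added only so the sketch runs on its own.

import numpy as np

# Stand-ins (assumed behavior) for the project helpers used inside harmony_search.
def numpyize(x):
    return np.array(x, dtype=float)

def denumpyize(x):
    return [float(v) for v in x]

def round_values(xs, n):
    return [round(v, n) for v in xs]

# Toy objective: sum of squares, minimized at the origin.
def sphere(x):
    return float(np.sum(np.asarray(x) ** 2))

# bounds[0] holds the per-dimension lower bounds, bounds[1] the upper bounds.
bounds = np.array([[-5.0, -5.0, -5.0], [5.0, 5.0, 5.0]])
best = harmony_search(sphere, bounds, n_harmonies=16,
                      hm_considering_rate=0.9, bandwidth=0.1,
                      pitch_adjusting_rate=0.3, iters=100)
print('found minimum near', best)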
Example #2
def __init__(self, pool, base_config):
    self.pool = pool
    self.base_config = base_config
    self.xs_conf_map = [k for k in sorted(base_config['ranges'])]
    self.bounds = numpyize([[self.base_config['ranges'][k][0] for k in self.xs_conf_map],
                            [self.base_config['ranges'][k][1] for k in self.xs_conf_map]])
    self.now_date = ts_to_date(time())[:19].replace(':', '-')
    self.test_symbol = base_config['symbols'][0]
    self.results_fname = make_get_filepath(f'tmp/harmony_search_results_{self.test_symbol}_{self.now_date}.txt')
    self.best_conf_fname = f'tmp/harmony_search_best_config_{self.test_symbol}_{self.now_date}.json'
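
The constructor reads two things from base_config: a 'symbols' list (the first entry becomes test_symbol) and a 'ranges' mapping of parameter name to [lower, upper], whose sorted keys define both xs_conf_map and the two rows of bounds. A hypothetical illustration of that shape, inferred only from the keys accessed above:

# Hypothetical base_config shape (the parameter names are made up).
base_config = {
    'symbols': ['BTCUSDT'],          # test_symbol = symbols[0]
    'ranges': {
        'param_a': [0.0, 1.0],       # [lower, upper] per tunable parameter
        'param_b': [10.0, 100.0],
    },
}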
Example #3
def load_live_config(live_config_path: str) -> dict:
    try:
        live_config = json.load(open(live_config_path))
        live_config = json.loads(
            json.dumps(live_config).replace('secondary_grid_spacing',
                                            'secondary_pprice_diff'))
        assert all(k in live_config['long']
                   for k in get_template_live_config()['long'])
        return numpyize(live_config)
    except Exception as e:
        raise Exception(f'failed to load live config {live_config_path} {e}')
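
The json.dumps/json.loads round-trip above is a blunt but effective global rename of the legacy key 'secondary_grid_spacing' to 'secondary_pprice_diff'. A small self-contained illustration of the same trick (the surrounding keys are hypothetical):

import json

legacy = {'long': {'secondary_grid_spacing': 0.05, 'grid_span': 0.3}}
renamed = json.loads(json.dumps(legacy).replace('secondary_grid_spacing',
                                                'secondary_pprice_diff'))
# renamed == {'long': {'secondary_pprice_diff': 0.05, 'grid_span': 0.3}}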
Example #4
def pso_multiprocess(reward_func: Callable,
                     n_particles: int,
                     bounds: np.ndarray,
                     c1: float,
                     c2: float,
                     w: float,
                     lr: float = 1.0,
                     initial_positions: [np.ndarray] = [],
                     n_cpus: int = 3,
                     iters: int = 10000,
                     post_processing_func: Callable = lambda x: x):
    '''
    if len(initial_positions) <= n_particles: use initial positions as particles, let remainder be random
    else: let n_particles = len(initial_positions)
    '''

    def get_new_velocity_and_position(velocity, position, lbest_, gbest_) -> (np.ndarray, np.ndarray):

        new_velocity = (
            w * velocity
            + c1 * np.random.random(velocity.shape) * (lbest_ - position)
            + c2 * np.random.random(velocity.shape) * (gbest_ - position)
        )
        new_position = position + lr * new_velocity
        new_position = np.where(new_position > bounds[0], new_position, bounds[0])
        new_position = np.where(new_position < bounds[1], new_position, bounds[1])
        return new_velocity, new_position

    if len(initial_positions) > n_particles:
        positions = numpyize(initial_positions)
    else:
        positions = numpyize([[np.random.uniform(bounds[0][i], bounds[1][i])
                               for i in range(len(bounds[0]))]
                              for _ in range(n_particles)])
        if len(initial_positions) > 0:
            positions[:len(initial_positions)] = initial_positions[:len(positions)]
    positions = np.where(positions > bounds[0], positions, bounds[0])
    positions = np.where(positions < bounds[1], positions, bounds[1])
    # velocities = np.zeros_like(positions)
    velocities = (np.random.random(positions.shape) - 0.5) * 0.0001  # init velocities to small random number
    lbests = np.zeros_like(positions)
    lbest_scores = np.zeros(len(positions))
    lbest_scores[:] = np.inf
    gbest = np.zeros_like(positions[0])
    gbest_score = np.inf

    tested = set()

    itr_counter = 0
    worker_cycler = 0
    pos_cycler = 0

    workers = [None for _ in range(n_cpus)]
    working = set()
    pool = Pool(processes=n_cpus)

    while True:
        if itr_counter >= iters:
            if all(worker is None for worker in workers):
                break
        else:
            if workers[worker_cycler] is None:
                if pos_cycler not in working:
                    pos_hash = sha256(str(positions[pos_cycler]).encode('utf-8')).hexdigest()
                    for _ in range(100):
                        if pos_hash not in tested:
                            break
                        print('debug duplicate candidate')
                        print('pos', positions[pos_cycler])
                        print('vel', velocities[pos_cycler])
                        velocities[pos_cycler], positions[pos_cycler] = \
                            get_new_velocity_and_position(velocities[pos_cycler],
                                                          positions[pos_cycler],
                                                          lbests[pos_cycler],
                                                          gbest)
                        pos_hash = sha256(str(positions[pos_cycler]).encode('utf-8')).hexdigest()
                    else:
                        print('too many duplicates, choosing random position')
                        positions[pos_cycler] = numpyize([np.random.uniform(bounds[0][i], bounds[1][i])
                                                          for i in range(len(bounds[0]))])
                        #raise Exception('too many duplicate candidates')
                    tested.add(pos_hash)
                    workers[worker_cycler] = (pos_cycler, pool.apply_async(reward_func, args=(positions[pos_cycler],)))
                    working = set([e[0] for e in workers if e is not None])
                pos_cycler = (pos_cycler + 1) % len(positions)
        if workers[worker_cycler] is not None and workers[worker_cycler][1].ready():
            score = post_processing_func(workers[worker_cycler][1].get())
            pos_idx = workers[worker_cycler][0]
            workers[worker_cycler] = None
            working = set([e[0] for e in workers if e is not None])
            itr_counter += 1
            if score < lbest_scores[pos_idx]:
                lbests[pos_idx], lbest_scores[pos_idx] = positions[pos_idx], score
                if score < gbest_score:
                    gbest, gbest_score = positions[pos_idx], score
            velocities[pos_cycler], positions[pos_cycler] = \
                get_new_velocity_and_position(velocities[pos_cycler],
                                              positions[pos_cycler],
                                              lbests[pos_cycler],
                                              gbest)
        worker_cycler = (worker_cycler + 1) % len(workers)
        sleep(0.001)
    return gbest, gbest_score
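
A minimal usage sketch for pso_multiprocess on a toy objective, assuming the function is importable along with the project helper numpyize and its other dependencies (hashlib.sha256, multiprocessing.Pool, time.sleep). The reward function must live at module level so the worker pool can pickle it.

import numpy as np

def sphere(x):
    # Toy reward: sum of squares, minimized at the origin.
    return float(np.sum(np.asarray(x) ** 2))

if __name__ == '__main__':
    # Row 0: per-dimension lower bounds, row 1: upper bounds.
    bounds = np.array([[-5.0] * 4, [5.0] * 4])
    best_pos, best_score = pso_multiprocess(
        sphere, n_particles=8, bounds=bounds,
        c1=1.4, c2=1.4, w=0.7, n_cpus=2, iters=200)
    print(best_pos, best_score)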
Example #5
def xs_to_config(self, xs):
    config = self.config.copy()
    for i, k in enumerate(self.expanded_ranges):
        config[k] = xs[i]
    return numpyize(denanify(pack_config(config)))
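
xs_to_config maps a flat optimizer vector back onto config keys purely by position, so the iteration order of self.expanded_ranges must match the order used to build the optimizer's bounds. A hypothetical, self-contained illustration of that positional mapping (pack_config, denanify and numpyize from the project are omitted; the key names are made up):

expanded_ranges = {'grid_span': [0.1, 0.7], 'markup_range': [0.001, 0.02]}
xs = [0.3, 0.01]
flat = {k: xs[i] for i, k in enumerate(expanded_ranges)}
# flat == {'grid_span': 0.3, 'markup_range': 0.01}; pack_config would then rebuild the nested config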
Example #6
def load_live_config(live_config_path: str) -> dict:
    try:
        live_config = json.load(open(live_config_path))
        return sort_dict_keys(numpyize(make_compatible(live_config)))
    except Exception as e:
        raise Exception(f"failed to load live config {live_config_path} {e}")