import csv
import field_test_fish as ftf


def test_algorithm(fish_label, nreps=1):
    """Run the grey wolf optimizer on one fish under every candidate strategy combination,
    appending each run's fitness trajectory to that fish's GWO convergence CSV."""
    niters = 1000   # grey wolf optimizer iterations per run
    pack_size = 26  # grey wolf optimizer pack size
    forager = ftf.FieldTestFish(fish_label)
    with open(CONVERGENCE_RESULTS_FOLDER + 'GWO convergence ' + fish_label + '.csv', 'a') as csvfile:
        writer = csv.writer(csvfile, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
        for rep in range(nreps):
            for strategy in candidate_strategies:
                use_chaos, use_dynamic_C, use_exponential_decay, use_levy, use_only_alpha, use_weighted_alpha = strategy
                key = (pack_size, use_chaos, use_dynamic_C, use_exponential_decay, use_levy, use_only_alpha, use_weighted_alpha)
                fitnesses = forager.optimize(niters, pack_size, True, use_chaos, use_dynamic_C, use_exponential_decay, use_levy, use_only_alpha, use_weighted_alpha)
                writer.writerow([rep] + list(key) + fitnesses)
                csvfile.flush()
                print("Completed calculation ", rep+1, " of ", nreps, " for key ", key, " for fish ", fish_label)
Example #2
#from mayavi import mlab
#import Fish3D

import math
import numpy as np
import importlib.util
import json
import sys
import field_test_fish as ftf

test_fish = ftf.FieldTestFish('2015-07-11-1 Chena - Chinook Salmon (id #4)')
# test_fish.evaluate_fit()
# test_fish.optimize(200, 7)
# test_fish.evaluate_fit()

# next step: do this optimize / evaluate for a bunch of parameter sets, along the lines of
# https://github.com/befelix/SafeOpt/blob/master/examples/2d_example.ipynb
# (a hedged sketch of that loop follows the bounds below)

import safeopt
import random
import GPy

# also probably need to optimize these three
# 1.0,  # lambda_c
# 0.5,  # sigma_t
# 0.03,  # base crypticity
# but for now, starting with just the main 5

bounds = [
    (0.00001, 2.0),
    # ... bounds for the remaining "main 5" parameters (truncated in the source)
]
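# A minimal sketch (an assumption, not the author's finished code) of how the SafeOpt loop from the
# notebook linked above might be wired to this fish. The objective reuses modify_parameters(),
# optimize(), and evaluate_fit() as they appear in the later snippets; the kernel, noise level,
# iteration count, starting point, and fmin threshold are all placeholders.
def gwo_fit(params):
    test_fish.cforager.modify_parameters(*params)  # assumed setter; the real call takes the full parameter list
    test_fish.optimize(200, 7)                     # inner grey wolf optimization of foraging behavior
    return test_fish.evaluate_fit()                # higher values = better fit to the field observations

parameter_set = safeopt.linearly_spaced_combinations(bounds, 25)  # discretized search grid
x0 = np.array([[np.mean(b) for b in bounds]])                     # assumed "safe" starting parameters
y0 = np.array([[gwo_fit(x0[0])]])
gp = GPy.models.GPRegression(x0, y0, GPy.kern.Matern52(input_dim=len(bounds)), noise_var=0.01)
opt = safeopt.SafeOpt(gp, parameter_set, fmin=[-1e6])             # placeholder "safety" floor; no real safety constraint here

for _ in range(30):
    x_next = opt.optimize()                        # pick the next parameter set to evaluate
    opt.add_new_data_point(x_next, gwo_fit(x_next))
print(opt.get_maximum())                           # best parameters found and their predicted fit
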
JOB_NAME = "SecondFiveOfEach"
FISH_GROUP = "calibration_five_of_each"
n_initial_points = 400
fixed_parameters = {  # Fixing a parameter's value here excludes it from the optimization (especially alpha_tau and alpha_d when search images are not allowed); see the sketch after full_domain below
    'alpha_tau': 1,
    'alpha_d': 1,
    'flicker_frequency': 50
}
log_scaled_params = [
    'delta_0', 'A_0', 'alpha_tau', 'alpha_d', 'beta', 't_s_0', 'tau_0', 'nu'
]

# CODE THAT SHOULD REMAIN THE SAME FOR ALL JOBS

actual_parameter_bounds = field_test_fish.FieldTestFish(
    '2015-06-10-1 Chena - Chinook Salmon (id #1)').parameter_bounds.items()
scaled_parameter_bounds = {
    key: ((np.log10(value[0]),
           np.log10(value[1])) if key in log_scaled_params else value)
    for key, value in actual_parameter_bounds
}
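# For example (illustrative numbers only, not from the source): if the raw bounds for tau_0 were
# (0.01, 100.0), its entry in scaled_parameter_bounds would be (-2.0, 2.0), and the optimizer's
# log10-scaled proposal x is mapped back to the model's scale as 10**x.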
full_domain = [  # must contain all inputs, in the order they're given to cforager.set_parameters()
    {
        'name': 'delta_0',
        'type': 'continuous',
        'domain': scaled_parameter_bounds['delta_0']
    },
    {
        'name': 'alpha_tau',
        'type': 'continuous',
        'domain': scaled_parameter_bounds['alpha_tau']
    },
    # ... entries for the remaining parameters (truncated in the source)
]
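# A hedged sketch (assumption, not shown in this snippet): since entries in fixed_parameters are
# meant to be excluded from the optimization, the searchable domain would presumably be built by
# filtering them out of full_domain, with the fixed values spliced back in (in full_domain order)
# before calling cforager.set_parameters(). For example:
domain = [d for d in full_domain if d['name'] not in fixed_parameters]
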
IS_MAC = (sys.platform == 'darwin')
if IS_MAC:
    RESULTS_FOLDER = '/Users/Jason/Dropbox/Drift Model Project/Calculations/cluster_pretest_results/s'
    method, scaling, batch_name, fish_label = 'ei', 'linear', 'mytest', '2015-07-11-1 Chena - Chinook Salmon (id #4)'
    n_iterations = 5  # will really be 2x this + initial 30
    opt_cores = 7  # grey wolf algorithm pack size
    opt_iters = 500  # grey wolf algorithm iterations
else:
    RESULTS_FOLDER = '/home/alaskajn/results/bayes_opt_test/'
    method, scaling, batch_name, fish_label = sys.argv[1:]
    n_iterations = 1000  # will really be 2x this + initial 30
    opt_cores = 26  # grey wolf algorithm pack size
    opt_iters = 500  # grey wolf algorithm iterations

test_fish = field_test_fish.FieldTestFish(fish_label)
invalid_objective_function_value = -1000000  # used to replace inf, nan, or extreme values with something slightly less bad

if scaling == 'linear':

    def f(delta_0, alpha_0, beta, Z_0, c_1, discriminability, sigma_t, tau_0,
          t_V):
        test_fish.cforager.modify_parameters(delta_0, alpha_0, beta, Z_0, c_1,
                                             discriminability, sigma_t, tau_0,
                                             t_V)
        test_fish.optimize(opt_iters, opt_cores, False, False, False, False,
                           False, False, True)
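        # The seven booleans passed above mirror the flags in test_algorithm() earlier in this
        # listing: an initial verbosity/tracking switch (assumed), then use_chaos, use_dynamic_C,
        # use_exponential_decay, use_levy, use_only_alpha, and use_weighted_alpha; only weighted
        # alpha is enabled here.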
        obj = -test_fish.evaluate_fit(verbose=True)
        if np.isfinite(obj) and obj > invalid_objective_function_value:
            return obj
        else:
            return invalid_objective_function_value
Example #5

# Assumed reconstruction (the opening lines of this snippet are missing from the source): the
# guard and the definitions below mirror the else clause and the earlier local-test snippets.
import sys

if sys.platform == 'darwin':
    import field_test_fish
    acquisition_type, scaling, batch_name = 'ei', 'linear', 'mytest'
    fish_labels = [
        '2015-07-11-1 Chena - Chinook Salmon (id #4)'
    ]
    fishes = [
        field_test_fish.FieldTestFish(fish_label) for fish_label in fish_labels
    ]
    RESULTS_FOLDER = '/Users/Jason/Dropbox/Drift Model Project/Calculations/cluster_pretest_results/'
    n_iterations = 50  # number of times new values are requested to calculate fitnesses
    n_evals_per_iteration = 1  # number of jobs per iteration above, for parallelizing across nodes eventually
    opt_cores = 7  # grey wolf algorithm pack size
    opt_iters = 100  # grey wolf algorithm iterations
else:
    import field_test_fish
    acquisition_type, scaling, batch_name = argv[1:]
    fish_labels = [
        '2015-07-11-1 Chena - Chinook Salmon (id #4)',
        '2015-08-05-1 Chena - Chinook Salmon (id #4)',
        '2015-07-10-1 Chena - Chinook Salmon (id #4)'
    ]
    fishes = [
        field_test_fish.FieldTestFish(fish_label) for fish_label in fish_labels
    ]
    RESULTS_FOLDER = '/home/alaskajn/results/bayes_opt_test/'
    n_iterations = 1000  # number of times new values are requested to calculate fitnesses
    n_evals_per_iteration = 1  # number of jobs per iteration above, for parallelizing across nodes eventually
    opt_cores = 26  # grey wolf algorithm pack size
    opt_iters = 100  # grey wolf algorithm iterations


def objective_function(*args):
    invalid_objective_function_value = 1000000  # used to replace inf, nan, or extreme values with something slightly less bad
    argnames = [item['name'] for item in domain]
    argvalues = args

    def scale(argname, argvalue):
        return 10**argvalue if argname in log_scaled_params else argvalue
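    # For example (illustrative values): if 'tau_0' is listed in log_scaled_params, then
    # scale('tau_0', -1.5) returns 10**-1.5 ≈ 0.0316, while a parameter not in the list is
    # passed through unchanged.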