def evaluate_model(params, i, D, return_dict):
    print("Starting sensitivity run %i out of %i" % (i + 1, len(D)))

    # Defining model parameters
    model_params, _ = data.get_params()
    for var, val in zip(vars, params):  # `vars` is a module-level list of the varied parameter names
        model_params[var] = val

    # Initiate and run models
    models = initiate_models(model_params)
    run_models(models)

    # Assign data to patches
    patches = data.assign_data(models)

    results = []
    for t in range(1, measurements + 1):
        mean = get_average_biomass(models, t)
        loss = get_total_loss(patches, t)
        results += [*mean, *loss]

    return_dict[i] = results

    print("Finishing sensitivity run %i out of %i" % (i + 1, len(D)))
    return
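
# Hedged usage sketch (not part of the original code): drive evaluate_model in
# parallel with multiprocessing, collecting each run's results in a shared
# Manager dict. The helper name run_sensitivity, the sample matrix D, and the
# module-level `vars`/`measurements` globals used inside evaluate_model are
# assumptions here. Call this under an `if __name__ == '__main__':` guard.
def run_sensitivity(D):
    from multiprocessing import Manager, Process

    manager = Manager()
    return_dict = manager.dict()

    # One process per parameter sample in D
    processes = []
    for i, params in enumerate(D):
        p = Process(target=evaluate_model, args=(params, i, D, return_dict))
        p.start()
        processes.append(p)

    # Wait for all runs to finish
    for p in processes:
        p.join()

    # Order results by run index so rows line up with the rows of D
    return [return_dict[i] for i in range(len(D))]
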
def minimize_total(param, param_range, params_model, iterations, subset_nr, artificial, fname):
    loss_list = []
    optimalisation_list = []  # only used by the commented-out optimisation block below
    values = np.linspace(*param_range, iterations)
    for value in values:
        print("%s value: %.3f" % (param, value))

        # Collect parameters for fitting
        these_params = copy.deepcopy(params_model)
        these_params[param] = value

        # Initiate and run models
        models = initiate_models(these_params)
        run_models(models)

        # Assign data to patches
        patches = data.assign_data(models, artificial)

        # Subset patches for parameter fitting
        if subset_nr == 5:
            subset_nr = 4
        subsets = subset.all(patches)[subset_nr]

        loss_list.append((
            *get_total_loss(patches, 1), *get_subset_loss(subsets, 1),
            *get_total_loss(patches, 2), *get_subset_loss(subsets, 2),
            *get_total_loss(patches, 3), *get_subset_loss(subsets, 3)
        ))

    # total_loss = get_total_loss(patches, 2)
    # optimalisation_list.append(total_loss)
    # opt_RL, opt_BR = zip(*optimalisation_list)
    # i = opt_RL.index(min(opt_RL))
    # j = opt_BR.index(min(opt_BR))
    #
    # if param in ['c_rr', 'c_rb']:
    #     params_model[param] = values[i]
    # elif param in ['c_bb', 'c_br']:
    #     params_model[param] = values[j]
    # else:
    #     params_model[param] = values[int(round((i+j)/2))]

    np.savetxt(fname, loss_list)
    return
def minimize_diam(param, param_range, params_model, iterations, artificial, fname):
    loss_list = []
    values = np.linspace(*param_range, iterations)
    for value in values:
        print("%s value: %.3f" % (param, value))

        # Collect parameters for fitting
        these_params = copy.deepcopy(params_model)
        these_params[param] = value

        # Initiate and run models
        models = initiate_models(these_params)
        run_models(models)

        # Assign data to patches
        patches = data.assign_data(models, artificial)

        # Calculate loss
        loss_list.append(get_diameter_loss(patches))

    np.savetxt(fname, loss_list)
    return
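
# Hedged usage sketch (assumed values) for the two fitting helpers above:
# sweep one competition parameter with minimize_total and one growth parameter
# with minimize_diam. "c_rr" appears in the commented-out block in
# minimize_total; the parameter "g", the ranges, iteration counts, subset
# number and file names are illustrative assumptions, not values from the
# original code.
if __name__ == '__main__':
    params_model, _ = data.get_params()
    minimize_total("c_rr", (0.0, 1.0), params_model,
                   iterations=20, subset_nr=0, artificial=False,
                   fname="../results/loss_c_rr.txt")
    minimize_diam("g", (0.0, 1.0), params_model,
                  iterations=20, artificial=False,
                  fname="../results/loss_diam_g.txt")
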
Example #4
from growth import get_FL
from main import initiate_models

models = initiate_models()

# Get maximum global flowlength
FL_list = []
for model in models:
    this_FL = 0
    for cell in model.allVegetation:
        this_FL += cell.FL
    FL_list.append(this_FL)

FL_glob_max = max(FL_list)
print(FL_glob_max)

# Get maximum local flowlength
print(get_FL(200))
FL_list = []
for model in models:
    for cell in model.allVegetation:
        FL_list.append(cell.FL)

FL_loc_max = max(FL_list)
print(FL_loc_max)
            # Update beta settings
            step_size = init_beta * 0.5**b
            for key in settings.keys():
                old_beta, diff, cover = settings[key]
                new_beta = old_beta + np.sign(diff) * step_size
                settings[key] = (new_beta, diff, cover)

            # Set up one process per model, for both species across all plots
            proc = []
            for cell_type in cell_types:
                # Initiate models without runoff return
                params, _ = data.get_params()
                params["alpha"] = alpha
                params["gamma"] = gamma
                models = [
                    model for model in initiate_models(params)
                    if not model.runoff_return
                ]

                # Run each model in its own process (Process from multiprocessing) to collect the differences
                for model in models:
                    p = Process(target=evaluate_model,
                                args=(model, settings, cell_type))
                    p.start()
                    proc.append(p)

            # Wait for all processes to finish, then clear the list
            for p in proc:
                p.join()
            proc = []
fname = "../results/patchsize.txt"

if __name__ == '__main__':
    artificial = False
    prob_range = (.01, .04)
    N = 6
    probs = np.linspace(*prob_range, N)

    errors = []
    for value in probs:
        print("Running model for seed probability ", value)
        these_params, _ = data.get_params()
        these_params['seed_prob'] = value

        # Initiate and run models
        models = initiate_models(these_params)
        run_models(models)

        # Assign data to patches
        patches = data.assign_data(models, artificial)

        error = 0
        for patch in patches:
            if len(patch.BR_original) != 0:
                model_size = get_size(patch)
                # TODO: also use diameters other than the last one?
                error += abs(model_size - patch.size[-1])

        errors.append(error)
        # Save after each run (overwrites the file with the errors collected so far)
        np.savetxt(fname, errors)
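
    # Hedged follow-up sketch (not in the original script): reload the saved
    # error curve and report the seed probability with the smallest total
    # patch-size error.
    errors = np.loadtxt(fname)
    best_prob = probs[int(np.argmin(errors))]
    print("Best seed probability: %.3f (total error %.3f)"
          % (best_prob, errors.min()))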