Example #1
0
# Approximate the Lotka-Volterra model with a fine time step.
model = LV(dt=0.005, max_t=10)

# Reference parameters used when data was generated synthetically.
test_parameters = {"alpha": 1, "beta": 0.05, "gamma": 0.02, "delta": 0.14}
x0 = 20
y0 = 30
# data = model.get_data(test_parameters, x0, y0, 0, 10, 31)

# Load experimental measurements instead of generating them.
data = get_data_from_csv("exp4_data_1.csv", t_scale=1)

# Fit the model using the improved gradient-descent variant.
# new_parameters = gradient_descent_minimal_parameters(data, model, initial_parameters, 0.00001, 1e-7, 10000)
new_parameters = gradient_descent_minimal_parameters2(
    data, model, initial_parameters,
    0.00001, 1e1, 10000,
    y_factor=100, debug_mod=100)
# new_parameters = find_minimal_parameters([data], model, initial_parameters, 0.00001, 1e1, 10000,
#                                          y_factor=100, debug_mod=100)

# Use the first data point as the initial condition when printing the model.
# NOTE(review): assumes data rows are (t, x, y) — confirm against get_data_from_csv.
x0, y0 = data[0][1], data[0][2]
# print_model(model, test_parameters, x0, y0, 10, 1)
print_data(data)
print(" ")
print_model(model, new_parameters, x0, y0, 7, 1)
Example #2
0
# Error histories for the runs over the combined datasets — presumably one
# list per algorithm (normal / modified gradient descent); confirm against
# later use of all_data_sets_errors.
all_data_sets_errors = [[], []]

# Create the output folder if it doesn't exist already.
# makedirs(exist_ok=True) replaces the exists()/mkdir() pair: the original
# check-then-create is race-prone (fails if the folder appears between the
# check and mkdir) and mkdir cannot create intermediate directories.
os.makedirs(output_folder, exist_ok=True)


# Run both the normal and modified gradient descent algorithms on all datasets
# separately, recording each run's error history in single_data_errors.
for i in range(len(datas)):
    # Normal gradient descent; per-step errors are written to single_data_errors[2*i].
    new_parameters1 = gradient_descent_minimal_parameters(datas[i], model, initial_parameters, epsilon1, step_size1,
                                                          single_data_set_num_steps,
                                                          y_factor=y_factor, error_out=single_data_errors[2*i],
                                                          debug_mod=debug_mod)
    # Modified ("improved") gradient descent; errors go to single_data_errors[2*i+1].
    new_parameters2 = gradient_descent_minimal_parameters2(datas[i], model, initial_parameters, epsilon2, step_size2,
                                                           single_data_set_num_steps,
                                                           y_factor=y_factor, error_out=single_data_errors[2*i+1],
                                                           debug_mod=debug_mod,
                                                           minimal_second_gradient=minimal_second_gradient)

    # Append a human-readable summary of both runs to the output file content.
    # single_data_errors[...][-1] is the last recorded error of each run.
    result_file_content += "Using data from {}:\n".format(data_file_names[i])
    result_file_content += "Normal gradient descent found the following parameters that gave error {}:\n {}\n".format(
        single_data_errors[2*i][-1], str(new_parameters1)
    )
    result_file_content += "Improved gradient descent found the following parameters that gave error {}:\n {}\n".format(
        single_data_errors[2*i + 1][-1], str(new_parameters2)
    )
    result_file_content += "\n"

    # Write some files that are used to create graphs to visually compare the model.
    # NOTE(review): assumes find_t0_data_point returns the (x, y) values at t=0 — confirm.
    x0, y0 = find_t0_data_point(datas[i])