# "rate_of_learning": 0.26343183932470754, # "max_epoch_size": 3, # "gaussian_noise_stdev": 0.0007517656514955944, # "l2_regularization": 0.00022259525510874703, # "max_num_epochs": 14, # "random_normal_initializer_stdev": 0.0005827304210740794 # } # cif # optimized_configuration = {'num_hidden_layers': 5.0, 'cell_dimension': 28.471127262736434, # 'minibatch_size': 10.135034205224617, 'max_epoch_size': 9.1502825822926326, # 'max_num_epochs': 20.962475980675006, 'l2_regularization': 0.0006369387641617046, # 'gaussian_noise_stdev': 0.00057001364478555087, # 'random_normal_initializer_stdev': 0.00025797511482927632, # 'rate_of_learning': 0.20172634121590136} # persist the optimized configuration to a file persist_results( optimized_configuration, optimized_config_directory + '/' + model_identifier + '.txt') #optimized_configuration = read_optimal_hyperparameter_values(optimized_config_directory + '/' + model_identifier + '.txt') # test the model #for i in range(1, 11): # args.seed = i testing(args, optimized_configuration) end = time.time() print(end - start)
count = 0
for lag in lags:
    current_lag = lag
    if require_validation:
        range_of_series = [i for i in range(0, num_of_series)]

        # Randomly choose 1/7 of the series and tune the hyperparameters on that sample
        chosen_indices = random.sample(range_of_series, int(len(range_of_series) * (1 / 7)))
        optimized_configuration = smac()

        # persist the optimized configuration to a file
        persist_results(
            optimized_configuration,
            base_dir + optimized_config_directory + "fnn_" + dataset_name + '_lag_' + str(lag) + '.txt')

        validation_error = train_model(optimized_configuration)
        print(optimized_configuration)
        print(validation_error)
    else:
        # Calculate FFNN forecasts for the original dataset using a chosen clustering approach
        subprocess.call([
            "Rscript", "--vanilla",
            base_dir + "feed_forward_nn/ffnn_cluster_tester.R",
            str(num_of_hidden_nodes[count]),
            str(decay[count]),
            original_file,
            results_file,
            dataset_name,
            str(horizon),
            str(lag),
            str(address_near_zero_insability),
            str(integer_conversion),