        #propagating the error back to the weights between the hidden layer and the output node
        for idx in range(len(y_results)):
            ann.update_outgoing_weights_for_neuron(wj,idx,network_error,y_results,LEARNING_RATE)
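
# For reference: a minimal, hedged sketch of the delta-rule update that
# update_outgoing_weights_for_neuron presumably performs. The function name
# below and the exact gradient term are assumptions, not the real ann module code.
def sketch_update_outgoing_weight(wj, idx, network_error, y_results, learning_rate):
    # nudge the hidden->output weight along the error gradient:
    # delta_w = learning_rate * output_error * activation_of_hidden_neuron_idx
    wj[idx] += learning_rate * network_error * y_results[idx]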


# the grand finale! setting the predictions using the learned weights
for row in prediction_set:
    row[PREDICT_TARGET] = ann.run_test_instance(wij,wj,row,PREDICT_TARGET,NUM_NEURONS_HIDDEN_LAYER,NUM_DIGITS)
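
# For illustration only: a rough sketch of the forward pass that
# ann.run_test_instance presumably computes -- one sigmoid hidden layer
# feeding a single sigmoid output node. Every name below is an assumption,
# not the actual ann module API.
import math

def sketch_forward_pass(wij, wj, row, predict_target, num_neurons, num_digits):
    sigmoid = lambda x: 1.0 / (1.0 + math.exp(-x))
    # the target column is excluded from the inputs
    inputs = [v for i, v in enumerate(row) if i != predict_target]
    hidden = [sigmoid(sum(w * x for w, x in zip(wij[j], inputs)))
              for j in range(num_neurons)]
    return round(sigmoid(sum(w * h for w, h in zip(wj, hidden))), num_digits)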


predictions_dir_name = files.get_predictions_dir_from_partitions_dir(dir_name)
files.save_list_of_lists_as_csv(predictions_dir_name+"_ann/trained_weights_for_input_nodes.csv",wij)
files.save_list_as_csv(predictions_dir_name+"_ann/trained_weights_for_output_node.csv",wj)


#record the hyperparameters used, in case we need to know how these weights were obtained
constants = [["NUM_NEURONS_HIDDEN_LAYER =>", NUM_NEURONS_HIDDEN_LAYER],
             ["LEARNING_RATE =>", LEARNING_RATE],
             ["NUM_EPOCHS =>", NUM_EPOCHS]]

files.save_matrix_as_csv(predictions_dir_name+"_ann/prediction_set.csv",prediction_set)
files.save_matrix_as_csv(predictions_dir_name+"_ann/configs_used.txt",constants)

print "\n  "+SUCCESS+""" Artificial Neural Network has been successfully trained and executed!
  Look at \033[36m"""+predictions_dir_name+"""_ann/trained_weights_for_input_nodes.csv\033[0m
  and \033[36m"""+predictions_dir_name+"""_ann/trained_weights_for_output_node.csv\033[0m 
  for the weights that were used.

  In addition, the set located at 
  \033[36m"""+predictions_dir_name+"""_ann/prediction_set.csv\033[0m contains the predictions
  based upon the previously mentioned trained weights.\n"""
Example #2
import os
import sys

file_name = sys.argv[1]

if not os.path.isfile(file_name):
    print ERROR+" File "+file_name+" not found.\n"
    sys.exit()

#load everything into an in-memory matrix (probably not very scalable for large files)
data_matrix = files.load_into_matrix(file_name,num_targets=NUM_TARGETS,num_attributes=NUM_ATTRS,input_delimiter=INPUT_DELIMITER,skip_first=HAS_HEADER)
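
# A hedged sketch of what files.load_into_matrix plausibly does: read the file
# line by line, split each line on the delimiter, and cast fields to float.
# The function name and header handling below are assumptions for illustration.
def sketch_load_into_matrix(path, input_delimiter, skip_first):
    matrix = []
    with open(path) as f:
        if skip_first:
            f.readline()                      # drop the header row
        for line in f:
            fields = line.strip().split(input_delimiter)
            matrix.append([float(v) for v in fields])
    return matrix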

#normalizing and shuffling
data_matrix = prep.normalize(data_matrix,NUM_ATTRS,NUM_TARGETS,NORMALIZE_TARGETS)
data_matrix = prep.shuffle(data_matrix)
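
# For reference: prep.normalize most likely rescales each attribute column to
# [0, 1] with min-max normalization, roughly as sketched below. The column
# layout and the skipping of target columns are assumptions.
def sketch_min_max_normalize(matrix, num_attrs):
    for col in range(num_attrs):
        column = [row[col] for row in matrix]
        lo, hi = min(column), max(column)
        if hi > lo:                           # skip constant columns to avoid dividing by zero
            for row in matrix:
                row[col] = (row[col] - lo) / float(hi - lo)
    return matrix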

#split into training and test sets according to TRAIN_RATIO (here the training set is twice as large as the test set)
train_set_matrix = prep.take_train_set(data_matrix,TRAIN_RATIO)
test_set_matrix = prep.take_test_set(data_matrix,TRAIN_RATIO)
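
# A hedged guess at the split semantics: take_train_set keeps the first
# TRAIN_RATIO fraction of the (already shuffled) rows and take_test_set keeps
# the remainder. The real prep implementation may differ.
def sketch_take_train_set(matrix, train_ratio):
    return matrix[:int(len(matrix) * train_ratio)]

def sketch_take_test_set(matrix, train_ratio):
    return matrix[int(len(matrix) * train_ratio):]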

# figuring out the target directory where the partitions should be saved

partitions_directory = files.get_partitions_dir_from_file_name(file_name)


#saving the preprocessed partitions

files.save_matrix_as_csv(partitions_directory+"/train_set.csv",train_set_matrix)
files.save_matrix_as_csv(partitions_directory+"/test_set.csv",test_set_matrix)

print SUCCESS+" Partitions successfully created\n"
Example #3
# weighted k-NN: predict the target value for every row in the prediction set
for row in prediction_set:

    prediction = 0.0

    neighbour_index_distances_list = knn.get_nearest_neighbours(row,train_set,indexes_to_use,NUM_NEIGHBOURS,PREDICT_TARGET)
    
    total_weight = 0.0

    for neighbour_index, distance in neighbour_index_distances_list:

        neighbour_target = train_set[neighbour_index][PREDICT_TARGET]

        # inverse-distance weighting: closer neighbours count more; the small
        # epsilon guards against a zero distance from an exact duplicate row
        weight = 1.0 / max(distance, 1e-9)

        total_weight += weight
        prediction += ( neighbour_target * weight )


    # weighted average of the neighbours' target values
    prediction = round(prediction/total_weight,NUM_DIGITS)

    row[PREDICT_TARGET] = prediction
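
# For reference: a hedged sketch of what knn.get_nearest_neighbours could look
# like -- Euclidean distance over the chosen attribute indexes, sorted
# ascending, truncated to the k closest. Name and signature are assumptions.
import math

def sketch_get_nearest_neighbours(row, train_set, indexes_to_use, k, predict_target):
    pairs = []
    for idx, train_row in enumerate(train_set):
        dist = math.sqrt(sum((row[i] - train_row[i]) ** 2 for i in indexes_to_use))
        pairs.append((idx, dist))
    pairs.sort(key=lambda pair: pair[1])      # nearest first
    return pairs[:k]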



predictions_dir_name = files.get_predictions_dir_from_partitions_dir(dir_name)


files.save_matrix_as_csv(predictions_dir_name+"_knn/prediction_set.csv",prediction_set)

print SUCCESS+" Predictions have been made using the KNN algorithm.\n  Look at \033[36m"+predictions_dir_name+"_knn/prediction_set.csv\033[0m for the results\n"