import numpy as np

import neural  # assumed local module providing the NN class


def on_and_off():
    """Maps a single 'off' neuron to (1, 0) and 'on' to (0, 1)."""
    data = np.array([
        [-100, 1, 0],
        [100, 0, 1],
    ])
    inputs = data[:, [0]]   # first column, kept 2-D: shape (2, 1)
    outputs = data[:, 1:]   # remaining columns: shape (2, 2)

    nn = neural.NN([1, 3, 3, 2])
    nn.setTrainingData(inputs, outputs)
    nn.train(5000)
    print(nn.predict(inputs))
import pickle

import nn  # assumed local module providing the NN class

# Deep_Q_Learning_Player is assumed to be defined/imported elsewhere in the project.


class Value_iter_agent(Deep_Q_Learning_Player):
    exp_replay = []
    exp_replay_max_size = 100

    # The net is a class attribute - shared by all agents, which allows
    # agents to learn from playing against each other.
    try:
        # Load the neural net from file if one was saved earlier.
        pickle_file_name = "NN_10_10_VI.pickle"
        with open(pickle_file_name, 'rb') as file:
            moving_net = pickle.load(file)
    except (OSError, pickle.PickleError):
        # Otherwise create a new net: 12 inputs, two hidden layers of 10, 6 outputs.
        moving_net = nn.NN([12, 10, 10, 6])

    def __init__(self, gamma=0.9, epsilon=0.2, final_eps=0.05):
        super().__init__(False, gamma, epsilon, final_eps)
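The class declares a shared `exp_replay` buffer but the snippet never shows it being filled. A minimal sketch, not part of the original class: the `remember` name and the (state, action, reward, next_state) tuple layout are assumptions.

    def remember(self, state, action, reward, next_state):
        # Hypothetical helper: append a transition to the class-level buffer
        # shared by all agents, evicting oldest-first once the cap is hit.
        Value_iter_agent.exp_replay.append((state, action, reward, next_state))
        if len(Value_iter_agent.exp_replay) > Value_iter_agent.exp_replay_max_size:
            Value_iter_agent.exp_replay.pop(0)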
import numpy as np

import neural  # assumed local module providing the NN class


def greater_than():
    """Should discover whether numbers between 0 and 10 are greater than 5."""
    inputs = np.random.random((1000, 1)) * 10
    outputs = (inputs > 5) * 1   # boolean mask cast to 0/1 labels

    nn = neural.NN([1, 2, 1])
    nn.setTrainingData(inputs, outputs)
    nn.train(10000)

    test_inputs = np.linspace(0, 10, 15).reshape((15, 1))
    test_outputs = nn.predict(test_inputs)
    collected = np.hstack((test_inputs, test_outputs.T))
    for row in collected:
        i = row[0]
        o = row[1]
        print("{:.1f} > 5? {} ({:.2f})".format(i, o > 0.5, o))
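A quick side check of why the tiny [1, 2, 1] topology is plausible here: a single steep sigmoid can already represent the x > 5 threshold. The sketch below is illustrative only and independent of the `neural` module.

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

# With weight 10 and bias -50, inputs below 5 score under 0.5 and inputs
# above 5 score over 0.5, so thresholding at 0.5 recovers x > 5.
for x in [2.0, 4.9, 5.1, 8.0]:
    print(x, sigmoid(10.0 * (x - 5.0)))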
import numpy as np

import neural_network  # assumed local module providing NN and load_model


def run():
    print(f'{"":-<20} Loading data {"":-<20}')
    # Flatten each 28x28 MNIST image into a 784-long feature vector.
    X_train = np.load("train-images.npy").reshape(-1, 28 * 28)
    Y_train = np.load("train-labels.npy")
    X_test = np.load("t10k-images.npy").reshape(-1, 28 * 28)
    Y_test = np.load("t10k-labels.npy")
    print(f'{"":-<20} Data loaded {"":-<20}')

    model = neural_network.NN(X_train, X_test, Y_train, Y_test,
                              neurons=512, epochs=5,
                              learning_rate=1e-3, dropout=0.2)

    print(f'{"":-<20} Training model {"":-<20}')
    model.fit()
    print(f'{"":-<20} Model trained {"":-<20}')

    print(f'{"":-<20} Accuracy curve {"":-<20}')
    model.accuracy_curve()
    print(f'{"":-<20} Loss curve {"":-<20}')
    model.loss_curve()

    print(f'{"":-<20} Saving model {"":-<20}')
    model.save("toto.sav")
    print(f'{"":-<20} Loading model {"":-<20}')
    carotte = neural_network.load_model("toto.sav")

    print(f'{"":-<20} Accuracy curve {"":-<20}')
    carotte.accuracy_curve()
    print(f'{"":-<20} Image prediction {"":-<20}')
    carotte.img_pred("test.jpg")
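A quick, self-contained check of what the reshape(-1, 28 * 28) step does; the zeros array is a stand-in for the real .npy file.

import numpy as np

# Stand-in for train-images.npy: 5 dummy 28x28 images.
imgs = np.zeros((5, 28, 28))
print(imgs.reshape(-1, 28 * 28).shape)  # (5, 784): one flat row per image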
import random

import numpy as np

import neural  # assumed local module providing the NN class


def triangle_network():
    """Builds a neural network that checks whether 3 sticks of the
    specified lengths can form a triangle."""
    shortest = 1
    longest = 100

    # Generate labelled training data: three random stick lengths plus a
    # 0/1 label from _is_triangle.
    rows = []
    for _n in range(2500):
        x = random.randint(shortest, longest)
        y = random.randint(shortest, longest)
        z = random.randint(shortest, longest)
        rows.append([x, y, z, float(_is_triangle(x, y, z))])
    data = np.array(rows)
    inputs = data[:, 0:3]
    outputs = data[:, [3]]

    nn = neural.NN([3, 5, 5, 1])
    nn.setTrainingData(inputs, outputs)
    nn.train(10000)

    # Score the trained net on 50 fresh random triples.
    rows = []
    for _n in range(50):
        x = random.randint(shortest, longest)
        y = random.randint(shortest, longest)
        z = random.randint(shortest, longest)
        rows.append([x, y, z])
    test_inputs = np.array(rows)
    test_results = nn.predict(test_inputs)

    total_count = 0
    correct_count = 0
    for xx, res in zip(test_inputs, test_results[0]):
        isCorrect = _is_triangle(xx[0], xx[1], xx[2]) == (res > 0.5)
        print("({:2d},{:2d},{:2d}) :: {:^5} ({:.2f}).".format(
            xx[0], xx[1], xx[2], str(isCorrect), res))
        total_count += 1
        correct_count += float(isCorrect)
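The snippet calls an `_is_triangle` helper it never defines. A minimal sketch, assuming the standard triangle inequality (each side strictly shorter than the sum of the other two):

def _is_triangle(x, y, z):
    # Hypothetical reconstruction of the missing helper: three lengths form
    # a non-degenerate triangle iff each is less than the sum of the others.
    return x + y > z and y + z > x and x + z > y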
import random


def spotify(update, context):
    """Bot command: builds a Spotify playlist of tracks selected by the
    neural network and replies with the playlist link."""
    # Imports
    import sys
    sys.path.append('../spotify_analyzer/')
    import spotify_code as sc

    num_tracks = 20
    chat_id = update.effective_chat.id

    # -------- GET SEED/GENRE -------- #
    genres = sc.get_genres()
    num_genres = len(genres)
    genre_choice = random.randint(0, num_genres - 1)
    update.message.reply_text(
        "Choosing songs and adding them to the playlist...")

    # -------- CREATE NEW PLAYLIST -------- #
    username = '******'
    scope = 'playlist-modify-public'
    newplaylist_id, playlist_link = sc.set_playlist(
        username,
        "F**k off, I'm Samantha! " + str(genres[genre_choice]).capitalize(),
        "The songs in this playlist were chosen by Samantha, a personal "
        "assistant based on artificial intelligence."
    )

    # -------- CLASSIFIER -------- #
    import neural_network

    num_tracks, tracks_analysis = sc.get_new_tracks(num_tracks,
                                                    str(genres[genre_choice]))
    id_list = []
    for i in range(num_tracks):
        # Feed the 12 audio features of track i to the classifier; keep the
        # track only if the network approves it.
        if neural_network.NN(tracks_analysis.loc[i, 'popularity'],
                             tracks_analysis.loc[i, 'danceability'],
                             tracks_analysis.loc[i, 'energy'],
                             tracks_analysis.loc[i, 'key'],
                             tracks_analysis.loc[i, 'loudness'],
                             tracks_analysis.loc[i, 'mode'],
                             tracks_analysis.loc[i, 'speechiness'],
                             tracks_analysis.loc[i, 'acousticness'],
                             tracks_analysis.loc[i, 'instrumentalness'],
                             tracks_analysis.loc[i, 'liveness'],
                             tracks_analysis.loc[i, 'valence'],
                             tracks_analysis.loc[i, 'tempo']) == 1:
            print(tracks_analysis.loc[i, 'spotify_link'],
                  tracks_analysis.loc[i, 'name_track'],
                  tracks_analysis.loc[i, 'id_track'])
            id_list.append(tracks_analysis.loc[i, 'id_track'])

    # -------- PUT SELECTED TRACKS IN THE NEW PLAYLIST -------- #
    sc.insert_tracks_playlist(newplaylist_id, id_list)
    update.message.reply_text("Your playlist is hot and ready!")
    update.message.reply_text(playlist_link)
import logger as lg
from joblib import Parallel, delayed

# Assumed project-local modules (names inferred from the cf./nn. usage below).
import configuration as cf
import nn


def run(opt, cnf, i):
    if cnf.parallel:
        cnf.setRandomSeed(seed=i + 1)
    if not cnf.predict:
        opt.initialization()
        opt.training()
        opt.out_errorgraph()
    else:
        opt.predict()


if __name__ == '__main__':
    cnf = cf.Configuration()
    cnf.outSetting()
    log = lg.Logger(cnf)
    if cnf.parallel:
        # Parallel mode: one shared NN instance; each trial seeds itself
        # inside run() using its trial index.
        opt = nn.NN(cnf, log)
        Parallel(n_jobs=-1)(
            [delayed(run)(opt, cnf, i) for i in range(cnf.max_trial)])
    else:
        # Serial mode: seed and build a fresh NN per trial.
        for i in range(cnf.max_trial):
            cnf.setRandomSeed(seed=i + 1)
            opt = nn.NN(cnf, log)
            run(opt, cnf, 0)
            del opt
    sts = lg.Statistics(cnf, log.path_out, log.path_trial)
    sts.outStatistics()
import time

import matplotlib.pyplot as plt

import dataset          # assumed local modules providing Data and NN
import neural_network

nData = 500
nFeaturesPattern = 2

dt = dataset.Data(nData)
dt.buildDataInCircles()
# dt.showDataGraph()
X, Y = dt.getData()

topology = [nFeaturesPattern, 4, 8, 1]
nn = neural_network.NN(topology)

iteration = 25000
loss = [1]   # sentinel so the first comparison below has something to beat
for i in range(iteration):
    pY = nn.trainingNeuralNetwork(X, Y, learning_factor=0.5)
    error = nn.functionCost[0](nn.out[-1][1], Y)
    # Log whether the cost went down ('<') or up ('>') since last iteration.
    if error < loss[-1]:
        print(f'itr = {i} | error = {error} | status = <')
    else:
        print(f'itr = {i} | error = {error} | status = >')
    loss.append(error)
    time.sleep(0.1)
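matplotlib is imported above but never used in the snippet; a minimal follow-up sketch that plots the recorded loss history once the loop finishes (assumes `loss` holds one cost value per iteration, with the initial sentinel dropped):

plt.plot(loss[1:])
plt.xlabel("iteration")
plt.ylabel("cost")
plt.title("Training loss")
plt.show()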
# Assumed project-local modules (names inferred from usage below).
import attribute
import create_pattern
import neural_network
import sample_main
import cancer_main
import car_main
import student_main
import balloon_main
import balance_main


def main():
    print("0 : Sample dataset")
    print("1 : Breast Cancer dataset")
    print("2 : Car Evaluation dataset")
    print("3 : Student Performance dataset")
    print("4 : Balloon dataset")
    print("5 : Balance Scale dataset")
    user_input = int(input("Enter choice of dataset: "))

    # Read the control file and associated data.
    if user_input == 0:
        ctrl_file = open("sample-dataset/control-file.txt", "r")
        data = sample_main.reading_data()
    elif user_input == 1:
        ctrl_file = open("breast-cancer-dataset/control-file.txt", "r")
        data = cancer_main.reading_data()
    elif user_input == 2:
        ctrl_file = open("car-evaluation-dataset/control-file.txt", "r")
        data = car_main.reading_data()
    elif user_input == 3:
        ctrl_file = open("student-performance-dataset/control-file.txt", "r")
        data = student_main.reading_data()
    elif user_input == 4:
        ctrl_file = open("balloon-dataset/control-file.txt", "r")
        data = balloon_main.reading_data()
    elif user_input == 5:
        ctrl_file = open("balance-scale-dataset/control-file.txt", "r")
        data = balance_main.reading_data()

    # Build the list of all attributes with their properties.
    attr_list = []
    for line in ctrl_file:
        line = line.strip('\n')
        words = line.split(" ")
        if line and line[0] != "#":
            num = words[0]
            name = words[1]
            num_of_vals = words[2]
            obj = attribute.Attribute(num, name, num_of_vals)
            for x in words[3:]:
                obj.adding_values(x)
            attr_list.append(obj)

    # Copy the target attribute (the last one) out separately and remove it
    # from the attribute list.
    target_attr = attribute.Attribute(attr_list[-1].num, attr_list[-1].name,
                                      attr_list[-1].num_of_vals)
    for y in attr_list[-1].vals:
        target_attr.adding_values(y)
    print("Target attribute details:", target_attr.printvals())
    del attr_list[-1]

    pat = create_pattern.pattern(data, attr_list, target_attr)

    # Divide the data into 10 folds for cross-validation.
    datasets = []
    valid_data_len = len(pat) // 10
    j = 0
    for i in range(10):
        dat = []
        for d in range(j, j + valid_data_len):
            dat.append(pat[d])
        j += valid_data_len
        datasets.append(dat)

    accuracy = []
    errors = []
    num = 1
    for d in datasets:
        print("***********Iteration", num, "***********")
        num += 1
        # Hold out fold d; train on the remaining nine folds.
        test_pat = list(d)
        train_pat = []
        for left_d in datasets:
            if left_d != d:
                train_pat = train_pat + left_d

        myNN = neural_network.NN(len(attr_list), 6,
                                 int(attribute.Attribute.getvals(target_attr)[2]))
        myNN.train(train_pat)
        tested_pat = myNN.test(test_pat)

        # A prediction counts as correct only if every output, thresholded
        # at 0.5, matches the target vector exactly.
        acc = 0.0
        for t in tested_pat:
            res = t[0]
            res_now = []
            for r in res:
                if r < 0.5:
                    res_now.append(0.0)
                else:
                    res_now.append(1.0)
            cnt = 0
            for i in range(len(res_now)):
                if res_now[i] == t[1][i]:
                    cnt += 1
            if cnt == len(res_now):
                acc += 1
        accu = acc / len(tested_pat)
        accuracy.append(accu)
        errors.append(1.0 - accu)

    # Accuracy and confidence interval across the 10 folds.
    print("***********Statistics***********")
    sum_acc = 0.0
    for acc in accuracy:
        print(acc * 100)
        sum_acc += acc * 100
    mean_acc = sum_acc / len(accuracy)
    print("Mean accuracy: ", mean_acc)
    print("Mean Error rate: ", 100 - mean_acc)

    sum_err = 0.0
    for err in errors:
        sum_err += err
    mean_err = sum_err / len(accuracy)
    # Note: this interval mixes a mean accuracy in percent with a mean error
    # on a 0-1 scale; see the sketch after this function.
    print("Confidence Interval on Accuracy: [", mean_acc - 1.96 * mean_err,
          " , ", mean_acc + 1.96 * mean_err, "]")
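The interval printed above is not a standard error: it adds a mean error rate on a 0-1 scale to a mean accuracy in percent. A minimal sketch of a conventional 95% interval from the same per-fold results (assumes `accuracy` holds the 10 fold fractions):

import math

mean = sum(accuracy) / len(accuracy)
# Sample variance of the fold accuracies, then the standard error of the mean.
var = sum((a - mean) ** 2 for a in accuracy) / (len(accuracy) - 1)
sem = math.sqrt(var / len(accuracy))
print("95% CI:", mean - 1.96 * sem, ",", mean + 1.96 * sem)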