def __init__(self):
    print 'loading ANN...'
    self.hog_num_file = "ann/input/hog-nums.ann"
    self.hog_sym_file = "ann/input/hog-syms.ann"
    self.num_ann = lf.neural_net()
    self.sym_ann = lf.neural_net()
    self.num_ann.create_from_file(self.hog_num_file)
    self.sym_ann.create_from_file(self.hog_sym_file)
def test_network(file_name):
    # Ask the user for a testing file
    test_file = input('\nWhat file do you want to test on?: ')
    # Read the file line by line; even lines hold inputs, odd lines hold outputs
    inputs = []
    outputs = []
    with open(test_file, 'r') as f:
        for num, line in enumerate(f):
            nums = [int(i) for i in line.split(',')]
            if num % 2 == 0:
                inputs.append(nums)
            else:
                outputs.append(nums)
    ann = nn.neural_net()
    ann.create_from_file(file_name)
    # Now test the network
    correct = 0
    for i, inpt in enumerate(inputs):
        answer = ann.run(inpt)
        # Round the difference between expected and predicted up to the
        # nearest integer and compare against the expected output
        answer = math.ceil(outputs[i][0] - answer[0])
        if answer == outputs[i][0]:
            correct += 1
    print('\nAccuracy is ', (correct / len(outputs)) * 100, '%\n')
def select_train_set(weibo_items):
    start_time = timeit.default_timer()
    # --- Randomly select train set ---
    # 75% of the items for training
    train_list = random.sample(weibo_items, len(weibo_items) * 3 // 4)
    # The remaining 25% for testing
    test_list = []
    for item in weibo_items:
        if item not in train_list:
            test_list.append(item)
    train_set_lines = ''
    # First line of a FANN training file: #pairs #inputs #outputs
    train_set_lines += str(len(train_list)) + ' 3 1\n'
    # Inputs: [1]emotion grate [2]time [3]forward; output: [4]suicide or not.
    # Write only the training pairs, so the pair count in the header matches.
    for item in train_list:
        train_set_lines += str(item[1]) + ' ' + str(item[2]) + ' ' + str(item[3]) + ' \n'
        train_set_lines += str(item[4]) + '\n'
    # Create neural network folder
    if not os.path.exists('neural-network'):
        os.mkdir('neural-network')
    # e.g. /Users/LemonC/Code/Python/WeiboPrediction/neural-network
    os.chdir(os.path.abspath(os.curdir + '/neural-network'))
    # Write input file for the neural network
    with open('NN-train-set.txt', 'w') as fout_for_NN:
        fout_for_NN.write(train_set_lines)
    print('Prepare train set for Neural Network successfully')
    # --- Neural network training ---
    connection_rate = 1  # fully connected
    learning_rate = 0.7
    number_input = 3
    number_hidden = 5
    number_output = 1
    desired_error = 0.001
    max_iterations = 10000
    iterations_between_reports = 1000
    ann = libfann.neural_net()
    ann.create_sparse_array(connection_rate, (number_input, number_hidden, number_output))
    ann.set_learning_rate(learning_rate)
    ann.set_activation_function_hidden(libfann.SIGMOID_SYMMETRIC)  # range (-1, 1)
    ann.set_activation_function_output(libfann.SIGMOID)  # range (0, 1)
    ann.train_on_file('NN-train-set.txt', max_iterations, iterations_between_reports, desired_error)
    ann.save('trained.net')
    ann.destroy()
    stop_time = timeit.default_timer()
    print('Total training time: ' + str('%.2f' % (stop_time - start_time)) + ' seconds.\n')
    # Back to root folder
    os.chdir('..')
    return test_prediction(test_list)
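# A minimal sketch (not from the project above) of the FANN training-file
# layout that train_on_file() expects: a header line with the number of
# pairs, inputs, and outputs, then alternating input and output lines.
# The values and the file name below are made up for illustration.
fann_train_text = (
    '2 3 1\n'         # 2 pairs, 3 inputs, 1 output
    '0.5 1.2 0.0 \n'  # inputs of pair 1
    '1\n'             # output of pair 1
    '0.1 0.3 2.0 \n'  # inputs of pair 2
    '0\n'             # output of pair 2
)
with open('NN-train-set-example.txt', 'w') as f:
    f.write(fann_train_text)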
def configure_net(self):
    self.net = fann.neural_net()
    self.net.create_standard_array([len(self.ids), 10, 1])
    self.net.set_activation_function_hidden(fann.SIGMOID_SYMMETRIC_STEPWISE)
    self.net.set_activation_function_output(fann.SIGMOID_SYMMETRIC_STEPWISE)
    self.net.set_train_stop_function(fann.STOPFUNC_BIT)
    self.net.set_bit_fail_limit(0.1)
def test_anns(results):
    data = read_metrics(results)
    split = int(len(data) * 0.8)
    num_input = len(data[0]) - 1
    random.shuffle(data)
    test_data = data[split:]
    # Read the trained ANNs
    ann_dir = '/home/tomas/Dropbox/Git/ga_sandbox/projects/denoising/neural/trained_anns'
    trained_anns = []
    for filename in os.listdir(ann_dir):
        if filename.endswith('.net'):
            ann_path = os.path.join(ann_dir, filename)
            ann = libfann.neural_net()
            ann.create_from_file(ann_path)
            trained_anns.append(ann)
    points = []
    for row in test_data:
        actual_output = row[num_input]
        ann_mean_output = np.mean([
            ann.run(row[:num_input]) for ann in trained_anns
        ])
        points.append([ann_mean_output, actual_output])
        print "actual: " + str(actual_output) + ", predicted: " + str(ann_mean_output)
    points = sorted(points, key=lambda p: p[1])
    fig = plt.figure(figsize=(6, 4))
    plt.plot([p[0] for p in points])
    plt.plot([p[1] for p in points])
    plt.ylim([0, 1.2])
    plt.show()
def fann_init_net(self, hidden_func=libfann.SIGMOID_SYMMETRIC_STEPWISE,
                  output_func=libfann.SIGMOID_STEPWISE):
    net = libfann.neural_net()
    net.set_activation_function_hidden(hidden_func)
    net.set_activation_function_output(output_func)
    return net
def from_file(cls, name, prefix):
    prefix += '.intent'
    self = cls(name)
    self.net = fann.neural_net()
    self.net.create_from_file(str(prefix + '.net'))  # Must have str()
    self.ids.load(prefix)
    return self
def recreate_images(result_dir, noisy_image_dir):
    # Read noisy images first
    test_images = {}
    for image_name in os.listdir(noisy_image_dir):
        if image_name.endswith('.png'):
            image_path = os.path.join(noisy_image_dir, image_name)
            image = util.img_as_float(io.imread(image_path))
            image_name_noext = os.path.splitext(image_name)[0]
            test_images[image_name_noext] = image
    # Enumerate results - image directories
    for image_name in sorted(os.listdir(result_dir)):
        image_dir = os.path.join(result_dir, image_name)
        if os.path.isdir(image_dir):
            print image_name
            for result_file in sorted(os.listdir(image_dir)):
                if result_file.endswith('.net'):
                    # Instantiate trained ANN from .net file
                    net_path = os.path.join(image_dir, result_file)
                    ann = libfann.neural_net()
                    ann.create_from_file(net_path)
                    # Filter the same image which it was trained with
                    filtered_image = filter_fann(
                        test_images[image_name], ann)
                    param_set_name = os.path.splitext(result_file)[0]
                    io.imsave(
                        os.path.join(image_dir, param_set_name + '.png'),
                        filtered_image)
def train(self, train_data):
    self.set_train_data(train_data)
    hidden_layers = [self.hidden_neurons] * self.hidden_layers
    layers = [self.train_data.num_input]
    layers.extend(hidden_layers)
    layers.append(self.train_data.num_output)
    sys.stderr.write("Network layout:\n")
    sys.stderr.write("* Neuron layers: %s\n" % layers)
    sys.stderr.write("* Connection rate: %s\n" % self.connection_rate)
    if self.training_algorithm not in ('TRAIN_RPROP',):
        sys.stderr.write("* Learning rate: %s\n" % self.learning_rate)
    sys.stderr.write("* Activation function for the hidden layers: %s\n" % self.activation_function_hidden)
    sys.stderr.write("* Activation function for the output layer: %s\n" % self.activation_function_output)
    sys.stderr.write("* Training algorithm: %s\n" % self.training_algorithm)
    self.ann = libfann.neural_net()
    self.ann.create_sparse_array(self.connection_rate, layers)
    self.ann.set_learning_rate(self.learning_rate)
    self.ann.set_activation_function_hidden(getattr(libfann, self.activation_function_hidden))
    self.ann.set_activation_function_output(getattr(libfann, self.activation_function_output))
    self.ann.set_training_algorithm(getattr(libfann, self.training_algorithm))
    fann_train_data = libfann.training_data()
    fann_train_data.set_train_data(self.train_data.get_input(), self.train_data.get_output())
    self.ann.train_on_data(fann_train_data, self.epochs, self.iterations_between_reports, self.desired_error)
    return self.ann
def main(): args = parse_args() print "loading data" x_pca_test = np.load("data/x-test-pca%s_%s.npy" % (args.pca, args.type)) x_test = np.load("data/x-test_%s.npy" % args.type) y_test = np.load("data/y-test_%s.npy" % args.type) print "x_pca_test Shape", x_pca_test.shape print "x_test Shape", x_test.shape print "y_test Shape", y_test.shape print "Loading fann" ann = libfann.neural_net() ann.create_from_file("nnets/%s" % args.fann_file) predicted = [] for test in x_pca_test: predicted.append(np.array(ann.run(test))) print "Test Error:", get_error(predicted, y_test) print "Saving predictions" np.save("data/p-test_%s" % args.type, np.array(predicted)) print "Smoothing predictions" predicted_smooth = smooth(args, predicted) print "Smooth Test Error:", get_error(predicted_smooth, y_test) print "Saving smoothed predictions" np.save("data/ps-test_%s" % args.type, np.array(predicted_smooth)) plot_classification(args, x_test, y_test, np.array(predicted_smooth))
def testing_from_file(ann_data_file, ann_test_data_file, dir_report):
    ann = lf.neural_net()
    ann.create_from_file(ann_data_file)
    with open(ann_test_data_file) as f:
        content = f.readlines()
    # The first line is assumed to be the FANN header, so input/output
    # pairs occupy lines (1, 2), (3, 4), ...
    count, err = len(content) // 2, 0
    for i in range(1, len(content)):
        if i % 2 == 1:
            # input vector for ann
            vec = hp.get_int_array_from_string(content[i][0:-2])
            rec = hp.get_max_from_int_array(ann.run(vec))
            # control vector
            vec = hp.get_int_array_from_string(content[i + 1][0:-2])
            num = hp.get_max_from_int_array(vec)
            if num != rec:
                print rec, num
                err += 1
    print "total:", count
    print "error:", err
    print "p={0:.3f}%".format(100 - float(err) / float(count) * 100)
def load_or_create_neural_network(path: str, input_size: int, output_size: int) -> libfann.neural_net:
    if os.path.exists(path):
        network = libfann.neural_net()
        network.create_from_file(path)
    else:
        network = create_neural_network(input_size, output_size)
    return network
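# create_neural_network() is not shown above; a minimal sketch of what it
# might look like, assuming a single hidden layer sized between input and
# output. The layer-size heuristic and activation choices here are guesses,
# not the original implementation.
def create_neural_network(input_size: int, output_size: int) -> libfann.neural_net:
    network = libfann.neural_net()
    hidden_size = (input_size + output_size) // 2  # assumed heuristic
    network.create_standard_array((input_size, hidden_size, output_size))
    network.set_activation_function_hidden(libfann.SIGMOID_SYMMETRIC)
    network.set_activation_function_output(libfann.SIGMOID)
    return network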
def __init__(self, num_inputs, learning_rate, momentum, src_file=None):
    """
    Constructor for a single-output neural network.

    :param num_inputs: Number of inputs to the neural network.
    :param learning_rate: Learning rate to use when training the neural network.
    :param momentum: Learning momentum to use when training the neural network.
    :param src_file: If None, then a neural network with random weights is
        initialized. Otherwise, the neural network is loaded from the file.
    """
    self.nn = libfann.neural_net()
    if src_file is not None:
        # Initialize neural network from file
        self.nn.create_from_file(src_file)
    else:
        self.nn.create_standard_array([num_inputs, 20, 1])
        self.nn.set_activation_function_hidden(libfann.SIGMOID_SYMMETRIC)
        self.nn.set_activation_function_output(libfann.LINEAR)
        self.nn.set_training_algorithm(libfann.TRAIN_INCREMENTAL)
        self.nn.set_learning_rate(learning_rate)
        self.nn.set_learning_momentum(momentum)
    self.num_inputs = num_inputs
    self.learning_rate = learning_rate
    self.momentum = momentum
def learn(self, episodes):
    state_size = len(episodes[0].states[0])
    # Create the model if needed
    if self._model is None:
        self._model = libfann.neural_net()
        self._model.create_sparse_array(
            1, (state_size, self.hidden_neurons, self.nb_actions))
        self._model.randomize_weights(-0.1, 0.1)
        self._model.set_activation_function_layer(libfann.GAUSSIAN, 1)
        self._model.set_activation_function_layer(libfann.LINEAR, 2)
    # Store the values of all the states encountered in all the episodes
    states = []
    values = []
    for episode in episodes:
        states.extend(episode.states)
        values.extend(episode.values)
    # Train for these values
    data = libfann.training_data()
    data.set_train_data(states, values)
    self._model.train_on_data(data, 150, 50, 1e-5)
def test(file): """ excepts that <filename>.net and <filename>_test.data exists Hardkoodattu kaksi tuloa ja yksi lähtö tulostuksiin :param file: filename without extensions. :return: """ net_file = file + '.net' data_file = file + '_test.data' testdata = libfann.training_data() # Luo olion testdata.read_train_from_file(data_file) # Lukee testi materiaalin joka pitäisi olla eri kuin opetusmateriaali inputs = testdata.get_input() outputs = testdata.get_output() ann = libfann.neural_net() ann.create_from_file(net_file) # Lataa aikaisemmin luotu verkko print("MSE ERROR : %.5f" %(ann.test_data(testdata))) # Ilmoittaa verkon virheen testidatalla for i in range(len(inputs)): # Tulostaa testidatan läpi result = ann.run(inputs[i]) print("Input: %.2f %.2f, Output %.4f, Excepted %.4f" %(inputs[i][0], inputs[i][1], result[0], outputs[i][0] ))
def result(self): """ 获取验证码识别结果 :return: str """ if self._manual: return self._human_recognization() neural = libfann.neural_net() libfann.neural_net.create_from_file(neural, 'eduLogin/captcha/data/training.data') self._binaryzation() self._clear_noise() image_list = self._cut_images() captcha = '' for image in image_list: image = self._rotate_image(image) image = self._resize_to_norm(image) string = self._captcha_to_string(image) arr = [] for x in string: arr.append(int(x)) neural_result = libfann.neural_net.run(neural, arr) max_element = 0 max_pos = 0 for index, item in enumerate(neural_result): if item > max_element: max_element = item max_pos = index if max_pos in range(0, 10): captcha += str(max_pos) else: captcha += chr(max_pos - 10 + 97) return captcha
def load(self, prefix):
    prefix += '.' + {-1: 'l', +1: 'r'}[self.dir]
    self.net = fann.neural_net()
    if not self.net.create_from_file(str(prefix + '.net')):  # Must have str()
        raise FileNotFoundError(str(prefix + '.net'))
    self.ids.load(prefix)
def from_file(cls, name, prefix):
    prefix += '.intent'
    self = cls(name)
    self.net = fann.neural_net()
    if not self.net.create_from_file(str(prefix + '.net')):  # Must have str()
        raise FileNotFoundError(str(prefix + '.net'))
    self.ids.load(prefix)
    return self
def create_ann(self):
    ann = libfann.neural_net()
    ann.create_standard_array(self.layers)
    ann.set_learning_rate(0.7)
    ann.set_activation_function_output(libfann.SIGMOID_SYMMETRIC_STEPWISE)
    ann.set_activation_function_hidden(libfann.SIGMOID_SYMMETRIC_STEPWISE)
    ann.set_activation_steepness_hidden(0.4)
    ann.set_activation_steepness_output(0.4)
    return ann
def __init__(self, settings):
    self.k = settings.k
    self.networks = []
    self.mses = [0] * settings.k
    for i in range(settings.k):
        self.networks.append(libfann.neural_net())
        self.networks[i].create_standard_array(
            (len(settings.parameterRanges), settings.networkSize, 1))
        self.networks[i].set_activation_function_output(libfann.LINEAR)
def construct(self, *args):
    num_input = args[0]
    num_output = args[1]
    ann = libfann.neural_net()
    ann.create_standard_array((num_input, self.num_hidden, num_output))
    ann.set_activation_function_hidden(libfann.SIGMOID)
    #ann.set_activation_function_output(libfann.SIGMOID_SYMMETRIC)
    ann.set_training_algorithm(libfann.TRAIN_QUICKPROP)
    return ann
def filter_image(ann_path, noisy_image_path, output_path):
    from fann2 import libfann
    from skimage import io, util
    from projects.denoising.neural.filtering import filter_fann
    noisy_image = util.img_as_float(io.imread(noisy_image_path))
    ann = libfann.neural_net()
    ann.create_from_file(ann_path)
    filtered_image = filter_fann(noisy_image, ann)
    io.imsave(output_path, filtered_image)
def construct(self, *args):
    num_input = args[0]
    num_output = args[1]
    ann = libfann.neural_net()
    ann.create_sparse_array(self.connection_rate,
                            (num_input, self.num_hidden, num_output))
    ann.set_learning_rate(self.learning_rate)
    ann.set_activation_function_hidden(libfann.SIGMOID_SYMMETRIC_STEPWISE)
    ann.set_activation_function_output(libfann.GAUSSIAN)
    return ann
def __init__(self, phenotype, *args, **kwargs):
    self.phenotype = phenotype
    self.filtered_q = None
    # Initialize network
    self.mlp = libfann.neural_net()
    self.mlp.create_standard_array(self.phenotype.network_shape)
    self.filtered_image = None
    super(NeuralFilterMLP._Individual, self).__init__(*args, **kwargs)
def plot_ann_connections(result_dir):
    connections = []
    for filename in os.listdir(result_dir):
        if not filename.endswith('.net'):
            continue
        set_id = int(os.path.splitext(filename)[0].split('-')[1])
        filepath = os.path.join(result_dir, filename)
        ann = libfann.neural_net()
        ann.create_from_file(filepath)
        connections.append((set_id, len(ann.get_connection_array())))
        print set_id, len(ann.get_connection_array())
    connections = sorted(connections, key=lambda x: x[0])
    fig = plt.figure(figsize=(6, 4))
    # http://stackoverflow.com/questions/3918028/how-do-i-plot-multiple-x-or-y-axes-in-matplotlib
    ax = fig.add_subplot(111)
    ax.spines['bottom'].set_position(('outward', 40))
    make_second_bottom_spine(label='Filter window size')
    groups = [
        ('3x3', (0, 13)),
        ('5x5', (14, 27)),
        ('7x7', (28, 41)),
        ('9x9', (42, 55)),
    ]
    for name, xspan in groups:
        annotate_group(name, xspan)
    major_ticks = np.arange(0, 56, 14)
    minor_ticks = np.arange(0, 56, 1)
    ax.set_xticks(major_ticks)
    ax.set_xticks(minor_ticks, minor=True)
    # ax.set_yticks(np.arange(0, 2100, 500))
    ax.grid(which='both')
    # Or, if you want different settings for the grids:
    ax.grid(which='minor', alpha=0.2)
    ax.grid(which='major', alpha=0.5)
    plt.plot([c[1] for c in connections],
             label='ANN with Gaussian a.f.', linewidth=2)
    # plt.legend(loc=loc)
    # plt.ylim([0.0, 1])
    # plt.ylim([0, 2100])
    plt.xlabel(u'ANN topology no.')
    plt.ylabel(u'Number of connections')
    plt.grid(True)
    plt.tight_layout()
    plt.show()
    return connections
def _get_nn(inputs, hidden): """ Construct a neural network. """ ann = libfann.neural_net() ann.create_standard_array((inputs, hidden[0], 1)) ann.set_learning_rate(_LEARNING_RATE) ann.set_activation_function_hidden(libfann.SIGMOID_SYMMETRIC) ann.set_activation_function_output(libfann.LINEAR_PIECE_SYMMETRIC) ann.set_training_algorithm(libfann.TRAIN_INCREMENTAL) #ann.set_rprop_delta_zero(1e-6) return ann
def _get_nn(inputs, hidden): """ Construct a neural network. """ ann = libfann.neural_net() ann.create_standard_array((inputs, hidden[0], 1)) ann.set_learning_rate(LEARNING_RATE) ann.set_activation_function_hidden(libfann.SIGMOID_SYMMETRIC) ann.set_activation_function_output(libfann.LINEAR_PIECE_SYMMETRIC) ann.set_training_algorithm(libfann.TRAIN_RPROP) ann.set_rprop_delta_zero(1e-6) return ann
def _create_network(self): """ create_sparse_array Creates a standard back propagation neural network """ self.ann = libfann.neural_net() """ Setup the network """ self.ann.create_sparse_array(self.connection_rate, self.network) self.ann.set_learning_rate(self.learning_rate) self.ann.set_learning_momentum(self.learning_momentum) self.ann.randomize_weights(-self.initial_weight, self.initial_weight) self.ann.set_training_algorithm(self.training_algorithm) """ set activation function """ self.ann.set_activation_function_hidden(self.hidden_activation) self.ann.set_activation_function_output(self.output_activation) """ This option is only used in cascading network """
def _parse_ann_info(image_res_dir):
    """ Input - directory of a single image """
    results = {}
    for result_file in os.listdir(image_res_dir):
        if result_file.endswith('.net'):
            ann_path = os.path.join(image_res_dir, result_file)
            param_set = os.path.splitext(result_file)[0]
            ann = libfann.neural_net()
            ann.create_from_file(ann_path)
            results[param_set] = {}
            results[param_set]['connection_count'] = len(ann.get_connection_array())
    return results
def fann_ble_test_recovered(self, data, test_data, normalize=True, savepath="./temp_save.conf"): if not os.path.exists(savepath): print "No File Included" return [] net = libfann.neural_net() net.create_from_file(savepath) res = zeros((test_data.mat_res.mat.shape[0], len(data.mat_res.sep_mat.keys()))) for i, test_array in enumerate(test_data.mat_res.mat): if normalize: # test_array = -test_array / 100.0 test_array = self.normalize(test_array, self.m_min, self.m_max) res[i, :] = net.run(test_array) self.net = net return res
def predict_input_button_clicked(self):
    fake_id = '10000'
    input_text = str(self.weibo_input_text.toPlainText()).replace('\n', ' ')
    if input_text == '':
        self.statusBar.showMessage('Prediction Fail! Please enter the text.', 3000)
        return
    input_date_time = str(self.date_time_edit.dateTime().toString('yyyy.M.d HH:mm'))
    input_forward = str(self.forward_spinbox.value())
    start_time = timeit.default_timer()
    input_line = fake_id + '\t' + input_text + '\t' + input_date_time + '\t' + input_forward
    input_list = [input_line]
    computed_list = train_prediction.process_weibo(input_list, True)
    os.chdir(os.path.abspath(os.curdir + '/neural-network'))
    ann = libfann.neural_net()
    ann.create_from_file("trained.net")
    # single_item: [0]id [1]emotion grate [2]time [3]forward [4]content
    single_item = computed_list[0]  # Only has the first one
    result = ann.run([
        float(single_item[1]),
        float(single_item[2]),
        float(single_item[3])
    ])
    print('Prediction:' + str('%-18s' % result[0]))
    prediction = 'No'
    if (result[0] - 0.5) > 0:
        prediction = 'Yes'
    result_text = unicode('Content: ' + single_item[4] +
                          '\nTime: ' + str('%.2f' % single_item[2]) + '(hour)' +
                          '\nForward: ' + single_item[3] +
                          '\nEmotion Grate: ' + str(single_item[1]) +
                          '\nSuicide prediction: ' + prediction + '\n\n')
    stop_time = timeit.default_timer()
    result_text += 'Total prediction time: ' + str(
        '%.2f' % (stop_time - start_time)) + ' seconds.\n'
    self.result_text.setText(QtCore.QString(result_text))
    self.result_text.moveCursor(QtGui.QTextCursor.End)
    # Back to root folder
    os.chdir('..')
    self.statusBar.showMessage('Prediction Complete!', 3000)
def __init__(self, retrain=False):
    #####################################
    # Get the qual predictions to blend #
    #####################################
    # Collect the ratings by the predictors
    RT_T = []
    for p in self.predictors:
        g = pandas.read_csv(
            ROOT + '/results/blend/' + p,
            header=None,
            names=['rating']
        )['rating'].tolist()
        # Convert the list to a numpy array
        g = np.array(g, dtype=float)
        # Subtract out the mean rating of p
        # g -= np.sum(g) / len(g)
        g /= 5
        RT_T.append(g)
    # Convert the predictor ratings into a matrix to transpose it
    RT_T = np.matrix(RT_T)
    RT = RT_T.T
    # Get it back in list of list form, each inner list is the set of
    # predictions for a single user/movie combination
    self.test_inputs = RT.tolist()
    ##################
    # Create the ANN #
    ##################
    if retrain:
        self.train()
    else:
        filename = ROOT + '/results/ann.net'
        print("Loading ANN from file.")
        print(" - path = {0}\n".format(filename))
        ann = libfann.neural_net()
        ann.create_from_file(filename)
        self.ann = ann
        print("Predicting qual ratings using the loaded ANN.")
        self.predict()
def _create_network(self): """ create_sparse_array Creates a standard back propagation neural network """ self.ann = libfann.neural_net() """ Setup the network """ self.ann.create_sparse_array(self.connection_rate, self.network) self.ann.set_learning_rate(self.learning_rate) self.ann.set_learning_momentum(self.learning_momentum) self.ann.randomize_weights(-self.initial_weight,self.initial_weight) self.ann.set_training_algorithm(self.training_algorithm) """ set activation function """ self.ann.set_activation_function_hidden(self.hidden_activation) self.ann.set_activation_function_output(self.output_activation) """ This option is only used in cascading network """
def fann_ble_test_recovered_accum(self, data, test_data, normalize=True, savepath="./temp_save.conf", accum_depth=3): if not os.path.exists(savepath): print "No File Included" return [] net = libfann.neural_net() net.create_from_file(savepath) res = zeros((test_data.mat_res.mat.shape[0], int(len(data.mat_res.sep_mat.keys())/4.0))) for i, test_array_miao in enumerate(test_data.mat_res.mat): test_array_raw = test_data.mat_res.mat[max(0, i - accum_depth):i+1, :] test_array = self.stack_data(test_array_raw) if normalize: test_array = self.normalize(test_array, self.m_min, self.m_max) # print test_array res[i, :] = net.run(test_array) self.net = net return res
def predict_file_button_clicked(self):
    self.statusBar.showMessage("Opening Weibo file...")
    result_text = ''
    filename = QtGui.QFileDialog.getOpenFileName(self, 'Open file', './')
    try:
        with open(filename) as fin_predict:
            start_time = timeit.default_timer()
            weibo_predict_list = train_prediction.process_weibo(
                fin_predict.readlines()[1:], True)
            os.chdir(os.path.abspath(os.curdir + '/neural-network'))
            ann = libfann.neural_net()
            ann.create_from_file("trained.net")
            # item: [0]id [1]emotion grate [2]time [3]forward [4]content
            for index, item in enumerate(weibo_predict_list):
                result = ann.run([float(item[1]), float(item[2]), float(item[3])])
                print('Prediction:' + str('%-18s' % result[0]))
                prediction = 'No'
                if (result[0] - 0.5) > 0:
                    prediction = 'Yes'
                result_text += unicode('Weibo ID: ' + item[0] +
                                       '\nContent: ' + item[4] +
                                       '\nTime: ' + str('%.2f' % item[2]) + '(hour)' +
                                       '\nForward: ' + item[3] +
                                       '\nEmotion Grate: ' + str(item[1]) +
                                       '\nSuicide prediction: ' + prediction + '\n\n')
            stop_time = timeit.default_timer()
            result_text += 'Total prediction time: ' + str(
                '%.2f' % (stop_time - start_time)) + ' seconds.\n'
            self.result_text.setText(QtCore.QString(result_text))
            self.result_text.moveCursor(QtGui.QTextCursor.End)
            # Back to root folder
            os.chdir('..')
            self.statusBar.showMessage('Prediction Complete!', 3000)
    except IOError as err:
        print('Input predict file error: ' + str(err))
        self.statusBar.showMessage("Prediction Fail! Please choose a correct file.", 3000)
def testNet(testSet, netFilename, labelHandler):
    testSummaryFilename = time.strftime("testingOutput_%d_%m_%Y_%H.csv")
    if NNWrapper.numThatActuallyHaveLabel is None:
        NNWrapper.numThatActuallyHaveLabel = {}
        NNWrapper.numThatActuallyHaveLabelCorrectlyLabeled = {}
    try:
        os.remove(testSummaryFilename)
    except:
        print "already no such file"
    testingSummaryFile = open(testSummaryFilename, "a")
    ann = libfann.neural_net()
    ann.create_from_file(netFilename)
    #ann.print_connections()
    numTested = 0
    stats = {}
    for pair in testSet:
        featureVec = pair[0]
        actualLabelVec = pair[1]
        result = ann.run(featureVec)
        testingSummaryFile.write(str(actualLabelVec) + "," + str(result) + "\n")
        numTested += 1
        actualLabels = labelHandler.getLabelsForXInNRep(pair[1])
        guessedLabels = labelHandler.labelsFromNetAnswer(result)
        for actualLabel in actualLabels:
            boxStats = stats.get(actualLabel, {})
            for guessedLabel in guessedLabels:
                boxStats[guessedLabel] = boxStats.get(guessedLabel, 0) + 1
            stats[actualLabel] = boxStats
    for key in stats:
        print key
        print "*******************"
        for label in labelHandler.labelIdsToLabels:
            count = stats[key].get(label, 0)
            print label, "\t\t\t", count
    testingSummaryFile.close()
def read(self):
    start = time.process_time()
    self.client.initProtocol(self.cfg)
    ann = libfann.neural_net()
    ann.create_from_file("./data/net_16000.net")
    proccess = processbasic.ProcessBasic.ProcessBasic(ann, self.client)
    packetReader = common.PacketReader.PacketReader(proccess)
    dataReader = common.DataReader.DataReader(self.cfg, packetReader)
    dataReader.readDataSet()
    dataReader.printFiles()
    packetReader.printSamples()
    proccess.printPackets()
    self.client.close()
    end = time.process_time()
    print("Time: " + str((end - start) * 1000) + "ms")
def __init__(self, source_image, network_shape, init_method, stats, fitness_func):
    self.source_image = source_image
    self.target_image = None
    self.network_shape = network_shape
    # Statistical chromosome initialization
    self.init_method = init_method
    self.stats = stats
    # Other
    self.fitness_func = fitness_func
    self.initial_q = metrics.q_py(source_image)
    self.trained_anns = []
    if fitness_func == 'ann':
        # Instantiate all neural nets
        ann_dir = os.path.join(
            os.path.dirname(os.path.realpath(__file__)),
            'trained_anns'
        )
        for filename in os.listdir(ann_dir):
            if filename.endswith('.net'):
                ann_path = os.path.join(ann_dir, filename)
                ann = libfann.neural_net()
                ann.create_from_file(ann_path)
                self.trained_anns.append(ann)
    else:
        slope = 0.65722219398100967
        intercept = 0.099529774137723237
        self.ideal_q_guess = slope * self.initial_q + intercept
        # print "Initial Q guess: " + str(self.ideal_q_guess)
        exp_coefs = [6.58953834, 29.54967305, 6.00895362, -40.3269125]
        exp_p = lambda x, a, b, c, d: -a * np.exp(-b * x + c) + d
        exp = lambda x: exp_p(x, *exp_coefs)
        self.parabola_coef = exp(self.ideal_q_guess)
        s = "INITIAL Q: " + str(self.initial_q)
        s += ", IDEAL Q GUESS: " + str(self.ideal_q_guess)
        s += ", PARABOLA COEF: " + str(self.parabola_coef)
        print s
def build_pheno(self, p, pop):
    ann = libfann.neural_net()
    #ann.set_activation_function_output(libfann.SIGMOID_SYMMETRIC_STEPWISE)
    #ann.set_activation_function_hidden(libfann.SIGMOID_SYMMETRIC_STEPWISE)
    # connection_rate, (inputs, hidden, outputs)
    ann.create_sparse_array(1, (2, 2, 1))
    # Useful introspection helpers: get_num_layers, get_num_input,
    # get_num_output, get_total_connections, get_total_neurons,
    # init_weights(data), print_connections, print_parameters,
    # set_weight(from_neuron, to_neuron, weight)
    # Input-to-hidden weights: neurons 0-2 are the inputs plus bias,
    # neurons 3-4 are the hidden layer
    for frm in range(3):
        for to in range(2):
            v = self.get_allele(p, pop, frm * 2 + to)
            ann.set_weight(frm, to + 3, v)
            #print (frm, to + 3, ann.get_weight(frm, to + 3))
    # Hidden-to-output weights: neurons 3-5 are the hidden layer plus
    # bias, neuron 6 is the output
    for frm in range(3):
        ann.set_weight(frm + 3, 6, self.get_allele(p, pop, frm + 6))
        #print (frm + 3, 6, ann.get_weight(frm + 3, 6))
    return ann
def testNet(testSet, netFilename, labelHandler):
    if NNWrapper.numThatActuallyHaveLabel is None:
        NNWrapper.numThatActuallyHaveLabel = {}
        NNWrapper.numThatActuallyHaveLabelCorrectlyLabeled = {}
    try:
        os.remove(NNWrapper.testingSummaryFilename)
    except:
        print "already no such file"
    testingSummaryFile = open(NNWrapper.testingSummaryFilename, "a")
    ann = libfann.neural_net()
    ann.create_from_file(netFilename)
    #ann.print_connections()
    numTested = 0
    numLabeledCorrectly = 0
    for pair in testSet:
        featureVec = pair[0]
        actualLabel = labelHandler.getLabelForOneInNRep(pair[1])
        result = ann.run(featureVec)
        #print result, actualLabel
        numTested += 1
        NNWrapper.numThatActuallyHaveLabel[actualLabel] = NNWrapper.numThatActuallyHaveLabel.get(actualLabel, 0) + 1
        guessedLabel = labelHandler.closestLabel(result)
        testingSummaryFile.write(guessedLabel + "," + actualLabel + "\n")
        if actualLabel == guessedLabel:
            numLabeledCorrectly += 1
            NNWrapper.numThatActuallyHaveLabelCorrectlyLabeled[actualLabel] = NNWrapper.numThatActuallyHaveLabelCorrectlyLabeled.get(actualLabel, 0) + 1
    print "numTested", numTested
    print "numLabeledCorrectly", numLabeledCorrectly
    NNWrapper.totalTested += numTested
    NNWrapper.totalCorrect += numLabeledCorrectly
    print "totalTested", NNWrapper.totalTested
    print "totalCorrect", NNWrapper.totalCorrect
    print "percentageCorrect", float(NNWrapper.totalCorrect) / NNWrapper.totalTested
    print "*****************"
    for key in NNWrapper.numThatActuallyHaveLabel:
        print key, NNWrapper.numThatActuallyHaveLabel.get(key, 0), \
            NNWrapper.numThatActuallyHaveLabelCorrectlyLabeled.get(key, 0), \
            float(NNWrapper.numThatActuallyHaveLabelCorrectlyLabeled.get(key, 0)) / NNWrapper.numThatActuallyHaveLabel.get(key, 0)
    testingSummaryFile.close()
def train(self, training_file_path, num_inputs, num_outputs,
          nn_path=DEFAULT_ANN_PATH, num_hid_neurons=None):
    """
    Trains an ANN from data contained in a text-based training file that
    is created by xml2trainingdata.py.

    :param training_file_path: path to training file
    :param nn_path: path to save ANN to
    :param num_inputs: number of input neurons
    :param num_outputs: number of output neurons
    :param num_hid_neurons: number of hidden neurons
    :return: None
    """
    # If hidden neurons are not specified, set the number to 2/3 of the
    # sum of input and output neurons
    if num_hid_neurons is None:
        num_hid_neurons = (2 * (num_inputs + num_outputs)) // 3
    ann_tuple = (num_inputs, num_hid_neurons, num_outputs)
    # create the ANN
    ann = fann.neural_net()
    ann.create_sparse_array(CONNECTION_RATE, ann_tuple)
    # set learning style
    ann.set_learning_rate(LEARNING_RATE)
    # set activation function
    ann.set_activation_function_output(fann.SIGMOID_SYMMETRIC)
    # train the ANN on file
    ann.train_on_file(training_file_path, MAX_ITERATIONS,
                      ITERATIONS_BETWEEN_REPORTS, DESIRED_ERROR)
    # save ann to file and free memory associated with it
    ann.save(nn_path)
    ann.destroy()
    # set own ann to saved ann
    self.load_ann_from_file(nn_path)
def main(): args = parse_args() print "Loading data" x_train = np.load("data/x-train-pca%s_%s.npy" % (args.pca, args.type)) x_valid = np.load("data/x-valid-pca%s_%s.npy" % (args.pca, args.type)) y_train = np.load("data/y-train_%s.npy" % args.type) y_valid = np.load("data/y-valid_%s.npy" % args.type) print "x_train Shape", x_train.shape print "x_valid Shape", x_valid.shape print "y_train Shape", y_train.shape print "y_valid Shape", y_valid.shape print "Creating ann" ann = libfann.neural_net() ann.create_standard_array((x_train.shape[1], num_neurons_hidden, y_train.shape[1])) ann.set_bit_fail_limit(0.5) ann.set_learning_rate(learning_rate) print "Converting training data" train_data = XY_to_fann_train_data(x_train, y_train) print "Converting testing data" test_data = XY_to_fann_train_data(x_valid, y_valid) ann.train_on_data(train_data, max_epochs, 1, desired_error) ann.test_data(train_data) bit_fail = ann.get_bit_fail() train_err = (float(bit_fail) / (y_train.shape[0] * y_train.shape[1])) print "Train Error:", train_err ann.test_data(test_data) bit_fail = ann.get_bit_fail() valid_err = (float(bit_fail) / (y_valid.shape[0] * y_valid.shape[1])) print "Validation Error:", valid_err print "Saving ANN to %s" % args.fann_file ann.save("nnets/%s" % args.fann_file)
def fann_ble_test_recovered(self, data, test_data, normalize=True, savepath="./temp_save.conf"): if not os.path.exists(savepath): print "No File Included" return [] net = libfann.neural_net() net.create_from_file(savepath) res = zeros( (test_data.mat_res.mat.shape[0], len(data.mat_res.sep_mat.keys()))) for i, test_array in enumerate(test_data.mat_res.mat): if normalize: # test_array = -test_array / 100.0 test_array = self.normalize(test_array, self.m_min, self.m_max) res[i, :] = net.run(test_array) self.net = net return res
def __init__(self):
    super().__init__()
    self.learnbuffer = []
    self.dumbtraining = False
    connection_rate = 1
    num_input = 2
    #hidden = (40, 40)
    hidden = (50,)
    #hidden = (20, 10, 7)
    num_output = 4
    learning_rate = 0.7
    self.NN = libfann.neural_net()
    #self.NN.set_training_algorithm(libfann.TRAIN_BATCH)
    #self.NN.set_training_algorithm(libfann.TRAIN_RPROP)
    #self.NN.set_training_algorithm(libfann.TRAIN_QUICKPROP)
    self.NN.create_sparse_array(connection_rate, (num_input,) + hidden + (num_output,))
    self.NN.randomize_weights(-1, 1)
    self.NN.set_learning_rate(learning_rate)
    self.NN.set_activation_function_hidden(libfann.SIGMOID_SYMMETRIC_STEPWISE)
    self.NN.set_activation_function_output(libfann.SIGMOID_SYMMETRIC_STEPWISE)
def entrenar():
    connection_rate = 1
    learning_rate = 0.7
    num_input = 400
    num_hidden = 50
    num_output = 5
    desired_error = 0.0001
    max_iterations = 100000
    iterations_between_reports = 1000
    ann = libfann.neural_net()
    ann.create_sparse_array(connection_rate, (num_input, num_hidden, num_output))
    ann.set_learning_rate(learning_rate)
    ann.set_activation_function_output(libfann.SIGMOID_SYMMETRIC_STEPWISE)
    ann.set_activation_function_hidden(libfann.SIGMOID_SYMMETRIC_STEPWISE)
    ann.train_on_file("patrones.data", max_iterations,
                      iterations_between_reports, desired_error)
    ann.save("red.net")
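# A minimal usage sketch for the network trained above: load "red.net" and
# run it on one 400-element input vector. The clasificar() helper and the
# all-zero input are hypothetical, added only for illustration.
def clasificar(entrada):
    ann = libfann.neural_net()
    ann.create_from_file("red.net")
    salida = ann.run(entrada)  # list of 5 output activations
    return salida.index(max(salida))  # index of the strongest class

print(clasificar([0.0] * 400))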
def trainNetwork(dataFilename, netFilename, layerSizes, max_iterations, desired_error):
    # layerSizes should look something like this: (numInput, 200, 80, 40, 20, 10, numOutput)
    ann = libfann.neural_net()
    #ann.create_sparse_array(NNWrapper.connection_rate, (numInput, 6, 4, numOutput))  #TODO: is this what we want?
    # the one that works in 40 seconds: 4, 10, 6, 1; the one that trained in 30 secs was 6, 6
    ann.create_sparse_array(NNWrapper.connection_rate, layerSizes)
    ann.set_learning_rate(NNWrapper.learning_rate)
    ann.set_activation_function_output(libfann.SIGMOID_SYMMETRIC_STEPWISE)
    ann.set_bit_fail_limit(.1)
    #ann.randomize_weights(0, 0)
    t0 = time.clock()
    ann.train_on_file(dataFilename, max_iterations,
                      NNWrapper.iterations_between_reports, desired_error)
    t1 = time.clock()
    seconds = t1 - t0
    m, s = divmod(seconds, 60)
    h, m = divmod(m, 60)
    print "Time to train:"
    print "%d:%02d:%02d" % (h, m, s)
    ann.save(netFilename)
def train_network(file):
    print('\nTraining Network from this file:', file, '\n')
    # Set up all the parameters
    connection_rate = 1
    learning_rate = 0.7
    num_input = 2
    num_hidden = 4
    num_output = 1
    desired_error = 0.0001
    max_iterations = 100000
    iterations_between_reports = 1000
    ann = nn.neural_net()
    ann.create_sparse_array(connection_rate, (num_input, num_hidden, num_output))
    ann.set_learning_rate(learning_rate)
    ann.set_activation_function_output(nn.SIGMOID_SYMMETRIC_STEPWISE)
    ann.train_on_file(file, max_iterations, iterations_between_reports, desired_error)
    ann.save(file.replace('.data', '.net'))
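# A usage sketch tying train_network() together with the test_network()
# shown earlier (both use an `nn` alias for the FANN bindings, e.g.
# `from pyfann import libfann as nn` or similar); 'xor.data' is a
# hypothetical FANN-format training file.
train_network('xor.data')  # writes 'xor.net'
test_network('xor.net')    # prompts for a test file, prints accuracy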
def run_fann(dataFile):
    connection_rate = 1
    learning_rate = 0.7
    num_hidden = 10
    # The FANN data-file header line holds: #pairs #inputs #outputs
    with open(dataFile) as f:
        _, num_input, num_output = tuple(map(int, f.readline().split()))
    desired_error = 0.0001
    max_iterations = 10000
    iterations_between_reports = 1000
    ann = libfann.neural_net()
    ann.create_sparse_array(connection_rate, (num_input, num_hidden, num_output))
    ann.set_learning_rate(learning_rate)
    ann.set_activation_function_output(libfann.SIGMOID_SYMMETRIC_STEPWISE)
    ann.train_on_file(dataFile, max_iterations,
                      iterations_between_reports, desired_error)
    ann.save(os.path.splitext(dataFile)[0] + ".net")
def learn(train_file): """ :param train_file: filename without extensions. :return: method will create nn called <filename>.net and except that <filename>.data exists Kaikki arvot on kovakoodattu tällä hetkellä. Voisi olla vapaaehtoisia parametrejä. Hidden layerien määrän hallintaan pitää keksiä jokin juttu. """ net_file = train_file + '.net' data_file = train_file + '_train.data' connection_rate = 1 learning_rate = 0.5 # learning rate ei saa olla liian suuri, toisaalta liian pienellä oppiminen kestää kauan num_input = 2 num_hidden = 4 num_output = 1 desired_error = 0.00005 max_iterations = 100000 iterations_between_reports = 10 ann = libfann.neural_net() # Lue tiedosto trainindata = libfann.training_data() trainindata.read_train_from_file(data_file) # Luo verkon ann.create_sparse_array(connection_rate, (num_input, num_hidden, num_hidden, num_output)) ann.set_learning_rate(learning_rate) ann.set_activation_function_output(libfann.SIGMOID_SYMMETRIC_STEPWISE) # Aktivointi funktio ann.set_training_algorithm(libfann.TRAIN_INCREMENTAL) # Oppimis algoritmi ann.train_on_data(trainindata, max_iterations, iterations_between_reports, desired_error) ann.save(net_file)
def fann_ble_test_recovered_accum(self, data, test_data, normalize=True, savepath="./temp_save.conf", accum_depth=3): if not os.path.exists(savepath): print "No File Included" return [] net = libfann.neural_net() net.create_from_file(savepath) res = zeros((test_data.mat_res.mat.shape[0], int(len(data.mat_res.sep_mat.keys()) / 4.0))) for i, test_array_miao in enumerate(test_data.mat_res.mat): test_array_raw = test_data.mat_res.mat[max(0, i - accum_depth):i + 1, :] test_array = self.stack_data(test_array_raw) if normalize: test_array = self.normalize(test_array, self.m_min, self.m_max) # print test_array res[i, :] = net.run(test_array) self.net = net return res
def trainANN(features, labels, connRate, hidNodes, error, binary):
    """
    Train the neural network using the given training data and parameters.
    Returns a fully trained ANN.
    """
    # Organize ANN parameters
    connection_rate = connRate
    num_input = 72
    num_hidden = hidNodes
    num_output = 3
    desired_error = error
    max_iterations = 100000
    # Print out two reports for every ANN
    iterations_between_reports = 50000
    # Binarize labels as it is necessary for ANN
    labels = binary.fit_transform(labels)
    # Cast numpy to python list
    annFeatures = features.tolist()
    annLabels = labels.tolist()
    # Create an ANN training data instance and set data
    training = libfann.training_data()
    training.set_train_data(annFeatures, annLabels)
    ann = libfann.neural_net()
    ann.create_sparse_array(connection_rate, (num_input, num_hidden, num_output))
    # Train the ANN
    ann.train_on_data(training, max_iterations, iterations_between_reports, desired_error)
    return ann
def train(captchas_dir):
    NUM_INPUT = CH_WIDTH * CH_HEIGHT
    NUM_NEURONS_HIDDEN = NUM_INPUT // 3
    NUM_OUTPUT = 10
    ann = libfann.neural_net()
    ann.create_standard_array((NUM_INPUT, NUM_NEURONS_HIDDEN, NUM_OUTPUT))
    # ann.set_activation_function_hidden(libfann.SIGMOID)
    # ann.set_activation_function_output(libfann.SIGMOID)
    # ann.randomize_weights(0.0, 0.0)
    start = time.time()
    succeed = 0
    captchas_dir = os.path.abspath(captchas_dir)
    captchas = os.listdir(captchas_dir)
    report()
    for i, name in enumerate(captchas):
        answer = re.match(r'(\d{6})\.png$', name)
        if not answer:
            continue
        answer = answer.group(1)
        fpath = os.path.join(captchas_dir, name)
        try:
            img = get_image(fpath)
            ch_imgs = segment(img)
            # Train on each segmented character against its known digit
            for ch_img, digit in zip(ch_imgs, answer):
                ann.train(get_ch_data(ch_img), make_ann_output(digit))
        except Exception as exc:
            report('Error occurred while processing {}: {}'.format(name, exc))
            report()
        else:
            succeed += 1
        report('{}/{}'.format(i + 1, len(captchas)), progress=True)
    runtime = time.time() - start
    report('Done training on {}/{} captchas in {:.3f} seconds'.format(
        succeed, len(captchas), runtime))
    return ann
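# A usage sketch for the captcha trainer above: train on a directory of
# labelled captchas, persist the network, and reload it later. The file
# paths are placeholders, not from the original project.
ann = train('captchas/')
ann.save('captcha.net')

ann2 = libfann.neural_net()
ann2.create_from_file('captcha.net')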
def test_prediction(test_list):
    os.chdir(os.path.abspath(os.curdir + '/neural-network'))
    ann = libfann.neural_net()
    ann.create_from_file("trained.net")
    correct_count = 0
    # item: [0]id [1]emotion grate [2]time [3]forward [4]suicide or not
    for item in test_list:
        result = ann.run([float(item[1]), float(item[2]), float(item[3])])
        print('Prediction:' + str('%-18s' % result[0]) + 'Suicide:' + item[4])
        prediction = 0
        if (result[0] - 0.5) > 0:
            prediction = 1
        if int(item[4]) == prediction:
            correct_count += 1
    accuracy = round(float(correct_count) / len(test_list) * 100, 2)
    print('Results: Correct number is ' + str(correct_count) +
          '. Model accuracy is ' + str('%.2f' % accuracy) + '%.')
    # Back to root folder
    os.chdir('..')
    return accuracy