def testA():
    """Demonstrate the autoencoder's ability to recognise piecewise patterns.

    Trains on one CSV set and then runs the trained model on both the
    training set and a second, held-out set, printing the results.
    """
    # Load the training and test sets via the project's data helper.
    x, y = dh.createSetFromCSV('dataset//ac1.csv')
    xt, yt = dh.createSetFromCSV('dataset//ac2.csv')

    # NOTE(review): only the training set is cast to float32 here; `xt` is
    # passed through unconverted — presumably Autoencoder.use handles the
    # conversion itself. TODO confirm.
    x = np.array(x, np.float32)

    # Two hidden layers (30 -> 20), trained for 1000 iterations.
    ae = Autoencoder(sizes=[30, 20], lr=0.1)
    ae.train(x, 1000, output=True)

    print("Showing results for original set:")
    ae.use(x)
    print("Showing results for test set:")
    ae.use(xt)
return h def fprop(self, input_d): for layer in range(len(self.weights)): input_d = self.model(input_d, layer) return input_d def use(self, useset): predict_op = tf.argmax(self.fprop(useset), 1) return self.sess.run(predict_op) def accuracy(self, testset, y): correct_prediction = tf.equal(tf.argmax(y, 1), self.use(testset)) return self.sess.run( tf.reduce_mean(tf.cast(correct_prediction, tf.float32))) if __name__ == "__main__": x, y = dh.createSetFromCSV('training.csv') testset, y_ = dh.createSetFromCSV('test.csv') ann = NeuralNetwork(sizes=[500], lr=0.01) ann.train(x, y, 1500, printInterval=100, output=True, save=False, load=False) # print("Accuracy:",ann.accuracy(testset,y_)) # dh.csvOutput(ann.use(testset),'output//output_ann.csv')
self.Y: Y_labels })) return def model(self, input, i): h = tf.nn.sigmoid(tf.matmul(input, self.weights[i]) + self.biases[i]) return h def fprop(self, input_d): for layer in range(len(self.weights)): input_d = self.model(input_d, layer) return input_d def use(self, useset): predict_op = tf.argmax(self.fprop(useset), 1) return self.sess.run(predict_op) def accuracy(self, testset, y): correct_prediction = tf.equal(tf.argmax(y, 1), self.use(testset)) return self.sess.run( tf.reduce_mean(tf.cast(correct_prediction, tf.float32))) x, y = dh.createSetFromCSV('dataset//training.csv') testset, y_ = dh.createSetFromCSV('dataset//test.csv') ann = NeuralNetwork(sizes=[3], lr=0.2) ann.train(x, y, 1000, output=False) print("Accuracy:", ann.accuracy(testset, y_)) dh.csvOutput(ann.use(testset), 'output//output_ann.csv')