from glob import glob
from scipy.misc import imread  # the imread(..., mode='RGB') call below matches the scipy.misc API


def fetch_data(source, targets, amount, numthreads=10, threadtimeout=1):
    """Load cropped face images for each target, downloading more files if needed.

    to_one_hot and fetch_data_files are assumed to be defined elsewhere in this repo.
    """
    faces = []
    fclass = []
    classes = to_one_hot(targets)
    for k, target in enumerate(targets):
        # Images already cropped for this target.
        tfaces = sorted(set(glob("cropped/" + target + "/*")))
        # If too few images are on disk, fetch more and re-scan the folder.
        if len(tfaces) < amount or amount == 0:
            fetch_data_files(source, [target], amount - len(tfaces), numthreads, threadtimeout)
            tfaces = sorted(set(glob("cropped/" + target + "/*")))
        for i in range(len(tfaces)):
            face = imread(tfaces[i], mode='RGB')
            faces.append(face)
            fclass.append(classes[k])
    return faces, fclass
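# Hypothetical usage sketch: the listing file and target names below are
# illustrative assumptions, not values taken from this repository.
if __name__ == "__main__":
    demo_faces, demo_labels = fetch_data(
        source="subset_actors.txt",      # assumed: whatever listing fetch_data_files expects
        targets=["person_a", "person_b"],
        amount=50,
    )
    print("loaded %d images with %d one-hot labels" % (len(demo_faces), len(demo_labels)))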
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from random import shuffle
from set_utils import make_sets, to_one_hot

# Load data
with np.load("notMNIST.npz") as data:
    images, labelso = data["images"], data["labels"]

images = images.transpose(2, 0, 1)
images = images.reshape(18720, 784)
poissonNoise = np.random.poisson(50, 784).astype(float)
images = images.astype('float32') / 255
labels = to_one_hot(labelso)

''' PARAMETERS '''
learning_rate = 1e-2
training_epochs = 600
batch_size = 500
momentum = 1e-2
hidden_units = 1000

''' SETS '''
x_train, t_train, x_validation, t_validation, x_test, t_test = make_sets(
    images, labels, 15000, 1000)

# NN Model
x = tf.placeholder(tf.float32, [None, 784])
y = tf.placeholder(tf.float32, [None, 10])

# Input Layer
input_w = tf.Variable(tf.truncated_normal([784, hidden_units], stddev=0.01),
                      name="Input_Weight")
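# The helpers imported from set_utils are not shown in this section. The sketch
# below is only an assumption of what to_one_hot and make_sets might look like,
# reconstructed from how they are called above; the real implementations may differ.
import numpy as np


def to_one_hot(labels):
    # One one-hot row per label, with columns ordered by the sorted unique labels.
    labels = np.asarray(labels)
    uniques = list(np.unique(labels))
    one_hot = np.zeros((len(labels), len(uniques)), dtype=np.float32)
    for row, label in enumerate(labels):
        one_hot[row, uniques.index(label)] = 1.0
    return one_hot


def make_sets(images, labels, n_train, n_validation):
    # Shuffle once, then split into train / validation / test partitions of
    # n_train, n_validation, and whatever remains.
    order = np.random.permutation(len(images))
    images, labels = images[order], labels[order]
    split1, split2 = n_train, n_train + n_validation
    return (images[:split1], labels[:split1],
            images[split1:split2], labels[split1:split2],
            images[split2:], labels[split2:])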