def Highway1(network, num_out, drop_prob=1.0):
    # Note: drop_prob is accepted but unused in this variant.
    dense1 = tflearn.fully_connected(network, 64, activation='elu',
                                     regularizer='L2', weight_decay=0.001)
    # Install a deep network of highway layers
    highway = dense1
    for i in range(10):
        highway = tflearn.highway(highway, 64, activation='elu',
                                  regularizer='L2', weight_decay=0.001,
                                  transform_dropout=0.7)
    network = tflearn.fully_connected(highway, num_out, activation='softmax')
    return network
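# A minimal usage sketch for the Highway1 builder above; the input shape,
# class count, and training setup are assumptions, not from the source.
import tflearn

input_layer = tflearn.input_data(shape=[None, 784])
net = Highway1(input_layer, num_out=10)
net = tflearn.regression(net, optimizer='adam', learning_rate=0.001,
                         loss='categorical_crossentropy')
model = tflearn.DNN(net, tensorboard_verbose=0)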
def Highway1(network, scale_fn=None):
    # scale_fn: optional scaling callable. The original signature took a
    # boolean `scale` and then called `scale(network)`, which cannot work;
    # accepting the callable itself keeps the intent runnable.
    if scale_fn is not None:
        network = scale_fn(network)
    dense1 = tflearn.fully_connected(network, 64, activation='elu',
                                     regularizer='L2', weight_decay=0.001)
    # Install a deep network of highway layers
    highway = dense1
    for i in range(10):
        highway = tflearn.highway(highway, 64, activation='elu',
                                  regularizer='L2', weight_decay=0.001,
                                  transform_dropout=0.7)
    network = tflearn.fully_connected(highway, 3, activation='sigmoid')
    return network
def do_dnn():
    # Uses module-level X, Y, testX, testY. Optional samplewise
    # zero-center/stdnorm preprocessing was tried and commented out.
    xshape = X.shape
    activation = "relu6"
    input_layer = tflearn.input_data(shape=[None, xshape[1]])
    dense1 = tflearn.fully_connected(input_layer, 40, activation=activation,
                                     regularizer='L2', weight_decay=0.001)

    # Install a deep network of highway layers
    highway = dense1
    for i in range(1):
        highway = tflearn.highway(highway, 40, activation=activation,
                                  regularizer='L2', weight_decay=0.001,
                                  transform_dropout=0.726)
    softmax = tflearn.fully_connected(highway, 4, activation="softmax")

    # SGD with lr decay and Adam (lr .001 is the default; .1 and .00001
    # didn't work with 10k batches) were tried before settling on AdaGrad.
    adagrad = tflearn.optimizers.AdaGrad(learning_rate=.036)  # must use batch_size = 1
    top3 = tflearn.metrics.Top_k(3)  # unused
    net = tflearn.regression(softmax, optimizer=adagrad, metric="accuracy",
                             loss="mean_square")

    # Training
    model = tflearn.DNN(net, tensorboard_verbose=0,
                        checkpoint_path='/home/thorbinator/PycharmProjects/production/saves/save',
                        best_checkpoint_path='/home/thorbinator/PycharmProjects/production/saves/best',
                        max_checkpoints=4, best_val_accuracy=.31)
    # model.load('./saves/save-30000')
    model.fit(X, Y, validation_set=(testX, testY), n_epoch=99999,
              show_metric=True, run_id="th_run", snapshot_epoch=True,
              batch_size=1, snapshot_step=75000)
import numpy as np
import tflearn
from tflearn.layers.core import dropout
from tflearn.layers.recurrent import bidirectional_rnn, BasicLSTMCell

# x_frames, y_frames, split, number_hidden, highway_size,
# highway_layer_amount, and learning_rate are defined earlier in the source.
train_y = y_frames[split:]
print(x_frames.shape)
print(x_frames.shape[1], x_frames.shape[2])

net = tflearn.input_data([None, x_frames.shape[1], x_frames.shape[2]])
print(net.get_shape().as_list())
net = bidirectional_rnn(net, BasicLSTMCell(number_hidden),
                        BasicLSTMCell(number_hidden))
net = dropout(net, 0.8)
fc = tflearn.fully_connected(net, highway_size, activation='elu',
                             regularizer='L2', weight_decay=0.001)
net = fc
for i in range(highway_layer_amount):
    net = tflearn.highway(net, highway_size, activation='elu',
                          regularizer='L2', weight_decay=0.001,
                          transform_dropout=0.8)
net = tflearn.fully_connected(net, y_frames.shape[1], activation='elu')
net = tflearn.regression(net, optimizer='adam', learning_rate=learning_rate,
                         loss='mean_square')


class MonitorCallback(tflearn.callbacks.Callback):
    def __init__(self, model, tf_id):
        self.lowest_loss = np.inf
        self.number_saves = 0
        self.model = model
        self.tf_id = tf_id
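    # A hedged completion sketch: tflearn callbacks receive a TrainingState
    # object in on_epoch_end, so checkpointing whenever validation loss sets a
    # new minimum matches the lowest_loss/number_saves fields above. The
    # save-path scheme built from tf_id is an assumption, not from the source.
    def on_epoch_end(self, training_state):
        if training_state.val_loss is not None and \
                training_state.val_loss < self.lowest_loss:
            self.lowest_loss = training_state.val_loss
            self.number_saves += 1
            self.model.save('./saves/%s-%d' % (self.tf_id, self.number_saves))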
X, Y, testX, testY = mnist.load_data(one_hot=True)

# Building deep neural network
input_layer = tflearn.input_data(shape=[None, 784])
dense1 = tflearn.fully_connected(input_layer, 64, activation='elu',
                                 regularizer='L2', weight_decay=0.001)

# Install a deep network of highway layers
highway = dense1
for i in range(10):
    highway = tflearn.highway(highway, 64, activation='elu',
                              regularizer='L2', weight_decay=0.001,
                              transform_dropout=0.8)
softmax = tflearn.fully_connected(highway, 10, activation='softmax')

# Regression using SGD with learning rate decay and Top-3 accuracy
sgd = tflearn.SGD(learning_rate=0.1, lr_decay=0.96, decay_step=1000)
top_k = tflearn.metrics.Top_k(3)
net = tflearn.regression(softmax, optimizer=sgd, metric=top_k,
                         loss='categorical_crossentropy')

# Training
model = tflearn.DNN(net, tensorboard_verbose=0)
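# The snippet stops after building the model; a plausible training call,
# mirroring the full MNIST highway example later in this section (n_epoch and
# run_id assumed from it):
model.fit(X, Y, n_epoch=20, validation_set=(testX, testY),
          show_metric=True, run_id="highway_dense_model")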
X, Y, testX, testY = mnist.load_data(one_hot=True)

# Building deep neural network
input_layer = tflearn.input_data(shape=[None, 784])
dense1 = tflearn.fully_connected(input_layer, 64, activation='elu',
                                 regularizer='L2', weight_decay=0.001)

# Install a deep network of highway layers
highway = dense1
for i in range(10):
    highway = tflearn.highway(highway, 64, activation='elu',
                              regularizer='L2', weight_decay=0.001)
dropout2 = tflearn.dropout(highway, 0.5)
softmax = tflearn.fully_connected(dropout2, 10, activation='softmax')

# Regression using SGD with learning rate decay and Top-3 accuracy
sgd = tflearn.SGD(learning_rate=0.1, lr_decay=0.96, decay_step=1000)
top_k = tflearn.metrics.Top_k(3)
net = tflearn.regression(softmax, optimizer=sgd, metric=top_k,
                         loss='categorical_crossentropy')

# Training
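# A minimal completion for the "# Training" step above, assuming the same
# model and fit arguments as the full version of this example shown below:
model = tflearn.DNN(net, tensorboard_verbose=0)
model.fit(X, Y, n_epoch=20, validation_set=(testX, testY),
          show_metric=True, run_id="highway_dense_model")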
def modelevaluation():
    # `activation`, testX, and testY are assumed to be defined at module
    # level in the source.
    input_layer = tflearn.input_data(shape=[None, 392])
    dense1 = tflearn.fully_connected(input_layer, 100, activation=activation,
                                     regularizer='L2', weight_decay=0.001)

    # Install a deep network of highway layers
    highway = dense1
    for i in range(100):
        highway = tflearn.highway(highway, 100, activation=activation,
                                  regularizer='L2', weight_decay=0.001,
                                  transform_dropout=0.8)
    softmax = tflearn.fully_connected(highway, 10, activation="softmax")

    # .001 is Adam's default; .1 and .00001 didn't work with 10k batches.
    adam = tflearn.optimizers.Adam(learning_rate=.00001, beta1=.9, beta2=.99,
                                   epsilon=math.pow(10, -8))
    top3 = tflearn.metrics.Top_k(3)  # unused
    net = tflearn.regression(softmax, optimizer=adam, metric='accuracy',
                             loss="categorical_crossentropy")

    # Rebuild the graph and load the saved weights
    model = tflearn.DNN(net, tensorboard_verbose=0,
                        # checkpoint_path='/home/thorbinator/PycharmProjects/production/saves/save',
                        best_checkpoint_path='/home/thorbinator/PycharmProjects/production/saves/best',
                        max_checkpoints=4, best_val_accuracy=.31)
    model.load('./saves/save-300000')

    # Evaluate on the first 100 test samples
    minitestY = [value for index, value in enumerate(testY) if index < 100]
    minitestX = [value for index, value in enumerate(testX) if index < 100]
    minitestY, minitestX = np.array(minitestY), np.array(minitestX)
    results = model.evaluate(minitestX, minitestY, batch_size=1)
    for key in sorted(results):
        print(key)

    # Bin labels and predictions into four categories around zerobound;
    # the logic below treats each prediction as a single scalar value.
    updownscore = 0
    score4bin = 0
    bad4bin = 0
    offbyone = 0
    offbytwo = 0
    predicted = []
    zerobound = .0001  # 0.000550902932254
    for index, result in enumerate(model.predict(minitestX)):
        label = minitestY[index]  # assumed; `label` was never assigned in the source
        if label < 0:
            label_mod = 0 if label < -zerobound else 1  # bigdown / down
        else:
            label_mod = 3 if label > zerobound else 2  # bigup / up
        if result < 0:
            pred4bin = 0 if result < -zerobound else 1  # bigdown / down
        else:
            pred4bin = 3 if result > zerobound else 2  # bigup / up
        if pred4bin == label_mod:
            score4bin += 1
        if abs(pred4bin - label_mod) == 3:
            bad4bin += 1
        if abs(pred4bin - label_mod) == 2:
            offbytwo += 1
        if abs(pred4bin - label_mod) == 1:
            offbyone += 1
        if (label > 0) == (result > 0):
            updownscore += 1
        if index % 100 == 0:
            print(label, ":", result, ":", label_mod, ":", pred4bin)
        predicted.extend(result)

    # Normalize by the number of samples actually evaluated (the source
    # divided by the full test set length, understating every score).
    n = len(minitestY)
    print("updown correctness:", updownscore / n)
    print("4 bins correctness:", score4bin / n)
    print("off by one category:", offbyone / n)
    print("off by two categories:", offbytwo / n)
    print("really bad predictions:", bad4bin / n)

    sns.set(style="darkgrid")
    sns.jointplot(np.array(predicted), testY, kind="reg", color="r",
                  xlim=(-.01, .01), ylim=(-.01, .01), size=40,
                  space=0).plot_marginals(sns.distplot, bins=40)
    plt.show()
import tflearn

# Data loading and preprocessing
import tflearn.datasets.mnist as mnist
X, Y, testX, testY = mnist.load_data(one_hot=True)

# Building deep neural network
input_layer = tflearn.input_data(shape=[None, 784])
dense1 = tflearn.fully_connected(input_layer, 64, activation='elu',
                                 regularizer='L2', weight_decay=0.001)

# Install a deep network of highway layers
highway = dense1
for i in range(10):
    highway = tflearn.highway(highway, 64, activation='elu',
                              regularizer='L2', weight_decay=0.001)
dropout2 = tflearn.dropout(highway, 0.5)
softmax = tflearn.fully_connected(dropout2, 10, activation='softmax')

# Regression using SGD with learning rate decay and Top-3 accuracy
sgd = tflearn.SGD(learning_rate=0.1, lr_decay=0.96, decay_step=1000)
top_k = tflearn.metrics.Top_k(3)
net = tflearn.regression(softmax, optimizer=sgd, metric=top_k,
                         loss='categorical_crossentropy')

# Training
model = tflearn.DNN(net, tensorboard_verbose=0)
model.fit(X, Y, n_epoch=20, validation_set=(testX, testY),
          show_metric=True, run_id="highway_dense_model")
    # Tail of the label binning (presumably inside categorizeLabels, which is
    # called below; the opening branches are not shown in this snippet):
    elif evSample > 1000:
        labels[i] = 2
    elif evSample > 500:
        labels[i] = 1
    else:
        labels[i] = 0

TrainingSetFeatures = preprocessor(TrainingSetFeatures)
categorizeLabels(TrainingSetLabels)
TrainingSetLabels = to_categorical(TrainingSetLabels, 9)

# Create a test set from the number of samples and training set
net = tflearn.input_data(shape=[None, 12])
net = tflearn.fully_connected(net, 32)
# 'leaky_relu' is the activation name tflearn actually exposes; the original
# "LeakyReLu" string would fail to resolve.
net = tflearn.highway(net, 32, activation="leaky_relu", name="ReLuLayer")
net = tflearn.fully_connected(net, 32)
net = tflearn.fully_connected(net, 9, activation="softmax")
net = tflearn.regression(net, learning_rate=0.005)
# adam = tflearn.Optimizer()
# net = tflearn.regression(net, learning_rate=0.001, optimizer=adam)

# Define model
model = tflearn.DNN(net, clip_gradients=1.0, tensorboard_verbose=3,
                    tensorboard_dir='./tmp/weather1.log')

# Start training (apply gradient descent algorithm); the call was truncated
# in the source, so the labels argument is assumed from context.
model.fit(TrainingSetFeatures, TrainingSetLabels)
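# The "create a test set" comment above is never implemented in the snippet.
# A minimal sketch of that step, assuming scikit-learn and hypothetical
# TestSetFeatures/TestSetLabels names; it would run before to_categorical and
# model.fit:
from sklearn.model_selection import train_test_split

TrainingSetFeatures, TestSetFeatures, TrainingSetLabels, TestSetLabels = \
    train_test_split(TrainingSetFeatures, TrainingSetLabels, test_size=0.2)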
    # Tail of the label binning (presumably inside categorizeLabels, which is
    # called below):
    else:
        labels[i] = 0

TrainingSetFeatures = preprocessor(TrainingSetFeatures)
categorizeLabels(TrainingSetLabels)
TrainingSetLabels = to_categorical(TrainingSetLabels, 5)

# Create a test set from the number of samples and training set
net = tflearn.input_data(shape=[None, 12])
net = tflearn.fully_connected(net, 32, weights_init='xavier',
                              activation="softsign", name='First_Fully_Connected')
net = tflearn.highway(net, 32, activation="softsign", name="highwayLayer")
net = tflearn.fully_connected(net, 32, weights_init='xavier',
                              activation="softsign", name='Third_Fully_Connected')
net = tflearn.fully_connected(net, 5, activation="softmax",
                              name='Final_Fully_Connected')
# todo: confusion matrix
adam = tflearn.Adam()
net = tflearn.regression(net, learning_rate=0.001, optimizer=adam)

# Define model
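# The snippet ends at "# Define model"; a hedged completion following the same
# pattern as the previous example, plus a sketch for the "todo: confusion
# matrix" using scikit-learn. The fit arguments and the hypothetical
# TestSetFeatures/TestSetLabels names are assumptions, not from the source.
model = tflearn.DNN(net, clip_gradients=1.0, tensorboard_verbose=3,
                    tensorboard_dir='./tmp/weather1.log')
model.fit(TrainingSetFeatures, TrainingSetLabels, show_metric=True)

import numpy as np
from sklearn.metrics import confusion_matrix

# Collapse one-hot rows back to class indices before tabulating.
pred_classes = np.argmax(model.predict(TestSetFeatures), axis=1)
true_classes = np.argmax(TestSetLabels, axis=1)
print(confusion_matrix(true_classes, pred_classes))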