def full_test():
    """End-to-end run: pretrain, fine-tune, export, and evaluate a deep SDA.

    Pipeline: stack a 6000-1000-500-100 denoising autoencoder, pretrain it
    layer-wise on the training inputs, fine-tune all weights against the
    labels, write the encoded test inputs and the saved variables to disk,
    then score the fine-tuned parameters with ``test_model``.
    """
    session = tf.Session()
    autoencoder = SDAutoencoder(
        dims=[6000, 1000, 500, 100],
        activations=["sigmoid", "sigmoid", "sigmoid"],
        sess=session,
        noise=0.5,
        loss="rmse",
        pretrain_lr=1e-6,
        finetune_lr=1e-5,
        batch_size=100,
        print_step=500,
    )

    # Greedy layer-wise pretraining, then supervised fine-tuning of the stack.
    autoencoder.pretrain_network(X_TRAIN_PATH, epochs=1)
    params = autoencoder.finetune_parameters(
        X_TRAIN_PATH, Y_TRAIN_PATH, output_dim=6, epochs=1
    )

    # Persist the encoded test set and the model variables before closing.
    autoencoder.write_encoded_input(TRANSFORMED_PATH, X_TEST_PATH)
    autoencoder.save_variables(VARIABLE_SAVE_PATH)
    session.close()

    test_model(
        parameters_dict=params,
        input_dim=autoencoder.output_dim,
        output_dim=6,
        x_test_filepath=TRANSFORMED_PATH,
        y_test_filepath=Y_TEST_PATH,
        output_filepath=OUTPUT_PATH,
    )
def full_test():
    """Train, export, and evaluate a 4000-400-400-400 stacked denoising autoencoder.

    Pretrains layer by layer on the training inputs, fine-tunes the whole
    stack against the labels, dumps the encoded test inputs and the model
    variables, and finally evaluates the learned parameters via ``test_model``.
    """
    session = tf.Session()
    autoencoder = SDAutoencoder(
        dims=[4000, 400, 400, 400],
        activations=["sigmoid", "sigmoid", "sigmoid"],
        sess=session,
        noise=0.20,
        loss="cross-entropy",
        pretrain_lr=1e-6,
        finetune_lr=1e-5,
        batch_size=50,
        print_step=500,
    )

    # Unsupervised pretraining followed by supervised fine-tuning.
    autoencoder.pretrain_network(X_TRAIN_PATH, epochs=50)
    params = autoencoder.finetune_parameters(
        X_TRAIN_PATH, Y_TRAIN_PATH, output_dim=2, epochs=80
    )

    # Write artifacts for the evaluation step, then release the session.
    autoencoder.write_encoded_input(TRANSFORMED_PATH, X_TEST_PATH)
    autoencoder.save_variables(VARIABLE_SAVE_PATH)
    session.close()

    test_model(
        parameters_dict=params,
        input_dim=autoencoder.output_dim,
        output_dim=2,
        x_test_filepath=TRANSFORMED_PATH,
        y_test_filepath=Y_TEST_PATH,
        output_filepath=OUTPUT_PATH,
    )
def main():
    """Train a single-layer SDA on MNIST and evaluate a classifier on its codes.

    Pretrains a 784-500 denoising autoencoder on MNIST batches, fine-tunes it
    with labels (output_dim=10), writes the encoded test inputs plus their
    labels to CSV, then scores the fine-tuned parameters with ``test_model``.
    """
    sess = tf.Session()
    sda = SDAutoencoder(dims=[784, 500],
                        activations=["sigmoid"],
                        sess=sess,
                        noise=0.40,
                        loss="cross-entropy")

    # PEP 8 (E731): a named def instead of a lambda assignment, so the
    # generator factory shows up by name in tracebacks and repr.
    def mnist_train_gen_f():
        return get_mnist_batch_xs_generator(True, batch_size=100, batch_limit=12000)

    sda.pretrain_network_gen(mnist_train_gen_f)
    trained_parameters = sda.finetune_parameters_gen(
        get_mnist_batch_generator(True, batch_size=100, batch_limit=18000),
        output_dim=10)

    transformed_filepath = "../data/mnist_test_transformed.csv"
    test_ys_filepath = "../data/mnist_test_ys.csv"
    output_filepath = "../data/mnist_pred_ys.csv"

    # Encode the (held-out) test batches and record their labels for scoring.
    sda.write_encoded_input_with_ys(
        transformed_filepath, test_ys_filepath,
        get_mnist_batch_generator(False, batch_size=100, batch_limit=100))
    sess.close()

    test_model(parameters_dict=trained_parameters,
               input_dim=sda.output_dim,
               output_dim=10,
               x_test_filepath=transformed_filepath,
               y_test_filepath=test_ys_filepath,
               output_filepath=output_filepath)
def sda():
    """Train a 784-400-200-80 SDA on MNIST and evaluate it on encoded test data.

    Pretrains layer by layer, fine-tunes all weights with labels, writes the
    encoded test inputs and labels under ``data_storage_path``, then scores
    the trained parameters with ``test_model``.
    """
    sess = tf.Session()
    sda = SDAutoencoder(dims=[784, 400, 200, 80],
                        activations=["sigmoid", "sigmoid", "sigmoid"],
                        sess=sess,
                        noise=0.20,
                        loss="cross-entropy",
                        pretrain_lr=0.0001,
                        finetune_lr=0.0001)

    # PEP 8 (E731): a named def instead of a lambda assignment.
    def mnist_train_gen_f():
        return get_mnist_batch_xs_generator(True, batch_size=100, batch_limit=12000)

    # pretrain locally, layer by layer
    sda.pretrain_network_gen(mnist_train_gen_f)

    # fine-tune the model by training all the weights
    trained_parameters = sda.finetune_parameters_gen(
        get_mnist_batch_generator(True, batch_size=100, batch_limit=18000),
        output_dim=10)

    mainDir = data_storage_path
    # exist_ok avoids the check-then-create race of exists()+makedirs().
    os.makedirs(mainDir, exist_ok=True)
    transformed_filepath = mainDir + "/mnist_test_transformed.csv"
    test_ys_filepath = mainDir + "/mnist_test_ys.csv"
    output_filepath = mainDir + "/mnist_pred_ys.csv"

    # for testing. write the encoded x value along with y values to csv
    sda.write_encoded_input_with_ys(
        transformed_filepath, test_ys_filepath,
        get_mnist_batch_generator(False, batch_size=100, batch_limit=100))
    sess.close()

    test_model(parameters_dict=trained_parameters,
               input_dim=sda.output_dim,
               output_dim=10,
               x_test_filepath=transformed_filepath,
               y_test_filepath=test_ys_filepath,
               output_filepath=output_filepath)
def unsupervised():
    """Pretrain a 6000-1000-500-200 SDA without labels and export its artifacts.

    After layer-wise pretraining, saves the second hidden layer's weights and
    biases, the encoded test inputs, and all session variables to disk.
    """
    session = tf.Session()
    autoencoder = SDAutoencoder(
        dims=[6000, 1000, 500, 200],
        activations=["sigmoid", "sigmoid", "sigmoid"],
        sess=session,
        noise=0.05,
        loss="rmse",
        batch_size=100,
        print_step=50,
    )

    layer_1_weights_path = "../data/outputs/last_weights"
    layer_1_biases_path = "../data/outputs/last_biases"

    autoencoder.pretrain_network(X_TRAIN_PATH, epochs=8)

    # Export the second hidden layer's parameters for external inspection.
    second_layer = autoencoder.hidden_layers[1]
    autoencoder.write_data(second_layer.weights, layer_1_weights_path)
    autoencoder.write_data(second_layer.biases, layer_1_biases_path)

    autoencoder.write_encoded_input(TRANSFORMED_PATH, X_TEST_PATH)
    autoencoder.save_variables(VARIABLE_SAVE_PATH)
    session.close()
def wrap_sensor(X=None):
    """Classify one sensor sample with the saved SDA and drive the hand.

    Restores the trained variables from disk, copies the fine-tuned
    hidden-layer weights/biases into a freshly built SDAutoencoder, encodes
    the sample ``X``, classifies the encoding with ``get_class``, and issues
    the first matching hand command (lateral / fist / grasp).

    Args:
        X: One input sample (sequence of 160 sensor values — presumably;
           confirm against the caller). When omitted or empty, row 14 of
           ``../data/TestX.csv`` is used as a demo sample.
           NOTE: the default was ``X=[]`` (a mutable default argument);
           ``None`` is the safe equivalent and is backward compatible.
    """
    sess = tf.Session()
    # Restore variables from disk.
    tf.train.import_meta_graph(VARIABLE_SAVE_PATH + ".meta")
    saver = tf.train.Saver()
    saver.restore(sess, tf.train.latest_checkpoint('../data/deephand/'))

    weight = openData(WEIGHT_PATH)
    biases = openData(BIASES_PATH)

    # Rebuild the encoder with the same architecture used at training time.
    sda = SDAutoencoder(dims=[160, 80, 30, 5],
                        activations=["sigmoid", "sigmoid", "sigmoid"],
                        sess=sess,
                        noise=0.2,
                        loss="rmse",
                        pretrain_lr=1e-5,
                        finetune_lr=1e-3,
                        batch_size=1,
                        print_step=1)
    print(len(sda.hidden_layers))

    # Copy the restored fine-tuned weights/biases into the new encoder.
    # Variable names look like "finetuning/hidden_layer_<i>/weights:0".
    # NOTE(review): tf.all_variables() is the deprecated TF<1.0 spelling of
    # tf.global_variables() — kept as-is to match the TF version in use.
    for v in tf.all_variables():
        n = v.name.split("/")
        if len(n) == 3 and n[1][0:13] == "hidden_layer_" and n[0] == "finetuning":
            ind = int(n[1][13:15])
            val = sess.run(v)
            if n[2] == "weights:0":
                sda.hidden_layers[ind].set_weights(val)
            else:
                sda.hidden_layers[ind].set_biases(val)

    trained_parameters = {"weights": weight, "biases": biases}

    # Fall back to a canned demo sample when no input was provided.
    if X is None or len(X) == 0:
        X = openData("../data/TestX.csv")
        X = X[14]
    print(type(X), X)

    Transformed = sda.transformX(X)
    print(Transformed)
    c = get_class(parameters_dict=trained_parameters, input_dim=5, output_dim=3,
                  data=[Transformed])
    print(c)
    sess.close()  # release the TF session — it is no longer needed below

    from HandCommand import Hand
    hand = Hand()
    # Threshold each class score to a binary decision (0.49999 ~ 0.5 cutoff).
    cS = [1 if score > 0.49999 else 0 for score in c]
    # Issue only the first active command, in fixed priority order.
    if cS[0] == 1:
        hand.lateral()
    elif cS[1] == 1:
        hand.fist()
    elif cS[2] == 1:
        hand.grasp()
def full_test():
    """Full 160-80-30-5 SDA run: clear outputs, train, export, and evaluate.

    Clears the previous CSV artifacts, pretrains and fine-tunes the stack on
    the training data, writes the learned weights/biases, the encoded test
    inputs, and the saved variables, then scores the parameters with
    ``test_model``.
    """
    # Start from a clean slate: wipe every artifact from the previous run.
    for stale_path in (TRANSFORMED_PATH, OUTPUT_PATH, BIASES_PATH, WEIGHT_PATH):
        clearCsv(stale_path)

    session = tf.Session()
    autoencoder = SDAutoencoder(
        dims=[160, 80, 30, 5],
        activations=["sigmoid", "sigmoid", "sigmoid"],
        sess=session,
        noise=0.2,
        loss="rmse",
        pretrain_lr=1e-4,
        finetune_lr=1e-2,
        batch_size=20,
        print_step=100,
    )

    # Layer-wise pretraining, then supervised fine-tuning of the whole stack.
    autoencoder.pretrain_network(X_TRAIN_PATH, epochs=2000)
    params = autoencoder.finetune_parameters(
        X_TRAIN_PATH, Y_TRAIN_PATH, output_dim=3, epochs=1000
    )

    # Persist the classifier parameters and the encoded test inputs.
    autoencoder.write_data(params["weights"], WEIGHT_PATH)
    autoencoder.write_data([params["biases"]], BIASES_PATH)
    autoencoder.write_encoded_input(TRANSFORMED_PATH, X_TEST_PATH)
    autoencoder.save_variables(VARIABLE_SAVE_PATH)
    session.close()

    test_model(
        parameters_dict=params,
        input_dim=autoencoder.output_dim,
        output_dim=3,
        x_test_filepath=TRANSFORMED_PATH,
        y_test_filepath=Y_TEST_PATH,
        output_filepath=OUTPUT_PATH,
    )
def unsupervised():
    """Label-free pretraining of a 4000-1000-500-200 SDA, with artifact export.

    Runs layer-wise pretraining only (no fine-tuning), then saves the second
    hidden layer's weights and biases, the encoded test inputs, and all
    session variables.
    """
    session = tf.Session()
    autoencoder = SDAutoencoder(
        dims=[4000, 1000, 500, 200],
        activations=["sigmoid", "sigmoid", "sigmoid"],
        sess=session,
        noise=0.05,
        loss="rmse",
        batch_size=100,
        print_step=50,
    )

    layer_1_weights_path = "../data/outputs/last_weights"
    layer_1_biases_path = "../data/outputs/last_biases"

    autoencoder.pretrain_network(X_TRAIN_PATH, epochs=8)

    # Dump the second hidden layer's learned parameters.
    hidden = autoencoder.hidden_layers[1]
    autoencoder.write_data(hidden.weights, layer_1_weights_path)
    autoencoder.write_data(hidden.biases, layer_1_biases_path)

    autoencoder.write_encoded_input(TRANSFORMED_PATH, X_TEST_PATH)
    autoencoder.save_variables(VARIABLE_SAVE_PATH)
    session.close()