Example #1
import os

import numpy as np
import data_helper  # provides load_data_all (used below)

rng = np.random.RandomState(888)
# np.random.seed(423453)
base_dir = os.getcwd()
# train_csv_path = os.path.join(base_dir,'trainingData.csv')
test_csv_path = os.path.join(base_dir, 'validationData.csv')
valid_csv_path = os.path.join(base_dir, 'AllValuationData.csv')
train_csv_path = os.path.join(base_dir, 'arrAllTrainingData.csv')

log_dir = 'DEEPLEARNING_MODEL_log.txt'

if __name__ == '__main__':
    # Load data
    (train_x, train_y), (valid_x, valid_y), (test_x, test_y) = \
        data_helper.load_data_all(train_csv_path, valid_csv_path, test_csv_path)
    # train_x, train_y, valid_x, valid_y, test_x, test_y=data_helper.load(train_csv_path, test_csv_path)

    # patience=[i for i in range(1,50,2)]
    # patience=[21,]

    # B=[i for i in np.linspace(3.0,3.1,2)]
    # for b in B:
    # for p in patience:

    # dropout=[i for i in np.linspace(0.4,0.7,4)]
    dropout = [0.7]  # dropout rates to sweep; currently a single value
    for dp in dropout:
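        # The loop body is not shown in this excerpt. The lines below are only
        # an illustrative sketch of how such a dropout sweep is commonly
        # completed: train one model per dropout value and append the result
        # to the log file. `train_with_dropout` is a hypothetical placeholder
        # helper, not a function from this project.
        valid_acc = train_with_dropout(train_x, train_y, valid_x, valid_y,
                                       dropout_rate=dp, rng=rng)
        with open(log_dir, 'a') as f:
            f.write('dropout=%.2f  valid_acc=%.4f\n' % (dp, valid_acc))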
Example #2
#!/usr/bin/env python

import os
import time
from model_patient import *
from data_helper import load_data, load_data_all
import tensorflow as tf
import numpy as np
from tensorflow.examples.tutorials.mnist import input_data

batch_size = 128
test_size = 256

allX, allY = load_data_all()
word_embedding = np.load('../data/model_50.npy')

# print teY
# raw_input()
py_x, cost = model()  # model() is provided by model_patient (wildcard import above)

# cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(py_x, Y))
train_op = tf.train.RMSPropOptimizer(0.001, 0.9).minimize(cost)  # learning rate 0.001, decay 0.9
predict_op = py_x

# print trY.shape

# Launch the graph in a session
with tf.Session() as sess:
    # you need to initialize all variables
    tf.global_variables_initializer().run()
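
    # Illustrative sketch only (not part of the original script): a typical
    # TF 1.x continuation iterates over mini-batches of (allX, allY) and runs
    # train_op / predict_op. It assumes model_patient exposes input
    # placeholders named X and Y (Y appears in the commented-out cost line
    # above; X is an assumption).
    for epoch in range(10):
        for start in range(0, len(allX), batch_size):
            end = start + batch_size
            sess.run(train_op, feed_dict={X: allX[start:end],
                                          Y: allY[start:end]})
        preds = sess.run(predict_op, feed_dict={X: allX[:test_size]})
        print(epoch, preds.shape)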