import datetime

import numpy as np
import tensorflow as tf

from tools import fetch_stock_price, format_dataset

tf.reset_default_graph()  # Reset the global default graph
tf.set_random_seed(101)

# Settings for the dataset creation
symbol = "AAPl"
feat_dimension = 20
train_size = 252
test_size = 252 - feat_dimension

# Fetch the values and prepare the train/test split
stock_values = fetch_stock_price(symbol, datetime.date(2015, 1, 1),
                                 datetime.date(2016, 12, 31))
minibatch_X, minibatch_y = format_dataset(stock_values, feat_dimension)

train_X = minibatch_X[:train_size, :].astype(np.float32)
train_y = minibatch_y[:train_size].reshape((-1, 1)).astype(np.float32)
test_X = minibatch_X[train_size:, :].astype(np.float32)
test_y = minibatch_y[train_size:].reshape((-1, 1)).astype(np.float32)

# Define the placeholders
X_tf = tf.placeholder("float", shape=(None, feat_dimension), name="X")
y_tf = tf.placeholder("float", shape=(None, 1), name="y")

# Settings for TensorFlow
learning_rate = 0.1
optimizer = tf.train.AdamOptimizer
n_epochs = 10000
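
# A minimal sketch of how the settings and placeholders above could be wired
# into a model, assuming the same linear regressor used in code example #5
# below; the names `weights`, `biases`, `y_pred`, `cost` and `train_op` are
# illustrative, not part of the original snippet.
weights = tf.Variable(tf.truncated_normal([feat_dimension, 1], stddev=0.1), name="weights")
biases = tf.Variable(tf.zeros([1, 1]), name="biases")
y_pred = tf.add(biases, tf.matmul(X_tf, weights))   # linear regression on the past 20 prices
cost = tf.reduce_mean(tf.square(y_tf - y_pred))     # mean squared error
train_op = optimizer(learning_rate).minimize(cost)  # AdamOptimizer step
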
# Code example #2
time_dimension = 20
train_size = 504
test_size = 252 - time_dimension

# Settings for TensorFlow
tf_logdir = "./logs/tf/stock_price_lstm"
os.makedirs(tf_logdir, exist_ok=True)
learning_rate = 0.05
optimizer = tf.train.AdagradOptimizer
n_epochs = 5000
n_embeddings = 128

# Fetch the values, and prepare the train/test split
stock_values = fetch_stock_price(symbol, datetime.date(2014, 1, 1),
                                 datetime.date(2016, 12, 31))
minibatch_cos_X, minibatch_cos_y = format_dataset(stock_values, time_dimension)

train_X = minibatch_cos_X[:train_size, :].astype(np.float32)
train_y = minibatch_cos_y[:train_size].reshape((-1, 1)).astype(np.float32)
test_X = minibatch_cos_X[train_size:, :].astype(np.float32)
test_y = minibatch_cos_y[train_size:].reshape((-1, 1)).astype(np.float32)

train_X_ts = train_X[:, :, np.newaxis]
test_X_ts = test_X[:, :, np.newaxis]

# Create the placeholders
X_tf = tf.placeholder("float", shape=(None, time_dimension, 1), name="X")
y_tf = tf.placeholder("float", shape=(None, 1), name="y")


# The LSTM model
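# A minimal sketch of the LSTM regressor, assuming it mirrors the
# rnn.BasicLSTMCell pattern of code example #3 below; the import location,
# the helper name `lstm_model`, the variable names and the summary tag are
# illustrative, not part of the original snippet.
from tensorflow.contrib import rnn


def lstm_model(x, weights, biases):
    x_ = tf.unstack(x, time_dimension, 1)        # one tensor per time step
    lstm_cell = rnn.BasicLSTMCell(n_embeddings)  # LSTM cell with n_embeddings units
    outputs, _ = rnn.static_rnn(lstm_cell, x_, dtype=tf.float32)
    return tf.add(biases, tf.matmul(outputs[-1], weights))  # linear read-out of the last output


weights = tf.Variable(tf.truncated_normal([n_embeddings, 1], stddev=0.1), name="weights")
biases = tf.Variable(tf.zeros([1, 1]), name="biases")
y_pred = lstm_model(X_tf, weights, biases)
cost = tf.reduce_mean(tf.square(y_tf - y_pred))     # mean squared error
train_op = optimizer(learning_rate).minimize(cost)  # AdagradOptimizer step

# Log the training cost so it can be inspected with TensorBoard in tf_logdir
tf.summary.scalar("MSE_train", cost)
merged_summary = tf.summary.merge_all()
writer = tf.summary.FileWriter(tf_logdir, graph=tf.get_default_graph())
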
# Code example #3
tf.set_random_seed(101)

# Size of the dataset
time_dimension = 20
train_size = 250
test_size = 250

# Hyperparameters
learning_rate = 0.01
optimizer = tf.train.AdagradOptimizer
n_epochs = 100
n_embeddings = 64

# Fetch the noisy cosine and reshape it into a 3D tensor
cos_values = fetch_cosine_values(train_size + test_size + time_dimension)
minibatch_cos_x, minibatch_cos_y = format_dataset(cos_values, time_dimension)
train_x = minibatch_cos_x[:train_size, :].astype(np.float32)
train_y = minibatch_cos_y[:train_size].reshape((-1, 1)).astype(np.float32)
test_x = minibatch_cos_x[train_size:, :].astype(np.float32)
test_y = minibatch_cos_y[train_size:].reshape((-1, 1)).astype(np.float32)
train_x_ts = train_x[:, :, np.newaxis]
test_x_ts = test_x[:, :, np.newaxis]

x_tf = tf.placeholder("float", shape=(None, time_dimension, 1), name="X")
y_tf = tf.placeholder("float", shape=(None, 1), name="Y")


# Define the model (RNN): LSTM (Long Short-Term Memory)
def RNN(x, weights, biases):
    x_ = tf.unstack(x, time_dimension, 1)        # split the 3D input into a list of time steps
    lstm_cell = rnn.BasicLSTMCell(n_embeddings)  # LSTM cell with n_embeddings hidden units
    outputs, _ = rnn.static_rnn(lstm_cell, x_, dtype=tf.float32)
    return tf.add(biases, tf.matmul(outputs[-1], weights))  # linear read-out of the last output
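

# A minimal sketch of how RNN() could be trained and evaluated on the cosine
# data; the names `weights`, `biases`, `y_pred`, `cost` and `train_op` are
# illustrative, not part of the original snippet.
weights = tf.Variable(tf.truncated_normal([n_embeddings, 1], stddev=0.1), name="weights")
biases = tf.Variable(tf.zeros([1, 1]), name="biases")
y_pred = RNN(x_tf, weights, biases)
cost = tf.reduce_mean(tf.square(y_tf - y_pred))     # mean squared error
train_op = optimizer(learning_rate).minimize(cost)  # AdagradOptimizer step

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for epoch in range(n_epochs):
        sess.run(train_op, feed_dict={x_tf: train_x_ts, y_tf: train_y})
    print("Test MSE:", sess.run(cost, feed_dict={x_tf: test_x_ts, y_tf: test_y}))
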
# Code example #4
import matplotlib.pyplot as plt
import numpy as np
import seaborn

from tools import fetch_cosine_values, fetch_stock_price, format_dataset

np.set_printoptions(precision=2)

# Print 2 oscillations (20 points, with a frequency of 0.1, i.e. a period of 10)
cos_values = fetch_cosine_values(20, frequency=0.1)
seaborn.tsplot(cos_values)
plt.xlabel("Days since start of the experiment")
plt.ylabel("Value of the cosine function")
plt.title("Cosine time series over time")
plt.show()

# Format the feature and label set
features_size = 5
minibatch_cos_X, minibatch_cos_y = format_dataset(cos_values, features_size)
print("minibatch_cos_X.shape=", minibatch_cos_X.shape)
print("minibatch_cos_y.shape=", minibatch_cos_y.shape)

# Print some features of the experiment
samples_to_plot = 5
f, axarr = plt.subplots(samples_to_plot, sharex=True)
for i in range(samples_to_plot):
    feats = minibatch_cos_X[i, :]
    label = minibatch_cos_y[i]

    print("Observation {}: X={} y={}".format(i, feats, label))
    # plt.subplot(samples_to_plot, 1, i+1)
    axarr[i].plot(range(i, features_size + i), feats, '--o')
    axarr[i].plot([features_size + i], label, 'rx')
    axarr[i].set_ylim([-1.1, 1.1])
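
# Show the sampled observations (features as dashed lines, labels as red crosses)
plt.show()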
# Code example #5
feat_dimension = 20
train_size = 250
test_size = 250

# Settings for TensorFlow
learning_rate = 0.01
optimizer = tf.train.AdamOptimizer
n_epochs = 10

if True:  # best values
    learning_rate = 0.1
    n_epochs = 1000

# Fetch the values, and prepare the train/test split
cos_values = fetch_cosine_values(train_size + test_size + feat_dimension)
minibatch_cos_X, minibatch_cos_y = format_dataset(cos_values, feat_dimension)

train_X = minibatch_cos_X[:train_size, :].astype(np.float32)
train_y = minibatch_cos_y[:train_size].reshape((-1, 1)).astype(np.float32)
test_X = minibatch_cos_X[train_size:, :].astype(np.float32)
test_y = minibatch_cos_y[train_size:].reshape((-1, 1)).astype(np.float32)

# Here, the TensorFlow code
X_tf = tf.placeholder("float", shape=(None, feat_dimension), name="X")
y_tf = tf.placeholder("float", shape=(None, 1), name="y")


# Here, the model: a simple linear regressor
def regression_ANN(x, weights, biases):
    return tf.add(biases, tf.matmul(x, weights))
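

# A minimal sketch of how regression_ANN could be trained and evaluated with
# the settings above; the names `weights`, `biases`, `y_pred`, `cost` and
# `train_op` are illustrative, not part of the original snippet.
weights = tf.Variable(tf.truncated_normal([feat_dimension, 1], stddev=0.1), name="weights")
biases = tf.Variable(tf.zeros([1, 1]), name="biases")
y_pred = regression_ANN(X_tf, weights, biases)
cost = tf.reduce_mean(tf.square(y_tf - y_pred))     # mean squared error
train_op = optimizer(learning_rate).minimize(cost)  # AdamOptimizer step

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for epoch in range(n_epochs):
        train_cost, _ = sess.run([cost, train_op],
                                 feed_dict={X_tf: train_X, y_tf: train_y})
    test_cost = sess.run(cost, feed_dict={X_tf: test_X, y_tf: test_y})
    print("Training MSE:", train_cost, "Test MSE:", test_cost)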