import warnings

import numpy as np
from pandas import concat

# from lstm_predictor import generate_data, lstm_model

# NOTE(review): `warnings` is never imported in this file as shown —
# this line raises NameError at runtime; add `import warnings` at the top.
warnings.filterwarnings("ignore")

# Hyperparameters for the LSTM regressor (first of several near-duplicate
# scraped examples in this file; later sections re-assign these same names).
LOG_DIR = 'resources/logs/'  # checkpoint/log dir (unused here: model_dir is commented out below)
TIMESTEPS = 1
RNN_LAYERS = [{'num_units': 400}]
DENSE_LAYERS = None
TRAINING_STEPS = 3000
PRINT_STEPS = TRAINING_STEPS  # / 10
BATCH_SIZE = 1

# NOTE(review): `predictor` is only imported further down the file
# (`import lstm_predictior as predictor`), so this statement executes before
# that import exists and will raise NameError; `SKCompat` and `learn`
# (tf.contrib.learn) have no visible import either — confirm import order.
regressor = SKCompat(
    learn.Estimator(model_fn=predictor.lstm_model(TIMESTEPS, RNN_LAYERS,
                                                  DENSE_LAYERS), ))
#   model_dir=LOG_DIR)
from pandas import read_csv
from pandas import Series

from sklearn.metrics import mean_squared_error
from sklearn.preprocessing import MinMaxScaler

# Load the book-inventory series: column 0 is the parsed date index and
# column 4 the tracked value.  The trailing `.squeeze("columns")` collapses
# the single remaining data column to a Series — it replaces the
# `squeeze=True` keyword, which was deprecated in pandas 1.4 and removed
# in pandas 2.0, while producing the same result here.
series = read_csv(
    '../CorpData/InventoryHistory/2010_2018_books_sortable inventory.csv',
    header=0,
    parse_dates=[0],
    index_col=0,
    usecols=[0, 4]).squeeze("columns")
# NOTE(review): the module name 'lstm_predictior' looks misspelled — the
# commented import below spells it 'lstm_predictor'; confirm which file
# actually exists before "fixing" either spelling.
import lstm_predictior as predictor

# from lstm_predictor import generate_data, lstm_model

# Requires `import warnings`, which is missing at the top of this file.
warnings.filterwarnings("ignore")

# Second scraped example: re-assigns the same hyperparameter names with
# different values (500 steps / batch 100, vs 3000 steps / batch 1 above).
LOG_DIR = 'resources/logs/'
TIMESTEPS = 1
RNN_LAYERS = [{'num_units': 400}]
DENSE_LAYERS = None
TRAINING_STEPS = 500
PRINT_STEPS = TRAINING_STEPS  # / 10
BATCH_SIZE = 100

# Rebuild the regressor with the new hyperparameters; model_dir is commented
# out, so no fixed checkpoint/log directory is configured for this Estimator.
regressor = SKCompat(learn.Estimator(model_fn=predictor.lstm_model(TIMESTEPS, RNN_LAYERS, DENSE_LAYERS), ))
#   model_dir=LOG_DIR)

# Build the sine-wave dataset (train/val/test splits keyed in X and y), then
# corrupt each target split with Gaussian noise (mean 0, std 0.2).
X, y = predictor.generate_data(
    np.sin,
    np.linspace(0, 100, 10000, dtype=np.float32),
    TIMESTEPS,
    seperate=False)  # 'seperate' is the spelling this helper expects

# Column vectors of noise, one per split (drawn in train/val/test order).
noise_train = np.asmatrix(np.random.normal(0, 0.2, len(y['train'])), dtype=np.float32).T
noise_val = np.asmatrix(np.random.normal(0, 0.2, len(y['val'])), dtype=np.float32).T
noise_test = np.asmatrix(np.random.normal(0, 0.2, len(y['test'])), dtype=np.float32).T  # asmatrix

y['train'] = y['train'] + noise_train
y['val'] = y['val'] + noise_val
y['test'] = y['test'] + noise_test
# ---- Ejemplo n.º 3 (scraped example boundary; score: 0) ----
# Hyperparameters for this example (re-assigns names defined above).
DENSE_LAYERS = None
TRAINING_STEPS = 100
PRINT_STEPS = TRAINING_STEPS / 10  # validate every 10 steps
BATCH_SIZE = 100
''' org
LOG_DIR = 'resources/logs/'
TIMESTEPS = 1
RNN_LAYERS = [{'num_units': 4}]
DENSE_LAYERS = None
TRAINING_STEPS = 100
PRINT_STEPS = TRAINING_STEPS / 10
BATCH_SIZE = 100
'''

# sklearn-style wrapper around the tf.contrib.learn Estimator.  # new
regressor = SKCompat(
    learn.Estimator(
        model_fn=lstm_model(TIMESTEPS, RNN_LAYERS, DENSE_LAYERS),
        model_dir=LOG_DIR))
# regressor = learn.Estimator(model_fn=lstm_model(TIMESTEPS, RNN_LAYERS, DENSE_LAYERS),model_dir=LOG_DIR) # old

# sin() over 600 integer steps, windowed into TIMESTEPS-length samples.
X, y = generate_data(
    np.sin, np.arange(600, dtype=np.int32), TIMESTEPS, seperate=False)

for part in (X['train'], y['train']):
    print(part.shape)

for part in (X['train'], y['train']):
    print(part)

# create a lstm instance and validation monitor
# ---- Ejemplo n.º 4 (scraped example boundary; score: 0) ----
# Hyperparameters for this example (re-assigns names defined above).
TRAINING_STEPS = 100
PRINT_STEPS = TRAINING_STEPS / 10  # validation cadence: every 10 steps
BATCH_SIZE = 100


''' org
LOG_DIR = 'resources/logs/'
TIMESTEPS = 1
RNN_LAYERS = [{'num_units': 4}]
DENSE_LAYERS = None
TRAINING_STEPS = 100
PRINT_STEPS = TRAINING_STEPS / 10
BATCH_SIZE = 100
'''

# Estimator wrapped for the sklearn-style fit/predict API.  # new
regressor = SKCompat(
    learn.Estimator(
        model_fn=lstm_model(TIMESTEPS, RNN_LAYERS, DENSE_LAYERS),
        model_dir=LOG_DIR))
# regressor = learn.Estimator(model_fn=lstm_model(TIMESTEPS, RNN_LAYERS, DENSE_LAYERS),model_dir=LOG_DIR) # old


# Noise-free sine sampled on [0, 100], split into train/val/test windows.
X, y = generate_data(
    np.sin, np.linspace(0, 100, 10000, dtype=np.float32), TIMESTEPS,
    seperate=False)
for part in (X['train'], y['train']):
    print(part.shape)

# create a lstm instance and validation monitor

# Monitors validation loss during fit; stops early after 1000 stale rounds.
validation_monitor = learn.monitors.ValidationMonitor(
    X['val'], y['val'],
    every_n_steps=PRINT_STEPS,
    early_stopping_rounds=1000)
#validation_monitor = tf.train.SessionRunHook(X['val'], y['val'], every_n_steps=PRINT_STEPS, early_stopping_rounds=1000)

# print(X['train'])
# print(y['train'])