# Notebook-exported setup script.
# NOTE(review): this chunk ends mid-way through the `encoder` class; the
# remainder of its definition lives outside this view.
from torch.autograd import Variable
from torch import optim
import torch  # fix: torch.cuda is used below but `torch` itself was never bound
import torch.nn as nn  # fix: nn.Module is referenced below but `nn` was never imported
import torch.nn.functional as F
import matplotlib
# matplotlib.use('Agg')

# Enable inline plotting only when actually running under IPython; an
# unguarded get_ipython() call raises NameError under plain `python`.
try:
    get_ipython().magic(u'matplotlib inline')
except NameError:
    pass

import datetime as dt
import itertools
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
import utility as util

# Logger / path setup, kept as module-level side effects as in the original.
# (The original `global logger` statement was a no-op at module scope and
# has been dropped.)
util.setup_log()
util.setup_path()
logger = util.logger

use_cuda = torch.cuda.is_available()
logger.info("Is CUDA available? %s.", use_cuda)


# In[2]:


class encoder(nn.Module):
    """Encoder network (definition continues beyond this chunk)."""

    def __init__(self, input_size, hidden_size, T, logger):
        # input size: number of underlying factors (81)
        # T: number of time steps (10)
        # hidden_size: dimension of the hidden state
        super(encoder, self).__init__()
# Notebook-exported setup script.
# NOTE(review): this chunk ends mid-way through the `encoder` class; the
# remainder of its definition lives outside this view.
from torch.autograd import Variable
from torch import optim
import torch  # fix: torch.cuda is used below but `torch` itself was never bound
import torch.nn as nn  # fix: nn.Module is referenced below but `nn` was never imported
import torch.nn.functional as F
import matplotlib
# matplotlib.use('Agg')

# Enable inline plotting only when actually running under IPython; an
# unguarded get_ipython() call raises NameError under plain `python`.
try:
    get_ipython().magic(u'matplotlib inline')
except NameError:
    pass

import datetime as dt
import itertools
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
import utility as util

# Logger / path setup, kept as module-level side effects as in the original.
# (The original `global logger` statement was a no-op at module scope and
# has been dropped.)
util.setup_log()
util.setup_path()
logger = util.logger

use_cuda = torch.cuda.is_available()
logger.info("Is CUDA available? %s.", use_cuda)


# In[2]:


class encoder(nn.Module):
    """Encoder network (definition continues beyond this chunk)."""

    def __init__(self, input_size, hidden_size, T, logger):
        # input size: number of underlying factors (81)
        # T: number of time steps (10)
        # hidden_size: dimension of the hidden state
        super(encoder, self).__init__()
        self.input_size = input_size
# Notebook-exported setup script.
# NOTE(review): this chunk ends mid-way through the `encoder` class; the
# remainder of its definition lives outside this view.
from torch.autograd import Variable
from torch import optim
import torch  # fix: torch.cuda is used below but `torch` itself was never bound
import torch.nn as nn  # fix: nn.Module is referenced below but `nn` was never imported
import torch.nn.functional as F
import matplotlib
# matplotlib.use('Agg')
# get_ipython().magic(u'matplotlib inline')
import datetime as dt
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
import utility as util

# Logger / path setup, kept as module-level side effects as in the original.
# (The original `global logger` statement was a no-op at module scope and
# has been dropped.)
util.setup_log()
# NOTE(review): hard-coded absolute paths tie this script to one machine;
# consider making them configurable.
util.setup_path('/home/wang/Wang/da_rnn3/s3_prefix', '/home/wang/Wang/da_rnn3/data_dir')
logger = util.logger

use_cuda = torch.cuda.is_available()
logger.info("Is CUDA available? %s.", use_cuda)


# In[2]:


class encoder(nn.Module):
    """Encoder network (definition continues beyond this chunk)."""

    def __init__(self, input_size, hidden_size, T, logger):
        # input size: number of underlying factors (81)
        # T: number of time steps (10)
        # hidden_size: dimension of the hidden state
        super(encoder, self).__init__()
        self.input_size = input_size
# Notebook-exported setup script.
# NOTE(review): this chunk ends mid-way through the `encoder` class; the
# remainder of its definition lives outside this view.
import torch  # fix: torch.cuda is used below; `import torch.nn.functional as F` binds only F
import torch.nn as nn  # fix: nn.Module is referenced below but `nn` was never imported
import torch.nn.functional as F
# import matplotlib
# matplotlib.use('Agg')
# get_ipython().magic(u'matplotlib inline')
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
import utility as util

# Logger / path setup, kept as module-level side effects as in the original.
# (The original `global logger` statement was a no-op at module scope and
# has been dropped.)
util.setup_log()
util.setup_path(s3_prefix='prefix', data_dir='~/nasdaq100')
logger = util.logger

use_cuda = torch.cuda.is_available()
logger.info("Is CUDA available? %s.", use_cuda)


# In[2]:


class encoder(nn.Module):
    """Encoder network (definition continues beyond this chunk)."""

    def __init__(self, input_size, hidden_size, T, logger):
        # input size: number of underlying factors (81)
        # T: number of time steps (10)
        # hidden_size: dimension of the hidden state
        super(encoder, self).__init__()
        self.input_size = input_size