# Example no. 1
class RNN(object):
    """Encoder/decoder sequence model wrapper.

    Owns the two sub-networks, a shared cross-entropy loss, one Adam
    optimizer per sub-network, and checkpoint load/save under ``models/``.
    """

    def __init__(self, input_size, output_size, resume=False):
        super(RNN, self).__init__()

        # Sub-networks are defined elsewhere in the project.
        self.encoder = Encoder(input_size)
        self.decoder = Decoder(output_size)

        # Separate optimizers so each sub-network can be stepped on its own.
        self.loss = nn.CrossEntropyLoss()
        self.encoder_optimizer = optim.Adam(self.encoder.parameters())
        self.decoder_optimizer = optim.Adam(self.decoder.parameters())

        # Optionally restore previously saved weights.
        if resume:
            self.encoder.load_state_dict(torch.load("models/encoder.ckpt"))
            self.decoder.load_state_dict(torch.load("models/decoder.ckpt"))

    def train(self, input, target):
        """Run one teacher-forced training step.

        Returns a (mean loss value, per-step argmax predictions) pair.
        """
        self.encoder_optimizer.zero_grad()
        self.decoder_optimizer.zero_grad()

        # Fold the whole input sequence into a single final hidden state.
        hidden = self.encoder.first_hidden()
        for token in input:
            _, hidden = self.encoder.forward(token, hidden)

        # Teacher forcing: feed each target step, score against the next one.
        predictions = []
        accumulated = 0
        for current, expected in zip(target, target[1:]):
            _, softmax, hidden = self.decoder.forward(current, hidden)

            predictions.append(
                np.argmax(softmax.data.numpy(), 1)[:, np.newaxis])
            accumulated += self.loss(softmax, expected.squeeze(1))

        # Average over the decoded steps before backpropagating.
        accumulated /= len(predictions)
        accumulated.backward()

        self.decoder_optimizer.step()
        self.encoder_optimizer.step()

        return accumulated.data[0], predictions

    def eval(self, input):
        """Greedily decode a sentence for *input*, stopping at token id 1."""
        hidden = self.encoder.first_hidden()

        # Encode the input sequence.
        for token in input:
            _, hidden = self.encoder.forward(Variable(token), hidden)

        # Greedy decoding from the start-of-sentence token.
        # NOTE(review): self.sos is never assigned in __init__ — confirm it is
        # set elsewhere before eval() is called, otherwise this raises.
        sentence = []
        current = self.sos
        while current.data[0, 0] != 1:  # 1 is treated as the end-of-sentence id
            output, _, hidden = self.decoder.forward(current, hidden)
            word = np.argmax(output.data.numpy()).reshape((1, 1))
            current = Variable(torch.LongTensor(word))
            sentence.append(word)

        return sentence

    def save(self):
        """Persist both sub-networks' weights under models/."""
        torch.save(self.encoder.state_dict(), "models/encoder.ckpt")
        torch.save(self.decoder.state_dict(), "models/decoder.ckpt")
# Example no. 2

if __name__ == "__main__":
    # debug limits the CSV read to 100 rows for quick local runs.
    debug = False
    save_plots = False  # NOTE(review): unused in this visible section — confirm it matters below

    # Rebuild the encoder from its saved constructor kwargs and weights.
    with open(os.path.join("data", "enc_kwargs.json"), "r") as fi:
        enc_kwargs = json.load(fi)
    enc = Encoder(**enc_kwargs)
    enc.load_state_dict(
        torch.load(os.path.join("data", "encoder.torch"), map_location=device))

    # Rebuild the decoder the same way.
    with open(os.path.join("data", "dec_kwargs.json"), "r") as fi:
        dec_kwargs = json.load(fi)
    dec = Decoder(**dec_kwargs)
    dec.load_state_dict(
        torch.load(os.path.join("data", "decoder.torch"), map_location=device))

    # Restore the fitted scaler and preprocess the raw series; NDX is the
    # prediction target column.
    scaler = joblib.load(os.path.join("data", "scaler.pkl"))
    raw_data = pd.read_csv(os.path.join("data", "nasdaq100_padding.csv"),
                           nrows=100 if debug else None)
    targ_cols = ("NDX", )
    data = preprocess_data(raw_data, targ_cols, scaler)

    # Run inference with the saved model hyper-parameters.
    with open(os.path.join("data", "da_rnn_kwargs.json"), "r") as fi:
        da_rnn_kwargs = json.load(fi)
    final_y_pred = predict(enc, dec, data, **da_rnn_kwargs)

    # Plot predictions against ground truth; the true series is offset by
    # T - 1 points, presumably the model's warm-up window — confirm in predict().
    plt.figure()
    plt.plot(final_y_pred, label='Predicted')
    plt.plot(data.targs[(da_rnn_kwargs["T"] - 1):], label="True")
    plt.legend(loc='upper left')
# Example no. 3
# NOTE(review): this chunk starts mid-script — enc_params and args are defined
# above the visible region; confirm upstream before editing.
# Decoder hyper-parameters were persisted as a one-row CSV during training.
dec_params = pd.read_csv(os.path.join('results', args.save, 'dec_params.csv'))
print(enc_params)
print(dec_params)
# Rebuild the encoder on GPU from the first (only) row of its params CSV.
enc = Encoder(input_size=enc_params['input_size'][0].item(),
              hidden_size=enc_params['hidden_size'][0].item(),
              T=enc_params['T'][0].item()).cuda()
enc.load_state_dict(
    torch.load(os.path.join("results", args.save, "encoder.pt"),
               map_location=device))

# Rebuild the decoder the same way.
dec = Decoder(encoder_hidden_size=dec_params['encoder_hidden_size'][0].item(),
              decoder_hidden_size=dec_params['decoder_hidden_size'][0].item(),
              T=dec_params['T'][0].item(),
              out_feats=dec_params['out_feats'][0].item()).cuda()
dec.load_state_dict(
    torch.load(os.path.join("results", args.save, "decoder.pt"),
               map_location=device))

# Fitted feature (X) and target (y) scalers saved during training.
scaleX = joblib.load(os.path.join("results", args.save, "scaleX.pkl"))
scaley = joblib.load(os.path.join("results", args.save, "scaley.pkl"))

raw_data = pd.read_csv(os.path.join('data', 'covid', args.data))

# Drop identifier/metadata columns so only numeric features remain.
del raw_data['Id']
del raw_data['Date']
del raw_data['Province_State']
del raw_data['Country_Region']
# The 140*90:141*90 slice presumably selects one region's 90-row window
# (data laid out as 90 rows per region) — verify against the input CSV.
print(raw_data[140 * 90:141 * 90].head())
targ_cols = ('Fatalities', )
data = preprocess_data(raw_data[140 * 90:141 * 90], targ_cols, scaleX, scaley)