# --- Feature engineering: lag features + autocorrelation weights, then loaders ---
# NOTE(review): relies on names defined earlier in the file (not visible in this
# chunk): F, series, series_is_nan, LAGS, DROP_BEFORE, ENC_LEN, DEC_LEN,
# VALID_LEN, TEST_LEN, BATCH_SIZE, normalize, Values,
# create_seq2seq_data_loader, forward_split.

# Valid (non-NaN) start/end index per series — used to bound the autocorrelation.
starts, ends = F.get_valid_start_end(series_is_nan)
# Lagged (smoothed) copies of the series, one per entry in LAGS.
series_lags = F.make_lags(series, LAGS, use_smooth=True)
# Per-series autocorrelation at each lag over the valid span only.
series_lags_corr = F.batch_autocorr(series, LAGS, starts, ends, threshold=1.05)
# normalize(...) appears to return (normalized, mean, std) — keep normalized only.
series_lags_corr = normalize(series_lags_corr, axis=0)[0]
series_lags_corr = Values(series_lags_corr, name='series_lags_corr')
# Standardize the raw series along time (axis=2), dropping the warm-up prefix.
series, series_mean, series_std = normalize(series[:, np.newaxis, DROP_BEFORE:], axis=2)
# Standardize the lag features with the SAME mean/std as the series so they stay
# on a comparable scale; NaNs introduced by lagging become 0.
series_lags = np.nan_to_num(
    (series_lags[:, :, DROP_BEFORE:] - series_mean) / series_std)
series_lags = Values(series_lags, 'xy_lags')
# Chronological train / valid / test split along the time axis.
time_idxes = np.arange(series.shape[2])
trn_idx, val_idx = forward_split(time_idxes, ENC_LEN, VALID_LEN + TEST_LEN)
val_idx, test_idx = forward_split(val_idx, ENC_LEN, TEST_LEN)
# Training loader carries the lag features and their correlation weights.
trn_dl = create_seq2seq_data_loader(series, enc_len=ENC_LEN, dec_len=DEC_LEN,
                                    time_idx=trn_idx, batch_size=BATCH_SIZE,
                                    num_iteration_per_epoch=4,
                                    features=[series_lags, series_lags_corr],
                                    seq_last=False, device='cuda', mode='train')
# NOTE(review): statement truncated in this chunk — its remaining keyword
# arguments continue past the visible source.
val_dl = create_seq2seq_data_loader(series, enc_len=ENC_LEN, dec_len=DEC_LEN,
@time : 2020/5/12 16:33 """
# Smoke tests for deepseries seq2seq models on a synthetic sine wave.
# NOTE(review): the line above is the tail of a module docstring whose opening
# quotes lie outside this chunk; left byte-identical.
from deepseries.models import RNN2RNN, Wave2WaveV1
from deepseries.train import Learner
from deepseries.dataset import Values, create_seq2seq_data_loader, forward_split
import numpy as np
from torch.optim import Adam
import torch

# Shared hyper-parameters for the tests below.
# NOTE(review): the data-loader calls below hard-code enc_len=14 / dec_len=7
# instead of using these constants — confirm which values are intended.
batch_size = 16
enc_len = 36
dec_len = 12

# Synthetic univariate series; reshape(1, 1, -1) yields shape (1, 1, 1000).
series = np.sin(np.arange(0, 1000))
series = series.reshape(1, 1, -1)
# Chronological split over the time axis; the last 200 steps are held out.
train_idx, valid_idx = forward_split(np.arange(series.shape[2]), enc_len=14, valid_size=200)


def test_rnn2rnn():
    # Train/valid loaders over the sine series (seq_last=False => batch-first layout).
    train_dl = create_seq2seq_data_loader(series, enc_len=14, dec_len=7,
                                          time_idx=train_idx, batch_size=12,
                                          num_iteration_per_epoch=12, seq_last=False)
    # NOTE(review): statement truncated in this chunk — its remaining keyword
    # arguments (and the rest of the test body) continue past the visible source.
    valid_dl = create_seq2seq_data_loader(series, enc_len=14, dec_len=7, time_idx=valid_idx,