Example #1

# SAMPLING_GRID (the hyper-parameter distributions to sample from) and the
# required imports are assumed to be defined before this point
CONSTANT_GRID = {
    'max_epochs': 300,
    'early_stopping_patience': 10
}

if __name__ == '__main__':
    # Declare path to run directory and create it
    run_path = 'data/run_logreg_{:%Y%m%d_%H%M%S}'.format(datetime.now())
    os.makedirs(run_path)

    # Flush stdout and redirect stderr to an error log in the run directory
    sys.stdout.flush()
    sys.stderr = open(run_path + '/error_log.txt', 'w')

    # Initialise AmphibianReader
    ar = AmphibianReader('data/all_values/banking',
                         datetime(2010, 7, 16),
                         datetime(2018, 12, 31))

    # Create tensors
    _ = ar.create_torch()
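    # NOTE: the second example below calls read_csvs() and get_unique_dates()
    # explicitly before create_torch()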

    # Initialise CrossValidation
    cv = CrossValidation(am_reader=ar, int_start=0,
                         int_end=ar.torch['AMERICA'].shape[0],
                         architecture='SoftmaxRegressionModel',
                         sampled_param_grid=SAMPLING_GRID,
                         constant_param_grid=CONSTANT_GRID,
                         log_path=run_path,
                         n_iter=200)
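    # n_iter=200: number of hyper-parameter settings drawn from SAMPLING_GRID
    # (assumed meaning of the argument)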

    # Run CrossValidation
    cv.run()  # NOTE: run() is an assumed entry-point name; adjust to the actual CrossValidation API

Example #2

from torch.utils.data import DataLoader
from torchvision import transforms
import torch
from amphibian.architectures import SoftmaxRegressionModel, RNNModel, LSTMModel, AttentionModel
import warnings
import datetime
from numpy import nanmean, nanstd, floor
import torch.utils.data as data_utils
# NOTE: AmphibianReader, TrainTestSplit and TimeSeriesDataset must also be
# imported from the amphibian package (exact import paths not shown)
# Disable warnings
warnings.filterwarnings('ignore')

# Optional CUDA sanity checks:
# torch.cuda.get_device_name(0)
# torch.cuda.is_available()

# Read lodging-sector quotes for the chosen date range and build the tensors
a = AmphibianReader('./data/all_values/stocks/Lodging',
                    datetime.datetime(2012, 1, 10),
                    datetime.datetime(2018, 1, 30))
_ = a.read_csvs()
_ = a.get_unique_dates()
_ = a.create_torch()

# Inspect the tensor for the EMEIA region
a.torch['EMEIA'].size()
# Split the interval [0, 500) into train and test parts (80% for training)
tts = TrainTestSplit(a, int_start=0, int_end=500, train_size=0.8)
tts.whole_set['train_y'].size()
# Model and dataset hyperparameters
batch_size = 10
n_neurons = 5
n_outputs = 3
n_layers = 1
n_steps = 4   # length of each input window

# Build a dataset of n_steps-long windows from the split
ds = TimeSeriesDataset(tt_split=tts,
                       int_len=n_steps,
                       transform=transforms.Compose([