Code Example #1
File: tests.py Project: ALevitskyy/Foreq
def spreadsheet_load(currency):
    dataManager = DataManager()
    dataManager.load_all()
    table = dataManager.raw_data[currency]
    return dataManager, table
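A minimal usage sketch for the helper above; the "EURUSD" key and the length check are assumptions for illustration, not taken from the project:

def spreadsheet_load_smoke_test():
    # Load everything and pull one raw table out of the manager.
    dataManager, table = spreadsheet_load("EURUSD")
    assert "EURUSD" in dataManager.raw_data
    assert len(table) > 0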
Code Example #2
    callbacks["optimizer"] = OptimizerCallback()
    callbacks["logger"] = ConsoleLogger()
    callbacks["tflogger"] = TensorboardLogger()
    return callbacks


log_dir = "logs"
workers = 4
batch_size = 10
epoch_size_train = 200
epoch_size_val = 200
num_epochs = 3

dataManager = DataManager(data_path="./reduced",
                          start_end_func=train_start_end_func)
dataManager.load_all()
dataManager.init_norm_params()
dataManager.init_splits()
dataManagerVal = DataManager(data_path="./reduced",
                             start_end_func=val_start_end_func)
dataManagerVal.load_all()
dataManagerVal.init_norm_params()
dataManagerVal.init_splits()
dataset_train = Currency_Dataset(dataManager, epoch_size_train)
# Workaround: deepcopy does not work with generators, so two separate
# DataManager instances are used and the data is loaded twice.
# A cleaner approach would be to load once and then pass the data on
# (a manual "deepcopy").
dataset_val = Currency_Dataset(dataManagerVal, epoch_size_val)
dataloader_train = DataLoader(dataset=dataset_train,
                              batch_size=batch_size,
                              shuffle=True,
                              num_workers=workers)  # num_workers assumed from the `workers` setting above
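The excerpt above does not show a loader for the validation dataset; a minimal sketch of how it might mirror the training loader (the dataloader_val name, shuffle=False, and the num_workers choice are assumptions, not taken from the project):

dataloader_val = DataLoader(dataset=dataset_val,
                            batch_size=batch_size,
                            shuffle=False,  # keep validation batches in a fixed order
                            num_workers=workers)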
Code Example #3
File: tests.py Project: ALevitskyy/Foreq
def normparams_splits_test():
    dataManager = DataManager()
    dataManager.load_all()
    dataManager.init_norm_params()
    dataManager.init_splits()
    return dataManager.norm_params, dataManager.bins
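A similarly hedged smoke test for the function above; the exact structure of norm_params and bins is not shown in the excerpt, so only generic checks are made:

def normparams_splits_smoke_test():
    norm_params, bins = normparams_splits_test()
    assert norm_params is not None
    assert bins is not None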