Example 1
def test_lstm_siminv(data_input, epoch=-1):
    model_dict = data_input.lstm_model.data_source.data_config.model_dict
    opt_data = model_dict['data']
    opt_model = model_dict['model']
    opt_train = model_dict['train']
    batch_size, rho = model_dict['train']['miniBatch']

    # data
    xqqnch, xqnct, qt = data_input.load_data()
    theta_length = 10
    # generate file names and run model
    out = os.path.join(model_dict['dir']['Out'], 'model')
    t_range = data_input.lstm_model.t_s_dict["t_final_range"]
    if epoch < 0:
        epoch = opt_train["nEpoch"]
    file_path = name_pred(model_dict, out, t_range, epoch)
    print('output files:', file_path)
    model = model_run.model_load(out, epoch)
    if opt_model["name"] == "CudnnLstmModelInv":
        data_pred, data_params = model_run.model_test_inv(model, xqqnch, xqnct, batch_size)
    else:
        data_pred, data_params = model_run.model_test_inv_kernel(model, xqqnch, xqnct, batch_size)

    data_stack = reduce(lambda a, b: np.vstack((a, b)),
                        list(map(lambda x: x.reshape(x.shape[0], x.shape[1]), data_pred)))
    pred = np.expand_dims(data_stack, axis=2)
    if opt_data['doNorm'][1] is True:
        stat_dict = data_input.lstm_model.stat_dict
        pred = _trans_norm(pred, 'usgsFlow', stat_dict, to_norm=False)
        qt = _trans_norm(qt, 'usgsFlow', stat_dict, to_norm=False)

    return pred, qt
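
The reduce/vstack call above concatenates the per-batch prediction arrays along the basin axis before denormalization. Below is a minimal, self-contained sketch of that stacking step with fake arrays; the (basins, time, 1) shapes are assumptions for illustration, not the project's actual batch layout.

from functools import reduce
import numpy as np

# two fake mini-batch outputs, each shaped (basins, time, 1)
data_pred = [np.random.rand(4, 30, 1), np.random.rand(4, 30, 1)]
# drop the trailing axis, then stack batches along the basin axis
data_stack = reduce(lambda a, b: np.vstack((a, b)),
                    [x.reshape(x.shape[0], x.shape[1]) for x in data_pred])
# restore the 3-D (basins, time, 1) layout expected by the denormalization step
pred = np.expand_dims(data_stack, axis=2)
print(pred.shape)  # (8, 30, 1)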
Example 2
def test_lstm_da(data_input, epoch=-1):
    model_dict = data_input.data_model.data_source.data_config.model_dict
    opt_data = model_dict['data']
    opt_model = model_dict['model']
    opt_train = model_dict['train']
    batch_size, rho = model_dict['train']['miniBatch']

    # data
    qx, obs, c = data_input.load_data(model_dict)
    # generate file names and run model
    out = model_dict['dir']['Out']
    t_range = data_input.data_model.t_s_dict["t_final_range"]
    if epoch < 0:
        epoch = opt_train["nEpoch"]
    file_path = name_pred(model_dict, out, t_range, epoch)
    print('output files:', file_path)
    model = model_run.model_load(out, epoch)

    model_run.model_test(model, qx, c, file_path=file_path, batch_size=batch_size)
    data_pred = pd.read_csv(file_path, dtype=float, header=None).values

    pred = np.expand_dims(data_pred, axis=2)
    if opt_data['doNorm'][1] is True:
        stat_dict = data_input.data_model.stat_dict
        pred = _trans_norm(pred, 'usgsFlow', stat_dict, to_norm=False)
        obs = _trans_norm(obs, 'usgsFlow', stat_dict, to_norm=False)

    return pred, obs
Example 3
def master_test_natural_flow(model_input, epoch=-1):
    data_model = model_input.data_model2
    model_dict = data_model.data_source.data_config.model_dict
    opt_data = model_dict['data']
    batch_size, rho = model_dict['train']['miniBatch']

    x, obs, c = model_input.load_data(model_dict)

    # generate file names and run model
    out = os.path.join(model_dict['dir']['Out'], "model")
    t_range = data_model.t_s_dict["t_final_range"]
    if epoch < 0:
        epoch = model_dict['train']["nEpoch"]
    file_path = name_pred(model_dict, out, t_range, epoch)
    print('output files:', file_path)
    re_test = False
    if not os.path.isfile(file_path):
        re_test = True
    if re_test:
        print('Running new results')
        model = model_run.model_load(out, epoch)
        model_run.model_test(model, x, c, file_path=file_path, batch_size=batch_size)
    else:
        print('Loaded previous results')

    data_pred = pd.read_csv(file_path, dtype=float, header=None).values

    pred = np.expand_dims(data_pred, axis=2)
    if opt_data['doNorm'][1] is True:
        stat_dict = data_model.stat_dict
        pred = _trans_norm(pred, 'usgsFlow', stat_dict, to_norm=False)
        obs = _trans_norm(obs, 'usgsFlow', stat_dict, to_norm=False)

    return pred, obs
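
The function above only reruns the model when no prediction CSV exists yet; otherwise it reads the cached result back with pandas. A small stand-alone sketch of that caching pattern, using a hypothetical file name and random numbers in place of the model output:

import os
import numpy as np
import pandas as pd

file_path = 'pred_cache_demo.csv'  # hypothetical cache file
if not os.path.isfile(file_path):
    print('Running new results')
    fake_pred = np.random.rand(8, 30)  # stand-in for model_run.model_test output
    pd.DataFrame(fake_pred).to_csv(file_path, header=False, index=False)
else:
    print('Loaded previous results')
data_pred = pd.read_csv(file_path, dtype=float, header=None).values
pred = np.expand_dims(data_pred, axis=2)  # (sites, time, 1)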
Example 4
def test_lstm_storage(data_input, epoch=-1):
    model_dict = data_input.data_model_storage.data_source.data_config.model_dict
    opt_data = model_dict['data']
    opt_model = model_dict['model']
    opt_train = model_dict['train']
    batch_size, rho = opt_train['miniBatch']

    seq_length_storage = opt_model["storageLength"]
    # qx and y data have been cut
    qx, c, natflow, y = data_input.load_data()
    # generate file names and run model
    out = os.path.join(model_dict['dir']['Out'], 'model')
    t_range = data_input.data_model_storage.t_s_dict["t_final_range"]
    if epoch < 0:
        epoch = opt_train["nEpoch"]
    file_path = name_pred(model_dict, out, t_range, epoch)
    print('output files:', file_path)
    model = model_run.model_load(out, epoch)
    data_pred, data_params = model_run.model_test_storage(model, qx, c, natflow, seq_length_storage, batch_size)

    data_stack = reduce(lambda a, b: np.vstack((a, b)),
                        list(map(lambda x: x.reshape(x.shape[0], x.shape[1]), data_pred)))
    pred = np.expand_dims(data_stack, axis=2)
    if opt_data['doNorm'][1] is True:
        stat_dict = data_input.data_model_storage.stat_dict
        pred = _trans_norm(pred, 'usgsFlow', stat_dict, to_norm=False)
        y = _trans_norm(y, 'usgsFlow', stat_dict, to_norm=False)

    return pred, y
Example 5
def master_test_with_pretrained_model(data_model, pretrained_model_file, pretrained_name):
    """test data_model with a pretrained model"""
    model_dict = data_model.data_source.data_config.model_dict
    opt_data = model_dict['data']
    opt_model = model_dict['model']
    batch_size, rho = model_dict['train']['miniBatch']
    x, obs, c = data_model.load_data(model_dict)

    # generate file names and run model
    t_range = data_model.t_s_dict["t_final_range"]
    save_dir = os.path.join(model_dict['dir']['Out'], pretrained_name)
    if not os.path.isdir(save_dir):
        os.makedirs(save_dir)
    file_name = '_'.join([str(t_range[0]), str(t_range[1])])
    file_path = os.path.join(save_dir, file_name + '.csv')
    print('output files:', file_path)
    if not os.path.isfile(file_path):
        print('Running new results')
        model = torch.load(pretrained_model_file)
        model_run.model_test(model, x, c, file_path=file_path, batch_size=batch_size)
    else:
        print('Loaded previous results')
    data_pred = pd.read_csv(file_path, dtype=float, header=None).values
    pred = np.expand_dims(data_pred, axis=2)
    if opt_data['doNorm'][1] is True:
        stat_dict = data_model.stat_dict
        pred = _trans_norm(pred, 'usgsFlow', stat_dict, to_norm=False)
        obs = _trans_norm(obs, 'usgsFlow', stat_dict, to_norm=False)

    return pred, obs
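
A hedged sketch of the path handling and model loading above: results go under <Out>/<pretrained_name>/<t_start>_<t_end>.csv, and the pretrained network is restored before evaluation. The toy Linear model and all names below are illustrative; this sketch restores weights via a state_dict round trip, which is a more portable variant of the whole-module torch.load call in the original.

import os
import torch
from torch import nn

out_dir, pretrained_name = 'demo_out', 'pretrained_camels'  # hypothetical names
t_range = ['1990-01-01', '2000-01-01']
save_dir = os.path.join(out_dir, pretrained_name)
os.makedirs(save_dir, exist_ok=True)
file_path = os.path.join(save_dir, '_'.join([str(t_range[0]), str(t_range[1])]) + '.csv')

pretrained_model_file = os.path.join(out_dir, 'toy_pretrained.pt')
torch.save(nn.Linear(5, 1).state_dict(), pretrained_model_file)  # stand-in pretrained weights
model = nn.Linear(5, 1)                                          # must match the saved architecture
model.load_state_dict(torch.load(pretrained_model_file))
model.eval()
print('output files:', file_path)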
Example 6
def master_test_1by1(data_model):
    model_dict = data_model.data_source.data_config.model_dict
    opt_model = model_dict['model']
    # generate file names and run model
    out = model_dict['dir']['Out']
    t_range = data_model.t_s_dict["t_final_range"]
    epoch = model_dict['train']["nEpoch"]
    file_path = name_pred(model_dict, out, t_range, epoch)
    print('output files:', file_path)

    model_file = os.path.join(out, 'checkpoint.pt')
    opt_model['nx'] = data_model.data_forcing.shape[-1]
    opt_model['ny'] = 1

    if opt_model['name'] == 'CudnnLstmModel':
        model = rnn.CudnnLstmModel(nx=opt_model['nx'], ny=opt_model['ny'], hidden_size=opt_model['hiddenSize'])
    elif opt_model['name'] == 'LstmCloseModel':
        model = rnn.LstmCloseModel(nx=opt_model['nx'], ny=opt_model['ny'], hiddenSize=opt_model['hiddenSize'],
                                   fillObs=True)
    elif opt_model['name'] == 'AnnModel':
        model = rnn.AnnCloseModel(nx=opt_model['nx'], ny=opt_model['ny'], hiddenSize=opt_model['hiddenSize'])
    elif opt_model['name'] == 'AnnCloseModel':
        model = rnn.AnnCloseModel(nx=opt_model['nx'], ny=opt_model['ny'], hiddenSize=opt_model['hiddenSize'],
                                  fillObs=True)
    model.load_state_dict(torch.load(model_file))
    testloader = create_datasets(data_model, train_mode=False)
    pred_list, obs_list = model_run.test_dataloader(model, testloader)
    pred = reduce(lambda x, y: np.vstack((x, y)), pred_list)
    obs = reduce(lambda x, y: np.vstack((x, y)), obs_list)
    stat_dict = data_model.stat_dict
    # denormalize to recover the original-scale data for testing
    pred = _trans_norm(pred, 'usgsFlow', stat_dict, to_norm=False)
    obs = _trans_norm(obs, 'usgsFlow', stat_dict, to_norm=False)
    return pred, obs
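
The if/elif chain above is a name-to-constructor dispatch keyed by opt_model['name']. A compact sketch of the same idea with a dictionary of toy constructors standing in for the rnn.* classes (the names and architectures below are placeholders, not the project's models):

from torch import nn

# toy stand-ins for rnn.CudnnLstmModel, rnn.AnnCloseModel, ...
MODEL_FACTORY = {
    'CudnnLstmModel': lambda nx, ny, hs: nn.LSTM(input_size=nx, hidden_size=hs),
    'AnnModel': lambda nx, ny, hs: nn.Sequential(nn.Linear(nx, hs), nn.ReLU(), nn.Linear(hs, ny)),
}

opt_model = {'name': 'AnnModel', 'nx': 7, 'ny': 1, 'hiddenSize': 64}
model = MODEL_FACTORY[opt_model['name']](opt_model['nx'], opt_model['ny'], opt_model['hiddenSize'])
print(type(model).__name__)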
Example 7
def get_data_ts(self, rm_nan=True, to_norm=True):
    stat_dict = self.stat_dict
    var_lst = self.data_source.all_configs.get("forcing_chosen")
    data = self.data_forcing
    data = _trans_norm(data, var_lst, stat_dict, to_norm=to_norm)
    if rm_nan is True:
        data[np.where(np.isnan(data))] = 0
    return data
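
get_data_ts normalizes the chosen forcing variables with _trans_norm and then zeroes out NaNs. A self-contained sketch of that step, assuming stat_dict holds a (mean, std) pair per variable; the real helper may use different statistics, so treat this as illustrative only.

import numpy as np

def trans_norm_sketch(data, var_lst, stat_dict, to_norm=True):
    # z-score (or its inverse) per variable along the last axis
    out = np.empty_like(data, dtype=float)
    for k, var in enumerate(var_lst):
        mean, std = stat_dict[var]
        if to_norm:
            out[:, :, k] = (data[:, :, k] - mean) / std
        else:
            out[:, :, k] = data[:, :, k] * std + mean
    return out

data = np.random.rand(3, 5, 2)          # (sites, time, variables)
data[0, 0, 0] = np.nan
stat_dict = {'prcp': (1.0, 0.5), 'tmax': (10.0, 3.0)}  # hypothetical statistics
normed = trans_norm_sketch(data, ['prcp', 'tmax'], stat_dict, to_norm=True)
normed[np.where(np.isnan(normed))] = 0  # same NaN fill as get_data_ts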
Example 8
def master_test_easier_lstm(dataset, load_epoch=-1):
    model_dict = dataset.data_model.data_source.data_config.model_dict
    batch_size, rho = model_dict['train']['miniBatch']

    # data
    testloader = DataLoader(dataset, batch_size=dataset.batch_size, shuffle=False)

    # model
    out_folder = model_dict['dir']['Out']
    opt_train = model_dict['train']
    if load_epoch < 0:
        load_epoch = opt_train['nEpoch']
    model_file = os.path.join(out_folder, 'model', 'model' + '_Ep' + str(load_epoch) + '.pt')
    model = torch.load(model_file)
    pred_list, obs_list = model_run.test_dataloader(model, testloader)
    pred = reduce(lambda x, y: np.vstack((x, y)), pred_list)
    obs = reduce(lambda x, y: np.vstack((x, y)), obs_list)
    stat_dict = dataset.data_model.stat_dict
    pred = _trans_norm(pred, 'usgsFlow', stat_dict, to_norm=False)
    obs = _trans_norm(obs, 'usgsFlow', stat_dict, to_norm=False)

    return pred, obs
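
A minimal sketch of the DataLoader-based evaluation above, with a toy dataset and linear model in place of the project's dataset and LSTM; the per-batch outputs are stacked with the same reduce/np.vstack pattern.

from functools import reduce
import numpy as np
import torch
from torch import nn
from torch.utils.data import DataLoader, TensorDataset

dataset = TensorDataset(torch.randn(16, 4), torch.randn(16, 1))
testloader = DataLoader(dataset, batch_size=4, shuffle=False)
model = nn.Linear(4, 1)  # stand-in for the trained LSTM

pred_list, obs_list = [], []
model.eval()
with torch.no_grad():
    for x, y in testloader:
        pred_list.append(model(x).numpy())
        obs_list.append(y.numpy())
pred = reduce(lambda a, b: np.vstack((a, b)), pred_list)
obs = reduce(lambda a, b: np.vstack((a, b)), obs_list)
print(pred.shape, obs.shape)  # (16, 1) (16, 1)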
Example 9
def master_test(data_model, epoch=-1, save_file_suffix=None):
    model_dict = data_model.data_source.data_config.model_dict
    opt_data = model_dict['data']
    opt_model = model_dict['model']
    # batch_size and rho are the same as those used during training
    batch_size, rho = model_dict['train']['miniBatch']

    x, obs, c = data_model.load_data(model_dict)

    # generate file names and run model
    out = model_dict['dir']['Out']
    t_range = data_model.t_s_dict["t_final_range"]
    if epoch < 0:
        epoch = model_dict['train']["nEpoch"]
    model_file = os.path.join(out, 'model_Ep' + str(epoch) + '.pt')
    file_path = name_pred(model_dict, out, t_range, epoch, suffix=save_file_suffix)
    print('output files:', file_path)
    if not os.path.isfile(model_file):
        model_file = os.path.join(out, 'checkpoint.pt')
        opt_model['nx'] = x.shape[-1] + c.shape[-1]
        opt_model['ny'] = obs.shape[-1]
        if opt_model['name'] == 'CudnnLstmModel':
            model = rnn.CudnnLstmModel(nx=opt_model['nx'], ny=opt_model['ny'], hidden_size=opt_model['hiddenSize'])
        elif opt_model['name'] == 'LstmCloseModel':
            model = rnn.LstmCloseModel(nx=opt_model['nx'], ny=opt_model['ny'], hiddenSize=opt_model['hiddenSize'],
                                       fillObs=True)
        elif opt_model['name'] == 'AnnModel':
            model = rnn.AnnCloseModel(nx=opt_model['nx'], ny=opt_model['ny'], hiddenSize=opt_model['hiddenSize'])
        elif opt_model['name'] == 'AnnCloseModel':
            model = rnn.AnnCloseModel(nx=opt_model['nx'], ny=opt_model['ny'], hiddenSize=opt_model['hiddenSize'],
                                      fillObs=True)
        model.load_state_dict(torch.load(model_file))
        model.eval()
        model_run.model_test_valid(model, x, c, file_path=file_path, batch_size=batch_size)
    else:
        # if no saved test results exist, rerun the test
        re_test = False
        if not os.path.isfile(file_path):
            re_test = True
        if re_test:
            print('Running new results')
            model = torch.load(model_file)
            model_run.model_test(model, x, c, file_path=file_path, batch_size=batch_size)
        else:
            print('Loaded previous results')

    # load the saved result and denormalize it
    data_pred = pd.read_csv(file_path, dtype=float, header=None).values
    is_sigma_x = False
    if model_dict['loss']['name'] == 'SigmaLoss':
        # SigmaLoss case (not used in practice): predictions and sigma are interleaved
        is_sigma_x = True
        pred = data_pred[:, :, ::2]
        sigma_x = data_pred[:, :, 1::2]
    else:
        # expand to 3-D format so that we can use _trans_norm for denormalization
        pred = np.expand_dims(data_pred, axis=2)
    if opt_data['doNorm'][1] is True:
        stat_dict = data_model.stat_dict
        pred = _trans_norm(pred, 'usgsFlow', stat_dict, to_norm=False)
        obs = _trans_norm(obs, 'usgsFlow', stat_dict, to_norm=False)

    if is_sigma_x is True:
        return pred, obs, sigma_x
    else:
        return pred, obs
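
master_test first looks for the per-epoch file model_Ep<epoch>.pt and falls back to checkpoint.pt, where only the weights are stored, so the network has to be rebuilt before load_state_dict. A stand-alone sketch of that fallback with a toy model and hypothetical paths:

import os
import torch
from torch import nn

out, epoch = 'demo_out', 300                       # hypothetical output dir and epoch
os.makedirs(out, exist_ok=True)
torch.save(nn.Linear(8, 1).state_dict(), os.path.join(out, 'checkpoint.pt'))

model_file = os.path.join(out, 'model_Ep' + str(epoch) + '.pt')
if not os.path.isfile(model_file):
    # weights-only checkpoint: rebuild the architecture, then load the state dict
    model = nn.Linear(8, 1)                        # must match the saved architecture
    model.load_state_dict(torch.load(os.path.join(out, 'checkpoint.pt')))
    model.eval()
else:
    model = torch.load(model_file)                 # whole-module checkpoint
print('loaded model:', type(model).__name__)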