Example #1
def dataloader_all_sensors_seq2seq(setting):
    # Split the configured dataset and fit the scaler on the training split only.
    train, eval, test = dataloader(setting['dataset'])
    scaler = utils.Scaler(train)
    # Evaluation and test iterators are not shuffled.
    return dataiter_all_sensors_seq2seq(train, scaler, setting), \
        dataiter_all_sensors_seq2seq(eval, scaler, setting, shuffle=False), \
        dataiter_all_sensors_seq2seq(test, scaler, setting, shuffle=False), \
        scaler
Example #2
def dataloader_all_sensors_seq2seq(setting):
    train, eval, test = dataloader(setting['dataset'])  # each split shaped [T, N, D]
    scaler = utils.Scaler(train)

    # Same as the variant above, except eval/test iterators receive an offset so
    # their sample indices start where the preceding split(s) end on the time axis.
    return dataiter_all_sensors_seq2seq(train, scaler, setting), \
        dataiter_all_sensors_seq2seq(eval, scaler, setting, shuffle=False, offset=train.shape[0]), \
        dataiter_all_sensors_seq2seq(test, scaler, setting, shuffle=False, offset=train.shape[0]+eval.shape[0]), \
        scaler
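
Both variants above share the same call pattern. The sketch below shows how they would typically be driven; it is a minimal, hypothetical usage example, not part of the original project. Only the 'dataset' key is taken from the code above; the placeholder value and any other keys that dataiter_all_sensors_seq2seq might read from `setting` are assumptions.

# Hypothetical usage sketch for dataloader_all_sensors_seq2seq.
setting = {
    'dataset': 'path/to/dataset',  # assumed value; only the key name comes from the example
}

train_iter, eval_iter, test_iter, scaler = dataloader_all_sensors_seq2seq(setting)

for batch in train_iter:
    # The exact batch structure depends on dataiter_all_sensors_seq2seq;
    # scaled input/target windows would be fed to the seq2seq model here.
    pass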
Example #3
def dataloader_flow(settings):
    # Only the 'training' section of the config is used here.
    settings = settings['training']

    train, eval, test = load_flow()
    # Fit the scaler on the training split only.
    scaler = utils.Scaler()
    scaler.fit(train)

    # Training draws random batches for a fixed number of iterations per epoch;
    # eval and test use the plain 'batch' sampler.
    flow_train = create_dataset_flow(
        train,
        scaler,
        sampler_type='random',
        batch_size=settings['flow_batch_size'],
        iterations_per_epoch=settings['iterations_per_epoch'])
    flow_eval = create_dataset_flow(eval,
                                    scaler,
                                    sampler_type='batch',
                                    batch_size=settings['flow_batch_size'])
    flow_test = create_dataset_flow(test,
                                    scaler,
                                    sampler_type='batch',
                                    batch_size=settings['flow_batch_size'])

    return flow_train, flow_eval, flow_test, scaler
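
A usage sketch for dataloader_flow follows. The key names under 'training' are the ones the function reads above; the numeric values and the surrounding driver code are assumptions for illustration only.

# Hypothetical usage sketch for dataloader_flow.
settings = {
    'training': {
        'flow_batch_size': 64,        # assumed batch size
        'iterations_per_epoch': 200,  # assumed number of random batches per training epoch
    }
}

flow_train, flow_eval, flow_test, scaler = dataloader_flow(settings)
# flow_train yields random batches each epoch; flow_eval and flow_test iterate
# over their splits in fixed batches, all normalized with the shared scaler.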