Ejemplo n.º 1
0
        for i, (env, env_weights) in enumerate(in_splits)
        if i not in args.test_envs
    ]

    # Unsupervised-domain-adaptation loaders: one infinite loader per UDA
    # split whose environment index is among the held-out test environments.
    uda_loaders = []
    for env_idx, (split, split_w) in enumerate(uda_splits):
        if env_idx not in args.test_envs:
            continue
        uda_loaders.append(
            InfiniteDataLoader(dataset=split,
                               weights=split_w,
                               batch_size=hparams['batch_size'],
                               num_workers=dataset.N_WORKERS))

    # Finite evaluation loaders over every split (in, out, and UDA),
    # evaluated with a fixed batch size of 64.
    all_eval_splits = in_splits + out_splits + uda_splits
    eval_loaders = []
    for split, _ in all_eval_splits:
        eval_loaders.append(
            FastDataLoader(dataset=split,
                           batch_size=64,
                           num_workers=dataset.N_WORKERS))
    # One placeholder (no per-sample weights) per evaluation split, kept in
    # lockstep with eval_loaders. The original comprehension unpacked a
    # tuple it never used; list multiplication is the direct equivalent.
    eval_weights = [None] * (len(in_splits) + len(out_splits) +
                             len(uda_splits))
    # Human-readable names for each eval loader, in the same order the
    # loaders were built: in-splits, then out-splits, then UDA splits.
    eval_loader_names = [f'env{i}_in' for i in range(len(in_splits))]
    eval_loader_names += [f'env{i}_out' for i in range(len(out_splits))]
    eval_loader_names += [f'env{i}_uda' for i in range(len(uda_splits))]

    algorithm_class = algorithms.get_algorithm_class(args.algorithm)
Ejemplo n.º 2
0
        for i, (env, env_weights) in enumerate(in_splits)
        if i not in args.test_envs
    ]

    # Build one infinite adaptation loader per UDA split that belongs to a
    # held-out test environment.
    uda_loaders = []
    for idx, (split, split_w) in enumerate(uda_splits):
        if idx in args.test_envs:
            uda_loaders.append(
                InfiniteDataLoader(dataset=split,
                                   weights=split_w,
                                   batch_size=hparams['batch_size'],
                                   num_workers=dataset.N_WORKERS))

    # Finite evaluation loaders over every split, reusing the training
    # batch size.
    eval_loaders = []
    for split, _ in in_splits + out_splits + uda_splits:
        eval_loaders.append(
            FastDataLoader(dataset=split,
                           batch_size=hparams['batch_size'],
                           num_workers=dataset.N_WORKERS))
    # Choose which splits drive meta-validation: the in-data validation
    # splits when the training data was itself split, otherwise the
    # held-out splits.
    invenio_mata_out_splits = in_val_splits if split_indata else out_splits
    # Infinite validation loaders over the chosen splits, skipping every
    # test environment.
    val_loaders_invenio = []
    for idx, (split, split_w) in enumerate(invenio_mata_out_splits):
        if idx in args.test_envs:
            continue
        val_loaders_invenio.append(
            InfiniteDataLoader(dataset=split,
                               weights=split_w,
                               batch_size=hparams['batch_size'],
                               num_workers=dataset.N_WORKERS))
Ejemplo n.º 3
0
         in_weights, out_weights = None, None
     in_splits.append((in_, in_weights))
     out_splits.append((out, out_weights))
 # NOTE(review): these overwrite whatever hyperparameters were passed in —
 # batch size and weight decay are hard-coded here; confirm this is
 # intentional and not leftover debugging.
 hparams['batch_size'] = 24
 hparams['weight_decay'] = 1e-6
 # One infinite training loader per non-test environment split.
 train_loaders = []
 for idx, (split, split_w) in enumerate(in_splits):
     if idx in args.test_envs:
         continue
     train_loaders.append(
         InfiniteDataLoader(dataset=split,
                            weights=split_w,
                            batch_size=hparams['batch_size'],
                            num_workers=train_dataset.N_WORKERS))
 # Finite evaluation loaders over the held-out splits; a 4x batch size is
 # used since evaluation needs no gradients.
 eval_loaders = [
     FastDataLoader(dataset=split,
                    batch_size=hparams['batch_size'] * 4,
                    num_workers=val_dataset.N_WORKERS)
     for split, _ in out_splits
 ]
 eval_weights = [None for _, weights in (out_splits)]
 eval_loader_names = ['env{}_out'.format(i) for i in range(len(out_splits))]
 # Finite loaders over the training splits, used to evaluate on the
 # training distribution itself.
 train_eval_loaders = []
 for split, _ in in_splits:
     train_eval_loaders.append(
         FastDataLoader(dataset=split,
                        batch_size=hparams['batch_size'] * 4,
                        num_workers=train_dataset.N_WORKERS))
 train_eval_weights = [None for _, weights in (in_splits)]
 # Names matching train_eval_loaders, one per training split.
 train_eval_loader_names = [f'env{i}_in' for i in range(len(in_splits))]
Ejemplo n.º 4
0
    # Split every environment into a held-out portion and a training
    # portion, optionally attaching class-balancing weights to each.
    for idx, environment in enumerate(dataset):
        holdout_size = int(len(environment) * args.holdout_fraction)
        split_seed = misc.seed_hash(args.trial_seed, idx)
        out, in_ = misc.split_dataset(environment, holdout_size, split_seed)
        if hparams['class_balanced']:
            in_w = misc.make_weights_for_balanced_classes(in_)
            out_w = misc.make_weights_for_balanced_classes(out)
        else:
            in_w = out_w = None
        in_splits.append((in_, in_w))
        out_splits.append((out, out_w))

    # Infinite-length training loaders for every non-test environment.
    train_loaders = []
    for idx, (split, split_w) in enumerate(in_splits):
        if idx in args.test_envs:
            continue
        train_loaders.append(
            FastDataLoader(dataset=split,
                           weights=split_w,
                           batch_size=hparams['batch_size'],
                           num_workers=dataset.N_WORKERS,
                           length=FastDataLoader.INFINITE))

    # Single-epoch, unweighted evaluation loaders over all in/out splits.
    eval_loaders = []
    for split, _ in in_splits + out_splits:
        eval_loaders.append(
            FastDataLoader(dataset=split,
                           weights=None,
                           batch_size=64,
                           num_workers=dataset.N_WORKERS,
                           length=FastDataLoader.EPOCH))
    eval_weights = [None for _, weights in (in_splits + out_splits)]
Ejemplo n.º 5
0
     else:
         in_weights, out_weights = None, None
     # Record this environment's position if it is one of the targets.
     if env_i in args.target_envs:
         target_envs.append(len(in_splits))
     in_splits.append((in_, in_weights))
     out_splits.append((out, out_weights))
     # Infinite loader over the training portion of this source env.
     train_loaders.append(
         InfiniteDataLoader(dataset=in_,
                            weights=in_weights,
                            batch_size=hparams['batch_size'],
                            num_workers=dataset.N_WORKERS))
     # Finite loaders evaluating both the train and held-out portions.
     eval_loaders += [
         FastDataLoader(dataset=in_,
                        batch_size=64,
                        num_workers=dataset.N_WORKERS),
         FastDataLoader(dataset=out,
                        batch_size=64,
                        num_workers=dataset.N_WORKERS)
     ]
     # NOTE(review): len(in_splits) is read AFTER the append above, so the
     # names here are 1-based — confirm downstream reporting expects this.
     eval_loader_names += [
         'source_tr{0}'.format(len(in_splits)),
         'source_te{0}'.format(len(in_splits))
     ]
     eval_weights += [None, None]
 else:
     out_splits.append((env, None))
     eval_loaders.append(
         FastDataLoader(dataset=env,
                        batch_size=64,