Code example #1
0
File: torch_mod.py  Project: kevinbache/tablestakes
 def search_default(cls) -> "FullyConv1Resnet.ModelParams":
     """Build the default hyperparameter search space for this model.

     Returns a ModelParams instance whose fields are search distributions
     (Discrete / Integer / Categorical / Boolean) rather than fixed values.
     """
     search_space = dict(
         num_groups=params.Discrete([8, 16, 32, 64]),
         num_blocks_per_residual=params.Integer(1, 5),
         activation=params.Categorical([nn.LeakyReLU, nn.GELU]),
         do_include_first_norm=params.Boolean(p_true=0.8),
     )
     return cls(**search_space)
Code example #2
0
 class ModelHyperParams(params.ParameterSet):
     """Hyperparameter search space (plus fixed settings) for the model.

     Class attributes holding `params.*` objects are search distributions;
     plain values (output_dir, filter_size) are fixed configuration.
     """
     num_hidden_layers = params.Integer(1, 4)
     # Powers of two from 2**2 to 2**7: [4, 8, 16, 32, 64, 128].
     # NOTE: was dtype=np.int — that alias was removed in NumPy 1.24
     # (AttributeError on modern NumPy); the builtin int is the exact
     # replacement.
     num_neurons_per_layer = params.Discrete(
         np.logspace(2, 7, num=6, base=2, dtype=int))
     dropout_rate = params.Float(0.0, 0.99)
     activation = params.Categorical(['relu', 'sigmoid'])
     output_dir = '/tmp/output'
     filter_size = 3
Code example #3
0
        max_seq_len=2**11,
        batch_size=32,
    )

    # --- data: dataset selection and loader/resource settings ---------------
    # neck_hp.data.dataset_name = 'num=1000_02b7'
    hp.data.dataset_name = 'num=4000_9b9f'
    hp.data.do_ignore_cached_dataset = False
    hp.data.seed = 42
    hp.data.num_workers = 0  # 0 = load data in the main process
    hp.data.num_gpus = 1
    hp.data.num_cpus = 4

    # --- opt: search objective and training schedule -------------------------
    hp.opt.search_metric = 'valid_loss_total'
    hp.opt.search_mode = 'min'  # minimize the metric above
    hp.opt.num_epochs = 100
    # Learning rate is itself a search dimension (log-spaced choices).
    hp.opt.lr = params.Discrete([1e-4, 1e-3, 1e-2, 1e-1])
    hp.opt.min_lr = 1e-6
    hp.opt.patience = 16  # presumably early-stopping patience in epochs — confirm

    # --- exp: experiment-tracking metadata -----------------------------------
    hp.exp.project_name = 'tablestakes'
    hp.exp.experiment_name = 'korv_which'
    hp.exp.experiment_tags = [
        'korv_which', 'conv', 'sharp', 'search', 'v0.0.1'
    ]
    # Glob (as a Path) matching the project's .py sources, e.g. for snapshotting.
    hp.exp.sources_glob_str = constants.THIS_DIR.parent.parent / '**/*.py'

    # --- embed: token embedding settings --------------------------------------
    hp.embed.dim = 16
    hp.embed.requires_grad = True  # embeddings are trainable

    # --- conv: convolutional trunk search dimensions --------------------------
    hp.conv.num_features = params.Discrete([32, 64, 128, 256])
    hp.conv.num_layers = params.Integer(2, 11)
Code example #4
0
    # Pass the container image and bucket to the build script via env vars.
    env['IMAGE_URI'] = CONTAINER_IMAGE_URI
    env['BUCKET_NAME'] = GCLOUD_BUCKET_NAME

    # Create a Cloud AI Platform Hyperparameter Search object.
    # NOTE(review): this rebinds the name `search` (presumably the imported
    # module) to the spec instance; later `search.*` calls hit the object and
    # the module becomes unreachable under that name in this scope — works,
    # but a distinct variable name would be clearer.
    search = search.HyperparamSearchSpec(
        max_trials=10,
        max_parallel_trials=5,
        max_failed_trials=2,
        hyperparameter_metric_tag='val_acc',  # metric the trials report back
    )

    # Add parameter search ranges for this problem.
    my_param_ranges = train.MyParams(
        activation=params.Categorical(['relu', 'tanh']),
        num_layers=params.Integer(min_value=1, max_value=3),
        # Powers of two from 2**2 to 2**8: [4, 8, ..., 256].
        num_neurons=params.Discrete(np.logspace(2, 8, num=7, base=2)),
        # NOTE(review): min_value=-0.1 for a dropout rate looks suspicious
        # (negative dropout) — confirm whether a negative floor is intended
        # as a "no dropout" sentinel or should be 0.0.
        dropout_rate=params.Double(min_value=-0.1, max_value=0.9),
        # 17 log-spaced values from 1e-6 to 1e2.
        learning_rate=params.Discrete(np.logspace(-6, 2, 17, base=10)),
        batch_size=params.Integer(min_value=1, max_value=128),
    )
    search.add_parameters(my_param_ranges)

    # Call a bash script to build a docker image for this repo, submit it to the docker registry defined in the script
    # and run a training job on the Cloud AI Platform using this container and these hyperparameter ranges.
    this_dir = Path(__file__).resolve().parent
    retcode = subprocess.call([this_dir / 'build_push.sh'], env=env)
    if retcode:
        # Non-zero return code means the build/push script failed.
        raise ValueError(f"Got returncode: {retcode}")
    search.run_from_container(
        gcloud_project_name=GCLOUD_PROJECT_NAME,
Code example #5
0
File: tune_runner.py  Project: kevinbache/tablestakes
        # Dump the best trial's final reported metrics.
        utils.print_dict(best_trial.last_result)

        # Print a section header, then the hyperparameter config that won.
        utils.hprint('best_trial.config', do_include_pre_break_line=True)
        utils.print_dict(best_trial.config)


if __name__ == '__main__':
    # Presumably a Lightning-style quick-smoke-test switch consumed further
    # down — confirm against the rest of the script.
    fast_dev_run = False

    # Data params first, then folded into the full hyperparameter object.
    dp = data_module.DataParams(
        # dataset_name='num=100_057b',
        dataset_name='num=1000_2cfc', )
    hp = ts_model.TotalParams(
        data=dp,
        max_seq_len=8192,
        # Batch size is a search dimension (powers of two).
        batch_size=params.Discrete([4, 8, 16, 32, 64, 128, 256]),
    )

    # --- data: loader/resource settings --------------------------------------
    hp.data.do_ignore_cached_dataset = False
    hp.data.seed = 42
    hp.data.num_workers = 4
    hp.data.num_gpus = 1
    hp.data.num_cpus = 4

    # --- opt: search objective and training schedule --------------------------
    hp.opt.search_metric = 'valid/loss'
    hp.opt.search_mode = 'min'  # minimize the metric above
    hp.opt.num_epochs = 4
    # Learning rate search choices, log-spaced from 1e-5 to 1e-1.
    hp.opt.lr = params.Discrete(
        [1e-5, 3e-5, 1e-4, 3e-4, 1e-3, 3e-3, 1e-2, 3e-2, 1e-1])
    hp.opt.patience = 10  # presumably early-stopping patience — confirm