Example #1
# Imports and CLI setup reconstructed: the original listing began mid-way
# through the second add_argument call; args.config and args.mode are used below.
import argparse
import json
import pathlib

import determined as det
from determined import experimental

import model_def  # local module defining the MNistTrial class used below

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--config", dest="config", default="{}")
    parser.add_argument("--mode", dest="mode", default="cluster")
    args = parser.parse_args()

    config = {
        "data": {
            "url":
            "https://s3-us-west-2.amazonaws.com/determined-ai-test-data/pytorch_mnist.tar.gz"
        },
        "hyperparameters": {
            "learning_rate": det.Log(minval=-3.0, maxval=-1.0, base=10),
            "dropout": det.Double(minval=0.2, maxval=0.8),
            "global_batch_size": det.Constant(value=64),
            "n_filters1": det.Constant(value=32),
            "n_filters2": det.Constant(value=32),
        },
        "searcher": {
            "name": "single",
            "metric": "validation_error",
            "max_steps": 20,
            "smaller_is_better": True,
        },
    }
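    # Merge JSON overrides passed via --config. Note that dict.update is a
    # shallow merge: an override's top-level key replaces the whole default
    # entry (see the sketch after this listing).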
    config.update(json.loads(args.config))

    experimental.create(
        trial_def=model_def.MNistTrial,
        config=config,
        mode=experimental.Mode(args.mode),
        context_dir=str(pathlib.Path.cwd()),
    )
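
Because dict.update performs a shallow merge, a JSON document passed through
--config replaces any top-level key wholesale. A minimal sketch of that
behavior, with hypothetical override values:

import json

defaults = {"searcher": {"name": "single", "metric": "validation_error", "max_steps": 20}}
override = json.loads('{"searcher": {"max_steps": 5}}')
defaults.update(override)
print(defaults["searcher"])
# {'max_steps': 5} -- "name" and "metric" were dropped, so an override
# passed via --config must restate the entire "searcher" section.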

Example #2

# Imports reconstructed for this snippet; `estimator` is assumed to be
# Determined's Native-API estimator module, and build_estimator,
# build_serving_input_receiver_fns, and xor_input_fn are helpers defined
# elsewhere in the example file.
import argparse
import pathlib

import tensorflow as tf

from determined import experimental
from determined.experimental import estimator

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--mode", dest="mode", default="cluster")
    args = parser.parse_args()

    config = {
        "hyperparameters": {
            "hidden_size": 2,
            "learning_rate": 0.1,
            "global_batch_size": 4,
            "optimizer": "sgd",
            "shuffle": False,
        }
    }

    context = estimator.init(
        config=config, mode=experimental.Mode(args.mode), context_dir=str(pathlib.Path.cwd())
    )

    # The "global_batch_size" hyperparameter (4) is divided across the trial's
    # slots; get_per_slot_batch_size() returns this process's share.
    batch_size = context.get_per_slot_batch_size()
    shuffle = context.get_hparam("shuffle")
    # Expose serving input receivers so the trained Estimator can be exported.
    context.serving_input_receiver_fns = build_serving_input_receiver_fns()

    # Train for a single step, then evaluate on the unshuffled XOR data.
    context.train_and_evaluate(
        build_estimator(context),
        tf.estimator.TrainSpec(
            xor_input_fn(context=context, batch_size=batch_size, shuffle=shuffle), max_steps=1
        ),
        tf.estimator.EvalSpec(xor_input_fn(context=context, batch_size=batch_size, shuffle=False)),
    )
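
Both specs above take a zero-argument input_fn, so xor_input_fn must return a
closure that builds the dataset. A plausible sketch, assuming Determined's
wrap_dataset hook and an "input" feature key; the example's real
implementation may differ:

import numpy as np
import tensorflow as tf

def xor_input_fn(context, batch_size, shuffle=False):
    def _input_fn():
        # The four XOR input/label pairs.
        data = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=np.float32)
        labels = np.array([0, 1, 1, 0], dtype=np.int64)
        dataset = tf.data.Dataset.from_tensor_slices(({"input": data}, labels))
        dataset = context.wrap_dataset(dataset)  # assumed Determined dataset hook
        if shuffle:
            dataset = dataset.shuffle(buffer_size=4)
        return dataset.batch(batch_size).repeat()
    return _input_fn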