"""
import tensorflow as tf
import determined as det
from determined import experimental
from determined.experimental.keras import init

config = {
    "searcher": {
        "name": "adaptive_simple",
        "metric": "val_loss",
        "max_steps": 5,
        "max_trials": 5
    },
    "hyperparameters": {
        "num_units": det.Integer(64, 256),
        "dropout": det.Double(0.0, 0.5),
        "activation": det.Categorical(["relu", "tanh", "sigmoid"]),
        "global_batch_size": 32,
    },
}

###############################################################################
#
# First, configure the ``hyperparameters`` field in the experiment
# configuration to set up a search space of hyperparameters.
# :py:func:`determined.Integer`, :py:func:`determined.Double`,
# :py:func:`determined.Categorical`, and :py:func:`determined.Log` are utility
# functions for specifying distributions.
#
# Next, use the ``searcher`` field to configure the desired hyperparameter
# search algorithm. In this case, we're configuring an ``adaptive_simple``
# search that evaluates up to five trials.
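
The searcher samples one value from each of these ranges for every trial it
creates. As a minimal sketch (not part of the tutorial file above), the sampled
values would typically be read back through the Determined context when the
model is built; ``build_model`` here is a hypothetical helper, and it assumes
the context exposes ``get_hparam()``, as Determined trial contexts do.

def build_model(context):
    # Each get_hparam() call returns the value the searcher sampled for this
    # trial from the ranges declared in ``hyperparameters`` above.
    model = tf.keras.Sequential([
        tf.keras.layers.Flatten(input_shape=(28, 28)),
        tf.keras.layers.Dense(
            context.get_hparam("num_units"),
            activation=context.get_hparam("activation"),
        ),
        tf.keras.layers.Dropout(context.get_hparam("dropout")),
        tf.keras.layers.Dense(10, activation="softmax"),
    ])
    model.compile(
        optimizer="adam",
        loss="sparse_categorical_crossentropy",
        metrics=["accuracy"],
    )
    return model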
Example 2
    parser.add_argument("--mode",
                        dest="mode",
                        help="Specifies local mode or cluster mode.",
                        default="cluster")
    parser.add_argument(
        "--use-fit",
        action="store_true",
        help="If true, uses model.fit() instead of model.fit_generator()",
    )
    args = parser.parse_args()

    config = {
        "hyperparameters": {
            "global_batch_size": det.Constant(value=32),
            "kernel_size": det.Constant(value=3),
            "dropout": det.Double(minval=0.0, maxval=0.5),
            "activation": det.Constant(value="relu"),
        },
        "searcher": {
            "name": "single",
            "metric": "val_accuracy",
            "max_steps": 40
        },
    }
    config.update(json.loads(args.config))

    context = keras.init(config,
                         mode=experimental.Mode(args.mode),
                         context_dir=str(pathlib.Path.cwd()))

    (x_train, y_train), (x_test, y_test) = load_mnist_data()
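
The excerpt stops after loading the data. For orientation only, here is a
hedged sketch of how such a script might continue, using nothing but standard
``tf.keras`` calls: a small convolutional model matching the constant
hyperparameters above (kernel size 3, ``relu`` activation, batch size 32)
trained with ``model.fit()``. It assumes ``load_mnist_data()`` returns the
usual 28x28 MNIST arrays; the Determined-specific use of the returned
``context`` is not shown in the excerpt and is omitted here as well.

import tensorflow as tf

x_train = x_train.reshape((-1, 28, 28, 1)).astype("float32") / 255.0
x_test = x_test.reshape((-1, 28, 28, 1)).astype("float32") / 255.0

model = tf.keras.Sequential([
    tf.keras.layers.Conv2D(32, kernel_size=3, activation="relu",
                           input_shape=(28, 28, 1)),
    tf.keras.layers.MaxPooling2D(),
    tf.keras.layers.Flatten(),
    tf.keras.layers.Dropout(0.25),  # one fixed value from the 0.0-0.5 range
    tf.keras.layers.Dense(10, activation="softmax"),
])
model.compile(optimizer="adam",
              loss="sparse_categorical_crossentropy",
              metrics=["accuracy"])
# With metrics=["accuracy"], Keras logs validation accuracy as "val_accuracy",
# which matches the searcher metric configured above.
model.fit(x_train, y_train, batch_size=32, epochs=1,
          validation_data=(x_test, y_test))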
Example 3
        default="{}",
    )
    parser.add_argument("--mode",
                        dest="mode",
                        help="Specifies local mode or cluster mode.",
                        default="cluster")
    args = parser.parse_args()

    config = {
        "data": {
            "url":
            "https://s3-us-west-2.amazonaws.com/determined-ai-test-data/pytorch_mnist.tar.gz"
        },
        "hyperparameters": {
            "learning_rate": det.Log(minval=-3.0, maxval=-1.0, base=10),
            "dropout": det.Double(minval=0.2, maxval=0.8),
            "global_batch_size": det.Constant(value=64),
            "n_filters1": det.Constant(value=32),
            "n_filters2": det.Constant(value=32),
        },
        "searcher": {
            "name": "single",
            "metric": "validation_error",
            "max_steps": 20,
            "smaller_is_better": True,
        },
    }
    config.update(json.loads(args.config))

    experimental.create(
        trial_def=model_def.MNistTrial,
Example 4
                        action="store_true",
                        help="Specifies local mode")
    parser.add_argument("--test",
                        action="store_true",
                        help="Specifies test mode")
    args = parser.parse_args()

    config = {
        "data": {
            "url":
            "https://s3-us-west-2.amazonaws.com/determined-ai-test-data/pytorch_mnist.tar.gz"
        },
        "hyperparameters": {
            "learning_rate": det.Log(minval=-3.0, maxval=-1.0, base=10),
            "global_batch_size": det.Constant(value=64),
            "dropout1": det.Double(minval=0.2, maxval=0.8),
            "dropout2": det.Double(minval=0.2, maxval=0.8),
            "n_filters1": det.Constant(value=32),
            "n_filters2": det.Constant(value=32),
        },
        "searcher": {
            "name": "single",
            "metric": "validation_loss",
            "max_length": {
                "batches": 2000,
            },
            "smaller_is_better": True,
        },
    }
    config.update(json.loads(args.config))
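
All of these scripts fold command-line overrides into the base configuration
with ``config.update(json.loads(args.config))``. Note that ``dict.update`` is a
shallow merge: a top-level key passed via ``--config`` replaces the entire
corresponding section rather than being merged into it. A small, self-contained
illustration with a hypothetical override:

import json

base = {
    "searcher": {
        "name": "single",
        "metric": "validation_loss",
        "max_length": {"batches": 2000},
        "smaller_is_better": True,
    },
}
override = json.loads('{"searcher": {"max_length": {"batches": 500}}}')
base.update(override)
# The whole "searcher" section has been replaced, so "name", "metric", and
# "smaller_is_better" are gone:
print(base["searcher"])  # {'max_length': {'batches': 500}}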