Exemplo n.º 1
0
                        default="cluster")
    args = parser.parse_args()

    # Default experiment configuration for the PyTorch MNIST example;
    # keys supplied via the --config JSON argument override these below.
    config = {
        "data": {
            "url":
            "https://s3-us-west-2.amazonaws.com/determined-ai-test-data/pytorch_mnist.tar.gz"
        },
        "hyperparameters": {
            # Log-scale search with base-10 exponents in [-3, -1]
            # (presumably spanning 1e-3 .. 1e-1 -- confirm against det API).
            "learning_rate": det.Log(minval=-3.0, maxval=-1.0, base=10),
            "dropout": det.Double(minval=0.2, maxval=0.8),
            "global_batch_size": det.Constant(value=64),
            "n_filters1": det.Constant(value=32),
            "n_filters2": det.Constant(value=32),
        },
        "searcher": {
            # Single-trial search judged on validation_error (lower is better).
            "name": "single",
            "metric": "validation_error",
            "max_steps": 20,
            "smaller_is_better": True,
        },
    }
    # Merge user-supplied JSON overrides on top of the defaults.
    config.update(json.loads(args.config))

    # Submit the experiment, uploading the current directory as its context.
    experimental.create(
        trial_def=model_def.MNistTrial,
        config=config,
        mode=experimental.Mode(args.mode),
        context_dir=str(pathlib.Path.cwd()),
    )
        )
        return model

    def build_training_data_loader(self):
        """Provide a dummy training pair: one zero input and one zero label."""
        features = np.zeros(1)
        labels = np.zeros(1)
        return features, labels

    def build_validation_data_loader(self):
        """Provide a dummy validation pair: one zero input and one zero label."""
        features = np.zeros(1)
        labels = np.zeros(1)
        return features, labels


if __name__ == "__main__":
    # Minimal experiment configuration for the runtime-error trial.
    experiment_config = {
        "description": "keras_runtime_error",
        "hyperparameters": {
            "global_batch_size": det.Constant(1)
        },
        "searcher": {
            "metric": "accuracy"
        },
        "data_layer": {
            "type": "lfs",
            "container_storage_path": "/tmp"
        },
    }
    # Run locally in test mode, using the current directory as context.
    experimental.create(
        trial_def=RuntimeErrorTrial,
        config=experiment_config,
        local=True,
        test=True,
        context_dir=str(pathlib.Path.cwd()),
    )
Exemplo n.º 3
0
    # Public S3 archive of the PennFudan pedestrian detection dataset.
    dataset_url = (
        "https://determined-ai-public-datasets.s3-us-west-2.amazonaws.com/"
        "PennFudanPed/PennFudanPed.zip")
    # Default experiment configuration; --config JSON overrides it below.
    config = {
        "data": {
            "url": dataset_url
        },
        "hyperparameters": {
            # All hyperparameters are fixed (no search space) in this example.
            "learning_rate": det.Constant(value=0.005),
            "momentum": det.Constant(value=0.9),
            "weight_decay": det.Constant(value=0.0005),
            "global_batch_size": det.Constant(value=2),
        },
        "batches_per_step": 1,
        "searcher": {
            # Single-trial search; higher val_avg_iou is better.
            "name": "single",
            "metric": "val_avg_iou",
            "max_steps": 16,
            "smaller_is_better": False,
        },
    }
    # NOTE(review): `args` is parsed earlier, outside this excerpt.
    config.update(json.loads(args.config))

    # Submit the object-detection experiment from the current directory.
    experimental.create(
        trial_def=model_def.ObjectDetectionTrial,
        config=config,
        local=args.local,
        test=args.test,
        context_dir=str(pathlib.Path.cwd()),
    )
Exemplo n.º 4
0
        default="{}",
    )
    # Flags selecting how the experiment is executed.
    parser.add_argument("--local",
                        action="store_true",
                        help="Specifies local mode")
    parser.add_argument("--test",
                        action="store_true",
                        help="Specifies test mode")
    args = parser.parse_args()

    # Default configuration for the Fashion-MNIST example; the --config
    # JSON argument overrides these keys below.
    config = {
        "hyperparameters": {
            "global_batch_size": det.Constant(value=32),
            "dense1": det.Constant(value=128),
        },
        "searcher": {
            # Single-trial search evaluated on val_accuracy.
            "name": "single",
            "metric": "val_accuracy",
            "max_steps": 40
        },
    }
    # Merge user-supplied JSON overrides on top of the defaults.
    config.update(json.loads(args.config))

    # Submit the experiment, uploading the current directory as its context.
    experimental.create(
        trial_def=model_def.FashionMNISTTrial,
        config=config,
        local=args.local,
        test=args.test,
        context_dir=str(pathlib.Path.cwd()),
    )
Exemplo n.º 5
0
            },
            "smaller_is_better": True,
        },
        # Data-loading settings for the MRPC task with a BERT base model.
        "data": {
            "data_dir": "/tmp/data",
            "task": "MRPC",
            "model_name_or_path": "bert-base-uncased",
            "output_mode": "classification",
            "path_to_mrpc": "",
            "download_data": True,
        },
        # Fixed (non-searched) hyperparameters for fine-tuning.
        "hyperparameters": {
            "global_batch_size": det.Constant(value=24),
            "model_type": det.Constant(value="bert"),
            "learning_rate": det.Constant(value=0.00002),
            "lr_scheduler_epoch_freq": det.Constant(value=1),
            "adam_epsilon": det.Constant(value=1e-8),
            "weight_decay": det.Constant(value=0),
            "num_warmup_steps": det.Constant(value=0),
            "num_training_steps": det.Constant(value=459),
            "max_seq_length": det.Constant(value=128),
        },
    }

    # Submit the BERT experiment; NOTE(review): `args` is parsed earlier,
    # outside this excerpt.
    experimental.create(
        trial_def=model_def.BertPytorch,
        mode=experimental.Mode(args.mode),
        context_dir=str(pathlib.Path.cwd()),
        config=config,
    )
Exemplo n.º 6
0
        return {"val_loss": loss}

    def build_training_data_loader(self) -> pytorch.DataLoader:
        """Serve the synthetic ones dataset for training."""
        dataset = OnesDataset()
        per_slot = self.context.get_per_slot_batch_size()
        return pytorch.DataLoader(dataset, batch_size=per_slot)

    def build_validation_data_loader(self) -> pytorch.DataLoader:
        """Serve the synthetic ones dataset for validation."""
        dataset = OnesDataset()
        per_slot = self.context.get_per_slot_batch_size()
        return pytorch.DataLoader(dataset, batch_size=per_slot)


if __name__ == "__main__":
    # Experiment configuration expressed as inline YAML.
    experiment_conf = yaml.safe_load("""
    description: test-native-api-local-test-mode
    hyperparameters:
      global_batch_size: 32
    scheduling_unit: 1
    searcher:
      name: single
      metric: val_loss
      max_length:
        batches: 1
      smaller_is_better: true
    max_restarts: 0
    """)
    # Execute a quick local test run of OneVarTrial from this directory.
    experimental.create(
        OneVarTrial,
        experiment_conf,
        context_dir=".",
        local=True,
        test=True,
    )
Exemplo n.º 7
0
    def build_training_data_loader(self):
        """Build the training loader over the synthetic ones dataset."""
        dataset = OnesDataset()
        per_slot = self.context.get_per_slot_batch_size()
        return pytorch.DataLoader(dataset, batch_size=per_slot)

    def build_validation_data_loader(self):
        """Build the validation loader over the synthetic ones dataset."""
        dataset = OnesDataset()
        per_slot = self.context.get_per_slot_batch_size()
        return pytorch.DataLoader(dataset, batch_size=per_slot)


if __name__ == "__main__":
    # Parse the experiment configuration from inline YAML.
    experiment_conf = yaml.safe_load("""
    description: noop-pytorch-native-api
    data:
      model_type: single_output
    hyperparameters:
      global_batch_size: 32
    scheduling_unit: 1
    searcher:
      name: single
      metric: validation_error
      max_length:
        batches: 3
      smaller_is_better: true
    max_restarts: 0
    min_checkpoint_period:
      batches: 1
    min_validation_period:
      batches: 1
    """)
    # Submit the no-op PyTorch trial using this directory as the context.
    experimental.create(NoopPytorchTrial, experiment_conf, context_dir=".")