Example #1
def f(trial: optuna.trial.Trial) -> float:
    x = trial.suggest_int("x", 1, 1)  # fixed at 1
    y = trial.suggest_categorical("y", (2.5,))
    trial.set_user_attr("train_loss", 3)
    raise ValueError()  # the trial is marked as failed; the return below is never reached
    return x + y  # 3.5
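Because the objective always raises, every trial produced by this function ends in the failed state. A minimal sketch (assumed usage, not part of the original snippet) of driving such an objective, using optimize's catch argument so the study keeps going despite the exception:

import optuna

study = optuna.create_study()
# catch=(ValueError,) keeps the study running even though every trial raises.
study.optimize(f, n_trials=3, catch=(ValueError,))
print([t.state for t in study.trials])  # all TrialState.FAIL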
Example #2
def _objective_func(trial: optuna.trial.Trial) -> float:
    x = trial.suggest_uniform("x", -1.0, 1.0)
    y = trial.suggest_loguniform("y", 20.0, 30.0)
    z = trial.suggest_categorical("z", (-1.0, 1.0))
    assert isinstance(z, float)
    trial.set_user_attr("my_user_attr", "my_user_attr_value")
    return (x - 2) ** 2 + (y - 25) ** 2 + z
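For context, a minimal, assumed driver for an objective like this (not part of the original snippet):

import optuna

study = optuna.create_study(direction="minimize")
study.optimize(_objective_func, n_trials=20)
print(study.best_params, study.best_value)
print(study.best_trial.user_attrs["my_user_attr"])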
Example #3
def mo_objective_test_upgrade(trial: optuna.trial.Trial) -> Tuple[float, float]:
    x = trial.suggest_float("x", -5, 5)
    y = trial.suggest_int("y", 0, 10)
    z = cast(float, trial.suggest_categorical("z", [-5, 0, 5]))
    trial.set_system_attr("a", 0)
    trial.set_user_attr("b", 1)
    return x, x ** 2 + y ** 2 + z ** 2
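This objective returns two values, so it needs a multi-objective study. A minimal sketch (assumed usage, not from the original source):

import optuna

study = optuna.create_study(directions=["minimize", "minimize"])
study.optimize(mo_objective_test_upgrade, n_trials=20)
print(len(study.best_trials))  # Pareto-optimal trials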
Example #4
def objective_test_upgrade(trial: optuna.trial.Trial) -> float:
    x = trial.suggest_uniform("x", -5, 5)  # optuna==0.9.0 does not have suggest_float.
    y = trial.suggest_int("y", 0, 10)
    z = cast(float, trial.suggest_categorical("z", [-5, 0, 5]))
    trial.set_system_attr("a", 0)
    trial.set_user_attr("b", 1)
    trial.report(0.5, step=0)
    return x ** 2 + y ** 2 + z ** 2
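trial.report() feeds intermediate values to a pruner. A minimal sketch (assumed, not from the original source) of an objective that reports per step and asks whether it should stop early:

import optuna

def pruned_objective(trial: optuna.trial.Trial) -> float:
    x = trial.suggest_float("x", -5, 5)
    for step in range(10):
        trial.report(x ** 2 / (step + 1), step=step)
        if trial.should_prune():
            raise optuna.TrialPruned()
    return x ** 2

study = optuna.create_study(pruner=optuna.pruners.MedianPruner())
study.optimize(pruned_objective, n_trials=20)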
Example #5
def _objective_func_long_user_attr(trial: optuna.trial.Trial) -> float:

    x = trial.suggest_float("x", -1.0, 1.0)
    y = trial.suggest_float("y", 20, 30, log=True)
    z = trial.suggest_categorical("z", (-1.0, 1.0))
    assert isinstance(z, float)
    long_str = str(list(range(5000)))
    trial.set_user_attr("my_user_attr", long_str)
    return (x - 2)**2 + (y - 25)**2 + z
Example #6
def _objective_func(trial: optuna.trial.Trial) -> float:
    x = trial.suggest_float("x", -1.0, 1.0)
    y = trial.suggest_float("y", 20, 30, log=True)
    z = trial.suggest_categorical("z", (-1.0, 1.0))
    assert isinstance(z, float)
    trial.set_user_attr("my_user_attr", "my_user_attr_value")
    # metric_name and metric are defined in the enclosing scope of the original code.
    mlflow.log_metric(metric_name, metric)
    return (x - 2) ** 2 + (y - 25) ** 2 + z
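Logging to MLflow by hand inside the objective works; Optuna also ships an integration callback that records each finished trial. A minimal sketch (assumed usage, not from the original source):

import optuna
from optuna.integration import MLflowCallback

mlflc = MLflowCallback(metric_name="objective_value")
study = optuna.create_study(direction="minimize")
study.optimize(_objective_func, n_trials=10, callbacks=[mlflc])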
Example #7
def _objective_func(trial: optuna.trial.Trial) -> float:
    u = trial.suggest_int("u", 0, 10, step=2)
    v = trial.suggest_int("v", 1, 10, log=True)
    w = trial.suggest_float("w", -1.0, 1.0, step=0.1)
    x = trial.suggest_uniform("x", -1.0, 1.0)
    y = trial.suggest_loguniform("y", 20.0, 30.0)
    z = trial.suggest_categorical("z", (-1.0, 1.0))
    assert isinstance(z, float)
    trial.set_user_attr("my_user_attr", "my_user_attr_value")
    return u + v + w + (x - 2)**2 + (y - 25)**2 + z
Example #8
def f(trial: optuna.trial.Trial) -> float:
    x = trial.suggest_int("x", 1, 1)
    y = trial.suggest_categorical("y", (2.5,))
    assert isinstance(y, float)
    trial.set_user_attr("train_loss", 3)
    trial.set_system_attr("foo", "bar")
    value = x + y  # 3.5

    # Report the value as an intermediate value, even though in practice it is not "intermediate".
    trial.report(value, step=0)

    return value
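User attributes and reported values can be read back from the finished trials. A minimal sketch (assumed usage, not from the original source):

import optuna

study = optuna.create_study()
study.optimize(f, n_trials=1)
frozen = study.trials[0]
print(frozen.user_attrs["train_loss"])    # 3
print(frozen.intermediate_values[0])      # 3.5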
Example #9
def objective_for_binary_unet(args, trial: optuna.trial.Trial):
    args.lr = trial.suggest_loguniform("lr", low=1e-5, high=1e-2)
    args.edge_weight = trial.suggest_uniform("edge_weight", low=1, high=5)
    args.wf = trial.suggest_int("wf", low=2, high=4)
    args.depth = trial.suggest_int("depth", low=4, high=6)

    pl_pruning_callback = PyTorchLightningPruningCallback(
        trial, "val/f1_score")
    ckpt_callback = train_binary_unet_model(args,
                                            callbacks=[pl_pruning_callback])

    best_f1_score = ckpt_callback.best_model_score.detach().cpu().numpy().item()
    trial.set_user_attr("best_val_f1", best_f1_score)
    trial.set_user_attr("best_model_path", ckpt_callback.best_model_path)

    return best_f1_score
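The pruning callback needs a study with a pruner and, since the F1 score is maximized here, direction="maximize". A minimal sketch (assumed usage, not from the original source; train_binary_unet_model and args come from the surrounding project):

import functools
import optuna

study = optuna.create_study(direction="maximize",
                            pruner=optuna.pruners.MedianPruner(n_warmup_steps=5))
study.optimize(functools.partial(objective_for_binary_unet, args), n_trials=30)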
Example #10
def f(trial: optuna.trial.Trial) -> float:
    trial.set_user_attr("train_accuracy", 1)
    assert trial.user_attrs["train_accuracy"] == 1
    return 0.0
Example #11
    def __call__(self, trial: optuna.trial.Trial) -> float:
        q10_init = trial.suggest_float('q10_init', 0.0001, 1000.)
        seed = trial.suggest_int('seed', 0, 999999999999)
        use_ta = trial.suggest_categorical('use_ta', [True, False])
        dropout = trial.suggest_float('dropout', 0.0, 1.0)

        if use_ta:
            features = ['sw_pot', 'dsw_pot', 'ta']
        else:
            features = ['sw_pot', 'dsw_pot']

        pl.seed_everything(seed)

        # Further variables used in the hybrid model.
        physical = ['ta']

        # Target (multiple targets not possible currently).
        targets = ['reco']

        # Find variables that are only needed in physical model but not in NN.
        physical_exclusive = [v for v in physical if v not in features]

        # ------------
        # data
        # ------------
        ds = xr.open_dataset(self.args.data_path)

        fluxdata = FluxData(ds,
                            features=features + physical_exclusive,
                            targets=targets,
                            context_size=1,
                            train_time=slice('2003-01-01', '2006-12-31'),
                            valid_time=slice('2007-01-01', '2007-12-31'),
                            test_time=slice('2008-01-01', '2008-12-31'),
                            batch_size=self.args.batch_size,
                            data_loader_kwargs={'num_workers': 4})

        train_loader = fluxdata.train_dataloader()
        val_loader = fluxdata.val_dataloader()
        test_loader = fluxdata.test_dataloader()

        # Create empty xr.Dataset, will be used by the model to save predictions every epoch.
        max_epochs = TRAINER_ARGS['max_epochs']
        ds_pred = fluxdata.target_xr('valid',
                                     varnames=['reco', 'rb'],
                                     num_epochs=max_epochs)

        # ------------
        # model
        # ------------
        model = Q10Model(features=features,
                         targets=targets,
                         norm=fluxdata._norm,
                         ds=ds_pred,
                         q10_init=q10_init,
                         hidden_dim=self.args.hidden_dim,
                         num_layers=self.args.num_layers,
                         learning_rate=self.args.learning_rate,
                         dropout=dropout,
                         weight_decay=self.args.weight_decay,
                         num_steps=len(train_loader) * max_epochs)

        # ------------
        # training
        # ------------
        trainer = pl.Trainer.from_argparse_args(
            self.args,
            default_root_dir=self.args.log_dir,
            **TRAINER_ARGS,
            callbacks=[
                EarlyStopping(monitor='valid_loss',
                              patience=10,
                              min_delta=0.00001),
                ModelCheckpoint(filename='{epoch}-{val_loss:.2f}',
                                save_top_k=1,
                                verbose=False,
                                monitor='valid_loss',
                                mode='min',
                                prefix=model.__class__.__name__)
            ])
        trainer.fit(model, train_loader, val_loader)

        # ------------
        # testing
        # ------------
        # trainer.test(test_dataloaders=test_loader)

        # ------------
        # save results
        # ------------
        # Store predictions.
        ds = fluxdata.add_scalar_record(model.ds,
                                        varname='q10',
                                        x=model.q10_history)
        trial.set_user_attr('q10', ds.q10[-1].item())

        # Add some attributes that are required for analysis.
        ds.attrs = {
            'created': datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
            'author': '*****@*****.**',
            'q10_init': q10_init,
            'dropout': dropout,
            'use_ta': int(use_ta),
            'loss': trainer.callback_metrics['valid_loss'].item()
        }

        ds = ds.isel(epoch=slice(0, trainer.current_epoch + 1))

        # Save data.
        save_dir = os.path.join(model.logger.log_dir, 'predictions.nc')
        print(f'Saving predictions to: {save_dir}')
        ds.to_netcdf(save_dir)

        return trainer.callback_metrics['valid_loss'].item()
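This objective is the __call__ method of a class, so an instance is passed to optimize. A minimal sketch (assumed usage; the class name Objective and its constructor are hypothetical, the original snippet does not show them):

import optuna

# Hypothetical: assumes the surrounding class is named Objective and takes args.
study = optuna.create_study(direction="minimize")
study.optimize(Objective(args), n_trials=50)
print(study.best_trial.user_attrs["q10"])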
Example #12
def objective(trial: optuna.trial.Trial, value: float) -> float:
    trial.set_user_attr("lightgbm_tuner:step_name", "step{:.0f}".format(value))
    return trial.suggest_uniform("x", value, value)
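Because this objective takes an extra value argument, it has to be bound before being handed to optimize. A minimal sketch (assumed usage, not from the original source):

import functools
import optuna

study = optuna.create_study()
study.optimize(functools.partial(objective, value=1.0), n_trials=1)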
Example #13
def objective(trial: optuna.trial.Trial) -> Tuple[float, float]:
    trial.suggest_int("x", 5, 5)
    # constraint_value is defined in the enclosing scope of the original code.
    trial.set_user_attr("constraint", (constraint_value, -1))
    return 5.0, 5.0
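The "constraint" user attribute follows the convention used by Optuna's constrained samplers: values <= 0 are feasible. A minimal sketch (assumed usage, not from the original source) of wiring it to NSGA-II:

import optuna

sampler = optuna.samplers.NSGAIISampler(
    constraints_func=lambda t: t.user_attrs["constraint"])
study = optuna.create_study(directions=["minimize", "minimize"], sampler=sampler)
study.optimize(objective, n_trials=10)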