Example #1
def test_progress_interval():
    # Test setting the total number of iterations and an integer report interval.
    with Mock() as mock:
        with out.Progress(name="name", total=4, interval=3) as progress:
            progress(a="a")
            progress(a="b")
            progress(a="c")
            # Change time per iteration to a second to show a non-zero time
            # left.
            progress.values["_time_per_it"] = 1
            progress(a="d")

    assert len(mock) == 14
    assert mock[0] == "name:\n"
    # The first iteration is always shown.
    assert mock[1] == "    Iteration 1/4:\n"
    assert "Time elapsed" in mock[2]
    assert "Time left" in mock[3]
    assert mock[4] == "        a:          a\n"
    assert mock[5] == "    Iteration 3/4:\n"
    assert "Time elapsed" in mock[6]
    assert "Time left" in mock[7]
    assert mock[8] == "        a:          c\n"
    # The last iteration is also always shown.
    assert mock[9] == "    Iteration 4/4:\n"
    assert "Time elapsed" in mock[10]
    assert "Time left" in mock[11]
    assert "0.0 s" not in mock[11]
    assert mock[12] == "        a:          d\n"
    assert mock[13] == "    Done!\n"
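The `Mock` context manager used throughout these tests is not shown on this page. Judging from its use, it captures every line of output produced inside the `with` block and exposes the captured lines by index. A minimal sketch of such a helper is below, assuming plain redirection of `sys.stdout`; the real test helper may instead hook into the output module directly.

import sys
from io import StringIO


class Mock:
    # Hypothetical stand-in for the test helper: records every line written
    # to stdout while the context is active and exposes the lines by index.

    def __enter__(self):
        self._stdout = sys.stdout
        sys.stdout = self._buffer = StringIO()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Restore stdout and split the captured text into lines, keeping the
        # trailing newlines so they match the assertions above.
        sys.stdout = self._stdout
        self._lines = self._buffer.getvalue().splitlines(keepends=True)
        return False

    def __len__(self):
        return len(self._lines)

    def __getitem__(self, i):
        return self._lines[i]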
Example #2
    def _minimise_l_bfgs_b(f,
                           vs,
                           f_calls=10000,
                           iters=1000,
                           trace=False,
                           names=None,
                           jit=False):
        names = _convert_and_validate_names(names)

        # Run function once to ensure that all variables are initialised and
        # available.
        val_init = f(vs)

        # SciPy doesn't perform zero iterations, so handle that edge case
        # manually.
        if iters == 0 or f_calls == 0:
            return B.to_numpy(val_init)

        # Extract initial value.
        x0 = B.to_numpy(vs.get_latent_vector(*names))

        # The optimiser expects to get `float64`s.
        def _convert(*xs):
            return [B.cast(np.float64, B.to_numpy(x)) for x in xs]

        # Wrap the function and get the list of function evaluations.
        f_vals, f_wrapped = wrap_f(vs, names, f, jit, _convert)

        # Perform optimisation routine.
        def perform_minimisation(callback_=lambda _: None):
            return fmin_l_bfgs_b(
                func=f_wrapped,
                x0=x0,
                maxiter=iters,
                maxfun=f_calls,
                callback=callback_,
                disp=0,
            )

        if trace:
            # Print progress during minimisation.
            with out.Progress(name='Minimisation of "{}"'.format(f.__name__),
                              total=iters) as progress:

                def callback(_):
                    progress({"Objective value": np.min(f_vals)})

                x_opt, val_opt, info = perform_minimisation(callback)

            with out.Section("Termination message"):
                out.out(convert(info["task"], str))
        else:
            # Don't print progress; simply perform minimisation.
            x_opt, val_opt, info = perform_minimisation()

        vs.set_latent_vector(x_opt, *names)  # Assign optimum.

        return val_opt  # Return optimal value.
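In this example (and in Example #4 below), `wrap_f` appears to return a pair: a list that records every objective value evaluated so far, which the progress callback reports via `np.min(f_vals)`, and a wrapped function mapping the flat latent vector to the objective value together with its gradient, which is the form `fmin_l_bfgs_b` expects when `approx_grad` is not set. The sketch below only illustrates that inferred contract; it is not the library's implementation, and it uses finite differences in place of the real gradient computation.

import numpy as np


def wrap_f_sketch(f, eps=1e-6):
    # Hypothetical illustration: collect objective values in `f_vals` and
    # return a wrapped function that maps a flat vector to (value, gradient).
    f_vals = []

    def f_wrapped(x):
        val = f(x)
        f_vals.append(val)
        # Forward-difference gradient as a stand-in for autodiff.
        grad = np.array([(f(x + eps * e) - val) / eps for e in np.eye(len(x))])
        return val, grad

    return f_vals, f_wrapped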
Example #3
def test_progress():
    # Test a simple case.
    with Mock() as mock:
        with out.Progress() as progress:
            progress(a=1)

    assert len(mock) == 5
    assert mock[0] == "Progress:\n"
    assert mock[1] == "    Iteration 1:\n"
    assert "Time elapsed" in mock[2]
    assert mock[3] == "        a:          1\n"
    assert mock[4] == "    Done!\n"
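Example #3 exercises the simplest form of the API. Outside a test, the same pattern just prints to the console. A minimal sketch, assuming `out` in these examples is `wbml.out`:

import wbml.out as out

# Report one value per iteration; `name` and `total` are optional, as the
# test above shows.
with out.Progress(name="Computing squares", total=5) as progress:
    for i in range(5):
        progress(square=i ** 2)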
Example #4
    def minimise_l_bfgs_b(f,
                          vs,
                          f_calls=10000,
                          iters=1000,
                          trace=False,
                          names=None):
        names = [] if names is None else names

        # Run function once to ensure that all variables are initialised and
        # available.
        val_init = f(vs)

        # SciPy doesn't perform zero iterations, so handle that edge case
        # manually.
        if iters == 0 or f_calls == 0:
            return B.to_numpy(val_init)

        # Extract initial value.
        x0 = B.to_numpy(vs.get_vector(*names))

        # Wrap the function and get the list of function evaluations.
        f_vals, f_wrapped = wrap_f(vs, names, f)

        # Perform optimisation routine.
        def perform_minimisation(callback_=lambda _: None):
            return fmin_l_bfgs_b(func=f_wrapped,
                                 x0=x0,
                                 maxiter=iters,
                                 maxfun=f_calls,
                                 callback=callback_,
                                 disp=0)

        if trace:
            # Print progress during minimisation.
            with out.Progress(name='Minimisation of "{}"'.format(f.__name__),
                              total=iters) as progress:
                def callback(_):
                    progress({'Objective value': np.min(f_vals)})

                x_opt, val_opt, info = perform_minimisation(callback)

            with out.Section('Termination message'):
                out.out(info['task'].decode('utf-8'))
        else:
            # Don't print progress; simply perform minimisation.
            x_opt, val_opt, info = perform_minimisation()

        vs.set_vector(x_opt, *names)  # Assign optimum.

        return val_opt  # Return optimal value.
Example #5
def test_progress_filters():
    # Test filters, a float report interval, and passing values as a dictionary.
    with Mock() as mock:
        with out.Progress(name="name",
                          interval=1e-10,
                          filter={"a": None},
                          filter_global=np.inf) as progress:
            progress({"a": 1, "b": 1})
            progress({"a": 2, "b": 2})

    assert len(mock) == 10
    assert mock[0] == "name:\n"
    assert mock[1] == "    Iteration 1:\n"
    assert "Time elapsed" in mock[2]
    assert mock[3] == "        a:          1\n"
    assert mock[4] == "        b:          1\n"
    assert mock[5] == "    Iteration 2:\n"
    assert "Time elapsed" in mock[6]
    # The filter for `a` is disabled, so the raw value is shown.
    assert mock[7] == "        a:          2\n"
    # The global filter is maximally smooth, so `b` stays at its first value.
    assert mock[8] == "        b:          1.0\n"
    assert mock[9] == "    Done!\n"
Example #6
for model in [
        # (The first model in this list is omitted from the excerpt.)
        GPCM(
            window=window,
            scale=scale,
            noise=noise,
            n_u=n_u,
            n_z=n_z,
            t=t_train,
        ),
        CGPCM(
            window=window,
            scale=scale,
            noise=noise,
            n_u=n_u,
            n_z=n_z,
            t=t_train,
        ),
]:
    # Fit model.
    model.fit(t_train, y_train, iters=20_000)

    # Make predictions for all held-out test sets.
    preds = []
    with out.Progress("Making predictions", total=len(tests)) as progress:
        for (t_test1, y_test1), (t_test2, y_test2) in tests:
            posterior = model.condition(t_test1, normaliser.transform(y_test1))
            mean, var = normaliser.untransform(posterior.predict(t_test2))
            preds.append((y_test2, mean, var))
            progress()
    wd.save(preds, model.name.lower(), "preds.pickle")
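Note that `progress()` is called here without any arguments: each call appears to only advance the iteration counter and reprint the timing information, which suits a loop where there is no scalar diagnostic to report.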
Example #7
    def _minimise_adam(
        f,
        vs,
        iters=1000,
        rate=1e-3,
        beta1=0.9,
        beta2=0.999,
        epsilon=1e-8,
        local_rates=True,
        trace=False,
        names=None,
        jit=False,
    ):
        names = _convert_and_validate_names(names)

        # Run function once to ensure that all variables are initialised and
        # available.
        val_init = f(vs)

        # Handle the edge case of zero iterations.
        if iters == 0:
            return B.to_numpy(val_init)

        # Extract initial value.
        x0 = B.to_numpy(vs.get_latent_vector(*names))

        # Wrap the function.
        _, f_wrapped = wrap_f(vs, names, f, jit, B.to_numpy)

        def perform_minimisation(callback_=lambda _: None):
            # Perform optimisation routine.
            x = x0
            obj_value = None
            adam = ADAM(
                rate=rate,
                beta1=beta1,
                beta2=beta2,
                epsilon=epsilon,
                local_rates=local_rates,
            )

            for i in range(iters):
                obj_value, grad = f_wrapped(x)
                callback_(obj_value)
                x = adam.step(x, grad)

            return x, obj_value

        if trace:
            # Print progress during minimisation.
            with out.Progress(name='Minimisation of "{}"'.format(f.__name__),
                              total=iters) as progress:

                def callback(obj_value):
                    progress({"Objective value": obj_value})

                x_opt, obj_value = perform_minimisation(callback)
        else:
            x_opt, obj_value = perform_minimisation()

        vs.set_latent_vector(x_opt, *names)  # Assign optimum.

        return obj_value  # Return last objective value.
Example #8
    def minimise_adam(f,
                      vs,
                      iters=1000,
                      rate=1e-3,
                      beta1=0.9,
                      beta2=0.999,
                      epsilon=1e-8,
                      trace=False,
                      names=None):
        names = [] if names is None else names

        # Run function once to ensure that all variables are initialised and
        # available.
        val_init = f(vs)

        # Handle the edge case of zero iterations.
        if iters == 0:
            return B.to_numpy(val_init)

        # Extract initial value.
        x0 = B.to_numpy(vs.get_vector(*names))

        # Wrap the function.
        _, f_wrapped = wrap_f(vs, names, f)

        def perform_minimisation(callback_=lambda _: None):
            # Perform optimisation routine.
            x = x0
            obj_value = None
            m = np.zeros_like(x0)
            v = np.zeros_like(x0)

            for i in range(iters):
                obj_value, grad = f_wrapped(x)

                callback_(obj_value)

                # Update estimates of moments.
                m = beta1 * m + (1 - beta1) * grad
                v = beta2 * v + (1 - beta2) * grad ** 2

                # Correct for bias of initialisation.
                m_corr = m / (1 - beta1 ** (i + 1))
                v_corr = v / (1 - beta2 ** (i + 1))

                # Perform update.
                x = x - rate * m_corr / (v_corr ** .5 + epsilon)

            return x, obj_value

        if trace:
            # Print progress during minimisation.
            with out.Progress(name='Minimisation of "{}"'.format(f.__name__),
                              total=iters) as progress:
                def callback(obj_value):
                    progress({'Objective value': obj_value})

                x_opt, obj_value = perform_minimisation(callback)
        else:
            x_opt, obj_value = perform_minimisation()

        vs.set_vector(x_opt, *names)  # Assign optimum.

        return obj_value  # Return last objective value.
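The ADAM update in this example can be exercised on its own. The sketch below is not part of the library; it applies the same moment estimates, bias correction, and update rule to a one-dimensional quadratic using only NumPy.

import numpy as np


def adam_demo(grad_fn, x0, iters=1000, rate=1e-3, beta1=0.9, beta2=0.999,
              epsilon=1e-8):
    # Same update rule as in the loop above, but with a user-supplied
    # gradient function instead of the wrapped objective.
    x = np.asarray(x0, dtype=np.float64)
    m = np.zeros_like(x)
    v = np.zeros_like(x)
    for i in range(iters):
        grad = grad_fn(x)
        m = beta1 * m + (1 - beta1) * grad
        v = beta2 * v + (1 - beta2) * grad ** 2
        m_corr = m / (1 - beta1 ** (i + 1))
        v_corr = v / (1 - beta2 ** (i + 1))
        x = x - rate * m_corr / (v_corr ** 0.5 + epsilon)
    return x


# Minimise (x - 3)^2, whose gradient is 2 * (x - 3); the result should
# approach 3.
print(adam_demo(lambda x: 2 * (x - 3), x0=np.array([0.0]), iters=5000, rate=0.1))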