# Example no. 1
# 0
    def test_hourglass_calc_dims_check_dims(self):
        """
        Test that hourglass_calc_dims implements the correct dimensions
        """
        # (compression_factor, encoding_layers, n_features) -> expected dims
        cases = [
            ((0.2, 4, 5), [4, 3, 2, 1]),
            ((0.5, 3, 10), [8, 7, 5]),
            ((0.5, 3, 3), [3, 2, 2]),
            ((0.3, 3, 10), [8, 5, 3]),
            ((1, 3, 10), [10, 10, 10]),
            ((0, 3, 100000), [66667, 33334, 1]),
        ]
        for args, expected in cases:
            self.assertEqual(hourglass_calc_dims(*args), expected)
def feedforward_hourglass(
    n_features: int,
    encoding_layers: int = 3,
    compression_factor: float = 0.5,
    func: str = "tanh",
    **kwargs,
) -> keras.models.Sequential:
    """
    Build an hourglass-shaped feedforward network: neuron counts shrink
    layer by layer down to a narrow bottleneck, then mirror back out to
    the original width.

    Parameters
    ----------
    n_features: int
        Number of input and output neurons
    encoding_layers: int
        Number of layers from the input layer (exclusive) to the
        narrowest layer (inclusive). Must be > 0. The total nr of layers
        including input and output layer will be 2*encoding_layers + 1.
    compression_factor: float
        How small the smallest layer is as a ratio of n_features
        (smallest layer is rounded up to nearest integer). Must satisfy
        0 <= compression_factor <= 1.
    func: str
        Activation function for the internal layers

    Notes
    -----
    With n_features = 10, encoding_layers = 3 and compression_factor = 0.3
    the resulting model is shaped like this::

                * * * * * * * * * *
                  * * * * * * * *
                     * * * * *
                       * * *
                       * * *
                     * * * * *
                  * * * * * * * *
                * * * * * * * * * *


    Returns
    -------
    keras.models.Sequential

    Examples
    --------
    >>> model = feedforward_hourglass(10)
    >>> len(model.layers)
    7
    >>> [model.layers[i].units for i in range(len(model.layers))]
    [8, 7, 5, 5, 7, 8, 10]
    >>> model = feedforward_hourglass(5)
    >>> [model.layers[i].units for i in range(len(model.layers))]
    [4, 4, 3, 3, 4, 4, 5]
    >>> model = feedforward_hourglass(10, compression_factor=0.2)
    >>> [model.layers[i].units for i in range(len(model.layers))]
    [7, 5, 2, 2, 5, 7, 10]
    >>> model = feedforward_hourglass(10, encoding_layers=1)
    >>> [model.layers[i].units for i in range(len(model.layers))]
    [5, 5, 10]
    """
    # Compute the mirrored layer widths, then delegate model construction
    # to the symmetric-network builder with one activation per layer.
    layer_dims = hourglass_calc_dims(compression_factor, encoding_layers, n_features)
    activations = [func] * len(layer_dims)
    return feedforward_symmetric(n_features, layer_dims, activations, **kwargs)
# Example no. 3
# 0
def lstm_hourglass(
    n_features: int,
    lookback_window: int = 1,
    encoding_layers: int = 3,
    compression_factor: float = 0.5,
    func: str = "relu",
    out_func: str = "linear",
    optimizer: Union[str, Optimizer] = "adam",
    optimizer_kwargs: Union[Dict[str, Any], None] = None,
    loss: str = "mse",
    **kwargs,
) -> keras.models.Sequential:

    """

    Builds an hourglass shaped neural network, with decreasing number of neurons
    as one gets deeper into the encoder network and increasing number
    of neurons as one gets out of the decoder network.


    Parameters
    ----------
    n_features: int
        Number of input and output neurons
    lookback_window: int
        Number of timesteps used to look back when training the model
    encoding_layers: int
        Number of layers from the input layer (exclusive) to the
        narrowest layer (inclusive). Must be > 0. The total nr of layers
        including input and output layer will be 2*encoding_layers + 1.
    compression_factor: float
        How small the smallest layer is as a ratio of n_features
        (smallest layer is rounded up to nearest integer). Must satisfy
        0 <= compression_factor <= 1.
    func: str
        Activation function for the internal layers
    out_func: str
        Activation function for the output Dense layer.
    optimizer: Union[str, Optimizer]
        If str then the name of the optimizer must be provided (e.x. "adam").
        The arguments of the optimizer can be supplied in optimization_kwargs.
        If a Keras optimizer call the instance of the respective
        class (e.x. Adam(lr=0.01,beta_1=0.9, beta_2=0.999)).  If no arguments are
        provided Keras default values will be set.
    optimizer_kwargs: Union[Dict[str, Any], None]
        The arguments for the chosen optimizer. If not provided Keras'
        default values will be used.
    loss: str
        Keras' supported loss functions (e.x. "mse", "MSE", "mean_squared_error"
                                              for mean squared error,
                                              "mae", "MAE", "mean_absolute_error"
                                              for mean absolute error,
                                              for other supported loss functions
                                              refer to https://keras.io/losses/).


    Returns
    -------
    keras.models.Sequential

    Examples
    --------
    >>> model = lstm_hourglass(10)
    >>> len(model.layers)
    7
    >>> [model.layers[i].units for i in range(len(model.layers))]
    [8, 7, 5, 5, 7, 8, 10]
    >>> model = lstm_hourglass(5)
    >>> [model.layers[i].units for i in range(len(model.layers))]
    [4, 4, 3, 3, 4, 4, 5]
    >>> model = lstm_hourglass(10, compression_factor=0.2)
    >>> [model.layers[i].units for i in range(len(model.layers))]
    [7, 5, 2, 2, 5, 7, 10]
    >>> model = lstm_hourglass(10, encoding_layers=1)
    >>> [model.layers[i].units for i in range(len(model.layers))]
    [5, 5, 10]
    """
    # A fresh dict per call: the original `= dict()` default was a shared
    # mutable object that downstream mutation could leak across calls.
    if optimizer_kwargs is None:
        optimizer_kwargs = {}

    dims = hourglass_calc_dims(compression_factor, encoding_layers, n_features)

    return lstm_symmetric(
        n_features,
        lookback_window,
        dims,
        [func] * len(dims),
        out_func,
        optimizer,
        optimizer_kwargs,
        loss,
        **kwargs,
    )