Example 1
    def __init__(
        self,
        prediction_length: int,
        freq: str,
        context_length: Optional[int] = None,
        # FIXME: prefix those so clients know that these are decoder params
        mlp_final_dim: int = 20,
        mlp_hidden_dimension_seq: List[int] = list(),
        trainer: Trainer = Trainer(),
        quantiles: List[float] = list([0.1, 0.5, 0.9]),
    ) -> None:
        encoder = RNNEncoder(
            mode="gru",
            hidden_size=50,
            num_layers=1,
            bidirectional=True,
            prefix="encoder_",
        )
        super(MQRNNEstimator, self).__init__(
            encoder=encoder,
            mlp_final_dim=mlp_final_dim,
            mlp_hidden_dimension_seq=mlp_hidden_dimension_seq,
            freq=freq,
            prediction_length=prediction_length,
            trainer=trainer,
            context_length=context_length,
            quantiles=quantiles,
        )
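In this older MQRNNEstimator constructor only prediction_length and freq are required; the encoder is hard-coded and everything else has a default. A minimal usage sketch, assuming the MXNet-era GluonTS layout in which MQRNNEstimator is exported from gluonts.model.seq2seq and Trainer from gluonts.mx.trainer (adjust the imports to the version you have installed):

from gluonts.model.seq2seq import MQRNNEstimator  # assumed export path
from gluonts.mx.trainer import Trainer            # assumed Trainer location

estimator = MQRNNEstimator(
    freq="H",                   # hourly series
    prediction_length=24,       # forecast one day ahead
    quantiles=[0.1, 0.5, 0.9],
    trainer=Trainer(epochs=5),
)
predictor = estimator.train(training_data)  # training_data: any GluonTS Dataset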
Example 2
    def __init__(
        self,
        prediction_length: int,
        freq: str,
        context_length: Optional[int] = None,
        decoder_mlp_dim_seq: Optional[List[int]] = None,
        trainer: Trainer = Trainer(),
        quantiles: Optional[List[float]] = None,
        scaling: bool = True,
    ) -> None:

        assert (
            prediction_length > 0
        ), f"Invalid prediction length: {prediction_length}."
        assert decoder_mlp_dim_seq is None or all(
            d > 0 for d in decoder_mlp_dim_seq
        ), "Elements of `decoder_mlp_dim_seq` should be > 0"
        assert quantiles is None or all(
            0 <= d <= 1 for d in quantiles
        ), "Elements of `quantiles` should be >= 0 and <= 1"

        self.decoder_mlp_dim_seq = (
            decoder_mlp_dim_seq if decoder_mlp_dim_seq is not None else [30]
        )
        self.quantiles = (
            quantiles
            if quantiles is not None
            else [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
        )

        # `use_static_feat` and `use_dynamic_feat` are always True because the
        # network always receives input, either from the input data or from constants
        encoder = RNNEncoder(
            mode="gru",
            hidden_size=50,
            num_layers=1,
            bidirectional=True,
            prefix="encoder_",
            use_static_feat=True,
            use_dynamic_feat=True,
        )

        decoder = ForkingMLPDecoder(
            dec_len=prediction_length,
            final_dim=self.decoder_mlp_dim_seq[-1],
            hidden_dimension_sequence=self.decoder_mlp_dim_seq[:-1],
            prefix="decoder_",
        )

        quantile_output = QuantileOutput(self.quantiles)

        super().__init__(
            encoder=encoder,
            decoder=decoder,
            quantile_output=quantile_output,
            freq=freq,
            prediction_length=prediction_length,
            context_length=context_length,
            trainer=trainer,
            scaling=scaling,
        )
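Compared with Example 1, this constructor avoids mutable default arguments (list(), list([0.1, 0.5, 0.9])) by accepting None and resolving the defaults inside __init__, so the default lists are never shared across instances. A standalone sketch of that pattern (the class and parameter names here are illustrative, not part of GluonTS):

from typing import List, Optional


class Configurable:
    def __init__(self, dims: Optional[List[int]] = None) -> None:
        # Resolve the default here rather than in the signature: a default like
        # `dims: List[int] = [30]` is evaluated once, at function definition
        # time, and would be shared by every instance that relies on it.
        self.dims = dims if dims is not None else [30]


a, b = Configurable(), Configurable()
a.dims.append(60)
assert b.dims == [30]  # each instance gets its own fresh list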
Example 3
    def __init__(
        self,
        freq: str,
        prediction_length: int,
        cardinality: List[int],
        embedding_dimension: int,
        encoder_rnn_layer: int,
        encoder_rnn_num_hidden: int,
        decoder_mlp_layer: List[int],
        decoder_mlp_static_dim: int,
        encoder_rnn_model: str = "lstm",
        encoder_rnn_bidirectional: bool = True,
        scaler: Scaler = NOPScaler(),
        context_length: Optional[int] = None,
        quantiles: Optional[List[float]] = None,
        trainer: Trainer = Trainer(),
        num_parallel_samples: int = 100,
    ) -> None:
        encoder = RNNEncoder(
            mode=encoder_rnn_model,
            hidden_size=encoder_rnn_num_hidden,
            num_layers=encoder_rnn_layer,
            bidirectional=encoder_rnn_bidirectional,
            use_static_feat=True,
            use_dynamic_feat=True,
        )
        super(RNN2QRForecaster, self).__init__(
            freq=freq,
            prediction_length=prediction_length,
            encoder=encoder,
            cardinality=cardinality,
            embedding_dimension=embedding_dimension,
            decoder_mlp_layer=decoder_mlp_layer,
            decoder_mlp_static_dim=decoder_mlp_static_dim,
            context_length=context_length,
            scaler=scaler,
            quantiles=quantiles,
            trainer=trainer,
            num_parallel_samples=num_parallel_samples,
        )
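Unlike the first two estimators, RNN2QRForecaster exposes the encoder hyper-parameters and the static-feature embedding as constructor arguments, so the caller has to supply them. A hedged usage sketch based only on the signature above; the import paths and the training dataset are assumptions (older MXNet-based GluonTS releases exported the class from gluonts.model.seq2seq):

from gluonts.model.seq2seq import RNN2QRForecaster  # assumed export path
from gluonts.mx.trainer import Trainer              # assumed Trainer location

forecaster = RNN2QRForecaster(
    freq="D",
    prediction_length=14,
    cardinality=[321],           # one categorical feature taking 321 values
    embedding_dimension=10,
    encoder_rnn_layer=2,
    encoder_rnn_num_hidden=64,
    decoder_mlp_layer=[30, 30],
    decoder_mlp_static_dim=10,
    quantiles=[0.1, 0.5, 0.9],
    trainer=Trainer(epochs=10),
)
predictor = forecaster.train(training_data)  # training_data: any GluonTS Dataset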