def test_decoding_2(self):
     """Decode unbatched 2-D inputs and expect a (1, 336, ...) output."""
     encoder_seq = torch.rand(20, 3)
     decoder_seq = torch.rand(354, 3)
     encoder_feats = torch.rand(20, 4)
     decoder_feats = torch.rand(354, 4)
     result = decoding_function(self.informer, encoder_seq, decoder_seq, 5,
                                encoder_feats, decoder_feats, 1, 20, 336,
                                "cpu")
     self.assertEqual(result.shape[0], 1)
     self.assertEqual(result.shape[1], 336)
def generate_decoded_predictions(
        model: Type[TimeSeriesModel],
        test_data: CSVTestLoader,
        forecast_start_idx: int,
        device: torch.device,
        history_dim: torch.Tensor,
        hours_to_forecast: int,
        decoder_params: Dict,
        multi_targets: int = 1,
        targs: Union[bool, torch.Tensor] = False) -> torch.Tensor:
    """Run a decoder over the test period and return the decoded forecast.

    :param model: wrapper exposing the underlying ``model.model``, its
        ``params`` dict (``model_params`` / ``dataset_params`` are read
        here) and ``model.device``.
    :param test_data: test loader; its ``df`` is sliced from
        ``forecast_start_idx`` onward to build the real-target tensor, and
        the loader itself is used as the scaler when ``test_data.no_scale``
        is truthy.
    :param forecast_start_idx: row index in ``test_data.df`` where the
        forecast period begins.
    :param device: device the real-target tensor is first moved to (it is
        then moved again to ``model.device``).
    :param history_dim: encoder history tensor(s) fed to the decoder.
    :param hours_to_forecast: number of time steps to decode.
    :param decoder_params: decoding configuration. Must contain
        ``decoder_function`` and ``unsqueeze_dim``; presence of a
        ``probabilistic`` key (any value) enables the probabilistic path.
        Must not be ``None``.
    :param multi_targets: number of target columns; when 1 the output is
        flattened to a 1-D tensor.
    :param targs: ``False`` to dispatch through ``decoding_functions``, or
        the indexable target tensor(s) to decode with ``decoding_function``
        directly.
    :return: decoded predictions on CPU, detached; in the probabilistic or
        tuple cases, a 2-tuple is returned instead.
    :raises ValueError: if ``decoder_params`` is ``None``.
    """
    if decoder_params is None:
        # Previously this fell through and hit a NameError on end_tensor;
        # fail fast with an explicit message instead.
        raise ValueError("decoder_params must be provided")
    # Presence of the key (not its value) enables the probabilistic path,
    # matching the original behavior.
    probabilistic = "probabilistic" in decoder_params
    # When the loader performs no scaling itself, it acts as the scaler.
    scaler = test_data if test_data.no_scale else None

    real_target_tensor = (torch.from_numpy(
        test_data.df[forecast_start_idx:].to_numpy()).to(device).unsqueeze(
            0).to(model.device))
    # `if targs:` on a multi-element tensor raises "Boolean value of Tensor
    # ... is ambiguous"; compare against the False sentinel explicitly so a
    # real target tensor can be passed.
    if targs is not False:
        src = history_dim
        trg = targs
        decoder_seq_len = model.params["model_params"]["label_len"]
        end_tensor = decoding_function(
            model.model, src[0], trg[1],
            model.params["dataset_params"]["forecast_length"], src[1],
            trg[0], 1, decoder_seq_len, hours_to_forecast, device)
    else:
        end_tensor = decoding_functions[decoder_params["decoder_function"]](
            model.model,
            history_dim,
            hours_to_forecast,
            real_target_tensor,
            decoder_params["unsqueeze_dim"],
            output_len=model.params["dataset_params"]["forecast_length"],
            multi_targets=multi_targets,
            device=model.device,
            probabilistic=probabilistic,
            scaler=scaler)
    if probabilistic:
        # First element holds the mean; the second (e.g. std) is returned
        # untouched.
        end_tensor_mean = end_tensor[0][:, :, 0].view(-1).to("cpu").detach()
        return end_tensor_mean, end_tensor[1]
    if isinstance(end_tensor, tuple):
        return (end_tensor[0][:, :, 0].view(-1).to("cpu").detach(),
                end_tensor[1][:, :, 0].view(-1).to("cpu").detach())
    if multi_targets == 1:
        # Single target: drop the feature axis and flatten to 1-D.
        end_tensor = end_tensor[:, :, 0].view(-1)
    return end_tensor.to("cpu").detach()
 def test_decoding_3(self):
     """Decode batched 3-D inputs with a fresh Informer; expect (1, 336, ...)."""
     model = Informer(3, 3, 3, 48, 24, 12, factor=1)
     batch_src = torch.rand(1, 48, 3)
     batch_trg = torch.rand(1, 362, 3)
     batch_src_feats = torch.rand(1, 48, 4)
     batch_trg_feats = torch.rand(1, 362, 4)
     out = decoding_function(model, batch_src, batch_trg, 12,
                             batch_src_feats, batch_trg_feats, 1, 36, 336,
                             "cpu")
     self.assertEqual(out.shape[0], 1)
     self.assertEqual(out.shape[1], 336)
 def test_decoding_2(self):
     """Decode 355-step targets and check outputs vary across positions.

     NOTE(review): this redefines ``test_decoding_2`` and therefore shadows
     an earlier method of the same name in this class — only this one runs.
     """
     hist = torch.rand(20, 3)
     future = torch.rand(355, 3)
     hist_feats = torch.rand(20, 4)
     future_feats = torch.rand(355, 4)
     out = decoding_function(self.informer, hist, future, 5, hist_feats,
                             future_feats, 1, 20, 336, "cpu")
     self.assertEqual(out.shape[0], 1)
     self.assertEqual(out.shape[1], 336)
     self.assertNotEqual(out[0, 0, 0].item(), out[0, 1, 0].item())
     self.assertNotAlmostEqual(out[0, 0, 0].item(), out[0, 330, 0].item())
     self.assertNotAlmostEqual(out[0, 20, 0].item(), out[0, 333, 0].item())
     self.assertNotAlmostEqual(out[0, 300, 0].item(), out[0, 334, 0].item())
     self.assertNotAlmostEqual(out[0, 20, 0].item(), future[20, 0].item())
     self.assertNotAlmostEqual(out[0, 21, 0].item(), future[21, 0].item())