    # reshape so every sample is (n_scat_nodes, data_len);
    # n_scat_nodes is 1 if the data was not transformed
    if isinstance(data, np.ndarray):
        data = np.reshape(data, (n_data_total, -1, data.shape[-1]))
    elif isinstance(data, list):
        data = [np.reshape(data_slice, (-1, data_slice.shape[-1])) for data_slice in data]
    else:
        raise ValueError("Invalid type of data given")
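    # minimal shape sanity check (a sketch, not in the original): after the
    # reshape above each sample should be 2-D, whether `data` arrived as one
    # ndarray or as a list of per-sample arrays
    assert data[0].ndim == 2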

    # take out only a fraction of the test data
    data = data[idx_start:idx_end]
    labels = labels[idx_start:idx_end]
    n_data_total = len(data)

    input_size = data[0].shape[0]
    output_size = meta['output_size']
    dataset = nu.TimeSeriesDataset(data, labels, transform=nu.ToTensor())
    dataloader = DataLoader(dataset, sampler=SequentialSampler(range(n_data_total)),
        batch_size=batch_size, collate_fn=nu.collate_fn, num_workers=0)
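    # note (assumption, since TimeSeriesDataset and collate_fn live in
    # nn_utils): each batch is a dict whose 'data' entry is shaped
    # (batch, n_scat_nodes, data_len), matching the permute used further below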

    rnn = nu.RNN(input_size=meta['input_size'], hidden_size=meta['hidden_size'],
        output_size=meta['output_size'], n_layers=meta['n_layers'],
        bidirectional=meta['bidirectional'])
    if device != 'cpu':
        rnn = rnn.cuda()
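    # assumption: meta['model'] stores one state_dict per training epoch,
    # and idx_min_loss_epoch selects the epoch with the lowest loss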
    rnn.load_state_dict(meta['model'][idx_min_loss_epoch])
    rnn.eval()  # inference only; disables dropout
    #criterion = nn.CrossEntropyLoss(reduction='sum') if classifier else nn.MSELoss(reduction='sum')
    #metric = 'cross_entropy_mean' if classifier else 'rmse'
    loss_sum = {}
    loss_metric = {}
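    # accumulators, presumably keyed by metric name during evaluation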
    from itertools import product  # would normally sit at module top

    # stack the samples into one array shaped (n_data_total, n_features, data_len),
    # where n_features == n_scat_nodes; assumes all sequences share one length
    n_features, data_len = data[0].shape
    input = np.reshape(data, [n_data_total, n_features, data_len])
    input = torch.tensor(input, dtype=torch.get_default_dtype())

    # cartesian product of the per-label condition values;
    # the result is shaped (n_labels, n_conditions)
    labels = np.array(list(product(*samples['labels'])),
                      dtype='float32').swapaxes(0, 1)
    n_labels = len(samples['labels'])
    for idx_label in range(n_labels):
        net = nu.RNN(input_size=meta['input_size'],
                     hidden_size=meta['hidden_size'][idx_label],
                     output_size=1,
                     n_layers=meta['n_layers'],
                     bidirectional=meta['bidirectional'])
        net.load_state_dict(
            meta['weights'][idx_label][idx_epochs[idx_label]])
        net.eval()  # inference only

        dataset = nu.TimeSeriesDataset(input, np.zeros((n_data_total, )))
        dataloader = DataLoader(dataset,
                                batch_size=batch_size,
                                sampler=SequentialSampler(
                                    range(n_data_total)))
        output = []
        for batch in dataloader:
            # permute (batch, n_features, data_len) to
            # (data_len, batch, n_features) before the forward pass
            output.append(
                net(batch['data'].permute([2, 0, 1])).detach().numpy()[:, 0])
        output = np.concatenate(output, axis=0)
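        # sanity check (assumption: nu.RNN returns one (batch, output_size)
        # prediction per sequence, so the concatenated output is flat)
        assert output.shape == (n_data_total, )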

        if transformed:
            # fold the flat outputs back onto the sampling grid; the last
            # remaining axis is averaged out (it presumably indexes
            # repeated realizations per condition)
            output = output.reshape(samples['data'].shape[:-3])
            output_mean = output.mean(axis=-1)
            output_std = output.std(axis=-1)