Example #1
        def batched_gen(blur_name,
                        truth_name,
                        batch_size,
                        chunk_factor=10,
                        shuffle=False):
            # Nested generator: `self.h5fp` (an open h5py.File), `math`, and
            # `sample_idxs` come from the enclosing scope. The HDF5 datasets are
            # read in chunks of `batch_size * chunk_factor` rows so only one
            # chunk needs to sit in memory at a time.
            fp = self.h5fp
            data_len = len(fp[blur_name])
            chunk_size = batch_size * chunk_factor
            total_chunks = math.ceil(data_len / chunk_size)

            for chunk_idx in range(total_chunks):
                start = chunk_idx * chunk_size
                end = min(start + chunk_size, data_len)

                # Slicing an h5py dataset returns an in-memory numpy array.
                blur_chunk = fp[blur_name][start:end]
                truth_chunk = fp[truth_name][start:end]

                if shuffle:
                    # sample_idxs with a split of 0 is used only for its shuffled
                    # index array; the (empty) second split is discarded.
                    rand_idx, _ = sample_idxs(end - start, 0)
                    blur_chunk = blur_chunk[rand_idx]
                    truth_chunk = truth_chunk[rand_idx]

                # Emit fixed-size batches from the current chunk; the last batch
                # of each chunk may be smaller than batch_size.
                chunk_len = len(blur_chunk)
                total_batches = math.ceil(chunk_len / batch_size)
                for batch_idx in range(total_batches):
                    batch_start = batch_idx * batch_size
                    batch_end = min(batch_start + batch_size, chunk_len)

                    blur_batch = blur_chunk[batch_start:batch_end]
                    truth_batch = truth_chunk[batch_start:batch_end]
                    yield blur_batch, truth_batch
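
A minimal consumption sketch, assuming batched_gen is defined inside a data-loader class that keeps an open h5py.File on self.h5fp; the H5DataLoader name and file path below are illustrative, not from the source (the dataset names match Example #3):

loader = H5DataLoader('save/data.h5')  # hypothetical wrapper class
for blur_batch, truth_batch in loader.batched_gen('train_blur',
                                                  'train_truth',
                                                  batch_size=32,
                                                  shuffle=True):
    ...  # e.g. run one training step on (blur_batch, truth_batch)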
Example #2
def build_dataset(n_samps_per_image=10, train_test_split=0.2):
    # Split the image indices into train/test sets with the `sample_idxs`
    # helper from internal.util; `images`, `decoded_images`, `IMAGE_DIMS`, and
    # `_dataset_gen` are module-level names (see Example #4).
    train_idxs, test_idxs = sample_idxs(images.shape[0], train_test_split)

    # zip(*...) transposes the generator's output so the paired elements end up
    # stacked along the first axis of the resulting array.
    train_dataset = np.array(
        list(zip(*_dataset_gen(n_samps_per_image, train_idxs))))
    # The test set pairs the decoded images with the corresponding originals,
    # stacked along a new leading axis.
    test_dataset = np.stack([
        decoded_images[test_idxs].reshape(-1, *IMAGE_DIMS),
        images[test_idxs].reshape(-1, *IMAGE_DIMS)
    ], axis=0)

    # Append a trailing channel axis to both arrays.
    train_dataset = np.expand_dims(train_dataset, -1)
    test_dataset = np.expand_dims(test_dataset, -1)

    return (train_dataset, test_dataset)
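
A hedged usage sketch; images, decoded_images, IMAGE_DIMS, _dataset_gen, and sample_idxs are module-level names defined elsewhere (see Example #4), and the test-set shape in the comment is only what the code above implies:

train_pairs, test_pairs = build_dataset(n_samps_per_image=10,
                                        train_test_split=0.2)
# test_pairs stacks (decoded, original) along axis 0 and ends with a channel
# axis: shape (2, n_test_images, *IMAGE_DIMS, 1).
decoded_side, original_side = test_pairs[0], test_pairs[1]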
Example #3
    def load_data(self, path):
        # Load the full train/test arrays from the HDF5 file into memory.
        with h5py.File(path, 'r') as fp:
            example = fp['train_blur'][:]
            label = fp['train_truth'][:]
            test_example = fp['test_blur'][:]
            test_label = fp['test_truth'][:]

        # Carve a validation split out of the training data, then wrap each
        # split in a batched tf.data pipeline (only the training set is shuffled).
        train_idx, val_idx = sample_idxs(example.shape[0],
                                         self.params['train_val_split'])
        self.train_ds = tf.data.Dataset.from_tensor_slices((example[train_idx], label[train_idx])) \
                                       .shuffle(256) \
                                       .batch(self.params['batch_size'])
        self.val_ds = tf.data.Dataset.from_tensor_slices((example[val_idx], label[val_idx])) \
                                     .batch(self.params['batch_size'])

        self.test_ds = tf.data.Dataset.from_tensor_slices((test_example, test_label)) \
                                      .batch(self.params['batch_size'])
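
Once load_data has populated the three pipelines, they can be passed straight to Keras. A minimal sketch only: self.model and the 'epochs' key in self.params are assumptions, not shown in the example above.

    def train(self):
        # Hypothetical companion method; assumes self.model is a compiled
        # tf.keras model and self.params carries an 'epochs' entry.
        self.model.fit(self.train_ds,
                       validation_data=self.val_ds,
                       epochs=self.params['epochs'])
        return self.model.evaluate(self.test_ds)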
Example #4
import matplotlib.pyplot as plt
import numpy as np

from matplotlib.backends.backend_pdf import PdfPages
from matplotlib.colors import Normalize
from scipy.ndimage import gaussian_filter
from sklearn.gaussian_process import GaussianProcessRegressor
from tqdm import tqdm

from internal.util import sample_idxs

IMAGE_DIMS = (95, 146)

# <codecell>
train, test = sample_idxs(20000, 0.1)

# <codecell>
# Arrays produced by earlier pipeline stages (per the file names): the raw test
# images, a smoothed copy, and a decoded version of the smoothed images;
# `residuals` is their element-wise difference.
images = np.load('save/test_images.npy')
smoothed_images = np.load('save/smooth_test_images.npy')
decoded_images = np.load('save/smooth_test_decoded.npy')
residuals = smoothed_images - decoded_images
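

# <codecell>
# `sample_idxs` (imported above from internal.util) is never shown in these
# examples. Judging from its call sites it takes a count and a hold-out
# fraction and returns two shuffled index arrays. The stand-in below is a
# hypothetical sketch for illustration only, not the real helper.
def _sample_idxs_sketch(n, split):
    idx = np.random.permutation(n)
    n_held_out = int(n * split)
    return idx[n_held_out:], idx[:n_held_out]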


# <codecell>
def build_dataset(n_samps_per_image=10, train_test_split=0.2):
    train_idxs, test_idxs = sample_idxs(images.shape[0], train_test_split)

    train_dataset = np.array(
        list(zip(*_dataset_gen(n_samps_per_image, train_idxs))))
    test_dataset = np.stack([