Example #1
def test_dataset(dataset_name,
                 filenames=[],
                 is_save=True,
                 nb_samples=32,
                 report_shape=False,
                 is_plot=False,
                 **kwargs):
    """ test datasets
    Args:
        dataset_name
        filenames
        nb_sample
        report_shape
    """
    print_pretty_args(test_dataset, locals())
    dsc = getattr(xlearn.datasets, dataset_name)
    with dsc(filenames=filenames) as dataset:
        for i in tqdm(range(nb_samples), ascii=True):
            s = next(dataset)
            if report_shape:
                print(len(s))
                print(len(s[0]))
                print(s[0][0].shape)
            if is_save:
                imgss = []
                for j in range(len(s[0])):
                    imgss.append(dataset.visualize(s[0][j]))
                subplot_images(imgss, is_save=True, filename='images.png')
Example #2
def predict(net, dataset, nb_batch=1):
    imgs = []
    for i in tqdm(range(nb_batch)):
        z = net.gen_latent()
        p = net.predict('Gen', [z])
        imgs += dataset.visualize(p[0])
    subplot_images((imgs, ), is_gray=True, size=3.0, tight_c=0.5)
Example #3
def predict(cfs):
    dataset = MNIST(filenames=cfs)
    net = LSGAN(filenames=cfs)
    net.define_net()
    print(net.pretty_settings())
    net.load('AutoEncoder')
    x = net.gen_latent()
    p = net.predict('Decoder', [x])
    imgs = dataset.visualize(p[0])
    subplot_images((imgs, ), is_gray=True, size=3.0, tight_c=0.5)
Example #4
def predict_and_show(net, dataset):
    s = next(dataset)
    p = net.predict(0, [s[0]])
    p = p[0]
    images = dataset.data_from_sample(s, data_type='data')
    images = dataset.visualize(images)
    labels = dataset.data_from_sample(s, data_type='label')
    labels = dataset.visualize(labels)
    preds = dataset.visualize(p)
    subplot_images((images, labels, preds), is_gray=True)
Example #5
def predict():
    dataset = MNIST(**data_settings)
    if net_name == 'ae':
        net = AutoEncoder1D(**net_settings)
    elif net_name == 'vae':
        net = VAE1D(**net_settings)
    net.define_net()
    print(net.pretty_settings())
    net.load('AutoEncoder')
    s = next(dataset)
    p = net.predict('Decoder', [net.gen_latent()])
    imgs = dataset.visualize(p[0])
    subplot_images((imgs, ), is_gray=True, size=3.0, tight_c=0.5)
Example #6
def show_mainfold(cfs):
    dataset = MNIST(filenames=cfs)
    nb_axis = int(np.sqrt(dataset._batch_size))
    x = np.linspace(-1.5, 1.5, nb_axis)
    y = np.linspace(-1.5, 1.5, nb_axis)
    pos = np.meshgrid(x, y)
    xs = pos[0]
    ys = pos[1]
    xs = xs.reshape([-1])
    ys = ys.reshape([-1])
    net = LSGAN(filenames=cfs)
    net.define_net()
    net.load('Gen')
    latents = np.array([xs, ys]).T
    p = net.predict('Gen', [latents])
    imgs = dataset.visualize(p[0])
    subplot_images((imgs, ),
                   nb_max_row=nb_axis,
                   is_gray=True,
                   size=1.0,
                   tight_c=0.5)
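Note: the meshgrid construction above (also used in Examples #7 and #9) simply turns an nb_axis x nb_axis grid into one 2-D latent point per image. A standalone numpy sketch of the shape bookkeeping, independent of xlearn:

import numpy as np

nb_axis = 4  # small grid for illustration
x = np.linspace(-1.5, 1.5, nb_axis)
y = np.linspace(-1.5, 1.5, nb_axis)
xs, ys = np.meshgrid(x, y)          # two (nb_axis, nb_axis) coordinate arrays
latents = np.array([xs.reshape([-1]), ys.reshape([-1])]).T
print(latents.shape)                # (16, 2): one 2-D latent per grid cell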
Example #7
def show_mainfold():
    dataset = MNIST(**data_settings)
    nb_axis = int(np.sqrt(batch_size))
    x = np.linspace(-1.5, 1.5, nb_axis)
    y = np.linspace(-1.5, 1.5, nb_axis)
    pos = np.meshgrid(x, y)
    xs = pos[0]
    ys = pos[1]
    xs = xs.reshape([-1])
    ys = ys.reshape([-1])
    if net_name == 'ae':
        net = AutoEncoder1D(**net_settings)
    elif net_name == 'vae':
        net = VAE1D(**net_settings)
    net.define_net()
    net.load('AutoEncoder')
    latents = np.array([xs, ys]).T
    p = net.predict('Decoder', [latents])
    imgs = dataset.visualize(p[0])
    subplot_images((imgs, ), nb_max_row=nb_axis,
                   is_gray=True, size=1.0, tight_c=0.5)
Example #8
def test_dataset(dataset,
                 nb_images=64,
                 data_type='data',
                 settings=None,
                 **kwargs):
    if not isinstance(data_type, (list, tuple)):
        data_type = [data_type]
    imgs_all = empty_list(len(data_type))
    for _ in range(int(np.ceil(nb_images / dataset.batch_size))):
        s = next(dataset)
        for i, ctype in enumerate(data_type):
            img_tensor = dataset.data_from_sample(s, data_type=ctype)
            imgs = dataset.visualize(img_tensor)
            if imgs_all[i] is None:
                imgs_all[i] = imgs
            else:
                imgs_all[i].extend(imgs)
    subplot_images(imgs_all, is_gray=True)
    for imgs in imgs_all:
        data = np.array(imgs)
        print(data.shape)
        print("mean:{0:10f}, max:{1:10f}, min:{2:10f}".format(
            np.mean(data), np.max(data), np.min(data)))
Example #9
def show_data_mainfold(cfs):
    """ show latent main fold for data """
    dataset = MNIST(filenames=cfs)
    # nb_axis = int(np.sqrt(dataset._batch_size))
    nb_axis = 32
    x = np.linspace(-5.0, 20.0, nb_axis)
    y = np.linspace(-5.0, 20.0, nb_axis)
    pos = np.meshgrid(x, y)
    xs = pos[0]
    ys = pos[1]
    xs = xs.reshape([-1])
    ys = ys.reshape([-1])
    net = AAE1D(filenames=cfs)
    net.define_net()
    net.load('Gen')
    latents = np.array([xs, ys]).T
    pall = None
    nb_latents = latents.shape[0]
    nb_batches = int(np.ceil(nb_latents / net.batch_size))
    nb_pad = nb_batches * net.batch_size - nb_latents
    latents_pad = np.pad(latents, ((0, nb_pad), (0, 0)), mode='constant')
    for i in tqdm(range(nb_batches)):
        data_batch = latents_pad[i * net.batch_size:(i + 1) *
                                 net.batch_size, :]
        p = net.predict('Gen', [data_batch])
        if pall is None:
            pall = p[0]
        else:
            pall = np.concatenate((pall, p[0]))
    p = pall[:nb_latents, ...]
    imgs = dataset.visualize(p)
    subplot_images((imgs, ),
                   nb_max_row=nb_axis,
                   is_gray=True,
                   size=1.0,
                   tight_c=0.5)
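Example #9 pushes an arbitrary number of latent points through a network with a fixed batch size by padding, predicting batch by batch, and trimming the padded rows. A generic sketch of that bookkeeping (not part of xlearn; predict_fn is a hypothetical stand-in for net.predict):

import numpy as np

def predict_in_batches(predict_fn, inputs, batch_size):
    # Pad the first axis up to a multiple of batch_size.
    nb_inputs = inputs.shape[0]
    nb_batches = int(np.ceil(nb_inputs / batch_size))
    nb_pad = nb_batches * batch_size - nb_inputs
    pad_width = ((0, nb_pad),) + ((0, 0),) * (inputs.ndim - 1)
    padded = np.pad(inputs, pad_width, mode='constant')
    # Predict one full batch at a time, then drop the padded rows.
    outputs = [predict_fn(padded[i * batch_size:(i + 1) * batch_size])
               for i in range(nb_batches)]
    return np.concatenate(outputs)[:nb_inputs, ...]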
Example #10
batch_size = 32
with Sinograms(
        file_data="/home/hongxwing/Workspace/Datas/shepplogan_sinograms.h5",
        is_gray=False,
        is_batch=True,
        batch_size=batch_size,
        is_down_sample=True,
        down_sample_ratio=[1, 4],
        is_padding=True,
        is_4d=True) as dataset:
    s = next(dataset)
    imgs = dataset.data_from_sample(s, data_type='data')
    imgs = dataset.visualize(imgs)
    plt.figure(figsize=(8, batch_size // 8))
    subplot_images((imgs, ), is_gray=True)
    s = next(dataset)
    imgs = dataset.data_from_sample(s, data_type='label')
    imgs = dataset.visualize(imgs)
    plt.figure(figsize=(8, batch_size // 8))
    subplot_images((imgs, ), is_gray=True)
net = SRNetInterp(shape_i=(365, 61, 1),
                  shape_o=(365, 244, 1),
                  down_sample_ratio=(1, 4, 1))
net.define_net()
with Sinograms(
        file_data="/home/hongxwing/Workspace/Datas/shepplogan_sinograms.h5",
        is_gray=False,
        is_batch=True,
        batch_size=batch_size,
        is_down_sample=True,
Example #11
def predict_sr(net_name=None,
               dataset_name=None,
               path_save='./predict',
               is_visualize=False,
               is_save=False,
               save_filename='predict.png',
               filenames=[],
               load_step=None,
               **kwargs):
    print_pretty_args(predict_sr, locals())
    dsc = getattr(xlearn.datasets, dataset_name)
    netc = getattr(xlearn.nets, net_name)
    if load_step is None:
        files = os.listdir('.')
        save_re = r'save-.*-([0-9]+)'
        prog = re.compile(save_re)
        max_step = -1
        for f in files:
            m = prog.match(f)
            if m:
                step = int(m.group(1))
                if step > max_step:
                    max_step = step
        load_step = max_step

    with dsc(filenames=filenames) as dataset:
        net_settings = {'filenames': filenames}
        if load_step is not None:
            net_settings.update({'init_step': load_step})
        net = netc(**net_settings)
        net.define_net()
        click.echo(net.pretty_settings())
        if load_step is not None:
            net.load(step=load_step)
        net_interp = xlearn.nets.SRInterp(filenames=filenames)
        net_interp.define_net()
        s = next(dataset)
        p = net.predict('sr', s[0])
        p_it = net_interp.predict('sr', s[0])
        _, hr_t = net.predict('itp', s[0])
        res_sr = net.predict('res_out', s[0])
        res_sr = np.abs(res_sr)
        res_it = net.predict('res_itp', s[0])
        res_it = np.abs(res_it)
        hr = dataset.visualize(hr_t, is_no_change=True)
        lr = dataset.visualize(s[0][-1], is_no_change=True)
        sr = dataset.visualize(p, is_no_change=True)
        it = dataset.visualize(p_it, is_no_change=True)
        res_sr_l = dataset.visualize(res_sr, is_no_change=True)
        res_it_l = dataset.visualize(res_it, is_no_change=True)
        window = [(-0.5, 0.5), (-0.5, 0.5), (-0.5, 0.5), (-0.5, 0.5),
                  (-0.01, 0.01), (-0.01, 0.01)]
        subplot_images((hr, lr, sr, it, res_sr_l, res_it_l),
                       size=3.0,
                       tight_c=0.5,
                       is_save=True,
                       filename=save_filename,
                       window=window)

        # np.save('predict_hr.npy', s[1][0])
        # np.save('predict_lr.npy', s[0][1])
        # np.save('predict_sr.npy', p)
        # np.save('predict_res_sr.npy', res_sr)
        # np.save('predict_res_it.npy', res_it)
        res_sr_v = np.sqrt(np.mean(np.square(res_sr)))
        res_it_v = np.sqrt(np.mean(np.square(res_it)))
        print('res_sr: {0:10f}, res_it: {1:10f}'.format(res_sr_v, res_it_v))
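All of the examples above hand their images to subplot_images, whose implementation is not shown on this page. As a rough, hypothetical stand-in written in plain matplotlib (it only approximates the keyword arguments seen above: nb_max_row, is_gray, size, is_save, filename), the grid layout could be sketched as:

import numpy as np
import matplotlib.pyplot as plt

def plot_image_grid(image_groups, nb_max_row=8, is_gray=True, size=2.0,
                    is_save=False, filename='images.png'):
    # Lay out each group of 2-D arrays on its own rows of one subplot grid.
    rows_per_group = [int(np.ceil(len(g) / nb_max_row)) for g in image_groups]
    nb_rows = sum(rows_per_group)
    fig, axes = plt.subplots(nb_rows, nb_max_row,
                             figsize=(size * nb_max_row, size * nb_rows),
                             squeeze=False)
    row0 = 0
    for group, nb_r in zip(image_groups, rows_per_group):
        for k, img in enumerate(group):
            ax = axes[row0 + k // nb_max_row][k % nb_max_row]
            ax.imshow(img, cmap='gray' if is_gray else None)
        row0 += nb_r
    for ax in axes.ravel():
        ax.axis('off')
    if is_save:
        fig.savefig(filename)
    plt.show()

# Usage, e.g.: plot_image_grid(([np.random.rand(28, 28) for _ in range(16)],))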