Example #1
def make_srcnn_rgb_dataset_based_on_cifar10():
    print('making SRCNN-RGB dataset using CIFAR-10...')
    # assumes module-level imports (numpy as np, keras.datasets.cifar10) and the
    # helper get_dataset_part, sketched after this example
    (Y_train, _), (Y_test, _) = get_dataset_part(cifar10.load_data(),
                                                 train_part=0.2)

    from image_handler import get_image, get_image_data, zoom_out_image, zoom_up_image

    # X_train
    print('making X_train list...')
    X_train = []
    for item in Y_train:
        image_data = item.tolist()
        image = get_image(image_data, mode='RGB')
        zoomed_out_image = zoom_out_image(image, times=2)
        zoomed_up_image = zoom_up_image(zoomed_out_image, times=2)
        zoomed_up_image_data = get_image_data(zoomed_up_image)
        X_train.append(zoomed_up_image_data)

    # X_test
    print('making X_test list...')
    X_test = []
    for item in Y_test:
        image_data = item.tolist()
        image = get_image(image_data, mode='RGB')
        zoomed_out_image = zoom_out_image(image, times=2)
        zoomed_up_image = zoom_up_image(zoomed_out_image, times=2)
        zoomed_up_image_data = get_image_data(zoomed_up_image)
        X_test.append(zoomed_up_image_data)

    dtype = 'uint8'
    dataset = (np.array(X_train, dtype=dtype), np.array(Y_train, dtype=dtype)), \
              (np.array(X_test, dtype=dtype), np.array(Y_test, dtype=dtype))
    print('saving dataset to srcnn-rgb-cifar10-dataset.npz...')
    np.savez('datasets/srcnn-rgb-cifar10-dataset.npz', dataset)
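
The helper get_dataset_part is not shown in these examples. A minimal sketch of what it might look like, assuming it simply keeps the first train_part fraction of each split returned by cifar10.load_data():

def get_dataset_part(dataset, train_part=1.0):
    # hypothetical helper: keep only the first `train_part` fraction of each split
    (X_train, y_train), (X_test, y_test) = dataset
    n_train = int(len(X_train) * train_part)
    n_test = int(len(X_test) * train_part)
    return (X_train[:n_train], y_train[:n_train]), \
           (X_test[:n_test], y_test[:n_test])
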
Example #2
def compare_images():
    # assumes module-level context: tkinter widgets (Toplevel, Frame, Label), the
    # ToolbarButton class, numpy as np, the image_handler module, the compare_psnr /
    # compare_ssim / compare_mse metrics, and the first_image / second_image globals
    global first_image
    global second_image

    if first_image is None:
        return
    if second_image is None:
        return

    window = Toplevel()
    window.title('Measurement results')

    footer = Frame(window)
    footer.pack(side=BOTTOM, expand=NO, fill=X)

    ok_button = ToolbarButton(footer, text='OK')
    ok_button.pack(side=TOP, fill=Y, padx=10, pady=10)
    ok_button.config(command=window.destroy)
    ok_button.config(width=20)

    true_image = image_handler.convert_to_rgb(first_image)
    test_image = image_handler.convert_to_rgb(second_image)
    true_image_data = image_handler.get_image_data(true_image)
    test_image_data = image_handler.get_image_data(test_image)
    true_image_data = np.array(true_image_data, dtype='uint8')
    test_image_data = np.array(test_image_data, dtype='uint8')

    # print(true_image_data.shape)
    # print(test_image_data.shape)

    psnr = compare_psnr(true_image_data, test_image_data)
    ssim = compare_ssim(true_image_data, test_image_data, multichannel=True)
    mse = compare_mse(true_image_data, test_image_data)
    # print(true_image_data.shape)
    # print(image_handler.get_image_head_data(first_image))

    label_text = 'PSNR (peak signal-to-noise ratio): \n' + str(psnr) + ' dB' + '\n' + \
                 'SSIM (structural similarity): \n' + str(ssim) + '\n' + \
                 'MSE (mean squared error): \n' + str(mse)

    # TODO IFC (information fidelity criterion), NQM (noise quality measure),
    # TODO WPSNR (weighted peak signal-to-noise ratio),
    # TODO MS-SSIM (multi-scale structural similarity index)

    label = Label(window,
                  text=label_text,
                  font=COMPARE_IMAGES_RESULT_LABEL_FONT)
    label.pack(side=TOP, fill=BOTH, expand=YES, padx=20, pady=20)

    window.protocol('WM_DELETE_WINDOW', lambda: None)
    window.grab_set()
    window.focus_set()
    window.wait_window()
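
compare_psnr, compare_ssim and compare_mse come from the older skimage.measure module; recent scikit-image releases renamed them and replaced the multichannel flag. A sketch of the same three measurements against the renamed API (an assumption about the installed scikit-image version):

from skimage.metrics import (peak_signal_noise_ratio,
                             structural_similarity,
                             mean_squared_error)

def compare_image_arrays(true_image_data, test_image_data):
    # same metrics as above, via the renamed scikit-image API
    psnr = peak_signal_noise_ratio(true_image_data, test_image_data)
    # channel_axis=-1 replaces the old multichannel=True flag for RGB arrays
    ssim = structural_similarity(true_image_data, test_image_data, channel_axis=-1)
    mse = mean_squared_error(true_image_data, test_image_data)
    return psnr, ssim, mse
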
Example #3
def make_pasadena_dataset():
    print('making PASADENA dataset...')

    # assumes numpy as np and PIL's Image are imported at module level
    from image_handler import get_image_data, get_image, zoom_out_image, zoom_up_image

    path = 'saved_images/Pasadena Dataset/'
    filename_prefix = path + 'dcp_24'
    size = 10
    result = []

    for i in range(size):
        filename = filename_prefix + str(12 + i) + '.jpg'
        image = Image.open(filename)
        print('Image', filename.rpartition('/')[2], 'opened')
        image_data = get_image_data(image)
        result.append(image_data)

    print('making Y_train and Y_test...')
    train_size = 7
    Y_train, Y_test = result[:train_size], result[train_size:]

    # X_train
    print('making X_train list...')
    X_train = []
    for item in Y_train:
        image_data = item  # .tolist()
        image = get_image(image_data, mode='RGB')
        zoomed_out_image = zoom_out_image(image, times=2)
        zoomed_up_image = zoom_up_image(zoomed_out_image, times=2)
        zoomed_up_image_data = get_image_data(zoomed_up_image)
        X_train.append(zoomed_up_image_data)

    # X_test
    print('making X_test list...')
    X_test = []
    for item in Y_test:
        image_data = item  # .tolist()
        image = get_image(image_data, mode='RGB')
        zoomed_out_image = zoom_out_image(image, times=2)
        zoomed_up_image = zoom_up_image(zoomed_out_image, times=2)
        zoomed_up_image_data = get_image_data(zoomed_up_image)
        X_test.append(zoomed_up_image_data)

    dtype = 'uint8'
    dataset = (np.array(X_train, dtype=dtype), np.array(Y_train, dtype=dtype)), \
              (np.array(X_test, dtype=dtype), np.array(Y_test, dtype=dtype))
    print('saving dataset to pasadena-dataset.npz...')
    np.savez_compressed('datasets/pasadena-dataset.npz', dataset)
    print('pasadena-dataset.npz saved')
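
Because the nested tuple is passed to np.savez as a single positional argument, it ends up as a pickled object array under the default key 'arr_0' (at least with the older NumPy these scripts appear to target). A sketch of reading such a file back; allow_pickle=True is required by newer NumPy versions for pickled arrays:

import numpy as np

def load_srcnn_dataset(path='datasets/pasadena-dataset.npz'):
    # the tuple was saved positionally, so it lives under the default 'arr_0' key
    with np.load(path, allow_pickle=True) as data:
        (X_train, Y_train), (X_test, Y_test) = data['arr_0']
    return (X_train, Y_train), (X_test, Y_test)
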
Example #4
def make_hundred_dataset():
    print('making HUNDRED dataset...')

    # assumes numpy as np and PIL's Image are imported at module level
    from image_handler import get_image_data, get_image, zoom_out_image, zoom_up_image

    path = 'images/Hundred Dataset/images/'
    size = 53
    result = []

    for i in range(size):
        filename = path + str(1 + i) + '.png'
        image = Image.open(filename)
        print('Image', filename.rpartition('/')[2], 'opened')
        image_data = get_image_data(image)
        result.append(image_data)

    print('making Y_train and Y_test...')
    train_size = 40
    Y_train, Y_test = result[:train_size], result[train_size:]

    # X_train
    print('making X_train list...')
    X_train = []
    for item in Y_train:
        image_data = item  # .tolist()
        image = get_image(image_data, mode='RGB')
        zoomed_out_image = zoom_out_image(image, times=2)
        zoomed_up_image = zoom_up_image(zoomed_out_image, times=2)
        zoomed_up_image_data = get_image_data(zoomed_up_image)
        X_train.append(zoomed_up_image_data)

    # X_test
    print('making X_test list...')
    X_test = []
    for item in Y_test:
        image_data = item  # .tolist()
        image = get_image(image_data, mode='RGB')
        zoomed_out_image = zoom_out_image(image, times=2)
        zoomed_up_image = zoom_up_image(zoomed_out_image, times=2)
        zoomed_up_image_data = get_image_data(zoomed_up_image)
        X_test.append(zoomed_up_image_data)

    dtype = 'uint8'
    dataset = (np.array(X_train, dtype=dtype), np.array(Y_train, dtype=dtype)), \
              (np.array(X_test, dtype=dtype), np.array(Y_test, dtype=dtype))
    print('saving dataset to hundred-dataset.npz...')
    np.savez('datasets/hundred-dataset.npz', dataset)  # np.savez_compressed would shrink the file
    print('hundred-dataset.npz saved')
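
The zoom_out_image and zoom_up_image helpers that every builder relies on live in the unshown image_handler module. A minimal sketch of what they might do with Pillow, assuming plain bicubic resampling by an integer factor:

from PIL import Image

def zoom_out_image(image, times=2):
    # hypothetical implementation: shrink both dimensions by the given factor
    width, height = image.size
    return image.resize((width // times, height // times), Image.BICUBIC)

def zoom_up_image(image, times=2):
    # hypothetical implementation: enlarge both dimensions by the given factor
    width, height = image.size
    return image.resize((width * times, height * times), Image.BICUBIC)
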
Example #5
def handle_mnist():
    from image_handler import get_image, get_image_data, zoom_out_image
    from keras.datasets import mnist
    (X_train, y_train), (X_test, y_test) = mnist.load_data()

    dataset_size = 60000

    dataset_X, dataset_Y = [], []
    for i, X_train_item in enumerate(X_train[:dataset_size]):
        image_data = X_train_item.tolist()
        image = get_image(image_data, mode='L')
        zoomed_out_image = zoom_out_image(image, times=2)
        zoomed_out_image_data = get_image_data(zoomed_out_image)

        dataset_X.append(zoomed_out_image_data)
        dataset_Y.append(image_data)

        print('image ' + str(i) + ', height : ' + str(len(image_data)),
              'width : ' + str(len(image_data[0])),
              sep=', ')
        print('zoomed_out_image ' + str(i) + ', height : ' +
              str(len(zoomed_out_image_data)),
              'width : ' + str(len(zoomed_out_image_data[0])),
              sep=', ')
        # print('image_data :', image_data)
        # print('zoomed_out_image_data :', zoomed_out_image_data)
        # image.show()
        # zoomed_out_image.show()

    dataset = (dataset_X, dataset_Y)

    import pickle
    with open('datasets/mnist-dataset.pkl', 'wb') as mnist_dataset_file:
        pickle.dump(dataset, mnist_dataset_file)
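
Unlike the .npz builders, handle_mnist serialises plain Python lists with pickle. Reading the file back mirrors what was written:

import pickle

def load_mnist_dataset(path='datasets/mnist-dataset.pkl'):
    # dataset_X holds the 2x-downscaled digits, dataset_Y the original 28x28 digits
    with open(path, 'rb') as mnist_dataset_file:
        dataset_X, dataset_Y = pickle.load(mnist_dataset_file)
    return dataset_X, dataset_Y
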
Example #6
def make_srcnn_dataset_based_on_mnist():
    print('making SRCNN dataset using MNIST...')

    from image_handler import get_image, get_image_data, zoom_out_image, zoom_up_image
    from keras.datasets import mnist  # needed for load_data() below; numpy as np is also assumed

    (Y_train, _), (Y_test, _) = mnist.load_data()  # 60000 train / 10000 test images

    # making X_train list
    print('making X_train list...')
    X_train = []
    for item in Y_train:
        image_data = item.tolist()
        image = get_image(image_data, mode='L')
        zoomed_out_image = zoom_out_image(image, times=2)
        zoomed_up_image = zoom_up_image(zoomed_out_image, times=2)
        zoomed_up_image_data = get_image_data(zoomed_up_image)
        X_train.append(zoomed_up_image_data)

    # making X_test list
    print('making X_test list...')
    X_test = []
    for item in Y_test:
        image_data = item.tolist()
        image = get_image(image_data, mode='L')
        zoomed_out_image = zoom_out_image(image, times=2)
        zoomed_up_image = zoom_up_image(zoomed_out_image, times=2)
        zoomed_up_image_data = get_image_data(zoomed_up_image)
        X_test.append(zoomed_up_image_data)

    # X_train, X_test = [], []
    # for X, Y in [(X_train, Y_train), (X_test, Y_test)]:
    #     for item in Y:
    #         image_data = item.tolist()
    #         image = get_image(image_data, mode='L')
    #         zoomed_out_image = zoom_out_image(image, times=2)
    #         zoomed_up_image = zoom_up_image(zoomed_out_image, times=2)
    #         zoomed_up_image_data = get_image_data(zoomed_up_image)
    #         X.append(zoomed_up_image_data)

    dtype = 'uint8'
    dataset = (np.array(X_train, dtype=dtype), np.array(Y_train, dtype=dtype)), \
              (np.array(X_test, dtype=dtype), np.array(Y_test, dtype=dtype))
    print('saving dataset to srcnn-mnist-dataset.npz...')
    np.savez('datasets/srcnn-mnist-dataset.npz', dataset)
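
Finally, the remaining image_handler helpers used throughout, get_image and get_image_data, are also not shown. A sketch of plausible Pillow/NumPy implementations (assumptions, not the actual module):

import numpy as np
from PIL import Image

def get_image(image_data, mode='L'):
    # hypothetical helper: build a PIL image from a nested list or array of pixels
    return Image.fromarray(np.asarray(image_data, dtype='uint8'), mode=mode)

def get_image_data(image):
    # hypothetical helper: return the pixel values as a nested Python list
    return np.asarray(image).tolist()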