Example #1
def prepare_line():
    train_data_names = read_data_names('./train_labels')
    test_data_names = read_data_names('./test_labels')

    train_images = read_images('./train_images', data_names=train_data_names)
    train_labels = read_labels('./train_labels')
    # train_data_names = read_data_names('./train_labels')

    test_labels = read_labels('./test_labels')
    test_images = read_images('./test_images', test_data_names)
    # test_data_names = read_data_names('./test_labels')

    for ind, image in enumerate(train_images):
        print(ind)
        H, W, C = image.shape
        lab = train_labels[ind]
        seg = draw_line(lab, H, W)
        np.save('./train_lines/{}.npy'.format(ind), seg)

    for ind, image in enumerate(test_images):
        print('test {}'.format(ind))
        H, W, C = image.shape
        lab = test_labels[ind]
        seg = draw_line(lab, H, W)
        np.save('./test_lines/{}.npy'.format(ind), seg)

    seg = np.load('./train_labels/1.npy')
    ln = np.load('./train_lines/1.npy')
    plt.figure()
    plot_image(seg, segmap=ln)
    plt.show()
Example #2
def get_loader_train_val(batch_size_tr=64, batch_size_val=1, shuffle=True):
    data_path = './train_images'
    label_path = './train_labels'

    val_ratio = 0.1
    transform_list = [SegResize((512, 256))]

    data_names = read_data_names(label_path)
    segmap_names = read_data_names(label_path, title='segmap_names')

    N_all = len(data_names)
    N_val = int(N_all * val_ratio)
    N_train = N_all - N_val
    # get train and validation data set
    data_names_train = []
    data_names_val = []
    segmap_names_train = []
    segmap_names_val = []

    # cache the shuffle permutation on disk so the train/val split is reproducible across runs
    if not os.path.exists(os.path.join('./', 'val_permutation.npy')):
        print('reset permutation')
        permutation = np.random.permutation(N_all)
        np.save(os.path.join('./', 'val_permutation.npy'), permutation)
    else:
        permutation = np.load(os.path.join('./', 'val_permutation.npy'))

    for ind in permutation[:N_train]:
        data_names_train.append(data_names[ind])
        segmap_names_train.append(segmap_names[ind])

    for ind in permutation[N_train:]:
        data_names_val.append(data_names[ind])
        segmap_names_val.append(segmap_names[ind])

    #########################
    dset_train = SegDataset(data_location=data_path,
                            segmap_location=label_path,
                            data_names=data_names_train,
                            segmap_names=segmap_names_train,
                            transform_list=transform_list)
    dset_val = SegDataset(data_location=data_path,
                          segmap_location=label_path,
                          data_names=data_names_val,
                          segmap_names=segmap_names_val,
                          transform_list=transform_list)

    loader_train = DataLoader(dataset=dset_train,
                              batch_size=batch_size_tr,
                              shuffle=shuffle)
    loader_val = DataLoader(dataset=dset_val,
                            batch_size=batch_size_val,
                            shuffle=False)
    return loader_train, loader_val
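
A minimal usage sketch for the loader pair above, for illustration only; how each batch unpacks depends on SegDataset.__getitem__, which is not shown here.

# Hypothetical usage of the train/validation loaders returned above.
# The per-batch structure is an assumption (it depends on SegDataset.__getitem__).
loader_train, loader_val = get_loader_train_val(batch_size_tr=64, batch_size_val=1, shuffle=True)

for batch in loader_train:
    pass  # e.g. images, segmaps = batch (assumed structure); run the training step here

for batch in loader_val:
    pass  # run the validation step here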
Example #3
def preprocessing():

    # do the data preprocessing work here

    #path declaration
    # image_dest_resized = './resized_images'
    # label_dest_resized = './resized_labels'

    image_path = './highres_images'
    label_path = './highres_labels'

    data_names = read_data_names(label_location=label_path)
    labels = read_labels(label_path)

    #testing image and labels
    # plt.figure()
    # for i,ind in enumerate([35,67,25,235]):
    #     plt.subplot(221+i)
    #     image = Image.open(os.path.join(image_path, data_names[ind]))
    #     segmap = np.load(os.path.join(label_path, data_names[ind] + '.npy'))
    #     plot_image(image, coord = labels[ind], segmap=segmap )
    # plt.show()

    # #pad image
    # resized_H = 512
    # desired_W = 256

    label_list = []

    for data_no, data_name in enumerate(data_names):
        image = Image.open(os.path.join(image_path, data_name))
        np_image = np.asarray(image)
        label = labels[data_no]

        H, W = np_image.shape
        resized_H = H
        desired_W = W
        # resize_ratio = resized_H / H
        # left_pad = int((desired_W - int(W * resize_ratio)) / 2)
        # right_pad = desired_W - int(W * resize_ratio) - left_pad
        #
        # label_rev = label.reshape(-1,2)
        # label_rev *= resize_ratio
        # label_rev[:,0] += left_pad
        # label_rev = label_rev.reshape(-1)
        # label_list.append(label_rev)
        #
        # im_resize = image.resize((int(W * resize_ratio), int(resized_H)))
        # im_pad = ImageOps.expand(im_resize, (left_pad, 0, right_pad, 0))
        # im_pad.save(os.path.join(image_dest_resized, data_name))

        segmap = draw_seg(label, resized_H, desired_W)
        np.save(os.path.join(label_path, data_name + '.npy'), segmap)

        if data_no % 100 == 0:
            print(np.asarray(image).shape)
            plt.figure()
            plot_image(image, label, segmap=segmap)
    plt.show()
Example #4
def get_lineloader_train_val(batch_size_tr, batch_size_val = 1, shuffle = True):
    segmap_path = './train_labels'
    line_path = './train_lines'

    segmap_names = read_data_names(segmap_path, 'segmap_names')
    line_names = read_data_names(line_path, 'line_names')

    val_ratio = 0.1  # assumed split ratio, matching the other loaders in this module
    N_all = len(segmap_names)
    N_val = int(N_all * val_ratio)
    N_train = N_all - N_val
    # get train and validation data set

    segmap_names_train = []
    segmap_names_val = []
    line_names_train = []
    line_names_val = []

    transform_list = [SegResize((512,256))]

    if not os.path.exists(os.path.join('./', 'val_permutation.npy')):
        print('reset permutation')
        permutation = np.random.permutation(N_all)
        np.save(os.path.join('./', 'val_permutation.npy'), permutation)
    else:
        permutation = np.load(os.path.join('./', 'val_permutation.npy'))

    for ind in permutation[:N_train]:
        segmap_names_train.append(segmap_names[ind])
        line_names_train.append(line_names[ind])

    for ind in permutation[N_train:]:
        segmap_names_val.append(segmap_names[ind])
        line_names_val.append(line_names[ind])

    #########################
    dset_train = LineDataset(segmap_location = segmap_path, line_location = line_path,
                             segmap_names = segmap_names_train, line_names = line_names_train,
                             transform_list = transform_list)
    dset_val = LineDataset(segmap_location = segmap_path, line_location = line_path,
                             segmap_names = segmap_names_val, line_names = line_names_val,
                             transform_list = transform_list)

    loader_train = DataLoader(dataset=dset_train, batch_size=batch_size_tr, shuffle=shuffle)
    loader_val = DataLoader(dataset=dset_val, batch_size=batch_size_val, shuffle=False)
    return loader_train, loader_val
Example #5
def prepare_seg():
    train_data_names = read_data_names('./train_labels')
    test_data_names = read_data_names('./test_labels')

    train_images = read_images('./train_images', data_names=train_data_names)
    train_labels = read_labels('./train_labels')
    # train_data_names = read_data_names('./train_labels')

    test_labels = read_labels('./test_labels')
    test_images = read_images('./test_images', test_data_names)
    # test_data_names = read_data_names('./test_labels')

    for ind, image in enumerate(train_images):
        H, W, C = image.shape
        lab = train_labels[ind]
        seg = draw_seg(lab, H, W)
        np.save('./train_labels/{}.npy'.format(ind), seg)

    for ind, image in enumerate(test_images):
        H, W, C = image.shape
        lab = test_labels[ind]
        seg = draw_seg(lab, H, W)
        np.save('./test_labels/{}.npy'.format(ind), seg)
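
A note on ordering, inferred from the paths above: prepare_seg() rasterizes each coordinate label into <index>.npy under ./train_labels and ./test_labels, while prepare_line() in Example #1 writes the corresponding line maps under ./train_lines and ./test_lines (and loads ./train_labels/1.npy for its sanity plot). A possible one-off preparation sequence, as a sketch:

# Assumed preparation order (inferred from the file paths used above):
prepare_seg()    # writes ./train_labels/<ind>.npy and ./test_labels/<ind>.npy
prepare_line()   # writes ./train_lines/<ind>.npy and ./test_lines/<ind>.npy, then plots a sample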
Example #6
def get_loader_test(tfm = 'nopad', batch_size = 1, shuffle = False):
    if isinstance(tfm, str):
        if tfm.lower() == 'pad_val' or tfm.lower() == 'pad':
            tfm = PAD_VAL
        elif tfm.lower() == 'nopad_val' or tfm.lower() == 'nopad':
            tfm = NOPAD_VAL
        else:
            tfm = None


    data_path = './test_images'
    label_path = './test_labels'
    labels = read_labels(label_path)
    data_names = read_data_names(label_path)
    dset_test = CoordDataset(data_path, labels, data_names, transform_list=tfm)

    loader_test = DataLoader(dataset=dset_test, batch_size=batch_size, shuffle=shuffle)
    return loader_test
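
A short call sketch for the string shortcut handled above; PAD_VAL and NOPAD_VAL are assumed to be module-level transform lists defined elsewhere in the project.

# 'pad'/'pad_val' resolve to PAD_VAL, 'nopad'/'nopad_val' to NOPAD_VAL;
# any other string falls through to None (no transforms).
loader_test = get_loader_test(tfm='nopad', batch_size=1, shuffle=False)
for batch in loader_test:
    pass  # batch structure depends on CoordDataset.__getitem__ (not shown here)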
Example #7
def get_loader_train(tfm = 'nopad', batch_size = 64, shuffle = False):
    if isinstance(tfm, str):
        if tfm.lower() == 'pad_val':
            tfm = PAD_VAL
        elif tfm.lower() == 'nopad_val':
            tfm = NOPAD_VAL
        elif tfm.lower() == 'pad' or tfm.lower() == 'unf' or tfm.lower() == 'uniform':
            tfm = UNF
        elif tfm.lower() == 'nopad':
            tfm = NOPAD
        elif tfm.lower() == 'rhp':
            tfm = RHP
        else:
            tfm = None

    data_path = './train_images'
    label_path = './train_labels'
    labels = read_labels(label_path)
    data_names = read_data_names(label_path)
    dset_train = CoordDataset(data_path, labels, data_names, transform_list=tfm)

    loader_train = DataLoader(dataset=dset_train, batch_size=batch_size, shuffle=shuffle)
    return loader_train
Example #8
    pol = np.concatenate(
        (coord_rev[0, 0, :].reshape(1, 2),
         coord_rev[:, 1, :],
         coord_rev[::-1, 0, :]),
        axis=0)
    rr, cc = polygon(pol[:, 1], pol[:, 0], seg_image.shape)
    seg_image[rr, cc, :] = 1
    seg_image = seg_image[:, :, 0]
    return seg_image


if __name__ == '__main__':
    author = 'YB'

    #making traintest set
    from os.path import join as osj
    import shutil
    train_data_names = read_data_names('./train_labels')
    train_labels = read_labels('./train_labels')

    test_data_names = read_data_names('./test_labels')
    test_labels = read_labels('./test_labels')

    dest = './trtest_images'
    train_no = len(train_data_names)

    all_labels = []
    all_names = []
    for i, d in enumerate(train_data_names):
        l = train_labels[i]

        shutil.copy(osj('./train_images', d), osj(dest, d))
Example #9
    import os
    import torch
    import torch.nn as nn
    from torch.utils.data import Dataset, DataLoader
    import torchvision.transforms as transforms

    from label_io import read_data_names, read_labels, plot_image, chw, hwc
    from dataset import CoordDataset
    from label_transform import CoordCustomPad, CoordHorizontalFlip, CoordRandomRotate, \
        CoordLabelNormalize, CoordResize, CoordVerticalFlip

    #def label-to-image
    data_path = './train_images'
    label_path = './train_labels'
    labels = read_labels(label_location=label_path)
    data_names = read_data_names(label_location=label_path)

    batch_size = 8

    # transform = tr.Compose([
    #     tr.RandomRotation(30, expand = False)
    #     tr.ToTensor()
    # ])

    customTransforms = [
        CoordCustomPad(512 / 256),
        CoordResize((512, 256)),
        CoordLabelNormalize()
    ]

    # customTransforms = [
Example #10
def get_loader_train_val(tfm_train = 'nopad', tfm_val = 'nopad', batch_size_tr=64, batch_size_val=1, shuffle = True):
    tfm = tfm_train
    if isinstance(tfm, str):
        if tfm.lower() == 'pad_val':
            tfm = PAD_VAL
        elif tfm.lower() == 'nopad_val':
            tfm = NOPAD_VAL
        elif tfm.lower() == 'pad' or tfm.lower() == 'unf' or tfm.lower() == 'uniform':
            tfm = UNF
        elif tfm.lower() == 'nopad':
            tfm = NOPAD
        elif tfm.lower() == 'rhp':
            tfm = RHP
        else:
            tfm = None
    tfm_train = tfm

    tfm = tfm_val
    if isinstance(tfm, str):
        if tfm.lower() == 'pad_val' or tfm.lower() == 'pad':
            tfm = PAD_VAL
        elif tfm.lower() == 'nopad_val' or tfm.lower() == 'nopad':
            tfm = NOPAD_VAL
        else:
            tfm = None
    tfm_val = tfm


    data_path = './train_images'
    label_path = './train_labels'

    val_ratio = 0.1

    labels = read_labels(label_path)
    data_names = read_data_names(label_path)

    N_all = len(data_names)
    N_val = int(N_all * val_ratio)
    N_train = N_all - N_val
    # get train and validation data set
    data_names_train = []
    data_names_val = []
    labels_train = []
    labels_val = []

    if not os.path.exists(os.path.join('./', 'val_permutation.npy')):
        print('reset permutation')
        permutation = np.random.permutation(N_all)
        np.save(os.path.join('./', 'val_permutation.npy'), permutation)
    else:
        permutation = np.load(os.path.join('./', 'val_permutation.npy'))

    for ind in permutation[:N_train]:
        data_names_train.append(data_names[ind])
        labels_train.append(labels[ind])
    labels_train = np.asarray(labels_train)

    for ind in permutation[N_train:]:
        data_names_val.append(data_names[ind])
        labels_val.append(labels[ind])
    labels_val = np.asarray(labels_val)
    #########################
    dset_train = CoordDataset(data_path, labels_train, data_names_train, transform_list=tfm_train)
    dset_val = CoordDataset(data_path, labels_val, data_names_val, transform_list=tfm_val)

    loader_train = DataLoader(dataset=dset_train, batch_size=batch_size_tr, shuffle=shuffle)
    loader_val = DataLoader(dataset=dset_val, batch_size=batch_size_val, shuffle=False)
    return loader_train, loader_val
Example #11
    df = pd.DataFrame(result_list)
    df.to_csv(
        os.path.join(
            pred_path, title, 'result_' + title + '_%.2f' %
            (np.average(w2_errors) * 100) + '.csv'))

    return result_list


if __name__ == '__main__':
    plt.rcParams["figure.figsize"] = (8, 16)
    # do the test and record conversion only here, not in main

    record_label_location = './record_cr_labels'
    record_data_location = './record_cr_images'
    record_data_names = read_data_names(record_label_location)
    record_labels = read_labels(record_label_location)
    record_images = read_images(record_data_location, record_data_names)

    pred_path = './model/TRTEST_ep4278'
    postprocess_inte(pred_path=pred_path,
                     images=record_images,
                     labels_gt_abs=record_labels,
                     title=None,
                     save_plot=True,
                     method2_on=True,
                     method1_on=False,
                     original_display=True)

    ###Validation set
Example #12
if __name__ == '__main__':
    plt.rcParams["figure.figsize"] = (4, 8)
    # test_label_location = './test_labels'
    # test_data_location = './test_images'
    # test_data_names = read_data_names(test_label_location)
    # test_labels = read_labels(test_label_location)
    # test_images = read_images(test_data_location, test_data_names)

    train_label_location = './train_labels'
    train_image_location = './train_images'

    out_path = './plots'

    train_labels = read_labels(train_label_location)
    train_data_names = read_data_names(train_label_location)

    train_images = read_images(train_image_location, train_data_names)

    count = 0

    for ind, image in enumerate(train_images):
        label = train_labels[ind]
        plt.figure()
        #title = 'train_vanila'
        H, W, C = image.shape
        gt = label
        gt_angles, gt_pos = calc_angle_old(gt, (H, W), full=True)
        _fp = gt.reshape(-1, 2, 2).copy()
        for pos in gt_pos:
            dots = np.average(_fp[2 * pos:2 * pos + 2, :, :], axis=0)
Example #13
            tl = angles2[_pos22] * 180 / np.pi
    cobb_angles = np.asarray([pt, mt, tl])
    pos = (pos1, pos2, pos11, pos22)
    #params = dict(pos = pos, mid_points = mid_points, vec_lines = vec_lines, case = case)
    if full:
        return cobb_angles, pos
    else:
        return cobb_angles

from label_io import read_labels, read_images, read_data_names
import pandas as pd
if __name__ == '__main__':
    ####    Testing demo algorithm


    data_names_train = read_data_names('./train_labels')
    labels_now = read_labels('./train_labels')
    labels_ori = read_labels('./train_labels', title ='labels_original')
    labels_m = read_labels('./train_labels', title ='labels_m')
    train_images_location = './train_images'
    train_images = read_images(train_images_location, data_names=data_names_train)

    images = read_images('./train_images', data_names_train)

    # for ind, im in enumerate(images):
    #     lab_m = labels_m[ind]
    #     lab_ori = labels_ori[ind]
    #
    #     if np.sum(np.abs((lab_m - lab_ori)))>=1:
    #         plt.figure()
    #         plot_image(im, coord_red=lab_ori, coord_gr=lab_m)