Example #1
import os
from glob import glob

import numpy as np
import chainer
from PIL import Image

# SPADEGenerator, get_options and label2onehot are project-specific helpers assumed to be importable.

def main():
    out_dir = 'predict_to'
    in_dir = 'predict_from'
    gen_npz = 'pretrained/gen.npz'

    opt = get_options()

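    # build the generator and load the pretrained weights, then move the model back to the CPU for inference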
    gen = SPADEGenerator(opt)
    gen.to_gpu(0)
    chainer.serializers.load_npz(gen_npz, gen)
    gen.to_cpu()

    os.makedirs(in_dir, exist_ok=True)
    os.makedirs(out_dir, exist_ok=True)

    files = glob(in_dir + '/*.*')
    if len(files) == 0:
        print('Error: No files to load in \'' + in_dir + '\'.')
        return

    num = 0
    for filename in files:
        print(filename + ': ', end="")
        try:
            src_img = Image.open(filename).convert('RGB')
        except (IOError, OSError):
            # Image.open raises on failure instead of returning None
            print('Not Loaded')
            continue

        print('Loaded')
        src_array = np.array(src_img, dtype='float32')
        src_array = src_array.transpose((2, 0, 1)) / 255

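        # the source image is two 256 px panels side by side: label map (left) and color reference (right)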
        x_array = src_array[:3, :, :256]
        c_array = src_array[:3, :, 256:512]

        x_onehot = label2onehot(x_array, threshold=0.4, skip_bg=True, dtype='float32')
        x = chainer.Variable(x_onehot[np.newaxis, :, :, :].astype('float32'))

        c_array = c_array * x_onehot[2]  # crop with hair label
        c = chainer.Variable(c_array[np.newaxis, :, :, :].astype('float32'))

        out = gen([x, c])

        x_array = np.transpose(x_array, (1, 2, 0))
        out_array = np.transpose((out.array[0] + 1) / 2, (1, 2, 0))

        img_array = np.concatenate((x_array, out_array), axis=1) * 255
        img = Image.fromarray(img_array.astype('uint8'))

        path = out_dir + '/' + str(num) + '.png'
        img.save(path)

        num += 1
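
Both examples call a project helper, label2onehot, that is not shown on this page. Below is a minimal sketch of what it might look like, under the assumption that the RGB label map encodes one class per color channel and that skip_bg=True simply omits the background plane (so channel 2 lines up with the hair mask used above); the real helper may differ.

import numpy as np

def label2onehot(label_rgb, threshold=0.4, skip_bg=False, dtype='float32'):
    # label_rgb: (3, H, W) float array in [0, 1]; one class per channel (assumption)
    planes = (label_rgb > threshold).astype(dtype)
    if not skip_bg:
        # prepend a background plane that fires where no class does
        bg = (planes.sum(axis=0, keepdims=True) == 0).astype(dtype)
        planes = np.concatenate((bg, planes), axis=0)
    return planes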
Example #2
import os
import random
from glob import glob

import joblib
import numpy as np
import cupy as cp
import chainer
from chainer import Variable, cuda
from chainer.datasets import TupleDataset
from PIL import Image

# SPADEGenerator, get_options, label2onehot, onehot2label and gamma_correction
# are project-specific helpers assumed to be importable from this repository.

def get_dataset(opt):
    files = glob(opt.dataset_dir + '/*.png')

    os.makedirs('dump', exist_ok=True)
    dump_file = 'dump/datasets_with_label.joblib'

    if os.path.exists(dump_file):
        with open(dump_file, 'rb') as f:
            x, t = joblib.load(f)

        return TupleDataset(x, t)

    x, t = [], []
    for filename in files:
        if not os.path.exists(filename):
            continue

        img_array = np.array(Image.open(filename), dtype='float16')
        img_array = img_array.transpose((2, 0, 1)) / 255

        x_array = img_array[:3, :, :256]
        t_array = img_array[:3, :, 256:]

        #convert to onehot
        t_array = label2onehot(t_array, threshold=0.4, dtype='float16')

        x.append(x_array)
        t.append(t_array)

        #Data-Augmentation
        if opt.augment_data:
            #mirroring
            x.append(x_array[:, :, ::-1])
            t.append(t_array[:, :, ::-1])

            #gamma-correction
            x.append(gamma_correction(x_array, gamma=2.5))
            t.append(t_array)

            #mirroring and gamma correction
            x.append(gamma_correction(x_array[:, :, ::-1], gamma=2.5))
            t.append(t_array[:, :, ::-1])

    with open(dump_file, 'wb') as f:
        joblib.dump((x, t), f, compress=3)

    return TupleDataset(x, t)
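
The augmentation branch above also uses a gamma_correction helper that is not defined here. A minimal sketch, assuming a plain power-law curve on images already scaled to [0, 1] (whether the project uses gamma or 1/gamma as the exponent is an assumption):

import numpy as np

def gamma_correction(img, gamma=2.5):
    # img: float array in [0, 1]; apply a power-law curve and keep the input dtype
    out = np.clip(img, 0.0, 1.0) ** (1.0 / gamma)
    return out.astype(img.dtype)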
def get_dataset(dir_path, augment=False, is_valid=True):
    os.makedirs('joblib', exist_ok=True)
    buf_file = 'joblib/' + dir_path.replace('/', '-') + '.job'

    if not os.path.exists(buf_file):
        x, t = [], []

        files = glob(dir_path + '/*.png')
        random.shuffle(files)
        for img_path in files:
            if not os.path.exists(img_path):
                continue

            print(img_path)
            img = Image.open(img_path)

            if img is None:
                continue

            img_array = np.array(img).astype('float16') / 255
            img_array = np.transpose(img_array, (2, 0, 1))

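            # each dataset image holds three 256 px panels: target photo | label map | color reference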
            t_array = img_array[:3, :, :256]
            x_array = img_array[:3, :, 256:512]
            c_array = img_array[:3, :, 512:]

            #to onehot
            x_array = label2onehot(x_array,
                                   threshold=0.4,
                                   skip_bg=True,
                                   dtype='float16')
            c_array = c_array * x_array[2]
            x_array = np.concatenate((x_array, c_array), axis=0)

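            # scale the target to [-1, 1], the range of the generator output (mapped back later with (out + 1) / 2)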
            t_array = t_array * 2 - 1

            x.append(x_array)
            t.append(t_array)

            if augment:
                #mirroring
                x_mirror = x_array[:, :, ::-1]
                t_mirror = t_array[:, :, ::-1]
                x.append(x_mirror)
                t.append(t_mirror)

        with open(buf_file, 'wb') as f:
            joblib.dump((x, t), f, compress=3)

    else:
        with open(buf_file, 'rb') as f:
            x, t = joblib.load(f)

    if is_valid:
        train_size = int(len(x) * 0.9)
    else:
        train_size = len(x)

    train_x = x[:train_size]
    train_t = t[:train_size]
    valid_x = x[train_size:]
    valid_t = t[train_size:]

    return (TupleDataset(train_x, train_t), TupleDataset(valid_x, valid_t))
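
A usage sketch for this second get_dataset, assuming the standard Chainer SerialIterator pipeline; the batch size and the augment flag are illustrative, not taken from the project:

from chainer.iterators import SerialIterator

train, valid = get_dataset('datasets/resnet-large_hc', augment=True, is_valid=True)
train_iter = SerialIterator(train, batch_size=8, shuffle=True)
valid_iter = SerialIterator(valid, batch_size=8, repeat=False, shuffle=False)

batch = train_iter.next()        # list of (x, t) pairs from the TupleDataset
x_batch, t_batch = zip(*batch)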
def main():
    out_predict_dir = 'out'
    device = 0
    gen_npz = 'trained/gen_snapshot_epoch-900.npz'

    opt = get_options()

    opt.spade_ch = 32
    opt.ngf = 64
    opt.ndf = 64

    gen = SPADEGenerator(opt)
    gen.to_gpu(device)
    chainer.serializers.load_npz(gen_npz, gen)

    os.makedirs(out_predict_dir, exist_ok=True)

    out_dir = out_predict_dir + '/predicted'
    os.makedirs(out_dir, exist_ok=True)
    num = 0

    dir_path = 'datasets/resnet-large_hc'
    files = glob(dir_path + '/*.png')
    random.shuffle(files)

    for img_path in files:
        if not os.path.exists(img_path):
            continue

        img = Image.open(img_path)

        if img is None:
            continue
        print(img_path)

        img_array = np.array(img).astype('float32') / 255
        img_array = np.transpose(img_array, (2, 0, 1))

        t_array = img_array[:3, :, :256]
        x_array = img_array[:3, :, 256:512]
        c_array = img_array[:3, :, 512:]

        #to onehot
        x_array = label2onehot(x_array,
                               threshold=0.4,
                               skip_bg=True,
                               dtype='float32')
        c_array = c_array * x_array[2]

        # move to the GPU as float32 Variables (the arrays are already float32 here, so no cast is needed)
        x = Variable(cuda.to_gpu(x_array[np.newaxis, :, :, :]))
        c = Variable(cuda.to_gpu(c_array[np.newaxis, :, :, :]))

        out = gen([x, c])[0]

        out = cp.asnumpy(out.array[0])
        out = (out + 1) / 2
        x = cp.asnumpy(x.array[0])
        x = onehot2label(x, skip_bg=True, dtype='float32')

        out = np.transpose(out * 255, (1, 2, 0)).astype('uint8')
        x = np.transpose(x * 255, (1, 2, 0)).astype('uint8')

        y = np.transpose(t_array * 255, (1, 2, 0)).astype('uint8')

        out_img = np.concatenate((x, y, out), axis=1)
        img = Image.fromarray(out_img)
        path = out_dir + '/' + str(num) + '.png'
        img.save(path)

        num += 1
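
onehot2label, used in main to turn the generator's one-hot input back into a viewable RGB label map, is also project-specific. A minimal sketch that mirrors the label2onehot assumption above:

import numpy as np

def onehot2label(onehot, skip_bg=False, dtype='float32'):
    # inverse of the label2onehot sketch: drop the background plane if one was
    # added, then reuse the class planes directly as RGB channels
    planes = onehot if skip_bg else onehot[1:]
    return np.asarray(planes[:3], dtype=dtype)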