def save_data_list(inpath, outpath, filenames, filename_bbox):
    """Build HR/LR image and segmentation lists for CUB birds and pickle them.

    Args:
        inpath: root directory containing the CUB_200_2011 dataset.
        outpath: prefix for the output pickle files.
        filenames: iterable of image keys (relative paths without extension).
        filename_bbox: dict mapping each key to its crop bounding box.
    """
    lr_size = int(LOAD_SIZE / LR_HR_RETIO)
    hr_images, lr_images = [], []
    hr_segs, lr_segs = [], []

    for cnt, key in enumerate(filenames, start=1):
        bbox = filename_bbox[key]

        # High-resolution image, cropped to the bounding box.
        f_name = '%s/CUB_200_2011/images/%s.jpg' % (inpath, key)
        img = get_image(f_name, LOAD_SIZE, is_crop=True, bbox=bbox).astype('uint8')

        # Matching segmentation mask, cropped the same way.
        seg_name = '%s/CUB_200_2011/segmentations/%s.png' % (inpath, key)
        seg = get_image(seg_name, LOAD_SIZE, is_crop=True, bbox=bbox).astype('uint8')

        hr_images.append(img)
        hr_segs.append(seg)
        # NOTE(review): scipy.misc.imresize was removed in SciPy 1.3; this
        # code requires an older SciPy (or a port to another resize API).
        lr_images.append(scipy.misc.imresize(img, [lr_size, lr_size], 'bicubic'))
        lr_segs.append(scipy.misc.imresize(seg, [lr_size, lr_size], 'bicubic'))

        if cnt % 100 == 0:
            print('Load %d......' % cnt)

    print('images', len(hr_images), hr_images[0].shape, lr_images[0].shape)
    print('segmentations', len(hr_segs), hr_segs[0].shape, lr_segs[0].shape)

    # Dump all four lists; file name encodes resolution and content kind.
    for payload, size, tag in ((hr_images, LOAD_SIZE, 'images'),
                               (lr_images, lr_size, 'images'),
                               (hr_segs, LOAD_SIZE, 'segs'),
                               (lr_segs, lr_size, 'segs')):
        outfile = outpath + str(size) + tag + '.pickle'
        with open(outfile, 'wb') as f_out:
            pickle.dump(payload, f_out)
            print('save to: ', outfile)
# ---- Example 2 (score: 0) ----
def save_data_list(inpath, outpath, filenames, filename_bbox):
    """Crop each CUB image to its bounding box, build HR/LR pairs, and
    pickle both lists under `outpath`.

    Args:
        inpath: root directory containing the CUB_200_2011 dataset.
        outpath: prefix for the output pickle files.
        filenames: iterable of image keys (relative paths without extension).
        filename_bbox: dict mapping each key to its crop bounding box.
    """
    lr_size = int(LOAD_SIZE / LR_HR_RETIO)
    hr_images = []
    lr_images = []

    for count, key in enumerate(filenames, start=1):
        path = '%s/CUB_200_2011/images/%s.jpg' % (inpath, key)
        hr = get_image(path, LOAD_SIZE, is_crop=True, bbox=filename_bbox[key])
        hr = hr.astype('uint8')
        hr_images.append(hr)
        lr_images.append(scipy.misc.imresize(hr, [lr_size, lr_size], 'bicubic'))
        if count % 100 == 0:
            print('Load %d......' % count)

    print('images', len(hr_images), hr_images[0].shape, lr_images[0].shape)

    # One pickle per resolution.
    for pixels, size in ((hr_images, LOAD_SIZE), (lr_images, lr_size)):
        outfile = outpath + str(size) + 'images.pickle'
        with open(outfile, 'wb') as f_out:
            pickle.dump(pixels, f_out)
            print('save to: ', outfile)
# ---- Example 3 (score: 0) ----
def save_data_list(inpath, outpath, filenames, filename_bbox):
    """Crop each CUB image to its bounding box, build HR/LR pairs, and
    pickle both lists under `outpath`.

    Args:
        inpath: root directory containing the CUB_200_2011 dataset.
        outpath: prefix for the output pickle files.
        filenames: iterable of image keys (relative paths without extension).
        filename_bbox: dict mapping each key to its crop bounding box.
    """
    hr_images = []
    lr_images = []
    lr_size = int(LOAD_SIZE / LR_HR_RETIO)
    cnt = 0
    for key in filenames:
        bbox = filename_bbox[key]
        f_name = '%s/CUB_200_2011/images/%s.jpg' % (inpath, key)
        img = get_image(f_name, LOAD_SIZE, is_crop=True, bbox=bbox)
        img = img.astype('uint8')
        hr_images.append(img)
        # BUG FIX: skimage.transform.resize rescales to float64 in [0, 1]
        # by default; preserve_range keeps the 0-255 scale so the LR list
        # stays uint8 like the HR list (and the pickle stays ~8x smaller).
        lr_img = skimage.transform.resize(
            img, [lr_size, lr_size], order=3,
            preserve_range=True).astype('uint8')
        lr_images.append(lr_img)
        cnt += 1
        if cnt % 100 == 0:
            print('Load %d......' % cnt)
    #
    print('images', len(hr_images), hr_images[0].shape, lr_images[0].shape)
    #
    outfile = outpath + str(LOAD_SIZE) + 'images.pickle'
    with open(outfile, 'wb') as f_out:
        pickle.dump(hr_images, f_out)
        print('save to: ', outfile)
    #
    outfile = outpath + str(lr_size) + 'images.pickle'
    with open(outfile, 'wb') as f_out:
        pickle.dump(lr_images, f_out)
        print('save to: ', outfile)
# ---- Example 4 (score: 0) ----
def save_data_list(inpath, outpath, filenames, filename_bbox):
    """Shuffle the dataset, split it into fixed-size chunks, and pickle the
    low-resolution images of each chunk to its own numbered file.

    The shuffled index order is saved first ('File_Ids.pickle') so the
    per-file chunks can be mapped back to the original file names.

    Args:
        inpath: directory whose parent holds the image files.
        outpath: prefix for the output pickle files.
        filenames: sequence of image file names (with '.jpg' extension).
        filename_bbox: dict mapping each extension-less key to its crop bbox.
    """
    lr_size = int(LOAD_SIZE / LR_HR_RETIO)
    cnt = 0
    # BUG FIX: range() is immutable in Python 3; materialize it so
    # random.shuffle can permute it in place.
    ids = list(range(len(filenames)))
    shuffle(ids)
    with open(outpath + 'File_Ids.pickle', 'wb') as f:
        pickle.dump(ids, f)

    # Images written per output file.
    chunk = cfg.NUM_BATCH_IN_FILE * cfg.TRAIN.BATCH_SIZE
    # BUG FIX: the original computed int(len(ids)/cfg.NUM_BATCH_IN_FILE)+1,
    # which divides by batches-per-file instead of images-per-file and
    # always adds one — producing empty trailing chunks that crash on
    # hr_images[0] below. Use ceil division by the true chunk size.
    numfiles = -(-len(ids) // chunk)

    for file_id in range(numfiles):
        hr_images = []
        lr_images = []
        lo = file_id * chunk
        hi = min(lo + chunk, len(ids))
        for idx in ids[lo:hi]:
            key = filenames[idx][:-4]  # strip the '.jpg' extension
            bbox = filename_bbox[key]
            f_name = '%s/../%s.jpg' % (inpath, key)
            img = get_image(f_name, LOAD_SIZE, is_crop=True, bbox=bbox)
            img = img.astype('uint8')
            hr_images.append(img)
            lr_img = scipy.misc.imresize(img, [lr_size, lr_size], 'bicubic')
            lr_images.append(lr_img)
            cnt += 1
            if cnt % 100 == 0:
                print('Load %d......' % cnt)
        if not lr_images:
            continue  # defensive: skip an empty chunk instead of crashing
        #
        print('images', len(hr_images), hr_images[0].shape, lr_images[0].shape)
        # HR pickles were intentionally disabled upstream; only the
        # low-resolution chunk is written.
        outfile = outpath + str(lr_size) + 'images' + str(file_id) + '.pickle'
        with open(outfile, 'wb') as f_out:
            pickle.dump(lr_images, f_out)
            print('save to: ', outfile)
# ---- Example 5 (score: 0) ----
def save_data_list(outpath, filenames):
    """Build HR/LR pairs for a list of (uncropped) images and pickle the
    image lists, the file basenames, and placeholder class labels.

    Args:
        outpath: prefix for the output pickle files.
        filenames: sequence of image file paths.
    """
    lr_size = int(LOAD_SIZE / LR_HR_RATIO)
    hr_images = []
    lr_images = []

    for count, f_name in enumerate(filenames, start=1):
        print('processing %s' % f_name)
        hr = get_image(f_name, LOAD_SIZE, is_crop=False).astype('uint8')
        hr_images.append(hr)
        lr_images.append(scipy.misc.imresize(hr, [lr_size, lr_size], 'bicubic'))
        if count % 50 == 0:
            print('Load %d........' % count)

    print('images', len(hr_images), len(lr_images), hr_images[0].shape,
          lr_images[0].shape)

    def _dump(payload, outfile):
        # Write one pickle and report where it went.
        with open(outfile, 'wb') as f_out:
            pickle.dump(payload, f_out)
            print('save to: ', outfile)

    _dump(hr_images, outpath + str(LOAD_SIZE) + 'images.pickle')
    _dump(lr_images, outpath + str(lr_size) + 'images.pickle')
    _dump([os.path.basename(name) for name in filenames],
          outpath + 'filenames.pickle')
    # TODO Use fake classes for now, switch to real data when it's available
    _dump([0] * len(filenames), outpath + 'class_info.pickle')
# ---- Example 6 (score: 0) ----
            # get the corresponding captions
            with open(DATA_DIR + '/text_c10/' + filename + '.txt', "r") as f:
                captions = f.read().split('\n')
            captions = [cap for cap in captions if len(cap) > 0]

            # randomly select 1 caption and the corresponding embedding
            j, caption = random.choice(list(enumerate(captions)))
            embedding = embeddings[index][j]

            # load the example true image (process as usual down to 76 x 76
            lr_size = int(LOAD_SIZE / LR_HR_RATIO)
            filename_bbox = load_bbox('Data/birds/')
            bbox = filename_bbox[filename]
            f_name = 'Data/birds/CUB_200_2011/images/%s.jpg' % filename
            img = get_image(f_name, LOAD_SIZE, is_crop=True, bbox=bbox)
            img = img.astype(np.float32)
            true_img = scipy.misc.imresize(img, [lr_size, lr_size],
                                           'bicubic').astype(np.float32)

        print("Caption: ", caption)

        # convert the embedding to a tensor and repeat BATCH_SIZE times to shape (BATCH_SIZE, 1024)
        embedding = tf.constant(embedding)
        batch_embeddings = tf.tile(tf.expand_dims(embedding, axis=0),
                                   [BATCH_SIZE, 1])

        batch_z = tf.random_normal([BATCH_SIZE, Z_DIM])

        # get conditioning vector (from embedding) and KL divergence for use as a
        # regularization term in the generator loss