import numpy as np
import tensorflow as tf
from glob import glob

# data_root, img_dir, mask_dir, img_subdirs, BATCH_SIZE, and output_dir are
# assumed to be defined earlier in the script. The mask list below is
# reconstructed from a truncated line; the exact np.array(sorted(...))
# wrapping is an assumption.
mask_file_list = np.array(sorted(glob("%s/%s/*" % (data_root, mask_dir))))

# Each subdirectory holds two exposures per scene; sorting and reshaping to
# (-1, 2) pairs them up.
img_names = np.array([
    img_name for subdir in img_subdirs
    for img_name in sorted(glob("%s/%s/%s/*" % (data_root, img_dir, subdir)))
]).reshape((-1, 2))
num_img = img_names.shape[0]

ds = tf.data.Dataset.from_tensor_slices(img_names)
# Shuffle once with a fixed order (reshuffle_each_iteration=False) so the
# train/val split below stays stable across epochs.
ds = ds.shuffle(num_img, reshuffle_each_iteration=False)

# Hold out 20% of the pairs for validation.
val_size = int(num_img * 0.2)
train_size = num_img - val_size
train_ds = ds.skip(val_size)
val_ds = ds.take(val_size)

# gen_shadow (defined elsewhere) turns a filename pair into the four tensors
# unpacked from the iterator below.
train_ds = train_ds.map(
    lambda x: gen_shadow(x, mask_file_list),
    num_parallel_calls=4).batch(BATCH_SIZE).prefetch(BATCH_SIZE)
# Validation can take larger batches since no gradients are kept.
val_ds = val_ds.map(lambda x: gen_shadow(x, mask_file_list)).batch(2 * BATCH_SIZE)
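# gen_shadow itself is defined elsewhere in this codebase. Purely to
# illustrate the contract the map() calls above expect -- a (2,) string
# tensor of file paths in, four image tensors out -- here is a hypothetical
# stub. Every name and the compositing logic here are assumptions, not the
# repo's actual implementation.
def gen_shadow_stub(pair, mask_files):
    ambient = tf.image.convert_image_dtype(
        tf.image.decode_jpeg(tf.read_file(pair[0]), channels=3), tf.float32)
    flash = tf.image.convert_image_dtype(
        tf.image.decode_jpeg(tf.read_file(pair[1]), channels=3), tf.float32)
    # Pick a random mask texture and resize it to the image resolution.
    idx = tf.random_uniform([], 0, len(mask_files), dtype=tf.int32)
    mask = tf.image.convert_image_dtype(
        tf.image.decode_jpeg(tf.read_file(tf.constant(mask_files)[idx]),
                             channels=1), tf.float32)
    mask = tf.image.resize_images(mask, tf.shape(ambient)[:2])
    shadowed = ambient * (1.0 - 0.5 * mask)  # darken the masked region
    return shadowed, mask, ambient, flash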

print(train_ds)

iterator = tf.data.Iterator.from_structure(train_ds.output_types,
                                           train_ds.output_shapes)
img_with_shadow, shadow_mask, img_no_shadow, input_pureflash = iterator.get_next()

training_init_op = iterator.make_initializer(train_ds)
validation_init_op = iterator.make_initializer(val_ds)

with tf.variable_scope(tf.get_variable_scope()):
    # Collapse the pure-flash image to single-channel luminance
    # (equal-weight channel average).
    gray_pureflash = 0.33 * (input_pureflash[..., 0:1] +
                             input_pureflash[..., 1:2] +
                             input_pureflash[..., 2:3])
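# Nothing above actually runs the two initializers; as a minimal sketch of
# how this shared-iterator pattern is typically driven in TF1 (NUM_EPOCHS,
# train_op, and val_loss are placeholders assumed to exist elsewhere):
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for epoch in range(NUM_EPOCHS):
        sess.run(training_init_op)       # point the shared iterator at train_ds
        while True:
            try:
                sess.run(train_op)
            except tf.errors.OutOfRangeError:
                break                    # train_ds exhausted for this epoch
        sess.run(validation_init_op)     # reuse the same iterator for val_ds
        while True:
            try:
                sess.run(val_loss)
            except tf.errors.OutOfRangeError:
                break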
Example #3
img_names = np.array([
    img_name for subdir in img_subdirs
    for img_name in sorted(glob("%s/%s/%s/*" % (data_root, img_dir, subdir)))
]).reshape((-1, 2))
num_img = img_names.shape[0]

ds = tf.data.Dataset.from_tensor_slices(img_names)
ds = ds.shuffle(num_img, reshuffle_each_iteration=False)

val_size = int(num_img * 0.2)
train_size = num_img - val_size
train_ds = ds.skip(val_size)
val_ds = ds.take(val_size)

train_ds = train_ds.map(lambda x: gen_shadow(x, mask_file_list))
val_ds = val_ds.map(lambda x: gen_shadow(x, mask_file_list))

# Dump the first n training examples to disk for visual inspection.
n = 100
class_names = ["noshad", "flash", "shadow", "mask"]
iterator = train_ds.take(n).make_one_shot_iterator()
next_imgs = iterator.get_next()
with tf.Session() as sess:
    for i in range(1, n + 1):
        imgs = sess.run(next_imgs)
        for j, cls in enumerate(class_names):
            # save_img builds fresh graph ops on every call; the graph grows
            # as the loop runs, which is acceptable for a one-off dump.
            sess.run(save_img("%s/%04d_%s.jpg" % (output_dir, i, cls),
                              imgs[j]))
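# save_img is also defined elsewhere; a plausible TF1 implementation matching
# the call pattern above (path and float image in, a runnable write op out)
# might look like this -- the uint8 conversion and the JPEG encoder choice
# are assumptions:
def save_img_sketch(path, img):
    # Map a float image in [0, 1] to uint8 and return an op that writes the
    # encoded JPEG; sess.run() on the result performs the file I/O.
    img_u8 = tf.image.convert_image_dtype(img, tf.uint8, saturate=True)
    return tf.write_file(path, tf.image.encode_jpeg(img_u8))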