Example #1
import os

import mxnet as mx
import numpy as np

import data_processing
import gen_v3
import gen_v4

dshape = (1, 3, 480, 640)
clip_norm = 1.0 * np.prod(dshape)
model_prefix = "./model/"
ctx = mx.gpu(0)  # switch to mx.cpu() if no GPU is available

# generator
gens = [
    gen_v4.get_module("g0", dshape, ctx),
    gen_v3.get_module("g1", dshape, ctx),
    gen_v3.get_module("g2", dshape, ctx),
    gen_v4.get_module("g3", dshape, ctx)
]
for i in range(len(gens)):
    gens[i].load_params("./model/%d/v3_0002-0026000.params" % i)

content_np = data_processing.PreprocessContentImage("../IMG_4343.jpg",
                                                    min(dshape[2:]), dshape)
data = [mx.nd.array(content_np)]
# run the cascade: each generator stylizes the previous stage's output
for i in range(len(gens)):
    gens[i].forward(mx.io.DataBatch([data[-1]], [0]), is_train=False)
    new_img = gens[i].get_outputs()[0]
    data.append(new_img.copyto(mx.cpu()))
    data_processing.SaveImage(new_img.asnumpy(), "out_%d.jpg" % i)

# zip the stylized outputs for download
os.system("rm -rf out.zip")
os.system("zip out.zip out_*")
Example #2
import logging
import os
import random

# `dshape`, `ctx`, `clip_norm`, `gens`, and `data_processing` carry over from
# Example #1; `content_mod` (the content-feature extractor), `loss` (the
# style/content loss module), `start_epoch`, and `end_epoch` are assumed to be
# defined by the surrounding training setup.
logging.basicConfig(level=logging.INFO)

data_root = "../data/"
file_list = os.listdir(data_root)
num_image = len(file_list)
logging.info("Dataset size: %d", num_image)

# train

for i in range(start_epoch, end_epoch):
    random.shuffle(file_list)
    for idx in range(num_image):
        loss_grad_array = []
        data_array = []
        path = data_root + file_list[idx]
        try:
            content_np = data_processing.PreprocessContentImage(
                path, min(dshape[2:]), dshape)
        except Exception:
            logging.warning("Failed to load input image %s. Skip.", path)
            continue
        data = mx.nd.array(content_np)
        data_array.append(data)
        # get content
        content_mod.forward(mx.io.DataBatch([data], [0]), is_train=False)
        content_array = content_mod.get_outputs()[0].copyto(mx.cpu())
        # set target content
        loss.set_params({"target_content": content_array}, {},
                        allow_missing=True, force_init=True)
        # gen_forward
        for k in range(len(gens)):
            gens[k].forward(mx.io.DataBatch([data_array[-1]], [0]),
                            is_train=True)
            data_array.append(gens[k].get_outputs()[0].copyto(mx.cpu()))
            # loss forward
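
The listing breaks off at the loss forward step; the rest of the iteration is not shown here. As an illustrative sketch only (not the original code), the remainder of such a training step typically forwards each generated image through the loss module, back-propagates to get the gradient with respect to the image, and then chains that gradient back through the generators in reverse order with global-norm clipping (clip_norm) before calling update(). The helper below, backward_and_update, is hypothetical: it assumes `loss` is a Module whose outputs are loss heads (so backward() needs no head gradients) and that both `loss` and the generator modules were bound with inputs_need_grad=True.

# Illustrative sketch only (NOT the original code).
def backward_and_update(gens, loss, data_array, loss_grad_array, clip_norm, ctx):
    # loss forward/backward on each generated image; keep the gradient
    # w.r.t. the generator's output
    for k in range(len(gens)):
        loss.forward(mx.io.DataBatch([data_array[k + 1]], [0]), is_train=True)
        loss.backward()
        loss_grad_array.append(loss.get_input_grads()[0].copyto(mx.cpu()))
    # chain the gradients back through the cascade, last generator first
    grad = mx.nd.zeros(data_array[-1].shape)
    for k in range(len(gens) - 1, -1, -1):
        grad = grad + loss_grad_array[k]       # add this stage's loss gradient
        gnorm = mx.nd.norm(grad).asscalar()
        if gnorm > clip_norm:                  # simple global-norm clipping
            grad *= clip_norm / gnorm
        gens[k].backward([grad.copyto(ctx)])
        grad = gens[k].get_input_grads()[0].copyto(mx.cpu())
        gens[k].update()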