def generate(genw, cnt):
    """Generate `cnt` images with a trained generator and save them as PNGs.

    Args:
        genw: path to the generator weight file to load.
        cnt: number of images to write; assumes cnt <= Args.batch_sz,
             otherwise the indexing below raises IndexError — TODO confirm
             callers respect this.

    Side effects:
        Writes 0000.png .. {cnt-1:04d}.png into the current directory.
    """
    shape = (Args.sz, Args.sz, 3)
    gen = build_gen(shape)
    # Keras requires compile() before predict(); optimizer/loss are dummies
    # since no training happens here.
    gen.compile(optimizer='sgd', loss='mse')
    # BUG FIX: original loaded Args.genw, silently ignoring the genw
    # parameter. Honor the argument so callers can pick the weight file.
    load_weights(gen, genw)
    generated = gen.predict(binary_noise(Args.batch_sz))
    # Un-offset back to displayable pixel values before saving.
    generated = denormalize4gan(generated)
    for i in range(cnt):
        ofname = "{:04d}.png".format(i)
        # NOTE(review): scipy.misc.imsave was removed in SciPy >= 1.2;
        # this code assumes an older SciPy — confirm pinned version.
        scipy.misc.imsave(ofname, generated[i])
def dump_batch(imgs, cnt, ofname):
    """Tile the first cnt*cnt images of a batch into one big grid image.

    Args:
        imgs: batch of images (indexable, at least cnt*cnt entries).
        cnt: grid side length; the output is cnt rows by cnt columns.
        ofname: path of the PNG to write.
    """
    assert Args.batch_sz >= cnt * cnt
    # Row r is the horizontal concatenation of images [r*cnt, (r+1)*cnt);
    # stacking the rows vertically yields the full grid.
    grid = np.concatenate(
        [np.concatenate(imgs[r * cnt:(r + 1) * cnt], axis=1)
         for r in range(cnt)],
        axis=0)
    grid = denormalize4gan(grid)
    scipy.misc.imsave(ofname, grid)
def generate(genw, cnt):
    """Generate `cnt` images with a trained generator and save them under tmp/.

    Args:
        genw: path to the generator weight file to load.
        cnt: number of images to write; assumes cnt <= Args.batch_sz,
             otherwise the indexing below raises IndexError — TODO confirm
             callers respect this.

    Side effects:
        Writes tmp/0000.png .. tmp/{cnt-1:04d}.png (tmp/ must already exist).
    """
    shape = (Args.sz, Args.sz, 3)
    gen = build_gen(shape)
    # Keras requires compile() before predict(); optimizer/loss are dummies
    # since no training happens here.
    gen.compile(optimizer='sgd', loss='mse')
    # BUG FIX: original loaded Args.genw, silently ignoring the genw
    # parameter. Honor the argument so callers can pick the weight file.
    load_weights(gen, genw)
    generated = gen.predict(binary_noise(Args.batch_sz))
    # Unoffset, in batch.
    # Must convert back to uint8 to stop color distortion.
    generated = denormalize4gan(generated)
    for i in range(cnt):
        ofname = "tmp/{:04d}.png".format(i)
        # NOTE(review): scipy.misc.imsave was removed in SciPy >= 1.2;
        # this code assumes an older SciPy — confirm pinned version.
        scipy.misc.imsave(ofname, generated[i])
def dump_batch(imgs, cnt, ofname):
    """Merge cnt x cnt generated images into one big tiled image.

    Tip: run
        $ feh dump.png --reload 1
    to refresh the image periodically during training.

    Args:
        imgs: batch of images (indexable, at least cnt*cnt entries).
        cnt: grid side length; the output is cnt rows by cnt columns.
        ofname: path of the PNG to write.
    """
    assert Args.batch_sz >= cnt * cnt
    # Row r is the horizontal concatenation of images [r*cnt, (r+1)*cnt);
    # stacking the rows vertically yields the full grid.
    grid = np.concatenate(
        [np.concatenate(imgs[r * cnt:(r + 1) * cnt], axis=1)
         for r in range(cnt)],
        axis=0)
    grid = denormalize4gan(grid)
    # Optional: scipy.misc.imresize(grid, 200) here to upscale before saving.
    scipy.misc.imsave(ofname, grid)