def compare(epoch, batch=0, prefix=""):
    """Load the saved snapshot for *epoch* and draw one batch element.

    Loads ``<prefix>/epoch<epoch>.npz``, prints the stored loss (first
    array in the archive), and draws the last three arrays — the
    unchanged image, the changed image, and the reshaped result — for
    the given *batch* index.

    Args:
        epoch:  epoch identifier used in the snapshot filename.
        batch:  index of the batch element to draw (default 0).
        prefix: directory containing the snapshot files.
    """
    x0 = np.load("%s/epoch%s.npz" % (prefix, epoch))
    # Materialize (key, array) pairs in archive order: on Python 3,
    # NpzFile.items() is a view/iterator and cannot be indexed directly.
    items = [(key, x0[key]) for key in x0.files]
    # print() call form: the rest of this file already uses the
    # function form, and the old `print "loss", ...` statement is a
    # SyntaxError on Python 3.
    print("loss", items[0][1])
    # NOTE(review): positional access assumes a fixed save order in the
    # .npz (loss first, then ..., unchanged, changed, reshaped last) —
    # confirm against the code that writes these snapshots.
    unchanged_img = items[-3][1]
    changed_img = items[-2][1]
    res_reshape2 = items[-1][1]
    display.draw(unchanged_img[batch])
    display.draw(changed_img[batch])
    display.draw(res_reshape2[batch])
# NOTE(review): the `def` header of genshapebatch was not visible in the
# reviewed chunk; the signature below is inferred from the call
# `genshapebatch(nprims, 2)` further down and the free variables
# (nprims, nbatch) used in the body — confirm against the original file.
def genshapebatch(nprims, nbatch):
    """Generate a random batch of shape parameters.

    Returns an array of shape (nprims, nbatch, 4) with values drawn
    uniformly from [-2, 0), cast to the configured float dtype.
    """
    shapes = np.random.rand(nprims, nbatch, 4) * 2 - 2
    return np.array(shapes, dtype=config.floatX)


# --- Script: render one batch of random shapes and draw the result ---
width = 224
height = 224

# Generate initial rays (one fragment coordinate per output pixel).
exfragcoords = gen_fragcoords(width, height)
nprims = 500

print("Compiling Renderer")
render = make_render(nprims, width, height)
shapes = genshapebatch(nprims, 2)

print("Rendering")
img = render(exfragcoords, shapes)

from ig import display
display.draw(img[:, :, 0])

# np.save('data/observed_img', img, allow_pickle=True, fix_imports=True)
#
# print("Drawing Img")
# draw(img)
# print("Doing Pixel Comparison")
# cost = similarity_cost2(img, nprims, width, height)
# sim = cost(exfragcoords, shapes)
# print(sim)
# img_tiled = np.tile(img, (1, 3, 1, 1))  # Tile because vgg expects RGB but img is depth
# Render the image to create an observation