# --- Pretraining fragment (reflowed; the extracted source had every statement
# collapsed onto a single, syntactically invalid line) ---

# Commented-out matplotlib / pdb debugging scaffolding, kept as found:
#plt.show()
#
#plt.hist(np.log(gt))
#plt.show()
#
#plt.set_trace()
#
#
#plt.imshow(dataObj.currImage)
#plt.show()
#plt.imshow(dataObj.currDepth)
#plt.show()
#
#pdb.set_trace()

# Build the inference model around the training data object, then either
# restore saved weights or initialize fresh variables.
tfObj = unaryDepthInference(trainDataObj, vggFile)
if(load):
    tfObj.loadModel(loadFile)
else:
    tfObj.initSess()
# TensorBoard summaries are written under the run directory.
tfObj.writeSummary(runDir + "/tfout")
print("Done init")  # parenthesized form: valid under both Python 2 and 3

#Pretrain
# NOTE(review): this chunk appears truncated mid-loop — the original loop body
# presumably continued past the checkpoint-path assignment below (a training
# step, save, and evaluation are implied by the trailing comment). Confirm
# against the full file before relying on this reconstruction.
for i in range(100):
    saveFile = runDir + "/depth-model-pre"
    #Evaluate test frame, providing gt so that it writes to summary
# --- Evaluation setup (reflowed; the extracted source had every statement
# collapsed onto a single, syntactically invalid line) ---
# Builds KITTI data objects, normalizes the test set with training-set
# statistics, constructs the inference model, and restores saved weights.

load = True
loadFile = outDir + "/saved/saved.ckpt"

#Get object from which tensorflow will pull data from
testDataObj = kittiObj(imageList, depthList)
#Allocate obj to calc mean/std
trainDataObj = kittiObj(trainImageList, trainDepthList)
#Set mean/std on test set
testDataObj.setMeanVar(trainDataObj.mean, trainDataObj.std)

# Pretrained VGG-F weights; hard-coded absolute path — TODO make configurable.
vggFile = "/home/sheng/mountData/pretrain/imagenet-vgg-f.mat"

#Allocate tf obj with test data
tfObj = unaryDepthInference(testDataObj, vggFile)
#Load weights
if(load):
    tfObj.loadModel(loadFile)
else:
    tfObj.initSess()
#Summary dir
tfObj.writeSummary(runDir + "/test")
print("Done init")  # parenthesized form: valid under both Python 2 and 3

numImages = testDataObj.numImages
# Accumulators for ground-truth / estimated depths, presumably filled by the
# evaluation loop later in the file — confirm against the full source.
allGT = None
allEst = None
# --- Demo driver body (reflowed; the extracted source had every statement
# collapsed onto a single, syntactically invalid line) ---
# Reads the input image path from the command line, prepares output
# directories, restores the trained model, evaluates every segment, and
# writes a per-segment depth visualization next to the input file.

inputFilename = argv[1]

# Output layout (hard-coded base path — TODO make configurable).
outDir = "/home/sheng/mountData/unaryDepthInference/"
runDir = outDir + "/demo/"
plotDir = runDir + "plots/"
if not os.path.exists(runDir):
    os.makedirs(runDir)
if not os.path.exists(plotDir):
    os.makedirs(plotDir)

load = True
#TODO change loadfile
loadFile = "pretrain/saved.ckpt"

#Allocate obj to calc mean/std
imgObj = demoObj(inputFilename)
#plotSegments(imgObj.currImage, imgObj.currSegments)

#Allocate tf obj with test data
# (None in place of a VGG file — presumably unnecessary because weights are
# restored from the checkpoint below; confirm against unaryDepthInference.)
tfObj = unaryDepthInference(imgObj, None)
tfObj.loadModel(loadFile)

# Evaluate all segments in batches of 32 and render the estimated depths.
evalData = imgObj.allSegments()
estData = tfObj.evalModelBatch(32, evalData)
plotEval(imgObj.currImage, imgObj.currSegments, imgObj.segLabels, estData, inputFilename + ".depth.png")
# --- Evaluation setup (reflowed; the extracted source had every statement
# collapsed onto a single, syntactically invalid line) ---
# NOTE(review): this chunk is a near-duplicate of an identical setup block
# elsewhere in this source (only `if (load):` spacing differs) — consider
# factoring the shared setup into one place.

load = True
loadFile = outDir + "/saved/saved.ckpt"

#Get object from which tensorflow will pull data from
testDataObj = kittiObj(imageList, depthList)
#Allocate obj to calc mean/std
trainDataObj = kittiObj(trainImageList, trainDepthList)
#Set mean/std on test set
testDataObj.setMeanVar(trainDataObj.mean, trainDataObj.std)

# Pretrained VGG-F weights; hard-coded absolute path — TODO make configurable.
vggFile = "/home/sheng/mountData/pretrain/imagenet-vgg-f.mat"

#Allocate tf obj with test data
tfObj = unaryDepthInference(testDataObj, vggFile)
#Load weights
if (load):
    tfObj.loadModel(loadFile)
else:
    tfObj.initSess()
#Summary dir
tfObj.writeSummary(runDir + "/test")
print("Done init")  # parenthesized form: valid under both Python 2 and 3

numImages = testDataObj.numImages
# Accumulators for ground-truth / estimated depths, presumably filled by the
# evaluation loop later in the file — confirm against the full source.
allGT = None
allEst = None
# Demo entry fragment, flattened onto one physical line (newlines lost in
# extraction). Kept byte-identical rather than reflowed because the leading
# print/exit pair looks like the tail of a truncated usage check — presumably
# "if len(argv) < 2:" — whose header lies outside this chunk; reflowing those
# two statements to top level would make the script unconditionally exit.
# TODO(review): confirm the guard against the full file before reformatting.
print("Usage: python demo.py [filename]") exit(-1) inputFilename = argv[1] outDir = "/home/sheng/mountData/unaryDepthInference/" runDir = outDir + "/demo/" plotDir = runDir + "plots/" if not os.path.exists(runDir): os.makedirs(runDir) if not os.path.exists(plotDir): os.makedirs(plotDir) load = True #TODO change loadfile loadFile = "pretrain/saved.ckpt" #Allocate obj to calc mean/std imgObj = demoObj(inputFilename) #plotSegments(imgObj.currImage, imgObj.currSegments) #Allocate tf obj with test data tfObj = unaryDepthInference(imgObj, None) tfObj.loadModel(loadFile) evalData = imgObj.allSegments() estData = tfObj.evalModelBatch(32, evalData) plotEval(imgObj.currImage, imgObj.currSegments, imgObj.segLabels, estData, inputFilename + ".depth.png")