def plot(self, select=''):
    # Plot loss/accuracy curves for this network's measures.
    # select: comma-separated wildcard patterns choosing measures;
    #         empty string means "all recorded measures".
    measureList = []
    if select == '':
        measureList = self._measureList
    else:
        selections = select.split(',')
        for selection in selections:
            measureList += tb.wildcardMatch(self._measureList, selection)
        measureList = tb.unique(measureList)
    plot = Plot("loss/accuracy for %s" % self._networkName)
    def plotMeasure(name, label, color):
        # Plot one measure and remove it from the pending list so each
        # measure is drawn at most once.
        plot.plotList(label, self._measures[name], color)
        measureList.remove(name)
    # 1) measures with a configured fixed style
    for name, (label, style) in Config.plotMeasureStyles.iteritems():
        if name in measureList:
            plotMeasure(name, name, style)
    # 2) remaining test_* measures, then 3) everything else, with cycling
    # styles; iterate over copies since plotMeasure mutates the list.
    styles = tb.styleList()
    for name in measureList[:]:
        if name.startswith('test_'):
            plotMeasure(name, name, styles.pop(0))
    for name in measureList[:]:
        plotMeasure(name, name, styles.pop(0))
    plot.finish()
def plot(self, select=''):
    # Duplicate of plot() above: draw loss/accuracy curves for the selected
    # measures of this network. An empty selection means all measures.
    measureList = []
    if select == '':
        measureList = self._measureList
    else:
        selections = select.split(',')
        for selection in selections:
            measureList += tb.wildcardMatch(self._measureList, selection)
        measureList = tb.unique(measureList)
    plot = Plot("loss/accuracy for %s" % self._networkName)
    def plotMeasure(name, label, color):
        # Draw one measure, then drop it so later passes skip it.
        plot.plotList(label, self._measures[name], color)
        measureList.remove(name)
    # Configured styles first, then test_* measures, then the rest.
    for name, (label, style) in Config.plotMeasureStyles.iteritems():
        if name in measureList:
            plotMeasure(name, name, style)
    styles = tb.styleList()
    for name in measureList[:]:
        if name.startswith('test_'):
            plotMeasure(name, name, styles.pop(0))
    for name in measureList[:]:
        plotMeasure(name, name, styles.pop(0))
    plot.finish()
def compare(self, networks, losses):
    # Compare the training logs of several network folders in one plot.
    # networks: wildcard pattern(s) matched against sub-folders of cwd.
    # losses: wildcard pattern selecting measures, or None for all.
    folders = [ dir for dir in os.listdir('.') if os.path.isdir(dir) and not dir.startswith('.') ]
    networks = tb.wildcardMatch(folders, networks)
    logs = []
    measureNames = []
    for net in networks:
        logfile = '%s/training/log.txt' % net
        print 'reading %s' % logfile
        logs.append(Log(net, logfile))
        # Collect the union of measure names over all logs.
        for name in logs[-1].measureNames():
            if name not in measureNames:
                measureNames.append(name)
    if losses is not None:
        selectedNames = tb.unique(tb.wildcardMatch(measureNames, losses))
    else:
        selectedNames = tb.unique(measureNames)
    print 'comparing networks:'
    for net in networks:
        print "  ", net
    print 'comparing losses: '
    for name in selectedNames:
        print "  ", name
    Log.plotComparison(selectedNames, logs)
def makeComputeJob(self, job):
    """Fill *job* with the commands that compute this entry's output."""
    # All job output is captured in the entry-local log file.
    logPath = self.file('log.txt')
    job.setLog(logPath)
    tb.noticeVerbose('creating compute job for <%s>' % self.img1Path(), 'run')
    # Record the entry type in an 'info' file inside the working directory.
    infoCommand = 'echo type=%s > info' % self.type()
    job.addCommand(self.path(), infoCommand)
    # Delegate the actual computation and EPE evaluation to the subclass.
    self.computeJobImplementation(job)
    self.epeJobImplementation(job)
def makeComputeJob(self, job):
    """Populate *job* so it computes this entry and evaluates its EPE."""
    job.setLog(self.file('log.txt'))  # per-entry log destination
    tb.noticeVerbose('creating compute job for <%s>' % self.img1Path(), 'run')
    # First command tags the working directory with the entry type.
    job.addCommand(self.path(), 'echo type=%s > info' % self.type())
    # Subclass hooks append the compute and evaluation commands.
    self.computeJobImplementation(job)
    self.epeJobImplementation(job)
def slice(net, input_blob, slice_point, axis=1):
    """Set up a Slice layer cutting input_blob at slice_point along axis.

    slice_point may be a single point (two outputs) or a list of points
    (len + 1 outputs).
    """
    multi = tb.isList(slice_point)
    points = slice_point if multi else (slice_point,)
    n_out = len(slice_point) + 1 if multi else 2
    return Layers.Slice(net, input_blob, nout=n_out,
                        slice_param={'slice_point': points, 'axis': axis})
def notice(self, message, type=None):
    # Print a message unless silenced; with a type, route through tb.notice
    # for its formatting, otherwise plain-print with a trailing blank line.
    if self._silent:
        return
    if type is None:
        print '%s\n' % message
    else:
        tb.notice(message, type)
def viewFilters(self, iter):
    # Launch the external weight-viewer on a snapshot's filters.
    # NOTE(review): LD_LIBRARY_PATH/PATH are hard-coded machine-specific
    # snapshots of a developer environment -- brittle outside that setup.
    self.prepareTraining()
    prototxt = self._trainDir + '/train.prototxt'
    modelFile, iter = self.getModelFile(iter)
    os.environ['LD_LIBRARY_PATH'] = "/misc/lmbraid17/sceneflownet/common/programs/torch/install/lib:/usr/lib/x86_64-linux-gnu:/misc/lmbraid17/sceneflownet/common/software-root/lib:/home/ilge/dev/hackathon-caffe2/build/lib:/misc/software-lin/Qt-5.3.2/5.3/gcc_64/lib:/misc/lmbraid17/sceneflownet/common/programs/torch/install/lib:/usr/lib/x86_64-linux-gnu:/misc/lmbraid17/sceneflownet/common/software-root/lib:/home/ilge/dev/hackathon-caffe2/build/lib:/misc/software-lin/Qt-5.3.2/5.3/gcc_64/lib::/home/ilge/lib:/misc/software-lin/lmbsoft/openni-1.5.2.23-x86_64/usr/lib:/misc/software-lin/lmbsoft/glog/lib:/misc/software-lin/lmbsoft/mkl/lib:/misc/software-lin/lmbsoft/mkl/lib/intel64:/misc/software-lin/lmbsoft/cuda-6.5.14-x86_64/lib64:/misc/software-lin/lmbsoft/cuda-6.0.37-x86_64/lib64:/misc/student/mayern/OpenNI-Bin-Dev-Linux-x64-v1.5.4.0/Lib:/home/ilge/lib:/misc/software-lin/lmbsoft/openni-1.5.2.23-x86_64/usr/lib:/misc/software-lin/lmbsoft/glog/lib:/misc/software-lin/lmbsoft/mkl/lib:/misc/software-lin/lmbsoft/mkl/lib/intel64:/misc/software-lin/lmbsoft/cuda-6.5.14-x86_64/lib64:/misc/software-lin/lmbsoft/cuda-6.0.37-x86_64/lib64:/misc/student/mayern/OpenNI-Bin-Dev-Linux-x64-v1.5.4.0/Lib"
    os.environ['PATH'] = "/home/ilge/bin:/home/ilge/dev/pymill/bin:/misc/lmbraid17/sceneflownet/common/programs/torch/install/bin:/misc/lmbraid17/sceneflownet/common/software-root/bin:/misc/lmbraid17/sceneflownet/ilge/hackathon-caffe2/python/pymill/bin:/misc/software-lin/Qt-5.3.2/5.3/gcc_64/bin:/home/ilge/bin:/home/ilge/dev/pymill/bin:/misc/lmbraid17/sceneflownet/common/programs/torch/install/bin:/misc/lmbraid17/sceneflownet/common/software-root/bin:/misc/lmbraid17/sceneflownet/ilge/hackathon-caffe2/python/pymill/bin:/misc/software-lin/Qt-5.3.2/5.3/gcc_64/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/misc/software-lin/lmbsoft/cuda-6.5.14-x86_64/bin:/misc/software-lin/matlabR2013a/bin:/home/ilge/data/caffe/matching/bin:/misc/lmbraid15/hackathon/common/flo-results/bin:/misc/lmbraid17/sceneflownet/common/data_tools:/misc/software-lin/lmbsoft/cuda-6.5.14-x86_64/bin:/misc/software-lin/matlabR2013a/bin:/home/ilge/data/caffe/matching/bin:/misc/lmbraid15/hackathon/common/flo-results/bin:/misc/lmbraid17/sceneflownet/common/data_tools"
    tb.system('/home/ilge/bin/weight-viewer %s %s' % (prototxt, modelFile))
def clean(self):
    """Clean every bound entry of every method/dataset pair; report a count."""
    cleaned = 0
    for method in self._methods:
        for dataset in self._datasets:
            for entry in dataset.bents():
                if entry.bind(method).clean():
                    cleaned += 1
    tb.notice('cleaned %d entries' % (cleaned), 'passed')
def clean(self):
    """Clean all bound entries and announce how many were actually cleaned."""
    # sum() over a generator keeps the same iteration order and side effects
    # as the original nested-loop counter.
    nDone = sum(
        1
        for m in self._methods
        for ds in self._datasets
        for ent in ds.bents()
        if ent.bind(m).clean())
    tb.notice("cleaned %d entries" % (nDone), "passed")
def slice(net, input_blob, slice_point, axis=1):
    """Build a Slice layer for input_blob.

    A scalar slice_point yields two outputs; a list yields len(list) + 1.
    """
    if tb.isList(slice_point):
        outputs = len(slice_point) + 1
        param = {'slice_point': slice_point, 'axis': axis}
    else:
        outputs = 2
        param = {'slice_point': (slice_point,), 'axis': axis}
    return Layers.Slice(net, input_blob, nout=outputs, slice_param=param)
def viewFilters(self, iter):
    # Duplicate of viewFilters() above: open the external weight-viewer on
    # the given snapshot. The environment variables below are hard-coded
    # developer-machine paths -- NOTE(review): brittle outside that setup.
    self.prepareTraining()
    prototxt = self._trainDir + '/train.prototxt'
    modelFile, iter = self.getModelFile(iter)
    os.environ['LD_LIBRARY_PATH'] = "/misc/lmbraid17/sceneflownet/common/programs/torch/install/lib:/usr/lib/x86_64-linux-gnu:/misc/lmbraid17/sceneflownet/common/software-root/lib:/home/ilge/dev/hackathon-caffe2/build/lib:/misc/software-lin/Qt-5.3.2/5.3/gcc_64/lib:/misc/lmbraid17/sceneflownet/common/programs/torch/install/lib:/usr/lib/x86_64-linux-gnu:/misc/lmbraid17/sceneflownet/common/software-root/lib:/home/ilge/dev/hackathon-caffe2/build/lib:/misc/software-lin/Qt-5.3.2/5.3/gcc_64/lib::/home/ilge/lib:/misc/software-lin/lmbsoft/openni-1.5.2.23-x86_64/usr/lib:/misc/software-lin/lmbsoft/glog/lib:/misc/software-lin/lmbsoft/mkl/lib:/misc/software-lin/lmbsoft/mkl/lib/intel64:/misc/software-lin/lmbsoft/cuda-6.5.14-x86_64/lib64:/misc/software-lin/lmbsoft/cuda-6.0.37-x86_64/lib64:/misc/student/mayern/OpenNI-Bin-Dev-Linux-x64-v1.5.4.0/Lib:/home/ilge/lib:/misc/software-lin/lmbsoft/openni-1.5.2.23-x86_64/usr/lib:/misc/software-lin/lmbsoft/glog/lib:/misc/software-lin/lmbsoft/mkl/lib:/misc/software-lin/lmbsoft/mkl/lib/intel64:/misc/software-lin/lmbsoft/cuda-6.5.14-x86_64/lib64:/misc/software-lin/lmbsoft/cuda-6.0.37-x86_64/lib64:/misc/student/mayern/OpenNI-Bin-Dev-Linux-x64-v1.5.4.0/Lib"
    os.environ['PATH'] = "/home/ilge/bin:/home/ilge/dev/pymill/bin:/misc/lmbraid17/sceneflownet/common/programs/torch/install/bin:/misc/lmbraid17/sceneflownet/common/software-root/bin:/misc/lmbraid17/sceneflownet/ilge/hackathon-caffe2/python/pymill/bin:/misc/software-lin/Qt-5.3.2/5.3/gcc_64/bin:/home/ilge/bin:/home/ilge/dev/pymill/bin:/misc/lmbraid17/sceneflownet/common/programs/torch/install/bin:/misc/lmbraid17/sceneflownet/common/software-root/bin:/misc/lmbraid17/sceneflownet/ilge/hackathon-caffe2/python/pymill/bin:/misc/software-lin/Qt-5.3.2/5.3/gcc_64/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/misc/software-lin/lmbsoft/cuda-6.5.14-x86_64/bin:/misc/software-lin/matlabR2013a/bin:/home/ilge/data/caffe/matching/bin:/misc/lmbraid15/hackathon/common/flo-results/bin:/misc/lmbraid17/sceneflownet/common/data_tools:/misc/software-lin/lmbsoft/cuda-6.5.14-x86_64/bin:/misc/software-lin/matlabR2013a/bin:/home/ilge/data/caffe/matching/bin:/misc/lmbraid15/hackathon/common/flo-results/bin:/misc/lmbraid17/sceneflownet/common/data_tools"
    tb.system('/home/ilge/bin/weight-viewer %s %s' % (prototxt, modelFile))
def archive(self, src, target, iter_step=-1):
    """Shrink the training environment at *src* and move it under *target*.

    Raises Exception if the destination already exists.
    """
    name = os.path.basename(src)
    destination = os.path.join(target, name)
    if os.path.exists(destination):
        raise Exception("target path %s already exists" % destination)
    # Shrink snapshots before moving to keep the archive small.
    environment = Environment(src, backend=self._backend,
                              unattended=self._unattended,
                              silent=self._silent)
    environment.init()
    environment.shrink(iter_step=iter_step)
    tb.notice('archiving %s to %s' % (src, destination), 'run')
    os.system('mv %s %s' % (src, destination))
def update(self):
    """Create and submit one update-job packet per method/dataset pair."""
    queue = tb.Queue()
    for method in self._methods:
        for dataset in self._datasets:
            tb.notice('creating jobs for <%s> on <%s>' % (method, dataset))
            # Directionless methods operate on unbound entries, directional
            # ones on bound entries.
            entries = dataset.uents() if method.direction() == '' else dataset.bents()
            for entry in entries:
                job = tb.Job()
                entry.bind(method).makeUpdateJob(job)
                queue.postJob(job)
            queue.finishPacket()
    queue.submit(local=self._args.local, cores=self._args.cores)
def check(self):
    """Verify the outputs of every entry and report a pass/fail summary."""
    total = 0
    passed = 0
    for method in self._methods:
        for dataset in self._datasets:
            entries = dataset.uents() if method.direction() == '' else dataset.bents()
            for entry in entries:
                if entry.bind(method).checkOut(self._args.verbose):
                    passed += 1
                total += 1
    # Summary is flagged 'passed' only when every single entry checked out.
    status = 'passed' if passed == total else 'failed'
    tb.notice('(%d/%d) passed' % (passed, total), status)
def prototxt(self, inFile, outDir, defs=None):
    """Turn *inFile* (.prototxt, .prototmp or .py) into a prototxt in *outDir*.

    defs: extra name=value definitions handed to the preprocessor/generator;
          the network name is always added under the key 'name'.
    Returns the path of the resulting prototxt file.
    Raises Exception for missing inputs, failed conversions or unknown
    extensions.
    """
    # BUG FIX: the original used the mutable default 'defs={}' and mutated
    # it below, leaking entries between calls sharing the default.
    if defs is None:
        defs = {}
    defs['name'] = self._name
    if not os.path.isfile(inFile):
        raise Exception('input file %s not file' % inFile)
    if inFile.endswith('.prototxt'):
        # Already a prototxt: copy it next to the generated files as-is.
        os.system('cp %s %s' % (inFile, outDir))
        return '%s' % (inFile)
    elif inFile.endswith('.prototmp'):
        prototxt = '%s/%s.prototxt' % (outDir, os.path.basename(inFile).replace('.prototmp', ''))
        if not self._silent:
            tb.notice('preprocessing %s' % inFile, 'run')
        tb.preprocessFile(inFile, prototxt, defs)
        return prototxt
    elif inFile.endswith('.py'):
        prototxt = '%s/%s.prototxt' % (outDir, os.path.basename(inFile).replace('.py', ''))
        # Definitions are passed to the generator script as key=value args.
        # (.items() replaces the Python-2-only .iteritems().)
        args = ' '.join('%s=%s' % (k, v) for k, v in defs.items())
        if not self._silent:
            if not len(defs):
                tb.notice('converting %s' % inFile, 'run')
            else:
                tb.notice('converting %s (%s)' % (inFile, args), 'run')
        if os.system('python -B %s %s > %s' % (inFile, args, prototxt)) != 0:
            raise Exception('conversion of %s failed' % inFile)
        return prototxt
    else:
        raise Exception('don\'t know how to convert file %s to prototxt' % inFile)
def prototxt(self, inFile, outDir, defs=None):
    """Convert *inFile* into a .prototxt inside *outDir* and return its path.

    Supports plain .prototxt (copied), .prototmp (preprocessed) and .py
    (executed generator) inputs. defs are forwarded definitions; 'name' is
    always set to the network name.
    """
    # BUG FIX: 'defs={}' was a mutable default that this method mutates,
    # so state leaked across calls; use None + fresh dict instead.
    if defs is None:
        defs = {}
    defs['name'] = self._name
    if not os.path.isfile(inFile):
        raise Exception('input file %s not file' % inFile)
    if inFile.endswith('.prototxt'):
        os.system('cp %s %s' % (inFile, outDir))
        return '%s' % (inFile)
    elif inFile.endswith('.prototmp'):
        prototxt = '%s/%s.prototxt' % (outDir, os.path.basename(inFile).replace('.prototmp', ''))
        if not self._silent:
            tb.notice('preprocessing %s' % inFile, 'run')
        tb.preprocessFile(inFile, prototxt, defs)
        return prototxt
    elif inFile.endswith('.py'):
        prototxt = '%s/%s.prototxt' % (outDir, os.path.basename(inFile).replace('.py', ''))
        # key=value arguments for the generator script; .items() works in
        # both Python 2 and 3 (unlike the former .iteritems()).
        args = ' '.join('%s=%s' % (k, v) for k, v in defs.items())
        if not self._silent:
            if not len(defs):
                tb.notice('converting %s' % inFile, 'run')
            else:
                tb.notice('converting %s (%s)' % (inFile, args), 'run')
        if os.system('python -B %s %s > %s' % (inFile, args, prototxt)) != 0:
            raise Exception('conversion of %s failed' % inFile)
        return prototxt
    else:
        raise Exception('don\'t know how to convert file %s to prototxt' % inFile)
def update(self):
    """Build update jobs for all entries and submit them as one queue."""
    queue = tb.Queue()
    for m in self._methods:
        for ds in self._datasets:
            tb.notice("creating jobs for <%s> on <%s>" % (m, ds))
            if m.direction() == "":
                entryIter = ds.uents()   # undirected methods use unbound entries
            else:
                entryIter = ds.bents()   # directed methods use bound entries
            for ent in entryIter:
                job = tb.Job()
                ent.bind(m).makeUpdateJob(job)
                queue.postJob(job)
            queue.finishPacket()
    queue.submit(local=self._args.local, cores=self._args.cores)
def check(self):
    """Run checkOut() on every entry and print an overall (ok/total) verdict."""
    counts = [0, 0]  # [ok, total]
    for m in self._methods:
        for ds in self._datasets:
            for ent in (ds.uents() if m.direction() == "" else ds.bents()):
                counts[0] += 1 if ent.bind(m).checkOut(self._args.verbose) else 0
                counts[1] += 1
    verdict = "passed" if counts[0] == counts[1] else "failed"
    tb.notice("(%d/%d) passed" % (counts[0], counts[1]), verdict)
def archive(self, src, target, iter_step=-1):
    """Move the environment at *src* into *target* after shrinking it.

    Refuses to overwrite an existing destination.
    """
    destination = os.path.join(target, os.path.basename(src))
    if os.path.exists(destination):
        raise Exception("target path %s already exists" % destination)
    # Initialize and shrink the source environment first so only the
    # reduced data gets moved.
    env = Environment(src,
                      backend=self._backend,
                      unattended=self._unattended,
                      silent=self._silent)
    env.init()
    env.shrink(iter_step=iter_step)
    tb.notice('archiving %s to %s' % (src, destination), 'run')
    os.system('mv %s %s' % (src, destination))
def readBents(bents):
    """Read a bents tuple list and group it into per-scene frame dicts.

    Returns an OrderedDict mapping scene name (tuple column 2) to a list of
    image/flow path dicts, two per frame (clean-L and its successor).
    """
    # FIX: renamed the local 'list' -- it shadowed the builtin.
    rows = tb.readTupleList(bents)
    # Split consecutive rows into clips whenever the scene name changes.
    clips = []
    clip = []
    current = None
    for ent in rows:
        if ent[2] != current:
            if len(clip):
                clips.append(clip)
            clip = []
            current = ent[2]
        clip.append(ent)
    if len(clip):
        clips.append(clip)
    data = OrderedDict()
    for clip in clips:
        sceneName = clip[0][2]
        data[sceneName] = []
        for frame in clip:
            # Columns 5/6 are clean left images, column 7 the forward flow;
            # the 'final' render path is derived by string substitution.
            data[sceneName].append({'cleanImageL': frame[5],
                                    'finalImageL': frame[5].replace('clean', 'final'),
                                    'forwardFlowL': frame[7]})
            data[sceneName].append({'cleanImageL': frame[6],
                                    'finalImageL': frame[6].replace('clean', 'final')})
    return data
def runProto(self, proto):
    # Run a caffe test pass using the given proto definition file.
    defFile = proto
    modelFile, iter = self._env.getModelFile(self._iter)
    print 'testing for iteration %d ...' % self._iter
    if self._output:
        # Redirect network output blobs into a per-run directory; the
        # escaped quotes survive prototxt string substitution.
        dir = 'output_%s_%d' % (self._name, self._iter)
        tb.system('mkdir -p %s' % dir)
        self._variables['TEST_OUTPUT'] = 1
        self._variables['TEST_OUTPUT_DIR'] = '"\\"%s\\""' % dir
    self._env.makeScratchDir()
    defPrototxt = self._env.prototxt(defFile, 'scratch', self._variables)
    print defFile, defPrototxt
    tb.system('%s test -weights %s -model %s -gpu 0 -iterations %d 2>&1' % (Environment.caffeBin(), modelFile, defPrototxt, self._iterations))
def epeStat(self):
    # Compute endpoint-error statistics via the external FlowStat tool.
    # Only meaningful for exactly one method.
    if len(self._methods) > 1:
        raise Exception('cannot make epe stat for more than one method')
    method = self._methods[0]
    entries = []
    for ds in self._datasets:
        for ent in ds.bents():
            entries.append(ent.bind(method).flowStatParams())
    cmd = 'FlowStat --epe-type=%s %s %s %s %s' % (
        self._args.type,
        '--make-epe' if self._args.make_epe else '',
        '--make-stat' if self._args.make_stat else '',
        '--refresh' if self._args.refresh else '',
        '--stat')
    # Entry parameters are fed to FlowStat via stdin; the trailing comma
    # suppresses the extra newline of the py2 print statement.
    print tb.run(cmd, '\n'.join(entries)),
def scale(net, image_blob, factor):
    """Scale image_blob by *factor* (a per-channel list or a single scalar)."""
    if not tb.isList(factor):
        # Scalar case: an eltwise SUM over one input with one coefficient
        # simply multiplies the blob by the coefficient.
        return Layers.Eltwise(net, image_blob, nout=1,
                              eltwise_param={'operation': Params.Eltwise.SUM,
                                             'coeff': (factor,)})
    # List case: a 1x1 convolution whose weight matrix is diagonal applies
    # one factor per channel.
    conv_param = {
        'num_output': len(factor),
        'pad': 0,
        'kernel_size': 1,
        'stride': 1,
        'weight_filler': {'type': 'diagonal', 'diag_val': factor},
        'bias_filler': {'type': 'constant'},
    }
    return Layers.Convolution(net, image_blob, nout=1, convolution_param=conv_param)
def testref(self, iter, definition=None, vars={}, num_iter=-1):
    # Run a low-resolution reference test for a snapshot iteration.
    # NOTE(review): the mutable default 'vars={}' is mutated below and is
    # shared across calls using the default -- left unchanged here.
    modelFile, iter = self.getModelFile(iter)
    vars['output'] = True
    vars['prefix'] = 'ref_%d' % iter
    vars['lowres'] = True
    self.makeScratchDir()
    if definition is None:
        definition = 'test'
    proto = self.findProto(definition)
    # Remove stale output for this iteration/dataset (after confirmation,
    # unless running unattended).
    outPath = '%s/output_%d_%s' % (self._path, iter, vars['dataset'])
    if os.path.isdir(outPath):
        if self._unattended or tb.queryYesNo('Output folder %s exists, do you want to delete it first?' % os.path.basename(outPath)):
            os.system('rm -rf %s' % outPath)
    finalProto = self.makeScratchPrototxt(proto, vars)
    solverProto = self.makeScratchPrototxt(self._solverProto, vars)
    self.notice('testing snapshot iteration %d for %d iterations...' % (iter, num_iter), 'notice')
    os.chdir(self._path)
    self._backend.test(caffemodelFilename=modelFile, protoFilename=finalProto, iterations=num_iter, logFile=self._scratchLogFile)
def __init__(self, networkName, filename):
    """Parse a caffe training log into per-measure (iteration, value) series."""
    lines = open(filename, 'r').readlines()
    iter = -1  # current iteration; -1 until the first 'Iteration' line appears
    self._networkName = networkName
    self._lines = []        # (iteration, raw line) pairs, in file order
    self._measures = {}     # measure name -> list of (iteration, value)
    self._measureList = []  # measure names in order of first appearance
    def appendMeasure(name, iter, value):
        # Record one sample for the named measure, registering it on first use.
        if name not in self._measures:
            self._measures[name] = []
            self._measureList.append(name)
        self._measures[name].append((iter, float(value)))
    for l in lines:
        # Lines without the glog ']' separator carry no parsable message.
        if ']' not in l:
            self._lines.append((iter, l))
            continue
        msg = l.split(']')[1].strip()
        if msg.startswith('Iteration'):
            match = re.compile('Iteration ([0-9]+)').match(msg)
            if match:
                iter = int(match.group(1))
            match = re.compile('Iteration [0-9]+, loss = (([0-9]|\.)+)').match(msg)
            if match:
                appendMeasure('train_loss', iter, match.group(1))
        if msg.startswith('Test loss'):
            value = re.compile('Test loss: (([0-9]|\.)+)').match(msg).group(1)
            appendMeasure('test_loss', iter, value)
        if msg.startswith('Train net output'):
            match = re.compile('Train net output ..: ([a-zA-Z0-9_-]+) = (([0-9]|\.)+)').match(msg)
            if match:
                name = match.group(1)
                value = match.group(2)
                appendMeasure('train_' + name, iter, value)
        if msg.startswith('Test net output'):
            match = re.compile('Test net output #[a-zA-Z0-9_-]+: ([a-zA-Z0-9_-]+) = (([0-9]|\.)+)').match(msg)
            if match:
                name = match.group(1)
                value = match.group(2)
                appendMeasure('test_' + name, iter, value)
        self._lines.append((iter, l))
    self._measureList = tb.unique(self._measureList)
def testFiles(self, filelist, iter, output=False, definition=None, vars={}):
    # Run the network once per sample listed in *filelist*, rebuilding the
    # net for each sample so per-image width/height can differ.
    # NOTE(review): mutable default 'vars={}' is mutated below and shared
    # across calls using the default -- left unchanged here.
    modelFile, iter = self.getModelFile(iter)
    assert(output)
    vars['output'] = True
    vars['prefix'] = iter
    vars['dataset'] = os.path.splitext(os.path.basename(filelist))[0]
    self.makeScratchDir()
    if definition is None:
        definition = 'testsingle'
    proto = self.findProto(definition)
    if output and 'dataset' in vars:
        # Clear stale output for this iteration/dataset (after confirmation).
        outPath = '%s/output_%d_%s' % (self._path, iter, vars['dataset'])
        if os.path.isdir(outPath):
            if self._unattended or tb.queryYesNo('Output folder %s exists, do you want to delete it first?' % os.path.basename(outPath)):
                os.system('rm -rf %s' % outPath)
    data_list = IO.readTupleList(filelist)
    print data_list
    # Run a new net for every sample:
    for idx, line in enumerate(data_list):
        num_blobs = len(line)
        input_data = []
        for blob_idx in range(num_blobs):
            img = IO.readImage(line[blob_idx])
            #print(img.shape)
            # HWC -> NCHW; [2,1,0] swaps the channel order (presumably
            # RGB -> BGR for caffe -- TODO confirm).
            input_data.append(img[np.newaxis, :, :, :].transpose(0, 3, 1, 2)[:,[2,1,0],:,:])
            #print(input_data[-1].shape)
        vars['width'] = input_data[0].shape[3]
        vars['height'] = input_data[0].shape[2]
        vars['basename'] = 'b%03d' % idx
        finalProto = self.makeScratchPrototxt(proto, vars)
        caffe.set_logging_disabled()
        caffe.set_mode_gpu()
        net = caffe.Net(finalProto, modelFile, caffe.TEST)
        print 'Network forward pass (%d of %d). %d inputs of shapes:' % (idx+1, len(data_list), num_blobs)
        for blob_idx in range(num_blobs):
            print(" " + str(input_data[blob_idx].shape))
        if not len(net.inputs) == len(line):
            raise Exception('Net has %d inputs and in file list there are %d' % (len(net.inputs), len(line)))
        input_dict = {}
        for blob_idx in range(num_blobs):
            input_dict[net.inputs[blob_idx]] = input_data[blob_idx]
        net.forward(**input_dict)
    print 'Iteration was %d' %iter
def computeHistograms(resolution, subpath, collectionName, clips, skipIfExists=False, overwrite=True, numBins=5000, maxValue=1000):
    # Accumulate magnitude histograms (flow / disparity / disparity change)
    # over all frames of the given clips and save them into one .npz file.
    # NOTE(review): skipIfExists and overwrite are accepted but never used.
    dataPath = '/misc/lmbraid17/sceneflownet/common/data/4_bin-db'
    savePath = '%s/hists/%s/%s' % (dataPath, resolution, subpath)
    saveFile = '%s/%s.npz' % (savePath, collectionName)
    completePath = os.path.dirname(saveFile)
    os.system('mkdir -p %s' % completePath)
    print 'savePath', savePath
    print 'completePath', completePath
    print 'saveFile', saveFile
    histFlow = np.zeros(numBins)
    histDisp = np.zeros(numBins)
    histDispChange = np.zeros(numBins)
    for clip in clips:
        print 'processing', clip
        for i in range(clip.startFrame(), clip.endFrame()+1):
            frame = clip.frame(i)
            if i%10 == 1:
                print '%d/%d' % (i - clip.startFrame() + 1, clip.endFrame() - clip.startFrame() + 1)
            # flow histogram
            if frame.hasForwardFlowL():
                values = tb.readFlow(frame.forwardFlowL())
                # per-pixel L2 norm over the last axis
                mag = np.power(np.sum(np.power(values, 2), axis=2), 0.5)
                curr_hist, _ = np.histogram(mag, bins=numBins, range=(0,maxValue))
                histFlow += curr_hist
            # disparity histogram
            if frame.hasDispL():
                values = tb.readDisparity(frame.dispL())
                mag = np.abs(values)
                curr_hist, _ = np.histogram(mag, bins=numBins, range=(0,maxValue))
                histDisp += curr_hist
            # disparity change histogram
            # NOTE(review): the guard checks hasBackwardDispChangeL() but the
            # read uses forwardDispChangeL() -- looks like a bug, confirm.
            if frame.hasBackwardDispChangeL():
                values = tb.readDisparity(frame.forwardDispChangeL())
                mag = np.abs(values)
                curr_hist, _ = np.histogram(mag, bins=numBins, range=(0,maxValue))
                histDispChange += curr_hist
    np.savez(saveFile, histFlow=histFlow, histDisp=histDisp, histDispChange=histDispChange)
def runProto(self, proto):
    # Duplicate of runProto() above: execute a caffe test pass with the
    # given definition file, optionally redirecting outputs to a directory.
    defFile = proto
    modelFile, iter = self._env.getModelFile(self._iter)
    print 'testing for iteration %d ...' % self._iter
    if self._output:
        dir = 'output_%s_%d' % (self._name, self._iter)
        tb.system('mkdir -p %s' % dir)
        self._variables['TEST_OUTPUT'] = 1
        # Escaped quotes survive prototxt substitution.
        self._variables['TEST_OUTPUT_DIR'] = '"\\"%s\\""' % dir
    self._env.makeScratchDir()
    defPrototxt = self._env.prototxt(defFile, 'scratch', self._variables)
    print defFile, defPrototxt
    tb.system('%s test -weights %s -model %s -gpu 0 -iterations %d 2>&1' % (Environment.caffeBin(), modelFile, defPrototxt, self._iterations))
def epeStat(self):
    # Duplicate of epeStat() above: run FlowStat over all bound entries of
    # the single configured method and print its output.
    if len(self._methods) > 1:
        raise Exception("cannot make epe stat for more than one method")
    method = self._methods[0]
    entries = []
    for ds in self._datasets:
        for ent in ds.bents():
            entries.append(ent.bind(method).flowStatParams())
    cmd = "FlowStat --epe-type=%s %s %s %s %s" % (
        self._args.type,
        "--make-epe" if self._args.make_epe else "",
        "--make-stat" if self._args.make_stat else "",
        "--refresh" if self._args.refresh else "",
        "--stat",
    )
    # stdin feeds the entry list; trailing comma avoids the extra newline.
    print tb.run(cmd, "\n".join(entries)),
def getSelectedCollections():
    # Resolve the --collections argument (comma-separated wildcard patterns)
    # against the module-level 'collections' dict; empty string selects all.
    collectionNames = collections.keys()
    selectedNames = []
    if args.collections == '':
        selectedNames = collectionNames
    else:
        exprs = args.collections.split(',')
        for expr in exprs:
            selectedNames += tb.wildcardMatch(collectionNames, expr)
        selectedNames = tb.unique(selectedNames)
    selectedCollections = {}
    for name in selectedNames:
        for collectionName, collection in collections.iteritems():
            if collectionName == name:
                selectedCollections[name] = collection
    return selectedCollections
def __init__(self, networkName, filename):
    """Duplicate of the Log constructor above: parse a caffe training log
    into per-measure (iteration, value) series keyed by measure name."""
    lines = open(filename,'r').readlines()
    iter = -1  # last seen training iteration
    self._networkName = networkName
    self._lines = []        # (iteration, raw line) pairs
    self._measures = {}     # measure name -> [(iteration, value), ...]
    self._measureList = []  # first-seen order of measure names
    def appendMeasure(name, iter, value):
        # Register the measure on first use, then append the sample.
        if name not in self._measures:
            self._measures[name]=[]
            self._measureList.append(name)
        self._measures[name].append((iter,float(value)))
    for l in lines:
        if ']' not in l:
            # No glog ']' separator: keep the raw line, nothing to parse.
            self._lines.append((iter, l))
            continue
        msg = l.split(']')[1].strip()
        if msg.startswith('Iteration'):
            match = re.compile('Iteration ([0-9]+)').match(msg)
            if match:
                iter = int(match.group(1))
            match = re.compile('Iteration [0-9]+, loss = (([0-9]|\.)+)').match(msg)
            if match:
                appendMeasure('train_loss', iter, match.group(1))
        if msg.startswith('Test loss'):
            value = re.compile('Test loss: (([0-9]|\.)+)').match(msg).group(1)
            appendMeasure('test_loss', iter, value)
        if msg.startswith('Train net output'):
            match = re.compile('Train net output ..: ([a-zA-Z0-9_-]+) = (([0-9]|\.)+)').match(msg)
            if match:
                name = match.group(1)
                value = match.group(2)
                appendMeasure('train_'+name, iter, value)
        if msg.startswith('Test net output'):
            match = re.compile('Test net output #[a-zA-Z0-9_-]+: ([a-zA-Z0-9_-]+) = (([0-9]|\.)+)').match(msg)
            if match:
                name = match.group(1)
                value = match.group(2)
                appendMeasure('test_'+name, iter, value)
        self._lines.append((iter, l))
    self._measureList = tb.unique(self._measureList)
def compute(self, queue):
    """Queue a MATLAB job that runs gbDetect on this entry's first image."""
    OpticalFlow.Method.compute(self, queue)
    # Build the one-shot MATLAB command line; the quoting inside -r is
    # preserved exactly as required by the MATLAB shell invocation.
    matlabTemplate = Template('/misc/software-lin/matlabR2013a/bin/matlab -nodesktop -nojvm -r "gbDetect(\'$imagePath\',1); exit"')
    command = matlabTemplate.safe_substitute(imagePath=self.ent().img1Path())
    job = tb.Job()
    job.setPath(self.path())
    job.addCommand(command)
    queue.postJob(job)
def makeFlowCheck(resolution, rendertype, clip):
    # Write warp-check mosaics for every consecutive frame pair of *clip*:
    # warping frame i+1 backwards by the forward flow should look like frame i.
    print clip.path()
    checkPath = clip.path() + '/flow_check'
    os.system('mkdir -p %s' % checkPath)
    for i in range(clip.startFrame(), clip.endFrame()):
        print 'frame %d' % i
        frame = clip.frame(i, rendertype)
        Image0 = misc.imread(frame.ImageL())
        Image1 = misc.imread(frame.ImageL(+1))
        # keep only the 2 flow channels of the PFM data
        flow = tb.readPFM(frame.forwardFlowL())[0][:,:,0:2]
        warped = tb.flowWarp(Image1, flow)
        h = Image0.shape[0]
        w = Image0.shape[1]
        # 2x2 mosaic: [frame i | frame i+1] over [warped i+1 | black]
        ImageCheck = np.zeros((2*h, 2*w, 3)).astype(np.uint8)
        ImageCheck[0:h, 0:w, :] = Image0
        ImageCheck[0:h, w:2*w, :] = Image1
        ImageCheck[h:2*h, 0:w, :] = warped
        misc.imsave('%s/%s.png' % (checkPath, os.path.basename(frame.ImageL())), ImageCheck)
def compute(self, queue):
    """Queue a VarFlowRefine job refining this entry's low-resolution flow.

    Builds the command line from the spec parameters and adds ground-truth
    evaluation flags when the entry has ground truth available.
    """
    OpticalFlow.Method.compute(self, queue)
    ent = self.ent()
    input = self.spec().inputs()[0]
    inputPath = ent.inFile(input, "lowres.flo")
    if not os.path.exists(inputPath):
        raise Exception('input does not exist: %s' % inputPath)
    # Translate spec parameters into VarFlowRefine command-line options.
    params = self.spec().params()
    paramStr = ''
    if 'b' in params and params['b'] == '1':
        paramStr += ' --boundaries=' + ent.boundaryPath()
    if 'delta' in params:
        paramStr += ' --delta=' + params['delta']
    if 'beta' in params:
        paramStr += ' --beta=' + params['beta']
    if 'sigma' in params:
        paramStr += ' --sigma=' + params['sigma']
    os.system("rm -rf %s" % ent.outFile(str(self.spec()), 'flow.flo'))
    # BUG FIX: the original tested 'ent.flowGTPath != None', which compares
    # the bound method object itself to None and is therefore always true,
    # making the no-ground-truth branch unreachable. Call the accessor.
    if ent.flowGTPath() is not None:
        temp = Template('VarFlowRefine $img1 $img2 $flo $out $params --groundtruth=$groundtruth --make-epe --make-stat')
        command = temp.safe_substitute(
            img1=ent.img1Path(),
            img2=ent.img2Path(),
            flo=inputPath,
            out=ent.outFile(str(self.spec()), 'flow.flo'),
            groundtruth='%s:%s:%s' % (ent.flowGTPath(), ent.occGTPath(), ent.mbdistGTPath()),
            params=paramStr)
    else:
        temp = Template('VarFlowRefine $img1 $img2 $flo $out $params')
        command = temp.safe_substitute(
            img1=ent.img1Path(),
            img2=ent.img2Path(),
            flo=inputPath,
            out=ent.outFile(str(self.spec()), 'flow.flo'),
            params=paramStr)
    job = tb.Job()
    job.setPath(self.path())
    job.addCommand(command)
    queue.postJob(job)
def plotComparison(names, logs):
    """Plot the selected measures of several logs into one comparison figure."""
    plot = Plot("loss/accuracy comparison")
    styles = tb.styleList()
    for log in logs:
        # One style per network, so all of its measures share a colour.
        style = styles.pop(0)
        for name in names:
            if name not in log.measures():
                continue
            label = '%s for %s' % (name, log.networkName())
            plot.plotList(label, log.measures()[name], style)
    plot.finish()
def plotComparison(names, logs):
    """Overlay the chosen measures of multiple training logs in one plot."""
    comparison = Plot("loss/accuracy comparison")
    availableStyles = tb.styleList()
    for log in logs:
        networkStyle = availableStyles.pop(0)  # one colour per network
        measures = log.measures()
        for name in names:
            if name in measures:
                comparison.plotList('%s for %s' % (name, log.networkName()),
                                    measures[name], networkStyle)
    comparison.finish()
def test(self, iter, output=False, definition=None, vars={}, num_iter=-1, discard=False): modelFile, iter = self.getModelFile(iter) if output: vars['output'] = True vars['prefix'] = iter self.makeScratchDir() if definition is None: definition = 'test' proto = self.findProto(definition) if output and 'dataset' in vars: outPath = '%s/output_%d_%s' % (self._path, iter, vars['dataset']) if os.path.isdir(outPath): if self._unattended or tb.queryYesNo( 'Output folder %s exists, do you want to delete it first?' % os.path.basename(outPath)): os.system('rm -rf %s' % outPath) finalProto = self.makeScratchPrototxt(proto, vars) solverProto = self.makeScratchPrototxt(self._solverProto, vars) self.notice( 'testing snapshot iteration %d for %d iterations...' % (iter, num_iter), 'notice') os.chdir(self._path) self._backend.test(caffemodelFilename=modelFile, protoFilename=finalProto, iterations=num_iter, logFile=self._scratchLogFile) if output and 'dataset' in vars and num_iter == -1: outPath = '%s/output_%d_%s' % (self._path, iter, vars['dataset']) if os.path.isdir(outPath): logFile = '%s/log.txt' % outPath print 'saving log to %s', logFile os.system('cp %s %s' % (self._scratchLogFile, logFile)) if 'dataset' in vars: self._saveTestResults(iter, vars['dataset'], not discard) print 'Iteration was %d' % iter
def epeLists(methods, datasets, type='all'):
    """Collect FlowStat EPE output lines per method over all datasets.

    Returns a dict mapping str(method) to the non-empty output lines.
    """
    epe = {}
    for method in methods:
        # Gather the stdin parameter lines for every bound entry.
        entries = []
        for ds in datasets:
            for ent in ds.bents():
                entries.append(ent.bind(method).flowStatParams())
        cmd = 'FlowStat --epe-type=%s' % (type, )
        output = tb.run(cmd, '\n'.join(entries))
        epe[str(method)] = [ln for ln in output.split('\n') if ln.strip() != '']
    return epe
def compare(self, networks, losses):
    # Duplicate of compare() above: read the training logs of all matching
    # network folders and plot the selected measures side by side.
    folders = [dir for dir in os.listdir('.') if os.path.isdir(dir) and not dir.startswith('.')]
    networks = tb.wildcardMatch(folders, networks)
    logs = []
    measureNames = []
    for net in networks:
        logfile = '%s/training/log.txt' % net
        print 'reading %s' % logfile
        logs.append(Log(net, logfile))
        # Union of measure names over all logs, preserving order.
        for name in logs[-1].measureNames():
            if name not in measureNames:
                measureNames.append(name)
    if losses is not None:
        selectedNames = tb.unique(tb.wildcardMatch(measureNames, losses))
    else:
        selectedNames = tb.unique(measureNames)
    print 'comparing networks:'
    for net in networks:
        print "  ", net
    print 'comparing losses: '
    for name in selectedNames:
        print "  ", name
    Log.plotComparison(selectedNames, logs)
def _callCopiedBin(self, cmd):
    """Copy the caffe binary plus its libcaffe.so locally and run *cmd*."""
    bin = './' + os.path.basename(caffeBin())
    tb.notice('making a local copy of %s' % caffeBin())
    os.system('cp %s .' % caffeBin())
    # Find the libcaffe.so the binary is linked against via ldd output.
    caffeLib = None
    for line in tb.run('ldd %s' % caffeBin()).split('\n'):
        match = re.match('\\s*libcaffe.so => (.*\.so)', line)
        if match:
            caffeLib = match.group(1)
            break
    if caffeLib is None:
        raise Exception('cannot find libcaffe.so dependency')
    tb.notice('making a local copy of %s' % caffeLib)
    os.system('cp %s .' % caffeLib)
    # Prefer the local copies via LD_LIBRARY_PATH; GLOG verbosity follows
    # the quiet flag.
    cmd = 'GLOG_logtostderr=%d LD_LIBRARY_PATH=.:$LD_LIBRARY_PATH %s %s' % (not self._quiet, bin, cmd)
    if not self._silent:
        tb.notice('running "%s"' % cmd, 'run')
    tb.system(cmd)
def resume(self, iter=-1):
    """Continue training from an existing .solverstate snapshot.

    iter: snapshot iteration to resume from (-1 = latest).
    """
    if not len(self._stateFiles):
        raise Exception('no .solverstate files to continue from')
    stateFile, iter = self.getStateFile(iter)
    # Data newer than the chosen snapshot must go; ask first when attended.
    if self.existingData(iter) and not self._unattended:
        question = 'Existing data beyond iteration %d found. Do you want to delete it and continue?' % iter
        if not tb.queryYesNo(question):
            return
    self.clean(iter)
    solverFilename = self.prepareTraining()
    self.notice('continuing from iteration %d ...' % iter, 'notice')
    os.chdir(self._trainDir)
    self._backend.resume(solverFilename=solverFilename,
                         solverstateFilename=stateFile,
                         logFile=self._logFile)
def train(self, weights=None, blobSummary=False):
    """Start training from scratch, optionally initializing from *weights*."""
    # Confirm wiping any existing training data when running attended.
    if self.existingData() and not self._unattended:
        if not tb.queryYesNo('Existing data found. Do you want to delete it and start from scratch?'):
            return
    self.clean()
    solverFilename = self.prepareTraining()
    self.notice('training...')
    os.chdir(self._trainDir)
    # NOTE(review): the guard is "weights != ''" so the default None also
    # takes the weights branch -- behavior preserved as-is.
    if weights != '':
        self._backend.train(solverFilename=solverFilename,
                            logFile=self._logFile,
                            weights=weights)
    else:
        self._backend.train(solverFilename=solverFilename,
                            logFile=self._logFile)
    if blobSummary:
        self.displayBlobSummary(self._logFile)
def _callCopiedBin(self, cmd):
    """Run *cmd* against locally copied caffe binary and libcaffe.so."""
    localBin = './' + os.path.basename(caffeBin())
    tb.notice('making a local copy of %s' % caffeBin())
    os.system('cp %s .' % caffeBin())
    # Parse ldd output to locate the linked libcaffe.so.
    lddOutput = tb.run('ldd %s' % caffeBin())
    caffeLib = None
    for lddLine in lddOutput.split('\n'):
        hit = re.match('\\s*libcaffe.so => (.*\.so)', lddLine)
        if hit:
            caffeLib = hit.group(1)
            break
    if caffeLib is None:
        raise Exception('cannot find libcaffe.so dependency')
    tb.notice('making a local copy of %s' % caffeLib)
    os.system('cp %s .' % caffeLib)
    fullCmd = 'GLOG_logtostderr=%d LD_LIBRARY_PATH=.:$LD_LIBRARY_PATH %s %s' % (not self._quiet, localBin, cmd)
    cmd = fullCmd
    if not self._silent:
        tb.notice('running "%s"' % cmd, 'run')
    tb.system(cmd)
def preparePythonBackend():
    # Freeze the caffe python package and its libcaffe.so into the training
    # directory so jobs run against a stable copy of the framework.
    os.system('mkdir -p training')
    folder = os.path.dirname(caffe.__file__)
    print 'copying %s to training' % folder
    os.system('cp %s training -r' % folder)
    # Locate the libcaffe.so the python extension links against.
    ldd = tb.run('ldd %s' % caffe._caffe.__file__)
    caffeLib = None
    for line in ldd.split('\n'):
        match = re.match('\\s*libcaffe.so => (.*\.so)', line)
        if match:
            caffeLib = match.group(1)
            break
    if caffeLib is None:
        raise Exception('cannot find libcaffe.so dependency')
    print 'copying %s to training' % caffeLib
    os.system('cp %s %s' % (caffeLib, env.trainDir()))
def preparePythonBackend(): os.system("mkdir -p training") folder = os.path.dirname(caffe.__file__) print "copying %s to training" % folder os.system("cp %s training -r" % folder) ldd = tb.run("ldd %s" % caffe._caffe.__file__) caffeLib = None for line in ldd.split("\n"): match = re.match("\\s*libcaffe.so => (.*\.so)", line) if match: caffeLib = match.group(1) break if caffeLib is None: raise Exception("cannot find libcaffe.so dependency") print "copying %s to training" % caffeLib os.system("cp %s %s" % (caffeLib, env.trainDir()))
def concat(net, *args, **kwargs):
    '''
    @brief Setup a ConcatLayer that takes all ARGS and throws them
           together along the first dimension
    @param net Current network
    @returns A new blob (concatenation of all blobs in ARGS)
    '''
    # Optional keyword: concat axis (default 1); anything else is an error.
    axis = int(kwargs.pop('axis', 1))
    if len(kwargs):
        raise Exception('Cannot handle kwargs %s' % kwargs)
    # Flatten: each positional arg may be a single blob or a list of blobs.
    blobList = []
    for blob in args:
        if tb.isList(blob):
            blobList += blob
        else:
            blobList.append(blob)
    return Layers.Concat(net, blobList, nout=1, concat_param={'axis': axis})
def scale(net, image_blob, factor):
    """Scale a blob by a constant factor.

    A scalar factor becomes an elementwise SUM with a single coefficient;
    a list of per-channel factors becomes a 1x1 convolution with a diagonal
    weight filler (one factor per output channel).
    """
    if not tb.isList(factor):
        # Scalar case: elementwise weighted sum with one coefficient.
        return Layers.Eltwise(net, image_blob, nout=1,
                              eltwise_param={'operation': Params.Eltwise.SUM,
                                             'coeff': (factor,)})
    # Per-channel case: 1x1 conv whose diagonal weights carry the factors.
    convParams = {
        'num_output': len(factor),
        'pad': 0,
        'kernel_size': 1,
        'stride': 1,
        'weight_filler': {'type': 'diagonal', 'diag_val': factor},
        'bias_filler': {'type': 'constant'},
    }
    return Layers.Convolution(net, image_blob, nout=1,
                              convolution_param=convParams)
def train(self, weights=None, blobSummary=False):
    # NOTE(review): this is a byte-for-byte duplicate of another train()
    # definition in this module; consider removing one copy.
    """Start training from scratch, optionally initializing from weights.

    weights     -- path to a weights file to initialize from; None or ''
                   trains from random initialization.
    blobSummary -- if True, print a blob summary parsed from the log
                   after training finishes.
    """
    if self.existingData() and not self._unattended:
        if not tb.queryYesNo('Existing data found. Do you want to delete it and start from scratch?'):
            return
    self.clean()
    solverFilename = self.prepareTraining()
    self.notice('training...')
    os.chdir(self._trainDir)
    # BUG FIX: the old test was `weights != ''`, which was True for the
    # default value None and forwarded weights=None to the backend. Only
    # forward weights when a non-empty value was actually supplied.
    if weights:
        self._backend.train(solverFilename=solverFilename,
                            logFile=self._logFile,
                            weights=weights)
    else:
        self._backend.train(solverFilename=solverFilename,
                            logFile=self._logFile)
    if blobSummary:
        self.displayBlobSummary(self._logFile)
def data(self, downsample=1):
    """Load this entry's raw data as a channels-first numpy array.

    downsample -- integer downsampling factor (1 = no downsampling);
                  disparity types do not support downsampling and raise.

    Images are returned as uint8 BGR in (C, H, W); flow and disparity
    variants are fixed-point encoded as int16 (value * 32), with NaNs
    replaced by the int16 max sentinel.
    Raises Exception for unknown types or unsupported downsampling.
    """
    def _pack(values, nanMask):
        # Fixed-point encode at 1/32 resolution; NaNs -> int16 max sentinel.
        packed = (values * 32.0).astype(np.int16)
        packed[nanMask] = np.iinfo(np.int16).max
        return packed

    def _loadDisparity(negate):
        # Shared loader for the four disparity variants (was four
        # near-identical branches). NaN mask is taken before negation;
        # negating NaN keeps it NaN, so the mask stays valid.
        disparity = tb.readDisparity(self._path)
        nanMask = np.isnan(disparity)
        if negate:
            disparity *= -1
        if downsample != 1:
            raise Exception("no downsampling implemented for disparity")
        return _pack(disparity, nanMask)

    if self._type == "image":
        Image = misc.imread(self._path)[:, :, 0:3]
        if downsample != 1:
            Image = tb.downsampleImage(Image, downsample)
        # Reverse channel order (RGB -> BGR), then HWC -> CHW.
        return Image[..., np.r_[2, 1, 0]].transpose((2, 0, 1))
    elif self._type == "flow":
        flow = tb.readFlow(self._path)
        flow = flow[:, :, 0:2]
        if downsample != 1:
            flow = OpticalFlow.downsampleMedian(flow, downsample)
        flow = flow.transpose((2, 0, 1))
        return _pack(flow, np.isnan(flow))
    elif self._type in ("leftdisparity", "leftdisparitychange"):
        # Left variants are sign-flipped before encoding.
        return _loadDisparity(negate=True)
    elif self._type in ("rightdisparity", "rightdisparitychange"):
        return _loadDisparity(negate=False)
    else:
        raise Exception('unhandled data type')
def testref(self, iter, definition=None, vars=None, num_iter=-1):
    """Run a reference test of a snapshot (low-res, output enabled).

    iter       -- snapshot iteration to test (resolved via getModelFile).
    definition -- prototxt definition name; defaults to 'test'.
    vars       -- template variables for the prototxt; must contain
                  'dataset' (used for the output folder name).
    num_iter   -- number of test iterations (-1 = backend default).
    """
    # BUG FIX: `vars={}` was a mutable default argument shared across calls;
    # the mutations below leaked between invocations. Copy the caller's dict
    # so it is never mutated either.
    vars = {} if vars is None else dict(vars)
    modelFile, iter = self.getModelFile(iter)
    vars['output'] = True
    vars['prefix'] = 'ref_%d' % iter
    vars['lowres'] = True
    self.makeScratchDir()
    if definition is None:
        definition = 'test'
    proto = self.findProto(definition)
    # NOTE(review): vars['dataset'] is required here — raises KeyError if the
    # caller did not supply it; confirm against callers.
    outPath = '%s/output_%d_%s' % (self._path, iter, vars['dataset'])
    if os.path.isdir(outPath):
        if self._unattended or tb.queryYesNo('Output folder %s exists, do you want to delete it first?' % os.path.basename(outPath)):
            os.system('rm -rf %s' % outPath)
    finalProto = self.makeScratchPrototxt(proto, vars)
    # Written for its side effect (scratch solver prototxt); value unused.
    solverProto = self.makeScratchPrototxt(self._solverProto, vars)
    self.notice('testing snapshot iteration %d for %d iterations...' % (iter, num_iter), 'notice')
    os.chdir(self._path)
    self._backend.test(caffemodelFilename=modelFile,
                       protoFilename=finalProto,
                       iterations=num_iter,
                       logFile=self._scratchLogFile)
def test(self, iter, output=False, definition=None, vars={}, num_iter=-1, discard=False): modelFile, iter = self.getModelFile(iter) if output: vars['output'] = True vars['prefix'] = iter self.makeScratchDir() if definition is None: definition = 'test' proto = self.findProto(definition) if output and 'dataset' in vars: outPath = '%s/output_%d_%s' % (self._path, iter, vars['dataset']) if os.path.isdir(outPath): if self._unattended or tb.queryYesNo('Output folder %s exists, do you want to delete it first?' % os.path.basename(outPath)): os.system('rm -rf %s' % outPath) finalProto = self.makeScratchPrototxt(proto, vars) solverProto = self.makeScratchPrototxt(self._solverProto, vars) self.notice('testing snapshot iteration %d for %d iterations...' % (iter, num_iter), 'notice') os.chdir(self._path) self._backend.test(caffemodelFilename=modelFile, protoFilename=finalProto, iterations=num_iter, logFile=self._scratchLogFile) if output and 'dataset' in vars and num_iter==-1: outPath = '%s/output_%d_%s' % (self._path, iter, vars['dataset']) if os.path.isdir(outPath): logFile = '%s/log.txt' % outPath print 'saving log to %s', logFile os.system('cp %s %s' % (self._scratchLogFile, logFile)) if 'dataset' in vars: self._saveTestResults(iter,vars['dataset'], not discard) print 'Iteration was %d' %iter
def plotlr(self):
    """Plot the learning-rate schedule parsed from the stored log lines.

    Scans self._lines (pairs of (iteration, message)) for caffe
    'Iteration N, lr = X' messages and plots lr over iterations.
    """
    # PERF FIX: the pattern is loop-invariant — it was re-compiled on every
    # matching line. Compile once, as a raw string, before the loop.
    lrPattern = re.compile(r'Iteration [0-9]+, lr = (([0-9]|\.|-|e)+)')
    lrs = []
    maxLr = 0
    for l in self._lines:
        iter = l[0]
        # Log payload follows the ']' of the glog prefix; skip other lines.
        if ']' not in l[1]:
            continue
        msg = l[1].split(']')[1].strip()
        if msg.startswith('Iteration'):
            match = lrPattern.match(msg)
            if match:
                lr = float(match.group(1))
                if lr > maxLr:
                    maxLr = lr
                lrs.append((iter, lr))
    plot = Plot("learning rate for %s" % self._networkName)
    if len(lrs):
        plot.plotList('LR', lrs, tb.PlotStyle('r-'))
        # Leave 10% headroom above the largest observed lr.
        plt.ylim((0, maxLr * 1.1))
    plot.finish()
def shrink(self, iter_step):
    """Reclaim disk space: drop caches, jobs and most snapshot files.

    Removes *.pyc, the scratch dir and (if present) the job dir, then sweeps.
    Snapshot state files are kept only when their iteration is a multiple of
    iter_step (iter_step == -1 keeps none by this rule) or they are the most
    recent snapshot; matching .caffemodel files share their snapshot's fate.
    """
    self.notice('removing *.pyc', 'del')
    os.system('rm -f %s/*.pyc' % (self._path))
    self.notice('removing scratch', 'del')
    os.system('rm -rf %s/scratch' % self._path)
    if self.haveJobDir():
        self.notice('removing jobs', 'del')
        os.system('rm -rf %s' % self._jobDir)
    self.sweep()
    if not self.haveTrainDir():
        return
    for stateFile in self._stateFiles:
        keepThis = (iter_step != -1 and stateFile.iteration() % iter_step == 0) \
                   or stateFile == self._stateFiles[-1]
        # Find the model file snapshotted at the same iteration, if any.
        snapshot = None
        for candidate in self._modelFiles:
            if candidate.iteration() == stateFile.iteration():
                snapshot = candidate
        if keepThis:
            if snapshot is not None:
                tb.notice('keeping file %s' % (os.path.basename(stateFile.filename())), 'passed')
                tb.notice('keeping file %s' % (os.path.basename(snapshot.filename())), 'passed')
            else:
                tb.notice('keeping file %s' % stateFile, 'passed')
        else:
            if snapshot is not None:
                snapshot.delete(True)
            stateFile.delete(True)
def delete(self,verbose=False): if verbose: tb.notice('removing %s' % self._filename, 'del') os.remove(self._filename)