def evaluate(args):
    # Score every checkpoint matching the prefix on the validation set
    if args.model_prefix:
        model_paths = sorted(
            glob.glob(os.path.join(args.output_dir, "Models", f"{args.model_prefix}*.pt")),
            key=lambda p: int(re.search(r"_step_(\d+)", p).group(1)))
        model_scores = {}
        for model_path in model_paths:
            translate(model_path, "valid", args)
            processData(args, False)
            scores = calculate_scores(args, "valid")
            model_scores[os.path.basename(model_path)] = scores
        write_scores(
            model_scores,
            os.path.join(args.output_dir, "Reports",
                         f"{args.model_prefix}.valid.{args.src_lang}2{args.tgt_lang}.log"))
    # Score a single, explicitly named model on the test set
    if args.eval_model:
        model_scores = {}
        translate(args.eval_model, "test", args)
        processData(args, False)
        scores = calculate_scores(args, "test")
        model_scores[os.path.basename(args.eval_model)] = scores
        write_scores(
            model_scores,
            os.path.join(args.output_dir, "Reports",
                         f"{os.path.basename(args.eval_model)}.test.{args.src_lang}2{args.tgt_lang}.log"))
def main(args):
    processData(args, True)
    if args.do_preprocess:
        preprocess(args)
    if args.do_train:
        train(args)
    if args.model_prefix and args.average_last:
        average_models(args)
    if args.do_eval:
        evaluate(args)
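# Minimal sketch of a command-line entry point for main(); only flags that are
# actually referenced above are included, and their types/defaults here are
# assumptions -- the project's real CLI presumably defines more options.
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("--output_dir", required=True)
    parser.add_argument("--src_lang", required=True)
    parser.add_argument("--tgt_lang", required=True)
    parser.add_argument("--model_prefix", default=None)
    parser.add_argument("--eval_model", default=None)
    parser.add_argument("--average_last", type=int, default=0)
    parser.add_argument("--do_preprocess", action="store_true")
    parser.add_argument("--do_train", action="store_true")
    parser.add_argument("--do_eval", action="store_true")
    main(parser.parse_args())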
def selectNegativeWindowsFromPositiveImages(groundTruths, featuresDir, featuresExt, maxVectors, overlap, model=False):
    # Group ground-truth boxes by image name
    gtb = dict()
    for x in groundTruths:
        im, bx = x[0], map(float, x[1:])
        try:
            gtb[im].append(bx)
        except KeyError:
            gtb[im] = [bx]
    task = NWFPIFilter(gtb, featuresDir, featuresExt, maxVectors / len(gtb.keys()), overlap, model)
    result = dp.processData(gtb.keys(), featuresDir, featuresExt, task)
    posIdx, posFeat, negIdx, negFeat = [], [], [], []
    for r in result:
        posIdx += r[0]
        posFeat += r[1]
        negIdx += r[2]
        negFeat += r[3]
    # Stack the per-image feature blocks into contiguous matrices
    Xp = emptyMatrix((len(posIdx), posFeat[0].shape[1]))
    Xn = emptyMatrix((len(negIdx), negFeat[0].shape[1]))
    k = 0
    for i in range(len(posFeat)):
        Xp[k:k + posFeat[i].shape[0], :] = posFeat[i]
        k = k + posFeat[i].shape[0]
    k = 0
    for i in range(len(negFeat)):
        Xn[k:k + negFeat[i].shape[0], :] = negFeat[i]
        k = k + negFeat[i].shape[0]
    print 'NegFromPos ready:', len(negIdx)
    return {'posIdx': posIdx, 'posFeat': Xp, 'negIdx': negIdx, 'negFeat': Xn}
def detectObjects(imageList, featuresDir, indexType, groundTruthDir, outputDir):
    maxOverlap = 0.3
    categories, catIndex = bse.categoryIndex(indexType)
    task = SoftmaxDetector(maxOverlap, catIndex)
    result = processData(imageList, featuresDir, 'prob', task)
    # Collect detection results after NMS
    detections = dict([(c, []) for c in catIndex])
    for res in result:
        for idx in catIndex:
            img, filteredBoxes, filteredScores = res[idx]
            for j in range(len(filteredBoxes)):
                detections[idx].append([img, filteredScores[j]] + filteredBoxes[j])
    # Evaluate results for each category independently
    for idx in catIndex:
        groundTruthFile = groundTruthDir + '/' + categories[idx] + '_test_bboxes.txt'
        output = outputDir + '/' + categories[idx] + '.out'
        detections[idx].sort(key=lambda x: x[1], reverse=True)
        gtBoxes = [x.split() for x in open(groundTruthFile)]
        numPositives = len(gtBoxes)
        groundTruth = eval.loadGroundTruthAnnotations(gtBoxes)
        results = eval.evaluateDetections(groundTruth, detections[idx], 0.5)
        prec, recall = eval.computePrecisionRecall(numPositives, results['tp'], results['fp'], output)
def detectObjects(model, imageList, featuresDir, featuresExt, maxOverlap, threshold, outputFile=None):
    task = Detector(model, threshold, maxOverlap)
    result = processData(imageList, featuresDir, featuresExt, task)
    # Write detections to disk when an output file is given; otherwise use a no-op writer
    if outputFile != None:
        outf = open(outputFile, 'w')
        writeF = lambda x, y, b: outf.write(
            x + ' {:.8f} {:.0f} {:.0f} {:.0f} {:.0f} {:.0f}\n'.format(y, b[0], b[1], b[2], b[3], b[4]))
    else:
        writeF = lambda x, y, b: x
    detectionsList = []
    for data in result:
        img, filteredBoxes, filteredScores = data
        for i in range(len(filteredBoxes)):
            b = filteredBoxes[i]
            writeF(img, filteredScores[i], b)
            detectionsList.append([img, filteredScores[i], b[0], b[1], b[2], b[3], b[4]])
    if outputFile != None:
        outf.close()
    return detectionsList
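# Hedged usage sketch for detectObjects above: the image-list file, feature
# directory/extension, and the overlap/threshold values are illustrative
# placeholders, and `model` is assumed to be an already-trained detector
# compatible with the Detector task.
def exampleDetectionRun(model):
    images = [x.replace('\n', '') for x in open('test_images.txt')]
    return detectObjects(model, images, 'features/', 'sift', 0.3, 0.5,
                         outputFile='detections.out')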
def detectObjects(model, imageList, featuresDir, featuresExt, threshold, projector=None):
    task = MaskDetector(model, threshold, projector)
    result = processData(imageList, featuresDir, featuresExt, task)
    if projector == None:
        totalDetections = reduce(lambda x, y: x + y, [d[1].shape[0] for d in result])
        detections = np.zeros((totalDetections, 5))
        images = {}
        imgId = 0
        i = 0
        for data in result:
            img, cells = data
            cells[:, 0] = cells[:, 0] * imgId
            detections[i:i + cells.shape[0], :] = cells
            images[img] = imgId
            imgId += 1
            i = i + cells.shape[0]
        return (images, detections[0:i, :])
    else:
        resultsList = []
        for data in result:
            resultsList += data
        return resultsList
def loadHardNegativesFromMatrix(featuresDir, imagesIdx, detMatrix, featuresExt, numFeatures, totalNegatives):
    i = 0
    task = LoadHardNegatives(imagesIdx, detMatrix, numFeatures)
    result = dp.processData(imagesIdx.keys(), featuresDir, featuresExt, task)
    hardng = cu.emptyMatrix([totalNegatives, numFeatures])
    while len(result) > 0:
        data = result.pop(0)
        hardng[i:i + data.shape[0], :] = data
        i = i + data.shape[0]
    return hardng[0:i, :]
def loadHardNegativesFromList(featuresDir, negativesInfo, featuresExt, numFeatures, totalNegatives, idx=False):
    i = 0
    task = LoadHardNegatives(negativesInfo)
    result = dp.processData(negativesInfo.keys(), featuresDir, featuresExt, task)
    hardng = emptyMatrix([totalNegatives, numFeatures])
    hardNames = []
    boxes = []
    while len(result) > 0:
        data, imgs, box = result.pop(0)
        hardng[i:i + data.shape[0], :] = data
        hardNames += imgs
        boxes += box
        i = i + data.shape[0]
    return (hardng[0:i, :], boxes)
def getRandomNegs(featuresDir, negativeList, featuresExt, numFeatures, maxVectors, maxNegativeImages):
    randomBoxes = maxVectors / maxNegativeImages
    cu.rnd.shuffle(negativeList)
    task = RandomNegativesFilter(numFeatures, randomBoxes)
    negatives = [negativeList.pop(0) for i in range(maxNegativeImages)]
    result = dp.processData(negatives, featuresDir, featuresExt, task)
    neg = emptyMatrix([maxVectors, numFeatures])
    boxes = []
    n = 0
    while len(result) > 0:
        mat, box = result.pop()
        neg[n:n + mat.shape[0]] = mat
        n = n + mat.shape[0]
        boxes += box
    return (neg[0:n], boxes[0:n])
def getHardNegatives(negativesDir, negativesList, featuresExt, numFeatures, maxVectors, currentModel):
    maxVectorsPerImage = maxVectors / len(negativesList)
    i = 0
    task = HardNegativeMining(currentModel, maxVectorsPerImage)
    result = dp.processData(negativesList, negativesDir, featuresExt, task)
    hardng = emptyMatrix([2 * maxVectors, numFeatures])
    boxes = []
    while len(result) > 0:
        data = result.pop(0)
        # Grow the preallocated matrix if the mined features do not fit
        if data[0].shape[0] + i > hardng.shape[0]:
            print 'Not enough matrix space'
            hardng = np.concatenate((hardng, emptyMatrix([maxVectors, numFeatures])))
        hardng[i:i + data[0].shape[0], :] = data[0]
        boxes += data[2]
        i = i + data[0].shape[0]
    return hardng[0:i, :], boxes[0:i]
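# Hedged sketch of combining the negative-sampling helpers above in a single
# mining round: seed with random windows, then append mined hard negatives.
# The workflow, paths, and parameter values are assumptions for illustration,
# not the project's actual training loop.
def exampleMiningRound(currentModel, negativeList, featuresDir, featuresExt,
                       numFeatures, maxVectors, maxNegativeImages):
    randNeg, randBoxes = getRandomNegs(featuresDir, list(negativeList), featuresExt,
                                       numFeatures, maxVectors, maxNegativeImages)
    hardNeg, hardBoxes = getHardNegatives(featuresDir, negativeList, featuresExt,
                                          numFeatures, maxVectors, currentModel)
    return np.concatenate((randNeg, hardNeg)), randBoxes + hardBoxes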
def selectRegions(imageList, featuresDir, groundTruths, outputDir, featExt, category, operator):
    task = RegionSelector(groundTruths, operator)
    result = processData(imageList, featuresDir, featExt, task)
    # Count selected boxes and feature dimensionality to preallocate the matrix
    nBoxes, nFeat = 0, 0
    for r in result:
        nBoxes += r[0].shape[0]
        nFeat = r[0].shape[1]
    featureMatrix = np.zeros((nBoxes, nFeat))
    i = 0
    outputFile = open(outputDir + '/' + category + '.idx', 'w')
    for r in result:
        featureMatrix[i:i + r[0].shape[0]] = r[0]
        for box in r[1]:
            outputFile.write(box[0] + ' ' + ' '.join(map(str, map(int, box[1:]))) + '\n')
        i += r[0].shape[0]
    outputFile.close()
    cu.saveMatrix(featureMatrix, outputDir + '/' + category + '.' + featExt)
    print 'Total of', nBoxes, 'positive examples collected for', category
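# Hedged usage sketch for selectRegions: the file names, feature extension, and
# the 'overlap' operator string are illustrative assumptions; ground truths are
# read in the same "image x1 y1 x2 y2" layout used elsewhere in this code.
def exampleSelectRegions():
    images = [x.replace('\n', '') for x in open('trainval_images.txt')]
    groundTruths = [x.split() for x in open('bicycle_train_bboxes.txt')]
    selectRegions(images, 'features/', groundTruths, 'positives/', 'sift',
                  'bicycle', 'overlap')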
import utils as cu
import libDetection as det
from dataProcessor import processData

class Checker():
    def __init__(self):
        print 'Starting checker'

    def run(self, img, features, bboxes):
        # An image is consistent when it has one feature row per bounding box
        return img, features.shape[0] == len(bboxes)

## Main Program Parameters
params = cu.loadParams("testImageList featuresDir featuresExt")
imageList = [x.replace('\n', '') for x in open(params['testImageList'])]

## Run Detector
task = Checker()
start = cu.tic()
result = processData(imageList, params['featuresDir'], params['featuresExt'], task)
cu.toc('All images checked', start)

totalP = 0
for data in result:
    img, r = data
    if not r:
        print 'Problems with', img
        totalP += 1
print 'Total problems:', totalP
def extractFeatures(model, imageList, featuresDir, featuresExt):
    task = CategoryScores(model, featuresDir)
    result = processData(imageList, featuresDir, featuresExt, task)
    modelClass = svm.SVMDetector
else:
    import sys
    print 'Model not supported'
    sys.exit()

for c in categories.keys():
    filename = params['modelDir'] + '/' + c + params['modelSuffix']
    categories[c] = modelClass()
    categories[c].load(filename)

imageList = [x.replace('\n', '') for x in open(params['testImageList'])]
maxOverlap = float(params['maxOverlap'])
threshold = float(params['threshold'])

## Run Detector
task = Detector(categories, threshold, maxOverlap)
result = processData(imageList, params['featuresDir'], params['featuresExt'], task)

# Prepare output files
for c in categories.keys():
    categories[c] = open(c + params['outputFile'], 'w')
for data in result:
    for c in data.keys():
        img, filteredBoxes, filteredScores = data[c]
        for i in range(len(filteredBoxes)):
            b = filteredBoxes[i]
            categories[c].write(img + ' {:.8f} {:} {:} {:} {:}\n'.format(
                filteredScores[i], b[0], b[1], b[2], b[3]))
for c in categories.keys():
    categories[c].close()