def run(nb_filters=nb_filters, filterPolicy=filterPolicy, poolings=poolings,
        extractor=extractor, nbSubwindows=nbSubwindows,
        subwindowMinSizeRatio=subwindowMinSizeRatio,
        subwindowMaxSizeRatio=subwindowMaxSizeRatio,
        subwindowTargetWidth=subwindowTargetWidth,
        subwindowTargetHeight=subwindowTargetHeight,
        fixedSize=fixedSize, subwindowInterpolation=subwindowInterpolation,
        includeOriginalImage=includeOriginalImage, random=random,
        nbJobs=nbJobs, verbosity=verbosity, tempFolder=tempFolder,
        nbTrees=nbTrees, maxFeatures=maxFeatures, maxDepth=maxDepth,
        minSamplesSplit=minSamplesSplit, minSamplesLeaf=minSamplesLeaf,
        bootstrap=bootstrap, randomClassif=randomClassif,
        nbJobsEstimator=nbJobsEstimator, verbose=verbose,
        learningUse=learningUse, testingUse=testingUse,
        saveFile=saveFile, shouldSave=shouldSave):

    # Fix the seed for reproducibility unless a random classifier is wanted
    randomState = None
    if not randomClassif:
        randomState = 100

    # Clamp the requested set sizes to the available data
    lsSize = learningUse
    if learningUse > maxLearningSize:
        lsSize = maxLearningSize

    tsSize = testingUse
    if testingUse > maxTestingSize:
        tsSize = maxTestingSize

    # ======INSTANTIATING========#
    # --RandConv--
    randConvCoord = coordinatorRandConvFactory(
        nbFilters=nb_filters,
        filterPolicy=filterPolicy,
        poolings=poolings,
        extractor=extractor,
        nbSubwindows=nbSubwindows,
        subwindowMinSizeRatio=subwindowMinSizeRatio,
        subwindowMaxSizeRatio=subwindowMaxSizeRatio,
        subwindowTargetWidth=subwindowTargetWidth,
        subwindowTargetHeight=subwindowTargetHeight,
        subwindowInterpolation=subwindowInterpolation,
        includeOriginalImage=includeOriginalImage,
        nbJobs=nbJobs,
        verbosity=verbosity,
        tempFolder=tempFolder,
        random=random)

    randConvCoord = LoadCoordinator(randConvCoord, learnFile, testFile)

    # --Extra-tree--
    baseClassif = ExtraTreesClassifier(nbTrees,
                                       max_features=maxFeatures,
                                       max_depth=maxDepth,
                                       min_samples_split=minSamplesSplit,
                                       min_samples_leaf=minSamplesLeaf,
                                       bootstrap=bootstrap,
                                       n_jobs=nbJobsEstimator,
                                       random_state=randomState,
                                       verbose=verbose)

    # --Classifier--
    classifier = Classifier(randConvCoord, baseClassif)

    # --Data--
    loader = CifarFromNumpies(learningSetDir, learningIndexFile)
    learningSet = FileImageBuffer(loader.getFiles(), NumpyImageLoader())
    learningSet = learningSet[0:lsSize]

    loader = CifarFromNumpies(testingSetDir, testingIndexFile)
    testingSet = FileImageBuffer(loader.getFiles(), NumpyImageLoader())
    testingSet = testingSet[0:tsSize]

    # =====COMPUTATION=====#
    # --Learning--#
    print "Starting learning"
    fitStart = time()
    classifier.fit(learningSet)
    fitEnd = time()
    print "Learning done", formatDuration(fitEnd - fitStart)
    sys.stdout.flush()

    # --Testing--#
    y_truth = testingSet.getLabels()
    predStart = time()
    y_prob, y_pred = classifier.predict_predict_proba(testingSet)
    predEnd = time()
    accuracy = classifier.accuracy(y_pred, y_truth)
    confMat = classifier.confusionMatrix(y_pred, y_truth)

    # ====ANALYSIS=====#
    importance, order = randConvCoord.importancePerFeatureGrp(baseClassif)

    print "==================RandConv================"
    print "-----------Filtering--------------"
    print "nb_filters", nb_filters
    print "filterPolicy", filterPolicy
    print "----------Pooling--------------"
    print "poolings", poolings
    print "--------SW extractor----------"
    print "#Subwindows", nbSubwindows
    print "subwindowMinSizeRatio", subwindowMinSizeRatio
    print "subwindowMaxSizeRatio", subwindowMaxSizeRatio
    print "subwindowTargetWidth", subwindowTargetWidth
    print "subwindowTargetHeight", subwindowTargetHeight
    print "fixedSize", fixedSize
    print "------------Misc-----------------"
    print "includeOriginalImage", includeOriginalImage
    print "random", random
    print "tempFolder", tempFolder
    print "verbosity", verbosity
    print "nbJobs", nbJobs
    print "--------ExtraTrees----------"
    print "nbTrees", nbTrees
    print "maxFeatures", maxFeatures
    print "maxDepth", maxDepth
    print "minSamplesSplit", minSamplesSplit
    print "minSamplesLeaf", minSamplesLeaf
    print "bootstrap", bootstrap
    print "nbJobsEstimator", nbJobsEstimator
    print "verbose", verbose
    print "randomState", randomState
    print "------------Data---------------"
    print "LearningSet size", len(learningSet)
    print "TestingSet size", len(testingSet)
    print "-------------------------------"
    if shouldSave:
        print "saveFile", saveFile
    print "Fit time", formatDuration(fitEnd - fitStart)
    print "Classification time", formatDuration(predEnd - predStart)
    print "Accuracy", accuracy

    if shouldSave:
        np.save(saveFile, y_prob)

    return accuracy, confMat, importance, order
def run(nb_filters=nb_filters, filterPolicy=filterPolicy, poolings=poolings,
        nbSubwindows=nbSubwindows,
        subwindowMinSizeRatio=subwindowMinSizeRatio,
        subwindowMaxSizeRatio=subwindowMaxSizeRatio,
        subwindowTargetWidth=subwindowTargetWidth,
        subwindowTargetHeight=subwindowTargetHeight,
        fixedSize=fixedSize, subwindowInterpolation=subwindowInterpolation,
        includeOriginalImage=includeOriginalImage, random=random,
        nbJobs=nbJobs, verbosity=verbosity, tempFolder=tempFolder,
        nbTrees=nbTrees, maxDepth=maxDepth,
        minSamplesSplit=minSamplesSplit, minSamplesLeaf=minSamplesLeaf,
        randomClassif=randomClassif, nbJobsEstimator=nbJobsEstimator,
        verbose=verbose, learningUse=learningUse, testingUse=testingUse,
        saveFile=saveFile, shouldSave=shouldSave):

    # Fix the seed for reproducibility unless a random classifier is wanted
    randomState = None
    if not randomClassif:
        randomState = 100

    # Clamp the requested set sizes to the available data
    lsSize = learningUse
    if learningUse > maxLearningSize:
        lsSize = maxLearningSize

    tsSize = testingUse
    if testingUse > maxTestingSize:
        tsSize = maxTestingSize

    #======INSTANTIATING========#
    #--randconv--
    randConvCoord = coordinatorRandConvFactory(
        nbFilters=nb_filters,
        filterPolicy=filterPolicy,
        nbSubwindows=nbSubwindows,
        subwindowMinSizeRatio=subwindowMinSizeRatio,
        subwindowMaxSizeRatio=subwindowMaxSizeRatio,
        subwindowTargetWidth=subwindowTargetWidth,
        subwindowTargetHeight=subwindowTargetHeight,
        poolings=poolings,
        subwindowInterpolation=subwindowInterpolation,
        includeOriginalImage=includeOriginalImage,
        nbJobs=nbJobs,
        verbosity=verbosity,
        tempFolder=tempFolder,
        random=random)

    nb_filters = len(randConvCoord.getFilters())

    #--SVM--
    baseClassif = LinearSVC(verbose=verbose, random_state=randomState)

    #--Classifier--
    classifier = uClassifier(coordinator=randConvCoord,
                             base_classifier=baseClassif,
                             n_estimators=nbTrees,
                             max_depth=maxDepth,
                             min_samples_split=minSamplesSplit,
                             min_samples_leaf=minSamplesLeaf,
                             n_jobs=nbJobsEstimator,
                             random_state=randomState,
                             verbose=verbose)

    #--Data--
    loader = CifarFromNumpies(learningSetDir, learningIndexFile)
    learningSet = FileImageBuffer(loader.getFiles(), NumpyImageLoader())
    learningSet = learningSet[0:lsSize]

    loader = CifarFromNumpies(testingSetDir, testingIndexFile)
    testingSet = FileImageBuffer(loader.getFiles(), NumpyImageLoader())
    testingSet = testingSet[0:tsSize]

    #=====COMPUTATION=====#
    #--Learning--#
    print "Starting learning"
    fitStart = time()
    # Compute the bag-of-visual-words histogram explicitly so it can be saved
    hist = classifier._preprocess(learningSet, learningPhase=True)
    y = learningSet.getLabels()
    if shouldSave:
        np.savez(saveFile, data=hist.data, indices=hist.indices,
                 indptr=hist.indptr, shape=hist.shape)
    classifier.fit_histogram(hist, y)
    fitEnd = time()
    print "Learning done", formatDuration(fitEnd - fitStart)
    sys.stdout.flush()

    #--Testing--#
    y_truth = testingSet.getLabels()
    predStart = time()
    y_pred = classifier.predict(testingSet)
    predEnd = time()
    accuracy = classifier.accuracy(y_pred, y_truth)
    confMat = classifier.confusionMatrix(y_pred, y_truth)

    #====ANALYSIS=====#
    importance, order = randConvCoord.importancePerFeatureGrp(
        classifier._visualBagger)

    print "==================Bag of Visual Words======================="
    print "-----------Filtering--------------"
    print "nb_filters", nb_filters
    print "filterPolicy", filterPolicy
    print "----------Pooling--------------"
    print "poolings", poolings
    print "--------SW extractor----------"
    print "#Subwindows", nbSubwindows
    print "subwindowMinSizeRatio", subwindowMinSizeRatio
    print "subwindowMaxSizeRatio", subwindowMaxSizeRatio
    print "subwindowTargetWidth", subwindowTargetWidth
    print "subwindowTargetHeight", subwindowTargetHeight
    print "fixedSize", fixedSize
    print "------------Misc-----------------"
    print "includeOriginalImage", includeOriginalImage
    print "random", random
    print "tempFolder", tempFolder
    print "verbosity", verbosity
    print "nbJobs", nbJobs
    print "--------Bag of words params + SVC----------"
    print "nbTrees", nbTrees
    print "maxDepth", maxDepth
    print "minSamplesSplit", minSamplesSplit
    print "minSamplesLeaf", minSamplesLeaf
    print "nbJobsEstimator", nbJobsEstimator
    print "verbose", verbose
    print "randomState", randomState
    print "------------Data---------------"
    print "LearningSet size", len(learningSet)
    print "TestingSet size", len(testingSet)
    print "-------------------------------"
    if shouldSave:
        print "saveFile", saveFile
    print "Fit time", formatDuration(fitEnd - fitStart)
    print "Classification time", formatDuration(predEnd - predStart)
    print "Accuracy", accuracy
    print "Leaves", formatBigNumber(classifier.histoSize)

    return accuracy, confMat, importance, order
def run(nb_filters=nb_filters, filterPolicy=filterPolicy, poolings=poolings,
        extractor=extractor, nbSubwindows=nbSubwindows,
        subwindowMinSizeRatio=subwindowMinSizeRatio,
        subwindowMaxSizeRatio=subwindowMaxSizeRatio,
        subwindowTargetWidth=subwindowTargetWidth,
        subwindowTargetHeight=subwindowTargetHeight,
        fixedSize=fixedSize, subwindowInterpolation=subwindowInterpolation,
        includeOriginalImage=includeOriginalImage, random=random,
        nbJobs=nbJobs, verbosity=verbosity, tempFolder=tempFolder,
        nbTrees=nbTrees, maxFeatures=maxFeatures, maxDepth=maxDepth,
        minSamplesSplit=minSamplesSplit, minSamplesLeaf=minSamplesLeaf,
        bootstrap=bootstrap, randomClassif=randomClassif,
        nbJobsEstimator=nbJobsEstimator, verbose=verbose,
        learningUse=learningUse, testingUse=testingUse,
        saveFile=saveFile, shouldSave=shouldSave,
        nbFilterOptimization=nbFilterOptimization,
        shufflingRate=shufflingRate):

    # Fix the seed for reproducibility unless a random classifier is wanted
    randomState = None
    if not randomClassif:
        randomState = 100

    # Clamp the requested set sizes to the available data
    lsSize = learningUse
    if learningUse > maxLearningSize:
        lsSize = maxLearningSize

    tsSize = testingUse
    if testingUse > maxTestingSize:
        tsSize = maxTestingSize

    #======INSTANTIATING========#
    #--RandConv--
    randConvCoord = coordinatorRandConvFactory(
        nbFilters=nb_filters,
        filterPolicy=filterPolicy,
        poolings=poolings,
        extractor=extractor,
        nbSubwindows=nbSubwindows,
        subwindowMinSizeRatio=subwindowMinSizeRatio,
        subwindowMaxSizeRatio=subwindowMaxSizeRatio,
        subwindowTargetWidth=subwindowTargetWidth,
        subwindowTargetHeight=subwindowTargetHeight,
        subwindowInterpolation=subwindowInterpolation,
        includeOriginalImage=includeOriginalImage,
        nbJobs=nbJobs,
        verbosity=verbosity,
        tempFolder=tempFolder,
        random=random)

    nb_filters = len(randConvCoord.getFilters())

    #--Extra-tree--
    baseClassif = ExtraTreesClassifier(nbTrees,
                                       max_features=maxFeatures,
                                       max_depth=maxDepth,
                                       min_samples_split=minSamplesSplit,
                                       min_samples_leaf=minSamplesLeaf,
                                       bootstrap=bootstrap,
                                       n_jobs=nbJobsEstimator,
                                       random_state=randomState,
                                       verbose=verbose)

    #--Classifier--
    classifier = Classifier(randConvCoord, baseClassif)

    #--Data--
    (train_set_imgs, train_set_labels,
     test_set_imgs, test_set_labels) = mnist.loadMnist()
    learningSet = ShufflerImageBuffer(
        ImageBuffer(zip(train_set_imgs, train_set_labels),
                    ImageBuffer.NUMPY_FORMAT),
        28, 28, shufflingRate)
    testingSet = ShufflerImageBuffer(
        ImageBuffer(zip(test_set_imgs, test_set_labels),
                    ImageBuffer.NUMPY_FORMAT),
        28, 28, shufflingRate)

    #=====COMPUTATION=====#
    #------Filter optimization-----#
    if nbFilterOptimization > 0:
        # Rank a larger pool of candidate filters before the real run
        randConvOptimizer = coordinatorRandConvFactory(
            nbFilters=nbFilterOptimization,
            filterPolicy=filterPolicy,
            poolings=poolings,
            extractor=(Const.FEATEXT_SPASUB, {"nbCol": 1}),
            nbSubwindows=5,
            subwindowMinSizeRatio=subwindowMinSizeRatio,
            subwindowMaxSizeRatio=subwindowMaxSizeRatio,
            subwindowTargetWidth=subwindowTargetWidth,
            subwindowTargetHeight=subwindowTargetHeight,
            subwindowInterpolation=subwindowInterpolation,
            includeOriginalImage=includeOriginalImage,
            nbJobs=nbJobs,
            verbosity=verbosity,
            tempFolder=tempFolder,
            random=random)

        totallyTrees = ExtraTreesClassifier(30,
                                            max_features=1,
                                            max_depth=maxDepth,
                                            min_samples_split=minSamplesSplit,
                                            min_samples_leaf=minSamplesLeaf,
                                            bootstrap=bootstrap,
                                            n_jobs=nbJobsEstimator,
                                            random_state=randomState,
                                            verbose=verbose)

        # Fit the totally randomized trees through the optimization
        # coordinator so that importancePerFeatureGrp below works on a
        # fitted estimator
        optiClassif = Classifier(randConvOptimizer, totallyTrees)

        print "Starting optimization"
        optiStart = time()
        optiClassif.fit(learningSet)
        optiEnd = time()
        print "Optimization done", formatDuration(optiEnd - optiStart)

        _, order = randConvOptimizer.importancePerFeatureGrp(totallyTrees)

        # Retrieve the candidate filter bank (whose groups `order` indexes)
        filtersTmp = randConvOptimizer._convolExtractor._finiteFilter._filters
        filters = [x for x, _, _ in filtersTmp]

        if not includeOriginalImage:
            bestIndices = order[:nb_filters]
        else:
            # Feature group 0 is the original image; skip it and shift the
            # remaining group indices back to filter indices
            count = 0
            bestIndices = []
            for index in order:
                if count == nb_filters - 1:
                    break
                if index != 0:
                    bestIndices.append(index - 1)
                    count += 1

        bestFilters = []
        for i in bestIndices:
            bestFilters.append(filters[i])

        # Install the selected filters in the main coordinator
        best3Filters = Finite3SameFilter(bestFilters)
        randConvCoord._convolExtractor._finiteFilter = best3Filters

    #--Learning--#
    print "Starting learning"
    fitStart = time()
    classifier.fit(learningSet)
    fitEnd = time()
    print "Learning done", formatDuration(fitEnd - fitStart)
    sys.stdout.flush()

    #--Testing--#
    y_truth = testingSet.getLabels()
    predStart = time()
    y_prob, y_pred = classifier.predict_predict_proba(testingSet)
    predEnd = time()
    accuracy = classifier.accuracy(y_pred, y_truth)
    confMat = classifier.confusionMatrix(y_pred, y_truth)

    #====ANALYSIS=====#
    importance, order = randConvCoord.importancePerFeatureGrp(baseClassif)

    print "==================RandConv================"
    print "-----------Filtering--------------"
    print "nb_filters", nb_filters
    print "filterPolicy", filterPolicy
    print "----------Pooling--------------"
    print "poolings", poolings
    print "--------SW extractor----------"
    print "#Subwindows", nbSubwindows
    print "subwindowMinSizeRatio", subwindowMinSizeRatio
    print "subwindowMaxSizeRatio", subwindowMaxSizeRatio
    print "subwindowTargetWidth", subwindowTargetWidth
    print "subwindowTargetHeight", subwindowTargetHeight
    print "fixedSize", fixedSize
    print "------------Misc-----------------"
    print "includeOriginalImage", includeOriginalImage
    print "random", random
    print "tempFolder", tempFolder
    print "verbosity", verbosity
    print "nbJobs", nbJobs
    print "--------ExtraTrees----------"
    print "nbTrees", nbTrees
    print "maxFeatures", maxFeatures
    print "maxDepth", maxDepth
    print "minSamplesSplit", minSamplesSplit
    print "minSamplesLeaf", minSamplesLeaf
    print "bootstrap", bootstrap
    print "nbJobsEstimator", nbJobsEstimator
    print "verbose", verbose
    print "randomState", randomState
    print "------------Data---------------"
    print "LearningSet size", len(learningSet)
    print "TestingSet size", len(testingSet)
    print "-------------------------------"
    if shouldSave:
        print "saveFile", saveFile
    print "Fit time", formatDuration(fitEnd - fitStart)
    print "Classification time", formatDuration(predEnd - predStart)
    print "Accuracy", accuracy

    if shouldSave:
        np.save(saveFile, y_prob)

    # Return the (possibly optimized) filter bank along with the results
    filtersTmp = randConvCoord._convolExtractor._finiteFilter._filters
    filters = [x for x, _, _ in filtersTmp]

    return accuracy, confMat, importance, order, filters
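
# Hedged usage sketch (assumed driver, not in the original script): comparing
# a purely random filter bank against one pre-selected by the optimization
# pass. The candidate-pool size (10x the final bank) is an illustrative
# choice; `nb_filters` is the module-level default used above.
if __name__ == "__main__":
    accRandom, _, _, _, _ = run(nbFilterOptimization=0)
    accOpti, _, _, _, bestBank = run(nbFilterOptimization=10 * nb_filters)
    print "Accuracy (random filters)   :", accRandom
    print "Accuracy (optimized filters):", accOpti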