Example #1
    randomState = None
    if not randomClassif:
        randomState = 100

    tsSize = testingUse
    if testingUse > maxTestingSize:
        tsSize = maxTestingSize

    #--SVM--
    baseClassif = LinearSVC(verbose=verbose, random_state=randomState)

    #--Classifier--
    classifier = uClassifier(coordinator=None,
                             base_classifier=baseClassif,
                             n_jobs=nbJobsEstimator,
                             random_state=randomState,
                             verbose=verbose)

    #--Data--
    loader = CifarFromNumpies(learningSetDir, learningIndexFile)
    learningSet = FileImageBuffer(loader.getFiles(), NumpyImageLoader())
    learningSet = learningSet[0:histSize]

    loader = CifarFromNumpies(testingSetDir, testingIndexFile)
    testingSet = FileImageBuffer(loader.getFiles(), NumpyImageLoader())
    testingSet = testingSet[0:tsSize]

    #--Learning--#
    print "Starting learning"
    fitStart = time()
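
The clamp at the top of this snippet (and the matching one for the learning set in the next example) just bounds the requested size; as a one-line sketch using the same module-level limit, it could equally be written with Python's built-in min:

    tsSize = min(testingUse, maxTestingSize)
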
Example #2

import sys
from time import time

import numpy as np
from sklearn.svm import LinearSVC

# uClassifier, coordinatorRandConvFactory, CifarFromNumpies, FileImageBuffer,
# NumpyImageLoader, formatDuration, formatBigNumber and the module-level
# defaults used in the signature below are assumed to come from the
# surrounding project module.
def run(nb_filters=nb_filters,
        filterPolicy=filterPolicy,
        poolings=poolings,
        nbSubwindows=nbSubwindows,
        subwindowMinSizeRatio=subwindowMinSizeRatio,
        subwindowMaxSizeRatio=subwindowMaxSizeRatio,
        subwindowTargetWidth=subwindowTargetWidth,
        subwindowTargetHeight=subwindowTargetHeight,
        fixedSize=fixedSize,
        subwindowInterpolation=subwindowInterpolation,
        includeOriginalImage=includeOriginalImage,
        random=random,
        nbJobs=nbJobs,
        verbosity=verbosity,
        tempFolder=tempFolder,
        nbTrees=nbTrees,
        maxDepth=maxDepth,
        minSamplesSplit=minSamplesSplit,
        minSamplesLeaf=minSamplesLeaf,
        randomClassif=randomClassif,
        nbJobsEstimator=nbJobsEstimator,
        verbose=verbose,
        learningUse=learningUse,
        testingUse=testingUse,
        saveFile=saveFile,
        shouldSave=shouldSave):

    randomState = None
    if not randomClassif:
        randomState = 100

    lsSize = learningUse
    if learningUse > maxLearningSize:
        lsSize = maxLearningSize

    tsSize = testingUse
    if testingUse > maxTestingSize:
        tsSize = maxTestingSize

    #======INSTANTIATING========#
    #--randconv--
    randConvCoord = coordinatorRandConvFactory(
        nbFilters=nb_filters,
        filterPolicy=filterPolicy,
        nbSubwindows=nbSubwindows,
        subwindowMinSizeRatio=subwindowMinSizeRatio,
        subwindowMaxSizeRatio=subwindowMaxSizeRatio,
        subwindowTargetWidth=subwindowTargetWidth,
        subwindowTargetHeight=subwindowTargetHeight,
        poolings=poolings,
        subwindowInterpolation=subwindowInterpolation,
        includeOriginalImage=includeOriginalImage,
        nbJobs=nbJobs,
        verbosity=verbosity,
        tempFolder=tempFolder,
        random=random)

    # The coordinator may end up holding a different number of filters than
    # requested, so read the effective count back for the report below.
    nb_filters = len(randConvCoord.getFilters())

    #--SVM--
    baseClassif = LinearSVC(verbose=verbose, random_state=randomState)

    #--Classifier--
    classifier = uClassifier(coordinator=randConvCoord,
                             base_classifier=baseClassif,
                             n_estimators=nbTrees,
                             max_depth=maxDepth,
                             min_samples_split=minSamplesSplit,
                             min_samples_leaf=minSamplesLeaf,
                             n_jobs=nbJobsEstimator,
                             random_state=randomState,
                             verbose=verbose)

    #--Data--
    loader = CifarFromNumpies(learningSetDir, learningIndexFile)
    learningSet = FileImageBuffer(loader.getFiles(), NumpyImageLoader())
    learningSet = learningSet[0:lsSize]

    loader = CifarFromNumpies(testingSetDir, testingIndexFile)
    testingSet = FileImageBuffer(loader.getFiles(), NumpyImageLoader())
    testingSet = testingSet[0:tsSize]

    #=====COMPUTATION=====#
    #--Learning--#
    print "Starting learning"
    fitStart = time()
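    # Assumption from the call pattern below: _preprocess runs feature
    # extraction once and returns a sparse visual-word histogram, which is
    # optionally saved and then fed to fit_histogram so the SVM can be
    # trained without recomputing the features.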
    hist = classifier._preprocess(learningSet, learningPhase=True)
    y = learningSet.getLabels()
    if shouldSave:
        np.savez(saveFile,
                 data=hist.data,
                 indices=hist.indices,
                 indptr=hist.indptr,
                 shape=hist.shape)
    classifier.fit_histogram(hist, y)
    fitEnd = time()
    print "Learning done", formatDuration(fitEnd - fitStart)
    sys.stdout.flush()

    #--Testing--#
    y_truth = testingSet.getLabels()
    predStart = time()
    y_pred = classifier.predict(testingSet)
    predEnd = time()
    accuracy = classifier.accuracy(y_pred, y_truth)
    confMat = classifier.confusionMatrix(y_pred, y_truth)

    #====ANALYSIS=====#
    importance, order = randConvCoord.importancePerFeatureGrp(
        classifier._visualBagger)

    print "==================Bag of Visual Words======================="
    print "-----------Filtering--------------"
    print "nb_filters", nb_filters
    print "filterPolicy", filterPolicy
    print "----------Pooling--------------"
    print "poolings", poolings
    print "--------SW extractor----------"
    print "#Subwindows", nbSubwindows
    print "subwindowMinSizeRatio", subwindowMinSizeRatio
    print "subwindowMaxSizeRatio", subwindowMaxSizeRatio
    print "subwindowTargetWidth", subwindowTargetWidth
    print "subwindowTargetHeight", subwindowTargetHeight
    print "fixedSize", fixedSize
    print "------------Misc-----------------"
    print "includeOriginalImage", includeOriginalImage
    print "random", random
    print "tempFolder", tempFolder
    print "verbosity", verbosity
    print "nbJobs", nbJobs
    print "--------Bag of words params + SVC----------"
    print "nbTrees", nbTrees
    print "maxDepth", maxDepth
    print "minSamplesSplit", minSamplesSplit
    print "minSamplesLeaf", minSamplesLeaf
    print "nbJobsEstimator", nbJobsEstimator
    print "verbose", verbose
    print "randomState", randomState
    print "------------Data---------------"
    print "LearningSet size", len(learningSet)
    print "TestingSet size", len(testingSet)
    print "-------------------------------"
    if shouldSave:
        print "saveFile", saveFile
    print "Fit time", formatDuration(fitEnd - fitStart)
    print "Classifcation time", formatDuration(predEnd - predStart)
    print "Accuracy", accuracy
    print "Leafs", formatBigNumber(classifier.histoSize)

    return accuracy, confMat, importance, order
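
The np.savez call in run() stores the histogram's data, indices, indptr and shape arrays, which is the usual way to serialize a SciPy CSR matrix. Assuming hist is such a matrix, a sketch of reloading it (the file name bovw_hist.npz is hypothetical; np.savez appends the .npz suffix if it is missing):

import numpy as np
from scipy.sparse import csr_matrix

# Rebuild the sparse visual-word histogram saved by run(); the .npz keys
# match the keyword arguments that were passed to np.savez.
arrays = np.load("bovw_hist.npz")  # hypothetical file name
hist = csr_matrix((arrays["data"], arrays["indices"], arrays["indptr"]),
                  shape=tuple(arrays["shape"]))

Since every parameter of run() defaults to a module-level value, a plain call such as accuracy, confMat, importance, order = run() is enough to reproduce the whole experiment.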