Example #1
    def scanImgOverScale(self, image):
        # Scan the image with the cached AdaBoost model at every detection
        # scale and merge the candidate rectangles into the final detections.

        from config import DETECT_START          # 1
        from config import DETECT_END            # 2
        from config import DETECT_STEP           # 0.2
        from config import ADABOOST_CACHE_FILE   # "./model/adaboost_classifier.cache"
        from config import ADABOOST_LIMIT        # 150

        model = getCachedAdaBoost(filename=ADABOOST_CACHE_FILE + str(0), limit=ADABOOST_LIMIT)

        rectangles = []

        for scale in numpy.arange(DETECT_START, DETECT_END, DETECT_STEP):
            rectangles += self.scanImgAtScale(model, image, scale)

        return self.optimalRectangle(rectangles)
Example #2
    def scanImgOverScale(self, image):

        from config import DETECT_START
        from config import DETECT_END
        from config import DETECT_STEP
        from config import ADABOOST_CACHE_FILE
        from config import ADABOOST_LIMIT

        model = getCachedAdaBoost(filename=ADABOOST_CACHE_FILE + str(0), limit=ADABOOST_LIMIT)

        rectangles = []

        for scale in numpy.arange(DETECT_START, DETECT_END, DETECT_STEP):
            rectangles += self.scanImgAtScale(model, image, scale)

        return self.optimalRectangle(rectangles)
Example #3
    def scanImgOverScale(self, image):

        from config import DETECT_START
        from config import DETECT_END
        from config import DETECT_STEP
        from config import ADABOOST_CACHE_FILE
        from config import ADABOOST_LIMIT

        model = getCachedAdaBoost(filename=ADABOOST_CACHE_FILE + str(0),
                                  limit=ADABOOST_LIMIT)

        rectangles = []

        for scale in numpy.arange(DETECT_START, DETECT_END, DETECT_STEP):
            rectangles += self.scanImgAtScale(model, image, scale)

        return self.optimalRectangle(rectangles)
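
Examples #1 to #3 are the same routine with minor formatting differences: each sweeps the detection scales produced by numpy.arange between DETECT_START and DETECT_END. As a quick check of which scales the loop actually visits, here is a minimal sketch using the values noted in the comments of Example #1 (1, 2 and 0.2); note that numpy.arange excludes the end value itself.

import numpy

# Stand-ins for DETECT_START, DETECT_END, DETECT_STEP, taken from the
# comments in Example #1.
scales = numpy.arange(1, 2, 0.2)
print(scales)   # roughly [1.0, 1.2, 1.4, 1.6, 1.8] -- five calls to scanImgAtScale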
Example #4
    def train(self):

        raise ("Unfinished")

        detection_rate = 0
        from config import EXPECTED_FPR_PRE_LAYYER
        from config import EXPECTED_FPR
        from config import LABEL_NEGATIVE
        from config import ADABOOST_CACHE_FILE
        from config import ADABOOST_LIMIT

        cur_fpr = 1.0
        mat = self._mat
        label = self._label

        for i in xrange(self.limit):

            if cur_fpr < EXPECTED_FPR:
                break
            else:
                cache_filename = ADABOOST_CACHE_FILE + str(i)

                if os.path.isfile(cache_filename):
                    self.strong_classifier[i] = getCachedAdaBoost(
                        mat=self._mat,
                        label=self._label,
                        filename=cache_filename,
                        limit=ADABOOST_LIMIT)
                else:
                    self.strong_classifier[i] = AdaBoost(mat,
                                                         label,
                                                         limit=ADABOOST_LIMIT)
                    output, fpr = self.strong_classifier[i].train()

                    cur_fpr *= fpr

                    fp_num = fpr * numpy.count_nonzero(label == LABEL_NEGATIVE)

                    self.strong_classifier[i].saveModel(cache_filename)
                    mat, label = self.updateTrainingDate(mat, output, fp_num)

                self.classifierNum += 1
Example #5
    def train(self):

        raise NotImplementedError("Unfinished")

        detection_rate = 0
        from config import EXPECTED_FPR_PRE_LAYYER
        from config import EXPECTED_FPR
        from config import LABEL_NEGATIVE
        from config import ADABOOST_CACHE_FILE
        from config import ADABOOST_LIMIT

        cur_fpr = 1.0
        mat   = self._mat
        label = self._label

        for i in xrange(self.limit):

            if cur_fpr < EXPECTED_FPR:
                break
            else:
                cache_filename = ADABOOST_CACHE_FILE + str(i)

                if os.path.isfile(cache_filename):
                    self.strong_classifier[i] = getCachedAdaBoost(
                        mat=self._mat,
                        label=self._label,
                        filename=cache_filename,
                        limit=ADABOOST_LIMIT)
                else:
                    self.strong_classifier[i] = AdaBoost(mat, label, limit=ADABOOST_LIMIT)
                    output, fpr = self.strong_classifier[i].train()

                    cur_fpr *= fpr

                    fp_num = fpr * numpy.count_nonzero(label == LABEL_NEGATIVE)

                    self.strong_classifier[i].saveModel(cache_filename)
                    mat, label = self.updateTrainingDate(mat, output, fp_num)

                self.classifierNum += 1
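
The loop in Examples #4 and #5 treats each strong classifier as one cascade layer: the layer's measured false positive rate is multiplied into cur_fpr, and training stops once the product drops below EXPECTED_FPR. A minimal sketch of that stopping rule, with assumed stand-in values (a per-layer rate of 0.5 and a target of 0.01, not the real config constants):

# Illustration only: 0.5 stands in for each layer's measured fpr and
# 0.01 for EXPECTED_FPR; neither value is taken from config.
cur_fpr, layers = 1.0, 0
while cur_fpr >= 0.01:
    cur_fpr *= 0.5
    layers += 1
print(layers, cur_fpr)   # 7 layers, cur_fpr ~= 0.0078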
Example #6
from mapReduce import reduce

# Build the feature matrix for the face and non-face sample sets via the
# project's map/reduce helpers.
map(Face, nonFace)
_mat = reduce()

mat = _mat

featureNum, sampleNum = _mat.shape

assert sampleNum  == (POSITIVE_SAMPLE + NEGATIVE_SAMPLE)
assert featureNum == FEATURE_NUM

Label_Face    = [+1 for i in xrange(POSITIVE_SAMPLE)]
Label_NonFace = [-1 for i in xrange(NEGATIVE_SAMPLE)]

label = numpy.array(Label_Face + Label_NonFace)

cache_filename = ADABOOST_CACHE_FILE + str(0)

if os.path.isfile(cache_filename):
    model = getCachedAdaBoost(mat=_mat,
                              label=label,
                              filename=cache_filename,
                              limit=ADABOOST_LIMIT)
else:
    model = AdaBoost(mat, label, limit=ADABOOST_LIMIT)
    model.train()
    model.saveModel(cache_filename)

print model
Example #7
import numpy

# ImageSet, Feature, getCachedAdaBoost and the constants used below (TEST_FACE,
# TEST_NONFACE, TRAINING_IMG_WIDTH, TRAINING_IMG_HEIGHT, ADABOOST_CACHE_FILE,
# LABEL_POSITIVE) come from the project's own modules, as in the earlier examples.

face = ImageSet(TEST_FACE, sampleNum=100)

nonFace = ImageSet(TEST_NONFACE, sampleNum=100)

tot_samples = face.sampleNum + nonFace.sampleNum

haar = Feature(TRAINING_IMG_WIDTH, TRAINING_IMG_HEIGHT)

mat = numpy.zeros((haar.featuresNum, tot_samples))

# Column i of mat holds the Haar feature vector of the i-th face sample.
for i in range(face.sampleNum):
    featureVec = haar.calFeatureForImg(face.images[i])
    for j in range(haar.featuresNum):
        mat[j][i] = featureVec[j]

# Non-face samples fill the remaining columns, offset by face.sampleNum.
for i in range(nonFace.sampleNum):
    featureVec = haar.calFeatureForImg(nonFace.images[i])
    for j in range(haar.featuresNum):
        mat[j][i + face.sampleNum] = featureVec[j]

model = getCachedAdaBoost(filename=ADABOOST_CACHE_FILE + str(0), limit=10)

output = model.prediction(mat, th=0)

# Fraction of the 100 face samples that the model labels LABEL_POSITIVE.
detectionRate = numpy.count_nonzero(output[0:100] == LABEL_POSITIVE) * 1. / 100

print(output)
Example #8
face    = ImageSet(TEST_FACE, sampleNum=100)

nonFace = ImageSet(TEST_NONFACE, sampleNum=100)

tot_samples = face.sampleNum + nonFace.sampleNum

haar   = Feature(TRAINING_IMG_WIDTH, TRAINING_IMG_HEIGHT)

mat = numpy.zeros((haar.featuresNum, tot_samples))

for i in xrange(face.sampleNum):
    featureVec = haar.calFeatureForImg(face.images[i])
    for j in xrange(haar.featuresNum):
        mat[j][i] = featureVec[j]

for i in xrange(nonFace.sampleNum):
    featureVec = haar.calFeatureForImg(nonFace.images[i])
    for j in xrange(haar.featuresNum):
        mat[j][i + face.sampleNum] = featureVec[j]


model = getCachedAdaBoost(filename=ADABOOST_CACHE_FILE + str(0), limit=10)

output = model.prediction(mat, th=0)

detectionRate = numpy.count_nonzero(output[0:100] == LABEL_POSITIVE) * 1. / 100

print output
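
Examples #7 and #8 build mat with the 100 face samples in the first columns and the 100 non-face samples after them, so output[0:100] covers the faces. Assuming output keeps that same per-sample order for the second half as well, the false positive rate over the non-face samples can be computed the same way as detectionRate; a minimal sketch:

# Sketch only: assumes output has one prediction per column of mat, in the
# same order as above (100 face samples followed by 100 non-face samples).
falsePositiveRate = numpy.count_nonzero(output[100:200] == LABEL_POSITIVE) * 1. / 100
print(falsePositiveRate)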