Пример #1
0
def routine(images, filename):
    """Compute the Haar feature matrix for *images* and save it to *filename*.

    Builds a ``(haar.featuresNum, len(images))`` float32 matrix whose column
    ``i`` is the feature vector of ``images[i]``, then persists it with
    ``numpy.save`` (which appends ``.npy`` to *filename*).
    """
    tot_samples = len(images)

    haar = Feature(TRAINING_IMG_WIDTH, TRAINING_IMG_HEIGHT)

    mat = numpy.zeros((haar.featuresNum, tot_samples), dtype=numpy.float32)

    for i in range(tot_samples):
        # Assign the whole column at once: the original per-feature Python
        # loop was O(featuresNum) interpreter work per sample for no benefit.
        mat[:, i] = haar.calFeatureForImg(images[i])

    numpy.save(filename, mat)
Пример #2
0
def routine(images, filename):
    """Compute the Haar feature matrix for *images* and save it to *filename*.

    Builds a ``(haar.featuresNum, len(images))`` float32 matrix whose column
    ``i`` is the feature vector of ``images[i]``, then persists it with
    ``numpy.save`` (which appends ``.npy`` to *filename*).
    """
    tot_samples = len(images)

    haar = Feature(TRAINING_IMG_WIDTH, TRAINING_IMG_HEIGHT)

    mat = numpy.zeros((haar.featuresNum, tot_samples), dtype=numpy.float32)

    for i in range(tot_samples):
        # Assign the whole column at once: the original per-feature Python
        # loop was O(featuresNum) interpreter work per sample for no benefit.
        mat[:, i] = haar.calFeatureForImg(images[i])

    numpy.save(filename, mat)
Пример #3
0
# Build (or load from cache) the training feature matrix.
# NOTE(review): `Face` is referenced but not defined in this excerpt —
# presumably an ImageSet of positive samples created earlier; confirm
# against the full script.
nonFace = ImageSet(TRAINING_NONFACE, sampleNum = NEGATIVE_SAMPLE)

tot_samples = Face.sampleNum + nonFace.sampleNum

# Haar feature extractor sized to the training image dimensions.
haar   = Feature(TRAINING_IMG_WIDTH, TRAINING_IMG_HEIGHT)

if os.path.isfile(FEATURE_FILE_TRAINING + ".npy"):

    # Fast path: reuse the feature matrix cached by a previous run.
    _mat = numpy.load(FEATURE_FILE_TRAINING + ".npy")

else:
    if DEBUG_MODEL is True:
        # _mat[j][i] = value of Haar feature j on sample i; face samples
        # occupy the first Face.sampleNum columns, non-faces the rest.
        _mat = numpy.zeros((haar.featuresNum, tot_samples))

        for i in xrange(Face.sampleNum):
            featureVec = haar.calFeatureForImg(Face.images[i])
            for j in xrange(haar.featuresNum):
                _mat[j][i                     ]  = featureVec[j]

        for i in xrange(nonFace.sampleNum):
            featureVec = haar.calFeatureForImg(nonFace.images[i])
            for j in xrange(haar.featuresNum):
                _mat[j][i + Face.sampleNum] = featureVec[j]

        # Cache the matrix so later runs take the fast path above.
        numpy.save(FEATURE_FILE_TRAINING, _mat)
    else:
        # Non-debug path: compute the features via the project's
        # map/reduce helpers (these imports shadow the builtins).
        from mapReduce import map
        from mapReduce import reduce

        map(Face, nonFace)
        _mat = reduce()
Пример #4
0
class Cascade:
    """Cascade of AdaBoost strong classifiers for face detection.

    On construction, loads (or computes and caches) the Haar feature matrix
    for the positive/negative training sets.  `train` is meant to fit up to
    `limit` layers, each trained on the positives plus the previous layer's
    false positives, but is still unfinished.
    """

    def __init__(self, face_dir = "", nonface_dir = "", train = True, limit = 30):
        # Positive (face) and negative (non-face) training image sets.
        self.Face    = ImageSet(face_dir,    sampleNum = POSITIVE_SAMPLE)
        self.nonFace = ImageSet(nonface_dir, sampleNum = NEGATIVE_SAMPLE)

        tot_samples = self.Face.sampleNum + self.nonFace.sampleNum

        self.classifier = AdaBoost

        self.haar   = Feature(TRAINING_IMG_WIDTH, TRAINING_IMG_HEIGHT)

        if os.path.isfile(FEATURE_FILE_TRAINING + ".npy"):
            # Fast path: reuse the feature matrix cached by a previous run.
            self._mat = numpy.load(FEATURE_FILE_TRAINING + ".npy")
        else:
            if DEBUG_MODEL is True:
                # _mat[j][i] = Haar feature j evaluated on sample i; faces
                # fill the first Face.sampleNum columns, non-faces the rest.
                self._mat = numpy.zeros((self.haar.featuresNum, tot_samples))

                for i in range(self.Face.sampleNum):
                    # Whole-column assignment replaces the original
                    # per-feature Python inner loop.
                    self._mat[:, i] = self.haar.calFeatureForImg(self.Face.images[i])

                for i in range(self.nonFace.sampleNum):
                    self._mat[:, i + self.Face.sampleNum] = \
                        self.haar.calFeatureForImg(self.nonFace.images[i])

                # Cache so later constructions take the fast path above.
                numpy.save(FEATURE_FILE_TRAINING, self._mat)
            else:
                # Non-debug path: compute features via the project's
                # map/reduce helpers (these imports shadow the builtins).
                from mapReduce import map
                from mapReduce import reduce

                map(self.Face, self.nonFace)
                self._mat = reduce()

        featureNum, sampleNum = self._mat.shape

        assert sampleNum  == (POSITIVE_SAMPLE + NEGATIVE_SAMPLE)
        assert featureNum == FEATURE_NUM

        # AdaBoost label convention: +1 for faces, -1 for non-faces.
        Label_Face    = [+1 for i in range(POSITIVE_SAMPLE)]
        Label_NonFace = [-1 for i in range(NEGATIVE_SAMPLE)]

        self._label = numpy.array(Label_Face + Label_NonFace)
        self.limit  = limit
        self.classifierNum     = 0
        self.strong_classifier = [None for i in range(limit)]


    def train(self):
        """Train cascade layers until the expected false-positive rate is met.

        Raises:
            NotImplementedError: always — the implementation below is
            unfinished and must not run yet.
        """
        # BUG FIX: the original `raise ("Unfinished")` raised a plain str,
        # which is itself a TypeError in Python 3 (and deprecated in
        # Python 2); raise a real exception type instead.
        raise NotImplementedError("Unfinished")

        detection_rate = 0
        from config import EXPECTED_FPR_PRE_LAYYER
        from config import EXPECTED_FPR
        from config import LABEL_NEGATIVE

        cur_fpr = 1.0
        mat   = self._mat
        label = self._label

        for i in range(self.limit):
            # Stop once the cumulative false-positive rate is low enough.
            if cur_fpr < EXPECTED_FPR:
                break

            cache_filename = ADABOOST_CACHE_FILE + str(i)

            if os.path.isfile(cache_filename):
                # Reload a previously trained layer instead of retraining.
                self.strong_classifier[i] = getCachedAdaBoost(mat     = self._mat,
                                                              label   = self._label,
                                                              filename= cache_filename,
                                                              limit   = ADABOOST_LIMIT)
            else:
                self.strong_classifier[i] = AdaBoost(mat, label, limit = ADABOOST_LIMIT)
                output, fpr = self.strong_classifier[i].train()

                cur_fpr *= fpr

                # Number of negatives this layer still misclassifies.
                fp_num = fpr * numpy.count_nonzero(label == LABEL_NEGATIVE)

                self.strong_classifier[i].saveModel(cache_filename)
                # The next layer trains on positives + false positives only.
                mat, label = self.updateTrainingDate(mat, output, fp_num)

            self.classifierNum += 1


    def updateTrainingDate(self, mat, output, fp_num):
        """Build the next layer's training set: all positives plus only the
        negatives the previous layer misclassified.

        (Name typo — "Date" for "Data" — kept for caller compatibility.)

        Args:
            mat:    feature matrix used by the previous layer.
            output: previous layer's predictions, aligned with self._label.
            fp_num: expected false-positive count (float; truncated to int).

        Returns:
            (new_mat, new_label) for the next cascade layer.
        """
        fp_num = int(fp_num)

        assert len(output) == self._label.size

        _mat = numpy.zeros((FEATURE_NUM, POSITIVE_SAMPLE + fp_num), dtype=numpy.float16)

        # All positive samples are carried over unchanged.
        _mat[:, :POSITIVE_SAMPLE] = mat[:, :POSITIVE_SAMPLE]

        counter = 0
        # Only keep negative samples which were classified wrong.
        for i in range(POSITIVE_SAMPLE, self._label.size):
            if output[i] != self._label[i]:
                _mat[:, POSITIVE_SAMPLE + counter] = mat[:, i]
                counter += 1

        assert counter == fp_num

        Label_Face    = [+1 for i in range(POSITIVE_SAMPLE)]
        Label_NonFace = [-1 for i in range(fp_num)]

        _label = numpy.array(Label_Face + Label_NonFace)

        return _mat, _label


    def predict(self):
        """Run every trained strong classifier over the feature matrix.

        NOTE(review): still unfinished — `output` is never filled in and
        nothing is returned.
        """
        output = numpy.zeros(POSITIVE_SAMPLE + NEGATIVE_SAMPLE, dtype= numpy.float16)
        for i in range(self.classifierNum):
            # BUG FIX: the original referenced an undefined global `mat`
            # (NameError at runtime); use the instance's feature matrix.
            self.strong_classifier[i].prediction(self._mat, th = 0)

            """unfinished"""

    def save(self):
        """Persist the cascade model (not implemented yet)."""
        pass

    def is_goodenough(self):
        """Check whether the cascade meets its targets (not implemented yet)."""
        pass
Пример #5
0
class Cascade:
    """Cascade of AdaBoost strong classifiers for face detection.

    On construction, loads (or computes and caches) the Haar feature matrix
    for the positive/negative training sets.  `train` is meant to fit up to
    `limit` layers, each trained on the positives plus the previous layer's
    false positives, but is still unfinished.
    """

    def __init__(self, face_dir="", nonface_dir="", train=True, limit=30):
        # Positive (face) and negative (non-face) training image sets.
        self.Face = ImageSet(face_dir, sampleNum=POSITIVE_SAMPLE)
        self.nonFace = ImageSet(nonface_dir, sampleNum=NEGATIVE_SAMPLE)

        tot_samples = self.Face.sampleNum + self.nonFace.sampleNum

        self.classifier = AdaBoost

        self.haar = Feature(TRAINING_IMG_WIDTH, TRAINING_IMG_HEIGHT)

        if os.path.isfile(FEATURE_FILE_TRAINING + ".npy"):
            # Fast path: reuse the feature matrix cached by a previous run.
            self._mat = numpy.load(FEATURE_FILE_TRAINING + ".npy")
        else:
            if DEBUG_MODEL is True:
                # _mat[j][i] = Haar feature j evaluated on sample i; faces
                # fill the first Face.sampleNum columns, non-faces the rest.
                self._mat = numpy.zeros((self.haar.featuresNum, tot_samples))

                for i in range(self.Face.sampleNum):
                    # Whole-column assignment replaces the original
                    # per-feature Python inner loop.
                    self._mat[:, i] = self.haar.calFeatureForImg(
                        self.Face.images[i])

                for i in range(self.nonFace.sampleNum):
                    self._mat[:, i + self.Face.sampleNum] = \
                        self.haar.calFeatureForImg(self.nonFace.images[i])

                # Cache so later constructions take the fast path above.
                numpy.save(FEATURE_FILE_TRAINING, self._mat)
            else:
                # Non-debug path: compute features via the project's
                # map/reduce helpers (these imports shadow the builtins).
                from mapReduce import map
                from mapReduce import reduce

                map(self.Face, self.nonFace)
                self._mat = reduce()

        featureNum, sampleNum = self._mat.shape

        assert sampleNum == (POSITIVE_SAMPLE + NEGATIVE_SAMPLE)
        assert featureNum == FEATURE_NUM

        # AdaBoost label convention: +1 for faces, -1 for non-faces.
        Label_Face = [+1 for i in range(POSITIVE_SAMPLE)]
        Label_NonFace = [-1 for i in range(NEGATIVE_SAMPLE)]

        self._label = numpy.array(Label_Face + Label_NonFace)
        self.limit = limit
        self.classifierNum = 0
        self.strong_classifier = [None for i in range(limit)]

    def train(self):
        """Train cascade layers until the expected false-positive rate is met.

        Raises:
            NotImplementedError: always — the implementation below is
            unfinished and must not run yet.
        """
        # BUG FIX: the original `raise ("Unfinished")` raised a plain str,
        # which is itself a TypeError in Python 3; raise a real exception.
        raise NotImplementedError("Unfinished")

        detection_rate = 0
        from config import EXPECTED_FPR_PRE_LAYYER
        from config import EXPECTED_FPR
        from config import LABEL_NEGATIVE

        cur_fpr = 1.0
        mat = self._mat
        label = self._label

        # BUG FIX: this loop used `xrange`, a NameError in Python 3 (the
        # rest of this snippet already uses `range`).
        for i in range(self.limit):
            # Stop once the cumulative false-positive rate is low enough.
            if cur_fpr < EXPECTED_FPR:
                break

            cache_filename = ADABOOST_CACHE_FILE + str(i)

            if os.path.isfile(cache_filename):
                # Reload a previously trained layer instead of retraining.
                self.strong_classifier[i] = getCachedAdaBoost(
                    mat=self._mat,
                    label=self._label,
                    filename=cache_filename,
                    limit=ADABOOST_LIMIT)
            else:
                self.strong_classifier[i] = AdaBoost(mat,
                                                     label,
                                                     limit=ADABOOST_LIMIT)
                output, fpr = self.strong_classifier[i].train()

                cur_fpr *= fpr

                # Number of negatives this layer still misclassifies.
                fp_num = fpr * numpy.count_nonzero(label == LABEL_NEGATIVE)

                self.strong_classifier[i].saveModel(cache_filename)
                # The next layer trains on positives + false positives only.
                mat, label = self.updateTrainingDate(mat, output, fp_num)

            self.classifierNum += 1

    def updateTrainingDate(self, mat, output, fp_num):
        """Build the next layer's training set: all positives plus only the
        negatives the previous layer misclassified.

        (Name typo — "Date" for "Data" — kept for caller compatibility.)

        Args:
            mat:    feature matrix used by the previous layer.
            output: previous layer's predictions, aligned with self._label.
            fp_num: expected false-positive count (float; truncated to int).

        Returns:
            (new_mat, new_label) for the next cascade layer.
        """
        fp_num = int(fp_num)

        assert len(output) == self._label.size

        _mat = numpy.zeros((FEATURE_NUM, POSITIVE_SAMPLE + fp_num),
                           dtype=numpy.float16)

        # All positive samples are carried over unchanged.
        _mat[:, :POSITIVE_SAMPLE] = mat[:, :POSITIVE_SAMPLE]

        counter = 0
        # Only keep negative samples which were classified wrong.
        for i in range(POSITIVE_SAMPLE, self._label.size):
            if output[i] != self._label[i]:
                _mat[:, POSITIVE_SAMPLE + counter] = mat[:, i]
                counter += 1

        assert counter == fp_num

        Label_Face = [+1 for i in range(POSITIVE_SAMPLE)]
        Label_NonFace = [-1 for i in range(fp_num)]

        _label = numpy.array(Label_Face + Label_NonFace)

        return _mat, _label

    def predict(self):
        """Run every trained strong classifier over the feature matrix.

        NOTE(review): still unfinished — `output` is never filled in and
        nothing is returned.
        """
        output = numpy.zeros(POSITIVE_SAMPLE + NEGATIVE_SAMPLE,
                             dtype=numpy.float16)
        for i in range(self.classifierNum):
            # BUG FIX: the original referenced an undefined global `mat`
            # (NameError at runtime); use the instance's feature matrix.
            self.strong_classifier[i].prediction(self._mat, th=0)
            """unfinished"""

    def save(self):
        """Persist the cascade model (not implemented yet)."""
        pass

    def is_goodenough(self):
        """Check whether the cascade meets its targets (not implemented yet)."""
        pass