Example #1
def model_build(path=os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "res", "train"), feature=PCA(), dist_metric=EuclideanDistance(), k=1, sz=None):
    model_fn = os.path.join(path, "mdl.pkl")
    if not os.path.isfile(model_fn):
        [X,y] = read_images(path, sz=sz)
        classifier = NearestNeighbor(dist_metric=dist_metric, k=k)
        model = PredictableModel(feature=feature, classifier=classifier)
        model.compute(X, y)
        save_model(model_fn, model)
    return load_model(model_fn)
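Once built and cached, the model returned above can be queried directly. A minimal usage sketch, assuming facerec's PredictableModel.predict API and a hypothetical grayscale probe image of the training size ("probe.png" is not part of the original example):

import numpy as np
from PIL import Image

model = model_build()  # trains and writes mdl.pkl on the first call, loads it afterwards
# "probe.png" is a hypothetical query image, assumed grayscale and the same size as the training data
probe = np.asarray(Image.open("probe.png").convert("L"), dtype=np.uint8)
prediction = model.predict(probe)  # recent facerec versions return [predicted_label, classifier_output]
print("Predicted label:", prediction[0])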
Example #2
def get_model(image_size, subject_names):
    """ Vraca predvidajuci model
    """
    # definise Fisherface metodu:
    feature = Fisherfaces()
    #definise 1-NN klasifikator sa Euklidskim rastojanjem Define a 1-NN classifier with Euclidean Distance:
    classifier = NearestNeighbor(dist_metric=EuclideanDistance(), k=1)
    # vraca modela :
    return ExtendedPredictableModel(feature=feature, classifier=classifier, image_size=image_size, subject_names=subject_names)
Example #3
def get_model(numeric_dataset, model_filename=None):
    feature = ChainOperator(Resize((128,128)), Fisherfaces())
    classifier = NearestNeighbor(dist_metric=EuclideanDistance(), k=1)
    inner_model = PredictableModel(feature=feature, classifier=classifier)
    model = PredictableModelWrapper(inner_model)
    model.set_data(numeric_dataset)
    model.compute()
    if model_filename is not None:
        save_model(model_filename, model)
    return model
Example #4
def get_model(image_size, subject_names):

    feature = Fisherfaces()

    classifier = NearestNeighbor(dist_metric=EuclideanDistance(), k=1)

    return ExtendedPredictableModel(feature=feature,
                                    classifier=classifier,
                                    image_size=image_size,
                                    subject_names=subject_names)
Example #5
def get_model(image_size, subject_names):
    """ This method returns the PredictableModel which is used to learn a model
        for possible further usage. If you want to define your own model, this
        is the method to return it from!
    """
    # Define the Fisherfaces Method as Feature Extraction method:
    feature = Fisherfaces()
    # Define a 1-NN classifier with Euclidean Distance:
    classifier = NearestNeighbor(dist_metric=EuclideanDistance(), k=1)
    # Return the model as the combination:
    return ExtendedPredictableModel(feature=feature, classifier=classifier, image_size=image_size, subject_names=subject_names)
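ExtendedPredictableModel is not part of the facerec core; in the sample scripts that use this get_model it is typically a thin PredictableModel subclass that also stores the image size and subject names. A hedged sketch of what such a class might look like (an assumption, not the original definition):

from facerec.model import PredictableModel

class ExtendedPredictableModel(PredictableModel):
    """ PredictableModel that also remembers the image size and subject names,
        so a saved model carries enough metadata to resize and label probes. """
    def __init__(self, feature, classifier, image_size, subject_names):
        PredictableModel.__init__(self, feature=feature, classifier=classifier)
        self.image_size = image_size        # e.g. (70, 70), used to resize probe images
        self.subject_names = subject_names  # maps numeric labels back to person names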
Example #6
def entrenarModelo(dirImagenes = None, arcModelo = arcModelo):
    if dirImagenes is None:
        print(dirImagenes)
        return 0
    [X, y, clases] = read_images(dirImagenes)
    modelo = PredictableModel(feature=Fisherfaces(), classifier=NearestNeighbor(dist_metric=EuclideanDistance(), k=1))  # model configuration
    modelo.compute(X, y)
    pkl = open(arcModelo, 'wb')
    cPickle.dump([modelo, clases, tamanioCara], pkl)   # cPickle is used directly instead of save_model so that metadata can be stored with the model
    pkl.close()
    validacion = KFoldCrossValidation(modelo, k=10)
    validacion.validate(X, y)
    validacion.print_results()
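A hypothetical counterpart to entrenarModelo for loading the pickle back, assuming the same three-element list layout ([model, classes, face size]) used above; cargarModelo is not part of the original example:

try:
    import cPickle as pickle  # Python 2, matching the snippet above
except ImportError:
    import pickle             # Python 3 fallback

def cargarModelo(arcModelo):
    # Hypothetical helper: load the model together with its metadata.
    with open(arcModelo, 'rb') as pkl:
        modelo, clases, tamanioCara = pickle.load(pkl)
    return modelo, clases, tamanioCara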
Example #7
class Classifier(Enum):
    svm = SVM()
    svm_linear = SVM(
        "-s 2 -t 0 -n 0.3 -q"
    )  # One-class SVM, linear kernel, nu = 0.3, quiet mode
    svm_rbf = SVM(
        '-s 2 -t 2 -q')  # One-class SVM, RBF kernel, quiet mode
    svm_sigmoid = SVM(
        '-s 2 -t 3 -n 0.7 -q'
    )  # One-class SVM, sigmoid kernel, nu = 0.7, quiet mode

    euclidean = NearestNeighbor(dist_metric=EuclideanDistance(), k=1)
    chisquare = NearestNeighbor(dist_metric=ChiSquareDistance(), k=1)
    euclidean3 = NearestNeighbor(dist_metric=EuclideanDistance(), k=3)
    chisquare3 = NearestNeighbor(dist_metric=ChiSquareDistance(), k=3)
    euclidean5 = NearestNeighbor(dist_metric=EuclideanDistance(), k=5)
    chisquare5 = NearestNeighbor(dist_metric=ChiSquareDistance(), k=5)
    euclidean7 = NearestNeighbor(dist_metric=EuclideanDistance(), k=7)
    chisquare7 = NearestNeighbor(dist_metric=ChiSquareDistance(), k=7)
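A short usage sketch for the enum above: each member holds a ready-made classifier instance, so its value can be passed straight into a PredictableModel. The Fisherfaces feature chosen here is an assumption of this sketch, not part of the original:

from facerec.feature import Fisherfaces
from facerec.model import PredictableModel

# Pick a classifier variant by name and combine it with a feature extractor.
model = PredictableModel(feature=Fisherfaces(),
                         classifier=Classifier.chisquare3.value)
# model.compute(X, y) would then train it on images X with integer labels y.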
Example #8
def train(train_path):
    # Now read in the image data. This must be a valid path!
    [X, y, class_names] = read_images(train_path)
    print(X, y, class_names)
    # Then set up a handler for logging:
    handler = logging.StreamHandler(sys.stdout)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)
    # Add handler to facerec modules, so we see what's going on inside:
    logger = logging.getLogger("facerec")
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)
    # Define the Fisherfaces as Feature Extraction method:
    feature = Fisherfaces()
    # Define a 1-NN classifier with Euclidean Distance:
    classifier = NearestNeighbor(dist_metric=EuclideanDistance(), k=1)
    # Define the model as the combination
    model = PredictableModel(feature=feature, classifier=classifier)
    # Compute the Fisherfaces on the given data (in X) and labels (in y):
    model.compute(X, y)
    # Then turn the first (at most) 16 eigenvectors into grayscale
    # images (note: eigenvectors are stored by column!)
    E = []
    for i in range(min(model.feature.eigenvectors.shape[1], 16)):
        e = model.feature.eigenvectors[:, i].reshape(X[0].shape)
        E.append(minmax_normalize(e, 0, 255, dtype=np.uint8))
    # Plot them and store the plot to "python_fisherfaces_fisherfaces.pdf"
    subplot(title="Fisherfaces",
            images=E,
            rows=4,
            cols=4,
            sptitle="Fisherface",
            colormap=cm.jet,
            filename="fisherfaces.png")
    # Perform a 10-fold cross validation
    cv = KFoldCrossValidation(model, k=10)
    cv.validate(X, y)
    # And print the result:
    cv.print_results()
    save_model('model.pkl', model, class_names)
    return [model, class_names]
Example #9
 def __init__(
     self,
     video_src,
     dataset_fn,
     face_sz=(130, 130),
     cascade_fn="/home/philipp/projects/opencv2/OpenCV-2.3.1/data/haarcascades/haarcascade_frontalface_alt2.xml"
 ):
     self.face_sz = face_sz
     self.cam = create_capture(video_src)
     ret, self.frame = self.cam.read()
     self.detector = CascadedDetector(cascade_fn=cascade_fn,
                                      minNeighbors=5,
                                      scaleFactor=1.1)
     # define the feature extraction chain & the classifier
     feature = ChainOperator(TanTriggsPreprocessing(), LBP())
     classifier = NearestNeighbor(dist_metric=ChiSquareDistance())
     # build the predictable model
     self.predictor = PredictableModel(feature, classifier)
     # read the data & compute the predictor
     self.dataSet = DataSet(filename=dataset_fn, sz=self.face_sz)
     self.predictor.compute(self.dataSet.data, self.dataSet.labels)
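A hedged sketch of how the detector and predictor set up in this constructor are typically used on a single camera frame; the helper name, the BGR-to-gray conversion and the cubic resize are assumptions of this sketch, not part of the original class:

import cv2

def predict_faces(app, frame):
    # app is an instance of the class above; frame is one BGR image from app.cam
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    results = []
    # detect() is assumed to yield (x0, y0, x1, y1) boxes, as in the facerec video examples
    for (x0, y0, x1, y1) in app.detector.detect(gray):
        face = cv2.resize(gray[y0:y1, x0:x1], app.face_sz,
                          interpolation=cv2.INTER_CUBIC)
        results.append(((x0, y0, x1, y1), app.predictor.predict(face)))
    return results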
Example #10
class ProPos:
    face_cascade = cv2.CascadeClassifier(
        'haarcascades/haarcascade_frontalface_default.xml')
    model = PredictableModel(Fisherfaces(), NearestNeighbor())
    cap = cv2.VideoCapture(0)
    camera_id = 1
    url = 'http://localhost:8080/'
    last_face = ''

    recognizer = cv2.createLBPHFaceRecognizer()
    path = 'img_db/'

    def __init__(self):
        print('Working')

    def read_images(self, path, size=(256, 256)):
        c = 0
        x, y = [], []
        folder_names = []

        for dirname, dirnames, filenames in os.walk(path):
            for subdirname in dirnames:
                folder_names.append(subdirname)
                subject_path = os.path.join(dirname, subdirname)
                for filename in os.listdir(subject_path):
                    try:
                        im = cv2.imread(os.path.join(subject_path, filename),
                                        cv2.IMREAD_GRAYSCALE)

                        # resize to given size (if given)
                        if (size is not None):
                            im = cv2.resize(im, size)
                        x.append(np.asarray(im, dtype=np.uint8))
                        y.append(c)
                    except IOError as e:
                        print("I/O error({0}): {1}".format(e.errno, e.strerror))
                    except:
                        print("Unexpected error:", sys.exc_info()[0])
                        raise
Example #11
from facerec.feature import Fisherfaces, PCA, Identity
from facerec.classifier import NearestNeighbor
from facerec.model import PredictableModel
from PIL import Image
import numpy as np
import sys, os
import time
#sys.path.append("../..")
import cv2
import multiprocessing

model = PredictableModel(PCA(), NearestNeighbor())

vc = cv2.VideoCapture(0)
# Choosing the haar cascade for face detection
face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_alt_tree.xml')


# Reads the database of faces
def read_images(path, sz=(256, 256)):
    # Reads the images in a given folder, resizes images on the fly if size is given.
    # Args:
    #     path: Path to a folder with subfolders representing the subjects (persons).
    #     sz: A tuple (width, height); images are resized to this size if given.
    # Returns:
    #     A list [X,y]
    #         X: The images, which is a Python list of numpy arrays.
    #         y: The corresponding labels (the unique number of the subject, person) in a Python list.
    c = 0
    X, y = [], []
Example #12
        if feature_parameter in m:
            if feature_parameter == 'LPQ':
                feature = SpatialHistogram(LPQ())
            elif feature_parameter == 'fisher80':
                feature = Fisherfaces(80)
            elif feature_parameter == 'pca80':
                feature = PCA(80)
            else:
                feature = m[feature_parameter]()

    # Try a 3-NN classifier with various distance metrics (results noted per metric):
    # classifier = NearestNeighbor(dist_metric=EuclideanDistance(), k=3) # decent, [1.7472255, 1.80661233, 1.89985602], only the first one correct
    # classifier = NearestNeighbor(dist_metric=CosineDistance(), k=3) # decent, results come out as negative numbers ([-0.72430667, -0.65913855, -0.61865271])
    # classifier = NearestNeighbor(dist_metric=NormalizedCorrelation(), k=3) # fairly good, 0.28873109, 0.35998333, 0.39835315 (only the first one correct)
    classifier = NearestNeighbor(dist_metric=ChiSquareDistance(), k=3)  # good, 32.49907228, 44.53673458, 45.39480197, only the last one wrong
    # classifier = NearestNeighbor(dist_metric=HistogramIntersection(), k=3) # performs poorly
    # classifier = NearestNeighbor(dist_metric=L1BinRatioDistance(), k=3) # quite good, [36.77156378, 47.84164013, 52.63872497], last one wrong
    # classifier = NearestNeighbor(dist_metric=ChiSquareBRD(), k=3) # 36.87781902, 44.06119053, 46.40875114, last one wrong

    # Define the model as the combination
    # model = PredictableModel(feature=feature, classifier=classifier)
    # Compute the model on the given data (in X) and labels (in y):

    feature = ChainOperator(TanTriggsPreprocessing(), feature)
    # classifier = NearestNeighbor()
    model = PredictableModel(feature, classifier)

    # images in one list, id's on another
    id_list, face_list = zip(*input_faces)
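The snippet stops just before training; Example #14 below completes the same pattern by feeding the unzipped lists to the model, roughly:

# Continuation as in Example #14: train on the unzipped image list and id list.
model.compute(face_list, id_list)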
Example #13
 yale_filter = YaleBaseFilter(-25, 25, -25, 25)
 # Now read in the image data. This must be a valid path!
 [X, y] = read_images(sys.argv[1], yale_filter)
 # Then set up a handler for logging:
 handler = logging.StreamHandler(sys.stdout)
 formatter = logging.Formatter(
     '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
 handler.setFormatter(formatter)
 # Add handler to facerec modules, so we see what's going on inside:
 logger = logging.getLogger("facerec")
 logger.addHandler(handler)
 logger.setLevel(logging.DEBUG)
 # Define PCA as the feature extraction method:
 feature = PCA()
 # Define a 1-NN classifier with Euclidean Distance:
 classifier = NearestNeighbor(dist_metric=EuclideanDistance(), k=1)
 # Define the model as the combination
 model = PredictableModel(feature=feature, classifier=classifier)
 # Compute the model on the given data (in X) and labels (in y):
 model.compute(X, y)
 # Then turn the first (at most) 16 eigenvectors into grayscale
 # images (note: eigenvectors are stored by column!)
 E = []
 for i in range(min(model.feature.eigenvectors.shape[1], 16)):
     e = model.feature.eigenvectors[:, i].reshape(X[0].shape)
     E.append(minmax_normalize(e, 0, 255, dtype=np.uint8))
 # Plot them and store the plot to "python_fisherfaces_fisherfaces.pdf"
 subplot(title="Fisherfaces",
         images=E,
         rows=4,
         cols=4,
Example #14
    def __init__(self,
                 database_folder,
                 feature_parameter="LPQ",
                 metric="chi",
                 k=3):
        self.model = None

        handler = logging.StreamHandler(sys.stdout)
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        handler.setFormatter(formatter)
        logger = logging.getLogger("facerec")
        logger.addHandler(handler)
        logger.setLevel(logging.DEBUG)

        path = database_folder

        start = time.clock()
        input_faces = utils.read_images_from_single_folder(path)
        stop = time.clock()

        print("read {}, images from {} in {} seconds.".format(
            len(input_faces), path, stop - start))

        feature = None
        m = {
            "fisher": Fisherfaces,
            "fisher80": Fisherfaces,
            "pca": PCA,
            "pca10": PCA,
            "lda": LDA,
            "spatial": SpatialHistogram,
            "LPQ": SpatialHistogram
        }

        if feature_parameter in m:
            if feature_parameter == 'LPQ':
                feature = SpatialHistogram(LPQ())
                self.threshold = threshold_function(71.4, 70)
            elif feature_parameter == 'fisher80':
                feature = Fisherfaces(80)
                self.threshold = threshold_function(0.61, 0.5)
            elif feature_parameter == 'fisher':
                feature = Fisherfaces()
                self.threshold = threshold_function(0.61, 0.5)
            elif feature_parameter == 'pca80':
                feature = PCA(80)
            else:
                feature = m[feature_parameter]()

        metric_param = None
        d = {
            "euclid": EuclideanDistance,
            "cosine": CosineDistance,
            "normal": NormalizedCorrelation,
            "chi": ChiSquareDistance,
            "histo": HistogramIntersection,
            "l1b": L1BinRatioDistance,
            "chibrd": ChiSquareBRD
        }
        if metric in d:
            metric_param = d[metric]()
        else:
            metric_param = ChiSquareDistance()

        classifier = NearestNeighbor(dist_metric=metric_param, k=k)
        feature = ChainOperator(TanTriggsPreprocessing(), feature)
        # feature = ChainOperator(TanTriggsPreprocessing(0.1, 10.0, 1.0, 3.0), feature)
        self.model = PredictableModel(feature, classifier)

        # images in one list, id's on another
        id_list, face_list = zip(*input_faces)

        print "Train the model"
        start = time.clock()
        # model.compute(X, y)
        self.model.compute(face_list, id_list)
        stop = time.clock()
        print "Training done in", stop - start, " next...find a face"
Example #15
 yale_subset_0_40 = YaleBaseFilter(0, 40, 0, 40)
 # Now read in the image data. Apply filters, scale to 64 x 64 pixels:
 [X, y] = read_images(sys.argv[1], yale_subset_0_40, sz=(64, 64))
 # Set up a handler for logging:
 handler = logging.StreamHandler(sys.stdout)
 formatter = logging.Formatter(
     '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
 handler.setFormatter(formatter)
 # Add handler to facerec modules, so we see what's going on inside:
 logger = logging.getLogger("facerec")
 logger.addHandler(handler)
 logger.setLevel(logging.INFO)
 # The models we want to evaluate:
 model0 = PredictableModel(
     feature=SpatialHistogram(lbp_operator=ExtendedLBP()),
     classifier=NearestNeighbor(dist_metric=ChiSquareDistance(), k=1))
 model1 = PredictableModel(feature=SpatialHistogram(lbp_operator=LPQ()),
                           classifier=NearestNeighbor(
                               dist_metric=ChiSquareDistance(), k=1))
 # The sigmas we'll apply for each run:
 sigmas = [0]
 print('The experiment will be run %s times!' % ITER_MAX)
 # Initialize experiments (with empty results):
 experiments = {}
 experiments['lbp_model'] = {
     'model': model0,
     'results': {},
     'color': 'r',
     'linestyle': '--',
     'marker': '*'
 }
Example #16
    [X, y] = read_images(database_path)
    # Then set up a handler for logging:
    handler = logging.StreamHandler(sys.stdout)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)
    # Add handler to facerec modules, so we see what's going on inside:
    logger = logging.getLogger("facerec")
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)

    logger.debug("Iniciando treinamento.")
    # Define the Fisherfaces as Feature Extraction method:
    feature = Fisherfaces()
    # Define a k-NN classifier
    classifier = NearestNeighbor(dist_metric=CosineDistance(), k=5)
    # Define the model as the combination
    model = PredictableModel(feature=feature, classifier=classifier)
    # Compute the Fisherfaces on the given data (in X) and labels (in y):
    model.compute(X, y)

    logger.debug(model)

    # Then turn the first (at most) 16 eigenvectors into grayscale
    # images (note: eigenvectors are stored by column!)
    E = []
    for i in range(min(model.feature.eigenvectors.shape[1], 16)):
        e = model.feature.eigenvectors[:, i].reshape(X[0].shape)
        E.append(minmax_normalize(e, 0, 255, dtype=np.uint8))
    # Plot them and store the plot to "fisherfaces.png"
    subplot(title="Fisherfaces",
def run():
    # This is where we write the images, if an output_dir is given
    # in command line:

    # out_dir = None

    # You'll need at least a path to your image data, please see
    # the tutorial coming with this source code on how to prepare
    # your image data:

    # if len(sys.argv) < 2:
    #     print ("USAGE: facerec_demo.py </path/to/images>")
    #     sys.exit()

    # Now read in the image data. This must be a valid path!

    # [X,y] = read_images(sys.argv[1])
    [X, y] = read_images('../data/trainset/')

    # dataset = FilesystemReader(sys.argv[1])
    # Then set up a handler for logging:
    handler = logging.StreamHandler(sys.stdout)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)
    # Add handler to facerec modules, so we see what's going on inside:
    logger = logging.getLogger("facerec")
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)
    # Define the Fisherfaces as Feature Extraction method:
    feature = Fisherfaces()
    # Define an SVM and a 1-NN classifier with Euclidean Distance:
    svm = SVM(C=0.1, kernel='rbf', degree=4, gamma='auto', coef0=0.0)
    knn = NearestNeighbor(dist_metric=EuclideanDistance(), k=1)
    # # Define the model as the combination
    model_svm = PredictableModel(feature=feature, classifier=svm)

    model_knn = PredictableModel(feature=feature, classifier=knn)

    # # Compute the Fisherfaces on the given data (in X) and labels (in y):
    model_svm.compute(X, y)

    model_knn.compute(X, y)
    # E = []
    # for i in range(min(model.feature.eigenvectors.shape[1], 16)):
    #  e = model.feature.eigenvectors[:,i].reshape(X[0].shape)
    #  E.append(minmax_normalize(e,0,255, dtype=np.uint8))
    # subplot(title="Fisherfaces", images=E, rows=4, cols=4, sptitle="Fisherface", colormap=cm.jet, filename="fisherfaces.png")

    # cv = LeaveOneOutCrossValidation(model)
    # print(cv0)
    # cv0.validate(dataset.data,dataset.classes,print_debug=True)
    cv_svm = KFoldCrossValidation(model_svm, k=10)
    cv_knn = KFoldCrossValidation(model_knn, k=10)

    param_grid = [
        {
            'C': [0.05, 0.1, 0.3, 0.5, 1, 2, 5],
            'gamma': [0.001, 0.0001],
            'kernel': ['rbf']
        },
    ]
    [tX, tY] = read_images('../data/testset/')

    # cv_svm.validate(X, y)
    # cv_knn.validate(X, y)

    gs(model_svm, X, y, param_grid)

    count1 = 0
    count2 = 0
    for i in range(len(tY)):
        r1 = model_svm.predict(tX[i])
        r2 = model_knn.predict(tX[i])
        if r1[0] == tY[i]:
            count1 += 1
        if r2[0] == tY[i]:
            count2 += 1

    print('SVM ACC:{0}'.format(count1 / len(tY)))
    print('KNN ACC:{0}'.format(count2 / len(tY)))
    cv_knn.print_results()
    cv_svm.print_results()
Example #18
    # Then set up a handler for logging:
    handler = logging.StreamHandler(sys.stdout)
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)
    # Add handler to facerec modules, so we see what's going on inside:
    logger = logging.getLogger("facerec")
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)

    # going to try all the methods
    # feature = Fisherfaces()
    m = (Fisherfaces(), PCA(), SpatialHistogram(), SpatialHistogram(LPQ()))

    classifiers = (
        # Define a 1-NN classifier with Euclidean Distance:
        NearestNeighbor(dist_metric=EuclideanDistance(), k=3),
        NearestNeighbor(dist_metric=CosineDistance(), k=3),
        NearestNeighbor(dist_metric=NormalizedCorrelation(), k=3),
        NearestNeighbor(dist_metric=ChiSquareDistance(), k=3),
        NearestNeighbor(dist_metric=HistogramIntersection(), k=3),
        NearestNeighbor(dist_metric=L1BinRatioDistance(), k=3),
        NearestNeighbor(dist_metric=ChiSquareBRD(), k=3),
    )

    def test_one(idx):
        tt, pt, res_list = test_one_method(input_faces, test_faces, m[idx], classifiers[idx], True)
        print(tt, ",", pt)
        for id, guess, rm in res_list:
            labels = rm['labels']
            distances = rm['distances']
            # print id, guess, labels[0], labels[1], labels[2], distances[0], distances[1], distances[2]
Example #19
     print "USAGE: lpq_experiment.py </path/to/images>"
     sys.exit()
 # Define filters for the Dataset:
 yale_subset_0_40 = YaleBaseFilter(0, 40, 0, 40)
 # Now read in the image data. Apply filters, scale to 64 x 64 pixels:
 [X,y] = read_images(sys.argv[1], yale_subset_0_40, sz=(64,64))
 # Set up a handler for logging:
 handler = logging.StreamHandler(sys.stdout)
 formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
 handler.setFormatter(formatter)
 # Add handler to facerec modules, so we see what's going on inside:
 logger = logging.getLogger("facerec")
 logger.addHandler(handler)
 logger.setLevel(logging.INFO)
 # The models we want to evaluate:
 model0 = PredictableModel(feature=SpatialHistogram(lbp_operator=ExtendedLBP()), classifier=NearestNeighbor(dist_metric=ChiSquareDistance(), k=1))
 model1 = PredictableModel(feature=SpatialHistogram(lbp_operator=LPQ()), classifier=NearestNeighbor(dist_metric=ChiSquareDistance(), k=1))
 # The sigmas we'll apply for each run:
 sigmas = [0]
 print('The experiment will be run %s times!' % ITER_MAX)
 # Initialize experiments (with empty results):
 experiments = {}
 experiments['lbp_model'] = { 'model': model0, 'results' : {}, 'color' : 'r', 'linestyle' : '--', 'marker' : '*'} 
 experiments['lpq_model'] = { 'model': model1, 'results' : {}, 'color' : 'b', 'linestyle' : '--', 'marker' : 's'}
 # Loop to acquire the results for each experiment:
 for sigma in sigmas:
     print "Setting sigma=%s" % sigma
     for key, value in experiments.iteritems():
         print 'Running experiment for model=%s' % key
         # Define the validators for the model:
         cv0 = SimpleValidation(value['model'])
Example #20
from facerec.feature import Fisherfaces
from facerec.classifier import NearestNeighbor
from facerec.model import PredictableModel
from PIL import Image
import numpy as np
import sys, os
import time
#sys.path.append("../..")
import cv2
import multiprocessing



model = PredictableModel(Fisherfaces(), NearestNeighbor())

vc=cv2.VideoCapture(0)
face_cascade = cv2.CascadeClassifier('path to the classifier: haarcascade_frontalface_alt_tree.xml (suggested)')


# once a database of faces has been obtained (next step), the
def read_images(path, sz=(256,256)):
    """Reads the images in a given folder, resizes images on the fly if size is given.

    Args:
        path: Path to a folder with subfolders representing the subjects (persons).
        sz: A tuple (width, height); images are resized to this size if given.

    Returns:
        A list [X,y]
#
# Set up the Haar cascade to detect (not recognize) the faces
#
#
# We're going to use the Fisherfaces face recognition module
#

initial_time = time.time()
print "Initializing Haar cascades for face, eyes, nose and mouth detection: "
#
# This was prior to using the TanTriggsPreprocessing, we can go back
#model = PredictableModel(Fisherfaces(), NearestNeighbor())

feature = ChainOperator(TanTriggsPreprocessing(), Fisherfaces())
classifier = NearestNeighbor()
model = PredictableModel(feature, classifier)

face_cascade = cv2.CascadeClassifier(haarcascade)
eye_cascade = cv2.CascadeClassifier(eyehaarcascade)
nose_cascade = cv2.CascadeClassifier(nosehaarcascade)
mouth_cascade = cv2.CascadeClassifier(mouthhaarcascade)
print "Initialization completed in {0:.2f} seconds.\n".format(time.time() - initial_time)

#
# Main loop
#   Press "l" to learn a new image
#   Press "r" to reload image database
#   Press "v" to toggle voice synthesis
#   Press "b" for best guess of image
#   Press "e" to toggle eye detection