Example #1
def get_model(image_size, subject_names):
    """ Returns the prediction model.
    """
    # Define the Fisherfaces method as the feature extraction method:
    feature = Fisherfaces()
    # Define a 1-NN classifier with Euclidean Distance:
    classifier = NearestNeighbor(dist_metric=EuclideanDistance(), k=1)
    # Return the model:
    return ExtendedPredictableModel(feature=feature, classifier=classifier, image_size=image_size, subject_names=subject_names)
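For context, a minimal usage sketch (not part of the original example): the returned model is trained with compute() and queried with predict(), mirroring the later examples. The image path, the image size, and the three-value read_images helper (as in Example #6) are assumptions.

[X, y, subject_names] = read_images('/path/to/images')       # hypothetical training data
model = get_model(image_size=(100, 100), subject_names=subject_names)
model.compute(X, y)               # learn the Fisherfaces subspace and fit the 1-NN classifier
prediction = model.predict(X[0])  # prediction[0] is the predicted label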
Example #2
class Classifier(Enum):
    svm = SVM()
    svm_linear = SVM(
        "-s 2 -t 0 -n 0.3 -q"
    )  # One-class SVM, linear kernel, nu = 0.3
    svm_rbf = SVM(
        '-s 2 -t 2 -q')  # One-class SVM, RBF kernel
    svm_sigmoid = SVM(
        '-s 2 -t 3 -n 0.7 -q'
    )  # One-class SVM, sigmoid kernel, nu = 0.7

    euclidean = NearestNeighbor(dist_metric=EuclideanDistance(), k=1)
    chisquare = NearestNeighbor(dist_metric=ChiSquareDistance(), k=1)
    euclidean3 = NearestNeighbor(dist_metric=EuclideanDistance(), k=3)
    chisquare3 = NearestNeighbor(dist_metric=ChiSquareDistance(), k=3)
    euclidean5 = NearestNeighbor(dist_metric=EuclideanDistance(), k=5)
    chisquare5 = NearestNeighbor(dist_metric=ChiSquareDistance(), k=5)
    euclidean7 = NearestNeighbor(dist_metric=EuclideanDistance(), k=7)
    chisquare7 = NearestNeighbor(dist_metric=ChiSquareDistance(), k=7)
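A hedged sketch (not from the original) of how such an enum might be consumed: each member's value is the configured classifier, which can be paired with a feature extractor and cross-validated. The Fisherfaces feature and the data-loading call are assumptions.

[X, y] = read_images('/path/to/images')   # hypothetical dataset
for member in Classifier:
    model = PredictableModel(feature=Fisherfaces(), classifier=member.value)
    model.compute(X, y)
    cv = KFoldCrossValidation(model, k=10)
    cv.validate(X, y)
    print(member.name)
    cv.print_results()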
Example #3
def get_model(image_size, subject_names):

    feature = Fisherfaces()

    classifier = NearestNeighbor(dist_metric=EuclideanDistance(), k=1)

    return ExtendedPredictableModel(feature=feature,
                                    classifier=classifier,
                                    image_size=image_size,
                                    subject_names=subject_names)
Example #4
def get_model(numeric_dataset, model_filename=None):
    feature = ChainOperator(Resize((128,128)), Fisherfaces())
    classifier = NearestNeighbor(dist_metric=EuclideanDistance(), k=1)
    inner_model = PredictableModel(feature=feature, classifier=classifier)
    model = PredictableModelWrapper(inner_model)
    model.set_data(numeric_dataset)
    model.compute()
    if model_filename is not None:
        save_model(model_filename, model)
    return model
Example #5
def get_model(image_size, subject_names):
    """ This method returns the PredictableModel which is used to learn a model
        for possible further usage. If you want to define your own model, this
        is the method to return it from!
    """
    # Define the Fisherfaces Method as Feature Extraction method:
    feature = Fisherfaces()
    # Define a 1-NN classifier with Euclidean Distance:
    classifier = NearestNeighbor(dist_metric=EuclideanDistance(), k=1)
    # Return the model as the combination:
    return ExtendedPredictableModel(feature=feature, classifier=classifier, image_size=image_size, subject_names=subject_names)
Example #6
def train(train_path):
    # Now read in the image data. This must be a valid path!
    [X, y, class_names] = read_images(train_path)
    print(X, y, class_names)
    # Then set up a handler for logging:
    handler = logging.StreamHandler(sys.stdout)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)
    # Add handler to facerec modules, so we see what's going on inside:
    logger = logging.getLogger("facerec")
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)
    # Define the Fisherfaces as Feature Extraction method:
    feature = Fisherfaces()
    # Define a 1-NN classifier with Euclidean Distance:
    classifier = NearestNeighbor(dist_metric=EuclideanDistance(), k=1)
    # Define the model as the combination
    model = PredictableModel(feature=feature, classifier=classifier)
    # Compute the Fisherfaces on the given data (in X) and labels (in y):
    model.compute(X, y)
    # Then turn the first (at most) 16 eigenvectors into grayscale
    # images (note: eigenvectors are stored by column!)
    E = []
    for i in range(min(model.feature.eigenvectors.shape[1], 16)):
        e = model.feature.eigenvectors[:, i].reshape(X[0].shape)
        E.append(minmax_normalize(e, 0, 255, dtype=np.uint8))
    # Plot them and store the plot to "fisherfaces.png":
    subplot(title="Fisherfaces",
            images=E,
            rows=4,
            cols=4,
            sptitle="Fisherface",
            colormap=cm.jet,
            filename="fisherfaces.png")
    # Perform a 10-fold cross validation
    cv = KFoldCrossValidation(model, k=10)
    cv.validate(X, y)
    # And print the result:
    cv.print_results()
    save_model('model.pkl', model, class_names)
    return [model, class_names]
Example #7
 yale_filter = YaleBaseFilter(-25, 25, -25, 25)
 # Now read in the image data. This must be a valid path!
 [X, y] = read_images(sys.argv[1], yale_filter)
 # Then set up a handler for logging:
 handler = logging.StreamHandler(sys.stdout)
 formatter = logging.Formatter(
     '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
 handler.setFormatter(formatter)
 # Add handler to facerec modules, so we see what's going on inside:
 logger = logging.getLogger("facerec")
 logger.addHandler(handler)
 logger.setLevel(logging.DEBUG)
 # Define PCA (Eigenfaces) as the feature extraction method:
 feature = PCA()
 # Define a 1-NN classifier with Euclidean Distance:
 classifier = NearestNeighbor(dist_metric=EuclideanDistance(), k=1)
 # Define the model as the combination
 model = PredictableModel(feature=feature, classifier=classifier)
 # Compute the PCA subspace on the given data (in X) and labels (in y):
 model.compute(X, y)
 # Then turn the first (at most) 16 eigenvectors into grayscale
 # images (note: eigenvectors are stored by column!)
 E = []
 for i in range(min(model.feature.eigenvectors.shape[1], 16)):
     e = model.feature.eigenvectors[:, i].reshape(X[0].shape)
     E.append(minmax_normalize(e, 0, 255, dtype=np.uint8))
 # Plot the first (at most) 16 eigenvectors:
 subplot(title="Eigenfaces",
         images=E,
         rows=4,
         cols=4)
Example #8
 def __init__(self, dist_metric=EuclideanDistance(), k=1):
     AbstractClassifier.__init__(self)
     self.k = k
     self.dist_metric = dist_metric
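Since the constructor only stores the metric and k, swapping in any of the distance metrics used elsewhere in these examples is a one-line change (a sketch, not part of the original):

# e.g. a 3-NN classifier using cosine distance instead of the Euclidean default:
classifier = NearestNeighbor(dist_metric=CosineDistance(), k=3)
model = PredictableModel(feature=Fisherfaces(), classifier=classifier)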
Example #9
def entrenarModelo(dirImagenes=None, arcModelo=arcModelo):
    if dirImagenes is None:
        print(dirImagenes)
        return 0
    [X,y,clases] = read_images(sys.argv[2])
    modelo = PredictableModel(feature=Fisherfaces(), classifier=NearestNeighbor(dist_metric=EuclideanDistance(), k=1))  # model configuration
    modelo.compute(X, y)
    pkl = open(arcModelo, 'wb')
    cPickle.dump([modelo,clases,tamanioCara],pkl)   # cPickle is used directly instead of save_model so that extra metadata can be stored with the model
    pkl.close()
    validacion = KFoldCrossValidation(modelo, k=10)
    validacion.validate(X, y)
    validacion.print_results()
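Loading such a pickle back for prediction is roughly the inverse of the dump above (a sketch; the variable names mirror the example, and the query image is a hypothetical preprocessed face):

pkl = open(arcModelo, 'rb')
[modelo, clases, tamanioCara] = cPickle.load(pkl)
pkl.close()
prediction = modelo.predict(imagen)   # imagen: a grayscale face of size tamanioCara (hypothetical)
print(clases[prediction[0]])          # map the numeric label back to the subject name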
Example #10
    # Then set up a handler for logging:
    handler = logging.StreamHandler(sys.stdout)
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)
    # Add handler to facerec modules, so we see what's going on inside:
    logger = logging.getLogger("facerec")
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)

    # try all of the feature extraction methods
    # feature = Fisherfaces()
    m = (Fisherfaces(), PCA(), SpatialHistogram(), SpatialHistogram(LPQ()))

    classifiers = (
        # Define a 1-NN classifier with Euclidean Distance:
        NearestNeighbor(dist_metric=EuclideanDistance(), k=3),
        NearestNeighbor(dist_metric=CosineDistance(), k=3),
        NearestNeighbor(dist_metric=NormalizedCorrelation(), k=3),
        NearestNeighbor(dist_metric=ChiSquareDistance(), k=3),
        NearestNeighbor(dist_metric=HistogramIntersection(), k=3),
        NearestNeighbor(dist_metric=L1BinRatioDistance(), k=3),
        NearestNeighbor(dist_metric=ChiSquareBRD(), k=3),
    )

    def test_one(idx):
        tt, pt, res_list = test_one_method(input_faces, test_faces, m[idx], classifiers[idx], True)
        print(tt, ",", pt)
        for id, guess, rm in res_list:
            labels = rm['labels']
            distances = rm['distances']
            # print id, guess, labels[0], labels[1], labels[2], distances[0], distances[1], distances[2]
Example #11
 def __init__(self, dist_metric=EuclideanDistance(), k=1):
     AbstractClassifier.__init__(self)
     self.k = k
     self.dist_metric = dist_metric
     self.X = []
     self.y = np.array([], dtype=np.int32)
Example #12
def model_rebuild(path=os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "res", "train"), feature=PCA(), dist_metric=EuclideanDistance(), k=1, sz=None):
    model_fn = os.path.join(path, "mdl.pkl")
    if os.path.isfile(model_fn):
        os.remove(model_fn)

    [X,y] = read_images(path, sz=sz)
    classifier = NearestNeighbor(dist_metric=dist_metric, k=k)
    model = PredictableModel(feature=feature, classifier=classifier)
    model.compute(X, y)
    save_model(model_fn, model)
    return load_model(model_fn)
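A hedged usage sketch (not in the original); the feature/metric choice and the query image are assumptions, and the training directory is the default baked into the signature above:

model = model_rebuild(feature=Fisherfaces(), dist_metric=ChiSquareDistance(), k=3, sz=(128, 128))
prediction = model.predict(query_image)   # query_image: a grayscale face resized to (128, 128) (hypothetical)
print(prediction[0])                      # the predicted integer label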
Example #13
     sys.exit()
 # Now read in the image data. This must be a valid path!
 [X, y] = read_images(sys.argv[1])
 # Set up a handler for logging:
 handler = logging.StreamHandler(sys.stdout)
 formatter = logging.Formatter(
     '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
 handler.setFormatter(formatter)
 # Add handler to facerec modules, so we see what's going on inside:
 logger = logging.getLogger("facerec")
 logger.addHandler(handler)
 logger.setLevel(logging.DEBUG)
 # The models we want to evaluate:
 model0 = PredictableModel(feature=PCA(num_components=50),
                           classifier=NearestNeighbor(
                               dist_metric=EuclideanDistance(), k=1))
 model1 = PredictableModel(feature=Fisherfaces(),
                           classifier=NearestNeighbor(
                               dist_metric=EuclideanDistance(), k=1))
 model2 = PredictableModel(
     feature=SpatialHistogram(lbp_operator=ExtendedLBP()),
     classifier=NearestNeighbor(dist_metric=ChiSquareDistance(), k=1))
 model3 = PredictableModel(feature=SpatialHistogram(lbp_operator=LPQ()),
                           classifier=NearestNeighbor(
                               dist_metric=ChiSquareDistance(), k=1))
 # I should rewrite the framework to offer a less memory-intense solution here:
 cv0 = KFoldCrossValidation(model0, k=10)
 cv1 = KFoldCrossValidation(model1, k=10)
 cv2 = KFoldCrossValidation(model2, k=10)
 cv3 = KFoldCrossValidation(model3, k=10)
 # Make it a list, so we can iterate through:
def run():
    # This is where we write the images, if an output_dir is given
    # in command line:

    # out_dir = None

    # You'll need at least a path to your image data, please see
    # the tutorial coming with this source code on how to prepare
    # your image data:

    # if len(sys.argv) < 2:
    #     print ("USAGE: facerec_demo.py </path/to/images>")
    #     sys.exit()

    # Now read in the image data. This must be a valid path!

    # [X,y] = read_images(sys.argv[1])
    [X, y] = read_images('../data/trainset/')

    # dataset = FilesystemReader(sys.argv[1])
    # Then set up a handler for logging:
    handler = logging.StreamHandler(sys.stdout)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)
    # Add handler to facerec modules, so we see what's going on inside:
    logger = logging.getLogger("facerec")
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)
    # Define the Fisherfaces as Feature Extraction method:
    feature = Fisherfaces()
    # Define the classifiers: an RBF-kernel SVM and a 1-NN with Euclidean Distance:
    svm = SVM(C=0.1, kernel='rbf', degree=4, gamma='auto', coef0=0.0)
    knn = NearestNeighbor(dist_metric=EuclideanDistance(), k=1)
    # Define the models as feature/classifier combinations:
    model_svm = PredictableModel(feature=feature, classifier=svm)

    model_knn = PredictableModel(feature=feature, classifier=knn)

    # Compute the Fisherfaces on the given data (in X) and labels (in y):
    model_svm.compute(X, y)

    model_knn.compute(X, y)
    # E = []
    # for i in range(min(model.feature.eigenvectors.shape[1], 16)):
    #  e = model.feature.eigenvectors[:,i].reshape(X[0].shape)
    #  E.append(minmax_normalize(e,0,255, dtype=np.uint8))
    # subplot(title="Fisherfaces", images=E, rows=4, cols=4, sptitle="Fisherface", colormap=cm.jet, filename="fisherfaces.png")

    # cv = LeaveOneOutCrossValidation(model)
    # print(cv0)
    # cv0.validate(dataset.data,dataset.classes,print_debug=True)
    cv_svm = KFoldCrossValidation(model_svm, k=10)
    cv_knn = KFoldCrossValidation(model_knn, k=10)

    param_grid = [
        {
            'C': [0.05, 0.1, 0.3, 0.5, 1, 2, 5],
            'gamma': [0.001, 0.0001],
            'kernel': ['rbf']
        },
    ]
    [tX, tY] = read_images('../data/testset/')

    # cv_svm.validate(X, y)
    # cv_knn.validate(X, y)

    gs(model_svm, X, y, param_grid)

    count1 = 0
    count2 = 0
    for i in range(len(tY)):
        r1 = model_svm.predict(tX[i])
        r2 = model_knn.predict(tX[i])
        if r1[0] == tY[i]:
            count1 += 1
        if r2[0] == tY[i]:
            count2 += 1

    print('SVM ACC:{0}'.format(count1 / len(tY)))
    print('KNN ACC:{0}'.format(count2 / len(tY)))
    cv_knn.print_results()
    cv_svm.print_results()
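For reference, a sketch of how the per-image predictions above unpack for the NearestNeighbor model; the 'labels'/'distances' keys are the ones consumed in Example #10:

r = model_knn.predict(tX[0])
predicted_label = r[0]   # the winning class label, compared against tY[i] in the loop above
details = r[1]           # e.g. details['labels'] and details['distances'] hold the
                         # k nearest training labels and their distances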