Example #1
def build_coocc_classifier_dataset(out_file, dataset_dir, entity_db_file,
                                   **kwargs):
    dataset = DatasetLoader(dataset_dir)
    entity_db = EntityDB.load(entity_db_file)

    ret = coocc_classifier.build_dataset(dataset, entity_db, **kwargs)
    joblib.dump(ret, out_file)
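
A minimal invocation sketch for the builder above; the paths are hypothetical, and the same pattern applies to the other build_*_dataset helpers below:

build_coocc_classifier_dataset(
    out_file='coocc_dataset.joblib',   # hypothetical output path
    dataset_dir='data/dataset',        # hypothetical dataset directory
    entity_db_file='data/entity_db')   # hypothetical EntityDB dump
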
Example #2
def build_page_classifier_dataset(out_file, dataset_dir, page_db_file,
                                  entity_db_file, **kwargs):
    dataset = DatasetLoader(dataset_dir)
    entity_db = EntityDB.load(entity_db_file)
    page_db = PageDB(page_db_file, 'r')

    ret = page_classifier.build_dataset(dataset, page_db, entity_db, **kwargs)
    joblib.dump(ret, out_file)
Example #3
def build_scorer_dataset(clf_cache_file, out_file, dataset_dir, entity_db_file,
                         **kwargs):
    clf_cache = joblib.load(clf_cache_file, mmap_mode='r')
    dataset = DatasetLoader(dataset_dir)
    entity_db = EntityDB.load(entity_db_file)

    ret = scorer.build_dataset(dataset, clf_cache, entity_db, **kwargs)

    joblib.dump(ret, out_file)
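
Examples 3 and 4 pair joblib.dump with joblib.load(..., mmap_mode='r'), which memory-maps the cached NumPy arrays instead of reading them fully into RAM. A self-contained round-trip sketch (the /tmp path is arbitrary):

import numpy as np
import joblib

# Dump a dict holding a large array, then load it back memory-mapped.
joblib.dump({'X': np.zeros((1000, 128))}, '/tmp/cache.joblib')
cache = joblib.load('/tmp/cache.joblib', mmap_mode='r')
print(type(cache['X']))  # numpy.memmap: data stays on disk until accessed
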
Example #4
def cache_classifier_results(page_db_file, out_file, dataset_dir,
                             entity_db_file, init, **kwargs):
    page_db = PageDB(page_db_file, 'r')
    dataset = DatasetLoader(dataset_dir)
    entity_db = EntityDB.load(entity_db_file)
    if init:
        initial_data = joblib.load(init)
    else:
        initial_data = {}

    scorer.cache_classifier_results(dataset, initial_data, out_file, page_db,
                                    entity_db, **kwargs)
Example #5
def load_images_and_compute_embeddings(dataset_name, use_raw, min_threshold):
    # Load image dataset (default).
    dataset_path = "data/" + dataset_name + "/"
    print("Loading pictures under '%s'." % dataset_path)
    loader = DatasetLoader()
    faces, labels = loader.load_test_dataset(dataset_path, use_raw,
                                             min_threshold)
    print(">> %d face pictures extracted." % faces.shape[0])

    # Load pre-trained facenet model, and compute embeddings.
    print("Loading pre-trained facenet model at '%s'." %
          facenet_model_filepath)
    facenet = TrainedKerasFacenet(filepath=facenet_model_filepath)
    embeddings = facenet.compute_embbedings(faces)

    # Cache data.
    print("Caching data.. ", end='')
    np.save(cached_labels_filepath + dataset_name, labels)
    np.save(cached_embeddings_filepath + dataset_name, embeddings)
    print("DONE")

    return (embeddings, labels)
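
A hedged usage sketch for the function above; the dataset name 'lfw' and the argument values are assumptions, not taken from the original script:

embeddings, labels = load_images_and_compute_embeddings(
    dataset_name='lfw',   # hypothetical folder under data/
    use_raw=False,
    min_threshold=10)
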
Example #6
def main():
    args = parse_command_line_args()

    # Load dataset of images.
    dataset_path = 'data/personal_faces'
    print("Loading pictures under '%s'." % dataset_path)
    loader = DatasetLoader()
    faces, _ = loader.load_from_folder_recursive(dataset_path, True)
    print(">> %d face pictures extracted." % faces.shape[0])

    # Load pre-trained facenet model, and compute embeddings.
    print("Computing embeddings.. ", end='')
    facenet = TrainedKerasFacenet(
        filepath='model/keras/model/facenet_keras.h5')
    embeddings = facenet.compute_embbedings(faces)
    print("DONE")

    if args.method == "mean-shift":
        bandwidths = [0.55, 0.60, 0.65]
        run_meanshift_clustering(bandwidths, faces, embeddings)
    else:
        K_values = np.arange(15, 21)
        run_clustering_with_k(K_values, args.method, faces, embeddings)
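
run_meanshift_clustering's internals are not shown in this listing; as a rough sketch of what such a bandwidth sweep can look like using scikit-learn's MeanShift (random stand-in embeddings, all values assumed):

import numpy as np
from sklearn.cluster import MeanShift

embeddings = np.random.rand(50, 128)  # stand-in for facenet embeddings
for bw in [0.55, 0.60, 0.65]:
    # Mean shift infers the number of clusters from the bandwidth.
    labels = MeanShift(bandwidth=bw).fit_predict(embeddings)
    print("bandwidth=%.2f -> %d clusters" % (bw, len(np.unique(labels))))
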
Example #7
def build_page_db(dataset_dir, entity_db_file, **kwargs):
    dataset = DatasetLoader(dataset_dir)
    entity_db = EntityDB.load(entity_db_file)
    PageDB.build(dataset, entity_db, **kwargs)
Example #8
def run(dataset_dir, entity_db, **kwargs):
    dataset = DatasetLoader(dataset_dir)
    entity_db = EntityDB.load(entity_db)

    scorer.run(dataset=dataset, entity_db=entity_db, **kwargs)
Example #9
from tensorflow.python.keras.callbacks import TensorBoard
import matplotlib.pyplot as plt
import numpy as np
from os import path
from modules.autoencoder import AutoEncoder
from modules.genderclassifier import GenderClassifier
from modules.facematcher import VGGFace
from modules.san import SemiAdversarial
from utils.dataset_loader import DatasetLoader
from utils.util import save_weights

AE = AutoEncoder(mode='further_train')
GC = GenderClassifier(mode='further_train')
FM = VGGFace()

ds = DatasetLoader()
itr = ds.iterator_further(FM.model)
itr_val = ds.iterator_further_VAL(FM.model)

# Semi-Adversarial Network
SAN = SemiAdversarial(AE.model, GC.model, FM.model)

try:

    train = SAN.model.fit_generator(generator=itr, validation_data=itr_val,
                                    validation_steps=63,  # 852
                                    steps_per_epoch=313,  # 4410
                                    epochs=10, verbose=1, workers=0)

    # print(train.history.keys())
except KeyboardInterrupt:
    # The snippet is truncated here; saving on interrupt mirrors the
    # autoencoder script below, and the name 'san' is an assumption.
    save_weights(SAN.model, 'san')
else:
    save_weights(SAN.model, 'san')
Example #10
'''
This script has been created by Bruno Marino and Gianluca Pepe
'''

from datetime import datetime
from tensorflow.python import keras
from os import path
from utils.dataset_loader import DatasetLoader
from modules.genderclassifier import GenderClassifier
from utils.util import save_weights

ds = DatasetLoader()
itr = ds.iterator_GC()
itr_val = ds.iterator_GC_VAL()

callback = keras.callbacks.EarlyStopping(monitor='val_acc',
                                         min_delta=0,
                                         patience=0,
                                         verbose=0,
                                         mode='auto',
                                         baseline=0.98,  # val_acc is a fraction in [0, 1]; a baseline of 98 could never be reached
                                         restore_best_weights=False)

NN = GenderClassifier()

try:

    NN.model.fit_generator(
        generator=itr,
        validation_data=itr_val,
        validation_steps=416,  # 416
        epochs=10, verbose=1, workers=0,  # assumed; the snippet is truncated here
        callbacks=[callback])  # presumably the EarlyStopping callback defined above
except KeyboardInterrupt:
    save_weights(NN.model, 'genderclassifier')  # assumed name, mirroring the other scripts
else:
    save_weights(NN.model, 'genderclassifier')
Example #11
'''
This script has been created by Bruno Marino and Gianluca Pepe
'''

from datetime import datetime
from utils.dataset_loader import DatasetLoader
from modules.autoencoder import AutoEncoder
from os import path
from utils.util import save_weights

ds = DatasetLoader()
itr = ds.iterator_AE()
itr_val = ds.iterator_AE_VAL()

NN = AutoEncoder()

try:
    NN.model.fit_generator(generator=itr,
                           validation_data=itr_val,
                           validation_steps=426,
                           steps_per_epoch=2355,
                           epochs=10,
                           verbose=1,
                           workers=0)
except KeyboardInterrupt:
    # save the model, appending its creation time to the name
    save_weights(NN.model, 'autoencoder')
else:
    save_weights(NN.model, 'autoencoder')
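
fit_generator, used throughout examples 9 through 11, is deprecated in TensorFlow 2.x, where Model.fit accepts Python generators and keras.utils.Sequence objects directly. A sketch of the equivalent call with the same values (the workers argument was later removed in Keras 3, so it is omitted here):

NN.model.fit(itr,
             validation_data=itr_val,
             validation_steps=426,
             steps_per_epoch=2355,
             epochs=10,
             verbose=1)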