Code example #1
def test_transform_image_dataframe_to_matrix():
    min_images_per_person = [5]
    min_per_person = min_images_per_person[0]
    multi_data = get_mounted_data(min_per_person, min_per_person)
    #Y = multi_data[['name']]
    #X = multi_data[['image_path']]
    #print(Y.head(6))
    #print(X.head(6))
    data_x, data_y = transform_image_dataframe_to_matrix(
        multi_data, 250, 250, 'lfw-dataset/lfw-deepfunneled/lfw-deepfunneled/')
Code example #2
def test_create_dataset_tfRecord():
    min_images_per_person = [100]
    min_per_person = min_images_per_person[0]
    multi_data = get_mounted_data(min_per_person, min_per_person)
    #Y = multi_data[['name']]
    #X = multi_data[['image_path']]
    #print(Y.head(6))
    #print(X.head(6))
    create_dataset_tfRecord(multi_data,
                            'lfw-dataset/lfw-deepfunneled/lfw-deepfunneled/')
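create_dataset_tfRecord is also project code. A hedged sketch of the usual TFRecord-writing pattern it presumably follows, built on the standard tf.train.Example / tf.io.TFRecordWriter API (the output_path default is hypothetical):

import tensorflow as tf


def create_dataset_tfRecord(df, base_dir, output_path='dataset.tfrecord'):
    """Hypothetical sketch: serialize each (image bytes, label) pair into a
    tf.train.Example and append it to a TFRecord file."""
    with tf.io.TFRecordWriter(output_path) as writer:
        for _, row in df.iterrows():
            image_bytes = tf.io.read_file(base_dir + row['image_path']).numpy()
            example = tf.train.Example(features=tf.train.Features(feature={
                'image': tf.train.Feature(bytes_list=tf.train.BytesList(value=[image_bytes])),
                'name': tf.train.Feature(bytes_list=tf.train.BytesList(value=[row['name'].encode('utf-8')])),
            }))
            writer.write(example.SerializeToString())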
Code example #3
def main():
    min_images_per_person = [30]
    models = ["DeepFace"]
    num_folds = 5
    batch_sizes = [60]
    for min_per_person in min_images_per_person:
        for batch in batch_sizes:
            for model in models:
                multi_data = get_mounted_data(min_per_person, min_per_person)
                Y = multi_data[['name']]
                X = multi_data[['image_path']]
                CLASSES = Y.groupby('name').nunique().shape[0]
                print("### run_k_fold ", " min_per_person ", min_per_person, " CLASSES ", CLASSES,
          "model ", model, " batch_size ", batch)
                run_k_fold(multi_data, X, Y, CLASSES, model, batch, num_folds)
                tf.keras.backend.clear_session()
                gc.collect()
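run_k_fold is defined elsewhere in the project. A minimal sketch of the pattern the call site suggests, using sklearn's StratifiedKFold and clearing the Keras session between folds; build_model and load_images are hypothetical helpers standing in for the project's model factory and image loader:

import gc
import tensorflow as tf
from sklearn.model_selection import StratifiedKFold


def run_k_fold(data, X, Y, n_classes, model_name, batch_size, num_folds):
    """Hypothetical sketch: stratified k-fold training loop over the image paths."""
    skf = StratifiedKFold(n_splits=num_folds, shuffle=True, random_state=42)
    for fold, (train_idx, val_idx) in enumerate(skf.split(X, Y['name'])):
        train_df, val_df = data.iloc[train_idx], data.iloc[val_idx]
        model = build_model(model_name, n_classes)   # hypothetical model factory
        x_train, y_train = load_images(train_df)     # hypothetical loader returning arrays and encoded labels
        x_val, y_val = load_images(val_df)
        model.fit(x_train, y_train, batch_size=batch_size,
                  validation_data=(x_val, y_val))
        print('fold', fold, model.evaluate(x_val, y_val, verbose=0))
        tf.keras.backend.clear_session()  # free the graph between folds
        gc.collect()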
Code example #4
def main():
    epoch = 10
    min_images_per_person = [130]#[30,25]  # [25,20]
    models = ["LeNet5"]#,"VGGFace"]#,"AlexNet","DeepFace","VGGFace"]
    num_folds = 2

    # increasing the batch size to 30 gave DeepFace good results; test with the others
    batch_sizes = [30]#[2,4,8,30]
    for min_per_person in min_images_per_person:
        for batch in batch_sizes:
            for model in models:
                multi_data = get_mounted_data(min_per_person, min_per_person)
                Y = multi_data[['name']]
                X = multi_data[['image_path']]
                CLASSES = Y.groupby('name').nunique().shape[0]
                # print("### run_hold_out "," epoch ", epoch, " min_per_person ", min_per_person," CLASSES ", CLASSES,"model ",model," batch_size ",batch)
                # run_hold_out(multi_data, X, Y, CLASSES, epoch, model, batch)
                print("### run_k_fold ", " epoch ", epoch, " min_per_person ", min_per_person, " CLASSES ", CLASSES, " model ", model, " batch_size ", batch)
                run_k_fold(multi_data, X, Y, CLASSES, epoch, model, batch, num_folds)
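The commented-out run_hold_out call hints at a simpler single-split baseline. A hedged sketch under the same assumptions, reusing the hypothetical build_model and load_images helpers from the previous sketch:

from sklearn.model_selection import train_test_split


def run_hold_out(data, X, Y, n_classes, epochs, model_name, batch_size, test_size=0.25):
    """Hypothetical sketch: single stratified hold-out evaluation instead of k folds."""
    train_df, test_df = train_test_split(data, test_size=test_size,
                                         random_state=42, shuffle=True,
                                         stratify=Y)
    model = build_model(model_name, n_classes)  # hypothetical model factory
    x_train, y_train = load_images(train_df)    # hypothetical loader
    x_test, y_test = load_images(test_df)
    model.fit(x_train, y_train, epochs=epochs, batch_size=batch_size)
    print(model.evaluate(x_test, y_test, verbose=0))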
Code example #5
def main():
    epoch = 300
    min_images_per_person = [30]  #[30,25]  # [25,20]
    models = [
        "AlexNet", "DeepFace"
    ]  #["LeNet5","AlexNet","DeepFace"]#["LeNet5","AlexNet","DeepFace","VGGFace"]
    num_folds = 5

    batch_sizes = [30, 60]  #[2,4,8,30]
    for min_per_person in min_images_per_person:
        for batch in batch_sizes:
            for model in models:
                multi_data = get_mounted_data(min_per_person, min_per_person)
                Y = multi_data[['name']]
                X = multi_data[['image_path']]
                CLASSES = Y.groupby('name').nunique().shape[0]
                print("### run_k_fold ", " epoch ", epoch, " min_per_person ",
                      min_per_person, " CLASSES ", CLASSES, "model ", model,
                      " batch_size ", batch)
                run_k_fold(multi_data, X, Y, CLASSES, epoch, model, batch,
                           num_folds)
                gc.collect()
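All of these examples depend on get_mounted_data to build the name/image_path DataFrame, but its implementation is not shown. The sketch below assumes the LFW deep-funneled layout (one sub-directory per person) and guesses that the two arguments are a minimum image count per person and a cap on how many images to keep per person:

import os
import pandas as pd


def get_mounted_data(min_images_per_person, images_per_person,
                     root='lfw-dataset/lfw-deepfunneled/lfw-deepfunneled/'):
    """Hypothetical sketch: keep only people with at least min_images_per_person
    images and take up to images_per_person images from each."""
    rows = []
    for person in sorted(os.listdir(root)):
        person_dir = os.path.join(root, person)
        if not os.path.isdir(person_dir):
            continue
        images = sorted(os.listdir(person_dir))
        if len(images) < min_images_per_person:
            continue
        for image in images[:images_per_person]:
            # image_path is stored relative to the dataset root, as the callers expect
            rows.append({'name': person, 'image_path': os.path.join(person, image)})
    return pd.DataFrame(rows)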
Code example #6
def main():
    min_images_per_person = [30]
    models = ["DeepFace"]
    num_folds = 5
    batch_sizes = [60]

    min_per_person = min_images_per_person[0]

    multi_data = get_mounted_data(min_per_person, min_per_person)
    Y = multi_data[['name']]
    X = multi_data[['image_path']]
    CLASSES = Y.groupby('name').nunique().shape[0]
    # unique person names, in the same sorted order that groupby would produce
    nomes_classes = [str(name) for name in sorted(Y['name'].unique())]
    print('CLASSES')
    print(CLASSES)
    #print(nomes_classes)

    train, test = train_test_split(multi_data,
                                   test_size=0.25,
                                   random_state=42,
                                   shuffle=True,
                                   stratify=multi_data[['name']])
    print('train')
    print(train.shape)
    print(train)
    print(train.groupby('name').count().image_path)
    print('test')
    print(test.shape)
    print(test)
    print(test.groupby('name').count().image_path)

    calfw_df = calfw_mount_data(nomes_classes)
    print(calfw_df.shape)
    calfw_Y = calfw_df[['name']]
    calfw_X = calfw_df[['image_path']]
    calfw_CLASSES = calfw_Y.groupby('name').nunique().shape[0]
    #print(calfw_df.groupby('name')['name'].nunique().reset_index(name="unique")['name'])
    print(calfw_Y)
    print(calfw_X)
    print(calfw_CLASSES)
    ind_counts = calfw_df.groupby('name').count().image_path
    print(ind_counts)
    calfw_classes = [str(name) for name in sorted(calfw_df['name'].unique())]
    print(calfw_classes)

    print(set(nomes_classes) - set(calfw_classes))
    print(set(calfw_classes) - set(nomes_classes))

    for batch in batch_sizes:
        for model in models:
            print("### run_k_fold ", " min_per_person ", min_per_person,
                  " CLASSES ", CLASSES, "model ", model, " batch_size ", batch)
            run_k_fold(calfw_df, X, Y, CLASSES, model, batch, num_folds,
                       nomes_classes)
            tf.keras.backend.clear_session()
            gc.collect()
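calfw_mount_data(nomes_classes) apparently builds the same kind of DataFrame from CALFW, restricted to the people already present in the LFW selection. A hedged sketch assuming CALFW keeps LFW-style file names (Person_Name_0001.jpg) in a single directory; the root path is hypothetical:

import os
import pandas as pd


def calfw_mount_data(class_names, root='calfw/aligned images/'):
    """Hypothetical sketch: keep only CALFW images whose person name
    (the file name minus the trailing index) appears in class_names."""
    wanted = set(class_names)
    rows = []
    for image in sorted(os.listdir(root)):
        name = '_'.join(image.split('_')[:-1])  # "Aaron_Peirsol_0001.jpg" -> "Aaron_Peirsol"
        if name in wanted:
            rows.append({'name': name, 'image_path': image})
    return pd.DataFrame(rows)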