def main():
    """Train a siamese network on the whale dataset loaded from .npy files.

    Loads pre-built pair data (two image arrays per split, one per siamese
    branch) plus labels, builds the network, and runs training.
    """
    learning_rate = 10e-4  # == 1e-3; the log dir name below reflects this
    batch_size = 32
    epochs = 15

    # Per-layer learning-rate multipliers (every layer trains at the base rate).
    learning_rate_multipliers = {
        'Conv1': 1, 'Conv2': 1, 'Conv3': 1, 'Conv4': 1, 'Dense1': 1,
    }
    # Per-layer l2-regularization penalization.
    l2_penalization = {
        'Conv1': 1e-2, 'Conv2': 1e-2, 'Conv3': 1e-2, 'Conv4': 1e-2,
        'Dense1': 1e-4,
    }
    # Path where the TensorBoard logs will be saved.
    tensorboard_log_path = './logs/siamese_net_lr10e-4'
    siamese_network = SiameseNetwork(
        learning_rate=learning_rate,
        batch_size=batch_size,
        epochs=epochs,
        learning_rate_multipliers=learning_rate_multipliers,
        l2_regularization_penalization=l2_penalization,
        tensorboard_log_path=tensorboard_log_path)

    # Data: each *_data is a two-element list [branch_0_images, branch_1_images]
    # taken from the first two entries of the saved array.
    os.chdir("/data8t/ljq/whale_data/whale_data/siamese_networks/")
    train_data_temp = np.load('training_data.npy')
    train_data = [train_data_temp[0], train_data_temp[1]]
    train_label = np.load('training_label.npy')
    val_data_temp = np.load('validation_data.npy')
    val_data = [val_data_temp[0], val_data_temp[1]]
    val_label = np.load('validation_label.npy')

    siamese_network.train_siamese_network(model_name='siamese_net_whale',
                                          train_data=train_data,
                                          train_label=train_label,
                                          val_data=val_data,
                                          val_label=val_label)
def main():
    """Train a siamese network on simulated data and report one-shot accuracy.

    Trains via `train_siamese_network`, then reloads the best-validation
    checkpoint and evaluates with the character loader's one-shot test.
    """
    dataset_path = 'gen_simulated_data'
    use_augmentation = False
    learning_rate = 10e-4  # == 1e-3; the log dir name below reflects this
    batch_size = 32
    # Per-layer learning-rate multipliers (every layer trains at the base rate).
    learning_rate_multipliers = {
        'Conv1': 1, 'Conv2': 1, 'Conv3': 1, 'Conv4': 1, 'Dense1': 1,
    }
    # Per-layer l2-regularization penalization.
    l2_penalization = {
        'Conv1': 1e-2, 'Conv2': 1e-2, 'Conv3': 1e-2, 'Conv4': 1e-2,
        'Dense1': 1e-4,
    }
    # Path where the TensorBoard logs will be saved.
    tensorboard_log_path = './logs/siamese_net_lr10e-4'
    siamese_network = SiameseNetwork(
        dataset_path=dataset_path,
        learning_rate=learning_rate,
        batch_size=batch_size,
        use_augmentation=use_augmentation,
        learning_rate_multipliers=learning_rate_multipliers,
        l2_regularization_penalization=l2_penalization,
        tensorboard_log_path=tensorboard_log_path)
    # Final layer-wise momentum (mu_j in the paper).
    momentum = 0.9
    # Linear epoch slope evolution of the momentum.
    momentum_slope = 0.01
    # Support set size: the N in each 1:N one-shot image evaluation.
    support_set_size = 20
    # Number of one-shot tasks evaluated per bucket during evaluation.
    number_of_tasks_per_bucket = 20
    evaluate_each = 500
    number_of_train_iterations = 1000000

    validation_accuracy = siamese_network.train_siamese_network(
        number_of_iterations=number_of_train_iterations,
        support_set_size=support_set_size,
        final_momentum=momentum,
        momentum_slope=momentum_slope,
        evaluate_each=evaluate_each,
        model_name='siamese_net_lr10e-4')
    if validation_accuracy == 0:
        evaluation_accuracy = 0
    else:
        # Load the weights with best validation accuracy.
        # BUG FIX: path was '.models/...' (a hidden '.models' directory);
        # the sibling scripts in this file save/load under './models/'.
        siamese_network.model.load_weights('./models/siamese_net_lr10e-4.h5')
        evaluation_accuracy = siamese_network.chars_loader.one_shot_test(
            siamese_network.model, support_set_size,
            number_of_tasks_per_bucket, False)

    print('Final Evaluation Accuracy = ' + str(evaluation_accuracy))
def main():
    """Train a siamese network on the Omniglot dataset and report one-shot accuracy."""
    dataset_path = 'Omniglot Dataset'
    use_augmentation = True
    learning_rate = 10e-4
    batch_size = 256

    # Per-layer learning-rate multipliers: every layer trains at the base rate.
    learning_rate_multipliers = dict.fromkeys(
        ('Conv1', 'Conv2', 'Conv3', 'Conv4', 'Dense1'), 1)
    # Per-layer l2-regularization: 1e-2 on the conv stack, 1e-4 on the dense head.
    l2_penalization = dict.fromkeys(('Conv1', 'Conv2', 'Conv3', 'Conv4'), 1e-2)
    l2_penalization['Dense1'] = 1e-4

    # Path where the TensorBoard logs will be saved.
    tensorboard_log_path = './logs/siamese_net_lr10e-4'
    siamese_network = SiameseNetwork(
        dataset_path=dataset_path,
        learning_rate=learning_rate,
        batch_size=batch_size,
        use_augmentation=use_augmentation,
        learning_rate_multipliers=learning_rate_multipliers,
        l2_regularization_penalization=l2_penalization,
        tensorboard_log_path=tensorboard_log_path)

    # Final layer-wise momentum (mu_j in the paper) and its linear epoch slope.
    momentum = 0.9
    momentum_slope = 0.01
    support_set_size = 20
    evaluate_each = 1000
    number_of_train_iterations = 125000

    validation_accuracy = siamese_network.train_siamese_network(
        number_of_iterations=number_of_train_iterations,
        support_set_size=support_set_size,
        final_momentum=momentum,
        momentum_slope=momentum_slope,
        evaluate_each=evaluate_each,
        model_name='siamese_net_lr10e-4')

    if validation_accuracy == 0:
        evaluation_accuracy = 0
    else:
        # Reload the checkpoint with the best validation accuracy before testing.
        siamese_network.model.load_weights('./models/siamese_net_lr10e-4.h5')
        evaluation_accuracy = siamese_network.omniglot_loader.one_shot_test(
            siamese_network.model, 20, 40, False)

    print('Final Evaluation Accuracy = ' + str(evaluation_accuracy))
# Esempio n. 4
# 0
def main():
    """Train a siamese network on the NOMASK split dataset and report one-shot accuracy."""
    dataset_path = 'data/split_dataset_NOMASK/train'
    model_name = 'model_train_NOMASK'

    use_augmentation = True
    learning_rate = 1e-2
    batch_size = 16

    layer_names = ('Conv1', 'Conv2', 'Conv3', 'Conv4', 'Dense1')
    # Per-layer learning-rate multipliers: uniform across all layers.
    learning_rate_multipliers = {layer: 1 for layer in layer_names}
    # Per-layer l2-regularization: 1e-2 on the conv stack, 1e-4 on the dense head.
    l2_penalization = {layer: 1e-2 for layer in layer_names[:4]}
    l2_penalization['Dense1'] = 1e-4

    # Path where the TensorBoard logs will be saved.
    tensorboard_log_path = f'./logs/{model_name}'
    siamese_network = SiameseNetwork(
        dataset_path=dataset_path,
        learning_rate=learning_rate,
        batch_size=batch_size, use_augmentation=use_augmentation,
        learning_rate_multipliers=learning_rate_multipliers,
        l2_regularization_penalization=l2_penalization,
        tensorboard_log_path=tensorboard_log_path
    )

    momentum = 0.9          # final layer-wise momentum (mu_j in the paper)
    momentum_slope = 0.01   # linear epoch slope evolution
    support_set_size = 20
    evaluate_each = 500
    number_of_train_iterations = 30000

    validation_accuracy = siamese_network.train_siamese_network(
        number_of_iterations=number_of_train_iterations,
        support_set_size=support_set_size,
        final_momentum=momentum,
        momentum_slope=momentum_slope,
        evaluate_each=evaluate_each,
        model_name=model_name)

    if validation_accuracy == 0:
        evaluation_accuracy = 0
    else:
        # Reload the checkpoint with the best validation accuracy before testing.
        siamese_network.model.load_weights(f'./models/{model_name}.h5')
        evaluation_accuracy = siamese_network.omniglot_loader.one_shot_test(
            siamese_network.model, 20, 40, False)

    print('Final Evaluation Accuracy = ' + str(evaluation_accuracy))
# Esempio n. 5
# 0
    def bayesian_optimization_function(x):
        """Objective for Bayesian optimization of siamese-network hyperparameters.

        x: 2-D array whose single row packs, in column order: learning rate,
        final momentum, momentum slope, five learning-rate multipliers
        (Conv1-4, Dense1) and five l2 penalizations (Conv1-4, Dense1).

        Trains a model with those settings and returns 1 - evaluation_accuracy
        so the optimizer can minimize.
        """
        dataset_path = 'Omniglot Dataset'

        current_learning_rate = float(x[:, 0])
        current_momentum = float(x[:, 1])
        current_momentum_slope = float(x[:, 2])
        current_conv1_multiplier = float(x[:, 3])
        current_conv2_multiplier = float(x[:, 4])
        current_conv3_multiplier = float(x[:, 5])
        current_conv4_multiplier = float(x[:, 6])
        current_dense1_multiplier = float(x[:, 7])
        current_conv1_penalization = float(x[:, 8])
        current_conv2_penalization = float(x[:, 9])
        current_conv3_penalization = float(x[:, 10])
        current_conv4_penalization = float(x[:, 11])
        current_dense1_penalization = float(x[:, 12])

        model_name = 'siamese_net_lr_' + str(current_learning_rate) + \
            'momentum_' + str(current_momentum) + '_slope_' + \
            str(current_momentum_slope)

        # Give each trial its own numbered TensorBoard log directory.
        global current_model_number
        current_model_number += 1
        tensorboard_log_path = './logs/' + str(current_model_number)

        # Per-layer learning-rate multipliers for this trial.
        learning_rate_multipliers = {
            'Conv1': current_conv1_multiplier,
            'Conv2': current_conv2_multiplier,
            'Conv3': current_conv3_multiplier,
            'Conv4': current_conv4_multiplier,
            'Dense1': current_dense1_multiplier,
        }
        # Per-layer l2-regularization penalization for this trial.
        l2_penalization = {
            'Conv1': current_conv1_penalization,
            'Conv2': current_conv2_penalization,
            'Conv3': current_conv3_penalization,
            'Conv4': current_conv4_penalization,
            'Dense1': current_dense1_penalization,
        }
        # Drop graph/session state left over from the previous trial's model.
        K.clear_session()
        siamese_network = SiameseNetwork(
            dataset_path=dataset_path,
            learning_rate=current_learning_rate,
            batch_size=32,
            use_augmentation=True,
            learning_rate_multipliers=learning_rate_multipliers,
            l2_regularization_penalization=l2_penalization,
            tensorboard_log_path=tensorboard_log_path)

        # BUG FIX: current_model_number was incremented a second time here with
        # no use in between, so each trial consumed two numbers and log
        # directories skipped every other index; one increment per trial.

        support_set_size = 20
        evaluate_each = 500
        number_of_train_iterations = 100000

        validation_accuracy = siamese_network.train_siamese_network(
            number_of_iterations=number_of_train_iterations,
            support_set_size=support_set_size,
            final_momentum=current_momentum,
            momentum_slope=current_momentum_slope,
            evaluate_each=evaluate_each,
            model_name=model_name)

        if validation_accuracy == 0:
            evaluation_accuracy = 0
        else:
            # Load the weights with best validation accuracy.
            siamese_network.model.load_weights('models/' + model_name + '.h5')
            evaluation_accuracy = siamese_network.data_loader.one_shot_test(
                siamese_network.model, 20, 40, False)
        print("Model: " + model_name + ' | Accuracy: ' +
              str(evaluation_accuracy))
        K.clear_session()
        # Bayesian optimizers minimize, so return the error rate.
        return 1 - evaluation_accuracy