Example #1
if __name__ == "__main__":
    args = parser.parse_args()
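    # Expand the CLI argument lists into the full grid of model configurations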
    configs = generate_all_model_configs(
        activations=args.activation,
        init_methods=args.init,
        max_ic50_values=args.max_ic50,
        dropout_values=args.dropout,
        minibatch_sizes=args.minibatch_size,
        embedding_sizes=args.embedding_size,
        n_pretrain_epochs_values=args.pretrain_epochs,
        n_training_epochs_values=args.training_epochs,
        hidden_layer_sizes=args.hidden_layer_size,
        learning_rates=args.learning_rate,
        optimizers=args.optimizer)

    print("Total # configurations = %d" % len(configs))
    training_datasets, _ = load_data(
        args.binding_data_csv_path,
        max_ic50=args.max_ic50,
        peptide_length=9,
        binary_encoding=False)
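    # Evaluate every configuration by per-allele cross-validation and collect the results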
    combined_df = evaluate_model_configs(
        configs=configs,
        results_filename=args.output,
        train_fn=lambda config: evaluate_model_config_by_cross_validation(
            config,
            training_datasets,
            min_samples_per_allele=args.min_samples_per_allele,
            cv_folds=args.cv_folds))
    hyperparameter_performance(combined_df)
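generate_all_model_configs itself is not shown in these examples. Below is a minimal sketch of such a grid generator, assuming it simply takes the Cartesian product of the supplied hyperparameter lists and returns one plain config dict per combination (the real function presumably returns a richer config object; names here are illustrative):

from itertools import product

def generate_all_model_configs(**hyperparameter_lists):
    # One configuration per element of the Cartesian product of all the
    # supplied value lists, returned as a list so len() works on the result.
    names = sorted(hyperparameter_lists)
    return [
        dict(zip(names, values))
        for values in product(*(hyperparameter_lists[name] for name in names))
    ]

# Example: two activations x two learning rates -> four configurations.
assert len(generate_all_model_configs(
    activations=["relu", "tanh"],
    learning_rates=[0.001, 0.01])) == 4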
Example #2
if __name__ == "__main__":
    args = parser.parse_args()
    # Assumed opening: a ModelConfig-style container consumed by make_model()
    # below; only the keyword arguments from init= onward are certain.
    config = ModelConfig(
        init=args.init,
        n_pretrain_epochs=args.pretrain_epochs,
        n_epochs=args.training_epochs,
        dropout_probability=args.dropout,
        max_ic50=args.max_ic50,
        minibatch_size=args.minibatch_size,
        learning_rate=args.learning_rate,
        optimizer=args.optimizer)

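    # Build an ensemble of args.ensemble_size models sharing the same configuration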
    models = [make_model(config) for _ in range(args.ensemble_size)]

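    # An embedding size of 0 means peptides are binary-encoded rather than passed through a learned embedding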
    binary_encoding = (args.embedding_size == 0)

    training_datasets, _ = load_data(
        filename=args.training_csv,
        peptide_length=9,
        max_ic50=args.max_ic50,
        binary_encoding=binary_encoding)

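    # Pool the training examples from all alleles into single X/Y arrays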
    X_all = np.vstack([dataset.X for dataset in training_datasets.values()])
    Y_all = np.concatenate([
        dataset.Y
        for dataset in training_datasets.values()
    ])

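    # Pretrain every ensemble member on the pooled, multi-allele data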
    for model in models:
        model.fit(
            X_all,
            Y_all,
            nb_epoch=args.pretrain_epochs,
            batch_size=args.minibatch_size,