Example #1
from argparse import ArgumentParser


def objective(args):
    parser = ArgumentParser()
    parser.add_argument("-i",
                        "--input_file",
                        type=str,
                        required=True,
                        help="Path to data file")
    parser.add_argument("-o",
                        "--output_dir",
                        type=str,
                        required=True,
                        help="Directory where model will be saved")
    parser.add_argument("-r",
                        "--result_folder",
                        type=str,
                        default="./result/",
                        required=False,
                        help="Folder of result")
    parser.add_argument("-d_e",
                        "--drug_encoding",
                        type=str,
                        default="Transformer",
                        required=False,
                        help="Drug encoding")
    tmp = parser.parse_args()
    base_config = vars(tmp)  # convert the parsed arguments into a dict

    params = {
        "input_file": base_config["input_file"],
        "output_dir": base_config["output_dir"],
        "result_folder": base_config["result_folder"],
        "drug_encoding": base_config["drug_encoding"],
        "input_dim_drug": args["input_dim_drug"],
        "input_dim_protein": args["input_dim_protein"],
        "hidden_dim_drug": args["hidden_dim_drug"],
        "hidden_dim_protein": args["hidden_dim_protein"],
        "cls_hidden_dims": [1024, 1024, 512],
        "batch_size": args["batch_size"],
        "train_epoch": 10,
        "test_every_X_epoch": 20,
        "LR": 0.0001,
        "decay": 0,
        "num_workers": 0,
        "transformer_dropout_rate": args["transformer_dropout_rate"],
        "transformer_emb_size_drug": args["transformer_emb_size_drug"],
        "transformer_n_layer_drug": args["transformer_n_layer_drug"],
        "transformer_intermediate_size_drug": args["transformer_intermediate_size_drug"],
        "transformer_num_attention_heads_drug": args["transformer_num_attention_heads_drug"],
        "transformer_attention_probs_dropout": args["transformer_attention_probs_dropout"],
        "transformer_hidden_dropout_rate": args["transformer_hidden_dropout_rate"],
        "transformer_emb_size_target": args["transformer_emb_size_target"],
        "transformer_n_layer_target": args["transformer_n_layer_target"],
        "transformer_intermediate_size_target": args["transformer_intermediate_size_target"],
        "transformer_num_attention_heads_target": args["transformer_num_attention_heads_target"]
    }
    test_result = train_model(params)
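    # hyperopt minimizes the returned value, so 1 - AUROC turns AUROC maximization into a minimization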
    return 1 - test_result["AUROC"]
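Hyperopt also accepts a dict-valued objective, which lets extra metrics ride along with each trial. A minimal sketch of that variant (build_params is a hypothetical helper standing in for the params block above; train_model as before):

from hyperopt import STATUS_OK

def objective_with_details(args):
    test_result = train_model(build_params(args))  # build_params: hypothetical stand-in for the params dict built above
    return {
        "loss": 1 - test_result["AUROC"],  # the value hyperopt minimizes
        "status": STATUS_OK,
        "auroc": test_result["AUROC"]      # extra keys are kept in the per-trial records
    }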
Example #2
    # config of drug
    # nargs="+" with type=int parses "--mlp_hidden_dims_drug 1024 256 64" into a
    # list of ints; argparse's type=list would split the raw string into characters
    parser.add_argument("--mlp_hidden_dims_drug", nargs="+", type=int,
                        default=[1024, 256, 64], required=False,
                        help="Dimensionality of hidden layers of drug in MLP")

    return parser


# generate the model configuration
def get_model_config(config):
    model_config = generate_config(
        drug_encoding=config.drug_encoding,
        result_folder=config.result_folder,
        input_dim_drug=config.input_dim_drug,
        hidden_dim_drug=config.hidden_dim_drug,
        cls_hidden_dims=config.cls_hidden_dims,
        batch_size=config.batch_size,
        train_epoch=config.train_epoch,
        test_every_X_epoch=config.test_every_X_epoch,
        LR=config.LR,
        decay=config.decay,
        num_workers=config.num_workers,
        mlp_hidden_dims_drug=config.mlp_hidden_dims_drug)
    return model_config


if __name__ == "__main__":
    parser = get_parser()
    config = parser.parse_known_args()[0]
    train_model(config)
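The nargs-based list flag above can be sanity-checked in isolation; a minimal standalone sketch (not part of the original script):

from argparse import ArgumentParser

p = ArgumentParser()
p.add_argument("--mlp_hidden_dims_drug", nargs="+", type=int, default=[1024, 256, 64])
print(p.parse_args([]).mlp_hidden_dims_drug)  # [1024, 256, 64]
print(p.parse_args("--mlp_hidden_dims_drug 512 128".split()).mlp_hidden_dims_drug)  # [512, 128]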
Example #3
    hp.choice("hidden_dim_protein", [128, 256, 512]),
    "batch_size":
    hp.choice("batch_size", [32, 64, 128, 256, 512]),
    "transformer_dropout_rate":
    hp.choice("transformer_dropout_rate", [0.1]),
    "transformer_emb_size_drug":
    hp.choice("transformer_emb_size_drug", [128, 256]),
    "transformer_n_layer_drug":
    hp.choice("transformer_n_layer_drug", range(1, 10)),
    "transformer_intermediate_size_drug":
    hp.choice("transformer_intermediate_size_drug", [128, 256, 512]),
    "transformer_num_attention_heads_drug":
    hp.choice("transformer_num_attention_heads_drug", [2, 4, 8]),
    "transformer_attention_probs_dropout":
    hp.choice("transformer_attention_probs_dropout", [0.1]),
    "transformer_hidden_dropout_rate":
    hp.choice("transformer_hidden_dropout_rate", [0.1]),
    "transformer_emb_size_target":
    hp.choice("transformer_emb_size_target", [32, 64, 128]),
    "transformer_n_layer_target":
    hp.choice("transformer_n_layer_target", range(1, 10)),
    "transformer_intermediate_size_target":
    hp.choice("transformer_intermediate_size_target", [128, 256, 512]),
    "transformer_num_attention_heads_target":
    hp.choice("transformer_num_attention_heads_target", [2, 4, 8])
}

from hyperopt import space_eval

best = fmin(fn=objective, space=space, algo=tpe.suggest, max_evals=100)
best_params = space_eval(space, best)  # fmin returns hp.choice *indices*; map them back to values
print(best_params)
objective(best_params)  # retrain once with the best hyperparameters (objective rebuilds the full config)
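If the per-trial results are needed as well, hyperopt's Trials object can be passed to fmin; a minimal sketch, assuming objective and space are the ones defined above:

from hyperopt import Trials, fmin, space_eval, tpe

trials = Trials()
best = fmin(fn=objective, space=space, algo=tpe.suggest,
            max_evals=100, trials=trials)
# each entry in trials.results holds the loss (1 - AUROC) returned by objective
print(sorted(t["loss"] for t in trials.results)[:5])
print(space_eval(space, best))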