def parse_model_args(parser, model_name='PreHash'):
    """Register PreHash-specific command-line arguments on ``parser``.

    Adds the user-hash size, hash-learning sampling counts, hash MLP /
    tree layer specs, transfer-attention size and cold-sampling ratio,
    then delegates to ``RecModel.parse_model_args`` for the arguments
    shared by all recommender models.

    :param parser: an ``argparse.ArgumentParser`` to extend in place.
    :param model_name: model identifier forwarded to the base class.
    :return: whatever ``RecModel.parse_model_args`` returns.
    """
    # (flag, type, default, help) rows keep the option table compact
    # and make the registered options easy to scan.
    option_rows = [
        ('--hash_u_num', int, 128, 'Size of user hash.'),
        ('--sample_max_n', int, 128, 'Sample top-n when learn hash.'),
        ('--sample_r_n', int, 128, 'Sample random-n when learn hash.'),
        ('--hash_layers', str, '[32]', 'MLP layer sizes of hash'),
        ('--tree_layers', str, '[64]',
         'Number of branches in each level of the hash tree'),
        ('--transfer_att_size', int, 16,
         'Size of attention layer of transfer layer (combine the hash and cf vector)'),
        ('--cs_ratio', float, 0.1, 'Cold-Sampling ratio of each batch.'),
    ]
    for flag, flag_type, default, help_text in option_rows:
        parser.add_argument(flag, type=flag_type, default=default,
                            help=help_text)
    return RecModel.parse_model_args(parser, model_name)
def parse_model_args(parser, model_name='GRU4Rec'):
    """Register GRU4Rec-specific command-line arguments on ``parser``.

    Adds the GRU hidden size / layer count, prediction-layer spec and
    the negative-interaction embedding switches, then delegates to
    ``RecModel.parse_model_args`` for the arguments shared by all
    recommender models.

    :param parser: an ``argparse.ArgumentParser`` to extend in place.
    :param model_name: model identifier forwarded to the base class.
    :return: whatever ``RecModel.parse_model_args`` returns.
    """
    # (flag, type, default, help) rows keep the option table compact.
    option_rows = [
        ('--hidden_size', int, 64, 'Size of hidden vectors in GRU.'),
        ('--num_layers', int, 1, 'Number of GRU layers.'),
        ('--p_layers', str, '[64]', 'Size of each layer.'),
        ('--neg_emb', int, 1,
         'Whether use negative interaction embeddings.'),
        ('--neg_layer', str, '[]',
         'Whether use a neg_layer to transfer negative interaction embeddings. '
         '[] means using -v. It is ignored when neg_emb=1'),
    ]
    for flag, flag_type, default, help_text in option_rows:
        parser.add_argument(flag, type=flag_type, default=default,
                            help=help_text)
    return RecModel.parse_model_args(parser, model_name)
def parse_model_args(parser, model_name='NeuMF'):
    """Register NeuMF-specific command-line arguments on ``parser``.

    Adds the MLP tower and prediction-layer size specifications, then
    delegates to ``RecModel.parse_model_args`` for the arguments shared
    by all recommender models.

    :param parser: an ``argparse.ArgumentParser`` to extend in place.
    :param model_name: model identifier forwarded to the base class.
    :return: whatever ``RecModel.parse_model_args`` returns.
    """
    # Both options are bracketed-list strings parsed downstream.
    option_rows = [
        ('--layers', '[64]', 'Size of mlp layers.'),
        ('--p_layers', '[]', 'Size of prediction mlp layers.'),
    ]
    for flag, default, help_text in option_rows:
        parser.add_argument(flag, type=str, default=default, help=help_text)
    return RecModel.parse_model_args(parser, model_name)
def parse_model_args(parser, model_name='ACCM'):
    """Register ACCM-specific command-line arguments on ``parser``.

    Adds the content-feature vector size, CB-part hidden-layer spec,
    attention size and cold-sampling ratio, then delegates to
    ``RecModel.parse_model_args`` for the arguments shared by all
    recommender models.

    :param parser: an ``argparse.ArgumentParser`` to extend in place.
    :param model_name: model identifier forwarded to the base class.
    :return: whatever ``RecModel.parse_model_args`` returns.
    """
    # (flag, type, default, help) rows keep the option table compact.
    option_rows = [
        ('--f_vector_size', int, 64, 'Size of feature vectors.'),
        ('--cb_hidden_layers', str, '[]',
         "Number of CB part's hidden layer."),
        ('--attention_size', int, 16, 'Size of attention layer.'),
        ('--cs_ratio', float, 0.1, 'Cold-Sampling ratio of each batch.'),
    ]
    for flag, flag_type, default, help_text in option_rows:
        parser.add_argument(flag, type=flag_type, default=default,
                            help=help_text)
    return RecModel.parse_model_args(parser, model_name)