from attr import asdict  # attrs' asdict supports recurse=False (dataclasses' does not)
import torch.nn as nn


def make_model(fc_model, transformer, post_model, n_features, dstore):
    """
    Helper function for instantiating LTRModel.
    :param fc_model: FCModel used as input block
    :param transformer: transformer Encoder used as encoder block
    :param post_model: parameters dict for OutputLayer output block (excluding d_model)
    :param n_features: number of input features
    :param dstore: datastore providing the vocabulary and the fastText initialization flag
    :return: LTR model instance
    """
    if fc_model:
        fc_model = FCModel(**fc_model, n_features=n_features, vocab_size=len(dstore.vocab))  # type: ignore
    d_model = n_features if not fc_model else fc_model.output_size
    if transformer:
        transformer = make_transformer(n_features=d_model, **asdict(transformer, recurse=False))  # type: ignore
    model = LTRModel(fc_model, transformer, OutputLayer(d_model, **post_model))

    # Initialize parameters with Glorot / fan_avg.
    for p in model.parameters():
        if p.dim() > 1:
            nn.init.xavier_uniform_(p)

    # Load pretrained fastText embeddings *after* the Glorot pass, otherwise
    # xavier_uniform_ above would overwrite them (embedding weights are 2-D).
    # Guard on fc_model, which may be None when no input block is configured.
    if fc_model and dstore.init_from_fasttext:
        fc_model.init_from_fasttext(dstore)

    return model
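# For context, a minimal sketch of the interface this variant assumes from
# `dstore` (hypothetical stub; the real datastore class lives elsewhere):
#
#     @dataclass
#     class DStore:
#         vocab: Dict[str, int]      # token -> id mapping; len() sizes the embedding
#         init_from_fasttext: bool   # whether to load pretrained fastText vectors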
def make_model(fc_model, transformer, post_model, n_features):
    """
    Variant of make_model without a datastore: no vocabulary-sized embedding
    and no fastText initialization, otherwise identical.
    """
    if fc_model:
        fc_model = FCModel(**fc_model, n_features=n_features)  # type: ignore
    d_model = n_features if not fc_model else fc_model.output_size
    if transformer:
        transformer = make_transformer(n_features=d_model, **asdict(transformer, recurse=False))  # type: ignore
    model = LTRModel(fc_model, transformer, OutputLayer(d_model, **post_model))

    # Initialize parameters with Glorot / fan_avg.
    for p in model.parameters():
        if p.dim() > 1:
            nn.init.xavier_uniform_(p)
    return model
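# Example usage (hypothetical config values; the exact FCModel/OutputLayer
# keyword arguments depend on your config schema -- a sketch, not a spec):
#
#     model = make_model(
#         fc_model={"sizes": [144, 96], "input_norm": True, "activation": "ReLU", "dropout": 0.1},
#         transformer=transformer_config,  # attrs config object consumed via asdict(), or None
#         post_model={"d_output": 1, "output_activation": "Sigmoid"},
#         n_features=136,
#     )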