# Shared imports for the combiner configs below. `INCLUDE` is standard
# marshmallow; the `schema` helper module (PositiveInteger, ReductionOptions,
# StringOptions, ...) and `sequence_encoder_registry` are assumed to be
# provided by the surrounding project.
from typing import Any, Dict, List, Optional, Union

from marshmallow import INCLUDE


class SequenceCombinerConfig:
    """Parameters for the sequence combiner, which runs a sequence encoder
    over the combined inputs and then optionally reduces the output."""

    main_sequence_feature: Optional[str] = None
    reduce_output: Optional[str] = schema.ReductionOptions()
    encoder: Optional[str] = schema.StringOptions(list(sequence_encoder_registry.keys()))

    class Meta:
        unknown = INCLUDE  # keep keys that this schema does not declare
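# --- Usage sketch ------------------------------------------------------------
# Assuming the config classes in this module are decorated with
# marshmallow_dataclass's @dataclass (the Meta/`unknown` pattern suggests a
# marshmallow-backed schema), a marshmallow schema can be derived from the
# class and used to validate a raw user dict. `class_schema` is real
# marshmallow_dataclass API; the decoration itself is an assumption here, and
# `validate_sequence_combiner` is a hypothetical helper for illustration.
import marshmallow_dataclass


def validate_sequence_combiner(raw: dict) -> dict:
    """Return marshmallow validation errors for a raw combiner config dict."""
    sequence_schema = marshmallow_dataclass.class_schema(SequenceCombinerConfig)()
    # Because of `unknown = INCLUDE`, undeclared keys are kept rather than
    # reported as errors; an `encoder` value outside sequence_encoder_registry
    # would be reported by the StringOptions constraint.
    return sequence_schema.validate(raw)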
class TransformerCombinerConfig:
    """Parameters for the transformer combiner: a stack of transformer layers
    over the encoded inputs, followed by optional fully connected layers and a
    reduction over the sequence dimension."""

    num_layers: int = schema.PositiveInteger(default=1)
    hidden_size: int = schema.NonNegativeInteger(default=256)
    num_heads: int = schema.NonNegativeInteger(default=8)
    transformer_fc_size: int = schema.NonNegativeInteger(default=256)
    dropout: float = schema.FloatRange(default=0.1, min=0, max=1)
    fc_layers: Optional[List[Dict[str, Any]]] = schema.DictList()
    num_fc_layers: int = schema.NonNegativeInteger(default=0)
    fc_size: int = schema.PositiveInteger(default=256)
    use_bias: bool = True
    weights_initializer: Union[str, Dict] = schema.InitializerOrDict(default="xavier_uniform")
    bias_initializer: Union[str, Dict] = schema.InitializerOrDict(default="zeros")
    norm: Optional[str] = schema.StringOptions(["batch", "layer"])
    norm_params: Optional[dict] = schema.Dict()
    fc_activation: str = "relu"
    fc_dropout: float = schema.FloatRange(default=0.0, min=0, max=1)
    fc_residual: bool = False
    reduce_output: Optional[str] = schema.ReductionOptions(default="mean")

    class Meta:
        unknown = INCLUDE
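# --- What the `schema` helpers plausibly expand to ---------------------------
# A minimal sketch (an assumption, not the project's actual helper) of what a
# field constructor like schema.FloatRange(default=0.1, min=0, max=1) could
# return: a dataclass field whose marshmallow metadata attaches a Range
# validator, which marshmallow_dataclass picks up when building the schema.
from dataclasses import field

from marshmallow import validate


def float_range(default: float, min: float, max: float):
    # `min`/`max` shadow builtins only to mirror the call signature used above.
    return field(
        default=default,
        metadata={"validate": validate.Range(min=min, max=max)},
    )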
class SequenceConcatCombinerConfig:
    """Parameters for the sequence-concat combiner, which concatenates encoder
    outputs along the sequence dimension and then optionally reduces them."""

    main_sequence_feature: Optional[str] = None
    reduce_output: Optional[str] = schema.ReductionOptions()

    class Meta:
        unknown = INCLUDE
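# --- How reduce_output is typically applied ----------------------------------
# Illustrative sketch, not the project's implementation: assuming
# ReductionOptions enumerates modes like "sum", "mean", "max", and "last",
# with None meaning "return the full sequence", a combiner could apply the
# configured reduction to a [batch, sequence, hidden] tensor like this
# (torch is assumed as the tensor backend).
from typing import Optional

import torch


def reduce_sequence(hidden: torch.Tensor, mode: Optional[str]) -> torch.Tensor:
    if mode is None:
        return hidden                    # keep [batch, sequence, hidden]
    if mode == "sum":
        return hidden.sum(dim=1)
    if mode == "mean":
        return hidden.mean(dim=1)
    if mode == "max":
        return hidden.max(dim=1).values
    if mode == "last":
        return hidden[:, -1, :]          # last timestep only
    raise ValueError(f"unsupported reduce_output: {mode}")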