Code example #1
File: combiners.py  Project: jimthompson5802/ludwig
from typing import Any, Dict, List, Optional, Union

from marshmallow import INCLUDE
from marshmallow_dataclass import dataclass

# Ludwig's marshmallow field helpers; the exact module path varies across
# Ludwig versions, so treat this import as an assumption.
import ludwig.marshmallow.marshmallow_schema_utils as schema


@dataclass
class ConcatCombinerConfig:
    """Validated parameters for the concat combiner."""

    fc_layers: Optional[List[Dict[str, Any]]] = schema.DictList()
    num_fc_layers: int = schema.NonNegativeInteger(default=0)
    fc_size: int = schema.PositiveInteger(default=256)
    use_bias: bool = True
    weights_initializer: Union[str, Dict] = schema.InitializerOrDict(default="xavier_uniform")
    bias_initializer: Union[str, Dict] = schema.InitializerOrDict(default="zeros")
    norm: Optional[str] = schema.StringOptions(["batch", "layer"])
    norm_params: Optional[dict] = schema.Dict()
    activation: str = "relu"
    dropout: float = schema.FloatRange(default=0.0, min=0, max=1)
    flatten_inputs: bool = False
    residual: bool = False

    class Meta:
        # Keep keys the schema does not recognize instead of raising.
        unknown = INCLUDE
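
A minimal usage sketch, assuming the marshmallow_dataclass decorator shown above: the decorated class exposes a generated Schema attribute whose load() validates a raw dict against the field constraints and returns a config instance, with unspecified fields falling back to their declared defaults.

raw = {"num_fc_layers": 2, "fc_size": 128}
config = ConcatCombinerConfig.Schema().load(raw)
print(config.fc_size)   # 128
print(config.dropout)   # 0.0, from the schema default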
Code example #2
File: combiners.py  Project: jimthompson5802/ludwig
# Same imports as Code example #1.
@dataclass
class TransformerCombinerConfig:
    """Validated parameters for the transformer combiner."""

    num_layers: int = schema.PositiveInteger(default=1)
    hidden_size: int = schema.NonNegativeInteger(default=256)
    num_heads: int = schema.NonNegativeInteger(default=8)
    transformer_fc_size: int = schema.NonNegativeInteger(default=256)
    dropout: float = schema.FloatRange(default=0.1, min=0, max=1)
    fc_layers: Optional[List[Dict[str, Any]]] = schema.DictList()
    num_fc_layers: int = schema.NonNegativeInteger(default=0)
    fc_size: int = schema.PositiveInteger(default=256)
    use_bias: bool = True
    weights_initializer: Union[str, Dict] = schema.InitializerOrDict(default="xavier_uniform")
    bias_initializer: Union[str, Dict] = schema.InitializerOrDict(default="zeros")
    norm: Optional[str] = schema.StringOptions(["batch", "layer"])
    norm_params: Optional[dict] = schema.Dict()
    fc_activation: str = "relu"
    fc_dropout: float = schema.FloatRange(default=0.0, min=0, max=1)
    fc_residual: bool = False
    reduce_output: Optional[str] = schema.ReductionOptions(default="mean")

    class Meta:
        # Keep keys the schema does not recognize instead of raising.
        unknown = INCLUDE
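
The same pattern applies here; again a sketch under the marshmallow_dataclass assumption from example #1. Supplied values are range-checked (e.g. dropout must lie in [0, 1]) and everything else falls back to the defaults declared above.

config = TransformerCombinerConfig.Schema().load({"num_layers": 2, "num_heads": 4})
print(config.hidden_size)    # 256, from the schema default
print(config.reduce_output)  # "mean", from the schema default
# An out-of-range value such as {"dropout": 1.5} raises a marshmallow ValidationError.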