class Config(OutputLayerBase.Config):
    # Loss applied to the answer-position logits.
    loss: CrossEntropyLoss.Config = CrossEntropyLoss.Config()
    # presumably controls skipping unanswerable examples in the loss — confirm with consumer
    ignore_impossible: bool = True
    # Relative weight of the span-position loss term.
    pos_loss_weight: float = 0.5
    # Relative weight of the has-answer loss term.
    has_answer_loss_weight: float = 0.5
    # Label string used for the negative ("no answer") class.
    false_label: str = "False"
    # Cap on predicted answer span length (units presumably tokens — verify).
    max_answer_len: int = 30
class Config(OutputLayerBase.Config):
    # Any of the supported classification loss configs; cross-entropy by default.
    loss: Union[
        CrossEntropyLoss.Config,
        BinaryCrossEntropyLoss.Config,
        MultiLabelSoftMarginLoss.Config,
        AUCPRHingeLoss.Config,
        KLDivergenceBCELoss.Config,
        KLDivergenceCELoss.Config,
        LabelSmoothedCrossEntropyLoss.Config,
    ] = CrossEntropyLoss.Config()
    # Optional per-label weights keyed by label string; None means unweighted.
    label_weights: Optional[Dict[str, float]] = None
class Config(OutputLayerBase.Config):
    # Any of the supported loss configs; cross-entropy by default.
    loss: Union[
        CrossEntropyLoss.Config,
        BinaryCrossEntropyLoss.Config,
        AUCPRHingeLoss.Config,
        KLDivergenceBCELoss.Config,
        KLDivergenceCELoss.Config,
        LabelSmoothedCrossEntropyLoss.Config,
    ] = CrossEntropyLoss.Config()
    # Per-label weights keyed by label string; empty means unweighted.
    # NOTE(review): class-level mutable default — presumably safe because the
    # config framework copies defaults per instance; confirm against ConfigBase.
    label_weights: Dict[str, float] = {}
    # Whether padding positions are excluded from the loss.
    ignore_pad_in_loss: Optional[bool] = True
def test_doc_classification_output_layer(self):
    """The loss's ignore_index tracks PAD's position in the label vocab:
    the PAD index when PAD is present, otherwise -1."""
    tensorizer = LabelTensorizer()

    # Vocab with PAD at index 0 -> ignore_index must be 0.
    tensorizer.vocab = Vocabulary([SpecialTokens.PAD, "foo", "bar"])
    pad_layer = ClassificationOutputLayer.from_config(
        config=ClassificationOutputLayer.Config(loss=CrossEntropyLoss.Config()),
        labels=tensorizer.vocab,
    )
    self.assertEqual(pad_layer.loss_fn.ignore_index, 0)  # use default pad

    # Vocab without PAD -> ignore_index falls back to -1.
    tensorizer.vocab = Vocabulary(["foo", "bar"])
    no_pad_layer = ClassificationOutputLayer.from_config(
        config=ClassificationOutputLayer.Config(loss=CrossEntropyLoss.Config()),
        labels=tensorizer.vocab,
    )
    self.assertEqual(no_pad_layer.loss_fn.ignore_index, -1)
class Config(OutputLayerBase.Config):
    # Any of the supported loss configs; cross-entropy by default.
    loss: Union[
        CrossEntropyLoss.Config,
        BinaryCrossEntropyLoss.Config,
        AUCPRHingeLoss.Config,
        KLDivergenceBCELoss.Config,
        KLDivergenceCELoss.Config,
        SoftHardBCELoss.Config,
    ] = CrossEntropyLoss.Config()
class Config(OutputLayerBase.Config):
    # Any of the supported classification loss configs; cross-entropy by default.
    loss: Union[
        CrossEntropyLoss.Config,
        BinaryCrossEntropyLoss.Config,
        BinaryCrossEntropyWithLogitsLoss.Config,
        MultiLabelSoftMarginLoss.Config,
        AUCPRHingeLoss.Config,
        HingeLoss.Config,
        KLDivergenceBCELoss.Config,
        KLDivergenceCELoss.Config,
        LabelSmoothedCrossEntropyLoss.Config,
    ] = CrossEntropyLoss.Config()
    # Explicit per-label weights keyed by label string; None means unweighted.
    label_weights: Optional[Dict[str, float]] = None
    # Strategy for deriving label weights automatically; None disables it.
    # NOTE(review): presumably mutually exclusive with label_weights — confirm.
    automatic_label_weighting_method: Optional[WeightingMethod] = None
class Config(OutputLayerBase.Config):
    # Any of the supported loss configs; cross-entropy by default.
    loss: Union[
        CrossEntropyLoss.Config,
        BinaryCrossEntropyLoss.Config,
        AUCPRHingeLoss.Config,
        KLDivergenceBCELoss.Config,
        KLDivergenceCELoss.Config,
        SoftHardBCELoss.Config,
    ] = CrossEntropyLoss.Config()
    # Optional per-label weights keyed by label string; None means unweighted.
    label_weights: Optional[Dict[str, float]] = None
class Config(OutputLayerBase.Config):
    # Span loss; KL-divergence variant supports soft (distillation) targets.
    loss: Union[
        CrossEntropyLoss.Config,
        KLDivergenceCELoss.Config,
    ] = CrossEntropyLoss.Config()
    # presumably controls skipping unanswerable examples in the loss — confirm with consumer
    ignore_impossible: bool = True
    # Relative weight of the span-position loss term.
    pos_loss_weight: float = 0.5
    # Relative weight of the has-answer loss term.
    has_answer_loss_weight: float = 0.5
    # Label string used for the negative ("no answer") class.
    false_label: str = "False"
    # Cap on predicted answer span length (units presumably tokens — verify).
    max_answer_len: int = 30
    # Knowledge distillation mixes soft and hard labels; this is the weight
    # placed on the loss against the hard labels (0.0 = soft labels only).
    hard_weight: float = 0.0
def __init__(
    self,
    is_binary: bool = True,
    label_weights: Optional[Dict[str, float]] = None,
    loss=None,
):
    """Store the loss function, defaulting by task arity.

    A caller-supplied (truthy) ``loss`` wins; otherwise a fresh
    BinaryCrossEntropyLoss (binary case) or CrossEntropyLoss
    (multiclass case) is built from its default config.

    NOTE(review): ``label_weights`` is accepted but not used in this
    visible body — presumably consumed elsewhere or dead; confirm.
    """
    super().__init__()
    loss_cls = BinaryCrossEntropyLoss if is_binary else CrossEntropyLoss
    # ``or`` (not an ``is None`` check) mirrors the original default logic.
    self.loss = loss or loss_cls(loss_cls.Config())
class Config(OutputLayerBase.Config):
    # Fixed cross-entropy loss; only its config is tunable here.
    loss: CrossEntropyLoss.Config = CrossEntropyLoss.Config()
class Config(ConfigBase):
    # Any of the supported token-level loss configs; cross-entropy by default.
    loss: Union[
        CrossEntropyLoss.Config,
        LabelSmoothedCrossEntropyLoss.Config,
        NLLLoss.Config,
    ] = CrossEntropyLoss.Config()
class Config(OutputLayerBase.Config):
    # Fixed cross-entropy loss; only its config is tunable here.
    loss: CrossEntropyLoss.Config = CrossEntropyLoss.Config()
    # Per-label weights keyed by label string; empty means unweighted.
    # NOTE(review): class-level mutable default — presumably safe because the
    # config framework copies defaults per instance; confirm against ConfigBase.
    label_weights: Dict[str, float] = {}
class Config(OutputLayerBase.Config):
    # Any of the supported loss configs; cross-entropy by default.
    loss: Union[
        CrossEntropyLoss.Config,
        BinaryCrossEntropyLoss.Config,
        AUCPRHingeLoss.Config,
    ] = CrossEntropyLoss.Config()
def __init__(self, loss: Optional[Loss] = None):
    """Store the loss function, defaulting to cross-entropy.

    Args:
        loss: a caller-supplied loss instance; when falsy/None a
            CrossEntropyLoss built from its default config is used.
    """
    # Fix: ``loss: Loss = None`` is an implicit-Optional annotation, which
    # PEP 484 disallows; the default and runtime behavior are unchanged.
    super().__init__()
    self.loss = loss or CrossEntropyLoss(CrossEntropyLoss.Config())