def get_config(loss: str, nfeat: int, nclass: int, task: str, margin: float,
               triplet_strategy: str, semihard_n: int = 10) -> cf.LossConfig:
    """
    Create a loss configuration object based on parameters given by the user
    :param loss: the loss function name
    :param nfeat: the dimension of the embeddings
    :param nclass: the number of classes
    :param task: the task for which the loss will be used
    :param margin: a margin to use in contrastive, triplet and arcface losses
    :param triplet_strategy: the name of the triplet sampling strategy as received via script arguments
    :param semihard_n: the number of negatives to keep when using a semi-hard negative triplet sampling strategy
    :return: a loss configuration object
    :raises ValueError: if `loss` is not one of the recognized loss names
    """
    # NOTE(review): online pair/triplet mining is disabled when task == 'sts' —
    # presumably STS supplies precomputed pairs; confirm against the STS pipeline.
    if loss == 'softmax':
        return cf.SoftmaxConfig(DEVICE, nfeat, nclass)
    elif loss == 'contrastive':
        print(f"[Margin: {margin}]")
        return cf.ContrastiveConfig(DEVICE, margin=margin, distance=CosineDistance(),
                                    size_average=False, online=task != 'sts')
    elif loss == 'triplet':
        print(f"[Margin: {margin}]")
        return cf.TripletConfig(DEVICE, margin=margin, distance=CosineDistance(),
                                size_average=task != 'sts', online=task != 'sts',
                                sampling=get_triplet_strategy(triplet_strategy, semihard_n))
    elif loss == 'arcface':
        print(f"[Margin: {margin}]")
        return cf.ArcFaceConfig(DEVICE, nfeat, nclass, margin=margin)
    elif loss == 'center':
        return cf.CenterConfig(DEVICE, nfeat, nclass, distance=CosineDistance())
    elif loss == 'coco':
        return cf.CocoConfig(DEVICE, nfeat, nclass)
    elif loss == 'kldiv':
        return cf.KLDivergenceConfig(DEVICE, nfeat)
    else:
        raise ValueError(f"Loss function should be one of: {LOSS_OPTIONS_STR}")
def __init__(self, margin=0.2, distance=CosineDistance(), size_average=True, online=True):
    """
    Build the configuration for a contrastive loss.
    :param margin: margin value forwarded to the underlying contrastive loss
    :param distance: distance object used to compare embeddings
        (NOTE: the default is a single instance shared across calls — file convention)
    :param size_average: whether the underlying loss averages over the batch
    :param online: whether pairs are mined online
    """
    super().__init__(distance)
    # The heavy lifting is delegated to the project's ContrastiveLoss module.
    contrastive = ContrastiveLoss(constants.DEVICE, margin, distance, size_average, online)
    self.loss_ = contrastive
def __init__(self, scaling: float, distance=CosineDistance(), size_average: bool = True, online: bool = True, sampling=BatchAll()):
    """
    Build the configuration for a triplet loss.
    :param scaling: scaling value forwarded to the underlying triplet loss
    :param distance: distance object used to compare embeddings
    :param size_average: whether the underlying loss averages over the batch
    :param online: whether triplets are mined online
    :param sampling: triplet sampling strategy (defaults to a shared BatchAll instance)
    """
    super().__init__(distance)
    # Delegate the computation to the project's TripletLoss module.
    triplet = TripletLoss(constants.DEVICE, scaling, distance, size_average, online, sampling)
    self.loss_ = triplet
def __init__(self, nfeat, nclass, lweight=1, distance=CosineDistance()):
    """
    Build the configuration for a softmax + center loss combination.
    :param nfeat: dimension of the embeddings
    :param nclass: number of classes
    :param lweight: weight forwarded to the underlying SoftmaxCenterLoss
    :param distance: distance object used to compare embeddings
    """
    super().__init__(distance)
    # Keep the embedding/class sizes around for later inspection.
    self.nfeat = nfeat
    self.nclass = nclass
    self.loss_ = SoftmaxCenterLoss(constants.DEVICE, nfeat, nclass, lweight, distance)
def __init__(self, nfeat, nclass, margin, s):
    """
    Build a configuration whose classifier is an ArcLinear layer.
    :param nfeat: dimension of the embeddings
    :param nclass: number of classes
    :param margin: margin forwarded to ArcLinear
    :param s: scale forwarded to ArcLinear
    """
    # This configuration always compares embeddings with cosine distance.
    cosine = CosineDistance()
    super().__init__(cosine)
    self.clf_ = ArcLinear(nfeat, nclass, margin, s)
def __init__(self, nfeat, nclass):
    """
    Build a configuration that only records the embedding/class sizes.
    :param nfeat: dimension of the embeddings
    :param nclass: number of classes
    """
    # This configuration always compares embeddings with cosine distance.
    super().__init__(CosineDistance())
    self.nfeat = nfeat
    self.nclass = nclass
def __init__(self, nfeat, nclass, alpha):
    """
    Build a configuration whose classifier is a CocoLinear layer.
    :param nfeat: dimension of the embeddings
    :param nclass: number of classes
    :param alpha: scaling value forwarded to CocoLinear
    """
    # This configuration always compares embeddings with cosine distance.
    cosine = CosineDistance()
    super().__init__(cosine)
    self.clf_ = CocoLinear(nfeat, nclass, alpha)
default=10, help= 'Steps (in percentage) to show evaluation progress, only for STS. Default: 10' ) parser.add_argument('--seed', type=int, default=None, help='Random seed') parser.add_argument('--exp-id', type=str, default=f"EXP-{launch_datetime.replace(' ', '-')}", help='An identifier for the experience') args = parser.parse_args() # Set custom seed set_custom_seed(args.seed) if args.distance == 'cosine': distance = CosineDistance() elif args.distance == 'euclidean': distance = EuclideanDistance() else: raise ValueError("Distance can only be: cosine / euclidean") print(f"[Task: {args.task.upper()}]") print('[Preparing...]') if args.task == 'speaker': experiment = VoxCeleb1ModelEvaluationExperiment(model_path=args.model, nfeat=256, distance=distance, batch_size=args.batch_size) metric_name = 'EER' elif args.task == 'sts':
def __init__(self, device, nfeat, nclass, margin=0.2, s=7.0):
    """
    Configure the ArcFace loss: an ArcLinear module followed by cross entropy.
    :param device: device on which the criterion is placed
    :param nfeat: dimension of the embeddings
    :param nclass: number of classes
    :param margin: angular margin forwarded to ArcLinear
    :param s: scale forwarded to ArcLinear
    """
    self.loss_module = ArcLinear(nfeat, nclass, margin, s)
    # Cross entropy is applied on top of the ArcLinear logits.
    wrapped = LossWrapper(nn.CrossEntropyLoss().to(device))
    param_desc = f"m={margin} s={s}"
    super().__init__('ArcFace Loss', param_desc, self.loss_module, wrapped, CosineDistance())
def __init__(self, device, nfeat, nclass):
    """
    Configure a cross-entropy (softmax) loss over a CenterLinear classifier.
    :param device: device on which the criterion is placed
    :param nfeat: dimension of the embeddings
    :param nclass: number of classes
    """
    self.loss_module = CenterLinear(nfeat, nclass)
    # NLLLoss is used here — presumably CenterLinear emits log-probabilities; confirm.
    wrapped = LossWrapper(nn.NLLLoss().to(device))
    super().__init__('Cross Entropy', None, self.loss_module, wrapped, CosineDistance())
def __init__(self, device, nfeat):
    """
    Configure a KL-divergence loss over the STS baseline classifier.
    :param device: device on which the criterion is placed
    :param nfeat: dimension of the embeddings
    """
    classifier = STSBaselineClassifier(nfeat)
    wrapped = LossWrapper(nn.KLDivLoss().to(device))
    super().__init__('KL-Divergence', None, classifier, wrapped, CosineDistance())
def __init__(self, device, nfeat, nclass, alpha=6.25):
    """
    Configure the CoCo loss: a CocoLinear module followed by cross entropy.
    :param device: device on which the criterion is placed
    :param nfeat: dimension of the embeddings
    :param nclass: number of classes
    :param alpha: scaling value forwarded to CocoLinear
    """
    classifier = CocoLinear(nfeat, nclass, alpha)
    wrapped = LossWrapper(nn.CrossEntropyLoss().to(device))
    param_desc = f"α={alpha}"
    super().__init__('CoCo Loss', param_desc, classifier, wrapped, CosineDistance())