Example #1
 def __init__(self, target_name='sigmoid', endpoint_name="sigmoid"):
     super().__init__()
     self.cross_entropy = nn.BCEWithLogitsLoss(reduction='none')
     self.tensorboard_logger = get_tensorboard_logger()
     self.logger = get_logger()
     self.endpoint_name = endpoint_name
     self.target_name = target_name
 def __init__(self, endpoint_name, target_name):
     super().__init__()
     self.endpoint_name = endpoint_name
     self.target_name = target_name
     self.logger = get_logger()
     self.tensorboard_logger = get_tensorboard_logger()
     self.l1 = nn.L1Loss(reduction='none')
 def __init__(self, alpha, gamma, loss_module, name):
     super().__init__(loss_module)
     self.alpha = alpha
     self.gamma = gamma
     self.k = None
     self.tensorboard_logger = logger.get_tensorboard_logger()
     self.name = name
Example #4
 def __init__(self, target_name, endpoint_name):
     super().__init__()
     self.cross_entropy = nn.CrossEntropyLoss(reduction='none')
     self.tensorboard_logger = get_tensorboard_logger()
     self.logger = get_logger()
     self.target_name = target_name
     self.endpoint_name = endpoint_name
Example #5
 def __init__(self, m, cdist_fn=calc_cdist, endpoint_name='triplet'):
     super().__init__()
     self.name = "BatchHard(m={})".format(m)
     self.m = m
     self.cdist_fn = cdist_fn
     self.tensorboard_logger = get_tensorboard_logger()
     self.logger = get_logger()
     self.endpoint_name = endpoint_name
Example #6
    def __init__(self, target_name, endpoint_name, reduction='mean'):
        super(CrossEntropyLoss, self).__init__()

        self.cross_entropy = nn.CrossEntropyLoss(ignore_index=255, reduction=reduction)

        self.tensorboard_logger = get_tensorboard_logger()
        #self.logger = get_logger()
        self.target_name = target_name
        self.endpoint_name = endpoint_name
    def __init__(self, loss_module, name, init):
        """
        losses (dict): A name -> loss dictionary.
        init (float): 0 -> factor is 1.
        """
        super().__init__(loss_module)

        # log(sigma^2): learnable log-variance, initialised from `init`
        self.log_var = torch.nn.Parameter(torch.tensor(float(init)))
        self.name = name
        self.logger = logger.get_tensorboard_logger()
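The snippet above only stores the learnable `log_var` parameter; its forward pass is not shown here. As a minimal sketch of how such a factor is commonly applied (the formula and the helper name `uncertainty_weighted` are assumptions, following the usual uncertainty-weighting scheme), `init=0` indeed yields a factor of 1:

    import torch

    def uncertainty_weighted(loss_value, log_var):
        # Weight the wrapped loss by exp(-log(sigma^2)) and add a regularising
        # term; with log_var == 0 the factor is exp(0) = 1 ("0 -> factor is 1").
        return torch.exp(-log_var) * loss_value + 0.5 * log_var

    log_var = torch.nn.Parameter(torch.tensor(0.0))
    raw_loss = torch.tensor(2.0)
    print(uncertainty_weighted(raw_loss, log_var))  # tensor(2., grad_fn=...)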
Example #8
 def __init__(self, m, cdist_fn=calc_cdist, T=1.0, endpoint_name="triplet"):
     """
     Args:
         m: margin
         T: Softmax temperature
     """
     super(BatchSoft, self).__init__()
     self.name = "BatchSoft(m={}, T={})".format(m, T)
     self.m = m
     self.T = T
     self.cdist_fn = cdist_fn
     self.tensorboard_logger = get_tensorboard_logger()
     self.logger = get_logger()
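Only the constructor of BatchSoft appears above, so the following is purely a sketch of one plausible softmax-weighted ("soft") variant of batch-hard triplet mining, showing where the margin `m` and temperature `T` could enter. The helper name `batch_soft_triplet` is hypothetical, `torch.cdist` stands in for `calc_cdist`, and a PK-sampled batch (at least two samples per identity) is assumed:

    import torch
    import torch.nn.functional as F

    def batch_soft_triplet(emb, pids, m=0.2, T=1.0):
        # Pairwise Euclidean distances between all embeddings in the batch.
        dist = torch.cdist(emb, emb)                          # (N, N)
        same = pids[:, None] == pids[None, :]                 # same-identity mask
        pos = same & ~torch.eye(len(pids), dtype=torch.bool)  # positives, excluding the anchor itself
        neg_inf = torch.full_like(dist, float('-inf'))
        # Soft "hardest" positive: softmax over distances favours large ones.
        pos_d = (F.softmax(torch.where(pos, dist / T, neg_inf), dim=1) * dist).sum(dim=1)
        # Soft "hardest" negative: softmax over negated distances favours small ones.
        neg_d = (F.softmax(torch.where(same, neg_inf, -dist / T), dim=1) * dist).sum(dim=1)
        return F.relu(m + pos_d - neg_d).mean()

    emb = torch.randn(8, 128)
    pids = torch.tensor([0, 0, 1, 1, 2, 2, 3, 3])
    print(batch_soft_triplet(emb, pids))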
 def __init__(self, delta, tr_loss, id_loss):
     """
     Args:
         tr_loss tuple(name, DynamicFocalLoss): batch hard loss
         id_loss tuple(name, DynamicFocalLoss): softmax loss
     """
     # TODO
     super().__init__({})
     # name is name of dataset
     self.tr_name, self.tr_loss = tr_loss
     self.id_name, self.id_loss = id_loss
     self.delta = delta
     self.tensorboard_logger = logger.get_tensorboard_logger()
Example #10
 def __init__(self, schedule_fn, optimizer, last_epoch=-1):
     """
     Args:
         last_epoch: counting from zero
     """
     self.schedule_fn = schedule_fn
     # Map onto the underlying PyTorch convention,
     # where -1 is the special "start from scratch" value
     if last_epoch == 0:
         # from scratch
         last_epoch = -1
     else:
         # restored
         last_epoch -= 1
     self.last_epoch = last_epoch
     self.tensorboard_logger = get_tensorboard_logger()
     super().__init__(optimizer, last_epoch)
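Only the constructor of this scheduler wrapper is shown; the branch above maps a zero-based `last_epoch` (as read from a checkpoint) onto PyTorch's convention, where -1 means "not started yet". A small sketch of the same idea, using the stock `torch.optim.lr_scheduler.LambdaLR` as a stand-in consumer of a `schedule_fn` (the warm-up/decay schedule below is made up):

    import torch

    def schedule_fn(epoch):
        # Hypothetical schedule: linear warm-up for 5 epochs, then exponential decay.
        if epoch < 5:
            return (epoch + 1) / 5.0
        return 0.97 ** (epoch - 5)

    model = torch.nn.Linear(10, 2)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

    # Fresh run: a checkpoint value of 0 corresponds to PyTorch's last_epoch=-1;
    # a restored value of, say, 7 would be passed down as 6 so that the next
    # scheduler.step() advances to epoch 7.
    scheduler = torch.optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=schedule_fn, last_epoch=-1)
    for epoch in range(3):
        optimizer.step()
        scheduler.step()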
Example #11
    def __init__(self, target_name, endpoint_name, top_k_percent_pixels=1.0, hard_mining_step=0):
        """
        Args:
            hard_mining_step: Training step in which the hard mining
                kicks off
        """
        super().__init__()
        self.cross_entropy = nn.CrossEntropyLoss(ignore_index=255, reduction='none')

        self.tensorboard_logger = get_tensorboard_logger()
        #self.logger = get_logger()
        self.target_name = target_name
        self.endpoint_name = endpoint_name

        if top_k_percent_pixels == 1.0:
            # just default cross entropy loss
            self.forward = super().forward

        self.top_k_percent_pixels = top_k_percent_pixels
        self.hard_mining_step = hard_mining_step
        # TODO global step
        self.step = 0
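Only `__init__` is shown here; the per-pixel cross entropy with `reduction='none'` is what enables top-k hard pixel mining in the forward pass. A sketch of what that forward typically looks like (the ramp controlled by `hard_mining_step` is omitted, and the helper `top_k_pixel_loss` is an assumption, not the repository's actual forward):

    import torch
    import torch.nn as nn

    def top_k_pixel_loss(logits, target, top_k_percent_pixels=0.25):
        # logits: (N, C, H, W); target: (N, H, W) with 255 as the ignore label.
        ce = nn.CrossEntropyLoss(ignore_index=255, reduction='none')
        pixel_losses = ce(logits, target).flatten()        # one loss value per pixel
        if top_k_percent_pixels == 1.0:
            return pixel_losses.mean()                     # plain cross entropy
        k = max(1, int(top_k_percent_pixels * pixel_losses.numel()))
        hardest, _ = torch.topk(pixel_losses, k)           # k largest per-pixel losses
        return hardest.mean()

    logits = torch.randn(2, 19, 64, 64)
    target = torch.randint(0, 19, (2, 64, 64))
    print(top_k_pixel_loss(logits, target))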
Example #12
 def __init__(self, attributes):
     super().__init__()
     self.attributes = attributes
     self.cross_entropy = nn.CrossEntropyLoss(reduction='none')
     self.M = len(attributes)
     self.tensorboard_logger = get_tensorboard_logger()
 def __init__(self, endpoint_name, target_name):
     super().__init__()
     self.endpoint_name = endpoint_name
     self.target_name = target_name
     self.logger = get_logger()
     self.tensorboard_logger = get_tensorboard_logger()
 def __init__(self, losses):
     super().__init__()
     self.losses = torch.nn.ModuleDict(losses)
     self.logger = logger.get_tensorboard_logger()
 def __init__(self, tasks, model, scale_only_backbone=False):
     super().__init__()
     self.model = model
     self.tasks = tasks
     self.tb = logger.get_tensorboard_logger()
     self.scale_only_backbone = scale_only_backbone