def __init__(self, myBust):
    # MSE: Mean Square Error
    # Cross-Entropy: NLL of probability (output)
    self.base_learner = interface.CSLFN_param(myBust.learners[-1])
    self.T = myBust.Tmax
    self.alg = myBust.alg

def __init__(self, myDSNBust):
    # MSE: Mean Square Error
    # Cross-Entropy: NLL of probability (output)
    self.base_learner = interface.CSLFN_param(myDSNBust.layers[-1])
    self.Lmax = myDSNBust.Lmax
    self.nL = myDSNBust.nL
    self.alpha = myDSNBust.alpha
    self.beta = myDSNBust.beta
    self.Inyection = myDSNBust.Inyection
    self.Agregation = myDSNBust.Agregation
    self.Enphasis = myDSNBust.Enphasis

def __init__(self, myDSN):
    # MSE: Mean Square Error
    # Cross-Entropy: NLL of probability (output)
    self.base_learner = interface.CSLFN_param(myDSN.layers[-1])
    self.nL = myDSN.nL
    self.nP = myDSN.nP
    # Intermediate results info.
    self.gammas = myDSN.gammas
    if (myDSN.visual.store_layers_scores == 1):
        self.scoreTr_layers = myDSN.scoreTr_layers
        self.scoreVal_layers = myDSN.scoreVal_layers
    if (myDSN.visual.store_layers_soft_error == 1):
        self.errorTr_layers = myDSN.errorTr_layers
        self.errorVal_layers = myDSN.errorVal_layers

def __init__(self, myDSNBust):
    # MSE: Mean Square Error
    # Cross-Entropy: NLL of probability (output)
    self.base_learner = interface.CSLFN_param(myDSNBust.layers[-1])
    self.Lmax = myDSNBust.Lmax
    self.nL = myDSNBust.nL
    self.alpha = myDSNBust.alpha
    self.beta = myDSNBust.beta
    self.Inyection = myDSNBust.Inyection
    self.Agregation = myDSNBust.Agregation
    self.Enphasis = myDSNBust.Enphasis
    # Intermediate results info.
    self.gammas = myDSNBust.gammas
    if (myDSNBust.visual.store_layers_scores == 1):
        self.scoreTr_layers = myDSNBust.scoreTr_layers
        self.scoreVal_layers = myDSNBust.scoreVal_layers
    if (myDSNBust.visual.store_layers_soft_error == 1):
        self.errorTr_layers = myDSNBust.errorTr_layers
        self.errorVal_layers = myDSNBust.errorVal_layers
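
# --- Usage sketch (illustration only, not part of the original classes) ---
# A minimal sketch of how one of the parameter-snapshot constructors above might be
# exercised. Every name introduced here is an assumption for the sketch: `Bust_param`
# stands in for whatever class owns the first __init__, the interface.CSLFN_param(...)
# call is replaced by a plain attribute copy, and SimpleNamespace mimics a trained
# boosting model exposing learners, Tmax and alg.
from types import SimpleNamespace


class Bust_param:  # hypothetical wrapper class, for illustration only
    def __init__(self, myBust):
        self.base_learner = myBust.learners[-1]  # stand-in for interface.CSLFN_param(...)
        self.T = myBust.Tmax
        self.alg = myBust.alg


if __name__ == "__main__":
    # Mock of a trained boosting model; field values are arbitrary examples.
    trained = SimpleNamespace(learners=["last_slfn"], Tmax=50, alg="adaboost")
    snap = Bust_param(trained)
    print(snap.T, snap.alg)  # -> 50 adaboost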