def __init__(self, logistic_params: LogisticParam):
    """Initialize base logistic-regression state from a parameter object."""
    # Hyper-parameters copied straight from the param object.
    self.alpha = logistic_params.alpha
    self.init_param_obj = logistic_params.init_param
    self.fit_intercept = self.init_param_obj.fit_intercept
    self.learning_rate = logistic_params.learning_rate

    # Choose the regularization updater matching the configured penalty;
    # None means no regularization is applied.
    penalty = logistic_params.penalty
    if penalty == consts.L1_PENALTY:
        self.updater = L1Updater(self.alpha, self.learning_rate)
    elif penalty == consts.L2_PENALTY:
        self.updater = L2Updater(self.alpha, self.learning_rate)
    else:
        self.updater = None

    self.eps = logistic_params.eps
    self.batch_size = logistic_params.batch_size
    self.max_iter = logistic_params.max_iter

    # Encryption backend: Paillier for real homomorphic encryption,
    # otherwise the FakeEncrypt pass-through.
    if logistic_params.encrypt_param.method == consts.PAILLIER:
        self.encrypt_operator = PaillierEncrypt()
    else:
        self.encrypt_operator = FakeEncrypt()

    # Attributes populated later during fitting.
    self.n_iter_ = 0
    self.coef_ = None
    self.intercept_ = 0
    self.classes_ = None
    self.gradient_operator = None
    self.initializer = Initializer()
    self.transfer_variable = None
    self.model_meta = LogisticRegressionModelMeta()
def setUp(self):
    """Build guest/host sample fixtures and initialize the model under test."""
    # Guest side: 3 samples x 5 features, scaled into [0, 1).
    self.guest_X = np.array([[1, 2, 3, 4, 5],
                             [3, 2, 4, 5, 1],
                             [2, 2, 3, 1, 1]]) / 10
    self.guest_Y = np.array([[1], [1], [-1]])
    self.values = []
    for idx, x in enumerate(self.guest_X):
        inst = Instance(inst_id=idx, features=x, label=self.guest_Y[idx])
        self.values.append((idx, inst))

    # Host side, built the same way as the guest side.
    self.host_X = np.array([[1, 1.2, 3.1, 4, 5],
                            [2.3, 2, 4, 5.3, 1],
                            [2, 2.2, 1.3, 1, 1.6]]) / 10
    self.host_Y = np.array([[-1], [1], [-1]])
    self.host_values = []
    for idx, x in enumerate(self.host_X):
        inst = Instance(inst_id=idx, features=x, label=self.host_Y[idx])
        # BUG FIX: this loop previously appended host instances to
        # self.values, leaving self.host_values empty.
        self.host_values.append((idx, inst))

    # Training hyper-parameters and collaborators.
    self.max_iter = 10
    self.alpha = 0.01
    self.learning_rate = 0.01
    optimizer = 'SGD'
    self.gradient_operator = LogisticGradient()
    self.initializer = Initializer()
    self.fit_intercept = True
    self.init_param_obj = InitParam(fit_intercept=self.fit_intercept)
    self.updater = L2Updater(self.alpha, self.learning_rate)
    self.optimizer = Optimizer(learning_rate=self.learning_rate,
                               opt_method_name=optimizer)
    self.__init_model()
def setUp(self):
    """Precompute expected loss norms and updated coefficients for L1/L2 tests."""
    alpha, learning_rate = 0.5, 0.1
    self.l1_updater = L1Updater(alpha, learning_rate)
    self.l2_updater = L2Updater(alpha, learning_rate)

    self.coef_ = np.array([1, -2, 3, -4, 5, -6, 7, -8, 9])
    self.gradient = np.array([1, 1, 1, 1, 1, 1, 1, 1, 1])

    # Expected L2 results: loss norm is (alpha / 2) * sum(w_i^2); the update
    # is a gradient step followed by weight decay of learning_rate * alpha.
    self.l2_loss_norm = 0.5 * alpha * np.sum(self.coef_ * self.coef_)
    self.l2_update_coef = (self.coef_ - self.gradient
                           - learning_rate * alpha * self.coef_)

    # Expected L1 results: loss norm is alpha * sum(|w_i|) = 0.5 * 45; the
    # update is the soft-threshold step with shrinkage learning_rate * alpha.
    self.l1_loss_norm = 22.5
    self.l1_update_coef = [0, -2.95, 1.95, -4.95, 3.95,
                           -6.95, 5.95, -8.95, 7.95]
def __init__(self, logistic_params: LogisticParam):
    """Validate parameters and initialize shared logistic-regression state."""
    self.param = logistic_params

    # Validate the parameter object before reading any of its fields.
    LogisticParamChecker.check_param(logistic_params)

    # Hyper-parameters.
    self.alpha = logistic_params.alpha
    self.init_param_obj = logistic_params.init_param
    self.fit_intercept = self.init_param_obj.fit_intercept
    self.learning_rate = logistic_params.learning_rate
    self.encrypted_mode_calculator_param = logistic_params.encrypted_mode_calculator_param
    self.encrypted_calculator = None

    # Regularization updater chosen by penalty type (None = no penalty).
    penalty = logistic_params.penalty
    if penalty == consts.L1_PENALTY:
        self.updater = L1Updater(self.alpha, self.learning_rate)
    elif penalty == consts.L2_PENALTY:
        self.updater = L2Updater(self.alpha, self.learning_rate)
    else:
        self.updater = None

    self.eps = logistic_params.eps
    self.batch_size = logistic_params.batch_size
    self.max_iter = logistic_params.max_iter

    # Encryption backend: Paillier for homomorphic encryption, otherwise
    # the FakeEncrypt pass-through.
    if logistic_params.encrypt_param.method == consts.PAILLIER:
        self.encrypt_operator = PaillierEncrypt()
    else:
        self.encrypt_operator = FakeEncrypt()

    # Attributes populated during fitting.
    self.n_iter_ = 0
    self.coef_ = None
    self.intercept_ = 0
    self.classes_ = None
    self.feature_shape = None
    self.gradient_operator = None
    self.initializer = Initializer()
    self.transfer_variable = None
    self.model_meta = LogisticRegressionModelMeta()
    self.loss_history = []
    self.is_converged = False
    self.header = None
    self.class_name = self.__class__.__name__
def _init_model(self, params):
    """Initialize model state from a parameter object.

    Sets hyper-parameters, the regularization updater, the encryption
    backend, the convergence criterion, and the optimizer.
    """
    self.model_param = params
    self.alpha = params.alpha
    self.init_param_obj = params.init_param
    self.fit_intercept = self.init_param_obj.fit_intercept
    self.learning_rate = params.learning_rate
    self.encrypted_mode_calculator_param = params.encrypted_mode_calculator_param
    self.encrypted_calculator = None

    # Regularization updater selected by penalty type (None = no penalty).
    if params.penalty == consts.L1_PENALTY:
        self.updater = L1Updater(self.alpha, self.learning_rate)
    elif params.penalty == consts.L2_PENALTY:
        self.updater = L2Updater(self.alpha, self.learning_rate)
    else:
        self.updater = None

    self.eps = params.eps
    self.batch_size = params.batch_size
    self.max_iter = params.max_iter
    # NOTE: a second, redundant `self.learning_rate = params.learning_rate`
    # assignment that appeared here in the original has been removed.
    self.party_weight = params.party_weight
    self.penalty = params.penalty

    # Encryption backend: Paillier for homomorphic encryption, otherwise
    # the FakeEncrypt pass-through.
    if params.encrypt_param.method == consts.PAILLIER:
        self.encrypt_operator = PaillierEncrypt()
    else:
        self.encrypt_operator = FakeEncrypt()

    # Convergence criterion: 'diff' compares successive losses,
    # 'weight_diff' compares successive coefficient vectors, and any other
    # value falls back to an absolute loss threshold.
    if params.converge_func == 'diff':
        self.converge_func = convergence.DiffConverge(eps=self.eps)
    elif params.converge_func == 'weight_diff':
        self.converge_func = convergence.WeightDiffConverge(eps=self.eps)
    else:
        self.converge_func = convergence.AbsConverge(eps=self.eps)

    self.re_encrypt_batches = params.re_encrypt_batches
    self.predict_param = params.predict_param
    self.optimizer = Optimizer(params.learning_rate, params.optimizer)
    self.key_length = params.encrypt_param.key_length