def __init__(self, max_num_aggregation, eps=0.0, name="default"):
    """Server-side secure-aggregation context.

    Sets up the transfer variables plus the server halves of the
    aggregator and the random-padding cipher, and initializes the
    convergence bookkeeping (loss starts at +inf so any real loss
    is an improvement).

    :param max_num_aggregation: maximum number of aggregation rounds
        (forwarded to the base context).
    :param eps: convergence tolerance used with ``self._loss``.
    :param name: context name forwarded to the base context.
    """
    # Modern zero-arg super() (Py3) — consistent with the sibling
    # trans-var __init__ in this file.
    super().__init__(max_num_aggregation=max_num_aggregation, name=name)
    self.transfer_variable = SecureAggregatorTransVar()
    self.aggregator = aggregator.Server(
        self.transfer_variable.aggregator_trans_var
    )
    self.random_padding_cipher = random_padding_cipher.Server(
        self.transfer_variable.random_padding_cipher_trans_var
    )
    self._eps = eps
    # Sentinel: no loss observed yet, so the first reported loss always counts.
    self._loss = math.inf
def __init__(self, server=(consts.ARBITER, ), clients=(consts.GUEST, consts.HOST), prefix=None):
    """Bundle the transfer variables needed for secure aggregation.

    Creates the secure-aggregator, loss-scatter, and has-converged
    transfer variables, all sharing the same server/clients roles and
    the prefix resolved by the base class.
    """
    super().__init__(server=server, clients=clients, prefix=prefix)
    # All three sub trans-vars take identical routing arguments;
    # note self.prefix (set by the base __init__), not the raw prefix arg.
    routing = dict(server=server, clients=clients, prefix=self.prefix)
    self.secure_aggregator_trans_var = SecureAggregatorTransVar(**routing)
    self.loss_scatter_trans_var = LossScatterTransVar(**routing)
    self.has_converged_trans_var = HasConvergedTransVar(**routing)
def __init__(self, max_num_aggregation, aggregate_every_n_epoch, name="default"):
    """Client-side secure-aggregation context.

    Sets up the transfer variables plus the client halves of the
    aggregator and the random-padding cipher, and initializes the
    per-epoch aggregation schedule and loss history.

    :param max_num_aggregation: maximum number of aggregation rounds
        (forwarded to the base context).
    :param aggregate_every_n_epoch: aggregate once every N local epochs.
    :param name: context name forwarded to the base context.
    """
    # Modern zero-arg super() (Py3) — consistent with the sibling
    # trans-var __init__ in this file.
    super().__init__(max_num_aggregation=max_num_aggregation, name=name)
    self.transfer_variable = SecureAggregatorTransVar()
    self.aggregator = aggregator.Client(self.transfer_variable.aggregator_trans_var)
    self.random_padding_cipher = random_padding_cipher.Client(
        self.transfer_variable.random_padding_cipher_trans_var
    )
    self.aggregate_every_n_epoch = aggregate_every_n_epoch
    # Cached model parameters; populated later, hence Optional.
    # (Annotation fixed: the attribute starts as None, not a list.)
    self._params: list | None = None
    self._should_stop = False
    self.loss_summary = []