def configure_fit(
    self, rnd: int, weights: Weights, client_manager: ClientManager
) -> List[Tuple[ClientProxy, FitIns]]:
    """Configure the next round of training."""
    # Block until `min_num_clients` are available
    sample_size, min_num_clients = self.num_fit_clients(
        client_manager.num_available()
    )
    success = client_manager.wait_for(
        num_clients=min_num_clients, timeout=WAIT_TIMEOUT
    )
    if not success:
        # Do not continue if not enough clients are available
        log(
            INFO,
            "FedFS: not enough clients available after timeout %s",
            WAIT_TIMEOUT,
        )
        return []

    # Sample clients
    clients = self._contribution_based_sampling(
        sample_size=sample_size, client_manager=client_manager
    )

    # Prepare parameters and config
    parameters = weights_to_parameters(weights)
    config = {}
    if self.on_fit_config_fn is not None:
        # Use custom fit config function if provided
        config = self.on_fit_config_fn(rnd)

    # Set timeout for this round
    use_fast_timeout = is_fast_round(rnd - 1, self.r_fast, self.r_slow)
    config["timeout"] = str(self.t_fast if use_fast_timeout else self.t_slow)

    # Fit instructions
    fit_ins = FitIns(parameters, config)

    # Return client/config pairs
    return [(client, fit_ins) for client in clients]
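
# The timeout switch above relies on `is_fast_round`, which is not part of this
# excerpt. The following is only an illustrative sketch, assuming that rounds
# alternate in blocks of `r_fast` fast rounds followed by `r_slow` slow rounds
# (parameter names taken from the call sites above).
def is_fast_round(rnd: int, r_fast: int, r_slow: int) -> bool:
    """Return True if `rnd` falls into the fast part of the alternating cycle (sketch)."""
    remainder = rnd % (r_fast + r_slow)
    return remainder < r_fast
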
def configure_fit(
    self, rnd: int, weights: Weights, client_manager: ClientManager
) -> List[Tuple[ClientProxy, FitIns]]:
    """Configure the next round of training."""
    # Block until `min_num_clients` are available
    sample_size, min_num_clients = self.num_fit_clients(
        client_manager.num_available()
    )
    success = client_manager.wait_for(
        num_clients=min_num_clients, timeout=WAIT_TIMEOUT
    )
    if not success:
        # Do not continue if not enough clients are available
        log(
            INFO,
            "FedFS: not enough clients available after timeout %s",
            WAIT_TIMEOUT,
        )
        return []

    # Sample clients
    msg = "FedFS round %s, sample %s clients (based on all previous contributions)"
    if self.alternating_timeout:
        log(
            DEBUG,
            msg,
            str(rnd),
            str(sample_size),
        )
        clients = self._contribution_based_sampling(
            sample_size=sample_size, client_manager=client_manager
        )
    elif self.importance_sampling:
        if rnd == 1:
            # Sample with 1/k in the first round
            log(
                DEBUG,
                "FedFS round %s, sample %s clients with 1/k",
                str(rnd),
                str(sample_size),
            )
            clients = self._one_over_k_sampling(
                sample_size=sample_size, client_manager=client_manager
            )
        else:
            fast_round = is_fast_round(
                rnd - 1, r_fast=self.r_fast, r_slow=self.r_slow
            )
            log(
                DEBUG,
                "FedFS round %s, sample %s clients, fast_round %s",
                str(rnd),
                str(sample_size),
                str(fast_round),
            )
            clients = self._fs_based_sampling(
                sample_size=sample_size,
                client_manager=client_manager,
                fast_round=fast_round,
            )
    else:
        clients = self._one_over_k_sampling(
            sample_size=sample_size, client_manager=client_manager
        )

    # Prepare parameters and config
    parameters = weights_to_parameters(weights)
    config = {}
    if self.on_fit_config_fn is not None:
        # Use custom fit config function if provided
        config = self.on_fit_config_fn(rnd)

    # Set timeout for this round
    if self.dynamic_timeout:
        if self.durations:
            candidates = timeout_candidates(
                durations=self.durations,
                max_timeout=self.t_slow,
            )
            timeout = next_timeout(
                candidates=candidates,
                percentile=self.dynamic_timeout_percentile,
            )
            config["timeout"] = str(timeout)
        else:
            # Initial round has no past durations, use max_timeout
            config["timeout"] = str(self.t_slow)
    elif self.alternating_timeout:
        use_fast_timeout = is_fast_round(rnd - 1, self.r_fast, self.r_slow)
        config["timeout"] = str(self.t_fast if use_fast_timeout else self.t_slow)
    else:
        config["timeout"] = str(self.t_slow)

    # Fit instructions
    fit_ins = FitIns(parameters, config)

    # Return client/config pairs
    return [(client, fit_ins) for client in clients]
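
# The dynamic-timeout branch above calls `timeout_candidates` and `next_timeout`,
# which are not shown in this excerpt. Below is a minimal sketch, assuming
# `durations` is a list of per-client fit durations in seconds (the real data
# structure may carry additional fields) and that the next timeout is the chosen
# percentile of those durations, capped at `max_timeout`.
import math
from typing import List


def timeout_candidates(durations: List[float], max_timeout: int) -> List[float]:
    """Cap each observed duration at `max_timeout` (sketch)."""
    return [min(duration, max_timeout) for duration in durations]


def next_timeout(candidates: List[float], percentile: float) -> int:
    """Pick the timeout that covers `percentile` of the candidates (sketch)."""
    ordered = sorted(candidates)
    num_included = max(1, math.ceil(len(ordered) * percentile))
    return math.ceil(ordered[num_included - 1])
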