Example #1
 def on_aggregate_fit(
     self,
     rnd: int,
     results: List[Tuple[ClientProxy, FitRes]],
     failures: List[BaseException],
 ) -> Optional[Weights]:
     """Aggregate fit results using weighted average."""
     if not results:
         return None
     # Do not aggregate if there are failures and failures are not accepted
     if not self.accept_failures and failures:
         print(failures)
         return None
     # Convert results
     if glb.QUANTIZE:
         weights_results = [
             (modules.dequantize(self.dummy_model,
                                 parameters_to_weights(fit_res.parameters),
                                 glb.Q_BITS), fit_res.num_examples)
             for client, fit_res in results
         ]
     else:
         weights_results = [(parameters_to_weights(fit_res.parameters),
                             fit_res.num_examples)
                            for client, fit_res in results]
     return aggregate(weights_results)
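For reference, the weighted average returned by aggregate(weights_results) can be sketched as below. This is an illustrative re-implementation of a FedAvg-style weighted mean, not necessarily the library's exact code:

from functools import reduce
from typing import List, Tuple

import numpy as np

def weighted_average(results: List[Tuple[List[np.ndarray], int]]) -> List[np.ndarray]:
    """Example-count-weighted average of client weights (illustrative sketch)."""
    num_examples_total = sum(num_examples for _, num_examples in results)
    # Scale each client's layers by its number of examples
    weighted = [
        [layer * num_examples for layer in weights]
        for weights, num_examples in results
    ]
    # Sum layer-wise across clients and normalize by the total example count
    return [reduce(np.add, layers) / num_examples_total for layers in zip(*weighted)]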
Example #2
File: fedfs_v0.py Project: zliel/flower
    def aggregate_fit(
        self,
        rnd: int,
        results: List[Tuple[ClientProxy, FitRes]],
        failures: List[BaseException],
    ) -> Optional[Weights]:
        """Aggregate fit results using weighted average."""
        if not results:
            return None

        # Check if enough results are available
        completion_rate = len(results) / (len(results) + len(failures))
        if completion_rate < self.min_completion_rate_fit:
            # Not enough results for aggregation
            return None

        # Convert results
        weights_results = [(parameters_to_weights(fit_res.parameters),
                            fit_res.num_examples)
                           for client, fit_res in results]
        weights_prime = aggregate(weights_results)

        # Track contributions to the global model
        for client, fit_res in results:
            cid = client.cid
            contribution: Tuple[int, int, int] = (
                rnd,
                fit_res.num_examples,
                fit_res.num_examples_ceil,
            )
            if cid not in self.contributions.keys():
                self.contributions[cid] = []
            self.contributions[cid].append(contribution)

        return weights_prime
Example #3
 def evaluate(self, ins: EvaluateIns) -> EvaluateRes:
     """Evaluate the provided parameters using the locally held dataset."""
     parameters: List[np.ndarray] = parameters_to_weights(ins.parameters)
     num_examples, loss, accuracy = self.numpy_client.evaluate(
         parameters, ins.config
     )
     return EvaluateRes(num_examples=num_examples, loss=loss, accuracy=accuracy)
Example #4
    def fit(self, ins: FitIns) -> FitRes:
        """Refine the provided weights using the locally held dataset."""
        # Deconstruct FitIns
        weights: Weights = parameters_to_weights(ins.parameters)

        # Train
        fit_begin = timeit.default_timer()
        results = self.keras_client.fit(weights, ins.config)
        if len(results) == 3:
            results = cast(Tuple[List[np.ndarray], int, int], results)
            weights_prime, num_examples, num_examples_ceil = results
            metrics: Optional[Metrics] = None
        elif len(results) == 4:
            results = cast(Tuple[List[np.ndarray], int, int, Metrics], results)
            weights_prime, num_examples, num_examples_ceil, metrics = results

        # Return FitRes
        fit_duration = timeit.default_timer() - fit_begin
        weights_prime_proto = weights_to_parameters(weights_prime)
        return FitRes(
            parameters=weights_prime_proto,
            num_examples=num_examples,
            num_examples_ceil=num_examples_ceil,
            fit_duration=fit_duration,
            metrics=metrics,
        )
Example #5
    def fit(self, ins: FitIns) -> FitRes:
        """Refine the provided weights using the locally held dataset."""
        # Deconstruct FitIns
        parameters: List[np.ndarray] = parameters_to_weights(ins.parameters)

        # Train
        fit_begin = timeit.default_timer()
        results = self.numpy_client.fit(parameters, ins.config)
        if len(results) == 2:
            print(DEPRECATION_WARNING_FIT)
            results = cast(Tuple[List[np.ndarray], int], results)
            parameters_prime, num_examples = results
            metrics: Optional[Metrics] = None
        elif len(results) == 3:
            results = cast(Tuple[List[np.ndarray], int, Metrics], results)
            parameters_prime, num_examples, metrics = results

        # Return FitRes
        fit_duration = timeit.default_timer() - fit_begin
        parameters_prime_proto = weights_to_parameters(parameters_prime)
        return FitRes(
            parameters=parameters_prime_proto,
            num_examples=num_examples,
            num_examples_ceil=num_examples,  # Deprecated
            fit_duration=fit_duration,  # Deprecated
            metrics=metrics,
        )
Example #6
 def evaluate(self, ins: EvaluateIns) -> EvaluateRes:
     """Evaluate the provided weights using the locally held dataset."""
     weights: Weights = parameters_to_weights(ins.parameters)
     num_examples, loss, accuracy = self.keras_client.evaluate(
         weights, ins.config)
     return EvaluateRes(num_examples=num_examples,
                        loss=loss,
                        accuracy=accuracy)
Example #7
    def evaluate(self, ins: EvaluateIns) -> EvaluateRes:
        """Evaluate the provided parameters using the locally held dataset."""
        parameters: List[np.ndarray] = parameters_to_weights(ins.parameters)

        results = self.numpy_client.evaluate(parameters, ins.config)
        if len(results) == 3:
            if (
                isinstance(results[0], float)
                and isinstance(results[1], int)
                and isinstance(results[2], dict)
            ):
                # Forward-compatible case: loss, num_examples, metrics
                results = cast(Tuple[float, int, Metrics], results)
                loss, num_examples, metrics = results
                evaluate_res = EvaluateRes(
                    loss=loss,
                    num_examples=num_examples,
                    metrics=metrics,
                )
            elif (
                isinstance(results[0], int)
                and isinstance(results[1], float)
                and isinstance(results[2], float)
            ):
                # Legacy case: num_examples, loss, accuracy
                # This will be removed in a future release
                print(DEPRECATION_WARNING_EVALUATE_0)
                results = cast(Tuple[int, float, float], results)
                num_examples, loss, accuracy = results
                evaluate_res = EvaluateRes(
                    loss=loss,
                    num_examples=num_examples,
                    accuracy=accuracy,  # Deprecated
                )
            else:
                raise Exception(
                    "Return value expected to be of type (float, int, dict)."
                )
        elif len(results) == 4:
            # Legacy case: num_examples, loss, accuracy, metrics
            # This will be removed in a future release
            print(DEPRECATION_WARNING_EVALUATE_1)
            results = cast(Tuple[int, float, float, Metrics], results)
            assert isinstance(results[0], int)
            assert isinstance(results[1], float)
            assert isinstance(results[2], float)
            assert isinstance(results[3], dict)
            num_examples, loss, accuracy, metrics = results
            evaluate_res = EvaluateRes(
                loss=loss,
                num_examples=num_examples,
                accuracy=accuracy,  # Deprecated
                metrics=metrics,
            )
        else:
            raise Exception(EXCEPTION_MESSAGE_WRONG_RETURN_TYPE)

        return evaluate_res
Example #8
    def aggregate_fit(
        self,
        rnd: int,
        results: List[Tuple[ClientProxy, FitRes]],
        failures: List[BaseException],
    ) -> Tuple[Optional[Parameters], Dict[str, Scalar]]:
        """Aggregate fit results using weighted average."""
        if not results:
            return None, {}
        # Do not aggregate if there are failures and failures are not accepted
        if not self.accept_failures and failures:
            return None, {}
        # Convert results

        def norm_grad(grad_list: List[Weights]) -> float:
            # input: nested gradients
            # output: square of the L-2 norm
            client_grads = grad_list[0]
            for i in range(1, len(grad_list)):
                client_grads = np.append(
                    client_grads, grad_list[i]
                )  # output a flattened array
            return float(np.sum(np.square(client_grads)))

        deltas = []
        hs_ffl = []

        if self.pre_weights is None:
            raise Exception("QffedAvg pre_weights are None in aggregate_fit")

        weights_before = self.pre_weights
        eval_result = self.evaluate(weights_to_parameters(weights_before))
        if eval_result is not None:
            loss, _ = eval_result

        for _, fit_res in results:
            new_weights = parameters_to_weights(fit_res.parameters)
            # plug in the weight updates into the gradient
            grads = [
                (u - v) * 1.0 / self.learning_rate
                for u, v in zip(weights_before, new_weights)
            ]
            deltas.append(
                [np.float_power(loss + 1e-10, self.q_param) * grad for grad in grads]
            )
            # estimation of the local Lipschitz constant
            hs_ffl.append(
                self.q_param
                * np.float_power(loss + 1e-10, (self.q_param - 1))
                * norm_grad(grads)
                + (1.0 / self.learning_rate)
                * np.float_power(loss + 1e-10, self.q_param)
            )

        weights_aggregated: Weights = aggregate_qffl(weights_before, deltas, hs_ffl)
        return weights_to_parameters(weights_aggregated), {}
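The per-client deltas and Lipschitz estimates computed above feed into aggregate_qffl. A minimal sketch of the q-FedAvg server update they correspond to, following the q-FFL formulation (new weights = old weights minus the h-normalized sum of client deltas); this is an illustration under that assumption, not the library's implementation:

from typing import List

import numpy as np

def qffl_update(
    weights_before: List[np.ndarray],
    deltas: List[List[np.ndarray]],
    hs_ffl: List[float],
) -> List[np.ndarray]:
    """Subtract the h-normalized sum of client deltas from the previous weights (sketch)."""
    denominator = float(np.sum(hs_ffl))
    # Sum the per-client deltas layer by layer
    summed_deltas = [np.sum(np.stack(layers), axis=0) for layers in zip(*deltas)]
    return [w - d / denominator for w, d in zip(weights_before, summed_deltas)]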
Example #9
    def evaluate(self, ins: EvaluateIns) -> EvaluateRes:
        """Evaluate the provided weights using the locally held dataset."""
        # Deconstruct EvaluateIns
        weights: Weights = parameters_to_weights(ins[0])
        config = ins[1]

        # Evaluate and return
        num_examples, loss, accuracy = self.keras_client.evaluate(
            weights, config)
        return num_examples, loss, accuracy
Example #10
 def aggregate_fit(self, rnd: int, results: List[Tuple[ClientProxy,
                                                       FitRes]],
                   failures: List[BaseException]):
     # get step
     config = self.on_fit_config_fn(rnd)
     # discriminate the aggregation to be performed
     if config['model'] == 'k-FED':
         # initial checks
         if not results:
             return None, {}
         # Do not aggregate if there are failures and failures are not accepted
         if not self.accept_failures and failures:
             return None, {}
         # getting all centroids --> (n_clients, n_centroids, n_dimensions)
         all_centroids = np.array([
             parameters_to_weights(fit_res.parameters)
             for _, fit_res in results
         ])
         print('All centroids\' shape: {}'.format(all_centroids.shape))
         # pick, randomly, one client's centroids
         idx = self.rng.integers(0, all_centroids.shape[0], 1)
         # basis to be completed
         base_centroids = all_centroids[idx][0]
         # all other centroids
         other_centroids = all_centroids[
             np.arange(len(all_centroids)) != idx]
         other_centroids = np.concatenate(other_centroids, axis=0)
         # loop for completing the basis
         while base_centroids.shape[0] < config['n_clusters']:
             # all distances from the basis of centroids
             distances = [
                 distance_from_centroids(base_centroids, c)
                 for c in other_centroids
             ]
             # get the index of the maximum distance
             idx = np.argmax(distances)
             # add the new centroid --> (n_centroids, n_dimensions)
             base_centroids = np.concatenate(
                 (base_centroids, [other_centroids[idx]]), axis=0)
             print(base_centroids.shape)
         # Save base_centroids
         print(f"Saving base centroids...")
         np.savez("base_centroids.npz", *base_centroids)
         return weights_to_parameters(base_centroids), {}
     else:
         aggregated_weights = super().aggregate_fit(rnd, results, failures)
         # Save aggregated_weights
         print("Saving aggregated weights...")
         np.savez(agg_weights_filename, *aggregated_weights)
         return aggregated_weights
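distance_from_centroids is not shown in this excerpt. A plausible implementation, assumed here purely for illustration, is the minimum Euclidean distance from a candidate centroid to the current basis, which makes the loop above a farthest-point selection:

import numpy as np

def distance_from_centroids(base_centroids: np.ndarray, candidate: np.ndarray) -> float:
    """Smallest Euclidean distance from a candidate centroid to any basis centroid (assumed helper)."""
    return float(np.min(np.linalg.norm(base_centroids - candidate, axis=1)))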
Example #11
    def aggregate_fit(
        self,
        rnd: int,
        results: List[Tuple[ClientProxy, FitRes]],
        failures: List[BaseException],
    ) -> Tuple[Optional[Parameters], Dict[str, Scalar]]:
        """Aggregate fit results using weighted average."""
        if not results:
            return None, {}

        # Check if enough results are available
        completion_rate = len(results) / (len(results) + len(failures))
        if completion_rate < self.min_completion_rate_fit:
            # Not enough results for aggregation
            return None, {}

        # Convert results
        weights_results = [(parameters_to_weights(fit_res.parameters),
                            fit_res.num_examples)
                           for client, fit_res in results]
        weights_prime = aggregate(weights_results)

        if self.importance_sampling:
            # Track contributions to the global model
            for client, fit_res in results:
                cid = client.cid
                assert fit_res.num_examples_ceil is not None
                contribution: Tuple[int, int, int] = (
                    rnd,
                    fit_res.num_examples,
                    fit_res.num_examples_ceil,
                )
                if cid not in self.contributions.keys():
                    self.contributions[cid] = []
                self.contributions[cid].append(contribution)

        if self.dynamic_timeout:
            self.durations = []
            for client, fit_res in results:
                assert fit_res.fit_duration is not None
                assert fit_res.num_examples_ceil is not None
                cid_duration = (
                    client.cid,
                    fit_res.fit_duration,
                    fit_res.num_examples,
                    fit_res.num_examples_ceil,
                )
                self.durations.append(cid_duration)

        return weights_to_parameters(weights_prime), {}
Example #12
 def aggregate_fit(
     self,
     rnd: int,
     results: List[Tuple[ClientProxy, FitRes]],
     failures: List[BaseException],
 ):  # -> Optional[Weights]:
     aggregated_weights = super().aggregate_fit(rnd, results, failures)
     if aggregated_weights is not None:
         # Save aggregated_weights
         print("Saving aggregated_weights...")
         parameters = np.array(parameters_to_weights(aggregated_weights[0]),
                               dtype=object)
         np.savez(self.out_dir, parameters)
     return aggregated_weights
Example #13
    def fit(self, ins: FitIns) -> FitRes:
        """Refine the provided weights using the locally held dataset."""
        # Deconstruct FitIns
        weights: Weights = parameters_to_weights(ins[0])
        config = ins[1]

        # Train
        fit_begin = timeit.default_timer()
        weights_prime, num_examples, num_examples_ceil = self.keras_client.fit(
            weights, config)
        fit_duration = timeit.default_timer() - fit_begin

        # Return FitRes
        parameters = weights_to_parameters(weights_prime)
        return parameters, num_examples, num_examples_ceil, fit_duration
Example #14
File: server.py Project: vipulaSD/flower
    def _get_initial_parameters(self) -> Weights:
        """Get initial parameters from one of the available clients."""

        # Server-side parameter initialization
        parameters: Optional[Weights] = self.strategy.initialize_parameters(
            client_manager=self._client_manager)
        if parameters is not None:
            log(INFO, "Received initial parameters from strategy")
            return parameters

        # Get initial parameters from one of the clients
        random_client = self._client_manager.sample(1)[0]
        parameters_res = random_client.get_parameters()
        parameters = parameters_to_weights(parameters_res.parameters)
        log(INFO, "Received initial parameters from one random client")
        return parameters
Example #15
    def evaluate(self, ins: EvaluateIns) -> EvaluateRes:
        """Evaluate the provided weights using the locally held dataset."""
        weights: Weights = parameters_to_weights(ins.parameters)

        results = self.keras_client.evaluate(weights, ins.config)
        # Note that accuracy is deprecated and will be removed in a future release
        if len(results) == 3:
            results = cast(Tuple[int, float, float], results)
            num_examples, loss, accuracy = results
            metrics: Optional[Metrics] = None
        elif len(results) == 4:
            results = cast(Tuple[int, float, float, Metrics], results)
            num_examples, loss, accuracy, metrics = results
        return EvaluateRes(num_examples=num_examples,
                           loss=loss,
                           accuracy=accuracy,
                           metrics=metrics)
Example #16
def test_aggregate_fit_no_failures() -> None:
    """Test evaluate function."""
    # Prepare
    strategy = FaultTolerantFedAvg(min_completion_rate_fit=0.99)
    results: List[Tuple[ClientProxy, FitRes]] = [
        (MagicMock(), FitRes(Parameters(tensors=[], tensor_type=""), 1, 1,
                             0.1))
    ]
    failures: List[BaseException] = []
    expected: Optional[Weights] = []

    # Execute
    actual, _ = strategy.aggregate_fit(1, results, failures)

    # Assert
    assert actual
    assert parameters_to_weights(actual) == expected
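The Parameters/Weights conversion used throughout these examples serializes each NumPy array into the tensors field of Parameters. A minimal sketch of one way this round trip can work; older Flower versions used NumPy's binary format, but the exact details may differ between releases:

from io import BytesIO

import numpy as np

def ndarray_to_bytes(array: np.ndarray) -> bytes:
    """Serialize a single NumPy array to bytes."""
    buf = BytesIO()
    np.save(buf, array, allow_pickle=False)
    return buf.getvalue()

def bytes_to_ndarray(tensor: bytes) -> np.ndarray:
    """Deserialize a single NumPy array from bytes."""
    return np.load(BytesIO(tensor), allow_pickle=False)

# weights_to_parameters / parameters_to_weights then map helpers like these over the list of arrays.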
Example #17
File: fedavg.py Project: abh15/flower
 def on_aggregate_fit(
     self,
     rnd: int,
     results: List[Tuple[ClientProxy, FitRes]],
     failures: List[BaseException],
 ) -> Optional[Weights]:
     """Aggregate fit results using weighted average."""
     if not results:
         return None
     # Do not aggregate if there are failures and failures are not accepted
     if not self.accept_failures and failures:
         return None
     # Convert results
     weights_results = [(parameters_to_weights(fit_res.parameters),
                         fit_res.num_examples)
                        for client, fit_res in results]
     return aggregate(weights_results)
Example #18
File: fedyogi.py Project: sisco0/flower
    def aggregate_fit(
        self,
        rnd: int,
        results: List[Tuple[ClientProxy, FitRes]],
        failures: List[BaseException],
    ) -> Tuple[Optional[Parameters], Dict[str, Scalar]]:
        """Aggregate fit results using weighted average."""
        fedavg_parameters_aggregated, metrics_aggregated = super(
        ).aggregate_fit(rnd=rnd, results=results, failures=failures)
        if fedavg_parameters_aggregated is None:
            return None, {}

        fedavg_weights_aggregate = parameters_to_weights(
            fedavg_parameters_aggregated)

        # Yogi
        delta_t = [
            x - y
            for x, y in zip(fedavg_weights_aggregate, self.current_weights)
        ]

        # m_t
        if not self.m_t:
            self.m_t = [np.zeros_like(x) for x in delta_t]
        self.m_t = [
            self.beta_1 * x + (1 - self.beta_1) * y
            for x, y in zip(self.m_t, delta_t)
        ]

        # v_t
        if not self.v_t:
            self.v_t = [np.zeros_like(x) for x in delta_t]
        self.v_t = [
            x - (1.0 - self.beta_2) * np.multiply(y, y) *
            np.sign(x - np.multiply(y, y)) for x, y in zip(self.v_t, delta_t)
        ]

        new_weights = [
            x + self.eta * y / (np.sqrt(z) + self.tau)
            for x, y, z in zip(self.current_weights, self.m_t, self.v_t)
        ]

        self.current_weights = new_weights

        return weights_to_parameters(self.current_weights), metrics_aggregated
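The list comprehensions above implement the FedYogi server update from Reddi et al. (https://arxiv.org/abs/2003.00295). In equation form, with \Delta_t the FedAvg aggregate minus the previous global weights:

\Delta_t = \bar{x}_t^{\mathrm{FedAvg}} - x_{t-1}
m_t = \beta_1 m_{t-1} + (1 - \beta_1)\,\Delta_t
v_t = v_{t-1} - (1 - \beta_2)\,\Delta_t^{2}\,\operatorname{sign}\!\left(v_{t-1} - \Delta_t^{2}\right)
x_t = x_{t-1} + \eta\,\frac{m_t}{\sqrt{v_t} + \tau}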
Example #19
    def fit(self, ins: FitIns) -> FitRes:
        """Refine the provided weights using the locally held dataset."""
        # Deconstruct FitIns
        parameters: List[np.ndarray] = parameters_to_weights(ins.parameters)

        # Train
        fit_begin = timeit.default_timer()
        parameters_prime, num_examples = self.numpy_client.fit(parameters, ins.config)
        fit_duration = timeit.default_timer() - fit_begin

        # Return FitRes
        parameters_prime_proto = weights_to_parameters(parameters_prime)
        return FitRes(
            parameters=parameters_prime_proto,
            num_examples=num_examples,
            num_examples_ceil=num_examples,  # num_examples == num_examples_ceil
            fit_duration=fit_duration,
        )
Example #20
 def aggregate_fit(
     self,
     rnd: int,
     results: List[Tuple[ClientProxy, FitRes]],
     failures: List[BaseException],
 ) -> Tuple[Optional[Parameters], Dict[str, Scalar]]:
     """Aggregate fit results using weighted average."""
      if not results:
          return None, {}
     # Do not aggregate if there are failures and failures are not accepted
      if not self.accept_failures and failures:
          print(failures)
          return None, {}
     # Convert results
     weights_results = [
         (modules.dequantize(self.dummy_model, parameters_to_weights(fit_res.parameters), self.q_bits), fit_res.num_examples) for client, fit_res in results
     ]
     return weights_to_parameters(aggregate(weights_results)), {}
Example #21
 def evaluate(
         self, parameters: Parameters
 ) -> Optional[Tuple[float, Dict[str, Scalar]]]:
     """Evaluate model parameters using an evaluation function."""
     if self.eval_fn is None:
         # No evaluation function provided
         return None
     weights = parameters_to_weights(parameters)
     eval_res = self.eval_fn(weights)
     if eval_res is None:
         return None
     loss, other = eval_res
     if isinstance(other, float):
         print(DEPRECATION_WARNING)
         metrics = {"accuracy": other}
     else:
         metrics = other
     return loss, metrics
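A minimal sketch of an eval_fn that this strategy can call. It is entirely synthetic (the target array and the loss are made up for illustration) and shows the newer (loss, metrics-dict) return shape rather than the deprecated (loss, accuracy) form:

from typing import Dict, List, Optional, Tuple

import numpy as np

target = np.zeros((4,), dtype=np.float32)  # synthetic reference used only for this sketch

def eval_fn(weights: List[np.ndarray]) -> Optional[Tuple[float, Dict[str, float]]]:
    """Score the first weight tensor against a fixed target (illustrative only)."""
    loss = float(np.mean((weights[0] - target) ** 2))
    return loss, {"accuracy": 1.0 / (1.0 + loss)}

print(eval_fn([np.full((4,), 0.5, dtype=np.float32)]))  # e.g. (0.25, {'accuracy': 0.8})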
Example #22
def test_aggregate_fit() -> None:
    """Tests if adagrad function is aggregating correctly."""
    # Prepare
    previous_weights: Weights = [array([0.1, 0.1, 0.1, 0.1], dtype=float32)]
    strategy = FedAdagrad(
        eta=0.1,
        eta_l=0.316,
        tau=0.5,
        initial_parameters=weights_to_parameters(previous_weights),
    )
    param_0: Parameters = weights_to_parameters(
        [array([0.2, 0.2, 0.2, 0.2], dtype=float32)])
    param_1: Parameters = weights_to_parameters(
        [array([1.0, 1.0, 1.0, 1.0], dtype=float32)])
    bridge = MagicMock()
    client_0 = GrpcClientProxy(cid="0", bridge=bridge)
    client_1 = GrpcClientProxy(cid="1", bridge=bridge)
    results: List[Tuple[ClientProxy, FitRes]] = [
        (
            client_0,
            FitRes(param_0,
                   num_examples=5,
                   num_examples_ceil=5,
                   fit_duration=0.1),
        ),
        (
            client_1,
            FitRes(param_1,
                   num_examples=5,
                   num_examples_ceil=5,
                   fit_duration=0.1),
        ),
    ]
    expected: Weights = [array([0.15, 0.15, 0.15, 0.15], dtype=float32)]

    # Execute
    actual_aggregated, _ = strategy.aggregate_fit(rnd=1,
                                                  results=results,
                                                  failures=[])
    if actual_aggregated:
        actual_list = parameters_to_weights(actual_aggregated)
        actual = actual_list[0]
    assert (actual == expected[0]).all()
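The expected value of 0.15 follows from the FedAdagrad update with an all-zero initial accumulator; a quick check of the arithmetic (eta=0.1, tau=0.5):

import numpy as np

fedavg = (0.2 * 5 + 1.0 * 5) / 10                        # weighted average of both clients -> 0.6
delta_t = fedavg - 0.1                                   # update direction -> 0.5
v_t = 0.0 + delta_t ** 2                                 # Adagrad accumulator -> 0.25
new_weight = 0.1 + 0.1 * delta_t / (np.sqrt(v_t) + 0.5)  # 0.1 + 0.05 / 1.0 = 0.15
assert np.isclose(new_weight, 0.15)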
Example #23
 def aggregate_fit(
     self,
     rnd: int,
     results: List[Tuple[ClientProxy, FitRes]],
     failures: List[BaseException],
 ) -> Optional[Weights]:
     """Aggregate fit results using weighted average."""
     if not results:
         return None
     # Check if enough results are available
     completion_rate = len(results) / (len(results) + len(failures))
     if completion_rate < self.completion_rate_fit:
         # Not enough results for aggregation
         return None
     # Convert results
     weights_results = [(parameters_to_weights(fit_res.parameters),
                         fit_res.num_examples)
                        for client, fit_res in results]
     return aggregate(weights_results)
Example #24
File: qfedavg.py Project: sisco0/flower
    def configure_fit(
            self, rnd: int, parameters: Parameters,
            client_manager: ClientManager) -> List[Tuple[ClientProxy, FitIns]]:
        """Configure the next round of training."""
        weights = parameters_to_weights(parameters)
        self.pre_weights = weights
        parameters = weights_to_parameters(weights)
        config = {}
        if self.on_fit_config_fn is not None:
            # Custom fit config function provided
            config = self.on_fit_config_fn(rnd)
        fit_ins = FitIns(parameters, config)

        # Sample clients
        sample_size, min_num_clients = self.num_fit_clients(
            client_manager.num_available())
        clients = client_manager.sample(num_clients=sample_size,
                                        min_num_clients=min_num_clients)

        # Return client/config pairs
        return [(client, fit_ins) for client in clients]
Example #25
    def federated_personalized_evaluate(
            self, ins: EvaluateIns) -> Tuple[EvaluateRes, EvaluateRes]:
        """Evaluate the provided parameters using the locally held dataset."""
        parameters: List[np.ndarray] = parameters_to_weights(ins.parameters)

        results = self.numpy_client.federated_personalized_evaluate(
            parameters, ins.config)
        if len(results) == 2:  # two tuples of (baseline_res, personalized_res)
            if (isinstance(results[0], tuple)
                    and isinstance(results[1], tuple)):
                if (isinstance(results[0][0], float)
                        and isinstance(results[0][1], int)
                        and isinstance(results[0][2], dict)
                        and isinstance(results[1][0], float)
                        and isinstance(results[1][1], int)
                        and isinstance(results[1][2], dict)):
                    # Forward-compatible case: loss, num_examples, metrics
                    baseline_evaluate_results = cast(
                        Tuple[float, int, Metrics], results[0])
                    personalized_evaluate_results = cast(
                        Tuple[float, int, Metrics], results[1])

                    baseline_loss, baseline_num_examples, baseline_metrics = baseline_evaluate_results
                    personalized_loss, personalized_num_examples, personalized_metrics = personalized_evaluate_results

                    baseline_eval_res = EvaluateRes(
                        loss=baseline_loss,
                        num_examples=baseline_num_examples,
                        metrics=baseline_metrics,
                    )
                    personalized_eval_res = EvaluateRes(
                        loss=personalized_loss,
                        num_examples=personalized_num_examples,
                        metrics=personalized_metrics,
                    )
            else:
                raise Exception(
                    "Return value expected to be of type (tuple, tuple).")
        return baseline_eval_res, personalized_eval_res
Example #26
    def aggregate_fit(
        self,
        rnd: int,
        results: List[Tuple[ClientProxy, FitRes]],
        failures: List[BaseException],
    ) -> Tuple[Optional[Parameters], Dict[str, Scalar]]:
        """Aggregate fit results using weighted average."""
        fedavg_parameters_aggregated, metrics_aggregated = super(
        ).aggregate_fit(rnd=rnd, results=results, failures=failures)
        if fedavg_parameters_aggregated is None:
            return None, {}

        fedavg_aggregate = parameters_to_weights(fedavg_parameters_aggregated)
        aggregated_updates = [
            subset_weights - self.current_weights[idx]
            for idx, subset_weights in enumerate(fedavg_aggregate)
        ]

        # Adagrad
        delta_t = aggregated_updates
        if not self.v_t:
            self.v_t = [
                np.zeros_like(subset_weights) for subset_weights in delta_t
            ]

        self.v_t = [
            self.v_t[idx] + np.multiply(subset_weights, subset_weights)
            for idx, subset_weights in enumerate(delta_t)
        ]

        new_weights = [
            self.current_weights[idx] + self.eta * delta_t[idx] /
            (np.sqrt(self.v_t[idx]) + self.tau) for idx in range(len(delta_t))
        ]
        self.current_weights = new_weights

        return weights_to_parameters(self.current_weights), metrics_aggregated
Example #27
File: fedopt.py Project: pszemkor/flower
    def __init__(
        self,
        *,
        fraction_fit: float = 0.1,
        fraction_eval: float = 0.1,
        min_fit_clients: int = 2,
        min_eval_clients: int = 2,
        min_available_clients: int = 2,
        eval_fn: Optional[Callable[[Weights],
                                   Optional[Tuple[float,
                                                  Dict[str, Scalar]]]]] = None,
        on_fit_config_fn: Optional[Callable[[int], Dict[str, Scalar]]] = None,
        on_evaluate_config_fn: Optional[Callable[[int], Dict[str,
                                                             Scalar]]] = None,
        accept_failures: bool = True,
        initial_parameters: Parameters,
        eta: float = 1e-1,
        eta_l: float = 1e-1,
        tau: float = 1e-9,
    ) -> None:
        """Federated Optim strategy interface.

        Implementation based on https://arxiv.org/abs/2003.00295

        Args:
            fraction_fit (float, optional): Fraction of clients used during
                training. Defaults to 0.1.
            fraction_eval (float, optional): Fraction of clients used during
                validation. Defaults to 0.1.
            min_fit_clients (int, optional): Minimum number of clients used
                during training. Defaults to 2.
            min_eval_clients (int, optional): Minimum number of clients used
                during validation. Defaults to 2.
            min_available_clients (int, optional): Minimum number of total
                clients in the system. Defaults to 2.
            eval_fn (Callable[[Weights], Optional[Tuple[float, float]]], optional):
                Function used for validation. Defaults to None.
            on_fit_config_fn (Callable[[int], Dict[str, str]], optional):
                Function used to configure training. Defaults to None.
            on_evaluate_config_fn (Callable[[int], Dict[str, str]], optional):
                Function used to configure validation. Defaults to None.
            accept_failures (bool, optional): Whether or not accept rounds
                containing failures. Defaults to True.
            initial_parameters (Parameters): Initial set of parameters from the server.
            eta (float, optional): Server-side learning rate. Defaults to 1e-1.
            eta_l (float, optional): Client-side learning rate. Defaults to 1e-1.
            tau (float, optional): Controls the algorithm's degree of adaptability.
                Defaults to 1e-9.
        """
        super().__init__(
            fraction_fit=fraction_fit,
            fraction_eval=fraction_eval,
            min_fit_clients=min_fit_clients,
            min_eval_clients=min_eval_clients,
            min_available_clients=min_available_clients,
            eval_fn=eval_fn,
            on_fit_config_fn=on_fit_config_fn,
            on_evaluate_config_fn=on_evaluate_config_fn,
            accept_failures=accept_failures,
            initial_parameters=initial_parameters,
        )
        self.current_weights = parameters_to_weights(initial_parameters)
        self.eta = eta
        self.eta_l = eta_l
        self.tau = tau
Example #28
File: server.py Project: vballoli/flower
 def _get_initial_weights(self) -> Weights:
     """Get initial weights from one of the available clients."""
     random_client = self._client_manager.sample(1)[0]
     parameters_res = random_client.get_parameters()
     return parameters_to_weights(parameters_res.parameters)