def test_inverse_monotone_function(self, func, value, lower_x, upper_x,
                                   initial_guess_x, expected_x):
  search_parameters = common.BinarySearchParameters(
      lower_x, upper_x, initial_guess=initial_guess_x)
  self.assertAlmostEqual(
      expected_x,
      common.inverse_monotone_function(func, value, search_parameters))
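
A minimal standalone sketch of the call exercised by this test, assuming the `common` module is importable (the import path below is an assumption and may differ by library version); it inverts a simple decreasing function whose answer is known in closed form.

from dp_accounting import common  # assumed import path

# f(x) = 10 - x is decreasing on [0, 10]; the search returns (approximately)
# the smallest x with f(x) <= 7, i.e. x = 3.
search_parameters = common.BinarySearchParameters(0, 10, initial_guess=5)
x = common.inverse_monotone_function(
    lambda t: 10 - t, 7, search_parameters)
print(x)  # Expected to be approximately 3.
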
Example #2
  def from_privacy_guarantee(
      cls,
      privacy_parameters: common.DifferentialPrivacyParameters,
      sensitivity: float = 1,
      pessimistic_estimate: bool = True,
  ) -> 'GaussianPrivacyLoss':
    """Creates the privacy loss for Gaussian mechanism with desired privacy.

    Uses binary search to find the smallest possible standard deviation of the
    Gaussian noise for which the protocol is (epsilon, delta)-differentially
    private.

    Args:
      privacy_parameters: the desired privacy guarantee of the mechanism.
      sensitivity: the sensitivity of function f, i.e. the maximum absolute
        change in f when the input of a single user changes.
      pessimistic_estimate: a value indicating whether the rounding is done in
        such a way that the resulting epsilon-hockey stick divergence
        computation gives an upper estimate to the real value.

    Returns:
      The privacy loss of the Gaussian mechanism with the given privacy
      guarantee.
    """
    if privacy_parameters.delta == 0:
      raise ValueError('delta=0 is not allowed for the Gaussian mechanism')

    # The initial standard deviation is set to
    # sqrt(2 * ln(1.5/delta)) * sensitivity / epsilon. It is known that, when
    # epsilon is no more than one, the Gaussian mechanism with this standard
    # deviation is (epsilon, delta)-DP. See e.g. Appendix A in Dwork and Roth
    # book, "The Algorithmic Foundations of Differential Privacy".
    search_parameters = common.BinarySearchParameters(
        0,
        math.inf,
        initial_guess=math.sqrt(2 * math.log(1.5 / privacy_parameters.delta)) *
        sensitivity / privacy_parameters.epsilon)

    def _get_delta_for_standard_deviation(current_standard_deviation):
      return GaussianPrivacyLoss(
          current_standard_deviation,
          sensitivity=sensitivity).get_delta_for_epsilon(
              privacy_parameters.epsilon)

    standard_deviation = common.inverse_monotone_function(
        _get_delta_for_standard_deviation, privacy_parameters.delta,
        search_parameters)

    return GaussianPrivacyLoss(
        standard_deviation,
        sensitivity=sensitivity,
        pessimistic_estimate=pessimistic_estimate)
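
A rough usage sketch of the classmethod above: it requests a (1.0, 1e-5)-DP guarantee and reads back the resulting delta. The import paths are assumptions and may differ between library versions.

from dp_accounting import common  # assumed import paths
from dp_accounting import privacy_loss_mechanism

privacy_parameters = common.DifferentialPrivacyParameters(1.0, 1e-5)
pl = privacy_loss_mechanism.GaussianPrivacyLoss.from_privacy_guarantee(
    privacy_parameters, sensitivity=1.0)

# By construction, the delta at epsilon = 1.0 should not exceed 1e-5.
print(pl.get_delta_for_epsilon(1.0))
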
def test_inverse_monotone_function(self,
                                   func,
                                   value,
                                   lower_x,
                                   upper_x,
                                   initial_guess_x,
                                   expected_x,
                                   increasing,
                                   discrete=False):
  search_parameters = common.BinarySearchParameters(
      lower_x, upper_x, initial_guess=initial_guess_x, discrete=discrete)
  x = common.inverse_monotone_function(
      func, value, search_parameters, increasing=increasing)
  if expected_x is None:
    self.assertIsNone(x)
  else:
    self.assertAlmostEqual(expected_x, x)
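
A standalone sketch of the discrete, increasing case covered by this test, again assuming the `common` import from the earlier sketch.

# Discrete search over integers for an increasing function: the result is
# expected to be the largest integer x in [1, 100] with 3 * x <= 25, i.e. 8.
search_parameters = common.BinarySearchParameters(
    1, 100, initial_guess=10, discrete=True)
x = common.inverse_monotone_function(
    lambda t: 3 * t, 25, search_parameters, increasing=True)
print(x)  # Expected to be 8.
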
Example #4
  def from_privacy_guarantee(
      cls,
      privacy_parameters: common.DifferentialPrivacyParameters,
      sensitivity: int = 1,
  ) -> 'DiscreteGaussianPrivacyLoss':
    """Creates the privacy loss for discrete Gaussian mechanism with desired privacy.

    Uses binary search to find the smallest possible standard deviation of the
    discrete Gaussian noise for which the protocol is (epsilon, delta)-DP.

    Args:
      privacy_parameters: the desired privacy guarantee of the mechanism.
      sensitivity: the sensitivity of function f, i.e. the maximum absolute
        change in f when the input of a single user changes.

    Returns:
      The privacy loss of the discrete Gaussian mechanism with the given privacy
      guarantee.
    """
    if not isinstance(sensitivity, int):
      raise ValueError(f'Sensitivity is not an integer: {sensitivity}')
    if privacy_parameters.delta == 0:
      raise ValueError('delta=0 is not allowed for discrete Gaussian mechanism')

    # The initial standard deviation is set to
    # sqrt(2 * ln(1.5/delta)) * sensitivity / epsilon. It is known that, when
    # epsilon is no more than one, the (continuous) Gaussian mechanism with this
    # standard deviation is (epsilon, delta)-DP. See e.g. Appendix A in Dwork
    # and Roth book, "The Algorithmic Foundations of Differential Privacy".
    search_parameters = common.BinarySearchParameters(
        0,
        math.inf,
        initial_guess=math.sqrt(2 * math.log(1.5 / privacy_parameters.delta)) *
        sensitivity / privacy_parameters.epsilon)

    def _get_delta_for_sigma(current_sigma):
      return DiscreteGaussianPrivacyLoss(
          current_sigma,
          sensitivity=sensitivity).get_delta_for_epsilon(
              privacy_parameters.epsilon)

    sigma = common.inverse_monotone_function(
        _get_delta_for_sigma, privacy_parameters.delta, search_parameters)

    return DiscreteGaussianPrivacyLoss(sigma, sensitivity=sensitivity)
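
A similar hedged sketch for the discrete variant; note that the sensitivity must be an integer here. It assumes DiscreteGaussianPrivacyLoss and `common` are in scope as in the snippet above.

privacy_parameters = common.DifferentialPrivacyParameters(1.0, 1e-5)
discrete_pl = DiscreteGaussianPrivacyLoss.from_privacy_guarantee(
    privacy_parameters, sensitivity=2)

# A non-integer sensitivity (e.g. 2.5) would raise ValueError above.
print(discrete_pl.get_delta_for_epsilon(1.0))  # Expected to be at most 1e-5.
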
def get_smallest_epsilon_from_advanced_composition(
        total_privacy_parameters: common.DifferentialPrivacyParameters,
        num_queries: int,
        delta: float = 0) -> typing.Optional[float]:
    """Computes DP parameters that after a certain number of queries remain DP with given parameters.

  Using the optimal advanced composition theorem, Theorem 3.3 from the paper
  Kairouz, Oh, Viswanath. "The Composition Theorem for Differential Privacy",
  to compute DP parameter for an algorithm, so that when applied a given number
  of times it remains DP with given privacy parameters.

  Args:
    total_privacy_parameters: The desired privacy guarantee after applying the
      algorithm a given number of times.
    num_queries: Number of times the algorithm is invoked.
    delta: The value of DP parameter delta for the algorithm.

  Returns:
    epsilon such that if an algorithm is (epsilon, delta)-DP, then applying it
    the given number of times remains DP with total_privacy_parameters.

    None when total_privacy_parameters.delta is less than
    1 - (1 - delta)^num_queries for which no guarantee of
    total_privacy_parameters DP is possible for any value of epsilon.
  """
    if 1 - ((1 - delta)**num_queries) > total_privacy_parameters.delta:
        return None

    search_parameters = common.BinarySearchParameters(
        total_privacy_parameters.epsilon / num_queries,
        total_privacy_parameters.epsilon)

    def get_total_epsilon_for_epsilon(epsilon):
        privacy_parameters = common.DifferentialPrivacyParameters(
            epsilon, delta)
        return advanced_composition(privacy_parameters, num_queries,
                                    total_privacy_parameters.delta)

    return common.inverse_monotone_function(get_total_epsilon_for_epsilon,
                                            total_privacy_parameters.epsilon,
                                            search_parameters,
                                            increasing=True)
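
For instance, a hedged sketch of splitting a (1.0, 1e-5) total budget across 30 queries; `common` is assumed importable as before, and this function is assumed to be in scope.

total_privacy_parameters = common.DifferentialPrivacyParameters(1.0, 1e-5)
per_query_epsilon = get_smallest_epsilon_from_advanced_composition(
    total_privacy_parameters, num_queries=30, delta=0)

# The binary search runs over [1.0 / 30, 1.0], so the result (if not None)
# is expected to lie in that range.
print(per_query_epsilon)
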
def get_smallest_parameter(
    privacy_parameters: common.DifferentialPrivacyParameters, num_queries: int,
    privacy_loss_distribution_constructor: typing.Callable[
        [float], privacy_loss_distribution.PrivacyLossDistribution],
    search_parameters: common.BinarySearchParameters
) -> typing.Optional[float]:
    """Finds smallest parameter for which the mechanism satisfies desired privacy.

  This function computes the smallest "parameter" for which the corresponding
  mechanism, when run a specified number of times, satisfies a given privacy
  level. It is assumed that, when the parameter increases, the mechanism becomes
  more private.

  Args:
    privacy_parameters: The desired privacy guarantee.
    num_queries: Number of times the mechanism will be invoked.
    privacy_loss_distribution_constructor: A function that takes in a parameter
      and returns the privacy loss distribution for the corresponding mechanism
      for the given parameter.
    search_parameters: Parameters used for binary search.

  Returns:
    Smallest parameter for which the corresponding mechanism with that
    parameter, when applied the given number of times, satisfies the desired
    privacy guarantee. When no parameter in the given range satisfies this,
    return None.
  """
    def get_delta_for_parameter(parameter):
        pld_single_query = privacy_loss_distribution_constructor(parameter)
        pld_all_queries = pld_single_query.self_compose(num_queries)
        return pld_all_queries.get_delta_for_epsilon(
            privacy_parameters.epsilon)

    return common.inverse_monotone_function(get_delta_for_parameter,
                                            privacy_parameters.delta,
                                            search_parameters)
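
Finally, a hedged sketch of searching for the smallest Gaussian noise scale over 10 queries. `privacy_loss_distribution.from_gaussian_mechanism` is assumed to be the PLD constructor for the Gaussian mechanism; that name and the import path may differ between library versions.

from dp_accounting import privacy_loss_distribution  # assumed import path

privacy_parameters = common.DifferentialPrivacyParameters(1.0, 1e-5)
search_parameters = common.BinarySearchParameters(0, 100, initial_guess=1)

smallest_sigma = get_smallest_parameter(
    privacy_parameters,
    num_queries=10,
    privacy_loss_distribution_constructor=(
        lambda sigma: privacy_loss_distribution.from_gaussian_mechanism(sigma)),
    search_parameters=search_parameters)

# Smallest noise scale meeting the 10-query guarantee, or None if no value
# in [0, 100] suffices.
print(smallest_sigma)
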