def test_client_interaction_LS(self):
    laplace = Laplace(self.A, self.eps_share)
    ans = laplace.measure(self.X, self.prng)
    least_squares = LeastSquares()
    x_est = least_squares.infer(self.A, ans)

    self.assertEqual(self.X.shape, x_est.shape)

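# Illustrative sketch (plain numpy, not Ektelo's LeastSquares engine) of the
# inference step exercised above: recover an estimate of x from the noisy
# answers by solving the ordinary least-squares problem min_x ||A x - y||_2.
import numpy as np

def _least_squares_sketch(A, y):
    x_est, *_ = np.linalg.lstsq(np.asarray(A), np.asarray(y), rcond=None)
    return x_est
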
def test_client_interaction_NLS(self):
    laplace = Laplace(self.A, self.eps_share)
    ans = laplace.measure(self.X, self.prng)
    non_neg_least_squares = NonNegativeLeastSquares()
    x_est = non_neg_least_squares.infer(self.A, ans)

    self.assertEqual(self.X.shape, x_est.shape)

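# The non-negative variant can be sketched with SciPy's nnls solver (again an
# illustration under assumed dense inputs, not Ektelo's NonNegativeLeastSquares):
from scipy.optimize import nnls

def _nnls_sketch(A, y):
    # minimizes ||A x - y||_2 subject to x >= 0
    x_est, _residual = nnls(A, y)
    return x_est
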
def test_client_interaction_WNLS(self):
    laplace = Laplace(self.A, self.eps_share)
    ans = laplace.measure(self.X, self.prng)
    engine = WorkloadNonNegativeLeastSquares(self.A)
    x_est = engine.infer(self.A, ans)

    self.assertEqual(self.X.shape, x_est.shape)

def test_client_interaction_MW(self):
    laplace = Laplace(self.A, self.eps_share)
    ans = laplace.measure(self.X, self.prng)
    x_est_init = np.random.rand(self.n)
    mult_weight = MultiplicativeWeights()
    x_est = mult_weight.infer(self.A, ans, x_est_init)

    self.assertEqual(self.X.shape, x_est.shape)

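# Hypothetical sketch of the multiplicative-weights update such an engine
# typically performs (the classical MWEM-style step, shown for one noisy linear
# measurement; this is not Ektelo's MultiplicativeWeights implementation, and
# every name below is made up for illustration).
import numpy as np

def _mw_update_sketch(x_est, a_row, noisy_answer, eta=1.0):
    total = x_est.sum()
    error = noisy_answer - a_row.dot(x_est)               # signed estimation error
    x_new = x_est * np.exp(eta * a_row * error / (2.0 * total))
    return x_new * (total / x_new.sum())                  # keep the total mass fixed
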
def test_laplace_operator(self):
    laplace = Laplace(self.A, self.eps_share)
    actual_meas = laplace.measure(self.X, np.random.RandomState(self.seed))

    prng = np.random.RandomState(self.seed)
    target_meas = self.n + prng.laplace(0.0, self.n / self.eps_share, self.n)

    np.testing.assert_array_equal(target_meas, actual_meas)

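# Minimal standalone sketch of the identity the seeded test above relies on:
# the Laplace mechanism returns A @ x plus i.i.d. Laplace(0, sensitivity / eps)
# noise, so two runs with identically seeded generators coincide exactly
# (plain numpy; the values below are arbitrary, not the fixture's).
import numpy as np

def _laplace_mechanism_sketch(A, x, eps, sensitivity, prng):
    return A.dot(x) + prng.laplace(0.0, sensitivity / eps, A.shape[0])

_A = np.ones((4, 4))
_x = np.ones(4)
_first = _laplace_mechanism_sketch(_A, _x, 0.5, 4.0, np.random.RandomState(0))
_second = _laplace_mechanism_sketch(_A, _x, 0.5, 4.0, np.random.RandomState(0))
np.testing.assert_array_equal(_first, _second)   # same seed, same measurement
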
def test_client_interaction_HR(self):
    laplace = Laplace(self.A, self.eps_share)
    ans = laplace.measure(self.X, self.prng)
    eps_par = 0.1
    eta = 0.35
    ratio = 0.85
    AHP_threshold = AHPThresholding(eta, ratio)
    x_est = AHP_threshold.infer(self.A, ans, eps_par)

    self.assertEqual(self.X.shape, x_est.shape)

def test_client_interaction(self):
    manager = PrivateManager(self.source_uri, None)
    manager._load_data = lambda source_uri: np.ones((self.n, ))

    n1 = manager.transform(Reshape((self.n, )))
    state = manager.measure(n1, Laplace(self.A, self.eps_share), self.eps_share)

def synthesize(self, file_path, eps, seed):
    # setup random state
    prng = np.random.RandomState(seed)

    # load data vector
    relation = Relation(self.config)
    relation.load_csv(file_path)
    self._numerize(relation._df)

    # perform measurement
    attributes = [field_name for field_name in self.config.keys()]
    measurements = []
    w_sum = sum(Ai.weight for Ai in self.strategy.matrices)
    for Ai in self.strategy.matrices:
        w = Ai.weight
        proj = [attributes[i] for i, B in enumerate(Ai.base.matrices)
                if type(B).__name__ != 'Ones']
        matrix = [B for B in Ai.base.matrices if type(B).__name__ != 'Ones']
        matrix = EkteloMatrix(np.ones((1, 1))) if len(matrix) == 0 else Kronecker(matrix)

        proj_rel = copy.deepcopy(relation)
        proj_rel.project(proj)
        if proj_rel.df.shape[1] == 0:
            x = np.array([proj_rel.df.shape[0]])
        else:
            x = Vectorize('').transform(proj_rel).flatten()

        y = Laplace(matrix, w * eps / w_sum).measure(x, prng)
        measurements.append((matrix.sparse_matrix(), y, 1.0 / w, proj))

    # generate synthetic data
    sizes = [field['bins'] for field in self.config.values()]
    dom = Domain(attributes, sizes)
    engine = FactoredInference(dom)
    model = engine.estimate(measurements)
    df = model.synthetic_data().df
    self._denumerize(df)
    self._sample_numerical(df)

    return df

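# Small arithmetic sketch of the per-measurement budget split in the loop above:
# each strategy matrix Ai gets w * eps / w_sum of the budget, so the shares of
# all matrices always sum back to the total eps (weights below are hypothetical).
_weights = [2.0, 1.0, 0.5]
_eps = 1.0
_shares = [w * _eps / sum(_weights) for w in _weights]
assert abs(sum(_shares) - _eps) < 1e-12
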
def test_get_y(self):
    y = Laplace(self.A, self.eps_share).measure(self.X, self.prng)
    noise_scales = [laplace_scale_factor(self.A, self.eps_share)] * len(y)

    np.testing.assert_array_equal(np.diag(y * get_A(self.A, noise_scales)),
                                  get_y(y, noise_scales).flatten())

def test_get_A(self):
    y = Laplace(self.A, self.eps_share).measure(self.X, self.prng)
    noise_scales = [laplace_scale_factor(self.A, self.eps_share)] * len(y)

    np.testing.assert_array_equal(np.array(noise_scales),
                                  1 / np.diag(get_A(self.A, noise_scales)))

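# Sketch of the weighting idea the two tests above check, in plain numpy rather
# than Ektelo's get_A / get_y helpers: dividing each row of A and each entry of
# y by its noise scale yields the whitened system used for weighted least squares.
import numpy as np

def _whiten_sketch(A, y, noise_scales):
    d = 1.0 / np.asarray(noise_scales)
    return np.diag(d).dot(np.asarray(A)), d * np.asarray(y)   # rescaled queries and answers
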
def laplace_scale_factor(A, eps):
    sensitivity = Laplace.sensitivity_L1(A)
    laplace_scale = util.old_div(sensitivity, float(eps))

    return math.sqrt(2.0 * laplace_scale**2)
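
# Why the helper returns sqrt(2.0 * laplace_scale**2): a Laplace(0, b) variable
# has variance 2 * b**2, so the value above is simply the standard deviation of
# the noise that Laplace(A, eps) adds. Quick empirical check (arbitrary numbers):
import math
import numpy as np

_b = 2.0                                                  # stand-in for sensitivity / eps
_samples = np.random.RandomState(0).laplace(0.0, _b, 1_000_000)
assert abs(_samples.std() - math.sqrt(2.0) * _b) < 0.05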