Ejemplo n.º 1
0
def probability_values(datasize,epsilon,delta,prior,observation, mech):
	"""Return the output distribution of one exponential-mechanism variant.

	Builds a Bayesian inference model from the Dirichlet ``prior`` and the
	``observation``, sets up the mechanism selected by the plot-label string
	``mech``, and maps each candidate posterior (keyed by ``str(z._alphas)``)
	to the probability the mechanism assigns to it.

	Args:
		datasize: total data size n passed to the model.
		epsilon: privacy parameter.
		delta: privacy parameter.
		prior: Dirichlet prior object (project type).
		observation: observed counts handed to the model.
		mech: one of the three label strings below; any other value
			yields an empty dict (same as the original if/elif chain).

	Returns:
		dict mapping str(candidate._alphas) -> probability.
	"""
	Bayesian_Model = BayesInferwithDirPrior(prior, datasize, epsilon, delta)
	Bayesian_Model._set_observation(observation)

	Bayesian_Model._set_candidate_scores()
	Bayesian_Model._set_local_sensitivities()

	# Dispatch table: mech label -> (setup routine, probabilities attribute).
	# Hoists the branching out of the per-candidate loop, which previously
	# repeated the same if/elif chain on every iteration.
	dispatch = {
		r'Alg 3 - $\mathsf{EHD}$': (Bayesian_Model._set_up_exp_mech_with_GS, '_GS_probabilities'),
		r'Alg 4 - $\mathsf{EHDL}$': (Bayesian_Model._set_up_exp_mech_with_LS, '_LS_probabilities'),
		r'Alg 5 - $\mathsf{EHDS}$': (Bayesian_Model._set_up_exp_mech_with_gamma_SS, '_gamma_SS_probabilities'),
	}

	if mech not in dispatch:
		# Unknown mechanism label: original code fell through every branch
		# and returned an empty mapping.
		return {}

	setup, prob_attr = dispatch[mech]
	setup()
	probabilities = getattr(Bayesian_Model, prob_attr)

	probabilities_exp = {}
	for z, p in zip(Bayesian_Model._candidates, probabilities):
		probabilities_exp[str(z._alphas)] = p

	return probabilities_exp
Ejemplo n.º 2
0
def exp_distribution_over_candidates(dataobs, prior, epsilon, mech):
    """Return the exponential-mechanism distribution over prior-subtracted candidates.

    Builds the inference model from ``dataobs`` (data size is its sum),
    sets up the mechanism named by ``mech`` ("exp" = global sensitivity,
    "gamma" = gamma smooth sensitivity), and keys each candidate's
    probability by ``list2key`` of its alphas after subtracting the prior.

    Args:
        dataobs: observed counts; ``sum(dataobs)`` is the data size.
        prior: Dirichlet prior object (project type).
        epsilon: privacy parameter.
        mech: "exp" or "gamma"; any other value yields an empty dict,
            matching the original if/elif fall-through.

    Returns:
        dict mapping list2key(candidate alphas minus prior) -> probability.
    """
    n = sum(dataobs)
    Bayesian_Model = BayesInferwithDirPrior(prior, n, epsilon)
    Bayesian_Model._set_observation(dataobs)

    Bayesian_Model._set_candidate_scores()
    Bayesian_Model._set_local_sensitivities()

    # Dispatch table hoists the mech branching out of the loop, where the
    # original repeated the same if/elif test once per candidate.
    dispatch = {
        "exp": (Bayesian_Model._set_up_exp_mech_with_GS, "_GS_probabilities"),
        "gamma": (Bayesian_Model._set_up_exp_mech_with_gamma_SS, "_gamma_SS_probabilities"),
    }

    if mech not in dispatch:
        return {}

    setup, prob_attr = dispatch[mech]
    setup()
    probabilities = getattr(Bayesian_Model, prob_attr)

    exp_prob = {}
    for candidate, p in zip(Bayesian_Model._candidates, probabilities):
        z = candidate._pointwise_sub(prior)
        exp_prob[list2key(z._alphas)] = p

    return exp_prob
Ejemplo n.º 3
0
def row_discrete_probabilities(sample_size, epsilon, delta, prior,
                               observation):
    """Plot the discrete output distributions of five DP mechanisms.

    Builds a Bayesian inference model for the given Dirichlet ``prior`` and
    ``observation``, sets up four exponential-mechanism variants (gamma
    smooth sensitivity, smooth sensitivity, global sensitivity, local
    sensitivity), bins the candidate posteriors by identical score, sums
    each mechanism's probability mass per bin, does the same for two
    Laplace baselines with dimension-dependent sensitivities, and renders
    all five curves with matplotlib.

    NOTE: Python 2 code (print statements). Prints diagnostics, shows a
    figure via plt.show(), and returns None.
    """

    Bayesian_Model = BayesInferwithDirPrior(prior, sample_size, epsilon, delta)
    Bayesian_Model._set_observation(observation)
    print Bayesian_Model._observation_counts

    # Score every candidate, then precompute the output probabilities for
    # all four exponential-mechanism variants up front.
    Bayesian_Model._set_candidate_scores()
    Bayesian_Model._set_local_sensitivities()
    Bayesian_Model._set_up_exp_mech_with_gamma_SS()
    Bayesian_Model._set_up_exp_mech_with_SS()
    Bayesian_Model._set_up_exp_mech_with_GS()
    Bayesian_Model._set_up_exp_mech_with_LS()

    #############################################################################
    #SPLIT THE BINS
    #############################################################################

    # Maps str(score) -> list of candidates sharing that exact score.
    Candidate_bins_by_step = {}
    probability_distance_pairs_in_exp = []

    nomalizer = 0.0

    # Sort candidates by score so equal-score runs are adjacent; each run
    # becomes one bin.
    sorted_scores = sorted(Bayesian_Model._candidate_scores.items(),
                           key=operator.itemgetter(1))
    counter = 0
    while counter < len(sorted_scores):
        # 'flage' marks the start index of the current equal-score run.
        flage = counter
        key = str(sorted_scores[flage][1])
        Candidate_bins_by_step[key] = []
        # parameters_of_bin = []
        # Advance 'counter' while the score matches the run's first score,
        # collecting the candidates of this bin.
        while counter < len(sorted_scores) and sorted_scores[flage][
                1] == sorted_scores[counter][1]:
            Candidate_bins_by_step[key].append(sorted_scores[counter][0])
            # parameters_of_bin.append(sorted_scores[counter][0]._alphas)
            counter += 1
        # print parameters_of_bin
        print key

    #############################################################################
    #SPLIT THE BINS
    #############################################################################

    # Candidate_bins_by_step = {}
    # for r in Bayesian_Model._candidates:
    # 	if str(sorted(r._alphas)) not in Candidate_bins_by_step.keys():
    # 		Candidate_bins_by_step[str(sorted(r._alphas))] = []
    # 		for c in Bayesian_Model._candidates:
    # 			if set(c._alphas) == set(r._alphas):
    # 				Candidate_bins_by_step[str(sorted(r._alphas))].append(c)

    #############################################################################
    #SUM UP the prob within the same bin
    #############################################################################

    # exp = calculate_prob_exp(Candidate_bins_by_step, Bayesian_Model, mechanism_parameter = 4,
    # 	sensitivity = Bayesian_Model._SS, savename = "_exp.txt")

    #############################################################################
    #SUM UP the prob within the same bin
    #############################################################################

    # Per-bin probability mass for the gamma smooth-sensitivity mechanism.
    exp_gamma = calculate_prob_exp(Candidate_bins_by_step, Bayesian_Model,
                                   "gamma")

    #############################################################################
    #SUM UP the prob within the same bin
    #############################################################################
    # Per-bin probability mass for the local-sensitivity mechanism.
    exp_LS = calculate_prob_exp(Candidate_bins_by_step, Bayesian_Model,
                                "local")

    #############################################################################
    #SUM UP the prob within the same bin
    #############################################################################
    # Per-bin probability mass for the global-sensitivity mechanism.
    exp_GS = calculate_prob_exp(Candidate_bins_by_step, Bayesian_Model, "exp")

    #############################################################################
    #SETTING THE SENSITIVITY
    #############################################################################

    # Laplace baseline sensitivities depend on the dimension of the prior:
    # binomial case (2 alphas) uses 1.0 / 2.0, otherwise 2.0 / dimension.
    if (len(prior._alphas) == 2):
        sensitivity1 = 1.0
        sensitivity2 = 2.0
    else:
        sensitivity1 = 2.0
        sensitivity2 = len(prior._alphas) * 1.0

    #############################################################################
    #CALCULATE the Laplace prob within the same bin
    #############################################################################

    # Both calls also return the common x-axis ('step'); the second
    # assignment overwrites the first with an identical value.
    step, lap_1 = calculate_prob_lap(Candidate_bins_by_step,
                                     Bayesian_Model,
                                     sensitivity=sensitivity1,
                                     savename="_lap_1.txt")

    step, lap_2 = calculate_prob_lap(Candidate_bins_by_step,
                                     Bayesian_Model,
                                     sensitivity=sensitivity2,
                                     savename="_lap_2.txt")

    print step, exp_gamma, lap_1, lap_2
    # return

    #############################################################################
    #PLOT the prob within the same bin
    #############################################################################

    #############################################################################
    #LABELS SETTING
    #############################################################################

    # Legend labels; indices below are chosen per-curve when plotting.
    labels = [
        r'Alg 5 - $\mathsf{EHDS}$ ', r"Alg 4 - $\mathsf{EHDL}$",
        r"Alg 3 - $\mathsf{EHD}$",
        r'Alg 1 - $\mathsf{LSDim}$ (sensitivity = ' + str(sensitivity2) + ')',
        r'Alg 2 - $\mathsf{LSHist}$ (sensitivity = ' + str(sensitivity1) + ')'
    ]

    #############################################################################
    #PLOTTING
    #############################################################################

    plt.figure()
    plt.plot(step, lap_2, '-', color='lightblue', label=(labels[3]))

    plt.plot(step, lap_1, '-', color='navy', label=(labels[4]))

    plt.plot(step, exp_GS, '-', label=(labels[2]))

    plt.plot(step, exp_LS, '-', label=(labels[1]))

    plt.plot(step, exp_gamma, '-', label=(labels[0]))

    # plt.plot(step, exp,  label=(labels[0]))

    # plt.plot(step, exp_new, label=(labels[1]))

    # plt.plot(step, exp_LS,  label=(labels[2]))

    # plt.plot(step, exp_GS,  label=(labels[3]))

    # plt.plot(step, lap_1, color = 'navy', label=(labels[4]))

    # plt.plot(step, lap_2, color = 'lightblue', label=(labels[5]))

    #############################################################################
    #PLOT FEATURE SETTING
    #############################################################################

    plt.xlabel("c / Hellinger distance from true posterior")
    plt.ylabel("Pr[H(BI(x),r) = c]")
    plt.title("Accuracy with Data size " + str(sample_size), fontsize=15)
    plt.legend()
    plt.grid()
    plt.show()