def blind_lucy_wrapper(image, max_its=8, its=5, N_filter=3, weiner=False,
                       estimation_noise=0, filter_estimation=1,
                       observation_noise=0):
    # load the image, collapse any color channels, and normalize to [0, 1]
    f = io.imread('../data/' + image + '.png').astype(float)
    if len(f.shape) == 3: f = f.mean(axis=2)
    f /= f.max()
    print(f.shape)

    # true blur kernel g and the (possibly mis-scaled) initial kernel estimate g_k
    g = helper.gaussian(sigma=N_filter/3, N=N_filter)
    g_k = helper.gaussian(sigma=N_filter/3 * filter_estimation, N=N_filter)
    g_0 = g_k.copy()

    # observations: the true image blurred by g
    c = fftconvolve(f, g, mode='same')
    #c += observation_noise*np.random.randn(*c.shape)

    # initial image estimate: the true image plus optional noise
    f_k = f + estimation_noise*np.random.randn(*f.shape)
    #f_k = c.copy()

    for k in range(int(max_its)):
        # alternate: refine the kernel estimate, then the image estimate
        g_k = richardson_lucy(g_k, f_k, iterations=int(its), clip=True)
        if weiner: f_k = wiener(f_k, g_k, 1e-5)
        else: f_k = richardson_lucy(f_k, g_k, iterations=int(its), clip=True)

        print("on {}, f.max() = {:0.3e}, g.max() = {:0.3e}".format(k, np.abs(f_k.max()),
                                                         np.abs(g_k.max())))

    f_k, g_k = np.abs(f_k), np.abs(g_k)
    helper.show_images({'estimation':f_k, 'original':f, 'observations':c})

def blind_lucy_wrapper(image, max_its=8, its=5, N_filter=None, weiner=False,
                       estimation_noise=0, filter_estimation=1,
                       observation_noise=0):
    f = io.imread('../data/' + image + '.png').astype(float)
    if len(f.shape) == 3: f = f.mean(axis=2)
    f /= f.max()

    if N_filter:
        g = helper.gaussian(sigma=N_filter/3, N=N_filter)
        g_k = helper.gaussian(sigma=N_filter/3 * filter_estimation, N=N_filter)
        g_0 = g_k.copy()
    else:
        N = f.shape[0]
        sigma = N/40
        g = helper.gaussian(sigma=sigma, N=N)
        g_k = helper.gaussian(sigma=filter_estimation*sigma, N=N)

    c = fftconvolve(f, g, mode='same')
    #c += observation_noise*np.random.randn(*c.shape)

    #f_k = f + estimation_noise*np.random.randn(*f.shape)
    f_k = c.copy()

    for k in range(int(max_its)):
        f_k1 = f_k.copy()
        for i in range(its):
            # blind RL: update the kernel given the image, then the image given the kernel
            g_k = richardson_lucy(c, f_k1, g_k, iterations=1, clip=False)
            f_k = richardson_lucy(c, g_k,  f_k, iterations=1, clip=False)

        print("on {}, f.max() = {:0.3e}, g.max() = {:0.3e}".format(k, np.abs(f_k.max()), np.abs(g_k.max())))

    f_k, g_k = np.abs(f_k), np.abs(g_k)
    helper.show_images({'estimation':f_k, 'original':f, 'observations':c,
        'kernel estimate':g_k})
    return f_k, g_k
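
# NOTE: the three-argument richardson_lucy used above is not defined in this
# snippet. The sketch below is an assumption about what it might do:
# richardson_lucy(c, psf, estimate) applies `iterations` multiplicative
# Richardson-Lucy updates to `estimate` while treating `psf` as fixed, which is
# what the wrapper needs in order to alternate kernel and image updates.
import numpy as np
from scipy.signal import fftconvolve

def _richardson_lucy_sketch(c, psf, estimate, iterations=1, clip=False, eps=1e-12):
    psf_mirror = psf[::-1, ::-1]                              # spatially flipped psf
    for _ in range(int(iterations)):
        blurred = fftconvolve(estimate, psf, mode='same')     # estimate (*) psf
        relative_blur = c / (blurred + eps)                   # observations / re-blurred estimate
        estimate = estimate * fftconvolve(relative_blur, psf_mirror, mode='same')
    if clip:
        estimate = np.clip(estimate, -1, 1)
    return estimate
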
Example #3
def main():
    # One party in the private set intersection protocol: build a polynomial over
    # the local set, encrypt its coefficients under the center's Paillier key,
    # then evaluate the combined polynomial received back from the center.
    tcp_ip = '127.0.0.1'
    tcp_port = int(sys.argv[1])
    buffer_size = 65536
    data = [3, 6, 9, 10, 15]
    xi = 4
    add_to_set = int(sys.argv[2])
    starting_at = int(sys.argv[3])

    #print "Arg "+ str(sys.argv[1])
    for i in range(add_to_set):
        data.append(3000 + i)
    #generate keys
    #priv, pub = p.generate_keypair(128)
    #print "pub " + str(pub)
    #shuffle data for blinding
    random.shuffle(data)

    #get keys
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((tcp_ip, tcp_port))
    recvk = s.recv(buffer_size)
    pubkey = pickle.loads(recvk)
    #privkey = s.recv(buffer_size)
    #generate and encrypt polynomial
    localP = helper.polyCoefficients(data)
    encP = []
    for coeff in localP:
        encP.append(p.encrypt(pubkey, coeff))
    #send polynomial to center
    data_string = pickle.dumps(encP)
    s.send(data_string)
    #s.close()

    #receive P from center
    #s.connect((tcp_ip, tcp_port))
    recvd = s.recv(buffer_size)

    #s.close()
    P = pickle.loads(recvd)
    #print "pubkey " + str(pubkey)
    #print "poly " + str(P)
    #evaluate P
    evaluated = helper.polyEvaluate(pubkey, P, data)

    #draw noise
    n = int(round(helper.gaussian(xi, 3)))  #not working yet; cast so range() accepts it
    #print "noise: " + str(n)
    #ensure it is not negative
    if n < 0:
        n = 0
    #add noise to encrypted values (encryptions of 0 act as dummy padding)
    for i in range(n):
        evaluated.append(p.encrypt(pubkey, 0))

    #shuffle evaluated values for blinding
    random.shuffle(evaluated)

    #send evaluated values to center
    #s.connect((tcp_ip, tcp_port))
    data_string = pickle.dumps(evaluated)
    s.send(data_string)
    s.close()
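
# NOTE: helper.polyCoefficients is not shown in this snippet. A hedged sketch of
# what it plausibly computes: the coefficients (constant term first) of the
# polynomial whose roots are exactly the party's set elements,
# P(x) = prod_i (x - d_i), so that P(x) == 0 iff x is in the set.
# The name _poly_coefficients_sketch is a hypothetical stand-in.
def _poly_coefficients_sketch(data):
    coeffs = [1]                              # start from the constant polynomial 1
    for d in data:
        nxt = [0] * (len(coeffs) + 1)
        for i, c in enumerate(coeffs):        # multiply the current polynomial by (x - d)
            nxt[i] += -d * c                  # contribution of the -d term
            nxt[i + 1] += c                   # contribution of the x term
        coeffs = nxt
    return coeffs
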
Example #4
def main():
    # Center of the private set intersection protocol: generate the Paillier keypair,
    # launch and connect the parties, sum all encrypted polynomials, redistribute the
    # result, and tally the zero decryptions.
    start_time = time.time()
    tcp_ip = '127.0.0.1'
    tcp_port = 5013
    tcp_port2 = 5005
    tcp_port3 = 5006
    buffer_size = 65536
    num_parties = int(sys.argv[1])
    add_to_set = int(sys.argv[2])
    xi = 4
    data = [2,4,6,8,10]

    for i in range(add_to_set):
        data.append(100 + i)

    s = []
    for i in range(num_parties):
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.bind((tcp_ip, tcp_port))
        sock.listen(1)
        s.append(sock)
        tcp_port += 1

    print "launching parties"
    cur_port = 5013
    offset = 200
    while (cur_port <= tcp_port):
        subprocess.Popen(["python", "party.py", str(cur_port), str(add_to_set), str(offset)])
        cur_port += 1
        offset += 100 
    print "launched"
    #generate keys
    privkey, pubkey = p.generate_keypair(128)
    #print "pub " + str(pub)

    #share keys
    print "sharing keys"
    conn = [None] * len(s)
    addr = [None] * len(s)
    pubdump = pickle.dumps(pubkey)
    for i in range(num_parties):
        conn[i], addr[i] = s[i].accept()
        conn[i].send(pubdump)

    #generate polynomial for center
    print "generating center polynomial"
    POne = helper.polyCoefficients(data)
    encP = []
    print "encrypting center polynomial"
    for coeff in POne:
        encP.append(p.encrypt(pubkey, coeff))
    POne = encP

    #get P2 and P3
    print "receiving other polynomials"
    #conn2, addr2 = s2.accept()
    poly_arr = []
    for i in range(num_parties):
        recvd = conn[i].recv(buffer_size)
        poly_arr.append(pickle.loads(recvd))

    #create P by summing polynomials
    print "summing polynomials"
    #print "POne: " + str(POne) 
    #print "PTwo: " + str(PTwo) 
    #print "PThree: " + str(PThree) 
    P = encP
    for i in range(len(poly_arr)):
        P = helper.betterPolySum(pubkey, P, poly_arr[i])

    #print "poly is " + str(P) 
    #send P to C2 and C3
    print "distributing P"
    polydata = pickle.dumps(P)

    for i in range(num_parties):
        conn[i].send(polydata)
    
    #evaluate P
    print "evaluating P"
    evaluated = helper.polyEvaluate(pubkey, P, data)

    #draw noise
    n = int(round(helper.gaussian(xi, 3)))  # cast so range() below accepts it
    #print "noise: " + str(n)
    #ensure it is not negative
    if n < 0:
        n = 0
    #add noise to encrypted values (encryptions of 0 act as dummy padding)
    for i in range(n):
        evaluated.append(p.encrypt(pubkey,0))
    
    #print "encrypted 0 " + str(p.encrypt(pubkey,0))
    #get values from C2 and C3
    print "receiving other evaluations"
    for i in range(num_parties):
        recvd = conn[i].recv(buffer_size)
        evaluated.extend(pickle.loads(recvd))
        conn[i].close()

    #add them to current values
    print "combining evaluations"

    #shuffle values for blinding
    random.shuffle(evaluated)

    #decrypt
    print "decrypting"
    #print "decrypting " + str(evaluated)
    decrypted = []
    for item in evaluated:
        #"decrypting " + str(item)
        decrypted.append(p.decrypt(privkey, pubkey, item))

    #print "decrypted " + str(decrypted)
    #tally results
    print "tallying results"
    intersection = 0
    for item in decrypted:
        if item == 0:
            intersection += 1
    
    intersection = intersection // num_parties

    print("intersection cardinality: " + str(intersection))
    print("--- %s seconds ---" % (time.time() - start_time))
Example #5
        # multiply the estimate by the back-projected relative blur (Fourier-domain
        # equivalent of the commented spatial convolution below)
        im_deconv *= ifft2(RB * G_mirror)
        F = fft2(im_deconv)
        #im_deconv *= fftconvolve(relative_blur, psf_mirror, 'same')

    if clip:
        im_deconv[im_deconv > 1] = 1
        im_deconv[im_deconv < -1] = -1

    return fft2(im_deconv), im_deconv

f = helper.get_image('cameraman64')
N, _ = f.shape
f /= f.sum()
F = fft2(f)

# Gaussian blur kernel, normalized to sum to 1
g = helper.gaussian(sigma=1, N=N)
g /= g.sum()
G = fft2(g)

# observations: blur applied as multiplication in the Fourier domain
C = F*G
c = ifft2(C)

max_its = 80
for k in range(int(max_its)):
    F, f_k = update_f(F, G, C, eps=0, iterations=1)
    #G, g_k = update_g(F, G, C, eps=0, iterations=2)

    print("iteration {}, f.max = {:0.2e}, g.max = {:0.2e}".format(k, np.abs(f).max(),
                                                    np.abs(g).max()))

G, g_k = update_g(F, G, C, eps=0, iterations=2)
helper.show_images({"kernel": np.abs(g_k), "estimate": np.abs(f_k), "observations": np.abs(c)})
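
# NOTE: update_f / update_g are only partially shown above. A minimal sketch of
# what update_f might look like for the FFT-domain Richardson-Lucy step driving
# the loop (F, G, C are fft2 of the image estimate, the kernel and the
# observations; eps is an assumed guard against division by zero):
import numpy as np
from numpy.fft import fft2, ifft2

def _update_f_sketch(F, G, C, eps=0, iterations=1):
    im_deconv = np.real(ifft2(F))                    # current spatial-domain estimate
    c = np.real(ifft2(C))                            # observations
    G_mirror = np.conj(G)                            # FFT of the flipped (real) kernel
    for _ in range(int(iterations)):
        blurred = np.real(ifft2(fft2(im_deconv) * G))        # estimate (*) kernel
        RB = fft2(c / (blurred + eps))                       # relative blur, Fourier domain
        im_deconv *= np.real(ifft2(RB * G_mirror))           # back-project and update
    return fft2(im_deconv), im_deconv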