def get(self, haystack, pin):
    """EXISTS endpoint: report whether *pin* is in the bloom filter for *haystack*.

    Writes a JSON body with ``completed``/``description`` always, plus
    ``exists`` (0 or 1) on success. If no filter document exists yet for
    *haystack*, an empty one is created and persisted, and ``exists`` is 0.
    """
    global db
    haystack = str(haystack)
    pin = str(pin)
    response = {}
    logger = logging.getLogger('bloom-api')
    # Both identifiers must be 24 characters (a valid Mongo ObjectId string).
    if len(pin) != 24 or len(haystack) != 24:
        response['completed'] = 0
        response['description'] = "Incorrect inputs"
        logger.info('EXISTS - Haystack:' + haystack + ' - Pin:' + pin + ' - Msg:Bad inputs')
        self.write(json.dumps(response))
        self.finish()
    else:
        response['completed'] = 1
        response['description'] = 'Completed'
        # Check the existence
        try:
            result = db.bloom.find_one({'_id': ObjectId(haystack)})
        except Exception:  # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
            response['completed'] = 0
            response['description'] = "Couldn't connect to database"
            logger.error('EXISTS - Haystack:' + haystack + ' - Pin:' + pin + ' - Msg:DB down')
            self.write(json.dumps(response))
            self.finish()
            return
        if result is None:
            # No filter yet for this haystack: create and persist an empty
            # one, and report the pin as absent.
            logger.info('EXISTS - Haystack:' + haystack + ' - Pin:' + pin + ' - Msg:New document')
            response['exists'] = 0
            bf = Bloom.Bloom(new_filter_capacity)
            serialized = Bloom.BloomSerializer.serialize(bf)
            serialized['_id'] = ObjectId(haystack)
            db.bloom.save(serialized)
        else:
            bf = Bloom.BloomSerializer.deserialize(result)
            response['exists'] = 1 if pin in bf else 0
        logger.info('EXISTS - Haystack:' + haystack + ' - Pin:' + pin + ' - Msg:' + str(response['exists']))
        self.write(json.dumps(response))
        # BUG FIX: the original ended with `self.finish` (a bare attribute
        # access) — the method was never called, leaving the request open.
        self.finish()
def insert(client, haystack, pin):
    """Add *pin* to the bloom filter stored under *haystack* in the
    ``patari`` database, creating the filter document if absent."""
    db = client.patari
    doc = db.bloom.find_one({'_id': ObjectId(haystack)})
    if doc is None:
        # No filter document yet: start from a fresh, empty filter.
        bloom_filter = Bloom.Bloom(new_filter_capacity)
    else:
        bloom_filter = Bloom.BloomSerializer.deserialize(doc)
    # Both paths end the same way: add the pin, then persist the filter
    # back under the same ObjectId.
    bloom_filter.add(pin)
    payload = Bloom.BloomSerializer.serialize(bloom_filter)
    payload['_id'] = ObjectId(haystack)
    db.bloom.save(payload)
__author__ = 'pawan'
from Bloom import Bloom
from OTRecv import OTRecv

# Elements held by this (client) side; each one is queried against the
# garbled bloom filter at the end of the run.
clientSet = [3,4,5,6, 7 ]

# Build the client's plain bloom filter over its element set.
bloomClient = Bloom(clientSet)
bloomClient.generateBloom()
BFc = bloomClient.getBloom()

# Fixed public parameters for the oblivious-transfer protocol.
# NOTE(review): the same constants appear in the sender-side script —
# presumably they must stay in sync; verify against OTSender's usage.
generator = 2045999832912957017696899038249723031652808586302786492701737751786256859920910559231421909153586598647911069920255352007045971901844488687357123721071418415
prime = 3989530240576982954905516988490555817184633741429235826809988606295372830739893893426271188667859430615399162320627698099477463527541398969175342947764145393
q = 4889130196785518327090094348640387030863521741947592925012240939087466704338105261551802927289043419871812698922337865317987087656300734030852135965397237

OTc = OTRecv(generator, prime, q)
lam = bloomClient.getLambda()  # assumes this is a length/security parameter passed through to OT — TODO confirm
print "started"
# Obliviously receive garbled-filter shares selected by the client's own
# bloom filter positions.
GBFi = OTc.obliviouslyReceive(BFc, len(BFc), lam)
# Query each element against the received garbled shares and print the result.
for share in clientSet:
    print share, bloomClient.queryGarbled(share, GBFi)
def get(self, haystack, pin):
    """ADD endpoint: add *pin* to the bloom filter stored under *haystack*.

    Uses an optimistic "update-if-current" loop: the filter document is
    read, mutated, and written back only if its ``count`` field is still
    the one that was read (guarding against concurrent adds); on a
    conflict the whole read-modify-write is retried after a short delay.
    Writes a JSON body with ``completed``/``description``.
    """
    global db
    haystack = str(haystack)
    pin = str(pin)
    response = {}
    logger = logging.getLogger('bloom-api')
    # Both identifiers must be 24 characters (a valid Mongo ObjectId string).
    if len(pin) != 24 or len(haystack) != 24:
        response['completed'] = 0
        response['description'] = "Incorrect inputs"
        logger.info('ADD - Haystack:' + haystack + ' - Pin:' + pin + ' - Msg:Bad inputs')
        self.write(json.dumps(response))
        self.finish()
    else:
        response['completed'] = 1
        response['description'] = "Completed"
        # Implementing the update-if-current pattern
        while True:
            # Pull out the bloom filter
            try:
                result = db.bloom.find_one({'_id': ObjectId(haystack)})
            except Exception:  # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
                response['completed'] = 0
                response['description'] = "Couldn't connect to database"
                logger.error('ADD - Haystack:' + haystack + ' - Pin:' + pin + ' - Msg:DB down')
                self.write(json.dumps(response))
                self.finish()
                return
            # Add the PinID
            if result is not None:
                bf = Bloom.BloomSerializer.deserialize(result)
                oldcount = bf.count
                bf.add(pin)
                # Matching on the count we read makes the filter query only
                # hit the exact version we deserialized (optimistic
                # concurrency control).
                entry = {'_id': ObjectId(haystack), 'count': oldcount}
                to_update = {
                    '$set': {
                        'bitarray': Binary(bf.bitarray.tobytes()),
                        'count': bf.count
                    }
                }
                # Perform the update
                updated = db.bloom.find_one_and_update(
                    entry, to_update, return_document=ReturnDocument.AFTER)
                if updated is not None:
                    logger.info('ADD - Haystack:' + haystack + ' - Pin:' + pin + ' - Msg:' + str(updated['count']))
                    break
                # Someone modified the document between our read and write;
                # back off briefly and retry the whole read-modify-write.
                # (Removed a dead `pass` the original had after the sleep.)
                logger.info('ADD - Haystack:' + haystack + ' - Pin:' + pin + ' - Msg:Concurrency issue, trying again')
                sleep(1)
            else:
                # The filter never existed in the first place
                bf = Bloom.Bloom(new_filter_capacity)
                bf.add(pin)
                serialized = Bloom.BloomSerializer.serialize(bf)
                serialized['_id'] = ObjectId(haystack)
                db.bloom.save(serialized)
                logger.info('ADD - Haystack:' + haystack + ' - Pin:' + pin + ' - Msg:New document')
                break
        self.write(json.dumps(response))
        self.finish()
def setUp(self):
    """Create a fresh bloom filter fixture using the module-level test
    capacity and error probability."""
    self.filter = Bloom.Bloom(
        error_rate=error_prob,
        capacity=testing_capacity,
    )
def setUp(self):
    """Fixture: a fresh bloom filter plus fixed 24-character pin and
    haystack identifiers for the tests to use."""
    self.pin = 'ffffffffffffffffffffffff'
    self.haystack = '000000000000000000000000'
    self.filter = Bloom.Bloom(
        error_rate=error_prob,
        capacity=testing_capacity,
    )
__author__ = 'pawan'
from Bloom import Bloom
from OTSender import OTSender
import random

# NOTE(review): addr/port are assigned but never used in the visible code —
# presumably OTSender manages its own transport; verify.
addr = '127.0.0.1'
port = 3001

# Elements held by this (server) side; a garbled bloom filter is built
# over them and then offered via oblivious transfer.
serverSet = [1,2,3,4,5,6]
bloomServer = Bloom(serverSet)
bloomServer.generateGarbledBloom()
GBFs = bloomServer.getGarbledBloom()

# Fixed public parameters for the oblivious-transfer protocol.
# NOTE(review): the same constants appear in the receiver-side script —
# presumably they must stay in sync; verify against OTRecv's usage.
generator = 2045999832912957017696899038249723031652808586302786492701737751786256859920910559231421909153586598647911069920255352007045971901844488687357123721071418415
prime = 3989530240576982954905516988490555817184633741429235826809988606295372830739893893426271188667859430615399162320627698099477463527541398969175342947764145393
q = 4889130196785518327090094348640387030863521741947592925012240939087466704338105261551802927289043419871812698922337865317987087656300734030852135965397237

lam = bloomServer.getLambda()  # assumes this is a length/security parameter passed through to OT — TODO confirm
# One random lam-bit value per garbled-filter slot, sent as the alternative
# message alongside each filter share.
M0 = [random.getrandbits(lam) for i in xrange(len(GBFs))]
OTs = OTSender(generator, prime, q)
OTs.Obliviously_Send(M0, GBFs, len(GBFs), lam)