Example #1
	def __init__(self, params, keyMgr, collector):
		''' Default constructor.
		'''	
		super(Logger, self).__init__()
		self._stop = threading.Event()
		self.params = params

		# Create the policy actor
		self.manager = PolicyManager.start(params, keyMgr)

		# Create the encryption module and Keccak instance
		self.keyMgr = keyMgr
		self.collector = collector
		self.encryptionModule = EncryptionModule(keyMgr)
		self.sha3 = Keccak.Keccak()
		self.aesMode = AES.MODE_CBC

		# The in-memory keys that are maintained (and discarded as needed)
		self.initialEpochKey = {}
		self.initialEntityKey = {}
		self.epochKey = {} # key is (user, session)
		self.entityKey = {} # key is (user, session)
		self.policyKeyMap = {} # key is (user, session, policy)

		# Create the log queue
		self.queue = Queue.Queue()

		# Set up the Python logger
		logFile = 'abls.log'
		logging.basicConfig(filename=logFile,level=logging.DEBUG)
	def __init__(self, vid, logServer, keyServer, masterKey, publicKey):
		''' Constructor that stores the log server information.
		'''
		threading.Thread.__init__(self)
		self.id = vid
		self.logServer = logServer
		self.keyServer = keyServer
		self.running = True

		# Build the encryption module
		self.encryptionModule = EncryptionModule() # share the key

		# Generate the used entry bucket
		self.usedBin = {}
		self.MAX_TRIES = 10 # This can (and should) be configured by experimentation.
	def __init__(self, vid, logServer, keyServer, keyMgr):
		''' Constructor that stores the log server information.
		'''
		threading.Thread.__init__(self)
		self.id = vid
		self.logServer = logServer
		self.keyServer = keyServer
		self.running = True

		# Build the encryption module
		self.keyMgr = keyMgr
		self.encryptionModule = EncryptionModule(keyMgr) # pass along the key manager reference

		# Generate the used entry bucket
		self.usedBin = {}
		self.MAX_TRIES = 10 # This can (and should) be configured by experimentation.

		# Configure the logger
		logFile = 'abls.log'
		logging.basicConfig(filename=logFile,level=logging.DEBUG)
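# --- Illustrative sketch (not part of the original example) ---
# The Logger above is a stoppable worker: a threading.Event signals shutdown and a
# Queue feeds it messages (see Example #6 for its run/stop methods). The class below
# is a minimal, self-contained version of that skeleton using only the standard
# library; the names are hypothetical and the real Logger does far more per message.
import threading
import Queue

class _WorkerSketch(threading.Thread):
	def __init__(self):
		super(_WorkerSketch, self).__init__()
		self._stop = threading.Event()
		self.queue = Queue.Queue()

	def run(self):
		while not self._stop.isSet():
			try:
				# Poll with a timeout so stop() is noticed even when the queue is idle
				msg = self.queue.get(timeout=1)
			except Queue.Empty:
				continue
			# ... process msg (the real Logger calls processLogEntry here)

	def stop(self):
		self._stop.set()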
Example #4
	def __init__(self, params, keyMgr, collector):
		''' Default constructor.
		'''	
		super(Logger, self).__init__()
		self._stop = threading.Event()
		self.params = params

		# Create the policy actor
		self.manager = PolicyManager.start(params, keyMgr)

		# Create the encryption module and Keccak instance
		self.keyMgr = keyMgr
		self.collector = collector
		self.encryptionModule = EncryptionModule(keyMgr)
		self.sha3 = Keccak.Keccak()
		self.aesMode = AES.MODE_CBC

		# The in-memory keys that are maintained (and discarded as needed)
		self.initialEpochKey = {}
		self.initialEntityKey = {}
		self.epochKey = {} # key is (user, session)
		self.entityKey = {} # key is (user, session)
		self.policyKeyMap = {} # key is (user, session, policy)

		# Create the log queue
		self.queue = Queue.Queue()

		# Create the RabbitMQ connection
		self.connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
		self.channel = self.connection.channel()
		self.channel.queue_declare(queue='log') # Ensure the log queue is there
		self.channel.basic_consume(self.logCallback,
			queue='log',
			no_ack=True)

		# Set up the Python logger
		logFile = 'abls.log'
		logging.basicConfig(filename=logFile,level=logging.DEBUG)
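# --- Illustrative sketch (not part of the original example) ---
# The constructor above declares a consumer on the 'log' queue, but nothing here
# starts the I/O loop; with the pre-1.0 pika API used in this code, that is normally
# channel.start_consuming(). The producer side mirrors the setup. The function name
# below is a hypothetical stand-in.
import pika

def _publish_log_sketch(message):
	connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
	channel = connection.channel()
	channel.queue_declare(queue='log')  # idempotent; ensures the queue exists
	channel.basic_publish(exchange='', routing_key='log', body=message)
	connection.close()

# Consumer side (typically inside the logger's run loop):
#	self.channel.start_consuming()  # blocks, dispatching deliveries to logCallback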
class VerifyCrawler(threading.Thread):
	''' An active thread that periodically picks a random (user, session) log
	chain out of the database, recovers the verification keys for it, and
	walks the chain to check its integrity.
	'''

	def __init__(self, vid, logServer, keyServer, keyMgr):
		''' Constructor that stores the log server information.
		'''
		threading.Thread.__init__(self)
		self.id = vid
		self.logServer = logServer
		self.keyServer = keyServer
		self.running = True

		# Build the encryption module
		self.keyMgr = keyMgr
		self.encryptionModule = EncryptionModule(keyMgr) # pass along the key manager reference

		# Generate the used entry bucket
		self.usedBin = {}
		self.MAX_TRIES = 10 # This can (and should) be configured by experimentation.

		# Configure the logger
		logFile = 'abls.log'
		logging.basicConfig(filename=logFile,level=logging.DEBUG)

	def run(self):
		''' The main thread loop for this verifier.
		'''
		# Create the shim
		self.logShim = DBShim(self.logServer, self.keyMgr)
		self.keyShim = DBShim(self.keyServer, self.keyMgr)

		# Run the crawler loop indefinitely...
		while self.running:
			logging.debug("Verifier " + str(self.id) + " is trying to grab a user session to verify.")
			(userId, sessionId) = self.selectRow()
			# Check to see if we found some valid data...
			if (userId != -1 and sessionId != -1):
				# Decrypt the user/session ID to get the original data
				userCT = userId.decode("hex")
				sessionCT = sessionId.decode("hex")
				key = hashlib.sha256(self.keyMgr.getMasterKey() + "log").digest()
				cipher = AES.new(key, AES.MODE_ECB)
				userPT = cipher.decrypt(userCT)
				sessionPT = cipher.decrypt(sessionCT)

				# Query the keys from the database
				logging.debug("Verifying: " + str(userPT) + " - " + str(sessionPT))
				valueMap = {"userId" : userPT, "sessionId" : sessionPT}
				epochKey = self.keyShim.executeMultiQuery("initialEpochKey", valueMap, ["userId", "sessionId"])
				key1 = epochKey[0]["key"]
				entityKey = self.keyShim.executeMultiQuery("initialEntityKey", valueMap, ["userId", "sessionId"])
				key2 = entityKey[0]["key"]

				# Decrypt the keys using the 'verifier' policy
				logging.debug("Trying to decrypt")
				sk = self.encryptionModule.generateUserKey(['VERIFIER'])
				k1 = self.encryptionModule.decrypt(sk, key1)[1] # [1] to pull out plaintext, [0] is T/F flag
				k2 = self.encryptionModule.decrypt(sk, key2)[1] # [1] to pull out plaintext, [0] is T/F flag

				# Query the last digest from the database
				logging.debug("Decryption successful - continue with the verification process")
				entityDigest = self.logShim.executeMultiQuery("entity", valueMap, ["userId", "sessionId"])
				digest = entityDigest[len(entityDigest) - 1]["digest"]			

				# Query for the log now.
				valueMap = {"userId" : userId, "sessionId" : sessionId}
				logResult = self.logShim.executeMultiQuery("log", valueMap, [])
				log = {}
				userId = int(userPT)
				sessionId = int(sessionPT)
				log[(userId, sessionId)] = []
				for i in range(0, len(logResult)):
					log[(userId, sessionId)].append([userId, sessionId, logResult[i]["epochId"], logResult[i]["message"], logResult[i]["xhash"], logResult[i]["yhash"]])

				# Verify the data extracted from the database...
				self.strongestVerify(userId, sessionId, log, k1, k2, digest, Logger.Logger.EPOCH_WINDOW_SIZE)

			# Don't hog the system resources
			time.sleep(15)

	def selectRow(self):
		''' Randomly select a row from the database to check with strong verification.
		'''
		userId = sessionId = 0
		
		foundNewRow = False
		tries = 0
		while not foundNewRow:
			result = self.logShim.randomQuery("log")
			if (len(result) > 0):
				userId = result[0]["userId"]
				sessionId = result[0]["sessionId"]
				if not ((userId, sessionId) in self.usedBin):
					self.usedBin[(userId, sessionId)] = 0
					foundNewRow = True

				# Bump the retry-age counter for every (user, session) pair we have already used
				for key in self.usedBin.keys():
					self.usedBin[key] = self.usedBin[key] + 1

				# See if we ran past the try cap
				tries = tries + 1
				if (tries >= self.MAX_TRIES):
					tk1, tk2, maxNum = 0, 0, 0
					for (k1, k2) in self.usedBin.keys():
						if (self.usedBin[(k1, k2)] > maxNum):
							maxNum = self.usedBin[(k1, k2)]
							tk1 = k1
							tk2 = k2

					del self.usedBin[(tk1, tk2)]
					userId = tk1
					sessionId = tk2
					foundNewRow = True # we're going to retry a previous row
			else:
				userId = sessionId = -1
				foundNewRow = True

		return (userId, sessionId)

	def strongestVerify(self, userId, sessionId, log, epochKey, entityKey, lastDigest, EPOCH_WINDOW_SIZE = Logger.Logger.EPOCH_WINDOW_SIZE):
		''' Walks the log chain and epoch chain for verification, and computes the 
		entity digests at every epoch cycle for comparison to check with
		the end result. Not publicly verifiable, and requires the initial epoch and entity keys.
		'''
		ctChain = []
		sha3 = Keccak.Keccak()

		# It is assumed that we would get this initial key from the trusted server...
		# This verification scheme is not possible without the epoch key...
		lastEpochDigest = hmac.new(epochKey, "0", hashlib.sha512).hexdigest()

		# Check to see if we even have anything to verify
		if not ((userId, sessionId) in log):
			return None
		else:
			# Handle the base of the chain
			first = log[(userId, sessionId)][0]
			firstPayload = str(userId) + str(sessionId) + str(0) + str(first[3]) + str(0)

			# Check the hash chain first
			xi = sha3.Keccak((len(bytes(firstPayload)), firstPayload.encode("hex")))
			computedV = sha3.Keccak((len(xi), xi))
			assert(xi == first[4])

			# Check the epoch chain next
			yi = hmac.new(epochKey, lastEpochDigest.encode("hex") + first[4].encode("hex"), hashlib.sha512).hexdigest()
			assert(yi == first[5])

			# Compute the first part of the entity chain now
			lastEntityDigest = hmac.new(entityKey, xi, hashlib.sha512).hexdigest()
			entityKey = hmac.new(entityKey, "some constant value", hashlib.sha512).hexdigest()

			# Append the first message.
			ctChain.append(first[3])

			# Walk the chain and make sure we can verify it...
			for i in range(1, len(log[(userId, sessionId)])):
				first = log[(userId, sessionId)][i]

				# Store the message
				firstMessage = first[3] # the message
				ctChain.append(firstMessage)

				# The other data...
				currentHash = first[4] # the hash
				previousHash = log[(userId, sessionId)][i - 1][4]
				
				# Verify that the first entry is correct
				firstPayload =  str(userId) + str(0) + str(i) + str(firstMessage) + str(previousHash)
				firstComputedHash = sha3.Keccak((len(bytes(firstPayload)), firstPayload.encode("hex")))
				assert(currentHash == firstComputedHash)

				# Check the epoch chain to see if we need to cycle
				if ((i % EPOCH_WINDOW_SIZE) == 0):
					# Update the epoch key
					currKey = epochKey
					newKey = sha3.Keccak((len(bytes(currKey)), currKey.encode("hex")))
					epochKey = newKey

					# Pull the last hash block
					length = len(log[(userId, sessionId)])
					lastHash = log[(userId, sessionId)][i - 1][4] 

					# Form the epoch block hash payload
					payload = str(lastEpochDigest) + str(lastHash)
					lastEpochDigest = hmac.new(newKey, payload, hashlib.sha512).hexdigest()

				# Compute the epoch chain value
				yi = hmac.new(epochKey, lastEpochDigest.encode("hex") + first[4].encode("hex"), hashlib.sha512).hexdigest()
				assert(yi == first[5])

				# Compute the first part of the entity chain now
				lastEntityDigest = hmac.new(entityKey, first[4], hashlib.sha512).hexdigest()
				entityKey = hmac.new(entityKey, "some constant value", hashlib.sha512).hexdigest() 

			assert(lastEntityDigest == lastDigest)
			logging.debug("Verification result:" + str(lastEntityDigest == lastDigest))

			return ctChain

	def weakVerify(self, userId, sessionId, log, epochKey, entityKey, EPOCH_WINDOW_SIZE):
		''' Only walks the log chain for verification.
		'''
		ctChain = []
		sha3 = Keccak.Keccak()

		# Make sure we have something to verify first...
		if not ((userId, sessionId) in log):
			return None
		else:
			# Handle the base of the chain
			first = log[(userId, sessionId)][0]
			firstPayload = str(userId) + str(sessionId) + str(0) + str(first[3]) + str(0)

			digest = sha3.Keccak((len(bytes(firstPayload)), firstPayload.encode("hex")))
			assert(digest == first[4])

			# Append the first message.
			ctChain.append(first[3])

			# Walk the chain and make sure we can verify it...
			for i in range(1, len(log[(userId, sessionId)])):
				first = log[(userId, sessionId)][i]

				# Store the message
				firstMessage = first[3] # the message
				ctChain.append(firstMessage)

				# The other data...
				currentHash = first[4] # the hash
				previousHash = log[(userId, sessionId)][i - 1][4]
				
				# Verify that the first entry is correct
				firstPayload =  str(userId) + str(0) + str(i) + str(firstMessage) + str(previousHash)
				firstComputedHash = sha3.Keccak((len(bytes(firstPayload)), firstPayload.encode("hex")))
				assert(currentHash == firstComputedHash)

			return ctChain
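# --- Illustrative sketch (not part of the original examples) ---
# Core idea behind strongestVerify/weakVerify above: each entry hash x_i covers the
# previous entry hash, and each epoch value y_i is an HMAC over the current epoch
# digest plus x_i, so altering any entry breaks every later x_i (and its y_i).
# This standalone sketch uses hashlib.sha512 in place of the project's Keccak
# wrapper and omits the epoch-key cycling every EPOCH_WINDOW_SIZE entries; the
# chain layout, not the exact primitives, is what it shows.
import hashlib
import hmac

def _build_chain_sketch(messages, epochKey):
	entries = []
	prevHash = "0"
	epochDigest = hmac.new(epochKey, "0", hashlib.sha512).hexdigest()
	for i, msg in enumerate(messages):
		xi = hashlib.sha512(str(i) + str(msg) + str(prevHash)).hexdigest()
		yi = hmac.new(epochKey, epochDigest + xi, hashlib.sha512).hexdigest()
		entries.append((msg, xi, yi))
		prevHash = xi
	return entries

def _verify_chain_sketch(entries, epochKey):
	prevHash = "0"
	epochDigest = hmac.new(epochKey, "0", hashlib.sha512).hexdigest()
	for i, (msg, xi, yi) in enumerate(entries):
		assert xi == hashlib.sha512(str(i) + str(msg) + str(prevHash)).hexdigest()
		assert yi == hmac.new(epochKey, epochDigest + xi, hashlib.sha512).hexdigest()
		prevHash = xi
	return True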
Example #6
class Logger(threading.Thread):
	''' The logging thread that interacts with other actors to perform entry encryption.
	'''

	# This can (and should) be changed as needed.
	EPOCH_WINDOW_SIZE = 5

	def __init__(self, params, keyMgr, collector):
		''' Default constructor.
		'''	
		super(Logger, self).__init__()
		self._stop = threading.Event()
		self.params = params

		# Create the policy actor
		self.manager = PolicyManager.start(params, keyMgr)

		# Create the encryption module and Keccak instance
		self.keyMgr = keyMgr
		self.collector = collector
		self.encryptionModule = EncryptionModule(keyMgr)
		self.sha3 = Keccak.Keccak()
		self.aesMode = AES.MODE_CBC

		# The in-memory keys that are maintained (and discarded as needed)
		self.initialEpochKey = {}
		self.initialEntityKey = {}
		self.epochKey = {} # key is (user, session)
		self.entityKey = {} # key is (user, session)
		self.policyKeyMap = {} # key is (user, session, policy)

		# Create the log queue
		self.queue = Queue.Queue()

		# Create the RabbitMQ connection
		self.connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
		self.channel = self.connection.channel()
		self.channel.queue_declare(queue='log') # Ensure the log queue is there
		self.channel.basic_consume(self.logCallback,
			queue='log',
			no_ack=True)

		# Set up the Python logger
		logFile = 'abls.log'
		logging.basicConfig(filename=logFile,level=logging.DEBUG)

	def createSession(self, userId, sessionId):
		''' Initialize the authentication keys that are used when verifying the 
		entries in the log database.
		'''

		# Generate the epoch and entity keys (both are random 32-bytes strings) - used for verification (integrity) only
		epochKey = Random.new().read(32)
		entityKey = Random.new().read(32)

		# These keys should be encrypted using CPABE for the (verifier role and user role)
		# so they can easily be recovered for verification
		msg = '{"userId":' + str(userId) + ',"sessionId":' + str(sessionId) + ',"payload":' + str(0) + '}' 
		logging.debug("verify msg: " + str(msg))
		policy = self.manager.ask({'command' : 'verifyPolicy', 'payload' : msg})
		encryptedEpochKey = self.encryptionModule.encrypt(epochKey, policy)
		encryptedEntityKey = self.encryptionModule.encrypt(entityKey, policy)

		# Persist the encrypted keys
		self.keyShim.replaceInTable("initialEpochKey", "(userId, sessionId, key, inserted_at)", (userId, sessionId, encryptedEpochKey, datetime.now().ctime()), [True, True, False, False]) 
		self.keyShim.replaceInTable("initialEntityKey", "(userId, sessionId, key, inserted_at)", (userId, sessionId, encryptedEntityKey, datetime.now().ctime()), [True, True, False, False]) 

		logging.debug("adding data to the in-memory dictionaries")
		self.initialEpochKey[(userId, sessionId)] = epochKey
		logging.debug("initial epoch key dict = " + str(self.initialEpochKey))
		self.initialEntityKey[(userId, sessionId)] = entityKey

	def getQueue(self):
		''' Fetch this logger's internal queue.
		'''
		return self.queue

	def endSession(self):
		''' End this session - clear the memory.
		'''
		self.running = False
		self.initialEpochKey = None
		self.initialEntityKey = None
		self.epochKey = None
		self.entityKey = None
		self.policyKeyMap = None

	def logCallback(self, ch, method, properties, body):
		''' Rabbit message queue callback.
		'''
		self.processLogEntry(body)
		#print " [x] Received %r" % (body,)

	def run(self):
		''' Empty the queue into the log as fast as possible. We are the bottleneck. >.<
		'''
		# Create the log shim.
		self.logShim = DBShim.DBShim(self.params["LOG_DB"], self.keyMgr)
		self.keyShim = DBShim.DBShim(self.params["KEY_DB"], self.keyMgr)

		while not self.stopped():
			msg = self.queue.get()
			self.processLogEntry(msg)

	def addNewEvent(self, userId, sessionId, message):
		''' Construct a new event to add to the log. It is assumed the epoch key is 
		already initialized before this happens.
		'''
		# Some definitions
		xi = None
		yi = None
		zi = None
		payload = ""
		lastEpochDigest = None

		# Generate the initial log/epoch results
		valueMap = {"userId" : userId, "sessionId" : sessionId}
		logResults = self.logShim.executeMultiQuery("log", valueMap, ["userId", "sessionId"])
		epochResults = self.logShim.executeMultiQuery("epoch", valueMap, ["userId", "sessionId"])

		# Check to see if we are starting a new chain or appending to an existing one.
		if (len(logResults) == 0):
			# Create the initial epoch block
			logging.debug("initial epoch key dict = " + str(self.initialEpochKey))
			currKey = self.initialEpochKey[(userId, sessionId)]
			self.epochKey[(userId, sessionId)] = currKey
			self.keyShim.insertIntoTable("epochKey", "(userId, sessionId, key, inserted_at)", (userId, sessionId, currKey, datetime.now().ctime()), [True, True, False, False])
			logging.debug("****** CURRENT KEY = " + str(currKey))
			lastEpochDigest = hmac.new(currKey, "0", hashlib.sha512).hexdigest()

			# Set the entity key
			self.entityKey[(userId, sessionId)] = self.initialEntityKey[(userId, sessionId)]
			self.keyShim.insertIntoTable("entityKey", "(userId, sessionId, key, inserted_at)", (userId, sessionId, self.entityKey[(userId, sessionId)], datetime.now().ctime()), [True, True, False, False])

			# Save the epoch digest
			self.logShim.insertIntoTable("epoch", "(userId, sessionId, digest, inserted_at)", (userId, sessionId, lastEpochDigest, datetime.now().ctime()), [True, True, False, False])

			# Create the entry payload
			payload = str(userId) + str(sessionId) + str(0) + str(message) + str(0) # hash of this entry is (user, session, epoch, msg, previous == 0)
		else:
			# Update the epoch/entity key values from the database
			length = len(logResults)
			valueMap = {"userId" : userId, "sessionId" : sessionId}
			epochKeyResults = self.keyShim.executeMultiQuery("epochKey", valueMap, ["userId", "sessionId"])
			entityKeyResults = self.keyShim.executeMultiQuery("entityKey", valueMap, ["userId", "sessionId"])
			self.epochKey[(userId, sessionId)] = epochKeyResults[len(epochKeyResults) - 1]["key"]
			self.entityKey[(userId, sessionId)] = entityKeyResults[len(entityKeyResults) - 1]["key"]

			# Check to see if we have cycled to a new epoch window
			if (length % self.EPOCH_WINDOW_SIZE) == 0: 
				# Update the epoch key
				currKey = str(self.epochKey[(userId, sessionId)])
				newKey = self.sha3.Keccak((len(bytes(currKey)), currKey.encode("hex")))
				self.epochKey[(userId, sessionId)] = newKey
				self.keyShim.insertIntoTable("epochKey", "(userId, sessionId, key, inserted_at)", (userId, sessionId, newKey, datetime.now().ctime()), [True, True, False, False])

				# Pull the last epoch block
				length = len(epochResults)
				lastEpoch = epochResults[length - 1]["digest"]

				# Pull the last hash block
				length = len(logResults)
				lastHash = logResults[length - 1]["xhash"]

				# Form the epoch block hash payload
				payload = str(lastEpoch) + str(lastHash)
				digest = hmac.new(newKey, payload, hashlib.sha512).hexdigest()

				# Store the epoch digest...
				self.logShim.insertIntoTable("epoch", "(userId, sessionId, digest, inserted_at)", (userId, sessionId, digest, datetime.now().ctime()), [True, True, False, False])

			# Now, generate the payload for this log entry
			logLength = len(logResults)
			lastHash = logResults[length - 1]["xhash"]
			payload = str(userId) + str(0) + str(logLength) + str(message) + str(lastHash)

		# Finally, query the data to build the final log entry
		valueMap = {"userId" : userId, "sessionId" : sessionId}
		logResults = self.logShim.executeMultiQuery("log", valueMap, ["userId", "sessionId"])
		epochResults = self.logShim.executeMultiQuery("epoch", valueMap, ["userId", "sessionId"])

		# Now hash the hash chain entry... But first, build up the data that's needed
		currKey = str(self.epochKey[(userId, sessionId)])
		epochLength = len(epochResults)
		logging.debug("epoch results = " + str(epochResults))
		lastEpoch = epochResults[epochLength - 1]["digest"]

		# Here are the elements for the log entry tuple
		xi = self.sha3.Keccak((len(bytes(payload)), payload.encode("hex"))) # just a plain old hash
		yi = hmac.new(currKey, lastEpoch.encode("hex") + xi.encode("hex"), hashlib.sha512).hexdigest()

		# Store the latest entity digest
		currEntityKey = str(self.entityKey[(userId, sessionId)])
		lastEntityDigest = hmac.new(currEntityKey, xi, hashlib.sha512).hexdigest()
		self.logShim.replaceInTable("entity", "(userId, sessionId, digest, inserted_at)", (userId, sessionId, lastEntityDigest, datetime.now().ctime()), [True, True, False, False])
		self.entityKey[(userId, sessionId)] = hmac.new(currEntityKey, "some constant value", hashlib.sha512).hexdigest() # update the keys
		self.keyShim.insertIntoTable("entityKey", "(userId, sessionId, key, inserted_at)", (userId, sessionId, self.entityKey[(userId, sessionId)], datetime.now().ctime()), [True, True, False, False])

		# Store the elements now
		self.logShim.insertIntoTable("log", "(userId, sessionId, epochId, message, xhash, yhash, inserted_at)", (userId, sessionId, epochLength, message, xi, yi, datetime.now().ctime()), [True, True, False, False, False, False, False])

		# Debug
		logging.debug("Inserted the log: " + str((userId, sessionId, epochLength, message, xi, yi)))

	def processLogEntry(self, msg):
		''' This method is responsible for processing a single msg retrieved from the log proxy.
		'''
		# Parse the host application data
		entry = LogEntry.LogEntry(jsonString = msg)

		logging.debug("requesting policy")
		policy = self.manager.ask({'command' : 'policy', 'payload' : msg})
		key = None
		iv = None
		logging.debug("Policy for the piece of data: " + str(policy))
		if not ((entry.userId, entry.sessionId, policy) in self.policyKeyMap.keys()):
			iv = Random.new().read(AES.block_size) # we need an IV of 16-bytes, this is also random...
			key = Random.new().read(32)

			# Encrypt the key using the policy and store it in memory and in the database
			encryptedKey = self.encryptionModule.encrypt(key, policy)
			self.policyKeyMap[(entry.userId, entry.sessionId, policy)] = (key, iv)
			self.keyShim.insertIntoTable("policyKey", "(userId, sessionId, policy, key, iv, inserted_at)", (entry.userId, entry.sessionId, policy, encryptedKey, iv, datetime.now().ctime()), [True, True, False, False, False, False])
		else:
			key = self.policyKeyMap[(entry.userId, entry.sessionId, policy)][0]
			iv = self.policyKeyMap[(entry.userId, entry.sessionId, policy)][1]

		# Pad the msg if necessary to make it a multiple of 16
		plaintext = str(entry.payload)
		#print(plaintext)
		if (len(plaintext) % 16 != 0):
			plaintext = plaintext + (' ' * (16 - len(plaintext) % 16))
		ciphertext = AES.new(key, self.aesMode, iv).encrypt(plaintext)
		#print(ciphertext)
		logging.debug("ciphertext = " + str(ciphertext))

		# See if this is a new session that we need to manage, or if it's part of an existing session
		valueMap = {"userId" : entry.userId, "sessionId" : entry.sessionId}
		#results = self.logShim.executeMultiQuery("InitialEpochKey", valueMap)
		results = []
		try:
			results = self.keyShim.executeMultiQuery("initialEpochKey", valueMap, ["userId", "sessionId"])
		except:
			logging.debug("Error: Unable to query the initialEpochKey table")
			traceback.print_exc(file=sys.stdout)
		if (len(results) == 0):
			self.createSession(int(entry.userId), int(entry.sessionId))

		# Now store the event in the log 
		self.addNewEvent(int(entry.userId), int(entry.sessionId), ciphertext.encode("hex"))

	def stop(self):
		''' Stop this logging thread.
		'''
		self._stop.set()

	def stopped(self):
		''' Check to see if this logging thread was stopped correctly.
		''' 
		return self._stop.isSet()
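# --- Illustrative sketch (not part of the original class) ---
# The payload encryption inside processLogEntry boils down to: draw a random 256-bit
# AES key and a random 16-byte IV, space-pad the plaintext to a multiple of the AES
# block size, and encrypt with CBC. A standalone version of just that step, with the
# same PyCrypto primitives (the CP-ABE wrapping of the key under a policy is left out):
from Crypto import Random
from Crypto.Cipher import AES

def _encrypt_payload_sketch(plaintext):
	key = Random.new().read(32)             # per-policy AES key
	iv = Random.new().read(AES.block_size)  # random 16-byte IV
	if len(plaintext) % 16 != 0:
		plaintext = plaintext + (' ' * (16 - len(plaintext) % 16))
	ciphertext = AES.new(key, AES.MODE_CBC, iv).encrypt(plaintext)
	return key, iv, ciphertext.encode("hex")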
## Test random function
#group = PairingGroup('SS512')
#print(group.random(G1))
#print(group.random(G1))
#print(group.random(G1))
#print(group.random(G1))
#print(group.random(G1))

# The test policy and plaintext
policy = '((one or three))' # needs to be in parentheses (because it's a gate!)
attrs = ['ONE', 'TWO', 'THREE']
msg = "Hello world!"

# The two separate encryption modules
enc1 = EncryptionModule()
enc2 = EncryptionModule()

# Test before sharing
(mk1, pk1) = enc1.getValues()
(mk2, pk2) = enc2.getValues()
print("Master keys (before sharing)")
print(objectToBytes(mk1, PairingGroup('SS512')) == objectToBytes(mk2, PairingGroup('SS512'))) 
print("Public keys (before sharing)")
print(objectToBytes(pk1, PairingGroup('SS512')) == objectToBytes(pk2, PairingGroup('SS512')))

# Test before sharing the keys
ct1 = enc1.encrypt(msg, policy)
ct2 = enc2.encrypt(msg, policy)
sk1 = enc1.generateUserKey(attrs) # takes a list of attributes (in caps?)
sk2 = enc2.generateUserKey(attrs) # takes a list of attributes (in caps?)
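
# A possible continuation of this test (sketch): decrypt each ciphertext with the
# key generated from the same module. The (flag, plaintext) return shape assumed
# here matches how the verifier code above indexes decrypt()'s result ([0] is the
# T/F flag, [1] is the plaintext).
print("Decryption with own keys (before sharing)")
print(enc1.decrypt(sk1, ct1))
print(enc2.decrypt(sk2, ct2))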