def process_temp_data(self, data: Data):
    """
    Parse a received temperature Data packet and accumulate it into the
    training frame; once a full batch (plus 24 rows of context) is
    collected, dump the frame to CSV and run one incremental training pass.
    """
    # Local import so this block does not depend on module-level imports
    # we cannot see from here.
    import pandas as pd

    content_bytes = data.getContent().toBytes()
    # Producer encodes the temperature as a little-endian integer.
    temperature = int.from_bytes(content_bytes, byteorder='little')
    logging.info('Received {}: {} degrees'.format(str(data.getName()), temperature))
    print('Received {}: {} degrees'.format(str(data.getName()), temperature))

    # DataFrame.append() was deprecated in pandas 1.4 and removed in 2.0;
    # build a one-row frame and concatenate instead.
    new_row = pd.DataFrame([{
        'Time': datetime.now().strftime('%Y-%m-%d-%H'),
        'DistrictCode': random.randint(0, 5),
        'TypeCode': random.randint(0, 5),
        'Popularity': temperature
    }])
    self.data_frame = pd.concat([self.data_frame, new_row], ignore_index=True)

    # If collected a batch of data, perform incremental learning on it
    print('len: {}'.format(len(self.data_frame)))
    if len(self.data_frame) >= self.batch_size + 24:
        csv_name = str(int(time.time())) + '.csv'
        csv_path = os.path.join('data', csv_name)
        self.data_frame.to_csv(csv_path, index=False)
        # Keep the trailing 24 rows as context for the next batch.
        self.data_frame = self.data_frame[-24:]
        logging.info('Start incremental training on batch {}'.format(csv_name))
        self.learner.load_data(csv_path)
        self.learner.train_once()
def test_full_name(self):
    """
    Verify Data.getFullName(): its structure, the implicit SHA-256 digest
    component, the expected URI, and that it changes when the packet does.
    """
    data = Data()
    data.wireDecode(codedData)
    # Check the full name format.
    self.assertEqual(data.getFullName().size(), data.getName().size() + 1)
    self.assertEqual(data.getName(), data.getFullName().getPrefix(-1))
    # Last component is the implicit digest: 32 bytes of SHA-256.
    self.assertEqual(data.getFullName().get(-1).getValue().size(), 32)
    # Check the independent digest calculation.
    sha256 = hashes.Hash(hashes.SHA256(), backend=default_backend())
    sha256.update(Blob(codedData).toBytes())
    newDigest = Blob(bytearray(sha256.finalize()), False)
    self.assertTrue(newDigest.equals(
        data.getFullName().get(-1).getValue()))
    # Check the expected URI.
    self.assertEqual(
        data.getFullName().toUri(),
        "/ndn/abc/sha256digest=" +
        "96556d685dcb1af04be4ae57f0e7223457d4055ea9b3d07c0d337bef4a8b3ee9")
    # Changing the Data packet should change the full name.
    saveFullName = Name(data.getFullName())
    data.setContent(Blob())
    self.assertNotEqual(data.getFullName().get(-1), saveFullName.get(-1))
def onInterest(self, prefix, interest, transport, registeredPrefixId):
    """
    Interest callback: reply with this device's song list, sign and send
    the Data packet, then tear the face down.
    NOTE(review): registering self.changePrefix inside the try block and
    closing the loop/face in finally means this handler only ever serves
    one interest -- confirm that is intended.
    """
    print "received interest"
    initInterest = Name(interest.getName())
    print "interest name:",initInterest.toUri()
    # Response is named <interest name>/<this device's component>.
    d = Data(interest.getName().append(self.deviceComponent))
    try:
        print "start to set data's content"
        # currentList is presumably a module-level song list -- TODO confirm.
        currentString = ','.join(currentList)
        d.setContent("songList of " +self.device+":"+currentString+ "\n")
        self.face.registerPrefix(self.changePrefix,self.onInterest,self.onRegisterFailed)
    except KeyboardInterrupt:
        print "key interrupt"
        sys.exit(1)
    except Exception as e:
        print e
        d.setContent("Bad command\n")
    finally:
        # Sign and send whatever content was set above (even on error),
        # then shut everything down.
        self.keychain.sign(d,self.certificateName)
        encodedData = d.wireEncode()
        transport.send(encodedData.toBuffer())
        print d.getName().toUri()
        print d.getContent()
        self.loop.close()
        self.face.shutdown()
        self.face = None
def onInterest(self, prefix, interest, transport, registeredPrefixId): print "received interest" initInterest = Name(interest.getName()) print "interest name:", initInterest.toUri() #d = Data(interest.getName().getPrefix(prefix.size()+1)) #self.excludeDevice = interest.getName().get(prefix.size()) #initInterest = interest.getName() d = Data(interest.getName().append(self.deviceComponent)) try: print "start to set data's content" currentString = ','.join(currentList) d.setContent("songList of " + self.device + ":" + currentString + "\n") self.face.registerPrefix(self.changePrefix, self.onInterest, self.onRegisterFailed) except KeyboardInterrupt: print "key interrupt" sys.exit(1) except Exception as e: print e d.setContent("Bad command\n") finally: self.keychain.sign(d, self.certificateName) encodedData = d.wireEncode() transport.send(encodedData.toBuffer()) print d.getName().toUri() print d.getContent() self.stop() '''print"remove register"
def onReadPir(self, interest):
    """
    Build the Data response for a PIR-sensor read interest.

    Resends the last sample until the consumer's exclude filter
    acknowledges its timestamp, then publishes a fresh reading.
    Returns the Data packet to send.
    """
    # try to find a matching pir
    pirInfo = next((pair[1] for pair in self._pirs.items()
                    if Name(pair[1]["device"]).match(interest.getName())), None)
    if pirInfo is None:
        # No PIR matches the interest name: reply with an error packet.
        data = Data(interest.getName())
        data.setContent("MALFORMED COMMAND")
        data.getMetaInfo().setFreshnessPeriod(
            1000)  # 1 second, in milliseconds
        return data
    lastTime = pirInfo["lastTime"]
    lastValue = pirInfo["lastVal"]
    # If interest exclude doesn't match timestamp from last tx'ed data
    # then resend data
    if not interest.getExclude().matches(Name.Component(str(lastTime))):
        print "Received interest without exclude ACK:", interest.getExclude(
        ).toUri()
        print "\tprevious timestamp:", str(lastTime)
        data = Data(Name(interest.getName()).append(str(lastTime)))
        payload = {"pir": lastValue}
        content = json.dumps(payload)
        data.setContent(content)
        data.getMetaInfo().setFreshnessPeriod(
            1000)  # 1 second, in milliseconds
        print "Sent data:", data.getName().toUri(), "with content", content
        return data
    # otherwise, make new data
    currentValue = pirInfo["device"].read()
    timestamp = int(time.time() * 1000)  # in milliseconds
    pirInfo["lastTime"] = timestamp
    pirInfo["lastVal"] = currentValue
    data = Data(Name(interest.getName()).append(str(timestamp)))
    payload = {"pir": currentValue}
    content = json.dumps(payload)
    data.setContent(content)
    data.getMetaInfo().setFreshnessPeriod(
        1000)  # 1 second, in milliseconds
    print "Sent data:", data.getName().toUri(), "with content", content
    return data
def onInterest(self, prefix, interest, transport, registeredPrefixId):
    """
    Serve the song list unless the interest excludes this device; when
    excluded, drop the prefix registration, back off 30 s, re-register.
    """
    print "received interest"
    initInterest = Name(interest.getName())
    print "interest name:",initInterest.toUri()
    #d = Data(interest.getName().getPrefix(prefix.size()+1))
    #self.excludeDevice = interest.getName().get(prefix.size())
    #initInterest = interest.getName()
    d = Data(interest.getName().append(self.deviceComponent))
    try:
        if(initInterest == self.prefix):
            # Bare prefix interest: reply with the full song list.
            print "start to set data's content"
            currentString = ','.join(currentList)
            d.setContent("songList of " +self.device+":"+currentString+ "\n")
        else:
            # Interest carries a device component right after the prefix
            # naming the device to exclude.
            self.excludeDevice = initInterest.get(prefix.size())
            print "excludeDevice",self.excludeDevice.toEscapedString()
            if(self.excludeDevice != self.deviceComponent):
                print "start to set data's content"
                currentString = ','.join(currentList)
                d.setContent("songList of " +self.device+":"+currentString+ "\n")
            else:
                # We are the excluded device: withdraw, wait, re-register.
                print"remove register"
                self.face.removeRegisteredPrefix(registeredPrefixId)
                time.sleep(30)    #sleep 30s which means user cannot update the song list twice within 1 minutes
                print"register again"
                self.face.registerPrefix(self.prefix, self.onInterest, self.onRegisterFailed)
    except KeyboardInterrupt:
        print "key interrupt"
        sys.exit(1)
    except Exception as e:
        print e
        d.setContent("Bad command\n")
    finally:
        # NOTE(review): the packet is signed and sent even on the exclude
        # branch where no content was set -- confirm intended.
        self.keychain.sign(d,self.certificateName)
        encodedData = d.wireEncode()
        transport.send(encodedData.toBuffer())
        print d.getName().toUri()
        print d.getContent()
def onInterest(self, prefix, interest, transport, registeredPrefixId):
    """
    Song-list interest handler (formatted variant of the handler above).

    Replies with the song list unless the interest's extra component
    excludes this device, in which case the registration is dropped for
    30 s and then re-created.
    """
    print "received interest"
    initInterest = Name(interest.getName())
    print "interest name:", initInterest.toUri()
    #d = Data(interest.getName().getPrefix(prefix.size()+1))
    #self.excludeDevice = interest.getName().get(prefix.size())
    #initInterest = interest.getName()
    d = Data(interest.getName().append(self.deviceComponent))
    try:
        if (initInterest == self.prefix):
            # Bare prefix interest: reply with the full song list.
            print "start to set data's content"
            currentString = ','.join(currentList)
            d.setContent("songList of " + self.device + ":" + currentString + "\n")
        else:
            # Component after the prefix names the excluded device.
            self.excludeDevice = initInterest.get(prefix.size())
            print "excludeDevice", self.excludeDevice.toEscapedString()
            if (self.excludeDevice != self.deviceComponent):
                print "start to set data's content"
                currentString = ','.join(currentList)
                d.setContent("songList of " + self.device + ":" + currentString + "\n")
            else:
                # We are excluded: withdraw the prefix, wait, re-register.
                print "remove register"
                self.face.removeRegisteredPrefix(registeredPrefixId)
                time.sleep(30)  #sleep 30s which means user cannot update the song list twice within 1 minutes
                print "register again"
                self.face.registerPrefix(self.prefix, self.onInterest, self.onRegisterFailed)
    except KeyboardInterrupt:
        print "key interrupt"
        sys.exit(1)
    except Exception as e:
        print e
        d.setContent("Bad command\n")
    finally:
        # NOTE(review): signed and sent even on the exclude branch where
        # no content was set -- confirm intended.
        self.keychain.sign(d, self.certificateName)
        encodedData = d.wireEncode()
        transport.send(encodedData.toBuffer())
        print d.getName().toUri()
        print d.getContent()
def onInterest(self, prefix, interest, transport, registeredPrefixId):
    """
    Song-list responder. Replies unless the interest carries an
    "exc<device>" component excluding this device, in which case the
    prefix registration is dropped and immediately re-created.
    NOTE(review): the Data is wire-encoded and sent inside the try block
    BEFORE being signed in finally, so consumers receive an unsigned
    packet -- confirm intended.
    """
    initInterest = Name(interest.getName())
    print "interest name:",initInterest.toUri()
    d = Data(interest.getName().append(self.deviceComponent))
    try:
        if(initInterest == self.listPrefix):
            # Bare list-prefix interest: send the whole current list.
            print "initial db,start to set data's content"
            currentString = ','.join(currentList)
            d.setContent(currentString)
            encodedData = d.wireEncode()
            transport.send(encodedData.toBuffer())
            print d.getName().toUri()
            print d.getContent()
        else:
            # Component after the prefix is "exc<device>" for the device
            # the controller wants to exclude.
            self.excludeDevice = initInterest.get(self.listPrefix.size())
            excDevice = self.excludeDevice.toEscapedString()
            if(excDevice != str("exc")+self.device):
                print "not init db,start to set data's content"
                currentString = ','.join(currentList)
                d.setContent(currentString)
                encodedData = d.wireEncode()
                transport.send(encodedData.toBuffer())
                print d.getName().toUri()
                print d.getContent()
            else:
                # We are the excluded device: re-register the prefix.
                print"controller has exclude me, I have to remove register!!!!!!!"
                self.face.removeRegisteredPrefix(registeredPrefixId)
                print"register again"
                self.face.registerPrefix(self.listPrefix,self.onInterest,self.onRegisterFailed)
    except KeyboardInterrupt:
        print "key interrupt"
        sys.exit(1)
    except Exception as e:
        print e
        d.setContent("Bad command\n")
    finally:
        self.keychain.sign(d,self.certificateName)
def run(self):
    """
    Simple KDS main body: encrypt the symmetric key for the user's RSA
    public key and publish it as a signed Data packet named
    <prefix>/<timestamp>/<keyid>.
    """
    print 'Simple KDS start'
    # Publish sym key
    # keyid identifies the recipient key: SHA-256 of its DER encoding.
    keyid = hashlib.sha256(self.usr_key.publickey().exportKey("DER")).digest()
    cipher = PKCS1_v1_5.new(self.usr_key)
    ciphertext = cipher.encrypt(self.symkey)
    # NOTE(review): Name.append mutates self.prefix in place in pyndn, so
    # repeated run() calls would keep growing the prefix -- confirm this
    # only executes once.
    symkey_name = self.prefix.append(bytearray(self.timestamp)).append(bytearray(keyid))
    symkey_data = Data(symkey_name)
    symkey_data.setContent(bytearray(ciphertext))
    self.keychain.sign(symkey_data, self.cert_name)
    self.publisher.put(symkey_data)
    print symkey_data.getName().toUri()
    print 'Simple KDS stop'
def prepare_data(self):
    """
    Shard the file at self.file_path into Data packets.

    Each packet carries up to MAX_BYTES_IN_DATA_PACKET bytes, is named
    <name_at_repo>/<seq>, signed with SHA-256, and cached in
    self.m_name_str_to_data keyed by its name string.
    """
    logging.info('preparing data')
    with open(self.file_path, 'rb') as source:
        payload = bytearray(source.read())
    if len(payload) == 0:
        logging.warning("File is 0 bytes")
        return
    total = len(payload)
    self.n_packets = int((total - 1) / MAX_BYTES_IN_DATA_PACKET + 1)
    logging.info('There are {} packets in total'.format(self.n_packets))
    # All packets carry the same FinalBlockId: the last segment number.
    final_block = Name.Component.fromSegment(self.n_packets - 1)
    for seq, offset in enumerate(range(0, total, MAX_BYTES_IN_DATA_PACKET)):
        packet = Data(Name(self.name_at_repo).append(str(seq)))
        packet.metaInfo.freshnessPeriod = 100000
        packet.setContent(payload[offset:min(offset + MAX_BYTES_IN_DATA_PACKET, total)])
        packet.metaInfo.setFinalBlockId(final_block)
        self.keychain.signWithSha256(packet)
        self.m_name_str_to_data[str(packet.getName())] = packet
def after_fetched(data: Data):
    nonlocal recv_window, b_array, seq_to_bytes_unordered
    """ Reassemble data packets in sequence. """
    # (The string above is an ordinary statement, not a docstring, since
    # it follows the nonlocal declaration.)
    if not isinstance(data, Data):
        return
    try:
        # The sequence number is the last name component.
        seq = int(str(data.getName()).split('/')[-1])
        logging.info('seq: {}'.format(seq))
    except ValueError:
        logging.warning('Sequence number decoding error')
        return
    # Temporarily store out-of-order packets
    if seq <= recv_window:
        # Duplicate of an already-consumed packet: drop it.
        return
    elif seq == recv_window + 1:
        # In-order packet: append it, then drain any buffered successors.
        b_array.extend(data.getContent().toBytes())
        logging.info('saved packet: seq {}'.format(seq))
        recv_window += 1
        while recv_window + 1 in seq_to_bytes_unordered:
            b_array.extend(seq_to_bytes_unordered[recv_window + 1])
            seq_to_bytes_unordered.pop(recv_window + 1)
            logging.info('saved packet: seq {}'.format(recv_window + 1))
            recv_window += 1
    else:
        # Gap ahead of the window: stash the payload until it can be
        # appended in order.
        logging.info(
            'Received out of order packet: seq {}'.format(seq))
        seq_to_bytes_unordered[seq] = data.getContent().toBytes()
def publishData(self):
    """
    Publish one status Data packet and reschedule itself every second.

    With no PIR device attached it reports host statistics via psutil;
    otherwise it reports the PIR reading. The packet is versioned with the
    current timestamp, signed, and pushed to the forwarder directly.
    """
    timestamp = time.time()
    # Fixes: 'info = {""}' was a dead store creating a *set*, and
    # '== None' should be an identity check.
    if self._pir is None:
        info = {'count': self._count,
                'cpu_usage': ps.cpu_percent(),
                'users': [u.name for u in ps.users()],
                'processes': len(ps.pids()),
                'memory_usage': ps.virtual_memory().percent,
                'swap_usage': ps.swap_memory().percent}
    else:
        info = {'count': self._count, 'pir_bool': self._pir.read()}
    self._count += 1
    dataOut = Data(Name(self._dataPrefix).appendVersion(int(timestamp)))
    dataOut.setContent(json.dumps(info))
    dataOut.getMetaInfo().setFreshnessPeriod(10000)
    self.signData(dataOut)
    #self._dataCache.add(dataOut)
    # instead of adding data to content cache, we put data to nfd anyway
    self.send(dataOut.wireEncode().buf())
    print('data name: ' + dataOut.getName().toUri() + '; content: ' + str(info))
    # repeat every 1 seconds
    self.loop.call_later(1, self.publishData)
def generateData(self, baseName):
    '''
    Build one signed (or fake-signed) Data packet under baseName.

    This appends the segment number to the data name, since repo-ng tends
    to expect it.
    '''
    # just make up some data and return it
    publishTime = time.time()
    segmentId = 0  # compatible with repo-ng test: may change to test segmented data
    versionStr = baseName.get(-1).toEscapedString()
    dataName = Name(baseName)
    dataName.appendSegment(segmentId)
    d = Data(dataName)
    d.setContent("(" + str(publishTime) + ") Data named " + dataName.toUri())
    d.getMetaInfo().setFinalBlockID(segmentId)
    d.getMetaInfo().setFreshnessPeriod(-1)
    if shouldSign:
        self.keychain.sign(d, self.certificateName)
    else:
        d.setSignature(self.fakeSignature)
    # Record the publish time for this version for later stats queries.
    stats.insertDataForVersion(versionStr, {'publish_time': time.time()})
    logger.debug('Publishing: ' + d.getName().toUri())
    return d
def prepare_data(filePath, keychain: KeyChain):
    """
    Shard a file into signed Data packets for the video stream.

    Returns a dict mapping packet name (str) to the Data packet, or None
    for an empty file.
    """
    logging.info('preparing data for {}'.format(filePath))
    print('preparing data for {}'.format(filePath))
    with open(filePath, 'rb') as binary_file:
        raw = bytearray(binary_file.read())
    if len(raw) == 0:
        logging.warning("File is 0 bytes")
        return
    n_packets = int((len(raw) - 1) / MAX_BYTES_IN_DATA_PACKET + 1)
    print('There are {} packets'.format(n_packets))
    # Loop invariants hoisted: the stream prefix and the FinalBlockId
    # component are the same for every packet.
    stream_prefix = Name(VIDEO_STREAM_NAME).append(filePath.split('.')[0])
    final_block = Name.Component.fromSegment(n_packets - 1)
    packets = {}
    for seq, start in enumerate(range(0, len(raw), MAX_BYTES_IN_DATA_PACKET)):
        # Copy-construct the Name so append() does not mutate the prefix.
        data = Data(Name(stream_prefix).append(str(seq)))
        data.setContent(raw[start:min(start + MAX_BYTES_IN_DATA_PACKET, len(raw))])
        data.metaInfo.setFinalBlockId(final_block)
        keychain.signWithSha256(data)
        packets[str(data.getName())] = data
    print('{} packets prepared: {}'.format(n_packets, str(stream_prefix)))
    return packets
def main():
    """
    Exercise HMAC-with-SHA256: verify a hard-coded wire-encoded Data
    packet, then sign a freshly built packet and verify it too.
    """
    data = Data()
    data.wireDecode(TlvData)
    # Use a hard-wired secret for testing. In a real application the signer
    # ensures that the verifier knows the shared key and its keyName.
    key = Blob(bytearray(range(32)))
    verdict = ("VERIFIED" if KeyChain.verifyDataWithHmacWithSha256(data, key)
               else "FAILED")
    dump("Hard-coded data signature verification: " + verdict)
    # Build a new packet whose HMAC key locator points at "key1".
    freshData = Data(Name("/ndn/abc"))
    signature = HmacWithSha256Signature()
    signature.getKeyLocator().setType(KeyLocatorType.KEYNAME)
    signature.getKeyLocator().setKeyName(Name("key1"))
    freshData.setSignature(signature)
    freshData.setContent("SUCCESS!")
    dump("Signing fresh data packet", freshData.getName().toUri())
    KeyChain.signWithHmacWithSha256(freshData, key)
    verdict = ("VERIFIED" if KeyChain.verifyDataWithHmacWithSha256(freshData, key)
               else "FAILED")
    dump("Freshly-signed data signature verification: " + verdict)
def main():
    """
    Demo of HMAC-with-SHA256 signing: verify a hard-coded encoded Data
    packet, then sign a freshly created packet and verify it.
    """
    data = Data()
    data.wireDecode(TlvData)
    # Use a hard-wired secret for testing. In a real application the signer
    # ensures that the verifier knows the shared key and its keyName.
    key = Blob(
        bytearray([
            0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
            17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31
        ]))
    if KeyChain.verifyDataWithHmacWithSha256(data, key):
        dump("Hard-coded data signature verification: VERIFIED")
    else:
        dump("Hard-coded data signature verification: FAILED")
    # Build a new packet whose HMAC key locator points at "key1".
    freshData = Data(Name("/ndn/abc"))
    signature = HmacWithSha256Signature()
    signature.getKeyLocator().setType(KeyLocatorType.KEYNAME)
    signature.getKeyLocator().setKeyName(Name("key1"))
    freshData.setSignature(signature)
    freshData.setContent("SUCCESS!")
    dump("Signing fresh data packet", freshData.getName().toUri())
    KeyChain.signWithHmacWithSha256(freshData, key)
    if KeyChain.verifyDataWithHmacWithSha256(freshData, key):
        dump("Freshly-signed data signature verification: VERIFIED")
    else:
        dump("Freshly-signed data signature verification: FAILED")
def createData(self, namePrefix, timestamp, payload, certName):
    """
    Create a signed Data packet named <namespace>/<namePrefix>/<timestamp>
    (timestamp truncated to an integer).

    Fix: the freshness period is now set BEFORE signing. In NDN the
    MetaInfo is part of the signed portion of the packet, so mutating it
    after KeyChain.sign() (as the original did) invalidates the signature.
    """
    data = Data(Name(self._namespace).append(namePrefix).append(str(int(float(timestamp)))))
    data.setContent(payload)
    data.getMetaInfo().setFreshnessPeriod(self.DEFAULT_DATA_LIFETIME)
    self._keyChain.sign(data, certName)
    if __debug__:
        print(data.getName().toUri())
    return data
def onData(self, interest, data):
    """
    Data callback: treat the content as the user's RSA public key, encrypt
    the symmetric key for it, publish the result as a signed Data packet,
    then flag termination.
    """
    print data.getName().toUri()
    usr_pubkey = str(bytearray(data.getContent().toBuffer()))
    # Publish sym key
    usrkey = RSA.importKey(usr_pubkey)
    # keyid identifies the recipient key: SHA-256 of its DER encoding.
    keyid = hashlib.sha256(usrkey.publickey().exportKey("DER")).digest()
    cipher = PKCS1_v1_5.new(usrkey)
    ciphertext = cipher.encrypt(self.symkey)
    # NOTE(review): Name.append mutates self.prefix in place in pyndn;
    # confirm this callback fires only once per session.
    symkey_name = self.prefix.append(bytearray(self.timestamp)).append(bytearray(keyid))
    symkey_data = Data(symkey_name)
    symkey_data.setContent(bytearray(ciphertext))
    self.keychain.sign(symkey_data, self.cert_name)
    self.publisher.put(symkey_data)
    print symkey_data.getName().toUri()
    # Signal the owner's loop to stop.
    self.flag_terminate = 1
def onReadPir(self, interest):
    """
    Answer a PIR read interest (compact variant of the handler above).

    The last sample is resent until the consumer's exclude filter names
    its timestamp; only then is a fresh reading taken and published.
    Returns the Data packet to send.
    """
    # try to find a matching pir
    pirInfo = next((pair[1] for pair in self._pirs.items() if Name(pair[1]["device"]).match(interest.getName())), None)
    if pirInfo is None:
        # Unknown device name: reply with an error packet.
        data = Data(interest.getName())
        data.setContent("MALFORMED COMMAND")
        data.getMetaInfo().setFreshnessPeriod(1000) # 1 second, in milliseconds
        return data
    lastTime = pirInfo["lastTime"]
    lastValue = pirInfo["lastVal"]
    # If interest exclude doesn't match timestamp from last tx'ed data
    # then resend data
    if not interest.getExclude().matches(Name.Component(str(lastTime))):
        print "Received interest without exclude ACK:", interest.getExclude().toUri()
        print "\tprevious timestamp:", str(lastTime)
        data = Data(Name(interest.getName()).append(str(lastTime)))
        payload = { "pir" : lastValue}
        content = json.dumps(payload)
        data.setContent(content)
        data.getMetaInfo().setFreshnessPeriod(1000) # 1 second, in milliseconds
        print "Sent data:", data.getName().toUri(), "with content", content
        return data
    # otherwise, make new data
    currentValue = pirInfo["device"].read()
    timestamp = int(time.time() * 1000) # in milliseconds
    pirInfo["lastTime"] = timestamp
    pirInfo["lastVal"] = currentValue
    data = Data(Name(interest.getName()).append(str(timestamp)))
    payload = { "pir" : currentValue}
    content = json.dumps(payload)
    data.setContent(content)
    data.getMetaInfo().setFreshnessPeriod(1000) # 1 second, in milliseconds
    print "Sent data:", data.getName().toUri(), "with content", content
    return data
def createData(self, namePrefix, timestamp, payload, certName):
    """
    Create a signed Data packet named <namespace>/<namePrefix>/<timestamp>.

    Fix: the freshness period is now set BEFORE signing. In NDN the
    MetaInfo belongs to the signed portion, so mutating it after
    KeyChain.sign() (as the original did) invalidates the signature.
    """
    data = Data(
        Name(self._namespace).append(namePrefix).append(str(timestamp)))
    data.setContent(payload)
    data.getMetaInfo().setFreshnessPeriod(self.DEFAULT_DATA_LIFETIME)
    self._keyChain.sign(data, certName)
    if __debug__:
        print(data.getName().toUri())
    return data
def contentCacheAddEntityData(self, name, entityInfo):
    """
    Serialize entityInfo, wrap it in a signed Data packet named `name`,
    and add it to the in-memory content cache.
    """
    content = self._serializer.serialize(entityInfo)
    data = Data(Name(name))
    data.setContent(content)
    data.getMetaInfo().setFreshnessPeriod(self._entityDataFreshnessPeriod)
    self._keyChain.sign(data, self._certificateName)
    self._memoryContentCache.add(data)
    print "added entity to cache: " + data.getName().toUri() + "; " + data.getContent().toRawStr()
def publish_temp_packet(self):
    """
    Publish one temperature Data packet named <prefix>/<timestamp>.

    The timestamp is floored to a 5-second boundary so consumers can
    predict the name. The packet is cached locally and, when repo
    insertion is enabled, a repo insert command is scheduled.
    """
    tp = int(time.time())
    tp = tp - (tp % 5)  # align to a 5-second boundary
    self.latest_tp = tp
    data_name = Name(self.prefix).append(str(self.latest_tp))
    data = Data(data_name)
    temp = self.get_temp()
    # Temperature fits in 2 bytes, little-endian.
    content_blob = Blob(temp.to_bytes(2, byteorder='little'))
    data.setContent(content_blob)
    data.metaInfo.setFreshnessPeriod(1000000)
    logging.info('Publish temp data {}, {} degree'.format(data.getName(), temp))
    self.keychain.sign(data)
    self.name_str_to_data[str(data.getName())] = data
    # Fix: test truthiness instead of 'is True' -- identity comparison to
    # True is fragile and non-idiomatic.
    if use_repo:
        event_loop = asyncio.get_event_loop()
        event_loop.create_task(self.send_cmd_interest())
        logging.info("send repo insertion command")
def onInterest(self, prefix, interest, face, interestFilterId, filter): key = Blob( bytearray([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31 ])) print "Got onboarding interest with name: %s" % ( interest.getName().toUri()) try: if KeyChain.verifyInterestWithHmacWithSha256(interest, key): dump("Onboarding interest signature verification: VERIFIED") else: dump("Onboarding interest signature verification: FAILED") except: print "Exception when attempting to verify onboarding interest signature." data = Data(interest.getName()) signature = HmacWithSha256Signature() signature.getKeyLocator().setType(KeyLocatorType.KEYNAME) signature.getKeyLocator().setKeyName(Name("key1")) data.setSignature(signature) data.setContent("") dump("Signing onboarding response data packet", data.getName().toUri()) KeyChain.signWithHmacWithSha256(data, key) deviceID = str(interest.getName().getSubName(-3, 1).toUri()[1:]) deviceIP = str(interest.getName().getSubName(-4, 1).toUri()[1:]) print "Device ip: %s" % (deviceIP) print "Device ID: %s" % (deviceID) routeToRegister = str(Name(deviceID)) registerRouteWithNameAndIp(routeToRegister, deviceIP) thread = threading.Thread(target=run_data_fetcher, args=(deviceID)) thread.daemon = True # Daemonize thread thread.start() #commandRouteToRegister = "/device/command/" + deviceID #registerRouteWithNameAndIp(commandRouteToRegister, deviceIP) face.putData(data) with open('%s' % (deviceIDListName), 'a') as the_file: the_file.seek(0) read_file = open('%s' % (deviceIDListName), 'r') if deviceID not in read_file.read(): the_file.write('%s\n' % (deviceID))
def publishMetadata(self):
    """
    Publish the sensor list as a timestamped "_meta" Data packet, cache
    it, and kick off repo insertion.
    """
    # For now, hardcoded sensor list on gateway's end
    metaName = Name(self._namespace).append("_meta").append(
        str(int(time.time() * 1000.0)))
    data = Data(metaName)
    data.setContent(json.dumps(self._sensorList))
    data.getMetaInfo().setFreshnessPeriod(self._defaultFreshnessPeriod)
    self._keyChain.sign(data)
    self._cache.add(data)
    print("Metadata " + data.getName().toUri()
          + " added for sensor list: " + str(self._sensorList))
    self.startRepoInsertion(data)
    return
def test_full_name(self):
    """
    Check Data.getFullName(): size and prefix relationship to getName(),
    the 32-byte implicit digest, the expected URI, and sensitivity to
    packet changes.
    """
    data = Data()
    data.wireDecode(codedData)
    # Check the full name format.
    self.assertEqual(data.getFullName().size(), data.getName().size() + 1)
    self.assertEqual(data.getName(), data.getFullName().getPrefix(-1))
    # The appended component is the implicit SHA-256 digest (32 bytes).
    self.assertEqual(data.getFullName().get(-1).getValue().size(), 32)
    # Check the independent digest calculation.
    sha256 = hashes.Hash(hashes.SHA256(), backend=default_backend())
    sha256.update(Blob(codedData).toBytes())
    newDigest = Blob(bytearray(sha256.finalize()), False)
    self.assertTrue(newDigest.equals(data.getFullName().get(-1).getValue()))
    # Check the expected URI.
    self.assertEqual(
        data.getFullName().toUri(),
        "/ndn/abc/sha256digest=" +
        "96556d685dcb1af04be4ae57f0e7223457d4055ea9b3d07c0d337bef4a8b3ee9")
    # Changing the Data packet should change the full name.
    saveFullName = Name(data.getFullName())
    data.setContent(Blob())
    self.assertNotEqual(data.getFullName().get(-1), saveFullName.get(-1))
def onInterest(self, prefix, interest, face, interestFilterId, filter):
    """
    Reply to a device-ID-list interest with the list file's contents,
    signed with HMAC-SHA256.
    NOTE(review): 'deviceIDListName' and 'key' are module-level names not
    visible in this block, and the file handle is never closed -- consider
    a 'with' block.
    """
    print "Got interest for device ID list."
    file = open("%s" % (deviceIDListName), "r")
    deviceIDList = file.read()
    data = Data(interest.getName())
    # Attach an HMAC signature whose key locator names "key1".
    signature = HmacWithSha256Signature()
    signature.getKeyLocator().setType(KeyLocatorType.KEYNAME)
    signature.getKeyLocator().setKeyName(Name("key1"))
    data.setSignature(signature)
    data.setContent(deviceIDList)
    dump("Signing device ID List data packet", data.getName().toUri())
    KeyChain.signWithHmacWithSha256(data, key)
    face.putData(data)
def test_content_symmetric_encrypt(self):
    """
    For each AES test vector: encrypt into a Data packet, check the name
    and encrypted payload, then decrypt and compare with the plaintext.
    """
    for case in encryptorAesTestInputs:
        data = Data()
        Encryptor.encryptData(
            data, case.plainText, case.keyName, case.key, case.encryptParams)
        expectedName = Name("/FOR").append(case.keyName)
        self.assertTrue(data.getName().equals(expectedName), case.testName)
        self.assertTrue(
            case.encryptedContent.equals(data.getContent()), case.testName)
        # Round-trip: decode the encrypted content and decrypt it again.
        decoded = EncryptedContent()
        decoded.wireDecode(data.getContent())
        recovered = AesAlgorithm.decrypt(
            case.key, decoded.getPayload(), case.encryptParams)
        self.assertTrue(case.plainText.equals(recovered), case.testName)
def produce(self):
    """
    Compute the bounding box of self.rawData (lat/lng points) and publish
    it -- encrypted via the NAC producer when self.encrypted is set,
    otherwise as a plain cached Data packet.
    """
    # Produce the bounding box
    # Sentinel extremes; any real coordinate replaces them.
    maxLong = -3600
    minLong = 3600
    maxLat = -3600
    minLat = 3600
    if len(self.rawData) == 0:
        # NOTE(review): with no input the sentinel values below are still
        # published -- confirm intended.
        print "No raw data as producer input"
    for item in self.rawData:
        print item
        if item["lng"] > maxLong:
            maxLong = item["lng"]
        if item["lng"] < minLong:
            minLong = item["lng"]
        if item["lat"] > maxLat:
            maxLat = item["lat"]
        if item["lat"] < minLat:
            minLat = item["lat"]
    result = json.dumps({
        "maxlng": maxLong,
        "minlng": minLong,
        "maxlat": maxLat,
        "minlat": minLat,
        "size": len(self.rawData)
    })
    if self.encrypted:
        # TODO: replace fixed timestamp for now for produced data, createContentKey as needed
        testTime1 = Schedule.fromIsoString("20160320T080000")
        self.producer.createContentKey(testTime1)
        self.producer.produce(testTime1, result)
    else:
        # Arbitrary produced data lifetime
        data = Data(Name(self.identityName).append("20160320T080000"))
        data.getMetaInfo().setFreshnessPeriod(400000)
        data.setContent(result)
        # If the interest's still within lifetime, this will satisfy the interest
        self.memoryContentCache.add(data)
        print "Produced data with name " + data.getName().toUri()
def onInterest(self, prefix, interest, face, interestFilterId, filter):
    """
    Reply with the latest sequence number recorded for the device named in
    the interest's last component, HMAC-signed.
    NOTE(review): deviceID comes straight from the interest name and is
    interpolated into a file path -- a crafted name could escape the seq
    directory (path traversal); sanitize before use. Also, 'key' is a
    module-level name not visible here, and the file handle is never
    closed.
    """
    print "Got interest for latest device seq num."
    deviceID = str(interest.getName().getSubName(-1, 1).toUri()[1:])
    file = open("../repo-ng/seq/%s.seq" % (deviceID), "r")
    deviceIDList = file.read()
    data = Data(interest.getName())
    signature = HmacWithSha256Signature()
    signature.getKeyLocator().setType(KeyLocatorType.KEYNAME)
    signature.getKeyLocator().setKeyName(Name("key1"))
    data.setSignature(signature)
    data.setContent(deviceIDList)
    dump("Signing device ID List data packet", data.getName().toUri())
    KeyChain.signWithHmacWithSha256(data, key)
    face.putData(data)
def publishData(self, idx):
    """
    Publish event idx as a Data packet under the next sequence number, or
    mark it for publication if its URL translation has not finished yet.
    """
    # Translation of the video URL has finished by the time of the publishData call;
    # if not, we set translated to "publish"; this is data race free since translateUrl and publishData are scheduled in the same thread
    if self._events[idx]["translated"] != "none":
        # Order published events sequence numbers by start times in destination
        data = Data(Name(self._namePrefixString + str(self._currentIdx)))
        data.setContent(json.dumps(self._events[idx]))
        data.getMetaInfo().setFreshnessPeriod(self._dataLifetime)
        self._keyChain.sign(data, self._certificateName)
        self._memoryContentCache.add(data)
        self._currentIdx += 1
        if __debug__:
            # Debug trace of the event that was just published.
            eventId = str(self._events[idx]["event_id"])
            channel = str(self._events[idx]["channel"])
            srcUrl = str(self._events[idx]["src_url"])
            clipName = str(self._events[idx]["clipName"])
            ytPresent = str(self._events[idx]["ytPresent"])
            clipStartTime = str(self._events[idx]["dst_start_time"])
            clipEndTime = str(self._events[idx]["dst_end_time"])
            print(
                str(time.time()) + " Added event [" + eventId + "-" + channel
                + "|" + clipName + " YT:" + ytPresent + " " + srcUrl[0:30]
                + "... " + clipStartTime + "-" + clipEndTime + "] ("
                + data.getName().toUri() + ")"
            )
    else:
        # Translation still pending: flag the event so it is published as
        # soon as translateUrl completes.
        self._events[idx]["translated"] = "publish"
def test_content_asymmetric_encrypt_small(self):
    """
    RSA round-trip for each test vector: encrypt a small payload into a
    Data packet, validate its name and encrypted-content metadata, then
    decrypt and compare with the original payload.
    """
    for input in encryptorRsaTestInputs:
        # 24-byte payload; small enough for direct RSA encryption.
        rawContent = Blob(
            bytearray([
                0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, 0xfe,
                0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10, 0x63, 0x6f,
                0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73
            ]), False)
        data = Data()
        rsaParams = RsaKeyParams(1024)
        keyName = Name("test")
        decryptKey = RsaAlgorithm.generateKey(rsaParams)
        encryptKey = RsaAlgorithm.deriveEncryptKey(decryptKey.getKeyBits())
        eKey = encryptKey.getKeyBits()
        dKey = decryptKey.getKeyBits()
        encryptParams = EncryptParams(input.type)
        Encryptor.encryptData(data, rawContent, keyName, eKey, encryptParams)
        # Encrypted data is renamed under the /FOR/<keyName> prefix.
        self.assertTrue(
            data.getName().equals(Name("/FOR").append(keyName)),
            input.testName)
        extractContent = EncryptedContent()
        extractContent.wireDecode(data.getContent())
        self.assertTrue(
            keyName.equals(extractContent.getKeyLocator().getKeyName()),
            input.testName)
        # RSA has no IV, unlike the AES cases.
        self.assertEqual(extractContent.getInitialVector().size(), 0,
                         input.testName)
        self.assertEqual(extractContent.getAlgorithmType(), input.type,
                         input.testName)
        recovered = extractContent.getPayload()
        decrypted = RsaAlgorithm.decrypt(dKey, recovered, encryptParams)
        self.assertTrue(rawContent.equals(decrypted), input.testName)
def onBtleData(self, data):
    """
    BTLE data callback: repackage "pitch,yaw,roll" CSV content as JSON and
    republish it as a versioned, signed Data packet in the local cache.
    """
    # expect data format like "0.2,0.1,0.3"
    content = data.getContent().toRawStr()
    print "got data: " + data.getName().toUri() + " : " + content
    if self._security:
        # Hmac verify the data we receive
        # NOTE(review): verification is not implemented yet.
        pass
    pyr = content.split(',')
    if len(pyr) >= 3:
        # Build the JSON body by hand from the three CSV fields.
        resultingContent = "{\"p\":" + pyr[0] + ",\"y\":" + pyr[1] + ",\"r\":" + pyr[2] + "}"
        timestamp = time.time() * 1000
        dataOut = Data(Name(self._dataPrefix).appendVersion(int(timestamp)))
        dataOut.setContent(resultingContent)
        dataOut.getMetaInfo().setFreshnessPeriod(10000)
        self._keyChain.sign(dataOut, self._certificateName)
        self._dataCache.add(dataOut)
        print "data added: " + dataOut.getName().toUri()
def publishData(self): timestamp = time.time() cpu_use = ps.cpu_percent() users = [u.name for u in ps.users()] nProcesses = len(ps.pids()) memUse = ps.virtual_memory().percent swapUse = ps.swap_memory().percent info = {'cpu_usage':cpu_use, 'users':users, 'processes':nProcesses, 'memory_usage':memUse, 'swap_usage':swapUse} dataOut = Data(Name(self._dataPrefix).appendVersion(int(timestamp))) dataOut.setContent(json.dumps(info)) dataOut.getMetaInfo().setFreshnessPeriod(10000) self._keyChain.sign(dataOut, self._certificateName) self._dataCache.add(dataOut) print "data added: " + dataOut.getName().toUri() # repeat every 5 seconds self._face.callLater(5000, self.publishData)
def prepareNextData(self,referenceSegmentNo):
    """
    Top up the in-memory segment list (_mData) so that _nDataToPrepare
    segments exist beyond referenceSegmentNo, reading _segmentSize chunks
    from insertStream until EOF.
    """
    if (self._isFinished):
        return
    if self._mData:
        maxSegmentNo = len(self._mData)
        # Enough segments already prepared ahead of the reference.
        if(maxSegmentNo - referenceSegmentNo >= self._nDataToPrepare):
            return
        # Only the shortfall needs preparing.
        self._nDataToPrepare -= (maxSegmentNo - referenceSegmentNo)
    # Prepare _nDataToPrepare number of data segments, and insert them into _mData
    for i in range(0,self._nDataToPrepare):
        buffer = self.insertStream.read(self._segmentSize)
        if not buffer:
            # EOF: mark finished and stop preparing.
            self._isFinished = True
            # For now, repo-ng cannot handle FinalBlockID:
            # ERROR: Invalid length for nonNegativeInteger (only 1, 2, 4, and 8 are allowed)
            #d.getMetaInfo().setFinalBlockID(self._currentSegmentNo)
            # using EndBlockId in repo protocol spec instead
            break
        if self._currentSegmentNo == 0:
            # Segment 0 uses a single-digit marker component (marker 0x00).
            dataName = Name(self._dataName).append(PutFile.componentFromSingleDigitNumberWithMarkerCXX(0, 0x00))
        else:
            dataName = Name(self._dataName).appendSegment(self._currentSegmentNo)
        d = Data(dataName)
        # Biggest mistake: wrong data name. Still finding out why, though
        # Original one:
        #d = Data(Name(self._dataName).append(self._currentSegmentNo))
        print "Given data name", d.getName().toUri(), " Segment no", self._currentSegmentNo
        d.setContent(buffer)
        self._keyChain.sign(d, self._certificateName)
        self._mData.append(d)
        self._currentSegmentNo += 1
def test_content_asymmetric_encrypt_small(self):
    """
    RSA round-trip per test vector: encrypt a small payload into a Data
    packet, validate name and metadata, then decrypt and compare.
    """
    for case in encryptorRsaTestInputs:
        # 24-byte payload; small enough for direct RSA encryption.
        plaintext = Blob(bytearray([
            0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef,
            0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10,
            0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73
        ]), False)
        data = Data()
        keyName = Name("test")
        decryptKey = RsaAlgorithm.generateKey(RsaKeyParams(1024))
        encryptKey = RsaAlgorithm.deriveEncryptKey(decryptKey.getKeyBits())
        params = EncryptParams(case.type)
        Encryptor.encryptData(
            data, plaintext, keyName, encryptKey.getKeyBits(), params)
        # Encrypted data is renamed under the /FOR/<keyName> prefix.
        self.assertTrue(
            data.getName().equals(Name("/FOR").append(keyName)),
            case.testName)
        parsed = EncryptedContent()
        parsed.wireDecode(data.getContent())
        self.assertTrue(
            keyName.equals(parsed.getKeyLocator().getKeyName()),
            case.testName)
        # RSA has no IV, unlike the AES cases.
        self.assertEqual(parsed.getInitialVector().size(), 0, case.testName)
        self.assertEqual(parsed.getAlgorithmType(), case.type, case.testName)
        recovered = RsaAlgorithm.decrypt(
            decryptKey.getKeyBits(), parsed.getPayload(), params)
        self.assertTrue(plaintext.equals(recovered), case.testName)
def publishFloorImage(self):
    """
    Generator that publishes the floor image as a sequence of
    base64-encoded Data segments named <namespace>/_img/<segment>.

    Each chunk of defaultBlockSize bytes read from self._imageFilePath is
    wrapped in a signed Data packet, added to the in-memory cache, and
    handed to startRepoInsertion.  The generator yields None after caching
    each segment so a cooperative scheduler can interleave other work.
    """
    with open(self._imageFilePath, 'rb') as imageFile:
        segment = 0
        # BUG FIX: the original looped 'while bytes != ""', which never
        # terminates under Python 3 because a binary read() returns b"" at
        # EOF and b"" != "" is True.  Testing truthiness works for both str
        # and bytes.  The loop variable is also renamed so it no longer
        # shadows the 'bytes' builtin.
        chunk = imageFile.read(defaultBlockSize)
        while chunk:
            data = Data(
                Name(self._namespace).append("_img").append(str(segment)))
            segment += 1
            #data.getMetaInfo().setFinalBlockId()
            data.setContent(b64encode(chunk))
            data.getMetaInfo().setFreshnessPeriod(
                self._defaultFreshnessPeriod)
            self._keyChain.sign(data)
            self._cache.add(data)
            print(data.getName().toUri())
            yield None
            self.startRepoInsertion(data)
            chunk = imageFile.read(defaultBlockSize)
            # Throttle so the repo is not flooded with insertion commands.
            time.sleep(0.1)
    return
def calculateAggregation(self, dataType, aggregationType, childrenList, startTime, interval, publishingPrefix, repeat = False):
    """
    Aggregate one time window [startTime, startTime + interval) and, when
    all inputs are present, publish the result as a signed Data packet
    named <publishingPrefix>/<startTime>/<endTime>.

    With children, the inputs are the children's already-received Data
    packets for this window (consumed entries are deleted after
    publishing).  Without children (a leaf), the inputs are this node's
    own raw values whose timestamps fall inside the window.  If any child
    has not replied yet, nothing is published this round.

    :param dataType: Raw-data queue key; concatenated with aggregationType
        to index the per-aggregation queue.
    :param aggregationType: Aggregation function name given to
        self._aggregation.getAggregation.
    :param childrenList: Dict of child names; empty for a leaf producer.
    :param startTime: Window start (inclusive).
    :param interval: Window length, also the re-schedule delay.
    :param publishingPrefix: Name prefix of the published packet.
    :param repeat: If True, re-schedule this calculation for the next
        window on self._loop (used only by the raw data producer).
    """
    doCalc = True
    dataList = []
    # TODO: an intermediate node cannot produce raw data for now
    if len(childrenList.keys()) != 0:
        # Aggregating node: every child must have contributed this window.
        for childName in childrenList.keys():
            dataDictKey = self.getDataDictKey(startTime, (startTime + interval), childName)
            if dataDictKey in self._dataQueue[dataType + aggregationType]._dataDict:
                data = self._dataQueue[dataType + aggregationType]._dataDict[dataDictKey]
                dataList.append(float(data.getContent().toRawStr()))
            else:
                #print('Child ' + childName + ' has not replied yet')
                doCalc = False
                break
    else:
        # Leaf node: gather raw values timestamped inside this window.
        for inst in self._dataQueue[dataType]._dataDict.keys():
            if int(inst) >= startTime and int(inst) < startTime + interval:
                dataList.append(self._dataQueue[dataType]._dataDict[inst])
    if doCalc:
        content = self._aggregation.getAggregation(aggregationType, dataList)
        if content:
            publishData = Data(Name(publishingPrefix).append(str(startTime)).append(str(startTime + interval)))
            publishData.setContent(str(content))
            publishData.getMetaInfo().setFreshnessPeriod(DEFAULT_DATA_LIFETIME)
            self._keyChain.sign(publishData, self._certificateName)
            self._memoryContentCache.add(publishData)
            # Drop the child entries just folded into the published result.
            for childName in childrenList.keys():
                dataDictKey = self.getDataDictKey(startTime, (startTime + interval), childName)
                if dataDictKey in self._dataQueue[dataType + aggregationType]._dataDict:
                    del self._dataQueue[dataType + aggregationType]._dataDict[dataDictKey]
            if __debug__:
                print("Produced: " + publishData.getName().toUri() + "; " + publishData.getContent().toRawStr())
    # repetition of this function only happens for raw data producer, otherwise calculateAggregation is called by each onData
    if repeat:
        self._loop.call_later(interval, self.calculateAggregation, dataType, aggregationType, childrenList, startTime + interval, interval, publishingPrefix, repeat)
    return
def test_content_asymmetric_encrypt_large(self):
    """
    For each RSA test input, encrypt a payload too large for a single RSA
    block (288 bytes vs. a 1024-bit key) and verify the hybrid layout:
    the Data content is an RSA-encrypted nonce key (no IV) immediately
    followed by the AES-CBC-encrypted payload (16-byte IV), and
    decrypting both layers recovers the original plaintext.
    """
    for input in encryptorRsaTestInputs:
        largeContent = Blob(bytearray([
            0x73, 0x5a, 0xbd, 0x47, 0x0c, 0xfe, 0xf8, 0x7d,
            0x2e, 0x17, 0xaa, 0x11, 0x6f, 0x23, 0xc5, 0x10,
            0x23, 0x36, 0x88, 0xc4, 0x2a, 0x0f, 0x9a, 0x72,
            0x54, 0x31, 0xa8, 0xb3, 0x51, 0x18, 0x9f, 0x0e,
            0x1b, 0x93, 0x62, 0xd9, 0xc4, 0xf5, 0xf4, 0x3d,
            0x61, 0x9a, 0xca, 0x05, 0x65, 0x6b, 0xc6, 0x41,
            0xf9, 0xd5, 0x1c, 0x67, 0xc1, 0xd0, 0xd5, 0x6f,
            0x7b, 0x70, 0xb8, 0x8f, 0xdb, 0x19, 0x68, 0x7c,
            0xe0, 0x2d, 0x04, 0x49, 0xa9, 0xa2, 0x77, 0x4e,
            0xfc, 0x60, 0x0d, 0x7c, 0x1b, 0x93, 0x6c, 0xd2,
            0x61, 0xc4, 0x6b, 0x01, 0xe9, 0x12, 0x28, 0x6d,
            0xf5, 0x78, 0xe9, 0x99, 0x0b, 0x9c, 0x4f, 0x90,
            0x34, 0x3e, 0x06, 0x92, 0x57, 0xe3, 0x7a, 0x8f,
            0x13, 0xc7, 0xf3, 0xfe, 0xf0, 0xe2, 0x59, 0x48,
            0x15, 0xb9, 0xdb, 0x77, 0x07, 0x1d, 0x6d, 0xb5,
            0x65, 0x17, 0xdf, 0x76, 0x6f, 0xb5, 0x43, 0xde,
            0x71, 0xac, 0xf1, 0x22, 0xbf, 0xb2, 0xe5, 0xd9,
            0x22, 0xf1, 0x67, 0x76, 0x71, 0x0c, 0xff, 0x99,
            0x7b, 0x94, 0x9b, 0x24, 0x20, 0x80, 0xe3, 0xcc,
            0x06, 0x4a, 0xed, 0xdf, 0xec, 0x50, 0xd5, 0x87,
            0x3d, 0xa0, 0x7d, 0x9c, 0xe5, 0x13, 0x10, 0x98,
            0x14, 0xc3, 0x90, 0x10, 0xd9, 0x25, 0x9a, 0x59,
            0xe9, 0x37, 0x26, 0xfd, 0x87, 0xd7, 0xf4, 0xf9,
            0x11, 0x91, 0xad, 0x5c, 0x00, 0x95, 0xf5, 0x2b,
            0x37, 0xf7, 0x4e, 0xb4, 0x4b, 0x42, 0x7c, 0xb3,
            0xad, 0xd6, 0x33, 0x5f, 0x0b, 0x84, 0x57, 0x7f,
            0xa7, 0x07, 0x73, 0x37, 0x4b, 0xab, 0x2e, 0xfb,
            0xfe, 0x1e, 0xcb, 0xb6, 0x4a, 0xc1, 0x21, 0x5f,
            0xec, 0x92, 0xb7, 0xac, 0x97, 0x75, 0x20, 0xc9,
            0xd8, 0x9e, 0x93, 0xd5, 0x12, 0x7a, 0x64, 0xb9,
            0x4c, 0xed, 0x49, 0x87, 0x44, 0x5b, 0x4f, 0x90,
            0x34, 0x3e, 0x06, 0x92, 0x57, 0xe3, 0x7a, 0x8f,
            0x13, 0xc7, 0xf3, 0xfe, 0xf0, 0xe2, 0x59, 0x48,
            0x15, 0xb9, 0xdb, 0x77, 0x07, 0x1d, 0x6d, 0xb5,
            0x65, 0x17, 0xdf, 0x76, 0x6f, 0xb5, 0x43, 0xde,
            0x71, 0xac, 0xf1, 0x22, 0xbf, 0xb2, 0xe5, 0xd9
          ]), False)

        data = Data()
        rsaParams = RsaKeyParams(1024)
        keyName = Name("test")
        # Fresh key pair; the encrypt key is derived from the decrypt key.
        decryptKey = RsaAlgorithm.generateKey(rsaParams)
        encryptKey = RsaAlgorithm.deriveEncryptKey(decryptKey.getKeyBits())
        eKey = encryptKey.getKeyBits()
        dKey = decryptKey.getKeyBits()

        encryptParams = EncryptParams(input.type)
        Encryptor.encryptData(data, largeContent, keyName, eKey, encryptParams)
        # The encrypted packet is named /FOR/<keyName>.
        self.assertTrue(
            data.getName().equals(Name("/FOR").append(keyName)),
            input.testName)

        largeDataContent = data.getContent()

        # largeDataContent is a sequence of the two EncryptedContent.
        # First block: the nonce key, RSA-encrypted for keyName, no IV.
        encryptedNonce = EncryptedContent()
        encryptedNonce.wireDecode(largeDataContent)
        self.assertTrue(
            keyName.equals(encryptedNonce.getKeyLocator().getKeyName()),
            input.testName)
        self.assertEqual(
            encryptedNonce.getInitialVector().size(), 0, input.testName)
        self.assertEqual(
            encryptedNonce.getAlgorithmType(), input.type, input.testName)

        # Use the size of encryptedNonce to find the start of encryptedPayload.
        payloadContent = largeDataContent.buf()[encryptedNonce.wireEncode().size():]
        encryptedPayload = EncryptedContent()
        encryptedPayload.wireDecode(payloadContent)
        nonceKeyName = Name(keyName)
        nonceKeyName.append("nonce")
        # Second block: the payload, AES-CBC-encrypted under <keyName>/nonce.
        self.assertTrue(
            nonceKeyName.equals(encryptedPayload.getKeyLocator().getKeyName()),
            input.testName)
        self.assertEqual(
            encryptedPayload.getInitialVector().size(), 16, input.testName)
        self.assertEqual(
            encryptedPayload.getAlgorithmType(), EncryptAlgorithmType.AesCbc,
            input.testName)
        # The two EncryptedContent blocks exactly fill the Data content.
        self.assertEqual(
            largeDataContent.size(),
            encryptedNonce.wireEncode().size() + encryptedPayload.wireEncode().size(),
            input.testName)

        # Decrypt the nonce key with the RSA private key, then use it to
        # AES-decrypt the payload and compare with the plaintext.
        blobNonce = encryptedNonce.getPayload()
        nonce = RsaAlgorithm.decrypt(dKey, blobNonce, encryptParams)

        encryptParams.setAlgorithmType(EncryptAlgorithmType.AesCbc)
        encryptParams.setInitialVector(encryptedPayload.getInitialVector())
        bufferPayload = encryptedPayload.getPayload()
        largePayload = AesAlgorithm.decrypt(nonce, bufferPayload, encryptParams)
        self.assertTrue(largeContent.equals(largePayload), input.testName)
class TestProducer(object):
    """
    Create a TestProducer with an OnInterestCallback for use with
    registerPrefix to answer interests with prepared packets. When finished,
    a callback will set _enabled to False.

    :param Name contentPrefix: Prefix under which the two content segments
        are named.
    :param Name userKeyName: Key name the D-KEY packet is encrypted for.
    :param KeyChain keyChain: Used to sign every prepared packet.
    :param Name certificateName: Certificate name used when signing.
    """
    def __init__(self, contentPrefix, userKeyName, keyChain, certificateName):
        self._enabled = True
        self._responseCount = 0

        # Imitate test_consumer from the PyNDN integration tests.
        contentName0 = Name(contentPrefix).append("Content").appendSegment(0)
        contentName1 = Name(contentPrefix).append("Content").appendSegment(1)
        cKeyName = Name("/Prefix/SAMPLE/Content/C-KEY/1")
        dKeyName = Name("/Prefix/READ/D-KEY/1/2")

        # Generate the E-KEY and D-KEY.
        params = RsaKeyParams()
        fixtureDKeyBlob = RsaAlgorithm.generateKey(params).getKeyBits()
        fixtureEKeyBlob = RsaAlgorithm.deriveEncryptKey(
            fixtureDKeyBlob).getKeyBits()

        # The user key.
        fixtureUserEKeyBlob = Blob(FIXTURE_USER_E_KEY)

        # Load the C-KEY.
        fixtureCKeyBlob = Blob(AES_KEY, False)

        # Imitate createEncryptedContent. Make two segments.
        # Both segments are AES-CBC-encrypted under the C-KEY.
        encryptParams = EncryptParams(EncryptAlgorithmType.AesCbc)
        encryptParams.setInitialVector(Blob(INITIAL_VECTOR, False))
        self._contentData0 = Data(contentName0)
        Encryptor.encryptData(
            self._contentData0, Blob(DATA0_CONTENT, False), cKeyName,
            fixtureCKeyBlob, encryptParams)
        # FinalBlockId marks segment 1 as the last segment.
        self._contentData0.getMetaInfo().setFinalBlockId(
            Name().appendSegment(1)[0])
        keyChain.sign(self._contentData0, certificateName)

        self._contentData1 = Data(contentName1)
        Encryptor.encryptData(
            self._contentData1, Blob(DATA1_CONTENT, False), cKeyName,
            fixtureCKeyBlob, encryptParams)
        self._contentData1.getMetaInfo().setFinalBlockId(
            Name().appendSegment(1)[0])
        keyChain.sign(self._contentData1, certificateName)

        # Imitate createEncryptedCKey.
        # The C-KEY packet carries the AES key, RSA-OAEP-encrypted under
        # the E-KEY.
        self._cKeyData = Data(cKeyName)
        encryptParams = EncryptParams(EncryptAlgorithmType.RsaOaep)
        Encryptor.encryptData(
            self._cKeyData, fixtureCKeyBlob, dKeyName, fixtureEKeyBlob,
            encryptParams)
        keyChain.sign(self._cKeyData, certificateName)

        # Imitate createEncryptedDKey.
        # The D-KEY packet carries the D-KEY, RSA-OAEP-encrypted under the
        # user's E-KEY.
        self._dKeyData = Data(dKeyName)
        encryptParams = EncryptParams(EncryptAlgorithmType.RsaOaep)
        Encryptor.encryptData(
            self._dKeyData, fixtureDKeyBlob, userKeyName, fixtureUserEKeyBlob,
            encryptParams)
        keyChain.sign(self._dKeyData, certificateName)

    def onInterest(self, prefix, interest, face, interestFilterId, filter):
        """
        Reply with whichever prepared packet the interest matches, checked
        in the order: content segment 0, C-KEY, content segment 1, D-KEY.
        After four replies set _enabled to False.
        """
        if interest.matchesName(self._contentData0.getName()):
            data = self._contentData0
        elif interest.matchesName(self._cKeyData.getName()):
            data = self._cKeyData
        elif interest.matchesName(self._contentData1.getName()):
            data = self._contentData1
        elif interest.matchesName(self._dKeyData.getName()):
            data = self._dKeyData
        else:
            # Not one of the prepared packets; ignore the interest.
            return

        dump("Sending Data packet " + data.getName().toUri())
        face.putData(data)

        self._responseCount += 1
        if self._responseCount >= 4:
            # We sent all the packets.
            self._enabled = False

    def onRegisterFailed(self, prefix):
        """Log the registration failure and disable the producer."""
        dump("Register failed for prefix", prefix.toUri())
        self._enabled = False
append(originalTimeString)) catalogContentArray = [] for i in range(0, dataNum): emptyData = Data() timeString = basetimeString + str(i).zfill(baseZFill) timeFloat = Schedule.fromIsoString(timeString) dataObject = json.dumps({ "lat": baseLat + random.randint(-10, 10), "timestamp": int(timeFloat / 1000), "lng": baseLng + random.randint(-10, 10) }) testProducer.producer.produce(emptyData, timeFloat, Blob(dataObject, False)) producedName = emptyData.getName() memoryContentCache.add(emptyData) print "Produced " + emptyData.getName().toUri() # Insert content into repo-ng testProducer.initiateContentStoreInsertion(repoPrefix, emptyData) catalogContentArray.append(int(timeFloat / 1000)) catalogData.setContent(json.dumps(catalogContentArray)) testProducer.keyChain.sign(catalogData) print "Unencrypted catalog name is " + catalogData.getName().toUri() encryptedCatalogData = Data() testProducer.catalogProducer.produce( encryptedCatalogData, Schedule.fromIsoString(basetimeString + str(0).zfill(baseZFill)),
# Script fragment: publish dataNum samples plus a catalog of their
# timestamps.  Relies on module-level state defined earlier in the script
# (testProducer, memoryContentCache, username, originalTimeString,
# basetimeString, dataNum, baseZFill, baseLat, baseLng, repoPrefix,
# onRegisterFailed, onDataNotFound); the fragment ends mid-script.
timeFloat = Schedule.fromIsoString(originalTimeString)
testProducer.createContentKey(timeFloat)
memoryContentCache.registerPrefix(Name(username), onRegisterFailed, onDataNotFound)
# Versioned catalog packet that will hold the list of sample timestamps.
catalogData = Data(Name(username).append(Name("/data/fitness/physical_activity/time_location/catalog/")).append(originalTimeString).appendVersion(1))
catalogContentArray = []
for i in range(0, dataNum):
    emptyData = Data()
    # One sample per zero-padded time step after basetimeString.
    timeString = basetimeString + str(i).zfill(baseZFill)
    timeFloat = Schedule.fromIsoString(timeString)
    # Random jitter around the base coordinates; timestamp is timeFloat
    # (presumably milliseconds) converted to seconds -- TODO confirm units.
    dataObject = json.dumps({"lat": baseLat + random.randint(-10, 10), "timestamp": int(timeFloat / 1000), "lng": baseLng + random.randint(-10, 10)})
    # produce() fills in emptyData's name and (encrypted) content.
    testProducer.producer.produce(emptyData, timeFloat, Blob(dataObject, False))
    producedName = emptyData.getName()
    memoryContentCache.add(emptyData)
    print "Produced " + emptyData.getName().toUri()
    # Insert content into repo-ng
    testProducer.initiateContentStoreInsertion(repoPrefix, emptyData)
    catalogContentArray.append(int(timeFloat / 1000))
# The unencrypted catalog is simply the JSON list of sample timestamps.
catalogData.setContent(json.dumps(catalogContentArray))
testProducer.keyChain.sign(catalogData)
# Encrypted copy of the same catalog, produced under the first time slot.
encryptedCatalogData = Data()
testProducer.catalogProducer.produce(encryptedCatalogData, Schedule.fromIsoString(basetimeString + str(0).zfill(baseZFill)), Blob(json.dumps(catalogContentArray), False))
print "Encrypted catalog name is " + encryptedCatalogData.getName().toUri()
# Put the unencrypted as well as encrypted catalog into repo
def test_content_key_request(self):
    """
    Drive Producer.createContentKey through a stubbed Face and verify:
    E-KEY lookups happen once per namespace level (3 in total), each
    generated C-KEY packet is correctly RSA-encrypted for its decryption
    key, a second time slot does not re-fetch E-KEYs, and produce()
    AES-encrypts data under the recorded C-KEY with the expected name
    layout.
    """
    prefix = Name("/prefix")
    suffix = Name("/a/b/c")
    expectedInterest = Name(prefix)
    expectedInterest.append(Encryptor.NAME_COMPONENT_READ)
    expectedInterest.append(suffix)
    expectedInterest.append(Encryptor.NAME_COMPONENT_E_KEY)

    cKeyName = Name(prefix)
    cKeyName.append(Encryptor.NAME_COMPONENT_SAMPLE)
    cKeyName.append(suffix)
    cKeyName.append(Encryptor.NAME_COMPONENT_C_KEY)

    # Key validity window, two query times inside it, and their rounded
    # counterparts (seconds truncated to the hour).
    timeMarker = Name("20150101T100000/20150101T120000")
    testTime1 = Schedule.fromIsoString("20150101T100001")
    testTime2 = Schedule.fromIsoString("20150101T110001")
    testTimeRounded1 = Name.Component("20150101T100000")
    testTimeRounded2 = Name.Component("20150101T110000")
    testTimeComponent2 = Name.Component("20150101T110001")

    # Create content keys required for this test case:
    for i in range(suffix.size()):
        self.createEncryptionKey(expectedInterest, timeMarker)
        # Move one level up the namespace for the next E-KEY.
        expectedInterest = expectedInterest.getPrefix(-2).append(
            Encryptor.NAME_COMPONENT_E_KEY)

    # Single-element list so the nested closures can mutate the count.
    expressInterestCallCount = [0]

    # Prepare a TestFace to instantly answer calls to expressInterest.
    class TestFace(object):
        def __init__(self, handleExpressInterest):
            self.handleExpressInterest = handleExpressInterest

        def expressInterest(self, interest, onData, onTimeout, onNetworkNack):
            return self.handleExpressInterest(
                interest, onData, onTimeout, onNetworkNack)

    def handleExpressInterest(interest, onData, onTimeout, onNetworkNack):
        # Count each lookup and answer immediately from the pre-made keys.
        expressInterestCallCount[0] += 1
        interestName = Name(interest.getName())
        interestName.append(timeMarker)
        self.assertTrue(interestName in self.encryptionKeys)
        onData(interest, self.encryptionKeys[interestName])
        return 0

    face = TestFace(handleExpressInterest)

    # Verify that the content key is correctly encrypted for each domain, and
    # the produce method encrypts the provided data with the same content key.
    testDb = Sqlite3ProducerDb(self.databaseFilePath)
    producer = Producer(prefix, suffix, face, self.keyChain, testDb)
    contentKey = [None]  # Blob

    def checkEncryptionKeys(
          result, testTime, roundedTime, expectedExpressInterestCallCount):
        self.assertEqual(
            expectedExpressInterestCallCount, expressInterestCallCount[0])
        self.assertEqual(True, testDb.hasContentKey(testTime))
        contentKey[0] = testDb.getContentKey(testTime)

        params = EncryptParams(EncryptAlgorithmType.RsaOaep)
        for i in range(len(result)):
            key = result[i]
            keyName = key.getName()
            # Name layout: <cKeyName>/<roundedTime>/FOR/<decryption key name>.
            self.assertEqual(cKeyName, keyName.getSubName(0, 6))
            self.assertEqual(keyName.get(6), roundedTime)
            self.assertEqual(keyName.get(7), Encryptor.NAME_COMPONENT_FOR)
            self.assertEqual(
                True, keyName.getSubName(8) in self.decryptionKeys)

            decryptionKey = self.decryptionKeys[keyName.getSubName(8)]
            self.assertEqual(True, decryptionKey.size() != 0)

            encryptedKeyEncoding = key.getContent()
            content = EncryptedContent()
            content.wireDecode(encryptedKeyEncoding)
            encryptedKey = content.getPayload()
            # Every C-KEY packet must decrypt back to the same content key.
            retrievedKey = RsaAlgorithm.decrypt(
                decryptionKey, encryptedKey, params)
            self.assertTrue(contentKey[0].equals(retrievedKey))

        self.assertEqual(3, len(result))

    # An initial test to confirm that keys are created for this time slot.
    contentKeyName1 = producer.createContentKey(
        testTime1,
        lambda keys: checkEncryptionKeys(keys, testTime1, testTimeRounded1, 3))

    # Verify that we do not repeat the search for e-keys. The total
    # expressInterestCallCount should be the same.
    contentKeyName2 = producer.createContentKey(
        testTime2,
        lambda keys: checkEncryptionKeys(keys, testTime2, testTimeRounded2, 3))

    # Confirm content key names are correct
    self.assertEqual(cKeyName, contentKeyName1.getPrefix(-1))
    self.assertEqual(testTimeRounded1, contentKeyName1.get(6))
    self.assertEqual(cKeyName, contentKeyName2.getPrefix(-1))
    self.assertEqual(testTimeRounded2, contentKeyName2.get(6))

    # Confirm that produce encrypts with the correct key and has the right name.
    testData = Data()
    producer.produce(testData, testTime2, Blob(DATA_CONTENT, False))

    producedName = testData.getName()
    self.assertEqual(cKeyName.getPrefix(-1), producedName.getSubName(0, 5))
    self.assertEqual(testTimeComponent2, producedName.get(5))
    self.assertEqual(Encryptor.NAME_COMPONENT_FOR, producedName.get(6))
    self.assertEqual(cKeyName, producedName.getSubName(7, 6))
    self.assertEqual(testTimeRounded2, producedName.get(13))

    dataBlob = testData.getContent()
    dataContent = EncryptedContent()
    dataContent.wireDecode(dataBlob)
    encryptedData = dataContent.getPayload()
    initialVector = dataContent.getInitialVector()

    params = EncryptParams(EncryptAlgorithmType.AesCbc, 16)
    params.setInitialVector(initialVector)
    # Decrypting with the recorded C-KEY must recover the original content.
    decryptTest = AesAlgorithm.decrypt(contentKey[0], encryptedData, params)
    self.assertTrue(decryptTest.equals(Blob(DATA_CONTENT, False)))
def scanForNistSensors(): scanner = Scanner().withDelegate(ScanDelegate()) scanner.scan(.1) if foundNistSensor == 0: print "Didn't find any nist sensors..." return False p = Peripheral(esp32Address) p.setMTU(500) #svcList = p.getServices() #print "Handle UUID Properties" #print "-------------------------------------------------------" #for svc in svcList: # print (str(svc.uuid)) #chList = p.getCharacteristics() #print "Handle UUID Properties" #print "-------------------------------------------------------" #for ch in chList: # print (" 0x"+ format(ch.getHandle(),'02X') +" "+str(ch.uuid) +" " + ch.propertiesToString()) nist_service_uuid = UUID("0000ffe0-0000-1000-8000-00805f9b34fb") nist_characteristic_uuid = UUID("beb5483e-36e1-4688-b7f5-ea07361b26a8") nistService = p.getServiceByUUID(nist_service_uuid) #nistCharacteristic = p.getCharacteristics(nist_characteristic_uuid)[0] nistCharacteristic = nistService.getCharacteristics("beb5483e-36e1-4688-b7f5-ea07361b26a8")[0] #readBytes = bytes(p.readCharacteristic(0x2A)) #readBytes = bytes(nistCharacteristic.read()) #print binascii.hexlify(readBytes) #with open('/home/pi/Desktop/esp32-ndn-ble/src/readBytes.txt', 'a') as the_file: # the_file.seek(0) # the_file.truncate() # the_file.write(binascii.hexlify(readBytes)) #TlvData = Blob(readBytes) #data = Data() #data.wireDecode(TlvData) # Use a hard-wired secret for testing. In a real application the signer # ensures that the verifier knows the shared key and its keyName. 
key = Blob(bytearray([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31 ])) #if KeyChain.verifyDataWithHmacWithSha256(data, key): # dump("Hard-coded data signature verification: VERIFIED") #else: # dump("Hard-coded data signature verification: FAILED") freshData = Data(Name("/netInfo")) signature = HmacWithSha256Signature() signature.getKeyLocator().setType(KeyLocatorType.KEYNAME) signature.getKeyLocator().setKeyName(Name("key1")) freshData.setSignature(signature) freshData.setContent("EdwardPi\n11111111\n192.168.4.1\n") dump("Signing fresh data packet", freshData.getName().toUri()) KeyChain.signWithHmacWithSha256(freshData, key) if KeyChain.verifyDataWithHmacWithSha256(freshData, key): dump("Freshly-signed data signature verification: VERIFIED") else: dump("Freshly-signed data signature verification: FAILED") bytesSend = freshData.wireEncode() print binascii.hexlify(bytes(bytesSend)) try: nistCharacteristic.write(bytes(bytesSend), True) except: print "Exception when trying to write to BLE characteristic."
def publish(self, line):
    """
    Parse one sensor log line and publish it as NDN data.

    Expects a line of the form "[<datetime>] ... : (point <name> ...)".
    The raw value is always published; additionally, when a value arrives
    past the sensor's current window threshold, the average of the queued
    values is published first under .../avg/<start>/<end>.  The first
    time a sensor is seen, an identity and certificate are created for it
    (and, with DO_CERT_SETUP, sent to a remote service for signing).
    """
    # Pull out and parse datetime for log entry
    # (note we shoudld use point time for timestamp)
    try:
        if not ": (point" in line:
            return
        point = parse.search("(point {})", line)[0].split(" ")
    except Exception as detail:
        print("publish: Parse error for", line, "-", detail)
        return
    try:
        tempTime = datetime.strptime(
            parse.search("[{}]", line)[0], "%Y-%m-%d %H:%M:%S.%f")
    except Exception as detail:
        print("publish: Date/time conversion error for", line, "-", detail)
        return
    sensorName = point[0]
    aggregationNamePrefix = self.pointNameToNDNName(sensorName)
    dataDict = self.pointToJSON(point)
    if aggregationNamePrefix is not None:
        #if __debug__:
        #    print(dateTime, aggregationNamePrefix, dataDict["timestamp"], "payload:", dataDict["value"])
        try:
            # TODO: since the leaf sensor publisher is not a separate node for now, we also publish aggregated data
            # of the same sensor over the past given time period in this code;
            # bms_node code has adaptation for leaf sensor publishers as well, ref: example-sensor1.conf
            # Here we make the assumption of fixed time window for *all* sensors
            # First publish aggregation
            dataTime = int(float(dataDict["timestamp"]) * 1000)
            if self._startTime == 0:
                self._startTime = dataTime
            if not (sensorName in self._dataQueue):
                # We don't have record of this sensor, so we create an identity for it, and print the cert string for now to get signed
                sensorIdentityName = Name(self._namespace).append(
                    aggregationNamePrefix).getPrefix(-3)
                sensorCertificateName = self._keyChain.createIdentityAndCertificate(
                    sensorIdentityName)
                if __debug__:
                    print("Sensor identity name: " + sensorIdentityName.toUri())
                certificateData = self._keyChain.getIdentityManager(
                )._identityStorage.getCertificate(sensorCertificateName)
                # We should only ask for cert to be signed upon the first run of a certain sensor
                if DO_CERT_SETUP:
                    # A self-signed certificate (key name equals the cert's
                    # own prefix) still needs an external signature.
                    if (KeyLocator.getFromSignature(
                        certificateData.getSignature()).getKeyName().
                        equals(sensorCertificateName.getPrefix(-1))):
                        # Need to configure for remote gateway deployment; for now, remote uses its own branch with my public IP.
                        print("certificate " + sensorCertificateName.toUri() + " asking for signature")
                        response = urllib2.urlopen(
                            "http://192.168.56.1:5000/bms-cert-hack?cert=" + b64encode(
                            certificateData.wireEncode().toBuffer()) + "&cert_prefix=" + sensorIdentityName.toUri() + '&subject_name=' + sensorIdentityName.toUri()).read()
                        signedCertData = Data()
                        signedCertData.wireDecode(Blob(b64decode(response)))
                        self._cache.add(signedCertData)
                        # Install the signed certificate locally via ndnsec.
                        cmdline = ['ndnsec-install-cert', '-']
                        p = subprocess.Popen(
                            cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
                        cert, err = p.communicate(response)
                        if p.returncode != 0:
                            raise RuntimeError("ndnsec-install-cert error")
                    else:
                        self._cache.add(certificateData)
                else:
                    self._cache.add(certificateData)
                # Start this sensor's queue with its first value and window.
                self._dataQueue[sensorName] = DataQueueItem(
                    [], self._startTime + self._defaultInterval,
                    sensorIdentityName, sensorCertificateName)
                self._dataQueue[sensorName]._dataList.append(
                    dataDict["value"])
            elif dataTime > self._dataQueue[sensorName]._timeThreshold:
                # calculate the aggregation with what's already in the queue, publish data packet, and delete current queue
                # TODO: This should be mutex locked against self
                if len(self._dataQueue[sensorName]._dataList) > 0:
                    avg = 0.0
                    for item in self._dataQueue[sensorName]._dataList:
                        avg += float(item)
                    avg = avg / len(self._dataQueue[sensorName]._dataList)
                    data = Data(
                        Name(self._namespace).append(aggregationNamePrefix).append("avg").append(
                            str(self._dataQueue[sensorName]._timeThreshold)).append(
                            str(self._dataQueue[sensorName]._timeThreshold + self._defaultInterval)))
                    data.setContent(str(avg))
                    data.getMetaInfo().setFreshnessPeriod(
                        self.DEFAULT_DATA_LIFETIME)
                    self._keyChain.sign(
                        data, self._dataQueue[sensorName]._certificateName)
                    self._cache.add(data)
                    print("Aggregation produced " + data.getName().toUri())
                # Reset the queue to the new value and advance the window.
                self._dataQueue[sensorName]._dataList = [dataDict["value"]]
                self._dataQueue[sensorName]._timeThreshold = self._dataQueue[
                    sensorName]._timeThreshold + self._defaultInterval
            else:
                self._dataQueue[sensorName]._dataList.append(
                    dataDict["value"])
            # Then publish raw data
            # Timestamp in data name uses the timestamp from data payload
            instDataPrefix = self.pointNameToNDNName(sensorName, False)
            dataTemp = self.createData(
                instDataPrefix, dataDict["timestamp"], dataDict["value"],
                self._dataQueue[sensorName]._certificateName)
            if __debug__:
                print("Produced raw data name " + dataTemp.getName().toUri())
                print("Produced raw data content " + dataTemp.getContent().toRawStr())
            self._cache.add(dataTemp)
        except Exception as detail:
            print("publish: Error calling createData for", line, "-", detail)
class TestProducer(object):
    """
    Answers interests with pre-built encrypted packets: two content
    segments, a C-KEY packet and a D-KEY packet, imitating test_consumer
    from the PyNDN integration tests.  Use onInterest with registerPrefix;
    once all four packets have been served, _enabled becomes False.
    """
    def __init__(self, contentPrefix, userKeyName, keyChain, certificateName):
        self._enabled = True
        self._responseCount = 0

        segmentName0 = Name(contentPrefix).append("Content").appendSegment(0)
        segmentName1 = Name(contentPrefix).append("Content").appendSegment(1)
        contentKeyName = Name("/Prefix/SAMPLE/Content/C-KEY/1")
        dataKeyName = Name("/Prefix/READ/D-KEY/1/2")

        # E-KEY/D-KEY pair: the encrypt key is derived from the decrypt key.
        rsaParams = RsaKeyParams()
        dKeyBits = RsaAlgorithm.generateKey(rsaParams).getKeyBits()
        eKeyBits = RsaAlgorithm.deriveEncryptKey(dKeyBits).getKeyBits()

        # The user's public key and the fixed AES content key.
        userEKeyBits = Blob(FIXTURE_USER_E_KEY)
        contentKeyBits = Blob(AES_KEY, False)

        # Two content segments, each AES-CBC-encrypted under the C-KEY
        # (imitates createEncryptedContent).
        aesParams = EncryptParams(EncryptAlgorithmType.AesCbc)
        aesParams.setInitialVector(Blob(INITIAL_VECTOR, False))
        self._contentData0 = Data(segmentName0)
        Encryptor.encryptData(self._contentData0, Blob(DATA0_CONTENT, False),
            contentKeyName, contentKeyBits, aesParams)
        # Segment 1 is the last one.
        self._contentData0.getMetaInfo().setFinalBlockId(
            Name().appendSegment(1)[0])
        keyChain.sign(self._contentData0, certificateName)

        self._contentData1 = Data(segmentName1)
        Encryptor.encryptData(self._contentData1, Blob(DATA1_CONTENT, False),
            contentKeyName, contentKeyBits, aesParams)
        self._contentData1.getMetaInfo().setFinalBlockId(
            Name().appendSegment(1)[0])
        keyChain.sign(self._contentData1, certificateName)

        # C-KEY packet: the AES key, RSA-OAEP-encrypted under the E-KEY
        # (imitates createEncryptedCKey).
        self._cKeyData = Data(contentKeyName)
        rsaEncryptParams = EncryptParams(EncryptAlgorithmType.RsaOaep)
        Encryptor.encryptData(self._cKeyData, contentKeyBits, dataKeyName,
            eKeyBits, rsaEncryptParams)
        keyChain.sign(self._cKeyData, certificateName)

        # D-KEY packet: the D-KEY, RSA-OAEP-encrypted under the user's E-KEY
        # (imitates createEncryptedDKey).
        self._dKeyData = Data(dataKeyName)
        rsaEncryptParams = EncryptParams(EncryptAlgorithmType.RsaOaep)
        Encryptor.encryptData(self._dKeyData, dKeyBits, userKeyName,
            userEKeyBits, rsaEncryptParams)
        keyChain.sign(self._dKeyData, certificateName)

    def onInterest(self, prefix, interest, face, interestFilterId, filter):
        """Serve the matching prepared packet; disable after four replies."""
        # Check the candidates in the same order as the original if/elif
        # chain so matching behavior is unchanged.
        for candidate in (self._contentData0, self._cKeyData,
                          self._contentData1, self._dKeyData):
            if interest.matchesName(candidate.getName()):
                data = candidate
                break
        else:
            # No prepared packet matches this interest; ignore it.
            return

        dump("Sending Data packet " + data.getName().toUri())
        face.putData(data)

        self._responseCount += 1
        if self._responseCount >= 4:
            # We sent all the packets.
            self._enabled = False

    def onRegisterFailed(self, prefix):
        """Log the registration failure and disable the producer."""
        dump("Register failed for prefix", prefix.toUri())
        self._enabled = False
def publish(self, line):
    """
    Parse one sensor log line, publish it as NDN data, and ask the repo
    to insert the raw packet.

    Expects a line of the form "[<datetime>] ... : (point <name> ...)".
    The raw JSON payload is always published; additionally, when a value
    arrives past the sensor's current window threshold, the average of
    the queued values is published first under .../avg/<start>/<end>.
    The first time a sensor is seen, an identity and certificate are
    created for it (and, with DO_CERT_SETUP, sent away for signing).
    """
    # Pull out and parse datetime for log entry
    # (note we shoudld use point time for timestamp)
    try:
        if not ": (point" in line:
            return
        point = parse.search("(point {})", line)[0].split(" ")
    except Exception as detail:
        print("publish: Parse error for", line, "-", detail)
        return
    try:
        tempTime = datetime.strptime(
            parse.search("[{}]", line)[0], "%Y-%m-%d %H:%M:%S.%f")
    except Exception as detail:
        print("publish: Date/time conversion error for", line, "-", detail)
        return
    sensorName = point[0]
    aggregationNamePrefix = self.pointNameToNDNName(sensorName)
    dataDict = self.pointToJSON(point)
    # Record when we last saw any data (wall clock, not the point time).
    self._lastDataTimestamp = time.time()
    if aggregationNamePrefix is not None:
        #if __debug__:
        #    print(dateTime, aggregationNamePrefix, dataDict["timestamp"], "payload:", dataDict["value"])
        try:
            # TODO: since the leaf sensor publisher is not a separate node for now, we also publish aggregated data
            # of the same sensor over the past given time period in this code;
            # bms_node code has adaptation for leaf sensor publishers as well, ref: example-sensor1.conf
            # Here we make the assumption of fixed time window for *all* sensors
            # First publish aggregation
            dataTime = int(float(dataDict["timestamp"]) * 1000)
            if self._startTime == 0:
                self._startTime = dataTime
            if not (sensorName in self._dataQueue):
                # We don't have record of this sensor, so we create an identity for it, and print the cert string for now to get signed
                sensorIdentityName = Name(self._namespace).append(
                    aggregationNamePrefix).getPrefix(-3)
                sensorCertificateName = self._keyChain.createIdentityAndCertificate(
                    sensorIdentityName)
                if __debug__:
                    print("Sensor identity name: " + sensorIdentityName.toUri())
                certificateData = self._keyChain.getIdentityManager(
                )._identityStorage.getCertificate(sensorCertificateName, True)
                # We should only ask for cert to be signed upon the first run of a certain sensor
                if DO_CERT_SETUP:
                    # A self-signed certificate (key name equals the cert's
                    # own prefix) still needs an external signature.
                    if (KeyLocator.getFromSignature(
                        certificateData.getSignature()).getKeyName().equals(
                        sensorCertificateName.getPrefix(-1))):
                        # Need to configure for remote gateway deployment; for now, remote uses its own branch with my public IP.
                        print("certificate " + sensorCertificateName.toUri() + " asking for signature")
                        response = urllib2.urlopen(
                            "http://192.168.56.1:5000/bms-cert-hack?cert=" + b64encode(
                            certificateData.wireEncode().toBuffer()) + "&cert_prefix=" + sensorIdentityName.toUri() + '&subject_name=' + sensorIdentityName.toUri()).read()
                        signedCertData = Data()
                        signedCertData.wireDecode(Blob(b64decode(response)))
                        self._cache.add(signedCertData)
                        # Install the signed certificate locally via ndnsec.
                        cmdline = ['ndnsec-install-cert', '-']
                        p = subprocess.Popen(
                            cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
                        cert, err = p.communicate(response)
                        if p.returncode != 0:
                            raise RuntimeError("ndnsec-install-cert error")
                    else:
                        self._cache.add(certificateData)
                else:
                    self._cache.add(certificateData)
                # Start this sensor's queue with its first value and window.
                self._dataQueue[sensorName] = DataQueueItem(
                    [], self._startTime + self._defaultInterval,
                    sensorIdentityName, sensorCertificateName)
                self._dataQueue[sensorName]._dataList.append(dataDict["value"])
            elif dataTime > self._dataQueue[sensorName]._timeThreshold:
                # calculate the aggregation with what's already in the queue, publish data packet, and delete current queue
                # TODO: This should be mutex locked against self
                if len(self._dataQueue[sensorName]._dataList) > 0:
                    avg = 0.0
                    for item in self._dataQueue[sensorName]._dataList:
                        avg += float(item)
                    avg = avg / len(self._dataQueue[sensorName]._dataList)
                    data = Data(
                        Name(self._namespace).append(aggregationNamePrefix).append("avg").append(
                            str(self._dataQueue[sensorName]._timeThreshold)).append(
                            str(self._dataQueue[sensorName]._timeThreshold + self._defaultInterval)))
                    data.setContent(str(avg))
                    data.getMetaInfo().setFreshnessPeriod(self.DEFAULT_DATA_LIFETIME)
                    self._keyChain.sign(data, self._dataQueue[sensorName]._certificateName)
                    self._cache.add(data)
                    print("Aggregation produced " + data.getName().toUri())
                # Reset the queue to the new value and advance the window.
                self._dataQueue[sensorName]._dataList = [dataDict["value"]]
                self._dataQueue[sensorName]._timeThreshold = self._dataQueue[
                    sensorName]._timeThreshold + self._defaultInterval
            else:
                self._dataQueue[sensorName]._dataList.append(dataDict["value"])
            # Then publish raw data
            # Timestamp in data name uses the timestamp from data payload
            instDataPrefix = self.pointNameToNDNName(sensorName, False)
            dataTemp = self.createData(
                instDataPrefix, dataDict["timestamp"], json.dumps(dataDict),
                self._dataQueue[sensorName]._certificateName)
            if __debug__:
                print("Produced raw data name " + dataTemp.getName().toUri())
                print("Produced raw data content " + dataTemp.getContent().toRawStr())
            self._cache.add(dataTemp)
            # For now we only insert raw data into repo
            parameter = repo_command_parameter_pb2.RepoCommandParameterMessage()
            # Add the Name.
            for i in range(dataTemp.getName().size()):
                parameter.repo_command_parameter.name.component.append(
                    dataTemp.getName().get(i).toEscapedString())
            # Create the command interest.
            commandInterest = Interest(Name(repoCommandPrefix).append("insert")
                .append(Name.Component(ProtobufTlv.encode(parameter))))
            self._face.makeCommandInterest(commandInterest)
            # Send the command interest and get the response or timeout.
            def onRepoCommandResponse(interest, data):
                # repo_command_response_pb2 was produced by protoc.
                response = repo_command_response_pb2.RepoCommandResponseMessage()
                try:
                    ProtobufTlv.decode(response, data.content)
                except:
                    print("Cannot decode the repo command response")
                # Status 100 means the repo accepted the insert command.
                if response.repo_command_response.status_code == 100:
                    if __debug__:
                        print("Insertion started")
                else:
                    print("Got repo command error code",
                          response.repo_command_response.status_code)
            def onRepoCommandTimeout(interest):
                if __debug__:
                    print("Insert repo command timeout")
            self._face.expressInterest(
                commandInterest, onRepoCommandResponse, onRepoCommandTimeout)
        except Exception as detail:
            print("publish: Error calling createData for", line, "-", detail)
def calculateAggregation(self, dataType, aggregationType, childrenList,
                         startTime, interval, publishingPrefix, repeat=False):
    """
    Aggregate the data collected for the time window
    [startTime, startTime + interval) and publish the result as a signed
    Data packet into the memory content cache.

    :param dataType: Key identifying the data stream in self._dataQueue.
    :param aggregationType: Aggregation function name passed to
      self._aggregation.getAggregation (e.g. "avg").
    :param childrenList: Dict of child node names. Non-empty means this is
      an aggregating (intermediate) node that must wait for every child's
      reply; empty means this node aggregates its own raw samples.
    :param startTime: Start of the aggregation window (integer timestamp).
    :param interval: Window length; also the re-scheduling delay.
    :param publishingPrefix: Name prefix for the published Data packet.
    :param repeat: If True, reschedule this calculation for the next window
      via the event loop. Only the raw-data producer repeats; intermediate
      nodes are re-triggered by each onData instead.
    """
    doCalc = True
    dataList = []
    # TODO: an intermediate node cannot produce raw data for now
    if childrenList:
        # Intermediate node: require one reply from every child for this
        # window before calculating; otherwise postpone.
        for childName in childrenList:
            dataDictKey = self.getDataDictKey(
                startTime, (startTime + interval), childName)
            if dataDictKey in self._dataQueue[dataType + aggregationType]._dataDict:
                data = self._dataQueue[
                    dataType + aggregationType]._dataDict[dataDictKey]
                dataList.append(float(data.getContent().toRawStr()))
            else:
                # A child has not replied yet; skip this round.
                doCalc = False
                break
    else:
        # Raw-data producer: collect locally stored samples whose timestamp
        # falls inside the window.
        # NOTE(review): this branch appends stored values as-is, while the
        # branch above appends float(packet content) — confirm that
        # self._aggregation handles both forms.
        for inst in self._dataQueue[dataType]._dataDict:
            if startTime <= int(inst) < startTime + interval:
                dataList.append(self._dataQueue[dataType]._dataDict[inst])
    if doCalc:
        content = self._aggregation.getAggregation(aggregationType, dataList)
        # NOTE(review): a falsy result (e.g. 0 or 0.0) is not published —
        # confirm getAggregation never legitimately returns a falsy value.
        if content:
            publishData = Data(
                Name(publishingPrefix).append(str(startTime)).append(
                    str(startTime + interval)))
            publishData.setContent(str(content))
            publishData.getMetaInfo().setFreshnessPeriod(
                DEFAULT_DATA_LIFETIME)
            self._keyChain.sign(publishData, self._certificateName)
            self._memoryContentCache.add(publishData)
            # Drop the consumed child replies so this window cannot be
            # aggregated twice.
            for childName in childrenList:
                dataDictKey = self.getDataDictKey(
                    startTime, (startTime + interval), childName)
                if dataDictKey in self._dataQueue[
                        dataType + aggregationType]._dataDict:
                    del self._dataQueue[
                        dataType + aggregationType]._dataDict[dataDictKey]
            if __debug__:
                print("Produced: " + publishData.getName().toUri() + "; " +
                      publishData.getContent().toRawStr())
    # Repetition only happens for the raw data producer; otherwise
    # calculateAggregation is called by each onData.
    if repeat:
        self._loop.call_later(
            interval, self.calculateAggregation, dataType, aggregationType,
            childrenList, startTime + interval, interval, publishingPrefix,
            repeat)
def test_content_key_request(self):
    """
    Exercise Producer.createContentKey and Producer.produce end to end:
    content keys (C-KEYs) must be created once per time slot, encrypted for
    each consumer's E-KEY, and produce() must encrypt payloads with the
    same content key under the expected packet name layout.
    """
    prefix = Name("/prefix")
    suffix = Name("/a/b/c")
    # The interest the producer is expected to send when fetching E-KEYs:
    # /prefix/READ/a/b/c/E-KEY (trimmed one component per loop iteration
    # below to cover every namespace granularity).
    expectedInterest = Name(prefix)
    expectedInterest.append(Encryptor.NAME_COMPONENT_READ)
    expectedInterest.append(suffix)
    expectedInterest.append(Encryptor.NAME_COMPONENT_E_KEY)
    # Name under which the content key itself is published:
    # /prefix/SAMPLE/a/b/c/C-KEY.
    cKeyName = Name(prefix)
    cKeyName.append(Encryptor.NAME_COMPONENT_SAMPLE)
    cKeyName.append(suffix)
    cKeyName.append(Encryptor.NAME_COMPONENT_C_KEY)
    # E-KEY validity window appended to key names.
    timeMarker = Name("20150101T100000/20150101T120000")
    # Two request times falling in two different hour slots; the producer
    # is expected to round them down to the slot boundary.
    testTime1 = Schedule.fromIsoString("20150101T100001")
    testTime2 = Schedule.fromIsoString("20150101T110001")
    testTimeRounded1 = Name.Component("20150101T100000")
    testTimeRounded2 = Name.Component("20150101T110000")
    # Create content keys required for this test case: one E-KEY per
    # namespace level (/prefix/READ/a/b/c, /prefix/READ/a/b, /prefix/READ/a).
    for i in range(suffix.size()):
        self.createEncryptionKey(expectedInterest, timeMarker)
        expectedInterest = expectedInterest.getPrefix(-2).append(
            Encryptor.NAME_COMPONENT_E_KEY)
    # Mutable cell so the closure below can count calls.
    expressInterestCallCount = [0]

    # Prepare a TestFace to instantly answer calls to expressInterest.
    class TestFace(object):
        def __init__(self, handleExpressInterest):
            self.handleExpressInterest = handleExpressInterest

        def expressInterest(self, interest, onData, onTimeout):
            return self.handleExpressInterest(interest, onData, onTimeout)

    def handleExpressInterest(interest, onData, onTimeout):
        # Serve the pre-created E-KEY for the requested name and count the
        # fetch so the test can assert E-KEYs are not re-fetched.
        expressInterestCallCount[0] += 1
        interestName = Name(interest.getName())
        interestName.append(timeMarker)
        self.assertTrue(interestName in self.encryptionKeys)
        onData(interest, self.encryptionKeys[interestName])
        return 0

    face = TestFace(handleExpressInterest)
    # Verify that the content key is correctly encrypted for each domain, and
    # the produce method encrypts the provided data with the same content key.
    testDb = Sqlite3ProducerDb(self.databaseFilePath)
    producer = Producer(prefix, suffix, face, self.keyChain, testDb)
    contentKey = [None]  # Blob; set by the callback, read after produce().

    def checkEncryptionKeys(
            result, testTime, roundedTime, expectedExpressInterestCallCount):
        # Callback for createContentKey: verify each returned key packet is
        # the C-KEY encrypted for one consumer, decryptable with that
        # consumer's RSA decryption key.
        self.assertEqual(expectedExpressInterestCallCount,
                         expressInterestCallCount[0])
        self.assertEqual(True, testDb.hasContentKey(testTime))
        contentKey[0] = testDb.getContentKey(testTime)
        params = EncryptParams(EncryptAlgorithmType.RsaOaep)
        for i in range(len(result)):
            key = result[i]
            # Expected layout: <cKeyName(6)>/<roundedTime>/FOR/<consumerKeyName>.
            keyName = key.getName()
            self.assertEqual(cKeyName, keyName.getSubName(0, 6))
            self.assertEqual(keyName.get(6), roundedTime)
            self.assertEqual(keyName.get(7), Encryptor.NAME_COMPONENT_FOR)
            self.assertEqual(
                True, keyName.getSubName(8) in self.decryptionKeys)
            decryptionKey = self.decryptionKeys[keyName.getSubName(8)]
            self.assertEqual(True, decryptionKey.size() != 0)
            # Decrypting the packet payload must recover the content key.
            encryptedKeyEncoding = key.getContent()
            content = EncryptedContent()
            content.wireDecode(encryptedKeyEncoding)
            encryptedKey = content.getPayload()
            retrievedKey = RsaAlgorithm.decrypt(
                decryptionKey, encryptedKey, params)
            self.assertTrue(contentKey[0].equals(retrievedKey))
        # One encrypted copy per namespace level created above.
        self.assertEqual(3, len(result))

    # An initial test to confirm that keys are created for this time slot.
    contentKeyName1 = producer.createContentKey(
        testTime1,
        lambda keys: checkEncryptionKeys(keys, testTime1, testTimeRounded1, 3))
    # Verify that we do not repeat the search for e-keys. The total
    # expressInterestCallCount should be the same.
    contentKeyName2 = producer.createContentKey(
        testTime2,
        lambda keys: checkEncryptionKeys(keys, testTime2, testTimeRounded2, 3))
    # Confirm content key names are correct
    self.assertEqual(cKeyName, contentKeyName1.getPrefix(-1))
    self.assertEqual(testTimeRounded1, contentKeyName1.get(6))
    self.assertEqual(cKeyName, contentKeyName2.getPrefix(-1))
    self.assertEqual(testTimeRounded2, contentKeyName2.get(6))
    # Confirm that produce encrypts with the correct key and has the right name.
    testData = Data()
    producer.produce(testData, testTime2, Blob(DATA_CONTENT, False))
    producedName = testData.getName()
    self.assertEqual(cKeyName.getPrefix(-1), producedName.getSubName(0, 5))
    self.assertEqual(testTimeRounded2, producedName.get(5))
    self.assertEqual(Encryptor.NAME_COMPONENT_FOR, producedName.get(6))
    self.assertEqual(cKeyName, producedName.getSubName(7, 6))
    self.assertEqual(testTimeRounded2, producedName.get(13))
    # The payload must decrypt (AES-CBC with the packet's IV) back to the
    # original content using the content key captured by the callback.
    dataBlob = testData.getContent()
    dataContent = EncryptedContent()
    dataContent.wireDecode(dataBlob)
    encryptedData = dataContent.getPayload()
    initialVector = dataContent.getInitialVector()
    params = EncryptParams(EncryptAlgorithmType.AesCbc, 16)
    params.setInitialVector(initialVector)
    decryptTest = AesAlgorithm.decrypt(contentKey[0], encryptedData, params)
    self.assertTrue(decryptTest.equals(Blob(DATA_CONTENT, False)))
async def handleReceive(self):
    """
    Handle one incoming TCP connection. Multiple data packets may be
    transferred over a single connection.

    Reads into a fixed-size buffer (BUFFER_SIZE), decodes as many complete
    NDN Data packets as possible, stores each in self.storage, then shifts
    the unconsumed tail to the front of the buffer and re-schedules itself
    on the event loop to read more.
    """
    logging.info("handleReceive()")
    # Read at most as many bytes as fit in the free tail of the buffer.
    data_bytes = await self.reader.read(
        len(self.buffer) - self.m_inputBufferSize)
    # Rebuild the buffer: existing unread bytes + new bytes + zero padding,
    # keeping the total length exactly BUFFER_SIZE.
    self.buffer = self.buffer[:self.
                              m_inputBufferSize] + data_bytes + bytearray(
                                  len(self.buffer) - self.m_inputBufferSize - len(data_bytes))
    assert len(self.buffer) == BUFFER_SIZE
    nBytesReceived = len(data_bytes)
    # Read 0 bytes means the other side has closed the connection
    if nBytesReceived == 0:
        logging.info('Otherside closed connection')
        return
    self.m_inputBufferSize += nBytesReceived
    isOk = True
    offset = 0
    # Decode packets back-to-back until the remaining bytes do not form a
    # complete packet (ValueError) or the buffer is exhausted.
    while self.m_inputBufferSize - offset > 0:
        data = Data()
        decoder = Tlv0_2WireFormat()
        try:
            decoder.decodeData(data, self.buffer[offset:], False)
        except ValueError:
            # Likely a partial packet; keep the bytes for the next read.
            logging.warning('Decoding failed')
            isOk = False
            break
        # Obtain data size by encoding it again
        # NOTE(review): assumes re-encoding yields the same wire length as
        # the received encoding — confirm for non-canonical inputs.
        offset += len(decoder.encodeData(data)[0])
        assert offset <= self.m_inputBufferSize
        # Store the wire encoding (pickled bytes) keyed by the data name.
        self.storage.put(str(data.getName()),
                         pickle.dumps(data.wireEncode().toBytes()))
        logging.info('Inserted data: {}'.format(str(data.getName())))
        # Register the prefix if it was not already known, so the new data
        # becomes reachable.
        existing = CommandHandle.update_prefixes_in_storage(
            self.storage, data.getName().toUri())
        if not existing:
            self.read_handle.listen(data.getName())
    # If buffer is filled up with un-parsable data, shutdown connection
    if not isOk and self.m_inputBufferSize == len(
            self.buffer) and offset == 0:
        logging.warning('Invalid data packet, drop connection ...')
        self.writer.close()
        return
    if offset > 0:
        if offset != self.m_inputBufferSize:
            # Shift the unconsumed tail to the front and re-pad with zeros
            # so the buffer stays exactly BUFFER_SIZE long.
            self.buffer = self.buffer[
                offset:self.m_inputBufferSize] + bytearray(
                    len(self.buffer) - self.m_inputBufferSize + offset)
            assert len(self.buffer) == BUFFER_SIZE
            self.m_inputBufferSize -= offset
        else:
            # Everything was consumed; the stale buffer contents will be
            # overwritten by the next read.
            self.m_inputBufferSize = 0
    # Schedule the next read on the running loop (cooperative recursion
    # rather than an explicit while-loop).
    event_loop = asyncio.get_event_loop()
    event_loop.create_task(self.handleReceive())