def __init__(self, face, encryptResult, link = None):
    """
    DPU consumer setup: creates an identity and certificate, builds a
    group-based Consumer with its decryption key loaded from disk, serves
    our own certificate from a MemoryContentCache, and sends a read-access
    request interest to the group manager.

    :param face: Face used for all network interaction.
    :param encryptResult: whether this DPU should encrypt its results
      (stored; used elsewhere in the class).
    :param link: optional Link object for forwarding hints (stored only).
    """
    # Set up face
    self.face = face
    self._encryptResult = encryptResult
    self._link = link

    self.databaseFilePath = "policy_config/test_consumer_dpu.db"
    # Start every run from a clean consumer database.
    try:
        os.remove(self.databaseFilePath)
    except OSError:
        # no such file
        pass

    self.groupName = Name("/org/openmhealth/haitao")

    # Set up the keyChain.
    identityStorage = BasicIdentityStorage()
    privateKeyStorage = FilePrivateKeyStorage()
    self.keyChain = KeyChain(
      IdentityManager(identityStorage, privateKeyStorage),
      NoVerifyPolicyManager())
    # Authorized identity
    identityName = Name("/ndn/edu/basel/dpu")
    # Function name: the function that this DPU provides
    self._functionName = "bounding_box"
    self._identityName = identityName

    self.certificateName = self.keyChain.createIdentityAndCertificate(identityName)
    # TODO: if using BasicIdentityStorage and FilePrivateKeyStorage
    # For some reason this newly generated cert is not installed by default,
    # calling keyChain sign later would result in error
    #self.keyChain.installIdentityCertificate()

    self.face.setCommandSigningInfo(self.keyChain, self.certificateName)

    consumerKeyName = IdentityCertificate.certificateNameToPublicKeyName(self.certificateName)
    consumerCertificate = identityStorage.getCertificate(self.certificateName)
    self.consumer = Consumer(
      face, self.keyChain, self.groupName, identityName,
      Sqlite3ConsumerDb(self.databaseFilePath))

    # TODO: Read the private key to decrypt d-key...this may or may not be ideal
    # Load our own private key (DER, base64-encoded on disk) so the Consumer
    # can decrypt the group's d-key.
    base64Content = None
    with open(privateKeyStorage.nameTransform(consumerKeyName.toUri(), ".pri")) as keyFile:
        print privateKeyStorage.nameTransform(consumerKeyName.toUri(), ".pri")
        base64Content = keyFile.read()
        #print base64Content
    der = Blob(base64.b64decode(base64Content), False)
    self.consumer.addDecryptionKey(consumerKeyName, der)

    # Serve our certificate under our identity prefix so others can verify us.
    self.memoryContentCache = MemoryContentCache(self.face)
    self.memoryContentCache.registerPrefix(identityName, self.onRegisterFailed, self.onDataNotFound)
    self.memoryContentCache.add(consumerCertificate)

    # Ask the group manager for read access; version component makes the
    # request name unique per run.
    accessRequestInterest = Interest(Name(self.groupName).append("read_access_request").append(self.certificateName).appendVersion(int(time.time())))
    self.face.expressInterest(accessRequestInterest, self.onAccessRequestData, self.onAccessRequestTimeout)
    print "Access request interest name: " + accessRequestInterest.getName().toUri()

    self._tasks = dict()

    return
def beforeLoopStart(self):
    """
    One-time setup before the event loop runs: ensure a root-signed
    controller certificate exists, trust and serve it from a
    MemoryContentCache, then schedule application startup.
    """
    if not self._policyManager.hasRootSignedCertificate():
        # No controller certificate yet -- self-sign a fresh default key.
        self.log.warn('Generating controller certificate...')
        rootKeyName = self._identityManager.generateRSAKeyPairAsDefault(
            self.prefix, isKsk=True)
        rootCert = self._identityManager.selfSign(rootKeyName)
        self._identityManager.addCertificateAsDefault(rootCert)

    # Trusting root's own certificate upon each run
    # TODO: debug where application starts first and controller starts second,
    # application's interest cannot be verified
    defaultCertName = self.getDefaultCertificateName()
    self._rootCertificate = self._keyChain.getCertificate(defaultCertName)
    self._policyManager._certificateCache.insertCertificate(self._rootCertificate)

    self._memoryContentCache = MemoryContentCache(self.face)
    self.face.setCommandSigningInfo(self._keyChain, defaultCertName)
    self._memoryContentCache.registerPrefix(
        self.prefix,
        onRegisterFailed=self.onRegisterFailed,
        onRegisterSuccess=None,
        onDataNotFound=self._onCommandReceived)
    # Serve root certificate in our memoryContentCache
    self._memoryContentCache.add(self._rootCertificate)

    self.loadApplications()
    self.loop.call_soon(self.onStartup)
def __init__(self, face, groupManagerName, dataType, dKeyDatabaseFilePath):
    """
    Group-manager wrapper setup: in-memory identity, a GroupManager backed
    by a sqlite3 d-key database, and a content cache registered under the
    group manager's name.

    :param face: Face used for prefix registration.
    :param groupManagerName: identity name for this group manager.
    :param dataType: data type the group manages.
    :param dKeyDatabaseFilePath: path to the sqlite3 d-key database.
    """
    # Set up face
    self.face = face
    #self.loop = eventLoop

    # In-memory storages: identity and keys do not persist across runs.
    memIdentityStorage = MemoryIdentityStorage()
    memPrivateKeyStorage = MemoryPrivateKeyStorage()
    self.keyChain = KeyChain(
      IdentityManager(memIdentityStorage, memPrivateKeyStorage),
      NoVerifyPolicyManager())
    self.certificateName = self.keyChain.createIdentityAndCertificate(
      groupManagerName)

    self.dKeyDatabaseFilePath = dKeyDatabaseFilePath
    # 2048 and 1 are passed straight to GroupManager -- presumably key size
    # and freshness interval; confirm against the GroupManager API docs.
    self.manager = GroupManager(
      groupManagerName, dataType,
      Sqlite3GroupManagerDb(self.dKeyDatabaseFilePath), 2048, 1,
      self.keyChain)

    self.memoryContentCache = MemoryContentCache(self.face)
    self.memoryContentCache.registerPrefix(
      groupManagerName, self.onRegisterFailed, self.onDataNotFound)

    self.needToPublishGroupKeys = False
def __init__(self, face, keyChain, certificateName, syncPrefix, observer, serializer, 
  syncDataFreshnessPeriod = 4000, initialDigest = "00", 
  syncInterestLifetime = 4000, syncInterestMinInterval = 500, 
  timeoutCntThreshold = 3, maxResponseWaitPeriod = 2000, 
  minResponseWaitPeriod = 400, entityDataFreshnessPeriod = 10000):
    """
    Stores configuration and initializes sync state for digest-based
    discovery. No network traffic is generated here; start() begins the
    sync exchange.
    """
    # Core collaborators.
    self._face = face
    self._keyChain = keyChain
    self._observer = observer
    self._serializer = serializer
    self._memoryContentCache = MemoryContentCache(self._face)

    # Names.
    self._syncPrefix = Name(syncPrefix)
    self._certificateName = Name(certificateName)

    # Discovered objects vs. objects hosted locally.
    self._objects = dict()
    self._hostedObjects = dict()

    # Digest and timing configuration.
    self._initialDigest = initialDigest
    self._currentDigest = initialDigest
    self._syncDataFreshnessPeriod = syncDataFreshnessPeriod
    self._syncInterestLifetime = syncInterestLifetime
    self._syncInterestMinInterval = syncInterestMinInterval
    self._timeoutCntThreshold = timeoutCntThreshold
    self._entityDataFreshnessPeriod = entityDataFreshnessPeriod
    # TODO: policy manager etc setup
    #self._maxResponseWaitPeriod = maxResponseWaitPeriod
    #self._minResponseWaitPeriod = minResponseWaitPeriod

    self._numOutstandingInterest = 0
def __init__(self, face):
    """
    Security bootstrap setup: builds an identity manager over the default
    on-disk storages, seeds a ConfigPolicyManager with an initial
    hierarchical trust rule, and wires a KeyChain to the given face.

    :param face: Face the keyChain uses to express interests for certificates.
    """
    self._defaultIdentity = None
    self._defaultCertificateName = None

    self._controllerName = None
    self._controllerCertificate = None

    self._applicationName = ""

    self._identityManager = IdentityManager(BasicIdentityStorage(), FilePrivateKeyStorage())
    self._policyManager = ConfigPolicyManager()
    # Initial trust schema: a single hierarchical checker rule for data.
    self._policyManager.config.read("validator \n \
{ \n \
  rule \n \
  { \n \
    id \"initial rule\" \n \
    for data \n \
    checker \n \
    { \n \
      type hierarchical \n \
    } \n \
  } \n \
}", "initial-schema")

    # keyChain is what we return to the application after successful setup
    # TODO: should we separate keyChain from internal KeyChain used to verify trust schemas?
    self._keyChain = KeyChain(self._identityManager, self._policyManager)

    self._face = face
    # setFace for keyChain or else it won't be able to express interests for certs
    self._keyChain.setFace(self._face)

    self._certificateContentCache = MemoryContentCache(face)
    self._trustSchemas = dict()
def __init__(self, face):
    """
    Consumer test setup: creates an identity and certificate, builds a
    group-based Consumer with its decryption key loaded from disk, serves
    our own certificate from a MemoryContentCache, and sends a read-access
    request interest to the group manager.

    :param face: Face used for all network interaction.
    """
    # Set up face
    self.face = face

    self.databaseFilePath = "policy_config/test_consumer.db"
    # Start every run from a clean consumer database.
    try:
        os.remove(self.databaseFilePath)
    except OSError:
        # no such file
        pass

    self.groupName = Name("/org/openmhealth/zhehao")

    # Set up the keyChain.
    identityStorage = BasicIdentityStorage()
    privateKeyStorage = FilePrivateKeyStorage()
    self.keyChain = KeyChain(
      IdentityManager(identityStorage, privateKeyStorage),
      NoVerifyPolicyManager())
    # Authorized identity
    identityName = Name("/org/openmhealth/dvu-python-3")
    # Unauthorized identity
    #identityName = Name("/org/openmhealth/dvu-python-1")

    self.certificateName = self.keyChain.createIdentityAndCertificate(identityName)
    self.face.setCommandSigningInfo(self.keyChain, self.certificateName)

    consumerKeyName = IdentityCertificate.certificateNameToPublicKeyName(self.certificateName)
    consumerCertificate = identityStorage.getCertificate(self.certificateName)
    self.consumer = Consumer(
      face, self.keyChain, self.groupName, identityName,
      Sqlite3ConsumerDb(self.databaseFilePath))

    # TODO: Read the private key to decrypt d-key...this may or may not be ideal
    # Load our own private key (DER, base64-encoded on disk) so the Consumer
    # can decrypt the group's d-key.
    base64Content = None
    with open(privateKeyStorage.nameTransform(consumerKeyName.toUri(), ".pri")) as keyFile:
        print privateKeyStorage.nameTransform(consumerKeyName.toUri(), ".pri")
        base64Content = keyFile.read()
        #print base64Content
    der = Blob(base64.b64decode(base64Content), False)
    self.consumer.addDecryptionKey(consumerKeyName, der)

    # Serve our certificate under our identity prefix so others can verify us.
    self.memoryContentCache = MemoryContentCache(self.face)
    self.memoryContentCache.registerPrefix(identityName, self.onRegisterFailed, self.onDataNotFound)
    self.memoryContentCache.add(consumerCertificate)

    # Ask the group manager for read access; version component makes the
    # request name unique per run.
    accessRequestInterest = Interest(Name(self.groupName).append("read_access_request").append(self.certificateName).appendVersion(int(time.time())))
    self.face.expressInterest(accessRequestInterest, self.onAccessRequestData, self.onAccessRequestTimeout)
    print "Access request interest name: " + accessRequestInterest.getName().toUri()

    self.consumeCatalog = True
    return
def __init__(self, face, groupManagerName, dataType, readAccessName, dKeyDatabaseFilePath):
    """
    Group-manager setup that also answers read-access requests: in-memory
    identity, a GroupManager over a freshly-reset sqlite3 d-key database,
    a content cache under <groupManagerName>/READ, and a plain prefix
    registration for the read-access namespace.

    :param face: Face used for registration and command signing.
    :param groupManagerName: identity name for this group manager.
    :param dataType: data type the group manages.
    :param readAccessName: prefix on which access-request interests arrive.
    :param dKeyDatabaseFilePath: path to the sqlite3 d-key database.
    """
    # Set up face
    self.face = face
    #self.loop = eventLoop

    # In-memory storages: identity and keys do not persist across runs.
    memIdentityStorage = MemoryIdentityStorage()
    memPrivateKeyStorage = MemoryPrivateKeyStorage()
    self.keyChain = KeyChain(
      IdentityManager(memIdentityStorage, memPrivateKeyStorage),
      NoVerifyPolicyManager())
    self.certificateName = self.keyChain.createIdentityAndCertificate(
      groupManagerName)
    self.face.setCommandSigningInfo(self.keyChain, self.certificateName)

    # Always start from a fresh d-key database.
    self.dKeyDatabaseFilePath = dKeyDatabaseFilePath
    try:
        os.remove(self.dKeyDatabaseFilePath)
    except OSError:
        # no such file
        pass

    # 2048 and 1 are passed straight to GroupManager -- presumably key size
    # and freshness interval; confirm against the GroupManager API docs.
    self.manager = GroupManager(
      groupManagerName, dataType,
      Sqlite3GroupManagerDb(self.dKeyDatabaseFilePath), 2048, 1,
      self.keyChain)

    self.memoryContentCache = MemoryContentCache(self.face)
    self.memoryContentCache.registerPrefix(
      Name(groupManagerName).append("READ"), self.onRegisterFailed,
      self.onDataNotFound)
    self.face.registerPrefix(
      readAccessName, self.onAccessInterest, self.onAccessTimeout)

    self.updateGroupKeys = False
def __init__(self, applyEDLAdjustment=True):
    """
    EDL publisher setup: prepares logging, an asyncio event loop with a
    ThreadsafeFace, the system default key chain for command signing, a
    MemoryContentCache, and the publishing / YouTube query parameters.

    :param applyEDLAdjustment: whether EDL timing adjustments are applied
      (stored; used elsewhere in the class).
    """
    # prepare trollius logging
    self.prepareLogging()

    self._events = dict()
    self._running = False
    self._applyEDLAdjustment = applyEDLAdjustment

    # NDN related variables
    self._loop = asyncio.get_event_loop()
    self._face = ThreadsafeFace(self._loop)

    # Use the system default key chain and certificate name to sign commands.
    self._keyChain = KeyChain()
    self._keyChain.setFace(self._face)
    self._certificateName = self._keyChain.getDefaultCertificateName()
    self._face.setCommandSigningInfo(self._keyChain, self._certificateName)
    self._memoryContentCache = MemoryContentCache(self._face)

    # Publishing parameters configuration
    self._translationServiceUrl = "http://the-archive.la/losangeles/services/get-youtube-url"
    self._namePrefixString = "/ndn/edu/ucla/remap/test/edl/"

    self._dataLifetime = 2000
    self._publishBeforeSeconds = 3
    self._translateBeforeSeconds = 60
    self._currentIdx = 0

    # Youtube related variables:
    # Channel Global song: UCSMJaKICZKXkpvr7Gj8pPUg
    # Channel Los Angeles: UCeuQoBBzMW6SWkxd8_1I8NQ
    # self._channelID = 'UCSMJaKICZKXkpvr7Gj8pPUg'
    self._channelID = "UCSMJaKICZKXkpvr7Gj8pPUg"
    # NOTE(review): API key hard-coded in source; should be loaded from
    # configuration or environment and rotated.
    self._accessKey = "AIzaSyCe8t7PnmWjMKZ1gBouhP1zARpqNwHAs0s"
    # queryStr = 'https://www.googleapis.com/youtube/v3/videos?part=snippet,contentDetails,statistics,status&key=' + apiKey + '&id='
    # Video query example
    # https://www.googleapis.com/youtube/v3/videos?part=snippet,contentDetails,statistics,status&key=AIzaSyDUY_AX1iJQcwCW1mASEp5GcLtq1V9BM1Q&id=_ebELPKANxo
    # Channel query example
    # https://www.googleapis.com/youtube/v3/search?key=AIzaSyCe8t7PnmWjMKZ1gBouhP1zARpqNwHAs0s&channelId=UCSMJaKICZKXkpvr7Gj8pPUg&part=snippet,id&order=date&maxResults=20
    self._videoUrlDict = dict()

    self._edlAdjustmentDict = dict()
    return
def beforeLoopStart(self):
    """
    Runs once before the event loop: guarantees the controller owns a
    root-signed certificate, inserts it into the policy manager's
    certificate cache, serves it via MemoryContentCache, and schedules
    onStartup.
    """
    haveRootCert = self._policyManager.hasRootSignedCertificate()
    if not haveRootCert:
        # make one....
        self.log.warn('Generating controller certificate...')
        keyName = self._identityManager.generateRSAKeyPairAsDefault(
            self.prefix, isKsk=True)
        certificate = self._identityManager.selfSign(keyName)
        self._identityManager.addCertificateAsDefault(certificate)

    # Trusting root's own certificate upon each run
    # TODO: debug where application starts first and controller starts second,
    # application's interest cannot be verified
    self._rootCertificate = self._keyChain.getCertificate(
        self.getDefaultCertificateName())
    self._policyManager._certificateCache.insertCertificate(
        self._rootCertificate)

    self._memoryContentCache = MemoryContentCache(self.face)
    self.face.setCommandSigningInfo(
        self._keyChain, self.getDefaultCertificateName())
    self._memoryContentCache.registerPrefix(
        self.prefix,
        onRegisterFailed=self.onRegisterFailed,
        onRegisterSuccess=None,
        onDataNotFound=self._onCommandReceived)
    # Serve root certificate in our memoryContentCache
    self._memoryContentCache.add(self._rootCertificate)

    self.loadApplications()
    self.loop.call_soon(self.onStartup)
def __init__(self, face, identityName, groupName, catalogPrefix, rawDataPrefix, producerDbFilePath, consumerDbFilePath, encrypted=False):
    """
    DPU setup that both produces and consumes group-encrypted data:
    creates an identity/certificate, a DPUProducer for publishing, and a
    Consumer (with its decryption key loaded from disk) for reading group
    content.

    :param face: Face used for all network interaction.
    :param identityName: our identity; also the registered serving prefix.
    :param groupName: name of the encryption group we consume from.
    :param catalogPrefix: prefix of the catalog data (stored only here).
    :param rawDataPrefix: prefix of raw data (stored only here).
    :param producerDbFilePath: sqlite3 database path for the producer.
    :param consumerDbFilePath: sqlite3 database path for the consumer
      (deleted first so each run starts clean).
    :param encrypted: whether processing results are encrypted (stored).
    """
    self.face = face
    # Set up the keyChain.
    identityStorage = BasicIdentityStorage()
    privateKeyStorage = FilePrivateKeyStorage()
    self.keyChain = KeyChain(
      IdentityManager(identityStorage, privateKeyStorage),
      NoVerifyPolicyManager())
    self.identityName = Name(identityName)
    self.groupName = Name(groupName)
    self.rawDataPrefix = rawDataPrefix
    self.catalogPrefix = catalogPrefix

    self.certificateName = self.keyChain.createIdentityAndCertificate(
      self.identityName)
    self.face.setCommandSigningInfo(self.keyChain, self.certificateName)

    # Set up the memoryContentCache
    self.memoryContentCache = MemoryContentCache(self.face)
    self.memoryContentCache.registerPrefix(self.identityName, self.onRegisterFailed, self.onDataNotFound)

    self.producerPrefix = Name(identityName)
    self.producerSuffix = Name()

    self.producer = DPUProducer(face, self.memoryContentCache, self.producerPrefix, self.producerSuffix, self.keyChain, self.certificateName, producerDbFilePath)

    # Put own (consumer) certificate in memoryContentCache
    consumerKeyName = IdentityCertificate.certificateNameToPublicKeyName(
      self.certificateName)
    consumerCertificate = identityStorage.getCertificate(
      self.certificateName, True)

    # TODO: request that this DPU be added as a trusted group member
    self.remainingTasks = dict()

    # Start every run from a clean consumer database.
    try:
        os.remove(consumerDbFilePath)
    except OSError:
        # no such file
        pass

    self.consumer = Consumer(face, self.keyChain, self.groupName, consumerKeyName, Sqlite3ConsumerDb(consumerDbFilePath))

    # TODO: Read the private key to decrypt d-key...this may or may not be ideal
    # Load our own private key (DER, base64-encoded on disk) so the Consumer
    # can decrypt the group's d-key.
    base64Content = None
    with open(
      privateKeyStorage.nameTransform(consumerKeyName.toUri(), ".pri")) as keyFile:
        base64Content = keyFile.read()
    der = Blob(base64.b64decode(base64Content), False)
    self.consumer.addDecryptionKey(consumerKeyName, der)

    self.memoryContentCache.add(consumerCertificate)

    self.encrypted = encrypted

    self.rawData = []

    self.catalogFetchFinished = False
    self.remainingData = 0
    return
class SyncBasedDiscovery(object):
    """
    Sync (digest exchange) based name discovery.
    Discovery maintains a list of discovered, and a list of hosted objects.
    Calls observer.onStateChanged(name, msgType, msg) when an entity is
    discovered or removed.
    Uses serializer.serialize(entityObject) to serialize a hosted entity's
    entityInfo into string.

    :param face:
    :type face: Face
    :param keyChain:
    :type keyChain: KeyChain
    :param certificateName:
    :type certificateName: Name
    :param syncPrefix:
    :type syncPrefix: Name
    :param observer:
    :type observer: ExternalObserver
    :param serializer:
    :type serializer: EntitySerializer
    """
    def __init__(self, face, keyChain, certificateName, syncPrefix, observer, serializer, 
      syncDataFreshnessPeriod = 4000, initialDigest = "00", 
      syncInterestLifetime = 4000, syncInterestMinInterval = 500, 
      timeoutCntThreshold = 3, maxResponseWaitPeriod = 2000, 
      minResponseWaitPeriod = 400, entityDataFreshnessPeriod = 10000):
        # Configuration only; no network traffic until start() is called.
        self._face = face
        self._keyChain = keyChain
        self._syncPrefix = Name(syncPrefix)

        # Discovered objects vs. objects hosted locally.
        self._objects = dict()
        self._hostedObjects = dict()

        self._memoryContentCache = MemoryContentCache(self._face)
        self._certificateName = Name(certificateName)

        self._currentDigest = initialDigest
        self._syncDataFreshnessPeriod = syncDataFreshnessPeriod
        self._initialDigest = initialDigest
        self._syncInterestLifetime = syncInterestLifetime

        self._syncInterestMinInterval = syncInterestMinInterval
        self._timeoutCntThreshold = timeoutCntThreshold
        self._entityDataFreshnessPeriod = entityDataFreshnessPeriod
        # TODO: policy manager etc setup
        #self._maxResponseWaitPeriod = maxResponseWaitPeriod
        #self._minResponseWaitPeriod = minResponseWaitPeriod

        self._observer = observer
        self._serializer = serializer

        # Count of sync-related interests currently in flight; used by
        # onSyncTimeout to avoid flooding duplicate sync interests.
        self._numOutstandingInterest = 0
        return

    """
    Public facing interface
    """
    def start(self):
        """
        Starts the discovery by expressing the first sync interest
        (carrying the initial digest).
        """
        interest = Interest(Name(self._syncPrefix).append(self._initialDigest))
        interest.setMustBeFresh(True)
        interest.setInterestLifetimeMilliseconds(self._syncInterestLifetime)
        self._face.expressInterest(interest, self.onSyncData, self.onSyncTimeout)
        self._numOutstandingInterest += 1
        if __debug__:
            print("Express interest: " + interest.getName().toUri())
        return

    def stop(self):
        """
        Stops the discovery
        """
        # TODO: interest expression and data response flag
        self._memoryContentCache.unregisterAll()
        return

    def getHostedObjects(self):
        # Dict of name string -> entityInfo for locally hosted objects.
        return self._hostedObjects

    def getObjects(self):
        # Dict of name string -> {"timeout_count": int} for known objects.
        return self._objects

    def publishObject(self, name, entityInfo):
        """
        Adds another object and registers prefix for that object's name

        :param name: the object's name string
        :type name: str
        :param entityInfo: the application given entity info to describe this object name with
        :type entityInfo: EntityInfo
        """
        # If this is the first object we host, we register for sync namespace:
        # meaning a participant not hosting anything is only "listening" for
        # sync, and will not help in the sync process
        if len(self._hostedObjects.keys()) == 0:
            self._memoryContentCache.registerPrefix(self._syncPrefix, self.onRegisterFailed, self.onSyncInterest)
        if self.addObject(name, False):
            # Do not add itself to contentCache if its currentDigest is "00".
            if self._currentDigest != self._initialDigest:
                self.contentCacheAddSyncData(Name(self._syncPrefix).append(self._currentDigest))
            self.updateDigest()

            # Express a sync interest carrying the new digest.
            interest = Interest(Name(self._syncPrefix).append(self._currentDigest))
            interest.setInterestLifetimeMilliseconds(self._syncInterestLifetime)
            interest.setMustBeFresh(True)
            self._face.expressInterest(interest, self.onSyncData, self.onSyncTimeout)
            self._numOutstandingInterest += 1

            self._hostedObjects[name] = entityInfo
            self.contentCacheAddEntityData(name, entityInfo)
            self.notifyObserver(name, "ADD", "")
            # TODO: should the user configure this prefix as well?
            self._memoryContentCache.registerPrefix(Name(name), self.onRegisterFailed, self.onEntityDataNotFound)
        else:
            if __debug__:
                print("Item with this name already added")
        return

    def removeHostedObject(self, name):
        """
        Removes a locally hosted object

        :param name: the object's name string
        :type name: str
        :return: whether removal's successful or not
        :rtype: bool
        """
        if name in self._hostedObjects:
            del self._hostedObjects[name]
            # Last hosted object gone: stop answering sync/entity interests.
            if len(self._hostedObjects) == 0:
                self._memoryContentCache.unregisterAll()
            if self.removeObject(name):
                return True
            else:
                if __debug__:
                    print("Hosted item not in objects list")
                return False
        else:
            return False

    """
    Internal functions
    """
    def contentCacheAddEntityData(self, name, entityInfo):
        # Publish the serialized entityInfo as signed Data under its name.
        content = self._serializer.serialize(entityInfo)
        data = Data(Name(name))

        data.setContent(content)
        data.getMetaInfo().setFreshnessPeriod(self._entityDataFreshnessPeriod)
        self._keyChain.sign(data, self._certificateName)
        self._memoryContentCache.add(data)
        print "added entity to cache: " + data.getName().toUri() + "; " + data.getContent().toRawStr()

    def contentCacheAddSyncData(self, dataName):
        # Publish the newline-joined, sorted object-name list under dataName.
        sortedKeys = sorted(self._objects.keys())
        content = ""
        for key in sortedKeys:
            content += key + "\n"
        # NOTE(review): strip() result is discarded here, so the trailing
        # '\n' is kept in the published content; onSyncData tolerates this
        # by skipping empty item names.
        content.strip()

        data = Data(Name(dataName))

        data.setContent(content)
        data.getMetaInfo().setFreshnessPeriod(self._syncDataFreshnessPeriod)
        self._keyChain.sign(data, self._certificateName)
        # adding this data to memoryContentCache should satisfy the pending interest
        self._memoryContentCache.add(data)

    def onSyncInterest(self, prefix, interest, face, interestFilterId, filter):
        # Sync interests must be exactly <syncPrefix>/<digest>.
        if interest.getName().size() != self._syncPrefix.size() + 1:
            # Not an interest for us
            return
        digest = interest.getName().get(-1).toEscapedString()
        self.updateDigest()
        if digest != self._currentDigest:
            # Wait a random period before replying; rationale being that "we are
            # always doing ChronoSync recovery...this is the recovery timer but
            # randomized"
            # Consider this statement: we are always doing ChronoSync recovery
            # TODO: this has the problem of potentially answering with wrong data,
            # there will be more interest exchanges needed for the lifetime
            # duration of one wrong answer
            # Consider appending "answerer" as the last component of data name?
            # TODO2: don't see why we should wait here
            self.replySyncInterest(interest, digest)
            #dummyInterest = Interest(Name("/local/timeout1"))
            #dummyInterest.setInterestLifetimeMilliseconds(random.randint(self._minResponseWaitPeriod, self._maxResponseWaitPeriod))
            #self._face.expressInterest(dummyInterest, self.onDummyData, lambda a : self.replySyncInterest(a, digest))
        return

    def replySyncInterest(self, interest, receivedDigest):
        self.updateDigest()
        if receivedDigest != self._currentDigest:
            # TODO: one participant may be answering with wrong info: scenario:
            # 1 has {a}, 2 has {b}
            # 2 gets 1's {a} and asks again before 1 gets 2's {b}, 2 asks 1 with
            # the digest of {a, b}, 1 will create a data with the content {a}
            # for the digest of {a, b}, and this data will be able to answer
            # later steady state interests from 2 until it expires (and by which
            # time 1 should be updated with {a, b} as well)
            self.contentCacheAddSyncData(Name(self._syncPrefix).append(receivedDigest))
        return

    def onSyncData(self, interest, data):
        # TODO: do verification first
        if __debug__:
            print("Got sync data; name: " + data.getName().toUri() + "; content: " + data.getContent().toRawStr())
        content = data.getContent().toRawStr().split('\n')
        self._numOutstandingInterest -= 1
        # Fetch any names we have not seen before.
        for itemName in content:
            if itemName not in self._objects:
                if itemName != "":
                    self.onReceivedSyncData(itemName)

        # Hack for re-expressing sync interest after a short interval
        dummyInterest = Interest(Name("/local/timeout"))
        dummyInterest.setInterestLifetimeMilliseconds(self._syncInterestLifetime)
        self._face.expressInterest(dummyInterest, self.onDummyData, self.onSyncTimeout)
        self._numOutstandingInterest += 1
        return

    def onSyncTimeout(self, interest):
        # Re-express the sync interest with our current digest, but only if
        # there are no other sync interests still outstanding.
        newInterest = Interest(Name(self._syncPrefix).append(self._currentDigest))
        newInterest.setInterestLifetimeMilliseconds(self._syncInterestLifetime)
        newInterest.setMustBeFresh(True)
        self._numOutstandingInterest -= 1
        print "re-expressing: " + newInterest.getName().toUri()

        if self._numOutstandingInterest <= 0:
            self._face.expressInterest(newInterest, self.onSyncData, self.onSyncTimeout)
            self._numOutstandingInterest += 1
        return

    """
    Handling received sync data: express entity interest
    """
    def onReceivedSyncData(self, itemName):
        # Fetch the entity data for a newly discovered name.
        interest = Interest(Name(itemName))
        interest.setInterestLifetimeMilliseconds(4000)
        interest.setMustBeFresh(False)
        self._face.expressInterest(interest, self.onEntityData, self.onEntityTimeout)
        return

    def onEntityTimeout(self, interest):
        print "Item interest times out: " + interest.getName().toUri()
        return

    def onEntityData(self, interest, data):
        # Record the discovered object, notify the observer, then schedule a
        # heartbeat re-fetch via a dummy-interest timeout.
        self.addObject(interest.getName().toUri(), True)
        self.notifyObserver(interest.getName().toUri(), "ADD", "");
        dummyInterest = Interest(Name("/local/timeout"))
        dummyInterest.setInterestLifetimeMilliseconds(4000)
        self._face.expressInterest(dummyInterest, self.onDummyData, lambda a : self.expressHeartbeatInterest(a, interest))
        return

    def expressHeartbeatInterest(self, dummyInterest, entityInterest):
        # Re-express the original entity interest (fresh nonce) as a liveness
        # probe.
        newInterest = Interest(entityInterest)
        newInterest.refreshNonce()
        self._face.expressInterest(newInterest, self.onHeartbeatData, self.onHeartbeatTimeout)

    def onHeartbeatData(self, interest, data):
        # Entity answered: reset its timeout counter and schedule the next
        # heartbeat.
        self.resetTimeoutCnt(interest.getName().toUri())
        dummyInterest = Interest(Name("/local/timeout"))
        dummyInterest.setInterestLifetimeMilliseconds(4000)
        self._face.expressInterest(dummyInterest, self.onDummyData, lambda a : self.expressHeartbeatInterest(a, interest))

    def onHeartbeatTimeout(self, interest):
        # Too many consecutive timeouts removes the object; otherwise retry.
        if self.incrementTimeoutCnt(interest.getName().toUri()):
            print "Remove: " + interest.getName().toUri() + " because of consecutive timeout cnt exceeded"
        else:
            newInterest = Interest(interest.getName())
            newInterest.setInterestLifetimeMilliseconds(4000)
            self._face.expressInterest(newInterest, self.onHeartbeatData, self.onHeartbeatTimeout)

    def onDummyData(self, interest, data):
        if __debug__:
            print("Unexpected reply to dummy interest: " + data.getContent().toRawStr())
        return

    def expressSyncInterest(self, interest):
        # Delegates to onSyncTimeout, which holds the re-expression logic.
        self.onSyncTimeout(interest)
        return

    def addObject(self, name, update):
        # Returns False if already known; otherwise records the object with a
        # zero timeout counter and (optionally) refreshes the digest.
        if name in self._objects:
            return False
        else:
            self._objects[name] = {"timeout_count": 0}
            if update:
                self.updateDigest()
            return True

    def removeObject(self, name):
        # Removes a known object, notifies the observer, publishes sync data
        # for the pre-removal digest, then updates the digest.
        if name in self._objects:
            del self._objects[name]
            self.notifyObserver(name, "REMOVE", "")
            self.contentCacheAddSyncData(Name(self._syncPrefix).append(self._currentDigest))
            self.updateDigest()
            return True
        else:
            return False

    def updateDigest(self):
        # TODO: for now, may change the format of the list encoding for easier
        # cross language compatibility
        # Digest = sha256 over the sorted object names; initial digest when
        # nothing is known.
        if len(self._objects) > 0:
            m = hashlib.sha256()
            for item in sorted(self._objects.keys()):
                m.update(item)
            self._currentDigest = str(m.hexdigest())
        else:
            self._currentDigest = self._initialDigest
        return

    def incrementTimeoutCnt(self, name):
        # Returns True if the object was removed because its consecutive
        # timeout count reached the threshold.
        if name in self._objects:
            self._objects[name]["timeout_count"] += 1
            if self._objects[name]["timeout_count"] >= self._timeoutCntThreshold:
                return self.removeObject(name)
            else:
                return False
        else:
            return False

    def resetTimeoutCnt(self, name):
        if name in self._objects:
            self._objects[name]["timeout_count"] = 0
            return True
        else:
            return False

    def notifyObserver(self, name, msgType, msg):
        self._observer.onStateChanged(name, msgType, msg)
        return

    def onRegisterFailed(self, prefix):
        if __debug__:
            print("Prefix registration failed: " + prefix.toUri())
        return

    def onEntityDataNotFound(self, prefix, interest, face, interestFilterId, filter):
        # Serve a hosted entity on demand: serialize, sign and put directly
        # (not through the content cache).
        name = interest.getName().toUri()
        if name in self._hostedObjects:
            content = self._serializer.serialize(self._hostedObjects[name])
            data = Data(Name(name))

            data.setContent(content)
            data.getMetaInfo().setFreshnessPeriod(self._entityDataFreshnessPeriod)
            self._keyChain.sign(data, self._certificateName)
            self._face.putData(data)
        return
class BmsNode(object): def __init__(self): self.conf = None self._keyChain = None self._certificateName = None self._dataQueue = dict() self._memoryContentCache = None self._identityName = None self._aggregation = Aggregation() def setConfiguration(self, fileName, trustSchemaFile): self.conf = BoostInfoParser() self.conf.read(fileName) self._identityName = Name(self.conf.getNodePrefix()) self._trustSchemaFile = trustSchemaFile def onDataNotFound(self, prefix, interest, face, interestFilterId, filter): #print('Data not found for ' + interest.getName().toUri()) return def startPublishing(self): # One-time security setup self.prepareLogging() privateKeyStorage = FilePrivateKeyStorage() identityStorage = BasicIdentityStorage() policyManager = ConfigPolicyManager(self._trustSchemaFile) self._keyChain = KeyChain(IdentityManager(identityStorage, privateKeyStorage), policyManager) self._certificateName = self._keyChain.createIdentityAndCertificate(self._identityName) print("My Identity name: " + self._identityName.toUri()) print("My certificate name: " + self._certificateName.toUri()) certificateData = self._keyChain.getIdentityManager()._identityStorage.getCertificate(self._certificateName, True) print("My certificate string: " + b64encode(certificateData.wireEncode().toBuffer())) # self._keyChain.getIdentityCertificate(self._certificateName).) 
self._loop = asyncio.get_event_loop() self._face = ThreadsafeFace(self._loop) self._keyChain.setFace(self._face) self._face.setCommandSigningInfo(self._keyChain, self._certificateName) self._memoryContentCache = MemoryContentCache(self._face) # We should only ask for cert to be signed upon the first run of a certain aggregator if DO_CERT_SETUP: if (KeyLocator.getFromSignature(certificateData.getSignature()).getKeyName().equals(self._certificateName.getPrefix(-1))): # Need to configure for mini-ndn; aggregation node runs outside of mini-ndn first so that signed cert get installed and mini-ndn won't ask for this again print("certificate " + self._certificateName.toUri() + " asking for signature") response = urllib2.urlopen("http://192.168.56.1:5000/bms-cert-hack?cert=" + b64encode(certificateData.wireEncode().toBuffer()) + "&cert_prefix=" + self._identityName.toUri() + '&subject_name=' + self._identityName.toUri()).read() signedCertData = Data() signedCertData.wireDecode(Blob(b64decode(response))) self._memoryContentCache.add(signedCertData) cmdline = ['ndnsec-install-cert', '-'] p = subprocess.Popen(cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE) # desanitize + sign in GET request cert, err = p.communicate(response) if p.returncode != 0: raise RuntimeError("ndnsec-install-cert error") else: self._memoryContentCache.add(certificateData) else: self._memoryContentCache.add(certificateData) dataNode = self.conf.getDataNode() childrenNode = self.conf.getChildrenNode() self._memoryContentCache.registerPrefix(Name(self._identityName), self.onRegisterFailed, self.onDataNotFound) # For each type of data, we refresh each type of aggregation according to the interval in the configuration for i in range(len(dataNode.subtrees)): dataType = dataNode.subtrees.keys()[i] aggregationParams = self.conf.getProducingParamsForAggregationType(dataNode.subtrees.items()[i][1]) if childrenNode == None: self._dataQueue[dataType] = DataQueue(None, None, None) 
        # NOTE(review): the statements below continue a method whose `def`
        # line is above this chunk. It appears to start raw-data generation
        # and then spin up one aggregation publisher per configured
        # aggregation type — confirm against the full file.
        self.generateData(dataType, 2, 0)
        for aggregationType in aggregationParams:
            # Collect, per child node, the producing parameters that match
            # this dataType/aggregationType pair from the configuration tree.
            childrenList = OrderedDict()
            if childrenNode != None:
                for j in range(len(childrenNode.subtrees)):
                    if dataType in childrenNode.subtrees.items()[j][1].subtrees['data'].subtrees:
                        if aggregationType in childrenNode.subtrees.items()[j][1].subtrees['data'].subtrees[dataType].subtrees:
                            childrenList[childrenNode.subtrees.items()[j][0]] = self.conf.getProducingParamsForAggregationType(childrenNode.subtrees.items()[j][1].subtrees['data'].subtrees[dataType])[aggregationType]
            self.startPublishingAggregation(aggregationParams[aggregationType], childrenList, dataType, aggregationType)
        return

    def startPublishingAggregation(self, params, childrenList, dataType, aggregationType):
        """
        Start publishing one (dataType, aggregationType) stream.

        Leaf nodes (empty childrenList) schedule calculateAggregation on the
        event loop; intermediate nodes instead express one interest per child
        producing the same dataType/aggregationType.
        """
        if __debug__:
            print('Start publishing for ' + dataType + '-' + aggregationType)

        # aggregation calculating and publishing mechanism
        publishingPrefix = Name(self._identityName).append(DATA_COMPONENT).append(dataType).append(AGGREGATION_COMPONENT).append(aggregationType)
        self._dataQueue[dataType + aggregationType] = DataQueue(params, childrenList, publishingPrefix)

        if len(childrenList.keys()) == 0:
            # Leaf node: produce raw aggregates on a timer.
            # TODO: make start_time optional for leaf nodes
            self._loop.call_later(int(params['producer_interval']), self.calculateAggregation, dataType, aggregationType, childrenList, int(params['start_time']), int(params['producer_interval']), publishingPrefix, True)
        else:
            # express interest for children who produce the same data and aggregation type
            for childName in childrenList.keys():
                name = Name(self._identityName).append(childName).append(DATA_COMPONENT).append(dataType).append(AGGREGATION_COMPONENT).append(aggregationType)
                interest = Interest(name)
                # if start_time is specified, we ask for data starting at start_time;
                # if not, we ask for the right most child and go from there
                if ('start_time' in childrenList[childName]):
                    endTime = int(childrenList[childName]['start_time']) + int(childrenList[childName]['producer_interval'])
                    interest.getName().append(str(childrenList[childName]['start_time'])).append(str(endTime))
                else:
                    # TODO: For now we are playing with historical data, for each run we don't want to miss any data, thus we start with leftMost
                    interest.setChildSelector(0)
                    interest.setMustBeFresh(True)
                interest.setInterestLifetimeMilliseconds(DEFAULT_INTEREST_LIFETIME)
                if __debug__:
                    print(' Issue interest: ' + interest.getName().toUri())
                self._face.expressInterest(interest, self.onData, self.onTimeout)
        return

    # TODO: once one calculation's decided a child has not answered, we should do another calculation
    def calculateAggregation(self, dataType, aggregationType, childrenList, startTime, interval, publishingPrefix, repeat = False):
        """
        Aggregate the values for [startTime, startTime + interval) and, if all
        inputs are present, sign and publish the result. When `repeat` is True
        (raw-data producer), reschedules itself for the next interval.
        """
        doCalc = True
        dataList = []
        # TODO: an intermediate node cannot produce raw data for now
        if len(childrenList.keys()) != 0:
            # Intermediate node: every child must have answered for this window.
            for childName in childrenList.keys():
                dataDictKey = self.getDataDictKey(startTime, (startTime + interval), childName)
                if dataDictKey in self._dataQueue[dataType + aggregationType]._dataDict:
                    data = self._dataQueue[dataType + aggregationType]._dataDict[dataDictKey]
                    dataList.append(float(data.getContent().toRawStr()))
                else:
                    #print('Child ' + childName + ' has not replied yet')
                    doCalc = False
                    break
        else:
            # Leaf node: pull locally generated raw samples inside the window.
            for inst in self._dataQueue[dataType]._dataDict.keys():
                if int(inst) >= startTime and int(inst) < startTime + interval:
                    dataList.append(self._dataQueue[dataType]._dataDict[inst])
        if doCalc:
            content = self._aggregation.getAggregation(aggregationType, dataList)
            if content:
                publishData = Data(Name(publishingPrefix).append(str(startTime)).append(str(startTime + interval)))
                publishData.setContent(str(content))
                publishData.getMetaInfo().setFreshnessPeriod(DEFAULT_DATA_LIFETIME)
                self._keyChain.sign(publishData, self._certificateName)
                self._memoryContentCache.add(publishData)
                # Drop the consumed child answers for this window.
                for childName in childrenList.keys():
                    dataDictKey = self.getDataDictKey(startTime, (startTime + interval), childName)
                    if dataDictKey in self._dataQueue[dataType + aggregationType]._dataDict:
                        del self._dataQueue[dataType + aggregationType]._dataDict[dataDictKey]
                if __debug__:
                    print("Produced: " + publishData.getName().toUri() + "; " + publishData.getContent().toRawStr())

        # repetition of this function only happens for raw data producer, otherwise calculateAggregation is called by each onData
        if repeat:
            self._loop.call_later(interval, self.calculateAggregation, dataType, aggregationType, childrenList, startTime + interval, interval, publishingPrefix, repeat)
        return

    def generateData(self, dataType, interval, startTime):
        """Generate one random raw sample for dataType and reschedule itself every `interval` seconds."""
        self._dataQueue[dataType]._dataDict[str(startTime)] = random.randint(0,9)
        self._loop.call_later(interval, self.generateData, dataType, interval, startTime + interval)
        return

    def onRegisterFailed(self, prefix):
        """Fail hard: without a registered prefix this node cannot serve data."""
        raise RuntimeError("Register failed for prefix", prefix.toUri())

    def onVerified(self, data):
        """Verification success callback for incoming child data."""
        print('Data verified: ' + data.getName().toUri())
        return

    def onVerifyFailed(self, data):
        """Verification failure callback; logged only, the data is still queued by onData."""
        print('Data verification failed: ' + data.getName().toUri())
        return

    def onData(self, interest, data):
        """
        Handle a child's aggregated data: queue it, try to compute this node's
        own aggregation for the window, and re-express an interest (with an
        Exclude filter) for the child's next piece of data.
        """
        self._keyChain.verifyData(data, self.onVerified, self.onVerifyFailed)
        dataName = data.getName()
        dataQueue = None

        if __debug__:
            print("Got data: " + dataName.toUri() + "; " + data.getContent().toRawStr())

        # Locate the aggregation marker component to parse out
        # .../<child>/.../<dataType>/<AGG>/<aggType>/<start>/<end>.
        for i in range(0, len(dataName)):
            if dataName.get(i).toEscapedString() == AGGREGATION_COMPONENT:
                dataType = dataName.get(i - 1).toEscapedString()
                aggregationType = dataName.get(i + 1).toEscapedString()

                startTime = int(dataName.get(i + 2).toEscapedString())
                endTime = int(dataName.get(i + 3).toEscapedString())
                childName = dataName.get(i - 3).toEscapedString()

                dataAndAggregationType = dataType + aggregationType

                dataDictKey = self.getDataDictKey(startTime, endTime, childName)
                dataQueue = self._dataQueue[dataAndAggregationType]
                dataQueue._dataDict[dataDictKey] = data
                break

        # TODO: check what if interval/starttime is misconfigured
        if dataQueue:
            self.calculateAggregation(dataType, aggregationType, dataQueue._childrenList, startTime, endTime - startTime, dataQueue._publishingPrefix)

            # Always ask for the next piece of data when we receive this one; assumes interval does not change; this also assumes there are no more components after endTime
            #newInterestName = dataName.getPrefix(i + 2).append(str(endTime)).append(str(endTime + (endTime - startTime)))

            # We don't expect aggregated data name to be continuous within our given time window, so we ask with exclusion instead
            newInterestName = dataName.getPrefix(i + 2)
            newInterest = Interest(interest)
            newInterest.setName(newInterestName)
            newInterest.setChildSelector(0)

            exclude = Exclude()
            exclude.appendAny()
            exclude.appendComponent(dataName.get(i + 2))
            newInterest.setExclude(exclude)

            self._face.expressInterest(newInterest, self.onData, self.onTimeout)
            if __debug__:
                print("  issue interest: " + interest.getName().toUri())

        return

    def onTimeout(self, interest):
        """Re-express a timed-out interest indefinitely."""
        if __debug__:
            print("  interest timeout: " + interest.getName().toUri() + "; reexpress")
            pass
        self._face.expressInterest(interest, self.onData, self.onTimeout)
        return

    def stop(self):
        """Stop the event loop driving this node."""
        self._loop.stop()
        if __debug__:
            print("Stopped")
        return

    # This creation of dataDictKey means parent and child should not have the same name
    @staticmethod
    def getDataDictKey(startTime, endTime, childName):
        """Build the queue key '<start>/<end>/<child>' for one answer window."""
        return str(startTime) + '/' + str(endTime) + '/' + childName

##
# Logging
##
    def prepareLogging(self):
        """Configure a DEBUG logger with an INFO-level stderr handler; also attach it to trollius."""
        self.log = logging.getLogger(str(self.__class__))
        self.log.setLevel(logging.DEBUG)
        logFormat = "%(asctime)-15s %(name)-20s %(funcName)-20s (%(levelname)-8s):\n\t%(message)s"
        self._console = logging.StreamHandler()
        self._console.setFormatter(logging.Formatter(logFormat))
        self._console.setLevel(logging.INFO)
        # without this, a lot of ThreadsafeFace errors get swallowed up
        logging.getLogger("trollius").addHandler(self._console)
        self.log.addHandler(self._console)

    def setLogLevel(self, level):
        """
        Set the log level that will be output to standard error
        :param level: A log level constant defined in the logging module (e.g. logging.INFO)
        """
        self._console.setLevel(level)

    def getLogger(self):
        """
        :return: The logger associated with this node
        :rtype: logging.Logger
        """
        return self.log
class DPU(object): def __init__(self, face, identityName, groupName, catalogPrefix, rawDataPrefix, producerDbFilePath, consumerDbFilePath, encrypted=False): self.face = face # Set up the keyChain. identityStorage = BasicIdentityStorage() privateKeyStorage = FilePrivateKeyStorage() self.keyChain = KeyChain( IdentityManager(identityStorage, privateKeyStorage), NoVerifyPolicyManager()) self.identityName = Name(identityName) self.groupName = Name(groupName) self.rawDataPrefix = rawDataPrefix self.catalogPrefix = catalogPrefix self.certificateName = self.keyChain.createIdentityAndCertificate( self.identityName) self.face.setCommandSigningInfo(self.keyChain, self.certificateName) # Set up the memoryContentCache self.memoryContentCache = MemoryContentCache(self.face) self.memoryContentCache.registerPrefix(self.identityName, self.onRegisterFailed, self.onDataNotFound) self.producerPrefix = Name(identityName) self.producerSuffix = Name() self.producer = DPUProducer(face, self.memoryContentCache, self.producerPrefix, self.producerSuffix, self.keyChain, self.certificateName, producerDbFilePath) # Put own (consumer) certificate in memoryContentCache consumerKeyName = IdentityCertificate.certificateNameToPublicKeyName( self.certificateName) consumerCertificate = identityStorage.getCertificate( self.certificateName, True) # TODO: request that this DPU be added as a trusted group member self.remainingTasks = dict() try: os.remove(consumerDbFilePath) except OSError: # no such file pass self.consumer = Consumer(face, self.keyChain, self.groupName, consumerKeyName, Sqlite3ConsumerDb(consumerDbFilePath)) # TODO: Read the private key to decrypt d-key...this may or may not be ideal base64Content = None with open( privateKeyStorage.nameTransform(consumerKeyName.toUri(), ".pri")) as keyFile: base64Content = keyFile.read() der = Blob(base64.b64decode(base64Content), False) self.consumer.addDecryptionKey(consumerKeyName, der) self.memoryContentCache.add(consumerCertificate) self.encrypted 
= encrypted self.rawData = [] self.catalogFetchFinished = False self.remainingData = 0 return def onDataNotFound(self, prefix, interest, face, interestFilterId, filter): print "Data not found for interest: " + interest.getName().toUri() if interest.getName().get( -3).toEscapedString() == "bout" or interest.getName().get( -3).toEscapedString() == "genericfunctions": if interest.getName().toUri() in self.remainingTasks: # We are already trying to process this task, so we don't add it to the list of tasks pass else: self.remainingTasks[interest.getName().toUri()] = "in-progress" else: print "Got unexpected interest: " + interest.getName().toUri() # .../SAMPLE/<timestamp> timestamp = interest.getName().get(-1) catalogInterest = Interest(self.catalogPrefix) # Traverse catalogs in range from leftmost child catalogInterest.setChildSelector(0) catalogInterest.setMustBeFresh(True) catalogInterest.setInterestLifetimeMilliseconds(4000) exclude = Exclude() exclude.appendAny() exclude.appendComponent(timestamp) catalogInterest.setExclude(catalogInterest) self.face.expressInterest(catalogInterest, self.onCatalogData, self.onCatalogTimeout) print "Expressed catalog interest " + catalogInterest.getName().toUri() return def onRegisterFailed(self, prefix): print "Prefix registration failed" return def onCatalogData(self, interest, data): # Find the next catalog print "Received catalog data " + data.getName().toUri() catalogTimestamp = data.getName().get(-2) exclude = Exclude() exclude.appendAny() exclude.appendComponent(catalogTimestamp) nextCatalogInterest = Interest(interest.getName()) nextCatalogInterest.setExclude(exclude) nextCatalogInterest.setChildSelector(0) nextCatalogInterest.setMustBeFresh(True) nextCatalogInterest.setInterestLifetimeMilliseconds(2000) self.face.expressInterest(nextCatalogInterest, self.onCatalogData, self.onCatalogTimeout) print "Expressed catalog interest " + nextCatalogInterest.getName( ).toUri() # We ignore the version in the catalog if 
self.encrypted: self.consumer.consume(contentName, self.onCatalogConsumeComplete, self.onConsumeFailed) else: self.onCatalogConsumeComplete(data, data.getContent()) def onCatalogConsumeComplete(self, data, result): print "Consume complete for catalog name: " + data.getName().toUri() catalog = json.loads(result.toRawStr()) for timestamp in catalog: # For encrypted data, timestamp format will have to change rawDataName = Name(self.rawDataPrefix).append( Schedule.toIsoString(timestamp * 1000)) dataInterest = Interest(rawDataName) dataInterest.setInterestLifetimeMilliseconds(2000) dataInterest.setMustBeFresh(True) self.face.expressInterest(dataInterest, self.onRawData, self.onRawDataTimeout) self.remainingData += 1 return # TODO: This logic for checking 'if I have everything, and should proceed with all pending tasks' is not correct for the long run def onRawDataConsumeComplete(self, data, result): resultObject = json.loads(result.toRawStr()) # TODO: the original data for timestamp should be an array self.rawData.append(resultObject) self.remainingData -= 1 print "Remaing data number: " + str(self.remainingData) if self.remainingData == 0 and self.catalogFetchFinished: self.produce() # TODO: Unideal distanceTo production for item in self.remainingTasks: username = data.getName().get(2) timestamp = Name(item).get(-1).toEscapedString() if "distanceTo" in item and username in item and timestamp in data.getName( ).toUri(): # We want this distanceTo destCoordinate = Name(item).get(-2).toEscapedString() coordinates = destCoordinate[1:-1].split(",").strip() x = int(coordinates[0]) y = int(coordinates[1]) dataObject = json.dumps({ "distanceTo": math.sqrt((x - resultObject["lat"]) * (x - resultObject["lat"]) + (y - resultObject["lng"]) * (y - resultObject["lng"])) }) data = Data(data) data.getMetaInfo().setFreshnessPeriod(40000000000) data.setContent(dataObject) self.keyChain.sign(data) # If the interest's still within lifetime, this will satisfy the interest 
self.memoryContentCache.add(data) return def onConsumeFailed(self, code, message): print "Consume error " + str(code) + ": " + message def onRawData(self, interest, data): print "Raw data received: " + data.getName().toUri() # TODO: Quick hack for deciding if the data is encrypted if "zhehao" in data.getName().toUri(): self.consumer.consume(data.getName(), self.onRawDataConsumeComplete, self.onConsumeFailed) else: print "raw data consume complete" self.onRawDataConsumeComplete(data, data.getContent()) # if self.encrypted: # self.consumer.consume(data.getName(), self.onRawDataConsumeComplete, self.onConsumeFailed) # else: # self.onRawDataConsumeComplete(data, data.getContent()) def onCatalogTimeout(self, interest): print "Catalog times out: " + interest.getName().toUri() # TODO: 1 timeout would result in this dpu thinking that catalog fetching's done! self.catalogFetchFinished = True if self.remainingData == 0: self.produce() return def onRawDataTimeout(self, interest): print "Raw data times out: " + interest.getName().toUri() return # TODO: This logic for checking 'if I have everything, and should proceed with all pending tasks' is not correct for the long run def produce(self): # Produce the bounding box print "ready to produce" maxLong = -3600 minLong = 3600 maxLat = -3600 minLat = 3600 if len(self.rawData) == 0: print "No raw data as producer input" for item in self.rawData: print item if item["lng"] > maxLong: maxLong = item["lng"] if item["lng"] < minLong: minLong = item["lng"] if item["lat"] > maxLat: maxLat = item["lat"] if item["lat"] < minLat: minLat = item["lat"] result = json.dumps({ "maxlng": maxLong, "minlng": minLong, "maxlat": maxLat, "minlat": minLat, "size": len(self.rawData) }) if self.encrypted: # TODO: replace fixed timestamp for now for produced data, createContentKey as needed testTime1 = Schedule.fromIsoString("20160320T080000") self.producer.createContentKey(testTime1) self.producer.produce(testTime1, result) else: # Arbitrary produced data 
lifetime data = Data(Name(self.identityName).append("20160320T080000")) data.getMetaInfo().setFreshnessPeriod(400000) data.setContent(result) # If the interest's still within lifetime, this will satisfy the interest self.memoryContentCache.add(data) print "Produced data with name " + data.getName().toUri()
class TestDPU(object):
    """
    Test Data Processing Unit providing the "bounding_box" function: it
    consumes a window of encrypted time/location samples for a user and
    republishes their bounding box (inner data wrapped in outer data), then
    inserts the result into a repo.
    """

    def __init__(self, face, encryptResult, link = None):
        """
        :param face: Face used for all network I/O.
        :param encryptResult: if True, results should be encrypted
            (currently not implemented — see onGetAllData).
        :param link: optional Link object (stored, unused in visible code).
        """
        # Set up face
        self.face = face
        self._encryptResult = encryptResult
        self._link = link

        # Fresh consumer DB on every run.
        self.databaseFilePath = "policy_config/test_consumer_dpu.db"
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            # no such file
            pass

        self.groupName = Name("/org/openmhealth/haitao")

        # Set up the keyChain.
        identityStorage = BasicIdentityStorage()
        privateKeyStorage = FilePrivateKeyStorage()
        self.keyChain = KeyChain(
            IdentityManager(identityStorage, privateKeyStorage),
            NoVerifyPolicyManager())

        # Authorized identity
        identityName = Name("/ndn/edu/basel/dpu")
        # Function name: the function that this DPU provides
        self._functionName = "bounding_box"
        self._identityName = identityName

        self.certificateName = self.keyChain.createIdentityAndCertificate(identityName)
        # TODO: if using BasicIdentityStorage and FilePrivateKeyStorage
        # For some reason this newly generated cert is not installed by default, calling keyChain sign later would result in error
        #self.keyChain.installIdentityCertificate()

        self.face.setCommandSigningInfo(self.keyChain, self.certificateName)

        consumerKeyName = IdentityCertificate.certificateNameToPublicKeyName(self.certificateName)
        consumerCertificate = identityStorage.getCertificate(self.certificateName)

        self.consumer = Consumer(
            face, self.keyChain, self.groupName, identityName,
            Sqlite3ConsumerDb(self.databaseFilePath))

        # TODO: Read the private key to decrypt d-key...this may or may not be ideal
        base64Content = None
        with open(privateKeyStorage.nameTransform(consumerKeyName.toUri(), ".pri")) as keyFile:
            print privateKeyStorage.nameTransform(consumerKeyName.toUri(), ".pri")
            base64Content = keyFile.read()
            #print base64Content
        der = Blob(base64.b64decode(base64Content), False)
        self.consumer.addDecryptionKey(consumerKeyName, der)

        self.memoryContentCache = MemoryContentCache(self.face)
        self.memoryContentCache.registerPrefix(identityName, self.onRegisterFailed, self.onDataNotFound)
        # Serve own certificate so the group manager can fetch it.
        self.memoryContentCache.add(consumerCertificate)

        # Ask the group manager for read access (versioned so it is unique per run).
        accessRequestInterest = Interest(Name(self.groupName).append("read_access_request").append(self.certificateName).appendVersion(int(time.time())))
        self.face.expressInterest(accessRequestInterest, self.onAccessRequestData, self.onAccessRequestTimeout)
        print "Access request interest name: " + accessRequestInterest.getName().toUri()

        # Pending function invocations, keyed by produced data name.
        self._tasks = dict()

        return

    def onAccessRequestData(self, interest, data):
        """Access-request acknowledgement received from the group manager."""
        print "Access request data: " + data.getName().toUri()
        return

    def onAccessRequestTimeout(self, interest):
        """Access request timed out; optimistically assume access was granted."""
        print "Access request times out: " + interest.getName().toUri()
        print "Assuming certificate sent and D-key generated"
        return

    def startConsuming(self, userId, basetimeString, producedDataName, dataNum, outerDataName):
        """
        Issue `dataNum` consume calls for consecutive time-location samples of
        `userId` starting at `basetimeString` (minutes are zero-filled).
        """
        contentName = Name(userId).append(Name("/SAMPLE/fitness/physical_activity/time_location/"))
        baseZFill = 2
        for i in range(0, dataNum):
            timeString = basetimeString + str(i).zfill(baseZFill) + '00'
            # NOTE(review): timeFloat is computed but never used — confirm
            # whether it was intended for schedule-based consumption.
            timeFloat = Schedule.fromIsoString(timeString)

            self.consume(Name(contentName).append(timeString), producedDataName, outerDataName)
            print "Trying to consume: " + Name(contentName).append(timeString).toUri()

    def onDataNotFound(self, prefix, interest, face, interestFilterId, filter):
        """
        Interpret an unsatisfied interest as a function invocation:
        <identity>/<functionName>/<userId>,<basetime>,<producedDataName>.
        """
        print "Data not found for interest: " + interest.getName().toUri()

        functionComponentIdx = len(self._identityName)
        if interest.getName().get(functionComponentIdx).toEscapedString() == self._functionName:
            try:
                parameters = interest.getName().get(functionComponentIdx + 1).toEscapedString()
                pattern = re.compile('([^,]*),([^,]*),([^,]*)')
                matching = pattern.match(str(Name.fromEscapedString(parameters)))
                #print parameters
                #print str(Name.fromEscapedString(parameters))
                userId = matching.group(1)
                basetimeString = matching.group(2)
                producedDataName = matching.group(3)
                dataNum = 2
                self._tasks[producedDataName] = {"cap_num": dataNum, "current_num": 0, "dataset": []}
                self.startConsuming(userId, basetimeString, producedDataName, dataNum, interest.getName().toUri())
            except Exception as e:
                print "Exception in processing function arguments: " + str(e)
        else:
            print "function name mismatch: expected " + self._functionName + " ; got " + interest.getName().get(functionComponentIdx).toEscapedString()
        return

    def onRegisterFailed(self, prefix):
        """Log a failed prefix registration (non-fatal here)."""
        print "Prefix registration failed: " + prefix.toUri()
        return

    def consume(self, contentName, producedDataName, outerDataName):
        """Consume one encrypted sample; route callbacks with the task context bound in."""
        self.consumer.consume(contentName,
          lambda data, result: self.onConsumeComplete(data, result, producedDataName, outerDataName),
          lambda code, message : self.onConsumeFailed(code, message, producedDataName, outerDataName))

    def onConsumeComplete(self, data, result, producedDataName, outerDataName):
        """Accumulate one decrypted sample; when all expected arrived, finish the task."""
        print "Consume complete for data name: " + data.getName().toUri()
        if producedDataName in self._tasks:
            self._tasks[producedDataName]["current_num"] += 1
            resultObject = json.loads(str(result))
            for i in range(0, len(resultObject)):
                self._tasks[producedDataName]["dataset"].append(resultObject[i])
            if self._tasks[producedDataName]["current_num"] == self._tasks[producedDataName]["cap_num"]:
                self.onGetAllData(producedDataName, outerDataName)

    def onConsumeFailed(self, code, message, producedDataName, outerDataName):
        """Count a failed consume toward the expected total so the task still completes."""
        print "Consume error " + str(code) + ": " + message
        if producedDataName in self._tasks:
            self._tasks[producedDataName]["current_num"] += 1
            if self._tasks[producedDataName]["current_num"] == self._tasks[producedDataName]["cap_num"]:
                self.onGetAllData(producedDataName, outerDataName)

    def onGetAllData(self, producedDataName, outerDataName):
        """
        Compute the lat/lng bounding box over the collected dataset and
        publish it as inner data wrapped inside outer data; also push the
        outer data to the repo.
        """
        maxLng = -1000
        minLng = 1000
        maxLat = -1000
        minLat = 1000

        for item in self._tasks[producedDataName]["dataset"]:
            dataObject = json.loads(str(item))
            if dataObject["lat"] > maxLat:
                maxLat = dataObject["lat"]
            if dataObject["lat"] < minLat:
                minLat = dataObject["lat"]
            if dataObject["lng"] > maxLng:
                maxLng = dataObject["lng"]
            if dataObject["lng"] < minLng:
                minLng = dataObject["lng"]

        if not self._encryptResult:
            innerData = Data(Name(str(producedDataName)))
            innerData.setContent(json.dumps({"minLat": minLat, "maxLat": maxLat, "minLng": minLng, "maxLng": maxLng}))
            #self.keyChain.sign(innerData)
            outerData = Data(Name(str(outerDataName)))
            outerData.setContent(innerData.wireEncode())
            #self.keyChain.sign(outerData)
            self.memoryContentCache.add(outerData)
            self.initiateContentStoreInsertion("/ndn/edu/ucla/remap/ndnfit/repo", outerData)
            print "Calculation completed, put data to repo"
        else:
            print "Encrypt result is not implemented"

    def initiateContentStoreInsertion(self, repoCommandPrefix, data):
        """Send a signed repo 'insert' command interest for the given data's name."""
        fetchName = data.getName()
        parameter = repo_command_parameter_pb2.RepoCommandParameterMessage()
        # Add the Name.
        for i in range(fetchName.size()):
            parameter.repo_command_parameter.name.component.append(
                fetchName[i].getValue().toBytes())

        # Create the command interest.
        interest = Interest(Name(repoCommandPrefix).append("insert")
                            .append(Name.Component(ProtobufTlv.encode(parameter))))
        self.face.makeCommandInterest(interest)

        self.face.expressInterest(interest, self.onRepoData, self.onRepoTimeout)

    def onRepoData(self, interest, data):
        """Repo command acknowledged (logging disabled)."""
        #print "received repo data: " + interest.getName().toUri()
        return

    def onRepoTimeout(self, interest):
        """Repo command timed out (logging disabled)."""
        #print "repo command times out: " + interest.getName().getPrefix(-1).toUri()
        return
class AccessManager(object):
    """
    NAC group manager: maintains the member schedule, grants access when a
    member's certificate is retrieved, and publishes group E-keys/D-keys
    (also inserting them into a repo).
    """

    def __init__(self, face, groupManagerName, dataType, dKeyDatabaseFilePath):
        """
        :param face: Face used for all network I/O.
        :param groupManagerName: identity/prefix of this group manager.
        :param dataType: data type name the group controls access to.
        :param dKeyDatabaseFilePath: sqlite DB path for the group manager.
        """
        # Set up face
        self.face = face
        #self.loop = eventLoop

        # Set up the keyChain.
        identityStorage = MemoryIdentityStorage()
        privateKeyStorage = MemoryPrivateKeyStorage()
        self.keyChain = KeyChain(
            IdentityManager(identityStorage, privateKeyStorage),
            NoVerifyPolicyManager())
        self.certificateName = self.keyChain.createIdentityAndCertificate(
            groupManagerName)

        self.dKeyDatabaseFilePath = dKeyDatabaseFilePath
        # 2048-bit group keys, 1 parameter per GroupManager API.
        self.manager = GroupManager(
            groupManagerName, dataType,
            Sqlite3GroupManagerDb(self.dKeyDatabaseFilePath), 2048, 1,
            self.keyChain)

        self.memoryContentCache = MemoryContentCache(self.face)
        self.memoryContentCache.registerPrefix(groupManagerName,
                                               self.onRegisterFailed,
                                               self.onDataNotFound)
        # Set when a member is added; consumed by the key-publishing cycle.
        self.needToPublishGroupKeys = False
        return

    def onRepoData(self, interest, data):
        """Repo command acknowledged (logging disabled)."""
        #print "received repo data: " + interest.getName().toUri()
        return

    def onRepoTimeout(self, interest):
        """Repo command timed out (logging disabled)."""
        #print "repo command times out: " + interest.getName().getPrefix(-1).toUri()
        return

    def addSchedule(self, scheduleName, managerStartDate, managerEndDate,
                    managerStartHour, managerEndHour):
        """Add a daily white-interval schedule between the given dates/hours."""
        schedule = Schedule()
        interval = RepetitiveInterval(Schedule.fromIsoString(managerStartDate),
                                      Schedule.fromIsoString(managerEndDate),
                                      managerStartHour, managerEndHour, 1,
                                      RepetitiveInterval.RepeatUnit.DAY)
        schedule.addWhiteInterval(interval)
        self.manager.addSchedule(scheduleName, schedule)

    def onMemberCertificateData(self, interest, data, accessInterest):
        """Member certificate retrieved: add the member and answer the access request."""
        print "Member certificate with name retrieved: " + data.getName(
        ).toUri() + "; member added to group!"
        # NOTE(review): schedule name "schedule1" is hard-coded here — confirm
        # it matches the schedule added via addSchedule.
        self.manager.addMember("schedule1", data)
        self.needToPublishGroupKeys = True
        accessResponse = Data(accessInterest.getName())
        accessResponse.setContent("granted")
        self.face.putData(accessResponse)

    def onMemberCertificateTimeout(self, interest, accessInterest):
        """Retry fetching the member certificate indefinitely with a fresh nonce."""
        print "Member certificate interest times out: " + interest.getName(
        ).toUri()
        newInterest = Interest(interest)
        newInterest.refreshNonce()
        self.face.expressInterest(
            newInterest,
            lambda memberInterest, memberData: self.onMemberCertificateData(
                memberInterest, memberData, accessInterest),
            lambda memberInterest: self.onMemberCertificateTimeout(
                memberInterest, accessInterest))
        return

    def publishGroupKeys(self, timeStr):
        """
        Generate and publish group keys for the interval containing timeStr.
        The first result is the group public key (E-key); the rest are group
        private keys encrypted for each member (D-keys).
        """
        timePoint = Schedule.fromIsoString(timeStr)
        print timeStr
        result = self.manager.getGroupKey(timePoint)

        # The first is group public key, E-key
        # The rest are group private keys encrypted with each member's public key, D-key
        for i in range(0, len(result)):
            self.memoryContentCache.add(result[i])
            self.initiateContentStoreInsertion(
                "/ndn/edu/ucla/remap/ndnfit/repo", result[i])
            print "Publish key name: " + str(
                i) + " " + result[i].getName().toUri()
            print "key content: " + str(result[i].getContent().toBytes())
        self.needToPublishGroupKeys = False

    def onDataNotFound(self, prefix, interest, face, interestFilterId, filter):
        """Log interests we cannot satisfy (including their exclude filter, if any)."""
        print "Data not found for interest: " + interest.getName().toUri()
        if interest.getExclude():
            print "Interest has exclude: " + interest.getExclude().toUri()
        return

    def onRegisterFailed(self, prefix):
        """Log a failed prefix registration (non-fatal here)."""
        print "Prefix registration failed"
        return

    def initiateContentStoreInsertion(self, repoCommandPrefix, data):
        """Send a signed repo 'insert' command interest for the given data's name."""
        fetchName = data.getName()
        parameter = repo_command_parameter_pb2.RepoCommandParameterMessage()
        # Add the Name.
        for i in range(fetchName.size()):
            parameter.repo_command_parameter.name.component.append(
                fetchName[i].getValue().toBytes())

        # Create the command interest.
        interest = Interest(
            Name(repoCommandPrefix).append("insert").append(
                Name.Component(ProtobufTlv.encode(parameter))))
        self.face.makeCommandInterest(interest)

        self.face.expressInterest(interest, self.onRepoData, self.onRepoTimeout)
self.initiateContentStoreInsertion("/ndn/edu/ucla/remap/ndnfit/repo", keys[i]) return def onDataNotFound(prefix, interest, face, interestFilterId, filter): print "Data not found for interest: " + interest.getName().toUri() return def onRegisterFailed(prefix): print "Prefix registration failed" return if __name__ == "__main__": print "Start NAC producer test" face = Face() memoryContentCache = MemoryContentCache(face) # Produce encrypted data for this user username = "******" # Insert into this repo repoPrefix = "/ndn/edu/ucla/remap/ndnfit/repo" testProducer = SampleProducer(face, username, memoryContentCache) basetimeString = "20160620T080" baseZFill = 3 baseLat = 34 baseLng = -118 # This should be less than 1 minute dataNum = 60 # Create the content key once originalTimeString = basetimeString + str(0).zfill(baseZFill)
class IotController(BaseNode):
    """
    The controller class has a few built-in commands:
       - listDevices: return the names and capabilities of all attached
            devices
       - certificateRequest: takes public key information and returns name of
            new certificate
       - updateCapabilities: should be sent periodically from IotNodes to
            update their command lists
       - addDevice: add a device based on HMAC
    It is unlikely that you will need to subclass this.
    """

    def __init__(self, nodeName, networkName, applicationDirectory=""):
        super(IotController, self).__init__()
        self.deviceSuffix = Name(nodeName)
        self.networkPrefix = Name(networkName)
        self.prefix = Name(self.networkPrefix).append(self.deviceSuffix)

        self._policyManager.setEnvironmentPrefix(self.networkPrefix)
        self._policyManager.setTrustRootIdentity(self.prefix)
        self._policyManager.setDeviceIdentity(self.prefix)
        self._policyManager.updateTrustRules()

        # the controller keeps a directory of capabilities->names
        self._directory = defaultdict(list)

        # keep track of who's still using HMACs
        # key is device serial, value is the HmacHelper
        self._hmacDevices = {}

        # our capabilities
        self._baseDirectory = {}

        # add the built-ins
        self._insertIntoCapabilities('listDevices', 'directory', False)
        self._insertIntoCapabilities('updateCapabilities', 'capabilities',
                                     True)

        self._directory.update(self._baseDirectory)

        # Set up application directory
        if applicationDirectory == "":
            applicationDirectory = os.path.expanduser(
                '~/.ndn/iot/applications')
        self._applicationDirectory = applicationDirectory
        self._applications = dict()

    def _insertIntoCapabilities(self, commandName, keyword, isSigned):
        """Register one built-in command under `keyword` in the base directory."""
        newUri = Name(self.prefix).append(Name(commandName)).toUri()
        self._baseDirectory[keyword] = [{'signed': isSigned, 'name': newUri}]

    def beforeLoopStart(self):
        """
        One-time setup before the event loop runs: ensure a root-signed
        certificate exists, trust it, and register the command prefix on a
        MemoryContentCache that also serves the root certificate.
        """
        if not self._policyManager.hasRootSignedCertificate():
            # make one....
            self.log.warn('Generating controller certificate...')
            newKey = self._identityManager.generateRSAKeyPairAsDefault(
                self.prefix, isKsk=True)
            newCert = self._identityManager.selfSign(newKey)
            self._identityManager.addCertificateAsDefault(newCert)
        # Trusting root's own certificate upon each run
        # TODO: debug where application starts first and controller starts second, application's interest cannot be verified
        self._rootCertificate = self._keyChain.getCertificate(
            self.getDefaultCertificateName())
        self._policyManager._certificateCache.insertCertificate(
            self._rootCertificate)

        self._memoryContentCache = MemoryContentCache(self.face)
        self.face.setCommandSigningInfo(self._keyChain,
                                        self.getDefaultCertificateName())
        self._memoryContentCache.registerPrefix(
            self.prefix,
            onRegisterFailed=self.onRegisterFailed,
            onRegisterSuccess=None,
            onDataNotFound=self._onCommandReceived)
        # Serve root certificate in our memoryContentCache
        self._memoryContentCache.add(self._rootCertificate)
        self.loadApplications()
        self.loop.call_soon(self.onStartup)

######
# Initial configuration
#######
    # TODO: deviceSuffix will be replaced by deviceSerial
    def _addDeviceToNetwork(self, deviceSerial, newDeviceSuffix, pin):
        """Send an HMAC-signed configuration interest to onboard a new device."""
        h = HmacHelper(pin)
        self._hmacDevices[deviceSerial] = h

        d = DeviceConfigurationMessage()

        for source, dest in [
            (self.networkPrefix, d.configuration.networkPrefix),
            (self.deviceSuffix, d.configuration.controllerName),
            (newDeviceSuffix, d.configuration.deviceSuffix)
        ]:
            for i in range(source.size()):
                component = source.get(i)
                dest.components.append(component.getValue().toRawStr())

        interestName = Name('/home/configure').append(Name(deviceSerial))
        encodedParams = ProtobufTlv.encode(d)
        interestName.append(encodedParams)
        interest = Interest(interestName)
        h.signInterest(interest)

        self.face.expressInterest(interest, self._deviceAdditionResponse,
                                  self._deviceAdditionTimedOut)

    def _deviceAdditionTimedOut(self, interest):
        """Give up on a device that did not answer configuration and drop its HMAC."""
        deviceSerial = str(interest.getName().get(2).getValue())
        self.log.warn("Timed out trying to configure device " + deviceSerial)
        # don't try again
        self._hmacDevices.pop(deviceSerial)

    def _deviceAdditionResponse(self, interest, data):
        """Verify the device's HMAC-signed configuration response."""
        status = data.getContent().toRawStr()
        deviceSerial = str(interest.getName().get(2).getValue())
        hmacChecker = self._hmacDevices[deviceSerial]
        if (hmacChecker.verifyData(data)):
            self.log.info("Received {} from {}".format(status, deviceSerial))
        else:
            self.log.warn("Received invalid HMAC from {}".format(deviceSerial))

######
# Certificate signing
######

    def _handleCertificateRequest(self, interest):
        """
        Extracts a public key name and key bits from a command interest name
        component. Generates a certificate if the request is verifiable.

        This expects an HMAC signed interest.
        """
        message = CertificateRequestMessage()
        commandParamsTlv = interest.getName().get(self.prefix.size() + 1)
        ProtobufTlv.decode(message, commandParamsTlv.getValue())

        signature = HmacHelper.extractInterestSignature(interest)
        deviceSerial = str(
            signature.getKeyLocator().getKeyName().get(-1).getValue())

        response = Data(interest.getName())
        certData = None
        hmac = None
        try:
            hmac = self._hmacDevices[deviceSerial]
            if hmac.verifyInterest(interest):
                certData = self._createCertificateFromRequest(message)
                # remove this hmac; another request will require a new pin
                self._hmacDevices.pop(deviceSerial)
        except KeyError:
            self.log.warn(
                'Received certificate request for device with no registered key'
            )
        except SecurityException as e:
            self.log.warn('Could not create device certificate: ' + str(e))
        else:
            self.log.info(
                'Creating certificate for device {}'.format(deviceSerial))

        if certData is not None:
            response.setContent(certData.wireEncode())
            response.getMetaInfo().setFreshnessPeriod(
                10000)  # should be good even longer
        else:
            response.setContent("Denied")
        if hmac is not None:
            hmac.signData(response)
        self.sendData(response, False)

    def _createCertificateFromRequest(self, message):
        """
        Generate an IdentityCertificate from the public key information given.
        """
        # TODO: Verify the certificate was actually signed with the private key
        # matching the public key we are issuing a cert for!!

        keyComponents = message.command.keyName.components
        keyName = Name("/".join(keyComponents))

        self.log.debug("Key name: " + keyName.toUri())

        if not self._policyManager.getEnvironmentPrefix().match(keyName):
            # we do not issue certs for keys outside of our network
            return None

        keyDer = Blob(message.command.keyBits)
        keyType = message.command.keyType

        try:
            self._identityStorage.addKey(keyName, keyType, keyDer)
        except SecurityException as e:
            print(e)
            # assume this is due to already existing?
            pass

        certificate = self._identityManager._generateCertificateForKey(keyName)

        self._keyChain.sign(certificate, self.getDefaultCertificateName())
        # store it for later use + verification
        self._identityStorage.addCertificate(certificate)
        self._policyManager._certificateCache.insertCertificate(certificate)

        return certificate

######
# Device Capabilities
######

    def _updateDeviceCapabilities(self, interest):
        """
        Take the received capabilities update interest and update our
        directory listings.
        """
        # we assume the sender is the one who signed the interest...
        signature = self._policyManager._extractSignature(interest)
        certificateName = signature.getKeyLocator().getKeyName()
        senderIdentity = IdentityCertificate.certificateNameToPublicKeyName(
            certificateName).getPrefix(-1)

        self.log.info('Updating capabilities for {}'.format(
            senderIdentity.toUri()))

        # get the params from the interest name
        messageComponent = interest.getName().get(self.prefix.size() + 1)
        message = UpdateCapabilitiesCommandMessage()
        ProtobufTlv.decode(message, messageComponent.getValue())
        # we remove all the old capabilities for the sender
        tempDirectory = defaultdict(list)
        for keyword in self._directory:
            tempDirectory[keyword] = [
                cap for cap in self._directory[keyword]
                if not senderIdentity.match(Name(cap['name']))
            ]

        # then we add the ones from the message
        for capability in message.capabilities:
            capabilityPrefix = Name()
            for component in capability.commandPrefix.components:
                capabilityPrefix.append(component)
            commandUri = capabilityPrefix.toUri()
            if not senderIdentity.match(capabilityPrefix):
                self.log.error(
                    "Node {} tried to register another prefix: {} - ignoring update"
                    .format(senderIdentity.toUri(), commandUri))
            else:
                for keyword in capability.keywords:
                    allUris = [info['name'] for info in tempDirectory[keyword]]
                    if capabilityPrefix not in allUris:
                        listing = {
                            'signed': capability.needsSignature,
                            'name': commandUri
                        }
                        tempDirectory[keyword].append(listing)
        self._directory = tempDirectory

    def _prepareCapabilitiesList(self, interestName):
        """
        Responds to a directory listing request with JSON
        """
        dataName = Name(interestName).append(
            Name.Component.fromNumber(int(time.time())))
        response = Data(dataName)

        response.setContent(json.dumps(self._directory))

        return response

#####
# Interest handling
####

    def _onCommandReceived(self, prefix, interest, face, interestFilterId,
                           filter):
        """
        Dispatch a received command interest: serve our certificate, answer
        listDevices, handle certificateRequest, verify-and-apply
        updateCapabilities, or process application publish requests.
        """
        interestName = interest.getName()

        #if it is a certificate name, serve the certificate
        # TODO: since we've memoryContentCache serving root cert now, this should no longer be required
        try:
            if interestName.isPrefixOf(self.getDefaultCertificateName()):
                foundCert = self._identityManager.getCertificate(
                    self.getDefaultCertificateName())
                self.log.debug("Serving certificate request")
                self.face.putData(foundCert)
                return
        except SecurityException as e:
            # We don't have this certificate, this is probably not a certificate request
            # TODO: this does not differentiate from certificate request but certificate not exist; should update
            print(str(e))
            pass

        afterPrefix = interestName.get(prefix.size()).toEscapedString()
        if afterPrefix == "listDevices":
            #compose device list
            self.log.debug("Received device list request")
            response = self._prepareCapabilitiesList(interestName)
            self.sendData(response)
        elif afterPrefix == "certificateRequest":
            #build and sign certificate
            self.log.debug("Received certificate request")
            self._handleCertificateRequest(interest)
        elif afterPrefix == "updateCapabilities":
            # needs to be signed!
            self.log.debug("Received capabilities update")

            def onVerifiedCapabilities(interest):
                print("capabilities good")
                response = Data(interest.getName())
                response.setContent(str(time.time()))
                self.sendData(response)
                self._updateDeviceCapabilities(interest)

            self._keyChain.verifyInterest(interest, onVerifiedCapabilities,
                                          self.verificationFailed)
        elif afterPrefix == "requests":
            # application request to publish under some names received; need to be signed
            def onVerifiedAppRequest(interest):
                # TODO: for now, we automatically grant access to any valid signed interest
                print("verified! send response!")
                message = AppRequestMessage()
                ProtobufTlv.decode(
                    message,
                    interest.getName().get(prefix.size() + 1).getValue())
                certName = Name("/".join(message.command.idName.components))
                dataPrefix = Name("/".join(
                    message.command.dataPrefix.components))
                appName = message.command.appName

                isUpdated = self.updateTrustSchema(appName, certName,
                                                   dataPrefix, True)

                response = Data(interest.getName())
                if isUpdated:
                    response.setContent(
                        "{\"status\": 200, \"message\": \"granted, trust schema updated OK\" }"
                    )
                    self.log.info(
                        "Verified and granted application publish request")
                else:
                    response.setContent(
                        "{\"status\": 400, \"message\": \"not granted, requested publishing namespace already exists\" }"
                    )
                    self.log.info(
                        "Verified and but requested namespace already exists")
                self.sendData(response)
                return

            def onVerificationFailedAppRequest(interest):
                print("application request verify failed!")
                response = Data(interest.getName())
                response.setContent(
                    "{\"status\": 401, \"message\": \"command interest verification failed\" }"
                )
                self.sendData(response)

            self.log.info("Received application request: " +
                          interestName.toUri())
            #print("Verifying with trust schema: ")
            #print(self._policyManager.config)
            self._keyChain.verifyInterest(interest, onVerifiedAppRequest,
                                          onVerificationFailedAppRequest)
        else:
            print("Got interest unable to answer yet: " +
                  interest.getName().toUri())
            if interest.getExclude():
                print("interest has exclude: " +
                      interest.getExclude().toUri())
            # response = Data(interest.getName())
            # response.setContent("500")
            # response.getMetaInfo().setFreshnessPeriod(1000)
            # self.sendData(response)

    def onStartup(self):
        """Start the interactive menu and begin reading user input from stdin."""
        # begin taking add requests
        self.loop.call_soon(self.displayMenu)
        self.loop.add_reader(stdin, self.handleUserInput)

    def displayMenu(self):
        # NOTE(review): this method is truncated at the end of the visible
        # chunk; the final `menuStr +=` continues beyond it.
        menuStr = "\n"
        menuStr += "P)air a new device with serial and PIN\n"
        menuStr += "D)irectory listing\n"
        menuStr += "E)xpress an interest\n"
        menuStr += "L)oad hosted applications (" + (
            self._applicationDirectory) + ")\n"
        menuStr +=
"Q)uit\n" print(menuStr) print("> ", end="") stdout.flush() def listDevices(self): menuStr = '' for capability, commands in self._directory.items(): menuStr += '{}:\n'.format(capability) for info in commands: signingStr = 'signed' if info['signed'] else 'unsigned' menuStr += '\t{} ({})\n'.format(info['name'], signingStr) print(menuStr) self.loop.call_soon(self.displayMenu) def loadApplicationsMenuSelect(self): try: confirm = input( 'This will override existing trust schemas, continue? (Y/N): ' ).upper().startswith('Y') if confirm: self.loadApplications(override=True) else: print("Aborted") except KeyboardInterrupt: print("Aborted") finally: self.loop.call_soon(self.displayMenu) def onInterestTimeout(self, interest): print('Interest timed out: {}'.interest.getName().toUri()) def onDataReceived(self, interest, data): print('Received data named: {}'.format(data.getName().toUri())) print('Contents:\n{}'.format(data.getContent().toRawStr())) def expressInterest(self): try: interestName = input('Interest name: ') if len(interestName): toSign = input('Signed? 
(y/N): ').upper().startswith('Y') interest = Interest(Name(interestName)) print(interest) interest.setInterestLifetimeMilliseconds(10000) interest.setChildSelector(1) if (toSign): self.face.makeCommandInterest(interest) self.face.expressInterest(interest, self.onDataReceived, self.onInterestTimeout) else: print("Aborted") except KeyboardInterrupt: print("Aborted") finally: self.loop.call_soon(self.displayMenu) def beginPairing(self): try: deviceSerial = input('Device serial: ') devicePin = input('PIN: ') deviceSuffix = input('Node name: ') except KeyboardInterrupt: print('Pairing attempt aborted') else: if len(deviceSerial) and len(devicePin) and len(deviceSuffix): self._addDeviceToNetwork(deviceSerial, Name(deviceSuffix), devicePin.decode('hex')) else: print('Pairing attempt aborted') finally: self.loop.call_soon(self.displayMenu) def handleUserInput(self): inputStr = stdin.readline().upper() if inputStr.startswith('D'): self.listDevices() elif inputStr.startswith('P'): self.beginPairing() elif inputStr.startswith('E'): self.expressInterest() elif inputStr.startswith('Q'): self.stop() elif inputStr.startswith('L'): self.loadApplicationsMenuSelect() else: self.loop.call_soon(self.displayMenu) ######################## # application trust schema distribution ######################## def updateTrustSchema(self, appName, certName, dataPrefix, publishNew=False): if appName in self._applications: if dataPrefix.toUri() in self._applications[appName]["dataPrefix"]: print("some key is configured for namespace " + dataPrefix.toUri() + " for application " + appName + ". 
Ignoring this request.") return False else: # TODO: Handle malformed conf where validator tree does not exist validatorNode = self._applications[appName]["tree"][ "validator"][0] else: # This application does not previously exist, we create its trust schema # (and for now, add in static rules for sync data) self._applications[appName] = { "tree": BoostInfoParser(), "dataPrefix": [], "version": 0 } validatorNode = self._applications[appName]["tree"].getRoot( ).createSubtree("validator") trustAnchorNode = validatorNode.createSubtree("trust-anchor") #trustAnchorNode.createSubtree("type", "file") #trustAnchorNode.createSubtree("file-name", os.path.expanduser("~/.ndn/iot/root.cert")) trustAnchorNode.createSubtree("type", "base64") trustAnchorNode.createSubtree( "base64-string", Blob(b64encode(self._rootCertificate.wireEncode().toBytes()), False).toRawStr()) #create cert verification rule # TODO: the idea for this would be, if the cert has /home-prefix/<one-component>/KEY/ksk-*/ID-CERT, then it should be signed by fixed controller(s) # if the cert has /home-prefix/<multiple-components>/KEY/ksk-*/ID-CERT, then it should be checked hierarchically (this is for subdomain support) certRuleNode = validatorNode.createSubtree("rule") certRuleNode.createSubtree("id", "Certs") certRuleNode.createSubtree("for", "data") filterNode = certRuleNode.createSubtree("filter") filterNode.createSubtree("type", "regex") filterNode.createSubtree("regex", "^[^<KEY>]*<KEY><>*<ID-CERT>") checkerNode = certRuleNode.createSubtree("checker") # TODO: wait how did my first hierarchical verifier work? 
#checkerNode.createSubtree("type", "hierarchical") checkerNode.createSubtree("type", "customized") checkerNode.createSubtree("sig-type", "rsa-sha256") keyLocatorNode = checkerNode.createSubtree("key-locator") keyLocatorNode.createSubtree("type", "name") # We don't put cert version in there keyLocatorNode.createSubtree( "name", Name(self.getDefaultCertificateName()).getPrefix(-1).toUri()) keyLocatorNode.createSubtree("relation", "equal") # Discovery rule: anything that multicasts under my home prefix should be signed, and the signer should have been authorized by root # TODO: This rule as of right now is over-general discoveryRuleNode = validatorNode.createSubtree("rule") discoveryRuleNode.createSubtree("id", "sync-data") discoveryRuleNode.createSubtree("for", "data") filterNode = discoveryRuleNode.createSubtree("filter") filterNode.createSubtree("type", "regex") filterNode.createSubtree("regex", "^[^<MULTICAST>]*<MULTICAST><>*") checkerNode = discoveryRuleNode.createSubtree("checker") # TODO: wait how did my first hierarchical verifier work? 
#checkerNode.createSubtree("type", "hierarchical") checkerNode.createSubtree("type", "customized") checkerNode.createSubtree("sig-type", "rsa-sha256") keyLocatorNode = checkerNode.createSubtree("key-locator") keyLocatorNode.createSubtree("type", "name") keyLocatorNode.createSubtree("regex", "^[^<KEY>]*<KEY><>*<ID-CERT>") ruleNode = validatorNode.createSubtree("rule") ruleNode.createSubtree("id", dataPrefix.toUri()) ruleNode.createSubtree("for", "data") filterNode = ruleNode.createSubtree("filter") filterNode.createSubtree("type", "name") filterNode.createSubtree("name", dataPrefix.toUri()) filterNode.createSubtree("relation", "is-prefix-of") checkerNode = ruleNode.createSubtree("checker") checkerNode.createSubtree("type", "customized") checkerNode.createSubtree("sig-type", "rsa-sha256") keyLocatorNode = checkerNode.createSubtree("key-locator") keyLocatorNode.createSubtree("type", "name") # We don't put cert version in there keyLocatorNode.createSubtree("name", certName.getPrefix(-1).toUri()) keyLocatorNode.createSubtree("relation", "equal") if not os.path.exists(self._applicationDirectory): os.makedirs(self._applicationDirectory) self._applications[appName]["tree"].write( os.path.join(self._applicationDirectory, appName + ".conf")) self._applications[appName]["dataPrefix"].append(dataPrefix.toUri()) self._applications[appName]["version"] = int(time.time()) if publishNew: # TODO: ideally, this is the trust schema of the application, and does not necessarily carry controller prefix. 
# We make it carry controller prefix here so that prefix registration / route setup is easier (implementation workaround) data = Data( Name(self.prefix).append(appName).append( "_schema").appendVersion( self._applications[appName]["version"])) data.setContent(str(self._applications[appName]["tree"].getRoot())) self.signData(data) self._memoryContentCache.add(data) return True # TODO: putting existing confs into memoryContentCache def loadApplications(self, directory=None, override=False): if not directory: directory = self._applicationDirectory if override: self._applications.clear() if os.path.exists(directory): for f in os.listdir(directory): fullFileName = os.path.join(directory, f) if os.path.isfile(fullFileName) and f.endswith('.conf'): appName = f.rstrip('.conf') if appName in self._applications and not override: print( "loadApplications: " + appName + " already exists, do nothing for configuration file: " + fullFileName) else: self._applications[appName] = { "tree": BoostInfoParser(), "dataPrefix": [], "version": int(time.time()) } self._applications[appName]["tree"].read(fullFileName) data = Data( Name(self.prefix).append(appName).append( "_schema").appendVersion( self._applications[appName]["version"])) data.setContent( str(self._applications[appName]["tree"].getRoot())) self.signData(data) self._memoryContentCache.add(data) try: validatorTree = self._applications[appName][ "tree"]["validator"][0] for rule in validatorTree["rule"]: self._applications[appName][ "dataPrefix"].append(rule["id"][0].value) # TODO: don't swallow any general exceptions, we want to catch only KeyError (make sure) here except Exception as e: print( "loadApplications parse configuration file " + fullFileName + " : " + str(e)) return
class BmsNode(object):
    """
    A building-management-system aggregation node: reads a BoostInfo
    configuration, produces (leaf) or collects (intermediate) per-type data,
    computes aggregations over fixed time windows, and publishes the signed
    results through a MemoryContentCache on a ThreadsafeFace.
    """

    def __init__(self):
        self.conf = None                      # BoostInfoParser, set in setConfiguration
        self._keyChain = None                 # KeyChain, set in startPublishing
        self._certificateName = None
        self._dataQueue = dict()              # keyed by dataType or dataType+aggregationType
        self._memoryContentCache = None
        self._identityName = None
        self._aggregation = Aggregation()

    def setConfiguration(self, fileName, trustSchemaFile):
        """Read the node configuration and remember the trust schema path."""
        self.conf = BoostInfoParser()
        self.conf.read(fileName)
        self._identityName = Name(self.conf.getNodePrefix())
        self._trustSchemaFile = trustSchemaFile

    def onDataNotFound(self, prefix, interest, face, interestFilterId, filter):
        # Cache miss handler for MemoryContentCache; deliberately silent.
        #print('Data not found for ' + interest.getName().toUri())
        return

    def startPublishing(self):
        """
        One-time setup: create identity/certificate, (optionally) get the
        certificate signed by an external service, then start one
        aggregation publisher per configured data/aggregation type.
        """
        # One-time security setup
        self.prepareLogging()
        privateKeyStorage = FilePrivateKeyStorage()
        identityStorage = BasicIdentityStorage()
        policyManager = ConfigPolicyManager(self._trustSchemaFile)
        self._keyChain = KeyChain(
            IdentityManager(identityStorage, privateKeyStorage), policyManager)
        self._certificateName = self._keyChain.createIdentityAndCertificate(
            self._identityName)
        print("My Identity name: " + self._identityName.toUri())
        print("My certificate name: " + self._certificateName.toUri())
        certificateData = self._keyChain.getIdentityManager(
            )._identityStorage.getCertificate(self._certificateName)
        print("My certificate string: " +
              b64encode(certificateData.wireEncode().toBuffer()))
        # self._keyChain.getIdentityCertificate(self._certificateName).)

        self._loop = asyncio.get_event_loop()
        self._face = ThreadsafeFace(self._loop)
        self._keyChain.setFace(self._face)
        self._face.setCommandSigningInfo(self._keyChain, self._certificateName)
        self._memoryContentCache = MemoryContentCache(self._face)

        # We should only ask for cert to be signed upon the first run of a certain aggregator
        if DO_CERT_SETUP:
            # Self-signed (key locator equals own key prefix) means the cert
            # has not been signed by the external service yet.
            if (KeyLocator.getFromSignature(
                    certificateData.getSignature()).getKeyName().equals(
                        self._certificateName.getPrefix(-1))):
                # Need to configure for mini-ndn; aggregation node runs outside of mini-ndn first so that signed cert get installed and mini-ndn won't ask for this again
                print("certificate " + self._certificateName.toUri() +
                      " asking for signature")
                response = urllib2.urlopen(
                    "http://192.168.56.1:5000/bms-cert-hack?cert=" +
                    b64encode(certificateData.wireEncode().toBuffer()) +
                    "&cert_prefix=" + self._identityName.toUri() +
                    '&subject_name=' + self._identityName.toUri()).read()
                signedCertData = Data()
                signedCertData.wireDecode(Blob(b64decode(response)))
                self._memoryContentCache.add(signedCertData)
                cmdline = ['ndnsec-install-cert', '-']
                p = subprocess.Popen(cmdline, stdin=subprocess.PIPE,
                                     stdout=subprocess.PIPE)
                # desanitize + sign in GET request
                cert, err = p.communicate(response)
                if p.returncode != 0:
                    raise RuntimeError("ndnsec-install-cert error")
            else:
                self._memoryContentCache.add(certificateData)
        else:
            self._memoryContentCache.add(certificateData)

        dataNode = self.conf.getDataNode()
        childrenNode = self.conf.getChildrenNode()
        self._memoryContentCache.registerPrefix(
            Name(self._identityName), self.onRegisterFailed, self.onDataNotFound)

        # For each type of data, we refresh each type of aggregation according to the interval in the configuration
        # NOTE(review): indexing .keys()/.items() assumes Python 2 (lists,
        # stable ordering) — confirm before porting to Python 3.
        for i in range(len(dataNode.subtrees)):
            dataType = dataNode.subtrees.keys()[i]
            aggregationParams = self.conf.getProducingParamsForAggregationType(
                dataNode.subtrees.items()[i][1])
            if childrenNode == None:
                # Leaf node: no children, so generate raw samples locally.
                self._dataQueue[dataType] = DataQueue(None, None, None)
                self.generateData(dataType, 2, 0)
            for aggregationType in aggregationParams:
                childrenList = OrderedDict()
                if childrenNode != None:
                    # Collect the params of every child that produces this
                    # data type + aggregation type.
                    for j in range(len(childrenNode.subtrees)):
                        if dataType in childrenNode.subtrees.items(
                                )[j][1].subtrees['data'].subtrees:
                            if aggregationType in childrenNode.subtrees.items(
                                    )[j][1].subtrees['data'].subtrees[
                                        dataType].subtrees:
                                childrenList[childrenNode.subtrees.items()[j][
                                    0]] = self.conf.getProducingParamsForAggregationType(
                                        childrenNode.subtrees.items()[j]
                                        [1].subtrees['data'].subtrees[dataType]
                                    )[aggregationType]
                self.startPublishingAggregation(
                    aggregationParams[aggregationType], childrenList,
                    dataType, aggregationType)
        return

    def startPublishingAggregation(self, params, childrenList, dataType,
                                   aggregationType):
        """
        Begin one aggregation pipeline: leaf nodes schedule periodic
        calculation; intermediate nodes express interests toward children.
        """
        if __debug__:
            print('Start publishing for ' + dataType + '-' + aggregationType)

        # aggregation calculating and publishing mechanism
        publishingPrefix = Name(
            self._identityName).append(DATA_COMPONENT).append(dataType).append(
                AGGREGATION_COMPONENT).append(aggregationType)
        self._dataQueue[dataType + aggregationType] = DataQueue(
            params, childrenList, publishingPrefix)

        if len(childrenList.keys()) == 0:
            # TODO: make start_time optional for leaf nodes
            self._loop.call_later(int(params['producer_interval']),
                                  self.calculateAggregation, dataType,
                                  aggregationType, childrenList,
                                  int(params['start_time']),
                                  int(params['producer_interval']),
                                  publishingPrefix, True)
        else:
            # express interest for children who produce the same data and aggregation type
            for childName in childrenList.keys():
                name = Name(self._identityName).append(childName).append(
                    DATA_COMPONENT).append(dataType).append(
                        AGGREGATION_COMPONENT).append(aggregationType)
                interest = Interest(name)
                # if start_time is specified, we ask for data starting at start_time;
                # if not, we ask for the right most child and go from there
                if ('start_time' in childrenList[childName]):
                    endTime = int(childrenList[childName]['start_time']) + int(
                        childrenList[childName]['producer_interval'])
                    interest.getName().append(
                        str(childrenList[childName]['start_time'])).append(
                            str(endTime))
                else:
                    # TODO: For now we are playing with historical data, for each run we don't want to miss any data, thus we start with leftMost
                    interest.setChildSelector(0)
                    interest.setMustBeFresh(True)
                interest.setInterestLifetimeMilliseconds(
                    DEFAULT_INTEREST_LIFETIME)
                if __debug__:
                    print(' Issue interest: ' + interest.getName().toUri())
                self._face.expressInterest(interest, self.onData,
                                           self.onTimeout)
        return

    # TODO: once one calculation's decided a child has not answered, we should do another calculation
    def calculateAggregation(self, dataType, aggregationType, childrenList,
                             startTime, interval, publishingPrefix,
                             repeat=False):
        """
        Aggregate the window [startTime, startTime + interval) and publish
        the signed result; with repeat=True, reschedule for the next window.
        Intermediate nodes only calculate once all children have replied.
        """
        doCalc = True
        dataList = []
        # TODO: an intermediate node cannot produce raw data for now
        if len(childrenList.keys()) != 0:
            for childName in childrenList.keys():
                dataDictKey = self.getDataDictKey(startTime,
                                                  (startTime + interval),
                                                  childName)
                if dataDictKey in self._dataQueue[
                        dataType + aggregationType]._dataDict:
                    data = self._dataQueue[
                        dataType + aggregationType]._dataDict[dataDictKey]
                    dataList.append(float(data.getContent().toRawStr()))
                else:
                    #print('Child ' + childName + ' has not replied yet')
                    doCalc = False
                    break
        else:
            # Leaf node: aggregate the locally generated raw samples that
            # fall in the window.
            for inst in self._dataQueue[dataType]._dataDict.keys():
                if int(inst) >= startTime and int(inst) < startTime + interval:
                    dataList.append(self._dataQueue[dataType]._dataDict[inst])
        if doCalc:
            content = self._aggregation.getAggregation(aggregationType,
                                                       dataList)
            if content:
                publishData = Data(
                    Name(publishingPrefix).append(str(startTime)).append(
                        str(startTime + interval)))
                publishData.setContent(str(content))
                publishData.getMetaInfo().setFreshnessPeriod(
                    DEFAULT_DATA_LIFETIME)
                self._keyChain.sign(publishData, self._certificateName)
                self._memoryContentCache.add(publishData)
                # Consumed child data for this window can be dropped.
                for childName in childrenList.keys():
                    dataDictKey = self.getDataDictKey(startTime,
                                                      (startTime + interval),
                                                      childName)
                    if dataDictKey in self._dataQueue[
                            dataType + aggregationType]._dataDict:
                        del self._dataQueue[
                            dataType + aggregationType]._dataDict[dataDictKey]
                if __debug__:
                    print("Produced: " + publishData.getName().toUri() + "; " +
                          publishData.getContent().toRawStr())
        # repetition of this function only happens for raw data producer, otherwise calculateAggregation is called by each onData
        if repeat:
            self._loop.call_later(interval, self.calculateAggregation,
                                  dataType, aggregationType, childrenList,
                                  startTime + interval, interval,
                                  publishingPrefix, repeat)
        return

    def generateData(self, dataType, interval, startTime):
        """Produce one random raw sample and reschedule for the next slot."""
        self._dataQueue[dataType]._dataDict[str(startTime)] = random.randint(
            0, 9)
        self._loop.call_later(interval, self.generateData, dataType, interval,
                              startTime + interval)
        return

    def onRegisterFailed(self, prefix):
        raise RuntimeError("Register failed for prefix", prefix.toUri())

    def onVerified(self, data):
        print('Data verified: ' + data.getName().toUri())
        return

    def onVerifyFailed(self, data):
        print('Data verification failed: ' + data.getName().toUri())
        return

    def onData(self, interest, data):
        """
        Store a child's aggregated data keyed by window + child, trigger a
        calculation attempt, and re-express an excluding interest for the
        child's next (different) window.
        """
        self._keyChain.verifyData(data, self.onVerified, self.onVerifyFailed)

        dataName = data.getName()
        dataQueue = None

        if __debug__:
            print("Got data: " + dataName.toUri() + "; " +
                  data.getContent().toRawStr())
        # Locate the aggregation marker component; the surrounding
        # components encode dataType, aggregationType, window and child.
        for i in range(0, len(dataName)):
            if dataName.get(i).toEscapedString() == AGGREGATION_COMPONENT:
                dataType = dataName.get(i - 1).toEscapedString()
                aggregationType = dataName.get(i + 1).toEscapedString()
                startTime = int(dataName.get(i + 2).toEscapedString())
                endTime = int(dataName.get(i + 3).toEscapedString())
                childName = dataName.get(i - 3).toEscapedString()
                dataAndAggregationType = dataType + aggregationType
                dataDictKey = self.getDataDictKey(startTime, endTime, childName)
                dataQueue = self._dataQueue[dataAndAggregationType]
                dataQueue._dataDict[dataDictKey] = data
                break

        # TODO: check what if interval/starttime is misconfigured
        if dataQueue:
            self.calculateAggregation(dataType, aggregationType,
                                      dataQueue._childrenList, startTime,
                                      endTime - startTime,
                                      dataQueue._publishingPrefix)

        # Always ask for the next piece of data when we receive this one; assumes interval does not change; this also assumes there are no more components after endTime
        #newInterestName = dataName.getPrefix(i + 2).append(str(endTime)).append(str(endTime + (endTime - startTime)))
        # We don't expect aggregated data name to be continuous within our given time window, so we ask with exclusion instead
        newInterestName = dataName.getPrefix(i + 2)
        newInterest = Interest(interest)
        newInterest.setName(newInterestName)
        newInterest.setChildSelector(0)
        exclude = Exclude()
        exclude.appendAny()
        exclude.appendComponent(dataName.get(i + 2))
        newInterest.setExclude(exclude)
        self._face.expressInterest(newInterest, self.onData, self.onTimeout)
        if __debug__:
            print(" issue interest: " + interest.getName().toUri())
        return

    def onTimeout(self, interest):
        # Child data may simply not be produced yet: re-express indefinitely.
        if __debug__:
            print(" interest timeout: " + interest.getName().toUri() +
                  "; reexpress")
            pass
        self._face.expressInterest(interest, self.onData, self.onTimeout)
        return

    def stop(self):
        self._loop.stop()
        if __debug__:
            print("Stopped")
        return

    # This creation of dataDictKey means parent and child should not have the same name
    @staticmethod
    def getDataDictKey(startTime, endTime, childName):
        """Build the per-window, per-child key used in DataQueue._dataDict."""
        return str(startTime) + '/' + str(endTime) + '/' + childName

    ##
    # Logging
    ##
    def prepareLogging(self):
        """Configure a DEBUG logger with an INFO console handler."""
        self.log = logging.getLogger(str(self.__class__))
        self.log.setLevel(logging.DEBUG)
        logFormat = "%(asctime)-15s %(name)-20s %(funcName)-20s (%(levelname)-8s):\n\t%(message)s"
        self._console = logging.StreamHandler()
        self._console.setFormatter(logging.Formatter(logFormat))
        self._console.setLevel(logging.INFO)
        # without this, a lot of ThreadsafeFace errors get swallowed up
        logging.getLogger("trollius").addHandler(self._console)
        self.log.addHandler(self._console)

    def setLogLevel(self, level):
        """
        Set the log level that will be output to standard error

        :param level: A log level constant defined in the logging module
          (e.g. logging.INFO)
        """
        self._console.setLevel(level)

    def getLogger(self):
        """
        :return: The logger associated with this node
        :rtype: logging.Logger
        """
        return self.log
def __init__(self, face, encryptResult, defaultPrefix, link = None):
    # DPU consumer setup (Python 2): create a fresh consumer database,
    # build a keychain/identity, register the DPU prefix (preferring the
    # system default certificate), load the consumer decryption key, and
    # send a read-access request to the group manager.
    # Set up face
    self.face = face
    self._encryptResult = encryptResult
    self._link = link

    # Start from a clean consumer DB each run.
    self.databaseFilePath = "policy_config/test_consumer_dpu.db"
    try:
        os.remove(self.databaseFilePath)
    except OSError:
        # no such file
        pass

    # Group whose encrypted content this DPU consumes.
    self.groupName = Name(defaultPrefix)

    # Set up the keyChain.
    identityStorage = BasicIdentityStorage()
    privateKeyStorage = FilePrivateKeyStorage()
    self.keyChain = KeyChain(
        IdentityManager(identityStorage, privateKeyStorage),
        NoVerifyPolicyManager())

    # Authorized identity
    identityName = Name("/ndn/edu/basel/dpu")
    # Function name: the function that this DPU provides
    self._functionName = "bounding_box"
    self._identityName = identityName

    self.certificateName = self.keyChain.createIdentityAndCertificate(identityName)
    # TODO: if using BasicIdentityStorage and FilePrivateKeyStorage
    # For some reason this newly generated cert is not installed by default, calling keyChain sign later would result in error
    #self.keyChain.installIdentityCertificate()

    self.memoryContentCache = MemoryContentCache(self.face)
    # Prefer the system default keychain for command signing; fall back to
    # the certificate we just created in FilePrivateKeyStorage.
    try:
        commandSigningKeyChain = KeyChain()
        print "Default certificate name is: " + self.keyChain.getDefaultCertificateName().toUri()
        self.face.setCommandSigningInfo(commandSigningKeyChain, commandSigningKeyChain.getDefaultCertificateName())
        self.memoryContentCache.registerPrefix(identityName, self.onRegisterFailed, self.onDataNotFound)
    except SecurityException as e:
        print str(e)
        print "Cannot use default certificate, use created certificate in FilePrivateKeyStorage"
        self.face.setCommandSigningInfo(self.keyChain, self.certificateName)
        self.memoryContentCache.registerPrefix(identityName, self.onRegisterFailed, self.onDataNotFound)

    consumerKeyName = IdentityCertificate.certificateNameToPublicKeyName(self.certificateName)
    consumerCertificate = identityStorage.getCertificate(self.certificateName)
    self.consumer = Consumer(
        face, self.keyChain, self.groupName, identityName,
        Sqlite3ConsumerDb(self.databaseFilePath))

    # TODO: Read the private key to decrypt d-key...this may or may not be ideal
    base64Content = None
    with open(privateKeyStorage.nameTransform(consumerKeyName.toUri(), ".pri")) as keyFile:
        print privateKeyStorage.nameTransform(consumerKeyName.toUri(), ".pri")
        base64Content = keyFile.read()
        #print base64Content
    der = Blob(base64.b64decode(base64Content), False)
    self.consumer.addDecryptionKey(consumerKeyName, der)

    # Serve our own certificate so the group manager can fetch it.
    self.memoryContentCache.add(consumerCertificate)

    # Ask the group manager to grant this identity read access.
    accessRequestInterest = Interest(
        Name(self.groupName).append("read_access_request").append(
            self.certificateName).appendVersion(int(time.time())))
    self.face.expressInterest(accessRequestInterest, self.onAccessRequestData, self.onAccessRequestTimeout)
    print "Access request interest name: " + accessRequestInterest.getName().toUri()

    self._tasks = dict()
    return
def __init__(self, face):
    # Test consumer setup (Python 2): create a fresh consumer database,
    # build a keychain/identity, load the consumer decryption key, serve
    # our certificate, and send a read-access request to the group manager.
    # Set up face
    self.face = face

    # Start from a clean consumer DB each run.
    self.databaseFilePath = "policy_config/test_consumer.db"
    try:
        os.remove(self.databaseFilePath)
    except OSError:
        # no such file
        pass

    # Group whose encrypted content this consumer reads.
    self.groupName = Name("/org/openmhealth/haitao")

    # Set up the keyChain.
    identityStorage = BasicIdentityStorage()
    privateKeyStorage = FilePrivateKeyStorage()
    self.keyChain = KeyChain(
        IdentityManager(identityStorage, privateKeyStorage),
        NoVerifyPolicyManager())

    # Authorized identity
    identityName = Name("/org/openmhealth/dvu-python-3")
    # Unauthorized identity
    #identityName = Name("/org/openmhealth/dvu-python-1")

    self.certificateName = self.keyChain.createIdentityAndCertificate(
        identityName)
    self.face.setCommandSigningInfo(self.keyChain, self.certificateName)

    consumerKeyName = IdentityCertificate.certificateNameToPublicKeyName(
        self.certificateName)
    consumerCertificate = identityStorage.getCertificate(
        self.certificateName)
    self.consumer = Consumer(face, self.keyChain, self.groupName,
                             identityName,
                             Sqlite3ConsumerDb(self.databaseFilePath))

    # TODO: Read the private key to decrypt d-key...this may or may not be ideal
    base64Content = None
    with open(
            privateKeyStorage.nameTransform(consumerKeyName.toUri(),
                                            ".pri")) as keyFile:
        print privateKeyStorage.nameTransform(consumerKeyName.toUri(), ".pri")
        base64Content = keyFile.read()
        #print base64Content
    der = Blob(base64.b64decode(base64Content), False)
    self.consumer.addDecryptionKey(consumerKeyName, der)

    # Serve our own certificate so the group manager can fetch it.
    self.memoryContentCache = MemoryContentCache(self.face)
    self.memoryContentCache.registerPrefix(identityName,
                                           self.onRegisterFailed,
                                           self.onDataNotFound)
    self.memoryContentCache.add(consumerCertificate)

    # Ask the group manager to grant this identity read access.
    accessRequestInterest = Interest(
        Name(self.groupName).append("read_access_request").append(
            self.certificateName).appendVersion(int(time.time())))
    self.face.expressInterest(accessRequestInterest,
                              self.onAccessRequestData,
                              self.onAccessRequestTimeout)
    print "Access request interest name: " + accessRequestInterest.getName(
    ).toUri()

    # Drive catalog consumption after access is granted.
    self.consumeCatalog = True
    return
class NaiveEDLParserAndPublisher(object):
    """
    Parses an EDL (Edit Decision List) file, resolves each event's clip to a
    Youtube URL, translates that URL via an external service, and publishes
    each event as signed NDN Data on a schedule driven by the event's
    destination start time.
    """

    def __init__(self, applyEDLAdjustment=True):
        """
        :param applyEDLAdjustment: if True, per-clip start-time offsets loaded
          by loadEDLAdjustment are subtracted from event source times
        """
        # prepare trollius logging
        self.prepareLogging()

        # event_id -> parsed event dict (see parse for the schema)
        self._events = dict()
        self._running = False
        self._applyEDLAdjustment = applyEDLAdjustment

        # NDN related variables
        self._loop = asyncio.get_event_loop()
        self._face = ThreadsafeFace(self._loop)

        # Use the system default key chain and certificate name to sign commands.
        self._keyChain = KeyChain()
        self._keyChain.setFace(self._face)
        self._certificateName = self._keyChain.getDefaultCertificateName()
        self._face.setCommandSigningInfo(self._keyChain, self._certificateName)
        self._memoryContentCache = MemoryContentCache(self._face)

        # Publishing parameters configuration
        self._translationServiceUrl = "http://the-archive.la/losangeles/services/get-youtube-url"
        self._namePrefixString = "/ndn/edu/ucla/remap/test/edl/"
        self._dataLifetime = 2000
        self._publishBeforeSeconds = 3
        self._translateBeforeSeconds = 60
        self._currentIdx = 0

        # Youtube related variables:
        # Channel Global song: UCSMJaKICZKXkpvr7Gj8pPUg
        # Channel Los Angeles: UCeuQoBBzMW6SWkxd8_1I8NQ
        # self._channelID = 'UCSMJaKICZKXkpvr7Gj8pPUg'
        self._channelID = "UCSMJaKICZKXkpvr7Gj8pPUg"
        self._accessKey = "AIzaSyCe8t7PnmWjMKZ1gBouhP1zARpqNwHAs0s"
        # queryStr = 'https://www.googleapis.com/youtube/v3/videos?part=snippet,contentDetails,statistics,status&key=' + apiKey + '&id='
        # Video query example
        # https://www.googleapis.com/youtube/v3/videos?part=snippet,contentDetails,statistics,status&key=AIzaSyDUY_AX1iJQcwCW1mASEp5GcLtq1V9BM1Q&id=_ebELPKANxo
        # Channel query example
        # https://www.googleapis.com/youtube/v3/search?key=AIzaSyCe8t7PnmWjMKZ1gBouhP1zARpqNwHAs0s&channelId=UCSMJaKICZKXkpvr7Gj8pPUg&part=snippet,id&order=date&maxResults=20
        # lowercased clip title -> Youtube videoId
        self._videoUrlDict = dict()

        # clip name -> "H:M:S:frame" offset string (see loadEDLAdjustment)
        self._edlAdjustmentDict = dict()
        return

    def getClipUrlOAuth(self):
        """Build the title->videoId dict via the (external) OAuth channel API."""
        self._videoUrlDict = dict(
            (k.lower(), v) for k, v in getAllVideosFromChannel().iteritems())

    # Old getClipUrl function that looks at the public Youtube channel
    # without using Python API
    def getClipUrl(self, nextPageToken=None):
        """
        Query the public Youtube search API for the configured channel and
        fill self._videoUrlDict, recursing through result pages via
        nextPageToken.
        """
        options = {"part": "snippet,id", "order": "date", "maxResults": "20"}
        if nextPageToken is not None:
            options["pageToken"] = nextPageToken
        prefix = "https://www.googleapis.com/youtube/v3/search?"
        queryUrl = prefix + "key=" + self._accessKey + "&channelId=" + self._channelID
        for item in options:
            queryUrl += "&" + item + "=" + options[item]
        result = json.loads(urllib.urlopen(queryUrl).read())
        for item in result["items"]:
            if "snippet" in item and "id" in item and "videoId" in item["id"]:
                self._videoUrlDict[item["snippet"]["title"].lower()] = item["id"]["videoId"]
            else:
                print("Unexpected JSON from youtube channel query")
        if "nextPageToken" in result:
            self.getClipUrl(result["nextPageToken"])
        else:
            if __debug__:
                print("Building videoUrl dict finished; number of entries: " +
                      str(len(self._videoUrlDict)))
                # for item in self._videoUrlDict:
                #     print("* " + item)
        return

    def parse(self, fileName):
        """
        Parse the EDL file into self._events.

        An event line follows a blank (or whitespace-only) line; subsequent
        lines until the next blank line are metadata ("* FROM CLIP NAME: ..."
        identifies the clip; anything else is accumulated as "payload").
        Events whose EDL time adjustment is missing or invalid are dropped.
        """
        isEventBegin = False
        lastEventID = -1
        with open(fileName, "r") as edlFile:
            for line in edlFile:
                if isEventBegin:
                    components = line.split()
                    try:
                        eventID = int(components[0])
                    except ValueError:
                        print("Cannot cast " + components[0] + " to eventID")
                        continue
                    # We seem to have a fixed number of components here;
                    # reference: http://www.edlmax.com/maxguide.html
                    reelName = components[1]
                    channel = components[2]
                    trans = components[3]
                    # The four time codes are always the last four fields.
                    timeComponentsIdx = len(components) - 4
                    srcStartTime = components[timeComponentsIdx]
                    srcEndTime = components[timeComponentsIdx + 1]
                    dstStartTime = components[timeComponentsIdx + 2]
                    dstEndTime = components[timeComponentsIdx + 3]
                    self._events[eventID] = json.loads(
                        '{ \
                          "event_id": "%s", \
                          "reel_name": "%s", \
                          "channel": "%s", \
                          "trans": "%s", \
                          "src_start_time": "%s", \
                          "src_end_time": "%s", \
                          "dst_start_time": "%s", \
                          "dst_end_time": "%s", \
                          "src_url": "%s", \
                          "translated": "%s", \
                          "clipName": "%s", \
                          "ytPresent": "%s" \
                        }' % (
                            str(eventID),
                            reelName,
                            channel,
                            trans,
                            srcStartTime,
                            srcEndTime,
                            dstStartTime,
                            dstEndTime,
                            "none",
                            "none",
                            "n/a",
                            "n/a",
                        )
                    )
                    isEventBegin = False
                    lastEventID = eventID
                elif re.match(r"\s+", line) is not None or line == "":
                    # Blank separator line: the next line starts a new event.
                    isEventBegin = True
                elif lastEventID > 0:
                    # Skipping events that do not have right offset
                    # NOTE(review): this checks eventID (== lastEventID after
                    # a successful parse) — confirm intent.
                    if not eventID in self._events:
                        print("Line skipped because of missing start time adjustment")
                        continue
                    fromClipNameMatch = re.match(r"\* FROM CLIP NAME: ([^\n]*)\n", line)
                    if fromClipNameMatch is not None:
                        clipName = fromClipNameMatch.group(1).strip()
                        parsedClipName = clipName.lower().replace("_", " ").replace("-", " ")
                        if self._applyEDLAdjustment:
                            if clipName in self._edlAdjustmentDict:
                                startTimeAdjusted = self.getTimeMinus(
                                    self._edlAdjustmentDict[clipName].split(":"),
                                    self._events[eventID]["src_start_time"].split(":"),
                                )
                                endTimeAdjusted = self.getTimeMinus(
                                    self._edlAdjustmentDict[clipName].split(":"),
                                    self._events[eventID]["src_end_time"].split(":"),
                                )
                                self._events[eventID]["src_start_time"] = startTimeAdjusted
                                self._events[eventID]["src_end_time"] = endTimeAdjusted
                                # Skipping events that do not have right offset
                                if startTimeAdjusted == "" or endTimeAdjusted == "":
                                    print(
                                        clipName + " : " + startTimeAdjusted,
                                        " start time incorrect; event " + str(eventID) + " ignored",
                                    )
                                    del self._events[eventID]
                                    continue
                            else:
                                # Skipping events that do not have right offset
                                print(
                                    "Warning: EDL adjustment not found for "
                                    + clipName
                                    + "; event "
                                    + str(eventID)
                                    + " ignored"
                                )
                                del self._events[eventID]
                                continue
                        self._events[eventID]["clipName"] = parsedClipName
                        # We don't do audio (only .wav or .mp3) for now
                        if parsedClipName.endswith(".wav") or parsedClipName.endswith(".mp3"):
                            continue
                        else:
                            # Drop the file extension before the dict lookup.
                            parsedClipName = (" ").join(parsedClipName.split(".")[:-1])
                            # print(parsedClipName)
                        if parsedClipName in self._videoUrlDict:
                            # we assume one src_url from one FROM CLIP NAME for now
                            self._events[eventID]["src_url"] = (
                                "https://www.youtube.com/watch?v="
                                + self._videoUrlDict[parsedClipName]
                            )
                            self._events[eventID]["ytPresent"] = "YES"
                            print("src_url is " + self._events[eventID]["src_url"])
                        else:
                            self._events[eventID]["ytPresent"] = "NO"
                            print("Warning: file not found in Youtube channel: " + clipName)
                    else:
                        # Any other metadata line is kept verbatim as payload.
                        if "payload" not in self._events[eventID]:
                            self._events[eventID]["payload"] = [line]
                        else:
                            self._events[eventID]["payload"].append(line)

    @asyncio.coroutine
    def startPublishing(self):
        """
        Schedule translateUrl and publishData for every parsed event on the
        event loop, append a final synthetic "end" event, and schedule loop
        stop after the last event's duration has elapsed.
        """
        if len(self._events) == 0:
            return
        elif not self._running:
            self._memoryContentCache.registerPrefix(
                Name(self._namePrefixString), self.onRegisterFailed, self.onDataNotFound
            )
            startTime = time.time()
            latestEventTime = 0
            lastEventID = 0
            for event_id in sorted(self._events):
                timeStrs = self._events[event_id]["dst_start_time"].split(":")
                # Publish shortly before, and translate well before, the
                # event's destination start time.
                publishingTime = self.getScheduledTime(timeStrs, self._publishBeforeSeconds)
                translationTime = self.getScheduledTime(timeStrs, self._translateBeforeSeconds)
                if publishingTime > latestEventTime:
                    latestEventTime = publishingTime
                self._loop.call_later(translationTime, self.translateUrl, event_id)
                self._loop.call_later(publishingTime, self.publishData, event_id)
                lastEventID = event_id
            # append arbitrary 'end' data
            lastEventID = lastEventID + 1
            self._events[lastEventID] = json.loads(
                '{ \
                  "event_id": "%s", \
                  "src_url": "%s", \
                  "translated": "%s" \
                }' % (str(lastEventID), "end", "not-required")
            )
            startTime = self.getScheduledTime(
                self._events[lastEventID - 1]["src_start_time"].split(":"), 0)
            endTime = self.getScheduledTime(
                self._events[lastEventID - 1]["src_end_time"].split(":"), 0)
            print("scheduled end " + str(endTime - startTime) + " sec from now")
            self._loop.call_later(latestEventTime + 1, self.publishData, lastEventID)
            self._loop.call_later(latestEventTime + 2 + (endTime - startTime), self._loop.stop)
            self._running = True

    def translateUrl(self, idx):
        """
        Ask the translation service for a direct video URL for event idx and
        store it in src_url (the original goes to ori_url). If publishData
        already ran and marked the event "publish", publish immediately.
        """
        queryUrl = self._translationServiceUrl
        timeStrs = self._events[idx]["src_start_time"].split(":")
        # we don't have the video from Youtube
        if self._events[idx]["src_url"] == "none":
            # print("no video from Youtube")
            # we still publish the data even if src_url is "none",
            # to maintain consecutive sequence numbers
            self._events[idx]["translated"] = "non-existent"
            return
        serviceUrl = self._events[idx]["src_url"]  # + "&t=" + str(self.timeToSeconds(timeStrs)) + "s"
        values = {"url": serviceUrl, "fetchIfNotExist": "true"}
        data = urllibparse.urlencode(values)
        req = urllib.Request(queryUrl, data)
        # This synchronous request might block the execution of publishData; should be changed later
        response = urllib.urlopen(req)
        videoUrl = response.read()
        self._events[idx]["ori_url"] = serviceUrl
        self._events[idx]["src_url"] = videoUrl
        if self._events[idx]["translated"] == "publish":
            # We already missed the scheduled publishing time; should publish as soon as translation finishes
            self.publishData(idx)
        else:
            self._events[idx]["translated"] = "translated"
        return

    def publishData(self, idx):
        # Translation of the video URL has finished by the time of the publishData call;
        # if not, we set translated to "publish"; this is data race free since
        # translateUrl and publishData are scheduled in the same thread
        if self._events[idx]["translated"] != "none":
            # Order published events sequence numbers by start times in destination
            data = Data(Name(self._namePrefixString + str(self._currentIdx)))
            data.setContent(json.dumps(self._events[idx]))
            data.getMetaInfo().setFreshnessPeriod(self._dataLifetime)
            self._keyChain.sign(data, self._certificateName)
            self._memoryContentCache.add(data)
            self._currentIdx += 1
            if __debug__:
                eventId = str(self._events[idx]["event_id"])
                channel = str(self._events[idx]["channel"])
                srcUrl = str(self._events[idx]["src_url"])
                clipName = str(self._events[idx]["clipName"])
                ytPresent = str(self._events[idx]["ytPresent"])
                clipStartTime = str(self._events[idx]["dst_start_time"])
                clipEndTime = str(self._events[idx]["dst_end_time"])
                print(
                    str(time.time()) + " Added event [" + eventId + "-" + channel
                    + "|" + clipName + " YT:" + ytPresent + " " + srcUrl[0:30]
                    + "... " + clipStartTime + "-" + clipEndTime + "] ("
                    + data.getName().toUri() + ")"
                )
        else:
            self._events[idx]["translated"] = "publish"

    def timeToSeconds(self, timeStrs):
        """Convert ["H", "M", "S", ...] strings to total seconds."""
        seconds = int(timeStrs[2])
        minutes = int(timeStrs[1])
        hours = int(timeStrs[0])
        ret = hours * 3600 + minutes * 60 + seconds
        return ret

    def getTimeMinus(self, timeStrs1, timeStrs2):
        """
        Return timeStrs2 - timeStrs1 as "H:M:S:frame" (components
        [H, M, S, frame]); borrows assume a 30 fps frame rate. Returns ""
        when the result is negative or implausibly large (hours or minutes
        difference above 1), which callers treat as "drop this event".
        """
        frameNumber = int(timeStrs1[3])
        seconds = int(timeStrs1[2])
        minutes = int(timeStrs1[1])
        hours = int(timeStrs1[0])
        frameNumber2 = int(timeStrs2[3]) - frameNumber
        seconds2 = int(timeStrs2[2]) - seconds
        minutes2 = int(timeStrs2[1]) - minutes
        hours2 = int(timeStrs2[0]) - hours
        if frameNumber2 < 0:
            # frame rate assumption
            frameNumber2 = 30 + frameNumber2
            seconds2 = seconds2 - 1
        if seconds2 < 0:
            seconds2 = 60 + seconds2
            minutes2 = minutes2 - 1
        if minutes2 < 0:
            minutes2 = 60 + minutes2
            hours2 = hours2 - 1
        if hours2 < 0:
            print("Warning: time minus smaller than 0")
            return ""
        # Arbitrary guard of start times that are off
        if hours2 > 1 or minutes2 > 1:
            return ""
        return ":".join([str(hours2), str(minutes2), str(seconds2), str(frameNumber2)])

    def getScheduledTime(self, timeStrs, beforeSeconds):
        """
        Seconds-from-now delay for an event at timeStrs, shifted earlier by
        beforeSeconds; clamped at 0 (never scheduled in the past).
        """
        frameNumber = int(timeStrs[3])
        seconds = int(timeStrs[2])
        minutes = int(timeStrs[1])
        hours = int(timeStrs[0])
        ret = hours * 3600 + minutes * 60 + seconds - beforeSeconds
        return 0 if ret < 0 else ret

    def onRegisterFailed(self, prefix):
        raise RuntimeError("Register failed for prefix", prefix.toUri())

    def onDataNotFound(self, prefix, interest, face, interestFilterId, filter):
        # Cache miss is expected before an event is published; stay quiet.
        # print('Data not found for interest: ' + interest.getName().toUri())
        return

    #############################
    # Logging
    #############################
    def prepareLogging(self):
        """Configure a DEBUG logger with an INFO console handler."""
        self.log = logging.getLogger(str(self.__class__))
        self.log.setLevel(logging.DEBUG)
        logFormat = "%(asctime)-15s %(name)-20s %(funcName)-20s (%(levelname)-8s):\n\t%(message)s"
        self._console = logging.StreamHandler()
        self._console.setFormatter(logging.Formatter(logFormat))
        self._console.setLevel(logging.INFO)
        # without this, a lot of ThreadsafeFace errors get swallowed up
        logging.getLogger("trollius").addHandler(self._console)
        self.log.addHandler(self._console)

    def setLogLevel(self, level):
        """
        Set the log level that will be output to standard error
        :param level: A log level constant defined in the logging module
          (e.g. logging.INFO)
        """
        self._console.setLevel(level)

    def getLogger(self):
        """
        :return: The logger associated with this node
        :rtype: logging.Logger
        """
        return self.log

    ############################
    def loadEDLAdjustment(self, csvFile):
        """
        Load per-clip time adjustments from a CSV file: column 3 is the clip
        name, column 1 the offset string — TODO confirm column meaning
        against the producing tool.
        """
        with open(csvFile, "rb") as csvfile:
            reader = csv.reader(csvfile, delimiter=",", quotechar="|")
            for row in reader:
                self._edlAdjustmentDict[row[3]] = row[1]
class TestGroupManager(object):
    """
    NAC group manager test harness: answers read-access requests by fetching
    the requester's certificate and adding it as a group member, and
    publishes group E-keys / member-encrypted D-keys both locally and into a
    repo.
    """

    def __init__(self, face, groupManagerName, dataType, readAccessName,
                 dKeyDatabaseFilePath):
        """
        :param face: the Face used for all NDN communication
        :param groupManagerName: the group's name prefix (also the manager's identity)
        :param dataType: the data type name passed through to GroupManager
        :param readAccessName: prefix on which read-access requests arrive
        :param dKeyDatabaseFilePath: D-key sqlite database path (recreated each run)
        """
        # Set up face
        self.face = face
        #self.loop = eventLoop

        # Set up the keyChain.
        identityStorage = MemoryIdentityStorage()
        privateKeyStorage = MemoryPrivateKeyStorage()
        self.keyChain = KeyChain(
            IdentityManager(identityStorage, privateKeyStorage),
            NoVerifyPolicyManager())
        self.certificateName = self.keyChain.createIdentityAndCertificate(
            groupManagerName)
        self.face.setCommandSigningInfo(self.keyChain, self.certificateName)

        # Fresh D-key database on every run; ignore "no such file".
        self.dKeyDatabaseFilePath = dKeyDatabaseFilePath
        try:
            os.remove(self.dKeyDatabaseFilePath)
        except OSError:
            # no such file
            pass

        self.manager = GroupManager(
            groupManagerName, dataType,
            Sqlite3GroupManagerDb(self.dKeyDatabaseFilePath), 2048, 1,
            self.keyChain)

        # Serve group keys under <groupManagerName>/READ.
        self.memoryContentCache = MemoryContentCache(self.face)
        self.memoryContentCache.registerPrefix(
            Name(groupManagerName).append("READ"), self.onRegisterFailed,
            self.onDataNotFound)
        self.face.registerPrefix(readAccessName, self.onAccessInterest,
                                 self.onAccessTimeout)
        self.updateGroupKeys = False
        return

    def onAccessInterest(self, prefix, interest, face, interestFilterId, filter):
        """
        Handle a read-access request: extract the requester's certificate
        name from the interest (components after the first 4, minus the
        trailing version) and fetch that certificate.
        """
        print "On Access request interest: " + interest.getName().toUri()
        certInterest = Interest(interest.getName().getSubName(4))
        certInterest.setName(certInterest.getName().getPrefix(-1))
        certInterest.setInterestLifetimeMilliseconds(2000)
        self.face.expressInterest(
            certInterest,
            lambda memberInterest, memberData: self.onMemberCertificateData(
                memberInterest, memberData, interest),
            lambda memberInterest: self.onMemberCertificateTimeout(
                memberInterest, interest))
        print "Retrieving member certificate: " + certInterest.getName().toUri()
        return

    def onAccessTimeout(self, prefix):
        # Passed as registerPrefix's onRegisterFailed callback despite the
        # name — it reports a failed prefix registration.
        print "Prefix registration failed: " + prefix.toUri()
        return

    def onRepoData(self, interest, data):
        # Repo insertion command acknowledged; nothing to do.
        #print "received repo data: " + interest.getName().toUri()
        return

    def onRepoTimeout(self, interest):
        # Repo insertion command timed out; best-effort, so ignore.
        #print "repo command times out: " + interest.getName().getPrefix(-1).toUri()
        return

    def setManager(self):
        """Install "schedule1": daily 0-24h access throughout October 2016."""
        schedule1 = Schedule()
        interval11 = RepetitiveInterval(
            Schedule.fromIsoString("20161001T000000"),
            Schedule.fromIsoString("20161031T000000"), 0, 24, 1,
            RepetitiveInterval.RepeatUnit.DAY)
        schedule1.addWhiteInterval(interval11)
        self.manager.addSchedule("schedule1", schedule1)

    def onMemberCertificateData(self, interest, data, accessInterest):
        """Add the retrieved certificate as a member and answer "granted"."""
        print "Member certificate with name retrieved: " + data.getName(
        ).toUri() + "; member added to group!"
        self.manager.addMember("schedule1", data)
        # Flag that publishGroupKeys should be re-run for the new member.
        self.updateGroupKeys = True
        accessResponse = Data(accessInterest.getName())
        accessResponse.setContent("granted")
        self.face.putData(accessResponse)

    def onMemberCertificateTimeout(self, interest, accessInterest):
        # Retry indefinitely with a refreshed nonce.
        print "Member certificate interest times out: " + interest.getName(
        ).toUri()
        newInterest = Interest(interest)
        newInterest.refreshNonce()
        self.face.expressInterest(
            newInterest,
            lambda memberInterest, memberData: self.onMemberCertificateData(
                memberInterest, memberData, accessInterest),
            lambda memberInterest: self.onMemberCertificateTimeout(
                memberInterest, accessInterest))
        return

    def publishGroupKeys(self):
        """Generate and publish the group keys for a fixed time point."""
        timePoint1 = Schedule.fromIsoString("20161024T083000")
        result = self.manager.getGroupKey(timePoint1)

        # The first is group public key, E-key
        # The rest are group private keys encrypted with each member's public key, D-key
        for i in range(0, len(result)):
            self.memoryContentCache.add(result[i])
            self.initiateContentStoreInsertion(
                "/ndn/edu/ucla/remap/ndnfit/repo", result[i])
            print "Publish key name: " + str(
                i) + " " + result[i].getName().toUri()
        self.updateGroupKeys = False

    def onDataNotFound(self, prefix, interest, face, interestFilterId, filter):
        print "Data not found for interest: " + interest.getName().toUri()
        if interest.getExclude():
            print "Interest has exclude: " + interest.getExclude().toUri()
        return

    def onRegisterFailed(self, prefix):
        print "Prefix registration failed"
        return

    def initiateContentStoreInsertion(self, repoCommandPrefix, data):
        """Send a repo "insert" command interest for the given data's name."""
        fetchName = data.getName()
        parameter = repo_command_parameter_pb2.RepoCommandParameterMessage()
        # Add the Name.
        for i in range(fetchName.size()):
            parameter.repo_command_parameter.name.component.append(
                fetchName[i].getValue().toBytes())

        # Create the command interest.
        interest = Interest(
            Name(repoCommandPrefix).append("insert").append(
                Name.Component(ProtobufTlv.encode(parameter))))
        self.face.makeCommandInterest(interest)

        self.face.expressInterest(interest, self.onRepoData, self.onRepoTimeout)
def startPublishing(self):
    """
    One-time security and publishing setup for this aggregation node:
    build the keyChain from on-disk storage with a config-file trust policy,
    create/serve this node's certificate (optionally getting it signed by an
    external controller when DO_CERT_SETUP is set), then start one
    aggregation publisher per (dataType, aggregationType) pair found in
    self.conf.
    """
    # One-time security setup
    self.prepareLogging()

    privateKeyStorage = FilePrivateKeyStorage()
    identityStorage = BasicIdentityStorage()
    policyManager = ConfigPolicyManager(self._trustSchemaFile)

    self._keyChain = KeyChain(IdentityManager(identityStorage, privateKeyStorage), policyManager)
    self._certificateName = self._keyChain.createIdentityAndCertificate(self._identityName)

    print("My Identity name: " + self._identityName.toUri())
    print("My certificate name: " + self._certificateName.toUri())
    # NOTE: reaches into IdentityManager's private _identityStorage to get
    # the certificate even if it is not valid yet (the True argument).
    certificateData = self._keyChain.getIdentityManager()._identityStorage.getCertificate(self._certificateName, True)
    print("My certificate string: " + b64encode(certificateData.wireEncode().toBuffer()))
    # self._keyChain.getIdentityCertificate(self._certificateName).)

    self._loop = asyncio.get_event_loop()
    self._face = ThreadsafeFace(self._loop)
    self._keyChain.setFace(self._face)

    self._face.setCommandSigningInfo(self._keyChain, self._certificateName)
    self._memoryContentCache = MemoryContentCache(self._face)

    # We should only ask for cert to be signed upon the first run of a certain aggregator
    if DO_CERT_SETUP:
        # A self-signed certificate (key locator points back at our own key)
        # means the controller has not signed us yet.
        if (KeyLocator.getFromSignature(certificateData.getSignature()).getKeyName().equals(self._certificateName.getPrefix(-1))):
            # Need to configure for mini-ndn; aggregation node runs outside of mini-ndn first
            # so that signed cert get installed and mini-ndn won't ask for this again
            print("certificate " + self._certificateName.toUri() + " asking for signature")
            response = urllib2.urlopen("http://192.168.56.1:5000/bms-cert-hack?cert=" + b64encode(certificateData.wireEncode().toBuffer()) + "&cert_prefix=" + self._identityName.toUri() + '&subject_name=' + self._identityName.toUri()).read()

            signedCertData = Data()
            signedCertData.wireDecode(Blob(b64decode(response)))

            self._memoryContentCache.add(signedCertData)
            # Install the signed certificate into the local ndnsec keystore.
            cmdline = ['ndnsec-install-cert', '-']
            p = subprocess.Popen(cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
            # desanitize + sign in GET request
            cert, err = p.communicate(response)
            if p.returncode != 0:
                raise RuntimeError("ndnsec-install-cert error")
        else:
            self._memoryContentCache.add(certificateData)
    else:
        self._memoryContentCache.add(certificateData)

    dataNode = self.conf.getDataNode()
    childrenNode = self.conf.getChildrenNode()

    self._memoryContentCache.registerPrefix(Name(self._identityName), self.onRegisterFailed, self.onDataNotFound)
    # For each type of data, we refresh each type of aggregation according to the interval in the configuration
    for i in range(len(dataNode.subtrees)):
        dataType = dataNode.subtrees.keys()[i]
        aggregationParams = self.conf.getProducingParamsForAggregationType(dataNode.subtrees.items()[i][1])

        # Leaf node (no children): generate raw data locally.
        if childrenNode == None:
            self._dataQueue[dataType] = DataQueue(None, None, None)
            self.generateData(dataType, 2, 0)

        for aggregationType in aggregationParams:
            childrenList = OrderedDict()
            if childrenNode != None:
                # Collect, per child, its producing params for this
                # dataType/aggregationType combination (if the child has it).
                for j in range(len(childrenNode.subtrees)):
                    if dataType in childrenNode.subtrees.items()[j][1].subtrees['data'].subtrees:
                        if aggregationType in childrenNode.subtrees.items()[j][1].subtrees['data'].subtrees[dataType].subtrees:
                            childrenList[childrenNode.subtrees.items()[j][0]] = self.conf.getProducingParamsForAggregationType(childrenNode.subtrees.items()[j][1].subtrees['data'].subtrees[dataType])[aggregationType]

            self.startPublishingAggregation(aggregationParams[aggregationType], childrenList, dataType, aggregationType)
    return
class TestConsumer(object):
    """
    NAC test consumer for the /org/openmhealth/zhehao group: requests read
    access, then consumes either a time/location catalog (decrypted via the
    NAC Consumer) or a fixed range of per-minute data packets.
    """

    def __init__(self, face):
        """
        :param face: the Face used for all NDN communication
        """
        # Set up face
        self.face = face

        # Fresh consumer database on every run; ignore "no such file".
        self.databaseFilePath = "policy_config/test_consumer.db"
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            # no such file
            pass

        self.groupName = Name("/org/openmhealth/zhehao")

        # Set up the keyChain.
        identityStorage = BasicIdentityStorage()
        privateKeyStorage = FilePrivateKeyStorage()
        self.keyChain = KeyChain(
            IdentityManager(identityStorage, privateKeyStorage),
            NoVerifyPolicyManager())
        # Authorized identity
        identityName = Name("/org/openmhealth/dvu-python-3")
        # Unauthorized identity
        #identityName = Name("/org/openmhealth/dvu-python-1")
        self.certificateName = self.keyChain.createIdentityAndCertificate(identityName)

        self.face.setCommandSigningInfo(self.keyChain, self.certificateName)
        consumerKeyName = IdentityCertificate.certificateNameToPublicKeyName(self.certificateName)
        consumerCertificate = identityStorage.getCertificate(self.certificateName)
        self.consumer = Consumer(
            face, self.keyChain, self.groupName, identityName,
            Sqlite3ConsumerDb(self.databaseFilePath))

        # TODO: Read the private key to decrypt d-key...this may or may not be ideal
        # Load the private key from FilePrivateKeyStorage's on-disk file and
        # register it as this consumer's decryption key.
        base64Content = None
        with open(privateKeyStorage.nameTransform(consumerKeyName.toUri(), ".pri")) as keyFile:
            print privateKeyStorage.nameTransform(consumerKeyName.toUri(), ".pri")
            base64Content = keyFile.read()
            #print base64Content
        der = Blob(base64.b64decode(base64Content), False)
        self.consumer.addDecryptionKey(consumerKeyName, der)

        # Serve our own certificate so the group manager can fetch it.
        self.memoryContentCache = MemoryContentCache(self.face)
        self.memoryContentCache.registerPrefix(identityName, self.onRegisterFailed, self.onDataNotFound)
        self.memoryContentCache.add(consumerCertificate)

        # Ask the group manager for read access; version component makes the
        # request name unique.
        accessRequestInterest = Interest(Name(self.groupName).append("read_access_request").append(self.certificateName).appendVersion(int(time.time())))
        self.face.expressInterest(accessRequestInterest, self.onAccessRequestData, self.onAccessRequestTimeout)
        print "Access request interest name: " + accessRequestInterest.getName().toUri()

        # Start with the catalog; startConsuming branches on this flag.
        self.consumeCatalog = True
        return

    def onAccessRequestData(self, interest, data):
        """Access granted (response received): begin consuming."""
        print "Access request data: " + data.getName().toUri()
        print "Start consuming"
        self.startConsuming()
        return

    def onAccessRequestTimeout(self, interest):
        # Optimistic: assume the manager received the request even though the
        # response timed out, and try consuming anyway.
        print "Access request times out: " + interest.getName().toUri()
        print "Assuming certificate sent and D-key generated, start consuming"
        self.startConsuming()
        return

    def startConsuming(self):
        """Consume the catalog, or a fixed 60-minute range of data packets."""
        if self.consumeCatalog:
            contentName = Name("/org/openmhealth/zhehao/SAMPLE/fitness/physical_activity/time_location/catalog/20160620T080000")
            self.consumer.consume(contentName, self.onCatalogConsumeComplete, self.onConsumeFailed)
            print "Trying to consume: " + contentName.toUri()
        else:
            contentName = Name("/org/openmhealth/zhehao/SAMPLE/fitness/physical_activity/time_location/")
            dataNum = 60
            baseZFill = 3
            basetimeString = "20160620T080"
            # One packet per minute: 20160620T080000 ... 20160620T080059.
            for i in range(0, dataNum):
                timeString = basetimeString + str(i).zfill(baseZFill)
                timeFloat = Schedule.fromIsoString(timeString)
                self.consumer.consume(Name(contentName).append(timeString), self.onConsumeComplete, self.onConsumeFailed)
                print "Trying to consume: " + Name(contentName).append(timeString).toUri()

    def onDataNotFound(self, prefix, interest, face, interestFilterId, filter):
        print "Data not found for interest: " + interest.getName().toUri()
        return

    def onRegisterFailed(self, prefix):
        print "Prefix registration failed: " + prefix.toUri()
        return

    def onCatalogConsumeComplete(self, data, result):
        """
        Decrypted catalog received: treat it as a JSON array of timestamps
        (epoch seconds) and consume the data packet named by each.
        """
        print "Consume complete for catalog: " + data.getName().toUri()
        resultObject = json.loads(result.toRawStr())
        contentName = Name("/org/openmhealth/zhehao/SAMPLE/fitness/physical_activity/time_location/")
        for i in range(0, len(resultObject)):
            # Schedule.toIsoString expects milliseconds, entries are seconds.
            timeString = Schedule.toIsoString(int(resultObject[i]) * 1000)
            self.consumer.consume(Name(contentName).append(timeString), self.onConsumeComplete, self.onConsumeFailed)
            print "Trying to consume: " + Name(contentName).append(timeString).toUri()

    def onConsumeComplete(self, data, result):
        print "Consume complete for data name: " + data.getName().toUri()
        print result
        # Test the length of encrypted data
        # dataBlob = data.getContent()
        # dataContent = EncryptedContent()
        # dataContent.wireDecode(dataBlob)
        # encryptedData = dataContent.getPayload()
        # print len(encryptedData)

    # TODO: shouldn't this indicate the consumption of what has failed though
    def onConsumeFailed(self, code, message):
        print "Consume error " + str(code) + ": " + message
class TestConsumer(object):
    """
    NAC test consumer for the /org/openmhealth/haitao group. Unlike the
    zhehao variant, the catalog here is fetched with a plain (unencrypted)
    Interest and its content parsed directly; only the per-timestamp data
    packets go through the NAC Consumer.
    """

    def __init__(self, face):
        """
        :param face: the Face used for all NDN communication
        """
        # Set up face
        self.face = face

        # Fresh consumer database on every run; ignore "no such file".
        self.databaseFilePath = "policy_config/test_consumer.db"
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            # no such file
            pass

        self.groupName = Name("/org/openmhealth/haitao")

        # Set up the keyChain.
        identityStorage = BasicIdentityStorage()
        privateKeyStorage = FilePrivateKeyStorage()
        self.keyChain = KeyChain(
            IdentityManager(identityStorage, privateKeyStorage),
            NoVerifyPolicyManager())
        # Authorized identity
        identityName = Name("/org/openmhealth/dvu-python-3")
        # Unauthorized identity
        #identityName = Name("/org/openmhealth/dvu-python-1")
        self.certificateName = self.keyChain.createIdentityAndCertificate(
            identityName)

        self.face.setCommandSigningInfo(self.keyChain, self.certificateName)
        consumerKeyName = IdentityCertificate.certificateNameToPublicKeyName(
            self.certificateName)
        consumerCertificate = identityStorage.getCertificate(
            self.certificateName)
        self.consumer = Consumer(face, self.keyChain, self.groupName,
                                 identityName,
                                 Sqlite3ConsumerDb(self.databaseFilePath))

        # TODO: Read the private key to decrypt d-key...this may or may not be ideal
        # Load the private key from FilePrivateKeyStorage's on-disk file and
        # register it as this consumer's decryption key.
        base64Content = None
        with open(
                privateKeyStorage.nameTransform(consumerKeyName.toUri(),
                                                ".pri")) as keyFile:
            print privateKeyStorage.nameTransform(consumerKeyName.toUri(), ".pri")
            base64Content = keyFile.read()
            #print base64Content
        der = Blob(base64.b64decode(base64Content), False)
        self.consumer.addDecryptionKey(consumerKeyName, der)

        # Serve our own certificate so the group manager can fetch it.
        self.memoryContentCache = MemoryContentCache(self.face)
        self.memoryContentCache.registerPrefix(identityName,
                                               self.onRegisterFailed,
                                               self.onDataNotFound)
        self.memoryContentCache.add(consumerCertificate)

        # Ask the group manager for read access; version component makes the
        # request name unique.
        accessRequestInterest = Interest(
            Name(self.groupName).append("read_access_request").append(
                self.certificateName).appendVersion(int(time.time())))
        self.face.expressInterest(accessRequestInterest,
                                  self.onAccessRequestData,
                                  self.onAccessRequestTimeout)
        print "Access request interest name: " + accessRequestInterest.getName(
        ).toUri()

        # Start with the catalog; startConsuming branches on this flag.
        self.consumeCatalog = True
        return

    def onAccessRequestData(self, interest, data):
        """Access granted (response received): begin consuming."""
        print "Access request data: " + data.getName().toUri()
        print "Start consuming"
        self.startConsuming()
        return

    def onAccessRequestTimeout(self, interest):
        # Optimistic: assume the manager received the request even though the
        # response timed out, and try consuming anyway.
        print "Access request times out: " + interest.getName().toUri()
        print "Assuming certificate sent and D-key generated, start consuming"
        self.startConsuming()
        return

    def startConsuming(self):
        """Fetch the catalog with a plain Interest, or consume a fixed range."""
        if self.consumeCatalog:
            contentName = Name(
                "/org/openmhealth/haitao/SAMPLE/fitness/physical_activity/time_location/catalog/20161024T213400"
            )
            catalogInterest = Interest(contentName)
            self.face.expressInterest(catalogInterest,
                                      self.onCatalogConsumeComplete,
                                      self.onCatalogConsumeFailed)
            # self.consumer.consume(contentName, self.onCatalogConsumeComplete, self.onConsumeFailed)
            print "Trying to consume: " + contentName.toUri()
        else:
            contentName = Name(
                "/org/openmhealth/haitao/SAMPLE/fitness/physical_activity/time_location/"
            )
            dataNum = 60
            baseZFill = 3
            basetimeString = "20160620T080"
            # One packet per minute: 20160620T080000 ... 20160620T080059.
            for i in range(0, dataNum):
                timeString = basetimeString + str(i).zfill(baseZFill)
                timeFloat = Schedule.fromIsoString(timeString)
                self.consumer.consume(
                    Name(contentName).append(timeString),
                    self.onConsumeComplete, self.onConsumeFailed)
                print "Trying to consume: " + Name(contentName).append(
                    timeString).toUri()

    def onDataNotFound(self, prefix, interest, face, interestFilterId, filter):
        print "Data not found for interest: " + interest.getName().toUri()
        return

    def onRegisterFailed(self, prefix):
        print "Prefix registration failed: " + prefix.toUri()
        return

    def onCatalogConsumeComplete(self, interest, data):
        """
        Catalog Data received (plain onData callback, so the arguments are
        (interest, data)): parse the content as a JSON array of ISO time
        strings and consume the data packet named by each.
        """
        print "Consume complete for catalog: " + data.getName().toUri()
        resultObject = json.loads(data.getContent().toRawStr())
        print data.getContent().toRawStr()
        contentName = Name(
            "/org/openmhealth/haitao/SAMPLE/fitness/physical_activity/time_location/"
        )
        for i in range(0, len(resultObject)):
            # Entries are already ISO strings here, unlike the zhehao variant.
            # timeString = Schedule.toIsoString(int(resultObject[i]) * 1000)
            timeString = resultObject[i]
            self.consumer.consume(
                Name(contentName).append(timeString), self.onConsumeComplete,
                self.onConsumeFailed)
            print "Trying to consume: " + Name(contentName).append(
                timeString).toUri()

    def onCatalogConsumeFailed(self, interest):
        # Timeout callback for the plain catalog Interest.
        print "Data request times out: " + interest.getName().toUri()
        return

    def onConsumeComplete(self, data, result):
        print "Consume complete for data name: " + data.getName().toUri()
        print result
        # Test the length of encrypted data
        # dataBlob = data.getContent()
        # dataContent = EncryptedContent()
        # dataContent.wireDecode(dataBlob)
        # encryptedData = dataContent.getPayload()
        # print len(encryptedData)

    # TODO: shouldn't this indicate the consumption of what has failed though
    def onConsumeFailed(self, code, message):
        print "Consume error " + str(code) + ": " + message
class Bootstrap(object):
    """
    Create a Bootstrap object. Bootstrap object provides interface for setting up KeyChain,
    default certificate name; (as a producer) requesting publishing authorization from
    controller; and (as a consumer) keeping track of changes

    :param face: the face for communicating with a local / remote forwarder
    :type face: ThreadsafeFace
    TODO: support Face as well as ThreadsafeFace
    """
    def __init__(self, face):
        # These are filled in by setupDefaultIdentityAndRoot.
        self._defaultIdentity = None
        self._defaultCertificateName = None
        self._controllerName = None
        self._controllerCertificate = None
        self._applicationName = ""
        self._identityManager = IdentityManager(BasicIdentityStorage(), FilePrivateKeyStorage())
        self._policyManager = ConfigPolicyManager()
        # Bootstrap trust rule: accept data signed by a hierarchically higher
        # identity; replaced once a real trust schema is fetched and verified.
        self._policyManager.config.read("validator \n \
{ \n \
  rule \n \
  { \n \
    id \"initial rule\" \n \
    for data \n \
    checker \n \
    { \n \
      type hierarchical \n \
    } \n \
  } \n \
}", "initial-schema")

        # keyChain is what we return to the application after successful setup
        # TODO: should we separate keyChain from internal KeyChain used to verify trust schemas?
        self._keyChain = KeyChain(self._identityManager, self._policyManager)

        self._face = face
        # setFace for keyChain or else it won't be able to express interests for certs
        self._keyChain.setFace(self._face)
        # Serves this node's own certificate to anyone who asks.
        self._certificateContentCache = MemoryContentCache(face)
        # Per-application trust schema state, keyed by namespace URI; values are
        # dicts with keys "following", "version", "is-initial", and optionally
        # "trust-schema" / "pending-schema".
        self._trustSchemas = dict()

    ###############################################
    # Initial keyChain and defaultCertificate setup
    ###############################################
    def setupDefaultIdentityAndRoot(self, defaultIdentityOrFileName, signerName = None, onSetupComplete = None, onSetupFailed = None):
        """
        Sets up the keyChain, default key name and certificate name according to given
        configuration. If successful, this KeyChain and default certificate name will be
        returned to the application, which can be passed to instances like Consumer, Discovery, etc

        :param defaultIdentityOrFileName: if str, the name of the configuration file; if Name,
          the default identity name of this IoT node. The node will use the default keys and
          certificate of that identity name.
        :type defaultIdentityOrFileName: Name or str
        :param signerName: (optional) the expected signing identity of the certificate
        :type signerName: Name
        :param onSetupComplete: (optional) onSetupComplete(Name, KeyChain) will be called if
          set up's successful
        :type onSetupComplete: function object
        :param onSetupFailed: (optional) onSetupFailed(msg) will be called if setup fails
        :type onSetupFailed: function object
        """
        def helper(identityName, signerName):
            # Resolve default cert/key for the identity; everything below
            # reports failure through onSetupFailed rather than raising.
            try:
                self._defaultIdentity = identityName
                self._defaultCertificateName = self._identityManager.getDefaultCertificateNameForIdentity(self._defaultIdentity)
                self._defaultKeyName = self._identityManager.getDefaultKeyNameForIdentity(identityName)
            except SecurityException:
                msg = "Identity " + identityName.toUri() + " in configuration does not exist. Please configure the device with this identity first."
                if onSetupFailed:
                    onSetupFailed(msg)
                return
            if not self._defaultCertificateName:
                msg = "Unable to get default certificate name for identity " + identityName.toUri() + ". Please configure the device with this identity first."
                if onSetupFailed:
                    onSetupFailed(msg)
                return
            if not self._defaultKeyName:
                msg = "Unable to get default key name for identity " + identityName.toUri() + ". Please configure the device with this identity first."
                if onSetupFailed:
                    onSetupFailed(msg)
                return
            # Note we'll not be able to issue face commands before this point
            self._face.setCommandSigningInfo(self._keyChain, self._defaultCertificateName)
            # Serve our own certificate
            self._certificateContentCache.registerPrefix(Name(self._defaultCertificateName).getPrefix(-1), self.onRegisterFailed)
            self._certificateContentCache.add(self._keyChain.getCertificate(self._defaultCertificateName))
            # The controller is whoever signed our certificate.
            actualSignerName = self._keyChain.getCertificate(self._defaultCertificateName).getSignature().getKeyLocator().getKeyName()
            if not signerName:
                print "Deriving from " + actualSignerName.toUri() + " for controller name"
            else:
                if signerName and actualSignerName.toUri() != signerName.toUri():
                    msg = "Configuration signer names mismatch: expected " + signerName.toUri() + "; got " + actualSignerName.toUri()
                    print msg
                    # NOTE(review): setup continues after the mismatch callback
                    # — confirm whether it should return here instead.
                    if onSetupFailed:
                        onSetupFailed(msg)
            self._controllerName = self.getIdentityNameFromCertName(actualSignerName)
            print "Controller name: " + self._controllerName.toUri()
            try:
                self._controllerCertificate = self._keyChain.getCertificate(self._identityManager.getDefaultCertificateNameForIdentity(self._controllerName))
                # TODO: this does not seem a good approach, implementation-wise and security implication
                self._policyManager._certificateCache.insertCertificate(self._controllerCertificate)
                if onSetupComplete:
                    onSetupComplete(Name(self._defaultCertificateName), self._keyChain)
            except SecurityException as e:
                # Controller cert not installed locally; fetch it over the network
                # with up to controllerCertRetries retransmissions.
                print "don't have controller certificate " + actualSignerName.toUri() + " yet"
                controllerCertInterest = Interest(Name(actualSignerName))
                controllerCertInterest.setInterestLifetimeMilliseconds(4000)
                controllerCertRetries = 3
                self._face.expressInterest(controllerCertInterest,
                  lambda interest, data: self.onControllerCertData(interest, data, onSetupComplete, onSetupFailed),
                  lambda interest: self.onControllerCertTimeout(interest, onSetupComplete, onSetupFailed, controllerCertRetries))
            return

        if isinstance(defaultIdentityOrFileName, basestring):
            # A configuration file name was given; read identity and signer from it.
            confObj = self.processConfiguration(defaultIdentityOrFileName)
            if "identity" in confObj:
                if confObj["identity"] == "default":
                    # TODO: handling the case where no default identity is present
                    defaultIdentity = self._keyChain.getDefaultIdentity()
                else:
                    defaultIdentity = Name(confObj["identity"])
            else:
                defaultIdentity = self._keyChain.getDefaultIdentity()
            # TODO: handling signature with direct bits instead of keylocator keyname
            if "signer" in confObj:
                if confObj["signer"] == "default":
                    signerName = None
                else:
                    signerName = Name(confObj["signer"])
            else:
                signerName = None
            # NOTE(review): signerName may be None here, in which case
            # .toUri() raises AttributeError — confirm and guard.
            print "Deriving from " + signerName.toUri() + " for controller name"
            helper(defaultIdentity, signerName)
        else:
            if isinstance(defaultIdentityOrFileName, Name):
                helper(defaultIdentityOrFileName, signerName)
            else:
                raise RuntimeError("Please call setupDefaultIdentityAndRoot with identity name and root key name")
        return

    def onControllerCertData(self, interest, data, onSetupComplete, onSetupFailed):
        """
        Received the controller's (self-signed) certificate: install it as a
        trust anchor, verify any pending trust schemas, and finish setup.
        """
        # TODO: verification rule for received self-signed cert.
        # So, if a controller comes masquerading in at this point with the right name,
        # it is problematic. Similar with ndn-pi's implementation
        self._controllerCertificate = IdentityCertificate(data)
        # insert root certificate so that we could verify initial trust schemas
        # TODO: this does not seem a good approach, implementation-wise and security implication
        self._keyChain.getPolicyManager()._certificateCache.insertCertificate(self._controllerCertificate)
        try:
            self._identityManager.addCertificate(self._controllerCertificate)
        except SecurityException as e:
            # Certificate probably already exists in the store; best-effort.
            print str(e)
        for schema in self._trustSchemas:
            # TODO: remove the concept of pending-schema
            if "pending-schema" in self._trustSchemas[schema]:
                # A schema arrived before the anchor did; verify it now.
                self._keyChain.verifyData(self._trustSchemas[schema]["pending-schema"], self.onSchemaVerified, self.onSchemaVerificationFailed)
        if onSetupComplete:
            onSetupComplete(Name(self._defaultCertificateName), self._keyChain)
        return

    def onControllerCertTimeout(self, interest, onSetupComplete, onSetupFailed, controllerCertRetries):
        """
        Controller certificate fetch timed out: retransmit with a fresh nonce
        until controllerCertRetries is exhausted, then report failure.
        """
        print "Controller certificate interest times out"
        newInterest = Interest(interest)
        newInterest.refreshNonce()
        if controllerCertRetries == 0:
            if onSetupFailed:
                onSetupFailed("Controller certificate interest times out")
            else:
                print "Set up failed: controller certificate interest times out"
        else:
            self._face.expressInterest(newInterest,
              lambda interest, data: self.onControllerCertData(interest, data, onSetupComplete, onSetupFailed),
              lambda interest: self.onControllerCertTimeout(interest, onSetupComplete, onSetupFailed, controllerCertRetries - 1))
        return

    #########################################################
    # Handling application consumption (trust schema updates)
    #########################################################
    # TODO: if trust schema gets over packet size limit, segmentation
    def startTrustSchemaUpdate(self, appPrefix, onUpdateSuccess = None, onUpdateFailed = None):
        """
        Starts trust schema update for under an application prefix: initial interest asks
        for the rightMostChild, and later interests are sent with previous version excluded.
        Each verified trust schema will trigger onUpdateSuccess and update the
        ConfigPolicyManager for the keyChain in this instance, and unverified ones will
        trigger onUpdateFailed.

        The keyChain and trust anchor should be set up using setupDefaultIdentityAndRoot
        before calling this method.

        :param appPrefix: the prefix to ask trust schema for. (interest name: /<prefix>/_schema)
        :type appPrefix: Name
        :param onUpdateSuccess: (optional) onUpdateSuccess(trustSchemaStr, isInitial) is
          called when update succeeds
        :type onUpdateSuccess: function object
        :param onUpdateFailed: (optional) onUpdateFailed(msg) is called when update fails
        :type onUpdateFailed: function object
        """
        namespace = appPrefix.toUri()
        if namespace in self._trustSchemas:
            if self._trustSchemas[namespace]["following"] == True:
                print "Already following trust schema under this namespace!"
                return
            self._trustSchemas[namespace]["following"] = True
        else:
            self._trustSchemas[namespace] = {"following": True, "version": 0, "is-initial": True}
        # Child selector 1 asks the network for the rightmost (latest) version.
        initialInterest = Interest(Name(namespace).append("_schema"))
        initialInterest.setChildSelector(1)
        self._face.expressInterest(initialInterest,
          lambda interest, data: self.onTrustSchemaData(interest, data, onUpdateSuccess, onUpdateFailed),
          lambda interest: self.onTrustSchemaTimeout(interest, onUpdateSuccess, onUpdateFailed))
        return

    def stopTrustSchemaUpdate(self):
        """Placeholder; following a namespace cannot currently be stopped."""
        print "stopTrustSchemaUpdate not implemented"
        return

    def onSchemaVerified(self, data, onUpdateSuccess, onUpdateFailed):
        """
        A fetched trust schema verified against the controller certificate:
        record its version, install it into the policy manager, and express a
        long-lived interest (excluding this version) for the next update.
        Data name layout is /<namespace>/_schema/<version>.
        """
        print "trust schema verified: " + data.getName().toUri()
        version = data.getName().get(-1)
        namespace = data.getName().getPrefix(-2).toUri()
        if not (namespace in self._trustSchemas):
            print "unexpected: received trust schema for application namespace that's not being followed; malformed data name?"
            return

        if version.toVersion() <= self._trustSchemas[namespace]["version"]:
            msg = "Got out-of-date trust schema"
            print msg
            if onUpdateFailed:
                onUpdateFailed(msg)
            return

        self._trustSchemas[namespace]["version"] = version.toVersion()
        if "pending-schema" in self._trustSchemas[namespace] and self._trustSchemas[namespace]["pending-schema"].getName().toUri() == data.getName().toUri():
            # we verified a pending trust schema, don't need to keep that any more
            del self._trustSchemas[namespace]["pending-schema"]

        self._trustSchemas[namespace]["trust-schema"] = data.getContent().toRawStr()
        print self._trustSchemas[namespace]["trust-schema"]

        # TODO: what about trust schema for discovery, is discovery its own application?
        # Ask for the next version, excluding everything up to this one.
        newInterest = Interest(Name(data.getName()).getPrefix(-1))
        newInterest.setChildSelector(1)
        exclude = Exclude()
        exclude.appendAny()
        exclude.appendComponent(version)
        newInterest.setExclude(exclude)
        self._face.expressInterest(newInterest,
          lambda interest, data: self.onTrustSchemaData(interest, data, onUpdateSuccess, onUpdateFailed),
          lambda interest: self.onTrustSchemaTimeout(interest, onUpdateSuccess, onUpdateFailed))

        # Note: this changes the verification rules for root cert, future trust schemas as
        # well; ideally from the outside this doesn't have an impact, but do we want to
        # avoid this?
        # Per reset function in ConfigPolicyManager; For now we don't call reset as we
        # still want root cert in our certCache, instead of asking for it again (when we
        # want to verify) each time we update the trust schema
        self._policyManager.config = BoostInfoParser()
        self._policyManager.config.read(self._trustSchemas[namespace]["trust-schema"], "updated-schema")

        if onUpdateSuccess:
            onUpdateSuccess(data.getContent().toRawStr(), self._trustSchemas[namespace]["is-initial"])
        self._trustSchemas[namespace]["is-initial"] = False
        return

    def onSchemaVerificationFailed(self, data, reason, onUpdateSuccess, onUpdateFailed):
        """
        A fetched trust schema failed verification: re-request (excluding the
        last known-good version) after a 4-second delay so we don't hammer the
        network with requests for the same bad content.
        """
        print "trust schema verification failed: " + reason
        namespace = data.getName().getPrefix(-2).toUri()
        if not (namespace in self._trustSchemas):
            print "unexpected: received trust schema for application namespace that's not being followed; malformed data name?"
            return

        newInterest = Interest(Name(data.getName()).getPrefix(-1))
        newInterest.setChildSelector(1)
        exclude = Exclude()
        exclude.appendAny()
        exclude.appendComponent(Name.Component.fromVersion(self._trustSchemas[namespace]["version"]))
        newInterest.setExclude(exclude)
        # Don't immediately ask for potentially the same content again if verification fails
        self._face.callLater(4000, lambda : self._face.expressInterest(newInterest,
          lambda interest, data: self.onTrustSchemaData(interest, data, onUpdateSuccess, onUpdateFailed),
          lambda interest: self.onTrustSchemaTimeout(interest, onUpdateSuccess, onUpdateFailed)))
        return

    def onTrustSchemaData(self, interest, data, onUpdateSuccess, onUpdateFailed):
        """
        Received a trust schema data packet. If the controller certificate is
        not yet available, park the packet as "pending-schema" (verified later
        in onControllerCertData); otherwise verify it now.
        """
        print("Trust schema received: " + data.getName().toUri())
        namespace = data.getName().getPrefix(-2).toUri()
        # Process newly received trust schema
        if not self._controllerCertificate:
            # we don't yet have the root certificate fetched, so we store this cert for now
            print "Controller certificate not yet present, verify once it's in place"
            self._trustSchemas[namespace]["pending-schema"] = data
        else:
            # we verify the received trust schema, should we use an internal KeyChain instead?
            self._keyChain.verifyData(data,
              lambda data: self.onSchemaVerified(data, onUpdateSuccess, onUpdateFailed),
              lambda data, reason: self.onSchemaVerificationFailed(data, reason, onUpdateSuccess, onUpdateFailed))
        return

    def onTrustSchemaTimeout(self, interest, onUpdateSuccess, onUpdateFailed):
        """Trust schema interest timed out: retransmit indefinitely."""
        print("Trust schema interest times out: " + interest.getName().toUri())
        newInterest = Interest(interest)
        newInterest.refreshNonce()
        self._face.expressInterest(newInterest,
          lambda interest, data: self.onTrustSchemaData(interest, data, onUpdateSuccess, onUpdateFailed),
          lambda interest: self.onTrustSchemaTimeout(interest, onUpdateSuccess, onUpdateFailed))
        return

    ###############################################
    # Handling application producing authorizations
    ###############################################
    # Wrapper for sendAppRequest, fills in already configured defaultCertificateName
    def requestProducerAuthorization(self, dataPrefix, appName, onRequestSuccess = None, onRequestFailed = None):
        """
        Requests producing authorization for a data prefix: commandInterest is sent out to
        the controller, using
        /<controller identity>/requests/<encoded-application-parameters>/<signed-interest-suffix>
        where encoded-application-parameters is a ProtobufTlv encoding of
        {appPrefix, certificateName, appName}

        The keyChain, trust anchor and controller name should be set up using
        setupDefaultIdentityAndRoot before calling this method.

        :param dataPrefix: the prefix to request publishing for
        :type dataPrefix: Name
        :param appName: the application name to request publishing for
        :type appName: str
        :param onRequestSuccess: (optional) onRequestSuccess() is called when a valid
          response if received for the request
        :type onRequestSuccess: function object
        :param onRequestFailed: (optional) onRequestFailed(msg) is called when request fails
        :type onRequestFailed: function object
        """
        # TODO: update logic on this part, should the presence of default certificate name
        # be mandatory? And allow application developer to send app request to a configured
        # root/controller?
        if not self._defaultCertificateName:
            raise RuntimeError("Default certificate is missing! Try setupDefaultIdentityAndRoot first?")
        return self.sendAppRequest(self._defaultCertificateName, dataPrefix, appName, onRequestSuccess, onRequestFailed)

    def sendAppRequest(self, certificateName, dataPrefix, applicationName, onRequestSuccess, onRequestFailed):
        """
        Encode {certificateName, dataPrefix, applicationName} as a ProtobufTlv
        parameter component and send a signed command interest
        /<controller>/requests/<params> with up to 3 retransmissions.
        """
        message = AppRequestMessage()
        # Protobuf messages carry name components as escaped strings.
        for component in range(certificateName.size()):
            message.command.idName.components.append(certificateName.get(component).toEscapedString())
        for component in range(dataPrefix.size()):
            message.command.dataPrefix.components.append(dataPrefix.get(component).toEscapedString())
        message.command.appName = applicationName
        paramComponent = ProtobufTlv.encode(message)

        requestInterest = Interest(Name(self._controllerName).append("requests").append(paramComponent))
        requestInterest.setInterestLifetimeMilliseconds(4000)
        # Sign as a command interest so the controller can verify the requester.
        self._face.makeCommandInterest(requestInterest)
        appRequestTimeoutCnt = 3
        self._face.expressInterest(requestInterest,
          lambda interest, data : self.onAppRequestData(interest, data, onRequestSuccess, onRequestFailed),
          lambda interest : self.onAppRequestTimeout(interest, onRequestSuccess, onRequestFailed, appRequestTimeoutCnt))
        print "Application publish request sent: " + requestInterest.getName().toUri()
        return

    def onAppRequestData(self, interest, data, onRequestSuccess, onRequestFailed):
        """
        Verify the controller's response; its content is JSON with a "status"
        field ("200" means the publish request was granted).
        """
        print "Got application publishing request data"
        def onVerified(data):
            responseObj = json.loads(data.getContent().toRawStr())
            if responseObj["status"] == "200":
                if onRequestSuccess:
                    onRequestSuccess()
                else:
                    print "onSetupComplete"
            else:
                print "Verified content: " + data.getContent().toRawStr()
                if onRequestFailed:
                    onRequestFailed(data.getContent().toRawStr())
        def onVerifyFailed(data, reason):
            msg = "Application request response verification failed: " + reason
            print msg
            if onRequestFailed:
                onRequestFailed(msg)
        self._keyChain.verifyData(data, onVerified, onVerifyFailed)
        return

    # NOTE(review): parameters are named onSetupComplete/onSetupFailed here but
    # carry the onRequestSuccess/onRequestFailed callbacks from sendAppRequest.
    def onAppRequestTimeout(self, interest, onSetupComplete, onSetupFailed, appRequestTimeoutCnt):
        """
        App publish request timed out: retransmit with a fresh nonce until the
        retry budget is exhausted, then report failure.
        """
        print "Application publishing request times out"
        newInterest = Interest(interest)
        newInterest.refreshNonce()
        if appRequestTimeoutCnt == 0:
            if onSetupFailed:
                onSetupFailed("Application publishing request times out")
            else:
                print "Setup failed: application publishing request times out"
        else:
            self._face.expressInterest(newInterest,
              lambda interest, data : self.onAppRequestData(interest, data, onSetupComplete, onSetupFailed),
              lambda interest : self.onAppRequestTimeout(interest, onSetupComplete, onSetupFailed, appRequestTimeoutCnt - 1))
        return

    ###############################################
    # Helper functions
    ###############################################
    def onRegisterFailed(self, prefix):
        """Prefix registration failure handler."""
        # NOTE(review): registerPrefix failure callbacks receive a Name, which
        # has no getName(); this line likely raises — confirm against the
        # sibling classes, which use prefix.toUri() directly.
        print("register failed for prefix " + prefix.getName().toUri())
        return

    def processConfiguration(self, confFile):
        """
        Parse a Boost-info configuration file and return a dict with the
        "identity" and "signer" values under application/, or None if either
        key is missing.
        """
        config = BoostInfoParser()
        config.read(confFile)
        # TODO: handle missing configuration, refactor dict representation
        confObj = dict()
        try:
            confObj["identity"] = config["application/identity"][0].value
            confObj["signer"] = config["application/signer"][0].value
        except KeyError as e:
            msg = "Missing key in configuration: " + str(e)
            print msg
            return None
        return confObj

    def getIdentityNameFromCertName(self, certName):
        """
        Return the identity prefix of a certificate name (everything before
        the rightmost "KEY" component), or None if no "KEY" component exists.
        """
        i = certName.size() - 1
        idString = "KEY"
        while i >= 0:
            if certName.get(i).toEscapedString() == idString:
                break
            i -= 1
        if i < 0:
            print "Error: unexpected certName " + certName.toUri()
            return None
        return Name(certName.getPrefix(i))

    #################################
    # Getters and setters
    #################################
    def getKeyChain(self):
        """Return the KeyChain configured by setupDefaultIdentityAndRoot."""
        return self._keyChain
# NOTE(review): method of a class whose header lies outside this chunk
# (an aggregator node: uses self.conf, self._dataQueue, self.generateData, ...).
    def startPublishing(self):
        """
        One-time setup and publish loop start for this aggregator node:
        build the KeyChain and self-signed certificate for self._identityName,
        create the ThreadsafeFace and MemoryContentCache, optionally get the
        certificate signed by an external service (DO_CERT_SETUP), then start
        one aggregation publisher per (dataType, aggregationType) pair found
        in the configuration.
        """
        # One-time security setup
        self.prepareLogging()

        privateKeyStorage = FilePrivateKeyStorage()
        identityStorage = BasicIdentityStorage()
        policyManager = ConfigPolicyManager(self._trustSchemaFile)

        self._keyChain = KeyChain(IdentityManager(identityStorage, privateKeyStorage), policyManager)
        self._certificateName = self._keyChain.createIdentityAndCertificate(self._identityName)

        print("My Identity name: " + self._identityName.toUri())
        print("My certificate name: " + self._certificateName.toUri())
        certificateData = self._keyChain.getIdentityManager()._identityStorage.getCertificate(self._certificateName)
        print("My certificate string: " + b64encode(certificateData.wireEncode().toBuffer()))
        # self._keyChain.getIdentityCertificate(self._certificateName).)

        self._loop = asyncio.get_event_loop()
        self._face = ThreadsafeFace(self._loop)
        self._keyChain.setFace(self._face)

        self._face.setCommandSigningInfo(self._keyChain, self._certificateName)
        self._memoryContentCache = MemoryContentCache(self._face)

        # We should only ask for cert to be signed upon the first run of a certain aggregator
        if DO_CERT_SETUP:
            # Self-signed check: the cert's key locator equals our own key name
            # only when nobody else has signed it yet.
            if (KeyLocator.getFromSignature(certificateData.getSignature()).getKeyName().equals(self._certificateName.getPrefix(-1))):
                # Need to configure for mini-ndn; aggregation node runs outside of mini-ndn
                # first so that signed cert get installed and mini-ndn won't ask for this again
                print("certificate " + self._certificateName.toUri() + " asking for signature")
                response = urllib2.urlopen("http://192.168.56.1:5000/bms-cert-hack?cert=" + b64encode(certificateData.wireEncode().toBuffer()) + "&cert_prefix=" + self._identityName.toUri() + '&subject_name=' + self._identityName.toUri()).read()

                signedCertData = Data()
                signedCertData.wireDecode(Blob(b64decode(response)))

                self._memoryContentCache.add(signedCertData)
                cmdline = ['ndnsec-install-cert', '-']
                p = subprocess.Popen(cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
                # desanitize + sign in GET request
                cert, err = p.communicate(response)
                if p.returncode != 0:
                    raise RuntimeError("ndnsec-install-cert error")
                else:
                    # NOTE(review): serves the original (unsigned) certificate
                    # even though signedCertData was already added — confirm
                    # this is intended.
                    self._memoryContentCache.add(certificateData)
            else:
                self._memoryContentCache.add(certificateData)

        dataNode = self.conf.getDataNode()
        childrenNode = self.conf.getChildrenNode()

        self._memoryContentCache.registerPrefix(Name(self._identityName), self.onRegisterFailed, self.onDataNotFound)
        # For each type of data, we refresh each type of aggregation according to the
        # interval in the configuration
        # NOTE(review): dataNode.subtrees.keys()[i] / .items()[i] index the
        # views directly — Python 2 only.
        for i in range(len(dataNode.subtrees)):
            dataType = dataNode.subtrees.keys()[i]
            aggregationParams = self.conf.getProducingParamsForAggregationType(dataNode.subtrees.items()[i][1])

            if childrenNode == None:
                # Leaf node: generate raw data locally instead of aggregating children.
                self._dataQueue[dataType] = DataQueue(None, None, None)
                self.generateData(dataType, 2, 0)

            for aggregationType in aggregationParams:
                # Collect, per child, the production parameters for this
                # (dataType, aggregationType) pair, if the child publishes it.
                childrenList = OrderedDict()
                if childrenNode != None:
                    for j in range(len(childrenNode.subtrees)):
                        if dataType in childrenNode.subtrees.items()[j][1].subtrees['data'].subtrees:
                            if aggregationType in childrenNode.subtrees.items()[j][1].subtrees['data'].subtrees[dataType].subtrees:
                                childrenList[childrenNode.subtrees.items()[j][0]] = self.conf.getProducingParamsForAggregationType(childrenNode.subtrees.items()[j][1].subtrees['data'].subtrees[dataType])[aggregationType]

                self.startPublishingAggregation(aggregationParams[aggregationType], childrenList, dataType, aggregationType)
        return
class IotController(BaseNode):
    """
    The controller class has a few built-in commands:
        - listDevices: return the names and capabilities of all attached devices
        - certificateRequest: takes public key information and returns name of
            new certificate
        - updateCapabilities: should be sent periodically from IotNodes to update their
           command lists
        - addDevice: add a device based on HMAC
    It is unlikely that you will need to subclass this.
    """
    def __init__(self, nodeName, networkName, applicationDirectory = ""):
        super(IotController, self).__init__()

        self.deviceSuffix = Name(nodeName)
        self.networkPrefix = Name(networkName)
        # Full controller prefix = network prefix + device suffix.
        self.prefix = Name(self.networkPrefix).append(self.deviceSuffix)

        self._policyManager.setEnvironmentPrefix(self.networkPrefix)
        self._policyManager.setTrustRootIdentity(self.prefix)
        self._policyManager.setDeviceIdentity(self.prefix)
        self._policyManager.updateTrustRules()

        # the controller keeps a directory of capabilities->names
        self._directory = defaultdict(list)

        # keep track of who's still using HMACs
        # key is device serial, value is the HmacHelper
        self._hmacDevices = {}

        # our capabilities
        self._baseDirectory = {}

        # add the built-ins
        self._insertIntoCapabilities('listDevices', 'directory', False)
        self._insertIntoCapabilities('updateCapabilities', 'capabilities', True)

        self._directory.update(self._baseDirectory)

        # Set up application directory
        if applicationDirectory == "":
            applicationDirectory = os.path.expanduser('~/.ndn/iot/applications')
        self._applicationDirectory = applicationDirectory
        self._applications = dict()

    def _insertIntoCapabilities(self, commandName, keyword, isSigned):
        """Register one built-in command under the given directory keyword."""
        newUri = Name(self.prefix).append(Name(commandName)).toUri()
        self._baseDirectory[keyword] = [{'signed':isSigned, 'name':newUri}]

    def beforeLoopStart(self):
        """
        Pre-event-loop setup: ensure a root-signed certificate exists, trust
        our own root cert, register the controller prefix on a
        MemoryContentCache (serving the root cert and dispatching commands
        through _onCommandReceived), load applications, and schedule onStartup.
        """
        if not self._policyManager.hasRootSignedCertificate():
            # make one....
            self.log.warn('Generating controller certificate...')
            newKey = self._identityManager.generateRSAKeyPairAsDefault(self.prefix, isKsk=True)
            newCert = self._identityManager.selfSign(newKey)
            self._identityManager.addCertificateAsDefault(newCert)
        # Trusting root's own certificate upon each run
        # TODO: debug where application starts first and controller starts second,
        # application's interest cannot be verified
        self._rootCertificate = self._keyChain.getCertificate(self.getDefaultCertificateName())
        self._policyManager._certificateCache.insertCertificate(self._rootCertificate)

        self._memoryContentCache = MemoryContentCache(self.face)
        self.face.setCommandSigningInfo(self._keyChain, self.getDefaultCertificateName())
        # Unmatched interests under our prefix fall through to the command handler.
        self._memoryContentCache.registerPrefix(self.prefix, onRegisterFailed = self.onRegisterFailed, onRegisterSuccess = None, onDataNotFound = self._onCommandReceived)
        # Serve root certificate in our memoryContentCache
        self._memoryContentCache.add(self._rootCertificate)
        self.loadApplications()
        self.loop.call_soon(self.onStartup)

    ######
    # Initial configuration
    #######
    # TODO: deviceSuffix will be replaced by deviceSerial
    def _addDeviceToNetwork(self, deviceSerial, newDeviceSuffix, pin):
        """
        Pair a new device: remember an HmacHelper keyed by the shared PIN and
        send an HMAC-signed /home/configure interest carrying the network
        prefix, controller name and device suffix as a protobuf parameter.
        """
        h = HmacHelper(pin)
        self._hmacDevices[deviceSerial] = h

        d = DeviceConfigurationMessage()
        for source, dest in [(self.networkPrefix, d.configuration.networkPrefix),
                             (self.deviceSuffix, d.configuration.controllerName),
                             (newDeviceSuffix, d.configuration.deviceSuffix)]:
            for i in range(source.size()):
                component = source.get(i)
                dest.components.append(component.getValue().toRawStr())

        interestName = Name('/home/configure').append(Name(deviceSerial))
        encodedParams = ProtobufTlv.encode(d)
        interestName.append(encodedParams)
        interest = Interest(interestName)
        h.signInterest(interest)

        self.face.expressInterest(interest, self._deviceAdditionResponse, self._deviceAdditionTimedOut)

    def _deviceAdditionTimedOut(self, interest):
        """Pairing interest timed out: drop the device's HMAC state (no retry)."""
        # Interest name is /home/configure/<serial>/<params>; component 2 is the serial.
        deviceSerial = str(interest.getName().get(2).getValue())
        self.log.warn("Timed out trying to configure device " + deviceSerial)
        # don't try again
        self._hmacDevices.pop(deviceSerial)

    def _deviceAdditionResponse(self, interest, data):
        """Pairing response received: check its HMAC against the stored helper."""
        status = data.getContent().toRawStr()
        deviceSerial = str(interest.getName().get(2).getValue())
        hmacChecker = self._hmacDevices[deviceSerial]
        if (hmacChecker.verifyData(data)):
            self.log.info("Received {} from {}".format(status, deviceSerial))
        else:
            self.log.warn("Received invalid HMAC from {}".format(deviceSerial))

    ######
    # Certificate signing
    ######
    def _handleCertificateRequest(self, interest):
        """
        Extracts a public key name and key bits from a command interest name
        component. Generates a certificate if the request is verifiable.

        This expects an HMAC signed interest.
        """
        message = CertificateRequestMessage()
        commandParamsTlv = interest.getName().get(self.prefix.size()+1)
        ProtobufTlv.decode(message, commandParamsTlv.getValue())

        signature = HmacHelper.extractInterestSignature(interest)
        deviceSerial = str(signature.getKeyLocator().getKeyName().get(-1).getValue())

        response = Data(interest.getName())
        certData = None
        hmac = None
        try:
            hmac = self._hmacDevices[deviceSerial]
            if hmac.verifyInterest(interest):
                certData = self._createCertificateFromRequest(message)
                # remove this hmac; another request will require a new pin
                self._hmacDevices.pop(deviceSerial)
        except KeyError:
            self.log.warn('Received certificate request for device with no registered key')
        except SecurityException as e:
            self.log.warn('Could not create device certificate: ' + str(e))
        else:
            # Runs only when no exception was raised above.
            self.log.info('Creating certificate for device {}'.format(deviceSerial))

        if certData is not None:
            response.setContent(certData.wireEncode())
            response.getMetaInfo().setFreshnessPeriod(10000) # should be good even longer
        else:
            response.setContent("Denied")
        if hmac is not None:
            # HMAC-sign the response so the (not yet certified) device can verify it.
            hmac.signData(response)
        self.sendData(response, False)

    def _createCertificateFromRequest(self, message):
        """
        Generate an IdentityCertificate from the public key information given.
        Returns None when the key name is outside our network prefix.
        """
        # TODO: Verify the certificate was actually signed with the private key
        # matching the public key we are issuing a cert for!!

        keyComponents = message.command.keyName.components
        keyName = Name("/".join(keyComponents))

        self.log.debug("Key name: " + keyName.toUri())

        if not self._policyManager.getEnvironmentPrefix().match(keyName):
            # we do not issue certs for keys outside of our network
            return None

        keyDer = Blob(message.command.keyBits)
        keyType = message.command.keyType

        try:
            self._identityStorage.addKey(keyName, keyType, keyDer)
        except SecurityException as e:
            print(e)
            # assume this is due to already existing?
            pass

        certificate = self._identityManager._generateCertificateForKey(keyName)
        self._keyChain.sign(certificate, self.getDefaultCertificateName())
        # store it for later use + verification
        self._identityStorage.addCertificate(certificate)
        self._policyManager._certificateCache.insertCertificate(certificate)
        return certificate

    ######
    # Device Capabilities
    ######
    def _updateDeviceCapabilities(self, interest):
        """
        Take the received capabilities update interest and update our directory listings.
        """
        # we assume the sender is the one who signed the interest...
        signature = self._policyManager._extractSignature(interest)
        certificateName = signature.getKeyLocator().getKeyName()
        senderIdentity = IdentityCertificate.certificateNameToPublicKeyName(certificateName).getPrefix(-1)

        self.log.info('Updating capabilities for {}'.format(senderIdentity.toUri()))

        # get the params from the interest name
        messageComponent = interest.getName().get(self.prefix.size()+1)
        message = UpdateCapabilitiesCommandMessage()
        ProtobufTlv.decode(message, messageComponent.getValue())
        # we remove all the old capabilities for the sender
        tempDirectory = defaultdict(list)
        for keyword in self._directory:
            tempDirectory[keyword] = [cap for cap in self._directory[keyword]
                    if not senderIdentity.match(Name(cap['name']))]

        # then we add the ones from the message
        for capability in message.capabilities:
            capabilityPrefix = Name()
            for component in capability.commandPrefix.components:
                capabilityPrefix.append(component)
            commandUri = capabilityPrefix.toUri()
            if not senderIdentity.match(capabilityPrefix):
                self.log.error("Node {} tried to register another prefix: {} - ignoring update".format(
                    senderIdentity.toUri(),commandUri))
            else:
                for keyword in capability.keywords:
                    allUris = [info['name'] for info in tempDirectory[keyword]]
                    # NOTE(review): capabilityPrefix is a Name but allUris holds
                    # URI strings, so this membership test is always True —
                    # likely should be 'commandUri not in allUris'; confirm.
                    if capabilityPrefix not in allUris:
                        listing = {'signed':capability.needsSignature,
                                'name':commandUri}
                        tempDirectory[keyword].append(listing)
        self._directory = tempDirectory

    def _prepareCapabilitiesList(self, interestName):
        """
        Responds to a directory listing request with JSON
        """
        # Append a timestamp component so repeated listings have unique names.
        dataName = Name(interestName).append(Name.Component.fromNumber(int(time.time())))
        response = Data(dataName)

        response.setContent(json.dumps(self._directory))

        return response

    #####
    # Interest handling
    ####
    def _onCommandReceived(self, prefix, interest, face, interestFilterId, filter):
        """
        MemoryContentCache data-not-found handler: dispatch incoming interests
        to the built-in commands (listDevices, certificateRequest,
        updateCapabilities, requests) based on the component after our prefix.
        """
        interestName = interest.getName()

        #if it is a certificate name, serve the certificate
        # TODO: since we've memoryContentCache serving root cert now, this should no
        # longer be required
        try:
            if interestName.isPrefixOf(self.getDefaultCertificateName()):
                foundCert = self._identityManager.getCertificate(self.getDefaultCertificateName())
                self.log.debug("Serving certificate request")
                self.face.putData(foundCert)
                return
        except SecurityException as e:
            # We don't have this certificate, this is probably not a certificate request
            # TODO: this does not differentiate from certificate request but certificate
            # not exist; should update
            print(str(e))
            pass

        afterPrefix = interestName.get(prefix.size()).toEscapedString()
        if afterPrefix == "listDevices":
            #compose device list
            self.log.debug("Received device list request")
            response = self._prepareCapabilitiesList(interestName)
            self.sendData(response)
        elif afterPrefix == "certificateRequest":
            #build and sign certificate
            self.log.debug("Received certificate request")
            self._handleCertificateRequest(interest)
        elif afterPrefix == "updateCapabilities":
            # needs to be signed!
            self.log.debug("Received capabilities update")
            def onVerifiedCapabilities(interest):
                print("capabilities good")
                response = Data(interest.getName())
                response.setContent(str(time.time()))
                self.sendData(response)
                self._updateDeviceCapabilities(interest)
            self._keyChain.verifyInterest(interest, onVerifiedCapabilities, self.verificationFailed)
        elif afterPrefix == "requests":
            # application request to publish under some names received; need to be signed
            def onVerifiedAppRequest(interest):
                # TODO: for now, we automatically grant access to any valid signed interest
                print("verified! send response!")
                message = AppRequestMessage()
                ProtobufTlv.decode(message, interest.getName().get(prefix.size() + 1).getValue())
                certName = Name("/".join(message.command.idName.components))
                dataPrefix = Name("/".join(message.command.dataPrefix.components))
                appName = message.command.appName
                isUpdated = self.updateTrustSchema(appName, certName, dataPrefix, True)

                response = Data(interest.getName())
                if isUpdated:
                    response.setContent("{\"status\": 200, \"message\": \"granted, trust schema updated OK\" }")
                    self.log.info("Verified and granted application publish request")
                else:
                    response.setContent("{\"status\": 400, \"message\": \"not granted, requested publishing namespace already exists\" }")
                    self.log.info("Verified and but requested namespace already exists")
                self.sendData(response)
                return
            def onVerificationFailedAppRequest(interest):
                print("application request verify failed!")
                response = Data(interest.getName())
                response.setContent("{\"status\": 401, \"message\": \"command interest verification failed\" }")
                self.sendData(response)
            self.log.info("Received application request: " + interestName.toUri())
            #print("Verifying with trust schema: ")
            #print(self._policyManager.config)
            self._keyChain.verifyInterest(interest, onVerifiedAppRequest, onVerificationFailedAppRequest)
        else:
            print("Got interest unable to answer yet: " + interest.getName().toUri())
            if interest.getExclude():
                print("interest has exclude: " + interest.getExclude().toUri())
            # response = Data(interest.getName())
            # response.setContent("500")
            # response.getMetaInfo().setFreshnessPeriod(1000)
            # self.sendData(response)

    def onStartup(self):
        """Schedule the interactive console: menu display + stdin reader."""
        # begin taking add requests
        self.loop.call_soon(self.displayMenu)
        self.loop.add_reader(stdin, self.handleUserInput)

    def displayMenu(self):
        """Print the interactive menu and a prompt (no trailing newline)."""
        menuStr = "\n"
        menuStr += "P)air a new device with serial and PIN\n"
        menuStr += "D)irectory listing\n"
        menuStr += "E)xpress an interest\n"
        menuStr += "L)oad hosted applications (" + (self._applicationDirectory) + ")\n"
        menuStr += "Q)uit\n"

        print(menuStr)
        print ("> ", end="")
        stdout.flush()

    def listDevices(self):
        """Print the capability directory, then redisplay the menu."""
        menuStr = ''
        for capability, commands in self._directory.items():
            menuStr += '{}:\n'.format(capability)
            for info in commands:
                signingStr = 'signed' if info['signed'] else 'unsigned'
                menuStr += '\t{} ({})\n'.format(info['name'], signingStr)
        print(menuStr)
        self.loop.call_soon(self.displayMenu)

    def loadApplicationsMenuSelect(self):
        """Confirm with the user, then reload hosted applications (override)."""
        try:
            confirm = input('This will override existing trust schemas, continue? (Y/N): ').upper().startswith('Y')
            if confirm:
                self.loadApplications(override = True)
            else:
                print("Aborted")
        except KeyboardInterrupt:
            print("Aborted")
        finally:
            self.loop.call_soon(self.displayMenu)

    def onInterestTimeout(self, interest):
        """Timeout handler for user-expressed interests."""
        # NOTE(review): missing .format( — "'...'.interest" is attribute access
        # on a str literal and raises AttributeError; should be
        # '...'.format(interest.getName().toUri()). Confirm and fix.
        print('Interest timed out: {}'.interest.getName().toUri())

    def onDataReceived(self, interest, data):
        """Data handler for user-expressed interests: dump name and content."""
        print('Received data named: {}'.format(data.getName().toUri()))
        print('Contents:\n{}'.format(data.getContent().toRawStr()))

    # NOTE(review): chunk is truncated below — expressInterest continues past
    # this chunk (cut mid string literal); tokens kept verbatim.
    def expressInterest(self):
        try:
            interestName = input('Interest name: ')
            if len(interestName):
                toSign = input('Signed? 
(y/N): ').upper().startswith('Y') interest = Interest(Name(interestName)) interest.setInterestLifetimeMilliseconds(5000) interest.setChildSelector(1) if (toSign): self.face.makeCommandInterest(interest) self.face.expressInterest(interest, self.onDataReceived, self.onInterestTimeout) else: print("Aborted") except KeyboardInterrupt: print("Aborted") finally: self.loop.call_soon(self.displayMenu) def beginPairing(self): try: deviceSerial = input('Device serial: ') devicePin = input('PIN: ') deviceSuffix = input('Node name: ') except KeyboardInterrupt: print('Pairing attempt aborted') else: if len(deviceSerial) and len(devicePin) and len(deviceSuffix): self._addDeviceToNetwork(deviceSerial, Name(deviceSuffix), devicePin.decode('hex')) else: print('Pairing attempt aborted') finally: self.loop.call_soon(self.displayMenu) def handleUserInput(self): inputStr = stdin.readline().upper() if inputStr.startswith('D'): self.listDevices() elif inputStr.startswith('P'): self.beginPairing() elif inputStr.startswith('E'): self.expressInterest() elif inputStr.startswith('Q'): self.stop() elif inputStr.startswith('L'): self.loadApplicationsMenuSelect() else: self.loop.call_soon(self.displayMenu) ######################## # application trust schema distribution ######################## def updateTrustSchema(self, appName, certName, dataPrefix, publishNew = False): if appName in self._applications: if dataPrefix.toUri() in self._applications[appName]["dataPrefix"]: print("some key is configured for namespace " + dataPrefix.toUri() + " for application " + appName + ". 
Ignoring this request.") return False else: # TODO: Handle malformed conf where validator tree does not exist validatorNode = self._applications[appName]["tree"]["validator"][0] else: # This application does not previously exist, we create its trust schema # (and for now, add in static rules for sync data) self._applications[appName] = {"tree": BoostInfoParser(), "dataPrefix": [], "version": 0} validatorNode = self._applications[appName]["tree"].getRoot().createSubtree("validator") trustAnchorNode = validatorNode.createSubtree("trust-anchor") #trustAnchorNode.createSubtree("type", "file") #trustAnchorNode.createSubtree("file-name", os.path.expanduser("~/.ndn/iot/root.cert")) trustAnchorNode.createSubtree("type", "base64") trustAnchorNode.createSubtree("base64-string", Blob(b64encode(self._rootCertificate.wireEncode().toBytes()), False).toRawStr()) #create cert verification rule # TODO: the idea for this would be, if the cert has /home-prefix/<one-component>/KEY/ksk-*/ID-CERT, then it should be signed by fixed controller(s) # if the cert has /home-prefix/<multiple-components>/KEY/ksk-*/ID-CERT, then it should be checked hierarchically (this is for subdomain support) certRuleNode = validatorNode.createSubtree("rule") certRuleNode.createSubtree("id", "Certs") certRuleNode.createSubtree("for", "data") filterNode = certRuleNode.createSubtree("filter") filterNode.createSubtree("type", "regex") filterNode.createSubtree("regex", "^[^<KEY>]*<KEY><>*<ID-CERT>") checkerNode = certRuleNode.createSubtree("checker") # TODO: wait how did my first hierarchical verifier work? 
#checkerNode.createSubtree("type", "hierarchical") checkerNode.createSubtree("type", "customized") checkerNode.createSubtree("sig-type", "rsa-sha256") keyLocatorNode = checkerNode.createSubtree("key-locator") keyLocatorNode.createSubtree("type", "name") # We don't put cert version in there keyLocatorNode.createSubtree("name", Name(self.getDefaultCertificateName()).getPrefix(-1).toUri()) keyLocatorNode.createSubtree("relation", "equal") # Discovery rule: anything that multicasts under my home prefix should be signed, and the signer should have been authorized by root # TODO: This rule as of right now is over-general discoveryRuleNode = validatorNode.createSubtree("rule") discoveryRuleNode.createSubtree("id", "sync-data") discoveryRuleNode.createSubtree("for", "data") filterNode = discoveryRuleNode.createSubtree("filter") filterNode.createSubtree("type", "regex") filterNode.createSubtree("regex", "^[^<MULTICAST>]*<MULTICAST><>*") checkerNode = discoveryRuleNode.createSubtree("checker") # TODO: wait how did my first hierarchical verifier work? 
#checkerNode.createSubtree("type", "hierarchical") checkerNode.createSubtree("type", "customized") checkerNode.createSubtree("sig-type", "rsa-sha256") keyLocatorNode = checkerNode.createSubtree("key-locator") keyLocatorNode.createSubtree("type", "name") keyLocatorNode.createSubtree("regex", "^[^<KEY>]*<KEY><>*<ID-CERT>") ruleNode = validatorNode.createSubtree("rule") ruleNode.createSubtree("id", dataPrefix.toUri()) ruleNode.createSubtree("for", "data") filterNode = ruleNode.createSubtree("filter") filterNode.createSubtree("type", "name") filterNode.createSubtree("name", dataPrefix.toUri()) filterNode.createSubtree("relation", "is-prefix-of") checkerNode = ruleNode.createSubtree("checker") checkerNode.createSubtree("type", "customized") checkerNode.createSubtree("sig-type", "rsa-sha256") keyLocatorNode = checkerNode.createSubtree("key-locator") keyLocatorNode.createSubtree("type", "name") # We don't put cert version in there keyLocatorNode.createSubtree("name", certName.getPrefix(-1).toUri()) keyLocatorNode.createSubtree("relation", "equal") if not os.path.exists(self._applicationDirectory): os.makedirs(self._applicationDirectory) self._applications[appName]["tree"].write(os.path.join(self._applicationDirectory, appName + ".conf")) self._applications[appName]["dataPrefix"].append(dataPrefix.toUri()) self._applications[appName]["version"] = int(time.time()) if publishNew: # TODO: ideally, this is the trust schema of the application, and does not necessarily carry controller prefix. 
# We make it carry controller prefix here so that prefix registration / route setup is easier (implementation workaround) data = Data(Name(self.prefix).append(appName).append("_schema").appendVersion(self._applications[appName]["version"])) data.setContent(str(self._applications[appName]["tree"].getRoot())) self.signData(data) self._memoryContentCache.add(data) return True # TODO: putting existing confs into memoryContentCache def loadApplications(self, directory = None, override = False): if not directory: directory = self._applicationDirectory if override: self._applications.clear() if os.path.exists(directory): for f in os.listdir(directory): fullFileName = os.path.join(directory, f) if os.path.isfile(fullFileName) and f.endswith('.conf'): appName = f.rstrip('.conf') if appName in self._applications and not override: print("loadApplications: " + appName + " already exists, do nothing for configuration file: " + fullFileName) else: self._applications[appName] = {"tree": BoostInfoParser(), "dataPrefix": [], "version": int(time.time())} self._applications[appName]["tree"].read(fullFileName) data = Data(Name(self.prefix).append(appName).append("_schema").appendVersion(self._applications[appName]["version"])) data.setContent(str(self._applications[appName]["tree"].getRoot())) self.signData(data) self._memoryContentCache.add(data) try: validatorTree = self._applications[appName]["tree"]["validator"][0] for rule in validatorTree["rule"]: self._applications[appName]["dataPrefix"].append(rule["id"][0].value) # TODO: don't swallow any general exceptions, we want to catch only KeyError (make sure) here except Exception as e: print("loadApplications parse configuration file " + fullFileName + " : " + str(e)) return
def onDataNotFound(prefix, interest, face, interestFilterId, filter):
    # Module-level MemoryContentCache callback: nothing cached for this interest.
    print "Data not found for interest: " + interest.getName().toUri()
    return

def onRegisterFailed(prefix):
    # Module-level callback: NFD refused (or failed) the prefix registration.
    print "Prefix registration failed"
    return

if __name__ == "__main__":
    # NOTE(review): this script section uses Python-2 print statements while the
    # classes above use print(); the file looks like a merge of two scripts.
    # SampleProducer is defined elsewhere in this file/project — not visible here.
    print "Start NAC producer test"
    face = Face()
    memoryContentCache = MemoryContentCache(face)
    # Produce encrypted data for this user
    username = "******"
    # Insert into this repo
    repoPrefix = "/ndn/edu/ucla/remap/ndnfit/repo"
    testProducer = SampleProducer(face, username, memoryContentCache)
    # Base timestamp prefix; minute digits are appended below with zfill.
    basetimeString = "20161024T080"
    baseZFill = 3
    baseLat = 34
    baseLng = -118
    # This should be less than 1 minute
    dataNum = 2
    # Create the content key once
    originalTimeString = basetimeString + str(0).zfill(baseZFill)
class TestDPU(object):
    """
    Test data processing unit (DPU) for NDN named-based access control (NAC).

    Registers the DPU identity prefix, consumes encrypted time/location
    samples for a group, computes a bounding box over them, and publishes the
    (currently unencrypted) result to a repo.
    """
    def __init__(self, face, encryptResult, defaultPrefix, link = None):
        """
        :param Face face: connected NDN face (command signing is set up here).
        :param bool encryptResult: if True the result would be encrypted —
          not implemented yet (see onConsumeComplete).
        :param defaultPrefix: group namespace URI/Name for NAC consumption.
        :param link: optional Link object; stored but unused in this chunk.
        """
        # Set up face
        self.face = face
        self._encryptResult = encryptResult
        self._link = link

        # Start from a fresh consumer database every run.
        self.databaseFilePath = "policy_config/test_consumer_dpu.db"
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            # no such file
            pass

        self.groupName = Name(defaultPrefix)

        # Set up the keyChain.
        identityStorage = BasicIdentityStorage()
        privateKeyStorage = FilePrivateKeyStorage()
        self.keyChain = KeyChain(
          IdentityManager(identityStorage, privateKeyStorage),
          NoVerifyPolicyManager())
        # Authorized identity
        identityName = Name("/ndn/edu/basel/dpu")
        # Function name: the function that this DPU provides
        self._functionName = "bounding_box"
        self._identityName = identityName

        self.certificateName = self.keyChain.createIdentityAndCertificate(identityName)
        # TODO: if using BasicIdentityStorage and FilePrivateKeyStorage
        # For some reason this newly generated cert is not installed by default, calling keyChain sign later would result in error
        #self.keyChain.installIdentityCertificate()

        self.memoryContentCache = MemoryContentCache(self.face)
        # Prefer the system default certificate for command signing; fall back
        # to the certificate we just created in FilePrivateKeyStorage if the
        # default KeyChain has no usable default identity.
        try:
            commandSigningKeyChain = KeyChain()
            print "Default certificate name is: " + self.keyChain.getDefaultCertificateName().toUri()
            self.face.setCommandSigningInfo(commandSigningKeyChain, commandSigningKeyChain.getDefaultCertificateName())
            self.memoryContentCache.registerPrefix(identityName, self.onRegisterFailed, self.onDataNotFound)
        except SecurityException as e:
            print str(e)
            print "Cannot use default certificate, use created certificate in FilePrivateKeyStorage"
            self.face.setCommandSigningInfo(self.keyChain, self.certificateName)
            self.memoryContentCache.registerPrefix(identityName, self.onRegisterFailed, self.onDataNotFound)

        consumerKeyName = IdentityCertificate.certificateNameToPublicKeyName(self.certificateName)
        consumerCertificate = identityStorage.getCertificate(self.certificateName)
        self.consumer = Consumer(
          face, self.keyChain, self.groupName, identityName,
          Sqlite3ConsumerDb(self.databaseFilePath))

        # TODO: Read the private key to decrypt d-key...this may or may not be ideal
        base64Content = None
        with open(privateKeyStorage.nameTransform(consumerKeyName.toUri(), ".pri")) as keyFile:
            print privateKeyStorage.nameTransform(consumerKeyName.toUri(), ".pri")
            base64Content = keyFile.read()
            #print base64Content
        der = Blob(base64.b64decode(base64Content), False)
        self.consumer.addDecryptionKey(consumerKeyName, der)

        # Serve our certificate so the group manager can fetch it.
        self.memoryContentCache.add(consumerCertificate)

        # Ask the group manager for read access (versioned with current time).
        accessRequestInterest = Interest(Name(self.groupName).append("read_access_request").append(self.certificateName).appendVersion(int(time.time())))
        self.face.expressInterest(accessRequestInterest, self.onAccessRequestData, self.onAccessRequestTimeout)
        print "Access request interest name: " + accessRequestInterest.getName().toUri()

        # producedDataName -> {"cap_num", "current_num", "dataset"} bookkeeping.
        self._tasks = dict()

        return

    def onAccessRequestData(self, interest, data):
        """Data callback for the read-access request — just log it."""
        print "Access request data: " + data.getName().toUri()
        return

    def onAccessRequestTimeout(self, interest):
        """Timeout callback for the read-access request; treated as best-effort."""
        print "Access request times out: " + interest.getName().toUri()
        print "Assuming certificate sent and D-key generated"
        return

    def startConsuming(self, userId, basetimeString, producedDataName, dataNum, outerDataName):
        """
        Kick off NAC consumption of dataNum samples named
        <userId>/SAMPLE/fitness/physical_activity/time_location/<timeString>,
        where timeString is basetimeString plus a zero-filled counter.
        """
        contentName = Name(userId).append(Name("/SAMPLE/fitness/physical_activity/time_location/"))
        baseZFill = 3
        for i in range(0, dataNum):
            timeString = basetimeString + str(i).zfill(baseZFill)
            # NOTE(review): timeFloat is computed but unused below — presumably
            # a leftover validation of the ISO time string; confirm before removing.
            timeFloat = Schedule.fromIsoString(timeString)
            self.consume(Name(contentName).append(timeString), producedDataName, outerDataName)
            print "Trying to consume: " + Name(contentName).append(timeString).toUri()

    def onDataNotFound(self, prefix, interest, face, interestFilterId, filter):
        """
        Cache-miss handler doubling as the DPU invocation entry point: an
        interest of the form <identity>/<functionName>/<params> triggers a
        bounding-box computation. params is expected to decode to
        "<userId>,<basetimeString>,<producedDataName>".
        """
        print "Data not found for interest: " + interest.getName().toUri()

        functionComponentIdx = len(self._identityName)
        if interest.getName().get(functionComponentIdx).toEscapedString() == self._functionName:
            try:
                parameters = interest.getName().get(functionComponentIdx + 1).toEscapedString()
                pattern = re.compile('([^,]*),([^,]*),([^,]*)')
                matching = pattern.match(str(Name.fromEscapedString(parameters)))
                userId = matching.group(1)
                basetimeString = matching.group(2)
                producedDataName = matching.group(3)
                # Fixed sample count per task (one minute's worth).
                dataNum = 60
                self._tasks[producedDataName] = {"cap_num": dataNum, "current_num": 0, "dataset": []}
                self.startConsuming(userId, basetimeString, producedDataName, dataNum, interest.getName().toUri())
            except Exception as e:
                print "Exception in processing function arguments: " + str(e)
        else:
            print "function name mismatch: expected " + self._functionName + " ; got " + interest.getName().get(functionComponentIdx).toEscapedString()
        return

    def onRegisterFailed(self, prefix):
        """Prefix registration failure callback — log only."""
        print "Prefix registration failed: " + prefix.toUri()
        return

    def consume(self, contentName, producedDataName, outerDataName):
        """Consume one encrypted sample; route completion to onConsumeComplete."""
        self.consumer.consume(contentName,
          lambda data, result: self.onConsumeComplete(data, result, producedDataName, outerDataName),
          self.onConsumeFailed)

    def onConsumeComplete(self, data, result, producedDataName, outerDataName):
        """
        Collect one decrypted sample for the task; once cap_num samples have
        arrived, compute the lat/lng bounding box, wrap it as inner data inside
        outer data, cache it, and push it to the repo.
        """
        print "Consume complete for data name: " + data.getName().toUri()
        if producedDataName in self._tasks:
            self._tasks[producedDataName]["current_num"] += 1
            self._tasks[producedDataName]["dataset"].append(result)
            if self._tasks[producedDataName]["current_num"] == self._tasks[producedDataName]["cap_num"]:
                # Sentinels outside the valid lat/lng range.
                maxLng = -1000
                minLng = 1000
                maxLat = -1000
                minLat = 1000
                for item in self._tasks[producedDataName]["dataset"]:
                    # assumes each decrypted blob is JSON with "lat"/"lng" keys — TODO confirm with producer
                    dataObject = json.loads(str(item))
                    if dataObject["lat"] > maxLat:
                        maxLat = dataObject["lat"]
                    if dataObject["lat"] < minLat:
                        minLat = dataObject["lat"]
                    if dataObject["lng"] > maxLng:
                        maxLng = dataObject["lng"]
                    if dataObject["lng"] < minLng:
                        minLng = dataObject["lng"]
                if not self._encryptResult:
                    innerData = Data(Name(str(producedDataName)))
                    innerData.setContent(json.dumps({"minLat": minLat, "maxLat": maxLat, "minLng": minLng, "maxLng": maxLng}))
                    #self.keyChain.sign(innerData)
                    outerData = Data(Name(str(outerDataName)))
                    outerData.setContent(innerData.wireEncode())
                    #self.keyChain.sign(outerData)
                    self.memoryContentCache.add(outerData)
                    self.initiateContentStoreInsertion("/ndn/edu/ucla/remap/ndnfit/repo", outerData)
                    print "Calculation completed, put data to repo"
                else:
                    print "Encrypt result is not implemented"

    def onConsumeFailed(self, code, message):
        """NAC consume error callback — log code and message."""
        print "Consume error " + str(code) + ": " + message

    def initiateContentStoreInsertion(self, repoCommandPrefix, data):
        """
        Send a repo "insert" command interest asking the repo to fetch and
        store the given data packet (repo-ng protobuf command format).
        """
        fetchName = data.getName()
        parameter = repo_command_parameter_pb2.RepoCommandParameterMessage()
        # Add the Name.
        for i in range(fetchName.size()):
            parameter.repo_command_parameter.name.component.append(
              fetchName[i].getValue().toBytes())
        # Create the command interest.
        interest = Interest(Name(repoCommandPrefix).append("insert")
          .append(Name.Component(ProtobufTlv.encode(parameter))))
        self.face.makeCommandInterest(interest)
        self.face.expressInterest(interest, self.onRepoData, self.onRepoTimeout)

    def onRepoData(self, interest, data):
        # Repo acknowledged the insert command; intentionally quiet.
        #print "received repo data: " + interest.getName().toUri()
        return

    def onRepoTimeout(self, interest):
        # Repo command timed out; intentionally quiet (best-effort insertion).
        #print "repo command times out: " + interest.getName().getPrefix(-1).toUri()
        return