def __init__(self, identityManager=None, policyManager=None):
    """Initialize with an optional identity manager and policy manager.

    :param identityManager: Manages identities/keys for signing; when
        omitted, a default IdentityManager is created.
    :param policyManager: Verification policy; when omitted, defaults to
        NoVerifyPolicyManager (signatures are NOT verified).
    """
    # Use identity comparison ("is None") for null checks: "== None" can
    # misbehave on objects that override __eq__.
    if identityManager is None:
        identityManager = IdentityManager()
    if policyManager is None:
        policyManager = NoVerifyPolicyManager()
    self._identityManager = identityManager
    self._policyManager = policyManager
    # No Face attached yet; presumably set later via a setFace-style call
    # elsewhere in the class — TODO confirm.
    self._face = None
def main():
    # --- Command-line arguments ---
    argParser = argparse.ArgumentParser(
        description=
        'bms gateway node to Parse or follow Cascade Datahub log and publish to MiniNdn.'
    )
    argParser.add_argument('filename', help='datahub log file')
    argParser.add_argument('-f',
                           dest='follow',
                           action='store_true',
                           help='follow (tail -f) the log file')
    argParser.add_argument('--namespace',
                           default='/ndn/nist/bms',
                           help='root of ndn name, no trailing slash')
    argParser.add_argument('--image',
                           dest='image',
                           default='../simulator/res/floor2.jpg',
                           help='the floor plan to publish')
    argParser.add_argument('--location',
                           dest='location',
                           default='../simulator/res/locations.txt',
                           help='the floor plan to publish')
    options = argParser.parse_args()

    # --- Logging ---
    log = Logger()
    log.prepareLogging()

    # --- NDN face, keychain, in-memory content cache, asyncio loop ---
    eventLoop = asyncio.get_event_loop()
    gatewayFace = ThreadsafeFace(eventLoop)
    chain = KeyChain(IdentityManager(BasicIdentityStorage()))

    # The gateway publisher gets its own identity for signing NFD
    # command interests.
    gatewayCertName = chain.createIdentityAndCertificate(
        Name("/ndn/nist/gateway"))
    gatewayFace.setCommandSigningInfo(chain, gatewayCertName)

    contentCache = MemoryContentCache(gatewayFace)
    publisher = DataPublisher(gatewayFace, chain, eventLoop, contentCache,
                              options.namespace, options.image,
                              options.location)
    contentCache.registerPrefix(Name(options.namespace),
                                publisher.onRegisterFailed,
                                publisher.onDataNotFound)

    # Publish the floor plan image first, then stream the log file —
    # either once (readfile) or tail-style (followfile).
    eventLoop.run_until_complete(publisher.publishFloorImage())
    if not options.follow:
        eventLoop.run_until_complete(publisher.readfile(options.filename))
    else:
        eventLoop.run_until_complete(publisher.followfile(options.filename))

    eventLoop.run_forever()
    gatewayFace.shutdown()
def __init__(self, identityManager=None, policyManager=None):
    """Initialize with an optional identity manager and policy manager.

    :param identityManager: Manages identities/keys for signing; when
        omitted, a default IdentityManager is created.
    :param policyManager: Verification policy; when omitted, defaults to
        NoVerifyPolicyManager (signatures are NOT verified).
    """
    # Use identity comparison ("is None") for null checks: "== None" can
    # misbehave on objects that override __eq__.
    if identityManager is None:
        identityManager = IdentityManager()
    if policyManager is None:
        policyManager = NoVerifyPolicyManager()
    self._identityManager = identityManager
    self._policyManager = policyManager
    # Encryption manager and Face are attached later — no setup happens here.
    self._encryptionManager = None
    self._face = None
    # Cap on iteration count used elsewhere in the class; the meaning of a
    # "step" is not visible from this block — TODO confirm.
    self._maxSteps = 100
def main():
    """Parse a Cascade Datahub log and publish sensor data over NDN.

    Reads (or tails) the given log file, maps sensor JSON entries to NDN
    names via a CSV dictionary, and serves the data from a
    MemoryContentCache under the configured namespace.
    """
    # --- Command-line arguments ---
    parser = argparse.ArgumentParser(
        description=
        'bms gateway node to Parse or follow Cascade Datahub log and publish to MiniNdn.'
    )
    parser.add_argument('filename', help='datahub log file')
    parser.add_argument('-f',
                        dest='follow',
                        action='store_true',
                        help='follow (tail -f) the log file')
    parser.add_argument('--namespace',
                        default='/ndn/edu/ucla/remap/bms',
                        help='root of ndn name, no trailing slash')
    args = parser.parse_args()

    # --- Logging ---
    logger = Logger()
    logger.prepareLogging()

    # --- Face, KeyChain, memoryContentCache and asyncio event loop ---
    loop = asyncio.get_event_loop()
    # NOTE(review): hard-coded forwarder host — consider making this a
    # command-line option.
    face = ThreadsafeFace(loop, "128.97.98.7")
    keyChain = KeyChain(
        IdentityManager(BasicIdentityStorage(), FilePrivateKeyStorage()))

    # Sign NFD command interests with the default identity's certificate.
    face.setCommandSigningInfo(keyChain, keyChain.getDefaultCertificateName())
    # Single-argument parenthesized print behaves identically under
    # Python 2 (print statement) and Python 3 (print function), and is
    # consistent with the print(...) style used elsewhere in this project.
    print("Using certificate name " + keyChain.getDefaultCertificateName(
    ).toUri())

    cache = MemoryContentCache(face)
    dataPublisher = DataPublisher(face, keyChain, loop, cache, args.namespace)
    cache.registerPrefix(Name(args.namespace), dataPublisher.onRegisterFailed,
                         dataPublisher.onDataNotFound)

    # Parse csv to decide the mapping between sensor JSON -> <NDN name, data type>
    dataPublisher.populateSensorNDNDictFromCSV(
        'bms-sensor-data-types-sanitized.csv')

    # Periodic liveness check, rescheduled by checkAlive itself (presumably —
    # the callback body is not visible here).
    loop.call_later(dataPublisher._restartInterval, dataPublisher.checkAlive)

    # Stream the log once, or tail it indefinitely with -f.
    if args.follow:
        loop.run_until_complete(dataPublisher.followfile(args.filename))
    else:
        loop.run_until_complete(dataPublisher.readfile(args.filename))

    loop.run_forever()
    face.shutdown()
def startPublishing(self):
    """One-time setup, then start publishing aggregated data.

    Creates the keychain/identity/certificate, attaches a ThreadsafeFace and
    MemoryContentCache to the asyncio loop, optionally gets the certificate
    signed by an external service (DO_CERT_SETUP), and finally walks the
    configuration tree to start one aggregation publisher per
    (dataType, aggregationType) pair.
    """
    # --- One-time security setup ---
    self.prepareLogging()
    privateKeyStorage = FilePrivateKeyStorage()
    identityStorage = BasicIdentityStorage()
    policyManager = ConfigPolicyManager(self._trustSchemaFile)
    self._keyChain = KeyChain(
        IdentityManager(identityStorage, privateKeyStorage), policyManager)
    self._certificateName = self._keyChain.createIdentityAndCertificate(
        self._identityName)
    print("My Identity name: " + self._identityName.toUri())
    print("My certificate name: " + self._certificateName.toUri())
    # Fetch the raw certificate back from storage so it can be encoded and
    # served / sent for signing below.
    certificateData = self._keyChain.getIdentityManager(
    )._identityStorage.getCertificate(self._certificateName)
    print("My certificate string: " +
          b64encode(certificateData.wireEncode().toBuffer()))
    # self._keyChain.getIdentityCertificate(self._certificateName).)

    # --- Face / content cache attached to the asyncio event loop ---
    self._loop = asyncio.get_event_loop()
    self._face = ThreadsafeFace(self._loop)
    self._keyChain.setFace(self._face)
    self._face.setCommandSigningInfo(self._keyChain, self._certificateName)
    self._memoryContentCache = MemoryContentCache(self._face)

    # We should only ask for cert to be signed upon the first run of a
    # certain aggregator.
    if DO_CERT_SETUP:
        # If the cert's key locator equals the cert name minus its last
        # component, it is presumably still self-signed and needs an
        # external signature — TODO confirm this heuristic.
        if (KeyLocator.getFromSignature(
                certificateData.getSignature()).getKeyName().equals(
                    self._certificateName.getPrefix(-1))):
            # Need to configure for mini-ndn; aggregation node runs outside
            # of mini-ndn first so that signed cert get installed and
            # mini-ndn won't ask for this again.
            print("certificate " + self._certificateName.toUri() +
                  " asking for signature")
            # NOTE(review): plain-HTTP request to a hard-coded host, labeled
            # a "hack" in the URL itself — clearly a development shortcut.
            response = urllib2.urlopen(
                "http://192.168.56.1:5000/bms-cert-hack?cert=" +
                b64encode(certificateData.wireEncode().toBuffer()) +
                "&cert_prefix=" + self._identityName.toUri() +
                '&subject_name=' + self._identityName.toUri()).read()
            signedCertData = Data()
            signedCertData.wireDecode(Blob(b64decode(response)))
            # Serve the signed certificate from the in-memory cache.
            self._memoryContentCache.add(signedCertData)
            # Pipe the (desanitized, signed) cert into ndnsec-install-cert
            # via stdin ('-').
            cmdline = ['ndnsec-install-cert', '-']
            p = subprocess.Popen(cmdline,
                                 stdin=subprocess.PIPE,
                                 stdout=subprocess.PIPE)
            # desanitize + sign in GET request cert
            # NOTE(review): communicate() returns an (stdout, stderr) tuple;
            # the original source likely read "cert, err = ..." (a "cert,"
            # fragment appears fused into the comment above) — the tuple is
            # unused either way, so behavior is unchanged. TODO confirm.
            err = p.communicate(response)
            if p.returncode != 0:
                raise RuntimeError("ndnsec-install-cert error")
        else:
            # Certificate already signed by a parent key: serve it as-is.
            self._memoryContentCache.add(certificateData)
    else:
        # No cert setup requested: serve the locally created certificate.
        # NOTE(review): else-pairing reconstructed from a line-collapsed
        # source; both fallback branches add certificateData — confirm
        # against the original file's indentation.
        self._memoryContentCache.add(certificateData)

    dataNode = self.conf.getDataNode()
    childrenNode = self.conf.getChildrenNode()

    self._memoryContentCache.registerPrefix(Name(self._identityName),
                                            self.onRegisterFailed,
                                            self.onDataNotFound)

    # For each type of data, we refresh each type of aggregation according
    # to the interval in the configuration.
    # NOTE: .keys()[i] / .items()[i] indexing requires Python 2 (lists) or
    # an OrderedDict-like subtrees container.
    for i in range(len(dataNode.subtrees)):
        dataType = dataNode.subtrees.keys()[i]
        aggregationParams = self.conf.getProducingParamsForAggregationType(
            dataNode.subtrees.items()[i][1])

        # Leaf node (no children): seed a data queue and start generating
        # raw data locally.
        if childrenNode == None:
            self._dataQueue[dataType] = DataQueue(None, None, None)
            self.generateData(dataType, 2, 0)

        for aggregationType in aggregationParams:
            childrenList = OrderedDict()
            if childrenNode != None:
                # Collect, for each child that produces this dataType with
                # this aggregationType, that child's producing parameters.
                for j in range(len(childrenNode.subtrees)):
                    if dataType in childrenNode.subtrees.items(
                    )[j][1].subtrees['data'].subtrees:
                        if aggregationType in childrenNode.subtrees.items(
                        )[j][1].subtrees['data'].subtrees[
                                dataType].subtrees:
                            childrenList[childrenNode.subtrees.items()[j][
                                0]] = self.conf.getProducingParamsForAggregationType(
                                    childrenNode.subtrees.items()[j]
                                    [1].subtrees['data'].subtrees[dataType]
                                )[aggregationType]

            self.startPublishingAggregation(
                aggregationParams[aggregationType], childrenList, dataType,
                aggregationType)
    return