def _processRecoveryInterest(self, interest, syncDigest, transport):
    logging.getLogger(__name__).info("processRecoveryInterest")
    if self._logFind(syncDigest) != -1:
        tempContent = sync_state_pb2.SyncStateMsg()
        for i in range(self._digestTree.size()):
            content = getattr(tempContent, "ss").add()
            content.name = self._applicationDataPrefixUri
            content.type = SyncState_UPDATE
            content.seqno.seq = self._sequenceNo
            content.seqno.session = self._digestTree.get(i).getSessionNo()

        if len(getattr(tempContent, "ss")) != 0:
            # TODO: Check if this works in Python 3.
            #pylint: disable=E1103
            array = tempContent.SerializeToString()
            #pylint: enable=E1103
            data = Data(interest.getName())
            data.setContent(Blob(array))
            self._keyChain.sign(data, self._certificateName)
            try:
                transport.send(data.wireEncode().toBuffer())
            except Exception as ex:
                logging.getLogger(__name__).error(
                    "Error in transport.send: %s", str(ex))
                return

            logging.getLogger(__name__).info("send recovery data back")
            logging.getLogger(__name__).info("%s", interest.getName().toUri())

def _processRecoveryInterest(self, interest, syncDigest, face):
    logging.getLogger(__name__).info("processRecoveryInterest")
    if self._logFind(syncDigest) != -1:
        tempContent = SyncStateMsg()
        for i in range(self._digestTree.size()):
            content = getattr(tempContent, "ss").add()
            content.name = self._digestTree.get(i).getDataPrefix()
            content.type = SyncState_UPDATE
            content.seqno.seq = self._digestTree.get(i).getSequenceNo()
            content.seqno.session = self._digestTree.get(i).getSessionNo()

        if len(getattr(tempContent, "ss")) != 0:
            # TODO: Check if this works in Python 3.
            #pylint: disable=E1103
            array = tempContent.SerializeToString()
            #pylint: enable=E1103
            data = Data(interest.getName())
            data.setContent(Blob(array))
            if interest.getName().get(-1).toEscapedString() == "00":
                # Limit the lifetime of replies to interest for "00" since
                # they can be different.
                data.getMetaInfo().setFreshnessPeriod(1000)

            self._keyChain.sign(data, self._certificateName)
            try:
                face.putData(data)
            except Exception as ex:
                logging.getLogger(__name__).error(
                    "Error in face.putData: %s", str(ex))
                return

            logging.getLogger(__name__).info("send recovery data back")
            logging.getLogger(__name__).info("%s", interest.getName().toUri())

def _registerPrefixHelper(
        self, registeredPrefixId, prefix, onInterest, onRegisterFailed, flags,
        wireFormat):
    """
    Do the work of registerPrefix to register with NDNx once we have an
    _ndndId.

    :param int registeredPrefixId: The
      _RegisteredPrefix.getNextRegisteredPrefixId() which registerPrefix got
      so it could return it to the caller. If this is 0, then don't add to
      _registeredPrefixTable (assuming it has already been done).
    """
    # Create a ForwardingEntry.
    # Note: ndnd ignores any freshness that is larger than 3600 seconds and
    # sets 300 seconds instead. To register "forever", (=2000000000 sec),
    # the freshness period must be omitted.
    forwardingEntry = ForwardingEntry()
    forwardingEntry.setAction("selfreg")
    forwardingEntry.setPrefix(prefix)
    forwardingEntry.setForwardingFlags(flags)
    content = forwardingEntry.wireEncode(wireFormat)

    # Set the ForwardingEntry as the content of a Data packet and sign.
    data = Data()
    data.setContent(content)
    # Set the name to a random value so that each request is unique.
    nonce = bytearray(4)
    for i in range(len(nonce)):
        nonce[i] = _systemRandom.randint(0, 0xff)
    data.getName().append(nonce)
    # The ndnd ignores the signature, so set to blank values.
    data.getSignature().getKeyLocator().setType(
        KeyLocatorType.KEY_LOCATOR_DIGEST)
    data.getSignature().getKeyLocator().setKeyData(
        Blob(bytearray(32), False))
    data.getSignature().setSignature(Blob(bytearray(128), False))
    encodedData = data.wireEncode(wireFormat)

    # Create an interest where the name has the encoded Data packet.
    interestName = Name().append("ndnx").append(self._ndndId).append(
        "selfreg").append(encodedData)

    interest = Interest(interestName)
    interest.setInterestLifetimeMilliseconds(4000.0)
    interest.setScope(1)
    encodedInterest = interest.wireEncode(wireFormat)

    if registeredPrefixId != 0:
        # Save the onInterest callback and send the registration interest.
        self._registeredPrefixTable.append(Node._RegisteredPrefix(
            registeredPrefixId, prefix, onInterest))

    response = Node._RegisterResponse(
        self, prefix, onInterest, onRegisterFailed, flags, wireFormat, False)
    self.expressInterest(
        interest, response.onData, response.onTimeout, wireFormat)

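# A minimal standalone sketch (not from the source) of the nonce step above:
# _systemRandom in the method is assumed to be a random.SystemRandom()
# instance, and the request Data name gets 4 cryptographically random bytes so
# that each self-registration request is unique. The helper name below is
# illustrative only.
from random import SystemRandom

def make_request_nonce(length=4):
    # Return `length` random bytes suitable for appending as a name component.
    rng = SystemRandom()
    return bytearray(rng.randint(0, 0xff) for _ in range(length))

# For example: len(make_request_nonce()) == 4 and every byte is in 0..255.
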
def addMember(self, memberCertificate):
    """
    Authorize a member identified by memberCertificate to decrypt data under
    the policy.

    :param CertificateV2 memberCertificate: The certificate that identifies
      the member to authorize.
    :return: The published KDK Data packet.
    :rtype: Data
    """
    kdkName = Name(self._nacKey.getIdentityName())
    kdkName.append(EncryptorV2.NAME_COMPONENT_KDK).append(
        # key-id
        self._nacKey.getName().get(-1)).append(
        EncryptorV2.NAME_COMPONENT_ENCRYPTED_BY).append(
        memberCertificate.getKeyName())

    secretLength = 32
    secret = bytearray(secretLength)
    for i in range(secretLength):
        secret[i] = _systemRandom.randint(0, 0xff)
    # To be compatible with OpenSSL which uses a null-terminated string,
    # replace each 0 with 1. And to be compatible with the Java security
    # library which interprets the secret as a char array converted to UTF8,
    # limit each byte to the ASCII range 1 to 127.
    for i in range(secretLength):
        if secret[i] == 0:
            secret[i] = 1

        secret[i] &= 0x7f

    kdkSafeBag = self._keyChain.exportSafeBag(
        self._nacKey.getDefaultCertificate(), Blob(secret, False).toBytes())

    memberKey = PublicKey(memberCertificate.getPublicKey())

    encryptedContent = EncryptedContent()
    encryptedContent.setPayload(kdkSafeBag.wireEncode())
    encryptedContent.setPayloadKey(memberKey.encrypt(
        Blob(secret, False).toBytes(), EncryptAlgorithmType.RsaOaep))

    kdkData = Data(kdkName)
    kdkData.setContent(encryptedContent.wireEncodeV2())
    # FreshnessPeriod can serve as a soft access control for revoking access.
    kdkData.getMetaInfo().setFreshnessPeriod(
        AccessManagerV2.DEFAULT_KDK_FRESHNESS_PERIOD_MS)
    self._keyChain.sign(kdkData, SigningInfo(self._identity))

    self._storage.insert(kdkData)

    return kdkData

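# For orientation, the KDK name assembled in addMember above has roughly this
# layout (a sketch based on the appends; the exact component values such as
# "KDK" and "ENCRYPTED-BY" are assumptions about the EncryptorV2 constants):
#
#   <nac-key-identity>/KDK/<nac-key-id>/ENCRYPTED-BY/<member-key-name>
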
def _broadcastSyncState(self, digest, syncMessage):
    """
    Make a data packet with the syncMessage and with name
    applicationBroadcastPrefix_ + digest. Sign and send.

    :param str digest: The root digest as a hex string for the data packet
      name.
    :param sync_state_pb2.SyncState syncMessage:
    """
    data = Data(self._applicationBroadcastPrefix)
    data.getName().append(digest)
    # TODO: Check if this works in Python 3.
    data.setContent(Blob(syncMessage.SerializeToString()))
    self._keyChain.sign(data, self._certificateName)
    self._contentCache.add(data)

def _createEKeyData(self, startTimeStamp, endTimeStamp, publicKeyBlob):
    """
    Create an E-KEY Data packet for the given public key.

    :param str startTimeStamp: The start time stamp string to put in the name.
    :param str endTimeStamp: The end time stamp string to put in the name.
    :param Blob publicKeyBlob: A Blob of the public key DER.
    :return: The Data packet.
    :rtype: Data
    """
    name = Name(self._namespace)
    name.append(Encryptor.NAME_COMPONENT_E_KEY).append(
        startTimeStamp).append(endTimeStamp)
    data = Data(name)
    data.getMetaInfo().setFreshnessPeriod(
        self._freshnessHours * GroupManager.MILLISECONDS_IN_HOUR)
    data.setContent(publicKeyBlob)
    self._keyChain.sign(data)
    return data

def _makeAndPublishCkData(self, onError):
    """
    Make a CK Data packet for _ckName encrypted by the KEK in _kekData and
    insert it in the _storage.

    :param onError: On failure, this calls onError(errorCode, message) where
      errorCode is from EncryptError.ErrorCode, and message is an error
      string.
    :type onError: function object
    :return: True on success, else False.
    :rtype: bool
    """
    try:
        kek = PublicKey(self._kekData.getContent())

        content = EncryptedContent()
        content.setPayload(kek.encrypt(
            Blob(self._ckBits, False), EncryptAlgorithmType.RsaOaep))

        ckData = Data(
            Name(self._ckName).append(
                EncryptorV2.NAME_COMPONENT_ENCRYPTED_BY).append(
                self._kekData.getName()))
        ckData.setContent(content.wireEncodeV2())
        # FreshnessPeriod can serve as a soft access control for revoking access.
        ckData.getMetaInfo().setFreshnessPeriod(
            EncryptorV2.DEFAULT_CK_FRESHNESS_PERIOD_MS)
        self._keyChain.sign(ckData, self._ckDataSigningInfo)
        self._storage.insert(ckData)

        logging.getLogger(__name__).info(
            "Publishing CK data: " + ckData.getName().toUri())
        return True
    except Exception as ex:
        onError(EncryptError.ErrorCode.EncryptionFailure,
            "Failed to encrypt generated CK with KEK " +
            self._kekData.getName().toUri())
        return False

def _registerPrefixHelper(
        self, registeredPrefixId, prefix, onInterest, onRegisterFailed, flags,
        wireFormat, face):
    """
    Do the work of registerPrefix to register with NDNx once we have an
    _ndndId.

    :param int registeredPrefixId: The getNextEntryId() which registerPrefix
      got so it could return it to the caller. If this is 0, then don't add
      to _registeredPrefixTable (assuming it has already been done).
    """
    if not WireFormat.ENABLE_NDNX:
        # We can get here if the command signing info is set, but running NDNx.
        raise RuntimeError(
            "registerPrefix with NDNx is deprecated. To enable while you "
            "upgrade your code to use NFD, set WireFormat.ENABLE_NDNX = True")

    # Create a ForwardingEntry.
    # Note: ndnd ignores any freshness that is larger than 3600 seconds and
    # sets 300 seconds instead. To register "forever", (=2000000000 sec),
    # the freshness period must be omitted.
    forwardingEntry = ForwardingEntry()
    forwardingEntry.setAction("selfreg")
    forwardingEntry.setPrefix(prefix)
    forwardingEntry.setForwardingFlags(flags)
    content = forwardingEntry.wireEncode(wireFormat)

    # Set the ForwardingEntry as the content of a Data packet and sign.
    data = Data()
    data.setContent(content)
    # Set the name to a random value so that each request is unique.
    nonce = bytearray(4)
    for i in range(len(nonce)):
        nonce[i] = _systemRandom.randint(0, 0xff)
    data.getName().append(nonce)
    # The ndnd ignores the signature, so set to blank values.
    data.getSignature().getKeyLocator().setType(
        KeyLocatorType.KEY_LOCATOR_DIGEST)
    data.getSignature().getKeyLocator().setKeyData(
        Blob(bytearray(32), False))
    data.getSignature().setSignature(Blob(bytearray(128), False))
    encodedData = data.wireEncode(wireFormat)

    # Create an interest where the name has the encoded Data packet.
    interestName = Name().append("ndnx").append(self._ndndId).append(
        "selfreg").append(encodedData)

    interest = Interest(interestName)
    interest.setInterestLifetimeMilliseconds(4000.0)
    interest.setScope(1)

    if registeredPrefixId != 0:
        interestFilterId = 0
        if onInterest != None:
            # registerPrefix was called with the "combined" form that includes
            # the callback, so add an InterestFilterEntry.
            interestFilterId = self.getNextEntryId()
            self.setInterestFilter(
                interestFilterId, InterestFilter(prefix), onInterest, face)

        self._registeredPrefixTable.append(Node._RegisteredPrefix(
            registeredPrefixId, prefix, interestFilterId))

    # Send the registration interest.
    response = Node._RegisterResponse(
        self, prefix, onInterest, onRegisterFailed, flags, wireFormat, False,
        face)
    self.expressInterest(
        self.getNextEntryId(), interest, response.onData, response.onTimeout,
        wireFormat, face)

def publish(
        self, interestName, dataName, content, freshnessPeriod,
        signingInfo = SigningInfo()):
    """
    Put all the segments in the memory store.

    :param Name interestName: If the Interest name ends in a segment,
      immediately send the Data packet for the segment to the Face.
    :param Name dataName: The Data name, which has components after the
      Interest name.
    :param Blob content: The content of the data to be segmented.
    :param float freshnessPeriod: The freshness period of the segments, in
      milliseconds.
    :param SigningInfo signingInfo: (optional) The SigningInfo for signing
      segment Data packets. If omitted, use the default SigningInfo().
    """
    interestSegment = 0
    if interestName[-1].isSegment():
        interestSegment = interestName[-1].toSegment()

    rawBuffer = content.buf()
    iSegmentBegin = 0
    iEnd = len(content)

    maxPacketSize = int(Common.MAX_NDN_PACKET_SIZE / 2)

    totalSegments = int(len(content) / maxPacketSize)
    finalBlockId = Name.Component.fromSegment(totalSegments)

    segmentPrefix = Name(dataName)
    segmentPrefix.appendVersion(int(Common.getNowMilliseconds()))

    segmentNo = 0
    while True:
        iSegmentEnd = iSegmentBegin + maxPacketSize
        if iSegmentEnd > iEnd:
            iSegmentEnd = iEnd

        segmentName = Name(segmentPrefix)
        segmentName.appendSegment(segmentNo)

        data = Data(segmentName)
        data.setContent(Blob(rawBuffer[iSegmentBegin:iSegmentEnd]))
        data.getMetaInfo().setFreshnessPeriod(freshnessPeriod)
        data.getMetaInfo().setFinalBlockId(finalBlockId)

        iSegmentBegin = iSegmentEnd

        self._keyChain.sign(data, signingInfo)

        # Only send the segment to the Face if it has a pending interest.
        # Otherwise, the segment is unsolicited.
        if interestSegment == segmentNo:
            self._face.putData(data)

        # Until InMemoryStorageFifo implements an eviction policy, use
        # InMemoryStorageRetaining.
        # storage_.insert(*data, freshnessPeriod)
        self._storage.insert(data)

        # Make and return a callback since segmentName is different each time.
        def makeCallback(localSegmentName):
            def callback():
                self._storage.remove(localSegmentName)
            return callback

        self._face.callLater(freshnessPeriod, makeCallback(segmentName))

        segmentNo += 1
        if not (iSegmentBegin < iEnd):
            break

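# A minimal standalone sketch (not from the source) of the segmentation loop
# in publish above, assuming only that segments are fixed-size slices of the
# content and the final segment holds the remainder. The helper name is
# illustrative, not part of the library.
def segment_ranges(contentLength, maxPacketSize):
    # Yield (begin, end) byte ranges, one per segment, covering the content.
    begin = 0
    while True:
        end = min(begin + maxPacketSize, contentLength)
        yield begin, end
        begin = end
        if begin >= contentLength:
            break

# For example, 10 bytes of content with 4-byte segments gives three segments:
assert list(segment_ranges(10, 4)) == [(0, 4), (4, 8), (8, 10)]
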
def _processSyncInterest(self, index, syncDigest, face):
    """
    Common interest processing, using digest log to find the difference
    after syncDigest.

    :return: True if sent a data packet to satisfy the interest, otherwise
      False.
    :rtype: bool
    """
    nameList = []       # of str
    sequenceNoList = [] # of int
    sessionNoList = []  # of int
    for j in range(index + 1, len(self._digestLog)):
        temp = self._digestLog[j].getData() # array of sync_state_pb2.SyncState.
        for i in range(len(temp)):
            syncState = temp[i]
            if syncState.type != SyncState_UPDATE:
                continue

            if self._digestTree.find(
                    syncState.name, syncState.seqno.session) != -1:
                n = -1
                for k in range(len(nameList)):
                    if nameList[k] == syncState.name:
                        n = k
                        break

                if n == -1:
                    nameList.append(syncState.name)
                    sequenceNoList.append(syncState.seqno.seq)
                    sessionNoList.append(syncState.seqno.session)
                else:
                    sequenceNoList[n] = syncState.seqno.seq
                    sessionNoList[n] = syncState.seqno.session

    tempContent = SyncStateMsg()
    for i in range(len(nameList)):
        content = getattr(tempContent, "ss").add()
        content.name = nameList[i]
        content.type = SyncState_UPDATE
        content.seqno.seq = sequenceNoList[i]
        content.seqno.session = sessionNoList[i]

    sent = False
    if len(getattr(tempContent, "ss")) != 0:
        name = Name(self._applicationBroadcastPrefix)
        name.append(syncDigest)
        # TODO: Check if this works in Python 3.
        #pylint: disable=E1103
        array = tempContent.SerializeToString()
        #pylint: enable=E1103
        data = Data(name)
        data.setContent(Blob(array))
        self._keyChain.sign(data, self._certificateName)
        try:
            face.putData(data)
        except Exception as ex:
            logging.getLogger(__name__).error(
                "Error in face.putData: %s", str(ex))
            return

        sent = True
        logging.getLogger(__name__).info("Sync Data send")
        logging.getLogger(__name__).info("%s", name.toUri())

    return sent

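# A minimal standalone sketch (not from the source) of the de-duplication step
# in _processSyncInterest above: walking the digest log in order, keep a single
# latest (sequenceNo, sessionNo) pair per data prefix, overwriting on repeats
# while preserving first-seen order. Names are illustrative only; the filtering
# by SyncState_UPDATE and the digest tree lookup are omitted.
def latest_per_name(updates):
    # updates: iterable of (name, sequenceNo, sessionNo) tuples in log order.
    latest = {}
    order = []
    for name, seq, session in updates:
        if name not in latest:
            order.append(name)
        latest[name] = (seq, session)
    return [(name,) + latest[name] for name in order]

# For example, a later update to "/a" supersedes the earlier one:
assert latest_per_name([("/a", 1, 0), ("/b", 5, 0), ("/a", 2, 0)]) == [
    ("/a", 2, 0), ("/b", 5, 0)]
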