Code Example #1
    def test_self_verification(self):
        policyManager = SelfVerifyPolicyManager(self.identityStorage)
        keyChain = KeyChain(self.identityManager, policyManager)

        identityName = Name('TestValidator/RsaSignatureVerification')
        keyChain.createIdentityAndCertificate(identityName)

        data = Data(Name('/TestData/1'))
        keyChain.signByIdentity(data, identityName)

        vr = doVerify(policyManager, data)

        self.assertFalse(vr.hasFurtherSteps,
                "SelfVerifyPolicyManager returned a ValidationRequest")
        self.assertEqual(vr.failureCount, 0,
            "Verification of identity-signed data failed")
        self.assertEqual(vr.successCount, 1,
            "Verification success called {} times instead of 1".format(
            vr.successCount))

        data2 = Data(Name('/TestData/2'))

        vr = doVerify(policyManager,
                data2)

        self.assertFalse(vr.hasFurtherSteps,
                "SelfVerifyPolicyManager returned a ValidationRequest")
        self.assertEqual(vr.successCount, 0,
            "Verification of unsigned data succeeded")
        self.assertEqual(vr.failureCount, 1,
            "Verification failure callback called {} times instead of 1".format(
            vr.failureCount))
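These policy-manager test excerpts rely on a doVerify helper and a result object that are not shown. A minimal sketch consistent with how the tests use them might look like the following; the class name, the stepCount of 0, and the callback signatures are assumptions, not the test suite's actual code:

class VerificationResult(object):
    # Collects the outcome of a single verification attempt.
    def __init__(self):
        self.successCount = 0
        self.failureCount = 0
        self.hasFurtherSteps = False

    def onVerified(self, data):
        self.successCount += 1

    def onVerifyFailed(self, data, reason=None):
        self.failureCount += 1

def doVerify(policyManager, toVerify):
    # Run one round of the policy manager's check and record what happened.
    result = VerificationResult()
    nextStep = policyManager.checkVerificationPolicy(
        toVerify, 0, result.onVerified, result.onVerifyFailed)
    if nextStep is not None:
        # The policy manager returned a ValidationRequest asking for further steps.
        result.hasFurtherSteps = True
    return result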
Code Example #2
File: test_policy_manager.py  Project: mjycom/PyNDN2
    def test_self_verification(self):
        policyManager = SelfVerifyPolicyManager(self.identityStorage)
        keyChain = KeyChain(self.identityManager, policyManager)

        identityName = Name('TestValidator/RsaSignatureVerification')
        keyChain.createIdentityAndCertificate(identityName)

        data = Data(Name('/TestData/1'))
        keyChain.signByIdentity(data, identityName)

        vr = doVerify(policyManager, data)

        self.assertFalse(vr.hasFurtherSteps,
                "SelfVerifyPolicyManager returned a ValidationRequest")
        self.assertEqual(vr.failureCount, 0,
            "Verification of identity-signed data failed")
        self.assertEqual(vr.successCount, 1,
            "Verification success called {} times instead of 1".format(
            vr.successCount))

        data2 = Data(Name('/TestData/2'))

        vr = doVerify(policyManager,
                data2)

        self.assertFalse(vr.hasFurtherSteps,
                "SelfVerifyPolicyManager returned a ValidationRequest")
        self.assertEqual(vr.successCount, 0,
            "Verification of unsigned data succeeded")
        self.assertEqual(vr.failureCount, 1,
            "Verification failure callback called {} times instead of 1".format(
            vr.failureCount))
Code Example #3
def main():
    # The default Face will connect using a Unix socket, or to "localhost".
    face = Face()

    # Use the system default key chain and certificate name to sign commands.
    #print("key1")
    #keyChain = KeyChain()
    #print("key2")
    identityStorage = MemoryIdentityStorage()
    privateKeyStorage = MemoryPrivateKeyStorage()
    keyChain = KeyChain(IdentityManager(identityStorage, privateKeyStorage),
                        NoVerifyPolicyManager())
    identityName = Name("TestProducer")
    certificateName = keyChain.createIdentityAndCertificate(identityName)
    keyChain.getIdentityManager().setDefaultIdentity(identityName)

    face.setCommandSigningInfo(keyChain, keyChain.getDefaultCertificateName())

    # Also use the default certificate name to sign data packets.
    ubicdn = UbiCDN(keyChain, certificateName)
    prefix = Name("/ubicdn/video")
    dump("Register prefix", prefix.toUri())
    face.registerPrefix(prefix, ubicdn.onInterest, ubicdn.onRegisterFailed)

    while 1:
        #while ubicdn._responseCount < 1:
        face.processEvents()
        # We need to sleep for a few milliseconds so we don't use 100% of the CPU.
        time.sleep(0.01)

    face.shutdown()
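The dump helper used above is project-local and not shown in the excerpt; in PyNDN's example programs it is usually just a small print wrapper, so a plausible stand-in (an assumption, not this project's verified code) is:

def dump(*args):
    # Print all arguments on one line, converting non-strings as needed.
    print(" ".join(str(element) for element in args))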
Code Example #4
File: test_policy_manager.py  Project: mjycom/PyNDN2
    def test_no_verify(self):
        policyManager = NoVerifyPolicyManager()
        identityName = Name('TestValidator/Null').appendVersion(int(time.time()))

        keyChain = KeyChain(self.identityManager, policyManager)
        keyChain.createIdentityAndCertificate(identityName)
        data = Data(Name(identityName).append('data'))
        keyChain.signByIdentity(data, identityName)

        vr = doVerify(policyManager, data)

        self.assertFalse(vr.hasFurtherSteps,
                "NoVerifyPolicyManager returned a ValidationRequest")

        self.assertEqual(vr.failureCount, 0,
            "Verification failed with NoVerifyPolicyManager")
        self.assertEqual(vr.successCount, 1,
            "Verification success called {} times instead of 1".format(
            vr.successCount))
Code Example #5
    def test_no_verify(self):
        policyManager = NoVerifyPolicyManager()
        identityName = Name('TestValidator/Null').appendVersion(int(time.time()))

        keyChain = KeyChain(self.identityManager, policyManager)
        keyChain.createIdentityAndCertificate(identityName)
        data = Data(Name(identityName).append('data'))
        keyChain.signByIdentity(data, identityName)

        vr = doVerify(policyManager, data)

        self.assertFalse(vr.hasFurtherSteps,
                "NoVerifyPolicyManager returned a ValidationRequest")

        self.assertEqual(vr.failureCount, 0,
            "Verification failed with NoVerifyPolicyManager")
        self.assertEqual(vr.successCount, 1,
            "Verification success called {} times instead of 1".format(
            vr.successCount))
Code Example #6
def main():
    # Params parsing
    parser = argparse.ArgumentParser(
        description=
        'bms gateway node to Parse or follow Cascade Datahub log and publish to MiniNdn.'
    )
    parser.add_argument('filename', help='datahub log file')
    parser.add_argument('-f',
                        dest='follow',
                        action='store_true',
                        help='follow (tail -f) the log file')
    parser.add_argument('--namespace',
                        default='/ndn/nist/bms',
                        help='root of ndn name, no trailing slash')

    parser.add_argument('--image',
                        dest='image',
                        default='../simulator/res/floor2.jpg',
                        help='the floor plan to publish')
    parser.add_argument('--location',
                        dest='location',
                        default='../simulator/res/locations.txt',
                        help='the locations file to publish')

    args = parser.parse_args()

    # Setup logging
    logger = Logger()
    logger.prepareLogging()

    # Face, KeyChain, memoryContentCache and asio event loop initialization
    loop = asyncio.get_event_loop()
    face = ThreadsafeFace(loop)

    keyChain = KeyChain(IdentityManager(BasicIdentityStorage()))
    # For the gateway publisher, we create one identity for it to sign nfd command interests
    certificateName = keyChain.createIdentityAndCertificate(
        Name("/ndn/nist/gateway"))
    face.setCommandSigningInfo(keyChain, certificateName)
    cache = MemoryContentCache(face)

    dataPublisher = DataPublisher(face, keyChain, loop, cache, args.namespace,
                                  args.image, args.location)
    cache.registerPrefix(Name(args.namespace), dataPublisher.onRegisterFailed,
                         dataPublisher.onDataNotFound)
    loop.run_until_complete(dataPublisher.publishFloorImage())

    if args.follow:
        #asyncio.async(loop.run_in_executor(executor, followfile, args.filename, args.namespace, cache))
        loop.run_until_complete(dataPublisher.followfile(args.filename))
    else:
        loop.run_until_complete(dataPublisher.readfile(args.filename))

    loop.run_forever()
    face.shutdown()
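The publisher main() excerpts in this and the following examples omit their imports. Under the assumption that they target PyNDN2's standard module layout, the required imports would look roughly like this (Logger and DataPublisher are project-local modules whose paths are not shown in the excerpts):

import argparse
import asyncio

from pyndn import Name
from pyndn.threadsafe_face import ThreadsafeFace
from pyndn.security import KeyChain
from pyndn.security.identity.identity_manager import IdentityManager
from pyndn.security.identity.basic_identity_storage import BasicIdentityStorage
from pyndn.security.identity.file_private_key_storage import FilePrivateKeyStorage
from pyndn.util.memory_content_cache import MemoryContentCache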
Code Example #7
File: bms_publisher.py  Project: zhehaowang/bms-node
def main():
    # Params parsing
    parser = argparse.ArgumentParser(
        description=
        'bms gateway node to Parse or follow Cascade Datahub log and publish to MiniNdn.'
    )
    parser.add_argument('filename', help='datahub log file')
    parser.add_argument('-f',
                        dest='follow',
                        action='store_true',
                        help='follow (tail -f) the log file')
    parser.add_argument('--namespace',
                        default='/ndn/edu/ucla/remap/bms',
                        help='root of ndn name, no trailing slash')
    args = parser.parse_args()

    # Setup logging
    logger = Logger()
    logger.prepareLogging()

    # Face, KeyChain, memoryContentCache and asio event loop initialization
    loop = asyncio.get_event_loop()
    face = ThreadsafeFace(loop)

    keyChain = KeyChain(
        IdentityManager(BasicIdentityStorage(), FilePrivateKeyStorage()))
    # For the gateway publisher, we create one identity for it to sign nfd command interests
    certificateName = keyChain.createIdentityAndCertificate(
        Name("/ndn/bms/gateway-publisher"))
    face.setCommandSigningInfo(keyChain, certificateName)
    cache = MemoryContentCache(face)

    dataPublisher = DataPublisher(face, keyChain, loop, cache, args.namespace)
    cache.registerPrefix(Name(args.namespace), dataPublisher.onRegisterFailed,
                         dataPublisher.onDataNotFound)

    # Parse csv to decide the mapping between sensor JSON -> <NDN name, data type>
    dataPublisher.populateSensorNDNDictFromCSV(
        'bms-sensor-data-types-sanitized.csv')

    if args.follow:
        #asyncio.async(loop.run_in_executor(executor, followfile, args.filename, args.namespace, cache))
        loop.run_until_complete(dataPublisher.followfile(args.filename))
    else:
        loop.run_until_complete(dataPublisher.readfile(args.filename))

    loop.run_forever()
    face.shutdown()
Code Example #8
File: bms_publisher.py  Project: zhehaowang/bms-node
def main():
    # Params parsing
    parser = argparse.ArgumentParser(
        description="bms gateway node to Parse or follow Cascade Datahub log and publish to MiniNdn."
    )
    parser.add_argument("filename", help="datahub log file")
    parser.add_argument("-f", dest="follow", action="store_true", help="follow (tail -f) the log file")
    parser.add_argument("--namespace", default="/ndn/edu/ucla/remap/bms", help="root of ndn name, no trailing slash")
    args = parser.parse_args()

    # Setup logging
    logger = Logger()
    logger.prepareLogging()

    # Face, KeyChain, memoryContentCache and asio event loop initialization
    loop = asyncio.get_event_loop()
    face = ThreadsafeFace(loop)

    keyChain = KeyChain(IdentityManager(BasicIdentityStorage(), FilePrivateKeyStorage()))
    # For the gateway publisher, we create one identity for it to sign nfd command interests
    certificateName = keyChain.createIdentityAndCertificate(Name("/ndn/bms/gateway-publisher"))
    face.setCommandSigningInfo(keyChain, certificateName)
    cache = MemoryContentCache(face)

    dataPublisher = DataPublisher(face, keyChain, loop, cache, args.namespace)
    cache.registerPrefix(Name(args.namespace), dataPublisher.onRegisterFailed, dataPublisher.onDataNotFound)

    # Parse csv to decide the mapping between sensor JSON -> <NDN name, data type>
    dataPublisher.populateSensorNDNDictFromCSV("bms-sensor-data-types-sanitized.csv")

    if args.follow:
        # asyncio.async(loop.run_in_executor(executor, followfile, args.filename, args.namespace, cache))
        loop.run_until_complete(dataPublisher.followfile(args.filename))
    else:
        loop.run_until_complete(dataPublisher.readfile(args.filename))

    loop.run_forever()
    face.shutdown()
Code Example #9
File: bms_node.py  Project: zhehaowang/bms-node
class BmsNode(object):
    def __init__(self):
        self.conf = None
        self._keyChain = None
        self._certificateName = None

        self._dataQueue = dict()
        self._memoryContentCache = None
        self._identityName = None

        self._aggregation = Aggregation()

    def setConfiguration(self, fileName, trustSchemaFile):
        self.conf = BoostInfoParser()
        self.conf.read(fileName)
        self._identityName = Name(self.conf.getNodePrefix())
        self._trustSchemaFile = trustSchemaFile

    def onDataNotFound(self, prefix, interest, face, interestFilterId, filter):
        #print('Data not found for ' + interest.getName().toUri())
        return

    def startPublishing(self):
        # One-time security setup
        self.prepareLogging()

        privateKeyStorage = FilePrivateKeyStorage()
        identityStorage = BasicIdentityStorage()
        policyManager = ConfigPolicyManager(self._trustSchemaFile)

        self._keyChain = KeyChain(IdentityManager(identityStorage, privateKeyStorage), policyManager)
        self._certificateName = self._keyChain.createIdentityAndCertificate(self._identityName)

        print("My Identity name: " + self._identityName.toUri())
        print("My certificate name: " + self._certificateName.toUri())
        certificateData = self._keyChain.getIdentityManager()._identityStorage.getCertificate(self._certificateName, True)
        print("My certificate string: " + b64encode(certificateData.wireEncode().toBuffer()))
        # self._keyChain.getIdentityCertificate(self._certificateName).)

        self._loop = asyncio.get_event_loop()
        self._face = ThreadsafeFace(self._loop)
        self._keyChain.setFace(self._face)

        self._face.setCommandSigningInfo(self._keyChain, self._certificateName)
        self._memoryContentCache = MemoryContentCache(self._face)

        # We should only ask for cert to be signed upon the first run of a certain aggregator
        if DO_CERT_SETUP:
            if (KeyLocator.getFromSignature(certificateData.getSignature()).getKeyName().equals(self._certificateName.getPrefix(-1))):
                # Need to configure for mini-ndn; aggregation node runs outside of mini-ndn first so that signed cert get installed and mini-ndn won't ask for this again
                print("certificate " + self._certificateName.toUri() + " asking for signature")
                response = urllib2.urlopen("http://192.168.56.1:5000/bms-cert-hack?cert=" + b64encode(certificateData.wireEncode().toBuffer()) + "&cert_prefix=" + self._identityName.toUri() + '&subject_name=' + self._identityName.toUri()).read()
                
                signedCertData = Data()
                signedCertData.wireDecode(Blob(b64decode(response)))

                self._memoryContentCache.add(signedCertData)
                cmdline = ['ndnsec-install-cert', '-']
                p = subprocess.Popen(cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
                # desanitize + sign in GET request
                cert, err = p.communicate(response)
                if p.returncode != 0:
                    raise RuntimeError("ndnsec-install-cert error")
            else:
                self._memoryContentCache.add(certificateData)
        else:
            self._memoryContentCache.add(certificateData)

        dataNode = self.conf.getDataNode()
        childrenNode = self.conf.getChildrenNode()

        self._memoryContentCache.registerPrefix(Name(self._identityName), self.onRegisterFailed, self.onDataNotFound)

        # For each type of data, we refresh each type of aggregation according to the interval in the configuration
        for i in range(len(dataNode.subtrees)):
            dataType = dataNode.subtrees.keys()[i]
            aggregationParams = self.conf.getProducingParamsForAggregationType(dataNode.subtrees.items()[i][1])

            if childrenNode == None:
                self._dataQueue[dataType] = DataQueue(None, None, None)
                self.generateData(dataType, 2, 0)

            for aggregationType in aggregationParams:
                childrenList = OrderedDict()
                if childrenNode != None:

                    for j in range(len(childrenNode.subtrees)):
                        if dataType in childrenNode.subtrees.items()[j][1].subtrees['data'].subtrees:
                            if aggregationType in childrenNode.subtrees.items()[j][1].subtrees['data'].subtrees[dataType].subtrees:
                                childrenList[childrenNode.subtrees.items()[j][0]] = self.conf.getProducingParamsForAggregationType(childrenNode.subtrees.items()[j][1].subtrees['data'].subtrees[dataType])[aggregationType]

                self.startPublishingAggregation(aggregationParams[aggregationType], childrenList, dataType, aggregationType)
        return

    def startPublishingAggregation(self, params, childrenList, dataType, aggregationType):
        if __debug__:
            print('Start publishing for ' + dataType + '-' + aggregationType)
        
        # aggregation calculating and publishing mechanism
        publishingPrefix = Name(self._identityName).append(DATA_COMPONENT).append(dataType).append(AGGREGATION_COMPONENT).append(aggregationType)
        self._dataQueue[dataType + aggregationType] = DataQueue(params, childrenList, publishingPrefix)

        if len(childrenList.keys()) == 0:
            # TODO: make start_time optional for leaf nodes
            self._loop.call_later(int(params['producer_interval']), self.calculateAggregation, dataType, aggregationType, childrenList, int(params['start_time']), int(params['producer_interval']), publishingPrefix, True)
        else:
            # express interest for children who produce the same data and aggregation type
            for childName in childrenList.keys():
                name = Name(self._identityName).append(childName).append(DATA_COMPONENT).append(dataType).append(AGGREGATION_COMPONENT).append(aggregationType)
                interest = Interest(name)
                # if start_time is specified, we ask for data starting at start_time; 
                # if not, we ask for the right most child and go from there
                if ('start_time' in childrenList[childName]):
                    endTime = int(childrenList[childName]['start_time']) + int(childrenList[childName]['producer_interval'])
                    interest.getName().append(str(childrenList[childName]['start_time'])).append(str(endTime))
                else:
                    # TODO: For now we are playing with historical data, for each run we don't want to miss any data, thus we start with leftMost
                    interest.setChildSelector(0)
                    interest.setMustBeFresh(True)
                interest.setInterestLifetimeMilliseconds(DEFAULT_INTEREST_LIFETIME)
                if __debug__:
                    print('  Issue interest: ' + interest.getName().toUri())
                self._face.expressInterest(interest, self.onData, self.onTimeout)

        return

    # TODO: once one calculation's decided a child has not answered, we should do another calculation
    def calculateAggregation(self, dataType, aggregationType, childrenList, startTime, interval, publishingPrefix, repeat = False):
        doCalc = True
        dataList = []

        # TODO: an intermediate node cannot produce raw data for now
        if len(childrenList.keys()) != 0:
            for childName in childrenList.keys():
                dataDictKey = self.getDataDictKey(startTime, (startTime + interval), childName)
                if dataDictKey in self._dataQueue[dataType + aggregationType]._dataDict:
                    data = self._dataQueue[dataType + aggregationType]._dataDict[dataDictKey]
                    dataList.append(float(data.getContent().toRawStr()))
                else:
                    #print('Child ' + childName + ' has not replied yet')
                    doCalc = False
                    break
        else:
            for inst in self._dataQueue[dataType]._dataDict.keys():
                if int(inst) >= startTime and int(inst) < startTime + interval:
                    dataList.append(self._dataQueue[dataType]._dataDict[inst])
        if doCalc:
            content = self._aggregation.getAggregation(aggregationType, dataList)
            if content:
                publishData = Data(Name(publishingPrefix).append(str(startTime)).append(str(startTime + interval)))
                publishData.setContent(str(content))
                publishData.getMetaInfo().setFreshnessPeriod(DEFAULT_DATA_LIFETIME)
                self._keyChain.sign(publishData, self._certificateName)
                self._memoryContentCache.add(publishData)
                for childName in childrenList.keys():
                    dataDictKey = self.getDataDictKey(startTime, (startTime + interval), childName)
                    if dataDictKey in self._dataQueue[dataType + aggregationType]._dataDict:
                        del self._dataQueue[dataType + aggregationType]._dataDict[dataDictKey]
                if __debug__:
                    print("Produced: " + publishData.getName().toUri() + "; " + publishData.getContent().toRawStr())

        # repetition of this function only happens for raw data producer, otherwise calculateAggregation is called by each onData
        if repeat:
            self._loop.call_later(interval, self.calculateAggregation, dataType, aggregationType, childrenList, startTime + interval, interval, publishingPrefix, repeat)
        return

    def generateData(self, dataType, interval, startTime):
        self._dataQueue[dataType]._dataDict[str(startTime)] = random.randint(0,9)
        self._loop.call_later(interval, self.generateData, dataType, interval, startTime + interval)
        return

    def onRegisterFailed(self, prefix):
        raise RuntimeError("Register failed for prefix", prefix.toUri())

    def onVerified(self, data):
        print('Data verified: ' + data.getName().toUri())
        return

    def onVerifyFailed(self, data):
        print('Data verification failed: ' + data.getName().toUri())
        return

    def onData(self, interest, data):
        self._keyChain.verifyData(data, self.onVerified, self.onVerifyFailed)

        dataName = data.getName()
        dataQueue = None

        if __debug__:
            print("Got data: " + dataName.toUri() + "; " + data.getContent().toRawStr())
        for i in range(0, len(dataName)):
            if dataName.get(i).toEscapedString() == AGGREGATION_COMPONENT:
                dataType = dataName.get(i - 1).toEscapedString()
                aggregationType = dataName.get(i + 1).toEscapedString()
                
                startTime = int(dataName.get(i + 2).toEscapedString())
                endTime = int(dataName.get(i + 3).toEscapedString())
                childName = dataName.get(i - 3).toEscapedString()

                dataAndAggregationType = dataType + aggregationType
                
                dataDictKey = self.getDataDictKey(startTime, endTime, childName)
                dataQueue = self._dataQueue[dataAndAggregationType]
                dataQueue._dataDict[dataDictKey] = data
                break

        # TODO: check what if interval/starttime is misconfigured
        if dataQueue:
            self.calculateAggregation(dataType, aggregationType, dataQueue._childrenList, startTime, endTime - startTime, dataQueue._publishingPrefix)

        # Always ask for the next piece of data when we receive this one; assumes interval does not change; this also assumes there are no more components after endTime
        #newInterestName = dataName.getPrefix(i + 2).append(str(endTime)).append(str(endTime + (endTime - startTime)))
        
        # We don't expect aggregated data name to be continuous within our given time window, so we ask with exclusion instead
        newInterestName = dataName.getPrefix(i + 2)
        newInterest = Interest(interest)
        newInterest.setName(newInterestName)
        newInterest.setChildSelector(0)

        exclude = Exclude()
        exclude.appendAny()
        exclude.appendComponent(dataName.get(i + 2))
        newInterest.setExclude(exclude)

        self._face.expressInterest(newInterest, self.onData, self.onTimeout)
        if __debug__:
            print("  issue interest: " + interest.getName().toUri())

        return

    def onTimeout(self, interest):
        if __debug__:
            print("  interest timeout: " + interest.getName().toUri() + "; reexpress")
        self._face.expressInterest(interest, self.onData, self.onTimeout)
        return

    def stop(self):
        self._loop.stop()
        if __debug__:
            print("Stopped")
        return
    
    # This creation of dataDictKey means parent and child should not have the same name            
    @staticmethod
    def getDataDictKey(startTime, endTime, childName):
        return str(startTime) + '/' + str(endTime) + '/' + childName

##
# Logging
##
    def prepareLogging(self):
        self.log = logging.getLogger(str(self.__class__))
        self.log.setLevel(logging.DEBUG)
        logFormat = "%(asctime)-15s %(name)-20s %(funcName)-20s (%(levelname)-8s):\n\t%(message)s"
        self._console = logging.StreamHandler()
        self._console.setFormatter(logging.Formatter(logFormat))
        self._console.setLevel(logging.INFO)
        # without this, a lot of ThreadsafeFace errors get swallowed up
        logging.getLogger("trollius").addHandler(self._console)
        self.log.addHandler(self._console)

    def setLogLevel(self, level):
        """
        Set the log level that will be output to standard error
        :param level: A log level constant defined in the logging module (e.g. logging.INFO) 
        """
        self._console.setLevel(level)

    def getLogger(self):
        """
        :return: The logger associated with this node
        :rtype: logging.Logger
        """
        return self.log
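The DataQueue class used throughout BmsNode is project-local and not shown in this excerpt. Judging only from how it is accessed above, a minimal stand-in could be the following (an assumption about its shape, not the project's actual definition):

class DataQueue(object):
    def __init__(self, params, childrenList, publishingPrefix):
        self._params = params                      # producing parameters for this aggregation
        self._childrenList = childrenList          # OrderedDict of child name -> parameters
        self._publishingPrefix = publishingPrefix  # Name under which results are published
        self._dataDict = dict()                    # dataDictKey (or timestamp) -> received data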
Code Example #10
File: dpu.py  Project: zhtaoxiang/omh-dpu-dvu
class DPU(object):
    def __init__(self,
                 face,
                 identityName,
                 groupName,
                 catalogPrefix,
                 rawDataPrefix,
                 producerDbFilePath,
                 consumerDbFilePath,
                 encrypted=False):
        self.face = face
        # Set up the keyChain.
        identityStorage = BasicIdentityStorage()
        privateKeyStorage = FilePrivateKeyStorage()
        self.keyChain = KeyChain(
            IdentityManager(identityStorage, privateKeyStorage),
            NoVerifyPolicyManager())
        self.identityName = Name(identityName)
        self.groupName = Name(groupName)
        self.rawDataPrefix = rawDataPrefix
        self.catalogPrefix = catalogPrefix

        self.certificateName = self.keyChain.createIdentityAndCertificate(
            self.identityName)
        self.face.setCommandSigningInfo(self.keyChain, self.certificateName)

        # Set up the memoryContentCache
        self.memoryContentCache = MemoryContentCache(self.face)
        self.memoryContentCache.registerPrefix(self.identityName,
                                               self.onRegisterFailed,
                                               self.onDataNotFound)

        self.producerPrefix = Name(identityName)
        self.producerSuffix = Name()

        self.producer = DPUProducer(face, self.memoryContentCache,
                                    self.producerPrefix, self.producerSuffix,
                                    self.keyChain, self.certificateName,
                                    producerDbFilePath)

        # Put own (consumer) certificate in memoryContentCache
        consumerKeyName = IdentityCertificate.certificateNameToPublicKeyName(
            self.certificateName)
        consumerCertificate = identityStorage.getCertificate(
            self.certificateName, True)

        # TODO: request that this DPU be added as a trusted group member

        self.remainingTasks = dict()

        try:
            os.remove(consumerDbFilePath)
        except OSError:
            # no such file
            pass

        self.consumer = Consumer(face, self.keyChain, self.groupName,
                                 consumerKeyName,
                                 Sqlite3ConsumerDb(consumerDbFilePath))

        # TODO: Read the private key to decrypt d-key...this may or may not be ideal
        base64Content = None
        with open(
                privateKeyStorage.nameTransform(consumerKeyName.toUri(),
                                                ".pri")) as keyFile:
            base64Content = keyFile.read()
        der = Blob(base64.b64decode(base64Content), False)
        self.consumer.addDecryptionKey(consumerKeyName, der)

        self.memoryContentCache.add(consumerCertificate)

        self.encrypted = encrypted

        self.rawData = []

        self.catalogFetchFinished = False
        self.remainingData = 0
        return

    def onDataNotFound(self, prefix, interest, face, interestFilterId, filter):
        print "Data not found for interest: " + interest.getName().toUri()

        if interest.getName().get(-3).toEscapedString() in ("bout", "genericfunctions"):
            if interest.getName().toUri() in self.remainingTasks:
                # We are already trying to process this task, so we don't add it to the list of tasks
                pass
            else:
                self.remainingTasks[interest.getName().toUri()] = "in-progress"
        else:
            print "Got unexpected interest: " + interest.getName().toUri()

        # .../SAMPLE/<timestamp>
        timestamp = interest.getName().get(-1)
        catalogInterest = Interest(self.catalogPrefix)

        # Traverse catalogs in range from leftmost child
        catalogInterest.setChildSelector(0)
        catalogInterest.setMustBeFresh(True)
        catalogInterest.setInterestLifetimeMilliseconds(4000)

        exclude = Exclude()
        exclude.appendAny()
        exclude.appendComponent(timestamp)
        catalogInterest.setExclude(exclude)
        self.face.expressInterest(catalogInterest, self.onCatalogData,
                                  self.onCatalogTimeout)
        print "Expressed catalog interest " + catalogInterest.getName().toUri()

        return

    def onRegisterFailed(self, prefix):
        print "Prefix registration failed"
        return

    def onCatalogData(self, interest, data):
        # Find the next catalog
        print "Received catalog data " + data.getName().toUri()

        catalogTimestamp = data.getName().get(-2)
        exclude = Exclude()
        exclude.appendAny()
        exclude.appendComponent(catalogTimestamp)

        nextCatalogInterest = Interest(interest.getName())
        nextCatalogInterest.setExclude(exclude)
        nextCatalogInterest.setChildSelector(0)
        nextCatalogInterest.setMustBeFresh(True)
        nextCatalogInterest.setInterestLifetimeMilliseconds(2000)
        self.face.expressInterest(nextCatalogInterest, self.onCatalogData,
                                  self.onCatalogTimeout)
        print "Expressed catalog interest " + nextCatalogInterest.getName(
        ).toUri()

        # We ignore the version in the catalog
        if self.encrypted:
            # Assumption: consume the catalog data by its own name; the excerpt's
            # original `contentName` is not defined in this scope.
            self.consumer.consume(data.getName(), self.onCatalogConsumeComplete,
                                  self.onConsumeFailed)
        else:
            self.onCatalogConsumeComplete(data, data.getContent())

    def onCatalogConsumeComplete(self, data, result):
        print "Consume complete for catalog name: " + data.getName().toUri()
        catalog = json.loads(result.toRawStr())

        for timestamp in catalog:
            # For encrypted data, timestamp format will have to change
            rawDataName = Name(self.rawDataPrefix).append(
                Schedule.toIsoString(timestamp * 1000))
            dataInterest = Interest(rawDataName)
            dataInterest.setInterestLifetimeMilliseconds(2000)
            dataInterest.setMustBeFresh(True)
            self.face.expressInterest(dataInterest, self.onRawData,
                                      self.onRawDataTimeout)
            self.remainingData += 1
        return

    # TODO: This logic for checking 'if I have everything, and should proceed with all pending tasks' is not correct for the long run
    def onRawDataConsumeComplete(self, data, result):
        resultObject = json.loads(result.toRawStr())
        # TODO: the original data for timestamp should be an array
        self.rawData.append(resultObject)

        self.remainingData -= 1
        print "Remaing data number: " + str(self.remainingData)

        if self.remainingData == 0 and self.catalogFetchFinished:
            self.produce()

        # TODO: Unideal distanceTo production
        for item in self.remainingTasks:
            # Use the escaped string form so the substring checks below work on str values
            username = data.getName().get(2).toEscapedString()
            timestamp = Name(item).get(-1).toEscapedString()
            if "distanceTo" in item and username in item and timestamp in data.getName().toUri():
                # We want this distanceTo
                destCoordinate = Name(item).get(-2).toEscapedString()
                # Parse "(x,y)": strip each coordinate string, not the list returned by split()
                coordinates = [c.strip() for c in destCoordinate[1:-1].split(",")]
                x = int(coordinates[0])
                y = int(coordinates[1])
                dataObject = json.dumps({
                    "distanceTo":
                    math.sqrt((x - resultObject["lat"]) *
                              (x - resultObject["lat"]) +
                              (y - resultObject["lng"]) *
                              (y - resultObject["lng"]))
                })

                data = Data(data)
                data.getMetaInfo().setFreshnessPeriod(40000000000)
                data.setContent(dataObject)
                self.keyChain.sign(data)

                # If the interest's still within lifetime, this will satisfy the interest
                self.memoryContentCache.add(data)

        return

    def onConsumeFailed(self, code, message):
        print "Consume error " + str(code) + ": " + message

    def onRawData(self, interest, data):
        print "Raw data received: " + data.getName().toUri()

        # TODO: Quick hack for deciding if the data is encrypted
        if "zhehao" in data.getName().toUri():
            self.consumer.consume(data.getName(),
                                  self.onRawDataConsumeComplete,
                                  self.onConsumeFailed)
        else:
            print "raw data consume complete"
            self.onRawDataConsumeComplete(data, data.getContent())

        # if self.encrypted:
        #     self.consumer.consume(data.getName(), self.onRawDataConsumeComplete, self.onConsumeFailed)
        # else:
        #     self.onRawDataConsumeComplete(data, data.getContent())

    def onCatalogTimeout(self, interest):
        print "Catalog times out: " + interest.getName().toUri()
        # TODO: 1 timeout would result in this dpu thinking that catalog fetching's done!

        self.catalogFetchFinished = True
        if self.remainingData == 0:
            self.produce()
        return

    def onRawDataTimeout(self, interest):
        print "Raw data times out: " + interest.getName().toUri()
        return

    # TODO: This logic for checking 'if I have everything, and should proceed with all pending tasks' is not correct for the long run
    def produce(self):
        # Produce the bounding box
        print "ready to produce"
        maxLong = -3600
        minLong = 3600
        maxLat = -3600
        minLat = 3600

        if len(self.rawData) == 0:
            print "No raw data as producer input"

        for item in self.rawData:
            print item
            if item["lng"] > maxLong:
                maxLong = item["lng"]
            if item["lng"] < minLong:
                minLong = item["lng"]
            if item["lat"] > maxLat:
                maxLat = item["lat"]
            if item["lat"] < minLat:
                minLat = item["lat"]

        result = json.dumps({
            "maxlng": maxLong,
            "minlng": minLong,
            "maxlat": maxLat,
            "minlat": minLat,
            "size": len(self.rawData)
        })

        if self.encrypted:
            # TODO: replace fixed timestamp for now for produced data, createContentKey as needed
            testTime1 = Schedule.fromIsoString("20160320T080000")
            self.producer.createContentKey(testTime1)
            self.producer.produce(testTime1, result)
        else:
            # Arbitrary produced data lifetime
            data = Data(Name(self.identityName).append("20160320T080000"))
            data.getMetaInfo().setFreshnessPeriod(400000)
            data.setContent(result)

            # If the interest's still within lifetime, this will satisfy the interest
            self.memoryContentCache.add(data)
            print "Produced data with name " + data.getName().toUri()
Code Example #11
File: access_manager.py  Project: zhtaoxiang/dpu-dvu
class AccessManager(object):
    def __init__(self, face, groupManagerName, dataType, dKeyDatabaseFilePath):
        # Set up face
        self.face = face
        #self.loop = eventLoop

        # Set up the keyChain.
        identityStorage = MemoryIdentityStorage()
        privateKeyStorage = MemoryPrivateKeyStorage()
        self.keyChain = KeyChain(
            IdentityManager(identityStorage, privateKeyStorage),
            NoVerifyPolicyManager())

        self.certificateName = self.keyChain.createIdentityAndCertificate(
            groupManagerName)

        self.dKeyDatabaseFilePath = dKeyDatabaseFilePath

        self.manager = GroupManager(
            groupManagerName, dataType,
            Sqlite3GroupManagerDb(self.dKeyDatabaseFilePath), 2048, 1,
            self.keyChain)

        self.memoryContentCache = MemoryContentCache(self.face)

        self.memoryContentCache.registerPrefix(groupManagerName,
                                               self.onRegisterFailed,
                                               self.onDataNotFound)

        self.needToPublishGroupKeys = False
        return

    def onRepoData(self, interest, data):
        #print "received repo data: " + interest.getName().toUri()
        return

    def onRepoTimeout(self, interest):
        #print "repo command times out: " + interest.getName().getPrefix(-1).toUri()
        return

    def addSchedule(self, scheduleName, managerStartDate, managerEndDate,
                    managerStartHour, managerEndHour):
        schedule = Schedule()
        interval = RepetitiveInterval(Schedule.fromIsoString(managerStartDate),
                                      Schedule.fromIsoString(managerEndDate),
                                      managerStartHour, managerEndHour, 1,
                                      RepetitiveInterval.RepeatUnit.DAY)
        schedule.addWhiteInterval(interval)

        self.manager.addSchedule(scheduleName, schedule)

    def onMemberCertificateData(self, interest, data, accessInterest):
        print "Member certificate with name retrieved: " + data.getName(
        ).toUri() + "; member added to group!"
        self.manager.addMember("schedule1", data)
        self.needToPublishGroupKeys = True

        accessResponse = Data(accessInterest.getName())
        accessResponse.setContent("granted")
        self.face.putData(accessResponse)

    def onMemberCertificateTimeout(self, interest, accessInterest):
        print "Member certificate interest times out: " + interest.getName(
        ).toUri()
        newInterest = Interest(interest)
        newInterest.refreshNonce()
        self.face.expressInterest(
            newInterest,
            lambda memberInterest, memberData: self.onMemberCertificateData(
                memberInterest, memberData, accessInterest),
            lambda memberInterest: self.onMemberCertificateTimeout(
                memberInterest, accessInterest))
        return

    def publishGroupKeys(self, timeStr):
        timePoint = Schedule.fromIsoString(timeStr)
        print timeStr
        result = self.manager.getGroupKey(timePoint)

        # The first is group public key, E-key
        # The rest are group private keys encrypted with each member's public key, D-key
        for i in range(0, len(result)):
            self.memoryContentCache.add(result[i])
            self.initiateContentStoreInsertion(
                "/ndn/edu/ucla/remap/ndnfit/repo", result[i])
            print "Publish key name: " + str(
                i) + " " + result[i].getName().toUri()
            print "key content: " + str(result[i].getContent().toBytes())

        self.needToPublishGroupKeys = False

    def onDataNotFound(self, prefix, interest, face, interestFilterId, filter):
        print "Data not found for interest: " + interest.getName().toUri()
        if interest.getExclude().size() > 0:
            print "Interest has exclude: " + interest.getExclude().toUri()
        return

    def onRegisterFailed(self, prefix):
        print "Prefix registration failed"
        return

    def initiateContentStoreInsertion(self, repoCommandPrefix, data):
        fetchName = data.getName()
        parameter = repo_command_parameter_pb2.RepoCommandParameterMessage()
        # Add the Name.
        for i in range(fetchName.size()):
            parameter.repo_command_parameter.name.component.append(
                fetchName[i].getValue().toBytes())

        # Create the command interest.
        interest = Interest(
            Name(repoCommandPrefix).append("insert").append(
                Name.Component(ProtobufTlv.encode(parameter))))
        self.face.makeCommandInterest(interest)

        self.face.expressInterest(interest, self.onRepoData,
                                  self.onRepoTimeout)
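Driver code for this access manager might look roughly like the following; the group name, data type, database path, schedule dates, and key-publication timestamp are all illustrative assumptions:

import time
from pyndn import Face, Name

face = Face()
manager = AccessManager(face,
                        groupManagerName=Name("/org/openmhealth/zhehao/read"),
                        dataType=Name("fitness"),
                        dKeyDatabaseFilePath="policy_config/manager-d-key.db")
manager.addSchedule("schedule1", "20160320T000000", "20170320T000000", 8, 18)
manager.publishGroupKeys("20160320T080000")

while True:
    face.processEvents()
    time.sleep(0.01)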
Code Example #12
class TestConsumer(object):
    def __init__(self, face):
        # Set up face
        self.face = face

        self.databaseFilePath = "policy_config/test_consumer.db"
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            # no such file
            pass

        self.groupName = Name("/org/openmhealth/zhehao")

        # Set up the keyChain.
        identityStorage = BasicIdentityStorage()
        privateKeyStorage = FilePrivateKeyStorage()
        self.keyChain = KeyChain(
          IdentityManager(identityStorage, privateKeyStorage),
          NoVerifyPolicyManager())
        # Authorized identity
        identityName = Name("/org/openmhealth/dvu-python-3")
        # Unauthorized identity
        #identityName = Name("/org/openmhealth/dvu-python-1")
        
        self.certificateName = self.keyChain.createIdentityAndCertificate(identityName)
        
        self.face.setCommandSigningInfo(self.keyChain, self.certificateName)

        consumerKeyName = IdentityCertificate.certificateNameToPublicKeyName(self.certificateName)
        consumerCertificate = identityStorage.getCertificate(self.certificateName)
        self.consumer = Consumer(
          face, self.keyChain, self.groupName, identityName,
          Sqlite3ConsumerDb(self.databaseFilePath))

        # TODO: Read the private key to decrypt d-key...this may or may not be ideal
        base64Content = None
        with open(privateKeyStorage.nameTransform(consumerKeyName.toUri(), ".pri")) as keyFile:
            print privateKeyStorage.nameTransform(consumerKeyName.toUri(), ".pri")
            base64Content = keyFile.read()
            #print base64Content
        der = Blob(base64.b64decode(base64Content), False)
        self.consumer.addDecryptionKey(consumerKeyName, der)

        self.memoryContentCache = MemoryContentCache(self.face)
        self.memoryContentCache.registerPrefix(identityName, self.onRegisterFailed, self.onDataNotFound)
        self.memoryContentCache.add(consumerCertificate)

        accessRequestInterest = Interest(Name(self.groupName).append("read_access_request").append(self.certificateName).appendVersion(int(time.time())))
        self.face.expressInterest(accessRequestInterest, self.onAccessRequestData, self.onAccessRequestTimeout)
        print "Access request interest name: " + accessRequestInterest.getName().toUri()

        self.consumeCatalog = True
        return

    def onAccessRequestData(self, interest, data):
        print "Access request data: " + data.getName().toUri()
        print "Start consuming"
        self.startConsuming()
        return

    def onAccessRequestTimeout(self, interest):
        print "Access request times out: " + interest.getName().toUri()
        print "Assuming certificate sent and D-key generated, start consuming"
        self.startConsuming()
        return

    def startConsuming(self):
        if self.consumeCatalog:
            contentName = Name("/org/openmhealth/zhehao/SAMPLE/fitness/physical_activity/time_location/catalog/20160620T080000")
            self.consumer.consume(contentName, self.onCatalogConsumeComplete, self.onConsumeFailed)
            print "Trying to consume: " + contentName.toUri()
        else:
            contentName = Name("/org/openmhealth/zhehao/SAMPLE/fitness/physical_activity/time_location/")
            dataNum = 60
            baseZFill = 3
            basetimeString = "20160620T080"

            for i in range(0, dataNum):
                timeString = basetimeString + str(i).zfill(baseZFill)
                timeFloat = Schedule.fromIsoString(timeString)

                self.consumer.consume(Name(contentName).append(timeString), self.onConsumeComplete, self.onConsumeFailed)
                print "Trying to consume: " + Name(contentName).append(timeString).toUri()


    def onDataNotFound(self, prefix, interest, face, interestFilterId, filter):
        print "Data not found for interest: " + interest.getName().toUri()
        return

    def onRegisterFailed(self, prefix):
        print "Prefix registration failed: " + prefix.toUri()
        return

    def onCatalogConsumeComplete(self, data, result):
        print "Consume complete for catalog: " + data.getName().toUri()
        resultObject = json.loads(result.toRawStr())

        contentName = Name("/org/openmhealth/zhehao/SAMPLE/fitness/physical_activity/time_location/")
        
        for i in range(0, len(resultObject)):
            timeString = Schedule.toIsoString(int(resultObject[i]) * 1000)
            self.consumer.consume(Name(contentName).append(timeString), self.onConsumeComplete, self.onConsumeFailed)
            print "Trying to consume: " + Name(contentName).append(timeString).toUri()

    def onConsumeComplete(self, data, result):
        print "Consume complete for data name: " + data.getName().toUri()
        print result
        
        # Test the length of encrypted data

        # dataBlob = data.getContent()
        # dataContent = EncryptedContent()
        # dataContent.wireDecode(dataBlob)
        # encryptedData = dataContent.getPayload()
        # print len(encryptedData)

    # TODO: shouldn't this indicate the consumption of what has failed though
    def onConsumeFailed(self, code, message):
        print "Consume error " + str(code) + ": " + message
Code Example #13
class TestSqlIdentityStorage(ut.TestCase):
    def setUp(self):
        # Reuse the policy_config subdirectory for the temporary SQLite file.
        self.databaseFilePath = "policy_config/test-public-info.db"
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            # no such file
            pass
        self.identityStorage = BasicIdentityStorage(self.databaseFilePath)

        self.identityManager = IdentityManager(self.identityStorage,
                                               FilePrivateKeyStorage())
        self.policyManager = SelfVerifyPolicyManager(self.identityStorage)
        self.keyChain = KeyChain(self.identityManager, self.policyManager)

    def tearDown(self):
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            pass

    def test_identity_create_delete(self):
        identityName = Name('/TestIdentityStorage/Identity').appendVersion(
            int(time.time()))

        certificateName = self.keyChain.createIdentityAndCertificate(
            identityName)
        keyName = IdentityCertificate.certificateNameToPublicKeyName(
            certificateName)

        self.assertTrue(self.identityStorage.doesIdentityExist(identityName),
                        "Identity was not added to IdentityStorage")
        self.assertIsNotNone(keyName, "New identity has no key")
        self.assertTrue(self.identityStorage.doesKeyExist(keyName),
                        "Key was not added to IdentityStorage")
        self.assertIsNotNone(certificateName,
                             "Certificate was not added to IdentityStorage")

        self.keyChain.deleteIdentity(identityName)
        self.assertFalse(self.identityStorage.doesIdentityExist(identityName),
                         "Identity still in IdentityStorage after revoking")
        self.assertFalse(
            self.identityStorage.doesKeyExist(keyName),
            "Key still in IdentityStorage after identity was deletedInfo")
        self.assertFalse(
            self.identityStorage.doesCertificateExist(certificateName),
            "Certificate still in IdentityStorage after identity was deletedInfo"
        )

        with self.assertRaises(SecurityException):
            self.identityManager.getDefaultCertificateNameForIdentity(
                identityName)

    def test_key_create_delete(self):
        identityName = Name('/TestIdentityStorage/Identity').appendVersion(
            int(time.time()))

        keyName1 = self.keyChain.generateRSAKeyPair(identityName, True)
        self.keyChain.getIdentityManager().setDefaultKeyForIdentity(keyName1)

        keyName2 = self.keyChain.generateRSAKeyPair(identityName, False)
        self.assertEqual(
            self.identityManager.getDefaultKeyNameForIdentity(identityName),
            keyName1, "Default key name was changed without explicit request")
        self.assertNotEqual(
            self.identityManager.getDefaultKeyNameForIdentity(identityName),
            keyName2,
            "Newly created key replaced default key without explicit request")

        self.identityStorage.deletePublicKeyInfo(keyName2)

        self.assertFalse(self.identityStorage.doesKeyExist(keyName2))
        self.keyChain.deleteIdentity(identityName)

    def test_key_autocreate_identity(self):
        keyName1 = Name('/TestSqlIdentityStorage/KeyType/RSA/ksk-12345')
        identityName = keyName1[:-1]

        decodedKey = base64.b64decode(RSA_DER)
        self.identityStorage.addKey(keyName1, KeyType.RSA, Blob(decodedKey))
        self.identityStorage.setDefaultKeyNameForIdentity(keyName1)

        self.assertTrue(self.identityStorage.doesKeyExist(keyName1),
                        "Key was not added")
        self.assertTrue(self.identityStorage.doesIdentityExist(identityName),
                        "Identity for key was not automatically created")

        self.assertEqual(
            self.identityManager.getDefaultKeyNameForIdentity(identityName),
            keyName1, "Default key was not set on identity creation")

        with self.assertRaises(SecurityException):
            self.identityStorage.getDefaultCertificateNameForKey(keyName1)

        with self.assertRaises(SecurityException):
            # we have no private key for signing
            self.identityManager.selfSign(keyName1)

        with self.assertRaises(SecurityException):
            self.identityStorage.getDefaultCertificateNameForKey(keyName1)

        with self.assertRaises(SecurityException):
            self.identityManager.getDefaultCertificateNameForIdentity(
                identityName)

        keyName2 = self.identityManager.generateRSAKeyPairAsDefault(
            identityName)
        cert = self.identityManager.selfSign(keyName2)
        self.identityManager.addCertificateAsIdentityDefault(cert)

        certName1 = self.identityManager.getDefaultCertificateNameForIdentity(
            identityName)
        certName2 = self.identityStorage.getDefaultCertificateNameForKey(
            keyName2)

        self.assertEqual(
            certName1, certName2,
            "Key-certificate mapping and identity-certificate mapping are not consistent"
        )

        self.keyChain.deleteIdentity(identityName)
        self.assertFalse(self.identityStorage.doesKeyExist(keyName1))

    def test_certificate_add_delete(self):
        identityName = Name('/TestIdentityStorage/Identity').appendVersion(
            int(time.time()))

        self.identityManager.createIdentityAndCertificate(
            identityName, KeyChain.DEFAULT_KEY_PARAMS)
        keyName1 = self.identityManager.getDefaultKeyNameForIdentity(
            identityName)
        cert2 = self.identityManager.selfSign(keyName1)
        self.identityStorage.addCertificate(cert2)
        certName2 = cert2.getName()

        certName1 = self.identityManager.getDefaultCertificateNameForIdentity(
            identityName)
        self.assertNotEqual(
            certName1, certName2,
            "New certificate was set as default without explicit request")

        self.identityStorage.deleteCertificateInfo(certName1)
        self.assertTrue(self.identityStorage.doesCertificateExist(certName2))
        self.assertFalse(self.identityStorage.doesCertificateExist(certName1))

        self.keyChain.deleteIdentity(identityName)
        self.assertFalse(self.identityStorage.doesCertificateExist(certName2))

    def test_stress(self):
        # ndn-cxx/tests/unit-tests/security/test-sec-public-info-sqlite3.cpp
        identityName = Name("/TestSecPublicInfoSqlite3/Delete").appendVersion(
            int(time.time()))

        # ndn-cxx returns the cert name, but the IdentityManager docstring
        # specifies a key
        certName1 = self.keyChain.createIdentityAndCertificate(identityName)
        keyName1 = IdentityCertificate.certificateNameToPublicKeyName(
            certName1)
        keyName2 = self.keyChain.generateRSAKeyPairAsDefault(identityName)

        cert2 = self.identityManager.selfSign(keyName2)
        certName2 = cert2.getName()
        self.identityManager.addCertificateAsDefault(cert2)

        keyName3 = self.keyChain.generateRSAKeyPairAsDefault(identityName)
        cert3 = self.identityManager.selfSign(keyName3)
        certName3 = cert3.getName()
        self.identityManager.addCertificateAsDefault(cert3)

        cert4 = self.identityManager.selfSign(keyName3)
        self.identityManager.addCertificateAsDefault(cert4)
        certName4 = cert4.getName()

        cert5 = self.identityManager.selfSign(keyName3)
        self.identityManager.addCertificateAsDefault(cert5)
        certName5 = cert5.getName()

        self.assertTrue(self.identityStorage.doesIdentityExist(identityName))
        self.assertTrue(self.identityStorage.doesKeyExist(keyName1))
        self.assertTrue(self.identityStorage.doesKeyExist(keyName2))
        self.assertTrue(self.identityStorage.doesKeyExist(keyName3))
        self.assertTrue(self.identityStorage.doesCertificateExist(certName1))
        self.assertTrue(self.identityStorage.doesCertificateExist(certName2))
        self.assertTrue(self.identityStorage.doesCertificateExist(certName3))
        self.assertTrue(self.identityStorage.doesCertificateExist(certName4))
        self.assertTrue(self.identityStorage.doesCertificateExist(certName5))

        self.identityStorage.deleteCertificateInfo(certName5)
        self.assertFalse(self.identityStorage.doesCertificateExist(certName5))
        self.assertTrue(self.identityStorage.doesCertificateExist(certName4))
        self.assertTrue(self.identityStorage.doesCertificateExist(certName3))
        self.assertTrue(self.identityStorage.doesKeyExist(keyName2))

        self.identityStorage.deletePublicKeyInfo(keyName3)
        self.assertFalse(self.identityStorage.doesCertificateExist(certName4))
        self.assertFalse(self.identityStorage.doesCertificateExist(certName3))
        self.assertFalse(self.identityStorage.doesKeyExist(keyName3))
        self.assertTrue(self.identityStorage.doesKeyExist(keyName2))
        self.assertTrue(self.identityStorage.doesKeyExist(keyName1))
        self.assertTrue(self.identityStorage.doesIdentityExist(identityName))

        self.keyChain.deleteIdentity(identityName)
        self.assertFalse(self.identityStorage.doesCertificateExist(certName2))
        self.assertFalse(self.identityStorage.doesKeyExist(keyName2))
        self.assertFalse(self.identityStorage.doesCertificateExist(certName1))
        self.assertFalse(self.identityStorage.doesKeyExist(keyName1))
        self.assertFalse(self.identityStorage.doesIdentityExist(identityName))
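For reference, this test class depends on the following imports (module paths assume PyNDN2's standard layout); RSA_DER is a module-level base64 string of a DER-encoded RSA public key defined elsewhere in the original test file and is not reproduced here:

import base64
import os
import time
import unittest as ut

from pyndn import Name
from pyndn.util.blob import Blob
from pyndn.security.key_chain import KeyChain
from pyndn.security.security_types import KeyType
from pyndn.security.security_exception import SecurityException
from pyndn.security.identity.identity_manager import IdentityManager
from pyndn.security.identity.basic_identity_storage import BasicIdentityStorage
from pyndn.security.identity.file_private_key_storage import FilePrivateKeyStorage
from pyndn.security.policy.self_verify_policy_manager import SelfVerifyPolicyManager
from pyndn.security.certificate.identity_certificate import IdentityCertificate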
Code Example #14
File: bms_node.py  Project: zhehaowang/bms-node
class BmsNode(object):
    def __init__(self):
        self.conf = None
        self._keyChain = None
        self._certificateName = None

        self._dataQueue = dict()
        self._memoryContentCache = None
        self._identityName = None

        self._aggregation = Aggregation()

    def setConfiguration(self, fileName, trustSchemaFile):
        self.conf = BoostInfoParser()
        self.conf.read(fileName)
        self._identityName = Name(self.conf.getNodePrefix())
        self._trustSchemaFile = trustSchemaFile

    def onDataNotFound(self, prefix, interest, face, interestFilterId, filter):
        #print('Data not found for ' + interest.getName().toUri())
        return

    def startPublishing(self):
        # One-time security setup
        self.prepareLogging()

        privateKeyStorage = FilePrivateKeyStorage()
        identityStorage = BasicIdentityStorage()
        policyManager = ConfigPolicyManager(self._trustSchemaFile)

        self._keyChain = KeyChain(
            IdentityManager(identityStorage, privateKeyStorage), policyManager)
        self._certificateName = self._keyChain.createIdentityAndCertificate(
            self._identityName)

        print("My Identity name: " + self._identityName.toUri())
        print("My certificate name: " + self._certificateName.toUri())
        certificateData = self._keyChain.getIdentityManager(
        )._identityStorage.getCertificate(self._certificateName)
        print("My certificate string: " +
              b64encode(certificateData.wireEncode().toBuffer()))
        # self._keyChain.getIdentityCertificate(self._certificateName).)

        self._loop = asyncio.get_event_loop()
        self._face = ThreadsafeFace(self._loop)
        self._keyChain.setFace(self._face)

        self._face.setCommandSigningInfo(self._keyChain, self._certificateName)
        self._memoryContentCache = MemoryContentCache(self._face)

        # We should only ask for the cert to be signed on the first run of a given aggregator
        if DO_CERT_SETUP:
            if (KeyLocator.getFromSignature(
                    certificateData.getSignature()).getKeyName().equals(
                        self._certificateName.getPrefix(-1))):
                # Needs configuration for mini-ndn: the aggregation node runs outside of mini-ndn first so that the signed cert gets installed and mini-ndn won't ask for it again
                print("certificate " + self._certificateName.toUri() +
                      " asking for signature")
                response = urllib2.urlopen(
                    "http://192.168.56.1:5000/bms-cert-hack?cert=" +
                    b64encode(certificateData.wireEncode().toBuffer()) +
                    "&cert_prefix=" + self._identityName.toUri() +
                    '&subject_name=' + self._identityName.toUri()).read()

                signedCertData = Data()
                signedCertData.wireDecode(Blob(b64decode(response)))

                self._memoryContentCache.add(signedCertData)
                cmdline = ['ndnsec-install-cert', '-']
                p = subprocess.Popen(cmdline,
                                     stdin=subprocess.PIPE,
                                     stdout=subprocess.PIPE)
                # desanitize + sign in GET request
                cert, err = p.communicate(response)
                if p.returncode != 0:
                    raise RuntimeError("ndnsec-install-cert error")
            else:
                self._memoryContentCache.add(certificateData)
        else:
            self._memoryContentCache.add(certificateData)

        dataNode = self.conf.getDataNode()
        childrenNode = self.conf.getChildrenNode()

        self._memoryContentCache.registerPrefix(Name(self._identityName),
                                                self.onRegisterFailed,
                                                self.onDataNotFound)

        # For each type of data, we refresh each type of aggregation according to the interval in the configuration
        for i in range(len(dataNode.subtrees)):
            dataType = dataNode.subtrees.keys()[i]
            aggregationParams = self.conf.getProducingParamsForAggregationType(
                dataNode.subtrees.items()[i][1])

            if childrenNode == None:
                self._dataQueue[dataType] = DataQueue(None, None, None)
                self.generateData(dataType, 2, 0)

            for aggregationType in aggregationParams:
                childrenList = OrderedDict()
                if childrenNode != None:

                    for j in range(len(childrenNode.subtrees)):
                        if dataType in childrenNode.subtrees.items(
                        )[j][1].subtrees['data'].subtrees:
                            if aggregationType in childrenNode.subtrees.items(
                            )[j][1].subtrees['data'].subtrees[
                                    dataType].subtrees:
                                childrenList[childrenNode.subtrees.items()[j][
                                    0]] = self.conf.getProducingParamsForAggregationType(
                                        childrenNode.subtrees.items()[j]
                                        [1].subtrees['data'].subtrees[dataType]
                                    )[aggregationType]

                self.startPublishingAggregation(
                    aggregationParams[aggregationType], childrenList, dataType,
                    aggregationType)
        return

    def startPublishingAggregation(self, params, childrenList, dataType,
                                   aggregationType):
        if __debug__:
            print('Start publishing for ' + dataType + '-' + aggregationType)

        # aggregation calculating and publishing mechanism
        publishingPrefix = Name(
            self._identityName).append(DATA_COMPONENT).append(dataType).append(
                AGGREGATION_COMPONENT).append(aggregationType)
        self._dataQueue[dataType + aggregationType] = DataQueue(
            params, childrenList, publishingPrefix)

        if len(childrenList.keys()) == 0:
            # TODO: make start_time optional for leaf nodes
            self._loop.call_later(int(params['producer_interval']),
                                  self.calculateAggregation, dataType,
                                  aggregationType, childrenList,
                                  int(params['start_time']),
                                  int(params['producer_interval']),
                                  publishingPrefix, True)
        else:
            # Express an interest for each child that produces the same data and aggregation type
            for childName in childrenList.keys():
                name = Name(self._identityName).append(childName).append(
                    DATA_COMPONENT).append(dataType).append(
                        AGGREGATION_COMPONENT).append(aggregationType)
                interest = Interest(name)
                # if start_time is specified, we ask for data starting at start_time;
                # if not, we ask for the rightmost child and go from there
                if ('start_time' in childrenList[childName]):
                    endTime = int(childrenList[childName]['start_time']) + int(
                        childrenList[childName]['producer_interval'])
                    interest.getName().append(
                        str(childrenList[childName]['start_time'])).append(
                            str(endTime))
                else:
                    # TODO: For now we are working with historical data; since we don't want to miss any data in a run, we start with the leftmost child
                    interest.setChildSelector(0)
                    interest.setMustBeFresh(True)
                interest.setInterestLifetimeMilliseconds(
                    DEFAULT_INTEREST_LIFETIME)
                if __debug__:
                    print('  Issue interest: ' + interest.getName().toUri())
                self._face.expressInterest(interest, self.onData,
                                           self.onTimeout)

        return

    # TODO: once a calculation decides that a child has not answered, we should run another calculation
    def calculateAggregation(self,
                             dataType,
                             aggregationType,
                             childrenList,
                             startTime,
                             interval,
                             publishingPrefix,
                             repeat=False):
        doCalc = True
        dataList = []

        # TODO: an intermediate node cannot produce raw data for now
        if len(childrenList.keys()) != 0:
            for childName in childrenList.keys():
                dataDictKey = self.getDataDictKey(startTime,
                                                  (startTime + interval),
                                                  childName)
                if dataDictKey in self._dataQueue[dataType +
                                                  aggregationType]._dataDict:
                    data = self._dataQueue[
                        dataType + aggregationType]._dataDict[dataDictKey]
                    dataList.append(float(data.getContent().toRawStr()))
                else:
                    #print('Child ' + childName + ' has not replied yet')
                    doCalc = False
                    break
        else:
            for inst in self._dataQueue[dataType]._dataDict.keys():
                if int(inst) >= startTime and int(inst) < startTime + interval:
                    dataList.append(self._dataQueue[dataType]._dataDict[inst])
        if doCalc:
            content = self._aggregation.getAggregation(aggregationType,
                                                       dataList)
            if content:
                publishData = Data(
                    Name(publishingPrefix).append(str(startTime)).append(
                        str(startTime + interval)))
                publishData.setContent(str(content))
                publishData.getMetaInfo().setFreshnessPeriod(
                    DEFAULT_DATA_LIFETIME)
                self._keyChain.sign(publishData, self._certificateName)
                self._memoryContentCache.add(publishData)
                for childName in childrenList.keys():
                    dataDictKey = self.getDataDictKey(startTime,
                                                      (startTime + interval),
                                                      childName)
                    if dataDictKey in self._dataQueue[
                            dataType + aggregationType]._dataDict:
                        del self._dataQueue[
                            dataType + aggregationType]._dataDict[dataDictKey]
                if __debug__:
                    print("Produced: " + publishData.getName().toUri() + "; " +
                          publishData.getContent().toRawStr())

        # Repetition of this function only happens for a raw data producer; otherwise calculateAggregation is called from each onData
        if repeat:
            self._loop.call_later(interval, self.calculateAggregation,
                                  dataType, aggregationType, childrenList,
                                  startTime + interval, interval,
                                  publishingPrefix, repeat)
        return

    def generateData(self, dataType, interval, startTime):
        self._dataQueue[dataType]._dataDict[str(startTime)] = random.randint(
            0, 9)
        self._loop.call_later(interval, self.generateData, dataType, interval,
                              startTime + interval)
        return

    def onRegisterFailed(self, prefix):
        raise RuntimeError("Register failed for prefix", prefix.toUri())

    def onVerified(self, data):
        print('Data verified: ' + data.getName().toUri())
        return

    def onVerifyFailed(self, data):
        print('Data verification failed: ' + data.getName().toUri())
        return

    def onData(self, interest, data):
        self._keyChain.verifyData(data, self.onVerified, self.onVerifyFailed)

        dataName = data.getName()
        dataQueue = None

        if __debug__:
            print("Got data: " + dataName.toUri() + "; " +
                  data.getContent().toRawStr())
        for i in range(0, len(dataName)):
            if dataName.get(i).toEscapedString() == AGGREGATION_COMPONENT:
                dataType = dataName.get(i - 1).toEscapedString()
                aggregationType = dataName.get(i + 1).toEscapedString()

                startTime = int(dataName.get(i + 2).toEscapedString())
                endTime = int(dataName.get(i + 3).toEscapedString())
                childName = dataName.get(i - 3).toEscapedString()

                dataAndAggregationType = dataType + aggregationType

                dataDictKey = self.getDataDictKey(startTime, endTime,
                                                  childName)
                dataQueue = self._dataQueue[dataAndAggregationType]
                dataQueue._dataDict[dataDictKey] = data
                break

        # TODO: check what if interval/starttime is misconfigured
        if dataQueue:
            self.calculateAggregation(dataType, aggregationType,
                                      dataQueue._childrenList, startTime,
                                      endTime - startTime,
                                      dataQueue._publishingPrefix)

        # Always ask for the next piece of data when we receive this one; this assumes the interval does not change and that there are no more name components after endTime
        #newInterestName = dataName.getPrefix(i + 2).append(str(endTime)).append(str(endTime + (endTime - startTime)))

        # We don't expect aggregated data names to be continuous within our given time window, so we ask with an Exclude instead
        newInterestName = dataName.getPrefix(i + 2)
        newInterest = Interest(interest)
        newInterest.setName(newInterestName)
        newInterest.setChildSelector(0)

        exclude = Exclude()
        exclude.appendAny()
        exclude.appendComponent(dataName.get(i + 2))
        newInterest.setExclude(exclude)

        self._face.expressInterest(newInterest, self.onData, self.onTimeout)
        if __debug__:
            print("  issue interest: " + interest.getName().toUri())

        return

    def onTimeout(self, interest):
        if __debug__:
            print("  interest timeout: " + interest.getName().toUri() +
                  "; reexpress")
        self._face.expressInterest(interest, self.onData, self.onTimeout)
        return

    def stop(self):
        self._loop.stop()
        if __debug__:
            print("Stopped")
        return

    # This construction of dataDictKey means a parent and a child must not have the same name
    @staticmethod
    def getDataDictKey(startTime, endTime, childName):
        return str(startTime) + '/' + str(endTime) + '/' + childName


##
# Logging
##

    def prepareLogging(self):
        self.log = logging.getLogger(str(self.__class__))
        self.log.setLevel(logging.DEBUG)
        logFormat = "%(asctime)-15s %(name)-20s %(funcName)-20s (%(levelname)-8s):\n\t%(message)s"
        self._console = logging.StreamHandler()
        self._console.setFormatter(logging.Formatter(logFormat))
        self._console.setLevel(logging.INFO)
        # without this, a lot of ThreadsafeFace errors get swallowed up
        logging.getLogger("trollius").addHandler(self._console)
        self.log.addHandler(self._console)

    def setLogLevel(self, level):
        """
        Set the log level that will be output to standard error
        :param level: A log level constant defined in the logging module (e.g. logging.INFO) 
        """
        self._console.setLevel(level)

    def getLogger(self):
        """
        :return: The logger associated with this node
        :rtype: logging.Logger
        """
        return self.log
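
To make the publishing flow above concrete, here is a minimal, hypothetical driver for BmsNode. It is only a sketch, not part of the bms-node project: the configuration and trust-schema paths are placeholders, and it assumes the same imports the class itself relies on (BoostInfoParser, ThreadsafeFace, KeyChain, and so on) are available.

# Hypothetical driver for BmsNode; both file paths are placeholders.
node = BmsNode()
node.setConfiguration("config/node.conf", "config/trust-schema.conf")
node.startPublishing()            # sets up the KeyChain, Face, MemoryContentCache and event loop
try:
    node._loop.run_forever()      # startPublishing() stored the loop in node._loop
except KeyboardInterrupt:
    node.stop()
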
Code example #15
0
class SampleProducer(object):
    def __init__(self, face, username, memoryContentCache):
        # Set up face
        self.face = face

        # Set up the keyChain.
        identityStorage = MemoryIdentityStorage()
        privateKeyStorage = MemoryPrivateKeyStorage()
        self.keyChain = KeyChain(
          IdentityManager(identityStorage, privateKeyStorage),
          NoVerifyPolicyManager())

        identityName = Name(username)
        self.certificateName = self.keyChain.createIdentityAndCertificate(identityName)
        self.keyChain.getIdentityManager().setDefaultIdentity(identityName)

        self.face.setCommandSigningInfo(self.keyChain, self.certificateName)

        self.databaseFilePath = "policy_config/test_producer.db"
        self.catalogDatabaseFilePath = "policy_config/test_producer_catalog.db"
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            # no such file
            pass
        try:
            os.remove(self.catalogDatabaseFilePath)
        except OSError:
            # no such file
            pass

        self.testDb = Sqlite3ProducerDb(self.databaseFilePath)
        self.catalogDb = Sqlite3ProducerDb(self.catalogDatabaseFilePath)

        # TODO: as of right now, the catalog has a different suffix, so we need another Producer instance. That producer cannot
        # share the same DB with the first producer, otherwise self.onEncryptedKeys is never called: the catalog producer uses
        # its own C-key, and that key won't be encrypted by an E-key because no interest goes out.
        # This looks like a problem in the library.
        prefix = Name(username)
        suffix = Name("fitness/physical_activity/time_location")

        self.producer = Producer(Name(prefix), suffix, self.face, self.keyChain, self.testDb)

        catalogSuffix = Name(suffix).append("catalog")
        self.catalogProducer = Producer(Name(prefix), catalogSuffix, self.face, self.keyChain, self.catalogDb)

        self.memoryContentCache = memoryContentCache
        return

    def createContentKey(self, timeSlot):
        print "debug: createContentKey for data and catalog"
        contentKeyName = self.producer.createContentKey(timeSlot, self.onEncryptedKeys, self.onError)
        catalogKeyName = self.catalogProducer.createContentKey(timeSlot, self.onEncryptedKeys, self.onError)
        print contentKeyName.toUri()
        print catalogKeyName.toUri()

    def onError(self, code, msg):
        print str(code) + " : " + msg
        return

    def initiateContentStoreInsertion(self, repoCommandPrefix, data):
        fetchName = data.getName()
        parameter = repo_command_parameter_pb2.RepoCommandParameterMessage()
        # Add the Name.
        for i in range(fetchName.size()):
            parameter.repo_command_parameter.name.component.append(
              fetchName[i].getValue().toBytes())

        # Create the command interest.
        interest = Interest(Name(repoCommandPrefix).append("insert")
          .append(Name.Component(ProtobufTlv.encode(parameter))))
        self.face.makeCommandInterest(interest)

        self.face.expressInterest(interest, self.onRepoData, self.onRepoTimeout)

    def onRepoData(self, interest, data):
        #print "received repo data: " + interest.getName().toUri()
        return

    def onRepoTimeout(self, interest):
        #print "repo command times out: " + interest.getName().getPrefix(-1).toUri()
        return

    def onEncryptedKeys(self, keys):
        print "debug: onEncryptedKeys called"
        if not keys:
            print "onEncryptedKeys: no keys in callback!"
        for i in range(0, len(keys)):
            print "onEncryptedKeys: produced encrypted key " + keys[i].getName().toUri()
            self.memoryContentCache.add(keys[i])
            self.initiateContentStoreInsertion("/ndn/edu/ucla/remap/ndnfit/repo", keys[i])
        return
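
As a rough usage sketch (not taken from the original project), SampleProducer could be wired up as below. The event-loop/face setup, the user name, and the ISO time slot are all assumptions, and the usual PyNDN imports are presumed to be in place.

# Hypothetical wiring for SampleProducer (Python 2 style, matching the snippet above).
loop = asyncio.get_event_loop()
face = ThreadsafeFace(loop)
cache = MemoryContentCache(face)
producer = SampleProducer(face, "/org/openmhealth/SampleUser", cache)
# Create the C-keys for one time slot; onEncryptedKeys then caches the encrypted C-keys.
producer.createContentKey(Schedule.fromIsoString("20161024T080000"))
loop.run_forever()
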
Code example #16
0
class TestGroupManager(object):
    def __init__(self, face, groupManagerName, dataType, readAccessName,
                 dKeyDatabaseFilePath):
        # Set up face
        self.face = face
        #self.loop = eventLoop

        # Set up the keyChain.
        identityStorage = MemoryIdentityStorage()
        privateKeyStorage = MemoryPrivateKeyStorage()
        self.keyChain = KeyChain(
            IdentityManager(identityStorage, privateKeyStorage),
            NoVerifyPolicyManager())

        self.certificateName = self.keyChain.createIdentityAndCertificate(
            groupManagerName)

        self.face.setCommandSigningInfo(self.keyChain, self.certificateName)

        self.dKeyDatabaseFilePath = dKeyDatabaseFilePath
        try:
            os.remove(self.dKeyDatabaseFilePath)
        except OSError:
            # no such file
            pass

        self.manager = GroupManager(
            groupManagerName, dataType,
            Sqlite3GroupManagerDb(self.dKeyDatabaseFilePath), 2048, 1,
            self.keyChain)

        self.memoryContentCache = MemoryContentCache(self.face)
        self.memoryContentCache.registerPrefix(
            Name(groupManagerName).append("READ"), self.onRegisterFailed,
            self.onDataNotFound)
        self.face.registerPrefix(readAccessName, self.onAccessInterest,
                                 self.onAccessTimeout)

        self.updateGroupKeys = False
        return

    def onAccessInterest(self, prefix, interest, face, interestFilterId,
                         filter):
        print "On Access request interest: " + interest.getName().toUri()
        certInterest = Interest(interest.getName().getSubName(4))
        certInterest.setName(certInterest.getName().getPrefix(-1))
        certInterest.setInterestLifetimeMilliseconds(2000)

        self.face.expressInterest(
            certInterest,
            lambda memberInterest, memberData: self.onMemberCertificateData(
                memberInterest, memberData, interest),
            lambda memberInterest: self.onMemberCertificateTimeout(
                memberInterest, interest))
        print "Retrieving member certificate: " + certInterest.getName().toUri(
        )

        return

    def onAccessTimeout(self, prefix):
        print "Prefix registration failed: " + prefix.toUri()
        return

    def onRepoData(self, interest, data):
        #print "received repo data: " + interest.getName().toUri()
        return

    def onRepoTimeout(self, interest):
        #print "repo command times out: " + interest.getName().getPrefix(-1).toUri()
        return

    def setManager(self):
        schedule1 = Schedule()
        interval11 = RepetitiveInterval(
            Schedule.fromIsoString("20161001T000000"),
            Schedule.fromIsoString("20161031T000000"), 0, 24, 1,
            RepetitiveInterval.RepeatUnit.DAY)
        schedule1.addWhiteInterval(interval11)

        self.manager.addSchedule("schedule1", schedule1)

    def onMemberCertificateData(self, interest, data, accessInterest):
        print "Member certificate with name retrieved: " + data.getName(
        ).toUri() + "; member added to group!"
        self.manager.addMember("schedule1", data)
        self.updateGroupKeys = True

        accessResponse = Data(accessInterest.getName())
        accessResponse.setContent("granted")
        self.face.putData(accessResponse)

    def onMemberCertificateTimeout(self, interest, accessInterest):
        print "Member certificate interest times out: " + interest.getName(
        ).toUri()
        newInterest = Interest(interest)
        newInterest.refreshNonce()
        self.face.expressInterest(
            newInterest,
            lambda memberInterest, memberData: self.onMemberCertificateData(
                memberInterest, memberData, accessInterest),
            lambda memberInterest: self.onMemberCertificateTimeout(
                memberInterest, accessInterest))
        return

    def publishGroupKeys(self):
        timePoint1 = Schedule.fromIsoString("20161024T083000")
        result = self.manager.getGroupKey(timePoint1)

        # The first result is the group public key (E-key).
        # The rest are the group private key (D-key) encrypted with each member's public key.
        for i in range(0, len(result)):
            self.memoryContentCache.add(result[i])
            self.initiateContentStoreInsertion(
                "/ndn/edu/ucla/remap/ndnfit/repo", result[i])
            print "Publish key name: " + str(
                i) + " " + result[i].getName().toUri()

        self.updateGroupKeys = False

    def onDataNotFound(self, prefix, interest, face, interestFilterId, filter):
        print "Data not found for interest: " + interest.getName().toUri()
        if interest.getExclude():
            print "Interest has exclude: " + interest.getExclude().toUri()
        return

    def onRegisterFailed(self, prefix):
        print "Prefix registration failed"
        return

    def initiateContentStoreInsertion(self, repoCommandPrefix, data):
        fetchName = data.getName()
        parameter = repo_command_parameter_pb2.RepoCommandParameterMessage()
        # Add the Name.
        for i in range(fetchName.size()):
            parameter.repo_command_parameter.name.component.append(
                fetchName[i].getValue().toBytes())

        # Create the command interest.
        interest = Interest(
            Name(repoCommandPrefix).append("insert").append(
                Name.Component(ProtobufTlv.encode(parameter))))
        self.face.makeCommandInterest(interest)

        self.face.expressInterest(interest, self.onRepoData,
                                  self.onRepoTimeout)
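
One possible way to drive TestGroupManager, sketched here with placeholder names, a placeholder database path, and the same event-loop setup used in the other examples; none of these values come from the original project.

# Hypothetical wiring for TestGroupManager; every name and path below is a placeholder.
loop = asyncio.get_event_loop()
face = ThreadsafeFace(loop)
manager = TestGroupManager(
    face,
    Name("/org/openmhealth/SampleUser/SAMPLE/fitness"),      # group manager name
    Name("fitness"),                                          # data type the group manages
    Name("/org/openmhealth/SampleUser/read_access_request"),  # prefix members use to request access
    "policy_config/manager-d-key.db")
manager.setManager()          # install schedule1 so that members can be added to it
manager.publishGroupKeys()    # publish the E-key and the member-encrypted D-keys
loop.run_forever()
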
Code example #17
0
File: test_identity_methods.py Project: MAHIS/PyNDN2
class TestSqlIdentityStorage(ut.TestCase):
    def setUp(self):
        # Reuse the policy_config subdirectory for the temporary SQLite file.
        self.databaseFilePath = "policy_config/test-public-info.db"
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            # no such file
            pass
        self.identityStorage = BasicIdentityStorage(self.databaseFilePath)

        self.identityManager = IdentityManager(self.identityStorage,
             FilePrivateKeyStorage())
        self.policyManager = SelfVerifyPolicyManager(self.identityStorage)
        self.keyChain = KeyChain(self.identityManager, self.policyManager)

    def tearDown(self):
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            pass

    def test_identity_create_delete(self):
        identityName = Name('/TestIdentityStorage/Identity').appendVersion(
            int(time.time()))

        certificateName = self.keyChain.createIdentityAndCertificate(identityName)
        keyName = IdentityCertificate.certificateNameToPublicKeyName(certificateName)

        self.assertTrue(self.identityStorage.doesIdentityExist(identityName),
            "Identity was not added to IdentityStorage")
        self.assertIsNotNone(keyName, "New identity has no key")
        self.assertTrue(self.identityStorage.doesKeyExist(keyName),
            "Key was not added to IdentityStorage")
        self.assertIsNotNone(certificateName,
            "Certificate was not added to IdentityStorage")

        self.keyChain.deleteIdentity(identityName)
        self.assertFalse(self.identityStorage.doesIdentityExist(identityName),
            "Identity still in IdentityStorage after identity was deleted")
        self.assertFalse(self.identityStorage.doesKeyExist(keyName),
            "Key still in IdentityStorage after identity was deleted")
        self.assertFalse(self.identityStorage.doesCertificateExist(certificateName),
            "Certificate still in IdentityStorage after identity was deleted")

        with self.assertRaises(SecurityException):
            self.identityManager.getDefaultCertificateNameForIdentity(identityName)

    def test_key_create_delete(self):
        identityName = Name('/TestIdentityStorage/Identity').appendVersion(
            int(time.time()))

        keyName1 = self.keyChain.generateRSAKeyPair(identityName, True)
        self.keyChain.getIdentityManager().setDefaultKeyForIdentity(keyName1)

        keyName2 = self.keyChain.generateRSAKeyPair(identityName, False)
        self.assertEqual(self.identityManager.getDefaultKeyNameForIdentity(identityName),
            keyName1, "Default key name was changed without explicit request")
        self.assertNotEqual(self.identityManager.getDefaultKeyNameForIdentity(identityName),
            keyName2, "Newly created key replaced default key without explicit request")

        self.identityStorage.deletePublicKeyInfo(keyName2)

        self.assertFalse(self.identityStorage.doesKeyExist(keyName2))
        self.keyChain.deleteIdentity(identityName)

    def test_key_autocreate_identity(self):
        keyName1 = Name('/TestSqlIdentityStorage/KeyType/RSA/ksk-12345')
        identityName = keyName1[:-1]

        decodedKey = base64.b64decode(RSA_DER)
        self.identityStorage.addKey(keyName1, KeyType.RSA, Blob(decodedKey))
        self.identityStorage.setDefaultKeyNameForIdentity(keyName1)

        self.assertTrue(self.identityStorage.doesKeyExist(keyName1),
            "Key was not added")
        self.assertTrue(self.identityStorage.doesIdentityExist(identityName),
            "Identity for key was not automatically created")

        self.assertEqual(self.identityManager.getDefaultKeyNameForIdentity(identityName),
            keyName1, "Default key was not set on identity creation")

        with self.assertRaises(SecurityException):
            self.identityStorage.getDefaultCertificateNameForKey(keyName1)

        with self.assertRaises(SecurityException):
            # we have no private key for signing
            self.identityManager.selfSign(keyName1)

        with self.assertRaises(SecurityException):
            self.identityStorage.getDefaultCertificateNameForKey(keyName1)

        with self.assertRaises(SecurityException):
            self.identityManager.getDefaultCertificateNameForIdentity(identityName)

        keyName2 = self.identityManager.generateRSAKeyPairAsDefault(identityName)
        cert = self.identityManager.selfSign(keyName2)
        self.identityManager.addCertificateAsIdentityDefault(cert)

        certName1 = self.identityManager.getDefaultCertificateNameForIdentity(identityName)
        certName2 = self.identityStorage.getDefaultCertificateNameForKey(keyName2)

        self.assertEqual(certName1, certName2,
            "Key-certificate mapping and identity-certificate mapping are not consistent")

        self.keyChain.deleteIdentity(identityName)
        self.assertFalse(self.identityStorage.doesKeyExist(keyName1))

    def test_certificate_add_delete(self):
        identityName = Name('/TestIdentityStorage/Identity').appendVersion(
            int(time.time()))

        self.identityManager.createIdentityAndCertificate(
          identityName, KeyChain.DEFAULT_KEY_PARAMS)
        keyName1 = self.identityManager.getDefaultKeyNameForIdentity(identityName)
        cert2 = self.identityManager.selfSign(keyName1)
        self.identityStorage.addCertificate(cert2)
        certName2 = cert2.getName()

        certName1 = self.identityManager.getDefaultCertificateNameForIdentity(identityName)
        self.assertNotEqual(certName1, certName2,
            "New certificate was set as default without explicit request")

        self.identityStorage.deleteCertificateInfo(certName1)
        self.assertTrue(self.identityStorage.doesCertificateExist(certName2))
        self.assertFalse(self.identityStorage.doesCertificateExist(certName1))

        self.keyChain.deleteIdentity(identityName)
        self.assertFalse(self.identityStorage.doesCertificateExist(certName2))

    def test_stress(self):
        # ndn-cxx/tests/unit-tests/security/test-sec-public-info-sqlite3.cpp
        identityName = Name("/TestSecPublicInfoSqlite3/Delete").appendVersion(
            int(time.time()))

        # ndn-cxx returns the cert name, but the IdentityManager docstring
        # specifies a key
        certName1 = self.keyChain.createIdentityAndCertificate(identityName)
        keyName1 = IdentityCertificate.certificateNameToPublicKeyName(certName1)
        keyName2 = self.keyChain.generateRSAKeyPairAsDefault(identityName)

        cert2 = self.identityManager.selfSign(keyName2)
        certName2 = cert2.getName()
        self.identityManager.addCertificateAsDefault(cert2)

        keyName3 = self.keyChain.generateRSAKeyPairAsDefault(identityName)
        cert3 = self.identityManager.selfSign(keyName3)
        certName3 = cert3.getName()
        self.identityManager.addCertificateAsDefault(cert3)

        cert4 = self.identityManager.selfSign(keyName3)
        self.identityManager.addCertificateAsDefault(cert4)
        certName4 = cert4.getName()

        cert5 = self.identityManager.selfSign(keyName3)
        self.identityManager.addCertificateAsDefault(cert5)
        certName5 = cert5.getName()

        self.assertTrue(self.identityStorage.doesIdentityExist(identityName))
        self.assertTrue(self.identityStorage.doesKeyExist(keyName1))
        self.assertTrue(self.identityStorage.doesKeyExist(keyName2))
        self.assertTrue(self.identityStorage.doesKeyExist(keyName3))
        self.assertTrue(self.identityStorage.doesCertificateExist(certName1))
        self.assertTrue(self.identityStorage.doesCertificateExist(certName2))
        self.assertTrue(self.identityStorage.doesCertificateExist(certName3))
        self.assertTrue(self.identityStorage.doesCertificateExist(certName4))
        self.assertTrue(self.identityStorage.doesCertificateExist(certName5))

        self.identityStorage.deleteCertificateInfo(certName5)
        self.assertFalse(self.identityStorage.doesCertificateExist(certName5))
        self.assertTrue(self.identityStorage.doesCertificateExist(certName4))
        self.assertTrue(self.identityStorage.doesCertificateExist(certName3))
        self.assertTrue(self.identityStorage.doesKeyExist(keyName2))

        self.identityStorage.deletePublicKeyInfo(keyName3)
        self.assertFalse(self.identityStorage.doesCertificateExist(certName4))
        self.assertFalse(self.identityStorage.doesCertificateExist(certName3))
        self.assertFalse(self.identityStorage.doesKeyExist(keyName3))
        self.assertTrue(self.identityStorage.doesKeyExist(keyName2))
        self.assertTrue(self.identityStorage.doesKeyExist(keyName1))
        self.assertTrue(self.identityStorage.doesIdentityExist(identityName))

        self.keyChain.deleteIdentity(identityName)
        self.assertFalse(self.identityStorage.doesCertificateExist(certName2))
        self.assertFalse(self.identityStorage.doesKeyExist(keyName2))
        self.assertFalse(self.identityStorage.doesCertificateExist(certName1))
        self.assertFalse(self.identityStorage.doesKeyExist(keyName1))
        self.assertFalse(self.identityStorage.doesIdentityExist(identityName))
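
Assuming PyNDN2 is installed, the imports at the top of test_identity_methods.py are present, and the policy_config directory exists, this test case can be run directly with the standard unittest runner, for example by appending:

if __name__ == '__main__':
    ut.main(verbosity=2)
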
Code example #18
0
File: test_producer.py Project: MAHIS/PyNDN2
class TestProducer(ut.TestCase):
    def setUp(self):
        self.decryptionKeys = {} # key: Name, value: Blob
        self.encryptionKeys = {} # key: Name, value: Data

        # Reuse the policy_config subdirectory for the temporary SQLite files.
        self.databaseFilePath = "policy_config/test.db"
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            # no such file
            pass

        # Set up the keyChain.
        identityStorage = MemoryIdentityStorage()
        privateKeyStorage = MemoryPrivateKeyStorage()
        self.keyChain = KeyChain(
          IdentityManager(identityStorage, privateKeyStorage),
          NoVerifyPolicyManager())
        identityName = Name("TestProducer")
        self.certificateName = self.keyChain.createIdentityAndCertificate(identityName)
        self.keyChain.getIdentityManager().setDefaultIdentity(identityName)

    def tearDown(self):
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            pass

    def createEncryptionKey(self, eKeyName, timeMarker):
        params = RsaKeyParams()
        eKeyName = Name(eKeyName)
        eKeyName.append(timeMarker)

        dKeyBlob = RsaAlgorithm.generateKey(params).getKeyBits()
        eKeyBlob = RsaAlgorithm.deriveEncryptKey(dKeyBlob).getKeyBits()
        self.decryptionKeys[eKeyName] = dKeyBlob

        keyData = Data(eKeyName)
        keyData.setContent(eKeyBlob)
        self.keyChain.sign(keyData, self.certificateName)
        self.encryptionKeys[eKeyName] = keyData

    def test_content_key_request(self):
        prefix = Name("/prefix")
        suffix = Name("/a/b/c")
        expectedInterest = Name(prefix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_READ)
        expectedInterest.append(suffix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_E_KEY)

        cKeyName = Name(prefix)
        cKeyName.append(Encryptor.NAME_COMPONENT_SAMPLE)
        cKeyName.append(suffix)
        cKeyName.append(Encryptor.NAME_COMPONENT_C_KEY)

        timeMarker = Name("20150101T100000/20150101T120000")
        testTime1 = Schedule.fromIsoString("20150101T100001")
        testTime2 = Schedule.fromIsoString("20150101T110001")
        testTimeRounded1 = Name.Component("20150101T100000")
        testTimeRounded2 = Name.Component("20150101T110000")

        # Create content keys required for this test case:
        for i in range(suffix.size()):
          self.createEncryptionKey(expectedInterest, timeMarker)
          expectedInterest = expectedInterest.getPrefix(-2).append(
            Encryptor.NAME_COMPONENT_E_KEY)

        expressInterestCallCount = [0]

        # Prepare a TestFace to instantly answer calls to expressInterest.
        class TestFace(object):
            def __init__(self, handleExpressInterest):
                self.handleExpressInterest = handleExpressInterest
            def expressInterest(self, interest, onData, onTimeout):
                return self.handleExpressInterest(interest, onData, onTimeout)

        def handleExpressInterest(interest, onData, onTimeout):
            expressInterestCallCount[0] += 1

            interestName = Name(interest.getName())
            interestName.append(timeMarker)
            self.assertTrue(interestName in self.encryptionKeys)
            onData(interest, self.encryptionKeys[interestName])

            return 0
        face = TestFace(handleExpressInterest)

        # Verify that the content key is correctly encrypted for each domain, and
        # the produce method encrypts the provided data with the same content key.
        testDb = Sqlite3ProducerDb(self.databaseFilePath)
        producer = Producer(prefix, suffix, face, self.keyChain, testDb)
        contentKey = [None] # Blob

        def checkEncryptionKeys(
          result, testTime, roundedTime, expectedExpressInterestCallCount):
            self.assertEqual(expectedExpressInterestCallCount,
                             expressInterestCallCount[0])

            self.assertEqual(True, testDb.hasContentKey(testTime))
            contentKey[0] = testDb.getContentKey(testTime)

            params = EncryptParams(EncryptAlgorithmType.RsaOaep)
            for i in range(len(result)):
                key = result[i]
                keyName = key.getName()
                self.assertEqual(cKeyName, keyName.getSubName(0, 6))
                self.assertEqual(keyName.get(6), roundedTime)
                self.assertEqual(keyName.get(7), Encryptor.NAME_COMPONENT_FOR)
                self.assertEqual(
                  True, keyName.getSubName(8) in self.decryptionKeys)

                decryptionKey = self.decryptionKeys[keyName.getSubName(8)]
                self.assertEqual(True, decryptionKey.size() != 0)
                encryptedKeyEncoding = key.getContent()

                content = EncryptedContent()
                content.wireDecode(encryptedKeyEncoding)
                encryptedKey = content.getPayload()
                retrievedKey = RsaAlgorithm.decrypt(
                  decryptionKey, encryptedKey, params)

                self.assertTrue(contentKey[0].equals(retrievedKey))

            self.assertEqual(3, len(result))

        # An initial test to confirm that keys are created for this time slot.
        contentKeyName1 = producer.createContentKey(
          testTime1,
          lambda keys: checkEncryptionKeys(keys, testTime1, testTimeRounded1, 3))

        # Verify that we do not repeat the search for e-keys. The total
        #   expressInterestCallCount should be the same.
        contentKeyName2 = producer.createContentKey(
          testTime2,
          lambda keys: checkEncryptionKeys(keys, testTime2, testTimeRounded2, 3))

        # Confirm content key names are correct
        self.assertEqual(cKeyName, contentKeyName1.getPrefix(-1))
        self.assertEqual(testTimeRounded1, contentKeyName1.get(6))
        self.assertEqual(cKeyName, contentKeyName2.getPrefix(-1))
        self.assertEqual(testTimeRounded2, contentKeyName2.get(6))

        # Confirm that produce encrypts with the correct key and has the right name.
        testData = Data()
        producer.produce(testData, testTime2, Blob(DATA_CONTENT, False))

        producedName = testData.getName()
        self.assertEqual(cKeyName.getPrefix(-1), producedName.getSubName(0, 5))
        self.assertEqual(testTimeRounded2, producedName.get(5))
        self.assertEqual(Encryptor.NAME_COMPONENT_FOR, producedName.get(6))
        self.assertEqual(cKeyName, producedName.getSubName(7, 6))
        self.assertEqual(testTimeRounded2, producedName.get(13))

        dataBlob = testData.getContent()

        dataContent = EncryptedContent()
        dataContent.wireDecode(dataBlob)
        encryptedData = dataContent.getPayload()
        initialVector = dataContent.getInitialVector()

        params = EncryptParams(EncryptAlgorithmType.AesCbc, 16)
        params.setInitialVector(initialVector)
        decryptTest = AesAlgorithm.decrypt(contentKey[0], encryptedData, params)
        self.assertTrue(decryptTest.equals(Blob(DATA_CONTENT, False)))

    def test_content_key_search(self):
        timeMarkerFirstHop = Name("20150101T070000/20150101T080000")
        timeMarkerSecondHop = Name("20150101T080000/20150101T090000")
        timeMarkerThirdHop = Name("20150101T100000/20150101T110000")

        prefix = Name("/prefix")
        suffix = Name("/suffix")
        expectedInterest = Name(prefix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_READ)
        expectedInterest.append(suffix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_E_KEY)

        cKeyName = Name(prefix)
        cKeyName.append(Encryptor.NAME_COMPONENT_SAMPLE)
        cKeyName.append(suffix)
        cKeyName.append(Encryptor.NAME_COMPONENT_C_KEY)

        testTime = Schedule.fromIsoString("20150101T100001")

        # Create content keys required for this test case:
        self.createEncryptionKey(expectedInterest, timeMarkerFirstHop)
        self.createEncryptionKey(expectedInterest, timeMarkerSecondHop)
        self.createEncryptionKey(expectedInterest, timeMarkerThirdHop)

        requestCount = [0]

        # Prepare a TestFace to instantly answer calls to expressInterest.
        class TestFace(object):
            def __init__(self, handleExpressInterest):
                self.handleExpressInterest = handleExpressInterest
            def expressInterest(self, interest, onData, onTimeout):
                return self.handleExpressInterest(interest, onData, onTimeout)

        def handleExpressInterest(interest, onData, onTimeout):
            self.assertEqual(expectedInterest, interest.getName())

            gotInterestName = False
            for i in range(3):
              interestName = Name(interest.getName())
              if i == 0:
                interestName.append(timeMarkerFirstHop)
              elif i == 1:
                interestName.append(timeMarkerSecondHop)
              elif i == 2:
                interestName.append(timeMarkerThirdHop)

              # matchesName will check the Exclude.
              if interest.matchesName(interestName):
                gotInterestName = True
                requestCount[0] += 1
                break

            if gotInterestName:
              onData(interest, self.encryptionKeys[interestName])

            return 0
        face = TestFace(handleExpressInterest)

        # Verify that if a key is found, but not within the right time slot, the
        # search is refined until a valid time slot is found.
        testDb = Sqlite3ProducerDb(self.databaseFilePath)
        producer = Producer(prefix, suffix, face, self.keyChain, testDb)
        def onEncryptedKeys(result):
            self.assertEqual(3, requestCount[0])
            self.assertEqual(1, len(result))

            keyData = result[0]
            keyName = keyData.getName()
            self.assertEqual(cKeyName, keyName.getSubName(0, 4))
            self.assertEqual(timeMarkerThirdHop.get(0), keyName.get(4))
            self.assertEqual(Encryptor.NAME_COMPONENT_FOR, keyName.get(5))
            self.assertEqual(expectedInterest.append(timeMarkerThirdHop),
                             keyName.getSubName(6))
        producer.createContentKey(testTime, onEncryptedKeys)

    def test_content_key_timeout(self):
        prefix = Name("/prefix")
        suffix = Name("/suffix")
        expectedInterest = Name(prefix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_READ)
        expectedInterest.append(suffix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_E_KEY)

        testTime = Schedule.fromIsoString("20150101T100001")

        timeoutCount = [0]

        # Prepare a TestFace to instantly answer calls to expressInterest.
        class TestFace(object):
            def __init__(self, handleExpressInterest):
                self.handleExpressInterest = handleExpressInterest
            def expressInterest(self, interest, onData, onTimeout):
                return self.handleExpressInterest(interest, onData, onTimeout)

        def handleExpressInterest(interest, onData, onTimeout):
            self.assertEqual(expectedInterest, interest.getName())
            timeoutCount[0] += 1
            onTimeout(interest)

            return 0
        face = TestFace(handleExpressInterest)

        # Verify that if no response is received, the producer appropriately times
        # out. The result vector should not contain elements that have timed out.
        testDb = Sqlite3ProducerDb(self.databaseFilePath)
        producer = Producer(prefix, suffix, face, self.keyChain, testDb)
        def onEncryptedKeys(result):
            self.assertEqual(4, timeoutCount[0])
            self.assertEqual(0, len(result))
        producer.createContentKey(testTime, onEncryptedKeys)
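
The TestFace classes above are the central trick of these tests: expressInterest is answered synchronously, so createContentKey completes without running an event loop. A generic version of that stub, written here purely as an illustration and not part of PyNDN2, might look like:

class StubFace(object):
    """Answer every expressInterest call from a dict of prepared Data packets."""
    def __init__(self, packets):
        self.packets = packets        # dict: Name -> Data
        self.callCount = 0

    def expressInterest(self, interest, onData, onTimeout):
        self.callCount += 1
        name = interest.getName()
        if name in self.packets:
            # Deliver the canned Data immediately, as the tests above do.
            onData(interest, self.packets[name])
        else:
            onTimeout(interest)
        return 0
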
Code example #19
0
class BaseNode(object):
    """
    This class contains methods/attributes common to both node and controller.
    """
    def __init__(self, transport = None, conn = None):
        """
        Initialize the network and security classes for the node
        """
        super(BaseNode, self).__init__()
        self.faceTransport = transport
        self.faceConn = conn
        
        self._identityStorage = BasicIdentityStorage()

        self._identityManager = IdentityManager(self._identityStorage, FilePrivateKeyStorage())
        self._policyManager = IotPolicyManager(self._identityStorage)

        # hopefully there is some private/public key pair available
        self._keyChain = KeyChain(self._identityManager, self._policyManager)

        self._registrationFailures = 0
        self._prepareLogging()

        self._setupComplete = False


##
# Logging
##
    def _prepareLogging(self):
        self.log = logging.getLogger(str(self.__class__))
        self.log.setLevel(logging.DEBUG)
        logFormat = "%(asctime)-15s %(name)-20s %(funcName)-20s (%(levelname)-8s):\n\t%(message)s"
        self._console = logging.StreamHandler()
        self._console.setFormatter(logging.Formatter(logFormat))
        self._console.setLevel(logging.INFO)
        # without this, a lot of ThreadsafeFace errors get swallowed up
        logging.getLogger("trollius").addHandler(self._console)
        self.log.addHandler(self._console)

    def setLogLevel(self, level):
        """
        Set the log level that will be output to standard error
        :param level: A log level constant defined in the logging module (e.g. logging.INFO) 
        """
        self._console.setLevel(level)

    def getLogger(self):
        """
        :return: The logger associated with this node
        :rtype: logging.Logger
        """
        return self.log

###
# Startup and shutdown
###
    def beforeLoopStart(self):
        """
        Called before the event loop starts.
        """
        pass

    def getDefaultCertificateName(self):
        try:
            certName = self._identityStorage.getDefaultCertificateNameForIdentity( 
                self._policyManager.getDeviceIdentity())
        except SecurityException as e:
            # zhehao: in the case of producer's /localhop prefixes, the default key is not defined in ndnsec-public-info.db
            certName = self._keyChain.createIdentityAndCertificate(self._policyManager.getDeviceIdentity())
            #certName = self._keyChain.getDefaultCertificateName()
            #print(certName.toUri())
        return certName

    def start(self):
        """
        Begins the event loop. After this, the node's Face is set up and it can
        send/receive interests+data
        """
        self.log.info("Starting up")
        self.loop = asyncio.get_event_loop()
        
        if (self.faceTransport == None or self.faceTransport == ''):
            self.face = ThreadsafeFace(self.loop)
        else:
            self.face = ThreadsafeFace(self.loop, self.faceTransport, self.faceConn)
        
        self.face.setCommandSigningInfo(self._keyChain, self.getDefaultCertificateName())
        
        self._keyChain.setFace(self.face)

        self._isStopped = False
        self.beforeLoopStart()
        
        try:
            self.loop.run_forever()
        except Exception as e:
            self.log.exception("Unhandled exception in the event loop")
        finally:
            self.stop()

    def stop(self):
        """
        Stops the node, taking it off the network
        """
        self.log.info("Shutting down")
        self._isStopped = True 
        self.loop.stop()
        
###
# Data handling
###
    def signData(self, data):
        """
        Sign the data with our network certificate
        :param pyndn.Data data: The data to sign
        """
        self._keyChain.sign(data, self.getDefaultCertificateName())

    def sendData(self, data, sign=True):
        """
        Reply to an interest with a data packet, optionally signing it.
        :param pyndn.Data data: The response data packet
        :param boolean sign: (optional, default=True) Whether the response must be signed. 
        """
        if sign:
            self.signData(data)
        self.face.putData(data)

###
# Failure handling and utilities
###
    def onRegisterFailed(self, prefix):
        """
        Called when the node cannot register its name with the forwarder
        :param pyndn.Name prefix: The network name that failed registration
        """
        if self.faceTransport != None and self.faceConn != None:
            self.log.warn("Explicit face transport and connectionInfo: Could not register {}; expect a manual or autoreg on the other side.".format(prefix.toUri()))
        elif self._registrationFailures < 5:
            self._registrationFailures += 1
            self.log.warn("Could not register {}, retry: {}/{}".format(prefix.toUri(), self._registrationFailures, 5)) 
            self.face.registerPrefix(self.prefix, self._onCommandReceived, self.onRegisterFailed)
        else:
            self.log.info("Prefix registration failed")
            self.stop()

    def verificationFailed(self, dataOrInterest):
        """
        Called when verification of a data packet or command interest fails.
        :param pyndn.Data or pyndn.Interest: The packet that could not be verified
        """
        self.log.info("Received invalid" + dataOrInterest.getName().toUri())

    @staticmethod
    def getSerial():
        """
        Find and return the serial number of the Raspberry Pi. Provided in case
        you wish to distinguish data from nodes with the same name by serial.
        :return: The serial number extracted from device information in /proc/cpuinfo
        :rtype: str
        """
        try:
            with open('/proc/cpuinfo') as f:
                for line in f:
                    if line.startswith('Serial'):
                        return line.split(':')[1].strip()
        except IOError:
            # /proc/cpuinfo is unavailable (e.g. not running on Linux), so there is no serial to read.
            return "todo"
Code example #20
0
File: test_producer.py Project: pedosb/PyNDN2
class TestProducer(ut.TestCase):
    def setUp(self):
        self.decryptionKeys = {}  # key: Name, value: Blob
        self.encryptionKeys = {}  # key: Name, value: Data

        # Reuse the policy_config subdirectory for the temporary SQLite files.
        self.databaseFilePath = "policy_config/test.db"
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            # no such file
            pass

        # Set up the keyChain.
        identityStorage = MemoryIdentityStorage()
        privateKeyStorage = MemoryPrivateKeyStorage()
        self.keyChain = KeyChain(
            IdentityManager(identityStorage, privateKeyStorage),
            NoVerifyPolicyManager())
        identityName = Name("TestProducer")
        self.certificateName = self.keyChain.createIdentityAndCertificate(
            identityName)
        self.keyChain.getIdentityManager().setDefaultIdentity(identityName)

    def tearDown(self):
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            pass

    def createEncryptionKey(self, eKeyName, timeMarker):
        params = RsaKeyParams()
        eKeyName = Name(eKeyName)
        eKeyName.append(timeMarker)

        dKeyBlob = RsaAlgorithm.generateKey(params).getKeyBits()
        eKeyBlob = RsaAlgorithm.deriveEncryptKey(dKeyBlob).getKeyBits()
        self.decryptionKeys[eKeyName] = dKeyBlob

        keyData = Data(eKeyName)
        keyData.setContent(eKeyBlob)
        self.keyChain.sign(keyData, self.certificateName)
        self.encryptionKeys[eKeyName] = keyData

    def test_content_key_request(self):
        prefix = Name("/prefix")
        suffix = Name("/a/b/c")
        expectedInterest = Name(prefix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_READ)
        expectedInterest.append(suffix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_E_KEY)

        cKeyName = Name(prefix)
        cKeyName.append(Encryptor.NAME_COMPONENT_SAMPLE)
        cKeyName.append(suffix)
        cKeyName.append(Encryptor.NAME_COMPONENT_C_KEY)

        timeMarker = Name("20150101T100000/20150101T120000")
        testTime1 = Schedule.fromIsoString("20150101T100001")
        testTime2 = Schedule.fromIsoString("20150101T110001")
        testTimeRounded1 = Name.Component("20150101T100000")
        testTimeRounded2 = Name.Component("20150101T110000")
        testTimeComponent2 = Name.Component("20150101T110001")

        # Create content keys required for this test case:
        for i in range(suffix.size()):
            self.createEncryptionKey(expectedInterest, timeMarker)
            expectedInterest = expectedInterest.getPrefix(-2).append(
                Encryptor.NAME_COMPONENT_E_KEY)

        expressInterestCallCount = [0]

        # Prepare a TestFace to instantly answer calls to expressInterest.
        class TestFace(object):
            def __init__(self, handleExpressInterest):
                self.handleExpressInterest = handleExpressInterest

            def expressInterest(self, interest, onData, onTimeout,
                                onNetworkNack):
                return self.handleExpressInterest(interest, onData, onTimeout,
                                                  onNetworkNack)

        def handleExpressInterest(interest, onData, onTimeout, onNetworkNack):
            expressInterestCallCount[0] += 1

            interestName = Name(interest.getName())
            interestName.append(timeMarker)
            self.assertTrue(interestName in self.encryptionKeys)
            onData(interest, self.encryptionKeys[interestName])

            return 0

        face = TestFace(handleExpressInterest)

        # Verify that the content key is correctly encrypted for each domain, and
        # the produce method encrypts the provided data with the same content key.
        testDb = Sqlite3ProducerDb(self.databaseFilePath)
        producer = Producer(prefix, suffix, face, self.keyChain, testDb)
        contentKey = [None]  # Blob

        def checkEncryptionKeys(result, testTime, roundedTime,
                                expectedExpressInterestCallCount):
            self.assertEqual(expectedExpressInterestCallCount,
                             expressInterestCallCount[0])

            self.assertEqual(True, testDb.hasContentKey(testTime))
            contentKey[0] = testDb.getContentKey(testTime)

            params = EncryptParams(EncryptAlgorithmType.RsaOaep)
            for i in range(len(result)):
                key = result[i]
                keyName = key.getName()
                self.assertEqual(cKeyName, keyName.getSubName(0, 6))
                self.assertEqual(keyName.get(6), roundedTime)
                self.assertEqual(keyName.get(7), Encryptor.NAME_COMPONENT_FOR)
                self.assertEqual(True,
                                 keyName.getSubName(8) in self.decryptionKeys)

                decryptionKey = self.decryptionKeys[keyName.getSubName(8)]
                self.assertEqual(True, decryptionKey.size() != 0)
                encryptedKeyEncoding = key.getContent()

                content = EncryptedContent()
                content.wireDecode(encryptedKeyEncoding)
                encryptedKey = content.getPayload()
                retrievedKey = RsaAlgorithm.decrypt(decryptionKey,
                                                    encryptedKey, params)

                self.assertTrue(contentKey[0].equals(retrievedKey))

            self.assertEqual(3, len(result))

        # An initial test to confirm that keys are created for this time slot.
        contentKeyName1 = producer.createContentKey(
            testTime1, lambda keys: checkEncryptionKeys(
                keys, testTime1, testTimeRounded1, 3))

        # Verify that we do not repeat the search for e-keys. The total
        #   expressInterestCallCount should be the same.
        contentKeyName2 = producer.createContentKey(
            testTime2, lambda keys: checkEncryptionKeys(
                keys, testTime2, testTimeRounded2, 3))

        # Confirm content key names are correct
        self.assertEqual(cKeyName, contentKeyName1.getPrefix(-1))
        self.assertEqual(testTimeRounded1, contentKeyName1.get(6))
        self.assertEqual(cKeyName, contentKeyName2.getPrefix(-1))
        self.assertEqual(testTimeRounded2, contentKeyName2.get(6))

        # Confirm that produce encrypts with the correct key and has the right name.
        testData = Data()
        producer.produce(testData, testTime2, Blob(DATA_CONTENT, False))

        producedName = testData.getName()
        self.assertEqual(cKeyName.getPrefix(-1), producedName.getSubName(0, 5))
        self.assertEqual(testTimeComponent2, producedName.get(5))
        self.assertEqual(Encryptor.NAME_COMPONENT_FOR, producedName.get(6))
        self.assertEqual(cKeyName, producedName.getSubName(7, 6))
        self.assertEqual(testTimeRounded2, producedName.get(13))

        dataBlob = testData.getContent()

        dataContent = EncryptedContent()
        dataContent.wireDecode(dataBlob)
        encryptedData = dataContent.getPayload()
        initialVector = dataContent.getInitialVector()

        params = EncryptParams(EncryptAlgorithmType.AesCbc, 16)
        params.setInitialVector(initialVector)
        decryptTest = AesAlgorithm.decrypt(contentKey[0], encryptedData,
                                           params)
        self.assertTrue(decryptTest.equals(Blob(DATA_CONTENT, False)))

    def test_content_key_search(self):
        timeMarkerFirstHop = Name("20150101T070000/20150101T080000")
        timeMarkerSecondHop = Name("20150101T080000/20150101T090000")
        timeMarkerThirdHop = Name("20150101T100000/20150101T110000")

        prefix = Name("/prefix")
        suffix = Name("/suffix")
        expectedInterest = Name(prefix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_READ)
        expectedInterest.append(suffix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_E_KEY)

        cKeyName = Name(prefix)
        cKeyName.append(Encryptor.NAME_COMPONENT_SAMPLE)
        cKeyName.append(suffix)
        cKeyName.append(Encryptor.NAME_COMPONENT_C_KEY)

        testTime = Schedule.fromIsoString("20150101T100001")

        # Create content keys required for this test case:
        self.createEncryptionKey(expectedInterest, timeMarkerFirstHop)
        self.createEncryptionKey(expectedInterest, timeMarkerSecondHop)
        self.createEncryptionKey(expectedInterest, timeMarkerThirdHop)

        requestCount = [0]

        # Prepare a TestFace to instantly answer calls to expressInterest.
        class TestFace(object):
            def __init__(self, handleExpressInterest):
                self.handleExpressInterest = handleExpressInterest

            def expressInterest(self, interest, onData, onTimeout,
                                onNetworkNack):
                return self.handleExpressInterest(interest, onData, onTimeout,
                                                  onNetworkNack)

        def handleExpressInterest(interest, onData, onTimeout, onNetworkNack):
            self.assertEqual(expectedInterest, interest.getName())

            gotInterestName = False
            for i in range(3):
                interestName = Name(interest.getName())
                if i == 0:
                    interestName.append(timeMarkerFirstHop)
                elif i == 1:
                    interestName.append(timeMarkerSecondHop)
                elif i == 2:
                    interestName.append(timeMarkerThirdHop)

                # matchesName will check the Exclude.
                if interest.matchesName(interestName):
                    gotInterestName = True
                    requestCount[0] += 1
                    break

            if gotInterestName:
                onData(interest, self.encryptionKeys[interestName])

            return 0

        face = TestFace(handleExpressInterest)

        # Verify that if a key is found, but not within the right time slot, the
        # search is refined until a valid time slot is found.
        testDb = Sqlite3ProducerDb(self.databaseFilePath)
        producer = Producer(prefix, suffix, face, self.keyChain, testDb)

        def onEncryptedKeys(result):
            self.assertEqual(3, requestCount[0])
            self.assertEqual(1, len(result))

            keyData = result[0]
            keyName = keyData.getName()
            self.assertEqual(cKeyName, keyName.getSubName(0, 4))
            self.assertEqual(timeMarkerThirdHop.get(0), keyName.get(4))
            self.assertEqual(Encryptor.NAME_COMPONENT_FOR, keyName.get(5))
            self.assertEqual(expectedInterest.append(timeMarkerThirdHop),
                             keyName.getSubName(6))

        producer.createContentKey(testTime, onEncryptedKeys)

    def test_content_key_timeout(self):
        prefix = Name("/prefix")
        suffix = Name("/suffix")
        expectedInterest = Name(prefix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_READ)
        expectedInterest.append(suffix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_E_KEY)

        testTime = Schedule.fromIsoString("20150101T100001")

        timeoutCount = [0]

        # Prepare a TestFace to instantly answer calls to expressInterest.
        class TestFace(object):
            def __init__(self, handleExpressInterest):
                self.handleExpressInterest = handleExpressInterest

            def expressInterest(self, interest, onData, onTimeout,
                                onNetworkNack):
                return self.handleExpressInterest(interest, onData, onTimeout,
                                                  onNetworkNack)

        def handleExpressInterest(interest, onData, onTimeout, onNetworkNack):
            self.assertEqual(expectedInterest, interest.getName())
            timeoutCount[0] += 1
            onTimeout(interest)

            return 0

        face = TestFace(handleExpressInterest)

        # Verify that if no response is received, the producer appropriately times
        # out. The result vector should not contain elements that have timed out.
        testDb = Sqlite3ProducerDb(self.databaseFilePath)
        producer = Producer(prefix, suffix, face, self.keyChain, testDb)

        def onEncryptedKeys(result):
            self.assertEqual(4, timeoutCount[0])
            self.assertEqual(0, len(result))

        producer.createContentKey(testTime, onEncryptedKeys)
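
The three tests above drive Producer.createContentKey through a stub TestFace. As a rough, illustrative sketch only (the local forwarder, the already-initialized keyChain variable, and the database path are assumptions rather than something these tests define), the same call pattern against a real Face would look roughly like this:

# Illustrative sketch, not part of the test suite above. Assumes a local NDN
# forwarder, the pyndn.encrypt import paths used by these tests, and an
# already-initialized KeyChain bound to the name keyChain.
import time
from pyndn import Face, Name
from pyndn.encrypt import Producer, Schedule, Sqlite3ProducerDb

face = Face()  # connects via a Unix socket or to "localhost" by default
database = Sqlite3ProducerDb("producer.db")
producer = Producer(Name("/prefix"), Name("/a/b/c"), face, keyChain, database)

timeSlot = Schedule.fromIsoString("20150101T100001")

def onEncryptedKeys(keys):
    # keys holds one Data packet per E-KEY found: the hour's C-KEY encrypted
    # for that E-KEY's owner.
    print("Created {} encrypted content keys".format(len(keys)))

producer.createContentKey(timeSlot, onEncryptedKeys)

# Pump the face briefly so the E-KEY interests can actually be answered.
for _ in range(500):
    face.processEvents()
    time.sleep(0.01)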
Code example #21 (file: dpu.py, project: zhehaowang/sample-omh-dpu)
class TestDPU(object):
    def __init__(self, face, encryptResult, defaultPrefix, link = None):
        # Set up face
        self.face = face
        self._encryptResult = encryptResult
        self._link = link

        self.databaseFilePath = "policy_config/test_consumer_dpu.db"
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            # no such file
            pass

        self.groupName = Name(defaultPrefix)

        # Set up the keyChain.
        identityStorage = BasicIdentityStorage()
        privateKeyStorage = FilePrivateKeyStorage()
        self.keyChain = KeyChain(
          IdentityManager(identityStorage, privateKeyStorage),
          NoVerifyPolicyManager())

        # Authorized identity
        identityName = Name("/ndn/edu/basel/dpu")
        # Function name: the function that this DPU provides
        self._functionName = "bounding_box"
        self._identityName = identityName
        
        self.certificateName = self.keyChain.createIdentityAndCertificate(identityName)
        # TODO: when using BasicIdentityStorage and FilePrivateKeyStorage, the newly
        #   generated certificate is not installed by default, so a later keyChain.sign
        #   call raises an error.
        #self.keyChain.installIdentityCertificate()
        
        self.memoryContentCache = MemoryContentCache(self.face)

        try:
            commandSigningKeyChain = KeyChain()
            print "Default certificate name is: " + self.keyChain.getDefaultCertificateName().toUri()
            self.face.setCommandSigningInfo(commandSigningKeyChain, commandSigningKeyChain.getDefaultCertificateName())
            self.memoryContentCache.registerPrefix(identityName, self.onRegisterFailed, self.onDataNotFound)
        except SecurityException as e:
            print str(e)
            print "Cannot use default certificate, use created certificate in FilePrivateKeyStorage"
            self.face.setCommandSigningInfo(self.keyChain, self.certificateName)
            self.memoryContentCache.registerPrefix(identityName, self.onRegisterFailed, self.onDataNotFound)

        consumerKeyName = IdentityCertificate.certificateNameToPublicKeyName(self.certificateName)
        consumerCertificate = identityStorage.getCertificate(self.certificateName)
        self.consumer = Consumer(
          face, self.keyChain, self.groupName, identityName,
          Sqlite3ConsumerDb(self.databaseFilePath))

        # TODO: Read the private key to decrypt d-key...this may or may not be ideal
        base64Content = None
        with open(privateKeyStorage.nameTransform(consumerKeyName.toUri(), ".pri")) as keyFile:
            print privateKeyStorage.nameTransform(consumerKeyName.toUri(), ".pri")
            base64Content = keyFile.read()
            #print base64Content
        der = Blob(base64.b64decode(base64Content), False)
        self.consumer.addDecryptionKey(consumerKeyName, der)
        self.memoryContentCache.add(consumerCertificate)

        accessRequestInterest = Interest(Name(self.groupName).append("read_access_request").append(self.certificateName).appendVersion(int(time.time())))
        self.face.expressInterest(accessRequestInterest, self.onAccessRequestData, self.onAccessRequestTimeout)
        print "Access request interest name: " + accessRequestInterest.getName().toUri()

        self._tasks = dict()

        return

    def onAccessRequestData(self, interest, data):
        print "Access request data: " + data.getName().toUri()
        return

    def onAccessRequestTimeout(self, interest):
        print "Access request times out: " + interest.getName().toUri()
        print "Assuming certificate sent and D-key generated"
        return

    def startConsuming(self, userId, basetimeString, producedDataName, dataNum, outerDataName):
        contentName = Name(userId).append(Name("/SAMPLE/fitness/physical_activity/time_location/"))
        baseZFill = 3

        for i in range(0, dataNum):
            timeString = basetimeString + str(i).zfill(baseZFill)
            timeFloat = Schedule.fromIsoString(timeString)

            self.consume(Name(contentName).append(timeString), producedDataName, outerDataName)
            print "Trying to consume: " + Name(contentName).append(timeString).toUri()

    def onDataNotFound(self, prefix, interest, face, interestFilterId, filter):
        print "Data not found for interest: " + interest.getName().toUri()
        functionComponentIdx = len(self._identityName)
        if interest.getName().get(functionComponentIdx).toEscapedString() == self._functionName:
            try:
                parameters = interest.getName().get(functionComponentIdx + 1).toEscapedString()
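                # The parameter component encodes 'userId,basetimeString,producedDataName'.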
                pattern = re.compile('([^,]*),([^,]*),([^,]*)')
                matching = pattern.match(str(Name.fromEscapedString(parameters)))
                
                userId = matching.group(1)
                basetimeString = matching.group(2)
                producedDataName = matching.group(3)
                dataNum = 60
                self._tasks[producedDataName] = {"cap_num": dataNum, "current_num": 0, "dataset": []}
                self.startConsuming(userId, basetimeString, producedDataName, dataNum, interest.getName().toUri())
            except Exception as e:
                print "Exception in processing function arguments: " + str(e)
        else:
            print "function name mismatch: expected " + self._functionName + " ; got " + interest.getName().get(functionComponentIdx).toEscapedString()
        return

    def onRegisterFailed(self, prefix):
        print "Prefix registration failed: " + prefix.toUri()
        return

    def consume(self, contentName, producedDataName, outerDataName):
        self.consumer.consume(contentName, lambda data, result: self.onConsumeComplete(data, result, producedDataName, outerDataName), self.onConsumeFailed)

    def onConsumeComplete(self, data, result, producedDataName, outerDataName):
        print "Consume complete for data name: " + data.getName().toUri()

        if producedDataName in self._tasks:
            self._tasks[producedDataName]["current_num"] += 1
            self._tasks[producedDataName]["dataset"].append(result)
            if self._tasks[producedDataName]["current_num"] == self._tasks[producedDataName]["cap_num"]:
                maxLng = -1000
                minLng = 1000
                maxLat = -1000
                minLat = 1000
                for item in self._tasks[producedDataName]["dataset"]:
                    dataObject = json.loads(str(item))
                    if dataObject["lat"] > maxLat:
                        maxLat = dataObject["lat"]
                    if dataObject["lat"] < minLat:
                        minLat = dataObject["lat"]
                    if dataObject["lng"] > maxLng:
                        maxLng = dataObject["lng"]
                    if dataObject["lng"] < minLng:
                        minLng = dataObject["lng"]

                if not self._encryptResult:
                    innerData = Data(Name(str(producedDataName)))
                    innerData.setContent(json.dumps({"minLat": minLat, "maxLat": maxLat, "minLng": minLng, "maxLng": maxLng}))
                    #self.keyChain.sign(innerData)

                    outerData = Data(Name(str(outerDataName)))
                    outerData.setContent(innerData.wireEncode())
                    #self.keyChain.sign(outerData)

                    self.memoryContentCache.add(outerData)
                    self.initiateContentStoreInsertion("/ndn/edu/ucla/remap/ndnfit/repo", outerData)
                    print "Calculation completed, put data to repo"
                else:
                    print "Encrypt result is not implemented"

    def onConsumeFailed(self, code, message):
        print "Consume error " + str(code) + ": " + message

    def initiateContentStoreInsertion(self, repoCommandPrefix, data):
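        # Send a repo 'insert' command interest carrying the data's name so the
        # repo fetches and stores the packet (which was just added to the
        # memoryContentCache).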
        fetchName = data.getName()
        parameter = repo_command_parameter_pb2.RepoCommandParameterMessage()
        # Add the Name.
        for i in range(fetchName.size()):
            parameter.repo_command_parameter.name.component.append(
              fetchName[i].getValue().toBytes())

        # Create the command interest.
        interest = Interest(Name(repoCommandPrefix).append("insert")
          .append(Name.Component(ProtobufTlv.encode(parameter))))
        self.face.makeCommandInterest(interest)

        self.face.expressInterest(interest, self.onRepoData, self.onRepoTimeout)

    def onRepoData(self, interest, data):
        #print "received repo data: " + interest.getName().toUri()
        return

    def onRepoTimeout(self, interest):
        #print "repo command times out: " + interest.getName().getPrefix(-1).toUri()
        return
Code example #22
class TestConsumer(object):
    def __init__(self, face):
        # Set up face
        self.face = face

        self.databaseFilePath = "policy_config/test_consumer.db"
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            # no such file
            pass

        self.groupName = Name("/org/openmhealth/haitao")

        # Set up the keyChain.
        identityStorage = BasicIdentityStorage()
        privateKeyStorage = FilePrivateKeyStorage()
        self.keyChain = KeyChain(
            IdentityManager(identityStorage, privateKeyStorage),
            NoVerifyPolicyManager())
        # Authorized identity
        identityName = Name("/org/openmhealth/dvu-python-3")
        # Unauthorized identity
        #identityName = Name("/org/openmhealth/dvu-python-1")

        self.certificateName = self.keyChain.createIdentityAndCertificate(
            identityName)

        self.face.setCommandSigningInfo(self.keyChain, self.certificateName)

        consumerKeyName = IdentityCertificate.certificateNameToPublicKeyName(
            self.certificateName)
        consumerCertificate = identityStorage.getCertificate(
            self.certificateName)
        self.consumer = Consumer(face, self.keyChain, self.groupName,
                                 identityName,
                                 Sqlite3ConsumerDb(self.databaseFilePath))

        # TODO: Read the private key to decrypt d-key...this may or may not be ideal
        base64Content = None
        with open(
                privateKeyStorage.nameTransform(consumerKeyName.toUri(),
                                                ".pri")) as keyFile:
            print privateKeyStorage.nameTransform(consumerKeyName.toUri(),
                                                  ".pri")
            base64Content = keyFile.read()
            #print base64Content
        der = Blob(base64.b64decode(base64Content), False)
        self.consumer.addDecryptionKey(consumerKeyName, der)
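        # With its own private key registered, the Consumer can decrypt the D-KEYs
        # that the group manager encrypted for this identity.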

        self.memoryContentCache = MemoryContentCache(self.face)
        self.memoryContentCache.registerPrefix(identityName,
                                               self.onRegisterFailed,
                                               self.onDataNotFound)
        self.memoryContentCache.add(consumerCertificate)

        accessRequestInterest = Interest(
            Name(self.groupName).append("read_access_request").append(
                self.certificateName).appendVersion(int(time.time())))
        self.face.expressInterest(accessRequestInterest,
                                  self.onAccessRequestData,
                                  self.onAccessRequestTimeout)
        print "Access request interest name: " + accessRequestInterest.getName(
        ).toUri()

        self.consumeCatalog = True
        return

    def onAccessRequestData(self, interest, data):
        print "Access request data: " + data.getName().toUri()
        print "Start consuming"
        self.startConsuming()
        return

    def onAccessRequestTimeout(self, interest):
        print "Access request times out: " + interest.getName().toUri()
        print "Assuming certificate sent and D-key generated, start consuming"
        self.startConsuming()
        return

    def startConsuming(self):
        if self.consumeCatalog:
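            # Fetch the catalog of available timestamps directly (it is not consumed
            # through the encrypted path); each entry is then consumed in
            # onCatalogConsumeComplete.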
            contentName = Name(
                "/org/openmhealth/haitao/SAMPLE/fitness/physical_activity/time_location/catalog/20161024T213400"
            )
            catalogInterest = Interest(contentName)
            self.face.expressInterest(catalogInterest,
                                      self.onCatalogConsumeComplete,
                                      self.onCatalogConsumeFailed)
            # self.consumer.consume(contentName, self.onCatalogConsumeComplete, self.onConsumeFailed)
            print "Trying to consume: " + contentName.toUri()
        else:
            contentName = Name(
                "/org/openmhealth/haitao/SAMPLE/fitness/physical_activity/time_location/"
            )
            dataNum = 60
            baseZFill = 3
            basetimeString = "20160620T080"

            for i in range(0, dataNum):
                timeString = basetimeString + str(i).zfill(baseZFill)
                timeFloat = Schedule.fromIsoString(timeString)

                self.consumer.consume(
                    Name(contentName).append(timeString),
                    self.onConsumeComplete, self.onConsumeFailed)
                print "Trying to consume: " + Name(contentName).append(
                    timeString).toUri()

    def onDataNotFound(self, prefix, interest, face, interestFilterId, filter):
        print "Data not found for interest: " + interest.getName().toUri()
        return

    def onRegisterFailed(self, prefix):
        print "Prefix registration failed: " + prefix.toUri()
        return

    def onCatalogConsumeComplete(self, interest, data):
        print "Consume complete for catalog: " + data.getName().toUri()
        resultObject = json.loads(data.getContent().toRawStr())
        print data.getContent().toRawStr()
        contentName = Name(
            "/org/openmhealth/haitao/SAMPLE/fitness/physical_activity/time_location/"
        )

        for i in range(0, len(resultObject)):
            # timeString = Schedule.toIsoString(int(resultObject[i]) * 1000)
            timeString = resultObject[i]
            self.consumer.consume(
                Name(contentName).append(timeString), self.onConsumeComplete,
                self.onConsumeFailed)
            print "Trying to consume: " + Name(contentName).append(
                timeString).toUri()

    def onCatalogConsumeFailed(self, interest):
        print "Data request times out: " + interest.getName().toUri()
        return

    def onConsumeComplete(self, data, result):
        print "Consume complete for data name: " + data.getName().toUri()
        print result

        # Test the length of encrypted data

        # dataBlob = data.getContent()
        # dataContent = EncryptedContent()
        # dataContent.wireDecode(dataBlob)
        # encryptedData = dataContent.getPayload()
        # print len(encryptedData)

    # TODO: this callback should probably indicate which consumption failed
    def onConsumeFailed(self, code, message):
        print "Consume error " + str(code) + ": " + message
Code example #23 (file: dpu.py, project: zhtaoxiang/omh-dpu-dvu)
class TestDPU(object):
    def __init__(self, face, encryptResult, link = None):
        # Set up face
        self.face = face
        self._encryptResult = encryptResult
        self._link = link

        self.databaseFilePath = "policy_config/test_consumer_dpu.db"
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            # no such file
            pass

        self.groupName = Name("/org/openmhealth/haitao")

        # Set up the keyChain.
        identityStorage = BasicIdentityStorage()
        privateKeyStorage = FilePrivateKeyStorage()
        self.keyChain = KeyChain(
          IdentityManager(identityStorage, privateKeyStorage),
          NoVerifyPolicyManager())
        # Authorized identity
        identityName = Name("/ndn/edu/basel/dpu")
        # Function name: the function that this DPU provides
        self._functionName = "bounding_box"
        self._identityName = identityName
        
        self.certificateName = self.keyChain.createIdentityAndCertificate(identityName)
        # TODO: when using BasicIdentityStorage and FilePrivateKeyStorage, the newly
        #   generated certificate is not installed by default, so a later keyChain.sign
        #   call raises an error.
        #self.keyChain.installIdentityCertificate()
        
        self.face.setCommandSigningInfo(self.keyChain, self.certificateName)

        consumerKeyName = IdentityCertificate.certificateNameToPublicKeyName(self.certificateName)
        consumerCertificate = identityStorage.getCertificate(self.certificateName)
        self.consumer = Consumer(
          face, self.keyChain, self.groupName, identityName,
          Sqlite3ConsumerDb(self.databaseFilePath))

        # TODO: Read the private key to decrypt d-key...this may or may not be ideal
        base64Content = None
        with open(privateKeyStorage.nameTransform(consumerKeyName.toUri(), ".pri")) as keyFile:
            print privateKeyStorage.nameTransform(consumerKeyName.toUri(), ".pri")
            base64Content = keyFile.read()
            #print base64Content
        der = Blob(base64.b64decode(base64Content), False)
        self.consumer.addDecryptionKey(consumerKeyName, der)

        self.memoryContentCache = MemoryContentCache(self.face)
        self.memoryContentCache.registerPrefix(identityName, self.onRegisterFailed, self.onDataNotFound)
        self.memoryContentCache.add(consumerCertificate)

        accessRequestInterest = Interest(Name(self.groupName).append("read_access_request").append(self.certificateName).appendVersion(int(time.time())))
        self.face.expressInterest(accessRequestInterest, self.onAccessRequestData, self.onAccessRequestTimeout)
        print "Access request interest name: " + accessRequestInterest.getName().toUri()

        self._tasks = dict()

        return

    def onAccessRequestData(self, interest, data):
        print "Access request data: " + data.getName().toUri()
        return

    def onAccessRequestTimeout(self, interest):
        print "Access request times out: " + interest.getName().toUri()
        print "Assuming certificate sent and D-key generated"
        return

    def startConsuming(self, userId, basetimeString, producedDataName, dataNum, outerDataName):
        contentName = Name(userId).append(Name("/SAMPLE/fitness/physical_activity/time_location/"))
        baseZFill = 2

        for i in range(0, dataNum):
            timeString = basetimeString + str(i).zfill(baseZFill) + '00'
            timeFloat = Schedule.fromIsoString(timeString)

            self.consume(Name(contentName).append(timeString), producedDataName, outerDataName)
            print "Trying to consume: " + Name(contentName).append(timeString).toUri()

    def onDataNotFound(self, prefix, interest, face, interestFilterId, filter):
        print "Data not found for interest: " + interest.getName().toUri()
        functionComponentIdx = len(self._identityName)
        if interest.getName().get(functionComponentIdx).toEscapedString() == self._functionName:
            try:
                parameters = interest.getName().get(functionComponentIdx + 1).toEscapedString()
                pattern = re.compile('([^,]*),([^,]*),([^,]*)')
                matching = pattern.match(str(Name.fromEscapedString(parameters)))
                #print parameters
                #print str(Name.fromEscapedString(parameters))

                userId = matching.group(1)
                basetimeString = matching.group(2)
                producedDataName = matching.group(3)
                dataNum = 2
                self._tasks[producedDataName] = {"cap_num": dataNum, "current_num": 0, "dataset": []}
                self.startConsuming(userId, basetimeString, producedDataName, dataNum, interest.getName().toUri())
            except Exception as e:
                print "Exception in processing function arguments: " + str(e)
        else:
            print "function name mismatch: expected " + self._functionName + " ; got " + interest.getName().get(functionComponentIdx).toEscapedString()
        return

    def onRegisterFailed(self, prefix):
        print "Prefix registration failed: " + prefix.toUri()
        return

    def consume(self, contentName, producedDataName, outerDataName):
        self.consumer.consume(contentName, lambda data, result: self.onConsumeComplete(data, result, producedDataName, outerDataName), lambda code, message : self.onConsumeFailed(code, message, producedDataName, outerDataName))

    def onConsumeComplete(self, data, result, producedDataName, outerDataName):
        print "Consume complete for data name: " + data.getName().toUri()

        if producedDataName in self._tasks:
            self._tasks[producedDataName]["current_num"] += 1
            resultObject = json.loads(str(result))
            for i in range(0, len(resultObject)):
                self._tasks[producedDataName]["dataset"].append(resultObject[i])
            if self._tasks[producedDataName]["current_num"] == self._tasks[producedDataName]["cap_num"]:
                self.onGetAllData(producedDataName, outerDataName)

    def onConsumeFailed(self, code, message, producedDataName, outerDataName):
        print "Consume error " + str(code) + ": " + message
        if producedDataName in self._tasks:
            self._tasks[producedDataName]["current_num"] += 1
            if self._tasks[producedDataName]["current_num"] == self._tasks[producedDataName]["cap_num"]:
                self.onGetAllData(producedDataName, outerDataName)

    def onGetAllData(self, producedDataName, outerDataName):
        maxLng = -1000
        minLng = 1000
        maxLat = -1000
        minLat = 1000
        for item in self._tasks[producedDataName]["dataset"]:
            dataObject = json.loads(str(item))
            if dataObject["lat"] > maxLat:
                maxLat = dataObject["lat"]
            if dataObject["lat"] < minLat:
                minLat = dataObject["lat"]
            if dataObject["lng"] > maxLng:
                maxLng = dataObject["lng"]
            if dataObject["lng"] < minLng:
                minLng = dataObject["lng"]

        if not self._encryptResult:
            innerData = Data(Name(str(producedDataName)))
            innerData.setContent(json.dumps({"minLat": minLat, "maxLat": maxLat, "minLng": minLng, "maxLng": maxLng}))
            #self.keyChain.sign(innerData)

            outerData = Data(Name(str(outerDataName)))
            outerData.setContent(innerData.wireEncode())
            #self.keyChain.sign(outerData)

            self.memoryContentCache.add(outerData)
            self.initiateContentStoreInsertion("/ndn/edu/ucla/remap/ndnfit/repo", outerData)
            print "Calculation completed, put data to repo"
        else:
            print "Encrypt result is not implemented"

    def initiateContentStoreInsertion(self, repoCommandPrefix, data):
        fetchName = data.getName()
        parameter = repo_command_parameter_pb2.RepoCommandParameterMessage()
        # Add the Name.
        for i in range(fetchName.size()):
            parameter.repo_command_parameter.name.component.append(
              fetchName[i].getValue().toBytes())

        # Create the command interest.
        interest = Interest(Name(repoCommandPrefix).append("insert")
          .append(Name.Component(ProtobufTlv.encode(parameter))))
        self.face.makeCommandInterest(interest)

        self.face.expressInterest(interest, self.onRepoData, self.onRepoTimeout)

    def onRepoData(self, interest, data):
        #print "received repo data: " + interest.getName().toUri()
        return

    def onRepoTimeout(self, interest):
        #print "repo command times out: " + interest.getName().getPrefix(-1).toUri()
        return
Code example #24
class PLC(object):

    def __init__(self, name=None):
        # PLC Simulation
        self.slaveid = 0x00
        self.name = name
        if not name:
            self.name = socket.gethostname()
        self.plcrpcclient = PLCRPCClient(rpc_server="0.0.0.0", rpc_port=8000, plc=self.name)
        self.registered = False
        self.speed = 0.2
        self.db = {}
        # NDN
        self._callbackCount = 0
        self.primary_prefix = "/example"
        self.names = []
        self.freshnessPeriod = 2000 # in milliseconds (2000 = 2s).
        
        self.identity_manager = IdentityManager()
        self.keyChain = KeyChain(self.identity_manager)
        
    def _get_sensor_data(self):

        sensor_data = self.plcrpcclient.readSensors()
        
        for sensor in sensor_data:
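            # Each sensor entry carries a register type and data address (presumably
            # Modbus-style): 'c'/'d' registers hold booleans, 'h'/'i' hold integers.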
            register = sensor_data[sensor]['register_type']

            address = int(sensor_data[sensor]['data_address'])

            if register in ['c', 'd']:
                value = bool(sensor_data[sensor]['value'])
            elif register in ['h', 'i']:
                value = int(sensor_data[sensor]['value'])

            address = address + 1  # section 4.4 of specification
            self.db[sensor] = store(self.slaveid, address, register, value)

    def _registerPLC(self):
        self.slaveid = self.plcrpcclient.registerPLC()
        self.registered = True
        log.debug("[PLC][%s] Registered on scadasim rpc" % self.name)
        return True

    def update(self):
        # log.debug("[PLC][%s] Updating PLC values with sensor values" % self)
        # while not self.queue.empty():
        #     # Update scadasim with any new values from Master
        #     fx, address, values = self.queue.get()
        #     log.debug("[PLC][%s] setting fx: %s register:%s to value:%s" %
        #               (self.name, fx, address, values))
        #     self.plcrpcclient.setValues(fx=fx, address=address, values=values)

        self._get_sensor_data()

        delay = (-time.time() % self.speed)
        t = threading.Timer(delay, self.update, ())
        t.daemon = True
        t.start()

    def set_speed(self, speed):
        self.speed = speed

    def __repr__(self):
        return "%s" % self.name

    def main(self):

        log.debug("[PLC][%s] Initialized" % self.name)
        while not self.registered:
            log.debug(
                "[PLC][%s] Trying to register with scadasim rpc" % self.name)
            try:
                self._registerPLC()
            except KeyError:
                log.warn(
                    "[PLC][%s] PLC not found within scadasim. Verify Docker "
                    "Compose container names match the list of PLCs in the "
                    "scadasim config" % self.name)

            time.sleep(1)

        log.debug("[PLC][%s] Starting update service" % self.name)
        self.update()

        log.debug("[PLC][%s] Starting NDN Producer" % self.name)
        

        #  TODO: Move this setup stuff into a function and make dynamic.
        log.info("Listening on: ")
        for n in self.db:
            # /ndn/plc2-site/plc2
            name = Name("{0}/{1}-site/{1}/{2}".format(self.primary_prefix, self.name, n))
            log.info("\t{}".format(name))

            name_identity = self.keyChain.createIdentityAndCertificate(name, self.keyChain.getDefaultKeyParams())
            log.debug("Name identity: {}".format(name_identity))
            self.face.setCommandSigningInfo(self.keyChain, name_identity)
            self.face.registerPrefix(name, self.onInterest, self.onRegisterFailed)

            # log.debug("Registered Prefix: {} {}", str(self.primary_prefix), str(n))
        # END LOOP

        # Keep Running unless error.
        while self._callbackCount < 1:
            self.face.processEvents()
            time.sleep(0.01)

        self.face.shutdown()

    # NDN Stuff
    def ndnInit(self): 
        Interest.setDefaultCanBePrefix(True)
        # TODO: Bug? Does not automatically retry over TCP if the Unix socket fails, as the docs say it should.
        # self.face = Face("localhost", 6363)
        self.face = Face()
        self.primary_prefix = "/ndn"

    def onInterest(self, prefix, interest, face, interestFilterId, filter):
        self._callbackCount = 0
        
        # log.debug("prefix: '{}'".format(prefix))
        # log.debug("interest: '{}'".format(interest))
        # log.debug("face: '{}'".format(face))
        # log.debug("interestFilterId: '{}'".format(interestFilterId))
        # log.debug("filter: '{}'".format(filter))

        data = Data()
        
        # 
        # log.debug("----")
        # for n in self.db:
        #     log.debug(n)
        #     log.debug(self.db[n].value)
        # log.debug("----")
        # 
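        # The last component of the registered prefix names the sensor entry in self.db.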

        n = str(prefix).split("/")[-1]

        log.debug("{} value '{}' ({})".format(prefix, self.db[n].value, self.freshnessPeriod))

        data.setContent(str(self.db[n].value)) # TODO: Why does this need to be converted to string?
        data.setName(prefix)
        meta = MetaInfo()
        meta.setFreshnessPeriod(self.freshnessPeriod)
        data.setMetaInfo(meta) 
        self.keyChain.sign(data)

        face.putData(data)

    def onRegisterFailed(self, prefix):
        self._callbackCount += 1
        dump("Unable to register", prefix)
Code example #25 (file: base_node.py, project: remap/ndn-flow)
class BaseNode(object):
    """
    This class contains methods/attributes common to both node and controller.
    """
    def __init__(self, transport = None, conn = None):
        """
        Initialize the network and security classes for the node
        """
        super(BaseNode, self).__init__()
        self.faceTransport = transport
        self.faceConn = conn
        
        self._identityStorage = BasicIdentityStorage()

        self._identityManager = IdentityManager(self._identityStorage, FilePrivateKeyStorage())
        self._policyManager = IotPolicyManager(self._identityStorage)

        # hopefully there is some private/public key pair available
        self._keyChain = KeyChain(self._identityManager, self._policyManager)

        self._registrationFailures = 0
        self._prepareLogging()

        self._setupComplete = False


##
# Logging
##
    def _prepareLogging(self):
        self.log = logging.getLogger(str(self.__class__))
        self.log.setLevel(logging.DEBUG)
        logFormat = "%(asctime)-15s %(name)-20s %(funcName)-20s (%(levelname)-8s):\n\t%(message)s"
        self._console = logging.StreamHandler()
        self._console.setFormatter(logging.Formatter(logFormat))
        self._console.setLevel(logging.INFO)
        # without this, a lot of ThreadsafeFace errors get swallowed up
        logging.getLogger("trollius").addHandler(self._console)
        self.log.addHandler(self._console)

    def setLogLevel(self, level):
        """
        Set the log level that will be output to standard error
        :param level: A log level constant defined in the logging module (e.g. logging.INFO) 
        """
        self._console.setLevel(level)

    def getLogger(self):
        """
        :return: The logger associated with this node
        :rtype: logging.Logger
        """
        return self.log

###
# Startup and shutdown
###
    def beforeLoopStart(self):
        """
        Called before the event loop starts.
        """
        pass

    def getDefaultCertificateName(self):
        try:
            certName = self._identityStorage.getDefaultCertificateNameForIdentity( 
                self._policyManager.getDeviceIdentity())
        except SecurityException as e:
            # zhehao: in the case of producer's /localhop prefixes, the default key is not defined in ndnsec-public-info.db
            certName = self._keyChain.createIdentityAndCertificate(self._policyManager.getDeviceIdentity())
            #certName = self._keyChain.getDefaultCertificateName()
            #print(certName.toUri())
        return certName

    def start(self):
        """
        Begins the event loop. After this, the node's Face is set up and it can
        send/receive interests+data
        """
        self.log.info("Starting up")
        self.loop = asyncio.get_event_loop()
        
        if (self.faceTransport == None or self.faceTransport == ''):
            self.face = ThreadsafeFace(self.loop)
        else:
            self.face = ThreadsafeFace(self.loop, self.faceTransport, self.faceConn)
        
        self.face.setCommandSigningInfo(self._keyChain, self.getDefaultCertificateName())
        
        self._keyChain.setFace(self.face)

        self._isStopped = False
        self.beforeLoopStart()
        
        try:
            self.loop.run_forever()
        except Exception as e:
            self.log.exception("Unhandled exception in the event loop")
        finally:
            self.stop()

    def stop(self):
        """
        Stops the node, taking it off the network
        """
        self.log.info("Shutting down")
        self._isStopped = True 
        self.loop.stop()
        
###
# Data handling
###
    def signData(self, data):
        """
        Sign the data with our network certificate
        :param pyndn.Data data: The data to sign
        """
        self._keyChain.sign(data, self.getDefaultCertificateName())

    def sendData(self, data, sign=True):
        """
        Reply to an interest with a data packet, optionally signing it.
        :param pyndn.Data data: The response data packet
        :param boolean sign: (optional, default=True) Whether the response must be signed. 
        """
        if sign:
            self.signData(data)
        self.face.putData(data)

###
# 
# 
##
    def onRegisterFailed(self, prefix):
        """
        Called when the node cannot register its name with the forwarder
        :param pyndn.Name prefix: The network name that failed registration
        """
        if self.faceTransport != None and self.faceConn != None:
            self.log.warn("Explicit face transport and connectionInfo: Could not register {}; expect a manual or autoreg on the other side.".format(prefix.toUri()))
        elif self._registrationFailures < 5:
            self._registrationFailures += 1
            self.log.warn("Could not register {}, retry: {}/{}".format(prefix.toUri(), self._registrationFailures, 5)) 
            self.face.registerPrefix(self.prefix, self._onCommandReceived, self.onRegisterFailed)
        else:
            self.log.info("Prefix registration failed")
            self.stop()

    def verificationFailed(self, dataOrInterest):
        """
        Called when verification of a data packet or command interest fails.
        :param pyndn.Data or pyndn.Interest: The packet that could not be verified
        """
        self.log.info("Received invalid" + dataOrInterest.getName().toUri())

    @staticmethod
    def getSerial():
        """
        Find and return the serial number of the Raspberry Pi. Provided in case
        you wish to distinguish data from nodes with the same name by serial.
        :return: The serial number extracted from device information in /proc/cpuinfo
        :rtype: str
        """
        try:
            if PLATFORM == "raspberry pi":
                with open('/proc/cpuinfo') as f:
                    for line in f:
                        if line.startswith('Serial'):
                            return line.split(':')[1].strip()
            else:
                return "todo"
        except NameError:
            return "todo"
Code example #26
class TestGroupManager(ut.TestCase):
    def setUp(self):
        # Reuse the policy_config subdirectory for the temporary SQLite files.
        self.dKeyDatabaseFilePath = "policy_config/manager-d-key-test.db"
        try:
            os.remove(self.dKeyDatabaseFilePath)
        except OSError:
            # no such file
            pass

        self.eKeyDatabaseFilePath = "policy_config/manager-e-key-test.db"
        try:
            os.remove(self.eKeyDatabaseFilePath)
        except OSError:
            # no such file
            pass

        self.intervalDatabaseFilePath = "policy_config/manager-interval-test.db"
        try:
            os.remove(self.intervalDatabaseFilePath)
        except OSError:
            # no such file
            pass

        self.groupKeyDatabaseFilePath = "policy_config/manager-group-key-test.db"
        try:
            os.remove(self.groupKeyDatabaseFilePath)
        except OSError:
            # no such file
            pass

        params = RsaKeyParams()
        memberDecryptKey = RsaAlgorithm.generateKey(params)
        self.decryptKeyBlob = memberDecryptKey.getKeyBits()
        memberEncryptKey = RsaAlgorithm.deriveEncryptKey(self.decryptKeyBlob)
        self.encryptKeyBlob = memberEncryptKey.getKeyBits()

        # Generate the certificate.
        self.certificate = IdentityCertificate()
        self.certificate.setName(Name("/ndn/memberA/KEY/ksk-123/ID-CERT/123"))
        contentPublicKey = PublicKey(self.encryptKeyBlob)
        self.certificate.setPublicKeyInfo(contentPublicKey)
        self.certificate.setNotBefore(0)
        self.certificate.setNotAfter(0)
        self.certificate.encode()

        signatureInfoBlob = Blob(SIG_INFO, False)
        signatureValueBlob = Blob(SIG_VALUE, False)

        signature = TlvWireFormat.get().decodeSignatureInfoAndValue(
            signatureInfoBlob.buf(), signatureValueBlob.buf())
        self.certificate.setSignature(signature)

        self.certificate.wireEncode()

        # Set up the keyChain.
        identityStorage = MemoryIdentityStorage()
        privateKeyStorage = MemoryPrivateKeyStorage()
        self.keyChain = KeyChain(
            IdentityManager(identityStorage, privateKeyStorage),
            NoVerifyPolicyManager())
        identityName = Name("TestGroupManager")
        self.keyChain.createIdentityAndCertificate(identityName)
        self.keyChain.getIdentityManager().setDefaultIdentity(identityName)

    def tearDown(self):
        try:
            os.remove(self.dKeyDatabaseFilePath)
        except OSError:
            pass
        try:
            os.remove(self.eKeyDatabaseFilePath)
        except OSError:
            pass
        try:
            os.remove(self.intervalDatabaseFilePath)
        except OSError:
            pass
        try:
            os.remove(self.groupKeyDatabaseFilePath)
        except OSError:
            pass

    def setManager(self, manager):
        # Set up the first schedule.
        schedule1 = Schedule()
        interval11 = RepetitiveInterval(
            Schedule.fromIsoString("20150825T000000"),
            Schedule.fromIsoString("20150827T000000"), 5, 10, 2,
            RepetitiveInterval.RepeatUnit.DAY)
        interval12 = RepetitiveInterval(
            Schedule.fromIsoString("20150825T000000"),
            Schedule.fromIsoString("20150827T000000"), 6, 8, 1,
            RepetitiveInterval.RepeatUnit.DAY)
        interval13 = RepetitiveInterval(
            Schedule.fromIsoString("20150827T000000"),
            Schedule.fromIsoString("20150827T000000"), 7, 8)
        schedule1.addWhiteInterval(interval11)
        schedule1.addWhiteInterval(interval12)
        schedule1.addBlackInterval(interval13)

        # Set up the second schedule.
        schedule2 = Schedule()
        interval21 = RepetitiveInterval(
            Schedule.fromIsoString("20150825T000000"),
            Schedule.fromIsoString("20150827T000000"), 9, 12, 1,
            RepetitiveInterval.RepeatUnit.DAY)
        interval22 = RepetitiveInterval(
            Schedule.fromIsoString("20150827T000000"),
            Schedule.fromIsoString("20150827T000000"), 6, 8)
        interval23 = RepetitiveInterval(
            Schedule.fromIsoString("20150827T000000"),
            Schedule.fromIsoString("20150827T000000"), 2, 4)
        schedule2.addWhiteInterval(interval21)
        schedule2.addWhiteInterval(interval22)
        schedule2.addBlackInterval(interval23)

        # Add them to the group manager database.
        manager.addSchedule("schedule1", schedule1)
        manager.addSchedule("schedule2", schedule2)

        # Make some adaptions to certificate.
        dataBlob = self.certificate.wireEncode()

        memberA = Data()
        memberA.wireDecode(dataBlob, TlvWireFormat.get())
        memberA.setName(Name("/ndn/memberA/KEY/ksk-123/ID-CERT/123"))
        memberB = Data()
        memberB.wireDecode(dataBlob, TlvWireFormat.get())
        memberB.setName(Name("/ndn/memberB/KEY/ksk-123/ID-CERT/123"))
        memberC = Data()
        memberC.wireDecode(dataBlob, TlvWireFormat.get())
        memberC.setName(Name("/ndn/memberC/KEY/ksk-123/ID-CERT/123"))

        # Add the members to the database.
        manager.addMember("schedule1", memberA)
        manager.addMember("schedule1", memberB)
        manager.addMember("schedule2", memberC)

    def test_create_d_key_data(self):
        # Create the group manager.
        manager = GroupManager(
            Name("Alice"), Name("data_type"),
            Sqlite3GroupManagerDb(self.dKeyDatabaseFilePath), 2048, 1,
            self.keyChain)

        newCertificateBlob = self.certificate.wireEncode()
        newCertificate = IdentityCertificate()
        newCertificate.wireDecode(newCertificateBlob)

        # Encrypt the D-KEY.
        data = manager._createDKeyData(
            "20150825T000000", "20150827T000000", Name("/ndn/memberA/KEY"),
            self.decryptKeyBlob,
            newCertificate.getPublicKeyInfo().getKeyDer())

        # Verify the encrypted D-KEY.
        dataContent = data.getContent()

        # Get the nonce key.
        # dataContent is a sequence of the two EncryptedContent.
        encryptedNonce = EncryptedContent()
        encryptedNonce.wireDecode(dataContent)
        self.assertEqual(0, encryptedNonce.getInitialVector().size())
        self.assertEqual(EncryptAlgorithmType.RsaOaep,
                         encryptedNonce.getAlgorithmType())

        blobNonce = encryptedNonce.getPayload()
        decryptParams = EncryptParams(EncryptAlgorithmType.RsaOaep)
        nonce = RsaAlgorithm.decrypt(self.decryptKeyBlob, blobNonce,
                                     decryptParams)

        # Get the D-KEY.
        # Use the size of encryptedNonce to find the start of encryptedPayload.
        payloadContent = dataContent.buf()[encryptedNonce.wireEncode().size():]
        encryptedPayload = EncryptedContent()
        encryptedPayload.wireDecode(payloadContent)
        self.assertEqual(16, encryptedPayload.getInitialVector().size())
        self.assertEqual(EncryptAlgorithmType.AesCbc,
                         encryptedPayload.getAlgorithmType())

        decryptParams.setAlgorithmType(EncryptAlgorithmType.AesCbc)
        decryptParams.setInitialVector(encryptedPayload.getInitialVector())
        blobPayload = encryptedPayload.getPayload()
        largePayload = AesAlgorithm.decrypt(nonce, blobPayload, decryptParams)

        self.assertTrue(largePayload.equals(self.decryptKeyBlob))

    def test_create_e_key_data(self):
        # Create the group manager.
        manager = GroupManager(
            Name("Alice"), Name("data_type"),
            Sqlite3GroupManagerDb(self.eKeyDatabaseFilePath), 1024, 1,
            self.keyChain)
        self.setManager(manager)

        data = manager._createEKeyData("20150825T090000", "20150825T110000",
                                       self.encryptKeyBlob)
        self.assertEqual(
            "/Alice/READ/data_type/E-KEY/20150825T090000/20150825T110000",
            data.getName().toUri())

        contentBlob = data.getContent()
        self.assertTrue(self.encryptKeyBlob.equals(contentBlob))

    def test_calculate_interval(self):
        # Create the group manager.
        manager = GroupManager(
            Name("Alice"), Name("data_type"),
            Sqlite3GroupManagerDb(self.intervalDatabaseFilePath), 1024, 1,
            self.keyChain)
        self.setManager(manager)

        memberKeys = {}

        timePoint1 = Schedule.fromIsoString("20150825T093000")
        result = manager._calculateInterval(timePoint1, memberKeys)
        self.assertEqual("20150825T090000",
                         Schedule.toIsoString(result.getStartTime()))
        self.assertEqual("20150825T100000",
                         Schedule.toIsoString(result.getEndTime()))

        timePoint2 = Schedule.fromIsoString("20150827T073000")
        result = manager._calculateInterval(timePoint2, memberKeys)
        self.assertEqual("20150827T070000",
                         Schedule.toIsoString(result.getStartTime()))
        self.assertEqual("20150827T080000",
                         Schedule.toIsoString(result.getEndTime()))

        timePoint3 = Schedule.fromIsoString("20150827T043000")
        result = manager._calculateInterval(timePoint3, memberKeys)
        self.assertEqual(False, result.isValid())

        timePoint4 = Schedule.fromIsoString("20150827T053000")
        result = manager._calculateInterval(timePoint4, memberKeys)
        self.assertEqual("20150827T050000",
                         Schedule.toIsoString(result.getStartTime()))
        self.assertEqual("20150827T060000",
                         Schedule.toIsoString(result.getEndTime()))

    def test_get_group_key(self):
        # Create the group manager.
        manager = GroupManager(
            Name("Alice"), Name("data_type"),
            Sqlite3GroupManagerDb(self.groupKeyDatabaseFilePath), 1024, 1,
            self.keyChain)
        self.setManager(manager)

        # Get the data list from the group manager.
        timePoint1 = Schedule.fromIsoString("20150825T093000")
        result = manager.getGroupKey(timePoint1)

        self.assertEqual(4, len(result))

        # The first data packet contains the group's encryption key (public key).
        data = result[0]
        self.assertEqual(
            "/Alice/READ/data_type/E-KEY/20150825T090000/20150825T100000",
            data.getName().toUri())
        groupEKey = EncryptKey(data.getContent())

        # Get the second data packet and decrypt.
        data = result[1]
        self.assertEqual(
            "/Alice/READ/data_type/D-KEY/20150825T090000/20150825T100000/FOR/ndn/memberA/ksk-123",
            data.getName().toUri())

        ####################################################### Start decryption.
        dataContent = data.getContent()

        # Get the nonce key.
        # dataContent is a sequence of the two EncryptedContent.
        encryptedNonce = EncryptedContent()
        encryptedNonce.wireDecode(dataContent)
        self.assertEqual(0, encryptedNonce.getInitialVector().size())
        self.assertEqual(EncryptAlgorithmType.RsaOaep,
                         encryptedNonce.getAlgorithmType())

        decryptParams = EncryptParams(EncryptAlgorithmType.RsaOaep)
        blobNonce = encryptedNonce.getPayload()
        nonce = RsaAlgorithm.decrypt(self.decryptKeyBlob, blobNonce,
                                     decryptParams)

        # Get the payload.
        # Use the size of encryptedNonce to find the start of encryptedPayload.
        payloadContent = dataContent.buf()[encryptedNonce.wireEncode().size():]
        encryptedPayload = EncryptedContent()
        encryptedPayload.wireDecode(payloadContent)
        self.assertEqual(16, encryptedPayload.getInitialVector().size())
        self.assertEqual(EncryptAlgorithmType.AesCbc,
                         encryptedPayload.getAlgorithmType())

        decryptParams.setAlgorithmType(EncryptAlgorithmType.AesCbc)
        decryptParams.setInitialVector(encryptedPayload.getInitialVector())
        blobPayload = encryptedPayload.getPayload()
        largePayload = AesAlgorithm.decrypt(nonce, blobPayload, decryptParams)

        # Get the group D-KEY.
        groupDKey = DecryptKey(largePayload)

        ####################################################### End decryption.

        # Check the D-KEY.
        derivedGroupEKey = RsaAlgorithm.deriveEncryptKey(
            groupDKey.getKeyBits())
        self.assertTrue(groupEKey.getKeyBits().equals(
            derivedGroupEKey.getKeyBits()))

        # Check the third data packet.
        data = result[2]
        self.assertEqual(
            "/Alice/READ/data_type/D-KEY/20150825T090000/20150825T100000/FOR/ndn/memberB/ksk-123",
            data.getName().toUri())

        # Check the fourth data packet.
        data = result[3]
        self.assertEqual(
            "/Alice/READ/data_type/D-KEY/20150825T090000/20150825T100000/FOR/ndn/memberC/ksk-123",
            data.getName().toUri())

        # Check invalid time stamps for getting the group key.
        timePoint2 = Schedule.fromIsoString("20150826T083000")
        self.assertEqual(0, len(manager.getGroupKey(timePoint2)))

        timePoint3 = Schedule.fromIsoString("20150827T023000")
        self.assertEqual(0, len(manager.getGroupKey(timePoint3)))

    def test_get_group_key_without_regeneration(self):
        # Create the group manager.
        manager = GroupManager(
            Name("Alice"), Name("data_type"),
            Sqlite3GroupManagerDb(self.groupKeyDatabaseFilePath), 1024, 1,
            self.keyChain)
        self.setManager(manager)

        # Get the data list from the group manager.
        timePoint1 = Schedule.fromIsoString("20150825T093000")
        result = manager.getGroupKey(timePoint1)

        self.assertEqual(4, len(result))

        # The first data packet contains the group's encryption key (public key).
        data1 = result[0]
        self.assertEqual(
            "/Alice/READ/data_type/E-KEY/20150825T090000/20150825T100000",
            data1.getName().toUri())
        groupEKey1 = EncryptKey(data1.getContent())

        # Get the second data packet and decrypt.
        data = result[1]
        self.assertEqual(
            "/Alice/READ/data_type/D-KEY/20150825T090000/20150825T100000/FOR/ndn/memberA/ksk-123",
            data.getName().toUri())

        # Add new members to the database.
        dataBlob = self.certificate.wireEncode()
        memberD = Data()
        memberD.wireDecode(dataBlob)
        memberD.setName(Name("/ndn/memberD/KEY/ksk-123/ID-CERT/123"))
        manager.addMember("schedule1", memberD)

        result2 = manager.getGroupKey(timePoint1, False)
        self.assertEqual(5, len(result2))

        # Check that the new EKey is the same as the previous one.
        data2 = result2[0]
        self.assertEqual(
            "/Alice/READ/data_type/E-KEY/20150825T090000/20150825T100000",
            data2.getName().toUri())
        groupEKey2 = EncryptKey(data2.getContent())
        self.assertTrue(groupEKey1.getKeyBits().equals(
            groupEKey2.getKeyBits()))

        # Check the second data packet.
        data2 = result2[1]
        self.assertEqual(
            "/Alice/READ/data_type/D-KEY/20150825T090000/20150825T100000/FOR/ndn/memberA/ksk-123",
            data2.getName().toUri())
Code Example #27
0
class TestGroupManager(ut.TestCase):
    def setUp(self):
        # Reuse the policy_config subdirectory for the temporary SQLite files.
        self.dKeyDatabaseFilePath = "policy_config/manager-d-key-test.db"
        try:
            os.remove(self.dKeyDatabaseFilePath)
        except OSError:
            # no such file
            pass

        self.eKeyDatabaseFilePath = "policy_config/manager-e-key-test.db"
        try:
            os.remove(self.eKeyDatabaseFilePath)
        except OSError:
            # no such file
            pass

        self.intervalDatabaseFilePath = "policy_config/manager-interval-test.db"
        try:
            os.remove(self.intervalDatabaseFilePath)
        except OSError:
            # no such file
            pass

        self.groupKeyDatabaseFilePath = "policy_config/manager-group-key-test.db"
        try:
            os.remove(self.groupKeyDatabaseFilePath)
        except OSError:
            # no such file
            pass

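        # Generate the member's RSA key pair. The private (decrypt) key lets
        # the tests decrypt the D-KEY packets, and the public (encrypt) key is
        # placed in the member certificates below.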
        params = RsaKeyParams()
        memberDecryptKey = RsaAlgorithm.generateKey(params)
        self.decryptKeyBlob = memberDecryptKey.getKeyBits()
        memberEncryptKey = RsaAlgorithm.deriveEncryptKey(self.decryptKeyBlob)
        self.encryptKeyBlob = memberEncryptKey.getKeyBits()

        # Generate the certificate.
        self.certificate = IdentityCertificate()
        self.certificate.setName(Name("/ndn/memberA/KEY/ksk-123/ID-CERT/123"))
        contentPublicKey = PublicKey(self.encryptKeyBlob)
        self.certificate.setPublicKeyInfo(contentPublicKey)
        self.certificate.setNotBefore(0)
        self.certificate.setNotAfter(0)
        self.certificate.encode()

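        # SIG_INFO and SIG_VALUE are presumably fixed byte arrays defined
        # earlier in this file; they provide a pre-encoded SignatureInfo and
        # SignatureValue so the test certificate carries a syntactically valid
        # signature.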
        signatureInfoBlob = Blob(SIG_INFO, False)
        signatureValueBlob = Blob(SIG_VALUE, False)

        signature = TlvWireFormat.get().decodeSignatureInfoAndValue(
          signatureInfoBlob.buf(), signatureValueBlob.buf())
        self.certificate.setSignature(signature)

        self.certificate.wireEncode()

        # Set up the keyChain.
        identityStorage = MemoryIdentityStorage()
        privateKeyStorage = MemoryPrivateKeyStorage()
        self.keyChain = KeyChain(
          IdentityManager(identityStorage, privateKeyStorage),
          NoVerifyPolicyManager())
        identityName = Name("TestGroupManager")
        self.keyChain.createIdentityAndCertificate(identityName)
        self.keyChain.getIdentityManager().setDefaultIdentity(identityName)

    def tearDown(self):
        try:
            os.remove(self.dKeyDatabaseFilePath)
        except OSError:
            pass
        try:
            os.remove(self.eKeyDatabaseFilePath)
        except OSError:
            pass
        try:
            os.remove(self.intervalDatabaseFilePath)
        except OSError:
            pass
        try:
            os.remove(self.groupKeyDatabaseFilePath)
        except OSError:
            pass

    def setManager(self, manager):
        # Set up the first schedule.
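        # A RepetitiveInterval is roughly (startDate, endDate,
        # intervalStartHour, intervalEndHour[, nRepeats, repeatUnit]): the
        # hour range applies on each repetition between the two dates.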
        schedule1 = Schedule()
        interval11 = RepetitiveInterval(
          Schedule.fromIsoString("20150825T000000"),
          Schedule.fromIsoString("20150827T000000"), 5, 10, 2,
          RepetitiveInterval.RepeatUnit.DAY)
        interval12 = RepetitiveInterval(
          Schedule.fromIsoString("20150825T000000"),
          Schedule.fromIsoString("20150827T000000"), 6, 8, 1,
          RepetitiveInterval.RepeatUnit.DAY)
        interval13 = RepetitiveInterval(
          Schedule.fromIsoString("20150827T000000"),
          Schedule.fromIsoString("20150827T000000"), 7, 8)
        schedule1.addWhiteInterval(interval11)
        schedule1.addWhiteInterval(interval12)
        schedule1.addBlackInterval(interval13)

        # Set up the second schedule.
        schedule2 = Schedule()
        interval21 = RepetitiveInterval(
          Schedule.fromIsoString("20150825T000000"),
          Schedule.fromIsoString("20150827T000000"), 9, 12, 1,
          RepetitiveInterval.RepeatUnit.DAY)
        interval22 = RepetitiveInterval(
          Schedule.fromIsoString("20150827T000000"),
          Schedule.fromIsoString("20150827T000000"), 6, 8)
        interval23 = RepetitiveInterval(
          Schedule.fromIsoString("20150827T000000"),
          Schedule.fromIsoString("20150827T000000"), 2, 4)
        schedule2.addWhiteInterval(interval21)
        schedule2.addWhiteInterval(interval22)
        schedule2.addBlackInterval(interval23)
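        # White intervals grant access; black intervals deny it and take
        # precedence where they overlap a white interval.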

        # Add them to the group manager database.
        manager.addSchedule("schedule1", schedule1)
        manager.addSchedule("schedule2", schedule2)

        # Make some adaptations to the certificate.
        dataBlob = self.certificate.wireEncode()

        memberA = Data()
        memberA.wireDecode(dataBlob, TlvWireFormat.get())
        memberA.setName(Name("/ndn/memberA/KEY/ksk-123/ID-CERT/123"))
        memberB = Data()
        memberB.wireDecode(dataBlob, TlvWireFormat.get())
        memberB.setName(Name("/ndn/memberB/KEY/ksk-123/ID-CERT/123"))
        memberC = Data()
        memberC.wireDecode(dataBlob, TlvWireFormat.get())
        memberC.setName(Name("/ndn/memberC/KEY/ksk-123/ID-CERT/123"))

        # Add the members to the database.
        manager.addMember("schedule1", memberA)
        manager.addMember("schedule1", memberB)
        manager.addMember("schedule2", memberC)

    def test_create_d_key_data(self):
        # Create the group manager.
        manager = GroupManager(
          Name("Alice"), Name("data_type"),
          Sqlite3GroupManagerDb(self.dKeyDatabaseFilePath), 2048, 1,
          self.keyChain)

        newCertificateBlob = self.certificate.wireEncode()
        newCertificate = IdentityCertificate()
        newCertificate.wireDecode(newCertificateBlob)

        # Encrypt the D-KEY.
        data = manager._createDKeyData(
          "20150825T000000", "20150827T000000", Name("/ndn/memberA/KEY"),
          self.decryptKeyBlob, newCertificate.getPublicKeyInfo().getKeyDer())

        # Verify the encrypted D-KEY.
        dataContent = data.getContent()

        # Get the nonce key.
        # dataContent is a sequence of the two EncryptedContent.
        encryptedNonce = EncryptedContent()
        encryptedNonce.wireDecode(dataContent)
        self.assertEqual(0, encryptedNonce.getInitialVector().size())
        self.assertEqual(EncryptAlgorithmType.RsaOaep, encryptedNonce.getAlgorithmType())

        blobNonce = encryptedNonce.getPayload()
        decryptParams = EncryptParams(EncryptAlgorithmType.RsaOaep)
        nonce = RsaAlgorithm.decrypt(self.decryptKeyBlob, blobNonce, decryptParams)

        # Get the D-KEY.
        # Use the size of encryptedNonce to find the start of encryptedPayload.
        payloadContent = dataContent.buf()[encryptedNonce.wireEncode().size():]
        encryptedPayload = EncryptedContent()
        encryptedPayload.wireDecode(payloadContent)
        self.assertEqual(16, encryptedPayload.getInitialVector().size())
        self.assertEqual(EncryptAlgorithmType.AesCbc, encryptedPayload.getAlgorithmType())

        decryptParams.setAlgorithmType(EncryptAlgorithmType.AesCbc)
        decryptParams.setInitialVector(encryptedPayload.getInitialVector())
        blobPayload = encryptedPayload.getPayload()
        largePayload = AesAlgorithm.decrypt(nonce, blobPayload, decryptParams)

        self.assertTrue(largePayload.equals(self.decryptKeyBlob))

    def test_create_e_key_data(self):
        # Create the group manager.
        manager = GroupManager(
          Name("Alice"), Name("data_type"),
          Sqlite3GroupManagerDb(self.eKeyDatabaseFilePath), 1024, 1,
          self.keyChain)
        self.setManager(manager)

        data = manager._createEKeyData(
          "20150825T090000", "20150825T110000", self.encryptKeyBlob)
        self.assertEqual("/Alice/READ/data_type/E-KEY/20150825T090000/20150825T110000",
                         data.getName().toUri())

        contentBlob = data.getContent()
        self.assertTrue(self.encryptKeyBlob.equals(contentBlob))

    def test_calculate_interval(self):
        # Create the group manager.
        manager = GroupManager(
          Name("Alice"), Name("data_type"),
          Sqlite3GroupManagerDb(self.intervalDatabaseFilePath), 1024, 1,
          self.keyChain)
        self.setManager(manager)

        memberKeys = {}
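        # _calculateInterval is also expected to fill memberKeys (assumed to
        # map member key names to key bits) for members whose schedules cover
        # the time point; this test only checks the returned interval.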

        timePoint1 = Schedule.fromIsoString("20150825T093000")
        result = manager._calculateInterval(timePoint1, memberKeys)
        self.assertEqual("20150825T090000", Schedule.toIsoString(result.getStartTime()))
        self.assertEqual("20150825T100000", Schedule.toIsoString(result.getEndTime()))

        timePoint2 = Schedule.fromIsoString("20150827T073000")
        result = manager._calculateInterval(timePoint2, memberKeys)
        self.assertEqual("20150827T070000", Schedule.toIsoString(result.getStartTime()))
        self.assertEqual("20150827T080000", Schedule.toIsoString(result.getEndTime()))

        timePoint3 = Schedule.fromIsoString("20150827T043000")
        result = manager._calculateInterval(timePoint3, memberKeys)
        self.assertEqual(False, result.isValid())

        timePoint4 = Schedule.fromIsoString("20150827T053000")
        result = manager._calculateInterval(timePoint4, memberKeys)
        self.assertEqual("20150827T050000", Schedule.toIsoString(result.getStartTime()))
        self.assertEqual("20150827T060000", Schedule.toIsoString(result.getEndTime()))

    def test_get_group_key(self):
        # Create the group manager.
        manager = GroupManager(
          Name("Alice"), Name("data_type"),
          Sqlite3GroupManagerDb(self.groupKeyDatabaseFilePath), 1024, 1,
          self.keyChain)
        self.setManager(manager)

        # Get the data list from the group manager.
        timePoint1 = Schedule.fromIsoString("20150825T093000")
        result = manager.getGroupKey(timePoint1)

        self.assertEqual(4, len(result))

        # The first data packet contains the group's encryption key (public key).
        data = result[0]
        self.assertEqual(
          "/Alice/READ/data_type/E-KEY/20150825T090000/20150825T100000",
          data.getName().toUri())
        groupEKey = EncryptKey(data.getContent())

        # Get the second data packet and decrypt.
        data = result[1]
        self.assertEqual(
          "/Alice/READ/data_type/D-KEY/20150825T090000/20150825T100000/FOR/ndn/memberA/ksk-123",
          data.getName().toUri())

        ####################################################### Start decryption.
        dataContent = data.getContent()

        # Get the nonce key.
        # dataContent is a sequence of the two EncryptedContent.
        encryptedNonce = EncryptedContent()
        encryptedNonce.wireDecode(dataContent)
        self.assertEqual(0, encryptedNonce.getInitialVector().size())
        self.assertEqual(EncryptAlgorithmType.RsaOaep, encryptedNonce.getAlgorithmType())

        decryptParams = EncryptParams(EncryptAlgorithmType.RsaOaep)
        blobNonce = encryptedNonce.getPayload()
        nonce = RsaAlgorithm.decrypt(self.decryptKeyBlob, blobNonce, decryptParams)

        # Get the payload.
        # Use the size of encryptedNonce to find the start of encryptedPayload.
        payloadContent = dataContent.buf()[encryptedNonce.wireEncode().size():]
        encryptedPayload = EncryptedContent()
        encryptedPayload.wireDecode(payloadContent)
        self.assertEqual(16, encryptedPayload.getInitialVector().size())
        self.assertEqual(EncryptAlgorithmType.AesCbc, encryptedPayload.getAlgorithmType())

        decryptParams.setAlgorithmType(EncryptAlgorithmType.AesCbc)
        decryptParams.setInitialVector(encryptedPayload.getInitialVector())
        blobPayload = encryptedPayload.getPayload()
        largePayload = AesAlgorithm.decrypt(nonce, blobPayload, decryptParams)

        # Get the group D-KEY.
        groupDKey = DecryptKey(largePayload)

        ####################################################### End decryption.

        # Check the D-KEY.
        derivedGroupEKey = RsaAlgorithm.deriveEncryptKey(groupDKey.getKeyBits())
        self.assertTrue(groupEKey.getKeyBits().equals(derivedGroupEKey.getKeyBits()))

        # Check the third data packet.
        data = result[2]
        self.assertEqual(
          "/Alice/READ/data_type/D-KEY/20150825T090000/20150825T100000/FOR/ndn/memberB/ksk-123",
          data.getName().toUri())

        # Check the fourth data packet.
        data = result[3]
        self.assertEqual(
          "/Alice/READ/data_type/D-KEY/20150825T090000/20150825T100000/FOR/ndn/memberC/ksk-123",
          data.getName().toUri())

        # Check invalid time stamps for getting the group key.
        timePoint2 = Schedule.fromIsoString("20150826T083000")
        self.assertEqual(0, len(manager.getGroupKey(timePoint2)))

        timePoint3 = Schedule.fromIsoString("20150827T023000")
        self.assertEqual(0, len(manager.getGroupKey(timePoint3)))

    def test_get_group_key_without_regeneration(self):
        # Create the group manager.
        manager = GroupManager(
          Name("Alice"), Name("data_type"),
          Sqlite3GroupManagerDb(self.groupKeyDatabaseFilePath), 1024, 1,
          self.keyChain)
        self.setManager(manager)

        # Get the data list from the group manager.
        timePoint1 = Schedule.fromIsoString("20150825T093000")
        result = manager.getGroupKey(timePoint1)

        self.assertEqual(4, len(result))

        # The first data packet contains the group's encryption key (public key).
        data1 = result[0]
        self.assertEqual(
          "/Alice/READ/data_type/E-KEY/20150825T090000/20150825T100000",
          data1.getName().toUri())
        groupEKey1 = EncryptKey(data1.getContent())

        # Get the second data packet and decrypt.
        data = result[1]
        self.assertEqual(
          "/Alice/READ/data_type/D-KEY/20150825T090000/20150825T100000/FOR/ndn/memberA/ksk-123",
          data.getName().toUri())

        # Add new members to the database.
        dataBlob = self.certificate.wireEncode()
        memberD = Data()
        memberD.wireDecode(dataBlob)
        memberD.setName(Name("/ndn/memberD/KEY/ksk-123/ID-CERT/123"))
        manager.addMember("schedule1", memberD)

        result2 = manager.getGroupKey(timePoint1, False)
        self.assertEqual(5, len(result2))

        # Check that the new EKey is the same as the previous one.
        data2 = result2[0]
        self.assertEqual(
          "/Alice/READ/data_type/E-KEY/20150825T090000/20150825T100000",
          data2.getName().toUri())
        groupEKey2 = EncryptKey(data2.getContent())
        self.assertTrue(groupEKey1.getKeyBits().equals(groupEKey2.getKeyBits()))

        # Check the second data packet.
        data2 = result2[1]
        self.assertEqual(
          "/Alice/READ/data_type/D-KEY/20150825T090000/20150825T100000/FOR/ndn/memberA/ksk-123",
          data2.getName().toUri())
Code Example #28
0
class SampleProducer(object):
    def __init__(self, face, username, memoryContentCache):
        # Set up face
        self.face = face

        # Set up the keyChain.
        identityStorage = MemoryIdentityStorage()
        privateKeyStorage = MemoryPrivateKeyStorage()
        self.keyChain = KeyChain(
            IdentityManager(identityStorage, privateKeyStorage),
            NoVerifyPolicyManager())

        identityName = Name(username)
        self.certificateName = self.keyChain.createIdentityAndCertificate(
            identityName)
        self.keyChain.getIdentityManager().setDefaultIdentity(identityName)

        self.face.setCommandSigningInfo(self.keyChain, self.certificateName)

        self.databaseFilePath = "../policy_config/test_producer.db"
        self.catalogDatabaseFilePath = "../policy_config/test_producer_catalog.db"
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            # no such file
            pass
        try:
            os.remove(self.catalogDatabaseFilePath)
        except OSError:
            # no such file
            pass

        self.testDb = Sqlite3ProducerDb(self.databaseFilePath)
        self.catalogDb = Sqlite3ProducerDb(self.catalogDatabaseFilePath)

        # TODO: The catalog currently uses a different suffix, so it needs its
        # own Producer instance. That producer cannot share the same database
        # with the first producer, otherwise self.onEncryptedKeys is never
        # called: the catalog producer uses its own C-KEY, and that key is
        # never encrypted by an E-KEY because no interest goes out. This looks
        # like a limitation in the library.
        prefix = Name(username)
        suffix = Name("fitness/physical_activity/time_location")

        self.producer = Producer(Name(prefix), suffix, self.face,
                                 self.keyChain, self.testDb)

        catalogSuffix = Name(suffix).append("catalog")
        self.catalogProducer = Producer(Name(prefix), catalogSuffix, self.face,
                                        self.keyChain, self.catalogDb)

        self.memoryContentCache = memoryContentCache
        return

    def createContentKey(self, timeSlot):
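        # Producer.createContentKey generates (or looks up) the C-KEY for the
        # given time slot and fetches the group E-KEYs needed to encrypt it;
        # the encrypted C-KEY Data packets are then passed to onEncryptedKeys.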
        print "debug: createContentKey for data and catalog"
        contentKeyName = self.producer.createContentKey(
            timeSlot, self.onEncryptedKeys, self.onError)
        catalogKeyName = self.catalogProducer.createContentKey(
            timeSlot, self.onEncryptedKeys, self.onError)
        print(contentKeyName.toUri())
        print(catalogKeyName.toUri())

    def onError(self, code, msg):
        print(str(code) + " : " + msg)
        return

    def initiateContentStoreInsertion(self, repoCommandPrefix, data):
        fetchName = data.getName()
        parameter = repo_command_parameter_pb2.RepoCommandParameterMessage()
        # Add the Name.
        for i in range(fetchName.size()):
            parameter.repo_command_parameter.name.component.append(
                fetchName[i].getValue().toBytes())

        # Create the command interest.
        interest = Interest(
            Name(repoCommandPrefix).append("insert").append(
                Name.Component(ProtobufTlv.encode(parameter))))
        self.face.makeCommandInterest(interest)

        self.face.expressInterest(interest, self.onRepoData,
                                  self.onRepoTimeout)

    def onRepoData(self, interest, data):
        #print "received repo data: " + interest.getName().toUri()
        return

    def onRepoTimeout(self, interest):
        #print "repo command times out: " + interest.getName().getPrefix(-1).toUri()
        return

    def onEncryptedKeys(self, keys):
        print "debug: onEncryptedKeys called"
        if not keys:
            print "onEncryptedKeys: no keys in callback!"
        for i in range(0, len(keys)):
            print "onEncryptedKeys: produced encrypted key " + keys[i].getName(
            ).toUri()
            self.memoryContentCache.add(keys[i])
            self.initiateContentStoreInsertion(
                "/ndn/edu/ucla/remap/ndnfit/repo", keys[i])
        return
Code Example #29
0
File: key-gen.py Project: zhtaoxiang/dpu-dvu
from pyndn import Face, Name
from pyndn.security import KeyType, KeyChain, RsaKeyParams, SecurityException
from pyndn.security.certificate import IdentityCertificate
from pyndn.security.identity import IdentityManager
from pyndn.security.identity import BasicIdentityStorage, FilePrivateKeyStorage, MemoryIdentityStorage, MemoryPrivateKeyStorage
from pyndn.security.policy import NoVerifyPolicyManager

# Set up the keyChain.
identityStorage = BasicIdentityStorage()
privateKeyStorage = FilePrivateKeyStorage()
keyChain = KeyChain(IdentityManager(identityStorage, privateKeyStorage),
                    NoVerifyPolicyManager())

# dvu identity
identityName = Name("/org/openmhealth/dvu")
certificateName = keyChain.createIdentityAndCertificate(identityName)
keyName = IdentityCertificate.certificateNameToPublicKeyName(certificateName)
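# certificateNameToPublicKeyName strips the KEY and ID-CERT components, so
# keyName identifies the key pair itself (roughly <identity>/<key-id>); it is
# used below to locate the private key file.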
certificate = identityStorage.getCertificate(certificateName)
print(keyName)
print(certificateName)
print(certificate)
print(privateKeyStorage.nameTransform(keyName.toUri(), ".pri"))
#with open(privateKeyStorage.nameTransform(keyName.toUri(), ".pri")) as keyFile:
#    base64Content = keyFile.read()
#    decoded = base64.b64decode(base64Content)
#    print decoded
#    for i in range(0, len(decoded)):
#        print int(decoded[i])
face = Face()
face.setCommandSigningInfo(keyChain, certificateName)
certificateNamePrefix = Name(identityName).append("KEY")