Example #1
class CredentialStorage:
    def __init__(self):
        self.identityStorage = MemoryIdentityStorage()
        self.privateKeyStorage = MemoryPrivateKeyStorage()
        self.keyChain = KeyChain(IdentityManager(self.identityStorage, self.privateKeyStorage),
                        SelfVerifyPolicyManager(self.identityStorage))
        keyName = Name("/testname/DSK-123")
        self.defaultCertName = keyName[:-1].append(
          "KEY").append(keyName[-1]).append("ID-CERT").append("0")

        ecdsaKeyName = Name("/testEcdsa/DSK-123")
        self.ecdsaCertName = ecdsaKeyName[:-1].append(
          "KEY").append(ecdsaKeyName[-1]).append("ID-CERT").append("0")

        self.identityStorage.addKey(
          keyName, KeyType.RSA, Blob(DEFAULT_RSA_PUBLIC_KEY_DER))
        self.privateKeyStorage.setKeyPairForKeyName(
          keyName, KeyType.RSA, DEFAULT_RSA_PUBLIC_KEY_DER, DEFAULT_RSA_PRIVATE_KEY_DER)

        self.identityStorage.addKey(
          ecdsaKeyName, KeyType.ECDSA, Blob(DEFAULT_EC_PUBLIC_KEY_DER))
        self.privateKeyStorage.setKeyPairForKeyName(
          ecdsaKeyName, KeyType.ECDSA, DEFAULT_EC_PUBLIC_KEY_DER, DEFAULT_EC_PRIVATE_KEY_DER)

    def signData(self, data, certificateName = None):
        if certificateName is None:
            certificateName = self.defaultCertName
        self.keyChain.sign(data, certificateName)

    def signDataWithSha256(self, data):
        self.keyChain.signWithSha256(data)

    def verifyData(self, data, verifiedCallback, failedCallback):
        self.keyChain.verifyData(data, verifiedCallback, failedCallback)
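
# --- Usage sketch (not part of the original example): how CredentialStorage above might be
# --- exercised. The data name is hypothetical; the DEFAULT_* key constants are assumed to be
# --- imported as in the example itself.
storage = CredentialStorage()

data = Data(Name("/testname/demo/1"))
data.setContent("hello")
storage.signData(data)  # signs with the default RSA certificate
# storage.signData(data, storage.ecdsaCertName)  # or sign with the ECDSA certificate

def onVerified(d):
    print("verified: " + d.getName().toUri())

def onVerifyFailed(d):
    print("verification failed: " + d.getName().toUri())

storage.verifyData(data, onVerified, onVerifyFailed)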
Example #2
    def wrap_content(self, name, content, key=None, key_locator=None):
        """
        @param name - name of the data
        @param content - data to be wrapped
        @param key - key used to sign the data
        @return the content object created
        wraps the given name and content into a content object
        """
        co = Data(Name(name))
        co.setContent(content)
        co.getMetaInfo().setFreshnessPeriod(5000)
        co.getMetaInfo().setFinalBlockId(Name("/%00%09")[0])

        identityStorage = MemoryIdentityStorage()
        privateKeyStorage = MemoryPrivateKeyStorage()
        identityManager = IdentityManager(identityStorage, privateKeyStorage)
        keyChain = KeyChain(identityManager, None)

        # Initialize the storage.
        keyName = Name("/ndn/bms/DSK-default")
        certificateName = keyName.getSubName(0, keyName.size() - 1).append(
                "KEY").append(keyName[-1]).append("ID-CERT").append("0")
        identityStorage.addKey(keyName, KeyType.RSA, Blob(DEFAULT_PUBLIC_KEY_DER))
        privateKeyStorage.setKeyPairForKeyName(keyName, KeyType.RSA, DEFAULT_PUBLIC_KEY_DER,
                DEFAULT_PRIVATE_KEY_DER)

        keyChain.sign(co, certificateName)

        _data = co.wireEncode()

        return _data.toRawStr()
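
# --- Usage sketch (not part of the original example): a hypothetical caller of wrap_content;
# --- 'wrapper' stands in for an instance of the (unshown) enclosing class.
wireString = wrapper.wrap_content("/ndn/bms/demo/temperature", "21.5 C")

# The returned raw string is the wire encoding; it can be decoded back into a Data packet.
packet = Data()
packet.wireDecode(Blob(wireString, False))
print("Decoded name: " + packet.getName().toUri())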
Example #3
class Producer(object):
    def __init__(self, delay=None):
        self.delay = delay
        self.nDataServed = 0
        self.isDone = False


    def run(self, prefix):
        self.keyChain = KeyChain()

        self.consoleThread = ConsoleThread()
        self.consoleThread.start()

        # The default Face will connect using a Unix socket
        face = Face()
        prefix = Name(prefix)

        # Use the system default key chain and certificate name to sign commands.
        face.setCommandSigningInfo(self.keyChain, self.keyChain.getDefaultCertificateName())

        # Also use the default certificate name to sign data packets.
        face.registerPrefix(prefix, self.onInterest, self.onRegisterFailed)

        print "Registering prefix", prefix.toUri()

        while not self.isDone:
            face.processEvents()
            time.sleep(0.01)



    def onInterest(self, prefix, interest, transport, registeredPrefixId):
        global stopServing

        if stopServing:
            print "refusing to serve " + interest.getName().toUri()
            self.consoleThread.join()
            print "join'd thread"
            return

        if self.delay is not None:
            time.sleep(self.delay)

        interestName = interest.getName()

        data = Data(interestName)
        data.setContent("Hello " + interestName.toUri())
        data.getMetaInfo().setFreshnessPeriod(3600 * 1000)

        self.keyChain.sign(data, self.keyChain.getDefaultCertificateName())

        transport.send(data.wireEncode().toBuffer())

        self.nDataServed += 1
        print "Replied to: %s (#%d)" % (interestName.toUri(), self.nDataServed)


    def onRegisterFailed(self, prefix):
        print "Register failed for prefix", prefix.toUri()
Example #4
class Producer(object):
    def __init__(self):
        self.keyChain = KeyChain()
        self.isDone = False


    def run(self, namespace):
        # Create a connection to the local forwarder over a Unix socket
        face = Face()

        prefix = Name(namespace)

        # Use the system default key chain and certificate name to sign commands.
        face.setCommandSigningInfo(self.keyChain, \
                                   self.keyChain.getDefaultCertificateName())

        # Also use the default certificate name to sign Data packets.
        face.registerPrefix(prefix, self.onInterest, self.onRegisterFailed)

        print "Registering prefix", prefix.toUri()

        # Run the event loop forever. Use a short sleep to
        # prevent the Producer from using 100% of the CPU.
        while not self.isDone:
            face.processEvents()
            time.sleep(0.01)



    def onInterest(self, prefix, interest, transport, registeredPrefixId):
        interestName = interest.getName()

        data = Data(interestName)
        data.setContent("Hello, " + interestName.toUri())

        hourMilliseconds = 3600 * 1000
        data.getMetaInfo().setFreshnessPeriod(hourMilliseconds)

        self.keyChain.sign(data, self.keyChain.getDefaultCertificateName())

        transport.send(data.wireEncode().toBuffer())

        print "Replied to: %s" % interestName.toUri()


    def onRegisterFailed(self, prefix):
        print "Register failed for prefix", prefix.toUri()
        self.isDone = True
Example #5
class Producer(object):
    def __init__(self):
        self.keyChain = KeyChain()
        self.isDone = False

    def run(self, namespace):
        # Create a connection to the local forwarder over a Unix socket
        face = Face()

        prefix = Name(namespace)

        # Use the system default key chain and certificate name to sign commands.
        face.setCommandSigningInfo(self.keyChain, \
                                   self.keyChain.getDefaultCertificateName())

        # Also use the default certificate name to sign Data packets.
        face.registerPrefix(prefix, self.onInterest, self.onRegisterFailed)

        print "Registering prefix", prefix.toUri()

        # Run the event loop forever. Use a short sleep to
        # prevent the Producer from using 100% of the CPU.
        while not self.isDone:
            face.processEvents()
            time.sleep(0.01)

    def onInterest(self, prefix, interest, transport, registeredPrefixId):
        lines = open('cpu.txt', 'r')
        numbers = map(float, lines)
        min_number = min(numbers)
        interestName = interest.getName()
        data = Data(interestName)
        data.setContent("CPU min:" + str(min_number))

        hourMilliseconds = 3600 * 1000
        data.getMetaInfo().setFreshnessPeriod(hourMilliseconds)

        self.keyChain.sign(data, self.keyChain.getDefaultCertificateName())

        transport.send(data.wireEncode().toBuffer())

        print "Replied to: %s" % interestName.toUri()

    def onRegisterFailed(self, prefix):
        print "Register failed for prefix", prefix.toUri()
        self.isDone = True
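
# --- Aside (not part of the original example): the onInterest above never closes cpu.txt,
# --- and map() is a lazy iterator on Python 3. An equivalent, safer version of that part:
with open('cpu.txt', 'r') as lines:
    numbers = [float(line) for line in lines]
min_number = min(numbers)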
Example #6
def main():
    data = Data()
    data.wireDecode(TlvData)
    dump("Decoded Data:")
    dumpData(data)

    # Set the content again to clear the cached encoding so we encode again.
    data.setContent(data.getContent())
    encoding = data.wireEncode()

    reDecodedData = Data()
    reDecodedData.wireDecode(encoding)
    dump("")
    dump("Re-decoded Data:")
    dumpData(reDecodedData)

    freshData = Data(Name("/ndn/abc"))
    freshData.setContent("SUCCESS!")
    freshData.getMetaInfo().setFreshnessPeriod(5000)
    freshData.getMetaInfo().setFinalBlockId(Name("/%00%09")[0])

    identityStorage = MemoryIdentityStorage()
    privateKeyStorage = MemoryPrivateKeyStorage()
    keyChain = KeyChain(IdentityManager(identityStorage, privateKeyStorage),
                        SelfVerifyPolicyManager(identityStorage))

    # Initialize the storage.
    keyName = Name("/testname/DSK-123")
    certificateName = keyName.getSubName(
        0,
        keyName.size() - 1).append("KEY").append(
            keyName[-1]).append("ID-CERT").append("0")
    identityStorage.addKey(keyName, KeyType.RSA,
                           Blob(DEFAULT_RSA_PUBLIC_KEY_DER))
    privateKeyStorage.setKeyPairForKeyName(keyName, KeyType.RSA,
                                           DEFAULT_RSA_PUBLIC_KEY_DER,
                                           DEFAULT_RSA_PRIVATE_KEY_DER)

    keyChain.sign(freshData, certificateName)
    dump("")
    dump("Freshly-signed Data:")
    dumpData(freshData)

    keyChain.verifyData(freshData, makeOnVerified("Freshly-signed Data"),
                        makeOnVerifyFailed("Freshly-signed Data"))
Example #7
def main():
    data = Data()
    data.wireDecode(TlvData)
    dump("Decoded Data:")
    dumpData(data)

    # Set the content again to clear the cached encoding so we encode again.
    data.setContent(data.getContent())
    encoding = data.wireEncode()

    reDecodedData = Data()
    reDecodedData.wireDecode(encoding)
    dump("")
    dump("Re-decoded Data:")
    dumpData(reDecodedData)

    identityStorage = MemoryIdentityStorage()
    privateKeyStorage = MemoryPrivateKeyStorage()
    keyChain = KeyChain(IdentityManager(identityStorage, privateKeyStorage),
                        SelfVerifyPolicyManager(identityStorage))

    # Initialize the storage.
    keyName = Name("/testname/DSK-123")
    certificateName = keyName.getSubName(0, keyName.size() - 1).append(
      "KEY").append(keyName[-1]).append("ID-CERT").append("0")
    identityStorage.addKey(keyName, KeyType.RSA, Blob(DEFAULT_RSA_PUBLIC_KEY_DER))
    privateKeyStorage.setKeyPairForKeyName(
      keyName, KeyType.RSA, DEFAULT_RSA_PUBLIC_KEY_DER, DEFAULT_RSA_PRIVATE_KEY_DER)

    keyChain.verifyData(reDecodedData, makeOnVerified("Re-decoded Data"),
                        makeOnVerifyFailed("Re-decoded Data"))

    freshData = Data(Name("/ndn/abc"))
    freshData.setContent("SUCCESS!")
    freshData.getMetaInfo().setFreshnessPeriod(5000)
    freshData.getMetaInfo().setFinalBlockId(Name("/%00%09")[0])
    keyChain.sign(freshData, certificateName)
    dump("")
    dump("Freshly-signed Data:")
    dumpData(freshData)

    keyChain.verifyData(freshData, makeOnVerified("Freshly-signed Data"),
                        makeOnVerifyFailed("Freshly-signed Data"))
Example #8
class Producer(object):
    def __init__(self):

        Prefix1 = '/umobile/polling/push'
        self.configPrefix = Name(Prefix1)
        self.outstanding = dict()
        self.isDone = False
        self.keyChain = KeyChain()

        self.face = Face("127.0.0.1")

    def run(self):
        try:

            self.face.setCommandSigningInfo(self.keyChain, \
                                            self.keyChain.getDefaultCertificateName())
            self.face.registerPrefix(self.configPrefix, self.onInterest, self.onRegisterFailed)
            print "Registering listening prefix : " + self.configPrefix.toUri()

            while not self.isDone:
                self.face.processEvents()
                time.sleep(0.01)

        except RuntimeError as e:
            print "ERROR: %s" % e

    def onInterest(self, prefix, interest, face, interestFilterId, filter):

        interestName = interest.getName()
        data = Data(interestName)
        data.setContent("Test Push Interest Polling model")
        freshnessMilliseconds = 600 * 1000  # 10 minutes
        data.getMetaInfo().setFreshnessPeriod(freshnessMilliseconds)
        self.keyChain.sign(data, self.keyChain.getDefaultCertificateName())
        face.send(data.wireEncode().toBuffer())
        print "Replied to Interest name: %s" % interestName.toUri()
        print "Replied with Data name: %s" % interestName.toUri()


    def onRegisterFailed(self, prefix):
        print "Register failed for prefix", prefix.toUri()
        self.isDone = True
Example #9
def main():
    data = Data()
    data.wireDecode(TlvData)
    dump("Decoded Data:")
    dumpData(data)

    # Set the content again to clear the cached encoding so we encode again.
    data.setContent(data.getContent())
    encoding = data.wireEncode()

    reDecodedData = Data()
    reDecodedData.wireDecode(encoding)
    dump("")
    dump("Re-decoded Data:")
    dumpData(reDecodedData)

    # Set up the KeyChain.
    pibImpl = PibMemory()
    keyChain = KeyChain(
      pibImpl, TpmBackEndMemory(), SelfVerifyPolicyManager(pibImpl))
    # This puts the public key in the pibImpl used by the SelfVerifyPolicyManager.
    keyChain.importSafeBag(SafeBag
      (Name("/testname/KEY/123"),
       Blob(DEFAULT_RSA_PRIVATE_KEY_DER, False),
       Blob(DEFAULT_RSA_PUBLIC_KEY_DER, False)))

    keyChain.verifyData(reDecodedData, makeOnVerified("Re-decoded Data"),
                        makeOnValidationFailed("Re-decoded Data"))

    freshData = Data(Name("/ndn/abc"))
    freshData.setContent("SUCCESS!")
    freshData.getMetaInfo().setFreshnessPeriod(5000)
    freshData.getMetaInfo().setFinalBlockId(Name("/%00%09")[0])
    keyChain.sign(freshData)
    dump("")
    dump("Freshly-signed Data:")
    dumpData(freshData)

    keyChain.verifyData(freshData, makeOnVerified("Freshly-signed Data"),
                        makeOnValidationFailed("Freshly-signed Data"))
Example #10
def main():
    data = Data()
    data.wireDecode(TlvData)
    dump("Decoded Data:")
    dumpData(data)

    # Set the content again to clear the cached encoding so we encode again.
    data.setContent(data.getContent())
    encoding = data.wireEncode()

    reDecodedData = Data()
    reDecodedData.wireDecode(encoding)
    dump("")
    dump("Re-decoded Data:")
    dumpData(reDecodedData)

    # Set up the KeyChain.
    keyChain = KeyChain("pib-memory:", "tpm-memory:")
    keyChain.importSafeBag(SafeBag
      (Name("/testname/KEY/123"),
       Blob(DEFAULT_RSA_PRIVATE_KEY_DER, False),
       Blob(DEFAULT_RSA_PUBLIC_KEY_DER, False)))
    validator = Validator(ValidationPolicyFromPib(keyChain.getPib()))

    validator.validate(reDecodedData, makeSuccessCallback("Re-decoded Data"),
      makeFailureCallback("Re-decoded Data"))

    freshData = Data(Name("/ndn/abc"))
    freshData.setContent("SUCCESS!")
    freshData.getMetaInfo().setFreshnessPeriod(5000)
    freshData.getMetaInfo().setFinalBlockId(Name("/%00%09")[0])
    keyChain.sign(freshData)
    dump("")
    dump("Freshly-signed Data:")
    dumpData(freshData)

    validator.validate(freshData, makeSuccessCallback("Freshly-signed Data"),
      makeFailureCallback("Freshly-signed Data"))
Example #11
def main():
    data = Data()
    data.wireDecode(TlvData)
    dump("Decoded Data:")
    dumpData(data)

    # Set the content again to clear the cached encoding so we encode again.
    data.setContent(data.getContent())
    encoding = data.wireEncode()

    reDecodedData = Data()
    reDecodedData.wireDecode(encoding)
    dump("")
    dump("Re-decoded Data:")
    dumpData(reDecodedData)

    # Set up the KeyChain.
    keyChain = KeyChain("pib-memory:", "tpm-memory:")
    keyChain.importSafeBag(
        SafeBag(Name("/testname/KEY/123"),
                Blob(DEFAULT_RSA_PRIVATE_KEY_DER, False),
                Blob(DEFAULT_RSA_PUBLIC_KEY_DER, False)))
    validator = Validator(ValidationPolicyFromPib(keyChain.getPib()))

    validator.validate(reDecodedData, makeSuccessCallback("Re-decoded Data"),
                       makeFailureCallback("Re-decoded Data"))

    freshData = Data(Name("/ndn/abc"))
    freshData.setContent("SUCCESS!")
    freshData.getMetaInfo().setFreshnessPeriod(5000)
    freshData.getMetaInfo().setFinalBlockId(Name("/%00%09")[0])
    keyChain.sign(freshData)
    dump("")
    dump("Freshly-signed Data:")
    dumpData(freshData)

    validator.validate(freshData, makeSuccessCallback("Freshly-signed Data"),
                       makeFailureCallback("Freshly-signed Data"))
Example #12
class CredentialStorage:
    def __init__(self):
        self.identityStorage = MemoryIdentityStorage()
        self.privateKeyStorage = MemoryPrivateKeyStorage()
        self.keyChain = KeyChain(
            IdentityManager(self.identityStorage, self.privateKeyStorage),
            SelfVerifyPolicyManager(self.identityStorage))
        keyName = Name("/testname/DSK-123")
        self.defaultCertName = keyName[:-1].append("KEY").append(
            keyName[-1]).append("ID-CERT").append("0")

        ecdsaKeyName = Name("/testEcdsa/DSK-123")
        self.ecdsaCertName = ecdsaKeyName[:-1].append("KEY").append(
            ecdsaKeyName[-1]).append("ID-CERT").append("0")

        self.identityStorage.addKey(keyName, KeyType.RSA,
                                    Blob(DEFAULT_RSA_PUBLIC_KEY_DER))
        self.privateKeyStorage.setKeyPairForKeyName(
            keyName, KeyType.RSA, DEFAULT_RSA_PUBLIC_KEY_DER,
            DEFAULT_RSA_PRIVATE_KEY_DER)

        self.identityStorage.addKey(ecdsaKeyName, KeyType.ECDSA,
                                    Blob(DEFAULT_EC_PUBLIC_KEY_DER))
        self.privateKeyStorage.setKeyPairForKeyName(
            ecdsaKeyName, KeyType.ECDSA, DEFAULT_EC_PUBLIC_KEY_DER,
            DEFAULT_EC_PRIVATE_KEY_DER)

    def signData(self, data, certificateName=None):
        if certificateName is None:
            certificateName = self.defaultCertName
        self.keyChain.sign(data, certificateName)

    def signDataWithSha256(self, data):
        self.keyChain.signWithSha256(data)

    def verifyData(self, data, verifiedCallback, failedCallback):
        self.keyChain.verifyData(data, verifiedCallback, failedCallback)
Example #13
class NaiveEDLParserAndPublisher(object):
    def __init__(self, applyEDLAdjustment=True):
        # prepare trollius logging
        self.prepareLogging()

        self._events = dict()
        self._running = False
        self._applyEDLAdjustment = applyEDLAdjustment

        # NDN related variables
        self._loop = asyncio.get_event_loop()
        self._face = ThreadsafeFace(self._loop)

        # Use the system default key chain and certificate name to sign commands.
        self._keyChain = KeyChain()
        self._keyChain.setFace(self._face)
        self._certificateName = self._keyChain.getDefaultCertificateName()
        self._face.setCommandSigningInfo(self._keyChain, self._certificateName)
        self._memoryContentCache = MemoryContentCache(self._face)

        # Publishing parameters configuration
        self._translationServiceUrl = "http://the-archive.la/losangeles/services/get-youtube-url"
        self._namePrefixString = "/ndn/edu/ucla/remap/test/edl/"

        self._dataLifetime = 2000
        self._publishBeforeSeconds = 3
        self._translateBeforeSeconds = 60
        self._currentIdx = 0

        # Youtube related variables:
        # Channel Global song: UCSMJaKICZKXkpvr7Gj8pPUg
        # Channel Los Angeles: UCeuQoBBzMW6SWkxd8_1I8NQ
        # self._channelID = 'UCSMJaKICZKXkpvr7Gj8pPUg'
        self._channelID = "UCSMJaKICZKXkpvr7Gj8pPUg"
        self._accessKey = "AIzaSyCe8t7PnmWjMKZ1gBouhP1zARpqNwHAs0s"
        # queryStr = 'https://www.googleapis.com/youtube/v3/videos?part=snippet,contentDetails,statistics,status&key=' + apiKey + '&id='
        # Video query example
        # https://www.googleapis.com/youtube/v3/videos?part=snippet,contentDetails,statistics,status&key=AIzaSyDUY_AX1iJQcwCW1mASEp5GcLtq1V9BM1Q&id=_ebELPKANxo
        # Channel query example
        # https://www.googleapis.com/youtube/v3/search?key=AIzaSyCe8t7PnmWjMKZ1gBouhP1zARpqNwHAs0s&channelId=UCSMJaKICZKXkpvr7Gj8pPUg&part=snippet,id&order=date&maxResults=20
        self._videoUrlDict = dict()

        self._edlAdjustmentDict = dict()
        return

    def getClipUrlOAuth(self):
        self._videoUrlDict = dict((k.lower(), v) for k, v in getAllVideosFromChannel().iteritems())

    # Old getClipUrl function that looks at the public Youtube channel without using Python API
    def getClipUrl(self, nextPageToken=None):
        options = {"part": "snippet,id", "order": "date", "maxResults": "20"}
        if nextPageToken is not None:
            options["pageToken"] = nextPageToken
        prefix = "https://www.googleapis.com/youtube/v3/search?"

        queryUrl = prefix + "key=" + self._accessKey + "&channelId=" + self._channelID
        for item in options:
            queryUrl += "&" + item + "=" + options[item]
        result = json.loads(urllib.urlopen(queryUrl).read())
        for item in result["items"]:
            if "snippet" in item and "id" in item and "videoId" in item["id"]:
                self._videoUrlDict[item["snippet"]["title"].lower()] = item["id"]["videoId"]
            else:
                print("Unexpected JSON from youtube channel query")
        if "nextPageToken" in result:
            self.getClipUrl(result["nextPageToken"])
        else:
            if __debug__:
                print("Building videoUrl dict finished; number of entries: " + str(len(self._videoUrlDict)))
                # for item in self._videoUrlDict:
                #  print("* " + item)
        return

    def parse(self, fileName):
        isEventBegin = False
        lastEventID = -1
        with open(fileName, "r") as edlFile:
            for line in edlFile:
                if isEventBegin:
                    components = line.split()
                    try:
                        eventID = int(components[0])
                    except ValueError:
                        print("Cannot cast " + components[0] + " to eventID")
                        continue
                    # We seem to have a fixed number of components here;
                    # reference: http://www.edlmax.com/maxguide.html
                    reelName = components[1]
                    channel = components[2]
                    trans = components[3]

                    timeComponentsIdx = len(components) - 4

                    srcStartTime = components[timeComponentsIdx]
                    srcEndTime = components[timeComponentsIdx + 1]
                    dstStartTime = components[timeComponentsIdx + 2]
                    dstEndTime = components[timeComponentsIdx + 3]

                    self._events[eventID] = json.loads(
                        '{ \
              "event_id": "%s", \
              "reel_name": "%s", \
              "channel": "%s", \
              "trans": "%s", \
              "src_start_time": "%s", \
              "src_end_time": "%s", \
              "dst_start_time": "%s", \
              "dst_end_time": "%s", \
              "src_url": "%s", \
              "translated": "%s", \
              "clipName": "%s", \
              "ytPresent": "%s" \
             }'
                        % (
                            str(eventID),
                            reelName,
                            channel,
                            trans,
                            srcStartTime,
                            srcEndTime,
                            dstStartTime,
                            dstEndTime,
                            "none",
                            "none",
                            "n/a",
                            "n/a",
                        )
                    )

                    isEventBegin = False
                    lastEventID = eventID
                elif re.match(r"\s+", line) is not None or line == "":
                    isEventBegin = True
                elif lastEventID > 0:
                    # Skipping events that do not have right offset
                    if not eventID in self._events:
                        print("Line skipped because of missing start time adjustment")
                        continue

                    fromClipNameMatch = re.match(r"\* FROM CLIP NAME: ([^\n]*)\n", line)
                    if fromClipNameMatch is not None:
                        clipName = fromClipNameMatch.group(1).strip()
                        parsedClipName = clipName.lower().replace("_", " ").replace("-", " ")

                        if self._applyEDLAdjustment:
                            if clipName in self._edlAdjustmentDict:
                                startTimeAdjusted = self.getTimeMinus(
                                    self._edlAdjustmentDict[clipName].split(":"),
                                    self._events[eventID]["src_start_time"].split(":"),
                                )
                                endTimeAdjusted = self.getTimeMinus(
                                    self._edlAdjustmentDict[clipName].split(":"),
                                    self._events[eventID]["src_end_time"].split(":"),
                                )
                                self._events[eventID]["src_start_time"] = startTimeAdjusted
                                self._events[eventID]["src_end_time"] = endTimeAdjusted

                                # Skipping events that do not have right offset
                                if startTimeAdjusted == "" or endTimeAdjusted == "":
                                    print(
                                        clipName + " : " + startTimeAdjusted,
                                        " start time incorrect; event " + str(eventID) + " ignored",
                                    )
                                    del self._events[eventID]
                                    continue
                            else:
                                # Skipping events that do not have right offset
                                print(
                                    "Warning: EDL adjustment not found for "
                                    + clipName
                                    + "; event "
                                    + str(eventID)
                                    + " ignored"
                                )
                                del self._events[eventID]
                                continue

                        self._events[eventID]["clipName"] = parsedClipName
                        # We don't do audio (only .wav or .mp3) for now
                        if parsedClipName.endswith(".wav") or parsedClipName.endswith(".mp3"):
                            continue
                        else:
                            parsedClipName = (" ").join(parsedClipName.split(".")[:-1])
                            # print(parsedClipName)
                        if parsedClipName in self._videoUrlDict:
                            # we assume one src_url from one FROM CLIP NAME for now
                            self._events[eventID]["src_url"] = (
                                "https://www.youtube.com/watch?v=" + self._videoUrlDict[parsedClipName]
                            )
                            self._events[eventID]["ytPresent"] = "YES"
                            print("src_url is " + self._events[eventID]["src_url"])
                        else:
                            self._events[eventID]["ytPresent"] = "NO"
                            print("Warning: file not found in Youtube channel: " + clipName)
                    else:
                        if "payload" not in self._events[eventID]:
                            self._events[eventID]["payload"] = [line]
                        else:
                            self._events[eventID]["payload"].append(line)

    @asyncio.coroutine
    def startPublishing(self):
        if len(self._events) == 0:
            return
        elif not self._running:
            self._memoryContentCache.registerPrefix(
                Name(self._namePrefixString), self.onRegisterFailed, self.onDataNotFound
            )
            startTime = time.time()

            latestEventTime = 0
            lastEventID = 0
            for event_id in sorted(self._events):
                timeStrs = self._events[event_id]["dst_start_time"].split(":")
                publishingTime = self.getScheduledTime(timeStrs, self._publishBeforeSeconds)
                translationTime = self.getScheduledTime(timeStrs, self._translateBeforeSeconds)
                if publishingTime > latestEventTime:
                    latestEventTime = publishingTime
                self._loop.call_later(translationTime, self.translateUrl, event_id)
                self._loop.call_later(publishingTime, self.publishData, event_id)
                lastEventID = event_id

            # append arbitrary 'end' data
            lastEventID = lastEventID + 1
            self._events[lastEventID] = json.loads(
                '{ \
        "event_id": "%s", \
        "src_url": "%s", \
        "translated": "%s" \
      }'
                % (str(lastEventID), "end", "not-required")
            )
            startTime = self.getScheduledTime(self._events[lastEventID - 1]["src_start_time"].split(":"), 0)
            endTime = self.getScheduledTime(self._events[lastEventID - 1]["src_end_time"].split(":"), 0)
            print("scheduled end " + str(endTime - startTime) + " sec from now")
            self._loop.call_later(latestEventTime + 1, self.publishData, lastEventID)
            self._loop.call_later(latestEventTime + 2 + (endTime - startTime), self._loop.stop)

            self._running = True

    def translateUrl(self, idx):
        queryUrl = self._translationServiceUrl
        timeStrs = self._events[idx]["src_start_time"].split(":")

        # we don't have the video from Youtube
        if self._events[idx]["src_url"] == "none":
            # print("no video from Youtube")
            # we still publish the data even if src_url is "none", to maintain consecutive sequence numbers
            self._events[idx]["translated"] = "non-existent"
            return

        serviceUrl = self._events[idx]["src_url"]  # + "&t=" + str(self.timeToSeconds(timeStrs)) + "s"

        values = {"url": serviceUrl, "fetchIfNotExist": "true"}

        data = urllibparse.urlencode(values)
        req = urllib.Request(queryUrl, data)
        # This synchronous request might block the execution of publishData; should be changed later
        response = urllib.urlopen(req)
        videoUrl = response.read()

        self._events[idx]["ori_url"] = serviceUrl
        self._events[idx]["src_url"] = videoUrl

        if self._events[idx]["translated"] == "publish":
            # We already missed the scheduled publishing time; should publish as soon as translation finishes
            self.publishData(idx)
        else:
            self._events[idx]["translated"] = "translated"
        return

    def publishData(self, idx):
        # Translation of the video URL has finished by the time of the publishData call;
        # if not, we set translated to "publish"; this is data race free since translateUrl and publishData are scheduled in the same thread
        if self._events[idx]["translated"] != "none":
            # Order published events sequence numbers by start times in destination
            data = Data(Name(self._namePrefixString + str(self._currentIdx)))

            data.setContent(json.dumps(self._events[idx]))
            data.getMetaInfo().setFreshnessPeriod(self._dataLifetime)
            self._keyChain.sign(data, self._certificateName)
            self._memoryContentCache.add(data)
            self._currentIdx += 1
            if __debug__:
                eventId = str(self._events[idx]["event_id"])
                channel = str(self._events[idx]["channel"])
                srcUrl = str(self._events[idx]["src_url"])
                clipName = str(self._events[idx]["clipName"])
                ytPresent = str(self._events[idx]["ytPresent"])
                clipStartTime = str(self._events[idx]["dst_start_time"])
                clipEndTime = str(self._events[idx]["dst_end_time"])
                print(
                    str(time.time())
                    + " Added event ["
                    + eventId
                    + "-"
                    + channel
                    + "|"
                    + clipName
                    + " YT:"
                    + ytPresent
                    + " "
                    + srcUrl[0:30]
                    + "... "
                    + clipStartTime
                    + "-"
                    + clipEndTime
                    + "] ("
                    + data.getName().toUri()
                    + ")"
                )
        else:
            self._events[idx]["translated"] = "publish"

    def timeToSeconds(self, timeStrs):
        seconds = int(timeStrs[2])
        minutes = int(timeStrs[1])
        hours = int(timeStrs[0])
        ret = hours * 3600 + minutes * 60 + seconds
        return ret

    def getTimeMinus(self, timeStrs1, timeStrs2):
        frameNumber = int(timeStrs1[3])
        seconds = int(timeStrs1[2])
        minutes = int(timeStrs1[1])
        hours = int(timeStrs1[0])

        frameNumber2 = int(timeStrs2[3]) - frameNumber
        seconds2 = int(timeStrs2[2]) - seconds
        minutes2 = int(timeStrs2[1]) - minutes
        hours2 = int(timeStrs2[0]) - hours

        if frameNumber2 < 0:
            # frame rate assumption
            frameNumber2 = 30 + frameNumber2
            seconds2 = seconds2 - 1

        if seconds2 < 0:
            seconds2 = 60 + seconds2
            minutes2 = minutes2 - 1

        if minutes2 < 0:
            minutes2 = 60 + minutes2
            hours2 = hours2 - 1

        if hours2 < 0:
            print("Warning: time minus smaller than 0")
            return ""

        # Arbitrary guard of start times that are off
        if hours2 > 1 or minutes2 > 1:
            return ""

        return ":".join([str(hours2), str(minutes2), str(seconds2), str(frameNumber2)])

    def getScheduledTime(self, timeStrs, beforeSeconds):
        frameNumber = int(timeStrs[3])
        seconds = int(timeStrs[2])
        minutes = int(timeStrs[1])
        hours = int(timeStrs[0])
        ret = hours * 3600 + minutes * 60 + seconds - beforeSeconds
        return 0 if ret < 0 else ret

    def onRegisterFailed(self, prefix):
        raise RuntimeError("Register failed for prefix", prefix.toUri())

    def onDataNotFound(self, prefix, interest, face, interestFilterId, filter):
        # print('Data not found for interest: ' + interest.getName().toUri())
        return

    #############################
    # Logging
    #############################
    def prepareLogging(self):
        self.log = logging.getLogger(str(self.__class__))
        self.log.setLevel(logging.DEBUG)
        logFormat = "%(asctime)-15s %(name)-20s %(funcName)-20s (%(levelname)-8s):\n\t%(message)s"
        self._console = logging.StreamHandler()
        self._console.setFormatter(logging.Formatter(logFormat))
        self._console.setLevel(logging.INFO)
        # without this, a lot of ThreadsafeFace errors get swallowed up
        logging.getLogger("trollius").addHandler(self._console)
        self.log.addHandler(self._console)

    def setLogLevel(self, level):
        """
        Set the log level that will be output to standard error
        :param level: A log level constant defined in the logging module (e.g. logging.INFO)
        """
        self._console.setLevel(level)

    def getLogger(self):
        """
        :return: The logger associated with this node
        :rtype: logging.Logger
        """
        return self.log

    ############################
    def loadEDLAdjustment(self, csvFile):
        with open(csvFile, "rb") as csvfile:
            reader = csv.reader(csvfile, delimiter=",", quotechar="|")
            for row in reader:
                self._edlAdjustmentDict[row[3]] = row[1]
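
# --- Hypothetical driver (not part of the original class): the file name and the scheduling
# --- calls below are illustrative only.
parser = NaiveEDLParserAndPublisher(applyEDLAdjustment=False)
parser.getClipUrl()                  # build the YouTube title -> videoId map
parser.parse("show.edl")             # fill parser._events from the EDL file

# startPublishing registers the prefix and schedules the per-event publish callbacks,
# including its own loop.stop(); run the loop until that happens.
parser._loop.run_until_complete(parser.startPublishing())
parser._loop.run_forever()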
Example #14
class Monitoring_Agent_Main(object):
    def __init__(self, namePrefix, producerName):
        self.configPrefix = Name(namePrefix)
        self.outstanding = dict()
        self.isDone = False
        self.keyChain = KeyChain()
        self.face = Face("127.0.0.1")
        #self.DataStore = DS.readDataStore_json()
        #self.DataStore = DS.table
        self.script_path = os.path.abspath(
            __file__)  # i.e. /path/to/dir/foobar.py
        self.script_dir = os.path.split(
            self.script_path)[0]  #i.e. /path/to/dir/
        self.Datamessage_size = 8000  #8kB --> Max Size from NDN standard
        self.producerName = producerName
        #print "++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
        #pprint(self.DataStore)

    def run(self):
        try:
            self.face.setCommandSigningInfo(
                self.keyChain, self.keyChain.getDefaultCertificateName())
            self.face.registerPrefix(self.configPrefix, self.onInterest,
                                     self.onRegisterFailed)
            print "Registered prefix : " + self.configPrefix.toUri()

            while not self.isDone:
                self.face.processEvents()
                time.sleep(0.01)

        except RuntimeError as e:
            print "ERROR: %s" % e

    def onInterest(self, prefix, interest, face, interestFilterId, filter):
        interestName = interest.getName()
        data = Data(interestName)
        #print "++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
        #pprint(self.DataStore)
        print "Interest Name: %s" % interestName
        interest_name_components = interestName.toUri().split("/")
        if "monitoring" in interest_name_components:
            print "Check Pi and Containers Status"
            monitoring_agent = termopi()
            ## Print monitoring data
            #monitoring_agent.prt_pi_resources()
            print "Update json file"
            filename = "piStatus" + self.producerName + ".json"
            folder_name = "PIstatus/"
            rel_path = os.path.join(self.script_dir, folder_name)
            if not os.path.exists(rel_path):
                os.makedirs(rel_path)
            abs_file_path = os.path.join(rel_path, filename)
            monitoring_agent.create_jsonfile_with_pi_status(
                abs_file_path, self.producerName)
            freshness = 10  #milli second, content will be deleted from the cache after freshness period
            self.sendingFile(abs_file_path, interest, face, freshness)
        else:
            print "Interest name mismatch"

    def onRegisterFailed(self, prefix):
        print "Register failed for prefix", prefix.toUri()
        self.isDone = True

    def sendingFile(self, file_path, interest, face, freshness):
        print "Sending File Function"
        interestName = interest.getName()
        interestNameSize = interestName.size()

        try:
            SegmentNum = (interestName.get(interestNameSize - 1)).toSegment()
            dataName = interestName.getSubName(0, interestNameSize - 1)

        # If no segment number is included in the Interest, use segment 0 and take the whole Interest name as the Data name
        except RuntimeError as e:
            SegmentNum = 0
            dataName = interestName
        # Put file to the Data message
        try:
            # The NDN name and other header fields add overhead, and header overhead + Data content must stay below maxNdnPacketSize,
            # so the segment size here is capped at self.Datamessage_size (8 kB).
            # The EnumeratePublisher class splits large files into segments and returns the required segment (segment numbers start from 0).
            dataSegment, last_segment_num = EnumeratePublisher(
                file_path, self.Datamessage_size, SegmentNum).getFileSegment()
            # create the DATA name appending the segment number
            dataName = dataName.appendSegment(SegmentNum)
            data = Data(dataName)
            data.setContent(dataSegment)

            # set the final block ID to the last segment number
            last_segment = (Name.Component()).fromNumber(last_segment_num)
            data.getMetaInfo().setFinalBlockId(last_segment)
            #hourMilliseconds = 600 * 1000
            data.getMetaInfo().setFreshnessPeriod(freshness)

            # currently the Data is signed with the default identity certificate
            self.keyChain.sign(data, self.keyChain.getDefaultCertificateName())
            # Sending Data message
            face.send(data.wireEncode().toBuffer())
            print "Replied to Interest name: %s" % interestName.toUri()
            print "Replied with Data name: %s" % dataName.toUri()

        except ValueError as err:
            print "ERROR: %s" % err
Example #15
class BaseNode(object):
    """
    This class contains methods/attributes common to both node and controller.
    """
    def __init__(self, transport = None, conn = None):
        """
        Initialize the network and security classes for the node
        """
        super(BaseNode, self).__init__()
        self.faceTransport = transport
        self.faceConn = conn
        
        self._identityStorage = BasicIdentityStorage()

        self._identityManager = IdentityManager(self._identityStorage, FilePrivateKeyStorage())
        self._policyManager = IotPolicyManager(self._identityStorage)

        # hopefully there is some private/public key pair available
        self._keyChain = KeyChain(self._identityManager, self._policyManager)

        self._registrationFailures = 0
        self._prepareLogging()

        self._setupComplete = False


##
# Logging
##
    def _prepareLogging(self):
        self.log = logging.getLogger(str(self.__class__))
        self.log.setLevel(logging.DEBUG)
        logFormat = "%(asctime)-15s %(name)-20s %(funcName)-20s (%(levelname)-8s):\n\t%(message)s"
        self._console = logging.StreamHandler()
        self._console.setFormatter(logging.Formatter(logFormat))
        self._console.setLevel(logging.INFO)
        # without this, a lot of ThreadsafeFace errors get swallowed up
        logging.getLogger("trollius").addHandler(self._console)
        self.log.addHandler(self._console)

    def setLogLevel(self, level):
        """
        Set the log level that will be output to standard error
        :param level: A log level constant defined in the logging module (e.g. logging.INFO) 
        """
        self._console.setLevel(level)

    def getLogger(self):
        """
        :return: The logger associated with this node
        :rtype: logging.Logger
        """
        return self.log

###
# Startup and shutdown
###
    def beforeLoopStart(self):
        """
        Called before the event loop starts.
        """
        pass

    def getDefaultCertificateName(self):
        try:
            certName = self._identityStorage.getDefaultCertificateNameForIdentity( 
                self._policyManager.getDeviceIdentity())
        except SecurityException as e:
            # zhehao: in the case of producer's /localhop prefixes, the default key is not defined in ndnsec-public-info.db
            certName = self._keyChain.createIdentityAndCertificate(self._policyManager.getDeviceIdentity())
            #certName = self._keyChain.getDefaultCertificateName()
            #print(certName.toUri())
        return certName

    def start(self):
        """
        Begins the event loop. After this, the node's Face is set up and it can
        send/receive interests+data
        """
        self.log.info("Starting up")
        self.loop = asyncio.get_event_loop()
        
        if (self.faceTransport == None or self.faceTransport == ''):
            self.face = ThreadsafeFace(self.loop)
        else:
            self.face = ThreadsafeFace(self.loop, self.faceTransport, self.faceConn)
        
        self.face.setCommandSigningInfo(self._keyChain, self.getDefaultCertificateName())
        
        self._keyChain.setFace(self.face)

        self._isStopped = False
        self.beforeLoopStart()
        
        try:
            self.loop.run_forever()
        except Exception as e:
            self.log.exception("Unhandled exception in the event loop")
        finally:
            self.stop()

    def stop(self):
        """
        Stops the node, taking it off the network
        """
        self.log.info("Shutting down")
        self._isStopped = True 
        self.loop.stop()
        
###
# Data handling
###
    def signData(self, data):
        """
        Sign the data with our network certificate
        :param pyndn.Data data: The data to sign
        """
        self._keyChain.sign(data, self.getDefaultCertificateName())

    def sendData(self, data, sign=True):
        """
        Reply to an interest with a data packet, optionally signing it.
        :param pyndn.Data data: The response data packet
        :param boolean sign: (optional, default=True) Whether the response must be signed. 
        """
        if sign:
            self.signData(data)
        self.face.putData(data)

###
# 
# 
##
    def onRegisterFailed(self, prefix):
        """
        Called when the node cannot register its name with the forwarder
        :param pyndn.Name prefix: The network name that failed registration
        """
        if self.faceTransport != None and self.faceConn != None:
            self.log.warn("Explicit face transport and connectionInfo: Could not register {}; expect a manual or autoreg on the other side.".format(prefix.toUri()))
        elif self._registrationFailures < 5:
            self._registrationFailures += 1
            self.log.warn("Could not register {}, retry: {}/{}".format(prefix.toUri(), self._registrationFailures, 5)) 
            self.face.registerPrefix(self.prefix, self._onCommandReceived, self.onRegisterFailed)
        else:
            self.log.info("Prefix registration failed")
            self.stop()

    def verificationFailed(self, dataOrInterest):
        """
        Called when verification of a data packet or command interest fails.
        :param pyndn.Data or pyndn.Interest: The packet that could not be verified
        """
        self.log.info("Received invalid" + dataOrInterest.getName().toUri())

    @staticmethod
    def getSerial():
        """
        Find and return the serial number of the Raspberry Pi. Provided in case
        you wish to distinguish data from nodes with the same name by serial.
        :return: The serial number extracted from device information in /proc/cpuinfo
        :rtype: str
        """
        try:
            if PLATFORM == "raspberry pi":
                with open('/proc/cpuinfo') as f:
                    for line in f:
                        if line.startswith('Serial'):
                            return line.split(':')[1].strip()
            else:
                return "todo"
        except NameError:
            return "todo"
Example #16
def benchmarkEncodeDataSeconds(nIterations, useComplex, useCrypto):
    """
    Loop to encode a data packet nIterations times.

    :param int nIterations: The number of iterations.
    :param bool useComplex: If true, use a large name, large content and all
      fields. If false, use a small name, small content and only required
      fields.
    :param bool useCrypto: If true, sign the data packet.  If false, use a blank
      signature.
    :return: A tuple (duration, encoding) where duration is the number of
      seconds for all iterations and encoding is the wire encoding.
    :rtype: (float, Blob)
    """
    if useComplex:
        # Use a large name and content.
        name = Name(
          "/ndn/ucla.edu/apps/lwndn-test/numbers.txt/%FD%05%05%E8%0C%CE%1D/%00")

        contentString = ""
        count = 1
        contentString += "%d" % count
        count += 1
        while len(contentString) < 1115:
            contentString += " %d" % count
            count += 1
        content = Name.fromEscapedString(contentString)
    else:
        # Use a small name and content.
        name = Name("/test")
        content = Name.fromEscapedString("abc")
    finalBlockId = Name("/%00")[0]

    # Initialize the private key storage in case useCrypto is true.
    identityStorage = MemoryIdentityStorage()
    privateKeyStorage = MemoryPrivateKeyStorage()
    keyChain = KeyChain(IdentityManager(identityStorage, privateKeyStorage),
                        SelfVerifyPolicyManager(identityStorage))
    keyName = Name("/testname/DSK-123")
    certificateName = keyName.getSubName(0, keyName.size() - 1).append(
      "KEY").append(keyName[-1]).append("ID-CERT").append("0")
    identityStorage.addKey(keyName, KeyType.RSA, Blob(DEFAULT_RSA_PUBLIC_KEY_DER))
    privateKeyStorage.setKeyPairForKeyName(
      keyName, KeyType.RSA, DEFAULT_RSA_PUBLIC_KEY_DER, DEFAULT_RSA_PRIVATE_KEY_DER)

    # Set up signatureBits in case useCrypto is false.
    signatureBits = Blob(bytearray(256))
    emptyBlob = Blob([])

    start = getNowSeconds()
    for i in range(nIterations):
        data = Data(name)
        data.setContent(content)
        if useComplex:
            data.getMetaInfo().setFreshnessPeriod(1000)
            data.getMetaInfo().setFinalBlockId(finalBlockId)

        if useCrypto:
            # This sets the signature fields.
            keyChain.sign(data, certificateName)
        else:
            # Imitate IdentityManager.signByCertificate to set up the signature
            # fields, but don't sign.
            sha256Signature = data.getSignature()
            keyLocator = sha256Signature.getKeyLocator()
            keyLocator.setType(KeyLocatorType.KEYNAME)
            keyLocator.setKeyName(certificateName)
            sha256Signature.setSignature(signatureBits)

        encoding = data.wireEncode()

    finish = getNowSeconds()

    return (finish - start, encoding)
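
# --- Hypothetical driver (not part of the original benchmark): the iteration count is
# --- illustrative; getNowSeconds and the DEFAULT_RSA_* constants come from the benchmark module.
nIterations = 20000
duration, encoding = benchmarkEncodeDataSeconds(nIterations, False, False)
print("Encoded %d Data packets in %.3f s (%.1f packets/s), %d bytes each" %
      (nIterations, duration, nIterations / duration, encoding.size()))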
Example #17
class TestConsumer(ut.TestCase):
    def setUp(self):
        self.decryptionKeys = {} # key: Name, value: Blob
        self.encryptionKeys = {} # key: Name, value: Data

        # Reuse the policy_config subdirectory for the temporary SQLite files.
        self.databaseFilePath = "policy_config/test.db"
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            # no such file
            pass

        self.groupName = Name("/Prefix/READ")
        self.contentName = Name("/Prefix/SAMPLE/Content")
        self.cKeyName = Name("/Prefix/SAMPLE/Content/C-KEY/1")
        self.eKeyName = Name("/Prefix/READ/E-KEY/1/2")
        self.dKeyName = Name("/Prefix/READ/D-KEY/1/2")
        self.uKeyName = Name("/U/Key")
        self.uName = Name("/U")

        # Generate the E-KEY and D-KEY.
        params = RsaKeyParams()
        self.fixtureDKeyBlob = RsaAlgorithm.generateKey(params).getKeyBits()
        self.fixtureEKeyBlob = RsaAlgorithm.deriveEncryptKey(
          self.fixtureDKeyBlob).getKeyBits()

        # Generate the user key.
        self.fixtureUDKeyBlob = RsaAlgorithm.generateKey(params).getKeyBits()
        self.fixtureUEKeyBlob = RsaAlgorithm.deriveEncryptKey(
          self.fixtureUDKeyBlob).getKeyBits()

        # Load the C-KEY.
        self.fixtureCKeyBlob = Blob(AES_KEY, False)

        # Set up the keyChain.
        identityStorage = MemoryIdentityStorage()
        privateKeyStorage = MemoryPrivateKeyStorage()
        self.keyChain = KeyChain(
          IdentityManager(identityStorage, privateKeyStorage),
          NoVerifyPolicyManager())

        # Initialize the storage.
        keyName = Name("/testname/DSK-123")
        self.certificateName = keyName.getSubName(0, keyName.size() - 1).append(
          "KEY").append(keyName.get(-1)).append("ID-CERT").append("0")
        identityStorage.addKey(
          keyName, KeyType.RSA, Blob(DEFAULT_RSA_PUBLIC_KEY_DER, False))
        privateKeyStorage.setKeyPairForKeyName(
          keyName, KeyType.RSA, DEFAULT_RSA_PUBLIC_KEY_DER,
          DEFAULT_RSA_PRIVATE_KEY_DER)

    def tearDown(self):
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            pass

    def createEncryptedContent(self):
        contentData = Data(self.contentName)
        encryptParams = EncryptParams(EncryptAlgorithmType.AesCbc)
        encryptParams.setInitialVector(Blob(INITIAL_VECTOR, False))
        Encryptor.encryptData(
          contentData, Blob(DATA_CONTENT, False), self.cKeyName,
          self.fixtureCKeyBlob,  encryptParams)
        self.keyChain.sign(contentData, self.certificateName)
        return contentData

    def createEncryptedCKey(self):
        cKeyData = Data(self.cKeyName)
        encryptParams = EncryptParams(EncryptAlgorithmType.RsaOaep)
        Encryptor.encryptData(
          cKeyData, self.fixtureCKeyBlob, self.dKeyName, self.fixtureEKeyBlob,
          encryptParams)
        self.keyChain.sign(cKeyData, self.certificateName)
        return cKeyData

    def createEncryptedDKey(self):
        dKeyData = Data(self.dKeyName)
        encryptParams = EncryptParams(EncryptAlgorithmType.RsaOaep)
        Encryptor.encryptData(
          dKeyData, self.fixtureDKeyBlob, self.uKeyName, self.fixtureUEKeyBlob,
          encryptParams)
        self.keyChain.sign(dKeyData, self.certificateName)
        return dKeyData

    def test_decrypt_content(self):
        # Generate the AES key.
        aesKeyBlob = Blob(AES_KEY, False)

        # Generate the C-KEY packet for the same AES_KEY.
        cKeyData = self.createEncryptedCKey()
        # Generate the content packet.
        contentData = self.createEncryptedContent()

        # Decrypt.
        Consumer._decrypt(
          cKeyData.getContent(), self.fixtureDKeyBlob,
          lambda result: self.assertTrue(result.equals(aesKeyBlob)),
          lambda errorCode, message: self.fail("decrypt error " + message))

        # Decrypt.
        Consumer._decrypt(
          contentData.getContent(), self.fixtureCKeyBlob,
          lambda result: self.assertTrue(result.equals(Blob(DATA_CONTENT, False))),
          lambda errorCode, message: self.fail("decrypt error " + message))

    def test_consume(self):
        contentData = self.createEncryptedContent()
        cKeyData = self.createEncryptedCKey()
        dKeyData = self.createEncryptedDKey()

        contentCount = [0]
        cKeyCount = [0]
        dKeyCount = [0]

        # Prepare a TestFace to instantly answer calls to expressInterest.
        class TestFace(object):
            def __init__(self, handleExpressInterest):
                self.handleExpressInterest = handleExpressInterest
            def expressInterest(self, interest, onData, onTimeout, onNetworkNack):
                return self.handleExpressInterest(
                  interest, onData, onTimeout, onNetworkNack)

        def handleExpressInterest(interest, onData, onTimeout, onNetworkNack):
            if interest.matchesName(contentData.getName()):
              contentCount[0] = 1
              onData(interest, contentData)
            elif interest.matchesName(cKeyData.getName()):
              cKeyCount[0] = 1
              onData(interest, cKeyData)
            elif interest.matchesName(dKeyData.getName()):
              dKeyCount[0] = 1
              onData(interest, dKeyData)
            else:
              onTimeout(interest)

            return 0
        face = TestFace(handleExpressInterest)

        # Create the consumer.
        consumer = Consumer(
          face, self.keyChain, self.groupName, self.uName,
          Sqlite3ConsumerDb(self.databaseFilePath))
        consumer.addDecryptionKey(self.uKeyName, self.fixtureUDKeyBlob)

        finalCount = [0]
        def onConsumeComplete(data, result):
            finalCount[0] = 1
            self.assertTrue("consumeComplete",
                            result.equals(Blob(DATA_CONTENT, False)))
        consumer.consume(
          self.contentName, onConsumeComplete,
          lambda code, message:
            self.fail("consume error " + repr(code) + ": " + message))

        self.assertEqual(1, contentCount[0], "contentCount")
        self.assertEqual(1, cKeyCount[0], "cKeyCount")
        self.assertEqual(1, dKeyCount[0], "dKeyCount")
        self.assertEqual(1, finalCount[0], "finalCount")

    def test_consumer_with_link(self):
        contentData = self.createEncryptedContent()
        cKeyData = self.createEncryptedCKey()
        dKeyData = self.createEncryptedDKey()

        contentCount = [0]
        cKeyCount = [0]
        dKeyCount = [0]

        # Prepare a TestFace to instantly answer calls to expressInterest.
        class TestFace(object):
            def __init__(self, handleExpressInterest):
                self.handleExpressInterest = handleExpressInterest
            def expressInterest(self, interest, onData, onTimeout, onNetworkNack):
                return self.handleExpressInterest(
                  interest, onData, onTimeout, onNetworkNack)

        def handleExpressInterest(interest, onData, onTimeout, onNetworkNack):
            self.assertEqual(3, interest.getLink().getDelegations().size())

            if interest.matchesName(contentData.getName()):
              contentCount[0] = 1
              onData(interest, contentData)
            elif interest.matchesName(cKeyData.getName()):
              cKeyCount[0] = 1
              onData(interest, cKeyData)
            elif interest.matchesName(dKeyData.getName()):
              dKeyCount[0] = 1
              onData(interest, dKeyData)
            else:
              onTimeout(interest)

            return 0
        face = TestFace(handleExpressInterest)

        # Create the consumer.
        ckeyLink = Link()
        ckeyLink.addDelegation(10,  Name("/ckey1"))
        ckeyLink.addDelegation(20,  Name("/ckey2"))
        ckeyLink.addDelegation(100, Name("/ckey3"))
        dkeyLink = Link()
        dkeyLink.addDelegation(10,  Name("/dkey1"))
        dkeyLink.addDelegation(20,  Name("/dkey2"))
        dkeyLink.addDelegation(100, Name("/dkey3"))
        dataLink = Link()
        dataLink.addDelegation(10,  Name("/data1"))
        dataLink.addDelegation(20,  Name("/data2"))
        dataLink.addDelegation(100, Name("/data3"))
        self.keyChain.sign(ckeyLink, self.certificateName)
        self.keyChain.sign(dkeyLink, self.certificateName)
        self.keyChain.sign(dataLink, self.certificateName)

        consumer = Consumer(
          face, self.keyChain, self.groupName, self.uName,
          Sqlite3ConsumerDb(self.databaseFilePath), ckeyLink, dkeyLink)
        consumer.addDecryptionKey(self.uKeyName, self.fixtureUDKeyBlob)

        finalCount = [0]
        def onConsumeComplete(data, result):
            finalCount[0] = 1
            self.assertTrue("consumeComplete",
                            result.equals(Blob(DATA_CONTENT, False)))
        consumer.consume(
          self.contentName, onConsumeComplete,
          lambda code, message:
            self.fail("consume error " + repr(code) + ": " + message),
          dataLink)

        self.assertEqual(1, contentCount[0], "contentCount")
        self.assertEqual(1, cKeyCount[0], "cKeyCount")
        self.assertEqual(1, dKeyCount[0], "dKeyCount")
        self.assertEqual(1, finalCount[0], "finalCount")
Exemplo n.º 18
0
class RegisterSongList(object):
    def __init__(self, prefix="/ndn/edu/ucla/remap/music/list"):
        logging.basicConfig()
        self.log = logging.getLogger("RegisterSongList")
        self.device = "PC1"
        self.deviceComponent = Name.Component(self.device)
        self.excludeDevice = None

        self.prefix = Name(prefix)
        self.changePrefix = Name("/ndn/edu/ucla/remap/music/storage")
        self.keychain = KeyChain()
        self.certificateName = self.keychain.getDefaultCertificateName()

        self.address = ""
        self._isStopped = True

    def start(self):
        print "reg start"
        self.loop = asyncio.get_event_loop()
        self.face = ThreadsafeFace(self.loop, self.address)
        self.face.setCommandSigningInfo(self.keychain, self.certificateName)
        self.face.registerPrefix(self.prefix, self.onInterest, self.onRegisterFailed)
        self._isStopped = False
        self.face.stopWhen(lambda: self._isStopped)
        try:
            self.loop.run_forever()
        except KeyboardInterrupt:
            sys.exit()
        finally:
            self.stop()

    def stop(self):
        self.loop.close()
        self.face.shutdown()
        self.face = None
        sys.exit(1)

    def signData(self, data):
        # Attach a blank signature instead of signing.
        data.setSignature(Sha256WithRsaSignature())

    def onInterest(self, prefix, interest, transport, registeredPrefixId):
        print "received interest"
        initInterest = Name(interest.getName())
        print "interest name:", initInterest.toUri()
        d = Data(interest.getName().append(self.deviceComponent))
        try:
            print "start to set data's content"
            # currentList is assumed to be a module-level list of song names.
            currentString = ','.join(currentList)
            d.setContent("songList of " + self.device + ":" + currentString + "\n")
            self.face.registerPrefix(self.changePrefix, self.onInterest, self.onRegisterFailed)
        except KeyboardInterrupt:
            print "key interrupt"
            sys.exit(1)
        except Exception as e:
            print e
            d.setContent("Bad command\n")
        finally:
            self.keychain.sign(d, self.certificateName)

        encodedData = d.wireEncode()
        transport.send(encodedData.toBuffer())
        print d.getName().toUri()
        print d.getContent().toRawStr()

        self.loop.close()
        self.face.shutdown()
        self.face = None

    def onRegisterFailed(self, prefix):
        self.log.error("Could not register " + prefix.toUri())
        self.stop()
Exemplo n.º 19
0
class ServiceController(object):
    def __init__(self):
        # Register all published name prefixes
        prefix_serviceDiscovery = "/sm/service_discovery"
        self.prefix_serviceDiscovery = Name(prefix_serviceDiscovery)

        prefix_serviceRegistration = "/sm/service_registration"
        self.prefix_serviceRegistration = Name(prefix_serviceRegistration)

        prefix_serviceMigration = "/sm/service_migration"
        self.prefix_serviceMigration = Name(prefix_serviceMigration)

        self.prefix_serviceMigrationPush = "/sm/service_migration/push/"

        prefix_trigger = "/trigger"
        self.prefix_trigger = Name(prefix_trigger)
        self.Path = "/home/pi/carlos/SC_repository/"
        self.serviceName = "rpi-nano-httpd.tar"
        self.serviceInfo = {
            'uhttpd.tar': {
                'image_name': 'fnichol/uhttpd:latest',
                'port_host': 8080,
                'port_container': 80,
                'component': ['ubuntu.tar', 'python.tar', 'java.tar']
            },
            'httpd.tar': {
                'image_name': 'httpd:latest',
                'port_host': 8081,
                'port_container': 80,
                'component': ['debian.tar', 'python.tar', 'java.tar']
            },
            'rpi-nano-httpd.tar': {
                'image_name': 'hypriot/rpi-nano-httpd:latest',
                'port_host': 8082,
                'port_container': 80,
                'component': ['debian.tar', 'python.tar', 'java.tar']
            },
            'rpi-busybox-httpd.tar': {
                'image_name': 'hypriot/rpi-nano-httpd:latest',
                'port_host': 8083,
                'port_container': 80,
                'component': ['debian.tar', 'python.tar', 'java.tar']
            }
        }

        # Create face to localhost, Default configuration from PyNDN
        self.outstanding = dict()
        self.isDone = False
        self.keyChain = KeyChain()
        self.face = Face("127.0.0.1")

        # Set the KeyChain and certificate name used to sign command interests (e.g. for registerPrefix).
        self.face.setCommandSigningInfo(self.keyChain, \
                                        self.keyChain.getDefaultCertificateName())
        # Register name prefixes to the face

        self.face.registerPrefix(self.prefix_serviceDiscovery,
                                 self.onInterest_serviceDiscovery,
                                 self.onRegisterFailed)
        print "Registering prefix : " + self.prefix_serviceDiscovery.toUri()

        self.face.registerPrefix(self.prefix_serviceRegistration,
                                 self.onInterest_serviceRegistration,
                                 self.onRegisterFailed)
        print "Registering prefix : " + self.prefix_serviceRegistration.toUri()

        self.face.registerPrefix(self.prefix_serviceMigration,
                                 self.onInterest_serviceMigration,
                                 self.onRegisterFailed)
        print "Registering prefix : " + self.prefix_serviceMigration.toUri()

        self.face.registerPrefix(self.prefix_trigger, self.onInterest_trigger,
                                 self.onRegisterFailed)
        print "Registering prefix : " + self.prefix_trigger.toUri()

    def onInterest_trigger(self, prefix, interest, face, interestFilterId,
                           filter):
        interestName = interest.getName()
        print "Received Interest name: %s" % interestName.toUri()
        interest_name_components = interestName.toUri().split("/")
        serviceName = interest_name_components[2]
        nodeName = interest_name_components[3]
        prefix_serviceMigrationPushtoNode = self.prefix_serviceMigrationPush + nodeName + '/' + serviceName
        print "Sent Push Interest for %s" % prefix_serviceMigrationPushtoNode
        self.prefix_serviceMigrationPushtoNode = Name(
            prefix_serviceMigrationPushtoNode)
        self.sendPushInterest(self.prefix_serviceMigrationPushtoNode)

    def sendPushInterest(self, name):
        interest = Interest(name)
        uri = name.toUri()

        interest.setInterestLifetimeMilliseconds(4000)
        interest.setMustBeFresh(True)

        if uri not in self.outstanding:
            self.outstanding[uri] = 1

        # self.face.expressInterest(interest, self.onData, self._onTimeout)
        self.face.expressInterest(
            interest, None, None
        )  ## set None --> sent out only, don't wait for Data and Timeout
        print "Sent Push-Interest for %s" % uri

    def onInterest_serviceDiscovery(self, prefix, interest, face,
                                    interestFilterId, filter):
        print "Processing Interest message:/SM/service_discovery"
        # send a list of available service

    def onInterest_serviceRegistration(self, prefix, interest, face,
                                       interestFilterId, filter):
        print "Processing Interest message:/SM/service_registration"
        # upload image file to repository
        # send ACK to service provider

    def onInterest_serviceMigration(self, prefix, interest, face,
                                    interestFilterId, filter):
        # Receive a command from the decision engine to migrate the service to a specific location
        # Select image file from the repo
        # Send to the chosen SEG

        interestName = interest.getName()
        interestNameSize = interestName.size()
        print "Receive Interest message: %s , sending back DATA message" % interestName
        # Check whether the received Interest includes a segment number. If it does, extract the segment number and the requested file name.
        try:
            SegmentNum = (interestName.get(interestNameSize - 1)).toSegment()
            serviceName = (interestName.get(interestNameSize -
                                            2)).toEscapedString()
            dataName = interestName.getSubName(0, interestNameSize - 1)

        # If the Interest carries no segment number, start from segment 0 and keep the Interest name as the Data name.
        except RuntimeError as e:
            SegmentNum = 0
            #dataName = (interestName.append(Name("install")).append(Name(self.serviceName)))
            dataName = interestName

        # TO BE USED WITH WEB INTERFACE

        filePath = self.Path + self.serviceName
        print filePath
        # Test in terminal
        # self.filePath = "config_script_tobe_uploaded/" + serviceName

        # get the practical limit of the size of a network-layer packet : 8800 bytes
        maxNdnPacketSize = self.face.getMaxNdnPacketSize()

        # print 	'practical limit of the size of a network-layer packet :' + str(maxNdnPacketSize)

        try:
            # The NDN name and other header fields add overhead, so header overhead + Data content must stay below maxNdnPacketSize.
            # The segment size is therefore hard-coded to 5,000 bytes.
            # EnumeratePublisher splits large files into segments and returns the requested segment (segment numbers start from 0).

            dataSegment, last_segment_num = EnumeratePublisher(
                filePath, 5000, SegmentNum).getFileSegment()

            # create the DATA name appending the segment number
            dataName = dataName.appendSegment(SegmentNum)

            data = Data(dataName)
            data.setContent(dataSegment)

            # set the final block ID to the last segment number
            last_segment = (Name.Component()).fromNumber(last_segment_num)
            data.getMetaInfo().setFinalBlockId(last_segment)
            hourMilliseconds = 600 * 1000
            data.getMetaInfo().setFreshnessPeriod(hourMilliseconds)

            # Currently the Data is signed with the default identity certificate.
            self.keyChain.sign(data, self.keyChain.getDefaultCertificateName())

            face.send(data.wireEncode().toBuffer())
            print "Replied to Interest name: %s" % interestName.toUri()
            print "Replied with Data name: %s" % dataName.toUri()

            # If the last segment of the file has been sent, the script can be stopped.
            #if SegmentNum == last_segment_num:
            #self.isDone = True

        except ValueError as err:
            print "ERROR: %s" % err

    def onRegisterFailed(self, prefix):
        print "Register failed for prefix", prefix.toUri()
        self.isDone = True

    def run(self):
        try:

            while not self.isDone:
                self.face.processEvents()
                time.sleep(0.01)

        except RuntimeError as e:
            print "ERROR: %s" % e
Exemplo n.º 20
0
class MCCPublisher:
    def __init__(self, dataPrefix, dataSuffix, keychain=None):
        self.currentInsertion = -1
        self.currentStatus = -1
        self.face = None
        self.loop = None
        self.dataName = Name(dataPrefix).append(dataSuffix)
        self.dataPrefix = Name(dataPrefix)

        if keychain is not None:
            self.keychain = keychain
        else:
            self.keychain = KeyChain()

        self.certificateName = self.keychain.getDefaultCertificateName()

        self.fakeSignature = Sha256WithRsaSignature()

        self.failureCount = 0
        self.successCount = 0

        self.dataCache = None

    def onRegisterFailed(self):
        logger.error("Could not register data publishing face!")
        self.stop()

    def stop(self):
        self.loop.close()
        self.face.shutdown()

    def generateVersionedName(self):
        fullName = Name(self.dataName)
        # currently we need to provide the version ourselves when we
        # poke the repo
        ts = int(time.time()*1000)
        fullName.appendVersion(int(ts))
        return fullName

    def generateData(self, baseName):
        '''
           This appends the segment number to the data name, since repo-ng tends to expect it
        '''
        # just make up some data and return it
        ts = (time.time())
        segmentId = 0 # compatible with repo-ng test: may change to test segmented data

        versionStr = baseName.get(-1).toEscapedString()
        dataName = Name(baseName)
        dataName.appendSegment(segmentId)

        d = Data(dataName)
        content = "(" + str(ts) +  ") Data named " + dataName.toUri()
        d.setContent(content)
        d.getMetaInfo().setFinalBlockID(segmentId)
        d.getMetaInfo().setFreshnessPeriod(-1)
        if shouldSign:
            self.keychain.sign(d, self.certificateName)
        else:
            d.setSignature(self.fakeSignature)

        stats.insertDataForVersion(versionStr, {'publish_time':time.time()})
        logger.debug('Publishing: '+d.getName().toUri())
        return d

    def onTimeout(self, prefix):
        logger.warn('Timeout waiting for '+prefix.toUri())

    @asyncio.coroutine
    def insertNewVersion(self, interval=20):
        # The original "while True" loop was replaced by tail-recursing below.
        newVersion = self.generateVersionedName()
        versionStr = newVersion.get(-1).toEscapedString()
        logger.info('Inserting: ' + versionStr)
        stats.insertDataForVersion(versionStr, {'insert_request': time.time()})

        newData = self.generateData(newVersion)

        self.dataCache.add(newData)
        stats.insertDataForVersion(versionStr, {'insert_complete': time.time()})
        yield From(self.insertNewVersion())

    def start(self):
        self.loop = asyncio.new_event_loop()
        self.face = ThreadsafeFace(self.loop, "")
        self.dataCache = MemoryContentCache(self.face, 100)

        asyncio.set_event_loop(self.loop)
        self.face.setCommandSigningInfo(self.keychain, self.certificateName)

        self.dataCache.registerPrefix(self.dataPrefix, self.onRegisterFailed)
        self.loop.run_until_complete(self.insertNewVersion())
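To exercise this publisher (a sketch; it assumes the module-level logger, stats and shouldSign globals referenced above are defined elsewhere, and that a local NFD forwarder is running):

publisher = MCCPublisher("/ndn/memorycache/test", "content")
publisher.start()  # registers the prefix and keeps inserting new versions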
Exemplo n.º 21
0
class LightController():
    shouldSign = False
    COLORS_PER_LIGHT = 3
    STRAND_SIZE = 50
    # mengchen: light only half of the strand each time
    #HALF_STRAND_SIZE = 25
    def __init__(self, nStrands=1, myIP="192.168.1.1", lightIP="192.168.1.50", prefix="/testlight"):
        self.log = logging.getLogger("LightController")
        self.log.setLevel(logging.DEBUG)
        sh = logging.StreamHandler()
        sh.setLevel(logging.DEBUG)
        self.log.addHandler(sh)
        fh = logging.FileHandler("LightController.log")
        fh.setLevel(logging.INFO)
        self.log.addHandler(fh)

        self.payloadBuffer = [[0]*self.STRAND_SIZE*self.COLORS_PER_LIGHT for n in range(nStrands)]

        self.kinetsender = KinetSender(myIP, lightIP, nStrands, self.STRAND_SIZE*self.COLORS_PER_LIGHT)
        self.prefix = Name(prefix)
        self.keychain = KeyChain()

        self.address = ""
        self._isStopped = True

        self.lightState = False
        # mengchen: light only half of the strand each time
        #self.uphalf = True
        self.HALF_STRAND_SIZE = 25       
        self.certificateName = self.keychain.getDefaultCertificateName()
        self.receiveFile = open('interestReceiveFile', 'w')

    def unix_time_now(self):
        epoch = datetime.datetime.utcfromtimestamp(0)
        delta = datetime.datetime.utcnow() - epoch
        return delta.total_seconds() * 1000.0
    
    # XXX: we should get a thread for this or something!
    def start(self):

        self.loop = asyncio.get_event_loop()
        self.face = ThreadsafeFace(self.loop, self.address)
        self.face.setCommandSigningInfo(self.keychain, self.certificateName)
        self.face.registerPrefix(self.prefix, self.onLightingCommand, self.onRegisterFailed)
        self._isStopped = False
        self.face.stopWhen(lambda:self._isStopped)
        try:
            self.loop.run_forever()
        except KeyboardInterrupt:
            #self.stop()
            #print "key interrupt in run_forever"
            sys.exit()
        finally:
            #print "executing finally"
            self.stop()


    def stop(self):
        self.loop.close()
        self.kinetsender.stop = True
        #print "before wait"
        #self.kinetsender.complete.set
        #self.kinetsender.complete.wait()         
        self.face.shutdown()
        self.face = None
        #print "sys exit"
        self.receiveFile.close()
        sys.exit(1)

    def signData(self, data):
        if LightController.shouldSign:
            self.keychain.sign(data, self.certificateName)
        else:
            data.setSignature(Sha256WithRsaSignature())

    def setPayloadColor(self, strand, color):
        # Will expand this to allow repeats, etc.
        # The half-strand variant is disabled:
        # if self.uphalf and not self.lightState:
        #     self.uphalf = False
        #     self.payloadBuffer[strand] = [int(color.r)&0xff, int(color.g)&0xff, int(color.b)&0xff]*self.HALF_STRAND_SIZE + [0, 0, 0]*self.HALF_STRAND_SIZE
        # if not self.uphalf and self.lightState:
        #     self.uphalf = True
        self.payloadBuffer[strand] = [int(color.r)&0xff, int(color.g)&0xff, int(color.b)&0xff]*self.STRAND_SIZE
            
    def onLightingCommand(self, prefix, interest, transport, prefixId):
        #print datetime.datetime.now()
        self.receiveFile.write('{0:f}'.format(self.unix_time_now()) + '\n')
        interestName = Name(interest.getName())
        # e.g. interestName: /ndn/ucla.edu/sculptures/ai_bus/lights/setRGB/%83%0D%84%0B%87%09%89%01%04%8A%01%01%8B%01%01
        # e.g. d: <pyndn.data.Data object at 0xb64825d0>
        print "interestName", interestName.toUri()
        d = Data(interest.getName())
        # get the command parameters from the name
        try:
            commandComponent = interest.getName().get(prefix.size())
            # e.g. commandComponent.toEscapedString() -> "setRGB"
            # e.g. prefix.toUri() -> "/ndn/ucla.edu/sculptures/ai_bus/lights"
            #print "get name", interest.getName().toUri()
            commandParams = interest.getName().get(prefix.size() + 1)
            # commandParams carries the encoded LightCommandMessage, e.g. %83%0D%84%0B%87%09%89%01%04%8A%01%01%8B%01%01

            lightingCommand = LightCommandMessage()
            ProtobufTlv.decode(lightingCommand, commandParams.getValue())
            #self.log.info("Command: " + commandComponent.toEscapedString())
            requestedColor = lightingCommand.command.pattern.colors[0] 
            colorStr = str((requestedColor.r, requestedColor.g, requestedColor.b))
            #self.log.info("Requested color: " + colorStr)
            self.setPayloadColor(0, requestedColor)
            
            self.lightState = not self.lightState
            if self.lightState:
                print "Off"
            else:
                print "On"
            
            #print requestedColor
            
            self.sendLightPayload(1)
            d.setContent("Gotcha: " + colorStr+ "\n")
        except KeyboardInterrupt:
            print "key interrupt"
            sys.exit(1)
        except Exception as e:
            print e
            d.setContent("Bad command\n")
        finally:
            d.getMetaInfo().setFinalBlockID(0)
            self.signData(d)

        encodedData = d.wireEncode()
        transport.send(encodedData.toBuffer())

    def onRegisterFailed(self, prefix):
        self.log.error("Could not register " + prefix.toUri())
        self.stop()

    def sendLightPayload(self, port):
        # print port
        # print self.payloadBuffer[port-1]
        self.kinetsender.setPayload(port, self.payloadBuffer[port-1])
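A minimal way to start this controller (a sketch; the IP addresses and prefix below are placeholders, and a Color Kinetics fixture reachable at lightIP plus a local NFD instance are assumed):

controller = LightController(
    nStrands=1, myIP="192.168.1.10", lightIP="192.168.1.50", prefix="/testlight")
controller.start()  # blocks in the asyncio event loop until interrupted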
def benchmarkEncodeDataSeconds(nIterations, useComplex, useCrypto, keyType):
    """
    Loop to encode a data packet nIterations times.

    :param int nIterations: The number of iterations.
    :param bool useComplex: If true, use a large name, large content and all
      fields. If false, use a small name, small content and only required
      fields.
    :param bool useCrypto: If true, sign the data packet.  If false, use a blank
      signature.
    :param KeyType keyType: KeyType.RSA or KeyType.EC; used if useCrypto is True.
    :return: A tuple (duration, encoding) where duration is the number of
      seconds for all iterations and encoding is the wire encoding.
    :rtype: (float, Blob)
    """
    if useComplex:
        # Use a large name and content.
        name = Name(
          "/ndn/ucla.edu/apps/lwndn-test/numbers.txt/%FD%05%05%E8%0C%CE%1D/%00")

        contentString = ""
        count = 1
        contentString += "%d" % count
        count += 1
        while len(contentString) < 1115:
            contentString += " %d" % count
            count += 1
        content = Name.fromEscapedString(contentString)
    else:
        # Use a small name and content.
        name = Name("/test")
        content = Name.fromEscapedString("abc")
    finalBlockId = Name("/%00")[0]

    # Initialize the KeyChain in case useCrypto is true.
    keyChain = KeyChain("pib-memory:", "tpm-memory:")
    keyChain.importSafeBag(SafeBag
      (Name("/testname/KEY/123"),
       Blob(DEFAULT_EC_PRIVATE_KEY_DER if keyType == KeyType.EC
            else DEFAULT_RSA_PRIVATE_KEY_DER, False),
       Blob(DEFAULT_EC_PUBLIC_KEY_DER if keyType == KeyType.EC
            else DEFAULT_RSA_PUBLIC_KEY_DER, False)))
    certificateName = keyChain.getDefaultCertificateName()

    # Set up signatureBits in case useCrypto is false.
    signatureBits = Blob(bytearray(256))

    start = getNowSeconds()
    for i in range(nIterations):
        data = Data(name)
        data.setContent(content)
        if useComplex:
            data.getMetaInfo().setFreshnessPeriod(1000)
            data.getMetaInfo().setFinalBlockId(finalBlockId)

        if useCrypto:
            # This sets the signature fields.
            keyChain.sign(data)
        else:
            # Imitate IdentityManager.signByCertificate to set up the signature
            # fields, but don't sign.
            sha256Signature = data.getSignature()
            keyLocator = sha256Signature.getKeyLocator()
            keyLocator.setType(KeyLocatorType.KEYNAME)
            keyLocator.setKeyName(certificateName)
            sha256Signature.setSignature(signatureBits)

        encoding = data.wireEncode()

    finish = getNowSeconds()

    return (finish - start, encoding)
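A call site for this helper might look like the following (a sketch; the iteration count and printed wording are arbitrary, and it assumes getNowSeconds and the DEFAULT_* key constants defined elsewhere in the benchmark file):

nIterations = 20000
duration, encoding = benchmarkEncodeDataSeconds(
  nIterations, useComplex=False, useCrypto=False, keyType=KeyType.RSA)
print("Encoded %d data packets of %d bytes in %.3f seconds (%.0f packets/sec)" %
      (nIterations, encoding.size(), duration, nIterations / duration))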
Exemplo n.º 23
0
class Producer(object):
    def __init__(self, prefix, transferfile):
        self.keyChain = KeyChain()
        self.prefix = Name(prefix)
        self.isDone = False
        self.transferfile = transferfile

        # Initialize list for Data packet storage.
        # We'll treat the indices as equivalent to the sequence
        # number requested by Interests.
        self.data = []

        f = open(transferfile, 'rb')
        imgdata = f.read()
        chunks = list(chunkstring(imgdata, 8192))
        print len(chunks)

        finalBlock = Name.Component.fromNumberWithMarker(len(chunks) - 1, 0x00)
        hourMilliseconds = 3600 * 1000

        # Pre-generate and sign all of the Data packets we can serve.
        # We can also set the FinalBlockID in each packet
        # ahead of time because we know the entire sequence.

        for i in range(0, len(chunks)):
            dataName = Name(prefix).appendSegment(i)
            print dataName

            data = Data(dataName)
            data.setContent(chunks[i])
            data.getMetaInfo().setFinalBlockID(finalBlock)
            data.getMetaInfo().setFreshnessPeriod(hourMilliseconds)

            self.keyChain.sign(data, self.keyChain.getDefaultCertificateName())

            self.data.append(data)

    def run(self):
        face = Face()

        # Use the system default key chain and certificate name to sign commands.
        face.setCommandSigningInfo(self.keyChain,
                                   self.keyChain.getDefaultCertificateName())

        # Also use the default certificate name to sign data packets.
        face.registerPrefix(self.prefix, self.onInterest,
                            self.onRegisterFailed)

        print "Registering prefix %s" % self.prefix.toUri()

        while not self.isDone:
            face.processEvents()
            time.sleep(0.01)
        print "quit"

    def onInterest(self, prefix, interest, transport, registeredPrefixId):
        interestName = interest.getName()
        sequence = interestName[-1].toNumber()

        if 0 <= sequence < len(self.data):
            transport.send(self.data[sequence].wireEncode().toBuffer())

        print "Replied to: %s" % interestName.toUri()

        if sequence == len(self.data) - 1:
            time.sleep(3)
            self.isDone = True

    def onRegisterFailed(self, prefix):
        print "Register failed for prefix", prefix.toUri()
        self.isDone = True
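The chunkstring helper used in the constructor above is not shown in this listing. A minimal generator with the assumed behavior (yield consecutive fixed-size slices of the input) could be:

def chunkstring(data, length):
    # Yield consecutive slices of at most `length` bytes each.
    return (data[i:i + length] for i in range(0, len(data), length))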
class RepoPublisher:
    def __init__(self, repoPrefix, dataPrefix, dataSuffix, keychain=None):
        self.currentInsertion = -1
        self.currentStatus = -1
        self.face = None
        self.loop = None
        self.repoPrefix = Name(repoPrefix)
        self.dataName = Name(dataPrefix).append(dataSuffix)
        self.dataPrefix = Name(dataPrefix)

        if keychain is not None:
            self.keychain = keychain
        else:
            self.keychain = KeyChain()

        self.certificateName = self.keychain.getDefaultCertificateName()

        self.failureCount = 0
        self.successCount = 0

        self.processIdToVersion = {}

    def onRegisterFailed(self):
        logger.error("Could not register data publishing face!")
        self.stop()

    def versionFromCommandMessage(self, component):
        command = RepoCommandParameterMessage()
        try:
            ProtobufTlv.decode(command, component.getValue())
        except Exception as e:
            logger.warn(e)

        # last component of name to insert is version
        versionStr = command.repo_command_parameter.name.component[-1]

        return versionStr

    def stop(self):
        self.loop.close()
        self.face.shutdown()

    def onPublishInterest(self, prefix, interest, transport, pxID):
        '''
           For publishing face
        '''
        # just make up some data and return it
        interestName = interest.getName()
        logger.info("Interest for " + interestName.toUri())

        ## CURRENTLY ASSUMES THERE'S A VERSION+SEGMENT SUFFIX!
        dataName = Name(interestName)
        ts = (time.time())
        segmentId = 0
        #try:
        #   segmentId = interestName.get(-1).toSegment()
        #except:
            #logger.debug("Could not find segment id!")
            #dataName.appendSegment(segmentId)
        versionStr = str(interestName.get(-2).getValue())
        logger.debug('Publishing ' + versionStr + ' @ ' + str(ts))

        d = Data(dataName)
        content = "(" + str(ts) +  ") Data named " + dataName.toUri()
        d.setContent(content)
        d.getMetaInfo().setFinalBlockID(segmentId)
        d.getMetaInfo().setFreshnessPeriod(1000)
        self.keychain.sign(d, self.certificateName)

        encodedData = d.wireEncode()

        stats.insertDataForVersion(versionStr, {'publish_time': time.time()})
        transport.send(encodedData.toBuffer())
        #yield from asyncio.sleep()

    def generateVersionedName(self):
        fullName = Name(self.dataName)
        # currently we need to provide the version ourselves when we
        # poke the repo
        ts = int(time.time()*1000)
        fullName.appendVersion(int(ts))
        return fullName

    def onTimeout(self, prefix):
        logger.warn('Timeout waiting for '+prefix.toUri())

    
    def start(self):
        self.loop = asyncio.new_event_loop()
        self.face = ThreadsafeFace(self.loop, "")

        asyncio.set_event_loop(self.loop)
        self.face.setCommandSigningInfo(self.keychain, self.certificateName)
        self.face.registerPrefix(self.dataPrefix,self.onPublishInterest, self.onRegisterFailed)
        try:
            self.loop.call_soon(self.kickRepo)
            self.loop.run_forever()
        finally:
           self.stop() 


    def kickRepo(self):
        # command the repo to insert a new bit of data
        fullName = self.generateVersionedName()
        versionStr = str(fullName.get(-1).getValue())
        command = self.createInsertInterest(fullName)

        logger.debug('inserting: ' + versionStr)

        self.face.makeCommandInterest(command)
        def timeoutLoop(interest):
            logger.warn('Timed out on ' + interest.toUri())
            self.face.expressInterest(command, self.onCommandData, self.onTimeout)

        self.face.expressInterest(command, self.onCommandData, timeoutLoop)
        stats.insertDataForVersion(versionStr, {'insert_request':time.time()})

    def checkInsertion(self, versionStr, processID):
        fullName = Name(self.dataName).append(Name.fromEscapedString(versionStr))
        checkCommand = self.createCheckInterest(fullName, processID)
        self.face.makeCommandInterest(checkCommand)
        def timeoutLoop(interest):
            logger.warn('Timed out waiting on: '+interest.toUri())
            self.face.expressInterest(checkCommand, self.onCommandData, self.onTimeout)
        self.face.expressInterest(checkCommand, self.onCommandData, timeoutLoop)

    def createInsertInterest(self, fullName):
        '''
            For poking the repo
        '''
        # we have to do the versioning before we poke the repo
        interestName = Name(fullName)
        logger.debug('Creating insert interest for: '+interestName.toUri())
        
        insertionName = Name(self.repoPrefix).append('insert')
        commandParams = RepoCommandParameterMessage()

        for i in range(interestName.size()):
            commandParams.repo_command_parameter.name.component.append(interestName.get(i).getValue().toRawStr())

        commandParams.repo_command_parameter.start_block_id = 0
        commandParams.repo_command_parameter.end_block_id = 0

        commandName = insertionName.append(ProtobufTlv.encode(commandParams))
        interest = Interest(commandName)

        interest.setInterestLifetimeMilliseconds(2000)

        return interest

    def createCheckInterest(self, fullName, checkNum):
        insertionName = Name(self.repoPrefix).append('insert check')
        commandParams = RepoCommandParameterMessage()
        interestName = Name(fullName)

        commandParams.repo_command_parameter.process_id = checkNum
        for i in range(interestName.size()):
            commandParams.repo_command_parameter.name.component.append(str(interestName.get(i).getValue()))

        commandName = insertionName.append(ProtobufTlv.encode(commandParams))
        interest = Interest(commandName)

        return interest

    def onCommandData(self, interest, data):
        # assume it's a command response
        now = time.time()
        response = RepoCommandResponseMessage()
        ProtobufTlv.decode(response, data.getContent())


        self.currentStatus = response.repo_command_response.status_code
        self.currentInsertion =  response.repo_command_response.process_id
        logger.debug("Response status code: " + str(self.currentStatus) + ", process id: " + str(self.currentInsertion) + ", insert #" + str(response.repo_command_response.insert_num))

        command_idx = self.repoPrefix.size()
        # we also need to keep track of the mapping from version to processID for stats
        commandName = interest.getName().get(command_idx).getValue().toRawStr()
        if commandName == 'insert check':
            try:
                versionStr = self.processIdToVersion[self.currentInsertion]
                if self.currentStatus == 200:
                    stats.insertDataForVersion(versionStr, {'insert_complete': now})
                    self.loop.call_soon(self.kickRepo)
                elif self.currentStatus >= 400:
                    self.failureCount += 1
                    self.loop.call_soon(self.kickRepo)
                else:
                    self.loop.call_soon(self.checkInsertion, versionStr, self.currentInsertion)
            except:
                logger.warn('Missing version for process ID {}'.format(self.currentInsertion))
        elif commandName == 'insert':
            if self.currentStatus == 100:
                versionStr = self.versionFromCommandMessage(interest.getName().get(command_idx+1))
                self.processIdToVersion[self.currentInsertion] = versionStr
                stats.insertDataForVersion(versionStr, {'insert_begin': now})
                self.loop.call_soon(self.checkInsertion, versionStr, self.currentInsertion)
            else:
                self.failureCount += 1
                self.loop.call_soon(self.kickRepo)
Exemplo n.º 25
0
class BACnetAggregator(BIPSimpleApplication, Logging):

    def __init__(self, config):
        if _debug: BACnetAggregator._debug("__init__ %r", config)

        # get local address from the config file
        laddr = config.get('BACpypes', 'address')
        
        # make a local device object
        local_device = \
          LocalDeviceObject( objectName=config.get('BACpypes','objectName')
                             , objectIdentifier=config.getint('BACpypes','objectIdentifier')
                             , maxApduLengthAccepted=config.getint('BACpypes','maxApduLengthAccepted')
                             , segmentationSupported=config.get('BACpypes','segmentationSupported')
                             , vendorIdentifier=config.getint('BACpypes','vendorIdentifier')
              )
        
        # build a bit string that knows about the bit names
        pss = ServicesSupported()
        pss['whoIs'] = 1
        pss['iAm'] = 1
        pss['readProperty'] = 1
        pss['writeProperty'] = 1
        
        # set the property value to be just the bits
        local_device.protocolServicesSupported = pss.value
        
        # make a simple application
        BIPSimpleApplication.__init__(self, local_device, laddr)

        
        # create logger
        self.logger = BACnetDataLogger(self, config)
        self.loadKey()
        # keep track of requests to line up responses
        self._request = None

        # connect to local repo
        self.publisher = RepoSocketPublisher(12345)
        self.interval = 5 # in seconds
    
    def loadKey(self):
        self.identityStorage = MemoryIdentityStorage()
        self.privateKeyStorage = MemoryPrivateKeyStorage()
        self.keychain = KeyChain(IdentityManager(self.identityStorage, self.privateKeyStorage))

        f = open(key_file, "r")
        self.key = RSA.importKey(f.read())
        self.key_name = Name(bld_root).append(getKeyID(self.key))
        key_pub_der = bytearray(self.key.publickey().exportKey(format="DER"))
        key_pri_der = bytearray(self.key.exportKey(format="DER"))
        self.identityStorage.addKey(self.key_name, KeyType.RSA, Blob(key_pub_der))
        self.privateKeyStorage.setKeyPairForKeyName(self.key_name, key_pub_der, key_pri_der)
        self.cert_name = self.key_name.getSubName(0, self.key_name.size() - 1).append(
            "KEY").append(self.key_name[-1]).append("ID-CERT").append("0")

        print 'KeyName = ' + self.key_name.toUri()
        print 'CertName = ' + self.cert_name.toUri()

    def publishData(self, name_str, payload, timestamp):
        data = Data(Name(name_str).append(bytearray(timestamp)))
        iv = Random.new().read(AES.block_size)
        encryptor = AES.new(key, AES.MODE_CBC, iv)
        data.setContent(bytearray(time_s + iv + encryptor.encrypt(pad(json.dumps(payload)))))
        data.getMetaInfo().setFreshnessPeriod(10000)
        self.keychain.sign(data, self.cert_name)
        self.publisher.put(data)
        #print payload
        #print 'Publish ' + data.getName().toUri()

    def request(self, apdu):
        if _debug: BACnetAggregator._debug("request %r", apdu)

        # save a copy of the request
        self._request = apdu

        # forward it along
        BIPSimpleApplication.request(self, apdu)

    def confirmation(self, apdu):
        #print thread.get_ident()
        global kds_count, key, time_s, point_count
        
        if _debug: BACnetAggregator._debug("confirmation %r", apdu)

        if isinstance(apdu, Error):
            sys.stdout.write("error: %s\n" % (apdu.errorCode,))
            sys.stdout.flush()

        elif isinstance(apdu, AbortPDU):
            apdu.debug_contents()

        elif (isinstance(self._request, ReadPropertyRequest)) and (isinstance(apdu, ReadPropertyACK)):
            # find the datatype
            datatype = get_datatype(apdu.objectIdentifier[0], apdu.propertyIdentifier)
            BACnetAggregator._debug("    - datatype: %r", datatype)
            if not datatype:
                raise TypeError, "unknown datatype"

            # special case for array parts, others are managed by cast_out
            if issubclass(datatype, Array) and (apdu.propertyArrayIndex is not None):
                if apdu.propertyArrayIndex == 0:
                    value = apdu.propertyValue.cast_out(Unsigned)
                else:
                    value = apdu.propertyValue.cast_out(datatype.subtype)
            else:
                value = apdu.propertyValue.cast_out(datatype)
            BACnetAggregator._debug("    - value: %r", value)

            #sys.stdout.write(str(value) + '\n')
            #sys.stdout.flush()

            # KDS
            if kds_count % 1200 == 0:
                time_t = int(time.time() * 1000)
                time_s = struct.pack("!Q", time_t)
                
                key = Random.new().read(32)
                kds_thread = kds.SimpleKDSPublisher(Name(bld_root), self.keychain, self.cert_name, key, time_s)
                kds_thread.start()
                kds_count = 0

            kds_count = kds_count + 1
            #
            
            now = int(time.time() * 1000) # in milliseconds
            
            payload = {'ts': now, 'val': value}
            
            timestamp = struct.pack("!Q", now)
            self.publishData(datapoints[point_count]['prefix'], payload, timestamp)
            point_count = (point_count + 1) % len(datapoints)

            #
            #
            # We could move the 'sleep & read' loop into the logger thread so
            # that the read and write processes run in parallel. For now we
            # only work on a single thread. The logger thread simply kicks
            # off the initial request and then exits.
            #
            if point_count == 0:
                time.sleep(self.interval)

            self.logger.do_read()

    def indication(self, apdu):
        if _debug: BACnetAggregator._debug("indication %r", apdu)

        if (isinstance(self._request, WhoIsRequest)) and (isinstance(apdu, IAmRequest)):
            device_type, device_instance = apdu.iAmDeviceIdentifier
            if device_type != 'device':
                raise DecodingError, "invalid object type"

            if (self._request.deviceInstanceRangeLowLimit is not None) and \
                (device_instance < self._request.deviceInstanceRangeLowLimit):
                pass
            elif (self._request.deviceInstanceRangeHighLimit is not None) and \
                (device_instance > self._request.deviceInstanceRangeHighLimit):
                pass
            else:
                # print out the contents
                sys.stdout.write('pduSource = ' + repr(apdu.pduSource) + '\n')
                sys.stdout.write('iAmDeviceIdentifier = ' + str(apdu.iAmDeviceIdentifier) + '\n')
                sys.stdout.write('maxAPDULengthAccepted = ' + str(apdu.maxAPDULengthAccepted) + '\n')
                sys.stdout.write('segmentationSupported = ' + str(apdu.segmentationSupported) + '\n')
                sys.stdout.write('vendorID = ' + str(apdu.vendorID) + '\n')
                sys.stdout.flush()

        # forward it along
        BIPSimpleApplication.indication(self, apdu)
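The pad helper called in publishData is not shown here. AES-CBC needs plaintext whose length is a multiple of the 16-byte block size; a common PKCS#7-style padding sketch (an assumption about what pad does in this project) is:

from Crypto.Cipher import AES

def pad(plaintext):
    # Pad to a multiple of the AES block size (PKCS#7 style).
    padLen = AES.block_size - (len(plaintext) % AES.block_size)
    return plaintext + chr(padLen) * padLen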
Exemplo n.º 26
0
class Server:
    def __init__(self, deeplab_manager, fst_manager, _root_path, storage):
        # type: (DeepLab, Fst, str, IStorage) -> None
        self.face = None
        self.keychain = KeyChain()
        # self.namespace = Namespace(Name(SERVER_PREFIX).append(RESULT_PREFIX), self.keychain)
        # self.namespace.addOnObjectNeeded(self.on_result_interest)
        self.segment_size = Face.getMaxNdnPacketSize() // 2

        self.running = False
        self._restart = False

        self.deeplab_manager = deeplab_manager
        self.fst_manager = fst_manager
        self.storage = storage
        deeplab_manager.on_finished = self.on_process_finished
        fst_manager.on_finished = self.on_process_finished

        self.fetcher = Fetcher(self.keychain, self.on_payload, self.storage, self.on_fetch_fail)

        self.command_filter_id = 0
        self.result_filter_id = 0

        self.operations_set = self.fst_manager.get_models() | {"deeplab"}
        # Status set, one item per frame
        # TODO: Start with unfinished tasks
        # self.status_set = {}

    def save_status(self, name, status):
        # type: (Name, ResultStatus) -> None
        """Save status to database"""
        self.storage.put(Name(STATUS_PREFIX).append(name), status.to_bytes())

    def load_status(self, name):
        """Load status from database"""
        # Get exact prefix to data
        if name[-1] == Name.Component("_meta") or name[-1].isSegment():
            name = name[:-1]
        ret = self.storage.get(Name(STATUS_PREFIX).append(name))
        if ret is not None:
            return ResultStatus.from_bytes(ret)
        else:
            return None

    async def _run(self):
        self.running = True
        while self.running:
            self.face = Face()
            self._restart = False
            try:
                self._network_start()
                logging.info("Starting...")
                while self.running and not self._restart:
                    self.face.processEvents()
                    await asyncio.sleep(0.01)
            except ConnectionRefusedError:
                logging.warning("Connection refused. Retry in %ss.", DISCONN_RETRY_TIME)
            finally:
                self.face.shutdown()
                self._network_stop()
            if self.running:
                time.sleep(DISCONN_RETRY_TIME)

    def run(self):
        event_loop = asyncio.get_event_loop()
        try:
            event_loop.run_until_complete(self._run())
        finally:
            event_loop.close()

    def stop(self):
        self.running = False

    def _network_start(self):
        self.face.setCommandSigningInfo(self.keychain, self.keychain.getDefaultCertificateName())
        # self.namespace.setFace(self.face, lambda prefix: print("Register failed for"))

        self.face.registerPrefix(Name(SERVER_PREFIX), None, self.on_register_failed)
        self.command_filter_id = self.face.setInterestFilter(
            Name(SERVER_PREFIX).append(COMMAND_PREFIX), self.on_command)
        self.result_filter_id = self.face.setInterestFilter(
            Name(SERVER_PREFIX).append(RESULT_PREFIX), self.on_result_interest)
        self.fetcher.network_start(self.face)

    def _network_stop(self):
        self.fetcher.network_stop()
        self.face.unsetInterestFilter(self.result_filter_id)
        self.face.unsetInterestFilter(self.command_filter_id)

    def on_register_failed(self, prefix):
        # type: (Name) -> None
        logging.error("Register failed for prefix: %s", prefix.toUri())
        self._restart = True

    def on_command(self, _prefix, interest, _face, _interest_filter_id, _filter_obj):
        # type: (Name, Interest, Face, int, InterestFilter) -> None
        parameter_msg = SegmentParameterMessage()
        try:
            ProtobufTlv.decode(parameter_msg, interest.name[-1].getValue())
        except ValueError:
            self.nodata_reply(interest.name, RET_MALFORMED_COMMAND)
            return

        parameter = parameter_msg.segment_parameter
        prefix = Name()
        for compo in parameter.name.component:
            prefix.append(compo.decode("utf-8"))

        # Check operations
        for op in parameter.operations.components:
            model_name = op.model.decode("utf-8")
            if model_name not in self.operations_set:
                self.nodata_reply(interest.name, RET_NOT_SUPPORTED)
                return

        # Fetch frames
        for frame_id in range(parameter.start_frame, parameter.end_frame + 1):
            frame_name = Name(prefix).append(str(frame_id))
            for op in parameter.operations.components:
                model_name = op.model.decode("utf-8")
                data_name = Name(frame_name).append(model_name)
                logging.info("Request processed: %s", data_name)
                status = ResultStatus(prefix.toUri(), model_name, Common.getNowMilliseconds())
                status.status = STATUS_FETCHING
                status.estimated_time = status.proecess_start_time + 10.0
                self.save_status(data_name, status)

        # Check data existence and trigger fetching process
        for frame_id in range(parameter.start_frame, parameter.end_frame + 1):
            frame_name = Name(prefix).append(str(frame_id))
            if self.storage.exists(frame_name):
                self.on_payload(frame_name)
            else:
                self.fetcher.fetch_data(frame_name)

        self.nodata_reply(interest.name, RET_OK, 10.0)

    # def on_result_interest(self, _namespace, needed_obj, _id):
    #     # type: (Namespace, Namespace, int) -> bool
    def on_result_interest(self, _prefix, interest, face, _interest_filter_id, _filter_obj):
        # type: (Name, Interest, Face, int, InterestFilter) -> bool
        prefix = Name(SERVER_PREFIX).append(RESULT_PREFIX)
        if not prefix.isPrefixOf(interest.name):
            # Wrong prefix
            return False

        data_name = interest.name[prefix.size():]
        logging.info("On result interest: %s", data_name.toUri())
        # key, stat = self._result_set_prefix_match(data_name)
        status = self.load_status(data_name)
        if status is None:
            # No such request
            self.nodata_reply(interest.name, RET_NO_REQUEST)
            return True

        if data_name[-1].isSegment():
            # Segment no suffix
            seg_no = data_name[-1].toSegment()
            result = self.storage.get(data_name.getPrefix(-1))
        elif data_name[-1] == Name("_meta")[0]:
            # MetaInfo suffix
            seg_no = -1
            result = self.storage.get(data_name.getPrefix(-1))
        else:
            # No suffix
            seg_no = None
            result = self.storage.get(data_name)

        if result is not None:
            # There are data
            segment_cnt = (len(result) + self.segment_size - 1) // self.segment_size
            # Note: I don't understand why the namespace keeps all data in memory
            metainfo = MetaInfo()
            # metainfo.setFinalBlockId(segment_cnt - 1)  # Why doesn't this work?
            metainfo.setFinalBlockId(Name().appendSegment(segment_cnt - 1)[0])
            if segment_cnt > 1 and seg_no is None:
                # Fetch segmented data with no suffix will get only first segment
                seg_no = 0
                data_name.appendSequenceNumber(seg_no)

            data = Data(Name(prefix).append(data_name))
            data.setMetaInfo(metainfo)
            if seg_no == -1:
                # _meta
                data.content = self.storage.get(data_name)
            else:
                # data
                if segment_cnt > 1:
                    # Segmented
                    if seg_no < segment_cnt:
                        start_offset = seg_no * self.segment_size
                        end_offset = start_offset + self.segment_size
                        data.content = Blob(bytearray(result[start_offset:end_offset]))
                    else:
                        data.content = None
                else:
                    # No segmentation
                    data.content = Blob(bytearray(result))

            self.keychain.sign(data)
            face.putData(data)
            return True
        else:
            # Data are not ready
            if status.status == STATUS_NO_INPUT:
                self.nodata_reply(interest.name, RET_NO_INPUT)
            elif status.status == STATUS_FAILED:
                self.nodata_reply(interest.name, RET_EXECUTION_FAILED)
            else:
                self.nodata_reply(interest.name, RET_RETRY_AFTER, status.estimated_time - Common.getNowMilliseconds())
            return True

    def nodata_reply(self, name, code, retry_after=0.0):
        # type: (Name, int, float) -> None
        logging.info("Reply with code: %s", code)
        data = Data(name)
        metainfo = MetaInfo()
        msg = ServerResponseMessage()
        msg.server_response.ret_code = code

        if code != RET_OK:
            metainfo.type = ContentType.NACK
        else:
            metainfo.type = ContentType.BLOB
        if retry_after > 0.1:
            metainfo.freshnessPeriod = int(retry_after / 10)
            msg.server_response.retry_after = int(retry_after)
        else:
            metainfo.freshnessPeriod = 600

        data.setMetaInfo(metainfo)
        data.setContent(ProtobufTlv.encode(msg))

        self.keychain.sign(data)
        self.face.putData(data)

    def on_payload(self, frame_name):
        # type: (Name) -> None
        for model in self.operations_set:
            data_name = Name(frame_name).append(model)
            status = self.load_status(data_name)
            if status is None:
                continue
            if self.storage.exists(data_name):
                logging.info("Result exists: %s", data_name.toUri())
                continue
            logging.info("Ready to produce: %s", data_name.toUri())
            status.proecess_start_time = Common.getNowMilliseconds()

            if model == "deeplab":
                ret = self.deeplab_manager.send(DeepLabRequest(frame_name.toUri()[1:],
                                                               frame_name.toUri(),
                                                               data_name))
            else:
                ret = self.fst_manager.send(FstRequest(frame_name.toUri()[1:],
                                                       model,
                                                       frame_name.toUri(),
                                                       data_name))
            status.status = STATUS_PROCESSING if ret else STATUS_FAILED
            self.save_status(data_name, status)

    def on_process_finished(self, name_str, model_name):
        # type: (str, str) -> None
        data_name = Name(name_str).append(model_name)
        logging.info("Process finished: %s", data_name.toUri())
        status = self.load_status(data_name)
        if status is None:
            logging.fatal("Database broken.")
            raise RuntimeError()
        status.end_time = Common.getNowMilliseconds()
        status.status = STATUS_SUCCEED

        meta_name = Name(data_name).append("_meta")
        content_metainfo = ContentMetaInfo()
        content_metainfo.setContentType("png")
        content_metainfo.setTimestamp(status.end_time)
        content_metainfo.setHasSegments(True)
        self.storage.put(meta_name, content_metainfo.wireEncode().toBytes())

    def on_fetch_fail(self, frame_name):
        logging.error("Fail to fetch: {}".format(frame_name))
        for model in self.operations_set:
            data_name = Name(frame_name).append(model)
            status = self.load_status(data_name)
            if status is None:
                continue
            status.status = STATUS_NO_INPUT
            self.save_status(data_name, status)
Exemplo n.º 27
0
class Producer():
    """Hosts data under a certain namespace"""

    def __init__(self, data_size, verbose=False):
        # create a KeyChain for signing data packets
        self._key_chain = KeyChain()
        self._is_done = False
        self._num_interests = 0
        #  self._keyChain.createIdentityV2(Name("/ndn/identity"))

        # host data at the local forwarder
        self._face = Face()

        # immutable byte array to use as data
        self._byte_array = bytes(data_size)

        # the number of bytes contained in each data packet
        self._data_size = data_size

        # the verbosity of diagnostic information
        self._verbose = verbose

        # keep track of whether the first interest has been received (for timing)
        self._is_first_interest = True

        # keep track of various performance metrics:
        self._interests_satisfied = 0
        self._interests_received = 0
        self._data_sent = 0
        self._elapsed_time = {}
        self._initial_time = {}
        self._final_time = {}

        print("Producer instance created.")


    def run(self, namespace):
        """Starts listening for interest packets in the given namespace."""

        prefix = Name(namespace)

        # Use the system default key chain and certificate name to sign commands.
        self._face.setCommandSigningInfo(self._key_chain, self._key_chain.getDefaultCertificateName())

        # Also use the default certificate name to sign Data packets.
        self._face.registerPrefix(prefix, self.onInterest, self.onRegisterFailed)

        dump("Registering prefix", prefix.toUri())

        print(f"Listening for interests under {namespace}...")

        # Run the event loop forever. Use a short sleep to
        # prevent the Producer from using 100% of the CPU.
        while not self._is_done:
            self._face.processEvents()
            time.sleep(0.01)

        # shutdown this face - TODO: figure out why this can't be done in the self.shutdown() method
        self._face.shutdown()


    def onInterest(self, prefix, interest, transport, registeredPrefixId):
        """Called when an interest for the specified name is recieved"""

        # keep track of when the first interest was received
        self._initial_time['download_time'] = time.time()

        # set data to a byte array of a specified size
        interestName = interest.getName()
        data = Data(interestName)
        data.setContent(self._byte_array)

        # sign and send data
        data.getMetaInfo().setFreshnessPeriod(3600 * 1000)
        self._key_chain.sign(data, self._key_chain.getDefaultCertificateName())
        transport.send(data.wireEncode().toBuffer())

        # print additional information if the verbose flag is set
        if self._verbose:
            dump("Replied to:", interestName.toUri())

        # increment appropriate variables
        self._interests_recieved += 1
        self._interests_satisfied += 1
        self._num_interests += 1


    def onRegisterFailed(self, prefix):
        """Called when forwarder can't register prefix."""
        dump("Register failed for prefix", prefix.toUri())
        self.shutdown()


    def shutdown(self):
        self._final_time['download_time'] = time.time()
        self._is_done = True
        self.print_status_report()


    def print_status_report(self):
        """Prints performance metrics for this producer."""

        # compute total data sent (in bytes)
        self._data_sent = self._interests_satisfied * self._data_size

        # compute timing
        for key, value in self._initial_time.items():
            self._elapsed_time[key] = self._final_time[key] - self._initial_time[key]

        # calculate bitrate of interests sent
        download_kbps = ((self._data_sent * 8) / 1000) / self._elapsed_time['download_time']


        print("\n----------------------------------")
        print(f"Number of interests recieved: {self._interests_recieved}")
        print(f"Number of interests satisfied: {self._interests_satisfied}")
        print("----------------------------------")
        # this probably isn't a useful metric, as the output interface will throttle this
        #  print(f"{self._data_sent / 1000} kilobytes sent for a bitrate of {download_kbps} kbps")
        print(f"{self._data_size * self._interests_satisfied} bytes of data sent.")
        print("----------------------------------\n")
Exemplo n.º 28
0
class TestFaceRegisterMethods(ut.TestCase):
    def setUp(self):
        self.face_in = Face()
        self.face_out = Face()
        self.keyChain = KeyChain()

    def tearDown(self):
        self.face_in.shutdown()
        self.face_out.shutdown()

    def test_register_prefix_response(self):
        prefixName = Name("/test")
        self.face_in.setCommandSigningInfo(self.keyChain,
                self.keyChain.getDefaultCertificateName())

        interestCallbackCount = [0]
        def onInterest(prefix, interest, transport, prefixID):
            interestCallbackCount[0] += 1
            data = Data(interest.getName())
            data.setContent("SUCCESS")
            self.keyChain.sign(data, self.keyChain.getDefaultCertificateName())
            encodedData = data.wireEncode()
            transport.send(encodedData.toBuffer())

        failedCallback = Mock()

        self.face_in.registerPrefix(prefixName, onInterest, failedCallback)
        # Give the 'server' time to register the interest.
        time.sleep(1)

        # express an interest on another face
        dataCallback = Mock()
        timeoutCallback = Mock()

        # now express an interest on this new face, and see if onInterest is called
        # Add the timestamp so it is unique and we don't get a cached response.
        interestName = prefixName.append("hello" + repr(time.time()))
        self.face_out.expressInterest(interestName, dataCallback, timeoutCallback)

        # Process events for the in and out faces.
        timeout = 10000
        startTime = getNowMilliseconds()
        while True:
            if getNowMilliseconds() - startTime >= timeout:
                break

            self.face_in.processEvents()
            self.face_out.processEvents()

            done = True
            if interestCallbackCount[0] == 0 and failedCallback.call_count == 0:
                # Still processing face_in.
                done = False
            if dataCallback.call_count == 0 and timeoutCallback.call_count == 0:
                # Still processing face_out.
                done = False

            if done:
                break
            time.sleep(0.01)


        self.assertEqual(failedCallback.call_count, 0, 'Failed to register prefix at all')

        self.assertEqual(interestCallbackCount[0], 1, 'Expected 1 onInterest callback, got '+str(interestCallbackCount[0]))

        self.assertEqual(dataCallback.call_count, 1, 'Expected 1 onData callback, got '+str(dataCallback.call_count))

        onDataArgs = dataCallback.call_args[0]
        # check the message content
        data = onDataArgs[1]
        expectedBlob = Blob("SUCCESS")
        self.assertTrue(expectedBlob == data.getContent(), 'Data received on face does not match expected format')
Exemplo n.º 29
0
class BaseNode(object):
    """
    This class contains methods/attributes common to both node and controller.
    
    """
    def __init__(self):
        """
        Initialize the network and security classes for the node
        """
        super(BaseNode, self).__init__()

        self._identityStorage = IotIdentityStorage()
        self._identityManager = IotIdentityManager(self._identityStorage)
        self._policyManager = IotPolicyManager(self._identityStorage)

        # hopefully there is some private/public key pair available
        self._keyChain = KeyChain(self._identityManager, self._policyManager)

        self._registrationFailures = 0
        self._prepareLogging()

        self._setupComplete = False
        self._instanceSerial = None

        # waiting devices register this prefix and respond to discovery
        # or configuration interest
        self._hubPrefix = Name('/localhop/configure')

    def getSerial(self):
        """
         You may wish to run two nodes on one Raspberry Pi, so each
         node generates a unique instance serial number each time it starts up.
        """
        if self._instanceSerial is None:
            prefixLen = 4
            prefix = ''
            for i in range(prefixLen):
                prefix += (chr(random.randint(0,0xff)))
            suffix = self.getDeviceSerial().lstrip('0')
            self._instanceSerial = '-'.join([prefix.encode('hex'), suffix])
        return self._instanceSerial

##
# Logging
##
    def _prepareLogging(self):
        self.log = logging.getLogger(str(self.__class__))
        self.log.setLevel(logging.DEBUG)
        logFormat = "%(asctime)-15s %(name)-20s %(funcName)-20s (%(levelname)-8s):\n\t%(message)s"
        self._console = logging.StreamHandler()
        self._console.setFormatter(logging.Formatter(logFormat))
        self._console.setLevel(logging.INFO)
        # without this, a lot of ThreadsafeFace errors get swallowed up
        logging.getLogger("trollius").addHandler(self._console)
        self.log.addHandler(self._console)

    def setLogLevel(self, level):
        """
        Set the log level that will be output to standard error
        :param level: A log level constant defined in the logging module (e.g. logging.INFO) 
        """
        self._console.setLevel(level)

    def getLogger(self):
        """
        :return: The logger associated with this node
        :rtype: logging.Logger
        """
        return self.log

###
# Startup and shutdown
###
    def beforeLoopStart(self):
        """
        Called before the event loop starts.
        """
        pass

    def getDefaultCertificateName(self):
        try:
            certName = self._identityStorage.getDefaultCertificateNameForIdentity( 
                self._policyManager.getDeviceIdentity())
        except SecurityException:
            certName = self._keyChain.getDefaultCertificateName()

        return certName

    def start(self):
        """
        Begins the event loop. After this, the node's Face is set up and it can
        send/receive interests+data
        """
        self.log.info("Starting up")
        self.loop = asyncio.get_event_loop()
        self.face = ThreadsafeFace(self.loop, '')
        self.face.setCommandSigningInfo(self._keyChain, self.getDefaultCertificateName())
        self._keyChain.setFace(self.face)

        self._isStopped = False
        self.face.stopWhen(lambda:self._isStopped)
        self.beforeLoopStart()
        
        try:
            self.loop.run_forever()
        except KeyboardInterrupt:
            pass
        except Exception as e:
            self.log.exception(e, exc_info=True)
        finally:
            self._isStopped = True

    def stop(self):
        """
        Stops the node, taking it off the network
        """
        self.log.info("Shutting down")
        self._isStopped = True 
        
###
# Data handling
###
    def signData(self, data):
        """
        Sign the data with our network certificate
        :param pyndn.Data data: The data to sign
        """
        self._keyChain.sign(data, self.getDefaultCertificateName())

    def sendData(self, data, transport, sign=True):
        """
        Reply to an interest with a data packet, optionally signing it.
        :param pyndn.Data data: The response data packet
        :param pyndn.Transport transport: The transport to send the data through. This is 
            obtained from an incoming interest handler
        :param boolean sign: (optional, default=True) Whether the response must be signed. 
        """
        if sign:
            self.signData(data)
        transport.send(data.wireEncode().buf())

###
# Failure handling
###
    def onRegisterFailed(self, prefix):
        """
        Called when the node cannot register its name with the forwarder
        :param pyndn.Name prefix: The network name that failed registration
        """
        if self._registrationFailures < 5:
            self._registrationFailures += 1
            self.log.warn("Could not register {}, retry: {}/{}".format(prefix.toUri(), self._registrationFailures, 5)) 
            self.face.registerPrefix(self.prefix, self._onCommandReceived, self.onRegisterFailed)
        else:
            self.log.critical("Could not register device prefix, ABORTING")
            self._isStopped = True

    def verificationFailed(self, dataOrInterest):
        """
        Called when verification of a data packet or command interest fails.
        :param pyndn.Data or pyndn.Interest: The packet that could not be verified
        """
        self.log.info("Received invalid" + dataOrInterest.getName().toUri())

    @staticmethod
    def getDeviceSerial():
        """
        Find and return the serial number of the Raspberry Pi. Provided in case
        you wish to distinguish data from nodes with the same name by serial.
        :return: The serial number extracted from device information in /proc/cpuinfo
        :rtype: str
        """
        with open('/proc/cpuinfo') as f:
            for line in f:
                if line.startswith('Serial'):
                    return line.split(':')[1].strip()
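
BaseNode is meant to be subclassed. A minimal sketch of such a subclass is shown below (hypothetical names, assuming the same pyndn imports as BaseNode); it registers a prefix in beforeLoopStart and answers Interests through the sendData helper, providing the self.prefix and _onCommandReceived attributes that onRegisterFailed expects.

class EchoNode(BaseNode):
    def beforeLoopStart(self):
        # Register a prefix once the face exists; reuse BaseNode's failure handler.
        self.prefix = Name('/home/echo')
        self.face.registerPrefix(self.prefix, self._onCommandReceived,
                                 self.onRegisterFailed)

    def _onCommandReceived(self, prefix, interest, transport, registeredPrefixId):
        # Reply with a signed Data packet via the BaseNode helper.
        data = Data(interest.getName())
        data.setContent('echo')
        self.sendData(data, transport)

if __name__ == '__main__':
    EchoNode().start()
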
Exemplo n.º 30
0
class TestRegexMatching(ut.TestCase):
    def _certNameFromKeyName(self, keyName, keyIdx=-1):
        return keyName[:keyIdx].append("KEY").append(keyName[keyIdx:]).\
                append("ID-CERT").append("0")

    def setUp(self):
        # set up the keychain so we can sign data
        self.identityStorage = MemoryIdentityStorage()
        self.privateKeyStorage = MemoryPrivateKeyStorage()

        # not using the keychain for verification, so we don't need to set a
        # policy manager
        self.keyChain = KeyChain(
            IdentityManager(self.identityStorage, self.privateKeyStorage))
        self.identityName = Name('/SecurityTestSecRule/Basic/Longer')
        keyName = Name(self.identityName).append('ksk-2439872')
        self.defaultCertName = self._certNameFromKeyName(keyName)
        self.identityStorage.addKey(keyName, KeyType.RSA,
                                    Blob(DEFAULT_RSA_PUBLIC_KEY_DER))
        self.privateKeyStorage.setKeyPairForKeyName(
            keyName, KeyType.RSA, DEFAULT_RSA_PUBLIC_KEY_DER,
            DEFAULT_RSA_PRIVATE_KEY_DER)

        keyName = Name('/SecurityTestSecRule/Basic/ksk-0923489')
        self.identityStorage.addKey(keyName, KeyType.RSA,
                                    Blob(DEFAULT_RSA_PUBLIC_KEY_DER))
        self.privateKeyStorage.setKeyPairForKeyName(
            keyName, KeyType.RSA, DEFAULT_RSA_PUBLIC_KEY_DER,
            DEFAULT_RSA_PRIVATE_KEY_DER)
        self.shortCertName = self._certNameFromKeyName(keyName, -2)

    def test_name_relation(self):
        policyManagerPrefix = ConfigPolicyManager(
            "policy_config/relation_ruleset_prefix.conf")
        policyManagerStrict = ConfigPolicyManager(
            "policy_config/relation_ruleset_strict.conf")
        policyManagerEqual = ConfigPolicyManager(
            "policy_config/relation_ruleset_equal.conf")

        dataName = Name('/TestRule1')

        self.assertIsNotNone(
            policyManagerPrefix._findMatchingRule(dataName, 'data'),
            "Prefix relation should match prefix name")
        self.assertIsNotNone(
            policyManagerEqual._findMatchingRule(dataName, 'data'),
            "Equal relation should match prefix name")
        self.assertIsNone(
            policyManagerStrict._findMatchingRule(dataName, 'data'),
            "Strict-prefix relation should not match prefix name")

        dataName = Name('/TestRule1/hi')
        self.assertIsNotNone(
            policyManagerPrefix._findMatchingRule(dataName, 'data'),
            "Prefix relation should match longer name")
        self.assertIsNone(
            policyManagerEqual._findMatchingRule(dataName, 'data'),
            "Equal relation should not match longer name")
        self.assertIsNotNone(
            policyManagerStrict._findMatchingRule(dataName, 'data'),
            "Strict-prefix relation should match longer name")

        dataName = Name('/Bad/TestRule1/')
        self.assertIsNone(
            policyManagerPrefix._findMatchingRule(dataName, 'data'),
            "Prefix relation should not match inner components")
        self.assertIsNone(
            policyManagerEqual._findMatchingRule(dataName, 'data'),
            "Equal relation should not match inner components")
        self.assertIsNone(
            policyManagerStrict._findMatchingRule(dataName, 'data'),
            "Strict-prefix relation should  not match inner components")

    def test_simple_regex(self):
        policyManager = ConfigPolicyManager("policy_config/regex_ruleset.conf")
        dataName1 = Name('/SecurityTestSecRule/Basic')
        dataName2 = Name('/SecurityTestSecRule/Basic/More')
        dataName3 = Name('/SecurityTestSecRule/')
        dataName4 = Name('/SecurityTestSecRule/Other/TestData')
        dataName5 = Name('/Basic/Data')

        matchedRule1 = policyManager._findMatchingRule(dataName1, 'data')
        matchedRule2 = policyManager._findMatchingRule(dataName2, 'data')
        matchedRule3 = policyManager._findMatchingRule(dataName3, 'data')
        matchedRule4 = policyManager._findMatchingRule(dataName4, 'data')
        matchedRule5 = policyManager._findMatchingRule(dataName5, 'data')

        self.assertIsNotNone(matchedRule1)
        self.assertIsNone(matchedRule2)
        self.assertIsNotNone(matchedRule3)
        self.assertNotEqual(matchedRule3, matchedRule1,
                            "Rule regex matched extra components")
        self.assertIsNotNone(matchedRule4)
        self.assertNotEqual(matchedRule4, matchedRule1,
                            "Rule regex matched with missing component")

        self.assertIsNone(matchedRule5)

    def test_checker_hierarchical(self):
        policyManager = ConfigPolicyManager(
            "policy_config/hierarchical_ruleset.conf")

        dataName1 = Name('/SecurityTestSecRule/Basic/Data1')
        dataName2 = Name('/SecurityTestSecRule/Basic/Longer/Data2')

        data1 = Data(dataName1)
        data2 = Data(dataName2)

        matchedRule = policyManager._findMatchingRule(dataName1, 'data')
        self.assertEqual(matchedRule,
                         policyManager._findMatchingRule(dataName2, 'data'))

        self.keyChain.sign(data1, self.defaultCertName)
        self.keyChain.sign(data2, self.defaultCertName)

        signatureName1 = data1.getSignature().getKeyLocator().getKeyName()
        signatureName2 = data2.getSignature().getKeyLocator().getKeyName()

        self.assertFalse(
            policyManager._checkSignatureMatch(signatureName1, dataName1,
                                               matchedRule),
            "Hierarchical matcher matched short data name to long key name")

        self.assertTrue(
            policyManager._checkSignatureMatch(signatureName2, dataName2,
                                               matchedRule))

        self.keyChain.sign(data1, self.shortCertName)
        self.keyChain.sign(data2, self.shortCertName)

        signatureName1 = data1.getSignature().getKeyLocator().getKeyName()
        signatureName2 = data2.getSignature().getKeyLocator().getKeyName()

        self.assertTrue(
            policyManager._checkSignatureMatch(signatureName1, dataName1,
                                               matchedRule))
        self.assertTrue(
            policyManager._checkSignatureMatch(signatureName2, dataName2,
                                               matchedRule))

    def test_hyperrelation(self):
        policyManager = ConfigPolicyManager(
            "policy_config/hyperrelation_ruleset.conf")

        dataName = Name('/SecurityTestSecRule/Basic/Longer/Data2')
        data1 = Data(dataName)
        data2 = Data(dataName)

        matchedRule = policyManager._findMatchingRule(dataName, 'data')
        self.keyChain.sign(data1, self.defaultCertName)
        self.keyChain.sign(data2, self.shortCertName)

        signatureName1 = data1.getSignature().getKeyLocator().getKeyName()
        signatureName2 = data2.getSignature().getKeyLocator().getKeyName()

        self.assertTrue(
            policyManager._checkSignatureMatch(signatureName1, dataName,
                                               matchedRule))
        self.assertFalse(
            policyManager._checkSignatureMatch(signatureName2, dataName,
                                               matchedRule))

        dataName = Name('/SecurityTestSecRule/Basic/Other/Data1')
        data1 = Data(dataName)
        data2 = Data(dataName)

        matchedRule = policyManager._findMatchingRule(dataName, 'data')
        self.keyChain.sign(data1, self.defaultCertName)
        self.keyChain.sign(data2, self.shortCertName)

        signatureName1 = data1.getSignature().getKeyLocator().getKeyName()
        signatureName2 = data2.getSignature().getKeyLocator().getKeyName()

        self.assertFalse(
            policyManager._checkSignatureMatch(signatureName1, dataName,
                                               matchedRule))
        self.assertTrue(
            policyManager._checkSignatureMatch(signatureName2, dataName,
                                               matchedRule))

    def test_interest_matching(self):
        # make sure we chop off timestamp, nonce, and signature info from
        # signed interests
        pass
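
The tests above exercise ConfigPolicyManager's internal rule-matching helpers directly. For reference, a minimal sketch of how such a policy manager would normally be used for verification, assuming the same policy_config files and an already signed Data packet called signedData (illustrative only; failure-callback signatures vary slightly between pyndn versions, so the callback below accepts extra arguments):

policyManager = ConfigPolicyManager("policy_config/hierarchical_ruleset.conf")
verifyChain = KeyChain(
    IdentityManager(MemoryIdentityStorage(), MemoryPrivateKeyStorage()),
    policyManager)

def onVerified(data):
    print("Verified:", data.getName().toUri())

def onVerifyFailed(data, *reason):
    # Some pyndn versions pass only the packet, others also a reason.
    print("Verification failed for:", data.getName().toUri())

verifyChain.verifyData(signedData, onVerified, onVerifyFailed)
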
Exemplo n.º 31
0
class Producer(object):
    def __init__(self):
        self.keyChain = KeyChain()
        self.isDone = False

    def run(self, namespace):
        face = Face()

        prefix = Name(namespace)

        face.setCommandSigningInfo(self.keyChain, \
                                self.keyChain.getDefaultCertificateName())

        face.registerPrefix(prefix, self.onInterest, self.onRegisterFailed)

        print("Registering prefix", prefix.toUri())

        while not self.isDone:
            face.processEvents()
            time.sleep(0.01)
        pass

    def onInterest(self, prefix, interest, transport, registeredPrefixId):
        interestName = interest.getName()

        data = Data(interestName)
        dt = datetime.now()

        if (interestName.toUri() == "/vanet-ndn/iklan1"):
            data.setContent(
                "\nLakukan Pengisian Pulsa Telkomsel sekarang juga \nDapatkan pulsa 5000 dan kelipatan untuk setiap pembelian pulsa 10000 dan kelipatannya\n"
            )  # this part can be replaced with information from a sensor
            global iklan1
            iklan1 = data.getContent()
        elif (interestName.toUri() == "/vanet-ndn/iklan2"):
            data.setContent(
                "\nLakukan Pengisian Kuota Indosat sekarang juga! \nDapatkan kemudahan dalam berinternet digenggamanmu\n"
            )  # this part can be replaced with information from a sensor
            global iklan2
            iklan2 = data.getContent()
        elif (interestName.toUri() == "/vanet-ndn/iklan3"):
            data.setContent(
                "\nIngin memiliki Smartphone baru? \nDapatkan smartphone impian mu dengan XL\nCek xl.co.id untuk penawaran menarik\n"
            )  # this part can be replaced with information from a sensor
            global iklan3
            iklan3 = data.getContent()

        freshnessMilliseconds = 30
        data.getMetaInfo().setFreshnessPeriod(freshnessMilliseconds)

        self.keyChain.sign(data, self.keyChain.getDefaultCertificateName())

        transport.send(data.wireEncode().toBuffer())

        print("Replied to: %s - Time: %s " % (interestName.toUri(), dt))
        print(
            "==================================================================\n"
        )
        pass

    def onRegisterFailed(self, prefix):
        print("Register failed for prefix", prefix.toUri())
        self.isDone = True
        pass
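
A matching consumer sketch (hypothetical, not part of the original example) that fetches one of the advertisements served by the Producer above, assuming the same Face, Name and time imports:

def fetch_advertisement(uri="/vanet-ndn/iklan1"):
    face = Face()
    done = [False]

    def on_data(interest, data):
        print("Got:", data.getContent().toRawStr())
        done[0] = True

    def on_timeout(interest):
        print("Timed out:", interest.getName().toUri())
        done[0] = True

    face.expressInterest(Name(uri), on_data, on_timeout)
    while not done[0]:
        face.processEvents()
        time.sleep(0.01)
    face.shutdown()
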
Exemplo n.º 32
0
class TestProducer(ut.TestCase):
    def setUp(self):
        self.decryptionKeys = {} # key: Name, value: Blob
        self.encryptionKeys = {} # key: Name, value: Data

        # Reuse the policy_config subdirectory for the temporary SQLite files.
        self.databaseFilePath = "policy_config/test.db"
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            # no such file
            pass

        # Set up the keyChain.
        identityStorage = MemoryIdentityStorage()
        privateKeyStorage = MemoryPrivateKeyStorage()
        self.keyChain = KeyChain(
          IdentityManager(identityStorage, privateKeyStorage),
          NoVerifyPolicyManager())
        identityName = Name("TestProducer")
        self.certificateName = self.keyChain.createIdentityAndCertificate(identityName)
        self.keyChain.getIdentityManager().setDefaultIdentity(identityName)

    def tearDown(self):
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            pass

    def createEncryptionKey(self, eKeyName, timeMarker):
        params = RsaKeyParams()
        eKeyName = Name(eKeyName)
        eKeyName.append(timeMarker)

        dKeyBlob = RsaAlgorithm.generateKey(params).getKeyBits()
        eKeyBlob = RsaAlgorithm.deriveEncryptKey(dKeyBlob).getKeyBits()
        self.decryptionKeys[eKeyName] = dKeyBlob

        keyData = Data(eKeyName)
        keyData.setContent(eKeyBlob)
        self.keyChain.sign(keyData, self.certificateName)
        self.encryptionKeys[eKeyName] = keyData

    def test_content_key_request(self):
        prefix = Name("/prefix")
        suffix = Name("/a/b/c")
        expectedInterest = Name(prefix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_READ)
        expectedInterest.append(suffix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_E_KEY)

        cKeyName = Name(prefix)
        cKeyName.append(Encryptor.NAME_COMPONENT_SAMPLE)
        cKeyName.append(suffix)
        cKeyName.append(Encryptor.NAME_COMPONENT_C_KEY)

        timeMarker = Name("20150101T100000/20150101T120000")
        testTime1 = Schedule.fromIsoString("20150101T100001")
        testTime2 = Schedule.fromIsoString("20150101T110001")
        testTimeRounded1 = Name.Component("20150101T100000")
        testTimeRounded2 = Name.Component("20150101T110000")

        # Create content keys required for this test case:
        for i in range(suffix.size()):
          self.createEncryptionKey(expectedInterest, timeMarker)
          expectedInterest = expectedInterest.getPrefix(-2).append(
            Encryptor.NAME_COMPONENT_E_KEY)

        expressInterestCallCount = [0]

        # Prepare a TestFace to instantly answer calls to expressInterest.
        class TestFace(object):
            def __init__(self, handleExpressInterest):
                self.handleExpressInterest = handleExpressInterest
            def expressInterest(self, interest, onData, onTimeout):
                return self.handleExpressInterest(interest, onData, onTimeout)

        def handleExpressInterest(interest, onData, onTimeout):
            expressInterestCallCount[0] += 1

            interestName = Name(interest.getName())
            interestName.append(timeMarker)
            self.assertTrue(interestName in self.encryptionKeys)
            onData(interest, self.encryptionKeys[interestName])

            return 0
        face = TestFace(handleExpressInterest)

        # Verify that the content key is correctly encrypted for each domain, and
        # the produce method encrypts the provided data with the same content key.
        testDb = Sqlite3ProducerDb(self.databaseFilePath)
        producer = Producer(prefix, suffix, face, self.keyChain, testDb)
        contentKey = [None] # Blob

        def checkEncryptionKeys(
          result, testTime, roundedTime, expectedExpressInterestCallCount):
            self.assertEqual(expectedExpressInterestCallCount,
                             expressInterestCallCount[0])

            self.assertEqual(True, testDb.hasContentKey(testTime))
            contentKey[0] = testDb.getContentKey(testTime)

            params = EncryptParams(EncryptAlgorithmType.RsaOaep)
            for i in range(len(result)):
                key = result[i]
                keyName = key.getName()
                self.assertEqual(cKeyName, keyName.getSubName(0, 6))
                self.assertEqual(keyName.get(6), roundedTime)
                self.assertEqual(keyName.get(7), Encryptor.NAME_COMPONENT_FOR)
                self.assertEqual(
                  True, keyName.getSubName(8) in self.decryptionKeys)

                decryptionKey = self.decryptionKeys[keyName.getSubName(8)]
                self.assertEqual(True, decryptionKey.size() != 0)
                encryptedKeyEncoding = key.getContent()

                content = EncryptedContent()
                content.wireDecode(encryptedKeyEncoding)
                encryptedKey = content.getPayload()
                retrievedKey = RsaAlgorithm.decrypt(
                  decryptionKey, encryptedKey, params)

                self.assertTrue(contentKey[0].equals(retrievedKey))

            self.assertEqual(3, len(result))

        # An initial test to confirm that keys are created for this time slot.
        contentKeyName1 = producer.createContentKey(
          testTime1,
          lambda keys: checkEncryptionKeys(keys, testTime1, testTimeRounded1, 3))

        # Verify that we do not repeat the search for e-keys. The total
        #   expressInterestCallCount should be the same.
        contentKeyName2 = producer.createContentKey(
          testTime2,
          lambda keys: checkEncryptionKeys(keys, testTime2, testTimeRounded2, 3))

        # Confirm content key names are correct
        self.assertEqual(cKeyName, contentKeyName1.getPrefix(-1))
        self.assertEqual(testTimeRounded1, contentKeyName1.get(6))
        self.assertEqual(cKeyName, contentKeyName2.getPrefix(-1))
        self.assertEqual(testTimeRounded2, contentKeyName2.get(6))

        # Confirm that produce encrypts with the correct key and has the right name.
        testData = Data()
        producer.produce(testData, testTime2, Blob(DATA_CONTENT, False))

        producedName = testData.getName()
        self.assertEqual(cKeyName.getPrefix(-1), producedName.getSubName(0, 5))
        self.assertEqual(testTimeRounded2, producedName.get(5))
        self.assertEqual(Encryptor.NAME_COMPONENT_FOR, producedName.get(6))
        self.assertEqual(cKeyName, producedName.getSubName(7, 6))
        self.assertEqual(testTimeRounded2, producedName.get(13))

        dataBlob = testData.getContent()

        dataContent = EncryptedContent()
        dataContent.wireDecode(dataBlob)
        encryptedData = dataContent.getPayload()
        initialVector = dataContent.getInitialVector()

        params = EncryptParams(EncryptAlgorithmType.AesCbc, 16)
        params.setInitialVector(initialVector)
        decryptTest = AesAlgorithm.decrypt(contentKey[0], encryptedData, params)
        self.assertTrue(decryptTest.equals(Blob(DATA_CONTENT, False)))

    def test_content_key_search(self):
        timeMarkerFirstHop = Name("20150101T070000/20150101T080000")
        timeMarkerSecondHop = Name("20150101T080000/20150101T090000")
        timeMarkerThirdHop = Name("20150101T100000/20150101T110000")

        prefix = Name("/prefix")
        suffix = Name("/suffix")
        expectedInterest = Name(prefix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_READ)
        expectedInterest.append(suffix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_E_KEY)

        cKeyName = Name(prefix)
        cKeyName.append(Encryptor.NAME_COMPONENT_SAMPLE)
        cKeyName.append(suffix)
        cKeyName.append(Encryptor.NAME_COMPONENT_C_KEY)

        testTime = Schedule.fromIsoString("20150101T100001")

        # Create content keys required for this test case:
        self.createEncryptionKey(expectedInterest, timeMarkerFirstHop)
        self.createEncryptionKey(expectedInterest, timeMarkerSecondHop)
        self.createEncryptionKey(expectedInterest, timeMarkerThirdHop)

        requestCount = [0]

        # Prepare a TestFace to instantly answer calls to expressInterest.
        class TestFace(object):
            def __init__(self, handleExpressInterest):
                self.handleExpressInterest = handleExpressInterest
            def expressInterest(self, interest, onData, onTimeout):
                return self.handleExpressInterest(interest, onData, onTimeout)

        def handleExpressInterest(interest, onData, onTimeout):
            self.assertEqual(expectedInterest, interest.getName())

            gotInterestName = False
            for i in range(3):
              interestName = Name(interest.getName())
              if i == 0:
                interestName.append(timeMarkerFirstHop)
              elif i == 1:
                interestName.append(timeMarkerSecondHop)
              elif i == 2:
                interestName.append(timeMarkerThirdHop)

              # matchesName will check the Exclude.
              if interest.matchesName(interestName):
                gotInterestName = True
                requestCount[0] += 1
                break

            if gotInterestName:
              onData(interest, self.encryptionKeys[interestName])

            return 0
        face = TestFace(handleExpressInterest)

        # Verify that if a key is found, but not within the right time slot, the
        # search is refined until a valid time slot is found.
        testDb = Sqlite3ProducerDb(self.databaseFilePath)
        producer = Producer(prefix, suffix, face, self.keyChain, testDb)
        def onEncryptedKeys(result):
            self.assertEqual(3, requestCount[0])
            self.assertEqual(1, len(result))

            keyData = result[0]
            keyName = keyData.getName()
            self.assertEqual(cKeyName, keyName.getSubName(0, 4))
            self.assertEqual(timeMarkerThirdHop.get(0), keyName.get(4))
            self.assertEqual(Encryptor.NAME_COMPONENT_FOR, keyName.get(5))
            self.assertEqual(expectedInterest.append(timeMarkerThirdHop),
                             keyName.getSubName(6))
        producer.createContentKey(testTime, onEncryptedKeys)

    def test_content_key_timeout(self):
        prefix = Name("/prefix")
        suffix = Name("/suffix")
        expectedInterest = Name(prefix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_READ)
        expectedInterest.append(suffix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_E_KEY)

        testTime = Schedule.fromIsoString("20150101T100001")

        timeoutCount = [0]

        # Prepare a TestFace to instantly answer calls to expressInterest.
        class TestFace(object):
            def __init__(self, handleExpressInterest):
                self.handleExpressInterest = handleExpressInterest
            def expressInterest(self, interest, onData, onTimeout):
                return self.handleExpressInterest(interest, onData, onTimeout)

        def handleExpressInterest(interest, onData, onTimeout):
            self.assertEqual(expectedInterest, interest.getName())
            timeoutCount[0] += 1
            onTimeout(interest)

            return 0
        face = TestFace(handleExpressInterest)

        # Verify that if no response is received, the producer appropriately times
        # out. The result vector should not contain elements that have timed out.
        testDb = Sqlite3ProducerDb(self.databaseFilePath)
        producer = Producer(prefix, suffix, face, self.keyChain, testDb)
        def onEncryptedKeys(result):
            self.assertEqual(4, timeoutCount[0])
            self.assertEqual(0, len(result))
        producer.createContentKey(testTime, onEncryptedKeys)
Exemplo n.º 33
0
class Producer(object):
    def __init__(self):
        self.keyChain = KeyChain()
        self.isDone = False
        self.prefixesList = []
        self.gatewayIP = "gatewayIP"
        self.gatewayFace = 0

    def run(self, namespace):
        # Create a connection to the local forwarder over a Unix socket
        self.gatewayIP = os.popen('ip route | grep default').read().split(" ")[
            2]  #get gateway IP of gateway router
        os.system("ndn-autoconfig"
                  )  #perform ndn-autoconfig create face to gateway router
        os.system(
            "nfdc cs erase /localhop/ndn-autoconf/hub"
        )  #delete cached result from ndn-autoconfig for future connection
        self.gatewayFace = os.popen(
            'nfdc route list | grep localhop/nfd').read().split(" ")[1][8:]
        os.system("nfdc route add /ndnchat/register " + self.gatewayFace)
        face = Face()

        prefix = Name(namespace)

        # Use the system default key chain and certificate name to sign commands.
        face.setCommandSigningInfo(self.keyChain, \
                                  self.keyChain.getDefaultCertificateName())

        # Also use the default certificate name to sign Data packets.
        face.registerPrefix(prefix, self.onInterest, self.onRegisterFailed)

        print "Registering prefix", prefix.toUri()

        # Run the event loop forever. Use a short sleep to
        # prevent the Producer from using 100% of the CPU.
        while not self.isDone:
            # Check whether the gateway IP has changed. If it has:
            # delete the old gateway face and add a route from the UE to the new gateway for each prefix in the local prefix list,
            # then concatenate the prefixes as "prefix1|prefix2" and call interestSender to send them to the new gateway router.
            try:
                newgatewayIP = os.popen(
                    'ip route | grep default').read().split(" ")[2]
                if newgatewayIP != self.gatewayIP:
                    os.system("nfdc face destroy " + self.gatewayFace)
                    print "Changes in network detected, attempt re-advertising prefixes"
                    os.system("ndn-autoconfig")
                    # delete the cached ndn-autoconfig result so later runs re-discover the hub
                    os.system("nfdc cs erase /localhop/ndn-autoconf/hub")
                    self.gatewayFace = os.popen(
                        'nfdc route list | grep localhop/nfd').read().split(
                            " ")[1][8:]
                    os.system("nfdc route add /ndnchat/register " +
                              self.gatewayFace)
                    concatPrefixesList = ""
                    for i in range(0, len(self.prefixesList)):
                        os.system("nfdc route add " +
                                  str(self.prefixesList[i]) + " " +
                                  self.gatewayFace)
                        if i == len(self.prefixesList) - 1:
                            concatPrefixesList = concatPrefixesList + self.prefixesList[
                                i]
                        else:
                            concatPrefixesList = concatPrefixesList + self.prefixesList[
                                i] + "|"
                    concatPrefixesList = concatPrefixesList + "|" + get_device_ip(
                    )

                    subprocess.call([
                        "python", "interestSender.py", "-u /ndnchat/register",
                        "-p" + concatPrefixesList
                    ])
                    self.gatewayIP = newgatewayIP
            except IndexError:
                print "Lost Internet Connection"
                self.gatewayIP = ""

            face.processEvents()
            time.sleep(0.01)

    def onInterest(self, prefix, interest, transport, registeredPrefixId):
        interestName = interest.getName()
        interestParams = str(interest.getApplicationParameters())
        addPrefixes = interestParams.split("|")
        interestParams = interestParams + "|" + get_device_ip()
        # For each prefix received, save it in the local list and
        # add a route for that prefix from the UE to the gateway router
        for i in range(0, len(addPrefixes)):
            print addPrefixes[i]
            self.prefixesList.append(str(addPrefixes[i]))
            os.system("nfdc route add " + str(addPrefixes[i]) + " " +
                      self.gatewayFace)

        # Send prefixesList to gateway router
        subprocess.call([
            "python", "interestSender.py", "-u /ndnchat/register",
            "-p " + interestParams
        ])

        data = Data(interestName)
        data.setContent("Register Successful")

        hourMilliseconds = 3600 * 1000
        data.getMetaInfo().setFreshnessPeriod(hourMilliseconds)

        self.keyChain.sign(data, self.keyChain.getDefaultCertificateName())

        transport.send(data.wireEncode().toBuffer())

        print "Sent advertisement to gateway router"

        #self.isDone = True

    def onRegisterFailed(self, prefix):
        print "Register failed for prefix", prefix.toUri()
        self.isDone = True
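
The gateway re-discovery steps above (run ndn-autoconfig, clear its cached hub record, then look up the face ID of the /localhop/nfd route) are repeated inside run(). A small helper sketch that captures them with the same shell commands might look like this (hypothetical refactoring, not part of the original):

def refresh_gateway_face():
    # Re-run autoconfiguration and drop the cached hub record so the next
    # run can discover a (possibly different) gateway.
    os.system("ndn-autoconfig")
    os.system("nfdc cs erase /localhop/ndn-autoconf/hub")
    # Return the face ID of the route toward the hub, as parsed above.
    route_line = os.popen('nfdc route list | grep localhop/nfd').read()
    return route_line.split(" ")[1][8:]
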
Exemplo n.º 34
0
class TestFaceRegisterMethods(ut.TestCase):
    def setUp(self):
        self.face_in = Face()
        self.face_out = Face()
        self.keyChain = KeyChain()

    def tearDown(self):
        self.face_in.shutdown()
        self.face_out.shutdown()

    def onInterestEffect(self, prefix, interest, transport, prefixID):
        data = Data(interest.getName())
        data.setContent("SUCCESS")
        self.keyChain.sign(data, self.keyChain.getDefaultCertificateName())
        encodedData = data.wireEncode()
        transport.send(encodedData.toBuffer())

    def test_register_prefix_response(self):
        # command interests must be signed before we can register a prefix
        prefixName = Name("/test")
        self.face_in.setCommandSigningInfo(self.keyChain,
                self.keyChain.getDefaultCertificateName())

        failedCallback = Mock()
        interestCallback = Mock(side_effect=self.onInterestEffect)

        self.face_in.registerPrefix(prefixName, interestCallback, failedCallback)
        server = gevent.spawn(self.face_process_events, self.face_in, [interestCallback, failedCallback], 'h')

        time.sleep(1) # give the 'server' time to register the interest

        # express an interest on another face
        dataCallback = Mock()
        timeoutCallback = Mock()

        # now express an interest on this new face, and see if onInterest is called
        interestName = prefixName.append("hello")
        self.face_out.expressInterest(interestName, dataCallback, timeoutCallback)

        client = gevent.spawn(self.face_process_events, self.face_out, [dataCallback, timeoutCallback], 'c')

        gevent.joinall([server, client], timeout=10)

        self.assertEqual(failedCallback.call_count, 0, 'Failed to register prefix at all')

        self.assertEqual(interestCallback.call_count, 1, 'Expected 1 onInterest callback, got '+str(interestCallback.call_count))

        self.assertEqual(dataCallback.call_count, 1, 'Expected 1 onData callback, got '+str(dataCallback.call_count))

        onDataArgs = dataCallback.call_args[0]
        # check the message content
        data = onDataArgs[1]
        expectedBlob = Blob(bytearray("SUCCESS"))
        self.assertTrue(expectedBlob == data.getContent(), 'Data received on face does not match expected format')

    def face_process_events(self, face, callbacks, name=None):
        # implemented as a 'greenlet': something like a thread, but semi-synchronous
        # callbacks should be a list
        done = False
        while not done:
            face.processEvents()
            gevent.sleep()
            for c in callbacks:

                if (c.call_count > 0):
                    done = True
class IdentityManagementFixture(object):
    def __init__(self):
        self._keyChain = KeyChain("pib-memory:", "tpm-memory:")
        self._identityNames = set()
        self._certificateFiles = set()

    def saveCertificateToFile(self, data, filePath):
        """
        :param Data data: The certificate to save.
        :param str filePath: The file path, which should be writable.
        :return: True if successful.
        :rtype: bool
        """
        self._certificateFiles.add(filePath)

        try:
            encoding = data.wireEncode()
            encodedCertificate = Common.base64Encode(encoding.toBytes(), True)

            with open(filePath, 'w') as keyFile:
                keyFile.write(encodedCertificate)

            return True
        except Exception:
            return False

    def addIdentity(self, identityName, params = None):
        """
        Add an identity for the identityName.

        :param Name identityName: The name of the identity.
        :param KeyParams params: (optional) The key parameters if a key needs to
          be generated for the identity. If omitted, use
          KeyChain.getDefaultKeyParams().
        :return: The created PibIdentity instance.
        :rtype: PibIdentity
        """
        if params == None:
            params = KeyChain.getDefaultKeyParams()

        identity = self._keyChain.createIdentityV2(identityName, params)
        self._identityNames.add(identityName)
        return identity

    def saveCertificate(self, identity, filePath):
        """
        Save the identity's certificate to a file.

        :param PibIdentity identity: The PibIdentity.
        :param str filePath: The file path, which should be writable.
        :return: True if successful.
        :rtype: bool
        """
        try:
            certificate = identity.getDefaultKey().getDefaultCertificate()
            return self.saveCertificateToFile(certificate, filePath)
        except Pib.Error:
            return False

    def addSubCertificate(self, subIdentityName, issuer, params = None):
        """
        Issue a certificate for subIdentityName signed by issuer. If the
        identity does not exist, it is created. A new key is generated as the
        default key for the identity. A default certificate for the key is
        signed by the issuer using its default certificate.
        """
        if params == None:
            params = KeyChain.getDefaultKeyParams()

        subIdentity = self.addIdentity(subIdentityName, params)

        request = subIdentity.getDefaultKey().getDefaultCertificate()

        request.setName(request.getKeyName().append("parent").appendVersion(1))

        certificateParams = SigningInfo(issuer)
        # Validity period of 20 years.
        now = Common.getNowMilliseconds()
        certificateParams.setValidityPeriod(
          ValidityPeriod(now, now + 20 * 365 * 24 * 3600 * 1000.0))

        # Skip the AdditionalDescription.

        self._keyChain.sign(request, certificateParams)
        self._keyChain.setDefaultCertificate(subIdentity.getDefaultKey(), request)

        return subIdentity

    def addCertificate(self, key, issuerId):
        """
        Add a self-signed certificate made from the key and issuer ID.

        :param PibKey key: The key for the certificate.
        :param str issuerId: The issuer ID name component for the certificate
          name.
        :return: The new certificate.
        :rtype: CertificateV2
        """
        certificateName = Name(key.getName())
        certificateName.append(issuerId).appendVersion(3)
        certificate = CertificateV2()
        certificate.setName(certificateName)

        # Set the MetaInfo.
        certificate.getMetaInfo().setType(ContentType.KEY)
        # One hour.
        certificate.getMetaInfo().setFreshnessPeriod(3600 * 1000.0)

        # Set the content.
        certificate.setContent(key.getPublicKey())

        params = SigningInfo(key)
        # Validity period of 10 days.
        now = Common.getNowMilliseconds()
        params.setValidityPeriod(
          ValidityPeriod(now, now + 10 * 24 * 3600 * 1000.0))

        self._keyChain.sign(certificate, params)
        return certificate
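
A short usage sketch for the fixture above (names are illustrative): create a root identity, issue a sub-identity certificate signed by it, and write that certificate to disk.

fixture = IdentityManagementFixture()
root = fixture.addIdentity(Name("/example/root"))
device = fixture.addSubCertificate(Name("/example/root/device1"), root)
fixture.saveCertificateToFile(
    device.getDefaultKey().getDefaultCertificate(), "device1.cert")
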
Exemplo n.º 36
0
class TestRegexMatching(ut.TestCase):

    def _certNameFromKeyName(self, keyName, keyIdx=-1):
        return keyName[:keyIdx].append("KEY").append(keyName[keyIdx:]).\
                append("ID-CERT").append("0")

    def setUp(self):
        # set up the keychain so we can sign data
        self.identityStorage = MemoryIdentityStorage()
        self.privateKeyStorage = MemoryPrivateKeyStorage()

        # not using the keychain for verification, so we don't need to set a
        # policy manager
        self.keyChain = KeyChain(IdentityManager(self.identityStorage, self.privateKeyStorage))
        self.identityName = Name('/SecurityTestSecRule/Basic/Longer')
        keyName = Name(self.identityName).append('ksk-2439872')
        self.defaultCertName = self._certNameFromKeyName(keyName)
        self.identityStorage.addKey(keyName, KeyType.RSA, Blob(DEFAULT_RSA_PUBLIC_KEY_DER))
        self.privateKeyStorage.setKeyPairForKeyName(
      keyName, KeyType.RSA, DEFAULT_RSA_PUBLIC_KEY_DER, DEFAULT_RSA_PRIVATE_KEY_DER)

        keyName = Name('/SecurityTestSecRule/Basic/ksk-0923489')
        self.identityStorage.addKey(keyName, KeyType.RSA, Blob(DEFAULT_RSA_PUBLIC_KEY_DER))
        self.privateKeyStorage.setKeyPairForKeyName(
      keyName, KeyType.RSA, DEFAULT_RSA_PUBLIC_KEY_DER, DEFAULT_RSA_PRIVATE_KEY_DER)
        self.shortCertName = self._certNameFromKeyName(keyName, -2)

    def test_name_relation(self):
        policyManagerPrefix = ConfigPolicyManager("policy_config/relation_ruleset_prefix.conf")
        policyManagerStrict = ConfigPolicyManager("policy_config/relation_ruleset_strict.conf")
        policyManagerEqual = ConfigPolicyManager("policy_config/relation_ruleset_equal.conf")

        dataName = Name('/TestRule1')

        self.assertIsNotNone(
                policyManagerPrefix._findMatchingRule(dataName, 'data'),
                "Prefix relation should match prefix name")
        self.assertIsNotNone(
                policyManagerEqual._findMatchingRule(dataName, 'data'),
                "Equal relation should match prefix name")
        self.assertIsNone(
                policyManagerStrict._findMatchingRule(dataName, 'data'),
                "Strict-prefix relation should not match prefix name")

        dataName = Name('/TestRule1/hi')
        self.assertIsNotNone(
                policyManagerPrefix._findMatchingRule(dataName, 'data'),
                "Prefix relation should match longer name")
        self.assertIsNone(
                policyManagerEqual._findMatchingRule(dataName, 'data'),
                "Equal relation should not match longer name")
        self.assertIsNotNone(
                policyManagerStrict._findMatchingRule(dataName, 'data'),
                "Strict-prefix relation should match longer name")

        dataName = Name('/Bad/TestRule1/')
        self.assertIsNone(
                policyManagerPrefix._findMatchingRule(dataName, 'data'),
                "Prefix relation should not match inner components")
        self.assertIsNone(
                policyManagerEqual._findMatchingRule(dataName, 'data'),
                "Equal relation should not match inner components")
        self.assertIsNone(
                policyManagerStrict._findMatchingRule(dataName, 'data'),
                "Strict-prefix relation should  not match inner components")

    def test_simple_regex(self):
        policyManager = ConfigPolicyManager("policy_config/regex_ruleset.conf")
        dataName1 = Name('/SecurityTestSecRule/Basic')
        dataName2 = Name('/SecurityTestSecRule/Basic/More')
        dataName3 = Name('/SecurityTestSecRule/')
        dataName4 = Name('/SecurityTestSecRule/Other/TestData')
        dataName5 = Name('/Basic/Data')

        matchedRule1 = policyManager._findMatchingRule(dataName1, 'data')
        matchedRule2 = policyManager._findMatchingRule(dataName2, 'data')
        matchedRule3 = policyManager._findMatchingRule(dataName3, 'data')
        matchedRule4 = policyManager._findMatchingRule(dataName4, 'data')
        matchedRule5 = policyManager._findMatchingRule(dataName5, 'data')

        self.assertIsNotNone(matchedRule1)
        self.assertIsNone(matchedRule2)
        self.assertIsNotNone(matchedRule3)
        self.assertNotEqual(matchedRule3, matchedRule1,
                "Rule regex matched extra components")
        self.assertIsNotNone(matchedRule4)
        self.assertNotEqual(matchedRule4, matchedRule1,
                "Rule regex matched with missing component")

        self.assertIsNone(matchedRule5)

    def test_checker_hierarchical(self):
        policyManager = ConfigPolicyManager("policy_config/hierarchical_ruleset.conf")

        dataName1 = Name('/SecurityTestSecRule/Basic/Data1')
        dataName2 = Name('/SecurityTestSecRule/Basic/Longer/Data2')

        data1 = Data(dataName1)
        data2 = Data(dataName2)

        matchedRule = policyManager._findMatchingRule(dataName1, 'data')
        self.assertEqual(matchedRule,
                policyManager._findMatchingRule(dataName2, 'data'))

        self.keyChain.sign(data1, self.defaultCertName)
        self.keyChain.sign(data2, self.defaultCertName)

        signatureName1 = data1.getSignature().getKeyLocator().getKeyName()
        signatureName2 = data2.getSignature().getKeyLocator().getKeyName()

        self.assertFalse(policyManager._checkSignatureMatch(signatureName1,
            dataName1, matchedRule),
            "Hierarchical matcher matched short data name to long key name")

        self.assertTrue(policyManager._checkSignatureMatch(signatureName2,
            dataName2, matchedRule))

        self.keyChain.sign(data1, self.shortCertName)
        self.keyChain.sign(data2, self.shortCertName)

        signatureName1 = data1.getSignature().getKeyLocator().getKeyName()
        signatureName2 = data2.getSignature().getKeyLocator().getKeyName()

        self.assertTrue(policyManager._checkSignatureMatch(signatureName1,
            dataName1, matchedRule))
        self.assertTrue(policyManager._checkSignatureMatch(signatureName2,
            dataName2, matchedRule))


    def test_hyperrelation(self):
        policyManager = ConfigPolicyManager("policy_config/hyperrelation_ruleset.conf")

        dataName = Name('/SecurityTestSecRule/Basic/Longer/Data2')
        data1 = Data(dataName)
        data2 = Data(dataName)

        matchedRule = policyManager._findMatchingRule(dataName, 'data')
        self.keyChain.sign(data1, self.defaultCertName)
        self.keyChain.sign(data2, self.shortCertName)

        signatureName1 = data1.getSignature().getKeyLocator().getKeyName()
        signatureName2 = data2.getSignature().getKeyLocator().getKeyName()

        self.assertTrue(policyManager._checkSignatureMatch(signatureName1,
            dataName, matchedRule))
        self.assertFalse(policyManager._checkSignatureMatch(signatureName2,
            dataName, matchedRule))

        dataName = Name('/SecurityTestSecRule/Basic/Other/Data1')
        data1 = Data(dataName)
        data2 = Data(dataName)

        matchedRule = policyManager._findMatchingRule(dataName, 'data')
        self.keyChain.sign(data1, self.defaultCertName)
        self.keyChain.sign(data2, self.shortCertName)

        signatureName1 = data1.getSignature().getKeyLocator().getKeyName()
        signatureName2 = data2.getSignature().getKeyLocator().getKeyName()

        self.assertFalse(policyManager._checkSignatureMatch(signatureName1,
            dataName, matchedRule))
        self.assertTrue(policyManager._checkSignatureMatch(signatureName2,
            dataName, matchedRule))

    def test_interest_matching(self):
        # make sure we chop off timestamp, nonce, and signature info from
        # signed interests
        pass
class IdentityManagementFixture(object):
    def __init__(self):
        self._keyChain = KeyChain("pib-memory:", "tpm-memory:")
        self._identityNames = set()
        self._certificateFiles = set()

    def saveCertificateToFile(self, data, filePath):
        """
        :param Data data: The certificate to save.
        :param str filePath: The file path, which should be writable.
        :return: True if successful.
        :rtype: bool
        """
        self._certificateFiles.add(filePath)

        try:
            encoding = data.wireEncode()
            encodedCertificate = Common.base64Encode(encoding.toBytes(), True)

            with open(filePath, 'w') as keyFile:
                keyFile.write(encodedCertificate)

            return True
        except Exception:
            return False

    def addIdentity(self, identityName, params = None):
        """
        Add an identity for the identityName.

        :param Name identityName: The name of the identity.
        :param KeyParams params: (optional) The key parameters if a key needs to
          be generated for the identity. If omitted, use
          KeyChain.getDefaultKeyParams().
        :return: The created PibIdentity instance.
        :rtype: PibIdentity
        """
        if params == None:
            params = KeyChain.getDefaultKeyParams()

        identity = self._keyChain.createIdentityV2(identityName, params)
        self._identityNames.add(identityName)
        return identity

    def saveCertificate(identity, filePath):
        """
        Save the identity's certificate to a file.

        :param PibIdentity identity: The PibIdentity.
        :param str filePath: The file path, which should be writable.
        :return: True if successful.
        :rtype: str
        """
        try:
            certificate = identity.getDefaultKey().getDefaultCertificate()
            return self.saveCertificateToFile(certificate, filePath)
        except Pib.Error:
            return False

    def addSubCertificate(self, subIdentityName, issuer, params = None):
        """
        Issue a certificate for subIdentityName signed by issuer. If the
        identity does not exist, it is created. A new key is generated as the
        default key for the identity. A default certificate for the key is
        signed by the issuer using its default certificate.
        """
        if params == None:
            params = KeyChain.getDefaultKeyParams()

        subIdentity = self.addIdentity(subIdentityName, params)

        request = subIdentity.getDefaultKey().getDefaultCertificate()

        request.setName(request.getKeyName().append("parent").appendVersion(1))

        certificateParams = SigningInfo(issuer)
        # Validity period of 20 years.
        now = Common.getNowMilliseconds()
        certificateParams.setValidityPeriod(
          ValidityPeriod(now, now + 20 * 365 * 24 * 3600 * 1000.0))

        # Skip the AdditionalDescription.

        self._keyChain.sign(request, certificateParams)
        self._keyChain.setDefaultCertificate(subIdentity.getDefaultKey(), request)

        return subIdentity

    def addCertificate(self, key, issuerId):
        """
        Add a self-signed certificate made from the key and issuer ID.

        :param PibKey key: The key for the certificate.
        :param str issuerId: The issuer ID name component for the certificate
          name.
        :return: The new certificate.
        :rtype: CertificateV2
        """
        certificateName = Name(key.getName())
        certificateName.append(issuerId).appendVersion(3)
        certificate = CertificateV2()
        certificate.setName(certificateName)

        # Set the MetaInfo.
        certificate.getMetaInfo().setType(ContentType.KEY)
        # One hour.
        certificate.getMetaInfo().setFreshnessPeriod(3600 * 1000.0)

        # Set the content.
        certificate.setContent(key.getPublicKey())

        params = SigningInfo(key)
        # Validity period of 10 days.
        now = Common.getNowMilliseconds()
        params.setValidityPeriod(
          ValidityPeriod(now, now + 10 * 24 * 3600 * 1000.0))

        self._keyChain.sign(certificate, params)
        return certificate
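
# Usage sketch (added for illustration; the identity names and file path below
# are hypothetical). It exercises the fixture's own API on the in-memory
# PIB/TPM configured in __init__.
def _exampleFixtureUsage():
    fixture = IdentityManagementFixture()
    # Create a root identity and save its default certificate to a file.
    root = fixture.addIdentity(Name("/example/root"))
    fixture.saveCertificate(root, "example-root.cert")
    # Issue a certificate for a sub-identity, signed by the root identity.
    device = fixture.addSubCertificate(Name("/example/root/device"), root)
    return device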
Exemplo n.º 38
0
class StatusServer(object):
    def __init__(self):
        self._face = Face()
        self._keyChain = KeyChain()
        #print(dir(self._keyChain))
        self._certificateName = self._keyChain.getDefaultCertificateName()
        self.registerWithNfd()
        #self.nfdCheck()

    def registerWithNfd(self):
        #self._face = Face()
        # Use the system default key chain and certificate name to sign commands.
        self._face.setCommandSigningInfo(self._keyChain, self._certificateName)

        logging.debug('Register prefix ' + LINK_STATUS_NAME.toUri())
        self._face.registerPrefix(LINK_STATUS_NAME, self.onLinkInterest, self.onRegisterFailed)

        logging.debug('Register prefix ' + METADATA_STATUS_NAME.toUri())
        self._face.registerPrefix(METADATA_STATUS_NAME, self.onMetaDataInterest, self.onRegisterFailed)

        logging.debug('Register prefix ' + PREFIX_STATUS_NAME.toUri())
        self._face.registerPrefix(PREFIX_STATUS_NAME, self.onPrefixInterest, self.onRegisterFailed)

    def onLinkInterest(self, prefix, interest, face, registeredPrefixId, filter):
        self._linkData = getLinkData()
        logging.debug('Received interest for Link')
        self.sendData(prefix, interest, face, registeredPrefixId, self._linkData)

    def onMetaDataInterest(self, prefix, interest, face, registeredPrefixId, filter):
        print("on meta interest")
        print("interest rcvd for: ", interest.getName().toUri())
        print("interest must be fresh value: ", interest.getMustBeFresh())
        processFiles()
        self._metaData = processAndGetMetaData()
        logging.debug('Received interest for Metadata')
        self.sendData(prefix, interest, face, registeredPrefixId, self._metaData)
        print("Data send")

    def onPrefixInterest(self, prefix, interest, face, registeredPrefixId, filter):

        self._prefixData = getPrefixData()
        logging.debug('Received interest for Prefix')
        self.sendData(prefix, interest, face, registeredPrefixId, self._prefixData)

    def sendData(self, prefix, interest, face, registeredPrefixId, content):      #onInterest
        # Publish the content as a sequence of signed segments.
        dataSize = len(content)
        print("Data size: ", dataSize)
        segmentBegin = 0
        segmentNo = 0
        print "Start"
        while segmentBegin < dataSize:
            segmentEnd = segmentBegin + 2000  # Common.MAX_NDN_PACKET_SIZE

            if segmentEnd > dataSize:
                segmentEnd = dataSize

            # Make and sign a Data packet.
            print("Prefix: ")
            print(prefix)
            # Note: the first branch appends the segment number to the prefix
            # Name in place; the else branch's string manipulation relies on
            # the name already ending with a "%"-escaped component.
            if "%" not in str(prefix)[-7:]:
                segmentName = prefix
                segmentName.appendSegment(segmentNo)
            else:
                segmentName = str(prefix)[:-1]
                segmentName += str(segmentNo)
                segmentName = Name(segmentName)
            print("Segment Name: ")
            print(segmentName)

            print "Segmenting data from %d to %d" % (segmentBegin, segmentEnd)
            print("Content: ")
            print(content[segmentBegin:segmentEnd])

            data = Data(segmentName)
            data.setContent(content[segmentBegin:segmentEnd])
            data.getMetaInfo().setFreshnessPeriod(2000)
            self._keyChain.sign(data, self._certificateName)

            if segmentEnd >= dataSize:
                data.getMetaInfo().setFinalBlockId(segmentName[-1])

            segmentBegin = segmentEnd

            print "Sending data " + segmentName.toUri()
            face.putData(data)

            segmentNo += 1
            time.sleep(0.5)
        print "Finish"

    def onRegisterFailed(self, prefix):
        dump("Register failed for prefix", prefix.toUri())

    def nfdCheck(self):
        try:
            try:
                output = subprocess.check_output('nfd-status | grep memphis.edu/internal', shell=True)
            except subprocess.CalledProcessError as e:
                output = e.output
            print("output", output)
            if "memphis.edu/internal" not in output:
                try:
                    self.registerWithNfd()
                    threading.Timer(1, self.nfdCheck).start()
                    self.run()
                except:
                    pass
            else:
                pass
        except:
            pass
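
# Consumer-side sketch (added for illustration, not part of StatusServer): it
# fetches the segments that sendData publishes by requesting one segment
# number at a time until FinalBlockId is seen. The lifetime and callback
# wiring are assumptions.
def fetchSegments(face, prefix, onComplete):
    segments = []

    def requestSegment(segmentNo):
        interest = Interest(Name(prefix).appendSegment(segmentNo))
        interest.setInterestLifetimeMilliseconds(4000)
        face.expressInterest(interest, onSegment, onSegmentTimeout)

    def onSegment(interest, data):
        segments.append(data.getContent().toRawStr())
        finalBlockId = data.getMetaInfo().getFinalBlockId()
        if finalBlockId.getValue().size() > 0 and finalBlockId.equals(data.getName()[-1]):
            onComplete(''.join(segments))
        else:
            requestSegment(len(segments))

    def onSegmentTimeout(interest):
        print("Timeout for " + interest.getName().toUri())

    requestSegment(0)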
Exemplo n.º 39
0
    from utils import getKeyID

    publisher = RepoSocketPublisher(12345)

    data = Data(Name("/localhost/repo-ng/test/001"))
    data.setContent("SUCCESS!")
    data.getMetaInfo().setFreshnessPeriod(1000000)

    identityStorage = MemoryIdentityStorage()
    privateKeyStorage = MemoryPrivateKeyStorage()
    keyChain = KeyChain(IdentityManager(identityStorage, privateKeyStorage), SelfVerifyPolicyManager(identityStorage))
    keyfile = "../keychain/keys/pub_user.pem"
    f = open(keyfile, "r")
    key = RSA.importKey(f.read())
    key_name = Name("/localhost/repo-ng/test/signer")
    signer_name = Name(key_name).append(getKeyID(key))
    key_pub_der = bytearray(key.publickey().exportKey(format="DER"))
    key_pri_der = bytearray(key.exportKey(format="DER"))
    identityStorage.addKey(signer_name, KeyType.RSA, Blob(key_pub_der))
    privateKeyStorage.setKeyPairForKeyName(signer_name, key_pub_der, key_pri_der)
    cert_name = (
        signer_name.getSubName(0, signer_name.size() - 1)
        .append("KEY")
        .append(signer_name[-1])
        .append("ID-CERT")
        .append("0")
    )
    keyChain.sign(data, cert_name)
    publisher.put(data)
Exemplo n.º 40
0
class BmsNode(object):
    def __init__(self):
        self.conf = None
        self._keyChain = None
        self._certificateName = None

        self._dataQueue = dict()
        self._memoryContentCache = None
        self._identityName = None

        self._aggregation = Aggregation()

    def setConfiguration(self, fileName, trustSchemaFile):
        self.conf = BoostInfoParser()
        self.conf.read(fileName)
        self._identityName = Name(self.conf.getNodePrefix())
        self._trustSchemaFile = trustSchemaFile

    def onDataNotFound(self, prefix, interest, face, interestFilterId, filter):
        #print('Data not found for ' + interest.getName().toUri())
        return

    def startPublishing(self):
        # One-time security setup
        self.prepareLogging()

        privateKeyStorage = FilePrivateKeyStorage()
        identityStorage = BasicIdentityStorage()
        policyManager = ConfigPolicyManager(self._trustSchemaFile)

        self._keyChain = KeyChain(
            IdentityManager(identityStorage, privateKeyStorage), policyManager)
        self._certificateName = self._keyChain.createIdentityAndCertificate(
            self._identityName)

        print("My Identity name: " + self._identityName.toUri())
        print("My certificate name: " + self._certificateName.toUri())
        certificateData = self._keyChain.getIdentityManager(
        )._identityStorage.getCertificate(self._certificateName)
        print("My certificate string: " +
              b64encode(certificateData.wireEncode().toBuffer()))
        # self._keyChain.getIdentityCertificate(self._certificateName).)

        self._loop = asyncio.get_event_loop()
        self._face = ThreadsafeFace(self._loop)
        self._keyChain.setFace(self._face)

        self._face.setCommandSigningInfo(self._keyChain, self._certificateName)
        self._memoryContentCache = MemoryContentCache(self._face)

        # We should only ask for cert to be signed upon the first run of a certain aggregator
        if DO_CERT_SETUP:
            if (KeyLocator.getFromSignature(
                    certificateData.getSignature()).getKeyName().equals(
                        self._certificateName.getPrefix(-1))):
                # Need to configure for mini-ndn; aggregation node runs outside of mini-ndn first so that signed cert get installed and mini-ndn won't ask for this again
                print("certificate " + self._certificateName.toUri() +
                      " asking for signature")
                response = urllib2.urlopen(
                    "http://192.168.56.1:5000/bms-cert-hack?cert=" +
                    b64encode(certificateData.wireEncode().toBuffer()) +
                    "&cert_prefix=" + self._identityName.toUri() +
                    '&subject_name=' + self._identityName.toUri()).read()

                signedCertData = Data()
                signedCertData.wireDecode(Blob(b64decode(response)))

                self._memoryContentCache.add(signedCertData)
                cmdline = ['ndnsec-install-cert', '-']
                p = subprocess.Popen(cmdline,
                                     stdin=subprocess.PIPE,
                                     stdout=subprocess.PIPE)
                # desanitize + sign in GET request
                cert, err = p.communicate(response)
                if p.returncode != 0:
                    raise RuntimeError("ndnsec-install-cert error")
            else:
                self._memoryContentCache.add(certificateData)
        else:
            self._memoryContentCache.add(certificateData)

        dataNode = self.conf.getDataNode()
        childrenNode = self.conf.getChildrenNode()

        self._memoryContentCache.registerPrefix(Name(self._identityName),
                                                self.onRegisterFailed,
                                                self.onDataNotFound)

        # For each type of data, we refresh each type of aggregation according to the interval in the configuration
        for i in range(len(dataNode.subtrees)):
            dataType = dataNode.subtrees.keys()[i]
            aggregationParams = self.conf.getProducingParamsForAggregationType(
                dataNode.subtrees.items()[i][1])

            if childrenNode is None:
                self._dataQueue[dataType] = DataQueue(None, None, None)
                self.generateData(dataType, 2, 0)

            for aggregationType in aggregationParams:
                childrenList = OrderedDict()
                if childrenNode is not None:

                    for j in range(len(childrenNode.subtrees)):
                        if dataType in childrenNode.subtrees.items(
                        )[j][1].subtrees['data'].subtrees:
                            if aggregationType in childrenNode.subtrees.items(
                            )[j][1].subtrees['data'].subtrees[
                                    dataType].subtrees:
                                childrenList[childrenNode.subtrees.items()[j][
                                    0]] = self.conf.getProducingParamsForAggregationType(
                                        childrenNode.subtrees.items()[j]
                                        [1].subtrees['data'].subtrees[dataType]
                                    )[aggregationType]

                self.startPublishingAggregation(
                    aggregationParams[aggregationType], childrenList, dataType,
                    aggregationType)
        return

    def startPublishingAggregation(self, params, childrenList, dataType,
                                   aggregationType):
        if __debug__:
            print('Start publishing for ' + dataType + '-' + aggregationType)

        # aggregation calculating and publishing mechanism
        publishingPrefix = Name(
            self._identityName).append(DATA_COMPONENT).append(dataType).append(
                AGGREGATION_COMPONENT).append(aggregationType)
        self._dataQueue[dataType + aggregationType] = DataQueue(
            params, childrenList, publishingPrefix)

        if len(childrenList.keys()) == 0:
            # TODO: make start_time optional for leaf nodes
            self._loop.call_later(int(params['producer_interval']),
                                  self.calculateAggregation, dataType,
                                  aggregationType, childrenList,
                                  int(params['start_time']),
                                  int(params['producer_interval']),
                                  publishingPrefix, True)
        else:
            # express interest for children who produce the same data and aggregation type
            for childName in childrenList.keys():
                name = Name(self._identityName).append(childName).append(
                    DATA_COMPONENT).append(dataType).append(
                        AGGREGATION_COMPONENT).append(aggregationType)
                interest = Interest(name)
                # if start_time is specified, we ask for data starting at start_time;
                # if not, we ask for the right most child and go from there
                if ('start_time' in childrenList[childName]):
                    endTime = int(childrenList[childName]['start_time']) + int(
                        childrenList[childName]['producer_interval'])
                    interest.getName().append(
                        str(childrenList[childName]['start_time'])).append(
                            str(endTime))
                else:
                    # TODO: For now we are playing with historical data, for each run we don't want to miss any data, thus we start with leftMost
                    interest.setChildSelector(0)
                    interest.setMustBeFresh(True)
                interest.setInterestLifetimeMilliseconds(
                    DEFAULT_INTEREST_LIFETIME)
                if __debug__:
                    print('  Issue interest: ' + interest.getName().toUri())
                self._face.expressInterest(interest, self.onData,
                                           self.onTimeout)

        return

    # TODO: once one calculation's decided a child has not answered, we should do another calculation
    def calculateAggregation(self,
                             dataType,
                             aggregationType,
                             childrenList,
                             startTime,
                             interval,
                             publishingPrefix,
                             repeat=False):
        doCalc = True
        dataList = []

        # TODO: an intermediate node cannot produce raw data for now
        if len(childrenList.keys()) != 0:
            for childName in childrenList.keys():
                dataDictKey = self.getDataDictKey(startTime,
                                                  (startTime + interval),
                                                  childName)
                if dataDictKey in self._dataQueue[dataType +
                                                  aggregationType]._dataDict:
                    data = self._dataQueue[
                        dataType + aggregationType]._dataDict[dataDictKey]
                    dataList.append(float(data.getContent().toRawStr()))
                else:
                    #print('Child ' + childName + ' has not replied yet')
                    doCalc = False
                    break
        else:
            for inst in self._dataQueue[dataType]._dataDict.keys():
                if int(inst) >= startTime and int(inst) < startTime + interval:
                    dataList.append(self._dataQueue[dataType]._dataDict[inst])
        if doCalc:
            content = self._aggregation.getAggregation(aggregationType,
                                                       dataList)
            if content:
                publishData = Data(
                    Name(publishingPrefix).append(str(startTime)).append(
                        str(startTime + interval)))
                publishData.setContent(str(content))
                publishData.getMetaInfo().setFreshnessPeriod(
                    DEFAULT_DATA_LIFETIME)
                self._keyChain.sign(publishData, self._certificateName)
                self._memoryContentCache.add(publishData)
                for childName in childrenList.keys():
                    dataDictKey = self.getDataDictKey(startTime,
                                                      (startTime + interval),
                                                      childName)
                    if dataDictKey in self._dataQueue[
                            dataType + aggregationType]._dataDict:
                        del self._dataQueue[
                            dataType + aggregationType]._dataDict[dataDictKey]
                if __debug__:
                    print("Produced: " + publishData.getName().toUri() + "; " +
                          publishData.getContent().toRawStr())

        # repetition of this function only happens for raw data producer, otherwise calculateAggregation is called by each onData
        if repeat:
            self._loop.call_later(interval, self.calculateAggregation,
                                  dataType, aggregationType, childrenList,
                                  startTime + interval, interval,
                                  publishingPrefix, repeat)
        return

    def generateData(self, dataType, interval, startTime):
        self._dataQueue[dataType]._dataDict[str(startTime)] = random.randint(
            0, 9)
        self._loop.call_later(interval, self.generateData, dataType, interval,
                              startTime + interval)
        return

    def onRegisterFailed(self, prefix):
        raise RuntimeError("Register failed for prefix", prefix.toUri())

    def onVerified(self, data):
        print('Data verified: ' + data.getName().toUri())
        return

    def onVerifyFailed(self, data):
        print('Data verification failed: ' + data.getName().toUri())
        return

    def onData(self, interest, data):
        self._keyChain.verifyData(data, self.onVerified, self.onVerifyFailed)

        dataName = data.getName()
        dataQueue = None

        if __debug__:
            print("Got data: " + dataName.toUri() + "; " +
                  data.getContent().toRawStr())
        for i in range(0, len(dataName)):
            if dataName.get(i).toEscapedString() == AGGREGATION_COMPONENT:
                dataType = dataName.get(i - 1).toEscapedString()
                aggregationType = dataName.get(i + 1).toEscapedString()

                startTime = int(dataName.get(i + 2).toEscapedString())
                endTime = int(dataName.get(i + 3).toEscapedString())
                childName = dataName.get(i - 3).toEscapedString()

                dataAndAggregationType = dataType + aggregationType

                dataDictKey = self.getDataDictKey(startTime, endTime,
                                                  childName)
                dataQueue = self._dataQueue[dataAndAggregationType]
                dataQueue._dataDict[dataDictKey] = data
                break

        # TODO: check what if interval/starttime is misconfigured
        if dataQueue:
            self.calculateAggregation(dataType, aggregationType,
                                      dataQueue._childrenList, startTime,
                                      endTime - startTime,
                                      dataQueue._publishingPrefix)

        # Always ask for the next piece of data when we receive this one; assumes interval does not change; this also assumes there are no more components after endTime
        #newInterestName = dataName.getPrefix(i + 2).append(str(endTime)).append(str(endTime + (endTime - startTime)))

        # We don't expect aggregated data name to be continuous within our given time window, so we ask with exclusion instead
        newInterestName = dataName.getPrefix(i + 2)
        newInterest = Interest(interest)
        newInterest.setName(newInterestName)
        newInterest.setChildSelector(0)

        exclude = Exclude()
        exclude.appendAny()
        exclude.appendComponent(dataName.get(i + 2))
        newInterest.setExclude(exclude)

        self._face.expressInterest(newInterest, self.onData, self.onTimeout)
        if __debug__:
            print("  issue interest: " + interest.getName().toUri())

        return

    def onTimeout(self, interest):
        if __debug__:
            print("  interest timeout: " + interest.getName().toUri() +
                  "; reexpress")
            pass
        self._face.expressInterest(interest, self.onData, self.onTimeout)
        return

    def stop(self):
        self._loop.stop()
        if __debug__:
            print("Stopped")
        return

    # This creation of dataDictKey means parent and child should not have the same name
    @staticmethod
    def getDataDictKey(startTime, endTime, childName):
        return str(startTime) + '/' + str(endTime) + '/' + childName


##
# Logging
##

    def prepareLogging(self):
        self.log = logging.getLogger(str(self.__class__))
        self.log.setLevel(logging.DEBUG)
        logFormat = "%(asctime)-15s %(name)-20s %(funcName)-20s (%(levelname)-8s):\n\t%(message)s"
        self._console = logging.StreamHandler()
        self._console.setFormatter(logging.Formatter(logFormat))
        self._console.setLevel(logging.INFO)
        # without this, a lot of ThreadsafeFace errors get swallowed up
        logging.getLogger("trollius").addHandler(self._console)
        self.log.addHandler(self._console)

    def setLogLevel(self, level):
        """
        Set the log level that will be output to standard error
        :param level: A log level constant defined in the logging module (e.g. logging.INFO) 
        """
        self._console.setLevel(level)

    def getLogger(self):
        """
        :return: The logger associated with this node
        :rtype: logging.Logger
        """
        return self.log
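
# Hypothetical driver (added for illustration): the configuration and trust
# schema file names are placeholders. setConfiguration and startPublishing are
# the class's own entry points; startPublishing sets up self._loop but does
# not run it, so the caller drives the event loop.
def runBmsNode(configFile="bms-node.conf", trustSchemaFile="trust-schema.conf"):
    node = BmsNode()
    node.setConfiguration(configFile, trustSchemaFile)
    node.startPublishing()
    try:
        node._loop.run_forever()
    except KeyboardInterrupt:
        node.stop()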
Exemplo n.º 41
0
class TestProducer(ut.TestCase):
    def setUp(self):
        self.decryptionKeys = {}  # key: Name, value: Blob
        self.encryptionKeys = {}  # key: Name, value: Data

        # Reuse the policy_config subdirectory for the temporary SQLite files.
        self.databaseFilePath = "policy_config/test.db"
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            # no such file
            pass

        # Set up the keyChain.
        identityStorage = MemoryIdentityStorage()
        privateKeyStorage = MemoryPrivateKeyStorage()
        self.keyChain = KeyChain(
            IdentityManager(identityStorage, privateKeyStorage),
            NoVerifyPolicyManager())
        identityName = Name("TestProducer")
        self.certificateName = self.keyChain.createIdentityAndCertificate(
            identityName)
        self.keyChain.getIdentityManager().setDefaultIdentity(identityName)

    def tearDown(self):
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            pass

    def createEncryptionKey(self, eKeyName, timeMarker):
        params = RsaKeyParams()
        eKeyName = Name(eKeyName)
        eKeyName.append(timeMarker)

        dKeyBlob = RsaAlgorithm.generateKey(params).getKeyBits()
        eKeyBlob = RsaAlgorithm.deriveEncryptKey(dKeyBlob).getKeyBits()
        self.decryptionKeys[eKeyName] = dKeyBlob

        keyData = Data(eKeyName)
        keyData.setContent(eKeyBlob)
        self.keyChain.sign(keyData, self.certificateName)
        self.encryptionKeys[eKeyName] = keyData

    def test_content_key_request(self):
        prefix = Name("/prefix")
        suffix = Name("/a/b/c")
        expectedInterest = Name(prefix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_READ)
        expectedInterest.append(suffix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_E_KEY)

        cKeyName = Name(prefix)
        cKeyName.append(Encryptor.NAME_COMPONENT_SAMPLE)
        cKeyName.append(suffix)
        cKeyName.append(Encryptor.NAME_COMPONENT_C_KEY)

        timeMarker = Name("20150101T100000/20150101T120000")
        testTime1 = Schedule.fromIsoString("20150101T100001")
        testTime2 = Schedule.fromIsoString("20150101T110001")
        testTimeRounded1 = Name.Component("20150101T100000")
        testTimeRounded2 = Name.Component("20150101T110000")
        testTimeComponent2 = Name.Component("20150101T110001")

        # Create content keys required for this test case:
        for i in range(suffix.size()):
            self.createEncryptionKey(expectedInterest, timeMarker)
            expectedInterest = expectedInterest.getPrefix(-2).append(
                Encryptor.NAME_COMPONENT_E_KEY)

        expressInterestCallCount = [0]

        # Prepare a TestFace to instantly answer calls to expressInterest.
        class TestFace(object):
            def __init__(self, handleExpressInterest):
                self.handleExpressInterest = handleExpressInterest

            def expressInterest(self, interest, onData, onTimeout,
                                onNetworkNack):
                return self.handleExpressInterest(interest, onData, onTimeout,
                                                  onNetworkNack)

        def handleExpressInterest(interest, onData, onTimeout, onNetworkNack):
            expressInterestCallCount[0] += 1

            interestName = Name(interest.getName())
            interestName.append(timeMarker)
            self.assertTrue(interestName in self.encryptionKeys)
            onData(interest, self.encryptionKeys[interestName])

            return 0

        face = TestFace(handleExpressInterest)

        # Verify that the content key is correctly encrypted for each domain, and
        # the produce method encrypts the provided data with the same content key.
        testDb = Sqlite3ProducerDb(self.databaseFilePath)
        producer = Producer(prefix, suffix, face, self.keyChain, testDb)
        contentKey = [None]  # Blob

        def checkEncryptionKeys(result, testTime, roundedTime,
                                expectedExpressInterestCallCount):
            self.assertEqual(expectedExpressInterestCallCount,
                             expressInterestCallCount[0])

            self.assertEqual(True, testDb.hasContentKey(testTime))
            contentKey[0] = testDb.getContentKey(testTime)

            params = EncryptParams(EncryptAlgorithmType.RsaOaep)
            for i in range(len(result)):
                key = result[i]
                keyName = key.getName()
                self.assertEqual(cKeyName, keyName.getSubName(0, 6))
                self.assertEqual(keyName.get(6), roundedTime)
                self.assertEqual(keyName.get(7), Encryptor.NAME_COMPONENT_FOR)
                self.assertEqual(True,
                                 keyName.getSubName(8) in self.decryptionKeys)

                decryptionKey = self.decryptionKeys[keyName.getSubName(8)]
                self.assertEqual(True, decryptionKey.size() != 0)
                encryptedKeyEncoding = key.getContent()

                content = EncryptedContent()
                content.wireDecode(encryptedKeyEncoding)
                encryptedKey = content.getPayload()
                retrievedKey = RsaAlgorithm.decrypt(decryptionKey,
                                                    encryptedKey, params)

                self.assertTrue(contentKey[0].equals(retrievedKey))

            self.assertEqual(3, len(result))

        # An initial test to confirm that keys are created for this time slot.
        contentKeyName1 = producer.createContentKey(
            testTime1, lambda keys: checkEncryptionKeys(
                keys, testTime1, testTimeRounded1, 3))

        # Verify that we do not repeat the search for e-keys. The total
        #   expressInterestCallCount should be the same.
        contentKeyName2 = producer.createContentKey(
            testTime2, lambda keys: checkEncryptionKeys(
                keys, testTime2, testTimeRounded2, 3))

        # Confirm content key names are correct
        self.assertEqual(cKeyName, contentKeyName1.getPrefix(-1))
        self.assertEqual(testTimeRounded1, contentKeyName1.get(6))
        self.assertEqual(cKeyName, contentKeyName2.getPrefix(-1))
        self.assertEqual(testTimeRounded2, contentKeyName2.get(6))

        # Confirm that produce encrypts with the correct key and has the right name.
        testData = Data()
        producer.produce(testData, testTime2, Blob(DATA_CONTENT, False))

        producedName = testData.getName()
        self.assertEqual(cKeyName.getPrefix(-1), producedName.getSubName(0, 5))
        self.assertEqual(testTimeComponent2, producedName.get(5))
        self.assertEqual(Encryptor.NAME_COMPONENT_FOR, producedName.get(6))
        self.assertEqual(cKeyName, producedName.getSubName(7, 6))
        self.assertEqual(testTimeRounded2, producedName.get(13))

        dataBlob = testData.getContent()

        dataContent = EncryptedContent()
        dataContent.wireDecode(dataBlob)
        encryptedData = dataContent.getPayload()
        initialVector = dataContent.getInitialVector()

        params = EncryptParams(EncryptAlgorithmType.AesCbc, 16)
        params.setInitialVector(initialVector)
        decryptTest = AesAlgorithm.decrypt(contentKey[0], encryptedData,
                                           params)
        self.assertTrue(decryptTest.equals(Blob(DATA_CONTENT, False)))

    def test_content_key_search(self):
        timeMarkerFirstHop = Name("20150101T070000/20150101T080000")
        timeMarkerSecondHop = Name("20150101T080000/20150101T090000")
        timeMarkerThirdHop = Name("20150101T100000/20150101T110000")

        prefix = Name("/prefix")
        suffix = Name("/suffix")
        expectedInterest = Name(prefix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_READ)
        expectedInterest.append(suffix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_E_KEY)

        cKeyName = Name(prefix)
        cKeyName.append(Encryptor.NAME_COMPONENT_SAMPLE)
        cKeyName.append(suffix)
        cKeyName.append(Encryptor.NAME_COMPONENT_C_KEY)

        testTime = Schedule.fromIsoString("20150101T100001")

        # Create content keys required for this test case:
        self.createEncryptionKey(expectedInterest, timeMarkerFirstHop)
        self.createEncryptionKey(expectedInterest, timeMarkerSecondHop)
        self.createEncryptionKey(expectedInterest, timeMarkerThirdHop)

        requestCount = [0]

        # Prepare a TestFace to instantly answer calls to expressInterest.
        class TestFace(object):
            def __init__(self, handleExpressInterest):
                self.handleExpressInterest = handleExpressInterest

            def expressInterest(self, interest, onData, onTimeout,
                                onNetworkNack):
                return self.handleExpressInterest(interest, onData, onTimeout,
                                                  onNetworkNack)

        def handleExpressInterest(interest, onData, onTimeout, onNetworkNack):
            self.assertEqual(expectedInterest, interest.getName())

            gotInterestName = False
            for i in range(3):
                interestName = Name(interest.getName())
                if i == 0:
                    interestName.append(timeMarkerFirstHop)
                elif i == 1:
                    interestName.append(timeMarkerSecondHop)
                elif i == 2:
                    interestName.append(timeMarkerThirdHop)

                # matchesName will check the Exclude.
                if interest.matchesName(interestName):
                    gotInterestName = True
                    requestCount[0] += 1
                    break

            if gotInterestName:
                onData(interest, self.encryptionKeys[interestName])

            return 0

        face = TestFace(handleExpressInterest)

        # Verify that if a key is found, but not within the right time slot, the
        # search is refined until a valid time slot is found.
        testDb = Sqlite3ProducerDb(self.databaseFilePath)
        producer = Producer(prefix, suffix, face, self.keyChain, testDb)

        def onEncryptedKeys(result):
            self.assertEqual(3, requestCount[0])
            self.assertEqual(1, len(result))

            keyData = result[0]
            keyName = keyData.getName()
            self.assertEqual(cKeyName, keyName.getSubName(0, 4))
            self.assertEqual(timeMarkerThirdHop.get(0), keyName.get(4))
            self.assertEqual(Encryptor.NAME_COMPONENT_FOR, keyName.get(5))
            self.assertEqual(expectedInterest.append(timeMarkerThirdHop),
                             keyName.getSubName(6))

        producer.createContentKey(testTime, onEncryptedKeys)

    def test_content_key_timeout(self):
        prefix = Name("/prefix")
        suffix = Name("/suffix")
        expectedInterest = Name(prefix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_READ)
        expectedInterest.append(suffix)
        expectedInterest.append(Encryptor.NAME_COMPONENT_E_KEY)

        testTime = Schedule.fromIsoString("20150101T100001")

        timeoutCount = [0]

        # Prepare a TestFace to instantly answer calls to expressInterest.
        class TestFace(object):
            def __init__(self, handleExpressInterest):
                self.handleExpressInterest = handleExpressInterest

            def expressInterest(self, interest, onData, onTimeout,
                                onNetworkNack):
                return self.handleExpressInterest(interest, onData, onTimeout,
                                                  onNetworkNack)

        def handleExpressInterest(interest, onData, onTimeout, onNetworkNack):
            self.assertEqual(expectedInterest, interest.getName())
            timeoutCount[0] += 1
            onTimeout(interest)

            return 0

        face = TestFace(handleExpressInterest)

        # Verify that if no response is received, the producer appropriately times
        # out. The result vector should not contain elements that have timed out.
        testDb = Sqlite3ProducerDb(self.databaseFilePath)
        producer = Producer(prefix, suffix, face, self.keyChain, testDb)

        def onEncryptedKeys(result):
            self.assertEqual(4, timeoutCount[0])
            self.assertEqual(0, len(result))

        producer.createContentKey(testTime, onEncryptedKeys)
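
# Added for convenience: the standard unittest entry point, assuming the usual
# "import unittest as ut" implied by ut.TestCase above.
if __name__ == '__main__':
    ut.main()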
Exemplo n.º 42
0
class RegisterSongList(object):


    def __init__(self, prefix="/ndn/edu/ucla/remap/music/list"):

        logging.basicConfig()
        '''# What are these logs for? myIP="192.168.1.1",lightIP="192.168.1.50",
        self.log = logging.getLogger("RegisterSongList")
        self.log.setLevel(logging.DEBUG)
        sh = logging.StreamHandler()
        sh.setLevel(logging.DEBUG)
        self.log.addHandler(sh)
        fh = logging.FileHandler("RegisterSongList.log")
        fh.setLevel(logging.INFO)
        self.log.addHandler(fh)'''
        self.device = "PC3"
        self.deviceComponent = Name.Component(self.device)
        self.excludeDevice = None
        #self.songList = originalList

        #security?
        self.prefix = Name(prefix)
        self.changePrefix = Name("/ndn/edu/ucla/remap/music/storage")
        self.keychain = KeyChain()
        self.certificateName = self.keychain.getDefaultCertificateName()

        self.address = ""
        self._isStopped = True

        #self.payloadBuffer = []

        #self.kinetsender = KinetSender(myIP, playerIP,self.STRAND_SIZE*self.COLORS_PER_LIGHT)

    def start(self):
        print "reg start"
        self.loop = asyncio.get_event_loop()
        self.face = ThreadsafeFace(self.loop, self.address)
        self.face.setCommandSigningInfo(self.keychain, self.certificateName)
        self.face.registerPrefix(self.prefix, self.onInterest, self.onRegisterFailed)
        self._isStopped = False
        self.face.stopWhen(lambda: self._isStopped)
        try:
            self.loop.run_forever()
        except KeyboardInterrupt:
            sys.exit()
        finally:
            self.stop()

    def stop(self):
        self.loop.close()
        #self.kinetsender.stop = True
        #self.kinetsender.complete.wait()
        self.face.shutdown()
        self.face = None
        sys.exit(1)

    def signData(self, data):
        data.setSignature(Sha256WithRsaSignature())
        #self.keychain.sign(data,self.certificateName)

    def onInterest(self, prefix, interest, transport, registeredPrefixId):
        print "received interest"
        initInterest = Name(interest.getName())
        print "interest name:", initInterest.toUri()
        #d = Data(interest.getName().getPrefix(prefix.size()+1))
        #self.excludeDevice = interest.getName().get(prefix.size())
        #initInterest = interest.getName()
        d = Data(interest.getName().append(self.deviceComponent))
        try:
            print "start to set data's content"

            currentString = ','.join(currentList)
            d.setContent("songList of " + self.device + ":" + currentString + "\n")
            self.face.registerPrefix(self.changePrefix, self.onInterest, self.onRegisterFailed)

        except KeyboardInterrupt:
            print "key interrupt"
            sys.exit(1)
        except Exception as e:
            print e
            d.setContent("Bad command\n")
        finally:
            self.keychain.sign(d, self.certificateName)

        encodedData = d.wireEncode()
        transport.send(encodedData.toBuffer())
        print d.getName().toUri()
        print d.getContent()

        self.stop()

        '''print"remove register"
        self.face.removeRegisteredPrefix(registeredPrefixId)
        time.sleep(30)
        #sleep 30s which means user cannot update the song list twice within 1 minutes
        print"register again"
        self.face.registerPrefix(self.prefix, self.onInterest, self.onRegisterFailed)'''

    def onRegisterFailed(self, prefix):
        self.log.error("Could not register " + prefix.toUri())
        self.stop()
Exemplo n.º 43
0
def benchmarkEncodeDataSeconds(nIterations, useComplex, useCrypto):
    """
    Loop to encode a data packet nIterations times.

    :param int nIterations: The number of iterations.
    :param bool useComplex: If true, use a large name, large content and all
      fields. If false, use a small name, small content and only required
      fields.
    :param bool useCrypto: If true, sign the data packet.  If false, use a blank
      signature.
    :return: A tuple (duration, encoding) where duration is the number of
      seconds for all iterations and encoding is the wire encoding.
    :rtype: (float, Blob)
    """
    if useComplex:
        # Use a large name and content.
        name = Name(
            "/ndn/ucla.edu/apps/lwndn-test/numbers.txt/%FD%05%05%E8%0C%CE%1D/%00"
        )

        contentString = ""
        count = 1
        contentString += "%d" % count
        count += 1
        while len(contentString) < 1115:
            contentString += " %d" % count
            count += 1
        content = Name.fromEscapedString(contentString)
    else:
        # Use a small name and content.
        name = Name("/test")
        content = Name.fromEscapedString("abc")
    finalBlockId = Name("/%00")[0]

    # Initialize the private key storage in case useCrypto is true.
    identityStorage = MemoryIdentityStorage()
    privateKeyStorage = MemoryPrivateKeyStorage()
    keyChain = KeyChain(IdentityManager(identityStorage, privateKeyStorage),
                        SelfVerifyPolicyManager(identityStorage))
    keyName = Name("/testname/DSK-123")
    certificateName = keyName.getSubName(
        0,
        keyName.size() - 1).append("KEY").append(
            keyName[-1]).append("ID-CERT").append("0")
    identityStorage.addKey(keyName, KeyType.RSA,
                           Blob(DEFAULT_RSA_PUBLIC_KEY_DER))
    privateKeyStorage.setKeyPairForKeyName(keyName, KeyType.RSA,
                                           DEFAULT_RSA_PUBLIC_KEY_DER,
                                           DEFAULT_RSA_PRIVATE_KEY_DER)

    # Set up signatureBits in case useCrypto is false.
    signatureBits = Blob(bytearray(256))
    emptyBlob = Blob([])

    start = getNowSeconds()
    for i in range(nIterations):
        data = Data(name)
        data.setContent(content)
        if useComplex:
            data.getMetaInfo().setFreshnessPeriod(1000)
            data.getMetaInfo().setFinalBlockId(finalBlockId)

        if useCrypto:
            # This sets the signature fields.
            keyChain.sign(data, certificateName)
        else:
            # Imitate IdentityManager.signByCertificate to set up the signature
            # fields, but don't sign.
            sha256Signature = data.getSignature()
            keyLocator = sha256Signature.getKeyLocator()
            keyLocator.setType(KeyLocatorType.KEYNAME)
            keyLocator.setKeyName(certificateName)
            sha256Signature.setSignature(signatureBits)

        encoding = data.wireEncode()

    finish = getNowSeconds()

    return (finish - start, encoding)
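
# Hypothetical driver (added for illustration): time signed encodings of the
# simple packet and report the result. The iteration count is arbitrary.
def reportEncodeBenchmark(nIterations=1000):
    duration, encoding = benchmarkEncodeDataSeconds(nIterations, False, True)
    print("Encoded %d signed packets (%d bytes each) in %f seconds" %
          (nIterations, encoding.size(), duration))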
Exemplo n.º 44
0
class PutFile:
    def __init__(self, repoDataPrefix, repoCommandPrefix, face, loop):
        self._repoCommandPrefix = repoCommandPrefix
        
        self._dataName = Name(repoDataPrefix)
        self._nDataToPrepare = 10
        self._interestLifetime = 4000
        
        self._mData = []
        self._face = face
        self._loop = loop
        
        self._keyChain = KeyChain()
        self._certificateName = self._keyChain.getDefaultCertificateName()

        self._currentSegmentNo = 0
        self._isFinished = False
        self.insertStream = None
        self.fileSize = 0
        
        self._segmentSize = 1000
        self._endBlockId = 0

        self._count = 0

    def start(self):
        self._face.registerPrefix(self._dataName,self.onInterest,self.onRegisterFailed)
        self.startInsertCommand()
          
    # Just for appending segment number 0, making it encoded as %00%00, instead of %00 in PyNDN.
    # Potential bug/loss of interoperability?
    @staticmethod
    def componentFromSingleDigitNumberWithMarkerCXX(number, marker):
        value = []
        value.append(number & 0xFF)
        number >>= 8
        value.reverse()
        value.insert(0, marker)
        return Name.Component(Blob(value, False))
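
    # Illustration (added comment, based on the note above): for segment 0 this
    # builds the two-byte component [marker, value] = [0x00, 0x00], i.e. "%00%00"
    # in URI form, whereas PyNDN's Name().appendSegment(0) produces the
    # single-byte "%00".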

    def prepareNextData(self,referenceSegmentNo):
        if (self._isFinished):
            return
        if  self._mData:
            maxSegmentNo = len(self._mData)
            if(maxSegmentNo - referenceSegmentNo >= self._nDataToPrepare):
                return            
            self._nDataToPrepare -= (maxSegmentNo - referenceSegmentNo)
        
        # Prepare _nDataToPrepare number of data segments, and insert them into _mData
        for i in range(0,self._nDataToPrepare):
            buffer = self.insertStream.read(self._segmentSize)
            
            if not buffer:
                self._isFinished = True
                # For now, repo-ng cannot handle FinalBlockID: 
                # ERROR: Invalid length for nonNegativeInteger (only 1, 2, 4, and 8 are allowed)
                #d.getMetaInfo().setFinalBlockID(self._currentSegmentNo)
                
                # using EndBlockId in repo protocol spec instead
                break
            
            if self._currentSegmentNo == 0:
                dataName = Name(self._dataName).append(PutFile.componentFromSingleDigitNumberWithMarkerCXX(0, 0x00))
            else:
                dataName = Name(self._dataName).appendSegment(self._currentSegmentNo)
            d = Data(dataName)
            
            # Biggest mistake: wrong data name. Still finding out why, though
            # Original one:
            #d = Data(Name(self._dataName).append(self._currentSegmentNo))
            
            print "Given data name", d.getName().toUri(), " Segment no", self._currentSegmentNo
            
            d.setContent(buffer)
            self._keyChain.sign(d, self._certificateName)
            
            self._mData.append(d)

            self._currentSegmentNo += 1
            

    def startInsertCommand(self):
        parameters = RepoCommandParameter()
        parameters.setName(self._dataName)
        parameters.setStartBlockId(0)
        
        # Adding endBlockID so that repo does not crash on putfile
        # should be replaced by FinalBlockID in data.metainfo, once that's fixed
        parameters.setEndBlockId(math.trunc(self.fileSize/self._segmentSize))
        #print (math.trunc(self.fileSize / self._segmentSize))
        self._endBlockId = math.trunc(self.fileSize / self._segmentSize)
        
        commandInterest = self.generateCommandInterest(self._repoCommandPrefix, "insert", parameters)
        self._face.makeCommandInterest(commandInterest)
        #print commandInterest.getName().toUri()
        
        self._face.expressInterest(commandInterest,self.onInsertCommandResponse, self.onInsertCommandTimeout)

    def onInsertCommandResponse(self,interest,data):
        #print "receive the data of command interest"
        pass
        
    def onInsertCommandTimeout(self,interest):
        raise Exception("command response timeout")

    def onInterest(self, prefix, interest, transport, registeredPrefixId):
        #print "enter the onInterest"
        if (interest.getName().size() != prefix.size() + 1):
            print "Unrecognized Interest"
            sys.exit(1)
        if self._count > 1000:
            print interest.getName().toUri()
            self._count -= 1000
        self._count += 1
        segmentComponent = interest.getName().get(prefix.size())
        segmentNo = segmentComponent.toSegment()

        self.prepareNextData(segmentNo)

        # this could be slightly inferior to the logic in repo-ng-cxx test
        if (segmentNo >= self._currentSegmentNo):
            print "requested segment does not exist, or is not prepared"
            return
            #sys.exit(1)

        item = self._mData[segmentNo]

        encodedData = item.wireEncode()
        transport.send(encodedData.toBuffer())
        '''if (segmentNo == self.fileSize):
                time.sleep()'''
		
        
        # TODO: if it's the last segment, call stop after a safe period of time...
        # Or should send check after a safe amount of time, and based on the results 
        # of check, call stop.
        
        
    def onRegisterFailed(self, prefix):
        print "Could not register " + prefix.toUri()
        sys.exit(1)

    def stop(self):
        self.insertStream.close()
    
    def generateCommandInterest(self, commandPrefix, command, commandParameter):     
        interest = Interest(commandPrefix.append(command).append(commandParameter.wireEncode()))
        interest.setInterestLifetimeMilliseconds(self._interestLifetime)
        return interest
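
# Hypothetical driver (added for illustration): the prefixes and file path are
# placeholders, and "import os" is assumed. fileSize and insertStream are the
# attributes that startInsertCommand and prepareNextData read.
def runPutFile(face, loop, filePath):
    putFile = PutFile(Name("/example/data"), Name("/example/repo"), face, loop)
    putFile.fileSize = os.path.getsize(filePath)
    putFile.insertStream = open(filePath, 'rb')
    putFile.start()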
Exemplo n.º 45
0
class RegisterSongList(object):
    def __init__(self, prefix="/ndn/edu/ucla/remap/music/list"):

        logging.basicConfig()
        '''# What are these logs for? myIP="192.168.1.1",lightIP="192.168.1.50",
        self.log = logging.getLogger("RegisterSongList")
        self.log.setLevel(logging.DEBUG)
        sh = logging.StreamHandler()
        sh.setLevel(logging.DEBUG)
        self.log.addHandler(sh)
        fh = logging.FileHandler("RegisterSongList.log")
        fh.setLevel(logging.INFO)
        self.log.addHandler(fh)'''
        self.device = "PC3"
        self.deviceComponent = Name.Component(self.device)
        self.excludeDevice = None
        #self.songList = originalList

        #security?
        self.prefix = Name(prefix)
        self.changePrefix = Name("/ndn/edu/ucla/remap/music/storage")
        self.keychain = KeyChain()
        self.certificateName = self.keychain.getDefaultCertificateName()

        self.address = ""
        self._isStopped = True

    #self.payloadBuffer = []

    #self.kinetsender = KinetSender(myIP, playerIP,self.STRAND_SIZE*self.COLORS_PER_LIGHT)

    def start(self):
        print "reg start"
        self.loop = asyncio.get_event_loop()
        self.face = ThreadsafeFace(self.loop, self.address)
        self.face.setCommandSigningInfo(self.keychain, self.certificateName)
        self.face.registerPrefix(self.prefix, self.onInterest,
                                 self.onRegisterFailed)
        self._isStopped = False
        self.face.stopWhen(lambda: self._isStopped)
        try:
            self.loop.run_forever()
        except KeyboardInterrupt:

            sys.exit()
        finally:
            self.stop()

    def stop(self):
        self.loop.close()
        #self.kinetsender.stop = True
        #self.kinetsender.complete.wait()
        self.face.shutdown()
        self.face = None
        sys.exit(1)

    def signData(self, data):
        data.setSignature(Sha256WithRsaSignature())
        #self.keychain.sign(data,self.certificateName)

    def onInterest(self, prefix, interest, transport, registeredPrefixId):
        print "received interest"
        initInterest = Name(interest.getName())
        print "interest name:", initInterest.toUri()
        #d = Data(interest.getName().getPrefix(prefix.size()+1))
        #self.excludeDevice = interest.getName().get(prefix.size())
        #initInterest = interest.getName()
        d = Data(interest.getName().append(self.deviceComponent))
        try:
            print "start to set data's content"

            currentString = ','.join(currentList)
            d.setContent("songList of " + self.device + ":" + currentString +
                         "\n")
            self.face.registerPrefix(self.changePrefix, self.onInterest,
                                     self.onRegisterFailed)

        except KeyboardInterrupt:
            print "key interrupt"
            sys.exit(1)
        except Exception as e:
            print e
            d.setContent("Bad command\n")
        finally:
            self.keychain.sign(d, self.certificateName)

        encodedData = d.wireEncode()
        transport.send(encodedData.toBuffer())
        print d.getName().toUri()
        print d.getContent()

        self.stop()
        '''print"remove register"
        self.face.removeRegisteredPrefix(registeredPrefixId)
        time.sleep(30)
        #sleep 30s which means user cannot update the song list twice within 1 minutes
	print"register again"
        self.face.registerPrefix(self.prefix, self.onInterest, self.onRegisterFailed)'''

    def onRegisterFailed(self, prefix):
        self.log.error("Could not register " + prefix.toUri())
        self.stop()
Exemplo n.º 46
0
def benchmarkEncodeDataSeconds(nIterations, useComplex, useCrypto, keyType):
    """
    Loop to encode a data packet nIterations times.

    :param int nIterations: The number of iterations.
    :param bool useComplex: If true, use a large name, large content and all
      fields. If false, use a small name, small content and only required
      fields.
    :param bool useCrypto: If true, sign the data packet.  If false, use a blank
      signature.
    :param KeyType keyType: KeyType.RSA or EC, used if useCrypto is True.
    :return: A tuple (duration, encoding) where duration is the number of
      seconds for all iterations and encoding is the wire encoding.
    :rtype: (float, Blob)
    """
    if useComplex:
        # Use a large name and content.
        name = Name(
            "/ndn/ucla.edu/apps/lwndn-test/numbers.txt/%FD%05%05%E8%0C%CE%1D/%00"
        )

        contentString = ""
        count = 1
        contentString += "%d" % count
        count += 1
        while len(contentString) < 1115:
            contentString += " %d" % count
            count += 1
        content = Name.fromEscapedString(contentString)
    else:
        # Use a small name and content.
        name = Name("/test")
        content = Name.fromEscapedString("abc")
    finalBlockId = Name("/%00")[0]

    # Initialize the KeyChain in case useCrypto is true.
    keyChain = KeyChain("pib-memory:", "tpm-memory:")
    keyChain.importSafeBag(
        SafeBag(
            Name("/testname/KEY/123"),
            Blob(
                DEFAULT_EC_PRIVATE_KEY_DER if keyType == KeyType.EC else
                DEFAULT_RSA_PRIVATE_KEY_DER, False),
            Blob(
                DEFAULT_EC_PUBLIC_KEY_DER if keyType == KeyType.EC else
                DEFAULT_RSA_PUBLIC_KEY_DER, False)))
    certificateName = keyChain.getDefaultCertificateName()

    # Set up signatureBits in case useCrypto is false.
    signatureBits = Blob(bytearray(256))

    start = getNowSeconds()
    for i in range(nIterations):
        data = Data(name)
        data.setContent(content)
        if useComplex:
            data.getMetaInfo().setFreshnessPeriod(1000)
            data.getMetaInfo().setFinalBlockId(finalBlockId)

        if useCrypto:
            # This sets the signature fields.
            keyChain.sign(data)
        else:
            # Imitate IdentityManager.signByCertificate to set up the signature
            # fields, but don't sign.
            sha256Signature = data.getSignature()
            keyLocator = sha256Signature.getKeyLocator()
            keyLocator.setType(KeyLocatorType.KEYNAME)
            keyLocator.setKeyName(certificateName)
            sha256Signature.setSignature(signatureBits)

        encoding = data.wireEncode()

    finish = getNowSeconds()

    return (finish - start, encoding)
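
# Hypothetical comparison driver (added for illustration): run the same
# benchmark for both key types this variant supports.
def compareKeyTypes(nIterations=1000):
    for keyType in (KeyType.RSA, KeyType.EC):
        duration, _ = benchmarkEncodeDataSeconds(nIterations, False, True, keyType)
        print("keyType %s: %f seconds for %d iterations" % (keyType, duration, nIterations))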
Exemplo n.º 47
0
from pyndn import Name, Data
from pyndn.security import KeyChain

import sys

from RepoSocketPublisher import RepoSocketPublisher

publisher = RepoSocketPublisher(12345)

keyChain = KeyChain()

max_packets = 50000

total_size = 0

n = Name("/test").append(str(0))
d = Data(n)
d.setContent("this is a test.")
d.getMetaInfo().setFreshnessPeriod(4000)
keyChain.sign(d, keyChain.getDefaultCertificateName())

for i in range(0, max_packets + 1):
    n = Name("/test").append(str(i))
    d.setName(n)
    if i % 100000 == 0:
        print str(i)
    if i == max_packets:
        print str(total_size)
    total_size = total_size + d.wireEncode().size()
    publisher.put(d)
Exemplo n.º 48
0
class BaseNode(object):
    """
    This class contains methods/attributes common to both node and controller.
    
    """
    def __init__(self):
        """
        Initialize the network and security classes for the node
        """
        super(BaseNode, self).__init__()

        self._identityStorage = IotIdentityStorage()
        self._identityManager = IotIdentityManager(self._identityStorage)
        self._policyManager = IotPolicyManager(self._identityStorage)

        # hopefully there is some private/public key pair available
        self._keyChain = KeyChain(self._identityManager, self._policyManager)

        self._registrationFailures = 0
        self._prepareLogging()

        self._setupComplete = False
        self._instanceSerial = None

        # waiting devices register this prefix and respond to discovery
        # or configuration interest
        self._hubPrefix = Name('/localhop/configure')

    def getSerial(self):
        """
         Since you may wish to run two nodes on a Raspberry Pi, each
         node will generate a unique serial number each time it starts up.
        """
        if self._instanceSerial is None:
            prefixLen = 4
            prefix = ''
            for i in range(prefixLen):
                prefix += (chr(random.randint(0, 0xff)))
            suffix = self.getDeviceSerial().lstrip('0')
            self._instanceSerial = '-'.join([prefix.encode('hex'), suffix])
        return self._instanceSerial

##
# Logging
##

    def _prepareLogging(self):
        self.log = logging.getLogger(str(self.__class__))
        self.log.setLevel(logging.DEBUG)
        logFormat = "%(asctime)-15s %(name)-20s %(funcName)-20s (%(levelname)-8s):\n\t%(message)s"
        self._console = logging.StreamHandler()
        self._console.setFormatter(logging.Formatter(logFormat))
        self._console.setLevel(logging.INFO)
        # without this, a lot of ThreadsafeFace errors get swallowed up
        logging.getLogger("trollius").addHandler(self._console)
        self.log.addHandler(self._console)

    def setLogLevel(self, level):
        """
        Set the log level that will be output to standard error
        :param level: A log level constant defined in the logging module (e.g. logging.INFO) 
        """
        self._console.setLevel(level)

    def getLogger(self):
        """
        :return: The logger associated with this node
        :rtype: logging.Logger
        """
        return self.log

###
# Startup and shutdown
###

    def beforeLoopStart(self):
        """
        Called before the event loop starts.
        """
        pass

    def getDefaultCertificateName(self):
        try:
            certName = self._identityStorage.getDefaultCertificateNameForIdentity(
                self._policyManager.getDeviceIdentity())
        except SecurityException:
            certName = self._keyChain.getDefaultCertificateName()

        return certName

    def start(self):
        """
        Begins the event loop. After this, the node's Face is set up and it can
        send/receive interests+data
        """
        self.log.info("Starting up")
        self.loop = asyncio.get_event_loop()
        self.face = ThreadsafeFace(self.loop, '')
        self.face.setCommandSigningInfo(self._keyChain,
                                        self.getDefaultCertificateName())
        self._keyChain.setFace(self.face)

        self._isStopped = False
        self.face.stopWhen(lambda: self._isStopped)
        self.beforeLoopStart()

        try:
            self.loop.run_forever()
        except KeyboardInterrupt:
            pass
        except Exception as e:
            self.log.exception(e, exc_info=True)
        finally:
            self._isStopped = True

    def stop(self):
        """
        Stops the node, taking it off the network
        """
        self.log.info("Shutting down")
        self._isStopped = True

###
# Data handling
###

    def signData(self, data):
        """
        Sign the data with our network certificate
        :param pyndn.Data data: The data to sign
        """
        self._keyChain.sign(data, self.getDefaultCertificateName())

    def sendData(self, data, transport, sign=True):
        """
        Reply to an interest with a data packet, optionally signing it.
        :param pyndn.Data data: The response data packet
        :param pyndn.Transport transport: The transport to send the data through. This is 
            obtained from an incoming interest handler
        :param boolean sign: (optional, default=True) Whether the response must be signed. 
        """
        if sign:
            self.signData(data)
        transport.send(data.wireEncode().buf())

###
#
#
##

    def onRegisterFailed(self, prefix):
        """
        Called when the node cannot register its name with the forwarder
        :param pyndn.Name prefix: The network name that failed registration
        """
        if self._registrationFailures < 5:
            self._registrationFailures += 1
            self.log.warn("Could not register {}, retry: {}/{}".format(
                prefix.toUri(), self._registrationFailures, 5))
            self.face.registerPrefix(self.prefix, self._onCommandReceived,
                                     self.onRegisterFailed)
        else:
            self.log.critical("Could not register device prefix, ABORTING")
            self._isStopped = True

    def verificationFailed(self, dataOrInterest):
        """
        Called when verification of a data packet or command interest fails.
        :param pyndn.Data or pyndn.Interest: The packet that could not be verified
        """
        self.log.info("Received invalid" + dataOrInterest.getName().toUri())

    @staticmethod
    def getDeviceSerial():
        """
        Find and return the serial number of the Raspberry Pi. Provided in case
        you wish to distinguish data from nodes with the same name by serial.
        :return: The serial number extracted from device information in /proc/cpuinfo
        :rtype: str
        """
        with open('/proc/cpuinfo') as f:
            for line in f:
                if line.startswith('Serial'):
                    return line.split(':')[1].strip()
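BaseNode is meant to be subclassed: the prefix it re-registers (self.prefix) and the interest callback it retries with (self._onCommandReceived) are expected to come from the subclass. As a minimal sketch only (the module name base_node and the concrete node are assumptions, not part of the original project), a node built on this class could look like the following; it registers its prefix in beforeLoopStart, since start() creates the face before entering the loop, and answers every interest with a signed echo via sendData:

# Hedged sketch of a BaseNode subclass; "base_node" is a hypothetical module name.
from pyndn import Name, Data
from base_node import BaseNode

class EchoNode(BaseNode):
    def __init__(self, prefix="/home/echo"):
        super(EchoNode, self).__init__()
        self.prefix = Name(prefix)

    def beforeLoopStart(self):
        # The face exists by the time this is called; BaseNode.onRegisterFailed retries.
        self.face.registerPrefix(self.prefix, self._onCommandReceived,
                                 self.onRegisterFailed)

    def _onCommandReceived(self, prefix, interest, transport, registeredPrefixId):
        # Reply with a signed Data packet using the helper from BaseNode.
        data = Data(interest.getName())
        data.setContent("echo")
        self.sendData(data, transport)

if __name__ == '__main__':
    EchoNode().start()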
Exemplo n.º 49
0
class Decision_Engine_Main(object):
    def __init__(self, namePrefix):
        self.outstanding = dict()
        self.isDone = False
        self.keyChain = KeyChain()
        self.face = Face("127.0.0.1")
        self.configPrefix = Name(namePrefix)
        self.script_path = os.path.abspath(
            __file__)  # i.e. /path/to/dir/foobar.py
        self.script_dir = os.path.split(
            self.script_path)[0]  #i.e. /path/to/dir/
        self.interestLifetime = 800000
        #self.Datamessage_size = 1999000
        self.Datamessage_size = 19990000
        folder_name = "SC_repository/"
        rel_path = os.path.join(self.script_dir, folder_name)
        prefix_startDE = "/picasso/start_de/"
        self.prefix_startDE = Name(prefix_startDE)
        self.prefix_deployService = '/picasso/service_deployment_push/'
        self.json_server_Spec_default = {  # This is only a skeleton
            'par': {  # service parameters
                'serviceName': 'nameOfService',
                'imageName': 'NameOfImageToInstantiateService',
                'imageSize': 'sizeOfImage',
                'maxConReq': 'maxNumConcurrentRequestsThatAnInstanceCanHandle',
                'startUpTime': 'timeToInstantiateService'
            },
            'QoS': {  # QoS parameters expected from the service
                'responseTime': 'responseTimeExpectedFromService',
                'availability': 'availabilityExpectedFromService',
                'numConReq': 'numConcurrentRequestsToBeHandledByService'
            }
        }

        if not os.path.exists(rel_path):
            os.makedirs(rel_path)

    def run(self):

        try:

            ### This face is used to send an image to the SEG
            self.face.setCommandSigningInfo(
                self.keyChain, self.keyChain.getDefaultCertificateName())
            self.face.registerPrefix(self.configPrefix,
                                     self.onInterest_PullService,
                                     self.onRegisterFailed)

            #### This prefix is used to start the decision engine algorithm; the Interest is sent by the trigger module.
            #### It is registered for testing purposes.
            self.face.setCommandSigningInfo(
                self.keyChain, self.keyChain.getDefaultCertificateName())
            self.face.registerPrefix(self.prefix_startDE,
                                     self.onInterest_StartDE,
                                     self.onRegisterFailed)
            Max = self.face.getMaxNdnPacketSize()
            print 'Maxsize: ', Max
            print "Registered prefix : " + self.configPrefix.toUri()

            while not self.isDone:
                self.face.processEvents()
                time.sleep(0.01)

        except RuntimeError as e:
            print "ERROR: %s" % e

        return True

    def onInterest_StartDE(self, prefix, interest, face, interestFilterId,
                           filter):
        interestName = interest.getName()
        print "Interest Name: %s" % interestName
        interest_name_components = interestName.toUri().split("/")
        if "start_de" in interest_name_components:
            #print 'Query database'
            #print 'Call decision engine algorithm'
            #parent_dir = os.path.split(self.script_dir)[0]
            #monitor_path = os.path.join(self.script_dir, parent_dir, 'Monitoring', 'Monitoring_DB')
            #print monitor_path
            #myDE = de(monitor_path)
            #json_lst_dict = myDE.get_lst_of_dictionaries()
            #json_server_Spec = self.json_server_Spec_default
            #node_name = myDE.selectHost_to_deploy_firstInstance(json_lst_dict, json_server_Spec)
            node_name = interest_name_components[
                interest_name_components.index("start_de") + 2]
            print 'Selected Host Name %s' % node_name
            service_name = interest_name_components[
                interest_name_components.index("start_de") + 1]
            print 'service name %s' % service_name
            print 'Start service deployment'
            deployService = self.prefix_deployService + node_name + '/' + service_name
            config_prefix_deployService = Name(deployService)
            interest = Interest(config_prefix_deployService)
            interest.setInterestLifetimeMilliseconds(self.interestLifetime)
            interest.setMustBeFresh(True)
            self.face.expressInterest(
                interest, None, None
            )  ## set None --> sent out only, don't wait for Data and Timeout
            print "Sent Push Interest to SEG %s" % config_prefix_deployService
        else:
            print "Interest name mismatch"

    def onInterest_PullService(self, prefix, interest, face, interestFilterId,
                               filter):
        interestName = interest.getName()
        data = Data(interestName)
        print "Interest Name: %s" % interestName
        interest_name_components = interestName.toUri().split("/")
        if "service_deployment_pull" in interest_name_components:
            ## Extract filename from Interest name
            #filename = "uhttpd.tar"
            filename = interest_name_components[
                interest_name_components.index("service_deployment_pull") + 1]
            folder_name = "ServiceRepo/SC_repository/"
            parent_dir = os.path.split(self.script_dir)[0]
            rel_path = os.path.join(parent_dir, folder_name)
            if not os.path.exists(rel_path):
                os.makedirs(rel_path)
            abs_file_path = os.path.join(rel_path, filename)
            freshness = 6000000  # in milliseconds; the content is evicted from the cache after the freshness period
            self.sendingFile(abs_file_path, interest, face, freshness)
        else:
            print "Interest name mismatch"

    def onRegisterFailed(self, prefix):
        print "Register failed for prefix", prefix.toUri()
        self.isDone = True

    def sendingFile(self, file_path, interest, face, freshness):
        print "Sending File Function"
        interestName = interest.getName()
        interestNameSize = interestName.size()

        try:
            SegmentNum = (interestName.get(interestNameSize - 1)).toSegment()
            dataName = interestName.getSubName(0, interestNameSize - 1)

        # If the Interest name carries no segment number, treat it as a request for segment 0 and use the full Interest name as the Data name.
        except RuntimeError as e:
            SegmentNum = 0
            dataName = interestName
        # Put file to the Data message
        try:
            # The segment payload size must leave room for the NDN name and other
            # header fields so that the whole Data packet stays below maxNdnPacketSize.
            # EnumeratePublisher splits large files into segments and returns the
            # requested segment (segment numbers start from 0).
            dataSegment, last_segment_num = EnumeratePublisher(
                file_path, self.Datamessage_size, SegmentNum).getFileSegment()
            print 'SegmentNum:%s last_segment_num: %s' % (SegmentNum,
                                                          last_segment_num)
            # create the DATA name appending the segment number
            dataName = dataName.appendSegment(SegmentNum)
            data = Data(dataName)
            data.setContent(dataSegment)

            # set the final block ID to the last segment number
            last_segment = (Name.Component()).fromNumber(last_segment_num)
            data.getMetaInfo().setFinalBlockId(last_segment)
            #hourMilliseconds = 600 * 1000
            data.getMetaInfo().setFreshnessPeriod(freshness)

            # Currently the Data is signed with the default identity certificate.
            self.keyChain.sign(data, self.keyChain.getDefaultCertificateName())
            # Sending Data message
            face.send(data.wireEncode().toBuffer())
            print "Replied to Interest name: %s" % interestName.toUri()
            print "Replied with Data name: %s" % dataName.toUri()

        except ValueError as err:
            print "ERROR: %s" % err
Exemplo n.º 50
0
class TestFaceRegisterMethods(ut.TestCase):
    def setUp(self):
        self.face_in = Face()
        self.face_out = Face()
        self.keyChain = KeyChain()

    def tearDown(self):
        self.face_in.shutdown()
        self.face_out.shutdown()

    def test_register_prefix_response(self):
        prefixName = Name("/test")
        self.face_in.setCommandSigningInfo(self.keyChain,
                self.keyChain.getDefaultCertificateName())

        interestCallbackCount = [0]
        def onInterest(prefix, interest, face, interestFilterId, filter):
            interestCallbackCount[0] += 1
            data = Data(interest.getName())
            data.setContent("SUCCESS")
            self.keyChain.sign(data, self.keyChain.getDefaultCertificateName())
            face.putData(data)

        failedCallback = Mock()

        self.face_in.registerPrefix(prefixName, onInterest, failedCallback)
        # Give the 'server' time to register the prefix.
        timeout = 1000
        startTime = getNowMilliseconds()
        while True:
            if getNowMilliseconds() - startTime >= timeout:
                break
            self.face_in.processEvents()
            time.sleep(0.01)

        # express an interest on another face
        dataCallback = Mock()
        timeoutCallback = Mock()

        # now express an interest on this new face, and see if onInterest is called
        # Add the timestamp so it is unique and we don't get a cached response.
        interestName = prefixName.append("hello" + repr(time.time()))
        self.face_out.expressInterest(interestName, dataCallback, timeoutCallback)

        # Process events for the in and out faces.
        timeout = 10000
        startTime = getNowMilliseconds()
        while True:
            if getNowMilliseconds() - startTime >= timeout:
                break

            self.face_in.processEvents()
            self.face_out.processEvents()

            done = True
            if interestCallbackCount[0] == 0 and failedCallback.call_count == 0:
                # Still processing face_in.
                done = False
            if dataCallback.call_count == 0 and timeoutCallback.call_count == 0:
                # Still processing face_out.
                done = False

            if done:
                break
            time.sleep(0.01)


        self.assertEqual(failedCallback.call_count, 0, 'Failed to register prefix at all')

        self.assertEqual(interestCallbackCount[0], 1, 'Expected 1 onInterest callback, got '+str(interestCallbackCount[0]))

        self.assertEqual(dataCallback.call_count, 1, 'Expected 1 onData callback, got '+str(dataCallback.call_count))

        onDataArgs = dataCallback.call_args[0]
        # check the message content
        data = onDataArgs[1]
        expectedBlob = Blob("SUCCESS")
        self.assertTrue(expectedBlob == data.getContent(), 'Data received on face does not match expected format')
Exemplo n.º 51
0
class Producer(object):
    def __init__(self):

        Prefix = '/umobile/push_PulblishData/pull'
        self.configPrefix = Name(Prefix)
        self.outstanding = dict()
        self.isDone = False
        self.keyChain = KeyChain()
        self.face = Face("127.0.0.1")
        self.Prefix_publish = '/umobile/push_PulblishData/publish'
        self.Datamessage_size = 8000  # Data segment payload size in bytes (~8 kB)

    def run(self):
        try:

            self.face.setCommandSigningInfo(self.keyChain, \
                                            self.keyChain.getDefaultCertificateName())
            self.face.registerPrefix(self.configPrefix, self.onInterest_Pull,
                                     self.onRegisterFailed)
            print "Registering listening prefix : " + self.configPrefix.toUri()

            self._sendInterest_Publish(Name(self.Prefix_publish))

            while not self.isDone:
                self.face.processEvents()
                time.sleep(0.01)

        except RuntimeError as e:
            print "ERROR: %s" % e

    def _sendInterest_Publish(self, name):
        interest = Interest(name)
        uri = name.toUri()

        interest.setInterestLifetimeMilliseconds(4000)
        interest.setMustBeFresh(True)

        if uri not in self.outstanding:
            self.outstanding[uri] = 1

        self.face.expressInterest(interest, None, None)
        print "Sent Interest for %s" % uri

    def onInterest_Pull(self, prefix, interest, face, interestFilterId,
                        filter):

        script_path = os.path.abspath(__file__)  # i.e. /path/to/dir/foobar.py
        script_dir = os.path.split(script_path)[0]  # i.e. /path/to/dir/
        filename = 'testfile_original.docx'
        abs_file_path = os.path.join(script_dir, filename)
        freshness = 10000  # in milliseconds; the content is evicted from the cache after the freshness period
        self.sendingFile(abs_file_path, interest, face, freshness)

    def _onTimeout(self, interest):
        name = interest.getName()
        uri = name.toUri()

        print "TIMEOUT #%d: %s" % (self.outstanding[uri], uri)
        self.outstanding[uri] += 1

        if self.outstanding[uri] <= 3:
            self._sendInterest_Publish(name)
        else:
            self.isDone = True

    def onRegisterFailed(self, prefix):
        print "Register failed for prefix", prefix.toUri()
        self.isDone = True

    def sendingFile(self, file_path, interest, face, freshness):
        print "Sending File Function"
        interestName = interest.getName()
        interestNameSize = interestName.size()

        try:
            SegmentNum = (interestName.get(interestNameSize - 1)).toSegment()
            dataName = interestName.getSubName(0, interestNameSize - 1)

        # If the Interest name carries no segment number, treat it as a request for segment 0 and use the full Interest name as the Data name.
        except RuntimeError as e:
            SegmentNum = 0
            dataName = interestName
        # Put file to the Data message
        try:
            # The segment payload size must leave room for the NDN name and other
            # header fields so that the whole Data packet stays below maxNdnPacketSize.
            # EnumeratePublisher splits large files into segments and returns the
            # requested segment (segment numbers start from 0).
            dataSegment, last_segment_num = EnumeratePublisher(
                file_path, self.Datamessage_size, SegmentNum).getFileSegment()
            # create the DATA name appending the segment number
            dataName = dataName.appendSegment(SegmentNum)
            data = Data(dataName)
            data.setContent(dataSegment)

            # set the final block ID to the last segment number
            last_segment = (Name.Component()).fromNumber(last_segment_num)
            data.getMetaInfo().setFinalBlockId(last_segment)
            #hourMilliseconds = 600 * 1000
            data.getMetaInfo().setFreshnessPeriod(freshness)

            # Currently the Data is signed with the default identity certificate.
            self.keyChain.sign(data, self.keyChain.getDefaultCertificateName())
            # Sending Data message
            face.send(data.wireEncode().toBuffer())
            print "Replied to Interest name: %s" % interestName.toUri()
            print "Replied with Data name: %s" % dataName.toUri()

        except ValueError as err:
            print "ERROR: %s" % err
Exemplo n.º 52
0
class Experiment:

    _maxSegmentPayloadLength = 8192

    def __init__(self, absPath, maxAttributes):
        self.keyChain = KeyChain("pib-memory:", "tpm-memory:")
        self.keyChain.createIdentityV2("/test/identity")
        self.validator = Validator(
            ValidationPolicyFromPib(self.keyChain.getPib()))
        # , filename, groupSize, nAttributes, absPath, keepData = False):

        # sys.stderr.write ("Using NDN-ABS authority, signer, and verifier database from %s\n" % absPath)
        self.db = ndnabs.PickleDb(absPath)

        self.signer = ndnabs.Signer(self.db)
        self.verifier = ndnabs.Verifier(self.db)

        try:
            info = self.signer.get_public_params_info()
        except Exception:
            raise RuntimeError(
                "Public parameters are not properly installed for the signer/verifier"
            )
        if info.getName().getPrefix(-2).toUri() != "/icn2019/test/authority":
            raise RuntimeError(
                'NDN-ABS authority exists, but not setup for experiment. Use '
                '`ndnabs setup -f /icn2019/test/authority` to force-setup the authority'
            )

        maxAttributes = [
            b'attribute%d' % i for i in range(1, maxAttributes + 1)
        ]

        for attr in maxAttributes:
            if not attr in self.signer.get_attributes():
                raise RuntimeError(
                    "%s attribute missing. Generate attributes for the experiment using `ndnabs gen-secret %s | ndnabs install-secret`"
                    % (str(attr, 'utf-8'), ' '.join(
                        [str(i, 'utf-8') for i in maxAttributes])))

    #     self.attributes = [b'attribute%d' % i for i in range(1, nAttributes + 1)]
    #     self._setupAbs(absPath)
    #     self._readDataAndCreateManifests(filename, groupSize, keepData)

    def setupAbs(self, nAttributes):
        self.attributes = [
            b'attribute%d' % i for i in range(1, nAttributes + 1)
        ]

    def _createManifest(self, name, manifestBuffer, nManifests):
        manifest = Data(name)
        manifest.setContent(manifestBuffer[0:nManifests * SHA256_DIGEST_SIZE])
        return manifest

    def readDataAndCreateManifests(self, filename, groupSize, keepData):
        if groupSize < 1:
            raise RuntimeError("Group size cannot be less than 1")

        self.allChunks = [
        ]  # for holding the generated data packets, including unsigned manifests
        self.allManifests = [
        ]  # for storing first unsigned manifest packets, which are then signed in-place
        self.rawDataCount = 0
        self.ndnChunkCount = 0

        seqNo = 0  # sequence number of data packets
        chunkNo = 0  # number of the chunk in the group

        with open(filename, 'rb') as f:

            # prepare space to store all manifests of the group (last manifest will not use all the space)
            def allocateBufferForDigests():
                return bytearray(groupSize * SHA256_DIGEST_SIZE)

            digests = allocateBufferForDigests()

            while f.readable():
                chunkPayload = f.read(self._maxSegmentPayloadLength)
                if len(chunkPayload) == 0:
                    break
                self.rawDataCount = self.rawDataCount + len(chunkPayload)

                chunk = Data(
                    Name("/icn2019/test/data").appendSequenceNumber(seqNo))
                seqNo = seqNo + 1
                chunk.setContent(chunkPayload)

                digestSignature = DigestSha256Signature()
                digestSignature.setSignature(
                    Blob(bytearray(SHA256_DIGEST_SIZE))
                )  # not a real signature, but sufficient for the experiment
                chunk.setSignature(digestSignature)

                if keepData:
                    self.allChunks.append(chunk)

                # only data chunks; manifest sizes are counted separately, since they are signed
                self.ndnChunkCount = self.ndnChunkCount + chunk.wireEncode(
                ).size()

                # For storing data packet to a file
                # with open(writepath + "-1.txt", "wb") as dataf
                #     dataf.write(dpacket_bytes)

                implicitDigest = chunk.getFullName()[-1].getValue()

                offset = chunkNo * SHA256_DIGEST_SIZE
                digests[offset:offset +
                        SHA256_DIGEST_SIZE] = implicitDigest.toBytes()[:]

                chunkNo = chunkNo + 1

                if chunkNo == groupSize:
                    manifest = self._createManifest(
                        Name("/icn2019/test/data").appendSequenceNumber(seqNo),
                        digests, groupSize)  # full group
                    seqNo = seqNo + 1
                    self.allChunks.append(manifest)
                    self.allManifests.append(manifest)
                    chunkNo = 0
                    digests = allocateBufferForDigests()

            if chunkNo != 0:
                manifest = self._createManifest(
                    Name("/icn2019/test/data").appendSequenceNumber(seqNo),
                    digests, chunkNo)  # partial group: only chunkNo digests are filled
                seqNo = seqNo + 1
                self.allChunks.append(manifest)
                self.allManifests.append(manifest)

            self.nDataChunks = seqNo - len(
                self.allManifests
            )  # number of data packets, excluding the manifests

    def signManifestsABS(self):
        self.manifestCount = 0
        self.signatureCounts = []
        for manifest in self.allManifests:
            self.signer.sign(manifest, self.attributes)
            self.manifestCount = self.manifestCount + manifest.wireEncode(
            ).size()
            self.signatureCounts.append(
                manifest.getSignature().getSignature().size())

    def verifyManifestsABS(self):
        for manifest in self.allManifests:
            if not self.verifier.verify(manifest.wireEncode()):
                sys.stderr.write("Failed to verify %s\n" % manifest.getName())

    def signManifestsRSA(self):
        self.manifestCount = 0
        self.signatureCounts = []
        for manifest in self.allManifests:
            self.keyChain.sign(manifest)
            self.manifestCount = self.manifestCount + manifest.wireEncode(
            ).size()
            self.signatureCounts.append(
                manifest.getSignature().getSignature().size())

    def verifyManifestsRSA(self):
        def onSuccess(*k, **kw):
            pass

        def onFailure(data, *k, **kw):
            sys.stderr.write("Failed to verify %s\n" % data.getName())

        for manifest in self.allManifests:
            self.validator.validate(manifest, onSuccess, onFailure)
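The commented-out lines in the constructor hint at the intended call order: set up the attributes, read the input file into chunks and manifests, then sign and verify the manifests. The following driver is only a sketch built from the methods shown above; the database path, file name, and parameter values are placeholders:

# Hypothetical driver for the Experiment class above; paths and sizes are placeholders.
import time

def runExperiment(absPath="/tmp/ndnabs.db", filename="payload.bin",
                  groupSize=64, nAttributes=4):
    experiment = Experiment(absPath, maxAttributes=nAttributes)
    experiment.setupAbs(nAttributes)
    experiment.readDataAndCreateManifests(filename, groupSize, keepData=False)

    start = time.time()
    experiment.signManifestsABS()
    print("ABS signing took %.3f s for %d manifests" %
          (time.time() - start, len(experiment.allManifests)))

    start = time.time()
    experiment.verifyManifestsABS()
    print("ABS verification took %.3f s" % (time.time() - start))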
Exemplo n.º 53
0
class Producer(object):

    def __init__(self):
        # Initialize our keychain
        self.keyChain = KeyChain()
        self.isDone = False
        self.namesp = '/ndn/eb/'

    def run(self):
        # Create a connection to the local forwarder.
        face = Face()

        prefix = Name(self.namesp)
        # Use the system default key chain and certificate name to sign commands.
        face.setCommandSigningInfo(self.keyChain, self.keyChain.getDefaultCertificateName())
        # Also use the default certificate name to sign Data packets.
        face.registerPrefix(prefix, self.onInterest, self.onRegisterFailed)

        print("Registering prefix: %s" % prefix.toUri())

        # Run the event loop forever. Use a short sleep to
        # prevent the Producer from using 100% of the CPU.
        while not self.isDone:
            face.processEvents()
            time.sleep(0.01)

    def onInterest(self, prefix, interest, transport, registeredPrefixId):
        # Create a response Data packet based on the type of incoming interest
        # Then, sign the Data with our keychain and send it out
        # using transport.
        interestName = interest.getName()
        interest_name_components = interestName.toUri().split('/')

        if 'ndnrtc' in interest_name_components:
            if segDect().newSegDetected():
                data = Data(segLabName + '/segment/' + segDect().newSeg().label)
                # set the payload with the new segment
                data.setContent(segDect().newSeg().ann)

                # set refresh period
                # hourMilliseconds = 3600 * 1000
                # data.getMetaInfo().setFreshnessPeriod(hourMilliseconds)

                # manually adjust the FreshnessPeriod based on the time of producing a data packet
                productionTimeGap = 100 * 1000
                data.getMetaInfo().setFreshnessPeriod(productionTimeGap)

                # sign the packet
                self.keyChain.sign(data, self.keyChain.getDefaultCertificateName())

                transport.send(data.wireEncode().toBuffer())
                data.setContent(segDect().newSeg())
            else:
                print("no new segment detected at: %s" % interestName.toUri())
            print("Replied to: %s" % interestName.toUri())
        elif 'frame-annotation' in interest_name_components:
            if segDect().newSegDetected():
                data = Data(segLabName + '/segment/' + segDect().newSeg()['seglab'])
                # set the payload with the new segment
                data.setContent(segDect().newSeg())

                # set refresh period
                hourMilliseconds = 3600 * 1000
                data.getMetaInfo().setFreshnessPeriod(hourMilliseconds)

                # sign the packet
                self.keyChain.sign(data, self.keyChain.getDefaultCertificateName())

                transport.send(data.wireEncode().toBuffer())
                data.setContent(segDect().newSeg())
            else:
                print("no new segment detected at: %s" % interestName.toUri())
            print("Replied to: %s" % interestName.toUri())

        # send a historically similar segment to the playdetect engine
        elif 'cue' in interest_name_components:
            pre_seg = segDect().similarHistory(interest_name_components[:-1])
            data = Data(segLabName + '/segment/' + pre_seg['seglab'])
            data.setContent(pre_seg)

            # set refresh period
            hourMilliseconds = 3600 * 1000
            data.getMetaInfo().setFreshnessPeriod(hourMilliseconds)

            # sign the packet
            self.keyChain.sign(data, self.keyChain.getDefaultCertificateName())

            transport.send(data.wireEncode().toBuffer())
            data.setContent(segDect().newSeg())
            print("Replied to: %s" % interestName.toUri())
        else:
            print("Replied to: %s" % interestName.toUri())
            pass



    def onRegisterFailed(self, prefix):
        # Print an error message and signal the event loop to terminate
        print("Register failed for prefix: %s" % prefix.toUri())
        self.isDone = True
Exemplo n.º 54
0
class TestConsumer(ut.TestCase):
    def setUp(self):
        self.decryptionKeys = {} # key: Name, value: Blob
        self.encryptionKeys = {} # key: Name, value: Data

        # Reuse the policy_config subdirectory for the temporary SQLite files.
        self.databaseFilePath = "policy_config/test.db"
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            # no such file
            pass

        self.groupName = Name("/Prefix/READ")
        self.contentName = Name("/Prefix/SAMPLE/Content")
        self.cKeyName = Name("/Prefix/SAMPLE/Content/C-KEY/1")
        self.eKeyName = Name("/Prefix/READ/E-KEY/1/2")
        self.dKeyName = Name("/Prefix/READ/D-KEY/1/2")
        self.uKeyName = Name("/U/Key")
        self.uName = Name("/U")

        # Generate the E-KEY and D-KEY.
        params = RsaKeyParams()
        self.fixtureDKeyBlob = RsaAlgorithm.generateKey(params).getKeyBits()
        self.fixtureEKeyBlob = RsaAlgorithm.deriveEncryptKey(
          self.fixtureDKeyBlob).getKeyBits()

        # Generate the user key.
        self.fixtureUDKeyBlob = RsaAlgorithm.generateKey(params).getKeyBits()
        self.fixtureUEKeyBlob = RsaAlgorithm.deriveEncryptKey(
          self.fixtureUDKeyBlob).getKeyBits()

        # Load the C-KEY.
        self.fixtureCKeyBlob = Blob(AES_KEY, False)

        # Set up the keyChain.
        identityStorage = MemoryIdentityStorage()
        privateKeyStorage = MemoryPrivateKeyStorage()
        self.keyChain = KeyChain(
          IdentityManager(identityStorage, privateKeyStorage),
          NoVerifyPolicyManager())

        # Initialize the storage.
        keyName = Name("/testname/DSK-123")
        self.certificateName = keyName.getSubName(0, keyName.size() - 1).append(
          "KEY").append(keyName.get(-1)).append("ID-CERT").append("0")
        identityStorage.addKey(
          keyName, KeyType.RSA, Blob(DEFAULT_RSA_PUBLIC_KEY_DER, False))
        privateKeyStorage.setKeyPairForKeyName(
          keyName, KeyType.RSA, DEFAULT_RSA_PUBLIC_KEY_DER,
          DEFAULT_RSA_PRIVATE_KEY_DER)

    def tearDown(self):
        try:
            os.remove(self.databaseFilePath)
        except OSError:
            pass

    def createEncryptedContent(self):
        contentData = Data(self.contentName)
        encryptParams = EncryptParams(EncryptAlgorithmType.AesCbc)
        encryptParams.setInitialVector(Blob(INITIAL_VECTOR, False))
        Encryptor.encryptData(
          contentData, Blob(DATA_CONTENT, False), self.cKeyName,
          self.fixtureCKeyBlob,  encryptParams)
        self.keyChain.sign(contentData, self.certificateName)
        return contentData

    def createEncryptedCKey(self):
        cKeyData = Data(self.cKeyName)
        encryptParams = EncryptParams(EncryptAlgorithmType.RsaOaep)
        Encryptor.encryptData(
          cKeyData, self.fixtureCKeyBlob, self.dKeyName, self.fixtureEKeyBlob,
          encryptParams)
        self.keyChain.sign(cKeyData, self.certificateName)
        return cKeyData

    def createEncryptedDKey(self):
        dKeyData = Data(self.dKeyName)
        encryptParams = EncryptParams(EncryptAlgorithmType.RsaOaep)
        Encryptor.encryptData(
          dKeyData, self.fixtureDKeyBlob, self.uKeyName, self.fixtureUEKeyBlob,
          encryptParams)
        self.keyChain.sign(dKeyData, self.certificateName)
        return dKeyData

    def test_decrypt_content(self):
        # Generate the AES key.
        aesKeyBlob = Blob(AES_KEY, False)

        # Generate the C-KEY packet for the same AES_KEY.
        cKeyData = self.createEncryptedCKey()
        # Generate the content packet.
        contentData = self.createEncryptedContent()

        # Decrypt.
        Consumer._decrypt(
          cKeyData.getContent(), self.fixtureDKeyBlob,
          lambda result: self.assertTrue(result.equals(aesKeyBlob)),
          lambda errorCode, message: self.fail("decrypt error " + message))

        # Decrypt.
        Consumer._decrypt(
          contentData.getContent(), self.fixtureCKeyBlob,
          lambda result: self.assertTrue(result.equals(Blob(DATA_CONTENT, False))),
          lambda errorCode, message: self.fail("decrypt error " + message))

    def test_consume(self):
        contentData = self.createEncryptedContent()
        cKeyData = self.createEncryptedCKey()
        dKeyData = self.createEncryptedDKey()

        contentCount = [0]
        cKeyCount = [0]
        dKeyCount = [0]

        # Prepare a TestFace to instantly answer calls to expressInterest.
        class TestFace(object):
            def __init__(self, handleExpressInterest):
                self.handleExpressInterest = handleExpressInterest
            def expressInterest(self, interest, onData, onTimeout, onNetworkNack):
                return self.handleExpressInterest(
                  interest, onData, onTimeout, onNetworkNack)

        def handleExpressInterest(interest, onData, onTimeout, onNetworkNack):
            if interest.matchesName(contentData.getName()):
              contentCount[0] = 1
              onData(interest, contentData)
            elif interest.matchesName(cKeyData.getName()):
              cKeyCount[0] = 1
              onData(interest, cKeyData)
            elif interest.matchesName(dKeyData.getName()):
              dKeyCount[0] = 1
              onData(interest, dKeyData)
            else:
              onTimeout(interest)

            return 0
        face = TestFace(handleExpressInterest)

        # Create the consumer.
        consumer = Consumer(
          face, self.keyChain, self.groupName, self.uName,
          Sqlite3ConsumerDb(self.databaseFilePath))
        consumer.addDecryptionKey(self.uKeyName, self.fixtureUDKeyBlob)

        finalCount = [0]
        def onConsumeComplete(data, result):
            finalCount[0] = 1
            self.assertTrue("consumeComplete",
                            result.equals(Blob(DATA_CONTENT, False)))
        consumer.consume(
          self.contentName, onConsumeComplete,
          lambda code, message:
            self.fail("consume error " + repr(code) + ": " + message))

        self.assertEqual(1, contentCount[0], "contentCount")
        self.assertEqual(1, cKeyCount[0], "cKeyCount")
        self.assertEqual(1, dKeyCount[0], "dKeyCount")
        self.assertEqual(1, finalCount[0], "finalCount")

    def test_consumer_with_link(self):
        contentData = self.createEncryptedContent()
        cKeyData = self.createEncryptedCKey()
        dKeyData = self.createEncryptedDKey()

        contentCount = [0]
        cKeyCount = [0]
        dKeyCount = [0]

        # Prepare a TestFace to instantly answer calls to expressInterest.
        class TestFace(object):
            def __init__(self, handleExpressInterest):
                self.handleExpressInterest = handleExpressInterest
            def expressInterest(self, interest, onData, onTimeout, onNetworkNack):
                return self.handleExpressInterest(
                  interest, onData, onTimeout, onNetworkNack)

        def handleExpressInterest(interest, onData, onTimeout, onNetworkNack):
            self.assertEqual(3, interest.getLink().getDelegations().size())

            if interest.matchesName(contentData.getName()):
              contentCount[0] = 1
              onData(interest, contentData)
            elif interest.matchesName(cKeyData.getName()):
              cKeyCount[0] = 1
              onData(interest, cKeyData)
            elif interest.matchesName(dKeyData.getName()):
              dKeyCount[0] = 1
              onData(interest, dKeyData)
            else:
              onTimeout(interest)

            return 0
        face = TestFace(handleExpressInterest)

        # Create the consumer.
        ckeyLink = Link()
        ckeyLink.addDelegation(10,  Name("/ckey1"))
        ckeyLink.addDelegation(20,  Name("/ckey2"))
        ckeyLink.addDelegation(100, Name("/ckey3"))
        dkeyLink = Link()
        dkeyLink.addDelegation(10,  Name("/dkey1"))
        dkeyLink.addDelegation(20,  Name("/dkey2"))
        dkeyLink.addDelegation(100, Name("/dkey3"))
        dataLink = Link()
        dataLink.addDelegation(10,  Name("/data1"))
        dataLink.addDelegation(20,  Name("/data2"))
        dataLink.addDelegation(100, Name("/data3"))
        self.keyChain.sign(ckeyLink, self.certificateName)
        self.keyChain.sign(dkeyLink, self.certificateName)
        self.keyChain.sign(dataLink, self.certificateName)

        consumer = Consumer(
          face, self.keyChain, self.groupName, self.uName,
          Sqlite3ConsumerDb(self.databaseFilePath), ckeyLink, dkeyLink)
        consumer.addDecryptionKey(self.uKeyName, self.fixtureUDKeyBlob)

        finalCount = [0]
        def onConsumeComplete(data, result):
            finalCount[0] = 1
            self.assertTrue("consumeComplete",
                            result.equals(Blob(DATA_CONTENT, False)))
        consumer.consume(
          self.contentName, onConsumeComplete,
          lambda code, message:
            self.fail("consume error " + repr(code) + ": " + message),
          dataLink)

        self.assertEqual(1, contentCount[0], "contentCount")
        self.assertEqual(1, cKeyCount[0], "cKeyCount")
        self.assertEqual(1, dKeyCount[0], "dKeyCount")
        self.assertEqual(1, finalCount[0], "finalCount")
Exemplo n.º 55
0
class LightController():
    shouldSign = False
    COLORS_PER_LIGHT = 3
    STRAND_SIZE = 50
    def __init__(self, nStrands=1, myIP="192.168.1.1", lightIP="192.168.1.50", prefix="/testlight"):
        self.log = logging.getLogger("LightController")
        self.log.setLevel(logging.DEBUG)
        sh = logging.StreamHandler()
        sh.setLevel(logging.WARNING)
        self.log.addHandler(sh)
        fh = logging.FileHandler("LightController.log")
        fh.setLevel(logging.INFO)
        self.log.addHandler(fh)

        self.payloadBuffer = [[0]*self.STRAND_SIZE*self.COLORS_PER_LIGHT for n in range(nStrands)]

        self.kinetsender = KinetSender(myIP, lightIP, nStrands, self.STRAND_SIZE*self.COLORS_PER_LIGHT)
        self.registerFailed = False
        self.done = False
        self.prefix = Name(prefix)
        self.keychain = KeyChain()
        self.certificateName = self.keychain.getDefaultCertificateName()

    # XXX: we should get a thread for this or something!
    def start(self):
        self.face = Face()
        self.face.setCommandSigningInfo(self.keychain, self.certificateName)
        self.face.registerPrefix(self.prefix, self.onLightingCommand, self.onRegisterFailed)
        while self.face is not None:
            self.face.processEvents()
            if self.registerFailed:
                self.stop()
                break
            #time.sleep(0.001)


    def stop(self):
        self.kinetsender.stop = True
        self.kinetsender.complete.wait()         
        self.face.shutdown()
        self.face = None

    def signData(self, data):
        if LightController.shouldSign:
            self.keychain.sign(data, self.certificateName)
        else:
            data.setSignature(Sha256WithRsaSignature())

    def setPayloadColor(self, strand, color):
        # will expand this to allow the repeats, etc
        self.payloadBuffer[strand] = [int(color.r)&0xff, int(color.g)&0xff, int(color.b)&0xff]*self.STRAND_SIZE

    def onLightingCommand(self, prefix, interest, transport, prefixId):
        interestName = Name(interest.getName())
        #d = Data(interest.getName().getPrefix(prefix.size()+1))
        d = Data(interest.getName())
        # get the command parameters from the name
        try:
            commandComponent = interest.getName().get(prefix.size())
            commandParams = interest.getName().get(prefix.size()+1)

            lightingCommand = LightCommandMessage()
            ProtobufTlv.decode(lightingCommand, commandParams.getValue())
            self.log.info("Command: " + commandComponent.toEscapedString())
            requestedColor = lightingCommand.command.pattern.colors[0] 
            colorStr = str((requestedColor.r, requestedColor.g, requestedColor.b))
            self.log.info("Requested color: " + colorStr)
            self.setPayloadColor(0, requestedColor)
            self.sendLightPayload(1)
            d.setContent("Gotcha: " + colorStr+ "\n")
        except Exception as e:
            print e
            d.setContent("Bad command\n")
        finally:
            d.getMetaInfo().setFinalBlockID(0)
            self.signData(d)

        encodedData = d.wireEncode()
        transport.send(encodedData.toBuffer())

    def onRegisterFailed(self, prefix):
        self.log.error("Could not register " + prefix.toUri())
        print "Register failed!"
        self.registerFailed = True

    def sendLightPayload(self, port):
        self.kinetsender.setPayload(port, self.payloadBuffer[port-1])
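onLightingCommand expects a name of the form <prefix>/<command component>/<ProtobufTlv-encoded parameters> and decodes the second component into a LightCommandMessage. A client-side sketch of how such a command interest could be built is shown below; the LightCommandMessage protobuf class and its module name are assumptions (only its fields are visible above, and colors is assumed to be a repeated field), and the command component name "setRGB" is made up for illustration:

# Hedged client-side sketch for the controller above; protobuf module and
# command name are hypothetical.
from pyndn import Face, Name, Interest
from pyndn.encoding import ProtobufTlv
# from light_command_pb2 import LightCommandMessage  # hypothetical generated module

def expressColorCommand(face, prefix, r, g, b, onData, onTimeout):
    message = LightCommandMessage()
    color = message.command.pattern.colors.add()  # assumes a repeated colors field
    color.r, color.g, color.b = r, g, b

    # Name layout expected by onLightingCommand:
    #   <prefix> / <command component> / <ProtobufTlv-encoded parameters>
    commandName = Name(prefix).append("setRGB").append(
        Name.Component(ProtobufTlv.encode(message)))
    face.expressInterest(Interest(commandName), onData, onTimeout)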
Exemplo n.º 56
0
class SensorDataLogger:
    def __init__(self, data_interval):
        # connect to modbus
        self.master = modbus_tcp.TcpMaster("172.17.66.246", 502)
        # self.master.set_timeout(120) # in seconds
        
        # connect to local repo
        self.publisher = RepoSocketPublisher(12345)
        self.prefix = "/ndn/ucla.edu/bms/strathmore/data/demand"
        self.interval = data_interval # in seconds
        
        self.loadKey()
        
    def loadKey(self):
        self.identityStorage = MemoryIdentityStorage()
        self.privateKeyStorage = MemoryPrivateKeyStorage()
        self.keychain = KeyChain(IdentityManager(self.identityStorage, self.privateKeyStorage))

        f = open(key_file, "r")
        self.key = RSA.importKey(f.read())
        self.key_name = Name(bld_root).append(getKeyID(self.key))
        key_pub_der = bytearray(self.key.publickey().exportKey(format="DER"))
        key_pri_der = bytearray(self.key.exportKey(format="DER"))
        self.identityStorage.addKey(self.key_name, KeyType.RSA, Blob(key_pub_der))
        self.privateKeyStorage.setKeyPairForKeyName(self.key_name, key_pub_der, key_pri_der)
        self.cert_name = self.key_name.getSubName(0, self.key_name.size() - 1).append(
            "KEY").append(self.key_name[-1]).append("ID-CERT").append("0")

        print 'KeyName = ' + self.key_name.toUri()
        print 'CertName = ' + self.cert_name.toUri()

    def publishData(self, key, key_ts, payload, timestamp):
        data = Data(Name(self.prefix).append(bytearray(timestamp)))
        iv = Random.new().read(AES.block_size)
        encryptor = AES.new(key, AES.MODE_CBC, iv)
        data.setContent(bytearray(key_ts + iv + encryptor.encrypt(pad(json.dumps(payload)))))
        data.getMetaInfo().setFreshnessPeriod(5000)
        self.keychain.sign(data, self.cert_name)
        self.publisher.put(data)
        #print payload
        #print data.getName().toUri()

    def run(self):
        key_ts = struct.pack('!Q', int(time.time() * 1000))
        key = Random.new().read(32)
        kds_count = -1
        
        while (True):
            # KDS
            kds_count = kds_count + 1
            if kds_count % 120 == 0:
                key_ts = struct.pack("!Q", int(time.time() * 1000))
                key = Random.new().read(32)
                kds_thread = kds.SimpleKDSPublisher(Name(bld_root), self.keychain, self.cert_name, key, key_ts)
                kds_thread.start()
                kds_count = 0

            # Data
            now = int(time.time() * 1000) # in milliseconds

            a = self.master.execute(100, cst.READ_HOLDING_REGISTERS, 166, 1)
            b = self.master.execute(100, cst.READ_HOLDING_REGISTERS, 167, 1)
            vln = (b[0] << 16) + a[0]
            c = self.master.execute(1, cst.READ_HOLDING_REGISTERS, 150, 1)
            la = c[0]
            
            payload = {'ts': now, 'vlna': vln, 'la': la}
            timestamp = struct.pack("!Q", now) # timestamp is in milliseconds

            self.publishData(key, key_ts, payload, timestamp)

            time.sleep(self.interval)
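publishData calls a pad() helper that is not shown in this snippet; AES in CBC mode requires the plaintext length to be a multiple of AES.block_size. A common PKCS#7-style implementation, given here only as an assumption about what the missing helper does, is:

# Hypothetical pad/unpad helpers for the AES-CBC encryption used in publishData.
from Crypto.Cipher import AES

def pad(plaintext):
    # Append N bytes of value N so the length becomes a multiple of the block size.
    padLen = AES.block_size - (len(plaintext) % AES.block_size)
    return plaintext + chr(padLen) * padLen

def unpad(padded):
    # Strip the padding added by pad().
    return padded[:-ord(padded[-1])]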
Exemplo n.º 57
0
class TestPolicyManagerV2(ut.TestCase):
    def setUp(self):
        testCertDirectory = 'policy_config/certs'
        self.testCertFile = os.path.join(testCertDirectory, 'test.cert')

        self.pibImpl = PibMemory()
        self.tpmBackEnd = TpmBackEndMemory()
        self.policyManager = ConfigPolicyManager(
            'policy_config/simple_rules.conf', CertificateCacheV2())

        self.identityName = Name('/TestConfigPolicyManager/temp')
        # to match the anchor cert
        self.keyName = Name(
            self.identityName).append("KEY").append("ksk-1416010123")
        self.pibImpl.addKey(self.identityName, self.keyName,
                            TEST_RSA_PUBLIC_KEY_DER)
        # Set the password to None since we have an unencrypted PKCS #8 private key.
        self.tpmBackEnd.importKey(self.keyName, TEST_RSA_PRIVATE_KEY_PKCS8,
                                  None)

        self.keyChain = KeyChain(self.pibImpl, self.tpmBackEnd,
                                 self.policyManager)

        pibKey = self.keyChain.getPib().getIdentity(self.identityName).getKey(
            self.keyName)
        # selfSign adds to the PIB.
        self.keyChain.selfSign(pibKey)

    def tearDown(self):
        try:
            os.remove(self.testCertFile)
        except OSError:
            pass

    def test_interest_timestamp(self):
        interestName = Name('/ndn/ucla/edu/something')
        certName = self.keyChain.getPib().getIdentity(
            self.identityName).getKey(
                self.keyName).getDefaultCertificate().getName()
        face = Face("localhost")
        face.setCommandSigningInfo(self.keyChain, certName)

        oldInterest = Interest(interestName)
        face.makeCommandInterest(oldInterest)

        time.sleep(0.1)  # make sure timestamps are different
        newInterest = Interest(interestName)
        face.makeCommandInterest(newInterest)

        vr = doVerify(self.policyManager, newInterest)

        self.assertFalse(
            vr.hasFurtherSteps,
            "ConfigPolicyManager returned ValidationRequest but certificate is known"
        )
        self.assertEqual(vr.failureCount, 0,
                         "Verification of valid interest failed")
        self.assertEqual(
            vr.successCount, 1,
            "Verification success called {} times instead of 1".format(
                vr.successCount))

        vr = doVerify(self.policyManager, oldInterest)

        self.assertFalse(
            vr.hasFurtherSteps,
            "ConfigPolicyManager returned ValidationRequest but certificate is known"
        )
        self.assertEqual(vr.successCount, 0,
                         "Verification of stale interest succeeded")
        self.assertEqual(
            vr.failureCount, 1,
            "Failure callback called {} times instead of 1".format(
                vr.failureCount))

    def test_refresh_10s(self):
        with open('policy_config/testData', 'r') as dataFile:
            encodedData = dataFile.read()
            data = Data()
            dataBlob = Blob(b64decode(encodedData))
            data.wireDecode(dataBlob)

        # This test is needed, since the KeyChain will express interests in
        # unknown certificates.
        vr = doVerify(self.policyManager, data)

        self.assertTrue(
            vr.hasFurtherSteps,
            "ConfigPolicyManager did not create ValidationRequest for unknown certificate"
        )
        self.assertEqual(
            vr.successCount, 0,
            "ConfigPolicyManager called success callback with pending ValidationRequest"
        )
        self.assertEqual(
            vr.failureCount, 0,
            "ConfigPolicyManager called failure callback with pending ValidationRequest"
        )

        # Now save the cert data to our anchor directory, and wait.
        # We have to sign it with the current identity or the policy manager
        # will create an interest for the signing certificate.

        cert = CertificateV2()
        certData = b64decode(CERT_DUMP)
        cert.wireDecode(Blob(certData, False))
        signingInfo = SigningInfo()
        signingInfo.setSigningIdentity(self.identityName)
        # Make sure the validity period is current for two years.
        now = Common.getNowMilliseconds()
        signingInfo.setValidityPeriod(
            ValidityPeriod(now, now + 2 * 365 * 24 * 3600 * 1000.0))

        self.keyChain.sign(cert, signingInfo)
        encodedCert = b64encode(cert.wireEncode().toBytes())
        with open(self.testCertFile, 'w') as certFile:
            certFile.write(Blob(encodedCert, False).toRawStr())

        # Still too early for refresh to pick it up.
        vr = doVerify(self.policyManager, data)

        self.assertTrue(
            vr.hasFurtherSteps,
            "ConfigPolicyManager refresh occured sooner than specified")
        self.assertEqual(
            vr.successCount, 0,
            "ConfigPolicyManager called success callback with pending ValidationRequest"
        )
        self.assertEqual(
            vr.failureCount, 0,
            "ConfigPolicyManager called failure callback with pending ValidationRequest"
        )
        time.sleep(6)

        # Now we should find it.
        vr = doVerify(self.policyManager, data)

        self.assertFalse(
            vr.hasFurtherSteps,
            "ConfigPolicyManager did not refresh certificate store")
        self.assertEqual(
            vr.successCount, 1,
            "Verification success called {} times instead of 1".format(
                vr.successCount))
        self.assertEqual(
            vr.failureCount, 0,
            "ConfigPolicyManager did not verify valid signed data")
Exemplo n.º 58
0
class DPU(object):
    def __init__(self,
                 face,
                 identityName,
                 groupName,
                 catalogPrefix,
                 rawDataPrefix,
                 producerDbFilePath,
                 consumerDbFilePath,
                 encrypted=False):
        self.face = face
        # Set up the keyChain.
        identityStorage = BasicIdentityStorage()
        privateKeyStorage = FilePrivateKeyStorage()
        self.keyChain = KeyChain(
            IdentityManager(identityStorage, privateKeyStorage),
            NoVerifyPolicyManager())
        self.identityName = Name(identityName)
        self.groupName = Name(groupName)
        self.rawDataPrefix = rawDataPrefix
        self.catalogPrefix = catalogPrefix

        self.certificateName = self.keyChain.createIdentityAndCertificate(
            self.identityName)
        self.face.setCommandSigningInfo(self.keyChain, self.certificateName)

        # Set up the memoryContentCache
        self.memoryContentCache = MemoryContentCache(self.face)
        self.memoryContentCache.registerPrefix(self.identityName,
                                               self.onRegisterFailed,
                                               self.onDataNotFound)

        self.producerPrefix = Name(identityName)
        self.producerSuffix = Name()

        self.producer = DPUProducer(face, self.memoryContentCache,
                                    self.producerPrefix, self.producerSuffix,
                                    self.keyChain, self.certificateName,
                                    producerDbFilePath)

        # Put own (consumer) certificate in memoryContentCache
        consumerKeyName = IdentityCertificate.certificateNameToPublicKeyName(
            self.certificateName)
        consumerCertificate = identityStorage.getCertificate(
            self.certificateName, True)

        # TODO: request that this DPU be added as a trusted group member

        self.remainingTasks = dict()

        try:
            os.remove(consumerDbFilePath)
        except OSError:
            # no such file
            pass

        self.consumer = Consumer(face, self.keyChain, self.groupName,
                                 consumerKeyName,
                                 Sqlite3ConsumerDb(consumerDbFilePath))

        # TODO: Read the private key to decrypt d-key...this may or may not be ideal
        base64Content = None
        with open(
                privateKeyStorage.nameTransform(consumerKeyName.toUri(),
                                                ".pri")) as keyFile:
            base64Content = keyFile.read()
        der = Blob(base64.b64decode(base64Content), False)
        self.consumer.addDecryptionKey(consumerKeyName, der)

        self.memoryContentCache.add(consumerCertificate)

        self.encrypted = encrypted

        self.rawData = []

        self.catalogFetchFinished = False
        self.remainingData = 0
        return

    def onDataNotFound(self, prefix, interest, face, interestFilterId, filter):
        print "Data not found for interest: " + interest.getName().toUri()

        if interest.getName().get(
                -3).toEscapedString() == "bout" or interest.getName().get(
                    -3).toEscapedString() == "genericfunctions":
            if interest.getName().toUri() in self.remainingTasks:
                # We are already trying to process this task, so we don't add it to the list of tasks
                pass
            else:
                self.remainingTasks[interest.getName().toUri()] = "in-progress"
        else:
            print "Got unexpected interest: " + interest.getName().toUri()

        # .../SAMPLE/<timestamp>
        timestamp = interest.getName().get(-1)
        catalogInterest = Interest(self.catalogPrefix)

        # Traverse catalogs in range from leftmost child
        catalogInterest.setChildSelector(0)
        catalogInterest.setMustBeFresh(True)
        catalogInterest.setInterestLifetimeMilliseconds(4000)

        exclude = Exclude()
        exclude.appendAny()
        exclude.appendComponent(timestamp)
        catalogInterest.setExclude(exclude)
        self.face.expressInterest(catalogInterest, self.onCatalogData,
                                  self.onCatalogTimeout)
        print "Expressed catalog interest " + catalogInterest.getName().toUri()

        return
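
    # A minimal sketch (not part of the original DPU) of the Exclude-based
    # catalog traversal used above: appendAny() plus appendComponent(timestamp)
    # excludes every catalog name whose next component sorts at or before that
    # timestamp, so the leftmost child selector returns the next catalog.
    @staticmethod
    def _makeNextCatalogInterest(catalogPrefix, lastTimestampComponent):
        catalogInterest = Interest(Name(catalogPrefix))
        exclude = Exclude()
        exclude.appendAny()
        exclude.appendComponent(lastTimestampComponent)
        catalogInterest.setExclude(exclude)
        catalogInterest.setChildSelector(0)  # leftmost (smallest) matching name
        catalogInterest.setMustBeFresh(True)
        catalogInterest.setInterestLifetimeMilliseconds(4000)
        return catalogInterest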

    def onRegisterFailed(self, prefix):
        print "Prefix registration failed"
        return

    def onCatalogData(self, interest, data):
        # Find the next catalog
        print "Received catalog data " + data.getName().toUri()

        catalogTimestamp = data.getName().get(-2)
        exclude = Exclude()
        exclude.appendAny()
        exclude.appendComponent(catalogTimestamp)

        nextCatalogInterest = Interest(interest.getName())
        nextCatalogInterest.setExclude(exclude)
        nextCatalogInterest.setChildSelector(0)
        nextCatalogInterest.setMustBeFresh(True)
        nextCatalogInterest.setInterestLifetimeMilliseconds(2000)
        self.face.expressInterest(nextCatalogInterest, self.onCatalogData,
                                  self.onCatalogTimeout)
        print "Expressed catalog interest " + nextCatalogInterest.getName(
        ).toUri()

        # We ignore the version in the catalog
        if self.encrypted:
            self.consumer.consume(data.getName(),
                                  self.onCatalogConsumeComplete,
                                  self.onConsumeFailed)
        else:
            self.onCatalogConsumeComplete(data, data.getContent())

    def onCatalogConsumeComplete(self, data, result):
        print "Consume complete for catalog name: " + data.getName().toUri()
        catalog = json.loads(result.toRawStr())

        for timestamp in catalog:
            # For encrypted data, timestamp format will have to change
            rawDataName = Name(self.rawDataPrefix).append(
                Schedule.toIsoString(timestamp * 1000))
            dataInterest = Interest(rawDataName)
            dataInterest.setInterestLifetimeMilliseconds(2000)
            dataInterest.setMustBeFresh(True)
            self.face.expressInterest(dataInterest, self.onRawData,
                                      self.onRawDataTimeout)
            self.remainingData += 1
        return
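
    # Hedged illustration (not in the original DPU) of the catalog payload this
    # class assumes: a JSON array of UNIX timestamps in seconds, each mapped to
    # a raw-data name <rawDataPrefix>/<ISO timestamp in milliseconds>. The
    # sample timestamps below are made up for illustration.
    @staticmethod
    def _exampleRawDataNames(rawDataPrefix):
        catalog = json.loads("[1458460800, 1458460802]")
        return [Name(rawDataPrefix).append(Schedule.toIsoString(t * 1000))
                for t in catalog]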

    # TODO: This logic for checking 'if I have everything, and should proceed with all pending tasks' is not correct for the long run
    def onRawDataConsumeComplete(self, data, result):
        resultObject = json.loads(result.toRawStr())
        # TODO: the original data for timestamp should be an array
        self.rawData.append(resultObject)

        self.remainingData -= 1
        print "Remaing data number: " + str(self.remainingData)

        if self.remainingData == 0 and self.catalogFetchFinished:
            self.produce()

        # TODO: Unideal distanceTo production
        for item in self.remainingTasks:
            username = data.getName().get(2).toEscapedString()
            timestamp = Name(item).get(-1).toEscapedString()
            if ("distanceTo" in item and username in item
                    and timestamp in data.getName().toUri()):
                # We want this distanceTo
                destCoordinate = Name(item).get(-2).toEscapedString()
                coordinates = [c.strip() for c in destCoordinate[1:-1].split(",")]
                x = int(coordinates[0])
                y = int(coordinates[1])
                dataObject = json.dumps({
                    "distanceTo":
                    math.sqrt((x - resultObject["lat"]) *
                              (x - resultObject["lat"]) +
                              (y - resultObject["lng"]) *
                              (y - resultObject["lng"]))
                })

                # Name the result after the pending task interest so the
                # memoryContentCache can match it
                taskData = Data(Name(item))
                taskData.getMetaInfo().setFreshnessPeriod(40000000000)
                taskData.setContent(dataObject)
                self.keyChain.sign(taskData)

                # If the interest's still within lifetime, this will satisfy the interest
                self.memoryContentCache.add(taskData)

        return
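
    # A standalone sketch (not in the original DPU) of the distanceTo
    # computation above: parse a "(x,y)" name component and take the Euclidean
    # distance to the reported lat/lng. The coordinate format is an assumption
    # inherited from the task names this DPU expects.
    @staticmethod
    def _distanceTo(destCoordinate, lat, lng):
        coordinates = [c.strip() for c in destCoordinate[1:-1].split(",")]
        x = int(coordinates[0])
        y = int(coordinates[1])
        return math.sqrt((x - lat) ** 2 + (y - lng) ** 2)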

    def onConsumeFailed(self, code, message):
        print "Consume error " + str(code) + ": " + message

    def onRawData(self, interest, data):
        print "Raw data received: " + data.getName().toUri()

        # TODO: Quick hack for deciding if the data is encrypted
        if "zhehao" in data.getName().toUri():
            self.consumer.consume(data.getName(),
                                  self.onRawDataConsumeComplete,
                                  self.onConsumeFailed)
        else:
            print "raw data consume complete"
            self.onRawDataConsumeComplete(data, data.getContent())

        # if self.encrypted:
        #     self.consumer.consume(data.getName(), self.onRawDataConsumeComplete, self.onConsumeFailed)
        # else:
        #     self.onRawDataConsumeComplete(data, data.getContent())

    def onCatalogTimeout(self, interest):
        print "Catalog times out: " + interest.getName().toUri()
        # TODO: 1 timeout would result in this dpu thinking that catalog fetching's done!

        self.catalogFetchFinished = True
        if self.remainingData == 0:
            self.produce()
        return

    def onRawDataTimeout(self, interest):
        print "Raw data times out: " + interest.getName().toUri()
        return

    # TODO: This logic for checking 'if I have everything, and should proceed with all pending tasks' is not correct for the long run
    def produce(self):
        # Produce the bounding box
        print "ready to produce"
        maxLong = -3600
        minLong = 3600
        maxLat = -3600
        minLat = 3600

        if len(self.rawData) == 0:
            print "No raw data as producer input"

        for item in self.rawData:
            print item
            if item["lng"] > maxLong:
                maxLong = item["lng"]
            if item["lng"] < minLong:
                minLong = item["lng"]
            if item["lat"] > maxLat:
                maxLat = item["lat"]
            if item["lat"] < minLat:
                minLat = item["lat"]

        result = json.dumps({
            "maxlng": maxLong,
            "minlng": minLong,
            "maxlat": maxLat,
            "minlat": minLat,
            "size": len(self.rawData)
        })

        if self.encrypted:
            # TODO: replace fixed timestamp for now for produced data, createContentKey as needed
            testTime1 = Schedule.fromIsoString("20160320T080000")
            self.producer.createContentKey(testTime1)
            self.producer.produce(testTime1, result)
        else:
            # Arbitrary produced data lifetime
            data = Data(Name(self.identityName).append("20160320T080000"))
            data.getMetaInfo().setFreshnessPeriod(400000)
            data.setContent(result)

            # If the interest's still within lifetime, this will satisfy the interest
            self.memoryContentCache.add(data)
            print "Produced data with name " + data.getName().toUri()
Exemplo n.º 59
0
class TestFaceRegisterMethods(ut.TestCase):
    def setUp(self):
        self.face_in = Face()
        self.face_out = Face()
        self.keyChain = KeyChain()

    def tearDown(self):
        self.face_in.shutdown()
        self.face_out.shutdown()

    def onInterestEffect(self, prefix, interest, transport, prefixID):
        data = Data(interest.getName())
        data.setContent("SUCCESS")
        self.keyChain.sign(data, self.keyChain.getDefaultCertificateName())
        encodedData = data.wireEncode()
        transport.send(encodedData.toBuffer())

    def test_register_prefix_response(self):
        # Prefix registration commands must be signed, so set the signing info first
        prefixName = Name("/unittest")
        self.face_in.setCommandSigningInfo(
            self.keyChain, self.keyChain.getDefaultCertificateName())

        failedCallback = Mock()
        interestCallback = Mock(side_effect=self.onInterestEffect)

        self.face_in.registerPrefix(prefixName, interestCallback,
                                    failedCallback)
        server = gevent.spawn(self.face_process_events, self.face_in,
                              [interestCallback, failedCallback], 'h')

        gevent.sleep(1)  # give the 'server' time to register the prefix

        # express an interest on another face
        dataCallback = Mock()
        timeoutCallback = Mock()

        # now express an interest on this new face, and see if onInterest is called
        interestName = prefixName.append("hello")
        self.face_out.expressInterest(interestName, dataCallback,
                                      timeoutCallback)

        client = gevent.spawn(self.face_process_events, self.face_out,
                              [dataCallback, timeoutCallback], 'c')

        gevent.joinall([server, client], timeout=10)

        self.assertEqual(failedCallback.call_count, 0,
                         'Failed to register prefix at all')

        self.assertEqual(
            interestCallback.call_count, 1,
            'Expected 1 onInterest callback, got ' +
            str(interestCallback.call_count))

        self.assertEqual(
            dataCallback.call_count, 1,
            'Expected 1 onData callback, got ' + str(dataCallback.call_count))

        onDataArgs = dataCallback.call_args[0]
        # check the message content
        data = onDataArgs[1]
        expectedBlob = Blob(bytearray("SUCCESS"))
        self.assertTrue(
            expectedBlob.equals(data.getContent()),
            'Data received on face does not match expected format')

    def face_process_events(self, face, callbacks, name=None):
        # implemented as a 'greenlet': something like a thread, but semi-synchronous
        # callbacks should be a list
        done = False
        while not done:
            face.processEvents()
            gevent.sleep()
            for c in callbacks:
                if c.call_count > 0:
                    done = True
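
# A hedged sketch of running this test module directly; it assumes a local NFD
# is reachable on the default Unix socket, since both Faces above use it.
if __name__ == '__main__':
    ut.main(verbosity=2)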
Exemplo n.º 60
0
class BmsNode(object):
    def __init__(self):
        self.conf = None
        self._keyChain = None
        self._certificateName = None

        self._dataQueue = dict()
        self._memoryContentCache = None
        self._identityName = None

        self._aggregation = Aggregation()

    def setConfiguration(self, fileName, trustSchemaFile):
        self.conf = BoostInfoParser()
        self.conf.read(fileName)
        self._identityName = Name(self.conf.getNodePrefix())
        self._trustSchemaFile = trustSchemaFile

    def onDataNotFound(self, prefix, interest, face, interestFilterId, filter):
        #print('Data not found for ' + interest.getName().toUri())
        return

    def startPublishing(self):
        # One-time security setup
        self.prepareLogging()

        privateKeyStorage = FilePrivateKeyStorage()
        identityStorage = BasicIdentityStorage()
        policyManager = ConfigPolicyManager(self._trustSchemaFile)

        self._keyChain = KeyChain(IdentityManager(identityStorage, privateKeyStorage), policyManager)
        self._certificateName = self._keyChain.createIdentityAndCertificate(self._identityName)

        print("My Identity name: " + self._identityName.toUri())
        print("My certificate name: " + self._certificateName.toUri())
        certificateData = self._keyChain.getIdentityManager()._identityStorage.getCertificate(self._certificateName, True)
        print("My certificate string: " + b64encode(certificateData.wireEncode().toBuffer()))

        self._loop = asyncio.get_event_loop()
        self._face = ThreadsafeFace(self._loop)
        self._keyChain.setFace(self._face)

        self._face.setCommandSigningInfo(self._keyChain, self._certificateName)
        self._memoryContentCache = MemoryContentCache(self._face)

        # We should only ask for cert to be signed upon the first run of a certain aggregator
        if DO_CERT_SETUP:
            if (KeyLocator.getFromSignature(certificateData.getSignature()).getKeyName().equals(self._certificateName.getPrefix(-1))):
                # Need to configure for mini-ndn; aggregation node runs outside of mini-ndn first so that signed cert get installed and mini-ndn won't ask for this again
                print("certificate " + self._certificateName.toUri() + " asking for signature")
                response = urllib2.urlopen("http://192.168.56.1:5000/bms-cert-hack?cert=" + b64encode(certificateData.wireEncode().toBuffer()) + "&cert_prefix=" + self._identityName.toUri() + '&subject_name=' + self._identityName.toUri()).read()
                
                signedCertData = Data()
                signedCertData.wireDecode(Blob(b64decode(response)))

                self._memoryContentCache.add(signedCertData)
                cmdline = ['ndnsec-install-cert', '-']
                p = subprocess.Popen(cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
                # desanitize + sign in GET request
                cert, err = p.communicate(response)
                if p.returncode != 0:
                    raise RuntimeError("ndnsec-install-cert error")
            else:
                self._memoryContentCache.add(certificateData)
        else:
            self._memoryContentCache.add(certificateData)

        dataNode = self.conf.getDataNode()
        childrenNode = self.conf.getChildrenNode()

        self._memoryContentCache.registerPrefix(Name(self._identityName), self.onRegisterFailed, self.onDataNotFound)

        # For each type of data, we refresh each type of aggregation according to the interval in the configuration
        for i in range(len(dataNode.subtrees)):
            dataType = dataNode.subtrees.keys()[i]
            aggregationParams = self.conf.getProducingParamsForAggregationType(dataNode.subtrees.items()[i][1])

            if childrenNode is None:
                self._dataQueue[dataType] = DataQueue(None, None, None)
                self.generateData(dataType, 2, 0)

            for aggregationType in aggregationParams:
                childrenList = OrderedDict()
                if childrenNode is not None:
                    for j in range(len(childrenNode.subtrees)):
                        childDataNode = childrenNode.subtrees.items()[j][1].subtrees['data']
                        if (dataType in childDataNode.subtrees and
                                aggregationType in childDataNode.subtrees[dataType].subtrees):
                            childrenList[childrenNode.subtrees.items()[j][0]] = \
                                self.conf.getProducingParamsForAggregationType(
                                    childDataNode.subtrees[dataType])[aggregationType]

                self.startPublishingAggregation(aggregationParams[aggregationType], childrenList, dataType, aggregationType)
        return

    def startPublishingAggregation(self, params, childrenList, dataType, aggregationType):
        if __debug__:
            print('Start publishing for ' + dataType + '-' + aggregationType)
        
        # aggregation calculating and publishing mechanism
        publishingPrefix = Name(self._identityName).append(DATA_COMPONENT).append(dataType).append(AGGREGATION_COMPONENT).append(aggregationType)
        self._dataQueue[dataType + aggregationType] = DataQueue(params, childrenList, publishingPrefix)

        if len(childrenList.keys()) == 0:
            # TODO: make start_time optional for leaf nodes
            self._loop.call_later(int(params['producer_interval']), self.calculateAggregation, dataType, aggregationType, childrenList, int(params['start_time']), int(params['producer_interval']), publishingPrefix, True)
        else:
            # express interest for children who produce the same data and aggregation type
            for childName in childrenList.keys():
                name = Name(self._identityName).append(childName).append(DATA_COMPONENT).append(dataType).append(AGGREGATION_COMPONENT).append(aggregationType)
                interest = Interest(name)
                # if start_time is specified, we ask for data starting at start_time; 
                # if not, we ask for the right most child and go from there
                if ('start_time' in childrenList[childName]):
                    endTime = int(childrenList[childName]['start_time']) + int(childrenList[childName]['producer_interval'])
                    interest.getName().append(str(childrenList[childName]['start_time'])).append(str(endTime))
                else:
                    # TODO: For now we are playing with historical data, for each run we don't want to miss any data, thus we start with leftMost
                    interest.setChildSelector(0)
                    interest.setMustBeFresh(True)
                interest.setInterestLifetimeMilliseconds(DEFAULT_INTEREST_LIFETIME)
                if __debug__:
                    print('  Issue interest: ' + interest.getName().toUri())
                self._face.expressInterest(interest, self.onData, self.onTimeout)

        return

    # TODO: once one calculation's decided a child has not answered, we should do another calculation
    def calculateAggregation(self, dataType, aggregationType, childrenList, startTime, interval, publishingPrefix, repeat = False):
        doCalc = True
        dataList = []

        # TODO: an intermediate node cannot produce raw data for now
        if len(childrenList.keys()) != 0:
            for childName in childrenList.keys():
                dataDictKey = self.getDataDictKey(startTime, (startTime + interval), childName)
                if dataDictKey in self._dataQueue[dataType + aggregationType]._dataDict:
                    data = self._dataQueue[dataType + aggregationType]._dataDict[dataDictKey]
                    dataList.append(float(data.getContent().toRawStr()))
                else:
                    #print('Child ' + childName + ' has not replied yet')
                    doCalc = False
                    break
        else:
            for inst in self._dataQueue[dataType]._dataDict.keys():
                if int(inst) >= startTime and int(inst) < startTime + interval:
                    dataList.append(self._dataQueue[dataType]._dataDict[inst])
        if doCalc:
            content = self._aggregation.getAggregation(aggregationType, dataList)
            if content:
                publishData = Data(Name(publishingPrefix).append(str(startTime)).append(str(startTime + interval)))
                publishData.setContent(str(content))
                publishData.getMetaInfo().setFreshnessPeriod(DEFAULT_DATA_LIFETIME)
                self._keyChain.sign(publishData, self._certificateName)
                self._memoryContentCache.add(publishData)
                for childName in childrenList.keys():
                    dataDictKey = self.getDataDictKey(startTime, (startTime + interval), childName)
                    if dataDictKey in self._dataQueue[dataType + aggregationType]._dataDict:
                        del self._dataQueue[dataType + aggregationType]._dataDict[dataDictKey]
                if __debug__:
                    print("Produced: " + publishData.getName().toUri() + "; " + publishData.getContent().toRawStr())

        # repetition of this function only happens for raw data producer, otherwise calculateAggregation is called by each onData
        if repeat:
            self._loop.call_later(interval, self.calculateAggregation, dataType, aggregationType, childrenList, startTime + interval, interval, publishingPrefix, repeat)
        return
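
    # A minimal sketch (not in the original class) of the published-name layout
    # used above and parsed back in onData():
    # <publishingPrefix>/<startTime>/<endTime>, where publishingPrefix already
    # ends in .../<DATA_COMPONENT>/<dataType>/<AGGREGATION_COMPONENT>/<aggType>.
    @staticmethod
    def _aggregationDataName(publishingPrefix, startTime, interval):
        return Name(publishingPrefix).append(str(startTime)).append(
            str(startTime + interval))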

    def generateData(self, dataType, interval, startTime):
        self._dataQueue[dataType]._dataDict[str(startTime)] = random.randint(0,9)
        self._loop.call_later(interval, self.generateData, dataType, interval, startTime + interval)
        return

    def onRegisterFailed(self, prefix):
        raise RuntimeError("Register failed for prefix", prefix.toUri())

    def onVerified(self, data):
        print('Data verified: ' + data.getName().toUri())
        return

    def onVerifyFailed(self, data):
        print('Data verification failed: ' + data.getName().toUri())
        return

    def onData(self, interest, data):
        self._keyChain.verifyData(data, self.onVerified, self.onVerifyFailed)

        dataName = data.getName()
        dataQueue = None

        if __debug__:
            print("Got data: " + dataName.toUri() + "; " + data.getContent().toRawStr())
        for i in range(0, len(dataName)):
            if dataName.get(i).toEscapedString() == AGGREGATION_COMPONENT:
                dataType = dataName.get(i - 1).toEscapedString()
                aggregationType = dataName.get(i + 1).toEscapedString()
                
                startTime = int(dataName.get(i + 2).toEscapedString())
                endTime = int(dataName.get(i + 3).toEscapedString())
                childName = dataName.get(i - 3).toEscapedString()

                dataAndAggregationType = dataType + aggregationType
                
                dataDictKey = self.getDataDictKey(startTime, endTime, childName)
                dataQueue = self._dataQueue[dataAndAggregationType]
                dataQueue._dataDict[dataDictKey] = data
                break

        # TODO: check what if interval/starttime is misconfigured
        if dataQueue:
            self.calculateAggregation(dataType, aggregationType, dataQueue._childrenList, startTime, endTime - startTime, dataQueue._publishingPrefix)

        # Always ask for the next piece of data when we receive this one; assumes interval does not change; this also assumes there are no more components after endTime
        #newInterestName = dataName.getPrefix(i + 2).append(str(endTime)).append(str(endTime + (endTime - startTime)))
        
        # We don't expect aggregated data name to be continuous within our given time window, so we ask with exclusion instead
        newInterestName = dataName.getPrefix(i + 2)
        newInterest = Interest(interest)
        newInterest.setName(newInterestName)
        newInterest.setChildSelector(0)

        exclude = Exclude()
        exclude.appendAny()
        exclude.appendComponent(dataName.get(i + 2))
        newInterest.setExclude(exclude)

        self._face.expressInterest(newInterest, self.onData, self.onTimeout)
        if __debug__:
            print("  issue interest: " + interest.getName().toUri())

        return

    def onTimeout(self, interest):
        if __debug__:
            print("  interest timeout: " + interest.getName().toUri() + "; reexpress")
            pass
        self._face.expressInterest(interest, self.onData, self.onTimeout)
        return

    def stop(self):
        self._loop.stop()
        if __debug__:
            print("Stopped")
        return
    
    # This creation of dataDictKey means parent and child should not have the same name            
    @staticmethod
    def getDataDictKey(startTime, endTime, childName):
        return str(startTime) + '/' + str(endTime) + '/' + childName
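
    # Hedged example of the key format produced above:
    #   getDataDictKey(0, 10, 'child1') == '0/10/child1'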

##
# Logging
##
    def prepareLogging(self):
        self.log = logging.getLogger(str(self.__class__))
        self.log.setLevel(logging.DEBUG)
        logFormat = "%(asctime)-15s %(name)-20s %(funcName)-20s (%(levelname)-8s):\n\t%(message)s"
        self._console = logging.StreamHandler()
        self._console.setFormatter(logging.Formatter(logFormat))
        self._console.setLevel(logging.INFO)
        # without this, a lot of ThreadsafeFace errors get swallowed up
        logging.getLogger("trollius").addHandler(self._console)
        self.log.addHandler(self._console)

    def setLogLevel(self, level):
        """
        Set the log level that will be output to standard error
        :param level: A log level constant defined in the logging module (e.g. logging.INFO) 
        """
        self._console.setLevel(level)

    def getLogger(self):
        """
        :return: The logger associated with this node
        :rtype: logging.Logger
        """
        return self.log