import json
import time

from pyndn import Face, Name
from pyndn.util import Blob
from pyndn.security import KeyChain
from pycnl import Namespace
from pycnl.generalized_object import GeneralizedObjectStreamHandler

# NOTE: SegmentLabel and the dump() helper are local to this application; the
# import path below is an assumption about where they are defined.
from segment_label import SegmentLabel, dump


def main(index_f, weight_f, config_f, consumerMode, th, fetchPrefix,
         publishPrefix):
    # The default Face will connect using a Unix socket, or to "localhost".
    instance_prefix = fetchPrefix.split("/")[-1]
    sl = SegmentLabel(index_f, weight_f, instance_prefix, th)

    if config_f != "":
        sl.readConfig(config_f)

    face = Face()
    keyChain = KeyChain()
    face.setCommandSigningInfo(keyChain, keyChain.getDefaultCertificateName())

    #stream_annConsumer_test = Namespace("/ndn/eb/stream/run/28/annotations")
    #stream_annConsumer_test.setFace(face)
    print(' > Will fetch from ' + str(fetchPrefix))
    stream_annConsumer_show = Namespace(fetchPrefix)
    stream_annConsumer_show.setFace(face)

    # Create (or clear) the log file at startup.
    log_f = open("seglab_log.txt", "w")
    log_f.close()

    stream_segProducer = Namespace(
        Name(publishPrefix).append(Name(fetchPrefix)[-1]), keyChain)
    print(' > Will publish segments under ' +
          str(stream_segProducer.getName()))
    publish_handler = GeneralizedObjectStreamHandler()
    # publish_handler.setLatestPacketFreshnessPeriod(30)
    stream_segProducer.setHandler(publish_handler)

    stream_segProducer.setFace(
        face,
        lambda prefixName: dump("Register failed for prefix", prefixName),
        lambda prefixName, whatever: dump("Register success for prefix",
                                          prefixName))

    def onNewAnnotation(sequenceNumber, contentMetaInfo, objectNamespace):
        # Called for each generalized object (annotation) fetched from the stream.
        ann = str(objectNamespace.obj)

        if "error" not in ann:
            jsonAnn = json.loads(ann)
            # print(jsonAnn["frameName"])
            segment_result = sl.sceneDetection(jsonAnn)
            if segment_result:
                print(segment_result)
                #dump("Got generalized object, sequenceNumber", sequenceNumber,
                #     ", content-type", contentMetaInfo.getContentType(), ":",
                #     str(jsonAnn["frameName"]), 'at', str(time.time()))

                publish_handler.addObject(Blob(json.dumps(segment_result)),
                                          "application/json")
                print(" > PUBLISHED SCENE " +
                      str(publish_handler.getProducedSequenceNumber()))

                # Log the published result; append so earlier entries are kept.
                with open("seglab_log.txt", "a") as f:
                    f.write("PUBLISHED SCENE: %s" %
                            str(publish_handler.getProducedSequenceNumber()))
                    f.write("%s\r\n" % segment_result)

    pipelineSize = 0

    #if consumerMode == 'default':
    #    stream_annConsumer_default.setHandler(
    #      GeneralizedObjectStreamHandler(pipelineSize, onNewAnnotation)).objectNeeded()

    stream_annConsumer_show.setHandler(
        GeneralizedObjectStreamHandler(pipelineSize,
                                       onNewAnnotation)).objectNeeded()

    #stream_annConsumer_test.setHandler(
    #    GeneralizedObjectStreamHandler(pipelineSize, onNewAnnotation)).objectNeeded()

    while True:
        face.processEvents()
        # We need to sleep for a few milliseconds so we don't use 100% of the CPU.
        time.sleep(0.01)
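
# A minimal sketch of a command-line entry point for this example, assuming the
# module is run as a script. The flag names and default values below are
# assumptions for illustration, not part of the original application.
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(
        description='Fetch annotations, run scene segmentation, publish scenes.')
    parser.add_argument('--index', dest='index_f', required=True)
    parser.add_argument('--weights', dest='weight_f', required=True)
    parser.add_argument('--config', dest='config_f', default='')
    parser.add_argument('--consumer-mode', dest='consumerMode', default='show')
    parser.add_argument('--threshold', dest='th', type=float, default=0.5)
    parser.add_argument('--fetch-prefix', dest='fetchPrefix', required=True)
    parser.add_argument('--publish-prefix', dest='publishPrefix', required=True)
    args = parser.parse_args()

    main(args.index_f, args.weight_f, args.config_f, args.consumerMode,
         args.th, args.fetchPrefix, args.publishPrefix)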
Example #2
import json
import time

from pyndn import Face, Name
from pyndn.util import Blob
from pyndn.security import KeyChain
from pycnl import Namespace
from pycnl.generalized_object import GeneralizedObjectStreamHandler

# NOTE: PlayDetect and the dump() helper are local to this application; the
# import path below is an assumption about where they are defined.
from play_detect import PlayDetect, dump


def main(index_f, weight_f, consumerMode, k, query_interval, fetchPrefix,
         publishPrefix):
    # The default Face will connect using a Unix socket, or to "localhost".
    instance_prefix = fetchPrefix.split("/")[-1]
    pd = PlayDetect(index_f, weight_f, instance_prefix, k, query_interval)

    face = Face()
    keyChain = KeyChain()
    face.setCommandSigningInfo(keyChain, keyChain.getDefaultCertificateName())

    # sceneConsumer = Namespace("/ndn/eb/stream/run/28/annotation")
    engine = str(Name(fetchPrefix)[-1])
    sceneFetchPrefix = Name('/eb/seglab').append(engine)

    print(' > Will fetch annotations from ' + fetchPrefix)
    print(' > Will fetch scenes from ' + sceneFetchPrefix.toUri())

    sceneConsumer = Namespace(sceneFetchPrefix)
    sceneConsumer.setFace(face)

    annotationsConsumer = Namespace(fetchPrefix)

    #if consumerMode == "test":
    #    annotationsConsumer = Namespace("/ndn/eb/stream/run/28/annotations")
    #elif consumerMode == "default":
    #    annotationsConsumer = Namespace('/eb/proto/test/ml_processing/yolo_default')

    annotationsConsumer.setFace(face)

    # Create (or clear) the log file at startup.
    log_f = open("playdetect_log.txt", "w")
    log_f.close()

    playdetectProducer = Namespace(
        Name(publishPrefix).append(engine), keyChain)
    print(' > Will publish playdetect data under ' +
          playdetectProducer.getName().toUri())

    playdSegmentsHandler = GeneralizedObjectStreamHandler()
    # set freshness to 30
    # playdSegmentsHandler.setLatestPacketFreshnessPeriod(30)
    playdetectProducer.setHandler(playdSegmentsHandler)

    playdetectProducer.setFace(
        face,
        lambda prefixName: dump("Register failed for prefix", prefixName),
        lambda prefixName, whatever: dump("Register success for prefix",
                                          prefixName))

    def onNewScene(sequenceNumber, contentMetaInfo, objectNamespace):
        dump("Got scene (segment) :", str(objectNamespace.getName()))

        if str(objectNamespace.obj):
            # Store scene segment AND scene segment NAME into a database
            sceneSegmentName = objectNamespace.getName()
            sceneSegment = json.loads(str(objectNamespace.obj))
            pd.storeToDatabase(sceneSegmentName, sceneSegment)

    def onNewAnnotation(sequenceNumber, contentMetaInfo, objectNamespace):
        # Called for each annotation fetched from the annotations stream.
        # dump("Got new annotation")
        stringObj = str(objectNamespace.obj)
        # print(stringObj)
        if stringObj and pd.itIsTimeToQueryDatabase():
            # TBD: make the query interval configurable.
            # Run the query against the database, using the received annotation.
            # The result should be a list of scene segment names (see above).
            # FOR NOW: include startFrame and endFrame in the results.
            # Most likely, parameterize the query, e.g. pass a maxResultNum argument.
            result = pd.pickTops(json.loads(stringObj), k)
            if result:
                playdSegmentsHandler.addObject(Blob(json.dumps(result)),
                                               "application/json")

                print("PUBLISHED SIMILAR SCENES: %s" %
                      str(playdSegmentsHandler.getProducedSequenceNumber()))

                # Log the published result; append so earlier entries are kept.
                with open("playdetect_log.txt", "a") as f:
                    f.write("PUBLISHED SIMILAR SCENES: %s" % str(
                        playdSegmentsHandler.getProducedSequenceNumber()))
                    f.write("%s\r\n" % result)

    pipelineSize_segConsume = 3
    sceneConsumer.setHandler(
        GeneralizedObjectStreamHandler(pipelineSize_segConsume,
                                       onNewScene)).objectNeeded()

    pipelineSize_annoConsume = 3
    annotationsConsumer.setHandler(
        GeneralizedObjectStreamHandler(pipelineSize_annoConsume,
                                       onNewAnnotation)).objectNeeded()

    while True:
        face.processEvents()
        # We need to sleep for a few milliseconds so we don't use 100% of the CPU.
        time.sleep(0.01)
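
# A minimal consumer sketch for the similar-scenes results that the producer
# above publishes, using the same PyCNL pattern as the examples (Namespace plus
# GeneralizedObjectStreamHandler with a callback). The resultPrefix argument is
# whatever publishPrefix/<engine> the producer registered; the function name
# and the pipeline size of 3 are assumptions for illustration, not part of the
# original application.
def consume_playdetect_results(resultPrefix):
    import time
    from pyndn import Face
    from pycnl import Namespace
    from pycnl.generalized_object import GeneralizedObjectStreamHandler

    face = Face()
    results = Namespace(resultPrefix)
    results.setFace(face)

    def onNewResult(sequenceNumber, contentMetaInfo, objectNamespace):
        # Each generalized object is the JSON list produced by pd.pickTops().
        print('Result #%d: %s' % (sequenceNumber, str(objectNamespace.obj)))

    # objectNeeded() starts fetching the generalized object stream.
    results.setHandler(
        GeneralizedObjectStreamHandler(3, onNewResult)).objectNeeded()

    while True:
        face.processEvents()
        # Sleep briefly so the event loop doesn't use 100% of the CPU.
        time.sleep(0.01)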