Example #1
def process_stream(streamstring):
    streamjson = ast.literal_eval(streamstring)
    pipeline1 = streamjson["pipeline"]+"/1"
    stream = streamjson['source']['uri']
    print("VA feeder: stream: "+stream, flush=True)
    init_stream = None
    zk_path = None
    if 'uri-init' in streamjson['source']:
        init_stream = streamjson['source']['uri-init']
        print("VA feeder: init_stream: "+init_stream, flush=True)
        zk_path = stream+"/"+pipeline1

    m1 = re.search(r"(.*)/.*_([0-9]+\.ts)$", stream)
    if m1:
        segment = stream.split('/')[-1].split('_')[-1]
        zk_path = m1.group(1)+"/"+segment+"/"+pipeline1

    print("VA feeder: zk_path "+zk_path, flush=True)
    zk = ZKState(zk_path)
    if zk.processed():
        print("VA feeder: " + stream + " already complete", flush=True)
        zk.close()
        return
    if zk.process_start():
        merged_segment = None
        if init_stream:
            merged_segment = merge.create_merged_segment(init_stream, stream)
            if merged_segment:
                stream = "file://" + merged_segment
                print("VA feeder: video-analytics merged segment: " +
                      stream, flush=True)
        
        # run the analytics pipeline on the (possibly merged) segment and
        # publish the inference results to the Kafka destination topic
        fps = va.loop({
            "source": {
                "uri": stream,
                "type": "uri"
            },
            "destination": {
                "type": "kafka",
                "host": socket.gethostbyname("kafka-service") + ":9092",
                "topic": "seg_analytics_data"
            },
            "tags": streamjson["tags"],
            "parameters": streamjson["parameters"],
        }, streamjson["pipeline"])
        if fps < 0:
            zk.process_abort()
        else:
            zk.process_end()
            # report the measured throughput for this segment
            p.send(video_analytics_fps_topic, json.dumps({
                "fps": fps,
                "machine": machine_prefix + socket.gethostname()[0:3],
                "time": datetime.datetime.utcnow().isoformat(),
            }))
            
        if merged_segment:
            merge.delete_merged_segment(merged_segment)
    zk.close()
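
The example above processes one segment description per call; in the full service those calls would typically come from a Kafka consumer loop. A minimal sketch of such a feeder loop, assuming the kafka-python client and a hypothetical input topic and consumer group (neither is named in the example):

from kafka import KafkaConsumer
import socket

consumer = KafkaConsumer(
    "seg_analytics_sched",          # hypothetical input topic
    bootstrap_servers=socket.gethostbyname("kafka-service") + ":9092",
    group_id="va-feeder",           # hypothetical consumer group
    value_deserializer=lambda v: v.decode("utf-8"),
)

for message in consumer:
    # each message value is a dict literal describing one segment to analyze
    process_stream(message.value)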
Example #2
def process_stream(streamstring):
    streamjson = ast.literal_eval(streamstring)
    if 'source' not in streamjson:
        print("VA feeder: missing source object in input ", flush=True)
        return
    if 'pipeline' not in streamjson:
        print("VA feeder: missing pipeline in input", flush=True)
        return
    if 'uri' not in streamjson['source']:
        print("VA feeder: missing uri in source", flush=True)
        return
    pipeline = streamjson["pipeline"]+"/1"
    tags = {}
    if 'tags' in streamjson:
        tags = streamjson["tags"]
    parameters = {}
    if 'parameters' in streamjson:
        parameters = streamjson["parameters"]

    stream = streamjson['source']['uri']
    print("VA feeder: stream: "+stream, flush=True)
    if not stream:
        print("VA feeder: empty uri", flush=True)
        return

    init_stream = None
    zk_path = None
    if 'uri-init' in streamjson['source']:
        init_stream = streamjson['source']['uri-init']
        print("VA feeder: init_stream: "+init_stream, flush=True)
        zk_path = stream+"/"+pipeline

    m1 = re.search(r"(.*)/.*_([0-9]+\.ts)$", stream)
    if m1:
        segment = stream.split('/')[-1].split('_')[-1]
        zk_path = m1.group(1)+"/"+segment+"/"+pipeline

    print("VA feeder: zk_path "+zk_path, flush=True)
    zk = ZKState(zk_path)
    if zk.processed():
        print("VA feeder: " + stream + " already complete", flush=True)
        zk.close()
        return
    if zk.process_start():
        merged_segment = None
        if init_stream:
            merged_segment = merge.create_merged_segment(init_stream, stream)
            if merged_segment:
                stream = "file://" + merged_segment
                print("VA feeder: video-analytics merged segment: " +
                      stream, flush=True)
        print("VA feeder: start analytic ", flush=True)
        instanceid = start_analytic(stream, pipeline, tags, parameters)
        if instanceid:
            print("VA feeder: waiting for analytics to complete for stream: " +
                  stream + " analytics-instance-id: "+instanceid, flush=True)
            while True:
                time.sleep(sleep_for_status)
                status, fps = get_analytic_status(instanceid.strip(), pipeline)
                print("VA feeder: segment status : " + status, flush=True)
                send_video_analytics_fps(fps)
                if status == 'COMPLETED':
                    zk.process_end()
                    break
                elif status in ('RUNNING', 'QUEUED'):
                    continue
                else:
                    print("VA feeder: segment processing failed", flush=True)
                    zk.process_abort()
                    break
        if merged_segment:
            merge.delete_merged_segment(merged_segment)
    zk.close()
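
Unlike Example #1, this variant delegates pipeline start and polling to two helpers, start_analytic and get_analytic_status, which are not shown on this page. A hedged sketch of what they might look like, assuming the analytics container exposes a VA-Serving-style REST API; the service name, port, URL layout, and the "state"/"avg_fps" response fields are assumptions, not confirmed by the example:

import requests

VA_HOST = "http://video-analytics-service:8080"   # hypothetical host and port

def start_analytic(stream, pipeline, tags, parameters):
    # submit the segment to the analytics service; returns the instance id on success
    body = {
        "source": {"uri": stream, "type": "uri"},
        "destination": {
            "type": "kafka",
            "host": "kafka-service:9092",
            "topic": "seg_analytics_data",
        },
        "tags": tags,
        "parameters": parameters,
    }
    r = requests.post(VA_HOST + "/pipelines/" + pipeline, json=body, timeout=10)
    return r.text if r.status_code == 200 else None

def get_analytic_status(instanceid, pipeline):
    # poll the running instance; returns (state, fps)
    r = requests.get(VA_HOST + "/pipelines/" + pipeline + "/" + instanceid + "/status",
                     timeout=10)
    if r.status_code != 200:
        return "ERROR", 0
    status = r.json()
    return status.get("state", "ERROR"), status.get("avg_fps", 0)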
Example #3
def process_stream(streamstring):
    streamjson = ast.literal_eval(streamstring)
    pipeline1 = streamjson["pipeline"] + "/1"
    stream = streamjson['source']['uri']
    user = streamjson["user_info"]["name"]
    elapsed_time = time.time() - streamjson["start_time"]
    print("VA feeder: stream: " + stream + " " + user +
          " elapsed-time on kafka queue:" + str(elapsed_time), flush=True)

    zk_path = None
    init_stream = None
    if 'uri-init' in streamjson['source']:
        init_stream = streamjson['source']['uri-init']

    m1 = re.search(r'(dash/.*)/chunk-stream[0-9]*-([0-9]*\.m4s)$', stream)
    if m1:
        zk_path = "/analytics/" + \
            m1.group(1) + "/" + m1.group(2) + "/" + streamjson["pipeline"]

    m1 = re.search("(hls/.*)/[0-9]*p_([0-9]*.ts)$", stream)
    if m1:
        zk_path = "/analytics/" + \
            m1.group(1) + "/" + m1.group(2) + "/" + streamjson["pipeline"]
    print("zk path: " + zk_path, flush=True)

    zk = ZKState(zk_path)
    if zk.processed():
        print("VA feeder: " + user + " " + stream +
              " already complete", flush=True)
        zk.close()
        return

    if zk.process_start():
        merged_segment = None
        if init_stream:
            merged_segment = merge.create_merged_segment(init_stream, stream)
            if merged_segment:
                stream = "file://" + merged_segment
                print("VA feeder: video-analytics merged segment: " +
                      stream, flush=True)

        fps = va.loop({
            "source": {
                "uri": stream,
                "type": "uri"
            },
            "destination": {
                "type": "kafka",
                "host": socket.gethostbyname("kafka-service") + ":9092",
                "topic": "seg_analytics_data"
            },
            "tags": streamjson["tags"],
            "parameters": streamjson["parameters"],
            "user": user,
            "start_time": streamjson["start_time"],
        }, streamjson["pipeline"])

        if fps < 0:
            zk.process_abort()
        else:
            zk.process_end()

        if fps > 0:
            global global_total_fps, global_seg_count
            global_total_fps = global_total_fps + fps
            global_seg_count = global_seg_count + 1
            avg_fps = global_total_fps / global_seg_count
            print("VA statistics : " + "avg_fps " + str(avg_fps) + " " +
                  str(global_total_fps) + " " + str(global_seg_count), flush=True)

        if merged_segment:
            merge.delete_merged_segment(merged_segment)
    zk.close()
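
The two regular expressions in this example derive a per-segment ZooKeeper path from either a DASH or an HLS segment URI. A small illustration with made-up URIs (the real ones come from the transcoding stage) and a hypothetical pipeline name:

import re

examples = [
    "http://cdn/dash/stream42/chunk-stream0-00012.m4s",   # DASH segment
    "http://cdn/hls/stream42/360p_00012.ts",              # HLS segment
]
pipeline = "object_detection"                              # hypothetical pipeline name

for stream in examples:
    m = re.search(r"(dash/.*)/chunk-stream[0-9]*-([0-9]*\.m4s)$", stream)
    if not m:
        m = re.search(r"(hls/.*)/[0-9]*p_([0-9]*\.ts)$", stream)
    if m:
        zk_path = "/analytics/" + m.group(1) + "/" + m.group(2) + "/" + pipeline
        print(zk_path)
        # -> /analytics/dash/stream42/00012.m4s/object_detection
        # -> /analytics/hls/stream42/00012.ts/object_detection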