Example #1
def get_streams():
    try:
        available_videos = [
            f for f in listdir(ARCHIVE_ROOT) if f.endswith((".mp4", ".avi"))
        ]
    except OSError:
        return None

    streams = []
    types = [("hls", ".m3u8"), ("dash", ".mpd")]
    for f in available_videos:
        for t in types:
            url = t[0] + "/" + f + "/index" + t[1]
            zk = ZKState(BASE_PATH + url)
            streams.append({
                "name": t[0] + "-" + f,
                "url": url,
                "img": "thumbnail/" + f + ".png",
                "type": t[0],
                "file": f,
                "zk": {
                    "path": zk.get_path(),
                    "state": zk.get_state()
                }
            })

    return streams

    # handler method from the same source; its enclosing request-handler class is omitted in this excerpt
    def get(self):
        streams = get_streams()
        if streams is None:
            print("Error while parsing folders", flush=True)
            self.set_status(500, "Internal Error")
            return

        for s in streams:
            zk = ZKState(s["zk"]["path"])
            if zk.clear():
                root = DASH_ROOT if s["type"] == "dash" else HLS_ROOT
                r = call(["rm", "-rf", root + "/" + s["file"]])
                if r != 0:
                    print("Failed to rm: " + s["name"], flush=True)
                    s["zk"]["state"] = "Failed"
                else:
                    s["zk"]["state"] = "Cleared"
            else:
                print("Failed to clear or not processed: " + s["zk"]["path"],
                      flush=True)

        self.set_status(200, "OK")
        self.set_header("Content-Type", "application/json")
        self.write(json.dumps(streams))
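
All of the examples in this collection gate their work on a ZKState object exposing processed()/process_start()/process_end()/process_abort()/close() (Example #1 also uses get_path(), get_state() and clear(), which are omitted here). The class itself is not part of these excerpts; the following is only a minimal sketch of that interface, assuming a kazoo-backed ZooKeeper store and a hypothetical host name, not the implementation the examples actually use.

# Minimal sketch of the ZKState interface assumed by the examples; the real class,
# its node layout and its connection/retry configuration are not shown in the excerpts.
from kazoo.client import KazooClient
from kazoo.exceptions import NodeExistsError


class ZKState:
    def __init__(self, path, name=None):
        prefix = path + ("/" + name if name else "")
        self._done = prefix + "/complete"
        self._lock = prefix + "/processing"
        self._zk = KazooClient(hosts="zookeeper-service:2181")  # hypothetical host
        self._zk.start()
        self._zk.ensure_path(prefix)

    def processed(self):
        # True once some worker has already marked this item complete
        return self._zk.exists(self._done) is not None

    def process_start(self):
        if self.processed():
            return False
        try:
            # an ephemeral node acts as a lock that disappears if the worker dies
            self._zk.create(self._lock, ephemeral=True)
            return True
        except NodeExistsError:
            return False

    def process_end(self):
        self._zk.create(self._done)
        self._zk.delete(self._lock)

    def process_abort(self):
        # release the lock without marking completion so another worker can retry
        self._zk.delete(self._lock)

    def close(self):
        self._zk.stop()
        self._zk.close()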
Example #3
def process_stream(streamstring):
    streamjson = ast.literal_eval(streamstring)
    if 'source' not in streamjson:
        print("VA feeder: missing source object in input ", flush=True)
        return
    if 'pipeline' not in streamjson:
        print("VA feeder: missing pipeline in input", flush=True)
        return
    if 'uri' not in streamjson['source']:
        print("VA feeder: missing uri in source", flush=True)
        return
    pipeline = streamjson["pipeline"]+"/1"
    tags = {}
    if 'tags' in streamjson:
        tags = streamjson["tags"]
    parameters = {}
    if 'parameters' in streamjson:
        parameters = streamjson["parameters"]

    stream = streamjson['source']['uri']
    print("VA feeder: stream: "+stream, flush=True)
    if not stream:
        print("VA feeder: empty uri", flush=True)
        return

    init_stream = None
    zk_path = None
    if 'uri-init' in streamjson['source']:
        init_stream = streamjson['source']['uri-init']
        print("VA feeder: init_stream: "+init_stream, flush=True)
        zk_path = stream+"/"+pipeline

    m1 = re.search("(.*)/.*_([0-9]+.ts)$", stream)
    if m1:
        segment = stream.split('/')[-1].split('_')[-1]
        zk_path = m1.group(1)+"/"+segment+"/"+pipeline

    print("VA feeder: zk_path "+zk_path, flush=True)
    zk = ZKState(zk_path)
    if zk.processed():
        print("VA feeder: " + stream + " already complete", flush=True)
        zk.close()
        return
    if zk.process_start():
        merged_segment = None
        if init_stream:
            merged_segment = merge.create_merged_segment(init_stream, stream)
            if merged_segment:
                stream = "file://" + merged_segment
                print("VA feeder: video-analytics merged segment: " +
                      stream, flush=True)
        print("VA feeder: start analytic ", flush=True)
        instanceid = start_analytic(stream, pipeline, tags, parameters)
        if instanceid:
            print("VA feeder: waiting for analytics to complete for stream: " +
                  stream + " analytics-instance-id: "+instanceid, flush=True)
            while True:
                time.sleep(sleep_for_status)
                status, fps = get_analytic_status(instanceid.strip(), pipeline)
                print("VA feeder: segment status : " + status, flush=True)
                send_video_analytics_fps(fps)
                if status == 'COMPLETED':
                    zk.process_end()
                    break
                elif status == 'RUNNING':
                    continue
                elif status == 'QUEUED':
                    continue
                else:
                    print("VA feeder: segment processing failed", flush=True)
                    zk.process_abort()
                    break
        if merged_segment:
            merge.delete_merged_segment(merged_segment)
    zk.close()
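
Example #3 drives an external analytics service through start_analytic() and get_analytic_status(), neither of which is included in the excerpt; get_analytic_status() is expected to return a (status, fps) pair with states such as QUEUED, RUNNING and COMPLETED. The sketch below shows what such helpers might look like against a REST-style pipeline server; the base URL, endpoint paths and response fields are assumptions, not confirmed by the source.

# Hypothetical helpers for Example #3, assuming a REST-style pipeline server at
# va_serving_host; the endpoint layout and JSON fields below are assumptions.
import requests

va_serving_host = "http://localhost:8080"  # assumed service location

def start_analytic(stream, pipeline, tags, parameters):
    body = {
        "source": {"uri": stream, "type": "uri"},
        "destination": {"type": "kafka", "host": "kafka-service:9092",
                        "topic": "seg_analytics_data"},
        "tags": tags,
        "parameters": parameters,
    }
    r = requests.post(va_serving_host + "/pipelines/" + pipeline, json=body)
    if r.status_code != 200:
        return None
    return r.text.strip()  # pipeline instance id

def get_analytic_status(instanceid, pipeline):
    r = requests.get(va_serving_host + "/pipelines/" + pipeline + "/" +
                     instanceid + "/status")
    if r.status_code != 200:
        return "ERROR", 0
    status = r.json()
    return status.get("state", "ERROR"), status.get("avg_fps", 0)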
Example #4
def ADTranscode(kafkamsg, db):
    msg = KafkaMsgParser(kafkamsg)
    # add a zk state per resolution file if the ad clip is generated one resolution at a time
    zk = ZKState(msg.target_path, msg.target_name)

    if zk.processed():
        print("AD transcoding finish the clip :", msg.target, flush=True)
        zk.close()
        return

    if zk.process_start():
        try:
            print("mkdir -p " + msg.target_path, flush=True)
            makedirs(msg.target_path)
        except OSError as exc:  # Python >2.5 (except OSError, exc: for Python <2.5)
            if exc.errno == errno.EEXIST and isdir(msg.target_path):
                pass
            else:
                raise

        # copy static ADs to fill all resolutions
        CopyADStatic(msg)

        stream = ADClipDecision(msg, db)
        if not stream:
            print("Query AD clip failed and fall back to skipped ad clip!",
                  flush=True)
            # mark zk as incomplete (so that the valid one can be generated next time)
            zk.process_abort()
            zk.close()
            return

        # try to re-generate resolution specific AD
        SignalIncompletion(msg.target)

        try:
            # only generate one resolution for ad segment, if not generated, ad will fall back to skipped ad.
            cmd = GetABRCommand(stream,
                                msg.target_path,
                                msg.streaming_type,
                                msg.GetRedition(),
                                duration=msg.segment_duration,
                                fade_type="audio",
                                content_type="ad")
            process_id = subprocess.Popen(cmd, stdout=subprocess.PIPE)
            # block here until the transcode subprocess finishes before marking completion
            process_id.wait()
            SignalCompletion(msg.target)
            zk.process_end()
        except Exception as e:
            print(str(e))
            CopyADStatic(msg)
            zk.process_abort()
    zk.close()
Example #5
def process_stream(stream):
    streams = []
    stream_name = stream.split("/")[1]
    config = configparser.ConfigParser()
    config.read('config.ini')
    src_mode = config.get('mode', 'srcMode')
    src_path = config.get('path', 'srcPath')
    src_protocol = ""
    src_api = ""
    if src_mode == "local":
        streams = listdir(src_path)
    elif src_mode == "live":
        html = urlopen("http://" + src_path)
        soup = BeautifulSoup(html, 'html.parser')
        src_protocol = "rtmp://"
        src_api = src_mode
        for item in soup.findAll('a')[1:]:
            streams.append(item.get('href'))
    else:
        return

    if stream_name not in streams:
        return

    if src_mode == "live":
        stream_name = stream_name[:stream_name.index('.')]

    zk = ZKState(src_protocol + src_path + "/" + src_api + "/" +
                 stream.replace("/", "_"))
    if zk.processed():
        zk.close()
        return

    if stream.endswith(".mpd"):
        try:
            mkdir(DASH_ROOT + "/" + stream_name)
        except Exception as e:
            print(str(e))

        if zk.process_start():
            try:
                cmd = GetABRCommand(
                    src_protocol + src_path + "/" + src_api + "/" +
                    stream_name, DASH_ROOT + "/" + stream_name, "dash")
                r = call(cmd)
                if r:
                    raise Exception("status code: " + str(r))
                zk.process_end()
            except Exception as e:
                print(str(e))
                zk.process_abort()
    if stream.endswith(".m3u8"):
        try:
            mkdir(HLS_ROOT + "/" + stream_name)
        except Exception as e:
            print(str(e))

        if zk.process_start():
            try:
                cmd = GetABRCommand(
                    src_protocol + src_path + "/" + src_api + "/" +
                    stream_name, HLS_ROOT + "/" + stream_name, "hls")
                r = call(cmd)
                if r:
                    raise Exception("status code: " + str(r))
                zk.process_end()
            except Exception as e:
                print(str(e))
                zk.process_abort()

    zk.close()
Example #6
def process_stream(stream):
    stream_name = stream.split("/")[1]
    if not isfile(archive_root + "/" + stream_name):
        return

    zk = ZKState("/content_provider_transcoder/" + archive_root + "/" + stream)
    if zk.processed():
        zk.close()
        return

    if stream.endswith(".mpd"):
        try:
            mkdir(dash_root + "/" + stream_name)
        except OSError:
            pass

        if zk.process_start():
            try:
                cmd = GetABRCommand(archive_root + "/" + stream_name,
                                    dash_root + "/" + stream_name, "dash")
                r = call(cmd)
                if r:
                    raise Exception("status code: " + str(r))
                zk.process_end()
            except Exception:
                print(traceback.format_exc(), flush=True)
                zk.process_abort()

    if stream.endswith(".m3u8"):
        try:
            mkdir(hls_root + "/" + stream_name)
        except OSError:
            pass

        if zk.process_start():
            try:
                cmd = GetABRCommand(archive_root + "/" + stream_name,
                                    hls_root + "/" + stream_name, "hls")
                r = call(cmd)
                if r:
                    raise Exception("status code: " + str(r))
                zk.process_end()
            except Exception:
                print(traceback.format_exc(), flush=True)
                zk.process_abort()
    zk.close()
Example #7
def process_stream_vods(msg):
    stream_name = msg["name"]
    stream_type = msg["output"]["type"]
    stream_parameters = msg["parameters"]
    loop = msg["loop"]
    idx = msg["idx"] if "idx" in msg else int(random.random() * 10000)
    stream = stream_type + "/" + stream_name

    print("VOD transcode:", stream, flush=True)
    if not isfile(ARCHIVE_ROOT + "/" + stream_name):
        return

    zk = ZKState("/content_provider_transcoder/" + ARCHIVE_ROOT + "/vods/" + stream)
    if zk.processed():
        zk.close()
        return

    target_root = VIDEO_ROOT + stream_type

    try:
        makedirs(target_root + "/" + stream_name)
    except OSError:
        pass

    if zk.process_start():
        try:
            cmd = FFMpegCmd(ARCHIVE_ROOT + "/" + stream_name,
                            target_root + "/" + stream_name,
                            stream_type,
                            params=stream_parameters,
                            acc_type=HW_ACC_TYPE,
                            loop=loop,
                            device=HW_DEVICE).cmd()
            if cmd:
                print(cmd, flush=True)
                r = execute(idx, stream_name, cmd)
                if r:
                    raise Exception("status code: " + str(r))
                zk.process_end()
        except Exception:
            print(traceback.format_exc(), flush=True)
            zk.process_abort()

    zk.close()
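
Examples #7 and #12 hand the generated command to an execute() helper that is not part of the excerpts; judging by how its return value is checked, it runs the command and reports the exit status. A minimal, hypothetical sketch:

# Hypothetical sketch of the execute() helper used in Examples #7 and #12; the real
# implementation (and whatever it does with idx, e.g. tracking running transcodes)
# is not shown in the excerpts.
import subprocess

def execute(idx, stream_name, cmd):
    print("transcode[" + str(idx) + "] " + stream_name, cmd, flush=True)
    return subprocess.call(cmd)  # non-zero exit status signals failure to the caller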
Example #8
def ADTranscode(kafkamsg, db):
    msg = KafkaMsgParser(kafkamsg)
    # path: /var/www/adinsert/hls/Content_seq7u10.mp4/adstream/u10/4, name: 360p.m3u8
    zk_path = "/ad-transcode/" + ("/".join(msg.target_path.split("/")[-5:]))
    print("zk_path: " + zk_path + "/" + msg.target_name, flush=True)

    zks = ZKState(zk_path, msg.target_name)
    start_time = time.time()
    if zks.processed():
        print("AD transcoding finish the clip :", msg.target, flush=True)
        zks.close()
        return

    if zks.process_start():
        try:
            makedirs(msg.target_path)
        except OSError:
            pass

        stream = ADClipDecision(msg, db)
        zkd_path = "/".join(
            msg.target.replace(adinsert_archive_root + "/",
                               "").split("/")[:-1])
        if not stream:
            set_ad_path(zk_segment_prefix + "/" + zkd_path + "/link",
                        "/adstatic")
            zks.process_abort()
        else:
            try:
                stream_folder = msg.segment_path + "/" + stream.split("/")[-1]
                print("Checking pre-transcoded stream: " + stream_folder,
                      flush=True)
                if isdir(stream_folder):  # pre-transcoded AD exists
                    print(
                        "Prefetch the AD segment {} \n".format(stream_folder),
                        flush=True)
                    CopyADSegment(msg, stream)
                else:
                    print("Transcoding the AD segment {} \n".format(stream),
                          flush=True)
                    # only generate one resolution for ad segment, if not generated, ad will fall back to skipped ad.
                    cmd = GetABRCommand(stream,
                                        msg.target_path,
                                        msg.streaming_type,
                                        msg.GetRedition(),
                                        duration=msg.segment_duration,
                                        fade_type="audio",
                                        content_type="ad")
                    process_id = subprocess.Popen(cmd, stdout=subprocess.PIPE)
                    # block here until the transcode subprocess finishes before signaling readiness
                    process_id.wait()

                # signal that we are ready
                set_ad_path(zk_segment_prefix + "/" + zkd_path + "/link",
                            "/adinsert/" + zkd_path)
                zks.process_end()
                print("Status transcode: Timing {0} {1} {2} {3} {4}".format(
                    msg.start_time, start_time,
                    time.time() - start_time, msg.user_name, msg.target),
                      flush=True)
            except Exception as e:
                print(traceback.format_exc(), flush=True)
                set_ad_path(zk_segment_prefix + "/" + zkd_path + "/link",
                            "/adstatic")
                zks.process_abort()
    zks.close()
Example #9
kkhost = os.environ["KKHOST"]
vdhost = os.environ["VDHOST"]
dbhost = os.environ["DBHOST"]

while True:
    try:
        c = KafkaConsumer(topic,
                          bootstrap_servers=kkhost,
                          client_id=clientid,
                          group_id=groupid,
                          auto_offset_reset="earliest",
                          api_version=(0, 10))

        for msg in c:
            mode, clip_name = msg.value.decode('utf-8').split(",")
            zk = ZKState("/state/" + clip_name, mode)
            if not zk.processed():
                if zk.process_start():

                    print("Processing " + clip_name + ":" + mode + "...",
                          flush=True)
                    while True:
                        print("Downloading " + clip_name, flush=True)
                        sts = call([
                            "/usr/bin/wget", "-O", clip_name,
                            vdhost + "/mp4/" + clip_name
                        ])
                        if sts == 0:
                            break
                        time.sleep(1)

                    call([
Example #10
def process_stream(streamstring):
    streamjson = ast.literal_eval(streamstring)
    pipeline1 = streamjson["pipeline"] + "/1"
    stream = streamjson['source']['uri']
    user = streamjson["user_info"]["name"]
    elapsed_time = time.time() - streamjson["start_time"]
    print("VA feeder: stream: " + stream + " " + user +
          " elapsed-time on kafka queue:" + str(elapsed_time), flush=True)

    zk_path = None
    init_stream = None
    if 'uri-init' in streamjson['source']:
        init_stream = streamjson['source']['uri-init']

    m1 = re.search(r'(dash/.*)/chunk-stream[0-9]*-([0-9]*\.m4s)$', stream)
    if m1:
        zk_path = "/analytics/" + \
            m1.group(1) + "/" + m1.group(2) + "/" + streamjson["pipeline"]

    m1 = re.search(r'(hls/.*)/[0-9]*p_([0-9]*\.ts)$', stream)
    if m1:
        zk_path = "/analytics/" + \
            m1.group(1) + "/" + m1.group(2) + "/" + streamjson["pipeline"]

    if zk_path is None:
        # neither segment pattern matched, so no zk path could be derived
        print("VA feeder: unable to derive zk path for stream " + stream, flush=True)
        return
    print("zk path: " + zk_path, flush=True)

    zk = ZKState(zk_path)
    if zk.processed():
        print("VA feeder: " + user + " " + stream +
              " already complete", flush=True)
        zk.close()
        return

    if zk.process_start():
        merged_segment = None
        if init_stream:
            merged_segment = merge.create_merged_segment(init_stream, stream)
            if merged_segment:
                stream = "file://" + merged_segment
                print("VA feeder: video-analytics merged segment: " +
                      stream, flush=True)

        fps = va.loop({
            "source": {
                "uri": stream,
                "type": "uri"
            },
            "destination": {
                "type": "kafka",
                "host": socket.gethostbyname("kafka-service") + ":9092",
                "topic": "seg_analytics_data"
            },
            "tags": streamjson["tags"],
            "parameters": streamjson["parameters"],
            "user": user,
            "start_time": streamjson["start_time"],
        }, streamjson["pipeline"])

        if fps < 0:
            zk.process_abort()
        else:
            zk.process_end()

        if fps > 0:
            global global_total_fps, global_seg_count
            global_total_fps = global_total_fps + fps
            global_seg_count = global_seg_count + 1
            avg_fps = global_total_fps / global_seg_count
            print("VA statistics : " + "avg_fps " + str(avg_fps) + " " +
                  str(global_total_fps) + " " + str(global_seg_count), flush=True)

        if merged_segment:
            merge.delete_merged_segment(merged_segment)
    zk.close()
Example #11
def process_stream(streamstring):
    streamjson = ast.literal_eval(streamstring)
    pipeline1 = streamjson["pipeline"]+"/1"
    stream = streamjson['source']['uri']
    print("VA feeder: stream: "+stream, flush=True)
    init_stream = None
    zk_path = None
    if 'uri-init' in streamjson['source']:
        init_stream = streamjson['source']['uri-init']
        print("VA feeder: init_stream: "+init_stream, flush=True)
        zk_path = stream+"/"+pipeline1

    m1 = re.search("(.*)/.*_([0-9]+.ts)$", stream)
    if m1:
        segment = stream.split('/')[-1].split('_')[-1]
        zk_path = m1.group(1)+"/"+segment+"/"+pipeline1

    print("VA feeder: zk_path "+zk_path, flush=True)
    zk = ZKState(zk_path)
    if zk.processed():
        print("VA feeder: " + stream + " already complete", flush=True)
        zk.close()
        return
    if zk.process_start():
        merged_segment = None
        if init_stream:
            merged_segment = merge.create_merged_segment(init_stream, stream)
            if merged_segment:
                stream = "file://" + merged_segment
                print("VA feeder: video-analytics merged segment: " +
                      stream, flush=True)
        
        fps = va.loop({
            "source": {
                "uri": stream,
                "type": "uri"
            },
            "destination": {
                "type": "kafka",
                "host": socket.gethostbyname("kafka-service") + ":9092",
                "topic": "seg_analytics_data"
            },
            "tags": streamjson["tags"],
            "parameters": streamjson["parameters"],
        }, streamjson["pipeline"])
        if fps < 0:
            zk.process_abort()
        else:
            zk.process_end()
            p.send(video_analytics_fps_topic, json.dumps({
                "fps": fps,
                "machine": machine_prefix + socket.gethostname()[0:3],
                "time": datetime.datetime.utcnow().isoformat(),
            }))
            
        if merged_segment:
            merge.delete_merged_segment(merged_segment)
    zk.close()
Example #12
def process_stream_lives(msg):
    stream_name = msg["name"]
    stream_parameters = msg["parameters"]
    codec = stream_parameters["codec_type"]
    stream_type = msg["output"]["type"]
    target = msg["output"]["target"]
    loop = msg["loop"]
    idx = msg["idx"] if "idx" in msg.keys() else int(random.random() * 10000)
    stream = stream_type + "/" + stream_name

    if not isfile(ARCHIVE_ROOT + "/" + stream_name):
        return

    target_root = VIDEO_ROOT + stream_type

    try:
        makedirs(target_root + "/" + stream_name)
    except OSError:
        pass

    if target != "file":
        target_name = target + stream_type + "/media_" + str(idx) + "_"
    else:
        target_name = target_root + "/" + stream_name

    print("LIVE transcode:", target_name, stream_type, flush=True)
    zk = ZKState("/content_provider_transcoder/" + ARCHIVE_ROOT + "/lives/" +
                 str(idx) + "/" + stream)
    if zk.processed():
        zk.close()
        return

    if zk.process_start():
        try:
            if stream_parameters:
                cmd = GetLiveCommand(ARCHIVE_ROOT + "/" + stream_name,
                                     target_name,
                                     stream_type,
                                     params=stream_parameters,
                                     loop=loop)
            else:
                cmd = GetLiveCommand(ARCHIVE_ROOT + "/" + stream_name,
                                     target_name,
                                     stream_type,
                                     loop=loop)
            print(cmd, flush=True)
            r = execute(idx, stream_name, cmd)
            if r:
                raise Exception("status code: " + str(r))
            zk.process_end()
        except Exception:
            print(traceback.format_exc(), flush=True)
            zk.process_abort()

    zk.close()
def process_stream(stream):
    # stream = "dash/video.mp4/index.mpd" or "hls/video.mp4/index.m3u8"
    print("process stream: " + stream, flush=True)

    stream_name = stream.split("/")[1]

    if not isfile(ARCHIVE_ROOT + "/" + stream_name):
        print("process aborted for stream name: " + stream_name, flush=True)
        return

    zk = ZKState("/content_provider_transcoder/" + ARCHIVE_ROOT + "/" + stream)
    # "/content_provider_transcoder/" + ARCHIVE_ROOT + "/" + stream
    # => "/content_provider_transcoder//var/www/archive/dash/video.mp4/index.mpd"
    # or "/content_provider_transcoder//var/www/archive/hls/video.mp4/index.m3u8"
    if zk.processed():
        print("process already done", flush=True)
        zk.close()
        return

    if stream.endswith(".mpd"):
        print("it's a dash process", flush=True)
        try:
            mkdir(DASH_ROOT + "/" + stream_name)
        except OSError:
            pass

        if zk.process_start():
            try:
                cmd = GetABRCommand(ARCHIVE_ROOT + "/" + stream_name,
                                    DASH_ROOT + "/" + stream_name, "dash")
                r = call(cmd)
                if r:
                    raise Exception("status code: " + str(r))
                zk.process_end()
            except Exception:
                print(traceback.format_exc(), flush=True)
                zk.process_abort()

    if stream.endswith(".m3u8"):
        print("it's a hls process", flush=True)
        try:
            mkdir(HLS_ROOT + "/" + stream_name)
        except OSError:
            pass

        if zk.process_start():
            try:
                cmd = GetABRCommand(ARCHIVE_ROOT + "/" + stream_name,
                                    HLS_ROOT + "/" + stream_name, "hls")
                r = call(cmd)
                if r:
                    raise Exception("status code: " + str(r))
                zk.process_end()
            except Exception:
                print(traceback.format_exc(), flush=True)
                zk.process_abort()

    zk.close()