def listen(self):
    while True:
        print("listening to messages")
        try:
            c = Consumer(kafka_group)
            for msg in c.messages(kafka_topic):
                try:
                    value = json.loads(msg)
                    # Convert the nanosecond timestamp to seconds.
                    value["time"] = float(value["timestamp"]) / 1.0e9
                    # Add the segment time offset if present; either the
                    # "tags" or "tag" spelling may appear in the payload.
                    if "tags" in value:
                        if "seg_time" in value["tags"]:
                            value["time"] = value["time"] + float(
                                value["tags"]["seg_time"])
                    if "tag" in value:
                        if "seg_time" in value["tag"]:
                            value["time"] = value["time"] + float(
                                value["tag"]["seg_time"])
                    # The stream name is the second-to-last path component of the source.
                    stream = value["source"].split("/")[-2]
                    self._send((stream, value))
                except Exception as e:
                    print("Exception: " + str(e), flush=True)
        except Exception as e:
            print("Exception: " + str(e), flush=True)
            time.sleep(2)
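# The loops in these scripts all go through a project-local `messaging.Consumer`
# wrapper (Consumer(group).messages(topic, ...)). Its implementation is not
# shown in this section; the sketch below is only an illustration of how such a
# wrapper could be built on kafka-python. The class name, broker address, and
# defaults are assumptions, not the project's actual messaging module.
from kafka import KafkaConsumer

KAFKA_HOSTS = ["kafka-service:9092"]  # assumed broker address


class ConsumerSketch:
    def __init__(self, group):
        self._group = group

    def messages(self, topic, timeout=None):
        # Yield decoded message values; when idle, yield a falsy placeholder so
        # callers can use the `if msg:` pattern seen in the scripts above/below.
        c = KafkaConsumer(topic,
                          bootstrap_servers=KAFKA_HOSTS,
                          group_id=self._group)
        try:
            while True:
                batch = c.poll(timeout_ms=timeout if timeout else 500)
                if not batch:
                    yield None
                    continue
                for records in batch.values():
                    for record in records:
                        yield record.value.decode("utf-8")
        finally:
            c.close()

    def close(self):
        pass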
def main():
    db = DataBase()
    consumer = Consumer(kafka_group)
    while True:
        try:
            print("ad transcode service: listening to messages", flush=True)
            for msg in consumer.messages(kafka_topic):
                print("ad transcode service: received message: " + str(msg), flush=True)
                ADTranscode(msg, db)
        except Exception as e:
            print(str(e))
            print("ad transcode exception in service")
            time.sleep(10)
def _read_topic(self, topic):
    c = Consumer(None)
    while True:
        try:
            for msg in c.debug(topic):
                if msg:
                    yield self.write_message(
                        json.dumps({
                            "topic": topic,
                            "value": msg
                        }))
                else:
                    yield gen.sleep(0.05)
        except Exception as e:
            yield self.write_message("Exception:" + str(e))
            print(str(e))
            # sleep and retry
            yield gen.sleep(10)
def get_analytic_status(instanceId, pipeline):
    try:
        r = requests.get(video_analytic_url + pipeline + "/" +
                         instanceId + "/status", timeout=timeout)
        if r.status_code == 200:
            jsonValue = r.json()
            return jsonValue.get('state'), jsonValue.get('avg_fps')
    except requests.exceptions.RequestException as e:
        print("VA feeder: Error in getting status " + str(e), flush=True)
    return "UNKNOWN", None


if __name__ == "__main__":
    c = Consumer(kafka_group)
    while True:
        try:
            print("VA feeder: listening to messages", flush=True)
            for msg in c.messages(video_analytics_topic):
                print("VA feeder: received message: " + str(msg), flush=True)
                try:
                    process_stream(msg)
                except Exception as e:
                    print("VA feeder: " + str(e), flush=True)
        except Exception as e:
            print("VA feeder: error in main: " + str(e), flush=True)
            time.sleep(10)
    if p:
        p.close()
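# Hypothetical helper showing how get_analytic_status() above could be polled
# until an analytics instance reaches a terminal state. The state names
# ("COMPLETED", "ERROR", "ABORTED") and the 1-second interval are assumptions
# for illustration, not values taken from the source.
def wait_for_completion(instanceId, pipeline, interval=1.0):
    while True:
        state, avg_fps = get_analytic_status(instanceId, pipeline)
        if state in ("COMPLETED", "ERROR", "ABORTED"):
            return state, avg_fps
        time.sleep(interval)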
    merge.delete_merged_segment(merged_segment)
    zk.close()


def get_analytic_status(instanceId, pipeline):
    try:
        r = requests.get(video_analytic_url + pipeline + "/" +
                         instanceId + "/status", timeout=timeout)
        if r.status_code == 200:
            jsonValue = r.json()
            return jsonValue.get('state'), jsonValue.get('avg_fps')
    except requests.exceptions.RequestException as e:
        print("VA feeder: Error in getting status " + str(e), flush=True)
    return "UNKNOWN", None


if __name__ == "__main__":
    c = Consumer(kafka_group)
    while True:
        try:
            print("VA feeder: listening to messages", flush=True)
            for msg in c.messages(kafka_topic):
                print("VA feeder: received message: " + str(msg), flush=True)
                try:
                    process_stream(msg)
                except Exception as e:
                    print("VA feeder: " + str(e), flush=True)
        except Exception as e:
            print("VA feeder: error in main: " + str(e), flush=True)
            time.sleep(10)
    try:
        mkdir(HLS_ROOT + "/" + stream_name)
    except Exception as e:
        print(str(e))
    if zk.process_start():
        try:
            cmd = GetABRCommand(
                src_protocol + src_path + "/" + src_api + "/" + stream_name,
                HLS_ROOT + "/" + stream_name, "hls")
            r = call(cmd)
            if r:
                raise Exception("status code: " + str(r))
            zk.process_end()
        except Exception as e:
            print(str(e))
            zk.process_abort()
    zk.close()


if __name__ == "__main__":
    c = Consumer(KAFKA_GROUP)
    while True:
        try:
            for message in c.messages(KAFKA_TOPIC):
                process_stream(message)
        except Exception as e:
            print(str(e))
            time.sleep(2)
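# GetABRCommand() is defined elsewhere in the project and only called above.
# As an illustration of the pattern (a command list handed to subprocess call()),
# a minimal HLS command builder might look like the sketch below; the codecs,
# segment length, and playlist name are assumptions, not the project's settings.
def get_abr_command_sketch(src, dst_dir, fmt="hls"):
    return [
        "ffmpeg", "-i", src,
        "-c:v", "libx264", "-c:a", "aac",
        "-f", fmt,
        "-hls_time", "2",
        "-hls_list_size", "0",
        dst_dir + "/index.m3u8",
    ]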
    except:
        print(traceback.format_exc(), flush=True)
        zk.process_abort()

    if stream.endswith(".m3u8"):
        try:
            mkdir(hls_root + "/" + stream_name)
        except:
            pass
        if zk.process_start():
            try:
                cmd = GetABRCommand(archive_root + "/" + stream_name,
                                    hls_root + "/" + stream_name, "hls")
                r = call(cmd)
                if r:
                    raise Exception("status code: " + str(r))
                zk.process_end()
            except:
                print(traceback.format_exc(), flush=True)
                zk.process_abort()
    zk.close()


c = Consumer(kafka_group)
while True:
    try:
        for message in c.messages(kafka_topic):
            process_stream(message)
    except:
        print(traceback.format_exc(), flush=True)
        time.sleep(2)
c.close()
#!/usr/bin/python3

from messaging import Consumer
from process import ADTranscode
from db import DataBase
import traceback
import time

kafka_topic = "ad_transcode_sched"
kafka_group = "ad_transcode_creator"

db = DataBase()
consumer = Consumer(kafka_group)
while True:
    try:
        print("ad transcode service: listening to messages", flush=True)
        for msg in consumer.messages(kafka_topic):
            ADTranscode(msg, db)
    except Exception:
        print(traceback.format_exc(), flush=True)
        time.sleep(10)
consumer.close()
db.close()
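# For a quick manual test of the service above, a message can be pushed onto
# the "ad_transcode_sched" topic with kafka-python. The broker address and the
# payload shape are assumptions for illustration; the real message schema is
# defined by the producer side of the project, which is not shown here.
from kafka import KafkaProducer
import json

producer = KafkaProducer(
    bootstrap_servers=["kafka-service:9092"],  # assumed broker address
    value_serializer=lambda v: json.dumps(v).encode("utf-8"))
producer.send("ad_transcode_sched", {"stream": "example/ad1.mp4"})  # hypothetical payload
producer.flush()
producer.close()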
# show transcoding statistics
def stats_fileinfo(root):
    nfiles = 0
    size = 0
    for path, dirs, files in walk(root):
        for stream1 in files:
            if stream1.endswith((".mp4", ".avi", ".ts")):
                nfiles = nfiles + 1
                size = size + getsize(path + "/" + stream1)
    return (nfiles, size)


c = Consumer(None)
info = {
    "summary": {
        "cpu": round(psutil.cpu_percent(), 2),
        "mem": round(
            int(psutil.virtual_memory().total - psutil.virtual_memory().free) /
            float(psutil.virtual_memory().total), 2),
        "active": 0,
        "completed": 0,
        "aborted": 0
#!/usr/bin/python3

from messaging import Consumer
from db import DataBase
import json
import time

kafka_topic = "seg_analytics_data"
kafka_group = "kafka_to_db_converter"

if __name__ == "__main__":
    db = DataBase()
    c = Consumer(kafka_group)
    while True:
        try:
            print("listening to messages")
            while True:
                data = []
                start = time.time()  # time.clock() was removed in Python 3.8
                for msg in c.messages(kafka_topic, timeout=100):
                    if msg:
                        try:
                            value = json.loads(msg)
                            # Convert the nanosecond timestamp to seconds and
                            # add the segment time offset if present.
                            value["time"] = float(value["timestamp"]) / 1.0e9
                            if "tags" in value:
                                if "seg_time" in value["tags"]:
                                    value["time"] = value["time"] + float(value["tags"]["seg_time"])
                            if "tag" in value:
                                if "seg_time" in value["tag"]:
                                    value["time"] = value["time"] + float(value["tag"]["seg_time"])
            zk.process_end()
            if fps > 0:
                # Track a running average of per-segment fps across the process.
                global global_total_fps, global_seg_count
                global_total_fps = global_total_fps + fps
                global_seg_count = global_seg_count + 1
                avg_fps = global_total_fps / global_seg_count
                print("VA statistics : avg_fps " + str(avg_fps) + " " +
                      str(global_total_fps) + " " + str(global_seg_count), flush=True)
    if merged_segment:
        merge.delete_merged_segment(merged_segment)
    zk.close()


if __name__ == "__main__":
    c = Consumer("analytics")
    while True:
        try:
            print("VA feeder: listening to messages", flush=True)
            for msg in c.messages(video_analytics_topic):
                print("VA feeder: received message: " + str(msg), flush=True)
                try:
                    process_stream(msg)
                except Exception as e:
                    print("VA feeder: " + str(e), flush=True)
                    traceback.print_exc()
        except Exception as e:
            print("VA feeder: error in main: " + str(e), flush=True)
            time.sleep(1)
    c.close()