def listen(self):
    """Consume analytics messages from Kafka forever and forward them.

    Each JSON payload gets a derived "time" field: the nanosecond
    "timestamp" converted to seconds, plus any segment offset found under
    "tags"/"tag" -> "seg_time".  The payload is then forwarded as a
    (stream, payload) tuple via self._send(), where the stream name is
    the second-to-last component of the "source" path.  Per-message and
    per-connection errors are logged; the consumer is re-created and the
    loop retries after a 2 second pause.
    """
    def _segment_offset(payload, key):
        # seg_time offset stored under payload[key], or 0.0 when absent.
        if key in payload and "seg_time" in payload[key]:
            return float(payload[key]["seg_time"])
        return 0.0

    while True:
        print("listening to messages")
        try:
            consumer = Consumer(kafka_group)
            for raw in consumer.messages(kafka_topic):
                try:
                    payload = json.loads(raw)
                    # Nanosecond epoch -> seconds, then apply offsets.
                    payload["time"] = float(payload["timestamp"]) / 1.0e9
                    payload["time"] = payload["time"] + _segment_offset(payload, "tags")
                    payload["time"] = payload["time"] + _segment_offset(payload, "tag")
                    stream = payload["source"].split("/")[-2]
                    self._send((stream, payload))
                except Exception as e:
                    print("Exception: " + str(e), flush=True)
        except Exception as e:
            print("Exception: " + str(e), flush=True)
        time.sleep(2)
def main():
    """Run the ad-transcode service: consume scheduling messages forever.

    Creates the database handle and Kafka consumer once, then hands each
    incoming message to ADTranscode.  On any error the exception is
    logged and consumption resumes after a 10 second back-off.
    """
    db = DataBase()
    consumer = Consumer(kafka_group)
    while True:
        try:
            print("ad transcode service: listening to messages", flush=True)
            for msg in consumer.messages(kafka_topic):
                print("ad transcode service: recieved message: " + str(msg),
                      flush=True)
                ADTranscode(msg, db)
        except Exception as e:
            # Fix: flush the error logs too, matching every other print in
            # this loop — otherwise they can sit in the stdout buffer and
            # be lost when the container is killed.
            print(str(e), flush=True)
            print("ad transcode exception in service", flush=True)
        time.sleep(10)
def get_analytic_status(instanceId, pipeline):
    """Query the video-analytics service for a pipeline instance's status.

    Returns (state, avg_fps) from the JSON body on HTTP 200; on any other
    status code, or on a request failure, returns ("UNKNOWN", None).
    """
    url = video_analytic_url + pipeline + "/" + instanceId + "/status"
    try:
        response = requests.get(url, timeout=timeout)
        if response.status_code == 200:
            body = response.json()
            return body.get('state'), body.get('avg_fps')
    except requests.exceptions.RequestException as e:
        print("VA feeder: Error in getting status " + str(e), flush=True)
    return "UNKNOWN", None


if __name__ == "__main__":
    c = Consumer(kafka_group)
    while True:
        try:
            print("VA feeder: listening to messages", flush=True)
            for msg in c.messages(video_analytics_topic):
                print("VA feeder: recieved message: " + str(msg), flush=True)
                try:
                    process_stream(msg)
                except Exception as e:
                    print("VA feeder: " + str(e), flush=True)
        except Exception as e:
            print("VA feeder: error in main" + str(e), flush=True)
        time.sleep(10)
    # NOTE(review): unreachable — the loop above never exits normally, and
    # `p` is not defined anywhere in this module's visible scope; confirm
    # before relying on this cleanup.
    if p:
        p.close()
merge.delete_merged_segment(merged_segment) zk.close() def get_analytic_status(instanceId, pipeline): try: r = requests.get(video_analytic_url+pipeline+"/" + instanceId+"/status", timeout=timeout) if r.status_code == 200: jsonValue = r.json() return jsonValue.get('state'), jsonValue.get('avg_fps') except requests.exceptions.RequestException as e: print("VA feeder: Error in getting status " + str(e), flush=True) return "UNKNOWN", None if __name__ == "__main__": c = Consumer(kafka_group) while True: try: print("VA feeder: listening to messages", flush=True) for msg in c.messages(kafka_topic): print("VA feeder: recieved message: " + str(msg), flush=True) try: process_stream(msg) except Exception as e: print("VA feeder: "+str(e), flush=True) except Exception as e: print("VA feeder: error in main" + str(e), flush=True) time.sleep(10)
try: mkdir(HLS_ROOT + "/" + stream_name) except Exception as e: print(str(e)) if zk.process_start(): try: cmd = GetABRCommand( src_protocol + src_path + "/" + src_api + "/" + stream_name, HLS_ROOT + "/" + stream_name, "hls") r = call(cmd) if r: raise Exception("status code: " + str(r)) zk.process_end() except Exception as e: print(str(e)) zk.process_abort() zk.close() if __name__ == "__main__": c = Consumer(KAFKA_GROUP) while True: try: for message in c.messages(KAFKA_TOPIC): process_stream(message) except Exception as e: print(str(e)) time.sleep(2)
except: print(traceback.format_exc(), flush=True) zk.process_abort() if stream.endswith(".m3u8"): try: mkdir(hls_root+"/"+stream_name) except: pass if zk.process_start(): try: cmd = GetABRCommand(archive_root+"/"+stream_name,hls_root+"/"+stream_name,"hls") r=call(cmd) if r: raise Exception("status code: "+str(r)) zk.process_end() except: print(traceback.format_exc(), flush=True) zk.process_abort() zk.close() c=Consumer(kafka_group) while True: try: for message in c.messages(kafka_topic): process_stream(message) except: print(traceback.format_exc(), flush=True) time.sleep(2) c.close()
#!/usr/bin/python3
"""Ad-transcode scheduler service.

Consumes transcode scheduling messages from Kafka and hands each one to
ADTranscode.  Runs forever; on any error the traceback is logged and
consumption resumes after a 10 second back-off.
"""

from messaging import Consumer
from process import ADTranscode
from db import DataBase
import traceback
import time

kafka_topic = "ad_transcode_sched"
kafka_group = "ad_transcode_creator"

db = DataBase()
consumer = Consumer(kafka_group)
try:
    while True:
        try:
            print("ad transcode service: listening to messages", flush=True)
            for msg in consumer.messages(kafka_topic):
                ADTranscode(msg, db)
        except Exception:
            print(traceback.format_exc(), flush=True)
        time.sleep(10)
finally:
    # Fix: the close() calls originally sat after the infinite loop and
    # could never run; a finally block makes the cleanup reachable when
    # the process is interrupted or exits.
    consumer.close()
    db.close()
}) return active, completed, aborted def log_info(sinfo): with open(log_file, "w") as f: for k, v in sinfo.items(): f.write(str(k) + ": " + json.dumps(v)) f.write("\n") def format_info(sinfo, task_list): print("\n", flush=True) for k, v in sinfo.items(): if k in task_list + [ "summary", "active_task", "completed_task", "aborted_task" ]: print(k, v, flush=True) while True: try: print("Waiting...", flush=True) for message in c.messages(KAFKA_WORKLOAD_TOPIC): active, completed, aborted = process_message(message, info) log_info(info) format_info(info, active) except Exception as e: print("Exception: {}".format(e)) time.sleep(2)
import json import time kafka_topic = "seg_analytics_data" kafka_group = "kafka_to_db_converter" if __name__ == "__main__": db=DataBase() c=Consumer(kafka_group) while True: try: print("listening to messages") while True: data=[] start=time.clock() for msg in c.messages(kafka_topic,timeout=100): if msg: try: value=json.loads(msg) value["time"]=float(value["timestamp"])/1.0e9 if "tags" in value: if "seg_time" in value["tags"]: value["time"]=value["time"]+float(value["tags"]["seg_time"]) if "tag" in value: if "seg_time" in value["tag"]: value["time"]=value["time"]+float(value["tag"]["seg_time"]) stream=value["source"].split("/")[-2] print("Ingest "+stream+": "+str(value["time"]),flush=True) data.append((stream,value)) except Exception as e: print(str(e))