def get(self):
    """Schedule VOD production of the requested stream, then redirect to it.

    The URI path after ``/schedule/`` is ``<type>/<name>/...``; a JSON job
    message is published to Kafka, after which we poll (up to 60 seconds)
    for the produced file to appear under DASHLS_ROOT. On success the
    response is delegated to nginx via X-Accel-Redirect; on timeout the
    client receives 503 and is expected to retry.
    """
    stream = self.request.uri.replace("/schedule/", "")

    # schedule producing the stream
    print("request received to process stream: " + stream, flush=True)

    segments = stream.split("/")
    job = {
        "name": segments[1],
        "parameters": {
            "renditions": [],
            "codec_type": "AVC",
        },
        "output": {
            "target": "file",
            "type": segments[0],
        },
        "live_vod": "vod",
        "loop": 0,
    }
    producer = Producer()
    producer.send(KAFKA_TOPIC, json.dumps(job))
    producer.close()

    # wait until the file is available, then hand it off to nginx
    deadline = time.time() + 60
    while time.time() < deadline:
        if isfile(DASHLS_ROOT + "/" + stream):
            self.set_header('X-Accel-Redirect', '/' + stream)
            self.set_status(200, "OK")
            return
        yield gen.sleep(0.5)

    # waited too long; report that the request has only been scheduled
    self.set_status(503, "Request scheduled")
class AdStatsHandler(web.RequestHandler):
    """REST handler exposing ad statistics (GET) and recording clicks/views (POST).

    The stats themselves live in the module-level ``adstats`` list; each
    entry is a dict keyed at least by 'uri', 'clicked' and 'watched'
    (schema inferred from the accesses below — confirm against the module).
    """

    def __init__(self, app, request, **kwargs):
        super(AdStatsHandler, self).__init__(app, request, **kwargs)
        # per-handler scratch cache; not used by any code visible here
        self._cache = {}

    def check_origin(self, origin):
        # accept cross-origin requests from any host
        return True

    @gen.coroutine
    def get(self):
        """Return the current ad stats as JSON and publish a snapshot to Kafka."""
        self.set_status(200, "OK")
        self.set_header("Content-Type", "application/json")
        self.write(json.dumps(adstats))
        # forward the same snapshot for downstream consumers
        self._producer = Producer()
        self._producer.send(kafka_topic, json.dumps(adstats))
        self._producer.close()

    @gen.coroutine
    def post(self):
        """Increment the clicked/watched counters for the ad matching 'uri'.

        Expects a JSON body with 'uri', 'clicked' and 'watched' fields;
        any parse error or missing key yields a 503.
        """
        try:
            data = json.loads(self.request.body.decode('utf-8'))
            for item in adstats:
                if item['uri'] == data['uri']:
                    if data['clicked'] == 1:
                        item['clicked'] += 1
                    if data['watched'] == 1:
                        item['watched'] += 1
            self.set_status(200, "OK")
        except Exception as e:
            # fix: the caught exception used to be dropped silently,
            # hiding the reason for the 503 — log it before responding
            print("Ad-stats: exception during post: " + str(e), flush=True)
            self.set_status(503, "Ad-content:Exception during post")
def get(self):
    """Schedule production of the requested stream and serve it once ready.

    Publishes the bare stream path to Kafka, then polls for roughly five
    seconds (50 checks, 0.1s apart). If the file never appears the client
    gets a 503 with reason "Request scheduled" and should retry later.
    """
    stream = self.request.uri.replace("/schedule/", "")

    # schedule producing the stream
    print("request received to process stream: " + stream, flush=True)
    producer = Producer()
    producer.send(KAFKA_TOPIC, stream)
    producer.close()

    # wait until the file is available, then return it
    for _ in range(50):
        if not isfile(DASHLS_ROOT + "/" + stream):
            yield gen.sleep(0.1)
            continue
        self.set_header('X-Accel-Redirect', '/' + stream)
        self.set_status(200, "OK")
        return

    # waited too long; skip this REST API call
    self.set_status(503, "Request scheduled")
def get(self):
    """Schedule production of the requested stream and X-Accel-Redirect to it.

    Publishes the bare stream path to Kafka, then polls (up to 60 seconds)
    for the produced file under dashls_root; times out with a 503.
    """
    stream = self.request.uri.replace("/schedule/", "")

    # schedule producing the stream
    print("request received to process stream: " + stream, flush=True)
    producer = Producer()
    producer.send(kafka_topic, stream)
    producer.close()

    # wait until the file is available, then hand it off to nginx
    expire_at = time.time() + 60
    while time.time() < expire_at:
        if isfile(dashls_root + "/" + stream):
            self.set_header('X-Accel-Redirect', '/' + stream)
            self.set_status(200, "OK")
            return
        yield gen.sleep(0.5)

    # waited too long; skip this REST API call
    self.set_status(503, "Request scheduled")
def post(self, *args, **kwargs):
    """Receive one chunk of an uploaded media file; on the final chunk,
    archive the upload and schedule transcoding of the assembled stream.

    Body arguments:
        fileName     -- original file name of the upload
        file         -- multipart file chunk payload
        uploadStatus -- 'end' on the last chunk
        timeStamp    -- client timestamp used to namespace the upload
        count        -- chunk index, used as the on-disk chunk file name
        type         -- stream type, 'hls' or 'dash' (default "dash")

    Responds 200 on success and 401 (with a traceback logged) on any
    failure while writing or scheduling.
    """
    fileName = self.get_body_argument('fileName', None)
    file = self.request.files.get('file', None)
    uploadStatus = self.get_body_argument('uploadStatus', None)
    timeStamp = self.get_body_argument('timeStamp', None)
    count = self.get_body_argument('count', None)
    streamType = self.get_body_argument('type', "dash")
    fileName = timeStamp + "-" + fileName
    proPath = os.path.join(TEMP_ROOT, fileName)
    # exist_ok avoids a check-then-create race when chunks arrive concurrently
    os.makedirs(proPath, exist_ok=True)
    try:
        # each chunk is stored under its index within the upload's temp dir
        with open(os.path.join(proPath, count), 'wb') as f:
            f.write(file[0]['body'])
        self.set_status(200)
        if uploadStatus == 'end':
            # move/assemble the chunks into the archive asynchronously
            in_out.delay(proPath, ARCHIVE_ROOT, fileName, count)
            # schedule producing the stream
            stream = streamType + "/" + fileName + "/index." + ("m3u8" if streamType == "hls" else "mpd")
            print("request received to process offline stream: " + stream, flush=True)
            # wait (up to 10s) for the archive to materialize before sending the job
            start_time = time.time()
            while time.time() - start_time < 10:
                if isfile(ARCHIVE_ROOT + "/" + fileName):
                    print("file " + fileName + " exists, sending job", flush=True)
                    producer = Producer()
                    producer.send(KAFKA_TOPIC, stream)
                    producer.close()
                    return
                yield gen.sleep(0.5)
            print("timeout :(", flush=True)
    except Exception:
        # fix: was a bare `except:` which also swallowed SystemExit and
        # KeyboardInterrupt; failures still answer 401 and log the traceback
        self.set_status(401)
        print(traceback.format_exc(), flush=True)
    # --- tail of a function whose opening lines are outside this view ---
    # NOTE(review): the `},` below closes a dict/call argument opened earlier
    # in the file; the indentation here is reconstructed from a mangled
    # single-line source and must be confirmed against the full file.
    }, streamjson["pipeline"])
    # a negative fps appears to signal a failed run — TODO confirm; record
    # abort vs. normal completion in zookeeper accordingly
    if fps<0:
        zk.process_abort()
    else:
        zk.process_end()
    # publish the measured throughput tagged with this machine's identity
    p.send(video_analytics_fps_topic, json.dumps({
        "fps": fps,
        "machine":machine_prefix+socket.gethostname()[0:3],
        "time": datetime.datetime.utcnow().isoformat(),
    }));
    # clean up the temporary merged segment, if one was produced
    if merged_segment:
        merge.delete_merged_segment(merged_segment)
    zk.close()

if __name__ == "__main__":
    # consume analytics jobs forever; per-message and consumer-level errors
    # are logged and the outer loop retries after a short pause
    while True:
        try:
            print("VA feeder: listening to messages", flush=True)
            c = Consumer("analytics")
            for msg in c.messages(video_analytics_topic):
                print("VA feeder: recieved message: " + str(msg), flush=True)
                try:
                    process_stream(msg)
                except Exception as e:
                    # a bad message must not kill the consumer loop
                    print("VA feeder: "+str(e), flush=True)
        except Exception as e:
            print("VA feeder: error in main" + str(e), flush=True)
        # brief back-off before re-creating the consumer
        time.sleep(1)
    # NOTE(review): unreachable — the `while True` above never breaks;
    # placement reconstructed from mangled source, confirm against full file
    p.close()