def get_thumbnail(self, mp4, size, start_time):
    """Return a PNG thumbnail for *mp4*, scaled to *size*, seeked to
    *start_time* (epoch-relative milliseconds), caching results.

    Returns the PNG bytes, or None if ffmpeg/file access failed.
    """
    # serve from cache when the exact (mp4, size, start_time) was seen before
    for entry in self._cache:
        if entry[0] == mp4 and entry[1] == size and entry[2] == start_time:
            return entry[3]
    png = "/tmp/" + str(uuid.uuid4()) + ".png"
    cmds = ["/usr/local/bin/ffmpeg"]
    if start_time:
        # milliseconds -> HH:MM:SS.mmm; divmod keeps every field in range
        # (the old formula emitted minutes/seconds > 59 past one minute)
        total_sec, msec = divmod(int(start_time), 1000)
        total_min, sec = divmod(total_sec, 60)
        hour, minute = divmod(total_min, 60)
        cmds.extend([
            "-ss",
            "{:02d}:{:02d}:{:02d}.{:03d}".format(hour, minute, sec, msec)
        ])
    cmds.extend(["-i", mp4])
    if size:
        cmds.extend(["-vf", "scale=" + size])
    cmds.extend(["-frames:v", "1", png])
    image = None
    try:
        list(run(cmds))
        with open(png, "rb") as fd:
            image = fd.read()
        os.remove(png)
    except Exception:
        # best effort: log and fall through returning None
        print(traceback.format_exc(), flush=True)
    if image:
        if len(self._cache) > THUMBNAIL_CACHE:
            # evict the oldest entry; list has no shift() (was an AttributeError)
            self._cache.pop(0)
        self._cache.append((mp4, size, start_time, image))
    return image
def probe_camera():
    """Scan the network with nmap and yield (ip, port) for every open TCP
    port discovered; *port_scan* supplies the nmap target/port arguments.
    """
    command = "/usr/bin/nmap " + port_scan + " -n"
    print(command, flush=True)
    # fixed: ip was unbound (NameError) if an open-port line ever appeared
    # before the first "Nmap scan report for" line
    ip = None
    for line in run(command.split(" ")):
        if line.startswith("Nmap scan report for"):
            # last token is the host, possibly parenthesized
            ip = line.split(" ")[-1].strip("()")
        if ip and "/tcp" in line and "open" in line:
            port = int(line.split("/")[0])
            yield ip, port
def get_thumbnail(self, mp4, size, start_time):
    """Return a PNG thumbnail for *mp4* at *start_time* (milliseconds),
    scaled to *size*, with an LRU-ish cache and a last-frame fallback.

    Returns the PNG bytes, or None if both extraction attempts fail.
    """
    for entry in self._cache:
        if entry[0] == mp4 and entry[1] == size and entry[2] == start_time:
            return entry[3]
    png = "/tmp/" + str(uuid.uuid4()) + ".png"
    cmds = ["/usr/local/bin/ffmpeg"]
    if start_time:
        # milliseconds -> HH:MM:SS.mmm; divmod keeps each field in range
        # (the old formula emitted minutes/seconds > 59 past one minute)
        total_sec, msec = divmod(int(start_time), 1000)
        total_min, sec = divmod(total_sec, 60)
        hour, minute = divmod(total_min, 60)
        cmds.extend([
            "-ss",
            "{:02d}:{:02d}:{:02d}.{:03d}".format(hour, minute, sec, msec)
        ])
    cmds.extend(["-i", mp4])
    if size:
        cmds.extend(["-vf", "scale=" + size])
    cmds.extend(["-frames:v", "1", png])
    try:
        list(run(cmds))
        with open(png, "rb") as fd:
            image = fd.read()
    except Exception as e:
        print(str(e), flush=True)
        if not start_time:
            return None
        # the seek point may be past EOF; retry grabbing the last frame
        cmds[1:3] = ["-sseof", "-1"]
        cmds[-3:-1] = ["-update", "1", "-vsync", "0"]
        try:
            list(run(cmds))
            with open(png, "rb") as fd:
                image = fd.read()
        except Exception as e:
            print(str(e), flush=True)
            return None
    try:
        os.remove(png)
    except OSError:
        # best effort cleanup of the temp file
        pass
    if len(self._cache) > THUMBNAIL_CACHE:
        self._cache.pop(0)
    self._cache.append((mp4, size, start_time, image))
    return image
def _ffmpeg_convert(self, filename):
    """Remux *filename* in place via a stream copy, then write a 640x360
    PNG preview next to it.

    Returns (absolute filename, probe info) on success, or (None, None)
    if any step fails (the exception is printed, not raised).
    """
    print("post-processing " + filename, flush=True)
    with tempfile.TemporaryDirectory() as workdir:
        src = os.path.abspath(filename)
        scratch = os.path.abspath(os.path.join(workdir, os.path.basename(src)))
        try:
            # stream-copy into a scratch file, then move it back over the source
            remux = ["/usr/bin/ffmpeg", "-i", src, "-c", "copy", scratch]
            list(run(remux))
            shutil.move(scratch, src)
            # single representative frame for the preview image
            snapshot = [
                "/usr/bin/ffmpeg", "-i", src, "-vf",
                "thumbnail,scale=640:360", "-frames:v", "1", src + ".png"
            ]
            list(run(snapshot))
            return src, probe(src)
        except Exception as e:
            print("Exception: " + str(e), flush=True)
            return None, None
def ffmpeg_convert(self, filename):
    """Remux *filename* in place via a stream copy and write a 640x360 PNG
    preview next to it.

    Returns (absolute filename, probe info); re-raises on failure after
    logging the error.
    """
    with tempfile.TemporaryDirectory() as tmpdirname:
        filename = os.path.abspath(filename)
        tmpfilename = os.path.abspath(
            os.path.join(tmpdirname, os.path.basename(filename)))
        try:
            # stream-copy remux into the temp dir, then replace the source
            list(
                run([
                    "/usr/bin/ffmpeg", "-i", filename, "-c", "copy",
                    tmpfilename
                ]))
            shutil.move(tmpfilename, filename)
            # single representative frame for the preview image
            list(
                run([
                    "/usr/bin/ffmpeg", "-i", filename, "-vf",
                    "thumbnail,scale=640:360", "-frames:v", "1",
                    filename + ".png"
                ]))
            return filename, probe(filename)
        except Exception as error:
            # only CalledProcessError carries .output; the old code raised
            # AttributeError here for any other exception, masking the cause
            logger.error("Error converting mp4 with ffmpeg: %s %s" %
                         (error, getattr(error, "output", "")))
            raise
def _rec2db(self, office, sensor, timestamp, path):
    """Copy a recorded clip into storage, render its thumbnail, and ingest
    the recording metadata.

    office: (lat, lon) pair identifying the office.
    sensor: id of the originating sensor.
    timestamp: recording start time in epoch milliseconds.
    path: source mp4 to copy from.
    """
    dt = datetime.datetime.fromtimestamp(timestamp / 1000)
    # encode coordinates into a filesystem-safe directory component
    officestr = (str(office[0]) + "c" + str(office[1])).replace(
        "-", "n").replace(".", "d")
    mp4path = self._storage + "/" + officestr + "/" + sensor + "/" + str(
        dt.year) + "/" + str(dt.month) + "/" + str(dt.day)
    os.makedirs(mp4path, exist_ok=True)
    mp4file = mp4path + "/" + str(timestamp) + ".mp4"
    # straight stream copy into the final location
    list(
        run([
            "/usr/bin/ffmpeg", "-f", "mp4", "-i", path, "-c", "copy", mp4file
        ]))
    # one-frame 640x360 preview image next to the clip
    list(
        run([
            "/usr/bin/ffmpeg", "-i", mp4file, "-vf", "scale=640:360",
            "-frames:v", "1", mp4file + ".png"
        ]))
    sinfo = probe(mp4file)
    sinfo.update({
        "sensor": sensor,
        "office": {
            "lat": office[0],
            "lon": office[1],
        },
        "time": timestamp,
        "path": mp4file[len(self._storage) + 1:],
    })
    if local_office:
        # calculate total bandwidth
        bandwidth = 0
        for stream1 in sinfo["streams"]:
            if "bit_rate" in stream1:
                # bit_rate may arrive as a numeric string (ffprobe-style JSON)
                # — TODO confirm against probe(); int() accepts either form
                bandwidth = bandwidth + int(stream1["bit_rate"])
        if bandwidth:
            db_cam = DBQuery(host=dbhost, index="sensors", office=office)
            db_cam.update(sensor, {"bandwidth": bandwidth})
        # check disk usage and send alert
        disk_usage = psutil.disk_usage(self._storage)[3]
        if disk_usage > 75 and sensor_index:
            # fixed: was `disk_uage`, a NameError whenever usage exceeded 75%
            level = "fatal" if disk_usage > 85 else "warning"
            db_alt = DBIngest(host=dbhost, index="alerts", office=office)
            db_alt.ingest({
                "time": int(time.time() * 1000),
                "office": {
                    "lat": office[0],
                    "lon": office[1],
                },
                "location": {
                    "lat": office[0],
                    "lon": office[1],
                },
                level: [{
                    "message": "Disk usage: " + str(disk_usage) + "%",
                    "args": {
                        "disk": disk_usage,
                    }
                }]
            })
        # ingest recording local
        db_rec = DBIngest(host=dbhost, index="recordings", office=office)
        db_rec.ingest(sinfo)
    else:
        # ingest recording cloud
        db_rec = DBIngest(host=dbhost, index="recordings_c", office="")
        db_rec.ingest(sinfo)
def _rec2db(self, office, sensor, timestamp, path):
    """Store a recorded clip and ingest its metadata.

    Skips the copy entirely when disk usage has reached the halt
    threshold, raises a disk-usage alert when warranted, and denormalizes
    the sensor address into the recording document before ingesting it.
    """
    disk_usage = psutil.disk_usage(self._storage)[3]
    if disk_usage < halt_rec_th:
        dt = datetime.datetime.fromtimestamp(timestamp / 1000)
        # encode coordinates into a filesystem-safe directory component
        officestr = (str(office[0]) + "c" + str(office[1])).replace(
            "-", "n").replace(".", "d")
        mp4path = self._storage + "/" + officestr + "/" + sensor + "/" + str(
            dt.year) + "/" + str(dt.month) + "/" + str(dt.day)
        os.makedirs(mp4path, exist_ok=True)
        mp4file = mp4path + "/" + str(timestamp) + ".mp4"
        # perform a straight copy to fix negative timestamp for chrome
        list(
            run([
                "/usr/local/bin/ffmpeg", "-f", "mp4", "-i", path, "-c",
                "copy", mp4file
            ]))
        sinfo = probe(mp4file)
        sinfo.update({
            "sensor": sensor,
            "office": {
                "lat": office[0],
                "lon": office[1],
            },
            "time": timestamp,
            "path": mp4file[len(self._storage) + 1:],
        })
    else:
        print("Disk full: recording halted", flush=True)
        sinfo = None
    if local_office:
        # fixed: sinfo is None when recording was halted; the old code
        # subscripted it unconditionally here and raised TypeError
        if sinfo and sinfo["bandwidth"]:
            db_cam = DBQuery(host=dbhost, index="sensors", office=office)
            db_cam.update(sensor, {"bandwidth": sinfo["bandwidth"]})
        # check disk usage and send alert
        disk_usage = psutil.disk_usage(self._storage).percent
        if disk_usage >= warn_disk_th:
            level = "fatal" if disk_usage >= fatal_disk_th else "warning"
            db_alt = DBIngest(host=dbhost, index="alerts", office=office)
            message = text["halt recording"].format(
                disk_usage
            ) if disk_usage >= halt_rec_th else text["disk usage"].format(
                disk_usage)
            db_alt.ingest({
                "time": int(time.time() * 1000),
                "office": {
                    "lat": office[0],
                    "lon": office[1],
                },
                "location": {
                    "lat": office[0],
                    "lon": office[1],
                },
                level: [{
                    "message": message,
                    "args": {
                        "disk": disk_usage,
                    }
                }]
            })
    # ingest recording (local office keeps its office id, cloud uses "")
    if sinfo:
        print("Ingest recording: {}".format(sinfo), flush=True)
        office1 = office if local_office else ""
        # denormalize sensor address to recordings
        dbs = DBQuery(host=dbhost, index="sensors", office=office1)
        r = list(dbs.search("_id='" + sinfo["sensor"] + "'", size=1))
        if r:
            sinfo["address"] = r[0]["_source"]["address"]
        db_rec = DBIngest(host=dbhost, index="recordings", office=office1)
        db_rec.ingest(sinfo)
def _rec2db(self, office, sensor, timestamp, path):
    """Store a recorded clip and ingest its metadata locally or to the cloud.

    Local offices ingest directly; otherwise the sensor record is mirrored
    to the cloud (deduplicated by an md5 of its source document), the
    recording is ingested under the cloud sensor id, and the clip's
    analytics window is copied across.
    """
    disk_usage = psutil.disk_usage(self._storage)[3]
    if disk_usage < halt_rec_th:
        dt = datetime.datetime.fromtimestamp(timestamp / 1000)
        # encode coordinates into a filesystem-safe directory component
        officestr = (str(office[0]) + "c" + str(office[1])).replace(
            "-", "n").replace(".", "d")
        mp4path = self._storage + "/" + officestr + "/" + sensor + "/" + str(
            dt.year) + "/" + str(dt.month) + "/" + str(dt.day)
        os.makedirs(mp4path, exist_ok=True)
        mp4file = mp4path + "/" + str(timestamp) + ".mp4"
        # perform a straight copy to fix negative timestamp for chrome
        list(
            run([
                "/usr/local/bin/ffmpeg", "-f", "mp4", "-i", path, "-c",
                "copy", mp4file
            ]))
        sinfo = probe(mp4file)
        sinfo.update({
            "sensor": sensor,
            "office": {
                "lat": office[0],
                "lon": office[1],
            },
            "time": timestamp,
            "path": mp4file[len(self._storage) + 1:],
        })
    else:
        print("Disk full: recording halted", flush=True)
        sinfo = None
    if local_office:
        # fixed: sinfo is None when recording was halted; the old code
        # subscripted it unconditionally here and raised TypeError
        if sinfo and sinfo["bandwidth"]:
            db_cam = DBQuery(host=dbhost, index="sensors", office=office)
            db_cam.update(sensor, {"bandwidth": sinfo["bandwidth"]})
        # check disk usage and send alert
        disk_usage = psutil.disk_usage(self._storage).percent
        if disk_usage >= warn_disk_th:
            level = "fatal" if disk_usage >= fatal_disk_th else "warning"
            db_alt = DBIngest(host=dbhost, index="alerts", office=office)
            message = text["halt recording"].format(
                disk_usage
            ) if disk_usage >= halt_rec_th else text["disk usage"].format(
                disk_usage)
            db_alt.ingest({
                "time": int(time.time() * 1000),
                "office": {
                    "lat": office[0],
                    "lon": office[1],
                },
                "location": {
                    "lat": office[0],
                    "lon": office[1],
                },
                level: [{
                    "message": message,
                    "args": {
                        "disk": disk_usage,
                    }
                }]
            })
        # ingest recording local
        if sinfo:
            db_rec = DBIngest(host=dbhost, index="recordings", office=office)
            db_rec.ingest(sinfo)
    else:
        # ingest recording cloud
        if sinfo:
            db_s = DBQuery(host=dbhost, index="sensors", office=sinfo["office"])
            # renamed from `sensor` to stop shadowing the parameter
            sensor_rec = list(
                db_s.search("_id='" + sinfo["sensor"] + "'", size=1))
            if sensor_rec:
                # remove status
                sensor_rec[0]["_source"].pop("status", None)
                # denormalize address
                sinfo["address"] = sensor_rec[0]["_source"]["address"]
                # calculate hash code for the sensor
                m = hashlib.md5()
                m.update(
                    json.dumps(sensor_rec[0]["_source"],
                               ensure_ascii=False).encode('utf-8'))
                md5 = m.hexdigest()
                # locate the sensor record in cloud
                db_sc = DBQuery(host=dbhost, index="sensors", office="")
                sensor_c = list(db_sc.search("md5='" + md5 + "'", size=1))
                if not sensor_c:
                    # if not available, ingest a sensor record in cloud
                    sensor_c = [{
                        "_source": sensor_rec[0]["_source"].copy()
                    }]
                    sensor_c[0]["_source"]["md5"] = md5
                    db_sc = DBIngest(host=dbhost, index="sensors", office="")
                    print("Ingest sensor: {}".format(sensor_c[0]["_source"]),
                          flush=True)
                    sensor_c[0] = db_sc.ingest(sensor_c[0]["_source"])
                # replace cloud sensor id and ingest recording
                sinfo["sensor"] = sensor_c[0]["_id"]
                print("Ingest recording: {}".format(sinfo), flush=True)
                db_rec = DBIngest(host=dbhost, index="recordings", office="")
                db_rec.ingest(sinfo)
                # copy local analytics to cloud
                db_a = DBQuery(host=dbhost,
                               index="analytics",
                               office=sinfo["office"])
                data = []
                for r in db_a.search(
                        'sensor="' + sensor_rec[0]["_id"] + '" and office:[' +
                        str(office[0]) + ',' + str(office[1]) + '] and time>=' +
                        str(sinfo["time"]) + ' and time<=' +
                        str(sinfo["time"] + sinfo["duration"] * 1000),
                        size=10000):
                    r["_source"]["sensor"] = sinfo["sensor"]
                    data.append(r["_source"])
                db_ac = DBIngest(host=dbhost, index="analytics", office="")
                print("Ingest analytics: {}".format(len(data)), flush=True)
                db_ac.ingest_bulk(data)
# Service loop: poll the recordings DB, transcode each matching clip to
# HEVC, and upload it to the cloud host; sleeps between polling rounds.
while True:
    print("Searching...", flush=True)
    print("query = ", query, flush=True)
    try:
        for q in dbq.search(query):
            url = smhost + '/' + q["_source"]["path"]
            print("url: ", url, flush=True)
            mp4file = "/tmp/" + str(os.path.basename(url))
            print("Transcoding...", flush=True)
            # remove a stale output from an earlier attempt; the old
            # unconditional remove raised FileNotFoundError on the first
            # pass and aborted the whole result loop
            if os.path.exists(mp4file):
                os.remove(mp4file)
            list(
                run([
                    "/usr/local/bin/ffmpeg", "-f", "mp4", "-i", url, "-c:v",
                    "libsvt_hevc", "-c:a", "aac", mp4file
                ]))
            print("Uploading: ", cloudhost, flush=True)
            sensor = q["_source"]["sensor"]
            timestamp = q["_source"]["time"]
            upload(cloudhost, mp4file, office, sensor, timestamp)
            # free /tmp space once the clip has been uploaded
            os.remove(mp4file)
    except Exception as e:
        print("Exception: " + str(e), flush=True)
    print("Sleeping...", flush=True)
    time.sleep(service_interval)
r["_source"]["sensor"] = sensor_c[0]["_id"] analytics.append(r["_source"]) print("Ingest analytics: {}".format(len(analytics)), flush=True) dba_c.ingest_bulk(analytics) url = sthost + '/' + q["_source"]["path"] print("url: " + url, flush=True) mp4file = "/tmp/" + str(os.path.basename(url)) print("Transcoding...", flush=True) # Replace with any transcoding command list( run([ "/usr/local/bin/ffmpeg", "-f", "mp4", "-i", url, "-c", "copy", "-f", "mp4", "-y", mp4file ])) print("Uploading: " + stchost, flush=True) sensor = sensor_c[0]["_id"] timestamp = q["_source"]["time"] upload(stchost, mp4file, office, sensor, timestamp) os.remove(mp4file) except: print(traceback.format_exc(), flush=True) print("Sleeping...") stop.wait(service_interval) dbs.delete(rs["_id"])