def sync_recordings(self):
    """Remove recordings rows from the database whose backing file no
    longer exists on disk.

    Compares every row in the Recordings table against the set of files
    currently under RECORD_DIR and deletes orphaned rows in one query.
    """
    logger.debug("Start sync recordings.")

    # get all recordings in the db
    recordings: Recordings = Recordings.select()

    # get all recordings files on disk
    process = sp.run(
        ["find", RECORD_DIR, "-type", "f"],
        capture_output=True,
        text=True,
    )
    # use a set so each existence check below is O(1) instead of a
    # linear scan of the full file list for every db row
    files_on_disk = set(process.stdout.splitlines())

    recordings_to_delete = [
        recording.id
        for recording in recordings.objects().iterator()
        if recording.path not in files_on_disk
    ]

    logger.debug(
        f"Deleting {len(recordings_to_delete)} recordings with missing files"
    )
    Recordings.delete().where(
        Recordings.id << recordings_to_delete).execute()

    logger.debug("End sync recordings.")
def expire_files(self):
    """Delete recording files on disk that have aged past the configured
    retention window (legacy, filesystem-driven expiry)."""
    logger.debug("Start expire files (legacy).")

    # cutoff timestamp for cameras that don't override retention
    default_expire = (datetime.datetime.now().timestamp()
                      - SECONDS_IN_DAY * self.config.record.retain.days)
    # per-camera cutoff timestamps, keyed by camera name
    delete_before = {}
    for name, camera in self.config.cameras.items():
        delete_before[name] = (datetime.datetime.now().timestamp()
                               - SECONDS_IN_DAY * camera.record.retain.days)

    # find all the recordings older than the oldest recording in the db
    try:
        oldest_recording = Recordings.select().order_by(
            Recordings.start_time).get()

        p = Path(oldest_recording.path)
        # -1s fudge so the mtime comparison in find is inclusive of
        # files written in the same second
        oldest_timestamp = p.stat().st_mtime - 1
    except DoesNotExist:
        # empty table: nothing is tracked, so every file on disk is
        # eligible for the age check below
        oldest_timestamp = datetime.datetime.now().timestamp()
    except FileNotFoundError:
        # the db row points at a missing file: drop the stale row and
        # retry on the next maintenance cycle
        logger.warning(
            f"Unable to find file from recordings database: {p}")
        Recordings.delete().where(
            Recordings.id == oldest_recording.id).execute()
        return

    logger.debug(f"Oldest recording in the db: {oldest_timestamp}")
    # "! -newermt @ts" selects files whose mtime is NOT newer than ts,
    # i.e. only files older than anything tracked in the database
    process = sp.run(
        [
            "find", RECORD_DIR, "-type", "f", "!", "-newermt",
            f"@{oldest_timestamp}"
        ],
        capture_output=True,
        text=True,
    )
    files_to_check = process.stdout.splitlines()

    for f in files_to_check:
        p = Path(f)
        try:
            # the parent directory name is the camera name, so look up
            # that camera's cutoff (or fall back to the default)
            if p.stat().st_mtime < delete_before.get(
                    p.parent.name, default_expire):
                p.unlink(missing_ok=True)
        except FileNotFoundError:
            # file vanished between find and stat (e.g. another cleanup
            # pass removed it); log and keep going
            logger.warning(f"Attempted to expire missing file: {f}")

    logger.debug("End expire files (legacy).")
def vod_ts(camera, start_ts, end_ts):
    """Build a VOD-module JSON manifest (clip sources + durations) for all
    recordings of *camera* that overlap the window [start_ts, end_ts]."""
    # a segment matches if it starts in, ends in, or fully spans the window
    window = (
        Recordings.start_time.between(start_ts, end_ts)
        | Recordings.end_time.between(start_ts, end_ts)
        | ((start_ts > Recordings.start_time) & (end_ts < Recordings.end_time))
    )
    recordings = (
        Recordings.select()
        .where(window)
        .where(Recordings.camera == camera)
        .order_by(Recordings.start_time.asc())
    )

    clips = []
    durations = []

    recording: Recordings
    for recording in recordings:
        clip = {"type": "source", "path": recording.path}
        duration = int(recording.duration * 1000)

        # trim the head of the first clip so playback starts at start_ts
        if recording.start_time < start_ts:
            offset = int((start_ts - recording.start_time) * 1000)
            clip["clipFrom"] = offset
            duration -= offset

        # shorten the tail of the last clip so playback stops at end_ts
        if recording.end_time > end_ts:
            duration -= int((recording.end_time - end_ts) * 1000)

        # trimming can leave nothing to play; skip such segments
        if duration <= 0:
            logger.warning(
                f"Recording clip is missing or empty: {recording.path}")
            continue

        clips.append(clip)
        durations.append(duration)

    if len(clips) == 0:
        logger.error("No recordings found for the requested time range")
        return "No recordings found.", 404

    # anything older than an hour is safe for the client to cache
    hour_ago = datetime.now() - timedelta(hours=1)
    manifest = {
        "cache": hour_ago.timestamp() > start_ts,
        "discontinuity": False,
        "durations": durations,
        "sequences": [{"clips": clips}],
    }
    return jsonify(manifest)
def expire_files(self):
    """Delete recording files on disk that have aged past the configured
    retention window (legacy, filesystem-driven expiry).

    Fixes vs the previous revision:
      * the "oldest recording" query sorted DESCENDING, which actually
        fetched the newest recording and made the find sweep below cover
        files still referenced by the database;
      * find was missing the "!" negation before -newermt, so it listed
        files NEWER than the cutoff instead of the older orphans;
      * stat() on a path that disappears mid-pass raised
        FileNotFoundError and aborted the whole expiry run.
    """
    logger.debug("Start expire files (legacy).")

    # cutoff timestamp for cameras that don't override retention
    default_expire = (datetime.datetime.now().timestamp()
                      - SECONDS_IN_DAY * self.config.record.retain_days)
    # per-camera cutoff timestamps, keyed by camera name
    delete_before = {}
    for name, camera in self.config.cameras.items():
        delete_before[name] = (datetime.datetime.now().timestamp()
                               - SECONDS_IN_DAY * camera.record.retain_days)

    # find all the recordings older than the oldest recording in the db
    try:
        # ascending order: the first row really is the oldest recording
        oldest_recording = (Recordings.select().order_by(
            Recordings.start_time.asc()).get())

        p = Path(oldest_recording.path)
        # -1s fudge so the mtime comparison in find is inclusive
        oldest_timestamp = p.stat().st_mtime - 1
    except DoesNotExist:
        # empty table: every file on disk is eligible for the age check
        oldest_timestamp = datetime.datetime.now().timestamp()
    except FileNotFoundError:
        # the db row points at a missing file: drop the stale row and
        # retry on the next maintenance cycle
        logger.warning(
            f"Unable to find file from recordings database: {p}")
        Recordings.delete().where(
            Recordings.id == oldest_recording.id).execute()
        return

    logger.debug(f"Oldest recording in the db: {oldest_timestamp}")
    # "! -newermt @ts" selects files whose mtime is NOT newer than ts,
    # i.e. only files older than anything tracked in the database
    process = sp.run(
        [
            "find", RECORD_DIR, "-type", "f", "!", "-newermt",
            f"@{oldest_timestamp}"
        ],
        capture_output=True,
        text=True,
    )
    files_to_check = process.stdout.splitlines()

    for f in files_to_check:
        p = Path(f)
        try:
            # the parent directory name is the camera name
            if p.stat().st_mtime < delete_before.get(p.parent.name,
                                                     default_expire):
                p.unlink(missing_ok=True)
        except FileNotFoundError:
            # file vanished between find and stat; log and keep going
            logger.warning(f"Attempted to expire missing file: {f}")

    logger.debug("End expire files (legacy).")
def expire_recordings(self):
    """Expire database-tracked recordings per retention config, keeping
    segments that overlap events (subject to the event retain mode)."""
    logger.debug("Start expire recordings (new).")

    logger.debug("Start deleted cameras.")
    # Handle deleted cameras
    expire_days = self.config.record.retain.days
    expire_before = (datetime.datetime.now() -
                     datetime.timedelta(days=expire_days)).timestamp()
    # recordings whose camera no longer exists in the config
    no_camera_recordings: Recordings = Recordings.select().where(
        Recordings.camera.not_in(list(self.config.cameras.keys())),
        Recordings.end_time < expire_before,
    )

    deleted_recordings = set()
    for recording in no_camera_recordings:
        Path(recording.path).unlink(missing_ok=True)
        deleted_recordings.add(recording.id)

    logger.debug(f"Expiring {len(deleted_recordings)} recordings")
    Recordings.delete().where(
        Recordings.id << deleted_recordings).execute()
    logger.debug("End deleted cameras.")

    logger.debug("Start all cameras.")
    for camera, config in self.config.cameras.items():
        logger.debug(f"Start camera: {camera}.")
        # When deleting recordings without events, we have to keep at LEAST the configured max clip duration
        min_end = (datetime.datetime.now() - datetime.timedelta(
            seconds=config.record.events.max_seconds)).timestamp()
        expire_days = config.record.retain.days
        expire_before = (datetime.datetime.now() -
                         datetime.timedelta(days=expire_days)).timestamp()
        # the earlier of the two cutoffs wins
        expire_date = min(min_end, expire_before)

        # Get recordings to check for expiration
        recordings: Recordings = (Recordings.select().where(
            Recordings.camera == camera,
            Recordings.end_time < expire_date,
        ).order_by(Recordings.start_time))

        # Get all the events to check against
        events: Event = (
            Event.select().where(
                Event.camera == camera,
                # need to ensure segments for all events starting
                # before the expire date are included
                Event.start_time < expire_date,
                Event.has_clip,
            ).order_by(Event.start_time).objects())

        # loop over recordings and see if they overlap with any non-expired events
        # TODO: expire segments based on segment stats according to config
        # event_start is a two-pointer optimization: events fully behind
        # the current segment never need re-checking (both lists sorted)
        event_start = 0
        deleted_recordings = set()
        for recording in recordings.objects().iterator():
            keep = False
            # Now look for a reason to keep this recording segment
            for idx in range(event_start, len(events)):
                event = events[idx]

                # if the event starts in the future, stop checking events
                # and let this recording segment expire
                if event.start_time > recording.end_time:
                    keep = False
                    break

                # if the event is in progress or ends after the recording starts, keep it
                # and stop looking at events
                if event.end_time is None or event.end_time >= recording.start_time:
                    keep = True
                    break

                # if the event ends before this recording segment starts, skip
                # this event and check the next event for an overlap.
                # since the events and recordings are sorted, we can skip events
                # that end before the previous recording segment started on future segments
                if event.end_time < recording.start_time:
                    event_start = idx

            # Delete recordings outside of the retention window or based on the retention mode
            if (not keep or
                    (config.record.events.retain.mode == RetainModeEnum.motion
                     and recording.motion == 0) or
                    (config.record.events.retain.mode ==
                     RetainModeEnum.active_objects
                     and recording.objects == 0)):
                Path(recording.path).unlink(missing_ok=True)
                deleted_recordings.add(recording.id)

        logger.debug(f"Expiring {len(deleted_recordings)} recordings")
        Recordings.delete().where(
            Recordings.id << deleted_recordings).execute()

        logger.debug(f"End camera: {camera}.")
    logger.debug("End all cameras.")
    logger.debug("End expire recordings (new).")
def recording_clip(camera, start_ts, end_ts):
    """Concatenate all recordings overlapping [start_ts, end_ts] into a
    single mp4 via ffmpeg's concat demuxer and return it as a response."""
    download = request.args.get("download", type=bool)

    # a segment matches if it starts in, ends in, or fully spans the window
    window = (
        (Recordings.start_time.between(start_ts, end_ts))
        | (Recordings.end_time.between(start_ts, end_ts))
        | ((start_ts > Recordings.start_time) & (end_ts < Recordings.end_time))
    )
    recordings = (
        Recordings.select()
        .where(window)
        .where(Recordings.camera == camera)
        .order_by(Recordings.start_time.asc())
    )

    # build the concat-demuxer playlist, trimming the edge clips
    playlist_lines = []
    clip: Recordings
    for clip in recordings:
        playlist_lines.append(f"file '{clip.path}'")
        if clip.start_time < start_ts:
            # first clip: start playback partway in
            playlist_lines.append(f"inpoint {int(start_ts - clip.start_time)}")
        if clip.end_time > end_ts:
            # last clip: stop playback early
            playlist_lines.append(f"outpoint {int(end_ts - clip.start_time)}")

    file_name = f"clip_{camera}_{start_ts}-{end_ts}.mp4"
    path = f"/tmp/cache/{file_name}"

    ffmpeg_cmd = [
        "ffmpeg",
        "-y",
        "-protocol_whitelist",
        "pipe,file",
        "-f",
        "concat",
        "-safe",
        "0",
        "-i",
        "-",
        "-c",
        "copy",
        "-movflags",
        "+faststart",
        path,
    ]

    proc = sp.run(
        ffmpeg_cmd,
        input="\n".join(playlist_lines),
        encoding="ascii",
        capture_output=True,
    )
    if proc.returncode != 0:
        logger.error(proc.stderr)
        return f"Could not create clip from recordings for {camera}.", 500

    response = make_response()
    response.headers["Content-Description"] = "File Transfer"
    response.headers["Cache-Control"] = "no-cache"
    response.headers["Content-Type"] = "video/mp4"
    if download:
        response.headers[
            "Content-Disposition"] = "attachment; filename=%s" % file_name
    response.headers["Content-Length"] = os.path.getsize(path)
    # hand the file back to nginx to serve from cache:
    # http://wiki.nginx.org/NginxXSendfile
    response.headers["X-Accel-Redirect"] = f"/cache/{file_name}"

    return response
def recordings(camera_name):
    """Summarize a camera's recordings per day/hour: recording gaps
    ("delay" offsets) plus packed, de-overlapped events per hour."""
    dates = OrderedDict()

    # Retrieve all recordings for this camera
    recordings = (Recordings.select().where(
        Recordings.camera == camera_name).order_by(
            Recordings.start_time.asc()))

    # end timestamp of the previously processed recording segment
    last_end = 0
    recording: Recordings
    for recording in recordings:
        date = datetime.fromtimestamp(recording.start_time)
        key = date.strftime("%Y-%m-%d")
        hour = date.strftime("%H")

        # Create Day Record
        if key not in dates:
            dates[key] = OrderedDict()

        # Create Hour Record
        if hour not in dates[key]:
            dates[key][hour] = {"delay": {}, "events": []}

        # Check for delay
        the_hour = datetime.strptime(f"{key} {hour}",
                                     "%Y-%m-%d %H").timestamp()
        # diff current recording start time and the greater of the previous end time or top of the hour
        diff = recording.start_time - max(last_end, the_hour)
        # Determine seconds into recording
        seconds = 0
        if datetime.fromtimestamp(last_end).strftime("%H") == hour:
            seconds = int(last_end - the_hour)
        # Determine the delay
        delay = min(int(diff), 3600 - seconds)
        if delay > 1:
            # Add an offset for any delay greater than a second
            dates[key][hour]["delay"][seconds] = delay
        last_end = recording.end_time

    # Packing intervals to return all events with same label and overlapping times as one row.
    # See: https://blogs.solidq.com/en/sqlserver/packing-intervals/
    events = Event.raw(
        """WITH C1 AS
        (
        SELECT id, label, camera, top_score, start_time AS ts, +1 AS type, 1 AS sub
        FROM event
        WHERE camera = ?
        UNION ALL
        SELECT id, label, camera, top_score, end_time + 15 AS ts, -1 AS type, 0 AS sub
        FROM event
        WHERE camera = ?
        ),
        C2 AS
        (
        SELECT C1.*,
        SUM(type) OVER(PARTITION BY label ORDER BY ts, type DESC
        ROWS BETWEEN UNBOUNDED PRECEDING
        AND CURRENT ROW) - sub AS cnt
        FROM C1
        ),
        C3 AS
        (
        SELECT id, label, camera, top_score, ts,
        (ROW_NUMBER() OVER(PARTITION BY label ORDER BY ts) - 1) / 2 + 1
        AS grpnum
        FROM C2
        WHERE cnt = 0
        )
        SELECT id, label, camera, top_score, start_time, end_time
        FROM event
        WHERE camera = ?
        AND end_time IS NULL
        UNION ALL
        SELECT MIN(id) as id, label, camera, MAX(top_score) as top_score, MIN(ts) AS start_time, max(ts) AS end_time
        FROM C3
        GROUP BY label, grpnum
        ORDER BY start_time;""",
        camera_name,
        camera_name,
        camera_name,
    )

    event: Event
    for event in events:
        date = datetime.fromtimestamp(event.start_time)
        key = date.strftime("%Y-%m-%d")
        hour = date.strftime("%H")
        # only attach events to hours that actually have recordings
        if key in dates and hour in dates[key]:
            dates[key][hour]["events"].append(
                model_to_dict(
                    event,
                    exclude=[
                        Event.false_positive,
                        Event.zones,
                        Event.thumbnail,
                        Event.has_clip,
                        Event.has_snapshot,
                    ],
                ))

    return jsonify([{
        "date": date,
        "events": sum([len(value["events"]) for value in hours.values()]),
        "recordings": [{
            "hour": hour,
            "delay": value["delay"],
            "events": value["events"]
        } for hour, value in hours.items()],
    } for date, hours in dates.items()])