def purge_duplicates(self):
    """Remove duplicate events from the database and delete their media.

    Events sharing the same label and camera whose start times fall in the
    same 5-second bucket are considered duplicates; the window query keeps
    the longest one (copy_number == 1) and flags the rest for removal.
    Associated snapshot (.jpg) and clip (.mp4) files are unlinked before
    the database rows are deleted.
    """
    query = """with grouped_events as ( select id, label, camera, has_snapshot, has_clip, row_number() over ( partition by label, camera, round(start_time/5,0)*5 order by end_time-start_time desc ) as copy_number from event ) select distinct id, camera, has_snapshot, has_clip from grouped_events where copy_number > 1;"""

    dupes = list(Event.raw(query))

    for dupe in dupes:
        logger.debug(f"Removing duplicate: {dupe.id}")
        base = f"{dupe.camera}-{dupe.id}"
        # Remove whichever media artifacts exist for this duplicate.
        for present, ext in ((dupe.has_snapshot, "jpg"), (dupe.has_clip, "mp4")):
            if present:
                Path(f"{os.path.join(CLIPS_DIR, base)}.{ext}").unlink(missing_ok=True)

    # Delete all duplicate rows in a single statement.
    Event.delete().where(Event.id << [d.id for d in dupes]).execute()
def parse_segment_delay(first_segments):
    """Return the seconds-into-the-hour offset of the first recording segment.

    Segment files are named ``00.<seconds>.mp4``. The offset is parsed from
    the basename of the first match; returns 0 when there is no segment or
    the filename does not match the expected pattern.

    Note: the previous implementation used ``first[0].strip(path)``, but
    ``str.strip`` removes a *character set* (not a prefix), which corrupts
    the filename whenever the directory path shares characters (digits,
    dots, letters) with the segment name.
    """
    if not first_segments:
        return 0
    parts = os.path.basename(first_segments[0]).split(".")
    try:
        return int(parts[1])
    except (IndexError, ValueError):
        # Unexpected filename shape — treat as no delay rather than crash.
        return 0


def recordings(camera_name):
    """Return a JSON summary of recordings for ``camera_name``.

    Scans the recording directory tree (``<RECORD_DIR>/<Y-m>/<d>/<H>/<camera>``),
    builds a per-date, per-hour map of playback delay and events, then attaches
    detection events (overlapping events with the same label are packed into a
    single row via the interval-packing SQL query).
    """
    files = glob.glob(f"{RECORD_DIR}/*/*/*/{camera_name}")

    if len(files) == 0:
        return jsonify([])

    files.sort()

    dates = OrderedDict()
    for path in files:
        first = glob.glob(f"{path}/00.*.mp4")
        # Seconds into the hour at which the first segment begins.
        delay = parse_segment_delay(first)
        search = re.search(r".+/(\d{4}[-]\d{2})/(\d{2})/(\d{2}).+", path)
        if not search:
            continue
        date = f"{search.group(1)}-{search.group(2)}"
        if date not in dates:
            dates[date] = OrderedDict()
        dates[date][search.group(3)] = {"delay": delay, "events": []}

    # Packing intervals to return all events with same label and overlapping times as one row.
    # See: https://blogs.solidq.com/en/sqlserver/packing-intervals/
    events = Event.raw(
        """WITH C1 AS ( SELECT id, label, camera, top_score, start_time AS ts, +1 AS type, 1 AS sub FROM event WHERE camera = ? UNION ALL SELECT id, label, camera, top_score, end_time + 15 AS ts, -1 AS type, 0 AS sub FROM event WHERE camera = ? ), C2 AS ( SELECT C1.*, SUM(type) OVER(PARTITION BY label ORDER BY ts, type DESC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) - sub AS cnt FROM C1 ), C3 AS ( SELECT id, label, camera, top_score, ts, (ROW_NUMBER() OVER(PARTITION BY label ORDER BY ts) - 1) / 2 + 1 AS grpnum FROM C2 WHERE cnt = 0 ) SELECT MIN(id) as id, label, camera, MAX(top_score) as top_score, MIN(ts) AS start_time, max(ts) AS end_time FROM C3 GROUP BY label, grpnum ORDER BY start_time;""",
        camera_name,
        camera_name,
    )

    e: Event
    for e in events:
        date = datetime.fromtimestamp(e.start_time)
        key = date.strftime("%Y-%m-%d")
        hour = date.strftime("%H")
        if key in dates and hour in dates[key]:
            dates[key][hour]["events"].append(
                model_to_dict(
                    e,
                    exclude=[
                        Event.false_positive,
                        Event.zones,
                        Event.thumbnail,
                        Event.has_clip,
                        Event.has_snapshot,
                    ],
                ))

    return jsonify([{
        "date": date,
        "events": sum([len(value["events"]) for value in hours.values()]),
        "recordings": [{
            "hour": hour,
            "delay": value["delay"],
            "events": value["events"]
        } for hour, value in hours.items()],
    } for date, hours in dates.items()])
def recordings(camera_name: str):
    """Return a JSON summary of recordings for ``camera_name``.

    Builds a nested mapping date -> hour -> {"delay": {offset: seconds},
    "events": [...]} from the Recordings table, then attaches detection
    events (overlapping events with the same label are packed into one
    row via the interval-packing SQL; in-progress events with a NULL
    end_time are unioned in as-is).
    """
    dates = OrderedDict()
    # Retrieve all recordings for this camera
    recordings = (Recordings.select().where(
        Recordings.camera == camera_name).order_by(
            Recordings.start_time.asc()))

    # End timestamp of the previously processed recording; carried across
    # iterations to measure gaps between consecutive segments.
    last_end = 0
    recording: Recordings
    for recording in recordings:
        date = datetime.fromtimestamp(recording.start_time)
        key = date.strftime("%Y-%m-%d")
        hour = date.strftime("%H")

        # Create Day Record
        if key not in dates:
            dates[key] = OrderedDict()

        # Create Hour Record
        if hour not in dates[key]:
            dates[key][hour] = {"delay": {}, "events": []}

        # Check for delay
        the_hour = datetime.strptime(f"{key} {hour}", "%Y-%m-%d %H").timestamp()
        # diff current recording start time and the greater of the previous end time or top of the hour
        diff = recording.start_time - max(last_end, the_hour)
        # Determine seconds into recording
        seconds = 0
        # Only carry an intra-hour offset when the previous segment ended
        # within this same clock hour (NOTE: compares hour strings only,
        # so a gap of exactly 24h would alias — assumed not to occur).
        if datetime.fromtimestamp(last_end).strftime("%H") == hour:
            seconds = int(last_end - the_hour)
        # Determine the delay; clamp so offset+delay never exceeds the hour.
        delay = min(int(diff), 3600 - seconds)
        if delay > 1:
            # Add an offset for any delay greater than a second
            dates[key][hour]["delay"][seconds] = delay
        last_end = recording.end_time

    # Packing intervals to return all events with same label and overlapping times as one row.
    # See: https://blogs.solidq.com/en/sqlserver/packing-intervals/
    events = Event.raw(
        """WITH C1 AS ( SELECT id, label, camera, top_score, start_time AS ts, +1 AS type, 1 AS sub FROM event WHERE camera = ? UNION ALL SELECT id, label, camera, top_score, end_time + 15 AS ts, -1 AS type, 0 AS sub FROM event WHERE camera = ? ), C2 AS ( SELECT C1.*, SUM(type) OVER(PARTITION BY label ORDER BY ts, type DESC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) - sub AS cnt FROM C1 ), C3 AS ( SELECT id, label, camera, top_score, ts, (ROW_NUMBER() OVER(PARTITION BY label ORDER BY ts) - 1) / 2 + 1 AS grpnum FROM C2 WHERE cnt = 0 ) SELECT id, label, camera, top_score, start_time, end_time FROM event WHERE camera = ? AND end_time IS NULL UNION ALL SELECT MIN(id) as id, label, camera, MAX(top_score) as top_score, MIN(ts) AS start_time, max(ts) AS end_time FROM C3 GROUP BY label, grpnum ORDER BY start_time;""",
        camera_name,
        camera_name,
        camera_name,
    )

    event: Event
    for event in events:
        date = datetime.fromtimestamp(event.start_time)
        key = date.strftime("%Y-%m-%d")
        hour = date.strftime("%H")
        # Attach the event only to hours for which a recording exists.
        if key in dates and hour in dates[key]:
            dates[key][hour]["events"].append(
                model_to_dict(
                    event,
                    exclude=[
                        Event.false_positive,
                        Event.zones,
                        Event.thumbnail,
                        Event.has_clip,
                        Event.has_snapshot,
                    ],
                ))

    return jsonify([{
        "date": date,
        "events": sum([len(value["events"]) for value in hours.values()]),
        "recordings": [{
            "hour": hour,
            "delay": value["delay"],
            "events": value["events"]
        } for hour, value in hours.items()],
    } for date, hours in dates.items()])