Code example #1
    def sync_recordings(self):
        logger.debug("Start sync recordings.")

        # get all recordings in the db
        recordings: Recordings = Recordings.select()

        # get all recordings files on disk
        process = sp.run(
            ["find", RECORD_DIR, "-type", "f"],
            capture_output=True,
            text=True,
        )
        files_on_disk = process.stdout.splitlines()

        recordings_to_delete = []
        for recording in recordings.objects().iterator():
            if recording.path not in files_on_disk:
                recordings_to_delete.append(recording.id)

        logger.debug(
            f"Deleting {len(recordings_to_delete)} recordings with missing files"
        )
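        # << is peewee's "in" operator, so this deletes all listed ids at once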
        Recordings.delete().where(
            Recordings.id << recordings_to_delete).execute()

        logger.debug("End sync recordings.")
Code example #2
    def store_segment(
        self,
        camera,
        start_time,
        end_time,
        duration,
        cache_path,
        store_mode: RetainModeEnum,
    ):
        motion_count, active_count = self.segment_stats(
            camera, start_time, end_time)

        # check if the segment shouldn't be stored
        if (store_mode == RetainModeEnum.motion and motion_count == 0) or (
                store_mode == RetainModeEnum.active_objects and active_count == 0):
            Path(cache_path).unlink(missing_ok=True)
            self.end_time_cache.pop(cache_path, None)
            return

        directory = os.path.join(RECORD_DIR,
                                 start_time.strftime("%Y-%m/%d/%H"), camera)

        if not os.path.exists(directory):
            os.makedirs(directory)

        file_name = f"{start_time.strftime('%M.%S.mp4')}"
        file_path = os.path.join(directory, file_name)

        try:
            start_frame = datetime.datetime.now().timestamp()
            # copy then delete is required when recordings are stored on some network drives
            shutil.copyfile(cache_path, file_path)
            logger.debug(
                f"Copied {file_path} in {datetime.datetime.now().timestamp()-start_frame} seconds."
            )
            os.remove(cache_path)

            rand_id = "".join(
                random.choices(string.ascii_lowercase + string.digits, k=6))
            Recordings.create(
                id=f"{start_time.timestamp()}-{rand_id}",
                camera=camera,
                path=file_path,
                start_time=start_time.timestamp(),
                end_time=end_time.timestamp(),
                duration=duration,
                motion=motion_count,
                # TODO: update this to store list of active objects at some point
                objects=active_count,
            )
        except Exception as e:
            logger.error(f"Unable to store recording segment {cache_path}")
            Path(cache_path).unlink(missing_ok=True)
            logger.error(e)

        # clear end_time cache
        self.end_time_cache.pop(cache_path, None)
Code example #3
    def expire_files(self):
        logger.debug("Start expire files (legacy).")

        default_expire = (datetime.datetime.now().timestamp() -
                          SECONDS_IN_DAY * self.config.record.retain.days)
        delete_before = {}

        for name, camera in self.config.cameras.items():
            delete_before[name] = (datetime.datetime.now().timestamp() -
                                   SECONDS_IN_DAY * camera.record.retain.days)

        # find all the recordings older than the oldest recording in the db
        try:
            oldest_recording = Recordings.select().order_by(
                Recordings.start_time).get()

            p = Path(oldest_recording.path)
            oldest_timestamp = p.stat().st_mtime - 1
        except DoesNotExist:
            oldest_timestamp = datetime.datetime.now().timestamp()
        except FileNotFoundError:
            logger.warning(
                f"Unable to find file from recordings database: {p}")
            Recordings.delete().where(
                Recordings.id == oldest_recording.id).execute()
            return

        logger.debug(f"Oldest recording in the db: {oldest_timestamp}")
        process = sp.run(
            [
                "find", RECORD_DIR, "-type", "f", "!", "-newermt",
                f"@{oldest_timestamp}"
            ],
            capture_output=True,
            text=True,
        )
        files_to_check = process.stdout.splitlines()

        for f in files_to_check:
            p = Path(f)
            try:
                if p.stat().st_mtime < delete_before.get(
                        p.parent.name, default_expire):
                    p.unlink(missing_ok=True)
            except FileNotFoundError:
                logger.warning(f"Attempted to expire missing file: {f}")

        logger.debug("End expire files (legacy).")
Code example #4
File: http.py Project: ScottRoach/frigate
def vod_ts(camera, start_ts, end_ts):
    recordings = (
        Recordings.select()
        .where(
            Recordings.start_time.between(start_ts, end_ts)
            | Recordings.end_time.between(start_ts, end_ts)
            | ((start_ts > Recordings.start_time) & (end_ts < Recordings.end_time))
        )
        .where(Recordings.camera == camera)
        .order_by(Recordings.start_time.asc())
    )

    clips = []
    durations = []

    recording: Recordings
    for recording in recordings:
        clip = {"type": "source", "path": recording.path}
        duration = int(recording.duration * 1000)
        # Determine if offset is needed for first clip
        if recording.start_time < start_ts:
            offset = int((start_ts - recording.start_time) * 1000)
            clip["clipFrom"] = offset
            duration -= offset
        # Determine if we need to end the last clip early
        if recording.end_time > end_ts:
            duration -= int((recording.end_time - end_ts) * 1000)

        if duration > 0:
            clips.append(clip)
            durations.append(duration)
        else:
            logger.warning(
                f"Recording clip is missing or empty: {recording.path}")

    if not clips:
        logger.error("No recordings found for the requested time range")
        return "No recordings found.", 404

    hour_ago = datetime.now() - timedelta(hours=1)
    return jsonify({
        "cache": hour_ago.timestamp() > start_ts,
        "discontinuity": False,
        "durations": durations,
        "sequences": [{
            "clips": clips
        }],
    })
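For a single matching segment that begins a few seconds before the requested start_ts, the object handed to jsonify above would look roughly like this (camera name, path, and numbers are purely illustrative):

    {
        "cache": True,
        "discontinuity": False,
        "durations": [52000],
        "sequences": [{
            "clips": [{
                "type": "source",
                "path": "/media/frigate/recordings/2022-02/01/10/front_door/00.00.mp4",
                "clipFrom": 8000,
            }]
        }],
    }

clipFrom trims the first clip by the computed offset, and the matching entry in durations is reduced by the same amount.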
Code example #5
    def expire_files(self):
        logger.debug("Start expire files (legacy).")

        default_expire = (datetime.datetime.now().timestamp() -
                          SECONDS_IN_DAY * self.config.record.retain_days)
        delete_before = {}

        for name, camera in self.config.cameras.items():
            delete_before[name] = (datetime.datetime.now().timestamp() -
                                   SECONDS_IN_DAY * camera.record.retain_days)

        # find all the recordings older than the oldest recording in the db
        try:
            # ascending order so the first row really is the oldest recording
            oldest_recording = (Recordings.select().order_by(
                Recordings.start_time).get())

            p = Path(oldest_recording.path)
            oldest_timestamp = p.stat().st_mtime - 1
        except DoesNotExist:
            oldest_timestamp = datetime.datetime.now().timestamp()

        logger.debug(f"Oldest recording in the db: {oldest_timestamp}")
        process = sp.run(
            [
                # "!" inverts -newermt: match files NOT newer than (i.e. older
                # than) the oldest recording tracked in the db
                "find", RECORD_DIR, "-type", "f", "!", "-newermt",
                f"@{oldest_timestamp}"
            ],
            capture_output=True,
            text=True,
        )
        files_to_check = process.stdout.splitlines()

        for f in files_to_check:
            p = Path(f)
            if p.stat().st_mtime < delete_before.get(p.parent.name,
                                                     default_expire):
                p.unlink(missing_ok=True)

        logger.debug("End expire files (legacy).")
Code example #6
    def expire_recordings(self):
        logger.debug("Start expire recordings (new).")

        logger.debug("Start deleted cameras.")
        # Handle deleted cameras
        expire_days = self.config.record.retain.days
        expire_before = (datetime.datetime.now() -
                         datetime.timedelta(days=expire_days)).timestamp()
        no_camera_recordings: Recordings = Recordings.select().where(
            Recordings.camera.not_in(list(self.config.cameras.keys())),
            Recordings.end_time < expire_before,
        )

        deleted_recordings = set()
        for recording in no_camera_recordings:
            Path(recording.path).unlink(missing_ok=True)
            deleted_recordings.add(recording.id)

        logger.debug(f"Expiring {len(deleted_recordings)} recordings")
        Recordings.delete().where(
            Recordings.id << deleted_recordings).execute()
        logger.debug("End deleted cameras.")

        logger.debug("Start all cameras.")
        for camera, config in self.config.cameras.items():
            logger.debug(f"Start camera: {camera}.")
            # When deleting recordings without events, we have to keep at LEAST the configured max clip duration
            min_end = (datetime.datetime.now() - datetime.timedelta(
                seconds=config.record.events.max_seconds)).timestamp()
            expire_days = config.record.retain.days
            expire_before = (datetime.datetime.now() -
                             datetime.timedelta(days=expire_days)).timestamp()
            expire_date = min(min_end, expire_before)

            # Get recordings to check for expiration
            recordings: Recordings = (Recordings.select().where(
                Recordings.camera == camera,
                Recordings.end_time < expire_date,
            ).order_by(Recordings.start_time))

            # Get all the events to check against
            events: Event = (
                Event.select().where(
                    Event.camera == camera,
                    # need to ensure segments for all events starting
                    # before the expire date are included
                    Event.start_time < expire_date,
                    Event.has_clip,
                ).order_by(Event.start_time).objects())

            # loop over recordings and see if they overlap with any non-expired events
            # TODO: expire segments based on segment stats according to config
            event_start = 0
            deleted_recordings = set()
            for recording in recordings.objects().iterator():
                keep = False
                # Now look for a reason to keep this recording segment
                for idx in range(event_start, len(events)):
                    event = events[idx]

                    # if the event starts in the future, stop checking events
                    # and let this recording segment expire
                    if event.start_time > recording.end_time:
                        keep = False
                        break

                    # if the event is in progress or ends after the recording starts, keep it
                    # and stop looking at events
                    if event.end_time is None or event.end_time >= recording.start_time:
                        keep = True
                        break

                    # if the event ends before this recording segment starts, skip
                    # this event and check the next event for an overlap.
                    # since the events and recordings are sorted, we can skip events
                    # that end before the previous recording segment started on future segments
                    if event.end_time < recording.start_time:
                        event_start = idx

                # Delete recordings outside of the retention window or based on the retention mode
                if (not keep
                        or (config.record.events.retain.mode == RetainModeEnum.motion
                            and recording.motion == 0)
                        or (config.record.events.retain.mode == RetainModeEnum.active_objects
                            and recording.objects == 0)):
                    Path(recording.path).unlink(missing_ok=True)
                    deleted_recordings.add(recording.id)

            logger.debug(f"Expiring {len(deleted_recordings)} recordings")
            Recordings.delete().where(
                Recordings.id << deleted_recordings).execute()

            logger.debug(f"End camera: {camera}.")

        logger.debug("End all cameras.")
        logger.debug("End expire recordings (new).")
Code example #7
    def move_files(self):
        recordings = [
            d for d in os.listdir(CACHE_DIR)
            if os.path.isfile(os.path.join(CACHE_DIR, d)) and d.endswith(".ts")
        ]

        files_in_use = []
        for process in psutil.process_iter():
            try:
                if process.name() != "ffmpeg":
                    continue
                flist = process.open_files()
                if flist:
                    for nt in flist:
                        if nt.path.startswith(CACHE_DIR):
                            files_in_use.append(nt.path.split("/")[-1])
            except Exception:
                # the process may have exited or denied access; skip it
                continue

        for f in recordings:
            # Skip files currently in use
            if f in files_in_use:
                continue

            cache_path = os.path.join(CACHE_DIR, f)
            basename = os.path.splitext(f)[0]
            camera, date = basename.rsplit("-", maxsplit=1)
            start_time = datetime.datetime.strptime(date, "%Y%m%d%H%M%S")

            # Just delete files if recordings are turned off
            if (camera not in self.config.cameras
                    or not self.config.cameras[camera].record.enabled):
                Path(cache_path).unlink(missing_ok=True)
                continue

            ffprobe_cmd = [
                "ffprobe",
                "-v",
                "error",
                "-show_entries",
                "format=duration",
                "-of",
                "default=noprint_wrappers=1:nokey=1",
                f"{cache_path}",
            ]
            p = sp.run(ffprobe_cmd, capture_output=True)
            if p.returncode == 0:
                duration = float(p.stdout.decode().strip())
                end_time = start_time + datetime.timedelta(seconds=duration)
            else:
                logger.warning(f"Discarding a corrupt recording segment: {f}")
                Path(cache_path).unlink(missing_ok=True)
                continue

            directory = os.path.join(RECORD_DIR,
                                     start_time.strftime("%Y-%m/%d/%H"),
                                     camera)

            if not os.path.exists(directory):
                os.makedirs(directory)

            file_name = f"{start_time.strftime('%M.%S.mp4')}"
            file_path = os.path.join(directory, file_name)

            ffmpeg_cmd = [
                "ffmpeg",
                "-y",
                "-i",
                cache_path,
                "-c",
                "copy",
                "-movflags",
                "+faststart",
                file_path,
            ]

            p = sp.run(
                ffmpeg_cmd,
                encoding="ascii",
                capture_output=True,
            )

            Path(cache_path).unlink(missing_ok=True)

            if p.returncode != 0:
                logger.error(f"Unable to convert {cache_path} to {file_path}")
                logger.error(p.stderr)
                continue

            rand_id = "".join(
                random.choices(string.ascii_lowercase + string.digits, k=6))
            Recordings.create(
                id=f"{start_time.timestamp()}-{rand_id}",
                camera=camera,
                path=file_path,
                start_time=start_time.timestamp(),
                end_time=end_time.timestamp(),
                duration=duration,
            )
Code example #8
File: http.py Project: ScottRoach/frigate
def recording_clip(camera, start_ts, end_ts):
    download = request.args.get("download", type=bool)

    recordings = (
        Recordings.select()
        .where(
            (Recordings.start_time.between(start_ts, end_ts))
            | (Recordings.end_time.between(start_ts, end_ts))
            | ((start_ts > Recordings.start_time) & (end_ts < Recordings.end_time))
        )
        .where(Recordings.camera == camera)
        .order_by(Recordings.start_time.asc())
    )

    playlist_lines = []
    clip: Recordings
    for clip in recordings:
        playlist_lines.append(f"file '{clip.path}'")
        # if this is the starting clip, add an inpoint
        if clip.start_time < start_ts:
            playlist_lines.append(f"inpoint {int(start_ts - clip.start_time)}")
        # if this is the ending clip, add an outpoint
        if clip.end_time > end_ts:
            playlist_lines.append(f"outpoint {int(end_ts - clip.start_time)}")

    file_name = f"clip_{camera}_{start_ts}-{end_ts}.mp4"
    path = f"/tmp/cache/{file_name}"

    ffmpeg_cmd = [
        "ffmpeg",
        "-y",
        "-protocol_whitelist",
        "pipe,file",
        "-f",
        "concat",
        "-safe",
        "0",
        "-i",
        "-",
        "-c",
        "copy",
        "-movflags",
        "+faststart",
        path,
    ]

    p = sp.run(
        ffmpeg_cmd,
        input="\n".join(playlist_lines),
        encoding="ascii",
        capture_output=True,
    )
    if p.returncode != 0:
        logger.error(p.stderr)
        return f"Could not create clip from recordings for {camera}.", 500

    response = make_response()
    response.headers["Content-Description"] = "File Transfer"
    response.headers["Cache-Control"] = "no-cache"
    response.headers["Content-Type"] = "video/mp4"
    if download:
        response.headers["Content-Disposition"] = "attachment; filename=%s" % file_name
    response.headers["Content-Length"] = os.path.getsize(path)
    # let nginx serve the file from cache: http://wiki.nginx.org/NginxXSendfile
    response.headers["X-Accel-Redirect"] = f"/cache/{file_name}"

    return response
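Because "-i -" reads the concat demuxer playlist from stdin, playlist_lines for a request that spans two stored segments would contain something like the following (paths and offsets are purely illustrative):

    file '/media/frigate/recordings/2022-02/01/10/front_door/00.00.mp4'
    inpoint 42
    file '/media/frigate/recordings/2022-02/01/10/front_door/01.00.mp4'
    outpoint 17

inpoint skips into the first clip to reach start_ts, and outpoint stops the last clip at end_ts, matching the int(start_ts - clip.start_time) and int(end_ts - clip.start_time) values computed above.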
Code example #9
File: http.py Project: ScottRoach/frigate
def recordings(camera_name):
    dates = OrderedDict()

    # Retrieve all recordings for this camera
    recordings = (Recordings.select().where(
        Recordings.camera == camera_name).order_by(
            Recordings.start_time.asc()))

    last_end = 0
    recording: Recordings
    for recording in recordings:
        date = datetime.fromtimestamp(recording.start_time)
        key = date.strftime("%Y-%m-%d")
        hour = date.strftime("%H")

        # Create Day Record
        if key not in dates:
            dates[key] = OrderedDict()

        # Create Hour Record
        if hour not in dates[key]:
            dates[key][hour] = {"delay": {}, "events": []}

        # Check for delay
        the_hour = datetime.strptime(f"{key} {hour}",
                                     "%Y-%m-%d %H").timestamp()
        # diff current recording start time and the greater of the previous end time or top of the hour
        diff = recording.start_time - max(last_end, the_hour)
        # Determine seconds into recording
        seconds = 0
        if datetime.fromtimestamp(last_end).strftime("%H") == hour:
            seconds = int(last_end - the_hour)
        # Determine the delay
        delay = min(int(diff), 3600 - seconds)
        if delay > 1:
            # Add an offset for any delay greater than a second
            dates[key][hour]["delay"][seconds] = delay

        last_end = recording.end_time

    # Packing intervals to return all events with same label and overlapping times as one row.
    # See: https://blogs.solidq.com/en/sqlserver/packing-intervals/
    events = Event.raw(
        """WITH C1 AS
        (
        SELECT id, label, camera, top_score, start_time AS ts, +1 AS type, 1 AS sub
        FROM event
        WHERE camera = ?
        UNION ALL
        SELECT id, label, camera, top_score, end_time + 15 AS ts, -1 AS type, 0 AS sub
        FROM event
        WHERE camera = ?
        ),
        C2 AS
        (
        SELECT C1.*,
        SUM(type) OVER(PARTITION BY label ORDER BY ts, type DESC
        ROWS BETWEEN UNBOUNDED PRECEDING
        AND CURRENT ROW) - sub AS cnt
        FROM C1
        ),
        C3 AS
        (
        SELECT id, label, camera, top_score, ts,
        (ROW_NUMBER() OVER(PARTITION BY label ORDER BY ts) - 1) / 2 + 1
        AS grpnum
        FROM C2
        WHERE cnt = 0
        )
        SELECT id, label, camera, top_score, start_time, end_time
        FROM event
        WHERE camera = ? AND end_time IS NULL
        UNION ALL
        SELECT MIN(id) as id, label, camera, MAX(top_score) as top_score, MIN(ts) AS start_time, max(ts) AS end_time
        FROM C3
        GROUP BY label, grpnum
        ORDER BY start_time;""",
        camera_name,
        camera_name,
        camera_name,
    )

    event: Event
    for event in events:
        date = datetime.fromtimestamp(event.start_time)
        key = date.strftime("%Y-%m-%d")
        hour = date.strftime("%H")
        if key in dates and hour in dates[key]:
            dates[key][hour]["events"].append(
                model_to_dict(
                    event,
                    exclude=[
                        Event.false_positive,
                        Event.zones,
                        Event.thumbnail,
                        Event.has_clip,
                        Event.has_snapshot,
                    ],
                ))

    return jsonify([
        {
            "date": date,
            "events": sum([len(value["events"]) for value in hours.values()]),
            "recordings": [
                {"hour": hour, "delay": value["delay"], "events": value["events"]}
                for hour, value in hours.items()
            ],
        }
        for date, hours in dates.items()
    ])
Code example #10
def add_index():
    # First add the index here, because there is a bug in peewee_migrate
    # when trying to create a multi-column index in the same migration
    # as the table: https://github.com/klen/peewee_migrate/issues/19
    Recordings.add_index("start_time", "end_time")
    Recordings.create_table()
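The fragment above reads like a helper nested inside a peewee_migrate migration module. A minimal sketch of how such a module might wrap it is shown below; the migrate/rollback signatures follow peewee_migrate's usual template, and the frigate.models import path is an assumption rather than something taken from this source.

    # sketch only -- module layout and import path are assumed
    from frigate.models import Recordings

    # add_index() is the helper shown above: it registers the composite
    # (start_time, end_time) index on the model before creating the table.

    def migrate(migrator, database, fake=False, **kwargs):
        # run the helper as plain Python instead of declaring the table
        # through the migrator, side-stepping the linked peewee_migrate bug
        add_index()

    def rollback(migrator, database, fake=False, **kwargs):
        Recordings.drop_table()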