Add feature to skip events longer than a maximum length

Sebastian Goscik
2023-11-01 00:11:49 +00:00
parent d5a74f475a
commit 9410051ab9
4 changed files with 50 additions and 11 deletions

View File

@@ -191,6 +191,8 @@ Options:
                                 Subsequent missing events will be downloaded (e.g. a missed event) [default: False]
   --download-rate-limit FLOAT    Limit how events can be downloaded in one minute. Disabled by
                                  default
+  --max-event-length INTEGER     Only download events shorter than this maximum length, in
+                                 seconds  [default: 7200]
   --help                         Show this message and exit.
 ```
@@ -215,6 +217,7 @@ always take priority over environment variables):
 - `APPRISE_NOTIFIERS`
 - `SKIP_MISSING`
 - `DOWNLOAD_RATELIMIT`
+- `MAX_EVENT_LENGTH`
 
 ## File path formatting

View File

@@ -203,6 +203,14 @@ Subsequent missing events will be downloaded (e.g. a missed event)
     type=float,
     help="Limit how events can be downloaded in one minute. Disabled by default",
 )
+@click.option(
+    '--max-event-length',
+    default=2 * 60 * 60,
+    show_default=True,
+    envvar='MAX_EVENT_LENGTH',
+    type=int,
+    help="Only download events shorter than this maximum length, in seconds",
+)
 def main(**kwargs):
     """A Python based tool for backing up Unifi Protect event clips as they occur."""
     event_listener = UnifiProtectBackup(**kwargs)
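
For reference, the option added above can be exercised in isolation with a minimal standalone click command (the `demo` command name below is illustrative); it shows how the flag, the `MAX_EVENT_LENGTH` environment variable, and the 7200-second default all resolve into the `max_event_length` keyword that `main(**kwargs)` forwards to `UnifiProtectBackup`:

```python
import click


@click.command()
@click.option(
    '--max-event-length',
    default=2 * 60 * 60,          # 7200 seconds (2 hours)
    show_default=True,
    envvar='MAX_EVENT_LENGTH',
    type=int,
    help="Only download events shorter than this maximum length, in seconds",
)
def demo(max_event_length: int):
    # click turns the dashed flag into a snake_case parameter; via **kwargs
    # this is how the value reaches UnifiProtectBackup in the real CLI.
    click.echo(f"{max_event_length=}")


if __name__ == "__main__":
    demo()
```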

View File

@@ -50,6 +50,7 @@ class VideoDownloader:
         upload_queue: VideoQueue,
         color_logging: bool,
         download_rate_limit: float,
+        max_event_length: timedelta,
     ):
         """Init.
@@ -60,6 +61,7 @@ class VideoDownloader:
             upload_queue (VideoQueue): Queue to place downloaded videos on
             color_logging (bool): Whether or not to add color to logging output
             download_rate_limit (float): Limit how events can be downloaded in one minute",
+            max_event_length (timedelta): Maximum length in seconds for an event to be considered valid and downloaded
         """
         self._protect: ProtectApiClient = protect
         self._db: aiosqlite.Connection = db
@@ -68,6 +70,7 @@ class VideoDownloader:
         self.current_event = None
         self._failures = ExpiringDict(60 * 60 * 12)  # Time to live = 12h
         self._download_rate_limit = download_rate_limit
+        self._max_event_length = max_event_length
         self._limiter = AsyncLimiter(self._download_rate_limit) if self._download_rate_limit is not None else None
         self.base_logger = logging.getLogger(__name__)
@@ -95,6 +98,7 @@ class VideoDownloader:
                 await self._protect.connect_event.wait()
                 event = await self.download_queue.get()
                 self.current_event = event
                 self.logger = logging.LoggerAdapter(self.base_logger, {'event': f' [{event.id}]'})
@@ -118,6 +122,11 @@ class VideoDownloader:
                 duration = (event.end - event.start).total_seconds()
                 self.logger.debug(f" Duration: {duration}s")
 
+                # Skip invalid events
+                if not self._valid_event(event):
+                    await self._ignore_event(event)
+                    continue
+
                 # Unifi protect does not return full video clips if the clip is requested too soon.
                 # There are two issues at play here:
                 #  - Protect will only cut a clip on an keyframe which happen every 5s
@@ -146,15 +155,7 @@ class VideoDownloader:
                     self.logger.error(
                         "Event has failed to download 10 times in a row. Permanently ignoring this event"
                     )
+                    await self._ignore_event(event)
-                    # ignore event
-                    await self._db.execute(
-                        "INSERT INTO events VALUES "
-                        f"('{event.id}', '{event.type}', '{event.camera_id}',"
-                        f"'{event.start.timestamp()}', '{event.end.timestamp()}')"
-                    )
-                    await self._db.commit()
                     continue
 
                 # Remove successfully downloaded event from failures list
@@ -193,6 +194,15 @@ class VideoDownloader:
self.logger.debug(f" Downloaded video size: {human_readable_size(len(video))}s") self.logger.debug(f" Downloaded video size: {human_readable_size(len(video))}s")
return video return video
async def _ignore_event(self, event):
self.logger.warning("Ignoring event")
await self._db.execute(
"INSERT INTO events VALUES "
f"('{event.id}', '{event.type}', '{event.camera_id}',"
f"'{event.start.timestamp()}', '{event.end.timestamp()}')"
)
await self._db.commit()
async def _check_video_length(self, video, duration): async def _check_video_length(self, video, duration):
"""Check if the downloaded event is at least the length of the event, warn otherwise. """Check if the downloaded event is at least the length of the event, warn otherwise.
@@ -207,3 +217,11 @@ class VideoDownloader:
             self.logger.debug(msg)
         except SubprocessException as e:
             self.logger.warning(" `ffprobe` failed", exc_info=e)
+
+    def _valid_event(self, event):
+        duration = event.end - event.start
+        if duration > self._max_event_length:
+            self.logger.warning(f"Event longer ({duration}) than max allowed length {self._max_event_length}")
+            return False
+        return True
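
`_valid_event` is a plain timedelta comparison: the event's duration is derived from its start and end timestamps and rejected if it exceeds the configured maximum. A small illustrative check with made-up timestamps:

```python
from datetime import datetime, timedelta

max_event_length = timedelta(seconds=2 * 60 * 60)   # the 7200 s default

start = datetime(2023, 11, 1, 0, 0, 0)
end = datetime(2023, 11, 1, 2, 30, 0)                # a 2.5 hour event

duration = end - start
print(duration > max_event_length)  # True -> the event would be ignored
```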

View File

@@ -3,7 +3,7 @@ import asyncio
 import logging
 import os
 import shutil
-from datetime import datetime, timezone
+from datetime import datetime, timezone, timedelta
 from typing import Callable, List
 
 import aiosqlite
@@ -62,6 +62,7 @@ class UnifiProtectBackup:
         purge_interval: relativedelta,
         apprise_notifiers: str,
         skip_missing: bool,
+        max_event_length: int,
         sqlite_path: str = "events.sqlite",
         color_logging: bool = False,
         download_rate_limit: float = None,
@@ -95,6 +96,7 @@ class UnifiProtectBackup:
             sqlite_path (str): Path where to find/create sqlite database
             color_logging (bool): Whether to add color to logging output or not
             download_rate_limit (float): Limit how events can be downloaded in one minute. Disabled by default",
+            max_event_length (int): Maximum length in seconds for an event to be considered valid and downloaded
         """
         self.color_logging = color_logging
         setup_logging(verbose, self.color_logging)
@@ -130,6 +132,7 @@ class UnifiProtectBackup:
logger.debug(f" {apprise_notifiers=}") logger.debug(f" {apprise_notifiers=}")
logger.debug(f" {skip_missing=}") logger.debug(f" {skip_missing=}")
logger.debug(f" {download_rate_limit=} events per minute") logger.debug(f" {download_rate_limit=} events per minute")
logger.debug(f" {max_event_length=}s")
self.rclone_destination = rclone_destination self.rclone_destination = rclone_destination
self.retention = retention self.retention = retention
@@ -162,6 +165,7 @@ class UnifiProtectBackup:
         self._purge_interval = purge_interval
         self._skip_missing = skip_missing
         self._download_rate_limit = download_rate_limit
+        self._max_event_length = timedelta(seconds=max_event_length)
 
     async def start(self):
         """Bootstrap the backup process and kick off the main loop.
@@ -222,7 +226,13 @@ class UnifiProtectBackup:
         # Create downloader task
         # This will download video files to its buffer
         downloader = VideoDownloader(
-            self._protect, self._db, download_queue, upload_queue, self.color_logging, self._download_rate_limit
+            self._protect,
+            self._db,
+            download_queue,
+            upload_queue,
+            self.color_logging,
+            self._download_rate_limit,
+            self._max_event_length,
         )
         tasks.append(downloader.start())
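
Putting the last two hunks together: `UnifiProtectBackup` converts the integer seconds from the CLI into a `timedelta` once, then hands it to `VideoDownloader` as the new trailing constructor argument. A compressed sketch of that hand-off; the stand-in class below is hypothetical and keeps only the new parameter:

```python
from datetime import timedelta


class VideoDownloaderSketch:
    """Stand-in for VideoDownloader, reduced to the new parameter."""

    def __init__(self, max_event_length: timedelta):
        self._max_event_length = max_event_length


max_event_length = 7200  # seconds, as received from --max-event-length
downloader = VideoDownloaderSketch(timedelta(seconds=max_event_length))
print(downloader._max_event_length)  # 2:00:00
```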