Add ability to skip missing events at launch

This commit is contained in:
Sebastian Goscik
2023-03-24 01:02:58 +00:00
parent 0d3395b74a
commit 782d126ae5
4 changed files with 103 additions and 59 deletions

View File

@@ -183,6 +183,8 @@ Options:
                                  More details about supported platforms can be found here:
                                  https://github.com/caronc/apprise
  --skip-missing                  If set, events which are 'missing' at the start will be ignored.
                                  Subsequent missing events will be downloaded (e.g. a missed event)  [default: False]
  --help                          Show this message and exit.
```
@@ -204,6 +206,7 @@ always take priority over environment variables):
- `COLOR_LOGGING`
- `PURGE_INTERVAL`
- `APPRISE_NOTIFIERS`
- `SKIP_MISSING`

## File path formatting
@@ -222,6 +225,12 @@ The following fields are provided to the format string:
You can optionally format the `event.start`/`event.end` timestamps as per the [`strftime` format](https://docs.python.org/3/library/datetime.html#strftime-strptime-behavior) by appending it after a `:`, e.g. to get just the date without the time: `{event.start:%Y-%m-%d}`
## Skipping initially missing events
If you prefer not to back up the entire backlog of events, and would instead rather back up only events that occur from
now on, you can use the `--skip-missing` flag (or the `SKIP_MISSING` environment variable). This does not disable the periodic check for missing events (e.g. one that was missed due to a disconnection); instead it marks all events that are missing at start-up as already backed up.

If you use this feature, it is advised that you run the tool once with this flag, then stop it once the database has been created and the events have been ignored. Keeping this flag set permanently could cause events to be missed if, for example, the tool crashes and is restarted.
# A note about `rclone` backends and disk wear

This tool attempts not to write the downloaded files to disk to minimise disk wear, and instead streams them directly to
rclone. Sadly, not all storage backends supported by `rclone` allow "Stream Uploads". Please refer to the `StreamUpload` column in this table to see which ones do and don't: https://rclone.org/overview/#optional-features
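
To make the streaming idea concrete, here is a minimal sketch of piping a clip straight into `rclone rcat` (which reads from stdin) without a temporary file. The helper name and remote path are hypothetical, and this is illustrative rather than how this tool necessarily invokes rclone:

```python
import subprocess

def stream_upload(data: bytes, remote_path: str) -> None:
    # `rclone rcat` streams stdin to the remote, so nothing is written
    # to local disk here. remote_path is a hypothetical example,
    # e.g. "gdrive:clips/video.mp4".
    subprocess.run(["rclone", "rcat", remote_path], input=data, check=True)
```

On backends without stream-upload support, rclone has to buffer the data itself before committing it, which reintroduces local writes.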

View File

@@ -151,6 +151,17 @@ If no tags are specified, it defaults to ERROR
    More details about supported platforms can be found here: https://github.com/caronc/apprise""",
)
@click.option(
    '--skip-missing',
    default=False,
    show_default=True,
    is_flag=True,
    envvar='SKIP_MISSING',
    help="""\b
    If set, events which are 'missing' at the start will be ignored.
    Subsequent missing events will be downloaded (e.g. a missed event)
    """,
)
def main(**kwargs):
    """A Python based tool for backing up Unifi Protect event clips as they occur."""
    event_listener = UnifiProtectBackup(**kwargs)
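
As a side note on how this option resolves its value: click checks the command line first, then the environment variable, then the default. A minimal, self-contained sketch (the `demo` command and its echo body are hypothetical, but the option mirrors the one added above):

```python
import click

@click.command()
@click.option(
    '--skip-missing',
    is_flag=True,
    default=False,
    show_default=True,
    envvar='SKIP_MISSING',
    help="Ignore events that are already missing at start-up.",
)
def demo(skip_missing: bool):
    # Resolution order: --skip-missing on the CLI, then the SKIP_MISSING
    # environment variable (e.g. SKIP_MISSING=true), then the default.
    click.echo(f"skip_missing={skip_missing}")

if __name__ == '__main__':
    demo()
```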

View File

@@ -9,6 +9,7 @@ import aiosqlite
from dateutil.relativedelta import relativedelta
from pyunifiprotect import ProtectApiClient
from pyunifiprotect.data.types import EventType
from pyunifiprotect.data.nvr import Event

from unifi_protect_backup import VideoDownloader, VideoUploader
@@ -53,80 +54,96 @@ class MissingEventChecker:
        self.ignore_cameras: List[str] = ignore_cameras
        self.interval: int = interval
    async def _get_missing_events(self) -> List[Event]:
        # Get list of events that need to be backed up from unifi protect
        unifi_events = await self._protect.get_events(
            start=datetime.now() - self.retention,
            end=datetime.now(),
            types=[EventType.MOTION, EventType.SMART_DETECT, EventType.RING],
        )
        unifi_events = {event.id: event for event in unifi_events}

        # Get list of events that have been backed up from the database
        #   events(id, type, camera_id, start, end)
        async with self._db.execute("SELECT * FROM events") as cursor:
            rows = await cursor.fetchall()
            db_event_ids = {row[0] for row in rows}

        # Prevent re-adding events currently in the download/upload queue
        downloading_event_ids = {event.id for event in self._downloader.download_queue._queue}  # type: ignore
        current_download = self._downloader.current_event
        if current_download is not None:
            downloading_event_ids.add(current_download.id)

        uploading_event_ids = {event.id for event, video in self._uploader.upload_queue._queue}  # type: ignore
        current_upload = self._uploader.current_event
        if current_upload is not None:
            uploading_event_ids.add(current_upload.id)

        missing_event_ids = set(unifi_events.keys()) - (db_event_ids | downloading_event_ids | uploading_event_ids)

        def wanted_event_type(event_id):
            event = unifi_events[event_id]
            if event.start is None or event.end is None:
                return False  # This event is still on-going
            if event.type is EventType.MOTION and "motion" not in self.detection_types:
                return False
            if event.type is EventType.RING and "ring" not in self.detection_types:
                return False
            elif event.type is EventType.SMART_DETECT:
                for event_smart_detection_type in event.smart_detect_types:
                    if event_smart_detection_type not in self.detection_types:
                        return False
            return True

        wanted_event_ids = set(filter(wanted_event_type, missing_event_ids))
        return [unifi_events[id] for id in wanted_event_ids]

    async def ignore_missing(self):
        """Ignore missing events by adding them to the event table."""
        wanted_events = await self._get_missing_events()
        logger.info(f" Ignoring {len(wanted_events)} missing events")
        for event in wanted_events:
            logger.extra_debug(f"Ignoring event '{event.id}'")
            await self._db.execute(
                "INSERT INTO events VALUES "
                f"('{event.id}', '{event.type}', '{event.camera_id}',"
                f"'{event.start.timestamp()}', '{event.end.timestamp()}')"
            )
        await self._db.commit()
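
One design note on the INSERT above: it builds the statement with f-strings. aiosqlite also accepts placeholder binding, which avoids any quoting pitfalls; a hedged sketch of the equivalent write, assuming the same five-column `events` table (the `record_event` helper is hypothetical):

```python
import aiosqlite
from pyunifiprotect.data.nvr import Event

async def record_event(db: aiosqlite.Connection, event: Event) -> None:
    # Same row shape as events(id, type, camera_id, start, end),
    # but using SQLite placeholder binding instead of f-string quoting.
    await db.execute(
        "INSERT INTO events VALUES (?, ?, ?, ?, ?)",
        (event.id, str(event.type), event.camera_id,
         event.start.timestamp(), event.end.timestamp()),
    )
    await db.commit()
```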
    async def start(self):
        """Main loop."""
        logger.info("Starting Missing Event Checker")
        while True:
            try:
                logger.extra_debug("Running check for missing events...")

                wanted_events = await self._get_missing_events()
                logger.debug(f" Undownloaded events of wanted types: {len(wanted_events)}")

                if len(wanted_events) > 20:
                    logger.warning(f" Adding {len(wanted_events)} missing events to backup queue")
                    missing_logger = logger.extra_debug
                else:
                    missing_logger = logger.warning

                for event in wanted_events:
                    if event.type != EventType.SMART_DETECT:
                        event_name = f"{event.id} ({event.type})"
                    else:
                        event_name = f"{event.id} ({', '.join(event.smart_detect_types)})"
                    missing_logger(
                        f" Adding missing event to backup queue: {event_name}"
                        f" ({event.start.strftime('%Y-%m-%dT%H-%M-%S')} -"
                        f" {event.end.strftime('%Y-%m-%dT%H-%M-%S')})"
                    )
                    await self._download_queue.put(event)

            except Exception as e:
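
The core of `_get_missing_events` is plain set arithmetic: everything Protect reports within the retention window, minus anything already in the database or currently in flight. A toy illustration with hypothetical event IDs:

```python
# Hypothetical IDs, mirroring the set difference in _get_missing_events:
unifi_ids = {"e1", "e2", "e3", "e4"}   # events Protect reports in the retention window
db_ids = {"e1"}                        # already backed up
downloading = {"e2"}                   # currently in the download queue
uploading = set()                      # currently in the upload queue

missing = unifi_ids - (db_ids | downloading | uploading)
assert missing == {"e3", "e4"}
```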

View File

@@ -66,6 +66,7 @@ class UnifiProtectBackup:
        download_buffer_size: int,
        purge_interval: str,
        apprise_notifiers: str,
        skip_missing: bool,
        sqlite_path: str = "events.sqlite",
        color_logging=False,
        port: int = 443,
@@ -93,6 +94,7 @@ class UnifiProtectBackup:
            download_buffer_size (int): How many bytes big the download buffer should be
            purge_interval (str): How often to check for files to delete
            apprise_notifiers (str): Apprise URIs for notifications
            skip_missing (bool): Whether initially missing events should be ignored
            sqlite_path (str): Path where to find/create sqlite database
            color_logging (bool): Whether to add color to logging output or not
        """
@@ -123,6 +125,7 @@ class UnifiProtectBackup:
logger.debug(f" download_buffer_size={human_readable_size(download_buffer_size)}") logger.debug(f" download_buffer_size={human_readable_size(download_buffer_size)}")
logger.debug(f" {purge_interval=}") logger.debug(f" {purge_interval=}")
logger.debug(f" {apprise_notifiers=}") logger.debug(f" {apprise_notifiers=}")
logger.debug(f" {skip_missing=}")
self.rclone_destination = rclone_destination self.rclone_destination = rclone_destination
self.retention = parse_rclone_retention(retention) self.retention = parse_rclone_retention(retention)
@@ -152,6 +155,7 @@ class UnifiProtectBackup:
        self._db = None
        self._download_buffer_size = download_buffer_size
        self._purge_interval = parse_rclone_retention(purge_interval)
        self._skip_missing = skip_missing

    async def start(self):
        """Bootstrap the backup process and kick off the main loop.
@@ -245,6 +249,9 @@ class UnifiProtectBackup:
            self.detection_types,
            self.ignore_cameras,
        )

        if self._skip_missing:
            logger.info("Ignoring missing events")
            await missing.ignore_missing()

        tasks.append(missing.start())

        logger.info("Starting Tasks...")