mirror of https://github.com/ep1cman/unifi-protect-backup.git
synced 2025-12-05 23:53:30 +00:00

Compare commits: v0.13.1 ... 5fa54615c8 (14 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 5fa54615c8 | |
| | 8e267017cd | |
| | 26c1797ce9 | |
| | ef0cf38f83 | |
| | 2bd48014a0 | |
| | afe025be1d | |
| | a14ff1bf30 | |
| | ba64722937 | |
| | 65d8e66e79 | |
| | cb54078153 | |
| | 048e061df1 | |
| | 7f8177de35 | |
| | eaabfbdb4e | |
| | edf377adc4 | |
```diff
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.13.1
+current_version = 0.14.0
 commit = True
 tag = True
```
```diff
@@ -4,6 +4,9 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.14.0] - 2025-07-18
+- Add support for Fingerprint, NFC Card Scan, and Audio Detections
+
 ## [0.13.1] - 2025-06-26
 ### Fixed
 - Bumped uiprotect version to support UniFi Protect 6
```
```diff
@@ -7,7 +7,7 @@ LABEL maintainer="ep1cman"
 
 WORKDIR /app
 
-COPY dist/unifi_protect_backup-0.13.1.tar.gz sdist.tar.gz
+COPY dist/unifi_protect_backup-0.14.0.tar.gz sdist.tar.gz
 
 # https://github.com/rust-lang/cargo/issues/2808
 ENV CARGO_NET_GIT_FETCH_WITH_CLI=true
```
README.md (26 lines changed)
```diff
@@ -23,7 +23,7 @@ retention period.
 ## Features
 
 - Listens to events in real-time via the Unifi Protect websocket API
-- Ensures any previous and/or missed events within the retention period are also backed up
+- Ensures any previous and/or missed events within the missing range are also backed up
 - Supports uploading to a [wide range of storage systems using `rclone`](https://rclone.org/overview/)
 - Automatic pruning of old clips
```
````diff
@@ -90,7 +90,7 @@ docker run \
   -e UFP_ADDRESS='UNIFI_PROTECT_IP' \
   -e UFP_SSL_VERIFY='false' \
   -e RCLONE_DESTINATION='my_remote:/unifi_protect_backup' \
-  -v '/path/to/rclone.conf':'/config/rclone/rclone.conf' \
+  -v '/path/to/config/rclone':'/config/rclone/' \
   -v '/path/to/save/database':/config/database/ \
   ghcr.io/ep1cman/unifi-protect-backup
 ```
````
```diff
@@ -123,6 +123,10 @@ Options:
                           `--max-age` argument of `rclone`
                           (https://rclone.org/filtering/#max-age-don-t-transfer-any-file-
                           older-than-this)  [default: 7d]
+  --missing-range TEXT    How far back should missing events be checked for. Defaults to
+                          the same as the retention time. Format as per the `--max-age`
+                          argument of `rclone` (https://rclone.org/filtering/#max-age-don-
+                          t-transfer-any-file-older-than-this)
   --rclone-args TEXT      Optional extra arguments to pass to `rclone rcat` directly.
                           Common usage for this would be to set a bandwidth limit, for
                           example.
```
```diff
@@ -131,14 +135,21 @@ Options:
                           instead of using the recycle bin on a destination. Google Drive
                           example: `--drive-use-trash=false`
   --detection-types TEXT  A comma separated list of which types of detections to backup.
-                          Valid options are: `motion`, `person`, `vehicle`, `ring`
-                          [default: motion,person,vehicle,ring]
+                          Valid options are: `motion`, `ring`, `line`, `fingerprint`,
+                          `nfc`, `person`, `animal`, `vehicle`, `licensePlate`, `package`,
+                          `face`, `car`, `pet`, `alrmSmoke`, `alrmCmonx`, `smoke_cmonx`,
+                          `alrmSiren`, `alrmBabyCry`, `alrmSpeak`, `alrmBark`,
+                          `alrmBurglar`, `alrmCarHorn`, `alrmGlassBreak` [default: motion
+                          ,ring,line,fingerprint,nfc,person,animal,vehicle,licensePlate,pa
+                          ckage,face,car,pet,alrmSmoke,alrmCmonx,smoke_cmonx,alrmSiren,alr
+                          mBabyCry,alrmSpeak,alrmBark,alrmBurglar,alrmCarHorn,alrmGlassBre
+                          ak]
   --ignore-camera TEXT    IDs of cameras for which events should not be backed up. Use
                           multiple times to ignore multiple IDs. If being set as an
                           environment variable the IDs should be separated by whitespace.
                           Alternatively, use a Unifi user with a role which has access
                           restricted to the subset of cameras that you wish to backup.
   --camera TEXT           IDs of *ONLY* cameras for which events should be backed up. Use
                           multiple times to include multiple IDs. If being set as an
                           environment variable the IDs should be separated by whitespace.
                           Alternatively, use a Unifi user with a role which has access
```
```diff
@@ -214,6 +225,7 @@ always take priority over environment variables):
 - `UFP_PORT`
 - `UFP_SSL_VERIFY`
 - `RCLONE_RETENTION`
+- `MISSING_RANGE`
 - `RCLONE_DESTINATION`
 - `RCLONE_ARGS`
 - `RCLONE_PURGE_ARGS`
```
````diff
@@ -369,7 +381,7 @@ If you need to debug your rclone setup, you can invoke rclone directly like so:
 ```
 docker run \
   --rm \
-  -v /path/to/rclone.conf:/config/rclone/rclone.conf \
+  -v /path/to/config/rclone:/config/rclone \
   -e RCLONE_CONFIG='/config/rclone/rclone.conf' \
   --entrypoint rclone \
   ghcr.io/ep1cman/unifi-protect-backup \
````
````diff
@@ -380,7 +392,7 @@ For example to check that your config file is being read properly and list the c
 ```
 docker run \
   --rm \
-  -v /path/to/rclone.conf:/config/rclone/rclone.conf \
+  -v /path/to/config/rclone:/config/rclone \
   -e RCLONE_CONFIG='/config/rclone/rclone.conf' \
   --entrypoint rclone \
   ghcr.io/ep1cman/unifi-protect-backup \
````
```diff
@@ -4,7 +4,7 @@ mkdir -p /config/rclone
 
 # For backwards compatibility
 [[ -f "/root/.config/rclone/rclone.conf" ]] && \
-    echo "DEPRECATED: Copying rclone conf from /root/.config/rclone/rclone.conf, please change your mount to /config/rclone/rclone.conf"
+    echo "DEPRECATED: Copying rclone conf from /root/.config/rclone/rclone.conf, please change your mount to /config/rclone/rclone.conf" && \
     cp \
         /root/.config/rclone/rclone.conf \
         /config/rclone/rclone.conf
```
```diff
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
 
 [project]
 name = "unifi_protect_backup"
-version = "0.13.1"
+version = "0.14.0"
 description = "Python tool to backup unifi event clips in realtime."
 readme = "README.md"
 license = {text = "MIT"}
@@ -30,7 +30,7 @@ dependencies = [
     "async-lru>=2.0.4",
     "aiolimiter>=1.1.0",
     "uiprotect==7.14.1",
-    "aiohttp==3.11.16",
+    "aiohttp==3.12.14",
 ]
 
 [project.urls]
```
```diff
@@ -2,7 +2,7 @@
 
 __author__ = """sebastian.goscik"""
 __email__ = "sebastian@goscik.com"
-__version__ = "0.13.1"
+__version__ = "0.14.0"
 
 from .downloader import VideoDownloader
 from .downloader_experimental import VideoDownloaderExperimental
```
```diff
@@ -7,13 +7,15 @@ import click
 from aiorun import run  # type: ignore
 from dateutil.relativedelta import relativedelta
 
-from uiprotect.data.types import SmartDetectObjectType
+from uiprotect.data.types import SmartDetectObjectType, SmartDetectAudioType
 
 from unifi_protect_backup import __version__
 from unifi_protect_backup.unifi_protect_backup_core import UnifiProtectBackup
 from unifi_protect_backup.utils import human_readable_to_float
 
-DETECTION_TYPES = ["motion", "ring", "line"] + SmartDetectObjectType.values()
+DETECTION_TYPES = ["motion", "ring", "line", "fingerprint", "nfc"]
+DETECTION_TYPES += [t for t in SmartDetectObjectType.values() if t not in SmartDetectAudioType.values()]
+DETECTION_TYPES += [f"{t}" for t in SmartDetectAudioType.values()]
 
 
 def _parse_detection_types(ctx, param, value):
```
```diff
@@ -28,8 +30,11 @@ def _parse_detection_types(ctx, param, value):
     return types
 
 
-def parse_rclone_retention(ctx, param, retention) -> relativedelta:
+def parse_rclone_retention(ctx, param, retention) -> relativedelta | None:
     """Parse the rclone `retention` parameter into a relativedelta which can then be used to calculate datetimes."""
+    if retention is None:
+        return None
+
     matches = {k: int(v) for v, k in re.findall(r"([\d]+)(ms|s|m|h|d|w|M|y)", retention)}
 
     # Check that we matched the whole string
```
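The retention parser above turns an rclone-style duration string into a `dateutil` `relativedelta`. A minimal, self-contained sketch of the same idea (not the project's exact code; `parse_duration` is a hypothetical name, and the `ms` unit is ignored since `relativedelta` has no milliseconds field):

```python
import re

from dateutil.relativedelta import relativedelta


def parse_duration(text: str) -> relativedelta:
    # Same regex as parse_rclone_retention: pairs of <number><unit>
    matches = {unit: int(value) for value, unit in re.findall(r"([\d]+)(ms|s|m|h|d|w|M|y)", text)}
    return relativedelta(
        years=matches.get("y", 0),
        months=matches.get("M", 0),
        weeks=matches.get("w", 0),
        days=matches.get("d", 0),
        hours=matches.get("h", 0),
        minutes=matches.get("m", 0),
        seconds=matches.get("s", 0),
    )


print(parse_duration("1w2d"))  # relativedelta(days=+9): weeks are normalized into days
```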
```diff
@@ -77,6 +82,15 @@ def parse_rclone_retention(ctx, param, retention) -> relativedelta:
     "(https://rclone.org/filtering/#max-age-don-t-transfer-any-file-older-than-this)",
     callback=parse_rclone_retention,
 )
+@click.option(
+    "--missing-range",
+    default=None,
+    envvar="MISSING_RANGE",
+    help="How far back should missing events be checked for. Defaults to the same as the retention time. "
+    "Format as per the `--max-age` argument of `rclone` "
+    "(https://rclone.org/filtering/#max-age-don-t-transfer-any-file-older-than-this)",
+    callback=parse_rclone_retention,
+)
 @click.option(
     "--rclone-args",
     default="",
```
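The new option follows the same click pattern as `--rclone-retention`: the raw string arrives from the flag or the `MISSING_RANGE` environment variable, and the `callback` converts it before `main` ever sees it. A minimal sketch of that pattern with a hypothetical `--window` option (the names here are illustrative, not from the project):

```python
import click


def to_upper(ctx, param, value):
    # Callbacks receive the raw value (or None) and return the converted one
    return None if value is None else value.upper()


@click.command()
@click.option("--window", default=None, envvar="WINDOW", callback=to_upper)
def main(window):
    click.echo(f"window={window}")


if __name__ == "__main__":
    main()  # e.g. `WINDOW=7d python demo.py` prints "window=7D"
```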
```diff
@@ -259,6 +273,9 @@ def main(**kwargs):
         )
         raise SystemExit(200)  # throw 200 = arg error, service will not be restarted (docker)
 
+    if kwargs.get("missing_range") is None:
+        kwargs["missing_range"] = kwargs.get("retention")
+
     # Only create the event listener and run if validation passes
     event_listener = UnifiProtectBackup(**kwargs)
     run(event_listener.start(), stop_on_unhandled_errors=True)
```
```diff
@@ -114,7 +114,7 @@ class VideoDownloader:
         output_queue_max_size = human_readable_size(self.upload_queue.maxsize)
         self.logger.debug(f"Video Download Buffer: {output_queue_current_size}/{output_queue_max_size}")
         self.logger.debug(f"  Camera: {await get_camera_name(self._protect, event.camera_id)}")
-        if event.type == EventType.SMART_DETECT:
+        if event.type in [EventType.SMART_DETECT, EventType.SMART_AUDIO_DETECT]:
             self.logger.debug(f"  Type: {event.type.value} ({', '.join(event.smart_detect_types)})")
         else:
             self.logger.debug(f"  Type: {event.type.value}")
```
```diff
@@ -114,7 +114,7 @@ class VideoDownloaderExperimental:
         output_queue_max_size = human_readable_size(self.upload_queue.maxsize)
         self.logger.debug(f"Video Download Buffer: {output_queue_current_size}/{output_queue_max_size}")
         self.logger.debug(f"  Camera: {await get_camera_name(self._protect, event.camera_id)}")
-        if event.type == EventType.SMART_DETECT:
+        if event.type in [EventType.SMART_DETECT, EventType.SMART_AUDIO_DETECT]:
             self.logger.debug(f"  Type: {event.type.value} ({', '.join(event.smart_detect_types)})")
         else:
             self.logger.debug(f"  Type: {event.type.value}")
```
```diff
@@ -3,14 +3,15 @@
 import asyncio
 import logging
 from time import sleep
-from typing import List
+from typing import Set
 
 from uiprotect.api import ProtectApiClient
 from uiprotect.websocket import WebsocketState
 from uiprotect.data.nvr import Event
 from uiprotect.data.types import EventType
 from uiprotect.data.websocket import WSAction, WSSubscriptionMessage
 
+from unifi_protect_backup.utils import wanted_event_type
+
 logger = logging.getLogger(__name__)
```
```diff
@@ -21,27 +22,27 @@ class EventListener:
         self,
         event_queue: asyncio.Queue,
         protect: ProtectApiClient,
-        detection_types: List[str],
-        ignore_cameras: List[str],
-        cameras: List[str],
+        detection_types: Set[str],
+        ignore_cameras: Set[str],
+        cameras: Set[str],
     ):
         """Init.
 
         Args:
             event_queue (asyncio.Queue): Queue to place events to backup on
             protect (ProtectApiClient): UniFi Protect API client to use
-            detection_types (List[str]): Desired Event detection types to look for
-            ignore_cameras (List[str]): Camera IDs to ignore events from
-            cameras (List[str]): Camera IDs to ONLY include events from
+            detection_types (Set[str]): Desired Event detection types to look for
+            ignore_cameras (Set[str]): Camera IDs to ignore events from
+            cameras (Set[str]): Camera IDs to ONLY include events from
 
         """
         self._event_queue: asyncio.Queue = event_queue
         self._protect: ProtectApiClient = protect
         self._unsub = None
         self._unsub_websocketstate = None
-        self.detection_types: List[str] = detection_types
-        self.ignore_cameras: List[str] = ignore_cameras
-        self.cameras: List[str] = cameras
+        self.detection_types: Set[str] = detection_types
+        self.ignore_cameras: Set[str] = ignore_cameras
+        self.cameras: Set[str] = cameras
 
     async def start(self):
         """Run main Loop."""
```
```diff
@@ -63,35 +64,10 @@ class EventListener:
         assert isinstance(msg.new_obj, Event)
         if msg.action != WSAction.UPDATE:
             return
-        if msg.new_obj.camera_id in self.ignore_cameras:
-            return
-        if self.cameras and msg.new_obj.camera_id not in self.cameras:
-            return
         if "end" not in msg.changed_data:
             return
-        if msg.new_obj.type not in [
-            EventType.MOTION,
-            EventType.SMART_DETECT,
-            EventType.RING,
-            EventType.SMART_DETECT_LINE,
-        ]:
+        if not wanted_event_type(msg.new_obj, self.detection_types, self.cameras, self.ignore_cameras):
             return
-        if msg.new_obj.type is EventType.MOTION and "motion" not in self.detection_types:
-            logger.extra_debug(f"Skipping unwanted motion detection event: {msg.new_obj.id}")  # type: ignore
-            return
-        if msg.new_obj.type is EventType.RING and "ring" not in self.detection_types:
-            logger.extra_debug(f"Skipping unwanted ring event: {msg.new_obj.id}")  # type: ignore
-            return
-        if msg.new_obj.type is EventType.SMART_DETECT_LINE and "line" not in self.detection_types:
-            logger.extra_debug(f"Skipping unwanted line event: {msg.new_obj.id}")  # type: ignore
-            return
-        elif msg.new_obj.type is EventType.SMART_DETECT:
-            for event_smart_detection_type in msg.new_obj.smart_detect_types:
-                if event_smart_detection_type not in self.detection_types:
-                    logger.extra_debug(  # type: ignore
-                        f"Skipping unwanted {event_smart_detection_type} detection event: {msg.new_obj.id}"
-                    )
-                    return
 
         # TODO: Will this even work? I think it will block the async loop
         while self._event_queue.full():
```
```diff
@@ -2,20 +2,30 @@
 
 import asyncio
 import logging
-from datetime import datetime
-from typing import AsyncIterator, List
+from dataclasses import dataclass
+from datetime import datetime, timezone
+from dateutil.relativedelta import relativedelta
+from typing import AsyncIterator, List, Set, Dict
 
 import aiosqlite
-from dateutil.relativedelta import relativedelta
 from uiprotect import ProtectApiClient
 from uiprotect.data.nvr import Event
 from uiprotect.data.types import EventType
 
 from unifi_protect_backup import VideoDownloader, VideoUploader
+from unifi_protect_backup.utils import EVENT_TYPES_MAP, wanted_event_type
 
 logger = logging.getLogger(__name__)
 
 
+@dataclass
+class MissingEvent:
+    """Track missing events and how many attempts they have had."""
+
+    event: Event
+    attempts: int
+
+
 class MissingEventChecker:
     """Periodically checks if any unifi protect events exist within the retention period that are not backed up."""
```
```diff
@@ -27,9 +37,9 @@ class MissingEventChecker:
         downloader: VideoDownloader,
         uploaders: List[VideoUploader],
         retention: relativedelta,
-        detection_types: List[str],
-        ignore_cameras: List[str],
-        cameras: List[str],
+        detection_types: Set[str],
+        ignore_cameras: Set[str],
+        cameras: Set[str],
         interval: int = 60 * 5,
     ) -> None:
         """Init.
```
```diff
@@ -41,9 +51,9 @@ class MissingEventChecker:
             downloader (VideoDownloader): Downloader to check for on-going downloads
             uploaders (List[VideoUploader]): Uploaders to check for on-going uploads
             retention (relativedelta): Retention period to limit search window
-            detection_types (List[str]): Detection types wanted to limit search
-            ignore_cameras (List[str]): Ignored camera IDs to limit search
-            cameras (List[str]): Included (ONLY) camera IDs to limit search
+            detection_types (Set[str]): Detection types wanted to limit search
+            ignore_cameras (Set[str]): Ignored camera IDs to limit search
+            cameras (Set[str]): Included (ONLY) camera IDs to limit search
             interval (int): How frequently, in seconds, to check for missing events,
 
         """
```
```diff
@@ -53,107 +63,111 @@ class MissingEventChecker:
         self._downloader: VideoDownloader = downloader
         self._uploaders: List[VideoUploader] = uploaders
         self.retention: relativedelta = retention
-        self.detection_types: List[str] = detection_types
-        self.ignore_cameras: List[str] = ignore_cameras
-        self.cameras: List[str] = cameras
+        self.detection_types: Set[str] = detection_types
+        self.ignore_cameras: Set[str] = ignore_cameras
+        self.cameras: Set[str] = cameras
         self.interval: int = interval
+        self.missing_events: Dict[str, MissingEvent] = {}
+        self.last_check_time: datetime | None = None
 
-    async def _get_missing_events(self) -> AsyncIterator[Event]:
-        start_time = datetime.now() - self.retention
-        end_time = datetime.now()
+    async def _get_backedup_event_ids(self) -> Set[str]:
+        # Get ids of events successfully backed up, or ignored
+        async with self._db.execute("SELECT id FROM events") as cursor:
+            rows = await cursor.fetchall()
+            return {row[0] for row in rows}
+
+    async def _get_ongoing_event_ids(self) -> Set[str]:
+        # Get ids of events currently being downloaded
+        downloading_event_ids = {event.id for event in self._downloader.download_queue._queue}  # type: ignore
+        current_download = self._downloader.current_event
+        if current_download is not None:
+            downloading_event_ids.add(current_download.id)
+
+        # Get ids of events currently being uploaded
+        uploading_event_ids = {event.id for event, video in self._downloader.upload_queue._queue}  # type: ignore
+        for uploader in self._uploaders:
+            current_upload = uploader.current_event
+            if current_upload is not None:
+                uploading_event_ids.add(current_upload.id)
+
+        return downloading_event_ids | uploading_event_ids
+
+    async def _get_new_missing_events(self) -> AsyncIterator[MissingEvent]:
+        # If it's the first check we've done, check the entire retention period
+        if self.last_check_time is None:
+            start_time = datetime.now(timezone.utc) - self.retention
+        # Otherwise only check the time since the last check + a buffer period;
+        # however, if the retention is smaller than the buffer, only check the
+        # retention period
+        else:
+            now = datetime.now(timezone.utc)
+            retention_start = now - self.retention
+            buffer_start = self.last_check_time - relativedelta(hours=3)
+            start_time = max(retention_start, buffer_start)
+
+        end_time = datetime.now(timezone.utc)
+        new_last_check_time = end_time
         chunk_size = 500
 
+        # Check UniFi Protect for new missing events
         while True:
-            # Get list of events that need to be backed up from unifi protect
-            logger.extra_debug(f"Fetching events for interval: {start_time} - {end_time}")  # type: ignore
+            # Get list of events that need to be backed up from UniFi Protect
+            logger.info(f"Fetching events for interval: {start_time} - {end_time}")  # type: ignore
             events_chunk = await self._protect.get_events(
                 start=start_time,
                 end=end_time,
-                types=[
-                    EventType.MOTION,
-                    EventType.SMART_DETECT,
-                    EventType.RING,
-                    EventType.SMART_DETECT_LINE,
-                ],
+                types=list(EVENT_TYPES_MAP.keys()),  # TODO: Only request the types we want
                 limit=chunk_size,
             )
 
             if not events_chunk:
                 break  # There were no events to backup
 
-            # Filter out on-going events
-            unifi_events = {event.id: event for event in events_chunk if event.end is not None}
-
-            if not unifi_events:
-                break  # No completed events to process
-
-            # Next chunks start time should be the end of the oldest complete event in the current chunk
-            start_time = max([event.end for event in unifi_events.values() if event.end is not None])
-
-            # Get list of events that have been backed up from the database
-            # events(id, type, camera_id, start, end)
-            async with self._db.execute("SELECT * FROM events") as cursor:
-                rows = await cursor.fetchall()
-                db_event_ids = {row[0] for row in rows}
-
-            # Prevent re-adding events currently in the download/upload queue
-            downloading_event_ids = {event.id for event in self._downloader.download_queue._queue}  # type: ignore
-            current_download = self._downloader.current_event
-            if current_download is not None:
-                downloading_event_ids.add(current_download.id)
-
-            uploading_event_ids = {event.id for event, video in self._downloader.upload_queue._queue}  # type: ignore
-            for uploader in self._uploaders:
-                current_upload = uploader.current_event
-                if current_upload is not None:
-                    uploading_event_ids.add(current_upload.id)
-
-            missing_event_ids = set(unifi_events.keys()) - (db_event_ids | downloading_event_ids | uploading_event_ids)
-
-            # Exclude events of unwanted types
-            def wanted_event_type(event_id, unifi_events=unifi_events):
-                event = unifi_events[event_id]
-                if event.start is None or event.end is None:
-                    return False  # This event is still on-going
-                if event.camera_id in self.ignore_cameras:
-                    return False
-                if self.cameras and event.camera_id not in self.cameras:
-                    return False
-                if event.type is EventType.MOTION and "motion" not in self.detection_types:
-                    return False
-                if event.type is EventType.RING and "ring" not in self.detection_types:
-                    return False
-                if event.type is EventType.SMART_DETECT_LINE and "line" not in self.detection_types:
-                    return False
-                elif event.type is EventType.SMART_DETECT:
-                    for event_smart_detection_type in event.smart_detect_types:
-                        if event_smart_detection_type not in self.detection_types:
-                            return False
-                return True
-
-            wanted_event_ids = set(filter(wanted_event_type, missing_event_ids))
-
-            # Yield events one by one to allow the async loop to start other tasks while
-            # waiting on the full list of events
-            for id in wanted_event_ids:
-                yield unifi_events[id]
+            existing_ids = await self._get_backedup_event_ids() | await self._get_ongoing_event_ids()
+
+            for event in events_chunk:
+                # Filter out on-going events
+                if event.end is None:
+                    # Push back new_last_checked_time to before on-going events
+                    if event.start < new_last_check_time:
+                        new_last_check_time = event.start
+                    continue
+
+                # Next chunk's start time should be the start of the
+                # oldest complete event in the current chunk
+                if event.start > start_time:
+                    start_time = event.start
+
+                # Skip backed up/in-progress events
+                if event.id in existing_ids:
+                    continue
+
+                # Filter out unwanted event types
+                if not wanted_event_type(event, self.detection_types, self.cameras, self.ignore_cameras):
+                    continue
+
+                logger.extra_debug(f"Yielding new missing event '{event.id}'")  # type: ignore[attr-defined]
+                yield MissingEvent(event, 0)
 
+            # Last chunk was incomplete, we can stop now
             if len(events_chunk) < chunk_size:
                 break
 
-    async def ignore_missing(self):
-        """Ignore missing events by adding them to the event table."""
-        logger.info("    Ignoring missing events")
-        async for event in self._get_missing_events():
-            logger.extra_debug(f"Ignoring event '{event.id}'")
-            await self._db.execute(
-                "INSERT INTO events VALUES "
-                f"('{event.id}', '{event.type.value}', '{event.camera_id}',"
-                f"'{event.start.timestamp()}', '{event.end.timestamp()}')"
-            )
+        self.last_check_time = new_last_check_time
+
+    async def _ignore_event(self, event, commit=True):
+        """Ignore an event by adding it to the event table."""
+        logger.extra_debug(f"Ignoring event '{event.id}'")  # type: ignore[attr-defined]
+        await self._db.execute(
+            "INSERT INTO events VALUES "
+            f"('{event.id}', '{event.type.value}', '{event.camera_id}',"
+            f"'{event.start.timestamp()}', '{event.end.timestamp()}')"
+        )
+        if commit:
+            await self._db.commit()
+
+    async def ignore_missing(self):
+        """Ignore all missing events by adding them to the event table."""
+        logger.info("    Ignoring missing events")
+        async for missing_event in self._get_new_missing_events():
+            await self._ignore_event(missing_event.event, commit=False)
+        await self._db.commit()
 
     async def start(self):
```
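The `_get_new_missing_events` rewrite above combines two ideas: a check window that only reaches back to the previous check (plus a safety buffer), and forward-only chunked pagination through the Protect event history. A stripped-down sketch of just the pagination pattern, with a hypothetical `fetch_events(start, end, limit)` standing in for `ProtectApiClient.get_events`:

```python
from datetime import datetime, timedelta, timezone

CHUNK_SIZE = 500


def paginate_events(fetch_events, window: timedelta):
    """Yield completed events in a time window, fetching fixed-size chunks.

    `fetch_events(start, end, limit)` is a hypothetical stand-in that returns
    up to `limit` events, each with `.start` and `.end` attributes.
    """
    end = datetime.now(timezone.utc)
    start = end - window
    while True:
        chunk = fetch_events(start, end, CHUNK_SIZE)
        if not chunk:
            break
        for event in chunk:
            if event.end is None:
                continue  # still on-going; skipped, as in the real code
            # Advance the window start to the newest completed event seen,
            # so the next fetch does not endlessly re-request the same span.
            if event.start > start:
                start = event.start
            yield event
        if len(chunk) < CHUNK_SIZE:
            break  # final, incomplete chunk: nothing more to fetch
```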
```diff
@@ -168,7 +182,28 @@ class MissingEventChecker:
 
                 logger.debug("Running check for missing events...")
 
-                async for event in self._get_missing_events():
+                logger.extra_debug("Checking for new missing events")  # type: ignore[attr-defined]
+                async for missing_event in self._get_new_missing_events():
+                    logger.debug(f"Found new missing event: '{missing_event.event.id}'")
+                    self.missing_events[missing_event.event.id] = missing_event
+
+                db_event_ids = await self._get_backedup_event_ids()
+                in_progress_ids = await self._get_ongoing_event_ids()
+
+                logger.extra_debug("Processing missing events")  # type: ignore[attr-defined]
+                for missing_event in self.missing_events.copy().values():
+                    event = missing_event.event
+
+                    # It has been backed up, stop tracking it
+                    if event.id in db_event_ids:
+                        del self.missing_events[event.id]
+                        logger.debug(f"Missing event '{event.id}' backed up")
+                        continue
+
+                    # It is in progress, we need to wait
+                    elif event.id in in_progress_ids:
+                        continue
 
                     if not shown_warning:
                         logger.warning("    Found missing events, adding to backup queue")
                         shown_warning = True
```
```diff
@@ -179,10 +214,10 @@ class MissingEventChecker:
                         event_name = f"{event.id} ({', '.join(event.smart_detect_types)})"
 
                     logger.extra_debug(
-                        f"    Adding missing event to backup queue: {event_name}"
+                        f"    Adding missing event to download queue: {event_name}"
                         f" ({event.start.strftime('%Y-%m-%dT%H-%M-%S')} -"
                         f" {event.end.strftime('%Y-%m-%dT%H-%M-%S')})"
-                    )
+                    )  # type: ignore[attr-defined]
                     await self._download_queue.put(event)
 
         except Exception as e:
```
```diff
@@ -62,16 +62,16 @@ class Purge:
             # For every event older than the retention time
             retention_oldest_time = time.mktime((datetime.now() - self.retention).timetuple())
             async with self._db.execute(
-                f"SELECT * FROM events WHERE end < {retention_oldest_time}"
+                f"SELECT id FROM events WHERE end < {retention_oldest_time}"
             ) as event_cursor:
-                async for event_id, event_type, camera_id, event_start, event_end in event_cursor:  # noqa: B007
+                async for (event_id,) in event_cursor:  # noqa: B007
                     logger.info(f"Purging event: {event_id}.")
 
                     # For every backup for this event
                     async with self._db.execute(f"SELECT * FROM backups WHERE id = '{event_id}'") as backup_cursor:
                         async for _, remote, file_path in backup_cursor:
-                            logger.debug(f"    Deleted: {remote}:{file_path}")
                             await delete_file(f"{remote}:{file_path}", self.rclone_purge_args)
+                            logger.debug(f"    Deleted: {remote}:{file_path}")
                             deleted_a_file = True
 
                     # delete event from database
```
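The purge query still builds SQL by f-string interpolation; `retention_oldest_time` is a locally computed float, so that is safe here, but aiosqlite also supports bound parameters, which sidestep quoting issues entirely. A minimal sketch (assuming an open `aiosqlite` connection `db`; `purge_ids` is a hypothetical name, not the project's API) of the same query with a placeholder:

```python
import aiosqlite


async def purge_ids(db: aiosqlite.Connection, oldest_time: float):
    # `?` placeholders let sqlite handle quoting/escaping of the value
    async with db.execute("SELECT id FROM events WHERE end < ?", (oldest_time,)) as cursor:
        async for (event_id,) in cursor:
            print(f"Purging event: {event_id}.")
```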
```diff
@@ -68,6 +68,7 @@ class UnifiProtectBackup:
         verify_ssl: bool,
         rclone_destination: str,
         retention: relativedelta,
+        missing_range: relativedelta,
         rclone_args: str,
         rclone_purge_args: str,
         detection_types: List[str],
```
```diff
@@ -98,9 +99,13 @@ class UnifiProtectBackup:
             rclone_destination (str): `rclone` destination path in the format
                                       {rclone remote}:{path on remote}. E.g.
                                       `gdrive:/backups/unifi_protect`
-            retention (str): How long should event clips be backed up for. Format as per the
+            retention (relativedelta): How long should event clips be backed up for. Format as per the
                                       `--max-age` argument of `rclone`
                                       (https://rclone.org/filtering/#max-age-don-t-transfer-any-file-older-than-this)
+            missing_range (relativedelta): How far back should missing events be checked for. Defaults to
+                                      the same as the retention time. Format as per the
+                                      `--max-age` argument of `rclone`
+                                      (https://rclone.org/filtering/#max-age-don-t-transfer-any-file-older-than-this)
             rclone_args (str): A bandwidth limit which is passed to the `--bwlimit` argument of
                                       `rclone` (https://rclone.org/docs/#bwlimit-bandwidth-spec)
             rclone_purge_args (str): Optional extra arguments to pass to `rclone delete` directly.
```
```diff
@@ -120,6 +125,7 @@ class UnifiProtectBackup:
             use_experimental_downloader (bool): Use the new experimental downloader (the same method as used by the
                                       webUI)
             parallel_uploads (int): Max number of parallel uploads to allow
+
         """
         self.color_logging = color_logging
         setup_logging(verbose, self.color_logging)
```
```diff
@@ -143,6 +149,7 @@ class UnifiProtectBackup:
         logger.debug(f"  {verify_ssl=}")
         logger.debug(f"  {rclone_destination=}")
         logger.debug(f"  {retention=}")
+        logger.debug(f"  {missing_range=}")
         logger.debug(f"  {rclone_args=}")
         logger.debug(f"  {rclone_purge_args=}")
         logger.debug(f"  {ignore_cameras=}")
```
```diff
@@ -162,6 +169,7 @@ class UnifiProtectBackup:
 
         self.rclone_destination = rclone_destination
         self.retention = retention
+        self.missing_range = missing_range
         self.rclone_args = rclone_args
         self.rclone_purge_args = rclone_purge_args
         self.file_structure_format = file_structure_format
```
```diff
@@ -180,11 +188,11 @@ class UnifiProtectBackup:
             verify_ssl=self.verify_ssl,
             subscribed_models={ModelType.EVENT},
         )
-        self.ignore_cameras = ignore_cameras
-        self.cameras = cameras
+        self.ignore_cameras = set(ignore_cameras)
+        self.cameras = set(cameras)
         self._download_queue: asyncio.Queue = asyncio.Queue()
         self._unsub: Callable[[], None]
-        self.detection_types = detection_types
+        self.detection_types = set(detection_types)
         self._has_ffprobe = False
        self._sqlite_path = sqlite_path
         self._db = None
```
```diff
@@ -313,15 +321,15 @@ class UnifiProtectBackup:
             tasks.append(purge.start())
 
             # Create missing event task
-            # This will check all the events within the retention period, if any have been missed and not backed up
-            # they will be added to the event queue
+            # This will check all the events within the missing_range period; if any have been missed and not
+            # backed up, they will be added to the event queue
             missing = MissingEventChecker(
                 self._protect,
                 self._db,
                 download_queue,
                 downloader,
                 uploaders,
-                self.retention,
+                self.missing_range,
                 self.detection_types,
                 self.ignore_cameras,
                 self.cameras,
```
```diff
@@ -45,6 +45,7 @@ class VideoUploader:
             file_structure_format (str): format string for how to structure the uploaded files
             db (aiosqlite.Connection): Async SQLite database connection
+            color_logging (bool): Whether or not to add color to logging output
 
         """
         self._protect: ProtectApiClient = protect
         self.upload_queue: VideoQueue = upload_queue
```
```diff
@@ -4,12 +4,13 @@ import asyncio
 import logging
 import re
 from datetime import datetime
-from typing import Optional
+from typing import Optional, Set
 
 from apprise import NotifyType
 from async_lru import alru_cache
 from uiprotect import ProtectApiClient
 from uiprotect.data.nvr import Event
-from uiprotect.data.types import EventType
+from uiprotect.data.types import EventType, SmartDetectObjectType, SmartDetectAudioType
 
 from unifi_protect_backup import notifications
```
```diff
@@ -213,7 +214,7 @@ def setup_logging(verbosity: int, color_logging: bool = False) -> None:
         logging.DEBUG - 2,
     )
 
-    format = "{asctime} [{levelname:^11s}] {name:<42} : {message}"
+    format = "{asctime} [{levelname:^11s}] {name:<46} : {message}"
     sh = create_logging_handler(format, color_logging)
 
     logger = logging.getLogger("unifi_protect_backup")
```
```diff
@@ -247,7 +248,7 @@ def setup_event_logger(logger, color_logging):
     """Set up a logger that also displays the event ID currently being processed."""
     global _initialized_loggers
     if logger not in _initialized_loggers:
-        format = "{asctime} [{levelname:^11s}] {name:<42} :{event} {message}"
+        format = "{asctime} [{levelname:^11s}] {name:<46} :{event} {message}"
         sh = create_logging_handler(format, color_logging)
         logger.addHandler(sh)
         logger.propagate = False
```
```diff
@@ -454,3 +455,38 @@ async def wait_until(dt):
     """Sleep until the specified datetime."""
     now = datetime.now()
     await asyncio.sleep((dt - now).total_seconds())
+
+
+EVENT_TYPES_MAP = {
+    EventType.MOTION: {"motion"},
+    EventType.RING: {"ring"},
+    EventType.SMART_DETECT_LINE: {"line"},
+    EventType.FINGERPRINT_IDENTIFIED: {"fingerprint"},
+    EventType.NFC_CARD_SCANNED: {"nfc"},
+    EventType.SMART_DETECT: {t for t in SmartDetectObjectType.values() if t not in SmartDetectAudioType.values()},
+    EventType.SMART_AUDIO_DETECT: {f"{t}" for t in SmartDetectAudioType.values()},
+}
+
+
+def wanted_event_type(event, wanted_detection_types: Set[str], cameras: Set[str], ignore_cameras: Set[str]):
+    """Return True if this event is one we want."""
+    if event.start is None or event.end is None:
+        return False  # This event is still on-going
+
+    if event.camera_id in ignore_cameras:
+        return False
+
+    if cameras and event.camera_id not in cameras:
+        return False
+
+    if event.type not in EVENT_TYPES_MAP:
+        return False
+
+    if event.type in [EventType.SMART_DETECT, EventType.SMART_AUDIO_DETECT]:
+        detection_types = set(event.smart_detect_types)
+    else:
+        detection_types = EVENT_TYPES_MAP[event.type]
+    if not detection_types & wanted_detection_types:  # No intersection
+        return False
+
+    return True
```
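`wanted_event_type` centralizes the filtering that was previously duplicated in `EventListener` and `MissingEventChecker`. A minimal usage sketch, with a `SimpleNamespace` standing in for a real `uiprotect` `Event` (the helper only touches the attributes shown):

```python
from datetime import datetime, timezone
from types import SimpleNamespace

from uiprotect.data.types import EventType
from unifi_protect_backup.utils import wanted_event_type

# Hypothetical event: a completed "person" smart detection on camera "abc123"
event = SimpleNamespace(
    type=EventType.SMART_DETECT,
    smart_detect_types=["person"],
    camera_id="abc123",
    start=datetime.now(timezone.utc),
    end=datetime.now(timezone.utc),
)

# Wanted: a person detection, and the camera is not filtered out
print(wanted_event_type(event, {"person", "motion"}, cameras=set(), ignore_cameras=set()))  # True

# Not wanted: the camera is explicitly ignored
print(wanted_event_type(event, {"person"}, cameras=set(), ignore_cameras={"abc123"}))  # False
```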