Mirror of https://github.com/ep1cman/unifi-protect-backup.git, synced 2025-12-05 23:53:30 +00:00.

Compare commits: 39a9ad3089...v0.14.0 (11 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 30d3837649 | |
| | 331a7f51b7 | |
| | 9aa29b1758 | |
| | ef06d2a4d4 | |
| | 12c8539977 | |
| | 474d3c32fa | |
| | 3750847055 | |
| | c16a380918 | |
| | df466b5d0b | |
| | 18a78863a7 | |
| | 4d2002b98d | |
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.13.0
+current_version = 0.14.0
 commit = True
 tag = True

@@ -7,6 +7,10 @@ tag = True
 search = version = "{current_version}"
 replace = version = "{new_version}"

+[bumpversion:file:uv.lock]
+search = version = "{current_version}"
+replace = version = "{new_version}"
+
 [bumpversion:file:unifi_protect_backup/__init__.py]
 search = __version__ = "{current_version}"
 replace = __version__ = "{new_version}"
26  .github/ISSUE_TEMPLATE/bug_report.md  vendored  Normal file
@@ -0,0 +1,26 @@
+---
+name: Bug report
+about: Create a report to help UPB improve
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+* Unifi Protect Backup version:
+* Unifi Protect version:
+* Python version:
+* Operating System:
+* Are you using a docker container or native?:
+
+### Description
+
+Describe what you were trying to get done.
+Tell us what happened, what went wrong, and what you expected to happen.
+
+### What I Did
+
+```
+Paste the command(s) you ran and the output.
+If there was a crash, please include the traceback here.
+```
@@ -12,7 +12,7 @@ repos:
         args: [ --unsafe ]
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
-    rev: v0.5.7
+    rev: v0.11.4
     hooks:
       # Run the linter.
       - id: ruff
@@ -4,6 +4,13 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

+## [0.14.0] - 2025-07-18
+- Add support for Finger Print, NFC Card Scan, and Audio Detections
+
+## [0.13.1] - 2025-06-26
+### Fixed
+- Bumped uiprotect version to support unifi protect 6
+
 ## [0.13.0] - 2025-04-09
 ### Added
 - Parallel uploaders are now supported
@@ -7,7 +7,7 @@ LABEL maintainer="ep1cman"

 WORKDIR /app

-COPY dist/unifi_protect_backup-0.13.0.tar.gz sdist.tar.gz
+COPY dist/unifi_protect_backup-0.14.0.tar.gz sdist.tar.gz

 # https://github.com/rust-lang/cargo/issues/2808
 ENV CARGO_NET_GIT_FETCH_WITH_CLI=true
@@ -90,7 +90,6 @@ docker run \
   -e UFP_ADDRESS='UNIFI_PROTECT_IP' \
   -e UFP_SSL_VERIFY='false' \
   -e RCLONE_DESTINATION='my_remote:/unifi_protect_backup' \
   -v '/path/to/save/clips':'/data' \
   -v '/path/to/rclone.conf':'/config/rclone/rclone.conf' \
   -v '/path/to/save/database':/config/database/ \
   ghcr.io/ep1cman/unifi-protect-backup
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"

 [project]
 name = "unifi_protect_backup"
-version = "0.13.0"
+version = "0.14.0"
 description = "Python tool to backup unifi event clips in realtime."
 readme = "README.md"
 license = {text = "MIT"}

@@ -29,7 +29,7 @@ dependencies = [
     "expiring-dict>=1.1.0",
     "async-lru>=2.0.4",
     "aiolimiter>=1.1.0",
-    "uiprotect==7.5.2",
+    "uiprotect==7.14.1",
     "aiohttp==3.11.16",
 ]
@@ -63,8 +63,14 @@ line-length = 120
 target-version = "py310"

 [tool.ruff.lint]
 select = ["E","F","D","B","W"]
 ignore = ["D203", "D213"]

+[tool.ruff.format]
+quote-style = "double"
+indent-style = "space"
+line-ending = "lf"
+docstring-code-format = true
+
 [tool.mypy]
 allow_redefinition = true
@@ -2,7 +2,7 @@

 __author__ = """sebastian.goscik"""
 __email__ = "sebastian@goscik.com"
-__version__ = "0.13.0"
+__version__ = "0.14.0"

 from .downloader import VideoDownloader
 from .downloader_experimental import VideoDownloaderExperimental
@@ -7,13 +7,15 @@ import click
 from aiorun import run  # type: ignore
 from dateutil.relativedelta import relativedelta

-from uiprotect.data.types import SmartDetectObjectType
+from uiprotect.data.types import SmartDetectObjectType, SmartDetectAudioType

 from unifi_protect_backup import __version__
 from unifi_protect_backup.unifi_protect_backup_core import UnifiProtectBackup
 from unifi_protect_backup.utils import human_readable_to_float

-DETECTION_TYPES = ["motion", "ring", "line"] + SmartDetectObjectType.values()
+DETECTION_TYPES = ["motion", "ring", "line", "fingerprint", "nfc"]
+DETECTION_TYPES += [t for t in SmartDetectObjectType.values() if t not in SmartDetectAudioType.values()]
+DETECTION_TYPES += [f"{t}" for t in SmartDetectAudioType.values()]


 def _parse_detection_types(ctx, param, value):
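For context, this is roughly how the new DETECTION_TYPES list comes together: the literal event names are joined by the video smart-detection types (the comprehension suggests some audio values also appear under SmartDetectObjectType, hence the exclusion) and then by the audio types themselves. The values below are stand-ins, not the real uiprotect enum members.

```python
# Illustrative sketch only: stand-in values instead of the real
# SmartDetectObjectType / SmartDetectAudioType enums from uiprotect.
object_types = ["person", "vehicle", "package", "speaking"]  # pretend SmartDetectObjectType.values()
audio_types = ["speaking"]                                   # pretend SmartDetectAudioType.values()

detection_types = ["motion", "ring", "line", "fingerprint", "nfc"]
detection_types += [t for t in object_types if t not in audio_types]  # video smart detections only
detection_types += audio_types                                        # audio detections added back explicitly
print(detection_types)
# ['motion', 'ring', 'line', 'fingerprint', 'nfc', 'person', 'vehicle', 'package', 'speaking']
```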
@@ -29,7 +31,7 @@ def _parse_detection_types(ctx, param, value):


 def parse_rclone_retention(ctx, param, retention) -> relativedelta:
-    """Parses the rclone `retention` parameter into a relativedelta which can then be used to calculate datetimes."""
+    """Parse the rclone `retention` parameter into a relativedelta which can then be used to calculate datetimes."""
     matches = {k: int(v) for v, k in re.findall(r"([\d]+)(ms|s|m|h|d|w|M|y)", retention)}

     # Check that we matched the whole string
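A quick worked example of the regex step above; presumably the resulting suffix/amount pairs are then mapped onto relativedelta fields further down the function, which is outside the visible hunk.

```python
import re

retention = "1w3d12h"
matches = {k: int(v) for v, k in re.findall(r"([\d]+)(ms|s|m|h|d|w|M|y)", retention)}
print(matches)  # {'w': 1, 'd': 3, 'h': 12}
```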
@@ -169,7 +171,7 @@ all warnings, and websocket data
     show_default=True,
     envvar="DOWNLOAD_BUFFER_SIZE",
     help='How big the download buffer should be (you can use suffixes like "B", "KiB", "MiB", "GiB")',
-    callback=lambda ctx, param, value: human_readable_to_float(value),
+    callback=lambda ctx, param, value: int(human_readable_to_float(value)),
 )
 @click.option(
     "--purge_interval",
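The extra int() matters because human_readable_to_float returns a float while the buffer is presumably sized in whole bytes. A minimal sketch, assuming the package is importable and that the binary suffixes expand to 1024-based byte counts:

```python
from unifi_protect_backup.utils import human_readable_to_float

size = human_readable_to_float("10MiB")  # 10485760.0 -- a float
buffer_bytes = int(size)                 # 10485760   -- whole bytes for the download buffer
```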
@@ -248,8 +250,7 @@ a lot of failed downloads with the default downloader.
     help="Max number of parallel uploads to allow",
 )
 def main(**kwargs):
-    """A Python based tool for backing up Unifi Protect event clips as they occur."""
-
+    """Python based tool for backing up Unifi Protect event clips as they occur."""
     try:
         # Validate only one of the camera select arguments was given
         if kwargs.get("cameras") and kwargs.get("ignore_cameras"):
@@ -27,7 +27,7 @@ from unifi_protect_backup.utils import (


 async def get_video_length(video: bytes) -> float:
-    """Uses ffprobe to get the length of the video file passed in as a byte stream."""
+    """Use ffprobe to get the length of the video file passed in as a byte stream."""
     returncode, stdout, stderr = await run_command(
         "ffprobe -v quiet -show_streams -select_streams v:0 -of json -", video
     )
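For reference, this is roughly what that ffprobe invocation yields and how a duration can be pulled out of it. The sketch below is a synchronous stand-in for run_command and assumes the first video stream reports a `duration` field, which is not guaranteed for every container.

```python
import json
import subprocess


def video_length(data: bytes) -> float:
    """Synchronous sketch of get_video_length(): probe the clip piped in on stdin."""
    out = subprocess.run(
        ["ffprobe", "-v", "quiet", "-show_streams", "-select_streams", "v:0", "-of", "json", "-"],
        input=data,
        capture_output=True,
        check=True,
    ).stdout
    return float(json.loads(out)["streams"][0]["duration"])
```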
@@ -62,6 +62,7 @@ class VideoDownloader:
             color_logging (bool): Whether or not to add color to logging output
             download_rate_limit (float): Limit how events can be downloaded in one minute",
             max_event_length (timedelta): Maximum length in seconds for an event to be considered valid and downloaded
+
         """
         self._protect: ProtectApiClient = protect
         self._db: aiosqlite.Connection = db

@@ -86,7 +87,7 @@ class VideoDownloader:
         self._has_ffprobe = False

     async def start(self):
-        """Main loop."""
+        """Run main loop."""
         self.logger.info("Starting Downloader")
         while True:
             if self._limiter:

@@ -113,7 +114,7 @@ class VideoDownloader:
             output_queue_max_size = human_readable_size(self.upload_queue.maxsize)
             self.logger.debug(f"Video Download Buffer: {output_queue_current_size}/{output_queue_max_size}")
             self.logger.debug(f" Camera: {await get_camera_name(self._protect, event.camera_id)}")
-            if event.type == EventType.SMART_DETECT:
+            if event.type in [EventType.SMART_DETECT, EventType.SMART_AUDIO_DETECT]:
                 self.logger.debug(f" Type: {event.type.value} ({', '.join(event.smart_detect_types)})")
             else:
                 self.logger.debug(f" Type: {event.type.value}")

@@ -174,7 +175,7 @@ class VideoDownloader:
                 self.logger.error(f"Unexpected exception occurred, abandoning event {event.id}:", exc_info=e)

     async def _download(self, event: Event) -> Optional[bytes]:
-        """Downloads the video clip for the given event."""
+        """Download the video clip for the given event."""
         self.logger.debug(" Downloading video...")
         for x in range(5):
             assert isinstance(event.camera_id, str)

@@ -185,7 +186,7 @@ class VideoDownloader:
                 assert isinstance(video, bytes)
                 break
             except (AssertionError, ClientPayloadError, TimeoutError) as e:
-                self.logger.warning(f" Failed download attempt {x+1}, retying in 1s", exc_info=e)
+                self.logger.warning(f" Failed download attempt {x + 1}, retying in 1s", exc_info=e)
                 await asyncio.sleep(1)
         else:
             self.logger.error(f"Download failed after 5 attempts, abandoning event {event.id}:")

@@ -210,7 +211,7 @@ class VideoDownloader:
         """
         try:
             downloaded_duration = await get_video_length(video)
-            msg = f" Downloaded video length: {downloaded_duration:.3f}s" f"({downloaded_duration - duration:+.3f}s)"
+            msg = f" Downloaded video length: {downloaded_duration:.3f}s ({downloaded_duration - duration:+.3f}s)"
            if downloaded_duration < duration:
                 self.logger.warning(msg)
             else:
@@ -27,7 +27,7 @@ from unifi_protect_backup.utils import (


 async def get_video_length(video: bytes) -> float:
-    """Uses ffprobe to get the length of the video file passed in as a byte stream."""
+    """Use ffprobe to get the length of the video file passed in as a byte stream."""
     returncode, stdout, stderr = await run_command(
         "ffprobe -v quiet -show_streams -select_streams v:0 -of json -", video
     )

@@ -62,6 +62,7 @@ class VideoDownloaderExperimental:
             color_logging (bool): Whether or not to add color to logging output
             download_rate_limit (float): Limit how events can be downloaded in one minute",
             max_event_length (timedelta): Maximum length in seconds for an event to be considered valid and downloaded
+
         """
         self._protect: ProtectApiClient = protect
         self._db: aiosqlite.Connection = db

@@ -86,7 +87,7 @@ class VideoDownloaderExperimental:
         self._has_ffprobe = False

     async def start(self):
-        """Main loop."""
+        """Run main loop."""
         self.logger.info("Starting Downloader")
         while True:
             if self._limiter:

@@ -113,7 +114,7 @@ class VideoDownloaderExperimental:
             output_queue_max_size = human_readable_size(self.upload_queue.maxsize)
             self.logger.debug(f"Video Download Buffer: {output_queue_current_size}/{output_queue_max_size}")
             self.logger.debug(f" Camera: {await get_camera_name(self._protect, event.camera_id)}")
-            if event.type == EventType.SMART_DETECT:
+            if event.type in [EventType.SMART_DETECT, EventType.SMART_AUDIO_DETECT]:
                 self.logger.debug(f" Type: {event.type.value} ({', '.join(event.smart_detect_types)})")
             else:
                 self.logger.debug(f" Type: {event.type.value}")

@@ -180,7 +181,7 @@ class VideoDownloaderExperimental:
             )

     async def _download(self, event: Event) -> Optional[bytes]:
-        """Downloads the video clip for the given event."""
+        """Download the video clip for the given event."""
         self.logger.debug(" Downloading video...")
         for x in range(5):
             assert isinstance(event.camera_id, str)

@@ -196,7 +197,7 @@ class VideoDownloaderExperimental:
                 assert isinstance(video, bytes)
                 break
             except (AssertionError, ClientPayloadError, TimeoutError) as e:
-                self.logger.warning(f" Failed download attempt {x+1}, retying in 1s", exc_info=e)
+                self.logger.warning(f" Failed download attempt {x + 1}, retying in 1s", exc_info=e)
                 await asyncio.sleep(1)
         else:
             self.logger.error(f"Download failed after 5 attempts, abandoning event {event.id}:")

@@ -221,7 +222,7 @@ class VideoDownloaderExperimental:
         """
         try:
             downloaded_duration = await get_video_length(video)
-            msg = f" Downloaded video length: {downloaded_duration:.3f}s" f"({downloaded_duration - duration:+.3f}s)"
+            msg = f" Downloaded video length: {downloaded_duration:.3f}s ({downloaded_duration - duration:+.3f}s)"
            if downloaded_duration < duration:
                 self.logger.warning(msg)
             else:
@@ -3,14 +3,15 @@
 import asyncio
 import logging
 from time import sleep
-from typing import List
+from typing import Set

 from uiprotect.api import ProtectApiClient
 from uiprotect.websocket import WebsocketState
 from uiprotect.data.nvr import Event
 from uiprotect.data.types import EventType
 from uiprotect.data.websocket import WSAction, WSSubscriptionMessage

+from unifi_protect_backup.utils import wanted_event_type
+
 logger = logging.getLogger(__name__)
@@ -21,75 +22,52 @@ class EventListener:
         self,
         event_queue: asyncio.Queue,
         protect: ProtectApiClient,
-        detection_types: List[str],
-        ignore_cameras: List[str],
-        cameras: List[str],
+        detection_types: Set[str],
+        ignore_cameras: Set[str],
+        cameras: Set[str],
     ):
         """Init.

         Args:
             event_queue (asyncio.Queue): Queue to place events to backup on
             protect (ProtectApiClient): UniFI Protect API client to use
-            detection_types (List[str]): Desired Event detection types to look for
-            ignore_cameras (List[str]): Cameras IDs to ignore events from
-            cameras (List[str]): Cameras IDs to ONLY include events from
+            detection_types (Set[str]): Desired Event detection types to look for
+            ignore_cameras (Set[str]): Cameras IDs to ignore events from
+            cameras (Set[str]): Cameras IDs to ONLY include events from

         """
         self._event_queue: asyncio.Queue = event_queue
         self._protect: ProtectApiClient = protect
         self._unsub = None
         self._unsub_websocketstate = None
-        self.detection_types: List[str] = detection_types
-        self.ignore_cameras: List[str] = ignore_cameras
-        self.cameras: List[str] = cameras
+        self.detection_types: Set[str] = detection_types
+        self.ignore_cameras: Set[str] = ignore_cameras
+        self.cameras: Set[str] = cameras

     async def start(self):
-        """Main Loop."""
+        """Run main Loop."""
         logger.debug("Subscribed to websocket")
         self._unsub_websocket_state = self._protect.subscribe_websocket_state(self._websocket_state_callback)
         self._unsub = self._protect.subscribe_websocket(self._websocket_callback)

     def _websocket_callback(self, msg: WSSubscriptionMessage) -> None:
-        """Callback for "EVENT" websocket messages.
+        """'EVENT' websocket message callback.

         Filters the incoming events, and puts completed events onto the download queue

         Args:
             msg (Event): Incoming event data

         """
         logger.websocket_data(msg)  # type: ignore

         assert isinstance(msg.new_obj, Event)
         if msg.action != WSAction.UPDATE:
             return
-        if msg.new_obj.camera_id in self.ignore_cameras:
-            return
-        if self.cameras and msg.new_obj.camera_id not in self.cameras:
-            return
         if "end" not in msg.changed_data:
             return
-        if msg.new_obj.type not in [
-            EventType.MOTION,
-            EventType.SMART_DETECT,
-            EventType.RING,
-            EventType.SMART_DETECT_LINE,
-        ]:
+        if not wanted_event_type(msg.new_obj, self.detection_types, self.cameras, self.ignore_cameras):
             return
-        if msg.new_obj.type is EventType.MOTION and "motion" not in self.detection_types:
-            logger.extra_debug(f"Skipping unwanted motion detection event: {msg.new_obj.id}")  # type: ignore
-            return
-        if msg.new_obj.type is EventType.RING and "ring" not in self.detection_types:
-            logger.extra_debug(f"Skipping unwanted ring event: {msg.new_obj.id}")  # type: ignore
-            return
-        if msg.new_obj.type is EventType.SMART_DETECT_LINE and "line" not in self.detection_types:
-            logger.extra_debug(f"Skipping unwanted line event: {msg.new_obj.id}")  # type: ignore
-            return
-        elif msg.new_obj.type is EventType.SMART_DETECT:
-            for event_smart_detection_type in msg.new_obj.smart_detect_types:
-                if event_smart_detection_type not in self.detection_types:
-                    logger.extra_debug(  # type: ignore
-                        f"Skipping unwanted {event_smart_detection_type} detection event: {msg.new_obj.id}"
-                    )
-                    return

         # TODO: Will this even work? I think it will block the async loop
         while self._event_queue.full():
@@ -107,12 +85,13 @@ class EventListener:
         logger.debug(f"Adding event {msg.new_obj.id} to queue (Current download queue={self._event_queue.qsize()})")

     def _websocket_state_callback(self, state: WebsocketState) -> None:
-        """Callback for websocket state messages.
+        """Websocket state message callback.

         Flags the websocket for reconnection

         Args:
-            msg (WebsocketState): new state of the websocket
+            state (WebsocketState): new state of the websocket
+
         """
         if state == WebsocketState.DISCONNECTED:
             logger.error("Unifi Protect Websocket lost connection. Reconnecting...")
@@ -3,7 +3,7 @@
 import asyncio
 import logging
 from datetime import datetime
-from typing import AsyncIterator, List
+from typing import AsyncIterator, List, Set

 import aiosqlite
 from dateutil.relativedelta import relativedelta

@@ -12,6 +12,7 @@ from uiprotect.data.nvr import Event
 from uiprotect.data.types import EventType

 from unifi_protect_backup import VideoDownloader, VideoUploader
+from unifi_protect_backup.utils import EVENT_TYPES_MAP, wanted_event_type

 logger = logging.getLogger(__name__)
@@ -27,9 +28,9 @@ class MissingEventChecker:
         downloader: VideoDownloader,
         uploaders: List[VideoUploader],
         retention: relativedelta,
-        detection_types: List[str],
-        ignore_cameras: List[str],
-        cameras: List[str],
+        detection_types: Set[str],
+        ignore_cameras: Set[str],
+        cameras: Set[str],
         interval: int = 60 * 5,
     ) -> None:
         """Init.

@@ -41,10 +42,11 @@ class MissingEventChecker:
             downloader (VideoDownloader): Downloader to check for on-going downloads
             uploaders (List[VideoUploader]): Uploaders to check for on-going uploads
             retention (relativedelta): Retention period to limit search window
-            detection_types (List[str]): Detection types wanted to limit search
-            ignore_cameras (List[str]): Ignored camera IDs to limit search
-            cameras (List[str]): Included (ONLY) camera IDs to limit search
+            detection_types (Set[str]): Detection types wanted to limit search
+            ignore_cameras (Set[str]): Ignored camera IDs to limit search
+            cameras (Set[str]): Included (ONLY) camera IDs to limit search
             interval (int): How frequently, in seconds, to check for missing events,
+
         """
         self._protect: ProtectApiClient = protect
         self._db: aiosqlite.Connection = db

@@ -52,9 +54,9 @@ class MissingEventChecker:
         self._downloader: VideoDownloader = downloader
         self._uploaders: List[VideoUploader] = uploaders
         self.retention: relativedelta = retention
-        self.detection_types: List[str] = detection_types
-        self.ignore_cameras: List[str] = ignore_cameras
-        self.cameras: List[str] = cameras
+        self.detection_types: Set[str] = detection_types
+        self.ignore_cameras: Set[str] = ignore_cameras
+        self.cameras: Set[str] = cameras
         self.interval: int = interval

     async def _get_missing_events(self) -> AsyncIterator[Event]:
@@ -68,12 +70,7 @@ class MissingEventChecker:
             events_chunk = await self._protect.get_events(
                 start=start_time,
                 end=end_time,
-                types=[
-                    EventType.MOTION,
-                    EventType.SMART_DETECT,
-                    EventType.RING,
-                    EventType.SMART_DETECT_LINE,
-                ],
+                types=list(EVENT_TYPES_MAP.keys()),
                 limit=chunk_size,
             )
@@ -108,35 +105,23 @@ class MissingEventChecker:
             if current_upload is not None:
                 uploading_event_ids.add(current_upload.id)

-            missing_event_ids = set(unifi_events.keys()) - (db_event_ids | downloading_event_ids | uploading_event_ids)
+            missing_events = {
+                event_id: event
+                for event_id, event in unifi_events.items()
+                if event_id not in (db_event_ids | downloading_event_ids | uploading_event_ids)
+            }

             # Exclude events of unwanted types
-            def wanted_event_type(event_id):
-                event = unifi_events[event_id]
-                if event.start is None or event.end is None:
-                    return False  # This event is still on-going
-                if event.camera_id in self.ignore_cameras:
-                    return False
-                if self.cameras and event.camera_id not in self.cameras:
-                    return False
-                if event.type is EventType.MOTION and "motion" not in self.detection_types:
-                    return False
-                if event.type is EventType.RING and "ring" not in self.detection_types:
-                    return False
-                if event.type is EventType.SMART_DETECT_LINE and "line" not in self.detection_types:
-                    return False
-                elif event.type is EventType.SMART_DETECT:
-                    for event_smart_detection_type in event.smart_detect_types:
-                        if event_smart_detection_type not in self.detection_types:
-                            return False
-                return True
-
-            wanted_event_ids = set(filter(wanted_event_type, missing_event_ids))
+            wanted_events = {
+                event_id: event
+                for event_id, event in missing_events.items()
+                if wanted_event_type(event, self.detection_types, self.cameras, self.ignore_cameras)
+            }

             # Yeild events one by one to allow the async loop to start other task while
             # waiting on the full list of events
-            for id in wanted_event_ids:
-                yield unifi_events[id]
+            for event in wanted_events.values():
+                yield event

             # Last chunk was in-complete, we can stop now
             if len(events_chunk) < chunk_size:
@@ -156,7 +141,7 @@ class MissingEventChecker:
             await self._db.commit()

     async def start(self):
-        """Main loop."""
+        """Run main loop."""
         logger.info("Starting Missing Event Checker")
         while True:
             try:
@@ -13,14 +13,14 @@ logger = logging.getLogger(__name__)


 async def delete_file(file_path, rclone_purge_args):
-    """Deletes `file_path` via rclone."""
+    """Delete `file_path` via rclone."""
     returncode, stdout, stderr = await run_command(f'rclone delete -vv "{file_path}" {rclone_purge_args}')
     if returncode != 0:
         logger.error(f" Failed to delete file: '{file_path}'")


 async def tidy_empty_dirs(base_dir_path):
-    """Deletes any empty directories in `base_dir_path` via rclone."""
+    """Delete any empty directories in `base_dir_path` via rclone."""
     returncode, stdout, stderr = await run_command(f'rclone rmdirs -vv --ignore-errors --leave-root "{base_dir_path}"')
     if returncode != 0:
         logger.error(" Failed to tidy empty dirs")
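For anyone scripting against the same remote, the two rclone invocations above can be driven through the package's own run_command helper; the remote path below is purely hypothetical.

```python
import asyncio

from unifi_protect_backup.utils import run_command


async def demo() -> None:
    # Same rclone commands as delete_file() / tidy_empty_dirs(), pointed at a made-up remote.
    await run_command('rclone delete -vv "my_remote:/unifi_protect_backup/old_clip.mp4"')
    await run_command('rclone rmdirs -vv --ignore-errors --leave-root "my_remote:/unifi_protect_backup"')


asyncio.run(demo())
```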
@@ -34,7 +34,7 @@ class Purge:
         db: aiosqlite.Connection,
         retention: relativedelta,
         rclone_destination: str,
-        interval: relativedelta = relativedelta(days=1),
+        interval: relativedelta | None,
         rclone_purge_args: str = "",
     ):
         """Init.
@@ -45,15 +45,16 @@ class Purge:
             rclone_destination (str): What rclone destination the clips are stored in
             interval (relativedelta): How often to purge old clips
             rclone_purge_args (str): Optional extra arguments to pass to `rclone delete` directly.
+
         """
         self._db: aiosqlite.Connection = db
         self.retention: relativedelta = retention
         self.rclone_destination: str = rclone_destination
-        self.interval: relativedelta = interval
+        self.interval: relativedelta = interval if interval is not None else relativedelta(days=1)
         self.rclone_purge_args: str = rclone_purge_args

     async def start(self):
-        """Main loop - runs forever."""
+        """Run main loop."""
         while True:
             try:
                 deleted_a_file = False
@@ -63,7 +64,7 @@ class Purge:
                 async with self._db.execute(
                     f"SELECT * FROM events WHERE end < {retention_oldest_time}"
                 ) as event_cursor:
-                    async for event_id, event_type, camera_id, event_start, event_end in event_cursor:
+                    async for event_id, event_type, camera_id, event_start, event_end in event_cursor:  # noqa: B007
                        logger.info(f"Purging event: {event_id}.")

                        # For every backup for this event
@@ -1,3 +1,5 @@
 """Monkey patch new download method into uiprotect till PR is merged."""

+import enum
+
 from datetime import datetime
 from pathlib import Path
@@ -10,13 +12,15 @@ from uiprotect.exceptions import BadRequest
 from uiprotect.utils import to_js_time


+# First, let's add the new VideoExportType enum
+class VideoExportType(str, enum.Enum):
+    """Unifi Protect video export types."""
+
+    TIMELAPSE = "timelapse"
+    ROTATING = "rotating"


 def monkey_patch_experimental_downloader():
     """Apply patches to uiprotect to add new download method."""
     from uiprotect.api import ProtectApiClient

     # Add the version constant
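The function name and comments describe a classic monkey-patching setup: attach a new coroutine to uiprotect's ProtectApiClient at import time until the upstream PR lands. A generic sketch of that pattern is below; the method name and signature are hypothetical and are not uiprotect's real API.

```python
from uiprotect.api import ProtectApiClient


async def _export_video_sketch(self, camera_id: str, start, end):
    """Hypothetical stand-in for the patched-in download/export coroutine."""
    raise NotImplementedError  # the real patch would call the NVR's export endpoint here


def apply_patch() -> None:
    # Monkey patch: bind the new coroutine onto the client class at runtime.
    ProtectApiClient.export_video_sketch = _export_video_sketch
```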
@@ -43,7 +43,7 @@ monkey_patch_experimental_downloader()


 async def create_database(path: str):
-    """Creates sqlite database and creates the events abd backups tables."""
+    """Create sqlite database and creates the events abd backups tables."""
     db = await aiosqlite.connect(path)
     await db.execute("CREATE TABLE events(id PRIMARY KEY, type, camera_id, start REAL, end REAL)")
     await db.execute(
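To make the events schema concrete, here is a tiny standalone illustration of the table created above together with the purge query that appears later in this diff; plain sqlite3 is used just for brevity, and the inserted row is made up.

```python
import sqlite3

db = sqlite3.connect(":memory:")
db.execute("CREATE TABLE events(id PRIMARY KEY, type, camera_id, start REAL, end REAL)")
db.execute("INSERT INTO events VALUES ('evt-1', 'motion', 'cam-1', 1000.0, 1030.0)")

# Same shape as the purge query: SELECT * FROM events WHERE end < {retention_oldest_time}
expired = db.execute("SELECT * FROM events WHERE end < ?", (2000.0,)).fetchall()
print(expired)  # [('evt-1', 'motion', 'cam-1', 1000.0, 1030.0)]
```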
@@ -117,8 +117,10 @@ class UnifiProtectBackup:
             color_logging (bool): Whether to add color to logging output or not
             download_rate_limit (float): Limit how events can be downloaded in one minute. Disabled by default",
             max_event_length (int): Maximum length in seconds for an event to be considered valid and downloaded
-            use_experimental_downloader (bool): Use the new experimental downloader (the same method as used by the webUI)
+            use_experimental_downloader (bool): Use the new experimental downloader (the same method as used by the
+                webUI)
             parallel_uploads (int): Max number of parallel uploads to allow
+
         """
         self.color_logging = color_logging
         setup_logging(verbose, self.color_logging)
@@ -179,11 +181,11 @@ class UnifiProtectBackup:
             verify_ssl=self.verify_ssl,
             subscribed_models={ModelType.EVENT},
         )
-        self.ignore_cameras = ignore_cameras
-        self.cameras = cameras
+        self.ignore_cameras = set(ignore_cameras)
+        self.cameras = set(cameras)
         self._download_queue: asyncio.Queue = asyncio.Queue()
         self._unsub: Callable[[], None]
-        self.detection_types = detection_types
+        self.detection_types = set(detection_types)
         self._has_ffprobe = False
         self._sqlite_path = sqlite_path
         self._db = None
@@ -216,7 +218,7 @@ class UnifiProtectBackup:
         delay = 5  # Start with a 5 second delay
         max_delay = 3600  # 1 hour in seconds

-        for attempts in range(20):
+        for _ in range(20):
             try:
                 await self._protect.update()
                 break
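The loop variable rename is purely cosmetic (the counter was unused, likely flagged by the stricter ruff rules enabled in this release). For orientation, the surrounding retry logic presumably looks something like the sketch below; the doubling of the delay is an assumption, since that step is outside the visible hunk.

```python
import asyncio


async def connect_with_retry(update) -> None:
    delay = 5  # Start with a 5 second delay
    max_delay = 3600  # 1 hour in seconds
    for _ in range(20):
        try:
            await update()
            break
        except Exception:  # the real code catches the specific uiprotect errors
            await asyncio.sleep(delay)
            delay = min(delay * 2, max_delay)
```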
@@ -279,7 +281,7 @@ class UnifiProtectBackup:
         # Create upload tasks
         # This will upload the videos in the downloader's buffer to the rclone remotes and log it in the database
         uploaders = []
-        for i in range(self._parallel_uploads):
+        for _ in range(self._parallel_uploads):
             uploader = VideoUploader(
                 self._protect,
                 upload_queue,
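The parallel-uploads feature (added in 0.13.0) boils down to spawning N identical consumers over one shared queue. A self-contained sketch of that pattern, with made-up task names rather than the real VideoUploader class:

```python
import asyncio


async def consumer(name: str, queue: asyncio.Queue) -> None:
    while True:
        clip = await queue.get()
        print(f"{name} uploading {clip}")
        queue.task_done()


async def main(parallel_uploads: int = 3) -> None:
    queue: asyncio.Queue = asyncio.Queue()
    workers = [asyncio.create_task(consumer(f"uploader-{i}", queue)) for i in range(parallel_uploads)]
    for clip in ["a.mp4", "b.mp4", "c.mp4", "d.mp4"]:
        await queue.put(clip)
    await queue.join()  # wait until every queued clip has been handled
    for w in workers:
        w.cancel()


asyncio.run(main())
```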
@@ -45,6 +45,7 @@ class VideoUploader:
             file_structure_format (str): format string for how to structure the uploaded files
             db (aiosqlite.Connection): Async SQlite database connection
             color_logging (bool): Whether or not to add color to logging output
+
         """
         self._protect: ProtectApiClient = protect
         self.upload_queue: VideoQueue = upload_queue
@@ -59,7 +60,7 @@ class VideoUploader:
         self.logger = logging.LoggerAdapter(self.base_logger, {"event": ""})

     async def start(self):
-        """Main loop.
+        """Run main loop.

         Runs forever looking for video data in the video queue and then uploads it
         using rclone, finally it updates the database
@@ -106,6 +107,7 @@ class VideoUploader:

         Raises:
             RuntimeError: If rclone returns a non-zero exit code
+
         """
         returncode, stdout, stderr = await run_command(f'rclone rcat -vv {rclone_args} "{destination}"', video)
         if returncode != 0:
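rclone rcat reads the object from stdin, which is why the uploader never has to write the clip to local disk. A stripped-down sketch of that streaming step; the destination path is hypothetical.

```python
import asyncio


async def rcat(video: bytes, destination: str) -> int:
    """Pipe clip bytes straight into `rclone rcat <destination>`."""
    proc = await asyncio.create_subprocess_shell(
        f'rclone rcat -vv "{destination}"',
        stdin=asyncio.subprocess.PIPE,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    await proc.communicate(input=video)
    return proc.returncode


# asyncio.run(rcat(b"<mp4 bytes>", "my_remote:/unifi_protect_backup/front-door/clip.mp4"))
```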
@@ -131,7 +133,7 @@ class VideoUploader:
         await self._db.commit()

     async def _generate_file_path(self, event: Event) -> pathlib.Path:
-        """Generates the rclone destination path for the provided event.
+        """Generate the rclone destination path for the provided event.

         Generates rclone destination path for the given even based upon the format string
         in `self.file_structure_format`.
@@ -4,12 +4,13 @@ import asyncio
 import logging
 import re
 from datetime import datetime
-from typing import List, Optional
+from typing import Optional, Set

 from apprise import NotifyType
 from async_lru import alru_cache
 from uiprotect import ProtectApiClient
 from uiprotect.data.nvr import Event
 from uiprotect.data.types import EventType, SmartDetectObjectType, SmartDetectAudioType

 from unifi_protect_backup import notifications
@@ -109,6 +110,9 @@ class AppriseStreamHandler(logging.StreamHandler):

         Args:
             color_logging (bool): If true logging levels will be colorized
+            *args (): Positional arguments to pass to StreamHandler
+            **kwargs: Keyword arguments to pass to StreamHandler
+
         """
         super().__init__(*args, **kwargs)
         self.color_logging = color_logging
@@ -172,7 +176,7 @@ class AppriseStreamHandler(logging.StreamHandler):


 def create_logging_handler(format, color_logging):
-    """Constructs apprise logging handler for the given format."""
+    """Construct apprise logging handler for the given format."""
     date_format = "%Y-%m-%d %H:%M:%S"
     style = "{"
@@ -182,8 +186,8 @@ def create_logging_handler(format, color_logging):
     return sh


-def setup_logging(verbosity: int, color_logging: bool = False, apprise_notifiers: List[str] = []) -> None:
-    """Configures loggers to provided the desired level of verbosity.
+def setup_logging(verbosity: int, color_logging: bool = False) -> None:
+    """Configure loggers to provided the desired level of verbosity.

     Verbosity 0: Only log info messages created by `unifi-protect-backup`, and all warnings
     verbosity 1: Only log info & debug messages created by `unifi-protect-backup`, and all warnings
@@ -199,7 +203,6 @@ def setup_logging(verbosity: int, color_logging: bool = False, apprise_notifiers
     Args:
         verbosity (int): The desired level of verbosity
         color_logging (bool): If colors should be used in the log (default=False)
-        apprise_notifiers (List[str]): Notification services to hook into the logger

     """
     add_logging_level(
@@ -242,7 +245,7 @@ _initialized_loggers = []


 def setup_event_logger(logger, color_logging):
-    """Sets up a logger that also displays the event ID currently being processed."""
+    """Set up a logger that also displays the event ID currently being processed."""
     global _initialized_loggers
     if logger not in _initialized_loggers:
         format = "{asctime} [{levelname:^11s}] {name:<42} :{event} {message}"
@@ -256,12 +259,13 @@ _suffixes = ["B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"]


 def human_readable_size(num: float):
-    """Turns a number into a human readable number with ISO/IEC 80000 binary prefixes.
+    """Turn a number into a human readable number with ISO/IEC 80000 binary prefixes.

     Based on: https://stackoverflow.com/a/1094933

     Args:
         num (int): The number to be converted into human readable format
+
     """
     for unit in _suffixes:
         if abs(num) < 1024.0:
@@ -271,7 +275,7 @@ def human_readable_size(num: float):


 def human_readable_to_float(num: str):
-    """Turns a human readable ISO/IEC 80000 suffix value to its full float value."""
+    """Turn a human readable ISO/IEC 80000 suffix value to its full float value."""
     pattern = r"([\d.]+)(" + "|".join(_suffixes) + ")"
     result = re.match(pattern, num)
     if result is None:
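A worked example of the matching step above, using the _suffixes list from the previous hunk; the final multiplication back to bytes happens in the part of the function not shown here, so the 1024**index step below is an assumption about how it finishes.

```python
import re

_suffixes = ["B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"]

pattern = r"([\d.]+)(" + "|".join(_suffixes) + ")"
value, suffix = re.match(pattern, "512MiB").groups()
size = float(value) * 1024 ** _suffixes.index(suffix)  # assumed expansion step
print(size)  # 536870912.0
```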
@@ -287,7 +291,7 @@ def human_readable_to_float(num: str):
 # No max size, and a 6 hour ttl
 @alru_cache(None, ttl=60 * 60 * 6)
 async def get_camera_name(protect: ProtectApiClient, id: str):
-    """Returns the name for the camera with the given ID.
+    """Return the name for the camera with the given ID.

     If the camera ID is not know, it tries refreshing the cached data
     """
@@ -322,6 +326,7 @@ class SubprocessException(Exception):
             stdout (str): What rclone output to stdout
             stderr (str): What rclone output to stderr
             returncode (str): The return code of the rclone process
+
         """
         super().__init__()
         self.stdout: str = stdout
@@ -329,12 +334,12 @@ class SubprocessException(Exception):
         self.returncode: int = returncode

     def __str__(self):
-        """Turns exception into a human readable form."""
+        """Turn exception into a human readable form."""
         return f"Return Code: {self.returncode}\nStdout:\n{self.stdout}\nStderr:\n{self.stderr}"


 async def run_command(cmd: str, data=None):
-    """Runs the given command returning the exit code, stdout and stderr."""
+    """Run the given command returning the exit code, stdout and stderr."""
     proc = await asyncio.create_subprocess_shell(
         cmd,
         stdin=asyncio.subprocess.PIPE,
@@ -367,11 +372,11 @@ class VideoQueue(asyncio.Queue):
         self._bytes_sum = 0

     def qsize(self):
-        """Number of items in the queue."""
+        """Get number of items in the queue."""
         return self._bytes_sum

     def qsize_files(self):
-        """Number of items in the queue."""
+        """Get number of items in the queue."""
         return super().qsize()

     def _get(self):
@@ -450,3 +455,38 @@ async def wait_until(dt):
     """Sleep until the specified datetime."""
     now = datetime.now()
     await asyncio.sleep((dt - now).total_seconds())
+
+
+EVENT_TYPES_MAP = {
+    EventType.MOTION: {"motion"},
+    EventType.RING: {"ring"},
+    EventType.SMART_DETECT_LINE: {"line"},
+    EventType.FINGERPRINT_IDENTIFIED: {"fingerprint"},
+    EventType.NFC_CARD_SCANNED: {"nfc"},
+    EventType.SMART_DETECT: {t for t in SmartDetectObjectType.values() if t not in SmartDetectAudioType.values()},
+    EventType.SMART_AUDIO_DETECT: {f"{t}" for t in SmartDetectAudioType.values()},
+}
+
+
+def wanted_event_type(event, wanted_detection_types: Set[str], cameras: Set[str], ignore_cameras: Set[str]):
+    """Return True if this event is one we want."""
+    if event.start is None or event.end is None:
+        return False  # This event is still on-going
+
+    if event.camera_id in ignore_cameras:
+        return False
+
+    if cameras and event.camera_id not in cameras:
+        return False
+
+    if event.type not in EVENT_TYPES_MAP:
+        return False
+
+    if event.type in [EventType.SMART_DETECT, EventType.SMART_AUDIO_DETECT]:
+        detection_types = set(event.smart_detect_types)
+    else:
+        detection_types = EVENT_TYPES_MAP[event.type]
+    if not detection_types & wanted_detection_types:  # No intersection
+        return False
+
+    return True
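To see the new helper in action, here is a small usage sketch: the event object is faked with SimpleNamespace rather than uiprotect's real Event model, but the attribute names match the ones the function reads.

```python
from types import SimpleNamespace

from uiprotect.data.types import EventType

from unifi_protect_backup.utils import wanted_event_type

event = SimpleNamespace(
    start=1.0,
    end=2.0,
    camera_id="cam-1",
    type=EventType.RING,
    smart_detect_types=[],
)

print(wanted_event_type(event, {"motion", "ring"}, cameras=set(), ignore_cameras=set()))  # True
print(wanted_event_type(event, {"person"}, cameras=set(), ignore_cameras=set()))          # False
```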