Compare commits

...

29 Commits

Author SHA1 Message Date
Sebastian Goscik
a8328fd09e Bump version: 0.10.7 → 0.11.0 2024-06-08 01:31:58 +01:00
Sebastian Goscik
28d241610b changelog 2024-06-08 01:31:21 +01:00
Sebastian Goscik
aa1335e73b Fix typos and add experimental downloader to README 2024-06-08 01:29:06 +01:00
Sebastian Goscik
9cb2ccf8b2 Update pyunifiprotect to point to my fork
This is done to pick up features that have not yet been merged into the upstream repo. It also allows for more stability in the future.
2024-06-08 01:18:14 +01:00
Sebastian Goscik
30ea7de5c2 Add experimental downloader
This uses a new API to download events the same way the web UI does: it first asks for a video to be prepared (on the UniFi Protect host) and then downloads it. This is potentially more stable than the existing downloader.
2024-06-06 00:41:42 +01:00
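For illustration, a minimal sketch of this prepare-then-download flow, based on the `prepare_camera_video`/`download_camera_video` calls visible later in this compare; the function wiring and placeholder arguments here are assumptions, not the tool's actual code:

```python
# Sketch only: the two-step clip download used by the experimental downloader.
# Assumes the pyunifiprotect fork exposes prepare_camera_video() and
# download_camera_video() as used in downloader_experimental.py below.
from datetime import datetime

from pyunifiprotect import ProtectApiClient


async def fetch_clip(protect: ProtectApiClient, camera_id: str,
                     start: datetime, end: datetime) -> bytes:
    # Step 1: ask the Protect host to cut/prepare the clip server-side,
    # the same request the web UI makes before a download starts.
    prepared = await protect.prepare_camera_video(camera_id, start, end)
    # Step 2: download the prepared file using the name Protect assigned to it.
    return await protect.download_camera_video(camera_id, prepared["fileName"])
```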
Sebastian Goscik
2dac2cee23 TEMP: Switch to fork of pyunifiprotect
In order to test the new functionality of a PR, this commit temporarily changes the source of pyunifiprotect.
2024-06-06 00:41:42 +01:00
Sebastian Goscik
f4d992838a Fix permissions issue with ufp/sessions.json in docker container
The Python library `platformdirs` detects the user as root instead of the UID set to execute UPB. This workaround forces the session cache file to be placed in /config.
2024-06-06 00:41:20 +01:00
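As context for this workaround (and the matching `XDG_CACHE_HOME` line added to the Dockerfile below), a small sketch of how `platformdirs` resolves the cache directory on Linux; the `"ufp"` app name is inferred from the `ufp/sessions.json` path above and may not match the library's exact usage:

```python
# Sketch: on Linux, platformdirs honours XDG_CACHE_HOME when set and otherwise
# falls back to the detected user's ~/.cache, which inside the container
# resolves under /root regardless of the UID the tool actually runs as.
import os

from platformdirs import user_cache_dir

print(user_cache_dir("ufp"))      # e.g. /root/.cache/ufp

os.environ["XDG_CACHE_HOME"] = "/config"
print(user_cache_dir("ufp"))      # /config/ufp, so sessions.json lands under /config
```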
Sebastian Goscik
9fe4394ee4 bump pyunifiprotect to 6.0.1 2024-05-27 23:05:19 +01:00
Sebastian Goscik
e65d8dde6c Bump version: 0.10.6 → 0.10.7 2024-03-23 00:18:57 +00:00
Sebastian Goscik
90108edeb8 Force using pyunifiprotect >= 5.0.1 2024-03-23 00:18:49 +00:00
Sebastian Goscik
1194e957a5 Bump version: 0.10.5 → 0.10.6 2024-03-22 22:50:20 +00:00
Sebastian Goscik
65128b35dd changelog 2024-03-22 22:50:14 +00:00
mmolitor87
64bb353f67 Bump pyunifiprotect to support protect 3.0.22 (#133) 2024-03-22 22:47:54 +00:00
Adrian Keenan
558859dd72 Update docs for ignoring cameras (#134)
* update docs

* remove docker from log scanning notes
2024-03-21 23:09:09 +00:00
Sebastian Goscik
d3b40b443a Bump version: 0.10.4 → 0.10.5 2024-02-24 16:19:22 +00:00
Sebastian Goscik
4bfe9afc10 Bump pyunifiprotect 2024-02-24 16:19:11 +00:00
Sebastian Goscik
c69a3e365a Bump version: 0.10.3 → 0.10.4 2024-01-26 19:49:36 +00:00
Sebastian Goscik
ace6a09bba changelong 2024-01-26 19:49:32 +00:00
Sebastian Goscik
e3c00e3dfa Update pyunifiprotect version 2024-01-26 19:47:44 +00:00
Sebastian Goscik
5f7fad72d5 Bump version: 0.10.2 → 0.10.3 2023-12-07 19:59:13 +00:00
Sebastian Goscik
991998aa37 changelog 2023-12-07 19:59:10 +00:00
Sebastian Goscik
074f5b372c bump pyunifiprotect version 2023-12-07 19:57:21 +00:00
Sebastian Goscik
00aec23805 Bump version: 0.10.1 → 0.10.2 2023-11-21 00:20:46 +00:00
Sebastian Goscik
52e4ecd50d changelog 2023-11-21 00:20:35 +00:00
Sebastian Goscik
6b116ab93b Fixed issue where duplicate events were being downloaded
Previously unifi would only send one update which contained the end timestamp,
so it was sufficient to check if it existed in the new event data.
However, it is now possible to get update events after the end timestamp
has been set. With this change we now look for when the event change
data contains the end timestamp. So long as unifi does not change its
mind about when an event ends, this should solve the issue.
2023-11-21 00:18:36 +00:00
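A sketch of the logic change (the actual one-line change appears in the `event_listener.py` diff further down); the message object here is illustrative:

```python
# Illustrative only: decide whether a websocket update should queue a download.
def should_queue(msg) -> bool:
    # Old check (can now match several updates for the same event, causing
    # duplicate downloads):
    #   return msg.new_obj.end is not None
    # New check: only react to the update whose changed fields include the
    # end timestamp, i.e. the moment the event actually ends.
    return "end" in msg.changed_data
```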
Sebastian Goscik
70526b2f49 Make default file path format use event start time 2023-11-21 00:08:24 +00:00
Sebastian Goscik
5069d28f0d Bump version: 0.10.0 → 0.10.1 2023-11-01 21:34:01 +00:00
Sebastian Goscik
731ab1081d changelog 2023-11-01 21:33:55 +00:00
Sebastian Goscik
701fd9b0a8 Fix event enum string conversion to value 2023-11-01 21:32:19 +00:00
14 changed files with 1399 additions and 1042 deletions

View File

@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.10.0
current_version = 0.11.0
commit = True
tag = True

View File

@@ -4,6 +4,42 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [0.11.0] - 2024-06-08
### Added
- A new experimental downloader that uses the same mechanism the web ui does. Enable with
`--experimental-downloader`
### Fixed
- Support for UniFi OS 4.x.x
## [0.10.7] - 2024-03-22
### Fixed
- Set pyunifiprotect to a minimum version of 5.0.0
## [0.10.6] - 2024-03-22
### Fixed
- Bumped `pyunifiprotect` version to fix compatibility with versions of Unifi Protect after 3.0.10
## [0.10.5] - 2024-01-26
### Fixed
- Bumped `pyunifiprotect` version to fix issue with old version of yarl
## [0.10.4] - 2024-01-26
### Fixed
- Bumped `pyunifiprotect` version to fix issue caused by new video modes
## [0.10.3] - 2023-12-07
### Fixed
- Bumped `pyunifiprotect` version to fix issue caused by unifi protect returning invalid UUIDs
## [0.10.2] - 2023-11-21
### Fixed
- Issue where duplicate events were being downloaded causing database errors
- Default file path format now uses event start time instead of event end time which makes more logical sense
## [0.10.1] - 2023-11-01
### Fixed
- Event type enum to string conversion was no longer producing the enum value; this is now done explicitly.
## [0.10.0] - 2023-11-01
### Added
- Command line option to skip events longer than a given length (default 2 hours)

View File

@@ -7,7 +7,7 @@ LABEL maintainer="ep1cman"
WORKDIR /app
COPY dist/unifi_protect_backup-0.10.0.tar.gz sdist.tar.gz
COPY dist/unifi_protect_backup-0.11.0.tar.gz sdist.tar.gz
# https://github.com/rust-lang/cargo/issues/2808
ENV CARGO_NET_GIT_FETCH_WITH_CLI=true
@@ -50,6 +50,9 @@ ENV TZ=UTC
ENV IGNORE_CAMERAS=""
ENV SQLITE_PATH=/config/database/events.sqlite
# Fixes issue where `platformdirs` is unable to properly detect the user directory
ENV XDG_CACHE_HOME=/config
COPY docker_root/ /
RUN mkdir -p /config/database /config/rclone

View File

@@ -48,7 +48,7 @@ In order to connect to your unifi protect instance, you will first need to setup
## Installation
*The prefered way to run this tool is using a container*
*The preferred way to run this tool is using a container*
### Docker Container
You can run this tool as a container if you prefer with the following command.
@@ -129,14 +129,16 @@ Options:
example.
--rclone-purge-args TEXT Optional extra arguments to pass to `rclone delete` directly.
Common usage for this would be to execute a permanent delete
instead of using the recycle bin on a destination.
Google Drive example: `--drive-use-trash=false`
instead of using the recycle bin on a destination. Google Drive
example: `--drive-use-trash=false`
--detection-types TEXT A comma separated list of which types of detections to backup.
Valid options are: `motion`, `person`, `vehicle`, `ring`
[default: motion,person,vehicle,ring]
--ignore-camera TEXT IDs of cameras for which events should not be backed up. Use
multiple times to ignore multiple IDs. If being set as an
environment variable the IDs should be separated by whitespace.
Alternatively, use a Unifi user with a role which has access
restricted to the subset of cameras that you wish to backup.
--file-structure-format TEXT A Python format string used to generate the file structure/name
on the rclone remote.For details of the fields available, see
the projects `README.md` file. [default: {camera_name}/{event.s
@@ -187,12 +189,15 @@ Options:
More details about supported platforms can be found here:
https://github.com/caronc/apprise
--skip-missing If set, events which are 'missing' at the start will be ignored.
--skip-missing If set, events which are 'missing' at the start will be ignored.
Subsequent missing events will be downloaded (e.g. a missed event) [default: False]
--download-rate-limit FLOAT Limit how events can be downloaded in one minute. Disabled by
default
--max-event-length INTEGER Only download events shorter than this maximum length, in
seconds [default: 7200]
--experimental-downloader If set, a new experimental download mechanism will be used to match
what the web UI does. This might be more stable if you are experiencing
a lot of failed downloads with the default downloader. [default: False]
--help Show this message and exit.
```
@@ -218,6 +223,7 @@ always take priority over environment variables):
- `SKIP_MISSING`
- `DOWNLOAD_RATELIMIT`
- `MAX_EVENT_LENGTH`
- `EXPERIMENTAL_DOWNLOADER`
## File path formatting
@@ -242,6 +248,14 @@ now on, you can use the `--skip-missing` flag. This does not enable the periodic
If you use this feature it is advised that you run the tool once with this flag, then stop it once the database has been created and the events are ignored. Keeping this flag set permanently could cause events to be missed if the tool crashes and is restarted etc.
## Ignoring cameras
Cameras can be excluded from backups by either:
- Using `--ignore-camera`, see [usage](#usage)
- IDs can be obtained by scanning the logs, starting at `Found cameras:` up to the next log line (currently `NVR TZ`). You can find this section of the logs by piping the logs in to this `sed` command
`sed -n '/Found cameras:/,/NVR TZ/p'`
- Using a Unifi user with a role which has access restricted to the subset of cameras that you wish to backup.
# A note about `rclone` backends and disk wear
This tool attempts to not write the downloaded files to disk to minimise disk wear, and instead streams them directly to
rclone. Sadly, not all storage backends supported by `rclone` allow "Stream Uploads". Please refer to the `StreamUpload` column of this table to see which ones do and don't: https://rclone.org/overview/#optional-features
@@ -271,7 +285,7 @@ tmpfs /mnt/tmpfs tmpfs nosuid,nodev,noatime 0 0
```
# Running Backup Tool as a Service (LINUX ONLY)
You can create a service that will run the docker or local version of this backup tool. The service can be configured to launch on boot. This is likely the preferred way you want to execute the tool once you have it completely configured and tested so it is continiously running.
You can create a service that will run the docker or local version of this backup tool. The service can be configured to launch on boot. This is likely the preferred way you want to execute the tool once you have it completely configured and tested so it is continuously running.
First create a service configuration file. You can replace `protectbackup` in the filename below with the name you wish to use for your service, if you change it remember to change the other locations in the following scripts as well.

poetry.lock (generated): 2087 lines changed

File diff suppressed because it is too large

View File

@@ -1,7 +1,7 @@
[tool]
[tool.poetry]
name = "unifi_protect_backup"
version = "0.10.0"
version = "0.11.0"
homepage = "https://github.com/ep1cman/unifi-protect-backup"
description = "Python tool to backup unifi event clips in realtime."
authors = ["sebastian.goscik <sebastian@goscik.com>"]
@@ -23,7 +23,6 @@ packages = [
[tool.poetry.dependencies]
python = ">=3.9.0,<4.0"
click = "8.0.1"
pyunifiprotect = "^4.21.0"
aiorun = "^2023.7.2"
aiosqlite = "^0.17.0"
python-dateutil = "^2.8.2"
@@ -31,6 +30,7 @@ apprise = "^1.5.0"
expiring-dict = "^1.1.0"
async-lru = "^2.0.4"
aiolimiter = "^1.1.0"
pyunifiprotect = {git = "https://github.com/ep1cman/pyunifiprotect.git", rev = "experimental"}
[tool.poetry.group.dev]
optional = true

View File

@@ -2,9 +2,10 @@
__author__ = """sebastian.goscik"""
__email__ = 'sebastian@goscik.com'
__version__ = '0.10.0'
__version__ = '0.11.0'
from .downloader import VideoDownloader
from .downloader_experimental import VideoDownloaderExperimental
from .event_listener import EventListener
from .purge import Purge
from .uploader import VideoUploader

View File

@@ -70,8 +70,8 @@ def parse_rclone_retention(ctx, param, retention) -> relativedelta:
default='7d',
show_default=True,
envvar='RCLONE_RETENTION',
help="How long should event clips be backed up for. Format as per the `rclone1 time option format "
"(https://rclone.org/docs/#time-option)",
help="How long should event clips be backed up for. Format as per the `--max-age` argument of `rclone` "
"(https://rclone.org/filtering/#max-age-don-t-transfer-any-file-older-than-this)",
callback=parse_rclone_retention,
)
@click.option(
@@ -104,7 +104,9 @@ def parse_rclone_retention(ctx, param, retention) -> relativedelta:
multiple=True,
envvar="IGNORE_CAMERAS",
help="IDs of cameras for which events should not be backed up. Use multiple times to ignore "
"multiple IDs. If being set as an environment variable the IDs should be separated by whitespace.",
"multiple IDs. If being set as an environment variable the IDs should be separated by whitespace. "
"Alternatively, use a Unifi user with a role which has access restricted to the subset of cameras "
"that you wish to backup.",
)
@click.option(
'--file-structure-format',
@@ -211,6 +213,19 @@ Subsequent missing events will be downloaded (e.g. a missed event)
type=int,
help="Only download events shorter than this maximum length, in seconds",
)
@click.option(
'--experimental-downloader',
'use_experimental_downloader',
default=False,
show_default=True,
is_flag=True,
envvar='EXPERIMENTAL_DOWNLOADER',
help="""\b
If set, a new experimental download mechanism will be used to match
what the web UI does. This might be more stable if you are experiencing
a lot of failed downloads with the default downloader.
""",
)
def main(**kwargs):
"""A Python based tool for backing up Unifi Protect event clips as they occur."""
event_listener = UnifiProtectBackup(**kwargs)

View File

@@ -114,9 +114,9 @@ class VideoDownloader:
self.logger.debug(f"Video Download Buffer: {output_queue_current_size}/{output_queue_max_size}")
self.logger.debug(f" Camera: {await get_camera_name(self._protect, event.camera_id)}")
if event.type == EventType.SMART_DETECT:
self.logger.debug(f" Type: {event.type} ({', '.join(event.smart_detect_types)})")
self.logger.debug(f" Type: {event.type.value} ({', '.join(event.smart_detect_types)})")
else:
self.logger.debug(f" Type: {event.type}")
self.logger.debug(f" Type: {event.type.value}")
self.logger.debug(f" Start: {event.start.strftime('%Y-%m-%dT%H-%M-%S')} ({event.start.timestamp()})")
self.logger.debug(f" End: {event.end.strftime('%Y-%m-%dT%H-%M-%S')} ({event.end.timestamp()})")
duration = (event.end - event.start).total_seconds()
@@ -198,7 +198,7 @@ class VideoDownloader:
self.logger.warning("Ignoring event")
await self._db.execute(
"INSERT INTO events VALUES "
f"('{event.id}', '{event.type}', '{event.camera_id}',"
f"('{event.id}', '{event.type.value}', '{event.camera_id}',"
f"'{event.start.timestamp()}', '{event.end.timestamp()}')"
)
await self._db.commit()
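The repeated `event.type` → `event.type.value` change in this file (and in the uploader and missing-event checker below) stores the enum's underlying value rather than whatever the enum happens to format itself as. A minimal sketch of the difference, using a hypothetical stand-in enum rather than pyunifiprotect's actual `EventType`:

```python
# Hypothetical stand-in enum; pyunifiprotect's EventType is a str-based enum
# whose f-string formatting changed between Python versions, which is why the
# code now takes .value explicitly.
from enum import Enum


class EventType(Enum):
    MOTION = "motion"


event_type = EventType.MOTION
print(f"{event_type}")        # "EventType.MOTION" - member name, not what we want in the DB
print(f"{event_type.value}")  # "motion" - the explicit value
```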

View File

@@ -0,0 +1,228 @@
# noqa: D100
import asyncio
import json
import logging
import shutil
from datetime import datetime, timedelta, timezone
from typing import Optional
import aiosqlite
import pytz
from aiohttp.client_exceptions import ClientPayloadError
from expiring_dict import ExpiringDict # type: ignore
from aiolimiter import AsyncLimiter
from pyunifiprotect import ProtectApiClient
from pyunifiprotect.data.nvr import Event
from pyunifiprotect.data.types import EventType
from unifi_protect_backup.utils import (
SubprocessException,
VideoQueue,
get_camera_name,
human_readable_size,
run_command,
setup_event_logger,
)
async def get_video_length(video: bytes) -> float:
"""Uses ffprobe to get the length of the video file passed in as a byte stream."""
returncode, stdout, stderr = await run_command(
'ffprobe -v quiet -show_streams -select_streams v:0 -of json -', video
)
if returncode != 0:
raise SubprocessException(stdout, stderr, returncode)
json_data = json.loads(stdout)
return float(json_data['streams'][0]['duration'])
class VideoDownloaderExperimental:
"""Downloads event video clips from Unifi Protect."""
def __init__(
self,
protect: ProtectApiClient,
db: aiosqlite.Connection,
download_queue: asyncio.Queue,
upload_queue: VideoQueue,
color_logging: bool,
download_rate_limit: float,
max_event_length: timedelta,
):
"""Init.
Args:
protect (ProtectApiClient): UniFi Protect API client to use
db (aiosqlite.Connection): Async SQLite database to check for missing events
download_queue (asyncio.Queue): Queue to get event details from
upload_queue (VideoQueue): Queue to place downloaded videos on
color_logging (bool): Whether or not to add color to logging output
download_rate_limit (float): Limit how events can be downloaded in one minute",
max_event_length (timedelta): Maximum length in seconds for an event to be considered valid and downloaded
"""
self._protect: ProtectApiClient = protect
self._db: aiosqlite.Connection = db
self.download_queue: asyncio.Queue = download_queue
self.upload_queue: VideoQueue = upload_queue
self.current_event = None
self._failures = ExpiringDict(60 * 60 * 12) # Time to live = 12h
self._download_rate_limit = download_rate_limit
self._max_event_length = max_event_length
self._limiter = AsyncLimiter(self._download_rate_limit) if self._download_rate_limit is not None else None
self.base_logger = logging.getLogger(__name__)
setup_event_logger(self.base_logger, color_logging)
self.logger = logging.LoggerAdapter(self.base_logger, {'event': ''})
# Check if `ffprobe` is available
ffprobe = shutil.which('ffprobe')
if ffprobe is not None:
self.logger.debug(f"ffprobe found: {ffprobe}")
self._has_ffprobe = True
else:
self._has_ffprobe = False
async def start(self):
"""Main loop."""
self.logger.info("Starting Downloader")
while True:
if self._limiter:
self.logger.debug("Waiting for rate limit")
await self._limiter.acquire()
try:
# Wait for unifi protect to be connected
await self._protect.connect_event.wait()
event = await self.download_queue.get()
self.current_event = event
self.logger = logging.LoggerAdapter(self.base_logger, {'event': f' [{event.id}]'})
# Fix timezones since pyunifiprotect sets all timestamps to UTC. Instead localize them to
# the timezone of the unifi protect NVR.
event.start = event.start.replace(tzinfo=pytz.utc).astimezone(self._protect.bootstrap.nvr.timezone)
event.end = event.end.replace(tzinfo=pytz.utc).astimezone(self._protect.bootstrap.nvr.timezone)
self.logger.info(f"Downloading event: {event.id}")
self.logger.debug(f"Remaining Download Queue: {self.download_queue.qsize()}")
output_queue_current_size = human_readable_size(self.upload_queue.qsize())
output_queue_max_size = human_readable_size(self.upload_queue.maxsize)
self.logger.debug(f"Video Download Buffer: {output_queue_current_size}/{output_queue_max_size}")
self.logger.debug(f" Camera: {await get_camera_name(self._protect, event.camera_id)}")
if event.type == EventType.SMART_DETECT:
self.logger.debug(f" Type: {event.type.value} ({', '.join(event.smart_detect_types)})")
else:
self.logger.debug(f" Type: {event.type.value}")
self.logger.debug(f" Start: {event.start.strftime('%Y-%m-%dT%H-%M-%S')} ({event.start.timestamp()})")
self.logger.debug(f" End: {event.end.strftime('%Y-%m-%dT%H-%M-%S')} ({event.end.timestamp()})")
duration = (event.end - event.start).total_seconds()
self.logger.debug(f" Duration: {duration}s")
# Skip invalid events
if not self._valid_event(event):
await self._ignore_event(event)
continue
# Unifi protect does not return full video clips if the clip is requested too soon.
# There are two issues at play here:
# - Protect will only cut a clip on an keyframe which happen every 5s
# - Protect's pipeline needs a finite amount of time to make a clip available
# So we will wait 1.5x the keyframe interval to ensure that there is always ample video
# stored and Protect can return a full clip (which should be at least the length requested,
# but often longer)
time_since_event_ended = datetime.utcnow().replace(tzinfo=timezone.utc) - event.end
sleep_time = (timedelta(seconds=5 * 1.5) - time_since_event_ended).total_seconds()
if sleep_time > 0:
self.logger.debug(f" Sleeping ({sleep_time}s) to ensure clip is ready to download...")
await asyncio.sleep(sleep_time)
try:
video = await self._download(event)
assert video is not None
except Exception as e:
# Increment failure count
if event.id not in self._failures:
self._failures[event.id] = 1
else:
self._failures[event.id] += 1
self.logger.warning(f"Event failed download attempt {self._failures[event.id]}", exc_info=e)
if self._failures[event.id] >= 10:
self.logger.error(
"Event has failed to download 10 times in a row. Permanently ignoring this event"
)
await self._ignore_event(event)
continue
# Remove successfully downloaded event from failures list
if event.id in self._failures:
del self._failures[event.id]
# Get the actual length of the downloaded video using ffprobe
if self._has_ffprobe:
await self._check_video_length(video, duration)
await self.upload_queue.put((event, video))
self.logger.debug("Added to upload queue")
self.current_event = None
except Exception as e:
self.logger.error(f"Unexpected exception occurred, abandoning event {event.id}:", exc_info=e)
async def _download(self, event: Event) -> Optional[bytes]:
"""Downloads the video clip for the given event."""
self.logger.debug(" Downloading video...")
for x in range(5):
assert isinstance(event.camera_id, str)
assert isinstance(event.start, datetime)
assert isinstance(event.end, datetime)
try:
prepared_video_file = await self._protect.prepare_camera_video(event.camera_id, event.start, event.end)
video = await self._protect.download_camera_video(event.camera_id, prepared_video_file['fileName'])
assert isinstance(video, bytes)
break
except (AssertionError, ClientPayloadError, TimeoutError) as e:
self.logger.warning(f" Failed download attempt {x+1}, retying in 1s", exc_info=e)
await asyncio.sleep(1)
else:
self.logger.error(f"Download failed after 5 attempts, abandoning event {event.id}:")
return None
self.logger.debug(f" Downloaded video size: {human_readable_size(len(video))}s")
return video
async def _ignore_event(self, event):
self.logger.warning("Ignoring event")
await self._db.execute(
"INSERT INTO events VALUES "
f"('{event.id}', '{event.type.value}', '{event.camera_id}',"
f"'{event.start.timestamp()}', '{event.end.timestamp()}')"
)
await self._db.commit()
async def _check_video_length(self, video, duration):
"""Check if the downloaded event is at least the length of the event, warn otherwise.
It is expected for events to regularly be slightly longer than the event specified
"""
try:
downloaded_duration = await get_video_length(video)
msg = f" Downloaded video length: {downloaded_duration:.3f}s" f"({downloaded_duration - duration:+.3f}s)"
if downloaded_duration < duration:
self.logger.warning(msg)
else:
self.logger.debug(msg)
except SubprocessException as e:
self.logger.warning(" `ffprobe` failed", exc_info=e)
def _valid_event(self, event):
duration = event.end - event.start
if duration > self._max_event_length:
self.logger.warning(f"Event longer ({duration}) than max allowed length {self._max_event_length}")
return False
return True

View File

@@ -61,7 +61,7 @@ class EventListener:
return
if msg.new_obj.camera_id in self.ignore_cameras:
return
if msg.new_obj.end is None:
if 'end' not in msg.changed_data:
return
if msg.new_obj.type not in [EventType.MOTION, EventType.SMART_DETECT, EventType.RING]:
return

View File

@@ -131,7 +131,7 @@ class MissingEventChecker:
logger.extra_debug(f"Ignoring event '{event.id}'")
await self._db.execute(
"INSERT INTO events VALUES "
f"('{event.id}', '{event.type}', '{event.camera_id}',"
f"('{event.id}', '{event.type.value}', '{event.camera_id}',"
f"'{event.start.timestamp()}', '{event.end.timestamp()}')"
)
await self._db.commit()
@@ -154,7 +154,7 @@ class MissingEventChecker:
shown_warning = True
if event.type != EventType.SMART_DETECT:
event_name = f"{event.id} ({event.type})"
event_name = f"{event.id} ({event.type.value})"
else:
event_name = f"{event.id} ({', '.join(event.smart_detect_types)})"

View File

@@ -1,4 +1,5 @@
"""Main module."""
import asyncio
import logging
import os
@@ -16,6 +17,7 @@ from unifi_protect_backup import (
MissingEventChecker,
Purge,
VideoDownloader,
VideoDownloaderExperimental,
VideoUploader,
notifications,
)
@@ -67,6 +69,7 @@ class UnifiProtectBackup:
color_logging: bool = False,
download_rate_limit: float = None,
port: int = 443,
use_experimental_downloader: bool = False,
):
"""Will configure logging settings and the Unifi Protect API (but not actually connect).
@@ -97,6 +100,7 @@ class UnifiProtectBackup:
color_logging (bool): Whether to add color to logging output or not
download_rate_limit (float): Limit how events can be downloaded in one minute. Disabled by default",
max_event_length (int): Maximum length in seconds for an event to be considered valid and downloaded
use_experimental_downloader (bool): Use the new experimental downloader (the same method as used by the webUI)
"""
self.color_logging = color_logging
setup_logging(verbose, self.color_logging)
@@ -133,6 +137,7 @@ class UnifiProtectBackup:
logger.debug(f" {skip_missing=}")
logger.debug(f" {download_rate_limit=} events per minute")
logger.debug(f" {max_event_length=}s")
logger.debug(f" {use_experimental_downloader=}")
self.rclone_destination = rclone_destination
self.retention = retention
@@ -166,6 +171,7 @@ class UnifiProtectBackup:
self._skip_missing = skip_missing
self._download_rate_limit = download_rate_limit
self._max_event_length = timedelta(seconds=max_event_length)
self._use_experimental_downloader = use_experimental_downloader
async def start(self):
"""Bootstrap the backup process and kick off the main loop.
@@ -225,7 +231,12 @@ class UnifiProtectBackup:
# Create downloader task
# This will download video files to its buffer
downloader = VideoDownloader(
if self._use_experimental_downloader:
downloader_cls = VideoDownloaderExperimental
else:
downloader_cls = VideoDownloader
downloader = downloader_cls(
self._protect,
self._db,
download_queue,

View File

@@ -117,7 +117,7 @@ class VideoUploader:
assert isinstance(event.end, datetime)
await self._db.execute(
"INSERT INTO events VALUES "
f"('{event.id}', '{event.type}', '{event.camera_id}',"
f"('{event.id}', '{event.type.value}', '{event.camera_id}',"
f"'{event.start.timestamp()}', '{event.end.timestamp()}')"
)
@@ -157,9 +157,9 @@ class VideoUploader:
format_context = {
"event": event,
"duration_seconds": (event.end - event.start).total_seconds(),
"detection_type": f"{event.type} ({' '.join(event.smart_detect_types)})"
"detection_type": f"{event.type.value} ({' '.join(event.smart_detect_types)})"
if event.smart_detect_types
else f"{event.type}",
else f"{event.type.value}",
"camera_name": await get_camera_name(self._protect, event.camera_id),
}