Mirror of https://github.com/ep1cman/unifi-protect-backup.git (synced 2025-12-05 23:53:30 +00:00)
Compare commits: 36 commits

e65d8dde6c, 90108edeb8, 1194e957a5, 65128b35dd, 64bb353f67, 558859dd72,
d3b40b443a, 4bfe9afc10, c69a3e365a, ace6a09bba, e3c00e3dfa, 5f7fad72d5,
991998aa37, 074f5b372c, 00aec23805, 52e4ecd50d, 6b116ab93b, 70526b2f49,
5069d28f0d, 731ab1081d, 701fd9b0a8, 5fa202005b, 3644ad3754, 9410051ab9,
d5a74f475a, dc8473cc3d, 60901e9a84, 4a0bd87ef2, 8dc0f8a212, 34252c461f,
acc405a1f8, b66d40736c, 171796e5c3, cbc497909d, 66b3344e29, 89cab64679
Bumpversion config:
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.9.4
+current_version = 0.10.7
 commit = True
 tag = True
CHANGELOG.md (46 lines changed):
@@ -4,6 +4,52 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.10.7] - 2024-03-22
+### Fixed
+- Set pyunifiprotect to a minimum version of 5.0.0
+
+## [0.10.6] - 2024-03-22
+### Fixed
+- Bumped `pyunifiprotect` version to work with versions of Unifi Protect after 3.0.10
+
+## [0.10.5] - 2024-01-26
+### Fixed
+- Bumped `pyunifiprotect` version to fix an issue with an old version of yarl
+
+## [0.10.4] - 2024-01-26
+### Fixed
+- Bumped `pyunifiprotect` version to fix an issue caused by new video modes
+
+## [0.10.3] - 2023-12-07
+### Fixed
+- Bumped `pyunifiprotect` version to fix an issue caused by Unifi Protect returning invalid UUIDs
+
+## [0.10.2] - 2023-11-21
+### Fixed
+- Issue where duplicate events were being downloaded, causing database errors
+- The default file path format now uses the event start time instead of the event end time, which makes more logical sense
+
+## [0.10.1] - 2023-11-01
+### Fixed
+- Event type enums were no longer converting to their string values when formatted; this is now done explicitly.
+
+## [0.10.0] - 2023-11-01
+### Added
+- Command line option to skip events longer than a given length (default 2 hours)
+- Docker image is now based on alpine edge, giving access to the latest version of rclone
+### Fixed
+- Failed uploads no longer write to the database, meaning they will be retried
+- Fixed issue with chunked event fetch during the initial ignoring of events
+- Fixed error when no events were fetched for the retention period
+
+## [0.9.5] - 2023-10-07
+### Fixed
+- Errors caused by the latest Unifi Protect version, by bumping the version of pyunifiprotect used
+- Queries for events are now chunked into groups of 500, which should help stop this tool crashing on large
+  Unifi Protect instances
+
 ## [0.9.4] - 2023-07-29
 ### Fixed
 - Time period parsing, 'Y' -> 'y'
Dockerfile:
@@ -1,13 +1,13 @@
 # To build run:
 # make docker
 
-FROM ghcr.io/linuxserver/baseimage-alpine:3.16
+FROM ghcr.io/linuxserver/baseimage-alpine:edge
 
 LABEL maintainer="ep1cman"
 
 WORKDIR /app
 
-COPY dist/unifi_protect_backup-0.9.4.tar.gz sdist.tar.gz
+COPY dist/unifi_protect_backup-0.10.7.tar.gz sdist.tar.gz
 
 # https://github.com/rust-lang/cargo/issues/2808
 ENV CARGO_NET_GIT_FETCH_WITH_CLI=true
@@ -29,7 +29,7 @@ RUN \
     py3-pip \
     python3 && \
   echo "**** install unifi-protect-backup ****" && \
-  pip install --no-cache-dir sdist.tar.gz && \
+  pip install --no-cache-dir --break-system-packages sdist.tar.gz && \
   echo "**** cleanup ****" && \
  apk del --purge \
    build-dependencies && \
README.md (16 lines changed):
@@ -137,6 +137,8 @@ Options:
   --ignore-camera TEXT          IDs of cameras for which events should not be backed up. Use
                                 multiple times to ignore multiple IDs. If being set as an
                                 environment variable the IDs should be separated by whitespace.
+                                Alternatively, use a Unifi user with a role which has access
+                                restricted to the subset of cameras that you wish to back up.
   --file-structure-format TEXT  A Python format string used to generate the file structure/name
                                 on the rclone remote. For details of the fields available, see
                                 the project's `README.md` file. [default: {camera_name}/{event.s
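
For illustration, here is how that default format string expands at upload time. This is a hedged sketch: `Event` below is a stand-in dataclass with only the fields the format string touches, not pyunifiprotect's real class, and the camera/detection values are invented.

```python
from dataclasses import dataclass
from datetime import datetime


@dataclass
class Event:
    """Stand-in for pyunifiprotect's Event; just the fields the format string uses."""

    start: datetime
    end: datetime


fmt = "{camera_name}/{event.start:%Y-%m-%d}/{event.start:%Y-%m-%dT%H-%M-%S} {detection_type}.mp4"
event = Event(start=datetime(2024, 3, 22, 8, 30, 5), end=datetime(2024, 3, 22, 8, 31, 0))

# str.format() forwards the %-style specs to datetime.__format__
path = fmt.format(camera_name="Front Door", event=event, detection_type="person")
print(path)  # Front Door/2024-03-22/2024-03-22T08-30-05 person.mp4
```

Note that both the directory and the filename are now keyed off `event.start`, matching the 0.10.2 changelog entry above.
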
@@ -189,6 +191,10 @@ Options:
                                 https://github.com/caronc/apprise
   --skip-missing                If set, events which are 'missing' at the start will be ignored.
                                 Subsequent missing events will be downloaded (e.g. a missed event)  [default: False]
+  --download-rate-limit FLOAT   Limit how many events can be downloaded in one minute. Disabled by
+                                default
+  --max-event-length INTEGER    Only download events shorter than this maximum length, in
+                                seconds  [default: 7200]
   --help                        Show this message and exit.
 ```
@@ -212,6 +218,8 @@ always take priority over environment variables):
 - `PURGE_INTERVAL`
 - `APPRISE_NOTIFIERS`
 - `SKIP_MISSING`
+- `DOWNLOAD_RATELIMIT`
+- `MAX_EVENT_LENGTH`
 
 ## File path formatting
@@ -236,6 +244,14 @@ now on, you can use the `--skip-missing` flag. This does not enable the periodic
 
 If you use this feature it is advised that you run the tool once with this flag, then stop it once the database has been created and the events are ignored. Keeping this flag set permanently could cause events to be missed if the tool crashes and is restarted, etc.
 
+## Ignoring cameras
+
+Cameras can be excluded from backups by either:
+- Using `--ignore-camera`, see [usage](#usage)
+  - IDs can be obtained by scanning the logs, starting at `Found cameras:` up to the next log line (currently `NVR TZ`). You can find this section of the logs by piping the logs into this `sed` command:
+    `sed -n '/Found cameras:/,/NVR TZ/p'`
+- Using a Unifi user with a role which has access restricted to the subset of cameras that you wish to back up.
+
 # A note about `rclone` backends and disk wear
 This tool attempts not to write the downloaded files to disk, to minimise disk wear, and instead streams them directly to
 rclone. Sadly, not all storage backends supported by `rclone` allow "Stream Uploads". Please refer to the `StreamUpload` column in this table to see which ones do and don't: https://rclone.org/overview/#optional-features
poetry.lock (generated; 2158 lines changed): diff suppressed because it is too large.
pyproject.toml:
@@ -1,7 +1,7 @@
 [tool]
 [tool.poetry]
 name = "unifi_protect_backup"
-version = "0.9.4"
+version = "0.10.7"
 homepage = "https://github.com/ep1cman/unifi-protect-backup"
 description = "Python tool to backup unifi event clips in realtime."
 authors = ["sebastian.goscik <sebastian@goscik.com>"]
@@ -23,13 +23,14 @@ packages = [
 [tool.poetry.dependencies]
 python = ">=3.9.0,<4.0"
 click = "8.0.1"
-pyunifiprotect = "^4.0.11"
-aiorun = "^2022.11.1"
+pyunifiprotect = "^5.0.2"
+aiorun = "^2023.7.2"
 aiosqlite = "^0.17.0"
 python-dateutil = "^2.8.2"
-apprise = "^1.3.0"
+apprise = "^1.5.0"
 expiring-dict = "^1.1.0"
-async-lru = "^2.0.3"
+async-lru = "^2.0.4"
+aiolimiter = "^1.1.0"
 
 [tool.poetry.group.dev]
 optional = true
unifi_protect_backup/__init__.py:
@@ -2,7 +2,7 @@
 
 __author__ = """sebastian.goscik"""
 __email__ = 'sebastian@goscik.com'
-__version__ = '0.9.4'
+__version__ = '0.10.7'
 
 from .downloader import VideoDownloader
 from .event_listener import EventListener
Console script module:
@@ -1,7 +1,10 @@
 """Console script for unifi_protect_backup."""
 
+import re
+
 import click
 from aiorun import run  # type: ignore
+from dateutil.relativedelta import relativedelta
 
 from unifi_protect_backup import __version__
 from unifi_protect_backup.unifi_protect_backup_core import UnifiProtectBackup
@@ -22,6 +25,26 @@ def _parse_detection_types(ctx, param, value):
     return types
 
 
+def parse_rclone_retention(ctx, param, retention) -> relativedelta:
+    """Parses the rclone `retention` parameter into a relativedelta which can then be used to calculate datetimes."""
+    matches = {k: int(v) for v, k in re.findall(r"([\d]+)(ms|s|m|h|d|w|M|y)", retention)}
+
+    # Check that we matched the whole string
+    if len(retention) != len(''.join([f"{v}{k}" for k, v in matches.items()])):
+        raise click.BadParameter("See here for expected format: https://rclone.org/docs/#time-option")
+
+    return relativedelta(
+        microseconds=matches.get("ms", 0) * 1000,
+        seconds=matches.get("s", 0),
+        minutes=matches.get("m", 0),
+        hours=matches.get("h", 0),
+        days=matches.get("d", 0),
+        weeks=matches.get("w", 0),
+        months=matches.get("M", 0),
+        years=matches.get("y", 0),
+    )
+
+
 @click.command(context_settings=dict(max_content_width=100))
 @click.version_option(__version__)
 @click.option('--address', required=True, envvar='UFP_ADDRESS', help='Address of Unifi Protect instance')
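
As a quick illustration of the retention grammar, the same regex the new callback uses can be exercised standalone (illustrative only; in practice click invokes the callback for you):

```python
import re
from datetime import datetime

from dateutil.relativedelta import relativedelta

# Number+unit tokens, exactly as in parse_rclone_retention, e.g. "1w2d12h"
matches = {k: int(v) for v, k in re.findall(r"([\d]+)(ms|s|m|h|d|w|M|y)", "1w2d12h")}
assert matches == {"w": 1, "d": 2, "h": 12}

delta = relativedelta(weeks=1, days=2, hours=12)
cutoff = datetime.now() - delta  # events older than this fall outside retention
print(cutoff)
```

The whole-string check is what makes this callback stricter than the old `utils.parse_rclone_retention` (removed further down): an input like "7 days" produces no number+unit matches, so the reconstructed string is empty, differs from the input, and raises `click.BadParameter` instead of being silently treated as an empty retention period.
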
@@ -49,6 +72,7 @@ def _parse_detection_types(ctx, param, value):
     envvar='RCLONE_RETENTION',
     help="How long should event clips be backed up for. Format as per the `rclone` time option format "
     "(https://rclone.org/docs/#time-option)",
+    callback=parse_rclone_retention,
 )
 @click.option(
     '--rclone-args',
@@ -85,7 +109,7 @@ def _parse_detection_types(ctx, param, value):
 @click.option(
     '--file-structure-format',
     envvar='FILE_STRUCTURE_FORMAT',
-    default="{camera_name}/{event.start:%Y-%m-%d}/{event.end:%Y-%m-%dT%H-%M-%S} {detection_type}.mp4",
+    default="{camera_name}/{event.start:%Y-%m-%d}/{event.start:%Y-%m-%dT%H-%M-%S} {detection_type}.mp4",
     show_default=True,
     help="A Python format string used to generate the file structure/name on the rclone remote. "
     "For details of the fields available, see the project's `README.md` file.",
@@ -139,6 +163,7 @@ all warnings, and websocket data
     envvar='PURGE_INTERVAL',
     help="How frequently to check for files to purge.\n\nNOTE: Can create a lot of API calls, so be careful if "
     "your cloud provider charges you per API call",
+    callback=parse_rclone_retention,
 )
 @click.option(
     '--apprise-notifier',
@@ -170,6 +195,22 @@ If set, events which are 'missing' at the start will be ignored.
 Subsequent missing events will be downloaded (e.g. a missed event)
 """,
 )
+@click.option(
+    '--download-rate-limit',
+    default=None,
+    show_default=True,
+    envvar='DOWNLOAD_RATELIMIT',
+    type=float,
+    help="Limit how many events can be downloaded in one minute. Disabled by default",
+)
+@click.option(
+    '--max-event-length',
+    default=2 * 60 * 60,
+    show_default=True,
+    envvar='MAX_EVENT_LENGTH',
+    type=int,
+    help="Only download events shorter than this maximum length, in seconds",
+)
 def main(**kwargs):
     """A Python based tool for backing up Unifi Protect event clips as they occur."""
     event_listener = UnifiProtectBackup(**kwargs)
unifi_protect_backup/downloader.py:
@@ -11,6 +11,7 @@ import aiosqlite
 import pytz
 from aiohttp.client_exceptions import ClientPayloadError
 from expiring_dict import ExpiringDict  # type: ignore
+from aiolimiter import AsyncLimiter
 from pyunifiprotect import ProtectApiClient
 from pyunifiprotect.data.nvr import Event
 from pyunifiprotect.data.types import EventType
@@ -48,6 +49,8 @@ class VideoDownloader:
         download_queue: asyncio.Queue,
         upload_queue: VideoQueue,
         color_logging: bool,
+        download_rate_limit: float,
+        max_event_length: timedelta,
     ):
         """Init.
@@ -57,6 +60,8 @@ class VideoDownloader:
             download_queue (asyncio.Queue): Queue to get event details from
             upload_queue (VideoQueue): Queue to place downloaded videos on
             color_logging (bool): Whether or not to add color to logging output
+            download_rate_limit (float): Limit how many events can be downloaded in one minute
+            max_event_length (timedelta): Maximum length in seconds for an event to be considered valid and downloaded
         """
         self._protect: ProtectApiClient = protect
         self._db: aiosqlite.Connection = db
@@ -64,6 +69,9 @@ class VideoDownloader:
         self.upload_queue: VideoQueue = upload_queue
         self.current_event = None
         self._failures = ExpiringDict(60 * 60 * 12)  # Time to live = 12h
+        self._download_rate_limit = download_rate_limit
+        self._max_event_length = max_event_length
+        self._limiter = AsyncLimiter(self._download_rate_limit) if self._download_rate_limit is not None else None
 
         self.base_logger = logging.getLogger(__name__)
         setup_event_logger(self.base_logger, color_logging)
@@ -81,11 +89,16 @@ class VideoDownloader:
         """Main loop."""
         self.logger.info("Starting Downloader")
         while True:
+            if self._limiter:
+                self.logger.debug("Waiting for rate limit")
+                await self._limiter.acquire()
+
             try:
                 # Wait for unifi protect to be connected
                 await self._protect.connect_event.wait()
 
                 event = await self.download_queue.get()
 
                 self.current_event = event
                 self.logger = logging.LoggerAdapter(self.base_logger, {'event': f' [{event.id}]'})
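
A minimal sketch of the rate limiting this introduces, assuming `aiolimiter`'s `AsyncLimiter(max_rate, time_period=60)` signature; the one-argument form used above therefore means "at most `max_rate` acquisitions per 60 seconds":

```python
import asyncio

from aiolimiter import AsyncLimiter


async def download_loop() -> None:
    limiter = AsyncLimiter(5)  # mirrors --download-rate-limit 5 (events per minute)
    for i in range(8):
        await limiter.acquire()  # sleeps once the per-minute budget is exhausted
        print(f"downloading event {i}")


asyncio.run(download_loop())
```

Because the limiter is `None` when no rate limit is configured, the `if self._limiter:` guard keeps the default behaviour completely unthrottled.
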
@@ -101,14 +114,19 @@ class VideoDownloader:
             self.logger.debug(f"Video Download Buffer: {output_queue_current_size}/{output_queue_max_size}")
             self.logger.debug(f" Camera: {await get_camera_name(self._protect, event.camera_id)}")
             if event.type == EventType.SMART_DETECT:
-                self.logger.debug(f" Type: {event.type} ({', '.join(event.smart_detect_types)})")
+                self.logger.debug(f" Type: {event.type.value} ({', '.join(event.smart_detect_types)})")
             else:
-                self.logger.debug(f" Type: {event.type}")
+                self.logger.debug(f" Type: {event.type.value}")
             self.logger.debug(f" Start: {event.start.strftime('%Y-%m-%dT%H-%M-%S')} ({event.start.timestamp()})")
             self.logger.debug(f" End: {event.end.strftime('%Y-%m-%dT%H-%M-%S')} ({event.end.timestamp()})")
+            duration = (event.end - event.start).total_seconds()
+            self.logger.debug(f" Duration: {duration}s")
+
+            # Skip invalid events
+            if not self._valid_event(event):
+                await self._ignore_event(event)
+                continue
 
             # Unifi protect does not return full video clips if the clip is requested too soon.
             # There are two issues at play here:
             # - Protect will only cut a clip on a keyframe, which happens every 5s
@@ -137,15 +155,7 @@ class VideoDownloader:
                         self.logger.error(
                             "Event has failed to download 10 times in a row. Permanently ignoring this event"
                         )
-
-                        # ignore event
-                        await self._db.execute(
-                            "INSERT INTO events VALUES "
-                            f"('{event.id}', '{event.type}', '{event.camera_id}',"
-                            f"'{event.start.timestamp()}', '{event.end.timestamp()}')"
-                        )
-                        await self._db.commit()
-
+                        await self._ignore_event(event)
                         continue
 
             # Remove successfully downloaded event from failures list
@@ -184,6 +194,15 @@ class VideoDownloader:
             self.logger.debug(f" Downloaded video size: {human_readable_size(len(video))}s")
             return video
 
+    async def _ignore_event(self, event):
+        self.logger.warning("Ignoring event")
+        await self._db.execute(
+            "INSERT INTO events VALUES "
+            f"('{event.id}', '{event.type.value}', '{event.camera_id}',"
+            f"'{event.start.timestamp()}', '{event.end.timestamp()}')"
+        )
+        await self._db.commit()
+
     async def _check_video_length(self, video, duration):
         """Check if the downloaded event is at least the length of the event, warn otherwise.
@@ -198,3 +217,11 @@ class VideoDownloader:
             self.logger.debug(msg)
         except SubprocessException as e:
             self.logger.warning(" `ffprobe` failed", exc_info=e)
+
+    def _valid_event(self, event):
+        duration = event.end - event.start
+        if duration > self._max_event_length:
+            self.logger.warning(f"Event longer ({duration}) than max allowed length {self._max_event_length}")
+            return False
+
+        return True
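
The new `_valid_event` check is plain `timedelta` arithmetic; for instance, with the CLI default of 7200 seconds:

```python
from datetime import datetime, timedelta

max_event_length = timedelta(seconds=7200)  # the --max-event-length default (2 hours)
start = datetime(2024, 3, 22, 8, 0, 0)
end = datetime(2024, 3, 22, 11, 0, 0)

print((end - start) > max_event_length)  # True: a 3-hour event is ignored, not downloaded
```
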
unifi_protect_backup/event_listener.py:
@@ -61,7 +61,7 @@ class EventListener:
             return
         if msg.new_obj.camera_id in self.ignore_cameras:
             return
-        if msg.new_obj.end is None:
+        if 'end' not in msg.changed_data:
             return
         if msg.new_obj.type not in [EventType.MOTION, EventType.SMART_DETECT, EventType.RING]:
             return
MissingEventChecker module:
@@ -55,65 +55,83 @@ class MissingEventChecker:
         self.interval: int = interval
 
     async def _get_missing_events(self) -> List[Event]:
-        # Get list of events that need to be backed up from unifi protect
-        unifi_events = await self._protect.get_events(
-            start=datetime.now() - self.retention,
-            end=datetime.now(),
-            types=[EventType.MOTION, EventType.SMART_DETECT, EventType.RING],
-        )
-        unifi_events = {event.id: event for event in unifi_events}
-
-        # Get list of events that have been backed up from the database
-        # events(id, type, camera_id, start, end)
-        async with self._db.execute("SELECT * FROM events") as cursor:
-            rows = await cursor.fetchall()
-            db_event_ids = {row[0] for row in rows}
-
-        # Prevent re-adding events currently in the download/upload queue
-        downloading_event_ids = {event.id for event in self._downloader.download_queue._queue}  # type: ignore
-        current_download = self._downloader.current_event
-        if current_download is not None:
-            downloading_event_ids.add(current_download.id)
-
-        uploading_event_ids = {event.id for event, video in self._uploader.upload_queue._queue}  # type: ignore
-        current_upload = self._uploader.current_event
-        if current_upload is not None:
-            uploading_event_ids.add(current_upload.id)
-
-        missing_event_ids = set(unifi_events.keys()) - (db_event_ids | downloading_event_ids | uploading_event_ids)
-
-        def wanted_event_type(event_id):
-            event = unifi_events[event_id]
-            if event.start is None or event.end is None:
-                return False  # This event is still on-going
-            if event.camera_id in self.ignore_cameras:
-                return False
-            if event.type is EventType.MOTION and "motion" not in self.detection_types:
-                return False
-            if event.type is EventType.RING and "ring" not in self.detection_types:
-                return False
-            elif event.type is EventType.SMART_DETECT:
-                for event_smart_detection_type in event.smart_detect_types:
-                    if event_smart_detection_type not in self.detection_types:
-                        return False
-            return True
-
-        wanted_event_ids = set(filter(wanted_event_type, missing_event_ids))
-
-        return [unifi_events[id] for id in wanted_event_ids]
+        start_time = datetime.now() - self.retention
+        end_time = datetime.now()
+        chunk_size = 500
+
+        while True:
+            # Get list of events that need to be backed up from unifi protect
+            logger.extra_debug(f"Fetching events for interval: {start_time} - {end_time}")
+            events_chunk = await self._protect.get_events(
+                start=start_time,
+                end=end_time,
+                types=[EventType.MOTION, EventType.SMART_DETECT, EventType.RING],
+                limit=chunk_size,
+            )
+
+            if not events_chunk:
+                break  # There were no events to back up
+
+            start_time = events_chunk[-1].end
+            unifi_events = {event.id: event for event in events_chunk}
+
+            # Get list of events that have been backed up from the database
+            # events(id, type, camera_id, start, end)
+            async with self._db.execute("SELECT * FROM events") as cursor:
+                rows = await cursor.fetchall()
+                db_event_ids = {row[0] for row in rows}
+
+            # Prevent re-adding events currently in the download/upload queue
+            downloading_event_ids = {event.id for event in self._downloader.download_queue._queue}  # type: ignore
+            current_download = self._downloader.current_event
+            if current_download is not None:
+                downloading_event_ids.add(current_download.id)
+
+            uploading_event_ids = {event.id for event, video in self._uploader.upload_queue._queue}  # type: ignore
+            current_upload = self._uploader.current_event
+            if current_upload is not None:
+                uploading_event_ids.add(current_upload.id)
+
+            missing_event_ids = set(unifi_events.keys()) - (db_event_ids | downloading_event_ids | uploading_event_ids)
+
+            # Exclude events of unwanted types
+            def wanted_event_type(event_id):
+                event = unifi_events[event_id]
+                if event.start is None or event.end is None:
+                    return False  # This event is still on-going
+                if event.camera_id in self.ignore_cameras:
+                    return False
+                if event.type is EventType.MOTION and "motion" not in self.detection_types:
+                    return False
+                if event.type is EventType.RING and "ring" not in self.detection_types:
+                    return False
+                elif event.type is EventType.SMART_DETECT:
+                    for event_smart_detection_type in event.smart_detect_types:
+                        if event_smart_detection_type not in self.detection_types:
+                            return False
+                return True
+
+            wanted_event_ids = set(filter(wanted_event_type, missing_event_ids))
+
+            # Yield events one by one to allow the async loop to start other tasks while
+            # waiting on the full list of events
+            for id in wanted_event_ids:
+                yield unifi_events[id]
+
+            # Last chunk was incomplete, we can stop now
+            if len(events_chunk) < chunk_size:
+                break
 
     async def ignore_missing(self):
         """Ignore missing events by adding them to the event table."""
-        wanted_events = await self._get_missing_events()
-
-        logger.info(f" Ignoring {len(wanted_events)} missing events")
-
-        for event in wanted_events:
+        logger.info(f" Ignoring missing events")
+        async for event in self._get_missing_events():
            logger.extra_debug(f"Ignoring event '{event.id}'")
            await self._db.execute(
                "INSERT INTO events VALUES "
-                f"('{event.id}', '{event.type}', '{event.camera_id}',"
+                f"('{event.id}', '{event.type.value}', '{event.camera_id}',"
                f"'{event.start.timestamp()}', '{event.end.timestamp()}')"
            )
            await self._db.commit()
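
The restructured `_get_missing_events` above is now an async generator. Its core pattern (fetch a chunk, advance the window to the last event's end time, yield, stop on an empty or short chunk) can be sketched in isolation; `fetch` below is a stub standing in for `protect.get_events`, and the `Event` tuple is a stand-in type:

```python
import asyncio
from datetime import datetime, timedelta
from typing import AsyncIterator, List, NamedTuple


class Event(NamedTuple):
    """Stand-in for pyunifiprotect's Event."""

    id: str
    end: datetime


async def fetch(start: datetime, end: datetime, limit: int) -> List[Event]:
    """Stand-in for protect.get_events(); a real call would hit the NVR API."""
    return []


async def iter_events(start: datetime, end: datetime, chunk_size: int = 500) -> AsyncIterator[Event]:
    while True:
        chunk = await fetch(start, end, limit=chunk_size)
        if not chunk:
            break  # nothing (left) to back up
        start = chunk[-1].end  # resume the window from the last event seen
        for event in chunk:
            yield event  # lets other tasks run while later chunks are pending
        if len(chunk) < chunk_size:
            break  # a short chunk means the range is exhausted


async def main() -> None:
    async for event in iter_events(datetime.now() - timedelta(days=7), datetime.now()):
        print(event.id)


asyncio.run(main())
```

Chunking caps memory use and API-response size on large Unifi Protect instances, which is exactly the failure mode the 0.9.5 changelog entry describes.
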
@@ -123,28 +141,24 @@ class MissingEventChecker:
         logger.info("Starting Missing Event Checker")
         while True:
             try:
+                shown_warning = False
+
                 # Wait for unifi protect to be connected
                 await self._protect.connect_event.wait()
 
-                logger.extra_debug("Running check for missing events...")
+                logger.debug("Running check for missing events...")
 
-                wanted_events = await self._get_missing_events()
-
-                logger.debug(f" Undownloaded events of wanted types: {len(wanted_events)}")
-
-                if len(wanted_events) > 20:
-                    logger.warning(f" Adding {len(wanted_events)} missing events to backup queue")
-                    missing_logger = logger.extra_debug
-                else:
-                    missing_logger = logger.warning
-
-                for event in wanted_events:
-                    if event.type != EventType.SMART_DETECT:
-                        event_name = f"{event.id} ({event.type})"
+                async for event in self._get_missing_events():
+                    if not shown_warning:
+                        logger.warning(f" Found missing events, adding to backup queue")
+                        shown_warning = True
+
+                    if event.type != EventType.SMART_DETECT:
+                        event_name = f"{event.id} ({event.type.value})"
                     else:
                         event_name = f"{event.id} ({', '.join(event.smart_detect_types)})"
 
-                    missing_logger(
+                    logger.extra_debug(
                         f" Adding missing event to backup queue: {event_name}"
                         f" ({event.start.strftime('%Y-%m-%dT%H-%M-%S')} -"
                         f" {event.end.strftime('%Y-%m-%dT%H-%M-%S')})"
unifi_protect_backup/unifi_protect_backup_core.py:
@@ -3,10 +3,11 @@ import asyncio
 import logging
 import os
 import shutil
-from datetime import datetime, timezone
+from datetime import datetime, timezone, timedelta
 from typing import Callable, List
 
 import aiosqlite
+from dateutil.relativedelta import relativedelta
 from pyunifiprotect import ProtectApiClient
 from pyunifiprotect.data.types import ModelType
@@ -18,14 +19,7 @@ from unifi_protect_backup import (
     VideoUploader,
     notifications,
 )
-from unifi_protect_backup.utils import (
-    SubprocessException,
-    VideoQueue,
-    human_readable_size,
-    parse_rclone_retention,
-    run_command,
-    setup_logging,
-)
+from unifi_protect_backup.utils import SubprocessException, VideoQueue, human_readable_size, run_command, setup_logging
 
 logger = logging.getLogger(__name__)
@@ -57,7 +51,7 @@ class UnifiProtectBackup:
         password: str,
         verify_ssl: bool,
         rclone_destination: str,
-        retention: str,
+        retention: relativedelta,
         rclone_args: str,
         rclone_purge_args: str,
         detection_types: List[str],
@@ -65,11 +59,13 @@ class UnifiProtectBackup:
         file_structure_format: str,
         verbose: int,
         download_buffer_size: int,
-        purge_interval: str,
+        purge_interval: relativedelta,
         apprise_notifiers: str,
         skip_missing: bool,
+        max_event_length: int,
         sqlite_path: str = "events.sqlite",
-        color_logging=False,
+        color_logging: bool = False,
+        download_rate_limit: float = None,
         port: int = 443,
     ):
         """Will configure logging settings and the Unifi Protect API (but not actually connect).
@@ -99,6 +95,8 @@ class UnifiProtectBackup:
             skip_missing (bool): If initial missing events should be ignored
             sqlite_path (str): Path where to find/create sqlite database
             color_logging (bool): Whether to add color to logging output or not
+            download_rate_limit (float): Limit how many events can be downloaded in one minute. Disabled by default
+            max_event_length (int): Maximum length in seconds for an event to be considered valid and downloaded
         """
         self.color_logging = color_logging
         setup_logging(verbose, self.color_logging)
@@ -133,9 +131,11 @@ class UnifiProtectBackup:
         logger.debug(f" {purge_interval=}")
         logger.debug(f" {apprise_notifiers=}")
         logger.debug(f" {skip_missing=}")
+        logger.debug(f" {download_rate_limit=} events per minute")
+        logger.debug(f" {max_event_length=}s")
 
         self.rclone_destination = rclone_destination
-        self.retention = parse_rclone_retention(retention)
+        self.retention = retention
         self.rclone_args = rclone_args
         self.rclone_purge_args = rclone_purge_args
         self.file_structure_format = file_structure_format
@@ -162,8 +162,10 @@ class UnifiProtectBackup:
         self._sqlite_path = sqlite_path
         self._db = None
         self._download_buffer_size = download_buffer_size
-        self._purge_interval = parse_rclone_retention(purge_interval)
+        self._purge_interval = purge_interval
         self._skip_missing = skip_missing
+        self._download_rate_limit = download_rate_limit
+        self._max_event_length = timedelta(seconds=max_event_length)
 
     async def start(self):
         """Bootstrap the backup process and kick off the main loop.
@@ -183,7 +185,7 @@ class UnifiProtectBackup:
         # Start the pyunifiprotect connection by calling `update`
         logger.info("Connecting to Unifi Protect...")
 
-        for attempts in range(1):
+        for attempts in range(10):
             try:
                 await self._protect.update()
                 break
@@ -223,7 +225,15 @@ class UnifiProtectBackup:
 
         # Create downloader task
         # This will download video files to its buffer
-        downloader = VideoDownloader(self._protect, self._db, download_queue, upload_queue, self.color_logging)
+        downloader = VideoDownloader(
+            self._protect,
+            self._db,
+            download_queue,
+            upload_queue,
+            self.color_logging,
+            self._download_rate_limit,
+            self._max_event_length,
+        )
         tasks.append(downloader.start())
 
         # Create upload task
VideoUploader module:
@@ -9,7 +9,14 @@ import aiosqlite
 from pyunifiprotect import ProtectApiClient
 from pyunifiprotect.data.nvr import Event
 
-from unifi_protect_backup.utils import VideoQueue, get_camera_name, human_readable_size, run_command, setup_event_logger
+from unifi_protect_backup.utils import (
+    VideoQueue,
+    get_camera_name,
+    human_readable_size,
+    run_command,
+    setup_event_logger,
+    SubprocessException,
+)
 
 
 class VideoUploader:
@@ -74,10 +81,13 @@ class VideoUploader:
                 destination = await self._generate_file_path(event)
                 self.logger.debug(f" Destination: {destination}")
 
-                await self._upload_video(video, destination, self._rclone_args)
-                await self._update_database(event, destination)
-
-                self.logger.debug("Uploaded")
+                try:
+                    await self._upload_video(video, destination, self._rclone_args)
+                    await self._update_database(event, destination)
+                    self.logger.debug("Uploaded")
+                except SubprocessException:
+                    self.logger.error(f" Failed to upload file: '{destination}'")
 
                 self.current_event = None
 
             except Exception as e:
@@ -99,7 +109,7 @@ class VideoUploader:
         """
         returncode, stdout, stderr = await run_command(f'rclone rcat -vv {rclone_args} "{destination}"', video)
         if returncode != 0:
-            self.logger.error(f" Failed to upload file: '{destination}'")
+            raise SubprocessException(stdout, stderr, returncode)
 
     async def _update_database(self, event: Event, destination: str):
         """Add the backed up event to the database along with where it was backed up to."""
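
The upload path streams the in-memory clip to `rclone rcat` over stdin, so nothing touches the local disk. A self-contained sketch of that mechanism, with a hypothetical remote path (`rclone` must be on PATH and the remote configured):

```python
import asyncio


async def stream_to_rclone(video: bytes, destination: str) -> None:
    """Pipe the clip's bytes straight to the remote via rclone rcat."""
    proc = await asyncio.create_subprocess_shell(
        f'rclone rcat -vv "{destination}"',
        stdin=asyncio.subprocess.PIPE,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    stdout, stderr = await proc.communicate(input=video)
    if proc.returncode != 0:
        # As in the change above: surface the failure so the caller skips the
        # database write and the event gets retried later.
        raise RuntimeError(stderr.decode())


# asyncio.run(stream_to_rclone(b"<mp4 bytes>", "myremote:backups/clip.mp4"))
```
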
@@ -107,7 +117,7 @@ class VideoUploader:
         assert isinstance(event.end, datetime)
         await self._db.execute(
             "INSERT INTO events VALUES "
-            f"('{event.id}', '{event.type}', '{event.camera_id}',"
+            f"('{event.id}', '{event.type.value}', '{event.camera_id}',"
             f"'{event.start.timestamp()}', '{event.end.timestamp()}')"
         )
@@ -147,9 +157,9 @@ class VideoUploader:
         format_context = {
             "event": event,
             "duration_seconds": (event.end - event.start).total_seconds(),
-            "detection_type": f"{event.type} ({' '.join(event.smart_detect_types)})"
+            "detection_type": f"{event.type.value} ({' '.join(event.smart_detect_types)})"
             if event.smart_detect_types
-            else f"{event.type}",
+            else f"{event.type.value}",
             "camera_name": await get_camera_name(self._protect, event.camera_id),
         }
unifi_protect_backup/utils.py:
@@ -7,10 +7,9 @@ from datetime import datetime
 from typing import List, Optional
 
 from apprise import NotifyType
-from dateutil.relativedelta import relativedelta
+from async_lru import alru_cache
 from pyunifiprotect import ProtectApiClient
 from pyunifiprotect.data.nvr import Event
-from async_lru import alru_cache
 
 from unifi_protect_backup import notifications
@@ -287,7 +286,7 @@ async def get_camera_name(protect: ProtectApiClient, id: str):
     If the camera ID is not known, it tries refreshing the cached data
     """
     # Wait for unifi protect to be connected
-    await protect.connect_event.wait()
+    await protect.connect_event.wait()  # type: ignore
 
     try:
         return protect.bootstrap.cameras[id].name
@@ -328,21 +327,6 @@ class SubprocessException(Exception):
         return f"Return Code: {self.returncode}\nStdout:\n{self.stdout}\nStderr:\n{self.stderr}"
 
 
-def parse_rclone_retention(retention: str) -> relativedelta:
-    """Parses the rclone `retention` parameter into a relativedelta which can then be used to calculate datetimes."""
-    matches = {k: int(v) for v, k in re.findall(r"([\d]+)(ms|s|m|h|d|w|M|y)", retention)}
-    return relativedelta(
-        microseconds=matches.get("ms", 0) * 1000,
-        seconds=matches.get("s", 0),
-        minutes=matches.get("m", 0),
-        hours=matches.get("h", 0),
-        days=matches.get("d", 0),
-        weeks=matches.get("w", 0),
-        months=matches.get("M", 0),
-        years=matches.get("y", 0),
-    )
-
-
 async def run_command(cmd: str, data=None):
     """Runs the given command returning the exit code, stdout and stderr."""
     proc = await asyncio.create_subprocess_shell(