Mirror of https://github.com/ep1cman/unifi-protect-backup.git (synced 2025-12-05 23:53:30 +00:00)
Compare commits
17 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 5f7fad72d5 | |
| | 991998aa37 | |
| | 074f5b372c | |
| | 00aec23805 | |
| | 52e4ecd50d | |
| | 6b116ab93b | |
| | 70526b2f49 | |
| | 5069d28f0d | |
| | 731ab1081d | |
| | 701fd9b0a8 | |
| | 5fa202005b | |
| | 3644ad3754 | |
| | 9410051ab9 | |
| | d5a74f475a | |
| | dc8473cc3d | |
| | 60901e9a84 | |
| | 4a0bd87ef2 | |
**bumpversion configuration**

```diff
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.9.5
+current_version = 0.10.3
 commit = True
 tag = True
```
**CHANGELOG.md** (+22 lines)
```diff
@@ -4,6 +4,28 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.10.3] - 2023-12-07
+### Fixed
+- Bumped `pyunifiprotect` version to fix issue caused by unifi protect returning invalid UUIDs
+
+## [0.10.2] - 2023-11-21
+### Fixed
+- Issue where duplicate events were being downloaded causing database errors
+- Default file path format now uses event start time instead of event end time which makes more logical sense
+
+## [0.10.1] - 2023-11-01
+### Fixed
+- Event type enum conversion string was no longer converting to the enum value, this is now done explicitly.
+
+## [0.10.0] - 2023-11-01
+### Added
+- Command line option to skip events longer than a given length (default 2 hours)
+- Docker image is now based on alpine edge giving access to the latest version of rclone
+### Fixed
+- Failed uploads no longer write to the database, meaning they will be retried
+- Fixed issue with chunked event fetch during initial ignore of events
+- Fixed error when no events were fetched for the retention period
+
 ## [0.9.5] - 2023-10-07
 ### Fixed
 - Errors caused by latest unifi protect version by bumping the version of pyunifiprotect used
```
**Dockerfile**

```diff
@@ -1,13 +1,13 @@
 # To build run:
 # make docker
 
-FROM ghcr.io/linuxserver/baseimage-alpine:3.16
+FROM ghcr.io/linuxserver/baseimage-alpine:edge
 
 LABEL maintainer="ep1cman"
 
 WORKDIR /app
 
-COPY dist/unifi_protect_backup-0.9.5.tar.gz sdist.tar.gz
+COPY dist/unifi_protect_backup-0.10.3.tar.gz sdist.tar.gz
 
 # https://github.com/rust-lang/cargo/issues/2808
 ENV CARGO_NET_GIT_FETCH_WITH_CLI=true
@@ -29,7 +29,7 @@ RUN \
     py3-pip \
     python3 && \
   echo "**** install unifi-protect-backup ****" && \
-  pip install --no-cache-dir sdist.tar.gz && \
+  pip install --no-cache-dir --break-system-packages sdist.tar.gz && \
   echo "**** cleanup ****" && \
   apk del --purge \
     build-dependencies && \
```
**README.md**

````diff
@@ -191,6 +191,8 @@ Options:
                                  Subsequent missing events will be downloaded (e.g. a missed event) [default: False]
   --download-rate-limit FLOAT    Limit how events can be downloaded in one minute. Disabled by
                                  default
+  --max-event-length INTEGER     Only download events shorter than this maximum length, in
+                                 seconds  [default: 7200]
   --help                         Show this message and exit.
 ```
@@ -215,6 +217,7 @@ always take priority over environment variables):
 - `APPRISE_NOTIFIERS`
 - `SKIP_MISSING`
 - `DOWNLOAD_RATELIMIT`
+- `MAX_EVENT_LENGTH`
 
 ## File path formatting
 
````
**poetry.lock** (generated, 10 lines changed)
```diff
@@ -1791,21 +1791,21 @@ files = [
 
 [[package]]
 name = "pyunifiprotect"
-version = "4.21.0"
+version = "4.22.0"
 description = "Unofficial UniFi Protect Python API and CLI"
 category = "main"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "pyunifiprotect-4.21.0-py3-none-any.whl", hash = "sha256:6a67a3a4b15576695d140f80de2d97890d0be8d3b1a0c0bc1effde1fd646880e"},
-    {file = "pyunifiprotect-4.21.0.tar.gz", hash = "sha256:d21f5144f16037fd11f192db52ab0cd99db8fb1f8670abc2afb1c0fa04cdb9de"},
+    {file = "pyunifiprotect-4.22.0-py3-none-any.whl", hash = "sha256:21eab9e40a349c9b550715c34728c64fdac7d5d0f2de71644645dff804df04c4"},
+    {file = "pyunifiprotect-4.22.0.tar.gz", hash = "sha256:53b3c6b11f02605ff774343797f6468ed35fa9c0c99c6957c578c9871f47d449"},
 ]
 
 [package.dependencies]
 aiofiles = "*"
 aiohttp = "*"
 aioshutil = "*"
-async-timeout = "*"
+async-timeout = {version = "*", markers = "python_version < \"3.11\""}
 dateparser = "*"
 orjson = "*"
 packaging = "*"
@@ -1816,7 +1816,7 @@ typer = {version = ">0.6", extras = ["all"]}
 
 [package.extras]
 backup = ["aiosqlite", "asyncify", "av", "sqlalchemy[asyncio]"]
-dev = ["base36", "black", "build", "coverage[toml]", "flake8", "flake8-docstrings", "ipython", "mike", "mkdocs-git-revision-date-localized-plugin", "mkdocs-include-markdown-plugin", "mkdocs-material", "mkdocstrings[python]", "mypy", "pip-tools", "pydocstyle", "pylint", "pylint-strict-informational", "pyproject-flake8", "pytest", "pytest-asyncio", "pytest-benchmark", "pytest-cov", "pytest-sugar", "pytest-timeout (>=1.2.1)", "pytest-xdist", "sqlalchemy[asyncio,mypy]", "termcolor", "types-aiofiles", "types-dateparser", "types-pillow", "types-pyjwt", "types-termcolor", "tzdata"]
+dev = ["base36", "black", "build", "coverage[toml]", "ipython", "isort", "mike", "mkdocs-git-revision-date-localized-plugin", "mkdocs-include-markdown-plugin", "mkdocs-material", "mkdocstrings[python]", "mypy", "pip-tools", "pydocstyle", "pytest", "pytest-asyncio", "pytest-benchmark", "pytest-cov", "pytest-sugar", "pytest-timeout (>=1.2.1)", "pytest-xdist[psutil]", "ruff", "sqlalchemy[asyncio,mypy]", "termcolor", "types-aiofiles", "types-dateparser", "types-pillow", "types-pyjwt", "types-termcolor", "tzdata"]
 full = ["aiosqlite", "asyncify", "av", "ipython", "python-dotenv", "sqlalchemy[asyncio]", "termcolor"]
 shell = ["ipython", "python-dotenv", "termcolor"]
 tz = ["tzdata"]
```
**pyproject.toml**

```diff
@@ -1,7 +1,7 @@
 [tool]
 [tool.poetry]
 name = "unifi_protect_backup"
-version = "0.9.5"
+version = "0.10.3"
 homepage = "https://github.com/ep1cman/unifi-protect-backup"
 description = "Python tool to backup unifi event clips in realtime."
 authors = ["sebastian.goscik <sebastian@goscik.com>"]
```
**`unifi_protect_backup/__init__.py`**

```diff
@@ -2,7 +2,7 @@
 
 __author__ = """sebastian.goscik"""
 __email__ = 'sebastian@goscik.com'
-__version__ = '0.9.5'
+__version__ = '0.10.3'
 
 from .downloader import VideoDownloader
 from .event_listener import EventListener
```
**CLI entry point** (the `click` option definitions)

```diff
@@ -109,7 +109,7 @@ def parse_rclone_retention(ctx, param, retention) -> relativedelta:
 @click.option(
     '--file-structure-format',
     envvar='FILE_STRUCTURE_FORMAT',
-    default="{camera_name}/{event.start:%Y-%m-%d}/{event.end:%Y-%m-%dT%H-%M-%S} {detection_type}.mp4",
+    default="{camera_name}/{event.start:%Y-%m-%d}/{event.start:%Y-%m-%dT%H-%M-%S} {detection_type}.mp4",
     show_default=True,
     help="A Python format string used to generate the file structure/name on the rclone remote."
     "For details of the fields available, see the projects `README.md` file.",
```
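This switches the default clip name from `event.end` to `event.start` (the 0.10.2 fix), so the date folder and the file name are now derived from the same moment. A minimal sketch of how such a format string renders; the `Event` dataclass is a hypothetical stand-in for pyunifiprotect's event object:

```python
from dataclasses import dataclass
from datetime import datetime


@dataclass
class Event:
    """Hypothetical stand-in for pyunifiprotect's Event, for illustration only."""
    start: datetime
    end: datetime


fmt = "{camera_name}/{event.start:%Y-%m-%d}/{event.start:%Y-%m-%dT%H-%M-%S} {detection_type}.mp4"
event = Event(start=datetime(2023, 11, 21, 8, 30, 5), end=datetime(2023, 11, 21, 8, 31, 0))

# str.format applies the strftime-style spec after the attribute lookup:
print(fmt.format(camera_name="Front Door", event=event, detection_type="motion"))
# -> Front Door/2023-11-21/2023-11-21T08-30-05 motion.mp4
```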
The same CLI module also gains the new `--max-event-length` option:

```diff
@@ -203,6 +203,14 @@ Subsequent missing events will be downloaded (e.g. a missed event)
     type=float,
     help="Limit how events can be downloaded in one minute. Disabled by default",
 )
+@click.option(
+    '--max-event-length',
+    default=2 * 60 * 60,
+    show_default=True,
+    envvar='MAX_EVENT_LENGTH',
+    type=int,
+    help="Only download events shorter than this maximum length, in seconds",
+)
 def main(**kwargs):
     """A Python based tool for backing up Unifi Protect event clips as they occur."""
     event_listener = UnifiProtectBackup(**kwargs)
```
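`click` resolves such an option as command line over environment variable over default, so setting `MAX_EVENT_LENGTH=3600` changes the limit only when `--max-event-length` is not passed explicitly. A self-contained sketch of the same pattern:

```python
import click


@click.command()
@click.option(
    '--max-event-length',
    default=2 * 60 * 60,        # 7200 s, i.e. the 2 hour default
    show_default=True,
    envvar='MAX_EVENT_LENGTH',  # consulted only when the flag is absent
    type=int,
)
def demo(max_event_length):
    """Echo the resolved event-length limit."""
    click.echo(f"limit: {max_event_length}s")


if __name__ == '__main__':
    demo()
```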
**`VideoDownloader` (downloader module)** — the constructor, its docstring, and the instance attributes pick up the new `max_event_length`:

```diff
@@ -50,6 +50,7 @@ class VideoDownloader:
         upload_queue: VideoQueue,
         color_logging: bool,
         download_rate_limit: float,
+        max_event_length: timedelta,
     ):
         """Init.
 
@@ -60,6 +61,7 @@ class VideoDownloader:
             upload_queue (VideoQueue): Queue to place downloaded videos on
             color_logging (bool): Whether or not to add color to logging output
             download_rate_limit (float): Limit how events can be downloaded in one minute",
+            max_event_length (timedelta): Maximum length in seconds for an event to be considered valid and downloaded
         """
         self._protect: ProtectApiClient = protect
         self._db: aiosqlite.Connection = db
@@ -68,6 +70,7 @@ class VideoDownloader:
         self.current_event = None
         self._failures = ExpiringDict(60 * 60 * 12)  # Time to live = 12h
         self._download_rate_limit = download_rate_limit
+        self._max_event_length = max_event_length
         self._limiter = AsyncLimiter(self._download_rate_limit) if self._download_rate_limit is not None else None
 
         self.base_logger = logging.getLogger(__name__)
```
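For context on the `_limiter` line kept above: `aiolimiter.AsyncLimiter(max_rate)` defaults its `time_period` to 60 seconds, which is what makes `AsyncLimiter(self._download_rate_limit)` an events-per-minute limit. A minimal sketch of the pattern, with an illustrative worker body:

```python
import asyncio

from aiolimiter import AsyncLimiter


async def download_all(event_ids, rate_per_minute: float = 5.0):
    # AsyncLimiter(max_rate) uses a default time_period of 60 s,
    # i.e. at most rate_per_minute acquisitions per minute.
    limiter = AsyncLimiter(rate_per_minute)
    for event_id in event_ids:
        async with limiter:  # waits here once the per-minute budget is spent
            print(f"downloading {event_id}")


asyncio.run(download_all([f"evt-{i}" for i in range(3)]))
```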
```diff
@@ -95,6 +98,7 @@ class VideoDownloader:
             await self._protect.connect_event.wait()
 
             event = await self.download_queue.get()
+
             self.current_event = event
             self.logger = logging.LoggerAdapter(self.base_logger, {'event': f' [{event.id}]'})
 
```
```diff
@@ -110,14 +114,19 @@ class VideoDownloader:
             self.logger.debug(f"Video Download Buffer: {output_queue_current_size}/{output_queue_max_size}")
             self.logger.debug(f"  Camera: {await get_camera_name(self._protect, event.camera_id)}")
             if event.type == EventType.SMART_DETECT:
-                self.logger.debug(f"  Type: {event.type} ({', '.join(event.smart_detect_types)})")
+                self.logger.debug(f"  Type: {event.type.value} ({', '.join(event.smart_detect_types)})")
             else:
-                self.logger.debug(f"  Type: {event.type}")
+                self.logger.debug(f"  Type: {event.type.value}")
             self.logger.debug(f"  Start: {event.start.strftime('%Y-%m-%dT%H-%M-%S')} ({event.start.timestamp()})")
             self.logger.debug(f"  End: {event.end.strftime('%Y-%m-%dT%H-%M-%S')} ({event.end.timestamp()})")
+            duration = (event.end - event.start).total_seconds()
+            self.logger.debug(f"  Duration: {duration}s")
+
+            # Skip invalid events
+            if not self._valid_event(event):
+                await self._ignore_event(event)
+                continue
 
             # Unifi protect does not return full video clips if the clip is requested too soon.
             # There are two issues at play here:
             #  - Protect will only cut a clip on an keyframe which happen every 5s
```
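The `{event.type}` to `{event.type.value}` changes in this hunk (and the matching ones below) are evidently the 0.10.1 fix: Python 3.11 changed how `str`-mixin enums format, so an f-string that used to produce the member's value now produces the qualified member name. A sketch with a hypothetical stand-in for pyunifiprotect's `EventType`:

```python
from enum import Enum


class EventType(str, Enum):
    """Hypothetical stand-in for pyunifiprotect's EventType."""
    MOTION = "motion"


# Python <= 3.10: f"{EventType.MOTION}" -> "motion"
# Python >= 3.11: f"{EventType.MOTION}" -> "EventType.MOTION"
# .value is unambiguous on every version:
assert EventType.MOTION.value == "motion"
```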
```diff
@@ -146,15 +155,7 @@ class VideoDownloader:
                     self.logger.error(
                         "Event has failed to download 10 times in a row. Permanently ignoring this event"
                     )
-
-                    # ignore event
-                    await self._db.execute(
-                        "INSERT INTO events VALUES "
-                        f"('{event.id}', '{event.type}', '{event.camera_id}',"
-                        f"'{event.start.timestamp()}', '{event.end.timestamp()}')"
-                    )
-                    await self._db.commit()
-
+                    await self._ignore_event(event)
                     continue
 
             # Remove successfully downloaded event from failures list
```
```diff
@@ -193,6 +194,15 @@ class VideoDownloader:
         self.logger.debug(f"  Downloaded video size: {human_readable_size(len(video))}s")
         return video
 
+    async def _ignore_event(self, event):
+        self.logger.warning("Ignoring event")
+        await self._db.execute(
+            "INSERT INTO events VALUES "
+            f"('{event.id}', '{event.type.value}', '{event.camera_id}',"
+            f"'{event.start.timestamp()}', '{event.end.timestamp()}')"
+        )
+        await self._db.commit()
+
     async def _check_video_length(self, video, duration):
         """Check if the downloaded event is at least the length of the event, warn otherwise.
 
```
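`_ignore_event` builds the `INSERT` with f-strings, as the rest of the project does; the values come from the NVR rather than arbitrary user input. For reference, aiosqlite also accepts the standard placeholder form, a sketch assuming the same five-column `events` table:

```python
import aiosqlite


async def ignore_event(db: aiosqlite.Connection, event) -> None:
    # Same insert as above, but with sqlite placeholders instead of
    # f-string interpolation.
    await db.execute(
        "INSERT INTO events VALUES (?, ?, ?, ?, ?)",
        (
            event.id,
            event.type.value,
            event.camera_id,
            event.start.timestamp(),
            event.end.timestamp(),
        ),
    )
    await db.commit()
```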
```diff
@@ -207,3 +217,11 @@ class VideoDownloader:
                 self.logger.debug(msg)
         except SubprocessException as e:
             self.logger.warning("    `ffprobe` failed", exc_info=e)
+
+    def _valid_event(self, event):
+        duration = event.end - event.start
+        if duration > self._max_event_length:
+            self.logger.warning(f"Event longer ({duration}) than max allowed length {self._max_event_length}")
+            return False
+
+        return True
```
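`_valid_event` leans on `timedelta` comparing naturally; the seconds-to-`timedelta` conversion happens once in `UnifiProtectBackup.__init__` (see the `timedelta(seconds=max_event_length)` hunk further down). A self-contained sketch of the same check:

```python
from datetime import datetime, timedelta


def valid_event(start: datetime, end: datetime, max_length: timedelta) -> bool:
    # Events longer than the configured maximum are ignored rather than downloaded.
    return (end - start) <= max_length


limit = timedelta(seconds=2 * 60 * 60)  # the 7200 s default
print(valid_event(datetime(2023, 11, 1, 8, 0), datetime(2023, 11, 1, 9, 0), limit))   # True
print(valid_event(datetime(2023, 11, 1, 8, 0), datetime(2023, 11, 1, 11, 0), limit))  # False
```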
**`EventListener`**

```diff
@@ -61,7 +61,7 @@ class EventListener:
             return
         if msg.new_obj.camera_id in self.ignore_cameras:
             return
-        if msg.new_obj.end is None:
+        if 'end' not in msg.changed_data:
             return
         if msg.new_obj.type not in [EventType.MOTION, EventType.SMART_DETECT, EventType.RING]:
             return
```
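The listener now keys off which fields a websocket message actually changed rather than the resulting object state: once an event has ended, `end` presumably stays set on every later update for that event, so the old `msg.new_obj.end is None` check also passed for follow-up messages and could re-queue the same event (plausibly the duplicate-download bug fixed in 0.10.2). A sketch with hypothetical message shapes:

```python
from types import SimpleNamespace


def should_backup(msg) -> bool:
    # React only to the update that actually sets `end`; later updates to
    # the same event no longer list `end` among the changed fields.
    return 'end' in msg.changed_data


first = SimpleNamespace(changed_data={'end': '2023-11-21T08:31:00'})  # event just finished
later = SimpleNamespace(changed_data={'score': 87})                   # end already set earlier
print(should_backup(first), should_backup(later))  # True False
```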
**`MissingEventChecker`**

```diff
@@ -69,6 +69,9 @@ class MissingEventChecker:
                 limit=chunk_size,
             )
 
+            if not events_chunk:
+                break  # There were no events to backup
+
             start_time = events_chunk[-1].end
             unifi_events = {event.id: event for event in events_chunk}
 
```
```diff
@@ -122,15 +125,13 @@ class MissingEventChecker:
 
     async def ignore_missing(self):
         """Ignore missing events by adding them to the event table."""
-        wanted_events = await self._get_missing_events()
-
-        logger.info(f" Ignoring {len(wanted_events)} missing events")
-
-        for event in wanted_events:
+        logger.info(f" Ignoring missing events")
+
+        async for event in self._get_missing_events():
             logger.extra_debug(f"Ignoring event '{event.id}'")
             await self._db.execute(
                 "INSERT INTO events VALUES "
-                f"('{event.id}', '{event.type}', '{event.camera_id}',"
+                f"('{event.id}', '{event.type.value}', '{event.camera_id}',"
                 f"'{event.start.timestamp()}', '{event.end.timestamp()}')"
             )
             await self._db.commit()
```
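The switch to `async for` implies `_get_missing_events` was reworked from a coroutine returning a list into an async generator that yields events as it pages through them, so the full missing-event set never has to sit in memory; that is also why the `len()` disappeared from the log line. The pattern, sketched with a hypothetical chunked fetcher:

```python
import asyncio


async def fetch_chunk(offset: int) -> list:
    """Hypothetical paged fetch returning an empty list when exhausted."""
    chunks = [["evt-1", "evt-2"], ["evt-3"], []]
    return chunks[offset]


async def get_missing_events():
    # Async generator: yield events chunk by chunk instead of returning one big list.
    offset = 0
    while True:
        chunk = await fetch_chunk(offset)
        if not chunk:
            break  # nothing left to back up
        for event in chunk:
            yield event
        offset += 1


async def main():
    async for event in get_missing_events():
        print(f"Ignoring event '{event}'")


asyncio.run(main())
```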
```diff
@@ -153,7 +154,7 @@ class MissingEventChecker:
                 shown_warning = True
 
             if event.type != EventType.SMART_DETECT:
-                event_name = f"{event.id} ({event.type})"
+                event_name = f"{event.id} ({event.type.value})"
             else:
                 event_name = f"{event.id} ({', '.join(event.smart_detect_types)})"
 
```
**`UnifiProtectBackup` (core module)**

```diff
@@ -3,7 +3,7 @@ import asyncio
 import logging
 import os
 import shutil
-from datetime import datetime, timezone
+from datetime import datetime, timezone, timedelta
 from typing import Callable, List
 
 import aiosqlite
```
```diff
@@ -62,6 +62,7 @@ class UnifiProtectBackup:
         purge_interval: relativedelta,
         apprise_notifiers: str,
         skip_missing: bool,
+        max_event_length: int,
         sqlite_path: str = "events.sqlite",
         color_logging: bool = False,
         download_rate_limit: float = None,
@@ -95,6 +96,7 @@ class UnifiProtectBackup:
             sqlite_path (str): Path where to find/create sqlite database
             color_logging (bool): Whether to add color to logging output or not
             download_rate_limit (float): Limit how events can be downloaded in one minute. Disabled by default",
+            max_event_length (int): Maximum length in seconds for an event to be considered valid and downloaded
         """
         self.color_logging = color_logging
         setup_logging(verbose, self.color_logging)
```
```diff
@@ -130,6 +132,7 @@ class UnifiProtectBackup:
         logger.debug(f"  {apprise_notifiers=}")
         logger.debug(f"  {skip_missing=}")
         logger.debug(f"  {download_rate_limit=} events per minute")
+        logger.debug(f"  {max_event_length=}s")
 
         self.rclone_destination = rclone_destination
         self.retention = retention
```
```diff
@@ -162,6 +165,7 @@ class UnifiProtectBackup:
         self._purge_interval = purge_interval
         self._skip_missing = skip_missing
         self._download_rate_limit = download_rate_limit
+        self._max_event_length = timedelta(seconds=max_event_length)
 
     async def start(self):
         """Bootstrap the backup process and kick off the main loop.
```
```diff
@@ -222,7 +226,13 @@ class UnifiProtectBackup:
         # Create downloader task
         # This will download video files to its buffer
         downloader = VideoDownloader(
-            self._protect, self._db, download_queue, upload_queue, self.color_logging, self._download_rate_limit
+            self._protect,
+            self._db,
+            download_queue,
+            upload_queue,
+            self.color_logging,
+            self._download_rate_limit,
+            self._max_event_length,
         )
         tasks.append(downloader.start())
 
```
**`VideoUploader` (uploader module)**

```diff
@@ -9,7 +9,14 @@ import aiosqlite
 from pyunifiprotect import ProtectApiClient
 from pyunifiprotect.data.nvr import Event
 
-from unifi_protect_backup.utils import VideoQueue, get_camera_name, human_readable_size, run_command, setup_event_logger
+from unifi_protect_backup.utils import (
+    VideoQueue,
+    get_camera_name,
+    human_readable_size,
+    run_command,
+    setup_event_logger,
+    SubprocessException,
+)
 
 
 class VideoUploader:
```
```diff
@@ -74,10 +81,13 @@ class VideoUploader:
                 destination = await self._generate_file_path(event)
                 self.logger.debug(f" Destination: {destination}")
 
-                await self._upload_video(video, destination, self._rclone_args)
-                await self._update_database(event, destination)
+                try:
+                    await self._upload_video(video, destination, self._rclone_args)
+                    await self._update_database(event, destination)
+                    self.logger.debug("Uploaded")
+                except SubprocessException:
+                    self.logger.error(f" Failed to upload file: '{destination}'")
 
-                self.logger.debug("Uploaded")
                 self.current_event = None
 
             except Exception as e:
```
```diff
@@ -99,7 +109,7 @@ class VideoUploader:
         """
         returncode, stdout, stderr = await run_command(f'rclone rcat -vv {rclone_args} "{destination}"', video)
         if returncode != 0:
-            self.logger.error(f" Failed to upload file: '{destination}'")
+            raise SubprocessException(stdout, stderr, returncode)
 
     async def _update_database(self, event: Event, destination: str):
         """Add the backed up event to the database along with where it was backed up to."""
```
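Raising here instead of only logging is what makes the new `try`/`except` in the upload loop work: `_update_database` is skipped on failure, so the event stays missing and is retried later (the 0.10.0 fix). `rclone rcat` reads the file body from stdin, which is how the clip is streamed from memory; a self-contained sketch of that pattern, assuming only that `rclone` is on `PATH` and mirroring the project's `SubprocessException`:

```python
import asyncio


class SubprocessException(Exception):
    """Stand-in mirroring the exception raised by the project's run_command."""


async def upload_video(video: bytes, destination: str) -> None:
    # Pipe the in-memory clip to rclone's stdin; raise on failure so the
    # caller can skip the database write and leave the event retryable.
    proc = await asyncio.create_subprocess_exec(
        "rclone", "rcat", "-vv", destination,
        stdin=asyncio.subprocess.PIPE,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    stdout, stderr = await proc.communicate(video)
    if proc.returncode != 0:
        raise SubprocessException(stdout, stderr, proc.returncode)
```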
```diff
@@ -107,7 +117,7 @@ class VideoUploader:
         assert isinstance(event.end, datetime)
         await self._db.execute(
             "INSERT INTO events VALUES "
-            f"('{event.id}', '{event.type}', '{event.camera_id}',"
+            f"('{event.id}', '{event.type.value}', '{event.camera_id}',"
             f"'{event.start.timestamp()}', '{event.end.timestamp()}')"
         )
 
```
```diff
@@ -147,9 +157,9 @@ class VideoUploader:
         format_context = {
             "event": event,
             "duration_seconds": (event.end - event.start).total_seconds(),
-            "detection_type": f"{event.type} ({' '.join(event.smart_detect_types)})"
+            "detection_type": f"{event.type.value} ({' '.join(event.smart_detect_types)})"
             if event.smart_detect_types
-            else f"{event.type}",
+            else f"{event.type.value}",
             "camera_name": await get_camera_name(self._protect, event.camera_id),
         }
 
```