Compare commits

...

9 Commits

Author SHA1 Message Date
Wietse Wind
8b1805b792 Merge 1c9e2ef147 into 7439ac9bda 2025-01-25 13:43:24 -08:00
Sebastian Goscik
7439ac9bda Bump version: 0.11.0 → 0.12.0 2025-01-18 18:23:33 +00:00
Sebastian Goscik
e3cbcc819e git github action python version parsing 2025-01-18 18:23:33 +00:00
Sebastian Goscik
ccb816ddbc fix bump2version config 2025-01-18 17:19:47 +00:00
Sebastian Goscik
9d2d6558a6 Changelog 2025-01-18 17:18:05 +00:00
Sebastian Goscik
3c5056614c Monkey patch in experimental downloader 2025-01-18 17:07:44 +00:00
Sebastian Goscik
1f18c06e17 Bump dependency versions 2025-01-18 17:07:44 +00:00
Sebastian Goscik
3181080bca Fix issue when --camera isnt specified
Click defaults options with multiple=true to an empty list not None if they are not provided
2025-01-18 16:43:02 +00:00
Wietse Wind
1c9e2ef147 Allow postprocessing binary/script post download 2024-12-23 17:45:33 +01:00
16 changed files with 1165 additions and 931 deletions

View File

@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.11.0
current_version = 0.12.0
commit = True
tag = True
@@ -8,8 +8,8 @@ search = version = "{current_version}"
replace = version = "{new_version}"
[bumpversion:file:unifi_protect_backup/__init__.py]
search = __version__ = '{current_version}'
replace = __version__ = '{new_version}'
search = __version__ = "{current_version}"
replace = __version__ = "{new_version}"
[bumpversion:file:Dockerfile]
search = COPY dist/unifi_protect_backup-{current_version}.tar.gz sdist.tar.gz

View File

@@ -2,24 +2,17 @@
name: release & publish workflow
# Controls when the action will run.
on:
# Triggers the workflow on push events but only for the master branch
push:
tags:
- 'v*'
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
# This workflow contains a single job called "release"
release:
name: Create Release
runs-on: ubuntu-20.04
# Steps represent a sequence of tasks that will be executed as part of the job
steps:
- name: Get version from tag
id: tag_name
@@ -27,7 +20,6 @@ jobs:
echo ::set-output name=current_version::${GITHUB_REF#refs/tags/v}
shell: bash
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v2
- name: Get Changelog Entry
@@ -39,7 +31,7 @@ jobs:
- uses: actions/setup-python@v2
with:
python-version: 3.10
python-version: "3.10"
- name: Install dependencies
run: |

View File

@@ -19,7 +19,7 @@ repos:
# Run the formatter.
- id: ruff-format
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.11.1
rev: v1.14.1
hooks:
- id: mypy
exclude: tests/
@@ -27,3 +27,4 @@ repos:
- types-pytz
- types-cryptography
- types-python-dateutil
- types-aiofiles

View File

@@ -4,11 +4,18 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [0.12.0] - 2024-08-06
### Fixed
## [0.12.0] - 2025-01-18
### Added
- Tool now targets UIProtect instead of pyunifiprotect which should help any lingering auth issues with Unifi OS 4.X
- Python Version bumped to 3.10 (based on UIPortect need)
- (had to make the dev and test dependencies required instead of extras to get poetry to work)
- Python Version bumped to 3.10 (based on UIProtect need)
- The ability to specify only specific cameras to backup
- Re-enabled the experimental downloader after adding a monkey patch for UIProtect to include the unmerged code
- Switched linter to `ruff`
- Added support for SMART_DETECT_LINE events
-
### Fixed
- Unifi now returns unfinished events, this is now handled correctly
- Login attempts now use an exponentially increasing delay to try work around aggressive rate limiting on logins
## [0.11.0] - 2024-06-08
### Added

View File

@@ -7,7 +7,7 @@ LABEL maintainer="ep1cman"
WORKDIR /app
COPY dist/unifi_protect_backup-0.11.0.tar.gz sdist.tar.gz
COPY dist/unifi_protect_backup-0.12.0.tar.gz sdist.tar.gz
# https://github.com/rust-lang/cargo/issues/2808
ENV CARGO_NET_GIT_FETCH_WITH_CLI=true

View File

@@ -131,6 +131,11 @@ Options:
Common usage for this would be to execute a permanent delete
instead of using the recycle bin on a destination. Google Drive
example: `--drive-use-trash=false`
--postprocess-binary TEXT Optional binary or executable script to run after having
downloaded a video. This can e.g. be a bash script with a CURL
command to post-process the video (detection, move, ...). The
script / binary receives the path where the video is persisted
as first and only argument.
--detection-types TEXT A comma separated list of which types of detections to backup.
Valid options are: `motion`, `person`, `vehicle`, `ring`
[default: motion,person,vehicle,ring]
@@ -217,6 +222,7 @@ always take priority over environment variables):
- `RCLONE_DESTINATION`
- `RCLONE_ARGS`
- `RCLONE_PURGE_ARGS`
- `POSTPROCESS_BINARY`
- `IGNORE_CAMERAS`
- `CAMERAS`
- `DETECTION_TYPES`
@@ -303,6 +309,37 @@ such backends.
If you are running on a linux host you can setup `rclone` to use `tmpfs` (which is in RAM) to store its temp files, but this will significantly increase memory usage of the tool.
## Postprocessing
To perform additional detection / cleaning / moving / ... on a video post downloading:
- Use `--postprocess-binary` or env. var: `POSTPROCESS_BINARY`
The binary / executable script receives a first argument with the storage location for the downloaded video. You can easily mount a script from a local filesystem to the container:
```bash
rm -r /tmp/unifi ; docker rmi ghcr.io/ep1cman/unifi-protect-backup ; poetry build && docker buildx build . -t ghcr.io/ep1cman/unifi-protect-backup ;
docker run --rm \
-e POSTPROCESS_BINARY='/postprocess.sh' \
-v '/My/Local/Folder/postprocess.sh':/postprocess.sh \
ghcr.io/ep1cman/unifi-protect-backup
```
The script can be as simple as this (to display the upload path inside the container):
```bash
#!/bin/bash
echo "$1"
```
The logging output will show the stdout and stderr for the postprocess script/binary:
```
Uploaded
-- Postprocessing: 'local:/data/camname/date/vidname.mp4' returned status code: '0'
> STDOUT: /data/camname/date/vidname.mp4
> STDERR:
```
### Running Docker Container (LINUX ONLY)
Add the following arguments to your docker run command:
```

1820
poetry.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,7 @@
[tool]
[tool.poetry]
name = "unifi_protect_backup"
version = "0.11.0"
version = "0.12.0"
homepage = "https://github.com/ep1cman/unifi-protect-backup"
description = "Python tool to backup unifi event clips in realtime."
authors = ["sebastian.goscik <sebastian@goscik.com>"]
@@ -43,6 +43,7 @@ types-python-dateutil = "^2.8.19.10"
bump2version = "^1.0.1"
pre-commit = "^2.12.0"
ruff = "^0.5.7"
types-aiofiles = "^24.1.0.20241221"
[tool.poetry.group.test]
optional = true
@@ -66,6 +67,9 @@ target-version = "py310"
[tool.mypy]
allow_redefinition=true
exclude = [
'unifi_protect_backup/uiprotect_patch.py'
]
[build-system]
requires = ["poetry-core>=1.0.0"]

View File

@@ -2,7 +2,7 @@
__author__ = """sebastian.goscik"""
__email__ = "sebastian@goscik.com"
__version__ = "0.11.0"
__version__ = "0.12.0"
from .downloader import VideoDownloader
from .downloader_experimental import VideoDownloaderExperimental

View File

@@ -90,6 +90,12 @@ def parse_rclone_retention(ctx, param, retention) -> relativedelta:
"be to execute a permanent delete instead of using the recycle bin on a destination. "
"Google Drive example: `--drive-use-trash=false`",
)
@click.option(
"--postprocess-binary",
default="",
envvar="POSTPROCESS_BINARY",
help="Optional path to binary to postprocess the processed video, gets video destination path as argument."
)
@click.option(
"--detection-types",
envvar="DETECTION_TYPES",

View File

@@ -85,8 +85,6 @@ class VideoDownloaderExperimental:
else:
self._has_ffprobe = False
raise RuntimeError("The `uiprotect` library is currently missing the features for this to work.")
async def start(self):
"""Main loop."""
self.logger.info("Starting Downloader")

View File

@@ -3,7 +3,7 @@
import asyncio
import logging
from time import sleep
from typing import List, Optional
from typing import List
from uiprotect.api import ProtectApiClient
from uiprotect.websocket import WebsocketState
@@ -23,7 +23,7 @@ class EventListener:
protect: ProtectApiClient,
detection_types: List[str],
ignore_cameras: List[str],
cameras: Optional[List[str]] = None,
cameras: List[str],
):
"""Init.
@@ -32,7 +32,7 @@ class EventListener:
protect (ProtectApiClient): UniFI Protect API client to use
detection_types (List[str]): Desired Event detection types to look for
ignore_cameras (List[str]): Cameras IDs to ignore events from
cameras (Optional[List[str]]): Cameras IDs to ONLY include events from
cameras (List[str]): Cameras IDs to ONLY include events from
"""
self._event_queue: asyncio.Queue = event_queue
self._protect: ProtectApiClient = protect
@@ -40,7 +40,7 @@ class EventListener:
self._unsub_websocketstate = None
self.detection_types: List[str] = detection_types
self.ignore_cameras: List[str] = ignore_cameras
self.cameras: Optional[List[str]] = cameras
self.cameras: List[str] = cameras
async def start(self):
"""Main Loop."""
@@ -63,7 +63,7 @@ class EventListener:
return
if msg.new_obj.camera_id in self.ignore_cameras:
return
if self.cameras is not None and msg.new_obj.camera_id not in self.cameras:
if self.cameras and msg.new_obj.camera_id not in self.cameras:
return
if "end" not in msg.changed_data:
return

View File

@@ -3,7 +3,7 @@
import asyncio
import logging
from datetime import datetime
from typing import AsyncIterator, List, Optional
from typing import AsyncIterator, List
import aiosqlite
from dateutil.relativedelta import relativedelta
@@ -28,8 +28,8 @@ class MissingEventChecker:
uploader: VideoUploader,
retention: relativedelta,
detection_types: List[str],
ignore_cameras: List[str] = [],
cameras: Optional[List[str]] = None,
ignore_cameras: List[str],
cameras: List[str],
interval: int = 60 * 5,
) -> None:
"""Init.
@@ -43,7 +43,7 @@ class MissingEventChecker:
retention (relativedelta): Retention period to limit search window
detection_types (List[str]): Detection types wanted to limit search
ignore_cameras (List[str]): Ignored camera IDs to limit search
cameras (Optional[List[str]]): Included (ONLY) camera IDs to limit search
cameras (List[str]): Included (ONLY) camera IDs to limit search
interval (int): How frequently, in seconds, to check for missing events.
"""
self._protect: ProtectApiClient = protect
@@ -54,7 +54,7 @@ class MissingEventChecker:
self.retention: relativedelta = retention
self.detection_types: List[str] = detection_types
self.ignore_cameras: List[str] = ignore_cameras
self.cameras: Optional[List[str]] = cameras
self.cameras: List[str] = cameras
self.interval: int = interval
async def _get_missing_events(self) -> AsyncIterator[Event]:
@@ -87,7 +87,7 @@ class MissingEventChecker:
break # No completed events to process
# Next chunks start time should be the end of the oldest complete event in the current chunk
start_time = max([event.end for event in unifi_events.values()])
start_time = max([event.end for event in unifi_events.values() if event.end is not None])
# Get list of events that have been backed up from the database
@@ -116,7 +116,7 @@ class MissingEventChecker:
return False # This event is still on-going
if event.camera_id in self.ignore_cameras:
return False
if self.cameras is not None and event.camera_id not in self.cameras:
if self.cameras and event.camera_id not in self.cameras:
return False
if event.type is EventType.MOTION and "motion" not in self.detection_types:
return False

View File

@@ -0,0 +1,135 @@
import enum
from datetime import datetime
from pathlib import Path
from typing import Any, Optional
import aiofiles
from uiprotect.data import Version
from uiprotect.exceptions import BadRequest
from uiprotect.utils import to_js_time
# First, let's add the new VideoExportType enum
class VideoExportType(str, enum.Enum):
    """Export modes accepted by the Unifi Protect ``video/prepare`` endpoint.

    Inherits from ``str`` so members compare equal to (and serialize as)
    their plain string values.
    """

    # Timelapse export (used when an explicit fps is requested).
    TIMELAPSE = "timelapse"
    # Normal ("rotating") export of the recorded footage.
    ROTATING = "rotating"
def monkey_patch_experimental_downloader():
    """Monkey patch ``uiprotect.api.ProtectApiClient`` with the experimental
    ``video/prepare`` + ``video/download`` endpoints.

    This backports the functionality of an unmerged upstream PR
    (https://github.com/uilibs/uiprotect/pull/249) until it lands in a
    released version of ``uiprotect``.

    Adds to ``ProtectApiClient``:
      - ``NEW_DOWNLOAD_VERSION`` class constant (minimum NVR version).
      - ``_validate_channel_id``, ``prepare_camera_video`` and
        ``download_camera_video`` methods.
    """
    from uiprotect.api import ProtectApiClient

    # Minimum Unifi Protect version that supports the prepare/download flow.
    ProtectApiClient.NEW_DOWNLOAD_VERSION = Version("4.0.0")

    async def _validate_channel_id(self, camera_id: str, channel_index: int) -> None:
        """Raise ``BadRequest`` if the camera/channel combination is unknown.

        Args:
            camera_id (str): ID of the camera to validate.
            channel_index (int): Index of the channel on that camera.

        Raises:
            BadRequest: If the camera ID or channel index does not exist.
        """
        # NOTE(review): refreshes via update() and then reads the private
        # `_bootstrap` attribute -- assumes update() populates it; confirm
        # against the uiprotect client implementation.
        if self._bootstrap is None:
            await self.update()
        try:
            camera = self._bootstrap.cameras[camera_id]
            camera.channels[channel_index]
        except (IndexError, AttributeError, KeyError) as e:
            raise BadRequest(f"Invalid input: {e}") from e

    async def prepare_camera_video(
        self,
        camera_id: str,
        start: datetime,
        end: datetime,
        channel_index: int = 0,
        validate_channel_id: bool = True,
        fps: Optional[int] = None,
        filename: Optional[str] = None,
    ) -> Optional[dict[str, Any]]:
        """Ask the NVR to prepare an export of ``camera_id`` between ``start`` and ``end``.

        Args:
            camera_id (str): ID of the camera to export from.
            start (datetime): Start of the export window.
            end (datetime): End of the export window.
            channel_index (int): Camera channel to export (3 selects lens 2).
            validate_channel_id (bool): Validate the camera/channel first.
            fps (Optional[int]): If > 0, request a timelapse export at this rate.
            filename (Optional[str]): Server-side filename; generated if omitted.

        Returns:
            Optional[dict[str, Any]]: JSON response of the ``video/prepare`` endpoint.

        Raises:
            ValueError: If the NVR is older than ``NEW_DOWNLOAD_VERSION``.
            BadRequest: If validation is enabled and the camera/channel is invalid.
        """
        if self.bootstrap.nvr.version < self.NEW_DOWNLOAD_VERSION:
            # Fixed wording (was "only support from").
            raise ValueError("This method is only supported from Unifi Protect version >= 4.0.0.")

        if validate_channel_id:
            await self._validate_channel_id(camera_id, channel_index)

        params = {
            "camera": camera_id,
            "start": to_js_time(start),
            "end": to_js_time(end),
        }

        # Channel 3 maps to lens 2; every other channel is passed through.
        if channel_index == 3:
            params.update({"lens": 2})
        else:
            params.update({"channel": channel_index})

        # A positive fps selects a timelapse export, otherwise a rotating one.
        if fps is not None and fps > 0:
            params["fps"] = fps
            params["type"] = VideoExportType.TIMELAPSE.value
        else:
            params["type"] = VideoExportType.ROTATING.value

        if not filename:
            start_str = start.strftime("%m-%d-%Y, %H.%M.%S %Z")
            end_str = end.strftime("%m-%d-%Y, %H.%M.%S %Z")
            filename = f"{camera_id} {start_str} - {end_str}.mp4"
        params["filename"] = filename

        return await self.api_request(
            "video/prepare",
            params=params,
            raise_exception=True,
        )

    async def download_camera_video(
        self,
        camera_id: str,
        filename: str,
        output_file: Optional[Path] = None,
        iterator_callback: Optional[callable] = None,
        progress_callback: Optional[callable] = None,
        chunk_size: int = 65536,
    ) -> Optional[bytes]:
        """Download a previously prepared video export.

        If no ``output_file`` and no callbacks are given, the whole payload is
        returned as bytes. Otherwise the response is streamed in
        ``chunk_size`` chunks to the file and/or callbacks and ``None`` is
        returned.

        Args:
            camera_id (str): ID of the camera the export belongs to.
            filename (str): Server-side filename returned by ``prepare_camera_video``.
            output_file (Optional[Path]): Local path to stream the video to.
            iterator_callback (Optional[callable]): Async ``(total, chunk)`` consumer.
            progress_callback (Optional[callable]): Async progress reporter.
            chunk_size (int): Streaming chunk size in bytes.

        Returns:
            Optional[bytes]: The video bytes, or ``None`` when streaming.

        Raises:
            ValueError: If the NVR is older than ``NEW_DOWNLOAD_VERSION``.
        """
        if self.bootstrap.nvr.version < self.NEW_DOWNLOAD_VERSION:
            # Fixed wording (was "only support from").
            raise ValueError("This method is only supported from Unifi Protect version >= 4.0.0.")

        params = {
            "camera": camera_id,
            "filename": filename,
        }

        # Simple case: no streaming consumers, return the whole payload at once.
        if iterator_callback is None and progress_callback is None and output_file is None:
            return await self.api_request_raw(
                "video/download",
                params=params,
                raise_exception=False,
            )

        r = await self.request(
            "get",
            f"{self.api_path}video/download",
            auto_close=False,
            timeout=0,
            params=params,
        )
        # try/finally ensures the response is released even if streaming
        # fails part-way (the original leaked `r` on error).
        try:
            if output_file is not None:
                async with aiofiles.open(output_file, "wb") as output:

                    async def callback(total: int, chunk: Optional[bytes]) -> None:
                        # Fan out each chunk to the caller's iterator and the file.
                        if iterator_callback is not None:
                            await iterator_callback(total, chunk)
                        if chunk is not None:
                            await output.write(chunk)

                    await self._stream_response(r, chunk_size, callback, progress_callback)
            else:
                await self._stream_response(
                    r,
                    chunk_size,
                    iterator_callback,
                    progress_callback,
                )
        finally:
            r.close()
        return None

    # Patch the methods into the class.
    ProtectApiClient._validate_channel_id = _validate_channel_id
    ProtectApiClient.prepare_camera_video = prepare_camera_video
    ProtectApiClient.download_camera_video = download_camera_video

View File

@@ -29,11 +29,19 @@ from unifi_protect_backup.utils import (
setup_logging,
)
from unifi_protect_backup.uiprotect_patch import monkey_patch_experimental_downloader
logger = logging.getLogger(__name__)
# TODO: https://github.com/cjrh/aiorun#id6 (smart shield)
# We have been waiting for a long time for this PR to get merged
# https://github.com/uilibs/uiprotect/pull/249
# Since it has not progressed, we will for now patch in the functionality ourselves
monkey_patch_experimental_downloader()
async def create_database(path: str):
"""Creates sqlite database and creates the events abd backups tables."""
db = await aiosqlite.connect(path)
@@ -62,6 +70,7 @@ class UnifiProtectBackup:
retention: relativedelta,
rclone_args: str,
rclone_purge_args: str,
postprocess_binary: str,
detection_types: List[str],
ignore_cameras: List[str],
cameras: List[str],
@@ -95,6 +104,7 @@ class UnifiProtectBackup:
rclone_args (str): A bandwidth limit which is passed to the `--bwlimit` argument of
`rclone` (https://rclone.org/docs/#bwlimit-bandwidth-spec)
rclone_purge_args (str): Optional extra arguments to pass to `rclone delete` directly.
postprocess_binary (str): Optional path to a binary that gets called to postprocess, with download location as argument.
detection_types (List[str]): List of which detection types to backup.
ignore_cameras (List[str]): List of camera IDs for which to not backup events.
cameras (List[str]): List of ONLY camera IDs for which to backup events.
@@ -134,6 +144,7 @@ class UnifiProtectBackup:
logger.debug(f" {retention=}")
logger.debug(f" {rclone_args=}")
logger.debug(f" {rclone_purge_args=}")
logger.debug(f" {postprocess_binary=}")
logger.debug(f" {ignore_cameras=}")
logger.debug(f" {cameras=}")
logger.debug(f" {verbose=}")
@@ -152,6 +163,7 @@ class UnifiProtectBackup:
self.retention = retention
self.rclone_args = rclone_args
self.rclone_purge_args = rclone_purge_args
self.postprocess_binary = postprocess_binary
self.file_structure_format = file_structure_format
self.address = address
@@ -274,6 +286,7 @@ class UnifiProtectBackup:
self.file_structure_format,
self._db,
self.color_logging,
self.postprocess_binary,
)
tasks.append(uploader.start())

View File

@@ -34,6 +34,7 @@ class VideoUploader:
file_structure_format: str,
db: aiosqlite.Connection,
color_logging: bool,
postprocess_binary: str,
):
"""Init.
@@ -45,11 +46,13 @@ class VideoUploader:
file_structure_format (str): format string for how to structure the uploaded files
db (aiosqlite.Connection): Async SQlite database connection
color_logging (bool): Whether or not to add color to logging output
postprocess_binary (str): Optional postprocess binary path (output location as arg)
"""
self._protect: ProtectApiClient = protect
self.upload_queue: VideoQueue = upload_queue
self._rclone_destination: str = rclone_destination
self._rclone_args: str = rclone_args
self._postprocess_binary: str = postprocess_binary
self._file_structure_format: str = file_structure_format
self._db: aiosqlite.Connection = db
self.current_event = None
@@ -82,9 +85,16 @@ class VideoUploader:
self.logger.debug(f" Destination: {destination}")
try:
await self._upload_video(video, destination, self._rclone_args)
await self._upload_video(video, destination, self._rclone_args, self._postprocess_binary)
await self._update_database(event, destination)
self.logger.debug("Uploaded")
# Postprocess
if self._postprocess_binary:
returncode_postprocess, stdout_postprocess, stderr_postprocess = await run_command(f'"{self._postprocess_binary}" "{destination}"')
self.logger.debug(f" -- Postprocessing: '{destination}' returned status code: '{returncode_postprocess}'")
self.logger.debug(f" > STDOUT: {stdout_postprocess.strip()}")
self.logger.debug(f" > STDERR: {stderr_postprocess.strip()}")
except SubprocessException:
self.logger.error(f" Failed to upload file: '{destination}'")
@@ -93,7 +103,7 @@ class VideoUploader:
except Exception as e:
self.logger.error(f"Unexpected exception occurred, abandoning event {event.id}:", exc_info=e)
async def _upload_video(self, video: bytes, destination: pathlib.Path, rclone_args: str):
async def _upload_video(self, video: bytes, destination: pathlib.Path, rclone_args: str, postprocess_binary: str):
"""Upload video using rclone.
In order to avoid writing to disk, the video file data is piped directly
@@ -103,6 +113,7 @@ class VideoUploader:
video (bytes): The data to be written to the file
destination (pathlib.Path): Where rclone should write the file
rclone_args (str): Optional extra arguments to pass to `rclone`
postprocess_binary (str): Optional extra path to postprocessing binary
Raises:
RuntimeError: If rclone returns a non-zero exit code