Compare commits

...

14 Commits

Author            SHA1        Message                                                  Date

Sebastian Goscik  4b4cb86749  Bump version: 0.12.0 → 0.13.0                            2025-04-09 13:01:45 +01:00
Sebastian Goscik  c091fa4f92  changelog                                                2025-04-09 13:01:45 +01:00
Sebastian Goscik  2bf90b6763  Update readme with parallel downloads                    2025-04-09 11:27:45 +01:00
Sebastian Goscik  f275443a7a  Fix issue with duplicated logging with parallel loggers  2025-04-09 11:25:34 +01:00
Sebastian Goscik  3a43c1b670  Enable multiple parallel uploaders                       2025-04-09 11:25:34 +01:00
Sebastian Goscik  e0421c1dd1  Add all smart detection types                            2025-04-09 02:37:10 +01:00
Sebastian Goscik  4ee70e6d4b  Updating dev dependencies                                2025-04-09 02:25:10 +01:00
Sebastian Goscik  ce2993624f  Correct CAMERAS envvar                                   2025-04-09 02:12:52 +01:00
Sebastian Goscik  cec1f69d8d  Bump uiprotect                                           2025-04-09 02:06:38 +01:00
Sebastian Goscik  c07fb30fff  update pre-commit                                        2025-04-09 01:54:57 +01:00
Sebastian Goscik  1de9b9a757  [actions] Fix CRLF issue on windows                      2025-04-09 01:51:29 +01:00
Sebastian Goscik  3ec69a7a97  [actions] Fix uv install on windows                      2025-04-09 01:47:06 +01:00
Sebastian Goscik  855607fa29  Migrate project to use uv                                2025-04-09 01:40:24 +01:00
Sebastian Goscik  e11828bd59  Update makfile to use ruff                               2025-04-08 23:54:24 +01:00
17 changed files with 1874 additions and 2367 deletions

File: bumpversion config

@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.12.0
+current_version = 0.13.0
 commit = True
 tag = True

File: dev workflow (GitHub Actions)

@@ -1,90 +1,108 @@
-# This is a basic workflow to help you get started with Actions
-name: Test and Build
+name: dev workflow

 # Controls when the action will run.
 on:
-  # Triggers the workflow on push events but only for the dev branch
   push:
-    branches: [ dev ]
+    branches-ignore:
+      - main
   pull_request:

   # Allows you to run this workflow manually from the Actions tab
   workflow_dispatch:

 # A workflow run is made up of one or more jobs that can run sequentially or in parallel
 jobs:
   # This workflow contains a single job called "test"
   test:
     # The type of runner that the job will run on
     strategy:
       matrix:
-        python-versions: [3.9]
-        os: [ubuntu-18.04, macos-latest, windows-latest]
+        python-versions: ["3.10", "3.11", "3.12", "3.13"]
+        os: [ubuntu-latest, macos-latest, windows-latest]
     runs-on: ${{ matrix.os }}

     # Steps represent a sequence of tasks that will be executed as part of the job
     steps:
-      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v2
+      - name: Configure Git to maintain line endings
+        run: |
+          git config --global core.autocrlf false
+          git config --global core.eol lf
+      - name: Checkout code
+        uses: actions/checkout@v4
+      - name: Set up Python
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-versions }}
-      - name: Install dependencies
+      - name: Install uv (Unix)
+        if: runner.os != 'Windows'
         run: |
-          python -m pip install --upgrade pip
-          pip install poetry tox tox-gh-actions
+          curl -LsSf https://astral.sh/uv/install.sh | sh
+          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
-      - name: test with tox
-        run:
-          tox
+      - name: Install uv (Windows)
+        if: runner.os == 'Windows'
+        run: |
+          iwr -useb https://astral.sh/uv/install.ps1 | iex
+          echo "$HOME\.cargo\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
-      - name: list files
-        run: ls -l .
-      - uses: codecov/codecov-action@v1
-        with:
-          fail_ci_if_error: true
-          files: coverage.xml
+      - name: Install dev dependencies
+        run: |
+          uv sync --dev
+      - name: Run pre-commit
+        run: uv run pre-commit run --all-files
+      - name: Run pytest
+        run: uv run pytest
+      - name: Build
+        run: uv build

   dev_container:
-    name: Create dev container
-    runs-on: ubuntu-20.04
-    if: github.event_name != 'pull_request'
-    # Steps represent a sequence of tasks that will be executed as part of the job
+    name: Create dev container
+    needs: test
+    if: github.ref == 'refs/heads/dev'
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      packages: write
     steps:
-      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v2
+      - name: Checkout code
+        uses: actions/checkout@v4
+      - name: Set up Python
+        uses: actions/setup-python@v5
         with:
-          python-version: 3.10
+          python-version: '3.12'
-      - name: Install dependencies
+      - name: Install uv (Unix)
+        if: runner.os != 'Windows'
         run: |
-          python -m pip install --upgrade pip
-          pip install poetry tox tox-gh-actions
+          curl -LsSf https://astral.sh/uv/install.sh | sh
+          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
-      - name: Build wheels and source tarball
-        run: >-
-          poetry build
+      - name: Install uv (Windows)
+        if: runner.os == 'Windows'
+        run: |
+          iwr -useb https://astral.sh/uv/install.ps1 | iex
+          echo "$HOME\.cargo\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
+      - name: Build
+        run: uv build
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v1
+        uses: docker/setup-qemu-action@v3
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v1
-      - name: Log in to container registry
-        uses: docker/login-action@v2
+        uses: docker/setup-buildx-action@v3
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
-      - name: Build and push dev
-        uses: docker/build-push-action@v2
+      - name: Build and push Docker image
+        uses: docker/build-push-action@v5
         with:
           context: .
           platforms: linux/amd64,linux/arm64

File: release & publish workflow (GitHub Actions)

@@ -1,6 +1,6 @@
 # Publish package on main branch if it's tagged with 'v*'
-name: release & publish workflow
+name: Release & Publish Workflow

 on:
   push:
@@ -11,16 +11,17 @@ on:
 jobs:
   release:
     name: Create Release
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-latest
     steps:
       - name: Get version from tag
         id: tag_name
         run: |
-          echo ::set-output name=current_version::${GITHUB_REF#refs/tags/v}
+          echo "current_version=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT
         shell: bash
-      - uses: actions/checkout@v2
+      - name: Checkout code
+        uses: actions/checkout@v4
       - name: Get Changelog Entry
         id: changelog_reader
@@ -29,56 +30,57 @@ jobs:
           version: ${{ steps.tag_name.outputs.current_version }}
           path: ./CHANGELOG.md
-      - uses: actions/setup-python@v2
+      - name: Set up Python
+        uses: actions/setup-python@v5
         with:
           python-version: "3.10"
-      - name: Install dependencies
+      - name: Install uv
         run: |
-          python -m pip install --upgrade pip
-          pip install poetry
+          curl -LsSf https://astral.sh/uv/install.sh | sh
+          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
       - name: Build wheels and source tarball
-        run: >-
-          poetry build
+        run: uv build
-      - name: show temporary files
-        run: >-
-          ls -lR
+      - name: Show build artifacts
+        run: ls -lR dist/
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v1
+        uses: docker/setup-qemu-action@v3
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v1
+        uses: docker/setup-buildx-action@v3
       - name: Log in to container registry
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
-      - name: Build and push dev
-        uses: docker/build-push-action@v2
+      - name: Build and push container
+        uses: docker/build-push-action@v5
         with:
           context: .
           platforms: linux/amd64,linux/arm64
           push: true
-          tags: ghcr.io/${{ github.repository }}:${{ steps.tag_name.outputs.current_version }}, ghcr.io/${{ github.repository }}:latest
+          tags: |
+            ghcr.io/${{ github.repository }}:${{ steps.tag_name.outputs.current_version }}
+            ghcr.io/${{ github.repository }}:latest
-      - name: create github release
+      - name: Create GitHub release
         id: create_release
         uses: softprops/action-gh-release@v1
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
           body: ${{ steps.changelog_reader.outputs.changes }}
-          files: dist/*.whl
+          files: dist/*
           draft: false
           prerelease: false
-      - name: publish to PyPI
+      - name: Publish to PyPI
         uses: pypa/gh-action-pypi-publish@release/v1
         with:
           user: __token__

File: .pre-commit-config.yaml

@@ -5,7 +5,7 @@ repos:
       - id: forbid-crlf
       - id: remove-crlf
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v3.4.0
+    rev: v5.0.0
     hooks:
       - id: check-merge-conflict
      - id: check-yaml

File: CHANGELOG.md

@@ -4,6 +4,16 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## [0.13.0] - 2025-04-09
+### Added
+- Parallel uploaders are now supported
+- All smart detection types are now supported
+- Migrated the project from poetry to uv
+### Fixed
+- Corrected the env var for selecting which cameras to back up: ONLY_CAMERAS -> CAMERAS
+- Bumped to the latest uiprotect library to fix an issue when UniFi Access devices are present
+
 ## [0.12.0] - 2025-01-18
 ### Added
 - Tool now targets UIProtect instead of pyunifiprotect which should help any lingering auth issues with Unifi OS 4.X

File: CONTRIBUTING.md

@@ -55,12 +55,11 @@ Ready to contribute? Here's how to set up `unifi-protect-backup` for local devel
    ```
    $ git clone git@github.com:your_name_here/unifi-protect-backup.git
    ```
-3. Ensure [poetry](https://python-poetry.org/docs/) is installed.
-4. Install dependencies and start your virtualenv:
+3. Ensure [uv](https://docs.astral.sh/uv/) is installed.
+4. Create virtual environment and install dependencies:
    ```
-   $ poetry install --with dev,test
-   $ poetry shell
+   $ uv install --dev
    ```
 5. Create a branch for local development:
@@ -75,21 +74,21 @@ Ready to contribute? Here's how to set up `unifi-protect-backup` for local devel
    be inside the `poetry shell` virtualenv or run it via poetry:
    ```
-   $ poetry run unifi-protect-backup {args}
+   $ uv run unifi-protect-backup {args}
    ```
 7. Install pre-commit git hooks to ensure all code commit to the repository
    is formatted correctly and meets coding standards:
    ```
-   $ poetry run pre-commit install
+   $ uv run pre-commit install
    ```
 8. When you're done making changes, check that your changes pass the
    tests:
    ```
-   $ poetry run pytest
+   $ uv run pytest
    ```
 8. Commit your changes and push your branch to GitHub:
@@ -117,7 +116,7 @@ Before you submit a pull request, check that it meets these guidelines:
 ## Tips
 ```
-$ poetry run pytest tests/test_unifi_protect_backup.py
+$ uv run pytest tests/test_unifi_protect_backup.py
 ```
 To run a subset of tests.
@@ -130,7 +129,7 @@ Make sure all your changes are committed (including an entry in CHANGELOG.md).
 Then run:
 ```
-$ poetry run bump2version patch # possible: major / minor / patch
+$ uv run bump2version patch # possible: major / minor / patch
 $ git push
 $ git push --tags
 ```

File: Dockerfile

@@ -7,7 +7,7 @@ LABEL maintainer="ep1cman"
 WORKDIR /app

-COPY dist/unifi_protect_backup-0.12.0.tar.gz sdist.tar.gz
+COPY dist/unifi_protect_backup-0.13.0.tar.gz sdist.tar.gz

 # https://github.com/rust-lang/cargo/issues/2808
 ENV CARGO_NET_GIT_FETCH_WITH_CLI=true

File: README.md

@@ -203,6 +203,7 @@ Options:
   --experimental-downloader    If set, a new experimental download mechanism will be used to match
                                what the web UI does. This might be more stable if you are experiencing
                                a lot of failed downloads with the default downloader.  [default: False]
+  --parallel-uploads INTEGER   Max number of parallel uploads to allow  [default: 1]
   --help                       Show this message and exit.
 ```
@@ -230,6 +231,7 @@ always take priority over environment variables):
 - `DOWNLOAD_RATELIMIT`
 - `MAX_EVENT_LENGTH`
 - `EXPERIMENTAL_DOWNLOADER`
+- `PARALLEL_UPLOADS`

 ## File path formatting

File: Makefile

@@ -6,11 +6,10 @@ container_arches ?= linux/amd64,linux/arm64
 test: format lint unittest

 format:
-	isort $(sources) tests
-	black $(sources) tests
+	ruff format $(sources) tests

 lint:
-	flake8 $(sources) tests
+	ruff check $(sources) tests
 	mypy $(sources) tests

 unittest:
@@ -29,5 +28,5 @@ clean:
 	rm -rf coverage.xml .coverage

 docker:
-	poetry build
+	uv build
 	docker buildx build . --platform $(container_arches) -t $(container_name) --push

File: poetry.lock (generated, 2195 lines changed)

File diff suppressed because it is too large

File: pyproject.toml

@@ -1,61 +1,62 @@
-[tool]
-[tool.poetry]
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"

+[project]
 name = "unifi_protect_backup"
-version = "0.12.0"
-homepage = "https://github.com/ep1cman/unifi-protect-backup"
+version = "0.13.0"
 description = "Python tool to backup unifi event clips in realtime."
-authors = ["sebastian.goscik <sebastian@goscik.com>"]
 readme = "README.md"
-license = "MIT"
-classifiers=[
-    'Development Status :: 5 - Production/Stable',
-    'Intended Audience :: Information Technology',
-    'License :: OSI Approved :: MIT License',
-    'Natural Language :: English',
-    'Programming Language :: Python :: 3',
-    'Programming Language :: Python :: 3.10',
-]
-packages = [
-    { include = "unifi_protect_backup" },
-    { include = "tests", format = "sdist" },
-]
+license = {text = "MIT"}
+authors = [
+    {name = "sebastian.goscik", email = "sebastian@goscik.com"}
+]
+classifiers = [
+    "Development Status :: 5 - Production/Stable",
+    "Intended Audience :: Information Technology",
+    "License :: OSI Approved :: MIT License",
+    "Natural Language :: English",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3.10",
+]
+requires-python = ">=3.10.0,<4.0"
+dependencies = [
+    "click==8.0.1",
+    "aiorun>=2023.7.2",
+    "aiosqlite>=0.17.0",
+    "python-dateutil>=2.8.2",
+    "apprise>=1.5.0",
+    "expiring-dict>=1.1.0",
+    "async-lru>=2.0.4",
+    "aiolimiter>=1.1.0",
+    "uiprotect==7.5.2",
+    "aiohttp==3.11.16",
+]

-[tool.poetry.dependencies]
-python = ">=3.10.0,<4.0"
-click = "8.0.1"
-aiorun = "^2023.7.2"
-aiosqlite = "^0.17.0"
-python-dateutil = "^2.8.2"
-apprise = "^1.5.0"
-expiring-dict = "^1.1.0"
-async-lru = "^2.0.4"
-aiolimiter = "^1.1.0"
-uiprotect = "^6.3.1"
+[project.urls]
+Homepage = "https://github.com/ep1cman/unifi-protect-backup"

-[tool.poetry.group.dev]
-optional = true
+[project.scripts]
+unifi-protect-backup = "unifi_protect_backup.cli:main"

-[tool.poetry.group.dev.dependencies]
-mypy = "^1.11.1"
-types-pytz = "^2021.3.5"
-types-cryptography = "^3.3.18"
-types-python-dateutil = "^2.8.19.10"
-bump2version = "^1.0.1"
-pre-commit = "^2.12.0"
-ruff = "^0.5.7"
-types-aiofiles = "^24.1.0.20241221"
+[dependency-groups]
+dev = [
+    "mypy>=1.15.0",
+    "types-pytz>=2021.3.5",
+    "types-cryptography>=3.3.18",
+    "types-python-dateutil>=2.8.19.10",
+    "types-aiofiles>=24.1.0.20241221",
+    "bump2version>=1.0.1",
+    "pre-commit>=4.2.0",
+    "ruff>=0.11.4",
+    "pytest>=8.3.5",
+]

-[tool.poetry.group.test]
-optional = true
+[tool.hatch.build.targets.wheel]
+packages = ["unifi_protect_backup"]

-[tool.poetry.group.test.dependencies]
-pytest = "^6.2.4"
-pytest-cov = "^2.12.0"
-tox = "^3.20.1"
-tox-asdf = "^0.1.0"
-
-[tool.poetry.scripts]
-unifi-protect-backup = 'unifi_protect_backup.cli:main'
+[tool.hatch.build.targets.sdist]
+include = ["unifi_protect_backup", "tests"]

 [tool.ruff]
 line-length = 120
@@ -66,11 +67,10 @@ target-version = "py310"

 [tool.ruff.format]

 [tool.mypy]
-allow_redefinition=true
+allow_redefinition = true
 exclude = [
     'unifi_protect_backup/uiprotect_patch.py'
 ]

-[build-system]
-requires = ["poetry-core>=1.0.0"]
-build-backend = "poetry.core.masonry.api"
+[tool.uv]
+default-groups = []
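
The `[project.scripts]` table replaces `[tool.poetry.scripts]`, but both resolve to the same console entry point. A quick way to sanity-check the mapping, as a hedged sketch using only the standard library (assumes the package has been installed into the current environment, e.g. via `uv sync`):

```python
from importlib.metadata import entry_points

# Console scripts are ordinary entry points in the "console_scripts" group.
for ep in entry_points(group="console_scripts"):
    if ep.name == "unifi-protect-backup":
        print(ep.value)  # expected: unifi_protect_backup.cli:main
```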

File: unifi_protect_backup/__init__.py

@@ -2,7 +2,7 @@
 __author__ = """sebastian.goscik"""
 __email__ = "sebastian@goscik.com"
-__version__ = "0.12.0"
+__version__ = "0.13.0"

 from .downloader import VideoDownloader
 from .downloader_experimental import VideoDownloaderExperimental

File: unifi_protect_backup/cli.py

@@ -7,11 +7,13 @@ import click
 from aiorun import run  # type: ignore
 from dateutil.relativedelta import relativedelta
+from uiprotect.data.types import SmartDetectObjectType

 from unifi_protect_backup import __version__
 from unifi_protect_backup.unifi_protect_backup_core import UnifiProtectBackup
 from unifi_protect_backup.utils import human_readable_to_float

-DETECTION_TYPES = ["motion", "person", "vehicle", "ring", "line"]
+DETECTION_TYPES = ["motion", "ring", "line"] + SmartDetectObjectType.values()

 def _parse_detection_types(ctx, param, value):
@@ -113,7 +115,7 @@ def parse_rclone_retention(ctx, param, retention) -> relativedelta:
     "--camera",
     "cameras",
     multiple=True,
-    envvar="ONLY_CAMERAS",
+    envvar="CAMERAS",
     help="IDs of *ONLY* cameras for which events should be backed up. Use multiple times to include "
     "multiple IDs. If being set as an environment variable the IDs should be separated by whitespace. "
     "Alternatively, use a Unifi user with a role which has access restricted to the subset of cameras "
@@ -237,6 +239,14 @@ what the web UI does. This might be more stable if you are experiencing
 a lot of failed downloads with the default downloader.
 """,
 )
+@click.option(
+    "--parallel-uploads",
+    default=1,
+    show_default=True,
+    envvar="PARALLEL_UPLOADS",
+    type=int,
+    help="Max number of parallel uploads to allow",
+)
 def main(**kwargs):
     """A Python based tool for backing up Unifi Protect event clips as they occur."""

File: missing event checker module

@@ -25,7 +25,7 @@ class MissingEventChecker:
         db: aiosqlite.Connection,
         download_queue: asyncio.Queue,
         downloader: VideoDownloader,
-        uploader: VideoUploader,
+        uploaders: List[VideoUploader],
         retention: relativedelta,
         detection_types: List[str],
         ignore_cameras: List[str],
@@ -39,7 +39,7 @@ class MissingEventChecker:
             db (aiosqlite.Connection): Async SQLite database to check for missing events
             download_queue (asyncio.Queue): Download queue to check for on-going downloads
             downloader (VideoDownloader): Downloader to check for on-going downloads
-            uploader (VideoUploader): Uploader to check for on-going uploads
+            uploaders (List[VideoUploader]): Uploaders to check for on-going uploads
             retention (relativedelta): Retention period to limit search window
             detection_types (List[str]): Detection types wanted to limit search
             ignore_cameras (List[str]): Ignored camera IDs to limit search
@@ -50,7 +50,7 @@ class MissingEventChecker:
         self._db: aiosqlite.Connection = db
         self._download_queue: asyncio.Queue = download_queue
         self._downloader: VideoDownloader = downloader
-        self._uploader: VideoUploader = uploader
+        self._uploaders: List[VideoUploader] = uploaders
         self.retention: relativedelta = retention
         self.detection_types: List[str] = detection_types
         self.ignore_cameras: List[str] = ignore_cameras
@@ -102,10 +102,11 @@ class MissingEventChecker:
         if current_download is not None:
             downloading_event_ids.add(current_download.id)

-        uploading_event_ids = {event.id for event, video in self._uploader.upload_queue._queue}  # type: ignore
-        current_upload = self._uploader.current_event
-        if current_upload is not None:
-            uploading_event_ids.add(current_upload.id)
+        uploading_event_ids = {event.id for event, video in self._downloader.upload_queue._queue}  # type: ignore
+        for uploader in self._uploaders:
+            current_upload = uploader.current_event
+            if current_upload is not None:
+                uploading_event_ids.add(current_upload.id)

         missing_event_ids = set(unifi_events.keys()) - (db_event_ids | downloading_event_ids | uploading_event_ids)
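
The missing-event check is plain set arithmetic: everything Unifi Protect reports for the retention window, minus what is already in the database, currently downloading, or currently uploading; with parallel uploaders, the last set now has to be gathered from every uploader. A simplified sketch with plain sets standing in for the real queues and database (all names illustrative):

```python
# All event IDs Unifi Protect reports for the retention window
unifi_event_ids = {"e1", "e2", "e3", "e4", "e5"}

# Already backed up (database) and currently downloading
db_event_ids = {"e1"}
downloading_event_ids = {"e2"}

# With parallel uploaders, in-flight uploads are gathered from each uploader
uploading_event_ids = set()
for current_upload in ["e3", None]:  # one uploader busy with "e3", one idle
    if current_upload is not None:
        uploading_event_ids.add(current_upload)

# Whatever is in none of the above still needs to be backed up
missing_event_ids = unifi_event_ids - (db_event_ids | downloading_event_ids | uploading_event_ids)
print(missing_event_ids)  # {'e4', 'e5'}
```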

File: unifi_protect_backup/unifi_protect_backup_core.py

@@ -85,6 +85,7 @@ class UnifiProtectBackup:
         download_rate_limit: float | None = None,
         port: int = 443,
         use_experimental_downloader: bool = False,
+        parallel_uploads: int = 1,
     ):
         """Will configure logging settings and the Unifi Protect API (but not actually connect).
@@ -117,6 +118,7 @@ class UnifiProtectBackup:
             download_rate_limit (float): Limit how many events can be downloaded in one minute. Disabled by default
             max_event_length (int): Maximum length in seconds for an event to be considered valid and downloaded
             use_experimental_downloader (bool): Use the new experimental downloader (the same method as used by the webUI)
+            parallel_uploads (int): Max number of parallel uploads to allow
         """
         self.color_logging = color_logging
         setup_logging(verbose, self.color_logging)
@@ -155,6 +157,7 @@ class UnifiProtectBackup:
         logger.debug(f"  {download_rate_limit=} events per minute")
         logger.debug(f"  {max_event_length=}s")
         logger.debug(f"  {use_experimental_downloader=}")
+        logger.debug(f"  {parallel_uploads=}")

         self.rclone_destination = rclone_destination
         self.retention = retention
@@ -190,6 +193,7 @@ class UnifiProtectBackup:
         self._download_rate_limit = download_rate_limit
         self._max_event_length = timedelta(seconds=max_event_length)
         self._use_experimental_downloader = use_experimental_downloader
+        self._parallel_uploads = parallel_uploads

     async def start(self):
         """Bootstrap the backup process and kick off the main loop.
@@ -272,18 +276,21 @@ class UnifiProtectBackup:
         )
         tasks.append(downloader.start())

-        # Create upload task
+        # Create upload tasks
         # This will upload the videos in the downloader's buffer to the rclone remotes and log it in the database
-        uploader = VideoUploader(
-            self._protect,
-            upload_queue,
-            self.rclone_destination,
-            self.rclone_args,
-            self.file_structure_format,
-            self._db,
-            self.color_logging,
-        )
-        tasks.append(uploader.start())
+        uploaders = []
+        for i in range(self._parallel_uploads):
+            uploader = VideoUploader(
+                self._protect,
+                upload_queue,
+                self.rclone_destination,
+                self.rclone_args,
+                self.file_structure_format,
+                self._db,
+                self.color_logging,
+            )
+            uploaders.append(uploader)
+            tasks.append(uploader.start())

         # Create event listener task
         # This will connect to the unifi protect websocket and listen for events. When one is detected it will
@@ -312,7 +319,7 @@ class UnifiProtectBackup:
             self._db,
             download_queue,
             downloader,
-            uploader,
+            uploaders,
             self.retention,
             self.detection_types,
             self.ignore_cameras,
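
The parallel-uploads change is the classic asyncio pattern of N consumer tasks draining one shared queue: each `VideoUploader` awaits the next item independently, so uploads overlap without any extra locking. A minimal, runnable sketch of that pattern (generic worker names, not the project's classes):

```python
import asyncio


async def uploader_worker(name: str, queue: asyncio.Queue) -> None:
    """Each worker loops forever, pulling the next finished download off the shared queue."""
    while True:
        event = await queue.get()
        print(f"{name} uploading {event}")
        await asyncio.sleep(0.1)  # stand-in for the actual rclone upload
        queue.task_done()


async def main(parallel_uploads: int = 3) -> None:
    upload_queue: asyncio.Queue = asyncio.Queue()

    # Mirrors the diff: create N identical consumers of the same queue
    workers = [
        asyncio.create_task(uploader_worker(f"uploader-{i}", upload_queue))
        for i in range(parallel_uploads)
    ]

    for n in range(9):
        upload_queue.put_nowait(f"event-{n}")

    await upload_queue.join()  # block until every queued item is processed
    for worker in workers:
        worker.cancel()


asyncio.run(main())
```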

File: unifi_protect_backup/utils.py

@@ -238,12 +238,18 @@ def setup_logging(verbosity: int, color_logging: bool = False, apprise_notifiers
     logger.setLevel(logging.WEBSOCKET_DATA)  # type: ignore

+_initialized_loggers = []
+

 def setup_event_logger(logger, color_logging):
     """Sets up a logger that also displays the event ID currently being processed."""
-    format = "{asctime} [{levelname:^11s}] {name:<42} :{event} {message}"
-    sh = create_logging_handler(format, color_logging)
-    logger.addHandler(sh)
-    logger.propagate = False
+    global _initialized_loggers
+    if logger not in _initialized_loggers:
+        format = "{asctime} [{levelname:^11s}] {name:<42} :{event} {message}"
+        sh = create_logging_handler(format, color_logging)
+        logger.addHandler(sh)
+        logger.propagate = False
+        _initialized_loggers.append(logger)

 _suffixes = ["B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"]
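
The duplicated-logging bug came from calling `setup_event_logger` once per uploader on the same logger object: every call attached another handler, so each record was emitted once per uploader. The `_initialized_loggers` guard makes setup idempotent. A self-contained repro of the failure mode and the fix (stdlib logging only; names are illustrative):

```python
import logging

_initialized_loggers = []


def setup_event_logger(logger: logging.Logger) -> None:
    # Attach a handler only the first time we see this logger; without the
    # guard, N parallel uploaders each add a handler -> N duplicate log lines.
    if logger not in _initialized_loggers:
        handler = logging.StreamHandler()
        handler.setFormatter(logging.Formatter("{asctime} {message}", style="{"))
        logger.addHandler(handler)
        logger.propagate = False
        _initialized_loggers.append(logger)


log = logging.getLogger("uploader")
log.setLevel(logging.INFO)
for _ in range(3):  # e.g. three parallel uploaders initialising logging
    setup_event_logger(log)
log.info("uploaded event e1")  # emitted once, not three times
```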
@@ -407,7 +413,7 @@ class VideoQueue(asyncio.Queue):
             )

         while self.full(item):
-            putter = self._loop.create_future()  # type: ignore
+            putter = self._get_loop().create_future()  # type: ignore
             self._putters.append(putter)  # type: ignore
             try:
                 await putter
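
For context on the `VideoQueue` change: Python 3.10 removed the eagerly-bound `_loop` attribute from `asyncio.Queue` (along with the `loop` parameter), so subclasses that reached into `self._loop` break; the private `_get_loop()` helper now binds the queue to the running loop on first use. A tiny sketch demonstrating the accessor (private API, so this mirrors the diff's approach rather than a documented interface):

```python
import asyncio


async def main() -> None:
    q: asyncio.Queue = asyncio.Queue()

    # Pre-3.10 subclasses used q._loop; on 3.10+ the queue binds to the
    # running loop lazily, via the private _get_loop() helper.
    loop = q._get_loop()
    assert loop is asyncio.get_running_loop()
    print("queue bound to the running loop")


asyncio.run(main())
```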

File: uv.lock (generated, new file, 1648 lines)

File diff suppressed because it is too large