Mirror of https://github.com/ep1cman/unifi-protect-backup.git (synced 2025-12-05 23:53:30 +00:00)

Compare commits: 109 commits, 7f051277b4 ... main

Commits (SHA1 only):
26c1797ce9, ef0cf38f83, 2bd48014a0, afe025be1d, a14ff1bf30, ba64722937, 65d8e66e79, cb54078153,
048e061df1, 7f8177de35, eaabfbdb4e, edf377adc4, ef06d2a4d4, 12c8539977, 474d3c32fa, 3750847055,
c16a380918, df466b5d0b, 18a78863a7, 4d2002b98d, 4b4cb86749, c091fa4f92, 2bf90b6763, f275443a7a,
3a43c1b670, e0421c1dd1, 4ee70e6d4b, ce2993624f, cec1f69d8d, c07fb30fff, 1de9b9a757, 3ec69a7a97,
855607fa29, e11828bd59, 7439ac9bda, e3cbcc819e, ccb816ddbc, 9d2d6558a6, 3c5056614c, 1f18c06e17,
3181080bca, 6e5d90a9f5, 475beaee3d, 75cd1207b4, c067dbd9f7, 2c43149c99, 78a2c3034d, 1bb8496b30,
80ad55d0d0, 0b2c46888c, 0026eaa2ca, c3290a223a, 4265643806, 78be4808d9, 0a6a259120, de4f69dcb5,
a7c4eb8dae, 129d89480e, a7ccef7f1d, bbd70f49bf, f9d74c27f9, 9d79890eff, ccf2cde272, a8328fd09e,
28d241610b, aa1335e73b, 9cb2ccf8b2, 30ea7de5c2, 2dac2cee23, f4d992838a, 9fe4394ee4, e65d8dde6c,
90108edeb8, 1194e957a5, 65128b35dd, 64bb353f67, 558859dd72, d3b40b443a, 4bfe9afc10, c69a3e365a,
ace6a09bba, e3c00e3dfa, 5f7fad72d5, 991998aa37, 074f5b372c, 00aec23805, 52e4ecd50d, 6b116ab93b,
70526b2f49, 5069d28f0d, 731ab1081d, 701fd9b0a8, 5fa202005b, 3644ad3754, 9410051ab9, d5a74f475a,
dc8473cc3d, 60901e9a84, 4a0bd87ef2, 8dc0f8a212, 34252c461f, acc405a1f8, b66d40736c, 171796e5c3,
cbc497909d, 66b3344e29, 89cab64679, f2f1c49ae9, 8786f2ceb0
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.9.3
+current_version = 0.14.0
 commit = True
 tag = True
@@ -7,9 +7,13 @@ tag = True
 search = version = "{current_version}"
 replace = version = "{new_version}"
 
+[bumpversion:file:uv.lock]
+search = version = "{current_version}"
+replace = version = "{new_version}"
+
 [bumpversion:file:unifi_protect_backup/__init__.py]
-search = __version__ = '{current_version}'
-replace = __version__ = '{new_version}'
+search = __version__ = "{current_version}"
+replace = __version__ = "{new_version}"
 
 [bumpversion:file:Dockerfile]
 search = COPY dist/unifi_protect_backup-{current_version}.tar.gz sdist.tar.gz
@@ -1,24 +0,0 @@
-# http://editorconfig.org
-
-root = true
-
-[*]
-indent_style = space
-indent_size = 4
-trim_trailing_whitespace = true
-insert_final_newline = true
-charset = utf-8
-end_of_line = lf
-
-[*.bat]
-indent_style = tab
-end_of_line = crlf
-
-[LICENSE]
-insert_final_newline = false
-
-[Makefile]
-indent_style = tab
-
-[*.{yml, yaml}]
-indent_size = 2
26  .github/ISSUE_TEMPLATE/bug_report.md  (vendored, new file)
@@ -0,0 +1,26 @@
+---
+name: Bug report
+about: Create a report to help UPB improve
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+* Unifi Protect Backup version:
+* Unifi Protect version:
+* Python version:
+* Operating System:
+* Are you using a docker container or native?:
+
+### Description
+
+Describe what you were trying to get done.
+Tell us what happened, what went wrong, and what you expected to happen.
+
+### What I Did
+
+```
+Paste the command(s) you ran and the output.
+If there was a crash, please include the traceback here.
+```
114  .github/workflows/dev.yml  (vendored)
@@ -1,90 +1,108 @@
-# This is a basic workflow to help you get started with Actions
-name: Test and Build
-
+name: dev workflow
+
 # Controls when the action will run.
 on:
-  # Triggers the workflow on push events but only for the dev branch
   push:
-    branches: [ dev ]
+    branches-ignore:
+      - main
   pull_request:
 
   # Allows you to run this workflow manually from the Actions tab
   workflow_dispatch:
 
 # A workflow run is made up of one or more jobs that can run sequentially or in parallel
 jobs:
   # This workflow contains a single job called "test"
   test:
     # The type of runner that the job will run on
     strategy:
       matrix:
-        python-versions: [3.9]
-        os: [ubuntu-18.04, macos-latest, windows-latest]
+        python-versions: ["3.10", "3.11", "3.12", "3.13"]
+        os: [ubuntu-latest, macos-latest, windows-latest]
     runs-on: ${{ matrix.os }}
 
     # Steps represent a sequence of tasks that will be executed as part of the job
     steps:
-      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v2
+      - name: Configure Git to maintain line endings
+        run: |
+          git config --global core.autocrlf false
+          git config --global core.eol lf
+
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Set up Python
+        uses: actions/setup-python@v5
         with:
          python-version: ${{ matrix.python-versions }}
 
-      - name: Install dependencies
+      - name: Install uv (Unix)
+        if: runner.os != 'Windows'
         run: |
-          python -m pip install --upgrade pip
-          pip install poetry tox tox-gh-actions
+          curl -LsSf https://astral.sh/uv/install.sh | sh
+          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
 
-      - name: test with tox
-        run:
-          tox
+      - name: Install uv (Windows)
+        if: runner.os == 'Windows'
+        run: |
+          iwr -useb https://astral.sh/uv/install.ps1 | iex
+          echo "$HOME\.cargo\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
 
-      - name: list files
-        run: ls -l .
-
-      - uses: codecov/codecov-action@v1
-        with:
-          fail_ci_if_error: true
-          files: coverage.xml
+      - name: Install dev dependencies
+        run: |
+          uv sync --dev
+
+      - name: Run pre-commit
+        run: uv run pre-commit run --all-files
+
+      - name: Run pytest
+        run: uv run pytest
+
+      - name: Build
+        run: uv build
 
   dev_container:
     name: Create dev container
-    runs-on: ubuntu-20.04
-    if: github.event_name != 'pull_request'
+    needs: test
+    if: github.ref == 'refs/heads/dev'
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      packages: write
 
     # Steps represent a sequence of tasks that will be executed as part of the job
     steps:
-      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
-      - uses: actions/checkout@v2
+      - name: Checkout code
+        uses: actions/checkout@v4
 
-      - uses: actions/setup-python@v2
+      - name: Set up Python
+        uses: actions/setup-python@v5
         with:
-          python-version: 3.9
+          python-version: '3.12'
 
-      - name: Install dependencies
+      - name: Install uv (Unix)
+        if: runner.os != 'Windows'
         run: |
-          python -m pip install --upgrade pip
-          pip install poetry tox tox-gh-actions
+          curl -LsSf https://astral.sh/uv/install.sh | sh
+          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
 
-      - name: Build wheels and source tarball
-        run: >-
-          poetry build
+      - name: Install uv (Windows)
+        if: runner.os == 'Windows'
+        run: |
+          iwr -useb https://astral.sh/uv/install.ps1 | iex
+          echo "$HOME\.cargo\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
+
+      - name: Build
+        run: uv build
 
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v1
+        uses: docker/setup-qemu-action@v3
 
      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v1
+        uses: docker/setup-buildx-action@v3
 
-      - name: Log in to container registry
-        uses: docker/login-action@v2
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
 
-      - name: Build and push dev
-        uses: docker/build-push-action@v2
+      - name: Build and push Docker image
+        uses: docker/build-push-action@v5
        with:
          context: .
          platforms: linux/amd64,linux/arm64
56  .github/workflows/release.yml  (vendored)
@@ -1,34 +1,27 @@
 # Publish package on main branch if it's tagged with 'v*'
 
-name: release & publish workflow
+name: Release & Publish Workflow
 
 # Controls when the action will run.
 on:
-  # Triggers the workflow on push events but only for the master branch
   push:
     tags:
       - 'v*'
 
   # Allows you to run this workflow manually from the Actions tab
   workflow_dispatch:
 
 # A workflow run is made up of one or more jobs that can run sequentially or in parallel
 jobs:
   # This workflow contains a single job called "release"
   release:
     name: Create Release
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-latest
 
     # Steps represent a sequence of tasks that will be executed as part of the job
     steps:
       - name: Get version from tag
         id: tag_name
         run: |
-          echo ::set-output name=current_version::${GITHUB_REF#refs/tags/v}
+          echo "current_version=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT
         shell: bash
 
-      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
-      - uses: actions/checkout@v2
+      - name: Checkout code
+        uses: actions/checkout@v4
 
       - name: Get Changelog Entry
         id: changelog_reader
@@ -37,56 +30,57 @@ jobs:
          version: ${{ steps.tag_name.outputs.current_version }}
          path: ./CHANGELOG.md
 
-      - uses: actions/setup-python@v2
+      - name: Set up Python
+        uses: actions/setup-python@v5
        with:
-          python-version: 3.9
+          python-version: "3.10"
 
-      - name: Install dependencies
+      - name: Install uv
        run: |
-          python -m pip install --upgrade pip
-          pip install poetry
+          curl -LsSf https://astral.sh/uv/install.sh | sh
+          echo "$HOME/.cargo/bin" >> $GITHUB_PATH
 
       - name: Build wheels and source tarball
-        run: >-
-          poetry build
+        run: uv build
 
-      - name: show temporary files
-        run: >-
-          ls -lR
+      - name: Show build artifacts
+        run: ls -lR dist/
 
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v1
+        uses: docker/setup-qemu-action@v3
 
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v1
+        uses: docker/setup-buildx-action@v3
 
       - name: Log in to container registry
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
 
-      - name: Build and push dev
-        uses: docker/build-push-action@v2
+      - name: Build and push container
+        uses: docker/build-push-action@v5
        with:
          context: .
          platforms: linux/amd64,linux/arm64
          push: true
-          tags: ghcr.io/${{ github.repository }}:${{ steps.tag_name.outputs.current_version }}, ghcr.io/${{ github.repository }}:latest
+          tags: |
+            ghcr.io/${{ github.repository }}:${{ steps.tag_name.outputs.current_version }}
+            ghcr.io/${{ github.repository }}:latest
 
-      - name: create github release
+      - name: Create GitHub release
        id: create_release
        uses: softprops/action-gh-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          body: ${{ steps.changelog_reader.outputs.changes }}
-          files: dist/*.whl
+          files: dist/*
          draft: false
          prerelease: false
 
-      - name: publish to PyPI
+      - name: Publish to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          user: __token__
2  .gitignore  (vendored)
@@ -120,3 +120,5 @@ data/
 .envrc
 clips/
 *.sqlite
+.tool-versions
+docker-compose.yml
@@ -5,32 +5,26 @@ repos:
     - id: forbid-crlf
     - id: remove-crlf
 - repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v3.4.0
+  rev: v5.0.0
   hooks:
     - id: trailing-whitespace
     - id: end-of-file-fixer
     - id: check-merge-conflict
     - id: check-yaml
       args: [ --unsafe ]
-- repo: https://github.com/pre-commit/mirrors-isort
-  rev: v5.8.0
+- repo: https://github.com/astral-sh/ruff-pre-commit
+  # Ruff version.
+  rev: v0.11.4
   hooks:
-    - id: isort
-      args: [ "--filter-files" ]
-- repo: https://github.com/ambv/black
-  rev: 21.5b1
-  hooks:
-    - id: black
-      language_version: python3.9
-- repo: https://github.com/pycqa/flake8
-  rev: 3.9.2
-  hooks:
-    - id: flake8
-      additional_dependencies: [ flake8-typing-imports==1.10.0 ]
+    # Run the linter.
+    - id: ruff
+    # Run the formatter.
+    - id: ruff-format
 - repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v0.901
+  rev: v1.14.1
   hooks:
     - id: mypy
+      exclude: tests/
       additional_dependencies:
         - types-click
         - types-pytz
         - types-cryptography
         - types-python-dateutil
+        - types-aiofiles
85  CHANGELOG.md
@@ -4,6 +4,91 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.14.0] - 2025-07-18
+- Add support for Finger Print, NFC Card Scan, and Audio Detections
+
+## [0.13.1] - 2025-06-26
+### Fixed
+- Bumped uiprotect version to support unifi protect 6
+
+## [0.13.0] - 2025-04-09
+### Added
+- Parallel uploaders are now supported
+- All smart detection types are now supported
+- Migrated the project from poetry to uv
+
+### Fixed
+- Corrected the envar for setting cameras to backup for ONLY_CAMERAS -> CAMERAS
+- Bumped to the latest uiprotect library to fix issue when unifi access devices are present
+
+## [0.12.0] - 2025-01-18
+### Added
+- Tool now targets UIProtect instead of pyunifiprotect which should help any lingering auth issues with Unifi OS 4.X
+- Python Version bumped to 3.10 (based on UIProtect need)
+- The ability to specify only specific cameras to backup
+- Re-enabled the experimental downloader after adding a monkey patch for UIProtect to include the unmerged code
+- Switched linter to `ruff`
+- Added support for SMART_DETECT_LINE events
+-
+### Fixed
+- Unifi now returns unfinished events, this is now handled correctly
+- Login attempts now use an exponentially increasing delay to try work around aggressive rate limiting on logins
+
+## [0.11.0] - 2024-06-08
+### Added
+- A new experimental downloader that uses the same mechanism the web ui does. Enable with
+  `--experimental-downloader`
+### Fixed
+- Support for UniFi OS 4.x.x
+
+## [0.10.7] - 2024-03-22
+### Fixed
+- Set pyunifiprotect to a minimum version of 5.0.0
+
+## [0.10.6] - 2024-03-22
+### Fixed
+- Bumped `pyunifiprotect` version to fix with versions of Unifi Protect after 3.0.10
+
+## [0.10.5] - 2024-01-26
+### Fixed
+- Bumped `pyunifiprotect` version to fix issue with old version of yarl
+
+## [0.10.4] - 2024-01-26
+### Fixed
+- Bumped `pyunifiprotect` version to fix issue caused by new video modes
+
+## [0.10.3] - 2023-12-07
+### Fixed
+- Bumped `pyunifiprotect` version to fix issue caused by unifi protect returning invalid UUIDs
+
+## [0.10.2] - 2023-11-21
+### Fixed
+- Issue where duplicate events were being downloaded causing database errors
+- Default file path format now uses event start time instead of event end time which makes more logical sense
+
+## [0.10.1] - 2023-11-01
+### Fixed
+- Event type enum conversion string was no longer converting to the enum value, this is now done explicitly.
+
+## [0.10.0] - 2023-11-01
+### Added
+- Command line option to skip events longer than a given length (default 2 hours)
+- Docker image is now based on alpine edge giving access to the latest version of rclone
+### Fixed
+- Failed uploads no longer write to the database, meaning they will be retried
+- Fixed issue with chunked event fetch during initial ignore of events
+- Fixed error when no events were fetched for the retention period
+
+## [0.9.5] - 2023-10-07
+### Fixed
+- Errors caused by latest unifi protect version by bumping the version of pyunifiprotect used
+- Queries for events are now chunked into groups of 500 which should help stop this tool crashing large
+  unifi protect instances.
+
+## [0.9.4] - 2023-07-29
+### Fixed
+- Time period parsing, 'Y' -> 'y'
+
 ## [0.9.3] - 2023-07-08
 ### Fixed
 - Queued up downloads etc now wait for dropped connections to be re-established.
@@ -55,12 +55,11 @@ Ready to contribute? Here's how to set up `unifi-protect-backup` for local devel
     $ git clone git@github.com:your_name_here/unifi-protect-backup.git
     ```
 
-3. Ensure [poetry](https://python-poetry.org/docs/) is installed.
-4. Install dependencies and start your virtualenv:
+3. Ensure [uv](https://docs.astral.sh/uv/) is installed.
+4. Create virtual environment and install dependencies:
 
     ```
-    $ poetry install -E test -E dev
-    $ poetry shell
+    $ uv install --dev
     ```
 
 5. Create a branch for local development:
@@ -75,14 +74,21 @@ Ready to contribute? Here's how to set up `unifi-protect-backup` for local devel
    be inside the `poetry shell` virtualenv or run it via poetry:
 
    ```
-   $ poetry run unifi-protect-backup {args}
+   $ uv run unifi-protect-backup {args}
   ```
 
-7. When you're done making changes, check that your changes pass the
-   tests, including testing other Python versions, with tox:
+7. Install pre-commit git hooks to ensure all code commit to the repository
+   is formatted correctly and meets coding standards:
 
   ```
-   $ poetry run tox
+   $ uv run pre-commit install
   ```
 
+8. When you're done making changes, check that your changes pass the
+   tests:
+
+   ```
+   $ uv run pytest
+   ```
+
 8. Commit your changes and push your branch to GitHub:
@@ -103,14 +109,14 @@ Before you submit a pull request, check that it meets these guidelines:
 2. If the pull request adds functionality, the docs should be updated. Put
    your new functionality into a function with a docstring. If adding a CLI
    option, you should update the "usage" in README.md.
-3. The pull request should work for Python 3.9. Check
+3. The pull request should work for Python 3.10. Check
    https://github.com/ep1cman/unifi-protect-backup/actions
   and make sure that the tests pass for all supported Python versions.
 
 ## Tips
 
 ```
-$ poetry run pytest tests/test_unifi_protect_backup.py
+$ uv run pytest tests/test_unifi_protect_backup.py
 ```
 
 To run a subset of tests.
@@ -123,7 +129,7 @@ Make sure all your changes are committed (including an entry in CHANGELOG.md).
 Then run:
 
 ```
-$ poetry run bump2version patch # possible: major / minor / patch
+$ uv run bump2version patch # possible: major / minor / patch
 $ git push
 $ git push --tags
 ```
Dockerfile
@@ -1,13 +1,13 @@
 # To build run:
 # make docker
 
-FROM ghcr.io/linuxserver/baseimage-alpine:3.16
+FROM ghcr.io/linuxserver/baseimage-alpine:edge
 
 LABEL maintainer="ep1cman"
 
 WORKDIR /app
 
-COPY dist/unifi_protect_backup-0.9.3.tar.gz sdist.tar.gz
+COPY dist/unifi_protect_backup-0.14.0.tar.gz sdist.tar.gz
 
 # https://github.com/rust-lang/cargo/issues/2808
 ENV CARGO_NET_GIT_FETCH_WITH_CLI=true
@@ -29,7 +29,7 @@ RUN \
     py3-pip \
     python3 && \
   echo "**** install unifi-protect-backup ****" && \
-  pip install --no-cache-dir sdist.tar.gz && \
+  pip install --no-cache-dir --break-system-packages sdist.tar.gz && \
   echo "**** cleanup ****" && \
   apk del --purge \
     build-dependencies && \
@@ -50,6 +50,9 @@ ENV TZ=UTC
 ENV IGNORE_CAMERAS=""
 ENV SQLITE_PATH=/config/database/events.sqlite
 
+# Fixes issue where `platformdirs` is unable to properly detect the user directory
+ENV XDG_CONFIG_HOME=/config
+
 COPY docker_root/ /
 
 RUN mkdir -p /config/database /config/rclone
101  README.md
@@ -23,13 +23,13 @@ retention period.
 ## Features
 
 - Listens to events in real-time via the Unifi Protect websocket API
-- Ensures any previous and/or missed events within the retention period are also backed up
+- Ensures any previous and/or missed events within the missing range are also backed up
 - Supports uploading to a [wide range of storage systems using `rclone`](https://rclone.org/overview/)
 - Automatic pruning of old clips
 
 ## Requirements
-- Python 3.9+
-- Unifi Protect version 1.20 or higher (as per [`pyunifiprotect`](https://github.com/briis/pyunifiprotect))
+- Python 3.10+
+- Unifi Protect version 1.20 or higher (as per [`uiprotect`](https://github.com/uilibs/uiprotect))
 - `rclone` installed with at least one remote configured.
 
 # Setup
@@ -48,7 +48,7 @@ In order to connect to your unifi protect instance, you will first need to setup
 
 ## Installation
 
-*The prefered way to run this tool is using a container*
+*The preferred way to run this tool is using a container*
 
 ### Docker Container
 You can run this tool as a container if you prefer with the following command.
@@ -90,8 +90,7 @@ docker run \
   -e UFP_ADDRESS='UNIFI_PROTECT_IP' \
   -e UFP_SSL_VERIFY='false' \
   -e RCLONE_DESTINATION='my_remote:/unifi_protect_backup' \
-  -v '/path/to/save/clips':'/data' \
-  -v '/path/to/rclone.conf':'/config/rclone/rclone.conf' \
+  -v '/path/to/config/rclone':'/config/rclone/' \
   -v '/path/to/save/database':/config/database/ \
   ghcr.io/ep1cman/unifi-protect-backup
 ```
@@ -124,19 +123,37 @@ Options:
                                 `--max-age` argument of `rclone`
                                 (https://rclone.org/filtering/#max-age-don-t-transfer-any-file-
                                 older-than-this)  [default: 7d]
+  --missing-range TEXT          How far back should missing events be checked for. Defaults to
+                                the same as the retention time. Format as per the `--max-age`
+                                argument of `rclone` (https://rclone.org/filtering/#max-age-don-
+                                t-transfer-any-file-older-than-this)
   --rclone-args TEXT            Optional extra arguments to pass to `rclone rcat` directly.
                                 Common usage for this would be to set a bandwidth limit, for
                                 example.
   --rclone-purge-args TEXT      Optional extra arguments to pass to `rclone delete` directly.
                                 Common usage for this would be to execute a permanent delete
-                                instead of using the recycle bin on a destination.
-                                Google Drive example: `--drive-use-trash=false`
+                                instead of using the recycle bin on a destination. Google Drive
+                                example: `--drive-use-trash=false`
   --detection-types TEXT        A comma separated list of which types of detections to backup.
-                                Valid options are: `motion`, `person`, `vehicle`, `ring`
-                                [default: motion,person,vehicle,ring]
+                                Valid options are: `motion`, `ring`, `line`, `fingerprint`,
+                                `nfc`, `person`, `animal`, `vehicle`, `licensePlate`, `package`,
+                                `face`, `car`, `pet`, `alrmSmoke`, `alrmCmonx`, `smoke_cmonx`,
+                                `alrmSiren`, `alrmBabyCry`, `alrmSpeak`, `alrmBark`,
+                                `alrmBurglar`, `alrmCarHorn`, `alrmGlassBreak`  [default: motion
+                                ,ring,line,fingerprint,nfc,person,animal,vehicle,licensePlate,pa
+                                ckage,face,car,pet,alrmSmoke,alrmCmonx,smoke_cmonx,alrmSiren,alr
+                                mBabyCry,alrmSpeak,alrmBark,alrmBurglar,alrmCarHorn,alrmGlassBre
+                                ak]
   --ignore-camera TEXT          IDs of cameras for which events should not be backed up. Use
                                 multiple times to ignore multiple IDs. If being set as an
                                 environment variable the IDs should be separated by whitespace.
+                                Alternatively, use a Unifi user with a role which has access
+                                restricted to the subset of cameras that you wish to backup.
+  --camera TEXT                 IDs of *ONLY* cameras for which events should be backed up. Use
+                                multiple times to include multiple IDs. If being set as an
+                                environment variable the IDs should be separated by whitespace.
+                                Alternatively, use a Unifi user with a role which has access
+                                restricted to the subset of cameras that you wish to backup.
   --file-structure-format TEXT  A Python format string used to generate the file structure/name
                                 on the rclone remote.For details of the fields available, see
                                 the projects `README.md` file.  [default: {camera_name}/{event.s
@@ -189,6 +206,14 @@ Options:
                                 https://github.com/caronc/apprise
   --skip-missing                If set, events which are 'missing' at the start will be ignored.
                                 Subsequent missing events will be downloaded (e.g. a missed event)  [default: False]
+  --download-rate-limit FLOAT   Limit how events can be downloaded in one minute. Disabled by
+                                default
+  --max-event-length INTEGER    Only download events shorter than this maximum length, in
+                                seconds  [default: 7200]
+  --experimental-downloader     If set, a new experimental download mechanism will be used to match
+                                what the web UI does. This might be more stable if you are experiencing
+                                a lot of failed downloads with the default downloader.  [default: False]
+  --parallel-uploads INTEGER    Max number of parallel uploads to allow  [default: 1]
   --help                        Show this message and exit.
 ```
@@ -200,10 +225,12 @@ always take priority over environment variables):
 - `UFP_PORT`
 - `UFP_SSL_VERIFY`
 - `RCLONE_RETENTION`
+- `MISSING_RANGE`
 - `RCLONE_DESTINATION`
 - `RCLONE_ARGS`
 - `RCLONE_PURGE_ARGS`
 - `IGNORE_CAMERAS`
+- `CAMERAS`
 - `DETECTION_TYPES`
 - `FILE_STRUCTURE_FORMAT`
 - `SQLITE_PATH`
@@ -212,6 +239,10 @@ always take priority over environment variables):
 - `PURGE_INTERVAL`
 - `APPRISE_NOTIFIERS`
 - `SKIP_MISSING`
+- `DOWNLOAD_RATELIMIT`
+- `MAX_EVENT_LENGTH`
+- `EXPERIMENTAL_DOWNLOADER`
+- `PARALLEL_UPLOADS`
 
 ## File path formatting
 
@@ -223,7 +254,7 @@ If you wish for the clips to be structured differently you can do this using the
 option. It uses standard [python format string syntax](https://docs.python.org/3/library/string.html#formatstrings).
 
 The following fields are provided to the format string:
-- *event:* The `Event` object as per https://github.com/briis/pyunifiprotect/blob/master/pyunifiprotect/data/nvr.py
+- *event:* The `Event` object as per https://github.com/uilibs/uiprotect/blob/main/src/uiprotect/data/nvr.py
 - *duration_seconds:* The duration of the event in seconds
 - *detection_type:* A nicely formatted list of the event detection type and the smart detection types (if any)
 - *camera_name:* The name of the camera that generated this event
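To illustrate how these fields combine: a minimal sketch of expanding the default `--file-structure-format` string with Python's `str.format`. The `Event` stand-in below is a simplified assumption for illustration, not the real `uiprotect` class:

```python
from dataclasses import dataclass
from datetime import datetime


@dataclass
class Event:
    """Simplified stand-in for the uiprotect Event object (illustration only)."""

    start: datetime
    end: datetime


fmt = "{camera_name}/{event.start:%Y-%m-%d}/{event.end:%Y-%m-%dT%H-%M-%S} {detection_type}.mp4"
event = Event(start=datetime(2024, 1, 1, 12, 0, 0), end=datetime(2024, 1, 1, 12, 5, 30))

# str.format resolves attribute access and applies the datetime format specs
path = fmt.format(event=event, camera_name="Front Door", detection_type="motion")
print(path)  # Front Door/2024-01-01/2024-01-01T12-05-30 motion.mp4
```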
@@ -236,6 +267,46 @@ now on, you can use the `--skip-missing` flag. This does not enable the periodic
 
 If you use this feature it is advised that your run the tool once with this flag, then stop it once the database has been created and the events are ignored. Keeping this flag set permanently could cause events to be missed if the tool crashes and is restarted etc.
 
+## Selecting cameras
+
+By default unifi-protect-backup backs up clips from all cameras.
+If you want to limit the backups to certain cameras you can do that in one of two ways.
+
+Note: Camera IDs can be obtained by scanning the logs, by looking for `Found cameras:`. You can find this section of the logs by piping the logs in to this `sed` command
+`sed -n '/Found cameras:/,/NVR TZ/p'`
+
+### Back-up only specific cameras
+By using the `--camera` argument, you can specify the ID of the cameras you want to backup. If you want to backup more than one camera you can specify this argument more than once. If this argument is specified all other cameras will be ignored.
+
+#### Example:
+If you have three cameras:
+- `CAMERA_ID_1`
+- `CAMERA_ID_2`
+- `CAMERA_ID_3`
+and run the following command:
+```
+$ unifi-protect-backup [...] --camera CAMERA_ID_1 --camera CAMERA_ID_2
+```
+Only `CAMERA_ID_1` and `CAMERA_ID_2` will be backed up.
+
+### Ignoring cameras
+
+By using the `--ignore-camera` argument, you can specify the ID of the cameras you *do not* want to backup. If you want to ignore more than one camera you can specify this argument more than once. If this argument is specified all cameras will be backed up except the ones specified
+
+#### Example:
+If you have three cameras:
+- `CAMERA_ID_1`
+- `CAMERA_ID_2`
+- `CAMERA_ID_3`
+and run the following command:
+```
+$ unifi-protect-backup [...] --ignore-camera CAMERA_ID_1 --ignore-camera CAMERA_ID_2
+```
+Only `CAMERA_ID_3` will be backed up.
+
+### Note about unifi protect accounts
+It is possible to limit what cameras a unifi protect accounts can see. If an account does not have access to a camera this tool will never see it as available so it will not be impacted by the above arguments.
+
 # A note about `rclone` backends and disk wear
 This tool attempts to not write the downloaded files to disk to minimise disk wear, and instead streams them directly to
 rclone. Sadly, not all storage backends supported by `rclone` allow "Stream Uploads". Please refer to the `StreamUpload` column on this table to see which one do and don't: https://rclone.org/overview/#optional-features
@@ -265,7 +336,7 @@ tmpfs /mnt/tmpfs tmpfs nosuid,nodev,noatime 0 0
 ```
 
 # Running Backup Tool as a Service (LINUX ONLY)
-You can create a service that will run the docker or local version of this backup tool. The service can be configured to launch on boot. This is likely the preferred way you want to execute the tool once you have it completely configured and tested so it is continiously running.
+You can create a service that will run the docker or local version of this backup tool. The service can be configured to launch on boot. This is likely the preferred way you want to execute the tool once you have it completely configured and tested so it is continuously running.
 
 First create a service configuration file. You can replace `protectbackup` in the filename below with the name you wish to use for your service, if you change it remember to change the other locations in the following scripts as well.
 
@@ -310,7 +381,7 @@ If you need to debug your rclone setup, you can invoke rclone directly like so:
 ```
 docker run \
   --rm \
-  -v /path/to/rclone.conf:/config/rclone/rclone.conf \
+  -v /path/to/config/rclone:/config/rclone \
   -e RCLONE_CONFIG='/config/rclone/rclone.conf' \
   --entrypoint rclone \
   ghcr.io/ep1cman/unifi-protect-backup \
@@ -321,7 +392,7 @@ For example to check that your config file is being read properly and list the c
 ```
 docker run \
   --rm \
-  -v /path/to/rclone.conf:/config/rclone/rclone.conf \
+  -v /path/to/config/rclone:/config/rclone \
  -e RCLONE_CONFIG='/config/rclone/rclone.conf' \
  --entrypoint rclone \
  ghcr.io/ep1cman/unifi-protect-backup \
@@ -335,7 +406,7 @@ docker run \
 </a>
 
 
-- Heavily utilises [`pyunifiprotect`](https://github.com/briis/pyunifiprotect) by [@briis](https://github.com/briis/)
+- Heavily utilises [`uiprotect`](https://github.com/uilibs/uiprotect)
 - All the cloud functionality is provided by [`rclone`](https://rclone.org/)
 - This package was created with [Cookiecutter](https://github.com/audreyr/cookiecutter) and the [waynerv/cookiecutter-pypackage](https://github.com/waynerv/cookiecutter-pypackage) project template.
2  docker_root/etc/cont-init.d/30-config  (Normal file → Executable file)
@@ -4,7 +4,7 @@ mkdir -p /config/rclone
 
 # For backwards compatibility
 [[ -f "/root/.config/rclone/rclone.conf" ]] && \
-    echo "DEPRECATED: Copying rclone conf from /root/.config/rclone/rclone.conf, please change your mount to /config/rclone.conf"
+    echo "DEPRECATED: Copying rclone conf from /root/.config/rclone/rclone.conf, please change your mount to /config/rclone/rclone.conf" && \
     cp \
         /root/.config/rclone/rclone.conf \
         /config/rclone/rclone.conf
18  docker_root/etc/services.d/unifi-protect-backup/run  (Normal file → Executable file)
@@ -1,9 +1,21 @@
 #!/usr/bin/with-contenv bash
 
 export RCLONE_CONFIG=/config/rclone/rclone.conf
+export XDG_CACHE_HOME=/config
 
 echo $VERBOSITY
 [[ -n "$VERBOSITY" ]] && export VERBOSITY_ARG=-$VERBOSITY || export VERBOSITY_ARG=""
 
-exec \
-    s6-setuidgid abc unifi-protect-backup ${VERBOSITY_ARG}
+# Run without exec to catch the exit code
+s6-setuidgid abc unifi-protect-backup ${VERBOSITY_ARG}
+exit_code=$?
+
+# If exit code is 200 (arg error), exit the container
+if [ $exit_code -eq 200 ]; then
+    # Send shutdown signal to s6
+    /run/s6/basedir/bin/halt
+    exit $exit_code
+fi
+
+# Otherwise, let s6 handle potential restart
+exit $exit_code
7  makefile
@@ -6,11 +6,10 @@ container_arches ?= linux/amd64,linux/arm64
 test: format lint unittest
 
 format:
-	isort $(sources) tests
-	black $(sources) tests
+	ruff format $(sources) tests
 
 lint:
-	flake8 $(sources) tests
+	ruff check $(sources) tests
 	mypy $(sources) tests
 
 unittest:
@@ -29,5 +28,5 @@ clean:
 	rm -rf coverage.xml .coverage
 
 docker:
-	poetry build
+	uv build
 	docker buildx build . --platform $(container_arches) -t $(container_name) --push
2439  poetry.lock  (generated; file diff suppressed because it is too large)
149  pyproject.toml
@@ -1,99 +1,82 @@
-[tool]
-[tool.poetry]
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
 name = "unifi_protect_backup"
-version = "0.9.3"
-homepage = "https://github.com/ep1cman/unifi-protect-backup"
+version = "0.14.0"
 description = "Python tool to backup unifi event clips in realtime."
-authors = ["sebastian.goscik <sebastian@goscik.com>"]
 readme = "README.md"
-license = "MIT"
-classifiers=[
-    'Development Status :: 5 - Production/Stable',
-    'Intended Audience :: Information Technology',
-    'License :: OSI Approved :: MIT License',
-    'Natural Language :: English',
-    'Programming Language :: Python :: 3',
-    'Programming Language :: Python :: 3.9',
+license = {text = "MIT"}
+authors = [
+    {name = "sebastian.goscik", email = "sebastian@goscik.com"}
 ]
-packages = [
-    { include = "unifi_protect_backup" },
-    { include = "tests", format = "sdist" },
+classifiers = [
+    "Development Status :: 5 - Production/Stable",
+    "Intended Audience :: Information Technology",
+    "License :: OSI Approved :: MIT License",
+    "Natural Language :: English",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3.10",
 ]
+requires-python = ">=3.10.0,<4.0"
+dependencies = [
+    "click==8.0.1",
+    "aiorun>=2023.7.2",
+    "aiosqlite>=0.17.0",
+    "python-dateutil>=2.8.2",
+    "apprise>=1.5.0",
+    "expiring-dict>=1.1.0",
+    "async-lru>=2.0.4",
+    "aiolimiter>=1.1.0",
+    "uiprotect==7.14.1",
+    "aiohttp==3.12.14",
+]
 
-[tool.poetry.dependencies]
-python = ">=3.9.0,<4.0"
-click = "8.0.1"
-pyunifiprotect = "^4.0.11"
-aiorun = "^2022.11.1"
-aiosqlite = "^0.17.0"
-python-dateutil = "^2.8.2"
-apprise = "^1.3.0"
-expiring-dict = "^1.1.0"
-async-lru = "^2.0.3"
+[project.urls]
+Homepage = "https://github.com/ep1cman/unifi-protect-backup"
 
-[tool.poetry.group.dev]
-optional = true
+[project.scripts]
+unifi-protect-backup = "unifi_protect_backup.cli:main"
 
-[tool.poetry.group.dev.dependencies]
-black = "^22.10.0"
-isort = "^5.8.0"
-flake8 = "^3.9.2"
-flake8-docstrings = "^1.6.0"
-virtualenv = "^20.2.2"
-mypy = "^0.900"
-types-pytz = "^2021.3.5"
-types-cryptography = "^3.3.18"
-twine = "^3.3.0"
-bump2version = "^1.0.1"
-pre-commit = "^2.12.0"
-types-python-dateutil = "^2.8.19.10"
+[dependency-groups]
+dev = [
+    "mypy>=1.15.0",
+    "types-pytz>=2021.3.5",
+    "types-cryptography>=3.3.18",
+    "types-python-dateutil>=2.8.19.10",
+    "types-aiofiles>=24.1.0.20241221",
+    "bump2version>=1.0.1",
+    "pre-commit>=4.2.0",
+    "ruff>=0.11.4",
+    "pytest>=8.3.5",
+]
 
-[tool.poetry.group.test]
-optional = true
+[tool.hatch.build.targets.wheel]
+packages = ["unifi_protect_backup"]
 
-[tool.poetry.group.test.dependencies]
-pytest = "^6.2.4"
-pytest-cov = "^2.12.0"
-tox = "^3.20.1"
-tox-asdf = "^0.1.0"
+[tool.hatch.build.targets.sdist]
+include = ["unifi_protect_backup", "tests"]
 
-[tool.poetry.scripts]
-unifi-protect-backup = 'unifi_protect_backup.cli:main'
-
-[tool.black]
+[tool.ruff]
 line-length = 120
-skip-string-normalization = true
-target-version = ['py39']
-include = '\.pyi?$'
-exclude = '''
-/(
-    \.eggs
-  | \.git
-  | \.hg
-  | \.mypy_cache
-  | \.tox
-  | \.venv
-  | _build
-  | buck-out
-  | build
-  | dist
-)/
-'''
+target-version = "py310"
 
-[tool.isort]
-multi_line_output = 3
-include_trailing_comma = true
-force_grid_wrap = 0
-use_parentheses = true
-ensure_newline_before_comments = true
-line_length = 120
-skip_gitignore = true
-# you can skip files as below
-#skip_glob = docs/conf.py
+[tool.ruff.lint]
+select = ["E","F","D","B","W"]
+ignore = ["D203", "D213"]
+
+[tool.ruff.format]
+quote-style = "double"
+indent-style = "space"
+line-ending = "lf"
+docstring-code-format = true
 
 [tool.mypy]
-allow_redefinition=true
+allow_redefinition = true
+exclude = [
+    'unifi_protect_backup/uiprotect_patch.py'
+]
 
-[build-system]
-requires = ["poetry-core>=1.0.0"]
-build-backend = "poetry.core.masonry.api"
+[tool.uv]
+default-groups = []
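One quick way to confirm the `[project.scripts]` entry above is wired up after installation is to query the registered console scripts with the standard library; a small sketch, assuming the package is installed in the current environment:

```python
from importlib.metadata import entry_points

# List console scripts contributed by installed distributions (Python 3.10+ API)
for ep in entry_points(group="console_scripts"):
    if ep.name == "unifi-protect-backup":
        print(ep.value)   # unifi_protect_backup.cli:main
        main = ep.load()  # resolves to the click command defined in cli.py
```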
88  setup.cfg
@@ -1,88 +0,0 @@
-[flake8]
-max-line-length = 120
-max-complexity = 18
-ignore = E203, E266, W503
-docstring-convention = google
-per-file-ignores = __init__.py:F401
-exclude = .git,
-    __pycache__,
-    setup.py,
-    build,
-    dist,
-    docs,
-    releases,
-    .venv,
-    .tox,
-    .mypy_cache,
-    .pytest_cache,
-    .vscode,
-    .github,
-    # By default test codes will be linted.
-    # tests
-
-[mypy]
-ignore_missing_imports = True
-
-[coverage:run]
-# uncomment the following to omit files during running
-#omit =
-[coverage:report]
-exclude_lines =
-    pragma: no cover
-    def __repr__
-    if self.debug:
-    if settings.DEBUG
-    raise AssertionError
-    raise NotImplementedError
-    if 0:
-    if __name__ == .__main__.:
-    def main
-
-[tox:tox]
-isolated_build = true
-envlist = py39, format, lint, build
-
-[gh-actions]
-python =
-    3.9: py39, format, lint, build
-
-[testenv]
-allowlist_externals = pytest
-extras =
-    test
-passenv = *
-setenv =
-    PYTHONPATH = {toxinidir}
-    PYTHONWARNINGS = ignore
-commands =
-    pytest --cov=unifi_protect_backup --cov-branch --cov-report=xml --cov-report=term-missing tests
-
-[testenv:format]
-allowlist_externals =
-    isort
-    black
-extras =
-    test
-commands =
-    isort unifi_protect_backup
-    black unifi_protect_backup tests
-
-[testenv:lint]
-allowlist_externals =
-    flake8
-    mypy
-extras =
-    test
-commands =
-    flake8 unifi_protect_backup tests
-    mypy unifi_protect_backup tests
-
-[testenv:build]
-allowlist_externals =
-    poetry
-    twine
-extras =
-    dev
-commands =
-    poetry build
-    twine check dist/*
unifi_protect_backup/__init__.py
@@ -1,12 +1,21 @@
 """Top-level package for Unifi Protect Backup."""
 
 __author__ = """sebastian.goscik"""
-__email__ = 'sebastian@goscik.com'
-__version__ = '0.9.3'
+__email__ = "sebastian@goscik.com"
+__version__ = "0.14.0"
 
 from .downloader import VideoDownloader
+from .downloader_experimental import VideoDownloaderExperimental
 from .event_listener import EventListener
 from .purge import Purge
 from .uploader import VideoUploader
-from .missing_event_checker import MissingEventChecker
 
+from .missing_event_checker import MissingEventChecker  # isort: skip
+
+__all__ = [
+    "VideoDownloader",
+    "VideoDownloaderExperimental",
+    "EventListener",
+    "Purge",
+    "VideoUploader",
+    "MissingEventChecker",
+]
unifi_protect_backup/cli.py
@@ -1,18 +1,26 @@
 """Console script for unifi_protect_backup."""
 
 import sys
+import re
 
 import click
 from aiorun import run  # type: ignore
+from dateutil.relativedelta import relativedelta
+
+from uiprotect.data.types import SmartDetectObjectType, SmartDetectAudioType
 
 from unifi_protect_backup import __version__
 from unifi_protect_backup.unifi_protect_backup_core import UnifiProtectBackup
 from unifi_protect_backup.utils import human_readable_to_float
 
-DETECTION_TYPES = ["motion", "person", "vehicle", "ring"]
+DETECTION_TYPES = ["motion", "ring", "line", "fingerprint", "nfc"]
+DETECTION_TYPES += [t for t in SmartDetectObjectType.values() if t not in SmartDetectAudioType.values()]
+DETECTION_TYPES += [f"{t}" for t in SmartDetectAudioType.values()]
 
 
 def _parse_detection_types(ctx, param, value):
     # split columns by ',' and remove whitespace
-    types = [t.strip() for t in value.split(',')]
+    types = [t.strip() for t in value.split(",")]
 
     # validate passed columns
     for t in types:
@@ -22,77 +30,122 @@ def _parse_detection_types(ctx, param, value):
     return types
 
 
+def parse_rclone_retention(ctx, param, retention) -> relativedelta | None:
+    """Parse the rclone `retention` parameter into a relativedelta which can then be used to calculate datetimes."""
+    if retention is None:
+        return None
+
+    matches = {k: int(v) for v, k in re.findall(r"([\d]+)(ms|s|m|h|d|w|M|y)", retention)}
+
+    # Check that we matched the whole string
+    if len(retention) != len("".join([f"{v}{k}" for k, v in matches.items()])):
+        raise click.BadParameter("See here for expected format: https://rclone.org/docs/#time-option")
+
+    return relativedelta(
+        microseconds=matches.get("ms", 0) * 1000,
+        seconds=matches.get("s", 0),
+        minutes=matches.get("m", 0),
+        hours=matches.get("h", 0),
+        days=matches.get("d", 0),
+        weeks=matches.get("w", 0),
+        months=matches.get("M", 0),
+        years=matches.get("y", 0),
+    )
+
+
 @click.command(context_settings=dict(max_content_width=100))
 @click.version_option(__version__)
-@click.option('--address', required=True, envvar='UFP_ADDRESS', help='Address of Unifi Protect instance')
-@click.option('--port', default=443, envvar='UFP_PORT', show_default=True, help='Port of Unifi Protect instance')
-@click.option('--username', required=True, envvar='UFP_USERNAME', help='Username to login to Unifi Protect instance')
-@click.option('--password', required=True, envvar='UFP_PASSWORD', help='Password for Unifi Protect user')
+@click.option("--address", required=True, envvar="UFP_ADDRESS", help="Address of Unifi Protect instance")
+@click.option("--port", default=443, envvar="UFP_PORT", show_default=True, help="Port of Unifi Protect instance")
+@click.option("--username", required=True, envvar="UFP_USERNAME", help="Username to login to Unifi Protect instance")
+@click.option("--password", required=True, envvar="UFP_PASSWORD", help="Password for Unifi Protect user")
 @click.option(
-    '--verify-ssl/--no-verify-ssl',
+    "--verify-ssl/--no-verify-ssl",
     default=True,
     show_default=True,
-    envvar='UFP_SSL_VERIFY',
+    envvar="UFP_SSL_VERIFY",
     help="Set if you do not have a valid HTTPS Certificate for your instance",
 )
 @click.option(
-    '--rclone-destination',
+    "--rclone-destination",
     required=True,
-    envvar='RCLONE_DESTINATION',
+    envvar="RCLONE_DESTINATION",
     help="`rclone` destination path in the format {rclone remote}:{path on remote}."
     " E.g. `gdrive:/backups/unifi_protect`",
 )
 @click.option(
-    '--retention',
-    default='7d',
+    "--retention",
+    default="7d",
     show_default=True,
-    envvar='RCLONE_RETENTION',
-    help="How long should event clips be backed up for. Format as per the `--max-age` argument of "
-    "`rclone` (https://rclone.org/filtering/#max-age-don-t-transfer-any-file-older-than-this)",
+    envvar="RCLONE_RETENTION",
+    help="How long should event clips be backed up for. Format as per the `--max-age` argument of `rclone` "
+    "(https://rclone.org/filtering/#max-age-don-t-transfer-any-file-older-than-this)",
+    callback=parse_rclone_retention,
 )
 @click.option(
-    '--rclone-args',
-    default='',
-    envvar='RCLONE_ARGS',
+    "--missing-range",
+    default=None,
+    envvar="MISSING_RANGE",
+    help="How far back should missing events be checked for. Defaults to the same as the retention time. "
+    "Format as per the `--max-age` argument of `rclone` "
+    "(https://rclone.org/filtering/#max-age-don-t-transfer-any-file-older-than-this)",
+    callback=parse_rclone_retention,
+)
+@click.option(
+    "--rclone-args",
+    default="",
+    envvar="RCLONE_ARGS",
     help="Optional extra arguments to pass to `rclone rcat` directly. Common usage for this would "
     "be to set a bandwidth limit, for example.",
 )
 @click.option(
-    '--rclone-purge-args',
-    default='',
-    envvar='RCLONE_PURGE_ARGS',
+    "--rclone-purge-args",
+    default="",
+    envvar="RCLONE_PURGE_ARGS",
     help="Optional extra arguments to pass to `rclone delete` directly. Common usage for this would "
     "be to execute a permanent delete instead of using the recycle bin on a destination. "
     "Google Drive example: `--drive-use-trash=false`",
 )
 @click.option(
-    '--detection-types',
-    envvar='DETECTION_TYPES',
-    default=','.join(DETECTION_TYPES),
+    "--detection-types",
+    envvar="DETECTION_TYPES",
+    default=",".join(DETECTION_TYPES),
     show_default=True,
     help="A comma separated list of which types of detections to backup. "
     f"Valid options are: {', '.join([f'`{t}`' for t in DETECTION_TYPES])}",
     callback=_parse_detection_types,
 )
 @click.option(
-    '--ignore-camera',
-    'ignore_cameras',
+    "--ignore-camera",
+    "ignore_cameras",
     multiple=True,
     envvar="IGNORE_CAMERAS",
     help="IDs of cameras for which events should not be backed up. Use multiple times to ignore "
-    "multiple IDs. If being set as an environment variable the IDs should be separated by whitespace.",
+    "multiple IDs. If being set as an environment variable the IDs should be separated by whitespace. "
+    "Alternatively, use a Unifi user with a role which has access restricted to the subset of cameras "
+    "that you wish to backup.",
 )
 @click.option(
-    '--file-structure-format',
-    envvar='FILE_STRUCTURE_FORMAT',
+    "--camera",
+    "cameras",
+    multiple=True,
+    envvar="CAMERAS",
+    help="IDs of *ONLY* cameras for which events should be backed up. Use multiple times to include "
+    "multiple IDs. If being set as an environment variable the IDs should be separated by whitespace. "
+    "Alternatively, use a Unifi user with a role which has access restricted to the subset of cameras "
+    "that you wish to backup.",
+)
+@click.option(
+    "--file-structure-format",
+    envvar="FILE_STRUCTURE_FORMAT",
     default="{camera_name}/{event.start:%Y-%m-%d}/{event.end:%Y-%m-%dT%H-%M-%S} {detection_type}.mp4",
     show_default=True,
     help="A Python format string used to generate the file structure/name on the rclone remote."
     "For details of the fields available, see the projects `README.md` file.",
 )
 @click.option(
-    '-v',
-    '--verbose',
+    "-v",
+    "--verbose",
     count=True,
     help="How verbose the logging output should be."
     """
@@ -112,37 +165,38 @@ all warnings, and websocket data
 """,
 )
 @click.option(
-    '--sqlite_path',
-    default='events.sqlite',
-    envvar='SQLITE_PATH',
+    "--sqlite_path",
+    default="events.sqlite",
+    envvar="SQLITE_PATH",
     help="Path to the SQLite database to use/create",
 )
 @click.option(
-    '--color-logging/--plain-logging',
+    "--color-logging/--plain-logging",
     default=False,
     show_default=True,
-    envvar='COLOR_LOGGING',
+    envvar="COLOR_LOGGING",
     help="Set if you want to use color in logging output",
 )
 @click.option(
-    '--download-buffer-size',
-    default='512MiB',
+    "--download-buffer-size",
+    default="512MiB",
     show_default=True,
-    envvar='DOWNLOAD_BUFFER_SIZE',
+    envvar="DOWNLOAD_BUFFER_SIZE",
     help='How big the download buffer should be (you can use suffixes like "B", "KiB", "MiB", "GiB")',
-    callback=lambda ctx, param, value: human_readable_to_float(value),
+    callback=lambda ctx, param, value: int(human_readable_to_float(value)),
 )
 @click.option(
-    '--purge_interval',
-    default='1d',
+    "--purge_interval",
+    default="1d",
     show_default=True,
-    envvar='PURGE_INTERVAL',
+    envvar="PURGE_INTERVAL",
     help="How frequently to check for file to purge.\n\nNOTE: Can create a lot of API calls, so be careful if "
     "your cloud provider charges you per api call",
     callback=parse_rclone_retention,
 )
 @click.option(
-    '--apprise-notifier',
-    'apprise_notifiers',
+    "--apprise-notifier",
+    "apprise_notifiers",
     multiple=True,
     envvar="APPRISE_NOTIFIERS",
     help="""\b
@@ -160,20 +214,76 @@ If no tags are specified, it defaults to ERROR
 More details about supported platforms can be found here: https://github.com/caronc/apprise""",
 )
 @click.option(
-    '--skip-missing',
+    "--skip-missing",
     default=False,
     show_default=True,
     is_flag=True,
-    envvar='SKIP_MISSING',
+    envvar="SKIP_MISSING",
     help="""\b
 If set, events which are 'missing' at the start will be ignored.
 Subsequent missing events will be downloaded (e.g. a missed event)
 """,
 )
+@click.option(
+    "--download-rate-limit",
+    default=None,
+    show_default=True,
+    envvar="DOWNLOAD_RATELIMIT",
+    type=float,
+    help="Limit how events can be downloaded in one minute. Disabled by default",
+)
+@click.option(
+    "--max-event-length",
+    default=2 * 60 * 60,
+    show_default=True,
+    envvar="MAX_EVENT_LENGTH",
+    type=int,
+    help="Only download events shorter than this maximum length, in seconds",
+)
+@click.option(
+    "--experimental-downloader",
+    "use_experimental_downloader",
+    default=False,
+    show_default=True,
+    is_flag=True,
+    envvar="EXPERIMENTAL_DOWNLOADER",
+    help="""\b
+If set, a new experimental download mechanism will be used to match
+what the web UI does. This might be more stable if you are experiencing
+a lot of failed downloads with the default downloader.
+""",
+)
+@click.option(
+    "--parallel-uploads",
+    default=1,
+    show_default=True,
+    envvar="PARALLEL_UPLOADS",
+    type=int,
+    help="Max number of parallel uploads to allow",
+)
 def main(**kwargs):
-    """A Python based tool for backing up Unifi Protect event clips as they occur."""
+    """Python based tool for backing up Unifi Protect event clips as they occur."""
+    try:
+        # Validate only one of the camera select arguments was given
+        if kwargs.get("cameras") and kwargs.get("ignore_cameras"):
+            click.echo(
+                "Error: --camera and --ignore-camera options are mutually exclusive. "
+                "Please use only one of these options.",
+                err=True,
+            )
+            raise SystemExit(200)  # throw 200 = arg error, service will not be restarted (docker)
+
+        if kwargs.get("missing_range") is None:
+            kwargs["missing_range"] = kwargs.get("retention")
+
+        # Only create the event listener and run if validation passes
+        event_listener = UnifiProtectBackup(**kwargs)
+        run(event_listener.start(), stop_on_unhandled_errors=True)
+    except SystemExit as e:
+        sys.exit(e.code)
+    except Exception as e:
+        click.echo(f"Error: {str(e)}", err=True)
+        sys.exit(1)
 
 
 if __name__ == "__main__":
|
||||
@@ -10,10 +10,11 @@ from typing import Optional
import aiosqlite
import pytz
from aiohttp.client_exceptions import ClientPayloadError
from aiolimiter import AsyncLimiter
from expiring_dict import ExpiringDict  # type: ignore
from pyunifiprotect import ProtectApiClient
from pyunifiprotect.data.nvr import Event
from pyunifiprotect.data.types import EventType
from uiprotect import ProtectApiClient
from uiprotect.data.nvr import Event
from uiprotect.data.types import EventType

from unifi_protect_backup.utils import (
    SubprocessException,
@@ -26,16 +27,16 @@ from unifi_protect_backup.utils import (


async def get_video_length(video: bytes) -> float:
    """Uses ffprobe to get the length of the video file passed in as a byte stream."""
    """Use ffprobe to get the length of the video file passed in as a byte stream."""
    returncode, stdout, stderr = await run_command(
        'ffprobe -v quiet -show_streams -select_streams v:0 -of json -', video
        "ffprobe -v quiet -show_streams -select_streams v:0 -of json -", video
    )

    if returncode != 0:
        raise SubprocessException(stdout, stderr, returncode)

    json_data = json.loads(stdout)
    return float(json_data['streams'][0]['duration'])
    return float(json_data["streams"][0]["duration"])


class VideoDownloader:
@@ -48,6 +49,8 @@ class VideoDownloader:
        download_queue: asyncio.Queue,
        upload_queue: VideoQueue,
        color_logging: bool,
        download_rate_limit: float,
        max_event_length: timedelta,
    ):
        """Init.

@@ -57,6 +60,9 @@ class VideoDownloader:
            download_queue (asyncio.Queue): Queue to get event details from
            upload_queue (VideoQueue): Queue to place downloaded videos on
            color_logging (bool): Whether or not to add color to logging output
            download_rate_limit (float): Limit how many events can be downloaded in one minute
            max_event_length (timedelta): Maximum length in seconds for an event to be considered valid and downloaded

        """
        self._protect: ProtectApiClient = protect
        self._db: aiosqlite.Connection = db
@@ -64,13 +70,16 @@
        self.upload_queue: VideoQueue = upload_queue
        self.current_event = None
        self._failures = ExpiringDict(60 * 60 * 12)  # Time to live = 12h
        self._download_rate_limit = download_rate_limit
        self._max_event_length = max_event_length
        self._limiter = AsyncLimiter(self._download_rate_limit) if self._download_rate_limit is not None else None

        self.base_logger = logging.getLogger(__name__)
        setup_event_logger(self.base_logger, color_logging)
        self.logger = logging.LoggerAdapter(self.base_logger, {'event': ''})
        self.logger = logging.LoggerAdapter(self.base_logger, {"event": ""})

        # Check if `ffprobe` is available
        ffprobe = shutil.which('ffprobe')
        ffprobe = shutil.which("ffprobe")
        if ffprobe is not None:
            self.logger.debug(f"ffprobe found: {ffprobe}")
            self._has_ffprobe = True
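
aiolimiter's AsyncLimiter defaults to a 60 second time period, which is why the plain `AsyncLimiter(rate)` call above reads as "events per minute". A minimal self-contained example of the leaky-bucket behaviour:

    import asyncio
    from aiolimiter import AsyncLimiter

    # AsyncLimiter(6) allows at most 6 acquisitions per 60 second window.
    limiter = AsyncLimiter(6)

    async def download_one(n: int) -> None:
        await limiter.acquire()  # blocks once the per-minute budget is spent
        print(f"downloading event {n}")

    async def main() -> None:
        await asyncio.gather(*(download_one(n) for n in range(12)))

    asyncio.run(main())
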
@@ -78,18 +87,23 @@
            self._has_ffprobe = False

    async def start(self):
        """Main loop."""
        """Run main loop."""
        self.logger.info("Starting Downloader")
        while True:
            if self._limiter:
                self.logger.debug("Waiting for rate limit")
                await self._limiter.acquire()

            try:
                # Wait for unifi protect to be connected
                await self._protect.connect_event.wait()

                event = await self.download_queue.get()
                self.current_event = event
                self.logger = logging.LoggerAdapter(self.base_logger, {'event': f' [{event.id}]'})

                # Fix timezones since pyunifiprotect sets all timestamps to UTC. Instead localize them to
                self.current_event = event
                self.logger = logging.LoggerAdapter(self.base_logger, {"event": f" [{event.id}]"})

                # Fix timezones since uiprotect sets all timestamps to UTC. Instead localize them to
                # the timezone of the unifi protect NVR.
                event.start = event.start.replace(tzinfo=pytz.utc).astimezone(self._protect.bootstrap.nvr.timezone)
                event.end = event.end.replace(tzinfo=pytz.utc).astimezone(self._protect.bootstrap.nvr.timezone)
@@ -100,15 +114,20 @@
                output_queue_max_size = human_readable_size(self.upload_queue.maxsize)
                self.logger.debug(f"Video Download Buffer: {output_queue_current_size}/{output_queue_max_size}")
                self.logger.debug(f" Camera: {await get_camera_name(self._protect, event.camera_id)}")
                if event.type == EventType.SMART_DETECT:
                    self.logger.debug(f" Type: {event.type} ({', '.join(event.smart_detect_types)})")
                if event.type in [EventType.SMART_DETECT, EventType.SMART_AUDIO_DETECT]:
                    self.logger.debug(f" Type: {event.type.value} ({', '.join(event.smart_detect_types)})")
                else:
                    self.logger.debug(f" Type: {event.type}")
                    self.logger.debug(f" Type: {event.type.value}")
                self.logger.debug(f" Start: {event.start.strftime('%Y-%m-%dT%H-%M-%S')} ({event.start.timestamp()})")
                self.logger.debug(f" End: {event.end.strftime('%Y-%m-%dT%H-%M-%S')} ({event.end.timestamp()})")
                duration = (event.end - event.start).total_seconds()
                self.logger.debug(f" Duration: {duration}s")

                # Skip invalid events
                if not self._valid_event(event):
                    await self._ignore_event(event)
                    continue

                # Unifi protect does not return full video clips if the clip is requested too soon.
                # There are two issues at play here:
                #  - Protect will only cut a clip on a keyframe, which happens every 5s
@@ -137,15 +156,7 @@
                        self.logger.error(
                            "Event has failed to download 10 times in a row. Permanently ignoring this event"
                        )

                        # ignore event
                        await self._db.execute(
                            "INSERT INTO events VALUES "
                            f"('{event.id}', '{event.type}', '{event.camera_id}',"
                            f"'{event.start.timestamp()}', '{event.end.timestamp()}')"
                        )
                        await self._db.commit()

                        await self._ignore_event(event)
                    continue

                # Remove successfully downloaded event from failures list
@@ -164,7 +175,7 @@
                self.logger.error(f"Unexpected exception occurred, abandoning event {event.id}:", exc_info=e)

    async def _download(self, event: Event) -> Optional[bytes]:
        """Downloads the video clip for the given event."""
        """Download the video clip for the given event."""
        self.logger.debug(" Downloading video...")
        for x in range(5):
            assert isinstance(event.camera_id, str)
@@ -175,7 +186,7 @@
                assert isinstance(video, bytes)
                break
            except (AssertionError, ClientPayloadError, TimeoutError) as e:
                self.logger.warning(f" Failed download attempt {x+1}, retrying in 1s", exc_info=e)
                self.logger.warning(f" Failed download attempt {x + 1}, retrying in 1s", exc_info=e)
                await asyncio.sleep(1)
        else:
            self.logger.error(f"Download failed after 5 attempts, abandoning event {event.id}:")
@@ -184,6 +195,15 @@
        self.logger.debug(f" Downloaded video size: {human_readable_size(len(video))}")
        return video

    async def _ignore_event(self, event):
        self.logger.warning("Ignoring event")
        await self._db.execute(
            "INSERT INTO events VALUES "
            f"('{event.id}', '{event.type.value}', '{event.camera_id}',"
            f"'{event.start.timestamp()}', '{event.end.timestamp()}')"
        )
        await self._db.commit()

    async def _check_video_length(self, video, duration):
        """Check if the downloaded event is at least the length of the event, warn otherwise.

@@ -191,10 +211,18 @@
        """
        try:
            downloaded_duration = await get_video_length(video)
            msg = f" Downloaded video length: {downloaded_duration:.3f}s" f"({downloaded_duration - duration:+.3f}s)"
            msg = f" Downloaded video length: {downloaded_duration:.3f}s ({downloaded_duration - duration:+.3f}s)"
            if downloaded_duration < duration:
                self.logger.warning(msg)
            else:
                self.logger.debug(msg)
        except SubprocessException as e:
            self.logger.warning(" `ffprobe` failed", exc_info=e)

    def _valid_event(self, event):
        duration = event.end - event.start
        if duration > self._max_event_length:
            self.logger.warning(f"Event longer ({duration}) than max allowed length {self._max_event_length}")
            return False

        return True

unifi_protect_backup/downloader_experimental.py (new file, 239 lines)
@@ -0,0 +1,239 @@
# noqa: D100

import asyncio
import json
import logging
import shutil
from datetime import datetime, timedelta, timezone
from typing import Optional

import aiosqlite
import pytz
from aiohttp.client_exceptions import ClientPayloadError
from aiolimiter import AsyncLimiter
from expiring_dict import ExpiringDict  # type: ignore
from uiprotect import ProtectApiClient
from uiprotect.data.nvr import Event
from uiprotect.data.types import EventType

from unifi_protect_backup.utils import (
    SubprocessException,
    VideoQueue,
    get_camera_name,
    human_readable_size,
    run_command,
    setup_event_logger,
)


async def get_video_length(video: bytes) -> float:
    """Use ffprobe to get the length of the video file passed in as a byte stream."""
    returncode, stdout, stderr = await run_command(
        "ffprobe -v quiet -show_streams -select_streams v:0 -of json -", video
    )

    if returncode != 0:
        raise SubprocessException(stdout, stderr, returncode)

    json_data = json.loads(stdout)
    return float(json_data["streams"][0]["duration"])


class VideoDownloaderExperimental:
    """Downloads event video clips from Unifi Protect."""

    def __init__(
        self,
        protect: ProtectApiClient,
        db: aiosqlite.Connection,
        download_queue: asyncio.Queue,
        upload_queue: VideoQueue,
        color_logging: bool,
        download_rate_limit: float,
        max_event_length: timedelta,
    ):
        """Init.

        Args:
            protect (ProtectApiClient): UniFi Protect API client to use
            db (aiosqlite.Connection): Async SQLite database to check for missing events
            download_queue (asyncio.Queue): Queue to get event details from
            upload_queue (VideoQueue): Queue to place downloaded videos on
            color_logging (bool): Whether or not to add color to logging output
            download_rate_limit (float): Limit how many events can be downloaded in one minute
            max_event_length (timedelta): Maximum length in seconds for an event to be considered valid and downloaded

        """
        self._protect: ProtectApiClient = protect
        self._db: aiosqlite.Connection = db
        self.download_queue: asyncio.Queue = download_queue
        self.upload_queue: VideoQueue = upload_queue
        self.current_event = None
        self._failures = ExpiringDict(60 * 60 * 12)  # Time to live = 12h
        self._download_rate_limit = download_rate_limit
        self._max_event_length = max_event_length
        self._limiter = AsyncLimiter(self._download_rate_limit) if self._download_rate_limit is not None else None

        self.base_logger = logging.getLogger(__name__)
        setup_event_logger(self.base_logger, color_logging)
        self.logger = logging.LoggerAdapter(self.base_logger, {"event": ""})

        # Check if `ffprobe` is available
        ffprobe = shutil.which("ffprobe")
        if ffprobe is not None:
            self.logger.debug(f"ffprobe found: {ffprobe}")
            self._has_ffprobe = True
        else:
            self._has_ffprobe = False

    async def start(self):
        """Run main loop."""
        self.logger.info("Starting Downloader")
        while True:
            if self._limiter:
                self.logger.debug("Waiting for rate limit")
                await self._limiter.acquire()

            try:
                # Wait for unifi protect to be connected
                await self._protect.connect_event.wait()

                event = await self.download_queue.get()

                self.current_event = event
                self.logger = logging.LoggerAdapter(self.base_logger, {"event": f" [{event.id}]"})

                # Fix timezones since uiprotect sets all timestamps to UTC. Instead localize them to
                # the timezone of the unifi protect NVR.
                event.start = event.start.replace(tzinfo=pytz.utc).astimezone(self._protect.bootstrap.nvr.timezone)
                event.end = event.end.replace(tzinfo=pytz.utc).astimezone(self._protect.bootstrap.nvr.timezone)

                self.logger.info(f"Downloading event: {event.id}")
                self.logger.debug(f"Remaining Download Queue: {self.download_queue.qsize()}")
                output_queue_current_size = human_readable_size(self.upload_queue.qsize())
                output_queue_max_size = human_readable_size(self.upload_queue.maxsize)
                self.logger.debug(f"Video Download Buffer: {output_queue_current_size}/{output_queue_max_size}")
                self.logger.debug(f" Camera: {await get_camera_name(self._protect, event.camera_id)}")
                if event.type in [EventType.SMART_DETECT, EventType.SMART_AUDIO_DETECT]:
                    self.logger.debug(f" Type: {event.type.value} ({', '.join(event.smart_detect_types)})")
                else:
                    self.logger.debug(f" Type: {event.type.value}")
                self.logger.debug(f" Start: {event.start.strftime('%Y-%m-%dT%H-%M-%S')} ({event.start.timestamp()})")
                self.logger.debug(f" End: {event.end.strftime('%Y-%m-%dT%H-%M-%S')} ({event.end.timestamp()})")
                duration = (event.end - event.start).total_seconds()
                self.logger.debug(f" Duration: {duration}s")

                # Skip invalid events
                if not self._valid_event(event):
                    await self._ignore_event(event)
                    continue

                # Unifi protect does not return full video clips if the clip is requested too soon.
                # There are two issues at play here:
                #  - Protect will only cut a clip on a keyframe, which happens every 5s
                #  - Protect's pipeline needs a finite amount of time to make a clip available
                # So we will wait 1.5x the keyframe interval to ensure that there is always ample video
                # stored and Protect can return a full clip (which should be at least the length requested,
                # but often longer)
                time_since_event_ended = datetime.utcnow().replace(tzinfo=timezone.utc) - event.end
                sleep_time = (timedelta(seconds=5 * 1.5) - time_since_event_ended).total_seconds()
                if sleep_time > 0:
                    self.logger.debug(f" Sleeping ({sleep_time}s) to ensure clip is ready to download...")
                    await asyncio.sleep(sleep_time)

                try:
                    video = await self._download(event)
                    assert video is not None
                except Exception as e:
                    # Increment failure count
                    if event.id not in self._failures:
                        self._failures[event.id] = 1
                    else:
                        self._failures[event.id] += 1
                    self.logger.warning(
                        f"Event failed download attempt {self._failures[event.id]}",
                        exc_info=e,
                    )

                    if self._failures[event.id] >= 10:
                        self.logger.error(
                            "Event has failed to download 10 times in a row. Permanently ignoring this event"
                        )
                        await self._ignore_event(event)
                    continue

                # Remove successfully downloaded event from failures list
                if event.id in self._failures:
                    del self._failures[event.id]

                # Get the actual length of the downloaded video using ffprobe
                if self._has_ffprobe:
                    await self._check_video_length(video, duration)

                await self.upload_queue.put((event, video))
                self.logger.debug("Added to upload queue")
                self.current_event = None

            except Exception as e:
                self.logger.error(
                    f"Unexpected exception occurred, abandoning event {event.id}:",
                    exc_info=e,
                )

    async def _download(self, event: Event) -> Optional[bytes]:
        """Download the video clip for the given event."""
        self.logger.debug(" Downloading video...")
        for x in range(5):
            assert isinstance(event.camera_id, str)
            assert isinstance(event.start, datetime)
            assert isinstance(event.end, datetime)
            try:
                prepared_video_file = await self._protect.prepare_camera_video(  # type: ignore
                    event.camera_id, event.start, event.end
                )
                video = await self._protect.download_camera_video(  # type: ignore
                    event.camera_id, prepared_video_file["fileName"]
                )
                assert isinstance(video, bytes)
                break
            except (AssertionError, ClientPayloadError, TimeoutError) as e:
                self.logger.warning(f" Failed download attempt {x + 1}, retrying in 1s", exc_info=e)
                await asyncio.sleep(1)
        else:
            self.logger.error(f"Download failed after 5 attempts, abandoning event {event.id}:")
            return None

        self.logger.debug(f" Downloaded video size: {human_readable_size(len(video))}")
        return video

    async def _ignore_event(self, event):
        self.logger.warning("Ignoring event")
        await self._db.execute(
            "INSERT INTO events VALUES "
            f"('{event.id}', '{event.type.value}', '{event.camera_id}',"
            f"'{event.start.timestamp()}', '{event.end.timestamp()}')"
        )
        await self._db.commit()

    async def _check_video_length(self, video, duration):
        """Check if the downloaded event is at least the length of the event, warn otherwise.

        It is expected for events to regularly be slightly longer than the event specified
        """
        try:
            downloaded_duration = await get_video_length(video)
            msg = f" Downloaded video length: {downloaded_duration:.3f}s ({downloaded_duration - duration:+.3f}s)"
            if downloaded_duration < duration:
                self.logger.warning(msg)
            else:
                self.logger.debug(msg)
        except SubprocessException as e:
            self.logger.warning(" `ffprobe` failed", exc_info=e)

    def _valid_event(self, event):
        duration = event.end - event.start
        if duration > self._max_event_length:
            self.logger.warning(f"Event longer ({duration}) than max allowed length {self._max_event_length}")
            return False

        return True
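
The experimental downloader mirrors the web UI's two-step export: ask Protect to prepare a clip, then fetch the prepared file. A hedged usage sketch of that flow, relying on the patched client methods added in uiprotect_patch.py later in this diff; the NVR address, credentials, and camera ID are placeholders:

    import asyncio
    from datetime import datetime, timedelta

    from uiprotect import ProtectApiClient

    from unifi_protect_backup.uiprotect_patch import monkey_patch_experimental_downloader

    # Patch prepare_camera_video/download_camera_video onto the client class first.
    monkey_patch_experimental_downloader()

    async def fetch_clip() -> bytes:
        # Placeholder connection details; substitute your own NVR address/credentials.
        protect = ProtectApiClient("192.168.1.1", 443, "username", "password", verify_ssl=False)
        await protect.update()

        end = datetime.now()
        start = end - timedelta(minutes=1)
        prepared = await protect.prepare_camera_video("<camera_id>", start, end)
        video = await protect.download_camera_video("<camera_id>", prepared["fileName"])
        await protect.close_session()
        return video

    video = asyncio.run(fetch_clip())
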
@@ -3,12 +3,14 @@
import asyncio
import logging
from time import sleep
from typing import List
from typing import Set

from pyunifiprotect.api import ProtectApiClient
from pyunifiprotect.data.nvr import Event
from pyunifiprotect.data.types import EventType
from pyunifiprotect.data.websocket import WSAction, WSSubscriptionMessage
from uiprotect.api import ProtectApiClient
from uiprotect.websocket import WebsocketState
from uiprotect.data.nvr import Event
from uiprotect.data.websocket import WSAction, WSSubscriptionMessage

from unifi_protect_backup.utils import wanted_event_type

logger = logging.getLogger(__name__)

@@ -20,63 +22,51 @@ class EventListener:
        self,
        event_queue: asyncio.Queue,
        protect: ProtectApiClient,
        detection_types: List[str],
        ignore_cameras: List[str],
        detection_types: Set[str],
        ignore_cameras: Set[str],
        cameras: Set[str],
    ):
        """Init.

        Args:
            event_queue (asyncio.Queue): Queue to place events to backup on
            protect (ProtectApiClient): UniFi Protect API client to use
            detection_types (List[str]): Desired Event detection types to look for
            ignore_cameras (List[str]): Camera IDs to ignore events from
            detection_types (Set[str]): Desired Event detection types to look for
            ignore_cameras (Set[str]): Camera IDs to ignore events from
            cameras (Set[str]): Camera IDs to ONLY include events from

        """
        self._event_queue: asyncio.Queue = event_queue
        self._protect: ProtectApiClient = protect
        self._unsub = None
        self.detection_types: List[str] = detection_types
        self.ignore_cameras: List[str] = ignore_cameras
        self._unsub_websocketstate = None
        self.detection_types: Set[str] = detection_types
        self.ignore_cameras: Set[str] = ignore_cameras
        self.cameras: Set[str] = cameras

    async def start(self):
        """Main Loop."""
        """Run main loop."""
        logger.debug("Subscribed to websocket")
        self._unsub_websocket_state = self._protect.subscribe_websocket_state(self._websocket_state_callback)
        self._unsub = self._protect.subscribe_websocket(self._websocket_callback)

        while True:
            await asyncio.sleep(60)
            await self._check_websocket_and_reconnect()

    def _websocket_callback(self, msg: WSSubscriptionMessage) -> None:
        """Callback for "EVENT" websocket messages.
        """'EVENT' websocket message callback.

        Filters the incoming events, and puts completed events onto the download queue

        Args:
            msg (Event): Incoming event data

        """
        logger.websocket_data(msg)  # type: ignore

        assert isinstance(msg.new_obj, Event)
        if msg.action != WSAction.UPDATE:
            return
        if msg.new_obj.camera_id in self.ignore_cameras:
        if "end" not in msg.changed_data:
            return
        if msg.new_obj.end is None:
            return
        if msg.new_obj.type not in [EventType.MOTION, EventType.SMART_DETECT, EventType.RING]:
            return
        if msg.new_obj.type is EventType.MOTION and "motion" not in self.detection_types:
            logger.extra_debug(f"Skipping unwanted motion detection event: {msg.new_obj.id}")  # type: ignore
            return
        if msg.new_obj.type is EventType.RING and "ring" not in self.detection_types:
            logger.extra_debug(f"Skipping unwanted ring event: {msg.new_obj.id}")  # type: ignore
            return
        elif msg.new_obj.type is EventType.SMART_DETECT:
            for event_smart_detection_type in msg.new_obj.smart_detect_types:
                if event_smart_detection_type not in self.detection_types:
                    logger.extra_debug(  # type: ignore
                        f"Skipping unwanted {event_smart_detection_type} detection event: {msg.new_obj.id}"
                    )
        if not wanted_event_type(msg.new_obj, self.detection_types, self.cameras, self.ignore_cameras):
            return

        # TODO: Will this even work? I think it will block the async loop
@@ -89,42 +79,21 @@
        # Unifi protect has started sending the event id in the websocket as a {event_id}-{camera_id} but when the
        # API is queried they only have {event_id}. Keeping track of both of these would be complicated so
        # instead we fudge the ID here to match what the API returns
        if '-' in msg.new_obj.id:
            msg.new_obj.id = msg.new_obj.id.split('-')[0]
        if "-" in msg.new_obj.id:
            msg.new_obj.id = msg.new_obj.id.split("-")[0]

        logger.debug(f"Adding event {msg.new_obj.id} to queue (Current download queue={self._event_queue.qsize()})")

    async def _check_websocket_and_reconnect(self):
        """Checks for websocket disconnect and triggers a reconnect."""
        logger.extra_debug("Checking the status of the websocket...")
        if self._protect.check_ws():
            logger.extra_debug("Websocket is connected.")
        else:
            self._protect.connect_event.clear()
            logger.warning("Lost connection to Unifi Protect.")
    def _websocket_state_callback(self, state: WebsocketState) -> None:
        """Websocket state message callback.

            # Unsubscribe, close the session.
            self._unsub()
            await self._protect.close_session()
        Flags the websocket for reconnection

            while True:
                logger.warning("Attempting reconnect...")
        Args:
            state (WebsocketState): new state of the websocket

                try:
                    # Start the pyunifiprotect connection by calling `update`
                    await self._protect.close_session()
                    self._protect._bootstrap = None
                    await self._protect.update(force=True)
                    if self._protect.check_ws():
                        self._unsub = self._protect.subscribe_websocket(self._websocket_callback)
                        break
                    else:
                        logger.error("Unable to establish connection to Unifi Protect")
                except Exception as e:
                    logger.error("Unexpected exception occurred while trying to reconnect:", exc_info=e)

                # Back off for a little while
                await asyncio.sleep(10)

            self._protect.connect_event.set()
            logger.info("Re-established connection to Unifi Protect and to the websocket.")
        """
        if state == WebsocketState.DISCONNECTED:
            logger.error("Unifi Protect Websocket lost connection. Reconnecting...")
        elif state == WebsocketState.CONNECTED:
            logger.info("Unifi Protect Websocket connection restored")
@@ -3,15 +3,16 @@
import asyncio
import logging
from datetime import datetime
from typing import List
from typing import AsyncIterator, List, Set

import aiosqlite
from dateutil.relativedelta import relativedelta
from pyunifiprotect import ProtectApiClient
from pyunifiprotect.data.nvr import Event
from pyunifiprotect.data.types import EventType
from uiprotect import ProtectApiClient
from uiprotect.data.nvr import Event
from uiprotect.data.types import EventType

from unifi_protect_backup import VideoDownloader, VideoUploader
from unifi_protect_backup.utils import EVENT_TYPES_MAP, wanted_event_type

logger = logging.getLogger(__name__)

@@ -25,10 +26,11 @@ class MissingEventChecker:
        db: aiosqlite.Connection,
        download_queue: asyncio.Queue,
        downloader: VideoDownloader,
        uploader: VideoUploader,
        uploaders: List[VideoUploader],
        retention: relativedelta,
        detection_types: List[str],
        ignore_cameras: List[str],
        detection_types: Set[str],
        ignore_cameras: Set[str],
        cameras: Set[str],
        interval: int = 60 * 5,
    ) -> None:
        """Init.
@@ -38,30 +40,51 @@
            db (aiosqlite.Connection): Async SQLite database to check for missing events
            download_queue (asyncio.Queue): Download queue to check for on-going downloads
            downloader (VideoDownloader): Downloader to check for on-going downloads
            uploader (VideoUploader): Uploader to check for on-going uploads
            uploaders (List[VideoUploader]): Uploaders to check for on-going uploads
            retention (relativedelta): Retention period to limit search window
            detection_types (List[str]): Detection types wanted to limit search
            ignore_cameras (List[str]): Ignored camera IDs to limit search
            detection_types (Set[str]): Detection types wanted to limit search
            ignore_cameras (Set[str]): Ignored camera IDs to limit search
            cameras (Set[str]): Included (ONLY) camera IDs to limit search
            interval (int): How frequently, in seconds, to check for missing events

        """
        self._protect: ProtectApiClient = protect
        self._db: aiosqlite.Connection = db
        self._download_queue: asyncio.Queue = download_queue
        self._downloader: VideoDownloader = downloader
        self._uploader: VideoUploader = uploader
        self._uploaders: List[VideoUploader] = uploaders
        self.retention: relativedelta = retention
        self.detection_types: List[str] = detection_types
        self.ignore_cameras: List[str] = ignore_cameras
        self.detection_types: Set[str] = detection_types
        self.ignore_cameras: Set[str] = ignore_cameras
        self.cameras: Set[str] = cameras
        self.interval: int = interval

    async def _get_missing_events(self) -> List[Event]:
    async def _get_missing_events(self) -> AsyncIterator[Event]:
        start_time = datetime.now() - self.retention
        end_time = datetime.now()
        chunk_size = 500

        while True:
            # Get list of events that need to be backed up from unifi protect
        unifi_events = await self._protect.get_events(
            start=datetime.now() - self.retention,
            end=datetime.now(),
            types=[EventType.MOTION, EventType.SMART_DETECT, EventType.RING],
            logger.extra_debug(f"Fetching events for interval: {start_time} - {end_time}")  # type: ignore
            events_chunk = await self._protect.get_events(
                start=start_time,
                end=end_time,
                types=list(EVENT_TYPES_MAP.keys()),
                limit=chunk_size,
        )
        unifi_events = {event.id: event for event in unifi_events}

            if not events_chunk:
                break  # There were no events to backup

            # Filter out on-going events
            unifi_events = {event.id: event for event in events_chunk if event.end is not None}

            if not unifi_events:
                break  # No completed events to process

            # Next chunk's start time should be the start of the oldest complete event in the current chunk
            start_time = max([event.start for event in unifi_events.values() if event.end is not None])

            # Get list of events that have been backed up from the database

@@ -76,75 +99,69 @@
            if current_download is not None:
                downloading_event_ids.add(current_download.id)

        uploading_event_ids = {event.id for event, video in self._uploader.upload_queue._queue}  # type: ignore
        current_upload = self._uploader.current_event
            uploading_event_ids = {event.id for event, video in self._downloader.upload_queue._queue}  # type: ignore
            for uploader in self._uploaders:
                current_upload = uploader.current_event
                if current_upload is not None:
                    uploading_event_ids.add(current_upload.id)

        missing_event_ids = set(unifi_events.keys()) - (db_event_ids | downloading_event_ids | uploading_event_ids)
            existing_ids = db_event_ids | downloading_event_ids | uploading_event_ids
            missing_events = {
                event_id: event for event_id, event in unifi_events.items() if event_id not in existing_ids
            }

        def wanted_event_type(event_id):
            event = unifi_events[event_id]
            if event.start is None or event.end is None:
                return False  # This event is still on-going
            if event.camera_id in self.ignore_cameras:
                return False
            if event.type is EventType.MOTION and "motion" not in self.detection_types:
                return False
            if event.type is EventType.RING and "ring" not in self.detection_types:
                return False
            elif event.type is EventType.SMART_DETECT:
                for event_smart_detection_type in event.smart_detect_types:
                    if event_smart_detection_type not in self.detection_types:
                        return False
            return True
            # Exclude events of unwanted types
            wanted_events = {
                event_id: event
                for event_id, event in missing_events.items()
                if wanted_event_type(event, self.detection_types, self.cameras, self.ignore_cameras)
            }

        wanted_event_ids = set(filter(wanted_event_type, missing_event_ids))
            # Yield events one by one to allow the async loop to start other tasks while
            # waiting on the full list of events
            for event in wanted_events.values():
                yield event

        return [unifi_events[id] for id in wanted_event_ids]
            # Last chunk was incomplete, we can stop now
            if len(events_chunk) < chunk_size:
                break

    async def ignore_missing(self):
        """Ignore missing events by adding them to the event table."""
        wanted_events = await self._get_missing_events()
        logger.info(" Ignoring missing events")

        logger.info(f" Ignoring {len(wanted_events)} missing events")

        for event in wanted_events:
        async for event in self._get_missing_events():
            logger.extra_debug(f"Ignoring event '{event.id}'")
            await self._db.execute(
                "INSERT INTO events VALUES "
                f"('{event.id}', '{event.type}', '{event.camera_id}',"
                f"('{event.id}', '{event.type.value}', '{event.camera_id}',"
                f"'{event.start.timestamp()}', '{event.end.timestamp()}')"
            )
            await self._db.commit()

    async def start(self):
        """Main loop."""
        """Run main loop."""
        logger.info("Starting Missing Event Checker")
        while True:
            try:
                shown_warning = False

                # Wait for unifi protect to be connected
                await self._protect.connect_event.wait()

                logger.extra_debug("Running check for missing events...")
                logger.debug("Running check for missing events...")

                wanted_events = await self._get_missing_events()
                async for event in self._get_missing_events():
                    if not shown_warning:
                        logger.warning(" Found missing events, adding to backup queue")
                        shown_warning = True

                logger.debug(f" Undownloaded events of wanted types: {len(wanted_events)}")

                if len(wanted_events) > 20:
                    logger.warning(f" Adding {len(wanted_events)} missing events to backup queue")
                    missing_logger = logger.extra_debug
                else:
                    missing_logger = logger.warning

                for event in wanted_events:
                    if event.type != EventType.SMART_DETECT:
                        event_name = f"{event.id} ({event.type})"
                        event_name = f"{event.id} ({event.type.value})"
                    else:
                        event_name = f"{event.id} ({', '.join(event.smart_detect_types)})"

                    missing_logger(
                    logger.extra_debug(
                        f" Adding missing event to backup queue: {event_name}"
                        f" ({event.start.strftime('%Y-%m-%dT%H-%M-%S')} -"
                        f" {event.end.strftime('%Y-%m-%dT%H-%M-%S')})"
@@ -152,6 +169,9 @@
                    await self._download_queue.put(event)

            except Exception as e:
                logger.error("Unexpected exception occurred during missing event check:", exc_info=e)
                logger.error(
                    "Unexpected exception occurred during missing event check:",
                    exc_info=e,
                )

            await asyncio.sleep(self.interval)
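
The checker now streams events in fixed-size chunks instead of one unbounded query, advancing the window start after each chunk and yielding between items so other tasks can run. The same pattern in miniature, with a stand-in for protect.get_events:

    import asyncio
    from typing import AsyncIterator, List

    async def fetch_chunk(start: int, limit: int) -> List[int]:
        # Stand-in for protect.get_events(start=..., limit=...): returns up
        # to `limit` items at or after `start`, here just consecutive integers.
        return list(range(start, min(start + limit, 1234)))

    async def iter_all(chunk_size: int = 500) -> AsyncIterator[int]:
        start = 0
        while True:
            chunk = await fetch_chunk(start, chunk_size)
            if not chunk:
                break
            for item in chunk:
                yield item               # hand control back to the event loop per item
            if len(chunk) < chunk_size:
                break                    # a short chunk means we reached the end
            start = chunk[-1] + 1        # advance the window past this chunk

    async def main() -> None:
        count = 0
        async for _ in iter_all():
            count += 1
        print(count)  # 1234

    asyncio.run(main())
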
@@ -8,11 +8,11 @@ notifier = apprise.Apprise()
def add_notification_service(url):
    """Add apprise URI with support for tags e.g. TAG1,TAG2=PROTOCOL://settings."""
    config = apprise.AppriseConfig()
    config.add_config(url, format='text')
    config.add_config(url, format="text")

    # If no tags are specified, default to errors otherwise ALL logging will
    # be spammed to the notification service
    if not config.servers()[0].tags:
        config.servers()[0].tags = {'ERROR'}
        config.servers()[0].tags = {"ERROR"}

    notifier.add(config)
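
For reference, tags are given as a comma-separated prefix on the Apprise URL, as the docstring above describes; a usage sketch with placeholder service URLs:

    # Hypothetical URLs; any Apprise-supported scheme works here.
    add_notification_service("ERROR,WARNING=discord://webhook_id/webhook_token")

    # With no tag prefix, the service is restricted to the ERROR tag only.
    add_notification_service("mailto://user:password@gmail.com")
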
@@ -13,14 +13,14 @@ logger = logging.getLogger(__name__)


async def delete_file(file_path, rclone_purge_args):
    """Deletes `file_path` via rclone."""
    """Delete `file_path` via rclone."""
    returncode, stdout, stderr = await run_command(f'rclone delete -vv "{file_path}" {rclone_purge_args}')
    if returncode != 0:
        logger.error(f" Failed to delete file: '{file_path}'")


async def tidy_empty_dirs(base_dir_path):
    """Deletes any empty directories in `base_dir_path` via rclone."""
    """Delete any empty directories in `base_dir_path` via rclone."""
    returncode, stdout, stderr = await run_command(f'rclone rmdirs -vv --ignore-errors --leave-root "{base_dir_path}"')
    if returncode != 0:
        logger.error(" Failed to tidy empty dirs")
@@ -34,7 +34,7 @@ class Purge:
        db: aiosqlite.Connection,
        retention: relativedelta,
        rclone_destination: str,
        interval: relativedelta = relativedelta(days=1),
        interval: relativedelta | None,
        rclone_purge_args: str = "",
    ):
        """Init.
@@ -45,15 +45,16 @@
            rclone_destination (str): What rclone destination the clips are stored in
            interval (relativedelta): How often to purge old clips
            rclone_purge_args (str): Optional extra arguments to pass to `rclone delete` directly.

        """
        self._db: aiosqlite.Connection = db
        self.retention: relativedelta = retention
        self.rclone_destination: str = rclone_destination
        self.interval: relativedelta = interval
        self.interval: relativedelta = interval if interval is not None else relativedelta(days=1)
        self.rclone_purge_args: str = rclone_purge_args

    async def start(self):
        """Main loop - runs forever."""
        """Run main loop."""
        while True:
            try:
                deleted_a_file = False
@@ -63,15 +64,14 @@
                async with self._db.execute(
                    f"SELECT * FROM events WHERE end < {retention_oldest_time}"
                ) as event_cursor:
                    async for event_id, event_type, camera_id, event_start, event_end in event_cursor:

                    async for event_id, event_type, camera_id, event_start, event_end in event_cursor:  # noqa: B007
                        logger.info(f"Purging event: {event_id}.")

                        # For every backup for this event
                        async with self._db.execute(f"SELECT * FROM backups WHERE id = '{event_id}'") as backup_cursor:
                            async for _, remote, file_path in backup_cursor:
                                logger.debug(f" Deleted: {remote}:{file_path}")
                                await delete_file(f"{remote}:{file_path}", self.rclone_purge_args)
                                logger.debug(f" Deleted: {remote}:{file_path}")
                                deleted_a_file = True

                        # delete event from database
@@ -86,5 +86,5 @@
                logger.error("Unexpected exception occurred during purge:", exc_info=e)

            next_purge_time = datetime.now() + self.interval
            logger.extra_debug(f'sleeping until {next_purge_time}')
            logger.extra_debug(f"sleeping until {next_purge_time}")
            await wait_until(next_purge_time)
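
`wait_until` comes from the project's utils and is not part of this diff; a minimal sketch of what such a helper might look like, assuming it simply sleeps until the target datetime:

    import asyncio
    from datetime import datetime

    # Hypothetical sketch; the real wait_until lives in unifi_protect_backup.utils.
    async def wait_until(when: datetime) -> None:
        remaining = (when - datetime.now()).total_seconds()
        if remaining > 0:
            await asyncio.sleep(remaining)
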
unifi_protect_backup/uiprotect_patch.py (new file, 139 lines)
@@ -0,0 +1,139 @@
"""Monkey patch new download method into uiprotect till PR is merged."""

import enum
from datetime import datetime
from pathlib import Path
from typing import Any, Optional

import aiofiles
from uiprotect.data import Version
from uiprotect.exceptions import BadRequest
from uiprotect.utils import to_js_time


class VideoExportType(str, enum.Enum):
    """Unifi Protect video export types."""

    TIMELAPSE = "timelapse"
    ROTATING = "rotating"


def monkey_patch_experimental_downloader():
    """Apply patches to uiprotect to add new download method."""
    from uiprotect.api import ProtectApiClient

    # Add the version constant
    ProtectApiClient.NEW_DOWNLOAD_VERSION = Version("4.0.0")

    async def _validate_channel_id(self, camera_id: str, channel_index: int) -> None:
        if self._bootstrap is None:
            await self.update()
        try:
            camera = self._bootstrap.cameras[camera_id]
            camera.channels[channel_index]
        except (IndexError, AttributeError, KeyError) as e:
            raise BadRequest(f"Invalid input: {e}") from e

    async def prepare_camera_video(
        self,
        camera_id: str,
        start: datetime,
        end: datetime,
        channel_index: int = 0,
        validate_channel_id: bool = True,
        fps: Optional[int] = None,
        filename: Optional[str] = None,
    ) -> Optional[dict[str, Any]]:
        if self.bootstrap.nvr.version < self.NEW_DOWNLOAD_VERSION:
            raise ValueError("This method is only supported on Unifi Protect version >= 4.0.0.")

        if validate_channel_id:
            await self._validate_channel_id(camera_id, channel_index)

        params = {
            "camera": camera_id,
            "start": to_js_time(start),
            "end": to_js_time(end),
        }

        if channel_index == 3:
            params.update({"lens": 2})
        else:
            params.update({"channel": channel_index})

        if fps is not None and fps > 0:
            params["fps"] = fps
            params["type"] = VideoExportType.TIMELAPSE.value
        else:
            params["type"] = VideoExportType.ROTATING.value

        if not filename:
            start_str = start.strftime("%m-%d-%Y, %H.%M.%S %Z")
            end_str = end.strftime("%m-%d-%Y, %H.%M.%S %Z")
            filename = f"{camera_id} {start_str} - {end_str}.mp4"

        params["filename"] = filename

        return await self.api_request(
            "video/prepare",
            params=params,
            raise_exception=True,
        )

    async def download_camera_video(
        self,
        camera_id: str,
        filename: str,
        output_file: Optional[Path] = None,
        iterator_callback: Optional[callable] = None,
        progress_callback: Optional[callable] = None,
        chunk_size: int = 65536,
    ) -> Optional[bytes]:
        if self.bootstrap.nvr.version < self.NEW_DOWNLOAD_VERSION:
            raise ValueError("This method is only supported on Unifi Protect version >= 4.0.0.")

        params = {
            "camera": camera_id,
            "filename": filename,
        }

        if iterator_callback is None and progress_callback is None and output_file is None:
            return await self.api_request_raw(
                "video/download",
                params=params,
                raise_exception=False,
            )

        r = await self.request(
            "get",
            f"{self.api_path}video/download",
            auto_close=False,
            timeout=0,
            params=params,
        )

        if output_file is not None:
            async with aiofiles.open(output_file, "wb") as output:

                async def callback(total: int, chunk: Optional[bytes]) -> None:
                    if iterator_callback is not None:
                        await iterator_callback(total, chunk)
                    if chunk is not None:
                        await output.write(chunk)

                await self._stream_response(r, chunk_size, callback, progress_callback)
        else:
            await self._stream_response(
                r,
                chunk_size,
                iterator_callback,
                progress_callback,
            )
        r.close()
        return None

    # Patch the methods into the class
    ProtectApiClient._validate_channel_id = _validate_channel_id
    ProtectApiClient.prepare_camera_video = prepare_camera_video
    ProtectApiClient.download_camera_video = download_camera_video
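
Once patched in, download_camera_video can also stream straight to disk instead of buffering the whole clip in memory, since passing output_file routes chunks through aiofiles. A hedged usage sketch (the client, camera ID, and file names are placeholders):

    from pathlib import Path

    from uiprotect import ProtectApiClient

    async def save_clip(protect: ProtectApiClient, camera_id: str, filename: str) -> None:
        # output_file makes the patched method stream chunks to disk rather
        # than returning the whole clip as bytes.
        await protect.download_camera_video(
            camera_id,
            filename,
            output_file=Path("clip.mp4"),
        )
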
@@ -1,20 +1,23 @@
"""Main module."""

import asyncio
import logging
import os
import shutil
from datetime import datetime, timezone
from datetime import datetime, timedelta, timezone
from typing import Callable, List

import aiosqlite
from pyunifiprotect import ProtectApiClient
from pyunifiprotect.data.types import ModelType
from dateutil.relativedelta import relativedelta
from uiprotect import ProtectApiClient
from uiprotect.data.types import ModelType

from unifi_protect_backup import (
    EventListener,
    MissingEventChecker,
    Purge,
    VideoDownloader,
    VideoDownloaderExperimental,
    VideoUploader,
    notifications,
)
@@ -22,18 +25,25 @@ from unifi_protect_backup.utils import (
    SubprocessException,
    VideoQueue,
    human_readable_size,
    parse_rclone_retention,
    run_command,
    setup_logging,
)

from unifi_protect_backup.uiprotect_patch import monkey_patch_experimental_downloader

logger = logging.getLogger(__name__)

# TODO: https://github.com/cjrh/aiorun#id6 (smart shield)


# We have been waiting for a long time for this PR to get merged
# https://github.com/uilibs/uiprotect/pull/249
# Since it has not progressed, we will for now patch in the functionality ourselves
monkey_patch_experimental_downloader()


async def create_database(path: str):
    """Creates sqlite database and creates the events and backups tables."""
    """Create sqlite database and the events and backups tables."""
    db = await aiosqlite.connect(path)
    await db.execute("CREATE TABLE events(id PRIMARY KEY, type, camera_id, start REAL, end REAL)")
    await db.execute(
@@ -57,20 +67,26 @@ class UnifiProtectBackup:
        password: str,
        verify_ssl: bool,
        rclone_destination: str,
        retention: str,
        retention: relativedelta,
        missing_range: relativedelta,
        rclone_args: str,
        rclone_purge_args: str,
        detection_types: List[str],
        ignore_cameras: List[str],
        cameras: List[str],
        file_structure_format: str,
        verbose: int,
        download_buffer_size: int,
        purge_interval: str,
        purge_interval: relativedelta,
        apprise_notifiers: str,
        skip_missing: bool,
        max_event_length: int,
        sqlite_path: str = "events.sqlite",
        color_logging=False,
        color_logging: bool = False,
        download_rate_limit: float | None = None,
        port: int = 443,
        use_experimental_downloader: bool = False,
        parallel_uploads: int = 1,
    ):
        """Will configure logging settings and the Unifi Protect API (but not actually connect).

@@ -83,7 +99,11 @@
            rclone_destination (str): `rclone` destination path in the format
                {rclone remote}:{path on remote}. E.g.
                `gdrive:/backups/unifi_protect`
            retention (str): How long should event clips be backed up for. Format as per the
            retention (relativedelta): How long should event clips be backed up for. Format as per the
                `--max-age` argument of `rclone`
                (https://rclone.org/filtering/#max-age-don-t-transfer-any-file-older-than-this)
            missing_range (relativedelta): How far back should missing events be checked for. Defaults to
                the same as the retention time. Format as per the
                `--max-age` argument of `rclone`
                (https://rclone.org/filtering/#max-age-don-t-transfer-any-file-older-than-this)
            rclone_args (str): A bandwidth limit which is passed to the `--bwlimit` argument of
@@ -91,6 +111,7 @@
            rclone_purge_args (str): Optional extra arguments to pass to `rclone delete` directly.
            detection_types (List[str]): List of which detection types to backup.
            ignore_cameras (List[str]): List of camera IDs for which to not backup events.
            cameras (List[str]): List of ONLY camera IDs for which to backup events.
            file_structure_format (str): A Python format string for output file path.
            verbose (int): How verbose to setup logging, see :func:`setup_logging` for details.
            download_buffer_size (int): How big, in bytes, the download buffer should be
@@ -99,6 +120,12 @@
            skip_missing (bool): If initial missing events should be ignored
            sqlite_path (str): Path where to find/create sqlite database
            color_logging (bool): Whether to add color to logging output or not
            download_rate_limit (float): Limit how many events can be downloaded in one minute. Disabled by default
            max_event_length (int): Maximum length in seconds for an event to be considered valid and downloaded
            use_experimental_downloader (bool): Use the new experimental downloader (the same method as used by the
                webUI)
            parallel_uploads (int): Max number of parallel uploads to allow

        """
        self.color_logging = color_logging
        setup_logging(verbose, self.color_logging)
@@ -122,9 +149,11 @@
        logger.debug(f" {verify_ssl=}")
        logger.debug(f" {rclone_destination=}")
        logger.debug(f" {retention=}")
        logger.debug(f" {missing_range=}")
        logger.debug(f" {rclone_args=}")
        logger.debug(f" {rclone_purge_args=}")
        logger.debug(f" {ignore_cameras=}")
        logger.debug(f" {cameras=}")
        logger.debug(f" {verbose=}")
        logger.debug(f" {detection_types=}")
        logger.debug(f" {file_structure_format=}")
@@ -133,9 +162,14 @@
        logger.debug(f" {purge_interval=}")
        logger.debug(f" {apprise_notifiers=}")
        logger.debug(f" {skip_missing=}")
        logger.debug(f" {download_rate_limit=} events per minute")
        logger.debug(f" {max_event_length=}s")
        logger.debug(f" {use_experimental_downloader=}")
        logger.debug(f" {parallel_uploads=}")

        self.rclone_destination = rclone_destination
        self.retention = parse_rclone_retention(retention)
        self.retention = retention
        self.missing_range = missing_range
        self.rclone_args = rclone_args
        self.rclone_purge_args = rclone_purge_args
        self.file_structure_format = file_structure_format
@@ -154,16 +188,21 @@
            verify_ssl=self.verify_ssl,
            subscribed_models={ModelType.EVENT},
        )
        self.ignore_cameras = ignore_cameras
        self.ignore_cameras = set(ignore_cameras)
        self.cameras = set(cameras)
        self._download_queue: asyncio.Queue = asyncio.Queue()
        self._unsub: Callable[[], None]
        self.detection_types = detection_types
        self.detection_types = set(detection_types)
        self._has_ffprobe = False
        self._sqlite_path = sqlite_path
        self._db = None
        self._download_buffer_size = download_buffer_size
        self._purge_interval = parse_rclone_retention(purge_interval)
        self._purge_interval = purge_interval
        self._skip_missing = skip_missing
        self._download_rate_limit = download_rate_limit
        self._max_event_length = timedelta(seconds=max_event_length)
        self._use_experimental_downloader = use_experimental_downloader
        self._parallel_uploads = parallel_uploads

    async def start(self):
        """Bootstrap the backup process and kick off the main loop.

@@ -180,18 +219,25 @@
        logger.info("Checking rclone configuration...")
        await self._check_rclone()

        # Start the pyunifiprotect connection by calling `update`
        # Start the uiprotect connection by calling `update`
        logger.info("Connecting to Unifi Protect...")

        for attempts in range(1):
        delay = 5  # Start with a 5 second delay
        max_delay = 3600  # 1 hour in seconds

        for _ in range(20):
            try:
                await self._protect.update()
                break
            except Exception as e:
                logger.warning(f"Failed to connect to UniFi Protect, retrying in {attempts}s...", exc_info=e)
                await asyncio.sleep(attempts)
                logger.warning(
                    f"Failed to connect to UniFi Protect, retrying in {delay}s...",
                    exc_info=e,
                )
                await asyncio.sleep(delay)
                delay = min(max_delay, delay * 2)  # Double the delay but do not exceed max_delay
        else:
            raise ConnectionError("Failed to connect to UniFi Protect after 10 attempts")
            raise ConnectionError("Failed to connect to UniFi Protect after 20 attempts")

        # Add a lock to the protect client that can be used to prevent code accessing the client when it has
        # lost connection
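
The new loop doubles its delay after each failure, capped at one hour. The same capped exponential backoff, factored into a reusable sketch:

    import asyncio
    from typing import Awaitable, Callable, TypeVar

    T = TypeVar("T")

    async def retry_with_backoff(
        op: Callable[[], Awaitable[T]],
        attempts: int = 20,
        delay: float = 5,
        max_delay: float = 3600,
    ) -> T:
        for attempt in range(attempts):
            try:
                return await op()
            except Exception:
                if attempt == attempts - 1:
                    raise
                await asyncio.sleep(delay)
                delay = min(max_delay, delay * 2)  # double, but never exceed the cap
        raise RuntimeError("unreachable")
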
@@ -204,8 +250,8 @@
            logger.info(f" - {camera.id}: {camera.name}")

        # Print timezone info for debugging
        logger.debug(f'NVR TZ: {self._protect.bootstrap.nvr.timezone}')
        logger.debug(f'Local TZ: {datetime.now(timezone.utc).astimezone().tzinfo}')
        logger.debug(f"NVR TZ: {self._protect.bootstrap.nvr.timezone}")
        logger.debug(f"Local TZ: {datetime.now(timezone.utc).astimezone().tzinfo}")

        tasks = []

@@ -223,11 +269,26 @@

        # Create downloader task
        # This will download video files to its buffer
        downloader = VideoDownloader(self._protect, self._db, download_queue, upload_queue, self.color_logging)
        if self._use_experimental_downloader:
            downloader_cls = VideoDownloaderExperimental
        else:
            downloader_cls = VideoDownloader

        downloader = downloader_cls(
            self._protect,
            self._db,
            download_queue,
            upload_queue,
            self.color_logging,
            self._download_rate_limit,
            self._max_event_length,
        )
        tasks.append(downloader.start())

        # Create upload task
        # Create upload tasks
        # This will upload the videos in the downloader's buffer to the rclone remotes and log it in the database
        uploaders = []
        for _ in range(self._parallel_uploads):
            uploader = VideoUploader(
                self._protect,
                upload_queue,
@@ -237,33 +298,41 @@
                self._db,
                self.color_logging,
            )
            uploaders.append(uploader)
            tasks.append(uploader.start())

        # Create event listener task
        # This will connect to the unifi protect websocket and listen for events. When one is detected it will
        # be added to the queue of events to download
        event_listener = EventListener(download_queue, self._protect, self.detection_types, self.ignore_cameras)
        event_listener = EventListener(
            download_queue, self._protect, self.detection_types, self.ignore_cameras, self.cameras
        )
        tasks.append(event_listener.start())

        # Create purge task
        # This will, every midnight, purge old backups from the rclone remotes and database
        purge = Purge(
            self._db, self.retention, self.rclone_destination, self._purge_interval, self.rclone_purge_args
            self._db,
            self.retention,
            self.rclone_destination,
            self._purge_interval,
            self.rclone_purge_args,
        )
        tasks.append(purge.start())

        # Create missing event task
        # This will check all the events within the retention period, if any have been missed and not backed up
        # they will be added to the event queue
        # This will check all the events within the missing_range period, if any have been missed and not
        # backed up, they will be added to the event queue
        missing = MissingEventChecker(
            self._protect,
            self._db,
            download_queue,
            downloader,
            uploader,
            self.retention,
            uploaders,
            self.missing_range,
            self.detection_types,
            self.ignore_cameras,
            self.cameras,
        )
        if self._skip_missing:
            logger.info("Ignoring missing events")
@@ -292,7 +361,7 @@
            ValueError: The given rclone destination is for a remote that is not configured

        """
        rclone = shutil.which('rclone')
        rclone = shutil.which("rclone")
        if not rclone:
            raise RuntimeError("`rclone` is not installed on this system")
        logger.debug(f"rclone found: {rclone}")
|
||||
|
||||
@@ -6,10 +6,17 @@ import re
|
||||
from datetime import datetime
|
||||
|
||||
import aiosqlite
|
||||
from pyunifiprotect import ProtectApiClient
|
||||
from pyunifiprotect.data.nvr import Event
|
||||
from uiprotect import ProtectApiClient
|
||||
from uiprotect.data.nvr import Event
|
||||
|
||||
from unifi_protect_backup.utils import VideoQueue, get_camera_name, human_readable_size, run_command, setup_event_logger
|
||||
from unifi_protect_backup.utils import (
|
||||
SubprocessException,
|
||||
VideoQueue,
|
||||
get_camera_name,
|
||||
human_readable_size,
|
||||
run_command,
|
||||
setup_event_logger,
|
||||
)
|
||||
|
||||
|
||||
class VideoUploader:
|
||||
@@ -38,6 +45,7 @@ class VideoUploader:
|
||||
file_structure_format (str): format string for how to structure the uploaded files
|
||||
db (aiosqlite.Connection): Async SQlite database connection
|
||||
color_logging (bool): Whether or not to add color to logging output
|
||||
|
||||
"""
|
||||
self._protect: ProtectApiClient = protect
|
||||
self.upload_queue: VideoQueue = upload_queue
|
||||
@@ -49,10 +57,10 @@ class VideoUploader:
|
||||
|
||||
self.base_logger = logging.getLogger(__name__)
|
||||
setup_event_logger(self.base_logger, color_logging)
|
||||
self.logger = logging.LoggerAdapter(self.base_logger, {'event': ''})
|
||||
self.logger = logging.LoggerAdapter(self.base_logger, {"event": ""})
|
||||
|
||||
async def start(self):
|
||||
"""Main loop.
|
||||
"""Run main loop.
|
||||
|
||||
Runs forever looking for video data in the video queue, uploads it
using rclone, and finally updates the database
@@ -63,7 +71,7 @@ class VideoUploader:
event, video = await self.upload_queue.get()
self.current_event = event

self.logger = logging.LoggerAdapter(self.base_logger, {'event': f' [{event.id}]'})
self.logger = logging.LoggerAdapter(self.base_logger, {"event": f" [{event.id}]"})

self.logger.info(f"Uploading event: {event.id}")
self.logger.debug(
@@ -74,10 +82,13 @@ class VideoUploader:
destination = await self._generate_file_path(event)
self.logger.debug(f" Destination: {destination}")

try:
await self._upload_video(video, destination, self._rclone_args)
await self._update_database(event, destination)

self.logger.debug("Uploaded")
except SubprocessException:
self.logger.error(f" Failed to upload file: '{destination}'")

self.current_event = None

except Exception as e:
@@ -96,10 +107,11 @@ class VideoUploader:

Raises:
SubprocessException: If rclone returns a non-zero exit code

"""
returncode, stdout, stderr = await run_command(f'rclone rcat -vv {rclone_args} "{destination}"', video)
if returncode != 0:
self.logger.error(f" Failed to upload file: '{destination}'")
raise SubprocessException(stdout, stderr, returncode)

async def _update_database(self, event: Event, destination: str):
"""Add the backed up event to the database along with where it was backed up to."""
@@ -107,7 +119,7 @@ class VideoUploader:
assert isinstance(event.end, datetime)
await self._db.execute(
"INSERT INTO events VALUES "
f"('{event.id}', '{event.type}', '{event.camera_id}',"
f"('{event.id}', '{event.type.value}', '{event.camera_id}',"
f"'{event.start.timestamp()}', '{event.end.timestamp()}')"
)
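For reference, aiosqlite also supports parameterized queries, which sidestep the quoting pitfalls of f-string interpolation; a minimal sketch of the same insert, not the project's current code:

# Sketch (assumption): parameterized form of the insert above
await self._db.execute(
    "INSERT INTO events VALUES (?, ?, ?, ?, ?)",
    (event.id, event.type.value, event.camera_id, event.start.timestamp(), event.end.timestamp()),
)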

@@ -121,14 +133,14 @@ class VideoUploader:
await self._db.commit()

async def _generate_file_path(self, event: Event) -> pathlib.Path:
"""Generates the rclone destination path for the provided event.
"""Generate the rclone destination path for the provided event.

Generates the rclone destination path for the given event based upon the format string
in `self.file_structure_format`.

Provides the following fields to the format string:
event: The `Event` object as per
https://github.com/briis/pyunifiprotect/blob/master/pyunifiprotect/data/nvr.py
https://github.com/briis/uiprotect/blob/master/uiprotect/data/nvr.py
duration_seconds: The duration of the event in seconds
detection_type: A nicely formatted list of the event detection type and the smart detection types (if any)
camera_name: The name of the camera that generated this event
@@ -147,13 +159,13 @@ class VideoUploader:
format_context = {
"event": event,
"duration_seconds": (event.end - event.start).total_seconds(),
"detection_type": f"{event.type} ({' '.join(event.smart_detect_types)})"
"detection_type": f"{event.type.value} ({' '.join(event.smart_detect_types)})"
if event.smart_detect_types
else f"{event.type}",
else f"{event.type.value}",
"camera_name": await get_camera_name(self._protect, event.camera_id),
}

file_path = self._file_structure_format.format(**format_context)
file_path = re.sub(r'[^\w\-_\.\(\)/ ]', '', file_path) # Sanitize any invalid chars
file_path = re.sub(r"[^\w\-_\.\(\)/ ]", "", file_path) # Sanitize any invalid chars

return pathlib.Path(f"{self._rclone_destination}/{file_path}")
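To illustrate how `_generate_file_path` assembles a path, here is a small standalone sketch; the format string and values are hypothetical, not the project's defaults:

from datetime import datetime
from types import SimpleNamespace

# Hypothetical event and format string, for illustration only
event = SimpleNamespace(id="65a1b2c3", start=datetime(2024, 1, 31, 12, 0))
fmt = "{camera_name}/{event.start:%Y-%m-%d}/{detection_type} - {event.id}.mp4"
file_path = fmt.format(camera_name="Front Door", detection_type="motion", event=event)
# -> "Front Door/2024-01-31/motion - 65a1b2c3.mp4", which is then sanitized
# and prefixed with the rclone destination as above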

@@ -4,13 +4,13 @@ import asyncio
import logging
import re
from datetime import datetime
from typing import List, Optional
from typing import Optional, Set

from apprise import NotifyType
from dateutil.relativedelta import relativedelta
from pyunifiprotect import ProtectApiClient
from pyunifiprotect.data.nvr import Event
from async_lru import alru_cache
from uiprotect import ProtectApiClient
from uiprotect.data.nvr import Event
from uiprotect.data.types import EventType, SmartDetectObjectType, SmartDetectAudioType

from unifi_protect_backup import notifications

@@ -51,11 +51,11 @@ def add_logging_level(levelName: str, levelNum: int, methodName: Optional[str] =
methodName = levelName.lower()

if hasattr(logging, levelName):
raise AttributeError('{} already defined in logging module'.format(levelName))
raise AttributeError("{} already defined in logging module".format(levelName))
if hasattr(logging, methodName):
raise AttributeError('{} already defined in logging module'.format(methodName))
raise AttributeError("{} already defined in logging module".format(methodName))
if hasattr(logging.getLoggerClass(), methodName):
raise AttributeError('{} already defined in logger class'.format(methodName))
raise AttributeError("{} already defined in logger class".format(methodName))

# This method was inspired by the answers to Stack Overflow post
# http://stackoverflow.com/q/2183233/2988730, especially
@@ -85,19 +85,19 @@ def add_color_to_record_levelname(record):
"""Colorizes logging level names."""
levelno = record.levelno
if levelno >= logging.CRITICAL:
color = '\x1b[31;1m' # RED
color = "\x1b[31;1m" # RED
elif levelno >= logging.ERROR:
color = '\x1b[31;1m' # RED
color = "\x1b[31;1m" # RED
elif levelno >= logging.WARNING:
color = '\x1b[33;1m' # YELLOW
color = "\x1b[33;1m" # YELLOW
elif levelno >= logging.INFO:
color = '\x1b[32;1m' # GREEN
color = "\x1b[32;1m" # GREEN
elif levelno >= logging.DEBUG:
color = '\x1b[36;1m' # CYAN
color = "\x1b[36;1m" # CYAN
elif levelno >= logging.EXTRA_DEBUG:
color = '\x1b[35;1m' # MAGENTA
color = "\x1b[35;1m" # MAGENTA
else:
color = '\x1b[0m'
color = "\x1b[0m"

return f"{color}{record.levelname}\x1b[0m"

@@ -110,6 +110,9 @@ class AppriseStreamHandler(logging.StreamHandler):

Args:
color_logging (bool): If true logging levels will be colorized
*args (): Positional arguments to pass to StreamHandler
**kwargs: Keyword arguments to pass to StreamHandler

"""
super().__init__(*args, **kwargs)
self.color_logging = color_logging
@@ -173,9 +176,9 @@ class AppriseStreamHandler(logging.StreamHandler):


def create_logging_handler(format, color_logging):
"""Constructs apprise logging handler for the given format."""
"""Construct apprise logging handler for the given format."""
date_format = "%Y-%m-%d %H:%M:%S"
style = '{'
style = "{"

sh = AppriseStreamHandler(color_logging)
formatter = logging.Formatter(format, date_format, style)
@@ -183,8 +186,8 @@ def create_logging_handler(format, color_logging):
return sh


def setup_logging(verbosity: int, color_logging: bool = False, apprise_notifiers: List[str] = []) -> None:
"""Configures loggers to provided the desired level of verbosity.
def setup_logging(verbosity: int, color_logging: bool = False) -> None:
"""Configure loggers to provided the desired level of verbosity.

Verbosity 0: Only log info messages created by `unifi-protect-backup`, and all warnings
Verbosity 1: Only log info & debug messages created by `unifi-protect-backup`, and all warnings
@@ -200,19 +203,18 @@ def setup_logging(verbosity: int, color_logging: bool = False, apprise_notifiers
Args:
verbosity (int): The desired level of verbosity
color_logging (bool): If colors should be used in the log (default=False)
apprise_notifiers (List[str]): Notification services to hook into the logger

"""
add_logging_level(
'EXTRA_DEBUG',
"EXTRA_DEBUG",
logging.DEBUG - 1,
)
add_logging_level(
'WEBSOCKET_DATA',
"WEBSOCKET_DATA",
logging.DEBUG - 2,
)
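Once registered, these custom levels behave like the built-ins; per `add_logging_level`, the logging method name defaults to the lowercased level name:

# Usage sketch, assuming the two add_logging_level calls above have run
logger = logging.getLogger("unifi_protect_backup")
logger.extra_debug("verbose detail")     # logs at DEBUG - 1
logger.websocket_data("raw ws payload")  # logs at DEBUG - 2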

format = "{asctime} [{levelname:^11s}] {name:<42} : {message}"
format = "{asctime} [{levelname:^11s}] {name:<46} : {message}"
sh = create_logging_handler(format, color_logging)

logger = logging.getLogger("unifi_protect_backup")
@@ -239,24 +241,31 @@ def setup_logging(verbosity: int, color_logging: bool = False, apprise_notifiers
logger.setLevel(logging.WEBSOCKET_DATA) # type: ignore


_initialized_loggers = []


def setup_event_logger(logger, color_logging):
"""Sets up a logger that also displays the event ID currently being processed."""
format = "{asctime} [{levelname:^11s}] {name:<42} :{event} {message}"
"""Set up a logger that also displays the event ID currently being processed."""
global _initialized_loggers
if logger not in _initialized_loggers:
format = "{asctime} [{levelname:^11s}] {name:<46} :{event} {message}"
sh = create_logging_handler(format, color_logging)
logger.addHandler(sh)
logger.propagate = False
_initialized_loggers.append(logger)
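The `{event}` field in that format string is not a standard LogRecord attribute; it is supplied per message through a LoggerAdapter, roughly like this (the event id is made up):

import logging

uploader_logger = logging.getLogger("unifi_protect_backup.uploader")
setup_event_logger(uploader_logger, color_logging=False)
log = logging.LoggerAdapter(uploader_logger, {"event": " [evt-123]"})
log.info("Uploading event")  # the adapter's "event" value fills the {event} slot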


_suffixes = ["B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"]


def human_readable_size(num: float):
"""Turns a number into a human readable number with ISO/IEC 80000 binary prefixes.
"""Turn a number into a human readable number with ISO/IEC 80000 binary prefixes.

Based on: https://stackoverflow.com/a/1094933

Args:
num (int): The number to be converted into human readable format

"""
for unit in _suffixes:
if abs(num) < 1024.0:
@@ -266,7 +275,7 @@ def human_readable_size(num: float):


def human_readable_to_float(num: str):
"""Turns a human readable ISO/IEC 80000 suffix value to its full float value."""
"""Turn a human readable ISO/IEC 80000 suffix value to its full float value."""
pattern = r"([\d.]+)(" + "|".join(_suffixes) + ")"
result = re.match(pattern, num)
if result is None:
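Taken together the two helpers roughly round-trip; the exact strings depend on return formatting elided from these hunks:

# Illustrative values only
human_readable_size(2560)          # ~"2.5 KiB", since 2560 / 1024 == 2.5
human_readable_to_float("2.5KiB")  # 2560.0, reversing the scaling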
@@ -282,12 +291,12 @@ def human_readable_to_float(num: str):
# No max size, and a 6 hour ttl
@alru_cache(None, ttl=60 * 60 * 6)
async def get_camera_name(protect: ProtectApiClient, id: str):
"""Returns the name for the camera with the given ID.
"""Return the name for the camera with the given ID.

If the camera ID is not known, it tries refreshing the cached data
"""
# Wait for unifi protect to be connected
await protect.connect_event.wait()
await protect.connect_event.wait() # type: ignore

try:
return protect.bootstrap.cameras[id].name
@@ -295,7 +304,7 @@ async def get_camera_name(protect: ProtectApiClient, id: str):
# Refresh cameras
logger.debug(f"Unknown camera id: '{id}', checking API")

await protect.update(force=True)
await protect.update()

try:
name = protect.bootstrap.cameras[id].name
@@ -317,6 +326,7 @@ class SubprocessException(Exception):
stdout (str): What rclone output to stdout
stderr (str): What rclone output to stderr
returncode (int): The return code of the rclone process

"""
super().__init__()
self.stdout: str = stdout
@@ -324,27 +334,12 @@ class SubprocessException(Exception):
self.returncode: int = returncode

def __str__(self):
"""Turns exception into a human readable form."""
"""Turn exception into a human readable form."""
return f"Return Code: {self.returncode}\nStdout:\n{self.stdout}\nStderr:\n{self.stderr}"


def parse_rclone_retention(retention: str) -> relativedelta:
"""Parses the rclone `retention` parameter into a relativedelta which can then be used to calculate datetimes."""
matches = {k: int(v) for v, k in re.findall(r"([\d]+)(ms|s|m|h|d|w|M|y)", retention)}
return relativedelta(
microseconds=matches.get("ms", 0) * 1000,
seconds=matches.get("s", 0),
minutes=matches.get("m", 0),
hours=matches.get("h", 0),
days=matches.get("d", 0),
weeks=matches.get("w", 0),
months=matches.get("M", 0),
years=matches.get("Y", 0),
)
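For example, a retention string mixing several units parses into a single relativedelta, which can then be subtracted from the current time to get a purge cutoff:

from datetime import datetime

delta = parse_rclone_retention("2w3d12h")  # two weeks, three days, and twelve hours
cutoff = datetime.now() - delta            # events older than this get purged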


async def run_command(cmd: str, data=None):
"""Runs the given command returning the exit code, stdout and stderr."""
"""Run the given command returning the exit code, stdout and stderr."""
proc = await asyncio.create_subprocess_shell(
cmd,
stdin=asyncio.subprocess.PIPE,
@@ -353,9 +348,9 @@ async def run_command(cmd: str, data=None):
)
stdout, stderr = await proc.communicate(data)
stdout = stdout.decode()
stdout_indented = '\t' + stdout.replace('\n', '\n\t').strip()
stdout_indented = "\t" + stdout.replace("\n", "\n\t").strip()
stderr = stderr.decode()
stderr_indented = '\t' + stderr.replace('\n', '\n\t').strip()
stderr_indented = "\t" + stderr.replace("\n", "\n\t").strip()

if proc.returncode != 0:
logger.error(f"Failed to run: '{cmd}")
@@ -377,11 +372,11 @@ class VideoQueue(asyncio.Queue):
self._bytes_sum = 0

def qsize(self):
"""Number of items in the queue."""
"""Get number of items in the queue."""
return self._bytes_sum

def qsize_files(self):
"""Number of items in the queue."""
"""Get number of items in the queue."""
return super().qsize()

def _get(self):
@@ -393,7 +388,7 @@ class VideoQueue(asyncio.Queue):
self._queue.append(item) # type: ignore
self._bytes_sum += len(item[1])

def full(self, item: tuple[Event, bytes] = None):
def full(self, item: tuple[Event, bytes] | None = None):
"""Return True if there are maxsize bytes in the queue.

Optionally, if `item` is provided, it will return False if there is enough space to
@@ -423,7 +418,7 @@ class VideoQueue(asyncio.Queue):
)

while self.full(item):
putter = self._loop.create_future() # type: ignore
putter = self._get_loop().create_future() # type: ignore
self._putters.append(putter) # type: ignore
try:
await putter
@@ -460,3 +455,38 @@ async def wait_until(dt):
"""Sleep until the specified datetime."""
now = datetime.now()
await asyncio.sleep((dt - now).total_seconds())
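This helper is what lets tasks fire on a schedule; a minimal usage sketch inside a coroutine (the real Purge task manages its own interval):

from datetime import datetime, time, timedelta

async def nightly():
    while True:
        # Sleep until the next local midnight, then do the day's work
        tomorrow = datetime.now().date() + timedelta(days=1)
        await wait_until(datetime.combine(tomorrow, time.min))
        ...  # e.g. purge old backups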


EVENT_TYPES_MAP = {
EventType.MOTION: {"motion"},
EventType.RING: {"ring"},
EventType.SMART_DETECT_LINE: {"line"},
EventType.FINGERPRINT_IDENTIFIED: {"fingerprint"},
EventType.NFC_CARD_SCANNED: {"nfc"},
EventType.SMART_DETECT: {t for t in SmartDetectObjectType.values() if t not in SmartDetectAudioType.values()},
EventType.SMART_AUDIO_DETECT: {f"{t}" for t in SmartDetectAudioType.values()},
}


def wanted_event_type(event, wanted_detection_types: Set[str], cameras: Set[str], ignore_cameras: Set[str]):
"""Return True if this event is one we want."""
if event.start is None or event.end is None:
return False # This event is still ongoing

if event.camera_id in ignore_cameras:
return False

if cameras and event.camera_id not in cameras:
return False

if event.type not in EVENT_TYPES_MAP:
return False

if event.type in [EventType.SMART_DETECT, EventType.SMART_AUDIO_DETECT]:
detection_types = set(event.smart_detect_types)
else:
detection_types = EVENT_TYPES_MAP[event.type]
if not detection_types & wanted_detection_types: # No intersection
return False

return True
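Both the live event listener and the missing-event checker can share this predicate; a usage sketch inside a coroutine, with illustrative argument values:

wanted = {"motion", "person", "ring"}    # parsed detection types
cameras = set()                          # empty set means all cameras
ignored = {"cafe0123456789abcdef0123"}   # hypothetical camera id

if wanted_event_type(event, wanted, cameras, ignored):
    await download_queue.put(event)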