Mirror of https://github.com/ep1cman/unifi-protect-backup.git (synced 2025-12-05 23:53:30 +00:00)

Compare commits: v0.2.1 ... multiarch- (63 commits)
| SHA1 |
|---|
| 76db42da0d |
| 34f27c9cdc |
| 760cb56f63 |
| 0783dc58d5 |
| 5ae43f08af |
| 0a36102eed |
| 92be1cea5d |
| 1813bc0176 |
| 9451fb4235 |
| 6fe18a193b |
| f3a8bf6957 |
| cb93ec7c6e |
| f82e6064e7 |
| 6aac1aadab |
| 13b11359fa |
| 540ad6e9f6 |
| 912433e640 |
| f4a0c2bdcd |
| f2c9ee5c76 |
| 53ab3dc432 |
| 381f90f497 |
| af8ca90356 |
| 189450e590 |
| 3f55fa5fdb |
| 52e72a7425 |
| 003e6eb990 |
| 8bebeceaa6 |
| e2eb7858da |
| 453fed6c57 |
| ae323e68aa |
| 4eec2fdde0 |
| d31b9bffc6 |
| 0a4a2401be |
| 3c3c47b3b4 |
| 51e2446e44 |
| 5f8ae03d7a |
| 92bb362f2b |
| 401031dc2f |
| 24e508bf69 |
| 71c86714c1 |
| 7ee34c1c6a |
| 5bd4a35d5d |
| 298f500811 |
| 0125b6d21a |
| 04694712d8 |
| e3ed8ef303 |
| 43dd561d81 |
| ad6b4dc632 |
| a268ad652a |
| 2b46b5bd4a |
| 9e164de686 |
| 78e7b8fbb0 |
| 76a0591beb |
| 15e0ae5f4d |
| c9634ba10a |
| e3fbb1be10 |
| 47c9338fe5 |
| 48042aee04 |
| e56a38b73f |
| 3e53d43f95 |
| 90e50fd982 |
| 0a2c0aa326 |
| 9f6ec7628c |
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.2.1
current_version = 0.7.3
commit = True
tag = True
.github/ISSUE_TEMPLATE.md (2 changed lines, vendored)
@@ -1,6 +1,8 @@
* Unifi Protect Backup version:
* Unifi Protect version:
* Python version:
* Operating System:
* Are you using a docker container or native?:

### Description
.github/workflows/dev.yml (46 changed lines, vendored)
@@ -2,16 +2,13 @@

name: dev workflow

env:
IMAGE_NAME: ${{ github.repository }}

# Controls when the action will run.
on:
# Triggers the workflow on push or pull request events but only for the master branch
push:
branches: [ master, main, dev ]
branches: [ dev ]
pull_request:
branches: [ master, main, dev ]
branches: [ dev ]

# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
@@ -55,9 +52,7 @@ jobs:
dev_container:
name: Create dev container
runs-on: ubuntu-20.04
strategy:
matrix:
python-versions: [3.9]
if: github.event_name != 'pull_request'

# Steps represent a sequence of tasks that will be executed as part of the job
steps:
@@ -66,7 +61,7 @@ jobs:

- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-versions }}
python-version: 3.9

- name: Install dependencies
run: |
@@ -77,20 +72,23 @@ jobs:
run: >-
poetry build

- name: build container
id: docker_build
run: docker build . --file Dockerfile --tag $IMAGE_NAME --label "runnumber=${GITHUB_RUN_ID}"
- name: Set up QEMU
uses: docker/setup-qemu-action@v1

- name: log in to container registry
run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1

- name: Log in to container registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: push container image
run: |
IMAGE_ID=ghcr.io/$IMAGE_NAME

# Change all uppercase to lowercase
IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
echo IMAGE_ID=$IMAGE_ID
echo VERSION=$VERSION
docker tag $IMAGE_NAME $IMAGE_ID:dev
docker push $IMAGE_ID:dev
- name: Build and push dev
uses: docker/build-push-action@v2
with:
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: ghcr.io/${{ github.repository }}:dev
.github/workflows/preview.yml (50 changed lines, vendored)
@@ -1,50 +0,0 @@
# This is a basic workflow to help you get started with Actions

name: stage & preview workflow

# Controls when the action will run.
on:
# Triggers the workflow on push or pull request events but only for the master branch
push:
branches: [ master, main ]

# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
publish_dev_build:
runs-on: ubuntu-latest

strategy:
matrix:
python-versions: [ 3.9 ]

steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-versions }}

- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install poetry tox tox-gh-actions

- name: test with tox
run:
tox

- name: Build wheels and source tarball
run: |
poetry version $(poetry version --short)-dev.$GITHUB_RUN_NUMBER
poetry version --short
poetry build

- name: publish to Test PyPI
uses: pypa/gh-action-pypi-publish@master
with:
user: __token__
password: ${{ secrets.TEST_PYPI_API_TOKEN}}
repository_url: https://test.pypi.org/legacy/
skip_existing: true
.github/workflows/release.yml (53 changed lines, vendored)
@@ -12,9 +12,6 @@ on:
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:

env:
IMAGE_NAME: ${{ github.repository }}

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
# This workflow contains a single job called "release"
@@ -22,10 +19,6 @@ jobs:
name: Create Release
runs-on: ubuntu-20.04

strategy:
matrix:
python-versions: [3.9]

# Steps represent a sequence of tasks that will be executed as part of the job
steps:
- name: Get version from tag
@@ -46,7 +39,7 @@ jobs:

- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-versions }}
python-version: 3.9

- name: Install dependencies
run: |
@@ -61,12 +54,26 @@ jobs:
run: >-
ls -l

- name: build container
id: docker_build
run: docker build . --file Dockerfile --tag $IMAGE_NAME --label "runnumber=${GITHUB_RUN_ID}"
- name: Set up QEMU
uses: docker/setup-qemu-action@v1

- name: log in to container registry
run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1

- name: Log in to container registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Build and push dev
uses: docker/build-push-action@v2
with:
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: ghcr.io/${{ github.repository }}:$VERSION, ghcr.io/${{ github.repository }}:latest

- name: create github release
id: create_release
@@ -79,26 +86,6 @@ jobs:
draft: false
prerelease: false

- name: push container image
run: |
IMAGE_ID=ghcr.io/$IMAGE_NAME

# Change all uppercase to lowercase
IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
# Strip git ref prefix from version
VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,')
# Strip "v" prefix from tag name
[[ "${{ github.ref }}" == "refs/tags/"* ]] && VERSION=$(echo $VERSION | sed -e 's/^v//')
# Use Docker `latest` tag convention
[ "$VERSION" == "master" ] && VERSION=latest
echo IMAGE_ID=$IMAGE_ID
echo VERSION=$VERSION
docker tag $IMAGE_NAME $IMAGE_ID:$VERSION
docker tag $IMAGE_NAME $IMAGE_ID:latest
docker push $IMAGE_ID:$VERSION
docker push $IMAGE_ID:latest

- name: publish to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:
.gitignore (3 changed lines, vendored)
@@ -113,5 +113,8 @@ ENV/
# mkdocs build dir
site/

# Docker mounted volumes
config/
data/

.envrc
CHANGELOG.md (69 changed lines)
@@ -4,6 +4,75 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.7.3] - 2022-07-31
### Fixed
- Updated to the 4.0.0 version of pyunifiprotect
- Added rust to the container, and bumped it to alpine 3.16

## [0.7.2] - 2022-07-17
### Fixed
- Updated to the latest version of pyunifiprotect to fix issues introduced in unifi protect 2.1.1

## [0.7.1] - 2022-06-08
### Fixed
- Updated to the latest version of pyunifiprotect to fix issues introduced in unifi protect 2.0.1
- Updated documentation to include how to set up local user accounts on unifi protect

## [0.7.0] - 2022-03-26
### Added
- Added the ability to change the way the clip files are structured via a template string.
### Fixed
- Fixed issue where event types without clips would attempt (and fail) to download clips
- Drastically reduced the size of the docker container
- Fixed typos in the documentation
- Some dev dependencies are no longer installed by default

## [0.6.0] - 2022-03-18
### Added
- Support for doorbell ring events
- `detection_types` parameter to limit which kinds of events are backed up
### Fixed
- Actually fixed timestamps this time.

## [0.5.3] - 2022-03-11
### Fixed
- Timestamps in filenames and logging now show time in the timezone of the NVR, not UTC

## [0.5.2] - 2022-03-10
### Fixed
- rclone delete command now works as expected on windows when spaces are in the file path
- Dockerfile now allows setting of user and group to run as, as well as a default config

## [0.5.1] - 2022-03-07
### Fixed
- rclone command now works as expected on windows when spaces are in the file path

## [0.5.0] - 2022-03-06
### Added
- If `ffprobe` is available, the downloaded clip's length is checked and logged
### Fixed
- A time delay has been added before downloading clips to try to resolve an issue where
downloaded clips were too short

## [0.4.0] - 2022-03-05
### Added
- A `--version` command line option to show the tool's version
### Fixed
- Websocket checks are no longer logged in verbosity level 1 to reduce log spam

## [0.3.1] - 2022-02-24
### Fixed
- Now checks if the websocket connection is alive, and attempts to reconnect if it isn't.

## [0.3.0] - 2022-02-22
### Added
- New CLI argument for passing CLI arguments directly to `rclone`.

### Fixed
- A new camera getting added while running no longer crashes the application.
- A timeout during download now correctly retries the download instead of
abandoning the event.

## [0.2.1] - 2022-02-21
### Fixed
- Retry logging formatting
@@ -59,7 +59,8 @@ Ready to contribute? Here's how to set up `unifi-protect-backup` for local development
4. Install dependencies and start your virtualenv:

```
$ poetry install -E test -E doc -E dev
$ poetry install -E test -E dev
$ poetry shell
```

5. Create a branch for local development:
@@ -70,14 +71,21 @@ Ready to contribute? Here's how to set up `unifi-protect-backup` for local development

Now you can make your changes locally.

6. When you're done making changes, check that your changes pass the
6. To run `unifi-protect-backup` while developing you will need to either
be inside the `poetry shell` virtualenv or run it via poetry:

```
$ poetry run unifi-protect-backup {args}
```

7. When you're done making changes, check that your changes pass the
tests, including testing other Python versions, with tox:

```
$ poetry run tox
```

7. Commit your changes and push your branch to GitHub:
8. Commit your changes and push your branch to GitHub:

```
$ git add .
@@ -85,7 +93,7 @@ Ready to contribute? Here's how to set up `unifi-protect-backup` for local development
$ git push origin name-of-your-bugfix-or-feature
```

8. Submit a pull request through the GitHub website.
9. Submit a pull request through the GitHub website.

## Pull Request Guidelines

@@ -93,8 +101,8 @@ Before you submit a pull request, check that it meets these guidelines:

1. The pull request should include tests.
2. If the pull request adds functionality, the docs should be updated. Put
your new functionality into a function with a docstring, and add the
feature to the list in README.md.
your new functionality into a function with a docstring. If adding a CLI
option, you should update the "usage" in README.md.
3. The pull request should work for Python 3.9. Check
https://github.com/ep1cman/unifi-protect-backup/actions
and make sure that the tests pass for all supported Python versions.
@@ -120,4 +128,5 @@ $ git push
$ git push --tags
```

GitHub Actions will then deploy to PyPI if tests pass.
GitHub Actions will then deploy to PyPI, produce a GitHub release, and a container
build if tests pass.
Dockerfile (42 changed lines)
@@ -1,24 +1,52 @@
# To build run:
# $ poetry build
# $ docker build -t ghcr.io/ep1cman/unifi-protect-backup .
FROM python:3.9-alpine

FROM ghcr.io/linuxserver/baseimage-alpine:3.16

LABEL maintainer="ep1cman"

WORKDIR /app
RUN apk add gcc musl-dev zlib-dev jpeg-dev rclone
COPY dist/unifi-protect-backup-0.2.1.tar.gz sdist.tar.gz
RUN pip install sdist.tar.gz

COPY dist/unifi-protect-backup-0.7.3.tar.gz sdist.tar.gz

RUN \
echo "**** install build packages ****" && \
apk add --no-cache --virtual=build-dependencies \
gcc \
musl-dev \
jpeg-dev \
zlib-dev \
python3-dev \
cargo && \
echo "**** install packages ****" && \
apk add --no-cache \
rclone \
ffmpeg \
py3-pip \
python3 && \
echo "**** install unifi-protect-backup ****" && \
pip install --no-cache-dir sdist.tar.gz && \
echo "**** cleanup ****" && \
apk del --purge \
build-dependencies && \
rm -rf \
/tmp/* \
/app/sdist.tar.gz

# Settings
ENV UFP_USERNAME=unifi_protect_user
ENV UFP_PASSWORD=unifi_protect_password
ENV UFP_ADDRESS=127.0.0.1
ENV UFP_PORT=443
ENV UFP_SSL_VERIFY=true
ENV RCLONE_RETENTION=7d
ENV RCLONE_DESTINATION=my_remote:/unifi_protect_backup
ENV RCLONE_DESTINATION=local:/data
ENV VERBOSITY="v"
ENV TZ=UTC
ENV IGNORE_CAMERAS=""

VOLUME [ "/root/.config/rclone/" ]
COPY docker_root/ /

CMD ["sh", "-c", "unifi-protect-backup -${VERBOSITY}"]
VOLUME [ "/config" ]
VOLUME [ "/data" ]
README.md (115 changed lines)
@@ -28,7 +28,7 @@ retention period.

## Requirements
- Python 3.9+
- Unifi Protect version 1.20 or higher (as per [`pyunifiproect`](https://github.com/briis/pyunifiprotect))
- Unifi Protect version 1.20 or higher (as per [`pyunifiprotect`](https://github.com/briis/pyunifiprotect))
- `rclone` installed with at least one remote configured.

## Installation
@@ -36,6 +36,21 @@ retention period.
1. Install `rclone`. Instructions for your platform can be found here: https://rclone.org/install/#quickstart
2. Configure the `rclone` remote you want to backup to. Instructions can be found here: https://rclone.org/docs/#configure
3. `pip install unifi-protect-backup`
4. Optional: Install `ffprobe` so that `unifi-protect-backup` can check the length of the clips it downloads

### Account Setup
In order to connect to your unifi protect instance, you will first need to set up a local admin account:

* Login to your *Local Portal* on your UniFiOS device, and click on *Users*
* Open the `Roles` tab and click `Add Role` in the top right.
* Give the role a name like `unifi protect backup` and give it `Full Management` permissions for the unifi protect app.
* Now switch to the `User` tab and click `Add User` in the top right, and fill out the form. Specific fields to pay attention to:
* Role: Must be the role created in the last step
* Account Type: *Local Access Only*
* Click *Add* at the bottom right.
* Select the newly created user in the list, and navigate to the `Assignments` tab in the left-hand pane, and ensure all cameras are ticked.

## Usage

@@ -48,14 +63,17 @@ Usage: unifi-protect-backup [OPTIONS]
A Python based tool for backing up Unifi Protect event clips as they occur.

Options:
--version Show the version and exit.
--address TEXT Address of Unifi Protect instance
[required]
--port INTEGER Port of Unifi Protect instance
--port INTEGER Port of Unifi Protect instance [default:
443]
--username TEXT Username to login to Unifi Protect instance
[required]
--password TEXT Password for Unifi Protect user [required]
--verify-ssl / --no-verify-ssl Set if you do not have a valid HTTPS
Certificate for your instance
Certificate for your instance [default:
verify-ssl]
--rclone-destination TEXT `rclone` destination path in the format
{rclone remote}:{path on remote}. E.g.
`gdrive:/backups/unifi_protect` [required]
@@ -64,11 +82,25 @@ Options:
of `rclone`
(https://rclone.org/filtering/#max-age-don-
t-transfer-any-file-older-than-this)
[default: 7d]
--rclone-args TEXT Optional extra arguments to pass to `rclone
rcat` directly. Common usage for this would
be to set a bandwidth limit, for example.
--detection-types TEXT A comma separated list of which types of
detections to backup. Valid options are:
`motion`, `person`, `vehicle`, `ring`
[default: motion,person,vehicle,ring]
--ignore-camera TEXT IDs of cameras for which events should not
be backed up. Use multiple times to ignore
multiple IDs. If being set as an environment
variable the IDs should be separated by
whitespace.
--file-structure-format TEXT A Python format string used to generate the
file structure/name on the rclone remote.For
details of the fields available, see the
projects `README.md` file. [default: {camer
a_name}/{event.start:%Y-%m-%d}/{event.end:%Y
-%m-%dT%H-%M-%S} {detection_type}.mp4]
-v, --verbose How verbose the logging output should be.

None: Only log info messages created by
@@ -106,30 +138,97 @@ always take priority over environment variables):
- `UFP_SSL_VERIFY`
- `RCLONE_RETENTION`
- `RCLONE_DESTINATION`
- `RCLONE_ARGS`
- `IGNORE_CAMERAS`
- `DETECTION_TYPES`
- `FILE_STRUCTURE_FORMAT`

## File path formatting

By default, the application will save clips in the following structure on the provided rclone remote:
```
{camera_name}/{event.start:%Y-%m-%d}/{event.end:%Y-%m-%dT%H-%M-%S} {detection_type}.mp4
```
If you wish for the clips to be structured differently you can do this using the `--file-structure-format`
option. It uses standard [python format string syntax](https://docs.python.org/3/library/string.html#formatstrings).

The following fields are provided to the format string:
- *event:* The `Event` object as per https://github.com/briis/pyunifiprotect/blob/master/pyunifiprotect/data/nvr.py
- *duration_seconds:* The duration of the event in seconds
- *detection_type:* A nicely formatted list of the event detection type and the smart detection types (if any)
- *camera_name:* The name of the camera that generated this event

You can optionally format the `event.start`/`event.end` timestamps as per the [`strftime` format](https://docs.python.org/3/library/datetime.html#strftime-strptime-behavior) by appending it after a `:` e.g. to get just the date without the time: `{event.start:%Y-%m-%d}`
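For illustration, the template is expanded with Python's standard `str.format`; the minimal sketch below (the camera name and timestamps are hypothetical, and `SimpleNamespace` stands in for the real pyunifiprotect `Event` object) shows how the default template turns into a clip path:

```
from datetime import datetime
from types import SimpleNamespace

# Hypothetical stand-in for the pyunifiprotect `Event` object the tool passes in
event = SimpleNamespace(
    start=datetime(2022, 7, 31, 14, 5, 22),
    end=datetime(2022, 7, 31, 14, 6, 2),
)

template = "{camera_name}/{event.start:%Y-%m-%d}/{event.end:%Y-%m-%dT%H-%M-%S} {detection_type}.mp4"
print(template.format(event=event, camera_name="Front Door", detection_type="person"))
# -> Front Door/2022-07-31/2022-07-31T14-06-02 person.mp4
```

Any custom `--file-structure-format` value is rendered the same way, so the fields listed above can be rearranged freely.
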
## Docker Container
You can run this tool as a container if you prefer with the following command.
Remember to change the variables to match your setup.

### Backing up locally
By default, if no rclone config is provided, clips will be backed up to `/data`.

```
docker run \
-e UFP_USERNAME='USERNAME' \
-e UFP_PASSWORD='PASSWORD' \
-e UFP_ADDRESS='UNIFI_PROTECT_IP' \
-e UFP_SSL_VERIFY='false' \
-e RCLONE_DESTINATION='my_remote:/unifi_protect_backup' \
-v '/path/to/rclone.conf':'/root/.config/rclone/rclone.conf' \
-v '/path/to/save/clips':'/data' \
ghcr.io/ep1cman/unifi-protect-backup
```

### Backing up to cloud storage
In order to backup to cloud storage you need to provide a `rclone.conf` file.

If you do not already have a `rclone.conf` file you can create one as follows:
```
$ docker run -it --rm -v $PWD:/root/.config/rclone/ ghcr.io/ep1cman/unifi-protect-backup rclone config
$ docker run -it --rm -v $PWD:/root/.config/rclone --entrypoint rclone ghcr.io/ep1cman/unifi-protect-backup config
```
Follow the interactive configuration process; this will create a `rclone.conf`
file in your current directory.

Finally, start the container:
```
docker run \
-e UFP_USERNAME='USERNAME' \
-e UFP_PASSWORD='PASSWORD' \
-e UFP_ADDRESS='UNIFI_PROTECT_IP' \
-e UFP_SSL_VERIFY='false' \
-e RCLONE_DESTINATION='my_remote:/unifi_protect_backup' \
-v '/path/to/save/clips':'/data' \
-v '/path/to/rclone.conf':'/config/rclone/rclone.conf' \
ghcr.io/ep1cman/unifi-protect-backup
```

## Debugging

If you need to debug your rclone setup, you can invoke rclone directly like so:

```
docker run \
--rm \
-v /path/to/rclone.conf:/config/rclone/rclone.conf \
-e RCLONE_CONFIG='/config/rclone/rclone.conf' \
--entrypoint rclone \
ghcr.io/ep1cman/unifi-protect-backup \
{rclone subcommand as per: https://rclone.org/docs/#subcommands}
```

For example, to check that your config file is being read properly and list the configured remotes:
```
docker run \
--rm \
-v /path/to/rclone.conf:/config/rclone/rclone.conf \
-e RCLONE_CONFIG='/config/rclone/rclone.conf' \
--entrypoint rclone \
ghcr.io/ep1cman/unifi-protect-backup \
listremotes
```
This will create a `rclone.conf` file in your current directory

## Credits

- Heavily utilises [`pyunifiproect`](https://github.com/briis/pyunifiprotect) by [@briis](https://github.com/briis/)
- Heavily utilises [`pyunifiprotect`](https://github.com/briis/pyunifiprotect) by [@briis](https://github.com/briis/)
- All the cloud functionality is provided by [`rclone`](https://rclone.org/)
- This package was created with [Cookiecutter](https://github.com/audreyr/cookiecutter) and the [waynerv/cookiecutter-pypackage](https://github.com/waynerv/cookiecutter-pypackage) project template.
docker_root/defaults/rclone.conf (2 changed lines, new file)
@@ -0,0 +1,2 @@
[local]
type = local
docker_root/etc/cont-init.d/30-config (23 changed lines, new file)
@@ -0,0 +1,23 @@
#!/usr/bin/with-contenv bash

mkdir -p /config/rclone

# For backwards compatibility
[[ -f "/root/.config/rclone/rclone.conf" ]] && \
echo "DEPRECATED: Copying rclone conf from /root/.config/rclone/rclone.conf, please change your mount to /config/rclone.conf"
cp \
/root/.config/rclone/rclone.conf \
/config/rclone/rclone.conf

# default config file
[[ ! -f "/config/rclone/rclone.conf" ]] && \
mkdir -p /config/rclone && \
cp \
/defaults/rclone.conf \
/config/rclone/rclone.conf

chown -R abc:abc \
/config

chown -R abc:abc \
/data
docker_root/etc/services.d/unifi-protect-backup/run (6 changed lines, new file)
@@ -0,0 +1,6 @@
#!/usr/bin/with-contenv bash

export RCLONE_CONFIG=/config/rclone/rclone.conf
exec \
s6-setuidgid abc unifi-protect-backup -${VERBOSITY}
poetry.lock (799 changed lines, generated)
File diff suppressed because it is too large.
@@ -1,7 +1,7 @@
[tool]
[tool.poetry]
name = "unifi-protect-backup"
version = "0.2.1"
version = "0.7.3"
homepage = "https://github.com/ep1cman/unifi-protect-backup"
description = "Python tool to backup unifi event clips in realtime."
authors = ["sebastian.goscik <sebastian@goscik.com>"]
@@ -39,8 +39,11 @@ pre-commit = {version = "^2.12.0", optional = true}
toml = {version = "^0.10.2", optional = true}
bump2version = {version = "^1.0.1", optional = true}
tox-asdf = {version = "^0.1.0", optional = true}
pyunifiprotect = "^3.2.1"
pyunifiprotect = "^4.0.11"
aiocron = "^1.8"
ipdb = {version = "^0.13.9", optional = true}
types-pytz = {version = "^2021.3.5", optional = true}
types-cryptography = {version = "^3.3.18", optional = true}

[tool.poetry.extras]
test = [
@@ -50,10 +53,12 @@ test = [
"mypy",
"flake8",
"flake8-docstrings",
"pytest-cov"
"pytest-cov",
"types-pytz",
"types-cryptography"
]

dev = ["tox", "pre-commit", "virtualenv", "pip", "twine", "toml", "bump2version", "tox-asdf"]
dev = ["tox", "pre-commit", "virtualenv", "pip", "twine", "toml", "bump2version", "tox-asdf", "ipdb"]

[tool.poetry.scripts]
unifi-protect-backup = 'unifi_protect_backup.cli:main'
@@ -2,6 +2,6 @@

__author__ = """sebastian.goscik"""
__email__ = 'sebastian@goscik.com'
__version__ = '0.2.1'
__version__ = '0.7.3'

from .unifi_protect_backup import UnifiProtectBackup
@@ -4,17 +4,33 @@ import asyncio

import click

from unifi_protect_backup import UnifiProtectBackup
from unifi_protect_backup import UnifiProtectBackup, __version__

DETECTION_TYPES = ["motion", "person", "vehicle", "ring"]


def _parse_detection_types(ctx, param, value):
# split columns by ',' and remove whitespace
types = [t.strip() for t in value.split(',')]

# validate passed columns
for t in types:
if t not in DETECTION_TYPES:
raise click.BadOptionUsage("detection-types", f"`{t}` is not an available detection type.", ctx)

return types


@click.command()
@click.version_option(__version__)
@click.option('--address', required=True, envvar='UFP_ADDRESS', help='Address of Unifi Protect instance')
@click.option('--port', default=443, envvar='UFP_PORT', help='Port of Unifi Protect instance')
@click.option('--port', default=443, envvar='UFP_PORT', show_default=True, help='Port of Unifi Protect instance')
@click.option('--username', required=True, envvar='UFP_USERNAME', help='Username to login to Unifi Protect instance')
@click.option('--password', required=True, envvar='UFP_PASSWORD', help='Password for Unifi Protect user')
@click.option(
'--verify-ssl/--no-verify-ssl',
default=True,
show_default=True,
envvar='UFP_SSL_VERIFY',
help="Set if you do not have a valid HTTPS Certificate for your instance",
)
@@ -28,10 +44,27 @@ from unifi_protect_backup import UnifiProtectBackup
@click.option(
'--retention',
default='7d',
show_default=True,
envvar='RCLONE_RETENTION',
help="How long should event clips be backed up for. Format as per the `--max-age` argument of "
"`rclone` (https://rclone.org/filtering/#max-age-don-t-transfer-any-file-older-than-this)",
)
@click.option(
'--rclone-args',
default='',
envvar='RCLONE_ARGS',
help="Optional extra arguments to pass to `rclone rcat` directly. Common usage for this would "
"be to set a bandwidth limit, for example.",
)
@click.option(
'--detection-types',
envvar='DETECTION_TYPES',
default=','.join(DETECTION_TYPES),
show_default=True,
help="A comma separated list of which types of detections to backup. "
f"Valid options are: {', '.join([f'`{t}`' for t in DETECTION_TYPES])}",
callback=_parse_detection_types,
)
@click.option(
'--ignore-camera',
'ignore_cameras',
@@ -40,6 +73,14 @@ from unifi_protect_backup import UnifiProtectBackup
help="IDs of cameras for which events should not be backed up. Use multiple times to ignore "
"multiple IDs. If being set as an environment variable the IDs should be separated by whitespace.",
)
@click.option(
'--file-structure-format',
envvar='FILE_STRUCTURE_FORMAT',
default="{camera_name}/{event.start:%Y-%m-%d}/{event.end:%Y-%m-%dT%H-%M-%S} {detection_type}.mp4",
show_default=True,
help="A Python format string used to generate the file structure/name on the rclone remote."
"For details of the fields available, see the projects `README.md` file.",
)
@click.option(
'-v',
'--verbose',
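As a rough illustration of the `--detection-types` handling added above: the callback splits the comma-separated value and rejects anything outside `DETECTION_TYPES`. A minimal standalone sketch of the same parsing logic (not the project's actual module; the real callback also receives and passes the click context):

```
import click

DETECTION_TYPES = ["motion", "person", "vehicle", "ring"]

def parse_detection_types(value: str) -> list:
    """Split a comma-separated value and validate each entry."""
    types = [t.strip() for t in value.split(',')]
    for t in types:
        if t not in DETECTION_TYPES:
            # The real callback passes the click context as the third argument
            raise click.BadOptionUsage("detection-types", f"`{t}` is not an available detection type.")
    return types

print(parse_detection_types("person, ring"))  # -> ['person', 'ring']
```
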
@@ -1,13 +1,18 @@
"""Main module."""
import asyncio
import json
import logging
import pathlib
import re
import shutil
from asyncio.exceptions import TimeoutError
from datetime import datetime, timedelta, timezone
from typing import Callable, List, Optional

import aiocron
import aiohttp
from pyunifiprotect import ProtectApiClient
import pytz
from aiohttp.client_exceptions import ClientPayloadError
from pyunifiprotect import NvrError, ProtectApiClient
from pyunifiprotect.data.nvr import Event
from pyunifiprotect.data.types import EventType, ModelType
from pyunifiprotect.data.websocket import WSAction, WSSubscriptionMessage
@@ -15,7 +20,7 @@ from pyunifiprotect.data.websocket import WSAction, WSSubscriptionMessage

logger = logging.getLogger(__name__)


class RcloneException(Exception):
class SubprocessException(Exception):
"""Exception class for when rclone does not exit with `0`."""

def __init__(self, stdout, stderr, returncode):
@@ -168,10 +173,14 @@ class UnifiProtectBackup:
retention (str): How long should event clips be backed up for. Format as per the
`--max-age` argument of `rclone`
(https://rclone.org/filtering/#max-age-don-t-transfer-any-file-older-than-this)
rclone_args (str): Extra args passed directly to `rclone rcat`.
ignore_cameras (List[str]): List of camera IDs for which to not backup events
verbose (int): How verbose to setup logging, see :func:`setup_logging` for details.
detection_types (List[str]): List of which detection types to backup.
file_structure_format (str): A Python format string for output file path
_download_queue (asyncio.Queue): Queue of events that need to be backed up
_unsub (Callable): Unsubscribe from the websocket callback
_has_ffprobe (bool): If ffprobe was found on the host
"""

def __init__(
@@ -182,7 +191,10 @@ class UnifiProtectBackup:
verify_ssl: bool,
rclone_destination: str,
retention: str,
rclone_args: str,
detection_types: List[str],
ignore_cameras: List[str],
file_structure_format: str,
verbose: int,
port: int = 443,
):
@@ -200,7 +212,11 @@ class UnifiProtectBackup:
retention (str): How long should event clips be backed up for. Format as per the
`--max-age` argument of `rclone`
(https://rclone.org/filtering/#max-age-don-t-transfer-any-file-older-than-this)
ignore_cameras (List[str]): List of camera IDs for which to not backup events
rclone_args (str): A bandwidth limit which is passed to the `--bwlimit` argument of
`rclone` (https://rclone.org/docs/#bwlimit-bandwidth-spec)
detection_types (List[str]): List of which detection types to backup.
ignore_cameras (List[str]): List of camera IDs for which to not backup events.
file_structure_format (str): A Python format string for output file path.
verbose (int): How verbose to setup logging, see :func:`setup_logging` for details.
"""
setup_logging(verbose)
@@ -217,23 +233,37 @@ class UnifiProtectBackup:
logger.debug(f"  {verify_ssl=}")
logger.debug(f"  {rclone_destination=}")
logger.debug(f"  {retention=}")
logger.debug(f"  {rclone_args=}")
logger.debug(f"  {ignore_cameras=}")
logger.debug(f"  {verbose=}")
logger.debug(f"  {detection_types=}")
logger.debug(f"  {file_structure_format=}")

self.rclone_destination = rclone_destination
self.retention = retention
self.rclone_args = rclone_args
self.file_structure_format = file_structure_format

self.address = address
self.port = port
self.username = username
self.password = password
self.verify_ssl = verify_ssl

self._protect = ProtectApiClient(
address,
port,
username,
password,
verify_ssl=verify_ssl,
self.address,
self.port,
self.username,
self.password,
verify_ssl=self.verify_ssl,
subscribed_models={ModelType.EVENT},
)
self.ignore_cameras = ignore_cameras
self._download_queue: asyncio.Queue = asyncio.Queue()
self._unsub: Callable[[], None]
self.detection_types = detection_types

self._has_ffprobe = False

async def start(self):
"""Bootstrap the backup process and kick off the main loop.
@@ -243,13 +273,21 @@ class UnifiProtectBackup:
"""
logger.info("Starting...")

# Ensure rclone is installed and properly configured
# Ensure `rclone` is installed and properly configured
logger.info("Checking rclone configuration...")
await self._check_rclone()

# Check if `ffprobe` is available
ffprobe = shutil.which('ffprobe')
if ffprobe is not None:
logger.debug(f"ffprobe found: {ffprobe}")
self._has_ffprobe = True

# Start the pyunifiprotect connection by calling `update`
logger.info("Connecting to Unifi Protect...")
await self._protect.update()

# Get a mapping of camera ids -> names
logger.info("Found cameras:")
for camera in self._protect.bootstrap.cameras.values():
logger.info(f"  - {camera.id}: {camera.name}")
@@ -263,8 +301,8 @@ class UnifiProtectBackup:
@aiocron.crontab("0 0 * * *")
async def rclone_purge_old():
logger.info("Deleting old files...")
cmd = f"rclone delete -vv --min-age {self.retention} '{self.rclone_destination}'"
cmd += f" && rclone rmdirs -vv --leave-root '{self.rclone_destination}'"
cmd = f'rclone delete -vv --min-age {self.retention} "{self.rclone_destination}"'
cmd += f' && rclone rmdirs -vv --leave-root "{self.rclone_destination}"'
proc = await asyncio.create_subprocess_shell(
cmd,
stdout=asyncio.subprocess.PIPE,
@@ -272,14 +310,67 @@ class UnifiProtectBackup:
)
stdout, stderr = await proc.communicate()
if proc.returncode == 0:
logger.extra_debug(f"stdout:\n{stdout.decode()}")  # type: ignore
logger.extra_debug(f"stderr:\n{stderr.decode()}")  # type: ignore
logger.extra_debug(f"stdout:\n{stdout.decode()}")
logger.extra_debug(f"stderr:\n{stderr.decode()}")
logger.info("Successfully deleted old files")
else:
logger.warn("Failed to purge old files")
logger.warn(f"stdout:\n{stdout.decode()}")
logger.warn(f"stderr:\n{stderr.decode()}")

# We need to catch websocket disconnect and trigger a reconnect.
@aiocron.crontab("* * * * *")
async def check_websocket_and_reconnect():
logger.extra_debug("Checking the status of the websocket...")
if self._protect.check_ws():
logger.extra_debug("Websocket is connected.")
else:
logger.warn("Lost connection to Unifi Protect.")

# Unsubscribe, close the session.
self._unsub()
await self._protect.close_session()

while True:
logger.warn("Attempting reconnect...")

try:
# Start again from scratch. In principle if Unifi
# Protect has not been restarted we should just be able
# to call self._protect.update() to reconnect to the
# websocket. However, if the server has been restarted a
# call to self._protect.check_ws() returns true and some
# seconds later pyunifiprotect detects the websocket as
# disconnected again. Therefore, kill it all and try
# again!
replacement_protect = ProtectApiClient(
self.address,
self.port,
self.username,
self.password,
verify_ssl=self.verify_ssl,
subscribed_models={ModelType.EVENT},
)
# Start the pyunifiprotect connection by calling `update`
await replacement_protect.update()
if replacement_protect.check_ws():
self._protect = replacement_protect
self._unsub = self._protect.subscribe_websocket(self._websocket_callback)
break
else:
logger.warn("Unable to establish connection to Unifi Protect")
except Exception as e:
logger.warn("Unexpected exception occurred while trying to reconnect:")
logger.exception(e)
finally:
# If we get here we need to close the replacement session again
await replacement_protect.close_session()

# Back off for a little while
await asyncio.sleep(10)

logger.info("Re-established connection to Unifi Protect and to the websocket.")

# Launches the main loop
logger.info("Listening for events...")
await self._backup_events()
@@ -293,14 +384,14 @@ class UnifiProtectBackup:
"""Check if rclone is installed and the specified remote is configured.

Raises:
RcloneException: If rclone is not installed or it failed to list remotes
SubprocessException: If rclone is not installed or it failed to list remotes
ValueError: The given rclone destination is for a remote that is not configured

"""
rclone = shutil.which('rclone')
logger.debug(f"rclone found: {rclone}")
if not rclone:
raise RuntimeError("`rclone` is not installed on this system")
logger.debug(f"rclone found: {rclone}")

cmd = "rclone listremotes -vv"
proc = await asyncio.create_subprocess_shell(
@@ -312,7 +403,7 @@ class UnifiProtectBackup:
logger.extra_debug(f"stdout:\n{stdout.decode()}")  # type: ignore
logger.extra_debug(f"stderr:\n{stderr.decode()}")  # type: ignore
if proc.returncode != 0:
raise RcloneException(stdout.decode(), stderr.decode(), proc.returncode)
raise SubprocessException(stdout.decode(), stderr.decode(), proc.returncode)

# Check if the destination is for a configured remote
for line in stdout.splitlines():
@@ -340,8 +431,22 @@ class UnifiProtectBackup:
return
if msg.new_obj.end is None:
return
if msg.new_obj.type not in {EventType.MOTION, EventType.SMART_DETECT}:
if msg.new_obj.type not in [EventType.MOTION, EventType.SMART_DETECT, EventType.RING]:
return
if msg.new_obj.type is EventType.MOTION and "motion" not in self.detection_types:
logger.extra_debug(f"Skipping unwanted motion detection event: {msg.new_obj.id}")  # type: ignore
return
if msg.new_obj.type is EventType.RING and "ring" not in self.detection_types:
logger.extra_debug(f"Skipping unwanted ring event: {msg.new_obj.id}")  # type: ignore
return
elif msg.new_obj.type is EventType.SMART_DETECT:
for event_smart_detection_type in msg.new_obj.smart_detect_types:
if event_smart_detection_type not in self.detection_types:
logger.extra_debug(  # type: ignore
f"Skipping unwanted {event_smart_detection_type} detection event: {msg.new_obj.id}"
)
return

self._download_queue.put_nowait(msg.new_obj)
logger.debug(f"Adding event {msg.new_obj.id} to queue (Current queue={self._download_queue.qsize()})")
@@ -354,18 +459,39 @@ class UnifiProtectBackup:

"""
while True:
event = await self._download_queue.get()
destination = self.generate_file_path(event)

logger.info(f"Backing up event: {event.id}")
logger.debug(f"Remaining Queue: {self._download_queue.qsize()}")
logger.debug(f"  Camera: {self._protect.bootstrap.cameras[event.camera_id].name}")
logger.debug(f"  Type: {event.type}")
logger.debug(f"  Start: {event.start.strftime('%Y-%m-%dT%H-%M-%S')}")
logger.debug(f"  End: {event.end.strftime('%Y-%m-%dT%H-%M-%S')}")
logger.debug(f"  Duration: {event.end-event.start}")

try:
event = await self._download_queue.get()

# Fix timezones since pyunifiprotect sets all timestamps to UTC. Instead localize them to
# the timezone of the unifi protect NVR.
event.start = event.start.replace(tzinfo=pytz.utc).astimezone(self._protect.bootstrap.nvr.timezone)
event.end = event.end.replace(tzinfo=pytz.utc).astimezone(self._protect.bootstrap.nvr.timezone)

logger.info(f"Backing up event: {event.id}")
logger.debug(f"Remaining Queue: {self._download_queue.qsize()}")
logger.debug(f"  Camera: {await self._get_camera_name(event.camera_id)}")
if event.type == EventType.SMART_DETECT:
logger.debug(f"  Type: {event.type} ({', '.join(event.smart_detect_types)})")
else:
logger.debug(f"  Type: {event.type}")
logger.debug(f"  Start: {event.start.strftime('%Y-%m-%dT%H-%M-%S')} ({event.start.timestamp()})")
logger.debug(f"  End: {event.end.strftime('%Y-%m-%dT%H-%M-%S')} ({event.end.timestamp()})")
duration = (event.end - event.start).total_seconds()
logger.debug(f"  Duration: {duration}")

# Unifi protect does not return full video clips if the clip is requested too soon.
# There are two issues at play here:
#  - Protect will only cut a clip on an keyframe which happen every 5s
#  - Protect's pipeline needs a finite amount of time to make a clip available
# So we will wait 1.5x the keyframe interval to ensure that there is always ample video
# stored and Protect can return a full clip (which should be at least the length requested,
# but often longer)
time_since_event_ended = datetime.utcnow().replace(tzinfo=timezone.utc) - event.end
sleep_time = (timedelta(seconds=5 * 1.5) - time_since_event_ended).total_seconds()
if sleep_time > 0:
logger.debug(f"  Sleeping ({sleep_time}s) to ensure clip is ready to download...")
await asyncio.sleep(sleep_time)

# Download video
logger.debug("  Downloading video...")
for x in range(5):
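The sleep computation in the hunk above can be checked with a small standalone sketch (the 5 s keyframe interval and the 1.5x margin are the values used in the code above; the event time here is hypothetical):

```
from datetime import datetime, timedelta, timezone

KEYFRAME_INTERVAL = timedelta(seconds=5)  # Protect cuts clips on ~5 s keyframes
SAFETY_FACTOR = 1.5                       # wait 1.5x the keyframe interval

def clip_ready_delay(event_end: datetime, now: datetime) -> float:
    """Seconds to wait before requesting the clip (0 if enough time has already passed)."""
    time_since_event_ended = now - event_end
    sleep_time = (KEYFRAME_INTERVAL * SAFETY_FACTOR - time_since_event_ended).total_seconds()
    return max(sleep_time, 0.0)

# Hypothetical example: the event ended 3 s ago, so we wait 7.5 - 3 = 4.5 s more.
now = datetime.now(timezone.utc)
print(clip_ready_delay(now - timedelta(seconds=3), now))  # -> 4.5
```
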
@@ -373,7 +499,7 @@ class UnifiProtectBackup:
video = await self._protect.get_camera_video(event.camera_id, event.start, event.end)
assert isinstance(video, bytes)
break
except (AssertionError, aiohttp.client_exceptions.ClientPayloadError) as e:
except (AssertionError, ClientPayloadError, TimeoutError) as e:
logger.warn(f"  Failed download attempt {x+1}, retying in 1s")
logger.exception(e)
await asyncio.sleep(1)
@@ -381,14 +507,33 @@ class UnifiProtectBackup:
logger.warn(f"Download failed after 5 attempts, abandoning event {event.id}:")
continue

destination = await self.generate_file_path(event)

# Get the actual length of the downloaded video using ffprobe
if self._has_ffprobe:
try:
downloaded_duration = await self._get_video_length(video)
msg = (
f"  Downloaded video length: {downloaded_duration:.3f}s"
f"({downloaded_duration - duration:+.3f}s)"
)
if downloaded_duration < duration:
logger.warning(msg)
else:
logger.debug(msg)
except SubprocessException as e:
logger.warn("  `ffprobe` failed")
logger.exception(e)

# Upload video
logger.debug("  Uploading video via rclone...")
logger.debug(f"  To: {destination}")
logger.debug(f"  Size: {human_readable_size(len(video))}")
for x in range(5):
try:
await self._upload_video(video, destination)
await self._upload_video(video, destination, self.rclone_args)
break
except RcloneException as e:
except SubprocessException as e:
logger.warn(f"  Failed upload attempt {x+1}, retying in 1s")
logger.exception(e)
await asyncio.sleep(1)
@@ -402,7 +547,7 @@ class UnifiProtectBackup:
logger.warn(f"Unexpected exception occurred, abandoning event {event.id}:")
logger.exception(e)

async def _upload_video(self, video: bytes, destination: pathlib.Path):
async def _upload_video(self, video: bytes, destination: pathlib.Path, rclone_args: str):
"""Upload video using rclone.

In order to avoid writing to disk, the video file data is piped directly
@@ -411,11 +556,12 @@ class UnifiProtectBackup:
Args:
video (bytes): The data to be written to the file
destination (pathlib.Path): Where rclone should write the file
rclone_args (str): Optional extra arguments to pass to `rclone`

Raises:
RuntimeError: If rclone returns a non-zero exit code
"""
cmd = f"rclone rcat -vv '{destination}'"
cmd = f'rclone rcat -vv {rclone_args} "{destination}"'
proc = await asyncio.create_subprocess_shell(
cmd,
stdin=asyncio.subprocess.PIPE,
@@ -427,17 +573,39 @@ class UnifiProtectBackup:
logger.extra_debug(f"stdout:\n{stdout.decode()}")  # type: ignore
logger.extra_debug(f"stderr:\n{stderr.decode()}")  # type: ignore
else:
raise RcloneException(stdout.decode(), stderr.decode(), proc.returncode)
raise SubprocessException(stdout.decode(), stderr.decode(), proc.returncode)

def generate_file_path(self, event: Event) -> pathlib.Path:
async def _get_video_length(self, video: bytes) -> float:
cmd = 'ffprobe -v quiet -show_streams -select_streams v:0 -of json -'
proc = await asyncio.create_subprocess_shell(
cmd,
stdin=asyncio.subprocess.PIPE,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE,
)
stdout, stderr = await proc.communicate(video)
if proc.returncode == 0:
logger.extra_debug(f"stdout:\n{stdout.decode()}")  # type: ignore
logger.extra_debug(f"stderr:\n{stderr.decode()}")  # type: ignore

json_data = json.loads(stdout.decode())
return float(json_data['streams'][0]['duration'])

else:
raise SubprocessException(stdout.decode(), stderr.decode(), proc.returncode)

async def generate_file_path(self, event: Event) -> pathlib.Path:
"""Generates the rclone destination path for the provided event.

Generates paths in the following structure:
::
rclone_destination
|- Camera Name
|- {Date}
|- {start timestamp} {event type} ({detections}).mp4
Generates rclone destination path for the given even based upon the format string
in `self.file_structure_format`.

Provides the following fields to the format string:
event: The `Event` object as per
https://github.com/briis/pyunifiprotect/blob/master/pyunifiprotect/data/nvr.py
duration_seconds: The duration of the event in seconds
detection_type: A nicely formatted list of the event detection type and the smart detection types (if any)
camera_name: The name of the camera that generated this event

Args:
event: The event for which to create an output path
@@ -446,18 +614,37 @@ class UnifiProtectBackup:
pathlib.Path: The rclone path the event should be backed up to

"""
path = pathlib.Path(self.rclone_destination)
assert isinstance(event.camera_id, str)
path /= self._protect.bootstrap.cameras[event.camera_id].name  # directory per camera
path /= event.start.strftime("%Y-%m-%d")  # Directory per day
assert isinstance(event.start, datetime)
assert isinstance(event.end, datetime)

file_name = f"{event.start.strftime('%Y-%m-%dT%H-%M-%S')} {event.type}"
format_context = {
"event": event,
"duration_seconds": (event.end - event.start).total_seconds(),
"detection_type": f"{event.type} ({' '.join(event.smart_detect_types)})"
if event.smart_detect_types
else f"{event.type}",
"camera_name": await self._get_camera_name(event.camera_id),
}

if event.smart_detect_types:
detections = " ".join(event.smart_detect_types)
file_name += f" ({detections})"
file_name += ".mp4"
file_path = self.file_structure_format.format(**format_context)
file_path = re.sub(r'[^\w\-_\.\(\)/ ]', '', file_path)  # Sanitize any invalid chars

path /= file_name
return pathlib.Path(f"{self.rclone_destination}/{file_path}")

return path
async def _get_camera_name(self, id: str):
try:
return self._protect.bootstrap.cameras[id].name
except KeyError:
# Refresh cameras
logger.debug(f"Unknown camera id: '{id}', checking API")

try:
await self._protect.update(force=True)
except NvrError:
logger.debug(f"Unknown camera id: '{id}'")
raise

name = self._protect.bootstrap.cameras[id].name
logger.debug(f"Found camera - {id}: {name}")
return name