mirror of
https://github.com/ep1cman/unifi-protect-backup.git
synced 2025-12-05 23:53:30 +00:00
Compare commits
8 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3540ec1d04 | ||
|
|
8ed60aa925 | ||
|
|
ca455ebcd0 | ||
|
|
16315ca23c | ||
|
|
ac0f6f5fcb | ||
|
|
0c34294b7e | ||
|
|
f195b8a4a4 | ||
|
|
645e339314 |
@@ -1,5 +1,5 @@
|
||||
[bumpversion]
|
||||
current_version = 0.8.4
|
||||
current_version = 0.8.8
|
||||
commit = True
|
||||
tag = True
|
||||
|
||||
|
||||
18
CHANGELOG.md
18
CHANGELOG.md
@@ -4,7 +4,25 @@ All notable changes to this project will be documented in this file.
|
||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## [0.8.8] - 2022-12-30
|
||||
### Added
|
||||
- Added ability to configure purge interval
|
||||
|
||||
### Fixed
|
||||
- Purge interval returned to previous default of once a day
|
||||
|
||||
## [0.8.7] - 2022-12-11
|
||||
### Fixed
|
||||
- Fix improper unpacking of upload events
|
||||
|
||||
## [0.8.6] - 2022-12-10
|
||||
### Fixed
|
||||
- check that the current event is not None before trying to get its ID
|
||||
- downloader/uploader clear their current event once it has been processed
|
||||
|
||||
## [0.8.5] - 2022-12-09
|
||||
### Fixed
|
||||
- use event ID of currently up/downloading event, not whole event object when checking missing events
|
||||
|
||||
## [0.8.4] - 2022-12-09
|
||||
### Added
|
||||
|
||||
@@ -8,7 +8,7 @@ LABEL maintainer="ep1cman"
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY dist/unifi_protect_backup-0.8.4.tar.gz sdist.tar.gz
|
||||
COPY dist/unifi_protect_backup-0.8.8.tar.gz sdist.tar.gz
|
||||
|
||||
RUN \
|
||||
echo "**** install build packages ****" && \
|
||||
|
||||
@@ -131,6 +131,11 @@ Options:
|
||||
--download-buffer-size TEXT How big the download buffer should be (you
|
||||
can use suffixes like "B", "KiB", "MiB",
|
||||
"GiB") [default: 512MiB]
|
||||
--purge_interval TEXT How frequently to check for files to purge.
|
||||
|
||||
NOTE: Can create a lot of API calls, so be
|
||||
careful if your cloud provider charges you per
|
||||
api call [default: 1d]
|
||||
--help Show this message and exit.
|
||||
```
|
||||
|
||||
@@ -150,6 +155,7 @@ always take priority over environment variables):
|
||||
- `SQLITE_PATH`
|
||||
- `DOWNLOAD_BUFFER_SIZE`
|
||||
- `COLOR_LOGGING`
|
||||
- `PURGE_INTERVAL`
|
||||
|
||||
## File path formatting
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
[tool]
|
||||
[tool.poetry]
|
||||
name = "unifi_protect_backup"
|
||||
version = "0.8.4"
|
||||
version = "0.8.8"
|
||||
homepage = "https://github.com/ep1cman/unifi-protect-backup"
|
||||
description = "Python tool to backup unifi event clips in realtime."
|
||||
authors = ["sebastian.goscik <sebastian@goscik.com>"]
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
__author__ = """sebastian.goscik"""
|
||||
__email__ = 'sebastian@goscik.com'
|
||||
__version__ = '0.8.4'
|
||||
__version__ = '0.8.8'
|
||||
|
||||
# from .unifi_protect_backup import UnifiProtectBackup
|
||||
from .downloader import VideoDownloader
|
||||
|
||||
@@ -126,6 +126,14 @@ all warnings, and websocket data
|
||||
help='How big the download buffer should be (you can use suffixes like "B", "KiB", "MiB", "GiB")',
|
||||
callback=lambda ctx, param, value: human_readable_to_float(value),
|
||||
)
|
||||
@click.option(
|
||||
'--purge_interval',
|
||||
default='1d',
|
||||
show_default=True,
|
||||
envvar='PURGE_INTERVAL',
|
||||
help="How frequently to check for file to purge.\n\nNOTE: Can create a lot of API calls, so be careful if "
|
||||
"your cloud provider charges you per api call",
|
||||
)
|
||||
def main(**kwargs):
|
||||
"""A Python based tool for backing up Unifi Protect event clips as they occur."""
|
||||
event_listener = UnifiProtectBackup(**kwargs)
|
||||
|
||||
@@ -106,7 +106,8 @@ class VideoDownloader:
|
||||
|
||||
await self.upload_queue.put((event, video))
|
||||
self.logger.debug("Added to upload queue")
|
||||
|
||||
self.current_event = None
|
||||
|
||||
except Exception as e:
|
||||
self.logger.warn(f"Unexpected exception occurred, abandoning event {event.id}:")
|
||||
self.logger.exception(e)
|
||||
|
||||
@@ -62,9 +62,14 @@ class MissingEventChecker:
|
||||
|
||||
# Prevent re-adding events currently in the download/upload queue
|
||||
downloading_event_ids = {event.id for event in self._downloader.download_queue._queue}
|
||||
downloading_event_ids.add(self._downloader.current_event.id)
|
||||
uploading_event_ids = {event.id for event in self._uploader.upload_queue._queue}
|
||||
uploading_event_ids.add(self._uploader.current_event.id)
|
||||
current_download = self._downloader.current_event
|
||||
if current_download is not None:
|
||||
downloading_event_ids.add(current_download.id)
|
||||
|
||||
uploading_event_ids = {event.id for event, video in self._uploader.upload_queue._queue}
|
||||
current_upload = self._uploader.current_event
|
||||
if current_upload is not None:
|
||||
uploading_event_ids.add(current_upload.id)
|
||||
|
||||
missing_event_ids = set(unifi_events.keys()) - (
|
||||
db_event_ids | downloading_event_ids | uploading_event_ids
|
||||
|
||||
@@ -6,17 +6,11 @@ from datetime import datetime
|
||||
import aiosqlite
|
||||
from dateutil.relativedelta import relativedelta
|
||||
|
||||
from unifi_protect_backup.utils import parse_rclone_retention, run_command
|
||||
from unifi_protect_backup.utils import run_command, wait_until
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def wait_until(dt):
|
||||
# sleep until the specified datetime
|
||||
now = datetime.now()
|
||||
await asyncio.sleep((dt - now).total_seconds())
|
||||
|
||||
|
||||
async def delete_file(file_path):
|
||||
returncode, stdout, stderr = await run_command(f'rclone delete -vv "{file_path}"')
|
||||
if returncode != 0:
|
||||
@@ -32,11 +26,17 @@ async def tidy_empty_dirs(base_dir_path):
|
||||
class Purge:
|
||||
"""Deletes old files from rclone remotes"""
|
||||
|
||||
def __init__(self, db: aiosqlite.Connection, retention: relativedelta, rclone_destination: str, interval: int = 60):
|
||||
def __init__(
|
||||
self,
|
||||
db: aiosqlite.Connection,
|
||||
retention: relativedelta,
|
||||
rclone_destination: str,
|
||||
interval: relativedelta(days=1),
|
||||
):
|
||||
self._db: aiosqlite.Connection = db
|
||||
self.retention: relativedelta = retention
|
||||
self.rclone_destination: str = rclone_destination
|
||||
self.interval: int = interval
|
||||
self.interval: relativedelta = interval
|
||||
|
||||
async def start(self):
|
||||
"""Main loop - runs forever"""
|
||||
@@ -72,4 +72,6 @@ class Purge:
|
||||
logger.warn(f"Unexpected exception occurred during purge:")
|
||||
logger.exception(e)
|
||||
|
||||
await asyncio.sleep(self.interval)
|
||||
next_purge_time = datetime.now() + self.interval
|
||||
logger.extra_debug(f'sleeping until {next_purge_time}')
|
||||
await wait_until(next_purge_time)
|
||||
|
||||
@@ -60,6 +60,7 @@ class UnifiProtectBackup:
|
||||
file_structure_format: str,
|
||||
verbose: int,
|
||||
download_buffer_size: int,
|
||||
purge_interval: str,
|
||||
sqlite_path: str = "events.sqlite",
|
||||
color_logging=False,
|
||||
port: int = 443,
|
||||
@@ -85,6 +86,7 @@ class UnifiProtectBackup:
|
||||
file_structure_format (str): A Python format string for output file path.
|
||||
verbose (int): How verbose to setup logging, see :func:`setup_logging` for details.
|
||||
sqlite_path (str): Path where to find/create sqlite database
|
||||
purge_interval (str): How often to check for files to delete
|
||||
"""
|
||||
setup_logging(verbose, color_logging)
|
||||
|
||||
@@ -107,6 +109,7 @@ class UnifiProtectBackup:
|
||||
logger.debug(f" {file_structure_format=}")
|
||||
logger.debug(f" {sqlite_path=}")
|
||||
logger.debug(f" download_buffer_size={human_readable_size(download_buffer_size)}")
|
||||
logger.debug(f" {purge_interval=}")
|
||||
|
||||
self.rclone_destination = rclone_destination
|
||||
self.retention = parse_rclone_retention(retention)
|
||||
@@ -135,6 +138,7 @@ class UnifiProtectBackup:
|
||||
self._sqlite_path = sqlite_path
|
||||
self._db = None
|
||||
self._download_buffer_size = download_buffer_size
|
||||
self._purge_interval = parse_rclone_retention(purge_interval)
|
||||
|
||||
async def start(self):
|
||||
"""Bootstrap the backup process and kick off the main loop.
|
||||
@@ -201,7 +205,7 @@ class UnifiProtectBackup:
|
||||
|
||||
# Create purge task
|
||||
# This will, every midnight, purge old backups from the rclone remotes and database
|
||||
purge = Purge(self._db, self.retention, self.rclone_destination)
|
||||
purge = Purge(self._db, self.retention, self.rclone_destination, self._purge_interval)
|
||||
tasks.append(asyncio.create_task(purge.start()))
|
||||
|
||||
# Create missing event task
|
||||
|
||||
@@ -64,7 +64,8 @@ class VideoUploader:
|
||||
await self._update_database(event, destination)
|
||||
|
||||
self.logger.debug(f"Uploaded")
|
||||
|
||||
self.current_event = None
|
||||
|
||||
except Exception as e:
|
||||
self.logger.warn(f"Unexpected exception occurred, abandoning event {event.id}:")
|
||||
self.logger.exception(e)
|
||||
|
||||
@@ -2,6 +2,7 @@ import logging
|
||||
import re
|
||||
import asyncio
|
||||
from typing import Optional
|
||||
from datetime import datetime
|
||||
|
||||
from dateutil.relativedelta import relativedelta
|
||||
|
||||
@@ -385,3 +386,9 @@ class VideoQueue(asyncio.Queue):
|
||||
self._unfinished_tasks += 1
|
||||
self._finished.clear()
|
||||
self._wakeup_next(self._getters)
|
||||
|
||||
|
||||
async def wait_until(dt):
|
||||
# sleep until the specified datetime
|
||||
now = datetime.now()
|
||||
await asyncio.sleep((dt - now).total_seconds())
|
||||
|
||||
Reference in New Issue
Block a user