mirror of https://github.com/ep1cman/unifi-protect-backup.git
Made purge interval configurable and default back to once a day
@@ -131,6 +131,11 @@ Options:
   --download-buffer-size TEXT     How big the download buffer should be (you
                                   can use suffixes like "B", "KiB", "MiB",
                                   "GiB")  [default: 512MiB]
+  --purge_interval TEXT           How frequently to check for files to purge.
+
+                                  NOTE: Can create a lot of API calls, so be
+                                  careful if your cloud provider charges you
+                                  per API call  [default: 1d]
   --help                          Show this message and exit.
 ```

@@ -150,6 +155,7 @@ always take priority over environment variables):
 - `SQLITE_PATH`
 - `DOWNLOAD_BUFFER_SIZE`
 - `COLOR_LOGGING`
+- `PURGE_INTERVAL`

 ## File path formatting

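As the README states, command-line arguments always take priority over environment variables. A minimal sketch of that precedence for the new option, assuming only standard click behaviour (not code from this commit):

```python
import click

@click.command()
@click.option('--purge_interval', default='1d', show_default=True, envvar='PURGE_INTERVAL')
def demo(purge_interval):
    # Resolution order: an explicit --purge_interval flag wins, then the
    # PURGE_INTERVAL environment variable, then the '1d' default.
    click.echo(f'purge interval: {purge_interval}')

if __name__ == '__main__':
    demo()
```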
@@ -126,6 +126,14 @@ all warnings, and websocket data
     help='How big the download buffer should be (you can use suffixes like "B", "KiB", "MiB", "GiB")',
     callback=lambda ctx, param, value: human_readable_to_float(value),
 )
+@click.option(
+    '--purge_interval',
+    default='1d',
+    show_default=True,
+    envvar='PURGE_INTERVAL',
+    help="How frequently to check for files to purge.\n\nNOTE: Can create a lot of API calls, so be careful if "
+    "your cloud provider charges you per API call",
+)
 def main(**kwargs):
     """A Python based tool for backing up Unifi Protect event clips as they occur."""
     event_listener = UnifiProtectBackup(**kwargs)

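For context on the `callback=` pattern visible in the surrounding option: a click callback can normalise the raw string before `main()` receives it. The new `--purge_interval` is instead kept as a string here and parsed later via `parse_rclone_retention`. A hedged sketch with a stand-in converter (`to_bytes` is illustrative, not the project's `human_readable_to_float`):

```python
import click

def to_bytes(value: str) -> float:
    # Stand-in converter; longer suffixes are checked first so 'MiB' is not mistaken for 'B'.
    factors = {'GiB': 1024**3, 'MiB': 1024**2, 'KiB': 1024, 'B': 1}
    for suffix, factor in factors.items():
        if value.endswith(suffix):
            return float(value[:-len(suffix)]) * factor
    return float(value)

@click.command()
@click.option('--download-buffer-size', default='512MiB',
              callback=lambda ctx, param, value: to_bytes(value))
def demo(download_buffer_size):
    click.echo(f'{download_buffer_size:.0f} bytes')

if __name__ == '__main__':
    demo()
```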
@@ -6,17 +6,11 @@ from datetime import datetime
 import aiosqlite
 from dateutil.relativedelta import relativedelta

-from unifi_protect_backup.utils import parse_rclone_retention, run_command
+from unifi_protect_backup.utils import run_command, wait_until

 logger = logging.getLogger(__name__)


-async def wait_until(dt):
-    # sleep until the specified datetime
-    now = datetime.now()
-    await asyncio.sleep((dt - now).total_seconds())
-
-
 async def delete_file(file_path):
     returncode, stdout, stderr = await run_command(f'rclone delete -vv "{file_path}"')
     if returncode != 0:
@@ -32,11 +26,17 @@ async def tidy_empty_dirs(base_dir_path):
 class Purge:
     """Deletes old files from rclone remotes"""

-    def __init__(self, db: aiosqlite.Connection, retention: relativedelta, rclone_destination: str, interval: int = 60):
+    def __init__(
+        self,
+        db: aiosqlite.Connection,
+        retention: relativedelta,
+        rclone_destination: str,
+        interval: relativedelta = relativedelta(days=1),
+    ):
         self._db: aiosqlite.Connection = db
         self.retention: relativedelta = retention
         self.rclone_destination: str = rclone_destination
-        self.interval: int = interval
+        self.interval: relativedelta = interval

     async def start(self):
         """Main loop - runs forever"""
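Moving the interval from an `int` of seconds to a `relativedelta` keeps the schedule calendar-aware. A small illustrative example (not from the commit):

```python
from datetime import datetime
from dateutil.relativedelta import relativedelta

# Calendar-aware arithmetic: adding "one month" respects month lengths,
# which a fixed number of seconds cannot do.
print(datetime(2024, 1, 31) + relativedelta(months=1))  # 2024-02-29 00:00:00
print(datetime(2024, 1, 31) + relativedelta(days=1))    # 2024-02-01 00:00:00
```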
@@ -72,4 +72,6 @@ class Purge:
                 logger.warn(f"Unexpected exception occurred during purge:")
                 logger.exception(e)

-            await asyncio.sleep(self.interval)
+            next_purge_time = datetime.now() + self.interval
+            logger.extra_debug(f'sleeping until {next_purge_time}')
+            await wait_until(next_purge_time)

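The loop now sleeps until an absolute next-run time rather than for a fixed number of seconds. A self-contained sketch of the pattern, with illustrative names rather than the project's exact code:

```python
import asyncio
from datetime import datetime
from dateutil.relativedelta import relativedelta

async def wait_until(dt: datetime) -> None:
    # Sleep until the given wall-clock time; never sleep a negative amount.
    await asyncio.sleep(max((dt - datetime.now()).total_seconds(), 0))

async def purge_loop(interval: relativedelta = relativedelta(days=1)) -> None:
    while True:
        print('purging expired backups...')  # stand-in for the real purge step
        next_run = datetime.now() + interval
        print(f'sleeping until {next_run}')
        await wait_until(next_run)

# Example invocation with a short interval for demonstration:
# asyncio.run(purge_loop(relativedelta(seconds=5)))
```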
@@ -60,6 +60,7 @@ class UnifiProtectBackup:
         file_structure_format: str,
         verbose: int,
         download_buffer_size: int,
+        purge_interval: str,
         sqlite_path: str = "events.sqlite",
         color_logging=False,
         port: int = 443,
@@ -85,6 +86,7 @@ class UnifiProtectBackup:
             file_structure_format (str): A Python format string for output file path.
             verbose (int): How verbose to setup logging, see :func:`setup_logging` for details.
             sqlite_path (str): Path where to find/create sqlite database
+            purge_interval (str): How often to check for files to delete
         """
         setup_logging(verbose, color_logging)

@@ -107,6 +109,7 @@ class UnifiProtectBackup:
         logger.debug(f"  {file_structure_format=}")
         logger.debug(f"  {sqlite_path=}")
         logger.debug(f"  download_buffer_size={human_readable_size(download_buffer_size)}")
+        logger.debug(f"  {purge_interval=}")

         self.rclone_destination = rclone_destination
         self.retention = parse_rclone_retention(retention)
@@ -135,6 +138,7 @@ class UnifiProtectBackup:
         self._sqlite_path = sqlite_path
         self._db = None
         self._download_buffer_size = download_buffer_size
+        self._purge_interval = parse_rclone_retention(purge_interval)

     async def start(self):
         """Bootstrap the backup process and kick off the main loop.
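The interval string is converted with the project's `parse_rclone_retention`, the same helper already used for `retention`. Its implementation is not shown in this diff; a hypothetical sketch of the general idea (mapping an rclone-style suffix string such as `1d` onto a `relativedelta`) could look like this:

```python
import re
from dateutil.relativedelta import relativedelta

_UNITS = {'s': 'seconds', 'm': 'minutes', 'h': 'hours',
          'd': 'days', 'w': 'weeks', 'M': 'months', 'y': 'years'}

def parse_interval(text: str) -> relativedelta:
    # Illustrative only: "1d" -> relativedelta(days=1), "2w3d" -> weeks=2, days=3.
    kwargs = {}
    for value, unit in re.findall(r'(\d+)([smhdwMy])', text):
        kwargs[_UNITS[unit]] = kwargs.get(_UNITS[unit], 0) + int(value)
    if not kwargs:
        raise ValueError(f'could not parse interval: {text!r}')
    return relativedelta(**kwargs)

assert parse_interval('1d') == relativedelta(days=1)
```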
@@ -201,7 +205,7 @@ class UnifiProtectBackup:

         # Create purge task
         # This will, every midnight, purge old backups from the rclone remotes and database
-        purge = Purge(self._db, self.retention, self.rclone_destination)
+        purge = Purge(self._db, self.retention, self.rclone_destination, self._purge_interval)
         tasks.append(asyncio.create_task(purge.start()))

         # Create missing event task

@@ -2,6 +2,7 @@ import logging
 import re
 import asyncio
 from typing import Optional
+from datetime import datetime

 from dateutil.relativedelta import relativedelta

@@ -385,3 +386,9 @@ class VideoQueue(asyncio.Queue):
         self._unfinished_tasks += 1
         self._finished.clear()
         self._wakeup_next(self._getters)
+
+
+async def wait_until(dt):
+    # sleep until the specified datetime
+    now = datetime.now()
+    await asyncio.sleep((dt - now).total_seconds())