mirror of https://github.com/ep1cman/unifi-protect-backup.git
Linting
@@ -12,7 +12,7 @@ repos:
         args: [ --unsafe ]
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
-    rev: v0.5.7
+    rev: v0.11.4
     hooks:
       # Run the linter.
       - id: ruff
@@ -63,8 +63,14 @@ line-length = 120
 target-version = "py310"

 [tool.ruff.lint]
 select = ["E","F","D","B","W"]
 ignore = ["D203", "D213"]

+[tool.ruff.format]
+quote-style = "double"
+indent-style = "space"
+line-ending = "lf"
+docstring-code-format = true
+
 [tool.mypy]
 allow_redefinition = true
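Worth noting: D203 and D213 are the pydocstyle rules that conflict with D211 and D212, so ignoring them leaves one consistent docstring convention (no blank line before a class docstring, summary on the first line of a multi-line docstring). A small illustrative snippet of the style that passes with this configuration (not code from the repository):

    class Example:
        """Summary sits on the first line, with no blank line above the docstring.

        Extended description follows after a blank line.
        """

With these sections in pyproject.toml, `ruff check .` applies the selected E/F/D/B/W rules and `ruff format .` applies the formatting options.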
@@ -29,7 +29,7 @@ def _parse_detection_types(ctx, param, value):


 def parse_rclone_retention(ctx, param, retention) -> relativedelta:
-    """Parses the rclone `retention` parameter into a relativedelta which can then be used to calculate datetimes."""
+    """Parse the rclone `retention` parameter into a relativedelta which can then be used to calculate datetimes."""
     matches = {k: int(v) for v, k in re.findall(r"([\d]+)(ms|s|m|h|d|w|M|y)", retention)}

     # Check that we matched the whole string
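For reference, the regex above turns a retention string such as "2w3d" into suffix/count pairs; a rough sketch of mapping those pairs onto relativedelta fields (the mapping below is assumed for illustration, not taken from this codebase):

    import re

    from dateutil.relativedelta import relativedelta

    # Assumed suffix -> relativedelta keyword mapping; "ms" is skipped here since
    # relativedelta has no milliseconds field.
    FIELDS = {"s": "seconds", "m": "minutes", "h": "hours", "d": "days", "w": "weeks", "M": "months", "y": "years"}

    def retention_to_delta(retention: str) -> relativedelta:
        matches = {k: int(v) for v, k in re.findall(r"([\d]+)(ms|s|m|h|d|w|M|y)", retention)}
        # e.g. "2w3d" -> {"w": 2, "d": 3} -> relativedelta(weeks=2, days=3)
        return relativedelta(**{FIELDS[k]: v for k, v in matches.items() if k in FIELDS})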
@@ -248,8 +248,7 @@ a lot of failed downloads with the default downloader.
     help="Max number of parallel uploads to allow",
 )
 def main(**kwargs):
-    """A Python based tool for backing up Unifi Protect event clips as they occur."""
-
+    """Python based tool for backing up Unifi Protect event clips as they occur."""
     try:
         # Validate only one of the camera select arguments was given
         if kwargs.get("cameras") and kwargs.get("ignore_cameras"):
@@ -27,7 +27,7 @@ from unifi_protect_backup.utils import (


 async def get_video_length(video: bytes) -> float:
-    """Uses ffprobe to get the length of the video file passed in as a byte stream."""
+    """Use ffprobe to get the length of the video file passed in as a byte stream."""
     returncode, stdout, stderr = await run_command(
         "ffprobe -v quiet -show_streams -select_streams v:0 -of json -", video
     )
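For reference, that ffprobe invocation prints a JSON document with a `streams` array for the selected video stream, and the clip length can usually be read from its `duration` field. A rough sketch of the parsing step (illustrative only, not the repository's exact code):

    import json

    def duration_from_ffprobe_output(stdout: bytes) -> float:
        # "ffprobe ... -show_streams -of json -" prints {"streams": [{"duration": "12.345", ...}]}
        info = json.loads(stdout)
        return float(info["streams"][0]["duration"])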
@@ -62,6 +62,7 @@ class VideoDownloader:
             color_logging (bool): Whether or not to add color to logging output
             download_rate_limit (float): Limit how events can be downloaded in one minute",
             max_event_length (timedelta): Maximum length in seconds for an event to be considered valid and downloaded
+
         """
         self._protect: ProtectApiClient = protect
         self._db: aiosqlite.Connection = db
@@ -86,7 +87,7 @@ class VideoDownloader:
         self._has_ffprobe = False

     async def start(self):
-        """Main loop."""
+        """Run main loop."""
         self.logger.info("Starting Downloader")
         while True:
             if self._limiter:
@@ -174,7 +175,7 @@ class VideoDownloader:
                 self.logger.error(f"Unexpected exception occurred, abandoning event {event.id}:", exc_info=e)

     async def _download(self, event: Event) -> Optional[bytes]:
-        """Downloads the video clip for the given event."""
+        """Download the video clip for the given event."""
         self.logger.debug(" Downloading video...")
         for x in range(5):
             assert isinstance(event.camera_id, str)
@@ -185,7 +186,7 @@ class VideoDownloader:
                 assert isinstance(video, bytes)
                 break
             except (AssertionError, ClientPayloadError, TimeoutError) as e:
-                self.logger.warning(f" Failed download attempt {x+1}, retying in 1s", exc_info=e)
+                self.logger.warning(f" Failed download attempt {x + 1}, retying in 1s", exc_info=e)
                 await asyncio.sleep(1)
         else:
             self.logger.error(f"Download failed after 5 attempts, abandoning event {event.id}:")
@@ -210,7 +211,7 @@ class VideoDownloader:
         """
         try:
             downloaded_duration = await get_video_length(video)
-            msg = f" Downloaded video length: {downloaded_duration:.3f}s" f"({downloaded_duration - duration:+.3f}s)"
+            msg = f" Downloaded video length: {downloaded_duration:.3f}s ({downloaded_duration - duration:+.3f}s)"
             if downloaded_duration < duration:
                 self.logger.warning(msg)
             else:
@@ -27,7 +27,7 @@ from unifi_protect_backup.utils import (


 async def get_video_length(video: bytes) -> float:
-    """Uses ffprobe to get the length of the video file passed in as a byte stream."""
+    """Use ffprobe to get the length of the video file passed in as a byte stream."""
     returncode, stdout, stderr = await run_command(
         "ffprobe -v quiet -show_streams -select_streams v:0 -of json -", video
     )
@@ -62,6 +62,7 @@ class VideoDownloaderExperimental:
             color_logging (bool): Whether or not to add color to logging output
             download_rate_limit (float): Limit how events can be downloaded in one minute",
             max_event_length (timedelta): Maximum length in seconds for an event to be considered valid and downloaded
+
         """
         self._protect: ProtectApiClient = protect
         self._db: aiosqlite.Connection = db
@@ -86,7 +87,7 @@ class VideoDownloaderExperimental:
         self._has_ffprobe = False

     async def start(self):
-        """Main loop."""
+        """Run main loop."""
         self.logger.info("Starting Downloader")
         while True:
             if self._limiter:
@@ -180,7 +181,7 @@ class VideoDownloaderExperimental:
                 )

     async def _download(self, event: Event) -> Optional[bytes]:
-        """Downloads the video clip for the given event."""
+        """Download the video clip for the given event."""
         self.logger.debug(" Downloading video...")
         for x in range(5):
             assert isinstance(event.camera_id, str)
@@ -196,7 +197,7 @@ class VideoDownloaderExperimental:
                 assert isinstance(video, bytes)
                 break
             except (AssertionError, ClientPayloadError, TimeoutError) as e:
-                self.logger.warning(f" Failed download attempt {x+1}, retying in 1s", exc_info=e)
+                self.logger.warning(f" Failed download attempt {x + 1}, retying in 1s", exc_info=e)
                 await asyncio.sleep(1)
         else:
             self.logger.error(f"Download failed after 5 attempts, abandoning event {event.id}:")
@@ -221,7 +222,7 @@ class VideoDownloaderExperimental:
         """
         try:
             downloaded_duration = await get_video_length(video)
-            msg = f" Downloaded video length: {downloaded_duration:.3f}s" f"({downloaded_duration - duration:+.3f}s)"
+            msg = f" Downloaded video length: {downloaded_duration:.3f}s ({downloaded_duration - duration:+.3f}s)"
             if downloaded_duration < duration:
                 self.logger.warning(msg)
             else:
@@ -33,6 +33,7 @@ class EventListener:
             detection_types (List[str]): Desired Event detection types to look for
             ignore_cameras (List[str]): Cameras IDs to ignore events from
             cameras (List[str]): Cameras IDs to ONLY include events from
+
         """
         self._event_queue: asyncio.Queue = event_queue
         self._protect: ProtectApiClient = protect
@@ -43,18 +44,19 @@ class EventListener:
         self.cameras: List[str] = cameras

     async def start(self):
-        """Main Loop."""
+        """Run main Loop."""
         logger.debug("Subscribed to websocket")
         self._unsub_websocket_state = self._protect.subscribe_websocket_state(self._websocket_state_callback)
         self._unsub = self._protect.subscribe_websocket(self._websocket_callback)

     def _websocket_callback(self, msg: WSSubscriptionMessage) -> None:
-        """Callback for "EVENT" websocket messages.
+        """'EVENT' websocket message callback.

         Filters the incoming events, and puts completed events onto the download queue

         Args:
             msg (Event): Incoming event data
+
         """
         logger.websocket_data(msg) # type: ignore
@@ -107,12 +109,13 @@ class EventListener:
         logger.debug(f"Adding event {msg.new_obj.id} to queue (Current download queue={self._event_queue.qsize()})")

     def _websocket_state_callback(self, state: WebsocketState) -> None:
-        """Callback for websocket state messages.
+        """Websocket state message callback.

         Flags the websocket for reconnection

         Args:
-            msg (WebsocketState): new state of the websocket
+            state (WebsocketState): new state of the websocket
+
         """
         if state == WebsocketState.DISCONNECTED:
             logger.error("Unifi Protect Websocket lost connection. Reconnecting...")
@@ -45,6 +45,7 @@ class MissingEventChecker:
             ignore_cameras (List[str]): Ignored camera IDs to limit search
             cameras (List[str]): Included (ONLY) camera IDs to limit search
             interval (int): How frequently, in seconds, to check for missing events,
+
         """
         self._protect: ProtectApiClient = protect
         self._db: aiosqlite.Connection = db
@@ -111,7 +112,7 @@ class MissingEventChecker:
         missing_event_ids = set(unifi_events.keys()) - (db_event_ids | downloading_event_ids | uploading_event_ids)

         # Exclude events of unwanted types
-        def wanted_event_type(event_id):
+        def wanted_event_type(event_id, unifi_events=unifi_events):
             event = unifi_events[event_id]
             if event.start is None or event.end is None:
                 return False # This event is still on-going
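Binding `unifi_events` through a default argument captures the current object when the nested function is defined, the standard remedy for the late-binding warnings (B023 and friends) that the "B" rule selection enables. A generic illustration of the pitfall and the fix, unrelated to this codebase:

    # Late binding: every closure sees the final value of i.
    callbacks = [lambda: i for i in range(3)]
    print([cb() for cb in callbacks])   # [2, 2, 2]

    # Default arguments are evaluated at definition time, capturing the current value.
    callbacks = [lambda i=i: i for i in range(3)]
    print([cb() for cb in callbacks])   # [0, 1, 2]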
@@ -156,7 +157,7 @@ class MissingEventChecker:
         await self._db.commit()

     async def start(self):
-        """Main loop."""
+        """Run main loop."""
         logger.info("Starting Missing Event Checker")
         while True:
             try:
@@ -13,14 +13,14 @@ logger = logging.getLogger(__name__)


 async def delete_file(file_path, rclone_purge_args):
-    """Deletes `file_path` via rclone."""
+    """Delete `file_path` via rclone."""
     returncode, stdout, stderr = await run_command(f'rclone delete -vv "{file_path}" {rclone_purge_args}')
     if returncode != 0:
         logger.error(f" Failed to delete file: '{file_path}'")


 async def tidy_empty_dirs(base_dir_path):
-    """Deletes any empty directories in `base_dir_path` via rclone."""
+    """Delete any empty directories in `base_dir_path` via rclone."""
     returncode, stdout, stderr = await run_command(f'rclone rmdirs -vv --ignore-errors --leave-root "{base_dir_path}"')
     if returncode != 0:
         logger.error(" Failed to tidy empty dirs")
@@ -34,7 +34,7 @@ class Purge:
         db: aiosqlite.Connection,
         retention: relativedelta,
         rclone_destination: str,
-        interval: relativedelta = relativedelta(days=1),
+        interval: relativedelta | None,
         rclone_purge_args: str = "",
     ):
         """Init.
@@ -45,15 +45,16 @@ class Purge:
             rclone_destination (str): What rclone destination the clips are stored in
             interval (relativedelta): How often to purge old clips
             rclone_purge_args (str): Optional extra arguments to pass to `rclone delete` directly.
+
         """
         self._db: aiosqlite.Connection = db
         self.retention: relativedelta = retention
         self.rclone_destination: str = rclone_destination
-        self.interval: relativedelta = interval
+        self.interval: relativedelta = interval if interval is not None else relativedelta(days=1)
         self.rclone_purge_args: str = rclone_purge_args

     async def start(self):
-        """Main loop - runs forever."""
+        """Run main loop."""
         while True:
             try:
                 deleted_a_file = False
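Moving the relativedelta(days=1) default out of the signature and resolving it in the body is the usual fix for flake8-bugbear's B008 (function call in an argument default, evaluated once at definition time), which the "B" rule selection enables. A minimal sketch of the pattern, not tied to this class:

    from dateutil.relativedelta import relativedelta

    # Flagged by B008: the call in the default runs once, when the function is defined.
    def purge_interval(interval: relativedelta = relativedelta(days=1)) -> relativedelta:
        return interval

    # Preferred: accept None and resolve the default inside the body.
    def purge_interval_fixed(interval: relativedelta | None = None) -> relativedelta:
        return interval if interval is not None else relativedelta(days=1)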
@@ -63,7 +64,7 @@ class Purge:
                 async with self._db.execute(
                     f"SELECT * FROM events WHERE end < {retention_oldest_time}"
                 ) as event_cursor:
-                    async for event_id, event_type, camera_id, event_start, event_end in event_cursor:
+                    async for event_id, event_type, camera_id, event_start, event_end in event_cursor: # noqa: B007
                         logger.info(f"Purging event: {event_id}.")

                         # For every backup for this event
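Both remedies for ruff's B007 (unused loop control variable) show up in this commit: unused counters elsewhere are renamed to `_`, while the row unpacking above keeps its descriptive column names and silences the rule with `# noqa: B007` instead. A generic illustration, not from the repository:

    rows = [("event-1", "motion", "cam-1", 0.0, 10.0)]

    for _ in range(3):  # the counter is never used in the body, so it is just "_"
        print("tick")

    for event_id, event_type, camera_id, start, end in rows:  # noqa: B007
        print(event_id)  # only one column is used, but the names document the row layout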
@@ -1,3 +1,5 @@
 """Monkey patch new download method into uiprotect till PR is merged."""
+
+import enum
 from datetime import datetime
 from pathlib import Path
@@ -10,13 +12,15 @@ from uiprotect.exceptions import BadRequest
 from uiprotect.utils import to_js_time


 # First, let's add the new VideoExportType enum
 class VideoExportType(str, enum.Enum):
     """Unifi Protect video export types."""

     TIMELAPSE = "timelapse"
     ROTATING = "rotating"


 def monkey_patch_experimental_downloader():
     """Apply patches to uiprotect to add new download method."""
     from uiprotect.api import ProtectApiClient

     # Add the version constant
@@ -43,7 +43,7 @@ monkey_patch_experimental_downloader()


 async def create_database(path: str):
-    """Creates sqlite database and creates the events abd backups tables."""
+    """Create sqlite database and creates the events abd backups tables."""
     db = await aiosqlite.connect(path)
     await db.execute("CREATE TABLE events(id PRIMARY KEY, type, camera_id, start REAL, end REAL)")
     await db.execute(
@@ -117,7 +117,8 @@ class UnifiProtectBackup:
             color_logging (bool): Whether to add color to logging output or not
             download_rate_limit (float): Limit how events can be downloaded in one minute. Disabled by default",
             max_event_length (int): Maximum length in seconds for an event to be considered valid and downloaded
-            use_experimental_downloader (bool): Use the new experimental downloader (the same method as used by the webUI)
+            use_experimental_downloader (bool): Use the new experimental downloader (the same method as used by the
+                webUI)
             parallel_uploads (int): Max number of parallel uploads to allow
         """
         self.color_logging = color_logging
@@ -216,7 +217,7 @@ class UnifiProtectBackup:
         delay = 5 # Start with a 5 second delay
         max_delay = 3600 # 1 hour in seconds

-        for attempts in range(20):
+        for _ in range(20):
             try:
                 await self._protect.update()
                 break
@@ -279,7 +280,7 @@ class UnifiProtectBackup:
         # Create upload tasks
         # This will upload the videos in the downloader's buffer to the rclone remotes and log it in the database
         uploaders = []
-        for i in range(self._parallel_uploads):
+        for _ in range(self._parallel_uploads):
             uploader = VideoUploader(
                 self._protect,
                 upload_queue,
@@ -59,7 +59,7 @@ class VideoUploader:
         self.logger = logging.LoggerAdapter(self.base_logger, {"event": ""})

     async def start(self):
-        """Main loop.
+        """Run main loop.

         Runs forever looking for video data in the video queue and then uploads it
         using rclone, finally it updates the database
@@ -106,6 +106,7 @@ class VideoUploader:

         Raises:
             RuntimeError: If rclone returns a non-zero exit code
+
         """
         returncode, stdout, stderr = await run_command(f'rclone rcat -vv {rclone_args} "{destination}"', video)
         if returncode != 0:
@@ -131,7 +132,7 @@ class VideoUploader:
         await self._db.commit()

     async def _generate_file_path(self, event: Event) -> pathlib.Path:
-        """Generates the rclone destination path for the provided event.
+        """Generate the rclone destination path for the provided event.

         Generates rclone destination path for the given even based upon the format string
         in `self.file_structure_format`.
@@ -4,7 +4,7 @@ import asyncio
 import logging
 import re
 from datetime import datetime
-from typing import List, Optional
+from typing import Optional

 from apprise import NotifyType
 from async_lru import alru_cache
@@ -109,6 +109,9 @@ class AppriseStreamHandler(logging.StreamHandler):

         Args:
             color_logging (bool): If true logging levels will be colorized
+            *args (): Positional arguments to pass to StreamHandler
+            **kwargs: Keyword arguments to pass to StreamHandler
+
         """
         super().__init__(*args, **kwargs)
         self.color_logging = color_logging
@@ -172,7 +175,7 @@ class AppriseStreamHandler(logging.StreamHandler):


 def create_logging_handler(format, color_logging):
-    """Constructs apprise logging handler for the given format."""
+    """Construct apprise logging handler for the given format."""
     date_format = "%Y-%m-%d %H:%M:%S"
     style = "{"
@@ -182,8 +185,8 @@ def create_logging_handler(format, color_logging):
     return sh


-def setup_logging(verbosity: int, color_logging: bool = False, apprise_notifiers: List[str] = []) -> None:
-    """Configures loggers to provided the desired level of verbosity.
+def setup_logging(verbosity: int, color_logging: bool = False) -> None:
+    """Configure loggers to provided the desired level of verbosity.

     Verbosity 0: Only log info messages created by `unifi-protect-backup`, and all warnings
     verbosity 1: Only log info & debug messages created by `unifi-protect-backup`, and all warnings
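Dropping the `apprise_notifiers: List[str] = []` parameter also removes a mutable default argument, the pattern flake8-bugbear flags as B006 (the list is created once and shared by every call). A generic illustration of why that pattern is risky, not taken from this codebase:

    # B006: the same list object is reused by every call that omits the argument.
    def register(name, seen=[]):
        seen.append(name)
        return seen

    print(register("a"))  # ['a']
    print(register("b"))  # ['a', 'b']  <- state leaked from the previous call

    # Safe variant: default to None and build a fresh list inside the function.
    def register_fixed(name, seen=None):
        seen = [] if seen is None else seen
        seen.append(name)
        return seen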
@@ -199,7 +202,6 @@ def setup_logging(verbosity: int, color_logging: bool = False, apprise_notifiers
     Args:
         verbosity (int): The desired level of verbosity
         color_logging (bool): If colors should be used in the log (default=False)
-        apprise_notifiers (List[str]): Notification services to hook into the logger

     """
     add_logging_level(
@@ -242,7 +244,7 @@ _initialized_loggers = []


 def setup_event_logger(logger, color_logging):
-    """Sets up a logger that also displays the event ID currently being processed."""
+    """Set up a logger that also displays the event ID currently being processed."""
     global _initialized_loggers
     if logger not in _initialized_loggers:
         format = "{asctime} [{levelname:^11s}] {name:<42} :{event} {message}"
@@ -256,12 +258,13 @@ _suffixes = ["B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"]


 def human_readable_size(num: float):
-    """Turns a number into a human readable number with ISO/IEC 80000 binary prefixes.
+    """Turn a number into a human readable number with ISO/IEC 80000 binary prefixes.

     Based on: https://stackoverflow.com/a/1094933

     Args:
         num (int): The number to be converted into human readable format
+
     """
     for unit in _suffixes:
         if abs(num) < 1024.0:
@@ -271,7 +274,7 @@ def human_readable_size(num: float):


 def human_readable_to_float(num: str):
-    """Turns a human readable ISO/IEC 80000 suffix value to its full float value."""
+    """Turn a human readable ISO/IEC 80000 suffix value to its full float value."""
     pattern = r"([\d.]+)(" + "|".join(_suffixes) + ")"
     result = re.match(pattern, num)
     if result is None:
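As context for the two helpers above: the suffix's position in `_suffixes` corresponds to a power of 1024, so converting back from a human readable string is a single multiply. A rough sketch of that inverse conversion (illustrative only, the repository's implementation may differ):

    import re

    _suffixes = ["B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"]

    def to_bytes(num: str) -> float:
        match = re.match(r"([\d.]+) ?(" + "|".join(_suffixes) + ")", num)
        if match is None:
            raise ValueError(f"Not a recognised size: {num!r}")
        value, unit = match.groups()
        return float(value) * 1024 ** _suffixes.index(unit)  # "1.5 KiB" -> 1536.0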
@@ -287,7 +290,7 @@ def human_readable_to_float(num: str):
 # No max size, and a 6 hour ttl
 @alru_cache(None, ttl=60 * 60 * 6)
 async def get_camera_name(protect: ProtectApiClient, id: str):
-    """Returns the name for the camera with the given ID.
+    """Return the name for the camera with the given ID.

     If the camera ID is not know, it tries refreshing the cached data
     """
@@ -322,6 +325,7 @@ class SubprocessException(Exception):
             stdout (str): What rclone output to stdout
             stderr (str): What rclone output to stderr
             returncode (str): The return code of the rclone process
+
         """
         super().__init__()
         self.stdout: str = stdout
@@ -329,12 +333,12 @@ class SubprocessException(Exception):
         self.returncode: int = returncode

     def __str__(self):
-        """Turns exception into a human readable form."""
+        """Turn exception into a human readable form."""
         return f"Return Code: {self.returncode}\nStdout:\n{self.stdout}\nStderr:\n{self.stderr}"


 async def run_command(cmd: str, data=None):
-    """Runs the given command returning the exit code, stdout and stderr."""
+    """Run the given command returning the exit code, stdout and stderr."""
     proc = await asyncio.create_subprocess_shell(
         cmd,
         stdin=asyncio.subprocess.PIPE,
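run_command above opens an asyncio subprocess with a piped stdin; the usual shape of such a helper is to also pipe stdout/stderr and feed the payload through communicate(). A self-contained sketch of that pattern (an assumption about the general shape, not the repository's exact code):

    import asyncio

    async def run(cmd: str, data: bytes | None = None):
        proc = await asyncio.create_subprocess_shell(
            cmd,
            stdin=asyncio.subprocess.PIPE,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )
        # communicate() writes `data` to stdin, waits for exit, and collects both streams.
        stdout, stderr = await proc.communicate(input=data)
        return proc.returncode, stdout, stderr

    # Usage: returncode, out, err = asyncio.run(run("wc -c", b"hello"))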
@@ -367,11 +371,11 @@ class VideoQueue(asyncio.Queue):
         self._bytes_sum = 0

     def qsize(self):
-        """Number of items in the queue."""
+        """Get number of items in the queue."""
         return self._bytes_sum

     def qsize_files(self):
-        """Number of items in the queue."""
+        """Get number of items in the queue."""
         return super().qsize()

     def _get(self):
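As the two accessors above suggest, VideoQueue appears to report its size in buffered bytes (so maxsize can bound memory) while qsize_files reports the plain item count. A rough sketch of how such a byte-counting asyncio.Queue subclass can be built by overriding the internal _put/_get hooks (an illustration of the idea, not the repository's implementation):

    import asyncio

    class ByteSizedQueue(asyncio.Queue):
        """Queue of (event, video_bytes) items whose qsize() is the total buffered byte count."""

        def __init__(self, maxsize: int = 0) -> None:
            super().__init__(maxsize)
            self._bytes_sum = 0

        def qsize(self) -> int:
            return self._bytes_sum

        def qsize_files(self) -> int:
            return super().qsize()

        def _put(self, item) -> None:
            super()._put(item)
            self._bytes_sum += len(item[1])

        def _get(self):
            item = super()._get()
            self._bytes_sum -= len(item[1])
            return item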