mirror of https://github.com/netinvent/npbackup.git
synced 2025-09-08 14:05:41 +08:00

Reformat files with black

This commit is contained in:
parent ffd436747b
commit de85e915b9

17 changed files with 164 additions and 145 deletions

.github/workflows/pylint-linux.yaml (vendored): 4 changes
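Most hunks below are the mechanical black reformat (double-quote normalization, long calls exploded to one argument per line with a trailing comma); the type to task_type and max to prune_max renames are manual changes bundled into the same commit, since black never renames identifiers. Black is deterministic, so the layout changes can be reproduced; a minimal sketch, assuming black is installed (pip install black):

    # Shell equivalent: python -m black npbackup
    import black

    code = 'result = create_scheduled_task(config_file, task_type="backup", repo=repo, group=group, interval_minutes=int(interval))'
    # format_str applies the same rules as the CLI: the long call gets
    # exploded one argument per line with a magic trailing comma
    print(black.format_str(code, mode=black.Mode(line_length=88)))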
@@ -1,5 +1,9 @@
 name: pylint-linux-tests
 
+# Quick and dirty pylint
+
+# pylint --disable=C,W1201,W1202,W1203,W0718,W0621,W0603,R0801,R0912,R0913,R0915,R0911,R0914,R0911,R1702,R0902,R0903,R0904 npbackup
+
 on: [push, pull_request]
 
 jobs:
@@ -25,7 +25,6 @@ from npbackup.__version__ import version_string, version_dict
 from npbackup.__debug__ import _DEBUG
 from npbackup.common import execution_logs
 from npbackup.core import upgrade_runner
 from npbackup.core.i18n_helper import _t
 from npbackup import key_management
 from npbackup.task import create_scheduled_task
 
@@ -492,21 +491,30 @@ This is free software, and you are welcome to redistribute it under certain conditions
         if "interval" in args.create_scheduled_task:
             interval = args.create_scheduled_task.split("=")[1].strip()
             result = create_scheduled_task(
-                config_file, type="backup", interval_minutes=int(interval)
+                config_file,
+                task_type="backup",
+                repo=repo,
+                group=group,
+                interval_minutes=int(interval),
             )
         elif (
             "hour" in args.create_scheduled_task
             and "minute" in args.create_scheduled_task
         ):
             if args.create_backup_scheduled_task:
-                type = "backup"
+                task_type = "backup"
             if args.create_housekeeping_scheduled_task:
-                type = "housekeeping"
+                task_type = "housekeeping"
             hours, minutes = args.create_scheduled_task.split(",")
             hour = hours.split("=")[1].strip()
             minute = minutes.split("=")[1].strip()
             result = create_scheduled_task(
-                config_file, type=type, hour=int(hour), minute=int(minute)
+                config_file,
+                task_type=task_type,
+                repo=repo,
+                group=group,
+                hour=int(hour),
+                minute=int(minute),
             )
             if not result:
                 msg = "Scheduled task creation failed"
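The call gains repo and group selectors alongside the task_type rename. A usage sketch of the new keyword style (the path and names below are hypothetical placeholders, not values from this commit):

    result = create_scheduled_task(
        "/etc/npbackup/npbackup.conf",  # hypothetical config path
        task_type="backup",
        repo="default",
        group=None,
        interval_minutes=15,
    )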
@@ -648,7 +656,7 @@ This is free software, and you are welcome to redistribute it under certain conditions
         cli_args["operation"] = "prune"
     elif args.prune_max or args.group_operation == "prune_max":
         cli_args["operation"] = "prune"
-        cli_args["op_args"] = {"max": True}
+        cli_args["op_args"] = {"prune_max": True}
     elif args.unlock or args.group_operation == "unlock":
         cli_args["operation"] = "unlock"
     elif args.repair_index or args.group_operation == "repair_index":
@@ -51,11 +51,9 @@ def execution_logs(start_time: datetime) -> None:
         elif logger_worst_level >= 30:
             log_level_reached = "warnings"
     except AttributeError as exc:
-        logger.error("Cannot get worst log level reached: {}".format(exc))
+        logger.error(f"Cannot get worst log level reached: {exc}")
     logger.info(
-        "ExecTime = {}, finished, state is: {}.".format(
-            end_time - start_time, log_level_reached
-        )
+        f"ExecTime = {end_time - start_time}, finished, state is: {log_level_reached}."
     )
     # using sys.exit(code) in a atexit function will swallow the exitcode and render 0
     # Using sys.exit(logger.get_worst_logger_level()) is the way to go, when using ofunctions.logger_utils >= 2.4.1
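The str.format to f-string conversions throughout this commit are behavior-preserving; both forms render identically, the f-string just reads better. A quick check as a sketch:

    exc = AttributeError("demo")
    old_msg = "Cannot get worst log level reached: {}".format(exc)
    new_msg = f"Cannot get worst log level reached: {exc}"
    assert old_msg == new_msg  # identical output either way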
@@ -7,7 +7,7 @@ __intname__ = "npbackup.configuration"
 __author__ = "Orsiris de Jong"
 __copyright__ = "Copyright (C) 2022-2025 NetInvent"
 __license__ = "GPL-3.0-only"
-__build__ = "2024110701"
+__build__ = "2025012401"
 __version__ = "npbackup 3.0.0+"
 
 MIN_CONF_VERSION = 3.0
@@ -84,7 +84,7 @@ def g(self, path, sep=".", default=None, list_ok=False):
         logger.debug(
             f"CONFIG ERROR {exc} for path={path},sep={sep},default={default},list_ok={list_ok}"
         )
-        raise AssertionError
+        raise AssertionError from exc


 def s(self, path, value, sep="."):
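The raise ... from exc form keeps the original exception attached as __cause__, so tracebacks show both errors instead of losing the root cause. A minimal sketch (the lookup helper is hypothetical):

    def lookup(config: dict, path: str):
        try:
            return config[path]
        except KeyError as exc:
            # "from exc" chains the KeyError as __cause__ of the AssertionError
            raise AssertionError(f"No such config path: {path}") from exc

    try:
        lookup({}, "repo_uri")
    except AssertionError as err:
        print(repr(err.__cause__))  # KeyError('repo_uri')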
@@ -167,7 +167,6 @@ empty_config_dict = {
             "compression": "auto",
             "use_fs_snapshot": True,
             "ignore_cloud_files": True,
             "exclude_caches": True,
             "one_file_system": False,
             "priority": "low",
-            "exclude_caches": True,
@@ -775,13 +774,13 @@ def _get_config_file_checksum(config_file: Path) -> str:
     It's nice to log checksums of config file to see whenever it was changed
     """
     with open(config_file, "rb") as fh:
-        hash = 0
+        cur_hash = 0
         while True:
             s = fh.read(65536)
             if not s:
                 break
-            hash = zlib.crc32(s, hash)
-    return "%08X" % (hash & 0xFFFFFFFF)
+            cur_hash = zlib.crc32(s, cur_hash)
+    return "%08X" % (cur_hash & 0xFFFFFFFF)


 def _load_config_file(config_file: Path) -> Union[bool, dict]:
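Renaming hash to cur_hash stops the function from shadowing the builtin hash(). A self-contained sketch of the same streaming CRC32 technique:

    import zlib

    def crc32_checksum(path: str) -> str:
        # Stream in 64 KiB chunks so large files never load fully into memory
        cur_hash = 0
        with open(path, "rb") as fh:
            while True:
                chunk = fh.read(65536)
                if not chunk:
                    break
                cur_hash = zlib.crc32(chunk, cur_hash)
        # Mask to 32 bits and render as fixed-width uppercase hex
        return "%08X" % (cur_hash & 0xFFFFFFFF)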
@@ -18,12 +18,11 @@ import pidfile
 import queue
 from datetime import datetime, timedelta, timezone
 from functools import wraps
-import queue
 from copy import deepcopy
 from command_runner import command_runner
 from ofunctions.threading import threaded
 from ofunctions.platform import os_arch
-from ofunctions.misc import BytesConverter, fn_name
+from ofunctions.misc import fn_name
 import ntplib
 from npbackup.restic_metrics import (
     restic_str_output_to_json,
@@ -244,7 +243,7 @@ class NPBackupRunner:
     @repo_config.setter
     def repo_config(self, value: dict):
         if not isinstance(value, dict):
-            msg = f"Bogus repo config object given"
+            msg = "Bogus repo config object given"
             self.write_logs(msg, level="critical", raise_error="ValueError")
         self._repo_config = deepcopy(value)
         # Create an instance of restic wrapper
@@ -494,7 +493,7 @@ class NPBackupRunner:
         else:
             # pylint: disable=E1101 (no-member)
             operation = fn.__name__
-            msg = f"Runner cannot execute, backend not ready"
+            msg = "Runner cannot execute, backend not ready"
             if self.stderr:
                 self.stderr.put(msg)
             if self.json_output:
@@ -1446,7 +1445,7 @@ class NPBackupRunner:
             policy["keep-tags"] = keep_tags
         # Fool proof, don't run without policy, or else we'll get
         if not policy:
-            msg = f"Empty retention policy. Won't run"
+            msg = "Empty retention policy. Won't run"
             self.write_logs(msg, level="error")
             return self.convert_to_json_output(False, msg)
 
@@ -1575,11 +1574,11 @@ class NPBackupRunner:
     @has_permission
     @is_ready
     @apply_config_to_restic_runner
-    def prune(self, max: bool = False) -> bool:
+    def prune(self, prune_max: bool = False) -> bool:
         self.write_logs(
             f"Pruning snapshots for repo {self.repo_config.g('name')}", level="info"
         )
-        if max:
+        if prune_max:
             max_unused = self.repo_config.g("prune_max_unused")
             max_repack_size = self.repo_config.g("prune_max_repack_size")
             result = self.restic_runner.prune(
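Renaming the max parameter (like the type to task_type renames elsewhere in this commit) avoids shadowing a Python builtin inside the function body; a sketch of why that matters:

    def prune(prune_max: bool = False) -> int:
        sizes = [3, 1, 2]
        # With the parameter still named max, max(sizes) would raise
        # TypeError: 'bool' object is not callable
        return max(sizes) if prune_max else 0

    assert prune(prune_max=True) == 3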
@@ -49,12 +49,12 @@ def need_upgrade(upgrade_interval: int) -> bool:
         with open(file, "r", encoding="utf-8") as fpr:
             count = int(fpr.read())
             return count
-    except OSError:
+    except OSError as exc:
         # We may not have read privileges
-        None
-    except ValueError:
-        logger.error("Bogus upgrade counter in %s", file)
-        return None
+        logger.error(f"Cannot read upgrade counter file {file}: {exc}")
+    except ValueError as exc:
+        logger.error(f"Bogus upgrade counter in {file}: {exc}")
+        return None
 
     try:
         upgrade_interval = int(upgrade_interval)
@@ -97,7 +97,9 @@ def check_new_version(full_config: dict) -> bool:
     username = full_config.g("global_options.auto_upgrade_server_username")
     password = full_config.g("global_options.auto_upgrade_server_password")
     if not upgrade_url or not username or not password:
-        logger.warning(f"Missing auto upgrade info, cannot launch auto upgrade")
+        logger.warning(
+            "Missing auto upgrade info, cannot check new version for auto upgrade"
+        )
         return None
     else:
         return _check_new_version(upgrade_url, username, password)
@@ -108,7 +110,7 @@ def run_upgrade(full_config: dict, ignore_errors: bool = False) -> bool:
     username = full_config.g("global_options.auto_upgrade_server_username")
     password = full_config.g("global_options.auto_upgrade_server_password")
     if not upgrade_url or not username or not password:
-        logger.warning(f"Missing auto upgrade info, cannot launch auto upgrade")
+        logger.warning("Missing auto upgrade info, cannot launch auto upgrade")
         return False
 
     evaluated_full_config = npbackup.configuration.evaluate_variables(
@@ -29,7 +29,6 @@ from ofunctions.misc import BytesConverter
 import FreeSimpleGUI as sg
 import _tkinter
 import npbackup.configuration
 from npbackup.__version__ import version_string
 import npbackup.common
 from resources.customization import (
     OEM_STRING,
@@ -77,7 +76,7 @@ def popup_wait_for_upgrade(text: str):
 
     layout = [[sg.Text(text)]]
     window = sg.Window(
-        f"Upgrade", layout=layout, no_titlebar=False, keep_on_top=True, finalize=True
+        "Upgrade", layout=layout, no_titlebar=False, keep_on_top=True, finalize=True
     )
     window.read(timeout=0)
     return window
@@ -565,14 +564,14 @@ def _main_gui(viewer_mode: bool):
     while True:
         action = None
         event, values = window.read()
-        if event in [sg.WIN_X_EVENT, sg.WIN_CLOSED, "--CANCEL--"]:
+        if event in (sg.WIN_X_EVENT, sg.WIN_CLOSED, "--CANCEL--"):
             action = "--CANCEL--"
             break
         if event == "--NEW-CONFIG--":
             action = event
             config_file = Path(values["-config_file-"])
             break
-        if event == "--LOAD--" or event == "-config_file-":
+        if event in ("--LOAD--", "-config_file-"):
             config_file = Path(values["-config_file-"])
             if not values["-config_file-"] or not config_file.exists():
                 sg.PopupError(_t("generic.file_does_not_exist"), keep_on_top=True)
@@ -842,7 +841,7 @@ def _main_gui(viewer_mode: bool):
     else:
         config_file = Path(f"{CURRENT_DIR}/npbackup.conf").absolute()
         if not config_file.is_file():
-            config_file = Path(f"./npbackup.conf").absolute()
+            config_file = Path("./npbackup.conf").absolute()
             if not config_file.is_file():
                 config_file = None
 
@@ -872,9 +871,9 @@ def _main_gui(viewer_mode: bool):
             full_config,
             config_file,
             repo_config,
-            backup_destination,
+            _,
             backend_type,
-            repo_uri,
+            _,
             repo_list,
         ) = get_config(config_file=config_file, repo_name=args.repo_name)
 
@@ -1050,7 +1049,7 @@ def _main_gui(viewer_mode: bool):
                 if full_config.g(f"repos.{active_repo}"):
                     (
                         repo_config,
-                        config_inheriteance,
+                        _,
                     ) = npbackup.configuration.get_repo_config(full_config, active_repo)
                     current_state, backup_tz, snapshot_list = get_gui_data(repo_config)
                     gui_update_state()
@@ -1115,7 +1114,7 @@ def _main_gui(viewer_mode: bool):
             continue
         repo_config = viewer_create_repo(viewer_repo_uri, viewer_repo_password)
         event = "--STATE-BUTTON--"
-    if event == "--LOAD-CONF--" or event == "--LOAD-EXISTING-CONF--":
+    if event in ("--LOAD-CONF--", "--LOAD-EXISTING-CONF--"):
         if event == "--LOAD-EXISTING-CONF--":
             cfg_file = config_file
         else:
@@ -1133,9 +1132,9 @@ def _main_gui(viewer_mode: bool):
                 full_config = _full_config
                 config_file = _config_file
                 repo_config = _repo_config
-                backup_destination = _backup_destination
+                _ = _backup_destination
                 backend_type = _backend_type
-                repo_uri = _repo_uri
+                _ = _repo_uri
                 repo_list = _repo_list
             else:
                 sg.PopupError(
@@ -7,7 +7,7 @@ __intname__ = "npbackup.gui.config"
 __author__ = "Orsiris de Jong"
 __copyright__ = "Copyright (C) 2022-2025 NetInvent"
 __license__ = "GPL-3.0-only"
-__build__ = "2024110701"
+__build__ = "2025012401"
 
 
 from typing import List, Tuple
@@ -18,12 +18,9 @@ from logging import getLogger
 import FreeSimpleGUI as sg
 import textwrap
 from ruamel.yaml.comments import CommentedMap
-import npbackup.configuration as configuration
+from npbackup import configuration
 from ofunctions.misc import get_key_from_value, BytesConverter
 from npbackup.core.i18n_helper import _t
 from npbackup.__version__ import IS_COMPILED
 from npbackup.path_helper import CURRENT_DIR
 from npbackup.__debug__ import _DEBUG, fmt_json
 from resources.customization import (
     INHERITED_ICON,
     NON_INHERITED_ICON,
@@ -59,7 +56,7 @@ def delete(self, key):
                 key_list = temp
         return True
     except KeyError:
         pass
     return False


 sg.TreeData.delete = delete
@@ -213,17 +210,17 @@ def config_gui(full_config: dict, config_file: str):
     ) -> None:
         object_list = get_objects()
         if not object_name or not object_type:
-            object = object_list[0]
+            obj = object_list[0]
         else:
             # We need to remove the "s" and the end if we want our comobox name to be usable later
-            object = f"{object_type.rstrip('s').capitalize()}: {object_name}"
+            obj = f"{object_type.rstrip('s').capitalize()}: {object_name}"
 
         window["-OBJECT-SELECT-"].Update(values=object_list)
-        window["-OBJECT-SELECT-"].Update(value=object)
+        window["-OBJECT-SELECT-"].Update(value=obj)
 
         # Also update task object selector
         window["-OBJECT-SELECT-TASKS-"].Update(values=object_list)
-        window["-OBJECT-SELECT-TASKS-"].Update(value=object)
+        window["-OBJECT-SELECT-TASKS-"].Update(value=obj)
 
     def get_object_from_combo(combo_value: str) -> Tuple[str, str]:
         """
@@ -2529,11 +2526,11 @@ Google Cloud storage: GOOGLE_PROJECT_ID GOOGLE_APPLICATION_CREDENTIALS\n\
             hour = None
             minute = None
             if event == "create_housekeeping_daily_task":
-                type = "housekeeping"
+                task_type = "housekeeping"
                 hour = values["scheduled_housekeeping_task_hour"]
                 minute = values["scheduled_housekeeping_task_minute"]
             else:
-                type = "backup"
+                task_type = "backup"
                 if event == "create_backup_interval_task":
                     interval = values["scheduled_backup_task_interval"]
                 else:
@@ -2542,7 +2539,7 @@ Google Cloud storage: GOOGLE_PROJECT_ID GOOGLE_APPLICATION_CREDENTIALS\n\
 
             result = create_scheduled_task(
                 config_file=config_file,
-                type=type,
+                task_type=task_type,
                 repo=task_repo_name,
                 group=task_repo_group,
                 interval_minutes=interval,
@@ -251,7 +251,7 @@ def gui_thread_runner(
         titlebar_icon=OEM_ICON,
     )
     # Finalize the window
-    event, values = progress_window.read(timeout=0.01)
+    event, _ = progress_window.read(timeout=0.01)
     progress_window.bring_to_front()
 
     read_stdout_queue = __stdout
@@ -43,11 +43,11 @@ def gui_update_state(window, full_config: dict, unencrypted: str = None) -> list
     try:
         for repo_name in full_config.g("repos"):
             repo_config, _ = get_repo_config(full_config, repo_name)
-            if repo_config.g(f"repo_uri") and (
-                repo_config.g(f"repo_opts.repo_password")
-                or repo_config.g(f"repo_opts.repo_password_command")
+            if repo_config.g("repo_uri") and (
+                repo_config.g("repo_opts.repo_password")
+                or repo_config.g("repo_opts.repo_password_command")
             ):
-                backend_type, repo_uri = get_anon_repo_uri(repo_config.g(f"repo_uri"))
+                backend_type, repo_uri = get_anon_repo_uri(repo_config.g("repo_uri"))
                 repo_group = repo_config.g("repo_group")
                 if not unencrypted and unencrypted != repo_name:
                     repo_uri = ENCRYPTED_DATA_PLACEHOLDER
@@ -149,6 +149,7 @@ def show_stats(statistics: List[dict]) -> None:
     """
 
     data = []
+    entry = None
     for entry in statistics:
         repo_name = list(entry.keys())[0]
         state = "Success" if entry[repo_name]["result"] else "Failure"
@@ -453,6 +454,7 @@ def operations_gui(full_config: dict) -> dict:
         try:
             object_name = complete_repo_list[values["repo--group-list"][0]][0]
         except Exception as exc:
+            logger.error(f"Could not get object name: {exc}")
             logger.debug("Trace:", exc_info=True)
             object_name = None
         if not object_name:
@@ -48,5 +48,5 @@ def create_key_file(key_location: str):
         logger.info(f"Encryption key file created at {key_location}")
         return True
     except OSError as exc:
-        logger.critical("Cannot create encryption key file: {exc}")
+        logger.critical(f"Cannot create encryption key file: {exc}")
         return False
@@ -197,8 +197,8 @@ def restic_json_to_prometheus(
     if not isinstance(restic_json, dict):
         try:
             restic_json = json.loads(restic_json)
-        except (json.JSONDecodeError, TypeError):
-            logger.error(f"Cannot decode JSON from restic data")
+        except (json.JSONDecodeError, TypeError) as exc:
+            logger.error(f"Cannot decode JSON from restic data: {exc}")
             logger.debug(f"Data is: {restic_json}, Trace:", exc_info=True)
             restic_json = {}
 
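Capturing the exception object makes the log line actionable. A self-contained sketch of the same tolerant-decode pattern (the to_dict helper is hypothetical, not part of this commit):

    import json
    import logging

    logger = logging.getLogger(__name__)

    def to_dict(restic_json):
        # Pass dicts through untouched, try to decode anything else, fall back to {}
        if isinstance(restic_json, dict):
            return restic_json
        try:
            return json.loads(restic_json)
        except (json.JSONDecodeError, TypeError) as exc:
            logger.error(f"Cannot decode JSON from restic data: {exc}")
            return {}

    assert to_dict('{"files_new": 3}') == {"files_new": 3}
    assert to_dict(None) == {}  # json.loads(None) raises TypeError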
@@ -400,8 +400,7 @@ class ResticRunner:
             if is_cloud_error is True:
                 self.last_command_status = True
                 return True, output
-            else:
-                self.write_logs("Some files could not be backed up", level="error")
+            self.write_logs("Some files could not be backed up", level="error")
             # TEMP-FIX-4155-END
             self.last_command_status = False
 
@@ -455,8 +454,8 @@ class ResticRunner:
             value = int(BytesConverter(value).kbytes)
             if value > 0:
                 self._limit_upload = value
-        except TypeError:
-            raise ValueError("Cannot set upload limit")
+        except TypeError as exc:
+            raise ValueError("Cannot set upload limit") from exc
 
     @property
     def limit_download(self):
@@ -469,8 +468,8 @@ class ResticRunner:
             value = int(BytesConverter(value).kbytes)
             if value > 0:
                 self._limit_download = value
-        except TypeError:
-            raise ValueError("Cannot set download limit")
+        except TypeError as exc:
+            raise ValueError("Cannot set download limit") from exc
 
     @property
     def backend_connections(self):
@@ -665,11 +664,11 @@ class ResticRunner:
             # For backup operations, we'll auto-initialize the repo
             # pylint: disable=E1101 (no-member)
             if fn.__name__ == "backup" or fn_name(1) == "has_recent_snapshot":
-                msg = f"Repo is not initialized. Initializing repo for backup operation"
+                msg = "Repo is not initialized. Initializing repo for backup operation"
                 self.write_logs(msg, level="info")
                 init = self.init()
                 if not init:
-                    msg = f"Could not initialize repo for backup operation"
+                    msg = "Could not initialize repo for backup operation"
                     self.write_logs(
                         msg,
                         level="critical",
@@ -739,7 +738,7 @@ class ResticRunner:
                 else:
                     js["output"].append(decoder.decode(line))
                 is_first_line = False
-        except msgspec.DecodeError as exc:
+        except msgspec.DecodeError:
             # We may have a json decode error, but actually, we just want to get the output
             # in any case, since restic might output non json data, but we need to
             # convert it to json
@@ -754,7 +753,7 @@ class ResticRunner:
             try:
                 # pylint: disable=E0601 (used-before-assignment)
                 js["output"].append(json.loads(line))
-            except json.JSONDecodeError as exc:
+            except json.JSONDecodeError:
                 # Same as above
 
                 # msg = f"JSON decode error: {exc} on content '{line}'"
@@ -776,7 +775,7 @@ class ResticRunner:
         if HAVE_MSGSPEC:
             try:
                 js["output"] = msgspec.json.decode(str(output))
-            except msgspec.DecodeError as exc:
+            except msgspec.DecodeError:
                 # Same as above
 
                 # msg = f"JSON decode error: {exc} on output '{output}'"
@@ -787,7 +786,7 @@ class ResticRunner:
             try:
                 # pylint: disable=E0601 (used-before-assignment)
                 js["output"] = json.loads(output)
-            except json.JSONDecodeError as exc:
+            except json.JSONDecodeError:
                 # same as above
                 # msg = f"JSON decode error: {exc} on output '{output}'"
                 # self.write_logs(msg, level="error")
@@ -1232,10 +1231,10 @@ class ResticRunner:
         kwargs = locals()
         kwargs.pop("self")
 
-        cmd = f"recover"
+        cmd = "recover"
         result, output = self.executor(cmd)
         if result:
-            msg = f"Recovery finished"
+            msg = "Recovery finished"
         else:
             msg = f"Recovery failed:\n{output}"
         return self.convert_to_json_output(result, output, msg=msg, **kwargs)
@@ -1248,10 +1247,10 @@ class ResticRunner:
         kwargs = locals()
         kwargs.pop("self")
 
-        cmd = f"unlock"
+        cmd = "unlock"
         result, output = self.executor(cmd)
         if result:
-            msg = f"Repo successfully unlocked"
+            msg = "Repo successfully unlocked"
         else:
             msg = f"Repo unlock failed:\n{output}"
         return self.convert_to_json_output(result, output, msg=msg, **kwargs)
@@ -1280,12 +1279,12 @@ class ResticRunner:
         kwargs = locals()
         kwargs.pop("self")
 
-        cmd = f"stats"
+        cmd = "stats"
         if subject:
             cmd += f" {subject}"
         result, output = self.executor(cmd)
         if result:
-            msg = f"Repo statistics command success"
+            msg = "Repo statistics command success"
         else:
             msg = f"Cannot get repo statistics:\n {output}"
         return self.convert_to_json_output(result, output, msg=msg, **kwargs)
@@ -1334,7 +1333,7 @@ class ResticRunner:
                     f"Recent snapshot {last_snapshot['short_id']} of {last_snapshot['time']} exists !"
                 )
                 return True, backup_ts
-            return False, backup_ts
+        return False, backup_ts
 
     # @check_if_init # We don't need to run if init before checking snapshots since if init searches for snapshots
     def has_recent_snapshot(self, delta: int = None) -> Tuple[bool, Optional[datetime]]:
@@ -24,8 +24,6 @@ except ImportError:
         def __init_subclass__(self, *args, **kwargs):
             pass
 
-            pass
-
     class StrEnum:
         pass
 
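The surrounding try/except ImportError block supplies stand-ins when newer enum features are unavailable; a minimal sketch of that fallback pattern, under the assumption the real code guards an optional import:

    try:
        from enum import StrEnum  # available from Python 3.11 onwards
    except ImportError:
        # Minimal stand-in so class definitions referencing StrEnum still parse
        class StrEnum:
            pass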
@@ -67,17 +67,18 @@ def entrypoint(*args, **kwargs):
         # Unless operation is "ls", because it's too slow for command_runner poller method that allows live_output
         # But we still need to log the result to our logfile
         if not operation == "ls":
+            handler = None
             for handler in logger.handlers:
                 if handler.stream == sys.stdout:
                     logger.removeHandler(handler)
                     break
             logger.info(f"\n{result}")
-        if not operation == "ls":
+        if not operation == "ls" and handler:
             logger.addHandler(handler)
         if result:
-            logger.info(f"Operation finished")
+            logger.info("Operation finished")
         else:
-            logger.error(f"Operation finished")
+            logger.error("Operation finished")
     else:
         if HAVE_MSGSPEC:
             print(msgspec.json.encode(result).decode("utf-8", errors="ignore"))
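Initializing handler = None guards the re-add step when no stdout handler was found. A standalone sketch of the detach-and-reattach pattern (log_without_stdout is a hypothetical helper, not part of this commit):

    import sys
    import logging

    logger = logging.getLogger(__name__)

    def log_without_stdout(message: str) -> None:
        removed = None
        for handler in logger.handlers:
            # Only StreamHandlers carry a stream attribute worth comparing
            if getattr(handler, "stream", None) is sys.stdout:
                logger.removeHandler(handler)
                removed = handler
                break
        logger.info(message)  # reaches the remaining handlers only
        if removed:
            logger.addHandler(removed)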
@@ -7,10 +7,9 @@ __intname__ = "npbackup.task"
 __author__ = "Orsiris de Jong"
 __copyright__ = "Copyright (C) 2022-2025 NetInvent"
 __license__ = "GPL-3.0-only"
-__build__ = "2024102901"
+__build__ = "2025012401"
 
 
-from typing import List
 import sys
 import os
 from logging import getLogger
@@ -24,14 +23,20 @@ from npbackup.__version__ import IS_COMPILED
 logger = getLogger()
 
 
-def _scheduled_task_exists_unix(config_file: str, type: str, object_args: str) -> bool:
+def _scheduled_task_exists_unix(
+    config_file: str, task_type: str, object_args: str
+) -> bool:
     cron_file = "/etc/cron.d/npbackup"
     try:
         with open(cron_file, "r", encoding="utf-8") as file_handle:
             current_crontab = file_handle.readlines()
             for line in current_crontab:
-                if f"--{type}" in line and config_file in line and object_args in line:
-                    logger.info(f"Found existing {type} task")
+                if (
+                    f"--{task_type}" in line
+                    and config_file in line
+                    and object_args in line
+                ):
+                    logger.info(f"Found existing {task_type} task")
                     return True
     except OSError as exc:
         logger.error("Could not read file {}: {}".format(cron_file, exc))
@@ -40,7 +45,7 @@ def _scheduled_task_exists_unix(config_file: str, type: str, object_args: str) -> bool:
 
 def create_scheduled_task(
     config_file: str,
-    type: str,
+    task_type: str,
     repo: str = None,
     group: str = None,
     interval_minutes: int = None,
@@ -63,8 +68,8 @@ def create_scheduled_task(
         logger.error("Bogus interval given")
         return False
 
-    if type not in ("backup", "housekeeping"):
-        logger.error("Undefined task type")
+    if task_type not in ("backup", "housekeeping"):
+        logger.error(f"Undefined task type: {task_type}")
         return False
 
     if isinstance(interval_minutes, int) and interval_minutes < 1:
@@ -89,21 +94,21 @@ def create_scheduled_task(
         subject = f"group_name {group}"
         object_args = f" --repo-group {group}"
     else:
-        subject = f"repo_name default"
+        subject = "repo_name default"
         object_args = ""
     if interval_minutes:
         logger.info(
-            f"Creating scheduled task {type} for {subject} to run every {interval_minutes} minutes"
+            f"Creating scheduled task {task_type} for {subject} to run every {interval_minutes} minutes"
         )
     elif hour and minute:
         logger.info(
-            f"Creating scheduled task {type} for {subject} to run at everyday at {hour}h{minute}"
+            f"Creating scheduled task {task_type} for {subject} to run at everyday at {hour}h{minute}"
         )
 
     if os.name == "nt":
         return create_scheduled_task_windows(
             config_file,
-            type,
+            task_type,
             CURRENT_EXECUTABLE,
             subject,
             object_args,
@@ -114,7 +119,7 @@ def create_scheduled_task(
     else:
         return create_scheduled_task_unix(
             config_file,
-            type,
+            task_type,
             CURRENT_EXECUTABLE,
             subject,
             object_args,
@@ -126,7 +131,7 @@ def create_scheduled_task(
 
 def create_scheduled_task_unix(
     config_file: str,
-    type: str,
+    task_type: str,
     cli_executable_path: str,
     subject: str,
     object_args: str,
@@ -134,6 +139,7 @@ def create_scheduled_task_unix(
     hour: int = None,
     minute: int = None,
 ):
+    logger.debug(f"Creating task {subject}")
     executable_dir = os.path.dirname(cli_executable_path)
     if "python" in sys.executable and not IS_COMPILED:
         cli_executable_path = f'"{sys.executable}" "{cli_executable_path}"'
@@ -142,14 +148,16 @@ def create_scheduled_task_unix(
     cron_file = "/etc/cron.d/npbackup"
 
     if interval_minutes is not None:
-        TASK_ARGS = f'-c "{config_file}" --{type} --run-as-cli{object_args}'
+        TASK_ARGS = f'-c "{config_file}" --{task_type} --run-as-cli{object_args}'
         trigger = f"*/{interval_minutes} * * * * root"
     elif hour is not None and minute is not None:
-        if type == "backup":
+        if task_type == "backup":
             force_opt = " --force"
         else:
            force_opt = ""
-        TASK_ARGS = f'-c "{config_file}" --{type}{force_opt} --run-as-cli{object_args}'
+        TASK_ARGS = (
+            f'-c "{config_file}" --{task_type}{force_opt} --run-as-cli{object_args}'
+        )
         trigger = f"{minute} {hour} * * * root"
     else:
         raise ValueError("Bogus trigger given")
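Putting the two pieces together, a /etc/cron.d entry is schedule, user, then command. A sketch with hypothetical values (the executable path and repo selector are illustrative only, not from this commit):

    config_file = "/etc/npbackup/npbackup.conf"  # hypothetical path
    task_type = "backup"
    object_args = " --repo-name default"         # hypothetical repo selector
    interval_minutes = 15

    task_args = f'-c "{config_file}" --{task_type} --run-as-cli{object_args}'
    trigger = f"*/{interval_minutes} * * * * root"
    # Renders a cron.d line: */15 * * * * root <executable> -c "..." --backup --run-as-cli --repo-name default
    print(f"{trigger} /usr/local/bin/npbackup-cli {task_args}")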
@@ -165,19 +173,24 @@ def create_scheduled_task_unix(
         with open(cron_file, "r", encoding="utf-8") as file_handle:
             current_crontab = file_handle.readlines()
             for line in current_crontab:
-                if f"--{type}" in line and config_file in line and object_args in line:
-                    logger.info(f"Replacing existing {type} task")
+                if (
+                    f"--{task_type}" in line
+                    and config_file in line
+                    and object_args in line
+                ):
+                    logger.info(f"Replacing existing {task_type} task")
                     if replaced:
-                        logger.info(f"Skipping duplicate {type} task")
+                        logger.info(f"Skipping duplicate {task_type} task")
                         continue
                     crontab_file.append(crontab_entry)
                     replaced = True
                 else:
                     crontab_file.append(line)
         if not replaced:
-            logger.info(f"Adding new {type} task")
+            logger.info(f"Adding new {task_type} task")
             crontab_file.append(crontab_entry)
     except OSError as exc:
         logger.debug(f"Error reading file {cron_file}: {exc}")
         crontab_file.append(crontab_entry)
 
     try:
@@ -190,13 +203,13 @@ def create_scheduled_task_unix(
     return True
 
 
-def _get_scheduled_task_name_windows(type: str, subject: str) -> str:
-    return f"{PROGRAM_NAME} - {type.capitalize()} {subject}"
+def _get_scheduled_task_name_windows(task_type: str, subject: str) -> str:
+    return f"{PROGRAM_NAME} - {task_type.capitalize()} {subject}"
 
 
 def create_scheduled_task_windows(
     config_file: str,
-    type: str,
+    task_type: str,
     cli_executable_path: str,
     subject: str,
     object_args: str,
@@ -204,6 +217,7 @@ def create_scheduled_task_windows(
     hour: int = None,
     minute: int = None,
 ):
+    logger.debug(f"Creating task {subject}")
     executable_dir = os.path.dirname(cli_executable_path)
     if "python" in sys.executable and not IS_COMPILED:
         runner = sys.executable
@@ -213,10 +227,12 @@ def create_scheduled_task_windows(
     task_args = ""
     temp_task_file = os.path.join(tempfile.gettempdir(), "npbackup_task.xml")
 
-    task_name = _get_scheduled_task_name_windows(type, subject)
+    task_name = _get_scheduled_task_name_windows(task_type, subject)
 
     if interval_minutes is not None:
-        task_args = f'{task_args}-c "{config_file}" --{type} --run-as-cli{object_args}'
+        task_args = (
+            f'{task_args}-c "{config_file}" --{task_type} --run-as-cli{object_args}'
+        )
         start_date = datetime.datetime.now().replace(microsecond=0).isoformat()
         trigger = f"""<TimeTrigger>
             <Repetition>
@@ -228,9 +244,7 @@ def create_scheduled_task_windows(
             <Enabled>true</Enabled>
         </TimeTrigger>"""
     elif hour is not None and minute is not None:
-        task_args = (
-            f'{task_args}-c "{config_file}" --{type} --force --run-as-cli{object_args}'
-        )
+        task_args = f'{task_args}-c "{config_file}" --{task_type} --force --run-as-cli{object_args}'
         start_date = (
             datetime.datetime.now()
             .replace(microsecond=0, hour=hour, minute=minute, second=0)
@@ -291,15 +305,15 @@ def create_scheduled_task_windows(
         </Exec>
     </Actions>
 </Task>"""
-    # Create task file
+    # Create task file, without specific encoding in order to use platform prefered encoding
+    # platform prefered encoding is locale.getpreferredencoding() (cp1252 on windows, utf-8 on linux)
     try:
+        # pylint: disable=W1514 (unspecified-encoding)
         with open(temp_task_file, "w") as file_handle:
             file_handle.write(SCHEDULED_TASK_FILE_CONTENT)
     except OSError as exc:
         logger.error(
-            "Could not create temporary scheduled task file {}: {}".format(
-                temp_task_file, exc
-            )
+            f"Could not create temporary scheduled task file {temp_task_file}: {exc}"
         )
         return False
 
@@ -135,30 +135,29 @@ def _check_new_version(
         else:
             logger.error(msg)
             return None
-    else:
-        try:
-            if online_version:
-                if version.parse(online_version) > version.parse(
-                    version_dict["version"]
-                ):
-                    logger.info(
-                        "Current version %s is older than online version %s",
-                        version_dict["version"],
-                        online_version,
-                    )
-                    return True
-                else:
-                    logger.info(
-                        "Current version %s is up-to-date (online version %s)",
-                        version_dict["version"],
-                        online_version,
-                    )
-                    return False
-        except Exception as exc:
-            logger.error(
-                f"Cannot determine if online version '{online_version}' is newer than current version {version_dict['version']}: {exc}"
-            )
-            return False
+    try:
+        if online_version:
+            if version.parse(online_version) > version.parse(version_dict["version"]):
+                logger.info(
+                    "Current version %s is older than online version %s",
+                    version_dict["version"],
+                    online_version,
+                )
+                return True
+            logger.info(
+                "Current version %s is up-to-date (online version %s)",
+                version_dict["version"],
+                online_version,
+            )
+            return False
+        logger.error("Cannot determine online version")
+        return None
+    except Exception as exc:
+        logger.error(
+            f"Cannot determine if online version '{online_version}' is newer than current version {version_dict['version']}: {exc}"
+        )
+        return False
 
 
 def auto_upgrader(
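The comparison relies on packaging.version.parse, which orders versions semantically rather than lexically; a quick sketch:

    from packaging import version

    # Plain string comparison would get "3.10.0" < "3.9.0" wrong; parse() does not
    assert version.parse("3.10.0") > version.parse("3.9.0")
    # Pre-releases sort before the final release
    assert version.parse("3.0.0") > version.parse("3.0.0rc1")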
@@ -288,19 +287,19 @@ def auto_upgrader(
             f'echo "Launching upgrade" >> "{log_file}" 2>&1 & '
             f'echo "Moving current dist from {CURRENT_DIR} to {backup_dist}" >> "{log_file}" 2>&1 & '
             f'move /Y "{CURRENT_DIR}" "{backup_dist}" >> "{log_file}" 2>&1 & '
-            f'IF !ERRORLEVEL! NEQ 0 ( '
+            f"IF !ERRORLEVEL! NEQ 0 ( "
             f'echo "Moving current dist failed. Trying to copy it." >> "{log_file}" 2>&1 & '
             f'xcopy /S /Y /I "{CURRENT_DIR}\*" "{backup_dist}" >> "{log_file}" 2>&1 & '
             f'echo "Now trying to overwrite current dist with upgrade dist" >> "{log_file}" 2>&1 & '
             f'xcopy /S /Y "{upgrade_dist}\*" "{CURRENT_DIR}" >> "{log_file}" 2>&1 && '
-            f'set REPLACE_METHOD=overwrite'
+            f"set REPLACE_METHOD=overwrite"
             f") ELSE ( "
             f'echo "Moving upgraded dist from {upgrade_dist} to {CURRENT_DIR}" >> "{log_file}" 2>&1 & '
             f'move /Y "{upgrade_dist}" "{CURRENT_DIR}" >> "{log_file}" 2>&1 && '
             f'echo "Copying optional configuration files from {backup_dist} to {CURRENT_DIR}" >> "{log_file}" 2>&1 & '
             # Just copy any possible *.conf file from any subdirectory
             rf'xcopy /S /Y "{backup_dist}\*conf" {CURRENT_DIR} > NUL 2>&1 && '
-            f'set REPLACE_METHOD=move'
+            f"set REPLACE_METHOD=move"
             f") &"
             f'echo "Loading new executable {CURRENT_EXECUTABLE} --check-config {" ".join(sys.argv[1:])}" >> "{log_file}" 2>&1 & '
             f'"{CURRENT_EXECUTABLE}" --check-config >{" ".join(sys.argv[1:])}> "{log_file}" 2>&1 & '
@@ -313,7 +312,7 @@ def auto_upgrader(
             f'echo "Move method used. Move back" >> "{log_file}" 2>&1 & '
             f'rd /S /Q "{CURRENT_DIR}" >> "{log_file}" 2>&1 & '
             f'move /Y "{backup_dist}" "{CURRENT_DIR}" >> "{log_file}" 2>&1 '
-            f') '
+            f") "
             f") ELSE ( "
             f'echo "Upgrade successful" >> "{log_file}" 2>&1 & '
             f'rd /S /Q "{backup_dist}" >> "{log_file}" 2>&1 & '