merge main

Orsiris de Jong 2023-12-11 00:28:04 +01:00
commit f698cf0f06
17 changed files with 72 additions and 32 deletions

View file

@@ -9,6 +9,7 @@ lost+found
 /media
 /mnt
 /proc
+/sys
 /run
 /selinux
 /var/cache

View file

@@ -92,7 +92,11 @@ def execution_logs(start_time: datetime) -> None:
     10 = debug, 20 = info, 30 = warning, 40 = error, 50 = critical
     so "if 30 in logger._cache" checks if warning has been triggered
     ATTENTION: logger._cache does only contain cache of current main, not modules, deprecated in favor of
-    ofunctions.ContextFilterWorstLevel
+    ofunctions.logger_utils.ContextFilterWorstLevel
+    ATTENTION: ofunctions.logger_utils.ContextFilterWorstLevel will only check the current logger instance
+    So using logger = getLogger("anotherinstance") will create a separate instance from the one we can inspect
+    Makes sense ;)
     """
     end_time = datetime.utcnow()
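The docstring above describes tracking the worst log level emitted so far. A rough, self-contained sketch of that idea using only the standard library (the class name, attribute and logger name below are ours, not npbackup's or ofunctions' actual API):

import logging

class WorstLevelFilter(logging.Filter):
    """Remember the most severe level seen by whatever it is attached to."""

    def __init__(self):
        super().__init__()
        self.worst_level = logging.NOTSET

    def filter(self, record: logging.LogRecord) -> bool:
        # Observe every record, never drop any
        self.worst_level = max(self.worst_level, record.levelno)
        return True

logger = logging.getLogger("npbackup_demo")
handler = logging.StreamHandler()
worst = WorstLevelFilter()
handler.addFilter(worst)
logger.addHandler(handler)

logger.warning("something odd happened")
if worst.worst_level >= logging.WARNING:  # equivalent of the old 'if 30 in logger._cache'
    print("at least one warning was emitted")

As the added docstring lines point out, such state lives with the logger or handler instance it is attached to, so a separate getLogger("anotherinstance") would not be covered.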

View file

@@ -55,7 +55,7 @@ except ImportError:
     sys.exit(1)
-logger = getLogger(__name__)
+logger = getLogger()
 # Monkeypatching ruamel.yaml ordereddict so we get to use pseudo dot notations
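The comment above refers to patching ruamel.yaml's ordered mapping so configuration values can be read with attribute access. A generic sketch of the "pseudo dot notation" idea, kept self-contained with a plain dict subclass rather than npbackup's actual patch (class name and keys are invented):

class DotDict(dict):
    # Fall back to item access when an attribute is requested
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError as exc:
            raise AttributeError(name) from exc

config = DotDict({"backup": DotDict({"paths": ["/etc", "/home"]})})
print(config.backup.paths)  # same as config["backup"]["paths"]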

View file

@ -17,7 +17,7 @@ import i18n
from npbackup.path_helper import BASEDIR from npbackup.path_helper import BASEDIR
logger = getLogger(__intname__) logger = getLogger()
TRANSLATIONS_DIR = os.path.join(BASEDIR, "translations") TRANSLATIONS_DIR = os.path.join(BASEDIR, "translations")

View file

@@ -26,7 +26,7 @@ from npbackup.__version__ import __intname__ as NAME, __version__ as VERSION
 from npbackup import configuration
-logger = logging.getLogger(__intname__)
+logger = logging.getLogger()
 def metric_writer(
@@ -350,6 +350,15 @@ class NPBackupRunner:
         except ValueError:
             logger.warning("Bogus ignore_cloud_files value given")
+        try:
+            if self.config_dict["backup"]["additional_parameters"]:
+                self.restic_runner.additional_parameters = self.config_dict["backup"][
+                    "additional_parameters"
+                ]
+        except KeyError:
+            pass
+        except ValueError:
+            logger.warning("Bogus additional parameters given")
         self.restic_runner.stdout = self.stdout
         try:
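For reference, a hypothetical config_dict excerpt (values invented) showing the two keys this commit wires up: "additional_parameters" is injected into every restic command by the executor, while "additional_backup_only_parameters" is only appended to the backup command:

config_dict = {
    "backup": {
        "additional_parameters": "--insecure-tls",  # illustrative value
        "additional_backup_only_parameters": "--exclude-caches",  # illustrative value
    }
}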
@@ -570,14 +579,18 @@ class NPBackupRunner:
         tags = None
         try:
-            additional_parameters = self.config_dict["backup"]["additional_parameters"]
+            additional_backup_only_parameters = self.config_dict["backup"][
+                "additional_backup_only_parameters"
+            ]
         except KeyError:
-            additional_parameters = None
+            additional_backup_only_parameters = None
         # Check if backup is required
         self.restic_runner.verbose = False
         if not self.restic_runner.is_init:
-            self.restic_runner.init()
+            if not self.restic_runner.init():
+                logger.error("Cannot continue.")
+                return False
         if self.check_recent_backups() and not force:
             logger.info("No backup necessary.")
             return True
@@ -602,8 +615,8 @@ class NPBackupRunner:
             if pre_exec_failure_is_fatal:
                 return False
             else:
-                logger.debug(
-                    "Pre-execution of command {} success with\n{}.".format(
+                logger.info(
+                    "Pre-execution of command {} success with:\n{}.".format(
                         pre_exec_command, output
                     )
                 )
@@ -619,7 +632,7 @@ class NPBackupRunner:
             one_file_system=one_file_system,
             use_fs_snapshot=use_fs_snapshot,
             tags=tags,
-            additional_parameters=additional_parameters,
+            additional_backup_only_parameters=additional_backup_only_parameters,
         )
         logger.debug("Restic output:\n{}".format(result_string))
         metric_writer(
@@ -639,8 +652,8 @@ class NPBackupRunner:
             if post_exec_failure_is_fatal:
                 return False
             else:
-                logger.debug(
-                    "Post-execution of command {} success with\n{}.".format(
+                logger.info(
+                    "Post-execution of command {} success with:\n{}.".format(
                         post_exec_command, output
                     )
                 )

View file

@@ -16,7 +16,7 @@ from npbackup.upgrade_client.upgrader import auto_upgrader, _check_new_version
 from npbackup.__version__ import __version__ as npbackup_version
-logger = getLogger(__intname__)
+logger = getLogger()
 def check_new_version(config_dict: dict) -> bool:

View file

@@ -22,7 +22,7 @@ from npbackup.core.nuitka_helper import IS_COMPILED
 if os.name == "nt":
     from npbackup.windows.task import create_scheduled_task
-logger = getLogger(__intname__)
+logger = getLogger()
 def ask_backup_admin_password(config_dict) -> bool:
@@ -283,6 +283,10 @@ def config_gui(config_dict: dict, config_file: str):
            sg.Text(_t("config_gui.additional_parameters"), size=(40, 1)),
            sg.Input(key="backup---additional_parameters", size=(50, 1)),
        ],
+       [
+           sg.Text(_t("config_gui.additional_backup_only_parameters"), size=(40, 1)),
+           sg.Input(key="backup---additional_backup_only_parameters", size=(50, 1)),
+       ],
     ]
     repo_col = [

View file

@@ -45,7 +45,7 @@ from npbackup.core.i18n_helper import _t
 from npbackup.core.upgrade_runner import run_upgrade, check_new_version
-logger = getLogger(__intname__)
+logger = getLogger()
 # Let's use mutable to get a cheap way of transferring data from thread to main program
 # There are no possible race conditions since we don't modify the data from anywhere outside the thread
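The comments above describe handing a mutable object to a worker thread so the main program can read the result afterwards. A minimal sketch of that pattern (names are ours, not npbackup's):

import threading

result_holder = {"output": None}

def worker(holder: dict) -> None:
    # Only the worker writes here, so no locking is needed
    holder["output"] = "backup finished"

thread = threading.Thread(target=worker, args=(result_holder,))
thread.start()
thread.join()
print(result_holder["output"])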

View file

@@ -3,7 +3,7 @@
 #
 # This file is part of npbackup
-__intname__ = "npbackup.paths"
+__intname__ = "npbackup.path_helper"
 __author__ = "Orsiris de Jong"
 __copyright__ = "Copyright (C) 2023 NetInvent"
 __license__ = "GPL-3.0-only"

View file

@@ -22,7 +22,7 @@ import queue
 from command_runner import command_runner
-logger = getLogger(__intname__)
+logger = getLogger()
 # Arbitrary timeout for init / init checks.
 # If init takes more than a minute, we really have a problem
@@ -70,7 +70,7 @@ class ResticRunner:
         except AttributeError:
             self._backend_type = None
         self._ignore_cloud_files = True
-        self._addition_parameters = None
+        self._additional_parameters = None
         self._environment_variables = {}
         self._stop_on = (
@@ -201,7 +201,12 @@ class ResticRunner:
         """
         start_time = datetime.utcnow()
         self._executor_finished = False
-        _cmd = '"{}" {}{}'.format(self._binary, cmd, self.generic_arguments)
+        additional_parameters = (
+            f" {self.additional_parameters.strip()} "
+            if self.additional_parameters
+            else ""
+        )
+        _cmd = f'"{self._binary}" {additional_parameters}{cmd}{self.generic_arguments}'
         if self.dry_run:
             _cmd += " --dry-run"
         logger.debug("Running command: [{}]".format(_cmd))
@@ -352,11 +357,11 @@ class ResticRunner:
     @property
     def additional_parameters(self):
-        return self._addition_parameters
+        return self._additional_parameters
     @additional_parameters.setter
     def additional_parameters(self, value: str):
-        self._addition_parameters = value
+        self._additional_parameters = value
     @property
     def priority(self):
@@ -442,11 +447,17 @@ class ResticRunner:
         if re.search(
             r"created restic repository ([a-z0-9]+) at .+", output, re.IGNORECASE
         ):
+            self.is_init = True
             return True
         else:
             if re.search(".*already exists", output, re.IGNORECASE):
                 logger.info("Repo already initialized.")
                 self.is_init = True
+                return True
+            logger.error(f"Cannot contact repo: {output}")
+            self.is_init = False
+            return False
+        self.is_init = False
         return False
     @property
@@ -536,7 +547,7 @@ class ResticRunner:
         use_fs_snapshot: bool = False,
         tags: List[str] = [],
         one_file_system: bool = False,
-        additional_parameters: str = None,
+        additional_backup_only_parameters: str = None,
     ) -> Tuple[bool, str]:
         """
         Executes restic backup after interpreting all arguments
@@ -560,10 +571,15 @@ class ResticRunner:
             for path in paths:
                 cmd += ' {} "{}"'.format(source_parameter, path)
         else:
-            # make sure path is a list and does not have trailing slashes
+            # make sure path is a list and does not have trailing slashes, unless we're backing up root
             # We don't need to scan files for ETA, so let's add --no-scan
             cmd = "backup --no-scan {}".format(
-                " ".join(['"{}"'.format(path.rstrip("/\\")) for path in paths])
+                " ".join(
+                    [
+                        '"{}"'.format(path.rstrip("/\\")) if path != "/" else path
+                        for path in paths
+                    ]
+                )
             )
         case_ignore_param = ""
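The special case for "/" exists because stripping trailing slashes from the root path would leave an empty string. An illustrative run of the new comprehension (paths are made up):

paths = ["/home/user/", "/var/log", "/"]
fragment = " ".join(
    ['"{}"'.format(p.rstrip("/\\")) if p != "/" else p for p in paths]
)
print(fragment)  # "/home/user" "/var/log" /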
@@ -598,8 +614,8 @@ class ResticRunner:
             if tag:
                 tag = tag.strip()
                 cmd += " --tag {}".format(tag)
-        if additional_parameters:
-            cmd += " {}".format(additional_parameters)
+        if additional_backup_only_parameters:
+            cmd += " {}".format(additional_backup_only_parameters)
         result, output = self.executor(cmd, live_stream=True)
         if (

View file

@@ -20,6 +20,7 @@ en:
     one_per_line: one per line
     backup_priority: Backup priority
     additional_parameters: Additional parameters
+    additional_backup_only_parameters: Additional backup only parameters
     backup_destination: Backup destination
     minimum_backup_age: Minimum delay between two backups

View file

@@ -20,6 +20,7 @@ fr:
     one_per_line: un par ligne
     backup_priority: Priorité de sauvegarde
    additional_parameters: Paramètres supplémentaires
+    additional_backup_only_parameters: Paramètres supp. sauvegarde
     backup_destination: Destination de sauvegarde
     minimum_backup_age: Délai minimal entre deux sauvegardes

View file

@@ -16,7 +16,7 @@ import json
 import requests
-logger = getLogger(__intname__)
+logger = getLogger()
 class Requestor:

View file

@@ -25,7 +25,7 @@ from npbackup.path_helper import CURRENT_DIR, CURRENT_EXECUTABLE
 from npbackup.core.nuitka_helper import IS_COMPILED
 from npbackup.__version__ import __version__ as npbackup_version
-logger = getLogger(__intname__)
+logger = getLogger()
 UPGRADE_DEFER_TIME = 60  # Wait x seconds before we actually do the upgrade so current program could quit before being erased

View file

@@ -17,7 +17,7 @@ import tempfile
 from command_runner import command_runner
 from npbackup.customization import PROGRAM_NAME
-logger = getLogger(__intname__)
+logger = getLogger()
 # This is the path to a onefile executable binary

View file

@@ -18,7 +18,7 @@ from logging import getLogger
 ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
 config_file = os.path.join(ROOT_DIR, "upgrade_server.conf")
-logger = getLogger(__intname__)
+logger = getLogger()
 def load_config(config_file: str = config_file):

View file

@@ -39,7 +39,7 @@ else:
 config_dict = configuration.load_config()
-logger = getLogger(__intname__)
+logger = getLogger()
 def sha256sum_data(data):