Mirror of https://github.com/netinvent/npbackup.git (synced 2025-09-11 23:45:29 +08:00)
Make linter happy
This commit is contained in:
parent dab41ff7b7
commit b720b74649

13 changed files with 32 additions and 34 deletions
@@ -35,7 +35,7 @@ if not __SPECIAL_DEBUG_STRING:
 sys.argv.pop(sys.argv.index("--debug"))


-if not "_DEBUG" in globals():
+if "_DEBUG" not in globals():
 _DEBUG = False
 if __SPECIAL_DEBUG_STRING:
 if __debug_os_env == __SPECIAL_DEBUG_STRING:
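Most of this commit rewrites membership tests from "not x in y" to "x not in y", which pycodestyle flags as E713; both spellings are equivalent at runtime, the second is simply the idiomatic one. A minimal stand-alone sketch of the pattern (the flags dict is illustrative, not from npbackup):

flags = {"verbose": True}

# Before: if not "_DEBUG" in flags:   <- pycodestyle reports E713
# After: the equivalent, idiomatic membership test
if "_DEBUG" not in flags:
    flags["_DEBUG"] = False

print(flags)  # {'verbose': True, '_DEBUG': False}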
@@ -37,7 +37,7 @@ UPGRADE_DEFER_TIME = 60
 # Maximum allowed time offset in seconds to allow policy operations to run
 MAX_ALLOWED_NTP_OFFSET = 600.0

-if not "BUILD_TYPE" in globals():
+if "BUILD_TYPE" not in globals():
 BUILD_TYPE = "UnknownBuildType"

@@ -387,9 +387,6 @@ This is free software, and you are welcome to redistribute it under certain cond
 else:
 sys.exit(1)

-if args.verbose:
-_VERBOSE = True
-
 if args.config_file:
 if not os.path.isfile(args.config_file):
 msg = f"Config file {args.config_file} cannot be read or does not exist"
@@ -630,6 +627,9 @@ This is free software, and you are welcome to redistribute it under certain cond

 # On group operations, we also need to set op_args

+if args.verbose:
+cli_args["verbose"] = True
+
 if args.stdin:
 cli_args["operation"] = "backup"
 cli_args["op_args"] = {
@@ -736,7 +736,7 @@ This is free software, and you are welcome to redistribute it under certain cond
 elif args.init:
 cli_args["operation"] = "init"

-#### Group operation mode
+# Group operation mode
 possible_group_ops = (
 "backup",
 "restore",
@@ -29,10 +29,9 @@ from ofunctions.random import random_string
 from ofunctions.misc import replace_in_iterable, BytesConverter, iter_over_keys
 from resources.customization import ID_STRING
 from npbackup.key_management import AES_KEY, EARLIER_AES_KEY, IS_PRIV_BUILD, get_aes_key
+from npbackup.__version__ import __version__ as MAX_CONF_VERSION

 MIN_CONF_VERSION = "3.0"
-from npbackup.__version__ import __version__ as MAX_CONF_VERSION
-

 sys.path.insert(0, os.path.normpath(os.path.join(os.path.dirname(__file__), "..")))
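The hunk above moves the MAX_CONF_VERSION import up so it sits with the other imports instead of after the MIN_CONF_VERSION assignment, which is what pycodestyle's E402 (module level import not at top of file) asks for. A small sketch of the resulting layout, using stand-in names rather than the real npbackup modules:

# Layout the linter prefers: every import in the import block, constants afterwards.
import sys

MIN_CONF_VERSION = "3.0"
MAX_CONF_VERSION = "3.1"  # stand-in value; the real code imports this from npbackup.__version__

# Placing a "from ... import ... as MAX_CONF_VERSION" line after the MIN_CONF_VERSION
# assignment, as the old code did, is what pycodestyle reports as E402.
print(sys.version_info.major, MIN_CONF_VERSION, MAX_CONF_VERSION)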
@@ -891,13 +890,13 @@ def load_config(config_file: Path) -> Optional[dict]:
 full_config = crypt_config(
 full_config, AES_KEY, ENCRYPTED_OPTIONS, operation="decrypt"
 )
-if full_config == False:
+if full_config is False:
 if EARLIER_AES_KEY:
 logger.warning("Trying to migrate encryption key")
 full_config = crypt_config(
 full_config, EARLIER_AES_KEY, ENCRYPTED_OPTIONS, operation="decrypt"
 )
-if full_config == False:
+if full_config is False:
 msg = "Cannot decrypt config file. Looks like our keys don't match."
 logger.critical(msg)
 raise EnvironmentError(msg)
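crypt_config apparently signals failure by returning False, and the check is tightened from "== False" to "is False". Linters flag "== False" as E712, and the identity test also avoids mistaking other falsy values for a decryption failure. A self-contained illustration with a stub, not the real crypt_config:

def crypt_config_stub(ok: bool):
    """Stand-in for the real crypt_config: dict on success, False on failure."""
    return {"repos": {}} if ok else False

result = crypt_config_stub(ok=False)

# "result == False" is also true for 0 or 0.0; "is False" matches only the literal
# False sentinel, which is what the failure check means here (and satisfies E712).
if result is False:
    print("decryption failed")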
@@ -22,7 +22,7 @@ from npbackup.restic_metrics import (
 write_metrics_file,
 )
 from npbackup.__version__ import __intname__ as NAME, version_dict
-from npbackup.__debug__ import _DEBUG, fmt_json
+from npbackup.__debug__ import fmt_json
 from resources.customization import OEM_STRING

 logger = getLogger()
@@ -156,10 +156,10 @@ def metric_analyser(
 date=date,
 )
 except KeyError as exc:
-logger.info("Metrics error: {}".format(exc))
+logger.info(f"Metrics error: {exc}")
 logger.debug("Trace:", exc_info=True)
 except OSError as exc:
-logger.error("Metrics OS error: ".format(exc))
+logger.error(f"Metrics OS error: {exc}")
 logger.debug("Trace:", exc_info=True)
 return operation_success, backup_too_small

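The first change above is plain style modernisation (str.format to an f-string), but the second fixes a real logging bug: the template "Metrics OS error: " has no {} placeholder, so .format(exc) silently discarded the exception text. A quick stand-alone demonstration:

exc = OSError("disk full")

# Old call: the template has no {} placeholder, so .format() drops the argument.
print("Metrics OS error: ".format(exc))   # -> "Metrics OS error: "

# New call: the exception text actually reaches the log line.
print(f"Metrics OS error: {exc}")         # -> "Metrics OS error: disk full"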
@@ -185,7 +185,7 @@ def send_prometheus_metrics(
 )
 return False
 except KeyError as exc:
-logger.error("No prometheus configuration found in config file.")
+logger.error("No prometheus configuration found in config file: {exc}")
 return False

 if destination and dry_run:
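One caveat worth noting: the replacement line "No prometheus configuration found in config file: {exc}" is a plain string without the f prefix, so {exc} will appear literally in the log instead of the exception bound by the except clause. Demonstration:

exc = KeyError("prometheus")

print("No prometheus configuration found in config file: {exc}")   # braces printed literally
print(f"No prometheus configuration found in config file: {exc}")  # value interpolated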
@@ -194,12 +194,12 @@ def send_prometheus_metrics(
 logger.debug("Sending metrics to {}".format(destination))
 dest = destination.lower()
 if dest.startswith("http"):
-if not "metrics" in dest:
+if "metrics" not in dest:
 logger.error(
 "Destination does not contain 'metrics' keyword. Not uploading."
 )
 return False
-if not "job" in dest:
+if "job" not in dest:
 logger.error(
 "Destination does not contain 'job' keyword. Not uploading."
 )
@@ -486,7 +486,7 @@ class NPBackupRunner:
 current_permissions = self.repo_config.g("permissions")
 if (
 current_permissions
-and not current_permissions in required_permissions[operation]
+and current_permissions not in required_permissions[operation]
 ):
 self.write_logs(
 f"Required permissions for operation '{operation}' must be one of {', '.join(required_permissions[operation])}, current permission is '{current_permissions}'",
@@ -935,9 +935,9 @@ class NPBackupRunner:
 if self.json_output:
 if isinstance(result, dict):
 js = result
-if not "additional_error_info" in js.keys():
+if "additional_error_info" not in js.keys():
 js["additional_error_info"] = []
-if not "additional_warning_info" in js.keys():
+if "additional_warning_info" not in js.keys():
 js["additional_warning_info"] = []
 else:
 js = {
@@ -1405,7 +1405,7 @@ class NPBackupRunner:
 all_files_present = self.check_source_files_present(source_type, paths)
 if not all_files_present:
 self.write_logs(
-f"Not all files/folders are present in backup source",
+"Not all files/folders are present in backup source",
 level="error",
 )

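The f prefix is dropped here from a string that contains no placeholders, which pyflakes reports as F541 (f-string is missing placeholders). Harmless at runtime, but it implies interpolation that never happens. Sketch:

# Before (F541): an f-string with nothing to format.
message = f"Not all files/folders are present in backup source"
# After: a plain literal carries the same text without implying interpolation.
message = "Not all files/folders are present in backup source"
print(message)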
@@ -1508,7 +1508,7 @@ class NPBackupRunner:
 current_permissions = self.repo_config.g("permissions")
 if (
 current_permissions
-and not current_permissions in required_permissions[post_backup_op]
+and current_permissions not in required_permissions[post_backup_op]
 ):
 self.write_logs(
 f"Required permissions for post backup housekeeping must be one of {', '.join(required_permissions[post_backup_op])}, current permission is '{current_permissions}'",
@@ -11,7 +11,6 @@ __build__ = "2025030701"


 from logging import getLogger
-from random import randint
 from npbackup.upgrade_client.upgrader import auto_upgrader, _check_new_version
 import npbackup.configuration

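Removing "from random import randint" is the classic unused-import cleanup (pyflakes F401, imported but unused); dead imports mislead readers about what the module actually depends on. A minimal before/after, with an illustrative logger name:

# The pattern being cleaned up: an import with no remaining call sites.
# Before:
#     from random import randint   # F401: 'random.randint' imported but unused
#     from logging import getLogger
# After: only the imports that are actually used remain.
from logging import getLogger

logger = getLogger("upgrade_runner_demo")  # illustrative name, not from npbackup
logger.warning("module imports are now all referenced")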
@@ -249,10 +249,10 @@ def config_gui(full_config: dict, config_file: str):
 try:
 if combo_value.startswith("Repo: "):
 object_type = "repos"
-object_name = combo_value[len("Repo: ") :]
+object_name = combo_value[len("Repo: "):]
 elif combo_value.startswith("Group: "):
 object_type = "groups"
-object_name = combo_value[len("Group: ") :]
+object_name = combo_value[len("Group: "):]
 else:
 object_type = None
 object_name = None
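The slice rewrite drops the space before the colon. Worth knowing: pycodestyle's E203 dislikes that space, but black deliberately inserts it when the slice bound is an expression such as len("Repo: "), so this edit may ping-pong between tools; many projects silence E203 instead. The behaviour is identical either way:

combo_value = "Repo: default"

# Both spellings slice identically; only the formatting differs.
object_name_a = combo_value[len("Repo: ") :]   # black's output for complex slice bounds
object_name_b = combo_value[len("Repo: "):]    # what this commit switches to
assert object_name_a == object_name_b == "default"
print(object_name_a)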
@@ -635,7 +635,7 @@ def config_gui(full_config: dict, config_file: str):
 # First we need to clear the whole GUI to reload new values
 for key in window.AllKeysDict:
 # We only clear config keys, which have '.' separator
-if "." in str(key) and not "inherited" in str(key):
+if "." in str(key) and "inherited" not in str(key):
 if isinstance(window[key], sg.Tree):
 window[key].Update(sg.TreeData())
 else:
@@ -791,7 +791,7 @@ def config_gui(full_config: dict, config_file: str):
 continue
 if not isinstance(key, str) or (
 isinstance(key, str)
-and (not "." in key and not key in ("repo_uri", "repo_group"))
+and ("." not in key and key not in ("repo_uri", "repo_group"))
 ):
 # Don't bother with keys that don't contain with "." since they're not in the YAML config file
 # but are most probably for GUI events
@@ -2975,5 +2975,4 @@ Google Cloud storage: GOOGLE_PROJECT_ID GOOGLE_APPLICATION_CREDENTIALS\n\
 # Closing this window takes ages
 window.hide()
 quick_close_simplegui_window(window)
-del window  # noqa: F821 (undefined name)
 return full_config

@@ -61,7 +61,7 @@ def get_aes_key():
 try:
 with open(key_location, "rb") as key_file:
 key = key_file.read()
-msg = f"Encryption key file read"
+msg = "Encryption key file read"
 except OSError as exc:
 msg = f"Cannot read encryption key file: {exc}"
 return False, msg
@@ -171,7 +171,7 @@ def restic_str_output_to_json(
 matches.group(), line
 )
 )
-except IndexError as exc:
+except IndexError:
 logger.error("Trace:", exc_info=True)
 errors = True

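Binding the exception with "as exc" and then never using the name is another lint target (pyflakes F841, local variable assigned but never used); logger.error(..., exc_info=True) already attaches the active exception's traceback, so the binding can go. Illustration with a stand-in list lookup:

import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("restic_metrics_demo")

try:
    matches = []
    print(matches[3])         # raises IndexError, like a failed regex group lookup
except IndexError:            # no "as exc": exc_info=True captures the active exception anyway
    logger.error("Trace:", exc_info=True)
    errors = True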
@@ -472,7 +472,7 @@ def restic_output_2_metrics(restic_result, output, labels_string=None):
 matches.group(), line
 )
 )
-except IndexError as exc:
+except IndexError:
 logger.error("Trace:", exc_info=True)
 errors = True

@@ -19,7 +19,6 @@ import re
 from datetime import datetime, timezone
 import dateutil.parser
 import queue
-from functools import wraps
 from command_runner import command_runner
 from packaging.version import parse as version_parse
 from ofunctions.misc import BytesConverter, fn_name
@@ -169,7 +168,7 @@ class ResticRunner:
 os.environ[encrypted_env_variable] = value

 # Configure default cpu usage when not specifically set
-if not "GOMAXPROCS" in self.environment_variables:
+if "GOMAXPROCS" not in self.environment_variables:
 nb_cores = os.cpu_count()
 if nb_cores < 2:
 gomaxprocs = nb_cores
@@ -377,7 +376,7 @@ class ResticRunner:
 logger.debug(f"Skipping output filter for {self._executor_operation}")
 return output
 if not isinstance(output, str):
-logger.debug(f"Skipping output filter for non str output")
+logger.debug("Skipping output filter for non str output")
 return output
 for filter in restic_output_filters:
 output = filter.sub("", output)
@@ -42,6 +42,7 @@ def serialize_datetime(obj):


 def entrypoint(*args, **kwargs):
+verbose = kwargs.pop("verbose", False)
 repo_config = kwargs.pop("repo_config", None)
 json_output = kwargs.pop("json_output")
 operation = kwargs.pop("operation")
@@ -50,6 +51,7 @@ def entrypoint(*args, **kwargs):
 npbackup_runner = NPBackupRunner()
 if repo_config:
 npbackup_runner.repo_config = repo_config
+npbackup_runner.verbose = verbose
 npbackup_runner.dry_run = kwargs.pop("dry_run")
 npbackup_runner.verbose = kwargs.pop("verbose")
 npbackup_runner.live_output = not json_output
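These two hunks thread the verbose flag through kwargs: entrypoint() now pops it early with a default and assigns it to the runner. One thing a reviewer might double-check (only part of the commit is shown on this page, so another hunk may already handle it): once kwargs.pop("verbose", False) has removed the key, the surviving npbackup_runner.verbose = kwargs.pop("verbose") line would raise KeyError. The pop-with-default pattern itself, as a stand-alone sketch:

def entrypoint_stub(**kwargs):
    # Popping with a default removes the key (if present) and never raises.
    verbose = kwargs.pop("verbose", False)
    print("verbose:", verbose)
    # A second kwargs.pop("verbose") with no default would now raise KeyError,
    # because the key was already removed above.
    print("remaining kwargs:", kwargs)

entrypoint_stub(verbose=True, operation="backup")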
@@ -59,7 +59,7 @@ def get_ev_data(cert_data_path):
 cryptographic_provider,
 ) = ev_cert
 except Exception as exc:
-print("EV Cert data is corrupt")
+print("EV Cert data is corrupt: {exc}")
 sys.exit(1)
 return pkcs12_certificate, pkcs12_password, container_name, cryptographic_provider

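Same caveat as the prometheus message earlier: the new print call lacks the f prefix, so {exc} is printed verbatim rather than the exception text:

exc = ValueError("bad certificate bundle")
print("EV Cert data is corrupt: {exc}")   # braces printed literally
print(f"EV Cert data is corrupt: {exc}")  # what was presumably intended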