Mirror of https://github.com/netinvent/npbackup.git
Reformat files with black

Commit: d25392ba33
Parent: 52951925b0
5 changed files with 19 additions and 12 deletions
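Every hunk below is a mechanical rewrite by the black code formatter; no behavior changes. To check a tree against black without modifying anything, one can run it in check mode. A minimal sketch, assuming black is installed and the repository root is the working directory:

```python
import subprocess

# --check exits non-zero if any file would be reformatted;
# --diff prints the changes instead of writing them.
result = subprocess.run(["black", "--check", "--diff", "."])
print("already black-formatted" if result.returncode == 0 else "needs reformatting")
```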
```diff
@@ -166,7 +166,7 @@ This is free software, and you are welcome to redistribute it under certain cond
         type=str,
         default=None,
         required=False,
-        help="Get repository statistics. If snapshot id is given, only snapshots statistics will be shown."
+        help="Get repository statistics. If snapshot id is given, only snapshots statistics will be shown.",
     )
     parser.add_argument(
         "--raw",
```
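The only change in the hunk above is the comma black appends after the final keyword argument (its "magic trailing comma"), which keeps later additions to the call as one-line diffs. A self-contained sketch of the convention; the flag name `--stats` is an assumption for illustration, since the hunk does not show it:

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument(
    "--stats",
    type=str,
    default=None,
    required=False,
    help="Get repository statistics.",  # trailing comma: adding an argument later touches one line
)
print(parser.parse_args(["--stats", "latest"]).stats)
```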
```diff
@@ -593,7 +593,9 @@ def get_repo_config(
                             _repo_config.s(key, value)
                             _config_inheritance.s(key, True)
                         # Case where repo_config contains list but group info has single str
-                        elif isinstance(_repo_config.g(key), list) and value is not None:
+                        elif (
+                            isinstance(_repo_config.g(key), list) and value is not None
+                        ):
                             merged_lists = _repo_config.g(key) + [value]

                             # Special case when merged lists contain multiple dicts, we'll need to merge dicts
```
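Here black wraps an `elif` whose line exceeds the default 88-character limit in parentheses rather than using a backslash continuation. A self-contained sketch of the resulting shape (with shortened, made-up names; black itself would collapse a line this short back to one line):

```python
current = ["existing-value"]
value = "from-group-config"

# Pre-black this condition sat on one overlong line; black produces:
if (
    isinstance(current, list) and value is not None
):
    merged_lists = current + [value]

print(merged_lists)  # ['existing-value', 'from-group-config']
```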
```diff
@@ -41,7 +41,11 @@ logger = logging.getLogger()


 def metric_writer(
-    repo_config: dict, restic_result: bool, result_string: str, operation: str, dry_run: bool
+    repo_config: dict,
+    restic_result: bool,
+    result_string: str,
+    operation: str,
+    dry_run: bool,
 ) -> bool:
     backup_too_small = False
     operation_success = True
```
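Black splits this signature because the one-line parameter list passes 88 characters, yielding one parameter per line plus a trailing comma. The same transformation can be reproduced through black's Python API, which it documents as unstable; a sketch:

```python
import black

src = (
    "def metric_writer(repo_config: dict, restic_result: bool, "
    "result_string: str, operation: str, dry_run: bool) -> bool:\n"
    "    return True\n"
)
# format_str applies the same rules as the CLI; Mode() uses the 88-char default
print(black.format_str(src, mode=black.Mode()))
```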
```diff
@@ -116,9 +120,7 @@ def metric_writer(
     metrics.append(
         f'npbackup_oper_state{{{labels},action="{operation}",repo="{repo_name}"}} {0 if restic_result else 1}'
     )
-    metrics.append(
-        f'npbackup_exec_state{{{labels}}} {exec_state}'
-    )
+    metrics.append(f"npbackup_exec_state{{{labels}}} {exec_state}")
     logger.debug("Metrics computed:\n{}".format("\n".join(metrics)))
     if destination:
         logger.debug("Sending metrics to {}".format(destination))
```
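Besides collapsing the now-short call onto one line, black normalized the string to double quotes. The triple braces are easy to misread: `{{` and `}}` are literal braces in an f-string, while `{labels}` interpolates, which is exactly what produces the Prometheus label syntax. A quick demonstration with made-up values:

```python
labels = 'instance="myhost",backup_job="default"'  # hypothetical label set
exec_state = 0                                     # hypothetical value

# {{ and }} render as literal braces; {labels} and {exec_state} interpolate
print(f"npbackup_exec_state{{{labels}}} {exec_state}")
# npbackup_exec_state{instance="myhost",backup_job="default"} 0
```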
```diff
@@ -608,7 +610,7 @@ class NPBackupRunner:
                 return False

         return wrapper
-
+
     def metrics(fn: Callable):
         """
         Write prometheus metrics
```
```diff
@@ -619,8 +621,8 @@ class NPBackupRunner:
             result = fn(self, *args, **kwargs)
             metric_writer(self.repo_config, result, None, fn.__name__, self.dry_run)
             return result
-        return wrapper

+        return wrapper

     def create_restic_runner(self) -> bool:
         can_run = True
```
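The code around these two hunks is a standard decorator: `metrics` wraps a runner method, forwards the call, reports the result, and returns it; black only normalizes the blank lines around the nested `def`. A simplified, self-contained sketch of the pattern, with a print stand-in for the real `metric_writer`:

```python
from functools import wraps
from typing import Callable

def metrics(fn: Callable):
    """Report the wrapped operation's result (sketch)."""

    @wraps(fn)  # keeps fn.__name__ intact, which the real code forwards to metric_writer
    def wrapper(self, *args, **kwargs):
        result = fn(self, *args, **kwargs)
        print(f"operation={fn.__name__} success={result}")  # stand-in for metric_writer(...)
        return result

    return wrapper

class Runner:
    @metrics
    def backup(self):
        return True

Runner().backup()  # prints: operation=backup success=True
```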
```diff
@@ -1329,7 +1331,6 @@ class NPBackupRunner:
         )
         return self.restic_runner.repair(subject)

-
     @threaded
     @catch_exceptions
     @metrics
```
```diff
@@ -1429,4 +1430,4 @@
         if self.json_output:
             js["result"] = group_result
             return js
-        return group_result
+        return group_result
```
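The removed and re-added `return group_result` lines are textually identical, which in a diff almost always means invisible whitespace changed: black strips trailing whitespace and guarantees files end with exactly one newline. The same applies to the blank-line pairs in the hunks above. A sketch reproducing the normalization, assuming black is installed:

```python
import pathlib
import subprocess
import tempfile

src = "def f():\n    return 1"  # note: no newline at end of file
with tempfile.TemporaryDirectory() as tmp:
    path = pathlib.Path(tmp) / "demo.py"
    path.write_text(src)
    subprocess.run(["black", "-q", str(path)], check=True)
    print(repr(path.read_text()))  # 'def f():\n    return 1\n' with the final newline added
```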
```diff
@@ -232,7 +232,9 @@ def restic_json_to_prometheus(
         prom_metrics.append(f'restic_{key}{{{labels},action="backup"}} {value}')

     try:
-        processed_bytes = BytesConverter(str(restic_json['total_bytes_processed'])).human
+        processed_bytes = BytesConverter(
+            str(restic_json["total_bytes_processed"])
+        ).human
         logger.info(f"Processed {processed_bytes} of data")
     except Exception:
         logger.error(f"Cannot find processed bytes: {exc}")
```
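Note a pre-existing bug this reformat leaves untouched: the handler logs `{exc}`, but the clause is `except Exception:` without binding `exc`, so the error path itself would raise a `NameError`. A sketch of the presumably intended shape, with a stand-in for `BytesConverter(...).human` (npbackup's human-readable size helper, not shown in this diff):

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger()

restic_json = {"total_bytes_processed": 123456789}  # example payload shape

try:
    # stand-in for BytesConverter(str(...)).human
    processed_bytes = f"{int(restic_json['total_bytes_processed']) / 1024 ** 2:.2f} MiB"
    logger.info(f"Processed {processed_bytes} of data")
except Exception as exc:  # 'as exc' is required for the log line below to work
    logger.error(f"Cannot find processed bytes: {exc}")
```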
```diff
@@ -818,7 +818,9 @@ class ResticRunner:
         if exclude_files_larger_than:
             try:
                 # Default --exclude_larger_than unit is bytes
-                exclude_files_larger_than = int(BytesConverter(exclude_files_larger_than))
+                exclude_files_larger_than = int(
+                    BytesConverter(exclude_files_larger_than)
+                )
             except ValueError:
                 warning = f"Bogus unit for exclude_files_larger_than value given: {exclude_files_larger_than}"
                 self.write_logs(warning, level="warning")
```
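For context on what the reformatted line does: `BytesConverter` parses human size strings such as "50M" into byte counts and raises `ValueError` on bogus units, while bare integers are already bytes. A minimal stand-in sketch of that contract (the real helper supports more units and formats):

```python
UNITS = {"B": 1, "K": 1024, "M": 1024 ** 2, "G": 1024 ** 3}

def to_bytes(value: str) -> int:
    """Parse '50M'-style size strings; plain digits are taken as bytes."""
    value = str(value).strip().upper()
    if value.isdigit():  # default unit is bytes, per the comment in the hunk
        return int(value)
    number, unit = value[:-1], value[-1]
    if unit not in UNITS or not number.isdigit():
        raise ValueError(f"Bogus unit for value given: {value}")
    return int(number) * UNITS[unit]

print(to_bytes("50M"))  # 52428800
```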