"""Config class for qBittorrent-Manage"""
import os
import re
import stat
import time
from collections import OrderedDict
import requests
from retrying import retry
from modules import util
from modules.apprise import Apprise
from modules.bhd import BeyondHD
from modules.notifiarr import Notifiarr
from modules.qbittorrent import Qbt
from modules.util import check
from modules.util import Failed
from modules.util import YAML
from modules.webhooks import Webhooks
logger = util.logger
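
# Command flags read from the "commands" section of the config file; each name
# also maps to a QBT_* environment variable / CLI flag of the same name.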
COMMANDS = [
"cross_seed",
"recheck",
"cat_update",
"tag_update",
"rem_unregistered",
"tag_tracker_error",
"rem_orphaned",
"tag_nohardlinks",
"share_limits",
"skip_cleanup",
"skip_qb_version_check",
"dry_run",
]
class Config:
"""Config class for qBittorrent-Manage"""
def __init__(self, default_dir, args):
logger.info("Locating config...")
self.args = args
config_file = args["config_file"]
if config_file and os.path.exists(config_file):
self.config_path = os.path.abspath(config_file)
elif config_file and os.path.exists(os.path.join(default_dir, config_file)):
self.config_path = os.path.abspath(os.path.join(default_dir, config_file))
elif config_file and not os.path.exists(config_file):
raise Failed(f"Config Error: config not found at {os.path.abspath(config_file)}")
elif os.path.exists(os.path.join(default_dir, "config.yml")):
self.config_path = os.path.abspath(os.path.join(default_dir, "config.yml"))
else:
raise Failed(f"Config Error: config not found at {os.path.abspath(default_dir)}")
logger.info(f"Using {self.config_path} as config")
self.util = check(self)
self.default_dir = default_dir
self.start_time = args["time_obj"]
loaded_yaml = YAML(self.config_path)
self.data = loaded_yaml.data
        # Commands in the config file override any QBT_* environment variables
        if "commands" in self.data:
            if self.data["commands"] is not None:
                logger.info(f"Commands found in {config_file}, ignoring env variables and using config commands instead.")
self.commands = {}
for command in COMMANDS:
self.commands[command] = self.util.check_for_attribute(
self.data,
command,
parent="commands",
var_type="bool",
default=False,
save=True,
)
logger.debug(f" --cross-seed (QBT_CROSS_SEED): {self.commands['cross_seed']}")
logger.debug(f" --recheck (QBT_RECHECK): {self.commands['recheck']}")
logger.debug(f" --cat-update (QBT_CAT_UPDATE): {self.commands['cat_update']}")
logger.debug(f" --tag-update (QBT_TAG_UPDATE): {self.commands['tag_update']}")
logger.debug(f" --rem-unregistered (QBT_REM_UNREGISTERED): {self.commands['rem_unregistered']}")
logger.debug(f" --tag-tracker-error (QBT_TAG_TRACKER_ERROR): {self.commands['tag_tracker_error']}")
logger.debug(f" --rem-orphaned (QBT_REM_ORPHANED): {self.commands['rem_orphaned']}")
logger.debug(f" --tag-nohardlinks (QBT_TAG_NOHARDLINKS): {self.commands['tag_nohardlinks']}")
logger.debug(f" --share-limits (QBT_SHARE_LIMITS): {self.commands['share_limits']}")
logger.debug(f" --skip-cleanup (QBT_SKIP_CLEANUP): {self.commands['skip_cleanup']}")
logger.debug(f" --skip-qb-version-check (QBT_SKIP_QB_VERSION_CHECK): {self.commands['skip_qb_version_check']}")
logger.debug(f" --dry-run (QBT_DRY_RUN): {self.commands['dry_run']}")
else:
self.commands = args
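
        # Pop and re-insert each known top-level section so keys keep a canonical
        # order when the config is saved; required sections default to empty dicts.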
if "qbt" in self.data:
self.data["qbt"] = self.data.pop("qbt")
self.data["settings"] = self.data.pop("settings") if "settings" in self.data else {}
if "directory" in self.data:
self.data["directory"] = self.data.pop("directory")
self.data["cat"] = self.data.pop("cat") if "cat" in self.data else {}
if "cat_change" in self.data:
self.data["cat_change"] = self.data.pop("cat_change")
if "tracker" in self.data:
trackers = self.data.pop("tracker")
self.data["tracker"] = {}
# Splits tracker urls at pipes, useful for trackers with multiple announce urls
for tracker_urls, data in trackers.items():
for tracker_url in tracker_urls.split("|"):
self.data["tracker"][tracker_url.strip()] = data
else:
self.data["tracker"] = {}
if "nohardlinks" in self.data:
self.data["nohardlinks"] = self.data.pop("nohardlinks")
if "recyclebin" in self.data:
self.data["recyclebin"] = self.data.pop("recyclebin")
if "orphaned" in self.data:
self.data["orphaned"] = self.data.pop("orphaned")
if "apprise" in self.data:
self.data["apprise"] = self.data.pop("apprise")
if "notifiarr" in self.data:
self.data["notifiarr"] = self.data.pop("notifiarr")
if "webhooks" in self.data:
temp = self.data.pop("webhooks")
2023-05-06 21:37:11 +08:00
if temp is not None:
if "function" not in temp or ("function" in temp and temp["function"] is None):
temp["function"] = {}
2021-12-29 01:19:58 +08:00
2023-05-06 21:37:11 +08:00
def hooks(attr):
if attr in temp:
items = temp.pop(attr)
if items:
temp["function"][attr] = items
if attr not in temp["function"]:
temp["function"][attr] = {}
temp["function"][attr] = None
hooks("cross_seed")
hooks("recheck")
hooks("cat_update")
hooks("tag_update")
hooks("rem_unregistered")
hooks("rem_orphaned")
hooks("tag_nohardlinks")
hooks("cleanup_dirs")
self.data["webhooks"] = temp
if "bhd" in self.data:
self.data["bhd"] = self.data.pop("bhd")
if "share_limits" in self.data:
self.data["share_limits"] = self.data.pop("share_limits")
self.dry_run = self.commands["dry_run"]
self.loglevel = "DRYRUN" if self.dry_run else "INFO"
self.session = requests.Session()
share_limits_tag = self.data["settings"].get("share_limits_suffix_tag", "~share_limit")
# Convert previous share_limits_suffix_tag to new default share_limits_tag
if share_limits_tag == "share_limit":
share_limits_tag = "~share_limit"
self.settings = {
"force_auto_tmm": self.util.check_for_attribute(
self.data, "force_auto_tmm", parent="settings", var_type="bool", default=False
),
"tracker_error_tag": self.util.check_for_attribute(
self.data, "tracker_error_tag", parent="settings", default="issue"
),
"nohardlinks_tag": self.util.check_for_attribute(self.data, "nohardlinks_tag", parent="settings", default="noHL"),
"share_limits_tag": self.util.check_for_attribute(
self.data, "share_limits_tag", parent="settings", default=share_limits_tag
),
}
self.tracker_error_tag = self.settings["tracker_error_tag"]
self.nohardlinks_tag = self.settings["nohardlinks_tag"]
self.share_limits_tag = self.settings["share_limits_tag"]
default_ignore_tags = [self.nohardlinks_tag, self.tracker_error_tag, "cross-seed"]
self.settings["ignoreTags_OnUpdate"] = self.util.check_for_attribute(
self.data, "ignoreTags_OnUpdate", parent="settings", default=default_ignore_tags, var_type="list"
)
"Migrate settings from v4.0.0 to v4.0.1 and beyond. Convert 'share_limits_suffix_tag' to 'share_limits_tag'"
if "share_limits_suffix_tag" in self.data["settings"]:
self.util.overwrite_attributes(self.settings, "settings")
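
        # Per-command webhook targets; None means no webhook is configured for that command.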
default_function = {
"cross_seed": None,
"recheck": None,
"cat_update": None,
"tag_update": None,
"rem_unregistered": None,
"tag_tracker_error": None,
"rem_orphaned": None,
"tag_nohardlinks": None,
"share_limits": None,
"cleanup_dirs": None,
}
self.webhooks_factory = {
"error": self.util.check_for_attribute(self.data, "error", parent="webhooks", var_type="list", default_is_none=True),
"run_start": self.util.check_for_attribute(
self.data, "run_start", parent="webhooks", var_type="list", default_is_none=True
),
"run_end": self.util.check_for_attribute(
self.data, "run_end", parent="webhooks", var_type="list", default_is_none=True
),
"function": self.util.check_for_attribute(
self.data, "function", parent="webhooks", var_type="list", default=default_function
),
}
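        # Register a webhook attribute for every command, defaulting to None when unset.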
for func in default_function:
self.util.check_for_attribute(self.data, func, parent="webhooks", subparent="function", default_is_none=True)
2022-08-14 04:49:02 +08:00
self.cat_change = self.data["cat_change"] if "cat_change" in self.data else {}
self.apprise_factory = None
if "apprise" in self.data:
if self.data["apprise"] is not None:
logger.info("Connecting to Apprise...")
try:
self.apprise_factory = Apprise(
self,
{
"api_url": self.util.check_for_attribute(
self.data, "api_url", parent="apprise", var_type="url", throw=True
),
"notify_url": self.util.check_for_attribute(
self.data, "notify_url", parent="apprise", var_type="list", throw=True
),
},
)
except Failed as err:
logger.error(err)
logger.info(f"Apprise Connection {'Failed' if self.apprise_factory is None else 'Successful'}")
self.notifiarr_factory = None
if "notifiarr" in self.data:
if self.data["notifiarr"] is not None:
logger.info("Connecting to Notifiarr...")
try:
self.notifiarr_factory = Notifiarr(
self,
{
"apikey": self.util.check_for_attribute(self.data, "apikey", parent="notifiarr", throw=True),
"instance": self.util.check_for_attribute(
self.data, "instance", parent="notifiarr", default=False, do_print=False, save=False
),
},
)
except Failed as err:
logger.error(err)
logger.info(f"Notifiarr Connection {'Failed' if self.notifiarr_factory is None else 'Successful'}")
self.webhooks_factory = Webhooks(
self, self.webhooks_factory, notifiarr=self.notifiarr_factory, apprise=self.apprise_factory
)
try:
self.webhooks_factory.start_time_hooks(self.start_time)
except Failed as err:
logger.stacktrace()
logger.error(f"Webhooks Error: {err}")
self.beyond_hd = None
if "bhd" in self.data:
if self.data["bhd"] is not None:
logger.info("Connecting to BHD API...")
try:
self.beyond_hd = BeyondHD(
self, {"apikey": self.util.check_for_attribute(self.data, "apikey", parent="bhd", throw=True)}
)
except Failed as err:
logger.error(err)
self.notify(err, "BHD")
logger.info(f"BHD Connection {'Failed' if self.beyond_hd is None else 'Successful'}")
# nohardlinks
self.nohardlinks = None
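
        # Entries under nohardlinks may be plain category names or single-key
        # mappings that carry per-category options such as exclude_tags.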
if "nohardlinks" in self.data and self.commands["tag_nohardlinks"] and self.data["nohardlinks"] is not None:
self.nohardlinks = {}
for cat in self.data["nohardlinks"]:
if isinstance(cat, dict):
cat_str = list(cat.keys())[0]
self.nohardlinks[cat_str] = {}
exclude_tags = cat[cat_str].get("exclude_tags", [])
if isinstance(exclude_tags, str):
exclude_tags = [exclude_tags]
self.nohardlinks[cat_str]["exclude_tags"] = exclude_tags
elif isinstance(cat, str):
self.nohardlinks[cat] = {}
self.nohardlinks[cat]["exclude_tags"] = []
                else:
                    err = "Config Error: nohardlinks must be a list of categories"
                    self.notify(err, "Config")
                    raise Failed(err)
# share limits
self.share_limits = None
if "share_limits" in self.data and self.commands["share_limits"]:
def _sort_share_limits(share_limits):
sorted_limits = sorted(
share_limits.items(), key=lambda x: x[1].get("priority", float("inf")) if x[1] is not None else float("inf")
)
priorities = set()
for key, value in sorted_limits:
if value is None:
value = {}
if "priority" in value:
priority = value["priority"]
if priority in priorities:
err = (
f"Config Error: Duplicate priority '{priority}' found in share_limits "
f"for the grouping '{key}'. Priority must be a unique value and greater than or equal to 1"
)
self.notify(err, "Config")
raise Failed(err)
else:
                        priority = max(priorities, default=0) + 1
logger.warning(
f"Priority not defined for the grouping '{key}' in share_limits. Setting priority to {priority}"
)
value["priority"] = self.util.check_for_attribute(
self.data,
"priority",
parent="share_limits",
subparent=key,
var_type="float",
default=priority,
save=True,
)
priorities.add(priority)
return OrderedDict(sorted_limits)
self.share_limits = OrderedDict()
sorted_share_limits = _sort_share_limits(self.data["share_limits"])
for group in sorted_share_limits:
self.share_limits[group] = {}
self.share_limits[group]["priority"] = sorted_share_limits[group]["priority"]
self.share_limits[group]["include_all_tags"] = self.util.check_for_attribute(
self.data,
"include_all_tags",
parent="share_limits",
subparent=group,
var_type="list",
default_is_none=True,
do_print=False,
save=False,
)
self.share_limits[group]["include_any_tags"] = self.util.check_for_attribute(
self.data,
"include_any_tags",
parent="share_limits",
subparent=group,
var_type="list",
default_is_none=True,
do_print=False,
save=False,
)
self.share_limits[group]["exclude_all_tags"] = self.util.check_for_attribute(
self.data,
"exclude_all_tags",
parent="share_limits",
subparent=group,
var_type="list",
default_is_none=True,
do_print=False,
save=False,
)
self.share_limits[group]["exclude_any_tags"] = self.util.check_for_attribute(
self.data,
"exclude_any_tags",
parent="share_limits",
subparent=group,
var_type="list",
default_is_none=True,
do_print=False,
save=False,
)
self.share_limits[group]["categories"] = self.util.check_for_attribute(
self.data,
"categories",
parent="share_limits",
subparent=group,
var_type="list",
default_is_none=True,
do_print=False,
save=False,
)
self.share_limits[group]["cleanup"] = self.util.check_for_attribute(
self.data, "cleanup", parent="share_limits", subparent=group, var_type="bool", default=False, do_print=False
)
self.share_limits[group]["max_ratio"] = self.util.check_for_attribute(
self.data,
"max_ratio",
parent="share_limits",
subparent=group,
var_type="float",
min_int=-2,
default=-1,
do_print=False,
save=False,
)
self.share_limits[group]["max_seeding_time"] = self.util.check_for_attribute(
self.data,
"max_seeding_time",
parent="share_limits",
subparent=group,
var_type="int",
min_int=-2,
default=-1,
do_print=False,
save=False,
)
self.share_limits[group]["min_seeding_time"] = self.util.check_for_attribute(
self.data,
"min_seeding_time",
parent="share_limits",
subparent=group,
var_type="int",
min_int=0,
default=0,
do_print=False,
save=False,
)
self.share_limits[group]["limit_upload_speed"] = self.util.check_for_attribute(
self.data,
"limit_upload_speed",
parent="share_limits",
subparent=group,
var_type="int",
min_int=-1,
default=0,
do_print=False,
save=False,
)
self.share_limits[group]["min_num_seeds"] = self.util.check_for_attribute(
self.data,
"min_num_seeds",
parent="share_limits",
subparent=group,
var_type="int",
min_int=0,
default=0,
do_print=False,
save=False,
)
self.share_limits[group]["resume_torrent_after_change"] = self.util.check_for_attribute(
self.data,
"resume_torrent_after_change",
parent="share_limits",
subparent=group,
var_type="bool",
default=True,
do_print=False,
save=False,
)
self.share_limits[group]["add_group_to_tag"] = self.util.check_for_attribute(
self.data,
"add_group_to_tag",
parent="share_limits",
subparent=group,
var_type="bool",
default=True,
do_print=False,
save=False,
)
self.share_limits[group]["torrents"] = []
else:
if self.commands["share_limits"]:
err = "Config Error: share_limits. No valid grouping found."
self.notify(err, "Config")
raise Failed(err)
# Add RecycleBin
self.recyclebin = {}
self.recyclebin["enabled"] = self.util.check_for_attribute(
self.data, "enabled", parent="recyclebin", var_type="bool", default=True
)
self.recyclebin["empty_after_x_days"] = self.util.check_for_attribute(
self.data, "empty_after_x_days", parent="recyclebin", var_type="int", default_is_none=True
)
self.recyclebin["save_torrents"] = self.util.check_for_attribute(
self.data, "save_torrents", parent="recyclebin", var_type="bool", default=False
)
self.recyclebin["split_by_category"] = self.util.check_for_attribute(
self.data, "split_by_category", parent="recyclebin", var_type="bool", default=False
)
# Assign directories
if "directory" in self.data:
self.root_dir = os.path.join(
self.util.check_for_attribute(self.data, "root_dir", parent="directory", default_is_none=True), ""
)
self.remote_dir = os.path.join(
self.util.check_for_attribute(self.data, "remote_dir", parent="directory", default=self.root_dir), ""
)
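            # remote_dir is only validated as a real path (var_type="path") when a
            # command actually touches the filesystem or the recycle bin is enabled.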
if self.commands["cross_seed"] or self.commands["tag_nohardlinks"] or self.commands["rem_orphaned"]:
self.remote_dir = self.util.check_for_attribute(
self.data, "remote_dir", parent="directory", var_type="path", default=self.root_dir
)
else:
if self.recyclebin["enabled"]:
self.remote_dir = self.util.check_for_attribute(
self.data, "remote_dir", parent="directory", var_type="path", default=self.root_dir
)
if self.commands["cross_seed"]:
self.cross_seed_dir = self.util.check_for_attribute(self.data, "cross_seed", parent="directory", var_type="path")
else:
self.cross_seed_dir = self.util.check_for_attribute(
self.data, "cross_seed", parent="directory", default_is_none=True
)
if self.commands["rem_orphaned"]:
if "orphaned_dir" in self.data["directory"] and self.data["directory"]["orphaned_dir"] is not None:
default_orphaned = os.path.join(
self.remote_dir, os.path.basename(self.data["directory"]["orphaned_dir"].rstrip(os.sep))
)
else:
default_orphaned = os.path.join(self.remote_dir, "orphaned_data")
self.orphaned_dir = self.util.check_for_attribute(
self.data, "orphaned_dir", parent="directory", var_type="path", default=default_orphaned, make_dirs=True
)
else:
self.orphaned_dir = None
if self.recyclebin["enabled"]:
if "recycle_bin" in self.data["directory"] and self.data["directory"]["recycle_bin"] is not None:
default_recycle = os.path.join(
self.remote_dir, os.path.basename(self.data["directory"]["recycle_bin"].rstrip(os.sep))
)
else:
default_recycle = os.path.join(self.remote_dir, ".RecycleBin")
if self.recyclebin["split_by_category"]:
self.recycle_dir = self.util.check_for_attribute(
self.data, "recycle_bin", parent="directory", default=default_recycle
)
else:
self.recycle_dir = self.util.check_for_attribute(
self.data, "recycle_bin", parent="directory", var_type="path", default=default_recycle, make_dirs=True
)
else:
self.recycle_dir = None
if self.recyclebin["enabled"] and self.recyclebin["save_torrents"]:
self.torrents_dir = self.util.check_for_attribute(self.data, "torrents_dir", parent="directory", var_type="path")
            if not any(file.endswith(".torrent") for file in os.listdir(self.torrents_dir)):
                err = f"Config Error: The location {self.torrents_dir} does not contain any .torrent files"
self.notify(err, "Config")
raise Failed(err)
else:
self.torrents_dir = self.util.check_for_attribute(
self.data, "torrents_dir", parent="directory", default_is_none=True
)
else:
e = "Config Error: directory attribute not found"
self.notify(e, "Config")
raise Failed(e)
# Add Orphaned
self.orphaned = {}
self.orphaned["empty_after_x_days"] = self.util.check_for_attribute(
self.data, "empty_after_x_days", parent="orphaned", var_type="int", default_is_none=True
)
self.orphaned["exclude_patterns"] = self.util.check_for_attribute(
self.data, "exclude_patterns", parent="orphaned", var_type="list", default_is_none=True, do_print=False
)
if self.commands["rem_orphaned"]:
exclude_orphaned = f"**{os.sep}{os.path.basename(self.orphaned_dir.rstrip(os.sep))}{os.sep}*"
            if exclude_orphaned not in self.orphaned["exclude_patterns"]:
                self.orphaned["exclude_patterns"].append(exclude_orphaned)
if self.recyclebin["enabled"]:
exclude_recycle = f"**{os.sep}{os.path.basename(self.recycle_dir.rstrip(os.sep))}{os.sep}*"
                if exclude_recycle not in self.orphaned["exclude_patterns"]:
                    self.orphaned["exclude_patterns"].append(exclude_recycle)
# Connect to Qbittorrent
self.qbt = None
if "qbt" in self.data:
logger.info("Connecting to Qbittorrent...")
self.qbt = Qbt(
self,
{
"host": self.util.check_for_attribute(self.data, "host", parent="qbt", throw=True),
"username": self.util.check_for_attribute(self.data, "user", parent="qbt", default_is_none=True),
"password": self.util.check_for_attribute(self.data, "pass", parent="qbt", default_is_none=True),
},
)
else:
e = "Config Error: qbt attribute not found"
self.notify(e, "Config")
raise Failed(e)
# Empty old files from recycle bin or orphaned
def cleanup_dirs(self, location):
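        """Delete files older than empty_after_x_days from the given location
        ("Recycle Bin" or "Orphaned Data") and return the number of files deleted."""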
num_del = 0
files = []
size_bytes = 0
skip = self.commands["skip_cleanup"]
if location == "Recycle Bin":
enabled = self.recyclebin["enabled"]
empty_after_x_days = self.recyclebin["empty_after_x_days"]
function = "cleanup_dirs"
location_path = self.recycle_dir
elif location == "Orphaned Data":
enabled = self.commands["rem_orphaned"]
empty_after_x_days = self.orphaned["empty_after_x_days"]
function = "cleanup_dirs"
location_path = self.orphaned_dir
if not skip:
if enabled and empty_after_x_days is not None:
if location == "Recycle Bin" and self.recyclebin["split_by_category"]:
if "cat" in self.data and self.data["cat"] is not None:
save_path = list(self.data["cat"].values())
cleaned_save_path = [
os.path.join(
s.replace(self.root_dir, self.remote_dir), os.path.basename(location_path.rstrip(os.sep))
)
for s in save_path
]
location_path_list = [location_path]
for folder in cleaned_save_path:
if os.path.exists(folder):
location_path_list.append(folder)
else:
e = f"No categories defined. Checking {location} directory {location_path}."
self.notify(e, f"Empty {location}", False)
logger.warning(e)
location_path_list = [location_path]
else:
location_path_list = [location_path]
location_files = [
os.path.join(path, name)
for r_path in location_path_list
for path, subdirs, files in os.walk(r_path)
for name in files
]
location_files = sorted(location_files)
logger.trace(f"location_files: {location_files}")
if location_files:
body = []
logger.separator(f"Emptying {location} (Files > {empty_after_x_days} days)", space=True, border=True)
prevfolder = ""
for file in location_files:
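                        # Derive the top-level cleanup folder for this file so the log output can be grouped per folder.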
folder = re.search(f".*{os.path.basename(location_path.rstrip(os.sep))}", file).group(0)
if folder != prevfolder:
body += logger.separator(f"Searching: {folder}", space=False, border=False)
                        try:
                            file_stats = os.stat(file)
                            filename = os.path.basename(file)
                            last_modified = file_stats[stat.ST_MTIME]  # last modified time, in seconds
                        except FileNotFoundError:
                            ex = logger.print_line(
                                f"{location} Warning - FileNotFound: No such file or directory: {file} ", "WARNING"
                            )
                            self.notify(ex, "Cleanup Dirs", False)
                            continue
now = time.time() # in seconds
days = (now - last_modified) / (60 * 60 * 24)
if empty_after_x_days <= days:
num_del += 1
body += logger.print_line(
(
f"{'Did not delete' if self.dry_run else 'Deleted'} "
f"{filename} from {folder} (Last modified {round(days)} days ago)."
),
self.loglevel,
)
files += [str(filename)]
size_bytes += os.path.getsize(file)
if not self.dry_run:
os.remove(file)
                        prevfolder = folder
if num_del > 0:
if not self.dry_run:
for path in location_path_list:
util.remove_empty_directories(path, "**/*")
body += logger.print_line(
(
f"{'Did not delete' if self.dry_run else 'Deleted'} {num_del} files "
f"({util.human_readable_size(size_bytes)}) from the {location}."
),
self.loglevel,
)
attr = {
"function": function,
"location": location,
"title": f"Emptying {location} (Files > {empty_after_x_days} days)",
"body": "\n".join(body),
"files": files,
"empty_after_x_days": empty_after_x_days,
"size_in_bytes": size_bytes,
}
self.send_notifications(attr)
else:
logger.debug(f'No files found in "{(",".join(location_path_list))}"')
return num_del
def send_notifications(self, attr):
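        """Send a function-specific webhook notification built from the attr payload."""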
try:
function = attr["function"]
config_webhooks = self.webhooks_factory.function_webhooks
config_function = None
for key in config_webhooks:
if key in function:
config_function = key
break
if config_function:
self.webhooks_factory.function_hooks([config_webhooks[config_function]], attr)
except Failed as e:
logger.stacktrace()
logger.error(f"webhooks_factory Error: {e}")
def notify(self, text, function=None, critical=True):
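        """Report one or more error messages through the configured error webhooks."""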
for error in util.get_list(text, split=False):
try:
self.webhooks_factory.error_hooks(error, function_error=function, critical=critical)
except Failed as e:
logger.stacktrace()
logger.error(f"webhooks_factory Error: {e}")
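
    # Thin HTTP helpers around requests.Session; @retry re-attempts a failed
    # request (up to 6 attempts total, with a fixed 10-second wait between tries).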
def get_json(self, url, json=None, headers=None, params=None):
return self.get(url, json=json, headers=headers, params=params).json()
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def get(self, url, json=None, headers=None, params=None):
return self.session.get(url, json=json, headers=headers, params=params)
def post_json(self, url, data=None, json=None, headers=None):
return self.post(url, data=data, json=json, headers=headers).json()
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def post(self, url, data=None, json=None, headers=None):
return self.session.post(url, data=data, json=json, headers=headers)