Mirror of https://github.com/StuffAnThings/qbit_manage.git
Synced 2025-11-17 14:32:21 +08:00

Commit 86e827248e: 14 changed files with 716 additions and 457 deletions

.flake8 (7 changes)

@@ -6,7 +6,8 @@ ignore =
     E701,  # E701 Multiple statements on one line (colon)
     E241,  # E241 Multiple spaces after ','
     E272,  # E272 Multiple spaces before keyword
-    C901   # C901 Function is too complex
-    E722   # E722 Do not use bare except, specify exception instead
-    W503   # W503 Line break occurred before a binary operator
+    C901,  # C901 Function is too complex
+    E722,  # E722 Do not use bare except, specify exception instead
+    W503,  # W503 Line break occurred before a binary operator
+    E402,  # E402 module level import not at top of file
 max-line-length = 200
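
One plausible reason E402 joins the ignore list (an assumption; the main script is not shown in this mirror): the new shared util.logger has to exist before the project's modules are imported, since each module binds logger = util.logger at import time. That forces a pattern flake8 would otherwise flag:

    # Hypothetical bootstrap order (names from this diff; layout assumed):
    from modules import util
    from modules.logs import MyLogger

    util.logger = MyLogger("qBit Manage", "qbit_manage.log", "INFO", ".", 100, "=", False, False)

    from modules.config import Config  # E402: module level import not at top of file
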

VERSION (2 changes)

@@ -1 +1 @@
-3.2.4
+3.2.5

config.yml.sample

@@ -3,8 +3,21 @@
 # Please refer to the link below for more details on how to set up the configuration file
 # https://github.com/StuffAnThings/qbit_manage/wiki/Config-Setup
 
-# qBittorrent parameters
+commands:
+  # The commands defined below will IGNORE any commands used in command line and docker env variables.
+  dry_run: True
+  cross_seed: False
+  recheck: False
+  cat_update: False
+  tag_update: False
+  rem_unregistered: False
+  tag_tracker_error: False
+  rem_orphaned: False
+  tag_nohardlinks: False
+  skip_recycle: False
+
 qbt:
+  # qBittorrent parameters
   host: "localhost:8080"
   user: "username"
   pass: "password"
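
The commands block overrides anything passed on the command line or via docker env variables; modules/config.py (later in this diff) pops the mapping and backfills missing keys with False. A minimal sketch of that precedence rule, assuming args already carries the CLI/env defaults:

    def resolve_commands(data, args):
        # Config-file commands win; otherwise fall back to CLI/env args.
        if data.get("commands") is not None:
            commands = dict(data["commands"])
            commands.setdefault("dry_run", args.get("dry_run", False))
            for flag in ("cross_seed", "recheck", "cat_update", "tag_update",
                         "rem_unregistered", "tag_tracker_error", "rem_orphaned",
                         "tag_nohardlinks", "skip_recycle"):
                commands.setdefault(flag, False)
            return commands
        return args
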

@@ -31,14 +44,24 @@ directory:
   recycle_bin: "/mnt/user/data/torrents/.RecycleBin"
   torrents_dir: "/qbittorrent/data/BT_backup"
 
-# Category & Path Parameters
 cat:
+  # Category & Path Parameters
   # <Category Name> : <save_path>  # Path of your save directory.
   movies: "/data/torrents/Movies"
   tv: "/data/torrents/TV"
 
-# Tag Parameters
+cat_change:
+  # This moves all the torrents from one category to another category. This executes on --cat-update
+  # WARNING: if the paths are different and Default Torrent Management Mode is set to automatic, the files could be moved!!!
+  # <Old Category Name> : <New Category>
+  Radarr-HD.cross-seed: movies-hd
+  Radarr-UHD.cross-seed: movies-uhd
+  movies-hd.cross-seed: movies-hd
+  movies-uhd.cross-seed: movies-uhd
+
+
 tracker:
+  # Tag Parameters
   # <Tracker URL Keyword>:  # <MANDATORY> This is the keyword in the tracker url
   # <MANDATORY> Set tag name. Can be a list of tags or a single tag
   #   tag: <Tag Name>
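
On --cat-update each cat_change entry is applied as an old-to-new rename over completed torrents; a rough sketch of the loop (qbt_client stands in for the qbittorrent-api Client used elsewhere in this diff):

    cat_change = {
        "Radarr-HD.cross-seed": "movies-hd",
        "Radarr-UHD.cross-seed": "movies-uhd",
    }

    for old_cat, new_cat in cat_change.items():
        for torrent in qbt_client.torrents.info(category=old_cat, filter="completed"):
            torrent.set_category(category=new_cat)  # may move files if AutoTMM is on and paths differ
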

@@ -103,8 +126,8 @@ tracker:
   tv-vault:
     tag: TV-Vault
 
-# Tag Movies/Series that are not hard linked
 nohardlinks:
+  # Tag Movies/Series that are not hard linked
   # Mandatory to fill out directory parameter above to use this function (root_dir/remote_dir)
   # This variable should be set to your category name of your completed movies/completed series in qbit. Acceptable variable can be any category you would like to tag if there are no hardlinks found
   movies-completed:
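
The hard-link test that backs this feature relies on a file's link count: once Sonarr/Radarr hard-links a download into the library, the count rises above 1. The project's own helper (util.nohardlink) is not shown in this diff; the core check is typically:

    import os

    def has_hardlinks(path):
        # st_nlink counts directory entries pointing at the inode;
        # > 1 means the torrent's file is also linked into the media library.
        return os.stat(path).st_nlink > 1
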

@@ -141,9 +164,9 @@ nohardlinks:
     # <OPTIONAL> min seeding time var: Will ensure that torrent is not deleted by cleanup variable if torrent does not meet minimum seeding time (min).
     min_seeding_time: 43200
 
-# Recycle Bin method of deletion will move files into the recycle bin (Located in /root_dir/.RecycleBin) instead of directly deleting them in qbit
-# By default the Recycle Bin will be emptied on every run of the qbit_manage script if empty_after_x_days is defined.
 recyclebin:
+  # Recycle Bin method of deletion will move files into the recycle bin (Located in /root_dir/.RecycleBin) instead of directly deleting them in qbit
+  # By default the Recycle Bin will be emptied on every run of the qbit_manage script if empty_after_x_days is defined.
   enabled: true
   # <OPTIONAL> empty_after_x_days var:
   # Will automatically remove all files and folders in recycle bin after x days. (Checks every script run)

@@ -159,8 +182,8 @@ recyclebin:
   # and add the base folder name of the recycle bin that was defined in the `recycle_bin` sub-attribute under directory.
   split_by_category: false
 
-# Orphaned files are those in the root_dir download directory that are not referenced by any active torrents.
 orphaned:
+  # Orphaned files are those in the root_dir download directory that are not referenced by any active torrents.
   # File patterns that will not be considered orphaned files. Handy for generated files that aren't part of the torrent but belong with the torrent's files
   exclude_patterns:
     - "**/.DS_Store"
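
The patterns are shell-style globs matched against full paths. A small sketch of how such patterns shield files from orphan detection, using fnmatch (which modules/qbittorrent.py imports elsewhere in this diff):

    from fnmatch import fnmatch

    exclude_patterns = ["**/.DS_Store", "**/*.!qB", "/data/torrents/temp/**"]

    def is_excluded(path):
        return any(fnmatch(path, pattern) for pattern in exclude_patterns)

    print(is_excluded("/data/torrents/Movies/.DS_Store"))  # True
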

@@ -169,29 +192,29 @@ orphaned:
   - "/data/torrents/temp/**"
   - "**/*.!qB"
 
-# Apprise integration with webhooks
-# Leave Empty/Blank to disable
 apprise:
+  # Apprise integration with webhooks
+  # Leave Empty/Blank to disable
   # Mandatory to fill out the url of your apprise API endpoint
   api_url: http://apprise-api:8000
   # Mandatory to fill out the notification url/urls based on the notification services provided by apprise. https://github.com/caronc/apprise/wiki
   notify_url:
 
-# Notifiarr integration with webhooks
-# Leave Empty/Blank to disable
 notifiarr:
+  # Notifiarr integration with webhooks
+  # Leave Empty/Blank to disable
   # Mandatory to fill out API Key
   apikey: ####################################
   # <OPTIONAL> Set to a unique value (could be your username on notifiarr for example)
   instance:
 
-# Webhook notifications:
-# Possible values:
-#   Set value to notifiarr if using notifiarr integration
-#   Set value to apprise if using apprise integration
-#   Set value to a valid webhook URL
-#   Leave Empty/Blank to disable
 webhooks:
+  # Webhook notifications:
+  # Possible values:
+  #   Set value to notifiarr if using notifiarr integration
+  #   Set value to apprise if using apprise integration
+  #   Set value to a valid webhook URL
+  #   Leave Empty/Blank to disable
   error: https://mywebhookurl.com/qbt_manage
   run_start: notifiarr
   run_end: apprise
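
Each value here is a routing choice rather than just a URL. A plausible sketch of the dispatch; the real logic lives in modules/webhooks.py, which this commit does not touch, so the method names below are assumptions:

    def send(webhook, payload, notifiarr, apprise, session):
        # "notifiarr"/"apprise" route to the configured integrations; anything else is a raw URL.
        if webhook == "notifiarr":
            notifiarr.notification(payload)   # assumed method name
        elif webhook == "apprise":
            apprise.notification(payload)     # assumed method name
        else:
            session.post(webhook, json=payload)
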

@@ -206,6 +229,6 @@ webhooks:
     tag_nohardlinks: notifiarr
     empty_recyclebin: notifiarr
 
-# BHD Integration used for checking unregistered torrents
 bhd:
+  # BHD Integration used for checking unregistered torrents
   apikey:

modules/apprise.py

@@ -1,12 +1,13 @@
-import logging
+from modules import util
 from modules.util import Failed
-logger = logging.getLogger("qBit Manage")
+logger = util.logger
 
 
 class Apprise:
     def __init__(self, config, params):
         self.config = config
         self.api_url = params["api_url"]
+        logger.secret(self.api_url)
         self.notify_url = ",".join(params["notify_url"])
         response = self.config.get(self.api_url)
         if response.status_code != 200:
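
Registering the endpoint with logger.secret() at construction time means every later log line is scrubbed centrally: MyLogger._log in the new modules/logs.py (below) replaces any registered string with "(redacted)". For example:

    logger.secret("http://apprise-api:8000")
    logger.debug("Posting to http://apprise-api:8000/notify")
    # file/console output: "Posting to (redacted)/notify"
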

modules/bhd.py

@@ -1,8 +1,8 @@
-import logging
+from modules import util
 from modules.util import Failed
 from json import JSONDecodeError
 
-logger = logging.getLogger("qBit Manage")
+logger = util.logger
 base_url = "https://beyond-hd.me/api/"
 
 

@@ -10,6 +10,7 @@ class BeyondHD:
     def __init__(self, config, params):
         self.config = config
         self.apikey = params["apikey"]
+        logger.secret(self.apikey)
         json = {"search": "test"}
         self.search(json)
 

@@ -17,7 +18,7 @@ class BeyondHD:
         url = f"{base_url}{path}{self.apikey}"
         json["action"] = "search"
         if self.config.trace_mode:
-            logger.debug(url.replace(self.apikey, "APIKEY"))
+            logger.debug(url)
             logger.debug(f"JSON: {json}")
         try:
             response = self.config.post(url, json=json)

modules/config.py

@@ -1,15 +1,14 @@
-import logging, os, requests, stat, time, re
+import os, requests, stat, time, re
 from modules import util
-from modules.util import Failed, check
+from modules.util import Failed, check, YAML
 from modules.qbittorrent import Qbt
 from modules.webhooks import Webhooks
 from modules.notifiarr import Notifiarr
 from modules.bhd import BeyondHD
 from modules.apprise import Apprise
-from ruamel import yaml
 from retrying import retry
 
-logger = logging.getLogger("qBit Manage")
+logger = util.logger
 
 
 class Config:
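
The YAML helper now imported from modules.util replaces the direct ruamel calls removed in the next hunk. Its implementation is not part of this diff; a minimal round-trip wrapper over ruamel (class shape assumed) would look like:

    from ruamel import yaml

    class YAML:
        def __init__(self, path):
            self.path = path
            self.yaml = yaml.YAML()            # round-trip loader keeps comments intact
            self.yaml.allow_duplicate_keys = True
            with open(self.path, encoding="utf-8") as fp:
                self.data = self.yaml.load(fp)

        def save(self):
            with open(self.path, "w", encoding="utf-8") as fp:
                self.yaml.dump(self.data, fp)
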
@@ -30,50 +29,79 @@ class Config:
         self.trace_mode = args["trace"] if "trace" in args else False
         self.start_time = args["time_obj"]
 
-        yaml.YAML().allow_duplicate_keys = True
-        try:
-            new_config, _, _ = yaml.util.load_yaml_guess_indent(open(self.config_path, encoding="utf-8"))
-            if "qbt" in new_config: new_config["qbt"] = new_config.pop("qbt")
-            new_config["settings"] = new_config.pop("settings") if "settings" in new_config else {}
-            if "directory" in new_config: new_config["directory"] = new_config.pop("directory")
-            new_config["cat"] = new_config.pop("cat") if "cat" in new_config else {}
-            if "tracker" in new_config: new_config["tracker"] = new_config.pop("tracker")
-            elif "tags" in new_config: new_config["tracker"] = new_config.pop("tags")
-            else: new_config["tracker"] = {}
-            if "nohardlinks" in new_config: new_config["nohardlinks"] = new_config.pop("nohardlinks")
-            if "recyclebin" in new_config: new_config["recyclebin"] = new_config.pop("recyclebin")
-            if "orphaned" in new_config: new_config["orphaned"] = new_config.pop("orphaned")
-            if "apprise" in new_config: new_config["apprise"] = new_config.pop("apprise")
-            if "notifiarr" in new_config: new_config["notifiarr"] = new_config.pop("notifiarr")
-            if "webhooks" in new_config:
-                temp = new_config.pop("webhooks")
-                if 'function' not in temp or ('function' in temp and temp['function'] is None): temp["function"] = {}
-
-                def hooks(attr):
-                    if attr in temp:
-                        items = temp.pop(attr)
-                        if items:
-                            temp["function"][attr] = items
-                    if attr not in temp["function"]:
-                        temp["function"][attr] = {}
-                        temp["function"][attr] = None
-                hooks("cross_seed")
-                hooks("recheck")
-                hooks("cat_update")
-                hooks("tag_update")
-                hooks("rem_unregistered")
-                hooks("rem_orphaned")
-                hooks("tag_nohardlinks")
-                hooks("empty_recyclebin")
-                new_config["webhooks"] = temp
-            if "bhd" in new_config: new_config["bhd"] = new_config.pop("bhd")
-            yaml.round_trip_dump(new_config, open(self.config_path, "w", encoding="utf-8"), indent=None, block_seq_indent=2)
-            self.data = new_config
-        except yaml.scanner.ScannerError as e:
-            raise Failed(f"YAML Error: {util.tab_new_lines(e)}")
-        except Exception as e:
-            util.print_stacktrace()
-            raise Failed(f"YAML Error: {e}")
+        loaded_yaml = YAML(self.config_path)
+        self.data = loaded_yaml.data
+
+        # Replace env variables with config commands
+        if "commands" in self.data:
+            if self.data["commands"] is not None:
+                logger.info(f"Commands found in {config_file}, ignoring env variables and using config commands instead.")
+                self.commands = self.data.pop("commands")
+                if 'dry_run' not in self.commands:
+                    self.commands['dry_run'] = args['dry_run'] if 'dry_run' in args else False
+                # Add any missing commands with a default of False
+                for v in [
+                    'cross_seed',
+                    'recheck',
+                    'cat_update',
+                    'tag_update',
+                    'rem_unregistered',
+                    'tag_tracker_error',
+                    'rem_orphaned',
+                    'tag_nohardlinks',
+                    'skip_recycle',
+                ]:
+                    if v not in self.commands:
+                        self.commands[v] = False
+
+                logger.debug(f"    --cross-seed (QBT_CROSS_SEED): {self.commands['cross_seed']}")
+                logger.debug(f"    --recheck (QBT_RECHECK): {self.commands['recheck']}")
+                logger.debug(f"    --cat-update (QBT_CAT_UPDATE): {self.commands['cat_update']}")
+                logger.debug(f"    --tag-update (QBT_TAG_UPDATE): {self.commands['tag_update']}")
+                logger.debug(f"    --rem-unregistered (QBT_REM_UNREGISTERED): {self.commands['rem_unregistered']}")
+                logger.debug(f"    --tag-tracker-error (QBT_TAG_TRACKER_ERROR): {self.commands['tag_tracker_error']}")
+                logger.debug(f"    --rem-orphaned (QBT_REM_ORPHANED): {self.commands['rem_orphaned']}")
+                logger.debug(f"    --tag-nohardlinks (QBT_TAG_NOHARDLINKS): {self.commands['tag_nohardlinks']}")
+                logger.debug(f"    --skip-recycle (QBT_SKIP_RECYCLE): {self.commands['skip_recycle']}")
+                logger.debug(f"    --dry-run (QBT_DRY_RUN): {self.commands['dry_run']}")
+        else:
+            self.commands = args
+
+        if "qbt" in self.data: self.data["qbt"] = self.data.pop("qbt")
+        self.data["settings"] = self.data.pop("settings") if "settings" in self.data else {}
+        if "directory" in self.data: self.data["directory"] = self.data.pop("directory")
+        self.data["cat"] = self.data.pop("cat") if "cat" in self.data else {}
+        if "cat_change" in self.data: self.data["cat_change"] = self.data.pop("cat_change")
+        if "tracker" in self.data: self.data["tracker"] = self.data.pop("tracker")
+        elif "tags" in self.data: self.data["tracker"] = self.data.pop("tags")
+        else: self.data["tracker"] = {}
+        if "nohardlinks" in self.data: self.data["nohardlinks"] = self.data.pop("nohardlinks")
+        if "recyclebin" in self.data: self.data["recyclebin"] = self.data.pop("recyclebin")
+        if "orphaned" in self.data: self.data["orphaned"] = self.data.pop("orphaned")
+        if "apprise" in self.data: self.data["apprise"] = self.data.pop("apprise")
+        if "notifiarr" in self.data: self.data["notifiarr"] = self.data.pop("notifiarr")
+        if "webhooks" in self.data:
+            temp = self.data.pop("webhooks")
+            if 'function' not in temp or ('function' in temp and temp['function'] is None): temp["function"] = {}
+
+            def hooks(attr):
+                if attr in temp:
+                    items = temp.pop(attr)
+                    if items:
+                        temp["function"][attr] = items
+                if attr not in temp["function"]:
+                    temp["function"][attr] = {}
+                    temp["function"][attr] = None
+            hooks("cross_seed")
+            hooks("recheck")
+            hooks("cat_update")
+            hooks("tag_update")
+            hooks("rem_unregistered")
+            hooks("rem_orphaned")
+            hooks("tag_nohardlinks")
+            hooks("empty_recyclebin")
+            self.data["webhooks"] = temp
+        if "bhd" in self.data: self.data["bhd"] = self.data.pop("bhd")
 
         self.session = requests.Session()
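
The hooks() normalization folds legacy top-level webhook keys into webhooks["function"], so old and new config shapes resolve to the same structure. Illustrative values:

    before = {"error": "notifiarr", "cross_seed": "apprise"}
    # after hooks("cross_seed"), hooks("recheck"), ...:
    after = {
        "error": "notifiarr",
        "function": {
            "cross_seed": "apprise",  # moved under "function"
            "recheck": None,          # missing hooks default to None
            # ... one entry per hook name
        },
    }
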

@@ -105,6 +133,8 @@ class Config:
         for func in default_function:
             self.util.check_for_attribute(self.data, func, parent="webhooks", subparent="function", default_is_none=True)
 
+        self.cat_change = self.data["cat_change"] if "cat_change" in self.data else {}
+
         self.AppriseFactory = None
         if "apprise" in self.data:
             if self.data["apprise"] is not None:

@@ -137,7 +167,7 @@ class Config:
             try:
                 self.Webhooks.start_time_hooks(self.start_time)
             except Failed as e:
-                util.print_stacktrace()
+                logger.stacktrace()
                 logger.error(f"Webhooks Error: {e}")
 
         self.BeyondHD = None

@@ -155,7 +185,7 @@ class Config:
 
         # nohardlinks
         self.nohardlinks = None
-        if "nohardlinks" in self.data and self.args['tag_nohardlinks']:
+        if "nohardlinks" in self.data and self.commands['tag_nohardlinks']:
             self.nohardlinks = {}
             for cat in self.data["nohardlinks"]:
                 if cat in list(self.data["cat"].keys()):

@@ -176,7 +206,7 @@ class Config:
                     self.notify(e, 'Config')
                     raise Failed(e)
         else:
-            if self.args["tag_nohardlinks"]:
+            if self.commands["tag_nohardlinks"]:
                 e = "Config Error: nohardlinks attribute not found"
                 self.notify(e, 'Config')
                 raise Failed(e)

@@ -192,12 +222,12 @@ class Config:
         if "directory" in self.data:
             self.root_dir = os.path.join(self.util.check_for_attribute(self.data, "root_dir", parent="directory", default_is_none=True), '')
             self.remote_dir = os.path.join(self.util.check_for_attribute(self.data, "remote_dir", parent="directory", default=self.root_dir), '')
-            if (self.args["cross_seed"] or self.args["tag_nohardlinks"] or self.args["rem_orphaned"]):
+            if (self.commands["cross_seed"] or self.commands["tag_nohardlinks"] or self.commands["rem_orphaned"]):
                 self.remote_dir = self.util.check_for_attribute(self.data, "remote_dir", parent="directory", var_type="path", default=self.root_dir)
             else:
                 if self.recyclebin['enabled']:
                     self.remote_dir = self.util.check_for_attribute(self.data, "remote_dir", parent="directory", var_type="path", default=self.root_dir)
-            if self.args["cross_seed"]:
+            if self.commands["cross_seed"]:
                 self.cross_seed_dir = self.util.check_for_attribute(self.data, "cross_seed", parent="directory", var_type="path")
             else:
                 self.cross_seed_dir = self.util.check_for_attribute(self.data, "cross_seed", parent="directory", default_is_none=True)

@@ -331,12 +361,12 @@ class Config:
 
     # Empty the recycle bin
     def empty_recycle(self):
-        dry_run = self.args['dry_run']
+        dry_run = self.commands['dry_run']
         loglevel = 'DRYRUN' if dry_run else 'INFO'
         num_del = 0
         files = []
         size_bytes = 0
-        if not self.args["skip_recycle"]:
+        if not self.commands["skip_recycle"]:
             if self.recyclebin['enabled'] and self.recyclebin['empty_after_x_days']:
                 if self.recyclebin['split_by_category']:
                     if "cat" in self.data and self.data["cat"] is not None:

@@ -356,11 +386,11 @@ class Config:
             recycle_files = sorted(recycle_files)
             if recycle_files:
                 body = []
-                util.separator(f"Emptying Recycle Bin (Files > {self.recyclebin['empty_after_x_days']} days)", space=True, border=True)
+                logger.separator(f"Emptying Recycle Bin (Files > {self.recyclebin['empty_after_x_days']} days)", space=True, border=True)
                 prevfolder = ''
                 for file in recycle_files:
                     folder = re.search(f".*{os.path.basename(self.recycle_dir.rstrip(os.sep))}", file).group(0)
-                    if folder != prevfolder: body += util.separator(f"Searching: {folder}", space=False, border=False)
+                    if folder != prevfolder: body += logger.separator(f"Searching: {folder}", space=False, border=False)
                     fileStats = os.stat(file)
                     filename = os.path.basename(file)
                     last_modified = fileStats[stat.ST_MTIME]  # in seconds (last modified time)

@@ -368,7 +398,7 @@ class Config:
                     days = (now - last_modified) / (60 * 60 * 24)
                     if (self.recyclebin['empty_after_x_days'] <= days):
                         num_del += 1
-                        body += util.print_line(f"{'Did not delete' if dry_run else 'Deleted'} {filename} from {folder} (Last modified {round(days)} days ago).", loglevel)
+                        body += logger.print_line(f"{'Did not delete' if dry_run else 'Deleted'} {filename} from {folder} (Last modified {round(days)} days ago).", loglevel)
                         files += [str(filename)]
                         size_bytes += os.path.getsize(file)
                         if not dry_run: os.remove(file)
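
The age test above compares a file's last-modified time against the empty_after_x_days threshold. Isolated, the arithmetic is:

    import os, stat, time

    def older_than(path, days):
        last_modified = os.stat(path)[stat.ST_MTIME]            # seconds since epoch
        age_days = (time.time() - last_modified) / (60 * 60 * 24)
        return days <= age_days                                  # mirrors empty_after_x_days <= days
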

@@ -377,7 +407,7 @@ class Config:
             if not dry_run:
                 for path in recycle_path:
                     util.remove_empty_directories(path, "**/*")
-            body += util.print_line(f"{'Did not delete' if dry_run else 'Deleted'} {num_del} files ({util.human_readable_size(size_bytes)}) from the Recycle Bin.", loglevel)
+            body += logger.print_line(f"{'Did not delete' if dry_run else 'Deleted'} {num_del} files ({util.human_readable_size(size_bytes)}) from the Recycle Bin.", loglevel)
             attr = {
                 "function": "empty_recyclebin",
                 "title": f"Emptying Recycle Bin (Files > {self.recyclebin['empty_after_x_days']} days)",

@@ -403,7 +433,7 @@ class Config:
             if config_function:
                 self.Webhooks.function_hooks([config_webhooks[config_function]], attr)
         except Failed as e:
-            util.print_stacktrace()
+            logger.stacktrace()
             logger.error(f"Webhooks Error: {e}")
 
     def notify(self, text, function=None, critical=True):

@@ -411,7 +441,7 @@ class Config:
         try:
             self.Webhooks.error_hooks(error, function_error=function, critical=critical)
         except Failed as e:
-            util.print_stacktrace()
+            logger.stacktrace()
             logger.error(f"Webhooks Error: {e}")
 
     def get_json(self, url, json=None, headers=None, params=None):

modules/logs.py (new file, 256 lines)

@@ -0,0 +1,256 @@
import io, logging, os, re, sys, traceback
from logging.handlers import RotatingFileHandler

LOG_DIR = "logs"

CRITICAL = 50
FATAL = CRITICAL
ERROR = 40
WARNING = 30
WARN = WARNING
DRYRUN = 25
INFO = 20
DEBUG = 10


def fmt_filter(record):
    record.levelname = f"[{record.levelname}]"
    record.filename = f"[{record.filename}:{record.lineno}]"
    return True


_srcfile = os.path.normcase(fmt_filter.__code__.co_filename)


class MyLogger:
    def __init__(self, logger_name, log_file, log_level, default_dir, screen_width, separating_character, ignore_ghost, is_debug):
        self.logger_name = logger_name
        self.default_dir = default_dir
        self.screen_width = screen_width
        self.separating_character = separating_character
        self.is_debug = is_debug
        self.ignore_ghost = ignore_ghost
        self.log_dir = os.path.join(default_dir, LOG_DIR)
        self.main_log = log_file if os.path.exists(os.path.dirname(log_file)) else os.path.join(self.log_dir, log_file)
        self.main_handler = None
        self.save_errors = False
        self.saved_errors = []
        self.config_handlers = {}
        self.secrets = []
        self.spacing = 0
        os.makedirs(self.log_dir, exist_ok=True)
        self._logger = logging.getLogger(self.logger_name)
        logging.DRYRUN = DRYRUN
        logging.addLevelName(DRYRUN, 'DRYRUN')
        setattr(self._logger, 'dryrun', lambda dryrun, *args: self._logger._log(DRYRUN, dryrun, args))
        self._log_level = getattr(logging, log_level.upper())
        self._logger.setLevel(self._log_level)

        cmd_handler = logging.StreamHandler()
        cmd_handler.setLevel(self._log_level)

        self._logger.addHandler(cmd_handler)

    def clear_errors(self):
        self.saved_errors = []

    def _get_handler(self, log_file, count=3):
        max_bytes = 1024 * 1024 * 2
        _handler = RotatingFileHandler(log_file, delay=True, mode="w", maxBytes=max_bytes, backupCount=count, encoding="utf-8")
        self._formatter(_handler)
        # if os.path.isfile(log_file):
        #     _handler.doRollover()
        return _handler

    def _formatter(self, handler, border=True):
        text = f"| %(message)-{self.screen_width - 2}s |" if border else f"%(message)-{self.screen_width - 2}s"
        if isinstance(handler, RotatingFileHandler):
            text = f"[%(asctime)s] %(filename)-27s %(levelname)-10s {text}"
        handler.setFormatter(logging.Formatter(text))

    def add_main_handler(self):
        self.main_handler = self._get_handler(self.main_log, count=9)
        self.main_handler.addFilter(fmt_filter)
        self._logger.addHandler(self.main_handler)

    def remove_main_handler(self):
        self._logger.removeHandler(self.main_handler)

    def add_config_handler(self, config_key):
        if config_key in self.config_handlers:
            self._logger.addHandler(self.config_handlers[config_key])
        else:
            self.config_handlers[config_key] = self._get_handler(os.path.join(self.log_dir, config_key + '.log'))
            self._logger.addHandler(self.config_handlers[config_key])

    def remove_config_handler(self, config_key):
        if config_key in self.config_handlers:
            self._logger.removeHandler(self.config_handlers[config_key])

    def _centered(self, text, sep=" ", side_space=True, left=False):
        if len(text) > self.screen_width - 2:
            return text
        space = self.screen_width - len(text) - 2
        text = f"{' ' if side_space else sep}{text}{' ' if side_space else sep}"
        if space % 2 == 1:
            text += sep
            space -= 1
        side = int(space / 2) - 1
        final_text = f"{text}{sep * side}{sep * side}" if left else f"{sep * side}{text}{sep * side}"
        return final_text

    def separator(self, text=None, space=True, border=True, side_space=True, left=False, loglevel='INFO'):
        sep = " " if space else self.separating_character
        for handler in self._logger.handlers:
            self._formatter(handler, border=False)
        border_text = f"|{self.separating_character * self.screen_width}|"
        if border:
            self.print_line(border_text, loglevel)
        if text:
            text_list = text.split("\n")
            for t in text_list:
                self.print_line(f"|{sep}{self._centered(t, sep=sep, side_space=side_space, left=left)}{sep}|", loglevel)
        if border:
            self.print_line(border_text, loglevel)
        for handler in self._logger.handlers:
            self._formatter(handler)
        return [text]

    def print_line(self, msg, loglevel='INFO', *args, **kwargs):
        loglvl = getattr(logging, loglevel.upper())
        if self._logger.isEnabledFor(loglvl):
            self._log(loglvl, str(msg), args, **kwargs)
        return [str(msg)]

    def debug(self, msg, *args, **kwargs):
        if self._logger.isEnabledFor(DEBUG):
            self._log(DEBUG, str(msg), args, **kwargs)

    def info_center(self, msg, *args, **kwargs):
        self.info(self._centered(str(msg)), *args, **kwargs)

    def info(self, msg, *args, **kwargs):
        if self._logger.isEnabledFor(INFO):
            self._log(INFO, str(msg), args, **kwargs)

    def dryrun(self, msg, *args, **kwargs):
        if self._logger.isEnabledFor(DRYRUN):
            self._log(DRYRUN, str(msg), args, **kwargs)

    def warning(self, msg, *args, **kwargs):
        if self._logger.isEnabledFor(WARNING):
            self._log(WARNING, str(msg), args, **kwargs)

    def error(self, msg, *args, **kwargs):
        if self.save_errors:
            self.saved_errors.append(msg)
        if self._logger.isEnabledFor(ERROR):
            self._log(ERROR, str(msg), args, **kwargs)

    def critical(self, msg, *args, **kwargs):
        if self.save_errors:
            self.saved_errors.append(msg)
        if self._logger.isEnabledFor(CRITICAL):
            self._log(CRITICAL, str(msg), args, **kwargs)

    def stacktrace(self):
        self.debug(traceback.format_exc())

    def _space(self, display_title):
        display_title = str(display_title)
        space_length = self.spacing - len(display_title)
        if space_length > 0:
            display_title += " " * space_length
        return display_title

    def ghost(self, text):
        if not self.ignore_ghost:
            try:
                final_text = f"| {text}"
            except UnicodeEncodeError:
                text = text.encode("utf-8")
                final_text = f"| {text}"
            print(self._space(final_text), end="\r")
            self.spacing = len(text) + 2

    def exorcise(self):
        if not self.ignore_ghost:
            print(self._space(" "), end="\r")
            self.spacing = 0

    def secret(self, text):
        if str(text) not in self.secrets:
            self.secrets.append(str(text))

    def insert_space(self, display_title, space_length=0):
        display_title = str(display_title)
        if space_length == 0:
            space_length = self.spacing - len(display_title)
        if space_length > 0:
            display_title = " " * space_length + display_title
        return display_title

    def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False, stacklevel=1):
        if self.spacing > 0:
            self.exorcise()
        if "\n" in msg:
            for i, line in enumerate(msg.split("\n")):
                self._log(level, line, args, exc_info=exc_info, extra=extra, stack_info=stack_info, stacklevel=stacklevel)
                if i == 0:
                    for handler in self._logger.handlers:
                        if isinstance(handler, RotatingFileHandler):
                            handler.setFormatter(logging.Formatter(" " * 65 + "| %(message)s"))
            for handler in self._logger.handlers:
                if isinstance(handler, RotatingFileHandler):
                    handler.setFormatter(logging.Formatter("[%(asctime)s] %(filename)-27s %(levelname)-10s | %(message)s"))
        else:
            for secret in self.secrets:
                if secret in msg:
                    msg = msg.replace(secret, "(redacted)")
            if "HTTPConnectionPool" in msg:
                msg = re.sub("HTTPConnectionPool\\((.*?)\\)", "HTTPConnectionPool(redacted)", msg)
            if "HTTPSConnectionPool" in msg:
                msg = re.sub("HTTPSConnectionPool\\((.*?)\\)", "HTTPSConnectionPool(redacted)", msg)
            try:
                if not _srcfile:
                    raise ValueError
                fn, lno, func, sinfo = self.findCaller(stack_info, stacklevel)
            except ValueError:
                fn, lno, func, sinfo = "(unknown file)", 0, "(unknown function)", None
            if exc_info:
                if isinstance(exc_info, BaseException):
                    exc_info = (type(exc_info), exc_info, exc_info.__traceback__)
                elif not isinstance(exc_info, tuple):
                    exc_info = sys.exc_info()
            record = self._logger.makeRecord(self._logger.name, level, fn, lno, msg, args, exc_info, func, extra, sinfo)
            self._logger.handle(record)

    def findCaller(self, stack_info=False, stacklevel=1):
        f = logging.currentframe()
        if f is not None:
            f = f.f_back
        orig_f = f
        while f and stacklevel > 1:
            f = f.f_back
            stacklevel -= 1
        if not f:
            f = orig_f
        rv = "(unknown file)", 0, "(unknown function)", None
        while hasattr(f, "f_code"):
            co = f.f_code
            filename = os.path.normcase(co.co_filename)
            if filename == _srcfile:
                f = f.f_back
                continue
            sinfo = None
            if stack_info:
                sio = io.StringIO()
                sio.write('Stack (most recent call last):\n')
                traceback.print_stack(f, file=sio)
                sinfo = sio.getvalue()
                if sinfo[-1] == '\n':
                    sinfo = sinfo[:-1]
                sio.close()
            rv = (co.co_filename, f.f_lineno, co.co_name, sinfo)
            break
        return rv
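
Typical wiring for the new logger, as a sketch; the actual bootstrap in qbit_manage.py is not part of this mirror, but the constructor signature matches the one defined above:

    from modules.logs import MyLogger

    logger = MyLogger("qBit Manage", "qbit_manage.log", "INFO", ".", 100, "=", False, False)
    logger.add_main_handler()
    logger.separator("Run Start")
    logger.secret("hunter2")                         # register the credential once
    logger.info("Logging in with password hunter2")  # emitted as "... password (redacted)"
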

modules/notifiarr.py

@@ -1,9 +1,9 @@
-import logging
+from modules import util
 
 from modules.util import Failed
 from json import JSONDecodeError
 
-logger = logging.getLogger("qBit Manage")
+logger = util.logger
 
 base_url = "https://notifiarr.com/api/v1/"
 dev_url = "https://dev.notifiarr.com/api/v1/"

@@ -16,6 +16,7 @@ class Notifiarr:
         self.develop = params["develop"]
         self.test = params["test"]
         self.instance = params["instance"]
+        logger.secret(self.apikey)
         url, _ = self.get_url("user/validate/")
         response = self.config.get(url)
         response_json = None

@@ -38,7 +39,7 @@ class Notifiarr:
     def get_url(self, path):
         url = f"{dev_url if self.develop else base_url}{'notification/test' if self.test else f'{path}{self.apikey}'}"
         if self.config.trace_mode:
-            logger.debug(url.replace(self.apikey, "APIKEY"))
+            logger.debug(url)
         if self.test:
             params = {"event": f"qbitManage-{self.apikey[:5]}", "qbit_client": self.config.data["qbt"]["host"], "instance": self.instance}
         else:

modules/qbittorrent.py

@@ -1,13 +1,13 @@
-import logging, os, sys
+import os, sys
 from qbittorrentapi import Client, Version, LoginFailed, APIConnectionError, NotFound404Error, Conflict409Error
 from modules import util
-from modules.util import Failed, print_line, print_multiline, separator, list_in_text
+from modules.util import Failed, list_in_text
 from datetime import timedelta
 from collections import Counter
 from fnmatch import fnmatch
 from alive_progress import alive_it, config_handler
 
-logger = logging.getLogger("qBit Manage")
+logger = util.logger
 
 
 class Qbt:

@@ -18,7 +18,9 @@ class Qbt:
         self.host = params["host"]
         self.username = params["username"]
         self.password = params["password"]
-        logger.debug(f'Host: {self.host}, Username: {self.username}, Password: {self.password if self.password is None else "[REDACTED]"}')
+        logger.secret(self.username)
+        logger.secret(self.password)
+        logger.debug(f'Host: {self.host}, Username: {self.username}, Password: {self.password}')
         try:
             self.client = Client(host=self.host, username=self.username, password=self.password, VERIFY_WEBUI_CERTIFICATE=False)
             self.client.auth_log_in()

@@ -32,7 +34,7 @@ class Qbt:
                 e = (f"Qbittorrent Error: qbit_manage is only compatible with {SUPPORTED_VERSION} or lower. You are currently on {CURRENT_VERSION}." + '\n'
                      + f"Please downgrade your qBittorrent version to {SUPPORTED_VERSION} to use qbit_manage.")
                 self.config.notify(e, "Qbittorrent")
-                print_multiline(e, 'CRITICAL')
+                logger.print_line(e, 'CRITICAL')
                 sys.exit(0)
             logger.info("Qbt Connection Successful")
         except LoginFailed:

@@ -47,7 +49,7 @@ class Qbt:
                 e = "Qbittorrent Error: Unable to connect to the client."
                 self.config.notify(e, "Qbittorrent")
                 raise Failed(e)
-        separator("Getting Torrent List", space=False, border=False)
+        logger.separator("Getting Torrent List", space=False, border=False)
         self.torrent_list = self.get_torrents({'sort': 'added_on'})
 
         # Will create a 2D Dictionary with the torrent name as the key

@@ -67,17 +69,17 @@ class Qbt:
         # is_complete = Returns the state of the torrent (True if at least one torrent with this name has a state categorized as Complete.)
         # first_hash = Returns the hash number of the original torrent (Assuming the torrent list is sorted by date added (Asc))
         def get_torrent_info(torrent_list):
-            dry_run = self.config.args['dry_run']
+            dry_run = self.config.commands['dry_run']
             loglevel = 'DRYRUN' if dry_run else 'INFO'
             torrentdict = {}
             t_obj_unreg = []
             t_obj_valid = []
             t_obj_list = []
             settings = self.config.settings
-            separator("Checking Settings", space=False, border=False)
+            logger.separator("Checking Settings", space=False, border=False)
             if settings['force_auto_tmm']:
-                print_line('force_auto_tmm set to True. Will force Auto Torrent Management for all torrents.', loglevel)
-            separator("Gathering Torrent Information", space=True, border=True)
+                logger.print_line('force_auto_tmm set to True. Will force Auto Torrent Management for all torrents.', loglevel)
+            logger.separator("Gathering Torrent Information", space=True, border=True)
             for torrent in alive_it(torrent_list):
                 is_complete = False
                 msg = None

@@ -150,7 +152,7 @@ class Qbt:
         self.torrentinfo = None
         self.torrentissue = None
         self.torrentvalid = None
-        if config.args['recheck'] or config.args['cross_seed'] or config.args['rem_unregistered'] or config.args['tag_tracker_error'] or config.args['tag_nohardlinks']:
+        if config.commands['recheck'] or config.commands['cross_seed'] or config.commands['rem_unregistered'] or config.commands['tag_tracker_error'] or config.commands['tag_nohardlinks']:
             # Get an updated torrent dictionary information of the torrents
             self.torrentinfo, self.torrentissue, self.torrentvalid = get_torrent_info(self.torrent_list)
 

@@ -158,53 +160,73 @@ class Qbt:
         return self.client.torrents.info(**params)
 
     def category(self):
-        dry_run = self.config.args['dry_run']
+        dry_run = self.config.commands['dry_run']
         loglevel = 'DRYRUN' if dry_run else 'INFO'
         num_cat = 0
-        if self.config.args['cat_update']:
-            separator("Updating Categories", space=False, border=False)
+
+        def update_cat(new_cat, cat_change):
+            nonlocal dry_run, torrent, num_cat
+            tracker = self.config.get_tags([x.url for x in torrent.trackers if x.url.startswith('http')])
+            old_cat = torrent.category
+            if not dry_run:
+                try:
+                    torrent.set_category(category=new_cat)
+                    if torrent.auto_tmm is False and self.config.settings['force_auto_tmm']:
+                        torrent.set_auto_management(True)
+                except Conflict409Error:
+                    e = logger.print_line(f'Existing category "{new_cat}" not found for save path {torrent.save_path}, category will be created.', loglevel)
+                    self.config.notify(e, 'Update Category', False)
+                    self.client.torrent_categories.create_category(name=new_cat, save_path=torrent.save_path)
+                    torrent.set_category(category=new_cat)
+            body = []
+            body += logger.print_line(logger.insert_space(f'Torrent Name: {torrent.name}', 3), loglevel)
+            if cat_change:
+                body += logger.print_line(logger.insert_space(f'Old Category: {old_cat}', 3), loglevel)
+                title = "Moving Categories"
+            else:
+                title = "Updating Categories"
+            body += logger.print_line(logger.insert_space(f'New Category: {new_cat}', 3), loglevel)
+            body += logger.print_line(logger.insert_space(f'Tracker: {tracker["url"]}', 8), loglevel)
+            attr = {
+                "function": "cat_update",
+                "title": title,
+                "body": "\n".join(body),
+                "torrent_name": torrent.name,
+                "torrent_category": new_cat,
+                "torrent_tracker": tracker["url"],
+                "notifiarr_indexer": tracker["notifiarr"]
+            }
+            self.config.send_notifications(attr)
+            num_cat += 1
+
+        if self.config.commands['cat_update']:
+            logger.separator("Updating Categories", space=False, border=False)
             torrent_list = self.get_torrents({'category': '', 'filter': 'completed'})
             for torrent in torrent_list:
                 new_cat = self.config.get_category(torrent.save_path)
-                tracker = self.config.get_tags([x.url for x in torrent.trackers if x.url.startswith('http')])
-                if not dry_run:
-                    try:
-                        torrent.set_category(category=new_cat)
-                        if torrent.auto_tmm is False and self.config.settings['force_auto_tmm']:
-                            torrent.set_auto_management(True)
-                    except Conflict409Error:
-                        e = print_line(f'Existing category "{new_cat}" not found for save path {torrent.save_path}, category will be created.', loglevel)
-                        self.config.notify(e, 'Update Category', False)
-                        self.client.torrent_categories.create_category(name=new_cat, save_path=torrent.save_path)
-                        torrent.set_category(category=new_cat)
-                body = []
-                body += print_line(util.insert_space(f'Torrent Name: {torrent.name}', 3), loglevel)
-                body += print_line(util.insert_space(f'New Category: {new_cat}', 3), loglevel)
-                body += print_line(util.insert_space(f'Tracker: {tracker["url"]}', 8), loglevel)
-                attr = {
-                    "function": "cat_update",
-                    "title": "Updating Categories",
-                    "body": "\n".join(body),
-                    "torrent_name": torrent.name,
-                    "torrent_category": new_cat,
-                    "torrent_tracker": tracker["url"],
-                    "notifiarr_indexer": tracker["notifiarr"]
-                }
-                self.config.send_notifications(attr)
-                num_cat += 1
+                update_cat(new_cat, False)
+
+            # Change categories
+            if self.config.cat_change:
+                for old_cat in self.config.cat_change:
+                    torrent_list = self.get_torrents({'category': old_cat, 'filter': 'completed'})
+                    for torrent in torrent_list:
+                        new_cat = self.config.cat_change[old_cat]
+                        update_cat(new_cat, True)
 
             if num_cat >= 1:
-                print_line(f"{'Did not update' if dry_run else 'Updated'} {num_cat} new categories.", loglevel)
+                logger.print_line(f"{'Did not update' if dry_run else 'Updated'} {num_cat} new categories.", loglevel)
             else:
-                print_line('No new torrents to categorize.', loglevel)
+                logger.print_line('No new torrents to categorize.', loglevel)
             return num_cat
 
     def tags(self):
-        dry_run = self.config.args['dry_run']
+        dry_run = self.config.commands['dry_run']
         loglevel = 'DRYRUN' if dry_run else 'INFO'
         num_tags = 0
         ignore_tags = self.config.settings['ignoreTags_OnUpdate']
-        if self.config.args['tag_update']:
-            separator("Updating Tags", space=False, border=False)
+        if self.config.commands['tag_update']:
+            logger.separator("Updating Tags", space=False, border=False)
             for torrent in self.torrent_list:
                 check_tags = util.get_list(torrent.tags)
                 if torrent.tags == '' or (len([x for x in check_tags if x not in ignore_tags]) == 0):
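
update_cat is a closure over the method's local state: nonlocal lets it rebind num_cat and read the loop's current torrent instead of creating new locals. The pattern in isolation:

    def category():
        num_cat = 0

        def update_cat():
            nonlocal num_cat   # rebind the enclosing variable, not a new local
            num_cat += 1

        for _ in range(3):
            update_cat()
        return num_cat         # 3
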

@@ -212,9 +234,9 @@ class Qbt:
                     if tracker["tag"]:
                         num_tags += len(tracker["tag"])
                         body = []
-                        body += print_line(util.insert_space(f'Torrent Name: {torrent.name}', 3), loglevel)
-                        body += print_line(util.insert_space(f'New Tag{"s" if len(tracker["tag"]) > 1 else ""}: {", ".join(tracker["tag"])}', 8), loglevel)
-                        body += print_line(util.insert_space(f'Tracker: {tracker["url"]}', 8), loglevel)
+                        body += logger.print_line(logger.insert_space(f'Torrent Name: {torrent.name}', 3), loglevel)
+                        body += logger.print_line(logger.insert_space(f'New Tag{"s" if len(tracker["tag"]) > 1 else ""}: {", ".join(tracker["tag"])}', 8), loglevel)
+                        body += logger.print_line(logger.insert_space(f'Tracker: {tracker["url"]}', 8), loglevel)
                         body.extend(self.set_tags_and_limits(torrent, tracker["max_ratio"], tracker["max_seeding_time"], tracker["limit_upload_speed"], tracker["tag"]))
                         category = self.config.get_category(torrent.save_path) if torrent.category == '' else torrent.category
                         attr = {

@@ -232,29 +254,29 @@ class Qbt:
                         }
                         self.config.send_notifications(attr)
             if num_tags >= 1:
-                print_line(f"{'Did not update' if dry_run else 'Updated'} {num_tags} new tags.", loglevel)
+                logger.print_line(f"{'Did not update' if dry_run else 'Updated'} {num_tags} new tags.", loglevel)
             else:
-                print_line('No new torrents to tag.', loglevel)
+                logger.print_line('No new torrents to tag.', loglevel)
             return num_tags
 
     def set_tags_and_limits(self, torrent, max_ratio, max_seeding_time, limit_upload_speed=None, tags=None, restore=False):
-        dry_run = self.config.args['dry_run']
+        dry_run = self.config.commands['dry_run']
         loglevel = 'DRYRUN' if dry_run else 'INFO'
         body = []
         # Print Logs
         if limit_upload_speed:
-            if limit_upload_speed == -1: body += print_line(util.insert_space('Limit UL Speed: Infinity', 1), loglevel)
-            else: body += print_line(util.insert_space(f'Limit UL Speed: {limit_upload_speed} kB/s', 1), loglevel)
+            if limit_upload_speed == -1: body += logger.print_line(logger.insert_space('Limit UL Speed: Infinity', 1), loglevel)
+            else: body += logger.print_line(logger.insert_space(f'Limit UL Speed: {limit_upload_speed} kB/s', 1), loglevel)
         if max_ratio or max_seeding_time:
-            if (max_ratio == -2 or max_seeding_time == -2) and not restore: body += print_line(util.insert_space('Share Limit: Use Global Share Limit', 4), loglevel)
-            elif (max_ratio == -1 or max_seeding_time == -1) and not restore: body += print_line(util.insert_space('Share Limit: Set No Share Limit', 4), loglevel)
+            if (max_ratio == -2 or max_seeding_time == -2) and not restore: body += logger.print_line(logger.insert_space('Share Limit: Use Global Share Limit', 4), loglevel)
+            elif (max_ratio == -1 or max_seeding_time == -1) and not restore: body += logger.print_line(logger.insert_space('Share Limit: Set No Share Limit', 4), loglevel)
             else:
                 if max_ratio != torrent.max_ratio and (not max_seeding_time or max_seeding_time < 0):
-                    body += print_line(util.insert_space(f'Share Limit: Max Ratio = {max_ratio}', 4), loglevel)
+                    body += logger.print_line(logger.insert_space(f'Share Limit: Max Ratio = {max_ratio}', 4), loglevel)
                 elif max_seeding_time != torrent.max_seeding_time and (not max_ratio or max_ratio < 0):
-                    body += print_line(util.insert_space(f'Share Limit: Max Seed Time = {max_seeding_time} min', 4), loglevel)
+                    body += logger.print_line(logger.insert_space(f'Share Limit: Max Seed Time = {max_seeding_time} min', 4), loglevel)
                 elif max_ratio != torrent.max_ratio and max_seeding_time != torrent.max_seeding_time:
-                    body += print_line(util.insert_space(f'Share Limit: Max Ratio = {max_ratio}, Max Seed Time = {max_seeding_time} min', 4), loglevel)
+                    body += logger.print_line(logger.insert_space(f'Share Limit: Max Ratio = {max_ratio}, Max Seed Time = {max_seeding_time} min', 4), loglevel)
         # Update Torrents
         if not dry_run:
             if tags: torrent.add_tags(tags)

@@ -274,15 +296,15 @@ class Qbt:
         return body
 
     def tag_nohardlinks(self):
-        dry_run = self.config.args['dry_run']
+        dry_run = self.config.commands['dry_run']
         loglevel = 'DRYRUN' if dry_run else 'INFO'
         num_tags = 0  # counter for the number of torrents that have no hard links
         del_tor = 0  # counter for the number of torrents that have no hard links and meet the ratio limit/seed limit criteria for deletion
         del_tor_cont = 0  # counter for the number of torrents that have no hard links and meet the ratio limit/seed limit criteria for deletion, including contents
         num_untag = 0  # counter for the number of torrents that previously had no hard links but now have hard links
 
-        if self.config.args['tag_nohardlinks']:
-            util.separator("Tagging Torrents with No Hardlinks", space=False, border=False)
+        if self.config.commands['tag_nohardlinks']:
+            logger.separator("Tagging Torrents with No Hardlinks", space=False, border=False)
             nohardlinks = self.config.nohardlinks
             tdel_dict = {}  # dictionary to track the torrent names and content path that meet the deletion criteria
             root_dir = self.config.root_dir

@@ -307,9 +329,9 @@ class Qbt:
                         if 'noHL' not in torrent.tags:
                             num_tags += 1
                             body = []
-                            body += print_line(util.insert_space(f'Torrent Name: {torrent.name}', 3), loglevel)
-                            body += print_line(util.insert_space('Added Tag: noHL', 6), loglevel)
-                            body += print_line(util.insert_space(f'Tracker: {tracker["url"]}', 8), loglevel)
+                            body += logger.print_line(logger.insert_space(f'Torrent Name: {torrent.name}', 3), loglevel)
+                            body += logger.print_line(logger.insert_space('Added Tag: noHL', 6), loglevel)
+                            body += logger.print_line(logger.insert_space(f'Tracker: {tracker["url"]}', 8), loglevel)
                             body.extend(self.set_tags_and_limits(torrent, nohardlinks[category]["max_ratio"],
                                         nohardlinks[category]["max_seeding_time"], nohardlinks[category]["limit_upload_speed"], tags='noHL'))
                             attr = {

@@ -336,10 +358,10 @@ class Qbt:
                     if (not (util.nohardlink(torrent['content_path'].replace(root_dir, root_dir))) and ('noHL' in torrent.tags)):
                         num_untag += 1
                         body = []
-                        body += print_line(f'Previous Tagged noHL Torrent Name: {torrent.name} has hard links found now.', loglevel)
-                        body += print_line(util.insert_space('Removed Tag: noHL', 6), loglevel)
-                        body += print_line(util.insert_space(f'Tracker: {tracker["url"]}', 8), loglevel)
-                        body += print_line(f"{'Not Reverting' if dry_run else 'Reverting'} share limits.", loglevel)
+                        body += logger.print_line(f'Previous Tagged noHL Torrent Name: {torrent.name} has hard links found now.', loglevel)
+                        body += logger.print_line(logger.insert_space('Removed Tag: noHL', 6), loglevel)
+                        body += logger.print_line(logger.insert_space(f'Tracker: {tracker["url"]}', 8), loglevel)
+                        body += logger.print_line(f"{'Not Reverting' if dry_run else 'Reverting'} share limits.", loglevel)
                         restore_max_ratio = tracker["max_ratio"]
                         restore_max_seeding_time = tracker["max_seeding_time"]
                         restore_limit_upload_speed = tracker["limit_upload_speed"]

@@ -376,9 +398,9 @@ class Qbt:
                     if torrent['content_path'].replace(root_dir, root_dir) == tdel_dict[t_name]:
                         tracker = self.config.get_tags([x.url for x in torrent.trackers if x.url.startswith('http')])
                         body = []
-                        body += print_line(util.insert_space(f'Torrent Name: {t_name}', 3), loglevel)
-                        body += print_line(util.insert_space(f'Tracker: {tracker["url"]}', 8), loglevel)
-                        body += print_line(util.insert_space("Cleanup: True [No hard links found and meets Share Limits.]", 8), loglevel)
+                        body += logger.print_line(logger.insert_space(f'Torrent Name: {t_name}', 3), loglevel)
+                        body += logger.print_line(logger.insert_space(f'Tracker: {tracker["url"]}', 8), loglevel)
+                        body += logger.print_line(logger.insert_space("Cleanup: True [No hard links found and meets Share Limits.]", 8), loglevel)
                         attr = {
                             "function": "cleanup_tag_nohardlinks",
                             "title": "Removing NoHL Torrents and meets Share Limits",

@@ -394,31 +416,31 @@ class Qbt:
                                 del_tor += 1
                                 attr["torrents_deleted_and_contents"] = False
                                 if not dry_run: self.tor_delete_recycle(torrent, attr)
-                                body += print_line(util.insert_space('Deleted .torrent but NOT content files.', 8), loglevel)
+                                body += logger.print_line(logger.insert_space('Deleted .torrent but NOT content files.', 8), loglevel)
                             else:
                                 del_tor_cont += 1
                                 attr["torrents_deleted_and_contents"] = True
                                 if not dry_run: self.tor_delete_recycle(torrent, attr)
-                                body += print_line(util.insert_space('Deleted .torrent AND content files.', 8), loglevel)
+                                body += logger.print_line(logger.insert_space('Deleted .torrent AND content files.', 8), loglevel)
                         else:
                             del_tor += 1
                             attr["torrents_deleted_and_contents"] = False
                             if not dry_run: self.tor_delete_recycle(torrent, attr)
-                            body += print_line(util.insert_space('Deleted .torrent but NOT content files.', 8), loglevel)
+                            body += logger.print_line(logger.insert_space('Deleted .torrent but NOT content files.', 8), loglevel)
                         attr["body"] = "\n".join(body)
                         self.config.send_notifications(attr)
                         self.torrentinfo[t_name]['count'] -= 1
             if num_tags >= 1:
-                print_line(f"{'Did not Tag/set' if dry_run else 'Tag/set'} share limits for {num_tags} .torrent{'s.' if num_tags > 1 else '.'}", loglevel)
+                logger.print_line(f"{'Did not Tag/set' if dry_run else 'Tag/set'} share limits for {num_tags} .torrent{'s.' if num_tags > 1 else '.'}", loglevel)
             else:
-                print_line('No torrents to tag with no hard links.', loglevel)
-            if num_untag >= 1: print_line(f"{'Did not delete' if dry_run else 'Deleted'} noHL tags / share limits for {num_untag} .torrent{'s.' if num_untag > 1 else '.'}", loglevel)
-            if del_tor >= 1: print_line(f"{'Did not delete' if dry_run else 'Deleted'} {del_tor} .torrent{'s' if del_tor > 1 else ''} but not content files.", loglevel)
-            if del_tor_cont >= 1: print_line(f"{'Did not delete' if dry_run else 'Deleted'} {del_tor_cont} .torrent{'s' if del_tor_cont > 1 else ''} AND content files.", loglevel)
+                logger.print_line('No torrents to tag with no hard links.', loglevel)
+            if num_untag >= 1: logger.print_line(f"{'Did not delete' if dry_run else 'Deleted'} noHL tags / share limits for {num_untag} .torrent{'s.' if num_untag > 1 else '.'}", loglevel)
+            if del_tor >= 1: logger.print_line(f"{'Did not delete' if dry_run else 'Deleted'} {del_tor} .torrent{'s' if del_tor > 1 else ''} but not content files.", loglevel)
+            if del_tor_cont >= 1: logger.print_line(f"{'Did not delete' if dry_run else 'Deleted'} {del_tor_cont} .torrent{'s' if del_tor_cont > 1 else ''} AND content files.", loglevel)
         return num_tags, num_untag, del_tor, del_tor_cont
 
     def rem_unregistered(self):
-        dry_run = self.config.args['dry_run']
+        dry_run = self.config.commands['dry_run']
         loglevel = 'DRYRUN' if dry_run else 'INFO'
         del_tor = 0
         del_tor_cont = 0

@@ -426,16 +448,16 @@ class Qbt:
         num_untag = 0
         tor_error_summary = ''
         tag_error = self.config.settings['tracker_error_tag']
-        cfg_rem_unregistered = self.config.args['rem_unregistered']
-        cfg_tag_error = self.config.args['tag_tracker_error']
+        cfg_rem_unregistered = self.config.commands['rem_unregistered']
+        cfg_tag_error = self.config.commands['tag_tracker_error']
 
         def tag_tracker_error():
             nonlocal dry_run, t_name, msg_up, msg, tracker, t_cat, torrent, tag_error, tor_error_summary, num_tor_error
             tor_error = ''
-            tor_error += (util.insert_space(f'Torrent Name: {t_name}', 3)+'\n')
-            tor_error += (util.insert_space(f'Status: {msg}', 9)+'\n')
-            tor_error += (util.insert_space(f'Tracker: {tracker["url"]}', 8)+'\n')
-            tor_error += (util.insert_space(f"Added Tag: {tag_error}", 6)+'\n')
+            tor_error += (logger.insert_space(f'Torrent Name: {t_name}', 3)+'\n')
+            tor_error += (logger.insert_space(f'Status: {msg}', 9)+'\n')
+            tor_error += (logger.insert_space(f'Tracker: {tracker["url"]}', 8)+'\n')
+            tor_error += (logger.insert_space(f"Added Tag: {tag_error}", 6)+'\n')
             tor_error_summary += tor_error
             num_tor_error += 1
             attr = {

@@ -455,9 +477,9 @@ class Qbt:
         def del_unregistered():
             nonlocal dry_run, loglevel, del_tor, del_tor_cont, t_name, msg_up, msg, tracker, t_cat, t_msg, t_status, torrent
             body = []
-            body += print_line(util.insert_space(f'Torrent Name: {t_name}', 3), loglevel)
-            body += print_line(util.insert_space(f'Status: {msg}', 9), loglevel)
-            body += print_line(util.insert_space(f'Tracker: {tracker["url"]}', 8), loglevel)
+            body += logger.print_line(logger.insert_space(f'Torrent Name: {t_name}', 3), loglevel)
+            body += logger.print_line(logger.insert_space(f'Status: {msg}', 9), loglevel)
+            body += logger.print_line(logger.insert_space(f'Tracker: {tracker["url"]}', 8), loglevel)
             attr = {
                 "function": "rem_unregistered",
                 "title": "Removing Unregistered Torrents",

@@ -472,25 +494,25 @@ class Qbt:
                 if '' in t_msg or 2 in t_status:
                     attr["torrents_deleted_and_contents"] = False
                     if not dry_run: self.tor_delete_recycle(torrent, attr)
-                    body += print_line(util.insert_space('Deleted .torrent but NOT content files.', 8), loglevel)
+                    body += logger.print_line(logger.insert_space('Deleted .torrent but NOT content files.', 8), loglevel)
                     del_tor += 1
                 else:
                     attr["torrents_deleted_and_contents"] = True
                     if not dry_run: self.tor_delete_recycle(torrent, attr)
-                    body += print_line(util.insert_space('Deleted .torrent AND content files.', 8), loglevel)
+                    body += logger.print_line(logger.insert_space('Deleted .torrent AND content files.', 8), loglevel)
                     del_tor_cont += 1
             else:
                 attr["torrents_deleted_and_contents"] = True
                 if not dry_run: self.tor_delete_recycle(torrent, attr)
-                body += print_line(util.insert_space('Deleted .torrent AND content files.', 8), loglevel)
+                body += logger.print_line(logger.insert_space('Deleted .torrent AND content files.', 8), loglevel)
                 del_tor_cont += 1
             attr["body"] = "\n".join(body)
             self.config.send_notifications(attr)
             self.torrentinfo[t_name]['count'] -= 1
 
         if cfg_rem_unregistered or cfg_tag_error:
-            if cfg_tag_error: separator("Tagging Torrents with Tracker Errors", space=False, border=False)
-            elif cfg_rem_unregistered: separator("Removing Unregistered Torrents", space=False, border=False)
+            if cfg_tag_error: logger.separator("Tagging Torrents with Tracker Errors", space=False, border=False)
+            elif cfg_rem_unregistered: logger.separator("Removing Unregistered Torrents", space=False, border=False)
             unreg_msgs = [
                 'UNREGISTERED',
                 'TORRENT NOT FOUND',
|
|
@@ -520,10 +542,10 @@ class Qbt:
                     tracker = self.config.get_tags([x.url for x in torrent.trackers if x.url.startswith('http')])
                     num_untag += 1
                     body = []
-                    body += print_line(f'Previous Tagged {tag_error} torrent currently has a working tracker.', loglevel)
-                    body += print_line(util.insert_space(f'Torrent Name: {torrent.name}', 3), loglevel)
-                    body += print_line(util.insert_space(f'Removed Tag: {tag_error}', 4), loglevel)
-                    body += print_line(util.insert_space(f'Tracker: {tracker["url"]}', 8), loglevel)
+                    body += logger.print_line(f'Previous Tagged {tag_error} torrent currently has a working tracker.', loglevel)
+                    body += logger.print_line(logger.insert_space(f'Torrent Name: {torrent.name}', 3), loglevel)
+                    body += logger.print_line(logger.insert_space(f'Removed Tag: {tag_error}', 4), loglevel)
+                    body += logger.print_line(logger.insert_space(f'Tracker: {tracker["url"]}', 8), loglevel)
                     if not dry_run: torrent.remove_tags(tags=tag_error)
                     attr = {
                         "function": "untag_tracker_error",
@@ -570,29 +592,29 @@ class Qbt:
             except NotFound404Error:
                 continue
             except Exception as e:
-                util.print_stacktrace()
+                logger.stacktrace()
                 self.config.notify(e, 'Remove Unregistered Torrents', False)
                 logger.error(f"Unknown Error: {e}")
         if cfg_rem_unregistered:
             if del_tor >= 1 or del_tor_cont >= 1:
-                if del_tor >= 1: print_line(f"{'Did not delete' if dry_run else 'Deleted'} {del_tor} .torrent{'s' if del_tor > 1 else ''} but not content files.", loglevel)
-                if del_tor_cont >= 1: print_line(f"{'Did not delete' if dry_run else 'Deleted'} {del_tor_cont} .torrent{'s' if del_tor_cont > 1 else ''} AND content files.", loglevel)
+                if del_tor >= 1: logger.print_line(f"{'Did not delete' if dry_run else 'Deleted'} {del_tor} .torrent{'s' if del_tor > 1 else ''} but not content files.", loglevel)
+                if del_tor_cont >= 1: logger.print_line(f"{'Did not delete' if dry_run else 'Deleted'} {del_tor_cont} .torrent{'s' if del_tor_cont > 1 else ''} AND content files.", loglevel)
             else:
-                print_line('No unregistered torrents found.', loglevel)
-            if num_untag >= 1: print_line(f"{'Did not delete' if dry_run else 'Deleted'} {tag_error} tags for {num_untag} .torrent{'s.' if num_untag > 1 else '.'}", loglevel)
+                logger.print_line('No unregistered torrents found.', loglevel)
+            if num_untag >= 1: logger.print_line(f"{'Did not delete' if dry_run else 'Deleted'} {tag_error} tags for {num_untag} .torrent{'s.' if num_untag > 1 else '.'}", loglevel)
         if num_tor_error >= 1:
-            separator(f"{num_tor_error} Torrents with tracker errors found", space=False, border=False, loglevel=loglevel)
-            print_multiline(tor_error_summary.rstrip(), loglevel)
+            logger.separator(f"{num_tor_error} Torrents with tracker errors found", space=False, border=False, loglevel=loglevel)
+            logger.print_line(tor_error_summary.rstrip(), loglevel)
         return del_tor, del_tor_cont, num_tor_error, num_untag

     # Function used to move any torrents from the cross seed directory to the correct save directory
     def cross_seed(self):
-        dry_run = self.config.args['dry_run']
+        dry_run = self.config.commands['dry_run']
         loglevel = 'DRYRUN' if dry_run else 'INFO'
         added = 0  # Keep track of total torrents tagged
         tagged = 0  # Track # of torrents tagged that are not cross-seeded
-        if self.config.args['cross_seed']:
-            separator("Checking for Cross-Seed Torrents", space=False, border=False)
+        if self.config.commands['cross_seed']:
+            logger.separator("Checking for Cross-Seed Torrents", space=False, border=False)
             # List of categories for all torrents moved
             categories = []
@@ -618,11 +640,11 @@ class Qbt:
                     if self.torrentinfo[t_name]['is_complete']:
                         categories.append(category)
                         body = []
-                        body += print_line(f"{'Not Adding' if dry_run else 'Adding'} to qBittorrent:", loglevel)
-                        body += print_line(util.insert_space(f'Torrent Name: {t_name}', 3), loglevel)
-                        body += print_line(util.insert_space(f'Category: {category}', 7), loglevel)
-                        body += print_line(util.insert_space(f'Save_Path: {dest}', 6), loglevel)
-                        body += print_line(util.insert_space(f'Tracker: {t_tracker}', 8), loglevel)
+                        body += logger.print_line(f"{'Not Adding' if dry_run else 'Adding'} to qBittorrent:", loglevel)
+                        body += logger.print_line(logger.insert_space(f'Torrent Name: {t_name}', 3), loglevel)
+                        body += logger.print_line(logger.insert_space(f'Category: {category}', 7), loglevel)
+                        body += logger.print_line(logger.insert_space(f'Save_Path: {dest}', 6), loglevel)
+                        body += logger.print_line(logger.insert_space(f'Tracker: {t_tracker}', 8), loglevel)
                         attr = {
                             "function": "cross_seed",
                             "title": "Adding New Cross-Seed Torrent",
@@ -639,12 +661,12 @@ class Qbt:
                             self.client.torrents.add(torrent_files=src, save_path=dest, category=category, tags='cross-seed', is_paused=True)
                             util.move_files(src, dir_cs_out)
                     else:
-                        print_line(f'Found {t_name} in {dir_cs} but original torrent is not complete.', loglevel)
-                        print_line('Not adding to qBittorrent', loglevel)
+                        logger.print_line(f'Found {t_name} in {dir_cs} but original torrent is not complete.', loglevel)
+                        logger.print_line('Not adding to qBittorrent', loglevel)
                 else:
                     error = f'{t_name} not found in torrents. Cross-seed Torrent not added to qBittorrent.'
-                    if dry_run: print_line(error, loglevel)
-                    else: print_line(error, 'WARNING')
+                    if dry_run: logger.print_line(error, loglevel)
+                    else: logger.print_line(error, 'WARNING')
                     self.config.notify(error, 'cross-seed', False)
             # Tag missing cross-seed torrents tags
             for torrent in self.torrent_list:
@@ -653,7 +675,7 @@ class Qbt:
                 if 'cross-seed' not in torrent.tags and self.torrentinfo[t_name]['count'] > 1 and self.torrentinfo[t_name]['first_hash'] != torrent.hash:
                     tracker = self.config.get_tags([x.url for x in torrent.trackers if x.url.startswith('http')])
                     tagged += 1
-                    body = print_line(f"{'Not Adding' if dry_run else 'Adding'} 'cross-seed' tag to {t_name}", loglevel)
+                    body = logger.print_line(f"{'Not Adding' if dry_run else 'Adding'} 'cross-seed' tag to {t_name}", loglevel)
                     attr = {
                         "function": "tag_cross_seed",
                         "title": "Tagging Cross-Seed Torrent",
@@ -668,19 +690,19 @@ class Qbt:

             numcategory = Counter(categories)
             for c in numcategory:
-                if numcategory[c] > 0: print_line(f"{numcategory[c]} {c} cross-seed .torrents {'not added' if dry_run else 'added'}.", loglevel)
-            if added > 0: print_line(f"Total {added} cross-seed .torrents {'not added' if dry_run else 'added'}.", loglevel)
-            if tagged > 0: print_line(f"Total {tagged} cross-seed .torrents {'not tagged' if dry_run else 'tagged'}.", loglevel)
+                if numcategory[c] > 0: logger.print_line(f"{numcategory[c]} {c} cross-seed .torrents {'not added' if dry_run else 'added'}.", loglevel)
+            if added > 0: logger.print_line(f"Total {added} cross-seed .torrents {'not added' if dry_run else 'added'}.", loglevel)
+            if tagged > 0: logger.print_line(f"Total {tagged} cross-seed .torrents {'not tagged' if dry_run else 'tagged'}.", loglevel)
         return added, tagged

     # Function used to recheck paused torrents sorted by size and resume torrents that are completed
     def recheck(self):
-        dry_run = self.config.args['dry_run']
+        dry_run = self.config.commands['dry_run']
         loglevel = 'DRYRUN' if dry_run else 'INFO'
         resumed = 0
         rechecked = 0
-        if self.config.args['recheck']:
-            separator("Rechecking Paused Torrents", space=False, border=False)
+        if self.config.commands['recheck']:
+            logger.separator("Rechecking Paused Torrents", space=False, border=False)
             # sort by size and paused
             torrent_list = self.get_torrents({'status_filter': 'paused', 'sort': 'size'})
             if torrent_list:
@@ -690,7 +712,7 @@ class Qbt:
                     if torrent.progress == 1:
                         if torrent.max_ratio < 0 and torrent.max_seeding_time < 0:
                             resumed += 1
-                            body = print_line(f"{'Not Resuming' if dry_run else 'Resuming'} [{tracker['tag']}] - {torrent.name}", loglevel)
+                            body = logger.print_line(f"{'Not Resuming' if dry_run else 'Resuming'} [{tracker['tag']}] - {torrent.name}", loglevel)
                             attr = {
                                 "function": "recheck",
                                 "title": "Resuming Torrent",
@@ -705,14 +727,14 @@ class Qbt:
                         else:
                             # Check to see if torrent meets AutoTorrentManagement criteria
                             logger.debug('DEBUG: Torrent to see if torrent meets AutoTorrentManagement Criteria')
-                            logger.debug(util.insert_space(f'- Torrent Name: {torrent.name}', 2))
-                            logger.debug(util.insert_space(f'-- Ratio vs Max Ratio: {torrent.ratio} < {torrent.max_ratio}', 4))
-                            logger.debug(util.insert_space(f'-- Seeding Time vs Max Seed Time: {timedelta(seconds=torrent.seeding_time)} < {timedelta(minutes=torrent.max_seeding_time)}', 4))
+                            logger.debug(logger.insert_space(f'- Torrent Name: {torrent.name}', 2))
+                            logger.debug(logger.insert_space(f'-- Ratio vs Max Ratio: {torrent.ratio} < {torrent.max_ratio}', 4))
+                            logger.debug(logger.insert_space(f'-- Seeding Time vs Max Seed Time: {timedelta(seconds=torrent.seeding_time)} < {timedelta(minutes=torrent.max_seeding_time)}', 4))
                             if (torrent.max_ratio >= 0 and torrent.ratio < torrent.max_ratio and torrent.max_seeding_time < 0) \
                                     or (torrent.max_seeding_time >= 0 and (torrent.seeding_time < (torrent.max_seeding_time * 60)) and torrent.max_ratio < 0) \
                                     or (torrent.max_ratio >= 0 and torrent.max_seeding_time >= 0 and torrent.ratio < torrent.max_ratio and (torrent.seeding_time < (torrent.max_seeding_time * 60))):
                                 resumed += 1
-                                body = print_line(f"{'Not Resuming' if dry_run else 'Resuming'} [{tracker['tag']}] - {torrent.name}", loglevel)
+                                body = logger.print_line(f"{'Not Resuming' if dry_run else 'Resuming'} [{tracker['tag']}] - {torrent.name}", loglevel)
                                 attr = {
                                     "function": "recheck",
                                     "title": "Resuming Torrent",
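The three-branch condition above resumes a completed torrent only while it is still under its share limits, where a negative max_ratio or max_seeding_time means that limit is unset. The same predicate, pulled out into a standalone function for readability (names are illustrative; the logic is copied from the hunk, with seeding_time in seconds and max_seeding_time in minutes):

def under_share_limits(ratio, max_ratio, seeding_time, max_seeding_time):
    # Negative limits mean 'not set'; both-unset is handled by the earlier
    # branch above, which resumes unconditionally.
    if max_ratio >= 0 and max_seeding_time < 0:
        return ratio < max_ratio                      # only a ratio limit applies
    if max_seeding_time >= 0 and max_ratio < 0:
        return seeding_time < max_seeding_time * 60   # only a time limit applies
    if max_ratio >= 0 and max_seeding_time >= 0:
        return ratio < max_ratio and seeding_time < max_seeding_time * 60
    return False

print(under_share_limits(1.2, 2.0, 3600, -1))  # True: ratio limit not yet reached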
@@ -727,7 +749,7 @@ class Qbt:
                     # Recheck
                     elif torrent.progress == 0 and self.torrentinfo[torrent.name]['is_complete'] and not torrent.state_enum.is_checking:
                         rechecked += 1
-                        body = print_line(f"{'Not Rechecking' if dry_run else 'Rechecking'} [{tracker['tag']}] - {torrent.name}", loglevel)
+                        body = logger.print_line(f"{'Not Rechecking' if dry_run else 'Rechecking'} [{tracker['tag']}] - {torrent.name}", loglevel)
                         attr = {
                             "function": "recheck",
                             "title": "Rechecking Torrent",
@@ -742,11 +764,11 @@ class Qbt:
         return resumed, rechecked

     def rem_orphaned(self):
-        dry_run = self.config.args['dry_run']
+        dry_run = self.config.commands['dry_run']
         loglevel = 'DRYRUN' if dry_run else 'INFO'
         orphaned = 0
-        if self.config.args['rem_orphaned']:
-            separator("Checking for Orphaned Files", space=False, border=False)
+        if self.config.commands['rem_orphaned']:
+            logger.separator("Checking for Orphaned Files", space=False, border=False)
             torrent_files = []
             root_files = []
             orphaned_files = []
@@ -780,24 +802,24 @@ class Qbt:

             orphaned_files = set(orphaned_files) - set(excluded_orphan_files)
             if self.config.trace_mode:
-                separator("Torrent Files", space=False, border=False, loglevel='DEBUG')
-                print_multiline("\n".join(torrent_files), 'DEBUG')
-                separator("Root Files", space=False, border=False, loglevel='DEBUG')
-                print_multiline("\n".join(root_files), 'DEBUG')
-                separator("Excluded Orphan Files", space=False, border=False, loglevel='DEBUG')
-                print_multiline("\n".join(excluded_orphan_files), 'DEBUG')
-                separator("Orphaned Files", space=False, border=False, loglevel='DEBUG')
-                print_multiline("\n".join(orphaned_files), 'DEBUG')
-                separator("Deleting Orphaned Files", space=False, border=False, loglevel='DEBUG')
+                logger.separator("Torrent Files", space=False, border=False, loglevel='DEBUG')
+                logger.print_line("\n".join(torrent_files), 'DEBUG')
+                logger.separator("Root Files", space=False, border=False, loglevel='DEBUG')
+                logger.print_line("\n".join(root_files), 'DEBUG')
+                logger.separator("Excluded Orphan Files", space=False, border=False, loglevel='DEBUG')
+                logger.print_line("\n".join(excluded_orphan_files), 'DEBUG')
+                logger.separator("Orphaned Files", space=False, border=False, loglevel='DEBUG')
+                logger.print_line("\n".join(orphaned_files), 'DEBUG')
+                logger.separator("Deleting Orphaned Files", space=False, border=False, loglevel='DEBUG')

             if orphaned_files:
                 dir_out = os.path.join(remote_path, 'orphaned_data')
                 os.makedirs(dir_out, exist_ok=True)
                 body = []
                 num_orphaned = len(orphaned_files)
-                print_line(f"{num_orphaned} Orphaned files found", loglevel)
-                body += print_multiline("\n".join(orphaned_files), loglevel)
-                body += print_line(f"{'Did not move' if dry_run else 'Moved'} {num_orphaned} Orphaned files to {dir_out.replace(remote_path,root_path)}", loglevel)
+                logger.print_line(f"{num_orphaned} Orphaned files found", loglevel)
+                body += logger.print_line("\n".join(orphaned_files), loglevel)
+                body += logger.print_line(f"{'Did not move' if dry_run else 'Moved'} {num_orphaned} Orphaned files to {dir_out.replace(remote_path,root_path)}", loglevel)

                 attr = {
                     "function": "rem_orphaned",
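The orphan scan is set arithmetic: files found on disk under root_dir that no loaded torrent claims, minus anything matched by the exclusion patterns, is what gets moved to orphaned_data. In miniature (paths are illustrative; the root-files-minus-torrent-files step happens just above this hunk):

root_files = {'movies/A/a.mkv', 'movies/B/b.mkv', 'movies/B/b.nfo'}
torrent_files = {'movies/A/a.mkv'}
excluded_orphan_files = {'movies/B/b.nfo'}   # e.g. matched by exclude_patterns

orphaned_files = (set(root_files) - set(torrent_files)) - set(excluded_orphan_files)
print(orphaned_files)                        # {'movies/B/b.mkv'} -> moved to orphaned_data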
@@ -819,7 +841,7 @@ class Qbt:
                 for parent_path in orphaned_parent_path:
                     util.remove_empty_directories(parent_path, "**/*")
             else:
-                print_line("No Orphaned Files found.", loglevel)
+                logger.print_line("No Orphaned Files found.", loglevel)
         return orphaned

     def tor_delete_recycle(self, torrent, info):
@@ -861,7 +883,7 @@ class Qbt:
                 try:
                     util.copy_files(os.path.join(self.config.torrents_dir, File), os.path.join(torrent_path, File))
                 except Exception as e:
-                    util.print_stacktrace()
+                    logger.stacktrace()
                     self.config.notify(e, 'Deleting Torrent', False)
                     logger.warning(f"RecycleBin Warning: {e}")
             if "tracker_torrent_files" in torrent_json:
@@ -889,9 +911,9 @@ class Qbt:
             logger.debug(f"JSON: {torrent_json}")
             util.save_json(torrent_json, torrent_json_file)
             if info['torrents_deleted_and_contents'] is True:
-                separator(f"Moving {len(tor_files)} files to RecycleBin", space=False, border=False, loglevel='DEBUG')
-                if len(tor_files) == 1: print_line(tor_files[0], 'DEBUG')
-                else: print_multiline("\n".join(tor_files), 'DEBUG')
+                logger.separator(f"Moving {len(tor_files)} files to RecycleBin", space=False, border=False, loglevel='DEBUG')
+                if len(tor_files) == 1: logger.print_line(tor_files[0], 'DEBUG')
+                else: logger.print_line("\n".join(tor_files), 'DEBUG')
                 logger.debug(f'Moved {len(tor_files)} files to {recycle_path.replace(self.config.remote_dir,self.config.root_dir)}')

                 # Move files from torrent contents to Recycle bin
@@ -902,7 +924,7 @@ class Qbt:
                 try:
                     toDelete = util.move_files(src, dest, True)
                 except FileNotFoundError:
-                    e = print_line(f'RecycleBin Warning - FileNotFound: No such file or directory: {src} ', 'WARNING')
+                    e = logger.print_line(f'RecycleBin Warning - FileNotFound: No such file or directory: {src} ', 'WARNING')
                     self.config.notify(e, 'Deleting Torrent', False)
                 # Delete torrent and files
                 torrent.delete(delete_files=toDelete)
178
modules/util.py
178
modules/util.py
@@ -1,6 +1,4 @@
-import logging, os, shutil, traceback, time, signal, json
-from logging.handlers import RotatingFileHandler
-from ruamel import yaml
+import logging, os, shutil, time, signal, json, ruamel.yaml
 from pathlib import Path

 logger = logging.getLogger('qBit Manage')
@@ -62,26 +60,26 @@ class check:
         if data is None or attribute not in data or (attribute in data and data[attribute] is None and not default_is_none):
             message = f"{text} not found"
             if parent and save is True:
-                loaded_config, _, _ = yaml.util.load_yaml_guess_indent(open(self.config.config_path))
+                yaml = YAML(self.config.config_path)
                 if subparent:
                     endline = f"\n{subparent} sub-attribute {attribute} added to config"
-                    if subparent not in loaded_config[parent] or not loaded_config[parent][subparent]:
-                        loaded_config[parent][subparent] = {attribute: default}
-                    elif attribute not in loaded_config[parent]:
-                        if isinstance(loaded_config[parent][subparent], str):
-                            loaded_config[parent][subparent] = {attribute: default}
-                        loaded_config[parent][subparent][attribute] = default
+                    if subparent not in yaml.data[parent] or not yaml.data[parent][subparent]:
+                        yaml.data[parent][subparent] = {attribute: default}
+                    elif attribute not in yaml.data[parent]:
+                        if isinstance(yaml.data[parent][subparent], str):
+                            yaml.data[parent][subparent] = {attribute: default}
+                        yaml.data[parent][subparent][attribute] = default
                     else:
                         endline = ""
                 else:
                     endline = f"\n{parent} sub-attribute {attribute} added to config"
-                    if parent not in loaded_config or not loaded_config[parent]:
-                        loaded_config[parent] = {attribute: default}
-                    elif attribute not in loaded_config[parent] or (attribute in loaded_config[parent] and loaded_config[parent][attribute] is None):
-                        loaded_config[parent][attribute] = default
+                    if parent not in yaml.data or not yaml.data[parent]:
+                        yaml.data[parent] = {attribute: default}
+                    elif attribute not in yaml.data[parent] or (attribute in yaml.data[parent] and yaml.data[parent][attribute] is None):
+                        yaml.data[parent][attribute] = default
                     else:
                         endline = ""
-                yaml.round_trip_dump(loaded_config, open(self.config.config_path, "w"), indent=None, block_seq_indent=2)
+                yaml.save()
             if default_is_none and var_type in ["list", "int_list"]: return []
         elif data[attribute] is None:
             if default_is_none and var_type == "list":
@@ -162,9 +160,9 @@ class check:
                 message = message + "\n" + options
             raise Failed(f"Config Error: {message}")
         if do_print:
-            print_multiline(f"Config Warning: {message}", "warning")
+            logger.print(f"Config Warning: {message}", "warning")
             if data and attribute in data and data[attribute] and test_list is not None and data[attribute] not in test_list:
-                print_multiline(options)
+                logger.print(options)
         return default
@@ -172,23 +170,6 @@ class Failed(Exception):
     pass


-separating_character = "="
-screen_width = 100
-spacing = 0
-
-
-def tab_new_lines(data):
-    return str(data).replace("\n", "\n|\t ") if "\n" in str(data) else str(data)
-
-
 def add_dict_list(keys, value, dict_map):
     for key in keys:
         if key in dict_map:
             dict_map[key].append(value)
         else:
             dict_map[key] = [value]


 def list_in_text(text, search_list, match_all=False):
     if isinstance(search_list, list): search_list = set(search_list)
     contains = set([x for x in search_list if ' ' in x])
@@ -202,98 +183,6 @@ def list_in_text(text, search_list, match_all=False):
     return False


-def print_line(lines, loglevel='INFO'):
-    logger.log(getattr(logging, loglevel.upper()), str(lines))
-    return [str(lines)]
-
-
-def print_multiline(lines, loglevel='INFO'):
-    for i, line in enumerate(str(lines).split("\n")):
-        logger.log(getattr(logging, loglevel.upper()), line)
-        if i == 0:
-            logger.handlers[1].setFormatter(logging.Formatter(" " * 65 + "| %(message)s"))
-    logger.handlers[1].setFormatter(logging.Formatter("[%(asctime)s] %(filename)-27s %(levelname)-10s | %(message)s"))
-    return [(str(lines))]
-
-
-def print_stacktrace():
-    print_multiline(traceback.format_exc(), 'CRITICAL')
-
-
-def my_except_hook(exctype, value, tb):
-    for line in traceback.format_exception(etype=exctype, value=value, tb=tb):
-        print_multiline(line, 'CRITICAL')
-
-
-def centered(text, sep=" "):
-    if len(text) > screen_width - 2:
-        return text
-    space = screen_width - len(text) - 2
-    text = f" {text} "
-    if space % 2 == 1:
-        text += sep
-        space -= 1
-    side = int(space / 2) - 1
-    final_text = f"{sep * side}{text}{sep * side}"
-    return final_text
-
-
-def separator(text=None, space=True, border=True, loglevel='INFO'):
-    sep = " " if space else separating_character
-    for handler in logger.handlers:
-        apply_formatter(handler, border=False)
-    border_text = f"|{separating_character * screen_width}|"
-    if border:
-        logger.log(getattr(logging, loglevel.upper()), border_text)
-    if text:
-        text_list = text.split("\n")
-        for t in text_list:
-            logger.log(getattr(logging, loglevel.upper()),
-                       f"|{sep}{centered(t, sep=sep)}{sep}|")
-        if border:
-            logger.log(getattr(logging, loglevel.upper()), border_text)
-    for handler in logger.handlers:
-        apply_formatter(handler)
-    return [text]
-
-
-def apply_formatter(handler, border=True):
-    text = f"| %(message)-{screen_width - 2}s |" if border else f"%(message)-{screen_width - 2}s"
-    if isinstance(handler, RotatingFileHandler):
-        text = f"[%(asctime)s] %(filename)-27s %(levelname)-10s {text}"
-        # text = f"[%(asctime)s] %(levelname)-10s {text}"
-    handler.setFormatter(logging.Formatter(text))
-
-
-def adjust_space(display_title):
-    display_title = str(display_title)
-    space_length = spacing - len(display_title)
-    if space_length > 0:
-        display_title += " " * space_length
-    return display_title
-
-
-def insert_space(display_title, space_length=0):
-    display_title = str(display_title)
-    if space_length == 0:
-        space_length = spacing - len(display_title)
-    if space_length > 0:
-        display_title = " " * space_length + display_title
-    return display_title
-
-
-def print_return(text):
-    print(adjust_space(f"| {text}"), end="\r")
-    global spacing
-    spacing = len(text) + 2
-
-
-def print_end():
-    print(adjust_space(" "), end="\r")
-    global spacing
-    spacing = 0
-
-
 # truncate the value of the torrent url to remove sensitive information
 def trunc_val(s, d, n=3):
     try:
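Everything removed in this hunk is the old console-formatting layer. Judging purely from the new call sites elsewhere in this commit, the same operations now live on the shared logger object, roughly as follows (the right-hand side is inferred from usage; modules/logs.py itself is not shown in this diff):

# Old modules.util helper       ->  replacement observed at call sites
# print_line(msg, lvl)          ->  logger.print_line(msg, lvl)
# print_multiline(msg, lvl)     ->  logger.print_line(msg, lvl)
# separator(...)                ->  logger.separator(...)
# print_stacktrace()            ->  logger.stacktrace()
# insert_space(text, n)         ->  logger.insert_space(text, n)
# logger.info(centered(text))   ->  logger.info_center(text)
# my_except_hook                ->  redefined at module level in qbit_manage.py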
@@ -319,7 +208,7 @@ def move_files(src, dest, mod=False):
             shutil.copyfile(src, dest)
             toDelete = True
     except Exception as e:
-        print_stacktrace()
+        logger.stacktrace()
        logger.error(e)
    return toDelete
@@ -332,7 +221,7 @@ def copy_files(src, dest):
    try:
        shutil.copyfile(src, dest)
    except Exception as e:
-        print_stacktrace()
+        logger.stacktrace()
        logger.error(e)
@@ -402,3 +291,36 @@ def human_readable_size(size, decimal_places=3):
             break
         size /= 1024.0
     return f"{size:.{decimal_places}f}{unit}"
+
+
+class YAML:
+    def __init__(self, path=None, input_data=None, check_empty=False, create=False):
+        self.path = path
+        self.input_data = input_data
+        self.yaml = ruamel.yaml.YAML()
+        self.yaml.indent(mapping=2, sequence=2)
+        try:
+            if input_data:
+                self.data = self.yaml.load(input_data)
+            else:
+                if create and not os.path.exists(self.path):
+                    with open(self.path, 'w'):
+                        pass
+                    self.data = {}
+                else:
+                    with open(self.path, encoding="utf-8") as fp:
+                        self.data = self.yaml.load(fp)
+        except ruamel.yaml.error.YAMLError as e:
+            e = str(e).replace("\n", "\n ")
+            raise Failed(f"YAML Error: {e}")
+        except Exception as e:
+            raise Failed(f"YAML Error: {e}")
+        if not self.data or not isinstance(self.data, dict):
+            if check_empty:
+                raise Failed("YAML Error: File is empty")
+            self.data = {}
+
+    def save(self):
+        if self.path:
+            with open(self.path, 'w') as fp:
+                self.yaml.dump(self.data, fp)
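The new YAML wrapper gives the config writer in check a single load/save path; ruamel's default round-trip mode preserves comments and key order, which matters for a hand-edited config.yml. A usage sketch (the path is illustrative):

from modules.util import YAML

config = YAML("config/config.yml")             # round-trip parse, comments preserved
config.data.setdefault("qbt", {})["host"] = "localhost:8080"
config.save()                                  # dumped back with 2-space indents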
modules/webhooks.py
@@ -1,9 +1,8 @@
-import logging
 from json import JSONDecodeError

 from modules import util
 from modules.util import Failed

-logger = logging.getLogger("qBit Manage")
+logger = util.logger


 class Webhooks:
@@ -73,7 +72,7 @@ class Webhooks:

     def start_time_hooks(self, start_time):
         if self.run_start_webhooks:
-            dry_run = self.config.args['dry_run']
+            dry_run = self.config.commands['dry_run']
             if dry_run:
                 start_type = "Dry-"
             else:
@@ -83,7 +82,7 @@ class Webhooks:
                 "title": None,
                 "body": f"Starting {start_type}Run",
                 "start_time": start_time.strftime("%Y-%m-%d %H:%M:%S"),
-                "dry_run": self.config.args['dry_run']
+                "dry_run": self.config.commands['dry_run']
             })

     def end_time_hooks(self, start_time, end_time, run_time, next_run, stats, body):
165
qbit_manage.py
165
qbit_manage.py
@@ -1,15 +1,11 @@
 #!/usr/bin/python3

-import argparse, logging, os, sys, time
-from logging.handlers import RotatingFileHandler
+import argparse, os, sys, time, glob
 from datetime import datetime, timedelta

 try:
     import schedule
-    from modules import util
-    from modules.config import Config
-    from modules.util import GracefulKiller
-    from modules.util import Failed
+    from modules.logs import MyLogger
 except ModuleNotFoundError:
     print("Requirements Error: Requirements are not installed")
     sys.exit(0)
@@ -25,9 +21,10 @@ parser.add_argument("-tr", "--trace", dest="trace", help=argparse.SUPPRESS, acti
 parser.add_argument('-r', '--run', dest='run', action='store_true', default=False, help='Run without the scheduler. Script will exit after completion.')
 parser.add_argument('-sch', '--schedule', dest='min', default='1440', type=str, help='Schedule to run every x minutes. (Default set to 1440 (1 day))')
 parser.add_argument('-sd', '--startup-delay', dest='startupDelay', default='0', type=str, help='Set delay in seconds on the first run of a schedule (Default set to 0)')
-parser.add_argument('-c', '--config-file', dest='configfile', action='store', default='config.yml', type=str,
-                    help='This is used if you want to use a different name for your config.yml. Example: tv.yml')
-parser.add_argument('-lf', '--log-file', dest='logfile', action='store', default='activity.log', type=str, help='This is used if you want to use a different name for your log file. Example: tv.log',)
+parser.add_argument('-c', '--config-file', dest='configfiles', action='store', default='config.yml', type=str,
+                    help='This is used if you want to use a different name for your config.yml or if you want to load multiple config files using *. Example: tv.yml or config*.yml')
+parser.add_argument('-lf', '--log-file', dest='logfile', action='store', default='qbit_manage.log', type=str,
+                    help='This is used if you want to use a different name for your log file. Example: tv.log',)
 parser.add_argument('-cs', '--cross-seed', dest='cross_seed', action="store_true", default=False,
                     help='Use this after running cross-seed script to add torrents from the cross-seed output folder to qBittorrent')
 parser.add_argument('-re', '--recheck', dest='recheck', action="store_true", default=False, help='Recheck paused torrents sorted by lowest size. Resume if Completed.')
@@ -52,19 +49,25 @@ args = parser.parse_args()


 def get_arg(env_str, default, arg_bool=False, arg_int=False):
-    env_var = os.environ.get(env_str)
-    if env_var:
+    env_vars = [env_str] if not isinstance(env_str, list) else env_str
+    final_value = None
+    for env_var in env_vars:
+        env_value = os.environ.get(env_var)
+        if env_value is not None:
+            final_value = env_value
+            break
+    if final_value is not None:
         if arg_bool:
-            if env_var is True or env_var is False:
-                return env_var
-            elif env_var.lower() in ["t", "true"]:
+            if final_value is True or final_value is False:
+                return final_value
+            elif final_value.lower() in ["t", "true"]:
                 return True
             else:
                 return False
         elif arg_int:
-            return int(env_var)
+            return int(final_value)
         else:
-            return str(env_var)
+            return str(final_value)
     else:
         return default
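get_arg now accepts either a single environment-variable name or a list of names and returns the first one that is set, falling back to the argparse default otherwise. For example (the second variable name here is hypothetical, purely to show the lookup order):

import os

os.environ["QBT_LOG_FILE"] = "tv.log"   # hypothetical alias, for illustration only
log_file = get_arg(["QBT_LOGFILE", "QBT_LOG_FILE"], "qbit_manage.log")
print(log_file)                          # 'tv.log': the first variable that is set wins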
@@ -72,7 +75,7 @@ def get_arg(env_str, default, arg_bool=False, arg_int=False):
 run = get_arg("QBT_RUN", args.run, arg_bool=True)
 sch = get_arg("QBT_SCHEDULE", args.min)
 startupDelay = get_arg("QBT_STARTUP_DELAY", args.startupDelay)
-config_file = get_arg("QBT_CONFIG", args.configfile)
+config_files = get_arg("QBT_CONFIG", args.configfiles)
 log_file = get_arg("QBT_LOGFILE", args.logfile)
 cross_seed = get_arg("QBT_CROSS_SEED", args.cross_seed, arg_bool=True)
 recheck = get_arg("QBT_RECHECK", args.recheck, arg_bool=True)
@@ -95,16 +98,28 @@ if debug or trace: log_level = 'DEBUG'
 stats = {}
 args = {}

-if os.path.isdir('/config') and os.path.exists(os.path.join('/config', config_file)):
+if os.path.isdir('/config') and glob.glob(os.path.join('/config', config_files)):
     default_dir = '/config'
 else:
     default_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "config")

+if '*' not in config_files:
+    config_files = [config_files]
+else:
+    glob_configs = glob.glob(os.path.join(default_dir, config_files))
+    if glob_configs:
+        config_files = [os.path.split(x)[-1] for x in glob_configs]
+    else:
+        print(f"Config Error: Unable to find any config files in the pattern '{config_files}'.")
+        sys.exit(0)
+

 for v in [
     'run',
     'sch',
     'startupDelay',
-    'config_file',
+    'config_files',
     'log_file',
     'cross_seed',
     'recheck',
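Because QBT_CONFIG / -c may now contain a *, one invocation can fan out over several config files: the pattern is globbed against the config directory and reduced to bare file names, each of which start_loop() later runs in turn. For instance, with /config holding config.yml, config-tv.yml and notes.txt:

import glob, os

matches = glob.glob(os.path.join('/config', 'config*.yml'))
config_files = [os.path.split(x)[-1] for x in matches]
print(config_files)   # ['config.yml', 'config-tv.yml']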
@@ -124,12 +139,9 @@ for v in [
 ]:
     args[v] = eval(v)

-util.separating_character = divider[0]
-
 if screen_width < 90 or screen_width > 300:
     print(f"Argument Error: width argument invalid: {screen_width} must be an integer between 90 and 300 using the default 100")
     screen_width = 100
-util.screen_width = screen_width

 # Check if Schedule parameter is a number
 try:
@@ -145,25 +157,23 @@ except ValueError:
     print(f"startupDelay Error: startupDelay is not a number. Current value is set to '{startupDelay}'")
     sys.exit(0)

-logger = logging.getLogger('qBit Manage')
-logging.DRYRUN = 25
-logging.addLevelName(logging.DRYRUN, 'DRYRUN')
-setattr(logger, 'dryrun', lambda dryrun, *args: logger._log(logging.DRYRUN, dryrun, args))
-log_lev = getattr(logging, log_level.upper())
-logger.setLevel(log_lev)
+logger = MyLogger('qBit Manage', log_file, log_level, default_dir, screen_width, divider[0], False, debug or trace)
+from modules import util
+util.logger = logger
+from modules.config import Config
+from modules.util import GracefulKiller
+from modules.util import Failed


-def fmt_filter(record):
-    record.levelname = f"[{record.levelname}]"
-    record.filename = f"[{record.filename}:{record.lineno}]"
-    return True
+def my_except_hook(exctype, value, tb):
+    if issubclass(exctype, KeyboardInterrupt):
+        sys.__excepthook__(exctype, value, tb)
+    else:
+        logger.critical("Uncaught Exception", exc_info=(exctype, value, tb))


-cmd_handler = logging.StreamHandler()
-cmd_handler.setLevel(log_level)
-logger.addHandler(cmd_handler)
-
-sys.excepthook = util.my_except_hook
+sys.excepthook = my_except_hook

 version = "Unknown"
 with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "VERSION")) as handle:
@@ -173,21 +183,18 @@ with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "VERSION")) a
             version = line
             break

-if os.path.exists(os.path.dirname(log_file)):
-    file_logger = log_file
-elif not os.path.exists(os.path.dirname(log_file)) and os.path.dirname(log_file) != '':
-    os.makedirs(os.path.join(default_dir, 'logs'), exist_ok=True)
-    print(f"Log Warning: Log Path {os.path.dirname(log_file)} does not exist. Logs will be saved in the default path: {os.path.join(default_dir, 'logs', os.path.basename(log_file))}")
-    file_logger = os.path.join(default_dir, 'logs', os.path.basename(log_file))
-else:
-    os.makedirs(os.path.join(default_dir, 'logs'), exist_ok=True)
-    file_logger = os.path.join(default_dir, 'logs', os.path.basename(log_file))
-max_bytes = 1024 * 1024 * 2
-file_handler = RotatingFileHandler(file_logger, delay=True, mode="w", maxBytes=max_bytes, backupCount=10, encoding="utf-8")
-util.apply_formatter(file_handler)
-file_handler.addFilter(fmt_filter)
-logger.addHandler(file_handler)
-logger.debug(f"Logs are saved in {file_logger}")
+def start_loop():
+    if len(config_files) == 1:
+        args["config_file"] = config_files[0]
+        start()
+    else:
+        for config_file in config_files:
+            args["config_file"] = config_file
+            config_base = os.path.splitext(config_file)[0]
+            logger.add_config_handler(config_base)
+            start()
+            logger.remove_config_handler(config_base)


 def start():
@@ -195,11 +202,7 @@ def start():
     args["time"] = start_time.strftime("%H:%M")
     args["time_obj"] = start_time
     stats_summary = []
-    if dry_run:
-        start_type = "Dry-"
-    else:
-        start_type = ""
-    util.separator(f"Starting {start_type}Run")
+    logger.separator("Starting Run")
     cfg = None
     body = ''
     run_time = ''
@@ -224,26 +227,26 @@ def start():
     }

     def FinishedRun():
-        nonlocal end_time, start_time, start_type, stats_summary, run_time, next_run, body
+        nonlocal end_time, start_time, stats_summary, run_time, next_run, body
         end_time = datetime.now()
         run_time = str(end_time - start_time).split('.')[0]
         _, nr = calc_next_run(sch, True)
         next_run_str = nr['next_run_str']
         next_run = nr['next_run']
-        body = util.separator(f"Finished {start_type}Run\n{os.linesep.join(stats_summary) if len(stats_summary)>0 else ''}\nRun Time: {run_time}\n{next_run_str if len(next_run_str)>0 else ''}"
-                              .replace('\n\n', '\n').rstrip())[0]
+        body = logger.separator(f"Finished Run\n{os.linesep.join(stats_summary) if len(stats_summary)>0 else ''}\nRun Time: {run_time}\n{next_run_str if len(next_run_str)>0 else ''}"
+                                .replace('\n\n', '\n').rstrip())[0]
         return next_run, body
     try:
         cfg = Config(default_dir, args)
     except Exception as e:
         if 'Qbittorrent Error' in e.args[0]:
-            util.print_multiline(e, 'CRITICAL')
-            util.print_line('Exiting scheduled Run.', 'CRITICAL')
+            logger.print_line(e, 'CRITICAL')
+            logger.print_line('Exiting scheduled Run.', 'CRITICAL')
             FinishedRun()
             return None
         else:
-            util.print_stacktrace()
-            util.print_multiline(e, 'CRITICAL')
+            logger.stacktrace()
+            logger.print_line(e, 'CRITICAL')

     if cfg:
         # Set Category
@@ -305,18 +308,17 @@ def start():
     if stats["recycle_emptied"] > 0: stats_summary.append(f"Total Files Deleted from Recycle Bin: {stats['recycle_emptied']}")

     FinishedRun()

     if cfg:
         try:
             cfg.Webhooks.end_time_hooks(start_time, end_time, run_time, next_run, stats, body)
         except Failed as e:
-            util.print_stacktrace()
+            logger.stacktrace()
             logger.error(f"Webhooks Error: {e}")


 def end():
     logger.info("Exiting Qbit_manage")
-    logger.removeHandler(file_handler)
+    logger.remove_main_handler()
     sys.exit(0)
@@ -348,22 +350,23 @@ def calc_next_run(sch, print=False):

 if __name__ == '__main__':
     killer = GracefulKiller()
-    util.separator()
-    logger.info(util.centered(" _ _ _ "))  # noqa: W605
-    logger.info(util.centered(" | | (_) | "))  # noqa: W605
-    logger.info(util.centered(" __ _| |__ _| |_ _ __ ___ __ _ _ __ __ _ __ _ ___ "))  # noqa: W605
-    logger.info(util.centered(" / _` | '_ \| | __| | '_ ` _ \ / _` | '_ \ / _` |/ _ \\"))  # noqa: W605
-    logger.info(util.centered(" | (_| | |_) | | |_ | | | | | | (_| | | | | (_| | (_| | __/"))  # noqa: W605
-    logger.info(util.centered(" \__, |_.__/|_|\__| |_| |_| |_|\__,_|_| |_|\__,_|\__, |\___|"))  # noqa: W605
-    logger.info(util.centered(" | | ______ __/ | "))  # noqa: W605
-    logger.info(util.centered(" |_| |______| |___/ "))  # noqa: W605
+    logger.add_main_handler()
+    logger.separator()
+    logger.info_center(" _ _ _ ")  # noqa: W605
+    logger.info_center(" | | (_) | ")  # noqa: W605
+    logger.info_center(" __ _| |__ _| |_ _ __ ___ __ _ _ __ __ _ __ _ ___ ")  # noqa: W605
+    logger.info_center(" / _` | '_ \| | __| | '_ ` _ \ / _` | '_ \ / _` |/ _ \\")  # noqa: W605
+    logger.info_center(" | (_| | |_) | | |_ | | | | | | (_| | | | | (_| | (_| | __/")  # noqa: W605
+    logger.info_center(" \__, |_.__/|_|\__| |_| |_| |_|\__,_|_| |_|\__,_|\__, |\___|")  # noqa: W605
+    logger.info_center(" | | ______ __/ | ")  # noqa: W605
+    logger.info_center(" |_| |______| |___/ ")  # noqa: W605
     logger.info(f" Version: {version}")

-    util.separator(loglevel='DEBUG')
+    logger.separator(loglevel='DEBUG')
     logger.debug(f" --run (QBT_RUN): {run}")
     logger.debug(f" --schedule (QBT_SCHEDULE): {sch}")
     logger.debug(f" --startup-delay (QBT_STARTUP_DELAY): {startupDelay}")
-    logger.debug(f" --config-file (QBT_CONFIG): {config_file}")
+    logger.debug(f" --config-file (QBT_CONFIG): {config_files}")
     logger.debug(f" --log-file (QBT_LOGFILE): {log_file}")
     logger.debug(f" --cross-seed (QBT_CROSS_SEED): {cross_seed}")
     logger.debug(f" --recheck (QBT_RECHECK): {recheck}")
@@ -384,15 +387,15 @@ if __name__ == '__main__':
     try:
         if run:
             logger.info(" Run Mode: Script will exit after completion.")
-            start()
+            start_loop()
         else:
-            schedule.every(sch).minutes.do(start)
+            schedule.every(sch).minutes.do(start_loop)
             time_str, _ = calc_next_run(sch)
             logger.info(f" Scheduled Mode: Running every {time_str}.")
             if startupDelay:
                 logger.info(f" Startup Delay: Initial Run will start after {startupDelay} seconds")
                 time.sleep(startupDelay)
-            start()
+            start_loop()
             while not killer.kill_now:
                 schedule.run_pending()
                 time.sleep(60)
requirements.txt
@@ -1,5 +1,5 @@
 ruamel.yaml==0.17.21
-qbittorrent-api>=2022.8.34
+qbittorrent-api>=2022.8.37
 schedule==1.1.0
 retrying==1.3.3
 alive_progress==2.4.1
@@ -16,7 +16,7 @@ qbt_pass = None
 try:
     from qbittorrentapi import Client, LoginFailed, APIConnectionError
 except ModuleNotFoundError:
-    print("Requirements Error: qbittorrentapi not installed. Please install with pip")
+    print("Requirements Error: qbittorrent-api not installed. Please install using the command \"pip install qbittorrent-api\"")
     sys.exit(0)

 current = datetime.now()