Version 3.0: Rewritten from the ground up

Jon 2021-12-12 22:06:34 -05:00
parent 15d6605602
commit 1da76debf1
No known key found for this signature in database
GPG key ID: 9665BA6CF5DC2671
7 changed files with 1030 additions and 885 deletions

@@ -1 +1 @@
2.3
3.0.0

modules/config.py Normal file

@@ -0,0 +1,196 @@
import logging, os, requests, stat, time
from modules import util
from modules.util import Failed, check
from modules.qbittorrent import Qbt
from ruamel import yaml
from retrying import retry
logger = logging.getLogger("qBit Manage")
class Config:
def __init__(self, default_dir, args):
logger.info("Locating config...")
self.args = args
config_file = args["config_file"]
if config_file and os.path.exists(config_file): self.config_path = os.path.abspath(config_file)
elif config_file and os.path.exists(os.path.join(default_dir, config_file)): self.config_path = os.path.abspath(os.path.join(default_dir, config_file))
elif config_file and not os.path.exists(config_file): raise Failed(f"Config Error: config not found at {os.path.abspath(config_file)}")
elif os.path.exists(os.path.join(default_dir, "config.yml")): self.config_path = os.path.abspath(os.path.join(default_dir, "config.yml"))
else: raise Failed(f"Config Error: config not found at {os.path.abspath(default_dir)}")
logger.info(f"Using {self.config_path} as config")
self.util = check(self)
self.default_dir = default_dir
yaml.YAML().allow_duplicate_keys = True
try:
new_config, _, _ = yaml.util.load_yaml_guess_indent(open(self.config_path, encoding="utf-8"))
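# Popping and re-inserting each section moves it to the end of the mapping, so the
# config is rewritten below with its sections in this standard order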
if "qbt" in new_config: new_config["qbt"] = new_config.pop("qbt")
if "directory" in new_config: new_config["directory"] = new_config.pop("directory")
if "cat" in new_config: new_config["cat"] = new_config.pop("cat")
if "tags" in new_config: new_config["tags"] = new_config.pop("tags")
if "nohardlinks" in new_config: new_config["nohardlinks"] = new_config.pop("nohardlinks")
if "recyclebin" in new_config: new_config["recyclebin"] = new_config.pop("recyclebin")
if "orphaned" in new_config: new_config["orphaned"] = new_config.pop("orphaned")
yaml.round_trip_dump(new_config, open(self.config_path, "w", encoding="utf-8"), indent=None, block_seq_indent=2)
self.data = new_config
except yaml.scanner.ScannerError as e:
raise Failed(f"YAML Error: {util.tab_new_lines(e)}")
except Exception as e:
util.print_stacktrace()
raise Failed(f"YAML Error: {e}")
self.session = requests.Session()
#nohardlinks
self.nohardlinks = None
if "nohardlinks" in self.data:
self.nohardlinks = {}
for cat in self.data["nohardlinks"]:
if cat in list(self.data["cat"].keys()):
self.nohardlinks[cat] = {}
self.nohardlinks[cat]["exclude_tags"] = self.util.check_for_attribute(self.data, "exclude_tags", parent="nohardlinks", subparent=cat, var_type="list", default_is_none=True,do_print=False)
self.nohardlinks[cat]["cleanup"] = self.util.check_for_attribute(self.data, "cleanup", parent="nohardlinks", subparent=cat, var_type="bool", default=False,do_print=False)
self.nohardlinks[cat]['max_ratio'] = self.util.check_for_attribute(self.data, "max_ratio", parent="nohardlinks", subparent=cat, var_type="float", default_int=-2, default_is_none=True,do_print=False)
self.nohardlinks[cat]['max_seeding_time'] = self.util.check_for_attribute(self.data, "max_seeding_time", parent="nohardlinks", subparent=cat, var_type="int", default_int=-2, default_is_none=True,do_print=False)
else:
raise Failed(f"Config Error: Category {cat} is defined under nohardlinks attribute but is not defined in the cat attriute.")
else:
if self.args["tag_nohardlinks"]:
raise Failed("C onfig Error: nohardlinks attribute not found")
#Add RecycleBin
self.recyclebin = {}
self.recyclebin['enabled'] = self.util.check_for_attribute(self.data, "enabled", parent="recyclebin",var_type="bool",default=True)
self.recyclebin['empty_after_x_days'] = self.util.check_for_attribute(self.data, "empty_after_x_days", parent="recyclebin",var_type="int",default_is_none=True)
#Add Orphaned
self.orphaned = {}
self.orphaned['exclude_patterns'] = self.util.check_for_attribute(self.data, "exclude_patterns", parent="orphaned",var_type="list",default_is_none=True,do_print=False)
#Assign directories
if "directory" in self.data:
self.root_dir = self.util.check_for_attribute(self.data, "root_dir", parent="directory",default_is_none=True)
self.remote_dir = self.util.check_for_attribute(self.data, "remote_dir", parent="directory",default=self.root_dir)
if (self.args["cross_seed"] or self.args["tag_nohardlinks"] or self.args["rem_orphaned"]):
self.remote_dir = self.util.check_for_attribute(self.data, "remote_dir", parent="directory",var_type="path",default=self.root_dir)
else:
if self.recyclebin['enabled']:
self.remote_dir = self.util.check_for_attribute(self.data, "remote_dir", parent="directory",var_type="path",default=self.root_dir)
if self.args["cross_seed"]:
self.cross_seed_dir = self.util.check_for_attribute(self.data, "cross_seed", parent="directory",var_type="path")
else:
self.cross_seed_dir = self.util.check_for_attribute(self.data, "cross_seed", parent="directory",default_is_none=True)
self.recycle_dir = os.path.join(self.remote_dir,'.RecycleBin')
else:
raise Failed("Config Error: directory attribute not found")
#Connect to Qbittorrent
self.qbt = None
if "qbt" in self.data:
logger.info("Connecting to Qbittorrent...")
self.qbt = Qbt(self, {
"host": self.util.check_for_attribute(self.data, "host", parent="qbt", throw=True),
"username": self.util.check_for_attribute(self.data, "user", parent="qbt", default_is_none=True),
"password": self.util.check_for_attribute(self.data, "pass", parent="qbt", default_is_none=True)
})
else:
raise Failed("Config Error: qbt attribute not found")
#Get tags from config file based on keyword
def get_tags(self,urls):
tags = {}
tags['new_tag'] = None
tags['max_ratio'] = None
tags['max_seeding_time'] = None
tags['limit_upload_speed'] = None
try:
tags['url'] = util.trunc_val(urls[0], '/')
except IndexError as e:
tags['url'] = None
logger.debug(f"Tracker Url:{urls[0]}")
logger.debug(e)
if 'tags' in self.data and self.data["tags"] is not None and urls:
tag_values = self.data['tags']
for tag_url, tag_details in tag_values.items():
for url in urls:
if tag_url in url:
try:
tags['url'] = util.trunc_val(url, '/')
default_tag = tags['url'].split('/')[2].split(':')[0]
except IndexError as e:
    logger.debug(f"Tracker Url:{url}")
    logger.debug(e)
    default_tag = tag_url  # fall back so default_tag is always defined below
# If using Format 1
if isinstance(tag_details,str):
tags['new_tag'] = self.util.check_for_attribute(self.data, tag_url, parent="tags",default=default_tag)
# Using Format 2
else:
if 'tag' not in tag_details:
logger.warning(f'No tags defined for {tag_url}. Please check your config.yml file. Setting tag to {tag_url}')
tags['new_tag'] = self.util.check_for_attribute(self.data, "tag", parent="tags", subparent=tag_url, default=tag_url)
tags['max_ratio'] = self.util.check_for_attribute(self.data, "max_ratio", parent="tags", subparent=tag_url, var_type="float", default_int=-2, default_is_none=True,do_print=False)
tags['max_seeding_time'] = self.util.check_for_attribute(self.data, "max_seeding_time", parent="tags", subparent=tag_url, var_type="int", default_int=-2, default_is_none=True,do_print=False)
tags['limit_upload_speed'] = self.util.check_for_attribute(self.data, "limit_upload_speed", parent="tags", subparent=tag_url, var_type="int", default_int=-1, default_is_none=True,do_print=False)
return tags
if tags['url']:
default_tag = tags['url'].split('/')[2].split(':')[0]
tags['new_tag'] = self.util.check_for_attribute(self.data, default_tag, parent="tags",default=default_tag)
logger.warning(f'No tags matched for {tags["url"]}. Please check your config.yml file. Setting tag to {default_tag}')
return tags
#Get category from config file based on path provided
def get_category(self,path):
category = ''
path = os.path.join(path,'')
if "cat" in self.data and self.data["cat"] is not None:
cat_path = self.data["cat"]
for cat, save_path in cat_path.items():
if save_path in path:
category = cat
break
if not category:
default_cat = path.split('/')[-2]
self.util.check_for_attribute(self.data, default_cat, parent="cat",default=path)
category = default_cat
logger.warning(f'No categories matched for the save path {path}. Check your config.yml file. - Setting category to {default_cat}')
return category
#Empty the recycle bin
def empty_recycle(self):
dry_run = self.args['dry_run']
loglevel = 'DRYRUN' if dry_run else 'INFO'
num_del = 0
if not self.args["skip_recycle"]:
n_info = ''
if self.recyclebin['enabled'] and self.recyclebin['empty_after_x_days']:
recycle_files = [os.path.join(path, name) for path, subdirs, files in os.walk(self.recycle_dir) for name in files]
recycle_files = sorted(recycle_files)
if recycle_files:
util.separator(f"Emptying Recycle Bin (Files > {self.recyclebin['empty_after_x_days']} days)", space=False, border=False)
for file in recycle_files:
fileStats = os.stat(file)
filename = file.replace(self.recycle_dir,'')
last_modified = fileStats[stat.ST_MTIME] # in seconds (last modified time)
now = time.time() # in seconds
days = (now - last_modified) / (60 * 60 * 24)
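# e.g. with empty_after_x_days: 30, a file last modified 45 days ago is removed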
if (self.recyclebin['empty_after_x_days'] <= days):
num_del += 1
n_info += (f"{'Did not delete' if dry_run else 'Deleted'} {filename} from the recycle bin. (Last modified {round(days)} days ago).\n")
if not dry_run: os.remove(file)
if num_del > 0:
if not dry_run: util.remove_empty_directories(self.recycle_dir,"**/*")
util.print_multiline(n_info,loglevel)
util.print_line(f"{'Did not delete' if dry_run else 'Deleted'} {num_del} files from the Recycle Bin.",loglevel)
else:
logger.debug(f'No files found in "{self.recycle_dir}"')
return num_del
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def get(self, url, json=None, headers=None, params=None):
return self.session.get(url, json=json, headers=headers, params=params)
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def post(self, url, data=None, json=None, headers=None):
return self.session.post(url, data=data, json=json, headers=headers)
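
For reference, a minimal config.yml sketch covering the sections this class reads. The keys come from the checks above; the hosts, paths, and tracker names are illustrative placeholders, not shipped defaults:

qbt:
  host: "localhost:8080"
  user: "username"
  pass: "password"
directory:
  root_dir: "/data/torrents"
  remote_dir: "/mnt/user/data/torrents"
  cross_seed: "/your/path/here/"
cat:
  movies: "/data/torrents/Movies"
tags:
  animebytes.tv: AnimeBytes    # Format 1: tracker keyword -> tag
  blutopia:                    # Format 2: per-tracker attributes
    tag: Blutopia
    max_ratio: 5.0
    max_seeding_time: 129600
    limit_upload_speed: 150
nohardlinks:
  movies:
    exclude_tags:
    - Beyond-HD
    cleanup: false
    max_ratio: 4.0
    max_seeding_time: 86400
recyclebin:
  enabled: true
  empty_after_x_days: 60
orphaned:
  exclude_patterns:
  - '**/.DS_Store'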

@@ -1,10 +0,0 @@
import signal
#Gracefully kill script when docker stops
class GracefulKiller:
kill_now = False
def __init__(self):
#signal.signal(signal.SIGINT, self.exit_gracefully)
signal.signal(signal.SIGTERM, self.exit_gracefully)
def exit_gracefully(self, *args):
self.kill_now = True

modules/qbittorrent.py Normal file

@@ -0,0 +1,498 @@
import logging, os, shutil
from qbittorrentapi import Client, LoginFailed, APIConnectionError
from modules import util
from modules.util import Failed, print_line, print_multiline, separator
from datetime import timedelta
from collections import Counter
from fnmatch import fnmatch
from alive_progress import alive_it
logger = logging.getLogger("qBit Manage")
class Qbt:
def __init__(self, config, params):
self.config = config
self.host = params["host"]
self.username = params["username"]
self.password = params["password"]
logger.debug(f'Host: {self.host}, Username: {self.username}, Password: {self.password if self.password is None else "[REDACTED]"}')
try:
self.client = Client(host=self.host, username=self.username, password=self.password)
self.client.auth_log_in()
logger.info(f"Qbt Connection Successful")
except LoginFailed:
raise Failed("Qbittorrent Error: Failed to login. Invalid username/password.")
except APIConnectionError:
raise Failed("Qbittorrent Error: Unable to connect to the client.")
except Exception:
raise Failed("Qbittorrent Error: Unable to connect to the client.")
separator(f"Getting Torrent List", space=False, border=False)
self.torrent_list = self.get_torrents({'sort':'added_on'})
# Will create a 2D Dictionary with the torrent name as the key
# torrentdict = {'TorrentName1' : {'Category':'TV', 'save_path':'/data/torrents/TV', 'count':1, 'msg':'[]'...},
# 'TorrentName2' : {'Category':'Movies', 'save_path':'/data/torrents/Movies'}, 'count':2, 'msg':'[]'...}
# List of dictionary key definitions
# Category = category of the torrent (str)
# save_path = save path of the torrent (str)
# count = total number of torrents that share the same name (int)
# msg = list of tracker messages for the torrents with this name (list of str)
# status = list of tracker status codes for the torrents with this name:
#          0: tracker disabled (used for DHT, PeX, and LSD), 1: tracker not contacted yet,
#          2: tracker contacted and working, 3: tracker updating,
#          4: tracker contacted but not working (or not sending proper replies)
# is_complete = True if at least one torrent with this name is in a Complete state
# first_hash = hash of the original torrent (assuming the torrent list is sorted by added_on ascending)
def get_torrent_info(torrent_list):
torrentdict = {}
t_obj_unreg = []
for torrent in alive_it(torrent_list):
save_path = torrent.save_path
category = torrent.category
is_complete = False
msg = None
status = None
if torrent.name in torrentdict:
    t_obj_list = torrentdict[torrent.name]['torrents']
    t_obj_list.append(torrent)
    t_count = torrentdict[torrent.name]['count'] + 1
    msg_list = torrentdict[torrent.name]['msg']
    status_list = torrentdict[torrent.name]['status']
    is_complete = torrentdict[torrent.name]['is_complete'] or torrent.state_enum.is_complete
    first_hash = torrentdict[torrent.name]['first_hash']
else:
t_obj_list = [torrent]
t_count = 1
msg_list = []
status_list = []
is_complete = torrent.state_enum.is_complete
first_hash = torrent.hash
for x in torrent.trackers:
if x.url.startswith('http'):
status = x.status
msg = x.msg.upper()
#Add any potential unregistered torrents to a list
if x.status == 4 and 'DOWN' not in msg and 'UNREACHABLE' not in msg:
t_obj_unreg.append(torrent)
if msg is not None: msg_list.append(msg)
if status is not None: status_list.append(status)
torrentattr = {'torrents': t_obj_list, 'Category': category, 'save_path': save_path, 'count': t_count, 'msg': msg_list, 'status': status_list, 'is_complete': is_complete, 'first_hash':first_hash}
torrentdict[torrent.name] = torrentattr
return torrentdict,t_obj_unreg
self.torrentinfo = None
self.torrentissue = None
if config.args['recheck'] or config.args['cross_seed'] or config.args['rem_unregistered']:
#Get an updated torrent dictionary information of the torrents
self.torrentinfo,self.torrentissue = get_torrent_info(self.torrent_list)
def get_torrents(self,params):
return self.client.torrents.info(**params)
def category(self):
dry_run = self.config.args['dry_run']
loglevel = 'DRYRUN' if dry_run else 'INFO'
num_cat = 0
if self.config.args['cat_update']:
separator(f"Updating Categories", space=False, border=False)
for torrent in alive_it(self.torrent_list):
if torrent.category == '':
new_cat = self.config.get_category(torrent.save_path)
try:
t_url = [util.trunc_val(x.url, '/') for x in torrent.trackers if x.url.startswith('http')][0]
except IndexError:
t_url = None
if not dry_run: torrent.set_category(category=new_cat)
print_line(util.insert_space(f'- Torrent Name: {torrent.name}',1),loglevel)
print_line(util.insert_space(f'-- New Category: {new_cat}',5),loglevel)
print_line(util.insert_space(f'-- Tracker: {t_url}',5),loglevel)
num_cat += 1
if num_cat >= 1:
print_line(f"{'Did not update' if dry_run else 'Updated'} {num_cat} new categories.",loglevel)
else:
print_line(f'No new torrents to categorize.',loglevel)
return num_cat
def tags(self):
dry_run = self.config.args['dry_run']
loglevel = 'DRYRUN' if dry_run else 'INFO'
num_tags = 0
if self.config.args['tag_update']:
separator(f"Updating Tags", space=False, border=False)
for torrent in alive_it(self.torrent_list):
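# Tag torrents that have no tags at all, or cross-seed injections whose only tag besides a noHL tag is 'cross-seed'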
if torrent.tags == '' or ('cross-seed' in torrent.tags and len([e for e in torrent.tags.split(",") if 'noHL' not in e]) == 1):
tags = self.config.get_tags([x.url for x in torrent.trackers if x.url.startswith('http')])
if tags["new_tag"]:
num_tags += 1
print_line(util.insert_space(f'Torrent Name: {torrent.name}',3),loglevel)
print_line(util.insert_space(f'New Tag: {tags["new_tag"]}',8),loglevel)
print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
self.set_tags_and_limits(torrent, tags["max_ratio"], tags["max_seeding_time"],tags["limit_upload_speed"],tags["new_tag"])
if num_tags >= 1:
print_line(f"{'Did not update' if dry_run else 'Updated'} {num_tags} new tags.",loglevel)
else:
print_line(f'No new torrents to tag.',loglevel)
return num_tags
def set_tags_and_limits(self,torrent,max_ratio,max_seeding_time,limit_upload_speed=None,tags=None):
dry_run = self.config.args['dry_run']
loglevel = 'DRYRUN' if dry_run else 'INFO'
#Print Logs
if limit_upload_speed:
if limit_upload_speed == -1: print_line(util.insert_space(f'Limit UL Speed: Infinity',1),loglevel)
else: print_line(util.insert_space(f'Limit UL Speed: {limit_upload_speed} kB/s',1),loglevel)
if max_ratio or max_seeding_time:
if max_ratio == -2 or max_seeding_time == -2: print_line(util.insert_space(f'Share Limit: Use Global Share Limit',4),loglevel)
elif max_ratio == -1 or max_seeding_time == -1: print_line(util.insert_space(f'Share Limit: Set No Share Limit',4),loglevel)
else:
if max_ratio != torrent.max_ratio and not max_seeding_time:
print_line(util.insert_space(f'Share Limit: Max Ratio = {max_ratio}',4),loglevel)
elif max_seeding_time != torrent.max_seeding_time and not max_ratio:
print_line(util.insert_space(f'Share Limit: Max Seed Time = {max_seeding_time} min',4),loglevel)
elif max_ratio != torrent.max_ratio and max_seeding_time != torrent.max_seeding_time:
print_line(util.insert_space(f'Share Limit: Max Ratio = {max_ratio}, Max Seed Time = {max_seeding_time} min',4),loglevel)
#Update Torrents
if not dry_run:
if tags: torrent.add_tags(tags)
if limit_upload_speed:
if limit_upload_speed == -1: torrent.set_upload_limit(-1)
else: torrent.set_upload_limit(limit_upload_speed*1024)
if max_ratio or max_seeding_time:
if max_ratio == -2 or max_seeding_time == -2:
torrent.set_share_limits(-2,-2)
return
elif max_ratio == -1 or max_seeding_time == -1:
torrent.set_share_limits(-1,-1)
return
if not max_ratio: max_ratio = torrent.max_ratio
if not max_seeding_time: max_seeding_time = torrent.max_seeding_time
torrent.set_share_limits(max_ratio,max_seeding_time)
def tag_nohardlinks(self):
dry_run = self.config.args['dry_run']
loglevel = 'DRYRUN' if dry_run else 'INFO'
num_tags = 0 #counter for the number of torrents that have no hard links
del_tor = 0 #counter for torrents with no hard links that meet the ratio/seed-time criteria and are deleted (torrent only)
del_tor_cont = 0 #counter for torrents with no hard links that meet the criteria and are deleted along with their contents
num_untag = 0 #counter for torrents that previously had no hard links but have hard links now
if self.config.args['tag_nohardlinks']:
util.separator(f"Tagging Torrents with No Hardlinks", space=False, border=False)
nohardlinks = self.config.nohardlinks
tdel_dict = {} #dictionary to track the torrent names and content path that meet the deletion criteria
root_dir = self.config.root_dir
remote_dir = self.config.remote_dir
for category in nohardlinks:
torrent_list = self.get_torrents({'category':category,'filter':'completed'})
if len(torrent_list) == 0:
logger.error(f'No completed torrents found in the category ({category}) defined under nohardlinks in config.yml. Check that it matches a category in qBittorrent that has one or more torrents.')
continue
for torrent in alive_it(torrent_list):
tags = self.config.get_tags([x.url for x in torrent.trackers if x.url.startswith('http')])
if any(tag in torrent.tags for tag in (nohardlinks[category]['exclude_tags'] or [])):
#Skip to the next torrent if we find any torrents that are in the exclude tag
continue
else:
#Checks for any hard links and not already tagged
if util.nohardlink(torrent['content_path'].replace(root_dir,remote_dir)):
#Will only tag new torrents that don't have noHL tag
if 'noHL' not in torrent.tags:
num_tags += 1
print_line(util.insert_space(f'Torrent Name: {torrent.name}',3),loglevel)
print_line(util.insert_space(f'Added Tag: noHL',6),loglevel)
print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
self.set_tags_and_limits(torrent, nohardlinks[category]["max_ratio"], nohardlinks[category]["max_seeding_time"],tags='noHL')
#Cleans up previously tagged noHL torrents
else:
# Deletes torrent with data if cleanup is set to true and meets the ratio/seeding requirements
if (nohardlinks[category]['cleanup'] and torrent.state_enum.is_paused and len(nohardlinks[category])>0):
print_line(f'Torrent Name: {torrent.name} has no hard links found and meets ratio/seeding requirements.',loglevel)
print_line(util.insert_space(f"Cleanup flag set to true. {'Not Deleting' if dry_run else 'Deleting'} torrent + contents.",6),loglevel)
tdel_dict[torrent.name] = torrent['content_path'].replace(root_dir,remote_dir)
#Checks to see if previous noHL tagged torrents now have hard links.
if (not (util.nohardlink(torrent['content_path'].replace(root_dir,remote_dir))) and ('noHL' in torrent.tags)):
num_untag += 1
print_line(f'Previously tagged noHL torrent {torrent.name} now has hard links.',loglevel)
print_line(util.insert_space(f'Removed Tag: noHL',6),loglevel)
print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
print_line(f"{'Not Reverting' if dry_run else 'Reverting'} share limits.",loglevel)
if not dry_run:
torrent.remove_tags(tags='noHL')
self.set_tags_and_limits(torrent, tags["max_ratio"], tags["max_seeding_time"],tags["limit_upload_speed"])
#loop through torrent list again for cleanup purposes
if (nohardlinks[category]['cleanup']):
for torrent in alive_it(torrent_list):
if torrent.name in tdel_dict and 'noHL' in torrent.tags:
    #Double check that the content path is the same before we delete anything
    if torrent['content_path'].replace(root_dir,remote_dir) == tdel_dict[torrent.name]:
        if os.path.exists(torrent['content_path'].replace(root_dir,remote_dir)):
            if not dry_run: self.tor_delete_recycle(torrent)
            del_tor_cont += 1
            print_line(util.insert_space(f"{'Did not delete' if dry_run else 'Deleted'} .torrent AND content files.",8),loglevel)
        else:
            if not dry_run: torrent.delete(hash=torrent.hash, delete_files=False)
            del_tor += 1
            print_line(util.insert_space(f"{'Did not delete' if dry_run else 'Deleted'} .torrent but NOT content files.",8),loglevel)
if num_tags >= 1:
print_line(f"{'Did not Tag/set' if dry_run else 'Tag/set'} share limits for {num_tags} .torrent{'s.' if num_tags > 1 else '.'}",loglevel)
else:
print_line(f'No torrents to tag with no hard links.',loglevel)
if num_untag >= 1: print_line(f"{'Did not remove' if dry_run else 'Removed'} noHL tags / share limits for {num_untag} .torrent{'s.' if num_untag > 1 else '.'}",loglevel)
if del_tor >= 1: print_line(f"{'Did not delete' if dry_run else 'Deleted'} {del_tor} .torrent{'s' if del_tor > 1 else ''} but not content files.",loglevel)
if del_tor_cont >= 1: print_line(f"{'Did not delete' if dry_run else 'Deleted'} {del_tor_cont} .torrent{'s' if del_tor_cont > 1 else ''} AND content files.",loglevel)
return num_tags,num_untag,del_tor,del_tor_cont
def rem_unregistered(self):
dry_run = self.config.args['dry_run']
loglevel = 'DRYRUN' if dry_run else 'INFO'
del_tor = 0
del_tor_cont = 0
if self.config.args['rem_unregistered']:
separator(f"Removing Unregistered Torrents", space=False, border=False)
pot_unr = ''
unreg_msgs = [
'UNREGISTERED',
'TORRENT NOT FOUND',
'TORRENT IS NOT FOUND',
'NOT REGISTERED',
'HTTPS://BEYOND-HD.ME/TORRENTS',
'NOT EXIST',
'UNKNOWN TORRENT',
'REDOWNLOAD',
'PACKS',
'REPACKED',
'PACK',
'TRUMP',
'RETITLED',
]
for torrent in alive_it(self.torrentissue):
t_name = torrent.name
t_count = self.torrentinfo[t_name]['count']
t_msg = self.torrentinfo[t_name]['msg']
t_status = self.torrentinfo[t_name]['status']
for x in torrent.trackers:
if x.url.startswith('http'):
t_url = util.trunc_val(x.url, '/')
msg_up = x.msg.upper()
#Add any potential unregistered torrents to a list
if not any(m in msg_up for m in unreg_msgs) and x.status == 4 and 'DOWN' not in msg_up and 'UNREACHABLE' not in msg_up:
pot_unr += (util.insert_space(f'Torrent Name: {t_name}',3)+'\n')
pot_unr += (util.insert_space(f'Status: {msg_up}',9)+'\n')
pot_unr += (util.insert_space(f'Tracker: {t_url}',8)+'\n')
if any(m in msg_up for m in unreg_msgs) and x.status == 4 and 'DOWN' not in msg_up and 'UNREACHABLE' not in msg_up:
print_line(util.insert_space(f'Torrent Name: {t_name}',3),loglevel)
print_line(util.insert_space(f'Status: {msg_up}',9),loglevel)
print_line(util.insert_space(f'Tracker: {t_url}',8),loglevel)
if t_count > 1:
# Checks if any of the original torrents are working
if '' in t_msg or 2 in t_status:
if not dry_run: torrent.delete(hash=torrent.hash, delete_files=False)
print_line(util.insert_space(f"{'Did not delete' if dry_run else 'Deleted'} .torrent but NOT content files.",8),loglevel)
del_tor += 1
else:
if not dry_run: self.tor_delete_recycle(torrent)
print_line(util.insert_space(f"{'Did not delete' if dry_run else 'Deleted'} .torrent AND content files.",8),loglevel)
del_tor_cont += 1
else:
if not dry_run: self.tor_delete_recycle(torrent)
print_line(util.insert_space(f"{'Did not delete' if dry_run else 'Deleted'} .torrent AND content files.",8),loglevel)
del_tor_cont += 1
if del_tor >=1 or del_tor_cont >=1:
if del_tor >= 1: print_line(f"{'Did not delete' if dry_run else 'Deleted'} {del_tor} .torrent{'s' if del_tor > 1 else ''} but not content files.",loglevel)
if del_tor_cont >= 1: print_line(f"{'Did not delete' if dry_run else 'Deleted'} {del_tor_cont} .torrent{'s' if del_tor_cont > 1 else ''} AND content files.",loglevel)
else:
print_line('No unregistered torrents found.',loglevel)
if (len(pot_unr) > 0):
separator(f"Potential Unregistered torrents", space=False, border=False,loglevel=loglevel)
print_multiline(pot_unr.rstrip(),loglevel)
return del_tor,del_tor_cont
# Function used to move any torrents from the cross seed directory to the correct save directory
def cross_seed(self):
dry_run = self.config.args['dry_run']
loglevel = 'DRYRUN' if dry_run else 'INFO'
added = 0 # Keep track of total torrents tagged
tagged = 0 #Track # of torrents tagged that are not cross-seeded
if self.config.args['cross_seed']:
separator(f"Checking for Cross-Seed Torrents", space=False, border=False)
# List of categories for all torrents moved
categories = []
# Only get torrent files
cs_files = [f for f in os.listdir(self.config.cross_seed_dir) if f.endswith('.torrent')]
dir_cs = self.config.cross_seed_dir
dir_cs_out = os.path.join(dir_cs,'qbit_manage_added')
os.makedirs(dir_cs_out,exist_ok=True)
for file in alive_it(cs_files):
t_name = file.split(']', 2)[2].split('.torrent')[0]
# Substring Key match in dictionary (used because t_name might not match exactly with torrentdict key)
# Returned the dictionary of filtered item
torrentdict_file = dict(filter(lambda item: t_name in item[0], self.torrentinfo.items()))
if torrentdict_file:
# Get the exact torrent match name from torrentdict
t_name = next(iter(torrentdict_file))
category = self.torrentinfo[t_name]['Category']
dest = os.path.join(self.torrentinfo[t_name]['save_path'], '')
src = os.path.join(dir_cs,file)
dir_cs_out = os.path.join(dir_cs,'qbit_manage_added',file)
#Only add cross-seed torrent if original torrent is complete
if self.torrentinfo[t_name]['is_complete']:
categories.append(category)
print_line(f"{'Not Adding' if dry_run else 'Adding'} to qBittorrent:",loglevel)
print_line(util.insert_space(f'Torrent Name: {t_name}',3),loglevel)
print_line(util.insert_space(f'Category: {category}',7),loglevel)
print_line(util.insert_space(f'Save_Path: {dest}',6),loglevel)
added += 1
if not dry_run:
self.client.torrents.add(torrent_files=src, save_path=dest, category=category, tags='cross-seed', is_paused=True)
shutil.move(src, dir_cs_out)
else:
print_line(f'Found {t_name} in {dir_cs} but original torrent is not complete.',loglevel)
print_line(f'Not adding to qBittorrent',loglevel)
else:
if dry_run: print_line(f'{t_name} not found in torrents.',loglevel)
else: print_line(f'{t_name} not found in torrents.','WARNING')
#Tag missing cross-seed torrents tags
for torrent in alive_it(self.torrent_list):
t_name = torrent.name
if 'cross-seed' not in torrent.tags and self.torrentinfo[t_name]['count'] > 1 and self.torrentinfo[t_name]['first_hash'] != torrent.hash:
tagged += 1
print_line(f"{'Not Adding' if dry_run else 'Adding'} 'cross-seed' tag to {t_name}",loglevel)
if not dry_run: torrent.add_tags(tags='cross-seed')
numcategory = Counter(categories)
for c in numcategory:
if numcategory[c] > 0: print_line(f"{numcategory[c]} {c} cross-seed .torrents {'not added' if dry_run else 'added'}.",loglevel)
if added > 0: print_line(f"Total {added} cross-seed .torrents {'not added' if dry_run else 'added'}.",loglevel)
if tagged > 0: print_line(f"Total {tagged} cross-seed .torrents {'not tagged' if dry_run else 'tagged'}.",loglevel)
return added,tagged
# Function used to recheck paused torrents sorted by size and resume torrents that are completed
def recheck(self):
dry_run = self.config.args['dry_run']
loglevel = 'DRYRUN' if dry_run else 'INFO'
resumed = 0
rechecked = 0
if self.config.args['recheck']:
separator(f"Rechecking Paused Torrents", space=False, border=False)
#sort by size and paused
torrent_list = self.get_torrents({'status_filter':'paused','sort':'size'})
if torrent_list:
for torrent in alive_it(torrent_list):
new_tag = self.config.get_tags([x.url for x in torrent.trackers if x.url.startswith('http')])
#Resume torrent if completed
if torrent.progress == 1:
if torrent.max_ratio < 0 and torrent.max_seeding_time < 0:
resumed += 1
print_line(f"{'Not Resuming' if dry_run else 'Resuming'} [{new_tag['new_tag']}] - {torrent.name}",loglevel)
if not dry_run: torrent.resume()
else:
#Check to see if torrent meets AutoTorrentManagement criteria
logger.debug('Checking to see if torrent meets AutoTorrentManagement criteria')
logger.debug(util.insert_space(f'- Torrent Name: {torrent.name}',2))
logger.debug(util.insert_space(f'-- Ratio vs Max Ratio: {torrent.ratio} < {torrent.max_ratio}',4))
logger.debug(util.insert_space(f'-- Seeding Time vs Max Seed Time: {timedelta(seconds=torrent.seeding_time)} < {timedelta(minutes=torrent.max_seeding_time)}',4))
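# Resume only while the torrent is still under its share limits: a ratio-only limit,
# a seed-time-only limit, or both limits set with neither reached yet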
if (torrent.max_ratio >= 0 and torrent.ratio < torrent.max_ratio and torrent.max_seeding_time < 0) \
or (torrent.max_seeding_time >= 0 and (torrent.seeding_time < (torrent.max_seeding_time * 60)) and torrent.max_ratio < 0) \
or (torrent.max_ratio >= 0 and torrent.max_seeding_time >= 0 and torrent.ratio < torrent.max_ratio and (torrent.seeding_time < (torrent.max_seeding_time * 60))):
resumed += 1
print_line(f"{'Not Resuming' if dry_run else 'Resuming'} [{new_tag['new_tag']}] - {torrent.name}",loglevel)
if not dry_run: torrent.resume()
#Recheck
elif torrent.progress == 0 and self.torrentinfo[torrent.name]['is_complete'] and not torrent.state_enum.is_checking:
rechecked += 1
print_line(f"{'Not Rechecking' if dry_run else 'Rechecking'} [{new_tag['new_tag']}] - {torrent.name}",loglevel)
if not dry_run: torrent.recheck()
return resumed,rechecked
def rem_orphaned(self):
dry_run = self.config.args['dry_run']
loglevel = 'DRYRUN' if dry_run else 'INFO'
orphaned = 0
if self.config.args['rem_orphaned']:
separator(f"Checking for Orphaned Files", space=False, border=False)
torrent_files = []
root_files = []
orphaned_files = []
excluded_orphan_files = []
orphaned_parent_path = set()
remote_path = self.config.remote_dir
root_path = self.config.root_dir
if (remote_path != root_path):
root_files = [os.path.join(path.replace(remote_path,root_path), name) for path, subdirs, files in alive_it(os.walk(remote_path)) for name in files if os.path.join(remote_path,'orphaned_data') not in path and os.path.join(remote_path,'.RecycleBin') not in path]
else:
root_files = [os.path.join(path, name) for path, subdirs, files in alive_it(os.walk(root_path)) for name in files if os.path.join(root_path,'orphaned_data') not in path and os.path.join(root_path,'.RecycleBin') not in path]
#Get an updated list of torrents
torrent_list = self.get_torrents({'sort':'added_on'})
for torrent in alive_it(torrent_list):
for file in torrent.files:
torrent_files.append(os.path.join(torrent.save_path,file.name))
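# Orphans are files that exist on disk but are not claimed by any torrent in the client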
orphaned_files = set(root_files) - set(torrent_files)
orphaned_files = sorted(orphaned_files)
if self.config.orphaned['exclude_patterns']:
exclude_patterns = self.config.orphaned['exclude_patterns']
excluded_orphan_files = [file for file in orphaned_files for exclude_pattern in exclude_patterns if fnmatch(file, exclude_pattern.replace(remote_path,root_path))]
orphaned_files = set(orphaned_files) - set(excluded_orphan_files)
separator(f"Torrent Files", space=False, border=False, loglevel='DEBUG')
print_multiline("\n".join(torrent_files),'DEBUG')
separator(f"Root Files", space=False, border=False,loglevel='DEBUG')
print_multiline("\n".join(root_files),'DEBUG')
separator(f"Excluded Orphan Files", space=False, border=False,loglevel='DEBUG')
print_multiline("\n".join(excluded_orphan_files),'DEBUG')
separator(f"Orphaned Files", space=False, border=False,loglevel='DEBUG')
print_multiline("\n".join(orphaned_files),'DEBUG')
separator(f"Deleting Orphaned Files", space=False, border=False,loglevel='DEBUG')
if orphaned_files:
dir_out = os.path.join(remote_path,'orphaned_data')
os.makedirs(dir_out,exist_ok=True)
print_line(f"{len(orphaned_files)} Orphaned files found",loglevel)
print_multiline("\n".join(orphaned_files),loglevel)
print_line(f"{'Did not move' if dry_run else 'Moved'} {len(orphaned_files)} Orphaned files to {dir_out.replace(remote_path,root_path)}",loglevel)
orphaned = len(orphaned_files)
#Delete empty directories after moving orphan files
logger.info(f'Cleaning up any empty directories...')
if not dry_run:
for file in alive_it(orphaned_files):
src = file.replace(root_path,remote_path)
dest = os.path.join(dir_out,file.replace(root_path,''))
util.move_files(src,dest)
orphaned_parent_path.add(os.path.dirname(file).replace(root_path,remote_path))
for parent_path in orphaned_parent_path:
util.remove_empty_directories(parent_path,"**/*")
else:
print_line(f"No Orphaned Filed found.",loglevel)
return orphaned
def tor_delete_recycle(self,torrent):
if self.config.recyclebin['enabled']:
tor_files = []
#Define torrent files/folders
for file in torrent.files:
tor_files.append(os.path.join(torrent.save_path,file.name))
#Create recycle bin if not exists
recycle_path = os.path.join(self.config.remote_dir,'.RecycleBin')
os.makedirs(recycle_path,exist_ok=True)
separator(f"Moving {len(tor_files)} files to RecycleBin", space=False, border=False,loglevel='DEBUG')
if len(tor_files) == 1: print_line(tor_files[0],'DEBUG')
else: print_multiline("\n".join(tor_files),'DEBUG')
logger.debug(f'Moved {len(tor_files)} files to {recycle_path.replace(self.config.remote_dir,self.config.root_dir)}')
#Move files from torrent contents to Recycle bin
for file in tor_files:
src = file.replace(self.config.root_dir,self.config.remote_dir)
dest = os.path.join(recycle_path,file.replace(self.config.root_dir,''))
#move files and change date modified
try:
util.move_files(src,dest,True)
except FileNotFoundError:
print_line(f'RecycleBin Warning - FileNotFound: No such file or directory: {src} ','WARNING')
#Delete torrent and files
torrent.delete(hash=torrent.hash, delete_files=False)
#Remove any empty directories
util.remove_empty_directories(torrent.save_path.replace(self.config.root_dir,self.config.remote_dir),"**/*")
else:
torrent.delete(hash=torrent.hash, delete_files=True)
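
For orientation, a minimal standalone sketch of the qbittorrent-api calls this class wraps; the host and credentials are placeholders:

from qbittorrentapi import Client

# Placeholder connection details, mirroring the params dict Config builds
client = Client(host="localhost:8080", username="user", password="pass")
client.auth_log_in()  # raises LoginFailed / APIConnectionError on failure
# The same call Qbt.get_torrents() wraps: list torrents oldest-first
for torrent in client.torrents.info(sort="added_on"):
    print(torrent.hash, torrent.category, torrent.name)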

@@ -1,39 +1,185 @@
import logging, os, shutil, traceback, time, signal
from logging.handlers import RotatingFileHandler
from ruamel import yaml
from pathlib import Path
logger = logging.getLogger('qBit Manage')
class TimeoutExpired(Exception):
pass
def get_list(data, lower=False, split=True, int_list=False):
if data is None: return None
elif isinstance(data, list): return data
elif isinstance(data, dict): return [data]
elif split is False: return [str(data)]
elif lower is True: return [d.strip().lower() for d in str(data).split(",")]
elif int_list is True:
try: return [int(d.strip()) for d in str(data).split(",")]
except ValueError: return []
else: return [d.strip() for d in str(data).split(",")]
class check:
def __init__(self, config):
self.config = config
def check_for_attribute(self, data, attribute, parent=None, subparent=None, test_list=None, default=None, do_print=True, default_is_none=False, req_default=False, var_type="str", default_int=0, throw=False, save=True):
endline = ""
if parent is not None:
    if subparent is not None:
        if data and parent in data and subparent in data[parent]:
            data = data[parent][subparent]
        else:
            data = None
            do_print = False
    else:
        if data and parent in data:
            data = data[parent]
        else:
            data = None
            do_print = False
            #save = False
if subparent is not None:
text = f"{parent}->{subparent} sub-attribute {attribute}"
elif parent is None:
text = f"{attribute} attribute"
else:
text = f"{parent} sub-attribute {attribute}"
if data is None or attribute not in data:
message = f"{text} not found"
if parent and save is True:
loaded_config, _, _ = yaml.util.load_yaml_guess_indent(open(self.config.config_path))
if subparent:
endline = f"\n{subparent} sub-attribute {attribute} added to config"
if subparent not in loaded_config[parent] or not loaded_config[parent][subparent]:
    loaded_config[parent][subparent] = {attribute: default}
elif attribute not in loaded_config[parent][subparent]:
    loaded_config[parent][subparent][attribute] = default
else:
endline = ""
else:
endline = f"\n{parent} sub-attribute {attribute} added to config"
if parent not in loaded_config or not loaded_config[parent]:
loaded_config[parent] = {attribute: default}
elif attribute not in loaded_config[parent]:
loaded_config[parent][attribute] = default
else:
endline = ""
yaml.round_trip_dump(loaded_config, open(self.config.config_path, "w"), indent=None, block_seq_indent=2)
elif data[attribute] is None:
if default_is_none and var_type == "list":
return []
elif default_is_none:
return None
else:
message = f"{text} is blank"
elif var_type == "url":
if data[attribute].endswith(("\\", "/")):
return data[attribute][:-1]
else:
return data[attribute]
elif var_type == "bool":
if isinstance(data[attribute], bool):
return data[attribute]
else:
message = f"{text} must be either true or false"
elif var_type == "int":
if isinstance(data[attribute], int) and data[attribute] >= default_int:
return data[attribute]
else:
message = f"{text} must an integer >= {default_int}"
elif var_type == "float":
try:
data[attribute] = float(data[attribute])
except (TypeError, ValueError):
pass
if isinstance(data[attribute], float) and data[attribute] >= default_int:
return data[attribute]
else:
message = f"{text} must a float >= {float(default_int)}"
elif var_type == "path":
if os.path.exists(os.path.abspath(data[attribute])):
return os.path.join(data[attribute],'')
else:
message = f"Path {os.path.abspath(data[attribute])} does not exist"
elif var_type == "list":
return get_list(data[attribute], split=False)
elif var_type == "list_path":
temp_list = [p for p in get_list(
data[attribute], split=False) if os.path.exists(os.path.abspath(p))]
if len(temp_list) > 0:
return temp_list
else:
message = "No Paths exist"
elif var_type == "lower_list":
return get_list(data[attribute], lower=True)
elif test_list is None or data[attribute] in test_list:
return data[attribute]
else:
message = f"{text}: {data[attribute]} is an invalid input"
if var_type == "path" and default and os.path.exists(os.path.abspath(default)):
return os.path.join(default,'')
elif var_type == "path" and default:
if data and attribute in data and data[attribute]:
message = f"neither {data[attribute]} or the default path {default} could be found"
else:
message = f"no {text} found and the default path {default} could not be found"
default = None
if default is not None or default_is_none:
message = message + f" using {default} as default"
message = message + endline
if req_default and default is None:
raise Failed(
f"Config Error: {attribute} attribute must be set under {parent}.")
options = ""
if test_list:
for option, description in test_list.items():
if len(options) > 0:
options = f"{options}\n"
options = f"{options} {option} ({description})"
if (default is None and not default_is_none) or throw:
if len(options) > 0:
message = message + "\n" + options
raise Failed(f"Config Error: {message}")
if do_print:
print_multiline(f"Config Warning: {message}", "warning")
if data and attribute in data and data[attribute] and test_list is not None and data[attribute] not in test_list:
print_multiline(options)
return default
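# A typical call, mirroring Config above: read recyclebin->enabled as a bool, default to
# True, and write that default back into config.yml when the key is missing:
#   self.util.check_for_attribute(self.data, "enabled", parent="recyclebin", var_type="bool", default=True)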
class Failed(Exception):
pass
class NotScheduled(Exception):
pass
separating_character = "="
screen_width = 100
spacing = 0
def add_dict_list(keys, value, dict_map):
for key in keys:
if key in dict_map:
dict_map[key].append(value)
else:
dict_map[key] = [value]
def get_int_list(data, id_type):
int_values = []
for value in get_list(data):
try: int_values.append(regex_first_int(value, id_type))
except Failed as e: logger.error(e)
return int_values
def print_line(lines, loglevel='INFO'):
logger.log(getattr(logging, loglevel.upper()), str(lines))
def print_multiline(lines, loglevel='INFO'):
    for i, line in enumerate(str(lines).split("\n")):
        logger.log(getattr(logging, loglevel.upper()), line)
        if i == 0:
            logger.handlers[1].setFormatter(logging.Formatter(" " * 65 + "| %(message)s"))
    logger.handlers[1].setFormatter(logging.Formatter("[%(asctime)s] %(filename)-27s %(levelname)-10s | %(message)s"))
def print_stacktrace():
    print_multiline(traceback.format_exc(), 'CRITICAL')
def my_except_hook(exctype, value, tb):
for line in traceback.format_exception(etype=exctype, value=value, tb=tb):
print_multiline(line, 'CRITICAL')
def centered(text, sep=" "):
if len(text) > screen_width - 2:
return text
@@ -52,13 +198,14 @@ def separator(text=None, space=True, border=True, loglevel='INFO'):
apply_formatter(handler, border=False)
border_text = f"|{separating_character * screen_width}|"
if border:
logger.log(getattr(logging, loglevel.upper()), border_text)
if text:
text_list = text.split("\n")
for t in text_list:
logger.log(getattr(logging, loglevel),f"|{sep}{centered(t, sep=sep)}{sep}|")
logger.log(getattr(logging, loglevel.upper()),
f"|{sep}{centered(t, sep=sep)}{sep}|")
if border:
logger.log(getattr(logging, loglevel.upper()), border_text)
for handler in logger.handlers:
apply_formatter(handler)
@@ -76,9 +223,9 @@ def adjust_space(display_title):
display_title += " " * space_length
return display_title
def insert_space(display_title, space_length=0):
display_title = str(display_title)
if space_length == 0:
space_length = spacing - len(display_title)
if space_length > 0:
display_title = " " * space_length + display_title
@@ -92,4 +239,56 @@ def print_return(text):
def print_end():
print(adjust_space(" "), end="\r")
global spacing
spacing = 0
# truncate the value of the torrent url to remove sensitive information
def trunc_val(s, d, n=3):
return d.join(s.split(d, n)[:n])
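# e.g. trunc_val("https://tracker.example.org/announce/abcd1234", '/')
# returns "https://tracker.example.org", dropping the announce path and its passkey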
# Move files from source to destination, mod variable is to change the date modified of the file being moved
def move_files(src, dest, mod=False):
dest_path = os.path.dirname(dest)
if not os.path.isdir(dest_path):
    os.makedirs(dest_path)
shutil.move(src, dest)
if mod:
    mod_time = time.time()
    os.utime(dest, (mod_time, mod_time))
# Remove any empty directories after moving files
def remove_empty_directories(pathlib_root_dir, pattern):
pathlib_root_dir = Path(pathlib_root_dir)
# list all directories recursively and sort them by path,
# longest first
L = sorted(
pathlib_root_dir.glob(pattern),
key=lambda p: len(str(p)),
reverse=True,
)
for pdir in L:
try:
pdir.rmdir() # remove directory if empty
except OSError:
continue # catch and continue if non-empty
#will check if there are any hard links if it passes a file or folder
def nohardlink(file):
check = True
if (os.path.isfile(file)):
if (os.stat(file).st_nlink > 1):
check = False
else:
for path, subdirs, files in os.walk(file):
for x in files:
if (os.stat(os.path.join(path,x)).st_nlink > 1):
check = False
return check
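# e.g. nohardlink("/data/torrents/Movies/movie.mkv") returns False when the file has
# another hard link elsewhere (st_nlink > 1) and True when this is its only link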
#Gracefully kill script when docker stops
class GracefulKiller:
kill_now = False
def __init__(self):
#signal.signal(signal.SIGINT, self.exit_gracefully)
signal.signal(signal.SIGTERM, self.exit_gracefully)
def exit_gracefully(self, *args):
self.kill_now = True
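
GracefulKiller is meant to be polled from the main loop; a minimal usage sketch, where the loop body stands in for the real scheduler:

killer = GracefulKiller()
while not killer.kill_now:
    # run the scheduled jobs, then sleep briefly
    time.sleep(1)
logger.info('SIGTERM received, exiting gracefully')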

File diff suppressed because it is too large

@@ -1,3 +1,5 @@
ruamel.yaml
qbittorrent-api
schedule
retrying
alive_progress
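
The new dependencies install with the usual pip step:

pip install -r requirements.txt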