diff --git a/VERSION b/VERSION index c0943d3..56fea8a 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -2.3 \ No newline at end of file +3.0.0 \ No newline at end of file diff --git a/modules/config.py b/modules/config.py new file mode 100644 index 0000000..22e5893 --- /dev/null +++ b/modules/config.py @@ -0,0 +1,196 @@ +import logging, os, requests, stat, time +from modules import util +from modules.util import Failed, check +from modules.qbittorrent import Qbt +from ruamel import yaml +from retrying import retry + +logger = logging.getLogger("qBit Manage") + +class Config: + def __init__(self, default_dir, args): + logger.info("Locating config...") + self.args = args + config_file = args["config_file"] + + if config_file and os.path.exists(config_file): self.config_path = os.path.abspath(config_file) + elif config_file and os.path.exists(os.path.join(default_dir, config_file)): self.config_path = os.path.abspath(os.path.join(default_dir, config_file)) + elif config_file and not os.path.exists(config_file): raise Failed(f"Config Error: config not found at {os.path.abspath(config_file)}") + elif os.path.exists(os.path.join(default_dir, "config.yml")): self.config_path = os.path.abspath(os.path.join(default_dir, "config.yml")) + else: raise Failed(f"Config Error: config not found at {os.path.abspath(default_dir)}") + logger.info(f"Using {self.config_path} as config") + + self.util = check(self) + self.default_dir = default_dir + + yaml.YAML().allow_duplicate_keys = True + try: + new_config, _, _ = yaml.util.load_yaml_guess_indent(open(self.config_path, encoding="utf-8")) + if "qbt" in new_config: new_config["qbt"] = new_config.pop("qbt") + if "directory" in new_config: new_config["directory"] = new_config.pop("directory") + if "cat" in new_config: new_config["cat"] = new_config.pop("cat") + if "tags" in new_config: new_config["tags"] = new_config.pop("tags") + if "nohardlinks" in new_config: new_config["nohardlinks"] = new_config.pop("nohardlinks") + if "recyclebin" in new_config: new_config["recyclebin"] = new_config.pop("recyclebin") + if "orphaned" in new_config: new_config["orphaned"] = new_config.pop("orphaned") + yaml.round_trip_dump(new_config, open(self.config_path, "w", encoding="utf-8"), indent=None, block_seq_indent=2) + self.data = new_config + except yaml.scanner.ScannerError as e: + raise Failed(f"YAML Error: {util.tab_new_lines(e)}") + except Exception as e: + util.print_stacktrace() + raise Failed(f"YAML Error: {e}") + + self.session = requests.Session() + + #nohardlinks + self.nohardlinks = None + if "nohardlinks" in self.data: + self.nohardlinks = {} + for cat in self.data["nohardlinks"]: + if cat in list(self.data["cat"].keys()): + self.nohardlinks[cat] = {} + self.nohardlinks[cat]["exclude_tags"] = self.util.check_for_attribute(self.data, "exclude_tags", parent="nohardlinks", subparent=cat, var_type="list", default_is_none=True,do_print=False) + self.nohardlinks[cat]["cleanup"] = self.util.check_for_attribute(self.data, "cleanup", parent="nohardlinks", subparent=cat, var_type="bool", default=False,do_print=False) + self.nohardlinks[cat]['max_ratio'] = self.util.check_for_attribute(self.data, "max_ratio", parent="nohardlinks", subparent=cat, var_type="float", default_int=-2, default_is_none=True,do_print=False) + self.nohardlinks[cat]['max_seeding_time'] = self.util.check_for_attribute(self.data, "max_seeding_time", parent="nohardlinks", subparent=cat, var_type="int", default_int=-2, default_is_none=True,do_print=False) + else: + raise Failed(f"Config Error: Category {cat} 
is defined under nohardlinks attribute but is not defined in the cat attribute.")
+        else:
+            if self.args["tag_nohardlinks"]:
+                raise Failed("Config Error: nohardlinks attribute not found")
+
+        #Add RecycleBin
+        self.recyclebin = {}
+        self.recyclebin['enabled'] = self.util.check_for_attribute(self.data, "enabled", parent="recyclebin",var_type="bool",default=True)
+        self.recyclebin['empty_after_x_days'] = self.util.check_for_attribute(self.data, "empty_after_x_days", parent="recyclebin",var_type="int",default_is_none=True)
+
+        #Add Orphaned
+        self.orphaned = {}
+        self.orphaned['exclude_patterns'] = self.util.check_for_attribute(self.data, "exclude_patterns", parent="orphaned",var_type="list",default_is_none=True,do_print=False)
+
+        #Assign directories
+        if "directory" in self.data:
+            self.root_dir = self.util.check_for_attribute(self.data, "root_dir", parent="directory",default_is_none=True)
+            self.remote_dir = self.util.check_for_attribute(self.data, "remote_dir", parent="directory",default=self.root_dir)
+            if (self.args["cross_seed"] or self.args["tag_nohardlinks"] or self.args["rem_orphaned"]):
+                self.remote_dir = self.util.check_for_attribute(self.data, "remote_dir", parent="directory",var_type="path",default=self.root_dir)
+            else:
+                if self.recyclebin['enabled']:
+                    self.remote_dir = self.util.check_for_attribute(self.data, "remote_dir", parent="directory",var_type="path",default=self.root_dir)
+            if self.args["cross_seed"]:
+                self.cross_seed_dir = self.util.check_for_attribute(self.data, "cross_seed", parent="directory",var_type="path")
+            else:
+                self.cross_seed_dir = self.util.check_for_attribute(self.data, "cross_seed", parent="directory",default_is_none=True)
+            self.recycle_dir = os.path.join(self.remote_dir,'.RecycleBin')
+        else:
+            raise Failed("Config Error: directory attribute not found")
+
+        #Connect to Qbittorrent
+        self.qbt = None
+        if "qbt" in self.data:
+            logger.info("Connecting to Qbittorrent...")
+            self.qbt = Qbt(self, {
+                "host": self.util.check_for_attribute(self.data, "host", parent="qbt", throw=True),
+                "username": self.util.check_for_attribute(self.data, "user", parent="qbt", default_is_none=True),
+                "password": self.util.check_for_attribute(self.data, "pass", parent="qbt", default_is_none=True)
+            })
+        else:
+            raise Failed("Config Error: qbt attribute not found")
+
+    #Get tags from config file based on keyword
+    def get_tags(self,urls):
+        tags = {}
+        tags['new_tag'] = None
+        tags['max_ratio'] = None
+        tags['max_seeding_time'] = None
+        tags['limit_upload_speed'] = None
+        try:
+            tags['url'] = util.trunc_val(urls[0], '/')
+        except IndexError as e:
+            tags['url'] = None
+            logger.debug(f"Tracker Urls:{urls}")
+            logger.debug(e)
+        if 'tags' in self.data and self.data["tags"] is not None and urls:
+            tag_values = self.data['tags']
+            for tag_url, tag_details in tag_values.items():
+                for url in urls:
+                    if tag_url in url:
+                        try:
+                            tags['url'] = util.trunc_val(url, '/')
+                            default_tag = tags['url'].split('/')[2].split(':')[0]
+                        except IndexError as e:
+                            logger.debug(f"Tracker Url:{url}")
+                            logger.debug(e)
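                        # The two accepted shapes for the `tags` section handled below: a sketch
                        # using hypothetical tracker keywords (a substring of the tracker URL is
                        # used as the key in a real config.yml):
                        #
                        #   tags:
                        #     tracker.example.org: ExampleTag      # Format 1: keyword maps straight to a tag name
                        #     other-tracker.example:               # Format 2: keyword nests per-tracker options
                        #       tag: OtherTag
                        #       max_ratio: 5.0
                        #       max_seeding_time: 129600
                        #       limit_upload_speed: 150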
+                        # If using Format 1
+                        if isinstance(tag_details,str):
+                            tags['new_tag'] = self.util.check_for_attribute(self.data, tag_url, parent="tags",default=default_tag)
+                        # Using Format 2
+                        else:
+                            if 'tag' not in tag_details:
+                                logger.warning(f'No tags defined for {tag_url}. Please check your config.yml file. Setting tag to {tag_url}')
+                            tags['new_tag'] = self.util.check_for_attribute(self.data, "tag", parent="tags", subparent=tag_url, default=tag_url)
+                            tags['max_ratio'] = self.util.check_for_attribute(self.data, "max_ratio", parent="tags", subparent=tag_url, var_type="float", default_int=-2, default_is_none=True,do_print=False)
+                            tags['max_seeding_time'] = self.util.check_for_attribute(self.data, "max_seeding_time", parent="tags", subparent=tag_url, var_type="int", default_int=-2, default_is_none=True,do_print=False)
+                            tags['limit_upload_speed'] = self.util.check_for_attribute(self.data, "limit_upload_speed", parent="tags", subparent=tag_url, var_type="int", default_int=-1, default_is_none=True,do_print=False)
+                        return tags
+        if tags['url']:
+            default_tag = tags['url'].split('/')[2].split(':')[0]
+            tags['new_tag'] = self.util.check_for_attribute(self.data, default_tag, parent="tags",default=default_tag)
+            logger.warning(f'No tags matched for {tags["url"]}. Please check your config.yml file. Setting tag to {default_tag}')
+        return tags
+
+    #Get category from config file based on path provided
+    def get_category(self,path):
+        category = ''
+        path = os.path.join(path,'')
+        if "cat" in self.data and self.data["cat"] is not None:
+            cat_path = self.data["cat"]
+            for cat, save_path in cat_path.items():
+                if save_path in path:
+                    category = cat
+                    break
+        if not category:
+            default_cat = path.split('/')[-2]
+            self.util.check_for_attribute(self.data, default_cat, parent="cat",default=path)
+            category = default_cat
+            logger.warning(f'No categories matched for the save path {path}. Check your config.yml file. Setting category to {default_cat}')
+        return category
+
+    #Empty the recycle bin
+    def empty_recycle(self):
+        dry_run = self.args['dry_run']
+        loglevel = 'DRYRUN' if dry_run else 'INFO'
+        num_del = 0
+        if not self.args["skip_recycle"]:
+            n_info = ''
+            if self.recyclebin['enabled'] and self.recyclebin['empty_after_x_days']:
+                recycle_files = [os.path.join(path, name) for path, subdirs, files in os.walk(self.recycle_dir) for name in files]
+                recycle_files = sorted(recycle_files)
+                if recycle_files:
+                    util.separator(f"Emptying Recycle Bin (Files > {self.recyclebin['empty_after_x_days']} days)", space=False, border=False)
+                    for file in recycle_files:
+                        file_stats = os.stat(file)
+                        filename = file.replace(self.recycle_dir,'')
+                        last_modified = file_stats[stat.ST_MTIME]  # in seconds (last modified time)
+                        now = time.time()  # in seconds
+                        days = (now - last_modified) / (60 * 60 * 24)
+                        if (self.recyclebin['empty_after_x_days'] <= days):
+                            num_del += 1
+                            n_info += (f"{'Did not delete' if dry_run else 'Deleted'} {filename} from the recycle bin. (Last modified {round(days)} days ago).\n")
+                            if not dry_run: os.remove(file)
+                    if num_del > 0:
+                        if not dry_run: util.remove_empty_directories(self.recycle_dir,"**/*")
+                        util.print_multiline(n_info,loglevel)
+                        util.print_line(f"{'Did not delete' if dry_run else 'Deleted'} {num_del} files from the Recycle Bin.",loglevel)
+                else:
+                    logger.debug(f'No files found in "{self.recycle_dir}"')
+        return num_del
+
+    @retry(stop_max_attempt_number=6, wait_fixed=10000)
+    def get(self, url, json=None, headers=None, params=None):
+        return self.session.get(url, json=json, headers=headers, params=params)
+
+    @retry(stop_max_attempt_number=6, wait_fixed=10000)
+    def post(self, url, data=None, json=None, headers=None):
+        return self.session.post(url, data=data, json=json, headers=headers)
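# A note on the two @retry-wrapped helpers above, assuming the semantics of the
# `retrying` package: each call is attempted up to 6 times (stop_max_attempt_number)
# with a fixed 10 second pause between attempts (wait_fixed is in milliseconds),
# so a dead endpoint gets roughly a minute before the last exception is re-raised
# to the caller. A hypothetical call:
#
#   response = config.get("https://example.org/ping")  # retried up to 6 times on failure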
Invalid username/password.") + except APIConnectionError: + raise Failed("Qbittorrent Error: Unable to connect to the client.") + except Exception: + raise Failed("Qbittorrent Error: Unable to connect to the client.") + separator(f"Getting Torrent List", space=False, border=False) + self.torrent_list = self.get_torrents({'sort':'added_on'}) + + # Will create a 2D Dictionary with the torrent name as the key + # torrentdict = {'TorrentName1' : {'Category':'TV', 'save_path':'/data/torrents/TV', 'count':1, 'msg':'[]'...}, + # 'TorrentName2' : {'Category':'Movies', 'save_path':'/data/torrents/Movies'}, 'count':2, 'msg':'[]'...} + # List of dictionary key definitions + # Category = Returns category of the torrent (str) + # save_path = Returns the save path of the torrent (str) + # count = Returns a count of the total number of torrents with the same name (int) + # msg = Returns a list of torrent messages by name (list of str) + # status = Returns the list of status numbers of the torrent by name (0: Tracker is disabled (used for DHT, PeX, and LSD), 1: Tracker has not been contacted yet, 2:Tracker has been contacted and is working, 3:Tracker is updating, 4:Tracker has been contacted, but it is not working (or doesn't send proper replies) + # is_complete = Returns the state of torrent (Returns True if at least one of the torrent with the State is categorized as Complete.) + # first_hash = Returns the hash number of the original torrent (Assuming the torrent list is sorted by date added (Asc)) + def get_torrent_info(torrent_list): + torrentdict = {} + t_obj_unreg = [] + for torrent in alive_it(torrent_list): + save_path = torrent.save_path + category = torrent.category + is_complete = False + msg = None + status = None + if torrent.name in torrentdict: + t_obj_list.append(torrent) + t_count = torrentdict[torrent.name]['count'] + 1 + msg_list = torrentdict[torrent.name]['msg'] + status_list = torrentdict[torrent.name]['status'] + is_complete = True if torrentdict[torrent.name]['is_complete'] == True else torrent.state_enum.is_complete + first_hash = torrentdict[torrent.name]['first_hash'] + else: + t_obj_list = [torrent] + t_count = 1 + msg_list = [] + status_list = [] + is_complete = torrent.state_enum.is_complete + first_hash = torrent.hash + for x in torrent.trackers: + if x.url.startswith('http'): + status = x.status + msg = x.msg.upper() + #Add any potential unregistered torrents to a list + if x.status == 4 and 'DOWN' not in msg and 'UNREACHABLE' not in msg: + t_obj_unreg.append(torrent) + if msg is not None: msg_list.append(msg) + if status is not None: status_list.append(status) + torrentattr = {'torrents': t_obj_list, 'Category': category, 'save_path': save_path, 'count': t_count, 'msg': msg_list, 'status': status_list, 'is_complete': is_complete, 'first_hash':first_hash} + torrentdict[torrent.name] = torrentattr + return torrentdict,t_obj_unreg + self.torrentinfo = None + self.torrentissue = None + if config.args['recheck'] or config.args['cross_seed'] or config.args['rem_unregistered']: + #Get an updated torrent dictionary information of the torrents + self.torrentinfo,self.torrentissue = get_torrent_info(self.torrent_list) + + def get_torrents(self,params): + return self.client.torrents.info(**params) + + def category(self): + dry_run = self.config.args['dry_run'] + loglevel = 'DRYRUN' if dry_run else 'INFO' + num_cat = 0 + if self.config.args['cat_update']: + separator(f"Updating Categories", space=False, border=False) + for torrent in alive_it(self.torrent_list): + if torrent.category 
== '': + new_cat = self.config.get_category(torrent.save_path) + try: + t_url = [util.trunc_val(x.url, '/') for x in torrent.trackers if x.url.startswith('http')][0] + except IndexError: + t_url = None + if not dry_run: torrent.set_category(category=new_cat) + print_line(util.insert_space(f'- Torrent Name: {torrent.name}',1),loglevel) + print_line(util.insert_space(f'-- New Category: {new_cat}',5),loglevel) + print_line(util.insert_space(f'-- Tracker: {t_url}',5),loglevel) + num_cat += 1 + if num_cat >= 1: + print_line(f"{'Did not update' if dry_run else 'Updated'} {num_cat} new categories.",loglevel) + else: + print_line(f'No new torrents to categorize.',loglevel) + return num_cat + + def tags(self): + dry_run = self.config.args['dry_run'] + loglevel = 'DRYRUN' if dry_run else 'INFO' + num_tags = 0 + if self.config.args['tag_update']: + separator(f"Updating Tags", space=False, border=False) + for torrent in alive_it(self.torrent_list): + if torrent.tags == '' or ('cross-seed' in torrent.tags and len([e for e in torrent.tags.split(",") if not 'noHL' in e]) == 1): + tags = self.config.get_tags([x.url for x in torrent.trackers if x.url.startswith('http')]) + if tags["new_tag"]: + num_tags += 1 + print_line(util.insert_space(f'Torrent Name: {torrent.name}',3),loglevel) + print_line(util.insert_space(f'New Tag: {tags["new_tag"]}',8),loglevel) + print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel) + self.set_tags_and_limits(torrent, tags["max_ratio"], tags["max_seeding_time"],tags["limit_upload_speed"],tags["new_tag"]) + if num_tags >= 1: + print_line(f"{'Did not update' if dry_run else 'Updated'} {num_tags} new tags.",loglevel) + else: + print_line(f'No new torrents to tag.',loglevel) + return num_tags + + def set_tags_and_limits(self,torrent,max_ratio,max_seeding_time,limit_upload_speed=None,tags=None): + dry_run = self.config.args['dry_run'] + loglevel = 'DRYRUN' if dry_run else 'INFO' + #Print Logs + if limit_upload_speed: + if limit_upload_speed == -1: print_line(util.insert_space(f'Limit UL Speed: Infinity',1),loglevel) + else: print_line(util.insert_space(f'Limit UL Speed: {limit_upload_speed} kB/s',1),loglevel) + if max_ratio or max_seeding_time: + if max_ratio == -2 or max_seeding_time == -2: print_line(util.insert_space(f'Share Limit: Use Global Share Limit',4),loglevel) + elif max_ratio == -1 or max_seeding_time == -1: print_line(util.insert_space(f'Share Limit: Set No Share Limit',4),loglevel) + else: + if max_ratio != torrent.max_ratio and not max_seeding_time: + print_line(util.insert_space(f'Share Limit: Max Ratio = {max_ratio}',4),loglevel) + elif max_seeding_time != torrent.max_seeding_time and not max_ratio: + print_line(util.insert_space(f'Share Limit: Max Seed Time = {max_seeding_time} min',4),loglevel) + elif max_ratio != torrent.max_ratio and max_seeding_time != torrent.max_seeding_time: + print_line(util.insert_space(f'Share Limit: Max Ratio = {max_ratio}, Max Seed Time = {max_seeding_time} min',4),loglevel) + #Update Torrents + if not dry_run: + if tags: torrent.add_tags(tags) + if limit_upload_speed: + if limit_upload_speed == -1: torrent.set_upload_limit(-1) + else: torrent.set_upload_limit(limit_upload_speed*1024) + if max_ratio or max_seeding_time: + if max_ratio == -2 or max_seeding_time == -2: + torrent.set_share_limits(-2,-2) + return + elif max_ratio == -1 or max_seeding_time == -1: + torrent.set_share_limits(-1,-1) + return + if not max_ratio: max_ratio = torrent.max_ratio + if not max_seeding_time: max_seeding_time = torrent.max_seeding_time + 
+                torrent.set_share_limits(max_ratio,max_seeding_time)
+
+    def tag_nohardlinks(self):
+        dry_run = self.config.args['dry_run']
+        loglevel = 'DRYRUN' if dry_run else 'INFO'
+        num_tags = 0  #counter for the number of torrents that have no hard links
+        del_tor = 0  #counter for the number of torrents that have no hard links and meet the ratio limit/seed limit criteria for deletion
+        del_tor_cont = 0  #counter for the number of torrents that have no hard links and meet the ratio limit/seed limit criteria for deletion including contents
+        num_untag = 0  #counter for the number of torrents that previously had no hard links but now have hard links
+
+        if self.config.args['tag_nohardlinks']:
+            util.separator("Tagging Torrents with No Hardlinks", space=False, border=False)
+            nohardlinks = self.config.nohardlinks
+            tdel_dict = {}  #dictionary to track the torrent names and content paths that meet the deletion criteria
+            root_dir = self.config.root_dir
+            remote_dir = self.config.remote_dir
+            for category in nohardlinks:
+                torrent_list = self.get_torrents({'category':category,'filter':'completed'})
+                if len(torrent_list) == 0:
+                    logger.error('No torrents found in the category ('+category+') defined in config.yml inside the nohardlinks section. Please check if this matches with any category in qbittorrent and has 1 or more torrents.')
+                    continue
+                for torrent in alive_it(torrent_list):
+                    tags = self.config.get_tags([x.url for x in torrent.trackers if x.url.startswith('http')])
+                    if any(tag in torrent.tags for tag in nohardlinks[category]['exclude_tags']):
+                        #Skip to the next torrent if we find any torrents that are in the exclude tag
+                        continue
+                    else:
+                        #Checks for any hard links and not already tagged
+                        if util.nohardlink(torrent['content_path'].replace(root_dir,remote_dir)):
+                            #Will only tag new torrents that don't have noHL tag
+                            if 'noHL' not in torrent.tags:
+                                num_tags += 1
+                                print_line(util.insert_space(f'Torrent Name: {torrent.name}',3),loglevel)
+                                print_line(util.insert_space(f'Added Tag: noHL',6),loglevel)
+                                print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
+                                self.set_tags_and_limits(torrent, nohardlinks[category]["max_ratio"], nohardlinks[category]["max_seeding_time"],tags='noHL')
+                            #Cleans up previously tagged noHL torrents
+                            else:
+                                # Deletes torrent with data if cleanup is set to true and meets the ratio/seeding requirements
+                                if (nohardlinks[category]['cleanup'] and torrent.state_enum.is_paused and len(nohardlinks[category])>0):
+                                    print_line(f'Torrent Name: {torrent.name} has no hard links found and meets ratio/seeding requirements.',loglevel)
+                                    print_line(util.insert_space(f"Cleanup flag set to true. {'Not Deleting' if dry_run else 'Deleting'} torrent + contents.",6),loglevel)
+                                    # content_path is reported from qBittorrent's point of view (root_dir),
+                                    # so map it to remote_dir before touching it from this side
+                                    tdel_dict[torrent.name] = torrent['content_path'].replace(root_dir,remote_dir)
+                        #Checks to see if previous noHL tagged torrents now have hard links.
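                        # What util.nohardlink() (modules/util.py) checks here: it returns True only
                        # when every file under the given path has st_nlink == 1, i.e. no other
                        # directory entry references the same inode. A file that was imported into a
                        # media library via hard link reports st_nlink == 2, so its torrent is left alone.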
+                        if (not (util.nohardlink(torrent['content_path'].replace(root_dir,remote_dir))) and ('noHL' in torrent.tags)):
+                            num_untag += 1
+                            print_line(f'Previously tagged noHL torrent: {torrent.name} now has hard links.',loglevel)
+                            print_line(util.insert_space(f'Removed Tag: noHL',6),loglevel)
+                            print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
+                            print_line(f"{'Not Reverting' if dry_run else 'Reverting'} share limits.",loglevel)
+                            if not dry_run:
+                                torrent.remove_tags(tags='noHL')
+                                self.set_tags_and_limits(torrent, tags["max_ratio"], tags["max_seeding_time"],tags["limit_upload_speed"])
+                #loop through torrent list again for cleanup purposes
+                if (nohardlinks[category]['cleanup']):
+                    for torrent in alive_it(torrent_list):
+                        if torrent.name in tdel_dict.keys() and 'noHL' in torrent.tags:
+                            #Double check that the content path is the same before we delete anything
+                            if torrent['content_path'].replace(root_dir,remote_dir) == tdel_dict[torrent.name]:
+                                if (os.path.exists(torrent['content_path'].replace(root_dir,remote_dir))):
+                                    if not dry_run: self.tor_delete_recycle(torrent)
+                                    del_tor_cont += 1
+                                    print_line(util.insert_space(f'Deleted .torrent AND content files.',8),loglevel)
+                                else:
+                                    if not dry_run: torrent.delete(hash=torrent.hash, delete_files=False)
+                                    del_tor += 1
+                                    print_line(util.insert_space(f'Deleted .torrent but NOT content files.',8),loglevel)
+            if num_tags >= 1:
+                print_line(f"{'Did not Tag/set' if dry_run else 'Tag/set'} share limits for {num_tags} .torrent{'s.' if num_tags > 1 else '.'}",loglevel)
+            else:
+                print_line('No torrents to tag with no hard links.',loglevel)
+            if num_untag >= 1: print_line(f"{'Did not delete' if dry_run else 'Deleted'} noHL tags / share limits for {num_untag} .torrent{'s.' if num_untag > 1 else '.'}",loglevel)
+            if del_tor >= 1: print_line(f"{'Did not delete' if dry_run else 'Deleted'} {del_tor} .torrent{'s' if del_tor > 1 else ''} but not content files.",loglevel)
+            if del_tor_cont >= 1: print_line(f"{'Did not delete' if dry_run else 'Deleted'} {del_tor_cont} .torrent{'s' if del_tor_cont > 1 else ''} AND content files.",loglevel)
+        return num_tags,num_untag,del_tor,del_tor_cont
+
+    def rem_unregistered(self):
+        dry_run = self.config.args['dry_run']
+        loglevel = 'DRYRUN' if dry_run else 'INFO'
+        del_tor = 0
+        del_tor_cont = 0
+        if self.config.args['rem_unregistered']:
+            separator("Removing Unregistered Torrents", space=False, border=False)
+            pot_unr = ''
+            unreg_msgs = [
+                'UNREGISTERED',
+                'TORRENT NOT FOUND',
+                'TORRENT IS NOT FOUND',
+                'NOT REGISTERED',
+                'HTTPS://BEYOND-HD.ME/TORRENTS',
+                'NOT EXIST',
+                'UNKNOWN TORRENT',
+                'REDOWNLOAD',
+                'PACKS',
+                'REPACKED',
+                'PACK',
+                'TRUMP',
+                'RETITLED',
+            ]
+            for torrent in alive_it(self.torrentissue):
+                t_name = torrent.name
+                t_count = self.torrentinfo[t_name]['count']
+                t_msg = self.torrentinfo[t_name]['msg']
+                t_status = self.torrentinfo[t_name]['status']
+                for x in torrent.trackers:
+                    if x.url.startswith('http'):
+                        t_url = util.trunc_val(x.url, '/')
+                        msg_up = x.msg.upper()
+                        #Add any potential unregistered torrents to a list
+                        if not any(m in msg_up for m in unreg_msgs) and x.status == 4 and 'DOWN' not in msg_up and 'UNREACHABLE' not in msg_up:
+                            pot_unr += (util.insert_space(f'Torrent Name: {t_name}',3)+'\n')
+                            pot_unr += (util.insert_space(f'Status: {msg_up}',9)+'\n')
+                            pot_unr += (util.insert_space(f'Tracker: {t_url}',8)+'\n')
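                        # Deletion policy implemented below: when several torrents share one name
                        # (t_count > 1) and at least one copy still has a working tracker (an empty
                        # message in t_msg or a status of 2 in t_status), only the .torrent is removed;
                        # otherwise the torrent goes through tor_delete_recycle() so its data lands in
                        # the RecycleBin rather than being deleted outright.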
+                        if any(m in msg_up for m in unreg_msgs) and x.status == 4 and 'DOWN' not in msg_up and 'UNREACHABLE' not in msg_up:
+                            print_line(util.insert_space(f'Torrent Name: {t_name}',3),loglevel)
+                            print_line(util.insert_space(f'Status: {msg_up}',9),loglevel)
+                            print_line(util.insert_space(f'Tracker: {t_url}',8),loglevel)
+                            if t_count > 1:
+                                # Checks if any of the original torrents are working
+                                if '' in t_msg or 2 in t_status:
+                                    if not dry_run: torrent.delete(hash=torrent.hash, delete_files=False)
+                                    print_line(util.insert_space(f'Deleted .torrent but NOT content files.',8),loglevel)
+                                    del_tor += 1
+                                else:
+                                    if not dry_run: self.tor_delete_recycle(torrent)
+                                    print_line(util.insert_space(f'Deleted .torrent AND content files.',8),loglevel)
+                                    del_tor_cont += 1
+                            else:
+                                if not dry_run: self.tor_delete_recycle(torrent)
+                                print_line(util.insert_space(f'Deleted .torrent AND content files.',8),loglevel)
+                                del_tor_cont += 1
+            if del_tor >= 1 or del_tor_cont >= 1:
+                if del_tor >= 1: print_line(f"{'Did not delete' if dry_run else 'Deleted'} {del_tor} .torrent{'s' if del_tor > 1 else ''} but not content files.",loglevel)
+                if del_tor_cont >= 1: print_line(f"{'Did not delete' if dry_run else 'Deleted'} {del_tor_cont} .torrent{'s' if del_tor_cont > 1 else ''} AND content files.",loglevel)
+            else:
+                print_line('No unregistered torrents found.',loglevel)
+
+            if (len(pot_unr) > 0):
+                separator("Potential Unregistered torrents", space=False, border=False,loglevel=loglevel)
+                print_multiline(pot_unr.rstrip(),loglevel)
+        return del_tor,del_tor_cont
+
+    # Function used to move any torrents from the cross seed directory to the correct save directory
+    def cross_seed(self):
+        dry_run = self.config.args['dry_run']
+        loglevel = 'DRYRUN' if dry_run else 'INFO'
+        added = 0  # Keep track of total torrents added
+        tagged = 0  #Track # of torrents tagged that are not cross-seeded
+        if self.config.args['cross_seed']:
+            separator("Checking for Cross-Seed Torrents", space=False, border=False)
+            # List of categories for all torrents moved
+            categories = []
+
+            # Only get torrent files
+            cs_files = [f for f in os.listdir(self.config.cross_seed_dir) if f.endswith('torrent')]
+            dir_cs = self.config.cross_seed_dir
+            dir_cs_out = os.path.join(dir_cs,'qbit_manage_added')
+            os.makedirs(dir_cs_out,exist_ok=True)
+            for file in alive_it(cs_files):
+                t_name = file.split(']', 2)[2].split('.torrent')[0]
+                # Substring Key match in dictionary (used because t_name might not match exactly with torrentdict key)
+                # Returns a dictionary of the filtered items
+                torrentdict_file = dict(filter(lambda item: t_name in item[0], self.torrentinfo.items()))
+                if torrentdict_file:
+                    # Get the exact torrent match name from torrentdict
+                    t_name = next(iter(torrentdict_file))
+                    category = self.torrentinfo[t_name]['Category']
+                    dest = os.path.join(self.torrentinfo[t_name]['save_path'], '')
+                    src = os.path.join(dir_cs,file)
+                    dir_cs_out = os.path.join(dir_cs,'qbit_manage_added',file)
+                    #Only add cross-seed torrent if original torrent is complete
+                    if self.torrentinfo[t_name]['is_complete']:
+                        categories.append(category)
+                        print_line(f"{'Not Adding' if dry_run else 'Adding'} to qBittorrent:",loglevel)
+                        print_line(util.insert_space(f'Torrent Name: {t_name}',3),loglevel)
+                        print_line(util.insert_space(f'Category: {category}',7),loglevel)
+                        print_line(util.insert_space(f'Save_Path: {dest}',6),loglevel)
+                        added += 1
+                        if not dry_run:
+                            self.client.torrents.add(torrent_files=src, save_path=dest, category=category, tags='cross-seed', is_paused=True)
+                            shutil.move(src, dir_cs_out)
+                    else:
+                        print_line(f'Found {t_name} in {dir_cs} but original torrent is not complete.',loglevel)
+                        print_line('Not adding to qBittorrent',loglevel)
+                else:
+                    if dry_run: print_line(f'{t_name} not found in torrents.',loglevel)
+                    else: print_line(f'{t_name} not found in torrents.','WARNING')
+            #Tag missing cross-seed torrents
+            for torrent in alive_it(self.torrent_list):
+                t_name = torrent.name
+                if 'cross-seed' not in torrent.tags and self.torrentinfo[t_name]['count'] > 1 and self.torrentinfo[t_name]['first_hash'] != torrent.hash:
+                    tagged += 1
+                    print_line(f"{'Not Adding' if dry_run else 'Adding'} 'cross-seed' tag to {t_name}",loglevel)
+                    if not dry_run: torrent.add_tags(tags='cross-seed')
+
+            numcategory = Counter(categories)
+            for c in numcategory:
+                if numcategory[c] > 0: print_line(f"{numcategory[c]} {c} cross-seed .torrents {'not added' if dry_run else 'added'}.",loglevel)
+            if added > 0: print_line(f"Total {added} cross-seed .torrents {'not added' if dry_run else 'added'}.",loglevel)
+            if tagged > 0: print_line(f"Total {tagged} cross-seed .torrents {'not tagged' if dry_run else 'tagged'}.",loglevel)
+        return added,tagged
+
+    # Function used to recheck paused torrents sorted by size and resume torrents that are completed
+    def recheck(self):
+        dry_run = self.config.args['dry_run']
+        loglevel = 'DRYRUN' if dry_run else 'INFO'
+        resumed = 0
+        rechecked = 0
+        if self.config.args['recheck']:
+            separator("Rechecking Paused Torrents", space=False, border=False)
+            #sort by size and paused
+            torrent_list = self.get_torrents({'status_filter':'paused','sort':'size'})
+            if torrent_list:
+                for torrent in alive_it(torrent_list):
+                    new_tag = self.config.get_tags([x.url for x in torrent.trackers if x.url.startswith('http')])
+                    #Resume torrent if completed
+                    if torrent.progress == 1:
+                        if torrent.max_ratio < 0 and torrent.max_seeding_time < 0:
+                            resumed += 1
+                            print_line(f"{'Not Resuming' if dry_run else 'Resuming'} [{new_tag['new_tag']}] - {torrent.name}",loglevel)
+                            if not dry_run: torrent.resume()
+                        else:
+                            #Check to see if torrent meets AutoTorrentManagement criteria
+                            logger.debug('Checking to see if torrent meets AutoTorrentManagement criteria')
+                            logger.debug(util.insert_space(f'- Torrent Name: {torrent.name}',2))
+                            logger.debug(util.insert_space(f'-- Ratio vs Max Ratio: {torrent.ratio} < {torrent.max_ratio}',4))
+                            logger.debug(util.insert_space(f'-- Seeding Time vs Max Seed Time: {timedelta(seconds=torrent.seeding_time)} < {timedelta(minutes=torrent.max_seeding_time)}',4))
+                            if (torrent.max_ratio >= 0 and torrent.ratio < torrent.max_ratio and torrent.max_seeding_time < 0) \
+                                    or (torrent.max_seeding_time >= 0 and (torrent.seeding_time < (torrent.max_seeding_time * 60)) and torrent.max_ratio < 0) \
+                                    or (torrent.max_ratio >= 0 and torrent.max_seeding_time >= 0 and torrent.ratio < torrent.max_ratio and (torrent.seeding_time < (torrent.max_seeding_time * 60))):
+                                resumed += 1
+                                print_line(f"{'Not Resuming' if dry_run else 'Resuming'} [{new_tag['new_tag']}] - {torrent.name}",loglevel)
+                                if not dry_run: torrent.resume()
+                    #Recheck
+                    elif torrent.progress == 0 and self.torrentinfo[torrent.name]['is_complete'] and not torrent.state_enum.is_checking:
+                        rechecked += 1
+                        print_line(f"{'Not Rechecking' if dry_run else 'Rechecking'} [{new_tag['new_tag']}] - {torrent.name}",loglevel)
+                        if not dry_run: torrent.recheck()
+        return resumed,rechecked
+
+    def rem_orphaned(self):
+        dry_run = self.config.args['dry_run']
+        loglevel = 'DRYRUN' if dry_run else 'INFO'
+        orphaned = 0
+        if self.config.args['rem_orphaned']:
+            separator("Checking for Orphaned Files", space=False, border=False)
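            # The scan below is plain set arithmetic: everything on disk minus everything
            # some loaded torrent claims. A toy example with hypothetical paths:
            #
            #   root_files    = {'/data/a.mkv', '/data/b.mkv', '/data/c.mkv'}
            #   torrent_files = {'/data/a.mkv', '/data/b.mkv'}
            #   set(root_files) - set(torrent_files)  # -> {'/data/c.mkv'}, the orphans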
+            torrent_files = []
+            root_files = []
+            orphaned_files = []
+            excluded_orphan_files = []
+            orphaned_parent_path = set()
+            remote_path = self.config.remote_dir
+            root_path = self.config.root_dir
+            if (remote_path != root_path):
+                root_files = [os.path.join(path.replace(remote_path,root_path), name) for path, subdirs, files in alive_it(os.walk(remote_path)) for name in files if os.path.join(remote_path,'orphaned_data') not in path and os.path.join(remote_path,'.RecycleBin') not in path]
+            else:
+                root_files = [os.path.join(path, name) for path, subdirs, files in alive_it(os.walk(root_path)) for name in files if os.path.join(root_path,'orphaned_data') not in path and os.path.join(root_path,'.RecycleBin') not in path]
+
+            #Get an updated list of torrents
+            torrent_list = self.get_torrents({'sort':'added_on'})
+            for torrent in alive_it(torrent_list):
+                for file in torrent.files:
+                    torrent_files.append(os.path.join(torrent.save_path,file.name))
+
+            orphaned_files = set(root_files) - set(torrent_files)
+            orphaned_files = sorted(orphaned_files)
+
+            if self.config.orphaned['exclude_patterns']:
+                exclude_patterns = self.config.orphaned['exclude_patterns']
+                excluded_orphan_files = [file for file in orphaned_files for exclude_pattern in exclude_patterns if fnmatch(file, exclude_pattern.replace(remote_path,root_path))]
+
+            orphaned_files = set(orphaned_files) - set(excluded_orphan_files)
+            separator("Torrent Files", space=False, border=False, loglevel='DEBUG')
+            print_multiline("\n".join(torrent_files),'DEBUG')
+            separator("Root Files", space=False, border=False,loglevel='DEBUG')
+            print_multiline("\n".join(root_files),'DEBUG')
+            separator("Excluded Orphan Files", space=False, border=False,loglevel='DEBUG')
+            print_multiline("\n".join(excluded_orphan_files),'DEBUG')
+            separator("Orphaned Files", space=False, border=False,loglevel='DEBUG')
+            print_multiline("\n".join(orphaned_files),'DEBUG')
+            separator("Deleting Orphaned Files", space=False, border=False,loglevel='DEBUG')
+
+            if orphaned_files:
+                dir_out = os.path.join(remote_path,'orphaned_data')
+                os.makedirs(dir_out,exist_ok=True)
+                print_line(f"{len(orphaned_files)} Orphaned files found",loglevel)
+                print_multiline("\n".join(orphaned_files),loglevel)
+                print_line(f"{'Did not move' if dry_run else 'Moved'} {len(orphaned_files)} Orphaned files to {dir_out.replace(remote_path,root_path)}",loglevel)
+                orphaned = len(orphaned_files)
+                #Delete empty directories after moving orphan files
+                logger.info('Cleaning up any empty directories...')
+                if not dry_run:
+                    for file in alive_it(orphaned_files):
+                        src = file.replace(root_path,remote_path)
+                        dest = os.path.join(dir_out,file.replace(root_path,''))
+                        util.move_files(src,dest)
+                        orphaned_parent_path.add(os.path.dirname(file).replace(root_path,remote_path))
+                    for parent_path in orphaned_parent_path:
+                        util.remove_empty_directories(parent_path,"**/*")
+            else:
+                print_line("No Orphaned Files found.",loglevel)
+        return orphaned
+
+    def tor_delete_recycle(self,torrent):
+        if self.config.recyclebin['enabled']:
+            tor_files = []
+            #Define torrent files/folders
+            for file in torrent.files:
+                tor_files.append(os.path.join(torrent.save_path,file.name))
+
+            #Create recycle bin if not exists
+            recycle_path = os.path.join(self.config.remote_dir,'.RecycleBin')
+            os.makedirs(recycle_path,exist_ok=True)
+
+            separator(f"Moving {len(tor_files)} files to RecycleBin", space=False, border=False,loglevel='DEBUG')
+            if len(tor_files) == 1: print_line(tor_files[0],'DEBUG')
+            else: print_multiline("\n".join(tor_files),'DEBUG')
+            logger.debug(f'Moved {len(tor_files)} files to {recycle_path.replace(self.config.remote_dir,self.config.root_dir)}')
+
+            #Move files from torrent contents to Recycle bin
+            for file in tor_files:
+                src = file.replace(self.config.root_dir,self.config.remote_dir)
+                dest = os.path.join(recycle_path,file.replace(self.config.root_dir,''))
+                #move files and change date modified
+                try:
+                    util.move_files(src,dest,True)
+                except FileNotFoundError:
+                    print_line(f'RecycleBin Warning - FileNotFound: No such file or directory: {src} ','WARNING')
+            #Delete torrent and files
+            torrent.delete(hash=torrent.hash, delete_files=False)
+            #Remove any empty directories
+            util.remove_empty_directories(torrent.save_path.replace(self.config.root_dir,self.config.remote_dir),"**/*")
+        else:
+            torrent.delete(hash=torrent.hash, delete_files=True)
\ No newline at end of file
diff --git a/modules/util.py b/modules/util.py
index 2cf301e..160355d 100644
--- a/modules/util.py
+++ b/modules/util.py
@@ -1,39 +1,185 @@
-import logging, traceback
+import logging, os, shutil, traceback, time, signal
 from logging.handlers import RotatingFileHandler
+from ruamel import yaml
+from pathlib import Path
 
-logger = logging.getLogger("qBit Manage")
+logger = logging.getLogger('qBit Manage')
 
-class TimeoutExpired(Exception):
-    pass
+def get_list(data, lower=False, split=True, int_list=False):
+    if data is None: return None
+    elif isinstance(data, list): return data
+    elif isinstance(data, dict): return [data]
+    elif split is False: return [str(data)]
+    elif lower is True: return [d.strip().lower() for d in str(data).split(",")]
+    elif int_list is True:
+        try: return [int(d.strip()) for d in str(data).split(",")]
+        except ValueError: return []
+    else: return [d.strip() for d in str(data).split(",")]
 
+class check:
+    def __init__(self, config):
+        self.config = config
+
+    def check_for_attribute(self, data, attribute, parent=None, subparent=None, test_list=None, default=None, do_print=True, default_is_none=False, req_default=False, var_type="str", default_int=0, throw=False, save=True):
+        endline = ""
+        if parent is not None:
+            if subparent is not None:
+                if data and parent in data and subparent in data[parent]:
+                    data = data[parent][subparent]
+            else:
+                if data and parent in data:
+                    data = data[parent]
+                else:
+                    data = None
+                    do_print = False
+                    #save = False
+
+        if subparent is not None:
+            text = f"{parent}->{subparent} sub-attribute {attribute}"
+        elif parent is None:
+            text = f"{attribute} attribute"
+        else:
+            text = f"{parent} sub-attribute {attribute}"
+
+        if data is None or attribute not in data:
+            message = f"{text} not found"
+            if parent and save is True:
+                loaded_config, _, _ = yaml.util.load_yaml_guess_indent(open(self.config.config_path))
+                if subparent:
+                    endline = f"\n{subparent} sub-attribute {attribute} added to config"
+                    if subparent not in loaded_config[parent] or not loaded_config[parent][subparent]:
+                        loaded_config[parent][subparent] = {attribute: default}
+                    elif attribute not in loaded_config[parent][subparent]:
+                        loaded_config[parent][subparent][attribute] = default
+                    else:
+                        endline = ""
+                else:
+                    endline = f"\n{parent} sub-attribute {attribute} added to config"
+                    if parent not in loaded_config or not loaded_config[parent]:
+                        loaded_config[parent] = {attribute: default}
+                    elif attribute not in loaded_config[parent]:
+                        loaded_config[parent][attribute] = default
+                    else:
+                        endline = ""
+                yaml.round_trip_dump(loaded_config, open(self.config.config_path, "w"), indent=None, block_seq_indent=2)
+        elif data[attribute] is None:
+            if default_is_none and var_type == "list":
+                return []
+            elif default_is_none:
+                return None
+            else:
+                message = f"{text} is blank"
+        elif var_type == "url":
+            if data[attribute].endswith(("\\", "/")):
+                return data[attribute][:-1]
+            else:
+                return data[attribute]
+        elif var_type == "bool":
+            if isinstance(data[attribute], bool):
+                return data[attribute]
+            else:
+                message = f"{text} must be either true or false"
+        elif var_type == "int":
+            if isinstance(data[attribute], int) and data[attribute] >= default_int:
+                return data[attribute]
+            else:
+                message = f"{text} must be an integer >= {default_int}"
+        elif var_type == "float":
+            try:
+                data[attribute] = float(data[attribute])
+            except (TypeError, ValueError):
+                pass
+            if isinstance(data[attribute], float) and data[attribute] >= default_int:
+                return data[attribute]
+            else:
+                message = f"{text} must be a float >= {float(default_int)}"
+        elif var_type == "path":
+            if os.path.exists(os.path.abspath(data[attribute])):
+                return os.path.join(data[attribute],'')
+            else:
+                message = f"Path {os.path.abspath(data[attribute])} does not exist"
+        elif var_type == "list":
+            return get_list(data[attribute], split=False)
+        elif var_type == "list_path":
+            temp_list = [p for p in get_list(data[attribute], split=False) if os.path.exists(os.path.abspath(p))]
+            if len(temp_list) > 0:
+                return temp_list
+            else:
+                message = "No Paths exist"
+        elif var_type == "lower_list":
+            return get_list(data[attribute], lower=True)
+        elif test_list is None or data[attribute] in test_list:
+            return data[attribute]
+        else:
+            message = f"{text}: {data[attribute]} is an invalid input"
+        if var_type == "path" and default and os.path.exists(os.path.abspath(default)):
+            return os.path.join(default,'')
+        elif var_type == "path" and default:
+            if data and attribute in data and data[attribute]:
+                message = f"neither {data[attribute]} nor the default path {default} could be found"
+            else:
+                message = f"no {text} found and the default path {default} could not be found"
+            default = None
+        if default is not None or default_is_none:
+            message = message + f" using {default} as default"
+        message = message + endline
+        if req_default and default is None:
+            raise Failed(f"Config Error: {attribute} attribute must be set under {parent}.")
+        options = ""
+        if test_list:
+            for option, description in test_list.items():
+                if len(options) > 0:
+                    options = f"{options}\n"
+                options = f"{options} {option} ({description})"
+        if (default is None and not default_is_none) or throw:
+            if len(options) > 0:
+                message = message + "\n" + options
+            raise Failed(f"Config Error: {message}")
+        if do_print:
+            print_multiline(f"Config Warning: {message}", "warning")
+            if data and attribute in data and data[attribute] and test_list is not None and data[attribute] not in test_list:
+                print_multiline(options)
+        return default
 
 class Failed(Exception):
     pass
 
-class NotScheduled(Exception):
-    pass
-
 separating_character = "="
 screen_width = 100
 spacing = 0
 
+def add_dict_list(keys, value, dict_map):
+    for key in keys:
+        if key in dict_map:
+            dict_map[key].append(value)
+        else:
+            dict_map[key] = [value]
+
+# NOTE: regex_first_int is not defined anywhere in this module; get_int_list
+# appears to be unused leftover and would raise NameError if called.
+def get_int_list(data, id_type):
+    int_values = []
+    for value in get_list(data):
+        try: int_values.append(regex_first_int(value, id_type))
+        except Failed as e: logger.error(e)
+    return int_values
+
+def print_line(lines, loglevel='INFO'):
+    logger.log(getattr(logging, loglevel.upper()), str(lines))
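# How print_multiline below keeps continuation lines aligned: after emitting the
# first line it swaps the file handler's formatter for a bare padded "| " prefix,
# then restores the timestamped format once the loop finishes. This assumes
# logger.handlers[1] is the RotatingFileHandler registered by qbit_manage.py.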
 def print_multiline(lines, loglevel='INFO'):
-    line_list = str(lines).split("\n")
-    for i, line in enumerate(line_list):
-        if len(line) > 0 and i != len(line_list)-1:
-            logger.log(getattr(logging, loglevel),line)
+    for i, line in enumerate(str(lines).split("\n")):
+        logger.log(getattr(logging, loglevel.upper()), line)
         if i == 0:
             logger.handlers[1].setFormatter(logging.Formatter(" " * 65 + "| %(message)s"))
     logger.handlers[1].setFormatter(logging.Formatter("[%(asctime)s] %(filename)-27s %(levelname)-10s | %(message)s"))
 
 def print_stacktrace():
-    print_multiline(traceback.format_exc())
+    print_multiline(traceback.format_exc(), 'CRITICAL')
 
 def my_except_hook(exctype, value, tb):
     for line in traceback.format_exception(etype=exctype, value=value, tb=tb):
         print_multiline(line, 'CRITICAL')
-
 def centered(text, sep=" "):
     if len(text) > screen_width - 2:
         return text
@@ -52,13 +198,14 @@ def separator(text=None, space=True, border=True, loglevel='INFO'):
         apply_formatter(handler, border=False)
     border_text = f"|{separating_character * screen_width}|"
     if border:
-        logger.log(getattr(logging, loglevel),border_text)
+        logger.log(getattr(logging, loglevel.upper()), border_text)
     if text:
         text_list = text.split("\n")
         for t in text_list:
-            logger.log(getattr(logging, loglevel),f"|{sep}{centered(t, sep=sep)}{sep}|")
+            logger.log(getattr(logging, loglevel.upper()),
+                       f"|{sep}{centered(t, sep=sep)}{sep}|")
     if border:
-        logger.log(getattr(logging, loglevel),border_text)
+        logger.log(getattr(logging, loglevel.upper()), border_text)
     for handler in logger.handlers:
         apply_formatter(handler)
@@ -76,9 +223,9 @@ def adjust_space(display_title):
         display_title += " " * space_length
     return display_title
 
-def insert_space(display_title,space_length=0):
+def insert_space(display_title, space_length=0):
     display_title = str(display_title)
-    if space_length == 0:
+    if space_length == 0:
         space_length = spacing - len(display_title)
     if space_length > 0:
         display_title = " " * space_length + display_title
@@ -92,4 +239,56 @@ def print_return(text):
 def print_end():
     print(adjust_space(" "), end="\r")
     global spacing
-    spacing = 0
\ No newline at end of file
+    spacing = 0
+
+# Truncate a torrent URL to remove sensitive information, keeping only the first
+# n delimiter-separated pieces, e.g.:
+#   trunc_val("https://tracker.example/announce/PRIVATEKEY", '/') -> "https://tracker.example"
+def trunc_val(s, d, n=3):
+    return d.join(s.split(d, n)[:n])
+
+# Move files from source to destination; mod updates the date modified of the moved file
+def move_files(src, dest, mod=False):
+    dest_path = os.path.dirname(dest)
+    if not os.path.isdir(dest_path):
+        os.makedirs(dest_path)
+    shutil.move(src, dest)
+    if mod:
+        mod_time = time.time()
+        os.utime(dest, (mod_time, mod_time))
+
+# Remove any empty directories after moving files
+def remove_empty_directories(pathlib_root_dir, pattern):
+    pathlib_root_dir = Path(pathlib_root_dir)
+    # list all directories recursively and sort them by path,
+    # longest first
+    L = sorted(
+        pathlib_root_dir.glob(pattern),
+        key=lambda p: len(str(p)),
+        reverse=True,
+    )
+    for pdir in L:
+        try:
+            pdir.rmdir()  # remove directory if empty
+        except OSError:
+            continue  # catch and continue if non-empty
+
+# Check a file or folder for hard links; returns True only when no file under the
+# given path has more than one link (st_nlink == 1 for everything)
+def nohardlink(file):
+    check = True
+    if os.path.isfile(file):
+        if os.stat(file).st_nlink > 1:
+            check = False
+    else:
+        for path, subdirs, files in os.walk(file):
+            for x in files:
+                if os.stat(os.path.join(path, x)).st_nlink > 1:
+                    check = False
+    return check
+
+#Gracefully kill script when docker stops
+class GracefulKiller:
+    kill_now = False
+    def __init__(self):
+        #signal.signal(signal.SIGINT, self.exit_gracefully)
+        signal.signal(signal.SIGTERM, self.exit_gracefully)
+    def exit_gracefully(self, *args):
+        self.kill_now = True
\ No newline
at end of file diff --git a/qbit_manage.py b/qbit_manage.py index 520bdaf..04406a0 100644 --- a/qbit_manage.py +++ b/qbit_manage.py @@ -1,16 +1,15 @@ #!/usr/bin/python3 -import argparse, logging, os, sys, time, shutil, stat, fnmatch +import argparse, logging, os, sys, fnmatch, time from logging.handlers import RotatingFileHandler from datetime import timedelta,datetime from collections import Counter -from pathlib import Path try: - import yaml, schedule - from qbittorrentapi import Client, LoginFailed, APIConnectionError - from modules.docker import GracefulKiller - from modules import util + import schedule + from modules import util + from modules.config import Config + from modules.util import Failed, GracefulKiller except ModuleNotFoundError: print("Requirements Error: Requirements are not installed") sys.exit(0) @@ -72,10 +71,11 @@ dry_run = get_arg("QBT_DRY_RUN", args.dry_run, arg_bool=True) log_level = get_arg("QBT_LOG_LEVEL", args.log_level) divider = get_arg("QBT_DIVIDER", args.divider) screen_width = get_arg("QBT_WIDTH", args.width, arg_int=True) - +stats = {} +args = {} default_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "config") -root_path = '' #Global variable -remote_path = '' #Global variable +for v in ['run','sch','config_file','log_file','cross_seed','recheck','cat_update','tag_update','rem_unregistered','rem_orphaned','tag_nohardlinks','skip_recycle','dry_run','log_level','divider','screen_width']: + args[v] = eval(v) util.separating_character = divider[0] @@ -84,58 +84,18 @@ if screen_width < 90 or screen_width > 300: screen_width = 100 util.screen_width = screen_width - #Check if Schedule parameter is a number -if sch.isnumeric(): +try: sch = int(sch) -else: +except ValueError: print(f"Schedule Error: Schedule is not a number. Current value is set to '{sch}'") sys.exit(0) -#Config error handling -if not os.path.exists(os.path.join(default_dir, config_file)): - print(f"Config Error: config not found at {os.path.join(os.path.abspath(default_dir),config_file)}") - sys.exit(0) - -with open(os.path.join(default_dir,config_file), 'r') as cfg_file: - cfg = yaml.load(cfg_file, Loader=yaml.FullLoader) - - - -#Set root and remote directories -def validate_path(): - global root_path - global remote_path - #Assign root_dir - if 'root_dir' in cfg['directory']: - root_path = os.path.join(cfg['directory']['root_dir'], '') - else: - print('root_dir not defined in config.') - sys.exit(0) - #Assign remote_path - if ('remote_dir' in cfg['directory'] and cfg['directory']['remote_dir'] != ''): - remote_path = os.path.join(cfg['directory']['remote_dir'], '') - else: - remote_path = root_path - #Check to see if path exists - if not os.path.exists(remote_path): - print(f"Config Error: Path does not exist at '{os.path.abspath(remote_path)}'. 
Is your root_dir/remote_dir correctly defined in the config?") - sys.exit(0) - -#Root_dir/remote_dir error handling -if cross_seed or tag_nohardlinks or rem_orphaned: - validate_path() -else: - if 'recyclebin' in cfg and cfg["recyclebin"] != None: - if 'enabled' in cfg["recyclebin"] and cfg["recyclebin"]['enabled']: - validate_path() - - os.makedirs(os.path.join(default_dir, "logs"), exist_ok=True) logger = logging.getLogger('qBit Manage') logging.DRYRUN = 25 -logging.addLevelName(logging.DRYRUN, 'DRY-RUN') +logging.addLevelName(logging.DRYRUN, 'DRYRUN') setattr(logger, 'dryrun', lambda dryrun, *args: logger._log(logging.DRYRUN, dryrun, args)) log_lev = getattr(logging, log_level.upper()) logger.setLevel(log_lev) @@ -167,817 +127,97 @@ util.apply_formatter(file_handler) file_handler.addFilter(fmt_filter) logger.addHandler(file_handler) -# Actual API call to connect to qbt. -host = cfg['qbt']['host'] -if 'user' in cfg['qbt']: - username = cfg['qbt']['user'] -else: - username = '' -if 'pass' in cfg['qbt']: - password = cfg['qbt']['pass'] -else: - password = '' - -client = Client(host=host, username=username, password=password) -try: - client.auth_log_in() -except (LoginFailed,APIConnectionError)as e: - logger.error(e) - sys.exit(0) - -############FUNCTIONS############## -#truncate the value of the torrent url to remove sensitive information -def trunc_val(s, d, n=3): - return d.join(s.split(d, n)[:n]) - - -#Get category from config file based on path provided -def get_category(path): - if 'cat' in cfg and cfg["cat"] != None: - cat_path = cfg["cat"] - for i, f in cat_path.items(): - if f in path: - category = i - return category - else: - category = '' - return category - category = '' - logger.warning(f'No categories matched for the save path {path}. Check your config.yml file. - Setting category to NULL') - return category - -#Get tags from config file based on keyword -def get_tags(urls): - new_tag = '' - max_ratio = '' - max_seeding_time = '' - limit_upload_speed = '' - url = '' - try: - url = trunc_val(urls[0], '/') - except IndexError as e: - logger.debug(f"Tracker Urls:{urls}") - logger.debug(e) - if 'tags' in cfg and cfg["tags"] != None and urls: - tag_values = cfg['tags'] - for tag_url, tag_details in tag_values.items(): - new_tag = '' - max_ratio = '' - max_seeding_time = '' - limit_upload_speed = '' - url = '' - # If using Format 1 - if(type(tag_details) == str): - new_tag = tag_details - # Using Format 2 - else: - if 'tag' in tag_details: - new_tag = tag_details['tag'] - else: - logger.warning(f'No tags defined for {tag_url}. Please check your config.yml file.') - if 'max_ratio' in tag_details: max_ratio = tag_details['max_ratio'] - if 'max_seeding_time' in tag_details: max_seeding_time = tag_details['max_seeding_time'] - if 'limit_upload_speed' in tag_details: limit_upload_speed = tag_details['limit_upload_speed'] - for url in urls: - if tag_url in url: - return (new_tag,trunc_val(url, '/'),max_ratio,max_seeding_time,limit_upload_speed) - else: - return (new_tag,url,max_ratio,max_seeding_time,limit_upload_speed) - new_tag = '' - max_ratio = '' - max_seeding_time = '' - limit_upload_speed = '' - url = '' - try: - url = trunc_val(urls[0], '/') - except IndexError as e: - logger.debug(f"Tracker Urls:{urls}") - logger.debug(e) - logger.warning(f'No tags matched for {url}. Please check your config.yml file. 
Setting tag to NULL') - return (new_tag,url,max_ratio,max_seeding_time,limit_upload_speed) - - -#Move files from source to destination, mod variable is to change the date modified of the file being moved -def move_files(src,dest,mod=False): - dest_path = os.path.dirname(dest) - if os.path.isdir(dest_path) == False: - os.makedirs(dest_path) - shutil.move(src, dest) - if(mod == True): - modTime = time.time() - os.utime(dest,(modTime,modTime)) - - -#Remove any empty directories after moving files -def remove_empty_directories(pathlib_root_dir,pattern): - # list all directories recursively and sort them by path, - # longest first - L = sorted( - pathlib_root_dir.glob(pattern), - key=lambda p: len(str(p)), - reverse=True, - ) - for pdir in L: - try: - pdir.rmdir() # remove directory if empty - except OSError: - continue # catch and continue if non-empty - -# Will create a 2D Dictionary with the torrent name as the key -# torrentdict = {'TorrentName1' : {'Category':'TV', 'save_path':'/data/torrents/TV', 'count':1, 'msg':'[]'...}, -# 'TorrentName2' : {'Category':'Movies', 'save_path':'/data/torrents/Movies'}, 'count':2, 'msg':'[]'...} -# List of dictionary key definitions -# Category = Returns category of the torrent (str) -# save_path = Returns the save path of the torrent (str) -# count = Returns a count of the total number of torrents with the same name (int) -# msg = Returns a list of torrent messages by name (list of str) -# status = Returns the list of status numbers of the torrent by name (0: Tracker is disabled (used for DHT, PeX, and LSD), 1: Tracker has not been contacted yet, 2:Tracker has been contacted and is working, 3:Tracker is updating, 4:Tracker has been contacted, but it is not working (or doesn't send proper replies) -# is_complete = Returns the state of torrent (Returns True if at least one of the torrent with the State is categorized as Complete.) 
-# first_hash = Returns the hash number of the original torrent (Assuming the torrent list is sorted by date added (Asc)) -def get_torrent_info(t_list): - torrentdict = {} - for torrent in t_list: - save_path = torrent.save_path - category = get_category(save_path) - is_complete = False - msg = None - status = None - if torrent.name in torrentdict: - t_count = torrentdict[torrent.name]['count'] + 1 - msg_list = torrentdict[torrent.name]['msg'] - status_list = torrentdict[torrent.name]['status'] - is_complete = True if torrentdict[torrent.name]['is_complete'] == True else torrent.state_enum.is_complete - first_hash = torrentdict[torrent.name]['first_hash'] - else: - t_count = 1 - msg_list = [] - status_list = [] - is_complete = torrent.state_enum.is_complete - first_hash = torrent.hash - try: - msg,status = [(x.msg,x.status) for x in torrent.trackers if x.url.startswith('http')][0] - except IndexError: - pass - if msg != None: msg_list.append(msg) - if status != None: status_list.append(status) - torrentattr = {'Category': category, 'save_path': save_path, 'count': t_count, 'msg': msg_list, 'status': status_list, 'is_complete': is_complete, 'first_hash':first_hash} - torrentdict[torrent.name] = torrentattr - return torrentdict - -# Function used to recheck paused torrents sorted by size and resume torrents that are completed -def set_recheck(): - if recheck: - util.separator(f"Rechecking Paused Torrents", space=False, border=False) - #sort by size and paused - torrent_sorted_list = client.torrents.info(status_filter='paused',sort='size') - if torrent_sorted_list: - for torrent in torrent_sorted_list: - new_tag = get_tags([x.url for x in torrent.trackers if x.url.startswith('http')])[0] - #Resume torrent if completed - if torrent.progress == 1: - #Check to see if torrent meets AutoTorrentManagement criteria - logger.debug(f'Rechecking Torrent to see if torrent meets AutoTorrentManagement Criteria') - logger.debug(util.insert_space(f'- Torrent Name: {torrent.name}',2)) - logger.debug(util.insert_space(f'-- Ratio vs Max Ratio: {torrent.ratio} < {torrent.max_ratio}',4)) - logger.debug(util.insert_space(f'-- Seeding Time vs Max Seed Time: {timedelta(seconds=torrent.seeding_time)} < {timedelta(minutes=torrent.max_seeding_time)}',4)) - if torrent.ratio < torrent.max_ratio and (torrent.seeding_time < (torrent.max_seeding_time * 60)): - if dry_run: - logger.dryrun(f'Not Resuming [{new_tag}] - {torrent.name}') - else: - logger.info(f'Resuming [{new_tag}] - {torrent.name}') - torrent.resume() - #Recheck - elif torrent.progress == 0 and torrentdict[torrent.name]['is_complete'] and not torrent.state_enum.is_checking: - if dry_run: - logger.dryrun(f'Not Rechecking [{new_tag}] - {torrent.name}') - else: - logger.info(f'Rechecking [{new_tag}] - {torrent.name}') - torrent.recheck() - -# Function used to move any torrents from the cross seed directory to the correct save directory -def set_cross_seed(): - if cross_seed: - util.separator(f"Checking for Cross-Seed Torrents", space=False, border=False) - # List of categories for all torrents moved - categories = [] - # Keep track of total torrents moved - total = 0 - #Track # of torrents tagged that are not cross-seeded - t_tagged = 0 - - if not os.path.exists(os.path.join(cfg['directory']['cross_seed'], '')): - logger.error(f"Path Error: cross_seed directory not found at {os.path.abspath(os.path.join(cfg['directory']['cross_seed'], ''))}") - return - - # Only get torrent files - cs_files = [f for f in os.listdir(os.path.join(cfg['directory']['cross_seed'], 
-# Function used to move any torrents from the cross seed directory to the correct save directory
-def set_cross_seed():
-    if cross_seed:
-        util.separator(f"Checking for Cross-Seed Torrents", space=False, border=False)
-        # List of categories for all torrents moved
-        categories = []
-        # Keep track of total torrents moved
-        total = 0
-        #Track # of torrents tagged that are not cross-seeded
-        t_tagged = 0
-
-        if not os.path.exists(os.path.join(cfg['directory']['cross_seed'], '')):
-            logger.error(f"Path Error: cross_seed directory not found at {os.path.abspath(os.path.join(cfg['directory']['cross_seed'], ''))}")
-            return
-
-        # Only get torrent files
-        cs_files = [f for f in os.listdir(os.path.join(cfg['directory']['cross_seed'], '')) if f.endswith('torrent')]
-        dir_cs = os.path.join(cfg['directory']['cross_seed'], '')
-        dir_cs_out = os.path.join(dir_cs,'qbit_manage_added')
-        os.makedirs(dir_cs_out,exist_ok=True)
-        for file in cs_files:
-            t_name = file.split(']', 2)[2].split('.torrent')[0]
-            # Substring key match in dictionary (used because t_name might not match exactly with a torrentdict key)
-            # Returns the dictionary of filtered items
-            torrentdict_file = dict(filter(lambda item: t_name in item[0], torrentdict.items()))
-            if torrentdict_file:
-                # Get the exact torrent match name from torrentdict
-                t_name = next(iter(torrentdict_file))
-                category = torrentdict[t_name]['Category']
-                dest = os.path.join(torrentdict[t_name]['save_path'], '')
-                src = os.path.join(dir_cs,file)
-                dir_cs_out = os.path.join(dir_cs,'qbit_manage_added',file)
-                categories.append(category)
-                if dry_run:
-                    logger.dryrun(f'Not Adding to qBittorrent:')
-                    logger.dryrun(util.insert_space(f'Torrent Name: {t_name}',3))
-                    logger.dryrun(util.insert_space(f'Category: {category}',7))
-                    logger.dryrun(util.insert_space(f'Save_Path: {dest}',6))
-                else:
-                    if torrentdict[t_name]['is_complete']:
-                        client.torrents.add(torrent_files=src,
-                                            save_path=dest,
-                                            category=category,
-                                            tags='cross-seed',
-                                            is_paused=True)
-                        shutil.move(src, dir_cs_out)
-                        logger.info(f'Adding to qBittorrent:')
-                        logger.info(util.insert_space(f'Torrent Name: {t_name}',3))
-                        logger.info(util.insert_space(f'Category: {category}',7))
-                        logger.info(util.insert_space(f'Save_Path: {dest}',6))
-                    else:
-                        logger.info(f'Found {t_name} in {dir_cs} but original torrent is not complete.')
-                        logger.info(f'Not adding to qBittorrent')
-            else:
-                if dry_run:
-                    logger.dryrun(f'{t_name} not found in torrents.')
-                else:
-                    logger.warning(f'{t_name} not found in torrents.')
-        numcategory = Counter(categories)
-        #Tag missing cross-seed torrent tags
-        for torrent in torrent_list:
-            t_name = torrent.name
-            if 'cross-seed' not in torrent.tags and torrentdict[t_name]['count'] > 1 and torrentdict[t_name]['first_hash'] != torrent.hash:
-                t_tagged += 1
-                if dry_run:
-                    logger.dryrun(f'Not Adding cross-seed tag to {t_name}')
-                else:
-                    logger.info(f'Adding cross-seed tag to {t_name}')
-                    torrent.add_tags(tags='cross-seed')
-
-        if dry_run:
-            for c in numcategory:
-                total += numcategory[c]
-                if numcategory[c] > 0: logger.dryrun(f'{numcategory[c]} {c} cross-seed .torrents not added.')
-            if total > 0: logger.dryrun(f'Total {total} cross-seed .torrents not added.')
-            if t_tagged > 0: logger.dryrun(f'Total {t_tagged} cross-seed .torrents not tagged.')
-        else:
-            for c in numcategory:
-                total += numcategory[c]
-                if numcategory[c] > 0: logger.info(f'{numcategory[c]} {c} cross-seed .torrents added.')
-            if total > 0: logger.info(f'Total {total} cross-seed .torrents added.')
-            if t_tagged > 0: logger.info(f'Total {t_tagged} cross-seed .torrents tagged.')
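The dict(filter(...)) lookup in set_cross_seed above is a substring match: the name parsed from the .torrent file only has to be contained in a tracked torrent name, after which the exact key is taken from the first match. An equivalent comprehension, with a hypothetical entry:

torrentdict = {"Some.Show.S01E01.1080p-GRP": {"Category": "TV"}}   # hypothetical
t_name = "Some.Show.S01E01"

matches = {k: v for k, v in torrentdict.items() if t_name in k}
if matches:
    t_name = next(iter(matches))             # -> 'Some.Show.S01E01.1080p-GRP'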
-def set_category():
-    if cat_update:
-        util.separator(f"Updating Categories", space=False, border=False)
-        num_cat = 0
-        for torrent in torrent_list:
-            if torrent.category == '':
-                new_cat = get_category(torrent.save_path)
-                try:
-                    t_url = [trunc_val(x.url, '/') for x in torrent.trackers if x.url.startswith('http')][0]
-                except IndexError:
-                    t_url = None
-                if dry_run:
-                    logger.dryrun(util.insert_space(f'Torrent Name: {torrent.name}',3))
-                    logger.dryrun(util.insert_space(f'New Category: {new_cat}',3))
-                    logger.dryrun(util.insert_space(f'Tracker: {t_url}',8))
-                    num_cat += 1
-                else:
-                    logger.info(util.insert_space(f'- Torrent Name: {torrent.name}',1))
-                    logger.info(util.insert_space(f'-- New Category: {new_cat}',5))
-                    logger.info(util.insert_space(f'-- Tracker: {t_url}',5))
-                    torrent.set_category(category=new_cat)
-                    num_cat += 1
-        if dry_run:
-            if num_cat >= 1:
-                logger.dryrun(f'Did not update {num_cat} new categories.')
-            else:
-                logger.dryrun(f'No new torrents to categorize.')
-        else:
-            if num_cat >= 1:
-                logger.info(f'Updated {num_cat} new categories.')
-            else:
-                logger.info(f'No new torrents to categorize.')
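set_category above leans on get_category(save_path), which is defined outside this hunk. Given that the cat section of config.yml pairs category names with save paths, it plausibly reduces to a prefix match like the sketch below; the mapping and function body here are assumptions for illustration, not the script's actual code:

cat_cfg = {"TV": "/data/torrents/TV", "Movies": "/data/torrents/Movies"}  # hypothetical

def get_category_sketch(save_path):
    for category, path in cat_cfg.items():
        if save_path.startswith(path):
            return category
    return ""                                 # no match: leave uncategorized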
-def set_tags():
-    if tag_update:
-        util.separator(f"Updating Tags", space=False, border=False)
-        num_tags = 0
-        for torrent in torrent_list:
-            if torrent.tags == '' or ('cross-seed' in torrent.tags and len([e for e in torrent.tags.split(",") if not 'noHL' in e]) == 1):
-                new_tag,url,max_ratio,max_seeding_time,limit_upload_speed = get_tags([x.url for x in torrent.trackers if x.url.startswith('http')])
-                if new_tag:
-                    if dry_run:
-                        num_tags += 1
-                        logger.dryrun(util.insert_space(f'Torrent Name: {torrent.name}',3))
-                        logger.dryrun(util.insert_space(f'New Tag: {new_tag}',8))
-                        logger.dryrun(util.insert_space(f'Tracker: {url}',8))
-                        if limit_upload_speed:
-                            if limit_upload_speed == -1:
-                                logger.dryrun(util.insert_space(f'Limit UL Speed: Infinity',1))
-                            else:
-                                logger.dryrun(util.insert_space(f'Limit UL Speed: {limit_upload_speed} kB/s',1))
-                        if max_ratio:
-                            if max_ratio == -2:
-                                logger.dryrun(util.insert_space(f'Share Limit: Use Global Share Limit',4))
-                                continue
-                            elif max_ratio == -1:
-                                logger.dryrun(util.insert_space(f'Share Limit: Set No Share Limit',4))
-                                continue
-                        else:
-                            max_ratio = torrent.max_ratio
-                        if max_seeding_time:
-                            if max_seeding_time == -2:
-                                logger.dryrun(util.insert_space(f'Share Limit: Use Global Share Limit',4))
-                                continue
-                            elif max_seeding_time == -1:
-                                logger.dryrun(util.insert_space(f'Share Limit: Set No Share Limit',4))
-                                continue
-                        else:
-                            max_seeding_time = torrent.max_seeding_time
-                        if max_ratio != torrent.max_ratio and max_seeding_time != torrent.max_seeding_time:
-                            logger.dryrun(util.insert_space(f'Share Limit: Max Ratio = {max_ratio}, Max Seed Time = {max_seeding_time} min',4))
-                        elif max_ratio != torrent.max_ratio:
-                            logger.dryrun(util.insert_space(f'Share Limit: Max Ratio = {max_ratio}',4))
-                        elif max_seeding_time != torrent.max_seeding_time:
-                            logger.dryrun(util.insert_space(f'Share Limit: Max Seed Time = {max_seeding_time} min',4))
-                    else:
-                        torrent.add_tags(tags=new_tag)
-                        num_tags += 1
-                        logger.info(util.insert_space(f'Torrent Name: {torrent.name}',3))
-                        logger.info(util.insert_space(f'New Tag: {new_tag}',8))
-                        logger.info(util.insert_space(f'Tracker: {url}',8))
-                        if limit_upload_speed:
-                            if limit_upload_speed == -1:
-                                logger.info(util.insert_space(f'Limit UL Speed: Infinity',1))
-                            else:
-                                logger.info(util.insert_space(f'Limit UL Speed: {limit_upload_speed} kB/s',1))
-                            torrent.set_upload_limit(limit_upload_speed*1024)
-                        if max_ratio:
-                            if max_ratio == -2:
-                                logger.info(util.insert_space(f'Share Limit: Use Global Share Limit',4))
-                                torrent.set_share_limits(-2,-2)
-                                continue
-                            elif max_ratio == -1:
-                                logger.info(util.insert_space(f'Share Limit: Set No Share Limit',4))
-                                torrent.set_share_limits(-1,-1)
-                                continue
-                        else:
-                            max_ratio = torrent.max_ratio
-                        if max_seeding_time:
-                            if max_seeding_time == -2:
-                                logger.info(util.insert_space(f'Share Limit: Use Global Share Limit',4))
-                                torrent.set_share_limits(-2,-2)
-                                continue
-                            elif max_seeding_time == -1:
-                                logger.info(util.insert_space(f'Share Limit: Set No Share Limit',4))
-                                torrent.set_share_limits(-1,-1)
-                                continue
-                        else:
-                            max_seeding_time = torrent.max_seeding_time
-                        if max_ratio != torrent.max_ratio and max_seeding_time != torrent.max_seeding_time:
-                            logger.info(util.insert_space(f'Share Limit: Max Ratio = {max_ratio}, Max Seed Time = {max_seeding_time} min',4))
-                        elif max_ratio != torrent.max_ratio:
-                            logger.info(util.insert_space(f'Share Limit: Max Ratio = {max_ratio}',4))
-                        elif max_seeding_time != torrent.max_seeding_time:
-                            logger.info(util.insert_space(f'Share Limit: Max Seed Time = {max_seeding_time} min',4))
-                        torrent.set_share_limits(max_ratio,max_seeding_time)
-
-        if dry_run:
-            if num_tags >= 1:
-                logger.dryrun(f'Did not update {num_tags} new tags.')
-            else:
-                logger.dryrun('No new torrents to tag.')
-        else:
-            if num_tags >= 1:
-                logger.info(f'Updated {num_tags} new tags.')
-            else:
-                logger.info('No new torrents to tag.')
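The -2/-1 values handled throughout set_tags above are qBittorrent's share-limit sentinels: -2 defers to the global limit, -1 disables the limit, and anything else is a literal ratio or minute count. A small helper capturing that convention (names are illustrative):

GLOBAL_LIMIT, NO_LIMIT = -2, -1

def describe_share_limit(value, unit=""):
    if value == GLOBAL_LIMIT:
        return "Use Global Share Limit"
    if value == NO_LIMIT:
        return "Set No Share Limit"
    return f"Max = {value}{unit}"             # e.g. describe_share_limit(180, " min")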
-def set_rem_unregistered():
-    if rem_unregistered:
-        util.separator(f"Removing Unregistered Torrents", space=False, border=False)
-        rem_unr = 0
-        del_tor = 0
-        pot_unr = ''
-        for torrent in torrent_list:
-            t_name = torrent.name
-            t_count = torrentdict[t_name]['count']
-            t_msg = torrentdict[t_name]['msg']
-            t_status = torrentdict[t_name]['status']
-            for x in torrent.trackers:
-                if x.url.startswith('http'):
-                    t_url = trunc_val(x.url, '/')
-                    msg_up = x.msg.upper()
-                    n_info = ''
-                    n_d_info = ''
-
-                    n_info += (util.insert_space(f'Torrent Name: {t_name}',3)+'\n')
-                    n_info += (util.insert_space(f'Status: {msg_up}',9)+'\n')
-                    n_info += (util.insert_space(f'Tracker: {t_url}',8)+'\n')
-                    n_info += (util.insert_space(f'Deleted .torrent but NOT content files.',8)+'\n')
-
-                    n_d_info += (util.insert_space(f'Torrent Name: {t_name}',3)+'\n')
-                    n_d_info += (util.insert_space(f'Status: {msg_up}',9)+'\n')
-                    n_d_info += (util.insert_space(f'Tracker: {t_url}',8)+'\n')
-                    n_d_info += (util.insert_space(f'Deleted .torrent AND content files.',8)+'\n')
-
-                    if (x.status == 4 and 'DOWN' not in msg_up and 'UNREACHABLE' not in msg_up):
-                        pot_unr += (util.insert_space(f'Torrent Name: {t_name}',3)+'\n')
-                        pot_unr += (util.insert_space(f'Status: {msg_up}',9)+'\n')
-                    if ('UNREGISTERED' in msg_up or \
-                        'TORRENT NOT FOUND' in msg_up or \
-                        'TORRENT IS NOT FOUND' in msg_up or \
-                        'NOT REGISTERED' in msg_up or \
-                        'HTTPS://BEYOND-HD.ME/TORRENTS' in msg_up or \
-                        'NOT EXIST' in msg_up or \
-                        'UNKNOWN TORRENT' in msg_up or \
-                        'REDOWNLOAD' in msg_up or \
-                        'PACKS' in msg_up or \
-                        'REPACKED' in msg_up or \
-                        'PACK' in msg_up or \
-                        'TRUMP' in msg_up or \
-                        'RETITLED' in msg_up
-                        ) and x.status == 4 and 'DOWN' not in msg_up and 'UNREACHABLE' not in msg_up:
-                        if t_count > 1:
-                            if dry_run:
-                                if '' in t_msg:
-                                    util.print_multiline(n_info,"DRYRUN")
-                                    rem_unr += 1
-                                else:
-                                    util.print_multiline(n_d_info,"DRYRUN")
-                                    del_tor += 1
-                            else:
-                                # Checks if any of the original torrents are working
-                                if '' in t_msg or 2 in t_status:
-                                    util.print_multiline(n_info)
-                                    torrent.delete(hash=torrent.hash, delete_files=False)
-                                    rem_unr += 1
-                                else:
-                                    util.print_multiline(n_d_info)
-                                    tor_delete_recycle(torrent)
-                                    del_tor += 1
-                        else:
-                            if dry_run:
-                                util.print_multiline(n_d_info,"DRYRUN")
-                                del_tor += 1
-                            else:
-                                util.print_multiline(n_d_info)
-                                tor_delete_recycle(torrent)
-                                del_tor += 1
-        if dry_run:
-            if rem_unr >= 1 or del_tor >= 1:
-                if rem_unr >= 1: logger.dryrun(f'Did not delete {rem_unr} .torrent(s) but not content files.')
-                if del_tor >= 1: logger.dryrun(f'Did not delete {del_tor} .torrent(s) AND content files.')
-            else:
-                logger.dryrun('No unregistered torrents found.')
-        else:
-            if rem_unr >= 1 or del_tor >= 1:
-                if rem_unr >= 1: logger.info(f'Deleted {rem_unr} .torrent(s) but not content files.')
-                if del_tor >= 1: logger.info(f'Deleted {del_tor} .torrent(s) AND content files.')
-            else:
-                logger.info('No unregistered torrents found.')
-        if (len(pot_unr) > 0):
-            util.separator(f"Potential Unregistered torrents", space=False, border=False, loglevel='DEBUG')
-            util.print_multiline(pot_unr,"DEBUG")
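The deletion test in set_rem_unregistered above boils down to three conditions: tracker status 4 ("contacted, but not working"), a message containing one of the known unregistered phrases, and no sign of a plain tracker outage. Distilled into a predicate (phrase list abbreviated from the full set above):

UNREGISTERED_PHRASES = ("UNREGISTERED", "TORRENT NOT FOUND", "NOT REGISTERED")  # abbreviated

def looks_unregistered(status, msg):
    msg_up = msg.upper()
    return (status == 4
            and "DOWN" not in msg_up          # tracker outage, not a removal
            and "UNREACHABLE" not in msg_up
            and any(p in msg_up for p in UNREGISTERED_PHRASES))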
-def set_rem_orphaned():
-    if rem_orphaned:
-        util.separator(f"Checking for Orphaned Files", space=False, border=False)
-        global torrent_list
-        torrent_files = []
-        root_files = []
-        orphaned_files = []
-        excluded_orphan_files = []
-        orphaned_parent_path = set()
-
-        if (remote_path != root_path):
-            root_files = [os.path.join(path.replace(remote_path,root_path), name) for path, subdirs, files in os.walk(remote_path) for name in files if os.path.join(remote_path,'orphaned_data') not in path and os.path.join(remote_path,'.RecycleBin') not in path]
-        else:
-            root_files = [os.path.join(path, name) for path, subdirs, files in os.walk(root_path) for name in files if os.path.join(root_path,'orphaned_data') not in path and os.path.join(root_path,'.RecycleBin') not in path]
-
-        #Get an updated list of torrents
-        torrent_list = client.torrents.info(sort='added_on')
-
-        for torrent in torrent_list:
-            for file in torrent.files:
-                torrent_files.append(os.path.join(torrent.save_path,file.name))
-
-        orphaned_files = set(root_files) - set(torrent_files)
-        orphaned_files = sorted(orphaned_files)
-
-        if 'orphaned' in cfg and cfg["orphaned"] is not None and 'exclude_patterns' in cfg['orphaned'] and cfg['orphaned']['exclude_patterns'] != '':
-            exclude_patterns = cfg['orphaned']['exclude_patterns']
-            excluded_orphan_files = [file for file in orphaned_files for exclude_pattern in exclude_patterns if fnmatch.fnmatch(file, exclude_pattern.replace(remote_path,root_path))]
-
-        orphaned_files = set(orphaned_files) - set(excluded_orphan_files)
-        util.separator(f"Torrent Files", space=False, border=False, loglevel='DEBUG')
-        util.print_multiline("\n".join(torrent_files),'DEBUG')
-        util.separator(f"Root Files", space=False, border=False, loglevel='DEBUG')
-        util.print_multiline("\n".join(root_files),'DEBUG')
-        util.separator(f"Excluded Orphan Files", space=False, border=False, loglevel='DEBUG')
-        util.print_multiline("\n".join(excluded_orphan_files),'DEBUG')
-        util.separator(f"Orphaned Files", space=False, border=False, loglevel='DEBUG')
-        util.print_multiline("\n".join(orphaned_files),'DEBUG')
-        util.separator(f"Deleting Orphaned Files", space=False, border=False, loglevel='DEBUG')
-
-        if (orphaned_files):
-            if dry_run:
-                dir_out = os.path.join(remote_path,'orphaned_data')
-                util.separator(f"{len(orphaned_files)} Orphaned files found", space=False, border=False, loglevel='DRYRUN')
-                util.print_multiline("\n".join(orphaned_files),'DRYRUN')
-                logger.dryrun(f'Did not move {len(orphaned_files)} Orphaned files to {dir_out.replace(remote_path,root_path)}')
-            else:
-                dir_out = os.path.join(remote_path,'orphaned_data')
-                os.makedirs(dir_out,exist_ok=True)
-
-                for file in orphaned_files:
-                    src = file.replace(root_path,remote_path)
-                    dest = os.path.join(dir_out,file.replace(root_path,''))
-                    move_files(src,dest)
-                    orphaned_parent_path.add(os.path.dirname(file).replace(root_path,remote_path))
-                util.separator(f"{len(orphaned_files)} Orphaned files found", space=False, border=False)
-                util.print_multiline("\n".join(orphaned_files))
-                logger.info(f'Moved {len(orphaned_files)} Orphaned files to {dir_out.replace(remote_path,root_path)}')
-                #Delete empty directories after moving orphan files
-                logger.info(f'Cleaning up any empty directories...')
-                for parent_path in orphaned_parent_path:
-                    remove_empty_directories(Path(parent_path),"**/*")
-        else:
-            if dry_run:
-                logger.dryrun('No Orphaned Files found.')
-            else:
-                logger.info('No Orphaned Files found.')
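The orphan scan above is a set difference between everything found on disk and everything some torrent still claims; sketched with hypothetical paths:

root_files = {"/data/torrents/a.mkv", "/data/torrents/stray.mkv"}   # on disk
torrent_files = {"/data/torrents/a.mkv"}                            # known to qBittorrent

orphaned = sorted(set(root_files) - set(torrent_files))
# -> ['/data/torrents/stray.mkv']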
-def set_tag_nohardlinks():
-    if tag_nohardlinks:
-        util.separator(f"Tagging Torrents with No Hardlinks", space=False, border=False)
-        nohardlinks = cfg['nohardlinks']
-        n_info = ''
-        t_count = 0 #counter for the number of torrents that have no hard links
-        t_del = 0 #counter for the number of torrents that have no hard links and meet the ratio limit/seed limit criteria for deletion
-        t_del_cs = 0 #counter for the number of torrents that have no hard links and meet the ratio limit/seed limit criteria for deletion, including cross-seeds
-        tdel_tags = 0 #counter for the number of torrents that previously had no hard links but now have hard links
-        tdel_dict = {} #dictionary to track the torrent names and content paths that meet the deletion criteria
-        t_excl_tags = [] #list of tags to exclude based on config.yml
-
-        for category in nohardlinks:
-            torrent_list = client.torrents.info(category=category,filter='completed')
-            #Convert string to list if only one tag defined.
-            if ('exclude_tags' in nohardlinks[category]):
-                if isinstance(nohardlinks[category]['exclude_tags'],str):
-                    t_excl_tags.append(nohardlinks[category]['exclude_tags'])
-                else:
-                    t_excl_tags = nohardlinks[category]['exclude_tags']
-
-            if len(torrent_list) == 0:
-                logger.error('No torrents found in the category ('+category+') defined in config.yml inside the nohardlinks section. Please check if this matches a category in qBittorrent and has 1 or more torrents.')
-                continue
-            for torrent in torrent_list:
-                if not dry_run:
-                    torrent.resume()
-                if('exclude_tags' in nohardlinks[category] and (any(tag in torrent.tags for tag in t_excl_tags))):
-                    #Skip to the next torrent if we find any torrents that are in the exclude tags
-                    continue
-                else:
-                    #Checks for any hard links and not already tagged
-                    if (nohardlink(torrent['content_path'].replace(root_path,remote_path))):
-                        #Will only tag new torrents that don't have the noHL tag
-                        if('noHL' not in torrent.tags):
-                            t_count += 1
-                            n_info += (f"No hard links found! Adding tags noHL\n")
-                            n_info += (util.insert_space(f'Torrent Name: {torrent.name}',3)+'\n')
-
-                            if(nohardlinks[category] != None):
-                                #set the max seeding time for the torrent
-                                if ('max_seeding_time' in nohardlinks[category]):
-                                    seeding_time_limit = nohardlinks[category]['max_seeding_time']
-                                    n_info += (util.insert_space(f'New Max Seed Time: {str(seeding_time_limit)}',3)+'\n')
-                                else:
-                                    seeding_time_limit = -2
-                                #set the max ratio for the torrent
-                                if ('max_ratio' in nohardlinks[category]):
-                                    ratio_limit = nohardlinks[category]['max_ratio']
-                                    n_info += (util.insert_space(f'New Max Ratio: {str(ratio_limit)}',3)+'\n')
-                                else:
-                                    ratio_limit = -2
-                            else:
-                                seeding_time_limit = -2
-                                ratio_limit = -2
-                            if not dry_run:
-                                #set the tag for no hard links
-                                torrent.add_tags(tags='noHL')
-                                client.torrents_set_share_limits(ratio_limit,seeding_time_limit,torrent.hash)
-
-                        #Cleans up previously tagged noHL torrents
-                        else:
-                            if(nohardlinks[category] != None):
-                                # Deletes torrent with data if cleanup is set to true and meets the ratio/seeding requirements
-                                if ('cleanup' in nohardlinks[category] and nohardlinks[category]['cleanup'] and torrent.state_enum.is_paused and len(nohardlinks[category])>0):
-                                    t_del += 1
-                                    n_info += (f'Torrent Name: {torrent.name} has no hard links found and meets ratio/seeding requirements.\n')
-                                    tdel_dict[torrent.name] = torrent['content_path'].replace(root_path,remote_path)
-                                    if dry_run:
-                                        n_info += (util.insert_space(f'Cleanup flag set to true. NOT Deleting torrent + contents.',6)+'\n')
-                                    else:
-                                        n_info += (util.insert_space(f'Cleanup flag set to true. Deleting torrent + contents.',6)+'\n')
-
-                    #Checks to see if previously noHL-tagged torrents now have hard links.
-                    if (not (nohardlink(torrent['content_path'].replace(root_path,remote_path))) and ('noHL' in torrent.tags)):
-                        n_info += (f'Previously tagged noHL Torrent Name: {torrent.name} now has hard links.\n')
-                        n_info += ('Removing tags noHL.\n')
-                        n_info += ('Removing ratio and seeding time limits.\n')
-                        tdel_tags += 1
-                        if not dry_run:
-                            #Remove tags and share limits
-                            torrent.remove_tags(tags='noHL')
-                            client.torrents_set_share_limits(-2,-2,torrent.hash)
-
-            if(nohardlinks[category] != None):
-                #loop through the torrent list again for cleanup purposes
-                if ('cleanup' in nohardlinks[category] and nohardlinks[category]['cleanup']):
-                    for torrent in torrent_list:
-                        if torrent.name in tdel_dict.keys() and 'noHL' in torrent.tags:
-                            #Double check that the content path is the same before we delete anything
-                            if torrent['content_path'].replace(root_path,remote_path) == tdel_dict[torrent.name]:
-                                t_del_cs += 1
-                                if not dry_run:
-                                    if (os.path.exists(torrent['content_path'].replace(root_path,remote_path))):
-                                        tor_delete_recycle(torrent)
-                                    else:
-                                        torrent.delete(hash=torrent.hash, delete_files=False)
-
-        if dry_run:
-            if t_count >= 1 or len(n_info) > 1:
-                util.print_multiline(n_info,"DRYRUN")
-                logger.dryrun(f'Did not tag/set ratio limit/seeding time for {t_count} .torrent(s)')
-                if t_del >= 1:
-                    logger.dryrun(f'Did not delete {t_del} .torrent(s) or content files.')
-                    logger.dryrun(f'Did not delete {t_del_cs} .torrent(s) (including cross-seed) or content files.')
-                if tdel_tags >= 1:
-                    logger.dryrun(f'Did not delete noHL tags / remove ratio limit/seeding time for {tdel_tags} .torrent(s)')
-            else:
-                logger.dryrun('No torrents to tag with no hard links.')
-        else:
-            if t_count >= 1 or len(n_info) > 1:
-                util.print_multiline(n_info)
-                logger.info(f'Tagged/set ratio limit/seeding time for {t_count} .torrent(s)')
-                if t_del >= 1:
-                    logger.info(f'Deleted {t_del} .torrent(s) AND content files.')
-                    logger.info(f'Deleted {t_del_cs} .torrent(s) (includes cross-seed torrents) AND content files.')
-                if tdel_tags >= 1:
-                    logger.info(f'Deleted noHL tags / removed ratio limit/seeding time for {tdel_tags} .torrent(s)')
-            else:
-                logger.info('No torrents to tag with no hard links.')
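tdel_dict above acts as a safety net: a torrent flagged for cleanup in the first pass is only deleted in the second pass if its content path still matches what was recorded, so a torrent renamed or moved in between is left alone. The guard, reduced to its essence (hypothetical data):

tdel_dict = {"Some.Movie.2020": "/remote/torrents/Some.Movie.2020"}  # hypothetical

def safe_to_delete(torrent_name, content_path):
    # only delete if the path recorded at flag time is unchanged
    return tdel_dict.get(torrent_name) == content_path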
-#will check if there are any hard links if it passes a file or folder
-def nohardlink(file):
-    check = True
-    if (os.path.isfile(file)):
-        if (os.stat(file).st_nlink > 1):
-            check = False
-    else:
-        for path, subdirs, files in os.walk(file):
-            for x in files:
-                if (os.stat(os.path.join(path,x)).st_nlink > 1):
-                    check = False
-    return check
-
-def tor_delete_recycle(torrent):
-    if 'recyclebin' in cfg and cfg["recyclebin"] != None:
-        if 'enabled' in cfg["recyclebin"] and cfg["recyclebin"]['enabled']:
-            tor_files = []
-            #Define torrent files/folders
-            for file in torrent.files:
-                tor_files.append(os.path.join(torrent.save_path,file.name))
-
-            #Create recycle bin if not exists
-            recycle_path = os.path.join(remote_path,'.RecycleBin')
-            os.makedirs(recycle_path,exist_ok=True)
-
-            #Move files from torrent contents to Recycle bin
-            for file in tor_files:
-                src = file.replace(root_path,remote_path)
-                dest = os.path.join(recycle_path,file.replace(root_path,''))
-                #move files and change date modified
-                move_files(src,dest,True)
-            util.separator(f"Moving {len(tor_files)} files to RecycleBin", space=False, border=False, loglevel='DEBUG')
-            util.print_multiline("\n".join(tor_files),'DEBUG')
-            logger.debug(f'Moved {len(tor_files)} files to {recycle_path.replace(remote_path,root_path)}')
-            #Delete torrent and files
-            torrent.delete(hash=torrent.hash, delete_files=False)
-            #Remove any empty directories
-            remove_empty_directories(Path(torrent.save_path.replace(root_path,remote_path)),"**/*")
-        else:
-            torrent.delete(hash=torrent.hash, delete_files=True)
-    else:
-        logger.error('recyclebin not defined in config.')
-        return
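The mod=True passed to move_files in tor_delete_recycle above matters: stamping the file's mtime as it enters .RecycleBin makes empty_after_x_days count from the moment of deletion rather than from when the file was last written. The stamp itself is just:

import os, time

def stamp_now(path):
    now = time.time()
    os.utime(path, (now, now))   # set both atime and mtime to "now"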
-def set_empty_recycle():
-    if not skip_recycle:
-        num_del = 0
-        n_info = ''
-        if 'recyclebin' in cfg and cfg["recyclebin"] != None:
-            if 'enabled' in cfg["recyclebin"] and cfg["recyclebin"]['enabled'] and 'empty_after_x_days' in cfg["recyclebin"]:
-                if 'root_dir' in cfg['directory']:
-                    root_path = os.path.join(cfg['directory']['root_dir'], '')
-                else:
-                    logger.error('root_dir not defined in config. This is required to use the recyclebin feature.')
-                    return
-
-                if ('remote_dir' in cfg['directory'] and cfg['directory']['remote_dir'] != ''):
-                    remote_path = os.path.join(cfg['directory']['remote_dir'], '')
-                    recycle_path = os.path.join(remote_path,'.RecycleBin')
-                else:
-                    remote_path = root_path
-                    recycle_path = os.path.join(root_path,'.RecycleBin')
-                recycle_files = [os.path.join(path, name) for path, subdirs, files in os.walk(recycle_path) for name in files]
-                recycle_files = sorted(recycle_files)
-                empty_after_x_days = cfg["recyclebin"]['empty_after_x_days']
-                if recycle_files:
-                    util.separator(f"Emptying Recycle Bin (Files > {empty_after_x_days} days)", space=False, border=False)
-                    for file in recycle_files:
-                        fileStats = os.stat(file)
-                        filename = file.replace(recycle_path,'')
-                        last_modified = fileStats[stat.ST_MTIME]  # in seconds (last modified time)
-                        now = time.time()  # in seconds
-                        days = (now - last_modified) / (60 * 60 * 24)
-                        if (empty_after_x_days <= days):
-                            num_del += 1
-                            if dry_run:
-                                n_info += (f'Did not delete {filename} from the recycle bin. (Last modified {round(days)} days ago).\n')
-                            else:
-                                n_info += (f'Deleted {filename} from the recycle bin. (Last modified {round(days)} days ago).\n')
-                                os.remove(file)
-                    if num_del > 0:
-                        if dry_run:
-                            util.print_multiline(n_info,'DRYRUN')
-                            logger.dryrun(f'Did not delete {num_del} files from the Recycle Bin.')
-                        else:
-                            remove_empty_directories(Path(recycle_path),"**/*")
-                            util.print_multiline(n_info)
-                            logger.info(f'Deleted {num_del} files from the Recycle Bin.')
-                else:
-                    logger.debug('No files found in "' + recycle_path + '"')
-            else:
-                logger.debug('Recycle bin has been disabled or the "empty_after_x_days" var is not defined in config.')
-        else:
-            logger.error('recyclebin not defined in config.')
-            return
-
-
-#Define global parameters
-torrent_list = None
-torrentdict = None
-
 def start():
-    #Global parameters to get the torrent dictionary
-    global torrent_list
-    global torrentdict
     start_time = datetime.now()
+    args["time"] = start_time.strftime("%H:%M")
+    args["time_obj"] = start_time
+    stats_summary = []
     if dry_run:
         start_type = "Dry-"
     else:
         start_type = ""
     util.separator(f"Starting {start_type}Run")
-    util.separator(f"Getting Torrent List", space=False, border=False)
-    #Get an updated list of torrents
-    torrent_list = client.torrents.info(sort='added_on')
-    if recheck or cross_seed or rem_unregistered:
-        #Get an updated torrent dictionary information of the torrents
-        torrentdict = get_torrent_info(torrent_list)
-    set_category()
-    set_tags()
-    set_rem_unregistered()
-    set_cross_seed()
-    set_recheck()
-    set_rem_orphaned()
-    set_tag_nohardlinks()
-    set_empty_recycle()
+    cfg = None
+    global stats
+    stats = {
+        "added": 0,
+        "deleted": 0,
+        "deleted_contents": 0,
+        "resumed": 0,
+        "rechecked": 0,
+        "orphaned": 0,
+        "recycle_emptied": 0,
+        "tagged": 0,
+        "untagged": 0,
+        "categorized": 0,
+        "rem_unreg": 0,
+        "taggednoHL": 0
+    }
+    try:
+        cfg = Config(default_dir,args)
+    except Exception as e:
+        util.print_stacktrace()
+        util.print_multiline(e,'CRITICAL')
+
+    if cfg:
+        #Set Category
+        num_categorized = cfg.qbt.category()
+        stats["categorized"] += num_categorized
+
+        #Set Tags
+        num_tagged = cfg.qbt.tags()
+        stats["tagged"] += num_tagged
+
+        #Remove Unregistered Torrents
+        num_deleted,num_deleted_contents = cfg.qbt.rem_unregistered()
+        stats["rem_unreg"] += (num_deleted + num_deleted_contents)
+        stats["deleted"] += num_deleted
+        stats["deleted_contents"] += num_deleted_contents
+
+        #Set Cross Seed
+        num_added, num_tagged = cfg.qbt.cross_seed()
+        stats["added"] += num_added
+        stats["tagged"] += num_tagged
+
+        #Recheck Torrents
+        num_resumed, num_rechecked = cfg.qbt.recheck()
+        stats["resumed"] += num_resumed
+        stats["rechecked"] += num_rechecked
+
+        #Tag NoHardLinks
+        num_tagged,num_untagged,num_deleted,num_deleted_contents = cfg.qbt.tag_nohardlinks()
+        stats["tagged"] += num_tagged
+        stats["taggednoHL"] += num_tagged
+        stats["untagged"] += num_untagged
+        stats["deleted"] += num_deleted
+        stats["deleted_contents"] += num_deleted_contents
+
+        #Remove Orphaned Files
+        num_orphaned = cfg.qbt.rem_orphaned()
+        stats["orphaned"] += num_orphaned
+
+        #Empty RecycleBin
+        recycle_emptied = cfg.empty_recycle()
+        stats["recycle_emptied"] += recycle_emptied
+
+        if stats["categorized"] > 0: stats_summary.append(f"Total Torrents Categorized: {stats['categorized']}")
+        if stats["tagged"] > 0: stats_summary.append(f"Total Torrents Tagged: {stats['tagged']}")
+        if stats["rem_unreg"] > 0: stats_summary.append(f"Total Unregistered Torrents Removed: {stats['rem_unreg']}")
+        if stats["added"] > 0: stats_summary.append(f"Total Torrents Added: {stats['added']}")
+        if stats["resumed"] > 0: stats_summary.append(f"Total Torrents Resumed: {stats['resumed']}")
+        if stats["rechecked"] > 0: stats_summary.append(f"Total Torrents Rechecked: {stats['rechecked']}")
+        if stats["deleted"] > 0: stats_summary.append(f"Total Torrents Deleted: {stats['deleted']}")
+        if stats["deleted_contents"] > 0: stats_summary.append(f"Total Torrents + Contents Deleted: {stats['deleted_contents']}")
+        if stats["orphaned"] > 0: stats_summary.append(f"Total Orphaned Files: {stats['orphaned']}")
+        if stats["taggednoHL"] > 0: stats_summary.append(f"Total noHL Torrents Tagged: {stats['taggednoHL']}")
+        if stats["untagged"] > 0: stats_summary.append(f"Total noHL Torrents Untagged: {stats['untagged']}")
+        if stats["recycle_emptied"] > 0: stats_summary.append(f"Total Files Deleted from Recycle Bin: {stats['recycle_emptied']}")
+
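The summary block above only reports counters that actually changed during the run; the pattern, reduced to a comprehension over hypothetical results:

stats = {"categorized": 2, "tagged": 0}                      # hypothetical run
labels = {"categorized": "Total Torrents Categorized",
          "tagged": "Total Torrents Tagged"}

stats_summary = [f"{labels[k]}: {v}" for k, v in stats.items() if v > 0]
# -> ['Total Torrents Categorized: 2']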
stats_summary.append(f"Total Torrents Added: {stats['added']}") + if stats["resumed"] > 0: stats_summary.append(f"Total Torrents Resumed: {stats['resumed']}") + if stats["rechecked"] > 0: stats_summary.append(f"Total Torrents Rechecked: {stats['rechecked']}") + if stats["deleted"] > 0: stats_summary.append(f"Total Torrents Deleted: {stats['deleted']}") + if stats["deleted_contents"] > 0: stats_summary.append(f"Total Torrents + Contents Deleted : {stats['deleted_contents']}") + if stats["orphaned"] > 0: stats_summary.append(f"Total Orphaned Files: {stats['orphaned']}") + if stats["taggednoHL"] > 0: stats_summary.append(f"Total noHL Torrents Tagged: {stats['taggednoHL']}") + if stats["untagged"] > 0: stats_summary.append(f"Total noHL Torrents untagged: {stats['untagged']}") + if stats["recycle_emptied"] > 0: stats_summary.append(f"Total Files Deleted from Recycle Bin: {stats['recycle_emptied']}") + end_time = datetime.now() run_time = str(end_time - start_time).split('.')[0] - util.separator(f"Finished {start_type}Run\nRun Time: {run_time}") + #util.separator(f"Finished {start_type}Run\n {', '.join(stats_summary) if len(stats_summary)>0 else ''} \nRun Time: {run_time}") + util.separator(f"Finished {start_type}Run\n {os.linesep.join(stats_summary) if len(stats_summary)>0 else ''} \nRun Time: {run_time}") def end(): logger.info("Exiting Qbit_manage") @@ -996,6 +236,26 @@ if __name__ == '__main__': logger.info(util.centered(" | | ______ __/ | ")) logger.info(util.centered(" |_| |______| |___/ ")) logger.info(f" Version: {version}") + + util.separator(loglevel='DEBUG') + logger.debug(f" --run (QBT_RUN): {run}") + logger.debug(f" --schedule (QBT_SCHEDULE): {sch}") + logger.debug(f" --config-file (QBT_CONFIG): {config_file}") + logger.debug(f" --log-file (QBT_LOGFILE): {log_file}") + logger.debug(f" --cross-seed (QBT_CROSS_SEED): {cross_seed}") + logger.debug(f" --recheck (QBT_RECHECK): {recheck}") + logger.debug(f" --cat-update (QBT_CAT_UPDATE): {cat_update}") + logger.debug(f" --tag-update (QBT_TAG_UPDATE): {tag_update}") + logger.debug(f" --rem-unregistered (QBT_REM_UNREGISTERED): {rem_unregistered}") + logger.debug(f" --rem-orphaned (QBT_REM_ORPHANED): {rem_orphaned}") + logger.debug(f" --tag-nohardlinks (QBT_TAG_NOHARDLINKS): {tag_nohardlinks}") + logger.debug(f" --skip-recycle (QBT_SKIP_RECYCLE): {skip_recycle}") + logger.debug(f" --dry-run (QBT_DRY_RUN): {dry_run}") + logger.debug(f" --log-level (QBT_LOG_LEVEL): {log_level}") + logger.debug(f" --divider (QBT_DIVIDER): {divider}") + logger.debug(f" --width (QBT_WIDTH): {screen_width}") + logger.debug("") + try: if run: logger.info(f" Run Mode: Script will exit after completion.") diff --git a/requirements.txt b/requirements.txt index 431e554..bf8db4a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,5 @@ -PyYAML +ruamel.yaml qbittorrent-api -schedule \ No newline at end of file +schedule +retrying +alive_progress \ No newline at end of file