Added webhooks and Notifiarr functionality to test

bobokun 2021-12-17 09:19:40 -05:00
parent 23fe121d1d
commit 4f5fdda44f
No known key found for this signature in database
GPG key ID: 9665BA6CF5DC2671
8 changed files with 472 additions and 38 deletions

View file

@ -1 +1 @@
3.0.2
3.1.0

View file

@ -31,17 +31,23 @@ tags:
# max_seeding_time: 129600
# <OPTIONAL> Will limit the upload speed in KiB/s (kibibytes per second) (-1 sets the limit to infinity)
# limit_upload_speed: 150
# <OPTIONAL> Set this to the Notifiarr reaction. If using a custom indexer, you must specify the entire reaction id; otherwise just specify the reaction name
# notifiarr: <notifiarr indexer>
animebytes.tv:
tag: AnimeBytes
notifiarr: animebytes:#############
avistaz:
tag: Avistaz
max_ratio: 5.0
max_seeding_time: 129600
limit_upload_speed: 150
notifiarr: avistaz:#############
beyond-hd:
tag: Beyond-HD
notifiarr: beyondhd
blutopia:
tag: Blutopia
notifiarr: blutopia:#############
cartoonchaos:
tag: CartoonChaos
digitalcore
@ -55,12 +61,15 @@ tags:
max_seeding_time: 129600
landof.tv:
tag: BroadcasTheNet
notifiarr: broadcasthenet
myanonamouse:
tag: MaM
passthepopcorn:
tag: PassThePopcorn
notifiarr: passthepopcorn
privatehd:
tag: PrivateHD
notifiarr:
tleechreload:
tag: TorrentLeech
torrentdb:
@ -117,4 +126,25 @@ orphaned:
- "**/.DS_Store"
- "**/Thumbs.db"
- "**/@eaDir"
- "/data/torrents/temp/**"
- "/data/torrents/temp/**"
# Notifiarr integration with webhooks
notifiarr:
apikey: ####################################
test: true
develop: true
# Webhook notifications: set the value to notifiarr to use the Notifiarr integration, otherwise set it to a webhook URL (a routing sketch follows this block)
webhooks:
error: notifiarr
run_start: notifiarr
run_end: notifiarr
function:
cross_seed: notifiarr
recheck: notifiarr
cat_update: notifiarr
tag_update: notifiarr
rem_unregistered: notifiarr
rem_orphaned: notifiarr
tag_nohardlinks: notifiarr
empty_recyclebin: notifiarr
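The values above accept either the literal string notifiarr or a plain webhook URL, as noted in the comment at the top of the block. A minimal routing sketch under that assumption follows; it mirrors the dispatch added in modules/webhooks.py further down, but dispatch_webhook and its arguments are illustrative names, not part of this commit.

# Illustrative sketch only (not part of the commit): route a payload to Notifiarr or to a plain URL.
import requests

def dispatch_webhook(target, payload, notifiarr=None):
    # target comes straight from config.yml, e.g. "notifiarr" or "https://example.com/hook".
    # notifiarr is assumed to expose get_url(), matching modules/notifiarr.py below.
    if target == "notifiarr":
        url, params = notifiarr.get_url("notification/qbtManage/")
        return requests.get(url, json=payload, params=params)
    # Anything else is treated as a regular webhook endpoint and posted to directly.
    return requests.post(target, json=payload)
# End of illustrative sketch.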

View file

@ -2,6 +2,8 @@ import logging, os, requests, stat, time
from modules import util
from modules.util import Failed, check
from modules.qbittorrent import Qbt
from modules.webhooks import Webhooks
from modules.notifiarr import Notifiarr
import base64   # assumed missing: needed by the get_image_encoded() helper added below
from lxml import html   # assumed missing: needed by the get_html()/post_html() helpers added below
from ruamel import yaml
from retrying import retry
@ -22,7 +24,10 @@ class Config:
self.util = check(self)
self.default_dir = default_dir
self.test_mode = args["test"] if "test" in args else False
self.trace_mode = args["trace"] if "trace" in args else False
self.start_time = args["time_obj"]
yaml.YAML().allow_duplicate_keys = True
try:
new_config, _, _ = yaml.util.load_yaml_guess_indent(open(self.config_path, encoding="utf-8"))
@ -33,6 +38,24 @@ class Config:
if "nohardlinks" in new_config: new_config["nohardlinks"] = new_config.pop("nohardlinks")
if "recyclebin" in new_config: new_config["recyclebin"] = new_config.pop("recyclebin")
if "orphaned" in new_config: new_config["orphaned"] = new_config.pop("orphaned")
if "notifiarr" in new_config: new_config["notifiarr"] = new_config.pop("notifiarr")
if "webhooks" in new_config:
temp = new_config.pop("webhooks")
if 'function' not in temp: temp["function"] = {}
def hooks(attr):
if attr in temp:
items = temp.pop(attr)
if items:
temp["function"][attr]=items
hooks("cross_seed")
hooks("recheck")
hooks("cat_update")
hooks("tag_update")
hooks("rem_unregistered")
hooks("rem_orphaned")
hooks("tag_nohardlinks")
hooks("empty_recyclebin")
new_config["webhooks"] = temp
yaml.round_trip_dump(new_config, open(self.config_path, "w", encoding="utf-8"), indent=None, block_seq_indent=2)
self.data = new_config
except yaml.scanner.ScannerError as e:
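The hooks() helper above migrates older configs where per-function webhooks sat directly under webhooks: error, run_start, and run_end stay at the top level while everything else moves under a new function key, with empty values dropped. A standalone before/after illustration with made-up sample values:

# Illustrative sketch only (not part of the commit): what the webhooks migration produces.
old = {"error": "notifiarr", "run_start": "notifiarr", "cross_seed": "notifiarr", "recheck": None}

migrated = {k: v for k, v in old.items() if k in ("error", "run_start", "run_end")}
# Remaining keys become per-function hooks; falsy entries (recheck: None) are dropped.
migrated["function"] = {k: v for k, v in old.items() if k not in migrated and v}

print(migrated)
# {'error': 'notifiarr', 'run_start': 'notifiarr', 'function': {'cross_seed': 'notifiarr'}}
# End of illustrative sketch.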
@ -41,9 +64,40 @@ class Config:
util.print_stacktrace()
raise Failed(f"YAML Error: {e}")
self.session = requests.Session()
if self.data["cat"] is None: self.data["cat"] = {}
if self.data["tags"] is None: self.data["tags"] = {}
self.session = requests.Session()
self.webhooks = {
"error": self.util.check_for_attribute(self.data, "error", parent="webhooks", var_type="list", default_is_none=True),
"run_start": self.util.check_for_attribute(self.data, "run_start", parent="webhooks", var_type="list", default_is_none=True),
"run_end": self.util.check_for_attribute(self.data, "run_end", parent="webhooks", var_type="list", default_is_none=True),
"function": self.util.check_for_attribute(self.data, "function", parent="webhooks", var_type="list", default_is_none=True)
}
self.NotifiarrFactory = None
if "notifiarr" in self.data:
if self.data["notifiarr"] is not None:
logger.info("Connecting to Notifiarr...")
try:
self.NotifiarrFactory = Notifiarr(self, {
"apikey": self.util.check_for_attribute(self.data, "apikey", parent="notifiarr", throw=True),
"develop": self.util.check_for_attribute(self.data, "develop", parent="notifiarr", var_type="bool", default=False, do_print=False, save=False),
"test": self.util.check_for_attribute(self.data, "test", parent="notifiarr", var_type="bool", default=False, do_print=False, save=False)
})
except Failed as e:
logger.error(e)
logger.info(f"Notifiarr Connection {'Failed' if self.NotifiarrFactory is None else 'Successful'}")
else:
logger.warning("Config Warning: notifiarr attribute not found")
self.Webhooks = Webhooks(self, self.webhooks, notifiarr=self.NotifiarrFactory)
try:
self.Webhooks.start_time_hooks(self.start_time)
except Failed as e:
util.print_stacktrace()
logger.error(f"Webhooks Error: {e}")
#nohardlinks
self.nohardlinks = None
if "nohardlinks" in self.data and self.args['tag_nohardlinks']:
@ -57,10 +111,15 @@ class Config:
self.nohardlinks[cat]['max_seeding_time'] = self.util.check_for_attribute(self.data, "max_seeding_time", parent="nohardlinks", subparent=cat, var_type="int", default_int=-2, default_is_none=True,do_print=False)
self.nohardlinks[cat]['limit_upload_speed'] = self.util.check_for_attribute(self.data, "limit_upload_speed", parent="nohardlinks", subparent=cat, var_type="int", default_int=-1, default_is_none=True,do_print=False)
else:
raise Failed(f"Config Error: Category {cat} is defined under nohardlinks attribute but is not defined in the cat attribute.")
e = (f"Config Error: Category {cat} is defined under nohardlinks attribute but is not defined in the cat attribute.")
self.notify(e,'Config')
raise Failed(e)
else:
if self.args["tag_nohardlinks"]:
raise Failed("Config Error: nohardlinks attribute not found")
e = "Config Error: nohardlinks attribute not found"
self.notify(e,'Config')
raise Failed(e)
#Add RecycleBin
self.recyclebin = {}
@ -86,7 +145,9 @@ class Config:
self.cross_seed_dir = self.util.check_for_attribute(self.data, "cross_seed", parent="directory",default_is_none=True)
self.recycle_dir = os.path.join(self.remote_dir,'.RecycleBin')
else:
raise Failed("Config Error: directory attribute not found")
e = "Config Error: directory attribute not found"
self.notify(e,'Config')
raise Failed(e)
#Connect to Qbittorrent
self.qbt = None
@ -98,7 +159,9 @@ class Config:
"password": self.util.check_for_attribute(self.data, "pass", parent="qbt", default_is_none=True)
})
else:
raise Failed("Config Error: qbt attribute not found")
e = "Config Error: qbt attribute not found"
self.notify(e,'Config')
raise Failed(e)
#Get tags from config file based on keyword
def get_tags(self,urls):
@ -107,6 +170,7 @@ class Config:
tags['max_ratio'] = None
tags['max_seeding_time'] = None
tags['limit_upload_speed'] = None
tags['notifiarr'] = None
if not urls: return tags
try:
tags['url'] = util.trunc_val(urls[0], '/')
@ -141,6 +205,7 @@ class Config:
tags['max_ratio'] = self.util.check_for_attribute(self.data, "max_ratio", parent="tags", subparent=tag_url, var_type="float", default_int=-2, default_is_none=True,do_print=False,save=False)
tags['max_seeding_time'] = self.util.check_for_attribute(self.data, "max_seeding_time", parent="tags", subparent=tag_url, var_type="int", default_int=-2, default_is_none=True,do_print=False,save=False)
tags['limit_upload_speed'] = self.util.check_for_attribute(self.data, "limit_upload_speed", parent="tags", subparent=tag_url, var_type="int", default_int=-1, default_is_none=True,do_print=False,save=False)
tags['notifiarr'] = self.util.check_for_attribute(self.data, "notifiarr", parent="tags", subparent=tag_url, default_is_none=True, do_print=False,save=False)
return (tags)
if tags['url']:
default_tag = tags['url'].split('/')[2].split(':')[0]
@ -149,7 +214,9 @@ class Config:
self.data['tags'][default_tag]['tag'] = default_tag
except Exception as e:
self.data['tags'][default_tag] = {'tag': default_tag}
logger.warning(f'No tags matched for {tags["url"]}. Please check your config.yml file. Setting tag to {default_tag}')
e = (f'No tags matched for {tags["url"]}. Please check your config.yml file. Setting tag to {default_tag}')
self.notify(e,'Tag',False)
logger.warning(e)
return (tags)
#Get category from config file based on path provided
@ -166,7 +233,9 @@ class Config:
default_cat = path.split('/')[-2]
category = self.util.check_for_attribute(self.data, default_cat, parent="cat",default=path)
self.data['cat'][str(default_cat)] = path
logger.warning(f'No categories matched for the save path {path}. Check your config.yml file. - Setting category to {default_cat}')
e = (f'No categories matched for the save path {path}. Check your config.yml file. - Setting category to {default_cat}')
self.notify(e,'Category',False)
logger.warning(e)
return category
#Empty the recycle bin
@ -174,6 +243,8 @@ class Config:
dry_run = self.args['dry_run']
loglevel = 'DRYRUN' if dry_run else 'INFO'
num_del = 0
files = []
size_bytes = 0
if not self.args["skip_recycle"]:
n_info = ''
if self.recyclebin['enabled'] and self.recyclebin['empty_after_x_days']:
@ -190,19 +261,65 @@ class Config:
if (self.recyclebin['empty_after_x_days'] <= days):
num_del += 1
n_info += (f"{'Did not delete' if dry_run else 'Deleted'} {filename} from the recycle bin. (Last modified {round(days)} days ago).\n")
files += [str(filename)]
size_bytes += os.path.getsize(file)
if not dry_run: os.remove(file)
if num_del > 0:
if not dry_run: util.remove_empty_directories(self.recycle_dir,"**/*")
attr = {
"function":"empty_recyclebin",
"title":f"Emptying Recycle Bin (Files > {self.recyclebin['empty_after_x_days']} days)",
"files":files,
"empty_after_x_days": self.recyclebin['empty_after_x_days'],
"size_in_bytes":size_bytes
}
self.send_notifications(attr)
util.print_multiline(n_info,loglevel)
util.print_line(f"{'Did not delete' if dry_run else 'Deleted'} {num_del} files from the Recycle Bin.",loglevel)
util.print_line(f"{'Did not delete' if dry_run else 'Deleted'} {num_del} files ({util.human_readable_size(size_bytes)}) from the Recycle Bin.",loglevel)
else:
logger.debug('No files found in "' + self.recycle_dir + '"')
return num_del
def send_notifications(self, attr):
try:
function = attr['function']
config_webhooks = self.Webhooks.function_webhooks
config_function = None
for key in config_webhooks:
if key in function:
config_function = key
break
if config_function:
self.Webhooks.function_hooks([config_webhooks[config_function]],attr)
except Failed as e:
util.print_stacktrace()
logger.error(f"Webhooks Error: {e}")
def notify(self, text, function=None, critical=True):
for error in util.get_list(text, split=False):
try:
self.Webhooks.error_hooks(error, function_error=function, critical=critical)
except Failed as e:
util.print_stacktrace()
logger.error(f"Webhooks Error: {e}")
def get_html(self, url, headers=None, params=None):
return html.fromstring(self.get(url, headers=headers, params=params).content)
def get_json(self, url, json=None, headers=None, params=None):
return self.get(url, json=json, headers=headers, params=params).json()
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def get(self, url, json=None, headers=None, params=None):
return self.session.get(url, json=json, headers=headers, params=params)
def get_image_encoded(self, url):
return base64.b64encode(self.get(url).content).decode('utf-8')
def post_html(self, url, data=None, json=None, headers=None):
return html.fromstring(self.post(url, data=data, json=json, headers=headers).content)
def post_json(self, url, data=None, json=None, headers=None):
return self.post(url, data=data, json=json, headers=headers).json()
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def post(self, url, data=None, json=None, headers=None):
return self.session.post(url, data=data, json=json, headers=headers)
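send_notifications above picks the per-function webhook whose key appears inside attr['function'], so derived events such as cleanup_tag_nohardlinks or untag_nohardlinks fall back to the tag_nohardlinks hook. A standalone rendition of that lookup with sample data (the dict contents are made up):

# Illustrative sketch only (not part of the commit): how an event name resolves to a hook.
config_webhooks = {"tag_nohardlinks": "notifiarr", "rem_orphaned": "notifiarr"}

def resolve(function_name):
    # The first configured key contained in the event name wins, as in Config.send_notifications.
    for key, target in config_webhooks.items():
        if key in function_name:
            return target
    return None  # no hook configured for this event

print(resolve("cleanup_tag_nohardlinks"))  # notifiarr
print(resolve("cross_seed"))               # None
# End of illustrative sketch.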

modules/notifiarr.py Normal file
View file

@ -0,0 +1,31 @@
import logging
from modules.util import Failed
logger = logging.getLogger("qBit Manage")
base_url = "https://notifiarr.com/api/v1/"
dev_url = "https://dev.notifiarr.com/api/v1/"
class Notifiarr:
def __init__(self, config, params):
self.config = config
self.apikey = params["apikey"]
self.develop = params["develop"]
self.test = params["test"]
url, _ = self.get_url("user/validate/")
response = self.config.get(url)
response_json = response.json()
if response.status_code >= 400 or ("result" in response_json and response_json["result"] == "error"):
logger.debug(f"Response: {response_json}")
raise Failed(f"({response.status_code} [{response.reason}]) {response_json}")
if not params["test"] and not response_json["details"]["response"]:
raise Failed("Notifiarr Error: Invalid apikey")
def get_url(self, path):
url = f"{dev_url if self.develop else base_url}{'notification/test' if self.test else f'{path}{self.apikey}'}"
if self.config.trace_mode:
logger.debug(url.replace(self.apikey, "APIKEY"))
params = {"event": "qbtManage" if self.test else "notify"}
return url, params
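A hedged usage sketch for the class above, assuming only what Notifiarr actually touches on its config argument (a get() wrapper around requests and a trace_mode flag); DummyConfig and the commented-out calls are illustrative, not part of the commit.

# Illustrative sketch only (not part of the commit).
import requests

class DummyConfig:
    trace_mode = True
    def get(self, url, json=None, headers=None, params=None):
        return requests.get(url, json=json, headers=headers, params=params)

# notifiarr = Notifiarr(DummyConfig(), {"apikey": "APIKEY", "develop": False, "test": True})
# url, params = notifiarr.get_url("notification/qbtManage/")
# With test=True this resolves to https://notifiarr.com/api/v1/notification/test
# and params {"event": "qbtManage"}; with test=False the apikey is appended to the path instead.
# End of illustrative sketch.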

View file

@ -56,6 +56,7 @@ class Qbt:
category = torrent.category
torrent_trackers = torrent.trackers
except Exception as e:
self.config.notify(e,'Get Torrent Info',False)
logger.warning(e)
if torrent_name in torrentdict:
t_obj_list.append(torrent)
@ -101,14 +102,20 @@ class Qbt:
for torrent in self.torrent_list:
if torrent.category == '':
new_cat = self.config.get_category(torrent.save_path)
try:
t_url = [util.trunc_val(x.url, '/') for x in torrent.trackers if x.url.startswith('http')][0]
except IndexError:
t_url = None
tags = self.config.get_tags([x.url for x in torrent.trackers if x.url.startswith('http')])
if not dry_run: torrent.set_category(category=new_cat)
print_line(util.insert_space(f'Torrent Name: {torrent.name}',3),loglevel)
print_line(util.insert_space(f'New Category: {new_cat}',3),loglevel)
print_line(util.insert_space(f'Tracker: {t_url}',8),loglevel)
print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
attr = {
"function":"cat_update",
"title":"Updating Categories",
"torrent_name":torrent.name,
"torrent_new_cat": new_cat,
"torrent_tracker": tags["url"],
"notifiarr_indexer": tags["notifiarr"]
}
self.config.send_notifications(attr)
num_cat += 1
if num_cat >= 1:
print_line(f"{'Did not update' if dry_run else 'Updated'} {num_cat} new categories.",loglevel)
@ -130,7 +137,19 @@ class Qbt:
print_line(util.insert_space(f'Torrent Name: {torrent.name}',3),loglevel)
print_line(util.insert_space(f'New Tag: {tags["new_tag"]}',8),loglevel)
print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
self.set_tags_and_limits(torrent, tags["max_ratio"], tags["max_seeding_time"],tags["limit_upload_speed"],tags["new_tag"])
self.set_tags_and_limits(torrent, tags["max_ratio"], tags["max_seeding_time"],tags["limit_upload_speed"],tags["new_tag"])
attr = {
"function":"tag_update",
"title":"Updating Tags",
"torrent_name":torrent.name,
"torrent_new_tag": tags["new_tag"],
"torrent_tracker": tags["url"],
"notifiarr_indexer": tags["notifiarr"],
"torrent_max_ratio": tags["max_ratio"],
"torrent_max_seeding_time": tags["max_seeding_time"],
"torrent_limit_upload_speed": tags["limit_upload_speed"]
}
self.config.send_notifications(attr)
if num_tags >= 1:
print_line(f"{'Did not update' if dry_run else 'Updated'} {num_tags} new tags.",loglevel)
else:
@ -188,7 +207,9 @@ class Qbt:
for category in nohardlinks:
torrent_list = self.get_torrents({'category':category,'filter':'completed'})
if len(torrent_list) == 0:
logger.error('No torrents found in the category ('+category+') defined in config.yml inside the nohardlinks section. Please check if this matches with any category in qbittorrent and has 1 or more torrents.')
e = (f'No torrents found in the category ({category}) defined under nohardlinks attribute in the config. Please check if this matches with any category in qbittorrent and has 1 or more torrents.')
self.config.notify(e,'Tag No Hard Links',False)
logger.error(e)
continue
for torrent in alive_it(torrent_list):
tags = self.config.get_tags([x.url for x in torrent.trackers if x.url.startswith('http')])
@ -205,6 +226,18 @@ class Qbt:
print_line(util.insert_space(f'Added Tag: noHL',6),loglevel)
print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
self.set_tags_and_limits(torrent, nohardlinks[category]["max_ratio"], nohardlinks[category]["max_seeding_time"],nohardlinks[category]["limit_upload_speed"],tags='noHL')
attr = {
"function":"tag_nohardlinks",
"title":"Tagging Torrents with No Hardlinks",
"torrent_name":torrent.name,
"torrent_add_tag": 'noHL',
"torrent_tracker": tags["url"],
"notifiarr_indexer": tags["notifiarr"],
"torrent_max_ratio": nohardlinks[category]["max_ratio"],
"torrent_max_seeding_time": nohardlinks[category]["max_seeding_time"],
"torrent_limit_upload_speed": nohardlinks[category]["limit_upload_speed"]
}
self.config.send_notifications(attr)
#Cleans up previously tagged noHL torrents
else:
# Deletes torrent with data if cleanup is set to true and meets the ratio/seeding requirements
@ -217,6 +250,18 @@ class Qbt:
print_line(util.insert_space(f'Removed Tag: noHL',6),loglevel)
print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
print_line(f"{'Not Reverting' if dry_run else 'Reverting'} share limits.",loglevel)
attr = {
"function":"untag_nohardlinks",
"title":"Untagging Previous Torrents that now have Hard Links",
"torrent_name":torrent.name,
"torrent_remove_tag": 'noHL',
"torrent_tracker": tags["url"],
"notifiarr_indexer": tags["notifiarr"],
"torrent_max_ratio": tags["max_ratio"],
"torrent_max_seeding_time": tags["max_seeding_time"],
"torrent_limit_upload_speed": tags["limit_upload_speed"]
}
self.config.send_notifications(attr)
if not dry_run:
torrent.remove_tags(tags='noHL')
self.set_tags_and_limits(torrent, tags["max_ratio"], tags["max_seeding_time"],tags["limit_upload_speed"])
@ -226,16 +271,29 @@ class Qbt:
if torrent.name in tdel_dict.keys() and 'noHL' in torrent.tags:
#Double check that the content path is the same before we delete anything
if torrent['content_path'].replace(root_dir,root_dir) == tdel_dict[torrent.name]:
tags = self.config.get_tags([x.url for x in torrent.trackers if x.url.startswith('http')])
print_line(util.insert_space(f'Torrent Name: {torrent.name}',3),loglevel)
print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
print_line(util.insert_space(f"Cleanup: True [No hard links found and meets Share Limits.]",8),loglevel)
attr = {
"function":"cleanup_tag_nohardlinks",
"title":"Removing NoHL Torrents and meets Share Limits",
"torrent_name":torrent.name,
"cleanup": 'True',
"torrent_tracker": tags["url"],
"notifiarr_indexer": tags["notifiarr"],
}
if (os.path.exists(torrent['content_path'].replace(root_dir,root_dir))):
if not dry_run: self.tor_delete_recycle(torrent)
del_tor_cont += 1
attr["torrents_deleted_and_contents"]: True
print_line(util.insert_space(f'Deleted .torrent AND content files.',8),loglevel)
else:
if not dry_run: torrent.delete(hash=torrent.hash, delete_files=False)
del_tor += 1
attr["torrents_deleted_and_contents"]: False
print_line(util.insert_space(f'Deleted .torrent but NOT content files.',8),loglevel)
self.config.send_notifications(attr)
if num_tags >= 1:
print_line(f"{'Did not Tag/set' if dry_run else 'Tag/set'} share limits for {num_tags} .torrent{'s.' if num_tags > 1 else '.'}",loglevel)
else:
@ -275,31 +333,52 @@ class Qbt:
t_status = self.torrentinfo[t_name]['status']
for x in torrent.trackers:
if x.url.startswith('http'):
t_url = util.trunc_val(x.url, '/')
tags = self.config.get_tags(x.url)
msg_up = x.msg.upper()
#Add any potential unregistered torrents to a list
if not any(m in msg_up for m in unreg_msgs) and x.status == 4 and 'DOWN' not in msg_up and 'UNREACHABLE' not in msg_up:
pot_unr += (util.insert_space(f'Torrent Name: {t_name}',3)+'\n')
pot_unr += (util.insert_space(f'Status: {msg_up}',9)+'\n')
pot_unr += (util.insert_space(f'Tracker: {t_url}',8)+'\n')
pot_unr += (util.insert_space(f'Tracker: {tags["url"]}',8)+'\n')
attr = {
"function":"potential_rem_unregistered",
"title":"Potential Unregistered Torrents",
"torrent_name":t_name,
"torrent_status": msg_up,
"torrent_tracker": tags["url"],
"notifiarr_indexer": tags["notifiarr"],
}
self.config.send_notifications(attr)
if any(m in msg_up for m in unreg_msgs) and x.status == 4 and 'DOWN' not in msg_up and 'UNREACHABLE' not in msg_up:
print_line(util.insert_space(f'Torrent Name: {t_name}',3),loglevel)
print_line(util.insert_space(f'Status: {msg_up}',9),loglevel)
print_line(util.insert_space(f'Tracker: {t_url}',8),loglevel)
print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
attr = {
"function":"rem_unregistered",
"title":"Removing Unregistered Torrents",
"torrent_name":t_name,
"torrent_status": msg_up,
"torrent_tracker": tags["url"],
"notifiarr_indexer": tags["notifiarr"],
}
if t_count > 1:
# Checks if any of the original torrents are working
if '' in t_msg or 2 in t_status:
if not dry_run: torrent.delete(hash=torrent.hash, delete_files=False)
attr["torrents_deleted_and_contents"]: False
print_line(util.insert_space(f'Deleted .torrent but NOT content files.',8),loglevel)
del_tor += 1
else:
if not dry_run: self.tor_delete_recycle(torrent)
attr["torrents_deleted_and_contents"]: True
print_line(util.insert_space(f'Deleted .torrent AND content files.',8),loglevel)
del_tor_cont += 1
else:
if not dry_run: self.tor_delete_recycle(torrent)
attr["torrents_deleted_and_contents"]: True
print_line(util.insert_space(f'Deleted .torrent AND content files.',8),loglevel)
del_tor_cont += 1
self.config.send_notifications(attr)
if del_tor >=1 or del_tor_cont >=1:
if del_tor >= 1: print_line(f"{'Did not delete' if dry_run else 'Deleted'} {del_tor} .torrent{'s' if del_tor > 1 else ''} but not content files.",loglevel)
if del_tor_cont >= 1: print_line(f"{'Did not delete' if dry_run else 'Deleted'} {del_tor_cont} .torrent{'s' if del_tor_cont > 1 else ''} AND content files.",loglevel)
@ -346,6 +425,15 @@ class Qbt:
print_line(util.insert_space(f'Torrent Name: {t_name}',3),loglevel)
print_line(util.insert_space(f'Category: {category}',7),loglevel)
print_line(util.insert_space(f'Save_Path: {dest}',6),loglevel)
attr = {
"function":"cross_seed",
"title":"Adding New Cross-Seed Torrent",
"torrent_name":t_name,
"torrent_category": category,
"torrent_save_path": dest,
"torrent_add_tag": "cross-seed"
}
self.config.send_notifications(attr)
added += 1
if not dry_run:
self.client.torrents.add(torrent_files=src, save_path=dest, category=category, tags='cross-seed', is_paused=True)
@ -361,6 +449,13 @@ class Qbt:
t_name = torrent.name
if 'cross-seed' not in torrent.tags and self.torrentinfo[t_name]['count'] > 1 and self.torrentinfo[t_name]['first_hash'] != torrent.hash:
tagged += 1
attr = {
"function":"tag_cross_seed",
"title":"Tagging Cross-Seed Torrent",
"torrent_name":t_name,
"torrent_add_tag": "cross-seed"
}
self.config.send_notifications(attr)
print_line(f"{'Not Adding' if dry_run else 'Adding'} 'cross-seed' tag to {t_name}",loglevel)
if not dry_run: torrent.add_tags(tags='cross-seed')
@ -383,12 +478,20 @@ class Qbt:
torrent_list = self.get_torrents({'status_filter':'paused','sort':'size'})
if torrent_list:
for torrent in torrent_list:
new_tag = self.config.get_tags([x.url for x in torrent.trackers if x.url.startswith('http')])
tags = self.config.get_tags([x.url for x in torrent.trackers if x.url.startswith('http')])
#Resume torrent if completed
if torrent.progress == 1:
if torrent.max_ratio < 0 and torrent.max_seeding_time < 0:
resumed += 1
print_line(f"{'Not Resuming' if dry_run else 'Resuming'} [{new_tag['new_tag']}] - {torrent.name}",loglevel)
print_line(f"{'Not Resuming' if dry_run else 'Resuming'} [{tags['new_tag']}] - {torrent.name}",loglevel)
attr = {
"function":"recheck",
"title":"Resuming Torrent",
"torrent_name":torrent.name,
"torrent_tracker": tags["url"],
"notifiarr_indexer": tags["notifiarr"],
}
self.config.send_notifications(attr)
if not dry_run: torrent.resume()
else:
#Check to see if torrent meets AutoTorrentManagement criteria
@ -400,12 +503,28 @@ class Qbt:
or (torrent.max_seeding_time >= 0 and (torrent.seeding_time < (torrent.max_seeding_time * 60)) and torrent.max_ratio < 0) \
or (torrent.max_ratio >= 0 and torrent.max_seeding_time >= 0 and torrent.ratio < torrent.max_ratio and (torrent.seeding_time < (torrent.max_seeding_time * 60))):
resumed += 1
print_line(f"{'Not Resuming' if dry_run else 'Resuming'} [{new_tag['new_tag']}] - {torrent.name}",loglevel)
print_line(f"{'Not Resuming' if dry_run else 'Resuming'} [{tags['new_tag']}] - {torrent.name}",loglevel)
attr = {
"function":"recheck",
"title":"Resuming Torrent",
"torrent_name":torrent.name,
"torrent_tracker": tags["url"],
"notifiarr_indexer": tags["notifiarr"],
}
self.config.send_notifications(attr)
if not dry_run: torrent.resume()
#Recheck
elif torrent.progress == 0 and self.torrentinfo[torrent.name]['is_complete'] and not torrent.state_enum.is_checking:
rechecked += 1
print_line(f"{'Not Rechecking' if dry_run else 'Rechecking'} [{new_tag['new_tag']}] - {torrent.name}",loglevel)
attr = {
"function":"recheck",
"title":"Rechecking Torrent",
"torrent_name":torrent.name,
"torrent_tracker": tags["url"],
"notifiarr_indexer": tags["notifiarr"],
}
self.config.send_notifications(attr)
print_line(f"{'Not Rechecking' if dry_run else 'Rechecking'} [{tags['new_tag']}] - {torrent.name}",loglevel)
if not dry_run: torrent.recheck()
return resumed,rechecked
@ -441,15 +560,16 @@ class Qbt:
excluded_orphan_files = [file for file in orphaned_files for exclude_pattern in exclude_patterns if fnmatch(file, exclude_pattern.replace(remote_path,root_path))]
orphaned_files = set(orphaned_files) - set(excluded_orphan_files)
separator(f"Torrent Files", space=False, border=False, loglevel='DEBUG')
print_multiline("\n".join(torrent_files),'DEBUG')
separator(f"Root Files", space=False, border=False,loglevel='DEBUG')
print_multiline("\n".join(root_files),'DEBUG')
separator(f"Excluded Orphan Files", space=False, border=False,loglevel='DEBUG')
print_multiline("\n".join(excluded_orphan_files),'DEBUG')
separator(f"Orphaned Files", space=False, border=False,loglevel='DEBUG')
print_multiline("\n".join(orphaned_files),'DEBUG')
separator(f"Deleting Orphaned Files", space=False, border=False,loglevel='DEBUG')
if self.config.trace_mode:
separator(f"Torrent Files", space=False, border=False, loglevel='DEBUG')
print_multiline("\n".join(torrent_files),'DEBUG')
separator(f"Root Files", space=False, border=False,loglevel='DEBUG')
print_multiline("\n".join(root_files),'DEBUG')
separator(f"Excluded Orphan Files", space=False, border=False,loglevel='DEBUG')
print_multiline("\n".join(excluded_orphan_files),'DEBUG')
separator(f"Orphaned Files", space=False, border=False,loglevel='DEBUG')
print_multiline("\n".join(orphaned_files),'DEBUG')
separator(f"Deleting Orphaned Files", space=False, border=False,loglevel='DEBUG')
if orphaned_files:
dir_out = os.path.join(remote_path,'orphaned_data')
@ -458,6 +578,14 @@ class Qbt:
print_multiline("\n".join(orphaned_files),loglevel)
print_line(f"{'Did not move' if dry_run else 'Moved'} {len(orphaned_files)} Orphaned files to {dir_out.replace(remote_path,root_path)}",loglevel)
orphaned = len(orphaned_files)
attr = {
"function":"rem_orphaned",
"title":f"Removing {len(orphaned_files)} Orphaned Files",
"orphaned_files":orphaned_files,
"orphaned_directory": dir_out.replace(remote_path,root_path),
"total_orphaned_files": orphaned,
}
self.config.send_notifications(attr)
#Delete empty directories after moving orphan files
logger.info(f'Cleaning up any empty directories...')
if not dry_run:

View file

@ -155,6 +155,12 @@ separating_character = "="
screen_width = 100
spacing = 0
def tab_new_lines(data):
return str(data).replace("\n", "\n|\t ") if "\n" in str(data) else str(data)
def print_stacktrace():
print_multiline(traceback.format_exc())
def add_dict_list(keys, value, dict_map):
for key in keys:
if key in dict_map:
@ -301,4 +307,11 @@ class GracefulKiller:
#signal.signal(signal.SIGINT, self.exit_gracefully)
signal.signal(signal.SIGTERM, self.exit_gracefully)
def exit_gracefully(self, *args):
self.kill_now = True
self.kill_now = True
def human_readable_size(size, decimal_places=3):
for unit in ['B','KiB','MiB','GiB','TiB']:
if size < 1024.0:
break
size /= 1024.0
return f"{size:.{decimal_places}f}{unit}"

modules/webhooks.py Normal file
View file

@ -0,0 +1,100 @@
import logging
from json import JSONDecodeError
from modules.util import Failed
logger = logging.getLogger("qBit Manage")
class Webhooks:
def __init__(self, config, system_webhooks, notifiarr=None):
self.config = config
self.error_webhooks = system_webhooks["error"] if "error" in system_webhooks else []
self.run_start_webhooks = system_webhooks["run_start"] if "run_start" in system_webhooks else []
self.run_end_webhooks = system_webhooks["run_end"] if "run_end" in system_webhooks else []
if "function" in system_webhooks and system_webhooks["function"] is not None:
try:
self.function_webhooks = system_webhooks["function"][0]
except IndexError:
self.function_webhooks = []
else:
self.function_webhooks = []
self.notifiarr = notifiarr
def _request(self, webhooks, json):
if self.config.trace_mode:
logger.debug("")
logger.debug(f"JSON: {json}")
for webhook in list(set(webhooks)):
if self.config.trace_mode:
logger.debug(f"Webhook: {webhook}")
if webhook == "notifiarr":
url, params = self.notifiarr.get_url("notification/qbtManage/")
for x in range(6):
response = self.config.get(url, json=json, params=params)
if response.status_code < 500:
break
else:
response = self.config.post(webhook, json=json)
try:
response_json = response.json()
if self.config.trace_mode:
logger.debug(f"Response: {response_json}")
if "result" in response_json and response_json["result"] == "error" and "details" in response_json and "response" in response_json["details"]:
raise Failed(f"Notifiarr Error: {response_json['details']['response']}")
if response.status_code >= 400 or ("result" in response_json and response_json["result"] == "error"):
raise Failed(f"({response.status_code} [{response.reason}]) {response_json}")
except JSONDecodeError:
if response.status_code >= 400:
raise Failed(f"({response.status_code} [{response.reason}])")
def start_time_hooks(self, start_time):
if self.run_start_webhooks:
dry_run = self.config.args['dry_run']
if dry_run:
start_type = "Dry-"
else:
start_type = ""
self._request(self.run_start_webhooks, {
"function":"Run_Start",
"title":f"Starting {start_type}Run",
"start_time": start_time.strftime("%Y-%m-%d %H:%M:%S"),
"dry_run": self.config.args['dry_run']
})
def end_time_hooks(self, start_time, end_time, run_time, stats):
dry_run = self.config.args['dry_run']
if dry_run:
start_type = "Dry-"
else:
start_type = ""
if self.run_end_webhooks:
self._request(self.run_end_webhooks, {
"function":"Run_End",
"title":f"Finished {start_type}Run",
"start_time": start_time.strftime("%Y-%m-%d %H:%M:%S"),
"end_time": end_time.strftime("%Y-%m-%d %H:%M:%S"),
"run_time": run_time,
"torrents_added": stats["added"],
"torrents_deleted": stats["deleted"],
"torrents_deleted_and_contents": stats["deleted_contents"],
"torrents_resumed": stats["resumed"],
"torrents_rechecked": stats["rechecked"],
"torrents_categorized": stats["categorized"],
"torrents_tagged": stats["tagged"],
"remove_unregistered": stats["rem_unreg"],
"orphaned_files_found": stats["orphaned"],
"torrents_tagged_no_hardlinks": stats["taggednoHL"],
"torrents_untagged_no_hardlinks": stats["untagged"],
"files_deleted_from_recyclebin": stats["recycle_emptied"]
})
def error_hooks(self, text, function_error=None, critical=True):
if self.error_webhooks:
json = {"function":"Run_Error","title":f"{function_error} Error","error": str(text), "critical": critical}
if function_error:
json["function_error"] = function_error
self._request(self.error_webhooks, json)
def function_hooks(self, webhook, json):
if self.function_webhooks:
self._request(webhook, json)
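For orientation, a sketch of how Config wires this class up, using made-up hook values; note that the function entry is a single-element list whose first item is the per-function dict, matching the [0] lookup in __init__ above. The commented-out calls assume the config and start_time objects from the rest of the commit.

# Illustrative sketch only (not part of the commit): wiring Webhooks from a hooks dict.
hooks = {
    "error": ["https://example.com/qbt-errors"],      # plain webhook URL
    "run_start": ["notifiarr"],                       # routed through the Notifiarr factory
    "run_end": ["notifiarr"],
    "function": [{"empty_recyclebin": "notifiarr"}],  # per-function hooks, wrapped in a list
}
# webhooks = Webhooks(config, hooks, notifiarr=config.NotifiarrFactory)
# webhooks.start_time_hooks(start_time)
# webhooks.error_hooks("Config Error: qbt attribute not found", function_error="Config")
# webhooks.function_hooks(["notifiarr"], {"function": "empty_recyclebin", "title": "Emptying Recycle Bin"})
# End of illustrative sketch.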

View file

@ -9,6 +9,7 @@ try:
from modules import util
from modules.config import Config
from modules.util import GracefulKiller
from modules.util import Failed
except ModuleNotFoundError:
print("Requirements Error: Requirements are not installed")
sys.exit(0)
@ -19,6 +20,8 @@ if sys.version_info[0] != 3 or sys.version_info[1] < 6:
sys.exit(0)
parser = argparse.ArgumentParser('qBittorrent Manager.', description='A mix of scripts combined for managing qBittorrent.')
parser.add_argument("-db", "--debug", dest="debug", help=argparse.SUPPRESS, action="store_true", default=False)
parser.add_argument("-tr", "--trace", dest="trace", help=argparse.SUPPRESS, action="store_true", default=False)
parser.add_argument('-r', '--run', dest='run', action='store_true', default=False, help='Run without the scheduler. Script will exit after completion.')
parser.add_argument('-sch', '--schedule', dest='min', default='30', type=str, help='Schedule to run every x minutes. (Default set to 30)')
parser.add_argument('-c', '--config-file', dest='configfile', action='store', default='config.yml', type=str, help='This is used if you want to use a different name for your config.yml. Example: tv.yml')
@ -70,6 +73,11 @@ dry_run = get_arg("QBT_DRY_RUN", args.dry_run, arg_bool=True)
log_level = get_arg("QBT_LOG_LEVEL", args.log_level)
divider = get_arg("QBT_DIVIDER", args.divider)
screen_width = get_arg("QBT_WIDTH", args.width, arg_int=True)
debug = get_arg("QBT_DEBUG", args.debug, arg_bool=True)
trace = get_arg("QBT_TRACE", args.trace, arg_bool=True)
if debug or trace: log_level = 'DEBUG'
stats = {}
args = {}
@ -78,7 +86,7 @@ if os.path.isdir('/config') and os.path.exists(os.path.join('/config',config_fil
else:
default_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "config")
for v in ['run','sch','config_file','log_file','cross_seed','recheck','cat_update','tag_update','rem_unregistered','rem_orphaned','tag_nohardlinks','skip_recycle','dry_run','log_level','divider','screen_width']:
for v in ['run','sch','config_file','log_file','cross_seed','recheck','cat_update','tag_update','rem_unregistered','rem_orphaned','tag_nohardlinks','skip_recycle','dry_run','log_level','divider','screen_width','debug','trace']:
args[v] = eval(v)
util.separating_character = divider[0]
@ -223,8 +231,13 @@ def start():
end_time = datetime.now()
run_time = str(end_time - start_time).split('.')[0]
util.separator(f"Finished {start_type}Run\n {os.linesep.join(stats_summary) if len(stats_summary)>0 else ''} \nRun Time: {run_time}")
if cfg:
try:
cfg.Webhooks.end_time_hooks(start_time, end_time, run_time, stats)
except Failed as e:
util.print_stacktrace()
logger.error(f"Webhooks Error: {e}")
util.separator(f"Finished {start_type}Run\n{os.linesep.join(stats_summary) if len(stats_summary)>0 else ''}\nRun Time: {run_time}".replace('\n\n', '\n'))
def end():
logger.info("Exiting Qbit_manage")
logger.removeHandler(file_handler)
@ -260,6 +273,8 @@ if __name__ == '__main__':
logger.debug(f" --log-level (QBT_LOG_LEVEL): {log_level}")
logger.debug(f" --divider (QBT_DIVIDER): {divider}")
logger.debug(f" --width (QBT_WIDTH): {screen_width}")
logger.debug(f" --debug (QBT_DEBUG): {debug}")
logger.debug(f" --trace (QBT_TRACE): {trace}")
logger.debug("")
try:
os.chmod(file_logger, 0o777)