mirror of https://github.com/StuffAnThings/qbit_manage.git
apprise integration
parent beb34c4f72
commit a7b20bf6a4
9 changed files with 166 additions and 84 deletions
README.md
@@ -18,6 +18,7 @@ This is a program used to manage your qBittorrent instance such as:
 * Tag any torrents that have no hard links and allows optional cleanup to delete these torrents and contents based on maximum ratio and/or time seeded
 * RecycleBin function to move files into a RecycleBin folder instead of deleting the data directly when deleting a torrent
 * Built-in scheduler to run the script every x minutes. (Can use `--run` command to run without the scheduler)
+* Webhook notifications with [Notifiarr](https://notifiarr.com/) and [Apprise API](https://github.com/caronc/apprise-api) integration
 ## Getting Started
 
 Check out the [wiki](https://github.com/StuffAnThings/qbit_manage/wiki) for installation help
config/config.yml.sample
@@ -128,16 +128,25 @@ orphaned:
   - "**/@eaDir"
   - "/data/torrents/temp/**"
 
+#Apprise integration with webhooks
+apprise:
+  #Mandatory to fill out the url of your apprise API endpoint
+  api_url: http://apprise-api:8000
+  #Mandatory to fill out the notification url/urls based on the notification services provided by apprise. https://github.com/caronc/apprise/wiki
+  notify_url:
+
 #Notifiarr integration with webhooks
 notifiarr:
   #Mandatory to fill out API Key
   apikey: ####################################
-  #Your qBittorrent instance, can be set to any unique value
+  #<OPTIONAL> Set to a unique value (could be your username on notifiarr for example)
   instance:
-  test: true
-  develop: true
 
-# Webhook notifications: Set value to notifiarr if using notifiarr integration, otherwise set to webhook URL
+# Webhook notifications:
+# Possible values:
+#   Set value to notifiarr if using notifiarr integration
+#   Set value to apprise if using apprise integration
+#   Set value to a valid webhook URL
 webhooks:
   error: notifiarr
   run_start: notifiarr
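For reference, a minimal filled-in version of the new block might look like the sketch below. The discord:// URL is an illustrative placeholder; any service URL from the Apprise wiki works the same way, and the webhooks keys can then be pointed at apprise:

```yaml
apprise:
  api_url: http://apprise-api:8000          # your Apprise API endpoint
  notify_url:
    - discord://webhook_id/webhook_token    # placeholder; see the Apprise wiki

webhooks:
  error: apprise
  run_start: apprise
```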
modules/apprise.py (new file)
@@ -0,0 +1,14 @@
+import logging
+
+from modules.util import Failed
+
+logger = logging.getLogger("qBit Manage")
+
+class Apprise:
+    def __init__(self, config, params):
+        self.config = config
+        self.api_url = params["api_url"]
+        self.notify_url = ",".join(params["notify_url"])
+        response = self.config.get(self.api_url)
+        if response.status_code != 200:
+            raise Failed(f"Apprise Error: Unable to connect to Apprise using {self.api_url}")
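A minimal sketch of how this class gets wired up, assuming the repo is on the import path; FakeConfig is a hypothetical stand-in for the project's Config object, of which only the get() request wrapper is needed here:

```python
import requests

from modules.apprise import Apprise  # the new module above

class FakeConfig:
    """Stand-in for modules.config.Config: only the get() wrapper is used."""
    def get(self, url, **kwargs):
        return requests.get(url, **kwargs)

params = {
    "api_url": "http://apprise-api:8000",   # Apprise API endpoint from the config
    "notify_url": ["discord://id/token"],   # list from config; joined with commas
}
apprise = Apprise(FakeConfig(), params)     # raises Failed if the endpoint is down
print(apprise.notify_url)                   # "discord://id/token"
```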
modules/config.py
@@ -4,6 +4,7 @@ from modules.util import Failed, check
 from modules.qbittorrent import Qbt
 from modules.webhooks import Webhooks
 from modules.notifiarr import Notifiarr
+from modules.apprise import Apprise
 from ruamel import yaml
 from retrying import retry
 
@@ -38,6 +39,7 @@ class Config:
         if "nohardlinks" in new_config: new_config["nohardlinks"] = new_config.pop("nohardlinks")
         if "recyclebin" in new_config: new_config["recyclebin"] = new_config.pop("recyclebin")
         if "orphaned" in new_config: new_config["orphaned"] = new_config.pop("orphaned")
+        if "apprise" in new_config: new_config["apprise"] = new_config.pop("apprise")
         if "notifiarr" in new_config: new_config["notifiarr"] = new_config.pop("notifiarr")
         if "webhooks" in new_config:
             temp = new_config.pop("webhooks")
@@ -75,6 +77,22 @@ class Config:
             "function": self.util.check_for_attribute(self.data, "function", parent="webhooks", var_type="list", default_is_none=True)
         }
 
+        self.AppriseFactory = None
+        if "apprise" in self.data:
+            if self.data["apprise"] is not None:
+                logger.info("Connecting to Apprise...")
+                try:
+                    self.AppriseFactory = Apprise(self, {
+                        "api_url": self.util.check_for_attribute(self.data, "api_url", parent="apprise", var_type="url", throw=True),
+                        "notify_url": self.util.check_for_attribute(self.data, "notify_url", parent="apprise", var_type="list", throw=True),
+                    })
+                except Failed as e:
+                    logger.error(e)
+                logger.info(f"Apprise Connection {'Failed' if self.AppriseFactory is None else 'Successful'}")
+            else:
+                logger.warning("Config Warning: apprise attribute not found")
+
         self.NotifiarrFactory = None
         if "notifiarr" in self.data:
             if self.data["notifiarr"] is not None:
@@ -92,7 +110,7 @@ class Config:
             else:
                 logger.warning("Config Warning: notifiarr attribute not found")
 
-        self.Webhooks = Webhooks(self, self.webhooks, notifiarr=self.NotifiarrFactory)
+        self.Webhooks = Webhooks(self, self.webhooks, notifiarr=self.NotifiarrFactory, apprise=self.AppriseFactory)
         try:
             self.Webhooks.start_time_hooks(self.start_time)
         except Failed as e:
@@ -267,16 +285,18 @@ class Config:
                 if not dry_run: os.remove(file)
             if num_del > 0:
                 if not dry_run: util.remove_empty_directories(self.recycle_dir,"**/*")
+                body = []
+                body += util.print_multiline(n_info,loglevel)
+                body += util.print_line(f"{'Did not delete' if dry_run else 'Deleted'} {num_del} files ({util.human_readable_size(size_bytes)}) from the Recycle Bin.",loglevel)
                 attr = {
                     "function":"empty_recyclebin",
                     "title":f"Emptying Recycle Bin (Files > {self.recyclebin['empty_after_x_days']} days)",
+                    "body": "\n".join(body),
                     "files":files,
                     "empty_after_x_days": self.recyclebin['empty_after_x_days'],
                     "size_in_bytes":size_bytes
                 }
                 self.send_notifications(attr)
-                util.print_multiline(n_info,loglevel)
-                util.print_line(f"{'Did not delete' if dry_run else 'Deleted'} {num_del} files ({util.human_readable_size(size_bytes)}) from the Recycle Bin.",loglevel)
             else:
                 logger.debug('No files found in "' + self.recycle_dir + '"')
             return num_del
modules/notifiarr.py
@@ -28,6 +28,8 @@ class Notifiarr:
         url = f"{dev_url if self.develop else base_url}{'notification/test' if self.test else f'{path}{self.apikey}'}"
         if self.config.trace_mode:
             logger.debug(url.replace(self.apikey, "APIKEY"))
-        test_payload = (f"qbitManage-{self.apikey[:5]}")
-        params = {"event": test_payload, "qbit_client":self.config.data["qbt"]["host"], "instance":self.instance if self.test else "notify"}
+        if self.test:
+            params = {"event": f"qbitManage-{self.apikey[:5]}", "qbit_client":self.config.data["qbt"]["host"], "instance":self.instance}
+        else:
+            params = {"qbit_client":self.config.data["qbt"]["host"], "instance":self.instance}
         return url, params
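To make the branch concrete, a hedged sketch of the two parameter shapes get_url now builds; all values below are illustrative placeholders, not real credentials:

```python
# Mirrors the branch logic above with placeholder values.
apikey = "abcde12345ffffffffffffffffffffff"
qbit_host = "qbittorrent:8080"
instance = "my-unique-value"

for test in (True, False):
    if test:
        # test notifications still send an event id derived from the key prefix
        params = {"event": f"qbitManage-{apikey[:5]}", "qbit_client": qbit_host, "instance": instance}
    else:
        # regular notifications now omit the event field entirely
        params = {"qbit_client": qbit_host, "instance": instance}
    print(test, params)
```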
modules/qbittorrent.py
@@ -104,12 +104,14 @@ class Qbt:
                     new_cat = self.config.get_category(torrent.save_path)
                     tags = self.config.get_tags([x.url for x in torrent.trackers if x.url.startswith('http')])
                     if not dry_run: torrent.set_category(category=new_cat)
-                    print_line(util.insert_space(f'Torrent Name: {torrent.name}',3),loglevel)
-                    print_line(util.insert_space(f'New Category: {new_cat}',3),loglevel)
-                    print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
+                    body = []
+                    body += print_line(util.insert_space(f'Torrent Name: {torrent.name}',3),loglevel)
+                    body += print_line(util.insert_space(f'New Category: {new_cat}',3),loglevel)
+                    body += print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
                     attr = {
                         "function":"cat_update",
                         "title":"Updating Categories",
+                        "body": "\n".join(body),
                         "torrent_name":torrent.name,
                         "torrent_new_cat": new_cat,
                         "torrent_tracker": tags["url"],
@@ -134,13 +136,15 @@ class Qbt:
                     tags = self.config.get_tags([x.url for x in torrent.trackers if x.url.startswith('http')])
                     if tags["new_tag"]:
                         num_tags += 1
-                        print_line(util.insert_space(f'Torrent Name: {torrent.name}',3),loglevel)
-                        print_line(util.insert_space(f'New Tag: {tags["new_tag"]}',8),loglevel)
-                        print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
-                        self.set_tags_and_limits(torrent, tags["max_ratio"], tags["max_seeding_time"],tags["limit_upload_speed"],tags["new_tag"])
+                        body = []
+                        body += print_line(util.insert_space(f'Torrent Name: {torrent.name}',3),loglevel)
+                        body += print_line(util.insert_space(f'New Tag: {tags["new_tag"]}',8),loglevel)
+                        body += print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
+                        body.extend(self.set_tags_and_limits(torrent, tags["max_ratio"], tags["max_seeding_time"],tags["limit_upload_speed"],tags["new_tag"]))
                         attr = {
                             "function":"tag_update",
                             "title":"Updating Tags",
+                            "body": "\n".join(body),
                             "torrent_name":torrent.name,
                             "torrent_new_tag": tags["new_tag"],
                             "torrent_tracker": tags["url"],
@@ -158,21 +162,22 @@ class Qbt:
 
     def set_tags_and_limits(self,torrent,max_ratio,max_seeding_time,limit_upload_speed=None,tags=None):
         dry_run = self.config.args['dry_run']
         loglevel = 'DRYRUN' if dry_run else 'INFO'
+        body = []
         #Print Logs
         if limit_upload_speed:
-            if limit_upload_speed == -1: print_line(util.insert_space(f'Limit UL Speed: Infinity',1),loglevel)
-            else: print_line(util.insert_space(f'Limit UL Speed: {limit_upload_speed} kB/s',1),loglevel)
+            if limit_upload_speed == -1: body += print_line(util.insert_space(f'Limit UL Speed: Infinity',1),loglevel)
+            else: body += print_line(util.insert_space(f'Limit UL Speed: {limit_upload_speed} kB/s',1),loglevel)
         if max_ratio or max_seeding_time:
-            if max_ratio == -2 or max_seeding_time == -2: print_line(util.insert_space(f'Share Limit: Use Global Share Limit',4),loglevel)
-            elif max_ratio == -1 or max_seeding_time == -1: print_line(util.insert_space(f'Share Limit: Set No Share Limit',4),loglevel)
+            if max_ratio == -2 or max_seeding_time == -2: body += print_line(util.insert_space(f'Share Limit: Use Global Share Limit',4),loglevel)
+            elif max_ratio == -1 or max_seeding_time == -1: body += print_line(util.insert_space(f'Share Limit: Set No Share Limit',4),loglevel)
             else:
                 if max_ratio != torrent.max_ratio and not max_seeding_time:
-                    print_line(util.insert_space(f'Share Limit: Max Ratio = {max_ratio}',4),loglevel)
+                    body += print_line(util.insert_space(f'Share Limit: Max Ratio = {max_ratio}',4),loglevel)
                 elif max_seeding_time != torrent.max_seeding_time and not max_ratio:
-                    print_line(util.insert_space(f'Share Limit: Max Seed Time = {max_seeding_time} min',4),loglevel)
+                    body += print_line(util.insert_space(f'Share Limit: Max Seed Time = {max_seeding_time} min',4),loglevel)
                 elif max_ratio != torrent.max_ratio and max_seeding_time != torrent.max_seeding_time:
-                    print_line(util.insert_space(f'Share Limit: Max Ratio = {max_ratio}, Max Seed Time = {max_seeding_time} min',4),loglevel)
+                    body += print_line(util.insert_space(f'Share Limit: Max Ratio = {max_ratio}, Max Seed Time = {max_seeding_time} min',4),loglevel)
         #Update Torrents
         if not dry_run:
             if tags: torrent.add_tags(tags)
@@ -189,6 +194,7 @@ class Qbt:
             if not max_ratio: max_ratio = torrent.max_ratio
             if not max_seeding_time: max_seeding_time = torrent.max_seeding_time
             torrent.set_share_limits(max_ratio,max_seeding_time)
+        return body
 
     def tag_nohardlinks(self):
         dry_run = self.config.args['dry_run']
@@ -222,13 +228,15 @@ class Qbt:
                     #Will only tag new torrents that don't have noHL tag
                     if 'noHL' not in torrent.tags :
                         num_tags += 1
-                        print_line(util.insert_space(f'Torrent Name: {torrent.name}',3),loglevel)
-                        print_line(util.insert_space(f'Added Tag: noHL',6),loglevel)
-                        print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
-                        self.set_tags_and_limits(torrent, nohardlinks[category]["max_ratio"], nohardlinks[category]["max_seeding_time"],nohardlinks[category]["limit_upload_speed"],tags='noHL')
+                        body = []
+                        body += print_line(util.insert_space(f'Torrent Name: {torrent.name}',3),loglevel)
+                        body += print_line(util.insert_space(f'Added Tag: noHL',6),loglevel)
+                        body += print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
+                        body.extend(self.set_tags_and_limits(torrent, nohardlinks[category]["max_ratio"], nohardlinks[category]["max_seeding_time"],nohardlinks[category]["limit_upload_speed"],tags='noHL'))
                         attr = {
                             "function":"tag_nohardlinks",
                             "title":"Tagging Torrents with No Hardlinks",
+                            "body": "\n".join(body),
                             "torrent_name":torrent.name,
                             "torrent_add_tag": 'noHL',
                             "torrent_tracker": tags["url"],
@@ -246,13 +254,18 @@ class Qbt:
                     #Checks to see if previous noHL tagged torrents now have hard links.
                     if (not (util.nohardlink(torrent['content_path'].replace(root_dir,root_dir))) and ('noHL' in torrent.tags)):
                         num_untag += 1
-                        print_line(f'Previous Tagged noHL Torrent Name: {torrent.name} has hard links found now.',loglevel)
-                        print_line(util.insert_space(f'Removed Tag: noHL',6),loglevel)
-                        print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
-                        print_line(f"{'Not Reverting' if dry_run else 'Reverting'} share limits.",loglevel)
+                        body = []
+                        body += print_line(f'Previous Tagged noHL Torrent Name: {torrent.name} has hard links found now.',loglevel)
+                        body += print_line(util.insert_space(f'Removed Tag: noHL',6),loglevel)
+                        body += print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
+                        body += print_line(f"{'Not Reverting' if dry_run else 'Reverting'} share limits.",loglevel)
+                        if not dry_run:
+                            torrent.remove_tags(tags='noHL')
+                            body.extend(self.set_tags_and_limits(torrent, tags["max_ratio"], tags["max_seeding_time"],tags["limit_upload_speed"]))
                         attr = {
                             "function":"untag_nohardlinks",
                             "title":"Untagging Previous Torrents that now have Hard Links",
+                            "body": "\n".join(body),
                             "torrent_name":torrent.name,
                             "torrent_remove_tag": 'noHL',
                             "torrent_tracker": tags["url"],
@@ -262,9 +275,6 @@ class Qbt:
                             "torrent_limit_upload_speed": tags["limit_upload_speed"]
                         }
                         self.config.send_notifications(attr)
-                        if not dry_run:
-                            torrent.remove_tags(tags='noHL')
-                            self.set_tags_and_limits(torrent, tags["max_ratio"], tags["max_seeding_time"],tags["limit_upload_speed"])
                 #loop through torrent list again for cleanup purposes
                 if (nohardlinks[category]['cleanup']):
                     for torrent in torrent_list:
@@ -272,27 +282,29 @@ class Qbt:
                         #Double check that the content path is the same before we delete anything
                         if torrent['content_path'].replace(root_dir,root_dir) == tdel_dict[torrent.name]:
                             tags = self.config.get_tags([x.url for x in torrent.trackers if x.url.startswith('http')])
-                            print_line(util.insert_space(f'Torrent Name: {torrent.name}',3),loglevel)
-                            print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
-                            print_line(util.insert_space(f"Cleanup: True [No hard links found and meets Share Limits.]",8),loglevel)
-                            attr = {
-                                "function":"cleanup_tag_nohardlinks",
-                                "title":"Removing NoHL Torrents and meets Share Limits",
-                                "torrent_name":torrent.name,
-                                "cleanup": 'True',
-                                "torrent_tracker": tags["url"],
-                                "notifiarr_indexer": tags["notifiarr"],
-                            }
+                            body = []
+                            body += print_line(util.insert_space(f'Torrent Name: {torrent.name}',3),loglevel)
+                            body += print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
+                            body += print_line(util.insert_space(f"Cleanup: True [No hard links found and meets Share Limits.]",8),loglevel)
                             if (os.path.exists(torrent['content_path'].replace(root_dir,root_dir))):
                                 if not dry_run: self.tor_delete_recycle(torrent)
                                 del_tor_cont += 1
                                 attr["torrents_deleted_and_contents"]: True
-                                print_line(util.insert_space(f'Deleted .torrent AND content files.',8),loglevel)
+                                body += print_line(util.insert_space(f'Deleted .torrent AND content files.',8),loglevel)
                             else:
                                 if not dry_run: torrent.delete(hash=torrent.hash, delete_files=False)
                                 del_tor += 1
                                 attr["torrents_deleted_and_contents"]: False
-                                print_line(util.insert_space(f'Deleted .torrent but NOT content files.',8),loglevel)
+                                body += print_line(util.insert_space(f'Deleted .torrent but NOT content files.',8),loglevel)
+                            attr = {
+                                "function":"cleanup_tag_nohardlinks",
+                                "title":"Removing NoHL Torrents and meets Share Limits",
+                                "body": "\n".join(body),
+                                "torrent_name":torrent.name,
+                                "cleanup": 'True',
+                                "torrent_tracker": tags["url"],
+                                "notifiarr_indexer": tags["notifiarr"],
+                            }
                             self.config.send_notifications(attr)
             if num_tags >= 1:
                 print_line(f"{'Did not Tag/set' if dry_run else 'Tag/set'} share limits for {num_tags} .torrent{'s.' if num_tags > 1 else '.'}",loglevel)
@@ -345,6 +357,7 @@ class Qbt:
                     attr = {
                         "function":"potential_rem_unregistered",
                         "title":"Potential Unregistered Torrents",
+                        "body": pot_unr,
                         "torrent_name":t_name,
                         "torrent_status": msg_up,
                         "torrent_tracker": tags["url"],
@@ -352,34 +365,36 @@ class Qbt:
                     }
                     self.config.send_notifications(attr)
                 if any(m in msg_up for m in unreg_msgs) and x.status == 4 and 'DOWN' not in msg_up and 'UNREACHABLE' not in msg_up:
-                    print_line(util.insert_space(f'Torrent Name: {t_name}',3),loglevel)
-                    print_line(util.insert_space(f'Status: {msg_up}',9),loglevel)
-                    print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
-                    attr = {
-                        "function":"rem_unregistered",
-                        "title":"Removing Unregistered Torrents",
-                        "torrent_name":t_name,
-                        "torrent_status": msg_up,
-                        "torrent_tracker": tags["url"],
-                        "notifiarr_indexer": tags["notifiarr"],
-                    }
+                    body = []
+                    body += print_line(util.insert_space(f'Torrent Name: {t_name}',3),loglevel)
+                    body += print_line(util.insert_space(f'Status: {msg_up}',9),loglevel)
+                    body += print_line(util.insert_space(f'Tracker: {tags["url"]}',8),loglevel)
                     if t_count > 1:
                         # Checks if any of the original torrents are working
                         if '' in t_msg or 2 in t_status:
                             if not dry_run: torrent.delete(hash=torrent.hash, delete_files=False)
                             attr["torrents_deleted_and_contents"]: False
-                            print_line(util.insert_space(f'Deleted .torrent but NOT content files.',8),loglevel)
+                            body += print_line(util.insert_space(f'Deleted .torrent but NOT content files.',8),loglevel)
                             del_tor += 1
                         else:
                             if not dry_run: self.tor_delete_recycle(torrent)
                             attr["torrents_deleted_and_contents"]: True
-                            print_line(util.insert_space(f'Deleted .torrent AND content files.',8),loglevel)
+                            body += print_line(util.insert_space(f'Deleted .torrent AND content files.',8),loglevel)
                             del_tor_cont += 1
                     else:
                         if not dry_run: self.tor_delete_recycle(torrent)
                         attr["torrents_deleted_and_contents"]: True
-                        print_line(util.insert_space(f'Deleted .torrent AND content files.',8),loglevel)
+                        body += print_line(util.insert_space(f'Deleted .torrent AND content files.',8),loglevel)
                         del_tor_cont += 1
+                    attr = {
+                        "function":"rem_unregistered",
+                        "title":"Removing Unregistered Torrents",
+                        "body": "\n".join(body),
+                        "torrent_name":t_name,
+                        "torrent_status": msg_up,
+                        "torrent_tracker": tags["url"],
+                        "notifiarr_indexer": tags["notifiarr"],
+                    }
                     self.config.send_notifications(attr)
         if del_tor >=1 or del_tor_cont >=1:
             if del_tor >= 1: print_line(f"{'Did not delete' if dry_run else 'Deleted'} {del_tor} .torrent{'s' if del_tor > 1 else ''} but not content files.",loglevel)
@@ -423,13 +438,15 @@ class Qbt:
                 #Only add cross-seed torrent if original torrent is complete
                 if self.torrentinfo[t_name]['is_complete']:
                     categories.append(category)
-                    print_line(f"{'Not Adding' if dry_run else 'Adding'} to qBittorrent:",loglevel)
-                    print_line(util.insert_space(f'Torrent Name: {t_name}',3),loglevel)
-                    print_line(util.insert_space(f'Category: {category}',7),loglevel)
-                    print_line(util.insert_space(f'Save_Path: {dest}',6),loglevel)
+                    body = []
+                    body += print_line(f"{'Not Adding' if dry_run else 'Adding'} to qBittorrent:",loglevel)
+                    body += print_line(util.insert_space(f'Torrent Name: {t_name}',3),loglevel)
+                    body += print_line(util.insert_space(f'Category: {category}',7),loglevel)
+                    body += print_line(util.insert_space(f'Save_Path: {dest}',6),loglevel)
                     attr = {
                         "function":"cross_seed",
                         "title":"Adding New Cross-Seed Torrent",
+                        "body": "\n".join(body),
                         "torrent_name":t_name,
                         "torrent_category": category,
                         "torrent_save_path": dest,
@@ -451,14 +468,15 @@ class Qbt:
             t_name = torrent.name
             if 'cross-seed' not in torrent.tags and self.torrentinfo[t_name]['count'] > 1 and self.torrentinfo[t_name]['first_hash'] != torrent.hash:
                 tagged += 1
+                body = print_line(f"{'Not Adding' if dry_run else 'Adding'} 'cross-seed' tag to {t_name}",loglevel)
                 attr = {
                     "function":"tag_cross_seed",
                     "title":"Tagging Cross-Seed Torrent",
+                    "body":body,
                     "torrent_name":t_name,
                     "torrent_add_tag": "cross-seed"
                 }
                 self.config.send_notifications(attr)
-                print_line(f"{'Not Adding' if dry_run else 'Adding'} 'cross-seed' tag to {t_name}",loglevel)
                 if not dry_run: torrent.add_tags(tags='cross-seed')
 
         numcategory = Counter(categories)
@@ -485,10 +503,11 @@ class Qbt:
                 if torrent.progress == 1:
                     if torrent.max_ratio < 0 and torrent.max_seeding_time < 0:
                         resumed += 1
-                        print_line(f"{'Not Resuming' if dry_run else 'Resuming'} [{tags['new_tag']}] - {torrent.name}",loglevel)
+                        body = print_line(f"{'Not Resuming' if dry_run else 'Resuming'} [{tags['new_tag']}] - {torrent.name}",loglevel)
                         attr = {
                             "function":"recheck",
                             "title":"Resuming Torrent",
+                            "body": body,
                             "torrent_name":torrent.name,
                             "torrent_tracker": tags["url"],
                             "notifiarr_indexer": tags["notifiarr"],
@@ -505,10 +524,11 @@ class Qbt:
                     or (torrent.max_seeding_time >= 0 and (torrent.seeding_time < (torrent.max_seeding_time * 60)) and torrent.max_ratio < 0) \
                     or (torrent.max_ratio >= 0 and torrent.max_seeding_time >= 0 and torrent.ratio < torrent.max_ratio and (torrent.seeding_time < (torrent.max_seeding_time * 60))):
                         resumed += 1
-                        print_line(f"{'Not Resuming' if dry_run else 'Resuming'} [{tags['new_tag']}] - {torrent.name}",loglevel)
+                        body = print_line(f"{'Not Resuming' if dry_run else 'Resuming'} [{tags['new_tag']}] - {torrent.name}",loglevel)
                         attr = {
                             "function":"recheck",
                             "title":"Resuming Torrent",
+                            "body": body,
                             "torrent_name":torrent.name,
                             "torrent_tracker": tags["url"],
                             "notifiarr_indexer": tags["notifiarr"],
@@ -518,15 +538,16 @@ class Qbt:
                 #Recheck
                 elif torrent.progress == 0 and self.torrentinfo[torrent.name]['is_complete'] and not torrent.state_enum.is_checking:
                     rechecked += 1
+                    body = print_line(f"{'Not Rechecking' if dry_run else 'Rechecking'} [{tags['new_tag']}] - {torrent.name}",loglevel)
                     attr = {
                         "function":"recheck",
                         "title":"Rechecking Torrent",
+                        "body": body,
                         "torrent_name":torrent.name,
                         "torrent_tracker": tags["url"],
                         "notifiarr_indexer": tags["notifiarr"],
                     }
                     self.config.send_notifications(attr)
-                    print_line(f"{'Not Rechecking' if dry_run else 'Rechecking'} [{tags['new_tag']}] - {torrent.name}",loglevel)
                     if not dry_run: torrent.recheck()
         return resumed,rechecked
 
@@ -576,16 +597,19 @@ class Qbt:
         if orphaned_files:
             dir_out = os.path.join(remote_path,'orphaned_data')
             os.makedirs(dir_out,exist_ok=True)
-            print_line(f"{len(orphaned_files)} Orphaned files found",loglevel)
-            print_multiline("\n".join(orphaned_files),loglevel)
-            print_line(f"{'Did not move' if dry_run else 'Moved'} {len(orphaned_files)} Orphaned files to {dir_out.replace(remote_path,root_path)}",loglevel)
-            orphaned = len(orphaned_files)
+            body = []
+            num_orphaned = len(orphaned_files)
+            print_line(f"{num_orphaned} Orphaned files found",loglevel)
+            body += print_multiline("\n".join(orphaned_files),loglevel)
+            body += print_line(f"{'Did not move' if dry_run else 'Moved'} {num_orphaned} Orphaned files to {dir_out.replace(remote_path,root_path)}",loglevel)
+
             attr = {
                 "function":"rem_orphaned",
-                "title":f"Removing {len(orphaned_files)} Orphaned Files",
+                "title":f"Removing {num_orphaned} Orphaned Files",
+                "body": "\n".join(body),
                 "orphaned_files": list(orphaned_files),
                 "orphaned_directory": dir_out.replace(remote_path,root_path),
-                "total_orphaned_files": orphaned,
+                "total_orphaned_files": num_orphaned,
             }
             self.config.send_notifications(attr)
             #Delete empty directories after moving orphan files
modules/util.py
@@ -177,6 +177,7 @@ def get_int_list(data, id_type):
 
 def print_line(lines, loglevel='INFO'):
     logger.log(getattr(logging, loglevel.upper()), str(lines))
+    return [str(lines)]
 
 def print_multiline(lines, loglevel='INFO'):
     for i, line in enumerate(str(lines).split("\n")):
@@ -184,6 +185,7 @@ def print_multiline(lines, loglevel='INFO'):
         if i == 0:
             logger.handlers[1].setFormatter(logging.Formatter(" " * 65 + "| %(message)s"))
     logger.handlers[1].setFormatter(logging.Formatter("[%(asctime)s] %(filename)-27s %(levelname)-10s | %(message)s"))
+    return [(str(lines))]
 
 def print_stacktrace():
     print_multiline(traceback.format_exc(), 'CRITICAL')
@@ -220,6 +222,7 @@ def separator(text=None, space=True, border=True, loglevel='INFO'):
         logger.log(getattr(logging, loglevel.upper()), border_text)
     for handler in logger.handlers:
         apply_formatter(handler)
+    return [text]
 
 def apply_formatter(handler, border=True):
     text = f"| %(message)-{screen_width - 2}s |" if border else f"%(message)-{screen_width - 2}s"
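The point of these one-line returns: print_line, print_multiline, and separator now hand back what they logged as a list, so callers can log and accumulate a notification body in a single call with plain +=. A standalone sketch of the pattern, with an assumed logger setup:

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("sketch")

def print_line(lines, loglevel='INFO'):
    logger.log(getattr(logging, loglevel.upper()), str(lines))
    return [str(lines)]                              # single-item list, so `body +=` extends

body = []
body += print_line("Torrent Name: ubuntu.iso")       # logs and collects in one call
body += print_line("New Tag: tracker-x")
print("\n".join(body))                               # becomes the webhook "body" field
```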
modules/webhooks.py
@@ -6,7 +6,7 @@ from modules.util import Failed
 logger = logging.getLogger("qBit Manage")
 
 class Webhooks:
-    def __init__(self, config, system_webhooks, notifiarr=None):
+    def __init__(self, config, system_webhooks, notifiarr=None, apprise=None):
         self.config = config
         self.error_webhooks = system_webhooks["error"] if "error" in system_webhooks else []
         self.run_start_webhooks = system_webhooks["run_start"] if "run_start" in system_webhooks else []
@@ -19,6 +19,7 @@ class Webhooks:
         else:
             self.function_webhooks = []
         self.notifiarr = notifiarr
+        self.apprise = apprise
 
     def _request(self, webhooks, json):
         if self.config.trace_mode:
@@ -28,11 +29,16 @@ class Webhooks:
             if self.config.trace_mode:
                 logger.debug(f"Webhook: {webhook}")
             if webhook == "notifiarr":
-                url, params = self.notifiarr.get_url("notification/qbtManage/")
+                url, params = self.notifiarr.get_url("notification/qbitManage/")
                 for x in range(6):
                     response = self.config.get(url, json=json, params=params)
                     if response.status_code < 500:
                         break
+            elif webhook == "apprise":
+                if self.apprise is None:
+                    raise Failed(f"Webhook attribute set to apprise but apprise attribute is not configured.")
+                json['urls'] = self.apprise.notify_url
+                response = self.config.post(f"{self.apprise.api_url}/notify", json=json)
             else:
                 response = self.config.post(webhook, json=json)
             try:
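For reference, a hedged sketch of what the new apprise branch ends up sending: the Apprise API's stateless /notify endpoint accepts the target service URLs alongside the title/body fields already present in the webhook JSON. All values below are placeholders:

```python
import requests

api_url = "http://apprise-api:8000"      # self.apprise.api_url
notify_url = "discord://id/token"        # self.apprise.notify_url (comma-joined list)

json_payload = {
    "function": "cat_update",            # existing webhook fields pass through
    "title": "Updating Categories",
    "body": "Torrent Name: ubuntu.iso\nNew Category: distro",
    "urls": notify_url,                  # injected by _request before posting
}
response = requests.post(f"{api_url}/notify", json=json_payload, timeout=30)
response.raise_for_status()
```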
@@ -56,12 +62,13 @@ class Webhooks:
             start_type = ""
         self._request(self.run_start_webhooks, {
             "function":"run_start",
-            "title":f"Starting {start_type}Run",
+            "title": None,
+            "body":f"Starting {start_type}Run",
             "start_time": start_time.strftime("%Y-%m-%d %H:%M:%S"),
             "dry_run": self.config.args['dry_run']
         })
 
-    def end_time_hooks(self, start_time, end_time, run_time, stats):
+    def end_time_hooks(self, start_time, end_time, run_time, stats, body):
         dry_run = self.config.args['dry_run']
         if dry_run:
             start_type = "Dry-"
@@ -70,7 +77,8 @@ class Webhooks:
         if self.run_end_webhooks:
             self._request(self.run_end_webhooks, {
                 "function":"run_end",
-                "title":f"Finished {start_type}Run",
+                "title": None,
+                "body": body,
                 "start_time": start_time.strftime("%Y-%m-%d %H:%M:%S"),
                 "end_time": end_time.strftime("%Y-%m-%d %H:%M:%S"),
                 "run_time": run_time,
@@ -90,7 +98,8 @@ class Webhooks:
 
     def error_hooks(self, text, function_error=None, critical=True):
         if self.error_webhooks:
-            json = {"function":"run_error","title":f"{function_error} Error","error": str(text), "critical": critical}
+            type = "failure" if critical == True else "warning"
+            json = {"function":"run_error","title":f"{function_error} Error","body": str(text), "critical": critical, "type": type}
             if function_error:
                 json["function_error"] = function_error
             self._request(self.error_webhooks, json)
qbit_manage.py
@@ -227,13 +227,13 @@ def start():
     end_time = datetime.now()
     run_time = str(end_time - start_time).split('.')[0]
+    body = util.separator(f"Finished {start_type}Run\n{os.linesep.join(stats_summary) if len(stats_summary)>0 else ''}\nRun Time: {run_time}".replace('\n\n', '\n'))[0]
     if cfg:
         try:
-            cfg.Webhooks.end_time_hooks(start_time, end_time, run_time, stats)
+            cfg.Webhooks.end_time_hooks(start_time, end_time, run_time, stats, body)
         except Failed as e:
             util.print_stacktrace()
             logger.error(f"Webhooks Error: {e}")
-    util.separator(f"Finished {start_type}Run\n{os.linesep.join(stats_summary) if len(stats_summary)>0 else ''}\nRun Time: {run_time}".replace('\n\n', '\n'))
 
 def end():
     logger.info("Exiting Qbit_manage")
     logger.removeHandler(file_handler)
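Putting the last two changes together, a sketch of the run_end payload that end_time_hooks now emits; the values are illustrative, and "body" carries the same summary text that separator() both logs and returns:

```python
payload = {
    "function": "run_end",
    "title": None,                        # title dropped in favour of a body
    "body": "Finished Run\nCategorized: 2\nRun Time: 0:00:27",
    "start_time": "2022-01-01 10:00:00",
    "end_time": "2022-01-01 10:00:27",
    "run_time": "0:00:27",
}
```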