2023-04-01 02:37:33 +08:00
|
|
|
"""Module to handle webhooks."""
|
2023-04-01 06:32:58 +08:00
|
|
|
import time
|
2021-12-17 22:19:40 +08:00
|
|
|
from json import JSONDecodeError
|
2022-10-29 23:19:09 +08:00
|
|
|
|
2023-03-30 02:16:50 +08:00
|
|
|
from requests.exceptions import JSONDecodeError as requestsJSONDecodeError
|
|
|
|
|
2022-08-15 09:41:33 +08:00
|
|
|
from modules import util
|
2021-12-17 22:19:40 +08:00
|
|
|
from modules.util import Failed
|
|
|
|
|
2022-08-15 09:41:33 +08:00
|
|
|
logger = util.logger
|
2021-12-17 22:19:40 +08:00
|
|
|
|
2023-06-03 08:40:05 +08:00
|
|
|
# Grouping threshold: when a run updates more than this many torrents, the
# per-torrent notifications are collapsed into grouped ones (see Webhooks.notify).
GROUP_NOTIFICATION_LIMIT = 10
|
|
|
|
|
2021-12-29 01:19:58 +08:00
|
|
|
|
2021-12-17 22:19:40 +08:00
|
|
|
class Webhooks:
    """Class to handle webhooks.

    Dispatches notification payloads to the configured targets: the Notifiarr
    client, the Apprise API, or plain webhook URLs POSTed via the config object.
    """
|
|
|
|
|
2021-12-19 01:38:41 +08:00
|
|
|
def __init__(self, config, system_webhooks, notifiarr=None, apprise=None):
|
2023-04-01 02:37:33 +08:00
|
|
|
"""Initialize the class."""
|
2021-12-17 22:19:40 +08:00
|
|
|
self.config = config
|
|
|
|
self.error_webhooks = system_webhooks["error"] if "error" in system_webhooks else []
|
|
|
|
self.run_start_webhooks = system_webhooks["run_start"] if "run_start" in system_webhooks else []
|
|
|
|
self.run_end_webhooks = system_webhooks["run_end"] if "run_end" in system_webhooks else []
|
|
|
|
if "function" in system_webhooks and system_webhooks["function"] is not None:
|
|
|
|
try:
|
|
|
|
self.function_webhooks = system_webhooks["function"][0]
|
2021-12-29 01:19:58 +08:00
|
|
|
except (IndexError, KeyError):
|
2021-12-17 22:19:40 +08:00
|
|
|
self.function_webhooks = []
|
|
|
|
else:
|
|
|
|
self.function_webhooks = []
|
|
|
|
self.notifiarr = notifiarr
|
2021-12-19 01:38:41 +08:00
|
|
|
self.apprise = apprise
|
2021-12-17 22:19:40 +08:00
|
|
|
|
2023-04-01 06:59:15 +08:00
|
|
|
def request_and_check(self, webhook, json):
|
2023-04-01 06:32:58 +08:00
|
|
|
"""
|
|
|
|
Send a webhook request and check for errors.
|
|
|
|
retry up to 6 times if the response is a 500+ error.
|
|
|
|
"""
|
|
|
|
retry_count = 0
|
|
|
|
retry_attempts = 6
|
|
|
|
request_delay = 2
|
|
|
|
for retry_count in range(retry_attempts):
|
2023-04-01 06:59:15 +08:00
|
|
|
if webhook == "notifiarr":
|
2023-04-01 06:32:58 +08:00
|
|
|
response = self.notifiarr.notification(json=json)
|
|
|
|
else:
|
2023-04-01 07:04:37 +08:00
|
|
|
webhook_post = webhook
|
2023-04-01 06:59:15 +08:00
|
|
|
if webhook == "apprise":
|
2023-04-01 06:32:58 +08:00
|
|
|
json["urls"] = self.apprise.notify_url
|
2023-04-01 07:04:37 +08:00
|
|
|
webhook_post = f"{self.apprise.api_url}/notify"
|
|
|
|
response = self.config.post(webhook_post, json=json)
|
2023-04-01 06:32:58 +08:00
|
|
|
if response.status_code < 500:
|
|
|
|
return response
|
2023-04-01 07:32:51 +08:00
|
|
|
logger.debug(f"({response.status_code} [{response.reason}]) Retrying in {request_delay} seconds.")
|
2023-04-01 06:32:58 +08:00
|
|
|
time.sleep(request_delay)
|
2023-04-01 07:32:51 +08:00
|
|
|
logger.debug(f"(Retry {retry_count + 1} of {retry_attempts}.")
|
2023-04-01 06:32:58 +08:00
|
|
|
retry_count += 1
|
2023-04-01 06:59:15 +08:00
|
|
|
logger.warning(f"({response.status_code} [{response.reason}]) after {retry_attempts} attempts.")
|
2023-04-01 06:32:58 +08:00
|
|
|
|
2021-12-17 22:19:40 +08:00
|
|
|
    def _request(self, webhooks, json):
        """
        Send a webhook request via request_and_check.

        Check for errors and log them.

        Raises:
            Failed: when the endpoint reports an error (HTTP >= 400, or a
                Notifiarr JSON body with result == "error"), unless the error
                is a disabled-trigger warning, which is only logged.
        """
        logger.trace("")
        logger.trace(f"JSON: {json}")
        # De-duplicate targets; note that set() makes delivery order unspecified.
        for webhook in list(set(webhooks)):
            response = None
            logger.trace(f"Webhook: {webhook}")
            if webhook is None:
                # NOTE(review): `break` abandons ALL remaining webhooks, not
                # just this entry — `continue` would skip only the bad one.
                # Presumably intentional; confirm before changing.
                break
            elif (webhook == "notifiarr" and self.notifiarr is None) or (webhook == "apprise" and self.apprise is None):
                logger.warning(f"Webhook attribute set to {webhook} but {webhook} attribute is not configured.")
                break
            response = self.request_and_check(webhook, json)
            # response is None when every retry returned a 5xx status.
            if response:
                skip = False
                try:
                    response_json = response.json()
                    logger.trace(f"Response: {response_json}")
                    # Notifiarr can report failures inside the JSON body even
                    # with a 2xx HTTP status, so inspect the body first.
                    if (
                        "result" in response_json
                        and response_json["result"] == "error"
                        and "details" in response_json
                        and "response" in response_json["details"]
                    ):
                        if "trigger is not enabled" in response_json["details"]["response"]:
                            # A disabled trigger is only a warning, not a failure.
                            logger.info(f"Notifiarr Warning: {response_json['details']['response']}")
                            skip = True
                        else:
                            raise Failed(f"Notifiarr Error: {response_json['details']['response']}")
                    if (
                        response.status_code >= 400 or ("result" in response_json and response_json["result"] == "error")
                    ) and skip is False:
                        raise Failed(f"({response.status_code} [{response.reason}]) {response_json}")
                except (JSONDecodeError, requestsJSONDecodeError) as exc:
                    # Body was not JSON: only HTTP error statuses count as failures.
                    if response.status_code >= 400:
                        raise Failed(f"({response.status_code} [{response.reason}])") from exc
|
2021-12-17 22:19:40 +08:00
|
|
|
|
|
|
|
def start_time_hooks(self, start_time):
|
2023-04-01 02:37:33 +08:00
|
|
|
"""Send a webhook to notify that the run has started."""
|
2021-12-17 22:19:40 +08:00
|
|
|
if self.run_start_webhooks:
|
2022-10-29 23:19:09 +08:00
|
|
|
dry_run = self.config.commands["dry_run"]
|
2021-12-17 22:19:40 +08:00
|
|
|
if dry_run:
|
|
|
|
start_type = "Dry-"
|
|
|
|
else:
|
|
|
|
start_type = ""
|
2022-10-29 23:19:09 +08:00
|
|
|
self._request(
|
|
|
|
self.run_start_webhooks,
|
|
|
|
{
|
|
|
|
"function": "run_start",
|
|
|
|
"title": None,
|
|
|
|
"body": f"Starting {start_type}Run",
|
|
|
|
"start_time": start_time.strftime("%Y-%m-%d %H:%M:%S"),
|
|
|
|
"dry_run": self.config.commands["dry_run"],
|
|
|
|
},
|
|
|
|
)
|
2021-12-17 22:19:40 +08:00
|
|
|
|
2022-01-01 22:54:59 +08:00
|
|
|
def end_time_hooks(self, start_time, end_time, run_time, next_run, stats, body):
|
2023-04-01 02:37:33 +08:00
|
|
|
"""Send a webhook to notify that the run has ended."""
|
2021-12-17 22:19:40 +08:00
|
|
|
if self.run_end_webhooks:
|
2022-10-29 23:19:09 +08:00
|
|
|
self._request(
|
|
|
|
self.run_end_webhooks,
|
|
|
|
{
|
|
|
|
"function": "run_end",
|
|
|
|
"title": None,
|
|
|
|
"body": body,
|
|
|
|
"start_time": start_time.strftime("%Y-%m-%d %H:%M:%S"),
|
|
|
|
"end_time": end_time.strftime("%Y-%m-%d %H:%M:%S"),
|
|
|
|
"next_run": next_run.strftime("%Y-%m-%d %H:%M:%S") if next_run is not None else next_run,
|
|
|
|
"run_time": run_time,
|
|
|
|
"torrents_added": stats["added"],
|
|
|
|
"torrents_deleted": stats["deleted"],
|
|
|
|
"torrents_deleted_and_contents_count": stats["deleted_contents"],
|
|
|
|
"torrents_resumed": stats["resumed"],
|
|
|
|
"torrents_rechecked": stats["rechecked"],
|
|
|
|
"torrents_categorized": stats["categorized"],
|
|
|
|
"torrents_tagged": stats["tagged"],
|
|
|
|
"remove_unregistered": stats["rem_unreg"],
|
|
|
|
"torrents_tagged_tracker_error": stats["tagged_tracker_error"],
|
|
|
|
"torrents_untagged_tracker_error": stats["untagged_tracker_error"],
|
|
|
|
"orphaned_files_found": stats["orphaned"],
|
|
|
|
"torrents_tagged_no_hardlinks": stats["tagged_noHL"],
|
|
|
|
"torrents_untagged_no_hardlinks": stats["untagged_noHL"],
|
2023-06-04 22:31:06 +08:00
|
|
|
"torrents_updated_share_limits": stats["updated_share_limits"],
|
|
|
|
"torrents_cleaned_share_limits": stats["cleaned_share_limits"],
|
2022-10-29 23:19:09 +08:00
|
|
|
"files_deleted_from_recyclebin": stats["recycle_emptied"],
|
|
|
|
"files_deleted_from_orphaned": stats["orphaned_emptied"],
|
|
|
|
},
|
|
|
|
)
|
2021-12-17 22:19:40 +08:00
|
|
|
|
|
|
|
def error_hooks(self, text, function_error=None, critical=True):
|
2023-04-01 02:37:33 +08:00
|
|
|
"""Send a webhook to notify that an error has occurred."""
|
2021-12-17 22:19:40 +08:00
|
|
|
if self.error_webhooks:
|
2023-04-01 02:37:33 +08:00
|
|
|
err_type = "failure" if critical is True else "warning"
|
2021-12-29 01:19:58 +08:00
|
|
|
json = {
|
|
|
|
"function": "run_error",
|
|
|
|
"title": f"{function_error} Error",
|
|
|
|
"body": str(text),
|
|
|
|
"critical": critical,
|
2023-04-01 02:37:33 +08:00
|
|
|
"type": err_type,
|
2021-12-29 01:19:58 +08:00
|
|
|
}
|
2021-12-17 22:19:40 +08:00
|
|
|
if function_error:
|
|
|
|
json["function_error"] = function_error
|
|
|
|
self._request(self.error_webhooks, json)
|
|
|
|
|
|
|
|
def function_hooks(self, webhook, json):
|
2023-04-01 02:37:33 +08:00
|
|
|
"""Send a webhook to notify that a function has completed."""
|
2021-12-17 22:19:40 +08:00
|
|
|
if self.function_webhooks:
|
2021-12-29 01:19:58 +08:00
|
|
|
self._request(webhook, json)
|
2023-06-04 11:24:56 +08:00
|
|
|
|
|
|
|
def notify(self, torrents_updated=[], payload={}, group_by=""):
|
|
|
|
if len(torrents_updated) > GROUP_NOTIFICATION_LIMIT:
|
|
|
|
logger.trace(
|
|
|
|
f"Number of torrents updated > {GROUP_NOTIFICATION_LIMIT}, grouping notifications"
|
|
|
|
f"{f' by {group_by}' if group_by else ''}",
|
|
|
|
)
|
|
|
|
if group_by == "category":
|
|
|
|
group_attr = group_notifications_by_key(payload, "torrent_category")
|
|
|
|
elif group_by == "tag":
|
|
|
|
group_attr = group_notifications_by_key(payload, "torrent_tag")
|
2023-06-05 03:13:31 +08:00
|
|
|
elif group_by == "tracker":
|
|
|
|
group_attr = group_notifications_by_key(payload, "torrent_tracker")
|
2023-06-04 11:24:56 +08:00
|
|
|
|
|
|
|
# group notifications by grouping attribute
|
|
|
|
for group in group_attr:
|
|
|
|
num_torrents_updated = len(group_attr[group]["torrents"])
|
|
|
|
only_one_torrent_updated = num_torrents_updated == 1
|
|
|
|
|
|
|
|
attr = {
|
|
|
|
"function": group_attr[group]["function"],
|
|
|
|
"title": f"{group_attr[group]['title']} for {group}",
|
|
|
|
"body": group_attr[group]["body"]
|
|
|
|
if only_one_torrent_updated
|
|
|
|
else f"Updated {num_torrents_updated} "
|
|
|
|
f"{'torrent' if only_one_torrent_updated else 'torrents'} with {group_by} '{group}'",
|
|
|
|
"torrents": group_attr[group]["torrents"],
|
|
|
|
}
|
|
|
|
if group_by == "category":
|
|
|
|
attr["torrent_category"] = group
|
2023-06-05 02:32:52 +08:00
|
|
|
attr["torrent_tag"] = group_attr[group].get("torrent_tag") if only_one_torrent_updated else None
|
|
|
|
attr["torrent_tracker"] = group_attr[group].get("torrent_tracker") if only_one_torrent_updated else None
|
|
|
|
attr["notifiarr_indexer"] = group_attr[group].get("notifiarr_indexer") if only_one_torrent_updated else None
|
2023-06-04 11:24:56 +08:00
|
|
|
elif group_by == "tag":
|
|
|
|
attr["torrent_tag"] = group
|
2023-06-05 02:32:52 +08:00
|
|
|
attr["torrent_category"] = group_attr[group].get("torrent_category") if only_one_torrent_updated else None
|
|
|
|
attr["torrent_tracker"] = group_attr[group].get("torrent_tracker")
|
|
|
|
attr["notifiarr_indexer"] = group_attr[group].get("notifiarr_indexer")
|
2023-06-05 03:13:31 +08:00
|
|
|
elif group_by == "tracker":
|
|
|
|
attr["torrent_tracker"] = group
|
|
|
|
attr["torrent_category"] = group_attr[group].get("torrent_category") if only_one_torrent_updated else None
|
|
|
|
attr["torrent_tag"] = group_attr[group].get("torrent_tag") if only_one_torrent_updated else None
|
|
|
|
attr["notifiarr_indexer"] = group_attr[group].get("notifiarr_indexer")
|
2023-06-05 02:32:52 +08:00
|
|
|
|
|
|
|
for extra_attr in payload:
|
|
|
|
if extra_attr not in attr:
|
|
|
|
attr[extra_attr] = payload[extra_attr]
|
2023-06-04 11:24:56 +08:00
|
|
|
|
|
|
|
self.config.send_notifications(attr)
|
|
|
|
else:
|
|
|
|
for attr in payload:
|
|
|
|
self.config.send_notifications(attr)
|
|
|
|
|
|
|
|
|
|
|
|
def group_notifications_by_key(payload, key):
    """Group notifications by key.

    Args:
        payload: Iterable of notification attribute dicts.
        key: Dict key whose value identifies the group (e.g. "torrent_category").

    Returns:
        Dict mapping each group value to the first payload's metadata plus the
        first torrent of every payload belonging to that group.
    """
    grouped = {}
    for attr in payload:
        group = attr[key]
        first_torrent = attr.get("torrents", [None])[0]
        if group in grouped:
            # Known group: just accumulate this payload's first torrent.
            grouped[group]["torrents"].append(first_torrent)
        else:
            grouped[group] = {
                "function": attr.get("function"),
                "title": attr.get("title"),
                "body": attr.get("body"),
                "torrent_category": attr.get("torrent_category"),
                "torrent_tag": attr.get("torrent_tag"),
                "torrents": [first_torrent],
                "torrent_tracker": attr.get("torrent_tracker"),
                "notifiarr_indexer": attr.get("notifiarr_indexer"),
            }
    return grouped
|