Merge pull request #72 from StuffAnThings/develop

v3.1.2
This commit is contained in:
bobokun 2021-12-29 08:39:47 -05:00 committed by GitHub
commit 062b32a4c5
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
12 changed files with 757 additions and 568 deletions

View file

@ -19,4 +19,5 @@ venv
.idea
.venv
test.py
!config/config.yml.sample
!config/config.yml.sample
.flake8

11
.flake8 Normal file
View file

@ -0,0 +1,11 @@
[flake8]
ignore =
E226, # E226 Missing whitespace around arithmetic operator
#E302, # E302 Expected 2 blank lines, found 0
E401, # E401 Multiple imports on one line
E701, # E701 Multiple statements on one line (colon)
E241, # E241 Multiple spaces after ','
E272, # E272 Multiple spaces before keyword
C901, # C901 Function is too complex
E722 # E722 Do not use bare except, specify exception instead
max-line-length = 200

View file

@ -1 +1 @@
3.1.1
3.1.2

View file

@ -7,6 +7,9 @@ qbt:
user: "username"
pass: "password"
settings:
force_auto_tmm: False # Will force qBittorrent to enable Automatic Torrent Management for each torrent.
directory:
# Do not remove these
# Cross-seed var: </your/path/here/> # Output directory of cross-seed
@ -19,12 +22,12 @@ directory:
# Category & Path Parameters
cat:
# <Category Name> : <save_path> # Path of your save directory. Can be a keyword or full path
# <Category Name> : <save_path> # Path of your save directory.
movies: "/data/torrents/Movies"
tv: "TV"
tv: "/data/torrents/TV"
# Tag Parameters
tags:
tracker:
# <Tracker URL Keyword>: # <MANDATORY> This is the keyword in the tracker url
# <MANDATORY> Set tag name
# tag: <Tag Name>
@ -170,3 +173,7 @@ webhooks:
rem_orphaned: notifiarr
tag_nohardlinks: notifiarr
empty_recyclebin: notifiarr
# BHD Integration used for checking unregistered torrents
bhd:
apikey:

View file

@ -1,9 +1,8 @@
import logging
from modules.util import Failed
logger = logging.getLogger("qBit Manage")
class Apprise:
def __init__(self, config, params):
self.config = config
@ -11,4 +10,4 @@ class Apprise:
self.notify_url = ",".join(params["notify_url"])
response = self.config.get(self.api_url)
if response.status_code != 200:
raise Failed(f"Apprise Error: Unable to connect to Apprise using {self.api_url}")
raise Failed(f"Apprise Error: Unable to connect to Apprise using {self.api_url}")

33
modules/bhd.py Normal file
View file

@ -0,0 +1,33 @@
import logging
from modules.util import Failed
from json import JSONDecodeError
logger = logging.getLogger("qBit Manage")
base_url = "https://beyond-hd.me/api/"
class BeyondHD:
    """Minimal client for the Beyond-HD (BHD) API, used to check torrents."""

    def __init__(self, config, params):
        # Config object supplying trace_mode and the shared HTTP post() helper.
        self.config = config
        self.apikey = params["apikey"]
        # Issue a throwaway search at startup to validate the API key.
        json = {"search": "test"}
        self.search(json)

    def search(self, json, path="torrents/"):
        """POST a search request to the BHD API and return the decoded JSON body.

        Raises:
            Failed: on an undecodable response, an HTTP error status, or an
                API-level error (``success`` false in the response body).
        """
        url = f"{base_url}{path}{self.apikey}"
        json["action"] = "search"
        if self.config.trace_mode:
            logger.debug(url.replace(self.apikey, "APIKEY"))
            logger.debug(f"JSON: {json}")
        try:
            response = self.config.post(url, json=json)
            response_json = response.json()
        except JSONDecodeError as e:
            # Always re-raise: previously a decode failure with a non-error
            # status fell through with response_json unbound, causing a
            # NameError below instead of a clear Failed error.
            raise Failed(e)
        if response.status_code >= 400:
            logger.debug(f"Response: {response_json}")
            raise Failed(f"({response.status_code} [{response.reason}]) {response_json}")
        if not response_json["success"]:
            raise Failed(f"BHD Error: {response_json['status_message']}")
        # Return the already-decoded body instead of re-parsing response.json().
        return response_json

View file

@ -4,18 +4,19 @@ from modules.util import Failed, check
from modules.qbittorrent import Qbt
from modules.webhooks import Webhooks
from modules.notifiarr import Notifiarr
from modules.bhd import BeyondHD
from modules.apprise import Apprise
from ruamel import yaml
from retrying import retry
logger = logging.getLogger("qBit Manage")
class Config:
def __init__(self, default_dir, args):
logger.info("Locating config...")
self.args = args
config_file = args["config_file"]
if config_file and os.path.exists(config_file): self.config_path = os.path.abspath(config_file)
elif config_file and os.path.exists(os.path.join(default_dir, config_file)): self.config_path = os.path.abspath(os.path.join(default_dir, config_file))
elif config_file and not os.path.exists(config_file): raise Failed(f"Config Error: config not found at {os.path.abspath(config_file)}")
@ -28,14 +29,19 @@ class Config:
self.test_mode = args["test"] if "test" in args else False
self.trace_mode = args["trace"] if "trace" in args else False
self.start_time = args["time_obj"]
yaml.YAML().allow_duplicate_keys = True
try:
new_config, _, _ = yaml.util.load_yaml_guess_indent(open(self.config_path, encoding="utf-8"))
if "settings" not in new_config: new_config["settings"] = {}
if "cat" not in new_config: new_config["cat"] = {}
if "tracker" not in new_config and "tags" not in new_config: new_config["tracker"] = {}
if "qbt" in new_config: new_config["qbt"] = new_config.pop("qbt")
if "settings" in new_config: new_config["settings"] = new_config.pop("settings")
if "directory" in new_config: new_config["directory"] = new_config.pop("directory")
if "cat" in new_config: new_config["cat"] = new_config.pop("cat")
if "tags" in new_config: new_config["tags"] = new_config.pop("tags")
if "tracker" in new_config: new_config["tracker"] = new_config.pop("tracker")
elif "tags" in new_config: new_config["tracker"] = new_config.pop("tags")
if "nohardlinks" in new_config: new_config["nohardlinks"] = new_config.pop("nohardlinks")
if "recyclebin" in new_config: new_config["recyclebin"] = new_config.pop("recyclebin")
if "orphaned" in new_config: new_config["orphaned"] = new_config.pop("orphaned")
@ -44,14 +50,15 @@ class Config:
if "webhooks" in new_config:
temp = new_config.pop("webhooks")
if 'function' not in temp or ('function' in temp and temp['function'] is None): temp["function"] = {}
def hooks(attr):
if attr in temp:
items = temp.pop(attr)
if items:
temp["function"][attr]=items
temp["function"][attr] = items
if attr not in temp["function"]:
temp["function"][attr] = {}
temp["function"][attr]= None
temp["function"][attr] = None
hooks("cross_seed")
hooks("recheck")
hooks("cat_update")
@ -61,6 +68,7 @@ class Config:
hooks("tag_nohardlinks")
hooks("empty_recyclebin")
new_config["webhooks"] = temp
if "bhd" in new_config: new_config["bhd"] = new_config.pop("bhd")
yaml.round_trip_dump(new_config, open(self.config_path, "w", encoding="utf-8"), indent=None, block_seq_indent=2)
self.data = new_config
except yaml.scanner.ScannerError as e:
@ -70,10 +78,21 @@ class Config:
raise Failed(f"YAML Error: {e}")
self.session = requests.Session()
if self.data["cat"] is None: self.data["cat"] = {}
if self.data["tags"] is None: self.data["tags"] = {}
default_function = {'cross_seed':None,'recheck':None,'cat_update':None,'tag_update':None,'rem_unregistered':None,'rem_orphaned':None,'tag_nohardlinks':None,'empty_recyclebin':None}
self.settings = {
"force_auto_tmm": self.util.check_for_attribute(self.data, "force_auto_tmm", parent="settings", var_type="bool", default=False),
}
default_function = {
'cross_seed': None,
'recheck': None,
'cat_update': None,
'tag_update': None,
'rem_unregistered': None,
'rem_orphaned': None,
'tag_nohardlinks': None,
'empty_recyclebin': None}
self.webhooks = {
"error": self.util.check_for_attribute(self.data, "error", parent="webhooks", var_type="list", default_is_none=True),
"run_start": self.util.check_for_attribute(self.data, "run_start", parent="webhooks", var_type="list", default_is_none=True),
@ -94,7 +113,6 @@ class Config:
logger.error(e)
logger.info(f"Apprise Connection {'Failed' if self.AppriseFactory is None else 'Successful'}")
self.NotifiarrFactory = None
if "notifiarr" in self.data:
if self.data["notifiarr"] is not None:
@ -110,139 +128,158 @@ class Config:
logger.error(e)
logger.info(f"Notifiarr Connection {'Failed' if self.NotifiarrFactory is None else 'Successful'}")
self.Webhooks = Webhooks(self, self.webhooks, notifiarr=self.NotifiarrFactory,apprise=self.AppriseFactory)
self.Webhooks = Webhooks(self, self.webhooks, notifiarr=self.NotifiarrFactory, apprise=self.AppriseFactory)
try:
self.Webhooks.start_time_hooks(self.start_time)
except Failed as e:
util.print_stacktrace()
logger.error(f"Webhooks Error: {e}")
#nohardlinks
self.BeyondHD = None
if "bhd" in self.data:
if self.data["bhd"] is not None:
logger.info("Connecting to BHD API...")
try:
self.BeyondHD = BeyondHD(self, {
"apikey": self.util.check_for_attribute(self.data, "apikey", parent="bhd", throw=True)
})
except Failed as e:
logger.error(e)
self.notify(e, 'BHD')
logger.info(f"BHD Connection {'Failed' if self.BeyondHD is None else 'Successful'}")
# nohardlinks
self.nohardlinks = None
if "nohardlinks" in self.data and self.args['tag_nohardlinks']:
self.nohardlinks = {}
for cat in self.data["nohardlinks"]:
if cat in list(self.data["cat"].keys()):
self.nohardlinks[cat] = {}
self.nohardlinks[cat]["exclude_tags"] = self.util.check_for_attribute(self.data, "exclude_tags", parent="nohardlinks", subparent=cat, var_type="list", default_is_none=True,do_print=False)
self.nohardlinks[cat]["cleanup"] = self.util.check_for_attribute(self.data, "cleanup", parent="nohardlinks", subparent=cat, var_type="bool", default=False,do_print=False)
self.nohardlinks[cat]['max_ratio'] = self.util.check_for_attribute(self.data, "max_ratio", parent="nohardlinks", subparent=cat, var_type="float", default_int=-2, default_is_none=True,do_print=False)
self.nohardlinks[cat]['max_seeding_time'] = self.util.check_for_attribute(self.data, "max_seeding_time", parent="nohardlinks", subparent=cat, var_type="int", default_int=-2, default_is_none=True,do_print=False)
self.nohardlinks[cat]['limit_upload_speed'] = self.util.check_for_attribute(self.data, "limit_upload_speed", parent="nohardlinks", subparent=cat, var_type="int", default_int=-1, default_is_none=True,do_print=False)
self.nohardlinks[cat]["exclude_tags"] = self.util.check_for_attribute(self.data, "exclude_tags", parent="nohardlinks", subparent=cat,
var_type="list", default_is_none=True, do_print=False)
self.nohardlinks[cat]["cleanup"] = self.util.check_for_attribute(self.data, "cleanup", parent="nohardlinks", subparent=cat, var_type="bool", default=False, do_print=False)
self.nohardlinks[cat]['max_ratio'] = self.util.check_for_attribute(self.data, "max_ratio", parent="nohardlinks", subparent=cat,
var_type="float", default_int=-2, default_is_none=True, do_print=False)
self.nohardlinks[cat]['max_seeding_time'] = self.util.check_for_attribute(self.data, "max_seeding_time", parent="nohardlinks", subparent=cat,
var_type="int", default_int=-2, default_is_none=True, do_print=False)
self.nohardlinks[cat]['limit_upload_speed'] = self.util.check_for_attribute(self.data, "limit_upload_speed", parent="nohardlinks", subparent=cat,
var_type="int", default_int=-1, default_is_none=True, do_print=False)
else:
e = (f"Config Error: Category {cat} is defined under nohardlinks attribute but is not defined in the cat attribute.")
self.notify(e,'Config')
self.notify(e, 'Config')
raise Failed(e)
else:
if self.args["tag_nohardlinks"]:
e = "Config Error: nohardlinks attribute not found"
self.notify(e,'Config')
self.notify(e, 'Config')
raise Failed(e)
#Add RecycleBin
# Add RecycleBin
self.recyclebin = {}
self.recyclebin['enabled'] = self.util.check_for_attribute(self.data, "enabled", parent="recyclebin",var_type="bool",default=True)
self.recyclebin['empty_after_x_days'] = self.util.check_for_attribute(self.data, "empty_after_x_days", parent="recyclebin",var_type="int",default_is_none=True)
#Add Orphaned
self.orphaned = {}
self.orphaned['exclude_patterns'] = self.util.check_for_attribute(self.data, "exclude_patterns", parent="orphaned",var_type="list",default_is_none=True,do_print=False)
self.recyclebin['enabled'] = self.util.check_for_attribute(self.data, "enabled", parent="recyclebin", var_type="bool", default=True)
self.recyclebin['empty_after_x_days'] = self.util.check_for_attribute(self.data, "empty_after_x_days", parent="recyclebin", var_type="int", default_is_none=True)
#Assign directories
# Add Orphaned
self.orphaned = {}
self.orphaned['exclude_patterns'] = self.util.check_for_attribute(self.data, "exclude_patterns", parent="orphaned", var_type="list", default_is_none=True, do_print=False)
# Assign directories
if "directory" in self.data:
self.root_dir = self.util.check_for_attribute(self.data, "root_dir", parent="directory",default_is_none=True)
self.remote_dir = self.util.check_for_attribute(self.data, "remote_dir", parent="directory",default=self.root_dir)
self.root_dir = self.util.check_for_attribute(self.data, "root_dir", parent="directory", default_is_none=True)
self.remote_dir = self.util.check_for_attribute(self.data, "remote_dir", parent="directory", default=self.root_dir)
if (self.args["cross_seed"] or self.args["tag_nohardlinks"] or self.args["rem_orphaned"]):
self.remote_dir = self.util.check_for_attribute(self.data, "remote_dir", parent="directory",var_type="path",default=self.root_dir)
self.remote_dir = self.util.check_for_attribute(self.data, "remote_dir", parent="directory", var_type="path", default=self.root_dir)
else:
if self.recyclebin['enabled']:
self.remote_dir = self.util.check_for_attribute(self.data, "remote_dir", parent="directory",var_type="path",default=self.root_dir)
self.remote_dir = self.util.check_for_attribute(self.data, "remote_dir", parent="directory", var_type="path", default=self.root_dir)
if self.args["cross_seed"]:
self.cross_seed_dir = self.util.check_for_attribute(self.data, "cross_seed", parent="directory",var_type="path")
self.cross_seed_dir = self.util.check_for_attribute(self.data, "cross_seed", parent="directory", var_type="path")
else:
self.cross_seed_dir = self.util.check_for_attribute(self.data, "cross_seed", parent="directory",default_is_none=True)
self.recycle_dir = os.path.join(self.remote_dir,'.RecycleBin')
self.cross_seed_dir = self.util.check_for_attribute(self.data, "cross_seed", parent="directory", default_is_none=True)
self.recycle_dir = os.path.join(self.remote_dir, '.RecycleBin')
else:
e = "Config Error: directory attribute not found"
self.notify(e,'Config')
self.notify(e, 'Config')
raise Failed(e)
#Connect to Qbittorrent
# Connect to Qbittorrent
self.qbt = None
if "qbt" in self.data:
logger.info("Connecting to Qbittorrent...")
self.qbt = Qbt(self, {
"host": self.util.check_for_attribute(self.data, "host", parent="qbt", throw=True),
"username": self.util.check_for_attribute(self.data, "user", parent="qbt", default_is_none=True),
"password": self.util.check_for_attribute(self.data, "pass", parent="qbt", default_is_none=True)
})
logger.info("Connecting to Qbittorrent...")
self.qbt = Qbt(self, {
"host": self.util.check_for_attribute(self.data, "host", parent="qbt", throw=True),
"username": self.util.check_for_attribute(self.data, "user", parent="qbt", default_is_none=True),
"password": self.util.check_for_attribute(self.data, "pass", parent="qbt", default_is_none=True)
})
else:
e = "Config Error: qbt attribute not found"
self.notify(e,'Config')
self.notify(e, 'Config')
raise Failed(e)
#Get tags from config file based on keyword
def get_tags(self,urls):
tags = {}
tags['new_tag'] = None
tags['max_ratio'] = None
tags['max_seeding_time'] = None
tags['limit_upload_speed'] = None
tags['notifiarr'] = None
tags['url'] = None
if not urls: return tags
# Get tags from config file based on keyword
def get_tags(self, urls):
tracker = {}
tracker['tag'] = None
tracker['max_ratio'] = None
tracker['max_seeding_time'] = None
tracker['limit_upload_speed'] = None
tracker['notifiarr'] = None
tracker['url'] = None
if not urls: return tracker
try:
tags['url'] = util.trunc_val(urls[0], '/')
tracker['url'] = util.trunc_val(urls[0], '/')
except IndexError as e:
tags['url'] = None
tracker['url'] = None
logger.debug(f"Tracker Url:{urls}")
logger.debug(e)
if 'tags' in self.data and self.data["tags"] is not None:
tag_values = self.data['tags']
if 'tracker' in self.data and self.data["tracker"] is not None:
tag_values = self.data['tracker']
for tag_url, tag_details in tag_values.items():
for url in urls:
if tag_url in url:
try:
tags['url'] = util.trunc_val(url, '/')
default_tag = tags['url'].split('/')[2].split(':')[0]
tracker['url'] = util.trunc_val(url, '/')
default_tag = tracker['url'].split('/')[2].split(':')[0]
except IndexError as e:
logger.debug(f"Tracker Url:{url}")
logger.debug(e)
# If using Format 1 convert to format 2
if isinstance(tag_details,str):
tags['new_tag'] = self.util.check_for_attribute(self.data, tag_url, parent="tags",default=default_tag)
self.util.check_for_attribute(self.data, "tag", parent="tags",subparent=tag_url, default=tags['new_tag'],do_print=False)
if tags['new_tag'] == default_tag:
if isinstance(tag_details, str):
tracker['tag'] = self.util.check_for_attribute(self.data, tag_url, parent="tracker", default=default_tag)
self.util.check_for_attribute(self.data, "tag", parent="tracker", subparent=tag_url, default=tracker['tag'], do_print=False)
if tracker['tag'] == default_tag:
try:
self.data['tags'][tag_url]['tag'] = default_tag
except Exception as e:
self.data['tags'][tag_url] = {'tag': default_tag}
self.data['tracker'][tag_url]['tag'] = default_tag
except Exception:
self.data['tracker'][tag_url] = {'tag': default_tag}
# Using Format 2
else:
tags['new_tag'] = self.util.check_for_attribute(self.data, "tag", parent="tags", subparent=tag_url, default=tag_url)
if tags['new_tag'] == tag_url: self.data['tags'][tag_url]['tag'] = tag_url
tags['max_ratio'] = self.util.check_for_attribute(self.data, "max_ratio", parent="tags", subparent=tag_url, var_type="float", default_int=-2, default_is_none=True,do_print=False,save=False)
tags['max_seeding_time'] = self.util.check_for_attribute(self.data, "max_seeding_time", parent="tags", subparent=tag_url, var_type="int", default_int=-2, default_is_none=True,do_print=False,save=False)
tags['limit_upload_speed'] = self.util.check_for_attribute(self.data, "limit_upload_speed", parent="tags", subparent=tag_url, var_type="int", default_int=-1, default_is_none=True,do_print=False,save=False)
tags['notifiarr'] = self.util.check_for_attribute(self.data, "notifiarr", parent="tags", subparent=tag_url, default_is_none=True, do_print=False,save=False)
return (tags)
if tags['url']:
default_tag = tags['url'].split('/')[2].split(':')[0]
tags['new_tag'] = self.util.check_for_attribute(self.data, "tag", parent="tags",subparent=default_tag, default=default_tag)
tracker['tag'] = self.util.check_for_attribute(self.data, "tag", parent="tracker", subparent=tag_url, default=tag_url)
if tracker['tag'] == tag_url: self.data['tracker'][tag_url]['tag'] = tag_url
tracker['max_ratio'] = self.util.check_for_attribute(self.data, "max_ratio", parent="tracker", subparent=tag_url,
var_type="float", default_int=-2, default_is_none=True, do_print=False, save=False)
tracker['max_seeding_time'] = self.util.check_for_attribute(self.data, "max_seeding_time", parent="tracker", subparent=tag_url,
var_type="int", default_int=-2, default_is_none=True, do_print=False, save=False)
tracker['limit_upload_speed'] = self.util.check_for_attribute(self.data, "limit_upload_speed", parent="tracker", subparent=tag_url,
var_type="int", default_int=-1, default_is_none=True, do_print=False, save=False)
tracker['notifiarr'] = self.util.check_for_attribute(self.data, "notifiarr", parent="tracker", subparent=tag_url, default_is_none=True, do_print=False, save=False)
return (tracker)
if tracker['url']:
default_tag = tracker['url'].split('/')[2].split(':')[0]
tracker['tag'] = self.util.check_for_attribute(self.data, "tag", parent="tracker", subparent=default_tag, default=default_tag)
try:
self.data['tags'][default_tag]['tag'] = default_tag
except Exception as e:
self.data['tags'][default_tag] = {'tag': default_tag}
e = (f'No tags matched for {tags["url"]}. Please check your config.yml file. Setting tag to {default_tag}')
self.notify(e,'Tag',False)
self.data['tracker'][default_tag]['tag'] = default_tag
except Exception:
self.data['tracker'][default_tag] = {'tag': default_tag}
e = (f'No tags matched for {tracker["url"]}. Please check your config.yml file. Setting tag to {default_tag}')
self.notify(e, 'Tag', False)
logger.warning(e)
return (tags)
return (tracker)
#Get category from config file based on path provided
def get_category(self,path):
# Get category from config file based on path provided
def get_category(self, path):
category = ''
path = os.path.join(path,'')
path = os.path.join(path, '')
if "cat" in self.data and self.data["cat"] is not None:
cat_path = self.data["cat"]
for cat, save_path in cat_path.items():
@ -251,14 +288,15 @@ class Config:
break
if not category:
default_cat = path.split('/')[-2]
category = self.util.check_for_attribute(self.data, default_cat, parent="cat",default=path)
category = str(default_cat)
self.util.check_for_attribute(self.data, default_cat, parent="cat", default=path)
self.data['cat'][str(default_cat)] = path
e = (f'No categories matched for the save path {path}. Check your config.yml file. - Setting category to {default_cat}')
self.notify(e,'Category',False)
self.notify(e, 'Category', False)
logger.warning(e)
return category
#Empty the recycle bin
# Empty the recycle bin
def empty_recycle(self):
dry_run = self.args['dry_run']
loglevel = 'DRYRUN' if dry_run else 'INFO'
@ -274,9 +312,9 @@ class Config:
util.separator(f"Emptying Recycle Bin (Files > {self.recyclebin['empty_after_x_days']} days)", space=False, border=False)
for file in recycle_files:
fileStats = os.stat(file)
filename = file.replace(self.recycle_dir,'')
last_modified = fileStats[stat.ST_MTIME] # in seconds (last modified time)
now = time.time() # in seconds
filename = file.replace(self.recycle_dir, '')
last_modified = fileStats[stat.ST_MTIME] # in seconds (last modified time)
now = time.time() # in seconds
days = (now - last_modified) / (60 * 60 * 24)
if (self.recyclebin['empty_after_x_days'] <= days):
num_del += 1
@ -285,17 +323,17 @@ class Config:
size_bytes += os.path.getsize(file)
if not dry_run: os.remove(file)
if num_del > 0:
if not dry_run: util.remove_empty_directories(self.recycle_dir,"**/*")
if not dry_run: util.remove_empty_directories(self.recycle_dir, "**/*")
body = []
body += util.print_multiline(n_info,loglevel)
body += util.print_line(f"{'Did not delete' if dry_run else 'Deleted'} {num_del} files ({util.human_readable_size(size_bytes)}) from the Recycle Bin.",loglevel)
body += util.print_multiline(n_info, loglevel)
body += util.print_line(f"{'Did not delete' if dry_run else 'Deleted'} {num_del} files ({util.human_readable_size(size_bytes)}) from the Recycle Bin.", loglevel)
attr = {
"function":"empty_recyclebin",
"title":f"Emptying Recycle Bin (Files > {self.recyclebin['empty_after_x_days']} days)",
"body": "\n".join(body),
"files":files,
"empty_after_x_days": self.recyclebin['empty_after_x_days'],
"size_in_bytes":size_bytes
"function": "empty_recyclebin",
"title": f"Emptying Recycle Bin (Files > {self.recyclebin['empty_after_x_days']} days)",
"body": "\n".join(body),
"files": files,
"empty_after_x_days": self.recyclebin['empty_after_x_days'],
"size_in_bytes": size_bytes
}
self.send_notifications(attr)
else:
@ -303,20 +341,20 @@ class Config:
return num_del
def send_notifications(self, attr):
try:
function = attr['function']
config_webhooks = self.Webhooks.function_webhooks
config_function = None
for key in config_webhooks:
if key in function:
config_function = key
break
if config_function:
self.Webhooks.function_hooks([config_webhooks[config_function]],attr)
except Failed as e:
util.print_stacktrace()
logger.error(f"Webhooks Error: {e}")
try:
function = attr['function']
config_webhooks = self.Webhooks.function_webhooks
config_function = None
for key in config_webhooks:
if key in function:
config_function = key
break
if config_function:
self.Webhooks.function_hooks([config_webhooks[config_function]], attr)
except Failed as e:
util.print_stacktrace()
logger.error(f"Webhooks Error: {e}")
def notify(self, text, function=None, critical=True):
for error in util.get_list(text, split=False):
try:
@ -325,9 +363,6 @@ class Config:
util.print_stacktrace()
logger.error(f"Webhooks Error: {e}")
def get_html(self, url, headers=None, params=None):
return html.fromstring(self.get(url, headers=headers, params=params).content)
def get_json(self, url, json=None, headers=None, params=None):
return self.get(url, json=json, headers=headers, params=params).json()
@ -335,12 +370,6 @@ class Config:
def get(self, url, json=None, headers=None, params=None):
return self.session.get(url, json=json, headers=headers, params=params)
def get_image_encoded(self, url):
return base64.b64encode(self.get(url).content).decode('utf-8')
def post_html(self, url, data=None, json=None, headers=None):
return html.fromstring(self.post(url, data=data, json=json, headers=headers).content)
def post_json(self, url, data=None, json=None, headers=None):
return self.post(url, data=data, json=json, headers=headers).json()

View file

@ -23,7 +23,7 @@ class Notifiarr:
except JSONDecodeError as e:
if response.status_code >= 400:
if response.status_code == 525:
raise Failed(f"Notifiarr Error (Response: 525): SSL handshake between Cloudflare and the origin web server failed.")
raise Failed("Notifiarr Error (Response: 525): SSL handshake between Cloudflare and the origin web server failed.")
else:
raise Failed(e)
if response.status_code >= 400 or ("result" in response_json and response_json["result"] == "error"):
@ -37,7 +37,7 @@ class Notifiarr:
if self.config.trace_mode:
logger.debug(url.replace(self.apikey, "APIKEY"))
if self.test:
params = {"event": f"qbitManage-{self.apikey[:5]}", "qbit_client":self.config.data["qbt"]["host"], "instance":self.instance}
params = {"event": f"qbitManage-{self.apikey[:5]}", "qbit_client": self.config.data["qbt"]["host"], "instance": self.instance}
else:
params = {"qbit_client":self.config.data["qbt"]["host"], "instance":self.instance}
return url, params
params = {"qbit_client": self.config.data["qbt"]["host"], "instance": self.instance}
return url, params

File diff suppressed because it is too large Load diff

View file

@ -5,6 +5,7 @@ from pathlib import Path
logger = logging.getLogger('qBit Manage')
def get_list(data, lower=False, split=True, int_list=False):
if data is None: return None
elif isinstance(data, list): return data
@ -16,11 +17,25 @@ def get_list(data, lower=False, split=True, int_list=False):
except ValueError: return []
else: return [d.strip() for d in str(data).split(",")]
class check:
def __init__(self, config):
self.config = config
def check_for_attribute(self, data, attribute, parent=None, subparent=None, test_list=None, default=None, do_print=True, default_is_none=False, req_default=False, var_type="str", default_int=0, throw=False, save=True):
def check_for_attribute(self,
data,
attribute,
parent=None,
subparent=None,
test_list=None,
default=None,
do_print=True,
default_is_none=False,
req_default=False,
var_type="str",
default_int=0,
throw=False,
save=True):
endline = ""
if parent is not None:
if subparent is not None:
@ -35,7 +50,6 @@ class check:
else:
data = None
do_print = False
#save = False
if subparent is not None:
text = f"{parent}->{subparent} sub-attribute {attribute}"
@ -53,7 +67,7 @@ class check:
if subparent not in loaded_config[parent] or not loaded_config[parent][subparent]:
loaded_config[parent][subparent] = {attribute: default}
elif attribute not in loaded_config[parent]:
if isinstance(loaded_config[parent][subparent],str):
if isinstance(loaded_config[parent][subparent], str):
loaded_config[parent][subparent] = {attribute: default}
loaded_config[parent][subparent][attribute] = default
else:
@ -101,7 +115,7 @@ class check:
message = f"{text} must a float >= {float(default_int)}"
elif var_type == "path":
if os.path.exists(os.path.abspath(data[attribute])):
return os.path.join(data[attribute],'')
return os.path.join(data[attribute], '')
else:
message = f"Path {os.path.abspath(data[attribute])} does not exist"
elif var_type == "list":
@ -120,7 +134,7 @@ class check:
else:
message = f"{text}: {data[attribute]} is an invalid input"
if var_type == "path" and default and os.path.exists(os.path.abspath(default)):
return os.path.join(default,'')
return os.path.join(default, '')
elif var_type == "path" and default:
if data and attribute in data and data[attribute]:
message = f"neither {data[attribute]} or the default path {default} could be found"
@ -148,18 +162,20 @@ class check:
if data and attribute in data and data[attribute] and test_list is not None and data[attribute] not in test_list:
print_multiline(options)
return default
class Failed(Exception):
    """Generic failure raised throughout these modules for configuration, connection, and API errors."""
    pass
separating_character = "="
screen_width = 100
spacing = 0
def tab_new_lines(data):
    """Prefix embedded newlines so multi-line values stay aligned in log output."""
    text = str(data)
    if "\n" not in text:
        return text
    return text.replace("\n", "\n|\t ")
def print_stacktrace():
print_multiline(traceback.format_exc())
def add_dict_list(keys, value, dict_map):
for key in keys:
@ -168,17 +184,12 @@ def add_dict_list(keys, value, dict_map):
else:
dict_map[key] = [value]
def get_int_list(data, id_type):
    """Parse *data* into a list of ints via regex_first_int, logging (and skipping) entries that fail."""
    results = []
    for item in get_list(data):
        try:
            results.append(regex_first_int(item, id_type))
        except Failed as err:
            logger.error(err)
    return results
def print_line(lines, loglevel='INFO'):
    """Log *lines* as a single message at *loglevel* and return it wrapped in a one-element list."""
    message = str(lines)
    level = getattr(logging, loglevel.upper())
    logger.log(level, message)
    return [message]
def print_multiline(lines, loglevel='INFO'):
for i, line in enumerate(str(lines).split("\n")):
logger.log(getattr(logging, loglevel.upper()), line)
@ -187,13 +198,16 @@ def print_multiline(lines, loglevel='INFO'):
logger.handlers[1].setFormatter(logging.Formatter("[%(asctime)s] %(filename)-27s %(levelname)-10s | %(message)s"))
return [(str(lines))]
def print_stacktrace():
print_multiline(traceback.format_exc(), 'CRITICAL')
def my_except_hook(exctype, value, tb):
    """sys.excepthook replacement: route uncaught exception tracebacks to the log at CRITICAL."""
    # Positional arguments: the etype= keyword was deprecated in Python 3.8
    # and removed in 3.10, where it raises TypeError.
    for line in traceback.format_exception(exctype, value, tb):
        print_multiline(line, 'CRITICAL')
def centered(text, sep=" "):
if len(text) > screen_width - 2:
return text
@ -206,6 +220,7 @@ def centered(text, sep=" "):
final_text = f"{sep * side}{text}{sep * side}"
return final_text
def separator(text=None, space=True, border=True, loglevel='INFO'):
sep = " " if space else separating_character
for handler in logger.handlers:
@ -224,13 +239,15 @@ def separator(text=None, space=True, border=True, loglevel='INFO'):
apply_formatter(handler)
return [text]
def apply_formatter(handler, border=True):
    """Install the standard message formatter on *handler*.

    Console output is padded to screen_width (optionally boxed with '|');
    rotating file handlers additionally get a timestamp/filename/level prefix.
    """
    text = f"| %(message)-{screen_width - 2}s |" if border else f"%(message)-{screen_width - 2}s"
    if isinstance(handler, RotatingFileHandler):
        text = f"[%(asctime)s] %(filename)-27s %(levelname)-10s {text}"
    handler.setFormatter(logging.Formatter(text))
def adjust_space(display_title):
display_title = str(display_title)
space_length = spacing - len(display_title)
@ -238,6 +255,7 @@ def adjust_space(display_title):
display_title += " " * space_length
return display_title
def insert_space(display_title, space_length=0):
display_title = str(display_title)
if space_length == 0:
@ -246,34 +264,39 @@ def insert_space(display_title, space_length=0):
display_title = " " * space_length + display_title
return display_title
def print_return(text):
    # Overwrite the current console line in place (carriage return, no newline).
    # Note: adjust_space reads the module-level `spacing` from the PREVIOUS call
    # to blank out any leftover characters, so the print must happen before the
    # update below.
    print(adjust_space(f"| {text}"), end="\r")
    global spacing
    # Remember this line's width ("| " prefix = 2 chars) for the next call.
    spacing = len(text) + 2
def print_end():
    # Blank out the in-place status line written by print_return; adjust_space
    # still needs the old `spacing` value here, so print before resetting it.
    print(adjust_space(" "), end="\r")
    global spacing
    spacing = 0
# truncate the value of the torrent url to remove sensitive information
def trunc_val(s, d, n=3):
    """Return *s* truncated to its first *n* segments delimited by *d*, or None on failure."""
    try:
        x = d.join(s.split(d, n)[:n])
    except IndexError:
        # Defensive: keep the None fallback the callers rely on.
        x = None
    return x
# Move files from source to destination, mod variable is to change the date modified of the file being moved
def move_files(src, dest, mod=False):
    """Move *src* to *dest*, creating missing parent directories.

    If *mod* is truthy, set the destination's access/modified times to now.
    """
    dest_path = os.path.dirname(dest)
    # exist_ok avoids the check-then-create race of isdir() + makedirs().
    os.makedirs(dest_path, exist_ok=True)
    shutil.move(src, dest)
    if mod:
        mod_time = time.time()
        os.utime(dest, (mod_time, mod_time))
# Remove any empty directories after moving files
def remove_empty_directories(pathlib_root_dir, pattern):
pathlib_root_dir = Path(pathlib_root_dir)
@ -290,7 +313,8 @@ def remove_empty_directories(pathlib_root_dir, pattern):
except OSError:
continue # catch and continue if non-empty
#will check if there are any hard links if it passes a file or folder
# will check if there are any hard links if it passes a file or folder
def nohardlink(file):
check = True
if (os.path.isfile(file)):
@ -299,22 +323,26 @@ def nohardlink(file):
else:
for path, subdirs, files in os.walk(file):
for x in files:
if (os.stat(os.path.join(path,x)).st_nlink > 1):
if (os.stat(os.path.join(path, x)).st_nlink > 1):
check = False
return check
#Gracefully kill script when docker stops
# Gracefully kill script when docker stops
class GracefulKiller:
kill_now = False
def __init__(self):
#signal.signal(signal.SIGINT, self.exit_gracefully)
signal.signal(signal.SIGTERM, self.exit_gracefully)
def exit_gracefully(self, *args):
self.kill_now = True
kill_now = False
def __init__(self):
# signal.signal(signal.SIGINT, self.exit_gracefully)
signal.signal(signal.SIGTERM, self.exit_gracefully)
def exit_gracefully(self, *args):
self.kill_now = True
def human_readable_size(size, decimal_places=3):
for unit in ['B','KiB','MiB','GiB','TiB']:
for unit in ['B', 'KiB', 'MiB', 'GiB', 'TiB']:
if size < 1024.0:
break
size /= 1024.0
return f"{size:.{decimal_places}f}{unit}"
return f"{size:.{decimal_places}f}{unit}"

View file

@ -5,6 +5,7 @@ from modules.util import Failed
logger = logging.getLogger("qBit Manage")
class Webhooks:
def __init__(self, config, system_webhooks, notifiarr=None, apprise=None):
self.config = config
@ -14,7 +15,7 @@ class Webhooks:
if "function" in system_webhooks and system_webhooks["function"] is not None:
try:
self.function_webhooks = system_webhooks["function"][0]
except (IndexError,KeyError) as e:
except (IndexError, KeyError):
self.function_webhooks = []
else:
self.function_webhooks = []
@ -29,7 +30,7 @@ class Webhooks:
response = None
if self.config.trace_mode:
logger.debug(f"Webhook: {webhook}")
if webhook == None:
if webhook is None:
break
elif webhook == "notifiarr":
if self.notifiarr is None:
@ -42,7 +43,7 @@ class Webhooks:
break
elif webhook == "apprise":
if self.apprise is None:
logger.warning(f"Webhook attribute set to apprise but apprise attribute is not configured.")
logger.warning("Webhook attribute set to apprise but apprise attribute is not configured.")
break
else:
json['urls'] = self.apprise.notify_url
@ -64,7 +65,7 @@ class Webhooks:
skip = True
else:
raise Failed(f"Notifiarr Error: {response_json['details']['response']}")
if (response.status_code >= 400 or ("result" in response_json and response_json["result"] == "error")) and skip == False:
if (response.status_code >= 400 or ("result" in response_json and response_json["result"] == "error")) and skip is False:
raise Failed(f"({response.status_code} [{response.reason}]) {response_json}")
except JSONDecodeError:
if response.status_code >= 400:
@ -78,22 +79,17 @@ class Webhooks:
else:
start_type = ""
self._request(self.run_start_webhooks, {
"function":"run_start",
"function": "run_start",
"title": None,
"body":f"Starting {start_type}Run",
"body": f"Starting {start_type}Run",
"start_time": start_time.strftime("%Y-%m-%d %H:%M:%S"),
"dry_run": self.config.args['dry_run']
})
def end_time_hooks(self, start_time, end_time, run_time, stats, body):
dry_run = self.config.args['dry_run']
if dry_run:
start_type = "Dry-"
else:
start_type = ""
if self.run_end_webhooks:
self._request(self.run_end_webhooks, {
"function":"run_end",
"function": "run_end",
"title": None,
"body": body,
"start_time": start_time.strftime("%Y-%m-%d %H:%M:%S"),
@ -107,6 +103,7 @@ class Webhooks:
"torrents_categorized": stats["categorized"],
"torrents_tagged": stats["tagged"],
"remove_unregistered": stats["rem_unreg"],
"potential_unregistered": stats["pot_unreg"],
"orphaned_files_found": stats["orphaned"],
"torrents_tagged_no_hardlinks": stats["taggednoHL"],
"torrents_untagged_no_hardlinks": stats["untagged"],
@ -115,12 +112,18 @@ class Webhooks:
def error_hooks(self, text, function_error=None, critical=True):
if self.error_webhooks:
type = "failure" if critical == True else "warning"
json = {"function":"run_error","title":f"{function_error} Error","body": str(text), "critical": critical, "type": type}
type = "failure" if critical is True else "warning"
json = {
"function": "run_error",
"title": f"{function_error} Error",
"body": str(text),
"critical": critical,
"type": type
}
if function_error:
json["function_error"] = function_error
self._request(self.error_webhooks, json)
def function_hooks(self, webhook, json):
if self.function_webhooks:
self._request(webhook, json)
self._request(webhook, json)

View file

@ -2,7 +2,7 @@
import argparse, logging, os, sys, time
from logging.handlers import RotatingFileHandler
from datetime import datetime,timedelta
from datetime import datetime, timedelta
try:
import schedule
@ -24,22 +24,31 @@ parser.add_argument("-db", "--debug", dest="debug", help=argparse.SUPPRESS, acti
parser.add_argument("-tr", "--trace", dest="trace", help=argparse.SUPPRESS, action="store_true", default=False)
parser.add_argument('-r', '--run', dest='run', action='store_true', default=False, help='Run without the scheduler. Script will exit after completion.')
parser.add_argument('-sch', '--schedule', dest='min', default='1440', type=str, help='Schedule to run every x minutes. (Default set to 1440 (1 day))')
parser.add_argument('-c', '--config-file', dest='configfile', action='store', default='config.yml', type=str, help='This is used if you want to use a different name for your config.yml. Example: tv.yml')
parser.add_argument('-lf', '--log-file', dest='logfile', action='store',default='activity.log', type=str, help='This is used if you want to use a different name for your log file. Example: tv.log',)
parser.add_argument('-cs', '--cross-seed', dest='cross_seed', action="store_true", default=False, help='Use this after running cross-seed script to add torrents from the cross-seed output folder to qBittorrent')
parser.add_argument('-c', '--config-file', dest='configfile', action='store', default='config.yml', type=str,
help='This is used if you want to use a different name for your config.yml. Example: tv.yml')
parser.add_argument('-lf', '--log-file', dest='logfile', action='store', default='activity.log', type=str, help='This is used if you want to use a different name for your log file. Example: tv.log',)
parser.add_argument('-cs', '--cross-seed', dest='cross_seed', action="store_true", default=False,
help='Use this after running cross-seed script to add torrents from the cross-seed output folder to qBittorrent')
parser.add_argument('-re', '--recheck', dest='recheck', action="store_true", default=False, help='Recheck paused torrents sorted by lowest size. Resume if Completed.')
parser.add_argument('-cu', '--cat-update', dest='cat_update', action="store_true", default=False, help='Use this if you would like to update your categories.')
parser.add_argument('-tu', '--tag-update', dest='tag_update', action="store_true", default=False, help='Use this if you would like to update your tags and/or set seed goals/limit upload speed by tag. (Only adds tags to untagged torrents)')
parser.add_argument('-tu', '--tag-update', dest='tag_update', action="store_true", default=False,
help='Use this if you would like to update your tags and/or set seed goals/limit upload speed by tag. (Only adds tags to untagged torrents)')
parser.add_argument('-ru', '--rem-unregistered', dest='rem_unregistered', action="store_true", default=False, help='Use this if you would like to remove unregistered torrents.')
parser.add_argument('-ro', '--rem-orphaned', dest='rem_orphaned', action="store_true", default=False, help='Use this if you would like to remove unregistered torrents.')
parser.add_argument('-tnhl', '--tag-nohardlinks', dest='tag_nohardlinks', action="store_true", default=False, help='Use this to tag any torrents that do not have any hard links associated with any of the files. This is useful for those that use Sonarr/Radarr which hard link your media files with the torrents for seeding. When files get upgraded they no longer become linked with your media therefore will be tagged with a new tag noHL. You can then safely delete/remove these torrents to free up any extra space that is not being used by your media folder.')
parser.add_argument('-tnhl', '--tag-nohardlinks', dest='tag_nohardlinks', action="store_true", default=False,
help='Use this to tag any torrents that do not have any hard links associated with any of the files. \
This is useful for those that use Sonarr/Radarr which hard link your media files with the torrents for seeding. \
When files get upgraded they no longer become linked with your media therefore will be tagged with a new tag noHL. \
You can then safely delete/remove these torrents to free up any extra space that is not being used by your media folder.')
parser.add_argument('-sr', '--skip-recycle', dest='skip_recycle', action="store_true", default=False, help='Use this to skip emptying the Reycle Bin folder.')
parser.add_argument('-dr', '--dry-run', dest='dry_run', action="store_true", default=False, help='If you would like to see what is gonna happen but not actually move/delete or tag/categorize anything.')
parser.add_argument('-dr', '--dry-run', dest='dry_run', action="store_true", default=False,
help='If you would like to see what is gonna happen but not actually move/delete or tag/categorize anything.')
parser.add_argument('-ll', '--log-level', dest='log_level', action="store", default='INFO', type=str, help='Change your log level.')
parser.add_argument("-d", "--divider", dest="divider", help="Character that divides the sections (Default: '=')", default="=", type=str)
parser.add_argument("-w", "--width", dest="width", help="Screen Width (Default: 100)", default=100, type=int)
args = parser.parse_args()
def get_arg(env_str, default, arg_bool=False, arg_int=False):
env_var = os.environ.get(env_str)
if env_var:
@ -57,6 +66,7 @@ def get_arg(env_str, default, arg_bool=False, arg_int=False):
else:
return default
run = get_arg("QBT_RUN", args.run, arg_bool=True)
sch = get_arg("QBT_SCHEDULE", args.min)
config_file = get_arg("QBT_CONFIG", args.configfile)
@ -81,12 +91,31 @@ if debug or trace: log_level = 'DEBUG'
stats = {}
args = {}
if os.path.isdir('/config') and os.path.exists(os.path.join('/config',config_file)):
if os.path.isdir('/config') and os.path.exists(os.path.join('/config', config_file)):
default_dir = '/config'
else:
default_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "config")
for v in ['run','sch','config_file','log_file','cross_seed','recheck','cat_update','tag_update','rem_unregistered','rem_orphaned','tag_nohardlinks','skip_recycle','dry_run','log_level','divider','screen_width','debug','trace']:
for v in [
'run',
'sch',
'config_file',
'log_file',
'cross_seed',
'recheck',
'cat_update',
'tag_update',
'rem_unregistered',
'rem_orphaned',
'tag_nohardlinks',
'skip_recycle',
'dry_run',
'log_level',
'divider',
'screen_width',
'debug',
'trace'
]:
args[v] = eval(v)
util.separating_character = divider[0]
@ -96,7 +125,7 @@ if screen_width < 90 or screen_width > 300:
screen_width = 100
util.screen_width = screen_width
#Check if Schedule parameter is a number
# Check if Schedule parameter is a number
try:
sch = int(sch)
except ValueError:
@ -110,11 +139,13 @@ setattr(logger, 'dryrun', lambda dryrun, *args: logger._log(logging.DRYRUN, dryr
log_lev = getattr(logging, log_level.upper())
logger.setLevel(log_lev)
def fmt_filter(record):
record.levelname = f"[{record.levelname}]"
record.filename = f"[{record.filename}:{record.lineno}]"
return True
cmd_handler = logging.StreamHandler()
cmd_handler.setLevel(log_level)
logger.addHandler(cmd_handler)
@ -140,6 +171,7 @@ util.apply_formatter(file_handler)
file_handler.addFilter(fmt_filter)
logger.addHandler(file_handler)
def start():
start_time = datetime.now()
args["time"] = start_time.strftime("%H:%M")
@ -152,70 +184,73 @@ def start():
util.separator(f"Starting {start_type}Run")
cfg = None
global stats
stats = {
stats = {
"added": 0,
"deleted": 0,
"deleted_contents": 0,
"resumed": 0,
"rechecked": 0,
"orphaned":0,
"orphaned": 0,
"recycle_emptied": 0,
"tagged": 0,
"untagged":0,
"untagged": 0,
"categorized": 0,
"rem_unreg": 0,
"pot_unreg": 0,
"taggednoHL": 0
}
try:
cfg = Config(default_dir,args)
cfg = Config(default_dir, args)
except Exception as e:
util.print_stacktrace()
util.print_multiline(e,'CRITICAL')
util.print_multiline(e, 'CRITICAL')
if cfg:
#Set Category
# Set Category
num_categorized = cfg.qbt.category()
stats["categorized"] += num_categorized
#Set Tags
# Set Tags
num_tagged = cfg.qbt.tags()
stats["tagged"] += num_tagged
#Remove Unregistered Torrents
num_deleted,num_deleted_contents = cfg.qbt.rem_unregistered()
# Remove Unregistered Torrents
num_deleted, num_deleted_contents, num_pot_unreg = cfg.qbt.rem_unregistered()
stats["rem_unreg"] += (num_deleted + num_deleted_contents)
stats["deleted"] += num_deleted
stats["deleted_contents"] += num_deleted_contents
stats["pot_unreg"] += num_pot_unreg
#Set Cross Seed
# Set Cross Seed
num_added, num_tagged = cfg.qbt.cross_seed()
stats["added"] += num_added
stats["tagged"] += num_tagged
#Recheck Torrents
# Recheck Torrents
num_resumed, num_rechecked = cfg.qbt.recheck()
stats["resumed"] += num_resumed
stats["rechecked"] += num_rechecked
#Tag NoHardLinks
num_tagged,num_untagged,num_deleted,num_deleted_contents = cfg.qbt.tag_nohardlinks()
# Tag NoHardLinks
num_tagged, num_untagged, num_deleted, num_deleted_contents = cfg.qbt.tag_nohardlinks()
stats["tagged"] += num_tagged
stats["taggednoHL"] += num_tagged
stats["untagged"] += num_untagged
stats["deleted"] += num_deleted
stats["deleted_contents"] += num_deleted_contents
#Remove Orphaned Files
# Remove Orphaned Files
num_orphaned = cfg.qbt.rem_orphaned()
stats["orphaned"] += num_orphaned
#Empty RecycleBin
# mpty RecycleBin
recycle_emptied = cfg.empty_recycle()
stats["recycle_emptied"] += recycle_emptied
if stats["categorized"] > 0: stats_summary.append(f"Total Torrents Categorized: {stats['categorized']}")
if stats["tagged"] > 0: stats_summary.append(f"Total Torrents Tagged: {stats['tagged']}")
if stats["rem_unreg"] > 0: stats_summary.append(f"Total Unregistered Torrents Removed: {stats['rem_unreg']}")
if stats["pot_unreg"] > 0: stats_summary.append(f"Total Potential Unregistered Torrents Found: {stats['pot_unreg']}")
if stats["added"] > 0: stats_summary.append(f"Total Torrents Added: {stats['added']}")
if stats["resumed"] > 0: stats_summary.append(f"Total Torrents Resumed: {stats['resumed']}")
if stats["rechecked"] > 0: stats_summary.append(f"Total Torrents Rechecked: {stats['rechecked']}")
@ -235,12 +270,15 @@ def start():
except Failed as e:
util.print_stacktrace()
logger.error(f"Webhooks Error: {e}")
def end():
logger.info("Exiting Qbit_manage")
logger.removeHandler(file_handler)
sys.exit(0)
def calc_next_run(sch,print=False):
def calc_next_run(sch, print=False):
current = datetime.now().strftime("%H:%M")
seconds = sch*60
time_to_run = (datetime.now() + timedelta(minutes=sch)).strftime("%H:%M")
@ -258,17 +296,18 @@ def calc_next_run(sch,print=False):
if print: util.print_return(f"Current Time: {current} | {time_str} until the next run at {time_to_run}")
return time_str
if __name__ == '__main__':
killer = GracefulKiller()
util.separator()
logger.info(util.centered(" _ _ _ "))
logger.info(util.centered(" | | (_) | "))
logger.info(util.centered(" __ _| |__ _| |_ _ __ ___ __ _ _ __ __ _ __ _ ___ "))
logger.info(util.centered(" / _` | '_ \| | __| | '_ ` _ \ / _` | '_ \ / _` |/ _` |/ _ \\"))
logger.info(util.centered(" | (_| | |_) | | |_ | | | | | | (_| | | | | (_| | (_| | __/"))
logger.info(util.centered(" \__, |_.__/|_|\__| |_| |_| |_|\__,_|_| |_|\__,_|\__, |\___|"))
logger.info(util.centered(" | | ______ __/ | "))
logger.info(util.centered(" |_| |______| |___/ "))
logger.info(util.centered(" _ _ _ ")) # noqa: W605
logger.info(util.centered(" | | (_) | ")) # noqa: W605
logger.info(util.centered(" __ _| |__ _| |_ _ __ ___ __ _ _ __ __ _ __ _ ___ ")) # noqa: W605
logger.info(util.centered(" / _` | '_ \| | __| | '_ ` _ \ / _` | '_ \ / _` |/ _` |/ _ \\")) # noqa: W605
logger.info(util.centered(" | (_| | |_) | | |_ | | | | | | (_| | | | | (_| | (_| | __/")) # noqa: W605
logger.info(util.centered(" \__, |_.__/|_|\__| |_| |_| |_|\__,_|_| |_|\__,_|\__, |\___|")) # noqa: W605
logger.info(util.centered(" | | ______ __/ | ")) # noqa: W605
logger.info(util.centered(" |_| |______| |___/ ")) # noqa: W605
logger.info(f" Version: {version}")
util.separator(loglevel='DEBUG')
@ -293,7 +332,7 @@ if __name__ == '__main__':
logger.debug("")
try:
if run:
logger.info(f" Run Mode: Script will exit after completion.")
logger.info(" Run Mode: Script will exit after completion.")
start()
else:
schedule.every(sch).minutes.do(start)
@ -301,8 +340,8 @@ if __name__ == '__main__':
start()
while not killer.kill_now:
schedule.run_pending()
calc_next_run(sch,True)
calc_next_run(sch, True)
time.sleep(60)
end()
except KeyboardInterrupt:
end()
end()