Merge pull request #84 from StuffAnThings/develop

v3.1.4
bobokun 2022-01-04 13:05:41 -05:00 committed by GitHub
commit 0f2cb4fd52
10 changed files with 307 additions and 133 deletions

@@ -9,4 +9,14 @@ updates:
directory: "/"
schedule:
interval: "daily"
target-branch: "develop"
target-branch: "develop"
assignees:
- "bobokun"
- package-ecosystem: github-actions
directory: '/'
schedule:
interval: daily
assignees:
- "bobokun"
ignore:
- dependency-name: "salsify/action-detect-and-tag-new-version"

@@ -1,11 +1,15 @@
FROM hotio/base:alpine
FROM python:3.9-alpine
# install packages
RUN apk add --no-cache gcc g++ libxml2-dev libxslt-dev shadow bash curl wget jq grep sed coreutils findutils unzip p7zip ca-certificates
COPY requirements.txt /
RUN apk add --no-cache py3-pip
COPY --chown=hotio:users requirements.txt /
RUN echo "**** install python packages ****" \
&& pip3 install --user --no-cache-dir --upgrade --requirement /requirements.txt \
&& pip3 install --no-cache-dir --upgrade --requirement /requirements.txt \
&& rm -rf /requirements.txt /tmp/* /var/tmp/*
COPY --chown=hotio:users . "${APP_DIR}"
WORKDIR ${APP_DIR}
COPY . /app
WORKDIR /app
VOLUME /config
ENTRYPOINT ["python3", "qbit_manage.py"]

@@ -1 +1 @@
3.1.3
3.1.4

@@ -1,5 +1,7 @@
# This is an example configuration file that documents all the options.
# It will need to be modified for your specific use case.
# Please refer to the link below for more details on how to set up the configuration file
# https://github.com/StuffAnThings/qbit_manage/wiki/Config-Setup
# qBittorrent parameters
qbt:
@@ -16,9 +18,14 @@ directory:
# root_dir var: </your/path/here/> # Root downloads directory used to check for orphaned files, noHL, and RecycleBin.
# <OPTIONAL> remote_dir var: </your/path/here/> # Path of docker host mapping of root_dir.
# Must be set if you're running qbit_manage locally and qBittorrent/cross_seed is in a docker
# <OPTIONAL> recycle_bin var: </your/path/here/> # Path of the RecycleBin folder. Default location is set to remote_dir/.RecycleBin
# <OPTIONAL> torrents_dir var: </your/path/here/> # Path of your qbittorrent torrents directory. Required for the `save_torrents` attribute in recyclebin
cross_seed: "/your/path/here/"
root_dir: "/data/torrents/"
remote_dir: "/mnt/user/data/torrents/"
recycle_bin: "/mnt/user/data/torrents/.RecycleBin"
torrents_dir: "/qbittorrent/data/BT_backup"
# Category & Path Parameters
cat:
@@ -123,10 +130,19 @@ nohardlinks:
# By default the Recycle Bin will be emptied on every run of the qbit_manage script if empty_after_x_days is defined.
recyclebin:
enabled: true
# <OPTIONAL> empty_after_x_days var: Will automatically remove all files and folders in recycle bin after x days. (Checks every script run)
# If this variable is not defined, the RecycleBin will never be emptied.
# WARNING: Setting this variable to 0 will delete all files immediately upon script run!
# <OPTIONAL> empty_after_x_days var:
# Will automatically remove all files and folders in recycle bin after x days. (Checks every script run)
# If this variable is not defined, the RecycleBin will never be emptied.
# WARNING: Setting this variable to 0 will delete all files immediately upon script run!
empty_after_x_days: 60
# <OPTIONAL> save_torrents var:
# If this option is set to true you MUST fill out the torrents_dir in the directory attribute.
# This will save a copy of your .torrent and .fastresume files in the recycle bin before deleting them from qbittorrent
save_torrents: true
# <OPTIONAL> split_by_category var:
# This will split the recycle bin folder by the save paths defined in the `cat` attribute,
# adding the base folder name of the recycle bin defined in the `recycle_bin` sub-attribute under directory.
split_by_category: false
# Orphaned files are those in the root_dir download directory that are not referenced by any active torrents.
orphaned:

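The empty_after_x_days option works off each file's last-modified time, as the Config changes further down show. A standalone sketch of that age test (illustrative only; the recycle-bin path and 60-day threshold are example values taken from the sample config above):

import os, stat, time

RECYCLE_DIR = "/mnt/user/data/torrents/.RecycleBin"   # example path from the sample config
EMPTY_AFTER_X_DAYS = 60                               # example threshold

def files_older_than(recycle_dir, max_days):
    """Yield (path, age in days) for files last modified more than max_days ago."""
    now = time.time()
    for path, _dirs, files in os.walk(recycle_dir):
        for name in files:
            full = os.path.join(path, name)
            days = (now - os.stat(full)[stat.ST_MTIME]) / (60 * 60 * 24)
            if days >= max_days:
                yield full, round(days)

for file, days in files_older_than(RECYCLE_DIR, EMPTY_AFTER_X_DAYS):
    print(f"Would delete {file} (last modified {days} days ago)")
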
@@ -1,4 +1,4 @@
import logging, os, requests, stat, time
import logging, os, requests, stat, time, re
from modules import util
from modules.util import Failed, check
from modules.qbittorrent import Qbt
@@ -33,15 +33,13 @@ class Config:
yaml.YAML().allow_duplicate_keys = True
try:
new_config, _, _ = yaml.util.load_yaml_guess_indent(open(self.config_path, encoding="utf-8"))
if "settings" not in new_config: new_config["settings"] = {}
if "cat" not in new_config: new_config["cat"] = {}
if "tracker" not in new_config and "tags" not in new_config: new_config["tracker"] = {}
if "qbt" in new_config: new_config["qbt"] = new_config.pop("qbt")
if "settings" in new_config: new_config["settings"] = new_config.pop("settings")
new_config["settings"] = new_config.pop("settings") if "settings" in new_config else {}
if "directory" in new_config: new_config["directory"] = new_config.pop("directory")
if "cat" in new_config: new_config["cat"] = new_config.pop("cat")
new_config["cat"] = new_config.pop("cat") if "cat" in new_config else {}
if "tracker" in new_config: new_config["tracker"] = new_config.pop("tracker")
elif "tags" in new_config: new_config["tracker"] = new_config.pop("tags")
else: new_config["tracker"] = {}
if "nohardlinks" in new_config: new_config["nohardlinks"] = new_config.pop("nohardlinks")
if "recyclebin" in new_config: new_config["recyclebin"] = new_config.pop("recyclebin")
if "orphaned" in new_config: new_config["orphaned"] = new_config.pop("orphaned")
@@ -178,15 +176,13 @@ class Config:
self.recyclebin = {}
self.recyclebin['enabled'] = self.util.check_for_attribute(self.data, "enabled", parent="recyclebin", var_type="bool", default=True)
self.recyclebin['empty_after_x_days'] = self.util.check_for_attribute(self.data, "empty_after_x_days", parent="recyclebin", var_type="int", default_is_none=True)
# Add Orphaned
self.orphaned = {}
self.orphaned['exclude_patterns'] = self.util.check_for_attribute(self.data, "exclude_patterns", parent="orphaned", var_type="list", default_is_none=True, do_print=False)
self.recyclebin['save_torrents'] = self.util.check_for_attribute(self.data, "save_torrents", parent="recyclebin", var_type="bool", default=False)
self.recyclebin['split_by_category'] = self.util.check_for_attribute(self.data, "split_by_category", parent="recyclebin", var_type="bool", default=False)
# Assign directories
if "directory" in self.data:
self.root_dir = self.util.check_for_attribute(self.data, "root_dir", parent="directory", default_is_none=True)
self.remote_dir = self.util.check_for_attribute(self.data, "remote_dir", parent="directory", default=self.root_dir)
self.root_dir = os.path.join(self.util.check_for_attribute(self.data, "root_dir", parent="directory", default_is_none=True), '')
self.remote_dir = os.path.join(self.util.check_for_attribute(self.data, "remote_dir", parent="directory", default=self.root_dir), '')
if (self.args["cross_seed"] or self.args["tag_nohardlinks"] or self.args["rem_orphaned"]):
self.remote_dir = self.util.check_for_attribute(self.data, "remote_dir", parent="directory", var_type="path", default=self.root_dir)
else:
@@ -196,12 +192,26 @@ class Config:
self.cross_seed_dir = self.util.check_for_attribute(self.data, "cross_seed", parent="directory", var_type="path")
else:
self.cross_seed_dir = self.util.check_for_attribute(self.data, "cross_seed", parent="directory", default_is_none=True)
self.recycle_dir = os.path.join(self.remote_dir, '.RecycleBin')
self.recycle_dir = self.util.check_for_attribute(self.data, "recycle_bin", parent="directory", var_type="path", default=os.path.join(self.remote_dir, '.RecycleBin'), make_dirs=True)
if self.recyclebin['enabled'] and self.recyclebin['save_torrents']:
self.torrents_dir = self.util.check_for_attribute(self.data, "torrents_dir", parent="directory", var_type="path")
if not any(File.endswith(".torrent") for File in os.listdir(self.torrents_dir)):
e = f"Config Error: The location {self.torrents_dir} does not contain any .torrents"
self.notify(e, 'Config')
raise Failed(e)
else:
self.torrents_dir = self.util.check_for_attribute(self.data, "torrents_dir", parent="directory", default_is_none=True)
else:
e = "Config Error: directory attribute not found"
self.notify(e, 'Config')
raise Failed(e)
# Add Orphaned
exclude_recycle = f"**/{os.path.basename(self.recycle_dir.rstrip('/'))}/*"
self.orphaned = {}
self.orphaned['exclude_patterns'] = self.util.check_for_attribute(self.data, "exclude_patterns", parent="orphaned", var_type="list", default_is_none=True, do_print=False)
self.orphaned['exclude_patterns'].append(exclude_recycle) if exclude_recycle not in self.orphaned['exclude_patterns'] else self.orphaned['exclude_patterns']
# Connect to Qbittorrent
self.qbt = None
if "qbt" in self.data:
@@ -304,28 +314,46 @@ class Config:
files = []
size_bytes = 0
if not self.args["skip_recycle"]:
n_info = ''
if self.recyclebin['enabled'] and self.recyclebin['empty_after_x_days']:
recycle_files = [os.path.join(path, name) for path, subdirs, files in os.walk(self.recycle_dir) for name in files]
if self.recyclebin['split_by_category']:
if "cat" in self.data and self.data["cat"] is not None:
save_path = list(self.data["cat"].values())
cleaned_save_path = [os.path.join(s.replace(self.root_dir, self.remote_dir), os.path.basename(self.recycle_dir.rstrip('/'))) for s in save_path]
recycle_path = [self.recycle_dir]
for dir in cleaned_save_path:
if os.path.exists(dir): recycle_path.append(dir)
else:
e = (f'No categories defined. Checking Recycle Bin directory {self.recycle_dir}.')
self.notify(e, 'Empty Recycle Bin', False)
logger.warning(e)
recycle_path = [self.recycle_dir]
else:
recycle_path = [self.recycle_dir]
recycle_files = [os.path.join(path, name) for r_path in recycle_path for path, subdirs, files in os.walk(r_path) for name in files]
recycle_files = sorted(recycle_files)
if recycle_files:
util.separator(f"Emptying Recycle Bin (Files > {self.recyclebin['empty_after_x_days']} days)", space=False, border=False)
body = []
util.separator(f"Emptying Recycle Bin (Files > {self.recyclebin['empty_after_x_days']} days)", space=True, border=True)
prevfolder = ''
for file in recycle_files:
folder = re.search(f".*{os.path.basename(self.recycle_dir.rstrip('/'))}", file).group(0)
if folder != prevfolder: body += util.separator(f"Searching: {folder}", space=False, border=False)
fileStats = os.stat(file)
filename = file.replace(self.recycle_dir, '')
filename = os.path.basename(file)
last_modified = fileStats[stat.ST_MTIME] # in seconds (last modified time)
now = time.time() # in seconds
days = (now - last_modified) / (60 * 60 * 24)
if (self.recyclebin['empty_after_x_days'] <= days):
num_del += 1
n_info += (f"{'Did not delete' if dry_run else 'Deleted'} {filename} from the recycle bin. (Last modified {round(days)} days ago).\n")
body += util.print_line(f"{'Did not delete' if dry_run else 'Deleted'} {filename} from {folder} (Last modified {round(days)} days ago).", loglevel)
files += [str(filename)]
size_bytes += os.path.getsize(file)
if not dry_run: os.remove(file)
prevfolder = re.search(f".*{os.path.basename(self.recycle_dir.rstrip('/'))}", file).group(0)
if num_del > 0:
if not dry_run: util.remove_empty_directories(self.recycle_dir, "**/*")
body = []
body += util.print_multiline(n_info, loglevel)
if not dry_run:
for path in recycle_path:
util.remove_empty_directories(path, "**/*")
body += util.print_line(f"{'Did not delete' if dry_run else 'Deleted'} {num_del} files ({util.human_readable_size(size_bytes)}) from the Recycle Bin.", loglevel)
attr = {
"function": "empty_recyclebin",
@@ -337,7 +365,7 @@ class Config:
}
self.send_notifications(attr)
else:
logger.debug('No files found in "' + self.recycle_dir + '"')
logger.debug(f'No files found in "{(",".join(recycle_path))}"')
return num_del
def send_notifications(self, attr):

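When split_by_category is enabled, the emptying logic above scans one recycle folder per category save path instead of only the single recycle_bin directory. A rough, self-contained sketch of how that path list is built, following the hunk above (the category mapping and directory values are placeholders):

import os

def build_recycle_paths(categories, root_dir, remote_dir, recycle_dir, split_by_category):
    """Return the recycle-bin folders to walk when emptying old files."""
    paths = [recycle_dir]
    if split_by_category:
        bin_name = os.path.basename(recycle_dir.rstrip('/'))
        for save_path in categories.values():
            candidate = os.path.join(save_path.replace(root_dir, remote_dir), bin_name)
            if os.path.exists(candidate):
                paths.append(candidate)
    return paths

# Placeholder values for illustration only
cats = {"movies": "/data/torrents/movies", "tv": "/data/torrents/tv"}
print(build_recycle_paths(cats, "/data/torrents/", "/mnt/user/data/torrents/",
                          "/mnt/user/data/torrents/.RecycleBin", split_by_category=True))
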
@@ -1,5 +1,5 @@
import logging, os
from qbittorrentapi import Client, LoginFailed, APIConnectionError
from qbittorrentapi import Client, LoginFailed, APIConnectionError, NotFound404Error
from modules import util
from modules.util import Failed, print_line, print_multiline, separator
from datetime import timedelta
@@ -89,16 +89,21 @@ class Qbt:
status_list = []
is_complete = torrent_is_complete
first_hash = torrent_hash
for x in torrent_trackers:
if x.url.startswith('http'):
status = x.status
msg = x.msg.upper()
exception = ["DOWN", "UNREACHABLE", "BAD GATEWAY", "TRACKER UNAVAILABLE"]
# Add any potential unregistered torrents to a list
if x.status == 4 and all(x not in msg for x in exception):
t_obj_unreg.append(torrent)
if x.status == 2:
t_obj_valid.append(torrent)
working_tracker = torrent.tracker
if working_tracker:
status = 2
msg = ''
t_obj_valid.append(torrent)
else:
for x in torrent_trackers:
if x.url.startswith('http'):
status = x.status
msg = x.msg.upper()
exception = ["DOWN", "UNREACHABLE", "BAD GATEWAY", "TRACKER UNAVAILABLE"]
# Add any potential unregistered torrents to a list
if x.status == 4 and all(x not in msg for x in exception):
t_obj_unreg.append(torrent)
break
if msg is not None: msg_list.append(msg)
if status is not None: status_list.append(status)
torrentattr = {
@@ -335,14 +340,14 @@ class Qbt:
"notifiarr_indexer": tracker["notifiarr"],
}
if (os.path.exists(torrent['content_path'].replace(root_dir, root_dir))):
if not dry_run: self.tor_delete_recycle(torrent)
del_tor_cont += 1
attr["torrents_deleted_and_contents"] = True
if not dry_run: self.tor_delete_recycle(torrent, attr)
body += print_line(util.insert_space('Deleted .torrent AND content files.', 8), loglevel)
else:
if not dry_run: torrent.delete(hash=torrent.hash, delete_files=False)
del_tor += 1
attr["torrents_deleted_and_contents"] = False
if not dry_run: self.tor_delete_recycle(torrent, attr)
body += print_line(util.insert_space('Deleted .torrent but NOT content files.', 8), loglevel)
attr["body"] = "\n".join(body)
self.config.send_notifications(attr)
@@ -381,18 +386,18 @@ class Qbt:
if t_count > 1:
# Checks if any of the original torrents are working
if '' in t_msg or 2 in t_status:
if not dry_run: torrent.delete(hash=torrent.hash, delete_files=False)
attr["torrents_deleted_and_contents"] = False
if not dry_run: self.tor_delete_recycle(torrent, attr)
body += print_line(util.insert_space('Deleted .torrent but NOT content files.', 8), loglevel)
del_tor += 1
else:
if not dry_run: self.tor_delete_recycle(torrent)
attr["torrents_deleted_and_contents"] = True
if not dry_run: self.tor_delete_recycle(torrent, attr)
body += print_line(util.insert_space('Deleted .torrent AND content files.', 8), loglevel)
del_tor_cont += 1
else:
if not dry_run: self.tor_delete_recycle(torrent)
attr["torrents_deleted_and_contents"] = True
if not dry_run: self.tor_delete_recycle(torrent, attr)
body += print_line(util.insert_space('Deleted .torrent AND content files.', 8), loglevel)
del_tor_cont += 1
attr["body"] = "\n".join(body)
@@ -423,41 +428,49 @@ class Qbt:
t_msg = self.torrentinfo[t_name]['msg']
t_status = self.torrentinfo[t_name]['status']
check_tags = util.get_list(torrent.tags)
for x in torrent.trackers:
if x.url.startswith('http'):
tracker = self.config.get_tags([x.url])
msg_up = x.msg.upper()
# Tag any potential unregistered torrents
if not any(m in msg_up for m in unreg_msgs) and x.status == 4 and 'issue' not in check_tags:
# Check for unregistered torrents using BHD API if the tracker is BHD
if 'tracker.beyond-hd.me' in tracker['url'] and self.config.BeyondHD is not None:
json = {"info_hash": torrent.hash}
response = self.config.BeyondHD.search(json)
if response['total_results'] <= 1:
del_unregistered()
break
pot_unr = ''
pot_unr += (util.insert_space(f'Torrent Name: {t_name}', 3)+'\n')
pot_unr += (util.insert_space(f'Status: {msg_up}', 9)+'\n')
pot_unr += (util.insert_space(f'Tracker: {tracker["url"]}', 8)+'\n')
pot_unr += (util.insert_space("Added Tag: 'issue'", 6)+'\n')
pot_unr_summary += pot_unr
pot_unreg += 1
attr = {
"function": "potential_rem_unregistered",
"title": "Potential Unregistered Torrents",
"body": pot_unr,
"torrent_name": t_name,
"torrent_category": t_cat,
"torrent_tag": "issue",
"torrent_status": msg_up,
"torrent_tracker": tracker["url"],
"notifiarr_indexer": tracker["notifiarr"],
}
self.config.send_notifications(attr)
if not dry_run: torrent.add_tags(tags='issue')
if any(m in msg_up for m in unreg_msgs) and x.status == 4:
del_unregistered()
try:
for x in torrent.trackers:
if x.url.startswith('http'):
tracker = self.config.get_tags([x.url])
msg_up = x.msg.upper()
# Tag any potential unregistered torrents
if not any(m in msg_up for m in unreg_msgs) and x.status == 4 and 'issue' not in check_tags:
# Check for unregistered torrents using BHD API if the tracker is BHD
if 'tracker.beyond-hd.me' in tracker['url'] and self.config.BeyondHD is not None:
json = {"info_hash": torrent.hash}
response = self.config.BeyondHD.search(json)
if response['total_results'] <= 1:
del_unregistered()
break
pot_unr = ''
pot_unr += (util.insert_space(f'Torrent Name: {t_name}', 3)+'\n')
pot_unr += (util.insert_space(f'Status: {msg_up}', 9)+'\n')
pot_unr += (util.insert_space(f'Tracker: {tracker["url"]}', 8)+'\n')
pot_unr += (util.insert_space("Added Tag: 'issue'", 6)+'\n')
pot_unr_summary += pot_unr
pot_unreg += 1
attr = {
"function": "potential_rem_unregistered",
"title": "Potential Unregistered Torrents",
"body": pot_unr,
"torrent_name": t_name,
"torrent_category": t_cat,
"torrent_tag": "issue",
"torrent_status": msg_up,
"torrent_tracker": tracker["url"],
"notifiarr_indexer": tracker["notifiarr"],
}
self.config.send_notifications(attr)
if not dry_run: torrent.add_tags(tags='issue')
if any(m in msg_up for m in unreg_msgs) and x.status == 4:
del_unregistered()
break
except NotFound404Error:
continue
except Exception as e:
util.print_stacktrace()
self.config.notify(e, 'Remove Unregistered Torrents', False)
logger.error(f"Unknown Error: {e}")
if del_tor >= 1 or del_tor_cont >= 1:
if del_tor >= 1: print_line(f"{'Did not delete' if dry_run else 'Deleted'} {del_tor} .torrent{'s' if del_tor > 1 else ''} but not content files.", loglevel)
if del_tor_cont >= 1: print_line(f"{'Did not delete' if dry_run else 'Deleted'} {del_tor_cont} .torrent{'s' if del_tor_cont > 1 else ''} AND content files.", loglevel)
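
The try/except added around the tracker loop above guards against torrents that are removed from qBittorrent between fetching the torrent list and inspecting their trackers. A stripped-down sketch of that pattern with qbittorrent-api (host and credentials are placeholders, and this is not the project's full tagging logic):

from qbittorrentapi import Client, NotFound404Error

client = Client(host="localhost:8080", username="admin", password="adminadmin")  # placeholder connection details

for torrent in client.torrents_info():
    try:
        # torrent.trackers can raise NotFound404Error if the torrent was
        # deleted after the torrent list was fetched.
        for x in torrent.trackers:
            if x.url.startswith('http') and x.status == 4:
                print(f"Potential unregistered torrent: {torrent.name} ({x.msg})")
    except NotFound404Error:
        continue  # torrent no longer exists; skip it, as the hunk above does
    except Exception as e:
        print(f"Unknown Error: {e}")
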
@@ -635,10 +648,10 @@ class Qbt:
if (remote_path != root_path):
root_files = [os.path.join(path.replace(remote_path, root_path), name)
for path, subdirs, files in alive_it(os.walk(remote_path))
for name in files if os.path.join(remote_path, 'orphaned_data') not in path and os.path.join(remote_path, '.RecycleBin') not in path]
for name in files if os.path.join(remote_path, 'orphaned_data') not in path]
else:
root_files = [os.path.join(path, name) for path, subdirs, files in alive_it(os.walk(root_path))
for name in files if os.path.join(root_path, 'orphaned_data') not in path and os.path.join(root_path, '.RecycleBin') not in path]
for name in files if os.path.join(root_path, 'orphaned_data') not in path]
# Get an updated list of torrents
torrent_list = self.get_torrents({'sort': 'added_on'})
@@ -697,34 +710,96 @@ class Qbt:
print_line("No Orphaned Files found.", loglevel)
return orphaned
def tor_delete_recycle(self, torrent):
def tor_delete_recycle(self, torrent, info):
if self.config.recyclebin['enabled']:
tor_files = []
# Define torrent files/folders
for file in torrent.files:
tor_files.append(os.path.join(torrent.save_path, file.name))
try:
info_hash = torrent.hash
save_path = torrent.save_path.replace(self.config.root_dir, self.config.remote_dir)
# Define torrent files/folders
for file in torrent.files:
tor_files.append(os.path.join(save_path, file.name))
except NotFound404Error:
return
if self.config.recyclebin['split_by_category']:
recycle_path = os.path.join(save_path, os.path.basename(self.config.recycle_dir.rstrip('/')))
else:
recycle_path = self.config.recycle_dir
# Create recycle bin if not exists
recycle_path = os.path.join(self.config.remote_dir, '.RecycleBin')
torrent_path = os.path.join(recycle_path, 'torrents')
torrents_json_path = os.path.join(recycle_path, 'torrents_json')
os.makedirs(recycle_path, exist_ok=True)
if self.config.recyclebin['save_torrents']:
if os.path.isdir(torrent_path) is False: os.makedirs(torrent_path)
if os.path.isdir(torrents_json_path) is False: os.makedirs(torrents_json_path)
torrent_json_file = os.path.join(torrents_json_path, f"{info['torrent_name']}.json")
torrent_json = util.load_json(torrent_json_file)
if not torrent_json:
logger.info(f"Saving Torrent JSON file to {torrent_json_file}")
torrent_json["torrent_name"] = info["torrent_name"]
torrent_json["category"] = info["torrent_category"]
else:
logger.info(f"Adding {info['torrent_tracker']} to existing {os.path.basename(torrent_json_file)}")
dot_torrent_files = []
for File in os.listdir(self.config.torrents_dir):
if File.startswith(info_hash):
dot_torrent_files.append(File)
try:
util.copy_files(os.path.join(self.config.torrents_dir, File), os.path.join(torrent_path, File))
except Exception as e:
util.print_stacktrace()
self.config.notify(e, 'Deleting Torrent', False)
logger.warning(f"RecycleBin Warning: {e}")
if "tracker_torrent_files" in torrent_json:
tracker_torrent_files = torrent_json["tracker_torrent_files"]
else:
tracker_torrent_files = {}
tracker_torrent_files[info["torrent_tracker"]] = dot_torrent_files
if dot_torrent_files:
backup_str = "Backing up "
for idx, val in enumerate(dot_torrent_files):
if idx == 0: backup_str += val
else: backup_str += f" and {val.replace(info_hash,'')}"
backup_str += f" to {torrent_path}"
logger.info(backup_str)
torrent_json["tracker_torrent_files"] = tracker_torrent_files
if "files" not in torrent_json:
files_cleaned = [f.replace(self.config.remote_dir, '') for f in tor_files]
torrent_json["files"] = files_cleaned
if "deleted_contents" not in torrent_json:
torrent_json["deleted_contents"] = info['torrents_deleted_and_contents']
else:
if torrent_json["deleted_contents"] is False and info['torrents_deleted_and_contents'] is True:
torrent_json["deleted_contents"] = info['torrents_deleted_and_contents']
logger.debug("")
logger.debug(f"JSON: {torrent_json}")
util.save_json(torrent_json, torrent_json_file)
if info['torrents_deleted_and_contents'] is True:
separator(f"Moving {len(tor_files)} files to RecycleBin", space=False, border=False, loglevel='DEBUG')
if len(tor_files) == 1: print_line(tor_files[0], 'DEBUG')
else: print_multiline("\n".join(tor_files), 'DEBUG')
logger.debug(f'Moved {len(tor_files)} files to {recycle_path.replace(self.config.remote_dir,self.config.root_dir)}')
separator(f"Moving {len(tor_files)} files to RecycleBin", space=False, border=False, loglevel='DEBUG')
if len(tor_files) == 1: print_line(tor_files[0], 'DEBUG')
else: print_multiline("\n".join(tor_files), 'DEBUG')
logger.debug(f'Moved {len(tor_files)} files to {recycle_path.replace(self.config.remote_dir,self.config.root_dir)}')
# Move files from torrent contents to Recycle bin
for file in tor_files:
src = file.replace(self.config.root_dir, self.config.remote_dir)
dest = os.path.join(recycle_path, file.replace(self.config.root_dir, ''))
# Move files and change date modified
try:
util.move_files(src, dest, True)
except FileNotFoundError:
print_line(f'RecycleBin Warning - FileNotFound: No such file or directory: {src} ', 'WARNING')
# Delete torrent and files
torrent.delete(hash=torrent.hash, delete_files=False)
# Remove any empty directories
util.remove_empty_directories(torrent.save_path.replace(self.config.root_dir, self.config.remote_dir), "**/*")
# Move files from torrent contents to Recycle bin
for file in tor_files:
src = file
dest = os.path.join(recycle_path, file.replace(self.config.remote_dir, ''))
# Move files and change date modified
try:
util.move_files(src, dest, True)
except FileNotFoundError:
e = print_line(f'RecycleBin Warning - FileNotFound: No such file or directory: {src} ', 'WARNING')
self.config.notify(e, 'Deleting Torrent', False)
# Delete torrent and files
torrent.delete(delete_files=False)
# Remove any empty directories
util.remove_empty_directories(save_path, "**/*")
else:
torrent.delete(delete_files=False)
else:
torrent.delete(hash=torrent.hash, delete_files=True)
if info['torrents_deleted_and_contents'] is True:
torrent.delete(delete_files=True)
else:
torrent.delete(delete_files=False)

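With save_torrents enabled, tor_delete_recycle backs up the .torrent/.fastresume files and writes a JSON sidecar describing the deletion. A hand-written example of the structure those sidecars end up with, using the field names from the hunk above (all values are made up) and the same json.dump call as the new util.save_json:

import json

torrent_json = {
    "torrent_name": "Example.Torrent.Name",
    "category": "movies",
    "tracker_torrent_files": {
        "https://tracker.example.org/announce": [
            "0123456789abcdef0123456789abcdef01234567.torrent",
            "0123456789abcdef0123456789abcdef01234567.fastresume",
        ],
    },
    "files": ["movies/Example.Torrent.Name/example.mkv"],
    "deleted_contents": True,
}

with open("Example.Torrent.Name.json", 'w', encoding='utf-8') as f:
    json.dump(torrent_json, f, ensure_ascii=False, indent=4)
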
@@ -1,4 +1,4 @@
import logging, os, shutil, traceback, time, signal
import logging, os, shutil, traceback, time, signal, json
from logging.handlers import RotatingFileHandler
from ruamel import yaml
from pathlib import Path
@@ -35,7 +35,8 @@ class check:
var_type="str",
default_int=0,
throw=False,
save=True):
save=True,
make_dirs=False):
endline = ""
if parent is not None:
if subparent is not None:
@@ -135,6 +136,9 @@
message = f"{text}: {data[attribute]} is an invalid input"
if var_type == "path" and default and os.path.exists(os.path.abspath(default)):
return os.path.join(default, '')
elif var_type == "path" and default and make_dirs:
os.makedirs(default, exist_ok=True)
return os.path.join(default, '')
elif var_type == "path" and default:
if data and attribute in data and data[attribute]:
message = f"neither {data[attribute]} or the default path {default} could be found"
@@ -297,6 +301,14 @@ def move_files(src, dest, mod=False):
os.utime(dest, (modTime, modTime))
# Copy Files from source to destination
def copy_files(src, dest):
dest_path = os.path.dirname(dest)
if os.path.isdir(dest_path) is False:
os.makedirs(dest_path)
shutil.copyfile(src, dest)
# Remove any empty directories after moving files
def remove_empty_directories(pathlib_root_dir, pattern):
pathlib_root_dir = Path(pathlib_root_dir)
@@ -328,6 +340,23 @@ def nohardlink(file):
return check
# Load json file if exists
def load_json(file):
if (os.path.isfile(file)):
f = open(file, "r")
data = json.load(f)
f.close()
else:
data = {}
return data
# Save json file overwrite if exists
def save_json(torrent_json, dest):
with open(dest, 'w', encoding='utf-8') as f:
json.dump(torrent_json, f, ensure_ascii=False, indent=4)
# Gracefully kill script when docker stops
class GracefulKiller:
kill_now = False

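The new load_json/save_json helpers form a simple read-or-empty-dict / overwrite pair. A short usage sketch, assuming the project's dependencies are installed and modules.util is importable from the repo root (the file name is arbitrary):

from modules import util

path = "example.json"          # arbitrary file name for illustration
data = util.load_json(path)    # returns {} if the file does not exist yet
data.setdefault("deleted_contents", False)
data["torrent_name"] = "Example.Torrent.Name"
util.save_json(data, path)     # overwrites the file with indented UTF-8 JSON
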
@@ -86,7 +86,7 @@ class Webhooks:
"dry_run": self.config.args['dry_run']
})
def end_time_hooks(self, start_time, end_time, run_time, stats, body):
def end_time_hooks(self, start_time, end_time, run_time, next_run, stats, body):
if self.run_end_webhooks:
self._request(self.run_end_webhooks, {
"function": "run_end",
@@ -94,6 +94,7 @@
"body": body,
"start_time": start_time.strftime("%Y-%m-%d %H:%M:%S"),
"end_time": end_time.strftime("%Y-%m-%d %H:%M:%S"),
"next_run": next_run.strftime("%Y-%m-%d %H:%M:%S") if next_run is not None else next_run,
"run_time": run_time,
"torrents_added": stats["added"],
"torrents_deleted": stats["deleted"],

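The run-end payload now includes next_run, formatted only when another run is actually scheduled. A minimal sketch of that optional-timestamp handling (payload keys follow the hunk above; everything else is omitted):

from datetime import datetime, timedelta

next_run = datetime.now() + timedelta(minutes=30)   # or None when running once
payload = {
    "function": "run_end",
    "next_run": next_run.strftime("%Y-%m-%d %H:%M:%S") if next_run is not None else None,
}
print(payload)
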
@@ -268,10 +268,14 @@ def start():
end_time = datetime.now()
run_time = str(end_time - start_time).split('.')[0]
body = util.separator(f"Finished {start_type}Run\n{os.linesep.join(stats_summary) if len(stats_summary)>0 else ''}\nRun Time: {run_time}".replace('\n\n', '\n'))[0]
_, nr = calc_next_run(sch, True)
next_run_str = nr['next_run_str']
next_run = nr['next_run']
body = util.separator(f"Finished {start_type}Run\n{os.linesep.join(stats_summary) if len(stats_summary)>0 else ''}\nRun Time: {run_time}\n{next_run_str if len(next_run_str)>0 else ''}"
.replace('\n\n', '\n').rstrip())[0]
if cfg:
try:
cfg.Webhooks.end_time_hooks(start_time, end_time, run_time, stats, body)
cfg.Webhooks.end_time_hooks(start_time, end_time, run_time, next_run, stats, body)
except Failed as e:
util.print_stacktrace()
logger.error(f"Webhooks Error: {e}")
@@ -286,20 +290,27 @@ def end():
def calc_next_run(sch, print=False):
current = datetime.now().strftime("%H:%M")
seconds = sch*60
time_to_run = (datetime.now() + timedelta(minutes=sch)).strftime("%H:%M")
new_seconds = (datetime.strptime(time_to_run, "%H:%M") - datetime.strptime(current, "%H:%M")).total_seconds()
time_to_run = datetime.now() + timedelta(minutes=sch)
time_to_run_str = time_to_run.strftime("%H:%M")
new_seconds = (datetime.strptime(time_to_run_str, "%H:%M") - datetime.strptime(current, "%H:%M")).total_seconds()
time_str = ''
if new_seconds < 0:
new_seconds += 86400
if (seconds is None or new_seconds < seconds) and new_seconds > 0:
seconds = new_seconds
if seconds is not None:
hours = int(seconds // 3600)
minutes = int((seconds % 3600) // 60)
time_str = f"{hours} Hour{'s' if hours > 1 else ''}{' and ' if minutes > 1 else ''}" if hours > 0 else ""
time_str += f"{minutes} Minute{'s' if minutes > 1 else ''}" if minutes > 0 else ""
if print: util.print_return(f"Current Time: {current} | {time_str} until the next run at {time_to_run}")
return time_str
next_run = {}
if run is False:
next_run['next_run'] = time_to_run
if new_seconds < 0:
new_seconds += 86400
if (seconds is None or new_seconds < seconds) and new_seconds > 0:
seconds = new_seconds
if seconds is not None:
hours = int(seconds // 3600)
minutes = int((seconds % 3600) // 60)
time_str = f"{hours} Hour{'s' if hours > 1 else ''}{' and ' if minutes > 1 else ''}" if hours > 0 else ""
time_str += f"{minutes} Minute{'s' if minutes > 1 else ''}" if minutes > 0 else ""
if print: next_run['next_run_str'] = (f"Current Time: {current} | {time_str} until the next run at {time_to_run_str}")
else:
next_run['next_run'] = None
next_run['next_run_str'] = ''
return time_str, next_run
if __name__ == '__main__':
@@ -341,11 +352,11 @@ if __name__ == '__main__':
start()
else:
schedule.every(sch).minutes.do(start)
logger.info(f" Scheduled Mode: Running every {calc_next_run(sch)}.")
time_str, _ = calc_next_run(sch)
logger.info(f" Scheduled Mode: Running every {time_str}.")
start()
while not killer.kill_now:
schedule.run_pending()
calc_next_run(sch, True)
time.sleep(60)
end()
except KeyboardInterrupt:

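calc_next_run now returns both a human-readable countdown string and the actual next-run datetime that feeds the webhook payload. A simplified, self-contained sketch of that arithmetic (not the project's function; the 90-minute interval is a placeholder):

from datetime import datetime, timedelta

def calc_next_run(sch_minutes):
    """Return (time_str, next_run) for a run scheduled sch_minutes from now."""
    next_run = datetime.now() + timedelta(minutes=sch_minutes)
    hours, minutes = divmod(sch_minutes, 60)
    parts = []
    if hours:
        parts.append(f"{hours} Hour{'s' if hours > 1 else ''}")
    if minutes:
        parts.append(f"{minutes} Minute{'s' if minutes > 1 else ''}")
    return " and ".join(parts), next_run

time_str, next_run = calc_next_run(90)
print(f"{time_str} until the next run at {next_run.strftime('%H:%M')}")
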
@@ -1,6 +1,6 @@
ruamel.yaml==0.17.17
qbittorrent-api==2021.8.23
ruamel.yaml==0.17.20
qbittorrent-api==2021.12.26
schedule==1.1.0
retrying==1.3.3
alive_progress==2.1.0
requests==2.26.0
requests==2.27.0