mirror of
https://github.com/StuffAnThings/qbit_manage.git
synced 2024-09-20 15:26:02 +08:00
4e08f62aaf
* 4.1.7 * chore(docs): Sync wiki to docs [skip-cd] * Bump flake8 from 7.0.0 to 7.1.0 (#585) Bumps [flake8](https://github.com/pycqa/flake8) from 7.0.0 to 7.1.0. - [Commits](https://github.com/pycqa/flake8/compare/7.0.0...7.1.0) --- updated-dependencies: - dependency-name: flake8 dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * Bump docker/build-push-action from 5 to 6 (#588) * chore(docs): Sync wiki to docs [skip-cd] * fix(unregistered): Ignore 520 (#592) * fix: max vs min seeding time check (#596) * fix: max vs min seeding time check Allow max_seeding_time to be unlimited (-1) even if a min_seeding_time is set * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> * fix pre-commit test * fix: check tracker status in udp & wss as well (#586) * fix: check tracker status in udp & wss as well * bump VERSION --------- Co-authored-by: bobokun <12660469+bobokun@users.noreply.github.com> * [pre-commit.ci] pre-commit autoupdate (#584) updates: - [github.com/hhatto/autopep8: v2.2.0 → v2.3.1](https://github.com/hhatto/autopep8/compare/v2.2.0...v2.3.1) - [github.com/asottile/pyupgrade: v3.15.2 → v3.16.0](https://github.com/asottile/pyupgrade/compare/v3.15.2...v3.16.0) - [github.com/PyCQA/flake8: 7.0.0 → 7.1.0](https://github.com/PyCQA/flake8/compare/7.0.0...7.1.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> * Fixes #595 to use BHD deleted reason list instead of API * Fixes #591 [FR]: Logging header doesn't get logged for every run * minor fixes in #591 * Adds deprecated message for bhd attribute in config * Fix min_seeding_time tag removal when max_seeding_time is -1 (#598) * Update share_limits.py to fix #590 
--------- Co-authored-by: bobokun <12660469+bobokun@users.noreply.github.com> * Adds logging to mover script * Fixes [FR]: Remove orphaned without moving to orphaned_dir #590 * Fixes bug in printing schedule mode when run is True * Fix bug in Use BHD list of reasons instead of API #595 * Add additional BHD deletion reason #595 * Fix bug in #595 when status contains optional message * Update mover.py: add optional arg --status-filter to allow fine tune which torrents to pauseUpdate mover.py: add optional arg --status-filter to allow fine tune which torrents to pause (#599) * Update mover.py * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Update mover.py: making the default behavior to only move `completed` torrents This will leave 1) unfinished files 2) actively seeding files in `downloading` torrents in cache. This helps to keep write-heavy operation on Cache, not on hard drive. Change this to "all" if you want this to always move everything every time. * minor fixes in help description --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: bobokun <12660469+bobokun@users.noreply.github.com> * modify noHL threshold to address false positives * Add additional BHD deletion reason #595 * Bump dependabot/fetch-metadata from 2.1.0 to 2.2.0 (#601) Bumps [dependabot/fetch-metadata](https://github.com/dependabot/fetch-metadata) from 2.1.0 to 2.2.0. - [Release notes](https://github.com/dependabot/fetch-metadata/releases) - [Commits](https://github.com/dependabot/fetch-metadata/compare/v2.1.0...v2.2.0) --- updated-dependencies: - dependency-name: dependabot/fetch-metadata dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * 4.1.7 --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: Actionbot <actions@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: bakerboy448 <55419169+bakerboy448@users.noreply.github.com> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Amos (lflare) <me@amosng.com> Co-authored-by: ineednewpajamas <73252768+ineednewpajamas@users.noreply.github.com> Co-authored-by: Tony <5747393+convexshiba@users.noreply.github.com>
129 lines
5.4 KiB
Python
129 lines
5.4 KiB
Python
import os
|
|
from concurrent.futures import ThreadPoolExecutor
|
|
from fnmatch import fnmatch
|
|
|
|
from modules import util
|
|
|
|
logger = util.logger
|
|
|
|
|
|
class RemoveOrphaned:
    """Locate files on disk that no torrent in qBittorrent references and
    delete them or move them into the configured orphaned directory."""

    def __init__(self, qbit_manager):
        """Run the orphaned-file check immediately using *qbit_manager*.

        Args:
            qbit_manager: Project manager object exposing the qBittorrent
                client, config, and directory settings used by this check.
        """
        self.qbt = qbit_manager
        self.config = qbit_manager.config
        self.client = qbit_manager.client
        self.stats = 0  # count of orphaned files handled this run

        self.remote_dir = qbit_manager.config.remote_dir
        self.root_dir = qbit_manager.config.root_dir
        self.orphaned_dir = qbit_manager.config.orphaned_dir

        # os.cpu_count() may return None (per the Python docs); fall back so
        # max_workers is always a valid positive integer.
        max_workers = max((os.cpu_count() or 1) - 1, 1)
        self.executor = ThreadPoolExecutor(max_workers=max_workers)
        self.rem_orphaned()
        self.executor.shutdown()
|
|
|
|
def rem_orphaned(self):
|
|
"""Remove orphaned files from remote directory"""
|
|
self.stats = 0
|
|
logger.separator("Checking for Orphaned Files", space=False, border=False)
|
|
torrent_files = []
|
|
orphaned_files = []
|
|
excluded_orphan_files = []
|
|
|
|
root_files = self.executor.submit(util.get_root_files, self.root_dir, self.remote_dir, self.orphaned_dir)
|
|
|
|
# Get an updated list of torrents
|
|
logger.print_line("Locating orphan files", self.config.loglevel)
|
|
torrent_list = self.qbt.get_torrents({"sort": "added_on"})
|
|
|
|
torrent_files.extend(
|
|
[
|
|
fullpath
|
|
for fullpathlist in self.executor.map(self.get_full_path_of_torrent_files, torrent_list)
|
|
for fullpath in fullpathlist
|
|
]
|
|
)
|
|
|
|
orphaned_files = set(root_files.result()) - set(torrent_files)
|
|
|
|
if self.config.orphaned["exclude_patterns"]:
|
|
logger.print_line("Processing orphan exclude patterns")
|
|
exclude_patterns = [
|
|
exclude_pattern.replace(self.remote_dir, self.root_dir)
|
|
for exclude_pattern in self.config.orphaned["exclude_patterns"]
|
|
]
|
|
excluded_orphan_files = [
|
|
file for file in orphaned_files for exclude_pattern in exclude_patterns if fnmatch(file, exclude_pattern)
|
|
]
|
|
|
|
orphaned_files = set(orphaned_files) - set(excluded_orphan_files)
|
|
|
|
if orphaned_files:
|
|
orphaned_files = sorted(orphaned_files)
|
|
os.makedirs(self.orphaned_dir, exist_ok=True)
|
|
body = []
|
|
num_orphaned = len(orphaned_files)
|
|
logger.print_line(f"{num_orphaned} Orphaned files found", self.config.loglevel)
|
|
body += logger.print_line("\n".join(orphaned_files), self.config.loglevel)
|
|
if self.config.orphaned["empty_after_x_days"] == 0:
|
|
body += logger.print_line(
|
|
f"{'Not Deleting' if self.config.dry_run else 'Deleting'} {num_orphaned} Orphaned files",
|
|
self.config.loglevel,
|
|
)
|
|
else:
|
|
body += logger.print_line(
|
|
f"{'Not moving' if self.config.dry_run else 'Moving'} {num_orphaned} Orphaned files "
|
|
f"to {self.orphaned_dir.replace(self.remote_dir, self.root_dir)}",
|
|
self.config.loglevel,
|
|
)
|
|
|
|
attr = {
|
|
"function": "rem_orphaned",
|
|
"title": f"Removing {num_orphaned} Orphaned Files",
|
|
"body": "\n".join(body),
|
|
"orphaned_files": list(orphaned_files),
|
|
"orphaned_directory": self.orphaned_dir.replace(self.remote_dir, self.root_dir),
|
|
"total_orphaned_files": num_orphaned,
|
|
}
|
|
self.config.send_notifications(attr)
|
|
# Delete empty directories after moving orphan files
|
|
if not self.config.dry_run:
|
|
orphaned_parent_path = set(self.executor.map(self.handle_orphaned_files, orphaned_files))
|
|
logger.print_line("Removing newly empty directories", self.config.loglevel)
|
|
self.executor.map(
|
|
lambda directory: util.remove_empty_directories(directory, self.qbt.get_category_save_paths()),
|
|
orphaned_parent_path,
|
|
)
|
|
|
|
else:
|
|
logger.print_line("No Orphaned Files found.", self.config.loglevel)
|
|
|
|
def handle_orphaned_files(self, file):
|
|
src = file.replace(self.root_dir, self.remote_dir)
|
|
dest = os.path.join(self.orphaned_dir, file.replace(self.root_dir, ""))
|
|
orphaned_parent_path = os.path.dirname(file).replace(self.root_dir, self.remote_dir)
|
|
|
|
"""Delete orphaned files directly if empty_after_x_days is set to 0"""
|
|
if self.config.orphaned["empty_after_x_days"] == 0:
|
|
try:
|
|
util.delete_files(src)
|
|
except Exception:
|
|
logger.error(f"Error deleting orphaned file: {file}")
|
|
util.move_files(src, dest, True)
|
|
else: # Move orphaned files to orphaned directory
|
|
util.move_files(src, dest, True)
|
|
return orphaned_parent_path
|
|
|
|
def get_full_path_of_torrent_files(self, torrent):
|
|
torrent_files = map(lambda dict: dict.name, torrent.files)
|
|
save_path = torrent.save_path
|
|
|
|
fullpath_torrent_files = []
|
|
for file in torrent_files:
|
|
fullpath = os.path.join(save_path, file)
|
|
# Replace fullpath with \\ if qbm is running in docker (linux) but qbt is on windows
|
|
fullpath = fullpath.replace(r"/", "\\") if ":\\" in fullpath else fullpath
|
|
fullpath_torrent_files.append(fullpath)
|
|
return fullpath_torrent_files
|