* 4.1.4-develop1

* Fixes #545

* Fixes #546

* Fix missing check for whether group_tag is enabled

* (fix) Blutopia unregistered torrent message (#547)

InfoHash not found.

* fix broken tags check

* Fixes MinSeedTimeNotReached not being applied correctly #548

* Bump pre-commit from 3.7.0 to 3.7.1 (#549)

Bumps [pre-commit](https://github.com/pre-commit/pre-commit) from 3.7.0 to 3.7.1.
- [Release notes](https://github.com/pre-commit/pre-commit/releases)
- [Changelog](https://github.com/pre-commit/pre-commit/blob/main/CHANGELOG.md)
- [Commits](https://github.com/pre-commit/pre-commit/compare/v3.7.0...v3.7.1)

---
updated-dependencies:
- dependency-name: pre-commit
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>

* Fixes excluding directories of recently deleted torrents from rem_orphaned (#550)

* minor debug comment update

* Fixes excluding directories of recently deleted torrents from rem_orphaned (#550) for setups that don't use the recycle bin

* Fixes cross-seed bug introduced in v6 with cross-seed-data

* optimize remove_empty_dirs and remove empty folders in orphaned_data

* Adds an additional Blutopia unregistered torrent status; no longer deletes the orphaned_data root folder when it is empty

* Adds additional logging for cleaning up empty folders

* 4.1.4

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: bakerboy448 <55419169+bakerboy448@users.noreply.github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
bobokun 2024-05-18 10:39:24 -04:00 committed by GitHub
parent f4dcc3900c
commit 3696f91b7c
11 changed files with 132 additions and 115 deletions

View file

@@ -1,23 +1,11 @@
# Requirements Added
- croniter==2.0.5
- humanize==4.9.0
- pytimeparse2==1.7.1
# New Updates
- Adds support for [cron scheduling](https://crontab.guru/examples.html) via QBT_SCHEDULE / `--schedule`
- Supports [time parsing](https://github.com/onegreyonewhite/pytimeparse2?tab=readme-ov-file#pytimeparse2-time-expression-parser) in `max_seeding_time`, `min_seeding_time`, and `last_active` config variables.
Example:
- `32m`
- `2h32m`
- `3d2h32m`
- `1w3d2h32m`
- Removes the `ignoreTags_OnUpdate` and `force_retag_all` config options (these are no longer used; the tag-update function now automatically updates all torrents if the tracker tag is missing)
- Better trace logs for share limits (Closes #533)
- Adds new config option `ignore_root_dir` [#538 - NoHardlinking detection outside of root directory](https://github.com/StuffAnThings/qbit_manage/issues/538)
- Adds additional remove unregistered logic for Blutopia
# Bug Fixes
- Fixes [#540](https://github.com/StuffAnThings/qbit_manage/issues/540)
- Fixes [#545](https://github.com/StuffAnThings/qbit_manage/issues/545)
- Fixes [#546](https://github.com/StuffAnThings/qbit_manage/issues/546)
- Fixes [#548](https://github.com/StuffAnThings/qbit_manage/issues/548)
- Fixes [#550](https://github.com/StuffAnThings/qbit_manage/issues/550)
- Optimizes Remove Empty Directories and removes empty folders in orphaned_data
**Full Changelog**: https://github.com/StuffAnThings/qbit_manage/compare/v4.1.2...v4.1.3
**Full Changelog**: https://github.com/StuffAnThings/qbit_manage/compare/v4.1.3...v4.1.4
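The cron scheduling and duration parsing called out in the changelog above are backed by the croniter and pytimeparse2 requirements. A minimal sketch of how such values can be interpreted (the schedule value below is illustrative, not a qbit_manage default):

```python
from datetime import datetime

import pytimeparse2
from croniter import croniter

# Duration strings in the formats listed above parse to a number of seconds.
for expr in ("32m", "2h32m", "3d2h32m", "1w3d2h32m"):
    print(expr, "->", pytimeparse2.parse(expr), "seconds")

# A schedule can be a plain "every N minutes" value or a cron expression.
schedule = "*/30 * * * *"  # illustrative value for QBT_SCHEDULE / --schedule
if croniter.is_valid(schedule):
    next_run = croniter(schedule, datetime.now()).get_next(datetime)
    print("next run:", next_run)
else:
    print("run every", int(schedule), "minutes")
```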

View file

@@ -1 +1 @@
4.1.3
4.1.4

View file

@@ -757,7 +757,9 @@ class Config:
if not self.dry_run:
for path in location_path_list:
if path != location_path:
util.remove_empty_directories(path, "**/*", self.qbt.get_category_save_paths())
util.remove_empty_directories(path, self.qbt.get_category_save_paths())
# Delete empty folders inside the location_path
util.remove_empty_directories(location_path, [location_path])
body += logger.print_line(
f"{'Did not delete' if self.dry_run else 'Deleted'} {num_del} files "
f"({util.human_readable_size(size_bytes)}) from the {location}.",

View file

@@ -89,7 +89,7 @@ class CrossSeed:
logger.warning(f"Unable to find hash {torrent_hash} in qbt: {e}")
if torrent_info:
torrent = torrent_info[0]
self.qbt.add_torrent_files(torrent.hash, torrent.files)
self.qbt.add_torrent_files(torrent.hash, torrent.files, torrent.save_path)
self.qbt.torrentvalid.append(torrent)
self.qbt.torrentinfo[t_name]["torrents"].append(torrent)
self.qbt.torrent_list.append(torrent)

View file

@@ -60,11 +60,11 @@ class ReCheck:
logger.debug("DEBUG: Torrent to see if torrent meets AutoTorrentManagement Criteria")
logger.debug(logger.insert_space(f"- Torrent Name: {t_name}", 2))
logger.debug(
logger.insert_space(f"-- Ratio vs Max Ratio: {torrent.ratio:.2f} < {torrent.max_ratio:.2f}", 4)
logger.insert_space(f"-- Ratio vs Max Ratio: {torrent.ratio:.2f} vs {torrent.max_ratio:.2f}", 4)
)
logger.debug(
logger.insert_space(
f"-- Seeding Time vs Max Seed Time: {str(timedelta(seconds=torrent.seeding_time))} < "
f"-- Seeding Time vs Max Seed Time: {str(timedelta(seconds=torrent.seeding_time))} vs "
f"{str(timedelta(minutes=torrent.max_seeding_time))}",
4,
)

View file

@@ -86,7 +86,7 @@ class RemoveOrphaned:
orphaned_parent_path = set(self.executor.map(self.move_orphan, orphaned_files))
logger.print_line("Removing newly empty directories", self.config.loglevel)
self.executor.map(
lambda dir: util.remove_empty_directories(dir, "**/*", self.qbt.get_category_save_paths()),
lambda directory: util.remove_empty_directories(directory, self.qbt.get_category_save_paths()),
orphaned_parent_path,
)
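The hunk above also renames the lambda parameter from `dir` to `directory` (presumably to avoid shadowing the `dir` builtin) and maps the cleanup over every orphaned parent path in the worker pool. A simplified standalone sketch of that pattern, with `cleanup_empty` standing in for `util.remove_empty_directories` and made-up paths:

```python
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path


def cleanup_empty(directory, protected_paths):
    """Stand-in for util.remove_empty_directories (see the util.py hunk further down)."""
    path = Path(directory)
    if path in {Path(p) for p in protected_paths}:
        return  # never touch protected paths such as category save paths
    try:
        path.rmdir()  # only succeeds if the directory exists and is empty
    except OSError:
        pass  # not empty, already gone, or not permitted


category_save_paths = ["/data/torrents/movies"]          # illustrative
orphaned_parent_paths = {"/data/orphaned_data/movies"}   # illustrative

with ThreadPoolExecutor(max_workers=4) as executor:
    # Same shape as the executor.map call above: one cleanup task per parent path.
    # map() is lazy, so wrap it in list() to make sure every task actually runs.
    list(executor.map(lambda d: cleanup_empty(d, category_save_paths), orphaned_parent_paths))
```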

View file

@@ -198,6 +198,9 @@ class ShareLimits:
share_limits_not_yet_tagged = (
True if self.group_tag and not is_tag_in_torrent(self.group_tag, torrent.tags) else False
)
check_multiple_share_limits_tag = (
self.group_tag and len(is_tag_in_torrent(self.share_limits_tag, torrent.tags, exact=False)) > 1
)
logger.trace(f"Torrent: {t_name} [Hash: {t_hash}]")
logger.trace(f"Torrent Category: {torrent.category}")
logger.trace(f"Torrent Tags: {torrent.tags}")
@@ -227,6 +230,9 @@ class ShareLimits:
logger.trace(f"check_limit_upload_speed: {check_limit_upload_speed}")
logger.trace(f"hash_not_prev_checked: {hash_not_prev_checked}")
logger.trace(f"share_limits_not_yet_tagged: {share_limits_not_yet_tagged}")
logger.trace(
f"check_multiple_share_limits_tag: {is_tag_in_torrent(self.share_limits_tag, torrent.tags, exact=False)}"
)
tor_reached_seed_limit = self.has_reached_seed_limit(
torrent=torrent,
@@ -241,13 +247,21 @@ class ShareLimits:
# Get updated torrent after checking if the torrent has reached seed limits
torrent = self.qbt.get_torrents({"torrent_hashes": t_hash})[0]
if (
check_max_ratio or check_max_seeding_time or check_limit_upload_speed or share_limits_not_yet_tagged
check_max_ratio
or check_max_seeding_time
or check_limit_upload_speed
or share_limits_not_yet_tagged
or check_multiple_share_limits_tag
) and hash_not_prev_checked:
if (
not is_tag_in_torrent(self.min_seeding_time_tag, torrent.tags)
and not is_tag_in_torrent(self.min_num_seeds_tag, torrent.tags)
and not is_tag_in_torrent(self.last_active_tag, torrent.tags)
) or share_limits_not_yet_tagged:
(
not is_tag_in_torrent(self.min_seeding_time_tag, torrent.tags)
and not is_tag_in_torrent(self.min_num_seeds_tag, torrent.tags)
and not is_tag_in_torrent(self.last_active_tag, torrent.tags)
)
or share_limits_not_yet_tagged
or check_multiple_share_limits_tag
):
logger.print_line(logger.insert_space(f"Torrent Name: {t_name}", 3), self.config.loglevel)
logger.print_line(logger.insert_space(f'Tracker: {tracker["url"]}', 8), self.config.loglevel)
if self.group_tag:
@@ -344,44 +358,26 @@ class ShareLimits:
if limit_upload_speed is not None:
if limit_upload_speed != -1:
msg = logger.insert_space(f"Limit UL Speed: {limit_upload_speed} kB/s", 1)
if do_print:
body += logger.print_line(msg, self.config.loglevel)
else:
body.append(msg)
body.append(msg)
if max_ratio is not None or max_seeding_time is not None:
if max_ratio == -2 and max_seeding_time == -2:
msg = logger.insert_space("Share Limit: Use Global Share Limit", 4)
if do_print:
body += logger.print_line(msg, self.config.loglevel)
else:
body.append(msg)
body.append(msg)
elif max_ratio == -1 and max_seeding_time == -1:
msg = logger.insert_space("Share Limit: Set No Share Limit", 4)
if do_print:
body += logger.print_line(msg, self.config.loglevel)
else:
body.append(msg)
body.append(msg)
else:
if max_ratio != torrent.max_ratio and (max_seeding_time is None or max_seeding_time < 0):
msg = logger.insert_space(f"Share Limit: Max Ratio = {max_ratio}", 4)
if do_print:
body += logger.print_line(msg, self.config.loglevel)
else:
body.append(msg)
body.append(msg)
elif max_seeding_time != torrent.max_seeding_time and (max_ratio is None or max_ratio < 0):
msg = logger.insert_space(f"Share Limit: Max Seed Time = {str(timedelta(minutes=max_seeding_time))}", 4)
if do_print:
body += logger.print_line(msg, self.config.loglevel)
else:
body.append(msg)
body.append(msg)
elif max_ratio != torrent.max_ratio or max_seeding_time != torrent.max_seeding_time:
msg = logger.insert_space(
f"Share Limit: Max Ratio = {max_ratio}, Max Seed Time = {str(timedelta(minutes=max_seeding_time))}", 4
)
if do_print:
body += logger.print_line(msg, self.config.loglevel)
else:
body.append(msg)
body.append(msg)
# Update Torrents
if not self.config.dry_run:
if tags:
@@ -403,6 +399,7 @@ class ShareLimits:
if is_tag_in_torrent(self.last_active_tag, torrent.tags):
return []
torrent.set_share_limits(ratio_limit=max_ratio, seeding_time_limit=max_seeding_time, inactive_seeding_time_limit=-2)
[logger.print_line(msg, self.config.loglevel) for msg in body if do_print]
return body
def has_reached_seed_limit(
@@ -527,7 +524,7 @@ class ShareLimits:
_remove_min_seeding_time_tag()
return False
if seeding_time_limit:
if _has_reached_min_seeding_time_limit() and (torrent.seeding_time >= seeding_time_limit * 60):
if (torrent.seeding_time >= seeding_time_limit * 60) and _has_reached_min_seeding_time_limit():
body += logger.insert_space(
f"Seeding Time vs Max Seed Time: {str(timedelta(seconds=torrent.seeding_time))} >= "
f"{str(timedelta(minutes=seeding_time_limit))}",

View file

@@ -140,7 +140,7 @@ class Qbt:
save_path = torrent.save_path
category = torrent.category
torrent_trackers = torrent.trackers
self.add_torrent_files(torrent_hash, torrent.files)
self.add_torrent_files(torrent_hash, torrent.files, save_path)
except Exception as ex:
self.config.notify(ex, "Get Torrent Info", False)
logger.warning(ex)
@@ -190,7 +190,7 @@ class Qbt:
}
self.torrentinfo[torrent_name] = torrentattr
def add_torrent_files(self, torrent_hash, torrent_files):
def add_torrent_files(self, torrent_hash, torrent_files, save_path):
"""Process torrent files by adding the hash to the appropriate torrent_files list.
Example structure:
torrent_files = {
@@ -200,11 +200,11 @@
}
"""
for file in torrent_files:
file_name = file.name
if file_name not in self.torrentfiles:
self.torrentfiles[file_name] = {"original": torrent_hash, "cross_seed": []}
full_path = os.path.join(save_path, file.name)
if full_path not in self.torrentfiles:
self.torrentfiles[full_path] = {"original": torrent_hash, "cross_seed": []}
else:
self.torrentfiles[file_name]["cross_seed"].append(torrent_hash)
self.torrentfiles[full_path]["cross_seed"].append(torrent_hash)
def is_cross_seed(self, torrent):
"""Check if the torrent is a cross seed if it has one or more files that are cross seeded."""
@@ -215,12 +215,12 @@ class Qbt:
return False
cross_seed = True
for file in torrent.files:
file_name = file.name
if self.torrentfiles[file_name]["original"] == t_hash or t_hash not in self.torrentfiles[file_name]["cross_seed"]:
logger.trace(f"File: [{file_name}] is found in Torrent: {t_name} [Hash: {t_hash}] as the original torrent")
full_path = os.path.join(torrent.save_path, file.name)
if self.torrentfiles[full_path]["original"] == t_hash or t_hash not in self.torrentfiles[full_path]["cross_seed"]:
logger.trace(f"File: [{full_path}] is found in Torrent: {t_name} [Hash: {t_hash}] as the original torrent")
cross_seed = False
break
elif self.torrentfiles[file_name]["original"] is None:
elif self.torrentfiles[full_path]["original"] is None:
cross_seed = False
break
logger.trace(f"Torrent: {t_name} [Hash: {t_hash}] {'is' if cross_seed else 'is not'} a cross seed torrent.")
@@ -232,9 +232,9 @@ class Qbt:
t_hash = torrent.hash
t_name = torrent.name
for file in torrent.files:
file_name = file.name
if len(self.torrentfiles[file_name]["cross_seed"]) > 0:
logger.trace(f"{file_name} has cross seeds: {self.torrentfiles[file_name]['cross_seed']}")
full_path = os.path.join(torrent.save_path, file.name)
if len(self.torrentfiles[full_path]["cross_seed"]) > 0:
logger.trace(f"{full_path} has cross seeds: {self.torrentfiles[full_path]['cross_seed']}")
cross_seed = True
break
logger.trace(f"Torrent: {t_name} [Hash: {t_hash}] {'has' if cross_seed else 'has no'} cross seeds.")
@@ -244,19 +244,19 @@ class Qbt:
"""Update the torrent_files list after a torrent is deleted"""
torrent_hash = torrent.hash
for file in torrent.files:
file_name = file.name
if self.torrentfiles[file_name]["original"] == torrent_hash:
if len(self.torrentfiles[file_name]["cross_seed"]) > 0:
self.torrentfiles[file_name]["original"] = self.torrentfiles[file_name]["cross_seed"].pop(0)
logger.trace(f"Updated {file_name} original to {self.torrentfiles[file_name]['original']}")
full_path = os.path.join(torrent.save_path, file.name)
if self.torrentfiles[full_path]["original"] == torrent_hash:
if len(self.torrentfiles[full_path]["cross_seed"]) > 0:
self.torrentfiles[full_path]["original"] = self.torrentfiles[full_path]["cross_seed"].pop(0)
logger.trace(f"Updated {full_path} original to {self.torrentfiles[full_path]['original']}")
else:
self.torrentfiles[file_name]["original"] = None
self.torrentfiles[full_path]["original"] = None
else:
if torrent_hash in self.torrentfiles[file_name]["cross_seed"]:
self.torrentfiles[file_name]["cross_seed"].remove(torrent_hash)
logger.trace(f"Removed {torrent_hash} from {file_name} cross seeds")
logger.trace(f"{file_name} original: {self.torrentfiles[file_name]['original']}")
logger.trace(f"{file_name} cross seeds: {self.torrentfiles[file_name]['cross_seed']}")
if torrent_hash in self.torrentfiles[full_path]["cross_seed"]:
self.torrentfiles[full_path]["cross_seed"].remove(torrent_hash)
logger.trace(f"Removed {torrent_hash} from {full_path} cross seeds")
logger.trace(f"{full_path} original: {self.torrentfiles[full_path]['original']}")
logger.trace(f"{full_path} cross seeds: {self.torrentfiles[full_path]['cross_seed']}")
def get_torrents(self, params):
"""Get torrents from qBittorrent"""
@@ -403,17 +403,17 @@ class Qbt:
except ValueError:
logger.debug(f"Torrent {torrent.name} has already been removed from torrent files.")
if self.config.recyclebin["enabled"]:
tor_files = []
try:
info_hash = torrent.hash
save_path = torrent.save_path.replace(self.config.root_dir, self.config.remote_dir)
# Define torrent files/folders
for file in torrent.files:
tor_files.append(os.path.join(save_path, file.name))
except NotFound404Error:
return
tor_files = []
try:
info_hash = torrent.hash
save_path = torrent.save_path.replace(self.config.root_dir, self.config.remote_dir)
# Define torrent files/folders
for file in torrent.files:
tor_files.append(os.path.join(save_path, file.name))
except NotFound404Error:
return
if self.config.recyclebin["enabled"]:
if self.config.recyclebin["split_by_category"]:
recycle_path = os.path.join(save_path, os.path.basename(self.config.recycle_dir.rstrip(os.sep)))
else:
@@ -492,14 +492,23 @@ class Qbt:
except FileNotFoundError:
ex = logger.print_line(f"RecycleBin Warning - FileNotFound: No such file or directory: {src} ", "WARNING")
self.config.notify(ex, "Deleting Torrent", False)
# Add src file to orphan exclusion since deleting files is sometimes slow in certain environments
exclude_file = src.replace(self.config.remote_dir, self.config.root_dir)
if exclude_file not in self.config.orphaned["exclude_patterns"]:
self.config.orphaned["exclude_patterns"].append(exclude_file)
# Delete torrent and files
torrent.delete(delete_files=to_delete)
# Remove any empty directories
util.remove_empty_directories(save_path, "**/*", self.get_category_save_paths())
util.remove_empty_directories(save_path, self.get_category_save_paths())
else:
torrent.delete(delete_files=False)
else:
if info["torrents_deleted_and_contents"] is True:
for file in tor_files:
# Add src file to orphan exclusion since deleting files is sometimes slow in certain environments
exclude_file = file.replace(self.config.remote_dir, self.config.root_dir)
if exclude_file not in self.config.orphaned["exclude_patterns"]:
self.config.orphaned["exclude_patterns"].append(exclude_file)
torrent.delete(delete_files=True)
else:
torrent.delete(delete_files=False)
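The `torrentfiles` bookkeeping above now keys entries by `os.path.join(save_path, file.name)` instead of the bare file name, so torrents that merely share a file name under different save paths are no longer mistaken for cross-seeds. A minimal sketch of the keying with made-up hashes and paths:

```python
import os

torrentfiles = {}  # full file path -> {"original": hash, "cross_seed": [hashes]}


def add_torrent_files(torrent_hash, file_names, save_path):
    """Same idea as Qbt.add_torrent_files above, reduced to plain values."""
    for name in file_names:
        full_path = os.path.join(save_path, name)
        if full_path not in torrentfiles:
            torrentfiles[full_path] = {"original": torrent_hash, "cross_seed": []}
        else:
            torrentfiles[full_path]["cross_seed"].append(torrent_hash)


# Same file name under different save paths: two separate entries, no false cross-seed.
add_torrent_files("hash_a", ["episode01.mkv"], "/data/torrents/show_a")
add_torrent_files("hash_b", ["episode01.mkv"], "/data/torrents/show_b")
# A real cross-seed shares the full path and is recorded against the original torrent.
add_torrent_files("hash_c", ["episode01.mkv"], "/data/torrents/show_a")

print(torrentfiles["/data/torrents/show_a/episode01.mkv"])
# {'original': 'hash_a', 'cross_seed': ['hash_c']}
```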

View file

@@ -43,13 +43,21 @@ def is_tag_in_torrent(check_tag, torrent_tags, exact=True):
if exact:
return check_tag in tags
else:
return any(check_tag in t for t in tags)
tags_to_remove = []
for tag in tags:
if check_tag in tag:
tags_to_remove.append(tag)
return tags_to_remove
elif isinstance(check_tag, list):
if exact:
return all(tag in tags for tag in check_tag)
else:
return any(any(tag in t for t in tags) for tag in check_tag)
return False
tags_to_remove = []
for tag in tags:
for ctag in check_tag:
if ctag in tag:
tags_to_remove.append(tag)
return tags_to_remove
class TorrentMessages:
@@ -66,6 +74,8 @@ class TorrentMessages:
"RETITLED",
"TRUNCATED",
"TORRENT IS NOT AUTHORIZED FOR USE ON THIS TRACKER",
"INFOHASH NOT FOUND.", # blutopia
"TORRENT HAS BEEN DELETED.", # blutopia
]
IGNORE_MSGS = [
@@ -440,27 +450,38 @@ def copy_files(src, dest):
logger.error(ex)
def remove_empty_directories(pathlib_root_dir, pattern, excluded_paths=None):
"""Remove empty directories recursively."""
def remove_empty_directories(pathlib_root_dir, excluded_paths=None):
"""Remove empty directories recursively, optimized version."""
pathlib_root_dir = Path(pathlib_root_dir)
try:
# list all directories recursively and sort them by path,
# longest first
longest = sorted(
pathlib_root_dir.glob(pattern),
key=lambda p: len(str(p)),
reverse=True,
)
longest.append(pathlib_root_dir) # delete the folder itself if it's empty
for pdir in longest:
try:
if str(pdir) in excluded_paths:
continue
pdir.rmdir() # remove directory if empty
except (FileNotFoundError, OSError):
continue # catch and continue if non-empty, folders within could already be deleted if run in parallel
except FileNotFoundError:
pass # if this is being run in parallel, pathlib_root_dir could already be deleted
if excluded_paths is not None:
# Ensure excluded_paths is a set of Path objects for efficient lookup
excluded_paths = {Path(p) for p in excluded_paths}
for root, dirs, files in os.walk(pathlib_root_dir, topdown=False):
root_path = Path(root)
# Skip excluded paths
if excluded_paths and root_path in excluded_paths:
continue
# Attempt to remove the directory if it's empty
try:
os.rmdir(root)
except PermissionError as perm:
logger.warning(f"{perm} : Unable to delete folder {root} as it has permission issues. Skipping...")
pass
except OSError:
# Directory not empty or other error - safe to ignore here
pass
# Attempt to remove the root directory if it's now empty and not excluded
if not excluded_paths or pathlib_root_dir not in excluded_paths:
try:
pathlib_root_dir.rmdir()
except PermissionError as perm:
logger.warning(f"{perm} : Unable to delete folder {root} as it has permission issues. Skipping...")
pass
except OSError:
pass
class CheckHardLinks:
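The rewritten `remove_empty_directories` above drops the glob pattern argument and walks the tree bottom-up with `os.walk(topdown=False)`, and the config.py hunk earlier keeps the orphaned_data root folder by passing `[location_path]` as the excluded paths. A hedged standalone sketch of the same behaviour, assuming plain string paths:

```python
import os
from pathlib import Path


def remove_empty_directories(root_dir, excluded_paths=None):
    """Bottom-up removal of empty directories, mirroring the new util.py behaviour."""
    root_dir = Path(root_dir)
    excluded = {Path(p) for p in excluded_paths} if excluded_paths else set()

    # topdown=False yields children before parents, so a directory that becomes
    # empty during the walk is removed in the same pass.
    for current, _dirs, _files in os.walk(root_dir, topdown=False):
        if Path(current) in excluded:
            continue
        try:
            os.rmdir(current)  # only succeeds if the directory is empty
        except OSError:
            pass  # not empty, already deleted, or not permitted

    # Final attempt on the root itself, unless the caller excluded it
    # (e.g. passing [location_path] preserves the orphaned_data root folder).
    if root_dir not in excluded:
        try:
            root_dir.rmdir()
        except OSError:
            pass


# Clean inside the orphaned_data location without deleting the location itself:
remove_empty_directories("/data/orphaned_data", excluded_paths=["/data/orphaned_data"])
```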

View file

@@ -51,7 +51,7 @@ parser.add_argument(
type=str,
help=(
"Schedule to run every x minutes. (Default set to 1440 (1 day))."
"Can also customize schedule via cron syntax (See https://crontab.guru/examples.html)",
"Can also customize schedule via cron syntax (See https://crontab.guru/examples.html)"
),
)
parser.add_argument(

View file

@@ -1,2 +1,2 @@
flake8==7.0.0
pre-commit==3.7.0
pre-commit==3.7.1