diff --git a/.github/workflows/develop.yml b/.github/workflows/develop.yml
index 8ab0f6d..eed04ff 100644
--- a/.github/workflows/develop.yml
+++ b/.github/workflows/develop.yml
@@ -35,7 +35,7 @@ jobs:
       ENTRY: qbit_manage.py
     steps:
       - name: Check Out Repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           ref: develop
 
@@ -404,7 +404,7 @@ jobs:
           OWNER: '${{ github.repository_owner }}'
 
       - name: Check Out Repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           ref: develop
 
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 06eb380..6de87a1 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -20,7 +20,7 @@ jobs:
     if: github.event_name != 'gollum'
     steps:
       - name: Checkout Repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: Sync docs to wiki
         uses: newrelic/wiki-sync-action@main
         with:
@@ -35,7 +35,7 @@ jobs:
     if: github.event_name == 'gollum'
     steps:
       - name: Checkout Repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           token: ${{ secrets.PAT }} # allows us to push back to repo
           ref: develop
diff --git a/.github/workflows/pypi-publish.yml b/.github/workflows/pypi-publish.yml
index 6b746f0..aa08093 100644
--- a/.github/workflows/pypi-publish.yml
+++ b/.github/workflows/pypi-publish.yml
@@ -24,7 +24,7 @@ jobs:
       id-token: write # Required for trusted publishing to PyPI
     steps:
       - name: Check Out Repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
 
      - name: Setup Python
        uses: actions/setup-python@v6
diff --git a/.github/workflows/tag.yml b/.github/workflows/tag.yml
index 3938101..36895ec 100644
--- a/.github/workflows/tag.yml
+++ b/.github/workflows/tag.yml
@@ -9,7 +9,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
        with:
          token: ${{ secrets.PAT }}
          fetch-depth: 2
 
diff --git a/.github/workflows/update-develop-branch.yml b/.github/workflows/update-develop-branch.yml
index 8f215c9..7aa8d52 100644
--- a/.github/workflows/update-develop-branch.yml
+++ b/.github/workflows/update-develop-branch.yml
@@ -14,7 +14,7 @@ jobs:
 
     steps:
       - name: Check Out Repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
        with:
          token: ${{ secrets.PAT || secrets.GITHUB_TOKEN }}
          fetch-depth: 0
diff --git a/.github/workflows/update-supported-versions.yml b/.github/workflows/update-supported-versions.yml
index c7b07ff..a0b657f 100644
--- a/.github/workflows/update-supported-versions.yml
+++ b/.github/workflows/update-supported-versions.yml
@@ -23,7 +23,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
        with:
          ref: ${{ github.event.inputs.targetBranch || github.ref_name }}
 
diff --git a/.github/workflows/version.yml b/.github/workflows/version.yml
index 85ef079..9de1171 100644
--- a/.github/workflows/version.yml
+++ b/.github/workflows/version.yml
@@ -35,7 +35,7 @@ jobs:
       ENTRY: qbit_manage.py
     steps:
       - name: Check Out Repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
 
      - name: Setup Python
        uses: actions/setup-python@v6
@@ -432,7 +432,7 @@ jobs:
           OWNER: '${{ github.repository_owner }}'
 
       - name: Check Out Repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
 
      - name: Login to Docker Hub
        uses: docker/login-action@v3
@@ -502,7 +502,7 @@ jobs:
       contents: write
     steps:
       - name: Check Out Repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
 
      - name: Get the version
        id: get_version
diff --git a/SUPPORTED_VERSIONS.json b/SUPPORTED_VERSIONS.json
index 2ecc96b..2c4ee14 100644
--- a/SUPPORTED_VERSIONS.json
+++ b/SUPPORTED_VERSIONS.json
@@ -1,10 +1,10 @@
 {
     "master": {
-        "qbit": "v5.1.2",
-        "qbitapi": "2025.7.0"
+        "qbit": "v5.1.3",
+        "qbitapi": "2025.11.0"
     },
     "develop": {
-        "qbit": "v5.1.2",
-        "qbitapi": "2025.7.0"
+        "qbit": "v5.1.4",
+        "qbitapi": "2025.11.1"
     }
 }
diff --git a/VERSION b/VERSION
index ef36c9a..5be6b72 100755
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-4.6.4
+4.6.5-develop6
diff --git a/modules/config.py b/modules/config.py
index 2c90e55..7617a02 100755
--- a/modules/config.py
+++ b/modules/config.py
@@ -871,15 +871,16 @@ class Config:
         """
         # Assign directories
         if "directory" in self.data:
-            self.root_dir = os.path.join(
-                self.util.check_for_attribute(self.data, "root_dir", parent="directory", default_is_none=True), ""
-            )
-            self.remote_dir = os.path.join(
-                self.util.check_for_attribute(
-                    self.data, "remote_dir", parent="directory", default=self.root_dir, do_print=False, save=False
-                ),
-                "",
+            root_dir = self.util.check_for_attribute(self.data, "root_dir", parent="directory", default_is_none=True)
+            if isinstance(root_dir, list):
+                root_dir = root_dir[0]
+            self.root_dir = os.path.join(root_dir, "")
+            remote_dir = self.util.check_for_attribute(
+                self.data, "remote_dir", parent="directory", default=self.root_dir, do_print=False, save=False
             )
+            if isinstance(remote_dir, list):
+                remote_dir = remote_dir[0]
+            self.remote_dir = os.path.join(remote_dir, "")
             if self.commands["tag_nohardlinks"] or self.commands["rem_orphaned"]:
                 self.remote_dir = self.util.check_for_attribute(
                     self.data,
diff --git a/modules/core/share_limits.py b/modules/core/share_limits.py
index cdf3b04..502e8f1 100644
--- a/modules/core/share_limits.py
+++ b/modules/core/share_limits.py
@@ -305,6 +305,10 @@ class ShareLimits:
                 tracker=tracker["url"],
                 reset_upload_speed_on_unmet_minimums=group_config["reset_upload_speed_on_unmet_minimums"],
             )
+            logger.trace(f"tor_reached_seed_limit: {tor_reached_seed_limit}")
+            # Update share limits tag if needed
+            self.update_share_limits_tag_for_torrent(torrent)
+
             # Get updated torrent after checking if the torrent has reached seed limits
             torrent = self.qbt.get_torrents({"torrent_hashes": t_hash})[0]
             if (
@@ -313,6 +317,7 @@ class ShareLimits:
                 or check_limit_upload_speed
                 or share_limits_not_yet_tagged
                 or check_multiple_share_limits_tag
+                or tor_reached_seed_limit
             ) and hash_not_prev_checked:
                 if (
                     (
@@ -322,58 +327,23 @@ class ShareLimits:
                     )
                     or share_limits_not_yet_tagged
                     or check_multiple_share_limits_tag
+                    or tor_reached_seed_limit
                 ):
                     logger.print_line(logger.insert_space(f"Torrent Name: {t_name}", 3), self.config.loglevel)
                     logger.print_line(logger.insert_space(f"Tracker: {tracker['url']}", 8), self.config.loglevel)
                     if self.group_tag:
                         logger.print_line(logger.insert_space(f"Added Tag: {self.group_tag}", 8), self.config.loglevel)
-                    self.tag_and_update_share_limits_for_torrent(torrent, group_config)
+                    self.process_share_limits_for_torrent(torrent, group_config, tor_reached_seed_limit, torrent_upload_limit)
                     self.stats_tagged += 1
                     self.torrents_updated.append(t_name)
 
-            # Take action when torrent meets share limits
-            if tor_reached_seed_limit:
-                if group_config["cleanup"]:
-                    # Queue for cleanup (delete .torrent and possibly contents)
-                    if t_hash not in self.tdel_dict:
-                        self.tdel_dict[t_hash] = {}
-                    self.tdel_dict[t_hash]["torrent"] = torrent
-                    self.tdel_dict[t_hash]["content_path"] = util.path_replace(
-                        torrent["content_path"], self.root_dir, self.remote_dir
-                    )
-                    self.tdel_dict[t_hash]["body"] = tor_reached_seed_limit
-                else:
-                    # New behavior: throttle upload speed instead of pausing/removing
-                    throttle_kib = group_config.get("upload_speed_on_limit_reached", 0)
-                    # Apply per-torrent upload throttle (KiB/s) or unlimited if -1/0
-                    limit_val = -1 if throttle_kib == -1 else throttle_kib * 1024
-                    if limit_val and throttle_kib != torrent_upload_limit:
-                        body = []
-                        body += logger.print_line(logger.insert_space(f"Torrent Name: {t_name}", 3), self.config.loglevel)
-                        body += logger.print_line(logger.insert_space(f"Tracker: {tracker['url']}", 8), self.config.loglevel)
-                        body += logger.print_line(
-                            logger.insert_space("Cleanup: False [Meets Share Limits]", 8),
-                            self.config.loglevel,
-                        )
-                        disp = "unlimited" if throttle_kib == -1 else f"{throttle_kib} kB/s"
-                        body += logger.print_line(
-                            logger.insert_space(f"Applied upload throttle after limits reached: {disp}", 8),
-                            self.config.loglevel,
-                        )
-                        # Clear share limits to prevent qBittorrent from pausing again, then apply throttle
-                        if not self.config.dry_run:
-                            # Allow continued seeding by removing share limits
-                            torrent.set_share_limits(ratio_limit=-1, seeding_time_limit=-1, inactive_seeding_time_limit=-1)
-                            torrent.set_upload_limit(limit_val)
-                        # Optionally resume if configured
-                        if group_config["resume_torrent_after_change"] and torrent.state_enum.is_complete:
-                            torrent.resume()
             self.torrent_hash_checked.append(t_hash)
 
-    def tag_and_update_share_limits_for_torrent(self, torrent, group_config):
-        """Removes previous share limits tag, updates tag and share limits for a torrent, and resumes the torrent"""
-        # Remove previous share_limits tag
+    def update_share_limits_tag_for_torrent(self, torrent):
+        """Updates share limits tag for a torrent if needed"""
+        # Check if the share limits tag needs to be updated
         if not self.config.dry_run:
+            # Remove previous share_limits tag
             tag = is_tag_in_torrent(self.share_limits_tag, torrent.tags, exact=False)
             if tag:
                 torrent.remove_tags(tag)
@@ -381,15 +351,74 @@ class ShareLimits:
             for custom_tag in self.share_limits_custom_tags:
                 if is_tag_in_torrent(custom_tag, torrent.tags):
                     torrent.remove_tags(custom_tag)
+            # Will tag the torrent with the group name if add_group_to_tag is True
+            if self.group_tag:
+                torrent.add_tags(self.group_tag)
+
+    def process_share_limits_for_torrent(self, torrent, group_config, tor_reached_seed_limit, torrent_upload_limit):
+        """Updates share limits for a torrent"""
+        # Take action when torrent meets share limits
+        if tor_reached_seed_limit:
+            if group_config["cleanup"]:
+                # Queue for cleanup (delete .torrent and possibly contents)
+                t_hash = torrent.hash
+                if t_hash not in self.tdel_dict:
+                    self.tdel_dict[t_hash] = {}
+                self.tdel_dict[t_hash]["torrent"] = torrent
+                self.tdel_dict[t_hash]["content_path"] = util.path_replace(
+                    torrent["content_path"], self.root_dir, self.remote_dir
+                )
+                self.tdel_dict[t_hash]["body"] = tor_reached_seed_limit
+            else:
+                # New behavior: throttle upload speed instead of pausing/removing
+                throttle_kib = group_config.get("upload_speed_on_limit_reached", 0)
+
+                # Skip if throttle not configured (0 means not set)
+                if throttle_kib == 0:
+                    logger.debug(f"Skipping throttle for {torrent.name}: upload_speed_on_limit_reached not configured")
+                    self.set_limits(
+                        torrent=torrent,
+                        max_ratio=group_config["max_ratio"],
+                        max_seeding_time=group_config["max_seeding_time"],
+                        limit_upload_speed=group_config["limit_upload_speed"],
+                    )
+                    return
+
+                # Validate throttle value (must be -1 for unlimited or positive)
+                if throttle_kib < -1:
+                    logger.warning(
+                        f"Invalid upload_speed_on_limit_reached value: {throttle_kib}. Must be >= -1. "
+                        f"Skipping throttle for {torrent.name}"
+                    )
+                    return
+
+                # Apply per-torrent upload throttle (KiB/s) or unlimited if -1
+                limit_val = -1 if throttle_kib == -1 else throttle_kib * 1024
+
+                # Check if throttle needs to be applied (compare in KiB/s)
+                if throttle_kib != torrent_upload_limit:
+                    logger.print_line(
+                        logger.insert_space("Cleanup: False [Meets Share Limits]", 8),
+                        self.config.loglevel,
+                    )
+                    disp = "unlimited" if throttle_kib == -1 else f"{throttle_kib} kB/s"
+                    logger.print_line(
+                        logger.insert_space(f"Applied upload throttle after limits reached: {disp}", 8),
+                        self.config.loglevel,
+                    )
+                    # Clear share limits to prevent qBittorrent from pausing again, then apply throttle
+                    if not self.config.dry_run:
+                        # Allow continued seeding by removing share limits
+                        torrent.set_share_limits(ratio_limit=-1, seeding_time_limit=-1, inactive_seeding_time_limit=-1)
+                        torrent.set_upload_limit(limit_val)
+        else:
+            self.set_limits(
+                torrent=torrent,
+                max_ratio=group_config["max_ratio"],
+                max_seeding_time=group_config["max_seeding_time"],
+                limit_upload_speed=group_config["limit_upload_speed"],
+            )
 
-        # Will tag the torrent with the group name if add_group_to_tag is True and set the share limits
-        self.set_tags_and_limits(
-            torrent=torrent,
-            max_ratio=group_config["max_ratio"],
-            max_seeding_time=group_config["max_seeding_time"],
-            limit_upload_speed=group_config["limit_upload_speed"],
-            tags=self.group_tag,
-        )
         # Resume torrent if it was paused now that the share limit has changed
         if torrent.state_enum.is_complete and group_config["resume_torrent_after_change"]:
             if not self.config.dry_run:
@@ -501,7 +530,7 @@ class ShareLimits:
 
         return None
 
-    def set_tags_and_limits(self, torrent, max_ratio, max_seeding_time, limit_upload_speed=None, tags=None, do_print=True):
+    def set_limits(self, torrent, max_ratio, max_seeding_time, limit_upload_speed=None, do_print=True):
         """Set tags and limits for a torrent"""
         body = []
         if limit_upload_speed is not None:
@@ -529,8 +558,6 @@ class ShareLimits:
             body.append(msg)
         # Update Torrents
         if not self.config.dry_run:
-            if tags:
-                torrent.add_tags(tags)
             torrent_upload_limit = -1 if round(torrent.up_limit / 1024) == 0 else round(torrent.up_limit / 1024)
             if limit_upload_speed is not None and limit_upload_speed != torrent_upload_limit:
                 if limit_upload_speed == -1:
diff --git a/modules/util.py b/modules/util.py
index cf02a71..acbc16e 100755
--- a/modules/util.py
+++ b/modules/util.py
@@ -1390,6 +1390,12 @@ def path_replace(path, old_path, new_path):
         return path
 
     # Normalize all paths to use forward slashes for comparison
+    if isinstance(path, list):
+        path = path[0]
+    if isinstance(old_path, list):
+        old_path = old_path[0]
+    if isinstance(new_path, list):
+        new_path = new_path[0]
     path_norm = path.replace("\\", "/")
     old_norm = old_path.replace("\\", "/")
     new_norm = new_path.replace("\\", "/") if new_path else ""
diff --git a/pyproject.toml b/pyproject.toml
index 3b44664..68b0e3b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -20,11 +20,11 @@ dependencies = [
     "argon2-cffi==25.1.0",
     "bencodepy==0.9.5",
     "croniter==6.0.0",
-    "fastapi==0.121.2",
+    "fastapi==0.122.0",
     "GitPython==3.1.45",
     "humanize==4.13.0",
     "pytimeparse2==1.7.1",
-    "qbittorrent-api==2025.11.0",
+    "qbittorrent-api==2025.11.1",
     "requests==2.32.5",
     "retrying==1.4.2",
     "ruamel.yaml==0.18.16",
@@ -42,7 +42,7 @@ Repository = "https://github.com/StuffAnThings/qbit_manage"
 [project.optional-dependencies]
 dev = [
     "pre-commit==4.3.0",
-    "ruff==0.14.5",
+    "ruff==0.14.6",
 ]
 
 [tool.ruff]