Mirror of https://github.com/StuffAnThings/qbit_manage.git (synced 2025-12-10 13:56:13 +08:00)
Merge branch 'develop' into pre-commit-ci-update-config
commit ba7c43af9c
13 changed files with 113 additions and 79 deletions
.github/workflows/develop.yml (vendored, 4 changes)

@@ -35,7 +35,7 @@ jobs:
       ENTRY: qbit_manage.py
     steps:
       - name: Check Out Repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           ref: develop

@@ -404,7 +404,7 @@ jobs:
       OWNER: '${{ github.repository_owner }}'

      - name: Check Out Repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           ref: develop
.github/workflows/docs.yml (vendored, 4 changes)

@@ -20,7 +20,7 @@ jobs:
     if: github.event_name != 'gollum'
     steps:
       - name: Checkout Repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: Sync docs to wiki
         uses: newrelic/wiki-sync-action@main
         with:

@@ -35,7 +35,7 @@ jobs:
     if: github.event_name == 'gollum'
     steps:
       - name: Checkout Repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           token: ${{ secrets.PAT }} # allows us to push back to repo
           ref: develop
.github/workflows/pypi-publish.yml (vendored, 2 changes)

@@ -24,7 +24,7 @@ jobs:
       id-token: write # Required for trusted publishing to PyPI
     steps:
       - name: Check Out Repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6

      - name: Setup Python
        uses: actions/setup-python@v6
.github/workflows/tag.yml (vendored, 2 changes)

@@ -9,7 +9,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:

-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
        with:
          token: ${{ secrets.PAT }}
          fetch-depth: 2
.github/workflows/update-develop-branch.yml (vendored, 2 changes)

@@ -14,7 +14,7 @@ jobs:

     steps:
       - name: Check Out Repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
        with:
          token: ${{ secrets.PAT || secrets.GITHUB_TOKEN }}
          fetch-depth: 0

@@ -23,7 +23,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
        with:
          ref: ${{ github.event.inputs.targetBranch || github.ref_name }}
.github/workflows/version.yml (vendored, 6 changes)

@@ -35,7 +35,7 @@ jobs:
       ENTRY: qbit_manage.py
     steps:
       - name: Check Out Repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6

      - name: Setup Python
        uses: actions/setup-python@v6

@@ -432,7 +432,7 @@ jobs:
       OWNER: '${{ github.repository_owner }}'

      - name: Check Out Repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6

      - name: Login to Docker Hub
        uses: docker/login-action@v3

@@ -502,7 +502,7 @@ jobs:
       contents: write
     steps:
       - name: Check Out Repo
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6

      - name: Get the version
        id: get_version

@@ -1,10 +1,10 @@
 {
     "master": {
-        "qbit": "v5.1.2",
-        "qbitapi": "2025.7.0"
+        "qbit": "v5.1.3",
+        "qbitapi": "2025.11.0"
     },
     "develop": {
-        "qbit": "v5.1.2",
-        "qbitapi": "2025.7.0"
+        "qbit": "v5.1.4",
+        "qbitapi": "2025.11.1"
     }
 }
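The hunk above bumps the qBittorrent and qbittorrent-api versions recorded for the master and develop branches. A minimal sketch of reading such a branch-to-version map in Python (the supported_versions.json file name is an assumption; the diff does not show the file's path):

import json

# Hypothetical file name: the diff above does not show where this map lives.
with open("supported_versions.json") as fh:
    versions = json.load(fh)

# Look up the versions recorded for a given branch.
develop = versions["develop"]
print(develop["qbit"], develop["qbitapi"])  # v5.1.4 2025.11.1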
VERSION (2 changes)

@@ -1 +1 @@
-4.6.4
+4.6.5-develop6

@@ -871,15 +871,16 @@ class Config:
         """
         # Assign directories
         if "directory" in self.data:
-            self.root_dir = os.path.join(
-                self.util.check_for_attribute(self.data, "root_dir", parent="directory", default_is_none=True), ""
-            )
-            self.remote_dir = os.path.join(
-                self.util.check_for_attribute(
-                    self.data, "remote_dir", parent="directory", default=self.root_dir, do_print=False, save=False
-                ),
-                "",
-            )
+            root_dir = self.util.check_for_attribute(self.data, "root_dir", parent="directory", default_is_none=True)
+            if isinstance(root_dir, list):
+                root_dir = root_dir[0]
+            self.root_dir = os.path.join(root_dir, "")
+            remote_dir = self.util.check_for_attribute(
+                self.data, "remote_dir", parent="directory", default=self.root_dir, do_print=False, save=False
+            )
+            if isinstance(remote_dir, list):
+                remote_dir = remote_dir[0]
+            self.remote_dir = os.path.join(remote_dir, "")
             if self.commands["tag_nohardlinks"] or self.commands["rem_orphaned"]:
                 self.remote_dir = self.util.check_for_attribute(
                     self.data,
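The change above lets directory.root_dir and directory.remote_dir be supplied either as a single string or as a list; when a list is given, only the first entry is used, and joining with "" still guarantees a trailing separator. A minimal standalone sketch of that normalization (the helper name is illustrative, not part of the project):

import os

def normalize_dir(value):
    # Accept a string or a list of strings; a list contributes only its first entry.
    if isinstance(value, list):
        value = value[0]
    # Joining with "" appends the platform's path separator if it is missing.
    return os.path.join(value, "")

print(normalize_dir("/mnt/torrents"))               # /mnt/torrents/
print(normalize_dir(["/mnt/torrents", "/backup"]))  # /mnt/torrents/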

@@ -305,6 +305,10 @@ class ShareLimits:
                 tracker=tracker["url"],
                 reset_upload_speed_on_unmet_minimums=group_config["reset_upload_speed_on_unmet_minimums"],
             )
+            logger.trace(f"tor_reached_seed_limit: {tor_reached_seed_limit}")
+            # Update share limits tag if needed
+            self.update_share_limits_tag_for_torrent(torrent)
+
             # Get updated torrent after checking if the torrent has reached seed limits
             torrent = self.qbt.get_torrents({"torrent_hashes": t_hash})[0]
             if (

@@ -313,6 +317,7 @@
                 or check_limit_upload_speed
                 or share_limits_not_yet_tagged
                 or check_multiple_share_limits_tag
+                or tor_reached_seed_limit
             ) and hash_not_prev_checked:
                 if (
                     (

@@ -322,19 +327,41 @@
                     )
                     or share_limits_not_yet_tagged
                     or check_multiple_share_limits_tag
+                    or tor_reached_seed_limit
                 ):
                     logger.print_line(logger.insert_space(f"Torrent Name: {t_name}", 3), self.config.loglevel)
                     logger.print_line(logger.insert_space(f"Tracker: {tracker['url']}", 8), self.config.loglevel)
                     if self.group_tag:
                         logger.print_line(logger.insert_space(f"Added Tag: {self.group_tag}", 8), self.config.loglevel)
-                    self.tag_and_update_share_limits_for_torrent(torrent, group_config)
+                    self.process_share_limits_for_torrent(torrent, group_config, tor_reached_seed_limit, torrent_upload_limit)
                     self.stats_tagged += 1
                     self.torrents_updated.append(t_name)

+            self.torrent_hash_checked.append(t_hash)
+
+    def update_share_limits_tag_for_torrent(self, torrent):
+        """Updates share limits tag for a torrent if needed"""
+        # Check if the share limits tag needs to be updated
+        if not self.config.dry_run:
+            # Remove previous share_limits tag
+            tag = is_tag_in_torrent(self.share_limits_tag, torrent.tags, exact=False)
+            if tag:
+                torrent.remove_tags(tag)
+            # Check if any of the previous share limits custom tags are there
+            for custom_tag in self.share_limits_custom_tags:
+                if is_tag_in_torrent(custom_tag, torrent.tags):
+                    torrent.remove_tags(custom_tag)
+            # Will tag the torrent with the group name if add_group_to_tag is True
+            if self.group_tag:
+                torrent.add_tags(self.group_tag)
+
+    def process_share_limits_for_torrent(self, torrent, group_config, tor_reached_seed_limit, torrent_upload_limit):
+        """Updates share limits for a torrent"""
         # Take action when torrent meets share limits
         if tor_reached_seed_limit:
             if group_config["cleanup"]:
                 # Queue for cleanup (delete .torrent and possibly contents)
+                t_hash = torrent.hash
                 if t_hash not in self.tdel_dict:
                     self.tdel_dict[t_hash] = {}
                 self.tdel_dict[t_hash]["torrent"] = torrent

@@ -345,18 +372,37 @@
             else:
                 # New behavior: throttle upload speed instead of pausing/removing
                 throttle_kib = group_config.get("upload_speed_on_limit_reached", 0)
-                # Apply per-torrent upload throttle (KiB/s) or unlimited if -1/0
+
+                # Skip if throttle not configured (0 means not set)
+                if throttle_kib == 0:
+                    logger.debug(f"Skipping throttle for {torrent.name}: upload_speed_on_limit_reached not configured")
+                    self.set_limits(
+                        torrent=torrent,
+                        max_ratio=group_config["max_ratio"],
+                        max_seeding_time=group_config["max_seeding_time"],
+                        limit_upload_speed=group_config["limit_upload_speed"],
+                    )
+                    return
+
+                # Validate throttle value (must be -1 for unlimited or positive)
+                if throttle_kib < -1:
+                    logger.warning(
+                        f"Invalid upload_speed_on_limit_reached value: {throttle_kib}. Must be >= -1. "
+                        f"Skipping throttle for {torrent.name}"
+                    )
+                    return
+
+                # Apply per-torrent upload throttle (KiB/s) or unlimited if -1
                 limit_val = -1 if throttle_kib == -1 else throttle_kib * 1024
-                if limit_val and throttle_kib != torrent_upload_limit:
-                    body = []
-                    body += logger.print_line(logger.insert_space(f"Torrent Name: {t_name}", 3), self.config.loglevel)
-                    body += logger.print_line(logger.insert_space(f"Tracker: {tracker['url']}", 8), self.config.loglevel)
-                    body += logger.print_line(
+
+                # Check if throttle needs to be applied (compare in KiB/s)
+                if throttle_kib != torrent_upload_limit:
+                    logger.print_line(
                         logger.insert_space("Cleanup: False [Meets Share Limits]", 8),
                         self.config.loglevel,
                     )
                     disp = "unlimited" if throttle_kib == -1 else f"{throttle_kib} kB/s"
-                    body += logger.print_line(
+                    logger.print_line(
                         logger.insert_space(f"Applied upload throttle after limits reached: {disp}", 8),
                         self.config.loglevel,
                     )
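In the throttle branch above, upload_speed_on_limit_reached is interpreted in KiB/s: 0 means the option is not configured (fall back to the group's normal limits), anything below -1 is rejected, -1 means unlimited, and a positive value is multiplied by 1024 before being handed to qBittorrent, which expects bytes per second. A minimal sketch of that interpretation on its own (the function name is illustrative):

def interpret_throttle(throttle_kib):
    """Return the per-torrent upload limit to apply, or None if the throttle should be skipped."""
    if throttle_kib == 0:
        return None  # not configured: keep the group's normal share limits
    if throttle_kib < -1:
        raise ValueError(f"Invalid upload_speed_on_limit_reached value: {throttle_kib}. Must be >= -1.")
    # -1 stays -1 (unlimited); positive KiB/s values become bytes/s
    return -1 if throttle_kib == -1 else throttle_kib * 1024

print(interpret_throttle(0))    # None (skip)
print(interpret_throttle(-1))   # -1 (unlimited)
print(interpret_throttle(512))  # 524288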

@@ -365,31 +411,14 @@
                     # Allow continued seeding by removing share limits
                     torrent.set_share_limits(ratio_limit=-1, seeding_time_limit=-1, inactive_seeding_time_limit=-1)
                     torrent.set_upload_limit(limit_val)
                     # Optionally resume if configured
                     if group_config["resume_torrent_after_change"] and torrent.state_enum.is_complete:
                         torrent.resume()
-            self.torrent_hash_checked.append(t_hash)
-
-    def tag_and_update_share_limits_for_torrent(self, torrent, group_config):
-        """Removes previous share limits tag, updates tag and share limits for a torrent, and resumes the torrent"""
-        # Remove previous share_limits tag
-        if not self.config.dry_run:
-            tag = is_tag_in_torrent(self.share_limits_tag, torrent.tags, exact=False)
-            if tag:
-                torrent.remove_tags(tag)
-            # Check if any of the previous share limits custom tags are there
-            for custom_tag in self.share_limits_custom_tags:
-                if is_tag_in_torrent(custom_tag, torrent.tags):
-                    torrent.remove_tags(custom_tag)
-
-        # Will tag the torrent with the group name if add_group_to_tag is True and set the share limits
-        self.set_tags_and_limits(
+        else:
+            self.set_limits(
                 torrent=torrent,
                 max_ratio=group_config["max_ratio"],
                 max_seeding_time=group_config["max_seeding_time"],
                 limit_upload_speed=group_config["limit_upload_speed"],
-                tags=self.group_tag,
             )

-        # Resume torrent if it was paused now that the share limit has changed
-        if torrent.state_enum.is_complete and group_config["resume_torrent_after_change"]:
-            if not self.config.dry_run:

@@ -501,7 +530,7 @@

         return None

-    def set_tags_and_limits(self, torrent, max_ratio, max_seeding_time, limit_upload_speed=None, tags=None, do_print=True):
+    def set_limits(self, torrent, max_ratio, max_seeding_time, limit_upload_speed=None, do_print=True):
         """Set tags and limits for a torrent"""
         body = []
         if limit_upload_speed is not None:

@@ -529,8 +558,6 @@
             body.append(msg)
         # Update Torrents
         if not self.config.dry_run:
-            if tags:
-                torrent.add_tags(tags)
             torrent_upload_limit = -1 if round(torrent.up_limit / 1024) == 0 else round(torrent.up_limit / 1024)
             if limit_upload_speed is not None and limit_upload_speed != torrent_upload_limit:
                 if limit_upload_speed == -1:
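Taken together, the hunks above rename set_tags_and_limits to set_limits and drop its tags parameter: tagging now happens in update_share_limits_tag_for_torrent before the limits are applied. A rough sketch of the resulting split in responsibilities, reusing the torrent methods the diff itself calls (add_tags, set_upload_limit, set_share_limits); it is an illustration, not the project's actual method bodies:

def apply_group_tag(torrent, group_tag):
    # Tagging only, mirroring update_share_limits_tag_for_torrent's final step.
    if group_tag:
        torrent.add_tags(group_tag)

def apply_limits(torrent, max_ratio, max_seeding_time, limit_upload_speed=None):
    # Limits only; there is no tags argument after the rename to set_limits.
    if limit_upload_speed is not None:
        # qBittorrent expects bytes/s; -1 leaves the upload speed unlimited.
        torrent.set_upload_limit(-1 if limit_upload_speed == -1 else limit_upload_speed * 1024)
    torrent.set_share_limits(
        ratio_limit=max_ratio,
        seeding_time_limit=max_seeding_time,
        inactive_seeding_time_limit=-1,
    )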

@@ -1390,6 +1390,12 @@ def path_replace(path, old_path, new_path):
         return path

     # Normalize all paths to use forward slashes for comparison
+    if isinstance(path, list):
+        path = path[0]
+    if isinstance(old_path, list):
+        old_path = old_path[0]
+    if isinstance(new_path, list):
+        new_path = new_path[0]
     path_norm = path.replace("\\", "/")
     old_norm = old_path.replace("\\", "/")
     new_norm = new_path.replace("\\", "/") if new_path else ""
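path_replace now tolerates list-valued arguments by taking the first entry before normalizing separators, so Windows-style and POSIX-style paths are compared on equal footing. A minimal standalone sketch of that normalization step (not the project's full path_replace):

def normalize_for_compare(value):
    # Accept a string or a list of strings, then compare using forward slashes only.
    if isinstance(value, list):
        value = value[0]
    return value.replace("\\", "/")

print(normalize_for_compare("C:\\downloads\\iso"))          # C:/downloads/iso
print(normalize_for_compare(["/mnt/data", "/mnt/backup"]))  # /mnt/data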

@@ -20,11 +20,11 @@ dependencies = [
     "argon2-cffi==25.1.0",
     "bencodepy==0.9.5",
     "croniter==6.0.0",
-    "fastapi==0.121.2",
+    "fastapi==0.122.0",
     "GitPython==3.1.45",
     "humanize==4.13.0",
     "pytimeparse2==1.7.1",
-    "qbittorrent-api==2025.11.0",
+    "qbittorrent-api==2025.11.1",
     "requests==2.32.5",
     "retrying==1.4.2",
     "ruamel.yaml==0.18.16",

@@ -42,7 +42,7 @@ Repository = "https://github.com/StuffAnThings/qbit_manage"
 [project.optional-dependencies]
 dev = [
     "pre-commit==4.3.0",
-    "ruff==0.14.5",
+    "ruff==0.14.6",
 ]

 [tool.ruff]