diff --git a/.dockerignore b/.dockerignore
index b2930b1..09871fb 100755
--- a/.dockerignore
+++ b/.dockerignore
@@ -24,3 +24,8 @@ test.py
qbit_manage.egg-info/
.tox
*.env
+__pycache__
+*.pyc
+*.pyo
+*.pyd
+.env
diff --git a/.flake8 b/.flake8
new file mode 100644
index 0000000..73a8407
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,10 @@
+[flake8]
+extend-ignore =
+ # E722 Do not use bare except, specify exception instead
+ E722,
+ # E402 module level import not at top of file
+ E402,
+ # E501 line too long
+ E501,
+max-line-length = 130
+exclude = .git,__pycache__,build,dist
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
old mode 100755
new mode 100644
index c3d8d98..9a09470
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -12,6 +12,12 @@ updates:
target-branch: "develop"
assignees:
- "bobokun"
+ # Specify the file to check for dependencies
+ # Dependabot will now look at pyproject.toml instead of requirements.txt
+ allow:
+ - dependency-type: "direct"
+ # Specify the file to update
+ versioning-strategy: increase-if-necessary
- package-ecosystem: github-actions
directory: '/'
schedule:
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 92576db..cbb19d4 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -21,12 +21,20 @@ jobs:
with:
python-version: '3.9'
+ - name: Install uv
+ run: |
+ curl -LsSf https://astral.sh/uv/install.sh | sh
+ echo "$HOME/.local/bin" >> $GITHUB_PATH
+
- name: Install dependencies
run: |
- pip install pre-commit
+ uv venv .venv
+ source .venv/bin/activate
+ uv pip install pre-commit
- name: Run pre-commit version check
run: |
+ source .venv/bin/activate
pre-commit run increase-version --all-files
ruff:
diff --git a/.github/workflows/update-supported-versions.yml b/.github/workflows/update-supported-versions.yml
index af54058..dcb7135 100644
--- a/.github/workflows/update-supported-versions.yml
+++ b/.github/workflows/update-supported-versions.yml
@@ -6,7 +6,7 @@ on:
- master
- develop
paths:
- - "requirements.txt"
+ - "pyproject.toml"
workflow_dispatch:
inputs:
targetBranch:
@@ -32,32 +32,54 @@ jobs:
with:
python-version: "3.x"
- - name: Install dependencies from requirements.txt
+ - name: Install uv
run: |
- python -m pip install --upgrade pip
- pip install -r requirements.txt
+ curl -LsSf https://astral.sh/uv/install.sh | sh
+ echo "$HOME/.local/bin" >> $GITHUB_PATH
+
+ - name: Install dependencies with uv
+ run: |
+ uv venv .venv
+ source .venv/bin/activate
+ uv pip install .
- name: Run update script
- run: python scripts/update-readme-version.py ${{ github.event.inputs.targetBranch || github.ref_name }}
+ run: |
+ source .venv/bin/activate
+ python scripts/update-readme-version.py ${{ github.event.inputs.targetBranch || github.ref_name }}
+
+ - name: Update develop versions
+ if: ${{ (github.event.inputs.targetBranch || github.ref_name) == 'develop' }}
+ id: get-develop-version
+ run: |
+ # Run the script and capture its output
+ output=$(bash scripts/pre-commit/update_develop_version.sh)
+ # Extract the last line which contains the version
+ version=$(echo "$output" | tail -n 1)
+ # Set the version as an output parameter for later steps
+ echo "version=$version" >> $GITHUB_OUTPUT
+ # Debug info
+ echo "Script output: $output"
+ echo "Captured Version: $version"
- name: Create Pull Request
- id: cpr
+ id: create-pr
uses: peter-evans/create-pull-request@v7
with:
commit-message: Update SUPPORTED_VERSIONS.json
- title: "Update SUPPORTED_VERSIONS.json for ${{ github.event.inputs.targetBranch || github.ref_name }}"
+ title: "Update SUPPORTED_VERSIONS.json for ${{ steps.get-develop-version.outputs.version || github.event.inputs.targetBranch || github.ref_name }}"
branch: update-supported-versions-${{ github.event.inputs.targetBranch || github.ref_name }}
base: develop
body: "This PR updates the SUPPORTED_VERSIONS.json to reflect new versions."
- name: Approve the Pull Request
- if: ${{ steps.cpr.outputs.pull-request-number }}
- run: gh pr review ${{ steps.cpr.outputs.pull-request-number }} --approve
+ if: ${{ steps.create-pr.outputs.pull-request-number }}
+ run: gh pr review ${{ steps.create-pr.outputs.pull-request-number }} --approve
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Merge the Pull Request
- if: ${{ steps.cpr.outputs.pull-request-number }}
- run: gh pr merge ${{ steps.cpr.outputs.pull-request-number }} --auto --squash
+ if: ${{ steps.create-pr.outputs.pull-request-number }}
+ run: gh pr merge ${{ steps.create-pr.outputs.pull-request-number }} --auto --squash
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.gitignore b/.gitignore
index cbb341b..64a85fd 100755
--- a/.gitignore
+++ b/.gitignore
@@ -12,3 +12,4 @@ __pycache__/
qbit_manage.egg-info/
.tox
*.env
+**/build
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index cc6339c..de6fd99 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -8,13 +8,12 @@ repos:
- id: check-merge-conflict
- id: check-json
- id: check-yaml
- - id: requirements-txt-fixer
- id: check-added-large-files
- id: fix-byte-order-marker
- id: pretty-format-json
args: [--autofix, --indent, '4', --no-sort-keys]
- repo: https://github.com/adrienverge/yamllint.git
- rev: v1.37.0 # or higher tag
+ rev: v1.37.1 # or higher tag
hooks:
- id: yamllint
args: [--format, parsable, --strict]
@@ -26,7 +25,7 @@ repos:
exclude: ^.github/
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
- rev: v0.11.6
+ rev: v0.11.8
hooks:
# Run the linter.
- id: ruff
diff --git a/CHANGELOG b/CHANGELOG
index 7b1dad1..6739a6a 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -1,9 +1,18 @@
+# Breaking Change
+- `requirements.txt` is now replaced with `pyproject.toml` meaning that **local installs** will need to replace their update command `pip install -r requirements.txt` with `pip install .`
+- Those that are running qbit-manage in docker don't need to do anything and things will continue to work as is
+
# Requirements Updated
-qbittorrent-api==2025.4.1
-humanize==4.12.2
+qbittorrent-api==2025.5.0
+humanize==4.12.3
# New Updates
-- Adds warning to share_limits not being applied in dry-run (closes #786)
-- Adds credit to remove_scross-seed_tag.py script (Thanks to @zakkarry)
+- Added user defined stalled_tag. Configurable through config.yml. (Closes #802 Thanks to @Patchy3767)
-**Full Changelog**: https://github.com/StuffAnThings/qbit_manage/compare/v4.2.1...v4.2.2
+## Bug Fixes
+- Fixed max_seeding time of 0 for share_limits (Fixes #790 Thanks to @glau-bd)
+- Fixed Upload Limit not reset when LastActive/MinSeedsNotMet (Fixes #804)
+- Fixed Share limits not showing in logs when 0 torrents are in the group (Fixes #789)
+- Fixes bug where it tries to remove root_dir when not using category (Fixes #777)
+
+**Full Changelog**: https://github.com/StuffAnThings/qbit_manage/compare/v4.2.2...v4.3.0
diff --git a/Dockerfile b/Dockerfile
old mode 100755
new mode 100644
index d22953d..24a974e
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,21 +1,49 @@
-FROM python:3.11-alpine
+# Use a multi-stage build to minimize final image size
+FROM python:3.13-alpine AS builder
+
ARG BRANCH_NAME=master
ENV BRANCH_NAME=${BRANCH_NAME}
+
+# Install build-time dependencies only
+RUN apk add --no-cache \
+ gcc \
+ g++ \
+ libxml2-dev \
+ libxslt-dev \
+ zlib-dev \
+ curl \
+ bash
+
+# Install UV (fast pip alternative)
+RUN curl -LsSf https://astral.sh/uv/install.sh | sh
+
+# Copy only dependency files first (better layer caching)
+COPY pyproject.toml setup.py VERSION /app/
+WORKDIR /app
+
+# Install project in a virtual env (lightweight & reproducible)
+RUN /root/.local/bin/uv pip install --system .
+
+# Final stage: minimal runtime image
+FROM python:3.13-alpine
+
ENV TINI_VERSION=v0.19.0
-ENV QBM_DOCKER=True
-COPY requirements.txt /
-
-# install packages
-RUN echo "**** install system packages ****" \
- && apk update \
- && apk upgrade \
- && apk add --no-cache tzdata gcc g++ libxml2-dev libxslt-dev zlib-dev bash curl wget jq grep sed coreutils findutils unzip p7zip ca-certificates tini\
- && pip3 install --no-cache-dir --upgrade --requirement /requirements.txt \
- && apk del gcc g++ libxml2-dev libxslt-dev zlib-dev \
- && rm -rf /requirements.txt /tmp/* /var/tmp/* /var/cache/apk/*
+# Runtime dependencies (smaller than build stage)
+RUN apk add --no-cache \
+ tzdata \
+ bash \
+ curl \
+ jq \
+ tini \
+ && rm -rf /var/cache/apk/*
+# Copy installed packages and scripts from builder
+COPY --from=builder /usr/local/lib/python3.13/site-packages/ /usr/local/lib/python3.13/site-packages/
+COPY --from=builder /app /app
COPY . /app
WORKDIR /app
VOLUME /config
-ENTRYPOINT ["/sbin/tini", "-s", "python3", "qbit_manage.py"]
+
+ENTRYPOINT ["/sbin/tini", "-s", "--"]
+CMD ["python3", "qbit_manage.py"]
diff --git a/Makefile b/Makefile
index 788e185..94861ed 100644
--- a/Makefile
+++ b/Makefile
@@ -1,24 +1,89 @@
-.PHONY: minimal
-minimal: venv
+# Define the path to uv
+UV_PATH := $(shell which uv 2>/dev/null || echo "")
+UV_LOCAL_PATH := $(HOME)/.local/bin/uv
+UV_CARGO_PATH := $(HOME)/.cargo/bin/uv
-venv: requirements.txt setup.py tox.ini
- tox -e venv
+# Check if uv is installed, if not set UV_INSTALL to 1
+ifeq ($(UV_PATH),)
+ ifeq ($(wildcard $(UV_LOCAL_PATH)),)
+ ifeq ($(wildcard $(UV_CARGO_PATH)),)
+ UV_INSTALL := 1
+ else
+ UV_PATH := $(UV_CARGO_PATH)
+ endif
+ else
+ UV_PATH := $(UV_LOCAL_PATH)
+ endif
+endif
+
+# Define the virtual environment path
+VENV := .venv
+VENV_ACTIVATE := $(VENV)/bin/activate
+VENV_PYTHON := $(VENV)/bin/python
+VENV_UV := $(VENV)/bin/uv
+VENV_PIP := $(VENV)/bin/pip
+VENV_PRE_COMMIT := $(VENV)/bin/pre-commit
+VENV_RUFF := $(VENV)/bin/ruff
+
+.PHONY: all
+all: venv
+
+.PHONY: install-uv
+install-uv:
+ifdef UV_INSTALL
+ @echo "Installing uv..."
+ @curl -LsSf https://astral.sh/uv/install.sh | sh
+ @echo "uv installed to $(HOME)/.local/bin/uv"
+ $(eval UV_PATH := $(HOME)/.local/bin/uv)
+endif
+
+.PHONY: venv
+venv: install-uv
+ @echo "Creating virtual environment..."
+ @$(UV_PATH) venv $(VENV)
+ @echo "Installing project dependencies..."
+ @$(UV_PATH) pip install -e .
+ @echo "Installing development dependencies..."
+ @$(UV_PATH) pip install pre-commit ruff
+ @echo "Virtual environment created and dependencies installed."
+ @echo "To activate the virtual environment, run: source $(VENV_ACTIVATE)"
+
+.PHONY: sync
+sync: venv
+ @echo "Syncing dependencies from pyproject.toml..."
+ @$(UV_PATH) pip sync pyproject.toml
.PHONY: test
-test:
- tox -e tests
+test: venv
+ @echo "Running tests..."
+ @. $(VENV_ACTIVATE) && $(VENV_PYTHON) -m pytest
.PHONY: pre-commit
-pre-commit:
- tox -e pre-commit
+pre-commit: venv
+ @echo "Running pre-commit hooks..."
+ @. $(VENV_ACTIVATE) && $(VENV_PRE_COMMIT) run --all-files
+
+.PHONY: install-hooks
+install-hooks: venv
+ @echo "Installing pre-commit hooks..."
+ @. $(VENV_ACTIVATE) && $(VENV_PRE_COMMIT) install -f --install-hooks
.PHONY: clean
clean:
- find -name '*.pyc' -delete
- find -name '__pycache__' -delete
- rm -rf .tox
- rm -rf venv
+ @echo "Cleaning up..."
+ @find -name '*.pyc' -delete
+ @find -name '__pycache__' -delete
+ @rm -rf $(VENV)
+ @rm -rf .pytest_cache
+ @rm -rf .ruff_cache
+ @echo "Cleanup complete."
-.PHONY: install-hooks
-install-hooks:
- tox -e install-hooks
+.PHONY: lint
+lint: venv
+ @echo "Running linter..."
+ @. $(VENV_ACTIVATE) && $(VENV_RUFF) check --fix .
+
+.PHONY: format
+format: venv
+ @echo "Running formatter..."
+ @. $(VENV_ACTIVATE) && $(VENV_RUFF) format .
diff --git a/SUPPORTED_VERSIONS.json b/SUPPORTED_VERSIONS.json
index b486834..107a920 100644
--- a/SUPPORTED_VERSIONS.json
+++ b/SUPPORTED_VERSIONS.json
@@ -1,10 +1,10 @@
{
"master": {
- "qbit": "v5.0.4",
- "qbitapi": "2025.2.0"
- },
- "develop": {
"qbit": "v5.0.5",
"qbitapi": "2025.4.1"
+ },
+ "develop": {
+ "qbit": "v5.1.0",
+ "qbitapi": "2025.5.0"
}
}
diff --git a/VERSION b/VERSION
index af8c8ec..8089590 100755
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-4.2.2
+4.3.0
diff --git a/activate.sh b/activate.sh
old mode 100644
new mode 100755
index 72c43ac..28ee5d4
--- a/activate.sh
+++ b/activate.sh
@@ -1 +1,2 @@
-venv/bin/activate
+#!/bin/bash
+source .venv/bin/activate
diff --git a/config/config.yml.sample b/config/config.yml.sample
index d7d5661..19f0aac 100755
--- a/config/config.yml.sample
+++ b/config/config.yml.sample
@@ -32,6 +32,7 @@ settings:
- Upload
tracker_error_tag: issue # Will set the tag of any torrents that do not have a working tracker.
nohardlinks_tag: noHL # Will set the tag of any torrents with no hardlinks.
+ stalled_tag: stalledDL # Will set the tag of any torrents stalled downloading.
share_limits_tag: ~share_limit # Will add this tag when applying share limits to provide an easy way to filter torrents by share limit group/priority for each torrent
share_limits_min_seeding_time_tag: MinSeedTimeNotReached # Tag to be added to torrents that have not yet reached the minimum seeding time
share_limits_min_num_seeds_tag: MinSeedsNotMet # Tag to be added to torrents that have not yet reached the minimum number of seeds
diff --git a/docs/Config-Setup.md b/docs/Config-Setup.md
index a10d006..ea41dfb 100644
--- a/docs/Config-Setup.md
+++ b/docs/Config-Setup.md
@@ -57,7 +57,7 @@ This section defines any settings defined in the configuration.
| `tag_nohardlinks_filter_completed` | When running `--tag-nohardlinks` function, , it will filter for completed torrents only. | True |
❌ |
| `cat_update_all` | When running `--cat-update` function, it will check and update all torrents categories, otherwise it will only update uncategorized torrents. | True | ❌ |
| `disable_qbt_default_share_limits` | When running `--share-limits` function, it allows QBM to handle share limits by disabling qBittorrents default Share limits. | True | ❌ |
-| `tag_stalled_torrents` | Tags any downloading torrents that are stalled with the `stalledDL` tag when running the tag_update command | True | ❌ |
+| `tag_stalled_torrents` | Tags any downloading torrents that are stalled with the user defined `stalledDL` tag when running the tag_update command | True | ❌ |
| `rem_unregistered_ignore_list` | Ignores a list of words found in the status of the tracker when running rem_unregistered command and will not remove the torrent if matched | | ❌ |
## **directory:**
diff --git a/docs/Local-Installations.md b/docs/Local-Installations.md
index 5c6da8f..5757eed 100644
--- a/docs/Local-Installations.md
+++ b/docs/Local-Installations.md
@@ -15,13 +15,13 @@ git clone https://github.com/StuffAnThings/qbit_manage
Install requirements
```bash
-pip install -r requirements.txt
+pip install .
```
If there are issues installing dependencies try:
```bash
-pip install -r requirements.txt --ignore-installed
+pip install . --ignore-installed
```
## Usage
diff --git a/docs/Nix-Installation.md b/docs/Nix-Installation.md
index 339c562..1499e1b 100644
--- a/docs/Nix-Installation.md
+++ b/docs/Nix-Installation.md
@@ -15,7 +15,7 @@ chmod +x qbit_manage.py
* Get & Install Requirements
```bash
-pip install -r requirements.txt
+pip install .
```
* Create Config
@@ -35,26 +35,73 @@ nano qbm-update.sh
* Paste the below into the update script and update the Paths and Service Name (if using systemd)
```bash
-#!/bin/bash
+#!/usr/bin/env bash
+set -e
+set -o pipefail
-qbmPath="/home/bakerboy448/QbitManage"
-qbmVenvPath="$qbmPath"/"qbit-venv/"
-qbmServiceName="qbm"
-cd "$qbmPath" || exit
-currentVersion=$(cat VERSION)
-branch=$(git rev-parse --abbrev-ref HEAD)
-git fetch
-if [ "$(git rev-parse HEAD)" = "$(git rev-parse @'{u}')" ]; then
- echo "=== Already up to date $currentVersion on $branch ==="
- exit 0
-fi
-git pull
-newVersion=$(cat VERSION)
-"$qbmVenvPath"/bin/python -m pip install -r requirements.txt
-echo "=== Updated from $currentVersion to $newVersion on $branch ==="
-echo "=== Restarting qbm Service ==="
-sudo systemctl restart "$qbmServiceName"
-exit 0
+force_update=${1:-false}
+
+# Constants
+QBM_PATH="/opt/qbit_manage"
+QBM_VENV_PATH="/opt/.venv/qbm-venv"
+QBM_SERVICE_NAME="qbmanage"
+QBM_UPSTREAM_GIT_REMOTE="origin"
+QBM_VERSION_FILE="$QBM_PATH/VERSION"
+QBM_REQUIREMENTS_FILE="$QBM_PATH/pyproject.toml"
+CURRENT_UID=$(id -un)
+
+# Check if QBM is installed and if the current user owns it
+check_qbm_installation() {
+ if [ -d "$QBM_PATH" ]; then
+ qbm_repo_owner=$(stat --format='%U' "$QBM_PATH")
+ qbm_repo_group=$(stat --format='%G' "$QBM_PATH")
+ if [ "$qbm_repo_owner" != "$CURRENT_UID" ]; then
+ echo "You do not own the QbitManage repo. Please run this script as the user that owns the repo [$qbm_repo_owner]."
+ echo "use 'sudo -u $qbm_repo_owner -g $qbm_repo_group /path/to/qbm-update.sh'"
+ exit 1
+ fi
+ else
+ echo "QbitManage folder does not exist. Please install QbitManage before running this script."
+ exit 1
+ fi
+}
+
+# Update QBM if necessary
+update_qbm() {
+ current_branch=$(git -C "$QBM_PATH" rev-parse --abbrev-ref HEAD)
+ echo "Current Branch: $current_branch. Checking for updates..."
+ git -C "$QBM_PATH" fetch
+ if [ "$(git -C "$QBM_PATH" rev-parse HEAD)" = "$(git -C "$QBM_PATH" rev-parse @'{u}')" ] && [ "$force_update" != true ]; then
+ current_version=$(cat "$QBM_VERSION_FILE")
+ echo "=== Already up to date $current_version on $current_branch ==="
+ exit 0
+ fi
+ current_requirements=$(sha1sum "$QBM_REQUIREMENTS_FILE" | awk '{print $1}')
+ git -C "$QBM_PATH" reset --hard "$QBM_UPSTREAM_GIT_REMOTE/$current_branch"
+}
+
+# Update virtual environment if requirements have changed
+update_venv() {
+ new_requirements=$(sha1sum "$QBM_REQUIREMENTS_FILE" | awk '{print $1}')
+ if [ "$current_requirements" != "$new_requirements" ] || [ "$force_update" = true ]; then
+ echo "=== Requirements changed, updating venv ==="
+ "$QBM_VENV_PATH/bin/python" -m pip install --upgrade "$QBM_PATH"
+ fi
+}
+
+# Restart the QBM service
+restart_service() {
+ echo "=== Restarting QBM Service ==="
+ sudo systemctl restart "$QBM_SERVICE_NAME"
+ new_version=$(cat "$QBM_VERSION_FILE")
+ echo "=== Updated to $new_version on $current_branch ==="
+}
+
+# Main script execution
+check_qbm_installation
+update_qbm
+update_venv
+restart_service
```
* Make the update script executable
diff --git a/docs/Unraid-Installation.md b/docs/Unraid-Installation.md
index a9ab201..ab9956c 100644
--- a/docs/Unraid-Installation.md
+++ b/docs/Unraid-Installation.md
@@ -56,11 +56,11 @@ In the new text field you'll need to place:
```bash
#!/bin/bash
echo "Installing required packages"
-python3 -m pip install -r /mnt/user/path/to/requirements.txt
+python3 -m pip install /mnt/user/path/to/qbit
echo "Required packages installed"
```
-Replace `path/to/` with your path example mines `/data/scripts/qbit/` or `/mnt/user/data/scripts/qbit/requirements.txt`
+Replace `path/to/` with your own path; for example, mine is `/data/scripts/qbit/`
Now click **Save Changes**
diff --git a/modules/config.py b/modules/config.py
index eb54434..78ed818 100755
--- a/modules/config.py
+++ b/modules/config.py
@@ -4,7 +4,6 @@ import os
import re
import stat
import time
-from collections import OrderedDict
import requests
from retrying import retry
@@ -202,6 +201,7 @@ class Config:
self.data, "tracker_error_tag", parent="settings", default="issue"
),
"nohardlinks_tag": self.util.check_for_attribute(self.data, "nohardlinks_tag", parent="settings", default="noHL"),
+ "stalled_tag": self.util.check_for_attribute(self.data, "stalled_tag", parent="settings", default="stalledDL"),
"share_limits_tag": self.util.check_for_attribute(
self.data, "share_limits_tag", parent="settings", default=share_limits_tag
),
@@ -245,6 +245,7 @@ class Config:
self.tracker_error_tag = self.settings["tracker_error_tag"]
self.nohardlinks_tag = self.settings["nohardlinks_tag"]
+ self.stalled_tag = self.settings["stalled_tag"]
self.share_limits_tag = self.settings["share_limits_tag"]
self.share_limits_custom_tags = []
self.share_limits_min_seeding_time_tag = self.settings["share_limits_min_seeding_time_tag"]
@@ -424,10 +425,12 @@ class Config:
save=True,
)
priorities.add(priority)
- return OrderedDict(sorted_limits)
+ return dict(sorted_limits)
- self.share_limits = OrderedDict()
+ self.share_limits = dict()
sorted_share_limits = _sort_share_limits(self.data["share_limits"])
+ logger.trace(f"Unsorted Share Limits: {self.data['share_limits']}")
+ logger.trace(f"Sorted Share Limits: {sorted_share_limits}")
for group in sorted_share_limits:
self.share_limits[group] = {}
self.share_limits[group]["priority"] = sorted_share_limits[group]["priority"]
@@ -637,6 +640,7 @@ class Config:
self.notify(err, "Config")
raise Failed(err)
+ logger.trace(f"Share_limits config: {self.share_limits}")
# Add RecycleBin
self.recyclebin = {}
self.recyclebin["enabled"] = self.util.check_for_attribute(
diff --git a/modules/core/share_limits.py b/modules/core/share_limits.py
index 77ecf31..96a5f73 100644
--- a/modules/core/share_limits.py
+++ b/modules/core/share_limits.py
@@ -55,6 +55,14 @@ class ShareLimits:
torrents = group_config["torrents"]
self.torrents_updated = []
self.tdel_dict = {}
+ group_priority = group_config.get("priority", "Unknown")
+ num_torrents = len(torrents) if torrents else 0
+
+ logger.separator(
+ f"Updating Share Limits for [Group {group_name}] [Priority {group_priority}] [Torrents ({num_torrents})]",
+ space=False,
+ border=False,
+ )
if torrents:
self.update_share_limits_for_group(group_name, group_config, torrents)
attr = {
@@ -183,9 +191,6 @@ class ShareLimits:
def update_share_limits_for_group(self, group_name, group_config, torrents):
"""Updates share limits for torrents in a group"""
- logger.separator(
- f"Updating Share Limits for [Group {group_name}] [Priority {group_config['priority']}]", space=False, border=False
- )
group_upload_speed = group_config["limit_upload_speed"]
for torrent in torrents:
@@ -488,6 +493,7 @@ class ShareLimits:
torrent.add_tags(self.min_seeding_time_tag)
torrent_tags += f", {self.min_seeding_time_tag}"
torrent.set_share_limits(ratio_limit=-1, seeding_time_limit=-1, inactive_seeding_time_limit=-1)
+ torrent.set_upload_limit(-1)
if resume_torrent:
torrent.resume()
return False
@@ -520,6 +526,7 @@ class ShareLimits:
torrent.add_tags(self.min_num_seeds_tag)
torrent_tags += f", {self.min_num_seeds_tag}"
torrent.set_share_limits(ratio_limit=-1, seeding_time_limit=-1, inactive_seeding_time_limit=-1)
+ torrent.set_upload_limit(-1)
if resume_torrent:
torrent.resume()
return True
@@ -554,6 +561,7 @@ class ShareLimits:
torrent.add_tags(self.last_active_tag)
torrent_tags += f", {self.last_active_tag}"
torrent.set_share_limits(ratio_limit=-1, seeding_time_limit=-1, inactive_seeding_time_limit=-1)
+ torrent.set_upload_limit(-1)
if resume_torrent:
torrent.resume()
return False
@@ -570,7 +578,7 @@ class ShareLimits:
else:
_remove_min_seeding_time_tag()
return False
- if seeding_time_limit:
+ if seeding_time_limit is not None:
if (torrent.seeding_time >= seeding_time_limit * 60) and _has_reached_min_seeding_time_limit():
body += logger.insert_space(
f"Seeding Time vs Max Seed Time: {str(timedelta(seconds=torrent.seeding_time))} >= "
diff --git a/modules/core/tags.py b/modules/core/tags.py
index 5a813ab..01fb30f 100644
--- a/modules/core/tags.py
+++ b/modules/core/tags.py
@@ -13,7 +13,7 @@ class Tags:
self.share_limits_tag = qbit_manager.config.share_limits_tag
self.torrents_updated = [] # List of torrents updated
self.notify_attr = [] # List of single torrent attributes to send to notifiarr
- self.stalled_tag = "stalledDL"
+ self.stalled_tag = qbit_manager.config.stalled_tag
self.tag_stalled_torrents = self.config.settings["tag_stalled_torrents"]
self.tags()
diff --git a/modules/qbittorrent.py b/modules/qbittorrent.py
index 91cd972..9e10955 100755
--- a/modules/qbittorrent.py
+++ b/modules/qbittorrent.py
@@ -423,6 +423,8 @@ class Qbt:
save_path = categories[cat].savePath.replace(self.config.root_dir, self.config.remote_dir)
if save_path:
save_paths.add(save_path)
+ # Also add root_dir to the list
+ save_paths.add(self.config.remote_dir)
return list(save_paths)
def tor_delete_recycle(self, torrent, info):
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..065fd08
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,64 @@
+[build-system]
+requires = ["setuptools>=42", "wheel"]
+build-backend = "setuptools.build_meta"
+
+# Keep using setup.py for version handling
+# Dependencies are specified here for uv to use
+
+[project]
+name = "qbit_manage"
+# Version is dynamically determined from setup.py
+dynamic = ["version"]
+description = "This tool will help manage tedious tasks in qBittorrent and automate them. Tag, categorize, remove Orphaned data, remove unregistered torrents and much much more."
+readme = "README.md"
+requires-python = ">=3.9"
+license = {text = "MIT"}
+authors = [
+ {name = "bobokun"},
+]
+dependencies = [
+ "bencodepy==0.9.5",
+ "croniter==6.0.0",
+ "GitPython==3.1.44",
+ "humanize==4.12.3",
+ "pytimeparse2==1.7.1",
+ "qbittorrent-api==2025.5.0",
+ "requests==2.32.3",
+ "retrying==1.3.4",
+ "ruamel.yaml==0.18.10",
+ "schedule==1.2.2",
+]
+
+[project.urls]
+Homepage = "https://github.com/StuffAnThings"
+Repository = "https://github.com/StuffAnThings/qbit_manage"
+
+[project.optional-dependencies]
+dev = [
+ "pre-commit==4.2.0",
+ "ruff==0.11.8",
+]
+
+[tool.ruff]
+line-length = 130
+
+[tool.ruff.lint]
+select = [
+ "I", # isort - import order
+ "UP", # pyupgrade
+ "T10", # debugger
+ "E", # pycodestyle errors
+ "W", # pycodestyle warnings
+ "F", # pyflakes
+]
+
+ignore = [
+ "E722", # E722 Do not use bare except, specify exception instead
+ "E402", # E402 module level import not at top of file
+]
+
+[tool.ruff.lint.isort]
+force-single-line = true
+
+[tool.ruff.format]
+line-ending = "auto"
diff --git a/requirements-dev.txt b/requirements-dev.txt
deleted file mode 100644
index 6565e48..0000000
--- a/requirements-dev.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-
-pre-commit==4.2.0
-ruff==0.11.7
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index 47cb7dd..0000000
--- a/requirements.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-bencodepy==0.9.5
-croniter==6.0.0
-GitPython==3.1.44
-humanize==4.12.2
-pytimeparse2==1.7.1
-qbittorrent-api==2025.4.1
-requests==2.32.3
-retrying==1.3.4
-ruamel.yaml==0.18.10
-schedule==1.2.2
diff --git a/scripts/pre-commit/increase_version.sh b/scripts/pre-commit/increase_version.sh
index 8319b37..4f37c5a 100755
--- a/scripts/pre-commit/increase_version.sh
+++ b/scripts/pre-commit/increase_version.sh
@@ -13,28 +13,8 @@ if git diff --cached --name-only | grep -q "VERSION"; then
elif git diff --name-only | grep -q "VERSION"; then
echo "The VERSION file has unstaged changes. Please stage them before committing."
exit 0
+elif ! git show --name-only HEAD | grep -q "VERSION"; then
+ source "$(dirname "$0")/update_develop_version.sh"
fi
-# Read the current version from the VERSION file
-current_version=$(