# Improvements
- **ci(docker)**: add OCI labels and build metadata to Docker images
- **Web UI**: Show an "Update available" badge next to the version and a
toast notification when a newer version is detected
- **Web UI**: Add integrated docs with collapsible sections
- **ci(build)**: Publish to PyPI
- **Category**: Allow category changes regardless of the "Category
Update All" status (Fixes #913)

# Bug Fixes
- Fix container hanging when using the run command with the QBT_RUN flag
(Fixes #911)
- Fix interval scheduler not displaying the correct next run time
- Fix Web API requests not being queued correctly when called during a
scheduled run

**Full Changelog**:
https://github.com/StuffAnThings/qbit_manage/compare/v4.5.4...v4.5.5

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: Actionbot <actions@github.com>
Co-authored-by: bakerboy448 <55419169+bakerboy448@users.noreply.github.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: ineednewpajamas <73252768+ineednewpajamas@users.noreply.github.com>
Commit 156291723f (parent 1e12a1610f) by bobokun, 2025-08-24 18:44:54 -04:00, committed via GitHub.
51 changed files with 2,497 additions and 719 deletions.

---

@@ -69,6 +69,7 @@ jobs:
ADD_SAMPLE_CFG="config/config.yml.sample${{ steps.sep.outputs.SEP }}config"
ADD_LOGO="icons/qbm_logo.png${{ steps.sep.outputs.SEP }}."
ADD_VERSION="VERSION${{ steps.sep.outputs.SEP }}."
ADD_DOCS="docs${{ steps.sep.outputs.SEP }}docs"
ICON_ARG=""
if [[ "${{ runner.os }}" == "Windows" ]]; then
ICON_ARG=--icon=icons/qbm_logo.ico
@@ -90,6 +91,7 @@ jobs:
--add-data "$ADD_SAMPLE_CFG" \
--add-data "$ADD_LOGO" \
--add-data "$ADD_VERSION" \
--add-data "$ADD_DOCS" \
$ICON_ARG \
"${ENTRY}"
@@ -186,13 +188,17 @@ jobs:
# Run cargo check to trigger build script and update version files
cargo check
- name: Build Tauri app
- name: Install Tauri CLI
working-directory: desktop/tauri/src-tauri
shell: bash
run: |
# Install Tauri 2 CLI (project migrated to Tauri v2 config & deps)
cargo install tauri-cli --version ^2 --locked --force
- name: Build Tauri app (initial attempt)
working-directory: desktop/tauri/src-tauri
shell: bash
run: |
# Build with explicit bundle targets for this platform
if [[ "${{ runner.os }}" == "Windows" ]]; then
cargo tauri build --target x86_64-pc-windows-msvc --bundles nsis
@@ -201,6 +207,60 @@ jobs:
else
cargo tauri build --bundles deb
fi
continue-on-error: true
id: tauri-build
- name: Wait before retry (macOS DMG recovery)
if: steps.tauri-build.outcome == 'failure' && runner.os == 'macOS'
run: sleep 30
shell: bash
- name: Wait before retry (build recovery)
if: steps.tauri-build.outcome == 'failure'
run: sleep 30
shell: bash
- name: Retry Tauri build on failure (attempt 2)
if: steps.tauri-build.outcome == 'failure'
working-directory: desktop/tauri/src-tauri
shell: bash
run: |
# Build with explicit bundle targets for this platform
if [[ "${{ runner.os }}" == "Windows" ]]; then
cargo tauri build --target x86_64-pc-windows-msvc --bundles nsis
elif [[ "${{ runner.os }}" == "macOS" ]]; then
cargo tauri build --bundles app,dmg
else
cargo tauri build --bundles deb
fi
continue-on-error: true
id: tauri-build-retry1
- name: Wait before final retry (build recovery)
if: steps.tauri-build-retry1.outcome == 'failure'
run: sleep 30
shell: bash
- name: Final retry Tauri build on failure (attempt 3)
if: steps.tauri-build-retry1.outcome == 'failure'
working-directory: desktop/tauri/src-tauri
shell: bash
run: |
# Build with explicit bundle targets for this platform
if [[ "${{ runner.os }}" == "Windows" ]]; then
cargo tauri build --target x86_64-pc-windows-msvc --bundles nsis
elif [[ "${{ runner.os }}" == "macOS" ]]; then
cargo tauri build --bundles app,dmg
else
cargo tauri build --bundles deb
fi
- name: Fail if all Tauri build attempts failed
if: steps.tauri-build.outcome == 'failure' && steps.tauri-build-retry1.outcome == 'failure'
shell: bash
run: |
echo "❌ Tauri build failed after all attempts"
exit 1
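
The three build steps above implement a retry with a fixed 30-second back-off between attempts. A minimal sketch of an equivalent single-step shell loop (a hypothetical consolidation, using GitHub Actions' `RUNNER_OS` environment variable in place of the `${{ runner.os }}` expression):

```bash
# Hypothetical single-step retry: same platform-specific bundles,
# up to three attempts with a 30s pause between them.
tauri_build() {
  if [[ "$RUNNER_OS" == "Windows" ]]; then
    cargo tauri build --target x86_64-pc-windows-msvc --bundles nsis
  elif [[ "$RUNNER_OS" == "macOS" ]]; then
    cargo tauri build --bundles app,dmg
  else
    cargo tauri build --bundles deb
  fi
}

for attempt in 1 2 3; do
  tauri_build && exit 0
  echo "Build attempt ${attempt} failed" >&2
  [[ "$attempt" -lt 3 ]] && sleep 30
done
echo "❌ Tauri build failed after all attempts" >&2
exit 1
```

The step-per-attempt layout used in the workflow does have the advantage that each attempt's outcome is visible as a separate step in the Actions UI.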
- name: Set BUILD_ARCH for artifact naming
shell: bash
@@ -332,36 +392,6 @@ jobs:
if-no-files-found: error
compression-level: 6
- name: Clean up temporary build artifacts
uses: actions/github-script@v7
with:
script: |
// Get all artifacts from this workflow run
const artifacts = await github.rest.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: context.runId,
});
// Delete temporary build artifacts, keep only the final release assets
for (const artifact of artifacts.data.artifacts) {
if (artifact.name.startsWith('build-outputs-')) {
console.log(`Deleting temporary artifact: ${artifact.name}`);
try {
await github.rest.actions.deleteArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: artifact.id,
});
console.log(`✓ Successfully deleted: ${artifact.name}`);
} catch (error) {
console.log(`⚠️ Failed to delete ${artifact.name}: ${error.message}`);
}
} else {
console.log(`✓ Keeping final artifact: ${artifact.name}`);
}
}
docker-develop:
runs-on: ubuntu-latest
@@ -406,6 +436,16 @@ jobs:
id: buildx
uses: docker/setup-buildx-action@v3
- name: Read version from VERSION file
id: get_version
run: echo "APP_VERSION=$(cat VERSION)" >> $GITHUB_OUTPUT
- name: Set build metadata
id: build_meta
run: |
echo "BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_OUTPUT
echo "VCS_REF=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
- name: Build and push
id: docker_build
uses: docker/build-push-action@v6
@@ -413,7 +453,10 @@
context: ./
file: ./Dockerfile
build-args: |
"BRANCH_NAME=develop"
BRANCH_NAME=develop
APP_VERSION=${{ steps.get_version.outputs.APP_VERSION }}
BUILD_DATE=${{ steps.build_meta.outputs.BUILD_DATE }}
VCS_REF=${{ steps.build_meta.outputs.VCS_REF }}
platforms: linux/amd64,linux/arm64,linux/arm/v7
push: true
tags: |

---

**.github/workflows/pypi-publish.yml** (new file, 68 lines)

@@ -0,0 +1,68 @@
name: PyPI Publish

on:
  push:
    tags:
      - v*
  workflow_dispatch:
    inputs:
      test_pypi:
        description: 'Publish to Test PyPI instead of PyPI'
        required: false
        default: false
        type: boolean

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  pypi-publish:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      id-token: write  # Required for trusted publishing to PyPI
    steps:
      - name: Check Out Repo
        uses: actions/checkout@v5
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - name: Install build dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install build twine
      - name: Build package
        run: python -m build
      - name: Verify package
        run: |
          python -m twine check dist/*
          ls -la dist/
      - name: Publish to Test PyPI
        if: ${{ github.event.inputs.test_pypi == 'true' }}
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          repository-url: https://test.pypi.org/legacy/
          # Option 1: Use trusted publishing (recommended)
          # Repository must be configured in Test PyPI with GitHub as trusted publisher
          # Option 2: Use API token (uncomment the line below and comment out the trusted publishing)
          # password: ${{ secrets.TEST_PYPI_API_TOKEN }}
          verbose: true
          skip-existing: true
      - name: Publish to PyPI
        if: ${{ github.event.inputs.test_pypi != 'true' }}
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          # Option 1: Use trusted publishing (recommended)
          # Repository must be configured in PyPI with GitHub as trusted publisher
          # Option 2: Use API token (uncomment the line below and comment out the trusted publishing)
          # password: ${{ secrets.PYPI_API_TOKEN }}
          verbose: true
          skip-existing: true
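
With `workflow_dispatch` and the `test_pypi` input in place, a manual Test PyPI run can be triggered from the GitHub CLI; a sketch, assuming the file is saved as `pypi-publish.yml`:

```bash
# Kick off a manual publish to Test PyPI, then list recent runs.
gh workflow run pypi-publish.yml -f test_pypi=true
gh run list --workflow=pypi-publish.yml --limit 3
```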

---

@@ -47,9 +47,19 @@ jobs:
run: |
source .venv/bin/activate
python scripts/update-readme-version.py ${{ github.event.inputs.targetBranch || github.ref_name }}
- name: Check for SUPPORTED_VERSIONS changes
id: detect-changes
run: |
if git diff --name-only | grep -q '^SUPPORTED_VERSIONS\.json$'; then
echo "SUPPORTED_VERSIONS.json changed."
echo "changed=true" >> $GITHUB_OUTPUT
else
echo "No changes to SUPPORTED_VERSIONS.json. Skipping remainder of workflow."
echo "changed=false" >> $GITHUB_OUTPUT
fi
- name: Update develop versions
if: ${{ github.event.inputs.targetBranch || github.ref_name == 'develop' }}
if: ${{ steps.detect-changes.outputs.changed == 'true' && (github.event.inputs.targetBranch || github.ref_name) == 'develop' }}
id: get-develop-version
run: |
# Run the script and capture its output
@@ -63,6 +73,7 @@ jobs:
echo "Captured Version: $version"
- name: Create Pull Request
if: ${{ steps.detect-changes.outputs.changed == 'true' }}
id: create-pr
uses: peter-evans/create-pull-request@v7
with:

---

@@ -1,7 +1,7 @@
name: Version Release
on:
create:
push:
tags:
- v*
@@ -68,6 +68,7 @@ jobs:
ADD_SAMPLE_CFG="config/config.yml.sample${{ steps.sep.outputs.SEP }}config"
ADD_LOGO="icons/qbm_logo.png${{ steps.sep.outputs.SEP }}."
ADD_VERSION="VERSION${{ steps.sep.outputs.SEP }}."
ADD_DOCS="docs${{ steps.sep.outputs.SEP }}docs"
ICON_ARG=""
if [[ "${{ runner.os }}" == "Windows" ]]; then
ICON_ARG=--icon=icons/qbm_logo.ico
@@ -89,6 +90,7 @@ jobs:
--add-data "$ADD_SAMPLE_CFG" \
--add-data "$ADD_LOGO" \
--add-data "$ADD_VERSION" \
--add-data "$ADD_DOCS" \
$ICON_ARG \
"${ENTRY}"
@@ -185,13 +187,17 @@ jobs:
# Run cargo check to trigger build script and update version files
cargo check
- name: Build Tauri app
- name: Install Tauri CLI
working-directory: desktop/tauri/src-tauri
shell: bash
run: |
# Install Tauri 2 CLI (project migrated to Tauri v2 config & deps)
cargo install tauri-cli --version ^2 --locked --force
- name: Build Tauri app (initial attempt)
working-directory: desktop/tauri/src-tauri
shell: bash
run: |
# Build with explicit bundle targets for this platform
if [[ "${{ runner.os }}" == "Windows" ]]; then
cargo tauri build --target x86_64-pc-windows-msvc --bundles nsis
@@ -200,6 +206,60 @@ jobs:
else
cargo tauri build --bundles deb
fi
continue-on-error: true
id: tauri-build
- name: Wait before retry (macOS DMG recovery)
if: steps.tauri-build.outcome == 'failure' && runner.os == 'macOS'
run: sleep 30
shell: bash
- name: Wait before retry (build recovery)
if: steps.tauri-build.outcome == 'failure'
run: sleep 30
shell: bash
- name: Retry Tauri build on failure (attempt 2)
if: steps.tauri-build.outcome == 'failure'
working-directory: desktop/tauri/src-tauri
shell: bash
run: |
# Build with explicit bundle targets for this platform
if [[ "${{ runner.os }}" == "Windows" ]]; then
cargo tauri build --target x86_64-pc-windows-msvc --bundles nsis
elif [[ "${{ runner.os }}" == "macOS" ]]; then
cargo tauri build --bundles app,dmg
else
cargo tauri build --bundles deb
fi
continue-on-error: true
id: tauri-build-retry1
- name: Wait before final retry (build recovery)
if: steps.tauri-build-retry1.outcome == 'failure'
run: sleep 30
shell: bash
- name: Final retry Tauri build on failure (attempt 3)
if: steps.tauri-build-retry1.outcome == 'failure'
working-directory: desktop/tauri/src-tauri
shell: bash
run: |
# Build with explicit bundle targets for this platform
if [[ "${{ runner.os }}" == "Windows" ]]; then
cargo tauri build --target x86_64-pc-windows-msvc --bundles nsis
elif [[ "${{ runner.os }}" == "macOS" ]]; then
cargo tauri build --bundles app,dmg
else
cargo tauri build --bundles deb
fi
- name: Fail if all Tauri build attempts failed
if: steps.tauri-build.outcome == 'failure' && steps.tauri-build-retry1.outcome == 'failure'
shell: bash
run: |
echo "❌ Tauri build failed after all attempts"
exit 1
- name: Set BUILD_ARCH for artifact naming
shell: bash
@@ -400,12 +460,26 @@ jobs:
id: get_version
run: echo "VERSION=${GITHUB_REF/refs\/tags\//}" >> $GITHUB_OUTPUT
- name: Read version from VERSION file
id: get_app_version
run: echo "APP_VERSION=$(cat VERSION)" >> $GITHUB_OUTPUT
- name: Set build metadata
id: build_meta
run: |
echo "BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_OUTPUT
echo "VCS_REF=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
- name: Build and push
id: docker_build
uses: docker/build-push-action@v6
with:
context: ./
file: ./Dockerfile
build-args: |
APP_VERSION=${{ steps.get_app_version.outputs.APP_VERSION }}
BUILD_DATE=${{ steps.build_meta.outputs.BUILD_DATE }}
VCS_REF=${{ steps.build_meta.outputs.VCS_REF }}
platforms: linux/amd64,linux/arm64,linux/arm/v7
push: true
tags: |

---

**.gitignore** (1 line changed)

@@ -13,6 +13,7 @@ qbit_manage.egg-info/
.tox
*.env
**/build
dist/
.roo*
memory-bank
**/src-tauri/gen

---

@@ -5,6 +5,7 @@ repos:
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
exclude: ^desktop/tauri/src-tauri/tauri\.conf\.json$
- id: check-merge-conflict
- id: check-json
- id: check-yaml
@@ -13,6 +14,7 @@ repos:
- id: fix-byte-order-marker
- id: pretty-format-json
args: [--autofix, --indent, '4', --no-sort-keys]
exclude: ^desktop/tauri/src-tauri/tauri\.conf\.json$
- repo: https://github.com/adrienverge/yamllint.git
rev: v1.37.1 # or higher tag
hooks:
@@ -26,7 +28,7 @@ repos:
exclude: ^.github/
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.12.8
rev: v0.12.9
hooks:
# Run the linter.
- id: ruff-check
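
To exercise the updated hooks locally — including the new `tauri.conf.json` excludes and the ruff bump — the standard pre-commit invocation applies:

```bash
# Install the git hook, then run every hook against the whole tree.
pre-commit install
pre-commit run --all-files
```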

---

@@ -1,14 +1,13 @@
# Improvements
- Support cross-platform binary builds (Linux/Windows/MacOS)
- Adds desktop app installers (Linux/Windows/MacOS)
- Container images for latest now pointed to newest version automatically (Fixes #897)
- Enable automatic open of webUI in local installs
- Add persistence toggling for webUI scheduler
- **ci(docker)**: add OCI labels and build metadata to Docker images
- **Web UI**: Show an "Update available" badge next to the version and a toast notification when a newer version is detected
- **Web UI**: Add integrated docs with collapsible sections
- **ci(build)**: Publish to PyPI
- **Category**: Allow category changes regardless of the "Category Update All" status (Fixes #913)
# Bug Fixes
- Fix schedule.yml not loaded upon restarting Docker container (Fixes #906)
- Fix bug where torrents were not being paused after share limits reached (Fixes #901)
- Fix(api): prevent path traversal vulnerability in backup restore endpoint (Fixes CWE-22 Security Vulnerability)
- Fix scheduler to run interval jobs immediately on startup
- Fixes container hanging when using run command with QBT_RUN flag (Fixes #911)
- Fixes bug on interval scheduler not displaying the correct next run time
- Fix bug on webAPI requests not being queued correctly when called during a scheduled run
**Full Changelog**: https://github.com/StuffAnThings/qbit_manage/compare/v4.5.3...v4.5.4
**Full Changelog**: https://github.com/StuffAnThings/qbit_manage/compare/v4.5.4...v4.5.5

---

@@ -27,6 +27,25 @@ RUN /root/.local/bin/uv pip install --system .
# Final stage: minimal runtime image
FROM python:3.13-alpine
# Build arguments
ARG APP_VERSION
ARG BUILD_DATE
ARG VCS_REF
# OCI Image Specification labels
LABEL org.opencontainers.image.title="qbit-manage"
LABEL org.opencontainers.image.description="This tool will help manage tedious tasks in qBittorrent and automate them. Tag, categorize, remove Orphaned data, remove unregistered torrents and much much more."
LABEL org.opencontainers.image.version="$APP_VERSION"
LABEL org.opencontainers.image.created="$BUILD_DATE"
LABEL org.opencontainers.image.revision="$VCS_REF"
LABEL org.opencontainers.image.authors="bobokun"
LABEL org.opencontainers.image.vendor="StuffAnThings"
LABEL org.opencontainers.image.licenses="MIT"
LABEL org.opencontainers.image.url="https://github.com/StuffAnThings/qbit_manage"
LABEL org.opencontainers.image.documentation="https://github.com/StuffAnThings/qbit_manage/wiki"
LABEL org.opencontainers.image.source="https://github.com/StuffAnThings/qbit_manage"
LABEL org.opencontainers.image.base.name="python:3.13-alpine"
ENV TINI_VERSION=v0.19.0
# Runtime dependencies (smaller than build stage)
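
A local build can populate the same labels; a minimal sketch (the image tag `qbit-manage:local` is illustrative):

```bash
# Supply the metadata args the Dockerfile now declares...
docker build \
  --build-arg APP_VERSION="$(cat VERSION)" \
  --build-arg BUILD_DATE="$(date -u +'%Y-%m-%dT%H:%M:%SZ')" \
  --build-arg VCS_REF="$(git rev-parse HEAD)" \
  -t qbit-manage:local .

# ...then verify the OCI labels on the resulting image.
docker inspect --format '{{json .Config.Labels}}' qbit-manage:local
```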

---

**Makefile** (253 lines changed)

@@ -41,11 +41,14 @@ endif
venv: install-uv
@echo "Creating virtual environment..."
@$(UV_PATH) venv $(VENV)
@echo "Installing project dependencies..."
@$(UV_PATH) pip install -e .
@echo "Installing project dependencies from pyproject.toml..."
@$(UV_PATH) pip install --python $(VENV_PYTHON) -e . --config-settings editable_mode=compat
@echo "Removing conflicting console script to avoid PATH conflicts..."
@rm -f $(VENV)/bin/qbit-manage 2>/dev/null || true
@echo "Installing development dependencies..."
@$(UV_PATH) pip install pre-commit ruff
@$(UV_PATH) pip install --python $(VENV_PYTHON) pre-commit ruff
@echo "Virtual environment created and dependencies installed."
@echo "✓ Virtual environment ready for development"
@echo "To activate the virtual environment, run: source $(VENV_ACTIVATE)"
.PHONY: sync
@@ -76,6 +79,15 @@ clean:
@rm -rf $(VENV)
@rm -rf .pytest_cache
@rm -rf .ruff_cache
@rm -rf dist/
@rm -rf build/
@rm -rf *.egg-info/
@rm -rf web-ui/dist/
@rm -rf web-ui/build/
@rm -rf web-ui/node_modules/
@rm -rf desktop/tauri/src-tauri/target/
@rm -rf desktop/tauri/src-tauri/gen/
@rm -rf desktop/tauri/node_modules/
@echo "Cleanup complete."
.PHONY: lint
@@ -87,3 +99,238 @@ lint: venv
format: venv
@echo "Running formatter..."
@. $(VENV_ACTIVATE) && $(VENV_RUFF) format .
.PHONY: build
build: venv
@echo "Building package..."
@$(UV_PATH) pip install --python $(VENV_PYTHON) build twine
@. $(VENV_ACTIVATE) && $(VENV_PYTHON) -m build
@echo "Package built successfully. Files in dist/"
.PHONY: check-dist
check-dist: build
@echo "Checking distribution files..."
@. $(VENV_ACTIVATE) && $(VENV_PYTHON) -m twine check dist/*
.PHONY: setup-pypi
setup-pypi:
@echo "Setting up PyPI configuration..."
@if [ -f ~/.pypirc ] && grep -q "password = pypi-" ~/.pypirc 2>/dev/null; then \
echo "✓ ~/.pypirc already exists with API tokens configured"; \
else \
$(MAKE) setup-pypi-interactive; \
fi
.PHONY: setup-pypi-interactive
setup-pypi-interactive:
@echo ""
@echo "This will set up your PyPI credentials for automatic uploads."
@echo "You'll need API tokens from:"
@echo " - Test PyPI: https://test.pypi.org/manage/account/token/"
@echo " - Live PyPI: https://pypi.org/manage/account/token/"
@echo ""
@echo "Creating accounts (if needed):"
@echo " - Test PyPI: https://test.pypi.org/account/register/"
@echo " - Live PyPI: https://pypi.org/account/register/"
@echo ""
@printf "Press Enter to continue or Ctrl+C to cancel..."
@read dummy
@echo ""
@printf "Please enter your Test PyPI API token (starts with 'pypi-'): "
@read testpypi_token; \
echo ""; \
printf "Please enter your PyPI API token (starts with 'pypi-'): "; \
read pypi_token; \
echo ""; \
if [ -z "$$testpypi_token" ] || [ -z "$$pypi_token" ]; then \
echo "❌ Both tokens are required. Setup cancelled."; \
exit 1; \
fi; \
if ! echo "$$testpypi_token" | grep -q "^pypi-" || ! echo "$$pypi_token" | grep -q "^pypi-"; then \
echo "❌ Invalid token format. Tokens should start with 'pypi-'"; \
exit 1; \
fi; \
echo "Creating ~/.pypirc configuration file..."; \
echo "[distutils]" > ~/.pypirc; \
echo "index-servers =" >> ~/.pypirc; \
echo " pypi" >> ~/.pypirc; \
echo " testpypi" >> ~/.pypirc; \
echo "" >> ~/.pypirc; \
echo "[pypi]" >> ~/.pypirc; \
echo "repository = https://upload.pypi.org/legacy/" >> ~/.pypirc; \
echo "username = __token__" >> ~/.pypirc; \
echo "password = $$pypi_token" >> ~/.pypirc; \
echo "" >> ~/.pypirc; \
echo "[testpypi]" >> ~/.pypirc; \
echo "repository = https://test.pypi.org/legacy/" >> ~/.pypirc; \
echo "username = __token__" >> ~/.pypirc; \
echo "password = $$testpypi_token" >> ~/.pypirc; \
chmod 600 ~/.pypirc; \
echo "✓ PyPI configuration saved to ~/.pypirc"; \
echo "✓ You can now use 'make upload-test' and 'make upload-pypi' without entering tokens"
.PHONY: upload-test
upload-test: check-dist
@echo "Uploading to Test PyPI..."
@if [ -z "$$TWINE_PASSWORD_TESTPYPI" ] && ! grep -q "password = pypi-" ~/.pypirc 2>/dev/null; then \
echo ""; \
echo "No API token found. Please either:"; \
echo "1. Set environment variable: export TWINE_PASSWORD_TESTPYPI=your-test-pypi-token"; \
echo "2. Run 'make setup-pypi' and edit ~/.pypirc with your tokens"; \
echo "3. Get token from: https://test.pypi.org/manage/account/token/"; \
exit 1; \
fi
@if [ -n "$$TWINE_PASSWORD_TESTPYPI" ]; then \
echo "Using environment variable for authentication"; \
. $(VENV_ACTIVATE) && TWINE_USERNAME=__token__ TWINE_PASSWORD=$$TWINE_PASSWORD_TESTPYPI $(VENV_PYTHON) -m twine upload --repository testpypi --verbose --skip-existing dist/*; \
else \
echo "Using ~/.pypirc for authentication"; \
. $(VENV_ACTIVATE) && $(VENV_PYTHON) -m twine upload --repository testpypi --verbose --skip-existing dist/*; \
fi
@echo "Upload to Test PyPI complete!"
@echo "Test installation with: pip install --index-url https://test.pypi.org/simple/ qbit-manage"
.PHONY: upload-pypi
upload-pypi: check-dist
@echo "Uploading to PyPI..."
@echo "WARNING: This will upload to the LIVE PyPI repository!"
@if [ -z "$$TWINE_PASSWORD_PYPI" ] && ! grep -q "password = pypi-" ~/.pypirc 2>/dev/null; then \
echo ""; \
echo "No API token found. Please either:"; \
echo "1. Set environment variable: export TWINE_PASSWORD_PYPI=your-pypi-token"; \
echo "2. Run 'make setup-pypi' and edit ~/.pypirc with your tokens"; \
echo "3. Get token from: https://pypi.org/manage/account/token/"; \
exit 1; \
fi
@read -p "Are you sure you want to continue? (y/N): " confirm && [ "$$confirm" = "y" ]
@if [ -n "$$TWINE_PASSWORD_PYPI" ]; then \
echo "Using environment variable for authentication"; \
. $(VENV_ACTIVATE) && TWINE_USERNAME=__token__ TWINE_PASSWORD=$$TWINE_PASSWORD_PYPI $(VENV_PYTHON) -m twine upload --verbose --skip-existing dist/*; \
else \
echo "Using ~/.pypirc for authentication"; \
. $(VENV_ACTIVATE) && $(VENV_PYTHON) -m twine upload --verbose --skip-existing dist/*; \
fi
@echo "Upload to PyPI complete!"
@echo "Package is now available at: https://pypi.org/project/qbit-manage/"
.PHONY: bump-version
bump-version:
@echo "Current version: $$(cat VERSION)"
@echo "Bumping patch version for testing..."
@current_version=$$(cat VERSION | cut -d'-' -f1); \
IFS='.' read -r major minor patch <<< "$$current_version"; \
new_patch=$$((patch + 1)); \
new_version="$$major.$$minor.$$new_patch"; \
echo "$$new_version-dev" > VERSION; \
echo "✓ Version bumped to: $$(cat VERSION)"
@echo "Now you can run: make build && make upload-test"
.PHONY: debug-upload
debug-upload: check-dist
@echo "Debugging upload configuration..."
@echo "Current version: $$(cat VERSION 2>/dev/null || echo 'VERSION file not found')"
@echo ""
@echo "Checking ~/.pypirc configuration:"
@if [ -f ~/.pypirc ]; then \
echo "✓ ~/.pypirc exists"; \
echo "Repositories configured:"; \
grep -E "^\[.*\]" ~/.pypirc || echo "No repositories found"; \
echo ""; \
echo "Test PyPI config:"; \
sed -n '/\[testpypi\]/,/^\[/p' ~/.pypirc | head -n -1 || echo "No testpypi section found"; \
else \
echo "❌ ~/.pypirc not found"; \
fi
@echo ""
@echo "Environment variables:"
@echo "TWINE_USERNAME: $${TWINE_USERNAME:-not set}"
@echo "TWINE_PASSWORD_TESTPYPI: $${TWINE_PASSWORD_TESTPYPI:+set (hidden)}"
@echo "TWINE_PASSWORD_PYPI: $${TWINE_PASSWORD_PYPI:+set (hidden)}"
@echo ""
@echo "Package information:"
@ls -la dist/ 2>/dev/null || echo "No dist/ directory found"
@echo ""
@echo "Common issues and solutions:"
@echo " - File already exists: Run 'make bump-version' to create a new version"
@echo " - Invalid token: Run 'make setup-pypi' to reconfigure"
@echo " - Package name taken: Change name in pyproject.toml"
# UV Tool Installation targets
.PHONY: install
install:
@echo "Installing qbit-manage using uv tool..."
@echo "Cleaning cache and build artifacts to ensure fresh install..."
@rm -rf build/ dist/ *.egg-info/ 2>/dev/null || true
@find . -name "__pycache__" -type d -exec rm -rf {} + 2>/dev/null || true
@$(UV_PATH) cache clean >/dev/null 2>&1 || true
@$(UV_PATH) tool install . --force
@echo "✓ Installation complete!"
@echo "Test with: qbit-manage --version"
.PHONY: uninstall
uninstall:
@echo "Uninstalling qbit-manage..."
@$(UV_PATH) tool uninstall qbit-manage || echo "qbit-manage was not installed"
@echo "✓ Uninstall complete!"
.PHONY: reinstall
reinstall: uninstall install
@echo "✓ Reinstall complete!"
.PHONY: prep-release
prep-release:
@echo "Preparing release..."
@# Step 1: Strip '-develop*' suffix from VERSION
@current_version=$$(cat VERSION); \
clean_version=$$(echo $$current_version | sed 's/-develop.*$$//'); \
echo "$$clean_version" > VERSION; \
echo "✓ VERSION updated to $$clean_version"
@# Step 2: Check Tauri Rust project builds
@echo "Running cargo check in desktop/tauri/src-tauri..."
@cd desktop/tauri/src-tauri && cargo check
@# Step 3: Prepare CHANGELOG skeleton and bump Full Changelog link
@new_version=$$(cat VERSION); \
major=$$(echo "$$new_version" | cut -d. -f1); \
minor=$$(echo "$$new_version" | cut -d. -f2); \
patch=$$(echo "$$new_version" | cut -d. -f3); \
prev_patch=$$((patch - 1)); \
prev_version="$$major.$$minor.$$prev_patch"; \
echo "# Requirements Updated" > CHANGELOG; \
echo "" >> CHANGELOG; \
echo "" >> CHANGELOG; \
echo "# New Features" >> CHANGELOG; \
echo "" >> CHANGELOG; \
echo "" >> CHANGELOG; \
echo "# Improvements" >> CHANGELOG; \
echo "" >> CHANGELOG; \
echo "" >> CHANGELOG; \
echo "# Bug Fixes" >> CHANGELOG; \
echo "" >> CHANGELOG; \
echo "" >> CHANGELOG; \
echo "**Full Changelog**: https://github.com/StuffAnThings/qbit_manage/compare/v$$prev_version...v$$new_version" >> CHANGELOG; \
echo "✓ CHANGELOG prepared for release $$new_version"
@echo ""
@echo "REMINDER: Update the CHANGELOG contents with actual improvements and bug fixes before making the release."
.PHONY: help
help:
@echo "Available targets:"
@echo " install - Install qbit-manage using uv tool (overwrites existing)"
@echo " uninstall - Uninstall qbit-manage from uv tools"
@echo " reinstall - Uninstall then install (clean reinstall)"
@echo " venv - Create virtual environment and install dependencies"
@echo " sync - Sync dependencies from pyproject.toml"
@echo " test - Run tests"
@echo " lint - Run linter with fixes"
@echo " format - Run code formatter"
@echo " pre-commit - Run pre-commit hooks"
@echo " build - Build package for distribution"
@echo " check-dist - Check distribution files"
@echo " setup-pypi - Set up PyPI configuration (~/.pypirc)"
@echo " bump-version - Bump patch version for testing uploads"
@echo " prep-release - Strip '-develop*' from VERSION, cargo check, and template CHANGELOG"
@echo " debug-upload - Debug PyPI upload configuration"
@echo " upload-test - Upload to Test PyPI (uses env vars or ~/.pypirc)"
@echo " upload-pypi - Upload to PyPI (LIVE) (uses env vars or ~/.pypirc)"
@echo " clean - Clean up all generated files (venv, dist, build, cache)"
@echo " help - Show this help message"

---

@@ -3,7 +3,7 @@
[![GitHub release (latest by date)](https://img.shields.io/github/v/release/StuffAnThings/qbit_manage?style=plastic)](https://github.com/StuffAnThings/qbit_manage/releases)
[![GitHub commits since latest release (by SemVer)](https://img.shields.io/github/commits-since/StuffAnThings/qbit_manage/latest/develop?label=Commits%20in%20Develop&style=plastic)](https://github.com/StuffAnThings/qbit_manage/tree/develop)
[![Docker Image Version (latest semver)](https://img.shields.io/docker/v/bobokun/qbit_manage?label=docker&sort=semver&style=plastic)](https://hub.docker.com/r/bobokun/qbit_manage)
![Github Workflow Status](https://img.shields.io/github/actions/workflow/status/StuffAnThings/qbit_manage/latest.yml?style=plastic)
![Github Workflow Status](https://img.shields.io/github/actions/workflow/status/StuffAnThings/qbit_manage/version.yml?style=plastic)
[![pre-commit.ci status](https://results.pre-commit.ci/badge/github/StuffAnThings/qbit_manage/master.svg)](https://results.pre-commit.ci/latest/github/StuffAnThings/qbit_manage/master)
[![Ghcr packages](https://img.shields.io/badge/ghcr.io-packages?style=plastic&label=packages)](https://ghcr.io/StuffAnThings/qbit_manage)
[![Docker Pulls](https://img.shields.io/docker/pulls/bobokun/qbit_manage?style=plastic)](https://hub.docker.com/r/bobokun/qbit_manage)

---

@@ -1 +1 @@
4.5.4
4.5.5

---

@@ -211,7 +211,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "241b621213072e993be4f6f3a9e4b45f65b7e6faad43001be957184b7bb1824b"
dependencies = [
"atk-sys",
"glib",
"glib 0.18.5",
"libc",
]
@@ -221,10 +221,10 @@ version = "0.18.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5e48b684b0ca77d2bbadeef17424c2ea3c897d44d566a1617e7e8f30614d086"
dependencies = [
"glib-sys",
"gobject-sys",
"glib-sys 0.18.1",
"gobject-sys 0.18.0",
"libc",
"system-deps",
"system-deps 6.2.2",
]
[[package]]
@@ -377,7 +377,7 @@ checksum = "8ca26ef0159422fb77631dc9d17b102f253b876fe1586b03b803e63a309b4ee2"
dependencies = [
"bitflags 2.9.2",
"cairo-sys-rs",
"glib",
"glib 0.18.5",
"libc",
"once_cell",
"thiserror 1.0.69",
@@ -389,9 +389,9 @@ version = "0.18.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "685c9fa8e590b8b3d678873528d83411db17242a73fccaed827770ea0fedda51"
dependencies = [
"glib-sys",
"glib-sys 0.18.1",
"libc",
"system-deps",
"system-deps 6.2.2",
]
[[package]]
@@ -469,7 +469,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d067ad48b8650848b989a59a86c6c36a995d02d2bf778d45c3c5d57bc2718f02"
dependencies = [
"smallvec",
"target-lexicon",
"target-lexicon 0.12.16",
]
[[package]]
name = "cfg-expr"
version = "0.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8d458d63f0f0f482c8da9b7c8b76c21bd885a02056cc94c6404d861ca2b8206"
dependencies = [
"smallvec",
"target-lexicon 0.13.2",
]
[[package]]
@@ -1142,7 +1152,7 @@ dependencies = [
"gdk-pixbuf",
"gdk-sys",
"gio",
"glib",
"glib 0.18.5",
"libc",
"pango",
]
@@ -1155,7 +1165,7 @@ checksum = "50e1f5f1b0bfb830d6ccc8066d18db35c487b1b2b1e8589b5dfe9f07e8defaec"
dependencies = [
"gdk-pixbuf-sys",
"gio",
"glib",
"glib 0.18.5",
"libc",
"once_cell",
]
@@ -1166,11 +1176,11 @@ version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9839ea644ed9c97a34d129ad56d38a25e6756f99f3a88e15cd39c20629caf7"
dependencies = [
"gio-sys",
"glib-sys",
"gobject-sys",
"gio-sys 0.18.1",
"glib-sys 0.18.1",
"gobject-sys 0.18.0",
"libc",
"system-deps",
"system-deps 6.2.2",
]
[[package]]
@@ -1181,13 +1191,13 @@ checksum = "5c2d13f38594ac1e66619e188c6d5a1adb98d11b2fcf7894fc416ad76aa2f3f7"
dependencies = [
"cairo-sys-rs",
"gdk-pixbuf-sys",
"gio-sys",
"glib-sys",
"gobject-sys",
"gio-sys 0.18.1",
"glib-sys 0.18.1",
"gobject-sys 0.18.0",
"libc",
"pango-sys",
"pkg-config",
"system-deps",
"system-deps 6.2.2",
]
[[package]]
@@ -1197,11 +1207,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "140071d506d223f7572b9f09b5e155afbd77428cd5cc7af8f2694c41d98dfe69"
dependencies = [
"gdk-sys",
"glib-sys",
"gobject-sys",
"glib-sys 0.18.1",
"gobject-sys 0.18.0",
"libc",
"pkg-config",
"system-deps",
"system-deps 6.2.2",
]
[[package]]
@@ -1213,7 +1223,7 @@ dependencies = [
"gdk",
"gdkx11-sys",
"gio",
"glib",
"glib 0.18.5",
"libc",
"x11",
]
@@ -1225,9 +1235,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e2e7445fe01ac26f11601db260dd8608fe172514eb63b3b5e261ea6b0f4428d"
dependencies = [
"gdk-sys",
"glib-sys",
"glib-sys 0.18.1",
"libc",
"system-deps",
"system-deps 6.2.2",
"x11",
]
@@ -1291,8 +1301,8 @@ dependencies = [
"futures-core",
"futures-io",
"futures-util",
"gio-sys",
"glib",
"gio-sys 0.18.1",
"glib 0.18.5",
"libc",
"once_cell",
"pin-project-lite",
@@ -1306,13 +1316,26 @@ version = "0.18.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37566df850baf5e4cb0dfb78af2e4b9898d817ed9263d1090a2df958c64737d2"
dependencies = [
"glib-sys",
"gobject-sys",
"glib-sys 0.18.1",
"gobject-sys 0.18.0",
"libc",
"system-deps",
"system-deps 6.2.2",
"winapi",
]
[[package]]
name = "gio-sys"
version = "0.20.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "521e93a7e56fc89e84aea9a52cfc9436816a4b363b030260b699950ff1336c83"
dependencies = [
"glib-sys 0.20.10",
"gobject-sys 0.20.10",
"libc",
"system-deps 7.0.5",
"windows-sys 0.59.0",
]
[[package]]
name = "glib"
version = "0.18.5"
@@ -1325,10 +1348,10 @@ dependencies = [
"futures-executor",
"futures-task",
"futures-util",
"gio-sys",
"glib-macros",
"glib-sys",
"gobject-sys",
"gio-sys 0.18.1",
"glib-macros 0.18.5",
"glib-sys 0.18.1",
"gobject-sys 0.18.0",
"libc",
"memchr",
"once_cell",
@@ -1336,6 +1359,27 @@ dependencies = [
"thiserror 1.0.69",
]
[[package]]
name = "glib"
version = "0.20.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffc4b6e352d4716d84d7dde562dd9aee2a7d48beb872dd9ece7f2d1515b2d683"
dependencies = [
"bitflags 2.9.2",
"futures-channel",
"futures-core",
"futures-executor",
"futures-task",
"futures-util",
"gio-sys 0.20.10",
"glib-macros 0.20.12",
"glib-sys 0.20.10",
"gobject-sys 0.20.10",
"libc",
"memchr",
"smallvec",
]
[[package]]
name = "glib-macros"
version = "0.18.5"
@@ -1343,13 +1387,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0bb0228f477c0900c880fd78c8759b95c7636dbd7842707f49e132378aa2acdc"
dependencies = [
"heck 0.4.1",
"proc-macro-crate 2.0.2",
"proc-macro-crate 2.0.0",
"proc-macro-error",
"proc-macro2",
"quote",
"syn 2.0.106",
]
[[package]]
name = "glib-macros"
version = "0.20.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8084af62f09475a3f529b1629c10c429d7600ee1398ae12dd3bf175d74e7145"
dependencies = [
"heck 0.5.0",
"proc-macro-crate 3.3.0",
"proc-macro2",
"quote",
"syn 2.0.106",
]
[[package]]
name = "glib-sys"
version = "0.18.1"
@@ -1357,7 +1414,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "063ce2eb6a8d0ea93d2bf8ba1957e78dbab6be1c2220dd3daca57d5a9d869898"
dependencies = [
"libc",
"system-deps",
"system-deps 6.2.2",
]
[[package]]
name = "glib-sys"
version = "0.20.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ab79e1ed126803a8fb827e3de0e2ff95191912b8db65cee467edb56fc4cc215"
dependencies = [
"libc",
"system-deps 7.0.5",
]
[[package]]
@@ -1372,9 +1439,20 @@ version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0850127b514d1c4a4654ead6dedadb18198999985908e6ffe4436f53c785ce44"
dependencies = [
"glib-sys",
"glib-sys 0.18.1",
"libc",
"system-deps",
"system-deps 6.2.2",
]
[[package]]
name = "gobject-sys"
version = "0.20.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec9aca94bb73989e3cfdbf8f2e0f1f6da04db4d291c431f444838925c4c63eda"
dependencies = [
"glib-sys 0.20.10",
"libc",
"system-deps 7.0.5",
]
[[package]]
@@ -1390,7 +1468,7 @@ dependencies = [
"gdk",
"gdk-pixbuf",
"gio",
"glib",
"glib 0.18.5",
"gtk-sys",
"gtk3-macros",
"libc",
@@ -1408,12 +1486,12 @@ dependencies = [
"cairo-sys-rs",
"gdk-pixbuf-sys",
"gdk-sys",
"gio-sys",
"glib-sys",
"gobject-sys",
"gio-sys 0.18.1",
"glib-sys 0.18.1",
"gobject-sys 0.18.0",
"libc",
"pango-sys",
"system-deps",
"system-deps 6.2.2",
]
[[package]]
@@ -1882,7 +1960,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca5671e9ffce8ffba57afc24070e906da7fc4b1ba66f2cabebf61bf2ea257fcc"
dependencies = [
"bitflags 1.3.2",
"glib",
"glib 0.18.5",
"javascriptcore-rs-sys",
]
@@ -1892,10 +1970,10 @@ version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af1be78d14ffa4b75b66df31840478fef72b51f8c2465d4ca7c194da9f7a5124"
dependencies = [
"glib-sys",
"gobject-sys",
"glib-sys 0.18.1",
"gobject-sys 0.18.0",
"libc",
"system-deps",
"system-deps 6.2.2",
]
[[package]]
@@ -1987,7 +2065,7 @@ version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "03589b9607c868cc7ae54c0b2a22c8dc03dd41692d48f2d7df73615c6a95dc0a"
dependencies = [
"glib",
"glib 0.18.5",
"gtk",
"gtk-sys",
"libappindicator-sys",
@@ -2244,7 +2322,7 @@ version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77e878c846a8abae00dd069496dbe8751b16ac1c3d6bd2a7283a938e8228f90d"
dependencies = [
"proc-macro-crate 2.0.2",
"proc-macro-crate 1.3.1",
"proc-macro2",
"quote",
"syn 2.0.106",
@@ -2564,7 +2642,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ca27ec1eb0457ab26f3036ea52229edbdb74dee1edd29063f5b9b010e7ebee4"
dependencies = [
"gio",
"glib",
"glib 0.18.5",
"libc",
"once_cell",
"pango-sys",
@@ -2576,10 +2654,10 @@ version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "436737e391a843e5933d6d9aa102cb126d501e815b83601365a948a518555dc5"
dependencies = [
"glib-sys",
"gobject-sys",
"glib-sys 0.18.1",
"gobject-sys 0.18.0",
"libc",
"system-deps",
"system-deps 6.2.2",
]
[[package]]
@@ -2868,14 +2946,22 @@ dependencies = [
[[package]]
name = "proc-macro-crate"
version = "2.0.2"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b00f26d3400549137f92511a46ac1cd8ce37cb5598a96d382381458b992a5d24"
checksum = "7e8366a6159044a37876a2b9817124296703c586a5c92e2c53751fa06d8d43e8"
dependencies = [
"toml_datetime 0.6.3",
"toml_edit 0.20.2",
]
[[package]]
name = "proc-macro-crate"
version = "3.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "edce586971a4dfaa28950c6f18ed55e0406c1ab88bbce2c6f6293a7aaba73d35"
dependencies = [
"toml_edit 0.22.27",
]
[[package]]
name = "proc-macro-error"
version = "1.0.4"
@@ -2917,8 +3003,9 @@ dependencies = [
[[package]]
name = "qbit-manage-desktop"
version = "4.5.4-develop51"
version = "4.5.5-develop10"
dependencies = [
"glib 0.20.12",
"libc",
"once_cell",
"reqwest 0.11.27",
@@ -3686,7 +3773,7 @@ checksum = "471f924a40f31251afc77450e781cb26d55c0b650842efafc9c6cbd2f7cc4f9f"
dependencies = [
"futures-channel",
"gio",
"glib",
"glib 0.18.5",
"libc",
"soup3-sys",
]
@@ -3697,11 +3784,11 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ebe8950a680a12f24f15ebe1bf70db7af98ad242d9db43596ad3108aab86c27"
dependencies = [
"gio-sys",
"glib-sys",
"gobject-sys",
"gio-sys 0.18.1",
"glib-sys 0.18.1",
"gobject-sys 0.18.0",
"libc",
"system-deps",
"system-deps 6.2.2",
]
[[package]]
@@ -3833,7 +3920,20 @@ version = "6.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3e535eb8dded36d55ec13eddacd30dec501792ff23a0b1682c38601b8cf2349"
dependencies = [
"cfg-expr",
"cfg-expr 0.15.8",
"heck 0.5.0",
"pkg-config",
"toml 0.8.2",
"version-compare",
]
[[package]]
name = "system-deps"
version = "7.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4be53aa0cba896d2dc615bd42bbc130acdcffa239e0a2d965ea5b3b2a86ffdb"
dependencies = [
"cfg-expr 0.20.2",
"heck 0.5.0",
"pkg-config",
"toml 0.8.2",
@@ -3896,6 +3996,12 @@ version = "0.12.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1"
[[package]]
name = "target-lexicon"
version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e502f78cdbb8ba4718f566c418c52bc729126ffd16baee5baa718cf25dd5a69a"
[[package]]
name = "tauri"
version = "2.7.0"
@@ -4347,7 +4453,7 @@ checksum = "185d8ab0dfbb35cf1399a6344d8484209c088f75f8f68230da55d48d95d43e3d"
dependencies = [
"serde",
"serde_spanned 0.6.9",
"toml_datetime 0.6.3",
"toml_datetime 0.6.11",
"toml_edit 0.20.2",
]
@@ -4368,9 +4474,9 @@ dependencies = [
[[package]]
name = "toml_datetime"
version = "0.6.3"
version = "0.6.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b"
checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c"
dependencies = [
"serde",
]
@@ -4391,7 +4497,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421"
dependencies = [
"indexmap 2.10.0",
"toml_datetime 0.6.3",
"toml_datetime 0.6.11",
"winnow 0.5.40",
]
@@ -4404,10 +4510,21 @@ dependencies = [
"indexmap 2.10.0",
"serde",
"serde_spanned 0.6.9",
"toml_datetime 0.6.3",
"toml_datetime 0.6.11",
"winnow 0.5.40",
]
[[package]]
name = "toml_edit"
version = "0.22.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a"
dependencies = [
"indexmap 2.10.0",
"toml_datetime 0.6.11",
"winnow 0.7.12",
]
[[package]]
name = "toml_parser"
version = "1.0.2"
@@ -4834,10 +4951,10 @@ dependencies = [
"gdk",
"gdk-sys",
"gio",
"gio-sys",
"glib",
"glib-sys",
"gobject-sys",
"gio-sys 0.18.1",
"glib 0.18.5",
"glib-sys 0.18.1",
"gobject-sys 0.18.0",
"gtk",
"gtk-sys",
"javascriptcore-rs",
@@ -4856,15 +4973,15 @@ dependencies = [
"bitflags 1.3.2",
"cairo-sys-rs",
"gdk-sys",
"gio-sys",
"glib-sys",
"gobject-sys",
"gio-sys 0.18.1",
"glib-sys 0.18.1",
"gobject-sys 0.18.0",
"gtk-sys",
"javascriptcore-rs-sys",
"libc",
"pkg-config",
"soup3-sys",
"system-deps",
"system-deps 6.2.2",
]
[[package]]
@@ -5441,6 +5558,9 @@ name = "winnow"
version = "0.7.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3edebf492c8125044983378ecb5766203ad3b4c2f7a922bd7dd207f6d443e95"
dependencies = [
"memchr",
]
[[package]]
name = "winreg"

---

@@ -43,9 +43,10 @@ license = "MIT"
name = "qbit-manage-desktop"
repository = ""
rust-version = "1.70"
version = "4.5.4-develop52"
version = "4.5.5"
[target."cfg(unix)".dependencies]
glib = "0.20.0"
libc = "0.2"
[target."cfg(windows)".dependencies.windows]

---

@@ -1,72 +1,72 @@
{
"app": {
"security": {
"csp": null
},
"windows": [
{
"decorations": true,
"fullscreen": false,
"height": 800,
"label": "main",
"minHeight": 600,
"minWidth": 900,
"resizable": true,
"title": "qBit Manage",
"visible": false,
"width": 1100
}
],
"withGlobalTauri": true
"app": {
"security": {
"csp": null
},
"build": {
"beforeBuildCommand": "",
"beforeDevCommand": "",
"devUrl": "http://localhost:8080",
"frontendDist": "../src"
"windows": [
{
"decorations": true,
"fullscreen": false,
"height": 800,
"label": "main",
"minHeight": 600,
"minWidth": 900,
"resizable": true,
"title": "qBit Manage",
"visible": false,
"width": 1100
}
],
"withGlobalTauri": true
},
"build": {
"beforeBuildCommand": "",
"beforeDevCommand": "",
"devUrl": "http://localhost:8080",
"frontendDist": "../src"
},
"bundle": {
"active": true,
"category": "Utility",
"icon": [
"../../../icons/qbm_logo.icns",
"../../../icons/qbm_logo.ico",
"../../../icons/qbm_logo.png"
],
"linux": {
"deb": {
"depends": [
"libgtk-3-0",
"libayatana-appindicator3-1",
"libwebkit2gtk-4.1-0"
]
}
},
"bundle": {
"active": true,
"category": "Utility",
"icon": [
"../../../icons/qbm_logo.icns",
"../../../icons/qbm_logo.ico",
"../../../icons/qbm_logo.png"
],
"linux": {
"deb": {
"depends": [
"libgtk-3-0",
"libayatana-appindicator3-1",
"libwebkit2gtk-4.1-0"
]
}
},
"macOS": {
"frameworks": [],
"minimumSystemVersion": "10.13"
},
"resources": [
"bin/*"
],
"targets": [
"deb",
"nsis",
"app",
"dmg"
],
"windows": {
"certificateThumbprint": null,
"digestAlgorithm": "sha256",
"nsis": {
"displayLanguageSelector": true,
"installMode": "currentUser",
"installerIcon": "../../../icons/qbm_logo.ico"
},
"timestampUrl": ""
}
"macOS": {
"frameworks": [],
"minimumSystemVersion": "10.13"
},
"identifier": "com.qbitmanage.desktop",
"productName": "qBit Manage",
"version": "4.5.4-develop52"
}
"resources": [
"bin/*"
],
"targets": [
"deb",
"nsis",
"app",
"dmg"
],
"windows": {
"certificateThumbprint": null,
"digestAlgorithm": "sha256",
"nsis": {
"displayLanguageSelector": true,
"installMode": "currentUser",
"installerIcon": "../../../icons/qbm_logo.ico"
},
"timestampUrl": ""
}
},
"identifier": "com.qbitmanage.desktop",
"productName": "qBit Manage",
"version": "4.5.5"
}

---

@@ -8,10 +8,10 @@
| `-r` or`--run` | QBT_RUN | N/A | Run without the scheduler. Script will exit after completion. | False |
| `-sch` or `--schedule` | QBT_SCHEDULE | N/A | Schedule to run every x minutes or choose customize schedule via [cron](https://crontab.guru/examples.html). (Default set to 1440 (1 day)) | 1440 |
| `-sd` or `--startup-delay` | QBT_STARTUP_DELAY | N/A | Set delay in seconds on the first run of a schedule (Default set to 0) | 0 |
| `-c CONFIG` or `--config-file CONFIG` | QBT_CONFIG | N/A | Override the default config file location. By default, qbit_manage looks for `config.yml` in platform-specific directories (see [Config-Setup](Config-Setup.md) for details). Use this to specify a custom path or filename. `Example: tv.yml`. Supports wildcards to use multiple configs. `Example: config-*.yml` | Platform-specific |
| `-c CONFIG` or `--config-file CONFIG` | QBT_CONFIG | N/A | Override the default config file location. By default, qbit_manage looks for `config.yml` in platform-specific directories (see [Config-Setup](Config-Setup) for details). Use this to specify a custom path or filename. `Example: tv.yml`. Supports wildcards to use multiple configs. `Example: config-*.yml` | Platform-specific |
| `-lf LOGFILE,` or `--log-file LOGFILE,` | QBT_LOGFILE | N/A | This is used if you want to use a different name for your log file. `Example: tv.log` | activity.log |
| `-re` or `--recheck` | QBT_RECHECK | recheck | Recheck paused torrents sorted by lowest size. Resume if Completed. | False |
| `-cu` or `--cat-update` | QBT_CAT_UPDATE | cat_update | Use this if you would like to update your categories or move from one category to another. | False |
| `-cu` or `--cat-update` | QBT_CAT_UPDATE | cat_update | Use this option to update your categories or switch between them. The category function takes the save path of the torrent and assigns the corresponding category to it based on that path. | False |
| `-tu` or `--tag-update` | QBT_TAG_UPDATE | tag_update | Use this if you would like to update your tags and/or set seed goals/limit upload speed by tag. | False |
| `-ru` or `--rem-unregistered` | QBT_REM_UNREGISTERED | rem_unregistered | Use this if you would like to remove unregistered torrents. (It will the delete data & torrent if it is not being cross-seeded, otherwise it will just remove the torrent without deleting data). Trackers that have an error and not covered by the remove unregistered logic will also be tagged as `issue` for manual review. | False |
| `-tte` or `--tag-tracker-error` | QBT_TAG_TRACKER_ERROR | tag_tracker_error | Use this if you would like to tag torrents that do not have a working tracker. | False |
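
As a usage illustration of the flags documented above (file names are placeholders):

```bash
# One-off run that updates categories, using a custom config file.
qbit-manage --run --cat-update --config-file tv.yml

# Equivalent intent expressed through the documented environment variables,
# e.g. in a docker run invocation (image tag illustrative).
docker run --rm -e QBT_RUN=true -e QBT_CAT_UPDATE=true -e QBT_CONFIG=tv.yml \
  bobokun/qbit_manage
```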

---

@@ -1,7 +1,7 @@
# Overview
The script utilizes a YAML config file to load information to connect to the various APIs you can connect with. Alternatively, you can configure qBit Manage using the [Web UI](Web-UI.md), which requires the [Web API](Web-API.md) to be enabled.
The script utilizes a YAML config file to load information to connect to the various APIs you can connect with. Alternatively, you can configure qBit Manage using the [Web UI](Web-UI), which requires the [Web API](Web-API) to be enabled.
## Default Configuration File Locations
@@ -21,7 +21,8 @@ A template Configuration File can be found in the repo [config/config.yml.sample
You can reference environment variables inside your `config.yml` by `!ENV VAR_NAME`
**WARNING**: As this software is constantly evolving and this wiki might not be up to date the sample shown here might not might not be current. Please refer to the repo for the most current version.
> [!CAUTION]
> As this software is constantly evolving and this wiki might not be up to date the sample shown here might not be current. Please refer to the repo for the most current version.
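
For example, a qBittorrent password can stay out of the file and be injected through `!ENV` (the `qbt`/`pass` key names below are illustrative; check the sample config for the exact schema):

```bash
# Export the secret, then reference it from config.yml via !ENV.
export QBT_PASSWORD='s3cret'

cat >> config.yml <<'EOF'
qbt:
  pass: !ENV QBT_PASSWORD
EOF
```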
# Config File
@@ -91,7 +92,12 @@ This section defines the directories that qbit_manage will be looking into for v
---
This section defines the categories that you are currently using and the save path's that are associated with them.
> **NOTE** ALL save paths must be defined, if it is in your qBit, then it **MUST** be defined here, if not the script will throw errors. If you want to leave a save_path as uncategorized you can use the key 'Uncategorized' as the name of the category. You may map a folder and its subdirectories to a single folder by appending `*` to the path e.g. `<path>/<to>/category/*`
The `cat` command uses the categories defined here; the category function takes the save path of the torrent and assigns the category based on that save path
> [!WARNING]
> **ALL save paths** of all torrents must be defined here. If the save path is in your qBit, then it **MUST** be defined here, if not the script will throw errors.
> If you want to leave a save_path as uncategorized you can use the key 'Uncategorized' as the name of the category.
> You may map a folder and its subdirectories to a single folder by appending `*` to the path e.g. `<path>/<to>/category/*`
| Configuration | Definition | Required |
| :------------ | :------------------------ | :----------------- |
@@ -108,7 +114,9 @@ category: <path>/<to>/category
---
This moves all the torrents from one category to another category if the torrents are marked as complete.
> **NOTE** **WARNING**: if the paths are different and Default Torrent Management Mode is set to automatic the files could be moved !!!
> [!CAUTION]
> If the paths are different and Default Torrent Management Mode is set to Automatic the files could be moved !!!
| Configuration | Definition | Required |
| :------------ | :---------------------------- | :----------------- |
@@ -138,9 +146,11 @@ This section defines the tags used based upon the tracker's URL.
If you are unsure what keyword to use, select a torrent within qBittorrent and open the `Trackers` tab at the bottom; the list populated there shows the trackers associated with that torrent. Pick a keyword from it and add it to the config file. Make sure the keyword is unique enough that the script will not confuse it with any other tracker.
> **NOTE** The `other` key is a special keyword and if defined will tag any other trackers that don't match the above trackers into this tag.
> [!TIP]
> The `other` key is a special keyword and if defined will tag any other trackers that don't match the above trackers into this tag.
> **NOTE** If `other` is not used then trackers will be auto added.
> [!NOTE]
> If `other` is not used then trackers will be auto added.
## **nohardlinks:**
@@ -154,7 +164,8 @@ If you're needing information regarding hardlinks here are some excellent resour
* [Trash-Guides: Hardlinks and Instant Moves (Atomic-Moves)](https://trash-guides.info/Hardlinks/Hardlinks-and-Instant-Moves/)
* [Wikipedia: Hardlinks](https://en.wikipedia.org/wiki/Hard_link)
Mandatory to fill out [directory parameter](#directory) above to use this function (root_dir/remote_dir)
> [!NOTE]
> Mandatory to fill out [directory parameter](#directory) above to use this function (root_dir/remote_dir)
Beyond this you'll need to use one of the [categories](#cat) above as the key.
| Configuration | Definition | Required |
@@ -190,7 +201,7 @@ Control how torrent share limits are set depending on the priority of your group
| `max_last_active` | Will delete the torrent if cleanup variable is set and if torrent has been inactive longer than x minutes. See Some examples of [valid time expressions](https://github.com/onegreyonewhite/pytimeparse2?tab=readme-ov-file#pytimeparse2-time-expression-parser) 32m, 2h32m, 3d2h32m, 1w3d2h32m | -1 | str | <center></center> |
| `min_seeding_time` | Will prevent torrent deletion by the cleanup variable if the torrent has reached the `max_ratio` limit you have set. If the torrent has not yet reached this minimum seeding time, it will change the share limits back to no limits and resume the torrent to continue seeding. See Some examples of [valid time expressions](https://github.com/onegreyonewhite/pytimeparse2?tab=readme-ov-file#pytimeparse2-time-expression-parser) 32m, 2h32m, 3d2h32m, 1w3d2h32m. **MANDATORY: Must use also `max_ratio` with a value greater than `0` (default: `-1`) for this to work.** If you use both `min_seed_time` and `max_seed_time`, then you must set the value of `max_seed_time` to a number greater than `min_seed_time`. | 0 | str | <center></center> |
| `min_last_active` | Will prevent torrent deletion by cleanup variable if torrent has been active within the last x minutes. If the torrent has been active within the last x minutes, it will change the share limits back to no limits and resume the torrent to continue seeding. See Some examples of [valid time expressions](https://github.com/onegreyonewhite/pytimeparse2?tab=readme-ov-file#pytimeparse2-time-expression-parser) 32m, 2h32m, 3d2h32m, 1w3d2h32m | 0 | str | <center></center> |
| `limit_upload_speed` | Will limit the upload speed KiB/s (KiloBytes/second) (`-1` : No Limit) | 0 | int | <center></center> |
| `limit_upload_speed` | Will limit the upload speed KiB/s (KiloBytes/second) (`-1` : No Limit) | -1 | int | <center></center> |
| `upload_speed_on_limit_reached` | When cleanup is `false` and a torrent reaches its share limits, throttle pertorrent upload to this value (KiB/s). Use `-1` for unlimited. QBM will also clear the share limits to prevent qBittorrent from pausing, allowing continued seeding at the throttled rate. | -1 | int | <center></center> |
| `enable_group_upload_speed` | Upload speed limits are applied at the group level. This will take `limit_upload_speed` defined and divide it equally among the number of torrents in the group. | False | bool | <center></center> |
| `reset_upload_speed_on_unmet_minimums` | Controls whether upload speed limits are reset when minimum conditions are not met. When `true` (default), upload speed limits will be reset to unlimited if minimum seeding time, number of seeds, or last active time conditions are not satisfied. When `false`, existing upload speed limits will be preserved for bandwidth management purposes. | True | bool | <center></center> |
@@ -213,6 +224,7 @@ Control how torrent share limits are set depending on the priority of your group
| `save_torrents` | This will save a copy of your .torrent and .fastresume file in the recycle bin before deleting it from qbittorrent. This requires the [torrents_dir](#directory) to be defined | False | <center></center> |
| `split_by_category` | This will split the recycle bin folder by the save path defined in the [cat](#cat) attribute and add the base folder name of the recycle bin that was defined in [recycle_bin](#directory) | False | <center></center> |
> [!TIP]
> Note: The more time you place for the `empty_after_x_days:` variable the better, allowing you more time to catch any mistakes by the script. If the variable is set to `0` it will delete contents immediately after every script run. If the variable is not set it will never delete the contents of the Recycle Bin.
## **orphaned:**
@@ -230,8 +242,8 @@ This is handy when you have automatically generated files that certain OSs decid
| `min_file_age_minutes` | Minimum age in minutes for files to be considered orphaned. Files newer than this will be protected from deletion to prevent removal of actively uploading files. Set to 0 to disable age protection. | 0 | <center></center> |
> Note: The more time you place for the `empty_after_x_days:` variable the better, allowing you more time to catch any mistakes by the script. If the variable is set to `0` it will delete contents immediately after every script run. If the variable is not set it will never delete the contents of the Orphaned Data.
> [!TIP]
> The more time you place for the `empty_after_x_days:` variable the better, allowing you more time to catch any mistakes by the script. If the variable is set to `0` it will delete contents immediately after every script run. If the variable is not set it will never delete the contents of the Orphaned Data.
## **apprise:**

---

@@ -43,7 +43,7 @@ services:
# Scheduler Configuration
- QBT_RUN=false
- QBT_SCHEDULE=1440
- QBT_CONFIG=/config/config.yml
- QBT_CONFIG=config.yml
- QBT_LOGFILE=activity.log
# Command Flags
@@ -78,7 +78,7 @@ The Web API and Web UI are enabled by default in this Docker setup.
2. Access the Web UI at `http://your-host:8080`
3. Access the Web API at `http://your-host:8080/api/run-command`
See the [Web API Documentation](Web-API.md) for detailed usage instructions and examples.
See the [Web API Documentation](Web-API) for detailed usage instructions and examples.
You will also need to define not just the config volume but the volume to your torrents, this is in order to use the recycling bin, remove orphans and the no hard link options
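
A quick smoke test of the two endpoints mentioned above (host and port as configured; the request payload is an assumption — see the Web API documentation for the actual schema):

```bash
# Is the Web UI reachable?
curl -fsS http://localhost:8080/ >/dev/null && echo "Web UI is up"

# Trigger a run through the Web API (hypothetical payload shape).
curl -fsS -X POST http://localhost:8080/api/run-command \
  -H 'Content-Type: application/json' \
  -d '{"commands": ["cat_update"]}'
```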

---

@@ -5,10 +5,10 @@ This wiki should tell you everything you need to know about the script to get it
## Getting Started
1. **Choose your installation method:**
- **Desktop App** (Recommended): Download and install the GUI application for [Windows, macOS, or Linux](Installation#desktop-app-installation)
- **Standalone Binary**: Download the command-line executable for [Windows, macOS, or Linux](Installation#standalone-binary-installation)
- **Docker**: Follow the [Docker Installation](Docker-Installation) guide for containerized environments
- **Python/Source**: Install from [PyPI or source code](Installation#pythonsource-installation) for development
- **unRAID**: Follow the [unRAID Installation](Unraid-Installation) guide for unRAID systems
2. **Configure qbit_manage:**


@ -30,10 +30,9 @@ Perfect for server environments, automation, or users who prefer command-line to
- [Desktop App Installation](#desktop-app-installation)
- [Standalone Binary Installation](#standalone-binary-installation)
- [Python/Source Installation](#pythonsource-installation)
- [Docker Installation](Docker-Installation)
- [unRAID Installation](Unraid-Installation)
## Desktop App Installation
@ -102,6 +101,99 @@ Perfect for server environments, automation, or users who prefer command-line to
./qbit-manage-linux-amd64 --help
```
## Python/Source Installation
Developers and users who want to modify the code can install from source or PyPI.
### Prerequisites
- Python 3.9 or higher
- Git (for source installation)
### Method 1: Install from PyPI
```bash
# Install uv first
curl -LsSf https://astral.sh/uv/install.sh | sh
# Install qbit-manage
uv tool install qbit-manage
```
### Method 2: Install from Source
```bash
# Clone the repository
git clone https://github.com/StuffAnThings/qbit_manage.git
cd qbit_manage
# Install uv if not already installed
curl -LsSf https://astral.sh/uv/install.sh | sh
# Install the package
uv tool install .
```
### Running qbit-manage
After installation, you can run qbit-manage from anywhere:
```bash
# Show help and available options
qbit-manage --help
# Run once (without scheduler)
qbit-manage --run
# Run with web UI (default on desktop)
qbit-manage --web-server
```
### Usage
After installation, you can run qbit_manage using:
```bash
qbit-manage --help
```
> [!TIP]
> For Python installations, it's recommended to use a virtual environment to avoid conflicts with other packages.
### Development Installation
For development work or to contribute to the project:
```bash
# Clone the repository
git clone https://github.com/StuffAnThings/qbit_manage.git
cd qbit_manage
# Install uv if not already installed
curl -LsSf https://astral.sh/uv/install.sh | sh
# Create virtual environment and install dependencies
uv venv
source .venv/bin/activate # Linux/macOS
# .venv\Scripts\activate # Windows
# Install in development mode
uv pip install -e .
```
### Updating
**Tool installation:**
```bash
uv tool upgrade qbit-manage
```
**Development installation:**
```bash
cd qbit_manage
git pull
uv pip install -e . --upgrade
```
## Quick Reference: Default Configuration File Locations
### Desktop App & Standalone Binary


@ -1,179 +0,0 @@
# Python/Source Installation
This guide covers installing qbit_manage from source code or PyPI for development purposes or when you need the latest features.
**Note**: For most users, we recommend using the [Desktop App or Standalone Binary](Installation.md) instead, as they're easier to install and use.
## Prerequisites
* Python 3.9.0 or higher
* pip (Python package installer)
* Git (for source installation)
## Installation Methods
### Method 1: Install from PyPI (Recommended)
```bash
pip install qbit-manage
```
### Method 2: Install from Source
Navigate to the directory where you'd like to clone the repository:
```bash
# Clone the repository
git clone https://github.com/StuffAnThings/qbit_manage
cd qbit_manage
# Install the package
pip install .
```
If you encounter dependency issues, try:
```bash
pip install . --ignore-installed
```
### Method 3: Development Installation
For development or to get the latest unreleased features:
```bash
# Clone the repository
git clone https://github.com/StuffAnThings/qbit_manage
cd qbit_manage
# Install in development mode
pip install -e .
```
## Configuration File Setup
After installation, qbit_manage will look for configuration files in platform-specific locations:
- **Windows**: `%APPDATA%\qbit-manage\config.yml`
- **macOS**: `~/Library/Application Support/qbit-manage/config.yml`
- **Linux/Unix**: `~/.config/qbit-manage/config.yml`
### Setting up the Configuration
1. Create the configuration directory:
```bash
# Windows (PowerShell)
New-Item -ItemType Directory -Force -Path "$env:APPDATA\qbit-manage"
# macOS/Linux
mkdir -p ~/.config/qbit-manage # Linux
mkdir -p ~/Library/Application\ Support/qbit-manage # macOS
```
2. Copy the sample configuration:
```bash
# From the cloned repository
cp config/config.yml.sample ~/.config/qbit-manage/config.yml # Linux
cp config/config.yml.sample ~/Library/Application\ Support/qbit-manage/config.yml # macOS
copy config\config.yml.sample "%APPDATA%\qbit-manage\config.yml" # Windows
```
3. Edit the configuration file as outlined in the [Config-Setup](Config-Setup.md) guide.
**Alternative**: You can place the config file anywhere and specify its location using the `--config-file` option.
## Usage
### Running the Script
### Basic Usage
Run the script with `-h` to see all available commands:
```bash
qbit-manage -h
# or if installed from source
python qbit_manage.py -h
```
### Common Usage Examples
**Run with default configuration:**
```bash
qbit-manage
```
**Run specific commands:**
```bash
qbit-manage --cat-update --tag-update
```
**Run with Web API and Web UI:**
```bash
qbit-manage --web-server
```
You can then access the Web UI in your browser at `http://localhost:8080`.
**Use custom configuration file:**
```bash
qbit-manage --config-file /path/to/your/config.yml
```
**Run in dry-run mode (preview changes without applying them):**
```bash
qbit-manage --dry-run --cat-update --tag-update
```
**Run on a schedule:**
```bash
qbit-manage --schedule 1440 # Run every 24 hours (1440 minutes)
```
### Command Line Options
| Option | Description |
|--------|-------------|
| `--config-file`, `-c` | Specify custom config file location |
| `--log-file`, `-lf` | Specify custom log file location |
| `--web-server`, `-ws` | Start the web server for API and UI |
| `--port`, `-p` | Web server port (default: 8080) |
| `--dry-run`, `-dr` | Preview changes without applying them |
| `--schedule`, `-sch` | Run on a schedule (minutes) |
| `--run`, `-r` | Run once and exit (no scheduler) |
For a complete list of commands and options, see the [Commands](Commands.md) documentation.
### Virtual Environment (Recommended)
For Python installations, it's recommended to use a virtual environment:
```bash
# Create virtual environment
python -m venv qbit-manage-env
# Activate virtual environment
# Linux/macOS:
source qbit-manage-env/bin/activate
# Windows:
qbit-manage-env\Scripts\activate
# Install qbit-manage
pip install qbit-manage
# Run qbit-manage
qbit-manage --help
```
### Updating
**PyPI installation:**
```bash
pip install --upgrade qbit-manage
```
**Source installation:**
```bash
cd qbit_manage
git pull
pip install . --upgrade
```


@ -1,150 +0,0 @@
# \*Nix Installation
* Download the script
```bash
wget -O - https://github.com/StuffAnThings/qbit_manage/archive/master.tar.gz | tar xz --strip=1 "qbit_manage-master"
```
* Make it executable
```bash
chmod +x qbit_manage.py
```
* Get & Install Requirements
```bash
pip install .
```
* Create Config
**Note:** If using the standalone desktop app, it will automatically create the necessary directories and config files. For command-line usage, you have these options:
**Option 1 - Use default system location:**
```bash
# Create the config directory
mkdir -p ~/.config/qbit-manage
# Copy the sample config
cp config/config.yml.sample ~/.config/qbit-manage/config.yml
# Edit the config file
nano ~/.config/qbit-manage/config.yml
```
**Option 2 - Keep in project directory:**
```bash
cd config
cp config.yml.sample config.yml
nano -e config.yml
```
* Create the update script
```bash
nano qbm-update.sh
```
* Paste the below into the update script and update the Paths and Service Name (if using systemd)
```bash
#!/usr/bin/env bash
set -e
set -o pipefail
force_update=${1:-false}
# Constants
QBM_PATH="/opt/qbit_manage"
QBM_VENV_PATH="/opt/.venv/qbm-venv"
QBM_SERVICE_NAME="qbmanage"
QBM_UPSTREAM_GIT_REMOTE="origin"
QBM_VERSION_FILE="$QBM_PATH/VERSION"
QBM_REQUIREMENTS_FILE="$QBM_PATH/pyproject.toml"
CURRENT_UID=$(id -un)
# Check if QBM is installed and if the current user owns it
check_qbm_installation() {
if [ -d "$QBM_PATH" ]; then
qbm_repo_owner=$(stat --format='%U' "$QBM_PATH")
qbm_repo_group=$(stat --format='%G' "$QBM_PATH")
if [ "$qbm_repo_owner" != "$CURRENT_UID" ]; then
echo "You do not own the QbitManage repo. Please run this script as the user that owns the repo [$qbm_repo_owner]."
echo "use 'sudo -u $qbm_repo_owner -g $qbm_repo_group /path/to/qbm-update.sh'"
exit 1
fi
else
echo "QbitManage folder does not exist. Please install QbitManage before running this script."
exit 1
fi
}
# Update QBM if necessary
update_qbm() {
current_branch=$(git -C "$QBM_PATH" rev-parse --abbrev-ref HEAD)
echo "Current Branch: $current_branch. Checking for updates..."
git -C "$QBM_PATH" fetch
if [ "$(git -C "$QBM_PATH" rev-parse HEAD)" = "$(git -C "$QBM_PATH" rev-parse @'{u}')" ] && [ "$force_update" != true ]; then
current_version=$(cat "$QBM_VERSION_FILE")
echo "=== Already up to date $current_version on $current_branch ==="
exit 0
fi
current_requirements=$(sha1sum "$QBM_REQUIREMENTS_FILE" | awk '{print $1}')
git -C "$QBM_PATH" reset --hard "$QBM_UPSTREAM_GIT_REMOTE/$current_branch"
}
# Update virtual environment if requirements have changed
update_venv() {
new_requirements=$(sha1sum "$QBM_REQUIREMENTS_FILE" | awk '{print $1}')
if [ "$current_requirements" != "$new_requirements" ] || [ "$force_update" = true ]; then
echo "=== Requirements changed, updating venv ==="
"$QBM_VENV_PATH/bin/python" -m pip install --upgrade "$QBM_PATH"
fi
}
# Restart the QBM service
restart_service() {
echo "=== Restarting QBM Service ==="
sudo systemctl restart "$QBM_SERVICE_NAME"
new_version=$(cat "$QBM_VERSION_FILE")
echo "=== Updated to $new_version on $current_branch"
}
# Main script execution
check_qbm_installation
update_qbm
update_venv
restart_service
```
* Make the update script executable
```bash
chmod +x qbm-update.sh
```
* Run the update script
```bash
./qbm-update.sh
```
## Running qBit Manage
To run qBit Manage with the Web API and Web UI enabled, execute the `qbit_manage.py` script with the `--web-server` flag:
**If using the default config location (`~/.config/qbit-manage/config.yml`):**
```bash
python qbit_manage.py --web-server
```
**If using a custom config location:**
```bash
python qbit_manage.py --web-server --config-file /path/to/your/config.yml --log-file /path/to/your/activity.log
```
After running, you can access the Web UI in your browser, typically at `http://localhost:8080`.


@ -1,103 +1,111 @@
# Unraid Installation

## Docker Installation (Recommended)

The easiest way to run qbit_manage on Unraid is using the Docker container from Docker Hub.

### Prerequisites

Install [Community Applications](https://forums.unraid.net/topic/38582-plug-in-community-applications/) plugin if you haven't already.

### Installation Steps

1. **Install the Container**
   - Go to the **Apps** tab in Unraid
   - Search for "qbit_manage" in the search box
   - Select the qbit_manage container and click **Install**

2. **Configure Path Mapping**

   > [!IMPORTANT]
   > qbit_manage must have the same path mappings as your qBittorrent container to properly access your torrents.

   **Example:** If qBittorrent is mapped as `/mnt/user/data/:/data`, then qbit_manage must also be mapped the same way.

   - Set the `Root_Dir` variable to match your qBittorrent download path
   - Ensure both containers can see torrents at the same paths

3. **Configure Environment Variables**
   - Set `QBT_WEB_SERVER=true` to enable the Web UI (recommended)
   - Configure other QBT environment options as needed

4. **Apply and Download**
   - Click **Apply** to download and create the container
   - The container may auto-start - stop it if needed

5. **Create Configuration File**
   - Navigate to `/mnt/user/appdata/qbit_manage/` on your Unraid server
   - Download the [sample config file](https://github.com/StuffAnThings/qbit_manage/blob/master/config/config.yml.sample)
   - Rename it to `config.yml` (remove the `.sample` extension)
   - Edit the file according to the [Config Setup guide](Config-Setup)

   > [!TIP]
   > Make sure the `root_dir` in your config matches how qBittorrent sees your torrents (e.g., `/data/torrents`)

6. **Start the Container**
   - Start the qbit_manage container from the Docker tab
   - Check logs at `/mnt/user/appdata/qbit_manage/logs/`

### Web UI Access

If you enabled the web server, access the Web UI at:

```
http://[UNRAID-IP]:8080
```

## Alternative: User Scripts Installation

> [!WARNING]
> This method is more complex and not recommended for most users. Use the Docker method above instead.

<details>
<summary>Click to expand User Scripts installation method</summary>

### Requirements

- [Nerd Pack](https://forums.unraid.net/topic/35866-unraid-6-nerdpack-cli-tools-iftop-iotop-screen-kbd-etc/) plugin
- Python packages: `python-pip`, `python3`, `python-setuptools`

### Installation

1. Install required Python packages via Nerd Pack
2. Download qbit_manage source to your server (e.g., `/mnt/user/data/scripts/qbit/`)
3. Create a User Script to install requirements:

   ```bash
   #!/bin/bash
   echo "Installing required packages"
   python3 -m pip install /mnt/user/data/scripts/qbit/
   echo "Required packages installed"
   ```

4. Set the script to run "At First Array Start Only"
5. Create another User Script to run qbit_manage:

   ```bash
   #!/bin/bash
   echo "Running qBitTorrent Management"
   python3 /mnt/user/data/scripts/qbit/qbit_manage.py \
       --config-file /mnt/user/data/scripts/qbit/config.yml \
       --log-file /mnt/user/data/scripts/qbit/activity.log \
       --run
   echo "qBitTorrent Management Completed"
   ```

6. Set a cron schedule (e.g., `*/30 * * * *` for every 30 minutes)

> If you'd like a guide on setting up cross-seed on unRAID please visit [here](https://github.com/Drazzilb08/cross-seed-guide)

> [!TIP]
> Use `--dry-run` flag first to test your configuration before running live.

</details>

## Troubleshooting

### Common Issues

**Path Mapping Problems:**
- Ensure qbit_manage and qBittorrent have identical path mappings
- Check that the `root_dir` in config.yml matches the container's view of torrents

**Permission Issues:**
- Verify the qbit_manage container has read/write access to your download directories
- Check Unraid user/group permissions

**Container Won't Start:**
- Review container logs in the Docker tab
- Verify config.yml syntax is correct
- Ensure all required path mappings exist


@ -1,5 +1,8 @@
# qBit Manage Web UI
> [!IMPORTANT]
> Below is a summary of the WebUI. For details on specific features and settings, please see the rest of the wiki and the sample config file for the functionality and description of each setting. The WebUI is effectively a GUI config file editor; you cannot edit environment or container variables via the WebUI.
## Overview
The qBit Manage Web UI provides a modern interface for configuring and managing qBit Manage. It offers real-time editing of YAML configuration files through an intuitive visual interface, eliminating the need for manual file editing.


@ -1,11 +1,10 @@
- [Home](Home)
- [Installation](Installation)
- [Desktop App](Installation#desktop-app-installation)
- [Standalone Binary Installation](Installation#standalone-binary-installation)
- [Python/Source Installation](Installation#pythonsource-installation)
- [Docker Installation](Docker-Installation)
- [unRAID Installation](Unraid-Installation)
- [Config Setup](Config-Setup)
- [Sample Config File](Config-Setup#config-file)
- [List of variables](Config-Setup#list-of-variables)
@ -24,6 +23,8 @@
- [notifiarr](Config-Setup#notifiarr)
- [webhooks](Config-Setup#webhooks)
- [Commands](Commands)
- [Web API](Web-API)
- [Web UI](Web-UI)
- Extras
- [Standalone Scripts](Standalone-Scripts)
- [V4 Migration Guide](v4-Migration-Guide)


@ -19,6 +19,7 @@ class Category:
self.status_filter = "completed" if self.config.settings["cat_filter_completed"] else "all"
self.cat_update_all = self.config.settings["cat_update_all"]
self.category()
self.change_categories()
self.config.webhooks_factory.notify(self.torrents_updated, self.notify_attr, group_by="category")
def category(self):
@ -47,10 +48,6 @@ class Category:
continue
if torrent_category not in new_cat:
self.update_cat(torrent, new_cat[0], False)
if self.stats >= 1:
logger.print_line(
@ -63,6 +60,29 @@ class Category:
duration = end_time - start_time
logger.debug(f"Category command completed in {duration:.2f} seconds")
def change_categories(self):
"""Handle category changes separately after main categorization"""
if not self.config.cat_change:
return
logger.separator("Changing Categories", space=False, border=False)
start_time = time.time()
for torrent_category, updated_cat in self.config.cat_change.items():
# Get torrents with the specific category to be changed
torrent_list_filter = {"status_filter": self.status_filter, "category": torrent_category}
if self.hashes:
torrent_list_filter["torrent_hashes"] = self.hashes
torrent_list = self.qbt.get_torrents(torrent_list_filter)
for torrent in torrent_list:
self.update_cat(torrent, updated_cat, True)
end_time = time.time()
duration = end_time - start_time
logger.debug(f"Category change command completed in {duration:.2f} seconds")
def get_tracker_cat(self, torrent):
tracker = self.qbt.get_tags(self.qbt.get_tracker_urls(torrent.trackers))
return [tracker["cat"]] if tracker["cat"] else None


@ -427,7 +427,12 @@ class Scheduler:
def get_next_run(self) -> Optional[datetime]:
"""Get the next scheduled run time."""
with self.lock:
# Return the stored next_run if it exists and is in the future
# This preserves the original schedule timing
if self.next_run and self.next_run > datetime.now():
return self.next_run
# Otherwise, calculate a new next run time
return self._calculate_next_run()
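# Example: with next_run stored for 10:00 and the clock at 09:55, the stored
# value is returned unchanged; once 10:00 has passed, a fresh time is computed.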
def get_current_schedule(self) -> Optional[tuple[str, Union[str, int]]]:
"""Get the current schedule configuration."""
@ -479,6 +484,8 @@ class Scheduler:
return cron.get_next(datetime)
elif schedule_type == "interval":
# For interval schedules, calculate from current time
# The scheduler loop will handle maintaining proper intervals
return now + timedelta(minutes=int(schedule_value))
except Exception as e:


@ -397,30 +397,78 @@ def current_version(version, branch=None):
develop_version = None
develop_version_ts = 0.0
def get_develop():
"""Return latest develop version using TTL cache."""
global develop_version, develop_version_ts
ttl = _get_version_cache_ttl_seconds()
now = time.time()
if develop_version is not None and (now - develop_version_ts) < ttl:
return develop_version
value = get_version("develop")
# Only cache successful lookups
if value and value[0] != "Unknown":
develop_version = value
develop_version_ts = now
return value
master_version = None
master_version_ts = 0.0
def _get_version_cache_ttl_seconds() -> int:
"""Resolve TTL for version cache from env QBM_VERSION_CACHE_TTL.
Accepts seconds (e.g., "600") or human strings (e.g., "10m", "1h").
Defaults to 600 seconds (10 minutes) if unset or invalid.
"""
raw = os.environ.get("QBM_VERSION_CACHE_TTL", "600")
secs = None
try:
secs = int(raw)
except Exception:
try:
secs = parse(raw) if raw else None
except Exception:
secs = None
if not secs or secs < 1:
secs = 600
return int(secs)
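# Illustrative values for QBM_VERSION_CACHE_TTL as resolved above:
#   "600" -> 600 seconds, "10m" -> 600 seconds, "1h" -> 3600 seconds,
#   unset, unparseable, or < 1 -> the 600-second default.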
def get_master():
"""Return latest master version using TTL cache."""
global master_version, master_version_ts
ttl = _get_version_cache_ttl_seconds()
now = time.time()
if master_version is not None and (now - master_version_ts) < ttl:
return master_version
value = get_version("master")
# Only cache successful lookups
if value and value[0] != "Unknown":
master_version = value
master_version_ts = now
return value
def get_version(level):
try:
url = f"https://raw.githubusercontent.com/StuffAnThings/qbit_manage/{level}/VERSION"
return parse_version(requests.get(url).content.decode().strip(), text=level)
except requests.exceptions.ConnectionError:
# Always fetch fresh; bust caches and disable intermediaries
url = f"https://raw.githubusercontent.com/StuffAnThings/qbit_manage/refs/heads/{level}/VERSION"
params = {"ts": int(time.time())}
headers = {
"Cache-Control": "no-cache",
"Pragma": "no-cache",
"Accept": "text/plain",
"User-Agent": "qbit_manage-version-check",
}
resp = requests.get(url, headers=headers, params=params, timeout=5)
resp.raise_for_status()
return parse_version(resp.text.strip(), text=level)
except Exception:
return "Unknown", "Unknown", 0


@ -118,17 +118,40 @@ async def process_queue_periodically(web_api: WebAPI) -> None:
if not is_currently_running and not web_api.web_api_queue.empty():
logger.info("Processing queued requests...")
# Set is_running flag to prevent concurrent execution
try:
    if web_api.is_running_lock.acquire(timeout=0.1):
        try:
            web_api.is_running.value = True
            object.__setattr__(web_api, "_last_run_start", datetime.now())
        finally:
            web_api.is_running_lock.release()
    else:
        # If we can't acquire the lock, skip processing this cycle
        continue
except Exception:
    # If there's an error setting the flag, skip processing this cycle
    continue
try:
    while not web_api.web_api_queue.empty():
        try:
            request = web_api.web_api_queue.get_nowait()
            try:
                await web_api._execute_command(request)
                logger.info("Successfully processed queued request")
            except Exception as e:
                logger.error(f"Error processing queued request: {str(e)}")
        except:
            # Queue is empty, break out of inner loop
            break
finally:
    # Always reset is_running flag after processing queue
    try:
        with web_api.is_running_lock:
            web_api.is_running.value = False
            object.__setattr__(web_api, "_last_run_start", None)
    except Exception as e:
        logger.error(f"Error resetting is_running flag after queue processing: {str(e)}")
await asyncio.sleep(1) # Check every second
except asyncio.CancelledError:
logger.info("Queue processing task cancelled")
@ -226,6 +249,7 @@ class WebAPI:
api_router.get("/logs")(self.get_logs)
api_router.get("/log_files")(self.list_log_files)
api_router.get("/docs")(self.get_documentation)
api_router.get("/version")(self.get_version)
api_router.get("/health")(self.health_check)
api_router.get("/get_base_url")(self.get_base_url)
@ -331,13 +355,33 @@ class WebAPI:
raise HTTPException(status_code=500, detail=str(e))
async def get_version(self) -> dict:
"""Get the current qBit Manage version using centralized util function"""
"""Get the current qBit Manage version with update availability details."""
try:
version, branch = util.get_current_version()
return {"version": version[0]}
latest_version = util.current_version(version, branch=branch)
update_available = False
latest_version_str = None
if latest_version and (version[1] != latest_version[1] or (version[2] and version[2] < latest_version[2])):
update_available = True
latest_version_str = latest_version[0]
return {
"version": version[0],
"branch": branch,
"build": version[2],
"latest_version": latest_version_str or version[0],
"update_available": update_available,
}
except Exception as e:
logger.error(f"Error getting version: {str(e)}")
return {"version": "Unknown"}
return {
"version": "Unknown",
"branch": "Unknown",
"build": 0,
"latest_version": None,
"update_available": False,
}
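# Illustrative response shape (values are examples only):
#   {"version": "v4.5.5", "branch": "master", "build": 0,
#    "latest_version": "v4.5.6", "update_available": True}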
async def health_check(self) -> HealthCheckResponse:
"""Health check endpoint providing application status information."""
@ -835,7 +879,11 @@ class WebAPI:
try:
if self.is_running.value:
# Check if the process has been stuck for too long
if hasattr(self, "_last_run_start") and (datetime.now() - self._last_run_start).total_seconds() > 3600:
if (
hasattr(self, "_last_run_start")
and self._last_run_start is not None
and (datetime.now() - self._last_run_start).total_seconds() > 3600
):
logger.warning("Previous run appears to be stuck. Forcing reset of is_running flag.")
self.is_running.value = False
object.__setattr__(self, "_last_run_start", None) # Clear the stuck timestamp
@ -953,6 +1001,36 @@ class WebAPI:
logger.stacktrace()
raise HTTPException(status_code=500, detail=f"Failed to read log file: {str(e)}")
async def get_documentation(self, file: str):
"""Get documentation content from markdown files."""
try:
# Sanitize the file path to prevent directory traversal
safe_filename = os.path.basename(file)
# Only allow markdown files
if not safe_filename.endswith(".md"):
raise HTTPException(status_code=400, detail="Only markdown files are allowed")
# Construct the path to the docs directory
docs_path = util.runtime_path("docs", safe_filename)
if not docs_path.exists():
raise HTTPException(status_code=404, detail=f"Documentation file not found: {safe_filename}")
# Read and return the file content
with open(docs_path, encoding="utf-8") as f:
content = f.read()
from fastapi.responses import PlainTextResponse
return PlainTextResponse(content=content, media_type="text/markdown")
except HTTPException:
raise
except Exception as e:
logger.error(f"Error reading documentation file: {str(e)}")
raise HTTPException(status_code=500, detail=f"Error reading documentation: {str(e)}")
async def list_log_files(self) -> dict:
"""List available log files."""
if not self.logs_path.exists():


@ -12,7 +12,7 @@ dynamic = ["version"]
description = "This tool will help manage tedious tasks in qBittorrent and automate them. Tag, categorize, remove Orphaned data, remove unregistered torrents and much much more."
readme = "README.md"
requires-python = ">=3.9"
license = {text = "MIT"}
license = "MIT"
authors = [
{name = "bobokun"},
]
@ -24,12 +24,15 @@ dependencies = [
"humanize==4.12.3",
"pytimeparse2==1.7.1",
"qbittorrent-api==2025.7.0",
"requests==2.32.4",
"requests==2.32.5",
"retrying==1.4.2",
"ruamel.yaml==0.18.14",
"ruamel.yaml==0.18.15",
"uvicorn==0.35.0",
]
[project.scripts]
qbit-manage = "qbit_manage:main"
[project.urls]
Homepage = "https://github.com/StuffAnThings"
Repository = "https://github.com/StuffAnThings/qbit_manage"
@ -37,7 +40,7 @@ Repository = "https://github.com/StuffAnThings/qbit_manage"
[project.optional-dependencies]
dev = [
"pre-commit==4.3.0",
"ruff==0.12.9",
"ruff==0.12.10",
]
[tool.ruff]


@ -236,6 +236,7 @@ parser.add_argument(
parser.add_argument(
"-lc", "--log-count", dest="log_count", action="store", default=5, type=int, help="Maximum mumber of logs to keep"
)
parser.add_argument("-v", "--version", dest="version", action="store_true", default=False, help="Display the version and exit")
# Use parse_known_args to ignore PyInstaller/multiprocessing injected flags on Windows
args, _unknown_cli = parser.parse_known_args()
@ -486,6 +487,13 @@ def start():
nxt = cron.get_next(datetime)
return nxt
if stype == "interval":
# For interval schedules, we should use the scheduler's authoritative next_run
# rather than calculating from current time, to avoid drift from manual runs
scheduler_next = scheduler.get_next_run()
if scheduler_next and scheduler_next > now_local:
return scheduler_next
# Fallback: if scheduler's next_run is not available or in the past,
# calculate from current time
return now_local + timedelta(minutes=int(sval))
except Exception:
pass
@ -660,7 +668,24 @@ def print_logo(logger):
logger.info(f" Platform: {platform.platform()}")
if __name__ == "__main__":
def main():
"""Main entry point for qbit-manage."""
if len(sys.argv) > 1 and sys.argv[1] in ["--version", "-v"]:
try:
version_info, branch = util.get_current_version()
# Extract just the version string (first element if tuple, otherwise use as-is)
if isinstance(version_info, tuple):
version = version_info[0]
else:
version = version_info
print(f"qbit-manage version {version}")
if branch and branch != "master":
print(f"Branch: {branch}")
except Exception:
# Fallback if version detection fails
print("qbit-manage version unknown")
sys.exit(0)
multiprocessing.freeze_support()
killer = GracefulKiller()
logger.add_main_handler()
@ -683,6 +708,12 @@ if __name__ == "__main__":
scheduler_update_queue = manager.Queue() # Queue for scheduler updates from web API
next_scheduled_run_info_shared = manager.dict()
# Make these variables globally accessible
globals()["is_running"] = is_running
globals()["is_running_lock"] = is_running_lock
globals()["web_api_queue"] = web_api_queue
globals()["next_scheduled_run_info_shared"] = next_scheduled_run_info_shared
# Start web server if enabled and not in run mode
web_process = None
if web_server:
@ -864,13 +895,18 @@ if __name__ == "__main__":
# Stop the scheduler gracefully
scheduler.stop()
# Cleanup and exit (common for both run and scheduled modes)
if web_process:
web_process.terminate()
web_process.join()
end()
except KeyboardInterrupt:
scheduler.stop()
if web_process:
web_process.terminate()
web_process.join()
end()
if __name__ == "__main__":
main()


@ -1,7 +1,71 @@
#!/usr/bin/env bash
# Detect if running in CI (e.g., GitHub Actions or pre-commit.ci)
if [[ -n "$GITHUB_ACTIONS" || -n "$CI" || -n "$PRE_COMMIT_CI" ]]; then
IN_CI=true
else
IN_CI=false
fi
# CI: For pull_request events, check if the PR itself changes VERSION.
# If not, run the develop version updater. This avoids relying on staged files.
if [[ "$IN_CI" == "true" ]]; then
BASE_REF="${GITHUB_BASE_REF:-}"
# If BASE_REF not provided (e.g., pre-commit.ci), infer remote default branch
if [[ -z "$BASE_REF" ]]; then
DEFAULT_BASE="$(git symbolic-ref -q --short refs/remotes/origin/HEAD 2>/dev/null | sed 's#^origin/##')"
if [[ -z "$DEFAULT_BASE" ]]; then
DEFAULT_BASE="$(git remote show origin 2>/dev/null | sed -n 's/.*HEAD branch: //p' | head -n1)"
fi
BASE_REF="$DEFAULT_BASE"
fi
# Resolve a usable base ref
CANDIDATES=()
if [[ -n "$BASE_REF" ]]; then
CANDIDATES+=("refs/remotes/origin/$BASE_REF")
CANDIDATES+=("refs/heads/$BASE_REF")
fi
BASE_RESOLVED=""
for ref in "${CANDIDATES[@]}"; do
if git rev-parse --verify -q "$ref" >/dev/null; then
BASE_RESOLVED="$ref"
break
fi
done
# Attempt to fetch the remote-tracking base if missing (handles shallow clones)
if [[ -z "$BASE_RESOLVED" && -n "$BASE_REF" ]]; then
git fetch --no-tags --depth=100 origin "refs/heads/$BASE_REF:refs/remotes/origin/$BASE_REF" >/dev/null 2>&1 || true
if git rev-parse --verify -q "refs/remotes/origin/$BASE_REF" >/dev/null; then
BASE_RESOLVED="refs/remotes/origin/$BASE_REF"
elif git rev-parse --verify -q "refs/heads/$BASE_REF" >/dev/null; then
BASE_RESOLVED="refs/heads/$BASE_REF"
fi
fi
if [[ -z "$BASE_RESOLVED" ]]; then
echo "Warning: Could not resolve PR base ref for '$BASE_REF'."
echo "Hint: ensure the base ref is available (e.g., full fetch)."
echo "Skipping version update because PR base could not be resolved."
exit 0
fi
# If diff is quiet, there were no changes to VERSION between base and head.
if git diff --quiet "$BASE_RESOLVED...HEAD" -- VERSION; then
echo "No VERSION bump detected in PR range ($BASE_RESOLVED...HEAD). Updating develop version."
source "$(dirname "$0")/update_develop_version.sh"
else
echo "PR includes a VERSION change. Skipping version update."
fi
exit 0
fi
# When running locally during an actual commit, skip if nothing is staged.
# In CI, pre-commit typically runs outside of a commit with no staged files,
# so we must not early-exit there.
if [[ "$IN_CI" != "true" && -z $(git diff --cached --name-only) ]]; then
echo "There are no changes staged for commit. Skipping version update."
exit 0
fi
@ -15,6 +79,6 @@ elif git diff --name-only | grep -q "VERSION"; then
exit 0
elif ! git show --name-only HEAD | grep -q "VERSION"; then
source "$(dirname "$0")/update_develop_version.sh"
elif [[ -n "$(git diff --cached --name-only)" ]] && ! git diff --cached --name-only | grep -q "VERSION"; then
source "$(dirname "$0")/update_develop_version.sh"
fi
source "$(dirname "$0")/update_develop_version.sh"


@ -19,3 +19,4 @@
@import './components/_complex-object-card.css';
@import './components/_share-limits.css';
@import './components/_scheduler-control.css';
@import './components/_documentation-viewer.css';


@ -0,0 +1,481 @@
/* Documentation Viewer Styles */
.documentation-section {
margin: 1rem 0;
border: 1px solid var(--border-color);
border-radius: var(--border-radius);
background: var(--card-bg);
overflow: hidden;
}
.documentation-header {
background: var(--bg-secondary);
border-bottom: 1px solid var(--border-color);
}
.documentation-toggle {
width: 100%;
padding: 0.75rem 1rem;
background: transparent;
border: none;
cursor: pointer;
display: flex;
align-items: center;
gap: 0.5rem;
color: var(--text-primary);
font-size: 0.95rem;
font-weight: 500;
text-align: left;
transition: background-color 0.2s ease;
}
.documentation-toggle:hover {
background: var(--hover-bg);
}
.documentation-toggle:focus {
outline: 2px solid var(--primary-color);
outline-offset: -2px;
}
.documentation-toggle-icon {
display: inline-block;
width: 1rem;
transition: transform 0.2s ease;
color: var(--text-secondary);
}
.documentation-title {
display: flex;
align-items: center;
gap: 0.5rem;
}
.documentation-icon {
color: var(--text-secondary);
flex-shrink: 0;
}
.documentation-content {
max-height: 600px;
overflow-y: auto;
transition: max-height 0.3s ease;
}
.documentation-loading {
padding: 2rem;
text-align: center;
color: var(--text-secondary);
font-style: italic;
}
.documentation-error {
padding: 1rem;
color: var(--danger-color);
background: var(--danger-bg);
border-left: 4px solid var(--danger-color);
margin: 1rem;
border-radius: var(--border-radius);
}
.documentation-body {
padding: 1rem;
color: var(--text-primary);
line-height: 1.6;
}
/* Markdown content styling */
.documentation-body h1,
.documentation-body h2,
.documentation-body h3,
.documentation-body h4,
.documentation-body h5,
.documentation-body h6 {
margin-top: 1.5rem;
margin-bottom: 0.75rem;
color: var(--text-primary);
font-weight: 600;
}
.documentation-body h1 { font-size: 1.5rem; }
.documentation-body h2 { font-size: 1.3rem; }
.documentation-body h3 { font-size: 1.1rem; }
.documentation-body h4 { font-size: 1rem; }
.documentation-body h5 { font-size: 0.95rem; }
.documentation-body h6 { font-size: 0.9rem; }
.documentation-body p {
margin-bottom: 1rem;
}
.documentation-body ul,
.documentation-body ol {
margin-bottom: 1rem;
padding-left: 2rem;
}
.documentation-body li {
margin-bottom: 0.25rem;
}
.documentation-body blockquote {
margin: 1rem 0;
padding: 0.5rem 1rem;
border-left: 4px solid var(--primary-color);
background: var(--bg-secondary);
color: var(--text-secondary);
}
.documentation-body code {
padding: 0.2rem 0.4rem;
background: var(--bg-secondary);
border: 1px solid var(--border-color);
border-radius: 3px;
font-family: 'Courier New', Courier, monospace;
font-size: 0.9em;
color: var(--text-primary);
}
.documentation-body pre {
margin: 1rem 0;
background: var(--bg-secondary);
border: 1px solid var(--border-color);
border-radius: var(--border-radius);
overflow-x: auto;
}
.documentation-body pre code {
display: block;
padding: 1rem;
background: transparent;
border: none;
font-size: 0.875rem;
line-height: 1.5;
}
/* Code block wrapper with copy button */
.documentation-code-wrapper {
position: relative;
margin: 1rem 0;
}
.documentation-code-copy {
position: absolute;
top: 0.5rem;
right: 0.5rem;
padding: 0.25rem 0.5rem;
background: var(--primary-color);
color: white;
border: none;
border-radius: var(--border-radius);
font-size: 0.75rem;
cursor: pointer;
opacity: 0.8;
transition: opacity 0.2s ease;
z-index: 1;
}
.documentation-code-copy:hover {
opacity: 1;
}
/* Tables */
.documentation-table-wrapper {
overflow-x: auto;
margin: 1rem 0;
border: 1px solid var(--border-color);
border-radius: var(--border-radius);
}
.documentation-body table {
width: 100%;
border-collapse: collapse;
font-size: 0.9rem;
}
.documentation-body th,
.documentation-body td {
padding: 0.5rem 0.75rem;
text-align: left;
border-bottom: 1px solid var(--border-color);
}
.documentation-body th {
background: var(--bg-secondary);
font-weight: 600;
color: var(--text-primary);
}
.documentation-body tr:last-child td {
border-bottom: none;
}
.documentation-body tr:hover {
background: var(--hover-bg);
}
/* Links */
.documentation-body a {
color: var(--primary-color);
text-decoration: none;
}
.documentation-body a:hover {
text-decoration: underline;
}
/* Horizontal rule */
.documentation-body hr {
margin: 1.5rem 0;
border: none;
border-top: 1px solid var(--border-color);
}
/* Images */
.documentation-body img {
max-width: 100%;
height: auto;
border-radius: var(--border-radius);
}
/* GitHub-style Alerts */
.github-alert {
margin: 1rem 0;
padding: 0;
border-radius: var(--border-radius);
border: 1px solid;
overflow: hidden;
background: var(--card-bg);
}
.github-alert-header {
display: flex;
align-items: center;
gap: 0.5rem;
padding: 0.75rem 1rem;
font-weight: 600;
font-size: 0.875rem;
border-bottom: 1px solid;
}
.github-alert-icon {
display: flex;
align-items: center;
flex-shrink: 0;
}
.github-alert-title {
text-transform: uppercase;
letter-spacing: 0.025em;
}
.github-alert-content {
padding: 1rem;
line-height: 1.6;
}
.github-alert-content p:first-child {
margin-top: 0;
}
.github-alert-content p:last-child {
margin-bottom: 0;
}
/* Warning Alert */
.github-alert-warning {
border-color: var(--warning-color);
background: var(--warning-light);
}
.github-alert-warning .github-alert-header {
background: var(--warning-color);
color: white;
border-bottom-color: var(--warning-color);
}
.github-alert-warning .github-alert-content {
color: var(--text-primary);
}
/* Tip Alert */
.github-alert-tip {
border-color: var(--info-color);
background: var(--info-light);
}
.github-alert-tip .github-alert-header {
background: var(--info-color);
color: white;
border-bottom-color: var(--info-color);
}
.github-alert-tip .github-alert-content {
color: var(--text-primary);
}
/* Note Alert */
.github-alert-note {
border-color: var(--primary-color);
background: var(--primary-light);
}
.github-alert-note .github-alert-header {
background: var(--primary-color);
color: white;
border-bottom-color: var(--primary-color);
}
.github-alert-note .github-alert-content {
color: var(--text-primary);
}
/* Caution Alert */
.github-alert-caution {
border-color: var(--error-color);
background: var(--error-light);
}
.github-alert-caution .github-alert-header {
background: var(--error-color);
color: white;
border-bottom-color: var(--error-color);
}
.github-alert-caution .github-alert-content {
color: var(--text-primary);
}
/* Important Alert */
.github-alert-important {
border-color: var(--secondary-color);
background: var(--secondary-light);
}
.github-alert-important .github-alert-header {
background: var(--secondary-color);
color: white;
border-bottom-color: var(--secondary-color);
}
.github-alert-important .github-alert-content {
color: var(--text-primary);
}
/* Tooltips */
.documentation-tooltip-container {
position: relative;
display: inline-block;
}
.documentation-tooltip-trigger {
cursor: help;
display: inline-flex;
align-items: center;
gap: 0.25rem;
}
.documentation-help-icon {
color: var(--text-secondary);
vertical-align: middle;
}
.documentation-tooltip {
position: absolute;
bottom: 100%;
left: 50%;
transform: translateX(-50%);
margin-bottom: 0.5rem;
padding: 0.5rem 0.75rem;
background: var(--bg-primary);
border: 1px solid var(--border-color);
border-radius: var(--border-radius);
box-shadow: 0 2px 8px rgba(0, 0, 0, 0.15);
font-size: 0.875rem;
white-space: nowrap;
z-index: 1000;
opacity: 0;
visibility: hidden;
transition: opacity 0.2s ease, visibility 0.2s ease;
max-width: 300px;
white-space: normal;
}
.documentation-tooltip-trigger:hover + .documentation-tooltip,
.documentation-tooltip:hover {
opacity: 1;
visibility: visible;
}
/* Dark theme adjustments */
[data-theme="dark"] .documentation-body code {
background: rgba(255, 255, 255, 0.05);
border-color: rgba(255, 255, 255, 0.1);
}
[data-theme="dark"] .documentation-body pre {
background: rgba(0, 0, 0, 0.3);
border-color: rgba(255, 255, 255, 0.1);
}
[data-theme="dark"] .documentation-table-wrapper {
border-color: rgba(255, 255, 255, 0.1);
}
[data-theme="dark"] .documentation-body th {
background: rgba(255, 255, 255, 0.05);
}
/* Dark theme GitHub alerts */
[data-theme="dark"] .github-alert {
background: var(--card-bg);
}
[data-theme="dark"] .github-alert-warning {
background: rgba(245, 158, 11, 0.1);
border-color: var(--warning-color);
}
[data-theme="dark"] .github-alert-tip {
background: rgba(6, 182, 212, 0.1);
border-color: var(--info-color);
}
[data-theme="dark"] .github-alert-note {
background: rgba(59, 130, 246, 0.1);
border-color: var(--primary-color);
}
[data-theme="dark"] .github-alert-caution {
background: rgba(239, 68, 68, 0.1);
border-color: var(--error-color);
}
[data-theme="dark"] .github-alert-important {
background: rgba(107, 114, 128, 0.1);
border-color: var(--secondary-color);
}
[data-theme="dark"] .github-alert-content {
color: var(--text-primary);
}
/* Responsive adjustments */
@media (max-width: 768px) {
.documentation-content {
max-height: 400px;
}
.documentation-body {
padding: 0.75rem;
}
.documentation-body table {
font-size: 0.8rem;
}
.documentation-body th,
.documentation-body td {
padding: 0.375rem 0.5rem;
}
}


@ -285,6 +285,7 @@
<!-- Scripts -->
<script src="static/js/components/header.js"></script>
<script src="static/js/components/documentation-viewer.js"></script>
<script type="module" src="static/js/app.js"></script>
</body>
</html>


@ -118,10 +118,28 @@ class QbitManageApp {
try {
const response = await this.api.getVersion();
const versionText = response.version || 'Unknown';
const versionEl = document.getElementById('version-text');
if (versionEl) {
versionEl.textContent = `qBit Manage v${versionText}`;
if (response.update_available) {
const badge = document.createElement('span');
badge.className = 'badge badge-warning';
const latest = response.latest_version || 'latest';
badge.textContent = `Update available: ${latest}`;
badge.style.marginLeft = '0.5rem';
if (versionEl.parentElement) {
versionEl.parentElement.appendChild(badge);
}
const branch = response.branch ? ` (${response.branch})` : '';
showToast(`A new version is available${branch}: ${latest}`, 'info');
}
}
} catch (error) {
console.error('Failed to fetch version from API:', error);
const versionEl = document.getElementById('version-text');
if (versionEl) {
versionEl.textContent = 'qBit Manage vUnknown';
}
}
}


@ -118,18 +118,21 @@ class ConfigForm {
// Initialize ShareLimitsComponent for share_limits section
if (this.currentSection === 'share_limits') {
// Wait for documentation components to be created before initializing ShareLimitsComponent
setTimeout(() => {
const shareLimitsContainer = this.container.querySelector('.share-limits-config');
if (shareLimitsContainer) {
this.shareLimitsComponent = new ShareLimitsComponent(
shareLimitsContainer,
this.currentData,
(newData) => {
this.currentData = newData;
this.onDataChange(this.currentData);
this._dispatchDirtyEvent();
}
);
}
}, 150); // Wait slightly longer than the documentation component creation (100ms)
}
// Initialize lazy loading for notifications section


@ -0,0 +1,478 @@
/**
* Documentation Viewer Component
* Fetches and renders markdown documentation with collapsible sections
*/
class DocumentationViewer {
constructor() {
this.markedLoaded = false;
this.cache = new Map();
this.loadMarkedLibrary();
}
/**
* Load the marked.js library from CDN
*/
async loadMarkedLibrary() {
if (this.markedLoaded) return;
return new Promise((resolve, reject) => {
const script = document.createElement('script');
script.src = 'https://cdn.jsdelivr.net/npm/marked/marked.min.js';
script.onload = () => {
this.markedLoaded = true;
// Configure marked options
if (window.marked) {
window.marked.setOptions({
breaks: true,
gfm: true,
tables: true,
sanitize: false,
smartLists: true,
smartypants: false
});
}
resolve();
};
script.onerror = reject;
document.head.appendChild(script);
});
}
/**
* Fetch markdown content from a file
* @param {string} filePath - Path to the markdown file relative to the web root
* @returns {Promise<string>} The markdown content
*/
async fetchMarkdown(filePath) {
// Check cache first
if (this.cache.has(filePath)) {
return this.cache.get(filePath);
}
try {
const response = await fetch(`/api/docs?file=${encodeURIComponent(filePath)}`);
if (!response.ok) {
throw new Error(`Failed to fetch documentation: ${response.statusText}`);
}
const content = await response.text();
this.cache.set(filePath, content);
return content;
} catch (error) {
console.error('Error fetching markdown:', error);
return `*Unable to load documentation from ${filePath}*`;
}
}
/**
* Extract a specific section from markdown content
* @param {string} content - The full markdown content
* @param {string} sectionTitle - The section title to extract
* @param {number} headingLevel - The heading level (1-6)
* @returns {string} The extracted section content
*/
extractSection(content, sectionTitle, headingLevel = 2) {
const lines = content.split('\n');
const headingPrefix = '#'.repeat(headingLevel);
// Create a more flexible regex that handles markdown formatting like **text:** or *text*
// Escape special regex characters in the section title
const escapedTitle = sectionTitle.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
// Look for the section title with optional markdown formatting (**, *, :, etc.)
// Handle format like: ## **settings:** where asterisks surround "settings:"
const sectionRegex = new RegExp(`^${headingPrefix}\\s+(?:\\*{1,2})?${escapedTitle}:?(?:\\*{1,2})?\\s*$`, 'i');
const nextSectionRegex = new RegExp(`^#{1,${headingLevel}}\\s+`);
let sectionStart = -1;
let sectionEnd = lines.length;
// Find the start of the section
for (let i = 0; i < lines.length; i++) {
if (sectionRegex.test(lines[i])) {
sectionStart = i;
break;
}
}
if (sectionStart === -1) {
return '';
}
// Find the end of the section (next heading of same or higher level)
for (let i = sectionStart + 1; i < lines.length; i++) {
if (nextSectionRegex.test(lines[i])) {
const currentLevel = lines[i].match(/^#+/)[0].length;
if (currentLevel <= headingLevel) {
sectionEnd = i;
break;
}
}
}
return lines.slice(sectionStart, sectionEnd).join('\n');
}
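// Example: extractSection(content, 'settings', 2) matches headings such as
// "## settings", "## **settings:**", or "## Settings" (case-insensitive) and
// returns everything up to, but not including, the next level-1 or level-2 heading.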
/**
* Process GitHub-style alerts in markdown content
* @param {string} markdown - The markdown content
* @returns {string} The processed markdown with GitHub alerts converted to placeholders
*/
processGitHubAlerts(markdown) {
// Store alert data for post-processing
this.alertData = [];
// Split markdown into lines for processing
const lines = markdown.split('\n');
const processedLines = [];
let i = 0;
while (i < lines.length) {
const line = lines[i];
const alertMatch = line.match(/^>\s*\[!(WARNING|TIP|NOTE|CAUTION|IMPORTANT)\]\s*$/);
if (alertMatch) {
const alertType = alertMatch[1].toLowerCase();
const alertContent = [];
// Skip the alert header line
i++;
// Collect all subsequent blockquote lines that belong to this alert
while (i < lines.length && lines[i].startsWith('>')) {
// Remove the '> ' prefix and add to content
alertContent.push(lines[i].substring(2));
i++;
}
// Store alert data for post-processing
const alertId = this.alertData.length;
this.alertData.push({
type: alertType,
content: alertContent.join('\n').trim()
});
// Create placeholder that will be replaced after markdown processing
processedLines.push(`GITHUB_ALERT_PLACEHOLDER_${alertId}`);
// Don't increment i here as it's already been incremented in the while loop
continue;
} else {
processedLines.push(line);
i++;
}
}
return processedLines.join('\n');
}
/**
* Post-process HTML to replace alert placeholders with rendered alerts
* @param {string} html - The rendered HTML
* @returns {string} The HTML with alert placeholders replaced
*/
async processAlertPlaceholders(html) {
if (!this.alertData || this.alertData.length === 0) {
return html;
}
let processedHtml = html;
for (let i = 0; i < this.alertData.length; i++) {
const alert = this.alertData[i];
// Try multiple placeholder formats since markdown processing might wrap them differently
const placeholderPatterns = [
`<p>GITHUB_ALERT_PLACEHOLDER_${i}</p>`,
`GITHUB_ALERT_PLACEHOLDER_${i}`,
new RegExp(`<p[^>]*>\\s*GITHUB_ALERT_PLACEHOLDER_${i}\\s*</p>`, 'g'),
new RegExp(`GITHUB_ALERT_PLACEHOLDER_${i}`, 'g')
];
// Render the alert content as markdown
const renderedContent = await window.marked.parse(alert.content);
// Create the alert HTML
const alertHtml = `<div class="github-alert github-alert-${alert.type}">
<div class="github-alert-header">
<span class="github-alert-icon">${this.getAlertIcon(alert.type)}</span>
<span class="github-alert-title">${alert.type.toUpperCase()}</span>
</div>
<div class="github-alert-content">${renderedContent}</div>
</div>`;
// Try each placeholder pattern until one matches
for (const pattern of placeholderPatterns) {
if (typeof pattern === 'string') {
if (processedHtml.includes(pattern)) {
processedHtml = processedHtml.replace(pattern, alertHtml);
break;
}
} else {
// RegExp pattern
if (pattern.test(processedHtml)) {
processedHtml = processedHtml.replace(pattern, alertHtml);
break;
}
}
}
}
// Clean up
this.alertData = [];
return processedHtml;
}
/**
* Get the appropriate icon for each alert type
* @param {string} type - The alert type (warning, tip, note, caution, important)
* @returns {string} The SVG icon HTML
*/
getAlertIcon(type) {
const icons = {
warning: `<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
<path d="M10.29 3.86L1.82 18a2 2 0 0 0 1.71 3h16.94a2 2 0 0 0 1.71-3L13.71 3.86a2 2 0 0 0-3.42 0z"></path>
<line x1="12" y1="9" x2="12" y2="13"></line>
<line x1="12" y1="17" x2="12.01" y2="17"></line>
</svg>`,
tip: `<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
<circle cx="12" cy="12" r="10"></circle>
<path d="M12 16v-4"></path>
<path d="M12 8h.01"></path>
</svg>`,
note: `<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
<path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V8z"></path>
<polyline points="14 2 14 8 20 8"></polyline>
<line x1="16" y1="13" x2="8" y2="13"></line>
<line x1="16" y1="17" x2="8" y2="17"></line>
<polyline points="10 9 9 9 8 9"></polyline>
</svg>`,
caution: `<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
<path d="M10.29 3.86L1.82 18a2 2 0 0 0 1.71 3h16.94a2 2 0 0 0 1.71-3L13.71 3.86a2 2 0 0 0-3.42 0z"></path>
<line x1="12" y1="9" x2="12" y2="13"></line>
<line x1="12" y1="17" x2="12.01" y2="17"></line>
</svg>`,
important: `<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
<circle cx="12" cy="12" r="10"></circle>
<line x1="12" y1="8" x2="12" y2="12"></line>
<line x1="12" y1="16" x2="12.01" y2="16"></line>
</svg>`
};
return icons[type] || icons.note;
}
/**
* Render markdown content to HTML
* @param {string} markdown - The markdown content
* @returns {Promise<string>} The rendered HTML
*/
async renderMarkdown(markdown) {
await this.loadMarkedLibrary();
if (!window.marked) {
return '<p>Markdown renderer not available</p>';
}
// Process GitHub-style alerts before rendering markdown
const processedMarkdown = this.processGitHubAlerts(markdown);
// Render markdown to HTML; await in case marked is configured async
// (parse then returns a promise, as the alert rendering above already assumes)
let html = await window.marked.parse(processedMarkdown);
// Post-process to replace alert placeholders with rendered alerts
html = await this.processAlertPlaceholders(html);
return html;
}
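// End-to-end sketch (input is illustrative): alerts become placeholders, marked
// renders the remaining markdown, then the placeholders are swapped for styled
// alert markup.
//   const html = await documentationViewer.renderMarkdown(
//     '> [!TIP]\n> Schedule runs during off-peak hours.');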
/**
* Create a collapsible documentation section
* @param {Object} options - Configuration options
* @param {string} options.title - The title of the documentation section
* @param {string} options.filePath - Path to the markdown file
* @param {string} [options.section] - Specific section to extract
* @param {number} [options.headingLevel] - Heading level for section extraction
* @param {boolean} [options.defaultExpanded] - Whether to expand by default
* @param {string} [options.className] - Additional CSS class
* @returns {Promise<HTMLElement>} The documentation section element
*/
async createDocumentationSection(options) {
const {
title,
filePath,
section = null,
headingLevel = 2,
defaultExpanded = false,
className = ''
} = options;
// Create container
const container = document.createElement('div');
container.className = `documentation-section ${className}`;
// Create header
const header = document.createElement('div');
header.className = 'documentation-header';
header.innerHTML = `
<button class="documentation-toggle" aria-expanded="${defaultExpanded}">
<span class="documentation-toggle-icon">${defaultExpanded ? '▼' : '▶'}</span>
<span class="documentation-title">
<svg class="documentation-icon" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
<path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V8z"></path>
<polyline points="14 2 14 8 20 8"></polyline>
<line x1="16" y1="13" x2="8" y2="13"></line>
<line x1="16" y1="17" x2="8" y2="17"></line>
<polyline points="10 9 9 9 8 9"></polyline>
</svg>
${title}
</span>
</button>
`;
// Create content area
const content = document.createElement('div');
content.className = 'documentation-content';
content.style.display = defaultExpanded ? 'block' : 'none';
// Add loading indicator
content.innerHTML = '<div class="documentation-loading">Loading documentation...</div>';
// Add event listener for toggle
const toggleButton = header.querySelector('.documentation-toggle');
const toggleIcon = header.querySelector('.documentation-toggle-icon');
toggleButton.addEventListener('click', async () => {
const isExpanded = toggleButton.getAttribute('aria-expanded') === 'true';
if (!isExpanded) {
// Expand
toggleButton.setAttribute('aria-expanded', 'true');
toggleIcon.textContent = '▼';
content.style.display = 'block';
// Load content if not already loaded
if (content.querySelector('.documentation-loading')) {
try {
let markdown = await this.fetchMarkdown(filePath);
// Extract specific section if requested
if (section) {
markdown = this.extractSection(markdown, section, headingLevel);
}
const html = await this.renderMarkdown(markdown);
content.innerHTML = `<div class="documentation-body">${html}</div>`;
// Process tables to make them responsive
this.makeTablesResponsive(content);
// Add copy buttons to code blocks
this.addCodeCopyButtons(content);
} catch (error) {
content.innerHTML = `<div class="documentation-error">Failed to load documentation: ${error.message}</div>`;
}
}
} else {
// Collapse
toggleButton.setAttribute('aria-expanded', 'false');
toggleIcon.textContent = '▶';
content.style.display = 'none';
}
});
container.appendChild(header);
container.appendChild(content);
return container;
}
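// Usage sketch (title, file, and section match the schema diffs below; the
// '#qbt-panel' selector is illustrative only):
//   const docs = await documentationViewer.createDocumentationSection({
//     title: 'qBittorrent Configuration Guide',
//     filePath: 'Config-Setup.md',
//     section: 'qbt',
//     defaultExpanded: false
//   });
//   document.querySelector('#qbt-panel')?.appendChild(docs);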
/**
* Make tables responsive by wrapping them in a scrollable container
* @param {HTMLElement} container - The container element
*/
makeTablesResponsive(container) {
const tables = container.querySelectorAll('table');
tables.forEach(table => {
const wrapper = document.createElement('div');
wrapper.className = 'documentation-table-wrapper';
table.parentNode.insertBefore(wrapper, table);
wrapper.appendChild(table);
});
}
/**
* Add copy buttons to code blocks
* @param {HTMLElement} container - The container element
*/
addCodeCopyButtons(container) {
const codeBlocks = container.querySelectorAll('pre code');
codeBlocks.forEach(block => {
const pre = block.parentElement;
const wrapper = document.createElement('div');
wrapper.className = 'documentation-code-wrapper';
const copyButton = document.createElement('button');
copyButton.className = 'documentation-code-copy';
copyButton.textContent = 'Copy';
copyButton.title = 'Copy code to clipboard';
copyButton.addEventListener('click', async () => {
try {
await navigator.clipboard.writeText(block.textContent);
copyButton.textContent = 'Copied!';
setTimeout(() => {
copyButton.textContent = 'Copy';
}, 2000);
} catch (error) {
console.error('Failed to copy:', error);
copyButton.textContent = 'Failed';
setTimeout(() => {
copyButton.textContent = 'Copy';
}, 2000);
}
});
pre.parentNode.insertBefore(wrapper, pre);
wrapper.appendChild(copyButton);
wrapper.appendChild(pre);
});
}
/**
* Create inline documentation tooltip
* @param {string} text - The tooltip text
* @param {string} markdown - The markdown content for the tooltip
* @returns {Promise<HTMLElement>} The tooltip element
*/
async createTooltip(text, markdown) {
const container = document.createElement('span');
container.className = 'documentation-tooltip-container';
const trigger = document.createElement('span');
trigger.className = 'documentation-tooltip-trigger';
trigger.innerHTML = `
${text}
<svg class="documentation-help-icon" width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
<circle cx="12" cy="12" r="10"></circle>
<path d="M9.09 9a3 3 0 0 1 5.83 1c0 2-3 3-3 3"></path>
<line x1="12" y1="17" x2="12.01" y2="17"></line>
</svg>
`;
const tooltip = document.createElement('div');
tooltip.className = 'documentation-tooltip';
const html = await this.renderMarkdown(markdown);
tooltip.innerHTML = html;
container.appendChild(trigger);
container.appendChild(tooltip);
return container;
}
}
// Export as singleton
const documentationViewer = new DocumentationViewer();
window.DocumentationViewer = documentationViewer;
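// Consumers reach the shared instance via window.DocumentationViewer; for
// example (a sketch, the label element is assumed), an inline help tooltip:
//   const tip = await window.DocumentationViewer.createTooltip(
//     'Share Limits', 'Torrents match the **highest-priority** group first.');
//   someLabelElement.appendChild(tip);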

View file

@ -17,7 +17,7 @@ export class ShareLimitsComponent {
this.data = data;
this.onDataChange = onDataChange;
this.draggedElement = null;
this.schema = shareLimitsSchema.fields[0].properties; // Get the properties schema
this.schema = shareLimitsSchema.fields[1].properties; // Get the properties schema from share_limit_groups field
// Clean up any existing modals before initializing
this.closeExistingModals();

View file

@ -6,6 +6,15 @@ export const catSchema = {
keyDescription: 'Name of the category as it appears in qBittorrent.',
// Special handling for flat string values (category: path format)
flatStringValues: true,
fields: [
{
type: 'documentation',
title: 'Categories Configuration Guide',
filePath: 'Config-Setup.md',
section: 'cat',
defaultExpanded: false
}
],
patternProperties: {
".*": {
type: 'string',

View file

@ -4,6 +4,13 @@ export const catChangeSchema = {
type: 'dynamic-key-value-list',
useCategoryDropdown: true, // Flag to indicate this should use category dropdown for keys
fields: [
{
type: 'documentation',
title: 'Category Changes Documentation',
filePath: 'Config-Setup.md',
section: 'cat_change',
defaultExpanded: false
},
{
name: 'category_changes',
type: 'object',

View file

@ -1,7 +1,13 @@
export const commandsSchema = {
title: 'Commands',
description: 'Enable or disable specific commands to be executed during a run. These settings can be overridden by command-line arguments or environment variables.',
description: 'Enable or disable specific commands to be executed during a run. This section overrides any commands defined via environment variables or the command line.',
fields: [
{
type: 'documentation',
title: 'Commands Documentation',
filePath: 'Commands.md',
defaultExpanded: false
},
{
name: 'recheck',
type: 'boolean',

View file

@ -2,6 +2,13 @@ export const directorySchema = {
title: 'Directory Paths',
description: 'Configure directory paths for various operations. Proper configuration is crucial for features like orphaned file detection, no-hardlinks tagging, and the recycle bin.',
fields: [
{
type: 'documentation',
title: 'Directory Configuration Guide',
filePath: 'Config-Setup.md',
section: 'directory',
defaultExpanded: false
},
{
name: 'root_dir',
type: 'text',

View file

@ -5,6 +5,15 @@ export const nohardlinksSchema = {
keyLabel: 'Category',
keyDescription: 'Category to check for torrents without hardlinks.',
useCategoryDropdown: true, // Flag to indicate this should use category dropdown
fields: [
{
type: 'documentation',
title: 'No Hardlinks Configuration Documentation',
filePath: 'Config-Setup.md',
section: 'nohardlinks',
defaultExpanded: false
}
],
patternProperties: {
".*": { // Matches any category name
type: 'object',

View file

@ -3,6 +3,13 @@ export const notificationsSchema = {
description: 'Configure notifications for various events using Apprise, Notifiarr, or custom webhooks.',
type: 'multi-root-object',
fields: [
{
type: 'documentation',
title: 'Apprise Configuration Guide',
filePath: 'Config-Setup.md',
section: 'apprise',
defaultExpanded: false
},
{
type: 'section_header',
label: 'Apprise Configuration'
@ -21,6 +28,13 @@ export const notificationsSchema = {
description: 'The notification URL(s) for your desired services, as supported by Apprise.',
placeholder: 'discord://webhook_id/webhook_token'
},
{
type: 'documentation',
title: 'Notifiarr Configuration Guide',
filePath: 'Config-Setup.md',
section: 'notifiarr',
defaultExpanded: false
},
{
type: 'section_header',
label: 'Notifiarr Configuration'
@ -59,6 +73,13 @@ export const notificationsSchema = {
label: 'Apply to All',
action: 'apply-to-all'
},
{
type: 'documentation',
title: 'Webhooks Configuration Guide',
filePath: 'Config-Setup.md',
section: 'webhooks',
defaultExpanded: false
},
{
type: 'section_header',
label: 'Webhooks Configuration'

View file

@ -2,6 +2,13 @@ export const orphanedSchema = {
title: 'Orphaned Files',
description: 'Configure settings for managing orphaned files, which are files in your root directory not associated with any torrent.',
fields: [
{
type: 'documentation',
title: 'Orphaned Files Configuration Documentation',
filePath: 'Config-Setup.md',
section: 'orphaned',
defaultExpanded: false
},
{
name: 'empty_after_x_days',
type: 'number',

View file

@ -2,6 +2,13 @@ export const qbtSchema = {
title: 'qBittorrent Connection',
description: 'Configure the connection to your qBittorrent client.',
fields: [
{
type: 'documentation',
title: 'qBittorrent Configuration Guide',
filePath: 'Config-Setup.md',
section: 'qbt',
defaultExpanded: false
},
{
name: 'host',
type: 'text',

View file

@ -2,6 +2,13 @@ export const recyclebinSchema = {
title: 'Recycle Bin',
description: 'Configure the recycle bin to move deleted files to a temporary location instead of permanently deleting them. This provides a safety net for accidental deletions.',
fields: [
{
type: 'documentation',
title: 'Recycle Bin Configuration Documentation',
filePath: 'Config-Setup.md',
section: 'recyclebin',
defaultExpanded: false
},
{
name: 'enabled',
type: 'boolean',

View file

@ -2,6 +2,13 @@ export const settingsSchema = {
title: 'General Settings',
description: 'Configure general application settings and default behaviors.',
fields: [
{
type: 'documentation',
title: 'Settings Configuration Guide',
filePath: 'Config-Setup.md',
section: 'settings',
defaultExpanded: false
},
{
name: 'force_auto_tmm',
type: 'boolean',

View file

@ -3,6 +3,13 @@ export const shareLimitsSchema = {
description: 'Define prioritized groups to manage share limits for your torrents. Each torrent is matched to the highest-priority group that meets the filter criteria.',
type: 'share-limits-config',
fields: [
{
type: 'documentation',
title: 'Share Limits Configuration Documentation',
filePath: 'Config-Setup.md',
section: 'share_limits',
defaultExpanded: false
},
{
name: 'share_limit_groups',
type: 'object',
@ -66,25 +73,25 @@ export const shareLimitsSchema = {
max_seeding_time: {
type: 'text',
label: 'Maximum Seeding Time',
description: 'The maximum seeding time before a torrent is paused. Use -2 for the global limit and -1 for no limit. (e.g., "30d", "1w 4d 2h").',
description: 'The maximum seeding time before a torrent is paused. Use -2 for the global limit and -1 for no limit. (e.g., "30d", "1w4d2h").',
default: '-1'
},
max_last_active: {
type: 'text',
label: 'Maximum Last Active',
description: 'If cleanup is enabled, delete torrents that have been inactive for this duration. Use -1 for no limit. (e.g., "30d", "1w 4d 2h").',
description: 'If cleanup is enabled, delete torrents that have been inactive for this duration. Use -1 for no limit. (e.g., "30d", "1w4d2h").',
default: '-1'
},
min_seeding_time: {
type: 'text',
label: 'Minimum Seeding Time',
description: 'Prevents cleanup from deleting a torrent until it has been seeding for at least this long. (e.g., "30d", "1w 4d 2h").',
description: 'Prevents cleanup from deleting a torrent until it has been seeding for at least this long. (e.g., "30d", "1w4d2h").',
default: '0'
},
min_last_active: {
type: 'text',
label: 'Minimum Last Active',
description: 'Prevents cleanup from deleting a torrent if it has been active within this duration. (e.g., "30d", "1w 4d 2h").',
description: 'Prevents cleanup from deleting a torrent if it has been active within this duration. (e.g., "30d", "1w4d2h").',
default: '0'
},
limit_upload_speed: {

View file

@ -2,6 +2,15 @@ export const trackerSchema = {
title: 'Tracker',
description: 'Configure tags and categories based on tracker URLs. Use a keyword from the tracker URL to define rules. The `other` key is a special keyword for trackers that do not match any other entry.',
type: 'complex-object',
fields: [
{
type: 'documentation',
title: 'Tracker Configuration Documentation',
filePath: 'Config-Setup.md',
section: 'tracker',
defaultExpanded: false
}
],
patternProperties: {
"^(?!other$).*$": { // Matches any key except 'other'
type: 'object',

View file

@ -67,6 +67,11 @@ export function generateSectionHTML(config, data) {
`;
if (config.type === 'dynamic-key-value-list') {
// Process documentation fields first
if (config.fields) {
const docFields = config.fields.filter(field => field.type === 'documentation');
html += generateFieldsHTML(docFields, data);
}
html += generateDynamicKeyValueListHTML(config, data);
} else if (config.type === 'fixed-object-config') {
// For fixed-object-config, render fields of the first (and usually only) field directly
@ -74,8 +79,18 @@ export function generateSectionHTML(config, data) {
const mainField = config.fields[0];
html += generateFieldsHTML(Object.values(mainField.properties), data[mainField.name] || {}, mainField.name);
} else if (config.type === 'share-limits-config') {
// Process documentation fields first
if (config.fields) {
const docFields = config.fields.filter(field => field.type === 'documentation');
html += generateFieldsHTML(docFields, data);
}
html += generateShareLimitsHTML(config, data);
} else if (config.type === 'complex-object') {
// Process documentation fields first
if (config.fields) {
const docFields = config.fields.filter(field => field.type === 'documentation');
html += generateFieldsHTML(docFields, data);
}
html += generateComplexObjectHTML(config, data);
} else if (config.type === 'multi-root-object') {
// For multi-root-object, render fields directly without nesting under section name
@ -133,47 +148,62 @@ function generateFieldsHTML(fields, data, prefix = '') {
* @returns {string} The HTML string for the fields.
*/
function generateNotificationsFieldsHTML(fields, data) {
// Group fields by section for better performance
const sections = {
apprise: [],
notifiarr: [],
applyToAll: [],
webhooks: [],
functionWebhooks: []
};
// Render fields in their original order to preserve documentation placement
let html = '';
let functionWebhooksFields = [];
let inFunctionWebhooks = false;
fields.forEach(field => {
if (field.type === 'section_header') {
if (field.label.includes('Apprise')) sections.apprise.push(field);
else if (field.label.includes('Notifiarr')) sections.notifiarr.push(field);
else if (field.label.includes('Apply to All')) sections.applyToAll.push(field);
else if (field.label.includes('Webhooks Configuration')) sections.webhooks.push(field);
else if (field.label.includes('Function Specific')) sections.functionWebhooks.push(field);
// Check if we're entering the Function Specific Webhooks section
if (field.type === 'section_header' && field.label.includes('Function Specific')) {
inFunctionWebhooks = true;
// Start the lazy loading container for function webhooks
html += `<div class="webhook-sections-container">`;
html += `<div class="function-webhooks-lazy" data-section="function-webhooks">`;
html += `<div class="lazy-load-placeholder">Click to load Function Specific Webhooks...</div>`;
html += `<div class="lazy-content hidden">`;
}
if (inFunctionWebhooks) {
// Collect function webhook fields for lazy loading
functionWebhooksFields.push(field);
} else {
if (field.name?.startsWith('apprise.')) sections.apprise.push(field);
else if (field.name?.startsWith('notifiarr.')) sections.notifiarr.push(field);
else if (field.name === 'apply_to_all_value' || field.action === 'apply-to-all') sections.applyToAll.push(field);
else if (field.name?.startsWith('webhooks.function.')) sections.functionWebhooks.push(field);
else if (field.name?.startsWith('webhooks.')) sections.webhooks.push(field);
// Render field normally in original order
if (field.type === 'section_header') {
html += generateFieldHTML(field, null, null);
} else {
let fieldName, value;
if (field.name) {
fieldName = field.name;
value = getNestedValue(data, fieldName) ?? field.default ?? '';
} else {
fieldName = null;
value = null;
}
html += generateFieldHTML(field, value, fieldName);
}
}
});
// Render sections with lazy loading containers
let html = '';
// Render critical sections first (Apprise, Notifiarr)
html += renderFieldSection(sections.apprise, data, 'apprise-section');
html += renderFieldSection(sections.notifiarr, data, 'notifiarr-section');
html += renderFieldSection(sections.applyToAll, data, 'apply-all-section');
// Render webhook sections with lazy loading
html += `<div class="webhook-sections-container">`;
html += renderFieldSection(sections.webhooks, data, 'webhooks-section');
html += `<div class="function-webhooks-lazy" data-section="function-webhooks">`;
html += `<div class="lazy-load-placeholder">Click to load Function Specific Webhooks...</div>`;
html += `<div class="lazy-content hidden">`;
html += renderFieldSection(sections.functionWebhooks, data, 'function-webhooks-section');
html += `</div></div></div>`;
// Render function webhooks in lazy loading container
if (functionWebhooksFields.length > 0) {
functionWebhooksFields.forEach(field => {
if (field.type === 'section_header') {
html += generateFieldHTML(field, null, null);
} else {
let fieldName, value;
if (field.name) {
fieldName = field.name;
value = getNestedValue(data, fieldName) ?? field.default ?? '';
} else {
fieldName = null;
value = null;
}
html += generateFieldHTML(field, value, fieldName);
}
});
html += `</div></div></div>`;
}
return html;
}
@ -234,6 +264,36 @@ export function generateFieldHTML(field, value, fieldName) {
`;
break;
case 'documentation':
// Create a placeholder div that will be replaced with the documentation component
const docId = `doc-${Math.random().toString(36).slice(2, 11)}`;
inputHTML = `<div id="${docId}" class="documentation-placeholder"
data-title="${field.title || 'Documentation'}"
data-file-path="${field.filePath || ''}"
data-section="${field.section || ''}"
data-heading-level="${field.headingLevel || 2}"
data-default-expanded="${field.defaultExpanded || false}"></div>`;
// Schedule the documentation component creation for after DOM insertion
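// (The 100 ms delay below is a heuristic: it assumes the generated HTML has been
// attached to the DOM by then, so getElementById can find the placeholder.)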
setTimeout(() => {
const placeholder = document.getElementById(docId);
if (placeholder && window.DocumentationViewer) {
window.DocumentationViewer.createDocumentationSection({
title: placeholder.dataset.title,
filePath: placeholder.dataset.filePath,
section: placeholder.dataset.section || null,
headingLevel: parseInt(placeholder.dataset.headingLevel, 10),
defaultExpanded: placeholder.dataset.defaultExpanded === 'true'
}).then(docSection => {
placeholder.parentNode.replaceChild(docSection, placeholder);
}).catch(error => {
console.error('Failed to create documentation section:', error);
placeholder.innerHTML = '<div class="documentation-error">Failed to load documentation</div>';
});
}
}, 100);
break;
case 'button':
inputHTML = `
<button type="button" class="btn btn-secondary apply-to-all-btn"
@ -554,6 +614,11 @@ function generateComplexObjectHTML(config, data) {
const complexObjectData = data || {};
let html = `
<div class="complex-object">
`;
// Documentation is now handled in generateSectionHTML, so we don't need to duplicate it here
html += `
<div class="complex-object-header">
<h3>${config.title}</h3>
<button type="button" class="btn btn-primary add-complex-object-item-btn">