Merge pull request #301 from StuffAnThings/develop

3.6.3
bobokun 2023-05-23 20:48:57 -04:00 committed by GitHub
commit a1a67d2d6b
19 changed files with 388 additions and 182 deletions

View file

@@ -26,4 +26,4 @@ Please delete options that are not relevant.
- [ ] I have performed a self-review of my own code
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] I have added or updated the docstring for new or existing methods
- [ ] I have added tests when applicable
- [ ] I have modified this PR to merge to the develop branch

View file

@@ -19,7 +19,7 @@ jobs:
# will not occur.
- name: Dependabot metadata
id: dependabot-metadata
uses: dependabot/fetch-metadata@v1.4.0
uses: dependabot/fetch-metadata@v1.5.0
with:
github-token: "${{ secrets.GITHUB_TOKEN }}"
# Here the PR gets approved.

View file

@@ -46,6 +46,8 @@ jobs:
with:
context: ./
file: ./Dockerfile
build-args: |
"BRANCH_NAME=develop"
platforms: linux/amd64,linux/arm64,linux/arm/v7
push: true
tags: ${{ secrets.DOCKER_HUB_USERNAME }}/qbit_manage:develop
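
For context on the new build-args block: BRANCH_NAME is consumed by the Dockerfile's ARG below and re-exported as an ENV, so the running script can tell which branch the image was built from. A minimal sketch of the runtime side (the fallback value here is illustrative):

    import os

    # BRANCH_NAME is baked in via `docker build --build-arg BRANCH_NAME=develop`
    # and surfaced through `ENV BRANCH_NAME ${BRANCH_NAME}` in the Dockerfile.
    branch_name = os.environ.get("BRANCH_NAME", "master")
    print(f"Image built from branch: {branch_name}")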

View file

@@ -14,6 +14,8 @@ jobs:
- name: Check Out Repo
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Login to Docker Hub
uses: docker/login-action@v2

View file

@@ -1,6 +1,20 @@
# Bug Fixes
- Fixes bug in cross_seed (Fixes #270)
- Fixes bug causing RecycleBin not to be created when a full path is defined (Fixes #271)
- Fixes uncaught exception while emptying recycle bin (Fixes #272)
# Requirements Updated
- pre-commit updated to 3.3.3
- requests updated to 2.31.0
- ruamel.yaml updated to 0.17.26
- Adds new dependency bencodepy to generate hash for cross-seed
- Adds new dependency GitPython for checking git branches
**Full Changelog**: https://github.com/StuffAnThings/qbit_manage/compare/v3.6.1...v3.6.2
# Bug Fixes
- Changes HardLink Logic (Thanks to @ColinHebert for the suggestion) Fixes #291
- Additional error checking (Fixes #282)
- Fixes #287 (Thanks to @buthed010203 #290)
- Fixes Remove Orphan crashing when multiprocessing (Thanks to @buthed010203 #289)
- Speed optimization for Remove Orphan (Thanks to @buthed010203 #299)
- Fixes Remove Orphan crashing on Windows (Fixes #275)
- Fixes #292
- Fixes #201
- Fixes #279
- Updates Dockerfile to debloat and move to Python 3.11
**Full Changelog**: https://github.com/StuffAnThings/qbit_manage/compare/v3.6.2...v3.6.3

View file

@@ -1,15 +1,29 @@
FROM python:3.10-alpine
# install packages
RUN apk add --no-cache gcc g++ libxml2-dev libxslt-dev shadow bash curl wget jq grep sed coreutils findutils unzip p7zip ca-certificates
FROM python:3.11-slim-buster
ARG BRANCH_NAME=master
ENV BRANCH_NAME ${BRANCH_NAME}
ENV TINI_VERSION v0.19.0
ENV QBM_DOCKER True
COPY requirements.txt /
RUN echo "**** install python packages ****" \
# install packages
RUN echo "**** install system packages ****" \
&& apt-get update \
&& apt-get upgrade -y --no-install-recommends \
&& apt-get install -y tzdata --no-install-recommends \
&& apt-get install -y gcc g++ libxml2-dev libxslt-dev libz-dev bash curl wget jq grep sed coreutils findutils unzip p7zip ca-certificates \
&& wget -O /tini https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini-"$(dpkg --print-architecture | awk -F- '{ print $NF }')" \
&& chmod +x /tini \
&& pip3 install --no-cache-dir --upgrade --requirement /requirements.txt \
&& rm -rf /requirements.txt /tmp/* /var/tmp/*
&& apt-get --purge autoremove gcc g++ libxml2-dev libxslt-dev libz-dev -y \
&& apt-get clean \
&& apt-get update \
&& apt-get check \
&& apt-get -f install \
&& apt-get autoclean \
&& rm -rf /requirements.txt /tmp/* /var/tmp/* /var/lib/apt/lists/*
COPY . /app
WORKDIR /app
VOLUME /config
ENTRYPOINT ["python3", "qbit_manage.py"]
ENTRYPOINT ["/tini", "-s", "python3", "qbit_manage.py", "--"]

View file

@@ -16,7 +16,7 @@ This is a program used to manage your qBittorrent instance such as:
* Automatically add [cross-seed](https://github.com/mmgoodnow/cross-seed) torrents in paused state. **\*Note: cross-seed now allows for torrent injections directly to qBit, making this feature obsolete.\***
* Recheck paused torrents sorted by lowest size and resume if completed
* Remove orphaned files from your root directory that are not referenced by qBittorrent
* Tag any torrents that have no hard links and allows optional cleanup to delete these torrents and contents based on maximum ratio and/or time seeded
* Tag any torrents that have no hard links outside the root folder and allow optional cleanup to delete these torrents and their contents based on maximum ratio and/or time seeded
* RecycleBin function to move files into a RecycleBin folder instead of deleting the data directly when deleting a torrent
* Built-in scheduler to run the script every x minutes. (Can use `--run` command to run without the scheduler)
* Webhook notifications with [Notifiarr](https://notifiarr.com/) and [Apprise API](https://github.com/caronc/apprise-api) integration

View file

@@ -1 +1 @@
3.6.2
3.6.3

View file

@@ -136,7 +136,7 @@ tracker:
tag: other
nohardlinks:
# Tag Movies/Series that are not hard linked
# Tag Movies/Series that are not hard linked outside the root directory
# Mandatory to fill out directory parameter above to use this function (root_dir/remote_dir)
# Set this to the category name of your completed movies/completed series in qbit. It can be any category you would like to tag when no hardlinks are found
movies-completed:

View file

@@ -13,8 +13,10 @@ version_file_path = os.path.join(project_dir, "..", "VERSION")
with open(version_file_path) as f:
version_str = f.read().strip()
# Get only the first 3 digits
version_str_split = version_str.rsplit("-", 1)[0]
# Convert the version string to a tuple of integers
__version_info__ = tuple(map(int, version_str.split(".")))
__version_info__ = tuple(map(int, version_str_split.split(".")))
# Define the version string using the version_info tuple
__version__ = ".".join(str(i) for i in __version_info__)
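
The new rsplit guards against suffixed development versions. A worked example, assuming a develop-style VERSION string such as 3.6.3-develop2:

    version_str = "3.6.3-develop2"
    version_str_split = version_str.rsplit("-", 1)[0]                  # "3.6.3"
    __version_info__ = tuple(map(int, version_str_split.split(".")))  # (3, 6, 3)
    __version__ = ".".join(str(i) for i in __version_info__)          # "3.6.3"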

View file

@@ -110,27 +110,28 @@ class Config:
self.data["notifiarr"] = self.data.pop("notifiarr")
if "webhooks" in self.data:
temp = self.data.pop("webhooks")
if "function" not in temp or ("function" in temp and temp["function"] is None):
temp["function"] = {}
if temp is not None:
if "function" not in temp or ("function" in temp and temp["function"] is None):
temp["function"] = {}
def hooks(attr):
if attr in temp:
items = temp.pop(attr)
if items:
temp["function"][attr] = items
if attr not in temp["function"]:
temp["function"][attr] = {}
temp["function"][attr] = None
def hooks(attr):
if attr in temp:
items = temp.pop(attr)
if items:
temp["function"][attr] = items
if attr not in temp["function"]:
temp["function"][attr] = {}
temp["function"][attr] = None
hooks("cross_seed")
hooks("recheck")
hooks("cat_update")
hooks("tag_update")
hooks("rem_unregistered")
hooks("rem_orphaned")
hooks("tag_nohardlinks")
hooks("cleanup_dirs")
self.data["webhooks"] = temp
hooks("cross_seed")
hooks("recheck")
hooks("cat_update")
hooks("tag_update")
hooks("rem_unregistered")
hooks("rem_orphaned")
hooks("tag_nohardlinks")
hooks("cleanup_dirs")
self.data["webhooks"] = temp
if "bhd" in self.data:
self.data["bhd"] = self.data.pop("bhd")
self.dry_run = self.commands["dry_run"]
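
The added `if temp is not None` guard means an empty `webhooks:` block no longer crashes the normalization. To make the transformation concrete, a condensed standalone sketch with placeholder data (not a real config):

    temp = {"error": "notifiarr", "cross_seed": "notifiarr", "recheck": None}
    if temp is not None:
        if temp.get("function") is None:
            temp["function"] = {}

        def hooks(attr):
            # Move a legacy top-level hook under "function"; unset hooks become None.
            if attr in temp:
                items = temp.pop(attr)
                if items:
                    temp["function"][attr] = items
            if attr not in temp["function"]:
                temp["function"][attr] = None

        for attr in ("cross_seed", "recheck"):
            hooks(attr)
    # temp == {"error": "notifiarr", "function": {"cross_seed": "notifiarr", "recheck": None}}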

View file

@@ -2,6 +2,7 @@ import os
from collections import Counter
from modules import util
from modules.torrent_hash_generator import TorrentHashGenerator
logger = util.logger
@@ -39,7 +40,7 @@ class CrossSeed:
dest = os.path.join(self.qbt.torrentinfo[t_name]["save_path"], "")
src = os.path.join(dir_cs, file)
dir_cs_out = os.path.join(dir_cs, "qbit_manage_added", file)
category = self.qbt.get_category(dest)
category = self.qbt.torrentinfo[t_name].get("Category", self.qbt.get_category(dest))
# Only add cross-seed torrent if original torrent is complete
if self.qbt.torrentinfo[t_name]["is_complete"]:
categories.append(category)
@@ -67,12 +68,28 @@ class CrossSeed:
self.client.torrents.add(
torrent_files=src, save_path=dest, category=category, tags="cross-seed", is_paused=True
)
util.move_files(src, dir_cs_out)
self.qbt.torrentinfo[t_name]["count"] += 1
try:
torrent_hash_generator = TorrentHashGenerator(src)
torrent_hash = torrent_hash_generator.generate_torrent_hash()
util.move_files(src, dir_cs_out)
except Exception as e:
logger.warning(f"Unable to generate torrent hash from cross-seed {t_name}: {e}")
try:
if torrent_hash:
torrent_info = self.qbt.get_torrents({"torrent_hashes": torrent_hash})
except Exception as e:
logger.warning(f"Unable to find hash {torrent_hash} in qbt: {e}")
if torrent_info:
torrent = torrent_info[0]
self.qbt.torrentvalid.append(torrent)
self.qbt.torrentinfo[t_name]["torrents"].append(torrent)
self.qbt.torrent_list.append(torrent)
else:
logger.print_line(f"Found {t_name} in {dir_cs} but original torrent is not complete.", self.config.loglevel)
logger.print_line("Not adding to qBittorrent", self.config.loglevel)
else:
error = f"{t_name} not found in torrents. Cross-seed Torrent not added to qBittorrent."
error = f"{tr_name} not found in torrents. Cross-seed Torrent not added to qBittorrent."
if self.config.dry_run:
logger.print_line(error, self.config.loglevel)
else:
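
The info-hash generated above is qBittorrent's identifier for the torrent, which is what lets the freshly injected cross-seed be fetched and appended to the internal tracking lists. A minimal standalone lookup with qbittorrent-api (connection details and hash are illustrative):

    import qbittorrentapi

    client = qbittorrentapi.Client(host="localhost:8080", username="admin", password="adminadmin")
    torrent_info = client.torrents_info(torrent_hashes="0123456789abcdef0123456789abcdef01234567")
    if torrent_info:  # empty list if the hash is not in the client
        torrent = torrent_info[0]
        print(torrent.name, torrent.state)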

View file

@@ -1,13 +1,10 @@
import os
from concurrent.futures import ThreadPoolExecutor
from fnmatch import fnmatch
from itertools import repeat
from multiprocessing import cpu_count
from multiprocessing import Pool
from modules import util
logger = util.logger
_config = None
class RemoveOrphaned:
@@ -21,56 +18,34 @@ class RemoveOrphaned:
self.root_dir = qbit_manager.config.root_dir
self.orphaned_dir = qbit_manager.config.orphaned_dir
global _config
_config = self.config
self.pool = Pool(processes=max(cpu_count() - 1, 1))
max_workers = max(os.cpu_count() - 1, 1)
self.executor = ThreadPoolExecutor(max_workers=max_workers)
self.rem_orphaned()
self.cleanup_pool()
self.executor.shutdown()
def rem_orphaned(self):
"""Remove orphaned files from remote directory"""
self.stats = 0
logger.separator("Checking for Orphaned Files", space=False, border=False)
torrent_files = []
root_files = []
orphaned_files = []
excluded_orphan_files = []
if self.remote_dir != self.root_dir:
local_orphaned_dir = self.orphaned_dir.replace(self.remote_dir, self.root_dir)
root_files = [
os.path.join(path.replace(self.remote_dir, self.root_dir), name)
for path, subdirs, files in os.walk(self.remote_dir)
for name in files
if local_orphaned_dir not in path
]
else:
root_files = [
os.path.join(path, name)
for path, subdirs, files in os.walk(self.root_dir)
for name in files
if self.orphaned_dir not in path
]
root_files = self.executor.submit(util.get_root_files, self.remote_dir, self.root_dir, self.orphaned_dir)
# Get an updated list of torrents
logger.print_line("Locating orphan files", self.config.loglevel)
torrent_list = self.qbt.get_torrents({"sort": "added_on"})
torrent_files_and_save_path = []
for torrent in torrent_list:
torrent_files = []
for torrent_files_dict in torrent.files:
torrent_files.append(torrent_files_dict.name)
torrent_files_and_save_path.append((torrent_files, torrent.save_path))
torrent_files.extend(
[
fullpath
for fullpathlist in self.pool.starmap(get_full_path_of_torrent_files, torrent_files_and_save_path)
for fullpathlist in self.executor.map(self.get_full_path_of_torrent_files, torrent_list)
for fullpath in fullpathlist
if fullpath not in torrent_files
]
)
orphaned_files = set(root_files) - set(torrent_files)
orphaned_files = set(root_files.result()) - set(torrent_files)
if self.config.orphaned["exclude_patterns"]:
logger.print_line("Processing orphan exclude patterns")
@@ -108,30 +83,27 @@ class RemoveOrphaned:
self.config.send_notifications(attr)
# Delete empty directories after moving orphan files
if not self.config.dry_run:
orphaned_parent_path = set(self.pool.map(move_orphan, orphaned_files))
orphaned_parent_path = set(self.executor.map(self.move_orphan, orphaned_files))
logger.print_line("Removing newly empty directories", self.config.loglevel)
self.pool.starmap(util.remove_empty_directories, zip(orphaned_parent_path, repeat("**/*")))
self.executor.map(lambda dir: util.remove_empty_directories(dir, "**/*"), orphaned_parent_path)
else:
logger.print_line("No Orphaned Files found.", self.config.loglevel)
def cleanup_pool(self):
self.pool.close()
self.pool.join()
def move_orphan(self, file):
src = file.replace(self.root_dir, self.remote_dir)
dest = os.path.join(self.orphaned_dir, file.replace(self.root_dir, ""))
util.move_files(src, dest, True)
return os.path.dirname(file).replace(self.root_dir, self.remote_dir)
def get_full_path_of_torrent_files(self, torrent):
torrent_files = map(lambda dict: dict.name, torrent.files)
save_path = torrent.save_path
def get_full_path_of_torrent_files(torrent_files, save_path):
fullpath_torrent_files = []
for file in torrent_files:
fullpath = os.path.join(save_path, file)
# Replace fullpath with \\ if qbm is running in docker (linux) but qbt is on windows
fullpath = fullpath.replace(r"/", "\\") if ":\\" in fullpath else fullpath
fullpath_torrent_files.append(fullpath)
return fullpath_torrent_files
def move_orphan(file):
src = file.replace(_config.root_dir, _config.remote_dir) # Could be optimized to only run when root != remote
dest = os.path.join(_config.orphaned_dir, file.replace(_config.root_dir, ""))
util.move_files(src, dest, True)
return os.path.dirname(file).replace(_config.root_dir, _config.remote_dir) # Another candidate for micro optimizing
fullpath_torrent_files = []
for file in torrent_files:
fullpath = os.path.join(save_path, file)
# Replace fullpath with \\ if qbm is running in docker (linux) but qbt is on windows
fullpath = fullpath.replace(r"/", "\\") if ":\\" in fullpath else fullpath
fullpath_torrent_files.append(fullpath)
return fullpath_torrent_files
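
On the Pool-to-ThreadPoolExecutor switch above: bound methods and lambdas are not picklable, so multiprocessing forced module-level helpers and the global _config, and its spawned workers crashed on Windows (Fixes #275); threads have neither problem. A rough sketch of the overlap pattern now used, with illustrative paths and a stand-in for util.get_root_files:

    import os
    from concurrent.futures import ThreadPoolExecutor

    def walk_files(root):  # hypothetical stand-in for util.get_root_files
        return [os.path.join(path, name) for path, _, names in os.walk(root) for name in names]

    with ThreadPoolExecutor(max_workers=max(os.cpu_count() - 1, 1)) as executor:
        # Kick off the slow directory walk, overlap it with per-torrent work,
        # then join on .result() only when the file list is actually needed.
        root_future = executor.submit(walk_files, "/data/torrents")
        torrent_files = set()  # would be filled via executor.map(...) over torrents
        orphaned_files = set(root_future.result()) - torrent_files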

View file

@@ -186,6 +186,7 @@ class TagNoHardLinks:
"""Tag torrents with no hardlinks"""
logger.separator("Tagging Torrents with No Hardlinks", space=False, border=False)
nohardlinks = self.nohardlinks
check_hardlinks = util.CheckHardLinks(self.root_dir, self.remote_dir)
for category in nohardlinks:
torrent_list = self.qbt.get_torrents({"category": category, "status_filter": "completed"})
if len(torrent_list) == 0:
@@ -199,7 +200,7 @@
continue
for torrent in torrent_list:
tracker = self.qbt.get_tags(torrent.trackers)
has_nohardlinks = util.nohardlink(
has_nohardlinks = check_hardlinks.nohardlink(
torrent["content_path"].replace(self.root_dir, self.remote_dir), self.config.notify
)
if any(tag in torrent.tags for tag in nohardlinks[category]["exclude_tags"]):

View file

@@ -17,7 +17,7 @@ WARN = WARNING
DRYRUN = 25
INFO = 20
DEBUG = 10
TRACE = 5
TRACE = 0
def fmt_filter(record):
@@ -72,17 +72,19 @@ class MyLogger:
"""Get handler for log file"""
max_bytes = 1024 * 1024 * 2
_handler = RotatingFileHandler(log_file, delay=True, mode="w", maxBytes=max_bytes, backupCount=count, encoding="utf-8")
self._formatter(_handler)
self._formatter(handler=_handler)
# if os.path.isfile(log_file):
# _handler.doRollover()
return _handler
def _formatter(self, handler, border=True):
def _formatter(self, handler=None, border=True, log_only=False, space=False):
"""Format log message"""
text = f"| %(message)-{self.screen_width - 2}s |" if border else f"%(message)-{self.screen_width - 2}s"
if isinstance(handler, RotatingFileHandler):
text = f"[%(asctime)s] %(filename)-27s %(levelname)-10s {text}"
handler.setFormatter(logging.Formatter(text))
console = f"| %(message)-{self.screen_width - 2}s |" if border else f"%(message)-{self.screen_width - 2}s"
file = f"{' '*65}" if space else "[%(asctime)s] %(filename)-27s %(levelname)-10s "
handlers = [handler] if handler else self._logger.handlers
for h in handlers:
if not log_only or isinstance(h, RotatingFileHandler):
h.setFormatter(logging.Formatter(f"{file if isinstance(h, RotatingFileHandler) else ''}{console}"))
def add_main_handler(self):
"""Add main handler to logger"""
@@ -233,18 +235,15 @@ class MyLogger:
def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False, stacklevel=1):
"""Log"""
log_only = False
if self.spacing > 0:
self.exorcise()
if "\n" in msg:
for i, line in enumerate(msg.split("\n")):
self._log(level, line, args, exc_info=exc_info, extra=extra, stack_info=stack_info, stacklevel=stacklevel)
if i == 0:
for handler in self._logger.handlers:
if isinstance(handler, RotatingFileHandler):
handler.setFormatter(logging.Formatter(" " * 65 + "| %(message)s"))
for handler in self._logger.handlers:
if isinstance(handler, RotatingFileHandler):
handler.setFormatter(logging.Formatter("[%(asctime)s] %(filename)-27s %(levelname)-10s | %(message)s"))
self._formatter(log_only=True, space=True)
log_only = True
else:
for secret in sorted(self.secrets, reverse=True):
if secret in msg:
@@ -266,6 +265,8 @@
exc_info = sys.exc_info()
record = self._logger.makeRecord(self._logger.name, level, func, lno, msg, args, exc_info, func, extra, sinfo)
self._logger.handle(record)
if log_only:
self._formatter()
def find_caller(self, stack_info=False, stacklevel=1):
"""Find caller"""

View file

@@ -0,0 +1,30 @@
import hashlib
import bencodepy
from modules import util
from modules.util import Failed
logger = util.logger
class TorrentHashGenerator:
def __init__(self, torrent_file_path):
self.torrent_file_path = torrent_file_path
def generate_torrent_hash(self):
try:
with open(self.torrent_file_path, "rb") as torrent_file:
torrent_data = torrent_file.read()
try:
torrent_info = bencodepy.decode(torrent_data)
info_data = bencodepy.encode(torrent_info[b"info"])
info_hash = hashlib.sha1(info_data).hexdigest()
logger.trace(f"info_hash: {info_hash}")
return info_hash
except KeyError:
logger.error("Invalid .torrent file format. 'info' key not found.")
except FileNotFoundError:
logger.error(f"Torrent file '{self.torrent_file_path}' not found.")
except Failed as err:
logger.error(f"TorrentHashGenerator Error: {err}")

View file

@@ -7,6 +7,7 @@ import signal
import time
from pathlib import Path
import requests
import ruamel.yaml
logger = logging.getLogger("qBit Manage")
@@ -71,6 +72,64 @@ class TorrentMessages:
]
def guess_branch(version, env_version, git_branch):
if git_branch:
return git_branch
elif env_version == "develop":
return env_version
elif version[2] > 0:
dev_version = get_develop()
if version[1] != dev_version[1] or version[2] <= dev_version[2]:
return "develop"
else:
return "master"
def current_version(version, branch=None):
if branch == "develop":
return get_develop()
elif version[2] > 0:
new_version = get_develop()
if version[1] != new_version[1] or new_version[2] >= version[2]:
return new_version
else:
return get_master()
develop_version = None
def get_develop():
global develop_version
if develop_version is None:
develop_version = get_version("develop")
return develop_version
master_version = None
def get_master():
global master_version
if master_version is None:
master_version = get_version("master")
return master_version
def get_version(level):
try:
url = f"https://raw.githubusercontent.com/StuffAnThings/qbit_manage/{level}/VERSION"
return parse_version(requests.get(url).content.decode().strip(), text=level)
except requests.exceptions.ConnectionError:
return "Unknown", "Unknown", 0
def parse_version(version, text="develop"):
version = version.replace("develop", text)
split_version = version.split(f"-{text}")
return version, split_version[0], int(split_version[1]) if len(split_version) > 1 else 0
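
To pin down the (full, base, build) tuple convention these helpers share, a worked example against a copy of parse_version (version strings illustrative):

    def parse_version(version, text="develop"):  # copied from above
        version = version.replace("develop", text)
        split_version = version.split(f"-{text}")
        return version, split_version[0], int(split_version[1]) if len(split_version) > 1 else 0

    assert parse_version("3.6.3-develop12") == ("3.6.3-develop12", "3.6.3", 12)
    assert parse_version("3.6.3") == ("3.6.3", "3.6.3", 0)
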
class check:
"""Check for attributes in config."""
@@ -307,59 +366,118 @@ def copy_files(src, dest):
def remove_empty_directories(pathlib_root_dir, pattern):
"""Remove empty directories recursively."""
pathlib_root_dir = Path(pathlib_root_dir)
# list all directories recursively and sort them by path,
# longest first
longest = sorted(
pathlib_root_dir.glob(pattern),
key=lambda p: len(str(p)),
reverse=True,
)
longest.append(pathlib_root_dir)
for pdir in longest:
try:
# list all directories recursively and sort them by path,
# longest first
longest = sorted(
pathlib_root_dir.glob(pattern),
key=lambda p: len(str(p)),
reverse=True,
)
longest.append(pathlib_root_dir) # delete the folder itself if it's empty
for pdir in longest:
try:
pdir.rmdir() # remove directory if empty
except (FileNotFoundError, OSError):
continue # catch and continue if non-empty, folders within could already be deleted if run in parallel
except FileNotFoundError:
pass # if this is being run in parallel, pathlib_root_dir could already be deleted
class CheckHardLinks:
"""
Class to check for hardlinks
"""
def __init__(self, root_dir, remote_dir):
self.root_dir = root_dir
self.remote_dir = remote_dir
self.root_files = set(get_root_files(self.root_dir, self.remote_dir))
self.get_inode_count()
def get_inode_count(self):
self.inode_count = {}
for file in self.root_files:
inode_no = os.stat(file.replace(self.root_dir, self.remote_dir)).st_ino
if inode_no in self.inode_count:
self.inode_count[inode_no] += 1
else:
self.inode_count[inode_no] = 1
def nohardlink(self, file, notify):
"""
Check if there are any hard links.
Works when passed either a file or a folder.
If a folder is passed, it takes the largest file in that folder and only checks for hardlinks
on the remaining files whose size is at least a set percentage of the largest file.
This fixes the bug in #192
"""
check_for_hl = True
try:
pdir.rmdir() # remove directory if empty
except OSError:
continue # catch and continue if non-empty
def nohardlink(file, notify):
"""
Check if there are any hard links
Will check if there are any hard links if it passes a file or folder
If a folder is passed, it will take the largest file in that folder and only check for hardlinks
of the remaining files where the file is greater size a percentage of the largest file
This fixes the bug in #192
"""
check_for_hl = True
if os.path.isfile(file):
logger.trace(f"Checking file: {file}")
if os.stat(file).st_nlink > 1:
check_for_hl = False
else:
sorted_files = sorted(Path(file).rglob("*"), key=lambda x: os.stat(x).st_size, reverse=True)
logger.trace(f"Folder: {file}")
logger.trace(f"Files Sorted by size: {sorted_files}")
threshold = 0.5
if not sorted_files:
msg = (
f"Nohardlink Error: Unable to open the folder {file}. "
"Please make sure folder exists and qbit_manage has access to this directory."
)
notify(msg, "nohardlink")
logger.warning(msg)
else:
largest_file_size = os.stat(sorted_files[0]).st_size
logger.trace(f"Largest file: {sorted_files[0]}")
logger.trace(f"Largest file size: {largest_file_size}")
for files in sorted_files:
file_size = os.stat(files).st_size
file_no_hardlinks = os.stat(files).st_nlink
if os.path.isfile(file):
if os.path.islink(file):
logger.warning(f"Symlink found in {file}, unable to determine hardlinks. Skipping...")
return False
logger.trace(f"Checking file: {file}")
logger.trace(f"Checking file size: {file_size}")
logger.trace(f"Checking no of hard links: {file_no_hardlinks}")
if file_no_hardlinks > 1 and file_size >= (largest_file_size * threshold):
logger.trace(f"Checking file inum: {os.stat(file).st_ino}")
logger.trace(f"Checking no of hard links: {os.stat(file).st_nlink}")
logger.trace(f"Checking inode_count dict: {self.inode_count.get(os.stat(file).st_ino)}")
# https://github.com/StuffAnThings/qbit_manage/issues/291 for more details
if os.stat(file).st_nlink - self.inode_count.get(os.stat(file).st_ino, 1) > 0:
check_for_hl = False
return check_for_hl
else:
sorted_files = sorted(Path(file).rglob("*"), key=lambda x: os.stat(x).st_size, reverse=True)
logger.trace(f"Folder: {file}")
logger.trace(f"Files Sorted by size: {sorted_files}")
threshold = 0.5
if not sorted_files:
msg = (
f"Nohardlink Error: Unable to open the folder {file}. "
"Please make sure folder exists and qbit_manage has access to this directory."
)
notify(msg, "nohardlink")
logger.warning(msg)
else:
largest_file_size = os.stat(sorted_files[0]).st_size
logger.trace(f"Largest file: {sorted_files[0]}")
logger.trace(f"Largest file size: {largest_file_size}")
for files in sorted_files:
if os.path.islink(files):
logger.warning(f"Symlink found in {files}, unable to determine hardlinks. Skipping...")
continue
file_size = os.stat(files).st_size
file_no_hardlinks = os.stat(files).st_nlink
logger.trace(f"Checking file: {file}")
logger.trace(f"Checking file inum: {os.stat(file).st_ino}")
logger.trace(f"Checking file size: {file_size}")
logger.trace(f"Checking no of hard links: {file_no_hardlinks}")
logger.trace(f"Checking inode_count dict: {self.inode_count.get(os.stat(file).st_ino)}")
if file_no_hardlinks - self.inode_count.get(os.stat(file).st_ino, 1) > 0 and file_size >= (
largest_file_size * threshold
):
check_for_hl = False
except PermissionError as perm:
logger.warning(f"{perm} : file {file} has permission issues. Skipping...")
return False
except FileNotFoundError as file_not_found_error:
logger.warning(f"{file_not_found_error} : File {file} not found. Skipping...")
return False
except Exception as ex:
logger.stacktrace()
logger.error(ex)
return False
return check_for_hl
def get_root_files(root_dir, remote_dir, exclude_dir=None):
local_exclude_dir = exclude_dir.replace(remote_dir, root_dir) if exclude_dir and remote_dir != root_dir else exclude_dir
root_files = [
os.path.join(path.replace(remote_dir, root_dir) if remote_dir != root_dir else path, name)
for path, subdirs, files in os.walk(remote_dir if remote_dir != root_dir else root_dir)
for name in files
if not local_exclude_dir or local_exclude_dir not in path
]
return root_files
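
The idea behind the new CheckHardLinks class (the #291 fix): st_nlink counts every link to an inode, so links that all live inside root_dir must be discounted before concluding a torrent is hardlinked elsewhere. A minimal sketch of that accounting (paths illustrative):

    import os

    root_dir = "/data/torrents"
    inode_count = {}
    for path, _, files in os.walk(root_dir):
        for name in files:
            ino = os.stat(os.path.join(path, name)).st_ino
            inode_count[ino] = inode_count.get(ino, 0) + 1

    def has_external_hardlink(file_path):
        # Links found under root_dir don't count; anything beyond them
        # must live outside the root (e.g. an import into a media library).
        st = os.stat(file_path)
        return st.st_nlink - inode_count.get(st.st_ino, 1) > 0
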
def load_json(file):

View file

@ -3,6 +3,7 @@
import argparse
import glob
import os
import platform
import sys
import time
from datetime import datetime
@@ -166,16 +167,23 @@ parser.add_argument("-w", "--width", dest="width", help="Screen Width (Default:
args = parser.parse_args()
static_envs = []
test_value = None
def get_arg(env_str, default, arg_bool=False, arg_int=False):
"""Get argument from environment variable or command line argument."""
global test_value
env_vars = [env_str] if not isinstance(env_str, list) else env_str
final_value = None
static_envs.extend(env_vars)
for env_var in env_vars:
env_value = os.environ.get(env_var)
if env_var == "BRANCH_NAME":
test_value = env_value
if env_value is not None:
final_value = env_value
break
if final_value is not None:
if final_value or (arg_int and final_value == 0):
if arg_bool:
if final_value is True or final_value is False:
return final_value
@@ -184,13 +192,28 @@ def get_arg(env_str, default, arg_bool=False, arg_int=False):
else:
return False
elif arg_int:
return int(final_value)
try:
return int(final_value)
except ValueError:
return default
else:
return str(final_value)
else:
return default
try:
from git import Repo, InvalidGitRepositoryError
try:
git_branch = Repo(path=".").head.ref.name # noqa
except InvalidGitRepositoryError:
git_branch = None
except ImportError:
git_branch = None
env_version = get_arg("BRANCH_NAME", "master")
is_docker = get_arg("QBM_DOCKER", False, arg_bool=True)
run = get_arg("QBT_RUN", args.run, arg_bool=True)
sch = get_arg("QBT_SCHEDULE", args.min)
startupDelay = get_arg("QBT_STARTUP_DELAY", args.startupDelay)
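
get_arg resolves each setting from one or more environment variables before falling back to the CLI value, and the new try/except keeps a malformed numeric env var from crashing startup. A condensed, single-variable stand-in to show the behavior (values illustrative):

    import os

    def get_arg(env_str, default, arg_bool=False, arg_int=False):  # condensed stand-in
        value = os.environ.get(env_str)
        if value is None:
            return default
        if arg_bool:
            return value.lower() in ("t", "true")
        if arg_int:
            try:
                return int(value)
            except ValueError:
                return default  # new in this PR: bad ints fall back instead of raising
        return str(value)

    os.environ["QBT_RUN"] = "true"
    os.environ["QBT_SCHEDULE"] = "not-a-number"
    assert get_arg("QBT_RUN", False, arg_bool=True) is True
    assert get_arg("QBT_SCHEDULE", 1440, arg_int=True) == 1440
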
@@ -306,13 +329,15 @@ def my_except_hook(exctype, value, tbi):
sys.excepthook = my_except_hook
version = "Unknown"
version = ("Unknown", "Unknown", 0)
with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "VERSION")) as handle:
for line in handle.readlines():
line = line.strip()
if len(line) > 0:
version = line
version = util.parse_version(line)
break
branch = util.guess_branch(version, env_version, git_branch)
version = (version[0].replace("develop", branch), version[1].replace("develop", branch), version[2])
def start_loop():
@@ -377,16 +402,12 @@ def start():
try:
cfg = Config(default_dir, args)
qbit_manager = cfg.qbt
except Exception as ex:
if "Qbittorrent Error" in ex.args[0]:
logger.print_line(ex, "CRITICAL")
logger.print_line("Exiting scheduled Run.", "CRITICAL")
finished_run()
return None
else:
logger.stacktrace()
logger.print_line(ex, "CRITICAL")
logger.stacktrace()
logger.print_line(ex, "CRITICAL")
logger.print_line("Exiting scheduled Run.", "CRITICAL")
finished_run()
return None
if qbit_manager:
# Set Category
@@ -397,6 +418,12 @@ def start():
if cfg.commands["tag_update"]:
stats["tagged"] += Tags(qbit_manager).stats
# Set Cross Seed
if cfg.commands["cross_seed"]:
cross_seed = CrossSeed(qbit_manager)
stats["added"] += cross_seed.stats_added
stats["tagged"] += cross_seed.stats_tagged
# Remove Unregistered Torrents and tag errors
if cfg.commands["rem_unregistered"] or cfg.commands["tag_tracker_error"]:
rem_unreg = RemoveUnregistered(qbit_manager)
@@ -407,12 +434,6 @@ def start():
stats["untagged_tracker_error"] += rem_unreg.stats_untagged
stats["tagged"] += rem_unreg.stats_tagged
# Set Cross Seed
if cfg.commands["cross_seed"]:
cross_seed = CrossSeed(qbit_manager)
stats["added"] += cross_seed.stats_added
stats["tagged"] += cross_seed.stats_tagged
# Recheck Torrents
if cfg.commands["recheck"]:
recheck = ReCheck(qbit_manager)
@@ -525,8 +546,17 @@ if __name__ == "__main__":
logger.info_center(r" \__, |_.__/|_|\__| |_| |_| |_|\__,_|_| |_|\__,_|\__, |\___|") # noqa: W605
logger.info_center(" | | ______ __/ | ") # noqa: W605
logger.info_center(" |_| |______| |___/ ") # noqa: W605
logger.info(f" Version: {version}")
system_ver = "Docker" if is_docker else f"Python {platform.python_version()}"
logger.info(f" Version: {version[0]} ({system_ver}){f' (Git: {git_branch})' if git_branch else ''}")
latest_version = util.current_version(version, branch=branch)
new_version = (
latest_version[0]
if latest_version and (version[1] != latest_version[1] or (version[2] and version[2] < latest_version[2]))
else None
)
if new_version:
logger.info(f" Newest Version: {new_version}")
logger.info(f" Platform: {platform.platform()}")
logger.separator(loglevel="DEBUG")
logger.debug(f" --run (QBT_RUN): {run}")
logger.debug(f" --schedule (QBT_SCHEDULE): {sch}")

View file

@@ -1,7 +1,9 @@
bencodepy==0.9.5
flake8==6.0.0
pre-commit==3.2.2
GitPython==3.1.31
pre-commit==3.3.2
qbittorrent-api==2023.4.47
requests==2.28.2
requests==2.31.0
retrying==1.3.4
ruamel.yaml==0.17.21
ruamel.yaml==0.17.26
schedule==1.2.0