Merge pull request #323 from StuffAnThings/develop

v4.0.0 release
This commit is contained in:
bobokun 2023-06-06 17:47:13 -04:00 committed by GitHub
commit 81104cea74
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
22 changed files with 1040 additions and 734 deletions

View file

@ -51,3 +51,11 @@ repos:
hooks:
- id: flake8
args: [--config=.flake8]
- repo: local
hooks:
- id: increase-version
name: Increase version if branch contains "develop"
entry: ./scripts/pre-commit/increase_version.sh
language: script
pass_filenames: false
stages: [commit]

View file

@ -1,8 +1,23 @@
# Requirements Updated
- Updates ruamel.yaml to 0.17.27
- Updates ruamel.yaml to 0.17.31
- Updates qbittorrent-api to 2023.5.48
- Separate out dev requirements into requirements-dev.txt
# Breaking Changes
- `tag_nohardlinks` only updates/removes the `noHL` tag. **It no longer modifies or cleans up share_limits.**
- `tag_update` only adds tracker tags to torrents. **It no longer modifies or cleans up share_limits.**
- Please remove any references to share_limits from the tracker/nohardlinks sections of your configuration
- Migration guide can be followed here: [V4 Migration Guide](https://github.com/StuffAnThings/qbit_manage/wiki/v4-Migration-Guide)
- Webhook payloads changed (See [webhooks](https://github.com/StuffAnThings/qbit_manage/wiki/Config-Setup#webhooks) for updated payload)
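As a rough illustration of this change (field names are taken from the attr dicts in the diffs below; the example values, and the exact field set per function, are illustrative only), the per-function payload now carries a `torrents` list of names instead of a single `torrent_name` string:

```python
# Illustrative only: sketch of a v4 payload, based on the attr dicts in this PR.
# v3 payloads used "torrent_name": "<name>"; v4 payloads use "torrents": [<names>].
attr = {
    "function": "tag_nohardlinks",
    "title": "Tagging Torrents with No Hardlinks",
    "body": "Torrent Name: Some.Movie.2023\nAdded Tag: noHL",
    "torrents": ["Some.Movie.2023"],          # was: "torrent_name": "Some.Movie.2023"
    "torrent_category": "movies-completed",
    "torrent_tag": "noHL",
    "torrent_tracker": "https://tracker.example/announce",  # placeholder URL
    "notifiarr_indexer": "example-indexer",                  # placeholder
}
```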
# New Features
- Adds new command `share_limits`, `--share-limits`, `QBT_SHARE_LIMITS=True` to update share limits based on tags/categories specified per group (Closes #88, Closes #306, Closes #259, Closes #308, Closes #137)
- See [Config Setup - share_limits](https://github.com/StuffAnThings/qbit_manage/wiki/Config-Setup#share_limits) for more details
- Adds new command `skip_qb_version_check`, `--skip-qb-version-check`, `QBT_SKIP_QB_VERSION_CHECK` to bypass the qBittorrent compatibility check (unsupported - Thanks to @ftc2 #307)
- Updates webhook notifications to group them when a function updates more than 10 torrents.
- Adds new webhooks for `share_limits`
- Adds rate limit to webhook notifications (1 msg/sec)
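A minimal sketch of the grouping and pacing behaviour described above; the real logic lives in `modules/webhooks.py`, `send` is a placeholder for the actual webhook call, and only the `GROUP_NOTIFICATION_LIMIT` threshold and the one-second pause are taken from this PR:

```python
import time

GROUP_NOTIFICATION_LIMIT = 10  # group notifications once a function updates more than 10 torrents


def notify(torrents_updated, notify_attr, send):
    """Sketch only: send one grouped message for large batches, otherwise pace
    individual messages at roughly 1 msg/sec."""
    if len(torrents_updated) > GROUP_NOTIFICATION_LIMIT:
        send({"torrents": torrents_updated, "body": f"Updated {len(torrents_updated)} torrents."})
    else:
        for attr in notify_attr:
            send(attr)
            time.sleep(1)  # rate limit: 1 msg/sec
```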
# Bug Fixes
- Fixes #302
- Adds a way to bypass qbt version check (unsupported - Thanks to @ftc2 #307)
- Fixes #317
**Full Changelog**: https://github.com/StuffAnThings/qbit_manage/compare/v3.6.3...v3.6.4
**Full Changelog**: https://github.com/StuffAnThings/qbit_manage/compare/v3.6.4...v4.0.0

View file

@ -1 +1 @@
3.6.4
4.0.0

View file

@ -14,6 +14,8 @@ commands:
tag_tracker_error: False
rem_orphaned: False
tag_nohardlinks: False
share_limits: False
skip_qb_version_check: False
skip_cleanup: False
qbt:
@ -23,9 +25,10 @@ qbt:
pass: "password"
settings:
force_auto_tmm: False # Will force qBittorrent to enable Automatic Torrent Management for each torrent.
tracker_error_tag: issue # Will set the tag of any torrents that do not have a working tracker.
force_auto_tmm: False # Will force qBittorrent to enable Automatic Torrent Management for each torrent.
tracker_error_tag: issue # Will set the tag of any torrents that do not have a working tracker.
nohardlinks_tag: noHL # Will set the tag of any torrents with no hardlinks.
share_limits_suffix_tag: share_limit # Will add this suffix (separated by '.') to the group name when tagging torrents that have share limits applied.
ignoreTags_OnUpdate: # When running the tag_update function, torrent tags will still be updated even if the torrent already has one or more of the tags defined here. Otherwise, torrents that already have tags will not be tagged.
- noHL
- issue
@ -38,7 +41,7 @@ directory:
# Must be set if you're running qbit_manage locally and qBittorrent/cross_seed is in a docker
# <OPTIONAL> recycle_bin var: </your/path/here/> # Path of the RecycleBin folder. Default location is set to remote_dir/.RecycleBin
# <OPTIONAL> torrents_dir var: </your/path/here/> # Path of your qBittorrent torrents directory. Required for `save_torrents` attribute in recyclebin
# <OPTIONAL> orphaned_dir var: </your/path/here/> # Path of the Orphaned Data folder. This is similar to RecycleBin, but only for orphaned data.
cross_seed: "/your/path/here/"
root_dir: "/data/torrents/"
remote_dir: "/mnt/user/data/torrents/"
@ -61,21 +64,12 @@ cat_change:
movies-hd.cross-seed: movies-hd
movies-uhd.cross-seed: movies-uhd
tracker:
# Mandatory
# Tag Parameters
# <Tracker URL Keyword>: # <MANDATORY> This is the keyword in the tracker url
# <MANDATORY> Set tag name. Can be a list of tags or a single tag
# tag: <Tag Name>
# <OPTIONAL> Will set the torrent Maximum share ratio until torrent is stopped from seeding/uploading. -2 means the global limit should be used, -1 means no limit.
# max_ratio: 5.0
# <OPTIONAL> Will set the torrent Maximum seeding time (min) until torrent is stopped from seeding. -2 means the global limit should be used, -1 means no limit.
# max_seeding_time: 129600
# <OPTIONAL> Will ensure that noHL torrents from this tracker are not deleted by cleanup variable if torrent has not yet met the minimum seeding time (min).
# min_seeding_time: 2000
# <OPTIONAL> Will limit the upload speed KiB/s (KiloBytes/second) (-1 means no limit)
# limit_upload_speed: 150
# <OPTIONAL> Set this to the notifiarr react name. This is used to add indexer reactions to the notifications sent by Notifiarr
# notifiarr: <notifiarr indexer>
animebytes.tv:
@ -86,10 +80,6 @@ tracker:
- Avistaz
- tag2
- tag3
max_ratio: 5.0
max_seeding_time: 129600
min_seeding_time: 30400
limit_upload_speed: 150
notifiarr: avistaz
beyond-hd:
tag: [Beyond-HD, tag2, tag3]
@ -101,14 +91,11 @@ tracker:
tag: CartoonChaos
digitalcore:
tag: DigitalCore
max_ratio: 5.0
notifiarr: digitalcore
gazellegames:
tag: GGn
limit_upload_speed: 150
hdts:
tag: HDTorrents
max_seeding_time: 129600
landof.tv:
tag: BroadcasTheNet
notifiarr: broadcasthenet
@ -139,54 +126,88 @@ nohardlinks:
# Tag Movies/Series that are not hard linked outside the root directory
# Mandatory to fill out directory parameter above to use this function (root_dir/remote_dir)
# Set this to the category name of your completed movies/completed series in qBittorrent. It can be any category you would like to tag when no hardlinks are found (a short hardlink-check sketch follows the examples below).
movies-completed:
# <OPTIONAL> exclude_tags var: Will exclude torrents with any of the following tags when searching through the category.
exclude_tags:
- Beyond-HD
- AnimeBytes
- MaM
# <OPTIONAL> cleanup var: WARNING!! Setting this as true Will remove and delete contents of any torrents that have a noHL tag and meets share limits
cleanup: false
# <OPTIONAL> max_ratio var: Will set the torrent Maximum share ratio until torrent is stopped from seeding/uploading.
# Delete this key from a category's config to use the tracker's configured max_ratio. Will default to -1 if not specified for the category or tracker.
# Uses the larger value of the noHL Category or Tracker specific setting.
max_ratio: 4.0
# <OPTIONAL> max seeding time var: Will set the torrent Maximum seeding time (min) until torrent is stopped from seeding.
# Delete this key from a category's config to use the tracker's configured max_seeding_time. Will default to -1 if not specified for the category or tracker.
# Uses the larger value of the noHL Category or Tracker specific setting.
max_seeding_time: 86400
# <OPTIONAL> Limit Upload Speed var: Will limit the upload speed KiB/s (KiloBytes/second) (`-1` : No Limit)
limit_upload_speed:
# <OPTIONAL> min seeding time var: Will prevent torrent deletion by cleanup variable if torrent has not yet minimum seeding time (min).
# Delete this key from a category's config to use the tracker's configured min_seeding_time. Will default to 0 if not specified for the category or tracker.
# Uses the larger value of the noHL Category or Tracker specific setting.
min_seeding_time: 43200
# <OPTIONAL> resume_torrent_after_untagging_noHL var: If a torrent was previously tagged as NoHL and now has hardlinks, this variable will resume your torrent after changing share limits
resume_torrent_after_untagging_noHL: false
- movies-completed-4k
- series-completed-4k
- movies-completed:
# <OPTIONAL> exclude_tags var: Will exclude torrents with any of the following tags when searching through the category.
exclude_tags:
- Beyond-HD
- AnimeBytes
- MaM
# Can have additional categories set with separate ratio/seeding times defined.
series-completed:
# <OPTIONAL> exclude_tags var: Will exclude torrents with any of the following tags when searching through the category.
exclude_tags:
- series-completed:
# <OPTIONAL> exclude_tags var: Will exclude torrents with any of the following tags when searching through the category.
exclude_tags:
- Beyond-HD
- BroadcasTheNet
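For context on what "no hardlinks" means in the section above, here is a minimal illustration (not qbit_manage's actual check, which inspects the torrent's content path): a regular file whose link count is 1 is not hardlinked anywhere else on the filesystem.

```python
import os


def file_has_no_hardlinks(path: str) -> bool:
    # Illustration only: st_nlink is the number of hard links pointing at the file;
    # a count of 1 means no other directory entry shares this data.
    return os.stat(path).st_nlink == 1
```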
share_limits:
# Control how torrent share limits are set depending on the priority of your grouping
# Each torrent will be matched with the share limit group that has the highest priority and meets the group's filter criteria.
# Each torrent can only be matched with one share limit group (a short matching sketch follows the example groups below)
# The group name key is mandatory and can be any string you want
noHL:
# <MANDATORY> priority: <int/float> # This is the priority of your grouping. The lower the number the higher the priority
priority: 1
# <OPTIONAL> include_all_tags: <list> # Filter the group based on one or more tags. Multiple include_all_tags are checked with an AND condition
# All tags defined here must be present in the torrent for it to be included in this group
include_all_tags:
- noHL
# <OPTIONAL> include_any_tags: <list> # Filter the group based on one or more tags. Multiple include_any_tags are checked with an OR condition
# At least one of the tags defined here must be present in the torrent for it to be included in this group
include_any_tags:
- noHL
# <OPTIONAL> exclude_all_tags: <list> # Filter by excluding one or more tags. Multiple exclude_all_tags are checked with an AND condition
# This is useful to combine with the category filter to exclude one or more tags from an entire category
# All tags defined here must be present in the torrent for it to be excluded from this group
exclude_all_tags:
- Beyond-HD
- BroadcasTheNet
# <OPTIONAL> cleanup var: WARNING!! Setting this as true Will remove and delete contents of any torrents that have a noHL tag and meets share limits
cleanup: false
# <OPTIONAL> max_ratio var: Will set the torrent Maximum share ratio until torrent is stopped from seeding/uploading.
# Delete this key from a category's config to use the tracker's configured max_ratio. Will default to -1 if not specified for the category or tracker.
# Uses the larger value of the noHL Category or Tracker specific setting.
max_ratio: 4.0
# <OPTIONAL> max seeding time var: Will set the torrent Maximum seeding time (min) until torrent is stopped from seeding.
# Delete this key from a category's config to use the tracker's configured max_seeding_time. Will default to -1 if not specified for the category or tracker.
# Uses the larger value of the noHL Category or Tracker specific setting.
max_seeding_time: 86400
# <OPTIONAL> Limit Upload Speed var: Will limit the upload speed KiB/s (KiloBytes/second) (`-1` : No Limit)
limit_upload_speed:
# <OPTIONAL> min seeding time var: Will prevent torrent deletion by cleanup variable if torrent has not yet minimum seeding time (min).
# Delete this key from a category's config to use the tracker's configured min_seeding_time. Will default to 0 if not specified for the category or tracker.
# Uses the larger value of the noHL Category or Tracker specific setting.
# <OPTIONAL> exclude_any_tags: <list> # Filter by excluding one or more tags. Multiple exclude_any_tags are checked with an OR condition
# This is useful to combine with the category filter to exclude one or more tags from an entire category
# If any one of the tags defined here is present in the torrent, it will be excluded from this group
exclude_any_tags:
- Beyond-HD
# <OPTIONAL> categories: <list> # Filter by including one or more categories. Multiple categories are checked with an OR condition
# Since one torrent can only be associated with a single category, multiple categories are checked with an OR condition
categories:
- RadarrComplete
- SonarrComplete
# <OPTIONAL> max_ratio <float>: Will set the torrent Maximum share ratio until torrent is stopped from seeding/uploading.
# Will default to -1 (no limit) if not specified for the group.
max_ratio: 5.0
# <OPTIONAL> max_seeding_time <int>: Will set the torrent Maximum seeding time (minutes) until torrent is stopped from seeding.
# Will default to -1 (no limit) if not specified for the group.
max_seeding_time: 129600
# <OPTIONAL> min_seeding_time <int>: Will prevent torrent deletion by cleanup variable if the torrent has not yet met the minimum seeding time (minutes).
# Will default to 0 if not specified for the group.
min_seeding_time: 43200
# <OPTIONAL> resume_torrent_after_untagging_noHL var: If a torrent was previously tagged as NoHL and now has hardlinks, this variable will resume your torrent after changing share limits
resume_torrent_after_untagging_noHL: false
# <OPTIONAL> Limit Upload Speed <int>: Will limit the upload speed in KiB/s (kibibytes per second) (`-1` : No Limit)
limit_upload_speed: 0
# <OPTIONAL> cleanup <bool>: WARNING!! Setting this to true will remove and delete the contents of any torrents that satisfy the share limits
cleanup: false
# <OPTIONAL> resume_torrent_after_change <bool>: This variable will resume your torrent after changing share limits. Default is true
resume_torrent_after_change: true
# <OPTIONAL> add_group_to_tag <bool>: This adds your grouping as a tag with a suffix defined in settings. Default is true
# Example: A grouping defined as noHL will have a tag set to noHL.share_limit (if using the default suffix)
add_group_to_tag: true
cross-seed:
priority: 2
include_all_tags:
- cross-seed
max_seeding_time: 10200
cleanup: false
PTP:
priority: 3
include_all_tags:
- PassThePopcorn
max_ratio: 2.0
max_seeding_time: 130000
cleanup: false
default:
priority: 999
max_ratio: -1
max_seeding_time: -1
cleanup: false
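A minimal sketch of the matching rules described at the top of this section, mirroring the `ShareLimits.check_tags()` / `check_category()` logic added later in this PR (the group dicts and torrent data passed in are illustrative):

```python
# Minimal sketch only; the real implementation is get_share_limit_group(),
# check_tags() and check_category() in the new ShareLimits class.
def matches(group, tags, category):
    tags = set(tags)
    if group.get("include_all_tags") and not set(group["include_all_tags"]).issubset(tags):
        return False
    if group.get("include_any_tags") and not set(group["include_any_tags"]) & tags:
        return False
    if group.get("exclude_all_tags") and set(group["exclude_all_tags"]).issubset(tags):
        return False
    if group.get("exclude_any_tags") and set(group["exclude_any_tags"]) & tags:
        return False
    if group.get("categories") and category not in group["categories"]:
        return False
    return True


def assign_group(groups, tags, category):
    # Lowest priority number wins; the first group whose filters match gets the torrent.
    for name, cfg in sorted(groups.items(), key=lambda kv: kv[1]["priority"]):
        if matches(cfg, tags, category):
            return name
    return None


# e.g. a torrent tagged ["noHL"] in category "RadarrComplete" lands in the "noHL" group
# above, while an untagged torrent falls through to "default" (priority 999).
```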
recyclebin:
# Recycle Bin method of deletion will move files into the recycle bin (Located in /root_dir/.RecycleBin) instead of directly deleting them in qbit
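A rough sketch of the "move instead of delete" idea described above (the helper and paths here are illustrative; the actual behaviour is implemented by `tor_delete_recycle`, which the code in this PR calls):

```python
import os
import shutil


def move_to_recycle_bin(content_path: str, root_dir: str) -> None:
    """Illustration only: relocate a torrent's files under root_dir/.RecycleBin,
    preserving their relative path, instead of deleting them outright."""
    recycle_bin = os.path.join(root_dir, ".RecycleBin")
    dest = os.path.join(recycle_bin, os.path.relpath(content_path, root_dir))
    os.makedirs(os.path.dirname(dest), exist_ok=True)
    shutil.move(content_path, dest)
```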
@ -219,7 +240,7 @@ orphaned:
- "**/@eaDir"
- "/data/torrents/temp/**"
- "**/*.!qB"
- '**/_unpackerred'
- "**/_unpackerred"
apprise:
# Apprise integration with webhooks
@ -256,6 +277,7 @@ webhooks:
tag_tracker_error: notifiarr
rem_orphaned: notifiarr
tag_nohardlinks: notifiarr
share_limits: notifiarr
cleanup_dirs: notifiarr
bhd:

View file

@ -1,4 +1,6 @@
"""Apprise notification class"""
import time
from modules import util
from modules.util import Failed
@ -14,5 +16,6 @@ class Apprise:
logger.secret(self.api_url)
self.notify_url = ",".join(params["notify_url"])
response = self.config.get(self.api_url)
time.sleep(1) # Pause for 1 second before sending the next request
if response.status_code != 200:
raise Failed(f"Apprise Error: Unable to connect to Apprise using {self.api_url}")

View file

@ -3,6 +3,7 @@ import os
import re
import stat
import time
from collections import OrderedDict
import requests
from retrying import retry
@ -28,6 +29,7 @@ COMMANDS = [
"tag_tracker_error",
"rem_orphaned",
"tag_nohardlinks",
"share_limits",
"skip_cleanup",
"skip_qb_version_check",
"dry_run",
@ -82,6 +84,7 @@ class Config:
logger.debug(f" --tag-tracker-error (QBT_TAG_TRACKER_ERROR): {self.commands['tag_tracker_error']}")
logger.debug(f" --rem-orphaned (QBT_REM_ORPHANED): {self.commands['rem_orphaned']}")
logger.debug(f" --tag-nohardlinks (QBT_TAG_NOHARDLINKS): {self.commands['tag_nohardlinks']}")
logger.debug(f" --share-limits (QBT_SHARE_LIMITS): {self.commands['share_limits']}")
logger.debug(f" --skip-cleanup (QBT_SKIP_CLEANUP): {self.commands['skip_cleanup']}")
logger.debug(f" --skip-qb-version-check (QBT_SKIP_QB_VERSION_CHECK): {self.commands['skip_qb_version_check']}")
logger.debug(f" --dry-run (QBT_DRY_RUN): {self.commands['dry_run']}")
@ -136,6 +139,9 @@ class Config:
self.data["webhooks"] = temp
if "bhd" in self.data:
self.data["bhd"] = self.data.pop("bhd")
if "share_limits" in self.data:
self.data["share_limits"] = self.data.pop("share_limits")
self.dry_run = self.commands["dry_run"]
self.loglevel = "DRYRUN" if self.dry_run else "INFO"
self.session = requests.Session()
@ -148,10 +154,14 @@ class Config:
self.data, "tracker_error_tag", parent="settings", default="issue"
),
"nohardlinks_tag": self.util.check_for_attribute(self.data, "nohardlinks_tag", parent="settings", default="noHL"),
"share_limits_suffix_tag": self.util.check_for_attribute(
self.data, "share_limits_suffix_tag", parent="settings", default="share_limit"
),
}
self.tracker_error_tag = self.settings["tracker_error_tag"]
self.nohardlinks_tag = self.settings["nohardlinks_tag"]
self.share_limits_suffix_tag = "." + self.settings["share_limits_suffix_tag"]
default_ignore_tags = [self.nohardlinks_tag, self.tracker_error_tag, "cross-seed"]
self.settings["ignoreTags_OnUpdate"] = self.util.check_for_attribute(
@ -167,6 +177,7 @@ class Config:
"tag_tracker_error": None,
"rem_orphaned": None,
"tag_nohardlinks": None,
"share_limits": None,
"cleanup_dirs": None,
}
@ -249,111 +260,189 @@ class Config:
# nohardlinks
self.nohardlinks = None
if "nohardlinks" in self.data and self.commands["tag_nohardlinks"]:
if "nohardlinks" in self.data and self.commands["tag_nohardlinks"] and self.data["nohardlinks"] is not None:
self.nohardlinks = {}
for cat in self.data["nohardlinks"]:
if cat in list(self.data["cat"].keys()):
is_max_ratio_defined = self.data["nohardlinks"][cat].get("max_ratio")
is_max_seeding_time_defined = self.data["nohardlinks"][cat].get("max_seeding_time")
if isinstance(cat, dict):
cat_str = list(cat.keys())[0]
self.nohardlinks[cat_str] = {}
exclude_tags = cat[cat_str].get("exclude_tags", [])
if isinstance(exclude_tags, str):
exclude_tags = [exclude_tags]
self.nohardlinks[cat_str]["exclude_tags"] = exclude_tags
elif isinstance(cat, str):
self.nohardlinks[cat] = {}
self.nohardlinks[cat]["exclude_tags"] = self.util.check_for_attribute(
self.data,
"exclude_tags",
parent="nohardlinks",
subparent=cat,
var_type="list",
default_is_none=True,
do_print=False,
)
self.nohardlinks[cat]["cleanup"] = self.util.check_for_attribute(
self.data, "cleanup", parent="nohardlinks", subparent=cat, var_type="bool", default=False, do_print=False
)
if is_max_ratio_defined or is_max_seeding_time_defined:
self.nohardlinks[cat]["max_ratio"] = self.util.check_for_attribute(
self.data,
"max_ratio",
parent="nohardlinks",
subparent=cat,
var_type="float",
min_int=-2,
do_print=False,
default=-1,
save=False,
)
self.nohardlinks[cat]["max_seeding_time"] = self.util.check_for_attribute(
self.data,
"max_seeding_time",
parent="nohardlinks",
subparent=cat,
var_type="int",
min_int=-2,
do_print=False,
default=-1,
save=False,
)
else:
self.nohardlinks[cat]["max_ratio"] = self.util.check_for_attribute(
self.data,
"max_ratio",
parent="nohardlinks",
subparent=cat,
var_type="float",
min_int=-2,
do_print=False,
default_is_none=True,
save=False,
)
self.nohardlinks[cat]["max_seeding_time"] = self.util.check_for_attribute(
self.data,
"max_seeding_time",
parent="nohardlinks",
subparent=cat,
var_type="int",
min_int=-2,
do_print=False,
default_is_none=True,
save=False,
)
self.nohardlinks[cat]["min_seeding_time"] = self.util.check_for_attribute(
self.data,
"min_seeding_time",
parent="nohardlinks",
subparent=cat,
var_type="int",
min_int=0,
do_print=False,
default=0,
save=False,
)
self.nohardlinks[cat]["limit_upload_speed"] = self.util.check_for_attribute(
self.data,
"limit_upload_speed",
parent="nohardlinks",
subparent=cat,
var_type="int",
min_int=-1,
do_print=False,
default=0,
save=False,
)
self.nohardlinks[cat]["resume_torrent_after_untagging_noHL"] = self.util.check_for_attribute(
self.data,
"resume_torrent_after_untagging_noHL",
parent="nohardlinks",
subparent=cat,
var_type="bool",
default=True,
do_print=False,
save=False,
)
else:
err = f"Config Error: Category {cat} is defined under nohardlinks attribute "
"but is not defined in the cat attribute."
self.notify(err, "Config")
raise Failed(err)
self.nohardlinks[cat]["exclude_tags"] = []
else:
if self.commands["tag_nohardlinks"]:
err = "Config Error: nohardlinks attribute max_ratio not found"
err = "Config Error: nohardlinks must be a list of categories"
self.notify(err, "Config")
raise Failed(err)
# share limits
self.share_limits = None
if "share_limits" in self.data and self.commands["share_limits"]:
def _sort_share_limits(share_limits):
sorted_limits = sorted(
share_limits.items(), key=lambda x: x[1].get("priority", float("inf")) if x[1] is not None else float("inf")
)
priorities = set()
for key, value in sorted_limits:
if value is None:
value = {}
if "priority" in value:
priority = value["priority"]
if priority in priorities:
err = (
f"Config Error: Duplicate priority '{priority}' found in share_limits "
f"for the grouping '{key}'. Priority must be a unique value and greater than or equal to 1"
)
self.notify(err, "Config")
raise Failed(err)
else:
priority = max(priorities) + 1
logger.warning(
f"Priority not defined for the grouping '{key}' in share_limits. " f"Setting priority to {priority}"
)
value["priority"] = self.util.check_for_attribute(
self.data,
"priority",
parent="share_limits",
subparent=key,
var_type="float",
default=priority,
save=True,
)
priorities.add(priority)
return OrderedDict(sorted_limits)
self.share_limits = OrderedDict()
sorted_share_limits = _sort_share_limits(self.data["share_limits"])
for group in sorted_share_limits:
self.share_limits[group] = {}
self.share_limits[group]["priority"] = sorted_share_limits[group]["priority"]
self.share_limits[group]["include_all_tags"] = self.util.check_for_attribute(
self.data,
"include_all_tags",
parent="share_limits",
subparent=group,
var_type="list",
default_is_none=True,
do_print=False,
save=False,
)
self.share_limits[group]["include_any_tags"] = self.util.check_for_attribute(
self.data,
"include_any_tags",
parent="share_limits",
subparent=group,
var_type="list",
default_is_none=True,
do_print=False,
save=False,
)
self.share_limits[group]["exclude_all_tags"] = self.util.check_for_attribute(
self.data,
"exclude_all_tags",
parent="share_limits",
subparent=group,
var_type="list",
default_is_none=True,
do_print=False,
save=False,
)
self.share_limits[group]["exclude_any_tags"] = self.util.check_for_attribute(
self.data,
"exclude_any_tags",
parent="share_limits",
subparent=group,
var_type="list",
default_is_none=True,
do_print=False,
save=False,
)
self.share_limits[group]["categories"] = self.util.check_for_attribute(
self.data,
"categories",
parent="share_limits",
subparent=group,
var_type="list",
default_is_none=True,
do_print=False,
save=False,
)
self.share_limits[group]["cleanup"] = self.util.check_for_attribute(
self.data, "cleanup", parent="share_limits", subparent=group, var_type="bool", default=False, do_print=False
)
self.share_limits[group]["max_ratio"] = self.util.check_for_attribute(
self.data,
"max_ratio",
parent="share_limits",
subparent=group,
var_type="float",
min_int=-2,
default=-1,
do_print=False,
save=False,
)
self.share_limits[group]["max_seeding_time"] = self.util.check_for_attribute(
self.data,
"max_seeding_time",
parent="share_limits",
subparent=group,
var_type="int",
min_int=-2,
default=-1,
do_print=False,
save=False,
)
self.share_limits[group]["min_seeding_time"] = self.util.check_for_attribute(
self.data,
"min_seeding_time",
parent="share_limits",
subparent=group,
var_type="int",
min_int=0,
default=0,
do_print=False,
save=False,
)
self.share_limits[group]["limit_upload_speed"] = self.util.check_for_attribute(
self.data,
"limit_upload_speed",
parent="share_limits",
subparent=group,
var_type="int",
min_int=-1,
default=0,
do_print=False,
save=False,
)
self.share_limits[group]["resume_torrent_after_change"] = self.util.check_for_attribute(
self.data,
"resume_torrent_after_change",
parent="share_limits",
subparent=group,
var_type="bool",
default=True,
do_print=False,
save=False,
)
self.share_limits[group]["add_group_to_tag"] = self.util.check_for_attribute(
self.data,
"add_group_to_tag",
parent="share_limits",
subparent=group,
var_type="bool",
default=True,
do_print=False,
save=False,
)
self.share_limits[group]["torrents"] = []
else:
if self.commands["share_limits"]:
err = "Config Error: share_limits. No valid grouping found."
self.notify(err, "Config")
raise Failed(err)

View file

@ -11,8 +11,11 @@ class Category:
self.config = qbit_manager.config
self.client = qbit_manager.client
self.stats = 0
self.torrents_updated = [] # List of torrents updated
self.notify_attr = [] # List of single torrent attributes to send to notifiarr
self.category()
self.config.webhooks_factory.notify(self.torrents_updated, self.notify_attr, group_by="category")
def category(self):
"""Update category for torrents that don't have any category defined and returns total number categories updated"""
@ -40,6 +43,7 @@ class Category:
def update_cat(self, torrent, new_cat, cat_change):
"""Update category based on the torrent information"""
tracker = self.qbt.get_tags(torrent.trackers)
t_name = torrent.name
old_cat = torrent.category
if not self.config.dry_run:
try:
@ -55,7 +59,7 @@ class Category:
self.client.torrent_categories.create_category(name=new_cat, save_path=torrent.save_path)
torrent.set_category(category=new_cat)
body = []
body += logger.print_line(logger.insert_space(f"Torrent Name: {torrent.name}", 3), self.config.loglevel)
body += logger.print_line(logger.insert_space(f"Torrent Name: {t_name}", 3), self.config.loglevel)
if cat_change:
body += logger.print_line(logger.insert_space(f"Old Category: {old_cat}", 3), self.config.loglevel)
title = "Moving Categories"
@ -67,10 +71,12 @@ class Category:
"function": "cat_update",
"title": title,
"body": "\n".join(body),
"torrent_name": torrent.name,
"torrents": [t_name],
"torrent_category": new_cat,
"torrent_tag": ", ".join(tracker["tag"]),
"torrent_tracker": tracker["url"],
"notifiarr_indexer": tracker["notifiarr"],
}
self.config.send_notifications(attr)
self.notify_attr.append(attr)
self.torrents_updated.append(t_name)
self.stats += 1

View file

@ -15,6 +15,9 @@ class CrossSeed:
self.stats_added = 0
self.stats_tagged = 0
self.torrents_updated = [] # List of torrents added by cross-seed
self.notify_attr = [] # List of single torrent attributes to send to notifiarr
self.cross_seed()
def cross_seed(self):
@ -56,13 +59,14 @@ class CrossSeed:
"function": "cross_seed",
"title": "Adding New Cross-Seed Torrent",
"body": "\n".join(body),
"torrent_name": t_name,
"torrents": [t_name],
"torrent_category": category,
"torrent_save_path": dest,
"torrent_tag": "cross-seed",
"torrent_tracker": t_tracker,
}
self.config.send_notifications(attr)
self.notify_attr.append(attr)
self.torrents_updated.append(t_name)
self.stats_added += 1
if not self.config.dry_run:
self.client.torrents.add(
@ -95,6 +99,10 @@ class CrossSeed:
else:
logger.print_line(error, "WARNING")
self.config.notify(error, "cross-seed", False)
self.config.webhooks_factory.notify(self.torrents_updated, self.notify_attr, group_by="category")
self.torrents_updated = []
self.notify_attr = []
# Tag missing cross-seed torrents tags
for torrent in self.qbt.torrent_list:
t_name = torrent.name
@ -113,15 +121,16 @@ class CrossSeed:
"function": "tag_cross_seed",
"title": "Tagging Cross-Seed Torrent",
"body": body,
"torrent_name": t_name,
"torrents": [t_name],
"torrent_category": t_cat,
"torrent_tag": "cross-seed",
"torrent_tracker": tracker,
}
self.config.send_notifications(attr)
self.notify_attr.append(attr)
self.torrents_updated.append(t_name)
if not self.config.dry_run:
torrent.add_tags(tags="cross-seed")
self.config.webhooks_factory.notify(self.torrents_updated, self.notify_attr, group_by="category")
numcategory = Counter(categories)
for cat in numcategory:
if numcategory[cat] > 0:

View file

@ -13,7 +13,14 @@ class ReCheck:
self.stats_resumed = 0
self.stats_rechecked = 0
self.torrents_updated_recheck = [] # List of torrents updated
self.notify_attr_recheck = [] # List of single torrent attributes to send to notifiarr
self.torrents_updated_resume = [] # List of torrents updated
self.notify_attr_resume = [] # List of single torrent attributes to send to notifiarr
self.recheck()
self.config.webhooks_factory.notify(self.torrents_updated_resume, self.notify_attr_resume, group_by="tag")
self.config.webhooks_factory.notify(self.torrents_updated_recheck, self.notify_attr_recheck, group_by="tag")
def recheck(self):
"""Function used to recheck paused torrents sorted by size and resume torrents that are completed"""
@ -24,30 +31,34 @@ class ReCheck:
if torrent_list:
for torrent in torrent_list:
tracker = self.qbt.get_tags(torrent.trackers)
t_name = torrent.name
t_category = torrent.category
# Resume torrent if completed
if torrent.progress == 1:
if torrent.max_ratio < 0 and torrent.max_seeding_time < 0:
self.stats_resumed += 1
body = logger.print_line(
f"{'Not Resuming' if self.config.dry_run else 'Resuming'} [{tracker['tag']}] - {torrent.name}",
f"{'Not Resuming' if self.config.dry_run else 'Resuming'} [{tracker['tag']}] - {t_name}",
self.config.loglevel,
)
attr = {
"function": "recheck",
"title": "Resuming Torrent",
"body": body,
"torrent_name": torrent.name,
"torrent_category": torrent.category,
"torrents": [t_name],
"torrent_tag": tracker["tag"],
"torrent_category": t_category,
"torrent_tracker": tracker["url"],
"notifiarr_indexer": tracker["notifiarr"],
}
self.config.send_notifications(attr)
self.torrents_updated_resume.append(t_name)
self.notify_attr_resume.append(attr)
if not self.config.dry_run:
torrent.resume()
else:
# Check to see if torrent meets AutoTorrentManagement criteria
logger.debug("DEBUG: Torrent to see if torrent meets AutoTorrentManagement Criteria")
logger.debug(logger.insert_space(f"- Torrent Name: {torrent.name}", 2))
logger.debug(logger.insert_space(f"- Torrent Name: {t_name}", 2))
logger.debug(
logger.insert_space(f"-- Ratio vs Max Ratio: {torrent.ratio:.2f} < {torrent.max_ratio:.2f}", 4)
)
@ -74,42 +85,45 @@ class ReCheck:
):
self.stats_resumed += 1
body = logger.print_line(
f"{'Not Resuming' if self.config.dry_run else 'Resuming'} [{tracker['tag']}] - "
f"{torrent.name}",
f"{'Not Resuming' if self.config.dry_run else 'Resuming'} [{tracker['tag']}] - " f"{t_name}",
self.config.loglevel,
)
attr = {
"function": "recheck",
"title": "Resuming Torrent",
"body": body,
"torrent_name": torrent.name,
"torrent_category": torrent.category,
"torrents": [t_name],
"torrent_tag": tracker["tag"],
"torrent_category": t_category,
"torrent_tracker": tracker["url"],
"notifiarr_indexer": tracker["notifiarr"],
}
self.config.send_notifications(attr)
self.torrents_updated_resume.append(t_name)
self.notify_attr_resume.append(attr)
if not self.config.dry_run:
torrent.resume()
# Recheck
elif (
torrent.progress == 0
and self.qbt.torrentinfo[torrent.name]["is_complete"]
and self.qbt.torrentinfo[t_name]["is_complete"]
and not torrent.state_enum.is_checking
):
self.stats_rechecked += 1
body = logger.print_line(
f"{'Not Rechecking' if self.config.dry_run else 'Rechecking'} [{tracker['tag']}] - {torrent.name}",
f"{'Not Rechecking' if self.config.dry_run else 'Rechecking'} [{tracker['tag']}] - {t_name}",
self.config.loglevel,
)
attr = {
"function": "recheck",
"title": "Rechecking Torrent",
"body": body,
"torrent_name": torrent.name,
"torrent_category": torrent.category,
"torrents": [t_name],
"torrent_tag": tracker["tag"],
"torrent_category": t_category,
"torrent_tracker": tracker["url"],
"notifiarr_indexer": tracker["notifiarr"],
}
self.config.send_notifications(attr)
self.torrents_updated_recheck.append(t_name)
self.notify_attr_recheck.append(attr)
if not self.config.dry_run:
torrent.recheck()

View file

@ -39,8 +39,12 @@ class RemoveUnregistered:
def remove_previous_errors(self):
"""Removes any previous torrents that were tagged as an error but are now working."""
torrents_updated = []
notify_attr = []
for torrent in self.qbt.torrentvalid:
check_tags = util.get_list(torrent.tags)
t_name = torrent.name
# Remove any error torrents Tags that are no longer unreachable.
if self.tag_error in check_tags:
tracker = self.qbt.get_tags(torrent.trackers)
@ -49,7 +53,7 @@ class RemoveUnregistered:
body += logger.print_line(
f"Previous Tagged {self.tag_error} torrent currently has a working tracker.", self.config.loglevel
)
body += logger.print_line(logger.insert_space(f"Torrent Name: {torrent.name}", 3), self.config.loglevel)
body += logger.print_line(logger.insert_space(f"Torrent Name: {t_name}", 3), self.config.loglevel)
body += logger.print_line(logger.insert_space(f"Removed Tag: {self.tag_error}", 4), self.config.loglevel)
body += logger.print_line(logger.insert_space(f'Tracker: {tracker["url"]}', 8), self.config.loglevel)
if not self.config.dry_run:
@ -58,13 +62,16 @@ class RemoveUnregistered:
"function": "untag_tracker_error",
"title": "Untagging Tracker Error Torrent",
"body": "\n".join(body),
"torrent_name": torrent.name,
"torrents": [t_name],
"torrent_category": torrent.category,
"torrent_tag": self.tag_error,
"torrent_tracker": tracker["url"],
"notifiarr_indexer": tracker["notifiarr"],
}
self.config.send_notifications(attr)
torrents_updated.append(t_name)
notify_attr.append(attr)
self.config.webhooks_factory.notify(torrents_updated, notify_attr, group_by="tag")
def check_for_unregistered_torrents_using_bhd_api(self, tracker, msg_up, torrent_hash):
"""
@ -82,6 +89,12 @@ class RemoveUnregistered:
return False
def process_torrent_issues(self):
"""Process torrent issues."""
self.torrents_updated_issue = [] # List of torrents updated
self.notify_attr_issue = [] # List of single torrent attributes to send to notifiarr
self.torrents_updated_unreg = [] # List of torrents updated
self.notify_attr_unreg = [] # List of single torrent attributes to send to notifiarr
for torrent in self.qbt.torrentissue:
self.t_name = torrent.name
self.t_cat = self.qbt.torrentinfo[self.t_name]["Category"]
@ -97,7 +110,10 @@ class RemoveUnregistered:
if TrackerStatus(trk.status) == TrackerStatus.NOT_WORKING:
# Tag any error torrents
if self.cfg_tag_error and self.tag_error not in check_tags:
self.tag_tracker_error(msg, tracker, torrent)
if not list_in_text(msg_up, TorrentMessages.IGNORE_MSGS) and not list_in_text(
msg_up, TorrentMessages.UNREGISTERED_MSGS
):
self.tag_tracker_error(msg, tracker, torrent)
# Check for unregistered torrents
if self.cfg_rem_unregistered:
if list_in_text(msg_up, TorrentMessages.UNREGISTERED_MSGS) and not list_in_text(
@ -121,6 +137,10 @@ class RemoveUnregistered:
"""Remove torrents with unregistered trackers."""
self.remove_previous_errors()
self.process_torrent_issues()
self.config.webhooks_factory.notify(self.torrents_updated_issue, self.notify_attr_issue, group_by="tag")
self.config.webhooks_factory.notify(self.torrents_updated_unreg, self.notify_attr_unreg, group_by="tag")
if self.cfg_rem_unregistered:
if self.stats_deleted >= 1 or self.stats_deleted_contents >= 1:
if self.stats_deleted >= 1:
@ -165,14 +185,15 @@ class RemoveUnregistered:
"function": "tag_tracker_error",
"title": "Tag Tracker Error Torrents",
"body": tor_error,
"torrent_name": self.t_name,
"torrents": [self.t_name],
"torrent_category": self.t_cat,
"torrent_tag": self.tag_error,
"torrent_status": msg,
"torrent_tracker": tracker["url"],
"notifiarr_indexer": tracker["notifiarr"],
}
self.config.send_notifications(attr)
self.torrents_updated_issue.append(self.t_name)
self.notify_attr_issue.append(attr)
if not self.config.dry_run:
torrent.add_tags(tags=self.tag_error)
@ -185,9 +206,10 @@ class RemoveUnregistered:
attr = {
"function": "rem_unregistered",
"title": "Removing Unregistered Torrents",
"torrent_name": self.t_name,
"torrents": [self.t_name],
"torrent_category": self.t_cat,
"torrent_status": msg,
"torrent_tag": ", ".join(tracker["tag"]),
"torrent_tracker": tracker["url"],
"notifiarr_indexer": tracker["notifiarr"],
}
@ -212,5 +234,6 @@ class RemoveUnregistered:
body += logger.print_line(logger.insert_space("Deleted .torrent AND content files.", 8), self.config.loglevel)
self.stats_deleted_contents += 1
attr["body"] = "\n".join(body)
self.config.send_notifications(attr)
self.torrents_updated_unreg.append(self.t_name)
self.notify_attr_unreg.append(attr)
self.qbt.torrentinfo[self.t_name]["count"] -= 1

View file

@ -0,0 +1,424 @@
import os
from datetime import timedelta
from modules import util
from modules.webhooks import GROUP_NOTIFICATION_LIMIT
logger = util.logger
class ShareLimits:
def __init__(self, qbit_manager):
self.qbt = qbit_manager
self.config = qbit_manager.config
self.client = qbit_manager.client
self.stats_tagged = 0 # counter for the number of share limits changed
self.stats_deleted = 0 # counter for the number of torrents that \
# meets the criteria for ratio limit/seed limit for deletion
self.stats_deleted_contents = 0 # counter for the number of torrents that \
# meets the criteria for ratio limit/seed limit for deletion including contents \
self.tdel_dict = {} # dictionary to track the torrent names and content path that meet the deletion criteria
self.root_dir = qbit_manager.config.root_dir # root directory of torrents
self.remote_dir = qbit_manager.config.remote_dir # remote directory of torrents
self.share_limits_config = qbit_manager.config.share_limits # configuration of share limits
self.torrents_updated = [] # list of torrents that have been updated
self.torrent_hash_checked = [] # list of torrent hashes that have been checked for share limits
self.share_limits_suffix_tag = qbit_manager.config.share_limits_suffix_tag # suffix tag for share limits
self.group_tag = None # tag for the share limit group
self.update_share_limits()
def update_share_limits(self):
"""Updates share limits for torrents based on grouping"""
logger.separator("Updating Share Limits based on priority", space=False, border=False)
torrent_list = self.qbt.get_torrents({"status_filter": "completed"})
self.assign_torrents_to_group(torrent_list)
for group_name, group_config in self.share_limits_config.items():
torrents = group_config["torrents"]
self.torrents_updated = []
self.tdel_dict = {}
if torrents:
self.update_share_limits_for_group(group_name, group_config, torrents)
attr = {
"function": "share_limits",
"title": f"Updating Share Limits for {group_name}. Priority {group_config['priority']}",
"body": f"Updated {len(self.torrents_updated)} torrents.",
"grouping": group_name,
"torrents": self.torrents_updated,
"torrent_tag": self.group_tag,
"torrent_max_ratio": group_config["max_ratio"],
"torrent_max_seeding_time": group_config["max_seeding_time"],
"torrent_min_seeding_time": group_config["min_seeding_time"],
"torrent_limit_upload_speed": group_config["limit_upload_speed"],
}
if len(self.torrents_updated) > 0:
self.config.send_notifications(attr)
if group_config["cleanup"] and len(self.tdel_dict) > 0:
self.cleanup_torrents_for_group(group_name, group_config["priority"])
def cleanup_torrents_for_group(self, group_name, priority):
"""Deletes torrents that have reached the ratio/seed limit"""
logger.separator(
f"Cleaning up torrents that have reached ratio/seed limit for {group_name}. Priority {priority}",
space=False,
border=False,
)
group_notifications = len(self.tdel_dict) > GROUP_NOTIFICATION_LIMIT
t_deleted = set()
t_deleted_and_contents = set()
for torrent_hash, torrent_dict in self.tdel_dict.items():
torrent = torrent_dict["torrent"]
t_name = torrent.name
t_count = self.qbt.torrentinfo[t_name]["count"]
t_msg = self.qbt.torrentinfo[t_name]["msg"]
t_status = self.qbt.torrentinfo[t_name]["status"]
# Double check that the content path is the same before we delete anything
if torrent["content_path"].replace(self.root_dir, self.remote_dir) == torrent_dict["content_path"]:
tracker = self.qbt.get_tags(torrent.trackers)
body = []
body += logger.print_line(logger.insert_space(f"Torrent Name: {t_name}", 3), self.config.loglevel)
body += logger.print_line(logger.insert_space(f'Tracker: {tracker["url"]}', 8), self.config.loglevel)
body += logger.print_line(torrent_dict["body"], self.config.loglevel)
body += logger.print_line(
logger.insert_space("Cleanup: True [Meets Share Limits]", 8),
self.config.loglevel,
)
attr = {
"function": "cleanup_share_limits",
"title": "Share limit removal",
"grouping": group_name,
"torrents": [t_name],
"torrent_category": torrent.category,
"cleanup": True,
"torrent_tracker": tracker["url"],
"notifiarr_indexer": tracker["notifiarr"],
}
if os.path.exists(torrent["content_path"].replace(self.root_dir, self.remote_dir)):
# Checks if any of the original torrents are working
if t_count > 1 and ("" in t_msg or 2 in t_status):
self.stats_deleted += 1
attr["torrents_deleted_and_contents"] = False
t_deleted.add(t_name)
if not self.config.dry_run:
self.qbt.tor_delete_recycle(torrent, attr)
body += logger.print_line(
logger.insert_space("Deleted .torrent but NOT content files.", 8),
self.config.loglevel,
)
else:
self.stats_deleted_contents += 1
attr["torrents_deleted_and_contents"] = True
t_deleted_and_contents.add(t_name)
if not self.config.dry_run:
self.qbt.tor_delete_recycle(torrent, attr)
body += logger.print_line(
logger.insert_space("Deleted .torrent AND content files.", 8), self.config.loglevel
)
else:
self.stats_deleted += 1
attr["torrents_deleted_and_contents"] = False
t_deleted.add(t_name)
if not self.config.dry_run:
self.qbt.tor_delete_recycle(torrent, attr)
body += logger.print_line(
logger.insert_space("Deleted .torrent but NOT content files.", 8), self.config.loglevel
)
attr["body"] = "\n".join(body)
if not group_notifications:
self.config.send_notifications(attr)
self.qbt.torrentinfo[t_name]["count"] -= 1
if group_notifications:
if t_deleted:
attr = {
"function": "cleanup_share_limits",
"title": "Share limit removal - Deleted .torrent but NOT content files.",
"body": f"Deleted {self.stats_deleted} .torrents but NOT content files.",
"grouping": group_name,
"torrents": list(t_deleted),
"torrent_category": None,
"cleanup": True,
"torrent_tracker": None,
"notifiarr_indexer": None,
"torrents_deleted_and_contents": False,
}
self.config.send_notifications(attr)
if t_deleted_and_contents:
attr = {
"function": "cleanup_share_limits",
"title": "Share limit removal - Deleted .torrent AND content files.",
"body": f"Deleted {self.stats_deleted_contents} .torrents AND content files.",
"grouping": group_name,
"torrents": list(t_deleted_and_contents),
"torrent_category": None,
"cleanup": True,
"torrent_tracker": None,
"notifiarr_indexer": None,
"torrents_deleted_and_contents": True,
}
self.config.send_notifications(attr)
def update_share_limits_for_group(self, group_name, group_config, torrents):
"""Updates share limits for torrents in a group"""
logger.separator(
f"Updating Share Limits for [Group {group_name}] [Priority {group_config['priority']}]", space=False, border=False
)
for torrent in torrents:
t_name = torrent.name
t_hash = torrent.hash
tracker = self.qbt.get_tags(torrent.trackers)
check_max_ratio = group_config["max_ratio"] != torrent.max_ratio
check_max_seeding_time = group_config["max_seeding_time"] != torrent.max_seeding_time
# Treat upload limit as -1 if it is set to 0 (unlimited)
torrent_upload_limit = -1 if round(torrent.up_limit / 1024) == 0 else round(torrent.up_limit / 1024)
if group_config["limit_upload_speed"] == 0:
group_config["limit_upload_speed"] = -1
check_limit_upload_speed = group_config["limit_upload_speed"] != torrent_upload_limit
hash_not_prev_checked = t_hash not in self.torrent_hash_checked
logger.trace(f"Torrent: {t_name} [Hash: {t_hash}]")
logger.trace(f"Torrent Category: {torrent.category}")
logger.trace(f"Torrent Tags: {torrent.tags}")
logger.trace(f"Grouping: {group_name}")
logger.trace(f"Config Max Ratio vs Torrent Max Ratio:{group_config['max_ratio']} vs {torrent.max_ratio}")
logger.trace(f"check_max_ratio: {check_max_ratio}")
logger.trace(
"Config Max Seeding Time vs Torrent Max Seeding Time: "
f"{group_config['max_seeding_time']} vs {torrent.max_seeding_time}"
)
logger.trace(f"check_max_seeding_time: {check_max_seeding_time}")
logger.trace(
"Config Limit Upload Speed vs Torrent Limit Upload Speed: "
f"{group_config['limit_upload_speed']} vs {torrent_upload_limit}"
)
logger.trace(f"check_limit_upload_speed: {check_limit_upload_speed}")
logger.trace(f"hash_not_prev_checked: {hash_not_prev_checked}")
if (check_max_ratio or check_max_seeding_time or check_limit_upload_speed) and hash_not_prev_checked:
if "MinSeedTimeNotReached" not in torrent.tags:
self.group_tag = f"{group_name}{self.share_limits_suffix_tag}" if group_config["add_group_to_tag"] else None
logger.print_line(logger.insert_space(f"Torrent Name: {t_name}", 3), self.config.loglevel)
logger.print_line(logger.insert_space(f'Tracker: {tracker["url"]}', 8), self.config.loglevel)
if self.group_tag:
logger.print_line(logger.insert_space(f"Added Tag: {self.group_tag}", 8), self.config.loglevel)
self.tag_and_update_share_limits_for_torrent(torrent, group_config)
self.stats_tagged += 1
self.torrents_updated.append(t_name)
# Cleanup torrents if the torrent meets the criteria for deletion and cleanup is enabled
if group_config["cleanup"]:
tor_reached_seed_limit = self.has_reached_seed_limit(
torrent=torrent,
max_ratio=group_config["max_ratio"],
max_seeding_time=group_config["max_seeding_time"],
min_seeding_time=group_config["min_seeding_time"],
resume_torrent=group_config["resume_torrent_after_change"],
tracker=tracker["url"],
)
if tor_reached_seed_limit:
if t_hash not in self.tdel_dict:
self.tdel_dict[t_hash] = {}
self.tdel_dict[t_hash]["torrent"] = torrent
self.tdel_dict[t_hash]["content_path"] = torrent["content_path"].replace(self.root_dir, self.remote_dir)
self.tdel_dict[t_hash]["body"] = tor_reached_seed_limit
self.torrent_hash_checked.append(t_hash)
def tag_and_update_share_limits_for_torrent(self, torrent, group_config):
"""Removes previous share limits tag, updates tag and share limits for a torrent, and resumes the torrent"""
# Remove previous share_limits tag
tags = util.get_list(torrent.tags)
for tag in tags:
if self.share_limits_suffix_tag in tag:
torrent.remove_tags(tag)
# Will tag the torrent with the group name if add_group_to_tag is True and set the share limits
self.set_tags_and_limits(
torrent=torrent,
max_ratio=group_config["max_ratio"],
max_seeding_time=group_config["max_seeding_time"],
limit_upload_speed=group_config["limit_upload_speed"],
tags=self.group_tag,
)
# Resume torrent if it was paused now that the share limit has changed
if torrent.state_enum.is_complete and group_config["resume_torrent_after_change"]:
if not self.config.dry_run:
torrent.resume()
def assign_torrents_to_group(self, torrent_list):
"""Assign torrents to a share limit group based on its tags and category"""
logger.info("Assigning torrents to share limit groups...")
for torrent in torrent_list:
tags = util.get_list(torrent.tags)
category = torrent.category or ""
grouping = self.get_share_limit_group(tags, category)
logger.trace(f"Torrent: {torrent.name} [Hash: {torrent.hash}] - Share Limit Group: {grouping}")
if grouping:
self.share_limits_config[grouping]["torrents"].append(torrent)
def get_share_limit_group(self, tags, category):
"""Get the share limit group based on the tags and category of the torrent"""
for group_name, group_config in self.share_limits_config.items():
check_tags = self.check_tags(
tags=tags,
include_all_tags=group_config["include_all_tags"],
include_any_tags=group_config["include_any_tags"],
exclude_all_tags=group_config["exclude_all_tags"],
exclude_any_tags=group_config["exclude_any_tags"],
)
check_category = self.check_category(category, group_config["categories"])
if check_tags and check_category:
return group_name
return None
def check_tags(self, tags, include_all_tags=set(), include_any_tags=set(), exclude_all_tags=set(), exclude_any_tags=set()):
"""Check if the torrent has the required tags"""
tags_set = set(tags)
if include_all_tags:
if not set(include_all_tags).issubset(tags_set):
return False
if include_any_tags:
if not set(include_any_tags).intersection(tags_set):
return False
if exclude_all_tags:
if set(exclude_all_tags).issubset(tags_set):
return False
if exclude_any_tags:
if set(exclude_any_tags).intersection(tags_set):
return False
return True
def check_category(self, category, categories):
"""Check if the torrent has the required category"""
if categories:
if category not in categories:
return False
return True
def set_tags_and_limits(
self, torrent, max_ratio, max_seeding_time, limit_upload_speed=None, tags=None, restore=False, do_print=True
):
"""Set tags and limits for a torrent"""
body = []
if limit_upload_speed:
if limit_upload_speed != -1:
msg = logger.insert_space(f"Limit UL Speed: {limit_upload_speed} kB/s", 1)
if do_print:
body += logger.print_line(msg, self.config.loglevel)
else:
body.append(msg)
if max_ratio or max_seeding_time:
if (max_ratio == -2 and max_seeding_time == -2) and not restore:
msg = logger.insert_space("Share Limit: Use Global Share Limit", 4)
if do_print:
body += logger.print_line(msg, self.config.loglevel)
else:
body.append(msg)
elif (max_ratio == -1 and max_seeding_time == -1) and not restore:
msg = logger.insert_space("Share Limit: Set No Share Limit", 4)
if do_print:
body += logger.print_line(msg, self.config.loglevel)
else:
body.append(msg)
else:
if max_ratio != torrent.max_ratio and (not max_seeding_time or max_seeding_time < 0):
msg = logger.insert_space(f"Share Limit: Max Ratio = {max_ratio}", 4)
if do_print:
body += logger.print_line(msg, self.config.loglevel)
else:
body.append(msg)
elif max_seeding_time != torrent.max_seeding_time and (not max_ratio or max_ratio < 0):
msg = logger.insert_space(f"Share Limit: Max Seed Time = {max_seeding_time} min", 4)
if do_print:
body += logger.print_line(msg, self.config.loglevel)
else:
body.append(msg)
elif max_ratio != torrent.max_ratio or max_seeding_time != torrent.max_seeding_time:
msg = logger.insert_space(f"Share Limit: Max Ratio = {max_ratio}, Max Seed Time = {max_seeding_time} min", 4)
if do_print:
body += logger.print_line(msg, self.config.loglevel)
else:
body.append(msg)
# Update Torrents
if not self.config.dry_run:
if tags and tags not in torrent.tags:
torrent.add_tags(tags)
if limit_upload_speed:
if limit_upload_speed == -1:
torrent.set_upload_limit(-1)
else:
torrent.set_upload_limit(limit_upload_speed * 1024)
if not max_ratio:
max_ratio = torrent.max_ratio
if not max_seeding_time:
max_seeding_time = torrent.max_seeding_time
if "MinSeedTimeNotReached" in torrent.tags:
return []
torrent.set_share_limits(max_ratio, max_seeding_time)
return body
def has_reached_seed_limit(self, torrent, max_ratio, max_seeding_time, min_seeding_time, resume_torrent, tracker):
"""Check if torrent has reached seed limit"""
body = ""
def _has_reached_min_seeding_time_limit():
print_log = []
if torrent.seeding_time >= min_seeding_time * 60:
if "MinSeedTimeNotReached" in torrent.tags:
torrent.remove_tags(tags="MinSeedTimeNotReached")
return True
else:
if "MinSeedTimeNotReached" not in torrent.tags:
print_log += logger.print_line(logger.insert_space(f"Torrent Name: {torrent.name}", 3), self.config.loglevel)
print_log += logger.print_line(logger.insert_space(f"Tracker: {tracker}", 8), self.config.loglevel)
print_log += logger.print_line(
logger.insert_space(
f"Min seed time not met: {timedelta(seconds=torrent.seeding_time)} <= "
f"{timedelta(minutes=min_seeding_time)}. Removing Share Limits so qBittorrent can continue seeding.",
8,
),
self.config.loglevel,
)
print_log += logger.print_line(
logger.insert_space("Adding Tag: MinSeedTimeNotReached", 8), self.config.loglevel
)
if not self.config.dry_run:
torrent.add_tags("MinSeedTimeNotReached")
torrent.set_share_limits(-1, -1)
if resume_torrent:
torrent.resume()
return False
def _has_reached_seeding_time_limit():
nonlocal body
seeding_time_limit = None
if not max_seeding_time:
return False
if max_seeding_time >= 0:
seeding_time_limit = max_seeding_time
elif max_seeding_time == -2 and self.global_max_seeding_time_enabled:
seeding_time_limit = self.global_max_seeding_time
else:
return False
if seeding_time_limit:
if (torrent.seeding_time >= seeding_time_limit * 60) and _has_reached_min_seeding_time_limit():
body += logger.insert_space(
f"Seeding Time vs Max Seed Time: {timedelta(seconds=torrent.seeding_time)} >= "
f"{timedelta(minutes=seeding_time_limit)}",
8,
)
return True
return False
if max_ratio:
if max_ratio >= 0:
if torrent.ratio >= max_ratio and _has_reached_min_seeding_time_limit():
body += logger.insert_space(f"Ratio vs Max Ratio: {torrent.ratio:.2f} >= {max_ratio:.2f}", 8)
return body
elif max_ratio == -2 and self.global_max_ratio_enabled and _has_reached_min_seeding_time_limit():
if torrent.ratio >= self.global_max_ratio:
body += logger.insert_space(
f"Ratio vs Global Max Ratio: {torrent.ratio:.2f} >= {self.global_max_ratio:.2f}", 8
)
return body
if _has_reached_seeding_time_limit():
return body
return False

View file

@ -1,5 +1,3 @@
import os
from modules import util
logger = util.logger
@ -12,123 +10,47 @@ class TagNoHardLinks:
self.client = qbit_manager.client
self.stats_tagged = 0 # counter for the number of torrents that has no hardlinks
self.stats_untagged = 0 # counter for number of torrents that previously had no hardlinks but now have hardlinks
self.stats_deleted = 0 # counter for the number of torrents that has no hardlinks and \
# meets the criteria for ratio limit/seed limit for deletion
self.stats_deleted_contents = 0 # counter for the number of torrents that has no hardlinks and \
# meets the criteria for ratio limit/seed limit for deletion including contents
self.tdel_dict = {} # dictionary to track the torrent names and content path that meet the deletion criteria
self.root_dir = qbit_manager.config.root_dir
self.remote_dir = qbit_manager.config.remote_dir
self.nohardlinks = qbit_manager.config.nohardlinks
self.nohardlinks_tag = qbit_manager.config.nohardlinks_tag
self.torrents_updated_tagged = [] # List of torrents updated
self.notify_attr_tagged = [] # List of single torrent attributes to send to notifiarr
self.torrents_updated_untagged = [] # List of torrents updated
self.notify_attr_untagged = [] # List of single torrent attributes to send to notifiarr
self.tag_nohardlinks()
def add_tag_no_hl(self, torrent, tracker, category, max_ratio, max_seeding_time, add_tag=True):
self.config.webhooks_factory.notify(self.torrents_updated_tagged, self.notify_attr_tagged, group_by="tag")
self.config.webhooks_factory.notify(self.torrents_updated_untagged, self.notify_attr_untagged, group_by="tag")
def add_tag_no_hl(self, torrent, tracker, category):
"""Add tag nohardlinks_tag to torrents with no hardlinks"""
body = []
body.append(logger.insert_space(f"Torrent Name: {torrent.name}", 3))
if add_tag:
body.append(logger.insert_space(f"Added Tag: {self.nohardlinks_tag}", 6))
title = "Tagging Torrents with No Hardlinks"
else:
title = "Changing Share Ratio of Torrents with No Hardlinks"
body.append(logger.insert_space(f"Added Tag: {self.nohardlinks_tag}", 6))
title = "Tagging Torrents with No Hardlinks"
body.append(logger.insert_space(f'Tracker: {tracker["url"]}', 8))
body_tags_and_limits = self.qbt.set_tags_and_limits(
torrent,
max_ratio,
max_seeding_time,
self.nohardlinks[category]["limit_upload_speed"],
tags=self.nohardlinks_tag,
do_print=False,
)
if body_tags_and_limits or add_tag:
self.stats_tagged += 1
# Resume torrent if it was paused now that the share limit has changed
if torrent.state_enum.is_complete and self.nohardlinks[category]["resume_torrent_after_untagging_noHL"]:
if not self.config.dry_run:
torrent.resume()
body.extend(body_tags_and_limits)
for rcd in body:
logger.print_line(rcd, self.config.loglevel)
attr = {
"function": "tag_nohardlinks",
"title": title,
"body": "\n".join(body),
"torrent_name": torrent.name,
"torrent_category": torrent.category,
"torrent_tag": self.nohardlinks_tag,
"torrent_tracker": tracker["url"],
"notifiarr_indexer": tracker["notifiarr"],
"torrent_max_ratio": max_ratio,
"torrent_max_seeding_time": max_seeding_time,
"torrent_limit_upload_speed": self.nohardlinks[category]["limit_upload_speed"],
}
self.config.send_notifications(attr)
def cleanup_tagged_torrents_with_no_hardlinks(self, category):
"""Delete any tagged torrents that meet noHL criteria"""
# loop through torrent list again for cleanup purposes
if self.nohardlinks[category]["cleanup"]:
torrent_list = self.qbt.get_torrents({"category": category, "status_filter": "completed"})
for torrent in torrent_list:
t_name = torrent.name
t_hash = torrent.hash
if t_hash in self.tdel_dict and self.nohardlinks_tag in torrent.tags:
t_count = self.qbt.torrentinfo[t_name]["count"]
t_msg = self.qbt.torrentinfo[t_name]["msg"]
t_status = self.qbt.torrentinfo[t_name]["status"]
# Double check that the content path is the same before we delete anything
if torrent["content_path"].replace(self.root_dir, self.remote_dir) == self.tdel_dict[t_hash]["content_path"]:
tracker = self.qbt.get_tags(torrent.trackers)
body = []
body += logger.print_line(logger.insert_space(f"Torrent Name: {t_name}", 3), self.config.loglevel)
body += logger.print_line(logger.insert_space(f'Tracker: {tracker["url"]}', 8), self.config.loglevel)
body += logger.print_line(self.tdel_dict[t_hash]["body"], self.config.loglevel)
body += logger.print_line(
logger.insert_space("Cleanup: True [No hardlinks found and meets Share Limits.]", 8),
self.config.loglevel,
)
attr = {
"function": "cleanup_tag_nohardlinks",
"title": "Removing NoHL Torrents and meets Share Limits",
"torrent_name": t_name,
"torrent_category": torrent.category,
"cleanup": "True",
"torrent_tracker": tracker["url"],
"notifiarr_indexer": tracker["notifiarr"],
}
if os.path.exists(torrent["content_path"].replace(self.root_dir, self.remote_dir)):
# Checks if any of the original torrents are working
if t_count > 1 and ("" in t_msg or 2 in t_status):
self.stats_deleted += 1
attr["torrents_deleted_and_contents"] = False
if not self.config.dry_run:
self.qbt.tor_delete_recycle(torrent, attr)
body += logger.print_line(
logger.insert_space("Deleted .torrent but NOT content files.", 8),
self.config.loglevel,
)
else:
self.stats_deleted_contents += 1
attr["torrents_deleted_and_contents"] = True
if not self.config.dry_run:
self.qbt.tor_delete_recycle(torrent, attr)
body += logger.print_line(
logger.insert_space("Deleted .torrent AND content files.", 8), self.config.loglevel
)
else:
self.stats_deleted += 1
attr["torrents_deleted_and_contents"] = False
if not self.config.dry_run:
self.qbt.tor_delete_recycle(torrent, attr)
body += logger.print_line(
logger.insert_space("Deleted .torrent but NOT content files.", 8), self.config.loglevel
)
attr["body"] = "\n".join(body)
self.config.send_notifications(attr)
self.qbt.torrentinfo[t_name]["count"] -= 1
if not self.config.dry_run:
torrent.add_tags(self.nohardlinks_tag)
self.stats_tagged += 1
for rcd in body:
logger.print_line(rcd, self.config.loglevel)
attr = {
"function": "tag_nohardlinks",
"title": title,
"body": "\n".join(body),
"torrents": [torrent.name],
"torrent_category": torrent.category,
"torrent_tag": self.nohardlinks_tag,
"torrent_tracker": tracker["url"],
"notifiarr_indexer": tracker["notifiarr"],
}
self.torrents_updated_tagged.append(torrent.name)
self.notify_attr_tagged.append(attr)
def check_previous_nohardlinks_tagged_torrents(self, has_nohardlinks, torrent, tracker, category):
"""
@ -145,42 +67,20 @@ class TagNoHardLinks:
)
body += logger.print_line(logger.insert_space(f"Removed Tag: {self.nohardlinks_tag}", 6), self.config.loglevel)
body += logger.print_line(logger.insert_space(f'Tracker: {tracker["url"]}', 8), self.config.loglevel)
body += logger.print_line(
f"{'Not Reverting' if self.config.dry_run else 'Reverting'} to tracker or Global share limits.",
self.config.loglevel,
)
restore_max_ratio = tracker["max_ratio"]
restore_max_seeding_time = tracker["max_seeding_time"]
restore_limit_upload_speed = tracker["limit_upload_speed"]
if restore_max_ratio is None:
restore_max_ratio = -2
if restore_max_seeding_time is None:
restore_max_seeding_time = -2
if restore_limit_upload_speed is None:
restore_limit_upload_speed = -1
if not self.config.dry_run:
torrent.remove_tags(tags=self.nohardlinks_tag)
body.extend(
self.qbt.set_tags_and_limits(
torrent, restore_max_ratio, restore_max_seeding_time, restore_limit_upload_speed, restore=True
)
)
if torrent.state_enum.is_complete and self.nohardlinks[category]["resume_torrent_after_untagging_noHL"]:
torrent.resume()
attr = {
"function": "untag_nohardlinks",
"title": "Untagging Previous Torrents that now have hardlinks",
"body": "\n".join(body),
"torrent_name": torrent.name,
"torrents": [torrent.name],
"torrent_category": torrent.category,
"torrent_tag": self.nohardlinks_tag,
"torrent_tracker": tracker["url"],
"notifiarr_indexer": tracker["notifiarr"],
"torrent_max_ratio": restore_max_ratio,
"torrent_max_seeding_time": restore_max_seeding_time,
"torrent_limit_upload_speed": restore_limit_upload_speed,
}
self.config.send_notifications(attr)
self.torrents_updated_untagged.append(torrent.name)
self.notify_attr_untagged.append(attr)
def tag_nohardlinks(self):
"""Tag torrents with no hardlinks"""
@ -211,120 +111,17 @@ class TagNoHardLinks:
# Cleans up previously tagged nohardlinks_tag torrents that no longer have hardlinks
if has_nohardlinks:
tracker = self.qbt.get_tags(torrent.trackers)
# Determine min_seeding_time.
# If only tracker setting is set, use tracker's min_seeding_time
# If only nohardlinks category setting is set, use nohardlinks category's min_seeding_time
# If both tracker and nohardlinks category setting is set, use the larger of the two
# If neither set, use 0 (no limit)
min_seeding_time = 0
logger.trace(f'tracker["min_seeding_time"] is {tracker["min_seeding_time"]}')
logger.trace(f'nohardlinks[category]["min_seeding_time"] is {nohardlinks[category]["min_seeding_time"]}')
if tracker["min_seeding_time"] is not None and nohardlinks[category]["min_seeding_time"] is not None:
if tracker["min_seeding_time"] >= nohardlinks[category]["min_seeding_time"]:
min_seeding_time = tracker["min_seeding_time"]
logger.trace(f'Using tracker["min_seeding_time"] {min_seeding_time}')
else:
min_seeding_time = nohardlinks[category]["min_seeding_time"]
logger.trace(f'Using nohardlinks[category]["min_seeding_time"] {min_seeding_time}')
elif nohardlinks[category]["min_seeding_time"]:
min_seeding_time = nohardlinks[category]["min_seeding_time"]
logger.trace(f'Using nohardlinks[category]["min_seeding_time"] {min_seeding_time}')
elif tracker["min_seeding_time"]:
min_seeding_time = tracker["min_seeding_time"]
logger.trace(f'Using tracker["min_seeding_time"] {min_seeding_time}')
else:
logger.trace(f"Using default min_seeding_time {min_seeding_time}")
# Determine max_ratio.
# If only tracker setting is set, use tracker's max_ratio
# If only nohardlinks category setting is set, use nohardlinks category's max_ratio
# If both tracker and nohardlinks category setting is set, use the larger of the two
# If neither set, use -1 (no limit)
max_ratio = -1
logger.trace(f'tracker["max_ratio"] is {tracker["max_ratio"]}')
logger.trace(f'nohardlinks[category]["max_ratio"] is {nohardlinks[category]["max_ratio"]}')
if tracker["max_ratio"] is not None and nohardlinks[category]["max_ratio"] is not None:
if tracker["max_ratio"] >= nohardlinks[category]["max_ratio"]:
max_ratio = tracker["max_ratio"]
logger.trace(f'Using (tracker["max_ratio"]) {max_ratio}')
else:
max_ratio = nohardlinks[category]["max_ratio"]
logger.trace(f'Using (nohardlinks[category]["max_ratio"]) {max_ratio}')
elif nohardlinks[category]["max_ratio"]:
max_ratio = nohardlinks[category]["max_ratio"]
logger.trace(f'Using (nohardlinks[category]["max_ratio"]) {max_ratio}')
elif tracker["max_ratio"]:
max_ratio = tracker["max_ratio"]
logger.trace(f'Using (tracker["max_ratio"]) {max_ratio}')
else:
logger.trace(f"Using default (max_ratio) {max_ratio}")
# Determine max_seeding_time.
# If only tracker setting is set, use tracker's max_seeding_time
# If only nohardlinks category setting is set, use nohardlinks category's max_seeding_time
# If both tracker and nohardlinks category setting is set, use the larger of the two
# If neither set, use -1 (no limit)
max_seeding_time = -1
logger.trace(f'tracker["max_seeding_time"] is {tracker["max_seeding_time"]}')
logger.trace(f'nohardlinks[category]["max_seeding_time"] is {nohardlinks[category]["max_seeding_time"]}')
if tracker["max_seeding_time"] is not None and nohardlinks[category]["max_seeding_time"] is not None:
if tracker["max_seeding_time"] >= nohardlinks[category]["max_seeding_time"]:
max_seeding_time = tracker["max_seeding_time"]
logger.trace(f'Using (tracker["max_seeding_time"]) {max_seeding_time}')
else:
max_seeding_time = nohardlinks[category]["max_seeding_time"]
logger.trace(f'Using (nohardlinks[category]["max_seeding_time"]) {max_seeding_time}')
elif nohardlinks[category]["max_seeding_time"]:
max_seeding_time = nohardlinks[category]["max_seeding_time"]
logger.trace(f'Using (nohardlinks[category]["max_seeding_time"]) {max_seeding_time}')
elif tracker["max_seeding_time"]:
max_seeding_time = tracker["max_seeding_time"]
logger.trace(f'Using (tracker["max_seeding_time"]) {max_seeding_time}')
else:
logger.trace(f"Using default (max_seeding_time) {max_seeding_time}")
# Will only tag new torrents that don't have nohardlinks_tag tag
if self.nohardlinks_tag not in torrent.tags:
self.add_tag_no_hl(
torrent=torrent,
tracker=tracker,
category=category,
max_ratio=max_ratio,
max_seeding_time=max_seeding_time,
add_tag=True,
)
# Deletes torrent with data if cleanup is set to true and meets the ratio/seeding requirements
if nohardlinks[category]["cleanup"] and len(nohardlinks[category]) > 0:
tor_reach_seed_limit = self.qbt.has_reached_seed_limit(
torrent,
max_ratio,
max_seeding_time,
min_seeding_time,
nohardlinks[category]["resume_torrent_after_untagging_noHL"],
tracker["url"],
)
if tor_reach_seed_limit:
if torrent.hash not in self.tdel_dict:
self.tdel_dict[torrent.hash] = {}
self.tdel_dict[torrent.hash]["content_path"] = torrent["content_path"].replace(
self.root_dir, self.remote_dir
)
self.tdel_dict[torrent.hash]["body"] = tor_reach_seed_limit
else:
# Updates torrent to see if "MinSeedTimeNotReached" tag has been added
torrent = self.qbt.get_torrents({"torrent_hashes": [torrent.hash]}).data[0]
# Checks to see if previously nohardlinks_tag share limits have changed.
self.add_tag_no_hl(
torrent=torrent,
tracker=tracker,
category=category,
max_ratio=max_ratio,
max_seeding_time=max_seeding_time,
add_tag=False,
)
self.check_previous_nohardlinks_tagged_torrents(has_nohardlinks, torrent, tracker, category)
self.cleanup_tagged_torrents_with_no_hardlinks(category)
if self.stats_tagged >= 1:
logger.print_line(
f"{'Did not Tag/set' if self.config.dry_run else 'Tag/set'} share limits for {self.stats_tagged} "
f"{'Did not Tag' if self.config.dry_run else 'Added Tag'} for {self.stats_tagged} "
f".torrent{'s.' if self.stats_tagged > 1 else '.'}",
self.config.loglevel,
)
@ -333,19 +130,7 @@ class TagNoHardLinks:
if self.stats_untagged >= 1:
logger.print_line(
f"{'Did not delete' if self.config.dry_run else 'Deleted'} "
f"{self.nohardlinks_tag} tags / share limits for {self.stats_untagged} "
f"{self.nohardlinks_tag} tags for {self.stats_untagged} "
f".torrent{'s.' if self.stats_untagged > 1 else '.'}",
self.config.loglevel,
)
if self.stats_deleted >= 1:
logger.print_line(
f"{'Did not delete' if self.config.dry_run else 'Deleted'} {self.stats_deleted} "
f".torrent{'s' if self.stats_deleted > 1 else ''} but not content files.",
self.config.loglevel,
)
if self.stats_deleted_contents >= 1:
logger.print_line(
f"{'Did not delete' if self.config.dry_run else 'Deleted'} {self.stats_deleted_contents} "
f".torrent{'s' if self.stats_deleted_contents > 1 else ''} AND content files.",
self.config.loglevel,
)

View file

@ -9,50 +9,47 @@ class Tags:
self.config = qbit_manager.config
self.client = qbit_manager.client
self.stats = 0
self.share_limits_suffix_tag = qbit_manager.config.share_limits_suffix_tag # suffix tag for share limits
self.torrents_updated = [] # List of torrents updated
self.notify_attr = [] # List of single torrent attributes to send to notifiarr
self.tags()
self.config.webhooks_factory.notify(self.torrents_updated, self.notify_attr, group_by="tag")
def tags(self):
"""Update tags for torrents"""
ignore_tags = self.config.settings["ignoreTags_OnUpdate"]
logger.separator("Updating Tags", space=False, border=False)
for torrent in self.qbt.torrent_list:
check_tags = util.get_list(torrent.tags)
check_tags = [tag for tag in util.get_list(torrent.tags) if self.share_limits_suffix_tag not in tag]
if torrent.tags == "" or (len([trk for trk in check_tags if trk not in ignore_tags]) == 0):
tracker = self.qbt.get_tags(torrent.trackers)
if tracker["tag"]:
t_name = torrent.name
self.stats += len(tracker["tag"])
body = []
body += logger.print_line(logger.insert_space(f"Torrent Name: {torrent.name}", 3), self.config.loglevel)
body += logger.print_line(logger.insert_space(f"Torrent Name: {t_name}", 3), self.config.loglevel)
body += logger.print_line(
logger.insert_space(f'New Tag{"s" if len(tracker["tag"]) > 1 else ""}: {", ".join(tracker["tag"])}', 8),
self.config.loglevel,
)
body += logger.print_line(logger.insert_space(f'Tracker: {tracker["url"]}', 8), self.config.loglevel)
body.extend(
self.qbt.set_tags_and_limits(
torrent,
tracker["max_ratio"],
tracker["max_seeding_time"],
tracker["limit_upload_speed"],
tracker["tag"],
)
)
if not self.config.dry_run:
torrent.add_tags(tracker["tag"])
category = self.qbt.get_category(torrent.save_path) if torrent.category == "" else torrent.category
attr = {
"function": "tag_update",
"title": "Updating Tags",
"body": "\n".join(body),
"torrent_name": torrent.name,
"torrents": [t_name],
"torrent_category": category,
"torrent_tag": ", ".join(tracker["tag"]),
"torrent_tracker": tracker["url"],
"notifiarr_indexer": tracker["notifiarr"],
"torrent_max_ratio": tracker["max_ratio"],
"torrent_max_seeding_time": tracker["max_seeding_time"],
"torrent_limit_upload_speed": tracker["limit_upload_speed"],
}
self.config.send_notifications(attr)
self.notify_attr.append(attr)
self.torrents_updated.append(t_name)
if self.stats >= 1:
logger.print_line(
f"{'Did not update' if self.config.dry_run else 'Updated'} {self.stats} new tags.", self.config.loglevel

View file

@ -17,7 +17,7 @@ WARN = WARNING
DRYRUN = 25
INFO = 20
DEBUG = 10
TRACE = 0
TRACE = 1
def fmt_filter(record):
@ -68,9 +68,9 @@ class MyLogger:
"""Clear saved errors"""
self.saved_errors = []
def _get_handler(self, log_file, count=3):
def _get_handler(self, log_file, count=5):
"""Get handler for log file"""
max_bytes = 1024 * 1024 * 2
max_bytes = 1024 * 1024 * 10
_handler = RotatingFileHandler(log_file, delay=True, mode="w", maxBytes=max_bytes, backupCount=count, encoding="utf-8")
self._formatter(handler=_handler)
# if os.path.isfile(log_file):
@ -88,7 +88,7 @@ class MyLogger:
def add_main_handler(self):
"""Add main handler to logger"""
self.main_handler = self._get_handler(self.main_log, count=9)
self.main_handler = self._get_handler(self.main_log, count=19)
self.main_handler.addFilter(fmt_filter)
self._logger.addHandler(self.main_handler)
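For sizing context, a hedged back-of-the-envelope using the values in this hunk (a `RotatingFileHandler` keeps the active file plus `backupCount` rotated copies):

```python
max_bytes = 10 * 1024 * 1024                        # per-file cap set in _get_handler
backup_count_main = 19                              # backups kept for the main log handler
total_bytes = max_bytes * (backup_count_main + 1)   # active file + rotated backups
print(f"{total_bytes / (1024 * 1024):.0f} MiB")     # -> 200 MiB of main-log history at most
```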

View file

@ -1,3 +1,4 @@
import time
from json import JSONDecodeError
from modules import util
@ -36,4 +37,6 @@ class Notifiarr:
def notification(self, json):
"""Send notification to Notifiarr"""
params = {"qbit_client": self.config.data["qbt"]["host"], "instance": self.instance}
return self.config.get(f"{self.url}notification/qbitManage/", json=json, headers=self.header, params=params)
response = self.config.get(f"{self.url}notification/qbitManage/", json=json, headers=self.header, params=params)
time.sleep(1) # Pause for 1 second before sending the next request
return response

View file

@ -1,7 +1,6 @@
"""Qbittorrent Module"""
import os
import sys
from datetime import timedelta
from qbittorrentapi import APIConnectionError
from qbittorrentapi import Client
@ -25,7 +24,7 @@ class Qbt:
SUPPORTED_VERSION = Version.latest_supported_app_version()
MIN_SUPPORTED_VERSION = "v4.3.0"
TORRENT_DICT_COMMANDS = ["recheck", "cross_seed", "rem_unregistered", "tag_tracker_error", "tag_nohardlinks"]
TORRENT_DICT_COMMANDS = ["recheck", "cross_seed", "rem_unregistered", "tag_tracker_error", "tag_nohardlinks", "share_limits"]
def __init__(self, config, params):
self.config = config
@ -58,16 +57,14 @@ class Qbt:
+ f"Please downgrade your qBittorrent version to {self.SUPPORTED_VERSION} to use qbit_manage."
)
if ex:
self.config.notify(ex, "Qbittorrent")
logger.print_line(ex, "CRITICAL")
if self.config.commands["skip_qb_version_check"]:
logger.print_line(
"Continuing because qBittorrent version check is bypassed... Please do not ask for support!"
)
ex += "\n[BYPASS]: Continuing because qBittorrent version check is bypassed... Please do not ask for support!"
logger.print_line(ex, "WARN")
else:
self.config.notify(ex, "Qbittorrent")
logger.print_line(ex, "CRITICAL")
sys.exit(0)
else:
logger.info("Qbt Connection Successful")
logger.info("Qbt Connection Successful")
except LoginFailed as exc:
ex = "Qbittorrent Error: Failed to login. Invalid username/password."
self.config.notify(ex, "Qbittorrent")
@ -202,142 +199,11 @@ class Qbt:
"""Get torrents from qBittorrent"""
return self.client.torrents.info(**params)
def set_tags_and_limits(
self, torrent, max_ratio, max_seeding_time, limit_upload_speed=None, tags=None, restore=False, do_print=True
):
"""Set tags and limits for a torrent"""
body = []
if limit_upload_speed:
if limit_upload_speed != -1:
msg = logger.insert_space(f"Limit UL Speed: {limit_upload_speed} kB/s", 1)
if do_print:
body += logger.print_line(msg, self.config.loglevel)
else:
body.append(msg)
if max_ratio or max_seeding_time:
if (max_ratio == -2 and max_seeding_time == -2) and not restore:
msg = logger.insert_space("Share Limit: Use Global Share Limit", 4)
if do_print:
body += logger.print_line(msg, self.config.loglevel)
else:
body.append(msg)
elif (max_ratio == -1 and max_seeding_time == -1) and not restore:
msg = logger.insert_space("Share Limit: Set No Share Limit", 4)
if do_print:
body += logger.print_line(msg, self.config.loglevel)
else:
body.append(msg)
else:
if max_ratio != torrent.max_ratio and (not max_seeding_time or max_seeding_time < 0):
msg = logger.insert_space(f"Share Limit: Max Ratio = {max_ratio}", 4)
if do_print:
body += logger.print_line(msg, self.config.loglevel)
else:
body.append(msg)
elif max_seeding_time != torrent.max_seeding_time and (not max_ratio or max_ratio < 0):
msg = logger.insert_space(f"Share Limit: Max Seed Time = {max_seeding_time} min", 4)
if do_print:
body += logger.print_line(msg, self.config.loglevel)
else:
body.append(msg)
elif max_ratio != torrent.max_ratio or max_seeding_time != torrent.max_seeding_time:
msg = logger.insert_space(f"Share Limit: Max Ratio = {max_ratio}, Max Seed Time = {max_seeding_time} min", 4)
if do_print:
body += logger.print_line(msg, self.config.loglevel)
else:
body.append(msg)
# Update Torrents
if not self.config.dry_run:
if tags:
torrent.add_tags(tags)
if limit_upload_speed:
if limit_upload_speed == -1:
torrent.set_upload_limit(-1)
else:
torrent.set_upload_limit(limit_upload_speed * 1024)
if not max_ratio:
max_ratio = torrent.max_ratio
if not max_seeding_time:
max_seeding_time = torrent.max_seeding_time
if "MinSeedTimeNotReached" in torrent.tags:
return []
torrent.set_share_limits(max_ratio, max_seeding_time)
return body
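A hedged usage sketch of the sentinel values handled above; `qbt`, `torrent`, `config`, and `logger` are assumed from the surrounding module, and the values are illustrative. -2/-2 defers to qBittorrent's global share limits, -1/-1 clears the limits, and `limit_upload_speed` is in kB/s (converted with `* 1024` before the API call):

```python
body = qbt.set_tags_and_limits(
    torrent,
    max_ratio=-2,            # -2 -> fall back to the global share limit
    max_seeding_time=-2,
    limit_upload_speed=150,  # kB/s; 0/None leaves the upload limit untouched
    tags="tracker_tag",
    do_print=False,          # collect the log lines instead of printing immediately
)
for line in body:
    logger.print_line(line, config.loglevel)
```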
def has_reached_seed_limit(self, torrent, max_ratio, max_seeding_time, min_seeding_time, resume_torrent, tracker):
"""Check if torrent has reached seed limit"""
body = ""
def _has_reached_min_seeding_time_limit():
print_log = []
if torrent.seeding_time >= min_seeding_time * 60:
if "MinSeedTimeNotReached" in torrent.tags:
torrent.remove_tags(tags="MinSeedTimeNotReached")
return True
else:
print_log += logger.print_line(logger.insert_space(f"Torrent Name: {torrent.name}", 3), self.config.loglevel)
print_log += logger.print_line(logger.insert_space(f"Tracker: {tracker}", 8), self.config.loglevel)
print_log += logger.print_line(
logger.insert_space(
f"Min seed time not met: {timedelta(seconds=torrent.seeding_time)} <= "
f"{timedelta(minutes=min_seeding_time)}. Removing Share Limits so qBittorrent can continue seeding.",
8,
),
self.config.loglevel,
)
print_log += logger.print_line(logger.insert_space("Adding Tag: MinSeedTimeNotReached", 8), self.config.loglevel)
if not self.config.dry_run:
torrent.add_tags("MinSeedTimeNotReached")
torrent.set_share_limits(-1, -1)
if resume_torrent:
torrent.resume()
return False
def _has_reached_seeding_time_limit():
nonlocal body
seeding_time_limit = None
if not max_seeding_time:
return False
if max_seeding_time >= 0:
seeding_time_limit = max_seeding_time
elif max_seeding_time == -2 and self.global_max_seeding_time_enabled:
seeding_time_limit = self.global_max_seeding_time
else:
return False
if seeding_time_limit:
if (torrent.seeding_time >= seeding_time_limit * 60) and _has_reached_min_seeding_time_limit():
body += logger.insert_space(
f"Seeding Time vs Max Seed Time: {timedelta(seconds=torrent.seeding_time)} >= "
f"{timedelta(minutes=seeding_time_limit)}",
8,
)
return True
return False
if max_ratio:
if max_ratio >= 0:
if torrent.ratio >= max_ratio and _has_reached_min_seeding_time_limit():
body += logger.insert_space(f"Ratio vs Max Ratio: {torrent.ratio:.2f} >= {max_ratio:.2f}", 8)
return body
elif max_ratio == -2 and self.global_max_ratio_enabled and _has_reached_min_seeding_time_limit():
if torrent.ratio >= self.global_max_ratio:
body += logger.insert_space(
f"Ratio vs Global Max Ratio: {torrent.ratio:.2f} >= {self.global_max_ratio:.2f}", 8
)
return body
if _has_reached_seeding_time_limit():
return body
return False
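One detail worth spelling out, as a hedged arithmetic sketch with hypothetical values: the configured limits are given in minutes while qBittorrent reports `torrent.seeding_time` in seconds, hence the `* 60` in the comparisons above:

```python
max_seeding_time = 10080  # minutes (7 days), as configured per tracker/category
min_seeding_time = 4320   # minutes (3 days)
seeding_time = 612_000    # seconds, as reported by the qBittorrent API

reached_max = seeding_time >= max_seeding_time * 60  # 612000 >= 604800 -> True
reached_min = seeding_time >= min_seeding_time * 60  # 612000 >= 259200 -> True
print(reached_max and reached_min)                   # torrent qualifies for cleanup
```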
def get_tags(self, trackers):
"""Get tags from config file based on keyword"""
urls = [x.url for x in trackers if x.url.startswith("http")]
tracker = {}
tracker["tag"] = None
tracker["max_ratio"] = None
tracker["min_seeding_time"] = None
tracker["max_seeding_time"] = None
tracker["limit_upload_speed"] = None
tracker["notifiarr"] = None
tracker["url"] = None
tracker_other_tag = self.config.util.check_for_attribute(
@ -376,76 +242,6 @@ class Qbt:
self.config.data["tracker"][tag_url]["tag"] = [tag_url]
if isinstance(tracker["tag"], str):
tracker["tag"] = [tracker["tag"]]
is_max_ratio_defined = self.config.data["tracker"].get("max_ratio")
is_max_seeding_time_defined = self.config.data["tracker"].get("max_seeding_time")
if is_max_ratio_defined or is_max_seeding_time_defined:
tracker["max_ratio"] = self.config.util.check_for_attribute(
self.config.data,
"max_ratio",
parent="tracker",
subparent=tag_url,
var_type="float",
min_int=-2,
do_print=False,
default=-1,
save=False,
)
tracker["max_seeding_time"] = self.config.util.check_for_attribute(
self.config.data,
"max_seeding_time",
parent="tracker",
subparent=tag_url,
var_type="int",
min_int=-2,
do_print=False,
default=-1,
save=False,
)
else:
tracker["max_ratio"] = self.config.util.check_for_attribute(
self.config.data,
"max_ratio",
parent="tracker",
subparent=tag_url,
var_type="float",
min_int=-2,
do_print=False,
default_is_none=True,
save=False,
)
tracker["max_seeding_time"] = self.config.util.check_for_attribute(
self.config.data,
"max_seeding_time",
parent="tracker",
subparent=tag_url,
var_type="int",
min_int=-2,
do_print=False,
default_is_none=True,
save=False,
)
tracker["min_seeding_time"] = self.config.util.check_for_attribute(
self.config.data,
"min_seeding_time",
parent="tracker",
subparent=tag_url,
var_type="int",
min_int=0,
do_print=False,
default=0,
save=False,
)
tracker["limit_upload_speed"] = self.config.util.check_for_attribute(
self.config.data,
"limit_upload_speed",
parent="tracker",
subparent=tag_url,
var_type="int",
min_int=-1,
do_print=False,
default=0,
save=False,
)
tracker["notifiarr"] = self.config.util.check_for_attribute(
self.config.data,
"notifiarr",
@ -458,50 +254,6 @@ class Qbt:
return tracker
if tracker_other_tag:
tracker["tag"] = tracker_other_tag
tracker["max_ratio"] = self.config.util.check_for_attribute(
self.config.data,
"max_ratio",
parent="tracker",
subparent="other",
var_type="float",
min_int=-2,
do_print=False,
default=-1,
save=False,
)
tracker["min_seeding_time"] = self.config.util.check_for_attribute(
self.config.data,
"min_seeding_time",
parent="tracker",
subparent="other",
var_type="int",
min_int=0,
do_print=False,
default=-1,
save=False,
)
tracker["max_seeding_time"] = self.config.util.check_for_attribute(
self.config.data,
"max_seeding_time",
parent="tracker",
subparent="other",
var_type="int",
min_int=-2,
do_print=False,
default=-1,
save=False,
)
tracker["limit_upload_speed"] = self.config.util.check_for_attribute(
self.config.data,
"limit_upload_speed",
parent="tracker",
subparent="other",
var_type="int",
min_int=-1,
do_print=False,
default=0,
save=False,
)
tracker["notifiarr"] = self.config.util.check_for_attribute(
self.config.data,
"notifiarr",
@ -573,18 +325,18 @@ class Qbt:
# Create recycle bin if not exists
torrent_path = os.path.join(recycle_path, "torrents")
torrents_json_path = os.path.join(recycle_path, "torrents_json")
torrent_name = info["torrents"][0]
os.makedirs(recycle_path, exist_ok=True)
if self.config.recyclebin["save_torrents"]:
if os.path.isdir(torrent_path) is False:
os.makedirs(torrent_path)
if os.path.isdir(torrents_json_path) is False:
os.makedirs(torrents_json_path)
torrent_json_file = os.path.join(torrents_json_path, f"{info['torrent_name']}.json")
torrent_json_file = os.path.join(torrents_json_path, f"{torrent_name}.json")
torrent_json = util.load_json(torrent_json_file)
if not torrent_json:
logger.info(f"Saving Torrent JSON file to {torrent_json_file}")
torrent_json["torrent_name"] = info["torrent_name"]
torrent_json["torrent_name"] = torrent_name
torrent_json["category"] = info["torrent_category"]
else:
logger.info(f"Adding {info['torrent_tracker']} to existing {os.path.basename(torrent_json_file)}")

View file

@ -153,7 +153,31 @@ class check:
save=True,
make_dirs=False,
):
"""Check for attribute in config."""
"""
Check for attribute in config.
Args:
data (dict): The configuration data to search.
attribute (str): The name of the attribute key to search for.
parent (str, optional): The name of the top level attribute to search under. Defaults to None.
subparent (str, optional): The name of the second level attribute to search under. Defaults to None.
test_list (dict, optional): A dictionary of valid values for the attribute. Defaults to None.
default (any, optional): The default value to use if the attribute is not found. Defaults to None.
do_print (bool, optional): Whether to print warning messages. Defaults to True.
default_is_none (bool, optional): Whether to treat a None value as a valid default. Defaults to False.
req_default (bool, optional): Whether to raise an error if no default value is provided. Defaults to False.
var_type (str, optional): The expected type of the attribute value. Defaults to "str".
min_int (int, optional): The minimum value for an integer attribute. Defaults to 0.
throw (bool, optional): Whether to raise an error if the attribute value is invalid. Defaults to False.
save (bool, optional): Whether to save the default value to the config if it is used. Defaults to True.
make_dirs (bool, optional): Whether to create directories for path attributes if they do not exist. Defaults to False.
Returns:
any: The value of the attribute, or the default value if it is not found.
Raises:
Failed: If the attribute value is invalid or a required default value is missing.
"""
endline = ""
if parent is not None:
if subparent is not None:
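A hedged usage sketch mirroring the tracker lookups elsewhere in this changeset; the subparent value "example-tracker.org" is a placeholder, and `self.config` is assumed to be a loaded config instance:

```python
max_ratio = self.config.util.check_for_attribute(
    self.config.data,
    "max_ratio",
    parent="tracker",
    subparent="example-tracker.org",
    var_type="float",
    min_int=-2,
    default=-1,
    do_print=False,
    save=False,
)
```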

View file

@ -9,6 +9,8 @@ from modules.util import Failed
logger = util.logger
GROUP_NOTIFICATION_LIMIT = 10
class Webhooks:
"""Class to handle webhooks."""
@ -139,6 +141,8 @@ class Webhooks:
"orphaned_files_found": stats["orphaned"],
"torrents_tagged_no_hardlinks": stats["tagged_noHL"],
"torrents_untagged_no_hardlinks": stats["untagged_noHL"],
"torrents_updated_share_limits": stats["updated_share_limits"],
"torrents_cleaned_share_limits": stats["cleaned_share_limits"],
"files_deleted_from_recyclebin": stats["recycle_emptied"],
"files_deleted_from_orphaned": stats["orphaned_emptied"],
},
@ -163,3 +167,63 @@ class Webhooks:
"""Send a webhook to notify that a function has completed."""
if self.function_webhooks:
self._request(webhook, json)
def notify(self, torrents_updated=[], payload={}, group_by=""):
if len(torrents_updated) > GROUP_NOTIFICATION_LIMIT:
logger.trace(
f"Number of torrents updated > {GROUP_NOTIFICATION_LIMIT}, grouping notifications"
f"{f' by {group_by}' if group_by else ''}",
)
if group_by == "category":
group_attr = group_notifications_by_key(payload, "torrent_category")
elif group_by == "tag":
group_attr = group_notifications_by_key(payload, "torrent_tag")
elif group_by == "tracker":
group_attr = group_notifications_by_key(payload, "torrent_tracker")
# group notifications by grouping attribute
for group in group_attr:
num_torrents_updated = len(group_attr[group]["torrents"])
only_one_torrent_updated = num_torrents_updated == 1
attr = {
"function": group_attr[group]["function"],
"title": f"{group_attr[group]['title']} for {group}",
"body": group_attr[group]["body"]
if only_one_torrent_updated
else f"Updated {num_torrents_updated} "
f"{'torrent' if only_one_torrent_updated else 'torrents'} with {group_by} '{group}'",
"torrents": group_attr[group]["torrents"],
}
if group_by == "category":
attr["torrent_category"] = group
attr["torrent_tag"] = group_attr[group].get("torrent_tag") if only_one_torrent_updated else None
attr["torrent_tracker"] = group_attr[group].get("torrent_tracker") if only_one_torrent_updated else None
attr["notifiarr_indexer"] = group_attr[group].get("notifiarr_indexer") if only_one_torrent_updated else None
elif group_by == "tag":
attr["torrent_tag"] = group
attr["torrent_category"] = group_attr[group].get("torrent_category") if only_one_torrent_updated else None
attr["torrent_tracker"] = group_attr[group].get("torrent_tracker")
attr["notifiarr_indexer"] = group_attr[group].get("notifiarr_indexer")
elif group_by == "tracker":
attr["torrent_tracker"] = group
attr["torrent_category"] = group_attr[group].get("torrent_category") if only_one_torrent_updated else None
attr["torrent_tag"] = group_attr[group].get("torrent_tag") if only_one_torrent_updated else None
attr["notifiarr_indexer"] = group_attr[group].get("notifiarr_indexer")
self.config.send_notifications(attr)
else:
for attr in payload:
self.config.send_notifications(attr)
def group_notifications_by_key(payload, key):
"""Group notifications by key"""
group_attr = {}
for attr in payload:
group = attr[key]
if group not in group_attr:
group_attr[group] = attr
else:
group_attr[group]["torrents"].append(attr.get("torrents", [None])[0])
return group_attr
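A hedged illustration of the grouping, using a hypothetical two-entry payload and the helper defined just above (only the keys `notify()` reads are shown):

```python
payload = [
    {"function": "tag_update", "title": "Updating Tags", "torrent_tag": "other",
     "torrent_category": "movies", "torrents": ["Torrent.A"]},
    {"function": "tag_update", "title": "Updating Tags", "torrent_tag": "other",
     "torrent_category": "tv", "torrents": ["Torrent.B"]},
]
grouped = group_notifications_by_key(payload, "torrent_tag")
print(grouped["other"]["torrents"])  # -> ['Torrent.A', 'Torrent.B']
```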

View file

@ -141,6 +141,16 @@ parser.add_argument(
"When files get upgraded they no longer become linked with your media therefore will be tagged with a new tag noHL. "
"You can then safely delete/remove these torrents to free up any extra space that is not being used by your media folder.",
)
parser.add_argument(
"-sl",
"--share-limits",
dest="share_limits",
action="store_true",
default=False,
help="Use this to help apply and manage your torrent share limits based on your tags/categories."
"This can apply a max ratio, seed time limits to your torrents or limit your torrent upload speed as well."
"Share limits are applied in the order of priority specified.",
)
parser.add_argument(
"-sc",
"--skip-cleanup",
@ -237,6 +247,7 @@ rem_unregistered = get_arg("QBT_REM_UNREGISTERED", args.rem_unregistered, arg_bo
tag_tracker_error = get_arg("QBT_TAG_TRACKER_ERROR", args.tag_tracker_error, arg_bool=True)
rem_orphaned = get_arg("QBT_REM_ORPHANED", args.rem_orphaned, arg_bool=True)
tag_nohardlinks = get_arg("QBT_TAG_NOHARDLINKS", args.tag_nohardlinks, arg_bool=True)
share_limits = get_arg("QBT_SHARE_LIMITS", args.share_limits, arg_bool=True)
skip_cleanup = get_arg("QBT_SKIP_CLEANUP", args.skip_cleanup, arg_bool=True)
skip_qb_version_check = get_arg("QBT_SKIP_QB_VERSION_CHECK", args.skip_qb_version_check, arg_bool=True)
dry_run = get_arg("QBT_DRY_RUN", args.dry_run, arg_bool=True)
@ -285,6 +296,7 @@ for v in [
"tag_tracker_error",
"rem_orphaned",
"tag_nohardlinks",
"share_limits",
"skip_cleanup",
"skip_qb_version_check",
"dry_run",
@ -329,6 +341,7 @@ from modules.core.cross_seed import CrossSeed # noqa
from modules.core.recheck import ReCheck # noqa
from modules.core.tag_nohardlinks import TagNoHardLinks # noqa
from modules.core.remove_orphaned import RemoveOrphaned # noqa
from modules.core.share_limits import ShareLimits # noqa
def my_except_hook(exctype, value, tbi):
@ -395,6 +408,8 @@ def start():
"untagged_tracker_error": 0,
"tagged_noHL": 0,
"untagged_noHL": 0,
"updated_share_limits": 0,
"cleaned_share_limits": 0,
}
def finished_run():
@ -458,8 +473,15 @@ def start():
stats["tagged"] += no_hardlinks.stats_tagged
stats["tagged_noHL"] += no_hardlinks.stats_tagged
stats["untagged_noHL"] += no_hardlinks.stats_untagged
stats["deleted"] += no_hardlinks.stats_deleted
stats["deleted_contents"] += no_hardlinks.stats_deleted_contents
# Set Share Limits
if cfg.commands["share_limits"]:
share_limits = ShareLimits(qbit_manager)
stats["tagged"] += share_limits.stats_tagged
stats["updated_share_limits"] += share_limits.stats_tagged
stats["deleted"] += share_limits.stats_deleted
stats["deleted_contents"] += share_limits.stats_deleted_contents
stats["cleaned_share_limits"] += share_limits.stats_deleted + share_limits.stats_deleted_contents
# Remove Orphaned Files
if cfg.commands["rem_orphaned"]:
@ -497,6 +519,10 @@ def start():
stats_summary.append(f"Total {cfg.nohardlinks_tag} Torrents Tagged: {stats['tagged_noHL']}")
if stats["untagged_noHL"] > 0:
stats_summary.append(f"Total {cfg.nohardlinks_tag} Torrents untagged: {stats['untagged_noHL']}")
if stats["updated_share_limits"] > 0:
stats_summary.append(f"Total Share Limits Updated: {stats['updated_share_limits']}")
if stats["cleaned_share_limits"] > 0:
stats_summary.append(f"Total Torrents Removed from Meeting Share Limits: {stats['cleaned_share_limits']}")
if stats["recycle_emptied"] > 0:
stats_summary.append(f"Total Files Deleted from Recycle Bin: {stats['recycle_emptied']}")
if stats["orphaned_emptied"] > 0:
@ -583,6 +609,7 @@ if __name__ == "__main__":
logger.debug(f" --tag-tracker-error (QBT_TAG_TRACKER_ERROR): {tag_tracker_error}")
logger.debug(f" --rem-orphaned (QBT_REM_ORPHANED): {rem_orphaned}")
logger.debug(f" --tag-nohardlinks (QBT_TAG_NOHARDLINKS): {tag_nohardlinks}")
logger.debug(f" --share-limits (QBT_SHARE_LIMITS): {share_limits}")
logger.debug(f" --skip-cleanup (QBT_SKIP_CLEANUP): {skip_cleanup}")
logger.debug(f" --skip-qb-version-check (QBT_SKIP_QB_VERSION_CHECK): {skip_qb_version_check}")
logger.debug(f" --dry-run (QBT_DRY_RUN): {dry_run}")

requirements-dev.txt Normal file
View file

@ -0,0 +1,2 @@
flake8==6.0.0
pre-commit==3.3.2

View file

@ -1,9 +1,7 @@
bencodepy==0.9.5
flake8==6.0.0
GitPython==3.1.31
pre-commit==3.3.2
qbittorrent-api==2023.4.47
qbittorrent-api==2023.5.48
requests==2.31.0
retrying==1.3.4
ruamel.yaml==0.17.27
ruamel.yaml==0.17.31
schedule==1.2.0

View file

@ -0,0 +1,41 @@
#!/bin/bash
staged_changes=$(git diff-index --cached HEAD | wc -l | awk '{print $1}')
# Check if there are any changes staged for commit
if [ "$staged_changes" -eq 0 ]; then
echo "There are no changes staged for commit. Skipping version update."
exit 0
fi
# Check if the VERSION file is staged for modification
if git diff --cached --name-only | grep -q "VERSION"; then
echo "The VERSION file is already modified. Skipping version update."
exit 0
fi
# Read the current version from the VERSION file
current_version=$(cat VERSION)
echo "Current version: $current_version"
# Check if "develop" is not present in the version string
if [[ $current_version != *"develop"* ]]; then
echo "The word 'develop' is not present in the version string."
exit 0
fi
# Get the version number from the HEAD commit
current_version=$(git show HEAD:VERSION 2>/dev/null)
# Extract the version number after "develop"
version_number=$(echo "$current_version" | grep -oP '(?<=develop)\d+')
# Increment the version number
new_version_number=$((version_number + 1))
# Replace the old version number with the new one
new_version=$(echo "$current_version" | sed "s/develop$version_number/develop$new_version_number/")
# Update the VERSION file
echo "$new_version" > VERSION
echo "Version updated to: $new_version"