diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..281f674 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,21 @@ +**/dist +**/build +*.spec +**/__pycache__ +/.vscode +**/log +README.md +LICENSE +.gitignore +.dockerignore +.git +.github +.vscode +*.psd +config/**/* +config +Dockerfile +venv +.idea +test.py +!config/config.yml.sample \ No newline at end of file diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 0000000..6afd691 --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1 @@ +github: bobokun \ No newline at end of file diff --git a/.github/workflows/develop.yml b/.github/workflows/develop.yml new file mode 100644 index 0000000..51ec6ac --- /dev/null +++ b/.github/workflows/develop.yml @@ -0,0 +1,38 @@ +name: Docker Develop Release + +on: + push: + branches: [ develop ] + pull_request: + branches: [ develop ] + +jobs: + + docker-develop: + runs-on: ubuntu-latest + + steps: + + - name: Check Out Repo + uses: actions/checkout@v2 + with: + ref: develop + + - name: Login to Docker Hub + uses: docker/login-action@v1 + with: + username: ${{ secrets.DOCKER_HUB_USERNAME }} + password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} + + - name: Set up Docker Buildx + id: buildx + uses: docker/setup-buildx-action@v1 + + - name: Build and push + id: docker_build + uses: docker/build-push-action@v2 + with: + context: ./ + file: ./Dockerfile + push: true + tags: ${{ secrets.DOCKER_HUB_USERNAME }}/qbit_manage:develop \ No newline at end of file diff --git a/.github/workflows/latest.yml b/.github/workflows/latest.yml new file mode 100644 index 0000000..d7d0d9d --- /dev/null +++ b/.github/workflows/latest.yml @@ -0,0 +1,36 @@ +name: Docker Latest Release + +on: + push: + branches: [ master ] + pull_request: + branches: [ master ] + +jobs: + + docker-latest: + runs-on: ubuntu-latest + + steps: + + - name: Check Out Repo + uses: actions/checkout@v2 + + - name: Login to Docker Hub + uses: docker/login-action@v1 + with: + username: ${{ secrets.DOCKER_HUB_USERNAME }} + password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} + + - name: Set up Docker Buildx + id: buildx + uses: docker/setup-buildx-action@v1 + + - name: Build and push + id: docker_build + uses: docker/build-push-action@v2 + with: + context: ./ + file: ./Dockerfile + push: true + tags: ${{ secrets.DOCKER_HUB_USERNAME }}/qbit_manage:latest \ No newline at end of file diff --git a/.github/workflows/tag.yml b/.github/workflows/tag.yml new file mode 100644 index 0000000..05abffd --- /dev/null +++ b/.github/workflows/tag.yml @@ -0,0 +1,18 @@ +name: Tag + +on: + push: + branches: [ master ] + +jobs: + tag-new-versions: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + token: ${{ secrets.PAT }} + fetch-depth: 2 + - uses: salsify/action-detect-and-tag-new-version@v1.0.3 + with: + version-command: | + cat VERSION \ No newline at end of file diff --git a/.github/workflows/version.yml b/.github/workflows/version.yml new file mode 100644 index 0000000..5c11ef2 --- /dev/null +++ b/.github/workflows/version.yml @@ -0,0 +1,39 @@ +name: Docker Version Release + +on: + create: + tags: + - v* + +jobs: + + docker-develop: + runs-on: ubuntu-latest + + steps: + + - name: Check Out Repo + uses: actions/checkout@v2 + + - name: Login to Docker Hub + uses: docker/login-action@v1 + with: + username: ${{ secrets.DOCKER_HUB_USERNAME }} + password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} + + - name: Set up Docker Buildx + id: buildx + uses: docker/setup-buildx-action@v1 + + - name: Get the version + id: 
get_version + run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//} + + - name: Build and push + id: docker_build + uses: docker/build-push-action@v2 + with: + context: ./ + file: ./Dockerfile + push: true + tags: ${{ secrets.DOCKER_HUB_USERNAME }}/qbit_manage:${{ steps.get_version.outputs.VERSION }} \ No newline at end of file diff --git a/.gitignore b/.gitignore index 9b1ac70..b92a666 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,9 @@ -*.log +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +*.log* *.yml -.vscode/settings.json +.vscode/* +!.github/** \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..96106f4 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,10 @@ +FROM python:3.9-slim +COPY requirements.txt / +RUN echo "**** install python packages ****" \ + && pip3 install --no-cache-dir --upgrade --requirement /requirements.txt \ + && apt-get autoremove -y \ + && apt-get clean \ + && rm -rf /requirements.txt /tmp/* /var/tmp/* /var/lib/apt/lists/* +COPY . / +VOLUME /config +ENTRYPOINT ["python3","qbit_manage.py"] \ No newline at end of file diff --git a/README.md b/README.md index a6d4def..8eb47a7 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,8 @@ This is a program used to manage your qBittorrent instance such as: * Recheck paused torrents sorted by lowest size and resume if completed * Remove orphaned files from your root directory that are not referenced by qBittorrent * Tag any torrents that have no hard links and allows optional cleanup to delete these torrents and contents based on maximum ratio and/or time seeded - +* RecycleBin function to move files into a RecycleBin folder instead of deleting the data directly when deleting a torrent +* Built-in scheduler to run the script every x minutes. (Can use `--run` command to run without the scheduler) ## Installation Check out the [wiki](https://github.com/StuffAnThings/qbit_manage/wiki) for installation help @@ -20,10 +21,12 @@ To run the script in an interactive terminal run: * copy the `config.yml.sample` file to `config.yml` * add your qBittorrent host, user and pass. If you are not using a username and password you can remove the `user` and `pass` lines. -* add your `cross_seed` and `root_dir`. If you are using a docker container you must fill out `remote_dir` as well. +* add your `cross_seed` and `root_dir`. If you're running cross-seed in a docker container you must fill out `remote_dir` as well. * Add your categories and save path to match with what is being used in your qBittorrent instance. I suggest using the full path when defining `save_path` * Add the `tag` definition based on tracker URL * Modify the `nohardlinks` by specifying your completed movies/series category to match with qBittorrent. Please ensure the `root_dir` and/or `remote_dir` is added in the `directory` section +* `root_dir` needs to be defined in order to use the RecycleBin function. If optional `empty_after_x_days` is not defined then it will never empty the RecycleBin. Setting it to 0 will empty the RecycleBin immediately. +* Modify the `orphaned` section to define file patterns not to consider as orphans. Use this to exclude your incomplete torrents directory, or to ignore auto-generated files such as Thumbs.db. 
* To run the script in an interactive terminal with a list of possible commands run: ```bash @@ -32,21 +35,24 @@ python qbit_manage.py -h ## Commands -| Shell Command | Description | Default Value | -| :------------ | :------------ | :------------ | -| `-c CONFIG` or `--config-file CONFIG` | This is used if you want to use a different name for your config.yml. `Example: tv.yml` | config.yml | -| `-l LOGFILE,` or `--log-file LOGFILE,` | This is used if you want to use a different name for your log file. `Example: tv.log` | activity.log | -| `-m` or `--manage` | Use this if you would like to update your tags, categories, remove unregistered torrents, AND recheck/resume paused torrents. | | -| `-s` or `--cross-seed` | Use this after running [cross-seed script](https://github.com/mmgoodnow/cross-seed) to add torrents from the cross-seed output folder to qBittorrent | | -| `-re` or `--recheck` | Recheck paused torrents sorted by lowest size. Resume if Completed. | | -| `-g` or `--cat-update` | Use this if you would like to update your categories. | | -| `-t` or `--tag-update` | Use this if you would like to update your tags. (Only adds tags to untagged torrents) | | -| `-r` or `--rem-unregistered` | Use this if you would like to remove unregistered torrents. (It will the delete data & torrent if it is not being cross-seeded, otherwise it will just remove the torrent without deleting data) | | -| `-ro` or `--rem-orphaned` | Use this if you would like to remove orphaned files from your `root_dir` directory that are not referenced by any torrents. It will scan your `root_dir` directory and compare it with what is in qBittorrent. Any data not referenced in qBittorrent will be moved into `/data/torrents/orphaned_data` folder for you to review/delete. | | -| `-tnhl` or `--tag-nohardlinks` | Use this to tag any torrents that do not have any hard links associated with any of the files. This is useful for those that use Sonarr/Radarr that hard links your media files with the torrents for seeding. When files get upgraded they no longer become linked with your media therefore will be tagged with a new tag noHL. You can then safely delete/remove these torrents to free up any extra space that is not being used by your media folder. | | -| `--dry-run` | If you would like to see what is gonna happen but not actually move/delete or tag/categorize anything. | | -| `--log LOGLEVEL` | Change the ouput log level. | INFO | - +| Shell Command |Docker Environment Variable |Description | Default Value | +| :------------ | :------------ | :------------ | :------------ | +| `-r` or`--run` | QBT_RUN |Run without the scheduler. Script will exit after completion. | False | +| `-sch` or `--schedule` | QBT_SCHEDULE | Schedule to run every x minutes. (Default set to 30) | 30 | +| `-c CONFIG` or `--config-file CONFIG` | QBT_CONFIG | This is used if you want to use a different name for your config.yml. `Example: tv.yml` | config.yml | +| `-lf LOGFILE,` or `--log-file LOGFILE,` | QBT_LOGFILE | This is used if you want to use a different name for your log file. `Example: tv.log` | activity.log | +| `-cs` or `--cross-seed` | QBT_CROSS_SEED | Use this after running [cross-seed script](https://github.com/mmgoodnow/cross-seed) to add torrents from the cross-seed output folder to qBittorrent | False | +| `-re` or `--recheck` | QBT_RECHECK | Recheck paused torrents sorted by lowest size. Resume if Completed. | False | +| `-cu` or `--cat-update` | QBT_CAT_UPDATE | Use this if you would like to update your categories. 
| False | +| `-tu` or `--tag-update` | QBT_TAG_UPDATE | Use this if you would like to update your tags. (Only adds tags to untagged torrents) | False | +| `-ru` or `--rem-unregistered` | QBT_REM_UNREGISTERED | Use this if you would like to remove unregistered torrents. (It will delete the data & torrent if it is not being cross-seeded, otherwise it will just remove the torrent without deleting data) | False | +| `-ro` or `--rem-orphaned` | QBT_REM_ORPHANED | Use this if you would like to remove orphaned files from your `root_dir` directory that are not referenced by any torrents. It will scan your `root_dir` directory and compare it with what is in qBittorrent. Any data not referenced in qBittorrent will be moved into the `/data/torrents/orphaned_data` folder for you to review/delete. | False | +| `-tnhl` or `--tag-nohardlinks` | QBT_TAG_NOHARDLINKS | Use this to tag any torrents that do not have any hard links associated with any of the files. This is useful for those that use Sonarr/Radarr, which hard link your media files with the torrents for seeding. When files get upgraded they are no longer linked with your media and will therefore be tagged with a new tag noHL. You can then safely delete/remove these torrents to free up any extra space that is not being used by your media folder. | False | +| `-sr` or `--skip-recycle` | QBT_SKIP_RECYCLE | Use this to skip emptying the Recycle Bin folder (`/root_dir/.RecycleBin`). | False | +| `-dr` or `--dry-run` | QBT_DRY_RUN | If you would like to see what is going to happen without actually moving/deleting or tagging/categorizing anything. | False | +| `-ll` or `--log-level LOGLEVEL` | QBT_LOG_LEVEL | Change the output log level. | INFO | +| `-d` or `--divider` | QBT_DIVIDER | Character that divides the sections (Default: '=') | = | +| `-w` or `--width` | QBT_WIDTH | Screen Width (Default: 100) | 100 | ### Config To choose the location of the YAML config file diff --git a/VERSION b/VERSION new file mode 100644 index 0000000..415b19f --- /dev/null +++ b/VERSION @@ -0,0 +1 @@ +2.0 \ No newline at end of file diff --git a/config.yml.sample b/config.yml.sample index 08f99cf..3253947 100644 --- a/config.yml.sample +++ b/config.yml.sample @@ -7,8 +7,8 @@ qbt: directory: # Do not remove these # Cross-seed var: #Output directory of cross-seed - # root_dir var: #Root downloads directory used to check for orphaned files - # remote_dir var: # Path of docker host mapping of root_dir + # root_dir var: #Root downloads directory used to check for orphaned files and used in RecycleBin + # remote_dir var: # Path of docker host mapping of root_dir. Must be set if you are using docker! cross_seed: "/your/path/here/" root_dir: "/data/torrents/" remote_dir: "/mnt/user/data/torrents/" @@ -68,3 +68,20 @@ nohardlinks: max_ratio: 4.0 # seeding time var: Will set the torrent Maximum seeding time (min) until torrent is stopped from seeding max_seeding_time: 86400 + +#Recycle Bin method of deletion will move files into the recycle bin instead of directly deleting them in qbit +recyclebin: + enabled: true + # empty_after_x_days var: Will automatically remove all files and folders in recycle bin after x days. + # If this variable is not defined, the RecycleBin will never be emptied. + # Setting this variable to 0 will delete files immediately. + empty_after_x_days: 60 + +# Orphaned files are those in the root_dir download directory that are not referenced by any active torrents. +orphaned: + # File patterns that will not be considered orphaned files.
Handy for generated files that aren't part of the torrent but belong with the torrent's files + exclude_patterns: + - "**/.DS_Store" + - "**/Thumbs.db" + - "**/@eaDir" + - "/data/torrents/temp/**" \ No newline at end of file diff --git a/config/config.yml.sample b/config/config.yml.sample new file mode 100644 index 0000000..888d6a6 --- /dev/null +++ b/config/config.yml.sample @@ -0,0 +1,89 @@ +# qBittorrent parameters +qbt: + host: "localhost:8080" + user: "username" + pass: "password" + +directory: + # Do not remove these + # Cross-seed var: #Output directory of cross-seed + # root_dir var: #Root downloads directory used to check for orphaned files, noHL, and RecycleBin. + # remote_dir var: # Path of docker host mapping of root_dir. + # Must be set if you're running qbit_manage locally and qBittorrent/cross_seed is in a docker + cross_seed: "/your/path/here/" + root_dir: "/data/torrents/" + remote_dir: "/mnt/user/data/torrents/" + +# Category/Pathing Parameters +cat: + # : #Path of your save directory. Can be a keyword or full path + movies: "/data/torrents/Movies" + tv: "TV" + +# Tag Parameters +tags: + # : + animebytes.tv: AnimeBytes + avistaz: Avistaz + beyond-hd: Beyond-HD + blutopia: Blutopia + cartoonchaos: CartoonChaos + digitalcore: DigitalCore + gazellegames: GGn + hdts: HDTorrents + landof.tv: BroadcasTheNet + myanonamouse: MaM + passthepopcorn: PassThePopcorn + privatehd: PrivateHD + tleechreload: TorrentLeech + torrentdb: TorrentDB + torrentleech: TorrentLeech + tv-vault: TV-Vault + +#Tag Movies/Series that are not hard linked +nohardlinks: + # Mandatory to fill out directory parameter above to use this function (root_dir/remote_dir) + # This variable should be set to your category name of your completed movies/completed series in qbit. Acceptable variable can be any category you would like to tag if there are no hardlinks found + movies-completed: + # exclude_tags var: Will exclude the following tags when searching through the category. + exclude_tags: + - Beyond-HD + - AnimeBytes + - MaM + # cleanup var: WARNING!! Setting this as true Will remove and delete contents of any torrents that are in paused state and has the NoHL tag + cleanup: false + # max_ratio var: Will set the torrent Maximum share ratio until torrent is stopped from seeding/uploading + max_ratio: 4.0 + # seeding time var: Will set the torrent Maximum seeding time (min) until torrent is stopped from seeding + max_seeding_time: 86400 + + #Can have additional categories set with separate ratio/seeding times defined. + series-completed: + # exclude_tags var: Will exclude the following tags when searching through the category. + exclude_tags: + - Beyond-HD + - BroadcasTheNet + # cleanup var: WARNING!! Setting this as true Will remove and delete contents of any torrents that are in paused state and has the NoHL tag + cleanup: false + # max_ratio var: Will set the torrent Maximum share ratio until torrent is stopped from seeding/uploading + max_ratio: 4.0 + # seeding time var: Will set the torrent Maximum seeding time (min) until torrent is stopped from seeding + max_seeding_time: 86400 + +#Recycle Bin method of deletion will move files into the recycle bin (Located in /root_dir/.RecycleBin) instead of directly deleting them in qbit +#By default the Recycle Bin will be emptied on every run of the qbit_manage script if empty_after_x_days is defined. +recyclebin: + enabled: true + # empty_after_x_days var: Will automatically remove all files and folders in recycle bin after x days. 
(Checks every script run) + # If this variable is not defined, the RecycleBin will never be emptied. + # WARNING: Setting this variable to 0 will delete all files immediately upon script run! + empty_after_x_days: 60 + +# Orphaned files are those in the root_dir download directory that are not referenced by any active torrents. +orphaned: + # File patterns that will not be considered orphaned files. Handy for generated files that aren't part of the torrent but belong with the torrent's files + exclude_patterns: + - "**/.DS_Store" + - "**/Thumbs.db" + - "**/@eaDir" + - "/data/torrents/temp/**" \ No newline at end of file diff --git a/modules/docker.py b/modules/docker.py new file mode 100644 index 0000000..26767f4 --- /dev/null +++ b/modules/docker.py @@ -0,0 +1,10 @@ +import signal + +#Gracefully kill script when docker stops +class GracefulKiller: + kill_now = False + def __init__(self): + #signal.signal(signal.SIGINT, self.exit_gracefully) + signal.signal(signal.SIGTERM, self.exit_gracefully) + def exit_gracefully(self, *args): + self.kill_now = True \ No newline at end of file diff --git a/modules/util.py b/modules/util.py new file mode 100644 index 0000000..9f561d6 --- /dev/null +++ b/modules/util.py @@ -0,0 +1,95 @@ +import logging, traceback +from logging.handlers import RotatingFileHandler + +logger = logging.getLogger("qBit Manage") + +class TimeoutExpired(Exception): + pass + +class Failed(Exception): + pass + +class NotScheduled(Exception): + pass + +separating_character = "=" +screen_width = 100 +spacing = 0 + + +def print_multiline(lines, loglevel='INFO'): + line_list = str(lines).split("\n") + for i, line in enumerate(line_list): + if len(line) > 0 and i != len(line_list)-1: + logger.log(getattr(logging, loglevel),line) + if i == 0: + logger.handlers[1].setFormatter(logging.Formatter(" " * 37 + "| %(message)s")) + logger.handlers[1].setFormatter(logging.Formatter("[%(asctime)s] %(levelname)-10s | %(message)s")) + +def print_stacktrace(): + print_multiline(traceback.format_exc()) + +def my_except_hook(exctype, value, tb): + for line in traceback.format_exception(etype=exctype, value=value, tb=tb): + print_multiline(line, 'CRITICAL') + + +def centered(text, sep=" "): + if len(text) > screen_width - 2: + return text + space = screen_width - len(text) - 2 + text = f" {text} " + if space % 2 == 1: + text += sep + space -= 1 + side = int(space / 2) - 1 + final_text = f"{sep * side}{text}{sep * side}" + return final_text + +def separator(text=None, space=True, border=True, loglevel='INFO'): + sep = " " if space else separating_character + for handler in logger.handlers: + apply_formatter(handler, border=False) + border_text = f"|{separating_character * screen_width}|" + if border: + logger.log(getattr(logging, loglevel),border_text) + if text: + text_list = text.split("\n") + for t in text_list: + logger.log(getattr(logging, loglevel),f"|{sep}{centered(t, sep=sep)}{sep}|") + if border: + logger.log(getattr(logging, loglevel),border_text) + for handler in logger.handlers: + apply_formatter(handler) + +def apply_formatter(handler, border=True): + text = f"| %(message)-{screen_width - 2}s |" if border else f"%(message)-{screen_width - 2}s" + if isinstance(handler, RotatingFileHandler): + #text = f"[%(asctime)s] %(filename)-27s %(levelname)-10s {text}" + text = f"[%(asctime)s] %(levelname)-10s {text}" + handler.setFormatter(logging.Formatter(text)) + +def adjust_space(display_title): + display_title = str(display_title) + space_length = spacing - len(display_title) + if
space_length > 0: + display_title += " " * space_length + return display_title + +def insert_space(display_title,space_length=0): + display_title = str(display_title) + if space_length == 0: + space_length = spacing - len(display_title) + if space_length > 0: + display_title = " " * space_length + display_title + return display_title + +def print_return(text): + print(adjust_space(f"| {text}"), end="\r") + global spacing + spacing = len(text) + 2 + +def print_end(): + print(adjust_space(" "), end="\r") + global spacing + spacing = 0 \ No newline at end of file diff --git a/qbit_manage.py b/qbit_manage.py index 81e75a7..c7a89ef 100644 --- a/qbit_manage.py +++ b/qbit_manage.py @@ -1,119 +1,173 @@ #!/usr/bin/python3 -import os -import shutil -import yaml -import argparse -import logging -import logging.handlers -from qbittorrentapi import Client -import urllib3 +import argparse, logging, os, sys, time, shutil, urllib3, stat, fnmatch +from logging.handlers import RotatingFileHandler +from datetime import timedelta,datetime from collections import Counter -import glob from pathlib import Path -# import apprise +try: + import yaml, schedule + from qbittorrentapi import Client + from modules.docker import GracefulKiller + from modules import util +except ModuleNotFoundError: + print("Requirements Error: Requirements are not installed") + sys.exit(0) -parser = argparse.ArgumentParser('qBittorrent Manager.', - description='A mix of scripts combined for managing qBittorrent.') -parser.add_argument('-c', '--config-file', - dest='config', - action='store', - default='config.yml', - help='This is used if you want to use a different name for your config.yml. Example: tv.yml') -parser.add_argument('-l', '--log-file', - dest='logfile', - action='store', - default='activity.log', - help='This is used if you want to use a different name for your log file. Example: tv.log') -parser.add_argument('-m', '--manage', - dest='manage', - action='store_const', - const='manage', - help='Use this if you would like to update your tags, categories,' - ' remove unregistered torrents, AND recheck/resume paused torrents.') -parser.add_argument('-s', '--cross-seed', - dest='cross_seed', - action='store_const', - const='cross_seed', - help='Use this after running cross-seed script to add torrents from the cross-seed output folder to qBittorrent') -parser.add_argument('-re', '--recheck', - dest='recheck', - action='store_const', - const='recheck', - help='Recheck paused torrents sorted by lowest size. Resume if Completed.') -parser.add_argument('-g', '--cat-update', - dest='cat_update', - action='store_const', - const='cat_update', - help='Use this if you would like to update your categories.') -parser.add_argument('-t', '--tag-update', - dest='tag_update', - action='store_const', - const='tag_update', - help='Use this if you would like to update your tags. (Only adds tags to untagged torrents)') -parser.add_argument('-r', '--rem-unregistered', - dest='rem_unregistered', - action='store_const', - const='rem_unregistered', - help='Use this if you would like to remove unregistered torrents.') -parser.add_argument('-ro', '--rem-orphaned', - dest='rem_orphaned', - action='store_const', - const='rem_orphaned', - help='Use this if you would like to remove orphaned files from your `root_dir` directory that are not referenced by any torrents.' - ' It will scan your `root_dir` directory and compare it with what is in Qbitorrent. 
Any data not referenced in Qbitorrent will be moved into ' - ' `/data/torrents/orphaned_data` folder for you to review/delete.') -parser.add_argument('-tnhl', '--tag-nohardlinks', - dest='tag_nohardlinks', - action='store_const', - const='tag_nohardlinks', - help='Use this to tag any torrents that do not have any hard links associated with any of the files. This is useful for those that use Sonarr/Radarr' - 'that hard link your media files with the torrents for seeding. When files get upgraded they no longer become linked with your media therefore will be tagged with a new tag noHL' - 'You can then safely delete/remove these torrents to free up any extra space that is not being used by your media folder.') -parser.add_argument('--dry-run', - dest='dry_run', - action='store_const', - const='dry_run', - help='If you would like to see what is gonna happen but not actually move/delete or ' - 'tag/categorize anything.') -parser.add_argument('--log', - dest='loglevel', - action='store', - default='INFO', - help='Change your log level. ') + +if sys.version_info[0] != 3 or sys.version_info[1] < 6: + print("Version Error: Version: %s.%s.%s incompatible please use Python 3.6+" % (sys.version_info[0], sys.version_info[1], sys.version_info[2])) + sys.exit(0) + +parser = argparse.ArgumentParser('qBittorrent Manager.', description='A mix of scripts combined for managing qBittorrent.') +parser.add_argument('-r', '--run', dest='run', action='store_true', default=False, help='Run without the scheduler. Script will exit after completion.') +parser.add_argument('-sch', '--schedule', dest='min', default='30', type=str, help='Schedule to run every x minutes. (Default set to 30)') +parser.add_argument('-c', '--config-file', dest='configfile', action='store', default='config.yml', type=str, help='This is used if you want to use a different name for your config.yml. Example: tv.yml') +parser.add_argument('-lf', '--log-file', dest='logfile', action='store',default='activity.log', type=str, help='This is used if you want to use a different name for your log file. Example: tv.log',) +parser.add_argument('-cs', '--cross-seed', dest='cross_seed', action="store_true", default=False, help='Use this after running cross-seed script to add torrents from the cross-seed output folder to qBittorrent') +parser.add_argument('-re', '--recheck', dest='recheck', action="store_true", default=False, help='Recheck paused torrents sorted by lowest size. Resume if Completed.') +parser.add_argument('-cu', '--cat-update', dest='cat_update', action="store_true", default=False, help='Use this if you would like to update your categories.') +parser.add_argument('-tu', '--tag-update', dest='tag_update', action="store_true", default=False, help='Use this if you would like to update your tags. (Only adds tags to untagged torrents)') +parser.add_argument('-ru', '--rem-unregistered', dest='rem_unregistered', action="store_true", default=False, help='Use this if you would like to remove unregistered torrents.') +parser.add_argument('-ro', '--rem-orphaned', dest='rem_orphaned', action="store_true", default=False, help='Use this if you would like to remove unregistered torrents.') +parser.add_argument('-tnhl', '--tag-nohardlinks', dest='tag_nohardlinks', action="store_true", default=False, help='Use this to tag any torrents that do not have any hard links associated with any of the files. This is useful for those that use Sonarr/Radarr which hard link your media files with the torrents for seeding. 
When files get upgraded they no longer become linked with your media therefore will be tagged with a new tag noHL. You can then safely delete/remove these torrents to free up any extra space that is not being used by your media folder.') +parser.add_argument('-sr', '--skip-recycle', dest='skip_recycle', action="store_true", default=False, help='Use this to skip emptying the Reycle Bin folder.') +parser.add_argument('-dr', '--dry-run', dest='dry_run', action="store_true", default=False, help='If you would like to see what is gonna happen but not actually move/delete or tag/categorize anything.') +parser.add_argument('-ll', '--log-level', dest='log_level', action="store", default='INFO', type=str, help='Change your log level.') +parser.add_argument("-d", "--divider", dest="divider", help="Character that divides the sections (Default: '=')", default="=", type=str) +parser.add_argument("-w", "--width", dest="width", help="Screen Width (Default: 100)", default=100, type=int) args = parser.parse_args() -with open(args.config, 'r') as cfg_file: +def get_arg(env_str, default, arg_bool=False, arg_int=False): + env_var = os.environ.get(env_str) + if env_var: + if arg_bool: + if env_var is True or env_var is False: + return env_var + elif env_var.lower() in ["t", "true"]: + return True + else: + return False + elif arg_int: + return int(env_var) + else: + return str(env_var) + else: + return default + +run = get_arg("QBT_RUN", args.run, arg_bool=True) +sch = get_arg("QBT_SCHEDULE", args.min) +config_file = get_arg("QBT_CONFIG", args.configfile) +log_file = get_arg("QBT_LOGFILE", args.logfile) +cross_seed = get_arg("QBT_CROSS_SEED", args.cross_seed, arg_bool=True) +recheck = get_arg("QBT_RECHECK", args.recheck, arg_bool=True) +cat_update = get_arg("QBT_CAT_UPDATE", args.cat_update, arg_bool=True) +tag_update = get_arg("QBT_TAG_UPDATE", args.tag_update, arg_bool=True) +rem_unregistered = get_arg("QBT_REM_UNREGISTERED", args.rem_unregistered, arg_bool=True) +rem_orphaned = get_arg("QBT_REM_ORPHANED", args.rem_orphaned, arg_bool=True) +tag_nohardlinks = get_arg("QBT_TAG_NOHARDLINKS", args.tag_nohardlinks, arg_bool=True) +skip_recycle = get_arg("QBT_SKIP_RECYCLE", args.skip_recycle, arg_bool=True) +dry_run = get_arg("QBT_DRY_RUN", args.dry_run, arg_bool=True) +log_level = get_arg("QBT_LOG_LEVEL", args.log_level) +divider = get_arg("QBT_DIVIDER", args.divider) +screen_width = get_arg("QBT_WIDTH", args.width, arg_int=True) + +default_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "config") +root_path = '' #Global variable +remote_path = '' #Global variable + +util.separating_character = divider[0] + +if screen_width < 90 or screen_width > 300: + print(f"Argument Error: width argument invalid: {screen_width} must be an integer between 90 and 300 using the default 100") + screen_width = 100 +util.screen_width = screen_width + + +#Check if Schedule parameter is a number +if sch.isnumeric(): + sch = int(sch) +else: + print(f"Schedule Error: Schedule is not a number. 
Current value is set to '{sch}'") + sys.exit(0) + +#Config error handling +if not os.path.exists(os.path.join(default_dir, config_file)): + print(f"Config Error: config not found at {os.path.join(os.path.abspath(default_dir),config_file)}") + sys.exit(0) + +with open(os.path.join(default_dir,config_file), 'r') as cfg_file: cfg = yaml.load(cfg_file, Loader=yaml.FullLoader) + + +#Set root and remote directories +def validate_path(): + global root_path + global remote_path + #Assign root_dir + if 'root_dir' in cfg['directory']: + root_path = os.path.join(cfg['directory']['root_dir'], '') + else: + print('root_dir not defined in config.') + sys.exit(0) + #Assign remote_path + if ('remote_dir' in cfg['directory'] and cfg['directory']['remote_dir'] != ''): + remote_path = os.path.join(cfg['directory']['remote_dir'], '') + else: + remote_path = root_path + #Check to see if path exists + if not os.path.exists(remote_path): + print(f"Config Error: Path does not exist at '{os.path.abspath(remote_path)}'. Is your root_dir/remote_dir correctly defined in the config?") + sys.exit(0) + +#Root_dir/remote_dir error handling +if cross_seed or tag_nohardlinks or rem_orphaned: + validate_path() +else: + if 'recyclebin' in cfg and cfg["recyclebin"] != None: + if 'enabled' in cfg["recyclebin"] and cfg["recyclebin"]['enabled']: + validate_path() + + +os.makedirs(os.path.join(default_dir, "logs"), exist_ok=True) urllib3.disable_warnings() -file_name_format = args.logfile -msg_format = '%(asctime)s - %(levelname)s: %(message)s' -max_bytes = 1024 * 1024 * 2 -backup_count = 5 logger = logging.getLogger('qBit Manage') logging.DRYRUN = 25 logging.addLevelName(logging.DRYRUN, 'DRY-RUN') setattr(logger, 'dryrun', lambda dryrun, *args: logger._log(logging.DRYRUN, dryrun, args)) -log_lev = getattr(logging, args.loglevel.upper()) +log_lev = getattr(logging, log_level.upper()) logger.setLevel(log_lev) -file_handler = logging.handlers.RotatingFileHandler(filename=file_name_format, - maxBytes=max_bytes, - backupCount=backup_count) -file_handler.setLevel(log_lev) -file_formatter = logging.Formatter(msg_format) -file_handler.setFormatter(file_formatter) -logger.addHandler(file_handler) +def fmt_filter(record): + record.levelname = f"[{record.levelname}]" + #record.filename = f"[{record.filename}:{record.lineno}]" + return True -stream_handler = logging.StreamHandler() -stream_handler.setLevel(log_lev) -stream_formatter = logging.Formatter(msg_format) -stream_handler.setFormatter(stream_formatter) -logger.addHandler(stream_handler) +cmd_handler = logging.StreamHandler() +cmd_handler.setLevel(log_level) +logger.addHandler(cmd_handler) + +sys.excepthook = util.my_except_hook + +version = "Unknown" +with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "VERSION")) as handle: + for line in handle.readlines(): + line = line.strip() + if len(line) > 0: + version = line + break + + +file_logger = os.path.join(default_dir, "logs", log_file) +max_bytes = 1024 * 1024 * 2 +file_handler = RotatingFileHandler(file_logger, delay=True, mode="w", maxBytes=max_bytes, backupCount=10, encoding="utf-8") +util.apply_formatter(file_handler) +file_handler.addFilter(fmt_filter) +logger.addHandler(file_handler) # Actual API call to connect to qbt. 
host = cfg['qbt']['host'] @@ -131,10 +185,13 @@ client = Client(host=host, password=password) +############FUNCTIONS############## +#truncate the value of the torrent url to remove sensitive information def trunc_val(s, d, n=3): return d.join(s.split(d, n)[:n]) +#Get category from config file based on path provided def get_category(path): if 'cat' in cfg and cfg["cat"] != None: cat_path = cfg["cat"] @@ -149,7 +206,7 @@ def get_category(path): logger.warning('No categories matched. Check your config.yml file. - Setting category to NULL') return category - +#Get tags from config file based on keyword def get_tags(urls): if 'tags' in cfg and cfg["tags"] != None: tag_path = cfg['tags'] @@ -165,11 +222,24 @@ def get_tags(urls): logger.warning('No tags matched. Check your config.yml file. Setting tag to NULL') return tag -def remove_empty_directories(pathlib_root_dir): + +#Move files from source to destination, mod variable is to change the date modified of the file being moved +def move_files(src,dest,mod=False): + dest_path = os.path.dirname(dest) + if os.path.isdir(dest_path) == False: + os.makedirs(dest_path) + shutil.move(src, dest) + if(mod == True): + modTime = time.time() + os.utime(dest,(modTime,modTime)) + + +#Remove any empty directories after moving files +def remove_empty_directories(pathlib_root_dir,pattern): # list all directories recursively and sort them by path, # longest first L = sorted( - pathlib_root_dir.glob("*/*"), + pathlib_root_dir.glob(pattern), key=lambda p: len(str(p)), reverse=True, ) @@ -180,8 +250,16 @@ def remove_empty_directories(pathlib_root_dir): continue # catch and continue if non-empty # Will create a 2D Dictionary with the torrent name as the key -# torrentdict = {'TorrentName1' : {'Category':'TV', 'save_path':'/data/torrents/TV', 'count':1, 'msg':'[]'}, -# 'TorrentName2' : {'Category':'Movies', 'save_path':'/data/torrents/Movies'}, 'count':2, 'msg':'[]'} +# torrentdict = {'TorrentName1' : {'Category':'TV', 'save_path':'/data/torrents/TV', 'count':1, 'msg':'[]'...}, +# 'TorrentName2' : {'Category':'Movies', 'save_path':'/data/torrents/Movies'}, 'count':2, 'msg':'[]'...} +# List of dictionary key definitions +# Category = Returns category of the torrent (str) +# save_path = Returns the save path of the torrent (str) +# count = Returns a count of the total number of torrents with the same name (int) +# msg = Returns a list of torrent messages by name (list of str) +# status = Returns the list of status numbers of the torrent by name (0: Tracker is disabled (used for DHT, PeX, and LSD), 1: Tracker has not been contacted yet, 2:Tracker has been contacted and is working, 3:Tracker is updating, 4:Tracker has been contacted, but it is not working (or doesn't send proper replies) +# is_complete = Returns the state of torrent (Returns True if at least one of the torrent with the State is categorized as Complete.) 
+# first_hash = Returns the hash number of the original torrent (Assuming the torrent list is sorted by date added (Asc)) def get_torrent_info(t_list): torrentdict = {} for torrent in t_list: @@ -193,58 +271,66 @@ def get_torrent_info(t_list): msg_list = torrentdict[torrent.name]['msg'] status_list = torrentdict[torrent.name]['status'] is_complete = True if torrentdict[torrent.name]['is_complete'] == True else torrent.state_enum.is_complete + first_hash = torrentdict[torrent.name]['first_hash'] else: t_count = 1 msg_list = [] status_list = [] is_complete = torrent.state_enum.is_complete + first_hash = torrent.hash msg,status = [(x.msg,x.status) for x in torrent.trackers if x.url.startswith('http')][0] msg_list.append(msg) status_list.append(status) - torrentattr = {'Category': category, 'save_path': save_path, 'count': t_count, 'msg': msg_list, 'status': status_list, 'is_complete': is_complete} + torrentattr = {'Category': category, 'save_path': save_path, 'count': t_count, 'msg': msg_list, 'status': status_list, 'is_complete': is_complete, 'first_hash':first_hash} torrentdict[torrent.name] = torrentattr return torrentdict # Function used to recheck paused torrents sorted by size and resume torrents that are completed -def recheck(): - if args.cross_seed == 'cross_seed' or args.manage == 'manage' or args.recheck == 'recheck': +def set_recheck(): + if recheck: + util.separator(f"Rechecking Paused Torrents", space=False, border=False) #sort by size and paused torrent_sorted_list = client.torrents.info(status_filter='paused',sort='size') - torrentdict = get_torrent_info(client.torrents.info(sort='added_on',reverse=True)) - for torrent in torrent_sorted_list: - new_tag,t_url = get_tags([x.url for x in torrent.trackers if x.url.startswith('http')]) - if torrent.tags == '' or ('cross-seed' in torrent.tags and len([e for e in torrent.tags.split(",") if not 'noHL' in e]) == 1): torrent.add_tags(tags=new_tag) - #Resume torrent if completed - if torrent.progress == 1: - if args.dry_run == 'dry_run': - logger.dryrun(f'\n - Not Resuming {new_tag} - {torrent.name}') - else: - logger.info(f'\n - Resuming {new_tag} - {torrent.name}') - torrent.resume() - #Recheck - elif torrent.progress == 0 and torrentdict[torrent.name]['is_complete']: - if args.dry_run == 'dry_run': - logger.dryrun(f'\n - Not Rechecking {new_tag} - {torrent.name}') - else: - logger.info(f'\n - Rechecking {new_tag} - {torrent.name}') - torrent.recheck() + if torrent_sorted_list: + for torrent in torrent_sorted_list: + new_tag,t_url = get_tags([x.url for x in torrent.trackers if x.url.startswith('http')]) + if torrent.tags == '' or ('cross-seed' in torrent.tags and len([e for e in torrent.tags.split(",") if not 'noHL' in e]) == 1): torrent.add_tags(tags=new_tag) + #Resume torrent if completed + if torrent.progress == 1: + #Check to see if torrent meets AutoTorrentManagement criteria + logger.debug(f'Rechecking Torrent to see if torrent meets AutoTorrentManagement Criteria') + logger.debug(util.insert_space(f'- Torrent Name: {torrent.name}',2)) + logger.debug(util.insert_space(f'-- Ratio vs Max Ratio: {torrent.ratio} < {torrent.max_ratio}',4)) + logger.debug(util.insert_space(f'-- Seeding Time vs Max Seed Time: {timedelta(seconds=torrent.seeding_time)} < {timedelta(minutes=torrent.max_seeding_time)}',4)) + if torrent.ratio < torrent.max_ratio and (torrent.seeding_time < (torrent.max_seeding_time * 60)): + if dry_run: + logger.dryrun(f'Not Resuming {new_tag} - {torrent.name}') + else: + logger.info(f'Resuming {new_tag} - {torrent.name}') + 
torrent.resume() + #Recheck + elif torrent.progress == 0 and torrentdict[torrent.name]['is_complete'] and not torrent.state_enum.is_checking: + if dry_run: + logger.dryrun(f'Not Rechecking {new_tag} - {torrent.name}') + else: + logger.info(f'Rechecking {new_tag} - {torrent.name}') + torrent.recheck() # Function used to move any torrents from the cross seed directory to the correct save directory -def cross_seed(): - if args.cross_seed == 'cross_seed': +def set_cross_seed(): + if cross_seed: + util.separator(f"Checking for Cross-Seed Torrents", space=False, border=False) # List of categories for all torrents moved categories = [] # Keep track of total torrents moved total = 0 - # Used to output the final list torrents moved to output in the log - torrents_added = '' + #Track # of torrents tagged that are not cross-seeded + t_tagged = 0 # Only get torrent files cs_files = [f for f in os.listdir(os.path.join(cfg['directory']['cross_seed'], '')) if f.endswith('torrent')] dir_cs = os.path.join(cfg['directory']['cross_seed'], '') dir_cs_out = os.path.join(dir_cs,'qbit_manage_added') os.makedirs(dir_cs_out,exist_ok=True) - torrent_list = client.torrents.info(sort='added_on',reverse=True) - torrentdict = get_torrent_info(torrent_list) for file in cs_files: t_name = file.split(']', 2)[2].split('.torrent')[0] # Substring Key match in dictionary (used because t_name might not match exactly with torrentdict key) @@ -258,66 +344,79 @@ def cross_seed(): src = os.path.join(dir_cs,file) dir_cs_out = os.path.join(dir_cs,'qbit_manage_added',file) categories.append(category) - if args.dry_run == 'dry_run': - logger.dryrun(f'Not Adding {t_name} to qBittorrent with: ' - f'\n - Category: {category}' - f'\n - Save_Path: {dest}' - f'\n - Paused: True') + if dry_run: + logger.dryrun(f'Not Adding to qBittorrent:') + logger.dryrun(util.insert_space(f'Torrent Name: {t_name}',3)) + logger.dryrun(util.insert_space(f'Category: {category}',7)) + logger.dryrun(util.insert_space(f'Save_Path: {dest}',6)) else: if torrentdict[t_name]['is_complete']: client.torrents.add(torrent_files=src, save_path=dest, category=category, + tags='cross-seed', is_paused=True) shutil.move(src, dir_cs_out) - logger.info(f'Adding {t_name} to qBittorrent with: ' - f'\n - Category: {category}' - f'\n - Save_Path: {dest}' - f'\n - Paused: True') + logger.info(f'Adding to qBittorrent:') + logger.info(util.insert_space(f'Torrent Name: {t_name}',3)) + logger.info(util.insert_space(f'Category: {category}',7)) + logger.info(util.insert_space(f'Save_Path: {dest}',6)) else: - logger.info(f'Found {t_name} in {dir_cs} but original torrent is not complete. 
Not adding to qBittorrent') + logger.info(f'Found {t_name} in {dir_cs} but original torrent is not complete.') + logger.info(f'Not adding to qBittorrent') else: - if args.dry_run == 'dry_run': + if dry_run: logger.dryrun(f'{t_name} not found in torrents.') else: logger.warning(f'{t_name} not found in torrents.') numcategory = Counter(categories) - if args.dry_run == 'dry_run': + #Tag missing cross-seed torrents tags + for torrent in torrent_list: + t_name = torrent.name + if 'cross-seed' not in torrent.tags and torrentdict[t_name]['count'] > 1 and torrentdict[t_name]['first_hash'] != torrent.hash: + t_tagged += 1 + if dry_run: + logger.dryrun(f'Not Adding cross-seed tag to {t_name}') + else: + logger.info(f'Adding cross-seed tag to {t_name}') + torrent.add_tags(tags='cross-seed') + + + if dry_run: for c in numcategory: total += numcategory[c] - torrents_added += f'\n - {c} .torrents not added: {numcategory[c]}' - torrents_added += f'\n -- Total .torrents not added: {total}' - logger.dryrun(torrents_added) + if numcategory[c] > 0: logger.dryrun(f'{numcategory[c]} {c} cross-seed .torrents not added.') + if total > 0: logger.dryrun(f'Total {total} cross-seed .torrents not added.') + if t_tagged > 0:logger.dryrun(f'Total {t_tagged} cross-seed .torrents not tagged.') else: for c in numcategory: total += numcategory[c] - torrents_added += f'\n - {c} .torrents added: {numcategory[c]}' - torrents_added += f'\n -- Total .torrents added: {total}' - logger.info(torrents_added) + if numcategory[c] > 0: logger.info(f'{numcategory[c]} {c} cross-seed .torrents added.') + if total > 0: logger.info(f'Total {total} cross-seed .torrents added.') + if t_tagged > 0:logger.info(f'Total {t_tagged} cross-seed .torrents tagged.') - -def update_category(): - if args.manage == 'manage' or args.cat_update == 'cat_update': +def set_category(): + if cat_update: + util.separator(f"Updating Categories", space=False, border=False) num_cat = 0 - torrent_list = client.torrents.info(sort='added_on',reverse=True) for torrent in torrent_list: if torrent.category == '': for x in torrent.trackers: if x.url.startswith('http'): t_url = trunc_val(x.url, '/') new_cat = get_category(torrent.save_path) - if args.dry_run == 'dry_run': - logger.dryrun(f'\n - Torrent Name: {torrent.name}' - f'\n - New Category: {new_cat}' - f'\n - Tracker: {t_url}') + if dry_run: + logger.dryrun(util.insert_space(f'Torrent Name: {torrent.name}',3)) + logger.dryrun(util.insert_space(f'New Category: {new_cat}',3)) + logger.dryrun(util.insert_space(f'Tracker: {t_url}',8)) num_cat += 1 else: - logger.info(f'\n - Torrent Name: {torrent.name}' - f'\n - New Category: {new_cat}' - f'\n - Tracker: {t_url}') + logger.info(util.insert_space(f'- Torrent Name: {torrent.name}',1)) + logger.info(util.insert_space(f'-- New Category: {new_cat}',5)) + logger.info(util.insert_space(f'-- Tracker: {t_url}',5)) torrent.set_category(category=new_cat) num_cat += 1 - if args.dry_run == 'dry_run': + if dry_run: if num_cat >= 1: logger.dryrun(f'Did not update {num_cat} new categories.') else: @@ -329,25 +428,25 @@ def update_category(): logger.info(f'No new torrents to categorize.') -def update_tags(): - if args.manage == 'manage' or args.tag_update == 'tag_update': +def set_tags(): + if tag_update: + util.separator(f"Updating Tags", space=False, border=False) num_tags = 0 - torrent_list = client.torrents.info(sort='added_on',reverse=True) for torrent in torrent_list: if torrent.tags == '' or ('cross-seed' in torrent.tags and len([e for e in torrent.tags.split(",") if not 'noHL' 
in e]) == 1): new_tag,t_url = get_tags([x.url for x in torrent.trackers if x.url.startswith('http')]) - if args.dry_run == 'dry_run': - logger.dryrun(f'\n - Torrent Name: {torrent.name}' - f'\n - New Tag: {new_tag}' - f'\n - Tracker: {t_url}') + if dry_run: + logger.dryrun(util.insert_space(f'Torrent Name: {torrent.name}',3)) + logger.dryrun(util.insert_space(f'New Tag: {new_tag}',8)) + logger.dryrun(util.insert_space(f'Tracker: {t_url}',8)) num_tags += 1 else: - logger.info(f'\n - Torrent Name: {torrent.name}' - f'\n - New Tag: {new_tag}' - f'\n - Tracker: {t_url}') + logger.info(util.insert_space(f'Torrent Name: {torrent.name}',3)) + logger.info(util.insert_space(f'New Tag: {new_tag}',8)) + logger.info(util.insert_space(f'Tracker: {t_url}',8)) torrent.add_tags(tags=new_tag) num_tags += 1 - if args.dry_run == 'dry_run': + if dry_run: if num_tags >= 1: logger.dryrun(f'Did not update {num_tags} new tags.') else: @@ -359,10 +458,9 @@ def update_tags(): logger.info('No new torrents to tag. ') -def rem_unregistered(): - if args.manage == 'manage' or args.rem_unregistered == 'rem_unregistered': - torrent_list = client.torrents.info(sort='added_on',reverse=True) - torrentdict = get_torrent_info(torrent_list) +def set_rem_unregistered(): + if rem_unregistered: + util.separator(f"Removing Unregistered Torrents", space=False, border=False) rem_unr = 0 del_tor = 0 pot_unr = '' @@ -375,17 +473,22 @@ def rem_unregistered(): if x.url.startswith('http'): t_url = trunc_val(x.url, '/') msg_up = x.msg.upper() - n_info = (f'\n - Torrent Name: {t_name} ' - f'\n - Status: {msg_up} ' - f'\n - Tracker: {t_url} ' - f'\n - Deleted .torrent but not content files.') - n_d_info = (f'\n - Torrent Name: {t_name} ' - f'\n - Status: {msg_up} ' - f'\n - Tracker: {t_url} ' - f'\n - Deleted .torrent AND content files.') + n_info = '' + n_d_info = '' + + n_info += (util.insert_space(f'Torrent Name: {t_name}',3)+'\n') + n_info += (util.insert_space(f'Status: {msg_up}',9)+'\n') + n_info += (util.insert_space(f'Tracker: {t_url}',8)+'\n') + n_info += (util.insert_space(f'Deleted .torrent but NOT content files.',8)+'\n') + + n_d_info += (util.insert_space(f'Torrent Name: {t_name}',3)+'\n') + n_d_info += (util.insert_space(f'Status: {msg_up}',9)+'\n') + n_d_info += (util.insert_space(f'Tracker: {t_url}',8)+'\n') + n_d_info += (util.insert_space(f'Deleted .torrent AND content files.',8)+'\n') + if (x.status == 4 and 'DOWN' not in msg_up and 'UNREACHABLE' not in msg_up): - pot_unr += (f'\n - Torrent: {torrent.name}') - pot_unr += (f'\n - Message: {x.msg}') + pot_unr += (util.insert_space(f'Torrent Name: {t_name}',3)+'\n') + pot_unr += (util.insert_space(f'Status: {msg_up}',9)+'\n') if ('UNREGISTERED' in msg_up or \ 'TORRENT NOT FOUND' in msg_up or \ 'TORRENT IS NOT FOUND' in msg_up or \ @@ -396,41 +499,39 @@ def rem_unregistered(): 'REDOWNLOAD' in msg_up or \ 'PACKS' in msg_up or \ 'REPACKED' in msg_up or \ - 'PACK' in msg_up \ + 'PACK' in msg_up or \ + 'TRUMP' in msg_up ) and x.status == 4 and 'DOWN' not in msg_up and 'UNREACHABLE' not in msg_up: - logger.debug(f'Torrent counts: {t_count}') - logger.debug(f'msg: {t_msg}') - logger.debug(f'status: {t_status}') if t_count > 1: - if args.dry_run == 'dry_run': - if '' in t_msg: - logger.dryrun(n_info) + if dry_run: + if '' in t_msg: + util.print_multiline(n_info,"DRYRUN") rem_unr += 1 else: - logger.dryrun(n_d_info) + util.print_multiline(n_d_info,"DRYRUN") del_tor += 1 else: # Checks if any of the original torrents are working - if '' in t_msg or 2 in t_status: - logger.info(n_info) 
+ if '' in t_msg or 2 in t_status: + util.print_multiline(n_info) torrent.delete(hash=torrent.hash, delete_files=False) rem_unr += 1 else: - logger.info(n_d_info) - torrent.delete(hash=torrent.hash, delete_files=True) + util.print_multiline(n_d_info) + tor_delete_recycle(torrent) del_tor += 1 else: - if args.dry_run == 'dry_run': - logger.dryrun(n_d_info) + if dry_run: + util.print_multiline(n_d_info,"DRYRUN") del_tor += 1 else: - logger.info(n_d_info) - torrent.delete(hash=torrent.hash, delete_files=True) + util.print_multiline(n_d_info) + tor_delete_recycle(torrent) del_tor += 1 - if args.dry_run == 'dry_run': + if dry_run: if rem_unr >= 1 or del_tor >= 1: - logger.dryrun(f'Did not delete {rem_unr} .torrents(s) or content files.') - logger.dryrun(f'Did not delete {del_tor} .torrents(s) or content files.') + logger.dryrun(f'Did not delete {rem_unr} .torrents(s) but not content files.') + logger.dryrun(f'Did not delete {del_tor} .torrents(s) AND content files.') else: logger.dryrun('No unregistered torrents found.') else: @@ -440,47 +541,52 @@ def rem_unregistered(): else: logger.info('No unregistered torrents found.') if (len(pot_unr) > 0): - logger.debug(f'Potential Unregistered torrents: {pot_unr}') + util.separator(f"Potential Unregistered torrents", space=False, border=False, loglevel='DEBUG') + util.print_multiline(pot_unr,"DEBUG") -def rem_orphaned(): - if args.rem_orphaned == 'rem_orphaned': - torrent_list = client.torrents.info() + +def set_rem_orphaned(): + if rem_orphaned: + util.separator(f"Checking for Orphaned Files", space=False, border=False) torrent_files = [] root_files = [] orphaned_files = [] + excluded_orphan_files = [] + orphaned_parent_path = set() - if 'root_dir' in cfg['directory']: - root_path = os.path.join(cfg['directory']['root_dir'], '') + if (remote_path != root_path): + root_files = [os.path.join(path.replace(remote_path,root_path), name) for path, subdirs, files in os.walk(remote_path) for name in files if os.path.join(remote_path,'orphaned_data') not in path and os.path.join(remote_path,'.RecycleBin') not in path] else: - logger.error('root_dir not defined in config.') - return - - if ('remote_dir' in cfg['directory'] and cfg['directory']['remote_dir'] != ''): - remote_path = os.path.join(cfg['directory']['remote_dir'], '') - root_files = [os.path.join(path.replace(remote_path,root_path), name) for path, subdirs, files in os.walk(remote_path) for name in files if os.path.join(remote_path,'orphaned_data') not in path] - else: - remote_path = root_path - root_files = [os.path.join(path, name) for path, subdirs, files in os.walk(root_path) for name in files if os.path.join(root_path,'orphaned_data') not in path] + root_files = [os.path.join(path, name) for path, subdirs, files in os.walk(root_path) for name in files if os.path.join(root_path,'orphaned_data') not in path and os.path.join(root_path,'.RecycleBin') not in path] for torrent in torrent_list: for file in torrent.files: torrent_files.append(os.path.join(torrent.save_path,file.name)) - + orphaned_files = set(root_files) - set(torrent_files) orphaned_files = sorted(orphaned_files) - logger.debug('----------torrent files-----------') - logger.debug("\n".join(torrent_files)) - logger.debug('----------root_files-----------') - logger.debug("\n".join(root_files)) - logger.debug('----------orphaned_files-----------') - logger.debug("\n".join(orphaned_files)) - logger.debug('----------Deleting orphan files-----------') + + if 'orphaned' in cfg and cfg["orphaned"] is not None and 'exclude_patterns' in 
cfg['orphaned'] and cfg['orphaned']['exclude_patterns'] != '':
+        exclude_patterns = cfg['orphaned']['exclude_patterns']
+        excluded_orphan_files = [file for file in orphaned_files for exclude_pattern in exclude_patterns if fnmatch.fnmatch(file, exclude_pattern.replace(remote_path,root_path))]
+
+    orphaned_files = set(orphaned_files) - set(excluded_orphan_files)
+    util.separator(f"Torrent Files", space=False, border=False, loglevel='DEBUG')
+    util.print_multiline("\n".join(torrent_files),'DEBUG')
+    util.separator(f"Root Files", space=False, border=False,loglevel='DEBUG')
+    util.print_multiline("\n".join(root_files),'DEBUG')
+    util.separator(f"Excluded Orphan Files", space=False, border=False,loglevel='DEBUG')
+    util.print_multiline("\n".join(excluded_orphan_files),'DEBUG')
+    util.separator(f"Orphaned Files", space=False, border=False,loglevel='DEBUG')
+    util.print_multiline("\n".join(orphaned_files),'DEBUG')
+    util.separator(f"Deleting Orphaned Files", space=False, border=False,loglevel='DEBUG')
+
     if (orphaned_files):
-        if args.dry_run == 'dry_run':
+        if dry_run:
             dir_out = os.path.join(remote_path,'orphaned_data')
-            logger.dryrun(f'\n----------{len(orphaned_files)} Orphan files found-----------'
-                f'\n - '+'\n - '.join(orphaned_files)+
-                f'\n - Did not move {len(orphaned_files)} Orphaned files to {dir_out.replace(remote_path,root_path)}')
+            util.separator(f"{len(orphaned_files)} Orphaned files found", space=False, border=False,loglevel='DRYRUN')
+            util.print_multiline("\n".join(orphaned_files),'DRYRUN')
+            logger.dryrun(f'Did not move {len(orphaned_files)} Orphaned files to {dir_out.replace(remote_path,root_path)}')
         else:
             dir_out = os.path.join(remote_path,'orphaned_data')
             os.makedirs(dir_out,exist_ok=True)
@@ -488,24 +594,25 @@ def rem_orphaned():
             for file in orphaned_files:
                 src = file.replace(root_path,remote_path)
                 dest = os.path.join(dir_out,file.replace(root_path,''))
-                src_path = trunc_val(src, '/',len(remote_path.split('/')))
-                dest_path = os.path.dirname(dest)
-                if os.path.isdir(dest_path) == False:
-                    os.makedirs(dest_path)
-                shutil.move(src, dest)
-            logger.info(f'\n----------{len(orphaned_files)} Orphan files found-----------'
-                f'\n - '+'\n - '.join(orphaned_files)+
-                f'\n - Moved {len(orphaned_files)} Orphaned files to {dir_out.replace(remote_path,root_path)}')
+                move_files(src,dest)
+                orphaned_parent_path.add(os.path.dirname(file).replace(root_path,remote_path))
+            util.separator(f"{len(orphaned_files)} Orphaned files found", space=False, border=False)
+            util.print_multiline("\n".join(orphaned_files))
+            logger.info(f'Moved {len(orphaned_files)} Orphaned files to {dir_out.replace(remote_path,root_path)}')
             #Delete empty directories after moving orphan files
-            remove_empty_directories(Path(remote_path))
+            logger.info(f'Cleaning up any empty directories...')
+            for parent_path in orphaned_parent_path:
+                remove_empty_directories(Path(parent_path),"**/*")
     else:
-        if args.dry_run == 'dry_run':
+        if dry_run:
             logger.dryrun('No Orphaned Files found.')
         else:
             logger.info('No Orphaned Files found.')

-def tag_nohardlinks():
-    if args.tag_nohardlinks == 'tag_nohardlinks':
+
+def set_tag_nohardlinks():
+    if tag_nohardlinks:
+        util.separator(f"Tagging Torrents with No Hardlinks", space=False, border=False)
         nohardlinks = cfg['nohardlinks']
         n_info = ''
         t_count = 0 #counter for the number of torrents that has no hard links
@@ -514,15 +621,6 @@ def tag_nohardlinks():
         tdel_tags = 0 #counter for number of torrents that previously had no hard links but now have hard links
         tdel_dict = {} #dictionary to track the torrent names and content path that meet the deletion criteria
         t_excl_tags = []#list of tags to exclude based on config.yml
-        if 'root_dir' in cfg['directory']:
-            root_path = os.path.join(cfg['directory']['root_dir'], '')
-        else:
-            logger.error('root_dir not defined in config.')
-            return
-        if ('remote_dir' in cfg['directory'] and cfg['directory']['remote_dir'] != ''):
-            remote_path = os.path.join(cfg['directory']['remote_dir'], '')
-        else:
-            remote_path = root_path
         for category in nohardlinks:
             torrent_list = client.torrents.info(category=category,filter='completed')
@@ -537,7 +635,7 @@ def tag_nohardlinks():
                 logger.error('No torrents found in the category ('+category+') defined in config.yml inside the nohardlinks section. Please check if this matches with any category in qbittorrent and has 1 or more torrents.')
                 continue
             for torrent in torrent_list:
-                if args.dry_run != 'dry_run':
+                if not dry_run:
                     torrent.resume()
                 if('exclude_tags' in nohardlinks[category] and (any(tag in torrent.tags for tag in t_excl_tags))):
                     #Skip to the next torrent if we find any torrents that are in the exclude tag
@@ -548,25 +646,26 @@ def tag_nohardlinks():
                     #Will only tag new torrents that don't have noHL tag
                     if('noHL' not in torrent.tags):
                         t_count += 1
-                        n_info += (f'\n - Torrent Name: {torrent.name} has no hard links found.')
-                        n_info += (' Adding tags noHL.')
+                        n_info += (f"No hard links found! Adding tags noHL\n")
+                        n_info += (util.insert_space(f'Torrent Name: {torrent.name}',3)+'\n')
+
                         if(nohardlinks[category] != None):
                             #set the max seeding time for the torrent
                             if ('max_seeding_time' in nohardlinks[category]):
                                 seeding_time_limit = nohardlinks[category]['max_seeding_time']
-                                n_info += (' \n Setting max seed time to ' + str(seeding_time_limit) + '.')
+                                n_info += (util.insert_space(f'New Max Seed Time: {str(seeding_time_limit)}',3)+'\n')
                             else:
                                 seeding_time_limit = -2
                             #set the max ratio for the torrent
                             if ('max_ratio' in nohardlinks[category]):
                                 ratio_limit = nohardlinks[category]['max_ratio']
-                                n_info += (' \n Setting max ratio to ' + str(ratio_limit)+ '.')
+                                n_info += (util.insert_space(f'New Max Ratio: {str(ratio_limit)}',3)+'\n')
                             else:
                                 ratio_limit = -2
                         else:
                             seeding_time_limit = -2
                             ratio_limit = -2
-                        if args.dry_run != 'dry_run':
+                        if not dry_run:
                             #set the tag for no hard links
                             torrent.add_tags(tags='noHL')
                             client.torrents_set_share_limits(ratio_limit,seeding_time_limit,torrent.hash)
@@ -577,20 +676,20 @@ def tag_nohardlinks():
                     # Deletes torrent with data if cleanup is set to true and meets the ratio/seeding requirements
                     if ('cleanup' in nohardlinks[category] and nohardlinks[category]['cleanup'] and torrent.state_enum.is_paused and len(nohardlinks[category])>0):
                         t_del += 1
-                        n_info += (f'\n - Torrent Name: {torrent.name} has no hard links found and meets ratio/seeding requirements.')
+                        n_info += (f'Torrent Name: {torrent.name} has no hard links found and meets ratio/seeding requirements.\n')
                         tdel_dict[torrent.name] = torrent['content_path'].replace(root_path,remote_path)
-                        if args.dry_run == 'dry_run':
-                            n_info += (' \n Cleanup flag set to true. NOT Deleting torrent + contents.')
+                        if dry_run:
+                            n_info += (util.insert_space(f'Cleanup flag set to true. NOT Deleting torrent + contents.',6)+'\n')
                         else:
-                            n_info += (' \n Cleanup flag set to true. Deleting torrent + contents.')
+                            n_info += (util.insert_space(f'Cleanup flag set to true. Deleting torrent + contents.',6)+'\n')
                 #Checks to see if previous noHL tagged torrents now have hard links.
                 if (not (nohardlink(torrent['content_path'].replace(root_path,remote_path))) and ('noHL' in torrent.tags)):
-                    n_info += (f'\n - Previous Tagged noHL Torrent Name: {torrent.name} has hard links found now.')
-                    n_info += (' Removing tags noHL.')
-                    n_info += (' Removing ratio and seeding time limits.')
+                    n_info += (f'Previous Tagged noHL Torrent Name: {torrent.name} has hard links found now.\n')
+                    n_info += ('Removing tags noHL.\n')
+                    n_info += ('Removing ratio and seeding time limits.\n')
                     tdel_tags += 1
-                    if args.dry_run != 'dry_run':
+                    if not dry_run:
                         #Remove tags and share limits
                         torrent.remove_tags(tags='noHL')
                         client.torrents_set_share_limits(-2,-2,torrent.hash)
@@ -603,15 +702,15 @@ def tag_nohardlinks():
                     #Double check that the content path is the same before we delete anything
                     if torrent['content_path'].replace(root_path,remote_path) == tdel_dict[torrent.name]:
                         t_del_cs += 1
-                        if args.dry_run != 'dry_run':
+                        if not dry_run:
                             if (os.path.exists(torrent['content_path'].replace(root_path,remote_path))):
-                                torrent.delete(hash=torrent.hash, delete_files=True)
+                                tor_delete_recycle(torrent)
                             else:
                                 torrent.delete(hash=torrent.hash, delete_files=False)
-        if args.dry_run == 'dry_run':
+        if dry_run:
             if t_count >= 1 or len(n_info) > 1:
-                logger.dryrun(n_info)
+                util.print_multiline(n_info,"DRYRUN")
                 logger.dryrun(f'Did not tag/set ratio limit/seeding time for {t_count} .torrents(s)')
                 if t_del >= 1:
                     logger.dryrun(f'Did not delete {t_del} .torrents(s) or content files.')
@@ -622,7 +721,7 @@ def tag_nohardlinks():
                 logger.dryrun('No torrents to tag with no hard links.')
         else:
             if t_count >= 1 or len(n_info) > 1:
-                logger.info(n_info)
+                util.print_multiline(n_info)
                 logger.info(f'tag/set ratio limit/seeding time for {t_count} .torrents(s)')
                 if t_del >= 1:
                     logger.info(f'Deleted {t_del} .torrents(s) AND content files.')
@@ -646,14 +745,154 @@ def nohardlink(file):
         check = False
     return check

-def run():
-    update_category()
-    update_tags()
-    rem_unregistered()
-    cross_seed()
-    recheck()
-    rem_orphaned()
-    tag_nohardlinks()
+def tor_delete_recycle(torrent):
+    if 'recyclebin' in cfg and cfg["recyclebin"] != None:
+        if 'enabled' in cfg["recyclebin"] and cfg["recyclebin"]['enabled']:
+            tor_files = []
+            #Define torrent files/folders
+            for file in torrent.files:
+                tor_files.append(os.path.join(torrent.save_path,file.name))
+
+            #Create recycle bin if not exists
+            recycle_path = os.path.join(remote_path,'.RecycleBin')
+            os.makedirs(recycle_path,exist_ok=True)
+
+            #Move files from torrent contents to Recycle bin
+            for file in tor_files:
+                src = file.replace(root_path,remote_path)
+                dest = os.path.join(recycle_path,file.replace(root_path,''))
+                #move files and change date modified
+                move_files(src,dest,True)
+            util.separator(f"Moving {len(tor_files)} files to RecycleBin", space=False, border=False,loglevel='DEBUG')
+            util.print_multiline("\n".join(tor_files),'DEBUG')
+            logger.debug(f'Moved {len(tor_files)} files to {recycle_path.replace(remote_path,root_path)}')
+            #Delete torrent and files
+            torrent.delete(hash=torrent.hash, delete_files=False)
+            #Remove any empty directories
+            remove_empty_directories(Path(torrent.save_path.replace(root_path,remote_path)),"**/*")
+        else:
+            torrent.delete(hash=torrent.hash, delete_files=True)
+    else:
+        logger.error('recyclebin not defined in config.')
+        return
+
+
+def set_empty_recycle():
+    if not skip_recycle:
+        num_del = 0
+        n_info = ''
+        if 'recyclebin' in cfg and cfg["recyclebin"] != None:
+            if 'enabled' in cfg["recyclebin"] and cfg["recyclebin"]['enabled'] and 'empty_after_x_days' in cfg["recyclebin"]:
+                if 'root_dir' in cfg['directory']:
+                    root_path = os.path.join(cfg['directory']['root_dir'], '')
+                else:
+                    logger.error('root_dir not defined in config. This is required to use recyclebin feature')
+                    return
+
+                if ('remote_dir' in cfg['directory'] and cfg['directory']['remote_dir'] != ''):
+                    remote_path = os.path.join(cfg['directory']['remote_dir'], '')
+                    recycle_path = os.path.join(remote_path,'.RecycleBin')
+                else:
+                    remote_path = root_path
+                    recycle_path = os.path.join(root_path,'.RecycleBin')
+                recycle_files = [os.path.join(path, name) for path, subdirs, files in os.walk(recycle_path) for name in files]
+                recycle_files = sorted(recycle_files)
+                empty_after_x_days = cfg["recyclebin"]['empty_after_x_days']
+                if recycle_files:
+                    util.separator(f"Emptying Recycle Bin (Files > {empty_after_x_days} days)", space=False, border=False)
+                    for file in recycle_files:
+                        fileStats = os.stat(file)
+                        filename = file.replace(recycle_path,'')
+                        last_modified = fileStats[stat.ST_MTIME] # in seconds (last modified time)
+                        now = time.time() # in seconds
+                        days = (now - last_modified) / (60 * 60 * 24)
+                        if (empty_after_x_days <= days):
+                            num_del += 1
+                            if dry_run:
+                                n_info += (f'Did not delete {filename} from the recycle bin. (Last modified {round(days)} days ago).\n')
+                            else:
+                                n_info += (f'Deleted {filename} from the recycle bin. (Last modified {round(days)} days ago).\n')
+                                os.remove(file)
+                    if num_del > 0:
+                        if dry_run:
+                            util.print_multiline(n_info,'DRYRUN')
+                            logger.dryrun(f'Did not delete {num_del} files from the Recycle Bin.')
+                        else:
+                            remove_empty_directories(Path(recycle_path),"**/*")
+                            util.print_multiline(n_info)
+                            logger.info(f'Deleted {num_del} files from the Recycle Bin.')
+                else:
+                    logger.debug('No files found in "' + recycle_path + '"')
+            else:
+                logger.debug('Recycle bin has been disabled or "empty_after_x_days" var not defined in config.')
+
+        else:
+            logger.error('recyclebin not defined in config.')
+            return
+
+
+#Define global parameters
+torrent_list = None
+torrentdict = None
+
+
+def start():
+    #Global parameters to get the torrent dictionary
+    global torrent_list
+    global torrentdict
+    start_time = datetime.now()
+    if dry_run:
+        start_type = "Dry-"
+    else:
+        start_type = ""
+    util.separator(f"Starting {start_type}Run")
+    util.separator(f"Getting Torrent List", space=False, border=False)
+    #Get an updated list of torrents
+    torrent_list = client.torrents.info(sort='added_on')
+    if recheck or cross_seed or rem_unregistered:
+        #Get an updated torrent dictionary information of the torrents
+        torrentdict = get_torrent_info(torrent_list)
+    set_category()
+    set_tags()
+    set_rem_unregistered()
+    set_cross_seed()
+    set_recheck()
+    set_rem_orphaned()
+    set_tag_nohardlinks()
+    set_empty_recycle()
+    end_time = datetime.now()
+    run_time = str(end_time - start_time).split('.')[0]
+    util.separator(f"Finished {start_type}Run\nRun Time: {run_time}")
+
+def end():
+    logger.info("Exiting Qbit_manage")
+    logger.removeHandler(file_handler)
+    sys.exit(0)

 if __name__ == '__main__':
-    run()
+    killer = GracefulKiller()
+    util.separator()
+    logger.info(util.centered(" _ _ _ "))
+    logger.info(util.centered(" | | (_) | "))
+    logger.info(util.centered(" __ _| |__ _| |_ _ __ ___ __ _ _ __ __ _ __ _ ___ "))
+    logger.info(util.centered(" / _` | '_ \| | __| | '_ ` _ \ / _` | '_ \ / _` |/ _` |/ _ \\"))
+    logger.info(util.centered(" | (_| | |_) | | |_ | | | | | | (_| | | | | (_| | (_| | __/"))
+    logger.info(util.centered(" \__, |_.__/|_|\__| |_| |_| |_|\__,_|_| |_|\__,_|\__, |\___|"))
+    logger.info(util.centered(" | | ______ __/ | "))
+    logger.info(util.centered(" |_| |______| |___/ "))
+    logger.info(f" Version: {version}")
+    try:
+        if run:
+            logger.info(f" Run Mode: Script will exit after completion.")
+            start()
+        else:
+            schedule.every(sch).minutes.do(start)
+            logger.info(f" Scheduled Mode: Running every {sch} minutes.")
+            start()
+            while not killer.kill_now:
+                schedule.run_pending()
+                time.sleep(1)
+            end()
+    except KeyboardInterrupt:
+        end()
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index f62ec42..431e554 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,2 +1,3 @@
 PyYAML
 qbittorrent-api
+schedule
\ No newline at end of file
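Note on the new scheduler dependency: the qbit_manage.py hunks above replace the old one-shot run() entry point with a start() routine that either runs once (--run mode) or is registered with the schedule package every sch minutes, while a GracefulKiller flag breaks the polling loop on shutdown. The snippet below is only an illustrative, standalone sketch of that pattern: the GracefulKiller class here is an assumed generic signal-handler implementation (the project's own class is not part of this diff), and job() and the 30-minute interval are placeholders rather than project code.

# Illustrative sketch only -- not taken from qbit_manage.
import signal
import time

import schedule  # third-party package added to requirements.txt in this diff


class GracefulKiller:
    """Assumed implementation: flips kill_now when SIGINT/SIGTERM arrives."""
    kill_now = False

    def __init__(self):
        signal.signal(signal.SIGINT, self.exit_gracefully)
        signal.signal(signal.SIGTERM, self.exit_gracefully)

    def exit_gracefully(self, signum, frame):
        self.kill_now = True


def job():
    print("running scheduled work")  # placeholder for the real per-run function


if __name__ == "__main__":
    killer = GracefulKiller()
    schedule.every(30).minutes.do(job)  # interval is illustrative
    job()                               # run once immediately, as the diff does
    while not killer.kill_now:
        schedule.run_pending()          # runs any job whose interval has elapsed
        time.sleep(1)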