Commit b5c6cc56ef to qbit_manage (mirror of https://github.com/StuffAnThings/qbit_manage.git)
2 changed files with 154 additions and 138 deletions

config.yml (19 changed lines)
@@ -3,27 +3,16 @@ qbt:
   host: 'localhost:8080'
   user: 'username'
   pass: 'password'

-# Optional parameter to define any remote paths
-# If not using remote_dir just add a # in front of the whole section.
-remote_dir:
-  # 'Docker container path of root save directory' : 'Local path of root save directory'
-  # <'/container/path/data/'> : <'/host/path/data/'>
-  '/data/torrents/' : '/mnt/cache/data/torrents/'
 directory:
   # Do not remove these
   # Cross-seed var: </your/path/here/>
   cross_seed: '/your/path/here/'
 # Category/Pathing Parameters
 cat:
-  # <Category Name>
-  #   - save_path   #Path of your save directory. Can be a keyword or full path
-  #   - watch_path  #OPTIONAL parameter: where cross_seed will drop the torrents to. (Defaults to save_path if not defined)
-  movies:
-    - save_path: '/data/torrents/Movies'
-    - watch_path: '/data/torrents/watch/Movies'
-  tv:
-    - save_path: 'TV'
+  # <Category Name> : <save_path>  #Path of your save directory. Can be a keyword or full path
+  movies: '/data/torrents/Movies'
+  tv: 'TV'

 # Tag Parameters
 tags:
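The practical effect of the flattened cat: block above: each value is now matched as a substring against a torrent's save path, so either a keyword ('TV') or a full path works. Below is a minimal sketch of that lookup, not the script itself, assuming a dict shaped like what yaml.load() returns for the new config format; all names and paths here are hypothetical.

    # Hypothetical flat 'cat:' mapping, as yaml.load() would return it from the new config format.
    cat = {'movies': '/data/torrents/Movies', 'tv': 'TV'}

    def get_category(path):
        # Mirrors the rewritten get_category(): the first configured value that
        # appears inside the save path decides the category; no match means ''.
        for name, keyword in cat.items():
            if keyword in path:
                return name
        return ''

    print(get_category('/data/torrents/Movies/Some.Movie/'))  # -> 'movies'
    print(get_category('/data/torrents/TV/Some.Show/'))       # -> 'tv'
    print(get_category('/data/torrents/Music/Some.Album/'))   # -> '' (no category matched)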
qbit_manage.py (273 changed lines)
@@ -12,7 +12,7 @@ from collections import Counter

 # import apprise

-parser = argparse.ArgumentParser("qBittorrent Manager.",
+parser = argparse.ArgumentParser('qBittorrent Manager.',
                                  description='A mix of scripts combined for managing qBittorrent.')
 parser.add_argument('-c', '--config-file',
                     dest='config',
@@ -36,6 +36,11 @@ parser.add_argument('-s', '--cross-seed',
                     const='cross_seed',
                     help='Use this after running cross-seed script to organize your torrents into specified '
                          'watch folders.')
+parser.add_argument('-re', '--recheck',
+                    dest='recheck',
+                    action='store_const',
+                    const='recheck',
+                    help='Recheck paused torrents sorted by lowest size. Resume if Completed.')
 parser.add_argument('-g', '--cat-update',
                     dest='cat_update',
                     action='store_const',
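A side note on the store_const pattern used for the new flag: the parsed attribute stays None unless the flag is passed, which is why the recheck() function added further down tests args.recheck == 'recheck'. A small stand-alone sketch of that behaviour with a throwaway parser (not the script's own parser object):

    import argparse

    # Throwaway parser that mirrors only the newly added --recheck flag.
    parser = argparse.ArgumentParser('recheck-demo')
    parser.add_argument('-re', '--recheck',
                        dest='recheck',
                        action='store_const',
                        const='recheck',
                        help='Recheck paused torrents sorted by lowest size. Resume if Completed.')

    args = parser.parse_args(['--recheck'])
    assert args.recheck == 'recheck'  # flag given: attribute holds the const value

    args = parser.parse_args([])
    assert args.recheck is None       # flag omitted: attribute is None, so the == test fails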
@@ -64,7 +69,7 @@ parser.add_argument('--log',
                     help='Change your log level. ')
 args = parser.parse_args()

-with open(args.config, "r") as cfg_file:
+with open(args.config, 'r') as cfg_file:
     cfg = yaml.load(cfg_file, Loader=yaml.FullLoader)

 urllib3.disable_warnings()
@@ -96,13 +101,13 @@ stream_handler.setFormatter(stream_formatter)
 logger.addHandler(stream_handler)

 # Actual API call to connect to qbt.
-host = cfg["qbt"]["host"]
-if 'user' in cfg["qbt"]:
-    username = cfg["qbt"]["user"]
+host = cfg['qbt']['host']
+if 'user' in cfg['qbt']:
+    username = cfg['qbt']['user']
 else:
     username = ''
-if 'pass' in cfg["qbt"]:
-    password = cfg["qbt"]["pass"]
+if 'pass' in cfg['qbt']:
+    password = cfg['qbt']['pass']
 else:
     password = ''

@@ -116,11 +121,11 @@ def trunc_val(s, d, n=3):


 def get_category(path):
-    for cat, attr in cfg["cat"].items():
-        for attr_path in attr:
-            if 'save_path' in attr_path and attr_path['save_path'] in path:
-                category = cat
-                return category
+    cat_path = cfg["cat"]
+    for i, f in cat_path.items():
+        if f in path:
+            category = i
+            return category
     else:
         category = ''
         logger.warning('No categories matched. Check your config.yml file. - Setting tag to NULL')
@@ -128,34 +133,18 @@ def get_category(path):


 def get_tags(url):
-    tag_path = cfg["tags"]
+    tag_path = cfg['tags']
     for i, f in tag_path.items():
         if i in url:
             tag = f
             return tag
     else:
         tag = ''
-        logger.warning('No tags matched. Check your config.yml file. Setting category to NULL')
+        logger.warning('No tags matched. Check your config.yml file. Setting tag to NULL')
         return tag


-def get_name(t_list):
-    dupes = []
-    no_dupes = []
-    t_name = []
-    for torrent in t_list:
-        n = torrent.name
-        t_name.append(n)
-    for s in t_name:
-        if t_name.count(s) > 1:
-            if s not in dupes:
-                dupes.append(s)
-        if t_name.count(s) == 1:
-            if s not in no_dupes:
-                no_dupes.append(s)
-    return dupes, no_dupes
-
-
-#Will create a 2D Dictionary with the torrent name as the key
+# Will create a 2D Dictionary with the torrent name as the key
 # torrentdict = {'TorrentName1' : {'Category':'TV', 'save_path':'/data/torrents/TV'},
 #                'TorrentName2' : {'Category':'Movies', 'save_path':'/data/torrents/Movies'}}
 def get_torrent_info(t_list):
@@ -163,70 +152,104 @@ def get_torrent_info(t_list):
     for torrent in t_list:
         save_path = torrent.save_path
         category = get_category(save_path)
-        torrentattr = {'Category':category, 'save_path':save_path}
+        if torrent.name in torrentdict:
+            t_count = torrentdict[torrent.name]['count'] + 1
+        else:
+            t_count = 1
+        torrentattr = {'Category': category, 'save_path': save_path, 'count': t_count}
+        logger.debug(torrent.name, t_count)
         torrentdict[torrent.name] = torrentattr
     return torrentdict


-#Function used to move any torrents from the cross seed directory to the correct save directory
+# Function used to recheck paused torrents sorted by size and resume torrents that are completed
+def recheck():
+    if args.cross_seed == 'cross_seed' or args.manage == 'manage' or args.recheck == 'recheck':
+        #sort by size and paused
+        torrent_sorted_list = client.torrents.info(status_filter='paused',sort='size')
+        for torrent in torrent_sorted_list:
+            #Tag the torrents
+            new_tag = [get_tags(x.url) for x in torrent.trackers if x.url.startswith('http')]
+            torrent.add_tags(tags=new_tag)
+            #print(f'{torrent.hash[-6:]}: {torrent.name} ({torrent.state}) {torrent.progress}')
+            #Resume torrent if completed
+            if torrent.progress == 1:
+                if args.dry_run == 'dry_run':
+                    logger.dryrun(f'\n - Not Resuming {new_tag} - {torrent.name}')
+                else:
+                    logger.info(f'\n - Resuming {new_tag} - {torrent.name}')
+                    torrent.resume()
+            #Recheck
+            elif torrent.progress == 0:
+                if args.dry_run == 'dry_run':
+                    logger.dryrun(f'\n - Not Rechecking {new_tag} - {torrent.name}')
+                else:
+                    logger.info(f'\n - Rechecking {new_tag} - {torrent.name}')
+                    torrent.recheck()
+
+
+# Function used to move any torrents from the cross seed directory to the correct save directory
 def cross_seed():
     if args.cross_seed == 'cross_seed':
-        categories = [] #List of categories for all torrents moved
-        total = 0 #Keep track of total torrents moved
-        torrents_moved = "" #Used to output the final list torrents moved to output in the log
-        cs_files = [f for f in os.listdir(os.path.join(cfg["directory"]["cross_seed"],'')) if f.endswith('torrent')] #Only get torrent files
-        dir_cs = os.path.join(cfg["directory"]["cross_seed"],'')
+        # List of categories for all torrents moved
+        categories = []
+        # Keep track of total torrents moved
+        total = 0
+        # Used to output the final list torrents moved to output in the log
+        torrents_added = ''
+        # Only get torrent files
+        cs_files = [f for f in os.listdir(os.path.join(cfg['directory']['cross_seed'], '')) if f.endswith('torrent')]
+        dir_cs = os.path.join(cfg['directory']['cross_seed'], '')
+        dir_cs_out = os.path.join(dir_cs,'qbit_manage_added')
+        os.makedirs(dir_cs_out,exist_ok=True)
         torrent_list = client.torrents.info()
         torrentdict = get_torrent_info(torrent_list)
         for file in cs_files:
-            t_name = file.split("]",2)[2].split('.torrent')[0]
-            dest = ''
-            #Substring Key match in dictionary (used because t_name might not match exactly with torrentdict key)
-            #Returned the dictionary of filtered item
+            t_name = file.split(']', 2)[2].split('.torrent')[0]
+            # Substring Key match in dictionary (used because t_name might not match exactly with torrentdict key)
+            # Returned the dictionary of filtered item
             torrentdict_file = dict(filter(lambda item: t_name in item[0], torrentdict.items()))

             if torrentdict_file:
-                #Get the exact torrent match name from torrentdict
+                # Get the exact torrent match name from torrentdict
                 t_name = next(iter(torrentdict_file))
                 category = torrentdict[t_name]['Category']
-                dest = os.path.join(torrentdict[t_name]['save_path'],'') #Default save destination to save path if watch_path not defined
-                for attr_path in cfg["cat"][category]:
-                    if 'watch_path' in attr_path: #Update to watch path if defined
-                        dest=os.path.join(attr_path['watch_path'],'')
-                if "remote_dir" in cfg:
-                    #Replace remote directory with local directory
-                    for dir in cfg["remote_dir"]:
-                        if dir in dest : dest = dest.replace(dir,cfg["remote_dir"][dir])
-                src = dir_cs + file
-                dest += file
+                dest = os.path.join(torrentdict[t_name]['save_path'], '')
+                src = os.path.join(dir_cs,file)
+                dir_cs_out = os.path.join(dir_cs,'qbit_manage_added',file)
                 categories.append(category)
                 if args.dry_run == 'dry_run':
-                    logger.dryrun('Not Moving %s to %s', src, dest)
+                    logger.dryrun(f'Adding {t_name} to qBittorrent with: '
+                                  f'\n - Category: {category}'
+                                  f'\n - Save_Path: {dest}'
+                                  f'\n - Paused: True')
                 else:
-                    shutil.move(src, dest)
-                    logger.info('Moving %s to %s', src, dest)
+                    client.torrents.add(torrent_files=src,
+                                        save_path=dest,
+                                        category=category,
+                                        is_paused=True)
+                    shutil.move(src, dir_cs_out)
+                    logger.info(f'Adding {t_name} to qBittorrent with: '
+                                f'\n - Category: {category}'
+                                f'\n - Save_Path: {dest}'
+                                f'\n - Paused: True')
             else:
                 if args.dry_run == 'dry_run':
-                    logger.dryrun('{} not found in torrents.'.format(t_name))
+                    logger.dryrun(f'{t_name} not found in torrents.')
                 else:
-                    logger.info('{} not found in torrents.'.format(t_name))
+                    logger.warning(f'{t_name} not found in torrents.')
+        recheck()
         numcategory = Counter(categories)
         if args.dry_run == 'dry_run':
             for c in numcategory:
                 total += numcategory[c]
-                torrents_moved+="\n - {} .torrents not moved: {} ".format(c, numcategory[c])
-            torrents_moved+="\n -- Total .torrents not moved: {} ".format(total)
-            logger.dryrun(torrents_moved)
+                torrents_added += f'\n - {c} .torrents not added: {numcategory[c]}'
+            torrents_added += f'\n -- Total .torrents not added: {total}'
+            logger.dryrun(torrents_added)
         else:
             for c in numcategory:
                 total += numcategory[c]
-                torrents_moved+="\n - {} .torrents moved: {} ".format(c, numcategory[c])
-            torrents_moved+="\n -- Total .torrents moved: {} ".format(total)
-            logger.info(torrents_moved)
+                torrents_added += f'\n - {c} .torrents added: {numcategory[c]}'
+            torrents_added += f'\n -- Total .torrents added: {total}'
+            logger.info(torrents_added)


 def update_category():
     if args.manage == 'manage' or args.cat_update == 'cat_update':
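To make the new cross_seed() flow concrete: instead of moving .torrent files into a watch folder, the commit injects them through the qBittorrent Web API as paused torrents and then archives each file so it is not picked up again. A rough stand-alone sketch of that call sequence using the qbittorrent-api client the script already relies on; host, credentials, and paths below are hypothetical.

    import os
    import shutil
    from qbittorrentapi import Client

    # Hypothetical connection details; qbit_manage.py reads these from config.yml.
    client = Client(host='localhost:8080', username='username', password='password')

    src = '/your/path/here/[tracker] Some.Release.torrent'  # a cross-seed .torrent (hypothetical)
    dest = '/data/torrents/Movies/'                         # save path looked up from torrentdict
    archive = os.path.join(os.path.dirname(src), 'qbit_manage_added')
    os.makedirs(archive, exist_ok=True)

    # Add the torrent paused so the follow-up recheck() can verify existing data
    # before anything starts seeding.
    client.torrents.add(torrent_files=src,
                        save_path=dest,
                        category='movies',
                        is_paused=True)

    # Move the processed .torrent into the archive folder so the next run skips it.
    shutil.move(src, archive)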
@@ -239,24 +262,26 @@ def update_category():
                 t_url = trunc_val(x.url, '/')
                 new_cat = get_category(torrent.save_path)
                 if args.dry_run == 'dry_run':
-                    logger.dryrun('\n - Torrent Name: %s \n - New Category: %s \n - Tracker: %s',
-                                  torrent.name, new_cat, t_url)
+                    logger.dryrun(f'\n - Torrent Name: {torrent.name}'
+                                  f'\n - New Category: {new_cat}'
+                                  f'\n - Tracker: {t_url}')
                     num_cat += 1
                 else:
-                    logger.info('\n - Torrent Name: %s \n - New Category: %s \n - Tracker: %s',
-                                torrent.name, new_cat, t_url)
+                    logger.info(f'\n - Torrent Name: {torrent.name}'
+                                f'\n - New Category: {new_cat}'
+                                f'\n - Tracker: {t_url}')
                     torrent.set_category(category=new_cat)
                     num_cat += 1
         if args.dry_run == 'dry_run':
             if num_cat >= 1:
-                logger.dryrun('Did not update %s new categories.', num_cat)
+                logger.dryrun(f'Did not update {num_cat} new categories.')
             else:
-                logger.dryrun('No new torrents to categorize.')
+                logger.dryrun(f'No new torrents to categorize.')
         else:
             if num_cat >= 1:
-                logger.info('Updated %s new categories.', num_cat)
+                logger.info(f'Updated {num_cat} new categories.')
             else:
-                logger.info('No new torrents to categorize.')
+                logger.info(f'No new torrents to categorize.')


 def update_tags():
@@ -270,82 +295,84 @@ def update_tags():
                 t_url = trunc_val(x.url, '/')
                 new_tag = get_tags(x.url)
                 if args.dry_run == 'dry_run':
-                    logger.dryrun('\n - Torrent Name: %s \n - New Tag: %s \n - Tracker: %s',
-                                  torrent.name, new_tag, t_url)
+                    logger.dryrun(f'\n - Torrent Name: {torrent.name}'
+                                  f'\n - New Tag: {new_tag}'
+                                  f'\n - Tracker: {t_url}')
                     num_tags += 1
                 else:
-                    logger.info('\n - Torrent Name: %s \n - New Tag: %s \n - Tracker: %s',
-                                torrent.name, new_tag, t_url)
+                    logger.info(f'\n - Torrent Name: {torrent.name}'
+                                f'\n - New Tag: {new_tag}'
+                                f'\n - Tracker: {t_url}')
                     torrent.add_tags(tags=new_tag)
                     num_tags += 1
         if args.dry_run == 'dry_run':
             if num_tags >= 1:
-                logger.dryrun('Did not update %s new tags.', num_tags)
+                logger.dryrun(f'Did not update {num_tags} new tags.')
             else:
                 logger.dryrun('No new torrents to tag.')
         else:
             if num_tags >= 1:
-                logger.info('Updated %s new tags.', num_tags)
+                logger.info(f'Updated {num_tags} new tags.')
             else:
-                logger.info('No new torrents to tag.')
+                logger.info('No new torrents to tag. ')


 def rem_unregistered():
     if args.manage == 'manage' or args.rem_unregistered == 'rem_unregistered':
         torrent_list = client.torrents.info()
-        dupes, no_dupes = get_name(torrent_list)
+        torrentdict = get_torrent_info(torrent_list)
         rem_unr = 0
         del_tor = 0
         for torrent in torrent_list:
-            for status in torrent.trackers:
-                for x in torrent.trackers:
-                    if x.url.startswith('http'):
-                        t_url = trunc_val(x.url, '/')
-                    if 'Unregistered torrent' in status.msg or 'Torrent is not found' in status.msg:
-                        if torrent.name in dupes:
-                            if args.dry_run == 'dry_run':
-                                logger.dryrun('\n - Torrent Name: %s \n - Status: %s \n - Tracker: %s \n '
-                                              '- Deleted .torrent but not content files.',
-                                              torrent.name, status.msg, t_url)
-                                rem_unr += 1
-                            else:
-                                logger.info('\n - Torrent Name: %s \n - Status: %s \n - Tracker: %s \n '
-                                            '- Deleted .torrent but not content files.',
-                                            torrent.name, status.msg, t_url)
-                                torrent.delete(hash=torrent.hash, delete_files=False)
-                                rem_unr += 1
-                        elif torrent.name in no_dupes:
-                            if args.dry_run == 'dry_run':
-                                logger.dryrun('\n - Torrent Name: %s \n - Status: %s \n - Tracker: %s \n '
-                                              '- Deleted .torrent AND content files.',
-                                              torrent.name, status.msg, t_url)
-                                del_tor += 1
-                            else:
-                                logger.info('\n - Torrent Name: %s \n - Status: %s \n - Tracker: %s \n '
-                                            '- Deleted .torrent AND content files.',
-                                            torrent.name, status.msg, t_url)
-                                torrent.delete(hash=torrent.hash, delete_files=True)
-                                del_tor += 1
+            t_name = torrent.name
+            t_count = torrentdict[t_name]['count']
+            for x in torrent.trackers:
+                if x.url.startswith('http'):
+                    t_url = trunc_val(x.url, '/')
+                    n_info = (f'\n - Torrent Name: {t_name} '
+                              f'\n - Status: {x.msg} '
+                              f'\n - Tracker: {t_url} '
+                              f'\n - Deleted .torrent but not content files.')
+                    n_d_info = (f'\n - Torrent Name: {t_name} '
+                                f'\n - Status: {x.msg} '
+                                f'\n - Tracker: {t_url} '
+                                f'\n - Deleted .torrent AND content files.')
+                    if 'Unregistered torrent' in x.msg or 'Torrent is not found' in x.msg:
+                        if t_count > 1:
+                            if args.dry_run == 'dry_run':
+                                logger.dryrun(n_info)
+                                rem_unr += 1
+                            else:
+                                logger.info(n_info)
+                                torrent.delete(hash=torrent.hash, delete_files=False)
+                                rem_unr += 1
+                        else:
+                            if args.dry_run == 'dry_run':
+                                logger.dryrun(n_d_info)
+                                del_tor += 1
+                            else:
+                                logger.info(n_d_info)
+                                torrent.delete(hash=torrent.hash, delete_files=True)
+                                del_tor += 1
         if args.dry_run == 'dry_run':
             if rem_unr >= 1 or del_tor >= 1:
-                logger.dryrun('Did not delete %s .torrents(s) but not content files.', rem_unr)
-                logger.dryrun('Did not delete %s .torrents(s) AND content files.', del_tor)
+                logger.dryrun(f'Did not delete {rem_unr} .torrents(s) or content files.')
+                logger.dryrun(f'Did not delete {del_tor} .torrents(s) or content files.')
             else:
                 logger.dryrun('No unregistered torrents found.')
         else:
             if rem_unr >= 1 or del_tor >= 1:
-                logger.info('Deleted %s .torrents(s) but not content files.', rem_unr)
-                logger.info('Deleted %s .torrents(s) AND content files.', del_tor)
+                logger.info(f'Deleted {rem_unr} .torrents(s) but not content files.')
+                logger.info(f'Deleted {del_tor} .torrents(s) AND content files.')
             else:
                 logger.info('No unregistered torrents found.')


 def run():
-    cross_seed()
     update_category()
     update_tags()
     rem_unregistered()
+    cross_seed()
+    recheck()


-if __name__ == "__main__":
+if __name__ == '__main__':
     run()
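Worth noting how the removed get_name() duplicate detection survives: get_torrent_info() now records a per-name count, and rem_unregistered() keeps the content files whenever that count is above one (a cross-seeded release), deleting them only for single copies. A compact sketch of just the counting step, using plain strings in place of the client's torrent objects; the names are hypothetical.

    # Hypothetical torrent names as they might come back from client.torrents.info();
    # a repeated name means the same release is seeded on more than one tracker.
    names = ['Some.Release', 'Some.Release', 'Other.Release']

    torrentdict = {}
    for n in names:
        # Mirrors the new get_torrent_info() logic: bump the count on repeats.
        t_count = torrentdict[n]['count'] + 1 if n in torrentdict else 1
        torrentdict[n] = {'count': t_count}

    assert torrentdict['Some.Release']['count'] == 2   # duplicate: delete .torrent only
    assert torrentdict['Other.Release']['count'] == 1  # single copy: delete .torrent AND content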