Merge development into master

github-actions[bot] 2024-06-02 14:21:09 +00:00 committed by GitHub
commit 97ea3a8124
3620 changed files with 205660 additions and 120048 deletions


@@ -8,6 +8,19 @@ updates:
prefix: "[bot]"
open-pull-requests-limit: 1
target-branch: "development"
groups:
fortawesome:
patterns:
- "@fortawesome*"
mantine:
patterns:
- "@mantine*"
react:
patterns:
- "react"
- "react-dom"
- "@types/react"
- "@types/react-dom"
- package-ecosystem: 'github-actions'
directory: '/'
schedule:


@@ -1,4 +1,5 @@
bazarr
custom_libs
frontend/build
libs
bazarr.py


@@ -27,14 +27,14 @@ jobs:
uses: actions/checkout@v4
- name: Cache node_modules
uses: actions/cache@v3
uses: actions/cache@v4
with:
path: "${{ env.UI_DIRECTORY }}/node_modules"
key: ${{ runner.os }}-modules-${{ hashFiles('**/package-lock.json') }}
restore-keys: ${{ runner.os }}-modules-
- name: Setup NodeJS
uses: actions/setup-node@v3
uses: actions/setup-node@v4
with:
node-version: "lts/*"
@@ -62,7 +62,7 @@ jobs:
run: npm run build:ci
working-directory: ${{ env.UI_DIRECTORY }}
- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v4
with:
name: ${{ env.UI_ARTIFACT_NAME }}
path: "${{ env.UI_DIRECTORY }}/build"
@@ -76,12 +76,12 @@ jobs:
uses: actions/checkout@v4
- name: Set up Python 3.8
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: "3.8"
- name: Install UI
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
with:
name: ${{ env.UI_ARTIFACT_NAME }}
path: "${{ env.UI_DIRECTORY }}/build"


@@ -29,14 +29,14 @@ jobs:
git fetch --depth ${{ env.FETCH_DEPTH }} --tags
- name: Cache node_modules
uses: actions/cache@v3
uses: actions/cache@v4
with:
path: "${{ env.UI_DIRECTORY }}/node_modules"
key: ${{ runner.os }}-modules-${{ hashFiles('**/package-lock.json') }}
restore-keys: ${{ runner.os }}-modules-
- name: Setup NodeJS
uses: actions/setup-node@v3
uses: actions/setup-node@v4
with:
node-version: "lts/*"


@@ -31,14 +31,14 @@ jobs:
run: git config --global user.name "github-actions"
- name: Cache node_modules
uses: actions/cache@v3
uses: actions/cache@v4
with:
path: "${{ env.UI_DIRECTORY }}/node_modules"
key: ${{ runner.os }}-modules-${{ hashFiles('**/package-lock.json') }}
restore-keys: ${{ runner.os }}-modules-
- name: Setup NodeJS
uses: actions/setup-node@v3
uses: actions/setup-node@v4
with:
node-version: "lts/*"


@@ -22,7 +22,7 @@ jobs:
ref: development
- name: Setup NodeJS
uses: actions/setup-node@v3
uses: actions/setup-node@v4
with:
node-version: "lts/*"
@@ -35,7 +35,7 @@ jobs:
working-directory: ${{ env.UI_DIRECTORY }}
- name: Set up Python 3.8
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: "3.8"


@@ -2,7 +2,7 @@
## Tools required
- Python 3.7.x to 3.11.x (3.9.x is highly recommended and 3.12 or greater is proscribed).
- Python 3.8.x to 3.11.x (3.10.x is highly recommended and 3.12 or greater is proscribed).
- Pycharm or Visual Studio code IDE are recommended but if you're happy with VIM, enjoy it!
- Git.
- UI testing must be done using Chrome latest version.
@@ -42,4 +42,4 @@ As we're using Git in the development process, you better disable automatic upda
- All commits must have a meaningful commit message (ex.: Fixed issue with this, Improved process abc, Added input field to UI, etc.).
- Fixes can be made directly to `development` branch but keep in mind that a pre-release with a beta versioning will be created every day a new push is made.
- Features must be developed in dedicated feature branch and merged back to `development` branch using PR.
- Once reviewed, your PR will be merged using Squash and Merge with a meaningful message.
- Once reviewed, your PR will be merged by morpheus65535 using Squash and Merge with a meaningful message.


@@ -48,7 +48,9 @@ If you need something that is not already part of Bazarr, feel free to create a
## Supported subtitles providers:
- Addic7ed
- Animetosho (requires AniDb HTTP API client described [here](https://wiki.anidb.net/HTTP_API_Definition))
- Assrt
- AvistaZ, CinemaZ (Get session cookies using method described [here](https://github.com/morpheus65535/bazarr/pull/2375#issuecomment-2057010996))
- BetaSeries
- BSplayer
- Embedded Subtitles

bazarr.py

@@ -6,9 +6,14 @@ import signal
import subprocess
import sys
import time
import atexit
from bazarr.app.get_args import args
from bazarr.literals import EXIT_PYTHON_UPGRADE_NEEDED, EXIT_NORMAL, FILE_RESTART, FILE_STOP, ENV_RESTARTFILE, ENV_STOPFILE, EXIT_INTERRUPT
def exit_program(status_code):
print(f'Bazarr exited with status code {status_code}.')
raise SystemExit(status_code)
def check_python_version():
@@ -19,7 +24,7 @@ def check_python_version():
if int(python_version[0]) < minimum_py3_tuple[0]:
print("Python " + minimum_py3_str + " or greater required. "
"Current version is " + platform.python_version() + ". Please upgrade Python.")
sys.exit(1)
exit_program(EXIT_PYTHON_UPGRADE_NEEDED)
elif int(python_version[0]) == 3 and int(python_version[1]) > 11:
print("Python version greater than 3.11.x is unsupported. Current version is " + platform.python_version() +
". Keep in mind that even if it works, you're on your own.")
@@ -27,7 +32,7 @@ def check_python_version():
(int(python_version[0]) != minimum_py3_tuple[0]):
print("Python " + minimum_py3_str + " or greater required. "
"Current version is " + platform.python_version() + ". Please upgrade Python.")
sys.exit(1)
exit_program(EXIT_PYTHON_UPGRADE_NEEDED)
def get_python_path():
@@ -50,79 +55,98 @@ check_python_version()
dir_name = os.path.dirname(__file__)
def end_child_process(ep):
try:
if os.name != 'nt':
try:
ep.send_signal(signal.SIGINT)
except ProcessLookupError:
pass
else:
import win32api
import win32con
try:
win32api.GenerateConsoleCtrlEvent(win32con.CTRL_C_EVENT, ep.pid)
except KeyboardInterrupt:
pass
except:
ep.terminate()
def start_bazarr():
script = [get_python_path(), "-u", os.path.normcase(os.path.join(dir_name, 'bazarr', 'main.py'))] + sys.argv[1:]
ep = subprocess.Popen(script, stdout=None, stderr=None, stdin=subprocess.DEVNULL)
atexit.register(end_child_process, ep=ep)
signal.signal(signal.SIGTERM, lambda signal_no, frame: end_child_process(ep))
ep = subprocess.Popen(script, stdout=None, stderr=None, stdin=subprocess.DEVNULL, env=os.environ)
print(f"Bazarr starting child process with PID {ep.pid}...")
return ep
def terminate_child():
print(f"Terminating child process with PID {child_process.pid}")
child_process.terminate()
def get_stop_status_code(input_file):
try:
with open(input_file, 'r') as file:
# read status code from file, if it exists
line = file.readline()
try:
status_code = int(line)
except (ValueError, TypeError):
status_code = EXIT_NORMAL
file.close()
except Exception:
status_code = EXIT_NORMAL
return status_code
def check_status():
if os.path.exists(stopfile):
global child_process
if os.path.exists(stop_file):
status_code = get_stop_status_code(stop_file)
try:
os.remove(stopfile)
print("Deleting stop file...")
os.remove(stop_file)
except Exception:
print('Unable to delete stop file.')
finally:
print('Bazarr exited.')
sys.exit(0)
terminate_child()
exit_program(status_code)
if os.path.exists(restartfile):
if os.path.exists(restart_file):
try:
os.remove(restartfile)
print("Deleting restart file...")
os.remove(restart_file)
except Exception:
print('Unable to delete restart file.')
else:
finally:
terminate_child()
print("Bazarr is restarting...")
start_bazarr()
child_process = start_bazarr()
def interrupt_handler(signum, frame):
# catch and ignore keyboard interrupt Ctrl-C
# the child process Server object will catch SIGINT and perform an orderly shutdown
global interrupted
if not interrupted:
# ignore user hammering Ctrl-C; we heard you the first time!
interrupted = True
print('Handling keyboard interrupt...')
else:
print("Stop doing that! I heard you the first time!")
if __name__ == '__main__':
restartfile = os.path.join(args.config_dir, 'bazarr.restart')
stopfile = os.path.join(args.config_dir, 'bazarr.stop')
interrupted = False
signal.signal(signal.SIGINT, interrupt_handler)
restart_file = os.path.join(args.config_dir, FILE_RESTART)
stop_file = os.path.join(args.config_dir, FILE_STOP)
os.environ[ENV_STOPFILE] = stop_file
os.environ[ENV_RESTARTFILE] = restart_file
# Cleanup leftover files
try:
os.remove(restartfile)
os.remove(restart_file)
except FileNotFoundError:
pass
try:
os.remove(stopfile)
os.remove(stop_file)
except FileNotFoundError:
pass
# Initial start of main bazarr process
print("Bazarr starting...")
start_bazarr()
child_process = start_bazarr()
# Keep the script running forever until stop is requested through term or keyboard interrupt
# Keep the script running forever until stop is requested through term, special files or keyboard interrupt
while True:
check_status()
try:
if sys.platform.startswith('win'):
time.sleep(5)
else:
os.wait()
time.sleep(1)
time.sleep(5)
except (KeyboardInterrupt, SystemExit, ChildProcessError):
print('Bazarr exited.')
sys.exit(0)
# this code should never be reached, if signal handling is working properly
print('Bazarr exited main script file via keyboard interrupt.')
exit_program(EXIT_INTERRUPT)
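
Taken together, the rewritten bazarr.py acts as a small supervisor: main.py runs as a child process, and stop/restart requests travel through the files named in literals.py, with the child writing an integer status code into the stop file for the parent to read back. A minimal sketch of that stop-file handshake, with hypothetical paths and a simplified loop:

    # sketch of the supervisor/stop-file pattern; paths and script name are hypothetical
    import os
    import subprocess
    import sys
    import time

    STOP_FILE = "/tmp/bazarr.stop"   # assumption: both processes agree on this path
    EXIT_NORMAL = 0

    def read_status(path, default=EXIT_NORMAL):
        # the child writes a single integer status code on the first line
        try:
            with open(path) as f:
                return int(f.readline())
        except (OSError, ValueError):
            return default

    child = subprocess.Popen([sys.executable, "-u", "main.py"], env=os.environ)
    while True:
        if os.path.exists(STOP_FILE):
            status = read_status(STOP_FILE)
            os.remove(STOP_FILE)
            child.terminate()
            raise SystemExit(status)   # propagate the child's requested exit code
        time.sleep(5)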


@@ -1,13 +1,14 @@
# coding=utf-8
import io
import os
import re
from flask_restx import Resource, Namespace, fields, marshal
from app.config import settings
from app.logger import empty_log
from app.get_args import args
from utilities.central import get_log_file_path
from ..utils import authenticate
api_ns_system_logs = Namespace('System Logs', description='List log file entries or empty log file')
@@ -29,12 +30,62 @@ class SystemLogs(Resource):
def get(self):
"""List log entries"""
logs = []
with io.open(os.path.join(args.config_dir, 'log', 'bazarr.log'), encoding='UTF-8') as file:
include = str(settings.log.include_filter)
exclude = str(settings.log.exclude_filter)
ignore_case = settings.log.ignore_case
regex = settings.log.use_regex
if regex:
# pre-compile regular expressions for better performance
if ignore_case:
flags = re.IGNORECASE
else:
flags = 0
if len(include) > 0:
try:
include_compiled = re.compile(include, flags)
except Exception:
include_compiled = None
if len(exclude) > 0:
try:
exclude_compiled = re.compile(exclude, flags)
except Exception:
exclude_compiled = None
elif ignore_case:
include = include.casefold()
exclude = exclude.casefold()
with io.open(get_log_file_path(), encoding='UTF-8') as file:
raw_lines = file.read()
lines = raw_lines.split('|\n')
for line in lines:
if line == '':
continue
if ignore_case and not regex:
compare_line = line.casefold()
else:
compare_line = line
if len(include) > 0:
if regex:
if include_compiled is None:
# if invalid re, keep the line
keep = True
else:
keep = include_compiled.search(compare_line)
else:
keep = include in compare_line
if not keep:
continue
if len(exclude) > 0:
if regex:
if exclude_compiled is None:
# if invalid re, keep the line
skip = False
else:
skip = exclude_compiled.search(compare_line)
else:
skip = exclude in compare_line
if skip:
continue
raw_message = line.split('|')
raw_message_len = len(raw_message)
if raw_message_len > 3:
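
The filter logic above compiles the patterns once, outside the per-line loop, and falls back to substring matching when use_regex is off. A condensed sketch of the same include/exclude decision in substring mode with ignore_case enabled:

    # condensed sketch of the include/exclude decision in substring mode
    include = "download".casefold()
    exclude = "debug".casefold()

    kept = []
    for line in ["Download done|", "DEBUG noise|", "unrelated|"]:
        compare_line = line.casefold()
        if include and include not in compare_line:
            continue   # include filter: keep only matching lines
        if exclude and exclude in compare_line:
            continue   # exclude filter: drop matching lines
        kept.append(line)
    # kept == ["Download done|"]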


@@ -1,6 +1,6 @@
# coding=utf-8
from flask import Flask, redirect
from flask import Flask, redirect, Request
from flask_compress import Compress
from flask_cors import CORS
@@ -13,9 +13,17 @@ from .config import settings, base_url
socketio = SocketIO()
class CustomRequest(Request):
def __init__(self, *args, **kwargs):
super(CustomRequest, self).__init__(*args, **kwargs)
# required to increase form-data size before returning a 413
self.max_form_parts = 10000
def create_app():
# Flask Setup
app = Flask(__name__)
app.request_class = CustomRequest
app.config['COMPRESS_ALGORITHM'] = 'gzip'
Compress(app)
app.wsgi_app = ReverseProxied(app.wsgi_app)
@@ -34,6 +42,7 @@ def create_app():
else:
app.config["DEBUG"] = False
from engineio.async_drivers import threading # noqa W0611 # required to prevent an import exception in engineio
socketio.init_app(app, path=f'{base_url.rstrip("/")}/api/socket.io', cors_allowed_origins='*',
async_mode='threading', allow_upgrades=False, transports='polling', engineio_logger=False)
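
Swapping in a Request subclass via app.request_class is the standard Flask hook for changing per-request parsing behaviour, and raising max_form_parts is what keeps large multipart form posts from being rejected with a 413. A minimal standalone sketch of the same pattern:

    # minimal sketch of overriding Flask's request class
    from flask import Flask, Request

    class CustomRequest(Request):
        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            # lift Werkzeug's multipart part limit before it answers 413
            self.max_form_parts = 10000

    app = Flask(__name__)
    app.request_class = CustomRequest   # every incoming request now uses it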


@@ -25,7 +25,7 @@ def check_releases():
url_releases = 'https://api.github.com/repos/morpheus65535/Bazarr/releases?per_page=100'
try:
logging.debug(f'BAZARR getting releases from Github: {url_releases}')
r = requests.get(url_releases, allow_redirects=True)
r = requests.get(url_releases, allow_redirects=True, timeout=15)
r.raise_for_status()
except requests.exceptions.HTTPError:
logging.exception("Error trying to get releases from Github. Http error.")
@@ -160,12 +160,14 @@ def apply_update():
'BAZARR was unable to delete the previous build directory during upgrade process.')
for file in archive.namelist():
if file.startswith(zip_root_directory) and file != zip_root_directory and not \
file.endswith('bazarr.py'):
if file.startswith(zip_root_directory) and file != zip_root_directory:
file_path = os.path.join(bazarr_dir, file[len(zip_root_directory):])
parent_dir = os.path.dirname(file_path)
os.makedirs(parent_dir, exist_ok=True)
if not os.path.isdir(file_path):
if os.path.exists(file_path):
# remove the file first to handle case-insensitive file systems
os.remove(file_path)
with open(file_path, 'wb+') as f:
f.write(archive.read(file))
except Exception:
@@ -230,6 +232,9 @@ def update_cleaner(zipfile, bazarr_dir, config_dir):
dir_to_ignore_regex = re.compile(dir_to_ignore_regex_string)
file_to_ignore = ['nssm.exe', '7za.exe', 'unins000.exe', 'unins000.dat']
# prevent deletion of leftover Apprise.py/pyi files after 1.8.0 version that caused issue on case-insensitive
# filesystem. This could be removed in a couple of major versions.
file_to_ignore += ['Apprise.py', 'Apprise.pyi', 'apprise.py', 'apprise.pyi']
logging.debug(f'BAZARR upgrade leftover cleaner will ignore those files: {", ".join(file_to_ignore)}')
extension_to_ignore = ['.pyc']
logging.debug(
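
The remove-before-write step in apply_update matters on case-insensitive filesystems, where Apprise.py and apprise.py resolve to the same on-disk file; deleting first guarantees the extracted name wins. A hedged sketch of that extraction loop, with hypothetical arguments:

    # sketch: overwrite archive members in place, deleting targets first so names
    # differing only by case cannot leave stale files on case-insensitive disks
    import os
    import zipfile

    def extract_over(archive_path, dest_dir):
        with zipfile.ZipFile(archive_path) as archive:
            for name in archive.namelist():
                target = os.path.join(dest_dir, name)
                parent = os.path.dirname(target)
                if parent:
                    os.makedirs(parent, exist_ok=True)
                if not os.path.isdir(target):
                    if os.path.exists(target):
                        os.remove(target)   # handle case-insensitive collisions
                    with open(target, "wb") as f:
                        f.write(archive.read(name))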


@@ -4,8 +4,12 @@ import hashlib
import os
import ast
import logging
import re
from urllib.parse import quote_plus
from utilities.binaries import BinaryNotFound, get_binary
from literals import EXIT_VALIDATION_ERROR
from utilities.central import stop_bazarr
from subliminal.cache import region
from dynaconf import Dynaconf, Validator as OriginalValidator
from dynaconf.loaders.yaml_loader import write
@@ -37,6 +41,7 @@ def validate_ip_address(ip_string):
ONE_HUNDRED_YEARS_IN_MINUTES = 52560000
ONE_HUNDRED_YEARS_IN_HOURS = 876000
class Validator(OriginalValidator):
# Give the ability to personalize messages sent by the original dynasync Validator class.
default_messages = MappingProxyType(
@@ -50,6 +55,14 @@ class Validator(OriginalValidator):
)
def check_parser_binary(value):
try:
get_binary(value)
except BinaryNotFound:
raise ValidationError(f"Executable '{value}' not found in search path. Please install before making this selection.")
return True
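
check_parser_binary is attached below through the Validator condition hook, a dynaconf callable that receives the candidate value and either returns True or raises. A minimal sketch of that mechanism, with a hypothetical setting name:

    # sketch of dynaconf's condition hook with a hypothetical setting
    from dynaconf import Dynaconf, Validator
    from dynaconf.validator import ValidationError

    def must_be_even(value):
        if value % 2:
            raise ValidationError(f"{value} is not even")
        return True   # condition callables return True to accept the value

    settings = Dynaconf(validators=[
        Validator("general.workers", must_exist=True, default=4, condition=must_be_even),
    ])
    settings.validators.validate()   # raises ValidationError on an odd value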
validators = [
# general section
Validator('general.flask_secret_key', must_exist=True, default=hexlify(os.urandom(16)).decode(),
@@ -96,13 +109,14 @@ validators = [
Validator('general.adaptive_searching_delta', must_exist=True, default='1w', is_type_of=str,
is_in=['3d', '1w', '2w', '3w', '4w']),
Validator('general.enabled_providers', must_exist=True, default=[], is_type_of=list),
Validator('general.enabled_integrations', must_exist=True, default=[], is_type_of=list),
Validator('general.multithreading', must_exist=True, default=True, is_type_of=bool),
Validator('general.chmod_enabled', must_exist=True, default=False, is_type_of=bool),
Validator('general.chmod', must_exist=True, default='0640', is_type_of=str),
Validator('general.subfolder', must_exist=True, default='current', is_type_of=str),
Validator('general.subfolder_custom', must_exist=True, default='', is_type_of=str),
Validator('general.upgrade_subs', must_exist=True, default=True, is_type_of=bool),
Validator('general.upgrade_frequency', must_exist=True, default=12, is_type_of=int,
Validator('general.upgrade_frequency', must_exist=True, default=12, is_type_of=int,
is_in=[6, 12, 24, ONE_HUNDRED_YEARS_IN_HOURS]),
Validator('general.days_to_upgrade_subs', must_exist=True, default=7, is_type_of=int, gte=0, lte=30),
Validator('general.upgrade_manual', must_exist=True, default=True, is_type_of=bool),
@@ -115,13 +129,19 @@ validators = [
Validator('general.dont_notify_manual_actions', must_exist=True, default=False, is_type_of=bool),
Validator('general.hi_extension', must_exist=True, default='hi', is_type_of=str, is_in=['hi', 'cc', 'sdh']),
Validator('general.embedded_subtitles_parser', must_exist=True, default='ffprobe', is_type_of=str,
is_in=['ffprobe', 'mediainfo']),
is_in=['ffprobe', 'mediainfo'], condition=check_parser_binary),
Validator('general.default_und_audio_lang', must_exist=True, default='', is_type_of=str),
Validator('general.default_und_embedded_subtitles_lang', must_exist=True, default='', is_type_of=str),
Validator('general.parse_embedded_audio_track', must_exist=True, default=False, is_type_of=bool),
Validator('general.skip_hashing', must_exist=True, default=False, is_type_of=bool),
Validator('general.language_equals', must_exist=True, default=[], is_type_of=list),
# log section
Validator('log.include_filter', must_exist=True, default='', is_type_of=str, cast=str),
Validator('log.exclude_filter', must_exist=True, default='', is_type_of=str, cast=str),
Validator('log.ignore_case', must_exist=True, default=False, is_type_of=bool),
Validator('log.use_regex', must_exist=True, default=False, is_type_of=bool),
# auth section
Validator('auth.apikey', must_exist=True, default=hexlify(os.urandom(16)).decode(), is_type_of=str),
Validator('auth.type', must_exist=True, default=None, is_type_of=(NoneType, str),
@@ -155,7 +175,7 @@ validators = [
Validator('sonarr.full_update_hour', must_exist=True, default=4, is_type_of=int, gte=0, lte=23),
Validator('sonarr.only_monitored', must_exist=True, default=False, is_type_of=bool),
Validator('sonarr.series_sync', must_exist=True, default=60, is_type_of=int,
is_in=[15, 60, 180, 360, 720, 1440, ONE_HUNDRED_YEARS_IN_MINUTES]),
is_in=[15, 60, 180, 360, 720, 1440, 10080, ONE_HUNDRED_YEARS_IN_MINUTES]),
Validator('sonarr.excluded_tags', must_exist=True, default=[], is_type_of=list),
Validator('sonarr.excluded_series_types', must_exist=True, default=[], is_type_of=list),
Validator('sonarr.use_ffprobe_cache', must_exist=True, default=True, is_type_of=bool),
@@ -178,7 +198,7 @@ validators = [
Validator('radarr.full_update_hour', must_exist=True, default=4, is_type_of=int, gte=0, lte=23),
Validator('radarr.only_monitored', must_exist=True, default=False, is_type_of=bool),
Validator('radarr.movies_sync', must_exist=True, default=60, is_type_of=int,
is_in=[15, 60, 180, 360, 720, 1440, ONE_HUNDRED_YEARS_IN_MINUTES]),
is_in=[15, 60, 180, 360, 720, 1440, 10080, ONE_HUNDRED_YEARS_IN_MINUTES]),
Validator('radarr.excluded_tags', must_exist=True, default=[], is_type_of=list),
Validator('radarr.use_ffprobe_cache', must_exist=True, default=True, is_type_of=bool),
Validator('radarr.defer_search_signalr', must_exist=True, default=False, is_type_of=bool),
@@ -215,6 +235,19 @@ validators = [
Validator('addic7ed.user_agent', must_exist=True, default='', is_type_of=str),
Validator('addic7ed.vip', must_exist=True, default=False, is_type_of=bool),
# animetosho section
Validator('animetosho.search_threshold', must_exist=True, default=6, is_type_of=int, gte=1, lte=15),
Validator('animetosho.anidb_api_client', must_exist=True, default='', is_type_of=str, cast=str),
Validator('animetosho.anidb_api_client_ver', must_exist=True, default=1, is_type_of=int, gte=1, lte=9),
# avistaz section
Validator('avistaz.cookies', must_exist=True, default='', is_type_of=str),
Validator('avistaz.user_agent', must_exist=True, default='', is_type_of=str),
# cinemaz section
Validator('cinemaz.cookies', must_exist=True, default='', is_type_of=str),
Validator('cinemaz.user_agent', must_exist=True, default='', is_type_of=str),
# podnapisi section
Validator('podnapisi.verify_ssl', must_exist=True, default=True, is_type_of=bool),
@@ -260,10 +293,6 @@ validators = [
Validator('napisy24.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('napisy24.password', must_exist=True, default='', is_type_of=str, cast=str),
# subscene section
Validator('subscene.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('subscene.password', must_exist=True, default='', is_type_of=str, cast=str),
# betaseries section
Validator('betaseries.token', must_exist=True, default='', is_type_of=str, cast=str),
@@ -283,7 +312,8 @@ validators = [
Validator('embeddedsubtitles.included_codecs', must_exist=True, default=[], is_type_of=list),
Validator('embeddedsubtitles.hi_fallback', must_exist=True, default=False, is_type_of=bool),
Validator('embeddedsubtitles.timeout', must_exist=True, default=600, is_type_of=int, gte=1),
Validator('embeddedsubtitles.unknown_as_english', must_exist=True, default=False, is_type_of=bool),
Validator('embeddedsubtitles.unknown_as_fallback', must_exist=True, default=False, is_type_of=bool),
Validator('embeddedsubtitles.fallback_lang', must_exist=True, default='en', is_type_of=str, cast=str),
# karagarga section
Validator('karagarga.username', must_exist=True, default='', is_type_of=str, cast=str),
@@ -341,6 +371,10 @@ validators = [
Validator('postgresql.database', must_exist=True, default='', is_type_of=str),
Validator('postgresql.username', must_exist=True, default='', is_type_of=str, cast=str),
Validator('postgresql.password', must_exist=True, default='', is_type_of=str, cast=str),
# anidb section
Validator('anidb.api_client', must_exist=True, default='', is_type_of=str),
Validator('anidb.api_client_ver', must_exist=True, default=1, is_type_of=int),
]
@@ -393,8 +427,9 @@ while failed_validator:
settings[current_validator_details.names[0]] = current_validator_details.default
else:
logging.critical(f"Value for {current_validator_details.names[0]} doesn't pass validation and there's no "
f"default value. This issue must be reported. Bazarr won't works until it's been fixed.")
os._exit(0)
f"default value. This issue must be reported to and fixed by the development team. "
f"Bazarr won't work until it's been fixed.")
stop_bazarr(EXIT_VALIDATION_ERROR)
def write_config():
@@ -413,6 +448,7 @@ array_keys = ['excluded_tags',
'subzero_mods',
'excluded_series_types',
'enabled_providers',
'enabled_integrations',
'path_mappings',
'path_mappings_movie',
'language_equals',
@@ -421,7 +457,7 @@ array_keys = ['excluded_tags',
empty_values = ['', 'None', 'null', 'undefined', None, []]
str_keys = ['chmod']
str_keys = ['chmod', 'log_include_filter', 'log_exclude_filter', 'password', 'f_password', 'hashed_password']
# Increase Sonarr and Radarr sync interval since we now use SignalR feed to update in real time
if settings.sonarr.series_sync < 15:
@@ -440,6 +476,12 @@ if settings.general.wanted_search_frequency == 3:
if settings.general.wanted_search_frequency_movie == 3:
settings.general.wanted_search_frequency_movie = 6
# backward compatibility embeddedsubtitles provider
if hasattr(settings.embeddedsubtitles, 'unknown_as_english'):
if settings.embeddedsubtitles.unknown_as_english:
settings.embeddedsubtitles.unknown_as_fallback = True
settings.embeddedsubtitles.fallback_lang = 'en'
del settings.embeddedsubtitles.unknown_as_english
# save updated settings to file
write_config()
@@ -463,6 +505,26 @@ def get_settings():
return settings_to_return
def validate_log_regex():
# handle bug in dynaconf that changes strings to numbers, so change them back to str
if not isinstance(settings.log.include_filter, str):
settings.log.include_filter = str(settings.log.include_filter)
if not isinstance(settings.log.exclude_filter, str):
settings.log.exclude_filter = str(settings.log.exclude_filter)
if settings.log.use_regex:
# compile any regular expressions specified to see if they are valid
# if invalid, tell the user which one
try:
re.compile(settings.log.include_filter)
except Exception:
raise ValidationError(f"Include filter: invalid regular expression: {settings.log.include_filter}")
try:
re.compile(settings.log.exclude_filter)
except Exception:
raise ValidationError(f"Exclude filter: invalid regular expression: {settings.log.exclude_filter}")
def save_settings(settings_items):
configure_debug = False
configure_captcha = False
@@ -615,21 +677,10 @@ def save_settings(settings_items):
if key != settings.opensubtitlescom.username:
reset_providers = True
region.delete('oscom_token')
region.delete('oscom_server')
elif key == 'settings-opensubtitlescom-password':
if key != settings.opensubtitlescom.password:
reset_providers = True
region.delete('oscom_token')
region.delete('oscom_server')
if key == 'settings-subscene-username':
if key != settings.subscene.username:
reset_providers = True
region.delete('subscene_cookies2')
elif key == 'settings-subscene-password':
if key != settings.subscene.password:
reset_providers = True
region.delete('subscene_cookies2')
if key == 'settings-titlovi-username':
if key != settings.titlovi.username:
@@ -703,6 +754,7 @@ def save_settings(settings_items):
try:
settings.validators.validate()
validate_log_regex()
except ValidationError:
settings.reload()
raise


@@ -125,7 +125,7 @@ def provider_throttle_map():
PROVIDERS_FORCED_OFF = ["addic7ed", "tvsubtitles", "legendasdivx", "napiprojekt", "shooter",
"hosszupuska", "supersubtitles", "titlovi", "assrt", "subscene"]
"hosszupuska", "supersubtitles", "titlovi", "assrt"]
throttle_count = {}
@@ -229,6 +237,14 @@ def get_providers_auth():
'user_agent': settings.addic7ed.user_agent,
'is_vip': settings.addic7ed.vip,
},
'avistaz': {
'cookies': settings.avistaz.cookies,
'user_agent': settings.avistaz.user_agent,
},
'cinemaz': {
'cookies': settings.cinemaz.cookies,
'user_agent': settings.cinemaz.user_agent,
},
'opensubtitles': {
'username': settings.opensubtitles.username,
'password': settings.opensubtitles.password,
@@ -251,11 +259,6 @@ def get_providers_auth():
'also_foreign': False, # fixme
'verify_ssl': settings.podnapisi.verify_ssl
},
'subscene': {
'username': settings.subscene.username,
'password': settings.subscene.password,
'only_foreign': False, # fixme
},
'legendasdivx': {
'username': settings.legendasdivx.username,
'password': settings.legendasdivx.password,
@@ -293,7 +296,8 @@ def get_providers_auth():
'ffprobe_path': _FFPROBE_BINARY,
'ffmpeg_path': _FFMPEG_BINARY,
'timeout': settings.embeddedsubtitles.timeout,
'unknown_as_english': settings.embeddedsubtitles.unknown_as_english,
'unknown_as_fallback': settings.embeddedsubtitles.unknown_as_fallback,
'fallback_lang': settings.embeddedsubtitles.fallback_lang,
},
'karagarga': {
'username': settings.karagarga.username,
@@ -314,7 +318,10 @@ def get_providers_auth():
'response': settings.whisperai.response,
'timeout': settings.whisperai.timeout,
'ffmpeg_path': _FFMPEG_BINARY,
'loglevel': settings.whisperai.loglevel,
'loglevel': settings.whisperai.loglevel,
},
"animetosho": {
'search_threshold': settings.animetosho.search_threshold,
}
}
@@ -489,7 +496,7 @@ def get_throttled_providers():
except Exception:
# set empty content in throttled_providers.dat
logging.error("Invalid content in throttled_providers.dat. Resetting")
set_throttled_providers(providers)
set_throttled_providers(str(providers))
finally:
return providers
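
Passing str(providers) makes the reset path explicit about writing a text representation to throttled_providers.dat, which is presumably parsed back with something like ast.literal_eval on read. A sketch of that round-trip, under that assumption and with a hypothetical file name:

    # sketch of a literal-text round-trip for throttled providers (assumed mechanism)
    import ast

    DAT_FILE = "throttled_providers.dat"   # hypothetical path

    def set_throttled_providers(value: str):
        with open(DAT_FILE, "w") as f:
            f.write(value)

    def get_throttled_providers():
        try:
            with open(DAT_FILE) as f:
                return ast.literal_eval(f.read())
        except (OSError, ValueError, SyntaxError):
            set_throttled_providers(str({}))   # reset with a stringified default
            return {}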


@@ -18,6 +18,7 @@ def clean_libs():
def set_libs():
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), '../custom_libs/'))
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), '../libs/'))


@@ -8,9 +8,9 @@ import platform
import warnings
from logging.handlers import TimedRotatingFileHandler
from utilities.central import get_log_file_path
from pytz_deprecation_shim import PytzUsageWarning
from .get_args import args
from .config import settings
@@ -55,34 +55,37 @@ class NoExceptionFormatter(logging.Formatter):
def formatException(self, record):
return ''
class UnwantedWaitressMessageFilter(logging.Filter):
def filter(self, record):
if settings.general.debug == True:
if settings.general.debug:
# no filtering in debug mode
return True
unwantedMessages = [
"Exception while serving /api/socket.io/",
['Session is disconnected', 'Session not found' ],
"Exception while serving /api/socket.io/",
["'Session is disconnected'", "'Session not found'" ],
"Exception while serving /api/socket.io/",
['"Session is disconnected"', '"Session not found"' ]
unwantedMessages = [
"Exception while serving /api/socket.io/",
['Session is disconnected', 'Session not found'],
"Exception while serving /api/socket.io/",
["'Session is disconnected'", "'Session not found'"],
"Exception while serving /api/socket.io/",
['"Session is disconnected"', '"Session not found"'],
"Exception when servicing %r",
[],
]
wanted = True
wanted = True
listLength = len(unwantedMessages)
for i in range(0, listLength, 2):
if record.msg == unwantedMessages[i]:
exceptionTuple = record.exc_info
if exceptionTuple != None:
if str(exceptionTuple[1]) in unwantedMessages[i+1]:
if exceptionTuple is not None:
if len(unwantedMessages[i+1]) == 0 or str(exceptionTuple[1]) in unwantedMessages[i+1]:
wanted = False
break
return wanted
@@ -91,10 +94,10 @@ def configure_logging(debug=False):
warnings.simplefilter('ignore', category=PytzUsageWarning)
# warnings.simplefilter('ignore', category=SAWarning)
if not debug:
log_level = "INFO"
if debug:
log_level = logging.DEBUG
else:
log_level = "DEBUG"
log_level = logging.INFO
logger.handlers = []
@@ -106,21 +109,21 @@
'%(asctime)-15s - %(name)-32s (%(thread)x) : %(levelname)s (%(module)s:%(lineno)d) - %(message)s')
ch.setFormatter(cf)
ch.setLevel(log_level)
ch.setLevel(logging.DEBUG)
logger.addHandler(ch)
# File Logging
global fh
if sys.version_info >= (3, 9):
fh = PatchedTimedRotatingFileHandler(os.path.join(args.config_dir, 'log/bazarr.log'), when="midnight",
fh = PatchedTimedRotatingFileHandler(get_log_file_path(), when="midnight",
interval=1, backupCount=7, delay=True, encoding='utf-8')
else:
fh = TimedRotatingFileHandler(os.path.join(args.config_dir, 'log/bazarr.log'), when="midnight", interval=1,
fh = TimedRotatingFileHandler(get_log_file_path(), when="midnight", interval=1,
backupCount=7, delay=True, encoding='utf-8')
f = FileHandlerFormatter('%(asctime)s|%(levelname)-8s|%(name)-32s|%(message)s|',
'%Y-%m-%d %H:%M:%S')
fh.setFormatter(f)
fh.setLevel(log_level)
fh.setLevel(logging.DEBUG)
logger.addHandler(fh)
if debug:
@@ -159,7 +162,7 @@ def configure_logging(debug=False):
logging.getLogger("ga4mp.ga4mp").setLevel(logging.ERROR)
logging.getLogger("waitress").setLevel(logging.ERROR)
logging.getLogger("waitress").addFilter(UnwantedWaitressMessageFilter())
logging.getLogger("waitress").addFilter(UnwantedWaitressMessageFilter())
logging.getLogger("knowit").setLevel(logging.CRITICAL)
logging.getLogger("enzyme").setLevel(logging.CRITICAL)
logging.getLogger("guessit").setLevel(logging.WARNING)

View file

@@ -1,6 +1,6 @@
# coding=utf-8
import apprise
from apprise import Apprise, AppriseAsset
import logging
from .database import TableSettingsNotifier, TableEpisodes, TableShows, TableMovies, database, insert, delete, select
@@ -8,7 +8,7 @@ from .database import TableSettingsNotifier, TableEpisodes, TableShows, TableMov
def update_notifier():
# define apprise object
a = apprise.Apprise()
a = Apprise()
# Retrieve all the details
results = a.details()
@@ -70,9 +70,9 @@ def send_notifications(sonarr_series_id, sonarr_episode_id, message):
if not episode:
return
asset = apprise.AppriseAsset(async_mode=False)
asset = AppriseAsset(async_mode=False)
apobj = apprise.Apprise(asset=asset)
apobj = Apprise(asset=asset)
for provider in providers:
if provider.url is not None:
@@ -101,9 +101,9 @@ def send_notifications_movie(radarr_id, message):
else:
movie_year = ''
asset = apprise.AppriseAsset(async_mode=False)
asset = AppriseAsset(async_mode=False)
apobj = apprise.Apprise(asset=asset)
apobj = Apprise(asset=asset)
for provider in providers:
if provider.url is not None:
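
The notifier now imports Apprise and AppriseAsset directly instead of going through the apprise module attribute; the send path is otherwise unchanged. A minimal sketch of that synchronous notification flow, with a placeholder provider URL:

    # minimal synchronous Apprise flow; the URL is a placeholder
    from apprise import Apprise, AppriseAsset

    asset = AppriseAsset(async_mode=False)   # deliver notifications synchronously
    apobj = Apprise(asset=asset)
    apobj.add("json://localhost:8123/notify")
    apobj.notify(title="Bazarr", body="Subtitles downloaded")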


@@ -8,12 +8,14 @@ from apscheduler.triggers.interval import IntervalTrigger
from apscheduler.triggers.cron import CronTrigger
from apscheduler.triggers.date import DateTrigger
from apscheduler.events import EVENT_JOB_SUBMITTED, EVENT_JOB_EXECUTED, EVENT_JOB_ERROR
from apscheduler.jobstores.base import JobLookupError
from datetime import datetime, timedelta
from calendar import day_name
from random import randrange
from tzlocal import get_localzone
from tzlocal.utils import ZoneInfoNotFoundError
try:
import zoneinfo # pragma: no cover
except ImportError:
from backports import zoneinfo # pragma: no cover
from dateutil import tz
import logging
@@ -40,17 +42,24 @@ from dateutil.relativedelta import relativedelta
NO_INTERVAL = "None"
NEVER_DATE = "Never"
ONE_YEAR_IN_SECONDS = 60 * 60 * 24 * 365
ONE_YEAR_IN_SECONDS = 60 * 60 * 24 * 365
def a_long_time_from_now(job):
# job isn't scheduled at all
if job.next_run_time is None:
return True
# currently defined as more than a year from now
delta = job.next_run_time - datetime.now(job.next_run_time.tzinfo)
return delta.total_seconds() > ONE_YEAR_IN_SECONDS
def in_a_century():
century = datetime.now() + relativedelta(years=100)
return century.year
class Scheduler:
def __init__(self):
@@ -58,7 +67,7 @@ class Scheduler:
try:
self.timezone = get_localzone()
except ZoneInfoNotFoundError as e:
except zoneinfo.ZoneInfoNotFoundError as e:
logging.error(f"BAZARR cannot use specified timezone: {e}")
self.timezone = tz.gettz("UTC")
@@ -133,7 +142,6 @@ class Scheduler:
return ", ".join(strings)
def get_time_from_cron(cron):
year = str(cron[0])
day = str(cron[4])
hour = str(cron[5])
@@ -183,8 +191,8 @@ class Scheduler:
else:
interval = get_time_from_cron(job.trigger.fields)
task_list.append({'name': job.name, 'interval': interval,
'next_run_in': next_run, 'next_run_time': next_run, 'job_id': job.id,
'job_running': running})
'next_run_in': next_run, 'next_run_time': next_run, 'job_id': job.id,
'job_running': running})
return task_list
@@ -219,8 +227,8 @@ class Scheduler:
elif backup == "Manually":
trigger = CronTrigger(year=in_a_century())
self.aps_scheduler.add_job(backup_to_zip, trigger,
max_instances=1, coalesce=True, misfire_grace_time=15, id='backup',
name='Backup Database and Configuration File', replace_existing=True)
max_instances=1, coalesce=True, misfire_grace_time=15, id='backup',
name='Backup Database and Configuration File', replace_existing=True)
def __sonarr_full_update_task(self):
if settings.general.use_sonarr:
@@ -301,17 +309,11 @@ class Scheduler:
name='Search for Missing Movies Subtitles', replace_existing=True)
def __upgrade_subtitles_task(self):
if settings.general.upgrade_subs and \
(settings.general.use_sonarr or settings.general.use_radarr):
if settings.general.use_sonarr or settings.general.use_radarr:
self.aps_scheduler.add_job(
upgrade_subtitles, IntervalTrigger(hours=int(settings.general.upgrade_frequency)), max_instances=1,
coalesce=True, misfire_grace_time=15, id='upgrade_subtitles',
name='Upgrade Previously Downloaded Subtitles', replace_existing=True)
else:
try:
self.aps_scheduler.remove_job(job_id='upgrade_subtitles')
except JobLookupError:
pass
def __randomize_interval_task(self):
for job in self.aps_scheduler.get_jobs():
@@ -322,8 +324,8 @@ class Scheduler:
self.aps_scheduler.modify_job(job.id,
next_run_time=datetime.now(tz=self.timezone) +
timedelta(seconds=randrange(
job.trigger.interval.total_seconds() * 0.75,
job.trigger.interval.total_seconds())))
int(job.trigger.interval.total_seconds() * 0.75),
int(job.trigger.interval.total_seconds()))))
def __no_task(self):
for job in self.aps_scheduler.get_jobs():
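
The int() casts added in __randomize_interval_task are needed because random.randrange rejects non-integer bounds on recent Python versions, and timedelta.total_seconds() always returns a float. A short illustration:

    # randrange needs integer bounds; total_seconds() returns a float
    from datetime import timedelta
    from random import randrange

    interval = timedelta(hours=6).total_seconds()             # 21600.0
    jitter = randrange(int(interval * 0.75), int(interval))   # ok with ints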


@@ -1,10 +1,11 @@
# coding=utf-8
import signal
import warnings
import logging
import os
import io
import errno
from literals import EXIT_INTERRUPT, EXIT_NORMAL, EXIT_PORT_ALREADY_IN_USE_ERROR
from utilities.central import restart_bazarr, stop_bazarr
from waitress.server import create_server
from time import sleep
@@ -17,10 +18,7 @@ from .database import close_database
from .app import create_app
app = create_app()
ui_bp.register_blueprint(api_bp, url_prefix='/api')
# Mute UserWarning with flask-restx and Flask >= 2.2.0. Will be raised as an exception in 2.3.0
# https://github.com/python-restx/flask-restx/issues/485
warnings.filterwarnings('ignore', message='The setup method ')
app.register_blueprint(api_bp, url_prefix=base_url.rstrip('/') + '/api')
app.register_blueprint(ui_bp, url_prefix=base_url.rstrip('/'))
@@ -37,6 +35,7 @@ class Server:
self.connected = False
self.address = str(settings.general.ip)
self.port = int(args.port) if args.port else int(settings.general.port)
self.interrupted = False
while not self.connected:
sleep(0.1)
@@ -54,17 +53,32 @@ class Server:
logging.exception("BAZARR cannot bind to specified IP, trying with default (0.0.0.0)")
self.address = '0.0.0.0'
self.connected = False
super(Server, self).__init__()
elif error.errno == errno.EADDRINUSE:
logging.exception("BAZARR cannot bind to specified TCP port, trying with default (6767)")
self.port = '6767'
self.connected = False
if self.port != '6767':
logging.exception("BAZARR cannot bind to specified TCP port, trying with default (6767)")
self.port = '6767'
self.connected = False
super(Server, self).__init__()
else:
logging.exception("BAZARR cannot bind to default TCP port (6767) because it's already in use, "
"exiting...")
self.shutdown(EXIT_PORT_ALREADY_IN_USE_ERROR)
else:
logging.exception("BAZARR cannot start because of unhandled exception.")
self.shutdown()
def interrupt_handler(self, signum, frame):
# print('Server signal interrupt handler called with signal', signum)
if not self.interrupted:
# ignore user hammering Ctrl-C; we heard you the first time!
self.interrupted = True
self.shutdown(EXIT_INTERRUPT)
def start(self):
logging.info(f'BAZARR is started and waiting for request on http://{self.server.effective_host}:'
f'{self.server.effective_port}')
signal.signal(signal.SIGINT, self.interrupt_handler)
try:
self.server.run()
except (KeyboardInterrupt, SystemExit):
@@ -72,31 +86,19 @@ class Server:
except Exception:
pass
def shutdown(self):
try:
stop_file = io.open(os.path.join(args.config_dir, "bazarr.stop"), "w", encoding='UTF-8')
except Exception as e:
logging.error(f'BAZARR Cannot create stop file: {repr(e)}')
else:
logging.info('Bazarr is being shutdown...')
stop_file.write(str(''))
stop_file.close()
close_database()
self.server.close()
os._exit(0)
def close_all(self):
print("Closing database...")
close_database()
print("Closing webserver...")
self.server.close()
def shutdown(self, status=EXIT_NORMAL):
self.close_all()
stop_bazarr(status, False)
def restart(self):
try:
restart_file = io.open(os.path.join(args.config_dir, "bazarr.restart"), "w", encoding='UTF-8')
except Exception as e:
logging.error(f'BAZARR Cannot create restart file: {repr(e)}')
else:
logging.info('Bazarr is being restarted...')
restart_file.write(str(''))
restart_file.close()
close_database()
self.server.close()
os._exit(0)
self.close_all()
restart_bazarr()
webserver = Server()
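
The revised bind logic retries on the default port only once: if 6767 is also busy, the server now exits with EXIT_PORT_ALREADY_IN_USE_ERROR instead of looping. A condensed sketch of that retry ladder around waitress's create_server:

    # condensed sketch of the one-retry bind ladder
    import errno
    import logging
    from waitress.server import create_server

    def bind(app, address, port, fallback=6767):
        try:
            return create_server(app, host=address, port=port)
        except OSError as error:
            if error.errno == errno.EADDRINUSE and port != fallback:
                logging.exception("port in use, retrying on %s", fallback)
                return create_server(app, host=address, port=fallback)
            raise   # caller exits with EXIT_PORT_ALREADY_IN_USE_ERROR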


@@ -12,7 +12,7 @@ from signalrcore.hub_connection_builder import HubConnectionBuilder
from collections import deque
from time import sleep
from constants import headers
from constants import HEADERS
from app.event_handler import event_stream
from sonarr.sync.episodes import sync_episodes, sync_one_episode
from sonarr.sync.series import update_series, update_one_series
@@ -39,7 +39,7 @@ class SonarrSignalrClientLegacy:
self.session = Session()
self.session.timeout = 60
self.session.verify = False
self.session.headers = headers
self.session.headers = HEADERS
self.connection = None
self.connected = False
@@ -162,7 +162,7 @@ class SonarrSignalrClient:
.with_url(f"{url_sonarr()}/signalr/messages?access_token={self.apikey_sonarr}",
options={
"verify_ssl": False,
"headers": headers
"headers": HEADERS
}) \
.with_automatic_reconnect({
"type": "raw",
@@ -229,7 +229,7 @@ class RadarrSignalrClient:
.with_url(f"{url_radarr()}/signalr/messages?access_token={self.apikey_radarr}",
options={
"verify_ssl": False,
"headers": headers
"headers": HEADERS
}) \
.with_automatic_reconnect({
"type": "raw",


@@ -4,14 +4,17 @@ import os
import requests
import mimetypes
from flask import request, abort, render_template, Response, session, send_file, stream_with_context, Blueprint
from flask import (request, abort, render_template, Response, session, send_file, stream_with_context, Blueprint,
redirect)
from functools import wraps
from urllib.parse import unquote
from constants import headers
from constants import HEADERS
from literals import FILE_LOG
from sonarr.info import url_api_sonarr
from radarr.info import url_api_radarr
from utilities.helper import check_credentials
from utilities.central import get_log_file_path
from .config import settings, base_url
from .database import System
@@ -63,6 +66,10 @@ def check_login(actual_method):
@ui_bp.route('/', defaults={'path': ''})
@ui_bp.route('/<path:path>')
def catch_all(path):
if path.startswith('login') and settings.auth.type not in ['basic', 'form']:
# login page has been accessed when no authentication is enabled
return redirect(base_url or "/", code=302)
auth = True
if settings.auth.type == 'basic':
auth = request.authorization
@@ -98,9 +105,9 @@ def catch_all(path):
@check_login
@ui_bp.route('/bazarr.log')
@ui_bp.route('/' + FILE_LOG)
def download_log():
return send_file(os.path.join(args.config_dir, 'log', 'bazarr.log'), max_age=0, as_attachment=True)
return send_file(get_log_file_path(), max_age=0, as_attachment=True)
@check_login
@@ -111,7 +118,7 @@ def series_images(url):
baseUrl = settings.sonarr.base_url
url_image = f'{url_api_sonarr()}{url.lstrip(baseUrl)}?apikey={apikey}'.replace('poster-250', 'poster-500')
try:
req = requests.get(url_image, stream=True, timeout=15, verify=False, headers=headers)
req = requests.get(url_image, stream=True, timeout=15, verify=False, headers=HEADERS)
except Exception:
return '', 404
else:
@@ -125,7 +132,7 @@ def movies_images(url):
baseUrl = settings.radarr.base_url
url_image = f'{url_api_radarr()}{url.lstrip(baseUrl)}?apikey={apikey}'
try:
req = requests.get(url_image, stream=True, timeout=15, verify=False, headers=headers)
req = requests.get(url_image, stream=True, timeout=15, verify=False, headers=HEADERS)
except Exception:
return '', 404
else:
@@ -162,25 +169,25 @@ def configured():
@ui_bp.route('/test/<protocol>/<path:url>', methods=['GET'])
def proxy(protocol, url):
if protocol.lower() not in ['http', 'https']:
return dict(status=False, error='Unsupported protocol')
return dict(status=False, error='Unsupported protocol', code=0)
url = f'{protocol}://{unquote(url)}'
params = request.args
try:
result = requests.get(url, params, allow_redirects=False, verify=False, timeout=5, headers=headers)
result = requests.get(url, params, allow_redirects=False, verify=False, timeout=5, headers=HEADERS)
except Exception as e:
return dict(status=False, error=repr(e))
else:
if result.status_code == 200:
try:
version = result.json()['version']
return dict(status=True, version=version)
return dict(status=True, version=version, code=result.status_code)
except Exception:
return dict(status=False, error='Error Occurred. Check your settings.')
return dict(status=False, error='Error Occurred. Check your settings.', code=result.status_code)
elif result.status_code == 401:
return dict(status=False, error='Access Denied. Check API key.')
return dict(status=False, error='Access Denied. Check API key.', code=result.status_code)
elif result.status_code == 404:
return dict(status=False, error='Cannot get version. Maybe unsupported legacy API call?')
return dict(status=False, error='Cannot get version. Maybe unsupported legacy API call?', code=result.status_code)
elif 300 <= result.status_code <= 399:
return dict(status=False, error='Wrong URL Base.')
return dict(status=False, error='Wrong URL Base.', code=result.status_code)
else:
return dict(status=False, error=result.raise_for_status())
return dict(status=False, error=result.raise_for_status(), code=result.status_code)


@@ -1,13 +1,12 @@
# coding=utf-8
import os
import re
# set Bazarr user-agent used to make requests
headers = {"User-Agent": os.environ["SZ_USER_AGENT"]}
# hearing-impaired detection regex
hi_regex = re.compile(r'[*¶♫♪].{3,}[*¶♫♪]|[\[\(\{].{3,}[\]\)\}](?<!{\\an\d})')
HEADERS = {"User-Agent": os.environ["SZ_USER_AGENT"]}
# minimum file size for Bazarr to consider it a video
MINIMUM_VIDEO_SIZE = 20480
# maximum size for a subtitles file
MAXIMUM_SUBTITLE_SIZE = 1 * 1024 * 1024


@@ -1,7 +1,6 @@
# coding=utf-8
import os
import io
import sys
import subprocess
import subliminal
@@ -20,6 +19,10 @@ from utilities.backup import restore_from_backup
from app.database import init_db
from literals import (EXIT_CONFIG_CREATE_ERROR, ENV_BAZARR_ROOT_DIR, DIR_BACKUP, DIR_CACHE, DIR_CONFIG, DIR_DB, DIR_LOG,
DIR_RESTORE, EXIT_REQUIREMENTS_ERROR)
from utilities.central import make_bazarr_dir, restart_bazarr, stop_bazarr
# set start time global variable as epoch
global startTime
startTime = time.time()
@@ -37,20 +40,15 @@ if not os.path.exists(args.config_dir):
os.mkdir(os.path.join(args.config_dir))
except OSError:
print("BAZARR The configuration directory doesn't exist and Bazarr cannot create it (permission issue?).")
exit(2)
stop_bazarr(EXIT_CONFIG_CREATE_ERROR)
if not os.path.exists(os.path.join(args.config_dir, 'config')):
os.mkdir(os.path.join(args.config_dir, 'config'))
if not os.path.exists(os.path.join(args.config_dir, 'db')):
os.mkdir(os.path.join(args.config_dir, 'db'))
if not os.path.exists(os.path.join(args.config_dir, 'log')):
os.mkdir(os.path.join(args.config_dir, 'log'))
if not os.path.exists(os.path.join(args.config_dir, 'cache')):
os.mkdir(os.path.join(args.config_dir, 'cache'))
if not os.path.exists(os.path.join(args.config_dir, 'backup')):
os.mkdir(os.path.join(args.config_dir, 'backup'))
if not os.path.exists(os.path.join(args.config_dir, 'restore')):
os.mkdir(os.path.join(args.config_dir, 'restore'))
os.environ[ENV_BAZARR_ROOT_DIR] = os.path.join(args.config_dir)
make_bazarr_dir(DIR_BACKUP)
make_bazarr_dir(DIR_CACHE)
make_bazarr_dir(DIR_CONFIG)
make_bazarr_dir(DIR_DB)
make_bazarr_dir(DIR_LOG)
make_bazarr_dir(DIR_RESTORE)
# set subliminal_patch hearing-impaired extension to use when naming subtitles
os.environ["SZ_HI_EXTENSION"] = settings.general.hi_extension
@@ -99,19 +97,11 @@ if not args.no_update:
subprocess.check_output(pip_command, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
logging.exception(f'BAZARR requirements.txt installation result: {e.stdout}')
os._exit(1)
os._exit(EXIT_REQUIREMENTS_ERROR)
else:
logging.info('BAZARR requirements installed.')
try:
restart_file = io.open(os.path.join(args.config_dir, "bazarr.restart"), "w", encoding='UTF-8')
except Exception as e:
logging.error(f'BAZARR Cannot create restart file: {repr(e)}')
else:
logging.info('Bazarr is being restarted...')
restart_file.write(str(''))
restart_file.close()
os._exit(0)
restart_bazarr()
# change default base_url to ''
settings.general.base_url = settings.general.base_url.rstrip('/')

bazarr/literals.py (new file)

@@ -0,0 +1,31 @@
# coding=utf-8
# only primitive types can be specified here
# for other derived values, use constants.py
# bazarr environment variable names
ENV_STOPFILE = 'STOPFILE'
ENV_RESTARTFILE = 'RESTARTFILE'
ENV_BAZARR_ROOT_DIR = 'BAZARR_ROOT'
# bazarr subdirectories
DIR_BACKUP = 'backup'
DIR_CACHE = 'cache'
DIR_CONFIG = 'config'
DIR_DB = 'db'
DIR_LOG = 'log'
DIR_RESTORE = 'restore'
# bazarr special files
FILE_LOG = 'bazarr.log'
FILE_RESTART = 'bazarr.restart'
FILE_STOP = 'bazarr.stop'
# bazarr exit codes
EXIT_NORMAL = 0
EXIT_INTERRUPT = -100
EXIT_VALIDATION_ERROR = -101
EXIT_CONFIG_CREATE_ERROR = -102
EXIT_PYTHON_UPGRADE_NEEDED = -103
EXIT_REQUIREMENTS_ERROR = -104
EXIT_PORT_ALREADY_IN_USE_ERROR = -105
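
These names are shared between the parent script and the child through the environment variables set in bazarr.py. A sketch of how the child side can request a stop with a specific code (the real helper is stop_bazarr in utilities.central, which is not shown in this diff):

    # sketch of the child-side stop request; mirrors the conventions above
    import os

    ENV_STOPFILE = 'STOPFILE'
    EXIT_VALIDATION_ERROR = -101

    def request_stop(status_code=0):
        stop_file = os.environ.get(ENV_STOPFILE, 'bazarr.stop')
        with open(stop_file, 'w', encoding='UTF-8') as f:
            f.write(f'{status_code}\n')   # the parent reads this integer back

    request_stop(EXIT_VALIDATION_ERROR)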


@@ -20,6 +20,7 @@ from app.get_args import args # noqa E402
from app.check_update import apply_update, check_releases, check_if_new_update # noqa E402
from app.config import settings, configure_proxy_func, base_url # noqa E402
from init import * # noqa E402
import logging # noqa E402
# Install downloaded update
if bazarr_version != '':
@@ -40,18 +41,12 @@ from languages.get_languages import load_language_in_db # noqa E402
from app.signalr_client import sonarr_signalr_client, radarr_signalr_client # noqa E402
from app.server import webserver, app # noqa E402
from app.announcements import get_announcements_to_file # noqa E402
from utilities.central import stop_bazarr # noqa E402
from literals import EXIT_NORMAL # noqa E402
if args.create_db_revision:
try:
stop_file = io.open(os.path.join(args.config_dir, "bazarr.stop"), "w", encoding='UTF-8')
except Exception as e:
logging.error(f'BAZARR Cannot create stop file: {repr(e)}')
else:
create_db_revision(app)
logging.info('Bazarr is being shutdown...')
stop_file.write(str(''))
stop_file.close()
os._exit(0)
create_db_revision(app)
stop_bazarr(EXIT_NORMAL)
else:
migrate_db(app)


@@ -5,7 +5,7 @@ import logging
from app.config import settings
from radarr.info import url_api_radarr
from constants import headers
from constants import HEADERS
def browse_radarr_filesystem(path='#'):
@@ -16,7 +16,7 @@ def browse_radarr_filesystem(path='#'):
f"includeFiles=false&apikey={settings.radarr.apikey}")
try:
r = requests.get(url_radarr_api_filesystem, timeout=int(settings.radarr.http_timeout), verify=False,
headers=headers)
headers=HEADERS)
r.raise_for_status()
except requests.exceptions.HTTPError:
logging.exception("BAZARR Error trying to get series from Radarr. Http error.")


@@ -3,12 +3,12 @@
import logging
import requests
import datetime
import json
from requests.exceptions import JSONDecodeError
from dogpile.cache import make_region
from app.config import settings, empty_values
from constants import headers
from constants import HEADERS
region = make_region().configure('dogpile.cache.memory')
@@ -30,17 +30,17 @@ class GetRadarrInfo:
try:
rv = f"{url_radarr()}/api/system/status?apikey={settings.radarr.apikey}"
radarr_json = requests.get(rv, timeout=int(settings.radarr.http_timeout), verify=False,
headers=headers).json()
headers=HEADERS).json()
if 'version' in radarr_json:
radarr_version = radarr_json['version']
else:
raise json.decoder.JSONDecodeError
except json.decoder.JSONDecodeError:
raise JSONDecodeError
except JSONDecodeError:
try:
rv = f"{url_radarr()}/api/v3/system/status?apikey={settings.radarr.apikey}"
radarr_version = requests.get(rv, timeout=int(settings.radarr.http_timeout), verify=False,
headers=headers).json()['version']
except json.decoder.JSONDecodeError:
headers=HEADERS).json()['version']
except JSONDecodeError:
logging.debug('BAZARR cannot get Radarr version')
radarr_version = 'unknown'
except Exception:
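
Catching requests.exceptions.JSONDecodeError (available since requests 2.27) is the safer spelling here, because Response.json() raises the requests-level exception rather than json.decoder.JSONDecodeError directly. A short illustration, with a placeholder URL:

    # Response.json() raises requests' own JSONDecodeError on a non-JSON body
    import requests
    from requests.exceptions import JSONDecodeError

    r = requests.get("http://localhost:6767/not-json", timeout=5)
    try:
        data = r.json()
    except JSONDecodeError:
        data = None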


@@ -5,7 +5,7 @@ import requests
from app.config import settings
from radarr.info import url_api_radarr
from constants import headers
from constants import HEADERS
def notify_radarr(radarr_id):
@@ -15,6 +15,6 @@ def notify_radarr(radarr_id):
'name': 'RescanMovie',
'movieId': int(radarr_id)
}
requests.post(url, json=data, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers)
requests.post(url, json=data, timeout=int(settings.radarr.http_timeout), verify=False, headers=HEADERS)
except Exception:
logging.exception('BAZARR cannot notify Radarr')


@@ -8,7 +8,7 @@ from app.config import settings
from utilities.path_mappings import path_mappings
from app.database import TableMoviesRootfolder, TableMovies, database, delete, update, insert, select
from radarr.info import url_api_radarr
from constants import headers
from constants import HEADERS
def get_radarr_rootfolder():
@@ -19,7 +19,7 @@ def get_radarr_rootfolder():
url_radarr_api_rootfolder = f"{url_api_radarr()}rootfolder?apikey={apikey_radarr}"
try:
rootfolder = requests.get(url_radarr_api_rootfolder, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers)
rootfolder = requests.get(url_radarr_api_rootfolder, timeout=int(settings.radarr.http_timeout), verify=False, headers=HEADERS)
except requests.exceptions.ConnectionError:
logging.exception("BAZARR Error trying to get rootfolder from Radarr. Connection Error.")
return []
@@ -75,8 +75,8 @@ def check_radarr_rootfolder():
if not os.path.isdir(path_mappings.path_replace_movie(root_path)):
database.execute(
update(TableMoviesRootfolder)
.values(accessible=0, error='This Radarr root directory does not seems to be accessible by Please '
'check path mapping.')
.values(accessible=0, error='This Radarr root directory does not seem to be accessible by Bazarr. '
'Please check path mapping or if directory/drive is online.')
.where(TableMoviesRootfolder.id == item.id))
elif not os.access(path_mappings.path_replace_movie(root_path), os.W_OK):
database.execute(


@@ -21,10 +21,13 @@ from .parser import movieParser
bool_map = {"True": True, "False": False}
FEATURE_PREFIX = "SYNC_MOVIES "
def trace(message):
if settings.general.debug:
logging.debug(FEATURE_PREFIX + message)
def update_all_movies():
movies_full_scan_subtitles()
logging.info('BAZARR All existing movie subtitles indexed from disk.')
@@ -63,6 +66,7 @@ def get_movie_monitored_status(movie_id):
else:
return bool_map[existing_movie_monitored[0]]
# Insert new movies in DB
def add_movie(added_movie, send_event):
try:
@@ -158,7 +162,7 @@ def update_movies(send_event=True):
# Only movies that Radarr says have files downloaded will be kept up to date in the DB
if movie['hasFile'] is True:
if 'movieFile' in movie:
if sync_monitored:
if sync_monitored:
if get_movie_monitored_status(movie['tmdbId']) != movie['monitored']:
# monitored status is not the same as our DB
trace(f"{i}: (Monitor Status Mismatch) {movie['title']}")
@@ -187,19 +191,21 @@
add_movie(parsed_movie, send_event)
movies_added.append(parsed_movie['title'])
else:
trace(f"{i}: (Skipped File Missing) {movie['title']}")
files_missing += 1
trace(f"{i}: (Skipped File Missing) {movie['title']}")
files_missing += 1
if send_event:
hide_progress(id='movies_progress')
trace(f"Skipped {files_missing} file missing movies out of {i}")
trace(f"Skipped {files_missing} file missing movies out of {movies_count}")
if sync_monitored:
trace(f"Skipped {skipped_count} unmonitored movies out of {i}")
trace(f"Processed {i - files_missing - skipped_count} movies out of {i} " +
f"with {len(movies_added)} added, {len(movies_updated)} updated and {len(movies_deleted)} deleted")
trace(f"Skipped {skipped_count} unmonitored movies out of {movies_count}")
trace(f"Processed {movies_count - files_missing - skipped_count} movies out of {movies_count} "
f"with {len(movies_added)} added, {len(movies_updated)} updated and "
f"{len(movies_deleted)} deleted")
else:
trace(f"Processed {i - files_missing} movies out of {i} with {len(movies_added)} added and {len(movies_updated)} updated")
trace(f"Processed {movies_count - files_missing} movies out of {movies_count} with {len(movies_added)} added and "
f"{len(movies_updated)} updated")
logging.debug('BAZARR All movies synced from Radarr into database.')


@@ -115,27 +115,27 @@ def movieParser(movie, action, tags_dict, movie_default_profile, audio_profiles)
tags = [d['label'] for d in tags_dict if d['id'] in movie['tags']]
parsed_movie = {'radarrId': int(movie["id"]),
'title': movie["title"],
'path': os.path.join(movie["path"], movie['movieFile']['relativePath']),
'tmdbId': str(movie["tmdbId"]),
'poster': poster,
'fanart': fanart,
'audio_language': str(audio_language),
'sceneName': sceneName,
'monitored': str(bool(movie['monitored'])),
'year': str(movie['year']),
'sortTitle': movie['sortTitle'],
'alternativeTitles': alternativeTitles,
'format': format,
'resolution': resolution,
'video_codec': videoCodec,
'audio_codec': audioCodec,
'overview': overview,
'imdbId': imdbId,
'movie_file_id': int(movie['movieFile']['id']),
'tags': str(tags),
'file_size': movie['movieFile']['size']}
'title': movie["title"],
'path': os.path.join(movie["path"], movie['movieFile']['relativePath']),
'tmdbId': str(movie["tmdbId"]),
'poster': poster,
'fanart': fanart,
'audio_language': str(audio_language),
'sceneName': sceneName,
'monitored': str(bool(movie['monitored'])),
'year': str(movie['year']),
'sortTitle': movie['sortTitle'],
'alternativeTitles': alternativeTitles,
'format': format,
'resolution': resolution,
'video_codec': videoCodec,
'audio_codec': audioCodec,
'overview': overview,
'imdbId': imdbId,
'movie_file_id': int(movie['movieFile']['id']),
'tags': str(tags),
'file_size': movie['movieFile']['size']}
if action == 'insert':
parsed_movie['subtitles'] = '[]'
parsed_movie['profileId'] = movie_default_profile


@@ -5,7 +5,7 @@ import logging
from app.config import settings
from radarr.info import get_radarr_info, url_api_radarr
from constants import headers
from constants import HEADERS
def get_profile_list():
@ -16,7 +16,7 @@ def get_profile_list():
f"apikey={apikey_radarr}")
try:
profiles_json = requests.get(url_radarr_api_movies, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers)
profiles_json = requests.get(url_radarr_api_movies, timeout=int(settings.radarr.http_timeout), verify=False, headers=HEADERS)
except requests.exceptions.ConnectionError:
logging.exception("BAZARR Error trying to get profiles from Radarr. Connection Error.")
except requests.exceptions.Timeout:
@ -45,7 +45,7 @@ def get_tags():
url_radarr_api_series = f"{url_api_radarr()}tag?apikey={apikey_radarr}"
try:
tagsDict = requests.get(url_radarr_api_series, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers)
tagsDict = requests.get(url_radarr_api_series, timeout=int(settings.radarr.http_timeout), verify=False, headers=HEADERS)
except requests.exceptions.ConnectionError:
logging.exception("BAZARR Error trying to get tags from Radarr. Connection Error.")
return []
@ -69,7 +69,7 @@ def get_movies_from_radarr_api(apikey_radarr, radarr_id=None):
url_radarr_api_movies = f'{url_api_radarr()}movie{f"/{radarr_id}" if radarr_id else ""}?apikey={apikey_radarr}'
try:
r = requests.get(url_radarr_api_movies, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers)
r = requests.get(url_radarr_api_movies, timeout=int(settings.radarr.http_timeout), verify=False, headers=HEADERS)
if r.status_code == 404:
return
r.raise_for_status()
@ -93,3 +93,32 @@ def get_movies_from_radarr_api(apikey_radarr, radarr_id=None):
return r.json()
else:
return
def get_history_from_radarr_api(apikey_radarr, movie_id):
url_radarr_api_history = f"{url_api_radarr()}history?eventType=1&movieIds={movie_id}&apikey={apikey_radarr}"
try:
r = requests.get(url_radarr_api_history, timeout=int(settings.radarr.http_timeout), verify=False,
headers=HEADERS)
r.raise_for_status()
except requests.exceptions.HTTPError:
logging.exception("BAZARR Error trying to get history from Radarr. Http error.")
return
except requests.exceptions.ConnectionError:
logging.exception("BAZARR Error trying to get history from Radarr. Connection Error.")
return
except requests.exceptions.Timeout:
logging.exception("BAZARR Error trying to get history from Radarr. Timeout Error.")
return
except requests.exceptions.RequestException:
logging.exception("BAZARR Error trying to get history from Radarr.")
return
except Exception as e:
logging.exception(f"Exception raised while getting history from Radarr API: {e}")
return
else:
if r.status_code == 200:
return r.json()
else:
return
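The lowercase headers dict is renamed to the module-level constant HEADERS throughout these helpers; a minimal sketch of the presumed shape and call pattern (the actual User-Agent value in constants.py may differ):

import requests

HEADERS = {"User-Agent": "Bazarr"}  # presumed shape; real value may differ

def get_json(url, timeout=60):
    # every Radarr/Sonarr helper above now passes the shared HEADERS constant
    try:
        r = requests.get(url, timeout=timeout, verify=False, headers=HEADERS)
        r.raise_for_status()
        return r.json()
    except requests.exceptions.RequestException:
        return None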

View file

@ -5,7 +5,7 @@ import logging
from app.config import settings
from sonarr.info import url_api_sonarr
from constants import headers
from constants import HEADERS
def browse_sonarr_filesystem(path='#'):
@ -15,7 +15,7 @@ def browse_sonarr_filesystem(path='#'):
f"includeFiles=false&apikey={settings.sonarr.apikey}")
try:
r = requests.get(url_sonarr_api_filesystem, timeout=int(settings.sonarr.http_timeout), verify=False,
headers=headers)
headers=HEADERS)
r.raise_for_status()
except requests.exceptions.HTTPError:
logging.exception("BAZARR Error trying to get series from Sonarr. Http error.")

View file

@ -3,12 +3,12 @@
import logging
import requests
import datetime
import json
from requests.exceptions import JSONDecodeError
from dogpile.cache import make_region
from app.config import settings, empty_values
from constants import headers
from constants import HEADERS
region = make_region().configure('dogpile.cache.memory')
@ -30,17 +30,17 @@ class GetSonarrInfo:
try:
sv = f"{url_sonarr()}/api/system/status?apikey={settings.sonarr.apikey}"
sonarr_json = requests.get(sv, timeout=int(settings.sonarr.http_timeout), verify=False,
headers=headers).json()
headers=HEADERS).json()
if 'version' in sonarr_json:
sonarr_version = sonarr_json['version']
else:
raise json.decoder.JSONDecodeError
except json.decoder.JSONDecodeError:
raise JSONDecodeError
except JSONDecodeError:
try:
sv = f"{url_sonarr()}/api/v3/system/status?apikey={settings.sonarr.apikey}"
sonarr_version = requests.get(sv, timeout=int(settings.sonarr.http_timeout), verify=False,
headers=headers).json()['version']
except json.decoder.JSONDecodeError:
headers=HEADERS).json()['version']
except JSONDecodeError:
logging.debug('BAZARR cannot get Sonarr version')
sonarr_version = 'unknown'
except Exception:
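A short sketch of what switching to requests' JSONDecodeError buys (the helper below is illustrative): response.json() raises simplejson's or the stdlib's decode error depending on which backend requests picked, and requests.exceptions.JSONDecodeError (requests >= 2.27) covers both, whereas json.decoder.JSONDecodeError only covers the stdlib one.

import requests
from requests.exceptions import JSONDecodeError

def version_or_unknown(url):
    # hypothetical helper mirroring the version-probing fallback above
    try:
        return requests.get(url, timeout=60).json().get('version', 'unknown')
    except (requests.exceptions.RequestException, JSONDecodeError):
        return 'unknown'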

View file

@ -5,7 +5,7 @@ import requests
from app.config import settings
from sonarr.info import url_api_sonarr
from constants import headers
from constants import HEADERS
def notify_sonarr(sonarr_series_id):
@ -15,6 +15,6 @@ def notify_sonarr(sonarr_series_id):
'name': 'RescanSeries',
'seriesId': int(sonarr_series_id)
}
requests.post(url, json=data, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
requests.post(url, json=data, timeout=int(settings.sonarr.http_timeout), verify=False, headers=HEADERS)
except Exception:
logging.exception('BAZARR cannot notify Sonarr')

View file

@ -8,7 +8,7 @@ from app.config import settings
from app.database import TableShowsRootfolder, TableShows, database, insert, update, delete, select
from utilities.path_mappings import path_mappings
from sonarr.info import url_api_sonarr
from constants import headers
from constants import HEADERS
def get_sonarr_rootfolder():
@ -19,7 +19,7 @@ def get_sonarr_rootfolder():
url_sonarr_api_rootfolder = f"{url_api_sonarr()}rootfolder?apikey={apikey_sonarr}"
try:
rootfolder = requests.get(url_sonarr_api_rootfolder, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
rootfolder = requests.get(url_sonarr_api_rootfolder, timeout=int(settings.sonarr.http_timeout), verify=False, headers=HEADERS)
except requests.exceptions.ConnectionError:
logging.exception("BAZARR Error trying to get rootfolder from Sonarr. Connection Error.")
return []
@ -75,8 +75,8 @@ def check_sonarr_rootfolder():
if not os.path.isdir(path_mappings.path_replace(root_path)):
database.execute(
update(TableShowsRootfolder)
.values(accessible=0, error='This Sonarr root directory does not seems to be accessible by Bazarr. '
'Please check path mapping.')
.values(accessible=0, error='This Sonarr root directory does not seem to be accessible by Bazarr. '
'Please check path mapping or if directory/drive is online.')
.where(TableShowsRootfolder.id == item.id))
elif not os.access(path_mappings.path_replace(root_path), os.W_OK):
database.execute(

View file

@ -12,7 +12,7 @@ from utilities.path_mappings import path_mappings
from subtitles.indexer.series import store_subtitles, series_full_scan_subtitles
from subtitles.mass_download import episode_download_subtitles
from app.event_handler import event_stream
from sonarr.info import get_sonarr_info, url_sonarr
from sonarr.info import get_sonarr_info
from .parser import episodeParser
from .utils import get_episodes_from_sonarr_api, get_episodesFiles_from_sonarr_api
@ -21,10 +21,13 @@ from .utils import get_episodes_from_sonarr_api, get_episodesFiles_from_sonarr_a
bool_map = {"True": True, "False": False}
FEATURE_PREFIX = "SYNC_EPISODES "
def trace(message):
if settings.general.debug:
logging.debug(FEATURE_PREFIX + message)
def get_episodes_monitored_table(series_id):
episodes_monitored = database.execute(
select(TableEpisodes.episode_file_id, TableEpisodes.monitored)
@ -32,7 +35,8 @@ def get_episodes_monitored_table(series_id):
.all()
episode_dict = dict((x, y) for x, y in episodes_monitored)
return episode_dict
def update_all_episodes():
series_full_scan_subtitles()
logging.info('BAZARR All existing episode subtitles indexed from disk.')
@ -74,7 +78,6 @@ def sync_episodes(series_id, send_event=True):
if item:
episode['episodeFile'] = item[0]
sync_monitored = settings.sonarr.sync_only_monitored_series and settings.sonarr.sync_only_monitored_episodes
if sync_monitored:
episodes_monitored = get_episodes_monitored_table(series_id)
@ -122,7 +125,7 @@ def sync_episodes(series_id, send_event=True):
episodes_to_add.append(episodeParser(episode))
else:
return
if sync_monitored:
# try to avoid unnecessary database calls
if settings.general.debug:
@ -175,7 +178,6 @@ def sync_episodes(series_id, send_event=True):
def sync_one_episode(episode_id, defer_search=False):
logging.debug(f'BAZARR syncing this specific episode from Sonarr: {episode_id}')
url = url_sonarr()
apikey_sonarr = settings.sonarr.apikey
# Check if there's a row in database for this episode ID

View file

@ -5,7 +5,6 @@ import logging
from sqlalchemy.exc import IntegrityError
from app.config import settings
from sonarr.info import url_sonarr
from subtitles.indexer.series import list_missing_subtitles
from sonarr.rootfolder import check_sonarr_rootfolder
from app.database import TableShows, TableLanguagesProfiles, database, insert, update, delete, select
@ -20,10 +19,13 @@ from .utils import get_profile_list, get_tags, get_series_from_sonarr_api
bool_map = {"True": True, "False": False}
FEATURE_PREFIX = "SYNC_SERIES "
def trace(message):
if settings.general.debug:
logging.debug(FEATURE_PREFIX + message)
def get_series_monitored_table():
series_monitored = database.execute(
select(TableShows.tvdbId, TableShows.monitored))\
@ -31,6 +33,7 @@ def get_series_monitored_table():
series_dict = dict((x, y) for x, y in series_monitored)
return series_dict
def update_series(send_event=True):
check_sonarr_rootfolder()
apikey_sonarr = settings.sonarr.apikey
@ -74,7 +77,7 @@ def update_series(send_event=True):
series_monitored = get_series_monitored_table()
skipped_count = 0
trace(f"Starting sync for {series_count} shows")
for i, show in enumerate(series):
if send_event:
show_progress(id='series_progress',
@ -152,7 +155,7 @@ def update_series(send_event=True):
removed_series = list(set(current_shows_db) - set(current_shows_sonarr))
for series in removed_series:
# try to avoid unnecessary database calls
# try to avoid unnecessary database calls
if settings.general.debug:
series_title = database.execute(select(TableShows.title).where(TableShows.sonarrSeriesId == series)).first()[0]
trace(f"Deleting {series_title}")

View file

@ -5,7 +5,7 @@ import logging
from app.config import settings
from sonarr.info import get_sonarr_info, url_api_sonarr
from constants import headers
from constants import HEADERS
def get_profile_list():
@ -23,7 +23,7 @@ def get_profile_list():
try:
profiles_json = requests.get(url_sonarr_api_series, timeout=int(settings.sonarr.http_timeout), verify=False,
headers=headers)
headers=HEADERS)
except requests.exceptions.ConnectionError:
logging.exception("BAZARR Error trying to get profiles from Sonarr. Connection Error.")
return None
@ -53,7 +53,7 @@ def get_tags():
url_sonarr_api_series = f"{url_api_sonarr()}tag?apikey={apikey_sonarr}"
try:
tagsDict = requests.get(url_sonarr_api_series, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
tagsDict = requests.get(url_sonarr_api_series, timeout=int(settings.sonarr.http_timeout), verify=False, headers=HEADERS)
except requests.exceptions.ConnectionError:
logging.exception("BAZARR Error trying to get tags from Sonarr. Connection Error.")
return []
@ -71,7 +71,7 @@ def get_series_from_sonarr_api(apikey_sonarr, sonarr_series_id=None):
url_sonarr_api_series = (f"{url_api_sonarr()}series/{sonarr_series_id if sonarr_series_id else ''}?"
f"apikey={apikey_sonarr}")
try:
r = requests.get(url_sonarr_api_series, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
r = requests.get(url_sonarr_api_series, timeout=int(settings.sonarr.http_timeout), verify=False, headers=HEADERS)
r.raise_for_status()
except requests.exceptions.HTTPError as e:
if e.response.status_code:
@ -110,7 +110,7 @@ def get_episodes_from_sonarr_api(apikey_sonarr, series_id=None, episode_id=None)
return
try:
r = requests.get(url_sonarr_api_episode, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
r = requests.get(url_sonarr_api_episode, timeout=int(settings.sonarr.http_timeout), verify=False, headers=HEADERS)
r.raise_for_status()
except requests.exceptions.HTTPError:
logging.exception("BAZARR Error trying to get episodes from Sonarr. Http error.")
@ -144,7 +144,7 @@ def get_episodesFiles_from_sonarr_api(apikey_sonarr, series_id=None, episode_fil
try:
r = requests.get(url_sonarr_api_episodeFiles, timeout=int(settings.sonarr.http_timeout), verify=False,
headers=headers)
headers=HEADERS)
r.raise_for_status()
except requests.exceptions.HTTPError:
logging.exception("BAZARR Error trying to get episodeFiles from Sonarr. Http error.")
@ -166,3 +166,32 @@ def get_episodesFiles_from_sonarr_api(apikey_sonarr, series_id=None, episode_fil
return r.json()
else:
return
def get_history_from_sonarr_api(apikey_sonarr, episode_id):
url_sonarr_api_history = f"{url_api_sonarr()}history?eventType=1&episodeId={episode_id}&apikey={apikey_sonarr}"
try:
r = requests.get(url_sonarr_api_history, timeout=int(settings.sonarr.http_timeout), verify=False,
headers=HEADERS)
r.raise_for_status()
except requests.exceptions.HTTPError:
logging.exception("BAZARR Error trying to get history from Sonarr. Http error.")
return
except requests.exceptions.ConnectionError:
logging.exception("BAZARR Error trying to get history from Sonarr. Connection Error.")
return
except requests.exceptions.Timeout:
logging.exception("BAZARR Error trying to get history from Sonarr. Timeout Error.")
return
except requests.exceptions.RequestException:
logging.exception("BAZARR Error trying to get history from Sonarr.")
return
except Exception as e:
logging.exception(f"Exception raised while getting history from Sonarr API: {e}")
return
else:
if r.status_code == 200:
return r.json()
else:
return
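A hedged usage sketch of the new history helper (the episode id is hypothetical; the records / nzbInfoUrl fields match how the arr_history refiner below consumes the response):

from app.config import settings
from sonarr.sync.utils import get_history_from_sonarr_api

history = get_history_from_sonarr_api(settings.sonarr.apikey, episode_id=123)
if history:  # the helper returns None on any request error
    for grab in history.get('records', []):
        info_url = grab.get('data', {}).get('nzbInfoUrl')
        if info_url:  # take the latest grab that has an indexer info URL
            print(info_url)
            break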

View file

@ -24,8 +24,9 @@ from .processing import process_subtitle
@update_pools
def generate_subtitles(path, languages, audio_language, sceneName, title, media_type,
forced_minimum_score=None, is_upgrade=False, profile_id=None, check_if_still_required=False):
def generate_subtitles(path, languages, audio_language, sceneName, title, media_type, forced_minimum_score=None,
is_upgrade=False, profile_id=None, check_if_still_required=False,
previous_subtitles_to_delete=None):
if not languages:
return None
@ -87,6 +88,13 @@ def generate_subtitles(path, languages, audio_language, sceneName, title, media_
fld = get_target_folder(path)
chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
'win') and settings.general.chmod_enabled else None
if is_upgrade and previous_subtitles_to_delete:
try:
# delete the previously downloaded subtitles in case of an upgrade, to prevent
# an endless upgrade-loop edge case.
os.remove(previous_subtitles_to_delete)
except (OSError, FileNotFoundError):
pass
saved_subtitles = save_subtitles(video.original_path, subtitles,
single=settings.general.single_language,
tags=None, # fixme
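An equivalent deletion guard for the upgrade branch above, as a sketch: FileNotFoundError is a subclass of OSError, so a single suppress covers both listed exceptions.

import contextlib
import os

with contextlib.suppress(OSError):
    os.remove('/path/to/previous.subtitle.srt')  # hypothetical path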

View file

@ -264,7 +264,10 @@ def list_missing_subtitles_movies(no=None, send_event=True):
event_stream(type='badges')
def movies_full_scan_subtitles(use_cache=settings.radarr.use_ffprobe_cache):
def movies_full_scan_subtitles(use_cache=None):
if use_cache is None:
use_cache = settings.radarr.use_ffprobe_cache
movies = database.execute(
select(TableMovies.path))\
.all()

View file

@ -266,7 +266,10 @@ def list_missing_subtitles(no=None, epno=None, send_event=True):
event_stream(type='badges')
def series_full_scan_subtitles(use_cache=settings.sonarr.use_ffprobe_cache):
def series_full_scan_subtitles(use_cache=None):
if use_cache is None:
use_cache = settings.sonarr.use_ffprobe_cache
episodes = database.execute(
select(TableEpisodes.path))\
.all()
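Both full-scan helpers get the same treatment; a minimal standalone sketch of the pitfall being fixed (names are illustrative): a default like use_cache=settings.sonarr.use_ffprobe_cache is evaluated once at import time, so later changes to the setting would never be seen.

USE_CACHE = True  # stands in for the settings value

def scan_old(use_cache=USE_CACHE):  # default frozen at definition time
    return use_cache

def scan_new(use_cache=None):  # default resolved on every call
    if use_cache is None:
        use_cache = USE_CACHE
    return use_cache

USE_CACHE = False
print(scan_old(), scan_new())  # True False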

View file

@ -9,8 +9,8 @@ from subliminal_patch import core
from subzero.language import Language
from charset_normalizer import detect
from constants import MAXIMUM_SUBTITLE_SIZE
from app.config import settings
from constants import hi_regex
from utilities.path_mappings import path_mappings
@ -68,7 +68,7 @@ def guess_external_subtitles(dest_folder, subtitles, media_type, previously_inde
forced = True if os.path.splitext(os.path.splitext(subtitle)[0])[1] == '.forced' else False
# to improve performance, skip detection of files larger than 1 MB
if os.path.getsize(subtitle_path) > 1 * 1024 * 1024:
if os.path.getsize(subtitle_path) > MAXIMUM_SUBTITLE_SIZE:
logging.debug(f"BAZARR subtitles file is too large to be text based. Skipping this file: "
f"{subtitle_path}")
continue
@ -119,7 +119,7 @@ def guess_external_subtitles(dest_folder, subtitles, media_type, previously_inde
# check if file exist:
if os.path.exists(subtitle_path) and os.path.splitext(subtitle_path)[1] in core.SUBTITLE_EXTENSIONS:
# to improve performance, skip detection of files larger than 1 MB
if os.path.getsize(subtitle_path) > 1 * 1024 * 1024:
if os.path.getsize(subtitle_path) > MAXIMUM_SUBTITLE_SIZE:
logging.debug(f"BAZARR subtitles file is too large to be text based. Skipping this file: "
f"{subtitle_path}")
continue
@ -136,6 +136,6 @@ def guess_external_subtitles(dest_folder, subtitles, media_type, previously_inde
continue
text = text.decode(encoding)
if bool(re.search(hi_regex, text)):
if bool(re.search(core.HI_REGEX, text)):
subtitles[subtitle] = Language.rebuild(subtitles[subtitle], forced=False, hi=True)
return subtitles
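The hard-coded 1 * 1024 * 1024 comparisons are replaced with a shared constant; its presumed definition, matching the literal it replaces:

MAXIMUM_SUBTITLE_SIZE = 1 * 1024 * 1024  # 1 MB; presumed value in constants.py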

View file

@ -18,7 +18,7 @@ from app.config import get_scores, settings, get_array_from
from utilities.helper import get_target_folder, force_unicode
from app.database import get_profiles_list
from .pool import update_pools, _get_pool, _init_pool
from .pool import update_pools, _get_pool
from .utils import get_video, _get_lang_obj, _get_scores, _set_forced_providers
from .processing import process_subtitle
@ -46,21 +46,7 @@ def manual_search(path, profile_id, providers, sceneName, title, media_type):
try:
if providers:
subtitles = list_all_subtitles([video], language_set, pool)
if 'subscene' in providers:
s_pool = _init_pool("movie", profile_id, {"subscene"})
subscene_language_set = set()
for language in language_set:
if language.forced:
subscene_language_set.add(language)
if len(subscene_language_set):
s_pool.provider_configs.update({"subscene": {"only_foreign": True}})
subtitles_subscene = list_all_subtitles([video], subscene_language_set, s_pool)
s_pool.provider_configs.update({"subscene": {"only_foreign": False}})
subtitles[video] += subtitles_subscene[video]
else:
subtitles = []
logging.info("BAZARR All providers are throttled")
return 'All providers are throttled'
except Exception:

View file

@ -2,5 +2,12 @@
from .ffprobe import refine_from_ffprobe
from .database import refine_from_db
from .arr_history import refine_from_arr_history
from .anidb import refine_from_anidb
registered = {"database": refine_from_db, "ffprobe": refine_from_ffprobe}
registered = {
"database": refine_from_db,
"ffprobe": refine_from_ffprobe,
"arr_history": refine_from_arr_history,
"anidb": refine_from_anidb,
}
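A hedged sketch of how the registry above is consumed (the driver code is illustrative; the two new refiners below both take a path and a video):

def run_refiners(path, video, names=('database', 'ffprobe', 'arr_history', 'anidb')):
    # look each requested refiner up in the registry and apply it in order
    for name in names:
        refiner = registered.get(name)
        if refiner:
            refiner(path, video)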

View file

@ -0,0 +1,140 @@
# coding=utf-8
# fmt: off
import logging
import requests
from collections import namedtuple
from datetime import timedelta
from requests.exceptions import HTTPError
from app.config import settings
from subliminal import Episode, region
try:
from lxml import etree
except ImportError:
try:
import xml.etree.cElementTree as etree
except ImportError:
import xml.etree.ElementTree as etree
refined_providers = {'animetosho'}
api_url = 'http://api.anidb.net:9001/httpapi'
class AniDBClient(object):
def __init__(self, api_client_key=None, api_client_ver=1, session=None):
self.session = session or requests.Session()
self.api_client_key = api_client_key
self.api_client_ver = api_client_ver
AnimeInfo = namedtuple('AnimeInfo', ['anime', 'episode_offset'])
@region.cache_on_arguments(expiration_time=timedelta(days=1).total_seconds())
def get_series_mappings(self):
r = self.session.get(
'https://raw.githubusercontent.com/Anime-Lists/anime-lists/master/anime-list.xml',
timeout=10
)
r.raise_for_status()
return r.content
@region.cache_on_arguments(expiration_time=timedelta(days=1).total_seconds())
def get_series_id(self, mappings, tvdb_series_season, tvdb_series_id, episode):
# Enrich the collection of anime with the episode offset
animes = [
self.AnimeInfo(anime, int(anime.attrib.get('episodeoffset', 0)))
for anime in mappings.findall(
f".//anime[@tvdbid='{tvdb_series_id}'][@defaulttvdbseason='{tvdb_series_season}']"
)
]
if not animes:
return None, None
# Sort the anime by offset in ascending order
animes.sort(key=lambda a: a.episode_offset)
# Unlike TVDB, AniDB uses different ids for the parts of a season
anidb_id = None
offset = 0
for index, anime_info in enumerate(animes):
anime, episode_offset = anime_info
anidb_id = int(anime.attrib.get('anidbid'))
if episode > episode_offset:
anidb_id = anidb_id
offset = episode_offset
return anidb_id, episode - offset
@region.cache_on_arguments(expiration_time=timedelta(days=1).total_seconds())
def get_series_episodes_ids(self, tvdb_series_id, season, episode):
mappings = etree.fromstring(self.get_series_mappings())
series_id, episode_no = self.get_series_id(mappings, season, tvdb_series_id, episode)
if not series_id:
return None, None
episodes = etree.fromstring(self.get_episodes(series_id))
return series_id, int(episodes.find(f".//episode[epno='{episode_no}']").attrib.get('id'))
@region.cache_on_arguments(expiration_time=timedelta(days=1).total_seconds())
def get_episodes(self, series_id):
r = self.session.get(
api_url,
params={
'request': 'anime',
'client': self.api_client_key,
'clientver': self.api_client_ver,
'protover': 1,
'aid': series_id
},
timeout=10)
r.raise_for_status()
xml_root = etree.fromstring(r.content)
response_code = xml_root.attrib.get('code')
if response_code == '500':
raise HTTPError('AniDB API Abuse detected. Banned status.')
elif response_code == '302':
raise HTTPError('AniDB API Client error. Client is disabled or does not exist.')
episode_elements = xml_root.find('episodes')
if not episode_elements:
raise ValueError
return etree.tostring(episode_elements, encoding='utf8', method='xml')
def refine_from_anidb(path, video):
if not isinstance(video, Episode) or not video.series_tvdb_id:
logging.debug(f'Video is not an Anime TV series, skipping refinement for {video}')
return
if refined_providers.intersection(settings.general.enabled_providers) and video.series_anidb_id is None:
refine_anidb_ids(video)
def refine_anidb_ids(video):
anidb_client = AniDBClient(settings.anidb.api_client, settings.anidb.api_client_ver)
season = video.season if video.season else 0
anidb_series_id, anidb_episode_id = anidb_client.get_series_episodes_ids(video.series_tvdb_id, season, video.episode)
if not anidb_episode_id:
logging.error(f'Could not find anime series {video.series}')
return video
video.series_anidb_id = anidb_series_id
video.series_anidb_episode_id = anidb_episode_id
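A hypothetical lookup with the client defined above (the ids are illustrative; a real call downloads the anime-lists mapping and queries the AniDB HTTP API):

client = AniDBClient(settings.anidb.api_client, settings.anidb.api_client_ver)
series_id, episode_id = client.get_series_episodes_ids(
    tvdb_series_id=81797, season=1, episode=30)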

View file

@ -0,0 +1,32 @@
# coding=utf-8
# fmt: off
import logging
from app.config import settings
from radarr.sync.utils import get_history_from_radarr_api
from sonarr.sync.utils import get_history_from_sonarr_api
from subliminal import Episode, Movie
refined_providers = {'avistaz', 'cinemaz'}
def refine_from_arr_history(path, video):
if refined_providers.intersection(settings.general.enabled_providers) and video.info_url is None:
refine_info_url(video)
def refine_info_url(video):
if isinstance(video, Episode) and video.sonarrEpisodeId:
history = get_history_from_sonarr_api(settings.sonarr.apikey, video.sonarrEpisodeId)
elif isinstance(video, Movie) and video.radarrId:
history = get_history_from_radarr_api(settings.radarr.apikey, video.radarrId)
else:
return
for grab in history['records']:
# take the latest grab for the episode
if 'nzbInfoUrl' in grab['data'] and grab['data']['nzbInfoUrl']:
video.info_url = grab['data']['nzbInfoUrl']
logging.debug(f'Refining {video} with Info URL: {video.info_url}')
break

View file

@ -26,8 +26,19 @@ def sync_subtitles(video_path, srt_path, srt_lang, forced, percent_score, sonarr
if not use_subsync_threshold or (use_subsync_threshold and percent_score < float(subsync_threshold)):
subsync = SubSyncer()
subsync.sync(video_path=video_path, srt_path=srt_path, srt_lang=srt_lang,
sonarr_series_id=sonarr_series_id, sonarr_episode_id=sonarr_episode_id, radarr_id=radarr_id)
sync_kwargs = {
'video_path': video_path,
'srt_path': srt_path,
'srt_lang': srt_lang,
'max_offset_seconds': str(settings.subsync.max_offset_seconds),
'no_fix_framerate': settings.subsync.no_fix_framerate,
'gss': settings.subsync.gss,
'reference': None,  # None means choose the reference automatically from within the video file
'sonarr_series_id': sonarr_series_id,
'sonarr_episode_id': sonarr_episode_id,
'radarr_id': radarr_id,
}
subsync.sync(**sync_kwargs)
del subsync
gc.collect()
return True

View file

@ -30,12 +30,18 @@ class SubSyncer:
self.vad = 'subs_then_webrtc'
self.log_dir_path = os.path.join(args.config_dir, 'log')
def sync(self, video_path, srt_path, srt_lang, sonarr_series_id=None, sonarr_episode_id=None, radarr_id=None,
reference=None, max_offset_seconds=str(settings.subsync.max_offset_seconds),
no_fix_framerate=settings.subsync.no_fix_framerate, gss=settings.subsync.gss):
def sync(self, video_path, srt_path, srt_lang,
max_offset_seconds, no_fix_framerate, gss, reference=None,
sonarr_series_id=None, sonarr_episode_id=None, radarr_id=None):
self.reference = video_path
self.srtin = srt_path
self.srtout = f'{os.path.splitext(self.srtin)[0]}.synced.srt'
if self.srtin.casefold().endswith('.ass'):
# try to preserve original subtitle style
# ffmpeg will be able to handle this automatically as long as it has the libass filter
extension = '.ass'
else:
extension = '.srt'
self.srtout = f'{os.path.splitext(self.srtin)[0]}.synced{extension}'
self.args = None
ffprobe_exe = get_binary('ffprobe')

View file

@ -110,7 +110,9 @@ def upgrade_subtitles():
episode['seriesTitle'],
'series',
forced_minimum_score=int(episode['score']),
is_upgrade=True))
is_upgrade=True,
previous_subtitles_to_delete=path_mappings.path_replace(
episode['subtitles_path'])))
if result:
if isinstance(result, list) and len(result):
@ -195,7 +197,9 @@ def upgrade_subtitles():
movie['title'],
'movie',
forced_minimum_score=int(movie['score']),
is_upgrade=True))
is_upgrade=True,
previous_subtitles_to_delete=path_mappings.path_replace_movie(
movie['subtitles_path'])))
if result:
if isinstance(result, list) and len(result):
result = result[0]

View file

@ -97,7 +97,6 @@ def _set_forced_providers(pool, also_forced=False, forced_required=False):
pool.provider_configs.update(
{
"podnapisi": {'also_foreign': also_forced, "only_foreign": forced_required},
"subscene": {"only_foreign": forced_required},
"opensubtitles": {'also_foreign': also_forced, "only_foreign": forced_required}
}
)

View file

@ -1,7 +1,6 @@
# coding=utf-8
import os
import io
import sqlite3
import shutil
import logging
@ -12,6 +11,7 @@ from glob import glob
from app.get_args import args
from app.config import settings
from utilities.central import restart_bazarr
def get_backup_path():
@ -33,7 +33,7 @@ def get_restore_path():
def get_backup_files(fullpath=True):
backup_file_pattern = os.path.join(get_backup_path(), 'bazarr_backup_v*.zip')
file_list = glob(backup_file_pattern)
file_list.sort(key=os.path.getmtime)
file_list.sort(key=os.path.getmtime, reverse=True)
if fullpath:
return file_list
else:
@ -133,16 +133,7 @@ def restore_from_backup():
logging.exception(f'Unable to delete {dest_database_path}')
logging.info('Backup restored successfully. Bazarr will restart.')
try:
restart_file = io.open(os.path.join(args.config_dir, "bazarr.restart"), "w", encoding='UTF-8')
except Exception as e:
logging.error(f'BAZARR Cannot create restart file: {repr(e)}')
else:
logging.info('Bazarr is being restarted...')
restart_file.write('')
restart_file.close()
os._exit(0)
restart_bazarr()
elif os.path.isfile(restore_config_path) or os.path.isfile(restore_database_path):
logging.debug('Cannot restore a partial backup. You must have both config and database.')
else:
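The listing change in isolation (a sketch; the directory is hypothetical but the glob pattern matches get_backup_files above): sorting on mtime with reverse=True puts the newest backup first.

import os
from glob import glob

file_list = glob(os.path.join('/config/backup', 'bazarr_backup_v*.zip'))
file_list.sort(key=os.path.getmtime, reverse=True)  # newest backup first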

View file

@ -0,0 +1,61 @@
# coding=utf-8
# only methods that do not cause other modules to be loaded may be specified here;
# for other methods that use settings, etc., use utilities/helper.py
import contextlib
import logging
import os
from pathlib import Path
from literals import ENV_BAZARR_ROOT_DIR, DIR_LOG, ENV_STOPFILE, ENV_RESTARTFILE, EXIT_NORMAL, FILE_LOG
def get_bazarr_dir(sub_dir):
path = os.path.join(os.environ[ENV_BAZARR_ROOT_DIR], sub_dir)
return path
def make_bazarr_dir(sub_dir):
path = get_bazarr_dir(sub_dir)
if not os.path.exists(path):
os.mkdir(path)
def get_log_file_path():
path = os.path.join(get_bazarr_dir(DIR_LOG), FILE_LOG)
return path
def get_stop_file_path():
return os.environ[ENV_STOPFILE]
def get_restart_file_path():
return os.environ[ENV_RESTARTFILE]
def stop_bazarr(status_code=EXIT_NORMAL, exit_main=True):
try:
with open(get_stop_file_path(), 'w', encoding='UTF-8') as file:
# write out status code for final exit
file.write(f'{status_code}\n')
file.close()
except Exception as e:
logging.error(f'BAZARR Cannot create stop file: {repr(e)}')
logging.info('Bazarr is being shutdown...')
if exit_main:
raise SystemExit(status_code)
def restart_bazarr():
try:
Path(get_restart_file_path()).touch()
except Exception as e:
logging.error(f'BAZARR Cannot create restart file: {repr(e)}')
logging.info('Bazarr is being restarted...')
# Wrap the SystemExit for a graceful restart. The SystemExit still performs the cleanup, but the traceback is
# omitted, preventing the exception from propagating to the caller while still terminating the Python process
# with the desired exit code.
with contextlib.suppress(SystemExit):
raise SystemExit(EXIT_NORMAL)
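A usage sketch for the two new helpers (both signal the wrapper process through the marker files named by the environment variables above):

from utilities.central import restart_bazarr, stop_bazarr

restart_bazarr()  # touch the restart file, then exit cleanly
# stop_bazarr(status_code=0, exit_main=False)  # write the stop file but keep running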

View file

@ -16,7 +16,7 @@ def _escape(in_str):
def pp_replace(pp_command, episode, subtitles, language, language_code2, language_code3, episode_language,
episode_language_code2, episode_language_code3, score, subtitle_id, provider, uploader,
episode_language_code2, episode_language_code3, score, subtitle_id, provider, uploader,
release_info, series_id, episode_id):
pp_command = re.sub(r'[\'"]?{{directory}}[\'"]?', _escape(os.path.dirname(episode)), pp_command)
pp_command = re.sub(r'[\'"]?{{episode}}[\'"]?', _escape(episode), pp_command)

View file

@ -270,7 +270,7 @@ def parse_video_metadata(file, file_size, episode_file_id=None, movie_file_id=No
if not os.path.exists(file):
logging.error(f'Video file "{file}" cannot be found for analysis')
return None
# if we have ffprobe available
if ffprobe_path:
try:

View file

@ -0,0 +1,18 @@
# Bazarr dependencies
subliminal_patch
subzero
py-pretty==1 # modified version to support Python 3
# Bazarr modified dependencies
signalr-client-threads==0.0.12 # Modified to work with Sonarr v3. No longer used with v4
Flask-Compress==1.14 # modified to import brotli only if required
# Required-by: signalr-client-threads
sseclient==0.0.27 # Modified to work with Sonarr v3
# Required-by: subliminal_patch
deathbycaptcha # unknown version, only found on gist
git+https://github.com/pannal/libfilebot#egg=libfilebot
git+https://github.com/RobinDavid/pyADS.git@28a2f6dbfb357f85b2c2f49add770b336e88840d#egg=pyads
py7zr==0.7.0 # modified to prevent importing of modules that can't be vendored
subliminal==2.1.0 # modified specifically for Bazarr

View file

@ -0,0 +1 @@
__version__ = "1.14"

View file

@ -63,9 +63,14 @@ class Compress(object):
def init_app(self, app):
defaults = [
('COMPRESS_MIMETYPES', ['text/html', 'text/css', 'text/xml',
'application/json',
'application/javascript']),
('COMPRESS_MIMETYPES', [
'application/javascript', # Obsolete (RFC 9239)
'application/json',
'text/css',
'text/html',
'text/javascript',
'text/xml',
]),
('COMPRESS_LEVEL', 6),
('COMPRESS_BR_LEVEL', 4),
('COMPRESS_BR_MODE', 0),
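The defaults can still be overridden per application through standard Flask configuration; a minimal sketch using the public Flask-Compress entry points:

from flask import Flask
from flask_compress import Compress

app = Flask(__name__)
app.config['COMPRESS_MIMETYPES'] = ['text/html', 'application/json']  # narrow the default list
Compress(app)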

View file

@ -50,7 +50,7 @@ def default_xattr(fn):
XATTR_MAP = {
"default": (
default_xattr,
lambda result: re.search('(?um)(net\.filebot\.filename(?=="|: )[=:" ]+|Attribute.+:\s)([^"\n\r\0]+)',
lambda result: re.search(r'(?um)(net\.filebot\.filename(?=="|: )[=:" ]+|Attribute.+:\s)([^"\n\r\0]+)',
result).group(2)
),
# "darwin": (
@ -60,7 +60,7 @@ XATTR_MAP = {
# ),
"darwin": (
lambda fn: ["filebot", "-script", "fn:xattr", fn],
lambda result: re.search('(?um)(net\.filebot\.filename(?=="|: )[=:" ]+|Attribute.+:\s)([^"\n\r\0]+)',
lambda result: re.search(r'(?um)(net\.filebot\.filename(?=="|: )[=:" ]+|Attribute.+:\s)([^"\n\r\0]+)',
result).group(2)
),
"win32": (

View file

@ -0,0 +1,27 @@
# -*- coding: utf-8 -*-
from babelfish import LanguageReverseConverter
from ..exceptions import ConfigurationError
class LegendasTVConverter(LanguageReverseConverter):
def __init__(self):
self.from_legendastv = {1: ('por', 'BR'), 2: ('eng',), 3: ('spa',), 4: ('fra',), 5: ('deu',), 6: ('jpn',),
7: ('dan',), 8: ('nor',), 9: ('swe',), 10: ('por',), 11: ('ara',), 12: ('ces',),
13: ('zho',), 14: ('kor',), 15: ('bul',), 16: ('ita',), 17: ('pol',)}
self.to_legendastv = {v: k for k, v in self.from_legendastv.items()}
self.codes = set(self.from_legendastv.keys())
def convert(self, alpha3, country=None, script=None):
if (alpha3, country) in self.to_legendastv:
return self.to_legendastv[(alpha3, country)]
if (alpha3,) in self.to_legendastv:
return self.to_legendastv[(alpha3,)]
raise ConfigurationError('Unsupported language code for legendastv: %s, %s, %s' % (alpha3, country, script))
def reverse(self, legendastv):
if legendastv in self.from_legendastv:
return self.from_legendastv[legendastv]
raise ConfigurationError('Unsupported language number for legendastv: %s' % legendastv)
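A round-trip sketch using the converter above:

conv = LegendasTVConverter()
print(conv.convert('por', 'BR'))  # 1
print(conv.reverse(2))  # ('eng',)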

View file

@ -591,7 +591,7 @@ def scan_videos(path, age=None, archives=True):
def refine(video, episode_refiners=None, movie_refiners=None, **kwargs):
"""Refine a video using :ref:`refiners`.
r"""Refine a video using :ref:`refiners`.
.. note::
@ -619,7 +619,7 @@ def refine(video, episode_refiners=None, movie_refiners=None, **kwargs):
def list_subtitles(videos, languages, pool_class=ProviderPool, **kwargs):
"""List subtitles.
r"""List subtitles.
The `videos` must pass the `languages` check of :func:`check_video`.
@ -660,7 +660,7 @@ def list_subtitles(videos, languages, pool_class=ProviderPool, **kwargs):
def download_subtitles(subtitles, pool_class=ProviderPool, **kwargs):
"""Download :attr:`~subliminal.subtitle.Subtitle.content` of `subtitles`.
r"""Download :attr:`~subliminal.subtitle.Subtitle.content` of `subtitles`.
:param subtitles: subtitles to download.
:type subtitles: list of :class:`~subliminal.subtitle.Subtitle`
@ -677,7 +677,7 @@ def download_subtitles(subtitles, pool_class=ProviderPool, **kwargs):
def download_best_subtitles(videos, languages, min_score=0, hearing_impaired=False, only_one=False, compute_score=None,
pool_class=ProviderPool, **kwargs):
"""List and download the best matching subtitles.
r"""List and download the best matching subtitles.
The `videos` must pass the `languages` and `undefined` (`only_one`) checks of :func:`check_video`.
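These docstrings become raw strings, presumably because the full versions embed backslash escapes (for example Sphinx-style \*\*kwargs) that newer Python versions flag; a minimal sketch of the pattern:

def list_best(videos):
    r"""List the best candidates.

    A sequence like \*\*kwargs in a plain docstring triggers an
    invalid-escape SyntaxWarning on Python 3.12+; the r prefix avoids it.
    """
    return videos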

Some files were not shown because too many files have changed in this diff